From d6e9cb72ed096d655206b3f55417b3a249eabf72 Mon Sep 17 00:00:00 2001
From: Norbert
Date: Mon, 18 Nov 2024 08:59:34 +0100
Subject: [PATCH] reupload

---
 D1.png | Bin 0 -> 1404 bytes
 D2.png | Bin 0 -> 1478 bytes
 H1.png | Bin 0 -> 1318 bytes
 H2.png | Bin 0 -> 1428 bytes
 H3.png | Bin 0 -> 1455 bytes
 H4.png | Bin 0 -> 1353 bytes
 H5.png | Bin 0 -> 1429 bytes
 __pycache__/ids.cpython-312.pyc | Bin 0 -> 22344 bytes
 __pycache__/ids.cpython-313.pyc | Bin 0 -> 22620 bytes
 env/bin/activate | 76 +
 env/bin/activate.csh | 37 +
 env/bin/activate.fish | 75 +
 env/bin/chardetect | 12 +
 env/bin/easy_install | 11 +
 env/bin/easy_install-3.6 | 11 +
 env/bin/pip | 12 +
 env/bin/pip3 | 12 +
 env/bin/pip3.6 | 12 +
 env/bin/python | 1 +
 env/bin/python 2 | 1 +
 env/bin/python3 | 1 +
 env/bin/python3 2 | 1 +
 env/bin/twitter | 12 +
 env/bin/twitter-archiver | 12 +
 env/bin/twitter-follow | 12 +
 env/bin/twitter-log | 12 +
 env/bin/twitter-stream-example | 12 +
 env/bin/twitterbot | 12 +
 env/bin/wheel | 12 +
 .../PySocks-1.6.8.dist-info/INSTALLER | 1 +
 .../PySocks-1.6.8.dist-info/METADATA | 17 +
 .../PySocks-1.6.8.dist-info/RECORD | 9 +
 .../PySocks-1.6.8.dist-info/WHEEL | 5 +
 .../PySocks-1.6.8.dist-info/top_level.txt | 2 +
 .../DESCRIPTION.rst | 48 +
 .../certifi-2018.8.24.dist-info/INSTALLER | 1 +
 .../certifi-2018.8.24.dist-info/LICENSE.txt | 21 +
 .../certifi-2018.8.24.dist-info/METADATA | 71 +
 .../certifi-2018.8.24.dist-info/RECORD | 15 +
 .../certifi-2018.8.24.dist-info/WHEEL | 6 +
 .../certifi-2018.8.24.dist-info/metadata.json | 1 +
 .../certifi-2018.8.24.dist-info/top_level.txt | 1 +
 .../site-packages/certifi/cacert.pem | 4300 +++++++++
 .../python3.6/site-packages/certifi/core.py | 37 +
 .../chardet-3.0.4.dist-info/DESCRIPTION.rst | 70 +
 .../chardet-3.0.4.dist-info/INSTALLER | 1 +
 .../chardet-3.0.4.dist-info/METADATA | 96 +
 .../chardet-3.0.4.dist-info/RECORD | 91 +
 .../chardet-3.0.4.dist-info/WHEEL | 6 +
 .../chardet-3.0.4.dist-info/entry_points.txt | 3 +
 .../chardet-3.0.4.dist-info/metadata.json | 1 +
 .../chardet-3.0.4.dist-info/top_level.txt | 1 +
 .../site-packages/chardet/big5freq.py | 386 +
 .../site-packages/chardet/big5prober.py | 47 +
 .../site-packages/chardet/chardistribution.py | 233 +
 .../chardet/charsetgroupprober.py | 106 +
 .../site-packages/chardet/charsetprober.py | 145 +
 .../site-packages/chardet/cli/chardetect.py | 85 +
 .../chardet/codingstatemachine.py | 88 +
 .../python3.6/site-packages/chardet/compat.py | 34 +
 .../site-packages/chardet/cp949prober.py | 49 +
 .../python3.6/site-packages/chardet/enums.py | 76 +
 .../site-packages/chardet/escprober.py | 101 +
 .../python3.6/site-packages/chardet/escsm.py | 246 +
 .../site-packages/chardet/eucjpprober.py | 92 +
 .../site-packages/chardet/euckrfreq.py | 195 +
 .../site-packages/chardet/euckrprober.py | 47 +
 .../site-packages/chardet/euctwfreq.py | 387 +
 .../site-packages/chardet/euctwprober.py | 46 +
 .../site-packages/chardet/gb2312freq.py | 283 +
 .../site-packages/chardet/gb2312prober.py | 46 +
 .../site-packages/chardet/hebrewprober.py | 292 +
 .../site-packages/chardet/jisfreq.py | 325 +
 .../python3.6/site-packages/chardet/jpcntx.py | 233 +
 .../chardet/langbulgarianmodel.py | 228 +
 .../chardet/langcyrillicmodel.py | 333 +
 .../site-packages/chardet/langgreekmodel.py | 225 +
 .../site-packages/chardet/langhebrewmodel.py | 200 +
 .../chardet/langhungarianmodel.py | 225 +
 .../site-packages/chardet/langthaimodel.py | 199 +
 .../site-packages/chardet/langturkishmodel.py | 193 +
 .../site-packages/chardet/latin1prober.py | 145 +
 .../site-packages/chardet/mbcharsetprober.py | 91 +
.../site-packages/chardet/mbcsgroupprober.py | 54 + .../python3.6/site-packages/chardet/mbcssm.py | 572 ++ .../site-packages/chardet/sbcharsetprober.py | 132 + .../site-packages/chardet/sbcsgroupprober.py | 73 + .../site-packages/chardet/sjisprober.py | 92 + .../chardet/universaldetector.py | 286 + .../site-packages/chardet/utf8prober.py | 82 + .../site-packages/chardet/version.py | 9 + .../site-packages/dateutil/_common.py | 34 + .../site-packages/dateutil/_version.py | 10 + .../site-packages/dateutil/easter.py | 89 + .../site-packages/dateutil/parser.py | 1374 +++ .../site-packages/dateutil/relativedelta.py | 549 ++ .../python3.6/site-packages/dateutil/rrule.py | 1610 ++++ .../site-packages/dateutil/tz/_common.py | 394 + .../python3.6/site-packages/dateutil/tz/tz.py | 1511 +++ .../site-packages/dateutil/tz/win.py | 332 + .../python3.6/site-packages/dateutil/tzwin.py | 2 + .../zoneinfo/dateutil-zoneinfo.tar.gz | Bin 0 -> 138881 bytes .../dateutil/zoneinfo/rebuild.py | 52 + .../python3.6/site-packages/easy_install.py | 5 + .../python3.6/site-packages/examples/oauth.py | 33 + .../site-packages/examples/streaming.py | 35 + .../httplib2-0.10.3-py3.6.egg-info/PKG-INFO | 63 + .../SOURCES.txt | 28 + .../dependency_links.txt | 1 + .../installed-files.txt | 9 + .../top_level.txt | 1 + .../site-packages/httplib2/cacerts.txt | 2167 +++++ .../site-packages/httplib2/iri2uri.py | 110 + .../idna-2.7.dist-info/INSTALLER | 1 + .../idna-2.7.dist-info/LICENSE.txt | 80 + .../site-packages/idna-2.7.dist-info/METADATA | 239 + .../site-packages/idna-2.7.dist-info/RECORD | 22 + .../site-packages/idna-2.7.dist-info/WHEEL | 6 + .../idna-2.7.dist-info/top_level.txt | 1 + env/lib/python3.6/site-packages/idna/codec.py | 118 + .../python3.6/site-packages/idna/compat.py | 12 + env/lib/python3.6/site-packages/idna/core.py | 399 + .../python3.6/site-packages/idna/idnadata.py | 1893 ++++ .../python3.6/site-packages/idna/intranges.py | 53 + .../site-packages/idna/package_data.py | 2 + .../python3.6/site-packages/idna/uts46data.py | 8179 +++++++++++++++++ .../oauthlib-2.1.0.dist-info/INSTALLER | 1 + .../oauthlib-2.1.0.dist-info/LICENSE.txt | 27 + .../oauthlib-2.1.0.dist-info/METADATA | 167 + .../oauthlib-2.1.0.dist-info/RECORD | 102 + .../oauthlib-2.1.0.dist-info/WHEEL | 6 + .../oauthlib-2.1.0.dist-info/top_level.txt | 1 + .../site-packages/oauthlib/common.py | 465 + .../oauth1/rfc5849/endpoints/access_token.py | 215 + .../oauth1/rfc5849/endpoints/authorization.py | 162 + .../oauthlib/oauth1/rfc5849/endpoints/base.py | 213 + .../rfc5849/endpoints/pre_configured.py | 14 + .../oauth1/rfc5849/endpoints/request_token.py | 209 + .../oauth1/rfc5849/endpoints/resource.py | 165 + .../rfc5849/endpoints/signature_only.py | 84 + .../oauthlib/oauth1/rfc5849/errors.py | 79 + .../oauthlib/oauth1/rfc5849/parameters.py | 139 + .../oauth1/rfc5849/request_validator.py | 833 ++ .../oauthlib/oauth1/rfc5849/signature.py | 627 ++ .../oauthlib/oauth1/rfc5849/utils.py | 90 + .../rfc6749/clients/backend_application.py | 60 + .../oauthlib/oauth2/rfc6749/clients/base.py | 499 + .../rfc6749/clients/legacy_application.py | 72 + .../rfc6749/clients/mobile_application.py | 172 + .../rfc6749/clients/service_application.py | 175 + .../oauth2/rfc6749/clients/web_application.py | 176 + .../oauth2/rfc6749/endpoints/authorization.py | 117 + .../oauthlib/oauth2/rfc6749/endpoints/base.py | 65 + .../rfc6749/endpoints/pre_configured.py | 223 + .../oauth2/rfc6749/endpoints/resource.py | 87 + .../oauth2/rfc6749/endpoints/revocation.py | 134 + 
.../oauth2/rfc6749/endpoints/token.py | 117 + .../oauthlib/oauth2/rfc6749/errors.py | 415 + .../rfc6749/grant_types/authorization_code.py | 431 + .../oauth2/rfc6749/grant_types/base.py | 185 + .../rfc6749/grant_types/client_credentials.py | 120 + .../oauth2/rfc6749/grant_types/implicit.py | 382 + .../rfc6749/grant_types/openid_connect.py | 390 + .../rfc6749/grant_types/refresh_token.py | 131 + .../resource_owner_password_credentials.py | 197 + .../oauthlib/oauth2/rfc6749/parameters.py | 409 + .../oauth2/rfc6749/request_validator.py | 573 ++ .../oauthlib/oauth2/rfc6749/tokens.py | 305 + .../oauthlib/oauth2/rfc6749/utils.py | 94 + .../site-packages/oauthlib/signals.py | 41 + .../site-packages/oauthlib/uri_validate.py | 216 + .../pip-18.0.dist-info/INSTALLER | 1 + .../pip-18.0.dist-info/LICENSE.txt | 20 + .../site-packages/pip-18.0.dist-info/METADATA | 69 + .../site-packages/pip-18.0.dist-info/RECORD | 582 ++ .../site-packages/pip-18.0.dist-info/WHEEL | 6 + .../pip-18.0.dist-info/entry_points.txt | 5 + .../pip-18.0.dist-info/top_level.txt | 1 + .../pip/_internal/basecommand.py | 274 + .../site-packages/pip/_internal/baseparser.py | 240 + .../site-packages/pip/_internal/build_env.py | 126 + .../site-packages/pip/_internal/cache.py | 202 + .../site-packages/pip/_internal/cmdoptions.py | 619 ++ .../pip/_internal/commands/check.py | 41 + .../pip/_internal/commands/completion.py | 94 + .../pip/_internal/commands/configuration.py | 227 + .../pip/_internal/commands/download.py | 236 + .../pip/_internal/commands/freeze.py | 96 + .../pip/_internal/commands/hash.py | 57 + .../pip/_internal/commands/help.py | 36 + .../pip/_internal/commands/install.py | 516 ++ .../pip/_internal/commands/list.py | 304 + .../pip/_internal/commands/search.py | 135 + .../pip/_internal/commands/show.py | 168 + .../pip/_internal/commands/uninstall.py | 77 + .../pip/_internal/commands/wheel.py | 183 + .../site-packages/pip/_internal/compat.py | 235 + .../pip/_internal/configuration.py | 380 + .../site-packages/pip/_internal/download.py | 921 ++ .../site-packages/pip/_internal/exceptions.py | 249 + .../site-packages/pip/_internal/index.py | 1127 +++ .../site-packages/pip/_internal/locations.py | 194 + .../pip/_internal/models/index.py | 15 + .../pip/_internal/operations/check.py | 148 + .../pip/_internal/operations/freeze.py | 253 + .../pip/_internal/operations/prepare.py | 355 + .../site-packages/pip/_internal/pep425tags.py | 317 + .../pip/_internal/req/req_file.py | 338 + .../pip/_internal/req/req_install.py | 1142 +++ .../pip/_internal/req/req_set.py | 161 + .../pip/_internal/req/req_tracker.py | 76 + .../pip/_internal/req/req_uninstall.py | 457 + .../site-packages/pip/_internal/resolve.py | 353 + .../pip/_internal/status_codes.py | 8 + .../pip/_internal/utils/appdirs.py | 258 + .../pip/_internal/utils/deprecation.py | 89 + .../pip/_internal/utils/encoding.py | 33 + .../pip/_internal/utils/filesystem.py | 28 + .../pip/_internal/utils/glibc.py | 84 + .../pip/_internal/utils/hashes.py | 94 + .../pip/_internal/utils/logging.py | 225 + .../site-packages/pip/_internal/utils/misc.py | 899 ++ .../pip/_internal/utils/outdated.py | 145 + .../pip/_internal/utils/packaging.py | 70 + .../pip/_internal/utils/setuptools_build.py | 8 + .../pip/_internal/utils/temp_dir.py | 82 + .../pip/_internal/utils/typing.py | 29 + .../site-packages/pip/_internal/utils/ui.py | 421 + .../site-packages/pip/_internal/vcs/bazaar.py | 110 + .../site-packages/pip/_internal/vcs/git.py | 309 + .../pip/_internal/vcs/mercurial.py | 102 + 
.../pip/_internal/vcs/subversion.py | 254 + .../site-packages/pip/_internal/wheel.py | 827 ++ .../site-packages/pip/_vendor/appdirs.py | 604 ++ .../pip/_vendor/cachecontrol/_cmd.py | 57 + .../pip/_vendor/cachecontrol/adapter.py | 133 + .../pip/_vendor/cachecontrol/cache.py | 39 + .../_vendor/cachecontrol/caches/file_cache.py | 146 + .../cachecontrol/caches/redis_cache.py | 33 + .../pip/_vendor/cachecontrol/compat.py | 29 + .../pip/_vendor/cachecontrol/controller.py | 367 + .../pip/_vendor/cachecontrol/filewrapper.py | 80 + .../pip/_vendor/cachecontrol/heuristics.py | 135 + .../pip/_vendor/cachecontrol/serialize.py | 186 + .../pip/_vendor/cachecontrol/wrapper.py | 29 + .../pip/_vendor/certifi/cacert.pem | 4400 +++++++++ .../site-packages/pip/_vendor/certifi/core.py | 37 + .../pip/_vendor/chardet/big5freq.py | 386 + .../pip/_vendor/chardet/big5prober.py | 47 + .../pip/_vendor/chardet/chardistribution.py | 233 + .../pip/_vendor/chardet/charsetgroupprober.py | 106 + .../pip/_vendor/chardet/charsetprober.py | 145 + .../pip/_vendor/chardet/cli/chardetect.py | 85 + .../pip/_vendor/chardet/codingstatemachine.py | 88 + .../pip/_vendor/chardet/compat.py | 34 + .../pip/_vendor/chardet/cp949prober.py | 49 + .../pip/_vendor/chardet/enums.py | 76 + .../pip/_vendor/chardet/escprober.py | 101 + .../pip/_vendor/chardet/escsm.py | 246 + .../pip/_vendor/chardet/eucjpprober.py | 92 + .../pip/_vendor/chardet/euckrfreq.py | 195 + .../pip/_vendor/chardet/euckrprober.py | 47 + .../pip/_vendor/chardet/euctwfreq.py | 387 + .../pip/_vendor/chardet/euctwprober.py | 46 + .../pip/_vendor/chardet/gb2312freq.py | 283 + .../pip/_vendor/chardet/gb2312prober.py | 46 + .../pip/_vendor/chardet/hebrewprober.py | 292 + .../pip/_vendor/chardet/jisfreq.py | 325 + .../pip/_vendor/chardet/jpcntx.py | 233 + .../pip/_vendor/chardet/langbulgarianmodel.py | 228 + .../pip/_vendor/chardet/langcyrillicmodel.py | 333 + .../pip/_vendor/chardet/langgreekmodel.py | 225 + .../pip/_vendor/chardet/langhebrewmodel.py | 200 + .../pip/_vendor/chardet/langhungarianmodel.py | 225 + .../pip/_vendor/chardet/langthaimodel.py | 199 + .../pip/_vendor/chardet/langturkishmodel.py | 193 + .../pip/_vendor/chardet/latin1prober.py | 145 + .../pip/_vendor/chardet/mbcharsetprober.py | 91 + .../pip/_vendor/chardet/mbcsgroupprober.py | 54 + .../pip/_vendor/chardet/mbcssm.py | 572 ++ .../pip/_vendor/chardet/sbcharsetprober.py | 132 + .../pip/_vendor/chardet/sbcsgroupprober.py | 73 + .../pip/_vendor/chardet/sjisprober.py | 92 + .../pip/_vendor/chardet/universaldetector.py | 286 + .../pip/_vendor/chardet/utf8prober.py | 82 + .../pip/_vendor/chardet/version.py | 9 + .../pip/_vendor/colorama/ansi.py | 102 + .../pip/_vendor/colorama/ansitowin32.py | 236 + .../pip/_vendor/colorama/initialise.py | 82 + .../pip/_vendor/colorama/win32.py | 156 + .../pip/_vendor/colorama/winterm.py | 162 + .../pip/_vendor/distlib/_backport/misc.py | 41 + .../pip/_vendor/distlib/_backport/shutil.py | 761 ++ .../_vendor/distlib/_backport/sysconfig.cfg | 84 + .../_vendor/distlib/_backport/sysconfig.py | 788 ++ .../pip/_vendor/distlib/_backport/tarfile.py | 2607 ++++++ .../pip/_vendor/distlib/compat.py | 1120 +++ .../pip/_vendor/distlib/database.py | 1336 +++ .../pip/_vendor/distlib/index.py | 516 ++ .../pip/_vendor/distlib/locators.py | 1292 +++ .../pip/_vendor/distlib/manifest.py | 393 + .../pip/_vendor/distlib/markers.py | 131 + .../pip/_vendor/distlib/metadata.py | 1091 +++ .../pip/_vendor/distlib/resources.py | 355 + .../pip/_vendor/distlib/scripts.py | 415 + 
.../site-packages/pip/_vendor/distlib/t32.exe | Bin 0 -> 92672 bytes .../site-packages/pip/_vendor/distlib/t64.exe | Bin 0 -> 102400 bytes .../site-packages/pip/_vendor/distlib/util.py | 1755 ++++ .../pip/_vendor/distlib/version.py | 736 ++ .../site-packages/pip/_vendor/distlib/w32.exe | Bin 0 -> 89088 bytes .../site-packages/pip/_vendor/distlib/w64.exe | Bin 0 -> 99328 bytes .../pip/_vendor/distlib/wheel.py | 984 ++ .../site-packages/pip/_vendor/distro.py | 1197 +++ .../pip/_vendor/html5lib/_ihatexml.py | 288 + .../pip/_vendor/html5lib/_inputstream.py | 923 ++ .../pip/_vendor/html5lib/_tokenizer.py | 1721 ++++ .../pip/_vendor/html5lib/_trie/_base.py | 37 + .../pip/_vendor/html5lib/_trie/datrie.py | 44 + .../pip/_vendor/html5lib/_trie/py.py | 67 + .../pip/_vendor/html5lib/_utils.py | 124 + .../pip/_vendor/html5lib/constants.py | 2947 ++++++ .../filters/alphabeticalattributes.py | 29 + .../pip/_vendor/html5lib/filters/base.py | 12 + .../html5lib/filters/inject_meta_charset.py | 73 + .../pip/_vendor/html5lib/filters/lint.py | 93 + .../_vendor/html5lib/filters/optionaltags.py | 207 + .../pip/_vendor/html5lib/filters/sanitizer.py | 896 ++ .../_vendor/html5lib/filters/whitespace.py | 38 + .../pip/_vendor/html5lib/html5parser.py | 2791 ++++++ .../pip/_vendor/html5lib/serializer.py | 409 + .../_vendor/html5lib/treeadapters/genshi.py | 54 + .../pip/_vendor/html5lib/treeadapters/sax.py | 50 + .../pip/_vendor/html5lib/treebuilders/base.py | 417 + .../pip/_vendor/html5lib/treebuilders/dom.py | 236 + .../_vendor/html5lib/treebuilders/etree.py | 340 + .../html5lib/treebuilders/etree_lxml.py | 366 + .../pip/_vendor/html5lib/treewalkers/base.py | 252 + .../pip/_vendor/html5lib/treewalkers/dom.py | 43 + .../pip/_vendor/html5lib/treewalkers/etree.py | 130 + .../html5lib/treewalkers/etree_lxml.py | 213 + .../_vendor/html5lib/treewalkers/genshi.py | 69 + .../site-packages/pip/_vendor/idna/codec.py | 118 + .../site-packages/pip/_vendor/idna/compat.py | 12 + .../site-packages/pip/_vendor/idna/core.py | 399 + .../pip/_vendor/idna/idnadata.py | 1893 ++++ .../pip/_vendor/idna/intranges.py | 53 + .../pip/_vendor/idna/package_data.py | 2 + .../pip/_vendor/idna/uts46data.py | 8179 +++++++++++++++++ .../site-packages/pip/_vendor/ipaddress.py | 2419 +++++ .../pip/_vendor/lockfile/linklockfile.py | 73 + .../pip/_vendor/lockfile/mkdirlockfile.py | 84 + .../pip/_vendor/lockfile/pidlockfile.py | 190 + .../pip/_vendor/lockfile/sqlitelockfile.py | 156 + .../pip/_vendor/lockfile/symlinklockfile.py | 70 + .../pip/_vendor/msgpack/_version.py | 1 + .../pip/_vendor/msgpack/exceptions.py | 41 + .../pip/_vendor/msgpack/fallback.py | 977 ++ .../pip/_vendor/packaging/_compat.py | 30 + .../pip/_vendor/packaging/_structures.py | 70 + .../pip/_vendor/packaging/markers.py | 301 + .../pip/_vendor/packaging/requirements.py | 130 + .../pip/_vendor/packaging/specifiers.py | 774 ++ .../pip/_vendor/packaging/utils.py | 63 + .../pip/_vendor/packaging/version.py | 441 + .../pip/_vendor/pkg_resources/py31compat.py | 22 + .../site-packages/pip/_vendor/progress/bar.py | 94 + .../pip/_vendor/progress/counter.py | 48 + .../pip/_vendor/progress/helpers.py | 91 + .../pip/_vendor/progress/spinner.py | 44 + .../site-packages/pip/_vendor/pyparsing.py | 5720 ++++++++++++ .../site-packages/pip/_vendor/pytoml/core.py | 13 + .../pip/_vendor/pytoml/parser.py | 374 + .../pip/_vendor/pytoml/writer.py | 127 + .../pip/_vendor/requests/_internal_utils.py | 42 + .../pip/_vendor/requests/adapters.py | 530 ++ .../site-packages/pip/_vendor/requests/api.py | 152 + 
.../pip/_vendor/requests/auth.py | 305 + .../pip/_vendor/requests/certs.py | 18 + .../pip/_vendor/requests/compat.py | 75 + .../pip/_vendor/requests/cookies.py | 546 ++ .../pip/_vendor/requests/exceptions.py | 126 + .../pip/_vendor/requests/help.py | 120 + .../pip/_vendor/requests/hooks.py | 34 + .../pip/_vendor/requests/models.py | 952 ++ .../pip/_vendor/requests/packages.py | 16 + .../pip/_vendor/requests/sessions.py | 741 ++ .../pip/_vendor/requests/status_codes.py | 120 + .../pip/_vendor/requests/structures.py | 103 + .../pip/_vendor/requests/utils.py | 976 ++ .../site-packages/pip/_vendor/retrying.py | 267 + .../site-packages/pip/_vendor/six.py | 891 ++ .../pip/_vendor/urllib3/_collections.py | 332 + .../pip/_vendor/urllib3/connection.py | 403 + .../pip/_vendor/urllib3/connectionpool.py | 906 ++ .../contrib/_securetransport/bindings.py | 593 ++ .../contrib/_securetransport/low_level.py | 346 + .../pip/_vendor/urllib3/contrib/appengine.py | 305 + .../pip/_vendor/urllib3/contrib/ntlmpool.py | 112 + .../pip/_vendor/urllib3/contrib/pyopenssl.py | 457 + .../urllib3/contrib/securetransport.py | 804 ++ .../pip/_vendor/urllib3/contrib/socks.py | 192 + .../pip/_vendor/urllib3/exceptions.py | 246 + .../pip/_vendor/urllib3/fields.py | 178 + .../pip/_vendor/urllib3/filepost.py | 98 + .../urllib3/packages/backports/makefile.py | 53 + .../_vendor/urllib3/packages/ordered_dict.py | 259 + .../pip/_vendor/urllib3/packages/six.py | 868 ++ .../ssl_match_hostname/_implementation.py | 157 + .../pip/_vendor/urllib3/poolmanager.py | 449 + .../pip/_vendor/urllib3/request.py | 150 + .../pip/_vendor/urllib3/response.py | 676 ++ .../pip/_vendor/urllib3/util/connection.py | 126 + .../pip/_vendor/urllib3/util/queue.py | 21 + .../pip/_vendor/urllib3/util/request.py | 118 + .../pip/_vendor/urllib3/util/response.py | 81 + .../pip/_vendor/urllib3/util/retry.py | 411 + .../pip/_vendor/urllib3/util/ssl_.py | 396 + .../pip/_vendor/urllib3/util/timeout.py | 242 + .../pip/_vendor/urllib3/util/url.py | 230 + .../pip/_vendor/urllib3/util/wait.py | 153 + .../pip/_vendor/webencodings/labels.py | 231 + .../pip/_vendor/webencodings/mklabels.py | 59 + .../pip/_vendor/webencodings/tests.py | 153 + .../_vendor/webencodings/x_user_defined.py | 325 + .../pkg_resources/_vendor/appdirs.py | 552 ++ .../_vendor/packaging/_compat.py | 30 + .../_vendor/packaging/_structures.py | 68 + .../_vendor/packaging/markers.py | 287 + .../_vendor/packaging/requirements.py | 127 + .../_vendor/packaging/specifiers.py | 774 ++ .../pkg_resources/_vendor/packaging/utils.py | 14 + .../_vendor/packaging/version.py | 393 + .../pkg_resources/_vendor/pyparsing.py | 5696 ++++++++++++ .../pkg_resources/_vendor/six.py | 868 ++ .../DESCRIPTION.rst | 5 + .../python_dateutil-2.6.1.dist-info/INSTALLER | 1 + .../python_dateutil-2.6.1.dist-info/METADATA | 31 + .../python_dateutil-2.6.1.dist-info/RECORD | 37 + .../python_dateutil-2.6.1.dist-info/WHEEL | 6 + .../metadata.json | 1 + .../top_level.txt | 1 + .../python_dateutil-2.6.1.dist-info/zip-safe | 1 + .../pytz-2018.5.dist-info/DESCRIPTION.rst | 589 ++ .../pytz-2018.5.dist-info/INSTALLER | 1 + .../pytz-2018.5.dist-info/LICENSE.txt | 19 + .../pytz-2018.5.dist-info/METADATA | 623 ++ .../pytz-2018.5.dist-info/RECORD | 619 ++ .../site-packages/pytz-2018.5.dist-info/WHEEL | 6 + .../pytz-2018.5.dist-info/metadata.json | 1 + .../pytz-2018.5.dist-info/top_level.txt | 1 + .../pytz-2018.5.dist-info/zip-safe | 1 + .../site-packages/pytz/exceptions.py | 48 + env/lib/python3.6/site-packages/pytz/lazy.py | 172 + 
.../python3.6/site-packages/pytz/reference.py | 140 + .../python3.6/site-packages/pytz/tzfile.py | 134 + .../python3.6/site-packages/pytz/tzinfo.py | 577 ++ .../pytz/zoneinfo/Africa/Abidjan | Bin 0 -> 170 bytes .../site-packages/pytz/zoneinfo/Africa/Accra | Bin 0 -> 842 bytes .../pytz/zoneinfo/Africa/Addis_Ababa | Bin 0 -> 285 bytes .../pytz/zoneinfo/Africa/Algiers | Bin 0 -> 760 bytes .../site-packages/pytz/zoneinfo/Africa/Asmara | Bin 0 -> 285 bytes .../site-packages/pytz/zoneinfo/Africa/Asmera | Bin 0 -> 285 bytes .../site-packages/pytz/zoneinfo/Africa/Bamako | Bin 0 -> 170 bytes .../site-packages/pytz/zoneinfo/Africa/Bangui | Bin 0 -> 171 bytes .../site-packages/pytz/zoneinfo/Africa/Banjul | Bin 0 -> 170 bytes .../site-packages/pytz/zoneinfo/Africa/Bissau | Bin 0 -> 208 bytes .../pytz/zoneinfo/Africa/Blantyre | Bin 0 -> 171 bytes .../pytz/zoneinfo/Africa/Brazzaville | Bin 0 -> 171 bytes .../pytz/zoneinfo/Africa/Bujumbura | Bin 0 -> 171 bytes .../site-packages/pytz/zoneinfo/Africa/Cairo | Bin 0 -> 1972 bytes .../pytz/zoneinfo/Africa/Casablanca | Bin 0 -> 1643 bytes .../site-packages/pytz/zoneinfo/Africa/Ceuta | Bin 0 -> 2059 bytes .../pytz/zoneinfo/Africa/Conakry | Bin 0 -> 170 bytes .../site-packages/pytz/zoneinfo/Africa/Dakar | Bin 0 -> 170 bytes .../pytz/zoneinfo/Africa/Dar_es_Salaam | Bin 0 -> 285 bytes .../pytz/zoneinfo/Africa/Djibouti | Bin 0 -> 285 bytes .../site-packages/pytz/zoneinfo/Africa/Douala | Bin 0 -> 171 bytes .../pytz/zoneinfo/Africa/El_Aaiun | Bin 0 -> 1473 bytes .../pytz/zoneinfo/Africa/Freetown | Bin 0 -> 170 bytes .../pytz/zoneinfo/Africa/Gaborone | Bin 0 -> 171 bytes .../site-packages/pytz/zoneinfo/Africa/Harare | Bin 0 -> 171 bytes .../pytz/zoneinfo/Africa/Johannesburg | Bin 0 -> 271 bytes .../site-packages/pytz/zoneinfo/Africa/Juba | Bin 0 -> 683 bytes .../pytz/zoneinfo/Africa/Kampala | Bin 0 -> 285 bytes .../pytz/zoneinfo/Africa/Khartoum | Bin 0 -> 713 bytes .../site-packages/pytz/zoneinfo/Africa/Kigali | Bin 0 -> 171 bytes .../pytz/zoneinfo/Africa/Kinshasa | Bin 0 -> 171 bytes .../site-packages/pytz/zoneinfo/Africa/Lagos | Bin 0 -> 171 bytes .../pytz/zoneinfo/Africa/Libreville | Bin 0 -> 171 bytes .../site-packages/pytz/zoneinfo/Africa/Lome | Bin 0 -> 170 bytes .../site-packages/pytz/zoneinfo/Africa/Luanda | Bin 0 -> 171 bytes .../pytz/zoneinfo/Africa/Lubumbashi | Bin 0 -> 171 bytes .../site-packages/pytz/zoneinfo/Africa/Lusaka | Bin 0 -> 171 bytes .../site-packages/pytz/zoneinfo/Africa/Malabo | Bin 0 -> 171 bytes .../site-packages/pytz/zoneinfo/Africa/Maputo | Bin 0 -> 171 bytes .../site-packages/pytz/zoneinfo/Africa/Maseru | Bin 0 -> 271 bytes .../pytz/zoneinfo/Africa/Mbabane | Bin 0 -> 271 bytes .../pytz/zoneinfo/Africa/Mogadishu | Bin 0 -> 285 bytes .../pytz/zoneinfo/Africa/Monrovia | Bin 0 -> 233 bytes .../pytz/zoneinfo/Africa/Nairobi | Bin 0 -> 285 bytes .../pytz/zoneinfo/Africa/Ndjamena | Bin 0 -> 225 bytes .../site-packages/pytz/zoneinfo/Africa/Niamey | Bin 0 -> 171 bytes .../pytz/zoneinfo/Africa/Nouakchott | Bin 0 -> 170 bytes .../pytz/zoneinfo/Africa/Ouagadougou | Bin 0 -> 170 bytes .../pytz/zoneinfo/Africa/Porto-Novo | Bin 0 -> 171 bytes .../pytz/zoneinfo/Africa/Sao_Tome | Bin 0 -> 234 bytes .../pytz/zoneinfo/Africa/Timbuktu | Bin 0 -> 170 bytes .../pytz/zoneinfo/Africa/Tripoli | Bin 0 -> 655 bytes .../site-packages/pytz/zoneinfo/Africa/Tunis | Bin 0 -> 710 bytes .../pytz/zoneinfo/Africa/Windhoek | Bin 0 -> 988 bytes .../site-packages/pytz/zoneinfo/America/Adak | Bin 0 -> 2365 bytes .../pytz/zoneinfo/America/Anchorage | Bin 0 -> 2380 bytes 
.../pytz/zoneinfo/America/Anguilla | Bin 0 -> 170 bytes .../pytz/zoneinfo/America/Antigua | Bin 0 -> 170 bytes .../pytz/zoneinfo/America/Araguaina | Bin 0 -> 910 bytes .../zoneinfo/America/Argentina/Buenos_Aires | Bin 0 -> 1109 bytes .../pytz/zoneinfo/America/Argentina/Catamarca | Bin 0 -> 1109 bytes .../zoneinfo/America/Argentina/ComodRivadavia | Bin 0 -> 1109 bytes .../pytz/zoneinfo/America/Argentina/Cordoba | Bin 0 -> 1109 bytes .../pytz/zoneinfo/America/Argentina/Jujuy | Bin 0 -> 1081 bytes .../pytz/zoneinfo/America/Argentina/La_Rioja | Bin 0 -> 1123 bytes .../pytz/zoneinfo/America/Argentina/Mendoza | Bin 0 -> 1109 bytes .../zoneinfo/America/Argentina/Rio_Gallegos | Bin 0 -> 1109 bytes .../pytz/zoneinfo/America/Argentina/Salta | Bin 0 -> 1081 bytes .../pytz/zoneinfo/America/Argentina/San_Juan | Bin 0 -> 1123 bytes .../pytz/zoneinfo/America/Argentina/San_Luis | Bin 0 -> 1139 bytes .../pytz/zoneinfo/America/Argentina/Tucuman | Bin 0 -> 1137 bytes .../pytz/zoneinfo/America/Argentina/Ushuaia | Bin 0 -> 1109 bytes .../site-packages/pytz/zoneinfo/America/Aruba | Bin 0 -> 212 bytes .../pytz/zoneinfo/America/Asuncion | Bin 0 -> 2077 bytes .../pytz/zoneinfo/America/Atikokan | Bin 0 -> 345 bytes .../site-packages/pytz/zoneinfo/America/Atka | Bin 0 -> 2365 bytes .../site-packages/pytz/zoneinfo/America/Bahia | Bin 0 -> 1050 bytes .../pytz/zoneinfo/America/Bahia_Banderas | Bin 0 -> 1588 bytes .../pytz/zoneinfo/America/Barbados | Bin 0 -> 344 bytes .../site-packages/pytz/zoneinfo/America/Belem | Bin 0 -> 602 bytes .../pytz/zoneinfo/America/Belize | Bin 0 -> 978 bytes .../pytz/zoneinfo/America/Blanc-Sablon | Bin 0 -> 307 bytes .../pytz/zoneinfo/America/Boa_Vista | Bin 0 -> 658 bytes .../pytz/zoneinfo/America/Bogota | Bin 0 -> 271 bytes .../site-packages/pytz/zoneinfo/America/Boise | Bin 0 -> 2403 bytes .../pytz/zoneinfo/America/Buenos_Aires | Bin 0 -> 1109 bytes .../pytz/zoneinfo/America/Cambridge_Bay | Bin 0 -> 2098 bytes .../pytz/zoneinfo/America/Campo_Grande | Bin 0 -> 2016 bytes .../pytz/zoneinfo/America/Cancun | Bin 0 -> 816 bytes .../pytz/zoneinfo/America/Caracas | Bin 0 -> 289 bytes .../pytz/zoneinfo/America/Catamarca | Bin 0 -> 1109 bytes .../pytz/zoneinfo/America/Cayenne | Bin 0 -> 224 bytes .../pytz/zoneinfo/America/Cayman | Bin 0 -> 203 bytes .../pytz/zoneinfo/America/Chicago | Bin 0 -> 3585 bytes .../pytz/zoneinfo/America/Chihuahua | Bin 0 -> 1522 bytes .../pytz/zoneinfo/America/Coral_Harbour | Bin 0 -> 345 bytes .../pytz/zoneinfo/America/Cordoba | Bin 0 -> 1109 bytes .../pytz/zoneinfo/America/Costa_Rica | Bin 0 -> 341 bytes .../pytz/zoneinfo/America/Creston | Bin 0 -> 233 bytes .../pytz/zoneinfo/America/Cuiaba | Bin 0 -> 1988 bytes .../pytz/zoneinfo/America/Curacao | Bin 0 -> 212 bytes .../pytz/zoneinfo/America/Danmarkshavn | Bin 0 -> 712 bytes .../pytz/zoneinfo/America/Dawson | Bin 0 -> 2093 bytes .../pytz/zoneinfo/America/Dawson_Creek | Bin 0 -> 1059 bytes .../pytz/zoneinfo/America/Denver | Bin 0 -> 2453 bytes .../pytz/zoneinfo/America/Detroit | Bin 0 -> 2188 bytes .../pytz/zoneinfo/America/Dominica | Bin 0 -> 170 bytes .../pytz/zoneinfo/America/Edmonton | Bin 0 -> 2402 bytes .../pytz/zoneinfo/America/Eirunepe | Bin 0 -> 690 bytes .../pytz/zoneinfo/America/El_Salvador | Bin 0 -> 250 bytes .../pytz/zoneinfo/America/Ensenada | Bin 0 -> 2356 bytes .../pytz/zoneinfo/America/Fort_Nelson | Bin 0 -> 2249 bytes .../pytz/zoneinfo/America/Fort_Wayne | Bin 0 -> 1675 bytes .../pytz/zoneinfo/America/Fortaleza | Bin 0 -> 742 bytes .../pytz/zoneinfo/America/Glace_Bay | Bin 0 -> 2206 bytes 
.../pytz/zoneinfo/America/Godthab | Bin 0 -> 1892 bytes .../pytz/zoneinfo/America/Goose_Bay | Bin 0 -> 3219 bytes .../pytz/zoneinfo/America/Grand_Turk | Bin 0 -> 1881 bytes .../pytz/zoneinfo/America/Grenada | Bin 0 -> 170 bytes .../pytz/zoneinfo/America/Guadeloupe | Bin 0 -> 170 bytes .../pytz/zoneinfo/America/Guatemala | Bin 0 -> 306 bytes .../pytz/zoneinfo/America/Guayaquil | Bin 0 -> 271 bytes .../pytz/zoneinfo/America/Guyana | Bin 0 -> 266 bytes .../pytz/zoneinfo/America/Halifax | Bin 0 -> 3438 bytes .../pytz/zoneinfo/America/Havana | Bin 0 -> 2437 bytes .../pytz/zoneinfo/America/Hermosillo | Bin 0 -> 454 bytes .../zoneinfo/America/Indiana/Indianapolis | Bin 0 -> 1675 bytes .../pytz/zoneinfo/America/Indiana/Knox | Bin 0 -> 2437 bytes .../pytz/zoneinfo/America/Indiana/Marengo | Bin 0 -> 1731 bytes .../pytz/zoneinfo/America/Indiana/Petersburg | Bin 0 -> 1913 bytes .../pytz/zoneinfo/America/Indiana/Tell_City | Bin 0 -> 1735 bytes .../pytz/zoneinfo/America/Indiana/Vevay | Bin 0 -> 1423 bytes .../pytz/zoneinfo/America/Indiana/Vincennes | Bin 0 -> 1703 bytes .../pytz/zoneinfo/America/Indiana/Winamac | Bin 0 -> 1787 bytes .../pytz/zoneinfo/America/Indianapolis | Bin 0 -> 1675 bytes .../pytz/zoneinfo/America/Inuvik | Bin 0 -> 1928 bytes .../pytz/zoneinfo/America/Iqaluit | Bin 0 -> 2046 bytes .../pytz/zoneinfo/America/Jamaica | Bin 0 -> 507 bytes .../site-packages/pytz/zoneinfo/America/Jujuy | Bin 0 -> 1081 bytes .../pytz/zoneinfo/America/Juneau | Bin 0 -> 2362 bytes .../pytz/zoneinfo/America/Kentucky/Louisville | Bin 0 -> 2781 bytes .../pytz/zoneinfo/America/Kentucky/Monticello | Bin 0 -> 2361 bytes .../pytz/zoneinfo/America/Knox_IN | Bin 0 -> 2437 bytes .../pytz/zoneinfo/America/Kralendijk | Bin 0 -> 212 bytes .../pytz/zoneinfo/America/La_Paz | Bin 0 -> 257 bytes .../site-packages/pytz/zoneinfo/America/Lima | Bin 0 -> 431 bytes .../pytz/zoneinfo/America/Los_Angeles | Bin 0 -> 2845 bytes .../pytz/zoneinfo/America/Louisville | Bin 0 -> 2781 bytes .../pytz/zoneinfo/America/Lower_Princes | Bin 0 -> 212 bytes .../pytz/zoneinfo/America/Maceio | Bin 0 -> 770 bytes .../pytz/zoneinfo/America/Managua | Bin 0 -> 463 bytes .../pytz/zoneinfo/America/Manaus | Bin 0 -> 630 bytes .../pytz/zoneinfo/America/Marigot | Bin 0 -> 170 bytes .../pytz/zoneinfo/America/Martinique | Bin 0 -> 257 bytes .../pytz/zoneinfo/America/Matamoros | Bin 0 -> 1416 bytes .../pytz/zoneinfo/America/Mazatlan | Bin 0 -> 1564 bytes .../pytz/zoneinfo/America/Mendoza | Bin 0 -> 1109 bytes .../pytz/zoneinfo/America/Menominee | Bin 0 -> 2283 bytes .../pytz/zoneinfo/America/Merida | Bin 0 -> 1456 bytes .../pytz/zoneinfo/America/Metlakatla | Bin 0 -> 1418 bytes .../pytz/zoneinfo/America/Mexico_City | Bin 0 -> 1618 bytes .../pytz/zoneinfo/America/Miquelon | Bin 0 -> 1696 bytes .../pytz/zoneinfo/America/Moncton | Bin 0 -> 3163 bytes .../pytz/zoneinfo/America/Monterrey | Bin 0 -> 1416 bytes .../pytz/zoneinfo/America/Montevideo | Bin 0 -> 1564 bytes .../pytz/zoneinfo/America/Montreal | Bin 0 -> 3503 bytes .../pytz/zoneinfo/America/Montserrat | Bin 0 -> 170 bytes .../pytz/zoneinfo/America/Nassau | Bin 0 -> 2284 bytes .../pytz/zoneinfo/America/New_York | Bin 0 -> 3545 bytes .../pytz/zoneinfo/America/Nipigon | Bin 0 -> 2131 bytes .../site-packages/pytz/zoneinfo/America/Nome | Bin 0 -> 2376 bytes .../pytz/zoneinfo/America/Noronha | Bin 0 -> 742 bytes .../pytz/zoneinfo/America/North_Dakota/Beulah | Bin 0 -> 2389 bytes .../pytz/zoneinfo/America/North_Dakota/Center | Bin 0 -> 2389 bytes .../zoneinfo/America/North_Dakota/New_Salem | Bin 0 -> 2389 bytes 
.../pytz/zoneinfo/America/Ojinaga | Bin 0 -> 1522 bytes .../pytz/zoneinfo/America/Panama | Bin 0 -> 203 bytes .../pytz/zoneinfo/America/Pangnirtung | Bin 0 -> 2108 bytes .../pytz/zoneinfo/America/Paramaribo | Bin 0 -> 296 bytes .../pytz/zoneinfo/America/Phoenix | Bin 0 -> 353 bytes .../pytz/zoneinfo/America/Port-au-Prince | Bin 0 -> 1455 bytes .../pytz/zoneinfo/America/Port_of_Spain | Bin 0 -> 170 bytes .../pytz/zoneinfo/America/Porto_Acre | Bin 0 -> 662 bytes .../pytz/zoneinfo/America/Porto_Velho | Bin 0 -> 602 bytes .../pytz/zoneinfo/America/Puerto_Rico | Bin 0 -> 255 bytes .../pytz/zoneinfo/America/Punta_Arenas | Bin 0 -> 1911 bytes .../pytz/zoneinfo/America/Rainy_River | Bin 0 -> 2131 bytes .../pytz/zoneinfo/America/Rankin_Inlet | Bin 0 -> 1930 bytes .../pytz/zoneinfo/America/Recife | Bin 0 -> 742 bytes .../pytz/zoneinfo/America/Regina | Bin 0 -> 994 bytes .../pytz/zoneinfo/America/Resolute | Bin 0 -> 1930 bytes .../pytz/zoneinfo/America/Rio_Branco | Bin 0 -> 662 bytes .../pytz/zoneinfo/America/Rosario | Bin 0 -> 1109 bytes .../pytz/zoneinfo/America/Santa_Isabel | Bin 0 -> 2356 bytes .../pytz/zoneinfo/America/Santarem | Bin 0 -> 632 bytes .../pytz/zoneinfo/America/Santiago | Bin 0 -> 2538 bytes .../pytz/zoneinfo/America/Santo_Domingo | Bin 0 -> 491 bytes .../pytz/zoneinfo/America/Sao_Paulo | Bin 0 -> 2016 bytes .../pytz/zoneinfo/America/Scoresbysund | Bin 0 -> 1930 bytes .../pytz/zoneinfo/America/Shiprock | Bin 0 -> 2453 bytes .../site-packages/pytz/zoneinfo/America/Sitka | Bin 0 -> 2350 bytes .../pytz/zoneinfo/America/St_Barthelemy | Bin 0 -> 170 bytes .../pytz/zoneinfo/America/St_Johns | Bin 0 -> 3664 bytes .../pytz/zoneinfo/America/St_Kitts | Bin 0 -> 170 bytes .../pytz/zoneinfo/America/St_Lucia | Bin 0 -> 170 bytes .../pytz/zoneinfo/America/St_Thomas | Bin 0 -> 170 bytes .../pytz/zoneinfo/America/St_Vincent | Bin 0 -> 170 bytes .../pytz/zoneinfo/America/Swift_Current | Bin 0 -> 574 bytes .../pytz/zoneinfo/America/Tegucigalpa | Bin 0 -> 278 bytes .../site-packages/pytz/zoneinfo/America/Thule | Bin 0 -> 1528 bytes .../pytz/zoneinfo/America/Thunder_Bay | Bin 0 -> 2211 bytes .../pytz/zoneinfo/America/Tijuana | Bin 0 -> 2356 bytes .../pytz/zoneinfo/America/Toronto | Bin 0 -> 3503 bytes .../pytz/zoneinfo/America/Tortola | Bin 0 -> 170 bytes .../pytz/zoneinfo/America/Vancouver | Bin 0 -> 2901 bytes .../pytz/zoneinfo/America/Virgin | Bin 0 -> 170 bytes .../pytz/zoneinfo/America/Whitehorse | Bin 0 -> 2093 bytes .../pytz/zoneinfo/America/Winnipeg | Bin 0 -> 2891 bytes .../pytz/zoneinfo/America/Yakutat | Bin 0 -> 2314 bytes .../pytz/zoneinfo/America/Yellowknife | Bin 0 -> 1980 bytes .../pytz/zoneinfo/Antarctica/Casey | Bin 0 -> 311 bytes .../pytz/zoneinfo/Antarctica/Davis | Bin 0 -> 311 bytes .../pytz/zoneinfo/Antarctica/DumontDUrville | Bin 0 -> 216 bytes .../pytz/zoneinfo/Antarctica/Macquarie | Bin 0 -> 1543 bytes .../pytz/zoneinfo/Antarctica/Mawson | Bin 0 -> 225 bytes .../pytz/zoneinfo/Antarctica/McMurdo | Bin 0 -> 2460 bytes .../pytz/zoneinfo/Antarctica/Palmer | Bin 0 -> 1432 bytes .../pytz/zoneinfo/Antarctica/Rothera | Bin 0 -> 186 bytes .../pytz/zoneinfo/Antarctica/South_Pole | Bin 0 -> 2460 bytes .../pytz/zoneinfo/Antarctica/Syowa | Bin 0 -> 187 bytes .../pytz/zoneinfo/Antarctica/Troll | Bin 0 -> 1176 bytes .../pytz/zoneinfo/Antarctica/Vostok | Bin 0 -> 187 bytes .../pytz/zoneinfo/Arctic/Longyearbyen | Bin 0 -> 2251 bytes .../site-packages/pytz/zoneinfo/Asia/Aden | Bin 0 -> 187 bytes .../site-packages/pytz/zoneinfo/Asia/Almaty | Bin 0 -> 1031 bytes 
.../site-packages/pytz/zoneinfo/Asia/Amman | Bin 0 -> 1877 bytes .../site-packages/pytz/zoneinfo/Asia/Anadyr | Bin 0 -> 1222 bytes .../site-packages/pytz/zoneinfo/Asia/Aqtau | Bin 0 -> 1017 bytes .../site-packages/pytz/zoneinfo/Asia/Aqtobe | Bin 0 -> 1047 bytes .../site-packages/pytz/zoneinfo/Asia/Ashgabat | Bin 0 -> 651 bytes .../pytz/zoneinfo/Asia/Ashkhabad | Bin 0 -> 651 bytes .../site-packages/pytz/zoneinfo/Asia/Atyrau | Bin 0 -> 1025 bytes .../site-packages/pytz/zoneinfo/Asia/Baghdad | Bin 0 -> 1004 bytes .../site-packages/pytz/zoneinfo/Asia/Bahrain | Bin 0 -> 225 bytes .../site-packages/pytz/zoneinfo/Asia/Baku | Bin 0 -> 1269 bytes .../site-packages/pytz/zoneinfo/Asia/Bangkok | Bin 0 -> 220 bytes .../site-packages/pytz/zoneinfo/Asia/Barnaul | Bin 0 -> 1255 bytes .../site-packages/pytz/zoneinfo/Asia/Beirut | Bin 0 -> 2175 bytes .../site-packages/pytz/zoneinfo/Asia/Bishkek | Bin 0 -> 1045 bytes .../site-packages/pytz/zoneinfo/Asia/Brunei | Bin 0 -> 229 bytes .../site-packages/pytz/zoneinfo/Asia/Calcutta | Bin 0 -> 312 bytes .../site-packages/pytz/zoneinfo/Asia/Chita | Bin 0 -> 1257 bytes .../pytz/zoneinfo/Asia/Choibalsan | Bin 0 -> 991 bytes .../pytz/zoneinfo/Asia/Chongqing | Bin 0 -> 414 bytes .../pytz/zoneinfo/Asia/Chungking | Bin 0 -> 414 bytes .../site-packages/pytz/zoneinfo/Asia/Colombo | Bin 0 -> 413 bytes .../site-packages/pytz/zoneinfo/Asia/Dacca | Bin 0 -> 370 bytes .../site-packages/pytz/zoneinfo/Asia/Damascus | Bin 0 -> 2320 bytes .../site-packages/pytz/zoneinfo/Asia/Dhaka | Bin 0 -> 370 bytes .../site-packages/pytz/zoneinfo/Asia/Dili | Bin 0 -> 253 bytes .../site-packages/pytz/zoneinfo/Asia/Dubai | Bin 0 -> 187 bytes .../site-packages/pytz/zoneinfo/Asia/Dushanbe | Bin 0 -> 621 bytes .../pytz/zoneinfo/Asia/Famagusta | Bin 0 -> 2042 bytes .../site-packages/pytz/zoneinfo/Asia/Gaza | Bin 0 -> 2295 bytes .../site-packages/pytz/zoneinfo/Asia/Harbin | Bin 0 -> 414 bytes .../site-packages/pytz/zoneinfo/Asia/Hebron | Bin 0 -> 2323 bytes .../pytz/zoneinfo/Asia/Ho_Chi_Minh | Bin 0 -> 389 bytes .../pytz/zoneinfo/Asia/Hong_Kong | Bin 0 -> 1189 bytes .../site-packages/pytz/zoneinfo/Asia/Hovd | Bin 0 -> 921 bytes .../site-packages/pytz/zoneinfo/Asia/Irkutsk | Bin 0 -> 1276 bytes .../site-packages/pytz/zoneinfo/Asia/Istanbul | Bin 0 -> 2166 bytes .../site-packages/pytz/zoneinfo/Asia/Jakarta | Bin 0 -> 392 bytes .../site-packages/pytz/zoneinfo/Asia/Jayapura | Bin 0 -> 251 bytes .../pytz/zoneinfo/Asia/Jerusalem | Bin 0 -> 2265 bytes .../site-packages/pytz/zoneinfo/Asia/Kabul | Bin 0 -> 229 bytes .../pytz/zoneinfo/Asia/Kamchatka | Bin 0 -> 1198 bytes .../site-packages/pytz/zoneinfo/Asia/Karachi | Bin 0 -> 417 bytes .../site-packages/pytz/zoneinfo/Asia/Kashgar | Bin 0 -> 187 bytes .../pytz/zoneinfo/Asia/Kathmandu | Bin 0 -> 238 bytes .../site-packages/pytz/zoneinfo/Asia/Katmandu | Bin 0 -> 238 bytes .../site-packages/pytz/zoneinfo/Asia/Khandyga | Bin 0 -> 1311 bytes .../site-packages/pytz/zoneinfo/Asia/Kolkata | Bin 0 -> 312 bytes .../pytz/zoneinfo/Asia/Krasnoyarsk | Bin 0 -> 1243 bytes .../pytz/zoneinfo/Asia/Kuala_Lumpur | Bin 0 -> 424 bytes .../site-packages/pytz/zoneinfo/Asia/Kuching | Bin 0 -> 521 bytes .../site-packages/pytz/zoneinfo/Asia/Kuwait | Bin 0 -> 187 bytes .../site-packages/pytz/zoneinfo/Asia/Macao | Bin 0 -> 771 bytes .../site-packages/pytz/zoneinfo/Asia/Macau | Bin 0 -> 771 bytes .../site-packages/pytz/zoneinfo/Asia/Magadan | Bin 0 -> 1258 bytes .../site-packages/pytz/zoneinfo/Asia/Makassar | Bin 0 -> 288 bytes .../site-packages/pytz/zoneinfo/Asia/Manila | Bin 0 -> 367 bytes 
.../site-packages/pytz/zoneinfo/Asia/Muscat | Bin 0 -> 187 bytes .../site-packages/pytz/zoneinfo/Asia/Nicosia | Bin 0 -> 2016 bytes .../pytz/zoneinfo/Asia/Novokuznetsk | Bin 0 -> 1197 bytes .../pytz/zoneinfo/Asia/Novosibirsk | Bin 0 -> 1255 bytes .../site-packages/pytz/zoneinfo/Asia/Omsk | Bin 0 -> 1243 bytes .../site-packages/pytz/zoneinfo/Asia/Oral | Bin 0 -> 1039 bytes .../pytz/zoneinfo/Asia/Phnom_Penh | Bin 0 -> 220 bytes .../pytz/zoneinfo/Asia/Pontianak | Bin 0 -> 395 bytes .../pytz/zoneinfo/Asia/Pyongyang | Bin 0 -> 267 bytes .../site-packages/pytz/zoneinfo/Asia/Qatar | Bin 0 -> 225 bytes .../pytz/zoneinfo/Asia/Qyzylorda | Bin 0 -> 1047 bytes .../site-packages/pytz/zoneinfo/Asia/Rangoon | Bin 0 -> 297 bytes .../site-packages/pytz/zoneinfo/Asia/Riyadh | Bin 0 -> 187 bytes .../site-packages/pytz/zoneinfo/Asia/Saigon | Bin 0 -> 389 bytes .../site-packages/pytz/zoneinfo/Asia/Sakhalin | Bin 0 -> 1234 bytes .../pytz/zoneinfo/Asia/Samarkand | Bin 0 -> 619 bytes .../site-packages/pytz/zoneinfo/Asia/Seoul | Bin 0 -> 531 bytes .../site-packages/pytz/zoneinfo/Asia/Shanghai | Bin 0 -> 414 bytes .../pytz/zoneinfo/Asia/Singapore | Bin 0 -> 424 bytes .../pytz/zoneinfo/Asia/Srednekolymsk | Bin 0 -> 1244 bytes .../site-packages/pytz/zoneinfo/Asia/Taipei | Bin 0 -> 790 bytes .../site-packages/pytz/zoneinfo/Asia/Tashkent | Bin 0 -> 635 bytes .../site-packages/pytz/zoneinfo/Asia/Tbilisi | Bin 0 -> 1080 bytes .../site-packages/pytz/zoneinfo/Asia/Tehran | Bin 0 -> 1718 bytes .../site-packages/pytz/zoneinfo/Asia/Tel_Aviv | Bin 0 -> 2265 bytes .../site-packages/pytz/zoneinfo/Asia/Thimbu | Bin 0 -> 229 bytes .../site-packages/pytz/zoneinfo/Asia/Thimphu | Bin 0 -> 229 bytes .../site-packages/pytz/zoneinfo/Asia/Tokyo | Bin 0 -> 318 bytes .../site-packages/pytz/zoneinfo/Asia/Tomsk | Bin 0 -> 1255 bytes .../pytz/zoneinfo/Asia/Ujung_Pandang | Bin 0 -> 288 bytes .../pytz/zoneinfo/Asia/Ulaanbaatar | Bin 0 -> 921 bytes .../pytz/zoneinfo/Asia/Ulan_Bator | Bin 0 -> 921 bytes .../site-packages/pytz/zoneinfo/Asia/Urumqi | Bin 0 -> 187 bytes .../site-packages/pytz/zoneinfo/Asia/Ust-Nera | Bin 0 -> 1290 bytes .../pytz/zoneinfo/Asia/Vientiane | Bin 0 -> 220 bytes .../pytz/zoneinfo/Asia/Vladivostok | Bin 0 -> 1244 bytes .../site-packages/pytz/zoneinfo/Asia/Yakutsk | Bin 0 -> 1243 bytes .../site-packages/pytz/zoneinfo/Asia/Yangon | Bin 0 -> 297 bytes .../pytz/zoneinfo/Asia/Yekaterinburg | Bin 0 -> 1281 bytes .../site-packages/pytz/zoneinfo/Asia/Yerevan | Bin 0 -> 1213 bytes .../pytz/zoneinfo/Atlantic/Azores | Bin 0 -> 3493 bytes .../pytz/zoneinfo/Atlantic/Bermuda | Bin 0 -> 2004 bytes .../pytz/zoneinfo/Atlantic/Canary | Bin 0 -> 1911 bytes .../pytz/zoneinfo/Atlantic/Cape_Verde | Bin 0 -> 284 bytes .../pytz/zoneinfo/Atlantic/Faeroe | Bin 0 -> 1829 bytes .../pytz/zoneinfo/Atlantic/Faroe | Bin 0 -> 1829 bytes .../pytz/zoneinfo/Atlantic/Jan_Mayen | Bin 0 -> 2251 bytes .../pytz/zoneinfo/Atlantic/Madeira | Bin 0 -> 3484 bytes .../pytz/zoneinfo/Atlantic/Reykjavik | Bin 0 -> 1188 bytes .../pytz/zoneinfo/Atlantic/South_Georgia | Bin 0 -> 181 bytes .../pytz/zoneinfo/Atlantic/St_Helena | Bin 0 -> 170 bytes .../pytz/zoneinfo/Atlantic/Stanley | Bin 0 -> 1251 bytes .../site-packages/pytz/zoneinfo/Australia/ACT | Bin 0 -> 2223 bytes .../pytz/zoneinfo/Australia/Adelaide | Bin 0 -> 2238 bytes .../pytz/zoneinfo/Australia/Brisbane | Bin 0 -> 452 bytes .../pytz/zoneinfo/Australia/Broken_Hill | Bin 0 -> 2274 bytes .../pytz/zoneinfo/Australia/Canberra | Bin 0 -> 2223 bytes .../pytz/zoneinfo/Australia/Currie | Bin 0 -> 2223 bytes 
.../pytz/zoneinfo/Australia/Darwin | Bin 0 -> 323 bytes .../pytz/zoneinfo/Australia/Eucla | Bin 0 -> 503 bytes .../pytz/zoneinfo/Australia/Hobart | Bin 0 -> 2335 bytes .../site-packages/pytz/zoneinfo/Australia/LHI | Bin 0 -> 1889 bytes .../pytz/zoneinfo/Australia/Lindeman | Bin 0 -> 522 bytes .../pytz/zoneinfo/Australia/Lord_Howe | Bin 0 -> 1889 bytes .../pytz/zoneinfo/Australia/Melbourne | Bin 0 -> 2223 bytes .../site-packages/pytz/zoneinfo/Australia/NSW | Bin 0 -> 2223 bytes .../pytz/zoneinfo/Australia/North | Bin 0 -> 323 bytes .../pytz/zoneinfo/Australia/Perth | Bin 0 -> 479 bytes .../pytz/zoneinfo/Australia/Queensland | Bin 0 -> 452 bytes .../pytz/zoneinfo/Australia/South | Bin 0 -> 2238 bytes .../pytz/zoneinfo/Australia/Sydney | Bin 0 -> 2223 bytes .../pytz/zoneinfo/Australia/Tasmania | Bin 0 -> 2335 bytes .../pytz/zoneinfo/Australia/Victoria | Bin 0 -> 2223 bytes .../pytz/zoneinfo/Australia/West | Bin 0 -> 479 bytes .../pytz/zoneinfo/Australia/Yancowinna | Bin 0 -> 2274 bytes .../site-packages/pytz/zoneinfo/Brazil/Acre | Bin 0 -> 662 bytes .../pytz/zoneinfo/Brazil/DeNoronha | Bin 0 -> 742 bytes .../site-packages/pytz/zoneinfo/Brazil/East | Bin 0 -> 2016 bytes .../site-packages/pytz/zoneinfo/Brazil/West | Bin 0 -> 630 bytes .../python3.6/site-packages/pytz/zoneinfo/CET | Bin 0 -> 2102 bytes .../site-packages/pytz/zoneinfo/CST6CDT | Bin 0 -> 2294 bytes .../pytz/zoneinfo/Canada/Atlantic | Bin 0 -> 3438 bytes .../pytz/zoneinfo/Canada/Central | Bin 0 -> 2891 bytes .../pytz/zoneinfo/Canada/Eastern | Bin 0 -> 3503 bytes .../pytz/zoneinfo/Canada/Mountain | Bin 0 -> 2402 bytes .../pytz/zoneinfo/Canada/Newfoundland | Bin 0 -> 3664 bytes .../pytz/zoneinfo/Canada/Pacific | Bin 0 -> 2901 bytes .../pytz/zoneinfo/Canada/Saskatchewan | Bin 0 -> 994 bytes .../site-packages/pytz/zoneinfo/Canada/Yukon | Bin 0 -> 2093 bytes .../pytz/zoneinfo/Chile/Continental | Bin 0 -> 2538 bytes .../pytz/zoneinfo/Chile/EasterIsland | Bin 0 -> 2242 bytes .../site-packages/pytz/zoneinfo/Cuba | Bin 0 -> 2437 bytes .../python3.6/site-packages/pytz/zoneinfo/EET | Bin 0 -> 1876 bytes .../python3.6/site-packages/pytz/zoneinfo/EST | Bin 0 -> 127 bytes .../site-packages/pytz/zoneinfo/EST5EDT | Bin 0 -> 2294 bytes .../site-packages/pytz/zoneinfo/Egypt | Bin 0 -> 1972 bytes .../site-packages/pytz/zoneinfo/Eire | Bin 0 -> 3531 bytes .../site-packages/pytz/zoneinfo/Etc/GMT | Bin 0 -> 127 bytes .../site-packages/pytz/zoneinfo/Etc/GMT+0 | Bin 0 -> 127 bytes .../site-packages/pytz/zoneinfo/Etc/GMT+1 | Bin 0 -> 148 bytes .../site-packages/pytz/zoneinfo/Etc/GMT+10 | Bin 0 -> 149 bytes .../site-packages/pytz/zoneinfo/Etc/GMT+11 | Bin 0 -> 149 bytes .../site-packages/pytz/zoneinfo/Etc/GMT+12 | Bin 0 -> 149 bytes .../site-packages/pytz/zoneinfo/Etc/GMT+2 | Bin 0 -> 148 bytes .../site-packages/pytz/zoneinfo/Etc/GMT+3 | Bin 0 -> 148 bytes .../site-packages/pytz/zoneinfo/Etc/GMT+4 | Bin 0 -> 148 bytes .../site-packages/pytz/zoneinfo/Etc/GMT+5 | Bin 0 -> 148 bytes .../site-packages/pytz/zoneinfo/Etc/GMT+6 | Bin 0 -> 148 bytes .../site-packages/pytz/zoneinfo/Etc/GMT+7 | Bin 0 -> 148 bytes .../site-packages/pytz/zoneinfo/Etc/GMT+8 | Bin 0 -> 148 bytes .../site-packages/pytz/zoneinfo/Etc/GMT+9 | Bin 0 -> 148 bytes .../site-packages/pytz/zoneinfo/Etc/GMT-0 | Bin 0 -> 127 bytes .../site-packages/pytz/zoneinfo/Etc/GMT-1 | Bin 0 -> 149 bytes .../site-packages/pytz/zoneinfo/Etc/GMT-10 | Bin 0 -> 150 bytes .../site-packages/pytz/zoneinfo/Etc/GMT-11 | Bin 0 -> 150 bytes .../site-packages/pytz/zoneinfo/Etc/GMT-12 | Bin 0 -> 150 bytes 
.../site-packages/pytz/zoneinfo/Etc/GMT-13 | Bin 0 -> 150 bytes .../site-packages/pytz/zoneinfo/Etc/GMT-14 | Bin 0 -> 150 bytes .../site-packages/pytz/zoneinfo/Etc/GMT-2 | Bin 0 -> 149 bytes .../site-packages/pytz/zoneinfo/Etc/GMT-3 | Bin 0 -> 149 bytes .../site-packages/pytz/zoneinfo/Etc/GMT-4 | Bin 0 -> 149 bytes .../site-packages/pytz/zoneinfo/Etc/GMT-5 | Bin 0 -> 149 bytes .../site-packages/pytz/zoneinfo/Etc/GMT-6 | Bin 0 -> 149 bytes .../site-packages/pytz/zoneinfo/Etc/GMT-7 | Bin 0 -> 149 bytes .../site-packages/pytz/zoneinfo/Etc/GMT-8 | Bin 0 -> 149 bytes .../site-packages/pytz/zoneinfo/Etc/GMT-9 | Bin 0 -> 149 bytes .../site-packages/pytz/zoneinfo/Etc/GMT0 | Bin 0 -> 127 bytes .../site-packages/pytz/zoneinfo/Etc/Greenwich | Bin 0 -> 127 bytes .../site-packages/pytz/zoneinfo/Etc/UCT | Bin 0 -> 127 bytes .../site-packages/pytz/zoneinfo/Etc/UTC | Bin 0 -> 127 bytes .../site-packages/pytz/zoneinfo/Etc/Universal | Bin 0 -> 127 bytes .../site-packages/pytz/zoneinfo/Etc/Zulu | Bin 0 -> 127 bytes .../pytz/zoneinfo/Europe/Amsterdam | Bin 0 -> 2949 bytes .../pytz/zoneinfo/Europe/Andorra | Bin 0 -> 1751 bytes .../pytz/zoneinfo/Europe/Astrakhan | Bin 0 -> 1197 bytes .../site-packages/pytz/zoneinfo/Europe/Athens | Bin 0 -> 2271 bytes .../pytz/zoneinfo/Europe/Belfast | Bin 0 -> 3687 bytes .../pytz/zoneinfo/Europe/Belgrade | Bin 0 -> 1957 bytes .../site-packages/pytz/zoneinfo/Europe/Berlin | Bin 0 -> 2335 bytes .../pytz/zoneinfo/Europe/Bratislava | Bin 0 -> 2338 bytes .../pytz/zoneinfo/Europe/Brussels | Bin 0 -> 2970 bytes .../pytz/zoneinfo/Europe/Bucharest | Bin 0 -> 2221 bytes .../pytz/zoneinfo/Europe/Budapest | Bin 0 -> 2405 bytes .../pytz/zoneinfo/Europe/Busingen | Bin 0 -> 1918 bytes .../pytz/zoneinfo/Europe/Chisinau | Bin 0 -> 2445 bytes .../pytz/zoneinfo/Europe/Copenhagen | Bin 0 -> 2160 bytes .../site-packages/pytz/zoneinfo/Europe/Dublin | Bin 0 -> 3531 bytes .../pytz/zoneinfo/Europe/Gibraltar | Bin 0 -> 3061 bytes .../pytz/zoneinfo/Europe/Guernsey | Bin 0 -> 3687 bytes .../pytz/zoneinfo/Europe/Helsinki | Bin 0 -> 1909 bytes .../pytz/zoneinfo/Europe/Isle_of_Man | Bin 0 -> 3687 bytes .../pytz/zoneinfo/Europe/Istanbul | Bin 0 -> 2166 bytes .../site-packages/pytz/zoneinfo/Europe/Jersey | Bin 0 -> 3687 bytes .../pytz/zoneinfo/Europe/Kaliningrad | Bin 0 -> 1518 bytes .../site-packages/pytz/zoneinfo/Europe/Kiev | Bin 0 -> 2097 bytes .../site-packages/pytz/zoneinfo/Europe/Kirov | Bin 0 -> 1167 bytes .../site-packages/pytz/zoneinfo/Europe/Lisbon | Bin 0 -> 3469 bytes .../pytz/zoneinfo/Europe/Ljubljana | Bin 0 -> 1957 bytes .../site-packages/pytz/zoneinfo/Europe/London | Bin 0 -> 3687 bytes .../pytz/zoneinfo/Europe/Luxembourg | Bin 0 -> 2974 bytes .../site-packages/pytz/zoneinfo/Europe/Madrid | Bin 0 -> 2637 bytes .../site-packages/pytz/zoneinfo/Europe/Malta | Bin 0 -> 2629 bytes .../pytz/zoneinfo/Europe/Mariehamn | Bin 0 -> 1909 bytes .../site-packages/pytz/zoneinfo/Europe/Minsk | Bin 0 -> 1370 bytes .../site-packages/pytz/zoneinfo/Europe/Monaco | Bin 0 -> 2953 bytes .../site-packages/pytz/zoneinfo/Europe/Moscow | Bin 0 -> 1544 bytes .../pytz/zoneinfo/Europe/Nicosia | Bin 0 -> 2016 bytes .../site-packages/pytz/zoneinfo/Europe/Oslo | Bin 0 -> 2251 bytes .../site-packages/pytz/zoneinfo/Europe/Paris | Bin 0 -> 2971 bytes .../pytz/zoneinfo/Europe/Podgorica | Bin 0 -> 1957 bytes .../site-packages/pytz/zoneinfo/Europe/Prague | Bin 0 -> 2338 bytes .../site-packages/pytz/zoneinfo/Europe/Riga | Bin 0 -> 2235 bytes .../site-packages/pytz/zoneinfo/Europe/Rome | Bin 0 -> 2692 bytes 
.../site-packages/pytz/zoneinfo/Europe/Samara | Bin 0 -> 1253 bytes .../pytz/zoneinfo/Europe/San_Marino | Bin 0 -> 2692 bytes .../pytz/zoneinfo/Europe/Sarajevo | Bin 0 -> 1957 bytes .../pytz/zoneinfo/Europe/Saratov | Bin 0 -> 1197 bytes .../pytz/zoneinfo/Europe/Simferopol | Bin 0 -> 1490 bytes .../site-packages/pytz/zoneinfo/Europe/Skopje | Bin 0 -> 1957 bytes .../site-packages/pytz/zoneinfo/Europe/Sofia | Bin 0 -> 2130 bytes .../pytz/zoneinfo/Europe/Stockholm | Bin 0 -> 1918 bytes .../pytz/zoneinfo/Europe/Tallinn | Bin 0 -> 2187 bytes .../site-packages/pytz/zoneinfo/Europe/Tirane | Bin 0 -> 2098 bytes .../pytz/zoneinfo/Europe/Tiraspol | Bin 0 -> 2445 bytes .../pytz/zoneinfo/Europe/Ulyanovsk | Bin 0 -> 1281 bytes .../pytz/zoneinfo/Europe/Uzhgorod | Bin 0 -> 2103 bytes .../site-packages/pytz/zoneinfo/Europe/Vaduz | Bin 0 -> 1918 bytes .../pytz/zoneinfo/Europe/Vatican | Bin 0 -> 2692 bytes .../site-packages/pytz/zoneinfo/Europe/Vienna | Bin 0 -> 2237 bytes .../pytz/zoneinfo/Europe/Vilnius | Bin 0 -> 2199 bytes .../pytz/zoneinfo/Europe/Volgograd | Bin 0 -> 1167 bytes .../site-packages/pytz/zoneinfo/Europe/Warsaw | Bin 0 -> 2705 bytes .../site-packages/pytz/zoneinfo/Europe/Zagreb | Bin 0 -> 1957 bytes .../pytz/zoneinfo/Europe/Zaporozhye | Bin 0 -> 2115 bytes .../site-packages/pytz/zoneinfo/Europe/Zurich | Bin 0 -> 1918 bytes .../site-packages/pytz/zoneinfo/Factory | Bin 0 -> 148 bytes .../python3.6/site-packages/pytz/zoneinfo/GB | Bin 0 -> 3687 bytes .../site-packages/pytz/zoneinfo/GB-Eire | Bin 0 -> 3687 bytes .../python3.6/site-packages/pytz/zoneinfo/GMT | Bin 0 -> 127 bytes .../site-packages/pytz/zoneinfo/GMT+0 | Bin 0 -> 127 bytes .../site-packages/pytz/zoneinfo/GMT-0 | Bin 0 -> 127 bytes .../site-packages/pytz/zoneinfo/GMT0 | Bin 0 -> 127 bytes .../site-packages/pytz/zoneinfo/Greenwich | Bin 0 -> 127 bytes .../python3.6/site-packages/pytz/zoneinfo/HST | Bin 0 -> 128 bytes .../site-packages/pytz/zoneinfo/Hongkong | Bin 0 -> 1189 bytes .../site-packages/pytz/zoneinfo/Iceland | Bin 0 -> 1188 bytes .../pytz/zoneinfo/Indian/Antananarivo | Bin 0 -> 285 bytes .../site-packages/pytz/zoneinfo/Indian/Chagos | Bin 0 -> 225 bytes .../pytz/zoneinfo/Indian/Christmas | Bin 0 -> 182 bytes .../site-packages/pytz/zoneinfo/Indian/Cocos | Bin 0 -> 191 bytes .../site-packages/pytz/zoneinfo/Indian/Comoro | Bin 0 -> 285 bytes .../pytz/zoneinfo/Indian/Kerguelen | Bin 0 -> 187 bytes .../site-packages/pytz/zoneinfo/Indian/Mahe | Bin 0 -> 187 bytes .../pytz/zoneinfo/Indian/Maldives | Bin 0 -> 220 bytes .../pytz/zoneinfo/Indian/Mauritius | Bin 0 -> 267 bytes .../pytz/zoneinfo/Indian/Mayotte | Bin 0 -> 285 bytes .../pytz/zoneinfo/Indian/Reunion | Bin 0 -> 187 bytes .../site-packages/pytz/zoneinfo/Iran | Bin 0 -> 1718 bytes .../site-packages/pytz/zoneinfo/Israel | Bin 0 -> 2265 bytes .../site-packages/pytz/zoneinfo/Jamaica | Bin 0 -> 507 bytes .../site-packages/pytz/zoneinfo/Japan | Bin 0 -> 318 bytes .../site-packages/pytz/zoneinfo/Kwajalein | Bin 0 -> 259 bytes .../site-packages/pytz/zoneinfo/Libya | Bin 0 -> 655 bytes .../python3.6/site-packages/pytz/zoneinfo/MET | Bin 0 -> 2102 bytes .../python3.6/site-packages/pytz/zoneinfo/MST | Bin 0 -> 127 bytes .../site-packages/pytz/zoneinfo/MST7MDT | Bin 0 -> 2294 bytes .../pytz/zoneinfo/Mexico/BajaNorte | Bin 0 -> 2356 bytes .../pytz/zoneinfo/Mexico/BajaSur | Bin 0 -> 1564 bytes .../pytz/zoneinfo/Mexico/General | Bin 0 -> 1618 bytes .../python3.6/site-packages/pytz/zoneinfo/NZ | Bin 0 -> 2460 bytes .../site-packages/pytz/zoneinfo/NZ-CHAT | Bin 0 -> 2087 bytes 
.../site-packages/pytz/zoneinfo/Navajo | Bin 0 -> 2453 bytes .../python3.6/site-packages/pytz/zoneinfo/PRC | Bin 0 -> 414 bytes .../site-packages/pytz/zoneinfo/PST8PDT | Bin 0 -> 2294 bytes .../site-packages/pytz/zoneinfo/Pacific/Apia | Bin 0 -> 1134 bytes .../pytz/zoneinfo/Pacific/Auckland | Bin 0 -> 2460 bytes .../pytz/zoneinfo/Pacific/Bougainville | Bin 0 -> 296 bytes .../pytz/zoneinfo/Pacific/Chatham | Bin 0 -> 2087 bytes .../site-packages/pytz/zoneinfo/Pacific/Chuuk | Bin 0 -> 183 bytes .../pytz/zoneinfo/Pacific/Easter | Bin 0 -> 2242 bytes .../site-packages/pytz/zoneinfo/Pacific/Efate | Bin 0 -> 492 bytes .../pytz/zoneinfo/Pacific/Enderbury | Bin 0 -> 259 bytes .../pytz/zoneinfo/Pacific/Fakaofo | Bin 0 -> 221 bytes .../site-packages/pytz/zoneinfo/Pacific/Fiji | Bin 0 -> 1104 bytes .../pytz/zoneinfo/Pacific/Funafuti | Bin 0 -> 183 bytes .../pytz/zoneinfo/Pacific/Galapagos | Bin 0 -> 268 bytes .../pytz/zoneinfo/Pacific/Gambier | Bin 0 -> 186 bytes .../pytz/zoneinfo/Pacific/Guadalcanal | Bin 0 -> 188 bytes .../site-packages/pytz/zoneinfo/Pacific/Guam | Bin 0 -> 225 bytes .../pytz/zoneinfo/Pacific/Honolulu | Bin 0 -> 276 bytes .../pytz/zoneinfo/Pacific/Johnston | Bin 0 -> 276 bytes .../pytz/zoneinfo/Pacific/Kiritimati | Bin 0 -> 263 bytes .../pytz/zoneinfo/Pacific/Kosrae | Bin 0 -> 251 bytes .../pytz/zoneinfo/Pacific/Kwajalein | Bin 0 -> 259 bytes .../pytz/zoneinfo/Pacific/Majuro | Bin 0 -> 221 bytes .../pytz/zoneinfo/Pacific/Marquesas | Bin 0 -> 195 bytes .../pytz/zoneinfo/Pacific/Midway | Bin 0 -> 196 bytes .../site-packages/pytz/zoneinfo/Pacific/Nauru | Bin 0 -> 282 bytes .../site-packages/pytz/zoneinfo/Pacific/Niue | Bin 0 -> 266 bytes .../pytz/zoneinfo/Pacific/Norfolk | Bin 0 -> 323 bytes .../pytz/zoneinfo/Pacific/Noumea | Bin 0 -> 328 bytes .../pytz/zoneinfo/Pacific/Pago_Pago | Bin 0 -> 196 bytes .../site-packages/pytz/zoneinfo/Pacific/Palau | Bin 0 -> 182 bytes .../pytz/zoneinfo/Pacific/Pitcairn | Bin 0 -> 223 bytes .../pytz/zoneinfo/Pacific/Pohnpei | Bin 0 -> 183 bytes .../pytz/zoneinfo/Pacific/Ponape | Bin 0 -> 183 bytes .../pytz/zoneinfo/Pacific/Port_Moresby | Bin 0 -> 206 bytes .../pytz/zoneinfo/Pacific/Rarotonga | Bin 0 -> 602 bytes .../pytz/zoneinfo/Pacific/Saipan | Bin 0 -> 225 bytes .../site-packages/pytz/zoneinfo/Pacific/Samoa | Bin 0 -> 196 bytes .../pytz/zoneinfo/Pacific/Tahiti | Bin 0 -> 187 bytes .../pytz/zoneinfo/Pacific/Tarawa | Bin 0 -> 183 bytes .../pytz/zoneinfo/Pacific/Tongatapu | Bin 0 -> 393 bytes .../site-packages/pytz/zoneinfo/Pacific/Truk | Bin 0 -> 183 bytes .../site-packages/pytz/zoneinfo/Pacific/Wake | Bin 0 -> 183 bytes .../pytz/zoneinfo/Pacific/Wallis | Bin 0 -> 183 bytes .../site-packages/pytz/zoneinfo/Pacific/Yap | Bin 0 -> 183 bytes .../site-packages/pytz/zoneinfo/Poland | Bin 0 -> 2705 bytes .../site-packages/pytz/zoneinfo/Portugal | Bin 0 -> 3469 bytes .../python3.6/site-packages/pytz/zoneinfo/ROC | Bin 0 -> 790 bytes .../python3.6/site-packages/pytz/zoneinfo/ROK | Bin 0 -> 531 bytes .../site-packages/pytz/zoneinfo/Singapore | Bin 0 -> 424 bytes .../site-packages/pytz/zoneinfo/Turkey | Bin 0 -> 2166 bytes .../python3.6/site-packages/pytz/zoneinfo/UCT | Bin 0 -> 127 bytes .../site-packages/pytz/zoneinfo/US/Alaska | Bin 0 -> 2380 bytes .../site-packages/pytz/zoneinfo/US/Aleutian | Bin 0 -> 2365 bytes .../site-packages/pytz/zoneinfo/US/Arizona | Bin 0 -> 353 bytes .../site-packages/pytz/zoneinfo/US/Central | Bin 0 -> 3585 bytes .../pytz/zoneinfo/US/East-Indiana | Bin 0 -> 1675 bytes .../site-packages/pytz/zoneinfo/US/Eastern | Bin 0 -> 3545 bytes 
.../site-packages/pytz/zoneinfo/US/Hawaii | Bin 0 -> 276 bytes .../pytz/zoneinfo/US/Indiana-Starke | Bin 0 -> 2437 bytes .../site-packages/pytz/zoneinfo/US/Michigan | Bin 0 -> 2188 bytes .../site-packages/pytz/zoneinfo/US/Mountain | Bin 0 -> 2453 bytes .../site-packages/pytz/zoneinfo/US/Pacific | Bin 0 -> 2845 bytes .../site-packages/pytz/zoneinfo/US/Samoa | Bin 0 -> 196 bytes .../python3.6/site-packages/pytz/zoneinfo/UTC | Bin 0 -> 127 bytes .../site-packages/pytz/zoneinfo/Universal | Bin 0 -> 127 bytes .../site-packages/pytz/zoneinfo/W-SU | Bin 0 -> 1544 bytes .../python3.6/site-packages/pytz/zoneinfo/WET | Bin 0 -> 1873 bytes .../site-packages/pytz/zoneinfo/Zulu | Bin 0 -> 127 bytes .../site-packages/pytz/zoneinfo/iso3166.tab | 274 + .../site-packages/pytz/zoneinfo/leapseconds | 61 + .../site-packages/pytz/zoneinfo/posixrules | Bin 0 -> 3545 bytes .../site-packages/pytz/zoneinfo/tzdata.zi | 4162 +++++++++ .../site-packages/pytz/zoneinfo/zone.tab | 448 + .../site-packages/pytz/zoneinfo/zone1970.tab | 382 + .../requests-2.19.1.dist-info/DESCRIPTION.rst | 1684 ++++ .../requests-2.19.1.dist-info/INSTALLER | 1 + .../requests-2.19.1.dist-info/LICENSE.txt | 13 + .../requests-2.19.1.dist-info/METADATA | 1721 ++++ .../requests-2.19.1.dist-info/RECORD | 44 + .../requests-2.19.1.dist-info/WHEEL | 6 + .../requests-2.19.1.dist-info/metadata.json | 1 + .../requests-2.19.1.dist-info/top_level.txt | 1 + .../site-packages/requests/_internal_utils.py | 42 + .../site-packages/requests/adapters.py | 530 ++ .../python3.6/site-packages/requests/api.py | 152 + .../python3.6/site-packages/requests/auth.py | 305 + .../python3.6/site-packages/requests/certs.py | 18 + .../site-packages/requests/compat.py | 71 + .../site-packages/requests/cookies.py | 546 ++ .../site-packages/requests/exceptions.py | 126 + .../python3.6/site-packages/requests/help.py | 120 + .../python3.6/site-packages/requests/hooks.py | 34 + .../site-packages/requests/models.py | 952 ++ .../site-packages/requests/packages.py | 14 + .../site-packages/requests/sessions.py | 741 ++ .../site-packages/requests/status_codes.py | 120 + .../site-packages/requests/structures.py | 103 + .../python3.6/site-packages/requests/utils.py | 976 ++ .../INSTALLER | 1 + .../LICENSE.txt | 15 + .../METADATA | 204 + .../requests_oauthlib-1.0.0.dist-info/RECORD | 34 + .../requests_oauthlib-1.0.0.dist-info/WHEEL | 6 + .../top_level.txt | 1 + .../compliance_fixes/douban.py | 18 + .../compliance_fixes/facebook.py | 33 + .../compliance_fixes/fitbit.py | 26 + .../compliance_fixes/linkedin.py | 24 + .../compliance_fixes/mailchimp.py | 22 + .../compliance_fixes/plentymarkets.py | 27 + .../compliance_fixes/slack.py | 37 + .../compliance_fixes/weibo.py | 17 + .../requests_oauthlib/oauth1_auth.py | 95 + .../requests_oauthlib/oauth1_session.py | 396 + .../requests_oauthlib/oauth2_auth.py | 36 + .../requests_oauthlib/oauth2_session.py | 376 + .../DESCRIPTION.rst | 243 + .../setuptools-28.8.0.dist-info/INSTALLER | 1 + .../setuptools-28.8.0.dist-info/METADATA | 272 + .../setuptools-28.8.0.dist-info/RECORD | 143 + .../setuptools-28.8.0.dist-info/WHEEL | 6 + .../dependency_links.txt | 2 + .../entry_points.txt | 63 + .../setuptools-28.8.0.dist-info/metadata.json | 1 + .../setuptools-28.8.0.dist-info/top_level.txt | 3 + .../setuptools-28.8.0.dist-info/zip-safe | 1 + .../site-packages/setuptools/archive_util.py | 173 + .../site-packages/setuptools/cli-32.exe | Bin 0 -> 65536 bytes .../site-packages/setuptools/cli-64.exe | Bin 0 -> 74752 bytes .../site-packages/setuptools/cli.exe | Bin 0 
-> 65536 bytes .../site-packages/setuptools/command/alias.py | 80 + .../setuptools/command/bdist_egg.py | 472 + .../setuptools/command/bdist_rpm.py | 43 + .../setuptools/command/bdist_wininst.py | 21 + .../setuptools/command/build_ext.py | 328 + .../setuptools/command/build_py.py | 270 + .../setuptools/command/develop.py | 197 + .../setuptools/command/easy_install.py | 2287 +++++ .../setuptools/command/egg_info.py | 697 ++ .../setuptools/command/install.py | 125 + .../setuptools/command/install_egg_info.py | 62 + .../setuptools/command/install_lib.py | 121 + .../setuptools/command/install_scripts.py | 65 + .../setuptools/command/launcher manifest.xml | 15 + .../setuptools/command/py36compat.py | 136 + .../setuptools/command/register.py | 10 + .../setuptools/command/rotate.py | 66 + .../setuptools/command/saveopts.py | 22 + .../site-packages/setuptools/command/sdist.py | 202 + .../setuptools/command/setopt.py | 149 + .../site-packages/setuptools/command/test.py | 247 + .../setuptools/command/upload.py | 38 + .../setuptools/command/upload_docs.py | 206 + .../site-packages/setuptools/depends.py | 217 + .../site-packages/setuptools/dist.py | 914 ++ .../site-packages/setuptools/extension.py | 57 + .../site-packages/setuptools/glob.py | 176 + .../site-packages/setuptools/gui-32.exe | Bin 0 -> 65536 bytes .../site-packages/setuptools/gui-64.exe | Bin 0 -> 75264 bytes .../site-packages/setuptools/gui.exe | Bin 0 -> 65536 bytes .../site-packages/setuptools/launch.py | 35 + .../site-packages/setuptools/lib2to3_ex.py | 62 + .../site-packages/setuptools/monkey.py | 186 + .../site-packages/setuptools/msvc.py | 1193 +++ .../site-packages/setuptools/namespaces.py | 93 + .../site-packages/setuptools/package_index.py | 1115 +++ .../site-packages/setuptools/py26compat.py | 31 + .../site-packages/setuptools/py27compat.py | 18 + .../site-packages/setuptools/py31compat.py | 56 + .../site-packages/setuptools/sandbox.py | 492 + .../setuptools/script (dev).tmpl | 5 + .../site-packages/setuptools/script.tmpl | 3 + .../site-packages/setuptools/site-patch.py | 74 + .../site-packages/setuptools/ssl_support.py | 250 + .../site-packages/setuptools/unicode_utils.py | 44 + .../site-packages/setuptools/version.py | 6 + .../setuptools/windows_support.py | 29 + .../six-1.11.0.dist-info/DESCRIPTION.rst | 27 + .../six-1.11.0.dist-info/INSTALLER | 1 + .../six-1.11.0.dist-info/METADATA | 43 + .../site-packages/six-1.11.0.dist-info/RECORD | 9 + .../site-packages/six-1.11.0.dist-info/WHEEL | 6 + .../six-1.11.0.dist-info/metadata.json | 1 + .../six-1.11.0.dist-info/top_level.txt | 1 + env/lib/python3.6/site-packages/six.py | 891 ++ env/lib/python3.6/site-packages/socks.py | 870 ++ .../python3.6/site-packages/sockshandler.py | 79 + .../tweepy-3.6.0.dist-info/DESCRIPTION.rst | 3 + .../tweepy-3.6.0.dist-info/INSTALLER | 1 + .../tweepy-3.6.0.dist-info/METADATA | 32 + .../tweepy-3.6.0.dist-info/RECORD | 36 + .../tweepy-3.6.0.dist-info/WHEEL | 6 + .../tweepy-3.6.0.dist-info/metadata.json | 1 + .../tweepy-3.6.0.dist-info/top_level.txt | 2 + .../tweepy-3.6.0.dist-info/zip-safe | 1 + env/lib/python3.6/site-packages/tweepy/api.py | 1345 +++ .../python3.6/site-packages/tweepy/auth.py | 178 + .../python3.6/site-packages/tweepy/binder.py | 261 + .../python3.6/site-packages/tweepy/cache.py | 437 + .../python3.6/site-packages/tweepy/cursor.py | 214 + .../python3.6/site-packages/tweepy/error.py | 34 + .../python3.6/site-packages/tweepy/models.py | 495 + .../python3.6/site-packages/tweepy/parsers.py | 109 + .../site-packages/tweepy/streaming.py | 
471 + .../python3.6/site-packages/tweepy/utils.py | 58 + .../twitter-1.18.0.dist-info/DESCRIPTION.rst | 374 + .../twitter-1.18.0.dist-info/INSTALLER | 1 + .../twitter-1.18.0.dist-info/METADATA | 406 + .../twitter-1.18.0.dist-info/RECORD | 51 + .../twitter-1.18.0.dist-info/WHEEL | 6 + .../twitter-1.18.0.dist-info/entry_points.txt | 10 + .../twitter-1.18.0.dist-info/metadata.json | 1 + .../twitter-1.18.0.dist-info/top_level.txt | 1 + .../twitter-1.18.0.dist-info/zip-safe | 1 + .../python3.6/site-packages/twitter/ansi.py | 102 + .../python3.6/site-packages/twitter/api.py | 566 ++ .../site-packages/twitter/archiver.py | 457 + .../python3.6/site-packages/twitter/auth.py | 62 + .../site-packages/twitter/cmdline.py | 800 ++ .../site-packages/twitter/corrupt.py | 0 .../python3.6/site-packages/twitter/follow.py | 255 + .../python3.6/site-packages/twitter/ircbot.py | 365 + .../python3.6/site-packages/twitter/logger.py | 107 + .../python3.6/site-packages/twitter/oauth.py | 144 + .../python3.6/site-packages/twitter/oauth2.py | 93 + .../site-packages/twitter/oauth_dance.py | 119 + .../python3.6/site-packages/twitter/stream.py | 293 + .../site-packages/twitter/stream_example.py | 92 + .../site-packages/twitter/timezones.py | 81 + .../site-packages/twitter/twitter_globals.py | 42 + .../python3.6/site-packages/twitter/util.py | 177 + .../urllib3-1.23.dist-info/DESCRIPTION.rst | 1040 +++ .../urllib3-1.23.dist-info/INSTALLER | 1 + .../urllib3-1.23.dist-info/LICENSE.txt | 19 + .../urllib3-1.23.dist-info/METADATA | 1077 +++ .../urllib3-1.23.dist-info/RECORD | 80 + .../urllib3-1.23.dist-info/WHEEL | 6 + .../urllib3-1.23.dist-info/metadata.json | 1 + .../urllib3-1.23.dist-info/top_level.txt | 1 + .../site-packages/urllib3/_collections.py | 332 + .../site-packages/urllib3/connection.py | 403 + .../site-packages/urllib3/connectionpool.py | 906 ++ .../contrib/_securetransport/bindings.py | 593 ++ .../contrib/_securetransport/low_level.py | 346 + .../urllib3/contrib/appengine.py | 305 + .../site-packages/urllib3/contrib/ntlmpool.py | 112 + .../urllib3/contrib/pyopenssl.py | 457 + .../urllib3/contrib/securetransport.py | 804 ++ .../site-packages/urllib3/contrib/socks.py | 192 + .../site-packages/urllib3/exceptions.py | 246 + .../python3.6/site-packages/urllib3/fields.py | 178 + .../site-packages/urllib3/filepost.py | 98 + .../urllib3/packages/backports/makefile.py | 53 + .../urllib3/packages/ordered_dict.py | 259 + .../site-packages/urllib3/packages/six.py | 868 ++ .../ssl_match_hostname/_implementation.py | 157 + .../site-packages/urllib3/poolmanager.py | 449 + .../site-packages/urllib3/request.py | 150 + .../site-packages/urllib3/response.py | 676 ++ .../site-packages/urllib3/util/connection.py | 126 + .../site-packages/urllib3/util/queue.py | 21 + .../site-packages/urllib3/util/request.py | 118 + .../site-packages/urllib3/util/response.py | 81 + .../site-packages/urllib3/util/retry.py | 411 + .../site-packages/urllib3/util/ssl_.py | 396 + .../site-packages/urllib3/util/timeout.py | 242 + .../site-packages/urllib3/util/url.py | 230 + .../site-packages/urllib3/util/wait.py | 153 + .../wheel-0.31.1.dist-info/INSTALLER | 1 + .../wheel-0.31.1.dist-info/LICENSE.txt | 22 + .../wheel-0.31.1.dist-info/METADATA | 395 + .../wheel-0.31.1.dist-info/RECORD | 42 + .../wheel-0.31.1.dist-info/WHEEL | 6 + .../wheel-0.31.1.dist-info/entry_points.txt | 6 + .../wheel-0.31.1.dist-info/top_level.txt | 1 + .../python3.6/site-packages/wheel/archive.py | 77 + .../site-packages/wheel/bdist_wheel.py | 409 + 
.../site-packages/wheel/egg2wheel.py | 97 + .../python3.6/site-packages/wheel/install.py | 512 ++ .../python3.6/site-packages/wheel/metadata.py | 130 + .../python3.6/site-packages/wheel/paths.py | 43 + .../site-packages/wheel/pep425tags.py | 180 + .../python3.6/site-packages/wheel/pkginfo.py | 43 + .../site-packages/wheel/signatures/djbec.py | 323 + .../wheel/signatures/ed25519py.py | 50 + .../site-packages/wheel/signatures/keys.py | 101 + env/lib/python3.6/site-packages/wheel/util.py | 156 + .../site-packages/wheel/wininst2wheel.py | 219 + env/pip-selfcheck.json | 1 + env/pyvenv.cfg | 3 + gJD.png | Bin 0 -> 1551 bytes gJE.png | Bin 0 -> 1500 bytes gJE2.png | Bin 0 -> 1560 bytes mB.png | Bin 0 -> 1479 bytes mC.png | Bin 0 -> 1495 bytes mC2.png | Bin 0 -> 1578 bytes wB.png | Bin 0 -> 1449 bytes wD.png | Bin 0 -> 1369 bytes 1278 files changed, 200343 insertions(+) create mode 100644 D1.png create mode 100644 D2.png create mode 100644 H1.png create mode 100644 H2.png create mode 100644 H3.png create mode 100644 H4.png create mode 100644 H5.png create mode 100644 __pycache__/ids.cpython-312.pyc create mode 100644 __pycache__/ids.cpython-313.pyc create mode 100644 env/bin/activate create mode 100644 env/bin/activate.csh create mode 100644 env/bin/activate.fish create mode 100755 env/bin/chardetect create mode 100755 env/bin/easy_install create mode 100755 env/bin/easy_install-3.6 create mode 100755 env/bin/pip create mode 100755 env/bin/pip3 create mode 100755 env/bin/pip3.6 create mode 120000 env/bin/python create mode 120000 env/bin/python 2 create mode 120000 env/bin/python3 create mode 120000 env/bin/python3 2 create mode 100755 env/bin/twitter create mode 100755 env/bin/twitter-archiver create mode 100755 env/bin/twitter-follow create mode 100755 env/bin/twitter-log create mode 100755 env/bin/twitter-stream-example create mode 100755 env/bin/twitterbot create mode 100755 env/bin/wheel create mode 100644 env/lib/python3.6/site-packages/PySocks-1.6.8.dist-info/INSTALLER create mode 100644 env/lib/python3.6/site-packages/PySocks-1.6.8.dist-info/METADATA create mode 100644 env/lib/python3.6/site-packages/PySocks-1.6.8.dist-info/RECORD create mode 100644 env/lib/python3.6/site-packages/PySocks-1.6.8.dist-info/WHEEL create mode 100644 env/lib/python3.6/site-packages/PySocks-1.6.8.dist-info/top_level.txt create mode 100644 env/lib/python3.6/site-packages/certifi-2018.8.24.dist-info/DESCRIPTION.rst create mode 100644 env/lib/python3.6/site-packages/certifi-2018.8.24.dist-info/INSTALLER create mode 100644 env/lib/python3.6/site-packages/certifi-2018.8.24.dist-info/LICENSE.txt create mode 100644 env/lib/python3.6/site-packages/certifi-2018.8.24.dist-info/METADATA create mode 100644 env/lib/python3.6/site-packages/certifi-2018.8.24.dist-info/RECORD create mode 100644 env/lib/python3.6/site-packages/certifi-2018.8.24.dist-info/WHEEL create mode 100644 env/lib/python3.6/site-packages/certifi-2018.8.24.dist-info/metadata.json create mode 100644 env/lib/python3.6/site-packages/certifi-2018.8.24.dist-info/top_level.txt create mode 100644 env/lib/python3.6/site-packages/certifi/cacert.pem create mode 100644 env/lib/python3.6/site-packages/certifi/core.py create mode 100644 env/lib/python3.6/site-packages/chardet-3.0.4.dist-info/DESCRIPTION.rst create mode 100644 env/lib/python3.6/site-packages/chardet-3.0.4.dist-info/INSTALLER create mode 100644 env/lib/python3.6/site-packages/chardet-3.0.4.dist-info/METADATA create mode 100644 env/lib/python3.6/site-packages/chardet-3.0.4.dist-info/RECORD create mode 
100644 env/lib/python3.6/site-packages/chardet-3.0.4.dist-info/WHEEL create mode 100644 env/lib/python3.6/site-packages/chardet-3.0.4.dist-info/entry_points.txt create mode 100644 env/lib/python3.6/site-packages/chardet-3.0.4.dist-info/metadata.json create mode 100644 env/lib/python3.6/site-packages/chardet-3.0.4.dist-info/top_level.txt create mode 100644 env/lib/python3.6/site-packages/chardet/big5freq.py create mode 100644 env/lib/python3.6/site-packages/chardet/big5prober.py create mode 100644 env/lib/python3.6/site-packages/chardet/chardistribution.py create mode 100644 env/lib/python3.6/site-packages/chardet/charsetgroupprober.py create mode 100644 env/lib/python3.6/site-packages/chardet/charsetprober.py create mode 100644 env/lib/python3.6/site-packages/chardet/cli/chardetect.py create mode 100644 env/lib/python3.6/site-packages/chardet/codingstatemachine.py create mode 100644 env/lib/python3.6/site-packages/chardet/compat.py create mode 100644 env/lib/python3.6/site-packages/chardet/cp949prober.py create mode 100644 env/lib/python3.6/site-packages/chardet/enums.py create mode 100644 env/lib/python3.6/site-packages/chardet/escprober.py create mode 100644 env/lib/python3.6/site-packages/chardet/escsm.py create mode 100644 env/lib/python3.6/site-packages/chardet/eucjpprober.py create mode 100644 env/lib/python3.6/site-packages/chardet/euckrfreq.py create mode 100644 env/lib/python3.6/site-packages/chardet/euckrprober.py create mode 100644 env/lib/python3.6/site-packages/chardet/euctwfreq.py create mode 100644 env/lib/python3.6/site-packages/chardet/euctwprober.py create mode 100644 env/lib/python3.6/site-packages/chardet/gb2312freq.py create mode 100644 env/lib/python3.6/site-packages/chardet/gb2312prober.py create mode 100644 env/lib/python3.6/site-packages/chardet/hebrewprober.py create mode 100644 env/lib/python3.6/site-packages/chardet/jisfreq.py create mode 100644 env/lib/python3.6/site-packages/chardet/jpcntx.py create mode 100644 env/lib/python3.6/site-packages/chardet/langbulgarianmodel.py create mode 100644 env/lib/python3.6/site-packages/chardet/langcyrillicmodel.py create mode 100644 env/lib/python3.6/site-packages/chardet/langgreekmodel.py create mode 100644 env/lib/python3.6/site-packages/chardet/langhebrewmodel.py create mode 100644 env/lib/python3.6/site-packages/chardet/langhungarianmodel.py create mode 100644 env/lib/python3.6/site-packages/chardet/langthaimodel.py create mode 100644 env/lib/python3.6/site-packages/chardet/langturkishmodel.py create mode 100644 env/lib/python3.6/site-packages/chardet/latin1prober.py create mode 100644 env/lib/python3.6/site-packages/chardet/mbcharsetprober.py create mode 100644 env/lib/python3.6/site-packages/chardet/mbcsgroupprober.py create mode 100644 env/lib/python3.6/site-packages/chardet/mbcssm.py create mode 100644 env/lib/python3.6/site-packages/chardet/sbcharsetprober.py create mode 100644 env/lib/python3.6/site-packages/chardet/sbcsgroupprober.py create mode 100644 env/lib/python3.6/site-packages/chardet/sjisprober.py create mode 100644 env/lib/python3.6/site-packages/chardet/universaldetector.py create mode 100644 env/lib/python3.6/site-packages/chardet/utf8prober.py create mode 100644 env/lib/python3.6/site-packages/chardet/version.py create mode 100644 env/lib/python3.6/site-packages/dateutil/_common.py create mode 100644 env/lib/python3.6/site-packages/dateutil/_version.py create mode 100644 env/lib/python3.6/site-packages/dateutil/easter.py create mode 100644 env/lib/python3.6/site-packages/dateutil/parser.py create mode 
100644 env/lib/python3.6/site-packages/dateutil/relativedelta.py create mode 100644 env/lib/python3.6/site-packages/dateutil/rrule.py create mode 100644 env/lib/python3.6/site-packages/dateutil/tz/_common.py create mode 100644 env/lib/python3.6/site-packages/dateutil/tz/tz.py create mode 100644 env/lib/python3.6/site-packages/dateutil/tz/win.py create mode 100644 env/lib/python3.6/site-packages/dateutil/tzwin.py create mode 100644 env/lib/python3.6/site-packages/dateutil/zoneinfo/dateutil-zoneinfo.tar.gz create mode 100644 env/lib/python3.6/site-packages/dateutil/zoneinfo/rebuild.py create mode 100644 env/lib/python3.6/site-packages/easy_install.py create mode 100644 env/lib/python3.6/site-packages/examples/oauth.py create mode 100644 env/lib/python3.6/site-packages/examples/streaming.py create mode 100644 env/lib/python3.6/site-packages/httplib2-0.10.3-py3.6.egg-info/PKG-INFO create mode 100644 env/lib/python3.6/site-packages/httplib2-0.10.3-py3.6.egg-info/SOURCES.txt create mode 100644 env/lib/python3.6/site-packages/httplib2-0.10.3-py3.6.egg-info/dependency_links.txt create mode 100644 env/lib/python3.6/site-packages/httplib2-0.10.3-py3.6.egg-info/installed-files.txt create mode 100644 env/lib/python3.6/site-packages/httplib2-0.10.3-py3.6.egg-info/top_level.txt create mode 100644 env/lib/python3.6/site-packages/httplib2/cacerts.txt create mode 100644 env/lib/python3.6/site-packages/httplib2/iri2uri.py create mode 100644 env/lib/python3.6/site-packages/idna-2.7.dist-info/INSTALLER create mode 100755 env/lib/python3.6/site-packages/idna-2.7.dist-info/LICENSE.txt create mode 100755 env/lib/python3.6/site-packages/idna-2.7.dist-info/METADATA create mode 100644 env/lib/python3.6/site-packages/idna-2.7.dist-info/RECORD create mode 100755 env/lib/python3.6/site-packages/idna-2.7.dist-info/WHEEL create mode 100755 env/lib/python3.6/site-packages/idna-2.7.dist-info/top_level.txt create mode 100755 env/lib/python3.6/site-packages/idna/codec.py create mode 100755 env/lib/python3.6/site-packages/idna/compat.py create mode 100755 env/lib/python3.6/site-packages/idna/core.py create mode 100755 env/lib/python3.6/site-packages/idna/idnadata.py create mode 100755 env/lib/python3.6/site-packages/idna/intranges.py create mode 100755 env/lib/python3.6/site-packages/idna/package_data.py create mode 100755 env/lib/python3.6/site-packages/idna/uts46data.py create mode 100644 env/lib/python3.6/site-packages/oauthlib-2.1.0.dist-info/INSTALLER create mode 100644 env/lib/python3.6/site-packages/oauthlib-2.1.0.dist-info/LICENSE.txt create mode 100644 env/lib/python3.6/site-packages/oauthlib-2.1.0.dist-info/METADATA create mode 100644 env/lib/python3.6/site-packages/oauthlib-2.1.0.dist-info/RECORD create mode 100644 env/lib/python3.6/site-packages/oauthlib-2.1.0.dist-info/WHEEL create mode 100644 env/lib/python3.6/site-packages/oauthlib-2.1.0.dist-info/top_level.txt create mode 100644 env/lib/python3.6/site-packages/oauthlib/common.py create mode 100644 env/lib/python3.6/site-packages/oauthlib/oauth1/rfc5849/endpoints/access_token.py create mode 100644 env/lib/python3.6/site-packages/oauthlib/oauth1/rfc5849/endpoints/authorization.py create mode 100644 env/lib/python3.6/site-packages/oauthlib/oauth1/rfc5849/endpoints/base.py create mode 100644 env/lib/python3.6/site-packages/oauthlib/oauth1/rfc5849/endpoints/pre_configured.py create mode 100644 env/lib/python3.6/site-packages/oauthlib/oauth1/rfc5849/endpoints/request_token.py create mode 100644 
env/lib/python3.6/site-packages/oauthlib/oauth1/rfc5849/endpoints/resource.py create mode 100644 env/lib/python3.6/site-packages/oauthlib/oauth1/rfc5849/endpoints/signature_only.py create mode 100644 env/lib/python3.6/site-packages/oauthlib/oauth1/rfc5849/errors.py create mode 100644 env/lib/python3.6/site-packages/oauthlib/oauth1/rfc5849/parameters.py create mode 100644 env/lib/python3.6/site-packages/oauthlib/oauth1/rfc5849/request_validator.py create mode 100644 env/lib/python3.6/site-packages/oauthlib/oauth1/rfc5849/signature.py create mode 100644 env/lib/python3.6/site-packages/oauthlib/oauth1/rfc5849/utils.py create mode 100644 env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/clients/backend_application.py create mode 100644 env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/clients/base.py create mode 100644 env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/clients/legacy_application.py create mode 100644 env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/clients/mobile_application.py create mode 100644 env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/clients/service_application.py create mode 100644 env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/clients/web_application.py create mode 100644 env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/endpoints/authorization.py create mode 100644 env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/endpoints/base.py create mode 100644 env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/endpoints/pre_configured.py create mode 100644 env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/endpoints/resource.py create mode 100644 env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/endpoints/revocation.py create mode 100644 env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/endpoints/token.py create mode 100644 env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/errors.py create mode 100644 env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/grant_types/authorization_code.py create mode 100644 env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/grant_types/base.py create mode 100644 env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/grant_types/client_credentials.py create mode 100644 env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/grant_types/implicit.py create mode 100644 env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/grant_types/openid_connect.py create mode 100644 env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/grant_types/refresh_token.py create mode 100644 env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/grant_types/resource_owner_password_credentials.py create mode 100644 env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/parameters.py create mode 100644 env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/request_validator.py create mode 100644 env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/tokens.py create mode 100644 env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/utils.py create mode 100644 env/lib/python3.6/site-packages/oauthlib/signals.py create mode 100644 env/lib/python3.6/site-packages/oauthlib/uri_validate.py create mode 100644 env/lib/python3.6/site-packages/pip-18.0.dist-info/INSTALLER create mode 100644 env/lib/python3.6/site-packages/pip-18.0.dist-info/LICENSE.txt create mode 100644 env/lib/python3.6/site-packages/pip-18.0.dist-info/METADATA create mode 100644 env/lib/python3.6/site-packages/pip-18.0.dist-info/RECORD create mode 100644 env/lib/python3.6/site-packages/pip-18.0.dist-info/WHEEL 
create mode 100644 env/lib/python3.6/site-packages/pip-18.0.dist-info/entry_points.txt create mode 100644 env/lib/python3.6/site-packages/pip-18.0.dist-info/top_level.txt create mode 100644 env/lib/python3.6/site-packages/pip/_internal/basecommand.py create mode 100644 env/lib/python3.6/site-packages/pip/_internal/baseparser.py create mode 100644 env/lib/python3.6/site-packages/pip/_internal/build_env.py create mode 100644 env/lib/python3.6/site-packages/pip/_internal/cache.py create mode 100644 env/lib/python3.6/site-packages/pip/_internal/cmdoptions.py create mode 100644 env/lib/python3.6/site-packages/pip/_internal/commands/check.py create mode 100644 env/lib/python3.6/site-packages/pip/_internal/commands/completion.py create mode 100644 env/lib/python3.6/site-packages/pip/_internal/commands/configuration.py create mode 100644 env/lib/python3.6/site-packages/pip/_internal/commands/download.py create mode 100644 env/lib/python3.6/site-packages/pip/_internal/commands/freeze.py create mode 100644 env/lib/python3.6/site-packages/pip/_internal/commands/hash.py create mode 100644 env/lib/python3.6/site-packages/pip/_internal/commands/help.py create mode 100644 env/lib/python3.6/site-packages/pip/_internal/commands/install.py create mode 100644 env/lib/python3.6/site-packages/pip/_internal/commands/list.py create mode 100644 env/lib/python3.6/site-packages/pip/_internal/commands/search.py create mode 100644 env/lib/python3.6/site-packages/pip/_internal/commands/show.py create mode 100644 env/lib/python3.6/site-packages/pip/_internal/commands/uninstall.py create mode 100644 env/lib/python3.6/site-packages/pip/_internal/commands/wheel.py create mode 100644 env/lib/python3.6/site-packages/pip/_internal/compat.py create mode 100644 env/lib/python3.6/site-packages/pip/_internal/configuration.py create mode 100644 env/lib/python3.6/site-packages/pip/_internal/download.py create mode 100644 env/lib/python3.6/site-packages/pip/_internal/exceptions.py create mode 100644 env/lib/python3.6/site-packages/pip/_internal/index.py create mode 100644 env/lib/python3.6/site-packages/pip/_internal/locations.py create mode 100644 env/lib/python3.6/site-packages/pip/_internal/models/index.py create mode 100644 env/lib/python3.6/site-packages/pip/_internal/operations/check.py create mode 100644 env/lib/python3.6/site-packages/pip/_internal/operations/freeze.py create mode 100644 env/lib/python3.6/site-packages/pip/_internal/operations/prepare.py create mode 100644 env/lib/python3.6/site-packages/pip/_internal/pep425tags.py create mode 100644 env/lib/python3.6/site-packages/pip/_internal/req/req_file.py create mode 100644 env/lib/python3.6/site-packages/pip/_internal/req/req_install.py create mode 100644 env/lib/python3.6/site-packages/pip/_internal/req/req_set.py create mode 100644 env/lib/python3.6/site-packages/pip/_internal/req/req_tracker.py create mode 100644 env/lib/python3.6/site-packages/pip/_internal/req/req_uninstall.py create mode 100644 env/lib/python3.6/site-packages/pip/_internal/resolve.py create mode 100644 env/lib/python3.6/site-packages/pip/_internal/status_codes.py create mode 100644 env/lib/python3.6/site-packages/pip/_internal/utils/appdirs.py create mode 100644 env/lib/python3.6/site-packages/pip/_internal/utils/deprecation.py create mode 100644 env/lib/python3.6/site-packages/pip/_internal/utils/encoding.py create mode 100644 env/lib/python3.6/site-packages/pip/_internal/utils/filesystem.py create mode 100644 env/lib/python3.6/site-packages/pip/_internal/utils/glibc.py create mode 100644 
env/lib/python3.6/site-packages/pip/_internal/utils/hashes.py create mode 100644 env/lib/python3.6/site-packages/pip/_internal/utils/logging.py create mode 100644 env/lib/python3.6/site-packages/pip/_internal/utils/misc.py create mode 100644 env/lib/python3.6/site-packages/pip/_internal/utils/outdated.py create mode 100644 env/lib/python3.6/site-packages/pip/_internal/utils/packaging.py create mode 100644 env/lib/python3.6/site-packages/pip/_internal/utils/setuptools_build.py create mode 100644 env/lib/python3.6/site-packages/pip/_internal/utils/temp_dir.py create mode 100644 env/lib/python3.6/site-packages/pip/_internal/utils/typing.py create mode 100644 env/lib/python3.6/site-packages/pip/_internal/utils/ui.py create mode 100644 env/lib/python3.6/site-packages/pip/_internal/vcs/bazaar.py create mode 100644 env/lib/python3.6/site-packages/pip/_internal/vcs/git.py create mode 100644 env/lib/python3.6/site-packages/pip/_internal/vcs/mercurial.py create mode 100644 env/lib/python3.6/site-packages/pip/_internal/vcs/subversion.py create mode 100644 env/lib/python3.6/site-packages/pip/_internal/wheel.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/appdirs.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/cachecontrol/_cmd.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/cachecontrol/adapter.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/cachecontrol/cache.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/cachecontrol/caches/file_cache.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/cachecontrol/compat.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/cachecontrol/controller.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/cachecontrol/filewrapper.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/cachecontrol/heuristics.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/cachecontrol/serialize.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/cachecontrol/wrapper.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/certifi/cacert.pem create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/certifi/core.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/chardet/big5freq.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/chardet/big5prober.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/chardet/chardistribution.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/chardet/charsetgroupprober.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/chardet/charsetprober.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/chardet/cli/chardetect.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/chardet/codingstatemachine.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/chardet/compat.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/chardet/cp949prober.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/chardet/enums.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/chardet/escprober.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/chardet/escsm.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/chardet/eucjpprober.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/chardet/euckrfreq.py create mode 
100644 env/lib/python3.6/site-packages/pip/_vendor/chardet/euckrprober.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/chardet/euctwfreq.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/chardet/euctwprober.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/chardet/gb2312freq.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/chardet/gb2312prober.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/chardet/hebrewprober.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/chardet/jisfreq.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/chardet/jpcntx.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/chardet/langbulgarianmodel.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/chardet/langcyrillicmodel.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/chardet/langgreekmodel.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/chardet/langhebrewmodel.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/chardet/langhungarianmodel.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/chardet/langthaimodel.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/chardet/langturkishmodel.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/chardet/latin1prober.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/chardet/mbcharsetprober.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/chardet/mbcsgroupprober.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/chardet/mbcssm.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/chardet/sbcharsetprober.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/chardet/sbcsgroupprober.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/chardet/sjisprober.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/chardet/universaldetector.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/chardet/utf8prober.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/chardet/version.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/colorama/ansi.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/colorama/ansitowin32.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/colorama/initialise.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/colorama/win32.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/colorama/winterm.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/distlib/_backport/misc.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/distlib/_backport/shutil.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/distlib/_backport/sysconfig.cfg create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/distlib/_backport/sysconfig.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/distlib/_backport/tarfile.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/distlib/compat.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/distlib/database.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/distlib/index.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/distlib/locators.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/distlib/manifest.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/distlib/markers.py create 
mode 100644 env/lib/python3.6/site-packages/pip/_vendor/distlib/metadata.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/distlib/resources.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/distlib/scripts.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/distlib/t32.exe create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/distlib/t64.exe create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/distlib/util.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/distlib/version.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/distlib/w32.exe create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/distlib/w64.exe create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/distlib/wheel.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/distro.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/html5lib/_ihatexml.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/html5lib/_inputstream.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/html5lib/_tokenizer.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/html5lib/_trie/_base.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/html5lib/_trie/datrie.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/html5lib/_trie/py.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/html5lib/_utils.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/html5lib/constants.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/html5lib/filters/alphabeticalattributes.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/html5lib/filters/base.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/html5lib/filters/inject_meta_charset.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/html5lib/filters/lint.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/html5lib/filters/optionaltags.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/html5lib/filters/sanitizer.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/html5lib/filters/whitespace.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/html5lib/html5parser.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/html5lib/serializer.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/html5lib/treeadapters/genshi.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/html5lib/treeadapters/sax.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/html5lib/treebuilders/base.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/html5lib/treebuilders/dom.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/html5lib/treebuilders/etree.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/html5lib/treebuilders/etree_lxml.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/html5lib/treewalkers/base.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/html5lib/treewalkers/dom.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/html5lib/treewalkers/etree.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/html5lib/treewalkers/etree_lxml.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/html5lib/treewalkers/genshi.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/idna/codec.py create mode 100644 
env/lib/python3.6/site-packages/pip/_vendor/idna/compat.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/idna/core.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/idna/idnadata.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/idna/intranges.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/idna/package_data.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/idna/uts46data.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/ipaddress.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/lockfile/linklockfile.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/lockfile/mkdirlockfile.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/lockfile/pidlockfile.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/lockfile/sqlitelockfile.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/lockfile/symlinklockfile.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/msgpack/_version.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/msgpack/exceptions.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/msgpack/fallback.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/packaging/_compat.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/packaging/_structures.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/packaging/markers.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/packaging/requirements.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/packaging/specifiers.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/packaging/utils.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/packaging/version.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/pkg_resources/py31compat.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/progress/bar.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/progress/counter.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/progress/helpers.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/progress/spinner.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/pyparsing.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/pytoml/core.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/pytoml/parser.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/pytoml/writer.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/requests/_internal_utils.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/requests/adapters.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/requests/api.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/requests/auth.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/requests/certs.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/requests/compat.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/requests/cookies.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/requests/exceptions.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/requests/help.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/requests/hooks.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/requests/models.py create mode 100644 
env/lib/python3.6/site-packages/pip/_vendor/requests/packages.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/requests/sessions.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/requests/status_codes.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/requests/structures.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/requests/utils.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/retrying.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/six.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/urllib3/_collections.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/urllib3/connection.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/urllib3/connectionpool.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/urllib3/contrib/_securetransport/bindings.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/urllib3/contrib/_securetransport/low_level.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/urllib3/contrib/appengine.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/urllib3/contrib/ntlmpool.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/urllib3/contrib/pyopenssl.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/urllib3/contrib/securetransport.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/urllib3/contrib/socks.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/urllib3/exceptions.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/urllib3/fields.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/urllib3/filepost.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/urllib3/packages/backports/makefile.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/urllib3/packages/ordered_dict.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/urllib3/packages/six.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/urllib3/poolmanager.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/urllib3/request.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/urllib3/response.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/urllib3/util/connection.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/urllib3/util/queue.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/urllib3/util/request.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/urllib3/util/response.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/urllib3/util/retry.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/urllib3/util/ssl_.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/urllib3/util/timeout.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/urllib3/util/url.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/urllib3/util/wait.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/webencodings/labels.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/webencodings/mklabels.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/webencodings/tests.py create mode 100644 env/lib/python3.6/site-packages/pip/_vendor/webencodings/x_user_defined.py create mode 100644 
env/lib/python3.6/site-packages/pkg_resources/_vendor/appdirs.py create mode 100644 env/lib/python3.6/site-packages/pkg_resources/_vendor/packaging/_compat.py create mode 100644 env/lib/python3.6/site-packages/pkg_resources/_vendor/packaging/_structures.py create mode 100644 env/lib/python3.6/site-packages/pkg_resources/_vendor/packaging/markers.py create mode 100644 env/lib/python3.6/site-packages/pkg_resources/_vendor/packaging/requirements.py create mode 100644 env/lib/python3.6/site-packages/pkg_resources/_vendor/packaging/specifiers.py create mode 100644 env/lib/python3.6/site-packages/pkg_resources/_vendor/packaging/utils.py create mode 100644 env/lib/python3.6/site-packages/pkg_resources/_vendor/packaging/version.py create mode 100644 env/lib/python3.6/site-packages/pkg_resources/_vendor/pyparsing.py create mode 100644 env/lib/python3.6/site-packages/pkg_resources/_vendor/six.py create mode 100644 env/lib/python3.6/site-packages/python_dateutil-2.6.1.dist-info/DESCRIPTION.rst create mode 100644 env/lib/python3.6/site-packages/python_dateutil-2.6.1.dist-info/INSTALLER create mode 100644 env/lib/python3.6/site-packages/python_dateutil-2.6.1.dist-info/METADATA create mode 100644 env/lib/python3.6/site-packages/python_dateutil-2.6.1.dist-info/RECORD create mode 100644 env/lib/python3.6/site-packages/python_dateutil-2.6.1.dist-info/WHEEL create mode 100644 env/lib/python3.6/site-packages/python_dateutil-2.6.1.dist-info/metadata.json create mode 100644 env/lib/python3.6/site-packages/python_dateutil-2.6.1.dist-info/top_level.txt create mode 100644 env/lib/python3.6/site-packages/python_dateutil-2.6.1.dist-info/zip-safe create mode 100644 env/lib/python3.6/site-packages/pytz-2018.5.dist-info/DESCRIPTION.rst create mode 100644 env/lib/python3.6/site-packages/pytz-2018.5.dist-info/INSTALLER create mode 100644 env/lib/python3.6/site-packages/pytz-2018.5.dist-info/LICENSE.txt create mode 100644 env/lib/python3.6/site-packages/pytz-2018.5.dist-info/METADATA create mode 100644 env/lib/python3.6/site-packages/pytz-2018.5.dist-info/RECORD create mode 100644 env/lib/python3.6/site-packages/pytz-2018.5.dist-info/WHEEL create mode 100644 env/lib/python3.6/site-packages/pytz-2018.5.dist-info/metadata.json create mode 100644 env/lib/python3.6/site-packages/pytz-2018.5.dist-info/top_level.txt create mode 100644 env/lib/python3.6/site-packages/pytz-2018.5.dist-info/zip-safe create mode 100644 env/lib/python3.6/site-packages/pytz/exceptions.py create mode 100644 env/lib/python3.6/site-packages/pytz/lazy.py create mode 100644 env/lib/python3.6/site-packages/pytz/reference.py create mode 100644 env/lib/python3.6/site-packages/pytz/tzfile.py create mode 100644 env/lib/python3.6/site-packages/pytz/tzinfo.py create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Abidjan create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Accra create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Addis_Ababa create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Algiers create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Asmara create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Asmera create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Bamako create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Bangui create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Banjul create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Bissau create mode 100644 
env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Blantyre create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Brazzaville create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Bujumbura create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Cairo create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Casablanca create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Ceuta create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Conakry create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Dakar create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Dar_es_Salaam create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Djibouti create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Douala create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/El_Aaiun create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Freetown create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Gaborone create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Harare create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Johannesburg create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Juba create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Kampala create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Khartoum create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Kigali create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Kinshasa create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Lagos create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Libreville create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Lome create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Luanda create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Lubumbashi create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Lusaka create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Malabo create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Maputo create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Maseru create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Mbabane create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Mogadishu create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Monrovia create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Nairobi create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Ndjamena create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Niamey create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Nouakchott create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Ouagadougou create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Porto-Novo create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Sao_Tome create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Timbuktu create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Tripoli create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Tunis create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Windhoek create mode 100644 
env/lib/python3.6/site-packages/pytz/zoneinfo/America/Adak create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Anchorage create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Anguilla create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Antigua create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Araguaina create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Argentina/Buenos_Aires create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Argentina/Catamarca create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Argentina/ComodRivadavia create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Argentina/Cordoba create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Argentina/Jujuy create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Argentina/La_Rioja create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Argentina/Mendoza create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Argentina/Rio_Gallegos create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Argentina/Salta create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Argentina/San_Juan create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Argentina/San_Luis create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Argentina/Tucuman create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Argentina/Ushuaia create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Aruba create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Asuncion create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Atikokan create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Atka create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Bahia create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Bahia_Banderas create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Barbados create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Belem create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Belize create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Blanc-Sablon create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Boa_Vista create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Bogota create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Boise create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Buenos_Aires create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Cambridge_Bay create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Campo_Grande create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Cancun create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Caracas create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Catamarca create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Cayenne create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Cayman create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Chicago create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Chihuahua create mode 100644 
env/lib/python3.6/site-packages/pytz/zoneinfo/America/Coral_Harbour create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Cordoba create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Costa_Rica create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Creston create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Cuiaba create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Curacao create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Danmarkshavn create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Dawson create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Dawson_Creek create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Denver create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Detroit create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Dominica create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Edmonton create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Eirunepe create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/El_Salvador create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Ensenada create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Fort_Nelson create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Fort_Wayne create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Fortaleza create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Glace_Bay create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Godthab create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Goose_Bay create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Grand_Turk create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Grenada create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Guadeloupe create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Guatemala create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Guayaquil create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Guyana create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Halifax create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Havana create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Hermosillo create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Indiana/Indianapolis create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Indiana/Knox create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Indiana/Marengo create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Indiana/Petersburg create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Indiana/Tell_City create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Indiana/Vevay create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Indiana/Vincennes create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Indiana/Winamac create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Indianapolis create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Inuvik create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Iqaluit create mode 100644 
env/lib/python3.6/site-packages/pytz/zoneinfo/America/Jamaica create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Jujuy create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Juneau create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Kentucky/Louisville create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Kentucky/Monticello create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Knox_IN create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Kralendijk create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/La_Paz create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Lima create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Los_Angeles create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Louisville create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Lower_Princes create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Maceio create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Managua create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Manaus create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Marigot create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Martinique create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Matamoros create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Mazatlan create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Mendoza create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Menominee create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Merida create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Metlakatla create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Mexico_City create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Miquelon create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Moncton create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Monterrey create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Montevideo create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Montreal create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Montserrat create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Nassau create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/New_York create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Nipigon create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Nome create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Noronha create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/North_Dakota/Beulah create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/North_Dakota/Center create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/North_Dakota/New_Salem create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Ojinaga create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Panama create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Pangnirtung create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Paramaribo create mode 100644 
env/lib/python3.6/site-packages/pytz/zoneinfo/America/Phoenix create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Port-au-Prince create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Port_of_Spain create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Porto_Acre create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Porto_Velho create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Puerto_Rico create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Punta_Arenas create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Rainy_River create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Rankin_Inlet create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Recife create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Regina create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Resolute create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Rio_Branco create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Rosario create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Santa_Isabel create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Santarem create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Santiago create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Santo_Domingo create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Sao_Paulo create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Scoresbysund create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Shiprock create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Sitka create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/St_Barthelemy create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/St_Johns create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/St_Kitts create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/St_Lucia create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/St_Thomas create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/St_Vincent create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Swift_Current create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Tegucigalpa create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Thule create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Thunder_Bay create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Tijuana create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Toronto create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Tortola create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Vancouver create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Virgin create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Whitehorse create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Winnipeg create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Yakutat create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/America/Yellowknife create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Antarctica/Casey create mode 100644 
env/lib/python3.6/site-packages/pytz/zoneinfo/Antarctica/Davis create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Antarctica/DumontDUrville create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Antarctica/Macquarie create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Antarctica/Mawson create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Antarctica/McMurdo create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Antarctica/Palmer create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Antarctica/Rothera create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Antarctica/South_Pole create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Antarctica/Syowa create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Antarctica/Troll create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Antarctica/Vostok create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Arctic/Longyearbyen create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Aden create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Almaty create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Amman create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Anadyr create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Aqtau create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Aqtobe create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Ashgabat create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Ashkhabad create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Atyrau create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Baghdad create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Bahrain create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Baku create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Bangkok create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Barnaul create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Beirut create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Bishkek create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Brunei create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Calcutta create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Chita create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Choibalsan create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Chongqing create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Chungking create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Colombo create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Dacca create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Damascus create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Dhaka create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Dili create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Dubai create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Dushanbe create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Famagusta create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Gaza create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Harbin create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Hebron 
create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Ho_Chi_Minh create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Hong_Kong create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Hovd create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Irkutsk create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Istanbul create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Jakarta create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Jayapura create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Jerusalem create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Kabul create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Kamchatka create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Karachi create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Kashgar create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Kathmandu create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Katmandu create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Khandyga create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Kolkata create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Krasnoyarsk create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Kuala_Lumpur create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Kuching create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Kuwait create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Macao create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Macau create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Magadan create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Makassar create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Manila create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Muscat create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Nicosia create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Novokuznetsk create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Novosibirsk create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Omsk create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Oral create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Phnom_Penh create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Pontianak create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Pyongyang create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Qatar create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Qyzylorda create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Rangoon create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Riyadh create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Saigon create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Sakhalin create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Samarkand create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Seoul create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Shanghai create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Singapore create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Srednekolymsk create mode 
100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Taipei create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Tashkent create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Tbilisi create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Tehran create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Tel_Aviv create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Thimbu create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Thimphu create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Tokyo create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Tomsk create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Ujung_Pandang create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Ulaanbaatar create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Ulan_Bator create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Urumqi create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Ust-Nera create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Vientiane create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Vladivostok create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Yakutsk create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Yangon create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Yekaterinburg create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Yerevan create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Atlantic/Azores create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Atlantic/Bermuda create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Atlantic/Canary create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Atlantic/Cape_Verde create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Atlantic/Faeroe create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Atlantic/Faroe create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Atlantic/Jan_Mayen create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Atlantic/Madeira create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Atlantic/Reykjavik create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Atlantic/South_Georgia create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Atlantic/St_Helena create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Atlantic/Stanley create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Australia/ACT create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Australia/Adelaide create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Australia/Brisbane create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Australia/Broken_Hill create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Australia/Canberra create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Australia/Currie create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Australia/Darwin create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Australia/Eucla create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Australia/Hobart create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Australia/LHI create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Australia/Lindeman create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Australia/Lord_Howe 
create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Australia/Melbourne create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Australia/NSW create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Australia/North create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Australia/Perth create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Australia/Queensland create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Australia/South create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Australia/Sydney create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Australia/Tasmania create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Australia/Victoria create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Australia/West create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Australia/Yancowinna create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Brazil/Acre create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Brazil/DeNoronha create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Brazil/East create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Brazil/West create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/CET create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/CST6CDT create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Canada/Atlantic create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Canada/Central create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Canada/Eastern create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Canada/Mountain create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Canada/Newfoundland create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Canada/Pacific create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Canada/Saskatchewan create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Canada/Yukon create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Chile/Continental create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Chile/EasterIsland create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Cuba create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/EET create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/EST create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/EST5EDT create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Egypt create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Eire create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT+0 create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT+1 create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT+10 create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT+11 create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT+12 create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT+2 create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT+3 create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT+4 create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT+5 create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT+6 create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT+7 create mode 100644 
env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT+8 create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT+9 create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT-0 create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT-1 create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT-10 create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT-11 create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT-12 create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT-13 create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT-14 create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT-2 create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT-3 create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT-4 create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT-5 create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT-6 create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT-7 create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT-8 create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT-9 create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT0 create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/Greenwich create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/UCT create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/UTC create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/Universal create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/Zulu create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Amsterdam create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Andorra create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Astrakhan create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Athens create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Belfast create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Belgrade create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Berlin create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Bratislava create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Brussels create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Bucharest create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Budapest create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Busingen create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Chisinau create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Copenhagen create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Dublin create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Gibraltar create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Guernsey create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Helsinki create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Isle_of_Man create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Istanbul create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Jersey create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Kaliningrad create mode 100644 
env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Kiev create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Kirov create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Lisbon create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Ljubljana create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/London create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Luxembourg create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Madrid create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Malta create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Mariehamn create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Minsk create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Monaco create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Moscow create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Nicosia create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Oslo create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Paris create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Podgorica create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Prague create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Riga create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Rome create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Samara create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/San_Marino create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Sarajevo create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Saratov create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Simferopol create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Skopje create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Sofia create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Stockholm create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Tallinn create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Tirane create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Tiraspol create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Ulyanovsk create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Uzhgorod create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Vaduz create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Vatican create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Vienna create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Vilnius create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Volgograd create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Warsaw create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Zagreb create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Zaporozhye create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Zurich create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Factory create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/GB create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/GB-Eire create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/GMT create mode 100644 
env/lib/python3.6/site-packages/pytz/zoneinfo/GMT+0 create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/GMT-0 create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/GMT0 create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Greenwich create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/HST create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Hongkong create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Iceland create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Indian/Antananarivo create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Indian/Chagos create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Indian/Christmas create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Indian/Cocos create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Indian/Comoro create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Indian/Kerguelen create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Indian/Mahe create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Indian/Maldives create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Indian/Mauritius create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Indian/Mayotte create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Indian/Reunion create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Iran create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Israel create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Jamaica create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Japan create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Kwajalein create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Libya create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/MET create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/MST create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/MST7MDT create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Mexico/BajaNorte create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Mexico/BajaSur create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Mexico/General create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/NZ create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/NZ-CHAT create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Navajo create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/PRC create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/PST8PDT create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Pacific/Apia create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Pacific/Auckland create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Pacific/Bougainville create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Pacific/Chatham create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Pacific/Chuuk create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Pacific/Easter create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Pacific/Efate create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Pacific/Enderbury create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Pacific/Fakaofo create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Pacific/Fiji create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Pacific/Funafuti create mode 100644 
env/lib/python3.6/site-packages/pytz/zoneinfo/Pacific/Galapagos create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Pacific/Gambier create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Pacific/Guadalcanal create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Pacific/Guam create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Pacific/Honolulu create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Pacific/Johnston create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Pacific/Kiritimati create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Pacific/Kosrae create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Pacific/Kwajalein create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Pacific/Majuro create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Pacific/Marquesas create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Pacific/Midway create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Pacific/Nauru create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Pacific/Niue create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Pacific/Norfolk create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Pacific/Noumea create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Pacific/Pago_Pago create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Pacific/Palau create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Pacific/Pitcairn create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Pacific/Pohnpei create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Pacific/Ponape create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Pacific/Port_Moresby create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Pacific/Rarotonga create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Pacific/Saipan create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Pacific/Samoa create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Pacific/Tahiti create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Pacific/Tarawa create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Pacific/Tongatapu create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Pacific/Truk create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Pacific/Wake create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Pacific/Wallis create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Pacific/Yap create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Poland create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Portugal create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/ROC create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/ROK create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Singapore create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Turkey create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/UCT create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/US/Alaska create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/US/Aleutian create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/US/Arizona create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/US/Central create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/US/East-Indiana create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/US/Eastern create mode 100644 
env/lib/python3.6/site-packages/pytz/zoneinfo/US/Hawaii create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/US/Indiana-Starke create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/US/Michigan create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/US/Mountain create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/US/Pacific create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/US/Samoa create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/UTC create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Universal create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/W-SU create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/WET create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/Zulu create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/iso3166.tab create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/leapseconds create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/posixrules create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/tzdata.zi create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/zone.tab create mode 100644 env/lib/python3.6/site-packages/pytz/zoneinfo/zone1970.tab create mode 100644 env/lib/python3.6/site-packages/requests-2.19.1.dist-info/DESCRIPTION.rst create mode 100644 env/lib/python3.6/site-packages/requests-2.19.1.dist-info/INSTALLER create mode 100644 env/lib/python3.6/site-packages/requests-2.19.1.dist-info/LICENSE.txt create mode 100644 env/lib/python3.6/site-packages/requests-2.19.1.dist-info/METADATA create mode 100644 env/lib/python3.6/site-packages/requests-2.19.1.dist-info/RECORD create mode 100644 env/lib/python3.6/site-packages/requests-2.19.1.dist-info/WHEEL create mode 100644 env/lib/python3.6/site-packages/requests-2.19.1.dist-info/metadata.json create mode 100644 env/lib/python3.6/site-packages/requests-2.19.1.dist-info/top_level.txt create mode 100644 env/lib/python3.6/site-packages/requests/_internal_utils.py create mode 100644 env/lib/python3.6/site-packages/requests/adapters.py create mode 100644 env/lib/python3.6/site-packages/requests/api.py create mode 100644 env/lib/python3.6/site-packages/requests/auth.py create mode 100644 env/lib/python3.6/site-packages/requests/certs.py create mode 100644 env/lib/python3.6/site-packages/requests/compat.py create mode 100644 env/lib/python3.6/site-packages/requests/cookies.py create mode 100644 env/lib/python3.6/site-packages/requests/exceptions.py create mode 100644 env/lib/python3.6/site-packages/requests/help.py create mode 100644 env/lib/python3.6/site-packages/requests/hooks.py create mode 100644 env/lib/python3.6/site-packages/requests/models.py create mode 100644 env/lib/python3.6/site-packages/requests/packages.py create mode 100644 env/lib/python3.6/site-packages/requests/sessions.py create mode 100644 env/lib/python3.6/site-packages/requests/status_codes.py create mode 100644 env/lib/python3.6/site-packages/requests/structures.py create mode 100644 env/lib/python3.6/site-packages/requests/utils.py create mode 100644 env/lib/python3.6/site-packages/requests_oauthlib-1.0.0.dist-info/INSTALLER create mode 100644 env/lib/python3.6/site-packages/requests_oauthlib-1.0.0.dist-info/LICENSE.txt create mode 100644 env/lib/python3.6/site-packages/requests_oauthlib-1.0.0.dist-info/METADATA create mode 100644 env/lib/python3.6/site-packages/requests_oauthlib-1.0.0.dist-info/RECORD create mode 100644 
env/lib/python3.6/site-packages/requests_oauthlib-1.0.0.dist-info/WHEEL create mode 100644 env/lib/python3.6/site-packages/requests_oauthlib-1.0.0.dist-info/top_level.txt create mode 100644 env/lib/python3.6/site-packages/requests_oauthlib/compliance_fixes/douban.py create mode 100644 env/lib/python3.6/site-packages/requests_oauthlib/compliance_fixes/facebook.py create mode 100644 env/lib/python3.6/site-packages/requests_oauthlib/compliance_fixes/fitbit.py create mode 100644 env/lib/python3.6/site-packages/requests_oauthlib/compliance_fixes/linkedin.py create mode 100644 env/lib/python3.6/site-packages/requests_oauthlib/compliance_fixes/mailchimp.py create mode 100644 env/lib/python3.6/site-packages/requests_oauthlib/compliance_fixes/plentymarkets.py create mode 100644 env/lib/python3.6/site-packages/requests_oauthlib/compliance_fixes/slack.py create mode 100644 env/lib/python3.6/site-packages/requests_oauthlib/compliance_fixes/weibo.py create mode 100644 env/lib/python3.6/site-packages/requests_oauthlib/oauth1_auth.py create mode 100644 env/lib/python3.6/site-packages/requests_oauthlib/oauth1_session.py create mode 100644 env/lib/python3.6/site-packages/requests_oauthlib/oauth2_auth.py create mode 100644 env/lib/python3.6/site-packages/requests_oauthlib/oauth2_session.py create mode 100644 env/lib/python3.6/site-packages/setuptools-28.8.0.dist-info/DESCRIPTION.rst create mode 100644 env/lib/python3.6/site-packages/setuptools-28.8.0.dist-info/INSTALLER create mode 100644 env/lib/python3.6/site-packages/setuptools-28.8.0.dist-info/METADATA create mode 100644 env/lib/python3.6/site-packages/setuptools-28.8.0.dist-info/RECORD create mode 100644 env/lib/python3.6/site-packages/setuptools-28.8.0.dist-info/WHEEL create mode 100644 env/lib/python3.6/site-packages/setuptools-28.8.0.dist-info/dependency_links.txt create mode 100644 env/lib/python3.6/site-packages/setuptools-28.8.0.dist-info/entry_points.txt create mode 100644 env/lib/python3.6/site-packages/setuptools-28.8.0.dist-info/metadata.json create mode 100644 env/lib/python3.6/site-packages/setuptools-28.8.0.dist-info/top_level.txt create mode 100644 env/lib/python3.6/site-packages/setuptools-28.8.0.dist-info/zip-safe create mode 100644 env/lib/python3.6/site-packages/setuptools/archive_util.py create mode 100644 env/lib/python3.6/site-packages/setuptools/cli-32.exe create mode 100644 env/lib/python3.6/site-packages/setuptools/cli-64.exe create mode 100644 env/lib/python3.6/site-packages/setuptools/cli.exe create mode 100644 env/lib/python3.6/site-packages/setuptools/command/alias.py create mode 100644 env/lib/python3.6/site-packages/setuptools/command/bdist_egg.py create mode 100644 env/lib/python3.6/site-packages/setuptools/command/bdist_rpm.py create mode 100644 env/lib/python3.6/site-packages/setuptools/command/bdist_wininst.py create mode 100644 env/lib/python3.6/site-packages/setuptools/command/build_ext.py create mode 100644 env/lib/python3.6/site-packages/setuptools/command/build_py.py create mode 100644 env/lib/python3.6/site-packages/setuptools/command/develop.py create mode 100644 env/lib/python3.6/site-packages/setuptools/command/easy_install.py create mode 100644 env/lib/python3.6/site-packages/setuptools/command/egg_info.py create mode 100644 env/lib/python3.6/site-packages/setuptools/command/install.py create mode 100644 env/lib/python3.6/site-packages/setuptools/command/install_egg_info.py create mode 100644 env/lib/python3.6/site-packages/setuptools/command/install_lib.py create mode 100644 
env/lib/python3.6/site-packages/setuptools/command/install_scripts.py create mode 100644 env/lib/python3.6/site-packages/setuptools/command/launcher manifest.xml create mode 100644 env/lib/python3.6/site-packages/setuptools/command/py36compat.py create mode 100644 env/lib/python3.6/site-packages/setuptools/command/register.py create mode 100644 env/lib/python3.6/site-packages/setuptools/command/rotate.py create mode 100644 env/lib/python3.6/site-packages/setuptools/command/saveopts.py create mode 100644 env/lib/python3.6/site-packages/setuptools/command/sdist.py create mode 100644 env/lib/python3.6/site-packages/setuptools/command/setopt.py create mode 100644 env/lib/python3.6/site-packages/setuptools/command/test.py create mode 100644 env/lib/python3.6/site-packages/setuptools/command/upload.py create mode 100644 env/lib/python3.6/site-packages/setuptools/command/upload_docs.py create mode 100644 env/lib/python3.6/site-packages/setuptools/depends.py create mode 100644 env/lib/python3.6/site-packages/setuptools/dist.py create mode 100644 env/lib/python3.6/site-packages/setuptools/extension.py create mode 100644 env/lib/python3.6/site-packages/setuptools/glob.py create mode 100644 env/lib/python3.6/site-packages/setuptools/gui-32.exe create mode 100644 env/lib/python3.6/site-packages/setuptools/gui-64.exe create mode 100644 env/lib/python3.6/site-packages/setuptools/gui.exe create mode 100644 env/lib/python3.6/site-packages/setuptools/launch.py create mode 100644 env/lib/python3.6/site-packages/setuptools/lib2to3_ex.py create mode 100644 env/lib/python3.6/site-packages/setuptools/monkey.py create mode 100644 env/lib/python3.6/site-packages/setuptools/msvc.py create mode 100644 env/lib/python3.6/site-packages/setuptools/namespaces.py create mode 100644 env/lib/python3.6/site-packages/setuptools/package_index.py create mode 100644 env/lib/python3.6/site-packages/setuptools/py26compat.py create mode 100644 env/lib/python3.6/site-packages/setuptools/py27compat.py create mode 100644 env/lib/python3.6/site-packages/setuptools/py31compat.py create mode 100644 env/lib/python3.6/site-packages/setuptools/sandbox.py create mode 100644 env/lib/python3.6/site-packages/setuptools/script (dev).tmpl create mode 100644 env/lib/python3.6/site-packages/setuptools/script.tmpl create mode 100644 env/lib/python3.6/site-packages/setuptools/site-patch.py create mode 100644 env/lib/python3.6/site-packages/setuptools/ssl_support.py create mode 100644 env/lib/python3.6/site-packages/setuptools/unicode_utils.py create mode 100644 env/lib/python3.6/site-packages/setuptools/version.py create mode 100644 env/lib/python3.6/site-packages/setuptools/windows_support.py create mode 100644 env/lib/python3.6/site-packages/six-1.11.0.dist-info/DESCRIPTION.rst create mode 100644 env/lib/python3.6/site-packages/six-1.11.0.dist-info/INSTALLER create mode 100644 env/lib/python3.6/site-packages/six-1.11.0.dist-info/METADATA create mode 100644 env/lib/python3.6/site-packages/six-1.11.0.dist-info/RECORD create mode 100644 env/lib/python3.6/site-packages/six-1.11.0.dist-info/WHEEL create mode 100644 env/lib/python3.6/site-packages/six-1.11.0.dist-info/metadata.json create mode 100644 env/lib/python3.6/site-packages/six-1.11.0.dist-info/top_level.txt create mode 100644 env/lib/python3.6/site-packages/six.py create mode 100644 env/lib/python3.6/site-packages/socks.py create mode 100644 env/lib/python3.6/site-packages/sockshandler.py create mode 100644 env/lib/python3.6/site-packages/tweepy-3.6.0.dist-info/DESCRIPTION.rst create mode 
100644 env/lib/python3.6/site-packages/tweepy-3.6.0.dist-info/INSTALLER create mode 100644 env/lib/python3.6/site-packages/tweepy-3.6.0.dist-info/METADATA create mode 100644 env/lib/python3.6/site-packages/tweepy-3.6.0.dist-info/RECORD create mode 100644 env/lib/python3.6/site-packages/tweepy-3.6.0.dist-info/WHEEL create mode 100644 env/lib/python3.6/site-packages/tweepy-3.6.0.dist-info/metadata.json create mode 100644 env/lib/python3.6/site-packages/tweepy-3.6.0.dist-info/top_level.txt create mode 100644 env/lib/python3.6/site-packages/tweepy-3.6.0.dist-info/zip-safe create mode 100644 env/lib/python3.6/site-packages/tweepy/api.py create mode 100644 env/lib/python3.6/site-packages/tweepy/auth.py create mode 100644 env/lib/python3.6/site-packages/tweepy/binder.py create mode 100644 env/lib/python3.6/site-packages/tweepy/cache.py create mode 100644 env/lib/python3.6/site-packages/tweepy/cursor.py create mode 100644 env/lib/python3.6/site-packages/tweepy/error.py create mode 100644 env/lib/python3.6/site-packages/tweepy/models.py create mode 100644 env/lib/python3.6/site-packages/tweepy/parsers.py create mode 100644 env/lib/python3.6/site-packages/tweepy/streaming.py create mode 100644 env/lib/python3.6/site-packages/tweepy/utils.py create mode 100644 env/lib/python3.6/site-packages/twitter-1.18.0.dist-info/DESCRIPTION.rst create mode 100644 env/lib/python3.6/site-packages/twitter-1.18.0.dist-info/INSTALLER create mode 100644 env/lib/python3.6/site-packages/twitter-1.18.0.dist-info/METADATA create mode 100644 env/lib/python3.6/site-packages/twitter-1.18.0.dist-info/RECORD create mode 100644 env/lib/python3.6/site-packages/twitter-1.18.0.dist-info/WHEEL create mode 100644 env/lib/python3.6/site-packages/twitter-1.18.0.dist-info/entry_points.txt create mode 100644 env/lib/python3.6/site-packages/twitter-1.18.0.dist-info/metadata.json create mode 100644 env/lib/python3.6/site-packages/twitter-1.18.0.dist-info/top_level.txt create mode 100644 env/lib/python3.6/site-packages/twitter-1.18.0.dist-info/zip-safe create mode 100644 env/lib/python3.6/site-packages/twitter/ansi.py create mode 100644 env/lib/python3.6/site-packages/twitter/api.py create mode 100644 env/lib/python3.6/site-packages/twitter/archiver.py create mode 100644 env/lib/python3.6/site-packages/twitter/auth.py create mode 100644 env/lib/python3.6/site-packages/twitter/cmdline.py create mode 100644 env/lib/python3.6/site-packages/twitter/corrupt.py create mode 100644 env/lib/python3.6/site-packages/twitter/follow.py create mode 100644 env/lib/python3.6/site-packages/twitter/ircbot.py create mode 100644 env/lib/python3.6/site-packages/twitter/logger.py create mode 100644 env/lib/python3.6/site-packages/twitter/oauth.py create mode 100644 env/lib/python3.6/site-packages/twitter/oauth2.py create mode 100644 env/lib/python3.6/site-packages/twitter/oauth_dance.py create mode 100644 env/lib/python3.6/site-packages/twitter/stream.py create mode 100644 env/lib/python3.6/site-packages/twitter/stream_example.py create mode 100644 env/lib/python3.6/site-packages/twitter/timezones.py create mode 100644 env/lib/python3.6/site-packages/twitter/twitter_globals.py create mode 100644 env/lib/python3.6/site-packages/twitter/util.py create mode 100755 env/lib/python3.6/site-packages/urllib3-1.23.dist-info/DESCRIPTION.rst create mode 100644 env/lib/python3.6/site-packages/urllib3-1.23.dist-info/INSTALLER create mode 100755 env/lib/python3.6/site-packages/urllib3-1.23.dist-info/LICENSE.txt create mode 100755 
env/lib/python3.6/site-packages/urllib3-1.23.dist-info/METADATA create mode 100644 env/lib/python3.6/site-packages/urllib3-1.23.dist-info/RECORD create mode 100755 env/lib/python3.6/site-packages/urllib3-1.23.dist-info/WHEEL create mode 100755 env/lib/python3.6/site-packages/urllib3-1.23.dist-info/metadata.json create mode 100755 env/lib/python3.6/site-packages/urllib3-1.23.dist-info/top_level.txt create mode 100755 env/lib/python3.6/site-packages/urllib3/_collections.py create mode 100755 env/lib/python3.6/site-packages/urllib3/connection.py create mode 100755 env/lib/python3.6/site-packages/urllib3/connectionpool.py create mode 100755 env/lib/python3.6/site-packages/urllib3/contrib/_securetransport/bindings.py create mode 100755 env/lib/python3.6/site-packages/urllib3/contrib/_securetransport/low_level.py create mode 100755 env/lib/python3.6/site-packages/urllib3/contrib/appengine.py create mode 100755 env/lib/python3.6/site-packages/urllib3/contrib/ntlmpool.py create mode 100755 env/lib/python3.6/site-packages/urllib3/contrib/pyopenssl.py create mode 100755 env/lib/python3.6/site-packages/urllib3/contrib/securetransport.py create mode 100755 env/lib/python3.6/site-packages/urllib3/contrib/socks.py create mode 100755 env/lib/python3.6/site-packages/urllib3/exceptions.py create mode 100755 env/lib/python3.6/site-packages/urllib3/fields.py create mode 100755 env/lib/python3.6/site-packages/urllib3/filepost.py create mode 100755 env/lib/python3.6/site-packages/urllib3/packages/backports/makefile.py create mode 100755 env/lib/python3.6/site-packages/urllib3/packages/ordered_dict.py create mode 100755 env/lib/python3.6/site-packages/urllib3/packages/six.py create mode 100755 env/lib/python3.6/site-packages/urllib3/packages/ssl_match_hostname/_implementation.py create mode 100755 env/lib/python3.6/site-packages/urllib3/poolmanager.py create mode 100755 env/lib/python3.6/site-packages/urllib3/request.py create mode 100755 env/lib/python3.6/site-packages/urllib3/response.py create mode 100755 env/lib/python3.6/site-packages/urllib3/util/connection.py create mode 100755 env/lib/python3.6/site-packages/urllib3/util/queue.py create mode 100755 env/lib/python3.6/site-packages/urllib3/util/request.py create mode 100755 env/lib/python3.6/site-packages/urllib3/util/response.py create mode 100755 env/lib/python3.6/site-packages/urllib3/util/retry.py create mode 100755 env/lib/python3.6/site-packages/urllib3/util/ssl_.py create mode 100755 env/lib/python3.6/site-packages/urllib3/util/timeout.py create mode 100755 env/lib/python3.6/site-packages/urllib3/util/url.py create mode 100755 env/lib/python3.6/site-packages/urllib3/util/wait.py create mode 100644 env/lib/python3.6/site-packages/wheel-0.31.1.dist-info/INSTALLER create mode 100644 env/lib/python3.6/site-packages/wheel-0.31.1.dist-info/LICENSE.txt create mode 100644 env/lib/python3.6/site-packages/wheel-0.31.1.dist-info/METADATA create mode 100644 env/lib/python3.6/site-packages/wheel-0.31.1.dist-info/RECORD create mode 100644 env/lib/python3.6/site-packages/wheel-0.31.1.dist-info/WHEEL create mode 100644 env/lib/python3.6/site-packages/wheel-0.31.1.dist-info/entry_points.txt create mode 100644 env/lib/python3.6/site-packages/wheel-0.31.1.dist-info/top_level.txt create mode 100644 env/lib/python3.6/site-packages/wheel/archive.py create mode 100644 env/lib/python3.6/site-packages/wheel/bdist_wheel.py create mode 100644 env/lib/python3.6/site-packages/wheel/egg2wheel.py create mode 100644 env/lib/python3.6/site-packages/wheel/install.py create mode 
100644 env/lib/python3.6/site-packages/wheel/metadata.py create mode 100644 env/lib/python3.6/site-packages/wheel/paths.py create mode 100644 env/lib/python3.6/site-packages/wheel/pep425tags.py create mode 100644 env/lib/python3.6/site-packages/wheel/pkginfo.py create mode 100644 env/lib/python3.6/site-packages/wheel/signatures/djbec.py create mode 100644 env/lib/python3.6/site-packages/wheel/signatures/ed25519py.py create mode 100644 env/lib/python3.6/site-packages/wheel/signatures/keys.py create mode 100644 env/lib/python3.6/site-packages/wheel/util.py create mode 100644 env/lib/python3.6/site-packages/wheel/wininst2wheel.py create mode 100644 env/pip-selfcheck.json create mode 100644 env/pyvenv.cfg create mode 100644 gJD.png create mode 100644 gJE.png create mode 100644 gJE2.png create mode 100644 mB.png create mode 100644 mC.png create mode 100644 mC2.png create mode 100644 wB.png create mode 100644 wD.png

diff --git a/D1.png b/D1.png
new file mode 100644
index 0000000000000000000000000000000000000000..638f8fcf9d3f228bc0f148c92a21d53e8187b839
GIT binary patch
literal 1404
[base85-encoded binary image data omitted]
literal 0
HcmV?d00001

diff --git a/H4.png b/H4.png
new file mode 100644
index 0000000000000000000000000000000000000000..7c7133b3aaf01543b9d5d880d0a2728028eee894
GIT binary patch
literal 1353
[base85-encoded binary image data omitted]
z@upv1w;Q3`n<1DZey1x< z4Bptg@ciPlXlpnc$HoAAQ_Si z{UMel9|F1ZPrxdC<=!w|6iAt?EhyfuYrrjmhf)k-%Z7V`3ZxQhpp-#npiD@~y^$Je znq`zKuio-ps_+H%YDFH^dG)IUn*~hnotLf>atUvwE0COKXseJfs1Y&xgRg_~I0?!l zCyi|aF82nm40&~l`Gq!*)PK#@I*b;rb)P(Yr|4@cYLo1m1vQy3fx!(#1x zc`ux5kMQ4&d1z|(IiZ5#D5H0vnJ^g{pQfjrHdafM=i*>-N5Km~#^~=sNvvuHOoAAF z1%GR@ZDiGBupYo7|oV!#p>Ijf}L zI4FwE09aeOC#&Frcf6X?zksC6@QX4stl%HW6{^xz`_^lRuO41h-a2#BoVM4`Yd=vF zI^%rzJ>#}zFu+}i|`{sp&=bL}93i+BZH>b+>+=?#wet02ma4#ELFbA8j zZcc4I@KbcB^e10R+ZylLT9$1sZ->&hqx0>nI>VLn%jGHSuEoGoDOYMnXS%ZO(ze8l zizR7YHL#eChZp5(-7Z+$_$&!il3w(sb$fFZV{&A%nqU8fV)riv?m)4A5tOo4EqWGX zY2ATb$&p1*TDNDeom&$>ec+aXsI{i~u`7m$QG71-jqP0q;-3truC0pVi)A@1jZfRi zc$5vf*3c0Zv|>nHc!P?7xR^jYvrho~2-ZAEnOio+l9WRw0xW=)!kPz`B3MZCYEue1 zMdhy=st_^-Rj{09O$~IG`fXdyTLkV9}%5V_ZoJGjL?+)NwkC{v!;=7AE~O9Q+&yZ{gstAYe88MgTUe5wK9B*p%bd z6okB{GGRkCiWM-ZLW=%JK>P@P(f;s$9|!YCS~zq3?adHM3o$@W{D zt`DRw`{%k>LAiVp)n+u$b-|v&R(8#_U`md@XWcoke4te7%&V4?gzs9*Ld&9U*}f}n zsa}$$EzXR!;+lEEoQ(b8;`NJ5&n{Qizh`xUt!&-*p@y*SdSE7W)`V774lyzihkuyftgL4RPhB2>9$-2Z5rX16{z`+*@(Y4r@b{0D&BTUJ9 zS{&~Z;KH2a_}#Z=fp?7NS`JQk3#R82%6p;Q?l2i0CnIC^tr2i)(xEYuv9G7seTGa! zb$xZtINop+l{rq1@7yR=lo(lesxFRIc(Fp_SVy;ltOIcxe5sD^yYGyiB_os5=dmG| zOSnyd>8S4qzbY6zq48K0EEgiHEy}Pm6Sc()XX9WDLWBPq5Pt{1=yeFd$^nC-a&cc; zb0A~iGq2BRs+aU>jVEK@ha)=|UrcLU8GAL3>{?{f8uuDl4Nj?CJebzhX6ywKE{~A1 zH^1mQ9NE7Fr8Q^lP8@M8_NFy;8G9u}3}q>0IloQD6Ipn~ZSp>#O4}w2s}?2CtyZVy z0lXxvc>>txSo0L!7%PUb=HZ@V>v(o!oLd_e0uSQ)s~i&7`e5D)PpM%2RR>i8aoa4^ z1CtKcbfk63kgi$I`?k34SuvzusE^VHwXj+b!)mPy>WY?X5V888-cFn{MCJ8FRNf6+ z-05Ewvn~Rn<&fj8A^Uk!-oJxinJGPGC~ij-)S|?-KrZkHVh2uCPKo@$N)HY!&aIW`P+_&2o(KYRY7Fz)CSsl0A=angoJEJvTF3v-wHxdv-qjPt^QPx5`8$f7(-O%m z&($f;E_t4FNG{ne2To3*R;mHU(QlkVbe9%+_v3_tq;aQ7hlv;9mup7uPk1JJsaF!s(K+pGst_-S?t`_!B zl={q9@GgR*LoNq#LeA+3_+#@ylaiRS;2jM7w8iQkv9896G~{(XG=rPkl2=muZLlqu(Y91{0NK}*o&lOw|x;zjKcXjc9a`b(%E>-KsA zODpu>2}0z?$hE}8?YW$JbTdb{aCF#TAgh?ejQKxND#mSjNLVlPkT#4IaePfB8`NYM zYs!^eye1xrYsw)DYvOn>>b5KsqygQ@Jt62dR36mvXEsASvt;h@p%x;j0*7K7c#e9+ zIM?1Zn2pV25oJq)CBw6M$K;_Q~l< zv95w%5c3;y{T0l!QS$VpNJTO{7rSJkudQu6+MZ3J^HTU(CZv!B#{|V&wL|1eFLM2P zWP1(tsZ?ojF$2cN-$-|IE+w{8}1O1GWUZ` zRY*fxZscJLrfO|k(9EU5xr>;GzptP@*p00kk;;`0C2`dLjy$|j?+HN))xwU)G)_48 z@OKgDMG$N;uP6PtIN?V)fK3=dV>`CMe};W$*i8HwFQ`Z4Z{QPW_~IXI7!1k=IrlCv zY|J2dtmFf<7Sf!uPYm$tyZa8f8zT6?9gfr5i|yyNw`Io_wKJsb&5jf{nkunsB& z7pkH$k~?A|>kdbzN8uXT5%}N{DjDo+u)pWiIbzX$Yv|^YJB@F<-#qws=*^aY*>#k2 z8E`Bx>b^dQ$I<@*ZW)c;?Tl?0Z^K5$zv`>w|4j4Z2O3OQ_Q6GR{x4xfh)001`rO|` zF7#IY zA_iCY@I9y2NH`Rwe~g9y=Qy~7gMWs=Y2~iFfP;?T;}+TcvWjDa0anGi!0Ee?hgD65 z;06c%*WC9E77D*}ehm|Juyc>S09QBo`x#NNnuz?ZvEwU_qx?WK7Y;A_BS8L7_(gvX zBl`E8`+TKr?~*O8+YcvKyk~r6TYcK%`oeDUb)NXdv)ttn&KE!btZ+Ry^;rd!emR7B&z1S=eja_BkaJj zAN~_PVYa{SP0YS__&t*YuA~@Ba6yUCrIuvdZwxzDw^S}_mSi{eOYWPd^p?8$u0-pl z-o&|A`ae_=<|;U86tdsL*;maauls+q<>;@g)Ap{kxqDs(`}{=PD~Ik|E0VGWebNmc z|CiF%noQ}|Wa~n2^4vmys@k6}^=GP^5?ws$YJaADdor}};-YO~Dz&F6UEY+Jy3h{? 
z0%BR`)7ITzs4Po);@l-BS#{~+Jxk59rDn-^&(g4LX~L>j&;y;NTznI#*KW#n${b{yenYuJ}<5=FX*jAS|*Uzi)*(~|0aTUg&>XqJP z)vG6Rm^+3Q$Dv{$2M!m{KQ)R*nw+uB7zZzN`C^&!mjz2#2p?d|E^jp>S}gnHFpo`9=si&bg+uFQ7FLg)RhRd8#qInjCFjytFc zI_S{iF7RosZrzbQ|Gh(t=dQOTI-#{ci@n|X=F7kCSZ?fHuI_!;yIehx6F{a6i|GuA zX=w3Sx~wKseJGcBzqE-I+EYzFw1fcJbWJ!KLSycOSZP z?t0C#`7mf0vvsbA*HbywVobhR&9H_6xDPUzz1;UdAPbEhr>qW5!aqO2cSX1h zE%e{x6mZiJob#6c4*vRYIQT9OzK?@n;NTx|fEPL55sQ}>`k!zBTP{Mdb*84r;12_i z&<}wI)(jE--(aFVP{?Gm_X*4UgzjI7?e7!z_lc7GM$?=jqt?u+GTaxe4Z4g@Kd0pa zd}yPeQ)UR|w+vr5Kvkx0)hAG5k1CJU%aQsXLF(a9z4@pjWp#0=?tGMx@qUhp;~c6# zA0>?x;4&grk+O=A^4i$XWgN(_G02flslZbVOQAYLsJ~_Ux@mqksY;o5tPqu-K~Sg6 zl`DkfGZ5>_P|i=U5Ib@?9u-&2Q!B*w9DZGa>j;l3aUIj^2%41Hu|iadm8LfBOPTkt z5C`%o`%~rvD@1J`r8Z@*TOsQ6D9)65-wLr`q$JHLbIl5|S0to1t<99Qq|AF)2f0M*qbstSBQNZLlPHK=BgE9r-(>ZtZCu-RD~~PZdf7wB4%x-*>yrF!rV)R9eaLl zAw>vl+mnbF4B+#L^M$jdh@jWix~9z8WZ{r4!o|8G))3Ygv5u{^LSE1KqY-OrJ)d|q zVO^n*COo>(b&Xq-Wj3i#nRl%a)xvO@O|G5O>j~==$+|4M!WmPfmUXJ6uxDqJS-=A& tQ9&zGg#9C>MrQx0L?yF*WZnwF!7>QyHp?nL^4ny#kN1RSvQlnA{J(x|3CI8d literal 0 HcmV?d00001 diff --git a/env/bin/activate b/env/bin/activate new file mode 100644 index 0000000..8ec6c05 --- /dev/null +++ b/env/bin/activate @@ -0,0 +1,76 @@ +# This file must be used with "source bin/activate" *from bash* +# you cannot run it directly + +deactivate () { + # reset old environment variables + if [ -n "$_OLD_VIRTUAL_PATH" ] ; then + PATH="$_OLD_VIRTUAL_PATH" + export PATH + unset _OLD_VIRTUAL_PATH + fi + if [ -n "$_OLD_VIRTUAL_PYTHONHOME" ] ; then + PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME" + export PYTHONHOME + unset _OLD_VIRTUAL_PYTHONHOME + fi + + # This should detect bash and zsh, which have a hash command that must + # be called to get it to forget past commands. Without forgetting + # past commands the $PATH changes we made may not be respected + if [ -n "$BASH" -o -n "$ZSH_VERSION" ] ; then + hash -r + fi + + if [ -n "$_OLD_VIRTUAL_PS1" ] ; then + PS1="$_OLD_VIRTUAL_PS1" + export PS1 + unset _OLD_VIRTUAL_PS1 + fi + + unset VIRTUAL_ENV + if [ ! "$1" = "nondestructive" ] ; then + # Self destruct! + unset -f deactivate + fi +} + +# unset irrelevant variables +deactivate nondestructive + +VIRTUAL_ENV="/Users/norbert/Desktop/ESG LATEX/env" +export VIRTUAL_ENV + +_OLD_VIRTUAL_PATH="$PATH" +PATH="$VIRTUAL_ENV/bin:$PATH" +export PATH + +# unset PYTHONHOME if set +# this will fail if PYTHONHOME is set to the empty string (which is bad anyway) +# could use `if (set -u; : $PYTHONHOME) ;` in bash +if [ -n "$PYTHONHOME" ] ; then + _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME" + unset PYTHONHOME +fi + +if [ -z "$VIRTUAL_ENV_DISABLE_PROMPT" ] ; then + _OLD_VIRTUAL_PS1="$PS1" + if [ "x(env) " != x ] ; then + PS1="(env) $PS1" + else + if [ "`basename \"$VIRTUAL_ENV\"`" = "__" ] ; then + # special case for Aspen magic directories + # see http://www.zetadev.com/software/aspen/ + PS1="[`basename \`dirname \"$VIRTUAL_ENV\"\``] $PS1" + else + PS1="(`basename \"$VIRTUAL_ENV\"`)$PS1" + fi + fi + export PS1 +fi + +# This should detect bash and zsh, which have a hash command that must +# be called to get it to forget past commands. Without forgetting +# past commands the $PATH changes we made may not be respected +if [ -n "$BASH" -o -n "$ZSH_VERSION" ] ; then + hash -r +fi diff --git a/env/bin/activate.csh b/env/bin/activate.csh new file mode 100644 index 0000000..2ec746d --- /dev/null +++ b/env/bin/activate.csh @@ -0,0 +1,37 @@ +# This file must be used with "source bin/activate.csh" *from csh*. +# You cannot run it directly. +# Created by Davide Di Blasi . 
+# Ported to Python 3.3 venv by Andrew Svetlov + +alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; test "\!:*" != "nondestructive" && unalias deactivate' + +# Unset irrelevant variables. +deactivate nondestructive + +setenv VIRTUAL_ENV "/Users/norbert/Desktop/ESG LATEX/env" + +set _OLD_VIRTUAL_PATH="$PATH" +setenv PATH "$VIRTUAL_ENV/bin:$PATH" + + +set _OLD_VIRTUAL_PROMPT="$prompt" + +if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then + if ("env" != "") then + set env_name = "env" + else + if (`basename "VIRTUAL_ENV"` == "__") then + # special case for Aspen magic directories + # see http://www.zetadev.com/software/aspen/ + set env_name = `basename \`dirname "$VIRTUAL_ENV"\`` + else + set env_name = `basename "$VIRTUAL_ENV"` + endif + endif + set prompt = "[$env_name] $prompt" + unset env_name +endif + +alias pydoc python -m pydoc + +rehash diff --git a/env/bin/activate.fish b/env/bin/activate.fish new file mode 100644 index 0000000..c81e262 --- /dev/null +++ b/env/bin/activate.fish @@ -0,0 +1,75 @@ +# This file must be used with ". bin/activate.fish" *from fish* (http://fishshell.org) +# you cannot run it directly + +function deactivate -d "Exit virtualenv and return to normal shell environment" + # reset old environment variables + if test -n "$_OLD_VIRTUAL_PATH" + set -gx PATH $_OLD_VIRTUAL_PATH + set -e _OLD_VIRTUAL_PATH + end + if test -n "$_OLD_VIRTUAL_PYTHONHOME" + set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME + set -e _OLD_VIRTUAL_PYTHONHOME + end + + if test -n "$_OLD_FISH_PROMPT_OVERRIDE" + functions -e fish_prompt + set -e _OLD_FISH_PROMPT_OVERRIDE + functions -c _old_fish_prompt fish_prompt + functions -e _old_fish_prompt + end + + set -e VIRTUAL_ENV + if test "$argv[1]" != "nondestructive" + # Self destruct! + functions -e deactivate + end +end + +# unset irrelevant variables +deactivate nondestructive + +set -gx VIRTUAL_ENV "/Users/norbert/Desktop/ESG LATEX/env" + +set -gx _OLD_VIRTUAL_PATH $PATH +set -gx PATH "$VIRTUAL_ENV/bin" $PATH + +# unset PYTHONHOME if set +if set -q PYTHONHOME + set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME + set -e PYTHONHOME +end + +if test -z "$VIRTUAL_ENV_DISABLE_PROMPT" + # fish uses a function instead of an env var to generate the prompt. + + # save the current fish_prompt function as the function _old_fish_prompt + functions -c fish_prompt _old_fish_prompt + + # with the original prompt function renamed, we can override with our own. + function fish_prompt + # Save the return status of the last command + set -l old_status $status + + # Prompt override? + if test -n "(env) " + printf "%s%s" "(env) " (set_color normal) + else + # ...Otherwise, prepend env + set -l _checkbase (basename "$VIRTUAL_ENV") + if test $_checkbase = "__" + # special case for Aspen magic directories + # see http://www.zetadev.com/software/aspen/ + printf "%s[%s]%s " (set_color -b blue white) (basename (dirname "$VIRTUAL_ENV")) (set_color normal) + else + printf "%s(%s)%s" (set_color -b blue white) (basename "$VIRTUAL_ENV") (set_color normal) + end + end + + # Restore the return status of the previous command. + echo "exit $old_status" | . 
+ _old_fish_prompt + end + + set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV" +end diff --git a/env/bin/chardetect b/env/bin/chardetect new file mode 100755 index 0000000..8e2c3fe --- /dev/null +++ b/env/bin/chardetect @@ -0,0 +1,12 @@ +#!/bin/sh +'''exec' "/Users/norbert/Desktop/ESG LATEX/env/bin/python3" "$0" "$@" +' ''' +# -*- coding: utf-8 -*- +import re +import sys + +from chardet.cli.chardetect import main + +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/env/bin/easy_install b/env/bin/easy_install new file mode 100755 index 0000000..db175e4 --- /dev/null +++ b/env/bin/easy_install @@ -0,0 +1,11 @@ +#!"/Users/norbert/Desktop/ESG LATEX/env/bin/python3" + +# -*- coding: utf-8 -*- +import re +import sys + +from setuptools.command.easy_install import main + +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/env/bin/easy_install-3.6 b/env/bin/easy_install-3.6 new file mode 100755 index 0000000..db175e4 --- /dev/null +++ b/env/bin/easy_install-3.6 @@ -0,0 +1,11 @@ +#!"/Users/norbert/Desktop/ESG LATEX/env/bin/python3" + +# -*- coding: utf-8 -*- +import re +import sys + +from setuptools.command.easy_install import main + +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/env/bin/pip b/env/bin/pip new file mode 100755 index 0000000..4ab9b65 --- /dev/null +++ b/env/bin/pip @@ -0,0 +1,12 @@ +#!/bin/sh +'''exec' "/Users/norbert/Desktop/ESG LATEX/env/bin/python" "$0" "$@" +' ''' +# -*- coding: utf-8 -*- +import re +import sys + +from pip._internal import main + +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/env/bin/pip3 b/env/bin/pip3 new file mode 100755 index 0000000..4ab9b65 --- /dev/null +++ b/env/bin/pip3 @@ -0,0 +1,12 @@ +#!/bin/sh +'''exec' "/Users/norbert/Desktop/ESG LATEX/env/bin/python" "$0" "$@" +' ''' +# -*- coding: utf-8 -*- +import re +import sys + +from pip._internal import main + +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/env/bin/pip3.6 b/env/bin/pip3.6 new file mode 100755 index 0000000..4ab9b65 --- /dev/null +++ b/env/bin/pip3.6 @@ -0,0 +1,12 @@ +#!/bin/sh +'''exec' "/Users/norbert/Desktop/ESG LATEX/env/bin/python" "$0" "$@" +' ''' +# -*- coding: utf-8 -*- +import re +import sys + +from pip._internal import main + +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/env/bin/python b/env/bin/python new file mode 120000 index 0000000..b8a0adb --- /dev/null +++ b/env/bin/python @@ -0,0 +1 @@ +python3 \ No newline at end of file diff --git a/env/bin/python 2 b/env/bin/python 2 new file mode 120000 index 0000000..b8a0adb --- /dev/null +++ b/env/bin/python 2 @@ -0,0 +1 @@ +python3 \ No newline at end of file diff --git a/env/bin/python3 b/env/bin/python3 new file mode 120000 index 0000000..79ab74b --- /dev/null +++ b/env/bin/python3 @@ -0,0 +1 @@ +/usr/local/bin/python3 \ No newline at end of file diff --git a/env/bin/python3 2 b/env/bin/python3 2 new file mode 120000 index 0000000..79ab74b --- /dev/null +++ b/env/bin/python3 2 @@ -0,0 +1 @@ +/usr/local/bin/python3 \ No newline at end of file diff --git a/env/bin/twitter b/env/bin/twitter new file mode 100755 index 0000000..f30ea68 --- /dev/null +++ b/env/bin/twitter 
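A note on the generated console scripts above (chardetect, easy_install, pip, pip3, pip3.6, and the twitter tools that follow): each one is a /bin/sh–Python polyglot that re-executes itself under the virtualenv's interpreter and then normalises sys.argv[0] before handing off to the package's main(). As a minimal sketch (not part of this patch; the file names are only illustrative), the re.sub call they all share strips the "-script.py"/".exe" suffixes that setuptools-style launchers append on Windows:

    import re

    # Same regex as in the stubs above: drop a trailing "-script.py(w)" or ".exe"
    # so the entry point sees a clean program name. The sample names are hypothetical.
    for argv0 in ("pip", "pip-script.py", "pip.exe", "chardetect-script.pyw"):
        print(re.sub(r'(-script\.pyw?|\.exe)?$', '', argv0))
    # prints: pip, pip, pip, chardetect
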
@@ -0,0 +1,12 @@ +#!/bin/sh +'''exec' "/Users/norbert/Desktop/ESG LATEX/env/bin/python3" "$0" "$@" +' ''' +# -*- coding: utf-8 -*- +import re +import sys + +from twitter.cmdline import main + +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/env/bin/twitter-archiver b/env/bin/twitter-archiver new file mode 100755 index 0000000..21f780f --- /dev/null +++ b/env/bin/twitter-archiver @@ -0,0 +1,12 @@ +#!/bin/sh +'''exec' "/Users/norbert/Desktop/ESG LATEX/env/bin/python3" "$0" "$@" +' ''' +# -*- coding: utf-8 -*- +import re +import sys + +from twitter.archiver import main + +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/env/bin/twitter-follow b/env/bin/twitter-follow new file mode 100755 index 0000000..adada27 --- /dev/null +++ b/env/bin/twitter-follow @@ -0,0 +1,12 @@ +#!/bin/sh +'''exec' "/Users/norbert/Desktop/ESG LATEX/env/bin/python3" "$0" "$@" +' ''' +# -*- coding: utf-8 -*- +import re +import sys + +from twitter.follow import main + +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/env/bin/twitter-log b/env/bin/twitter-log new file mode 100755 index 0000000..d769b7e --- /dev/null +++ b/env/bin/twitter-log @@ -0,0 +1,12 @@ +#!/bin/sh +'''exec' "/Users/norbert/Desktop/ESG LATEX/env/bin/python3" "$0" "$@" +' ''' +# -*- coding: utf-8 -*- +import re +import sys + +from twitter.logger import main + +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/env/bin/twitter-stream-example b/env/bin/twitter-stream-example new file mode 100755 index 0000000..6f161d6 --- /dev/null +++ b/env/bin/twitter-stream-example @@ -0,0 +1,12 @@ +#!/bin/sh +'''exec' "/Users/norbert/Desktop/ESG LATEX/env/bin/python3" "$0" "$@" +' ''' +# -*- coding: utf-8 -*- +import re +import sys + +from twitter.stream_example import main + +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/env/bin/twitterbot b/env/bin/twitterbot new file mode 100755 index 0000000..15720d5 --- /dev/null +++ b/env/bin/twitterbot @@ -0,0 +1,12 @@ +#!/bin/sh +'''exec' "/Users/norbert/Desktop/ESG LATEX/env/bin/python3" "$0" "$@" +' ''' +# -*- coding: utf-8 -*- +import re +import sys + +from twitter.ircbot import main + +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/env/bin/wheel b/env/bin/wheel new file mode 100755 index 0000000..754428c --- /dev/null +++ b/env/bin/wheel @@ -0,0 +1,12 @@ +#!/bin/sh +'''exec' "/Users/norbert/Desktop/ESG LATEX/env/bin/python" "$0" "$@" +' ''' +# -*- coding: utf-8 -*- +import re +import sys + +from wheel.tool import main + +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/env/lib/python3.6/site-packages/PySocks-1.6.8.dist-info/INSTALLER b/env/lib/python3.6/site-packages/PySocks-1.6.8.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/env/lib/python3.6/site-packages/PySocks-1.6.8.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/env/lib/python3.6/site-packages/PySocks-1.6.8.dist-info/METADATA b/env/lib/python3.6/site-packages/PySocks-1.6.8.dist-info/METADATA new file mode 100644 index 0000000..9fde5ae --- /dev/null +++ 
b/env/lib/python3.6/site-packages/PySocks-1.6.8.dist-info/METADATA @@ -0,0 +1,17 @@ +Metadata-Version: 2.1 +Name: PySocks +Version: 1.6.8 +Summary: A Python SOCKS client module. See https://github.com/Anorov/PySocks for more information. +Home-page: https://github.com/Anorov/PySocks +Author: Anorov +Author-email: anorov.vorona@gmail.com +License: BSD +Keywords: socks,proxy +Platform: UNKNOWN +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 + +UNKNOWN + + diff --git a/env/lib/python3.6/site-packages/PySocks-1.6.8.dist-info/RECORD b/env/lib/python3.6/site-packages/PySocks-1.6.8.dist-info/RECORD new file mode 100644 index 0000000..79979c5 --- /dev/null +++ b/env/lib/python3.6/site-packages/PySocks-1.6.8.dist-info/RECORD @@ -0,0 +1,9 @@ +socks.py,sha256=woV5F-0vsQXxz-rQhc0v1TAsog2pyaz27L-yW8oisVo,32281 +sockshandler.py,sha256=ENwUUO3vt84_1yRHAq1A147TL_TAuFux2t_1QvNV2Vo,2913 +PySocks-1.6.8.dist-info/METADATA,sha256=DaHdxr_C3zxtnNqKamDUSpUYRm3U7EXJ2XwBWFczzbQ,460 +PySocks-1.6.8.dist-info/RECORD,, +PySocks-1.6.8.dist-info/WHEEL,sha256=qDQm22Y5ojsezftdkHsFPuhvCxnQSxD3jSPcnpBLUV4,93 +PySocks-1.6.8.dist-info/top_level.txt,sha256=TKSOIfCFBoK9EY8FBYbYqC3PWd3--G15ph9n8-QHPDk,19 +PySocks-1.6.8.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +__pycache__/sockshandler.cpython-36.pyc,, +__pycache__/socks.cpython-36.pyc,, diff --git a/env/lib/python3.6/site-packages/PySocks-1.6.8.dist-info/WHEEL b/env/lib/python3.6/site-packages/PySocks-1.6.8.dist-info/WHEEL new file mode 100644 index 0000000..289c267 --- /dev/null +++ b/env/lib/python3.6/site-packages/PySocks-1.6.8.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.31.1) +Root-Is-Purelib: true +Tag: cp36-none-any + diff --git a/env/lib/python3.6/site-packages/PySocks-1.6.8.dist-info/top_level.txt b/env/lib/python3.6/site-packages/PySocks-1.6.8.dist-info/top_level.txt new file mode 100644 index 0000000..9476163 --- /dev/null +++ b/env/lib/python3.6/site-packages/PySocks-1.6.8.dist-info/top_level.txt @@ -0,0 +1,2 @@ +socks +sockshandler diff --git a/env/lib/python3.6/site-packages/certifi-2018.8.24.dist-info/DESCRIPTION.rst b/env/lib/python3.6/site-packages/certifi-2018.8.24.dist-info/DESCRIPTION.rst new file mode 100644 index 0000000..643a4f9 --- /dev/null +++ b/env/lib/python3.6/site-packages/certifi-2018.8.24.dist-info/DESCRIPTION.rst @@ -0,0 +1,48 @@ +Certifi: Python SSL Certificates +================================ + +`Certifi`_ is a carefully curated collection of Root Certificates for +validating the trustworthiness of SSL certificates while verifying the identity +of TLS hosts. It has been extracted from the `Requests`_ project. + +Installation +------------ + +``certifi`` is available on PyPI. Simply install it with ``pip``:: + + $ pip install certifi + +Usage +----- + +To reference the installed certificate authority (CA) bundle, you can use the +built-in function:: + + >>> import certifi + + >>> certifi.where() + '/usr/local/lib/python2.7/site-packages/certifi/cacert.pem' + +Enjoy! + +1024-bit Root Certificates +~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Browsers and certificate authorities have concluded that 1024-bit keys are +unacceptably weak for certificates, particularly root certificates. For this +reason, Mozilla has removed any weak (i.e. 1024-bit key) certificate from its +bundle, replacing it with an equivalent strong (i.e. 2048-bit or greater key) +certificate from the same CA. 
Because Mozilla removed these certificates from +its bundle, ``certifi`` removed them as well. + +In previous versions, ``certifi`` provided the ``certifi.old_where()`` function +to intentionally re-add the 1024-bit roots back into your bundle. This was not +recommended in production and therefore was removed. To assist in migrating old +code, the function ``certifi.old_where()`` continues to exist as an alias of +``certifi.where()``. Please update your code to use ``certifi.where()`` +instead. ``certifi.old_where()`` will be removed in 2018. + +.. _`Certifi`: http://certifi.io/en/latest/ +.. _`Requests`: http://docs.python-requests.org/en/latest/ + + diff --git a/env/lib/python3.6/site-packages/certifi-2018.8.24.dist-info/INSTALLER b/env/lib/python3.6/site-packages/certifi-2018.8.24.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/env/lib/python3.6/site-packages/certifi-2018.8.24.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/env/lib/python3.6/site-packages/certifi-2018.8.24.dist-info/LICENSE.txt b/env/lib/python3.6/site-packages/certifi-2018.8.24.dist-info/LICENSE.txt new file mode 100644 index 0000000..802b53f --- /dev/null +++ b/env/lib/python3.6/site-packages/certifi-2018.8.24.dist-info/LICENSE.txt @@ -0,0 +1,21 @@ +This packge contains a modified version of ca-bundle.crt: + +ca-bundle.crt -- Bundle of CA Root Certificates + +Certificate data from Mozilla as of: Thu Nov 3 19:04:19 2011# +This is a bundle of X.509 certificates of public Certificate Authorities +(CA). These were automatically extracted from Mozilla's root certificates +file (certdata.txt). This file can be found in the mozilla source tree: +http://mxr.mozilla.org/mozilla/source/security/nss/lib/ckfw/builtins/certdata.txt?raw=1# +It contains the certificates in PEM format and therefore +can be directly used with curl / libcurl / php_curl, or with +an Apache+mod_ssl webserver for SSL client authentication. +Just configure this file as the SSLCACertificateFile.# + +***** BEGIN LICENSE BLOCK ***** +This Source Code Form is subject to the terms of the Mozilla Public License, +v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain +one at http://mozilla.org/MPL/2.0/. + +***** END LICENSE BLOCK ***** +@(#) $RCSfile: certdata.txt,v $ $Revision: 1.80 $ $Date: 2011/11/03 15:11:58 $ diff --git a/env/lib/python3.6/site-packages/certifi-2018.8.24.dist-info/METADATA b/env/lib/python3.6/site-packages/certifi-2018.8.24.dist-info/METADATA new file mode 100644 index 0000000..2f8eeb9 --- /dev/null +++ b/env/lib/python3.6/site-packages/certifi-2018.8.24.dist-info/METADATA @@ -0,0 +1,71 @@ +Metadata-Version: 2.0 +Name: certifi +Version: 2018.8.24 +Summary: Python package for providing Mozilla's CA Bundle. 
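The certifi documentation reproduced above stops at certifi.where(); as a minimal sketch of how that bundle path is typically consumed (an illustration only, not something this patch adds), the standard-library ssl module accepts it directly:

    import ssl

    import certifi  # the vendored package added by this patch

    # Build a client TLS context that verifies servers against the Mozilla
    # bundle shipped as site-packages/certifi/cacert.pem.
    context = ssl.create_default_context(cafile=certifi.where())
    print(context.verify_mode)  # VerifyMode.CERT_REQUIRED

The resulting context can then be passed to any client that accepts an SSLContext, for example urllib.request.urlopen(url, context=context).
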
+Home-page: http://certifi.io/ +Author: Kenneth Reitz +Author-email: me@kennethreitz.com +License: MPL-2.0 +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0) +Classifier: Natural Language :: English +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 + +Certifi: Python SSL Certificates +================================ + +`Certifi`_ is a carefully curated collection of Root Certificates for +validating the trustworthiness of SSL certificates while verifying the identity +of TLS hosts. It has been extracted from the `Requests`_ project. + +Installation +------------ + +``certifi`` is available on PyPI. Simply install it with ``pip``:: + + $ pip install certifi + +Usage +----- + +To reference the installed certificate authority (CA) bundle, you can use the +built-in function:: + + >>> import certifi + + >>> certifi.where() + '/usr/local/lib/python2.7/site-packages/certifi/cacert.pem' + +Enjoy! + +1024-bit Root Certificates +~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Browsers and certificate authorities have concluded that 1024-bit keys are +unacceptably weak for certificates, particularly root certificates. For this +reason, Mozilla has removed any weak (i.e. 1024-bit key) certificate from its +bundle, replacing it with an equivalent strong (i.e. 2048-bit or greater key) +certificate from the same CA. Because Mozilla removed these certificates from +its bundle, ``certifi`` removed them as well. + +In previous versions, ``certifi`` provided the ``certifi.old_where()`` function +to intentionally re-add the 1024-bit roots back into your bundle. This was not +recommended in production and therefore was removed. To assist in migrating old +code, the function ``certifi.old_where()`` continues to exist as an alias of +``certifi.where()``. Please update your code to use ``certifi.where()`` +instead. ``certifi.old_where()`` will be removed in 2018. + +.. _`Certifi`: http://certifi.io/en/latest/ +.. 
_`Requests`: http://docs.python-requests.org/en/latest/ + + diff --git a/env/lib/python3.6/site-packages/certifi-2018.8.24.dist-info/RECORD b/env/lib/python3.6/site-packages/certifi-2018.8.24.dist-info/RECORD new file mode 100644 index 0000000..160c94d --- /dev/null +++ b/env/lib/python3.6/site-packages/certifi-2018.8.24.dist-info/RECORD @@ -0,0 +1,15 @@ +certifi/__init__.py,sha256=5lCYV1iWxoirX1OAaSHkBYUuZGdcwEjEBS6DS_trL0s,63 +certifi/__main__.py,sha256=FiOYt1Fltst7wk9DRa6GCoBr8qBUxlNQu_MKJf04E6s,41 +certifi/cacert.pem,sha256=XA-4HVBsOrBD5lfg-b3PiUzAvwUd2qlIzwXypIMIRGM,263074 +certifi/core.py,sha256=xPQDdG_siy5A7BfqGWa7RJhcA61xXEqPiSrw9GNyhHE,836 +certifi-2018.8.24.dist-info/DESCRIPTION.rst,sha256=jXrtxvB2mFIsHbuK8aP8RXrMx5yecyAIMZ2cn8Xb_ro,1679 +certifi-2018.8.24.dist-info/LICENSE.txt,sha256=anCkv2sBABbVmmS4rkrY3H9e8W8ftFPMLs13HFo0ETE,1048 +certifi-2018.8.24.dist-info/METADATA,sha256=-ocZf77oRzmADbXuEzjj9Enskw6EFUCTZkmxjKigUNo,2570 +certifi-2018.8.24.dist-info/RECORD,, +certifi-2018.8.24.dist-info/WHEEL,sha256=5wvfB7GvgZAbKBSE9uX9Zbi6LCL-_KgezgHblXhCRnM,113 +certifi-2018.8.24.dist-info/metadata.json,sha256=kzmm48EBIVIV46xxkvsUtEaV0UrYUjxOQOD-WDpIPrY,1006 +certifi-2018.8.24.dist-info/top_level.txt,sha256=KMu4vUCfsjLrkPbSNdgdekS-pVJzBAJFO__nI8NF6-U,8 +certifi-2018.8.24.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +certifi/__pycache__/__main__.cpython-36.pyc,, +certifi/__pycache__/core.cpython-36.pyc,, +certifi/__pycache__/__init__.cpython-36.pyc,, diff --git a/env/lib/python3.6/site-packages/certifi-2018.8.24.dist-info/WHEEL b/env/lib/python3.6/site-packages/certifi-2018.8.24.dist-info/WHEEL new file mode 100644 index 0000000..7bf9daa --- /dev/null +++ b/env/lib/python3.6/site-packages/certifi-2018.8.24.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.30.0.a0) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/env/lib/python3.6/site-packages/certifi-2018.8.24.dist-info/metadata.json b/env/lib/python3.6/site-packages/certifi-2018.8.24.dist-info/metadata.json new file mode 100644 index 0000000..7162025 --- /dev/null +++ b/env/lib/python3.6/site-packages/certifi-2018.8.24.dist-info/metadata.json @@ -0,0 +1 @@ +{"classifiers": ["Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)", "Natural Language :: English", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6"], "extensions": {"python.details": {"contacts": [{"email": "me@kennethreitz.com", "name": "Kenneth Reitz", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst", "license": "LICENSE.txt"}, "project_urls": {"Home": "http://certifi.io/"}}}, "generator": "bdist_wheel (0.30.0.a0)", "license": "MPL-2.0", "metadata_version": "2.0", "name": "certifi", "summary": "Python package for providing Mozilla's CA Bundle.", "version": "2018.8.24"} \ No newline at end of file diff --git a/env/lib/python3.6/site-packages/certifi-2018.8.24.dist-info/top_level.txt b/env/lib/python3.6/site-packages/certifi-2018.8.24.dist-info/top_level.txt new file mode 100644 index 0000000..963eac5 --- /dev/null +++ 
b/env/lib/python3.6/site-packages/certifi-2018.8.24.dist-info/top_level.txt @@ -0,0 +1 @@ +certifi diff --git a/env/lib/python3.6/site-packages/certifi/cacert.pem b/env/lib/python3.6/site-packages/certifi/cacert.pem new file mode 100644 index 0000000..85de024 --- /dev/null +++ b/env/lib/python3.6/site-packages/certifi/cacert.pem @@ -0,0 +1,4300 @@ + +# Issuer: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA +# Subject: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA +# Label: "GlobalSign Root CA" +# Serial: 4835703278459707669005204 +# MD5 Fingerprint: 3e:45:52:15:09:51:92:e1:b7:5d:37:9f:b1:87:29:8a +# SHA1 Fingerprint: b1:bc:96:8b:d4:f4:9d:62:2a:a8:9a:81:f2:15:01:52:a4:1d:82:9c +# SHA256 Fingerprint: eb:d4:10:40:e4:bb:3e:c7:42:c9:e3:81:d3:1e:f2:a4:1a:48:b6:68:5c:96:e7:ce:f3:c1:df:6c:d4:33:1c:99 +-----BEGIN CERTIFICATE----- +MIIDdTCCAl2gAwIBAgILBAAAAAABFUtaw5QwDQYJKoZIhvcNAQEFBQAwVzELMAkG +A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv +b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw05ODA5MDExMjAw +MDBaFw0yODAxMjgxMjAwMDBaMFcxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9i +YWxTaWduIG52LXNhMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJHbG9iYWxT +aWduIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDaDuaZ +jc6j40+Kfvvxi4Mla+pIH/EqsLmVEQS98GPR4mdmzxzdzxtIK+6NiY6arymAZavp +xy0Sy6scTHAHoT0KMM0VjU/43dSMUBUc71DuxC73/OlS8pF94G3VNTCOXkNz8kHp +1Wrjsok6Vjk4bwY8iGlbKk3Fp1S4bInMm/k8yuX9ifUSPJJ4ltbcdG6TRGHRjcdG +snUOhugZitVtbNV4FpWi6cgKOOvyJBNPc1STE4U6G7weNLWLBYy5d4ux2x8gkasJ +U26Qzns3dLlwR5EiUWMWea6xrkEmCMgZK9FGqkjWZCrXgzT/LCrBbBlDSgeF59N8 +9iFo7+ryUp9/k5DPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8E +BTADAQH/MB0GA1UdDgQWBBRge2YaRQ2XyolQL30EzTSo//z9SzANBgkqhkiG9w0B +AQUFAAOCAQEA1nPnfE920I2/7LqivjTFKDK1fPxsnCwrvQmeU79rXqoRSLblCKOz +yj1hTdNGCbM+w6DjY1Ub8rrvrTnhQ7k4o+YviiY776BQVvnGCv04zcQLcFGUl5gE +38NflNUVyRRBnMRddWQVDf9VMOyGj/8N7yy5Y0b2qvzfvGn9LhJIZJrglfCm7ymP +AbEVtQwdpf5pLGkkeB6zpxxxYu7KyJesF12KwvhHhm4qxFYxldBniYUr+WymXUad +DKqC5JlR3XC321Y9YeRq4VzW9v493kHMB65jUr9TU/Qr6cf9tveCX4XSQRjbgbME +HMUfpIBvFSDJ3gyICh3WZlXi/EjJKSZp4A== +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2 +# Label: "GlobalSign Root CA - R2" +# Serial: 4835703278459682885658125 +# MD5 Fingerprint: 94:14:77:7e:3e:5e:fd:8f:30:bd:41:b0:cf:e7:d0:30 +# SHA1 Fingerprint: 75:e0:ab:b6:13:85:12:27:1c:04:f8:5f:dd:de:38:e4:b7:24:2e:fe +# SHA256 Fingerprint: ca:42:dd:41:74:5f:d0:b8:1e:b9:02:36:2c:f9:d8:bf:71:9d:a1:bd:1b:1e:fc:94:6f:5b:4c:99:f4:2c:1b:9e +-----BEGIN CERTIFICATE----- +MIIDujCCAqKgAwIBAgILBAAAAAABD4Ym5g0wDQYJKoZIhvcNAQEFBQAwTDEgMB4G +A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjIxEzARBgNVBAoTCkdsb2JhbFNp +Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDYxMjE1MDgwMDAwWhcNMjExMjE1 +MDgwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMjETMBEG +A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAKbPJA6+Lm8omUVCxKs+IVSbC9N/hHD6ErPL +v4dfxn+G07IwXNb9rfF73OX4YJYJkhD10FPe+3t+c4isUoh7SqbKSaZeqKeMWhG8 +eoLrvozps6yWJQeXSpkqBy+0Hne/ig+1AnwblrjFuTosvNYSuetZfeLQBoZfXklq +tTleiDTsvHgMCJiEbKjNS7SgfQx5TfC4LcshytVsW33hoCmEofnTlEnLJGKRILzd +C9XZzPnqJworc5HGnRusyMvo4KD0L5CLTfuwNhv2GXqF4G3yYROIXJ/gkwpRl4pa +zq+r1feqCapgvdzZX99yqWATXgAByUr6P6TqBwMhAo6CygPCm48CAwEAAaOBnDCB +mTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUm+IH +V2ccHsBqBt5ZtJot39wZhi4wNgYDVR0fBC8wLTAroCmgJ4YlaHR0cDovL2NybC5n +bG9iYWxzaWduLm5ldC9yb290LXIyLmNybDAfBgNVHSMEGDAWgBSb4gdXZxwewGoG 
+3lm0mi3f3BmGLjANBgkqhkiG9w0BAQUFAAOCAQEAmYFThxxol4aR7OBKuEQLq4Gs +J0/WwbgcQ3izDJr86iw8bmEbTUsp9Z8FHSbBuOmDAGJFtqkIk7mpM0sYmsL4h4hO +291xNBrBVNpGP+DTKqttVCL1OmLNIG+6KYnX3ZHu01yiPqFbQfXf5WRDLenVOavS +ot+3i9DAgBkcRcAtjOj4LaR0VknFBbVPFd5uRHg5h6h+u/N5GJG79G+dwfCMNYxd +AfvDbbnvRG15RjF+Cv6pgsH/76tuIMRQyV+dTZsXjAzlAcmgQWpzU/qlULRuJQ/7 +TBj0/VLZjmmx6BEP3ojY+x1J96relc8geMJgEtslQIxq/H5COEBkEveegeGTLg== +-----END CERTIFICATE----- + +# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only +# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only +# Label: "Verisign Class 3 Public Primary Certification Authority - G3" +# Serial: 206684696279472310254277870180966723415 +# MD5 Fingerprint: cd:68:b6:a7:c7:c4:ce:75:e0:1d:4f:57:44:61:92:09 +# SHA1 Fingerprint: 13:2d:0d:45:53:4b:69:97:cd:b2:d5:c3:39:e2:55:76:60:9b:5c:c6 +# SHA256 Fingerprint: eb:04:cf:5e:b1:f3:9a:fa:76:2f:2b:b1:20:f2:96:cb:a5:20:c1:b9:7d:b1:58:95:65:b8:1c:b9:a1:7b:72:44 +-----BEGIN CERTIFICATE----- +MIIEGjCCAwICEQCbfgZJoz5iudXukEhxKe9XMA0GCSqGSIb3DQEBBQUAMIHKMQsw +CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl +cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu +LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT +aWduIENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp +dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD +VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT +aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ +bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu +IENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg +LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMu6nFL8eB8aHm8b +N3O9+MlrlBIwT/A2R/XQkQr1F8ilYcEWQE37imGQ5XYgwREGfassbqb1EUGO+i2t +KmFZpGcmTNDovFJbcCAEWNF6yaRpvIMXZK0Fi7zQWM6NjPXr8EJJC52XJ2cybuGu +kxUccLwgTS8Y3pKI6GyFVxEa6X7jJhFUokWWVYPKMIno3Nij7SqAP395ZVc+FSBm +CC+Vk7+qRy+oRpfwEuL+wgorUeZ25rdGt+INpsyow0xZVYnm6FNcHOqd8GIWC6fJ +Xwzw3sJ2zq/3avL6QaaiMxTJ5Xpj055iN9WFZZ4O5lMkdBteHRJTW8cs54NJOxWu +imi5V5cCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAERSWwauSCPc/L8my/uRan2Te +2yFPhpk0djZX3dAVL8WtfxUfN2JzPtTnX84XA9s1+ivbrmAJXx5fj267Cz3qWhMe +DGBvtcC1IyIuBwvLqXTLR7sdwdela8wv0kL9Sd2nic9TutoAWii/gt/4uhMdUIaC +/Y4wjylGsB49Ndo4YhYYSq3mtlFs3q9i6wHQHiT+eo8SGhJouPtmmRQURVyu565p +F4ErWjfJXir0xuKhXFSbplQAz/DxwceYMBo7Nhbbo27q/a2ywtrvAkcTisDxszGt +TxzhT5yvDwyd93gN2PQ1VoDat20Xj50egWTh/sVFuq1ruQp6Tk9LhO5L8X3dEQ== +-----END CERTIFICATE----- + +# Issuer: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited +# Subject: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. 
(limits liab.)/(c) 1999 Entrust.net Limited +# Label: "Entrust.net Premium 2048 Secure Server CA" +# Serial: 946069240 +# MD5 Fingerprint: ee:29:31:bc:32:7e:9a:e6:e8:b5:f7:51:b4:34:71:90 +# SHA1 Fingerprint: 50:30:06:09:1d:97:d4:f5:ae:39:f7:cb:e7:92:7d:7d:65:2d:34:31 +# SHA256 Fingerprint: 6d:c4:71:72:e0:1c:bc:b0:bf:62:58:0d:89:5f:e2:b8:ac:9a:d4:f8:73:80:1e:0c:10:b9:c8:37:d2:1e:b1:77 +-----BEGIN CERTIFICATE----- +MIIEKjCCAxKgAwIBAgIEOGPe+DANBgkqhkiG9w0BAQUFADCBtDEUMBIGA1UEChML +RW50cnVzdC5uZXQxQDA+BgNVBAsUN3d3dy5lbnRydXN0Lm5ldC9DUFNfMjA0OCBp +bmNvcnAuIGJ5IHJlZi4gKGxpbWl0cyBsaWFiLikxJTAjBgNVBAsTHChjKSAxOTk5 +IEVudHJ1c3QubmV0IExpbWl0ZWQxMzAxBgNVBAMTKkVudHJ1c3QubmV0IENlcnRp +ZmljYXRpb24gQXV0aG9yaXR5ICgyMDQ4KTAeFw05OTEyMjQxNzUwNTFaFw0yOTA3 +MjQxNDE1MTJaMIG0MRQwEgYDVQQKEwtFbnRydXN0Lm5ldDFAMD4GA1UECxQ3d3d3 +LmVudHJ1c3QubmV0L0NQU18yMDQ4IGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxp +YWIuKTElMCMGA1UECxMcKGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDEzMDEG +A1UEAxMqRW50cnVzdC5uZXQgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgKDIwNDgp +MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArU1LqRKGsuqjIAcVFmQq +K0vRvwtKTY7tgHalZ7d4QMBzQshowNtTK91euHaYNZOLGp18EzoOH1u3Hs/lJBQe +sYGpjX24zGtLA/ECDNyrpUAkAH90lKGdCCmziAv1h3edVc3kw37XamSrhRSGlVuX +MlBvPci6Zgzj/L24ScF2iUkZ/cCovYmjZy/Gn7xxGWC4LeksyZB2ZnuU4q941mVT +XTzWnLLPKQP5L6RQstRIzgUyVYr9smRMDuSYB3Xbf9+5CFVghTAp+XtIpGmG4zU/ +HoZdenoVve8AjhUiVBcAkCaTvA5JaJG/+EfTnZVCwQ5N328mz8MYIWJmQ3DW1cAH +4QIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV +HQ4EFgQUVeSB0RGAvtiJuQijMfmhJAkWuXAwDQYJKoZIhvcNAQEFBQADggEBADub +j1abMOdTmXx6eadNl9cZlZD7Bh/KM3xGY4+WZiT6QBshJ8rmcnPyT/4xmf3IDExo +U8aAghOY+rat2l098c5u9hURlIIM7j+VrxGrD9cv3h8Dj1csHsm7mhpElesYT6Yf +zX1XEC+bBAlahLVu2B064dae0Wx5XnkcFMXj0EyTO2U87d89vqbllRrDtRnDvV5b +u/8j72gZyxKTJ1wDLW8w0B62GqzeWvfRqqgnpv55gcR5mTNXuhKwqeBCbJPKVt7+ +bYQLCIt+jerXmCHG8+c8eS9enNFMFY3h7CI3zJpDC5fcgJCNs2ebb0gIFVbPv/Er +fF6adulZkMV8gzURZVE= +-----END CERTIFICATE----- + +# Issuer: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust +# Subject: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust +# Label: "Baltimore CyberTrust Root" +# Serial: 33554617 +# MD5 Fingerprint: ac:b6:94:a5:9c:17:e0:d7:91:52:9b:b1:97:06:a6:e4 +# SHA1 Fingerprint: d4:de:20:d0:5e:66:fc:53:fe:1a:50:88:2c:78:db:28:52:ca:e4:74 +# SHA256 Fingerprint: 16:af:57:a9:f6:76:b0:ab:12:60:95:aa:5e:ba:de:f2:2a:b3:11:19:d6:44:ac:95:cd:4b:93:db:f3:f2:6a:eb +-----BEGIN CERTIFICATE----- +MIIDdzCCAl+gAwIBAgIEAgAAuTANBgkqhkiG9w0BAQUFADBaMQswCQYDVQQGEwJJ +RTESMBAGA1UEChMJQmFsdGltb3JlMRMwEQYDVQQLEwpDeWJlclRydXN0MSIwIAYD +VQQDExlCYWx0aW1vcmUgQ3liZXJUcnVzdCBSb290MB4XDTAwMDUxMjE4NDYwMFoX +DTI1MDUxMjIzNTkwMFowWjELMAkGA1UEBhMCSUUxEjAQBgNVBAoTCUJhbHRpbW9y +ZTETMBEGA1UECxMKQ3liZXJUcnVzdDEiMCAGA1UEAxMZQmFsdGltb3JlIEN5YmVy +VHJ1c3QgUm9vdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKMEuyKr +mD1X6CZymrV51Cni4eiVgLGw41uOKymaZN+hXe2wCQVt2yguzmKiYv60iNoS6zjr +IZ3AQSsBUnuId9Mcj8e6uYi1agnnc+gRQKfRzMpijS3ljwumUNKoUMMo6vWrJYeK +mpYcqWe4PwzV9/lSEy/CG9VwcPCPwBLKBsua4dnKM3p31vjsufFoREJIE9LAwqSu +XmD+tqYF/LTdB1kC1FkYmGP1pWPgkAx9XbIGevOF6uvUA65ehD5f/xXtabz5OTZy +dc93Uk3zyZAsuT3lySNTPx8kmCFcB5kpvcY67Oduhjprl3RjM71oGDHweI12v/ye +jl0qhqdNkNwnGjkCAwEAAaNFMEMwHQYDVR0OBBYEFOWdWTCCR1jMrPoIVDaGezq1 +BE3wMBIGA1UdEwEB/wQIMAYBAf8CAQMwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3 +DQEBBQUAA4IBAQCFDF2O5G9RaEIFoN27TyclhAO992T9Ldcw46QQF+vaKSm2eT92 +9hkTI7gQCvlYpNRhcL0EYWoSihfVCr3FvDB81ukMJY2GQE/szKN+OMY3EU/t3Wgx +jkzSswF07r51XgdIGn9w/xZchMB5hbgF/X++ZRGjD8ACtPhSNzkE1akxehi/oCr0 +Epn3o0WC4zxe9Z2etciefC7IpJ5OCBRLbf1wbWsaY71k5h+3zvDyny67G7fyUIhz 
+ksLi4xaNmjICq44Y3ekQEe5+NauQrz4wlHrQMz2nZQ/1/I6eYs9HRCwBXbsdtTLS +R9I4LtD+gdwyah617jzV/OeBHRnDJELqYzmp +-----END CERTIFICATE----- + +# Issuer: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network +# Subject: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network +# Label: "AddTrust External Root" +# Serial: 1 +# MD5 Fingerprint: 1d:35:54:04:85:78:b0:3f:42:42:4d:bf:20:73:0a:3f +# SHA1 Fingerprint: 02:fa:f3:e2:91:43:54:68:60:78:57:69:4d:f5:e4:5b:68:85:18:68 +# SHA256 Fingerprint: 68:7f:a4:51:38:22:78:ff:f0:c8:b1:1f:8d:43:d5:76:67:1c:6e:b2:bc:ea:b4:13:fb:83:d9:65:d0:6d:2f:f2 +-----BEGIN CERTIFICATE----- +MIIENjCCAx6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBvMQswCQYDVQQGEwJTRTEU +MBIGA1UEChMLQWRkVHJ1c3QgQUIxJjAkBgNVBAsTHUFkZFRydXN0IEV4dGVybmFs +IFRUUCBOZXR3b3JrMSIwIAYDVQQDExlBZGRUcnVzdCBFeHRlcm5hbCBDQSBSb290 +MB4XDTAwMDUzMDEwNDgzOFoXDTIwMDUzMDEwNDgzOFowbzELMAkGA1UEBhMCU0Ux +FDASBgNVBAoTC0FkZFRydXN0IEFCMSYwJAYDVQQLEx1BZGRUcnVzdCBFeHRlcm5h +bCBUVFAgTmV0d29yazEiMCAGA1UEAxMZQWRkVHJ1c3QgRXh0ZXJuYWwgQ0EgUm9v +dDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALf3GjPm8gAELTngTlvt +H7xsD821+iO2zt6bETOXpClMfZOfvUq8k+0DGuOPz+VtUFrWlymUWoCwSXrbLpX9 +uMq/NzgtHj6RQa1wVsfwTz/oMp50ysiQVOnGXw94nZpAPA6sYapeFI+eh6FqUNzX +mk6vBbOmcZSccbNQYArHE504B4YCqOmoaSYYkKtMsE8jqzpPhNjfzp/haW+710LX +a0Tkx63ubUFfclpxCDezeWWkWaCUN/cALw3CknLa0Dhy2xSoRcRdKn23tNbE7qzN +E0S3ySvdQwAl+mG5aWpYIxG3pzOPVnVZ9c0p10a3CitlttNCbxWyuHv77+ldU9U0 +WicCAwEAAaOB3DCB2TAdBgNVHQ4EFgQUrb2YejS0Jvf6xCZU7wO94CTLVBowCwYD +VR0PBAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wgZkGA1UdIwSBkTCBjoAUrb2YejS0 +Jvf6xCZU7wO94CTLVBqhc6RxMG8xCzAJBgNVBAYTAlNFMRQwEgYDVQQKEwtBZGRU +cnVzdCBBQjEmMCQGA1UECxMdQWRkVHJ1c3QgRXh0ZXJuYWwgVFRQIE5ldHdvcmsx +IjAgBgNVBAMTGUFkZFRydXN0IEV4dGVybmFsIENBIFJvb3SCAQEwDQYJKoZIhvcN +AQEFBQADggEBALCb4IUlwtYj4g+WBpKdQZic2YR5gdkeWxQHIzZlj7DYd7usQWxH +YINRsPkyPef89iYTx4AWpb9a/IfPeHmJIZriTAcKhjW88t5RxNKWt9x+Tu5w/Rw5 +6wwCURQtjr0W4MHfRnXnJK3s9EK0hZNwEGe6nQY1ShjTK3rMUUKhemPR5ruhxSvC +Nr4TDea9Y355e6cJDUCrat2PisP29owaQgVR1EX1n6diIWgVIEM8med8vSTYqZEX +c4g/VhsxOBi0cQ+azcgOno4uG+GMmIPLHzHxREzGBHNJdmAPx/i9F4BrLunMTA5a +mnkPIAou1Z5jJh5VkpTYghdae9C8x49OhgQ= +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. +# Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. 
+# Label: "Entrust Root Certification Authority" +# Serial: 1164660820 +# MD5 Fingerprint: d6:a5:c3:ed:5d:dd:3e:00:c1:3d:87:92:1f:1d:3f:e4 +# SHA1 Fingerprint: b3:1e:b1:b7:40:e3:6c:84:02:da:dc:37:d4:4d:f5:d4:67:49:52:f9 +# SHA256 Fingerprint: 73:c1:76:43:4f:1b:c6:d5:ad:f4:5b:0e:76:e7:27:28:7c:8d:e5:76:16:c1:e6:e6:14:1a:2b:2c:bc:7d:8e:4c +-----BEGIN CERTIFICATE----- +MIIEkTCCA3mgAwIBAgIERWtQVDANBgkqhkiG9w0BAQUFADCBsDELMAkGA1UEBhMC +VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xOTA3BgNVBAsTMHd3dy5lbnRydXN0 +Lm5ldC9DUFMgaXMgaW5jb3Jwb3JhdGVkIGJ5IHJlZmVyZW5jZTEfMB0GA1UECxMW +KGMpIDIwMDYgRW50cnVzdCwgSW5jLjEtMCsGA1UEAxMkRW50cnVzdCBSb290IENl +cnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA2MTEyNzIwMjM0MloXDTI2MTEyNzIw +NTM0MlowgbAxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMTkw +NwYDVQQLEzB3d3cuZW50cnVzdC5uZXQvQ1BTIGlzIGluY29ycG9yYXRlZCBieSBy +ZWZlcmVuY2UxHzAdBgNVBAsTFihjKSAyMDA2IEVudHJ1c3QsIEluYy4xLTArBgNV +BAMTJEVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASIwDQYJ +KoZIhvcNAQEBBQADggEPADCCAQoCggEBALaVtkNC+sZtKm9I35RMOVcF7sN5EUFo +Nu3s/poBj6E4KPz3EEZmLk0eGrEaTsbRwJWIsMn/MYszA9u3g3s+IIRe7bJWKKf4 +4LlAcTfFy0cOlypowCKVYhXbR9n10Cv/gkvJrT7eTNuQgFA/CYqEAOwwCj0Yzfv9 +KlmaI5UXLEWeH25DeW0MXJj+SKfFI0dcXv1u5x609mhF0YaDW6KKjbHjKYD+JXGI +rb68j6xSlkuqUY3kEzEZ6E5Nn9uss2rVvDlUccp6en+Q3X0dgNmBu1kmwhH+5pPi +94DkZfs0Nw4pgHBNrziGLp5/V6+eF67rHMsoIV+2HNjnogQi+dPa2MsCAwEAAaOB +sDCBrTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zArBgNVHRAEJDAi +gA8yMDA2MTEyNzIwMjM0MlqBDzIwMjYxMTI3MjA1MzQyWjAfBgNVHSMEGDAWgBRo +kORnpKZTgMeGZqTx90tD+4S9bTAdBgNVHQ4EFgQUaJDkZ6SmU4DHhmak8fdLQ/uE +vW0wHQYJKoZIhvZ9B0EABBAwDhsIVjcuMTo0LjADAgSQMA0GCSqGSIb3DQEBBQUA +A4IBAQCT1DCw1wMgKtD5Y+iRDAUgqV8ZyntyTtSx29CW+1RaGSwMCPeyvIWonX9t +O1KzKtvn1ISMY/YPyyYBkVBs9F8U4pN0wBOeMDpQ47RgxRzwIkSNcUesyBrJ6Zua +AGAT/3B+XxFNSRuzFVJ7yVTav52Vr2ua2J7p8eRDjeIRRDq/r72DQnNSi6q7pynP +9WQcCk3RvKqsnyrQ/39/2n3qse0wJcGE2jTSW3iDVuycNsMm4hH2Z0kdkquM++v/ +eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0tHuu2guQOHXvgR1m +0vdXcDazv/wor3ElhVsT/h5/WrQ8 +-----END CERTIFICATE----- + +# Issuer: CN=GeoTrust Global CA O=GeoTrust Inc. +# Subject: CN=GeoTrust Global CA O=GeoTrust Inc. 
+# Label: "GeoTrust Global CA" +# Serial: 144470 +# MD5 Fingerprint: f7:75:ab:29:fb:51:4e:b7:77:5e:ff:05:3c:99:8e:f5 +# SHA1 Fingerprint: de:28:f4:a4:ff:e5:b9:2f:a3:c5:03:d1:a3:49:a7:f9:96:2a:82:12 +# SHA256 Fingerprint: ff:85:6a:2d:25:1d:cd:88:d3:66:56:f4:50:12:67:98:cf:ab:aa:de:40:79:9c:72:2d:e4:d2:b5:db:36:a7:3a +-----BEGIN CERTIFICATE----- +MIIDVDCCAjygAwIBAgIDAjRWMA0GCSqGSIb3DQEBBQUAMEIxCzAJBgNVBAYTAlVT +MRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMRswGQYDVQQDExJHZW9UcnVzdCBHbG9i +YWwgQ0EwHhcNMDIwNTIxMDQwMDAwWhcNMjIwNTIxMDQwMDAwWjBCMQswCQYDVQQG +EwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEbMBkGA1UEAxMSR2VvVHJ1c3Qg +R2xvYmFsIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2swYYzD9 +9BcjGlZ+W988bDjkcbd4kdS8odhM+KhDtgPpTSEHCIjaWC9mOSm9BXiLnTjoBbdq +fnGk5sRgprDvgOSJKA+eJdbtg/OtppHHmMlCGDUUna2YRpIuT8rxh0PBFpVXLVDv +iS2Aelet8u5fa9IAjbkU+BQVNdnARqN7csiRv8lVK83Qlz6cJmTM386DGXHKTubU +1XupGc1V3sjs0l44U+VcT4wt/lAjNvxm5suOpDkZALeVAjmRCw7+OC7RHQWa9k0+ +bw8HHa8sHo9gOeL6NlMTOdReJivbPagUvTLrGAMoUgRx5aszPeE4uwc2hGKceeoW +MPRfwCvocWvk+QIDAQABo1MwUTAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTA +ephojYn7qwVkDBF9qn1luMrMTjAfBgNVHSMEGDAWgBTAephojYn7qwVkDBF9qn1l +uMrMTjANBgkqhkiG9w0BAQUFAAOCAQEANeMpauUvXVSOKVCUn5kaFOSPeCpilKIn +Z57QzxpeR+nBsqTP3UEaBU6bS+5Kb1VSsyShNwrrZHYqLizz/Tt1kL/6cdjHPTfS +tQWVYrmm3ok9Nns4d0iXrKYgjy6myQzCsplFAMfOEVEiIuCl6rYVSAlk6l5PdPcF +PseKUgzbFbS9bZvlxrFUaKnjaZC2mqUPuLk/IH2uSrW4nOQdtqvmlKXBx4Ot2/Un +hw4EbNX/3aBd7YdStysVAq45pmp06drE57xNNB6pXE0zX5IJL4hmXXeXxx12E6nV +5fEWCRE11azbJHFwLJhWC9kXtNHjUStedejV0NxPNO3CBWaAocvmMw== +-----END CERTIFICATE----- + +# Issuer: CN=GeoTrust Universal CA O=GeoTrust Inc. +# Subject: CN=GeoTrust Universal CA O=GeoTrust Inc. +# Label: "GeoTrust Universal CA" +# Serial: 1 +# MD5 Fingerprint: 92:65:58:8b:a2:1a:31:72:73:68:5c:b4:a5:7a:07:48 +# SHA1 Fingerprint: e6:21:f3:35:43:79:05:9a:4b:68:30:9d:8a:2f:74:22:15:87:ec:79 +# SHA256 Fingerprint: a0:45:9b:9f:63:b2:25:59:f5:fa:5d:4c:6d:b3:f9:f7:2f:f1:93:42:03:35:78:f0:73:bf:1d:1b:46:cb:b9:12 +-----BEGIN CERTIFICATE----- +MIIFaDCCA1CgAwIBAgIBATANBgkqhkiG9w0BAQUFADBFMQswCQYDVQQGEwJVUzEW +MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEeMBwGA1UEAxMVR2VvVHJ1c3QgVW5pdmVy +c2FsIENBMB4XDTA0MDMwNDA1MDAwMFoXDTI5MDMwNDA1MDAwMFowRTELMAkGA1UE +BhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xHjAcBgNVBAMTFUdlb1RydXN0 +IFVuaXZlcnNhbCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAKYV +VaCjxuAfjJ0hUNfBvitbtaSeodlyWL0AG0y/YckUHUWCq8YdgNY96xCcOq9tJPi8 +cQGeBvV8Xx7BDlXKg5pZMK4ZyzBIle0iN430SppyZj6tlcDgFgDgEB8rMQ7XlFTT +QjOgNB0eRXbdT8oYN+yFFXoZCPzVx5zw8qkuEKmS5j1YPakWaDwvdSEYfyh3peFh +F7em6fgemdtzbvQKoiFs7tqqhZJmr/Z6a4LauiIINQ/PQvE1+mrufislzDoR5G2v +c7J2Ha3QsnhnGqQ5HFELZ1aD/ThdDc7d8Lsrlh/eezJS/R27tQahsiFepdaVaH/w +mZ7cRQg+59IJDTWU3YBOU5fXtQlEIGQWFwMCTFMNaN7VqnJNk22CDtucvc+081xd +VHppCZbW2xHBjXWotM85yM48vCR85mLK4b19p71XZQvk/iXttmkQ3CgaRr0BHdCX +teGYO8A3ZNY9lO4L4fUorgtWv3GLIylBjobFS1J72HGrH4oVpjuDWtdYAVHGTEHZ +f9hBZ3KiKN9gg6meyHv8U3NyWfWTehd2Ds735VzZC1U0oqpbtWpU5xPKV+yXbfRe +Bi9Fi1jUIxaS5BZuKGNZMN9QAZxjiRqf2xeUgnA3wySemkfWWspOqGmJch+RbNt+ +nhutxx9z3SxPGWX9f5NAEC7S8O08ni4oPmkmM8V7AgMBAAGjYzBhMA8GA1UdEwEB +/wQFMAMBAf8wHQYDVR0OBBYEFNq7LqqwDLiIJlF0XG0D08DYj3rWMB8GA1UdIwQY +MBaAFNq7LqqwDLiIJlF0XG0D08DYj3rWMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG +9w0BAQUFAAOCAgEAMXjmx7XfuJRAyXHEqDXsRh3ChfMoWIawC/yOsjmPRFWrZIRc +aanQmjg8+uUfNeVE44B5lGiku8SfPeE0zTBGi1QrlaXv9z+ZhP015s8xxtxqv6fX +IwjhmF7DWgh2qaavdy+3YL1ERmrvl/9zlcGO6JP7/TG37FcREUWbMPEaiDnBTzyn +ANXH/KttgCJwpQzgXQQpAvvLoJHRfNbDflDVnVi+QTjruXU8FdmbyUqDWcDaU/0z +uzYYm4UPFd3uLax2k7nZAY1IEKj79TiG8dsKxr2EoyNB3tZ3b4XUhRxQ4K5RirqN 
+Pnbiucon8l+f725ZDQbYKxek0nxru18UGkiPGkzns0ccjkxFKyDuSN/n3QmOGKja +QI2SJhFTYXNd673nxE0pN2HrrDktZy4W1vUAg4WhzH92xH3kt0tm7wNFYGm2DFKW +koRepqO1pD4r2czYG0eq8kTaT/kD6PAUyz/zg97QwVTjt+gKN02LIFkDMBmhLMi9 +ER/frslKxfMnZmaGrGiR/9nmUxwPi1xpZQomyB40w11Re9epnAahNt3ViZS82eQt +DF4JbAiXfKM9fJP/P6EUp8+1Xevb2xzEdt+Iub1FBZUbrvxGakyvSOPOrg/Sfuvm +bJxPgWp6ZKy7PtXny3YuxadIwVyQD8vIP/rmMuGNG2+k5o7Y+SlIis5z/iw= +-----END CERTIFICATE----- + +# Issuer: CN=GeoTrust Universal CA 2 O=GeoTrust Inc. +# Subject: CN=GeoTrust Universal CA 2 O=GeoTrust Inc. +# Label: "GeoTrust Universal CA 2" +# Serial: 1 +# MD5 Fingerprint: 34:fc:b8:d0:36:db:9e:14:b3:c2:f2:db:8f:e4:94:c7 +# SHA1 Fingerprint: 37:9a:19:7b:41:85:45:35:0c:a6:03:69:f3:3c:2e:af:47:4f:20:79 +# SHA256 Fingerprint: a0:23:4f:3b:c8:52:7c:a5:62:8e:ec:81:ad:5d:69:89:5d:a5:68:0d:c9:1d:1c:b8:47:7f:33:f8:78:b9:5b:0b +-----BEGIN CERTIFICATE----- +MIIFbDCCA1SgAwIBAgIBATANBgkqhkiG9w0BAQUFADBHMQswCQYDVQQGEwJVUzEW +MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1c3QgVW5pdmVy +c2FsIENBIDIwHhcNMDQwMzA0MDUwMDAwWhcNMjkwMzA0MDUwMDAwWjBHMQswCQYD +VQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1 +c3QgVW5pdmVyc2FsIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC +AQCzVFLByT7y2dyxUxpZKeexw0Uo5dfR7cXFS6GqdHtXr0om/Nj1XqduGdt0DE81 +WzILAePb63p3NeqqWuDW6KFXlPCQo3RWlEQwAx5cTiuFJnSCegx2oG9NzkEtoBUG +FF+3Qs17j1hhNNwqCPkuwwGmIkQcTAeC5lvO0Ep8BNMZcyfwqph/Lq9O64ceJHdq +XbboW0W63MOhBW9Wjo8QJqVJwy7XQYci4E+GymC16qFjwAGXEHm9ADwSbSsVsaxL +se4YuU6W3Nx2/zu+z18DwPw76L5GG//aQMJS9/7jOvdqdzXQ2o3rXhhqMcceujwb +KNZrVMaqW9eiLBsZzKIC9ptZvTdrhrVtgrrY6slWvKk2WP0+GfPtDCapkzj4T8Fd +IgbQl+rhrcZV4IErKIM6+vR7IVEAvlI4zs1meaj0gVbi0IMJR1FbUGrP20gaXT73 +y/Zl92zxlfgCOzJWgjl6W70viRu/obTo/3+NjN8D8WBOWBFM66M/ECuDmgFz2ZRt +hAAnZqzwcEAJQpKtT5MNYQlRJNiS1QuUYbKHsu3/mjX/hVTK7URDrBs8FmtISgoc +QIgfksILAAX/8sgCSqSqqcyZlpwvWOB94b67B9xfBHJcMTTD7F8t4D1kkCLm0ey4 +Lt1ZrtmhN79UNdxzMk+MBB4zsslG8dhcyFVQyWi9qLo2CQIDAQABo2MwYTAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAfBgNV +HSMEGDAWgBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAOBgNVHQ8BAf8EBAMCAYYwDQYJ +KoZIhvcNAQEFBQADggIBAGbBxiPz2eAubl/oz66wsCVNK/g7WJtAJDday6sWSf+z +dXkzoS9tcBc0kf5nfo/sm+VegqlVHy/c1FEHEv6sFj4sNcZj/NwQ6w2jqtB8zNHQ +L1EuxBRa3ugZ4T7GzKQp5y6EqgYweHZUcyiYWTjgAA1i00J9IZ+uPTqM1fp3DRgr +Fg5fNuH8KrUwJM/gYwx7WBr+mbpCErGR9Hxo4sjoryzqyX6uuyo9DRXcNJW2GHSo +ag/HtPQTxORb7QrSpJdMKu0vbBKJPfEncKpqA1Ihn0CoZ1Dy81of398j9tx4TuaY +T1U6U+Pv8vSfx3zYWK8pIpe44L2RLrB27FcRz+8pRPPphXpgY+RdM4kX2TGq2tbz +GDVyz4crL2MjhF2EjD9XoIj8mZEoJmmZ1I+XRL6O1UixpCgp8RW04eWe3fiPpm8m +1wk8OhwRDqZsN/etRIcsKMfYdIKz0G9KV7s1KSegi+ghp4dkNl3M2Basx7InQJJV +OCiNUW7dFGdTbHFcJoRNdVq2fmBWqU2t+5sel/MN2dKXVHfaPRK34B7vCAas+YWH +6aLcr34YEoP9VhdBLtUpgn2Z9DH2canPLAEnpQW5qrJITirvn5NSUZU8UnOOVkwX +QMAJKOSLakhT2+zNVVXxxvjpoixMptEmX36vWkzaH6byHCx+rgIW0lbQL1dTR+iS +-----END CERTIFICATE----- + +# Issuer: CN=Visa eCommerce Root O=VISA OU=Visa International Service Association +# Subject: CN=Visa eCommerce Root O=VISA OU=Visa International Service Association +# Label: "Visa eCommerce Root" +# Serial: 25952180776285836048024890241505565794 +# MD5 Fingerprint: fc:11:b8:d8:08:93:30:00:6d:23:f9:7e:eb:52:1e:02 +# SHA1 Fingerprint: 70:17:9b:86:8c:00:a4:fa:60:91:52:22:3f:9f:3e:32:bd:e0:05:62 +# SHA256 Fingerprint: 69:fa:c9:bd:55:fb:0a:c7:8d:53:bb:ee:5c:f1:d5:97:98:9f:d0:aa:ab:20:a2:51:51:bd:f1:73:3e:e7:d1:22 +-----BEGIN CERTIFICATE----- +MIIDojCCAoqgAwIBAgIQE4Y1TR0/BvLB+WUF1ZAcYjANBgkqhkiG9w0BAQUFADBr +MQswCQYDVQQGEwJVUzENMAsGA1UEChMEVklTQTEvMC0GA1UECxMmVmlzYSBJbnRl 
+cm5hdGlvbmFsIFNlcnZpY2UgQXNzb2NpYXRpb24xHDAaBgNVBAMTE1Zpc2EgZUNv +bW1lcmNlIFJvb3QwHhcNMDIwNjI2MDIxODM2WhcNMjIwNjI0MDAxNjEyWjBrMQsw +CQYDVQQGEwJVUzENMAsGA1UEChMEVklTQTEvMC0GA1UECxMmVmlzYSBJbnRlcm5h +dGlvbmFsIFNlcnZpY2UgQXNzb2NpYXRpb24xHDAaBgNVBAMTE1Zpc2EgZUNvbW1l +cmNlIFJvb3QwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvV95WHm6h +2mCxlCfLF9sHP4CFT8icttD0b0/Pmdjh28JIXDqsOTPHH2qLJj0rNfVIsZHBAk4E +lpF7sDPwsRROEW+1QK8bRaVK7362rPKgH1g/EkZgPI2h4H3PVz4zHvtH8aoVlwdV +ZqW1LS7YgFmypw23RuwhY/81q6UCzyr0TP579ZRdhE2o8mCP2w4lPJ9zcc+U30rq +299yOIzzlr3xF7zSujtFWsan9sYXiwGd/BmoKoMWuDpI/k4+oKsGGelT84ATB+0t +vz8KPFUgOSwsAGl0lUq8ILKpeeUYiZGo3BxN77t+Nwtd/jmliFKMAGzsGHxBvfaL +dXe6YJ2E5/4tAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD +AgEGMB0GA1UdDgQWBBQVOIMPPyw/cDMezUb+B4wg4NfDtzANBgkqhkiG9w0BAQUF +AAOCAQEAX/FBfXxcCLkr4NWSR/pnXKUTwwMhmytMiUbPWU3J/qVAtmPN3XEolWcR +zCSs00Rsca4BIGsDoo8Ytyk6feUWYFN4PMCvFYP3j1IzJL1kk5fui/fbGKhtcbP3 +LBfQdCVp9/5rPJS+TUtBjE7ic9DjkCJzQ83z7+pzzkWKsKZJ/0x9nXGIxHYdkFsd +7v3M9+79YKWxehZx0RbQfBI8bGmX265fOZpwLwU8GUYEmSA20GBuYQa7FkKMcPcw +++DbZqMAAb3mLNqRX6BGi01qnD093QVG/na/oAo85ADmJ7f/hC3euiInlhBx6yLt +398znM/jra6O1I7mT1GvFpLgXPYHDw== +-----END CERTIFICATE----- + +# Issuer: CN=AAA Certificate Services O=Comodo CA Limited +# Subject: CN=AAA Certificate Services O=Comodo CA Limited +# Label: "Comodo AAA Services root" +# Serial: 1 +# MD5 Fingerprint: 49:79:04:b0:eb:87:19:ac:47:b0:bc:11:51:9b:74:d0 +# SHA1 Fingerprint: d1:eb:23:a4:6d:17:d6:8f:d9:25:64:c2:f1:f1:60:17:64:d8:e3:49 +# SHA256 Fingerprint: d7:a7:a0:fb:5d:7e:27:31:d7:71:e9:48:4e:bc:de:f7:1d:5f:0c:3e:0a:29:48:78:2b:c8:3e:e0:ea:69:9e:f4 +-----BEGIN CERTIFICATE----- +MIIEMjCCAxqgAwIBAgIBATANBgkqhkiG9w0BAQUFADB7MQswCQYDVQQGEwJHQjEb +MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow +GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEhMB8GA1UEAwwYQUFBIENlcnRpZmlj +YXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVowezEL +MAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE +BwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxITAfBgNVBAMM +GEFBQSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEBBQADggEP +ADCCAQoCggEBAL5AnfRu4ep2hxxNRUSOvkbIgwadwSr+GB+O5AL686tdUIoWMQua +BtDFcCLNSS1UY8y2bmhGC1Pqy0wkwLxyTurxFa70VJoSCsN6sjNg4tqJVfMiWPPe +3M/vg4aijJRPn2jymJBGhCfHdr/jzDUsi14HZGWCwEiwqJH5YZ92IFCokcdmtet4 +YgNW8IoaE+oxox6gmf049vYnMlhvB/VruPsUK6+3qszWY19zjNoFmag4qMsXeDZR +rOme9Hg6jc8P2ULimAyrL58OAd7vn5lJ8S3frHRNG5i1R8XlKdH5kBjHYpy+g8cm +ez6KJcfA3Z3mNWgQIJ2P2N7Sw4ScDV7oL8kCAwEAAaOBwDCBvTAdBgNVHQ4EFgQU +oBEKIz6W8Qfs4q8p74Klf9AwpLQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQF +MAMBAf8wewYDVR0fBHQwcjA4oDagNIYyaHR0cDovL2NybC5jb21vZG9jYS5jb20v +QUFBQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmwwNqA0oDKGMGh0dHA6Ly9jcmwuY29t +b2RvLm5ldC9BQUFDZXJ0aWZpY2F0ZVNlcnZpY2VzLmNybDANBgkqhkiG9w0BAQUF +AAOCAQEACFb8AvCb6P+k+tZ7xkSAzk/ExfYAWMymtrwUSWgEdujm7l3sAg9g1o1Q +GE8mTgHj5rCl7r+8dFRBv/38ErjHT1r0iWAFf2C3BUrz9vHCv8S5dIa2LX1rzNLz +Rt0vxuBqw8M0Ayx9lt1awg6nCpnBBYurDC/zXDrPbDdVCYfeU0BsWO/8tqtlbgT2 +G9w84FoVxp7Z8VlIMCFlA2zs6SFz7JsDoeA3raAVGI/6ugLOpyypEBMs1OUIJqsi +l2D4kF501KKaU73yqWjgom7C12yxow+ev+to51byrvLjKzg6CYG1a4XXvi3tPxq3 +smPi9WIsgtRqAEFQ8TmDn5XpNpaYbg== +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root Certification Authority O=QuoVadis Limited OU=Root Certification Authority +# Subject: CN=QuoVadis Root Certification Authority O=QuoVadis Limited OU=Root Certification Authority +# Label: "QuoVadis Root CA" +# Serial: 985026699 +# MD5 Fingerprint: 27:de:36:fe:72:b7:00:03:00:9d:f4:f0:1e:6c:04:24 +# SHA1 Fingerprint: 
de:3f:40:bd:50:93:d3:9b:6c:60:f6:da:bc:07:62:01:00:89:76:c9 +# SHA256 Fingerprint: a4:5e:de:3b:bb:f0:9c:8a:e1:5c:72:ef:c0:72:68:d6:93:a2:1c:99:6f:d5:1e:67:ca:07:94:60:fd:6d:88:73 +-----BEGIN CERTIFICATE----- +MIIF0DCCBLigAwIBAgIEOrZQizANBgkqhkiG9w0BAQUFADB/MQswCQYDVQQGEwJC +TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDElMCMGA1UECxMcUm9vdCBDZXJ0 +aWZpY2F0aW9uIEF1dGhvcml0eTEuMCwGA1UEAxMlUXVvVmFkaXMgUm9vdCBDZXJ0 +aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wMTAzMTkxODMzMzNaFw0yMTAzMTcxODMz +MzNaMH8xCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMSUw +IwYDVQQLExxSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MS4wLAYDVQQDEyVR +dW9WYWRpcyBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG +9w0BAQEFAAOCAQ8AMIIBCgKCAQEAv2G1lVO6V/z68mcLOhrfEYBklbTRvM16z/Yp +li4kVEAkOPcahdxYTMukJ0KX0J+DisPkBgNbAKVRHnAEdOLB1Dqr1607BxgFjv2D +rOpm2RgbaIr1VxqYuvXtdj182d6UajtLF8HVj71lODqV0D1VNk7feVcxKh7YWWVJ +WCCYfqtffp/p1k3sg3Spx2zY7ilKhSoGFPlU5tPaZQeLYzcS19Dsw3sgQUSj7cug +F+FxZc4dZjH3dgEZyH0DWLaVSR2mEiboxgx24ONmy+pdpibu5cxfvWenAScOospU +xbF6lR1xHkopigPcakXBpBlebzbNw6Kwt/5cOOJSvPhEQ+aQuwIDAQABo4ICUjCC +Ak4wPQYIKwYBBQUHAQEEMTAvMC0GCCsGAQUFBzABhiFodHRwczovL29jc3AucXVv +dmFkaXNvZmZzaG9yZS5jb20wDwYDVR0TAQH/BAUwAwEB/zCCARoGA1UdIASCAREw +ggENMIIBCQYJKwYBBAG+WAABMIH7MIHUBggrBgEFBQcCAjCBxxqBxFJlbGlhbmNl +IG9uIHRoZSBRdW9WYWRpcyBSb290IENlcnRpZmljYXRlIGJ5IGFueSBwYXJ0eSBh +c3N1bWVzIGFjY2VwdGFuY2Ugb2YgdGhlIHRoZW4gYXBwbGljYWJsZSBzdGFuZGFy +ZCB0ZXJtcyBhbmQgY29uZGl0aW9ucyBvZiB1c2UsIGNlcnRpZmljYXRpb24gcHJh +Y3RpY2VzLCBhbmQgdGhlIFF1b1ZhZGlzIENlcnRpZmljYXRlIFBvbGljeS4wIgYI +KwYBBQUHAgEWFmh0dHA6Ly93d3cucXVvdmFkaXMuYm0wHQYDVR0OBBYEFItLbe3T +KbkGGew5Oanwl4Rqy+/fMIGuBgNVHSMEgaYwgaOAFItLbe3TKbkGGew5Oanwl4Rq +y+/foYGEpIGBMH8xCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1p +dGVkMSUwIwYDVQQLExxSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MS4wLAYD +VQQDEyVRdW9WYWRpcyBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggQ6tlCL +MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOCAQEAitQUtf70mpKnGdSk +fnIYj9lofFIk3WdvOXrEql494liwTXCYhGHoG+NpGA7O+0dQoE7/8CQfvbLO9Sf8 +7C9TqnN7Az10buYWnuulLsS/VidQK2K6vkscPFVcQR0kvoIgR13VRH56FmjffU1R +cHhXHTMe/QKZnAzNCgVPx7uOpHX6Sm2xgI4JVrmcGmD+XcHXetwReNDWXcG31a0y +mQM6isxUJTkxgXsTIlG6Rmyhu576BGxJJnSP0nPrzDCi5upZIof4l/UO/erMkqQW +xFIY6iHOsfHmhIHluqmGKPJDWl0Snawe2ajlCmqnf6CHKc/yiU3U7MXi5nrQNiOK +SnQ2+Q== +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 2 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 2 O=QuoVadis Limited +# Label: "QuoVadis Root CA 2" +# Serial: 1289 +# MD5 Fingerprint: 5e:39:7b:dd:f8:ba:ec:82:e9:ac:62:ba:0c:54:00:2b +# SHA1 Fingerprint: ca:3a:fb:cf:12:40:36:4b:44:b2:16:20:88:80:48:39:19:93:7c:f7 +# SHA256 Fingerprint: 85:a0:dd:7d:d7:20:ad:b7:ff:05:f8:3d:54:2b:20:9d:c7:ff:45:28:f7:d6:77:b1:83:89:fe:a5:e5:c4:9e:86 +-----BEGIN CERTIFICATE----- +MIIFtzCCA5+gAwIBAgICBQkwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x +GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv +b3QgQ0EgMjAeFw0wNjExMjQxODI3MDBaFw0zMTExMjQxODIzMzNaMEUxCzAJBgNV +BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W +YWRpcyBSb290IENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCa +GMpLlA0ALa8DKYrwD4HIrkwZhR0In6spRIXzL4GtMh6QRr+jhiYaHv5+HBg6XJxg +Fyo6dIMzMH1hVBHL7avg5tKifvVrbxi3Cgst/ek+7wrGsxDp3MJGF/hd/aTa/55J +WpzmM+Yklvc/ulsrHHo1wtZn/qtmUIttKGAr79dgw8eTvI02kfN/+NsRE8Scd3bB +rrcCaoF6qUWD4gXmuVbBlDePSHFjIuwXZQeVikvfj8ZaCuWw419eaxGrDPmF60Tp ++ARz8un+XJiM9XOva7R+zdRcAitMOeGylZUtQofX1bOQQ7dsE/He3fbE+Ik/0XX1 +ksOR1YqI0JDs3G3eicJlcZaLDQP9nL9bFqyS2+r+eXyt66/3FsvbzSUr5R/7mp/i +Ucw6UwxI5g69ybR2BlLmEROFcmMDBOAENisgGQLodKcftslWZvB1JdxnwQ5hYIiz 
+PtGo/KPaHbDRsSNU30R2be1B2MGyIrZTHN81Hdyhdyox5C315eXbyOD/5YDXC2Og +/zOhD7osFRXql7PSorW+8oyWHhqPHWykYTe5hnMz15eWniN9gqRMgeKh0bpnX5UH +oycR7hYQe7xFSkyyBNKr79X9DFHOUGoIMfmR2gyPZFwDwzqLID9ujWc9Otb+fVuI +yV77zGHcizN300QyNQliBJIWENieJ0f7OyHj+OsdWwIDAQABo4GwMIGtMA8GA1Ud +EwEB/wQFMAMBAf8wCwYDVR0PBAQDAgEGMB0GA1UdDgQWBBQahGK8SEwzJQTU7tD2 +A8QZRtGUazBuBgNVHSMEZzBlgBQahGK8SEwzJQTU7tD2A8QZRtGUa6FJpEcwRTEL +MAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMT +ElF1b1ZhZGlzIFJvb3QgQ0EgMoICBQkwDQYJKoZIhvcNAQEFBQADggIBAD4KFk2f +BluornFdLwUvZ+YTRYPENvbzwCYMDbVHZF34tHLJRqUDGCdViXh9duqWNIAXINzn +g/iN/Ae42l9NLmeyhP3ZRPx3UIHmfLTJDQtyU/h2BwdBR5YM++CCJpNVjP4iH2Bl +fF/nJrP3MpCYUNQ3cVX2kiF495V5+vgtJodmVjB3pjd4M1IQWK4/YY7yarHvGH5K +WWPKjaJW1acvvFYfzznB4vsKqBUsfU16Y8Zsl0Q80m/DShcK+JDSV6IZUaUtl0Ha +B0+pUNqQjZRG4T7wlP0QADj1O+hA4bRuVhogzG9Yje0uRY/W6ZM/57Es3zrWIozc +hLsib9D45MY56QSIPMO661V6bYCZJPVsAfv4l7CUW+v90m/xd2gNNWQjrLhVoQPR +TUIZ3Ph1WVaj+ahJefivDrkRoHy3au000LYmYjgahwz46P0u05B/B5EqHdZ+XIWD +mbA4CD/pXvk1B+TJYm5Xf6dQlfe6yJvmjqIBxdZmv3lh8zwc4bmCXF2gw+nYSL0Z +ohEUGW6yhhtoPkg3Goi3XZZenMfvJ2II4pEZXNLxId26F0KCl3GBUzGpn/Z9Yr9y +4aOTHcyKJloJONDO1w2AFrR4pTqHTI2KpdVGl/IsELm8VCLAAVBpQ570su9t+Oza +8eOx79+Rj1QqCyXBJhnEUhAFZdWCEOrCMc0u +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 3" +# Serial: 1478 +# MD5 Fingerprint: 31:85:3c:62:94:97:63:b9:aa:fd:89:4e:af:6f:e0:cf +# SHA1 Fingerprint: 1f:49:14:f7:d8:74:95:1d:dd:ae:02:c0:be:fd:3a:2d:82:75:51:85 +# SHA256 Fingerprint: 18:f1:fc:7f:20:5d:f8:ad:dd:eb:7f:e0:07:dd:57:e3:af:37:5a:9c:4d:8d:73:54:6b:f4:f1:fe:d1:e1:8d:35 +-----BEGIN CERTIFICATE----- +MIIGnTCCBIWgAwIBAgICBcYwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x +GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv +b3QgQ0EgMzAeFw0wNjExMjQxOTExMjNaFw0zMTExMjQxOTA2NDRaMEUxCzAJBgNV +BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W +YWRpcyBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDM +V0IWVJzmmNPTTe7+7cefQzlKZbPoFog02w1ZkXTPkrgEQK0CSzGrvI2RaNggDhoB +4hp7Thdd4oq3P5kazethq8Jlph+3t723j/z9cI8LoGe+AaJZz3HmDyl2/7FWeUUr +H556VOijKTVopAFPD6QuN+8bv+OPEKhyq1hX51SGyMnzW9os2l2ObjyjPtr7guXd +8lyyBTNvijbO0BNO/79KDDRMpsMhvVAEVeuxu537RR5kFd5VAYwCdrXLoT9Cabwv +vWhDFlaJKjdhkf2mrk7AyxRllDdLkgbvBNDInIjbC3uBr7E9KsRlOni27tyAsdLT +mZw67mtaa7ONt9XOnMK+pUsvFrGeaDsGb659n/je7Mwpp5ijJUMv7/FfJuGITfhe +btfZFG4ZM2mnO4SJk8RTVROhUXhA+LjJou57ulJCg54U7QVSWllWp5f8nT8KKdjc +T5EOE7zelaTfi5m+rJsziO+1ga8bxiJTyPbH7pcUsMV8eFLI8M5ud2CEpukqdiDt +WAEXMJPpGovgc2PZapKUSU60rUqFxKMiMPwJ7Wgic6aIDFUhWMXhOp8q3crhkODZ +c6tsgLjoC2SToJyMGf+z0gzskSaHirOi4XCPLArlzW1oUevaPwV/izLmE1xr/l9A +4iLItLRkT9a6fUg+qGkM17uGcclzuD87nSVL2v9A6wIDAQABo4IBlTCCAZEwDwYD +VR0TAQH/BAUwAwEB/zCB4QYDVR0gBIHZMIHWMIHTBgkrBgEEAb5YAAMwgcUwgZMG +CCsGAQUFBwICMIGGGoGDQW55IHVzZSBvZiB0aGlzIENlcnRpZmljYXRlIGNvbnN0 +aXR1dGVzIGFjY2VwdGFuY2Ugb2YgdGhlIFF1b1ZhZGlzIFJvb3QgQ0EgMyBDZXJ0 +aWZpY2F0ZSBQb2xpY3kgLyBDZXJ0aWZpY2F0aW9uIFByYWN0aWNlIFN0YXRlbWVu +dC4wLQYIKwYBBQUHAgEWIWh0dHA6Ly93d3cucXVvdmFkaXNnbG9iYWwuY29tL2Nw +czALBgNVHQ8EBAMCAQYwHQYDVR0OBBYEFPLAE+CCQz777i9nMpY1XNu4ywLQMG4G +A1UdIwRnMGWAFPLAE+CCQz777i9nMpY1XNu4ywLQoUmkRzBFMQswCQYDVQQGEwJC +TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDEbMBkGA1UEAxMSUXVvVmFkaXMg +Um9vdCBDQSAzggIFxjANBgkqhkiG9w0BAQUFAAOCAgEAT62gLEz6wPJv92ZVqyM0 +7ucp2sNbtrCD2dDQ4iH782CnO11gUyeim/YIIirnv6By5ZwkajGxkHon24QRiSem +d1o417+shvzuXYO8BsbRd2sPbSQvS3pspweWyuOEn62Iix2rFo1bZhfZFvSLgNLd ++LJ2w/w4E6oM3kJpK27zPOuAJ9v1pkQNn1pVWQvVDVJIxa6f8i+AxeoyUDUSly7B 
+4f/xI4hROJ/yZlZ25w9Rl6VSDE1JUZU2Pb+iSwwQHYaZTKrzchGT5Or2m9qoXadN +t54CrnMAyNojA+j56hl0YgCUyyIgvpSnWbWCar6ZeXqp8kokUvd0/bpO5qgdAm6x +DYBEwa7TIzdfu4V8K5Iu6H6li92Z4b8nby1dqnuH/grdS/yO9SbkbnBCbjPsMZ57 +k8HkyWkaPcBrTiJt7qtYTcbQQcEr6k8Sh17rRdhs9ZgC06DYVYoGmRmioHfRMJ6s +zHXug/WwYjnPbFfiTNKRCw51KBuav/0aQ/HKd/s7j2G4aSgWQgRecCocIdiP4b0j +Wy10QJLZYxkNc91pvGJHvOB0K7Lrfb5BG7XARsWhIstfTsEokt4YutUqKLsRixeT +mJlglFwjz1onl14LBQaTNx47aTbrqZ5hHY8y2o4M1nQ+ewkk2gF3R8Q7zTSMmfXK +4SVhM7JZG+Ju1zdXtg2pEto= +-----END CERTIFICATE----- + +# Issuer: O=SECOM Trust.net OU=Security Communication RootCA1 +# Subject: O=SECOM Trust.net OU=Security Communication RootCA1 +# Label: "Security Communication Root CA" +# Serial: 0 +# MD5 Fingerprint: f1:bc:63:6a:54:e0:b5:27:f5:cd:e7:1a:e3:4d:6e:4a +# SHA1 Fingerprint: 36:b1:2b:49:f9:81:9e:d7:4c:9e:bc:38:0f:c6:56:8f:5d:ac:b2:f7 +# SHA256 Fingerprint: e7:5e:72:ed:9f:56:0e:ec:6e:b4:80:00:73:a4:3f:c3:ad:19:19:5a:39:22:82:01:78:95:97:4a:99:02:6b:6c +-----BEGIN CERTIFICATE----- +MIIDWjCCAkKgAwIBAgIBADANBgkqhkiG9w0BAQUFADBQMQswCQYDVQQGEwJKUDEY +MBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYDVQQLEx5TZWN1cml0eSBDb21t +dW5pY2F0aW9uIFJvb3RDQTEwHhcNMDMwOTMwMDQyMDQ5WhcNMjMwOTMwMDQyMDQ5 +WjBQMQswCQYDVQQGEwJKUDEYMBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYD +VQQLEx5TZWN1cml0eSBDb21tdW5pY2F0aW9uIFJvb3RDQTEwggEiMA0GCSqGSIb3 +DQEBAQUAA4IBDwAwggEKAoIBAQCzs/5/022x7xZ8V6UMbXaKL0u/ZPtM7orw8yl8 +9f/uKuDp6bpbZCKamm8sOiZpUQWZJtzVHGpxxpp9Hp3dfGzGjGdnSj74cbAZJ6kJ +DKaVv0uMDPpVmDvY6CKhS3E4eayXkmmziX7qIWgGmBSWh9JhNrxtJ1aeV+7AwFb9 +Ms+k2Y7CI9eNqPPYJayX5HA49LY6tJ07lyZDo6G8SVlyTCMwhwFY9k6+HGhWZq/N +QV3Is00qVUarH9oe4kA92819uZKAnDfdDJZkndwi92SL32HeFZRSFaB9UslLqCHJ +xrHty8OVYNEP8Ktw+N/LTX7s1vqr2b1/VPKl6Xn62dZ2JChzAgMBAAGjPzA9MB0G +A1UdDgQWBBSgc0mZaNyFW2XjmygvV5+9M7wHSDALBgNVHQ8EBAMCAQYwDwYDVR0T +AQH/BAUwAwEB/zANBgkqhkiG9w0BAQUFAAOCAQEAaECpqLvkT115swW1F7NgE+vG +kl3g0dNq/vu+m22/xwVtWSDEHPC32oRYAmP6SBbvT6UL90qY8j+eG61Ha2POCEfr +Uj94nK9NrvjVT8+amCoQQTlSxN3Zmw7vkwGusi7KaEIkQmywszo+zenaSMQVy+n5 +Bw+SUEmK3TGXX8npN6o7WWWXlDLJs58+OmJYxUmtYg5xpTKqL8aJdkNAExNnPaJU +JRDL8Try2frbSVa7pv6nQTXD4IhhyYjH3zYQIphZ6rBK+1YWc26sTfcioU+tHXot +RSflMMFe8toTyyVCUZVHA4xsIcx0Qu1T/zOLjw9XARYvz6buyXAiFL39vmwLAw== +-----END CERTIFICATE----- + +# Issuer: CN=Sonera Class2 CA O=Sonera +# Subject: CN=Sonera Class2 CA O=Sonera +# Label: "Sonera Class 2 Root CA" +# Serial: 29 +# MD5 Fingerprint: a3:ec:75:0f:2e:88:df:fa:48:01:4e:0b:5c:48:6f:fb +# SHA1 Fingerprint: 37:f7:6d:e6:07:7c:90:c5:b1:3e:93:1a:b7:41:10:b4:f2:e4:9a:27 +# SHA256 Fingerprint: 79:08:b4:03:14:c1:38:10:0b:51:8d:07:35:80:7f:fb:fc:f8:51:8a:00:95:33:71:05:ba:38:6b:15:3d:d9:27 +-----BEGIN CERTIFICATE----- +MIIDIDCCAgigAwIBAgIBHTANBgkqhkiG9w0BAQUFADA5MQswCQYDVQQGEwJGSTEP +MA0GA1UEChMGU29uZXJhMRkwFwYDVQQDExBTb25lcmEgQ2xhc3MyIENBMB4XDTAx +MDQwNjA3Mjk0MFoXDTIxMDQwNjA3Mjk0MFowOTELMAkGA1UEBhMCRkkxDzANBgNV +BAoTBlNvbmVyYTEZMBcGA1UEAxMQU29uZXJhIENsYXNzMiBDQTCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAJAXSjWdyvANlsdE+hY3/Ei9vX+ALTU74W+o +Z6m/AxxNjG8yR9VBaKQTBME1DJqEQ/xcHf+Js+gXGM2RX/uJ4+q/Tl18GybTdXnt +5oTjV+WtKcT0OijnpXuENmmz/V52vaMtmdOQTiMofRhj8VQ7Jp12W5dCsv+u8E7s +3TmVToMGf+dJQMjFAbJUWmYdPfz56TwKnoG4cPABi+QjVHzIrviQHgCWctRUz2Ej +vOr7nQKV0ba5cTppCD8PtOFCx4j1P5iop7oc4HFx71hXgVB6XGt0Rg6DA5jDjqhu +8nYybieDwnPz3BjotJPqdURrBGAgcVeHnfO+oJAjPYok4doh28MCAwEAAaMzMDEw +DwYDVR0TAQH/BAUwAwEB/zARBgNVHQ4ECgQISqCqWITTXjwwCwYDVR0PBAQDAgEG +MA0GCSqGSIb3DQEBBQUAA4IBAQBazof5FnIVV0sd2ZvnoiYw7JNn39Yt0jSv9zil +zqsWuasvfDXLrNAPtEwr/IDva4yRXzZ299uzGxnq9LIR/WFxRL8oszodv7ND6J+/ 
+3DEIcbCdjdY0RzKQxmUk96BKfARzjzlvF4xytb1LyHr4e4PDKE6cCepnP7JnBBvD +FNr450kkkdAdavphOe9r5yF1BgfYErQhIHBCcYHaPJo2vqZbDWpsmh+Re/n570K6 +Tk6ezAyNlNzZRZxe7EJQY670XcSxEtzKO6gunRRaBXW37Ndj4ro1tgQIkejanZz2 +ZrUYrAqmVCY0M9IbwdR/GjqOC6oybtv8TyWf2TLHllpwrN9M +-----END CERTIFICATE----- + +# Issuer: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com +# Subject: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com +# Label: "XRamp Global CA Root" +# Serial: 107108908803651509692980124233745014957 +# MD5 Fingerprint: a1:0b:44:b3:ca:10:d8:00:6e:9d:0f:d8:0f:92:0a:d1 +# SHA1 Fingerprint: b8:01:86:d1:eb:9c:86:a5:41:04:cf:30:54:f3:4c:52:b7:e5:58:c6 +# SHA256 Fingerprint: ce:cd:dc:90:50:99:d8:da:df:c5:b1:d2:09:b7:37:cb:e2:c1:8c:fb:2c:10:c0:ff:0b:cf:0d:32:86:fc:1a:a2 +-----BEGIN CERTIFICATE----- +MIIEMDCCAxigAwIBAgIQUJRs7Bjq1ZxN1ZfvdY+grTANBgkqhkiG9w0BAQUFADCB +gjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3dy54cmFtcHNlY3VyaXR5LmNvbTEk +MCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2VydmljZXMgSW5jMS0wKwYDVQQDEyRY +UmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQxMTAxMTcx +NDA0WhcNMzUwMTAxMDUzNzE5WjCBgjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3 +dy54cmFtcHNlY3VyaXR5LmNvbTEkMCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2Vy +dmljZXMgSW5jMS0wKwYDVQQDEyRYUmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBB +dXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCYJB69FbS6 +38eMpSe2OAtp87ZOqCwuIR1cRN8hXX4jdP5efrRKt6atH67gBhbim1vZZ3RrXYCP +KZ2GG9mcDZhtdhAoWORlsH9KmHmf4MMxfoArtYzAQDsRhtDLooY2YKTVMIJt2W7Q +DxIEM5dfT2Fa8OT5kavnHTu86M/0ay00fOJIYRyO82FEzG+gSqmUsE3a56k0enI4 +qEHMPJQRfevIpoy3hsvKMzvZPTeL+3o+hiznc9cKV6xkmxnr9A8ECIqsAxcZZPRa +JSKNNCyy9mgdEm3Tih4U2sSPpuIjhdV6Db1q4Ons7Be7QhtnqiXtRYMh/MHJfNVi +PvryxS3T/dRlAgMBAAGjgZ8wgZwwEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0P +BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMZPoj0GY4QJnM5i5ASs +jVy16bYbMDYGA1UdHwQvMC0wK6ApoCeGJWh0dHA6Ly9jcmwueHJhbXBzZWN1cml0 +eS5jb20vWEdDQS5jcmwwEAYJKwYBBAGCNxUBBAMCAQEwDQYJKoZIhvcNAQEFBQAD +ggEBAJEVOQMBG2f7Shz5CmBbodpNl2L5JFMn14JkTpAuw0kbK5rc/Kh4ZzXxHfAR +vbdI4xD2Dd8/0sm2qlWkSLoC295ZLhVbO50WfUfXN+pfTXYSNrsf16GBBEYgoyxt +qZ4Bfj8pzgCT3/3JknOJiWSe5yvkHJEs0rnOfc5vMZnT5r7SHpDwCRR5XCOrTdLa +IR9NmXmd4c8nnxCbHIgNsIpkQTG4DmyQJKSbXHGPurt+HBvbaoAPIbzp26a3QPSy +i6mx5O+aGtA9aZnuqCij4Tyz8LIRnM98QObd50N9otg6tamN8jSZxNQQ4Qb9CYQQ +O+7ETPTsJ3xCwnR8gooJybQDJbw= +-----END CERTIFICATE----- + +# Issuer: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority +# Subject: O=The Go Daddy Group, Inc. 
OU=Go Daddy Class 2 Certification Authority +# Label: "Go Daddy Class 2 CA" +# Serial: 0 +# MD5 Fingerprint: 91:de:06:25:ab:da:fd:32:17:0c:bb:25:17:2a:84:67 +# SHA1 Fingerprint: 27:96:ba:e6:3f:18:01:e2:77:26:1b:a0:d7:77:70:02:8f:20:ee:e4 +# SHA256 Fingerprint: c3:84:6b:f2:4b:9e:93:ca:64:27:4c:0e:c6:7c:1e:cc:5e:02:4f:fc:ac:d2:d7:40:19:35:0e:81:fe:54:6a:e4 +-----BEGIN CERTIFICATE----- +MIIEADCCAuigAwIBAgIBADANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEh +MB8GA1UEChMYVGhlIEdvIERhZGR5IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBE +YWRkeSBDbGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA0MDYyOTE3 +MDYyMFoXDTM0MDYyOTE3MDYyMFowYzELMAkGA1UEBhMCVVMxITAfBgNVBAoTGFRo +ZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28gRGFkZHkgQ2xhc3Mg +MiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQADggEN +ADCCAQgCggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCA +PVYYYwhv2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6w +wdhFJ2+qN1j3hybX2C32qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXi +EqITLdiOr18SPaAIBQi2XKVlOARFmR6jYGB0xUGlcmIbYsUfb18aQr4CUWWoriMY +avx4A6lNf4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmYvLEHZ6IVDd2gWMZEewo+ +YihfukEHU1jPEX44dMX4/7VpkI+EdOqXG68CAQOjgcAwgb0wHQYDVR0OBBYEFNLE +sNKR1EwRcbNhyz2h/t2oatTjMIGNBgNVHSMEgYUwgYKAFNLEsNKR1EwRcbNhyz2h +/t2oatTjoWekZTBjMQswCQYDVQQGEwJVUzEhMB8GA1UEChMYVGhlIEdvIERhZGR5 +IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBEYWRkeSBDbGFzcyAyIENlcnRpZmlj +YXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQAD +ggEBADJL87LKPpH8EsahB4yOd6AzBhRckB4Y9wimPQoZ+YeAEW5p5JYXMP80kWNy +OO7MHAGjHZQopDH2esRU1/blMVgDoszOYtuURXO1v0XJJLXVggKtI3lpjbi2Tc7P +TMozI+gciKqdi0FuFskg5YmezTvacPd+mSYgFFQlq25zheabIZ0KbIIOqPjCDPoQ +HmyW74cNxA9hi63ugyuV+I6ShHI56yDqg+2DzZduCLzrTia2cyvk0/ZM/iZx4mER +dEr/VxqHD3VILs9RaRegAhJhldXRQLIQTO7ErBBDpqWeCtWVYpoNz4iCxTIM5Cuf +ReYNnyicsbkqWletNw+vHX/bvZ8= +-----END CERTIFICATE----- + +# Issuer: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority +# Subject: O=Starfield Technologies, Inc. 
OU=Starfield Class 2 Certification Authority +# Label: "Starfield Class 2 CA" +# Serial: 0 +# MD5 Fingerprint: 32:4a:4b:bb:c8:63:69:9b:be:74:9a:c6:dd:1d:46:24 +# SHA1 Fingerprint: ad:7e:1c:28:b0:64:ef:8f:60:03:40:20:14:c3:d0:e3:37:0e:b5:8a +# SHA256 Fingerprint: 14:65:fa:20:53:97:b8:76:fa:a6:f0:a9:95:8e:55:90:e4:0f:cc:7f:aa:4f:b7:c2:c8:67:75:21:fb:5f:b6:58 +-----BEGIN CERTIFICATE----- +MIIEDzCCAvegAwIBAgIBADANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJVUzEl +MCMGA1UEChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMp +U3RhcmZpZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQw +NjI5MTczOTE2WhcNMzQwNjI5MTczOTE2WjBoMQswCQYDVQQGEwJVUzElMCMGA1UE +ChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMpU3RhcmZp +ZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggEgMA0GCSqGSIb3 +DQEBAQUAA4IBDQAwggEIAoIBAQC3Msj+6XGmBIWtDBFk385N78gDGIc/oav7PKaf +8MOh2tTYbitTkPskpD6E8J7oX+zlJ0T1KKY/e97gKvDIr1MvnsoFAZMej2YcOadN ++lq2cwQlZut3f+dZxkqZJRRU6ybH838Z1TBwj6+wRir/resp7defqgSHo9T5iaU0 +X9tDkYI22WY8sbi5gv2cOj4QyDvvBmVmepsZGD3/cVE8MC5fvj13c7JdBmzDI1aa +K4UmkhynArPkPw2vCHmCuDY96pzTNbO8acr1zJ3o/WSNF4Azbl5KXZnJHoe0nRrA +1W4TNSNe35tfPe/W93bC6j67eA0cQmdrBNj41tpvi/JEoAGrAgEDo4HFMIHCMB0G +A1UdDgQWBBS/X7fRzt0fhvRbVazc1xDCDqmI5zCBkgYDVR0jBIGKMIGHgBS/X7fR +zt0fhvRbVazc1xDCDqmI56FspGowaDELMAkGA1UEBhMCVVMxJTAjBgNVBAoTHFN0 +YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAsTKVN0YXJmaWVsZCBD +bGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8w +DQYJKoZIhvcNAQEFBQADggEBAAWdP4id0ckaVaGsafPzWdqbAYcaT1epoXkJKtv3 +L7IezMdeatiDh6GX70k1PncGQVhiv45YuApnP+yz3SFmH8lU+nLMPUxA2IGvd56D +eruix/U0F47ZEUD0/CwqTRV/p2JdLiXTAAsgGh1o+Re49L2L7ShZ3U0WixeDyLJl +xy16paq8U4Zt3VekyvggQQto8PT7dL5WXXp59fkdheMtlb71cZBDzI0fmgAKhynp +VSJYACPq4xJDKVtHCN2MQWplBqjlIapBtJUhlbl90TSrE9atvNziPTnNvT51cKEY +WQPJIrSPnNVeKtelttQKbfi3QBFGmh95DmK/D5fs4C8fF5Q= +-----END CERTIFICATE----- + +# Issuer: O=Government Root Certification Authority +# Subject: O=Government Root Certification Authority +# Label: "Taiwan GRCA" +# Serial: 42023070807708724159991140556527066870 +# MD5 Fingerprint: 37:85:44:53:32:45:1f:20:f0:f3:95:e1:25:c4:43:4e +# SHA1 Fingerprint: f4:8b:11:bf:de:ab:be:94:54:20:71:e6:41:de:6b:be:88:2b:40:b9 +# SHA256 Fingerprint: 76:00:29:5e:ef:e8:5b:9e:1f:d6:24:db:76:06:2a:aa:ae:59:81:8a:54:d2:77:4c:d4:c0:b2:c0:11:31:e1:b3 +-----BEGIN CERTIFICATE----- +MIIFcjCCA1qgAwIBAgIQH51ZWtcvwgZEpYAIaeNe9jANBgkqhkiG9w0BAQUFADA/ +MQswCQYDVQQGEwJUVzEwMC4GA1UECgwnR292ZXJubWVudCBSb290IENlcnRpZmlj +YXRpb24gQXV0aG9yaXR5MB4XDTAyMTIwNTEzMjMzM1oXDTMyMTIwNTEzMjMzM1ow +PzELMAkGA1UEBhMCVFcxMDAuBgNVBAoMJ0dvdmVybm1lbnQgUm9vdCBDZXJ0aWZp +Y2F0aW9uIEF1dGhvcml0eTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB +AJoluOzMonWoe/fOW1mKydGGEghU7Jzy50b2iPN86aXfTEc2pBsBHH8eV4qNw8XR +IePaJD9IK/ufLqGU5ywck9G/GwGHU5nOp/UKIXZ3/6m3xnOUT0b3EEk3+qhZSV1q +gQdW8or5BtD3cCJNtLdBuTK4sfCxw5w/cP1T3YGq2GN49thTbqGsaoQkclSGxtKy +yhwOeYHWtXBiCAEuTk8O1RGvqa/lmr/czIdtJuTJV6L7lvnM4T9TjGxMfptTCAts +F/tnyMKtsc2AtJfcdgEWFelq16TheEfOhtX7MfP6Mb40qij7cEwdScevLJ1tZqa2 +jWR+tSBqnTuBto9AAGdLiYa4zGX+FVPpBMHWXx1E1wovJ5pGfaENda1UhhXcSTvx +ls4Pm6Dso3pdvtUqdULle96ltqqvKKyskKw4t9VoNSZ63Pc78/1Fm9G7Q3hub/FC +VGqY8A2tl+lSXunVanLeavcbYBT0peS2cWeqH+riTcFCQP5nRhc4L0c/cZyu5SHK +YS1tB6iEfC3uUSXxY5Ce/eFXiGvviiNtsea9P63RPZYLhY3Naye7twWb7LuRqQoH +EgKXTiCQ8P8NHuJBO9NAOueNXdpm5AKwB1KYXA6OM5zCppX7VRluTI6uSw+9wThN +Xo+EHWbNxWCWtFJaBYmOlXqYwZE8lSOyDvR5tMl8wUohAgMBAAGjajBoMB0GA1Ud +DgQWBBTMzO/MKWCkO7GStjz6MmKPrCUVOzAMBgNVHRMEBTADAQH/MDkGBGcqBwAE +MTAvMC0CAQAwCQYFKw4DAhoFADAHBgVnKgMAAAQUA5vwIhP/lSg209yewDL7MTqK 
+UWUwDQYJKoZIhvcNAQEFBQADggIBAECASvomyc5eMN1PhnR2WPWus4MzeKR6dBcZ +TulStbngCnRiqmjKeKBMmo4sIy7VahIkv9Ro04rQ2JyftB8M3jh+Vzj8jeJPXgyf +qzvS/3WXy6TjZwj/5cAWtUgBfen5Cv8b5Wppv3ghqMKnI6mGq3ZW6A4M9hPdKmaK +ZEk9GhiHkASfQlK3T8v+R0F2Ne//AHY2RTKbxkaFXeIksB7jSJaYV0eUVXoPQbFE +JPPB/hprv4j9wabak2BegUqZIJxIZhm1AHlUD7gsL0u8qV1bYH+Mh6XgUmMqvtg7 +hUAV/h62ZT/FS9p+tXo1KaMuephgIqP0fSdOLeq0dDzpD6QzDxARvBMB1uUO07+1 +EqLhRSPAzAhuYbeJq4PjJB7mXQfnHyA+z2fI56wwbSdLaG5LKlwCCDTb+HbkZ6Mm +nD+iMsJKxYEYMRBWqoTvLQr/uB930r+lWKBi5NdLkXWNiYCYfm3LU05er/ayl4WX +udpVBrkk7tfGOB5jGxI7leFYrPLfhNVfmS8NVVvmONsuP3LpSIXLuykTjx44Vbnz +ssQwmSNOXfJIoRIM3BKQCZBUkQM8R+XVyWXgt0t97EfTsws+rZ7QdAAO671RrcDe +LMDDav7v3Aun+kbfYNucpllQdSNpc5Oy+fwC00fmcc4QAu4njIT/rEUNE1yDMuAl +pYYsfPQS +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Assured ID Root CA" +# Serial: 17154717934120587862167794914071425081 +# MD5 Fingerprint: 87:ce:0b:7b:2a:0e:49:00:e1:58:71:9b:37:a8:93:72 +# SHA1 Fingerprint: 05:63:b8:63:0d:62:d7:5a:bb:c8:ab:1e:4b:df:b5:a8:99:b2:4d:43 +# SHA256 Fingerprint: 3e:90:99:b5:01:5e:8f:48:6c:00:bc:ea:9d:11:1e:e7:21:fa:ba:35:5a:89:bc:f1:df:69:56:1e:3d:c6:32:5c +-----BEGIN CERTIFICATE----- +MIIDtzCCAp+gAwIBAgIQDOfg5RfYRv6P5WD8G/AwOTANBgkqhkiG9w0BAQUFADBl +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv +b3QgQ0EwHhcNMDYxMTEwMDAwMDAwWhcNMzExMTEwMDAwMDAwWjBlMQswCQYDVQQG +EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl +cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwggEi +MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtDhXO5EOAXLGH87dg+XESpa7c +JpSIqvTO9SA5KFhgDPiA2qkVlTJhPLWxKISKityfCgyDF3qPkKyK53lTXDGEKvYP +mDI2dsze3Tyoou9q+yHyUmHfnyDXH+Kx2f4YZNISW1/5WBg1vEfNoTb5a3/UsDg+ +wRvDjDPZ2C8Y/igPs6eD1sNuRMBhNZYW/lmci3Zt1/GiSw0r/wty2p5g0I6QNcZ4 +VYcgoc/lbQrISXwxmDNsIumH0DJaoroTghHtORedmTpyoeb6pNnVFzF1roV9Iq4/ +AUaG9ih5yLHa5FcXxH4cDrC0kqZWs72yl+2qp/C3xag/lRbQ/6GW6whfGHdPAgMB +AAGjYzBhMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW +BBRF66Kv9JLLgjEtUYunpyGd823IDzAfBgNVHSMEGDAWgBRF66Kv9JLLgjEtUYun +pyGd823IDzANBgkqhkiG9w0BAQUFAAOCAQEAog683+Lt8ONyc3pklL/3cmbYMuRC +dWKuh+vy1dneVrOfzM4UKLkNl2BcEkxY5NM9g0lFWJc1aRqoR+pWxnmrEthngYTf +fwk8lOa4JiwgvT2zKIn3X/8i4peEH+ll74fg38FnSbNd67IJKusm7Xi+fT8r87cm +NW1fiQG2SVufAQWbqz0lwcy2f8Lxb4bG+mRo64EtlOtCt/qMHt1i8b5QZ7dsvfPx +H2sMNgcWfzd8qVttevESRmCD1ycEvkvOl77DZypoEd+A5wwzZr8TDRRu838fYxAe ++o0bJW1sj6W3YQGx0qMmoRBxna3iw/nDmVG3KwcIzi7mULKn+gpFL6Lw8g== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Global Root CA" +# Serial: 10944719598952040374951832963794454346 +# MD5 Fingerprint: 79:e4:a9:84:0d:7d:3a:96:d7:c0:4f:e2:43:4c:89:2e +# SHA1 Fingerprint: a8:98:5d:3a:65:e5:e5:c4:b2:d7:d6:6d:40:c6:dd:2f:b1:9c:54:36 +# SHA256 Fingerprint: 43:48:a0:e9:44:4c:78:cb:26:5e:05:8d:5e:89:44:b4:d8:4f:96:62:bd:26:db:25:7f:89:34:a4:43:c7:01:61 +-----BEGIN CERTIFICATE----- +MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBh +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBD +QTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAwMDAwMDBaMGExCzAJBgNVBAYTAlVT +MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j 
+b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkqhkiG +9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsB +CSDMAZOnTjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97 +nh6Vfe63SKMI2tavegw5BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt +43C/dxC//AH2hdmoRBBYMql1GNXRor5H4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7P +T19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y7vrTC0LUq7dBMtoM1O/4 +gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQABo2MwYTAO +BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbR +TLtm8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUw +DQYJKoZIhvcNAQEFBQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/Esr +hMAtudXH/vTBH1jLuG2cenTnmCmrEbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg +06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIttep3Sp+dWOIrWcBAI+0tKIJF +PnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886UAb3LujEV0ls +YSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk +CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4= +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert High Assurance EV Root CA" +# Serial: 3553400076410547919724730734378100087 +# MD5 Fingerprint: d4:74:de:57:5c:39:b2:d3:9c:85:83:c5:c0:65:49:8a +# SHA1 Fingerprint: 5f:b7:ee:06:33:e2:59:db:ad:0c:4c:9a:e6:d3:8f:1a:61:c7:dc:25 +# SHA256 Fingerprint: 74:31:e5:f4:c3:c1:ce:46:90:77:4f:0b:61:e0:54:40:88:3b:a9:a0:1e:d0:0b:a6:ab:d7:80:6e:d3:b1:18:cf +-----BEGIN CERTIFICATE----- +MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j +ZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAwMFoXDTMxMTExMDAwMDAwMFowbDEL +MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3 +LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug +RVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm ++9S75S0tMqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTW +PNt0OKRKzE0lgvdKpVMSOO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEM +xChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFB +Ik5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQNAQTXKFx01p8VdteZOE3 +hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUeh10aUAsg +EsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQF +MAMBAf8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaA +FLE+w2kD+L9HAdSYJhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3Nec +nzyIZgYIVyHbIUf4KmeqvxgydkAQV8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6z +eM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFpmyPInngiK3BD41VHMWEZ71jF +hS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkKmNEVX58Svnw2 +Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe +vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep ++OkuE6N36B9K +-----END CERTIFICATE----- + +# Issuer: CN=Class 2 Primary CA O=Certplus +# Subject: CN=Class 2 Primary CA O=Certplus +# Label: "Certplus Class 2 Primary CA" +# Serial: 177770208045934040241468760488327595043 +# MD5 Fingerprint: 88:2c:8c:52:b8:a2:3c:f3:f7:bb:03:ea:ae:ac:42:0b +# SHA1 Fingerprint: 74:20:74:41:72:9c:dd:92:ec:79:31:d8:23:10:8d:c2:81:92:e2:bb +# SHA256 Fingerprint: 0f:99:3c:8a:ef:97:ba:af:56:87:14:0e:d5:9a:d1:82:1b:b4:af:ac:f0:aa:9a:58:b5:d5:7a:33:8a:3a:fb:cb +-----BEGIN CERTIFICATE----- +MIIDkjCCAnqgAwIBAgIRAIW9S/PY2uNp9pTXX8OlRCMwDQYJKoZIhvcNAQEFBQAw +PTELMAkGA1UEBhMCRlIxETAPBgNVBAoTCENlcnRwbHVzMRswGQYDVQQDExJDbGFz 
+cyAyIFByaW1hcnkgQ0EwHhcNOTkwNzA3MTcwNTAwWhcNMTkwNzA2MjM1OTU5WjA9 +MQswCQYDVQQGEwJGUjERMA8GA1UEChMIQ2VydHBsdXMxGzAZBgNVBAMTEkNsYXNz +IDIgUHJpbWFyeSBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANxQ +ltAS+DXSCHh6tlJw/W/uz7kRy1134ezpfgSN1sxvc0NXYKwzCkTsA18cgCSR5aiR +VhKC9+Ar9NuuYS6JEI1rbLqzAr3VNsVINyPi8Fo3UjMXEuLRYE2+L0ER4/YXJQyL +kcAbmXuZVg2v7tK8R1fjeUl7NIknJITesezpWE7+Tt9avkGtrAjFGA7v0lPubNCd +EgETjdyAYveVqUSISnFOYFWe2yMZeVYHDD9jC1yw4r5+FfyUM1hBOHTE4Y+L3yas +H7WLO7dDWWuwJKZtkIvEcupdM5i3y95ee++U8Rs+yskhwcWYAqqi9lt3m/V+llU0 +HGdpwPFC40es/CgcZlUCAwEAAaOBjDCBiTAPBgNVHRMECDAGAQH/AgEKMAsGA1Ud +DwQEAwIBBjAdBgNVHQ4EFgQU43Mt38sOKAze3bOkynm4jrvoMIkwEQYJYIZIAYb4 +QgEBBAQDAgEGMDcGA1UdHwQwMC4wLKAqoCiGJmh0dHA6Ly93d3cuY2VydHBsdXMu +Y29tL0NSTC9jbGFzczIuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQCnVM+IRBnL39R/ +AN9WM2K191EBkOvDP9GIROkkXe/nFL0gt5o8AP5tn9uQ3Nf0YtaLcF3n5QRIqWh8 +yfFC82x/xXp8HVGIutIKPidd3i1RTtMTZGnkLuPT55sJmabglZvOGtd/vjzOUrMR +FcEPF80Du5wlFbqidon8BvEY0JNLDnyCt6X09l/+7UCmnYR0ObncHoUW2ikbhiMA +ybuJfm6AiB4vFLQDJKgybwOaRywwvlbGp0ICcBvqQNi6BQNwB6SW//1IMwrh3KWB +kJtN3X3n57LNXMhqlfil9o3EXXgIvnsG1knPGTZQIy4I5p4FTUcY1Rbpsda2ENW7 +l7+ijrRU +-----END CERTIFICATE----- + +# Issuer: CN=DST Root CA X3 O=Digital Signature Trust Co. +# Subject: CN=DST Root CA X3 O=Digital Signature Trust Co. +# Label: "DST Root CA X3" +# Serial: 91299735575339953335919266965803778155 +# MD5 Fingerprint: 41:03:52:dc:0f:f7:50:1b:16:f0:02:8e:ba:6f:45:c5 +# SHA1 Fingerprint: da:c9:02:4f:54:d8:f6:df:94:93:5f:b1:73:26:38:ca:6a:d7:7c:13 +# SHA256 Fingerprint: 06:87:26:03:31:a7:24:03:d9:09:f1:05:e6:9b:cf:0d:32:e1:bd:24:93:ff:c6:d9:20:6d:11:bc:d6:77:07:39 +-----BEGIN CERTIFICATE----- +MIIDSjCCAjKgAwIBAgIQRK+wgNajJ7qJMDmGLvhAazANBgkqhkiG9w0BAQUFADA/ +MSQwIgYDVQQKExtEaWdpdGFsIFNpZ25hdHVyZSBUcnVzdCBDby4xFzAVBgNVBAMT +DkRTVCBSb290IENBIFgzMB4XDTAwMDkzMDIxMTIxOVoXDTIxMDkzMDE0MDExNVow +PzEkMCIGA1UEChMbRGlnaXRhbCBTaWduYXR1cmUgVHJ1c3QgQ28uMRcwFQYDVQQD +Ew5EU1QgUm9vdCBDQSBYMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB +AN+v6ZdQCINXtMxiZfaQguzH0yxrMMpb7NnDfcdAwRgUi+DoM3ZJKuM/IUmTrE4O +rz5Iy2Xu/NMhD2XSKtkyj4zl93ewEnu1lcCJo6m67XMuegwGMoOifooUMM0RoOEq +OLl5CjH9UL2AZd+3UWODyOKIYepLYYHsUmu5ouJLGiifSKOeDNoJjj4XLh7dIN9b +xiqKqy69cK3FCxolkHRyxXtqqzTWMIn/5WgTe1QLyNau7Fqckh49ZLOMxt+/yUFw +7BZy1SbsOFU5Q9D8/RhcQPGX69Wam40dutolucbY38EVAjqr2m7xPi71XAicPNaD +aeQQmxkqtilX4+U9m5/wAl0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNV +HQ8BAf8EBAMCAQYwHQYDVR0OBBYEFMSnsaR7LHH62+FLkHX/xBVghYkQMA0GCSqG +SIb3DQEBBQUAA4IBAQCjGiybFwBcqR7uKGY3Or+Dxz9LwwmglSBd49lZRNI+DT69 +ikugdB/OEIKcdBodfpga3csTS7MgROSR6cz8faXbauX+5v3gTt23ADq1cEmv8uXr +AvHRAosZy5Q6XkjEGB5YGV8eAlrwDPGxrancWYaLbumR9YbK+rlmM6pZW87ipxZz +R8srzJmwN0jP41ZL9c8PDHIyh8bwRLtTcm1D9SZImlJnt1ir/md2cXjbDaJWFBM5 +JDGFoqgCWjBH4d1QB7wCCZAA62RjYJsWvIjJEubSfZGL+T0yjWW06XyxV3bqxbYo +Ob8VZRzI9neWagqNdwvYkQsEjgfbKbYK7p2CNTUQ +-----END CERTIFICATE----- + +# Issuer: CN=SwissSign Gold CA - G2 O=SwissSign AG +# Subject: CN=SwissSign Gold CA - G2 O=SwissSign AG +# Label: "SwissSign Gold CA - G2" +# Serial: 13492815561806991280 +# MD5 Fingerprint: 24:77:d9:a8:91:d1:3b:fa:88:2d:c2:ff:f8:cd:33:93 +# SHA1 Fingerprint: d8:c5:38:8a:b7:30:1b:1b:6e:d4:7a:e6:45:25:3a:6f:9f:1a:27:61 +# SHA256 Fingerprint: 62:dd:0b:e9:b9:f5:0a:16:3e:a0:f8:e7:5c:05:3b:1e:ca:57:ea:55:c8:68:8f:64:7c:68:81:f2:c8:35:7b:95 +-----BEGIN CERTIFICATE----- +MIIFujCCA6KgAwIBAgIJALtAHEP1Xk+wMA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNV +BAYTAkNIMRUwEwYDVQQKEwxTd2lzc1NpZ24gQUcxHzAdBgNVBAMTFlN3aXNzU2ln +biBHb2xkIENBIC0gRzIwHhcNMDYxMDI1MDgzMDM1WhcNMzYxMDI1MDgzMDM1WjBF 
+MQswCQYDVQQGEwJDSDEVMBMGA1UEChMMU3dpc3NTaWduIEFHMR8wHQYDVQQDExZT +d2lzc1NpZ24gR29sZCBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC +CgKCAgEAr+TufoskDhJuqVAtFkQ7kpJcyrhdhJJCEyq8ZVeCQD5XJM1QiyUqt2/8 +76LQwB8CJEoTlo8jE+YoWACjR8cGp4QjK7u9lit/VcyLwVcfDmJlD909Vopz2q5+ +bbqBHH5CjCA12UNNhPqE21Is8w4ndwtrvxEvcnifLtg+5hg3Wipy+dpikJKVyh+c +6bM8K8vzARO/Ws/BtQpgvd21mWRTuKCWs2/iJneRjOBiEAKfNA+k1ZIzUd6+jbqE +emA8atufK+ze3gE/bk3lUIbLtK/tREDFylqM2tIrfKjuvqblCqoOpd8FUrdVxyJd +MmqXl2MT28nbeTZ7hTpKxVKJ+STnnXepgv9VHKVxaSvRAiTysybUa9oEVeXBCsdt +MDeQKuSeFDNeFhdVxVu1yzSJkvGdJo+hB9TGsnhQ2wwMC3wLjEHXuendjIj3o02y +MszYF9rNt85mndT9Xv+9lz4pded+p2JYryU0pUHHPbwNUMoDAw8IWh+Vc3hiv69y +FGkOpeUDDniOJihC8AcLYiAQZzlG+qkDzAQ4embvIIO1jEpWjpEA/I5cgt6IoMPi +aG59je883WX0XaxR7ySArqpWl2/5rX3aYT+YdzylkbYcjCbaZaIJbcHiVOO5ykxM +gI93e2CaHt+28kgeDrpOVG2Y4OGiGqJ3UM/EY5LsRxmd6+ZrzsECAwEAAaOBrDCB +qTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUWyV7 +lqRlUX64OfPAeGZe6Drn8O4wHwYDVR0jBBgwFoAUWyV7lqRlUX64OfPAeGZe6Drn +8O4wRgYDVR0gBD8wPTA7BglghXQBWQECAQEwLjAsBggrBgEFBQcCARYgaHR0cDov +L3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIBACe6 +45R88a7A3hfm5djV9VSwg/S7zV4Fe0+fdWavPOhWfvxyeDgD2StiGwC5+OlgzczO +UYrHUDFu4Up+GC9pWbY9ZIEr44OE5iKHjn3g7gKZYbge9LgriBIWhMIxkziWMaa5 +O1M/wySTVltpkuzFwbs4AOPsF6m43Md8AYOfMke6UiI0HTJ6CVanfCU2qT1L2sCC +bwq7EsiHSycR+R4tx5M/nttfJmtS2S6K8RTGRI0Vqbe/vd6mGu6uLftIdxf+u+yv +GPUqUfA5hJeVbG4bwyvEdGB5JbAKJ9/fXtI5z0V9QkvfsywexcZdylU6oJxpmo/a +77KwPJ+HbBIrZXAVUjEaJM9vMSNQH4xPjyPDdEFjHFWoFN0+4FFQz/EbMFYOkrCC +hdiDyyJkvC24JdVUorgG6q2SpCSgwYa1ShNqR88uC1aVVMvOmttqtKay20EIhid3 +92qgQmwLOM7XdVAyksLfKzAiSNDVQTglXaTpXZ/GlHXQRf0wl0OPkKsKx4ZzYEpp +Ld6leNcG2mqeSz53OiATIgHQv2ieY2BrNU0LbbqhPcCT4H8js1WtciVORvnSFu+w +ZMEBnunKoGqYDs/YYPIvSbjkQuE4NRb0yG5P94FW6LqjviOvrv1vA+ACOzB2+htt +Qc8Bsem4yWb02ybzOqR08kkkW8mw0FfB+j564ZfJ +-----END CERTIFICATE----- + +# Issuer: CN=SwissSign Silver CA - G2 O=SwissSign AG +# Subject: CN=SwissSign Silver CA - G2 O=SwissSign AG +# Label: "SwissSign Silver CA - G2" +# Serial: 5700383053117599563 +# MD5 Fingerprint: e0:06:a1:c9:7d:cf:c9:fc:0d:c0:56:75:96:d8:62:13 +# SHA1 Fingerprint: 9b:aa:e5:9f:56:ee:21:cb:43:5a:be:25:93:df:a7:f0:40:d1:1d:cb +# SHA256 Fingerprint: be:6c:4d:a2:bb:b9:ba:59:b6:f3:93:97:68:37:42:46:c3:c0:05:99:3f:a9:8f:02:0d:1d:ed:be:d4:8a:81:d5 +-----BEGIN CERTIFICATE----- +MIIFvTCCA6WgAwIBAgIITxvUL1S7L0swDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UE +BhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMYU3dpc3NTaWdu +IFNpbHZlciBDQSAtIEcyMB4XDTA2MTAyNTA4MzI0NloXDTM2MTAyNTA4MzI0Nlow +RzELMAkGA1UEBhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMY +U3dpc3NTaWduIFNpbHZlciBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8A +MIICCgKCAgEAxPGHf9N4Mfc4yfjDmUO8x/e8N+dOcbpLj6VzHVxumK4DV644N0Mv +Fz0fyM5oEMF4rhkDKxD6LHmD9ui5aLlV8gREpzn5/ASLHvGiTSf5YXu6t+WiE7br +YT7QbNHm+/pe7R20nqA1W6GSy/BJkv6FCgU+5tkL4k+73JU3/JHpMjUi0R86TieF +nbAVlDLaYQ1HTWBCrpJH6INaUFjpiou5XaHc3ZlKHzZnu0jkg7Y360g6rw9njxcH +6ATK72oxh9TAtvmUcXtnZLi2kUpCe2UuMGoM9ZDulebyzYLs2aFK7PayS+VFheZt +eJMELpyCbTapxDFkH4aDCyr0NQp4yVXPQbBH6TCfmb5hqAaEuSh6XzjZG6k4sIN/ +c8HDO0gqgg8hm7jMqDXDhBuDsz6+pJVpATqJAHgE2cn0mRmrVn5bi4Y5FZGkECwJ +MoBgs5PAKrYYC51+jUnyEEp/+dVGLxmSo5mnJqy7jDzmDrxHB9xzUfFwZC8I+bRH +HTBsROopN4WSaGa8gzj+ezku01DwH/teYLappvonQfGbGHLy9YR0SslnxFSuSGTf +jNFusB3hB48IHpmccelM2KX3RxIfdNFRnobzwqIjQAtz20um53MGjMGg6cFZrEb6 +5i/4z3GcRm25xBWNOHkDRUjvxF3XCO6HOSKGsg0PWEP3calILv3q1h8CAwEAAaOB +rDCBqTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU +F6DNweRBtjpbO8tFnb0cwpj6hlgwHwYDVR0jBBgwFoAUF6DNweRBtjpbO8tFnb0c 
+wpj6hlgwRgYDVR0gBD8wPTA7BglghXQBWQEDAQEwLjAsBggrBgEFBQcCARYgaHR0 +cDovL3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIB +AHPGgeAn0i0P4JUw4ppBf1AsX19iYamGamkYDHRJ1l2E6kFSGG9YrVBWIGrGvShp +WJHckRE1qTodvBqlYJ7YH39FkWnZfrt4csEGDyrOj4VwYaygzQu4OSlWhDJOhrs9 +xCrZ1x9y7v5RoSJBsXECYxqCsGKrXlcSH9/L3XWgwF15kIwb4FDm3jH+mHtwX6WQ +2K34ArZv02DdQEsixT2tOnqfGhpHkXkzuoLcMmkDlm4fS/Bx/uNncqCxv1yL5PqZ +IseEuRuNI5c/7SXgz2W79WEE790eslpBIlqhn10s6FvJbakMDHiqYMZWjwFaDGi8 +aRl5xB9+lwW/xekkUV7U1UtT7dkjWjYDZaPBA61BMPNGG4WQr2W11bHkFlt4dR2X +em1ZqSqPe97Dh4kQmUlzeMg9vVE1dCrV8X5pGyq7O70luJpaPXJhkGaH7gzWTdQR +dAtq/gsD/KNVV4n+SsuuWxcFyPKNIzFTONItaj+CuY0IavdeQXRuwxF+B6wpYJE/ +OMpXEA29MC/HpeZBoNquBYeaoKRlbEwJDIm6uNO5wJOKMPqN5ZprFQFOZ6raYlY+ +hAhm0sQ2fac+EPyI4NSA5QC9qvNOBqN6avlicuMJT+ubDgEj8Z+7fNzcbBGXJbLy +tGMU0gYqZ4yD9c7qB9iaah7s5Aq7KkzrCWA5zspi2C5u +-----END CERTIFICATE----- + +# Issuer: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc. +# Subject: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc. +# Label: "GeoTrust Primary Certification Authority" +# Serial: 32798226551256963324313806436981982369 +# MD5 Fingerprint: 02:26:c3:01:5e:08:30:37:43:a9:d0:7d:cf:37:e6:bf +# SHA1 Fingerprint: 32:3c:11:8e:1b:f7:b8:b6:52:54:e2:e2:10:0d:d6:02:90:37:f0:96 +# SHA256 Fingerprint: 37:d5:10:06:c5:12:ea:ab:62:64:21:f1:ec:8c:92:01:3f:c5:f8:2a:e9:8e:e5:33:eb:46:19:b8:de:b4:d0:6c +-----BEGIN CERTIFICATE----- +MIIDfDCCAmSgAwIBAgIQGKy1av1pthU6Y2yv2vrEoTANBgkqhkiG9w0BAQUFADBY +MQswCQYDVQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjExMC8GA1UEAxMo +R2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEx +MjcwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMFgxCzAJBgNVBAYTAlVTMRYwFAYDVQQK +Ew1HZW9UcnVzdCBJbmMuMTEwLwYDVQQDEyhHZW9UcnVzdCBQcmltYXJ5IENlcnRp +ZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEAvrgVe//UfH1nrYNke8hCUy3f9oQIIGHWAVlqnEQRr+92/ZV+zmEwu3qDXwK9 +AWbK7hWNb6EwnL2hhZ6UOvNWiAAxz9juapYC2e0DjPt1befquFUWBRaa9OBesYjA +ZIVcFU2Ix7e64HXprQU9nceJSOC7KMgD4TCTZF5SwFlwIjVXiIrxlQqD17wxcwE0 +7e9GceBrAqg1cmuXm2bgyxx5X9gaBGgeRwLmnWDiNpcB3841kt++Z8dtd1k7j53W +kBWUvEI0EME5+bEnPn7WinXFsq+W06Lem+SYvn3h6YGttm/81w7a4DSwDRp35+MI +mO9Y+pyEtzavwt+s0vQQBnBxNQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G +A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQULNVQQZcVi/CPNmFbSvtr2ZnJM5IwDQYJ +KoZIhvcNAQEFBQADggEBAFpwfyzdtzRP9YZRqSa+S7iq8XEN3GHHoOo0Hnp3DwQ1 +6CePbJC/kRYkRj5KTs4rFtULUh38H2eiAkUxT87z+gOneZ1TatnaYzr4gNfTmeGl +4b7UVXGYNTq+k+qurUKykG/g/CFNNWMziUnWm07Kx+dOCQD32sfvmWKZd7aVIl6K +oKv0uHiYyjgZmclynnjNS6yvGaBzEi38wkG6gZHaFloxt/m0cYASSJlyc1pZU8Fj +UjPtp8nSOQJw+uCxQmYpqptR7TBUIhRf2asdweSU8Pj1K/fqynhG1riR/aYNKxoU +AT6A8EKglQdebc3MS6RFjasS6LPeWuWgfOgPIh1a6Vk= +-----END CERTIFICATE----- + +# Issuer: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. - For authorized use only +# Subject: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. 
- For authorized use only +# Label: "thawte Primary Root CA" +# Serial: 69529181992039203566298953787712940909 +# MD5 Fingerprint: 8c:ca:dc:0b:22:ce:f5:be:72:ac:41:1a:11:a8:d8:12 +# SHA1 Fingerprint: 91:c6:d6:ee:3e:8a:c8:63:84:e5:48:c2:99:29:5c:75:6c:81:7b:81 +# SHA256 Fingerprint: 8d:72:2f:81:a9:c1:13:c0:79:1d:f1:36:a2:96:6d:b2:6c:95:0a:97:1d:b4:6b:41:99:f4:ea:54:b7:8b:fb:9f +-----BEGIN CERTIFICATE----- +MIIEIDCCAwigAwIBAgIQNE7VVyDV7exJ9C/ON9srbTANBgkqhkiG9w0BAQUFADCB +qTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf +Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw +MDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxHzAdBgNV +BAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwHhcNMDYxMTE3MDAwMDAwWhcNMzYw +NzE2MjM1OTU5WjCBqTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5j +LjEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYG +A1UECxMvKGMpIDIwMDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNl +IG9ubHkxHzAdBgNVBAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwggEiMA0GCSqG +SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCsoPD7gFnUnMekz52hWXMJEEUMDSxuaPFs +W0hoSVk3/AszGcJ3f8wQLZU0HObrTQmnHNK4yZc2AreJ1CRfBsDMRJSUjQJib+ta +3RGNKJpchJAQeg29dGYvajig4tVUROsdB58Hum/u6f1OCyn1PoSgAfGcq/gcfomk +6KHYcWUNo1F77rzSImANuVud37r8UVsLr5iy6S7pBOhih94ryNdOwUxkHt3Ph1i6 +Sk/KaAcdHJ1KxtUvkcx8cXIcxcBn6zL9yZJclNqFwJu/U30rCfSMnZEfl2pSy94J +NqR32HuHUETVPm4pafs5SSYeCaWAe0At6+gnhcn+Yf1+5nyXHdWdAgMBAAGjQjBA +MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBR7W0XP +r87Lev0xkhpqtvNG61dIUDANBgkqhkiG9w0BAQUFAAOCAQEAeRHAS7ORtvzw6WfU +DW5FvlXok9LOAz/t2iWwHVfLHjp2oEzsUHboZHIMpKnxuIvW1oeEuzLlQRHAd9mz +YJ3rG9XRbkREqaYB7FViHXe4XI5ISXycO1cRrK1zN44veFyQaEfZYGDm/Ac9IiAX +xPcW6cTYcvnIc3zfFi8VqT79aie2oetaupgf1eNNZAqdE8hhuvU5HIe6uL17In/2 +/qxAeeWsEG89jxt5dovEN7MhGITlNgDrYyCZuen+MwS7QcjBAvlEYyCegc5C09Y/ +LHbTY5xZ3Y+m4Q6gLkH3LpVHz7z9M/P2C2F+fpErgUfCJzDupxBdN49cOSvkBPB7 +jVaMaA== +-----END CERTIFICATE----- + +# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. - For authorized use only +# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. 
- For authorized use only +# Label: "VeriSign Class 3 Public Primary Certification Authority - G5" +# Serial: 33037644167568058970164719475676101450 +# MD5 Fingerprint: cb:17:e4:31:67:3e:e2:09:fe:45:57:93:f3:0a:fa:1c +# SHA1 Fingerprint: 4e:b6:d5:78:49:9b:1c:cf:5f:58:1e:ad:56:be:3d:9b:67:44:a5:e5 +# SHA256 Fingerprint: 9a:cf:ab:7e:43:c8:d8:80:d0:6b:26:2a:94:de:ee:e4:b4:65:99:89:c3:d0:ca:f1:9b:af:64:05:e4:1a:b7:df +-----BEGIN CERTIFICATE----- +MIIE0zCCA7ugAwIBAgIQGNrRniZ96LtKIVjNzGs7SjANBgkqhkiG9w0BAQUFADCB +yjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL +ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJp +U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxW +ZXJpU2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0 +aG9yaXR5IC0gRzUwHhcNMDYxMTA4MDAwMDAwWhcNMzYwNzE2MjM1OTU5WjCByjEL +MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW +ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJpU2ln +biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp +U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y +aXR5IC0gRzUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvJAgIKXo1 +nmAMqudLO07cfLw8RRy7K+D+KQL5VwijZIUVJ/XxrcgxiV0i6CqqpkKzj/i5Vbex +t0uz/o9+B1fs70PbZmIVYc9gDaTY3vjgw2IIPVQT60nKWVSFJuUrjxuf6/WhkcIz +SdhDY2pSS9KP6HBRTdGJaXvHcPaz3BJ023tdS1bTlr8Vd6Gw9KIl8q8ckmcY5fQG +BO+QueQA5N06tRn/Arr0PO7gi+s3i+z016zy9vA9r911kTMZHRxAy3QkGSGT2RT+ +rCpSx4/VBEnkjWNHiDxpg8v+R70rfk/Fla4OndTRQ8Bnc+MUCH7lP59zuDMKz10/ +NIeWiu5T6CUVAgMBAAGjgbIwga8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8E +BAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJaW1hZ2UvZ2lmMCEwHzAH +BgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYjaHR0cDovL2xvZ28udmVy +aXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFH/TZafC3ey78DAJ80M5+gKv +MzEzMA0GCSqGSIb3DQEBBQUAA4IBAQCTJEowX2LP2BqYLz3q3JktvXf2pXkiOOzE +p6B4Eq1iDkVwZMXnl2YtmAl+X6/WzChl8gGqCBpH3vn5fJJaCGkgDdk+bW48DW7Y +5gaRQBi5+MHt39tBquCWIMnNZBU4gcmU7qKEKQsTb47bDN0lAtukixlE0kF6BWlK +WE9gyn6CagsCqiUXObXbf+eEZSqVir2G3l6BFoMtEMze/aiCKm0oHw0LxOXnGiYZ +4fQRbxC1lfznQgUy286dUV4otp6F01vvpX1FQHKOtw5rDgb7MzVIcbidJ4vEZV8N +hnacRHr2lVz2XTIIM6RUthg/aFzyQkqFOFSDX9HoLPKsEdao7WNq +-----END CERTIFICATE----- + +# Issuer: CN=SecureTrust CA O=SecureTrust Corporation +# Subject: CN=SecureTrust CA O=SecureTrust Corporation +# Label: "SecureTrust CA" +# Serial: 17199774589125277788362757014266862032 +# MD5 Fingerprint: dc:32:c3:a7:6d:25:57:c7:68:09:9d:ea:2d:a9:a2:d1 +# SHA1 Fingerprint: 87:82:c6:c3:04:35:3b:cf:d2:96:92:d2:59:3e:7d:44:d9:34:ff:11 +# SHA256 Fingerprint: f1:c1:b5:0a:e5:a2:0d:d8:03:0e:c9:f6:bc:24:82:3d:d3:67:b5:25:57:59:b4:e7:1b:61:fc:e9:f7:37:5d:73 +-----BEGIN CERTIFICATE----- +MIIDuDCCAqCgAwIBAgIQDPCOXAgWpa1Cf/DrJxhZ0DANBgkqhkiG9w0BAQUFADBI +MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x +FzAVBgNVBAMTDlNlY3VyZVRydXN0IENBMB4XDTA2MTEwNzE5MzExOFoXDTI5MTIz +MTE5NDA1NVowSDELMAkGA1UEBhMCVVMxIDAeBgNVBAoTF1NlY3VyZVRydXN0IENv +cnBvcmF0aW9uMRcwFQYDVQQDEw5TZWN1cmVUcnVzdCBDQTCCASIwDQYJKoZIhvcN +AQEBBQADggEPADCCAQoCggEBAKukgeWVzfX2FI7CT8rU4niVWJxB4Q2ZQCQXOZEz +Zum+4YOvYlyJ0fwkW2Gz4BERQRwdbvC4u/jep4G6pkjGnx29vo6pQT64lO0pGtSO +0gMdA+9tDWccV9cGrcrI9f4Or2YlSASWC12juhbDCE/RRvgUXPLIXgGZbf2IzIao +wW8xQmxSPmjL8xk037uHGFaAJsTQ3MBv396gwpEWoGQRS0S8Hvbn+mPeZqx2pHGj +7DaUaHp3pLHnDi+BeuK1cobvomuL8A/b01k/unK8RCSc43Oz969XL0Imnal0ugBS +8kvNU3xHCzaFDmapCJcWNFfBZveA4+1wVMeT4C4oFVmHursCAwEAAaOBnTCBmjAT +BgkrBgEEAYI3FAIEBh4EAEMAQTALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB +/zAdBgNVHQ4EFgQUQjK2FvoE/f5dS3rD/fdMQB1aQ68wNAYDVR0fBC0wKzApoCeg 
+JYYjaHR0cDovL2NybC5zZWN1cmV0cnVzdC5jb20vU1RDQS5jcmwwEAYJKwYBBAGC +NxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBADDtT0rhWDpSclu1pqNlGKa7UTt3 +6Z3q059c4EVlew3KW+JwULKUBRSuSceNQQcSc5R+DCMh/bwQf2AQWnL1mA6s7Ll/ +3XpvXdMc9P+IBWlCqQVxyLesJugutIxq/3HcuLHfmbx8IVQr5Fiiu1cprp6poxkm +D5kuCLDv/WnPmRoJjeOnnyvJNjR7JLN4TJUXpAYmHrZkUjZfYGfZnMUFdAvnZyPS +CPyI6a6Lf+Ew9Dd+/cYy2i2eRDAwbO4H3tI0/NL/QPZL9GZGBlSm8jIKYyYwa5vR +3ItHuuG51WLQoqD0ZwV4KWMabwTW+MZMo5qxN7SN5ShLHZ4swrhovO0C7jE= +-----END CERTIFICATE----- + +# Issuer: CN=Secure Global CA O=SecureTrust Corporation +# Subject: CN=Secure Global CA O=SecureTrust Corporation +# Label: "Secure Global CA" +# Serial: 9751836167731051554232119481456978597 +# MD5 Fingerprint: cf:f4:27:0d:d4:ed:dc:65:16:49:6d:3d:da:bf:6e:de +# SHA1 Fingerprint: 3a:44:73:5a:e5:81:90:1f:24:86:61:46:1e:3b:9c:c4:5f:f5:3a:1b +# SHA256 Fingerprint: 42:00:f5:04:3a:c8:59:0e:bb:52:7d:20:9e:d1:50:30:29:fb:cb:d4:1c:a1:b5:06:ec:27:f1:5a:de:7d:ac:69 +-----BEGIN CERTIFICATE----- +MIIDvDCCAqSgAwIBAgIQB1YipOjUiolN9BPI8PjqpTANBgkqhkiG9w0BAQUFADBK +MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x +GTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwHhcNMDYxMTA3MTk0MjI4WhcNMjkx +MjMxMTk1MjA2WjBKMQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3Qg +Q29ycG9yYXRpb24xGTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwggEiMA0GCSqG +SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvNS7YrGxVaQZx5RNoJLNP2MwhR/jxYDiJ +iQPpvepeRlMJ3Fz1Wuj3RSoC6zFh1ykzTM7HfAo3fg+6MpjhHZevj8fcyTiW89sa +/FHtaMbQbqR8JNGuQsiWUGMu4P51/pinX0kuleM5M2SOHqRfkNJnPLLZ/kG5VacJ +jnIFHovdRIWCQtBJwB1g8NEXLJXr9qXBkqPFwqcIYA1gBBCWeZ4WNOaptvolRTnI +HmX5k/Wq8VLcmZg9pYYaDDUz+kulBAYVHDGA76oYa8J719rO+TMg1fW9ajMtgQT7 +sFzUnKPiXB3jqUJ1XnvUd+85VLrJChgbEplJL4hL/VBi0XPnj3pDAgMBAAGjgZ0w +gZowEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0PBAQDAgGGMA8GA1UdEwEB/wQF +MAMBAf8wHQYDVR0OBBYEFK9EBMJBfkiD2045AuzshHrmzsmkMDQGA1UdHwQtMCsw +KaAnoCWGI2h0dHA6Ly9jcmwuc2VjdXJldHJ1c3QuY29tL1NHQ0EuY3JsMBAGCSsG +AQQBgjcVAQQDAgEAMA0GCSqGSIb3DQEBBQUAA4IBAQBjGghAfaReUw132HquHw0L +URYD7xh8yOOvaliTFGCRsoTciE6+OYo68+aCiV0BN7OrJKQVDpI1WkpEXk5X+nXO +H0jOZvQ8QCaSmGwb7iRGDBezUqXbpZGRzzfTb+cnCDpOGR86p1hcF895P4vkp9Mm +I50mD1hp/Ed+stCNi5O/KU9DaXR2Z0vPB4zmAve14bRDtUstFJ/53CYNv6ZHdAbY +iNE6KTCEztI5gGIbqMdXSbxqVVFnFUq+NQfk1XWYN3kwFNspnWzFacxHVaIw98xc +f8LDmBxrThaA63p4ZUWiABqvDA1VZDRIuJK58bRQKfJPIx/abKwfROHdI3hRW8cW +-----END CERTIFICATE----- + +# Issuer: CN=COMODO Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO Certification Authority O=COMODO CA Limited +# Label: "COMODO Certification Authority" +# Serial: 104350513648249232941998508985834464573 +# MD5 Fingerprint: 5c:48:dc:f7:42:72:ec:56:94:6d:1c:cc:71:35:80:75 +# SHA1 Fingerprint: 66:31:bf:9e:f7:4f:9e:b6:c9:d5:a6:0c:ba:6a:be:d1:f7:bd:ef:7b +# SHA256 Fingerprint: 0c:2c:d6:3d:f7:80:6f:a3:99:ed:e8:09:11:6b:57:5b:f8:79:89:f0:65:18:f9:80:8c:86:05:03:17:8b:af:66 +-----BEGIN CERTIFICATE----- +MIIEHTCCAwWgAwIBAgIQToEtioJl4AsC7j41AkblPTANBgkqhkiG9w0BAQUFADCB +gTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G +A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxJzAlBgNV +BAMTHkNPTU9ETyBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEyMDEwMDAw +MDBaFw0yOTEyMzEyMzU5NTlaMIGBMQswCQYDVQQGEwJHQjEbMBkGA1UECBMSR3Jl +YXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHEwdTYWxmb3JkMRowGAYDVQQKExFDT01P +RE8gQ0EgTGltaXRlZDEnMCUGA1UEAxMeQ09NT0RPIENlcnRpZmljYXRpb24gQXV0 +aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0ECLi3LjkRv3 +UcEbVASY06m/weaKXTuH+7uIzg3jLz8GlvCiKVCZrts7oVewdFFxze1CkU1B/qnI +2GqGd0S7WWaXUF601CxwRM/aN5VCaTwwxHGzUvAhTaHYujl8HJ6jJJ3ygxaYqhZ8 
+Q5sVW7euNJH+1GImGEaaP+vB+fGQV+useg2L23IwambV4EajcNxo2f8ESIl33rXp ++2dtQem8Ob0y2WIC8bGoPW43nOIv4tOiJovGuFVDiOEjPqXSJDlqR6sA1KGzqSX+ +DT+nHbrTUcELpNqsOO9VUCQFZUaTNE8tja3G1CEZ0o7KBWFxB3NH5YoZEr0ETc5O +nKVIrLsm9wIDAQABo4GOMIGLMB0GA1UdDgQWBBQLWOWLxkwVN6RAqTCpIb5HNlpW +/zAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zBJBgNVHR8EQjBAMD6g +PKA6hjhodHRwOi8vY3JsLmNvbW9kb2NhLmNvbS9DT01PRE9DZXJ0aWZpY2F0aW9u +QXV0aG9yaXR5LmNybDANBgkqhkiG9w0BAQUFAAOCAQEAPpiem/Yb6dc5t3iuHXIY +SdOH5EOC6z/JqvWote9VfCFSZfnVDeFs9D6Mk3ORLgLETgdxb8CPOGEIqB6BCsAv +IC9Bi5HcSEW88cbeunZrM8gALTFGTO3nnc+IlP8zwFboJIYmuNg4ON8qa90SzMc/ +RxdMosIGlgnW2/4/PEZB31jiVg88O8EckzXZOFKs7sjsLjBOlDW0JB9LeGna8gI4 +zJVSk/BwJVmcIGfE7vmLV2H0knZ9P4SNVbfo5azV8fUZVqZa+5Acr5Pr5RzUZ5dd +BA6+C4OmF4O5MBKgxTMVBbkN+8cFduPYSo38NBejxiEovjBFMR7HeL5YYTisO+IB +ZQ== +-----END CERTIFICATE----- + +# Issuer: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C. +# Subject: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C. +# Label: "Network Solutions Certificate Authority" +# Serial: 116697915152937497490437556386812487904 +# MD5 Fingerprint: d3:f3:a6:16:c0:fa:6b:1d:59:b1:2d:96:4d:0e:11:2e +# SHA1 Fingerprint: 74:f8:a3:c3:ef:e7:b3:90:06:4b:83:90:3c:21:64:60:20:e5:df:ce +# SHA256 Fingerprint: 15:f0:ba:00:a3:ac:7a:f3:ac:88:4c:07:2b:10:11:a0:77:bd:77:c0:97:f4:01:64:b2:f8:59:8a:bd:83:86:0c +-----BEGIN CERTIFICATE----- +MIID5jCCAs6gAwIBAgIQV8szb8JcFuZHFhfjkDFo4DANBgkqhkiG9w0BAQUFADBi +MQswCQYDVQQGEwJVUzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMu +MTAwLgYDVQQDEydOZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3Jp +dHkwHhcNMDYxMjAxMDAwMDAwWhcNMjkxMjMxMjM1OTU5WjBiMQswCQYDVQQGEwJV +UzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMuMTAwLgYDVQQDEydO +ZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwggEiMA0GCSqG +SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDkvH6SMG3G2I4rC7xGzuAnlt7e+foS0zwz +c7MEL7xxjOWftiJgPl9dzgn/ggwbmlFQGiaJ3dVhXRncEg8tCqJDXRfQNJIg6nPP +OCwGJgl6cvf6UDL4wpPTaaIjzkGxzOTVHzbRijr4jGPiFFlp7Q3Tf2vouAPlT2rl +mGNpSAW+Lv8ztumXWWn4Zxmuk2GWRBXTcrA/vGp97Eh/jcOrqnErU2lBUzS1sLnF +BgrEsEX1QV1uiUV7PTsmjHTC5dLRfbIR1PtYMiKagMnc/Qzpf14Dl847ABSHJ3A4 +qY5usyd2mFHgBeMhqxrVhSI8KbWaFsWAqPS7azCPL0YCorEMIuDTAgMBAAGjgZcw +gZQwHQYDVR0OBBYEFCEwyfsA106Y2oeqKtCnLrFAMadMMA4GA1UdDwEB/wQEAwIB +BjAPBgNVHRMBAf8EBTADAQH/MFIGA1UdHwRLMEkwR6BFoEOGQWh0dHA6Ly9jcmwu +bmV0c29sc3NsLmNvbS9OZXR3b3JrU29sdXRpb25zQ2VydGlmaWNhdGVBdXRob3Jp +dHkuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQC7rkvnt1frf6ott3NHhWrB5KUd5Oc8 +6fRZZXe1eltajSU24HqXLjjAV2CDmAaDn7l2em5Q4LqILPxFzBiwmZVRDuwduIj/ +h1AcgsLj4DKAv6ALR8jDMe+ZZzKATxcheQxpXN5eNK4CtSbqUN9/GGUsyfJj4akH +/nxxH2szJGoeBfcFaMBqEssuXmHLrijTfsK0ZpEmXzwuJF/LWA/rKOyvEZbz3Htv +wKeI8lN3s2Berq4o2jUsbzRF0ybh3uxbTydrFny9RAQYgrOJeRcQcT16ohZO9QHN +pGxlaKFJdlxDydi8NmdspZS11My5vWo1ViHe2MPr+8ukYEywVaCge1ey +-----END CERTIFICATE----- + +# Issuer: CN=COMODO ECC Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO ECC Certification Authority O=COMODO CA Limited +# Label: "COMODO ECC Certification Authority" +# Serial: 41578283867086692638256921589707938090 +# MD5 Fingerprint: 7c:62:ff:74:9d:31:53:5e:68:4a:d5:78:aa:1e:bf:23 +# SHA1 Fingerprint: 9f:74:4e:9f:2b:4d:ba:ec:0f:31:2c:50:b6:56:3b:8e:2d:93:c3:11 +# SHA256 Fingerprint: 17:93:92:7a:06:14:54:97:89:ad:ce:2f:8f:34:f7:f0:b6:6d:0f:3a:e3:a3:b8:4d:21:ec:15:db:ba:4f:ad:c7 +-----BEGIN CERTIFICATE----- +MIICiTCCAg+gAwIBAgIQH0evqmIAcFBUTAGem2OZKjAKBggqhkjOPQQDAzCBhTEL +MAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE +BxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMT 
+IkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwMzA2MDAw +MDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdy +ZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09N +T0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlv +biBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQDR3svdcmCFYX7deSR +FtSrYpn1PlILBs5BAH+X4QokPB0BBO490o0JlwzgdeT6+3eKKvUDYEs2ixYjFq0J +cfRK9ChQtP6IHG4/bC8vCVlbpVsLM5niwz2J+Wos77LTBumjQjBAMB0GA1UdDgQW +BBR1cacZSBm8nZ3qQUfflMRId5nTeTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ +BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjEA7wNbeqy3eApyt4jf/7VGFAkK+qDm +fQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdv +GDeAU/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY= +-----END CERTIFICATE----- + +# Issuer: CN=OISTE WISeKey Global Root GA CA O=WISeKey OU=Copyright (c) 2005/OISTE Foundation Endorsed +# Subject: CN=OISTE WISeKey Global Root GA CA O=WISeKey OU=Copyright (c) 2005/OISTE Foundation Endorsed +# Label: "OISTE WISeKey Global Root GA CA" +# Serial: 86718877871133159090080555911823548314 +# MD5 Fingerprint: bc:6c:51:33:a7:e9:d3:66:63:54:15:72:1b:21:92:93 +# SHA1 Fingerprint: 59:22:a1:e1:5a:ea:16:35:21:f8:98:39:6a:46:46:b0:44:1b:0f:a9 +# SHA256 Fingerprint: 41:c9:23:86:6a:b4:ca:d6:b7:ad:57:80:81:58:2e:02:07:97:a6:cb:df:4f:ff:78:ce:83:96:b3:89:37:d7:f5 +-----BEGIN CERTIFICATE----- +MIID8TCCAtmgAwIBAgIQQT1yx/RrH4FDffHSKFTfmjANBgkqhkiG9w0BAQUFADCB +ijELMAkGA1UEBhMCQ0gxEDAOBgNVBAoTB1dJU2VLZXkxGzAZBgNVBAsTEkNvcHly +aWdodCAoYykgMjAwNTEiMCAGA1UECxMZT0lTVEUgRm91bmRhdGlvbiBFbmRvcnNl +ZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9iYWwgUm9vdCBHQSBDQTAeFw0w +NTEyMTExNjAzNDRaFw0zNzEyMTExNjA5NTFaMIGKMQswCQYDVQQGEwJDSDEQMA4G +A1UEChMHV0lTZUtleTEbMBkGA1UECxMSQ29weXJpZ2h0IChjKSAyMDA1MSIwIAYD +VQQLExlPSVNURSBGb3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBX +SVNlS2V5IEdsb2JhbCBSb290IEdBIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A +MIIBCgKCAQEAy0+zAJs9Nt350UlqaxBJH+zYK7LG+DKBKUOVTJoZIyEVRd7jyBxR +VVuuk+g3/ytr6dTqvirdqFEr12bDYVxgAsj1znJ7O7jyTmUIms2kahnBAbtzptf2 +w93NvKSLtZlhuAGio9RN1AU9ka34tAhxZK9w8RxrfvbDd50kc3vkDIzh2TbhmYsF +mQvtRTEJysIA2/dyoJaqlYfQjse2YXMNdmaM3Bu0Y6Kff5MTMPGhJ9vZ/yxViJGg +4E8HsChWjBgbl0SOid3gF27nKu+POQoxhILYQBRJLnpB5Kf+42TMwVlxSywhp1t9 +4B3RLoGbw9ho972WG6xwsRYUC9tguSYBBQIDAQABo1EwTzALBgNVHQ8EBAMCAYYw +DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUswN+rja8sHnR3JQmthG+IbJphpQw +EAYJKwYBBAGCNxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBAEuh/wuHbrP5wUOx +SPMowB0uyQlB+pQAHKSkq0lPjz0e701vvbyk9vImMMkQyh2I+3QZH4VFvbBsUfk2 +ftv1TDI6QU9bR8/oCy22xBmddMVHxjtqD6wU2zz0c5ypBd8A3HR4+vg1YFkCExh8 +vPtNsCBtQ7tgMHpnM1zFmdH4LTlSc/uMqpclXHLZCB6rTjzjgTGfA6b7wP4piFXa +hNVQA7bihKOmNqoROgHhGEvWRGizPflTdISzRpFGlgC3gCy24eMQ4tui5yiPAZZi +Fj4A4xylNoEYokxSdsARo27mHbrjWr42U8U+dY+GaSlYU7Wcu2+fXMUY7N0v4ZjJ +/L7fCg0= +-----END CERTIFICATE----- + +# Issuer: CN=Certigna O=Dhimyotis +# Subject: CN=Certigna O=Dhimyotis +# Label: "Certigna" +# Serial: 18364802974209362175 +# MD5 Fingerprint: ab:57:a6:5b:7d:42:82:19:b5:d8:58:26:28:5e:fd:ff +# SHA1 Fingerprint: b1:2e:13:63:45:86:a4:6f:1a:b2:60:68:37:58:2d:c4:ac:fd:94:97 +# SHA256 Fingerprint: e3:b6:a2:db:2e:d7:ce:48:84:2f:7a:c5:32:41:c7:b7:1d:54:14:4b:fb:40:c1:1f:3f:1d:0b:42:f5:ee:a1:2d +-----BEGIN CERTIFICATE----- +MIIDqDCCApCgAwIBAgIJAP7c4wEPyUj/MA0GCSqGSIb3DQEBBQUAMDQxCzAJBgNV +BAYTAkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hMB4X +DTA3MDYyOTE1MTMwNVoXDTI3MDYyOTE1MTMwNVowNDELMAkGA1UEBhMCRlIxEjAQ +BgNVBAoMCURoaW15b3RpczERMA8GA1UEAwwIQ2VydGlnbmEwggEiMA0GCSqGSIb3 +DQEBAQUAA4IBDwAwggEKAoIBAQDIaPHJ1tazNHUmgh7stL7qXOEm7RFHYeGifBZ4 
+QCHkYJ5ayGPhxLGWkv8YbWkj4Sti993iNi+RB7lIzw7sebYs5zRLcAglozyHGxny +gQcPOJAZ0xH+hrTy0V4eHpbNgGzOOzGTtvKg0KmVEn2lmsxryIRWijOp5yIVUxbw +zBfsV1/pogqYCd7jX5xv3EjjhQsVWqa6n6xI4wmy9/Qy3l40vhx4XUJbzg4ij02Q +130yGLMLLGq/jj8UEYkgDncUtT2UCIf3JR7VsmAA7G8qKCVuKj4YYxclPz5EIBb2 +JsglrgVKtOdjLPOMFlN+XPsRGgjBRmKfIrjxwo1p3Po6WAbfAgMBAAGjgbwwgbkw +DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUGu3+QTmQtCRZvgHyUtVF9lo53BEw +ZAYDVR0jBF0wW4AUGu3+QTmQtCRZvgHyUtVF9lo53BGhOKQ2MDQxCzAJBgNVBAYT +AkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hggkA/tzj +AQ/JSP8wDgYDVR0PAQH/BAQDAgEGMBEGCWCGSAGG+EIBAQQEAwIABzANBgkqhkiG +9w0BAQUFAAOCAQEAhQMeknH2Qq/ho2Ge6/PAD/Kl1NqV5ta+aDY9fm4fTIrv0Q8h +bV6lUmPOEvjvKtpv6zf+EwLHyzs+ImvaYS5/1HI93TDhHkxAGYwP15zRgzB7mFnc +fca5DClMoTOi62c6ZYTTluLtdkVwj7Ur3vkj1kluPBS1xp81HlDQwY9qcEQCYsuu +HWhBp6pX6FOqB9IG9tUUBguRA3UsbHK1YZWaDYu5Def131TN3ubY1gkIl2PlwS6w +t0QmwCbAr1UwnjvVNioZBPRcHv/PLLf/0P2HQBHVESO7SMAhqaQoLf0V+LBOK/Qw +WyH8EZE0vkHve52Xdf+XlcCWWC/qu0bXu+TZLg== +-----END CERTIFICATE----- + +# Issuer: CN=Deutsche Telekom Root CA 2 O=Deutsche Telekom AG OU=T-TeleSec Trust Center +# Subject: CN=Deutsche Telekom Root CA 2 O=Deutsche Telekom AG OU=T-TeleSec Trust Center +# Label: "Deutsche Telekom Root CA 2" +# Serial: 38 +# MD5 Fingerprint: 74:01:4a:91:b1:08:c4:58:ce:47:cd:f0:dd:11:53:08 +# SHA1 Fingerprint: 85:a4:08:c0:9c:19:3e:5d:51:58:7d:cd:d6:13:30:fd:8c:de:37:bf +# SHA256 Fingerprint: b6:19:1a:50:d0:c3:97:7f:7d:a9:9b:cd:aa:c8:6a:22:7d:ae:b9:67:9e:c7:0b:a3:b0:c9:d9:22:71:c1:70:d3 +-----BEGIN CERTIFICATE----- +MIIDnzCCAoegAwIBAgIBJjANBgkqhkiG9w0BAQUFADBxMQswCQYDVQQGEwJERTEc +MBoGA1UEChMTRGV1dHNjaGUgVGVsZWtvbSBBRzEfMB0GA1UECxMWVC1UZWxlU2Vj +IFRydXN0IENlbnRlcjEjMCEGA1UEAxMaRGV1dHNjaGUgVGVsZWtvbSBSb290IENB +IDIwHhcNOTkwNzA5MTIxMTAwWhcNMTkwNzA5MjM1OTAwWjBxMQswCQYDVQQGEwJE +RTEcMBoGA1UEChMTRGV1dHNjaGUgVGVsZWtvbSBBRzEfMB0GA1UECxMWVC1UZWxl +U2VjIFRydXN0IENlbnRlcjEjMCEGA1UEAxMaRGV1dHNjaGUgVGVsZWtvbSBSb290 +IENBIDIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCrC6M14IspFLEU +ha88EOQ5bzVdSq7d6mGNlUn0b2SjGmBmpKlAIoTZ1KXleJMOaAGtuU1cOs7TuKhC +QN/Po7qCWWqSG6wcmtoIKyUn+WkjR/Hg6yx6m/UTAtB+NHzCnjwAWav12gz1Mjwr +rFDa1sPeg5TKqAyZMg4ISFZbavva4VhYAUlfckE8FQYBjl2tqriTtM2e66foai1S +NNs671x1Udrb8zH57nGYMsRUFUQM+ZtV7a3fGAigo4aKSe5TBY8ZTNXeWHmb0moc +QqvF1afPaA+W5OFhmHZhyJF81j4A4pFQh+GdCuatl9Idxjp9y7zaAzTVjlsB9WoH +txa2bkp/AgMBAAGjQjBAMB0GA1UdDgQWBBQxw3kbuvVT1xfgiXotF2wKsyudMzAP +BgNVHRMECDAGAQH/AgEFMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOC +AQEAlGRZrTlk5ynrE/5aw4sTV8gEJPB0d8Bg42f76Ymmg7+Wgnxu1MM9756Abrsp +tJh6sTtU6zkXR34ajgv8HzFZMQSyzhfzLMdiNlXiItiJVbSYSKpk+tYcNthEeFpa +IzpXl/V6ME+un2pMSyuOoAPjPuCp1NJ70rOo4nI8rZ7/gFnkm0W09juwzTkZmDLl +6iFhkOQxIY40sfcvNUqFENrnijchvllj4PKFiDFT1FQUhXB59C4Gdyd1Lx+4ivn+ +xbrYNuSD7Odlt79jWvNGr4GUN9RBjNYj1h7P9WgbRGOiWrqnNVmh5XAFmw4jV5mU +Cm26OWMohpLzGITY+9HPBVZkVw== +-----END CERTIFICATE----- + +# Issuer: CN=Cybertrust Global Root O=Cybertrust, Inc +# Subject: CN=Cybertrust Global Root O=Cybertrust, Inc +# Label: "Cybertrust Global Root" +# Serial: 4835703278459682877484360 +# MD5 Fingerprint: 72:e4:4a:87:e3:69:40:80:77:ea:bc:e3:f4:ff:f0:e1 +# SHA1 Fingerprint: 5f:43:e5:b1:bf:f8:78:8c:ac:1c:c7:ca:4a:9a:c6:22:2b:cc:34:c6 +# SHA256 Fingerprint: 96:0a:df:00:63:e9:63:56:75:0c:29:65:dd:0a:08:67:da:0b:9c:bd:6e:77:71:4a:ea:fb:23:49:ab:39:3d:a3 +-----BEGIN CERTIFICATE----- +MIIDoTCCAomgAwIBAgILBAAAAAABD4WqLUgwDQYJKoZIhvcNAQEFBQAwOzEYMBYG +A1UEChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2Jh +bCBSb290MB4XDTA2MTIxNTA4MDAwMFoXDTIxMTIxNTA4MDAwMFowOzEYMBYGA1UE 
+ChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2JhbCBS +b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA+Mi8vRRQZhP/8NN5 +7CPytxrHjoXxEnOmGaoQ25yiZXRadz5RfVb23CO21O1fWLE3TdVJDm71aofW0ozS +J8bi/zafmGWgE07GKmSb1ZASzxQG9Dvj1Ci+6A74q05IlG2OlTEQXO2iLb3VOm2y +HLtgwEZLAfVJrn5GitB0jaEMAs7u/OePuGtm839EAL9mJRQr3RAwHQeWP032a7iP +t3sMpTjr3kfb1V05/Iin89cqdPHoWqI7n1C6poxFNcJQZZXcY4Lv3b93TZxiyWNz +FtApD0mpSPCzqrdsxacwOUBdrsTiXSZT8M4cIwhhqJQZugRiQOwfOHB3EgZxpzAY +XSUnpQIDAQABo4GlMIGiMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/ +MB0GA1UdDgQWBBS2CHsNesysIEyGVjJez6tuhS1wVzA/BgNVHR8EODA2MDSgMqAw +hi5odHRwOi8vd3d3Mi5wdWJsaWMtdHJ1c3QuY29tL2NybC9jdC9jdHJvb3QuY3Js +MB8GA1UdIwQYMBaAFLYIew16zKwgTIZWMl7Pq26FLXBXMA0GCSqGSIb3DQEBBQUA +A4IBAQBW7wojoFROlZfJ+InaRcHUowAl9B8Tq7ejhVhpwjCt2BWKLePJzYFa+HMj +Wqd8BfP9IjsO0QbE2zZMcwSO5bAi5MXzLqXZI+O4Tkogp24CJJ8iYGd7ix1yCcUx +XOl5n4BHPa2hCwcUPUf/A2kaDAtE52Mlp3+yybh2hO0j9n0Hq0V+09+zv+mKts2o +omcrUtW3ZfA5TGOgkXmTUg9U3YO7n9GPp1Nzw8v/MOx8BLjYRB+TX3EJIrduPuoc +A06dGiBh+4E37F78CkWr1+cXVdCg6mCbpvbjjFspwgZgFJ0tl0ypkxWdYcQBX0jW +WL1WMRJOEcgh4LMRkWXbtKaIOM5V +-----END CERTIFICATE----- + +# Issuer: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority +# Subject: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority +# Label: "ePKI Root Certification Authority" +# Serial: 28956088682735189655030529057352760477 +# MD5 Fingerprint: 1b:2e:00:ca:26:06:90:3d:ad:fe:6f:15:68:d3:6b:b3 +# SHA1 Fingerprint: 67:65:0d:f1:7e:8e:7e:5b:82:40:a4:f4:56:4b:cf:e2:3d:69:c6:f0 +# SHA256 Fingerprint: c0:a6:f4:dc:63:a2:4b:fd:cf:54:ef:2a:6a:08:2a:0a:72:de:35:80:3e:2f:f5:ff:52:7a:e5:d8:72:06:df:d5 +-----BEGIN CERTIFICATE----- +MIIFsDCCA5igAwIBAgIQFci9ZUdcr7iXAF7kBtK8nTANBgkqhkiG9w0BAQUFADBe +MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0 +ZC4xKjAoBgNVBAsMIWVQS0kgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe +Fw0wNDEyMjAwMjMxMjdaFw0zNDEyMjAwMjMxMjdaMF4xCzAJBgNVBAYTAlRXMSMw +IQYDVQQKDBpDaHVuZ2h3YSBUZWxlY29tIENvLiwgTHRkLjEqMCgGA1UECwwhZVBL +SSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIICIjANBgkqhkiG9w0BAQEF +AAOCAg8AMIICCgKCAgEA4SUP7o3biDN1Z82tH306Tm2d0y8U82N0ywEhajfqhFAH +SyZbCUNsIZ5qyNUD9WBpj8zwIuQf5/dqIjG3LBXy4P4AakP/h2XGtRrBp0xtInAh +ijHyl3SJCRImHJ7K2RKilTza6We/CKBk49ZCt0Xvl/T29de1ShUCWH2YWEtgvM3X +DZoTM1PRYfl61dd4s5oz9wCGzh1NlDivqOx4UXCKXBCDUSH3ET00hl7lSM2XgYI1 +TBnsZfZrxQWh7kcT1rMhJ5QQCtkkO7q+RBNGMD+XPNjX12ruOzjjK9SXDrkb5wdJ +fzcq+Xd4z1TtW0ado4AOkUPB1ltfFLqfpo0kR0BZv3I4sjZsN/+Z0V0OWQqraffA +sgRFelQArr5T9rXn4fg8ozHSqf4hUmTFpmfwdQcGlBSBVcYn5AGPF8Fqcde+S/uU +WH1+ETOxQvdibBjWzwloPn9s9h6PYq2lY9sJpx8iQkEeb5mKPtf5P0B6ebClAZLS +nT0IFaUQAS2zMnaolQ2zepr7BxB4EW/hj8e6DyUadCrlHJhBmd8hh+iVBmoKs2pH +dmX2Os+PYhcZewoozRrSgx4hxyy/vv9haLdnG7t4TY3OZ+XkwY63I2binZB1NJip +NiuKmpS5nezMirH4JYlcWrYvjB9teSSnUmjDhDXiZo1jDiVN1Rmy5nk3pyKdVDEC +AwEAAaNqMGgwHQYDVR0OBBYEFB4M97Zn8uGSJglFwFU5Lnc/QkqiMAwGA1UdEwQF +MAMBAf8wOQYEZyoHAAQxMC8wLQIBADAJBgUrDgMCGgUAMAcGBWcqAwAABBRFsMLH +ClZ87lt4DJX5GFPBphzYEDANBgkqhkiG9w0BAQUFAAOCAgEACbODU1kBPpVJufGB +uvl2ICO1J2B01GqZNF5sAFPZn/KmsSQHRGoqxqWOeBLoR9lYGxMqXnmbnwoqZ6Yl +PwZpVnPDimZI+ymBV3QGypzqKOg4ZyYr8dW1P2WT+DZdjo2NQCCHGervJ8A9tDkP +JXtoUHRVnAxZfVo9QZQlUgjgRywVMRnVvwdVxrsStZf0X4OFunHB2WyBEXYKCrC/ +gpf36j36+uwtqSiUO1bd0lEursC9CBWMd1I0ltabrNMdjmEPNXubrjlpC2JgQCA2 +j6/7Nu4tCEoduL+bXPjqpRugc6bY+G7gMwRfaKonh+3ZwZCc7b3jajWvY9+rGNm6 +5ulK6lCKD2GTHuItGeIwlDWSXQ62B68ZgI9HkFFLLk3dheLSClIKF5r8GrBQAuUB +o2M3IUxExJtRmREOc5wGj1QupyheRDmHVi03vYVElOEMSyycw5KFNGHLD7ibSkNS +/jQ6fbjpKdx2qcgw+BRxgMYeNkh0IkFch4LoGHGLQYlE535YW6i4jRPpp2zDR+2z 
+Gp1iro2C6pSe3VkQw63d4k3jMdXH7OjysP6SHhYKGvzZ8/gntsm+HbRsZJB/9OTE +W9c3rkIO3aQab3yIVMUWbuF6aC74Or8NpDyJO3inTmODBCEIZ43ygknQW/2xzQ+D +hNQ+IIX3Sj0rnP0qCglN6oH4EZw= +-----END CERTIFICATE----- + +# Issuer: O=certSIGN OU=certSIGN ROOT CA +# Subject: O=certSIGN OU=certSIGN ROOT CA +# Label: "certSIGN ROOT CA" +# Serial: 35210227249154 +# MD5 Fingerprint: 18:98:c0:d6:e9:3a:fc:f9:b0:f5:0c:f7:4b:01:44:17 +# SHA1 Fingerprint: fa:b7:ee:36:97:26:62:fb:2d:b0:2a:f6:bf:03:fd:e8:7c:4b:2f:9b +# SHA256 Fingerprint: ea:a9:62:c4:fa:4a:6b:af:eb:e4:15:19:6d:35:1c:cd:88:8d:4f:53:f3:fa:8a:e6:d7:c4:66:a9:4e:60:42:bb +-----BEGIN CERTIFICATE----- +MIIDODCCAiCgAwIBAgIGIAYFFnACMA0GCSqGSIb3DQEBBQUAMDsxCzAJBgNVBAYT +AlJPMREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBD +QTAeFw0wNjA3MDQxNzIwMDRaFw0zMTA3MDQxNzIwMDRaMDsxCzAJBgNVBAYTAlJP +MREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBDQTCC +ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALczuX7IJUqOtdu0KBuqV5Do +0SLTZLrTk+jUrIZhQGpgV2hUhE28alQCBf/fm5oqrl0Hj0rDKH/v+yv6efHHrfAQ +UySQi2bJqIirr1qjAOm+ukbuW3N7LBeCgV5iLKECZbO9xSsAfsT8AzNXDe3i+s5d +RdY4zTW2ssHQnIFKquSyAVwdj1+ZxLGt24gh65AIgoDzMKND5pCCrlUoSe1b16kQ +OA7+j0xbm0bqQfWwCHTD0IgztnzXdN/chNFDDnU5oSVAKOp4yw4sLjmdjItuFhwv +JoIQ4uNllAoEwF73XVv4EOLQunpL+943AAAaWyjj0pxzPjKHmKHJUS/X3qwzs08C +AwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAcYwHQYDVR0O +BBYEFOCMm9slSbPxfIbWskKHC9BroNnkMA0GCSqGSIb3DQEBBQUAA4IBAQA+0hyJ +LjX8+HXd5n9liPRyTMks1zJO890ZeUe9jjtbkw9QSSQTaxQGcu8J06Gh40CEyecY +MnQ8SG4Pn0vU9x7Tk4ZkVJdjclDVVc/6IJMCopvDI5NOFlV2oHB5bc0hH88vLbwZ +44gx+FkagQnIl6Z0x2DEW8xXjrJ1/RsCCdtZb3KTafcxQdaIOL+Hsr0Wefmq5L6I +Jd1hJyMctTEHBDa0GpC9oHRxUIltvBTjD4au8as+x6AJzKNI0eDbZOeStc+vckNw +i/nDhDwTqn6Sm1dTk/pwwpEOMfmbZ13pljheX7NzTogVZ96edhBiIL5VaZVDADlN +9u6wWk5JRFRYX0KD +-----END CERTIFICATE----- + +# Issuer: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. - For authorized use only +# Subject: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. 
- For authorized use only +# Label: "GeoTrust Primary Certification Authority - G3" +# Serial: 28809105769928564313984085209975885599 +# MD5 Fingerprint: b5:e8:34:36:c9:10:44:58:48:70:6d:2e:83:d4:b8:05 +# SHA1 Fingerprint: 03:9e:ed:b8:0b:e7:a0:3c:69:53:89:3b:20:d2:d9:32:3a:4c:2a:fd +# SHA256 Fingerprint: b4:78:b8:12:25:0d:f8:78:63:5c:2a:a7:ec:7d:15:5e:aa:62:5e:e8:29:16:e2:cd:29:43:61:88:6c:d1:fb:d4 +-----BEGIN CERTIFICATE----- +MIID/jCCAuagAwIBAgIQFaxulBmyeUtB9iepwxgPHzANBgkqhkiG9w0BAQsFADCB +mDELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsT +MChjKSAyMDA4IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25s +eTE2MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhv +cml0eSAtIEczMB4XDTA4MDQwMjAwMDAwMFoXDTM3MTIwMTIzNTk1OVowgZgxCzAJ +BgNVBAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykg +MjAwOCBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0 +BgNVBAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg +LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANziXmJYHTNXOTIz ++uvLh4yn1ErdBojqZI4xmKU4kB6Yzy5jK/BGvESyiaHAKAxJcCGVn2TAppMSAmUm +hsalifD614SgcK9PGpc/BkTVyetyEH3kMSj7HGHmKAdEc5IiaacDiGydY8hS2pgn +5whMcD60yRLBxWeDXTPzAxHsatBT4tG6NmCUgLthY2xbF37fQJQeqw3CIShwiP/W +JmxsYAQlTlV+fe+/lEjetx3dcI0FX4ilm/LC7urRQEFtYjgdVgbFA0dRIBn8exAL +DmKudlW/X3e+PkkBUz2YJQN2JFodtNuJ6nnltrM7P7pMKEF/BqxqjsHQ9gUdfeZC +huOl1UcCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYw +HQYDVR0OBBYEFMR5yo6hTgMdHNxr2zFblD4/MH8tMA0GCSqGSIb3DQEBCwUAA4IB +AQAtxRPPVoB7eni9n64smefv2t+UXglpp+duaIy9cr5HqQ6XErhK8WTTOd8lNNTB +zU6B8A8ExCSzNJbGpqow32hhc9f5joWJ7w5elShKKiePEI4ufIbEAp7aDHdlDkQN +kv39sxY2+hENHYwOB4lqKVb3cvTdFZx3NWZXqxNT2I7BQMXXExZacse3aQHEerGD +AWh9jUGhlBjBJVz88P6DAod8DQ3PLghcSkANPuyBYeYk28rgDi0Hsj5W3I31QYUH +SJsMC8tJP33st/3LjWeJGqvtux6jAAgIFyqCXDFdRootD4abdNlF+9RAsXqqaC2G +spki4cErx5z481+oghLrGREt +-----END CERTIFICATE----- + +# Issuer: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only +# Subject: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only +# Label: "thawte Primary Root CA - G2" +# Serial: 71758320672825410020661621085256472406 +# MD5 Fingerprint: 74:9d:ea:60:24:c4:fd:22:53:3e:cc:3a:72:d9:29:4f +# SHA1 Fingerprint: aa:db:bc:22:23:8f:c4:01:a1:27:bb:38:dd:f4:1d:db:08:9e:f0:12 +# SHA256 Fingerprint: a4:31:0d:50:af:18:a6:44:71:90:37:2a:86:af:af:8b:95:1f:fb:43:1d:83:7f:1e:56:88:b4:59:71:ed:15:57 +-----BEGIN CERTIFICATE----- +MIICiDCCAg2gAwIBAgIQNfwmXNmET8k9Jj1Xm67XVjAKBggqhkjOPQQDAzCBhDEL +MAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjE4MDYGA1UECxMvKGMp +IDIwMDcgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAi +BgNVBAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMjAeFw0wNzExMDUwMDAw +MDBaFw0zODAxMTgyMzU5NTlaMIGEMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhh +d3RlLCBJbmMuMTgwNgYDVQQLEy8oYykgMjAwNyB0aGF3dGUsIEluYy4gLSBGb3Ig +YXV0aG9yaXplZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9v +dCBDQSAtIEcyMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEotWcgnuVnfFSeIf+iha/ +BebfowJPDQfGAFG6DAJSLSKkQjnE/o/qycG+1E3/n3qe4rF8mq2nhglzh9HnmuN6 +papu+7qzcMBniKI11KOasf2twu8x+qi58/sIxpHR+ymVo0IwQDAPBgNVHRMBAf8E +BTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUmtgAMADna3+FGO6Lts6K +DPgR4bswCgYIKoZIzj0EAwMDaQAwZgIxAN344FdHW6fmCsO99YCKlzUNG4k8VIZ3 +KMqh9HneteY4sPBlcIx/AlTCv//YoT7ZzwIxAMSNlPzcU9LcnXgWHxUzI1NS41ox +XZ3Krr0TKUQNJ1uo52icEvdYPy5yAlejj6EULg== +-----END CERTIFICATE----- + +# Issuer: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. 
- For authorized use only +# Subject: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. - For authorized use only +# Label: "thawte Primary Root CA - G3" +# Serial: 127614157056681299805556476275995414779 +# MD5 Fingerprint: fb:1b:5d:43:8a:94:cd:44:c6:76:f2:43:4b:47:e7:31 +# SHA1 Fingerprint: f1:8b:53:8d:1b:e9:03:b6:a6:f0:56:43:5b:17:15:89:ca:f3:6b:f2 +# SHA256 Fingerprint: 4b:03:f4:58:07:ad:70:f2:1b:fc:2c:ae:71:c9:fd:e4:60:4c:06:4c:f5:ff:b6:86:ba:e5:db:aa:d7:fd:d3:4c +-----BEGIN CERTIFICATE----- +MIIEKjCCAxKgAwIBAgIQYAGXt0an6rS0mtZLL/eQ+zANBgkqhkiG9w0BAQsFADCB +rjELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf +Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw +MDggdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAiBgNV +BAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMzAeFw0wODA0MDIwMDAwMDBa +Fw0zNzEyMDEyMzU5NTlaMIGuMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhhd3Rl +LCBJbmMuMSgwJgYDVQQLEx9DZXJ0aWZpY2F0aW9uIFNlcnZpY2VzIERpdmlzaW9u +MTgwNgYDVQQLEy8oYykgMjAwOCB0aGF3dGUsIEluYy4gLSBGb3IgYXV0aG9yaXpl +ZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9vdCBDQSAtIEcz +MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsr8nLPvb2FvdeHsbnndm +gcs+vHyu86YnmjSjaDFxODNi5PNxZnmxqWWjpYvVj2AtP0LMqmsywCPLLEHd5N/8 +YZzic7IilRFDGF/Eth9XbAoFWCLINkw6fKXRz4aviKdEAhN0cXMKQlkC+BsUa0Lf +b1+6a4KinVvnSr0eAXLbS3ToO39/fR8EtCab4LRarEc9VbjXsCZSKAExQGbY2SS9 +9irY7CFJXJv2eul/VTV+lmuNk5Mny5K76qxAwJ/C+IDPXfRa3M50hqY+bAtTyr2S +zhkGcuYMXDhpxwTWvGzOW/b3aJzcJRVIiKHpqfiYnODz1TEoYRFsZ5aNOZnLwkUk +OQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNV +HQ4EFgQUrWyqlGCc7eT/+j4KdCtjA/e2Wb8wDQYJKoZIhvcNAQELBQADggEBABpA +2JVlrAmSicY59BDlqQ5mU1143vokkbvnRFHfxhY0Cu9qRFHqKweKA3rD6z8KLFIW +oCtDuSWQP3CpMyVtRRooOyfPqsMpQhvfO0zAMzRbQYi/aytlryjvsvXDqmbOe1bu +t8jLZ8HJnBoYuMTDSQPxYA5QzUbF83d597YV4Djbxy8ooAw/dyZ02SUS2jHaGh7c +KUGRIjxpp7sC8rZcJwOJ9Abqm+RyguOhCcHpABnTPtRwa7pxpqpYrvS76Wy274fM +m7v/OeZWYdMKp8RcTGB7BXcmer/YB1IsYvdwY9k5vG8cwnncdimvzsUsZAReiDZu +MdRAGmI0Nj81Aa6sY6A= +-----END CERTIFICATE----- + +# Issuer: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. - For authorized use only +# Subject: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. 
- For authorized use only +# Label: "GeoTrust Primary Certification Authority - G2" +# Serial: 80682863203381065782177908751794619243 +# MD5 Fingerprint: 01:5e:d8:6b:bd:6f:3d:8e:a1:31:f8:12:e0:98:73:6a +# SHA1 Fingerprint: 8d:17:84:d5:37:f3:03:7d:ec:70:fe:57:8b:51:9a:99:e6:10:d7:b0 +# SHA256 Fingerprint: 5e:db:7a:c4:3b:82:a0:6a:87:61:e8:d7:be:49:79:eb:f2:61:1f:7d:d7:9b:f9:1c:1c:6b:56:6a:21:9e:d7:66 +-----BEGIN CERTIFICATE----- +MIICrjCCAjWgAwIBAgIQPLL0SAoA4v7rJDteYD7DazAKBggqhkjOPQQDAzCBmDEL +MAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsTMChj +KSAyMDA3IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25seTE2 +MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0 +eSAtIEcyMB4XDTA3MTEwNTAwMDAwMFoXDTM4MDExODIzNTk1OVowgZgxCzAJBgNV +BAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykgMjAw +NyBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0BgNV +BAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBH +MjB2MBAGByqGSM49AgEGBSuBBAAiA2IABBWx6P0DFUPlrOuHNxFi79KDNlJ9RVcL +So17VDs6bl8VAsBQps8lL33KSLjHUGMcKiEIfJo22Av+0SbFWDEwKCXzXV2juLal +tJLtbCyf691DiaI8S0iRHVDsJt/WYC69IaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO +BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBVfNVdRVfslsq0DafwBo/q+EVXVMAoG +CCqGSM49BAMDA2cAMGQCMGSWWaboCd6LuvpaiIjwH5HTRqjySkwCY/tsXzjbLkGT +qQ7mndwxHLKgpxgceeHHNgIwOlavmnRs9vuD4DPTCF+hnMJbn0bWtsuRBmOiBucz +rD6ogRLQy7rQkgu2npaqBA+K +-----END CERTIFICATE----- + +# Issuer: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only +# Subject: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only +# Label: "VeriSign Universal Root Certification Authority" +# Serial: 85209574734084581917763752644031726877 +# MD5 Fingerprint: 8e:ad:b5:01:aa:4d:81:e4:8c:1d:d1:e1:14:00:95:19 +# SHA1 Fingerprint: 36:79:ca:35:66:87:72:30:4d:30:a5:fb:87:3b:0f:a7:7b:b7:0d:54 +# SHA256 Fingerprint: 23:99:56:11:27:a5:71:25:de:8c:ef:ea:61:0d:df:2f:a0:78:b5:c8:06:7f:4e:82:82:90:bf:b8:60:e8:4b:3c +-----BEGIN CERTIFICATE----- +MIIEuTCCA6GgAwIBAgIQQBrEZCGzEyEDDrvkEhrFHTANBgkqhkiG9w0BAQsFADCB +vTELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL +ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwOCBWZXJp +U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MTgwNgYDVQQDEy9W +ZXJpU2lnbiBVbml2ZXJzYWwgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe +Fw0wODA0MDIwMDAwMDBaFw0zNzEyMDEyMzU5NTlaMIG9MQswCQYDVQQGEwJVUzEX +MBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlTaWduIFRydXN0 +IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAyMDA4IFZlcmlTaWduLCBJbmMuIC0gRm9y +IGF1dGhvcml6ZWQgdXNlIG9ubHkxODA2BgNVBAMTL1ZlcmlTaWduIFVuaXZlcnNh +bCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEF +AAOCAQ8AMIIBCgKCAQEAx2E3XrEBNNti1xWb/1hajCMj1mCOkdeQmIN65lgZOIzF +9uVkhbSicfvtvbnazU0AtMgtc6XHaXGVHzk8skQHnOgO+k1KxCHfKWGPMiJhgsWH +H26MfF8WIFFE0XBPV+rjHOPMee5Y2A7Cs0WTwCznmhcrewA3ekEzeOEz4vMQGn+H +LL729fdC4uW/h2KJXwBL38Xd5HVEMkE6HnFuacsLdUYI0crSK5XQz/u5QGtkjFdN +/BMReYTtXlT2NJ8IAfMQJQYXStrxHXpma5hgZqTZ79IugvHw7wnqRMkVauIDbjPT +rJ9VAMf2CGqUuV/c4DPxhGD5WycRtPwW8rtWaoAljQIDAQABo4GyMIGvMA8GA1Ud +EwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMG0GCCsGAQUFBwEMBGEwX6FdoFsw +WTBXMFUWCWltYWdlL2dpZjAhMB8wBwYFKw4DAhoEFI/l0xqGrI2Oa8PPgGrUSBgs +exkuMCUWI2h0dHA6Ly9sb2dvLnZlcmlzaWduLmNvbS92c2xvZ28uZ2lmMB0GA1Ud +DgQWBBS2d/ppSEefUxLVwuoHMnYH0ZcHGTANBgkqhkiG9w0BAQsFAAOCAQEASvj4 +sAPmLGd75JR3Y8xuTPl9Dg3cyLk1uXBPY/ok+myDjEedO2Pzmvl2MpWRsXe8rJq+ 
+seQxIcaBlVZaDrHC1LGmWazxY8u4TB1ZkErvkBYoH1quEPuBUDgMbMzxPcP1Y+Oz +4yHJJDnp/RVmRvQbEdBNc6N9Rvk97ahfYtTxP/jgdFcrGJ2BtMQo2pSXpXDrrB2+ +BxHw1dvd5Yzw1TKwg+ZX4o+/vqGqvz0dtdQ46tewXDpPaj+PwGZsY6rp2aQW9IHR +lRQOfc2VNNnSj3BzgXucfr2YYdhFh5iQxeuGMMY1v/D/w1WIg0vvBZIGcfK4mJO3 +7M2CYfE45k+XmCpajQ== +-----END CERTIFICATE----- + +# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only +# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only +# Label: "VeriSign Class 3 Public Primary Certification Authority - G4" +# Serial: 63143484348153506665311985501458640051 +# MD5 Fingerprint: 3a:52:e1:e7:fd:6f:3a:e3:6f:f3:6f:99:1b:f9:22:41 +# SHA1 Fingerprint: 22:d5:d8:df:8f:02:31:d1:8d:f7:9d:b7:cf:8a:2d:64:c9:3f:6c:3a +# SHA256 Fingerprint: 69:dd:d7:ea:90:bb:57:c9:3e:13:5d:c8:5e:a6:fc:d5:48:0b:60:32:39:bd:c4:54:fc:75:8b:2a:26:cf:7f:79 +-----BEGIN CERTIFICATE----- +MIIDhDCCAwqgAwIBAgIQL4D+I4wOIg9IZxIokYesszAKBggqhkjOPQQDAzCByjEL +MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW +ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2ln +biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp +U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y +aXR5IC0gRzQwHhcNMDcxMTA1MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCByjELMAkG +A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZWZXJp +U2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2lnbiwg +SW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJpU2ln +biBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5 +IC0gRzQwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAASnVnp8Utpkmw4tXNherJI9/gHm +GUo9FANL+mAnINmDiWn6VMaaGF5VKmTeBvaNSjutEDxlPZCIBIngMGGzrl0Bp3ve +fLK+ymVhAIau2o970ImtTR1ZmkGxvEeA3J5iw/mjgbIwga8wDwYDVR0TAQH/BAUw +AwEB/zAOBgNVHQ8BAf8EBAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJ +aW1hZ2UvZ2lmMCEwHzAHBgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYj +aHR0cDovL2xvZ28udmVyaXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFLMW +kf3upm7ktS5Jj4d4gYDs5bG1MAoGCCqGSM49BAMDA2gAMGUCMGYhDBgmYFo4e1ZC +4Kf8NoRRkSAsdk1DPcQdhCPQrNZ8NQbOzWm9kA3bbEhCHQ6qQgIxAJw9SDkjOVga +FRJZap7v1VmyHVIsmXHNxynfGyphe3HR3vPA5Q06Sqotp9iGKt0uEA== +-----END CERTIFICATE----- + +# Issuer: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services) +# Subject: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. 
OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services) +# Label: "NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny" +# Serial: 80544274841616 +# MD5 Fingerprint: c5:a1:b7:ff:73:dd:d6:d7:34:32:18:df:fc:3c:ad:88 +# SHA1 Fingerprint: 06:08:3f:59:3f:15:a1:04:a0:69:a4:6b:a9:03:d0:06:b7:97:09:91 +# SHA256 Fingerprint: 6c:61:da:c3:a2:de:f0:31:50:6b:e0:36:d2:a6:fe:40:19:94:fb:d1:3d:f9:c8:d4:66:59:92:74:c4:46:ec:98 +-----BEGIN CERTIFICATE----- +MIIEFTCCAv2gAwIBAgIGSUEs5AAQMA0GCSqGSIb3DQEBCwUAMIGnMQswCQYDVQQG +EwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFTATBgNVBAoMDE5ldExvY2sgS2Z0LjE3 +MDUGA1UECwwuVGFuw7pzw610dsOhbnlraWFkw7NrIChDZXJ0aWZpY2F0aW9uIFNl +cnZpY2VzKTE1MDMGA1UEAwwsTmV0TG9jayBBcmFueSAoQ2xhc3MgR29sZCkgRsWR +dGFuw7pzw610dsOhbnkwHhcNMDgxMjExMTUwODIxWhcNMjgxMjA2MTUwODIxWjCB +pzELMAkGA1UEBhMCSFUxETAPBgNVBAcMCEJ1ZGFwZXN0MRUwEwYDVQQKDAxOZXRM +b2NrIEtmdC4xNzA1BgNVBAsMLlRhbsO6c8OtdHbDoW55a2lhZMOzayAoQ2VydGlm +aWNhdGlvbiBTZXJ2aWNlcykxNTAzBgNVBAMMLE5ldExvY2sgQXJhbnkgKENsYXNz +IEdvbGQpIEbFkXRhbsO6c8OtdHbDoW55MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A +MIIBCgKCAQEAxCRec75LbRTDofTjl5Bu0jBFHjzuZ9lk4BqKf8owyoPjIMHj9DrT +lF8afFttvzBPhCf2nx9JvMaZCpDyD/V/Q4Q3Y1GLeqVw/HpYzY6b7cNGbIRwXdrz +AZAj/E4wqX7hJ2Pn7WQ8oLjJM2P+FpD/sLj916jAwJRDC7bVWaaeVtAkH3B5r9s5 +VA1lddkVQZQBr17s9o3x/61k/iCa11zr/qYfCGSji3ZVrR47KGAuhyXoqq8fxmRG +ILdwfzzeSNuWU7c5d+Qa4scWhHaXWy+7GRWF+GmF9ZmnqfI0p6m2pgP8b4Y9VHx2 +BJtr+UBdADTHLpl1neWIA6pN+APSQnbAGwIDAKiLo0UwQzASBgNVHRMBAf8ECDAG +AQH/AgEEMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUzPpnk/C2uNClwB7zU/2M +U9+D15YwDQYJKoZIhvcNAQELBQADggEBAKt/7hwWqZw8UQCgwBEIBaeZ5m8BiFRh +bvG5GK1Krf6BQCOUL/t1fC8oS2IkgYIL9WHxHG64YTjrgfpioTtaYtOUZcTh5m2C ++C8lcLIhJsFyUR+MLMOEkMNaj7rP9KdlpeuY0fsFskZ1FSNqb4VjMIDw1Z4fKRzC +bLBQWV2QWzuoDTDPv31/zvGdg73JRm4gpvlhUbohL3u+pRVjodSVh/GeufOJ8z2F +uLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2 +XjG4Kvte9nHfRCaexOYNkbQudZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E= +-----END CERTIFICATE----- + +# Issuer: CN=Staat der Nederlanden Root CA - G2 O=Staat der Nederlanden +# Subject: CN=Staat der Nederlanden Root CA - G2 O=Staat der Nederlanden +# Label: "Staat der Nederlanden Root CA - G2" +# Serial: 10000012 +# MD5 Fingerprint: 7c:a5:0f:f8:5b:9a:7d:6d:30:ae:54:5a:e3:42:a2:8a +# SHA1 Fingerprint: 59:af:82:79:91:86:c7:b4:75:07:cb:cf:03:57:46:eb:04:dd:b7:16 +# SHA256 Fingerprint: 66:8c:83:94:7d:a6:3b:72:4b:ec:e1:74:3c:31:a0:e6:ae:d0:db:8e:c5:b3:1b:e3:77:bb:78:4f:91:b6:71:6f +-----BEGIN CERTIFICATE----- +MIIFyjCCA7KgAwIBAgIEAJiWjDANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJO +TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSswKQYDVQQDDCJTdGFh +dCBkZXIgTmVkZXJsYW5kZW4gUm9vdCBDQSAtIEcyMB4XDTA4MDMyNjExMTgxN1oX +DTIwMDMyNTExMDMxMFowWjELMAkGA1UEBhMCTkwxHjAcBgNVBAoMFVN0YWF0IGRl +ciBOZWRlcmxhbmRlbjErMCkGA1UEAwwiU3RhYXQgZGVyIE5lZGVybGFuZGVuIFJv +b3QgQ0EgLSBHMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMVZ5291 +qj5LnLW4rJ4L5PnZyqtdj7U5EILXr1HgO+EASGrP2uEGQxGZqhQlEq0i6ABtQ8Sp +uOUfiUtnvWFI7/3S4GCI5bkYYCjDdyutsDeqN95kWSpGV+RLufg3fNU254DBtvPU +Z5uW6M7XxgpT0GtJlvOjCwV3SPcl5XCsMBQgJeN/dVrlSPhOewMHBPqCYYdu8DvE +pMfQ9XQ+pV0aCPKbJdL2rAQmPlU6Yiile7Iwr/g3wtG61jj99O9JMDeZJiFIhQGp +5Rbn3JBV3w/oOM2ZNyFPXfUib2rFEhZgF1XyZWampzCROME4HYYEhLoaJXhena/M +UGDWE4dS7WMfbWV9whUYdMrhfmQpjHLYFhN9C0lK8SgbIHRrxT3dsKpICT0ugpTN +GmXZK4iambwYfp/ufWZ8Pr2UuIHOzZgweMFvZ9C+X+Bo7d7iscksWXiSqt8rYGPy +5V6548r6f1CGPqI0GAwJaCgRHOThuVw+R7oyPxjMW4T182t0xHJ04eOLoEq9jWYv +6q012iDTiIJh8BIitrzQ1aTsr1SIJSQ8p22xcik/Plemf1WvbibG/ufMQFxRRIEK +eN5KzlW/HdXZt1bv8Hb/C3m1r737qWmRRpdogBQ2HbN/uymYNqUg+oJgYjOk7Na6 +B6duxc8UpufWkjTYgfX8HV2qXB72o007uPc5AgMBAAGjgZcwgZQwDwYDVR0TAQH/ 
+BAUwAwEB/zBSBgNVHSAESzBJMEcGBFUdIAAwPzA9BggrBgEFBQcCARYxaHR0cDov +L3d3dy5wa2lvdmVyaGVpZC5ubC9wb2xpY2llcy9yb290LXBvbGljeS1HMjAOBgNV +HQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJFoMocVHYnitfGsNig0jQt8YojrMA0GCSqG +SIb3DQEBCwUAA4ICAQCoQUpnKpKBglBu4dfYszk78wIVCVBR7y29JHuIhjv5tLyS +CZa59sCrI2AGeYwRTlHSeYAz+51IvuxBQ4EffkdAHOV6CMqqi3WtFMTC6GY8ggen +5ieCWxjmD27ZUD6KQhgpxrRW/FYQoAUXvQwjf/ST7ZwaUb7dRUG/kSS0H4zpX897 +IZmflZ85OkYcbPnNe5yQzSipx6lVu6xiNGI1E0sUOlWDuYaNkqbG9AclVMwWVxJK +gnjIFNkXgiYtXSAfea7+1HAWFpWD2DU5/1JddRwWxRNVz0fMdWVSSt7wsKfkCpYL ++63C4iWEst3kvX5ZbJvw8NjnyvLplzh+ib7M+zkXYT9y2zqR2GUBGR2tUKRXCnxL +vJxxcypFURmFzI79R6d0lR2o0a9OF7FpJsKqeFdbxU2n5Z4FF5TKsl+gSRiNNOkm +bEgeqmiSBeGCc1qb3AdbCG19ndeNIdn8FCCqwkXfP+cAslHkwvgFuXkajDTznlvk +N1trSt8sV4pAWja63XVECDdCcAz+3F4hoKOKwJCcaNpQ5kUQR3i2TtJlycM33+FC +Y7BXN0Ute4qcvwXqZVUz9zkQxSgqIXobisQk+T8VyJoVIPVVYpbtbZNQvOSqeK3Z +ywplh6ZmwcSBo3c6WB4L7oOLnR7SUqTMHW+wmG2UMbX4cQrcufx9MmDm66+KAQ== +-----END CERTIFICATE----- + +# Issuer: CN=Hongkong Post Root CA 1 O=Hongkong Post +# Subject: CN=Hongkong Post Root CA 1 O=Hongkong Post +# Label: "Hongkong Post Root CA 1" +# Serial: 1000 +# MD5 Fingerprint: a8:0d:6f:39:78:b9:43:6d:77:42:6d:98:5a:cc:23:ca +# SHA1 Fingerprint: d6:da:a8:20:8d:09:d2:15:4d:24:b5:2f:cb:34:6e:b2:58:b2:8a:58 +# SHA256 Fingerprint: f9:e6:7d:33:6c:51:00:2a:c0:54:c6:32:02:2d:66:dd:a2:e7:e3:ff:f1:0a:d0:61:ed:31:d8:bb:b4:10:cf:b2 +-----BEGIN CERTIFICATE----- +MIIDMDCCAhigAwIBAgICA+gwDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UEBhMCSEsx +FjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdrb25nIFBvc3Qg +Um9vdCBDQSAxMB4XDTAzMDUxNTA1MTMxNFoXDTIzMDUxNTA0NTIyOVowRzELMAkG +A1UEBhMCSEsxFjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdr +b25nIFBvc3QgUm9vdCBDQSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEArP84tulmAknjorThkPlAj3n54r15/gK97iSSHSL22oVyaf7XPwnU3ZG1ApzQ +jVrhVcNQhrkpJsLj2aDxaQMoIIBFIi1WpztUlVYiWR8o3x8gPW2iNr4joLFutbEn +PzlTCeqrauh0ssJlXI6/fMN4hM2eFvz1Lk8gKgifd/PFHsSaUmYeSF7jEAaPIpjh +ZY4bXSNmO7ilMlHIhqqhqZ5/dpTCpmy3QfDVyAY45tQM4vM7TG1QjMSDJ8EThFk9 +nnV0ttgCXjqQesBCNnLsak3c78QA3xMYV18meMjWCnl3v/evt3a5pQuEF10Q6m/h +q5URX208o1xNg1vysxmKgIsLhwIDAQABoyYwJDASBgNVHRMBAf8ECDAGAQH/AgED +MA4GA1UdDwEB/wQEAwIBxjANBgkqhkiG9w0BAQUFAAOCAQEADkbVPK7ih9legYsC +mEEIjEy82tvuJxuC52pF7BaLT4Wg87JwvVqWuspube5Gi27nKi6Wsxkz67SfqLI3 +7piol7Yutmcn1KZJ/RyTZXaeQi/cImyaT/JaFTmxcdcrUehtHJjA2Sr0oYJ71clB +oiMBdDhViw+5LmeiIAQ32pwL0xch4I+XeTRvhEgCIDMb5jREn5Fw9IBehEPCKdJs +EhTkYY2sEJCehFC78JZvRZ+K88psT/oROhUVRsPNH4NbLUES7VBnQRM9IauUiqpO +fMGx+6fWtScvl6tu4B3i0RwsH0Ti/L6RoZz71ilTc4afU9hDDl3WY4JxHYB0yvbi +AmvZWg== +-----END CERTIFICATE----- + +# Issuer: CN=SecureSign RootCA11 O=Japan Certification Services, Inc. +# Subject: CN=SecureSign RootCA11 O=Japan Certification Services, Inc. 
+# Label: "SecureSign RootCA11" +# Serial: 1 +# MD5 Fingerprint: b7:52:74:e2:92:b4:80:93:f2:75:e4:cc:d7:f2:ea:26 +# SHA1 Fingerprint: 3b:c4:9f:48:f8:f3:73:a0:9c:1e:bd:f8:5b:b1:c3:65:c7:d8:11:b3 +# SHA256 Fingerprint: bf:0f:ee:fb:9e:3a:58:1a:d5:f9:e9:db:75:89:98:57:43:d2:61:08:5c:4d:31:4f:6f:5d:72:59:aa:42:16:12 +-----BEGIN CERTIFICATE----- +MIIDbTCCAlWgAwIBAgIBATANBgkqhkiG9w0BAQUFADBYMQswCQYDVQQGEwJKUDEr +MCkGA1UEChMiSmFwYW4gQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcywgSW5jLjEcMBoG +A1UEAxMTU2VjdXJlU2lnbiBSb290Q0ExMTAeFw0wOTA0MDgwNDU2NDdaFw0yOTA0 +MDgwNDU2NDdaMFgxCzAJBgNVBAYTAkpQMSswKQYDVQQKEyJKYXBhbiBDZXJ0aWZp +Y2F0aW9uIFNlcnZpY2VzLCBJbmMuMRwwGgYDVQQDExNTZWN1cmVTaWduIFJvb3RD +QTExMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA/XeqpRyQBTvLTJsz +i1oURaTnkBbR31fSIRCkF/3frNYfp+TbfPfs37gD2pRY/V1yfIw/XwFndBWW4wI8 +h9uuywGOwvNmxoVF9ALGOrVisq/6nL+k5tSAMJjzDbaTj6nU2DbysPyKyiyhFTOV +MdrAG/LuYpmGYz+/3ZMqg6h2uRMft85OQoWPIucuGvKVCbIFtUROd6EgvanyTgp9 +UK31BQ1FT0Zx/Sg+U/sE2C3XZR1KG/rPO7AxmjVuyIsG0wCR8pQIZUyxNAYAeoni +8McDWc/V1uinMrPmmECGxc0nEovMe863ETxiYAcjPitAbpSACW22s293bzUIUPsC +h8U+iQIDAQABo0IwQDAdBgNVHQ4EFgQUW/hNT7KlhtQ60vFjmqC+CfZXt94wDgYD +VR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEB +AKChOBZmLqdWHyGcBvod7bkixTgm2E5P7KN/ed5GIaGHd48HCJqypMWvDzKYC3xm +KbabfSVSSUOrTC4rbnpwrxYO4wJs+0LmGJ1F2FXI6Dvd5+H0LgscNFxsWEr7jIhQ +X5Ucv+2rIrVls4W6ng+4reV6G4pQOh29Dbx7VFALuUKvVaAYga1lme++5Jy/xIWr +QbJUb9wlze144o4MjQlJ3WN7WmmWAiGovVJZ6X01y8hSyn+B/tlr0/cR7SXf+Of5 +pPpyl4RTDaXQMhhRdlkUbA/r7F+AjHVDg8OFmP9Mni0N5HeDk061lgeLKBObjBmN +QSdJQO7e5iNEOdyhIta6A/I= +-----END CERTIFICATE----- + +# Issuer: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. +# Subject: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. +# Label: "Microsec e-Szigno Root CA 2009" +# Serial: 14014712776195784473 +# MD5 Fingerprint: f8:49:f4:03:bc:44:2d:83:be:48:69:7d:29:64:fc:b1 +# SHA1 Fingerprint: 89:df:74:fe:5c:f4:0f:4a:80:f9:e3:37:7d:54:da:91:e1:01:31:8e +# SHA256 Fingerprint: 3c:5f:81:fe:a5:fa:b8:2c:64:bf:a2:ea:ec:af:cd:e8:e0:77:fc:86:20:a7:ca:e5:37:16:3d:f3:6e:db:f3:78 +-----BEGIN CERTIFICATE----- +MIIECjCCAvKgAwIBAgIJAMJ+QwRORz8ZMA0GCSqGSIb3DQEBCwUAMIGCMQswCQYD +VQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFjAUBgNVBAoMDU1pY3Jvc2VjIEx0 +ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3ppZ25vIFJvb3QgQ0EgMjAwOTEfMB0G +CSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5odTAeFw0wOTA2MTYxMTMwMThaFw0y +OTEyMzAxMTMwMThaMIGCMQswCQYDVQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3Qx +FjAUBgNVBAoMDU1pY3Jvc2VjIEx0ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3pp +Z25vIFJvb3QgQ0EgMjAwOTEfMB0GCSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5o +dTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAOn4j/NjrdqG2KfgQvvP +kd6mJviZpWNwrZuuyjNAfW2WbqEORO7hE52UQlKavXWFdCyoDh2Tthi3jCyoz/tc +cbna7P7ofo/kLx2yqHWH2Leh5TvPmUpG0IMZfcChEhyVbUr02MelTTMuhTlAdX4U +fIASmFDHQWe4oIBhVKZsTh/gnQ4H6cm6M+f+wFUoLAKApxn1ntxVUwOXewdI/5n7 +N4okxFnMUBBjjqqpGrCEGob5X7uxUG6k0QrM1XF+H6cbfPVTbiJfyyvm1HxdrtbC +xkzlBQHZ7Vf8wSN5/PrIJIOV87VqUQHQd9bpEqH5GoP7ghu5sJf0dgYzQ0mg/wu1 ++rUCAwEAAaOBgDB+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G +A1UdDgQWBBTLD8bfQkPMPcu1SCOhGnqmKrs0aDAfBgNVHSMEGDAWgBTLD8bfQkPM +Pcu1SCOhGnqmKrs0aDAbBgNVHREEFDASgRBpbmZvQGUtc3ppZ25vLmh1MA0GCSqG +SIb3DQEBCwUAA4IBAQDJ0Q5eLtXMs3w+y/w9/w0olZMEyL/azXm4Q5DwpL7v8u8h +mLzU1F0G9u5C7DBsoKqpyvGvivo/C3NqPuouQH4frlRheesuCDfXI/OMn74dseGk +ddug4lQUsbocKaQY9hK6ohQU4zE1yED/t+AFdlfBHFny+L/k7SViXITwfn4fs775 +tyERzAMBVnCnEJIeGzSBHq2cGsMEPO0CYdYeBvNfOofyK/FFh+U9rNHHV4S9a67c +2Pm2G2JwCz02yULyMtd6YebS2z3PyKnJm9zbWETXbzivf3jTo60adbocwTZ8jx5t +HMN1Rq41Bab2XD0h7lbwyYIiLXpUq3DDfSJlgnCW +-----END CERTIFICATE----- + +# Issuer: 
CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 +# Label: "GlobalSign Root CA - R3" +# Serial: 4835703278459759426209954 +# MD5 Fingerprint: c5:df:b8:49:ca:05:13:55:ee:2d:ba:1a:c3:3e:b0:28 +# SHA1 Fingerprint: d6:9b:56:11:48:f0:1c:77:c5:45:78:c1:09:26:df:5b:85:69:76:ad +# SHA256 Fingerprint: cb:b5:22:d7:b7:f1:27:ad:6a:01:13:86:5b:df:1c:d4:10:2e:7d:07:59:af:63:5a:7c:f4:72:0d:c9:63:c5:3b +-----BEGIN CERTIFICATE----- +MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4G +A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNp +Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4 +MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEG +A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8 +RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsT +gHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmm +KPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zd +QQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZ +XriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAw +DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+o +LkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZU +RUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMp +jjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK +6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQX +mcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecs +Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH +WD9f +-----END CERTIFICATE----- + +# Issuer: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 +# Subject: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 +# Label: "Autoridad de Certificacion Firmaprofesional CIF A62634068" +# Serial: 6047274297262753887 +# MD5 Fingerprint: 73:3a:74:7a:ec:bb:a3:96:a6:c2:e4:e2:c8:9b:c0:c3 +# SHA1 Fingerprint: ae:c5:fb:3f:c8:e1:bf:c4:e5:4f:03:07:5a:9a:e8:00:b7:f7:b6:fa +# SHA256 Fingerprint: 04:04:80:28:bf:1f:28:64:d4:8f:9a:d4:d8:32:94:36:6a:82:88:56:55:3f:3b:14:30:3f:90:14:7f:5d:40:ef +-----BEGIN CERTIFICATE----- +MIIGFDCCA/ygAwIBAgIIU+w77vuySF8wDQYJKoZIhvcNAQEFBQAwUTELMAkGA1UE +BhMCRVMxQjBABgNVBAMMOUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1h +cHJvZmVzaW9uYWwgQ0lGIEE2MjYzNDA2ODAeFw0wOTA1MjAwODM4MTVaFw0zMDEy +MzEwODM4MTVaMFExCzAJBgNVBAYTAkVTMUIwQAYDVQQDDDlBdXRvcmlkYWQgZGUg +Q2VydGlmaWNhY2lvbiBGaXJtYXByb2Zlc2lvbmFsIENJRiBBNjI2MzQwNjgwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKlmuO6vj78aI14H9M2uDDUtd9 +thDIAl6zQyrET2qyyhxdKJp4ERppWVevtSBC5IsP5t9bpgOSL/UR5GLXMnE42QQM +cas9UX4PB99jBVzpv5RvwSmCwLTaUbDBPLutN0pcyvFLNg4kq7/DhHf9qFD0sefG +L9ItWY16Ck6WaVICqjaY7Pz6FIMMNx/Jkjd/14Et5cS54D40/mf0PmbR0/RAz15i +NA9wBj4gGFrO93IbJWyTdBSTo3OxDqqHECNZXyAFGUftaI6SEspd/NYrspI8IM/h +X68gvqB2f3bl7BqGYTM+53u0P6APjqK5am+5hyZvQWyIplD9amML9ZMWGxmPsu2b +m8mQ9QEM3xk9Dz44I8kvjwzRAv4bVdZO0I08r0+k8/6vKtMFnXkIoctXMbScyJCy +Z/QYFpM6/EfY0XiWMR+6KwxfXZmtY4laJCB22N/9q06mIqqdXuYnin1oKaPnirja +EbsXLZmdEyRG98Xi2J+Of8ePdG1asuhy9azuJBCtLxTa/y2aRnFHvkLfuwHb9H/T +KI8xWVvTyQKmtFLKbpf7Q8UIJm+K9Lv9nyiqDdVF8xM6HdjAeI9BZzwelGSuewvF +6NkBiDkal4ZkQdU7hwxu+g/GvUgUvzlN1J5Bto+WHWOWk9mVBngxaJ43BjuAiUVh +OSPHG0SjFeUc+JIwuwIDAQABo4HvMIHsMBIGA1UdEwEB/wQIMAYBAf8CAQEwDgYD +VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRlzeurNR4APn7VdMActHNHDhpkLzCBpgYD +VR0gBIGeMIGbMIGYBgRVHSAAMIGPMC8GCCsGAQUFBwIBFiNodHRwOi8vd3d3LmZp 
+cm1hcHJvZmVzaW9uYWwuY29tL2NwczBcBggrBgEFBQcCAjBQHk4AUABhAHMAZQBv +ACAAZABlACAAbABhACAAQgBvAG4AYQBuAG8AdgBhACAANAA3ACAAQgBhAHIAYwBl +AGwAbwBuAGEAIAAwADgAMAAxADcwDQYJKoZIhvcNAQEFBQADggIBABd9oPm03cXF +661LJLWhAqvdpYhKsg9VSytXjDvlMd3+xDLx51tkljYyGOylMnfX40S2wBEqgLk9 +am58m9Ot/MPWo+ZkKXzR4Tgegiv/J2Wv+xYVxC5xhOW1//qkR71kMrv2JYSiJ0L1 +ILDCExARzRAVukKQKtJE4ZYm6zFIEv0q2skGz3QeqUvVhyj5eTSSPi5E6PaPT481 +PyWzOdxjKpBrIF/EUhJOlywqrJ2X3kjyo2bbwtKDlaZmp54lD+kLM5FlClrD2VQS +3a/DTg4fJl4N3LON7NWBcN7STyQF82xO9UxJZo3R/9ILJUFI/lGExkKvgATP0H5k +SeTy36LssUzAKh3ntLFlosS88Zj0qnAHY7S42jtM+kAiMFsRpvAFDsYCA0irhpuF +3dvd6qJ2gHN99ZwExEWN57kci57q13XRcrHedUTnQn3iV2t93Jm8PYMo6oCTjcVM +ZcFwgbg4/EMxsvYDNEeyrPsiBsse3RdHHF9mudMaotoRsaS8I8nkvof/uZS2+F0g +StRf571oe2XyFR7SOqkt6dhrJKyXWERHrVkY8SFlcN7ONGCoQPHzPKTDKCOM/icz +Q0CgFzzr6juwcqajuUpLXhZI9LK8yIySxZ2frHI2vDSANGupi5LAuBft7HZT9SQB +jLMi6Et8Vcad+qMUu2WFbm5PEn4KPJ2V +-----END CERTIFICATE----- + +# Issuer: CN=Izenpe.com O=IZENPE S.A. +# Subject: CN=Izenpe.com O=IZENPE S.A. +# Label: "Izenpe.com" +# Serial: 917563065490389241595536686991402621 +# MD5 Fingerprint: a6:b0:cd:85:80:da:5c:50:34:a3:39:90:2f:55:67:73 +# SHA1 Fingerprint: 2f:78:3d:25:52:18:a7:4a:65:39:71:b5:2c:a2:9c:45:15:6f:e9:19 +# SHA256 Fingerprint: 25:30:cc:8e:98:32:15:02:ba:d9:6f:9b:1f:ba:1b:09:9e:2d:29:9e:0f:45:48:bb:91:4f:36:3b:c0:d4:53:1f +-----BEGIN CERTIFICATE----- +MIIF8TCCA9mgAwIBAgIQALC3WhZIX7/hy/WL1xnmfTANBgkqhkiG9w0BAQsFADA4 +MQswCQYDVQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6 +ZW5wZS5jb20wHhcNMDcxMjEzMTMwODI4WhcNMzcxMjEzMDgyNzI1WjA4MQswCQYD +VQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6ZW5wZS5j +b20wggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDJ03rKDx6sp4boFmVq +scIbRTJxldn+EFvMr+eleQGPicPK8lVx93e+d5TzcqQsRNiekpsUOqHnJJAKClaO +xdgmlOHZSOEtPtoKct2jmRXagaKH9HtuJneJWK3W6wyyQXpzbm3benhB6QiIEn6H +LmYRY2xU+zydcsC8Lv/Ct90NduM61/e0aL6i9eOBbsFGb12N4E3GVFWJGjMxCrFX +uaOKmMPsOzTFlUFpfnXCPCDFYbpRR6AgkJOhkEvzTnyFRVSa0QUmQbC1TR0zvsQD +yCV8wXDbO/QJLVQnSKwv4cSsPsjLkkxTOTcj7NMB+eAJRE1NZMDhDVqHIrytG6P+ +JrUV86f8hBnp7KGItERphIPzidF0BqnMC9bC3ieFUCbKF7jJeodWLBoBHmy+E60Q +rLUk9TiRodZL2vG70t5HtfG8gfZZa88ZU+mNFctKy6lvROUbQc/hhqfK0GqfvEyN +BjNaooXlkDWgYlwWTvDjovoDGrQscbNYLN57C9saD+veIR8GdwYDsMnvmfzAuU8L +hij+0rnq49qlw0dpEuDb8PYZi+17cNcC1u2HGCgsBCRMd+RIihrGO5rUD8r6ddIB +QFqNeb+Lz0vPqhbBleStTIo+F5HUsWLlguWABKQDfo2/2n+iD5dPDNMN+9fR5XJ+ +HMh3/1uaD7euBUbl8agW7EekFwIDAQABo4H2MIHzMIGwBgNVHREEgagwgaWBD2lu +Zm9AaXplbnBlLmNvbaSBkTCBjjFHMEUGA1UECgw+SVpFTlBFIFMuQS4gLSBDSUYg +QTAxMzM3MjYwLVJNZXJjLlZpdG9yaWEtR2FzdGVpeiBUMTA1NSBGNjIgUzgxQzBB +BgNVBAkMOkF2ZGEgZGVsIE1lZGl0ZXJyYW5lbyBFdG9yYmlkZWEgMTQgLSAwMTAx +MCBWaXRvcmlhLUdhc3RlaXowDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC +AQYwHQYDVR0OBBYEFB0cZQ6o8iV7tJHP5LGx5r1VdGwFMA0GCSqGSIb3DQEBCwUA +A4ICAQB4pgwWSp9MiDrAyw6lFn2fuUhfGI8NYjb2zRlrrKvV9pF9rnHzP7MOeIWb +laQnIUdCSnxIOvVFfLMMjlF4rJUT3sb9fbgakEyrkgPH7UIBzg/YsfqikuFgba56 +awmqxinuaElnMIAkejEWOVt+8Rwu3WwJrfIxwYJOubv5vr8qhT/AQKM6WfxZSzwo +JNu0FXWuDYi6LnPAvViH5ULy617uHjAimcs30cQhbIHsvm0m5hzkQiCeR7Csg1lw +LDXWrzY0tM07+DKo7+N4ifuNRSzanLh+QBxh5z6ikixL8s36mLYp//Pye6kfLqCT +VyvehQP5aTfLnnhqBbTFMXiJ7HqnheG5ezzevh55hM6fcA5ZwjUukCox2eRFekGk +LhObNA5me0mrZJfQRsN5nXJQY6aYWwa9SG3YOYNw6DXwBdGqvOPbyALqfP2C2sJb +UjWumDqtujWTI6cfSN01RpiyEGjkpTHCClguGYEQyVB1/OpaFs4R1+7vUIgtYf8/ +QnMFlEPVjjxOAToZpR9GTnfQXeWBIiGH/pR9hNiTrdZoQ0iy2+tzJOeRf1SktoA+ +naM8THLCV8Sg1Mw4J87VBp6iSNnpn86CcDaTmjvfliHjWbcM2pE38P1ZWrOZyGls +QyYBNWNgVYkDOnXYukrZVP/u3oDYLdE41V4tC5h9Pmzb/CaIxw== +-----END CERTIFICATE----- + +# Issuer: CN=Chambers of Commerce Root - 2008 O=AC 
Camerfirma S.A. +# Subject: CN=Chambers of Commerce Root - 2008 O=AC Camerfirma S.A. +# Label: "Chambers of Commerce Root - 2008" +# Serial: 11806822484801597146 +# MD5 Fingerprint: 5e:80:9e:84:5a:0e:65:0b:17:02:f3:55:18:2a:3e:d7 +# SHA1 Fingerprint: 78:6a:74:ac:76:ab:14:7f:9c:6a:30:50:ba:9e:a8:7e:fe:9a:ce:3c +# SHA256 Fingerprint: 06:3e:4a:fa:c4:91:df:d3:32:f3:08:9b:85:42:e9:46:17:d8:93:d7:fe:94:4e:10:a7:93:7e:e2:9d:96:93:c0 +-----BEGIN CERTIFICATE----- +MIIHTzCCBTegAwIBAgIJAKPaQn6ksa7aMA0GCSqGSIb3DQEBBQUAMIGuMQswCQYD +VQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3VycmVudCBhZGRyZXNzIGF0 +IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAGA1UEBRMJQTgyNzQzMjg3 +MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xKTAnBgNVBAMTIENoYW1iZXJz +IG9mIENvbW1lcmNlIFJvb3QgLSAyMDA4MB4XDTA4MDgwMTEyMjk1MFoXDTM4MDcz +MTEyMjk1MFowga4xCzAJBgNVBAYTAkVVMUMwQQYDVQQHEzpNYWRyaWQgKHNlZSBj +dXJyZW50IGFkZHJlc3MgYXQgd3d3LmNhbWVyZmlybWEuY29tL2FkZHJlc3MpMRIw +EAYDVQQFEwlBODI3NDMyODcxGzAZBgNVBAoTEkFDIENhbWVyZmlybWEgUy5BLjEp +MCcGA1UEAxMgQ2hhbWJlcnMgb2YgQ29tbWVyY2UgUm9vdCAtIDIwMDgwggIiMA0G +CSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCvAMtwNyuAWko6bHiUfaN/Gh/2NdW9 +28sNRHI+JrKQUrpjOyhYb6WzbZSm891kDFX29ufyIiKAXuFixrYp4YFs8r/lfTJq +VKAyGVn+H4vXPWCGhSRv4xGzdz4gljUha7MI2XAuZPeEklPWDrCQiorjh40G072Q +DuKZoRuGDtqaCrsLYVAGUvGef3bsyw/QHg3PmTA9HMRFEFis1tPo1+XqxQEHd9ZR +5gN/ikilTWh1uem8nk4ZcfUyS5xtYBkL+8ydddy/Js2Pk3g5eXNeJQ7KXOt3EgfL +ZEFHcpOrUMPrCXZkNNI5t3YRCQ12RcSprj1qr7V9ZS+UWBDsXHyvfuK2GNnQm05a +Sd+pZgvMPMZ4fKecHePOjlO+Bd5gD2vlGts/4+EhySnB8esHnFIbAURRPHsl18Tl +UlRdJQfKFiC4reRB7noI/plvg6aRArBsNlVq5331lubKgdaX8ZSD6e2wsWsSaR6s ++12pxZjptFtYer49okQ6Y1nUCyXeG0+95QGezdIp1Z8XGQpvvwyQ0wlf2eOKNcx5 +Wk0ZN5K3xMGtr/R5JJqyAQuxr1yW84Ay+1w9mPGgP0revq+ULtlVmhduYJ1jbLhj +ya6BXBg14JC7vjxPNyK5fuvPnnchpj04gftI2jE9K+OJ9dC1vX7gUMQSibMjmhAx +hduub+84Mxh2EQIDAQABo4IBbDCCAWgwEgYDVR0TAQH/BAgwBgEB/wIBDDAdBgNV +HQ4EFgQU+SSsD7K1+HnA+mCIG8TZTQKeFxkwgeMGA1UdIwSB2zCB2IAU+SSsD7K1 ++HnA+mCIG8TZTQKeFxmhgbSkgbEwga4xCzAJBgNVBAYTAkVVMUMwQQYDVQQHEzpN +YWRyaWQgKHNlZSBjdXJyZW50IGFkZHJlc3MgYXQgd3d3LmNhbWVyZmlybWEuY29t +L2FkZHJlc3MpMRIwEAYDVQQFEwlBODI3NDMyODcxGzAZBgNVBAoTEkFDIENhbWVy +ZmlybWEgUy5BLjEpMCcGA1UEAxMgQ2hhbWJlcnMgb2YgQ29tbWVyY2UgUm9vdCAt +IDIwMDiCCQCj2kJ+pLGu2jAOBgNVHQ8BAf8EBAMCAQYwPQYDVR0gBDYwNDAyBgRV +HSAAMCowKAYIKwYBBQUHAgEWHGh0dHA6Ly9wb2xpY3kuY2FtZXJmaXJtYS5jb20w +DQYJKoZIhvcNAQEFBQADggIBAJASryI1wqM58C7e6bXpeHxIvj99RZJe6dqxGfwW +PJ+0W2aeaufDuV2I6A+tzyMP3iU6XsxPpcG1Lawk0lgH3qLPaYRgM+gQDROpI9CF +5Y57pp49chNyM/WqfcZjHwj0/gF/JM8rLFQJ3uIrbZLGOU8W6jx+ekbURWpGqOt1 +glanq6B8aBMz9p0w8G8nOSQjKpD9kCk18pPfNKXG9/jvjA9iSnyu0/VU+I22mlaH +FoI6M6taIgj3grrqLuBHmrS1RaMFO9ncLkVAO+rcf+g769HsJtg1pDDFOqxXnrN2 +pSB7+R5KBWIBpih1YJeSDW4+TTdDDZIVnBgizVGZoCkaPF+KMjNbMMeJL0eYD6MD +xvbxrN8y8NmBGuScvfaAFPDRLLmF9dijscilIeUcE5fuDr3fKanvNFNb0+RqE4QG +tjICxFKuItLcsiFCGtpA8CnJ7AoMXOLQusxI0zcKzBIKinmwPQN/aUv0NCB9szTq +jktk9T79syNnFQ0EuPAtwQlRPLJsFfClI9eDdOTlLsn+mCdCxqvGnrDQWzilm1De +fhiYtUU79nm06PcaewaD+9CL2rvHvRirCG88gGtAPxkZumWK5r7VXNM21+9AUiRg +OGcEMeyP84LG3rlV8zsxkVrctQgVrXYlCg17LofiDKYGvCYQbTed7N14jHyAxfDZ +d0jQ +-----END CERTIFICATE----- + +# Issuer: CN=Global Chambersign Root - 2008 O=AC Camerfirma S.A. +# Subject: CN=Global Chambersign Root - 2008 O=AC Camerfirma S.A. 
+# Label: "Global Chambersign Root - 2008" +# Serial: 14541511773111788494 +# MD5 Fingerprint: 9e:80:ff:78:01:0c:2e:c1:36:bd:fe:96:90:6e:08:f3 +# SHA1 Fingerprint: 4a:bd:ee:ec:95:0d:35:9c:89:ae:c7:52:a1:2c:5b:29:f6:d6:aa:0c +# SHA256 Fingerprint: 13:63:35:43:93:34:a7:69:80:16:a0:d3:24:de:72:28:4e:07:9d:7b:52:20:bb:8f:bd:74:78:16:ee:be:ba:ca +-----BEGIN CERTIFICATE----- +MIIHSTCCBTGgAwIBAgIJAMnN0+nVfSPOMA0GCSqGSIb3DQEBBQUAMIGsMQswCQYD +VQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3VycmVudCBhZGRyZXNzIGF0 +IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAGA1UEBRMJQTgyNzQzMjg3 +MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xJzAlBgNVBAMTHkdsb2JhbCBD +aGFtYmVyc2lnbiBSb290IC0gMjAwODAeFw0wODA4MDExMjMxNDBaFw0zODA3MzEx +MjMxNDBaMIGsMQswCQYDVQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3Vy +cmVudCBhZGRyZXNzIGF0IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAG +A1UEBRMJQTgyNzQzMjg3MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xJzAl +BgNVBAMTHkdsb2JhbCBDaGFtYmVyc2lnbiBSb290IC0gMjAwODCCAiIwDQYJKoZI +hvcNAQEBBQADggIPADCCAgoCggIBAMDfVtPkOpt2RbQT2//BthmLN0EYlVJH6xed +KYiONWwGMi5HYvNJBL99RDaxccy9Wglz1dmFRP+RVyXfXjaOcNFccUMd2drvXNL7 +G706tcuto8xEpw2uIRU/uXpbknXYpBI4iRmKt4DS4jJvVpyR1ogQC7N0ZJJ0YPP2 +zxhPYLIj0Mc7zmFLmY/CDNBAspjcDahOo7kKrmCgrUVSY7pmvWjg+b4aqIG7HkF4 +ddPB/gBVsIdU6CeQNR1MM62X/JcumIS/LMmjv9GYERTtY/jKmIhYF5ntRQOXfjyG +HoiMvvKRhI9lNNgATH23MRdaKXoKGCQwoze1eqkBfSbW+Q6OWfH9GzO1KTsXO0G2 +Id3UwD2ln58fQ1DJu7xsepeY7s2MH/ucUa6LcL0nn3HAa6x9kGbo1106DbDVwo3V +yJ2dwW3Q0L9R5OP4wzg2rtandeavhENdk5IMagfeOx2YItaswTXbo6Al/3K1dh3e +beksZixShNBFks4c5eUzHdwHU1SjqoI7mjcv3N2gZOnm3b2u/GSFHTynyQbehP9r +6GsaPMWis0L7iwk+XwhSx2LE1AVxv8Rk5Pihg+g+EpuoHtQ2TS9x9o0o9oOpE9Jh +wZG7SMA0j0GMS0zbaRL/UJScIINZc+18ofLx/d33SdNDWKBWY8o9PeU1VlnpDsog +zCtLkykPAgMBAAGjggFqMIIBZjASBgNVHRMBAf8ECDAGAQH/AgEMMB0GA1UdDgQW +BBS5CcqcHtvTbDprru1U8VuTBjUuXjCB4QYDVR0jBIHZMIHWgBS5CcqcHtvTbDpr +ru1U8VuTBjUuXqGBsqSBrzCBrDELMAkGA1UEBhMCRVUxQzBBBgNVBAcTOk1hZHJp +ZCAoc2VlIGN1cnJlbnQgYWRkcmVzcyBhdCB3d3cuY2FtZXJmaXJtYS5jb20vYWRk +cmVzcykxEjAQBgNVBAUTCUE4Mjc0MzI4NzEbMBkGA1UEChMSQUMgQ2FtZXJmaXJt +YSBTLkEuMScwJQYDVQQDEx5HbG9iYWwgQ2hhbWJlcnNpZ24gUm9vdCAtIDIwMDiC +CQDJzdPp1X0jzjAOBgNVHQ8BAf8EBAMCAQYwPQYDVR0gBDYwNDAyBgRVHSAAMCow +KAYIKwYBBQUHAgEWHGh0dHA6Ly9wb2xpY3kuY2FtZXJmaXJtYS5jb20wDQYJKoZI +hvcNAQEFBQADggIBAICIf3DekijZBZRG/5BXqfEv3xoNa/p8DhxJJHkn2EaqbylZ +UohwEurdPfWbU1Rv4WCiqAm57OtZfMY18dwY6fFn5a+6ReAJ3spED8IXDneRRXoz +X1+WLGiLwUePmJs9wOzL9dWCkoQ10b42OFZyMVtHLaoXpGNR6woBrX/sdZ7LoR/x +fxKxueRkf2fWIyr0uDldmOghp+G9PUIadJpwr2hsUF1Jz//7Dl3mLEfXgTpZALVz +a2Mg9jFFCDkO9HB+QHBaP9BrQql0PSgvAm11cpUJjUhjxsYjV5KTXjXBjfkK9yyd +Yhz2rXzdpjEetrHHfoUm+qRqtdpjMNHvkzeyZi99Bffnt0uYlDXA2TopwZ2yUDMd +SqlapskD7+3056huirRXhOukP9DuqqqHW2Pok+JrqNS4cnhrG+055F3Lm6qH1U9O +AP7Zap88MQ8oAgF9mOinsKJknnn4SPIVqczmyETrP3iZ8ntxPjzxmKfFGBI/5rso +M0LpRQp8bfKGeS/Fghl9CYl8slR2iK7ewfPM4W7bMdaTrpmg7yVqc5iJWzouE4ge +v8CSlDQb4ye3ix5vQv/n6TebUB0tovkC7stYWDpxvGjjqsGvHCgfotwjZT+B6q6Z +09gwzxMNTxXJhLynSC34MCN32EZLeW32jO06f2ARePTpm67VVMB0gNELQp/B +-----END CERTIFICATE----- + +# Issuer: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. +# Subject: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. 
+# Label: "Go Daddy Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: 80:3a:bc:22:c1:e6:fb:8d:9b:3b:27:4a:32:1b:9a:01 +# SHA1 Fingerprint: 47:be:ab:c9:22:ea:e8:0e:78:78:34:62:a7:9f:45:c2:54:fd:e6:8b +# SHA256 Fingerprint: 45:14:0b:32:47:eb:9c:c8:c5:b4:f0:d7:b5:30:91:f7:32:92:08:9e:6e:5a:63:e2:74:9d:d3:ac:a9:19:8e:da +-----BEGIN CERTIFICATE----- +MIIDxTCCAq2gAwIBAgIBADANBgkqhkiG9w0BAQsFADCBgzELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxGjAYBgNVBAoT +EUdvRGFkZHkuY29tLCBJbmMuMTEwLwYDVQQDEyhHbyBEYWRkeSBSb290IENlcnRp +ZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIz +NTk1OVowgYMxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQH +EwpTY290dHNkYWxlMRowGAYDVQQKExFHb0RhZGR5LmNvbSwgSW5jLjExMC8GA1UE +AxMoR28gRGFkZHkgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIw +DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL9xYgjx+lk09xvJGKP3gElY6SKD +E6bFIEMBO4Tx5oVJnyfq9oQbTqC023CYxzIBsQU+B07u9PpPL1kwIuerGVZr4oAH +/PMWdYA5UXvl+TW2dE6pjYIT5LY/qQOD+qK+ihVqf94Lw7YZFAXK6sOoBJQ7Rnwy +DfMAZiLIjWltNowRGLfTshxgtDj6AozO091GB94KPutdfMh8+7ArU6SSYmlRJQVh +GkSBjCypQ5Yj36w6gZoOKcUcqeldHraenjAKOc7xiID7S13MMuyFYkMlNAJWJwGR +tDtwKj9useiciAF9n9T521NtYJ2/LOdYq7hfRvzOxBsDPAnrSTFcaUaz4EcCAwEA +AaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE +FDqahQcQZyi27/a9BUFuIMGU2g/eMA0GCSqGSIb3DQEBCwUAA4IBAQCZ21151fmX +WWcDYfF+OwYxdS2hII5PZYe096acvNjpL9DbWu7PdIxztDhC2gV7+AJ1uP2lsdeu +9tfeE8tTEH6KRtGX+rcuKxGrkLAngPnon1rpN5+r5N9ss4UXnT3ZJE95kTXWXwTr +gIOrmgIttRD02JDHBHNA7XIloKmf7J6raBKZV8aPEjoJpL1E/QYVN8Gb5DKj7Tjo +2GTzLH4U/ALqn83/B2gX2yKQOC16jdFU8WnjXzPKej17CuPKf1855eJ1usV2GDPO +LPAvTK33sefOT6jEm0pUBsV/fdUID+Ic/n4XuKxe9tQWskMJDE32p2u0mYRlynqI +4uJEvlz36hz1 +-----END CERTIFICATE----- + +# Issuer: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. +# Subject: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
+# Label: "Starfield Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: d6:39:81:c6:52:7e:96:69:fc:fc:ca:66:ed:05:f2:96 +# SHA1 Fingerprint: b5:1c:06:7c:ee:2b:0c:3d:f8:55:ab:2d:92:f4:fe:39:d4:e7:0f:0e +# SHA256 Fingerprint: 2c:e1:cb:0b:f9:d2:f9:e1:02:99:3f:be:21:51:52:c3:b2:dd:0c:ab:de:1c:68:e5:31:9b:83:91:54:db:b7:f5 +-----BEGIN CERTIFICATE----- +MIID3TCCAsWgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBjzELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT +HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAMTKVN0YXJmaWVs +ZCBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAw +MFoXDTM3MTIzMTIzNTk1OVowgY8xCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6 +b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFyZmllbGQgVGVj +aG5vbG9naWVzLCBJbmMuMTIwMAYDVQQDEylTdGFyZmllbGQgUm9vdCBDZXJ0aWZp +Y2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC +ggEBAL3twQP89o/8ArFvW59I2Z154qK3A2FWGMNHttfKPTUuiUP3oWmb3ooa/RMg +nLRJdzIpVv257IzdIvpy3Cdhl+72WoTsbhm5iSzchFvVdPtrX8WJpRBSiUZV9Lh1 +HOZ/5FSuS/hVclcCGfgXcVnrHigHdMWdSL5stPSksPNkN3mSwOxGXn/hbVNMYq/N +Hwtjuzqd+/x5AJhhdM8mgkBj87JyahkNmcrUDnXMN/uLicFZ8WJ/X7NfZTD4p7dN +dloedl40wOiWVpmKs/B/pM293DIxfJHP4F8R+GuqSVzRmZTRouNjWwl2tVZi4Ut0 +HZbUJtQIBFnQmA4O5t78w+wfkPECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO +BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFHwMMh+n2TB/xH1oo2Kooc6rB1snMA0G +CSqGSIb3DQEBCwUAA4IBAQARWfolTwNvlJk7mh+ChTnUdgWUXuEok21iXQnCoKjU +sHU48TRqneSfioYmUeYs0cYtbpUgSpIB7LiKZ3sx4mcujJUDJi5DnUox9g61DLu3 +4jd/IroAow57UvtruzvE03lRTs2Q9GcHGcg8RnoNAX3FWOdt5oUwF5okxBDgBPfg +8n/Uqgr/Qh037ZTlZFkSIHc40zI+OIF1lnP6aI+xy84fxez6nH7PfrHxBy22/L/K +pL/QlwVKvOoYKAKQvVR4CSFx09F9HdkWsKlhPdAKACL8x3vLCWRFCztAgfd9fDL1 +mMpYjn0q7pBZc2T5NnReJaH1ZgUufzkVqSr7UIuOhWn0 +-----END CERTIFICATE----- + +# Issuer: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. +# Subject: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
+# Label: "Starfield Services Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: 17:35:74:af:7b:61:1c:eb:f4:f9:3c:e2:ee:40:f9:a2 +# SHA1 Fingerprint: 92:5a:8f:8d:2c:6d:04:e0:66:5f:59:6a:ff:22:d8:63:e8:25:6f:3f +# SHA256 Fingerprint: 56:8d:69:05:a2:c8:87:08:a4:b3:02:51:90:ed:cf:ed:b1:97:4a:60:6a:13:c6:e5:29:0f:cb:2a:e6:3e:da:b5 +-----BEGIN CERTIFICATE----- +MIID7zCCAtegAwIBAgIBADANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT +HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xOzA5BgNVBAMTMlN0YXJmaWVs +ZCBTZXJ2aWNlcyBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5 +MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgZgxCzAJBgNVBAYTAlVTMRAwDgYD +VQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFy +ZmllbGQgVGVjaG5vbG9naWVzLCBJbmMuMTswOQYDVQQDEzJTdGFyZmllbGQgU2Vy +dmljZXMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBANUMOsQq+U7i9b4Zl1+OiFOxHz/Lz58gE20p +OsgPfTz3a3Y4Y9k2YKibXlwAgLIvWX/2h/klQ4bnaRtSmpDhcePYLQ1Ob/bISdm2 +8xpWriu2dBTrz/sm4xq6HZYuajtYlIlHVv8loJNwU4PahHQUw2eeBGg6345AWh1K +Ts9DkTvnVtYAcMtS7nt9rjrnvDH5RfbCYM8TWQIrgMw0R9+53pBlbQLPLJGmpufe +hRhJfGZOozptqbXuNC66DQO4M99H67FrjSXZm86B0UVGMpZwh94CDklDhbZsc7tk +6mFBrMnUVN+HL8cisibMn1lUaJ/8viovxFUcdUBgF4UCVTmLfwUCAwEAAaNCMEAw +DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJxfAN+q +AdcwKziIorhtSpzyEZGDMA0GCSqGSIb3DQEBCwUAA4IBAQBLNqaEd2ndOxmfZyMI +bw5hyf2E3F/YNoHN2BtBLZ9g3ccaaNnRbobhiCPPE95Dz+I0swSdHynVv/heyNXB +ve6SbzJ08pGCL72CQnqtKrcgfU28elUSwhXqvfdqlS5sdJ/PHLTyxQGjhdByPq1z +qwubdQxtRbeOlKyWN7Wg0I8VRw7j6IPdj/3vQQF3zCepYoUz8jcI73HPdwbeyBkd +iEDPfUYd/x7H4c7/I9vG+o1VTqkC50cRRj70/b17KSa7qWFiNyi2LSr2EIZkyXCn +0q23KXB56jzaYyWf/Wi3MOxw+3WKt21gZ7IeyLnp2KhvAotnDU0mV3HaIPzBSlCN +sSi6 +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Commercial O=AffirmTrust +# Subject: CN=AffirmTrust Commercial O=AffirmTrust +# Label: "AffirmTrust Commercial" +# Serial: 8608355977964138876 +# MD5 Fingerprint: 82:92:ba:5b:ef:cd:8a:6f:a6:3d:55:f9:84:f6:d6:b7 +# SHA1 Fingerprint: f9:b5:b6:32:45:5f:9c:be:ec:57:5f:80:dc:e9:6e:2c:c7:b2:78:b7 +# SHA256 Fingerprint: 03:76:ab:1d:54:c5:f9:80:3c:e4:b2:e2:01:a0:ee:7e:ef:7b:57:b6:36:e8:a9:3c:9b:8d:48:60:c9:6f:5f:a7 +-----BEGIN CERTIFICATE----- +MIIDTDCCAjSgAwIBAgIId3cGJyapsXwwDQYJKoZIhvcNAQELBQAwRDELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz +dCBDb21tZXJjaWFsMB4XDTEwMDEyOTE0MDYwNloXDTMwMTIzMTE0MDYwNlowRDEL +MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp +cm1UcnVzdCBDb21tZXJjaWFsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEA9htPZwcroRX1BiLLHwGy43NFBkRJLLtJJRTWzsO3qyxPxkEylFf6EqdbDuKP +Hx6GGaeqtS25Xw2Kwq+FNXkyLbscYjfysVtKPcrNcV/pQr6U6Mje+SJIZMblq8Yr +ba0F8PrVC8+a5fBQpIs7R6UjW3p6+DM/uO+Zl+MgwdYoic+U+7lF7eNAFxHUdPAL +MeIrJmqbTFeurCA+ukV6BfO9m2kVrn1OIGPENXY6BwLJN/3HR+7o8XYdcxXyl6S1 +yHp52UKqK39c/s4mT6NmgTWvRLpUHhwwMmWd5jyTXlBOeuM61G7MGvv50jeuJCqr +VwMiKA1JdX+3KNp1v47j3A55MQIDAQABo0IwQDAdBgNVHQ4EFgQUnZPGU4teyq8/ +nx4P5ZmVvCT2lI8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ +KoZIhvcNAQELBQADggEBAFis9AQOzcAN/wr91LoWXym9e2iZWEnStB03TX8nfUYG +XUPGhi4+c7ImfU+TqbbEKpqrIZcUsd6M06uJFdhrJNTxFq7YpFzUf1GO7RgBsZNj +vbz4YYCanrHOQnDiqX0GJX0nof5v7LMeJNrjS1UaADs1tDvZ110w/YETifLCBivt +Z8SOyUOyXGsViQK8YvxO8rUzqrJv0wqiUOP2O+guRMLbZjipM1ZI8W0bM40NjD9g +N53Tym1+NH4Nn3J2ixufcv1SNUFFApYvHLKac0khsUlHRUe072o0EclNmsxZt9YC +nlpOZbWUrhvfKbAW8b8Angc6F2S1BLUjIZkKlTuXfO8= +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Networking O=AffirmTrust +# Subject: CN=AffirmTrust Networking 
O=AffirmTrust +# Label: "AffirmTrust Networking" +# Serial: 8957382827206547757 +# MD5 Fingerprint: 42:65:ca:be:01:9a:9a:4c:a9:8c:41:49:cd:c0:d5:7f +# SHA1 Fingerprint: 29:36:21:02:8b:20:ed:02:f5:66:c5:32:d1:d6:ed:90:9f:45:00:2f +# SHA256 Fingerprint: 0a:81:ec:5a:92:97:77:f1:45:90:4a:f3:8d:5d:50:9f:66:b5:e2:c5:8f:cd:b5:31:05:8b:0e:17:f3:f0:b4:1b +-----BEGIN CERTIFICATE----- +MIIDTDCCAjSgAwIBAgIIfE8EORzUmS0wDQYJKoZIhvcNAQEFBQAwRDELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz +dCBOZXR3b3JraW5nMB4XDTEwMDEyOTE0MDgyNFoXDTMwMTIzMTE0MDgyNFowRDEL +MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp +cm1UcnVzdCBOZXR3b3JraW5nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEAtITMMxcua5Rsa2FSoOujz3mUTOWUgJnLVWREZY9nZOIG41w3SfYvm4SEHi3y +YJ0wTsyEheIszx6e/jarM3c1RNg1lho9Nuh6DtjVR6FqaYvZ/Ls6rnla1fTWcbua +kCNrmreIdIcMHl+5ni36q1Mr3Lt2PpNMCAiMHqIjHNRqrSK6mQEubWXLviRmVSRL +QESxG9fhwoXA3hA/Pe24/PHxI1Pcv2WXb9n5QHGNfb2V1M6+oF4nI979ptAmDgAp +6zxG8D1gvz9Q0twmQVGeFDdCBKNwV6gbh+0t+nvujArjqWaJGctB+d1ENmHP4ndG +yH329JKBNv3bNPFyfvMMFr20FQIDAQABo0IwQDAdBgNVHQ4EFgQUBx/S55zawm6i +QLSwelAQUHTEyL0wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ +KoZIhvcNAQEFBQADggEBAIlXshZ6qML91tmbmzTCnLQyFE2npN/svqe++EPbkTfO +tDIuUFUaNU52Q3Eg75N3ThVwLofDwR1t3Mu1J9QsVtFSUzpE0nPIxBsFZVpikpzu +QY0x2+c06lkh1QF612S4ZDnNye2v7UsDSKegmQGA3GWjNq5lWUhPgkvIZfFXHeVZ +Lgo/bNjR9eUJtGxUAArgFU2HdW23WJZa3W3SAKD0m0i+wzekujbgfIeFlxoVot4u +olu9rxj5kFDNcFn4J2dHy8egBzp90SxdbBk6ZrV9/ZFvgrG+CJPbFEfxojfHRZ48 +x3evZKiT3/Zpg4Jg8klCNO1aAFSFHBY2kgxc+qatv9s= +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Premium O=AffirmTrust +# Subject: CN=AffirmTrust Premium O=AffirmTrust +# Label: "AffirmTrust Premium" +# Serial: 7893706540734352110 +# MD5 Fingerprint: c4:5d:0e:48:b6:ac:28:30:4e:0a:bc:f9:38:16:87:57 +# SHA1 Fingerprint: d8:a6:33:2c:e0:03:6f:b1:85:f6:63:4f:7d:6a:06:65:26:32:28:27 +# SHA256 Fingerprint: 70:a7:3f:7f:37:6b:60:07:42:48:90:45:34:b1:14:82:d5:bf:0e:69:8e:cc:49:8d:f5:25:77:eb:f2:e9:3b:9a +-----BEGIN CERTIFICATE----- +MIIFRjCCAy6gAwIBAgIIbYwURrGmCu4wDQYJKoZIhvcNAQEMBQAwQTELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1UcnVz +dCBQcmVtaXVtMB4XDTEwMDEyOTE0MTAzNloXDTQwMTIzMTE0MTAzNlowQTELMAkG +A1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1U +cnVzdCBQcmVtaXVtMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxBLf +qV/+Qd3d9Z+K4/as4Tx4mrzY8H96oDMq3I0gW64tb+eT2TZwamjPjlGjhVtnBKAQ +JG9dKILBl1fYSCkTtuG+kU3fhQxTGJoeJKJPj/CihQvL9Cl/0qRY7iZNyaqoe5rZ ++jjeRFcV5fiMyNlI4g0WJx0eyIOFJbe6qlVBzAMiSy2RjYvmia9mx+n/K+k8rNrS +s8PhaJyJ+HoAVt70VZVs+7pk3WKL3wt3MutizCaam7uqYoNMtAZ6MMgpv+0GTZe5 +HMQxK9VfvFMSF5yZVylmd2EhMQcuJUmdGPLu8ytxjLW6OQdJd/zvLpKQBY0tL3d7 +70O/Nbua2Plzpyzy0FfuKE4mX4+QaAkvuPjcBukumj5Rp9EixAqnOEhss/n/fauG +V+O61oV4d7pD6kh/9ti+I20ev9E2bFhc8e6kGVQa9QPSdubhjL08s9NIS+LI+H+S +qHZGnEJlPqQewQcDWkYtuJfzt9WyVSHvutxMAJf7FJUnM7/oQ0dG0giZFmA7mn7S +5u046uwBHjxIVkkJx0w3AJ6IDsBz4W9m6XJHMD4Q5QsDyZpCAGzFlH5hxIrff4Ia +C1nEWTJ3s7xgaVY5/bQGeyzWZDbZvUjthB9+pSKPKrhC9IK31FOQeE4tGv2Bb0TX +OwF0lkLgAOIua+rF7nKsu7/+6qqo+Nz2snmKtmcCAwEAAaNCMEAwHQYDVR0OBBYE +FJ3AZ6YMItkm9UWrpmVSESfYRaxjMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ +BAQDAgEGMA0GCSqGSIb3DQEBDAUAA4ICAQCzV00QYk465KzquByvMiPIs0laUZx2 +KI15qldGF9X1Uva3ROgIRL8YhNILgM3FEv0AVQVhh0HctSSePMTYyPtwni94loMg +Nt58D2kTiKV1NpgIpsbfrM7jWNa3Pt668+s0QNiigfV4Py/VpfzZotReBA4Xrf5B +8OWycvpEgjNC6C1Y91aMYj+6QrCcDFx+LmUmXFNPALJ4fqENmS2NuB2OosSw/WDQ +MKSOyARiqcTtNd56l+0OOF6SL5Nwpamcb6d9Ex1+xghIsV5n61EIJenmJWtSKZGc +0jlzCFfemQa0W50QBuHCAKi4HEoCChTQwUHK+4w1IX2COPKpVJEZNZOUbWo6xbLQ 
+u4mGk+ibyQ86p3q4ofB4Rvr8Ny/lioTz3/4E2aFooC8k4gmVBtWVyuEklut89pMF +u+1z6S3RdTnX5yTb2E5fQ4+e0BQ5v1VwSJlXMbSc7kqYA5YwH2AG7hsj/oFgIxpH +YoWlzBk0gG+zrBrjn/B7SK3VAdlntqlyk+otZrWyuOQ9PLLvTIzq6we/qzWaVYa8 +GKa1qF60g2xraUDTn9zxw2lrueFtCfTxqlB2Cnp9ehehVZZCmTEJ3WARjQUwfuaO +RtGdFNrHF+QFlozEJLUbzxQHskD4o55BhrwE0GuWyCqANP2/7waj3VjFhT0+j/6e +KeC2uAloGRwYQw== +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Premium ECC O=AffirmTrust +# Subject: CN=AffirmTrust Premium ECC O=AffirmTrust +# Label: "AffirmTrust Premium ECC" +# Serial: 8401224907861490260 +# MD5 Fingerprint: 64:b0:09:55:cf:b1:d5:99:e2:be:13:ab:a6:5d:ea:4d +# SHA1 Fingerprint: b8:23:6b:00:2f:1d:16:86:53:01:55:6c:11:a4:37:ca:eb:ff:c3:bb +# SHA256 Fingerprint: bd:71:fd:f6:da:97:e4:cf:62:d1:64:7a:dd:25:81:b0:7d:79:ad:f8:39:7e:b4:ec:ba:9c:5e:84:88:82:14:23 +-----BEGIN CERTIFICATE----- +MIIB/jCCAYWgAwIBAgIIdJclisc/elQwCgYIKoZIzj0EAwMwRTELMAkGA1UEBhMC +VVMxFDASBgNVBAoMC0FmZmlybVRydXN0MSAwHgYDVQQDDBdBZmZpcm1UcnVzdCBQ +cmVtaXVtIEVDQzAeFw0xMDAxMjkxNDIwMjRaFw00MDEyMzExNDIwMjRaMEUxCzAJ +BgNVBAYTAlVTMRQwEgYDVQQKDAtBZmZpcm1UcnVzdDEgMB4GA1UEAwwXQWZmaXJt +VHJ1c3QgUHJlbWl1bSBFQ0MwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQNMF4bFZ0D +0KF5Nbc6PJJ6yhUczWLznCZcBz3lVPqj1swS6vQUX+iOGasvLkjmrBhDeKzQN8O9 +ss0s5kfiGuZjuD0uL3jET9v0D6RoTFVya5UdThhClXjMNzyR4ptlKymjQjBAMB0G +A1UdDgQWBBSaryl6wBE1NSZRMADDav5A1a7WPDAPBgNVHRMBAf8EBTADAQH/MA4G +A1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNnADBkAjAXCfOHiFBar8jAQr9HX/Vs +aobgxCd05DhT1wV/GzTjxi+zygk8N53X57hG8f2h4nECMEJZh0PUUd+60wkyWs6I +flc9nF9Ca/UHLbXwgpP5WW+uZPpY5Yse42O+tYHNbwKMeQ== +-----END CERTIFICATE----- + +# Issuer: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority +# Subject: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority +# Label: "Certum Trusted Network CA" +# Serial: 279744 +# MD5 Fingerprint: d5:e9:81:40:c5:18:69:fc:46:2c:89:75:62:0f:aa:78 +# SHA1 Fingerprint: 07:e0:32:e0:20:b7:2c:3f:19:2f:06:28:a2:59:3a:19:a7:0f:06:9e +# SHA256 Fingerprint: 5c:58:46:8d:55:f5:8e:49:7e:74:39:82:d2:b5:00:10:b6:d1:65:37:4a:cf:83:a7:d4:a3:2d:b7:68:c4:40:8e +-----BEGIN CERTIFICATE----- +MIIDuzCCAqOgAwIBAgIDBETAMA0GCSqGSIb3DQEBBQUAMH4xCzAJBgNVBAYTAlBM +MSIwIAYDVQQKExlVbml6ZXRvIFRlY2hub2xvZ2llcyBTLkEuMScwJQYDVQQLEx5D +ZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxIjAgBgNVBAMTGUNlcnR1bSBU +cnVzdGVkIE5ldHdvcmsgQ0EwHhcNMDgxMDIyMTIwNzM3WhcNMjkxMjMxMTIwNzM3 +WjB+MQswCQYDVQQGEwJQTDEiMCAGA1UEChMZVW5pemV0byBUZWNobm9sb2dpZXMg +Uy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MSIw +IAYDVQQDExlDZXJ0dW0gVHJ1c3RlZCBOZXR3b3JrIENBMIIBIjANBgkqhkiG9w0B +AQEFAAOCAQ8AMIIBCgKCAQEA4/t9o3K6wvDJFIf1awFO4W5AB7ptJ11/91sts1rH +UV+rpDKmYYe2bg+G0jACl/jXaVehGDldamR5xgFZrDwxSjh80gTSSyjoIF87B6LM +TXPb865Px1bVWqeWifrzq2jUI4ZZJ88JJ7ysbnKDHDBy3+Ci6dLhdHUZvSqeexVU +BBvXQzmtVSjF4hq79MDkrjhJM8x2hZ85RdKknvISjFH4fOQtf/WsX+sWn7Et0brM +kUJ3TCXJkDhv2/DM+44el1k+1WBO5gUo7Ul5E0u6SNsv+XLTOcr+H9g0cvW0QM8x +AcPs3hEtF10fuFDRXhmnad4HMyjKUJX5p1TLVIZQRan5SQIDAQABo0IwQDAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBQIds3LB/8k9sXN7buQvOKEN0Z19zAOBgNV +HQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEFBQADggEBAKaorSLOAT2mo/9i0Eidi15y +sHhE49wcrwn9I0j6vSrEuVUEtRCjjSfeC4Jj0O7eDDd5QVsisrCaQVymcODU0HfL +I9MA4GxWL+FpDQ3Zqr8hgVDZBqWo/5U30Kr+4rP1mS1FhIrlQgnXdAIv94nYmem8 +J9RHjboNRhx3zxSkHLmkMcScKHQDNP8zGSal6Q10tz6XxnboJ5ajZt3hrvJBW8qY +VoNzcOSGGtIxQbovvi0TWnZvTuhOgQ4/WwMioBK+ZlgRSssDxLQqKi2WF+A5VLxI +03YnnZotBqbJ7DnSq9ufmgsnAjUpsUCV5/nonFWIGUbWtzT1fs45mtk48VH3Tyw= +-----END CERTIFICATE----- + +# Issuer: CN=TWCA Root Certification Authority O=TAIWAN-CA 
OU=Root CA +# Subject: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA +# Label: "TWCA Root Certification Authority" +# Serial: 1 +# MD5 Fingerprint: aa:08:8f:f6:f9:7b:b7:f2:b1:a7:1e:9b:ea:ea:bd:79 +# SHA1 Fingerprint: cf:9e:87:6d:d3:eb:fc:42:26:97:a3:b5:a3:7a:a0:76:a9:06:23:48 +# SHA256 Fingerprint: bf:d8:8f:e1:10:1c:41:ae:3e:80:1b:f8:be:56:35:0e:e9:ba:d1:a6:b9:bd:51:5e:dc:5c:6d:5b:87:11:ac:44 +-----BEGIN CERTIFICATE----- +MIIDezCCAmOgAwIBAgIBATANBgkqhkiG9w0BAQUFADBfMQswCQYDVQQGEwJUVzES +MBAGA1UECgwJVEFJV0FOLUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFU +V0NBIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwODI4MDcyNDMz +WhcNMzAxMjMxMTU1OTU5WjBfMQswCQYDVQQGEwJUVzESMBAGA1UECgwJVEFJV0FO +LUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFUV0NBIFJvb3QgQ2VydGlm +aWNhdGlvbiBBdXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB +AQCwfnK4pAOU5qfeCTiRShFAh6d8WWQUe7UREN3+v9XAu1bihSX0NXIP+FPQQeFE +AcK0HMMxQhZHhTMidrIKbw/lJVBPhYa+v5guEGcevhEFhgWQxFnQfHgQsIBct+HH +K3XLfJ+utdGdIzdjp9xCoi2SBBtQwXu4PhvJVgSLL1KbralW6cH/ralYhzC2gfeX +RfwZVzsrb+RH9JlF/h3x+JejiB03HFyP4HYlmlD4oFT/RJB2I9IyxsOrBr/8+7/z +rX2SYgJbKdM1o5OaQ2RgXbL6Mv87BK9NQGr5x+PvI/1ry+UPizgN7gr8/g+YnzAx +3WxSZfmLgb4i4RxYA7qRG4kHAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqOFsmjd6LWvJPelSDGRjjCDWmujANBgkq +hkiG9w0BAQUFAAOCAQEAPNV3PdrfibqHDAhUaiBQkr6wQT25JmSDCi/oQMCXKCeC +MErJk/9q56YAf4lCmtYR5VPOL8zy2gXE/uJQxDqGfczafhAJO5I1KlOy/usrBdls +XebQ79NqZp4VKIV66IIArB6nCWlWQtNoURi+VJq/REG6Sb4gumlc7rh3zc5sH62D +lhh9DrUUOYTxKOkto557HnpyWoOzeW/vtPzQCqVYT0bf+215WfKEIlKuD8z7fDvn +aspHYcN6+NOSBB+4IIThNlQWx0DeO4pz3N/GCUzf7Nr/1FNCocnyYh0igzyXxfkZ +YiesZSLX0zzG5Y6yU8xJzrww/nsOM5D77dIUkR8Hrw== +-----END CERTIFICATE----- + +# Issuer: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2 +# Subject: O=SECOM Trust Systems CO.,LTD. 
OU=Security Communication RootCA2 +# Label: "Security Communication RootCA2" +# Serial: 0 +# MD5 Fingerprint: 6c:39:7d:a4:0e:55:59:b2:3f:d6:41:b1:12:50:de:43 +# SHA1 Fingerprint: 5f:3b:8c:f2:f8:10:b3:7d:78:b4:ce:ec:19:19:c3:73:34:b9:c7:74 +# SHA256 Fingerprint: 51:3b:2c:ec:b8:10:d4:cd:e5:dd:85:39:1a:df:c6:c2:dd:60:d8:7b:b7:36:d2:b5:21:48:4a:a4:7a:0e:be:f6 +-----BEGIN CERTIFICATE----- +MIIDdzCCAl+gAwIBAgIBADANBgkqhkiG9w0BAQsFADBdMQswCQYDVQQGEwJKUDEl +MCMGA1UEChMcU0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UECxMe +U2VjdXJpdHkgQ29tbXVuaWNhdGlvbiBSb290Q0EyMB4XDTA5MDUyOTA1MDAzOVoX +DTI5MDUyOTA1MDAzOVowXTELMAkGA1UEBhMCSlAxJTAjBgNVBAoTHFNFQ09NIFRy +dXN0IFN5c3RlbXMgQ08uLExURC4xJzAlBgNVBAsTHlNlY3VyaXR5IENvbW11bmlj +YXRpb24gUm9vdENBMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANAV +OVKxUrO6xVmCxF1SrjpDZYBLx/KWvNs2l9amZIyoXvDjChz335c9S672XewhtUGr +zbl+dp+++T42NKA7wfYxEUV0kz1XgMX5iZnK5atq1LXaQZAQwdbWQonCv/Q4EpVM +VAX3NuRFg3sUZdbcDE3R3n4MqzvEFb46VqZab3ZpUql6ucjrappdUtAtCms1FgkQ +hNBqyjoGADdH5H5XTz+L62e4iKrFvlNVspHEfbmwhRkGeC7bYRr6hfVKkaHnFtWO +ojnflLhwHyg/i/xAXmODPIMqGplrz95Zajv8bxbXH/1KEOtOghY6rCcMU/Gt1SSw +awNQwS08Ft1ENCcadfsCAwEAAaNCMEAwHQYDVR0OBBYEFAqFqXdlBZh8QIH4D5cs +OPEK7DzPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3 +DQEBCwUAA4IBAQBMOqNErLlFsceTfsgLCkLfZOoc7llsCLqJX2rKSpWeeo8HxdpF +coJxDjrSzG+ntKEju/Ykn8sX/oymzsLS28yN/HH8AynBbF0zX2S2ZTuJbxh2ePXc +okgfGT+Ok+vx+hfuzU7jBBJV1uXk3fs+BXziHV7Gp7yXT2g69ekuCkO2r1dcYmh8 +t/2jioSgrGK+KwmHNPBqAbubKVY8/gA3zyNs8U6qtnRGEmyR7jTV7JqR50S+kDFy +1UkC9gLl9B/rfNmWVan/7Ir5mUf/NVoCqgTLiluHcSmRvaS0eg29mvVXIwAHIRc/ +SjnRBUkLp7Y3gaVdjKozXoEofKd9J+sAro03 +-----END CERTIFICATE----- + +# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. Authority +# Subject: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. 
Authority +# Label: "Hellenic Academic and Research Institutions RootCA 2011" +# Serial: 0 +# MD5 Fingerprint: 73:9f:4c:4b:73:5b:79:e9:fa:ba:1c:ef:6e:cb:d5:c9 +# SHA1 Fingerprint: fe:45:65:9b:79:03:5b:98:a1:61:b5:51:2e:ac:da:58:09:48:22:4d +# SHA256 Fingerprint: bc:10:4f:15:a4:8b:e7:09:dc:a5:42:a7:e1:d4:b9:df:6f:05:45:27:e8:02:ea:a9:2d:59:54:44:25:8a:fe:71 +-----BEGIN CERTIFICATE----- +MIIEMTCCAxmgAwIBAgIBADANBgkqhkiG9w0BAQUFADCBlTELMAkGA1UEBhMCR1Ix +RDBCBgNVBAoTO0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1 +dGlvbnMgQ2VydC4gQXV0aG9yaXR5MUAwPgYDVQQDEzdIZWxsZW5pYyBBY2FkZW1p +YyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25zIFJvb3RDQSAyMDExMB4XDTExMTIw +NjEzNDk1MloXDTMxMTIwMTEzNDk1MlowgZUxCzAJBgNVBAYTAkdSMUQwQgYDVQQK +EztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25zIENl +cnQuIEF1dGhvcml0eTFAMD4GA1UEAxM3SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl +c2VhcmNoIEluc3RpdHV0aW9ucyBSb290Q0EgMjAxMTCCASIwDQYJKoZIhvcNAQEB +BQADggEPADCCAQoCggEBAKlTAOMupvaO+mDYLZU++CwqVE7NuYRhlFhPjz2L5EPz +dYmNUeTDN9KKiE15HrcS3UN4SoqS5tdI1Q+kOilENbgH9mgdVc04UfCMJDGFr4PJ +fel3r+0ae50X+bOdOFAPplp5kYCvN66m0zH7tSYJnTxa71HFK9+WXesyHgLacEns +bgzImjeN9/E2YEsmLIKe0HjzDQ9jpFEw4fkrJxIH2Oq9GGKYsFk3fb7u8yBRQlqD +75O6aRXxYp2fmTmCobd0LovUxQt7L/DICto9eQqakxylKHJzkUOap9FNhYS5qXSP +FEDH3N6sQWRstBmbAmNtJGSPRLIl6s5ddAxjMlyNh+UCAwEAAaOBiTCBhjAPBgNV +HRMBAf8EBTADAQH/MAsGA1UdDwQEAwIBBjAdBgNVHQ4EFgQUppFC/RNhSiOeCKQp +5dgTBCPuQSUwRwYDVR0eBEAwPqA8MAWCAy5ncjAFggMuZXUwBoIELmVkdTAGggQu +b3JnMAWBAy5ncjAFgQMuZXUwBoEELmVkdTAGgQQub3JnMA0GCSqGSIb3DQEBBQUA +A4IBAQAf73lB4XtuP7KMhjdCSk4cNx6NZrokgclPEg8hwAOXhiVtXdMiKahsog2p +6z0GW5k6x8zDmjR/qw7IThzh+uTczQ2+vyT+bOdrwg3IBp5OjWEopmr95fZi6hg8 +TqBTnbI6nOulnJEWtk2C4AwFSKls9cz4y51JtPACpf1wA+2KIaWuE4ZJwzNzvoc7 +dIsXRSZMFpGD/md9zU1jZ/rzAxKWeAaNsWftjj++n08C9bMJL/NMh98qy5V8Acys +Nnq/onN694/BtZqhFLKPM58N7yLcZnuEvUUXBj08yrl3NI/K6s8/MT7jiOOASSXI +l7WdmplNsDz4SgCbZN2fOUvRJ9e4 +-----END CERTIFICATE----- + +# Issuer: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 +# Subject: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 +# Label: "Actalis Authentication Root CA" +# Serial: 6271844772424770508 +# MD5 Fingerprint: 69:c1:0d:4f:07:a3:1b:c3:fe:56:3d:04:bc:11:f6:a6 +# SHA1 Fingerprint: f3:73:b3:87:06:5a:28:84:8a:f2:f3:4a:ce:19:2b:dd:c7:8e:9c:ac +# SHA256 Fingerprint: 55:92:60:84:ec:96:3a:64:b9:6e:2a:be:01:ce:0b:a8:6a:64:fb:fe:bc:c7:aa:b5:af:c1:55:b3:7f:d7:60:66 +-----BEGIN CERTIFICATE----- +MIIFuzCCA6OgAwIBAgIIVwoRl0LE48wwDQYJKoZIhvcNAQELBQAwazELMAkGA1UE +BhMCSVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8w +MzM1ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290 +IENBMB4XDTExMDkyMjExMjIwMloXDTMwMDkyMjExMjIwMlowazELMAkGA1UEBhMC +SVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8wMzM1 +ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290IENB +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAp8bEpSmkLO/lGMWwUKNv +UTufClrJwkg4CsIcoBh/kbWHuUA/3R1oHwiD1S0eiKD4j1aPbZkCkpAW1V8IbInX +4ay8IMKx4INRimlNAJZaby/ARH6jDuSRzVju3PvHHkVH3Se5CAGfpiEd9UEtL0z9 +KK3giq0itFZljoZUj5NDKd45RnijMCO6zfB9E1fAXdKDa0hMxKufgFpbOr3JpyI/ +gCczWw63igxdBzcIy2zSekciRDXFzMwujt0q7bd9Zg1fYVEiVRvjRuPjPdA1Yprb +rxTIW6HMiRvhMCb8oJsfgadHHwTrozmSBp+Z07/T6k9QnBn+locePGX2oxgkg4YQ +51Q+qDp2JE+BIcXjDwL4k5RHILv+1A7TaLndxHqEguNTVHnd25zS8gebLra8Pu2F +be8lEfKXGkJh90qX6IuxEAf6ZYGyojnP9zz/GPvG8VqLWeICrHuS0E4UT1lF9gxe +KF+w6D9Fz8+vm2/7hNN3WpVvrJSEnu68wEqPSpP4RCHiMUVhUE4Q2OM1fEwZtN4F +v6MGn8i1zeQf1xcGDXqVdFUNaBr8EBtiZJ1t4JWgw5QHVw0U5r0F+7if5t+L4sbn +fpb2U8WANFAoWPASUHEXMLrmeGO89LKtmyuy/uE5jF66CyCU3nuDuP/jVo23Eek7 
+jPKxwV2dpAtMK9myGPW1n0sCAwEAAaNjMGEwHQYDVR0OBBYEFFLYiDrIn3hm7Ynz +ezhwlMkCAjbQMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUUtiIOsifeGbt +ifN7OHCUyQICNtAwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAL +e3KHwGCmSUyIWOYdiPcUZEim2FgKDk8TNd81HdTtBjHIgT5q1d07GjLukD0R0i70 +jsNjLiNmsGe+b7bAEzlgqqI0JZN1Ut6nna0Oh4lScWoWPBkdg/iaKWW+9D+a2fDz +WochcYBNy+A4mz+7+uAwTc+G02UQGRjRlwKxK3JCaKygvU5a2hi/a5iB0P2avl4V +SM0RFbnAKVy06Ij3Pjaut2L9HmLecHgQHEhb2rykOLpn7VU+Xlff1ANATIGk0k9j +pwlCCRT8AKnCgHNPLsBA2RF7SOp6AsDT6ygBJlh0wcBzIm2Tlf05fbsq4/aC4yyX +X04fkZT6/iyj2HYauE2yOE+b+h1IYHkm4vP9qdCa6HCPSXrW5b0KDtst842/6+Ok +fcvHlXHo2qN8xcL4dJIEG4aspCJTQLas/kx2z/uUMsA1n3Y/buWQbqCmJqK4LL7R +K4X9p2jIugErsWx0Hbhzlefut8cl8ABMALJ+tguLHPPAUJ4lueAI3jZm/zel0btU +ZCzJJ7VLkn5l/9Mt4blOvH+kQSGQQXemOR/qnuOf0GZvBeyqdn6/axag67XH/JJU +LysRJyU3eExRarDzzFhdFPFqSBX/wge2sY0PjlxQRrM9vwGYT7JZVEc+NHt4bVaT +LnPqZih4zR0Uv6CPLy64Lo7yFIrM6bV8+2ydDKXhlg== +-----END CERTIFICATE----- + +# Issuer: O=Trustis Limited OU=Trustis FPS Root CA +# Subject: O=Trustis Limited OU=Trustis FPS Root CA +# Label: "Trustis FPS Root CA" +# Serial: 36053640375399034304724988975563710553 +# MD5 Fingerprint: 30:c9:e7:1e:6b:e6:14:eb:65:b2:16:69:20:31:67:4d +# SHA1 Fingerprint: 3b:c0:38:0b:33:c3:f6:a6:0c:86:15:22:93:d9:df:f5:4b:81:c0:04 +# SHA256 Fingerprint: c1:b4:82:99:ab:a5:20:8f:e9:63:0a:ce:55:ca:68:a0:3e:da:5a:51:9c:88:02:a0:d3:a6:73:be:8f:8e:55:7d +-----BEGIN CERTIFICATE----- +MIIDZzCCAk+gAwIBAgIQGx+ttiD5JNM2a/fH8YygWTANBgkqhkiG9w0BAQUFADBF +MQswCQYDVQQGEwJHQjEYMBYGA1UEChMPVHJ1c3RpcyBMaW1pdGVkMRwwGgYDVQQL +ExNUcnVzdGlzIEZQUyBSb290IENBMB4XDTAzMTIyMzEyMTQwNloXDTI0MDEyMTEx +MzY1NFowRTELMAkGA1UEBhMCR0IxGDAWBgNVBAoTD1RydXN0aXMgTGltaXRlZDEc +MBoGA1UECxMTVHJ1c3RpcyBGUFMgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQAD +ggEPADCCAQoCggEBAMVQe547NdDfxIzNjpvto8A2mfRC6qc+gIMPpqdZh8mQRUN+ +AOqGeSoDvT03mYlmt+WKVoaTnGhLaASMk5MCPjDSNzoiYYkchU59j9WvezX2fihH +iTHcDnlkH5nSW7r+f2C/revnPDgpai/lkQtV/+xvWNUtyd5MZnGPDNcE2gfmHhjj +vSkCqPoc4Vu5g6hBSLwacY3nYuUtsuvffM/bq1rKMfFMIvMFE/eC+XN5DL7XSxzA +0RU8k0Fk0ea+IxciAIleH2ulrG6nS4zto3Lmr2NNL4XSFDWaLk6M6jKYKIahkQlB +OrTh4/L68MkKokHdqeMDx4gVOxzUGpTXn2RZEm0CAwEAAaNTMFEwDwYDVR0TAQH/ +BAUwAwEB/zAfBgNVHSMEGDAWgBS6+nEleYtXQSUhhgtx67JkDoshZzAdBgNVHQ4E +FgQUuvpxJXmLV0ElIYYLceuyZA6LIWcwDQYJKoZIhvcNAQEFBQADggEBAH5Y//01 +GX2cGE+esCu8jowU/yyg2kdbw++BLa8F6nRIW/M+TgfHbcWzk88iNVy2P3UnXwmW +zaD+vkAMXBJV+JOCyinpXj9WV4s4NvdFGkwozZ5BuO1WTISkQMi4sKUraXAEasP4 +1BIy+Q7DsdwyhEQsb8tGD+pmQQ9P8Vilpg0ND2HepZ5dfWWhPBfnqFVO76DH7cZE +f1T1o+CP8HxVIo8ptoGj4W1OLBuAZ+ytIJ8MYmHVl/9D7S3B2l0pKoU/rGXuhg8F +jZBf3+6f9L/uHfuY5H+QK4R4EA5sSVPvFVtlRkpdr7r7OnIdzfYliB6XzCGcKQEN +ZetX2fNXlrtIzYE= +-----END CERTIFICATE----- + +# Issuer: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 +# Subject: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 +# Label: "Buypass Class 2 Root CA" +# Serial: 2 +# MD5 Fingerprint: 46:a7:d2:fe:45:fb:64:5a:a8:59:90:9b:78:44:9b:29 +# SHA1 Fingerprint: 49:0a:75:74:de:87:0a:47:fe:58:ee:f6:c7:6b:eb:c6:0b:12:40:99 +# SHA256 Fingerprint: 9a:11:40:25:19:7c:5b:b9:5d:94:e6:3d:55:cd:43:79:08:47:b6:46:b2:3c:df:11:ad:a4:a0:0e:ff:15:fb:48 +-----BEGIN CERTIFICATE----- +MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd +MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg +Q2xhc3MgMiBSb290IENBMB4XDTEwMTAyNjA4MzgwM1oXDTQwMTAyNjA4MzgwM1ow +TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw +HgYDVQQDDBdCdXlwYXNzIENsYXNzIDIgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB +BQADggIPADCCAgoCggIBANfHXvfBB9R3+0Mh9PT1aeTuMgHbo4Yf5FkNuud1g1Lr 
+6hxhFUi7HQfKjK6w3Jad6sNgkoaCKHOcVgb/S2TwDCo3SbXlzwx87vFKu3MwZfPV +L4O2fuPn9Z6rYPnT8Z2SdIrkHJasW4DptfQxh6NR/Md+oW+OU3fUl8FVM5I+GC91 +1K2GScuVr1QGbNgGE41b/+EmGVnAJLqBcXmQRFBoJJRfuLMR8SlBYaNByyM21cHx +MlAQTn/0hpPshNOOvEu/XAFOBz3cFIqUCqTqc/sLUegTBxj6DvEr0VQVfTzh97QZ +QmdiXnfgolXsttlpF9U6r0TtSsWe5HonfOV116rLJeffawrbD02TTqigzXsu8lkB +arcNuAeBfos4GzjmCleZPe4h6KP1DBbdi+w0jpwqHAAVF41og9JwnxgIzRFo1clr +Us3ERo/ctfPYV3Me6ZQ5BL/T3jjetFPsaRyifsSP5BtwrfKi+fv3FmRmaZ9JUaLi +FRhnBkp/1Wy1TbMz4GHrXb7pmA8y1x1LPC5aAVKRCfLf6o3YBkBjqhHk/sM3nhRS +P/TizPJhk9H9Z2vXUq6/aKtAQ6BXNVN48FP4YUIHZMbXb5tMOA1jrGKvNouicwoN +9SG9dKpN6nIDSdvHXx1iY8f93ZHsM+71bbRuMGjeyNYmsHVee7QHIJihdjK4TWxP +AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMmAd+BikoL1Rpzz +uvdMw964o605MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAU18h +9bqwOlI5LJKwbADJ784g7wbylp7ppHR/ehb8t/W2+xUbP6umwHJdELFx7rxP462s +A20ucS6vxOOto70MEae0/0qyexAQH6dXQbLArvQsWdZHEIjzIVEpMMpghq9Gqx3t +OluwlN5E40EIosHsHdb9T7bWR9AUC8rmyrV7d35BH16Dx7aMOZawP5aBQW9gkOLo ++fsicdl9sz1Gv7SEr5AcD48Saq/v7h56rgJKihcrdv6sVIkkLE8/trKnToyokZf7 +KcZ7XC25y2a2t6hbElGFtQl+Ynhw/qlqYLYdDnkM/crqJIByw5c/8nerQyIKx+u2 +DISCLIBrQYoIwOula9+ZEsuK1V6ADJHgJgg2SMX6OBE1/yWDLfJ6v9r9jv6ly0Us +H8SIU653DtmadsWOLB2jutXsMq7Aqqz30XpN69QH4kj3Io6wpJ9qzo6ysmD0oyLQ +I+uUWnpp3Q+/QFesa1lQ2aOZ4W7+jQF5JyMV3pKdewlNWudLSDBaGOYKbeaP4NK7 +5t98biGCwWg5TbSYWGZizEqQXsP6JwSxeRV0mcy+rSDeJmAc61ZRpqPq5KM/p/9h +3PFaTWwyI0PurKju7koSCTxdccK+efrCh2gdC/1cacwG0Jp9VJkqyTkaGa9LKkPz +Y11aWOIv4x3kqdbQCtCev9eBCfHJxyYNrJgWVqA= +-----END CERTIFICATE----- + +# Issuer: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 +# Subject: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 +# Label: "Buypass Class 3 Root CA" +# Serial: 2 +# MD5 Fingerprint: 3d:3b:18:9e:2c:64:5a:e8:d5:88:ce:0e:f9:37:c2:ec +# SHA1 Fingerprint: da:fa:f7:fa:66:84:ec:06:8f:14:50:bd:c7:c2:81:a5:bc:a9:64:57 +# SHA256 Fingerprint: ed:f7:eb:bc:a2:7a:2a:38:4d:38:7b:7d:40:10:c6:66:e2:ed:b4:84:3e:4c:29:b4:ae:1d:5b:93:32:e6:b2:4d +-----BEGIN CERTIFICATE----- +MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd +MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg +Q2xhc3MgMyBSb290IENBMB4XDTEwMTAyNjA4Mjg1OFoXDTQwMTAyNjA4Mjg1OFow +TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw +HgYDVQQDDBdCdXlwYXNzIENsYXNzIDMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB +BQADggIPADCCAgoCggIBAKXaCpUWUOOV8l6ddjEGMnqb8RB2uACatVI2zSRHsJ8Y +ZLya9vrVediQYkwiL944PdbgqOkcLNt4EemOaFEVcsfzM4fkoF0LXOBXByow9c3E +N3coTRiR5r/VUv1xLXA+58bEiuPwKAv0dpihi4dVsjoT/Lc+JzeOIuOoTyrvYLs9 +tznDDgFHmV0ST9tD+leh7fmdvhFHJlsTmKtdFoqwNxxXnUX/iJY2v7vKB3tvh2PX +0DJq1l1sDPGzbjniazEuOQAnFN44wOwZZoYS6J1yFhNkUsepNxz9gjDthBgd9K5c +/3ATAOux9TN6S9ZV+AWNS2mw9bMoNlwUxFFzTWsL8TQH2xc519woe2v1n/MuwU8X +KhDzzMro6/1rqy6any2CbgTUUgGTLT2G/H783+9CHaZr77kgxve9oKeV/afmiSTY +zIw0bOIjL9kSGiG5VZFvC5F5GQytQIgLcOJ60g7YaEi7ghM5EFjp2CoHxhLbWNvS +O1UQRwUVZ2J+GGOmRj8JDlQyXr8NYnon74Do29lLBlo3WiXQCBJ31G8JUJc9yB3D +34xFMFbG02SrZvPAXpacw8Tvw3xrizp5f7NJzz3iiZ+gMEuFuZyUJHmPfWupRWgP +K9Dx2hzLabjKSWJtyNBjYt1gD1iqj6G8BaVmos8bdrKEZLFMOVLAMLrwjEsCsLa3 +AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFEe4zf/lb+74suwv +Tg75JbCOPGvDMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAACAj +QTUEkMJAYmDv4jVM1z+s4jSQuKFvdvoWFqRINyzpkMLyPPgKn9iB5btb2iUspKdV +cSQy9sgL8rxq+JOssgfCX5/bzMiKqr5qb+FJEMwx14C7u8jYog5kV+qi9cKpMRXS +IGrs/CIBKM+GuIAeqcwRpTzyFrNHnfzSgCHEy9BHcEGhyoMZCCxt8l13nIoUE9Q2 +HJLw5QY33KbmkJs4j1xrG0aGQ0JfPgEHU1RdZX33inOhmlRaHylDFCfChQ+1iHsa +O5S3HWCntZznKWlXWpuTekMwGwPXYshApqr8ZORK15FTAaggiG6cX0S5y2CBNOxv 
+033aSF/rtJC8LakcC6wc1aJoIIAE1vyxjy+7SjENSoYc6+I2KSb12tjE8nVhz36u +dmNKekBlk4f4HoCMhuWG1o8O/FMsYOgWYRqiPkN7zTlgVGr18okmAWiDSKIz6MkE +kbIRNBE+6tBDGR8Dk5AM/1E9V/RBbuHLoL7ryWPNbczk+DaqaJ3tvV2XcEQNtg41 +3OEMXbugUZTLfhbrES+jkkXITHHZvMmZUldGL1DPvTVp9D0VzgalLA8+9oG6lLvD +u79leNKGef9JOxqDDPDeeOzI8k1MGt6CKfjBWtrt7uYnXuhF0J0cUahoq0Tj0Itq +4/g7u9xN12TyUb7mqqta6THuBrxzvxNiCp/HuZc= +-----END CERTIFICATE----- + +# Issuer: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Subject: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Label: "T-TeleSec GlobalRoot Class 3" +# Serial: 1 +# MD5 Fingerprint: ca:fb:40:a8:4e:39:92:8a:1d:fe:8e:2f:c4:27:ea:ef +# SHA1 Fingerprint: 55:a6:72:3e:cb:f2:ec:cd:c3:23:74:70:19:9d:2a:be:11:e3:81:d1 +# SHA256 Fingerprint: fd:73:da:d3:1c:64:4f:f1:b4:3b:ef:0c:cd:da:96:71:0b:9c:d9:87:5e:ca:7e:31:70:7a:f3:e9:6d:52:2b:bd +-----BEGIN CERTIFICATE----- +MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx +KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd +BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl +YyBHbG9iYWxSb290IENsYXNzIDMwHhcNMDgxMDAxMTAyOTU2WhcNMzMxMDAxMjM1 +OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy +aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 +ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDMwggEiMA0G +CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC9dZPwYiJvJK7genasfb3ZJNW4t/zN +8ELg63iIVl6bmlQdTQyK9tPPcPRStdiTBONGhnFBSivwKixVA9ZIw+A5OO3yXDw/ +RLyTPWGrTs0NvvAgJ1gORH8EGoel15YUNpDQSXuhdfsaa3Ox+M6pCSzyU9XDFES4 +hqX2iys52qMzVNn6chr3IhUciJFrf2blw2qAsCTz34ZFiP0Zf3WHHx+xGwpzJFu5 +ZeAsVMhg02YXP+HMVDNzkQI6pn97djmiH5a2OK61yJN0HZ65tOVgnS9W0eDrXltM +EnAMbEQgqxHY9Bn20pxSN+f6tsIxO0rUFJmtxxr1XV/6B7h8DR/Wgx6zAgMBAAGj +QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS1 +A/d2O2GCahKqGFPrAyGUv/7OyjANBgkqhkiG9w0BAQsFAAOCAQEAVj3vlNW92nOy +WL6ukK2YJ5f+AbGwUgC4TeQbIXQbfsDuXmkqJa9c1h3a0nnJ85cp4IaH3gRZD/FZ +1GSFS5mvJQQeyUapl96Cshtwn5z2r3Ex3XsFpSzTucpH9sry9uetuUg/vBa3wW30 +6gmv7PO15wWeph6KU1HWk4HMdJP2udqmJQV0eVp+QD6CSyYRMG7hP0HHRwA11fXT +91Q+gT3aSWqas+8QPebrb9HIIkfLzM8BMZLZGOMivgkeGj5asuRrDFR6fUNOuIml +e9eiPZaGzPImNC1qkp2aGtAw4l1OBLBfiyB+d8E9lYLRRpo7PHi4b6HQDWSieB4p +TpPDpFQUWw== +-----END CERTIFICATE----- + +# Issuer: CN=EE Certification Centre Root CA O=AS Sertifitseerimiskeskus +# Subject: CN=EE Certification Centre Root CA O=AS Sertifitseerimiskeskus +# Label: "EE Certification Centre Root CA" +# Serial: 112324828676200291871926431888494945866 +# MD5 Fingerprint: 43:5e:88:d4:7d:1a:4a:7e:fd:84:2e:52:eb:01:d4:6f +# SHA1 Fingerprint: c9:a8:b9:e7:55:80:5e:58:e3:53:77:a7:25:eb:af:c3:7b:27:cc:d7 +# SHA256 Fingerprint: 3e:84:ba:43:42:90:85:16:e7:75:73:c0:99:2f:09:79:ca:08:4e:46:85:68:1f:f1:95:cc:ba:8a:22:9b:8a:76 +-----BEGIN CERTIFICATE----- +MIIEAzCCAuugAwIBAgIQVID5oHPtPwBMyonY43HmSjANBgkqhkiG9w0BAQUFADB1 +MQswCQYDVQQGEwJFRTEiMCAGA1UECgwZQVMgU2VydGlmaXRzZWVyaW1pc2tlc2t1 +czEoMCYGA1UEAwwfRUUgQ2VydGlmaWNhdGlvbiBDZW50cmUgUm9vdCBDQTEYMBYG +CSqGSIb3DQEJARYJcGtpQHNrLmVlMCIYDzIwMTAxMDMwMTAxMDMwWhgPMjAzMDEy +MTcyMzU5NTlaMHUxCzAJBgNVBAYTAkVFMSIwIAYDVQQKDBlBUyBTZXJ0aWZpdHNl +ZXJpbWlza2Vza3VzMSgwJgYDVQQDDB9FRSBDZXJ0aWZpY2F0aW9uIENlbnRyZSBS +b290IENBMRgwFgYJKoZIhvcNAQkBFglwa2lAc2suZWUwggEiMA0GCSqGSIb3DQEB +AQUAA4IBDwAwggEKAoIBAQDIIMDs4MVLqwd4lfNE7vsLDP90jmG7sWLqI9iroWUy +euuOF0+W2Ap7kaJjbMeMTC55v6kF/GlclY1i+blw7cNRfdCT5mzrMEvhvH2/UpvO +bntl8jixwKIy72KyaOBhU8E2lf/slLo2rpwcpzIP5Xy0xm90/XsY6KxX7QYgSzIw 
+WFv9zajmofxwvI6Sc9uXp3whrj3B9UiHbCe9nyV0gVWw93X2PaRka9ZP585ArQ/d +MtO8ihJTmMmJ+xAdTX7Nfh9WDSFwhfYggx/2uh8Ej+p3iDXE/+pOoYtNP2MbRMNE +1CV2yreN1x5KZmTNXMWcg+HCCIia7E6j8T4cLNlsHaFLAgMBAAGjgYowgYcwDwYD +VR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBLyWj7qVhy/ +zQas8fElyalL1BSZMEUGA1UdJQQ+MDwGCCsGAQUFBwMCBggrBgEFBQcDAQYIKwYB +BQUHAwMGCCsGAQUFBwMEBggrBgEFBQcDCAYIKwYBBQUHAwkwDQYJKoZIhvcNAQEF +BQADggEBAHv25MANqhlHt01Xo/6tu7Fq1Q+e2+RjxY6hUFaTlrg4wCQiZrxTFGGV +v9DHKpY5P30osxBAIWrEr7BSdxjhlthWXePdNl4dp1BUoMUq5KqMlIpPnTX/dqQG +E5Gion0ARD9V04I8GtVbvFZMIi5GQ4okQC3zErg7cBqklrkar4dBGmoYDQZPxz5u +uSlNDUmJEYcyW+ZLBMjkXOZ0c5RdFpgTlf7727FE5TpwrDdr5rMzcijJs1eg9gIW +iAYLtqZLICjU3j2LrTcFU3T+bsy8QxdxXvnFzBqpYe73dgzzcvRyrc9yAjYHR8/v +GVCJYMzpJJUPwssd8m92kMfMdcGWxZ0= +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH +# Subject: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH +# Label: "D-TRUST Root Class 3 CA 2 2009" +# Serial: 623603 +# MD5 Fingerprint: cd:e0:25:69:8d:47:ac:9c:89:35:90:f7:fd:51:3d:2f +# SHA1 Fingerprint: 58:e8:ab:b0:36:15:33:fb:80:f7:9b:1b:6d:29:d3:ff:8d:5f:00:f0 +# SHA256 Fingerprint: 49:e7:a4:42:ac:f0:ea:62:87:05:00:54:b5:25:64:b6:50:e4:f4:9e:42:e3:48:d6:aa:38:e0:39:e9:57:b1:c1 +-----BEGIN CERTIFICATE----- +MIIEMzCCAxugAwIBAgIDCYPzMA0GCSqGSIb3DQEBCwUAME0xCzAJBgNVBAYTAkRF +MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMMHkQtVFJVU1QgUm9vdCBD +bGFzcyAzIENBIDIgMjAwOTAeFw0wOTExMDUwODM1NThaFw0yOTExMDUwODM1NTha +ME0xCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMM +HkQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgMjAwOTCCASIwDQYJKoZIhvcNAQEB +BQADggEPADCCAQoCggEBANOySs96R+91myP6Oi/WUEWJNTrGa9v+2wBoqOADER03 +UAifTUpolDWzU9GUY6cgVq/eUXjsKj3zSEhQPgrfRlWLJ23DEE0NkVJD2IfgXU42 +tSHKXzlABF9bfsyjxiupQB7ZNoTWSPOSHjRGICTBpFGOShrvUD9pXRl/RcPHAY9R +ySPocq60vFYJfxLLHLGvKZAKyVXMD9O0Gu1HNVpK7ZxzBCHQqr0ME7UAyiZsxGsM +lFqVlNpQmvH/pStmMaTJOKDfHR+4CS7zp+hnUquVH+BGPtikw8paxTGA6Eian5Rp +/hnd2HN8gcqW3o7tszIFZYQ05ub9VxC1X3a/L7AQDcUCAwEAAaOCARowggEWMA8G +A1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFP3aFMSfMN4hvR5COfyrYyNJ4PGEMA4G +A1UdDwEB/wQEAwIBBjCB0wYDVR0fBIHLMIHIMIGAoH6gfIZ6bGRhcDovL2RpcmVj +dG9yeS5kLXRydXN0Lm5ldC9DTj1ELVRSVVNUJTIwUm9vdCUyMENsYXNzJTIwMyUy +MENBJTIwMiUyMDIwMDksTz1ELVRydXN0JTIwR21iSCxDPURFP2NlcnRpZmljYXRl +cmV2b2NhdGlvbmxpc3QwQ6BBoD+GPWh0dHA6Ly93d3cuZC10cnVzdC5uZXQvY3Js +L2QtdHJ1c3Rfcm9vdF9jbGFzc18zX2NhXzJfMjAwOS5jcmwwDQYJKoZIhvcNAQEL +BQADggEBAH+X2zDI36ScfSF6gHDOFBJpiBSVYEQBrLLpME+bUMJm2H6NMLVwMeni +acfzcNsgFYbQDfC+rAF1hM5+n02/t2A7nPPKHeJeaNijnZflQGDSNiH+0LS4F9p0 +o3/U37CYAqxva2ssJSRyoWXuJVrl5jLn8t+rSfrzkGkj2wTZ51xY/GXUl77M/C4K +zCUqNQT4YJEVdT1B/yMfGchs64JTBKbkTCJNjYy6zltz7GRUUG3RnFX7acM2w4y8 +PIWmawomDeCTmGCufsYkl4phX5GOZpIJhzbNi5stPvZR1FDUWSi9g/LMKHtThm3Y +Johw1+qRzT65ysCQblrGXnRl11z+o+I= +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH +# Subject: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH +# Label: "D-TRUST Root Class 3 CA 2 EV 2009" +# Serial: 623604 +# MD5 Fingerprint: aa:c6:43:2c:5e:2d:cd:c4:34:c0:50:4f:11:02:4f:b6 +# SHA1 Fingerprint: 96:c9:1b:0b:95:b4:10:98:42:fa:d0:d8:22:79:fe:60:fa:b9:16:83 +# SHA256 Fingerprint: ee:c5:49:6b:98:8c:e9:86:25:b9:34:09:2e:ec:29:08:be:d0:b0:f3:16:c2:d4:73:0c:84:ea:f1:f3:d3:48:81 +-----BEGIN CERTIFICATE----- +MIIEQzCCAyugAwIBAgIDCYP0MA0GCSqGSIb3DQEBCwUAMFAxCzAJBgNVBAYTAkRF +MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNVBAMMIUQtVFJVU1QgUm9vdCBD +bGFzcyAzIENBIDIgRVYgMjAwOTAeFw0wOTExMDUwODUwNDZaFw0yOTExMDUwODUw +NDZaMFAxCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNV 
+BAMMIUQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgRVYgMjAwOTCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAJnxhDRwui+3MKCOvXwEz75ivJn9gpfSegpn +ljgJ9hBOlSJzmY3aFS3nBfwZcyK3jpgAvDw9rKFs+9Z5JUut8Mxk2og+KbgPCdM0 +3TP1YtHhzRnp7hhPTFiu4h7WDFsVWtg6uMQYZB7jM7K1iXdODL/ZlGsTl28So/6Z +qQTMFexgaDbtCHu39b+T7WYxg4zGcTSHThfqr4uRjRxWQa4iN1438h3Z0S0NL2lR +p75mpoo6Kr3HGrHhFPC+Oh25z1uxav60sUYgovseO3Dvk5h9jHOW8sXvhXCtKSb8 +HgQ+HKDYD8tSg2J87otTlZCpV6LqYQXY+U3EJ/pure3511H3a6UCAwEAAaOCASQw +ggEgMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNOUikxiEyoZLsyvcop9Ntea +HNxnMA4GA1UdDwEB/wQEAwIBBjCB3QYDVR0fBIHVMIHSMIGHoIGEoIGBhn9sZGFw +Oi8vZGlyZWN0b3J5LmQtdHJ1c3QubmV0L0NOPUQtVFJVU1QlMjBSb290JTIwQ2xh +c3MlMjAzJTIwQ0ElMjAyJTIwRVYlMjAyMDA5LE89RC1UcnVzdCUyMEdtYkgsQz1E +RT9jZXJ0aWZpY2F0ZXJldm9jYXRpb25saXN0MEagRKBChkBodHRwOi8vd3d3LmQt +dHJ1c3QubmV0L2NybC9kLXRydXN0X3Jvb3RfY2xhc3NfM19jYV8yX2V2XzIwMDku +Y3JsMA0GCSqGSIb3DQEBCwUAA4IBAQA07XtaPKSUiO8aEXUHL7P+PPoeUSbrh/Yp +3uDx1MYkCenBz1UbtDDZzhr+BlGmFaQt77JLvyAoJUnRpjZ3NOhk31KxEcdzes05 +nsKtjHEh8lprr988TlWvsoRlFIm5d8sqMb7Po23Pb0iUMkZv53GMoKaEGTcH8gNF +CSuGdXzfX2lXANtu2KZyIktQ1HWYVt+3GP9DQ1CuekR78HlR10M9p9OB0/DJT7na +xpeG0ILD5EJt/rDiZE4OJudANCa1CInXCGNjOCd1HjPqbqjdn5lPdE2BiYBL3ZqX +KVwvvoFBuYz/6n1gBp7N1z3TLqMVvKjmJuVvw9y4AyHqnxbxLFS1 +-----END CERTIFICATE----- + +# Issuer: CN=CA Disig Root R2 O=Disig a.s. +# Subject: CN=CA Disig Root R2 O=Disig a.s. +# Label: "CA Disig Root R2" +# Serial: 10572350602393338211 +# MD5 Fingerprint: 26:01:fb:d8:27:a7:17:9a:45:54:38:1a:43:01:3b:03 +# SHA1 Fingerprint: b5:61:eb:ea:a4:de:e4:25:4b:69:1a:98:a5:57:47:c2:34:c7:d9:71 +# SHA256 Fingerprint: e2:3d:4a:03:6d:7b:70:e9:f5:95:b1:42:20:79:d2:b9:1e:df:bb:1f:b6:51:a0:63:3e:aa:8a:9d:c5:f8:07:03 +-----BEGIN CERTIFICATE----- +MIIFaTCCA1GgAwIBAgIJAJK4iNuwisFjMA0GCSqGSIb3DQEBCwUAMFIxCzAJBgNV +BAYTAlNLMRMwEQYDVQQHEwpCcmF0aXNsYXZhMRMwEQYDVQQKEwpEaXNpZyBhLnMu +MRkwFwYDVQQDExBDQSBEaXNpZyBSb290IFIyMB4XDTEyMDcxOTA5MTUzMFoXDTQy +MDcxOTA5MTUzMFowUjELMAkGA1UEBhMCU0sxEzARBgNVBAcTCkJyYXRpc2xhdmEx +EzARBgNVBAoTCkRpc2lnIGEucy4xGTAXBgNVBAMTEENBIERpc2lnIFJvb3QgUjIw +ggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCio8QACdaFXS1tFPbCw3Oe +NcJxVX6B+6tGUODBfEl45qt5WDza/3wcn9iXAng+a0EE6UG9vgMsRfYvZNSrXaNH +PWSb6WiaxswbP7q+sos0Ai6YVRn8jG+qX9pMzk0DIaPY0jSTVpbLTAwAFjxfGs3I +x2ymrdMxp7zo5eFm1tL7A7RBZckQrg4FY8aAamkw/dLukO8NJ9+flXP04SXabBbe +QTg06ov80egEFGEtQX6sx3dOy1FU+16SGBsEWmjGycT6txOgmLcRK7fWV8x8nhfR +yyX+hk4kLlYMeE2eARKmK6cBZW58Yh2EhN/qwGu1pSqVg8NTEQxzHQuyRpDRQjrO +QG6Vrf/GlK1ul4SOfW+eioANSW1z4nuSHsPzwfPrLgVv2RvPN3YEyLRa5Beny912 +H9AZdugsBbPWnDTYltxhh5EF5EQIM8HauQhl1K6yNg3ruji6DOWbnuuNZt2Zz9aJ +QfYEkoopKW1rOhzndX0CcQ7zwOe9yxndnWCywmZgtrEE7snmhrmaZkCo5xHtgUUD +i/ZnWejBBhG93c+AAk9lQHhcR1DIm+YfgXvkRKhbhZri3lrVx/k6RGZL5DJUfORs +nLMOPReisjQS1n6yqEm70XooQL6iFh/f5DcfEXP7kAplQ6INfPgGAVUzfbANuPT1 +rqVCV3w2EYx7XsQDnYx5nQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1Ud +DwEB/wQEAwIBBjAdBgNVHQ4EFgQUtZn4r7CU9eMg1gqtzk5WpC5uQu0wDQYJKoZI +hvcNAQELBQADggIBACYGXnDnZTPIgm7ZnBc6G3pmsgH2eDtpXi/q/075KMOYKmFM +tCQSin1tERT3nLXK5ryeJ45MGcipvXrA1zYObYVybqjGom32+nNjf7xueQgcnYqf +GopTpti72TVVsRHFqQOzVju5hJMiXn7B9hJSi+osZ7z+Nkz1uM/Rs0mSO9MpDpkb +lvdhuDvEK7Z4bLQjb/D907JedR+Zlais9trhxTF7+9FGs9K8Z7RiVLoJ92Owk6Ka ++elSLotgEqv89WBW7xBci8QaQtyDW2QOy7W81k/BfDxujRNt+3vrMNDcTa/F1bal +TFtxyegxvug4BkihGuLq0t4SOVga/4AOgnXmt8kHbA7v/zjxmHHEt38OFdAlab0i +nSvtBfZGR6ztwPDUO+Ls7pZbkBNOHlY667DvlruWIxG68kOGdGSVyCh13x01utI3 +gzhTODY7z2zp+WsO0PsE6E9312UBeIYMej4hYvF/Y3EMyZ9E26gnonW+boE+18Dr +G5gPcFw0sorMwIUY6256s/daoQe/qUKS82Ail+QUoQebTnbAjn39pCXHR+3/H3Os 
+zMOl6W8KjptlwlCFtaOgUxLMVYdh84GuEEZhvUQhuMI9dM9+JDX6HAcOmz0iyu8x +L4ysEr3vQCj8KWefshNPZiTEUxnpHikV7+ZtsH8tZ/3zbBt1RqPlShfppNcL +-----END CERTIFICATE----- + +# Issuer: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV +# Subject: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV +# Label: "ACCVRAIZ1" +# Serial: 6828503384748696800 +# MD5 Fingerprint: d0:a0:5a:ee:05:b6:09:94:21:a1:7d:f1:b2:29:82:02 +# SHA1 Fingerprint: 93:05:7a:88:15:c6:4f:ce:88:2f:fa:91:16:52:28:78:bc:53:64:17 +# SHA256 Fingerprint: 9a:6e:c0:12:e1:a7:da:9d:be:34:19:4d:47:8a:d7:c0:db:18:22:fb:07:1d:f1:29:81:49:6e:d1:04:38:41:13 +-----BEGIN CERTIFICATE----- +MIIH0zCCBbugAwIBAgIIXsO3pkN/pOAwDQYJKoZIhvcNAQEFBQAwQjESMBAGA1UE +AwwJQUNDVlJBSVoxMRAwDgYDVQQLDAdQS0lBQ0NWMQ0wCwYDVQQKDARBQ0NWMQsw +CQYDVQQGEwJFUzAeFw0xMTA1MDUwOTM3MzdaFw0zMDEyMzEwOTM3MzdaMEIxEjAQ +BgNVBAMMCUFDQ1ZSQUlaMTEQMA4GA1UECwwHUEtJQUNDVjENMAsGA1UECgwEQUND +VjELMAkGA1UEBhMCRVMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCb +qau/YUqXry+XZpp0X9DZlv3P4uRm7x8fRzPCRKPfmt4ftVTdFXxpNRFvu8gMjmoY +HtiP2Ra8EEg2XPBjs5BaXCQ316PWywlxufEBcoSwfdtNgM3802/J+Nq2DoLSRYWo +G2ioPej0RGy9ocLLA76MPhMAhN9KSMDjIgro6TenGEyxCQ0jVn8ETdkXhBilyNpA +lHPrzg5XPAOBOp0KoVdDaaxXbXmQeOW1tDvYvEyNKKGno6e6Ak4l0Squ7a4DIrhr +IA8wKFSVf+DuzgpmndFALW4ir50awQUZ0m/A8p/4e7MCQvtQqR0tkw8jq8bBD5L/ +0KIV9VMJcRz/RROE5iZe+OCIHAr8Fraocwa48GOEAqDGWuzndN9wrqODJerWx5eH +k6fGioozl2A3ED6XPm4pFdahD9GILBKfb6qkxkLrQaLjlUPTAYVtjrs78yM2x/47 +4KElB0iryYl0/wiPgL/AlmXz7uxLaL2diMMxs0Dx6M/2OLuc5NF/1OVYm3z61PMO +m3WR5LpSLhl+0fXNWhn8ugb2+1KoS5kE3fj5tItQo05iifCHJPqDQsGH+tUtKSpa +cXpkatcnYGMN285J9Y0fkIkyF/hzQ7jSWpOGYdbhdQrqeWZ2iE9x6wQl1gpaepPl +uUsXQA+xtrn13k/c4LOsOxFwYIRKQ26ZIMApcQrAZQIDAQABo4ICyzCCAscwfQYI +KwYBBQUHAQEEcTBvMEwGCCsGAQUFBzAChkBodHRwOi8vd3d3LmFjY3YuZXMvZmls +ZWFkbWluL0FyY2hpdm9zL2NlcnRpZmljYWRvcy9yYWl6YWNjdjEuY3J0MB8GCCsG +AQUFBzABhhNodHRwOi8vb2NzcC5hY2N2LmVzMB0GA1UdDgQWBBTSh7Tj3zcnk1X2 +VuqB5TbMjB4/vTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFNKHtOPfNyeT +VfZW6oHlNsyMHj+9MIIBcwYDVR0gBIIBajCCAWYwggFiBgRVHSAAMIIBWDCCASIG +CCsGAQUFBwICMIIBFB6CARAAQQB1AHQAbwByAGkAZABhAGQAIABkAGUAIABDAGUA +cgB0AGkAZgBpAGMAYQBjAGkA8wBuACAAUgBhAO0AegAgAGQAZQAgAGwAYQAgAEEA +QwBDAFYAIAAoAEEAZwBlAG4AYwBpAGEAIABkAGUAIABUAGUAYwBuAG8AbABvAGcA +7QBhACAAeQAgAEMAZQByAHQAaQBmAGkAYwBhAGMAaQDzAG4AIABFAGwAZQBjAHQA +cgDzAG4AaQBjAGEALAAgAEMASQBGACAAUQA0ADYAMAAxADEANQA2AEUAKQAuACAA +QwBQAFMAIABlAG4AIABoAHQAdABwADoALwAvAHcAdwB3AC4AYQBjAGMAdgAuAGUA +czAwBggrBgEFBQcCARYkaHR0cDovL3d3dy5hY2N2LmVzL2xlZ2lzbGFjaW9uX2Mu +aHRtMFUGA1UdHwROMEwwSqBIoEaGRGh0dHA6Ly93d3cuYWNjdi5lcy9maWxlYWRt +aW4vQXJjaGl2b3MvY2VydGlmaWNhZG9zL3JhaXphY2N2MV9kZXIuY3JsMA4GA1Ud +DwEB/wQEAwIBBjAXBgNVHREEEDAOgQxhY2N2QGFjY3YuZXMwDQYJKoZIhvcNAQEF +BQADggIBAJcxAp/n/UNnSEQU5CmH7UwoZtCPNdpNYbdKl02125DgBS4OxnnQ8pdp +D70ER9m+27Up2pvZrqmZ1dM8MJP1jaGo/AaNRPTKFpV8M9xii6g3+CfYCS0b78gU +JyCpZET/LtZ1qmxNYEAZSUNUY9rizLpm5U9EelvZaoErQNV/+QEnWCzI7UiRfD+m +AM/EKXMRNt6GGT6d7hmKG9Ww7Y49nCrADdg9ZuM8Db3VlFzi4qc1GwQA9j9ajepD +vV+JHanBsMyZ4k0ACtrJJ1vnE5Bc5PUzolVt3OAJTS+xJlsndQAJxGJ3KQhfnlms +tn6tn1QwIgPBHnFk/vk4CpYY3QIUrCPLBhwepH2NDd4nQeit2hW3sCPdK6jT2iWH +7ehVRE2I9DZ+hJp4rPcOVkkO1jMl1oRQQmwgEh0q1b688nCBpHBgvgW1m54ERL5h +I6zppSSMEYCUWqKiuUnSwdzRp+0xESyeGabu4VXhwOrPDYTkF7eifKXeVSUG7szA +h1xA2syVP1XgNce4hL60Xc16gwFy7ofmXx2utYXGJt/mwZrpHgJHnyqobalbz+xF +d3+YJ5oyXSrjhO7FmGYvliAd3djDJ9ew+f7Zfc3Qn48LFFhRny+Lwzgt3uiP1o2H +pPVWQxaZLPSkVrQ0uGE3ycJYgBugl6H8WY3pEfbRD0tVNEYqi4Y7 +-----END CERTIFICATE----- + +# Issuer: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA +# Subject: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA +# Label: "TWCA Global Root CA" +# Serial: 3262 +# MD5 
Fingerprint: f9:03:7e:cf:e6:9e:3c:73:7a:2a:90:07:69:ff:2b:96 +# SHA1 Fingerprint: 9c:bb:48:53:f6:a4:f6:d3:52:a4:e8:32:52:55:60:13:f5:ad:af:65 +# SHA256 Fingerprint: 59:76:90:07:f7:68:5d:0f:cd:50:87:2f:9f:95:d5:75:5a:5b:2b:45:7d:81:f3:69:2b:61:0a:98:67:2f:0e:1b +-----BEGIN CERTIFICATE----- +MIIFQTCCAymgAwIBAgICDL4wDQYJKoZIhvcNAQELBQAwUTELMAkGA1UEBhMCVFcx +EjAQBgNVBAoTCVRBSVdBTi1DQTEQMA4GA1UECxMHUm9vdCBDQTEcMBoGA1UEAxMT +VFdDQSBHbG9iYWwgUm9vdCBDQTAeFw0xMjA2MjcwNjI4MzNaFw0zMDEyMzExNTU5 +NTlaMFExCzAJBgNVBAYTAlRXMRIwEAYDVQQKEwlUQUlXQU4tQ0ExEDAOBgNVBAsT +B1Jvb3QgQ0ExHDAaBgNVBAMTE1RXQ0EgR2xvYmFsIFJvb3QgQ0EwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCwBdvI64zEbooh745NnHEKH1Jw7W2CnJfF +10xORUnLQEK1EjRsGcJ0pDFfhQKX7EMzClPSnIyOt7h52yvVavKOZsTuKwEHktSz +0ALfUPZVr2YOy+BHYC8rMjk1Ujoog/h7FsYYuGLWRyWRzvAZEk2tY/XTP3VfKfCh +MBwqoJimFb3u/Rk28OKRQ4/6ytYQJ0lM793B8YVwm8rqqFpD/G2Gb3PpN0Wp8DbH +zIh1HrtsBv+baz4X7GGqcXzGHaL3SekVtTzWoWH1EfcFbx39Eb7QMAfCKbAJTibc +46KokWofwpFFiFzlmLhxpRUZyXx1EcxwdE8tmx2RRP1WKKD+u4ZqyPpcC1jcxkt2 +yKsi2XMPpfRaAok/T54igu6idFMqPVMnaR1sjjIsZAAmY2E2TqNGtz99sy2sbZCi +laLOz9qC5wc0GZbpuCGqKX6mOL6OKUohZnkfs8O1CWfe1tQHRvMq2uYiN2DLgbYP +oA/pyJV/v1WRBXrPPRXAb94JlAGD1zQbzECl8LibZ9WYkTunhHiVJqRaCPgrdLQA +BDzfuBSO6N+pjWxnkjMdwLfS7JLIvgm/LCkFbwJrnu+8vyq8W8BQj0FwcYeyTbcE +qYSjMq+u7msXi7Kx/mzhkIyIqJdIzshNy/MGz19qCkKxHh53L46g5pIOBvwFItIm +4TFRfTLcDwIDAQABoyMwITAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB +/zANBgkqhkiG9w0BAQsFAAOCAgEAXzSBdu+WHdXltdkCY4QWwa6gcFGn90xHNcgL +1yg9iXHZqjNB6hQbbCEAwGxCGX6faVsgQt+i0trEfJdLjbDorMjupWkEmQqSpqsn +LhpNgb+E1HAerUf+/UqdM+DyucRFCCEK2mlpc3INvjT+lIutwx4116KD7+U4x6WF +H6vPNOw/KP4M8VeGTslV9xzU2KV9Bnpv1d8Q34FOIWWxtuEXeZVFBs5fzNxGiWNo +RI2T9GRwoD2dKAXDOXC4Ynsg/eTb6QihuJ49CcdP+yz4k3ZB3lLg4VfSnQO8d57+ +nile98FRYB/e2guyLXW3Q0iT5/Z5xoRdgFlglPx4mI88k1HtQJAH32RjJMtOcQWh +15QaiDLxInQirqWm2BJpTGCjAu4r7NRjkgtevi92a6O2JryPA9gK8kxkRr05YuWW +6zRjESjMlfGt7+/cgFhI6Uu46mWs6fyAtbXIRfmswZ/ZuepiiI7E8UuDEq3mi4TW +nsLrgxifarsbJGAzcMzs9zLzXNl5fe+epP7JI8Mk7hWSsT2RTyaGvWZzJBPqpK5j +wa19hAM8EHiGG3njxPPyBJUgriOCxLM6AGK/5jYk4Ve6xx6QddVfP5VhK8E7zeWz +aGHQRiapIVJpLesux+t3zqY6tQMzT3bR51xUAV3LePTJDL/PEo4XLSNolOer/qmy +KwbQBM0= +-----END CERTIFICATE----- + +# Issuer: CN=TeliaSonera Root CA v1 O=TeliaSonera +# Subject: CN=TeliaSonera Root CA v1 O=TeliaSonera +# Label: "TeliaSonera Root CA v1" +# Serial: 199041966741090107964904287217786801558 +# MD5 Fingerprint: 37:41:49:1b:18:56:9a:26:f5:ad:c2:66:fb:40:a5:4c +# SHA1 Fingerprint: 43:13:bb:96:f1:d5:86:9b:c1:4e:6a:92:f6:cf:f6:34:69:87:82:37 +# SHA256 Fingerprint: dd:69:36:fe:21:f8:f0:77:c1:23:a1:a5:21:c1:22:24:f7:22:55:b7:3e:03:a7:26:06:93:e8:a2:4b:0f:a3:89 +-----BEGIN CERTIFICATE----- +MIIFODCCAyCgAwIBAgIRAJW+FqD3LkbxezmCcvqLzZYwDQYJKoZIhvcNAQEFBQAw +NzEUMBIGA1UECgwLVGVsaWFTb25lcmExHzAdBgNVBAMMFlRlbGlhU29uZXJhIFJv +b3QgQ0EgdjEwHhcNMDcxMDE4MTIwMDUwWhcNMzIxMDE4MTIwMDUwWjA3MRQwEgYD +VQQKDAtUZWxpYVNvbmVyYTEfMB0GA1UEAwwWVGVsaWFTb25lcmEgUm9vdCBDQSB2 +MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMK+6yfwIaPzaSZVfp3F +VRaRXP3vIb9TgHot0pGMYzHw7CTww6XScnwQbfQ3t+XmfHnqjLWCi65ItqwA3GV1 +7CpNX8GH9SBlK4GoRz6JI5UwFpB/6FcHSOcZrr9FZ7E3GwYq/t75rH2D+1665I+X +Z75Ljo1kB1c4VWk0Nj0TSO9P4tNmHqTPGrdeNjPUtAa9GAH9d4RQAEX1jF3oI7x+ +/jXh7VB7qTCNGdMJjmhnXb88lxhTuylixcpecsHHltTbLaC0H2kD7OriUPEMPPCs +81Mt8Bz17Ww5OXOAFshSsCPN4D7c3TxHoLs1iuKYaIu+5b9y7tL6pe0S7fyYGKkm +dtwoSxAgHNN/Fnct7W+A90m7UwW7XWjH1Mh1Fj+JWov3F0fUTPHSiXk+TT2YqGHe +Oh7S+F4D4MHJHIzTjU3TlTazN19jY5szFPAtJmtTfImMMsJu7D0hADnJoWjiUIMu +sDor8zagrC/kb2HCUQk5PotTubtn2txTuXZZNp1D5SDgPTJghSJRt8czu90VL6R4 
+pgd7gUY2BIbdeTXHlSw7sKMXNeVzH7RcWe/a6hBle3rQf5+ztCo3O3CLm1u5K7fs +slESl1MpWtTwEhDcTwK7EpIvYtQ/aUN8Ddb8WHUBiJ1YFkveupD/RwGJBmr2X7KQ +arMCpgKIv7NHfirZ1fpoeDVNAgMBAAGjPzA9MA8GA1UdEwEB/wQFMAMBAf8wCwYD +VR0PBAQDAgEGMB0GA1UdDgQWBBTwj1k4ALP1j5qWDNXr+nuqF+gTEjANBgkqhkiG +9w0BAQUFAAOCAgEAvuRcYk4k9AwI//DTDGjkk0kiP0Qnb7tt3oNmzqjMDfz1mgbl +dxSR651Be5kqhOX//CHBXfDkH1e3damhXwIm/9fH907eT/j3HEbAek9ALCI18Bmx +0GtnLLCo4MBANzX2hFxc469CeP6nyQ1Q6g2EdvZR74NTxnr/DlZJLo961gzmJ1Tj +TQpgcmLNkQfWpb/ImWvtxBnmq0wROMVvMeJuScg/doAmAyYp4Db29iBT4xdwNBed +Y2gea+zDTYa4EzAvXUYNR0PVG6pZDrlcjQZIrXSHX8f8MVRBE+LHIQ6e4B4N4cB7 +Q4WQxYpYxmUKeFfyxiMPAdkgS94P+5KFdSpcc41teyWRyu5FrgZLAMzTsVlQ2jqI +OylDRl6XK1TOU2+NSueW+r9xDkKLfP0ooNBIytrEgUy7onOTJsjrDNYmiLbAJM+7 +vVvrdX3pCI6GMyx5dwlppYn8s3CQh3aP0yK7Qs69cwsgJirQmz1wHiRszYd2qReW +t88NkvuOGKmYSdGe/mBEciG5Ge3C9THxOUiIkCR1VBatzvT4aRRkOfujuLpwQMcn +HL/EVlP6Y2XQ8xwOFvVrhlhNGNTkDY6lnVuR3HYkUD/GKvvZt5y11ubQ2egZixVx +SK236thZiNSQvxaz2emsWWFUyBy6ysHK4bkgTI86k4mloMy/0/Z1pHWWbVY= +-----END CERTIFICATE----- + +# Issuer: CN=E-Tugra Certification Authority O=E-Tu\u011fra EBG Bili\u015fim Teknolojileri ve Hizmetleri A.\u015e. OU=E-Tugra Sertifikasyon Merkezi +# Subject: CN=E-Tugra Certification Authority O=E-Tu\u011fra EBG Bili\u015fim Teknolojileri ve Hizmetleri A.\u015e. OU=E-Tugra Sertifikasyon Merkezi +# Label: "E-Tugra Certification Authority" +# Serial: 7667447206703254355 +# MD5 Fingerprint: b8:a1:03:63:b0:bd:21:71:70:8a:6f:13:3a:bb:79:49 +# SHA1 Fingerprint: 51:c6:e7:08:49:06:6e:f3:92:d4:5c:a0:0d:6d:a3:62:8f:c3:52:39 +# SHA256 Fingerprint: b0:bf:d5:2b:b0:d7:d9:bd:92:bf:5d:4d:c1:3d:a2:55:c0:2c:54:2f:37:83:65:ea:89:39:11:f5:5e:55:f2:3c +-----BEGIN CERTIFICATE----- +MIIGSzCCBDOgAwIBAgIIamg+nFGby1MwDQYJKoZIhvcNAQELBQAwgbIxCzAJBgNV +BAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+BgNVBAoMN0UtVHXEn3JhIEVCRyBC +aWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhpem1ldGxlcmkgQS7Fni4xJjAkBgNV +BAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBNZXJrZXppMSgwJgYDVQQDDB9FLVR1 +Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTEzMDMwNTEyMDk0OFoXDTIz +MDMwMzEyMDk0OFowgbIxCzAJBgNVBAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+ +BgNVBAoMN0UtVHXEn3JhIEVCRyBCaWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhp +em1ldGxlcmkgQS7Fni4xJjAkBgNVBAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBN +ZXJrZXppMSgwJgYDVQQDDB9FLVR1Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5 +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA4vU/kwVRHoViVF56C/UY +B4Oufq9899SKa6VjQzm5S/fDxmSJPZQuVIBSOTkHS0vdhQd2h8y/L5VMzH2nPbxH +D5hw+IyFHnSOkm0bQNGZDbt1bsipa5rAhDGvykPL6ys06I+XawGb1Q5KCKpbknSF +Q9OArqGIW66z6l7LFpp3RMih9lRozt6Plyu6W0ACDGQXwLWTzeHxE2bODHnv0ZEo +q1+gElIwcxmOj+GMB6LDu0rw6h8VqO4lzKRG+Bsi77MOQ7osJLjFLFzUHPhdZL3D +k14opz8n8Y4e0ypQBaNV2cvnOVPAmJ6MVGKLJrD3fY185MaeZkJVgkfnsliNZvcH +fC425lAcP9tDJMW/hkd5s3kc91r0E+xs+D/iWR+V7kI+ua2oMoVJl0b+SzGPWsut +dEcf6ZG33ygEIqDUD13ieU/qbIWGvaimzuT6w+Gzrt48Ue7LE3wBf4QOXVGUnhMM +ti6lTPk5cDZvlsouDERVxcr6XQKj39ZkjFqzAQqptQpHF//vkUAqjqFGOjGY5RH8 +zLtJVor8udBhmm9lbObDyz51Sf6Pp+KJxWfXnUYTTjF2OySznhFlhqt/7x3U+Lzn +rFpct1pHXFXOVbQicVtbC/DP3KBhZOqp12gKY6fgDT+gr9Oq0n7vUaDmUStVkhUX +U8u3Zg5mTPj5dUyQ5xJwx0UCAwEAAaNjMGEwHQYDVR0OBBYEFC7j27JJ0JxUeVz6 +Jyr+zE7S6E5UMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAULuPbsknQnFR5 +XPonKv7MTtLoTlQwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAF +Nzr0TbdF4kV1JI+2d1LoHNgQk2Xz8lkGpD4eKexd0dCrfOAKkEh47U6YA5n+KGCR +HTAduGN8qOY1tfrTYXbm1gdLymmasoR6d5NFFxWfJNCYExL/u6Au/U5Mh/jOXKqY +GwXgAEZKgoClM4so3O0409/lPun++1ndYYRP0lSWE2ETPo+Aab6TR7U1Q9Jauz1c +77NCR807VRMGsAnb/WP2OogKmW9+4c4bU2pEZiNRCHu8W1Ki/QY3OEBhj0qWuJA3 ++GbHeJAAFS6LrVE1Uweoa2iu+U48BybNCAVwzDk/dr2l02cmAYamU9JgO3xDf1WK 
+vJUawSg5TB9D0pH0clmKuVb8P7Sd2nCcdlqMQ1DujjByTd//SffGqWfZbawCEeI6 +FiWnWAjLb1NBnEg4R2gz0dfHj9R0IdTDBZB6/86WiLEVKV0jq9BgoRJP3vQXzTLl +yb/IQ639Lo7xr+L0mPoSHyDYwKcMhcWQ9DstliaxLL5Mq+ux0orJ23gTDx4JnW2P +AJ8C2sH6H3p6CcRK5ogql5+Ji/03X186zjhZhkuvcQu02PJwT58yE+Owp1fl2tpD +y4Q08ijE6m30Ku/Ba3ba+367hTzSU8JNvnHhRdH9I2cNE3X7z2VnIp2usAnRCf8d +NL/+I5c30jn6PQ0GC7TbO6Orb1wdtn7os4I07QZcJA== +-----END CERTIFICATE----- + +# Issuer: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Subject: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Label: "T-TeleSec GlobalRoot Class 2" +# Serial: 1 +# MD5 Fingerprint: 2b:9b:9e:e4:7b:6c:1f:00:72:1a:cc:c1:77:79:df:6a +# SHA1 Fingerprint: 59:0d:2d:7d:88:4f:40:2e:61:7e:a5:62:32:17:65:cf:17:d8:94:e9 +# SHA256 Fingerprint: 91:e2:f5:78:8d:58:10:eb:a7:ba:58:73:7d:e1:54:8a:8e:ca:cd:01:45:98:bc:0b:14:3e:04:1b:17:05:25:52 +-----BEGIN CERTIFICATE----- +MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx +KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd +BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl +YyBHbG9iYWxSb290IENsYXNzIDIwHhcNMDgxMDAxMTA0MDE0WhcNMzMxMDAxMjM1 +OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy +aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 +ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDIwggEiMA0G +CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCqX9obX+hzkeXaXPSi5kfl82hVYAUd +AqSzm1nzHoqvNK38DcLZSBnuaY/JIPwhqgcZ7bBcrGXHX+0CfHt8LRvWurmAwhiC +FoT6ZrAIxlQjgeTNuUk/9k9uN0goOA/FvudocP05l03Sx5iRUKrERLMjfTlH6VJi +1hKTXrcxlkIF+3anHqP1wvzpesVsqXFP6st4vGCvx9702cu+fjOlbpSD8DT6Iavq +jnKgP6TeMFvvhk1qlVtDRKgQFRzlAVfFmPHmBiiRqiDFt1MmUUOyCxGVWOHAD3bZ +wI18gfNycJ5v/hqO2V81xrJvNHy+SE/iWjnX2J14np+GPgNeGYtEotXHAgMBAAGj +QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS/ +WSA2AHmgoCJrjNXyYdK4LMuCSjANBgkqhkiG9w0BAQsFAAOCAQEAMQOiYQsfdOhy +NsZt+U2e+iKo4YFWz827n+qrkRk4r6p8FU3ztqONpfSO9kSpp+ghla0+AGIWiPAC +uvxhI+YzmzB6azZie60EI4RYZeLbK4rnJVM3YlNfvNoBYimipidx5joifsFvHZVw +IEoHNN/q/xWA5brXethbdXwFeilHfkCoMRN3zUA7tFFHei4R40cR3p1m0IvVVGb6 +g1XqfMIpiRvpb7PO4gWEyS8+eIVibslfwXhjdFjASBgMmTnrpMwatXlajRWc2BQN +9noHV8cigwUtPJslJj0Ys6lDfMjIq2SPDqO/nBudMNva0Bkuqjzx+zOAduTNrRlP +BSeOE6Fuwg== +-----END CERTIFICATE----- + +# Issuer: CN=Atos TrustedRoot 2011 O=Atos +# Subject: CN=Atos TrustedRoot 2011 O=Atos +# Label: "Atos TrustedRoot 2011" +# Serial: 6643877497813316402 +# MD5 Fingerprint: ae:b9:c4:32:4b:ac:7f:5d:66:cc:77:94:bb:2a:77:56 +# SHA1 Fingerprint: 2b:b1:f5:3e:55:0c:1d:c5:f1:d4:e6:b7:6a:46:4b:55:06:02:ac:21 +# SHA256 Fingerprint: f3:56:be:a2:44:b7:a9:1e:b3:5d:53:ca:9a:d7:86:4a:ce:01:8e:2d:35:d5:f8:f9:6d:df:68:a6:f4:1a:a4:74 +-----BEGIN CERTIFICATE----- +MIIDdzCCAl+gAwIBAgIIXDPLYixfszIwDQYJKoZIhvcNAQELBQAwPDEeMBwGA1UE +AwwVQXRvcyBUcnVzdGVkUm9vdCAyMDExMQ0wCwYDVQQKDARBdG9zMQswCQYDVQQG +EwJERTAeFw0xMTA3MDcxNDU4MzBaFw0zMDEyMzEyMzU5NTlaMDwxHjAcBgNVBAMM +FUF0b3MgVHJ1c3RlZFJvb3QgMjAxMTENMAsGA1UECgwEQXRvczELMAkGA1UEBhMC +REUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCVhTuXbyo7LjvPpvMp +Nb7PGKw+qtn4TaA+Gke5vJrf8v7MPkfoepbCJI419KkM/IL9bcFyYie96mvr54rM +VD6QUM+A1JX76LWC1BTFtqlVJVfbsVD2sGBkWXppzwO3bw2+yj5vdHLqqjAqc2K+ +SZFhyBH+DgMq92og3AIVDV4VavzjgsG1xZ1kCWyjWZgHJ8cblithdHFsQ/H3NYkQ +4J7sVaE3IqKHBAUsR320HLliKWYoyrfhk/WklAOZuXCFteZI6o1Q/NnezG8HDt0L +cp2AMBYHlT8oDv3FdU9T1nSatCQujgKRz3bFmx5VdJx4IbHwLfELn8LVlhgf8FQi +eowHAgMBAAGjfTB7MB0GA1UdDgQWBBSnpQaxLKYJYO7Rl+lwrrw7GWzbITAPBgNV 
+HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFKelBrEspglg7tGX6XCuvDsZbNshMBgG +A1UdIAQRMA8wDQYLKwYBBAGwLQMEAQEwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3 +DQEBCwUAA4IBAQAmdzTblEiGKkGdLD4GkGDEjKwLVLgfuXvTBznk+j57sj1O7Z8j +vZfza1zv7v1Apt+hk6EKhqzvINB5Ab149xnYJDE0BAGmuhWawyfc2E8PzBhj/5kP +DpFrdRbhIfzYJsdHt6bPWHJxfrrhTZVHO8mvbaG0weyJ9rQPOLXiZNwlz6bb65pc +maHFCN795trV1lpFDMS3wrUU77QR/w4VtfX128a961qn8FYiqTxlVMYVqL2Gns2D +lmh6cYGJ4Qvh6hEbaAjMaZ7snkGeRDImeuKHCnE96+RapNLbxc3G3mB/ufNPRJLv +KrcYPqcZ2Qt9sTdBQrC6YB3y/gkRsPCHe6ed +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 1 G3" +# Serial: 687049649626669250736271037606554624078720034195 +# MD5 Fingerprint: a4:bc:5b:3f:fe:37:9a:fa:64:f0:e2:fa:05:3d:0b:ab +# SHA1 Fingerprint: 1b:8e:ea:57:96:29:1a:c9:39:ea:b8:0a:81:1a:73:73:c0:93:79:67 +# SHA256 Fingerprint: 8a:86:6f:d1:b2:76:b5:7e:57:8e:92:1c:65:82:8a:2b:ed:58:e9:f2:f2:88:05:41:34:b7:f1:f4:bf:c9:cc:74 +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIUeFhfLq0sGUvjNwc1NBMotZbUZZMwDQYJKoZIhvcNAQEL +BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc +BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMSBHMzAeFw0xMjAxMTIxNzI3NDRaFw00 +MjAxMTIxNzI3NDRaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM +aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDEgRzMwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCgvlAQjunybEC0BJyFuTHK3C3kEakEPBtV +wedYMB0ktMPvhd6MLOHBPd+C5k+tR4ds7FtJwUrVu4/sh6x/gpqG7D0DmVIB0jWe +rNrwU8lmPNSsAgHaJNM7qAJGr6Qc4/hzWHa39g6QDbXwz8z6+cZM5cOGMAqNF341 +68Xfuw6cwI2H44g4hWf6Pser4BOcBRiYz5P1sZK0/CPTz9XEJ0ngnjybCKOLXSoh +4Pw5qlPafX7PGglTvF0FBM+hSo+LdoINofjSxxR3W5A2B4GbPgb6Ul5jxaYA/qXp +UhtStZI5cgMJYr2wYBZupt0lwgNm3fME0UDiTouG9G/lg6AnhF4EwfWQvTA9xO+o +abw4m6SkltFi2mnAAZauy8RRNOoMqv8hjlmPSlzkYZqn0ukqeI1RPToV7qJZjqlc +3sX5kCLliEVx3ZGZbHqfPT2YfF72vhZooF6uCyP8Wg+qInYtyaEQHeTTRCOQiJ/G +KubX9ZqzWB4vMIkIG1SitZgj7Ah3HJVdYdHLiZxfokqRmu8hqkkWCKi9YSgxyXSt +hfbZxbGL0eUQMk1fiyA6PEkfM4VZDdvLCXVDaXP7a3F98N/ETH3Goy7IlXnLc6KO +Tk0k+17kBL5yG6YnLUlamXrXXAkgt3+UuU/xDRxeiEIbEbfnkduebPRq34wGmAOt +zCjvpUfzUwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +BjAdBgNVHQ4EFgQUo5fW816iEOGrRZ88F2Q87gFwnMwwDQYJKoZIhvcNAQELBQAD +ggIBABj6W3X8PnrHX3fHyt/PX8MSxEBd1DKquGrX1RUVRpgjpeaQWxiZTOOtQqOC +MTaIzen7xASWSIsBx40Bz1szBpZGZnQdT+3Btrm0DWHMY37XLneMlhwqI2hrhVd2 +cDMT/uFPpiN3GPoajOi9ZcnPP/TJF9zrx7zABC4tRi9pZsMbj/7sPtPKlL92CiUN +qXsCHKnQO18LwIE6PWThv6ctTr1NxNgpxiIY0MWscgKCP6o6ojoilzHdCGPDdRS5 +YCgtW2jgFqlmgiNR9etT2DGbe+m3nUvriBbP+V04ikkwj+3x6xn0dxoxGE1nVGwv +b2X52z3sIexe9PSLymBlVNFxZPT5pqOBMzYzcfCkeF9OrYMh3jRJjehZrJ3ydlo2 +8hP0r+AJx2EqbPfgna67hkooby7utHnNkDPDs3b69fBsnQGQ+p6Q9pxyz0fawx/k +NSBT8lTR32GDpgLiJTjehTItXnOQUl1CxM49S+H5GYQd1aJQzEH7QRTDvdbJWqNj +ZgKAvQU6O0ec7AAmTPWIUb+oI38YB7AL7YsmoWTTYUrrXJ/es69nA7Mf3W1daWhp +q1467HxpvMc7hU6eFbm0FU/DlXpY18ls6Wy58yljXrQs8C097Vpl4KlbQMJImYFt +nh8GKjwStIsPm6Ik8KaN1nrgS7ZklmOVhMJKzRwuJIczYOXD +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 2 G3" +# Serial: 390156079458959257446133169266079962026824725800 +# MD5 Fingerprint: af:0c:86:6e:bf:40:2d:7f:0b:3e:12:50:ba:12:3d:06 +# SHA1 Fingerprint: 09:3c:61:f3:8b:8b:dc:7d:55:df:75:38:02:05:00:e1:25:f5:c8:36 +# SHA256 Fingerprint: 8f:e4:fb:0a:f9:3a:4d:0d:67:db:0b:eb:b2:3e:37:c7:1b:f3:25:dc:bc:dd:24:0e:a0:4d:af:58:b4:7e:18:40 +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIURFc0JFuBiZs18s64KztbpybwdSgwDQYJKoZIhvcNAQEL 
+BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc +BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMiBHMzAeFw0xMjAxMTIxODU5MzJaFw00 +MjAxMTIxODU5MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM +aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDIgRzMwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQChriWyARjcV4g/Ruv5r+LrI3HimtFhZiFf +qq8nUeVuGxbULX1QsFN3vXg6YOJkApt8hpvWGo6t/x8Vf9WVHhLL5hSEBMHfNrMW +n4rjyduYNM7YMxcoRvynyfDStNVNCXJJ+fKH46nafaF9a7I6JaltUkSs+L5u+9ym +c5GQYaYDFCDy54ejiK2toIz/pgslUiXnFgHVy7g1gQyjO/Dh4fxaXc6AcW34Sas+ +O7q414AB+6XrW7PFXmAqMaCvN+ggOp+oMiwMzAkd056OXbxMmO7FGmh77FOm6RQ1 +o9/NgJ8MSPsc9PG/Srj61YxxSscfrf5BmrODXfKEVu+lV0POKa2Mq1W/xPtbAd0j +IaFYAI7D0GoT7RPjEiuA3GfmlbLNHiJuKvhB1PLKFAeNilUSxmn1uIZoL1NesNKq +IcGY5jDjZ1XHm26sGahVpkUG0CM62+tlXSoREfA7T8pt9DTEceT/AFr2XK4jYIVz +8eQQsSWu1ZK7E8EM4DnatDlXtas1qnIhO4M15zHfeiFuuDIIfR0ykRVKYnLP43eh +vNURG3YBZwjgQQvD6xVu+KQZ2aKrr+InUlYrAoosFCT5v0ICvybIxo/gbjh9Uy3l +7ZizlWNof/k19N+IxWA1ksB8aRxhlRbQ694Lrz4EEEVlWFA4r0jyWbYW8jwNkALG +cC4BrTwV1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +BjAdBgNVHQ4EFgQU7edvdlq/YOxJW8ald7tyFnGbxD0wDQYJKoZIhvcNAQELBQAD +ggIBAJHfgD9DCX5xwvfrs4iP4VGyvD11+ShdyLyZm3tdquXK4Qr36LLTn91nMX66 +AarHakE7kNQIXLJgapDwyM4DYvmL7ftuKtwGTTwpD4kWilhMSA/ohGHqPHKmd+RC +roijQ1h5fq7KpVMNqT1wvSAZYaRsOPxDMuHBR//47PERIjKWnML2W2mWeyAMQ0Ga +W/ZZGYjeVYg3UQt4XAoeo0L9x52ID8DyeAIkVJOviYeIyUqAHerQbj5hLja7NQ4n +lv1mNDthcnPxFlxHBlRJAHpYErAK74X9sbgzdWqTHBLmYF5vHX/JHyPLhGGfHoJE ++V+tYlUkmlKY7VHnoX6XOuYvHxHaU4AshZ6rNRDbIl9qxV6XU/IyAgkwo1jwDQHV +csaxfGl7w/U2Rcxhbl5MlMVerugOXou/983g7aEOGzPuVBj+D77vfoRrQ+NwmNtd +dbINWQeFFSM51vHfqSYP1kjHs6Yi9TM3WpVHn3u6GBVv/9YUZINJ0gpnIdsPNWNg +KCLjsZWDzYWm3S8P52dSbrsvhXz1SnPnxT7AvSESBT/8twNJAlvIJebiVDj1eYeM +HVOyToV7BjjHLPj4sHKNJeV3UvQDHEimUF+IIDBu8oJDqz2XhOdT+yHBTw8imoa4 +WSr2Rz0ZiC3oheGe7IUIarFsNMkd7EgrO3jtZsSOeWmD3n+M +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 3 G3" +# Serial: 268090761170461462463995952157327242137089239581 +# MD5 Fingerprint: df:7d:b9:ad:54:6f:68:a1:df:89:57:03:97:43:b0:d7 +# SHA1 Fingerprint: 48:12:bd:92:3c:a8:c4:39:06:e7:30:6d:27:96:e6:a4:cf:22:2e:7d +# SHA256 Fingerprint: 88:ef:81:de:20:2e:b0:18:45:2e:43:f8:64:72:5c:ea:5f:bd:1f:c2:d9:d2:05:73:07:09:c5:d8:b8:69:0f:46 +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIULvWbAiin23r/1aOp7r0DoM8Sah0wDQYJKoZIhvcNAQEL +BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc +BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMyBHMzAeFw0xMjAxMTIyMDI2MzJaFw00 +MjAxMTIyMDI2MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM +aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDMgRzMwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCzyw4QZ47qFJenMioKVjZ/aEzHs286IxSR +/xl/pcqs7rN2nXrpixurazHb+gtTTK/FpRp5PIpM/6zfJd5O2YIyC0TeytuMrKNu +FoM7pmRLMon7FhY4futD4tN0SsJiCnMK3UmzV9KwCoWdcTzeo8vAMvMBOSBDGzXR +U7Ox7sWTaYI+FrUoRqHe6okJ7UO4BUaKhvVZR74bbwEhELn9qdIoyhA5CcoTNs+c +ra1AdHkrAj80//ogaX3T7mH1urPnMNA3I4ZyYUUpSFlob3emLoG+B01vr87ERROR +FHAGjx+f+IdpsQ7vw4kZ6+ocYfx6bIrc1gMLnia6Et3UVDmrJqMz6nWB2i3ND0/k +A9HvFZcba5DFApCTZgIhsUfei5pKgLlVj7WiL8DWM2fafsSntARE60f75li59wzw +eyuxwHApw0BiLTtIadwjPEjrewl5qW3aqDCYz4ByA4imW0aucnl8CAMhZa634Ryl +sSqiMd5mBPfAdOhx3v89WcyWJhKLhZVXGqtrdQtEPREoPHtht+KPZ0/l7DxMYIBp +VzgeAVuNVejH38DMdyM0SXV89pgR6y3e7UEuFAUCf+D+IOs15xGsIs5XPd7JMG0Q +A4XN8f+MFrXBsj6IbGB/kE+V9/YtrQE5BwT6dYB9v0lQ7e/JxHwc64B+27bQ3RP+ +ydOc17KXqQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +BjAdBgNVHQ4EFgQUxhfQvKjqAkPyGwaZXSuQILnXnOQwDQYJKoZIhvcNAQELBQAD 
+ggIBADRh2Va1EodVTd2jNTFGu6QHcrxfYWLopfsLN7E8trP6KZ1/AvWkyaiTt3px +KGmPc+FSkNrVvjrlt3ZqVoAh313m6Tqe5T72omnHKgqwGEfcIHB9UqM+WXzBusnI +FUBhynLWcKzSt/Ac5IYp8M7vaGPQtSCKFWGafoaYtMnCdvvMujAWzKNhxnQT5Wvv +oxXqA/4Ti2Tk08HS6IT7SdEQTXlm66r99I0xHnAUrdzeZxNMgRVhvLfZkXdxGYFg +u/BYpbWcC/ePIlUnwEsBbTuZDdQdm2NnL9DuDcpmvJRPpq3t/O5jrFc/ZSXPsoaP +0Aj/uHYUbt7lJ+yreLVTubY/6CD50qi+YUbKh4yE8/nxoGibIh6BJpsQBJFxwAYf +3KDTuVan45gtf4Od34wrnDKOMpTwATwiKp9Dwi7DmDkHOHv8XgBCH/MyJnmDhPbl +8MFREsALHgQjDFSlTC9JxUrRtm5gDWv8a4uFJGS3iQ6rJUdbPM9+Sb3H6QrG2vd+ +DhcI00iX0HGS8A85PjRqHH3Y8iKuu2n0M7SmSFXRDw4m6Oy2Cy2nhTXN/VnIn9HN +PlopNLk9hM6xZdRZkZFWdSHBd575euFgndOtBBj0fOtek49TSiIp+EgrPk2GrFt/ +ywaZWWDYWGWVjUTR939+J399roD1B0y2PpxxVJkES/1Y+Zj0 +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Assured ID Root G2" +# Serial: 15385348160840213938643033620894905419 +# MD5 Fingerprint: 92:38:b9:f8:63:24:82:65:2c:57:33:e6:fe:81:8f:9d +# SHA1 Fingerprint: a1:4b:48:d9:43:ee:0a:0e:40:90:4f:3c:e0:a4:c0:91:93:51:5d:3f +# SHA256 Fingerprint: 7d:05:eb:b6:82:33:9f:8c:94:51:ee:09:4e:eb:fe:fa:79:53:a1:14:ed:b2:f4:49:49:45:2f:ab:7d:2f:c1:85 +-----BEGIN CERTIFICATE----- +MIIDljCCAn6gAwIBAgIQC5McOtY5Z+pnI7/Dr5r0SzANBgkqhkiG9w0BAQsFADBl +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv +b3QgRzIwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQG +EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl +cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzIwggEi +MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDZ5ygvUj82ckmIkzTz+GoeMVSA +n61UQbVH35ao1K+ALbkKz3X9iaV9JPrjIgwrvJUXCzO/GU1BBpAAvQxNEP4Htecc +biJVMWWXvdMX0h5i89vqbFCMP4QMls+3ywPgym2hFEwbid3tALBSfK+RbLE4E9Hp +EgjAALAcKxHad3A2m67OeYfcgnDmCXRwVWmvo2ifv922ebPynXApVfSr/5Vh88lA +bx3RvpO704gqu52/clpWcTs/1PPRCv4o76Pu2ZmvA9OPYLfykqGxvYmJHzDNw6Yu +YjOuFgJ3RFrngQo8p0Quebg/BLxcoIfhG69Rjs3sLPr4/m3wOnyqi+RnlTGNAgMB +AAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQW +BBTOw0q5mVXyuNtgv6l+vVa1lzan1jANBgkqhkiG9w0BAQsFAAOCAQEAyqVVjOPI +QW5pJ6d1Ee88hjZv0p3GeDgdaZaikmkuOGybfQTUiaWxMTeKySHMq2zNixya1r9I +0jJmwYrA8y8678Dj1JGG0VDjA9tzd29KOVPt3ibHtX2vK0LRdWLjSisCx1BL4Gni +lmwORGYQRI+tBev4eaymG+g3NJ1TyWGqolKvSnAWhsI6yLETcDbYz+70CjTVW0z9 +B5yiutkBclzzTcHdDrEcDcRjvq30FPuJ7KJBDkzMyFdA0G4Dqs0MjomZmWzwPDCv +ON9vvKO+KSAnq3T/EyJ43pdSVR6DtVQgA+6uwE9W3jfMw3+qBCe703e4YtsXfJwo +IhNzbM8m9Yop5w== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Assured ID Root G3" +# Serial: 15459312981008553731928384953135426796 +# MD5 Fingerprint: 7c:7f:65:31:0c:81:df:8d:ba:3e:99:e2:5c:ad:6e:fb +# SHA1 Fingerprint: f5:17:a2:4f:9a:48:c6:c9:f8:a2:00:26:9f:dc:0f:48:2c:ab:30:89 +# SHA256 Fingerprint: 7e:37:cb:8b:4c:47:09:0c:ab:36:55:1b:a6:f4:5d:b8:40:68:0f:ba:16:6a:95:2d:b1:00:71:7f:43:05:3f:c2 +-----BEGIN CERTIFICATE----- +MIICRjCCAc2gAwIBAgIQC6Fa+h3foLVJRK/NJKBs7DAKBggqhkjOPQQDAzBlMQsw +CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu +ZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3Qg +RzMwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQGEwJV +UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu +Y29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzMwdjAQBgcq 
+hkjOPQIBBgUrgQQAIgNiAAQZ57ysRGXtzbg/WPuNsVepRC0FFfLvC/8QdJ+1YlJf +Zn4f5dwbRXkLzMZTCp2NXQLZqVneAlr2lSoOjThKiknGvMYDOAdfVdp+CW7if17Q +RSAPWXYQ1qAk8C3eNvJsKTmjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ +BAQDAgGGMB0GA1UdDgQWBBTL0L2p4ZgFUaFNN6KDec6NHSrkhDAKBggqhkjOPQQD +AwNnADBkAjAlpIFFAmsSS3V0T8gj43DydXLefInwz5FyYZ5eEJJZVrmDxxDnOOlY +JjZ91eQ0hjkCMHw2U/Aw5WJjOpnitqM7mzT6HtoQknFekROn3aRukswy1vUhZscv +6pZjamVFkpUBtA== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Global Root G2" +# Serial: 4293743540046975378534879503202253541 +# MD5 Fingerprint: e4:a6:8a:c8:54:ac:52:42:46:0a:fd:72:48:1b:2a:44 +# SHA1 Fingerprint: df:3c:24:f9:bf:d6:66:76:1b:26:80:73:fe:06:d1:cc:8d:4f:82:a4 +# SHA256 Fingerprint: cb:3c:cb:b7:60:31:e5:e0:13:8f:8d:d3:9a:23:f9:de:47:ff:c3:5e:43:c1:14:4c:ea:27:d4:6a:5a:b1:cb:5f +-----BEGIN CERTIFICATE----- +MIIDjjCCAnagAwIBAgIQAzrx5qcRqaC7KGSxHQn65TANBgkqhkiG9w0BAQsFADBh +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBH +MjAeFw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVT +MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j +b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEcyMIIBIjANBgkqhkiG +9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuzfNNNx7a8myaJCtSnX/RrohCgiN9RlUyfuI +2/Ou8jqJkTx65qsGGmvPrC3oXgkkRLpimn7Wo6h+4FR1IAWsULecYxpsMNzaHxmx +1x7e/dfgy5SDN67sH0NO3Xss0r0upS/kqbitOtSZpLYl6ZtrAGCSYP9PIUkY92eQ +q2EGnI/yuum06ZIya7XzV+hdG82MHauVBJVJ8zUtluNJbd134/tJS7SsVQepj5Wz +tCO7TG1F8PapspUwtP1MVYwnSlcUfIKdzXOS0xZKBgyMUNGPHgm+F6HmIcr9g+UQ +vIOlCsRnKPZzFBQ9RnbDhxSJITRNrw9FDKZJobq7nMWxM4MphQIDAQABo0IwQDAP +BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUTiJUIBiV +5uNu5g/6+rkS7QYXjzkwDQYJKoZIhvcNAQELBQADggEBAGBnKJRvDkhj6zHd6mcY +1Yl9PMWLSn/pvtsrF9+wX3N3KjITOYFnQoQj8kVnNeyIv/iPsGEMNKSuIEyExtv4 +NeF22d+mQrvHRAiGfzZ0JFrabA0UWTW98kndth/Jsw1HKj2ZL7tcu7XUIOGZX1NG +Fdtom/DzMNU+MeKNhJ7jitralj41E6Vf8PlwUHBHQRFXGU7Aj64GxJUTFy8bJZ91 +8rGOmaFvE7FBcf6IKshPECBV1/MUReXgRPTqh5Uykw7+U0b6LJ3/iyK5S9kJRaTe +pLiaWN0bfVKfjllDiIGknibVb63dDcY3fe0Dkhvld1927jyNxF1WW6LZZm6zNTfl +MrY= +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Global Root G3" +# Serial: 7089244469030293291760083333884364146 +# MD5 Fingerprint: f5:5d:a4:50:a5:fb:28:7e:1e:0f:0d:cc:96:57:56:ca +# SHA1 Fingerprint: 7e:04:de:89:6a:3e:66:6d:00:e6:87:d3:3f:fa:d9:3b:e8:3d:34:9e +# SHA256 Fingerprint: 31:ad:66:48:f8:10:41:38:c7:38:f3:9e:a4:32:01:33:39:3e:3a:18:cc:02:29:6e:f9:7c:2a:c9:ef:67:31:d0 +-----BEGIN CERTIFICATE----- +MIICPzCCAcWgAwIBAgIQBVVWvPJepDU1w6QP1atFcjAKBggqhkjOPQQDAzBhMQsw +CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu +ZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBHMzAe +Fw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVTMRUw +EwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5jb20x +IDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEczMHYwEAYHKoZIzj0CAQYF +K4EEACIDYgAE3afZu4q4C/sLfyHS8L6+c/MzXRq8NOrexpu80JX28MzQC7phW1FG +fp4tn+6OYwwX7Adw9c+ELkCDnOg/QW07rdOkFFk2eJ0DQ+4QE2xy3q6Ip6FrtUPO +Z9wj/wMco+I+o0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAd +BgNVHQ4EFgQUs9tIpPmhxdiuNkHMEWNpYim8S8YwCgYIKoZIzj0EAwMDaAAwZQIx +AK288mw/EkrRLTnDCgmXc/SINoyIJ7vmiI1Qhadj+Z4y3maTD/HMsQmP3Wyr+mt/ 
+oAIwOWZbwmSNuJ5Q3KjVSaLtx9zRSX8XAbjIho9OjIgrqJqpisXRAL34VOKa5Vt8 +sycX +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Trusted Root G4" +# Serial: 7451500558977370777930084869016614236 +# MD5 Fingerprint: 78:f2:fc:aa:60:1f:2f:b4:eb:c9:37:ba:53:2e:75:49 +# SHA1 Fingerprint: dd:fb:16:cd:49:31:c9:73:a2:03:7d:3f:c8:3a:4d:7d:77:5d:05:e4 +# SHA256 Fingerprint: 55:2f:7b:dc:f1:a7:af:9e:6c:e6:72:01:7f:4f:12:ab:f7:72:40:c7:8e:76:1a:c2:03:d1:d9:d2:0a:c8:99:88 +-----BEGIN CERTIFICATE----- +MIIFkDCCA3igAwIBAgIQBZsbV56OITLiOQe9p3d1XDANBgkqhkiG9w0BAQwFADBi +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3Qg +RzQwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBiMQswCQYDVQQGEwJV +UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu +Y29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3QgRzQwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQC/5pBzaN675F1KPDAiMGkz7MKnJS7JIT3y +ithZwuEppz1Yq3aaza57G4QNxDAf8xukOBbrVsaXbR2rsnnyyhHS5F/WBTxSD1If +xp4VpX6+n6lXFllVcq9ok3DCsrp1mWpzMpTREEQQLt+C8weE5nQ7bXHiLQwb7iDV +ySAdYyktzuxeTsiT+CFhmzTrBcZe7FsavOvJz82sNEBfsXpm7nfISKhmV1efVFiO +DCu3T6cw2Vbuyntd463JT17lNecxy9qTXtyOj4DatpGYQJB5w3jHtrHEtWoYOAMQ +jdjUN6QuBX2I9YI+EJFwq1WCQTLX2wRzKm6RAXwhTNS8rhsDdV14Ztk6MUSaM0C/ +CNdaSaTC5qmgZ92kJ7yhTzm1EVgX9yRcRo9k98FpiHaYdj1ZXUJ2h4mXaXpI8OCi +EhtmmnTK3kse5w5jrubU75KSOp493ADkRSWJtppEGSt+wJS00mFt6zPZxd9LBADM +fRyVw4/3IbKyEbe7f/LVjHAsQWCqsWMYRJUadmJ+9oCw++hkpjPRiQfhvbfmQ6QY +uKZ3AeEPlAwhHbJUKSWJbOUOUlFHdL4mrLZBdd56rF+NP8m800ERElvlEFDrMcXK +chYiCd98THU/Y+whX8QgUWtvsauGi0/C1kVfnSD8oR7FwI+isX4KJpn15GkvmB0t +9dmpsh3lGwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +hjAdBgNVHQ4EFgQU7NfjgtJxXWRM3y5nP+e6mK4cD08wDQYJKoZIhvcNAQEMBQAD +ggIBALth2X2pbL4XxJEbw6GiAI3jZGgPVs93rnD5/ZpKmbnJeFwMDF/k5hQpVgs2 +SV1EY+CtnJYYZhsjDT156W1r1lT40jzBQ0CuHVD1UvyQO7uYmWlrx8GnqGikJ9yd ++SeuMIW59mdNOj6PWTkiU0TryF0Dyu1Qen1iIQqAyHNm0aAFYF/opbSnr6j3bTWc +fFqK1qI4mfN4i/RN0iAL3gTujJtHgXINwBQy7zBZLq7gcfJW5GqXb5JQbZaNaHqa +sjYUegbyJLkJEVDXCLG4iXqEI2FCKeWjzaIgQdfRnGTZ6iahixTXTBmyUEFxPT9N +cCOGDErcgdLMMpSEDQgJlxxPwO5rIHQw0uA5NBCFIRUBCOhVMt5xSdkoF1BN5r5N +0XWs0Mr7QbhDparTwwVETyw2m+L64kW4I1NsBm9nVX9GtUw/bihaeSbSpKhil9Ie +4u1Ki7wb/UdKDd9nZn6yW0HQO+T0O/QEY+nvwlQAUaCKKsnOeMzV6ocEGLPOr0mI +r/OSmbaz5mEP0oUA51Aa5BuVnRmhuZyxm7EAHu/QD09CbMkKvO5D+jpxpchNJqU1 +/YldvIViHTLSoCtU7ZpXwdv6EM8Zt4tKG48BtieVU+i2iW1bvGjUI+iLUaJW+fCm +gKDWHrO8Dw9TdSmq6hN35N6MgSGtBxBHEa2HPQfRdbzP82Z+ +-----END CERTIFICATE----- + +# Issuer: CN=COMODO RSA Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO RSA Certification Authority O=COMODO CA Limited +# Label: "COMODO RSA Certification Authority" +# Serial: 101909084537582093308941363524873193117 +# MD5 Fingerprint: 1b:31:b0:71:40:36:cc:14:36:91:ad:c4:3e:fd:ec:18 +# SHA1 Fingerprint: af:e5:d2:44:a8:d1:19:42:30:ff:47:9f:e2:f8:97:bb:cd:7a:8c:b4 +# SHA256 Fingerprint: 52:f0:e1:c4:e5:8e:c6:29:29:1b:60:31:7f:07:46:71:b8:5d:7e:a8:0d:5b:07:27:34:63:53:4b:32:b4:02:34 +-----BEGIN CERTIFICATE----- +MIIF2DCCA8CgAwIBAgIQTKr5yttjb+Af907YWwOGnTANBgkqhkiG9w0BAQwFADCB +hTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G +A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNV +BAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMTE5 +MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgT 
+EkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMR +Q09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNh +dGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCR +6FSS0gpWsawNJN3Fz0RndJkrN6N9I3AAcbxT38T6KhKPS38QVr2fcHK3YX/JSw8X +pz3jsARh7v8Rl8f0hj4K+j5c+ZPmNHrZFGvnnLOFoIJ6dq9xkNfs/Q36nGz637CC +9BR++b7Epi9Pf5l/tfxnQ3K9DADWietrLNPtj5gcFKt+5eNu/Nio5JIk2kNrYrhV +/erBvGy2i/MOjZrkm2xpmfh4SDBF1a3hDTxFYPwyllEnvGfDyi62a+pGx8cgoLEf +Zd5ICLqkTqnyg0Y3hOvozIFIQ2dOciqbXL1MGyiKXCJ7tKuY2e7gUYPDCUZObT6Z ++pUX2nwzV0E8jVHtC7ZcryxjGt9XyD+86V3Em69FmeKjWiS0uqlWPc9vqv9JWL7w +qP/0uK3pN/u6uPQLOvnoQ0IeidiEyxPx2bvhiWC4jChWrBQdnArncevPDt09qZah +SL0896+1DSJMwBGB7FY79tOi4lu3sgQiUpWAk2nojkxl8ZEDLXB0AuqLZxUpaVIC +u9ffUGpVRr+goyhhf3DQw6KqLCGqR84onAZFdr+CGCe01a60y1Dma/RMhnEw6abf +Fobg2P9A3fvQQoh/ozM6LlweQRGBY84YcWsr7KaKtzFcOmpH4MN5WdYgGq/yapiq +crxXStJLnbsQ/LBMQeXtHT1eKJ2czL+zUdqnR+WEUwIDAQABo0IwQDAdBgNVHQ4E +FgQUu69+Aj36pvE8hI6t7jiY7NkyMtQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB +/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAArx1UaEt65Ru2yyTUEUAJNMnMvl +wFTPoCWOAvn9sKIN9SCYPBMtrFaisNZ+EZLpLrqeLppysb0ZRGxhNaKatBYSaVqM +4dc+pBroLwP0rmEdEBsqpIt6xf4FpuHA1sj+nq6PK7o9mfjYcwlYRm6mnPTXJ9OV +2jeDchzTc+CiR5kDOF3VSXkAKRzH7JsgHAckaVd4sjn8OoSgtZx8jb8uk2Intzna +FxiuvTwJaP+EmzzV1gsD41eeFPfR60/IvYcjt7ZJQ3mFXLrrkguhxuhoqEwWsRqZ +CuhTLJK7oQkYdQxlqHvLI7cawiiFwxv/0Cti76R7CZGYZ4wUAc1oBmpjIXUDgIiK +boHGhfKppC3n9KUkEEeDys30jXlYsQab5xoq2Z0B15R97QNKyvDb6KkBPvVWmcke +jkk9u+UJueBPSZI9FoJAzMxZxuY67RIuaTxslbH9qh17f4a+Hg4yRvv7E491f0yL +S0Zj/gA0QHDBw7mh3aZw4gSzQbzpgJHqZJx64SIDqZxubw5lT2yHh17zbqD5daWb +QOhTsiedSrnAdyGN/4fy3ryM7xfft0kL0fJuMAsaDk527RH89elWsn2/x20Kk4yl +0MC2Hb46TpSi125sC8KKfPog88Tk5c0NqMuRkrF8hey1FGlmDoLnzc7ILaZRfyHB +NVOFBkpdn627G190 +-----END CERTIFICATE----- + +# Issuer: CN=USERTrust RSA Certification Authority O=The USERTRUST Network +# Subject: CN=USERTrust RSA Certification Authority O=The USERTRUST Network +# Label: "USERTrust RSA Certification Authority" +# Serial: 2645093764781058787591871645665788717 +# MD5 Fingerprint: 1b:fe:69:d1:91:b7:19:33:a3:72:a8:0f:e1:55:e5:b5 +# SHA1 Fingerprint: 2b:8f:1b:57:33:0d:bb:a2:d0:7a:6c:51:f7:0e:e9:0d:da:b9:ad:8e +# SHA256 Fingerprint: e7:93:c9:b0:2f:d8:aa:13:e2:1c:31:22:8a:cc:b0:81:19:64:3b:74:9c:89:89:64:b1:74:6d:46:c3:d4:cb:d2 +-----BEGIN CERTIFICATE----- +MIIF3jCCA8agAwIBAgIQAf1tMPyjylGoG7xkDjUDLTANBgkqhkiG9w0BAQwFADCB +iDELMAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0pl +cnNleSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNV +BAMTJVVTRVJUcnVzdCBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAw +MjAxMDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNV +BAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVU +aGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBSU0EgQ2Vy +dGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK +AoICAQCAEmUXNg7D2wiz0KxXDXbtzSfTTK1Qg2HiqiBNCS1kCdzOiZ/MPans9s/B +3PHTsdZ7NygRK0faOca8Ohm0X6a9fZ2jY0K2dvKpOyuR+OJv0OwWIJAJPuLodMkY +tJHUYmTbf6MG8YgYapAiPLz+E/CHFHv25B+O1ORRxhFnRghRy4YUVD+8M/5+bJz/ +Fp0YvVGONaanZshyZ9shZrHUm3gDwFA66Mzw3LyeTP6vBZY1H1dat//O+T23LLb2 +VN3I5xI6Ta5MirdcmrS3ID3KfyI0rn47aGYBROcBTkZTmzNg95S+UzeQc0PzMsNT +79uq/nROacdrjGCT3sTHDN/hMq7MkztReJVni+49Vv4M0GkPGw/zJSZrM233bkf6 +c0Plfg6lZrEpfDKEY1WJxA3Bk1QwGROs0303p+tdOmw1XNtB1xLaqUkL39iAigmT +Yo61Zs8liM2EuLE/pDkP2QKe6xJMlXzzawWpXhaDzLhn4ugTncxbgtNMs+1b/97l +c6wjOy0AvzVVdAlJ2ElYGn+SNuZRkg7zJn0cTRe8yexDJtC/QV9AqURE9JnnV4ee +UB9XVKg+/XRjL7FQZQnmWEIuQxpMtPAlR1n6BB6T1CZGSlCBst6+eLf8ZxXhyVeE 
+Hg9j1uliutZfVS7qXMYoCAQlObgOK6nyTJccBz8NUvXt7y+CDwIDAQABo0IwQDAd +BgNVHQ4EFgQUU3m/WqorSs9UgOHYm8Cd8rIDZsswDgYDVR0PAQH/BAQDAgEGMA8G +A1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAFzUfA3P9wF9QZllDHPF +Up/L+M+ZBn8b2kMVn54CVVeWFPFSPCeHlCjtHzoBN6J2/FNQwISbxmtOuowhT6KO +VWKR82kV2LyI48SqC/3vqOlLVSoGIG1VeCkZ7l8wXEskEVX/JJpuXior7gtNn3/3 +ATiUFJVDBwn7YKnuHKsSjKCaXqeYalltiz8I+8jRRa8YFWSQEg9zKC7F4iRO/Fjs +8PRF/iKz6y+O0tlFYQXBl2+odnKPi4w2r78NBc5xjeambx9spnFixdjQg3IM8WcR +iQycE0xyNN+81XHfqnHd4blsjDwSXWXavVcStkNr/+XeTWYRUc+ZruwXtuhxkYze +Sf7dNXGiFSeUHM9h4ya7b6NnJSFd5t0dCy5oGzuCr+yDZ4XUmFF0sbmZgIn/f3gZ +XHlKYC6SQK5MNyosycdiyA5d9zZbyuAlJQG03RoHnHcAP9Dc1ew91Pq7P8yF1m9/ +qS3fuQL39ZeatTXaw2ewh0qpKJ4jjv9cJ2vhsE/zB+4ALtRZh8tSQZXq9EfX7mRB +VXyNWQKV3WKdwrnuWih0hKWbt5DHDAff9Yk2dDLWKMGwsAvgnEzDHNb842m1R0aB +L6KCq9NjRHDEjf8tM7qtj3u1cIiuPhnPQCjY/MiQu12ZIvVS5ljFH4gxQ+6IHdfG +jjxDah2nGN59PRbxYvnKkKj9 +-----END CERTIFICATE----- + +# Issuer: CN=USERTrust ECC Certification Authority O=The USERTRUST Network +# Subject: CN=USERTrust ECC Certification Authority O=The USERTRUST Network +# Label: "USERTrust ECC Certification Authority" +# Serial: 123013823720199481456569720443997572134 +# MD5 Fingerprint: fa:68:bc:d9:b5:7f:ad:fd:c9:1d:06:83:28:cc:24:c1 +# SHA1 Fingerprint: d1:cb:ca:5d:b2:d5:2a:7f:69:3b:67:4d:e5:f0:5a:1d:0c:95:7d:f0 +# SHA256 Fingerprint: 4f:f4:60:d5:4b:9c:86:da:bf:bc:fc:57:12:e0:40:0d:2b:ed:3f:bc:4d:4f:bd:aa:86:e0:6a:dc:d2:a9:ad:7a +-----BEGIN CERTIFICATE----- +MIICjzCCAhWgAwIBAgIQXIuZxVqUxdJxVt7NiYDMJjAKBggqhkjOPQQDAzCBiDEL +MAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNl +eSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMT +JVVTRVJUcnVzdCBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMjAx +MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNVBAgT +Ck5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVUaGUg +VVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBFQ0MgQ2VydGlm +aWNhdGlvbiBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQarFRaqflo +I+d61SRvU8Za2EurxtW20eZzca7dnNYMYf3boIkDuAUU7FfO7l0/4iGzzvfUinng +o4N+LZfQYcTxmdwlkWOrfzCjtHDix6EznPO/LlxTsV+zfTJ/ijTjeXmjQjBAMB0G +A1UdDgQWBBQ64QmG1M8ZwpZ2dEl23OA1xmNjmjAOBgNVHQ8BAf8EBAMCAQYwDwYD +VR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjA2Z6EWCNzklwBBHU6+4WMB +zzuqQhFkoJ2UOQIReVx7Hfpkue4WQrO/isIJxOzksU0CMQDpKmFHjFJKS04YcPbW +RNZu9YO6bVi9JNlWSOrvxKJGgYhqOkbRqZtNyWHa0V1Xahg= +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 +# Label: "GlobalSign ECC Root CA - R4" +# Serial: 14367148294922964480859022125800977897474 +# MD5 Fingerprint: 20:f0:27:68:d1:7e:a0:9d:0e:e6:2a:ca:df:5c:89:8e +# SHA1 Fingerprint: 69:69:56:2e:40:80:f4:24:a1:e7:19:9f:14:ba:f3:ee:58:ab:6a:bb +# SHA256 Fingerprint: be:c9:49:11:c2:95:56:76:db:6c:0a:55:09:86:d7:6e:3b:a0:05:66:7c:44:2c:97:62:b4:fb:b7:73:de:22:8c +-----BEGIN CERTIFICATE----- +MIIB4TCCAYegAwIBAgIRKjikHJYKBN5CsiilC+g0mAIwCgYIKoZIzj0EAwIwUDEk +MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI0MRMwEQYDVQQKEwpH +bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX +DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD +QSAtIFI0MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu +MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEuMZ5049sJQ6fLjkZHAOkrprlOQcJ +FspjsbmG+IpXwVfOQvpzofdlQv8ewQCybnMO/8ch5RikqtlxP6jUuc6MHaNCMEAw +DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFFSwe61F +uOJAf/sKbvu+M8k8o4TVMAoGCCqGSM49BAMCA0gAMEUCIQDckqGgE6bPA7DmxCGX 
+kPoUVy0D7O48027KqGx2vKLeuwIgJ6iFJzWbVsaj8kfSt24bAgAXqmemFZHe+pTs +ewv4n4Q= +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 +# Label: "GlobalSign ECC Root CA - R5" +# Serial: 32785792099990507226680698011560947931244 +# MD5 Fingerprint: 9f:ad:3b:1c:02:1e:8a:ba:17:74:38:81:0c:a2:bc:08 +# SHA1 Fingerprint: 1f:24:c6:30:cd:a4:18:ef:20:69:ff:ad:4f:dd:5f:46:3a:1b:69:aa +# SHA256 Fingerprint: 17:9f:bc:14:8a:3d:d0:0f:d2:4e:a1:34:58:cc:43:bf:a7:f5:9c:81:82:d7:83:a5:13:f6:eb:ec:10:0c:89:24 +-----BEGIN CERTIFICATE----- +MIICHjCCAaSgAwIBAgIRYFlJ4CYuu1X5CneKcflK2GwwCgYIKoZIzj0EAwMwUDEk +MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI1MRMwEQYDVQQKEwpH +bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX +DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD +QSAtIFI1MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu +MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAER0UOlvt9Xb/pOdEh+J8LttV7HpI6SFkc +8GIxLcB6KP4ap1yztsyX50XUWPrRd21DosCHZTQKH3rd6zwzocWdTaRvQZU4f8ke +hOvRnkmSh5SHDDqFSmafnVmTTZdhBoZKo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYD +VR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUPeYpSJvqB8ohREom3m7e0oPQn1kwCgYI +KoZIzj0EAwMDaAAwZQIxAOVpEslu28YxuglB4Zf4+/2a4n0Sye18ZNPLBSWLVtmg +515dTguDnFt2KaAJJiFqYgIwcdK1j1zqO+F4CYWodZI7yFz9SO8NdCKoCOJuxUnO +xwy8p2Fp8fc74SrL+SvzZpA3 +-----END CERTIFICATE----- + +# Issuer: CN=Staat der Nederlanden Root CA - G3 O=Staat der Nederlanden +# Subject: CN=Staat der Nederlanden Root CA - G3 O=Staat der Nederlanden +# Label: "Staat der Nederlanden Root CA - G3" +# Serial: 10003001 +# MD5 Fingerprint: 0b:46:67:07:db:10:2f:19:8c:35:50:60:d1:0b:f4:37 +# SHA1 Fingerprint: d8:eb:6b:41:51:92:59:e0:f3:e7:85:00:c0:3d:b6:88:97:c9:ee:fc +# SHA256 Fingerprint: 3c:4f:b0:b9:5a:b8:b3:00:32:f4:32:b8:6f:53:5f:e1:72:c1:85:d0:fd:39:86:58:37:cf:36:18:7f:a6:f4:28 +-----BEGIN CERTIFICATE----- +MIIFdDCCA1ygAwIBAgIEAJiiOTANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJO +TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSswKQYDVQQDDCJTdGFh +dCBkZXIgTmVkZXJsYW5kZW4gUm9vdCBDQSAtIEczMB4XDTEzMTExNDExMjg0MloX +DTI4MTExMzIzMDAwMFowWjELMAkGA1UEBhMCTkwxHjAcBgNVBAoMFVN0YWF0IGRl +ciBOZWRlcmxhbmRlbjErMCkGA1UEAwwiU3RhYXQgZGVyIE5lZGVybGFuZGVuIFJv +b3QgQ0EgLSBHMzCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAL4yolQP +cPssXFnrbMSkUeiFKrPMSjTysF/zDsccPVMeiAho2G89rcKezIJnByeHaHE6n3WW +IkYFsO2tx1ueKt6c/DrGlaf1F2cY5y9JCAxcz+bMNO14+1Cx3Gsy8KL+tjzk7FqX +xz8ecAgwoNzFs21v0IJyEavSgWhZghe3eJJg+szeP4TrjTgzkApyI/o1zCZxMdFy +KJLZWyNtZrVtB0LrpjPOktvA9mxjeM3KTj215VKb8b475lRgsGYeCasH/lSJEULR +9yS6YHgamPfJEf0WwTUaVHXvQ9Plrk7O53vDxk5hUUurmkVLoR9BvUhTFXFkC4az +5S6+zqQbwSmEorXLCCN2QyIkHxcE1G6cxvx/K2Ya7Irl1s9N9WMJtxU51nus6+N8 +6U78dULI7ViVDAZCopz35HCz33JvWjdAidiFpNfxC95DGdRKWCyMijmev4SH8RY7 +Ngzp07TKbBlBUgmhHbBqv4LvcFEhMtwFdozL92TkA1CvjJFnq8Xy7ljY3r735zHP +bMk7ccHViLVlvMDoFxcHErVc0qsgk7TmgoNwNsXNo42ti+yjwUOH5kPiNL6VizXt +BznaqB16nzaeErAMZRKQFWDZJkBE41ZgpRDUajz9QdwOWke275dhdU/Z/seyHdTt +XUmzqWrLZoQT1Vyg3N9udwbRcXXIV2+vD3dbAgMBAAGjQjBAMA8GA1UdEwEB/wQF +MAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRUrfrHkleuyjWcLhL75Lpd +INyUVzANBgkqhkiG9w0BAQsFAAOCAgEAMJmdBTLIXg47mAE6iqTnB/d6+Oea31BD +U5cqPco8R5gu4RV78ZLzYdqQJRZlwJ9UXQ4DO1t3ApyEtg2YXzTdO2PCwyiBwpwp +LiniyMMB8jPqKqrMCQj3ZWfGzd/TtiunvczRDnBfuCPRy5FOCvTIeuXZYzbB1N/8 +Ipf3YF3qKS9Ysr1YvY2WTxB1v0h7PVGHoTx0IsL8B3+A3MSs/mrBcDCw6Y5p4ixp +gZQJut3+TcCDjJRYwEYgr5wfAvg1VUkvRtTA8KCWAg8zxXHzniN9lLf9OtMJgwYh +/WA9rjLA0u6NpvDntIJ8CsxwyXmA+P5M9zWEGYox+wrZ13+b8KKaa8MFSu1BYBQw 
+0aoRQm7TIwIEC8Zl3d1Sd9qBa7Ko+gE4uZbqKmxnl4mUnrzhVNXkanjvSr0rmj1A +fsbAddJu+2gw7OyLnflJNZoaLNmzlTnVHpL3prllL+U9bTpITAjc5CgSKL59NVzq +4BZ+Extq1z7XnvwtdbLBFNUjA9tbbws+eC8N3jONFrdI54OagQ97wUNNVQQXOEpR +1VmiiXTTn74eS9fGbbeIJG9gkaSChVtWQbzQRKtqE77RLFi3EjNYsjdj3BP1lB0/ +QFH1T/U67cjF68IeHRaVesd+QnGTbksVtzDfqu1XhUisHWrdOWnk4Xl4vs4Fv6EM +94B7IWcnMFk= +-----END CERTIFICATE----- + +# Issuer: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden +# Subject: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden +# Label: "Staat der Nederlanden EV Root CA" +# Serial: 10000013 +# MD5 Fingerprint: fc:06:af:7b:e8:1a:f1:9a:b4:e8:d2:70:1f:c0:f5:ba +# SHA1 Fingerprint: 76:e2:7e:c1:4f:db:82:c1:c0:a6:75:b5:05:be:3d:29:b4:ed:db:bb +# SHA256 Fingerprint: 4d:24:91:41:4c:fe:95:67:46:ec:4c:ef:a6:cf:6f:72:e2:8a:13:29:43:2f:9d:8a:90:7a:c4:cb:5d:ad:c1:5a +-----BEGIN CERTIFICATE----- +MIIFcDCCA1igAwIBAgIEAJiWjTANBgkqhkiG9w0BAQsFADBYMQswCQYDVQQGEwJO +TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSkwJwYDVQQDDCBTdGFh +dCBkZXIgTmVkZXJsYW5kZW4gRVYgUm9vdCBDQTAeFw0xMDEyMDgxMTE5MjlaFw0y +MjEyMDgxMTEwMjhaMFgxCzAJBgNVBAYTAk5MMR4wHAYDVQQKDBVTdGFhdCBkZXIg +TmVkZXJsYW5kZW4xKTAnBgNVBAMMIFN0YWF0IGRlciBOZWRlcmxhbmRlbiBFViBS +b290IENBMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA48d+ifkkSzrS +M4M1LGns3Amk41GoJSt5uAg94JG6hIXGhaTK5skuU6TJJB79VWZxXSzFYGgEt9nC +UiY4iKTWO0Cmws0/zZiTs1QUWJZV1VD+hq2kY39ch/aO5ieSZxeSAgMs3NZmdO3d +Z//BYY1jTw+bbRcwJu+r0h8QoPnFfxZpgQNH7R5ojXKhTbImxrpsX23Wr9GxE46p +rfNeaXUmGD5BKyF/7otdBwadQ8QpCiv8Kj6GyzyDOvnJDdrFmeK8eEEzduG/L13l +pJhQDBXd4Pqcfzho0LKmeqfRMb1+ilgnQ7O6M5HTp5gVXJrm0w912fxBmJc+qiXb +j5IusHsMX/FjqTf5m3VpTCgmJdrV8hJwRVXj33NeN/UhbJCONVrJ0yPr08C+eKxC +KFhmpUZtcALXEPlLVPxdhkqHz3/KRawRWrUgUY0viEeXOcDPusBCAUCZSCELa6fS +/ZbV0b5GnUngC6agIk440ME8MLxwjyx1zNDFjFE7PZQIZCZhfbnDZY8UnCHQqv0X +cgOPvZuM5l5Tnrmd74K74bzickFbIZTTRTeU0d8JOV3nI6qaHcptqAqGhYqCvkIH +1vI4gnPah1vlPNOePqc7nvQDs/nxfRN0Av+7oeX6AHkcpmZBiFxgV6YuCcS6/ZrP +px9Aw7vMWgpVSzs4dlG4Y4uElBbmVvMCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB +/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFP6rAJCYniT8qcwaivsnuL8wbqg7 +MA0GCSqGSIb3DQEBCwUAA4ICAQDPdyxuVr5Os7aEAJSrR8kN0nbHhp8dB9O2tLsI +eK9p0gtJ3jPFrK3CiAJ9Brc1AsFgyb/E6JTe1NOpEyVa/m6irn0F3H3zbPB+po3u +2dfOWBfoqSmuc0iH55vKbimhZF8ZE/euBhD/UcabTVUlT5OZEAFTdfETzsemQUHS +v4ilf0X8rLiltTMMgsT7B/Zq5SWEXwbKwYY5EdtYzXc7LMJMD16a4/CrPmEbUCTC +wPTxGfARKbalGAKb12NMcIxHowNDXLldRqANb/9Zjr7dn3LDWyvfjFvO5QxGbJKy +CqNMVEIYFRIYvdr8unRu/8G2oGTYqV9Vrp9canaW2HNnh/tNf1zuacpzEPuKqf2e +vTY4SUmH9A4U8OmHuD+nT3pajnnUk+S7aFKErGzp85hwVXIy+TSrK0m1zSBi5Dp6 +Z2Orltxtrpfs/J92VoguZs9btsmksNcFuuEnL5O7Jiqik7Ab846+HUCjuTaPPoIa +Gl6I6lD4WeKDRikL40Rc4ZW2aZCaFG+XroHPaO+Zmr615+F/+PoTRxZMzG0IQOeL +eG9QgkRQP2YGiqtDhFZKDyAthg710tvSeopLzaXoTvFeJiUBWSOgftL2fiFX1ye8 +FVdMpEbB4IMeDExNH08GGeL5qPQ6gqGyeUN51q1veieQA6TqJIc/2b3Z6fJfUEkc +7uzXLg== +-----END CERTIFICATE----- + +# Issuer: CN=IdenTrust Commercial Root CA 1 O=IdenTrust +# Subject: CN=IdenTrust Commercial Root CA 1 O=IdenTrust +# Label: "IdenTrust Commercial Root CA 1" +# Serial: 13298821034946342390520003877796839426 +# MD5 Fingerprint: b3:3e:77:73:75:ee:a0:d3:e3:7e:49:63:49:59:bb:c7 +# SHA1 Fingerprint: df:71:7e:aa:4a:d9:4e:c9:55:84:99:60:2d:48:de:5f:bc:f0:3a:25 +# SHA256 Fingerprint: 5d:56:49:9b:e4:d2:e0:8b:cf:ca:d0:8a:3e:38:72:3d:50:50:3b:de:70:69:48:e4:2f:55:60:30:19:e5:28:ae +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIQCgFCgAAAAUUjyES1AAAAAjANBgkqhkiG9w0BAQsFADBK +MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScwJQYDVQQDEx5JZGVu +VHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwHhcNMTQwMTE2MTgxMjIzWhcNMzQw 
+MTE2MTgxMjIzWjBKMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScw +JQYDVQQDEx5JZGVuVHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCnUBneP5k91DNG8W9RYYKyqU+PZ4ldhNlT +3Qwo2dfw/66VQ3KZ+bVdfIrBQuExUHTRgQ18zZshq0PirK1ehm7zCYofWjK9ouuU ++ehcCuz/mNKvcbO0U59Oh++SvL3sTzIwiEsXXlfEU8L2ApeN2WIrvyQfYo3fw7gp +S0l4PJNgiCL8mdo2yMKi1CxUAGc1bnO/AljwpN3lsKImesrgNqUZFvX9t++uP0D1 +bVoE/c40yiTcdCMbXTMTEl3EASX2MN0CXZ/g1Ue9tOsbobtJSdifWwLziuQkkORi +T0/Br4sOdBeo0XKIanoBScy0RnnGF7HamB4HWfp1IYVl3ZBWzvurpWCdxJ35UrCL +vYf5jysjCiN2O/cz4ckA82n5S6LgTrx+kzmEB/dEcH7+B1rlsazRGMzyNeVJSQjK +Vsk9+w8YfYs7wRPCTY/JTw436R+hDmrfYi7LNQZReSzIJTj0+kuniVyc0uMNOYZK +dHzVWYfCP04MXFL0PfdSgvHqo6z9STQaKPNBiDoT7uje/5kdX7rL6B7yuVBgwDHT +c+XvvqDtMwt0viAgxGds8AgDelWAf0ZOlqf0Hj7h9tgJ4TNkK2PXMl6f+cB7D3hv +l7yTmvmcEpB4eoCHFddydJxVdHixuuFucAS6T6C6aMN7/zHwcz09lCqxC0EOoP5N +iGVreTO01wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB +/zAdBgNVHQ4EFgQU7UQZwNPwBovupHu+QucmVMiONnYwDQYJKoZIhvcNAQELBQAD +ggIBAA2ukDL2pkt8RHYZYR4nKM1eVO8lvOMIkPkp165oCOGUAFjvLi5+U1KMtlwH +6oi6mYtQlNeCgN9hCQCTrQ0U5s7B8jeUeLBfnLOic7iPBZM4zY0+sLj7wM+x8uwt +LRvM7Kqas6pgghstO8OEPVeKlh6cdbjTMM1gCIOQ045U8U1mwF10A0Cj7oV+wh93 +nAbowacYXVKV7cndJZ5t+qntozo00Fl72u1Q8zW/7esUTTHHYPTa8Yec4kjixsU3 ++wYQ+nVZZjFHKdp2mhzpgq7vmrlR94gjmmmVYjzlVYA211QC//G5Xc7UI2/YRYRK +W2XviQzdFKcgyxilJbQN+QHwotL0AMh0jqEqSI5l2xPE4iUXfeu+h1sXIFRRk0pT +AwvsXcoz7WL9RccvW9xYoIA55vrX/hMUpu09lEpCdNTDd1lzzY9GvlU47/rokTLq +l1gEIt44w8y8bckzOmoKaT+gyOpyj4xjhiO9bTyWnpXgSUyqorkqG5w2gXjtw+hG +4iZZRHUe2XWJUc0QhJ1hYMtd+ZciTY6Y5uN/9lu7rs3KSoFrXgvzUeF0K+l+J6fZ +mUlO+KWA2yUPHGNiiskzZ2s8EIPGrd6ozRaOjfAHN3Gf8qv8QfXBi+wAN10J5U6A +7/qxXDgGpRtK4dw4LTzcqx+QGtVKnO7RcGzM7vRX+Bi6hG6H +-----END CERTIFICATE----- + +# Issuer: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust +# Subject: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust +# Label: "IdenTrust Public Sector Root CA 1" +# Serial: 13298821034946342390521976156843933698 +# MD5 Fingerprint: 37:06:a5:b0:fc:89:9d:ba:f4:6b:8c:1a:64:cd:d5:ba +# SHA1 Fingerprint: ba:29:41:60:77:98:3f:f4:f3:ef:f2:31:05:3b:2e:ea:6d:4d:45:fd +# SHA256 Fingerprint: 30:d0:89:5a:9a:44:8a:26:20:91:63:55:22:d1:f5:20:10:b5:86:7a:ca:e1:2c:78:ef:95:8f:d4:f4:38:9f:2f +-----BEGIN CERTIFICATE----- +MIIFZjCCA06gAwIBAgIQCgFCgAAAAUUjz0Z8AAAAAjANBgkqhkiG9w0BAQsFADBN +MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MSowKAYDVQQDEyFJZGVu +VHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwHhcNMTQwMTE2MTc1MzMyWhcN +MzQwMTE2MTc1MzMyWjBNMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0 +MSowKAYDVQQDEyFJZGVuVHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2IpT8pEiv6EdrCvsnduTyP4o7 +ekosMSqMjbCpwzFrqHd2hCa2rIFCDQjrVVi7evi8ZX3yoG2LqEfpYnYeEe4IFNGy +RBb06tD6Hi9e28tzQa68ALBKK0CyrOE7S8ItneShm+waOh7wCLPQ5CQ1B5+ctMlS +bdsHyo+1W/CD80/HLaXIrcuVIKQxKFdYWuSNG5qrng0M8gozOSI5Cpcu81N3uURF +/YTLNiCBWS2ab21ISGHKTN9T0a9SvESfqy9rg3LvdYDaBjMbXcjaY8ZNzaxmMc3R +3j6HEDbhuaR672BQssvKplbgN6+rNBM5Jeg5ZuSYeqoSmJxZZoY+rfGwyj4GD3vw +EUs3oERte8uojHH01bWRNszwFcYr3lEXsZdMUD2xlVl8BX0tIdUAvwFnol57plzy +9yLxkA2T26pEUWbMfXYD62qoKjgZl3YNa4ph+bz27nb9cCvdKTz4Ch5bQhyLVi9V +GxyhLrXHFub4qjySjmm2AcG1hp2JDws4lFTo6tyePSW8Uybt1as5qsVATFSrsrTZ +2fjXctscvG29ZV/viDUqZi/u9rNl8DONfJhBaUYPQxxp+pu10GFqzcpL2UyQRqsV +WaFHVCkugyhfHMKiq3IXAAaOReyL4jM9f9oZRORicsPfIsbyVtTdX5Vy7W1f90gD +W/3FKqD2cyOEEBsB5wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ +BAUwAwEB/zAdBgNVHQ4EFgQU43HgntinQtnbcZFrlJPrw6PRFKMwDQYJKoZIhvcN +AQELBQADggIBAEf63QqwEZE4rU1d9+UOl1QZgkiHVIyqZJnYWv6IAcVYpZmxI1Qj 
+t2odIFflAWJBF9MJ23XLblSQdf4an4EKwt3X9wnQW3IV5B4Jaj0z8yGa5hV+rVHV +DRDtfULAj+7AmgjVQdZcDiFpboBhDhXAuM/FSRJSzL46zNQuOAXeNf0fb7iAaJg9 +TaDKQGXSc3z1i9kKlT/YPyNtGtEqJBnZhbMX73huqVjRI9PHE+1yJX9dsXNw0H8G +lwmEKYBhHfpe/3OsoOOJuBxxFcbeMX8S3OFtm6/n6J91eEyrRjuazr8FGF1NFTwW +mhlQBJqymm9li1JfPFgEKCXAZmExfrngdbkaqIHWchezxQMxNRF4eKLg6TCMf4Df +WN88uieW4oA0beOY02QnrEh+KHdcxiVhJfiFDGX6xDIvpZgF5PgLZxYWxoK4Mhn5 ++bl53B/N66+rDt0b20XkeucC4pVd/GnwU2lhlXV5C15V5jgclKlZM57IcXR5f1GJ +tshquDDIajjDbp7hNxbqBWJMWxJH7ae0s1hWx0nzfxJoCTFx8G34Tkf71oXuxVhA +GaQdp/lLQzfcaFpPz+vCZHTetBXZ9FRUGi8c15dxVJCO2SCdUyt/q4/i6jC8UDfv +8Ue1fXwsBOxonbRJRBD0ckscZOf85muQ3Wl9af0AVqW3rLatt8o+Ae+c +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only +# Subject: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only +# Label: "Entrust Root Certification Authority - G2" +# Serial: 1246989352 +# MD5 Fingerprint: 4b:e2:c9:91:96:65:0c:f4:0e:5a:93:92:a0:0a:fe:b2 +# SHA1 Fingerprint: 8c:f4:27:fd:79:0c:3a:d1:66:06:8d:e8:1e:57:ef:bb:93:22:72:d4 +# SHA256 Fingerprint: 43:df:57:74:b0:3e:7f:ef:5f:e4:0d:93:1a:7b:ed:f1:bb:2e:6b:42:73:8c:4e:6d:38:41:10:3d:3a:a7:f3:39 +-----BEGIN CERTIFICATE----- +MIIEPjCCAyagAwIBAgIESlOMKDANBgkqhkiG9w0BAQsFADCBvjELMAkGA1UEBhMC +VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50 +cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3Qs +IEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVz +dCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRzIwHhcNMDkwNzA3MTcy +NTU0WhcNMzAxMjA3MTc1NTU0WjCBvjELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUVu +dHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50cnVzdC5uZXQvbGVnYWwt +dGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3QsIEluYy4gLSBmb3IgYXV0 +aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVzdCBSb290IENlcnRpZmlj +YXRpb24gQXV0aG9yaXR5IC0gRzIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK +AoIBAQC6hLZy254Ma+KZ6TABp3bqMriVQRrJ2mFOWHLP/vaCeb9zYQYKpSfYs1/T +RU4cctZOMvJyig/3gxnQaoCAAEUesMfnmr8SVycco2gvCoe9amsOXmXzHHfV1IWN +cCG0szLni6LVhjkCsbjSR87kyUnEO6fe+1R9V77w6G7CebI6C1XiUJgWMhNcL3hW +wcKUs/Ja5CeanyTXxuzQmyWC48zCxEXFjJd6BmsqEZ+pCm5IO2/b1BEZQvePB7/1 +U1+cPvQXLOZprE4yTGJ36rfo5bs0vBmLrpxR57d+tVOxMyLlbc9wPBr64ptntoP0 +jaWvYkxN4FisZDQSA/i2jZRjJKRxAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAP +BgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqciZ60B7vfec7aVHUbI2fkBJmqzAN +BgkqhkiG9w0BAQsFAAOCAQEAeZ8dlsa2eT8ijYfThwMEYGprmi5ZiXMRrEPR9RP/ +jTkrwPK9T3CMqS/qF8QLVJ7UG5aYMzyorWKiAHarWWluBh1+xLlEjZivEtRh2woZ +Rkfz6/djwUAFQKXSt/S1mja/qYh2iARVBCuch38aNzx+LaUa2NSJXsq9rD1s2G2v +1fN2D807iDginWyTmsQ9v4IbZT+mD12q/OWyFcq1rca8PdCE6OoGcrBNOTJ4vz4R +nAuknZoh8/CbCzB428Hch0P+vGOaysXCHMnHjf87ElgI5rY97HosTvuDls4MPGmH +VHOkc8KT/1EQrBVUAdj8BbGJoX90g5pJ19xOe4pIb4tF9g== +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only +# Subject: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. 
- for authorized use only +# Label: "Entrust Root Certification Authority - EC1" +# Serial: 51543124481930649114116133369 +# MD5 Fingerprint: b6:7e:1d:f0:58:c5:49:6c:24:3b:3d:ed:98:18:ed:bc +# SHA1 Fingerprint: 20:d8:06:40:df:9b:25:f5:12:25:3a:11:ea:f7:59:8a:eb:14:b5:47 +# SHA256 Fingerprint: 02:ed:0e:b2:8c:14:da:45:16:5c:56:67:91:70:0d:64:51:d7:fb:56:f0:b2:ab:1d:3b:8e:b0:70:e5:6e:df:f5 +-----BEGIN CERTIFICATE----- +MIIC+TCCAoCgAwIBAgINAKaLeSkAAAAAUNCR+TAKBggqhkjOPQQDAzCBvzELMAkG +A1UEBhMCVVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3 +d3cuZW50cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDEyIEVu +dHJ1c3QsIEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEzMDEGA1UEAxMq +RW50cnVzdCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRUMxMB4XDTEy +MTIxODE1MjUzNloXDTM3MTIxODE1NTUzNlowgb8xCzAJBgNVBAYTAlVTMRYwFAYD +VQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1c3QubmV0 +L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxMiBFbnRydXN0LCBJbmMuIC0g +Zm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMzAxBgNVBAMTKkVudHJ1c3QgUm9vdCBD +ZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEVDMTB2MBAGByqGSM49AgEGBSuBBAAi +A2IABIQTydC6bUF74mzQ61VfZgIaJPRbiWlH47jCffHyAsWfoPZb1YsGGYZPUxBt +ByQnoaD41UcZYUx9ypMn6nQM72+WCf5j7HBdNq1nd67JnXxVRDqiY1Ef9eNi1KlH +Bz7MIKNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O +BBYEFLdj5xrdjekIplWDpOBqUEFlEUJJMAoGCCqGSM49BAMDA2cAMGQCMGF52OVC +R98crlOZF7ZvHH3hvxGU0QOIdeSNiaSKd0bebWHvAvX7td/M/k7//qnmpwIwW5nX +hTcGtXsI/esni0qU+eH6p44mCOh8kmhtc9hvJqwhAriZtyZBWyVgrtBIGu4G +-----END CERTIFICATE----- + +# Issuer: CN=CFCA EV ROOT O=China Financial Certification Authority +# Subject: CN=CFCA EV ROOT O=China Financial Certification Authority +# Label: "CFCA EV ROOT" +# Serial: 407555286 +# MD5 Fingerprint: 74:e1:b6:ed:26:7a:7a:44:30:33:94:ab:7b:27:81:30 +# SHA1 Fingerprint: e2:b8:29:4b:55:84:ab:6b:58:c2:90:46:6c:ac:3f:b8:39:8f:84:83 +# SHA256 Fingerprint: 5c:c3:d7:8e:4e:1d:5e:45:54:7a:04:e6:87:3e:64:f9:0c:f9:53:6d:1c:cc:2e:f8:00:f3:55:c4:c5:fd:70:fd +-----BEGIN CERTIFICATE----- +MIIFjTCCA3WgAwIBAgIEGErM1jANBgkqhkiG9w0BAQsFADBWMQswCQYDVQQGEwJD +TjEwMC4GA1UECgwnQ2hpbmEgRmluYW5jaWFsIENlcnRpZmljYXRpb24gQXV0aG9y +aXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJPT1QwHhcNMTIwODA4MDMwNzAxWhcNMjkx +MjMxMDMwNzAxWjBWMQswCQYDVQQGEwJDTjEwMC4GA1UECgwnQ2hpbmEgRmluYW5j +aWFsIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJP +T1QwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDXXWvNED8fBVnVBU03 +sQ7smCuOFR36k0sXgiFxEFLXUWRwFsJVaU2OFW2fvwwbwuCjZ9YMrM8irq93VCpL +TIpTUnrD7i7es3ElweldPe6hL6P3KjzJIx1qqx2hp/Hz7KDVRM8Vz3IvHWOX6Jn5 +/ZOkVIBMUtRSqy5J35DNuF++P96hyk0g1CXohClTt7GIH//62pCfCqktQT+x8Rgp +7hZZLDRJGqgG16iI0gNyejLi6mhNbiyWZXvKWfry4t3uMCz7zEasxGPrb382KzRz +EpR/38wmnvFyXVBlWY9ps4deMm/DGIq1lY+wejfeWkU7xzbh72fROdOXW3NiGUgt +hxwG+3SYIElz8AXSG7Ggo7cbcNOIabla1jj0Ytwli3i/+Oh+uFzJlU9fpy25IGvP +a931DfSCt/SyZi4QKPaXWnuWFo8BGS1sbn85WAZkgwGDg8NNkt0yxoekN+kWzqot +aK8KgWU6cMGbrU1tVMoqLUuFG7OA5nBFDWteNfB/O7ic5ARwiRIlk9oKmSJgamNg +TnYGmE69g60dWIolhdLHZR4tjsbftsbhf4oEIRUpdPA+nJCdDC7xij5aqgwJHsfV +PKPtl8MeNPo4+QgO48BdK4PRVmrJtqhUUy54Mmc9gn900PvhtgVguXDbjgv5E1hv +cWAQUhC5wUEJ73IfZzF4/5YFjQIDAQABo2MwYTAfBgNVHSMEGDAWgBTj/i39KNAL +tbq2osS/BqoFjJP7LzAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAd +BgNVHQ4EFgQU4/4t/SjQC7W6tqLEvwaqBYyT+y8wDQYJKoZIhvcNAQELBQADggIB +ACXGumvrh8vegjmWPfBEp2uEcwPenStPuiB/vHiyz5ewG5zz13ku9Ui20vsXiObT +ej/tUxPQ4i9qecsAIyjmHjdXNYmEwnZPNDatZ8POQQaIxffu2Bq41gt/UP+TqhdL +jOztUmCypAbqTuv0axn96/Ua4CUqmtzHQTb3yHQFhDmVOdYLO6Qn+gjYXB74BGBS +ESgoA//vU2YApUo0FmZ8/Qmkrp5nGm9BC2sGE5uPhnEFtC+NiWYzKXZUmhH4J/qy 
+P5Hgzg0b8zAarb8iXRvTvyUFTeGSGn+ZnzxEk8rUQElsgIfXBDrDMlI1Dlb4pd19 +xIsNER9Tyx6yF7Zod1rg1MvIB671Oi6ON7fQAUtDKXeMOZePglr4UeWJoBjnaH9d +Ci77o0cOPaYjesYBx4/IXr9tgFa+iiS6M+qf4TIRnvHST4D2G0CvOJ4RUHlzEhLN +5mydLIhyPDCBBpEi6lmt2hkuIsKNuYyH4Ga8cyNfIWRjgEj1oDwYPZTISEEdQLpe +/v5WOaHIz16eGWRGENoXkbcFgKyLmZJ956LYBws2J+dIeWCKw9cTXPhyQN9Ky8+Z +AAoACxGV2lZFA4gKn2fQ1XmxqI1AbQ3CekD6819kR5LLU7m7Wc5P/dAVUwHY3+vZ +5nbv0CO7O6l5s9UCKc2Jo5YPSjXnTkLAdc0Hz+Ys63su +-----END CERTIFICATE----- + +# Issuer: CN=Certinomis - Root CA O=Certinomis OU=0002 433998903 +# Subject: CN=Certinomis - Root CA O=Certinomis OU=0002 433998903 +# Label: "Certinomis - Root CA" +# Serial: 1 +# MD5 Fingerprint: 14:0a:fd:8d:a8:28:b5:38:69:db:56:7e:61:22:03:3f +# SHA1 Fingerprint: 9d:70:bb:01:a5:a4:a0:18:11:2e:f7:1c:01:b9:32:c5:34:e7:88:a8 +# SHA256 Fingerprint: 2a:99:f5:bc:11:74:b7:3c:bb:1d:62:08:84:e0:1c:34:e5:1c:cb:39:78:da:12:5f:0e:33:26:88:83:bf:41:58 +-----BEGIN CERTIFICATE----- +MIIFkjCCA3qgAwIBAgIBATANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJGUjET +MBEGA1UEChMKQ2VydGlub21pczEXMBUGA1UECxMOMDAwMiA0MzM5OTg5MDMxHTAb +BgNVBAMTFENlcnRpbm9taXMgLSBSb290IENBMB4XDTEzMTAyMTA5MTcxOFoXDTMz +MTAyMTA5MTcxOFowWjELMAkGA1UEBhMCRlIxEzARBgNVBAoTCkNlcnRpbm9taXMx +FzAVBgNVBAsTDjAwMDIgNDMzOTk4OTAzMR0wGwYDVQQDExRDZXJ0aW5vbWlzIC0g +Um9vdCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANTMCQosP5L2 +fxSeC5yaah1AMGT9qt8OHgZbn1CF6s2Nq0Nn3rD6foCWnoR4kkjW4znuzuRZWJfl +LieY6pOod5tK8O90gC3rMB+12ceAnGInkYjwSond3IjmFPnVAy//ldu9n+ws+hQV +WZUKxkd8aRi5pwP5ynapz8dvtF4F/u7BUrJ1Mofs7SlmO/NKFoL21prbcpjp3vDF +TKWrteoB4owuZH9kb/2jJZOLyKIOSY008B/sWEUuNKqEUL3nskoTuLAPrjhdsKkb +5nPJWqHZZkCqqU2mNAKthH6yI8H7KsZn9DS2sJVqM09xRLWtwHkziOC/7aOgFLSc +CbAK42C++PhmiM1b8XcF4LVzbsF9Ri6OSyemzTUK/eVNfaoqoynHWmgE6OXWk6Ri +wsXm9E/G+Z8ajYJJGYrKWUM66A0ywfRMEwNvbqY/kXPLynNvEiCL7sCCeN5LLsJJ +wx3tFvYk9CcbXFcx3FXuqB5vbKziRcxXV4p1VxngtViZSTYxPDMBbRZKzbgqg4SG +m/lg0h9tkQPTYKbVPZrdd5A9NaSfD171UkRpucC63M9933zZxKyGIjK8e2uR73r4 +F2iw4lNVYC2vPsKD2NkJK/DAZNuHi5HMkesE/Xa0lZrmFAYb1TQdvtj/dBxThZng +WVJKYe2InmtJiUZ+IFrZ50rlau7SZRFDAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIB +BjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTvkUz1pcMw6C8I6tNxIqSSaHh0 +2TAfBgNVHSMEGDAWgBTvkUz1pcMw6C8I6tNxIqSSaHh02TANBgkqhkiG9w0BAQsF +AAOCAgEAfj1U2iJdGlg+O1QnurrMyOMaauo++RLrVl89UM7g6kgmJs95Vn6RHJk/ +0KGRHCwPT5iVWVO90CLYiF2cN/z7ZMF4jIuaYAnq1fohX9B0ZedQxb8uuQsLrbWw +F6YSjNRieOpWauwK0kDDPAUwPk2Ut59KA9N9J0u2/kTO+hkzGm2kQtHdzMjI1xZS +g081lLMSVX3l4kLr5JyTCcBMWwerx20RoFAXlCOotQqSD7J6wWAsOMwaplv/8gzj +qh8c3LigkyfeY+N/IZ865Z764BNqdeuWXGKRlI5nU7aJ+BIJy29SWwNyhlCVCNSN +h4YVH5Uk2KRvms6knZtt0rJ2BobGVgjF6wnaNsIbW0G+YSrjcOa4pvi2WsS9Iff/ +ql+hbHY5ZtbqTFXhADObE5hjyW/QASAJN1LnDE8+zbz1X5YnpyACleAu6AdBBR8V +btaw5BngDwKTACdyxYvRVB9dSsNAl35VpnzBMwQUAR1JIGkLGZOdblgi90AMRgwj +Y/M50n92Uaf0yKHxDHYiI0ZSKS3io0EHVmmY0gUJvGnHWmHNj4FgFU2A3ZDifcRQ +8ow7bkrHxuaAKzyBvBGAFhAn1/DNP3nMcyrDflOR1m749fPH0FFNjkulW+YZFzvW +gQncItzujrnEj1PhZ7szuIgVRs/taTX/dQ1G885x4cVrhkIGuUE= +-----END CERTIFICATE----- + +# Issuer: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed +# Subject: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed +# Label: "OISTE WISeKey Global Root GB CA" +# Serial: 157768595616588414422159278966750757568 +# MD5 Fingerprint: a4:eb:b9:61:28:2e:b7:2f:98:b0:35:26:90:99:51:1d +# SHA1 Fingerprint: 0f:f9:40:76:18:d3:d7:6a:4b:98:f0:a8:35:9e:0c:fd:27:ac:cc:ed +# SHA256 Fingerprint: 6b:9c:08:e8:6e:b0:f7:67:cf:ad:65:cd:98:b6:21:49:e5:49:4a:67:f5:84:5e:7b:d1:ed:01:9f:27:b8:6b:d6 +-----BEGIN CERTIFICATE----- 
+MIIDtTCCAp2gAwIBAgIQdrEgUnTwhYdGs/gjGvbCwDANBgkqhkiG9w0BAQsFADBt +MQswCQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUg +Rm91bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9i +YWwgUm9vdCBHQiBDQTAeFw0xNDEyMDExNTAwMzJaFw0zOTEyMDExNTEwMzFaMG0x +CzAJBgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBG +b3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2Jh +bCBSb290IEdCIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2Be3 +HEokKtaXscriHvt9OO+Y9bI5mE4nuBFde9IllIiCFSZqGzG7qFshISvYD06fWvGx +WuR51jIjK+FTzJlFXHtPrby/h0oLS5daqPZI7H17Dc0hBt+eFf1Biki3IPShehtX +1F1Q/7pn2COZH8g/497/b1t3sWtuuMlk9+HKQUYOKXHQuSP8yYFfTvdv37+ErXNk +u7dCjmn21HYdfp2nuFeKUWdy19SouJVUQHMD9ur06/4oQnc/nSMbsrY9gBQHTC5P +99UKFg29ZkM3fiNDecNAhvVMKdqOmq0NpQSHiB6F4+lT1ZvIiwNjeOvgGUpuuy9r +M2RYk61pv48b74JIxwIDAQABo1EwTzALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUw +AwEB/zAdBgNVHQ4EFgQUNQ/INmNe4qPs+TtmFc5RUuORmj0wEAYJKwYBBAGCNxUB +BAMCAQAwDQYJKoZIhvcNAQELBQADggEBAEBM+4eymYGQfp3FsLAmzYh7KzKNbrgh +cViXfa43FK8+5/ea4n32cZiZBKpDdHij40lhPnOMTZTg+XHEthYOU3gf1qKHLwI5 +gSk8rxWYITD+KJAAjNHhy/peyP34EEY7onhCkRd0VQreUGdNZtGn//3ZwLWoo4rO +ZvUPQ82nK1d7Y0Zqqi5S2PTt4W2tKZB4SLrhI6qjiey1q5bAtEuiHZeeevJuQHHf +aPFlTc58Bd9TZaml8LGXBHAVRgOY1NK/VLSgWH1Sb9pWJmLU2NuJMW8c8CLC02Ic +Nc1MaRVUGpCY3useX8p3x8uOPUNpnJpY0CQ73xtAln41rYHHTnG6iBM= +-----END CERTIFICATE----- + +# Issuer: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A. +# Subject: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A. +# Label: "SZAFIR ROOT CA2" +# Serial: 357043034767186914217277344587386743377558296292 +# MD5 Fingerprint: 11:64:c1:89:b0:24:b1:8c:b1:07:7e:89:9e:51:9e:99 +# SHA1 Fingerprint: e2:52:fa:95:3f:ed:db:24:60:bd:6e:28:f3:9c:cc:cf:5e:b3:3f:de +# SHA256 Fingerprint: a1:33:9d:33:28:1a:0b:56:e5:57:d3:d3:2b:1c:e7:f9:36:7e:b0:94:bd:5f:a7:2a:7e:50:04:c8:de:d7:ca:fe +-----BEGIN CERTIFICATE----- +MIIDcjCCAlqgAwIBAgIUPopdB+xV0jLVt+O2XwHrLdzk1uQwDQYJKoZIhvcNAQEL +BQAwUTELMAkGA1UEBhMCUEwxKDAmBgNVBAoMH0tyYWpvd2EgSXpiYSBSb3psaWN6 +ZW5pb3dhIFMuQS4xGDAWBgNVBAMMD1NaQUZJUiBST09UIENBMjAeFw0xNTEwMTkw +NzQzMzBaFw0zNTEwMTkwNzQzMzBaMFExCzAJBgNVBAYTAlBMMSgwJgYDVQQKDB9L +cmFqb3dhIEl6YmEgUm96bGljemVuaW93YSBTLkEuMRgwFgYDVQQDDA9TWkFGSVIg +Uk9PVCBDQTIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC3vD5QqEvN +QLXOYeeWyrSh2gwisPq1e3YAd4wLz32ohswmUeQgPYUM1ljj5/QqGJ3a0a4m7utT +3PSQ1hNKDJA8w/Ta0o4NkjrcsbH/ON7Dui1fgLkCvUqdGw+0w8LBZwPd3BucPbOw +3gAeqDRHu5rr/gsUvTaE2g0gv/pby6kWIK05YO4vdbbnl5z5Pv1+TW9NL++IDWr6 +3fE9biCloBK0TXC5ztdyO4mTp4CEHCdJckm1/zuVnsHMyAHs6A6KCpbns6aH5db5 +BSsNl0BwPLqsdVqc1U2dAgrSS5tmS0YHF2Wtn2yIANwiieDhZNRnvDF5YTy7ykHN +XGoAyDw4jlivAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD +AgEGMB0GA1UdDgQWBBQuFqlKGLXLzPVvUPMjX/hd56zwyDANBgkqhkiG9w0BAQsF +AAOCAQEAtXP4A9xZWx126aMqe5Aosk3AM0+qmrHUuOQn/6mWmc5G4G18TKI4pAZw +8PRBEew/R40/cof5O/2kbytTAOD/OblqBw7rHRz2onKQy4I9EYKL0rufKq8h5mOG +nXkZ7/e7DDWQw4rtTw/1zBLZpD67oPwglV9PJi8RI4NOdQcPv5vRtB3pEAT+ymCP +oky4rc/hkA/NrgrHXXu3UNLUYfrVFdvXn4dRVOul4+vJhaAlIDf7js4MNIThPIGy +d05DpYhfhmehPea0XGG2Ptv+tyjFogeutcrKjSoS75ftwjCkySp6+/NNIxuZMzSg +LvWpCz/UXeHPhJ/iGcJfitYgHuNztw== +-----END CERTIFICATE----- + +# Issuer: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. OU=Certum Certification Authority +# Subject: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. 
OU=Certum Certification Authority +# Label: "Certum Trusted Network CA 2" +# Serial: 44979900017204383099463764357512596969 +# MD5 Fingerprint: 6d:46:9e:d9:25:6d:08:23:5b:5e:74:7d:1e:27:db:f2 +# SHA1 Fingerprint: d3:dd:48:3e:2b:bf:4c:05:e8:af:10:f5:fa:76:26:cf:d3:dc:30:92 +# SHA256 Fingerprint: b6:76:f2:ed:da:e8:77:5c:d3:6c:b0:f6:3c:d1:d4:60:39:61:f4:9e:62:65:ba:01:3a:2f:03:07:b6:d0:b8:04 +-----BEGIN CERTIFICATE----- +MIIF0jCCA7qgAwIBAgIQIdbQSk8lD8kyN/yqXhKN6TANBgkqhkiG9w0BAQ0FADCB +gDELMAkGA1UEBhMCUEwxIjAgBgNVBAoTGVVuaXpldG8gVGVjaG5vbG9naWVzIFMu +QS4xJzAlBgNVBAsTHkNlcnR1bSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTEkMCIG +A1UEAxMbQ2VydHVtIFRydXN0ZWQgTmV0d29yayBDQSAyMCIYDzIwMTExMDA2MDgz +OTU2WhgPMjA0NjEwMDYwODM5NTZaMIGAMQswCQYDVQQGEwJQTDEiMCAGA1UEChMZ +VW5pemV0byBUZWNobm9sb2dpZXMgUy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRp +ZmljYXRpb24gQXV0aG9yaXR5MSQwIgYDVQQDExtDZXJ0dW0gVHJ1c3RlZCBOZXR3 +b3JrIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC9+Xj45tWA +DGSdhhuWZGc/IjoedQF97/tcZ4zJzFxrqZHmuULlIEub2pt7uZld2ZuAS9eEQCsn +0+i6MLs+CRqnSZXvK0AkwpfHp+6bJe+oCgCXhVqqndwpyeI1B+twTUrWwbNWuKFB +OJvR+zF/j+Bf4bE/D44WSWDXBo0Y+aomEKsq09DRZ40bRr5HMNUuctHFY9rnY3lE +fktjJImGLjQ/KUxSiyqnwOKRKIm5wFv5HdnnJ63/mgKXwcZQkpsCLL2puTRZCr+E +Sv/f/rOf69me4Jgj7KZrdxYq28ytOxykh9xGc14ZYmhFV+SQgkK7QtbwYeDBoz1m +o130GO6IyY0XRSmZMnUCMe4pJshrAua1YkV/NxVaI2iJ1D7eTiew8EAMvE0Xy02i +sx7QBlrd9pPPV3WZ9fqGGmd4s7+W/jTcvedSVuWz5XV710GRBdxdaeOVDUO5/IOW +OZV7bIBaTxNyxtd9KXpEulKkKtVBRgkg/iKgtlswjbyJDNXXcPiHUv3a76xRLgez +Tv7QCdpw75j6VuZt27VXS9zlLCUVyJ4ueE742pyehizKV/Ma5ciSixqClnrDvFAS +adgOWkaLOusm+iPJtrCBvkIApPjW/jAux9JG9uWOdf3yzLnQh1vMBhBgu4M1t15n +3kfsmUjxpKEV/q2MYo45VU85FrmxY53/twIDAQABo0IwQDAPBgNVHRMBAf8EBTAD +AQH/MB0GA1UdDgQWBBS2oVQ5AsOgP46KvPrU+Bym0ToO/TAOBgNVHQ8BAf8EBAMC +AQYwDQYJKoZIhvcNAQENBQADggIBAHGlDs7k6b8/ONWJWsQCYftMxRQXLYtPU2sQ +F/xlhMcQSZDe28cmk4gmb3DWAl45oPePq5a1pRNcgRRtDoGCERuKTsZPpd1iHkTf +CVn0W3cLN+mLIMb4Ck4uWBzrM9DPhmDJ2vuAL55MYIR4PSFk1vtBHxgP58l1cb29 +XN40hz5BsA72udY/CROWFC/emh1auVbONTqwX3BNXuMp8SMoclm2q8KMZiYcdywm +djWLKKdpoPk79SPdhRB0yZADVpHnr7pH1BKXESLjokmUbOe3lEu6LaTaM4tMpkT/ +WjzGHWTYtTHkpjx6qFcL2+1hGsvxznN3Y6SHb0xRONbkX8eftoEq5IVIeVheO/jb +AoJnwTnbw3RLPTYe+SmTiGhbqEQZIfCn6IENLOiTNrQ3ssqwGyZ6miUfmpqAnksq +P/ujmv5zMnHCnsZy4YpoJ/HkD7TETKVhk/iXEAcqMCWpuchxuO9ozC1+9eB+D4Ko +b7a6bINDd82Kkhehnlt4Fj1F4jNy3eFmypnTycUm/Q1oBEauttmbjL4ZvrHG8hnj +XALKLNhvSgfZyTXaQHXyxKcZb55CEJh15pWLYLztxRLXis7VmFxWlgPF7ncGNf/P +5O4/E2Hu29othfDNrp2yGAlFw5Khchf8R7agCyzxxN5DaAhqXzvwdmP7zAYspsbi +DrW5viSP +-----END CERTIFICATE----- + +# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority +# Subject: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. 
Authority +# Label: "Hellenic Academic and Research Institutions RootCA 2015" +# Serial: 0 +# MD5 Fingerprint: ca:ff:e2:db:03:d9:cb:4b:e9:0f:ad:84:fd:7b:18:ce +# SHA1 Fingerprint: 01:0c:06:95:a6:98:19:14:ff:bf:5f:c6:b0:b6:95:ea:29:e9:12:a6 +# SHA256 Fingerprint: a0:40:92:9a:02:ce:53:b4:ac:f4:f2:ff:c6:98:1c:e4:49:6f:75:5e:6d:45:fe:0b:2a:69:2b:cd:52:52:3f:36 +-----BEGIN CERTIFICATE----- +MIIGCzCCA/OgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBpjELMAkGA1UEBhMCR1Ix +DzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5k +IFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxQDA+BgNVBAMT +N0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgUm9v +dENBIDIwMTUwHhcNMTUwNzA3MTAxMTIxWhcNNDAwNjMwMTAxMTIxWjCBpjELMAkG +A1UEBhMCR1IxDzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNh +ZGVtaWMgYW5kIFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkx +QDA+BgNVBAMTN0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1 +dGlvbnMgUm9vdENBIDIwMTUwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC +AQDC+Kk/G4n8PDwEXT2QNrCROnk8ZlrvbTkBSRq0t89/TSNTt5AA4xMqKKYx8ZEA +4yjsriFBzh/a/X0SWwGDD7mwX5nh8hKDgE0GPt+sr+ehiGsxr/CL0BgzuNtFajT0 +AoAkKAoCFZVedioNmToUW/bLy1O8E00BiDeUJRtCvCLYjqOWXjrZMts+6PAQZe10 +4S+nfK8nNLspfZu2zwnI5dMK/IhlZXQK3HMcXM1AsRzUtoSMTFDPaI6oWa7CJ06C +ojXdFPQf/7J31Ycvqm59JCfnxssm5uX+Zwdj2EUN3TpZZTlYepKZcj2chF6IIbjV +9Cz82XBST3i4vTwri5WY9bPRaM8gFH5MXF/ni+X1NYEZN9cRCLdmvtNKzoNXADrD +gfgXy5I2XdGj2HUb4Ysn6npIQf1FGQatJ5lOwXBH3bWfgVMS5bGMSF0xQxfjjMZ6 +Y5ZLKTBOhE5iGV48zpeQpX8B653g+IuJ3SWYPZK2fu/Z8VFRfS0myGlZYeCsargq +NhEEelC9MoS+L9xy1dcdFkfkR2YgP/SWxa+OAXqlD3pk9Q0Yh9muiNX6hME6wGko +LfINaFGq46V3xqSQDqE3izEjR8EJCOtu93ib14L8hCCZSRm2Ekax+0VVFqmjZayc +Bw/qa9wfLgZy7IaIEuQt218FL+TwA9MmM+eAws1CoRc0CwIDAQABo0IwQDAPBgNV +HRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUcRVnyMjJvXVd +ctA4GGqd83EkVAswDQYJKoZIhvcNAQELBQADggIBAHW7bVRLqhBYRjTyYtcWNl0I +XtVsyIe9tC5G8jH4fOpCtZMWVdyhDBKg2mF+D1hYc2Ryx+hFjtyp8iY/xnmMsVMI +M4GwVhO+5lFc2JsKT0ucVlMC6U/2DWDqTUJV6HwbISHTGzrMd/K4kPFox/la/vot +9L/J9UUbzjgQKjeKeaO04wlshYaT/4mWJ3iBj2fjRnRUjtkNaeJK9E10A/+yd+2V +Z5fkscWrv2oj6NSU4kQoYsRL4vDY4ilrGnB+JGGTe08DMiUNRSQrlrRGar9KC/ea +j8GsGsVn82800vpzY4zvFrCopEYq+OsS7HK07/grfoxSwIuEVPkvPuNVqNxmsdnh +X9izjFk0WaSrT2y7HxjbdavYy5LNlDhhDgcGH0tGEPEVvo2FXDtKK4F5D7Rpn0lQ +l033DlZdwJVqwjbDG2jJ9SrcR5q+ss7FJej6A7na+RZukYT1HCjI/CbM1xyQVqdf +bzoEvM14iQuODy+jqk+iGxI9FghAD/FGTNeqewjBCvVtJ94Cj8rDtSvK6evIIVM4 +pcw72Hc3MKJP2W/R8kCtQXoXxdZKNYm3QdV8hn9VTYNKpXMgwDqvkPGaJI7ZjnHK +e7iG2rKPmT4dEw0SEe7Uq/DpFXYC5ODfqiAeW2GFZECpkJcNrVPSWh2HagCXZWK0 +vm9qp/UsQu0yrbYhnr68 +-----END CERTIFICATE----- + +# Issuer: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority +# Subject: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. 
Authority +# Label: "Hellenic Academic and Research Institutions ECC RootCA 2015" +# Serial: 0 +# MD5 Fingerprint: 81:e5:b4:17:eb:c2:f5:e1:4b:0d:41:7b:49:92:fe:ef +# SHA1 Fingerprint: 9f:f1:71:8d:92:d5:9a:f3:7d:74:97:b4:bc:6f:84:68:0b:ba:b6:66 +# SHA256 Fingerprint: 44:b5:45:aa:8a:25:e6:5a:73:ca:15:dc:27:fc:36:d2:4c:1c:b9:95:3a:06:65:39:b1:15:82:dc:48:7b:48:33 +-----BEGIN CERTIFICATE----- +MIICwzCCAkqgAwIBAgIBADAKBggqhkjOPQQDAjCBqjELMAkGA1UEBhMCR1IxDzAN +BgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl +c2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxRDBCBgNVBAMTO0hl +bGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgRUNDIFJv +b3RDQSAyMDE1MB4XDTE1MDcwNzEwMzcxMloXDTQwMDYzMDEwMzcxMlowgaoxCzAJ +BgNVBAYTAkdSMQ8wDQYDVQQHEwZBdGhlbnMxRDBCBgNVBAoTO0hlbGxlbmljIEFj +YWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgQ2VydC4gQXV0aG9yaXR5 +MUQwQgYDVQQDEztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0 +dXRpb25zIEVDQyBSb290Q0EgMjAxNTB2MBAGByqGSM49AgEGBSuBBAAiA2IABJKg +QehLgoRc4vgxEZmGZE4JJS+dQS8KrjVPdJWyUWRrjWvmP3CV8AVER6ZyOFB2lQJa +jq4onvktTpnvLEhvTCUp6NFxW98dwXU3tNf6e3pCnGoKVlp8aQuqgAkkbH7BRqNC +MEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFLQi +C4KZJAEOnLvkDv2/+5cgk5kqMAoGCCqGSM49BAMCA2cAMGQCMGfOFmI4oqxiRaep +lSTAGiecMjvAwNW6qef4BENThe5SId6d9SWDPp5YSy/XZxMOIQIwBeF1Ad5o7Sof +TUwJCA3sS61kFyjndc5FZXIhF8siQQ6ME5g4mlRtm8rifOoCWCKR +-----END CERTIFICATE----- + +# Issuer: CN=ISRG Root X1 O=Internet Security Research Group +# Subject: CN=ISRG Root X1 O=Internet Security Research Group +# Label: "ISRG Root X1" +# Serial: 172886928669790476064670243504169061120 +# MD5 Fingerprint: 0c:d2:f9:e0:da:17:73:e9:ed:86:4d:a5:e3:70:e7:4e +# SHA1 Fingerprint: ca:bd:2a:79:a1:07:6a:31:f2:1d:25:36:35:cb:03:9d:43:29:a5:e8 +# SHA256 Fingerprint: 96:bc:ec:06:26:49:76:f3:74:60:77:9a:cf:28:c5:a7:cf:e8:a3:c0:aa:e1:1a:8f:fc:ee:05:c0:bd:df:08:c6 +-----BEGIN CERTIFICATE----- +MIIFazCCA1OgAwIBAgIRAIIQz7DSQONZRGPgu2OCiwAwDQYJKoZIhvcNAQELBQAw +TzELMAkGA1UEBhMCVVMxKTAnBgNVBAoTIEludGVybmV0IFNlY3VyaXR5IFJlc2Vh +cmNoIEdyb3VwMRUwEwYDVQQDEwxJU1JHIFJvb3QgWDEwHhcNMTUwNjA0MTEwNDM4 +WhcNMzUwNjA0MTEwNDM4WjBPMQswCQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJu +ZXQgU2VjdXJpdHkgUmVzZWFyY2ggR3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBY +MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK3oJHP0FDfzm54rVygc +h77ct984kIxuPOZXoHj3dcKi/vVqbvYATyjb3miGbESTtrFj/RQSa78f0uoxmyF+ +0TM8ukj13Xnfs7j/EvEhmkvBioZxaUpmZmyPfjxwv60pIgbz5MDmgK7iS4+3mX6U +A5/TR5d8mUgjU+g4rk8Kb4Mu0UlXjIB0ttov0DiNewNwIRt18jA8+o+u3dpjq+sW +T8KOEUt+zwvo/7V3LvSye0rgTBIlDHCNAymg4VMk7BPZ7hm/ELNKjD+Jo2FR3qyH +B5T0Y3HsLuJvW5iB4YlcNHlsdu87kGJ55tukmi8mxdAQ4Q7e2RCOFvu396j3x+UC +B5iPNgiV5+I3lg02dZ77DnKxHZu8A/lJBdiB3QW0KtZB6awBdpUKD9jf1b0SHzUv +KBds0pjBqAlkd25HN7rOrFleaJ1/ctaJxQZBKT5ZPt0m9STJEadao0xAH0ahmbWn +OlFuhjuefXKnEgV4We0+UXgVCwOPjdAvBbI+e0ocS3MFEvzG6uBQE3xDk3SzynTn +jh8BCNAw1FtxNrQHusEwMFxIt4I7mKZ9YIqioymCzLq9gwQbooMDQaHWBfEbwrbw +qHyGO0aoSCqI3Haadr8faqU9GY/rOPNk3sgrDQoo//fb4hVC1CLQJ13hef4Y53CI +rU7m2Ys6xt0nUW7/vGT1M0NPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR5tFnme7bl5AFzgAiIyBpY9umbbjANBgkq +hkiG9w0BAQsFAAOCAgEAVR9YqbyyqFDQDLHYGmkgJykIrGF1XIpu+ILlaS/V9lZL +ubhzEFnTIZd+50xx+7LSYK05qAvqFyFWhfFQDlnrzuBZ6brJFe+GnY+EgPbk6ZGQ +3BebYhtF8GaV0nxvwuo77x/Py9auJ/GpsMiu/X1+mvoiBOv/2X/qkSsisRcOj/KK +NFtY2PwByVS5uCbMiogziUwthDyC3+6WVwW6LLv3xLfHTjuCvjHIInNzktHCgKQ5 +ORAzI4JMPJ+GslWYHb4phowim57iaztXOoJwTdwJx4nLCgdNbOhdjsnvzqvHu7Ur +TkXWStAmzOVyyghqpZXjFaH3pO3JLF+l+/+sKAIuvtd7u+Nxe5AW0wdeRlN8NwdC +jNPElpzVmbUq4JUagEiuTDkHzsxHpFKVK7q4+63SM1N95R1NbdWhscdCb+ZAJzVc 
+oyi3B43njTOQ5yOf+1CceWxG1bQVs5ZufpsMljq4Ui0/1lvh+wjChP4kqKOJ2qxq +4RgqsahDYVvTH9w7jXbyLeiNdd8XM2w9U/t7y0Ff/9yi0GE44Za4rF2LN9d11TPA +mRGunUHBcnWEvgJBQl9nJEiU0Zsnvgc/ubhPgXRR4Xq37Z0j4r7g1SgEEzwxA57d +emyPxgcYxn/eR44/KJ4EBs+lVDR3veyJm+kXQ99b21/+jh5Xos1AnX5iItreGCc= +-----END CERTIFICATE----- + +# Issuer: O=FNMT-RCM OU=AC RAIZ FNMT-RCM +# Subject: O=FNMT-RCM OU=AC RAIZ FNMT-RCM +# Label: "AC RAIZ FNMT-RCM" +# Serial: 485876308206448804701554682760554759 +# MD5 Fingerprint: e2:09:04:b4:d3:bd:d1:a0:14:fd:1a:d2:47:c4:57:1d +# SHA1 Fingerprint: ec:50:35:07:b2:15:c4:95:62:19:e2:a8:9a:5b:42:99:2c:4c:2c:20 +# SHA256 Fingerprint: eb:c5:57:0c:29:01:8c:4d:67:b1:aa:12:7b:af:12:f7:03:b4:61:1e:bc:17:b7:da:b5:57:38:94:17:9b:93:fa +-----BEGIN CERTIFICATE----- +MIIFgzCCA2ugAwIBAgIPXZONMGc2yAYdGsdUhGkHMA0GCSqGSIb3DQEBCwUAMDsx +CzAJBgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJ +WiBGTk1ULVJDTTAeFw0wODEwMjkxNTU5NTZaFw0zMDAxMDEwMDAwMDBaMDsxCzAJ +BgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJWiBG +Tk1ULVJDTTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBALpxgHpMhm5/ +yBNtwMZ9HACXjywMI7sQmkCpGreHiPibVmr75nuOi5KOpyVdWRHbNi63URcfqQgf +BBckWKo3Shjf5TnUV/3XwSyRAZHiItQDwFj8d0fsjz50Q7qsNI1NOHZnjrDIbzAz +WHFctPVrbtQBULgTfmxKo0nRIBnuvMApGGWn3v7v3QqQIecaZ5JCEJhfTzC8PhxF +tBDXaEAUwED653cXeuYLj2VbPNmaUtu1vZ5Gzz3rkQUCwJaydkxNEJY7kvqcfw+Z +374jNUUeAlz+taibmSXaXvMiwzn15Cou08YfxGyqxRxqAQVKL9LFwag0Jl1mpdIC +IfkYtwb1TplvqKtMUejPUBjFd8g5CSxJkjKZqLsXF3mwWsXmo8RZZUc1g16p6DUL +mbvkzSDGm0oGObVo/CK67lWMK07q87Hj/LaZmtVC+nFNCM+HHmpxffnTtOmlcYF7 +wk5HlqX2doWjKI/pgG6BU6VtX7hI+cL5NqYuSf+4lsKMB7ObiFj86xsc3i1w4peS +MKGJ47xVqCfWS+2QrYv6YyVZLag13cqXM7zlzced0ezvXg5KkAYmY6252TUtB7p2 +ZSysV4999AeU14ECll2jB0nVetBX+RvnU0Z1qrB5QstocQjpYL05ac70r8NWQMet +UqIJ5G+GR4of6ygnXYMgrwTJbFaai0b1AgMBAAGjgYMwgYAwDwYDVR0TAQH/BAUw +AwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFPd9xf3E6Jobd2Sn9R2gzL+H +YJptMD4GA1UdIAQ3MDUwMwYEVR0gADArMCkGCCsGAQUFBwIBFh1odHRwOi8vd3d3 +LmNlcnQuZm5tdC5lcy9kcGNzLzANBgkqhkiG9w0BAQsFAAOCAgEAB5BK3/MjTvDD +nFFlm5wioooMhfNzKWtN/gHiqQxjAb8EZ6WdmF/9ARP67Jpi6Yb+tmLSbkyU+8B1 +RXxlDPiyN8+sD8+Nb/kZ94/sHvJwnvDKuO+3/3Y3dlv2bojzr2IyIpMNOmqOFGYM +LVN0V2Ue1bLdI4E7pWYjJ2cJj+F3qkPNZVEI7VFY/uY5+ctHhKQV8Xa7pO6kO8Rf +77IzlhEYt8llvhjho6Tc+hj507wTmzl6NLrTQfv6MooqtyuGC2mDOL7Nii4LcK2N +JpLuHvUBKwrZ1pebbuCoGRw6IYsMHkCtA+fdZn71uSANA+iW+YJF1DngoABd15jm +fZ5nc8OaKveri6E6FO80vFIOiZiaBECEHX5FaZNXzuvO+FB8TxxuBEOb+dY7Ixjp +6o7RTUaN8Tvkasq6+yO3m/qZASlaWFot4/nUbQ4mrcFuNLwy+AwF+mWj2zs3gyLp +1txyM/1d8iC9djwj2ij3+RvrWWTV3F9yfiD8zYm1kGdNYno/Tq0dwzn+evQoFt9B +9kiABdcPUXmsEKvU7ANm5mqwujGSQkBqvjrTcuFqN1W8rB2Vt2lh8kORdOag0wok +RqEIr9baRRmW1FMdW4R58MD3R++Lj8UGrp1MYp3/RgT408m2ECVAdf4WqslKYIYv +uu8wd+RU4riEmViAqhOLUTpPSPaLtrM= +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 1 O=Amazon +# Subject: CN=Amazon Root CA 1 O=Amazon +# Label: "Amazon Root CA 1" +# Serial: 143266978916655856878034712317230054538369994 +# MD5 Fingerprint: 43:c6:bf:ae:ec:fe:ad:2f:18:c6:88:68:30:fc:c8:e6 +# SHA1 Fingerprint: 8d:a7:f9:65:ec:5e:fc:37:91:0f:1c:6e:59:fd:c1:cc:6a:6e:de:16 +# SHA256 Fingerprint: 8e:cd:e6:88:4f:3d:87:b1:12:5b:a3:1a:c3:fc:b1:3d:70:16:de:7f:57:cc:90:4f:e1:cb:97:c6:ae:98:19:6e +-----BEGIN CERTIFICATE----- +MIIDQTCCAimgAwIBAgITBmyfz5m/jAo54vB4ikPmljZbyjANBgkqhkiG9w0BAQsF +ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6 +b24gUm9vdCBDQSAxMB4XDTE1MDUyNjAwMDAwMFoXDTM4MDExNzAwMDAwMFowOTEL +MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv +b3QgQ0EgMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALJ4gHHKeNXj 
+ca9HgFB0fW7Y14h29Jlo91ghYPl0hAEvrAIthtOgQ3pOsqTQNroBvo3bSMgHFzZM +9O6II8c+6zf1tRn4SWiw3te5djgdYZ6k/oI2peVKVuRF4fn9tBb6dNqcmzU5L/qw +IFAGbHrQgLKm+a/sRxmPUDgH3KKHOVj4utWp+UhnMJbulHheb4mjUcAwhmahRWa6 +VOujw5H5SNz/0egwLX0tdHA114gk957EWW67c4cX8jJGKLhD+rcdqsq08p8kDi1L +93FcXmn/6pUCyziKrlA4b9v7LWIbxcceVOF34GfID5yHI9Y/QCB/IIDEgEw+OyQm +jgSubJrIqg0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC +AYYwHQYDVR0OBBYEFIQYzIU07LwMlJQuCFmcx7IQTgoIMA0GCSqGSIb3DQEBCwUA +A4IBAQCY8jdaQZChGsV2USggNiMOruYou6r4lK5IpDB/G/wkjUu0yKGX9rbxenDI +U5PMCCjjmCXPI6T53iHTfIUJrU6adTrCC2qJeHZERxhlbI1Bjjt/msv0tadQ1wUs +N+gDS63pYaACbvXy8MWy7Vu33PqUXHeeE6V/Uq2V8viTO96LXFvKWlJbYK8U90vv +o/ufQJVtMVT8QtPHRh8jrdkPSHCa2XV4cdFyQzR1bldZwgJcJmApzyMZFo6IQ6XU +5MsI+yMRQ+hDKXJioaldXgjUkK642M4UwtBV8ob2xJNDd2ZhwLnoQdeXeGADbkpy +rqXRfboQnoZsG4q5WTP468SQvvG5 +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 2 O=Amazon +# Subject: CN=Amazon Root CA 2 O=Amazon +# Label: "Amazon Root CA 2" +# Serial: 143266982885963551818349160658925006970653239 +# MD5 Fingerprint: c8:e5:8d:ce:a8:42:e2:7a:c0:2a:5c:7c:9e:26:bf:66 +# SHA1 Fingerprint: 5a:8c:ef:45:d7:a6:98:59:76:7a:8c:8b:44:96:b5:78:cf:47:4b:1a +# SHA256 Fingerprint: 1b:a5:b2:aa:8c:65:40:1a:82:96:01:18:f8:0b:ec:4f:62:30:4d:83:ce:c4:71:3a:19:c3:9c:01:1e:a4:6d:b4 +-----BEGIN CERTIFICATE----- +MIIFQTCCAymgAwIBAgITBmyf0pY1hp8KD+WGePhbJruKNzANBgkqhkiG9w0BAQwF +ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6 +b24gUm9vdCBDQSAyMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTEL +MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv +b3QgQ0EgMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK2Wny2cSkxK +gXlRmeyKy2tgURO8TW0G/LAIjd0ZEGrHJgw12MBvIITplLGbhQPDW9tK6Mj4kHbZ +W0/jTOgGNk3Mmqw9DJArktQGGWCsN0R5hYGCrVo34A3MnaZMUnbqQ523BNFQ9lXg +1dKmSYXpN+nKfq5clU1Imj+uIFptiJXZNLhSGkOQsL9sBbm2eLfq0OQ6PBJTYv9K +8nu+NQWpEjTj82R0Yiw9AElaKP4yRLuH3WUnAnE72kr3H9rN9yFVkE8P7K6C4Z9r +2UXTu/Bfh+08LDmG2j/e7HJV63mjrdvdfLC6HM783k81ds8P+HgfajZRRidhW+me +z/CiVX18JYpvL7TFz4QuK/0NURBs+18bvBt+xa47mAExkv8LV/SasrlX6avvDXbR +8O70zoan4G7ptGmh32n2M8ZpLpcTnqWHsFcQgTfJU7O7f/aS0ZzQGPSSbtqDT6Zj +mUyl+17vIWR6IF9sZIUVyzfpYgwLKhbcAS4y2j5L9Z469hdAlO+ekQiG+r5jqFoz +7Mt0Q5X5bGlSNscpb/xVA1wf+5+9R+vnSUeVC06JIglJ4PVhHvG/LopyboBZ/1c6 ++XUyo05f7O0oYtlNc/LMgRdg7c3r3NunysV+Ar3yVAhU/bQtCSwXVEqY0VThUWcI +0u1ufm8/0i2BWSlmy5A5lREedCf+3euvAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMB +Af8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSwDPBMMPQFWAJI/TPlUq9LhONm +UjANBgkqhkiG9w0BAQwFAAOCAgEAqqiAjw54o+Ci1M3m9Zh6O+oAA7CXDpO8Wqj2 +LIxyh6mx/H9z/WNxeKWHWc8w4Q0QshNabYL1auaAn6AFC2jkR2vHat+2/XcycuUY ++gn0oJMsXdKMdYV2ZZAMA3m3MSNjrXiDCYZohMr/+c8mmpJ5581LxedhpxfL86kS +k5Nrp+gvU5LEYFiwzAJRGFuFjWJZY7attN6a+yb3ACfAXVU3dJnJUH/jWS5E4ywl +7uxMMne0nxrpS10gxdr9HIcWxkPo1LsmmkVwXqkLN1PiRnsn/eBG8om3zEK2yygm +btmlyTrIQRNg91CMFa6ybRoVGld45pIq2WWQgj9sAq+uEjonljYE1x2igGOpm/Hl +urR8FLBOybEfdF849lHqm/osohHUqS0nGkWxr7JOcQ3AWEbWaQbLU8uz/mtBzUF+ +fUwPfHJ5elnNXkoOrJupmHN5fLT0zLm4BwyydFy4x2+IoZCn9Kr5v2c69BoVYh63 +n749sSmvZ6ES8lgQGVMDMBu4Gon2nL2XA46jCfMdiyHxtN/kHNGfZQIG6lzWE7OE +76KlXIx3KadowGuuQNKotOrN8I1LOJwZmhsoVLiJkO/KdYE+HvJkJMcYr07/R54H +9jVlpNMKVv/1F2Rs76giJUmTtt8AF9pYfl3uxRuw0dFfIRDH+fO6AgonB8Xx1sfT +4PsJYGw= +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 3 O=Amazon +# Subject: CN=Amazon Root CA 3 O=Amazon +# Label: "Amazon Root CA 3" +# Serial: 143266986699090766294700635381230934788665930 +# MD5 Fingerprint: a0:d4:ef:0b:f7:b5:d8:49:95:2a:ec:f5:c4:fc:81:87 +# SHA1 Fingerprint: 0d:44:dd:8c:3c:8c:1a:1a:58:75:64:81:e9:0f:2e:2a:ff:b3:d2:6e +# SHA256 Fingerprint: 
18:ce:6c:fe:7b:f1:4e:60:b2:e3:47:b8:df:e8:68:cb:31:d0:2e:bb:3a:da:27:15:69:f5:03:43:b4:6d:b3:a4 +-----BEGIN CERTIFICATE----- +MIIBtjCCAVugAwIBAgITBmyf1XSXNmY/Owua2eiedgPySjAKBggqhkjOPQQDAjA5 +MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g +Um9vdCBDQSAzMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG +A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg +Q0EgMzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABCmXp8ZBf8ANm+gBG1bG8lKl +ui2yEujSLtf6ycXYqm0fc4E7O5hrOXwzpcVOho6AF2hiRVd9RFgdszflZwjrZt6j +QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSr +ttvXBp43rDCGB5Fwx5zEGbF4wDAKBggqhkjOPQQDAgNJADBGAiEA4IWSoxe3jfkr +BqWTrBqYaGFy+uGh0PsceGCmQ5nFuMQCIQCcAu/xlJyzlvnrxir4tiz+OpAUFteM +YyRIHN8wfdVoOw== +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 4 O=Amazon +# Subject: CN=Amazon Root CA 4 O=Amazon +# Label: "Amazon Root CA 4" +# Serial: 143266989758080763974105200630763877849284878 +# MD5 Fingerprint: 89:bc:27:d5:eb:17:8d:06:6a:69:d5:fd:89:47:b4:cd +# SHA1 Fingerprint: f6:10:84:07:d6:f8:bb:67:98:0c:c2:e2:44:c2:eb:ae:1c:ef:63:be +# SHA256 Fingerprint: e3:5d:28:41:9e:d0:20:25:cf:a6:90:38:cd:62:39:62:45:8d:a5:c6:95:fb:de:a3:c2:2b:0b:fb:25:89:70:92 +-----BEGIN CERTIFICATE----- +MIIB8jCCAXigAwIBAgITBmyf18G7EEwpQ+Vxe3ssyBrBDjAKBggqhkjOPQQDAzA5 +MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g +Um9vdCBDQSA0MB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG +A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg +Q0EgNDB2MBAGByqGSM49AgEGBSuBBAAiA2IABNKrijdPo1MN/sGKe0uoe0ZLY7Bi +9i0b2whxIdIA6GO9mif78DluXeo9pcmBqqNbIJhFXRbb/egQbeOc4OO9X4Ri83Bk +M6DLJC9wuoihKqB1+IGuYgbEgds5bimwHvouXKNCMEAwDwYDVR0TAQH/BAUwAwEB +/zAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0OBBYEFNPsxzplbszh2naaVvuc84ZtV+WB +MAoGCCqGSM49BAMDA2gAMGUCMDqLIfG9fhGt0O9Yli/W651+kI0rz2ZVwyzjKKlw +CkcO8DdZEv8tmZQoTipPNU0zWgIxAOp1AE47xDqUEpHJWEadIRNyp4iciuRMStuW +1KyLa2tJElMzrdfkviT8tQp21KW8EA== +-----END CERTIFICATE----- + +# Issuer: CN=LuxTrust Global Root 2 O=LuxTrust S.A. +# Subject: CN=LuxTrust Global Root 2 O=LuxTrust S.A. 
+# Label: "LuxTrust Global Root 2" +# Serial: 59914338225734147123941058376788110305822489521 +# MD5 Fingerprint: b2:e1:09:00:61:af:f7:f1:91:6f:c4:ad:8d:5e:3b:7c +# SHA1 Fingerprint: 1e:0e:56:19:0a:d1:8b:25:98:b2:04:44:ff:66:8a:04:17:99:5f:3f +# SHA256 Fingerprint: 54:45:5f:71:29:c2:0b:14:47:c4:18:f9:97:16:8f:24:c5:8f:c5:02:3b:f5:da:5b:e2:eb:6e:1d:d8:90:2e:d5 +-----BEGIN CERTIFICATE----- +MIIFwzCCA6ugAwIBAgIUCn6m30tEntpqJIWe5rgV0xZ/u7EwDQYJKoZIhvcNAQEL +BQAwRjELMAkGA1UEBhMCTFUxFjAUBgNVBAoMDUx1eFRydXN0IFMuQS4xHzAdBgNV +BAMMFkx1eFRydXN0IEdsb2JhbCBSb290IDIwHhcNMTUwMzA1MTMyMTU3WhcNMzUw +MzA1MTMyMTU3WjBGMQswCQYDVQQGEwJMVTEWMBQGA1UECgwNTHV4VHJ1c3QgUy5B +LjEfMB0GA1UEAwwWTHV4VHJ1c3QgR2xvYmFsIFJvb3QgMjCCAiIwDQYJKoZIhvcN +AQEBBQADggIPADCCAgoCggIBANeFl78RmOnwYoNMPIf5U2o3C/IPPIfOb9wmKb3F +ibrJgz337spbxm1Jc7TJRqMbNBM/wYlFV/TZsfs2ZUv7COJIcRHIbjuend+JZTem +hfY7RBi2xjcwYkSSl2l9QjAk5A0MiWtj3sXh306pFGxT4GHO9hcvHTy95iJMHZP1 +EMShduxq3sVs35a0VkBCwGKSMKEtFZSg0iAGCW5qbeXrt77U8PEVfIvmTroTzEsn +Xpk8F12PgX8zPU/TPxvsXD/wPEx1bvKm1Z3aLQdjAsZy6ZS8TEmVT4hSyNvoaYL4 +zDRbIvCGp4m9SAptZoFtyMhk+wHh9OHe2Z7d21vUKpkmFRseTJIpgp7VkoGSQXAZ +96Tlk0u8d2cx3Rz9MXANF5kM+Qw5GSoXtTBxVdUPrljhPS80m8+f9niFwpN6cj5m +j5wWEWCPnolvZ77gR1o7DJpni89Gxq44o/KnvObWhWszJHAiS8sIm7vI+AIpHb4g +DEa/a4ebsypmQjVGbKq6rfmYe+lQVRQxv7HaLe2ArWgk+2mr2HETMOZns4dA/Yl+ +8kPREd8vZS9kzl8UubG/Mb2HeFpZZYiq/FkySIbWTLkpS5XTdvN3JW1CHDiDTf2j +X5t/Lax5Gw5CMZdjpPuKadUiDTSQMC6otOBttpSsvItO13D8xTiOZCXhTTmQzsmH +hFhxAgMBAAGjgagwgaUwDwYDVR0TAQH/BAUwAwEB/zBCBgNVHSAEOzA5MDcGByuB +KwEBAQowLDAqBggrBgEFBQcCARYeaHR0cHM6Ly9yZXBvc2l0b3J5Lmx1eHRydXN0 +Lmx1MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBT/GCh2+UgFLKGu8SsbK7JT ++Et8szAdBgNVHQ4EFgQU/xgodvlIBSyhrvErGyuyU/hLfLMwDQYJKoZIhvcNAQEL +BQADggIBAGoZFO1uecEsh9QNcH7X9njJCwROxLHOk3D+sFTAMs2ZMGQXvw/l4jP9 +BzZAcg4atmpZ1gDlaCDdLnINH2pkMSCEfUmmWjfrRcmF9dTHF5kH5ptV5AzoqbTO +jFu1EVzPig4N1qx3gf4ynCSecs5U89BvolbW7MM3LGVYvlcAGvI1+ut7MV3CwRI9 +loGIlonBWVx65n9wNOeD4rHh4bhY79SV5GCc8JaXcozrhAIuZY+kt9J/Z93I055c +qqmkoCUUBpvsT34tC38ddfEz2O3OuHVtPlu5mB0xDVbYQw8wkbIEa91WvpWAVWe+ +2M2D2RjuLg+GLZKecBPs3lHJQ3gCpU3I+V/EkVhGFndadKpAvAefMLmx9xIX3eP/ +JEAdemrRTxgKqpAd60Ae36EeRJIQmvKN4dFLRp7oRUKX6kWZ8+xm1QL68qZKJKre +zrnK+T+Tb/mjuuqlPpmt/f97mfVl7vBZKGfXkJWkE4SphMHozs51k2MavDzq1WQf +LSoSOcbDWjLtR5EWDrw4wVDej8oqkDQc7kGUnF4ZLvhFSZl0kbAEb+MEWrGrKqv+ +x9CWttrhSmQGbmBNvUJO/3jaJMobtNeWOWyu8Q6qp31IiyBMz2TWuJdGsE7RKlY6 +oJO9r4Ak4Ap+58rVyuiFVdw2KuGUaJPHZnJED4AhMmwlxyOAgwrr +-----END CERTIFICATE----- + +# Issuer: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM +# Subject: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM +# Label: "TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1" +# Serial: 1 +# MD5 Fingerprint: dc:00:81:dc:69:2f:3e:2f:b0:3b:f6:3d:5a:91:8e:49 +# SHA1 Fingerprint: 31:43:64:9b:ec:ce:27:ec:ed:3a:3f:0b:8f:0d:e4:e8:91:dd:ee:ca +# SHA256 Fingerprint: 46:ed:c3:68:90:46:d5:3a:45:3f:b3:10:4a:b8:0d:ca:ec:65:8b:26:60:ea:16:29:dd:7e:86:79:90:64:87:16 +-----BEGIN CERTIFICATE----- +MIIEYzCCA0ugAwIBAgIBATANBgkqhkiG9w0BAQsFADCB0jELMAkGA1UEBhMCVFIx +GDAWBgNVBAcTD0dlYnplIC0gS29jYWVsaTFCMEAGA1UEChM5VHVya2l5ZSBCaWxp +bXNlbCB2ZSBUZWtub2xvamlrIEFyYXN0aXJtYSBLdXJ1bXUgLSBUVUJJVEFLMS0w +KwYDVQQLEyRLYW11IFNlcnRpZmlrYXN5b24gTWVya2V6aSAtIEthbXUgU00xNjA0 +BgNVBAMTLVRVQklUQUsgS2FtdSBTTSBTU0wgS29rIFNlcnRpZmlrYXNpIC0gU3Vy +dW0gMTAeFw0xMzExMjUwODI1NTVaFw00MzEwMjUwODI1NTVaMIHSMQswCQYDVQQG 
+EwJUUjEYMBYGA1UEBxMPR2ViemUgLSBLb2NhZWxpMUIwQAYDVQQKEzlUdXJraXll +IEJpbGltc2VsIHZlIFRla25vbG9qaWsgQXJhc3Rpcm1hIEt1cnVtdSAtIFRVQklU +QUsxLTArBgNVBAsTJEthbXUgU2VydGlmaWthc3lvbiBNZXJrZXppIC0gS2FtdSBT +TTE2MDQGA1UEAxMtVFVCSVRBSyBLYW11IFNNIFNTTCBLb2sgU2VydGlmaWthc2kg +LSBTdXJ1bSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAr3UwM6q7 +a9OZLBI3hNmNe5eA027n/5tQlT6QlVZC1xl8JoSNkvoBHToP4mQ4t4y86Ij5iySr +LqP1N+RAjhgleYN1Hzv/bKjFxlb4tO2KRKOrbEz8HdDc72i9z+SqzvBV96I01INr +N3wcwv61A+xXzry0tcXtAA9TNypN9E8Mg/uGz8v+jE69h/mniyFXnHrfA2eJLJ2X +YacQuFWQfw4tJzh03+f92k4S400VIgLI4OD8D62K18lUUMw7D8oWgITQUVbDjlZ/ +iSIzL+aFCr2lqBs23tPcLG07xxO9WSMs5uWk99gL7eqQQESolbuT1dCANLZGeA4f +AJNG4e7p+exPFwIDAQABo0IwQDAdBgNVHQ4EFgQUZT/HiobGPN08VFw1+DrtUgxH +V8gwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL +BQADggEBACo/4fEyjq7hmFxLXs9rHmoJ0iKpEsdeV31zVmSAhHqT5Am5EM2fKifh +AHe+SMg1qIGf5LgsyX8OsNJLN13qudULXjS99HMpw+0mFZx+CFOKWI3QSyjfwbPf +IPP54+M638yclNhOT8NrF7f3cuitZjO1JVOr4PhMqZ398g26rrnZqsZr+ZO7rqu4 +lzwDGrpDxpa5RXI4s6ehlj2Re37AIVNMh+3yC1SVUZPVIqUNivGTDj5UDrDYyU7c +8jEyVupk+eq1nRZmQnLzf9OxMUP8pI4X8W0jq5Rm+K37DwhuJi1/FwcJsoz7UMCf +lo3Ptv0AnVoUmr8CRPXBwp8iXqIPoeM= +-----END CERTIFICATE----- + +# Issuer: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD. +# Subject: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD. +# Label: "GDCA TrustAUTH R5 ROOT" +# Serial: 9009899650740120186 +# MD5 Fingerprint: 63:cc:d9:3d:34:35:5c:6f:53:a3:e2:08:70:48:1f:b4 +# SHA1 Fingerprint: 0f:36:38:5b:81:1a:25:c3:9b:31:4e:83:ca:e9:34:66:70:cc:74:b4 +# SHA256 Fingerprint: bf:ff:8f:d0:44:33:48:7d:6a:8a:a6:0c:1a:29:76:7a:9f:c2:bb:b0:5e:42:0f:71:3a:13:b9:92:89:1d:38:93 +-----BEGIN CERTIFICATE----- +MIIFiDCCA3CgAwIBAgIIfQmX/vBH6nowDQYJKoZIhvcNAQELBQAwYjELMAkGA1UE +BhMCQ04xMjAwBgNVBAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZ +IENPLixMVEQuMR8wHQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMB4XDTE0 +MTEyNjA1MTMxNVoXDTQwMTIzMTE1NTk1OVowYjELMAkGA1UEBhMCQ04xMjAwBgNV +BAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZIENPLixMVEQuMR8w +HQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMIICIjANBgkqhkiG9w0BAQEF +AAOCAg8AMIICCgKCAgEA2aMW8Mh0dHeb7zMNOwZ+Vfy1YI92hhJCfVZmPoiC7XJj +Dp6L3TQsAlFRwxn9WVSEyfFrs0yw6ehGXTjGoqcuEVe6ghWinI9tsJlKCvLriXBj +TnnEt1u9ol2x8kECK62pOqPseQrsXzrj/e+APK00mxqriCZ7VqKChh/rNYmDf1+u +KU49tm7srsHwJ5uu4/Ts765/94Y9cnrrpftZTqfrlYwiOXnhLQiPzLyRuEH3FMEj +qcOtmkVEs7LXLM3GKeJQEK5cy4KOFxg2fZfmiJqwTTQJ9Cy5WmYqsBebnh52nUpm +MUHfP/vFBu8btn4aRjb3ZGM74zkYI+dndRTVdVeSN72+ahsmUPI2JgaQxXABZG12 +ZuGR224HwGGALrIuL4xwp9E7PLOR5G62xDtw8mySlwnNR30YwPO7ng/Wi64HtloP +zgsMR6flPri9fcebNaBhlzpBdRfMK5Z3KpIhHtmVdiBnaM8Nvd/WHwlqmuLMc3Gk +L30SgLdTMEZeS1SZD2fJpcjyIMGC7J0R38IC+xo70e0gmu9lZJIQDSri3nDxGGeC +jGHeuLzRL5z7D9Ar7Rt2ueQ5Vfj4oR24qoAATILnsn8JuLwwoC8N9VKejveSswoA +HQBUlwbgsQfZxw9cZX08bVlX5O2ljelAU58VS6Bx9hoh49pwBiFYFIeFd3mqgnkC +AwEAAaNCMEAwHQYDVR0OBBYEFOLJQJ9NzuiaoXzPDj9lxSmIahlRMA8GA1UdEwEB +/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQDRSVfg +p8xoWLoBDysZzY2wYUWsEe1jUGn4H3++Fo/9nesLqjJHdtJnJO29fDMylyrHBYZm +DRd9FBUb1Ov9H5r2XpdptxolpAqzkT9fNqyL7FeoPueBihhXOYV0GkLH6VsTX4/5 +COmSdI31R9KrO9b7eGZONn356ZLpBN79SWP8bfsUcZNnL0dKt7n/HipzcEYwv1ry +L3ml4Y0M2fmyYzeMN2WFcGpcWwlyua1jPLHd+PwyvzeG5LuOmCd+uh8W4XAR8gPf +JWIyJyYYMoSf/wA6E7qaTfRPuBRwIrHKK5DOKcFw9C+df/KQHtZa37dG/OaG+svg +IHZ6uqbL9XzeYqWxi+7egmaKTjowHz+Ay60nugxe19CxVsp3cbK1daFQqUBDF8Io +2c9Si1vIY9RCPqAzekYu9wogRlR+ak8x8YF+QnQ4ZXMn7sZ8uI7XpTrXmKGcjBBV +09tL7ECQ8s1uV9JiDnxXk7Gnbc2dg7sq5+W2O3FYrf3RRbxake5TFW/TRQl1brqQ 
+XR4EzzffHqhmsYzmIGrv/EhOdJhCrylvLmrH+33RZjEizIYAfmaDDEL0vTSSwxrq +T8p+ck0LcIymSLumoRT2+1hEmRSuqguTaaApJUqlyyvdimYHFngVV3Eb7PVHhPOe +MTd61X8kreS8/f3MboPoDKi3QWwH3b08hpcv0g== +-----END CERTIFICATE----- + +# Issuer: CN=TrustCor RootCert CA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority +# Subject: CN=TrustCor RootCert CA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority +# Label: "TrustCor RootCert CA-1" +# Serial: 15752444095811006489 +# MD5 Fingerprint: 6e:85:f1:dc:1a:00:d3:22:d5:b2:b2:ac:6b:37:05:45 +# SHA1 Fingerprint: ff:bd:cd:e7:82:c8:43:5e:3c:6f:26:86:5c:ca:a8:3a:45:5b:c3:0a +# SHA256 Fingerprint: d4:0e:9c:86:cd:8f:e4:68:c1:77:69:59:f4:9e:a7:74:fa:54:86:84:b6:c4:06:f3:90:92:61:f4:dc:e2:57:5c +-----BEGIN CERTIFICATE----- +MIIEMDCCAxigAwIBAgIJANqb7HHzA7AZMA0GCSqGSIb3DQEBCwUAMIGkMQswCQYD +VQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEgQ2l0eTEk +MCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYDVQQLDB5U +cnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxHzAdBgNVBAMMFlRydXN0Q29y +IFJvb3RDZXJ0IENBLTEwHhcNMTYwMjA0MTIzMjE2WhcNMjkxMjMxMTcyMzE2WjCB +pDELMAkGA1UEBhMCUEExDzANBgNVBAgMBlBhbmFtYTEUMBIGA1UEBwwLUGFuYW1h +IENpdHkxJDAiBgNVBAoMG1RydXN0Q29yIFN5c3RlbXMgUy4gZGUgUi5MLjEnMCUG +A1UECwweVHJ1c3RDb3IgQ2VydGlmaWNhdGUgQXV0aG9yaXR5MR8wHQYDVQQDDBZU +cnVzdENvciBSb290Q2VydCBDQS0xMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB +CgKCAQEAv463leLCJhJrMxnHQFgKq1mqjQCj/IDHUHuO1CAmujIS2CNUSSUQIpid +RtLByZ5OGy4sDjjzGiVoHKZaBeYei0i/mJZ0PmnK6bV4pQa81QBeCQryJ3pS/C3V +seq0iWEk8xoT26nPUu0MJLq5nux+AHT6k61sKZKuUbS701e/s/OojZz0JEsq1pme +9J7+wH5COucLlVPat2gOkEz7cD+PSiyU8ybdY2mplNgQTsVHCJCZGxdNuWxu72CV +EY4hgLW9oHPY0LJ3xEXqWib7ZnZ2+AYfYW0PVcWDtxBWcgYHpfOxGgMFZA6dWorW +hnAbJN7+KIor0Gqw/Hqi3LJ5DotlDwIDAQABo2MwYTAdBgNVHQ4EFgQU7mtJPHo/ +DeOxCbeKyKsZn3MzUOcwHwYDVR0jBBgwFoAU7mtJPHo/DeOxCbeKyKsZn3MzUOcw +DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQAD +ggEBACUY1JGPE+6PHh0RU9otRCkZoB5rMZ5NDp6tPVxBb5UrJKF5mDo4Nvu7Zp5I +/5CQ7z3UuJu0h3U/IJvOcs+hVcFNZKIZBqEHMwwLKeXx6quj7LUKdJDHfXLy11yf +ke+Ri7fc7Waiz45mO7yfOgLgJ90WmMCV1Aqk5IGadZQ1nJBfiDcGrVmVCrDRZ9MZ +yonnMlo2HD6CqFqTvsbQZJG2z9m2GM/bftJlo6bEjhcxwft+dtvTheNYsnd6djts +L1Ac59v2Z3kf9YKVmgenFK+P3CghZwnS1k1aHBkcjndcw5QkPTJrS37UeJSDvjdN +zl/HHk484IkzlQsPpTLWPFp5LBk= +-----END CERTIFICATE----- + +# Issuer: CN=TrustCor RootCert CA-2 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority +# Subject: CN=TrustCor RootCert CA-2 O=TrustCor Systems S. de R.L. 
OU=TrustCor Certificate Authority +# Label: "TrustCor RootCert CA-2" +# Serial: 2711694510199101698 +# MD5 Fingerprint: a2:e1:f8:18:0b:ba:45:d5:c7:41:2a:bb:37:52:45:64 +# SHA1 Fingerprint: b8:be:6d:cb:56:f1:55:b9:63:d4:12:ca:4e:06:34:c7:94:b2:1c:c0 +# SHA256 Fingerprint: 07:53:e9:40:37:8c:1b:d5:e3:83:6e:39:5d:ae:a5:cb:83:9e:50:46:f1:bd:0e:ae:19:51:cf:10:fe:c7:c9:65 +-----BEGIN CERTIFICATE----- +MIIGLzCCBBegAwIBAgIIJaHfyjPLWQIwDQYJKoZIhvcNAQELBQAwgaQxCzAJBgNV +BAYTAlBBMQ8wDQYDVQQIDAZQYW5hbWExFDASBgNVBAcMC1BhbmFtYSBDaXR5MSQw +IgYDVQQKDBtUcnVzdENvciBTeXN0ZW1zIFMuIGRlIFIuTC4xJzAlBgNVBAsMHlRy +dXN0Q29yIENlcnRpZmljYXRlIEF1dGhvcml0eTEfMB0GA1UEAwwWVHJ1c3RDb3Ig +Um9vdENlcnQgQ0EtMjAeFw0xNjAyMDQxMjMyMjNaFw0zNDEyMzExNzI2MzlaMIGk +MQswCQYDVQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEg +Q2l0eTEkMCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYD +VQQLDB5UcnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxHzAdBgNVBAMMFlRy +dXN0Q29yIFJvb3RDZXJ0IENBLTIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK +AoICAQCnIG7CKqJiJJWQdsg4foDSq8GbZQWU9MEKENUCrO2fk8eHyLAnK0IMPQo+ +QVqedd2NyuCb7GgypGmSaIwLgQ5WoD4a3SwlFIIvl9NkRvRUqdw6VC0xK5mC8tkq +1+9xALgxpL56JAfDQiDyitSSBBtlVkxs1Pu2YVpHI7TYabS3OtB0PAx1oYxOdqHp +2yqlO/rOsP9+aij9JxzIsekp8VduZLTQwRVtDr4uDkbIXvRR/u8OYzo7cbrPb1nK +DOObXUm4TOJXsZiKQlecdu/vvdFoqNL0Cbt3Nb4lggjEFixEIFapRBF37120Hape +az6LMvYHL1cEksr1/p3C6eizjkxLAjHZ5DxIgif3GIJ2SDpxsROhOdUuxTTCHWKF +3wP+TfSvPd9cW436cOGlfifHhi5qjxLGhF5DUVCcGZt45vz27Ud+ez1m7xMTiF88 +oWP7+ayHNZ/zgp6kPwqcMWmLmaSISo5uZk3vFsQPeSghYA2FFn3XVDjxklb9tTNM +g9zXEJ9L/cb4Qr26fHMC4P99zVvh1Kxhe1fVSntb1IVYJ12/+CtgrKAmrhQhJ8Z3 +mjOAPF5GP/fDsaOGM8boXg25NSyqRsGFAnWAoOsk+xWq5Gd/bnc/9ASKL3x74xdh +8N0JqSDIvgmk0H5Ew7IwSjiqqewYmgeCK9u4nBit2uBGF6zPXQIDAQABo2MwYTAd +BgNVHQ4EFgQU2f4hQG6UnrybPZx9mCAZ5YwwYrIwHwYDVR0jBBgwFoAU2f4hQG6U +nrybPZx9mCAZ5YwwYrIwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYw +DQYJKoZIhvcNAQELBQADggIBAJ5Fngw7tu/hOsh80QA9z+LqBrWyOrsGS2h60COX +dKcs8AjYeVrXWoSK2BKaG9l9XE1wxaX5q+WjiYndAfrs3fnpkpfbsEZC89NiqpX+ +MWcUaViQCqoL7jcjx1BRtPV+nuN79+TMQjItSQzL/0kMmx40/W5ulop5A7Zv2wnL +/V9lFDfhOPXzYRZY5LVtDQsEGz9QLX+zx3oaFoBg+Iof6Rsqxvm6ARppv9JYx1RX +CI/hOWB3S6xZhBqI8d3LT3jX5+EzLfzuQfogsL7L9ziUwOHQhQ+77Sxzq+3+knYa +ZH9bDTMJBzN7Bj8RpFxwPIXAz+OQqIN3+tvmxYxoZxBnpVIt8MSZj3+/0WvitUfW +2dCFmU2Umw9Lje4AWkcdEQOsQRivh7dvDDqPys/cA8GiCcjl/YBeyGBCARsaU1q7 +N6a3vLqE6R5sGtRk2tRD/pOLS/IseRYQ1JMLiI+h2IYURpFHmygk71dSTlxCnKr3 +Sewn6EAes6aJInKc9Q0ztFijMDvd1GpUk74aTfOTlPf8hAs/hCBcNANExdqtvArB +As8e5ZTZ845b2EzwnexhF7sUMlQMAimTHpKG9n/v55IFDlndmQguLvqcAFLTxWYp +5KeXRKQOKIETNcX2b2TmQcTVL8w0RSXPQQCWPUouwpaYT05KnJe32x+SMsj/D1Fu +1uwJ +-----END CERTIFICATE----- + +# Issuer: CN=TrustCor ECA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority +# Subject: CN=TrustCor ECA-1 O=TrustCor Systems S. de R.L. 
OU=TrustCor Certificate Authority +# Label: "TrustCor ECA-1" +# Serial: 9548242946988625984 +# MD5 Fingerprint: 27:92:23:1d:0a:f5:40:7c:e9:e6:6b:9d:d8:f5:e7:6c +# SHA1 Fingerprint: 58:d1:df:95:95:67:6b:63:c0:f0:5b:1c:17:4d:8b:84:0b:c8:78:bd +# SHA256 Fingerprint: 5a:88:5d:b1:9c:01:d9:12:c5:75:93:88:93:8c:af:bb:df:03:1a:b2:d4:8e:91:ee:15:58:9b:42:97:1d:03:9c +-----BEGIN CERTIFICATE----- +MIIEIDCCAwigAwIBAgIJAISCLF8cYtBAMA0GCSqGSIb3DQEBCwUAMIGcMQswCQYD +VQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEgQ2l0eTEk +MCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYDVQQLDB5U +cnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxFzAVBgNVBAMMDlRydXN0Q29y +IEVDQS0xMB4XDTE2MDIwNDEyMzIzM1oXDTI5MTIzMTE3MjgwN1owgZwxCzAJBgNV +BAYTAlBBMQ8wDQYDVQQIDAZQYW5hbWExFDASBgNVBAcMC1BhbmFtYSBDaXR5MSQw +IgYDVQQKDBtUcnVzdENvciBTeXN0ZW1zIFMuIGRlIFIuTC4xJzAlBgNVBAsMHlRy +dXN0Q29yIENlcnRpZmljYXRlIEF1dGhvcml0eTEXMBUGA1UEAwwOVHJ1c3RDb3Ig +RUNBLTEwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDPj+ARtZ+odnbb +3w9U73NjKYKtR8aja+3+XzP4Q1HpGjORMRegdMTUpwHmspI+ap3tDvl0mEDTPwOA +BoJA6LHip1GnHYMma6ve+heRK9jGrB6xnhkB1Zem6g23xFUfJ3zSCNV2HykVh0A5 +3ThFEXXQmqc04L/NyFIduUd+Dbi7xgz2c1cWWn5DkR9VOsZtRASqnKmcp0yJF4Ou +owReUoCLHhIlERnXDH19MURB6tuvsBzvgdAsxZohmz3tQjtQJvLsznFhBmIhVE5/ +wZ0+fyCMgMsq2JdiyIMzkX2woloPV+g7zPIlstR8L+xNxqE6FXrntl019fZISjZF +ZtS6mFjBAgMBAAGjYzBhMB0GA1UdDgQWBBREnkj1zG1I1KBLf/5ZJC+Dl5mahjAf +BgNVHSMEGDAWgBREnkj1zG1I1KBLf/5ZJC+Dl5mahjAPBgNVHRMBAf8EBTADAQH/ +MA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAQEABT41XBVwm8nHc2Fv +civUwo/yQ10CzsSUuZQRg2dd4mdsdXa/uwyqNsatR5Nj3B5+1t4u/ukZMjgDfxT2 +AHMsWbEhBuH7rBiVDKP/mZb3Kyeb1STMHd3BOuCYRLDE5D53sXOpZCz2HAF8P11F +hcCF5yWPldwX8zyfGm6wyuMdKulMY/okYWLW2n62HGz1Ah3UKt1VkOsqEUc8Ll50 +soIipX1TH0XsJ5F95yIW6MBoNtjG8U+ARDL54dHRHareqKucBK+tIA5kmE2la8BI +WJZpTdwHjFGTot+fDz2LYLSCjaoITmJF4PkL0uDgPFveXHEnJcLmA4GLEFPjx1Wi +tJ/X5g== +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com Root Certification Authority RSA O=SSL Corporation +# Subject: CN=SSL.com Root Certification Authority RSA O=SSL Corporation +# Label: "SSL.com Root Certification Authority RSA" +# Serial: 8875640296558310041 +# MD5 Fingerprint: 86:69:12:c0:70:f1:ec:ac:ac:c2:d5:bc:a5:5b:a1:29 +# SHA1 Fingerprint: b7:ab:33:08:d1:ea:44:77:ba:14:80:12:5a:6f:bd:a9:36:49:0c:bb +# SHA256 Fingerprint: 85:66:6a:56:2e:e0:be:5c:e9:25:c1:d8:89:0a:6f:76:a8:7e:c1:6d:4d:7d:5f:29:ea:74:19:cf:20:12:3b:69 +-----BEGIN CERTIFICATE----- +MIIF3TCCA8WgAwIBAgIIeyyb0xaAMpkwDQYJKoZIhvcNAQELBQAwfDELMAkGA1UE +BhMCVVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQK +DA9TU0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZp +Y2F0aW9uIEF1dGhvcml0eSBSU0EwHhcNMTYwMjEyMTczOTM5WhcNNDEwMjEyMTcz +OTM5WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv +dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNv +bSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IFJTQTCCAiIwDQYJKoZIhvcN +AQEBBQADggIPADCCAgoCggIBAPkP3aMrfcvQKv7sZ4Wm5y4bunfh4/WvpOz6Sl2R +xFdHaxh3a3by/ZPkPQ/CFp4LZsNWlJ4Xg4XOVu/yFv0AYvUiCVToZRdOQbngT0aX +qhvIuG5iXmmxX9sqAn78bMrzQdjt0Oj8P2FI7bADFB0QDksZ4LtO7IZl/zbzXmcC +C52GVWH9ejjt/uIZALdvoVBidXQ8oPrIJZK0bnoix/geoeOy3ZExqysdBP+lSgQ3 +6YWkMyv94tZVNHwZpEpox7Ko07fKoZOI68GXvIz5HdkihCR0xwQ9aqkpk8zruFvh +/l8lqjRYyMEjVJ0bmBHDOJx+PYZspQ9AhnwC9FwCTyjLrnGfDzrIM/4RJTXq/LrF +YD3ZfBjVsqnTdXgDciLKOsMf7yzlLqn6niy2UUb9rwPW6mBo6oUWNmuF6R7As93E +JNyAKoFBbZQ+yODJgUEAnl6/f8UImKIYLEJAs/lvOCdLToD0PYFH4Ih86hzOtXVc +US4cK38acijnALXRdMbX5J+tB5O2UzU1/Dfkw/ZdFr4hc96SCvigY2q8lpJqPvi8 +ZVWb3vUNiSYE/CUapiVpy8JtynziWV+XrOvvLsi81xtZPCvM8hnIk2snYxnP/Okm 
++Mpxm3+T/jRnhE6Z6/yzeAkzcLpmpnbtG3PrGqUNxCITIJRWCk4sbE6x/c+cCbqi +M+2HAgMBAAGjYzBhMB0GA1UdDgQWBBTdBAkHovV6fVJTEpKV7jiAJQ2mWTAPBgNV +HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFN0ECQei9Xp9UlMSkpXuOIAlDaZZMA4G +A1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAIBgRlCn7Jp0cHh5wYfGV +cpNxJK1ok1iOMq8bs3AD/CUrdIWQPXhq9LmLpZc7tRiRux6n+UBbkflVma8eEdBc +Hadm47GUBwwyOabqG7B52B2ccETjit3E+ZUfijhDPwGFpUenPUayvOUiaPd7nNgs +PgohyC0zrL/FgZkxdMF1ccW+sfAjRfSda/wZY52jvATGGAslu1OJD7OAUN5F7kR/ +q5R4ZJjT9ijdh9hwZXT7DrkT66cPYakylszeu+1jTBi7qUD3oFRuIIhxdRjqerQ0 +cuAjJ3dctpDqhiVAq+8zD8ufgr6iIPv2tS0a5sKFsXQP+8hlAqRSAUfdSSLBv9jr +a6x+3uxjMxW3IwiPxg+NQVrdjsW5j+VFP3jbutIbQLH+cU0/4IGiul607BXgk90I +H37hVZkLId6Tngr75qNJvTYw/ud3sqB1l7UtgYgXZSD32pAAn8lSzDLKNXz1PQ/Y +K9f1JmzJBjSWFupwWRoyeXkLtoh/D1JIPb9s2KJELtFOt3JY04kTlf5Eq/jXixtu +nLwsoFvVagCvXzfh1foQC5ichucmj87w7G6KVwuA406ywKBjYZC6VWg3dGq2ktuf +oYYitmUnDuy2n0Jg5GfCtdpBC8TTi2EbvPofkSvXRAdeuims2cXp71NIWuuA8ShY +Ic2wBlX7Jz9TkHCpBB5XJ7k= +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com Root Certification Authority ECC O=SSL Corporation +# Subject: CN=SSL.com Root Certification Authority ECC O=SSL Corporation +# Label: "SSL.com Root Certification Authority ECC" +# Serial: 8495723813297216424 +# MD5 Fingerprint: 2e:da:e4:39:7f:9c:8f:37:d1:70:9f:26:17:51:3a:8e +# SHA1 Fingerprint: c3:19:7c:39:24:e6:54:af:1b:c4:ab:20:95:7a:e2:c3:0e:13:02:6a +# SHA256 Fingerprint: 34:17:bb:06:cc:60:07:da:1b:96:1c:92:0b:8a:b4:ce:3f:ad:82:0e:4a:a3:0b:9a:cb:c4:a7:4e:bd:ce:bc:65 +-----BEGIN CERTIFICATE----- +MIICjTCCAhSgAwIBAgIIdebfy8FoW6gwCgYIKoZIzj0EAwIwfDELMAkGA1UEBhMC +VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T +U0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZpY2F0 +aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNDAzWhcNNDEwMjEyMTgxNDAz +WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hvdXN0 +b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNvbSBS +b290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49AgEGBSuB +BAAiA2IABEVuqVDEpiM2nl8ojRfLliJkP9x6jh3MCLOicSS6jkm5BBtHllirLZXI +7Z4INcgn64mMU1jrYor+8FsPazFSY0E7ic3s7LaNGdM0B9y7xgZ/wkWV7Mt/qCPg +CemB+vNH06NjMGEwHQYDVR0OBBYEFILRhXMw5zUE044CkvvlpNHEIejNMA8GA1Ud +EwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUgtGFczDnNQTTjgKS++Wk0cQh6M0wDgYD +VR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2cAMGQCMG/n61kRpGDPYbCWe+0F+S8T +kdzt5fxQaxFGRrMcIQBiu77D5+jNB5n5DQtdcj7EqgIwH7y6C+IwJPt8bYBVCpk+ +gA0z5Wajs6O7pdWLjwkspl1+4vAHCGht0nxpbl/f5Wpl +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation +# Subject: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation +# Label: "SSL.com EV Root Certification Authority RSA R2" +# Serial: 6248227494352943350 +# MD5 Fingerprint: e1:1e:31:58:1a:ae:54:53:02:f6:17:6a:11:7b:4d:95 +# SHA1 Fingerprint: 74:3a:f0:52:9b:d0:32:a0:f4:4a:83:cd:d4:ba:a9:7b:7c:2e:c4:9a +# SHA256 Fingerprint: 2e:7b:f1:6c:c2:24:85:a7:bb:e2:aa:86:96:75:07:61:b0:ae:39:be:3b:2f:e9:d0:cc:6d:4e:f7:34:91:42:5c +-----BEGIN CERTIFICATE----- +MIIF6zCCA9OgAwIBAgIIVrYpzTS8ePYwDQYJKoZIhvcNAQELBQAwgYIxCzAJBgNV +BAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4GA1UEBwwHSG91c3RvbjEYMBYGA1UE +CgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQDDC5TU0wuY29tIEVWIFJvb3QgQ2Vy +dGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIyMB4XDTE3MDUzMTE4MTQzN1oXDTQy +MDUzMDE4MTQzN1owgYIxCzAJBgNVBAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4G +A1UEBwwHSG91c3RvbjEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQD +DC5TU0wuY29tIEVWIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIy +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAjzZlQOHWTcDXtOlG2mvq 
+M0fNTPl9fb69LT3w23jhhqXZuglXaO1XPqDQCEGD5yhBJB/jchXQARr7XnAjssuf +OePPxU7Gkm0mxnu7s9onnQqG6YE3Bf7wcXHswxzpY6IXFJ3vG2fThVUCAtZJycxa +4bH3bzKfydQ7iEGonL3Lq9ttewkfokxykNorCPzPPFTOZw+oz12WGQvE43LrrdF9 +HSfvkusQv1vrO6/PgN3B0pYEW3p+pKk8OHakYo6gOV7qd89dAFmPZiw+B6KjBSYR +aZfqhbcPlgtLyEDhULouisv3D5oi53+aNxPN8k0TayHRwMwi8qFG9kRpnMphNQcA +b9ZhCBHqurj26bNg5U257J8UZslXWNvNh2n4ioYSA0e/ZhN2rHd9NCSFg83XqpyQ +Gp8hLH94t2S42Oim9HizVcuE0jLEeK6jj2HdzghTreyI/BXkmg3mnxp3zkyPuBQV +PWKchjgGAGYS5Fl2WlPAApiiECtoRHuOec4zSnaqW4EWG7WK2NAAe15itAnWhmMO +pgWVSbooi4iTsjQc2KRVbrcc0N6ZVTsj9CLg+SlmJuwgUHfbSguPvuUCYHBBXtSu +UDkiFCbLsjtzdFVHB3mBOagwE0TlBIqulhMlQg+5U8Sb/M3kHN48+qvWBkofZ6aY +MBzdLNvcGJVXZsb/XItW9XcCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNV +HSMEGDAWgBT5YLvU49U09rj1BoAlp3PbRmmonjAdBgNVHQ4EFgQU+WC71OPVNPa4 +9QaAJadz20ZpqJ4wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQBW +s47LCp1Jjr+kxJG7ZhcFUZh1++VQLHqe8RT6q9OKPv+RKY9ji9i0qVQBDb6Thi/5 +Sm3HXvVX+cpVHBK+Rw82xd9qt9t1wkclf7nxY/hoLVUE0fKNsKTPvDxeH3jnpaAg +cLAExbf3cqfeIg29MyVGjGSSJuM+LmOW2puMPfgYCdcDzH2GguDKBAdRUNf/ktUM +79qGn5nX67evaOI5JpS6aLe/g9Pqemc9YmeuJeVy6OLk7K4S9ksrPJ/psEDzOFSz +/bdoyNrGj1E8svuR3Bznm53htw1yj+KkxKl4+esUrMZDBcJlOSgYAsOCsp0FvmXt +ll9ldDz7CTUue5wT/RsPXcdtgTpWD8w74a8CLyKsRspGPKAcTNZEtF4uXBVmCeEm +Kf7GUmG6sXP/wwyc5WxqlD8UykAWlYTzWamsX0xhk23RO8yilQwipmdnRC652dKK +QbNmC1r7fSOl8hqw/96bg5Qu0T/fkreRrwU7ZcegbLHNYhLDkBvjJc40vG93drEQ +w/cFGsDWr3RiSBd3kmmQYRzelYB0VI8YHMPzA9C/pEN1hlMYegouCRw2n5H9gooi +S9EOUCXdywMMF8mDAAhONU2Ki+3wApRmLER/y5UnlhetCTCstnEXbosX9hwJ1C07 +mKVx01QT2WDz9UtmT/rx7iASjbSsV7FFY6GsdqnC+w== +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation +# Subject: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation +# Label: "SSL.com EV Root Certification Authority ECC" +# Serial: 3182246526754555285 +# MD5 Fingerprint: 59:53:22:65:83:42:01:54:c0:ce:42:b9:5a:7c:f2:90 +# SHA1 Fingerprint: 4c:dd:51:a3:d1:f5:20:32:14:b0:c6:c5:32:23:03:91:c7:46:42:6d +# SHA256 Fingerprint: 22:a2:c1:f7:bd:ed:70:4c:c1:e7:01:b5:f4:08:c3:10:88:0f:e9:56:b5:de:2a:4a:44:f9:9c:87:3a:25:a7:c8 +-----BEGIN CERTIFICATE----- +MIIClDCCAhqgAwIBAgIILCmcWxbtBZUwCgYIKoZIzj0EAwIwfzELMAkGA1UEBhMC +VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T +U0wgQ29ycG9yYXRpb24xNDAyBgNVBAMMK1NTTC5jb20gRVYgUm9vdCBDZXJ0aWZp +Y2F0aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNTIzWhcNNDEwMjEyMTgx +NTIzWjB/MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv +dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjE0MDIGA1UEAwwrU1NMLmNv +bSBFViBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49 +AgEGBSuBBAAiA2IABKoSR5CYG/vvw0AHgyBO8TCCogbR8pKGYfL2IWjKAMTH6kMA +VIbc/R/fALhBYlzccBYy3h+Z1MzFB8gIH2EWB1E9fVwHU+M1OIzfzZ/ZLg1Kthku +WnBaBu2+8KGwytAJKaNjMGEwHQYDVR0OBBYEFFvKXuXe0oGqzagtZFG22XKbl+ZP +MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUW8pe5d7SgarNqC1kUbbZcpuX +5k8wDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2gAMGUCMQCK5kCJN+vp1RPZ +ytRrJPOwPYdGWBrssd9v+1a6cGvHOMzosYxPD/fxZ3YOg9AeUY8CMD32IygmTMZg +h5Mmm7I1HrrW9zzRHM76JTymGoEVW/MSD2zuZYrJh6j5B+BimoxcSg== +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6 +# Label: "GlobalSign Root CA - R6" +# Serial: 1417766617973444989252670301619537 +# MD5 Fingerprint: 4f:dd:07:e4:d4:22:64:39:1e:0c:37:42:ea:d1:c6:ae +# SHA1 Fingerprint: 80:94:64:0e:b5:a7:a1:ca:11:9c:1f:dd:d5:9f:81:02:63:a7:fb:d1 +# SHA256 Fingerprint: 
2c:ab:ea:fe:37:d0:6c:a2:2a:ba:73:91:c0:03:3d:25:98:29:52:c4:53:64:73:49:76:3a:3a:b5:ad:6c:cf:69 +-----BEGIN CERTIFICATE----- +MIIFgzCCA2ugAwIBAgIORea7A4Mzw4VlSOb/RVEwDQYJKoZIhvcNAQEMBQAwTDEg +MB4GA1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjYxEzARBgNVBAoTCkdsb2Jh +bFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMTQxMjEwMDAwMDAwWhcNMzQx +MjEwMDAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSNjET +MBEGA1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCAiIwDQYJ +KoZIhvcNAQEBBQADggIPADCCAgoCggIBAJUH6HPKZvnsFMp7PPcNCPG0RQssgrRI +xutbPK6DuEGSMxSkb3/pKszGsIhrxbaJ0cay/xTOURQh7ErdG1rG1ofuTToVBu1k +ZguSgMpE3nOUTvOniX9PeGMIyBJQbUJmL025eShNUhqKGoC3GYEOfsSKvGRMIRxD +aNc9PIrFsmbVkJq3MQbFvuJtMgamHvm566qjuL++gmNQ0PAYid/kD3n16qIfKtJw +LnvnvJO7bVPiSHyMEAc4/2ayd2F+4OqMPKq0pPbzlUoSB239jLKJz9CgYXfIWHSw +1CM69106yqLbnQneXUQtkPGBzVeS+n68UARjNN9rkxi+azayOeSsJDa38O+2HBNX +k7besvjihbdzorg1qkXy4J02oW9UivFyVm4uiMVRQkQVlO6jxTiWm05OWgtH8wY2 +SXcwvHE35absIQh1/OZhFj931dmRl4QKbNQCTXTAFO39OfuD8l4UoQSwC+n+7o/h +bguyCLNhZglqsQY6ZZZZwPA1/cnaKI0aEYdwgQqomnUdnjqGBQCe24DWJfncBZ4n +WUx2OVvq+aWh2IMP0f/fMBH5hc8zSPXKbWQULHpYT9NLCEnFlWQaYw55PfWzjMpY +rZxCRXluDocZXFSxZba/jJvcE+kNb7gu3GduyYsRtYQUigAZcIN5kZeR1Bonvzce +MgfYFGM8KEyvAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTAD +AQH/MB0GA1UdDgQWBBSubAWjkxPioufi1xzWx/B/yGdToDAfBgNVHSMEGDAWgBSu +bAWjkxPioufi1xzWx/B/yGdToDANBgkqhkiG9w0BAQwFAAOCAgEAgyXt6NH9lVLN +nsAEoJFp5lzQhN7craJP6Ed41mWYqVuoPId8AorRbrcWc+ZfwFSY1XS+wc3iEZGt +Ixg93eFyRJa0lV7Ae46ZeBZDE1ZXs6KzO7V33EByrKPrmzU+sQghoefEQzd5Mr61 +55wsTLxDKZmOMNOsIeDjHfrYBzN2VAAiKrlNIC5waNrlU/yDXNOd8v9EDERm8tLj +vUYAGm0CuiVdjaExUd1URhxN25mW7xocBFymFe944Hn+Xds+qkxV/ZoVqW/hpvvf +cDDpw+5CRu3CkwWJ+n1jez/QcYF8AOiYrg54NMMl+68KnyBr3TsTjxKM4kEaSHpz +oHdpx7Zcf4LIHv5YGygrqGytXm3ABdJ7t+uA/iU3/gKbaKxCXcPu9czc8FB10jZp +nOZ7BN9uBmm23goJSFmH63sUYHpkqmlD75HHTOwY3WzvUy2MmeFe8nI+z1TIvWfs +pA9MRf/TuTAjB0yPEL+GltmZWrSZVxykzLsViVO6LAUP5MSeGbEYNNVMnbrt9x+v +JJUEeKgDu+6B5dpffItKoZB0JaezPkvILFa9x8jvOOJckvB595yEunQtYQEgfn7R +8k8HWV+LLUNS60YMlOH1Zkd5d9VUWx+tJDfLRVpOoERIyNiwmcUVhAn21klJwGW4 +5hpxbqCo8YLoRT5s1gLXCmeDBVrJpBA= +-----END CERTIFICATE----- + +# Issuer: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed +# Subject: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed +# Label: "OISTE WISeKey Global Root GC CA" +# Serial: 44084345621038548146064804565436152554 +# MD5 Fingerprint: a9:d6:b9:2d:2f:93:64:f8:a5:69:ca:91:e9:68:07:23 +# SHA1 Fingerprint: e0:11:84:5e:34:de:be:88:81:b9:9c:f6:16:26:d1:96:1f:c3:b9:31 +# SHA256 Fingerprint: 85:60:f9:1c:36:24:da:ba:95:70:b5:fe:a0:db:e3:6f:f1:1a:83:23:be:94:86:85:4f:b3:f3:4a:55:71:19:8d +-----BEGIN CERTIFICATE----- +MIICaTCCAe+gAwIBAgIQISpWDK7aDKtARb8roi066jAKBggqhkjOPQQDAzBtMQsw +CQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUgRm91 +bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9iYWwg +Um9vdCBHQyBDQTAeFw0xNzA1MDkwOTQ4MzRaFw00MjA1MDkwOTU4MzNaMG0xCzAJ +BgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBGb3Vu +ZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2JhbCBS +b290IEdDIENBMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAETOlQwMYPchi82PG6s4ni +eUqjFqdrVCTbUf/q9Akkwwsin8tqJ4KBDdLArzHkdIJuyiXZjHWd8dvQmqJLIX4W +p2OQ0jnUsYd4XxiWD1AbNTcPasbc2RNNpI6QN+a9WzGRo1QwUjAOBgNVHQ8BAf8E +BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUSIcUrOPDnpBgOtfKie7T +rYy0UGYwEAYJKwYBBAGCNxUBBAMCAQAwCgYIKoZIzj0EAwMDaAAwZQIwJsdpW9zV +57LnyAyMjMPdeYwbY9XJUpROTYJKcx6ygISpJcBMWm1JKWB4E+J+SOtkAjEA2zQg +Mgj/mkkCtojeFK9dbJlxjRo/i9fgojaGHAeCOnZT/cKi7e97sIBPWA9LUzm9 +-----END CERTIFICATE----- diff 
--git a/env/lib/python3.6/site-packages/certifi/core.py b/env/lib/python3.6/site-packages/certifi/core.py new file mode 100644 index 0000000..eab9d1d --- /dev/null +++ b/env/lib/python3.6/site-packages/certifi/core.py @@ -0,0 +1,37 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +""" +certifi.py +~~~~~~~~~~ + +This module returns the installation location of cacert.pem. +""" +import os +import warnings + + +class DeprecatedBundleWarning(DeprecationWarning): + """ + The weak security bundle is being deprecated. Please bother your service + provider to get them to stop using cross-signed roots. + """ + + +def where(): + f = os.path.dirname(__file__) + + return os.path.join(f, 'cacert.pem') + + +def old_where(): + warnings.warn( + "The weak security bundle has been removed. certifi.old_where() is now an alias " + "of certifi.where(). Please update your code to use certifi.where() instead. " + "certifi.old_where() will be removed in 2018.", + DeprecatedBundleWarning + ) + return where() + +if __name__ == '__main__': + print(where()) diff --git a/env/lib/python3.6/site-packages/chardet-3.0.4.dist-info/DESCRIPTION.rst b/env/lib/python3.6/site-packages/chardet-3.0.4.dist-info/DESCRIPTION.rst new file mode 100644 index 0000000..c0f044d --- /dev/null +++ b/env/lib/python3.6/site-packages/chardet-3.0.4.dist-info/DESCRIPTION.rst @@ -0,0 +1,70 @@ +Chardet: The Universal Character Encoding Detector +-------------------------------------------------- + +.. image:: https://img.shields.io/travis/chardet/chardet/stable.svg + :alt: Build status + :target: https://travis-ci.org/chardet/chardet + +.. image:: https://img.shields.io/coveralls/chardet/chardet/stable.svg + :target: https://coveralls.io/r/chardet/chardet + +.. image:: https://img.shields.io/pypi/v/chardet.svg + :target: https://warehouse.python.org/project/chardet/ + :alt: Latest version on PyPI + +.. image:: https://img.shields.io/pypi/l/chardet.svg + :alt: License + + +Detects + - ASCII, UTF-8, UTF-16 (2 variants), UTF-32 (4 variants) + - Big5, GB2312, EUC-TW, HZ-GB-2312, ISO-2022-CN (Traditional and Simplified Chinese) + - EUC-JP, SHIFT_JIS, CP932, ISO-2022-JP (Japanese) + - EUC-KR, ISO-2022-KR (Korean) + - KOI8-R, MacCyrillic, IBM855, IBM866, ISO-8859-5, windows-1251 (Cyrillic) + - ISO-8859-5, windows-1251 (Bulgarian) + - ISO-8859-1, windows-1252 (Western European languages) + - ISO-8859-7, windows-1253 (Greek) + - ISO-8859-8, windows-1255 (Visual and Logical Hebrew) + - TIS-620 (Thai) + +.. note:: + Our ISO-8859-2 and windows-1250 (Hungarian) probers have been temporarily + disabled until we can retrain the models. + +Requires Python 2.6, 2.7, or 3.3+. + +Installation +------------ + +Install from `PyPI `_:: + + pip install chardet + +Documentation +------------- + +For users, docs are now available at https://chardet.readthedocs.io/. + +Command-line Tool +----------------- + +chardet comes with a command-line script which reports on the encodings of one +or more files:: + + % chardetect somefile someotherfile + somefile: windows-1252 with confidence 0.5 + someotherfile: ascii with confidence 1.0 + +About +----- + +This is a continuation of Mark Pilgrim's excellent chardet. Previously, two +versions needed to be maintained: one that supported python 2.x and one that +supported python 3.x. We've recently merged with `Ian Cordasco `_'s +`charade `_ fork, so now we have one +coherent version that works for Python 2.6+. 
+ +:maintainer: Dan Blanchard + + diff --git a/env/lib/python3.6/site-packages/chardet-3.0.4.dist-info/INSTALLER b/env/lib/python3.6/site-packages/chardet-3.0.4.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/env/lib/python3.6/site-packages/chardet-3.0.4.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/env/lib/python3.6/site-packages/chardet-3.0.4.dist-info/METADATA b/env/lib/python3.6/site-packages/chardet-3.0.4.dist-info/METADATA new file mode 100644 index 0000000..1427867 --- /dev/null +++ b/env/lib/python3.6/site-packages/chardet-3.0.4.dist-info/METADATA @@ -0,0 +1,96 @@ +Metadata-Version: 2.0 +Name: chardet +Version: 3.0.4 +Summary: Universal encoding detector for Python 2 and 3 +Home-page: https://github.com/chardet/chardet +Author: Daniel Blanchard +Author-email: dan.blanchard@gmail.com +License: LGPL +Keywords: encoding,i18n,xml +Platform: UNKNOWN +Classifier: Development Status :: 4 - Beta +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL) +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: Text Processing :: Linguistic + +Chardet: The Universal Character Encoding Detector +-------------------------------------------------- + +.. image:: https://img.shields.io/travis/chardet/chardet/stable.svg + :alt: Build status + :target: https://travis-ci.org/chardet/chardet + +.. image:: https://img.shields.io/coveralls/chardet/chardet/stable.svg + :target: https://coveralls.io/r/chardet/chardet + +.. image:: https://img.shields.io/pypi/v/chardet.svg + :target: https://warehouse.python.org/project/chardet/ + :alt: Latest version on PyPI + +.. image:: https://img.shields.io/pypi/l/chardet.svg + :alt: License + + +Detects + - ASCII, UTF-8, UTF-16 (2 variants), UTF-32 (4 variants) + - Big5, GB2312, EUC-TW, HZ-GB-2312, ISO-2022-CN (Traditional and Simplified Chinese) + - EUC-JP, SHIFT_JIS, CP932, ISO-2022-JP (Japanese) + - EUC-KR, ISO-2022-KR (Korean) + - KOI8-R, MacCyrillic, IBM855, IBM866, ISO-8859-5, windows-1251 (Cyrillic) + - ISO-8859-5, windows-1251 (Bulgarian) + - ISO-8859-1, windows-1252 (Western European languages) + - ISO-8859-7, windows-1253 (Greek) + - ISO-8859-8, windows-1255 (Visual and Logical Hebrew) + - TIS-620 (Thai) + +.. note:: + Our ISO-8859-2 and windows-1250 (Hungarian) probers have been temporarily + disabled until we can retrain the models. + +Requires Python 2.6, 2.7, or 3.3+. + +Installation +------------ + +Install from `PyPI `_:: + + pip install chardet + +Documentation +------------- + +For users, docs are now available at https://chardet.readthedocs.io/. + +Command-line Tool +----------------- + +chardet comes with a command-line script which reports on the encodings of one +or more files:: + + % chardetect somefile someotherfile + somefile: windows-1252 with confidence 0.5 + someotherfile: ascii with confidence 1.0 + +About +----- + +This is a continuation of Mark Pilgrim's excellent chardet. 
Previously, two +versions needed to be maintained: one that supported python 2.x and one that +supported python 3.x. We've recently merged with `Ian Cordasco `_'s +`charade `_ fork, so now we have one +coherent version that works for Python 2.6+. + +:maintainer: Dan Blanchard + + diff --git a/env/lib/python3.6/site-packages/chardet-3.0.4.dist-info/RECORD b/env/lib/python3.6/site-packages/chardet-3.0.4.dist-info/RECORD new file mode 100644 index 0000000..0f03987 --- /dev/null +++ b/env/lib/python3.6/site-packages/chardet-3.0.4.dist-info/RECORD @@ -0,0 +1,91 @@ +chardet/__init__.py,sha256=YsP5wQlsHJ2auF1RZJfypiSrCA7_bQiRm3ES_NI76-Y,1559 +chardet/big5freq.py,sha256=D_zK5GyzoVsRes0HkLJziltFQX0bKCLOrFe9_xDvO_8,31254 +chardet/big5prober.py,sha256=kBxHbdetBpPe7xrlb-e990iot64g_eGSLd32lB7_h3M,1757 +chardet/chardistribution.py,sha256=3woWS62KrGooKyqz4zQSnjFbJpa6V7g02daAibTwcl8,9411 +chardet/charsetgroupprober.py,sha256=6bDu8YIiRuScX4ca9Igb0U69TA2PGXXDej6Cc4_9kO4,3787 +chardet/charsetprober.py,sha256=KSmwJErjypyj0bRZmC5F5eM7c8YQgLYIjZXintZNstg,5110 +chardet/codingstatemachine.py,sha256=VYp_6cyyki5sHgXDSZnXW4q1oelHc3cu9AyQTX7uug8,3590 +chardet/compat.py,sha256=PKTzHkSbtbHDqS9PyujMbX74q1a8mMpeQTDVsQhZMRw,1134 +chardet/cp949prober.py,sha256=TZ434QX8zzBsnUvL_8wm4AQVTZ2ZkqEEQL_lNw9f9ow,1855 +chardet/enums.py,sha256=Aimwdb9as1dJKZaFNUH2OhWIVBVd6ZkJJ_WK5sNY8cU,1661 +chardet/escprober.py,sha256=kkyqVg1Yw3DIOAMJ2bdlyQgUFQhuHAW8dUGskToNWSc,3950 +chardet/escsm.py,sha256=RuXlgNvTIDarndvllNCk5WZBIpdCxQ0kcd9EAuxUh84,10510 +chardet/eucjpprober.py,sha256=iD8Jdp0ISRjgjiVN7f0e8xGeQJ5GM2oeZ1dA8nbSeUw,3749 +chardet/euckrfreq.py,sha256=-7GdmvgWez4-eO4SuXpa7tBiDi5vRXQ8WvdFAzVaSfo,13546 +chardet/euckrprober.py,sha256=MqFMTQXxW4HbzIpZ9lKDHB3GN8SP4yiHenTmf8g_PxY,1748 +chardet/euctwfreq.py,sha256=No1WyduFOgB5VITUA7PLyC5oJRNzRyMbBxaKI1l16MA,31621 +chardet/euctwprober.py,sha256=13p6EP4yRaxqnP4iHtxHOJ6R2zxHq1_m8hTRjzVZ95c,1747 +chardet/gb2312freq.py,sha256=JX8lsweKLmnCwmk8UHEQsLgkr_rP_kEbvivC4qPOrlc,20715 +chardet/gb2312prober.py,sha256=gGvIWi9WhDjE-xQXHvNIyrnLvEbMAYgyUSZ65HUfylw,1754 +chardet/hebrewprober.py,sha256=c3SZ-K7hvyzGY6JRAZxJgwJ_sUS9k0WYkvMY00YBYFo,13838 +chardet/jisfreq.py,sha256=vpmJv2Bu0J8gnMVRPHMFefTRvo_ha1mryLig8CBwgOg,25777 +chardet/jpcntx.py,sha256=PYlNqRUQT8LM3cT5FmHGP0iiscFlTWED92MALvBungo,19643 +chardet/langbulgarianmodel.py,sha256=1HqQS9Pbtnj1xQgxitJMvw8X6kKr5OockNCZWfEQrPE,12839 +chardet/langcyrillicmodel.py,sha256=LODajvsetH87yYDDQKA2CULXUH87tI223dhfjh9Zx9c,17948 +chardet/langgreekmodel.py,sha256=8YAW7bU8YwSJap0kIJSbPMw1BEqzGjWzqcqf0WgUKAA,12688 +chardet/langhebrewmodel.py,sha256=JSnqmE5E62tDLTPTvLpQsg5gOMO4PbdWRvV7Avkc0HA,11345 +chardet/langhungarianmodel.py,sha256=RhapYSG5l0ZaO-VV4Fan5sW0WRGQqhwBM61yx3yxyOA,12592 +chardet/langthaimodel.py,sha256=8l0173Gu_W6G8mxmQOTEF4ls2YdE7FxWf3QkSxEGXJQ,11290 +chardet/langturkishmodel.py,sha256=W22eRNJsqI6uWAfwXSKVWWnCerYqrI8dZQTm_M0lRFk,11102 +chardet/latin1prober.py,sha256=S2IoORhFk39FEFOlSFWtgVybRiP6h7BlLldHVclNkU8,5370 +chardet/mbcharsetprober.py,sha256=AR95eFH9vuqSfvLQZN-L5ijea25NOBCoXqw8s5O9xLQ,3413 +chardet/mbcsgroupprober.py,sha256=h6TRnnYq2OxG1WdD5JOyxcdVpn7dG0q-vB8nWr5mbh4,2012 +chardet/mbcssm.py,sha256=SY32wVIF3HzcjY3BaEspy9metbNSKxIIB0RKPn7tjpI,25481 +chardet/sbcharsetprober.py,sha256=LDSpCldDCFlYwUkGkwD2oFxLlPWIWXT09akH_2PiY74,5657 +chardet/sbcsgroupprober.py,sha256=1IprcCB_k1qfmnxGC6MBbxELlKqD3scW6S8YIwdeyXA,3546 +chardet/sjisprober.py,sha256=IIt-lZj0WJqK4rmUZzKZP4GJlE8KUEtFYVuY96ek5MQ,3774 
+chardet/universaldetector.py,sha256=qL0174lSZE442eB21nnktT9_VcAye07laFWUeUrjttY,12485 +chardet/utf8prober.py,sha256=IdD8v3zWOsB8OLiyPi-y_fqwipRFxV9Nc1eKBLSuIEw,2766 +chardet/version.py,sha256=sp3B08mrDXB-pf3K9fqJ_zeDHOCLC8RrngQyDFap_7g,242 +chardet/cli/__init__.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1 +chardet/cli/chardetect.py,sha256=YBO8L4mXo0WR6_-Fjh_8QxPBoEBNqB9oNxNrdc54AQs,2738 +chardet-3.0.4.dist-info/DESCRIPTION.rst,sha256=PQ4sBsMyKFZkjC6QpmbpLn0UtCNyeb-ZqvCGEgyZMGk,2174 +chardet-3.0.4.dist-info/METADATA,sha256=RV_2I4B1Z586DL8oVO5Kp7X5bUdQ5EuKAvNoAEF8wSw,3239 +chardet-3.0.4.dist-info/RECORD,, +chardet-3.0.4.dist-info/WHEEL,sha256=o2k-Qa-RMNIJmUdIc7KU6VWR_ErNRbWNlxDIpl7lm34,110 +chardet-3.0.4.dist-info/entry_points.txt,sha256=fAMmhu5eJ-zAJ-smfqQwRClQ3-nozOCmvJ6-E8lgGJo,60 +chardet-3.0.4.dist-info/metadata.json,sha256=0htbRM18ujyGZDdfowgAqj6Hq2eQtwzwyhaEveKntgo,1375 +chardet-3.0.4.dist-info/top_level.txt,sha256=AowzBbZy4x8EirABDdJSLJZMkJ_53iIag8xfKR6D7kI,8 +../../../bin/chardetect,sha256=v5o2IbfnL76o3DF_VI-wE0RPTEZrTbwyvoMy6ZdoWFs,291 +chardet-3.0.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +chardet/__pycache__/langgreekmodel.cpython-36.pyc,, +chardet/__pycache__/sjisprober.cpython-36.pyc,, +chardet/__pycache__/mbcharsetprober.cpython-36.pyc,, +chardet/__pycache__/eucjpprober.cpython-36.pyc,, +chardet/__pycache__/cp949prober.cpython-36.pyc,, +chardet/__pycache__/euctwfreq.cpython-36.pyc,, +chardet/__pycache__/gb2312prober.cpython-36.pyc,, +chardet/__pycache__/sbcsgroupprober.cpython-36.pyc,, +chardet/__pycache__/langturkishmodel.cpython-36.pyc,, +chardet/__pycache__/langhebrewmodel.cpython-36.pyc,, +chardet/__pycache__/version.cpython-36.pyc,, +chardet/__pycache__/euckrprober.cpython-36.pyc,, +chardet/__pycache__/euckrfreq.cpython-36.pyc,, +chardet/__pycache__/chardistribution.cpython-36.pyc,, +chardet/__pycache__/escsm.cpython-36.pyc,, +chardet/__pycache__/euctwprober.cpython-36.pyc,, +chardet/__pycache__/big5freq.cpython-36.pyc,, +chardet/__pycache__/compat.cpython-36.pyc,, +chardet/__pycache__/hebrewprober.cpython-36.pyc,, +chardet/__pycache__/latin1prober.cpython-36.pyc,, +chardet/__pycache__/universaldetector.cpython-36.pyc,, +chardet/__pycache__/escprober.cpython-36.pyc,, +chardet/__pycache__/codingstatemachine.cpython-36.pyc,, +chardet/__pycache__/utf8prober.cpython-36.pyc,, +chardet/__pycache__/langhungarianmodel.cpython-36.pyc,, +chardet/__pycache__/gb2312freq.cpython-36.pyc,, +chardet/__pycache__/enums.cpython-36.pyc,, +chardet/__pycache__/langthaimodel.cpython-36.pyc,, +chardet/__pycache__/jpcntx.cpython-36.pyc,, +chardet/__pycache__/mbcssm.cpython-36.pyc,, +chardet/__pycache__/big5prober.cpython-36.pyc,, +chardet/__pycache__/sbcharsetprober.cpython-36.pyc,, +chardet/__pycache__/langcyrillicmodel.cpython-36.pyc,, +chardet/__pycache__/__init__.cpython-36.pyc,, +chardet/__pycache__/jisfreq.cpython-36.pyc,, +chardet/__pycache__/charsetgroupprober.cpython-36.pyc,, +chardet/__pycache__/langbulgarianmodel.cpython-36.pyc,, +chardet/__pycache__/mbcsgroupprober.cpython-36.pyc,, +chardet/__pycache__/charsetprober.cpython-36.pyc,, +chardet/cli/__pycache__/chardetect.cpython-36.pyc,, +chardet/cli/__pycache__/__init__.cpython-36.pyc,, diff --git a/env/lib/python3.6/site-packages/chardet-3.0.4.dist-info/WHEEL b/env/lib/python3.6/site-packages/chardet-3.0.4.dist-info/WHEEL new file mode 100644 index 0000000..8b6dd1b --- /dev/null +++ b/env/lib/python3.6/site-packages/chardet-3.0.4.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 
+Generator: bdist_wheel (0.29.0) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/env/lib/python3.6/site-packages/chardet-3.0.4.dist-info/entry_points.txt b/env/lib/python3.6/site-packages/chardet-3.0.4.dist-info/entry_points.txt new file mode 100644 index 0000000..a884269 --- /dev/null +++ b/env/lib/python3.6/site-packages/chardet-3.0.4.dist-info/entry_points.txt @@ -0,0 +1,3 @@ +[console_scripts] +chardetect = chardet.cli.chardetect:main + diff --git a/env/lib/python3.6/site-packages/chardet-3.0.4.dist-info/metadata.json b/env/lib/python3.6/site-packages/chardet-3.0.4.dist-info/metadata.json new file mode 100644 index 0000000..8cdf025 --- /dev/null +++ b/env/lib/python3.6/site-packages/chardet-3.0.4.dist-info/metadata.json @@ -0,0 +1 @@ +{"classifiers": ["Development Status :: 4 - Beta", "Intended Audience :: Developers", "License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Topic :: Software Development :: Libraries :: Python Modules", "Topic :: Text Processing :: Linguistic"], "extensions": {"python.commands": {"wrap_console": {"chardetect": "chardet.cli.chardetect:main"}}, "python.details": {"contacts": [{"email": "dan.blanchard@gmail.com", "name": "Daniel Blanchard", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "https://github.com/chardet/chardet"}}, "python.exports": {"console_scripts": {"chardetect": "chardet.cli.chardetect:main"}}}, "generator": "bdist_wheel (0.29.0)", "keywords": ["encoding", "i18n", "xml"], "license": "LGPL", "metadata_version": "2.0", "name": "chardet", "summary": "Universal encoding detector for Python 2 and 3", "test_requires": [{"requires": ["hypothesis", "pytest"]}], "version": "3.0.4"} \ No newline at end of file diff --git a/env/lib/python3.6/site-packages/chardet-3.0.4.dist-info/top_level.txt b/env/lib/python3.6/site-packages/chardet-3.0.4.dist-info/top_level.txt new file mode 100644 index 0000000..79236f2 --- /dev/null +++ b/env/lib/python3.6/site-packages/chardet-3.0.4.dist-info/top_level.txt @@ -0,0 +1 @@ +chardet diff --git a/env/lib/python3.6/site-packages/chardet/big5freq.py b/env/lib/python3.6/site-packages/chardet/big5freq.py new file mode 100644 index 0000000..38f3251 --- /dev/null +++ b/env/lib/python3.6/site-packages/chardet/big5freq.py @@ -0,0 +1,386 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. 
+# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# Big5 frequency table +# by Taiwan's Mandarin Promotion Council +# +# +# 128 --> 0.42261 +# 256 --> 0.57851 +# 512 --> 0.74851 +# 1024 --> 0.89384 +# 2048 --> 0.97583 +# +# Ideal Distribution Ratio = 0.74851/(1-0.74851) =2.98 +# Random Distribution Ration = 512/(5401-512)=0.105 +# +# Typical Distribution Ratio about 25% of Ideal one, still much higher than RDR + +BIG5_TYPICAL_DISTRIBUTION_RATIO = 0.75 + +#Char to FreqOrder table +BIG5_TABLE_SIZE = 5376 + +BIG5_CHAR_TO_FREQ_ORDER = ( + 1,1801,1506, 255,1431, 198, 9, 82, 6,5008, 177, 202,3681,1256,2821, 110, # 16 +3814, 33,3274, 261, 76, 44,2114, 16,2946,2187,1176, 659,3971, 26,3451,2653, # 32 +1198,3972,3350,4202, 410,2215, 302, 590, 361,1964, 8, 204, 58,4510,5009,1932, # 48 + 63,5010,5011, 317,1614, 75, 222, 159,4203,2417,1480,5012,3555,3091, 224,2822, # 64 +3682, 3, 10,3973,1471, 29,2787,1135,2866,1940, 873, 130,3275,1123, 312,5013, # 80 +4511,2052, 507, 252, 682,5014, 142,1915, 124, 206,2947, 34,3556,3204, 64, 604, # 96 +5015,2501,1977,1978, 155,1991, 645, 641,1606,5016,3452, 337, 72, 406,5017, 80, # 112 + 630, 238,3205,1509, 263, 939,1092,2654, 756,1440,1094,3453, 449, 69,2987, 591, # 128 + 179,2096, 471, 115,2035,1844, 60, 50,2988, 134, 806,1869, 734,2036,3454, 180, # 144 + 995,1607, 156, 537,2907, 688,5018, 319,1305, 779,2145, 514,2379, 298,4512, 359, # 160 +2502, 90,2716,1338, 663, 11, 906,1099,2553, 20,2441, 182, 532,1716,5019, 732, # 176 +1376,4204,1311,1420,3206, 25,2317,1056, 113, 399, 382,1950, 242,3455,2474, 529, # 192 +3276, 475,1447,3683,5020, 117, 21, 656, 810,1297,2300,2334,3557,5021, 126,4205, # 208 + 706, 456, 150, 613,4513, 71,1118,2037,4206, 145,3092, 85, 835, 486,2115,1246, # 224 +1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,5022,2128,2359, 347,3815, 221, # 240 +3558,3135,5023,1956,1153,4207, 83, 296,1199,3093, 192, 624, 93,5024, 822,1898, # 256 +2823,3136, 795,2065, 991,1554,1542,1592, 27, 43,2867, 859, 139,1456, 860,4514, # 272 + 437, 712,3974, 164,2397,3137, 695, 211,3037,2097, 195,3975,1608,3559,3560,3684, # 288 +3976, 234, 811,2989,2098,3977,2233,1441,3561,1615,2380, 668,2077,1638, 305, 228, # 304 +1664,4515, 467, 415,5025, 262,2099,1593, 239, 108, 300, 200,1033, 512,1247,2078, # 320 +5026,5027,2176,3207,3685,2682, 593, 845,1062,3277, 88,1723,2038,3978,1951, 212, # 336 + 266, 152, 149, 468,1899,4208,4516, 77, 187,5028,3038, 37, 5,2990,5029,3979, # 352 +5030,5031, 39,2524,4517,2908,3208,2079, 55, 148, 74,4518, 545, 483,1474,1029, # 368 +1665, 217,1870,1531,3138,1104,2655,4209, 24, 172,3562, 900,3980,3563,3564,4519, # 384 + 32,1408,2824,1312, 329, 487,2360,2251,2717, 784,2683, 4,3039,3351,1427,1789, # 400 + 188, 109, 499,5032,3686,1717,1790, 888,1217,3040,4520,5033,3565,5034,3352,1520, # 416 +3687,3981, 196,1034, 775,5035,5036, 929,1816, 249, 439, 38,5037,1063,5038, 794, # 432 +3982,1435,2301, 46, 178,3278,2066,5039,2381,5040, 214,1709,4521, 804, 35, 707, # 448 + 324,3688,1601,2554, 140, 459,4210,5041,5042,1365, 839, 272, 978,2262,2580,3456, # 464 +2129,1363,3689,1423, 
697, 100,3094, 48, 70,1231, 495,3139,2196,5043,1294,5044, # 480 +2080, 462, 586,1042,3279, 853, 256, 988, 185,2382,3457,1698, 434,1084,5045,3458, # 496 + 314,2625,2788,4522,2335,2336, 569,2285, 637,1817,2525, 757,1162,1879,1616,3459, # 512 + 287,1577,2116, 768,4523,1671,2868,3566,2526,1321,3816, 909,2418,5046,4211, 933, # 528 +3817,4212,2053,2361,1222,4524, 765,2419,1322, 786,4525,5047,1920,1462,1677,2909, # 544 +1699,5048,4526,1424,2442,3140,3690,2600,3353,1775,1941,3460,3983,4213, 309,1369, # 560 +1130,2825, 364,2234,1653,1299,3984,3567,3985,3986,2656, 525,1085,3041, 902,2001, # 576 +1475, 964,4527, 421,1845,1415,1057,2286, 940,1364,3141, 376,4528,4529,1381, 7, # 592 +2527, 983,2383, 336,1710,2684,1846, 321,3461, 559,1131,3042,2752,1809,1132,1313, # 608 + 265,1481,1858,5049, 352,1203,2826,3280, 167,1089, 420,2827, 776, 792,1724,3568, # 624 +4214,2443,3281,5050,4215,5051, 446, 229, 333,2753, 901,3818,1200,1557,4530,2657, # 640 +1921, 395,2754,2685,3819,4216,1836, 125, 916,3209,2626,4531,5052,5053,3820,5054, # 656 +5055,5056,4532,3142,3691,1133,2555,1757,3462,1510,2318,1409,3569,5057,2146, 438, # 672 +2601,2910,2384,3354,1068, 958,3043, 461, 311,2869,2686,4217,1916,3210,4218,1979, # 688 + 383, 750,2755,2627,4219, 274, 539, 385,1278,1442,5058,1154,1965, 384, 561, 210, # 704 + 98,1295,2556,3570,5059,1711,2420,1482,3463,3987,2911,1257, 129,5060,3821, 642, # 720 + 523,2789,2790,2658,5061, 141,2235,1333, 68, 176, 441, 876, 907,4220, 603,2602, # 736 + 710, 171,3464, 404, 549, 18,3143,2398,1410,3692,1666,5062,3571,4533,2912,4534, # 752 +5063,2991, 368,5064, 146, 366, 99, 871,3693,1543, 748, 807,1586,1185, 22,2263, # 768 + 379,3822,3211,5065,3212, 505,1942,2628,1992,1382,2319,5066, 380,2362, 218, 702, # 784 +1818,1248,3465,3044,3572,3355,3282,5067,2992,3694, 930,3283,3823,5068, 59,5069, # 800 + 585, 601,4221, 497,3466,1112,1314,4535,1802,5070,1223,1472,2177,5071, 749,1837, # 816 + 690,1900,3824,1773,3988,1476, 429,1043,1791,2236,2117, 917,4222, 447,1086,1629, # 832 +5072, 556,5073,5074,2021,1654, 844,1090, 105, 550, 966,1758,2828,1008,1783, 686, # 848 +1095,5075,2287, 793,1602,5076,3573,2603,4536,4223,2948,2302,4537,3825, 980,2503, # 864 + 544, 353, 527,4538, 908,2687,2913,5077, 381,2629,1943,1348,5078,1341,1252, 560, # 880 +3095,5079,3467,2870,5080,2054, 973, 886,2081, 143,4539,5081,5082, 157,3989, 496, # 896 +4224, 57, 840, 540,2039,4540,4541,3468,2118,1445, 970,2264,1748,1966,2082,4225, # 912 +3144,1234,1776,3284,2829,3695, 773,1206,2130,1066,2040,1326,3990,1738,1725,4226, # 928 + 279,3145, 51,1544,2604, 423,1578,2131,2067, 173,4542,1880,5083,5084,1583, 264, # 944 + 610,3696,4543,2444, 280, 154,5085,5086,5087,1739, 338,1282,3096, 693,2871,1411, # 960 +1074,3826,2445,5088,4544,5089,5090,1240, 952,2399,5091,2914,1538,2688, 685,1483, # 976 +4227,2475,1436, 953,4228,2055,4545, 671,2400, 79,4229,2446,3285, 608, 567,2689, # 992 +3469,4230,4231,1691, 393,1261,1792,2401,5092,4546,5093,5094,5095,5096,1383,1672, # 1008 +3827,3213,1464, 522,1119, 661,1150, 216, 675,4547,3991,1432,3574, 609,4548,2690, # 1024 +2402,5097,5098,5099,4232,3045, 0,5100,2476, 315, 231,2447, 301,3356,4549,2385, # 1040 +5101, 233,4233,3697,1819,4550,4551,5102, 96,1777,1315,2083,5103, 257,5104,1810, # 1056 +3698,2718,1139,1820,4234,2022,1124,2164,2791,1778,2659,5105,3097, 363,1655,3214, # 1072 +5106,2993,5107,5108,5109,3992,1567,3993, 718, 103,3215, 849,1443, 341,3357,2949, # 1088 +1484,5110,1712, 127, 67, 339,4235,2403, 679,1412, 821,5111,5112, 834, 738, 351, # 1104 +2994,2147, 846, 235,1497,1881, 418,1993,3828,2719, 
186,1100,2148,2756,3575,1545, # 1120 +1355,2950,2872,1377, 583,3994,4236,2581,2995,5113,1298,3699,1078,2557,3700,2363, # 1136 + 78,3829,3830, 267,1289,2100,2002,1594,4237, 348, 369,1274,2197,2178,1838,4552, # 1152 +1821,2830,3701,2757,2288,2003,4553,2951,2758, 144,3358, 882,4554,3995,2759,3470, # 1168 +4555,2915,5114,4238,1726, 320,5115,3996,3046, 788,2996,5116,2831,1774,1327,2873, # 1184 +3997,2832,5117,1306,4556,2004,1700,3831,3576,2364,2660, 787,2023, 506, 824,3702, # 1200 + 534, 323,4557,1044,3359,2024,1901, 946,3471,5118,1779,1500,1678,5119,1882,4558, # 1216 + 165, 243,4559,3703,2528, 123, 683,4239, 764,4560, 36,3998,1793, 589,2916, 816, # 1232 + 626,1667,3047,2237,1639,1555,1622,3832,3999,5120,4000,2874,1370,1228,1933, 891, # 1248 +2084,2917, 304,4240,5121, 292,2997,2720,3577, 691,2101,4241,1115,4561, 118, 662, # 1264 +5122, 611,1156, 854,2386,1316,2875, 2, 386, 515,2918,5123,5124,3286, 868,2238, # 1280 +1486, 855,2661, 785,2216,3048,5125,1040,3216,3578,5126,3146, 448,5127,1525,5128, # 1296 +2165,4562,5129,3833,5130,4242,2833,3579,3147, 503, 818,4001,3148,1568, 814, 676, # 1312 +1444, 306,1749,5131,3834,1416,1030, 197,1428, 805,2834,1501,4563,5132,5133,5134, # 1328 +1994,5135,4564,5136,5137,2198, 13,2792,3704,2998,3149,1229,1917,5138,3835,2132, # 1344 +5139,4243,4565,2404,3580,5140,2217,1511,1727,1120,5141,5142, 646,3836,2448, 307, # 1360 +5143,5144,1595,3217,5145,5146,5147,3705,1113,1356,4002,1465,2529,2530,5148, 519, # 1376 +5149, 128,2133, 92,2289,1980,5150,4003,1512, 342,3150,2199,5151,2793,2218,1981, # 1392 +3360,4244, 290,1656,1317, 789, 827,2365,5152,3837,4566, 562, 581,4004,5153, 401, # 1408 +4567,2252, 94,4568,5154,1399,2794,5155,1463,2025,4569,3218,1944,5156, 828,1105, # 1424 +4245,1262,1394,5157,4246, 605,4570,5158,1784,2876,5159,2835, 819,2102, 578,2200, # 1440 +2952,5160,1502, 436,3287,4247,3288,2836,4005,2919,3472,3473,5161,2721,2320,5162, # 1456 +5163,2337,2068, 23,4571, 193, 826,3838,2103, 699,1630,4248,3098, 390,1794,1064, # 1472 +3581,5164,1579,3099,3100,1400,5165,4249,1839,1640,2877,5166,4572,4573, 137,4250, # 1488 + 598,3101,1967, 780, 104, 974,2953,5167, 278, 899, 253, 402, 572, 504, 493,1339, # 1504 +5168,4006,1275,4574,2582,2558,5169,3706,3049,3102,2253, 565,1334,2722, 863, 41, # 1520 +5170,5171,4575,5172,1657,2338, 19, 463,2760,4251, 606,5173,2999,3289,1087,2085, # 1536 +1323,2662,3000,5174,1631,1623,1750,4252,2691,5175,2878, 791,2723,2663,2339, 232, # 1552 +2421,5176,3001,1498,5177,2664,2630, 755,1366,3707,3290,3151,2026,1609, 119,1918, # 1568 +3474, 862,1026,4253,5178,4007,3839,4576,4008,4577,2265,1952,2477,5179,1125, 817, # 1584 +4254,4255,4009,1513,1766,2041,1487,4256,3050,3291,2837,3840,3152,5180,5181,1507, # 1600 +5182,2692, 733, 40,1632,1106,2879, 345,4257, 841,2531, 230,4578,3002,1847,3292, # 1616 +3475,5183,1263, 986,3476,5184, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562, # 1632 +4010,4011,2954, 967,2761,2665,1349, 592,2134,1692,3361,3003,1995,4258,1679,4012, # 1648 +1902,2188,5185, 739,3708,2724,1296,1290,5186,4259,2201,2202,1922,1563,2605,2559, # 1664 +1871,2762,3004,5187, 435,5188, 343,1108, 596, 17,1751,4579,2239,3477,3709,5189, # 1680 +4580, 294,3582,2955,1693, 477, 979, 281,2042,3583, 643,2043,3710,2631,2795,2266, # 1696 +1031,2340,2135,2303,3584,4581, 367,1249,2560,5190,3585,5191,4582,1283,3362,2005, # 1712 + 240,1762,3363,4583,4584, 836,1069,3153, 474,5192,2149,2532, 268,3586,5193,3219, # 1728 +1521,1284,5194,1658,1546,4260,5195,3587,3588,5196,4261,3364,2693,1685,4262, 961, # 1744 +1673,2632, 190,2006,2203,3841,4585,4586,5197, 
570,2504,3711,1490,5198,4587,2633, # 1760 +3293,1957,4588, 584,1514, 396,1045,1945,5199,4589,1968,2449,5200,5201,4590,4013, # 1776 + 619,5202,3154,3294, 215,2007,2796,2561,3220,4591,3221,4592, 763,4263,3842,4593, # 1792 +5203,5204,1958,1767,2956,3365,3712,1174, 452,1477,4594,3366,3155,5205,2838,1253, # 1808 +2387,2189,1091,2290,4264, 492,5206, 638,1169,1825,2136,1752,4014, 648, 926,1021, # 1824 +1324,4595, 520,4596, 997, 847,1007, 892,4597,3843,2267,1872,3713,2405,1785,4598, # 1840 +1953,2957,3103,3222,1728,4265,2044,3714,4599,2008,1701,3156,1551, 30,2268,4266, # 1856 +5207,2027,4600,3589,5208, 501,5209,4267, 594,3478,2166,1822,3590,3479,3591,3223, # 1872 + 829,2839,4268,5210,1680,3157,1225,4269,5211,3295,4601,4270,3158,2341,5212,4602, # 1888 +4271,5213,4015,4016,5214,1848,2388,2606,3367,5215,4603, 374,4017, 652,4272,4273, # 1904 + 375,1140, 798,5216,5217,5218,2366,4604,2269, 546,1659, 138,3051,2450,4605,5219, # 1920 +2254, 612,1849, 910, 796,3844,1740,1371, 825,3845,3846,5220,2920,2562,5221, 692, # 1936 + 444,3052,2634, 801,4606,4274,5222,1491, 244,1053,3053,4275,4276, 340,5223,4018, # 1952 +1041,3005, 293,1168, 87,1357,5224,1539, 959,5225,2240, 721, 694,4277,3847, 219, # 1968 +1478, 644,1417,3368,2666,1413,1401,1335,1389,4019,5226,5227,3006,2367,3159,1826, # 1984 + 730,1515, 184,2840, 66,4607,5228,1660,2958, 246,3369, 378,1457, 226,3480, 975, # 2000 +4020,2959,1264,3592, 674, 696,5229, 163,5230,1141,2422,2167, 713,3593,3370,4608, # 2016 +4021,5231,5232,1186, 15,5233,1079,1070,5234,1522,3224,3594, 276,1050,2725, 758, # 2032 +1126, 653,2960,3296,5235,2342, 889,3595,4022,3104,3007, 903,1250,4609,4023,3481, # 2048 +3596,1342,1681,1718, 766,3297, 286, 89,2961,3715,5236,1713,5237,2607,3371,3008, # 2064 +5238,2962,2219,3225,2880,5239,4610,2505,2533, 181, 387,1075,4024, 731,2190,3372, # 2080 +5240,3298, 310, 313,3482,2304, 770,4278, 54,3054, 189,4611,3105,3848,4025,5241, # 2096 +1230,1617,1850, 355,3597,4279,4612,3373, 111,4280,3716,1350,3160,3483,3055,4281, # 2112 +2150,3299,3598,5242,2797,4026,4027,3009, 722,2009,5243,1071, 247,1207,2343,2478, # 2128 +1378,4613,2010, 864,1437,1214,4614, 373,3849,1142,2220, 667,4615, 442,2763,2563, # 2144 +3850,4028,1969,4282,3300,1840, 837, 170,1107, 934,1336,1883,5244,5245,2119,4283, # 2160 +2841, 743,1569,5246,4616,4284, 582,2389,1418,3484,5247,1803,5248, 357,1395,1729, # 2176 +3717,3301,2423,1564,2241,5249,3106,3851,1633,4617,1114,2086,4285,1532,5250, 482, # 2192 +2451,4618,5251,5252,1492, 833,1466,5253,2726,3599,1641,2842,5254,1526,1272,3718, # 2208 +4286,1686,1795, 416,2564,1903,1954,1804,5255,3852,2798,3853,1159,2321,5256,2881, # 2224 +4619,1610,1584,3056,2424,2764, 443,3302,1163,3161,5257,5258,4029,5259,4287,2506, # 2240 +3057,4620,4030,3162,2104,1647,3600,2011,1873,4288,5260,4289, 431,3485,5261, 250, # 2256 + 97, 81,4290,5262,1648,1851,1558, 160, 848,5263, 866, 740,1694,5264,2204,2843, # 2272 +3226,4291,4621,3719,1687, 950,2479, 426, 469,3227,3720,3721,4031,5265,5266,1188, # 2288 + 424,1996, 861,3601,4292,3854,2205,2694, 168,1235,3602,4293,5267,2087,1674,4622, # 2304 +3374,3303, 220,2565,1009,5268,3855, 670,3010, 332,1208, 717,5269,5270,3603,2452, # 2320 +4032,3375,5271, 513,5272,1209,2882,3376,3163,4623,1080,5273,5274,5275,5276,2534, # 2336 +3722,3604, 815,1587,4033,4034,5277,3605,3486,3856,1254,4624,1328,3058,1390,4035, # 2352 +1741,4036,3857,4037,5278, 236,3858,2453,3304,5279,5280,3723,3859,1273,3860,4625, # 2368 +5281, 308,5282,4626, 245,4627,1852,2480,1307,2583, 430, 715,2137,2454,5283, 270, # 2384 + 199,2883,4038,5284,3606,2727,1753, 761,1754, 
725,1661,1841,4628,3487,3724,5285, # 2400 +5286, 587, 14,3305, 227,2608, 326, 480,2270, 943,2765,3607, 291, 650,1884,5287, # 2416 +1702,1226, 102,1547, 62,3488, 904,4629,3489,1164,4294,5288,5289,1224,1548,2766, # 2432 + 391, 498,1493,5290,1386,1419,5291,2056,1177,4630, 813, 880,1081,2368, 566,1145, # 2448 +4631,2291,1001,1035,2566,2609,2242, 394,1286,5292,5293,2069,5294, 86,1494,1730, # 2464 +4039, 491,1588, 745, 897,2963, 843,3377,4040,2767,2884,3306,1768, 998,2221,2070, # 2480 + 397,1827,1195,1970,3725,3011,3378, 284,5295,3861,2507,2138,2120,1904,5296,4041, # 2496 +2151,4042,4295,1036,3490,1905, 114,2567,4296, 209,1527,5297,5298,2964,2844,2635, # 2512 +2390,2728,3164, 812,2568,5299,3307,5300,1559, 737,1885,3726,1210, 885, 28,2695, # 2528 +3608,3862,5301,4297,1004,1780,4632,5302, 346,1982,2222,2696,4633,3863,1742, 797, # 2544 +1642,4043,1934,1072,1384,2152, 896,4044,3308,3727,3228,2885,3609,5303,2569,1959, # 2560 +4634,2455,1786,5304,5305,5306,4045,4298,1005,1308,3728,4299,2729,4635,4636,1528, # 2576 +2610, 161,1178,4300,1983, 987,4637,1101,4301, 631,4046,1157,3229,2425,1343,1241, # 2592 +1016,2243,2570, 372, 877,2344,2508,1160, 555,1935, 911,4047,5307, 466,1170, 169, # 2608 +1051,2921,2697,3729,2481,3012,1182,2012,2571,1251,2636,5308, 992,2345,3491,1540, # 2624 +2730,1201,2071,2406,1997,2482,5309,4638, 528,1923,2191,1503,1874,1570,2369,3379, # 2640 +3309,5310, 557,1073,5311,1828,3492,2088,2271,3165,3059,3107, 767,3108,2799,4639, # 2656 +1006,4302,4640,2346,1267,2179,3730,3230, 778,4048,3231,2731,1597,2667,5312,4641, # 2672 +5313,3493,5314,5315,5316,3310,2698,1433,3311, 131, 95,1504,4049, 723,4303,3166, # 2688 +1842,3610,2768,2192,4050,2028,2105,3731,5317,3013,4051,1218,5318,3380,3232,4052, # 2704 +4304,2584, 248,1634,3864, 912,5319,2845,3732,3060,3865, 654, 53,5320,3014,5321, # 2720 +1688,4642, 777,3494,1032,4053,1425,5322, 191, 820,2121,2846, 971,4643, 931,3233, # 2736 + 135, 664, 783,3866,1998, 772,2922,1936,4054,3867,4644,2923,3234, 282,2732, 640, # 2752 +1372,3495,1127, 922, 325,3381,5323,5324, 711,2045,5325,5326,4055,2223,2800,1937, # 2768 +4056,3382,2224,2255,3868,2305,5327,4645,3869,1258,3312,4057,3235,2139,2965,4058, # 2784 +4059,5328,2225, 258,3236,4646, 101,1227,5329,3313,1755,5330,1391,3314,5331,2924, # 2800 +2057, 893,5332,5333,5334,1402,4305,2347,5335,5336,3237,3611,5337,5338, 878,1325, # 2816 +1781,2801,4647, 259,1385,2585, 744,1183,2272,4648,5339,4060,2509,5340, 684,1024, # 2832 +4306,5341, 472,3612,3496,1165,3315,4061,4062, 322,2153, 881, 455,1695,1152,1340, # 2848 + 660, 554,2154,4649,1058,4650,4307, 830,1065,3383,4063,4651,1924,5342,1703,1919, # 2864 +5343, 932,2273, 122,5344,4652, 947, 677,5345,3870,2637, 297,1906,1925,2274,4653, # 2880 +2322,3316,5346,5347,4308,5348,4309, 84,4310, 112, 989,5349, 547,1059,4064, 701, # 2896 +3613,1019,5350,4311,5351,3497, 942, 639, 457,2306,2456, 993,2966, 407, 851, 494, # 2912 +4654,3384, 927,5352,1237,5353,2426,3385, 573,4312, 680, 921,2925,1279,1875, 285, # 2928 + 790,1448,1984, 719,2168,5354,5355,4655,4065,4066,1649,5356,1541, 563,5357,1077, # 2944 +5358,3386,3061,3498, 511,3015,4067,4068,3733,4069,1268,2572,3387,3238,4656,4657, # 2960 +5359, 535,1048,1276,1189,2926,2029,3167,1438,1373,2847,2967,1134,2013,5360,4313, # 2976 +1238,2586,3109,1259,5361, 700,5362,2968,3168,3734,4314,5363,4315,1146,1876,1907, # 2992 +4658,2611,4070, 781,2427, 132,1589, 203, 147, 273,2802,2407, 898,1787,2155,4071, # 3008 +4072,5364,3871,2803,5365,5366,4659,4660,5367,3239,5368,1635,3872, 965,5369,1805, # 3024 
+2699,1516,3614,1121,1082,1329,3317,4073,1449,3873, 65,1128,2848,2927,2769,1590, # 3040 +3874,5370,5371, 12,2668, 45, 976,2587,3169,4661, 517,2535,1013,1037,3240,5372, # 3056 +3875,2849,5373,3876,5374,3499,5375,2612, 614,1999,2323,3877,3110,2733,2638,5376, # 3072 +2588,4316, 599,1269,5377,1811,3735,5378,2700,3111, 759,1060, 489,1806,3388,3318, # 3088 +1358,5379,5380,2391,1387,1215,2639,2256, 490,5381,5382,4317,1759,2392,2348,5383, # 3104 +4662,3878,1908,4074,2640,1807,3241,4663,3500,3319,2770,2349, 874,5384,5385,3501, # 3120 +3736,1859, 91,2928,3737,3062,3879,4664,5386,3170,4075,2669,5387,3502,1202,1403, # 3136 +3880,2969,2536,1517,2510,4665,3503,2511,5388,4666,5389,2701,1886,1495,1731,4076, # 3152 +2370,4667,5390,2030,5391,5392,4077,2702,1216, 237,2589,4318,2324,4078,3881,4668, # 3168 +4669,2703,3615,3504, 445,4670,5393,5394,5395,5396,2771, 61,4079,3738,1823,4080, # 3184 +5397, 687,2046, 935, 925, 405,2670, 703,1096,1860,2734,4671,4081,1877,1367,2704, # 3200 +3389, 918,2106,1782,2483, 334,3320,1611,1093,4672, 564,3171,3505,3739,3390, 945, # 3216 +2641,2058,4673,5398,1926, 872,4319,5399,3506,2705,3112, 349,4320,3740,4082,4674, # 3232 +3882,4321,3741,2156,4083,4675,4676,4322,4677,2408,2047, 782,4084, 400, 251,4323, # 3248 +1624,5400,5401, 277,3742, 299,1265, 476,1191,3883,2122,4324,4325,1109, 205,5402, # 3264 +2590,1000,2157,3616,1861,5403,5404,5405,4678,5406,4679,2573, 107,2484,2158,4085, # 3280 +3507,3172,5407,1533, 541,1301, 158, 753,4326,2886,3617,5408,1696, 370,1088,4327, # 3296 +4680,3618, 579, 327, 440, 162,2244, 269,1938,1374,3508, 968,3063, 56,1396,3113, # 3312 +2107,3321,3391,5409,1927,2159,4681,3016,5410,3619,5411,5412,3743,4682,2485,5413, # 3328 +2804,5414,1650,4683,5415,2613,5416,5417,4086,2671,3392,1149,3393,4087,3884,4088, # 3344 +5418,1076, 49,5419, 951,3242,3322,3323, 450,2850, 920,5420,1812,2805,2371,4328, # 3360 +1909,1138,2372,3885,3509,5421,3243,4684,1910,1147,1518,2428,4685,3886,5422,4686, # 3376 +2393,2614, 260,1796,3244,5423,5424,3887,3324, 708,5425,3620,1704,5426,3621,1351, # 3392 +1618,3394,3017,1887, 944,4329,3395,4330,3064,3396,4331,5427,3744, 422, 413,1714, # 3408 +3325, 500,2059,2350,4332,2486,5428,1344,1911, 954,5429,1668,5430,5431,4089,2409, # 3424 +4333,3622,3888,4334,5432,2307,1318,2512,3114, 133,3115,2887,4687, 629, 31,2851, # 3440 +2706,3889,4688, 850, 949,4689,4090,2970,1732,2089,4335,1496,1853,5433,4091, 620, # 3456 +3245, 981,1242,3745,3397,1619,3746,1643,3326,2140,2457,1971,1719,3510,2169,5434, # 3472 +3246,5435,5436,3398,1829,5437,1277,4690,1565,2048,5438,1636,3623,3116,5439, 869, # 3488 +2852, 655,3890,3891,3117,4092,3018,3892,1310,3624,4691,5440,5441,5442,1733, 558, # 3504 +4692,3747, 335,1549,3065,1756,4336,3748,1946,3511,1830,1291,1192, 470,2735,2108, # 3520 +2806, 913,1054,4093,5443,1027,5444,3066,4094,4693, 982,2672,3399,3173,3512,3247, # 3536 +3248,1947,2807,5445, 571,4694,5446,1831,5447,3625,2591,1523,2429,5448,2090, 984, # 3552 +4695,3749,1960,5449,3750, 852, 923,2808,3513,3751, 969,1519, 999,2049,2325,1705, # 3568 +5450,3118, 615,1662, 151, 597,4095,2410,2326,1049, 275,4696,3752,4337, 568,3753, # 3584 +3626,2487,4338,3754,5451,2430,2275, 409,3249,5452,1566,2888,3514,1002, 769,2853, # 3600 + 194,2091,3174,3755,2226,3327,4339, 628,1505,5453,5454,1763,2180,3019,4096, 521, # 3616 +1161,2592,1788,2206,2411,4697,4097,1625,4340,4341, 412, 42,3119, 464,5455,2642, # 3632 +4698,3400,1760,1571,2889,3515,2537,1219,2207,3893,2643,2141,2373,4699,4700,3328, # 3648 +1651,3401,3627,5456,5457,3628,2488,3516,5458,3756,5459,5460,2276,2092, 460,5461, # 3664 
+4701,5462,3020, 962, 588,3629, 289,3250,2644,1116, 52,5463,3067,1797,5464,5465, # 3680 +5466,1467,5467,1598,1143,3757,4342,1985,1734,1067,4702,1280,3402, 465,4703,1572, # 3696 + 510,5468,1928,2245,1813,1644,3630,5469,4704,3758,5470,5471,2673,1573,1534,5472, # 3712 +5473, 536,1808,1761,3517,3894,3175,2645,5474,5475,5476,4705,3518,2929,1912,2809, # 3728 +5477,3329,1122, 377,3251,5478, 360,5479,5480,4343,1529, 551,5481,2060,3759,1769, # 3744 +2431,5482,2930,4344,3330,3120,2327,2109,2031,4706,1404, 136,1468,1479, 672,1171, # 3760 +3252,2308, 271,3176,5483,2772,5484,2050, 678,2736, 865,1948,4707,5485,2014,4098, # 3776 +2971,5486,2737,2227,1397,3068,3760,4708,4709,1735,2931,3403,3631,5487,3895, 509, # 3792 +2854,2458,2890,3896,5488,5489,3177,3178,4710,4345,2538,4711,2309,1166,1010, 552, # 3808 + 681,1888,5490,5491,2972,2973,4099,1287,1596,1862,3179, 358, 453, 736, 175, 478, # 3824 +1117, 905,1167,1097,5492,1854,1530,5493,1706,5494,2181,3519,2292,3761,3520,3632, # 3840 +4346,2093,4347,5495,3404,1193,2489,4348,1458,2193,2208,1863,1889,1421,3331,2932, # 3856 +3069,2182,3521, 595,2123,5496,4100,5497,5498,4349,1707,2646, 223,3762,1359, 751, # 3872 +3121, 183,3522,5499,2810,3021, 419,2374, 633, 704,3897,2394, 241,5500,5501,5502, # 3888 + 838,3022,3763,2277,2773,2459,3898,1939,2051,4101,1309,3122,2246,1181,5503,1136, # 3904 +2209,3899,2375,1446,4350,2310,4712,5504,5505,4351,1055,2615, 484,3764,5506,4102, # 3920 + 625,4352,2278,3405,1499,4353,4103,5507,4104,4354,3253,2279,2280,3523,5508,5509, # 3936 +2774, 808,2616,3765,3406,4105,4355,3123,2539, 526,3407,3900,4356, 955,5510,1620, # 3952 +4357,2647,2432,5511,1429,3766,1669,1832, 994, 928,5512,3633,1260,5513,5514,5515, # 3968 +1949,2293, 741,2933,1626,4358,2738,2460, 867,1184, 362,3408,1392,5516,5517,4106, # 3984 +4359,1770,1736,3254,2934,4713,4714,1929,2707,1459,1158,5518,3070,3409,2891,1292, # 4000 +1930,2513,2855,3767,1986,1187,2072,2015,2617,4360,5519,2574,2514,2170,3768,2490, # 4016 +3332,5520,3769,4715,5521,5522, 666,1003,3023,1022,3634,4361,5523,4716,1814,2257, # 4032 + 574,3901,1603, 295,1535, 705,3902,4362, 283, 858, 417,5524,5525,3255,4717,4718, # 4048 +3071,1220,1890,1046,2281,2461,4107,1393,1599, 689,2575, 388,4363,5526,2491, 802, # 4064 +5527,2811,3903,2061,1405,2258,5528,4719,3904,2110,1052,1345,3256,1585,5529, 809, # 4080 +5530,5531,5532, 575,2739,3524, 956,1552,1469,1144,2328,5533,2329,1560,2462,3635, # 4096 +3257,4108, 616,2210,4364,3180,2183,2294,5534,1833,5535,3525,4720,5536,1319,3770, # 4112 +3771,1211,3636,1023,3258,1293,2812,5537,5538,5539,3905, 607,2311,3906, 762,2892, # 4128 +1439,4365,1360,4721,1485,3072,5540,4722,1038,4366,1450,2062,2648,4367,1379,4723, # 4144 +2593,5541,5542,4368,1352,1414,2330,2935,1172,5543,5544,3907,3908,4724,1798,1451, # 4160 +5545,5546,5547,5548,2936,4109,4110,2492,2351, 411,4111,4112,3637,3333,3124,4725, # 4176 +1561,2674,1452,4113,1375,5549,5550, 47,2974, 316,5551,1406,1591,2937,3181,5552, # 4192 +1025,2142,3125,3182, 354,2740, 884,2228,4369,2412, 508,3772, 726,3638, 996,2433, # 4208 +3639, 729,5553, 392,2194,1453,4114,4726,3773,5554,5555,2463,3640,2618,1675,2813, # 4224 + 919,2352,2975,2353,1270,4727,4115, 73,5556,5557, 647,5558,3259,2856,2259,1550, # 4240 +1346,3024,5559,1332, 883,3526,5560,5561,5562,5563,3334,2775,5564,1212, 831,1347, # 4256 +4370,4728,2331,3909,1864,3073, 720,3910,4729,4730,3911,5565,4371,5566,5567,4731, # 4272 +5568,5569,1799,4732,3774,2619,4733,3641,1645,2376,4734,5570,2938, 669,2211,2675, # 4288 +2434,5571,2893,5572,5573,1028,3260,5574,4372,2413,5575,2260,1353,5576,5577,4735, # 
4304 +3183, 518,5578,4116,5579,4373,1961,5580,2143,4374,5581,5582,3025,2354,2355,3912, # 4320 + 516,1834,1454,4117,2708,4375,4736,2229,2620,1972,1129,3642,5583,2776,5584,2976, # 4336 +1422, 577,1470,3026,1524,3410,5585,5586, 432,4376,3074,3527,5587,2594,1455,2515, # 4352 +2230,1973,1175,5588,1020,2741,4118,3528,4737,5589,2742,5590,1743,1361,3075,3529, # 4368 +2649,4119,4377,4738,2295, 895, 924,4378,2171, 331,2247,3076, 166,1627,3077,1098, # 4384 +5591,1232,2894,2231,3411,4739, 657, 403,1196,2377, 542,3775,3412,1600,4379,3530, # 4400 +5592,4740,2777,3261, 576, 530,1362,4741,4742,2540,2676,3776,4120,5593, 842,3913, # 4416 +5594,2814,2032,1014,4121, 213,2709,3413, 665, 621,4380,5595,3777,2939,2435,5596, # 4432 +2436,3335,3643,3414,4743,4381,2541,4382,4744,3644,1682,4383,3531,1380,5597, 724, # 4448 +2282, 600,1670,5598,1337,1233,4745,3126,2248,5599,1621,4746,5600, 651,4384,5601, # 4464 +1612,4385,2621,5602,2857,5603,2743,2312,3078,5604, 716,2464,3079, 174,1255,2710, # 4480 +4122,3645, 548,1320,1398, 728,4123,1574,5605,1891,1197,3080,4124,5606,3081,3082, # 4496 +3778,3646,3779, 747,5607, 635,4386,4747,5608,5609,5610,4387,5611,5612,4748,5613, # 4512 +3415,4749,2437, 451,5614,3780,2542,2073,4388,2744,4389,4125,5615,1764,4750,5616, # 4528 +4390, 350,4751,2283,2395,2493,5617,4391,4126,2249,1434,4127, 488,4752, 458,4392, # 4544 +4128,3781, 771,1330,2396,3914,2576,3184,2160,2414,1553,2677,3185,4393,5618,2494, # 4560 +2895,2622,1720,2711,4394,3416,4753,5619,2543,4395,5620,3262,4396,2778,5621,2016, # 4576 +2745,5622,1155,1017,3782,3915,5623,3336,2313, 201,1865,4397,1430,5624,4129,5625, # 4592 +5626,5627,5628,5629,4398,1604,5630, 414,1866, 371,2595,4754,4755,3532,2017,3127, # 4608 +4756,1708, 960,4399, 887, 389,2172,1536,1663,1721,5631,2232,4130,2356,2940,1580, # 4624 +5632,5633,1744,4757,2544,4758,4759,5634,4760,5635,2074,5636,4761,3647,3417,2896, # 4640 +4400,5637,4401,2650,3418,2815, 673,2712,2465, 709,3533,4131,3648,4402,5638,1148, # 4656 + 502, 634,5639,5640,1204,4762,3649,1575,4763,2623,3783,5641,3784,3128, 948,3263, # 4672 + 121,1745,3916,1110,5642,4403,3083,2516,3027,4132,3785,1151,1771,3917,1488,4133, # 4688 +1987,5643,2438,3534,5644,5645,2094,5646,4404,3918,1213,1407,2816, 531,2746,2545, # 4704 +3264,1011,1537,4764,2779,4405,3129,1061,5647,3786,3787,1867,2897,5648,2018, 120, # 4720 +4406,4407,2063,3650,3265,2314,3919,2678,3419,1955,4765,4134,5649,3535,1047,2713, # 4736 +1266,5650,1368,4766,2858, 649,3420,3920,2546,2747,1102,2859,2679,5651,5652,2000, # 4752 +5653,1111,3651,2977,5654,2495,3921,3652,2817,1855,3421,3788,5655,5656,3422,2415, # 4768 +2898,3337,3266,3653,5657,2577,5658,3654,2818,4135,1460, 856,5659,3655,5660,2899, # 4784 +2978,5661,2900,3922,5662,4408, 632,2517, 875,3923,1697,3924,2296,5663,5664,4767, # 4800 +3028,1239, 580,4768,4409,5665, 914, 936,2075,1190,4136,1039,2124,5666,5667,5668, # 4816 +5669,3423,1473,5670,1354,4410,3925,4769,2173,3084,4137, 915,3338,4411,4412,3339, # 4832 +1605,1835,5671,2748, 398,3656,4413,3926,4138, 328,1913,2860,4139,3927,1331,4414, # 4848 +3029, 937,4415,5672,3657,4140,4141,3424,2161,4770,3425, 524, 742, 538,3085,1012, # 4864 +5673,5674,3928,2466,5675, 658,1103, 225,3929,5676,5677,4771,5678,4772,5679,3267, # 4880 +1243,5680,4142, 963,2250,4773,5681,2714,3658,3186,5682,5683,2596,2332,5684,4774, # 4896 +5685,5686,5687,3536, 957,3426,2547,2033,1931,2941,2467, 870,2019,3659,1746,2780, # 4912 +2781,2439,2468,5688,3930,5689,3789,3130,3790,3537,3427,3791,5690,1179,3086,5691, # 4928 +3187,2378,4416,3792,2548,3188,3131,2749,4143,5692,3428,1556,2549,2297, 
977,2901, # 4944 +2034,4144,1205,3429,5693,1765,3430,3189,2125,1271, 714,1689,4775,3538,5694,2333, # 4960 +3931, 533,4417,3660,2184, 617,5695,2469,3340,3539,2315,5696,5697,3190,5698,5699, # 4976 +3932,1988, 618, 427,2651,3540,3431,5700,5701,1244,1690,5702,2819,4418,4776,5703, # 4992 +3541,4777,5704,2284,1576, 473,3661,4419,3432, 972,5705,3662,5706,3087,5707,5708, # 5008 +4778,4779,5709,3793,4145,4146,5710, 153,4780, 356,5711,1892,2902,4420,2144, 408, # 5024 + 803,2357,5712,3933,5713,4421,1646,2578,2518,4781,4782,3934,5714,3935,4422,5715, # 5040 +2416,3433, 752,5716,5717,1962,3341,2979,5718, 746,3030,2470,4783,4423,3794, 698, # 5056 +4784,1893,4424,3663,2550,4785,3664,3936,5719,3191,3434,5720,1824,1302,4147,2715, # 5072 +3937,1974,4425,5721,4426,3192, 823,1303,1288,1236,2861,3542,4148,3435, 774,3938, # 5088 +5722,1581,4786,1304,2862,3939,4787,5723,2440,2162,1083,3268,4427,4149,4428, 344, # 5104 +1173, 288,2316, 454,1683,5724,5725,1461,4788,4150,2597,5726,5727,4789, 985, 894, # 5120 +5728,3436,3193,5729,1914,2942,3795,1989,5730,2111,1975,5731,4151,5732,2579,1194, # 5136 + 425,5733,4790,3194,1245,3796,4429,5734,5735,2863,5736, 636,4791,1856,3940, 760, # 5152 +1800,5737,4430,2212,1508,4792,4152,1894,1684,2298,5738,5739,4793,4431,4432,2213, # 5168 + 479,5740,5741, 832,5742,4153,2496,5743,2980,2497,3797, 990,3132, 627,1815,2652, # 5184 +4433,1582,4434,2126,2112,3543,4794,5744, 799,4435,3195,5745,4795,2113,1737,3031, # 5200 +1018, 543, 754,4436,3342,1676,4796,4797,4154,4798,1489,5746,3544,5747,2624,2903, # 5216 +4155,5748,5749,2981,5750,5751,5752,5753,3196,4799,4800,2185,1722,5754,3269,3270, # 5232 +1843,3665,1715, 481, 365,1976,1857,5755,5756,1963,2498,4801,5757,2127,3666,3271, # 5248 + 433,1895,2064,2076,5758, 602,2750,5759,5760,5761,5762,5763,3032,1628,3437,5764, # 5264 +3197,4802,4156,2904,4803,2519,5765,2551,2782,5766,5767,5768,3343,4804,2905,5769, # 5280 +4805,5770,2864,4806,4807,1221,2982,4157,2520,5771,5772,5773,1868,1990,5774,5775, # 5296 +5776,1896,5777,5778,4808,1897,4158, 318,5779,2095,4159,4437,5780,5781, 485,5782, # 5312 + 938,3941, 553,2680, 116,5783,3942,3667,5784,3545,2681,2783,3438,3344,2820,5785, # 5328 +3668,2943,4160,1747,2944,2983,5786,5787, 207,5788,4809,5789,4810,2521,5790,3033, # 5344 + 890,3669,3943,5791,1878,3798,3439,5792,2186,2358,3440,1652,5793,5794,5795, 941, # 5360 +2299, 208,3546,4161,2020, 330,4438,3944,2906,2499,3799,4439,4811,5796,5797,5798, # 5376 +) + diff --git a/env/lib/python3.6/site-packages/chardet/big5prober.py b/env/lib/python3.6/site-packages/chardet/big5prober.py new file mode 100644 index 0000000..98f9970 --- /dev/null +++ b/env/lib/python3.6/site-packages/chardet/big5prober.py @@ -0,0 +1,47 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .mbcharsetprober import MultiByteCharSetProber +from .codingstatemachine import CodingStateMachine +from .chardistribution import Big5DistributionAnalysis +from .mbcssm import BIG5_SM_MODEL + + +class Big5Prober(MultiByteCharSetProber): + def __init__(self): + super(Big5Prober, self).__init__() + self.coding_sm = CodingStateMachine(BIG5_SM_MODEL) + self.distribution_analyzer = Big5DistributionAnalysis() + self.reset() + + @property + def charset_name(self): + return "Big5" + + @property + def language(self): + return "Chinese" diff --git a/env/lib/python3.6/site-packages/chardet/chardistribution.py b/env/lib/python3.6/site-packages/chardet/chardistribution.py new file mode 100644 index 0000000..c0395f4 --- /dev/null +++ b/env/lib/python3.6/site-packages/chardet/chardistribution.py @@ -0,0 +1,233 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .euctwfreq import (EUCTW_CHAR_TO_FREQ_ORDER, EUCTW_TABLE_SIZE, + EUCTW_TYPICAL_DISTRIBUTION_RATIO) +from .euckrfreq import (EUCKR_CHAR_TO_FREQ_ORDER, EUCKR_TABLE_SIZE, + EUCKR_TYPICAL_DISTRIBUTION_RATIO) +from .gb2312freq import (GB2312_CHAR_TO_FREQ_ORDER, GB2312_TABLE_SIZE, + GB2312_TYPICAL_DISTRIBUTION_RATIO) +from .big5freq import (BIG5_CHAR_TO_FREQ_ORDER, BIG5_TABLE_SIZE, + BIG5_TYPICAL_DISTRIBUTION_RATIO) +from .jisfreq import (JIS_CHAR_TO_FREQ_ORDER, JIS_TABLE_SIZE, + JIS_TYPICAL_DISTRIBUTION_RATIO) + + +class CharDistributionAnalysis(object): + ENOUGH_DATA_THRESHOLD = 1024 + SURE_YES = 0.99 + SURE_NO = 0.01 + MINIMUM_DATA_THRESHOLD = 3 + + def __init__(self): + # Mapping table to get frequency order from char order (get from + # GetOrder()) + self._char_to_freq_order = None + self._table_size = None # Size of above table + # This is a constant value which varies from language to language, + # used in calculating confidence. See + # http://www.mozilla.org/projects/intl/UniversalCharsetDetection.html + # for further detail. 
+ self.typical_distribution_ratio = None + self._done = None + self._total_chars = None + self._freq_chars = None + self.reset() + + def reset(self): + """reset analyser, clear any state""" + # If this flag is set to True, detection is done and conclusion has + # been made + self._done = False + self._total_chars = 0 # Total characters encountered + # The number of characters whose frequency order is less than 512 + self._freq_chars = 0 + + def feed(self, char, char_len): + """feed a character with known length""" + if char_len == 2: + # we only care about 2-bytes character in our distribution analysis + order = self.get_order(char) + else: + order = -1 + if order >= 0: + self._total_chars += 1 + # order is valid + if order < self._table_size: + if 512 > self._char_to_freq_order[order]: + self._freq_chars += 1 + + def get_confidence(self): + """return confidence based on existing data""" + # if we didn't receive any character in our consideration range, + # return negative answer + if self._total_chars <= 0 or self._freq_chars <= self.MINIMUM_DATA_THRESHOLD: + return self.SURE_NO + + if self._total_chars != self._freq_chars: + r = (self._freq_chars / ((self._total_chars - self._freq_chars) + * self.typical_distribution_ratio)) + if r < self.SURE_YES: + return r + + # normalize confidence (we don't want to be 100% sure) + return self.SURE_YES + + def got_enough_data(self): + # It is not necessary to receive all data to draw conclusion. + # For charset detection, certain amount of data is enough + return self._total_chars > self.ENOUGH_DATA_THRESHOLD + + def get_order(self, byte_str): + # We do not handle characters based on the original encoding string, + # but convert this encoding string to a number, here called order. + # This allows multiple encodings of a language to share one frequency + # table. + return -1 + + +class EUCTWDistributionAnalysis(CharDistributionAnalysis): + def __init__(self): + super(EUCTWDistributionAnalysis, self).__init__() + self._char_to_freq_order = EUCTW_CHAR_TO_FREQ_ORDER + self._table_size = EUCTW_TABLE_SIZE + self.typical_distribution_ratio = EUCTW_TYPICAL_DISTRIBUTION_RATIO + + def get_order(self, byte_str): + # for euc-TW encoding, we are interested + # first byte range: 0xc4 -- 0xfe + # second byte range: 0xa1 -- 0xfe + # no validation needed here. State machine has done that + first_char = byte_str[0] + if first_char >= 0xC4: + return 94 * (first_char - 0xC4) + byte_str[1] - 0xA1 + else: + return -1 + + +class EUCKRDistributionAnalysis(CharDistributionAnalysis): + def __init__(self): + super(EUCKRDistributionAnalysis, self).__init__() + self._char_to_freq_order = EUCKR_CHAR_TO_FREQ_ORDER + self._table_size = EUCKR_TABLE_SIZE + self.typical_distribution_ratio = EUCKR_TYPICAL_DISTRIBUTION_RATIO + + def get_order(self, byte_str): + # for euc-KR encoding, we are interested + # first byte range: 0xb0 -- 0xfe + # second byte range: 0xa1 -- 0xfe + # no validation needed here. 
State machine has done that + first_char = byte_str[0] + if first_char >= 0xB0: + return 94 * (first_char - 0xB0) + byte_str[1] - 0xA1 + else: + return -1 + + +class GB2312DistributionAnalysis(CharDistributionAnalysis): + def __init__(self): + super(GB2312DistributionAnalysis, self).__init__() + self._char_to_freq_order = GB2312_CHAR_TO_FREQ_ORDER + self._table_size = GB2312_TABLE_SIZE + self.typical_distribution_ratio = GB2312_TYPICAL_DISTRIBUTION_RATIO + + def get_order(self, byte_str): + # for GB2312 encoding, we are interested + # first byte range: 0xb0 -- 0xfe + # second byte range: 0xa1 -- 0xfe + # no validation needed here. State machine has done that + first_char, second_char = byte_str[0], byte_str[1] + if (first_char >= 0xB0) and (second_char >= 0xA1): + return 94 * (first_char - 0xB0) + second_char - 0xA1 + else: + return -1 + + +class Big5DistributionAnalysis(CharDistributionAnalysis): + def __init__(self): + super(Big5DistributionAnalysis, self).__init__() + self._char_to_freq_order = BIG5_CHAR_TO_FREQ_ORDER + self._table_size = BIG5_TABLE_SIZE + self.typical_distribution_ratio = BIG5_TYPICAL_DISTRIBUTION_RATIO + + def get_order(self, byte_str): + # for big5 encoding, we are interested + # first byte range: 0xa4 -- 0xfe + # second byte range: 0x40 -- 0x7e , 0xa1 -- 0xfe + # no validation needed here. State machine has done that + first_char, second_char = byte_str[0], byte_str[1] + if first_char >= 0xA4: + if second_char >= 0xA1: + return 157 * (first_char - 0xA4) + second_char - 0xA1 + 63 + else: + return 157 * (first_char - 0xA4) + second_char - 0x40 + else: + return -1 + + +class SJISDistributionAnalysis(CharDistributionAnalysis): + def __init__(self): + super(SJISDistributionAnalysis, self).__init__() + self._char_to_freq_order = JIS_CHAR_TO_FREQ_ORDER + self._table_size = JIS_TABLE_SIZE + self.typical_distribution_ratio = JIS_TYPICAL_DISTRIBUTION_RATIO + + def get_order(self, byte_str): + # for sjis encoding, we are interested + # first byte range: 0x81 -- 0x9f , 0xe0 -- 0xfe + # second byte range: 0x40 -- 0x7e, 0x81 -- oxfe + # no validation needed here. State machine has done that + first_char, second_char = byte_str[0], byte_str[1] + if (first_char >= 0x81) and (first_char <= 0x9F): + order = 188 * (first_char - 0x81) + elif (first_char >= 0xE0) and (first_char <= 0xEF): + order = 188 * (first_char - 0xE0 + 31) + else: + return -1 + order = order + second_char - 0x40 + if second_char > 0x7F: + order = -1 + return order + + +class EUCJPDistributionAnalysis(CharDistributionAnalysis): + def __init__(self): + super(EUCJPDistributionAnalysis, self).__init__() + self._char_to_freq_order = JIS_CHAR_TO_FREQ_ORDER + self._table_size = JIS_TABLE_SIZE + self.typical_distribution_ratio = JIS_TYPICAL_DISTRIBUTION_RATIO + + def get_order(self, byte_str): + # for euc-JP encoding, we are interested + # first byte range: 0xa0 -- 0xfe + # second byte range: 0xa1 -- 0xfe + # no validation needed here. State machine has done that + char = byte_str[0] + if char >= 0xA0: + return 94 * (char - 0xA1) + byte_str[1] - 0xa1 + else: + return -1 diff --git a/env/lib/python3.6/site-packages/chardet/charsetgroupprober.py b/env/lib/python3.6/site-packages/chardet/charsetgroupprober.py new file mode 100644 index 0000000..8b3738e --- /dev/null +++ b/env/lib/python3.6/site-packages/chardet/charsetgroupprober.py @@ -0,0 +1,106 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. 
+# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .enums import ProbingState +from .charsetprober import CharSetProber + + +class CharSetGroupProber(CharSetProber): + def __init__(self, lang_filter=None): + super(CharSetGroupProber, self).__init__(lang_filter=lang_filter) + self._active_num = 0 + self.probers = [] + self._best_guess_prober = None + + def reset(self): + super(CharSetGroupProber, self).reset() + self._active_num = 0 + for prober in self.probers: + if prober: + prober.reset() + prober.active = True + self._active_num += 1 + self._best_guess_prober = None + + @property + def charset_name(self): + if not self._best_guess_prober: + self.get_confidence() + if not self._best_guess_prober: + return None + return self._best_guess_prober.charset_name + + @property + def language(self): + if not self._best_guess_prober: + self.get_confidence() + if not self._best_guess_prober: + return None + return self._best_guess_prober.language + + def feed(self, byte_str): + for prober in self.probers: + if not prober: + continue + if not prober.active: + continue + state = prober.feed(byte_str) + if not state: + continue + if state == ProbingState.FOUND_IT: + self._best_guess_prober = prober + return self.state + elif state == ProbingState.NOT_ME: + prober.active = False + self._active_num -= 1 + if self._active_num <= 0: + self._state = ProbingState.NOT_ME + return self.state + return self.state + + def get_confidence(self): + state = self.state + if state == ProbingState.FOUND_IT: + return 0.99 + elif state == ProbingState.NOT_ME: + return 0.01 + best_conf = 0.0 + self._best_guess_prober = None + for prober in self.probers: + if not prober: + continue + if not prober.active: + self.logger.debug('%s not active', prober.charset_name) + continue + conf = prober.get_confidence() + self.logger.debug('%s %s confidence = %s', prober.charset_name, prober.language, conf) + if best_conf < conf: + best_conf = conf + self._best_guess_prober = prober + if not self._best_guess_prober: + return 0.0 + return best_conf diff --git a/env/lib/python3.6/site-packages/chardet/charsetprober.py b/env/lib/python3.6/site-packages/chardet/charsetprober.py new file mode 100644 index 0000000..eac4e59 --- /dev/null +++ b/env/lib/python3.6/site-packages/chardet/charsetprober.py @@ -0,0 +1,145 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. 
+# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +import logging +import re + +from .enums import ProbingState + + +class CharSetProber(object): + + SHORTCUT_THRESHOLD = 0.95 + + def __init__(self, lang_filter=None): + self._state = None + self.lang_filter = lang_filter + self.logger = logging.getLogger(__name__) + + def reset(self): + self._state = ProbingState.DETECTING + + @property + def charset_name(self): + return None + + def feed(self, buf): + pass + + @property + def state(self): + return self._state + + def get_confidence(self): + return 0.0 + + @staticmethod + def filter_high_byte_only(buf): + buf = re.sub(b'([\x00-\x7F])+', b' ', buf) + return buf + + @staticmethod + def filter_international_words(buf): + """ + We define three types of bytes: + alphabet: english alphabets [a-zA-Z] + international: international characters [\x80-\xFF] + marker: everything else [^a-zA-Z\x80-\xFF] + + The input buffer can be thought to contain a series of words delimited + by markers. This function works to filter all words that contain at + least one international character. All contiguous sequences of markers + are replaced by a single space ascii character. + + This filter applies to all scripts which do not use English characters. + """ + filtered = bytearray() + + # This regex expression filters out only words that have at-least one + # international character. The word may include one marker character at + # the end. + words = re.findall(b'[a-zA-Z]*[\x80-\xFF]+[a-zA-Z]*[^a-zA-Z\x80-\xFF]?', + buf) + + for word in words: + filtered.extend(word[:-1]) + + # If the last character in the word is a marker, replace it with a + # space as markers shouldn't affect our analysis (they are used + # similarly across all languages and may thus have similar + # frequencies). + last_char = word[-1:] + if not last_char.isalpha() and last_char < b'\x80': + last_char = b' ' + filtered.extend(last_char) + + return filtered + + @staticmethod + def filter_with_english_letters(buf): + """ + Returns a copy of ``buf`` that retains only the sequences of English + alphabet and high byte characters that are not between <> characters. + Also retains English alphabet and high byte characters immediately + before occurrences of >. + + This filter can be applied to all scripts which contain both English + characters and extended ASCII characters, but is currently only used by + ``Latin1Prober``. 
+ """ + filtered = bytearray() + in_tag = False + prev = 0 + + for curr in range(len(buf)): + # Slice here to get bytes instead of an int with Python 3 + buf_char = buf[curr:curr + 1] + # Check if we're coming out of or entering an HTML tag + if buf_char == b'>': + in_tag = False + elif buf_char == b'<': + in_tag = True + + # If current character is not extended-ASCII and not alphabetic... + if buf_char < b'\x80' and not buf_char.isalpha(): + # ...and we're not in a tag + if curr > prev and not in_tag: + # Keep everything after last non-extended-ASCII, + # non-alphabetic character + filtered.extend(buf[prev:curr]) + # Output a space to delimit stretch we kept + filtered.extend(b' ') + prev = curr + 1 + + # If we're not in a tag... + if not in_tag: + # Keep everything after last non-extended-ASCII, non-alphabetic + # character + filtered.extend(buf[prev:]) + + return filtered diff --git a/env/lib/python3.6/site-packages/chardet/cli/chardetect.py b/env/lib/python3.6/site-packages/chardet/cli/chardetect.py new file mode 100644 index 0000000..f0a4cc5 --- /dev/null +++ b/env/lib/python3.6/site-packages/chardet/cli/chardetect.py @@ -0,0 +1,85 @@ +#!/usr/bin/env python +""" +Script which takes one or more file paths and reports on their detected +encodings + +Example:: + + % chardetect somefile someotherfile + somefile: windows-1252 with confidence 0.5 + someotherfile: ascii with confidence 1.0 + +If no paths are provided, it takes its input from stdin. + +""" + +from __future__ import absolute_import, print_function, unicode_literals + +import argparse +import sys + +from chardet import __version__ +from chardet.compat import PY2 +from chardet.universaldetector import UniversalDetector + + +def description_of(lines, name='stdin'): + """ + Return a string describing the probable encoding of a file or + list of strings. + + :param lines: The lines to get the encoding of. + :type lines: Iterable of bytes + :param name: Name of file or collection of lines + :type name: str + """ + u = UniversalDetector() + for line in lines: + line = bytearray(line) + u.feed(line) + # shortcut out of the loop to save reading further - particularly useful if we read a BOM. + if u.done: + break + u.close() + result = u.result + if PY2: + name = name.decode(sys.getfilesystemencoding(), 'ignore') + if result['encoding']: + return '{0}: {1} with confidence {2}'.format(name, result['encoding'], + result['confidence']) + else: + return '{0}: no result'.format(name) + + +def main(argv=None): + """ + Handles command line arguments and gets things started. + + :param argv: List of arguments, as if specified on the command-line. + If None, ``sys.argv[1:]`` is used instead. + :type argv: list of str + """ + # Get command line arguments + parser = argparse.ArgumentParser( + description="Takes one or more file paths and reports their detected \ + encodings") + parser.add_argument('input', + help='File whose encoding we would like to determine. \ + (default: stdin)', + type=argparse.FileType('rb'), nargs='*', + default=[sys.stdin if PY2 else sys.stdin.buffer]) + parser.add_argument('--version', action='version', + version='%(prog)s {0}'.format(__version__)) + args = parser.parse_args(argv) + + for f in args.input: + if f.isatty(): + print("You are running chardetect interactively. Press " + + "CTRL-D twice at the start of a blank line to signal the " + + "end of your input. 
If you want help, run chardetect " + + "--help\n", file=sys.stderr) + print(description_of(f, f.name)) + + +if __name__ == '__main__': + main() diff --git a/env/lib/python3.6/site-packages/chardet/codingstatemachine.py b/env/lib/python3.6/site-packages/chardet/codingstatemachine.py new file mode 100644 index 0000000..68fba44 --- /dev/null +++ b/env/lib/python3.6/site-packages/chardet/codingstatemachine.py @@ -0,0 +1,88 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +import logging + +from .enums import MachineState + + +class CodingStateMachine(object): + """ + A state machine to verify a byte sequence for a particular encoding. For + each byte the detector receives, it will feed that byte to every active + state machine available, one byte at a time. The state machine changes its + state based on its previous state and the byte it receives. There are 3 + states in a state machine that are of interest to an auto-detector: + + START state: This is the state to start with, or a legal byte sequence + (i.e. a valid code point) for character has been identified. + + ME state: This indicates that the state machine identified a byte sequence + that is specific to the charset it is designed for and that + there is no other possible encoding which can contain this byte + sequence. This will to lead to an immediate positive answer for + the detector. + + ERROR state: This indicates the state machine identified an illegal byte + sequence for that encoding. This will lead to an immediate + negative answer for this encoding. Detector will exclude this + encoding from consideration from here on. 
+ """ + def __init__(self, sm): + self._model = sm + self._curr_byte_pos = 0 + self._curr_char_len = 0 + self._curr_state = None + self.logger = logging.getLogger(__name__) + self.reset() + + def reset(self): + self._curr_state = MachineState.START + + def next_state(self, c): + # for each byte we get its class + # if it is first byte, we also get byte length + byte_class = self._model['class_table'][c] + if self._curr_state == MachineState.START: + self._curr_byte_pos = 0 + self._curr_char_len = self._model['char_len_table'][byte_class] + # from byte's class and state_table, we get its next state + curr_state = (self._curr_state * self._model['class_factor'] + + byte_class) + self._curr_state = self._model['state_table'][curr_state] + self._curr_byte_pos += 1 + return self._curr_state + + def get_current_charlen(self): + return self._curr_char_len + + def get_coding_state_machine(self): + return self._model['name'] + + @property + def language(self): + return self._model['language'] diff --git a/env/lib/python3.6/site-packages/chardet/compat.py b/env/lib/python3.6/site-packages/chardet/compat.py new file mode 100644 index 0000000..ddd7468 --- /dev/null +++ b/env/lib/python3.6/site-packages/chardet/compat.py @@ -0,0 +1,34 @@ +######################## BEGIN LICENSE BLOCK ######################## +# Contributor(s): +# Dan Blanchard +# Ian Cordasco +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +import sys + + +if sys.version_info < (3, 0): + PY2 = True + PY3 = False + base_str = (str, unicode) + text_type = unicode +else: + PY2 = False + PY3 = True + base_str = (bytes, str) + text_type = str diff --git a/env/lib/python3.6/site-packages/chardet/cp949prober.py b/env/lib/python3.6/site-packages/chardet/cp949prober.py new file mode 100644 index 0000000..efd793a --- /dev/null +++ b/env/lib/python3.6/site-packages/chardet/cp949prober.py @@ -0,0 +1,49 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .chardistribution import EUCKRDistributionAnalysis +from .codingstatemachine import CodingStateMachine +from .mbcharsetprober import MultiByteCharSetProber +from .mbcssm import CP949_SM_MODEL + + +class CP949Prober(MultiByteCharSetProber): + def __init__(self): + super(CP949Prober, self).__init__() + self.coding_sm = CodingStateMachine(CP949_SM_MODEL) + # NOTE: CP949 is a superset of EUC-KR, so the distribution should be + # not different. + self.distribution_analyzer = EUCKRDistributionAnalysis() + self.reset() + + @property + def charset_name(self): + return "CP949" + + @property + def language(self): + return "Korean" diff --git a/env/lib/python3.6/site-packages/chardet/enums.py b/env/lib/python3.6/site-packages/chardet/enums.py new file mode 100644 index 0000000..0451207 --- /dev/null +++ b/env/lib/python3.6/site-packages/chardet/enums.py @@ -0,0 +1,76 @@ +""" +All of the Enums that are used throughout the chardet package. + +:author: Dan Blanchard (dan.blanchard@gmail.com) +""" + + +class InputState(object): + """ + This enum represents the different states a universal detector can be in. + """ + PURE_ASCII = 0 + ESC_ASCII = 1 + HIGH_BYTE = 2 + + +class LanguageFilter(object): + """ + This enum represents the different language filters we can apply to a + ``UniversalDetector``. + """ + CHINESE_SIMPLIFIED = 0x01 + CHINESE_TRADITIONAL = 0x02 + JAPANESE = 0x04 + KOREAN = 0x08 + NON_CJK = 0x10 + ALL = 0x1F + CHINESE = CHINESE_SIMPLIFIED | CHINESE_TRADITIONAL + CJK = CHINESE | JAPANESE | KOREAN + + +class ProbingState(object): + """ + This enum represents the different states a prober can be in. + """ + DETECTING = 0 + FOUND_IT = 1 + NOT_ME = 2 + + +class MachineState(object): + """ + This enum represents the different states a state machine can be in. + """ + START = 0 + ERROR = 1 + ITS_ME = 2 + + +class SequenceLikelihood(object): + """ + This enum represents the likelihood of a character following the previous one. + """ + NEGATIVE = 0 + UNLIKELY = 1 + LIKELY = 2 + POSITIVE = 3 + + @classmethod + def get_num_categories(cls): + """:returns: The number of likelihood categories in the enum.""" + return 4 + + +class CharacterCategory(object): + """ + This enum represents the different categories language models for + ``SingleByteCharsetProber`` put characters into. + + Anything less than CONTROL is considered a letter. + """ + UNDEFINED = 255 + LINE_BREAK = 254 + SYMBOL = 253 + DIGIT = 252 + CONTROL = 251 diff --git a/env/lib/python3.6/site-packages/chardet/escprober.py b/env/lib/python3.6/site-packages/chardet/escprober.py new file mode 100644 index 0000000..c70493f --- /dev/null +++ b/env/lib/python3.6/site-packages/chardet/escprober.py @@ -0,0 +1,101 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. 
+# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetprober import CharSetProber +from .codingstatemachine import CodingStateMachine +from .enums import LanguageFilter, ProbingState, MachineState +from .escsm import (HZ_SM_MODEL, ISO2022CN_SM_MODEL, ISO2022JP_SM_MODEL, + ISO2022KR_SM_MODEL) + + +class EscCharSetProber(CharSetProber): + """ + This CharSetProber uses a "code scheme" approach for detecting encodings, + whereby easily recognizable escape or shift sequences are relied on to + identify these encodings. + """ + + def __init__(self, lang_filter=None): + super(EscCharSetProber, self).__init__(lang_filter=lang_filter) + self.coding_sm = [] + if self.lang_filter & LanguageFilter.CHINESE_SIMPLIFIED: + self.coding_sm.append(CodingStateMachine(HZ_SM_MODEL)) + self.coding_sm.append(CodingStateMachine(ISO2022CN_SM_MODEL)) + if self.lang_filter & LanguageFilter.JAPANESE: + self.coding_sm.append(CodingStateMachine(ISO2022JP_SM_MODEL)) + if self.lang_filter & LanguageFilter.KOREAN: + self.coding_sm.append(CodingStateMachine(ISO2022KR_SM_MODEL)) + self.active_sm_count = None + self._detected_charset = None + self._detected_language = None + self._state = None + self.reset() + + def reset(self): + super(EscCharSetProber, self).reset() + for coding_sm in self.coding_sm: + if not coding_sm: + continue + coding_sm.active = True + coding_sm.reset() + self.active_sm_count = len(self.coding_sm) + self._detected_charset = None + self._detected_language = None + + @property + def charset_name(self): + return self._detected_charset + + @property + def language(self): + return self._detected_language + + def get_confidence(self): + if self._detected_charset: + return 0.99 + else: + return 0.00 + + def feed(self, byte_str): + for c in byte_str: + for coding_sm in self.coding_sm: + if not coding_sm or not coding_sm.active: + continue + coding_state = coding_sm.next_state(c) + if coding_state == MachineState.ERROR: + coding_sm.active = False + self.active_sm_count -= 1 + if self.active_sm_count <= 0: + self._state = ProbingState.NOT_ME + return self.state + elif coding_state == MachineState.ITS_ME: + self._state = ProbingState.FOUND_IT + self._detected_charset = coding_sm.get_coding_state_machine() + self._detected_language = coding_sm.language + return self.state + + return self.state diff --git a/env/lib/python3.6/site-packages/chardet/escsm.py b/env/lib/python3.6/site-packages/chardet/escsm.py new file mode 100644 index 0000000..0069523 --- /dev/null +++ b/env/lib/python3.6/site-packages/chardet/escsm.py @@ -0,0 +1,246 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. 
+# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .enums import MachineState + +HZ_CLS = ( +1,0,0,0,0,0,0,0, # 00 - 07 +0,0,0,0,0,0,0,0, # 08 - 0f +0,0,0,0,0,0,0,0, # 10 - 17 +0,0,0,1,0,0,0,0, # 18 - 1f +0,0,0,0,0,0,0,0, # 20 - 27 +0,0,0,0,0,0,0,0, # 28 - 2f +0,0,0,0,0,0,0,0, # 30 - 37 +0,0,0,0,0,0,0,0, # 38 - 3f +0,0,0,0,0,0,0,0, # 40 - 47 +0,0,0,0,0,0,0,0, # 48 - 4f +0,0,0,0,0,0,0,0, # 50 - 57 +0,0,0,0,0,0,0,0, # 58 - 5f +0,0,0,0,0,0,0,0, # 60 - 67 +0,0,0,0,0,0,0,0, # 68 - 6f +0,0,0,0,0,0,0,0, # 70 - 77 +0,0,0,4,0,5,2,0, # 78 - 7f +1,1,1,1,1,1,1,1, # 80 - 87 +1,1,1,1,1,1,1,1, # 88 - 8f +1,1,1,1,1,1,1,1, # 90 - 97 +1,1,1,1,1,1,1,1, # 98 - 9f +1,1,1,1,1,1,1,1, # a0 - a7 +1,1,1,1,1,1,1,1, # a8 - af +1,1,1,1,1,1,1,1, # b0 - b7 +1,1,1,1,1,1,1,1, # b8 - bf +1,1,1,1,1,1,1,1, # c0 - c7 +1,1,1,1,1,1,1,1, # c8 - cf +1,1,1,1,1,1,1,1, # d0 - d7 +1,1,1,1,1,1,1,1, # d8 - df +1,1,1,1,1,1,1,1, # e0 - e7 +1,1,1,1,1,1,1,1, # e8 - ef +1,1,1,1,1,1,1,1, # f0 - f7 +1,1,1,1,1,1,1,1, # f8 - ff +) + +HZ_ST = ( +MachineState.START,MachineState.ERROR, 3,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,# 00-07 +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 08-0f +MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START, 4,MachineState.ERROR,# 10-17 + 5,MachineState.ERROR, 6,MachineState.ERROR, 5, 5, 4,MachineState.ERROR,# 18-1f + 4,MachineState.ERROR, 4, 4, 4,MachineState.ERROR, 4,MachineState.ERROR,# 20-27 + 4,MachineState.ITS_ME,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 28-2f +) + +HZ_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0) + +HZ_SM_MODEL = {'class_table': HZ_CLS, + 'class_factor': 6, + 'state_table': HZ_ST, + 'char_len_table': HZ_CHAR_LEN_TABLE, + 'name': "HZ-GB-2312", + 'language': 'Chinese'} + +ISO2022CN_CLS = ( +2,0,0,0,0,0,0,0, # 00 - 07 +0,0,0,0,0,0,0,0, # 08 - 0f +0,0,0,0,0,0,0,0, # 10 - 17 +0,0,0,1,0,0,0,0, # 18 - 1f +0,0,0,0,0,0,0,0, # 20 - 27 +0,3,0,0,0,0,0,0, # 28 - 2f +0,0,0,0,0,0,0,0, # 30 - 37 +0,0,0,0,0,0,0,0, # 38 - 3f +0,0,0,4,0,0,0,0, # 40 - 47 +0,0,0,0,0,0,0,0, # 48 - 4f +0,0,0,0,0,0,0,0, # 50 - 57 +0,0,0,0,0,0,0,0, # 58 - 5f +0,0,0,0,0,0,0,0, # 60 - 67 +0,0,0,0,0,0,0,0, # 68 - 6f +0,0,0,0,0,0,0,0, # 70 - 77 +0,0,0,0,0,0,0,0, # 78 - 7f +2,2,2,2,2,2,2,2, # 80 - 87 +2,2,2,2,2,2,2,2, # 88 - 8f 
+2,2,2,2,2,2,2,2, # 90 - 97 +2,2,2,2,2,2,2,2, # 98 - 9f +2,2,2,2,2,2,2,2, # a0 - a7 +2,2,2,2,2,2,2,2, # a8 - af +2,2,2,2,2,2,2,2, # b0 - b7 +2,2,2,2,2,2,2,2, # b8 - bf +2,2,2,2,2,2,2,2, # c0 - c7 +2,2,2,2,2,2,2,2, # c8 - cf +2,2,2,2,2,2,2,2, # d0 - d7 +2,2,2,2,2,2,2,2, # d8 - df +2,2,2,2,2,2,2,2, # e0 - e7 +2,2,2,2,2,2,2,2, # e8 - ef +2,2,2,2,2,2,2,2, # f0 - f7 +2,2,2,2,2,2,2,2, # f8 - ff +) + +ISO2022CN_ST = ( +MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 00-07 +MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 08-0f +MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 10-17 +MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 4,MachineState.ERROR,# 18-1f +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 20-27 + 5, 6,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 28-2f +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 30-37 +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.START,# 38-3f +) + +ISO2022CN_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0, 0, 0, 0) + +ISO2022CN_SM_MODEL = {'class_table': ISO2022CN_CLS, + 'class_factor': 9, + 'state_table': ISO2022CN_ST, + 'char_len_table': ISO2022CN_CHAR_LEN_TABLE, + 'name': "ISO-2022-CN", + 'language': 'Chinese'} + +ISO2022JP_CLS = ( +2,0,0,0,0,0,0,0, # 00 - 07 +0,0,0,0,0,0,2,2, # 08 - 0f +0,0,0,0,0,0,0,0, # 10 - 17 +0,0,0,1,0,0,0,0, # 18 - 1f +0,0,0,0,7,0,0,0, # 20 - 27 +3,0,0,0,0,0,0,0, # 28 - 2f +0,0,0,0,0,0,0,0, # 30 - 37 +0,0,0,0,0,0,0,0, # 38 - 3f +6,0,4,0,8,0,0,0, # 40 - 47 +0,9,5,0,0,0,0,0, # 48 - 4f +0,0,0,0,0,0,0,0, # 50 - 57 +0,0,0,0,0,0,0,0, # 58 - 5f +0,0,0,0,0,0,0,0, # 60 - 67 +0,0,0,0,0,0,0,0, # 68 - 6f +0,0,0,0,0,0,0,0, # 70 - 77 +0,0,0,0,0,0,0,0, # 78 - 7f +2,2,2,2,2,2,2,2, # 80 - 87 +2,2,2,2,2,2,2,2, # 88 - 8f +2,2,2,2,2,2,2,2, # 90 - 97 +2,2,2,2,2,2,2,2, # 98 - 9f +2,2,2,2,2,2,2,2, # a0 - a7 +2,2,2,2,2,2,2,2, # a8 - af +2,2,2,2,2,2,2,2, # b0 - b7 +2,2,2,2,2,2,2,2, # b8 - bf +2,2,2,2,2,2,2,2, # c0 - c7 +2,2,2,2,2,2,2,2, # c8 - cf +2,2,2,2,2,2,2,2, # d0 - d7 +2,2,2,2,2,2,2,2, # d8 - df +2,2,2,2,2,2,2,2, # e0 - e7 +2,2,2,2,2,2,2,2, # e8 - ef +2,2,2,2,2,2,2,2, # f0 - f7 +2,2,2,2,2,2,2,2, # f8 - ff +) + +ISO2022JP_ST = ( +MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 00-07 +MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 08-0f +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 10-17 +MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,# 18-1f +MachineState.ERROR, 
5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 4,MachineState.ERROR,MachineState.ERROR,# 20-27 +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 6,MachineState.ITS_ME,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,# 28-2f +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,# 30-37 +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 38-3f +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.START,MachineState.START,# 40-47 +) + +ISO2022JP_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0, 0, 0, 0, 0) + +ISO2022JP_SM_MODEL = {'class_table': ISO2022JP_CLS, + 'class_factor': 10, + 'state_table': ISO2022JP_ST, + 'char_len_table': ISO2022JP_CHAR_LEN_TABLE, + 'name': "ISO-2022-JP", + 'language': 'Japanese'} + +ISO2022KR_CLS = ( +2,0,0,0,0,0,0,0, # 00 - 07 +0,0,0,0,0,0,0,0, # 08 - 0f +0,0,0,0,0,0,0,0, # 10 - 17 +0,0,0,1,0,0,0,0, # 18 - 1f +0,0,0,0,3,0,0,0, # 20 - 27 +0,4,0,0,0,0,0,0, # 28 - 2f +0,0,0,0,0,0,0,0, # 30 - 37 +0,0,0,0,0,0,0,0, # 38 - 3f +0,0,0,5,0,0,0,0, # 40 - 47 +0,0,0,0,0,0,0,0, # 48 - 4f +0,0,0,0,0,0,0,0, # 50 - 57 +0,0,0,0,0,0,0,0, # 58 - 5f +0,0,0,0,0,0,0,0, # 60 - 67 +0,0,0,0,0,0,0,0, # 68 - 6f +0,0,0,0,0,0,0,0, # 70 - 77 +0,0,0,0,0,0,0,0, # 78 - 7f +2,2,2,2,2,2,2,2, # 80 - 87 +2,2,2,2,2,2,2,2, # 88 - 8f +2,2,2,2,2,2,2,2, # 90 - 97 +2,2,2,2,2,2,2,2, # 98 - 9f +2,2,2,2,2,2,2,2, # a0 - a7 +2,2,2,2,2,2,2,2, # a8 - af +2,2,2,2,2,2,2,2, # b0 - b7 +2,2,2,2,2,2,2,2, # b8 - bf +2,2,2,2,2,2,2,2, # c0 - c7 +2,2,2,2,2,2,2,2, # c8 - cf +2,2,2,2,2,2,2,2, # d0 - d7 +2,2,2,2,2,2,2,2, # d8 - df +2,2,2,2,2,2,2,2, # e0 - e7 +2,2,2,2,2,2,2,2, # e8 - ef +2,2,2,2,2,2,2,2, # f0 - f7 +2,2,2,2,2,2,2,2, # f8 - ff +) + +ISO2022KR_ST = ( +MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,# 00-07 +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 08-0f +MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 4,MachineState.ERROR,MachineState.ERROR,# 10-17 +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 18-1f +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 20-27 +) + +ISO2022KR_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0) + +ISO2022KR_SM_MODEL = {'class_table': ISO2022KR_CLS, + 'class_factor': 6, + 'state_table': ISO2022KR_ST, + 'char_len_table': ISO2022KR_CHAR_LEN_TABLE, + 'name': "ISO-2022-KR", + 'language': 'Korean'} + + diff --git a/env/lib/python3.6/site-packages/chardet/eucjpprober.py b/env/lib/python3.6/site-packages/chardet/eucjpprober.py new file mode 100644 index 0000000..20ce8f7 --- /dev/null +++ b/env/lib/python3.6/site-packages/chardet/eucjpprober.py @@ -0,0 +1,92 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. 
+# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .enums import ProbingState, MachineState +from .mbcharsetprober import MultiByteCharSetProber +from .codingstatemachine import CodingStateMachine +from .chardistribution import EUCJPDistributionAnalysis +from .jpcntx import EUCJPContextAnalysis +from .mbcssm import EUCJP_SM_MODEL + + +class EUCJPProber(MultiByteCharSetProber): + def __init__(self): + super(EUCJPProber, self).__init__() + self.coding_sm = CodingStateMachine(EUCJP_SM_MODEL) + self.distribution_analyzer = EUCJPDistributionAnalysis() + self.context_analyzer = EUCJPContextAnalysis() + self.reset() + + def reset(self): + super(EUCJPProber, self).reset() + self.context_analyzer.reset() + + @property + def charset_name(self): + return "EUC-JP" + + @property + def language(self): + return "Japanese" + + def feed(self, byte_str): + for i in range(len(byte_str)): + # PY3K: byte_str is a byte array, so byte_str[i] is an int, not a byte + coding_state = self.coding_sm.next_state(byte_str[i]) + if coding_state == MachineState.ERROR: + self.logger.debug('%s %s prober hit error at byte %s', + self.charset_name, self.language, i) + self._state = ProbingState.NOT_ME + break + elif coding_state == MachineState.ITS_ME: + self._state = ProbingState.FOUND_IT + break + elif coding_state == MachineState.START: + char_len = self.coding_sm.get_current_charlen() + if i == 0: + self._last_char[1] = byte_str[0] + self.context_analyzer.feed(self._last_char, char_len) + self.distribution_analyzer.feed(self._last_char, char_len) + else: + self.context_analyzer.feed(byte_str[i - 1:i + 1], + char_len) + self.distribution_analyzer.feed(byte_str[i - 1:i + 1], + char_len) + + self._last_char[0] = byte_str[-1] + + if self.state == ProbingState.DETECTING: + if (self.context_analyzer.got_enough_data() and + (self.get_confidence() > self.SHORTCUT_THRESHOLD)): + self._state = ProbingState.FOUND_IT + + return self.state + + def get_confidence(self): + context_conf = self.context_analyzer.get_confidence() + distrib_conf = self.distribution_analyzer.get_confidence() + return max(context_conf, distrib_conf) diff --git a/env/lib/python3.6/site-packages/chardet/euckrfreq.py b/env/lib/python3.6/site-packages/chardet/euckrfreq.py new file mode 100644 index 0000000..b68078c --- /dev/null +++ b/env/lib/python3.6/site-packages/chardet/euckrfreq.py @@ -0,0 +1,195 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. 
+# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# Sampling from about 20M text materials include literature and computer technology + +# 128 --> 0.79 +# 256 --> 0.92 +# 512 --> 0.986 +# 1024 --> 0.99944 +# 2048 --> 0.99999 +# +# Idea Distribution Ratio = 0.98653 / (1-0.98653) = 73.24 +# Random Distribution Ration = 512 / (2350-512) = 0.279. +# +# Typical Distribution Ratio + +EUCKR_TYPICAL_DISTRIBUTION_RATIO = 6.0 + +EUCKR_TABLE_SIZE = 2352 + +# Char to FreqOrder table , +EUCKR_CHAR_TO_FREQ_ORDER = ( + 13, 130, 120,1396, 481,1719,1720, 328, 609, 212,1721, 707, 400, 299,1722, 87, +1397,1723, 104, 536,1117,1203,1724,1267, 685,1268, 508,1725,1726,1727,1728,1398, +1399,1729,1730,1731, 141, 621, 326,1057, 368,1732, 267, 488, 20,1733,1269,1734, + 945,1400,1735, 47, 904,1270,1736,1737, 773, 248,1738, 409, 313, 786, 429,1739, + 116, 987, 813,1401, 683, 75,1204, 145,1740,1741,1742,1743, 16, 847, 667, 622, + 708,1744,1745,1746, 966, 787, 304, 129,1747, 60, 820, 123, 676,1748,1749,1750, +1751, 617,1752, 626,1753,1754,1755,1756, 653,1757,1758,1759,1760,1761,1762, 856, + 344,1763,1764,1765,1766, 89, 401, 418, 806, 905, 848,1767,1768,1769, 946,1205, + 709,1770,1118,1771, 241,1772,1773,1774,1271,1775, 569,1776, 999,1777,1778,1779, +1780, 337, 751,1058, 28, 628, 254,1781, 177, 906, 270, 349, 891,1079,1782, 19, +1783, 379,1784, 315,1785, 629, 754,1402, 559,1786, 636, 203,1206,1787, 710, 567, +1788, 935, 814,1789,1790,1207, 766, 528,1791,1792,1208,1793,1794,1795,1796,1797, +1403,1798,1799, 533,1059,1404,1405,1156,1406, 936, 884,1080,1800, 351,1801,1802, +1803,1804,1805, 801,1806,1807,1808,1119,1809,1157, 714, 474,1407,1810, 298, 899, + 885,1811,1120, 802,1158,1812, 892,1813,1814,1408, 659,1815,1816,1121,1817,1818, +1819,1820,1821,1822, 319,1823, 594, 545,1824, 815, 937,1209,1825,1826, 573,1409, +1022,1827,1210,1828,1829,1830,1831,1832,1833, 556, 722, 807,1122,1060,1834, 697, +1835, 900, 557, 715,1836,1410, 540,1411, 752,1159, 294, 597,1211, 976, 803, 770, +1412,1837,1838, 39, 794,1413, 358,1839, 371, 925,1840, 453, 661, 788, 531, 723, + 544,1023,1081, 869, 91,1841, 392, 430, 790, 602,1414, 677,1082, 457,1415,1416, +1842,1843, 475, 327,1024,1417, 795, 121,1844, 733, 403,1418,1845,1846,1847, 300, + 119, 711,1212, 627,1848,1272, 207,1849,1850, 796,1213, 382,1851, 519,1852,1083, + 893,1853,1854,1855, 367, 809, 487, 671,1856, 663,1857,1858, 956, 471, 306, 857, +1859,1860,1160,1084,1861,1862,1863,1864,1865,1061,1866,1867,1868,1869,1870,1871, + 282, 96, 574,1872, 502,1085,1873,1214,1874, 907,1875,1876, 827, 977,1419,1420, +1421, 268,1877,1422,1878,1879,1880, 308,1881, 2, 537,1882,1883,1215,1884,1885, + 127, 791,1886,1273,1423,1887, 34, 336, 404, 643,1888, 571, 654, 894, 840,1889, + 0, 886,1274, 122, 575, 260, 
908, 938,1890,1275, 410, 316,1891,1892, 100,1893, +1894,1123, 48,1161,1124,1025,1895, 633, 901,1276,1896,1897, 115, 816,1898, 317, +1899, 694,1900, 909, 734,1424, 572, 866,1425, 691, 85, 524,1010, 543, 394, 841, +1901,1902,1903,1026,1904,1905,1906,1907,1908,1909, 30, 451, 651, 988, 310,1910, +1911,1426, 810,1216, 93,1912,1913,1277,1217,1914, 858, 759, 45, 58, 181, 610, + 269,1915,1916, 131,1062, 551, 443,1000, 821,1427, 957, 895,1086,1917,1918, 375, +1919, 359,1920, 687,1921, 822,1922, 293,1923,1924, 40, 662, 118, 692, 29, 939, + 887, 640, 482, 174,1925, 69,1162, 728,1428, 910,1926,1278,1218,1279, 386, 870, + 217, 854,1163, 823,1927,1928,1929,1930, 834,1931, 78,1932, 859,1933,1063,1934, +1935,1936,1937, 438,1164, 208, 595,1938,1939,1940,1941,1219,1125,1942, 280, 888, +1429,1430,1220,1431,1943,1944,1945,1946,1947,1280, 150, 510,1432,1948,1949,1950, +1951,1952,1953,1954,1011,1087,1955,1433,1043,1956, 881,1957, 614, 958,1064,1065, +1221,1958, 638,1001, 860, 967, 896,1434, 989, 492, 553,1281,1165,1959,1282,1002, +1283,1222,1960,1961,1962,1963, 36, 383, 228, 753, 247, 454,1964, 876, 678,1965, +1966,1284, 126, 464, 490, 835, 136, 672, 529, 940,1088,1435, 473,1967,1968, 467, + 50, 390, 227, 587, 279, 378, 598, 792, 968, 240, 151, 160, 849, 882,1126,1285, + 639,1044, 133, 140, 288, 360, 811, 563,1027, 561, 142, 523,1969,1970,1971, 7, + 103, 296, 439, 407, 506, 634, 990,1972,1973,1974,1975, 645,1976,1977,1978,1979, +1980,1981, 236,1982,1436,1983,1984,1089, 192, 828, 618, 518,1166, 333,1127,1985, + 818,1223,1986,1987,1988,1989,1990,1991,1992,1993, 342,1128,1286, 746, 842,1994, +1995, 560, 223,1287, 98, 8, 189, 650, 978,1288,1996,1437,1997, 17, 345, 250, + 423, 277, 234, 512, 226, 97, 289, 42, 167,1998, 201,1999,2000, 843, 836, 824, + 532, 338, 783,1090, 182, 576, 436,1438,1439, 527, 500,2001, 947, 889,2002,2003, +2004,2005, 262, 600, 314, 447,2006, 547,2007, 693, 738,1129,2008, 71,1440, 745, + 619, 688,2009, 829,2010,2011, 147,2012, 33, 948,2013,2014, 74, 224,2015, 61, + 191, 918, 399, 637,2016,1028,1130, 257, 902,2017,2018,2019,2020,2021,2022,2023, +2024,2025,2026, 837,2027,2028,2029,2030, 179, 874, 591, 52, 724, 246,2031,2032, +2033,2034,1167, 969,2035,1289, 630, 605, 911,1091,1168,2036,2037,2038,1441, 912, +2039, 623,2040,2041, 253,1169,1290,2042,1442, 146, 620, 611, 577, 433,2043,1224, + 719,1170, 959, 440, 437, 534, 84, 388, 480,1131, 159, 220, 198, 679,2044,1012, + 819,1066,1443, 113,1225, 194, 318,1003,1029,2045,2046,2047,2048,1067,2049,2050, +2051,2052,2053, 59, 913, 112,2054, 632,2055, 455, 144, 739,1291,2056, 273, 681, + 499,2057, 448,2058,2059, 760,2060,2061, 970, 384, 169, 245,1132,2062,2063, 414, +1444,2064,2065, 41, 235,2066, 157, 252, 877, 568, 919, 789, 580,2067, 725,2068, +2069,1292,2070,2071,1445,2072,1446,2073,2074, 55, 588, 66,1447, 271,1092,2075, +1226,2076, 960,1013, 372,2077,2078,2079,2080,2081,1293,2082,2083,2084,2085, 850, +2086,2087,2088,2089,2090, 186,2091,1068, 180,2092,2093,2094, 109,1227, 522, 606, +2095, 867,1448,1093, 991,1171, 926, 353,1133,2096, 581,2097,2098,2099,1294,1449, +1450,2100, 596,1172,1014,1228,2101,1451,1295,1173,1229,2102,2103,1296,1134,1452, + 949,1135,2104,2105,1094,1453,1454,1455,2106,1095,2107,2108,2109,2110,2111,2112, +2113,2114,2115,2116,2117, 804,2118,2119,1230,1231, 805,1456, 405,1136,2120,2121, +2122,2123,2124, 720, 701,1297, 992,1457, 927,1004,2125,2126,2127,2128,2129,2130, + 22, 417,2131, 303,2132, 385,2133, 971, 520, 513,2134,1174, 73,1096, 231, 274, + 962,1458, 673,2135,1459,2136, 152,1137,2137,2138,2139,2140,1005,1138,1460,1139, 
+2141,2142,2143,2144, 11, 374, 844,2145, 154,1232, 46,1461,2146, 838, 830, 721, +1233, 106,2147, 90, 428, 462, 578, 566,1175, 352,2148,2149, 538,1234, 124,1298, +2150,1462, 761, 565,2151, 686,2152, 649,2153, 72, 173,2154, 460, 415,2155,1463, +2156,1235, 305,2157,2158,2159,2160,2161,2162, 579,2163,2164,2165,2166,2167, 747, +2168,2169,2170,2171,1464, 669,2172,2173,2174,2175,2176,1465,2177, 23, 530, 285, +2178, 335, 729,2179, 397,2180,2181,2182,1030,2183,2184, 698,2185,2186, 325,2187, +2188, 369,2189, 799,1097,1015, 348,2190,1069, 680,2191, 851,1466,2192,2193, 10, +2194, 613, 424,2195, 979, 108, 449, 589, 27, 172, 81,1031, 80, 774, 281, 350, +1032, 525, 301, 582,1176,2196, 674,1045,2197,2198,1467, 730, 762,2199,2200,2201, +2202,1468,2203, 993,2204,2205, 266,1070, 963,1140,2206,2207,2208, 664,1098, 972, +2209,2210,2211,1177,1469,1470, 871,2212,2213,2214,2215,2216,1471,2217,2218,2219, +2220,2221,2222,2223,2224,2225,2226,2227,1472,1236,2228,2229,2230,2231,2232,2233, +2234,2235,1299,2236,2237, 200,2238, 477, 373,2239,2240, 731, 825, 777,2241,2242, +2243, 521, 486, 548,2244,2245,2246,1473,1300, 53, 549, 137, 875, 76, 158,2247, +1301,1474, 469, 396,1016, 278, 712,2248, 321, 442, 503, 767, 744, 941,1237,1178, +1475,2249, 82, 178,1141,1179, 973,2250,1302,2251, 297,2252,2253, 570,2254,2255, +2256, 18, 450, 206,2257, 290, 292,1142,2258, 511, 162, 99, 346, 164, 735,2259, +1476,1477, 4, 554, 343, 798,1099,2260,1100,2261, 43, 171,1303, 139, 215,2262, +2263, 717, 775,2264,1033, 322, 216,2265, 831,2266, 149,2267,1304,2268,2269, 702, +1238, 135, 845, 347, 309,2270, 484,2271, 878, 655, 238,1006,1478,2272, 67,2273, + 295,2274,2275, 461,2276, 478, 942, 412,2277,1034,2278,2279,2280, 265,2281, 541, +2282,2283,2284,2285,2286, 70, 852,1071,2287,2288,2289,2290, 21, 56, 509, 117, + 432,2291,2292, 331, 980, 552,1101, 148, 284, 105, 393,1180,1239, 755,2293, 187, +2294,1046,1479,2295, 340,2296, 63,1047, 230,2297,2298,1305, 763,1306, 101, 800, + 808, 494,2299,2300,2301, 903,2302, 37,1072, 14, 5,2303, 79, 675,2304, 312, +2305,2306,2307,2308,2309,1480, 6,1307,2310,2311,2312, 1, 470, 35, 24, 229, +2313, 695, 210, 86, 778, 15, 784, 592, 779, 32, 77, 855, 964,2314, 259,2315, + 501, 380,2316,2317, 83, 981, 153, 689,1308,1481,1482,1483,2318,2319, 716,1484, +2320,2321,2322,2323,2324,2325,1485,2326,2327, 128, 57, 68, 261,1048, 211, 170, +1240, 31,2328, 51, 435, 742,2329,2330,2331, 635,2332, 264, 456,2333,2334,2335, + 425,2336,1486, 143, 507, 263, 943,2337, 363, 920,1487, 256,1488,1102, 243, 601, +1489,2338,2339,2340,2341,2342,2343,2344, 861,2345,2346,2347,2348,2349,2350, 395, +2351,1490,1491, 62, 535, 166, 225,2352,2353, 668, 419,1241, 138, 604, 928,2354, +1181,2355,1492,1493,2356,2357,2358,1143,2359, 696,2360, 387, 307,1309, 682, 476, +2361,2362, 332, 12, 222, 156,2363, 232,2364, 641, 276, 656, 517,1494,1495,1035, + 416, 736,1496,2365,1017, 586,2366,2367,2368,1497,2369, 242,2370,2371,2372,1498, +2373, 965, 713,2374,2375,2376,2377, 740, 982,1499, 944,1500,1007,2378,2379,1310, +1501,2380,2381,2382, 785, 329,2383,2384,1502,2385,2386,2387, 932,2388,1503,2389, +2390,2391,2392,1242,2393,2394,2395,2396,2397, 994, 950,2398,2399,2400,2401,1504, +1311,2402,2403,2404,2405,1049, 749,2406,2407, 853, 718,1144,1312,2408,1182,1505, +2409,2410, 255, 516, 479, 564, 550, 214,1506,1507,1313, 413, 239, 444, 339,1145, +1036,1508,1509,1314,1037,1510,1315,2411,1511,2412,2413,2414, 176, 703, 497, 624, + 593, 921, 302,2415, 341, 165,1103,1512,2416,1513,2417,2418,2419, 376,2420, 700, +2421,2422,2423, 258, 768,1316,2424,1183,2425, 995, 
608,2426,2427,2428,2429, 221, +2430,2431,2432,2433,2434,2435,2436,2437, 195, 323, 726, 188, 897, 983,1317, 377, + 644,1050, 879,2438, 452,2439,2440,2441,2442,2443,2444, 914,2445,2446,2447,2448, + 915, 489,2449,1514,1184,2450,2451, 515, 64, 427, 495,2452, 583,2453, 483, 485, +1038, 562, 213,1515, 748, 666,2454,2455,2456,2457, 334,2458, 780, 996,1008, 705, +1243,2459,2460,2461,2462,2463, 114,2464, 493,1146, 366, 163,1516, 961,1104,2465, + 291,2466,1318,1105,2467,1517, 365,2468, 355, 951,1244,2469,1319,2470, 631,2471, +2472, 218,1320, 364, 320, 756,1518,1519,1321,1520,1322,2473,2474,2475,2476, 997, +2477,2478,2479,2480, 665,1185,2481, 916,1521,2482,2483,2484, 584, 684,2485,2486, + 797,2487,1051,1186,2488,2489,2490,1522,2491,2492, 370,2493,1039,1187, 65,2494, + 434, 205, 463,1188,2495, 125, 812, 391, 402, 826, 699, 286, 398, 155, 781, 771, + 585,2496, 590, 505,1073,2497, 599, 244, 219, 917,1018, 952, 646,1523,2498,1323, +2499,2500, 49, 984, 354, 741,2501, 625,2502,1324,2503,1019, 190, 357, 757, 491, + 95, 782, 868,2504,2505,2506,2507,2508,2509, 134,1524,1074, 422,1525, 898,2510, + 161,2511,2512,2513,2514, 769,2515,1526,2516,2517, 411,1325,2518, 472,1527,2519, +2520,2521,2522,2523,2524, 985,2525,2526,2527,2528,2529,2530, 764,2531,1245,2532, +2533, 25, 204, 311,2534, 496,2535,1052,2536,2537,2538,2539,2540,2541,2542, 199, + 704, 504, 468, 758, 657,1528, 196, 44, 839,1246, 272, 750,2543, 765, 862,2544, +2545,1326,2546, 132, 615, 933,2547, 732,2548,2549,2550,1189,1529,2551, 283,1247, +1053, 607, 929,2552,2553,2554, 930, 183, 872, 616,1040,1147,2555,1148,1020, 441, + 249,1075,2556,2557,2558, 466, 743,2559,2560,2561, 92, 514, 426, 420, 526,2562, +2563,2564,2565,2566,2567,2568, 185,2569,2570,2571,2572, 776,1530, 658,2573, 362, +2574, 361, 922,1076, 793,2575,2576,2577,2578,2579,2580,1531, 251,2581,2582,2583, +2584,1532, 54, 612, 237,1327,2585,2586, 275, 408, 647, 111,2587,1533,1106, 465, + 3, 458, 9, 38,2588, 107, 110, 890, 209, 26, 737, 498,2589,1534,2590, 431, + 202, 88,1535, 356, 287,1107, 660,1149,2591, 381,1536, 986,1150, 445,1248,1151, + 974,2592,2593, 846,2594, 446, 953, 184,1249,1250, 727,2595, 923, 193, 883,2596, +2597,2598, 102, 324, 539, 817,2599, 421,1041,2600, 832,2601, 94, 175, 197, 406, +2602, 459,2603,2604,2605,2606,2607, 330, 555,2608,2609,2610, 706,1108, 389,2611, +2612,2613,2614, 233,2615, 833, 558, 931, 954,1251,2616,2617,1537, 546,2618,2619, +1009,2620,2621,2622,1538, 690,1328,2623, 955,2624,1539,2625,2626, 772,2627,2628, +2629,2630,2631, 924, 648, 863, 603,2632,2633, 934,1540, 864, 865,2634, 642,1042, + 670,1190,2635,2636,2637,2638, 168,2639, 652, 873, 542,1054,1541,2640,2641,2642, # 512, 256 +) + diff --git a/env/lib/python3.6/site-packages/chardet/euckrprober.py b/env/lib/python3.6/site-packages/chardet/euckrprober.py new file mode 100644 index 0000000..345a060 --- /dev/null +++ b/env/lib/python3.6/site-packages/chardet/euckrprober.py @@ -0,0 +1,47 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. 
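[Editor's note, not part of the patch hunks.] The header comments of euckrfreq.py above (and of euctwfreq.py and gb2312freq.py later in this patch) justify each *_TYPICAL_DISTRIBUTION_RATIO constant with a short piece of arithmetic: how much of the sampled text the N most frequent characters cover, versus what uniformly random characters would give. A minimal worked sketch of that arithmetic in Python, restating the EUC-KR numbers quoted in the header above; the variable names are illustrative only:

    # Worked example (not part of the vendored file): the ratio arithmetic from
    # the euckrfreq.py header comments above.
    coverage_512 = 0.98653                            # share of sampled EUC-KR text covered by the 512 most frequent chars
    ideal_ratio = coverage_512 / (1 - coverage_512)   # ~73.24 ("Idea[l] Distribution Ratio" in the comments)
    random_ratio = 512 / (2350 - 512)                 # ~0.279, the ratio random characters would give
    print(round(ideal_ratio, 2), round(random_ratio, 3))   # 73.24 0.279

The hand-tuned EUCKR_TYPICAL_DISTRIBUTION_RATIO = 6.0 sits between those two bounds; chardet's distribution analysers compare the frequent-character ratio observed in the input against this constant when scoring confidence.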
+# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .mbcharsetprober import MultiByteCharSetProber +from .codingstatemachine import CodingStateMachine +from .chardistribution import EUCKRDistributionAnalysis +from .mbcssm import EUCKR_SM_MODEL + + +class EUCKRProber(MultiByteCharSetProber): + def __init__(self): + super(EUCKRProber, self).__init__() + self.coding_sm = CodingStateMachine(EUCKR_SM_MODEL) + self.distribution_analyzer = EUCKRDistributionAnalysis() + self.reset() + + @property + def charset_name(self): + return "EUC-KR" + + @property + def language(self): + return "Korean" diff --git a/env/lib/python3.6/site-packages/chardet/euctwfreq.py b/env/lib/python3.6/site-packages/chardet/euctwfreq.py new file mode 100644 index 0000000..ed7a995 --- /dev/null +++ b/env/lib/python3.6/site-packages/chardet/euctwfreq.py @@ -0,0 +1,387 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
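[Editor's note, not part of the patch hunks.] EUCKRProber above, like EUCTWProber and the other multi-byte probers added by this patch, only wires a CodingStateMachine model to a distribution analyser backed by the matching *freq.py table; the shared feed logic lives in MultiByteCharSetProber. A minimal, illustrative sketch of exercising the vendored chardet 3.0.4 either through its public detect() helper or through this prober directly; the sample text is an assumption and exact confidence values will vary:

    # Illustrative usage only -- not part of the patch.
    import chardet
    from chardet.euckrprober import EUCKRProber

    sample = "한국어 문자 빈도 분석 예시".encode("euc-kr")  # assumed EUC-KR sample text

    # High-level API: UniversalDetector feeds EUCKRProber (among others) internally.
    print(chardet.detect(sample))  # typically {'encoding': 'EUC-KR', 'confidence': ..., 'language': 'Korean'}

    # Driving the single prober from this hunk directly:
    prober = EUCKRProber()
    prober.feed(sample)
    print(prober.charset_name, prober.get_confidence())  # 'EUC-KR' and a confidence in [0, 1]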
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# EUCTW frequency table +# Converted from big5 work +# by Taiwan's Mandarin Promotion Council +# + +# 128 --> 0.42261 +# 256 --> 0.57851 +# 512 --> 0.74851 +# 1024 --> 0.89384 +# 2048 --> 0.97583 +# +# Idea Distribution Ratio = 0.74851/(1-0.74851) =2.98 +# Random Distribution Ration = 512/(5401-512)=0.105 +# +# Typical Distribution Ratio about 25% of Ideal one, still much higher than RDR + +EUCTW_TYPICAL_DISTRIBUTION_RATIO = 0.75 + +# Char to FreqOrder table , +EUCTW_TABLE_SIZE = 5376 + +EUCTW_CHAR_TO_FREQ_ORDER = ( + 1,1800,1506, 255,1431, 198, 9, 82, 6,7310, 177, 202,3615,1256,2808, 110, # 2742 +3735, 33,3241, 261, 76, 44,2113, 16,2931,2184,1176, 659,3868, 26,3404,2643, # 2758 +1198,3869,3313,4060, 410,2211, 302, 590, 361,1963, 8, 204, 58,4296,7311,1931, # 2774 + 63,7312,7313, 317,1614, 75, 222, 159,4061,2412,1480,7314,3500,3068, 224,2809, # 2790 +3616, 3, 10,3870,1471, 29,2774,1135,2852,1939, 873, 130,3242,1123, 312,7315, # 2806 +4297,2051, 507, 252, 682,7316, 142,1914, 124, 206,2932, 34,3501,3173, 64, 604, # 2822 +7317,2494,1976,1977, 155,1990, 645, 641,1606,7318,3405, 337, 72, 406,7319, 80, # 2838 + 630, 238,3174,1509, 263, 939,1092,2644, 756,1440,1094,3406, 449, 69,2969, 591, # 2854 + 179,2095, 471, 115,2034,1843, 60, 50,2970, 134, 806,1868, 734,2035,3407, 180, # 2870 + 995,1607, 156, 537,2893, 688,7320, 319,1305, 779,2144, 514,2374, 298,4298, 359, # 2886 +2495, 90,2707,1338, 663, 11, 906,1099,2545, 20,2436, 182, 532,1716,7321, 732, # 2902 +1376,4062,1311,1420,3175, 25,2312,1056, 113, 399, 382,1949, 242,3408,2467, 529, # 2918 +3243, 475,1447,3617,7322, 117, 21, 656, 810,1297,2295,2329,3502,7323, 126,4063, # 2934 + 706, 456, 150, 613,4299, 71,1118,2036,4064, 145,3069, 85, 835, 486,2114,1246, # 2950 +1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,7324,2127,2354, 347,3736, 221, # 2966 +3503,3110,7325,1955,1153,4065, 83, 296,1199,3070, 192, 624, 93,7326, 822,1897, # 2982 +2810,3111, 795,2064, 991,1554,1542,1592, 27, 43,2853, 859, 139,1456, 860,4300, # 2998 + 437, 712,3871, 164,2392,3112, 695, 211,3017,2096, 195,3872,1608,3504,3505,3618, # 3014 +3873, 234, 811,2971,2097,3874,2229,1441,3506,1615,2375, 668,2076,1638, 305, 228, # 3030 +1664,4301, 467, 415,7327, 262,2098,1593, 239, 108, 300, 200,1033, 512,1247,2077, # 3046 +7328,7329,2173,3176,3619,2673, 593, 845,1062,3244, 88,1723,2037,3875,1950, 212, # 3062 + 266, 152, 149, 468,1898,4066,4302, 77, 187,7330,3018, 37, 5,2972,7331,3876, # 3078 +7332,7333, 39,2517,4303,2894,3177,2078, 55, 148, 74,4304, 545, 483,1474,1029, # 3094 +1665, 217,1869,1531,3113,1104,2645,4067, 24, 172,3507, 900,3877,3508,3509,4305, # 3110 + 32,1408,2811,1312, 329, 487,2355,2247,2708, 784,2674, 4,3019,3314,1427,1788, # 3126 + 188, 109, 499,7334,3620,1717,1789, 888,1217,3020,4306,7335,3510,7336,3315,1520, # 3142 +3621,3878, 196,1034, 775,7337,7338, 929,1815, 249, 439, 38,7339,1063,7340, 794, # 3158 +3879,1435,2296, 46, 178,3245,2065,7341,2376,7342, 214,1709,4307, 804, 35, 707, # 3174 + 324,3622,1601,2546, 140, 459,4068,7343,7344,1365, 839, 272, 978,2257,2572,3409, # 3190 +2128,1363,3623,1423, 697, 100,3071, 48, 70,1231, 495,3114,2193,7345,1294,7346, # 3206 +2079, 462, 586,1042,3246, 853, 256, 988, 185,2377,3410,1698, 434,1084,7347,3411, # 3222 + 314,2615,2775,4308,2330,2331, 
569,2280, 637,1816,2518, 757,1162,1878,1616,3412, # 3238 + 287,1577,2115, 768,4309,1671,2854,3511,2519,1321,3737, 909,2413,7348,4069, 933, # 3254 +3738,7349,2052,2356,1222,4310, 765,2414,1322, 786,4311,7350,1919,1462,1677,2895, # 3270 +1699,7351,4312,1424,2437,3115,3624,2590,3316,1774,1940,3413,3880,4070, 309,1369, # 3286 +1130,2812, 364,2230,1653,1299,3881,3512,3882,3883,2646, 525,1085,3021, 902,2000, # 3302 +1475, 964,4313, 421,1844,1415,1057,2281, 940,1364,3116, 376,4314,4315,1381, 7, # 3318 +2520, 983,2378, 336,1710,2675,1845, 321,3414, 559,1131,3022,2742,1808,1132,1313, # 3334 + 265,1481,1857,7352, 352,1203,2813,3247, 167,1089, 420,2814, 776, 792,1724,3513, # 3350 +4071,2438,3248,7353,4072,7354, 446, 229, 333,2743, 901,3739,1200,1557,4316,2647, # 3366 +1920, 395,2744,2676,3740,4073,1835, 125, 916,3178,2616,4317,7355,7356,3741,7357, # 3382 +7358,7359,4318,3117,3625,1133,2547,1757,3415,1510,2313,1409,3514,7360,2145, 438, # 3398 +2591,2896,2379,3317,1068, 958,3023, 461, 311,2855,2677,4074,1915,3179,4075,1978, # 3414 + 383, 750,2745,2617,4076, 274, 539, 385,1278,1442,7361,1154,1964, 384, 561, 210, # 3430 + 98,1295,2548,3515,7362,1711,2415,1482,3416,3884,2897,1257, 129,7363,3742, 642, # 3446 + 523,2776,2777,2648,7364, 141,2231,1333, 68, 176, 441, 876, 907,4077, 603,2592, # 3462 + 710, 171,3417, 404, 549, 18,3118,2393,1410,3626,1666,7365,3516,4319,2898,4320, # 3478 +7366,2973, 368,7367, 146, 366, 99, 871,3627,1543, 748, 807,1586,1185, 22,2258, # 3494 + 379,3743,3180,7368,3181, 505,1941,2618,1991,1382,2314,7369, 380,2357, 218, 702, # 3510 +1817,1248,3418,3024,3517,3318,3249,7370,2974,3628, 930,3250,3744,7371, 59,7372, # 3526 + 585, 601,4078, 497,3419,1112,1314,4321,1801,7373,1223,1472,2174,7374, 749,1836, # 3542 + 690,1899,3745,1772,3885,1476, 429,1043,1790,2232,2116, 917,4079, 447,1086,1629, # 3558 +7375, 556,7376,7377,2020,1654, 844,1090, 105, 550, 966,1758,2815,1008,1782, 686, # 3574 +1095,7378,2282, 793,1602,7379,3518,2593,4322,4080,2933,2297,4323,3746, 980,2496, # 3590 + 544, 353, 527,4324, 908,2678,2899,7380, 381,2619,1942,1348,7381,1341,1252, 560, # 3606 +3072,7382,3420,2856,7383,2053, 973, 886,2080, 143,4325,7384,7385, 157,3886, 496, # 3622 +4081, 57, 840, 540,2038,4326,4327,3421,2117,1445, 970,2259,1748,1965,2081,4082, # 3638 +3119,1234,1775,3251,2816,3629, 773,1206,2129,1066,2039,1326,3887,1738,1725,4083, # 3654 + 279,3120, 51,1544,2594, 423,1578,2130,2066, 173,4328,1879,7386,7387,1583, 264, # 3670 + 610,3630,4329,2439, 280, 154,7388,7389,7390,1739, 338,1282,3073, 693,2857,1411, # 3686 +1074,3747,2440,7391,4330,7392,7393,1240, 952,2394,7394,2900,1538,2679, 685,1483, # 3702 +4084,2468,1436, 953,4085,2054,4331, 671,2395, 79,4086,2441,3252, 608, 567,2680, # 3718 +3422,4087,4088,1691, 393,1261,1791,2396,7395,4332,7396,7397,7398,7399,1383,1672, # 3734 +3748,3182,1464, 522,1119, 661,1150, 216, 675,4333,3888,1432,3519, 609,4334,2681, # 3750 +2397,7400,7401,7402,4089,3025, 0,7403,2469, 315, 231,2442, 301,3319,4335,2380, # 3766 +7404, 233,4090,3631,1818,4336,4337,7405, 96,1776,1315,2082,7406, 257,7407,1809, # 3782 +3632,2709,1139,1819,4091,2021,1124,2163,2778,1777,2649,7408,3074, 363,1655,3183, # 3798 +7409,2975,7410,7411,7412,3889,1567,3890, 718, 103,3184, 849,1443, 341,3320,2934, # 3814 +1484,7413,1712, 127, 67, 339,4092,2398, 679,1412, 821,7414,7415, 834, 738, 351, # 3830 +2976,2146, 846, 235,1497,1880, 418,1992,3749,2710, 186,1100,2147,2746,3520,1545, # 3846 +1355,2935,2858,1377, 583,3891,4093,2573,2977,7416,1298,3633,1078,2549,3634,2358, # 3862 + 78,3750,3751, 
267,1289,2099,2001,1594,4094, 348, 369,1274,2194,2175,1837,4338, # 3878 +1820,2817,3635,2747,2283,2002,4339,2936,2748, 144,3321, 882,4340,3892,2749,3423, # 3894 +4341,2901,7417,4095,1726, 320,7418,3893,3026, 788,2978,7419,2818,1773,1327,2859, # 3910 +3894,2819,7420,1306,4342,2003,1700,3752,3521,2359,2650, 787,2022, 506, 824,3636, # 3926 + 534, 323,4343,1044,3322,2023,1900, 946,3424,7421,1778,1500,1678,7422,1881,4344, # 3942 + 165, 243,4345,3637,2521, 123, 683,4096, 764,4346, 36,3895,1792, 589,2902, 816, # 3958 + 626,1667,3027,2233,1639,1555,1622,3753,3896,7423,3897,2860,1370,1228,1932, 891, # 3974 +2083,2903, 304,4097,7424, 292,2979,2711,3522, 691,2100,4098,1115,4347, 118, 662, # 3990 +7425, 611,1156, 854,2381,1316,2861, 2, 386, 515,2904,7426,7427,3253, 868,2234, # 4006 +1486, 855,2651, 785,2212,3028,7428,1040,3185,3523,7429,3121, 448,7430,1525,7431, # 4022 +2164,4348,7432,3754,7433,4099,2820,3524,3122, 503, 818,3898,3123,1568, 814, 676, # 4038 +1444, 306,1749,7434,3755,1416,1030, 197,1428, 805,2821,1501,4349,7435,7436,7437, # 4054 +1993,7438,4350,7439,7440,2195, 13,2779,3638,2980,3124,1229,1916,7441,3756,2131, # 4070 +7442,4100,4351,2399,3525,7443,2213,1511,1727,1120,7444,7445, 646,3757,2443, 307, # 4086 +7446,7447,1595,3186,7448,7449,7450,3639,1113,1356,3899,1465,2522,2523,7451, 519, # 4102 +7452, 128,2132, 92,2284,1979,7453,3900,1512, 342,3125,2196,7454,2780,2214,1980, # 4118 +3323,7455, 290,1656,1317, 789, 827,2360,7456,3758,4352, 562, 581,3901,7457, 401, # 4134 +4353,2248, 94,4354,1399,2781,7458,1463,2024,4355,3187,1943,7459, 828,1105,4101, # 4150 +1262,1394,7460,4102, 605,4356,7461,1783,2862,7462,2822, 819,2101, 578,2197,2937, # 4166 +7463,1502, 436,3254,4103,3255,2823,3902,2905,3425,3426,7464,2712,2315,7465,7466, # 4182 +2332,2067, 23,4357, 193, 826,3759,2102, 699,1630,4104,3075, 390,1793,1064,3526, # 4198 +7467,1579,3076,3077,1400,7468,4105,1838,1640,2863,7469,4358,4359, 137,4106, 598, # 4214 +3078,1966, 780, 104, 974,2938,7470, 278, 899, 253, 402, 572, 504, 493,1339,7471, # 4230 +3903,1275,4360,2574,2550,7472,3640,3029,3079,2249, 565,1334,2713, 863, 41,7473, # 4246 +7474,4361,7475,1657,2333, 19, 463,2750,4107, 606,7476,2981,3256,1087,2084,1323, # 4262 +2652,2982,7477,1631,1623,1750,4108,2682,7478,2864, 791,2714,2653,2334, 232,2416, # 4278 +7479,2983,1498,7480,2654,2620, 755,1366,3641,3257,3126,2025,1609, 119,1917,3427, # 4294 + 862,1026,4109,7481,3904,3760,4362,3905,4363,2260,1951,2470,7482,1125, 817,4110, # 4310 +4111,3906,1513,1766,2040,1487,4112,3030,3258,2824,3761,3127,7483,7484,1507,7485, # 4326 +2683, 733, 40,1632,1106,2865, 345,4113, 841,2524, 230,4364,2984,1846,3259,3428, # 4342 +7486,1263, 986,3429,7487, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562,3907, # 4358 +3908,2939, 967,2751,2655,1349, 592,2133,1692,3324,2985,1994,4114,1679,3909,1901, # 4374 +2185,7488, 739,3642,2715,1296,1290,7489,4115,2198,2199,1921,1563,2595,2551,1870, # 4390 +2752,2986,7490, 435,7491, 343,1108, 596, 17,1751,4365,2235,3430,3643,7492,4366, # 4406 + 294,3527,2940,1693, 477, 979, 281,2041,3528, 643,2042,3644,2621,2782,2261,1031, # 4422 +2335,2134,2298,3529,4367, 367,1249,2552,7493,3530,7494,4368,1283,3325,2004, 240, # 4438 +1762,3326,4369,4370, 836,1069,3128, 474,7495,2148,2525, 268,3531,7496,3188,1521, # 4454 +1284,7497,1658,1546,4116,7498,3532,3533,7499,4117,3327,2684,1685,4118, 961,1673, # 4470 +2622, 190,2005,2200,3762,4371,4372,7500, 570,2497,3645,1490,7501,4373,2623,3260, # 4486 +1956,4374, 584,1514, 396,1045,1944,7502,4375,1967,2444,7503,7504,4376,3910, 619, # 4502 +7505,3129,3261, 
215,2006,2783,2553,3189,4377,3190,4378, 763,4119,3763,4379,7506, # 4518 +7507,1957,1767,2941,3328,3646,1174, 452,1477,4380,3329,3130,7508,2825,1253,2382, # 4534 +2186,1091,2285,4120, 492,7509, 638,1169,1824,2135,1752,3911, 648, 926,1021,1324, # 4550 +4381, 520,4382, 997, 847,1007, 892,4383,3764,2262,1871,3647,7510,2400,1784,4384, # 4566 +1952,2942,3080,3191,1728,4121,2043,3648,4385,2007,1701,3131,1551, 30,2263,4122, # 4582 +7511,2026,4386,3534,7512, 501,7513,4123, 594,3431,2165,1821,3535,3432,3536,3192, # 4598 + 829,2826,4124,7514,1680,3132,1225,4125,7515,3262,4387,4126,3133,2336,7516,4388, # 4614 +4127,7517,3912,3913,7518,1847,2383,2596,3330,7519,4389, 374,3914, 652,4128,4129, # 4630 + 375,1140, 798,7520,7521,7522,2361,4390,2264, 546,1659, 138,3031,2445,4391,7523, # 4646 +2250, 612,1848, 910, 796,3765,1740,1371, 825,3766,3767,7524,2906,2554,7525, 692, # 4662 + 444,3032,2624, 801,4392,4130,7526,1491, 244,1053,3033,4131,4132, 340,7527,3915, # 4678 +1041,2987, 293,1168, 87,1357,7528,1539, 959,7529,2236, 721, 694,4133,3768, 219, # 4694 +1478, 644,1417,3331,2656,1413,1401,1335,1389,3916,7530,7531,2988,2362,3134,1825, # 4710 + 730,1515, 184,2827, 66,4393,7532,1660,2943, 246,3332, 378,1457, 226,3433, 975, # 4726 +3917,2944,1264,3537, 674, 696,7533, 163,7534,1141,2417,2166, 713,3538,3333,4394, # 4742 +3918,7535,7536,1186, 15,7537,1079,1070,7538,1522,3193,3539, 276,1050,2716, 758, # 4758 +1126, 653,2945,3263,7539,2337, 889,3540,3919,3081,2989, 903,1250,4395,3920,3434, # 4774 +3541,1342,1681,1718, 766,3264, 286, 89,2946,3649,7540,1713,7541,2597,3334,2990, # 4790 +7542,2947,2215,3194,2866,7543,4396,2498,2526, 181, 387,1075,3921, 731,2187,3335, # 4806 +7544,3265, 310, 313,3435,2299, 770,4134, 54,3034, 189,4397,3082,3769,3922,7545, # 4822 +1230,1617,1849, 355,3542,4135,4398,3336, 111,4136,3650,1350,3135,3436,3035,4137, # 4838 +2149,3266,3543,7546,2784,3923,3924,2991, 722,2008,7547,1071, 247,1207,2338,2471, # 4854 +1378,4399,2009, 864,1437,1214,4400, 373,3770,1142,2216, 667,4401, 442,2753,2555, # 4870 +3771,3925,1968,4138,3267,1839, 837, 170,1107, 934,1336,1882,7548,7549,2118,4139, # 4886 +2828, 743,1569,7550,4402,4140, 582,2384,1418,3437,7551,1802,7552, 357,1395,1729, # 4902 +3651,3268,2418,1564,2237,7553,3083,3772,1633,4403,1114,2085,4141,1532,7554, 482, # 4918 +2446,4404,7555,7556,1492, 833,1466,7557,2717,3544,1641,2829,7558,1526,1272,3652, # 4934 +4142,1686,1794, 416,2556,1902,1953,1803,7559,3773,2785,3774,1159,2316,7560,2867, # 4950 +4405,1610,1584,3036,2419,2754, 443,3269,1163,3136,7561,7562,3926,7563,4143,2499, # 4966 +3037,4406,3927,3137,2103,1647,3545,2010,1872,4144,7564,4145, 431,3438,7565, 250, # 4982 + 97, 81,4146,7566,1648,1850,1558, 160, 848,7567, 866, 740,1694,7568,2201,2830, # 4998 +3195,4147,4407,3653,1687, 950,2472, 426, 469,3196,3654,3655,3928,7569,7570,1188, # 5014 + 424,1995, 861,3546,4148,3775,2202,2685, 168,1235,3547,4149,7571,2086,1674,4408, # 5030 +3337,3270, 220,2557,1009,7572,3776, 670,2992, 332,1208, 717,7573,7574,3548,2447, # 5046 +3929,3338,7575, 513,7576,1209,2868,3339,3138,4409,1080,7577,7578,7579,7580,2527, # 5062 +3656,3549, 815,1587,3930,3931,7581,3550,3439,3777,1254,4410,1328,3038,1390,3932, # 5078 +1741,3933,3778,3934,7582, 236,3779,2448,3271,7583,7584,3657,3780,1273,3781,4411, # 5094 +7585, 308,7586,4412, 245,4413,1851,2473,1307,2575, 430, 715,2136,2449,7587, 270, # 5110 + 199,2869,3935,7588,3551,2718,1753, 761,1754, 725,1661,1840,4414,3440,3658,7589, # 5126 +7590, 587, 14,3272, 227,2598, 326, 480,2265, 943,2755,3552, 291, 650,1883,7591, # 5142 +1702,1226, 
102,1547, 62,3441, 904,4415,3442,1164,4150,7592,7593,1224,1548,2756, # 5158 + 391, 498,1493,7594,1386,1419,7595,2055,1177,4416, 813, 880,1081,2363, 566,1145, # 5174 +4417,2286,1001,1035,2558,2599,2238, 394,1286,7596,7597,2068,7598, 86,1494,1730, # 5190 +3936, 491,1588, 745, 897,2948, 843,3340,3937,2757,2870,3273,1768, 998,2217,2069, # 5206 + 397,1826,1195,1969,3659,2993,3341, 284,7599,3782,2500,2137,2119,1903,7600,3938, # 5222 +2150,3939,4151,1036,3443,1904, 114,2559,4152, 209,1527,7601,7602,2949,2831,2625, # 5238 +2385,2719,3139, 812,2560,7603,3274,7604,1559, 737,1884,3660,1210, 885, 28,2686, # 5254 +3553,3783,7605,4153,1004,1779,4418,7606, 346,1981,2218,2687,4419,3784,1742, 797, # 5270 +1642,3940,1933,1072,1384,2151, 896,3941,3275,3661,3197,2871,3554,7607,2561,1958, # 5286 +4420,2450,1785,7608,7609,7610,3942,4154,1005,1308,3662,4155,2720,4421,4422,1528, # 5302 +2600, 161,1178,4156,1982, 987,4423,1101,4157, 631,3943,1157,3198,2420,1343,1241, # 5318 +1016,2239,2562, 372, 877,2339,2501,1160, 555,1934, 911,3944,7611, 466,1170, 169, # 5334 +1051,2907,2688,3663,2474,2994,1182,2011,2563,1251,2626,7612, 992,2340,3444,1540, # 5350 +2721,1201,2070,2401,1996,2475,7613,4424, 528,1922,2188,1503,1873,1570,2364,3342, # 5366 +3276,7614, 557,1073,7615,1827,3445,2087,2266,3140,3039,3084, 767,3085,2786,4425, # 5382 +1006,4158,4426,2341,1267,2176,3664,3199, 778,3945,3200,2722,1597,2657,7616,4427, # 5398 +7617,3446,7618,7619,7620,3277,2689,1433,3278, 131, 95,1504,3946, 723,4159,3141, # 5414 +1841,3555,2758,2189,3947,2027,2104,3665,7621,2995,3948,1218,7622,3343,3201,3949, # 5430 +4160,2576, 248,1634,3785, 912,7623,2832,3666,3040,3786, 654, 53,7624,2996,7625, # 5446 +1688,4428, 777,3447,1032,3950,1425,7626, 191, 820,2120,2833, 971,4429, 931,3202, # 5462 + 135, 664, 783,3787,1997, 772,2908,1935,3951,3788,4430,2909,3203, 282,2723, 640, # 5478 +1372,3448,1127, 922, 325,3344,7627,7628, 711,2044,7629,7630,3952,2219,2787,1936, # 5494 +3953,3345,2220,2251,3789,2300,7631,4431,3790,1258,3279,3954,3204,2138,2950,3955, # 5510 +3956,7632,2221, 258,3205,4432, 101,1227,7633,3280,1755,7634,1391,3281,7635,2910, # 5526 +2056, 893,7636,7637,7638,1402,4161,2342,7639,7640,3206,3556,7641,7642, 878,1325, # 5542 +1780,2788,4433, 259,1385,2577, 744,1183,2267,4434,7643,3957,2502,7644, 684,1024, # 5558 +4162,7645, 472,3557,3449,1165,3282,3958,3959, 322,2152, 881, 455,1695,1152,1340, # 5574 + 660, 554,2153,4435,1058,4436,4163, 830,1065,3346,3960,4437,1923,7646,1703,1918, # 5590 +7647, 932,2268, 122,7648,4438, 947, 677,7649,3791,2627, 297,1905,1924,2269,4439, # 5606 +2317,3283,7650,7651,4164,7652,4165, 84,4166, 112, 989,7653, 547,1059,3961, 701, # 5622 +3558,1019,7654,4167,7655,3450, 942, 639, 457,2301,2451, 993,2951, 407, 851, 494, # 5638 +4440,3347, 927,7656,1237,7657,2421,3348, 573,4168, 680, 921,2911,1279,1874, 285, # 5654 + 790,1448,1983, 719,2167,7658,7659,4441,3962,3963,1649,7660,1541, 563,7661,1077, # 5670 +7662,3349,3041,3451, 511,2997,3964,3965,3667,3966,1268,2564,3350,3207,4442,4443, # 5686 +7663, 535,1048,1276,1189,2912,2028,3142,1438,1373,2834,2952,1134,2012,7664,4169, # 5702 +1238,2578,3086,1259,7665, 700,7666,2953,3143,3668,4170,7667,4171,1146,1875,1906, # 5718 +4444,2601,3967, 781,2422, 132,1589, 203, 147, 273,2789,2402, 898,1786,2154,3968, # 5734 +3969,7668,3792,2790,7669,7670,4445,4446,7671,3208,7672,1635,3793, 965,7673,1804, # 5750 +2690,1516,3559,1121,1082,1329,3284,3970,1449,3794, 65,1128,2835,2913,2759,1590, # 5766 +3795,7674,7675, 12,2658, 45, 976,2579,3144,4447, 517,2528,1013,1037,3209,7676, # 5782 
+3796,2836,7677,3797,7678,3452,7679,2602, 614,1998,2318,3798,3087,2724,2628,7680, # 5798 +2580,4172, 599,1269,7681,1810,3669,7682,2691,3088, 759,1060, 489,1805,3351,3285, # 5814 +1358,7683,7684,2386,1387,1215,2629,2252, 490,7685,7686,4173,1759,2387,2343,7687, # 5830 +4448,3799,1907,3971,2630,1806,3210,4449,3453,3286,2760,2344, 874,7688,7689,3454, # 5846 +3670,1858, 91,2914,3671,3042,3800,4450,7690,3145,3972,2659,7691,3455,1202,1403, # 5862 +3801,2954,2529,1517,2503,4451,3456,2504,7692,4452,7693,2692,1885,1495,1731,3973, # 5878 +2365,4453,7694,2029,7695,7696,3974,2693,1216, 237,2581,4174,2319,3975,3802,4454, # 5894 +4455,2694,3560,3457, 445,4456,7697,7698,7699,7700,2761, 61,3976,3672,1822,3977, # 5910 +7701, 687,2045, 935, 925, 405,2660, 703,1096,1859,2725,4457,3978,1876,1367,2695, # 5926 +3352, 918,2105,1781,2476, 334,3287,1611,1093,4458, 564,3146,3458,3673,3353, 945, # 5942 +2631,2057,4459,7702,1925, 872,4175,7703,3459,2696,3089, 349,4176,3674,3979,4460, # 5958 +3803,4177,3675,2155,3980,4461,4462,4178,4463,2403,2046, 782,3981, 400, 251,4179, # 5974 +1624,7704,7705, 277,3676, 299,1265, 476,1191,3804,2121,4180,4181,1109, 205,7706, # 5990 +2582,1000,2156,3561,1860,7707,7708,7709,4464,7710,4465,2565, 107,2477,2157,3982, # 6006 +3460,3147,7711,1533, 541,1301, 158, 753,4182,2872,3562,7712,1696, 370,1088,4183, # 6022 +4466,3563, 579, 327, 440, 162,2240, 269,1937,1374,3461, 968,3043, 56,1396,3090, # 6038 +2106,3288,3354,7713,1926,2158,4467,2998,7714,3564,7715,7716,3677,4468,2478,7717, # 6054 +2791,7718,1650,4469,7719,2603,7720,7721,3983,2661,3355,1149,3356,3984,3805,3985, # 6070 +7722,1076, 49,7723, 951,3211,3289,3290, 450,2837, 920,7724,1811,2792,2366,4184, # 6086 +1908,1138,2367,3806,3462,7725,3212,4470,1909,1147,1518,2423,4471,3807,7726,4472, # 6102 +2388,2604, 260,1795,3213,7727,7728,3808,3291, 708,7729,3565,1704,7730,3566,1351, # 6118 +1618,3357,2999,1886, 944,4185,3358,4186,3044,3359,4187,7731,3678, 422, 413,1714, # 6134 +3292, 500,2058,2345,4188,2479,7732,1344,1910, 954,7733,1668,7734,7735,3986,2404, # 6150 +4189,3567,3809,4190,7736,2302,1318,2505,3091, 133,3092,2873,4473, 629, 31,2838, # 6166 +2697,3810,4474, 850, 949,4475,3987,2955,1732,2088,4191,1496,1852,7737,3988, 620, # 6182 +3214, 981,1242,3679,3360,1619,3680,1643,3293,2139,2452,1970,1719,3463,2168,7738, # 6198 +3215,7739,7740,3361,1828,7741,1277,4476,1565,2047,7742,1636,3568,3093,7743, 869, # 6214 +2839, 655,3811,3812,3094,3989,3000,3813,1310,3569,4477,7744,7745,7746,1733, 558, # 6230 +4478,3681, 335,1549,3045,1756,4192,3682,1945,3464,1829,1291,1192, 470,2726,2107, # 6246 +2793, 913,1054,3990,7747,1027,7748,3046,3991,4479, 982,2662,3362,3148,3465,3216, # 6262 +3217,1946,2794,7749, 571,4480,7750,1830,7751,3570,2583,1523,2424,7752,2089, 984, # 6278 +4481,3683,1959,7753,3684, 852, 923,2795,3466,3685, 969,1519, 999,2048,2320,1705, # 6294 +7754,3095, 615,1662, 151, 597,3992,2405,2321,1049, 275,4482,3686,4193, 568,3687, # 6310 +3571,2480,4194,3688,7755,2425,2270, 409,3218,7756,1566,2874,3467,1002, 769,2840, # 6326 + 194,2090,3149,3689,2222,3294,4195, 628,1505,7757,7758,1763,2177,3001,3993, 521, # 6342 +1161,2584,1787,2203,2406,4483,3994,1625,4196,4197, 412, 42,3096, 464,7759,2632, # 6358 +4484,3363,1760,1571,2875,3468,2530,1219,2204,3814,2633,2140,2368,4485,4486,3295, # 6374 +1651,3364,3572,7760,7761,3573,2481,3469,7762,3690,7763,7764,2271,2091, 460,7765, # 6390 +4487,7766,3002, 962, 588,3574, 289,3219,2634,1116, 52,7767,3047,1796,7768,7769, # 6406 +7770,1467,7771,1598,1143,3691,4198,1984,1734,1067,4488,1280,3365, 465,4489,1572, # 6422 + 
510,7772,1927,2241,1812,1644,3575,7773,4490,3692,7774,7775,2663,1573,1534,7776, # 6438 +7777,4199, 536,1807,1761,3470,3815,3150,2635,7778,7779,7780,4491,3471,2915,1911, # 6454 +2796,7781,3296,1122, 377,3220,7782, 360,7783,7784,4200,1529, 551,7785,2059,3693, # 6470 +1769,2426,7786,2916,4201,3297,3097,2322,2108,2030,4492,1404, 136,1468,1479, 672, # 6486 +1171,3221,2303, 271,3151,7787,2762,7788,2049, 678,2727, 865,1947,4493,7789,2013, # 6502 +3995,2956,7790,2728,2223,1397,3048,3694,4494,4495,1735,2917,3366,3576,7791,3816, # 6518 + 509,2841,2453,2876,3817,7792,7793,3152,3153,4496,4202,2531,4497,2304,1166,1010, # 6534 + 552, 681,1887,7794,7795,2957,2958,3996,1287,1596,1861,3154, 358, 453, 736, 175, # 6550 + 478,1117, 905,1167,1097,7796,1853,1530,7797,1706,7798,2178,3472,2287,3695,3473, # 6566 +3577,4203,2092,4204,7799,3367,1193,2482,4205,1458,2190,2205,1862,1888,1421,3298, # 6582 +2918,3049,2179,3474, 595,2122,7800,3997,7801,7802,4206,1707,2636, 223,3696,1359, # 6598 + 751,3098, 183,3475,7803,2797,3003, 419,2369, 633, 704,3818,2389, 241,7804,7805, # 6614 +7806, 838,3004,3697,2272,2763,2454,3819,1938,2050,3998,1309,3099,2242,1181,7807, # 6630 +1136,2206,3820,2370,1446,4207,2305,4498,7808,7809,4208,1055,2605, 484,3698,7810, # 6646 +3999, 625,4209,2273,3368,1499,4210,4000,7811,4001,4211,3222,2274,2275,3476,7812, # 6662 +7813,2764, 808,2606,3699,3369,4002,4212,3100,2532, 526,3370,3821,4213, 955,7814, # 6678 +1620,4214,2637,2427,7815,1429,3700,1669,1831, 994, 928,7816,3578,1260,7817,7818, # 6694 +7819,1948,2288, 741,2919,1626,4215,2729,2455, 867,1184, 362,3371,1392,7820,7821, # 6710 +4003,4216,1770,1736,3223,2920,4499,4500,1928,2698,1459,1158,7822,3050,3372,2877, # 6726 +1292,1929,2506,2842,3701,1985,1187,2071,2014,2607,4217,7823,2566,2507,2169,3702, # 6742 +2483,3299,7824,3703,4501,7825,7826, 666,1003,3005,1022,3579,4218,7827,4502,1813, # 6758 +2253, 574,3822,1603, 295,1535, 705,3823,4219, 283, 858, 417,7828,7829,3224,4503, # 6774 +4504,3051,1220,1889,1046,2276,2456,4004,1393,1599, 689,2567, 388,4220,7830,2484, # 6790 + 802,7831,2798,3824,2060,1405,2254,7832,4505,3825,2109,1052,1345,3225,1585,7833, # 6806 + 809,7834,7835,7836, 575,2730,3477, 956,1552,1469,1144,2323,7837,2324,1560,2457, # 6822 +3580,3226,4005, 616,2207,3155,2180,2289,7838,1832,7839,3478,4506,7840,1319,3704, # 6838 +3705,1211,3581,1023,3227,1293,2799,7841,7842,7843,3826, 607,2306,3827, 762,2878, # 6854 +1439,4221,1360,7844,1485,3052,7845,4507,1038,4222,1450,2061,2638,4223,1379,4508, # 6870 +2585,7846,7847,4224,1352,1414,2325,2921,1172,7848,7849,3828,3829,7850,1797,1451, # 6886 +7851,7852,7853,7854,2922,4006,4007,2485,2346, 411,4008,4009,3582,3300,3101,4509, # 6902 +1561,2664,1452,4010,1375,7855,7856, 47,2959, 316,7857,1406,1591,2923,3156,7858, # 6918 +1025,2141,3102,3157, 354,2731, 884,2224,4225,2407, 508,3706, 726,3583, 996,2428, # 6934 +3584, 729,7859, 392,2191,1453,4011,4510,3707,7860,7861,2458,3585,2608,1675,2800, # 6950 + 919,2347,2960,2348,1270,4511,4012, 73,7862,7863, 647,7864,3228,2843,2255,1550, # 6966 +1346,3006,7865,1332, 883,3479,7866,7867,7868,7869,3301,2765,7870,1212, 831,1347, # 6982 +4226,4512,2326,3830,1863,3053, 720,3831,4513,4514,3832,7871,4227,7872,7873,4515, # 6998 +7874,7875,1798,4516,3708,2609,4517,3586,1645,2371,7876,7877,2924, 669,2208,2665, # 7014 +2429,7878,2879,7879,7880,1028,3229,7881,4228,2408,7882,2256,1353,7883,7884,4518, # 7030 +3158, 518,7885,4013,7886,4229,1960,7887,2142,4230,7888,7889,3007,2349,2350,3833, # 7046 + 516,1833,1454,4014,2699,4231,4519,2225,2610,1971,1129,3587,7890,2766,7891,2961, # 
7062 +1422, 577,1470,3008,1524,3373,7892,7893, 432,4232,3054,3480,7894,2586,1455,2508, # 7078 +2226,1972,1175,7895,1020,2732,4015,3481,4520,7896,2733,7897,1743,1361,3055,3482, # 7094 +2639,4016,4233,4521,2290, 895, 924,4234,2170, 331,2243,3056, 166,1627,3057,1098, # 7110 +7898,1232,2880,2227,3374,4522, 657, 403,1196,2372, 542,3709,3375,1600,4235,3483, # 7126 +7899,4523,2767,3230, 576, 530,1362,7900,4524,2533,2666,3710,4017,7901, 842,3834, # 7142 +7902,2801,2031,1014,4018, 213,2700,3376, 665, 621,4236,7903,3711,2925,2430,7904, # 7158 +2431,3302,3588,3377,7905,4237,2534,4238,4525,3589,1682,4239,3484,1380,7906, 724, # 7174 +2277, 600,1670,7907,1337,1233,4526,3103,2244,7908,1621,4527,7909, 651,4240,7910, # 7190 +1612,4241,2611,7911,2844,7912,2734,2307,3058,7913, 716,2459,3059, 174,1255,2701, # 7206 +4019,3590, 548,1320,1398, 728,4020,1574,7914,1890,1197,3060,4021,7915,3061,3062, # 7222 +3712,3591,3713, 747,7916, 635,4242,4528,7917,7918,7919,4243,7920,7921,4529,7922, # 7238 +3378,4530,2432, 451,7923,3714,2535,2072,4244,2735,4245,4022,7924,1764,4531,7925, # 7254 +4246, 350,7926,2278,2390,2486,7927,4247,4023,2245,1434,4024, 488,4532, 458,4248, # 7270 +4025,3715, 771,1330,2391,3835,2568,3159,2159,2409,1553,2667,3160,4249,7928,2487, # 7286 +2881,2612,1720,2702,4250,3379,4533,7929,2536,4251,7930,3231,4252,2768,7931,2015, # 7302 +2736,7932,1155,1017,3716,3836,7933,3303,2308, 201,1864,4253,1430,7934,4026,7935, # 7318 +7936,7937,7938,7939,4254,1604,7940, 414,1865, 371,2587,4534,4535,3485,2016,3104, # 7334 +4536,1708, 960,4255, 887, 389,2171,1536,1663,1721,7941,2228,4027,2351,2926,1580, # 7350 +7942,7943,7944,1744,7945,2537,4537,4538,7946,4539,7947,2073,7948,7949,3592,3380, # 7366 +2882,4256,7950,4257,2640,3381,2802, 673,2703,2460, 709,3486,4028,3593,4258,7951, # 7382 +1148, 502, 634,7952,7953,1204,4540,3594,1575,4541,2613,3717,7954,3718,3105, 948, # 7398 +3232, 121,1745,3837,1110,7955,4259,3063,2509,3009,4029,3719,1151,1771,3838,1488, # 7414 +4030,1986,7956,2433,3487,7957,7958,2093,7959,4260,3839,1213,1407,2803, 531,2737, # 7430 +2538,3233,1011,1537,7960,2769,4261,3106,1061,7961,3720,3721,1866,2883,7962,2017, # 7446 + 120,4262,4263,2062,3595,3234,2309,3840,2668,3382,1954,4542,7963,7964,3488,1047, # 7462 +2704,1266,7965,1368,4543,2845, 649,3383,3841,2539,2738,1102,2846,2669,7966,7967, # 7478 +1999,7968,1111,3596,2962,7969,2488,3842,3597,2804,1854,3384,3722,7970,7971,3385, # 7494 +2410,2884,3304,3235,3598,7972,2569,7973,3599,2805,4031,1460, 856,7974,3600,7975, # 7510 +2885,2963,7976,2886,3843,7977,4264, 632,2510, 875,3844,1697,3845,2291,7978,7979, # 7526 +4544,3010,1239, 580,4545,4265,7980, 914, 936,2074,1190,4032,1039,2123,7981,7982, # 7542 +7983,3386,1473,7984,1354,4266,3846,7985,2172,3064,4033, 915,3305,4267,4268,3306, # 7558 +1605,1834,7986,2739, 398,3601,4269,3847,4034, 328,1912,2847,4035,3848,1331,4270, # 7574 +3011, 937,4271,7987,3602,4036,4037,3387,2160,4546,3388, 524, 742, 538,3065,1012, # 7590 +7988,7989,3849,2461,7990, 658,1103, 225,3850,7991,7992,4547,7993,4548,7994,3236, # 7606 +1243,7995,4038, 963,2246,4549,7996,2705,3603,3161,7997,7998,2588,2327,7999,4550, # 7622 +8000,8001,8002,3489,3307, 957,3389,2540,2032,1930,2927,2462, 870,2018,3604,1746, # 7638 +2770,2771,2434,2463,8003,3851,8004,3723,3107,3724,3490,3390,3725,8005,1179,3066, # 7654 +8006,3162,2373,4272,3726,2541,3163,3108,2740,4039,8007,3391,1556,2542,2292, 977, # 7670 +2887,2033,4040,1205,3392,8008,1765,3393,3164,2124,1271,1689, 714,4551,3491,8009, # 7686 +2328,3852, 533,4273,3605,2181, 
617,8010,2464,3308,3492,2310,8011,8012,3165,8013, # 7702 +8014,3853,1987, 618, 427,2641,3493,3394,8015,8016,1244,1690,8017,2806,4274,4552, # 7718 +8018,3494,8019,8020,2279,1576, 473,3606,4275,3395, 972,8021,3607,8022,3067,8023, # 7734 +8024,4553,4554,8025,3727,4041,4042,8026, 153,4555, 356,8027,1891,2888,4276,2143, # 7750 + 408, 803,2352,8028,3854,8029,4277,1646,2570,2511,4556,4557,3855,8030,3856,4278, # 7766 +8031,2411,3396, 752,8032,8033,1961,2964,8034, 746,3012,2465,8035,4279,3728, 698, # 7782 +4558,1892,4280,3608,2543,4559,3609,3857,8036,3166,3397,8037,1823,1302,4043,2706, # 7798 +3858,1973,4281,8038,4282,3167, 823,1303,1288,1236,2848,3495,4044,3398, 774,3859, # 7814 +8039,1581,4560,1304,2849,3860,4561,8040,2435,2161,1083,3237,4283,4045,4284, 344, # 7830 +1173, 288,2311, 454,1683,8041,8042,1461,4562,4046,2589,8043,8044,4563, 985, 894, # 7846 +8045,3399,3168,8046,1913,2928,3729,1988,8047,2110,1974,8048,4047,8049,2571,1194, # 7862 + 425,8050,4564,3169,1245,3730,4285,8051,8052,2850,8053, 636,4565,1855,3861, 760, # 7878 +1799,8054,4286,2209,1508,4566,4048,1893,1684,2293,8055,8056,8057,4287,4288,2210, # 7894 + 479,8058,8059, 832,8060,4049,2489,8061,2965,2490,3731, 990,3109, 627,1814,2642, # 7910 +4289,1582,4290,2125,2111,3496,4567,8062, 799,4291,3170,8063,4568,2112,1737,3013, # 7926 +1018, 543, 754,4292,3309,1676,4569,4570,4050,8064,1489,8065,3497,8066,2614,2889, # 7942 +4051,8067,8068,2966,8069,8070,8071,8072,3171,4571,4572,2182,1722,8073,3238,3239, # 7958 +1842,3610,1715, 481, 365,1975,1856,8074,8075,1962,2491,4573,8076,2126,3611,3240, # 7974 + 433,1894,2063,2075,8077, 602,2741,8078,8079,8080,8081,8082,3014,1628,3400,8083, # 7990 +3172,4574,4052,2890,4575,2512,8084,2544,2772,8085,8086,8087,3310,4576,2891,8088, # 8006 +4577,8089,2851,4578,4579,1221,2967,4053,2513,8090,8091,8092,1867,1989,8093,8094, # 8022 +8095,1895,8096,8097,4580,1896,4054, 318,8098,2094,4055,4293,8099,8100, 485,8101, # 8038 + 938,3862, 553,2670, 116,8102,3863,3612,8103,3498,2671,2773,3401,3311,2807,8104, # 8054 +3613,2929,4056,1747,2930,2968,8105,8106, 207,8107,8108,2672,4581,2514,8109,3015, # 8070 + 890,3614,3864,8110,1877,3732,3402,8111,2183,2353,3403,1652,8112,8113,8114, 941, # 8086 +2294, 208,3499,4057,2019, 330,4294,3865,2892,2492,3733,4295,8115,8116,8117,8118, # 8102 +) + diff --git a/env/lib/python3.6/site-packages/chardet/euctwprober.py b/env/lib/python3.6/site-packages/chardet/euctwprober.py new file mode 100644 index 0000000..35669cc --- /dev/null +++ b/env/lib/python3.6/site-packages/chardet/euctwprober.py @@ -0,0 +1,46 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .mbcharsetprober import MultiByteCharSetProber +from .codingstatemachine import CodingStateMachine +from .chardistribution import EUCTWDistributionAnalysis +from .mbcssm import EUCTW_SM_MODEL + +class EUCTWProber(MultiByteCharSetProber): + def __init__(self): + super(EUCTWProber, self).__init__() + self.coding_sm = CodingStateMachine(EUCTW_SM_MODEL) + self.distribution_analyzer = EUCTWDistributionAnalysis() + self.reset() + + @property + def charset_name(self): + return "EUC-TW" + + @property + def language(self): + return "Taiwan" diff --git a/env/lib/python3.6/site-packages/chardet/gb2312freq.py b/env/lib/python3.6/site-packages/chardet/gb2312freq.py new file mode 100644 index 0000000..697837b --- /dev/null +++ b/env/lib/python3.6/site-packages/chardet/gb2312freq.py @@ -0,0 +1,283 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# GB2312 most frequently used character table +# +# Char to FreqOrder table , from hz6763 + +# 512 --> 0.79 -- 0.79 +# 1024 --> 0.92 -- 0.13 +# 2048 --> 0.98 -- 0.06 +# 6768 --> 1.00 -- 0.02 +# +# Ideal Distribution Ratio = 0.79135/(1-0.79135) = 3.79 +# Random Distribution Ration = 512 / (3755 - 512) = 0.157 +# +# Typical Distribution Ratio about 25% of Ideal one, still much higher that RDR + +GB2312_TYPICAL_DISTRIBUTION_RATIO = 0.9 + +GB2312_TABLE_SIZE = 3760 + +GB2312_CHAR_TO_FREQ_ORDER = ( +1671, 749,1443,2364,3924,3807,2330,3921,1704,3463,2691,1511,1515, 572,3191,2205, +2361, 224,2558, 479,1711, 963,3162, 440,4060,1905,2966,2947,3580,2647,3961,3842, +2204, 869,4207, 970,2678,5626,2944,2956,1479,4048, 514,3595, 588,1346,2820,3409, + 249,4088,1746,1873,2047,1774, 581,1813, 358,1174,3590,1014,1561,4844,2245, 670, +1636,3112, 889,1286, 953, 556,2327,3060,1290,3141, 613, 185,3477,1367, 850,3820, +1715,2428,2642,2303,2732,3041,2562,2648,3566,3946,1349, 388,3098,2091,1360,3585, + 152,1687,1539, 738,1559, 59,1232,2925,2267,1388,1249,1741,1679,2960, 151,1566, +1125,1352,4271, 924,4296, 385,3166,4459, 310,1245,2850, 70,3285,2729,3534,3575, +2398,3298,3466,1960,2265, 217,3647, 864,1909,2084,4401,2773,1010,3269,5152, 853, +3051,3121,1244,4251,1895, 364,1499,1540,2313,1180,3655,2268, 562, 715,2417,3061, + 544, 336,3768,2380,1752,4075, 950, 280,2425,4382, 183,2759,3272, 333,4297,2155, +1688,2356,1444,1039,4540, 736,1177,3349,2443,2368,2144,2225, 565, 196,1482,3406, + 927,1335,4147, 692, 878,1311,1653,3911,3622,1378,4200,1840,2969,3149,2126,1816, +2534,1546,2393,2760, 737,2494, 13, 447, 245,2747, 38,2765,2129,2589,1079, 606, + 360, 471,3755,2890, 404, 848, 699,1785,1236, 370,2221,1023,3746,2074,2026,2023, +2388,1581,2119, 812,1141,3091,2536,1519, 804,2053, 406,1596,1090, 784, 548,4414, +1806,2264,2936,1100, 343,4114,5096, 622,3358, 743,3668,1510,1626,5020,3567,2513, +3195,4115,5627,2489,2991, 24,2065,2697,1087,2719, 48,1634, 315, 68, 985,2052, + 198,2239,1347,1107,1439, 597,2366,2172, 871,3307, 919,2487,2790,1867, 236,2570, +1413,3794, 906,3365,3381,1701,1982,1818,1524,2924,1205, 616,2586,2072,2004, 575, + 253,3099, 32,1365,1182, 197,1714,2454,1201, 554,3388,3224,2748, 756,2587, 250, +2567,1507,1517,3529,1922,2761,2337,3416,1961,1677,2452,2238,3153, 615, 911,1506, +1474,2495,1265,1906,2749,3756,3280,2161, 898,2714,1759,3450,2243,2444, 563, 26, +3286,2266,3769,3344,2707,3677, 611,1402, 531,1028,2871,4548,1375, 261,2948, 835, +1190,4134, 353, 840,2684,1900,3082,1435,2109,1207,1674, 329,1872,2781,4055,2686, +2104, 608,3318,2423,2957,2768,1108,3739,3512,3271,3985,2203,1771,3520,1418,2054, +1681,1153, 225,1627,2929, 162,2050,2511,3687,1954, 124,1859,2431,1684,3032,2894, + 585,4805,3969,2869,2704,2088,2032,2095,3656,2635,4362,2209, 256, 518,2042,2105, +3777,3657, 643,2298,1148,1779, 190, 989,3544, 414, 11,2135,2063,2979,1471, 403, +3678, 126, 770,1563, 671,2499,3216,2877, 600,1179, 307,2805,4937,1268,1297,2694, + 252,4032,1448,1494,1331,1394, 127,2256, 222,1647,1035,1481,3056,1915,1048, 873, +3651, 210, 33,1608,2516, 200,1520, 415, 102, 0,3389,1287, 817, 91,3299,2940, + 836,1814, 549,2197,1396,1669,2987,3582,2297,2848,4528,1070, 687, 20,1819, 121, +1552,1364,1461,1968,2617,3540,2824,2083, 177, 948,4938,2291, 
110,4549,2066, 648, +3359,1755,2110,2114,4642,4845,1693,3937,3308,1257,1869,2123, 208,1804,3159,2992, +2531,2549,3361,2418,1350,2347,2800,2568,1291,2036,2680, 72, 842,1990, 212,1233, +1154,1586, 75,2027,3410,4900,1823,1337,2710,2676, 728,2810,1522,3026,4995, 157, + 755,1050,4022, 710, 785,1936,2194,2085,1406,2777,2400, 150,1250,4049,1206, 807, +1910, 534, 529,3309,1721,1660, 274, 39,2827, 661,2670,1578, 925,3248,3815,1094, +4278,4901,4252, 41,1150,3747,2572,2227,4501,3658,4902,3813,3357,3617,2884,2258, + 887, 538,4187,3199,1294,2439,3042,2329,2343,2497,1255, 107, 543,1527, 521,3478, +3568, 194,5062, 15, 961,3870,1241,1192,2664, 66,5215,3260,2111,1295,1127,2152, +3805,4135, 901,1164,1976, 398,1278, 530,1460, 748, 904,1054,1966,1426, 53,2909, + 509, 523,2279,1534, 536,1019, 239,1685, 460,2353, 673,1065,2401,3600,4298,2272, +1272,2363, 284,1753,3679,4064,1695, 81, 815,2677,2757,2731,1386, 859, 500,4221, +2190,2566, 757,1006,2519,2068,1166,1455, 337,2654,3203,1863,1682,1914,3025,1252, +1409,1366, 847, 714,2834,2038,3209, 964,2970,1901, 885,2553,1078,1756,3049, 301, +1572,3326, 688,2130,1996,2429,1805,1648,2930,3421,2750,3652,3088, 262,1158,1254, + 389,1641,1812, 526,1719, 923,2073,1073,1902, 468, 489,4625,1140, 857,2375,3070, +3319,2863, 380, 116,1328,2693,1161,2244, 273,1212,1884,2769,3011,1775,1142, 461, +3066,1200,2147,2212, 790, 702,2695,4222,1601,1058, 434,2338,5153,3640, 67,2360, +4099,2502, 618,3472,1329, 416,1132, 830,2782,1807,2653,3211,3510,1662, 192,2124, + 296,3979,1739,1611,3684, 23, 118, 324, 446,1239,1225, 293,2520,3814,3795,2535, +3116, 17,1074, 467,2692,2201, 387,2922, 45,1326,3055,1645,3659,2817, 958, 243, +1903,2320,1339,2825,1784,3289, 356, 576, 865,2315,2381,3377,3916,1088,3122,1713, +1655, 935, 628,4689,1034,1327, 441, 800, 720, 894,1979,2183,1528,5289,2702,1071, +4046,3572,2399,1571,3281, 79, 761,1103, 327, 134, 758,1899,1371,1615, 879, 442, + 215,2605,2579, 173,2048,2485,1057,2975,3317,1097,2253,3801,4263,1403,1650,2946, + 814,4968,3487,1548,2644,1567,1285, 2, 295,2636, 97, 946,3576, 832, 141,4257, +3273, 760,3821,3521,3156,2607, 949,1024,1733,1516,1803,1920,2125,2283,2665,3180, +1501,2064,3560,2171,1592, 803,3518,1416, 732,3897,4258,1363,1362,2458, 119,1427, + 602,1525,2608,1605,1639,3175, 694,3064, 10, 465, 76,2000,4846,4208, 444,3781, +1619,3353,2206,1273,3796, 740,2483, 320,1723,2377,3660,2619,1359,1137,1762,1724, +2345,2842,1850,1862, 912, 821,1866, 612,2625,1735,2573,3369,1093, 844, 89, 937, + 930,1424,3564,2413,2972,1004,3046,3019,2011, 711,3171,1452,4178, 428, 801,1943, + 432, 445,2811, 206,4136,1472, 730, 349, 73, 397,2802,2547, 998,1637,1167, 789, + 396,3217, 154,1218, 716,1120,1780,2819,4826,1931,3334,3762,2139,1215,2627, 552, +3664,3628,3232,1405,2383,3111,1356,2652,3577,3320,3101,1703, 640,1045,1370,1246, +4996, 371,1575,2436,1621,2210, 984,4033,1734,2638, 16,4529, 663,2755,3255,1451, +3917,2257,1253,1955,2234,1263,2951, 214,1229, 617, 485, 359,1831,1969, 473,2310, + 750,2058, 165, 80,2864,2419, 361,4344,2416,2479,1134, 796,3726,1266,2943, 860, +2715, 938, 390,2734,1313,1384, 248, 202, 877,1064,2854, 522,3907, 279,1602, 297, +2357, 395,3740, 137,2075, 944,4089,2584,1267,3802, 62,1533,2285, 178, 176, 780, +2440, 201,3707, 590, 478,1560,4354,2117,1075, 30, 74,4643,4004,1635,1441,2745, + 776,2596, 238,1077,1692,1912,2844, 605, 499,1742,3947, 241,3053, 980,1749, 936, +2640,4511,2582, 515,1543,2162,5322,2892,2993, 890,2148,1924, 665,1827,3581,1032, + 968,3163, 339,1044,1896, 270, 583,1791,1720,4367,1194,3488,3669, 43,2523,1657, + 163,2167, 290,1209,1622,3378, 
550, 634,2508,2510, 695,2634,2384,2512,1476,1414, + 220,1469,2341,2138,2852,3183,2900,4939,2865,3502,1211,3680, 854,3227,1299,2976, +3172, 186,2998,1459, 443,1067,3251,1495, 321,1932,3054, 909, 753,1410,1828, 436, +2441,1119,1587,3164,2186,1258, 227, 231,1425,1890,3200,3942, 247, 959, 725,5254, +2741, 577,2158,2079, 929, 120, 174, 838,2813, 591,1115, 417,2024, 40,3240,1536, +1037, 291,4151,2354, 632,1298,2406,2500,3535,1825,1846,3451, 205,1171, 345,4238, + 18,1163, 811, 685,2208,1217, 425,1312,1508,1175,4308,2552,1033, 587,1381,3059, +2984,3482, 340,1316,4023,3972, 792,3176, 519, 777,4690, 918, 933,4130,2981,3741, + 90,3360,2911,2200,5184,4550, 609,3079,2030, 272,3379,2736, 363,3881,1130,1447, + 286, 779, 357,1169,3350,3137,1630,1220,2687,2391, 747,1277,3688,2618,2682,2601, +1156,3196,5290,4034,3102,1689,3596,3128, 874, 219,2783, 798, 508,1843,2461, 269, +1658,1776,1392,1913,2983,3287,2866,2159,2372, 829,4076, 46,4253,2873,1889,1894, + 915,1834,1631,2181,2318, 298, 664,2818,3555,2735, 954,3228,3117, 527,3511,2173, + 681,2712,3033,2247,2346,3467,1652, 155,2164,3382, 113,1994, 450, 899, 494, 994, +1237,2958,1875,2336,1926,3727, 545,1577,1550, 633,3473, 204,1305,3072,2410,1956, +2471, 707,2134, 841,2195,2196,2663,3843,1026,4940, 990,3252,4997, 368,1092, 437, +3212,3258,1933,1829, 675,2977,2893, 412, 943,3723,4644,3294,3283,2230,2373,5154, +2389,2241,2661,2323,1404,2524, 593, 787, 677,3008,1275,2059, 438,2709,2609,2240, +2269,2246,1446, 36,1568,1373,3892,1574,2301,1456,3962, 693,2276,5216,2035,1143, +2720,1919,1797,1811,2763,4137,2597,1830,1699,1488,1198,2090, 424,1694, 312,3634, +3390,4179,3335,2252,1214, 561,1059,3243,2295,2561, 975,5155,2321,2751,3772, 472, +1537,3282,3398,1047,2077,2348,2878,1323,3340,3076, 690,2906, 51, 369, 170,3541, +1060,2187,2688,3670,2541,1083,1683, 928,3918, 459, 109,4427, 599,3744,4286, 143, +2101,2730,2490, 82,1588,3036,2121, 281,1860, 477,4035,1238,2812,3020,2716,3312, +1530,2188,2055,1317, 843, 636,1808,1173,3495, 649, 181,1002, 147,3641,1159,2414, +3750,2289,2795, 813,3123,2610,1136,4368, 5,3391,4541,2174, 420, 429,1728, 754, +1228,2115,2219, 347,2223,2733, 735,1518,3003,2355,3134,1764,3948,3329,1888,2424, +1001,1234,1972,3321,3363,1672,1021,1450,1584, 226, 765, 655,2526,3404,3244,2302, +3665, 731, 594,2184, 319,1576, 621, 658,2656,4299,2099,3864,1279,2071,2598,2739, + 795,3086,3699,3908,1707,2352,2402,1382,3136,2475,1465,4847,3496,3865,1085,3004, +2591,1084, 213,2287,1963,3565,2250, 822, 793,4574,3187,1772,1789,3050, 595,1484, +1959,2770,1080,2650, 456, 422,2996, 940,3322,4328,4345,3092,2742, 965,2784, 739, +4124, 952,1358,2498,2949,2565, 332,2698,2378, 660,2260,2473,4194,3856,2919, 535, +1260,2651,1208,1428,1300,1949,1303,2942, 433,2455,2450,1251,1946, 614,1269, 641, +1306,1810,2737,3078,2912, 564,2365,1419,1415,1497,4460,2367,2185,1379,3005,1307, +3218,2175,1897,3063, 682,1157,4040,4005,1712,1160,1941,1399, 394, 402,2952,1573, +1151,2986,2404, 862, 299,2033,1489,3006, 346, 171,2886,3401,1726,2932, 168,2533, + 47,2507,1030,3735,1145,3370,1395,1318,1579,3609,4560,2857,4116,1457,2529,1965, + 504,1036,2690,2988,2405, 745,5871, 849,2397,2056,3081, 863,2359,3857,2096, 99, +1397,1769,2300,4428,1643,3455,1978,1757,3718,1440, 35,4879,3742,1296,4228,2280, + 160,5063,1599,2013, 166, 520,3479,1646,3345,3012, 490,1937,1545,1264,2182,2505, +1096,1188,1369,1436,2421,1667,2792,2460,1270,2122, 727,3167,2143, 806,1706,1012, +1800,3037, 960,2218,1882, 805, 139,2456,1139,1521, 851,1052,3093,3089, 342,2039, + 744,5097,1468,1502,1585,2087, 223, 939, 326,2140,2577, 
892,2481,1623,4077, 982, +3708, 135,2131, 87,2503,3114,2326,1106, 876,1616, 547,2997,2831,2093,3441,4530, +4314, 9,3256,4229,4148, 659,1462,1986,1710,2046,2913,2231,4090,4880,5255,3392, +3274,1368,3689,4645,1477, 705,3384,3635,1068,1529,2941,1458,3782,1509, 100,1656, +2548, 718,2339, 408,1590,2780,3548,1838,4117,3719,1345,3530, 717,3442,2778,3220, +2898,1892,4590,3614,3371,2043,1998,1224,3483, 891, 635, 584,2559,3355, 733,1766, +1729,1172,3789,1891,2307, 781,2982,2271,1957,1580,5773,2633,2005,4195,3097,1535, +3213,1189,1934,5693,3262, 586,3118,1324,1598, 517,1564,2217,1868,1893,4445,3728, +2703,3139,1526,1787,1992,3882,2875,1549,1199,1056,2224,1904,2711,5098,4287, 338, +1993,3129,3489,2689,1809,2815,1997, 957,1855,3898,2550,3275,3057,1105,1319, 627, +1505,1911,1883,3526, 698,3629,3456,1833,1431, 746, 77,1261,2017,2296,1977,1885, + 125,1334,1600, 525,1798,1109,2222,1470,1945, 559,2236,1186,3443,2476,1929,1411, +2411,3135,1777,3372,2621,1841,1613,3229, 668,1430,1839,2643,2916, 195,1989,2671, +2358,1387, 629,3205,2293,5256,4439, 123,1310, 888,1879,4300,3021,3605,1003,1162, +3192,2910,2010, 140,2395,2859, 55,1082,2012,2901, 662, 419,2081,1438, 680,2774, +4654,3912,1620,1731,1625,5035,4065,2328, 512,1344, 802,5443,2163,2311,2537, 524, +3399, 98,1155,2103,1918,2606,3925,2816,1393,2465,1504,3773,2177,3963,1478,4346, + 180,1113,4655,3461,2028,1698, 833,2696,1235,1322,1594,4408,3623,3013,3225,2040, +3022, 541,2881, 607,3632,2029,1665,1219, 639,1385,1686,1099,2803,3231,1938,3188, +2858, 427, 676,2772,1168,2025, 454,3253,2486,3556, 230,1950, 580, 791,1991,1280, +1086,1974,2034, 630, 257,3338,2788,4903,1017, 86,4790, 966,2789,1995,1696,1131, + 259,3095,4188,1308, 179,1463,5257, 289,4107,1248, 42,3413,1725,2288, 896,1947, + 774,4474,4254, 604,3430,4264, 392,2514,2588, 452, 237,1408,3018, 988,4531,1970, +3034,3310, 540,2370,1562,1288,2990, 502,4765,1147, 4,1853,2708, 207, 294,2814, +4078,2902,2509, 684, 34,3105,3532,2551, 644, 709,2801,2344, 573,1727,3573,3557, +2021,1081,3100,4315,2100,3681, 199,2263,1837,2385, 146,3484,1195,2776,3949, 997, +1939,3973,1008,1091,1202,1962,1847,1149,4209,5444,1076, 493, 117,5400,2521, 972, +1490,2934,1796,4542,2374,1512,2933,2657, 413,2888,1135,2762,2314,2156,1355,2369, + 766,2007,2527,2170,3124,2491,2593,2632,4757,2437, 234,3125,3591,1898,1750,1376, +1942,3468,3138, 570,2127,2145,3276,4131, 962, 132,1445,4196, 19, 941,3624,3480, +3366,1973,1374,4461,3431,2629, 283,2415,2275, 808,2887,3620,2112,2563,1353,3610, + 955,1089,3103,1053, 96, 88,4097, 823,3808,1583, 399, 292,4091,3313, 421,1128, + 642,4006, 903,2539,1877,2082, 596, 29,4066,1790, 722,2157, 130, 995,1569, 769, +1485, 464, 513,2213, 288,1923,1101,2453,4316, 133, 486,2445, 50, 625, 487,2207, + 57, 423, 481,2962, 159,3729,1558, 491, 303, 482, 501, 240,2837, 112,3648,2392, +1783, 362, 8,3433,3422, 610,2793,3277,1390,1284,1654, 21,3823, 734, 367, 623, + 193, 287, 374,1009,1483, 816, 476, 313,2255,2340,1262,2150,2899,1146,2581, 782, +2116,1659,2018,1880, 255,3586,3314,1110,2867,2137,2564, 986,2767,5185,2006, 650, + 158, 926, 762, 881,3157,2717,2362,3587, 306,3690,3245,1542,3077,2427,1691,2478, +2118,2985,3490,2438, 539,2305, 983, 129,1754, 355,4201,2386, 827,2923, 104,1773, +2838,2771, 411,2905,3919, 376, 767, 122,1114, 828,2422,1817,3506, 266,3460,1007, +1609,4998, 945,2612,4429,2274, 726,1247,1964,2914,2199,2070,4002,4108, 657,3323, +1422, 579, 455,2764,4737,1222,2895,1670, 824,1223,1487,2525, 558, 861,3080, 598, +2659,2515,1967, 752,2583,2376,2214,4180, 977, 704,2464,4999,2622,4109,1210,2961, + 819,1541, 142,2284, 
44, 418, 457,1126,3730,4347,4626,1644,1876,3671,1864, 302, +1063,5694, 624, 723,1984,3745,1314,1676,2488,1610,1449,3558,3569,2166,2098, 409, +1011,2325,3704,2306, 818,1732,1383,1824,1844,3757, 999,2705,3497,1216,1423,2683, +2426,2954,2501,2726,2229,1475,2554,5064,1971,1794,1666,2014,1343, 783, 724, 191, +2434,1354,2220,5065,1763,2752,2472,4152, 131, 175,2885,3434, 92,1466,4920,2616, +3871,3872,3866, 128,1551,1632, 669,1854,3682,4691,4125,1230, 188,2973,3290,1302, +1213, 560,3266, 917, 763,3909,3249,1760, 868,1958, 764,1782,2097, 145,2277,3774, +4462, 64,1491,3062, 971,2132,3606,2442, 221,1226,1617, 218, 323,1185,3207,3147, + 571, 619,1473,1005,1744,2281, 449,1887,2396,3685, 275, 375,3816,1743,3844,3731, + 845,1983,2350,4210,1377, 773, 967,3499,3052,3743,2725,4007,1697,1022,3943,1464, +3264,2855,2722,1952,1029,2839,2467, 84,4383,2215, 820,1391,2015,2448,3672, 377, +1948,2168, 797,2545,3536,2578,2645, 94,2874,1678, 405,1259,3071, 771, 546,1315, + 470,1243,3083, 895,2468, 981, 969,2037, 846,4181, 653,1276,2928, 14,2594, 557, +3007,2474, 156, 902,1338,1740,2574, 537,2518, 973,2282,2216,2433,1928, 138,2903, +1293,2631,1612, 646,3457, 839,2935, 111, 496,2191,2847, 589,3186, 149,3994,2060, +4031,2641,4067,3145,1870, 37,3597,2136,1025,2051,3009,3383,3549,1121,1016,3261, +1301, 251,2446,2599,2153, 872,3246, 637, 334,3705, 831, 884, 921,3065,3140,4092, +2198,1944, 246,2964, 108,2045,1152,1921,2308,1031, 203,3173,4170,1907,3890, 810, +1401,2003,1690, 506, 647,1242,2828,1761,1649,3208,2249,1589,3709,2931,5156,1708, + 498, 666,2613, 834,3817,1231, 184,2851,1124, 883,3197,2261,3710,1765,1553,2658, +1178,2639,2351, 93,1193, 942,2538,2141,4402, 235,1821, 870,1591,2192,1709,1871, +3341,1618,4126,2595,2334, 603, 651, 69, 701, 268,2662,3411,2555,1380,1606, 503, + 448, 254,2371,2646, 574,1187,2309,1770, 322,2235,1292,1801, 305, 566,1133, 229, +2067,2057, 706, 167, 483,2002,2672,3295,1820,3561,3067, 316, 378,2746,3452,1112, + 136,1981, 507,1651,2917,1117, 285,4591, 182,2580,3522,1304, 335,3303,1835,2504, +1795,1792,2248, 674,1018,2106,2449,1857,2292,2845, 976,3047,1781,2600,2727,1389, +1281, 52,3152, 153, 265,3950, 672,3485,3951,4463, 430,1183, 365, 278,2169, 27, +1407,1336,2304, 209,1340,1730,2202,1852,2403,2883, 979,1737,1062, 631,2829,2542, +3876,2592, 825,2086,2226,3048,3625, 352,1417,3724, 542, 991, 431,1351,3938,1861, +2294, 826,1361,2927,3142,3503,1738, 463,2462,2723, 582,1916,1595,2808, 400,3845, +3891,2868,3621,2254, 58,2492,1123, 910,2160,2614,1372,1603,1196,1072,3385,1700, +3267,1980, 696, 480,2430, 920, 799,1570,2920,1951,2041,4047,2540,1321,4223,2469, +3562,2228,1271,2602, 401,2833,3351,2575,5157, 907,2312,1256, 410, 263,3507,1582, + 996, 678,1849,2316,1480, 908,3545,2237, 703,2322, 667,1826,2849,1531,2604,2999, +2407,3146,2151,2630,1786,3711, 469,3542, 497,3899,2409, 858, 837,4446,3393,1274, + 786, 620,1845,2001,3311, 484, 308,3367,1204,1815,3691,2332,1532,2557,1842,2020, +2724,1927,2333,4440, 567, 22,1673,2728,4475,1987,1858,1144,1597, 101,1832,3601, + 12, 974,3783,4391, 951,1412, 1,3720, 453,4608,4041, 528,1041,1027,3230,2628, +1129, 875,1051,3291,1203,2262,1069,2860,2799,2149,2615,3278, 144,1758,3040, 31, + 475,1680, 366,2685,3184, 311,1642,4008,2466,5036,1593,1493,2809, 216,1420,1668, + 233, 304,2128,3284, 232,1429,1768,1040,2008,3407,2740,2967,2543, 242,2133, 778, +1565,2022,2620, 505,2189,2756,1098,2273, 372,1614, 708, 553,2846,2094,2278, 169, +3626,2835,4161, 228,2674,3165, 809,1454,1309, 466,1705,1095, 900,3423, 880,2667, +3751,5258,2317,3109,2571,4317,2766,1503,1342, 866,4447,1118, 
63,2076, 314,1881, +1348,1061, 172, 978,3515,1747, 532, 511,3970, 6, 601, 905,2699,3300,1751, 276, +1467,3725,2668, 65,4239,2544,2779,2556,1604, 578,2451,1802, 992,2331,2624,1320, +3446, 713,1513,1013, 103,2786,2447,1661, 886,1702, 916, 654,3574,2031,1556, 751, +2178,2821,2179,1498,1538,2176, 271, 914,2251,2080,1325, 638,1953,2937,3877,2432, +2754, 95,3265,1716, 260,1227,4083, 775, 106,1357,3254, 426,1607, 555,2480, 772, +1985, 244,2546, 474, 495,1046,2611,1851,2061, 71,2089,1675,2590, 742,3758,2843, +3222,1433, 267,2180,2576,2826,2233,2092,3913,2435, 956,1745,3075, 856,2113,1116, + 451, 3,1988,2896,1398, 993,2463,1878,2049,1341,2718,2721,2870,2108, 712,2904, +4363,2753,2324, 277,2872,2349,2649, 384, 987, 435, 691,3000, 922, 164,3939, 652, +1500,1184,4153,2482,3373,2165,4848,2335,3775,3508,3154,2806,2830,1554,2102,1664, +2530,1434,2408, 893,1547,2623,3447,2832,2242,2532,3169,2856,3223,2078, 49,3770, +3469, 462, 318, 656,2259,3250,3069, 679,1629,2758, 344,1138,1104,3120,1836,1283, +3115,2154,1437,4448, 934, 759,1999, 794,2862,1038, 533,2560,1722,2342, 855,2626, +1197,1663,4476,3127, 85,4240,2528, 25,1111,1181,3673, 407,3470,4561,2679,2713, + 768,1925,2841,3986,1544,1165, 932, 373,1240,2146,1930,2673, 721,4766, 354,4333, + 391,2963, 187, 61,3364,1442,1102, 330,1940,1767, 341,3809,4118, 393,2496,2062, +2211, 105, 331, 300, 439, 913,1332, 626, 379,3304,1557, 328, 689,3952, 309,1555, + 931, 317,2517,3027, 325, 569, 686,2107,3084, 60,1042,1333,2794, 264,3177,4014, +1628, 258,3712, 7,4464,1176,1043,1778, 683, 114,1975, 78,1492, 383,1886, 510, + 386, 645,5291,2891,2069,3305,4138,3867,2939,2603,2493,1935,1066,1848,3588,1015, +1282,1289,4609, 697,1453,3044,2666,3611,1856,2412, 54, 719,1330, 568,3778,2459, +1748, 788, 492, 551,1191,1000, 488,3394,3763, 282,1799, 348,2016,1523,3155,2390, +1049, 382,2019,1788,1170, 729,2968,3523, 897,3926,2785,2938,3292, 350,2319,3238, +1718,1717,2655,3453,3143,4465, 161,2889,2980,2009,1421, 56,1908,1640,2387,2232, +1917,1874,2477,4921, 148, 83,3438, 592,4245,2882,1822,1055, 741, 115,1496,1624, + 381,1638,4592,1020, 516,3214, 458, 947,4575,1432, 211,1514,2926,1865,2142, 189, + 852,1221,1400,1486, 882,2299,4036, 351, 28,1122, 700,6479,6480,6481,6482,6483, #last 512 +) + diff --git a/env/lib/python3.6/site-packages/chardet/gb2312prober.py b/env/lib/python3.6/site-packages/chardet/gb2312prober.py new file mode 100644 index 0000000..8446d2d --- /dev/null +++ b/env/lib/python3.6/site-packages/chardet/gb2312prober.py @@ -0,0 +1,46 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .mbcharsetprober import MultiByteCharSetProber +from .codingstatemachine import CodingStateMachine +from .chardistribution import GB2312DistributionAnalysis +from .mbcssm import GB2312_SM_MODEL + +class GB2312Prober(MultiByteCharSetProber): + def __init__(self): + super(GB2312Prober, self).__init__() + self.coding_sm = CodingStateMachine(GB2312_SM_MODEL) + self.distribution_analyzer = GB2312DistributionAnalysis() + self.reset() + + @property + def charset_name(self): + return "GB2312" + + @property + def language(self): + return "Chinese" diff --git a/env/lib/python3.6/site-packages/chardet/hebrewprober.py b/env/lib/python3.6/site-packages/chardet/hebrewprober.py new file mode 100644 index 0000000..b0e1bf4 --- /dev/null +++ b/env/lib/python3.6/site-packages/chardet/hebrewprober.py @@ -0,0 +1,292 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Shy Shalom +# Portions created by the Initial Developer are Copyright (C) 2005 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetprober import CharSetProber +from .enums import ProbingState + +# This prober doesn't actually recognize a language or a charset. +# It is a helper prober for the use of the Hebrew model probers + +### General ideas of the Hebrew charset recognition ### +# +# Four main charsets exist in Hebrew: +# "ISO-8859-8" - Visual Hebrew +# "windows-1255" - Logical Hebrew +# "ISO-8859-8-I" - Logical Hebrew +# "x-mac-hebrew" - ?? Logical Hebrew ?? +# +# Both "ISO" charsets use a completely identical set of code points, whereas +# "windows-1255" and "x-mac-hebrew" are two different proper supersets of +# these code points. windows-1255 defines additional characters in the range +# 0x80-0x9F as some misc punctuation marks as well as some Hebrew-specific +# diacritics and additional 'Yiddish' ligature letters in the range 0xc0-0xd6. +# x-mac-hebrew defines similar additional code points but with a different +# mapping. +# +# As far as an average Hebrew text with no diacritics is concerned, all four +# charsets are identical with respect to code points. Meaning that for the +# main Hebrew alphabet, all four map the same values to all 27 Hebrew letters +# (including final letters). 
+#
+# The dominant difference between these charsets is their directionality.
+# "Visual" directionality means that the text is ordered as if the renderer is
+# not aware of a BIDI rendering algorithm. The renderer sees the text and
+# draws it from left to right. The text itself, when ordered naturally, is read
+# backwards. A buffer of Visual Hebrew generally looks like so:
+# "[last word of first line spelled backwards] [whole line ordered backwards
+# and spelled backwards] [first word of first line spelled backwards]
+# [end of line] [last word of second line] ... etc' "
+# Adding punctuation marks, numbers and English text to visual text is
+# naturally also "visual" and from left to right.
+#
+# "Logical" directionality means the text is ordered "naturally" according to
+# the order it is read. It is the responsibility of the renderer to display
+# the text from right to left. A BIDI algorithm is used to place general
+# punctuation marks, numbers and English text in the text.
+#
+# Texts in x-mac-hebrew are almost impossible to find on the Internet. From
+# what little evidence I could find, it seems that its general directionality
+# is Logical.
+#
+# To sum up all of the above, the Hebrew probing mechanism knows about two
+# charsets:
+# Visual Hebrew - "ISO-8859-8" - backwards text - Words and sentences are
+#    backwards while line order is natural. For charset recognition purposes
+#    the line order is unimportant (In fact, for this implementation, even
+#    word order is unimportant).
+# Logical Hebrew - "windows-1255" - normal, naturally ordered text.
+#
+# "ISO-8859-8-I" is a subset of windows-1255 and doesn't need to be
+#    specifically identified.
+# "x-mac-hebrew" is also identified as windows-1255. A text in x-mac-hebrew
+#    that contains special punctuation marks or diacritics is displayed with
+#    some unconverted characters showing as question marks. This problem might
+#    be corrected using another model prober for x-mac-hebrew. Because
+#    x-mac-hebrew texts are so rare, writing another model prober isn't
+#    worth the effort and performance hit.
+#
+#### The Prober ####
+#
+# The prober is divided between two SBCharSetProbers and a HebrewProber,
+# all of which are managed, created, fed data, queried and deleted by the
+# SBCSGroupProber. The two SBCharSetProbers identify that the text is in
+# fact some kind of Hebrew, Logical or Visual. The final decision about which
+# one it is is made by the HebrewProber by combining final-letter scores
+# with the scores of the two SBCharSetProbers to produce a final answer.
+#
+# The SBCSGroupProber is responsible for stripping the original text of HTML
+# tags, English characters, numbers, low-ASCII punctuation characters, spaces
+# and new lines. It reduces any sequence of such characters to a single space.
+# The buffer fed to each prober in the SBCS group prober is pure text in
+# high-ASCII.
+# The two SBCharSetProbers (model probers) share the same language model:
+# Win1255Model.
+# The first SBCharSetProber uses the model normally as any other
+# SBCharSetProber does, to recognize windows-1255, upon which this model was
+# built. The second SBCharSetProber is told to make the pair-of-letter
+# lookup in the language model backwards. This in practice exactly simulates
+# a visual Hebrew model using the windows-1255 logical Hebrew model.
+#
+# The HebrewProber does not use any language model. All it does is look for
+# final-letter evidence suggesting the text is either logical Hebrew or visual
+# Hebrew.
Disjointed from the model probers, the results of the HebrewProber +# alone are meaningless. HebrewProber always returns 0.00 as confidence +# since it never identifies a charset by itself. Instead, the pointer to the +# HebrewProber is passed to the model probers as a helper "Name Prober". +# When the Group prober receives a positive identification from any prober, +# it asks for the name of the charset identified. If the prober queried is a +# Hebrew model prober, the model prober forwards the call to the +# HebrewProber to make the final decision. In the HebrewProber, the +# decision is made according to the final-letters scores maintained and Both +# model probers scores. The answer is returned in the form of the name of the +# charset identified, either "windows-1255" or "ISO-8859-8". + +class HebrewProber(CharSetProber): + # windows-1255 / ISO-8859-8 code points of interest + FINAL_KAF = 0xea + NORMAL_KAF = 0xeb + FINAL_MEM = 0xed + NORMAL_MEM = 0xee + FINAL_NUN = 0xef + NORMAL_NUN = 0xf0 + FINAL_PE = 0xf3 + NORMAL_PE = 0xf4 + FINAL_TSADI = 0xf5 + NORMAL_TSADI = 0xf6 + + # Minimum Visual vs Logical final letter score difference. + # If the difference is below this, don't rely solely on the final letter score + # distance. + MIN_FINAL_CHAR_DISTANCE = 5 + + # Minimum Visual vs Logical model score difference. + # If the difference is below this, don't rely at all on the model score + # distance. + MIN_MODEL_DISTANCE = 0.01 + + VISUAL_HEBREW_NAME = "ISO-8859-8" + LOGICAL_HEBREW_NAME = "windows-1255" + + def __init__(self): + super(HebrewProber, self).__init__() + self._final_char_logical_score = None + self._final_char_visual_score = None + self._prev = None + self._before_prev = None + self._logical_prober = None + self._visual_prober = None + self.reset() + + def reset(self): + self._final_char_logical_score = 0 + self._final_char_visual_score = 0 + # The two last characters seen in the previous buffer, + # mPrev and mBeforePrev are initialized to space in order to simulate + # a word delimiter at the beginning of the data + self._prev = ' ' + self._before_prev = ' ' + # These probers are owned by the group prober. + + def set_model_probers(self, logicalProber, visualProber): + self._logical_prober = logicalProber + self._visual_prober = visualProber + + def is_final(self, c): + return c in [self.FINAL_KAF, self.FINAL_MEM, self.FINAL_NUN, + self.FINAL_PE, self.FINAL_TSADI] + + def is_non_final(self, c): + # The normal Tsadi is not a good Non-Final letter due to words like + # 'lechotet' (to chat) containing an apostrophe after the tsadi. This + # apostrophe is converted to a space in FilterWithoutEnglishLetters + # causing the Non-Final tsadi to appear at an end of a word even + # though this is not the case in the original text. + # The letters Pe and Kaf rarely display a related behavior of not being + # a good Non-Final letter. Words like 'Pop', 'Winamp' and 'Mubarak' + # for example legally end with a Non-Final Pe or Kaf. However, the + # benefit of these letters as Non-Final letters outweighs the damage + # since these words are quite rare. + return c in [self.NORMAL_KAF, self.NORMAL_MEM, + self.NORMAL_NUN, self.NORMAL_PE] + + def feed(self, byte_str): + # Final letter analysis for logical-visual decision. + # Look for evidence that the received buffer is either logical Hebrew + # or visual Hebrew. + # The following cases are checked: + # 1) A word longer than 1 letter, ending with a final letter. 
This is + # an indication that the text is laid out "naturally" since the + # final letter really appears at the end. +1 for logical score. + # 2) A word longer than 1 letter, ending with a Non-Final letter. In + # normal Hebrew, words ending with Kaf, Mem, Nun, Pe or Tsadi, + # should not end with the Non-Final form of that letter. Exceptions + # to this rule are mentioned above in isNonFinal(). This is an + # indication that the text is laid out backwards. +1 for visual + # score + # 3) A word longer than 1 letter, starting with a final letter. Final + # letters should not appear at the beginning of a word. This is an + # indication that the text is laid out backwards. +1 for visual + # score. + # + # The visual score and logical score are accumulated throughout the + # text and are finally checked against each other in GetCharSetName(). + # No checking for final letters in the middle of words is done since + # that case is not an indication for either Logical or Visual text. + # + # We automatically filter out all 7-bit characters (replace them with + # spaces) so the word boundary detection works properly. [MAP] + + if self.state == ProbingState.NOT_ME: + # Both model probers say it's not them. No reason to continue. + return ProbingState.NOT_ME + + byte_str = self.filter_high_byte_only(byte_str) + + for cur in byte_str: + if cur == ' ': + # We stand on a space - a word just ended + if self._before_prev != ' ': + # next-to-last char was not a space so self._prev is not a + # 1 letter word + if self.is_final(self._prev): + # case (1) [-2:not space][-1:final letter][cur:space] + self._final_char_logical_score += 1 + elif self.is_non_final(self._prev): + # case (2) [-2:not space][-1:Non-Final letter][ + # cur:space] + self._final_char_visual_score += 1 + else: + # Not standing on a space + if ((self._before_prev == ' ') and + (self.is_final(self._prev)) and (cur != ' ')): + # case (3) [-2:space][-1:final letter][cur:not space] + self._final_char_visual_score += 1 + self._before_prev = self._prev + self._prev = cur + + # Forever detecting, till the end or until both model probers return + # ProbingState.NOT_ME (handled above) + return ProbingState.DETECTING + + @property + def charset_name(self): + # Make the decision: is it Logical or Visual? + # If the final letter score distance is dominant enough, rely on it. + finalsub = self._final_char_logical_score - self._final_char_visual_score + if finalsub >= self.MIN_FINAL_CHAR_DISTANCE: + return self.LOGICAL_HEBREW_NAME + if finalsub <= -self.MIN_FINAL_CHAR_DISTANCE: + return self.VISUAL_HEBREW_NAME + + # It's not dominant enough, try to rely on the model scores instead. + modelsub = (self._logical_prober.get_confidence() + - self._visual_prober.get_confidence()) + if modelsub > self.MIN_MODEL_DISTANCE: + return self.LOGICAL_HEBREW_NAME + if modelsub < -self.MIN_MODEL_DISTANCE: + return self.VISUAL_HEBREW_NAME + + # Still no good, back to final letter distance, maybe it'll save the + # day. + if finalsub < 0.0: + return self.VISUAL_HEBREW_NAME + + # (finalsub > 0 - Logical) or (don't know what to do) default to + # Logical. + return self.LOGICAL_HEBREW_NAME + + @property + def language(self): + return 'Hebrew' + + @property + def state(self): + # Remain active as long as any of the model probers are active. 
+        if (self._logical_prober.state == ProbingState.NOT_ME) and \
+                (self._visual_prober.state == ProbingState.NOT_ME):
+            return ProbingState.NOT_ME
+        return ProbingState.DETECTING
diff --git a/env/lib/python3.6/site-packages/chardet/jisfreq.py b/env/lib/python3.6/site-packages/chardet/jisfreq.py
new file mode 100644
index 0000000..83fc082
--- /dev/null
+++ b/env/lib/python3.6/site-packages/chardet/jisfreq.py
@@ -0,0 +1,325 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Communicator client code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+#   Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301  USA
+######################### END LICENSE BLOCK #########################
+
+# Sampling from about 20M text materials, including literature and computer technology
+#
+# Japanese frequency table, applied to both S-JIS and EUC-JP
+# They are sorted in order.
+ +# 128 --> 0.77094 +# 256 --> 0.85710 +# 512 --> 0.92635 +# 1024 --> 0.97130 +# 2048 --> 0.99431 +# +# Ideal Distribution Ratio = 0.92635 / (1-0.92635) = 12.58 +# Random Distribution Ration = 512 / (2965+62+83+86-512) = 0.191 +# +# Typical Distribution Ratio, 25% of IDR + +JIS_TYPICAL_DISTRIBUTION_RATIO = 3.0 + +# Char to FreqOrder table , +JIS_TABLE_SIZE = 4368 + +JIS_CHAR_TO_FREQ_ORDER = ( + 40, 1, 6, 182, 152, 180, 295,2127, 285, 381,3295,4304,3068,4606,3165,3510, # 16 +3511,1822,2785,4607,1193,2226,5070,4608, 171,2996,1247, 18, 179,5071, 856,1661, # 32 +1262,5072, 619, 127,3431,3512,3230,1899,1700, 232, 228,1294,1298, 284, 283,2041, # 48 +2042,1061,1062, 48, 49, 44, 45, 433, 434,1040,1041, 996, 787,2997,1255,4305, # 64 +2108,4609,1684,1648,5073,5074,5075,5076,5077,5078,3687,5079,4610,5080,3927,3928, # 80 +5081,3296,3432, 290,2285,1471,2187,5082,2580,2825,1303,2140,1739,1445,2691,3375, # 96 +1691,3297,4306,4307,4611, 452,3376,1182,2713,3688,3069,4308,5083,5084,5085,5086, # 112 +5087,5088,5089,5090,5091,5092,5093,5094,5095,5096,5097,5098,5099,5100,5101,5102, # 128 +5103,5104,5105,5106,5107,5108,5109,5110,5111,5112,4097,5113,5114,5115,5116,5117, # 144 +5118,5119,5120,5121,5122,5123,5124,5125,5126,5127,5128,5129,5130,5131,5132,5133, # 160 +5134,5135,5136,5137,5138,5139,5140,5141,5142,5143,5144,5145,5146,5147,5148,5149, # 176 +5150,5151,5152,4612,5153,5154,5155,5156,5157,5158,5159,5160,5161,5162,5163,5164, # 192 +5165,5166,5167,5168,5169,5170,5171,5172,5173,5174,5175,1472, 598, 618, 820,1205, # 208 +1309,1412,1858,1307,1692,5176,5177,5178,5179,5180,5181,5182,1142,1452,1234,1172, # 224 +1875,2043,2149,1793,1382,2973, 925,2404,1067,1241, 960,1377,2935,1491, 919,1217, # 240 +1865,2030,1406,1499,2749,4098,5183,5184,5185,5186,5187,5188,2561,4099,3117,1804, # 256 +2049,3689,4309,3513,1663,5189,3166,3118,3298,1587,1561,3433,5190,3119,1625,2998, # 272 +3299,4613,1766,3690,2786,4614,5191,5192,5193,5194,2161, 26,3377, 2,3929, 20, # 288 +3691, 47,4100, 50, 17, 16, 35, 268, 27, 243, 42, 155, 24, 154, 29, 184, # 304 + 4, 91, 14, 92, 53, 396, 33, 289, 9, 37, 64, 620, 21, 39, 321, 5, # 320 + 12, 11, 52, 13, 3, 208, 138, 0, 7, 60, 526, 141, 151,1069, 181, 275, # 336 +1591, 83, 132,1475, 126, 331, 829, 15, 69, 160, 59, 22, 157, 55,1079, 312, # 352 + 109, 38, 23, 25, 10, 19, 79,5195, 61, 382,1124, 8, 30,5196,5197,5198, # 368 +5199,5200,5201,5202,5203,5204,5205,5206, 89, 62, 74, 34,2416, 112, 139, 196, # 384 + 271, 149, 84, 607, 131, 765, 46, 88, 153, 683, 76, 874, 101, 258, 57, 80, # 400 + 32, 364, 121,1508, 169,1547, 68, 235, 145,2999, 41, 360,3027, 70, 63, 31, # 416 + 43, 259, 262,1383, 99, 533, 194, 66, 93, 846, 217, 192, 56, 106, 58, 565, # 432 + 280, 272, 311, 256, 146, 82, 308, 71, 100, 128, 214, 655, 110, 261, 104,1140, # 448 + 54, 51, 36, 87, 67,3070, 185,2618,2936,2020, 28,1066,2390,2059,5207,5208, # 464 +5209,5210,5211,5212,5213,5214,5215,5216,4615,5217,5218,5219,5220,5221,5222,5223, # 480 +5224,5225,5226,5227,5228,5229,5230,5231,5232,5233,5234,5235,5236,3514,5237,5238, # 496 +5239,5240,5241,5242,5243,5244,2297,2031,4616,4310,3692,5245,3071,5246,3598,5247, # 512 +4617,3231,3515,5248,4101,4311,4618,3808,4312,4102,5249,4103,4104,3599,5250,5251, # 528 +5252,5253,5254,5255,5256,5257,5258,5259,5260,5261,5262,5263,5264,5265,5266,5267, # 544 +5268,5269,5270,5271,5272,5273,5274,5275,5276,5277,5278,5279,5280,5281,5282,5283, # 560 +5284,5285,5286,5287,5288,5289,5290,5291,5292,5293,5294,5295,5296,5297,5298,5299, # 576 +5300,5301,5302,5303,5304,5305,5306,5307,5308,5309,5310,5311,5312,5313,5314,5315, # 592 
+5316,5317,5318,5319,5320,5321,5322,5323,5324,5325,5326,5327,5328,5329,5330,5331, # 608 +5332,5333,5334,5335,5336,5337,5338,5339,5340,5341,5342,5343,5344,5345,5346,5347, # 624 +5348,5349,5350,5351,5352,5353,5354,5355,5356,5357,5358,5359,5360,5361,5362,5363, # 640 +5364,5365,5366,5367,5368,5369,5370,5371,5372,5373,5374,5375,5376,5377,5378,5379, # 656 +5380,5381, 363, 642,2787,2878,2788,2789,2316,3232,2317,3434,2011, 165,1942,3930, # 672 +3931,3932,3933,5382,4619,5383,4620,5384,5385,5386,5387,5388,5389,5390,5391,5392, # 688 +5393,5394,5395,5396,5397,5398,5399,5400,5401,5402,5403,5404,5405,5406,5407,5408, # 704 +5409,5410,5411,5412,5413,5414,5415,5416,5417,5418,5419,5420,5421,5422,5423,5424, # 720 +5425,5426,5427,5428,5429,5430,5431,5432,5433,5434,5435,5436,5437,5438,5439,5440, # 736 +5441,5442,5443,5444,5445,5446,5447,5448,5449,5450,5451,5452,5453,5454,5455,5456, # 752 +5457,5458,5459,5460,5461,5462,5463,5464,5465,5466,5467,5468,5469,5470,5471,5472, # 768 +5473,5474,5475,5476,5477,5478,5479,5480,5481,5482,5483,5484,5485,5486,5487,5488, # 784 +5489,5490,5491,5492,5493,5494,5495,5496,5497,5498,5499,5500,5501,5502,5503,5504, # 800 +5505,5506,5507,5508,5509,5510,5511,5512,5513,5514,5515,5516,5517,5518,5519,5520, # 816 +5521,5522,5523,5524,5525,5526,5527,5528,5529,5530,5531,5532,5533,5534,5535,5536, # 832 +5537,5538,5539,5540,5541,5542,5543,5544,5545,5546,5547,5548,5549,5550,5551,5552, # 848 +5553,5554,5555,5556,5557,5558,5559,5560,5561,5562,5563,5564,5565,5566,5567,5568, # 864 +5569,5570,5571,5572,5573,5574,5575,5576,5577,5578,5579,5580,5581,5582,5583,5584, # 880 +5585,5586,5587,5588,5589,5590,5591,5592,5593,5594,5595,5596,5597,5598,5599,5600, # 896 +5601,5602,5603,5604,5605,5606,5607,5608,5609,5610,5611,5612,5613,5614,5615,5616, # 912 +5617,5618,5619,5620,5621,5622,5623,5624,5625,5626,5627,5628,5629,5630,5631,5632, # 928 +5633,5634,5635,5636,5637,5638,5639,5640,5641,5642,5643,5644,5645,5646,5647,5648, # 944 +5649,5650,5651,5652,5653,5654,5655,5656,5657,5658,5659,5660,5661,5662,5663,5664, # 960 +5665,5666,5667,5668,5669,5670,5671,5672,5673,5674,5675,5676,5677,5678,5679,5680, # 976 +5681,5682,5683,5684,5685,5686,5687,5688,5689,5690,5691,5692,5693,5694,5695,5696, # 992 +5697,5698,5699,5700,5701,5702,5703,5704,5705,5706,5707,5708,5709,5710,5711,5712, # 1008 +5713,5714,5715,5716,5717,5718,5719,5720,5721,5722,5723,5724,5725,5726,5727,5728, # 1024 +5729,5730,5731,5732,5733,5734,5735,5736,5737,5738,5739,5740,5741,5742,5743,5744, # 1040 +5745,5746,5747,5748,5749,5750,5751,5752,5753,5754,5755,5756,5757,5758,5759,5760, # 1056 +5761,5762,5763,5764,5765,5766,5767,5768,5769,5770,5771,5772,5773,5774,5775,5776, # 1072 +5777,5778,5779,5780,5781,5782,5783,5784,5785,5786,5787,5788,5789,5790,5791,5792, # 1088 +5793,5794,5795,5796,5797,5798,5799,5800,5801,5802,5803,5804,5805,5806,5807,5808, # 1104 +5809,5810,5811,5812,5813,5814,5815,5816,5817,5818,5819,5820,5821,5822,5823,5824, # 1120 +5825,5826,5827,5828,5829,5830,5831,5832,5833,5834,5835,5836,5837,5838,5839,5840, # 1136 +5841,5842,5843,5844,5845,5846,5847,5848,5849,5850,5851,5852,5853,5854,5855,5856, # 1152 +5857,5858,5859,5860,5861,5862,5863,5864,5865,5866,5867,5868,5869,5870,5871,5872, # 1168 +5873,5874,5875,5876,5877,5878,5879,5880,5881,5882,5883,5884,5885,5886,5887,5888, # 1184 +5889,5890,5891,5892,5893,5894,5895,5896,5897,5898,5899,5900,5901,5902,5903,5904, # 1200 +5905,5906,5907,5908,5909,5910,5911,5912,5913,5914,5915,5916,5917,5918,5919,5920, # 1216 +5921,5922,5923,5924,5925,5926,5927,5928,5929,5930,5931,5932,5933,5934,5935,5936, # 1232 
+5937,5938,5939,5940,5941,5942,5943,5944,5945,5946,5947,5948,5949,5950,5951,5952, # 1248 +5953,5954,5955,5956,5957,5958,5959,5960,5961,5962,5963,5964,5965,5966,5967,5968, # 1264 +5969,5970,5971,5972,5973,5974,5975,5976,5977,5978,5979,5980,5981,5982,5983,5984, # 1280 +5985,5986,5987,5988,5989,5990,5991,5992,5993,5994,5995,5996,5997,5998,5999,6000, # 1296 +6001,6002,6003,6004,6005,6006,6007,6008,6009,6010,6011,6012,6013,6014,6015,6016, # 1312 +6017,6018,6019,6020,6021,6022,6023,6024,6025,6026,6027,6028,6029,6030,6031,6032, # 1328 +6033,6034,6035,6036,6037,6038,6039,6040,6041,6042,6043,6044,6045,6046,6047,6048, # 1344 +6049,6050,6051,6052,6053,6054,6055,6056,6057,6058,6059,6060,6061,6062,6063,6064, # 1360 +6065,6066,6067,6068,6069,6070,6071,6072,6073,6074,6075,6076,6077,6078,6079,6080, # 1376 +6081,6082,6083,6084,6085,6086,6087,6088,6089,6090,6091,6092,6093,6094,6095,6096, # 1392 +6097,6098,6099,6100,6101,6102,6103,6104,6105,6106,6107,6108,6109,6110,6111,6112, # 1408 +6113,6114,2044,2060,4621, 997,1235, 473,1186,4622, 920,3378,6115,6116, 379,1108, # 1424 +4313,2657,2735,3934,6117,3809, 636,3233, 573,1026,3693,3435,2974,3300,2298,4105, # 1440 + 854,2937,2463, 393,2581,2417, 539, 752,1280,2750,2480, 140,1161, 440, 708,1569, # 1456 + 665,2497,1746,1291,1523,3000, 164,1603, 847,1331, 537,1997, 486, 508,1693,2418, # 1472 +1970,2227, 878,1220, 299,1030, 969, 652,2751, 624,1137,3301,2619, 65,3302,2045, # 1488 +1761,1859,3120,1930,3694,3516, 663,1767, 852, 835,3695, 269, 767,2826,2339,1305, # 1504 + 896,1150, 770,1616,6118, 506,1502,2075,1012,2519, 775,2520,2975,2340,2938,4314, # 1520 +3028,2086,1224,1943,2286,6119,3072,4315,2240,1273,1987,3935,1557, 175, 597, 985, # 1536 +3517,2419,2521,1416,3029, 585, 938,1931,1007,1052,1932,1685,6120,3379,4316,4623, # 1552 + 804, 599,3121,1333,2128,2539,1159,1554,2032,3810, 687,2033,2904, 952, 675,1467, # 1568 +3436,6121,2241,1096,1786,2440,1543,1924, 980,1813,2228, 781,2692,1879, 728,1918, # 1584 +3696,4624, 548,1950,4625,1809,1088,1356,3303,2522,1944, 502, 972, 373, 513,2827, # 1600 + 586,2377,2391,1003,1976,1631,6122,2464,1084, 648,1776,4626,2141, 324, 962,2012, # 1616 +2177,2076,1384, 742,2178,1448,1173,1810, 222, 102, 301, 445, 125,2420, 662,2498, # 1632 + 277, 200,1476,1165,1068, 224,2562,1378,1446, 450,1880, 659, 791, 582,4627,2939, # 1648 +3936,1516,1274, 555,2099,3697,1020,1389,1526,3380,1762,1723,1787,2229, 412,2114, # 1664 +1900,2392,3518, 512,2597, 427,1925,2341,3122,1653,1686,2465,2499, 697, 330, 273, # 1680 + 380,2162, 951, 832, 780, 991,1301,3073, 965,2270,3519, 668,2523,2636,1286, 535, # 1696 +1407, 518, 671, 957,2658,2378, 267, 611,2197,3030,6123, 248,2299, 967,1799,2356, # 1712 + 850,1418,3437,1876,1256,1480,2828,1718,6124,6125,1755,1664,2405,6126,4628,2879, # 1728 +2829, 499,2179, 676,4629, 557,2329,2214,2090, 325,3234, 464, 811,3001, 992,2342, # 1744 +2481,1232,1469, 303,2242, 466,1070,2163, 603,1777,2091,4630,2752,4631,2714, 322, # 1760 +2659,1964,1768, 481,2188,1463,2330,2857,3600,2092,3031,2421,4632,2318,2070,1849, # 1776 +2598,4633,1302,2254,1668,1701,2422,3811,2905,3032,3123,2046,4106,1763,1694,4634, # 1792 +1604, 943,1724,1454, 917, 868,2215,1169,2940, 552,1145,1800,1228,1823,1955, 316, # 1808 +1080,2510, 361,1807,2830,4107,2660,3381,1346,1423,1134,4108,6127, 541,1263,1229, # 1824 +1148,2540, 545, 465,1833,2880,3438,1901,3074,2482, 816,3937, 713,1788,2500, 122, # 1840 +1575, 195,1451,2501,1111,6128, 859, 374,1225,2243,2483,4317, 390,1033,3439,3075, # 1856 +2524,1687, 266, 793,1440,2599, 946, 779, 802, 507, 897,1081, 528,2189,1292, 711, # 
1872 +1866,1725,1167,1640, 753, 398,2661,1053, 246, 348,4318, 137,1024,3440,1600,2077, # 1888 +2129, 825,4319, 698, 238, 521, 187,2300,1157,2423,1641,1605,1464,1610,1097,2541, # 1904 +1260,1436, 759,2255,1814,2150, 705,3235, 409,2563,3304, 561,3033,2005,2564, 726, # 1920 +1956,2343,3698,4109, 949,3812,3813,3520,1669, 653,1379,2525, 881,2198, 632,2256, # 1936 +1027, 778,1074, 733,1957, 514,1481,2466, 554,2180, 702,3938,1606,1017,1398,6129, # 1952 +1380,3521, 921, 993,1313, 594, 449,1489,1617,1166, 768,1426,1360, 495,1794,3601, # 1968 +1177,3602,1170,4320,2344, 476, 425,3167,4635,3168,1424, 401,2662,1171,3382,1998, # 1984 +1089,4110, 477,3169, 474,6130,1909, 596,2831,1842, 494, 693,1051,1028,1207,3076, # 2000 + 606,2115, 727,2790,1473,1115, 743,3522, 630, 805,1532,4321,2021, 366,1057, 838, # 2016 + 684,1114,2142,4322,2050,1492,1892,1808,2271,3814,2424,1971,1447,1373,3305,1090, # 2032 +1536,3939,3523,3306,1455,2199, 336, 369,2331,1035, 584,2393, 902, 718,2600,6131, # 2048 +2753, 463,2151,1149,1611,2467, 715,1308,3124,1268, 343,1413,3236,1517,1347,2663, # 2064 +2093,3940,2022,1131,1553,2100,2941,1427,3441,2942,1323,2484,6132,1980, 872,2368, # 2080 +2441,2943, 320,2369,2116,1082, 679,1933,3941,2791,3815, 625,1143,2023, 422,2200, # 2096 +3816,6133, 730,1695, 356,2257,1626,2301,2858,2637,1627,1778, 937, 883,2906,2693, # 2112 +3002,1769,1086, 400,1063,1325,3307,2792,4111,3077, 456,2345,1046, 747,6134,1524, # 2128 + 884,1094,3383,1474,2164,1059, 974,1688,2181,2258,1047, 345,1665,1187, 358, 875, # 2144 +3170, 305, 660,3524,2190,1334,1135,3171,1540,1649,2542,1527, 927, 968,2793, 885, # 2160 +1972,1850, 482, 500,2638,1218,1109,1085,2543,1654,2034, 876, 78,2287,1482,1277, # 2176 + 861,1675,1083,1779, 724,2754, 454, 397,1132,1612,2332, 893, 672,1237, 257,2259, # 2192 +2370, 135,3384, 337,2244, 547, 352, 340, 709,2485,1400, 788,1138,2511, 540, 772, # 2208 +1682,2260,2272,2544,2013,1843,1902,4636,1999,1562,2288,4637,2201,1403,1533, 407, # 2224 + 576,3308,1254,2071, 978,3385, 170, 136,1201,3125,2664,3172,2394, 213, 912, 873, # 2240 +3603,1713,2202, 699,3604,3699, 813,3442, 493, 531,1054, 468,2907,1483, 304, 281, # 2256 +4112,1726,1252,2094, 339,2319,2130,2639, 756,1563,2944, 748, 571,2976,1588,2425, # 2272 +2715,1851,1460,2426,1528,1392,1973,3237, 288,3309, 685,3386, 296, 892,2716,2216, # 2288 +1570,2245, 722,1747,2217, 905,3238,1103,6135,1893,1441,1965, 251,1805,2371,3700, # 2304 +2601,1919,1078, 75,2182,1509,1592,1270,2640,4638,2152,6136,3310,3817, 524, 706, # 2320 +1075, 292,3818,1756,2602, 317, 98,3173,3605,3525,1844,2218,3819,2502, 814, 567, # 2336 + 385,2908,1534,6137, 534,1642,3239, 797,6138,1670,1529, 953,4323, 188,1071, 538, # 2352 + 178, 729,3240,2109,1226,1374,2000,2357,2977, 731,2468,1116,2014,2051,6139,1261, # 2368 +1593, 803,2859,2736,3443, 556, 682, 823,1541,6140,1369,2289,1706,2794, 845, 462, # 2384 +2603,2665,1361, 387, 162,2358,1740, 739,1770,1720,1304,1401,3241,1049, 627,1571, # 2400 +2427,3526,1877,3942,1852,1500, 431,1910,1503, 677, 297,2795, 286,1433,1038,1198, # 2416 +2290,1133,1596,4113,4639,2469,1510,1484,3943,6141,2442, 108, 712,4640,2372, 866, # 2432 +3701,2755,3242,1348, 834,1945,1408,3527,2395,3243,1811, 824, 994,1179,2110,1548, # 2448 +1453, 790,3003, 690,4324,4325,2832,2909,3820,1860,3821, 225,1748, 310, 346,1780, # 2464 +2470, 821,1993,2717,2796, 828, 877,3528,2860,2471,1702,2165,2910,2486,1789, 453, # 2480 + 359,2291,1676, 73,1164,1461,1127,3311, 421, 604, 314,1037, 589, 116,2487, 737, # 2496 + 837,1180, 111, 244, 735,6142,2261,1861,1362, 986, 523, 418, 581,2666,3822, 103, 
# 2512 + 855, 503,1414,1867,2488,1091, 657,1597, 979, 605,1316,4641,1021,2443,2078,2001, # 2528 +1209, 96, 587,2166,1032, 260,1072,2153, 173, 94, 226,3244, 819,2006,4642,4114, # 2544 +2203, 231,1744, 782, 97,2667, 786,3387, 887, 391, 442,2219,4326,1425,6143,2694, # 2560 + 633,1544,1202, 483,2015, 592,2052,1958,2472,1655, 419, 129,4327,3444,3312,1714, # 2576 +1257,3078,4328,1518,1098, 865,1310,1019,1885,1512,1734, 469,2444, 148, 773, 436, # 2592 +1815,1868,1128,1055,4329,1245,2756,3445,2154,1934,1039,4643, 579,1238, 932,2320, # 2608 + 353, 205, 801, 115,2428, 944,2321,1881, 399,2565,1211, 678, 766,3944, 335,2101, # 2624 +1459,1781,1402,3945,2737,2131,1010, 844, 981,1326,1013, 550,1816,1545,2620,1335, # 2640 +1008, 371,2881, 936,1419,1613,3529,1456,1395,2273,1834,2604,1317,2738,2503, 416, # 2656 +1643,4330, 806,1126, 229, 591,3946,1314,1981,1576,1837,1666, 347,1790, 977,3313, # 2672 + 764,2861,1853, 688,2429,1920,1462, 77, 595, 415,2002,3034, 798,1192,4115,6144, # 2688 +2978,4331,3035,2695,2582,2072,2566, 430,2430,1727, 842,1396,3947,3702, 613, 377, # 2704 + 278, 236,1417,3388,3314,3174, 757,1869, 107,3530,6145,1194, 623,2262, 207,1253, # 2720 +2167,3446,3948, 492,1117,1935, 536,1838,2757,1246,4332, 696,2095,2406,1393,1572, # 2736 +3175,1782, 583, 190, 253,1390,2230, 830,3126,3389, 934,3245,1703,1749,2979,1870, # 2752 +2545,1656,2204, 869,2346,4116,3176,1817, 496,1764,4644, 942,1504, 404,1903,1122, # 2768 +1580,3606,2945,1022, 515, 372,1735, 955,2431,3036,6146,2797,1110,2302,2798, 617, # 2784 +6147, 441, 762,1771,3447,3607,3608,1904, 840,3037, 86, 939,1385, 572,1370,2445, # 2800 +1336, 114,3703, 898, 294, 203,3315, 703,1583,2274, 429, 961,4333,1854,1951,3390, # 2816 +2373,3704,4334,1318,1381, 966,1911,2322,1006,1155, 309, 989, 458,2718,1795,1372, # 2832 +1203, 252,1689,1363,3177, 517,1936, 168,1490, 562, 193,3823,1042,4117,1835, 551, # 2848 + 470,4645, 395, 489,3448,1871,1465,2583,2641, 417,1493, 279,1295, 511,1236,1119, # 2864 + 72,1231,1982,1812,3004, 871,1564, 984,3449,1667,2696,2096,4646,2347,2833,1673, # 2880 +3609, 695,3246,2668, 807,1183,4647, 890, 388,2333,1801,1457,2911,1765,1477,1031, # 2896 +3316,3317,1278,3391,2799,2292,2526, 163,3450,4335,2669,1404,1802,6148,2323,2407, # 2912 +1584,1728,1494,1824,1269, 298, 909,3318,1034,1632, 375, 776,1683,2061, 291, 210, # 2928 +1123, 809,1249,1002,2642,3038, 206,1011,2132, 144, 975, 882,1565, 342, 667, 754, # 2944 +1442,2143,1299,2303,2062, 447, 626,2205,1221,2739,2912,1144,1214,2206,2584, 760, # 2960 +1715, 614, 950,1281,2670,2621, 810, 577,1287,2546,4648, 242,2168, 250,2643, 691, # 2976 + 123,2644, 647, 313,1029, 689,1357,2946,1650, 216, 771,1339,1306, 808,2063, 549, # 2992 + 913,1371,2913,2914,6149,1466,1092,1174,1196,1311,2605,2396,1783,1796,3079, 406, # 3008 +2671,2117,3949,4649, 487,1825,2220,6150,2915, 448,2348,1073,6151,2397,1707, 130, # 3024 + 900,1598, 329, 176,1959,2527,1620,6152,2275,4336,3319,1983,2191,3705,3610,2155, # 3040 +3706,1912,1513,1614,6153,1988, 646, 392,2304,1589,3320,3039,1826,1239,1352,1340, # 3056 +2916, 505,2567,1709,1437,2408,2547, 906,6154,2672, 384,1458,1594,1100,1329, 710, # 3072 + 423,3531,2064,2231,2622,1989,2673,1087,1882, 333, 841,3005,1296,2882,2379, 580, # 3088 +1937,1827,1293,2585, 601, 574, 249,1772,4118,2079,1120, 645, 901,1176,1690, 795, # 3104 +2207, 478,1434, 516,1190,1530, 761,2080, 930,1264, 355, 435,1552, 644,1791, 987, # 3120 + 220,1364,1163,1121,1538, 306,2169,1327,1222, 546,2645, 218, 241, 610,1704,3321, # 3136 +1984,1839,1966,2528, 451,6155,2586,3707,2568, 907,3178, 254,2947, 186,1845,4650, 
# 3152 + 745, 432,1757, 428,1633, 888,2246,2221,2489,3611,2118,1258,1265, 956,3127,1784, # 3168 +4337,2490, 319, 510, 119, 457,3612, 274,2035,2007,4651,1409,3128, 970,2758, 590, # 3184 +2800, 661,2247,4652,2008,3950,1420,1549,3080,3322,3951,1651,1375,2111, 485,2491, # 3200 +1429,1156,6156,2548,2183,1495, 831,1840,2529,2446, 501,1657, 307,1894,3247,1341, # 3216 + 666, 899,2156,1539,2549,1559, 886, 349,2208,3081,2305,1736,3824,2170,2759,1014, # 3232 +1913,1386, 542,1397,2948, 490, 368, 716, 362, 159, 282,2569,1129,1658,1288,1750, # 3248 +2674, 276, 649,2016, 751,1496, 658,1818,1284,1862,2209,2087,2512,3451, 622,2834, # 3264 + 376, 117,1060,2053,1208,1721,1101,1443, 247,1250,3179,1792,3952,2760,2398,3953, # 3280 +6157,2144,3708, 446,2432,1151,2570,3452,2447,2761,2835,1210,2448,3082, 424,2222, # 3296 +1251,2449,2119,2836, 504,1581,4338, 602, 817, 857,3825,2349,2306, 357,3826,1470, # 3312 +1883,2883, 255, 958, 929,2917,3248, 302,4653,1050,1271,1751,2307,1952,1430,2697, # 3328 +2719,2359, 354,3180, 777, 158,2036,4339,1659,4340,4654,2308,2949,2248,1146,2232, # 3344 +3532,2720,1696,2623,3827,6158,3129,1550,2698,1485,1297,1428, 637, 931,2721,2145, # 3360 + 914,2550,2587, 81,2450, 612, 827,2646,1242,4655,1118,2884, 472,1855,3181,3533, # 3376 +3534, 569,1353,2699,1244,1758,2588,4119,2009,2762,2171,3709,1312,1531,6159,1152, # 3392 +1938, 134,1830, 471,3710,2276,1112,1535,3323,3453,3535, 982,1337,2950, 488, 826, # 3408 + 674,1058,1628,4120,2017, 522,2399, 211, 568,1367,3454, 350, 293,1872,1139,3249, # 3424 +1399,1946,3006,1300,2360,3324, 588, 736,6160,2606, 744, 669,3536,3828,6161,1358, # 3440 + 199, 723, 848, 933, 851,1939,1505,1514,1338,1618,1831,4656,1634,3613, 443,2740, # 3456 +3829, 717,1947, 491,1914,6162,2551,1542,4121,1025,6163,1099,1223, 198,3040,2722, # 3472 + 370, 410,1905,2589, 998,1248,3182,2380, 519,1449,4122,1710, 947, 928,1153,4341, # 3488 +2277, 344,2624,1511, 615, 105, 161,1212,1076,1960,3130,2054,1926,1175,1906,2473, # 3504 + 414,1873,2801,6164,2309, 315,1319,3325, 318,2018,2146,2157, 963, 631, 223,4342, # 3520 +4343,2675, 479,3711,1197,2625,3712,2676,2361,6165,4344,4123,6166,2451,3183,1886, # 3536 +2184,1674,1330,1711,1635,1506, 799, 219,3250,3083,3954,1677,3713,3326,2081,3614, # 3552 +1652,2073,4657,1147,3041,1752, 643,1961, 147,1974,3955,6167,1716,2037, 918,3007, # 3568 +1994, 120,1537, 118, 609,3184,4345, 740,3455,1219, 332,1615,3830,6168,1621,2980, # 3584 +1582, 783, 212, 553,2350,3714,1349,2433,2082,4124, 889,6169,2310,1275,1410, 973, # 3600 + 166,1320,3456,1797,1215,3185,2885,1846,2590,2763,4658, 629, 822,3008, 763, 940, # 3616 +1990,2862, 439,2409,1566,1240,1622, 926,1282,1907,2764, 654,2210,1607, 327,1130, # 3632 +3956,1678,1623,6170,2434,2192, 686, 608,3831,3715, 903,3957,3042,6171,2741,1522, # 3648 +1915,1105,1555,2552,1359, 323,3251,4346,3457, 738,1354,2553,2311,2334,1828,2003, # 3664 +3832,1753,2351,1227,6172,1887,4125,1478,6173,2410,1874,1712,1847, 520,1204,2607, # 3680 + 264,4659, 836,2677,2102, 600,4660,3833,2278,3084,6174,4347,3615,1342, 640, 532, # 3696 + 543,2608,1888,2400,2591,1009,4348,1497, 341,1737,3616,2723,1394, 529,3252,1321, # 3712 + 983,4661,1515,2120, 971,2592, 924, 287,1662,3186,4349,2700,4350,1519, 908,1948, # 3728 +2452, 156, 796,1629,1486,2223,2055, 694,4126,1259,1036,3392,1213,2249,2742,1889, # 3744 +1230,3958,1015, 910, 408, 559,3617,4662, 746, 725, 935,4663,3959,3009,1289, 563, # 3760 + 867,4664,3960,1567,2981,2038,2626, 988,2263,2381,4351, 143,2374, 704,1895,6175, # 3776 +1188,3716,2088, 673,3085,2362,4352, 484,1608,1921,2765,2918, 215, 
904,3618,3537, # 3792 + 894, 509, 976,3043,2701,3961,4353,2837,2982, 498,6176,6177,1102,3538,1332,3393, # 3808 +1487,1636,1637, 233, 245,3962, 383, 650, 995,3044, 460,1520,1206,2352, 749,3327, # 3824 + 530, 700, 389,1438,1560,1773,3963,2264, 719,2951,2724,3834, 870,1832,1644,1000, # 3840 + 839,2474,3717, 197,1630,3394, 365,2886,3964,1285,2133, 734, 922, 818,1106, 732, # 3856 + 480,2083,1774,3458, 923,2279,1350, 221,3086, 85,2233,2234,3835,1585,3010,2147, # 3872 +1387,1705,2382,1619,2475, 133, 239,2802,1991,1016,2084,2383, 411,2838,1113, 651, # 3888 +1985,1160,3328, 990,1863,3087,1048,1276,2647, 265,2627,1599,3253,2056, 150, 638, # 3904 +2019, 656, 853, 326,1479, 680,1439,4354,1001,1759, 413,3459,3395,2492,1431, 459, # 3920 +4355,1125,3329,2265,1953,1450,2065,2863, 849, 351,2678,3131,3254,3255,1104,1577, # 3936 + 227,1351,1645,2453,2193,1421,2887, 812,2121, 634, 95,2435, 201,2312,4665,1646, # 3952 +1671,2743,1601,2554,2702,2648,2280,1315,1366,2089,3132,1573,3718,3965,1729,1189, # 3968 + 328,2679,1077,1940,1136, 558,1283, 964,1195, 621,2074,1199,1743,3460,3619,1896, # 3984 +1916,1890,3836,2952,1154,2112,1064, 862, 378,3011,2066,2113,2803,1568,2839,6178, # 4000 +3088,2919,1941,1660,2004,1992,2194, 142, 707,1590,1708,1624,1922,1023,1836,1233, # 4016 +1004,2313, 789, 741,3620,6179,1609,2411,1200,4127,3719,3720,4666,2057,3721, 593, # 4032 +2840, 367,2920,1878,6180,3461,1521, 628,1168, 692,2211,2649, 300, 720,2067,2571, # 4048 +2953,3396, 959,2504,3966,3539,3462,1977, 701,6181, 954,1043, 800, 681, 183,3722, # 4064 +1803,1730,3540,4128,2103, 815,2314, 174, 467, 230,2454,1093,2134, 755,3541,3397, # 4080 +1141,1162,6182,1738,2039, 270,3256,2513,1005,1647,2185,3837, 858,1679,1897,1719, # 4096 +2954,2324,1806, 402, 670, 167,4129,1498,2158,2104, 750,6183, 915, 189,1680,1551, # 4112 + 455,4356,1501,2455, 405,1095,2955, 338,1586,1266,1819, 570, 641,1324, 237,1556, # 4128 +2650,1388,3723,6184,1368,2384,1343,1978,3089,2436, 879,3724, 792,1191, 758,3012, # 4144 +1411,2135,1322,4357, 240,4667,1848,3725,1574,6185, 420,3045,1546,1391, 714,4358, # 4160 +1967, 941,1864, 863, 664, 426, 560,1731,2680,1785,2864,1949,2363, 403,3330,1415, # 4176 +1279,2136,1697,2335, 204, 721,2097,3838, 90,6186,2085,2505, 191,3967, 124,2148, # 4192 +1376,1798,1178,1107,1898,1405, 860,4359,1243,1272,2375,2983,1558,2456,1638, 113, # 4208 +3621, 578,1923,2609, 880, 386,4130, 784,2186,2266,1422,2956,2172,1722, 497, 263, # 4224 +2514,1267,2412,2610, 177,2703,3542, 774,1927,1344, 616,1432,1595,1018, 172,4360, # 4240 +2325, 911,4361, 438,1468,3622, 794,3968,2024,2173,1681,1829,2957, 945, 895,3090, # 4256 + 575,2212,2476, 475,2401,2681, 785,2744,1745,2293,2555,1975,3133,2865, 394,4668, # 4272 +3839, 635,4131, 639, 202,1507,2195,2766,1345,1435,2572,3726,1908,1184,1181,2457, # 4288 +3727,3134,4362, 843,2611, 437, 916,4669, 234, 769,1884,3046,3047,3623, 833,6187, # 4304 +1639,2250,2402,1355,1185,2010,2047, 999, 525,1732,1290,1488,2612, 948,1578,3728, # 4320 +2413,2477,1216,2725,2159, 334,3840,1328,3624,2921,1525,4132, 564,1056, 891,4363, # 4336 +1444,1698,2385,2251,3729,1365,2281,2235,1717,6188, 864,3841,2515, 444, 527,2767, # 4352 +2922,3625, 544, 461,6189, 566, 209,2437,3398,2098,1065,2068,3331,3626,3257,2137, # 4368 #last 512 +) + + diff --git a/env/lib/python3.6/site-packages/chardet/jpcntx.py b/env/lib/python3.6/site-packages/chardet/jpcntx.py new file mode 100644 index 0000000..20044e4 --- /dev/null +++ b/env/lib/python3.6/site-packages/chardet/jpcntx.py @@ -0,0 +1,233 @@ +######################## BEGIN LICENSE BLOCK 
######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + + +# This is hiragana 2-char sequence table, the number in each cell represents its frequency category +jp2CharContext = ( +(0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1), +(2,4,0,4,0,3,0,4,0,3,4,4,4,2,4,3,3,4,3,2,3,3,4,2,3,3,3,2,4,1,4,3,3,1,5,4,3,4,3,4,3,5,3,0,3,5,4,2,0,3,1,0,3,3,0,3,3,0,1,1,0,4,3,0,3,3,0,4,0,2,0,3,5,5,5,5,4,0,4,1,0,3,4), +(0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2), +(0,4,0,5,0,5,0,4,0,4,5,4,4,3,5,3,5,1,5,3,4,3,4,4,3,4,3,3,4,3,5,4,4,3,5,5,3,5,5,5,3,5,5,3,4,5,5,3,1,3,2,0,3,4,0,4,2,0,4,2,1,5,3,2,3,5,0,4,0,2,0,5,4,4,5,4,5,0,4,0,0,4,4), +(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0), +(0,3,0,4,0,3,0,3,0,4,5,4,3,3,3,3,4,3,5,4,4,3,5,4,4,3,4,3,4,4,4,4,5,3,4,4,3,4,5,5,4,5,5,1,4,5,4,3,0,3,3,1,3,3,0,4,4,0,3,3,1,5,3,3,3,5,0,4,0,3,0,4,4,3,4,3,3,0,4,1,1,3,4), +(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0), +(0,4,0,3,0,3,0,4,0,3,4,4,3,2,2,1,2,1,3,1,3,3,3,3,3,4,3,1,3,3,5,3,3,0,4,3,0,5,4,3,3,5,4,4,3,4,4,5,0,1,2,0,1,2,0,2,2,0,1,0,0,5,2,2,1,4,0,3,0,1,0,4,4,3,5,4,3,0,2,1,0,4,3), +(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0), +(0,3,0,5,0,4,0,2,1,4,4,2,4,1,4,2,4,2,4,3,3,3,4,3,3,3,3,1,4,2,3,3,3,1,4,4,1,1,1,4,3,3,2,0,2,4,3,2,0,3,3,0,3,1,1,0,0,0,3,3,0,4,2,2,3,4,0,4,0,3,0,4,4,5,3,4,4,0,3,0,0,1,4), +(1,4,0,4,0,4,0,4,0,3,5,4,4,3,4,3,5,4,3,3,4,3,5,4,4,4,4,3,4,2,4,3,3,1,5,4,3,2,4,5,4,5,5,4,4,5,4,4,0,3,2,2,3,3,0,4,3,1,3,2,1,4,3,3,4,5,0,3,0,2,0,4,5,5,4,5,4,0,4,0,0,5,4), +(0,5,0,5,0,4,0,3,0,4,4,3,4,3,3,3,4,0,4,4,4,3,4,3,4,3,3,1,4,2,4,3,4,0,5,4,1,4,5,4,4,5,3,2,4,3,4,3,2,4,1,3,3,3,2,3,2,0,4,3,3,4,3,3,3,4,0,4,0,3,0,4,5,4,4,4,3,0,4,1,0,1,3), +(0,3,1,4,0,3,0,2,0,3,4,4,3,1,4,2,3,3,4,3,4,3,4,3,4,4,3,2,3,1,5,4,4,1,4,4,3,5,4,4,3,5,5,4,3,4,4,3,1,2,3,1,2,2,0,3,2,0,3,1,0,5,3,3,3,4,3,3,3,3,4,4,4,4,5,4,2,0,3,3,2,4,3), 
+(0,2,0,3,0,1,0,1,0,0,3,2,0,0,2,0,1,0,2,1,3,3,3,1,2,3,1,0,1,0,4,2,1,1,3,3,0,4,3,3,1,4,3,3,0,3,3,2,0,0,0,0,1,0,0,2,0,0,0,0,0,4,1,0,2,3,2,2,2,1,3,3,3,4,4,3,2,0,3,1,0,3,3), +(0,4,0,4,0,3,0,3,0,4,4,4,3,3,3,3,3,3,4,3,4,2,4,3,4,3,3,2,4,3,4,5,4,1,4,5,3,5,4,5,3,5,4,0,3,5,5,3,1,3,3,2,2,3,0,3,4,1,3,3,2,4,3,3,3,4,0,4,0,3,0,4,5,4,4,5,3,0,4,1,0,3,4), +(0,2,0,3,0,3,0,0,0,2,2,2,1,0,1,0,0,0,3,0,3,0,3,0,1,3,1,0,3,1,3,3,3,1,3,3,3,0,1,3,1,3,4,0,0,3,1,1,0,3,2,0,0,0,0,1,3,0,1,0,0,3,3,2,0,3,0,0,0,0,0,3,4,3,4,3,3,0,3,0,0,2,3), +(2,3,0,3,0,2,0,1,0,3,3,4,3,1,3,1,1,1,3,1,4,3,4,3,3,3,0,0,3,1,5,4,3,1,4,3,2,5,5,4,4,4,4,3,3,4,4,4,0,2,1,1,3,2,0,1,2,0,0,1,0,4,1,3,3,3,0,3,0,1,0,4,4,4,5,5,3,0,2,0,0,4,4), +(0,2,0,1,0,3,1,3,0,2,3,3,3,0,3,1,0,0,3,0,3,2,3,1,3,2,1,1,0,0,4,2,1,0,2,3,1,4,3,2,0,4,4,3,1,3,1,3,0,1,0,0,1,0,0,0,1,0,0,0,0,4,1,1,1,2,0,3,0,0,0,3,4,2,4,3,2,0,1,0,0,3,3), +(0,1,0,4,0,5,0,4,0,2,4,4,2,3,3,2,3,3,5,3,3,3,4,3,4,2,3,0,4,3,3,3,4,1,4,3,2,1,5,5,3,4,5,1,3,5,4,2,0,3,3,0,1,3,0,4,2,0,1,3,1,4,3,3,3,3,0,3,0,1,0,3,4,4,4,5,5,0,3,0,1,4,5), +(0,2,0,3,0,3,0,0,0,2,3,1,3,0,4,0,1,1,3,0,3,4,3,2,3,1,0,3,3,2,3,1,3,0,2,3,0,2,1,4,1,2,2,0,0,3,3,0,0,2,0,0,0,1,0,0,0,0,2,2,0,3,2,1,3,3,0,2,0,2,0,0,3,3,1,2,4,0,3,0,2,2,3), +(2,4,0,5,0,4,0,4,0,2,4,4,4,3,4,3,3,3,1,2,4,3,4,3,4,4,5,0,3,3,3,3,2,0,4,3,1,4,3,4,1,4,4,3,3,4,4,3,1,2,3,0,4,2,0,4,1,0,3,3,0,4,3,3,3,4,0,4,0,2,0,3,5,3,4,5,2,0,3,0,0,4,5), +(0,3,0,4,0,1,0,1,0,1,3,2,2,1,3,0,3,0,2,0,2,0,3,0,2,0,0,0,1,0,1,1,0,0,3,1,0,0,0,4,0,3,1,0,2,1,3,0,0,0,0,0,0,3,0,0,0,0,0,0,0,4,2,2,3,1,0,3,0,0,0,1,4,4,4,3,0,0,4,0,0,1,4), +(1,4,1,5,0,3,0,3,0,4,5,4,4,3,5,3,3,4,4,3,4,1,3,3,3,3,2,1,4,1,5,4,3,1,4,4,3,5,4,4,3,5,4,3,3,4,4,4,0,3,3,1,2,3,0,3,1,0,3,3,0,5,4,4,4,4,4,4,3,3,5,4,4,3,3,5,4,0,3,2,0,4,4), +(0,2,0,3,0,1,0,0,0,1,3,3,3,2,4,1,3,0,3,1,3,0,2,2,1,1,0,0,2,0,4,3,1,0,4,3,0,4,4,4,1,4,3,1,1,3,3,1,0,2,0,0,1,3,0,0,0,0,2,0,0,4,3,2,4,3,5,4,3,3,3,4,3,3,4,3,3,0,2,1,0,3,3), +(0,2,0,4,0,3,0,2,0,2,5,5,3,4,4,4,4,1,4,3,3,0,4,3,4,3,1,3,3,2,4,3,0,3,4,3,0,3,4,4,2,4,4,0,4,5,3,3,2,2,1,1,1,2,0,1,5,0,3,3,2,4,3,3,3,4,0,3,0,2,0,4,4,3,5,5,0,0,3,0,2,3,3), +(0,3,0,4,0,3,0,1,0,3,4,3,3,1,3,3,3,0,3,1,3,0,4,3,3,1,1,0,3,0,3,3,0,0,4,4,0,1,5,4,3,3,5,0,3,3,4,3,0,2,0,1,1,1,0,1,3,0,1,2,1,3,3,2,3,3,0,3,0,1,0,1,3,3,4,4,1,0,1,2,2,1,3), +(0,1,0,4,0,4,0,3,0,1,3,3,3,2,3,1,1,0,3,0,3,3,4,3,2,4,2,0,1,0,4,3,2,0,4,3,0,5,3,3,2,4,4,4,3,3,3,4,0,1,3,0,0,1,0,0,1,0,0,0,0,4,2,3,3,3,0,3,0,0,0,4,4,4,5,3,2,0,3,3,0,3,5), +(0,2,0,3,0,0,0,3,0,1,3,0,2,0,0,0,1,0,3,1,1,3,3,0,0,3,0,0,3,0,2,3,1,0,3,1,0,3,3,2,0,4,2,2,0,2,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,2,1,2,0,1,0,1,0,0,0,1,3,1,2,0,0,0,1,0,0,1,4), +(0,3,0,3,0,5,0,1,0,2,4,3,1,3,3,2,1,1,5,2,1,0,5,1,2,0,0,0,3,3,2,2,3,2,4,3,0,0,3,3,1,3,3,0,2,5,3,4,0,3,3,0,1,2,0,2,2,0,3,2,0,2,2,3,3,3,0,2,0,1,0,3,4,4,2,5,4,0,3,0,0,3,5), +(0,3,0,3,0,3,0,1,0,3,3,3,3,0,3,0,2,0,2,1,1,0,2,0,1,0,0,0,2,1,0,0,1,0,3,2,0,0,3,3,1,2,3,1,0,3,3,0,0,1,0,0,0,0,0,2,0,0,0,0,0,2,3,1,2,3,0,3,0,1,0,3,2,1,0,4,3,0,1,1,0,3,3), +(0,4,0,5,0,3,0,3,0,4,5,5,4,3,5,3,4,3,5,3,3,2,5,3,4,4,4,3,4,3,4,5,5,3,4,4,3,4,4,5,4,4,4,3,4,5,5,4,2,3,4,2,3,4,0,3,3,1,4,3,2,4,3,3,5,5,0,3,0,3,0,5,5,5,5,4,4,0,4,0,1,4,4), +(0,4,0,4,0,3,0,3,0,3,5,4,4,2,3,2,5,1,3,2,5,1,4,2,3,2,3,3,4,3,3,3,3,2,5,4,1,3,3,5,3,4,4,0,4,4,3,1,1,3,1,0,2,3,0,2,3,0,3,0,0,4,3,1,3,4,0,3,0,2,0,4,4,4,3,4,5,0,4,0,0,3,4), +(0,3,0,3,0,3,1,2,0,3,4,4,3,3,3,0,2,2,4,3,3,1,3,3,3,1,1,0,3,1,4,3,2,3,4,4,2,4,4,4,3,4,4,3,2,4,4,3,1,3,3,1,3,3,0,4,1,0,2,2,1,4,3,2,3,3,5,4,3,3,5,4,4,3,3,0,4,0,3,2,2,4,4), 
+(0,2,0,1,0,0,0,0,0,1,2,1,3,0,0,0,0,0,2,0,1,2,1,0,0,1,0,0,0,0,3,0,0,1,0,1,1,3,1,0,0,0,1,1,0,1,1,0,0,0,0,0,2,0,0,0,0,0,0,0,0,1,1,2,2,0,3,4,0,0,0,1,1,0,0,1,0,0,0,0,0,1,1), +(0,1,0,0,0,1,0,0,0,0,4,0,4,1,4,0,3,0,4,0,3,0,4,0,3,0,3,0,4,1,5,1,4,0,0,3,0,5,0,5,2,0,1,0,0,0,2,1,4,0,1,3,0,0,3,0,0,3,1,1,4,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0), +(1,4,0,5,0,3,0,2,0,3,5,4,4,3,4,3,5,3,4,3,3,0,4,3,3,3,3,3,3,2,4,4,3,1,3,4,4,5,4,4,3,4,4,1,3,5,4,3,3,3,1,2,2,3,3,1,3,1,3,3,3,5,3,3,4,5,0,3,0,3,0,3,4,3,4,4,3,0,3,0,2,4,3), +(0,1,0,4,0,0,0,0,0,1,4,0,4,1,4,2,4,0,3,0,1,0,1,0,0,0,0,0,2,0,3,1,1,1,0,3,0,0,0,1,2,1,0,0,1,1,1,1,0,1,0,0,0,1,0,0,3,0,0,0,0,3,2,0,2,2,0,1,0,0,0,2,3,2,3,3,0,0,0,0,2,1,0), +(0,5,1,5,0,3,0,3,0,5,4,4,5,1,5,3,3,0,4,3,4,3,5,3,4,3,3,2,4,3,4,3,3,0,3,3,1,4,4,3,4,4,4,3,4,5,5,3,2,3,1,1,3,3,1,3,1,1,3,3,2,4,5,3,3,5,0,4,0,3,0,4,4,3,5,3,3,0,3,4,0,4,3), +(0,5,0,5,0,3,0,2,0,4,4,3,5,2,4,3,3,3,4,4,4,3,5,3,5,3,3,1,4,0,4,3,3,0,3,3,0,4,4,4,4,5,4,3,3,5,5,3,2,3,1,2,3,2,0,1,0,0,3,2,2,4,4,3,1,5,0,4,0,3,0,4,3,1,3,2,1,0,3,3,0,3,3), +(0,4,0,5,0,5,0,4,0,4,5,5,5,3,4,3,3,2,5,4,4,3,5,3,5,3,4,0,4,3,4,4,3,2,4,4,3,4,5,4,4,5,5,0,3,5,5,4,1,3,3,2,3,3,1,3,1,0,4,3,1,4,4,3,4,5,0,4,0,2,0,4,3,4,4,3,3,0,4,0,0,5,5), +(0,4,0,4,0,5,0,1,1,3,3,4,4,3,4,1,3,0,5,1,3,0,3,1,3,1,1,0,3,0,3,3,4,0,4,3,0,4,4,4,3,4,4,0,3,5,4,1,0,3,0,0,2,3,0,3,1,0,3,1,0,3,2,1,3,5,0,3,0,1,0,3,2,3,3,4,4,0,2,2,0,4,4), +(2,4,0,5,0,4,0,3,0,4,5,5,4,3,5,3,5,3,5,3,5,2,5,3,4,3,3,4,3,4,5,3,2,1,5,4,3,2,3,4,5,3,4,1,2,5,4,3,0,3,3,0,3,2,0,2,3,0,4,1,0,3,4,3,3,5,0,3,0,1,0,4,5,5,5,4,3,0,4,2,0,3,5), +(0,5,0,4,0,4,0,2,0,5,4,3,4,3,4,3,3,3,4,3,4,2,5,3,5,3,4,1,4,3,4,4,4,0,3,5,0,4,4,4,4,5,3,1,3,4,5,3,3,3,3,3,3,3,0,2,2,0,3,3,2,4,3,3,3,5,3,4,1,3,3,5,3,2,0,0,0,0,4,3,1,3,3), +(0,1,0,3,0,3,0,1,0,1,3,3,3,2,3,3,3,0,3,0,0,0,3,1,3,0,0,0,2,2,2,3,0,0,3,2,0,1,2,4,1,3,3,0,0,3,3,3,0,1,0,0,2,1,0,0,3,0,3,1,0,3,0,0,1,3,0,2,0,1,0,3,3,1,3,3,0,0,1,1,0,3,3), +(0,2,0,3,0,2,1,4,0,2,2,3,1,1,3,1,1,0,2,0,3,1,2,3,1,3,0,0,1,0,4,3,2,3,3,3,1,4,2,3,3,3,3,1,0,3,1,4,0,1,1,0,1,2,0,1,1,0,1,1,0,3,1,3,2,2,0,1,0,0,0,2,3,3,3,1,0,0,0,0,0,2,3), +(0,5,0,4,0,5,0,2,0,4,5,5,3,3,4,3,3,1,5,4,4,2,4,4,4,3,4,2,4,3,5,5,4,3,3,4,3,3,5,5,4,5,5,1,3,4,5,3,1,4,3,1,3,3,0,3,3,1,4,3,1,4,5,3,3,5,0,4,0,3,0,5,3,3,1,4,3,0,4,0,1,5,3), +(0,5,0,5,0,4,0,2,0,4,4,3,4,3,3,3,3,3,5,4,4,4,4,4,4,5,3,3,5,2,4,4,4,3,4,4,3,3,4,4,5,5,3,3,4,3,4,3,3,4,3,3,3,3,1,2,2,1,4,3,3,5,4,4,3,4,0,4,0,3,0,4,4,4,4,4,1,0,4,2,0,2,4), +(0,4,0,4,0,3,0,1,0,3,5,2,3,0,3,0,2,1,4,2,3,3,4,1,4,3,3,2,4,1,3,3,3,0,3,3,0,0,3,3,3,5,3,3,3,3,3,2,0,2,0,0,2,0,0,2,0,0,1,0,0,3,1,2,2,3,0,3,0,2,0,4,4,3,3,4,1,0,3,0,0,2,4), +(0,0,0,4,0,0,0,0,0,0,1,0,1,0,2,0,0,0,0,0,1,0,2,0,1,0,0,0,0,0,3,1,3,0,3,2,0,0,0,1,0,3,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,4,0,2,0,0,0,0,0,0,2), +(0,2,1,3,0,2,0,2,0,3,3,3,3,1,3,1,3,3,3,3,3,3,4,2,2,1,2,1,4,0,4,3,1,3,3,3,2,4,3,5,4,3,3,3,3,3,3,3,0,1,3,0,2,0,0,1,0,0,1,0,0,4,2,0,2,3,0,3,3,0,3,3,4,2,3,1,4,0,1,2,0,2,3), +(0,3,0,3,0,1,0,3,0,2,3,3,3,0,3,1,2,0,3,3,2,3,3,2,3,2,3,1,3,0,4,3,2,0,3,3,1,4,3,3,2,3,4,3,1,3,3,1,1,0,1,1,0,1,0,1,0,1,0,0,0,4,1,1,0,3,0,3,1,0,2,3,3,3,3,3,1,0,0,2,0,3,3), +(0,0,0,0,0,0,0,0,0,0,3,0,2,0,3,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,3,0,3,0,3,1,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,2,0,2,3,0,0,0,0,0,0,0,0,3), +(0,2,0,3,1,3,0,3,0,2,3,3,3,1,3,1,3,1,3,1,3,3,3,1,3,0,2,3,1,1,4,3,3,2,3,3,1,2,2,4,1,3,3,0,1,4,2,3,0,1,3,0,3,0,0,1,3,0,2,0,0,3,3,2,1,3,0,3,0,2,0,3,4,4,4,3,1,0,3,0,0,3,3), 
+(0,2,0,1,0,2,0,0,0,1,3,2,2,1,3,0,1,1,3,0,3,2,3,1,2,0,2,0,1,1,3,3,3,0,3,3,1,1,2,3,2,3,3,1,2,3,2,0,0,1,0,0,0,0,0,0,3,0,1,0,0,2,1,2,1,3,0,3,0,0,0,3,4,4,4,3,2,0,2,0,0,2,4), +(0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,2,2,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,3,1,0,0,0,0,0,0,0,3), +(0,3,0,3,0,2,0,3,0,3,3,3,2,3,2,2,2,0,3,1,3,3,3,2,3,3,0,0,3,0,3,2,2,0,2,3,1,4,3,4,3,3,2,3,1,5,4,4,0,3,1,2,1,3,0,3,1,1,2,0,2,3,1,3,1,3,0,3,0,1,0,3,3,4,4,2,1,0,2,1,0,2,4), +(0,1,0,3,0,1,0,2,0,1,4,2,5,1,4,0,2,0,2,1,3,1,4,0,2,1,0,0,2,1,4,1,1,0,3,3,0,5,1,3,2,3,3,1,0,3,2,3,0,1,0,0,0,0,0,0,1,0,0,0,0,4,0,1,0,3,0,2,0,1,0,3,3,3,4,3,3,0,0,0,0,2,3), +(0,0,0,1,0,0,0,0,0,0,2,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,1,0,0,1,0,0,0,0,0,3), +(0,1,0,3,0,4,0,3,0,2,4,3,1,0,3,2,2,1,3,1,2,2,3,1,1,1,2,1,3,0,1,2,0,1,3,2,1,3,0,5,5,1,0,0,1,3,2,1,0,3,0,0,1,0,0,0,0,0,3,4,0,1,1,1,3,2,0,2,0,1,0,2,3,3,1,2,3,0,1,0,1,0,4), +(0,0,0,1,0,3,0,3,0,2,2,1,0,0,4,0,3,0,3,1,3,0,3,0,3,0,1,0,3,0,3,1,3,0,3,3,0,0,1,2,1,1,1,0,1,2,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,2,2,1,2,0,0,2,0,0,0,0,2,3,3,3,3,0,0,0,0,1,4), +(0,0,0,3,0,3,0,0,0,0,3,1,1,0,3,0,1,0,2,0,1,0,0,0,0,0,0,0,1,0,3,0,2,0,2,3,0,0,2,2,3,1,2,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,2,0,0,0,0,2,3), +(2,4,0,5,0,5,0,4,0,3,4,3,3,3,4,3,3,3,4,3,4,4,5,4,5,5,5,2,3,0,5,5,4,1,5,4,3,1,5,4,3,4,4,3,3,4,3,3,0,3,2,0,2,3,0,3,0,0,3,3,0,5,3,2,3,3,0,3,0,3,0,3,4,5,4,5,3,0,4,3,0,3,4), +(0,3,0,3,0,3,0,3,0,3,3,4,3,2,3,2,3,0,4,3,3,3,3,3,3,3,3,0,3,2,4,3,3,1,3,4,3,4,4,4,3,4,4,3,2,4,4,1,0,2,0,0,1,1,0,2,0,0,3,1,0,5,3,2,1,3,0,3,0,1,2,4,3,2,4,3,3,0,3,2,0,4,4), +(0,3,0,3,0,1,0,0,0,1,4,3,3,2,3,1,3,1,4,2,3,2,4,2,3,4,3,0,2,2,3,3,3,0,3,3,3,0,3,4,1,3,3,0,3,4,3,3,0,1,1,0,1,0,0,0,4,0,3,0,0,3,1,2,1,3,0,4,0,1,0,4,3,3,4,3,3,0,2,0,0,3,3), +(0,3,0,4,0,1,0,3,0,3,4,3,3,0,3,3,3,1,3,1,3,3,4,3,3,3,0,0,3,1,5,3,3,1,3,3,2,5,4,3,3,4,5,3,2,5,3,4,0,1,0,0,0,0,0,2,0,0,1,1,0,4,2,2,1,3,0,3,0,2,0,4,4,3,5,3,2,0,1,1,0,3,4), +(0,5,0,4,0,5,0,2,0,4,4,3,3,2,3,3,3,1,4,3,4,1,5,3,4,3,4,0,4,2,4,3,4,1,5,4,0,4,4,4,4,5,4,1,3,5,4,2,1,4,1,1,3,2,0,3,1,0,3,2,1,4,3,3,3,4,0,4,0,3,0,4,4,4,3,3,3,0,4,2,0,3,4), +(1,4,0,4,0,3,0,1,0,3,3,3,1,1,3,3,2,2,3,3,1,0,3,2,2,1,2,0,3,1,2,1,2,0,3,2,0,2,2,3,3,4,3,0,3,3,1,2,0,1,1,3,1,2,0,0,3,0,1,1,0,3,2,2,3,3,0,3,0,0,0,2,3,3,4,3,3,0,1,0,0,1,4), +(0,4,0,4,0,4,0,0,0,3,4,4,3,1,4,2,3,2,3,3,3,1,4,3,4,0,3,0,4,2,3,3,2,2,5,4,2,1,3,4,3,4,3,1,3,3,4,2,0,2,1,0,3,3,0,0,2,0,3,1,0,4,4,3,4,3,0,4,0,1,0,2,4,4,4,4,4,0,3,2,0,3,3), +(0,0,0,1,0,4,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,3,2,0,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,2), +(0,2,0,3,0,4,0,4,0,1,3,3,3,0,4,0,2,1,2,1,1,1,2,0,3,1,1,0,1,0,3,1,0,0,3,3,2,0,1,1,0,0,0,0,0,1,0,2,0,2,2,0,3,1,0,0,1,0,1,1,0,1,2,0,3,0,0,0,0,1,0,0,3,3,4,3,1,0,1,0,3,0,2), +(0,0,0,3,0,5,0,0,0,0,1,0,2,0,3,1,0,1,3,0,0,0,2,0,0,0,1,0,0,0,1,1,0,0,4,0,0,0,2,3,0,1,4,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,1,0,0,0,0,0,0,0,2,0,0,3,0,0,0,0,0,3), +(0,2,0,5,0,5,0,1,0,2,4,3,3,2,5,1,3,2,3,3,3,0,4,1,2,0,3,0,4,0,2,2,1,1,5,3,0,0,1,4,2,3,2,0,3,3,3,2,0,2,4,1,1,2,0,1,1,0,3,1,0,1,3,1,2,3,0,2,0,0,0,1,3,5,4,4,4,0,3,0,0,1,3), +(0,4,0,5,0,4,0,4,0,4,5,4,3,3,4,3,3,3,4,3,4,4,5,3,4,5,4,2,4,2,3,4,3,1,4,4,1,3,5,4,4,5,5,4,4,5,5,5,2,3,3,1,4,3,1,3,3,0,3,3,1,4,3,4,4,4,0,3,0,4,0,3,3,4,4,5,0,0,4,3,0,4,5), 
+(0,4,0,4,0,3,0,3,0,3,4,4,4,3,3,2,4,3,4,3,4,3,5,3,4,3,2,1,4,2,4,4,3,1,3,4,2,4,5,5,3,4,5,4,1,5,4,3,0,3,2,2,3,2,1,3,1,0,3,3,3,5,3,3,3,5,4,4,2,3,3,4,3,3,3,2,1,0,3,2,1,4,3), +(0,4,0,5,0,4,0,3,0,3,5,5,3,2,4,3,4,0,5,4,4,1,4,4,4,3,3,3,4,3,5,5,2,3,3,4,1,2,5,5,3,5,5,2,3,5,5,4,0,3,2,0,3,3,1,1,5,1,4,1,0,4,3,2,3,5,0,4,0,3,0,5,4,3,4,3,0,0,4,1,0,4,4), +(1,3,0,4,0,2,0,2,0,2,5,5,3,3,3,3,3,0,4,2,3,4,4,4,3,4,0,0,3,4,5,4,3,3,3,3,2,5,5,4,5,5,5,4,3,5,5,5,1,3,1,0,1,0,0,3,2,0,4,2,0,5,2,3,2,4,1,3,0,3,0,4,5,4,5,4,3,0,4,2,0,5,4), +(0,3,0,4,0,5,0,3,0,3,4,4,3,2,3,2,3,3,3,3,3,2,4,3,3,2,2,0,3,3,3,3,3,1,3,3,3,0,4,4,3,4,4,1,1,4,4,2,0,3,1,0,1,1,0,4,1,0,2,3,1,3,3,1,3,4,0,3,0,1,0,3,1,3,0,0,1,0,2,0,0,4,4), +(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0), +(0,3,0,3,0,2,0,3,0,1,5,4,3,3,3,1,4,2,1,2,3,4,4,2,4,4,5,0,3,1,4,3,4,0,4,3,3,3,2,3,2,5,3,4,3,2,2,3,0,0,3,0,2,1,0,1,2,0,0,0,0,2,1,1,3,1,0,2,0,4,0,3,4,4,4,5,2,0,2,0,0,1,3), +(0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,1,1,0,0,1,1,0,0,0,4,2,1,1,0,1,0,3,2,0,0,3,1,1,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,1,0,0,0,2,0,0,0,1,4,0,4,2,1,0,0,0,0,0,1), +(0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,1,0,1,0,0,0,0,3,1,0,0,0,2,0,2,1,0,0,1,2,1,0,1,1,0,0,3,0,0,0,0,0,0,0,0,0,0,0,1,3,1,0,0,0,0,0,1,0,0,2,1,0,0,0,0,0,0,0,0,2), +(0,4,0,4,0,4,0,3,0,4,4,3,4,2,4,3,2,0,4,4,4,3,5,3,5,3,3,2,4,2,4,3,4,3,1,4,0,2,3,4,4,4,3,3,3,4,4,4,3,4,1,3,4,3,2,1,2,1,3,3,3,4,4,3,3,5,0,4,0,3,0,4,3,3,3,2,1,0,3,0,0,3,3), +(0,4,0,3,0,3,0,3,0,3,5,5,3,3,3,3,4,3,4,3,3,3,4,4,4,3,3,3,3,4,3,5,3,3,1,3,2,4,5,5,5,5,4,3,4,5,5,3,2,2,3,3,3,3,2,3,3,1,2,3,2,4,3,3,3,4,0,4,0,2,0,4,3,2,2,1,2,0,3,0,0,4,1), +) + +class JapaneseContextAnalysis(object): + NUM_OF_CATEGORY = 6 + DONT_KNOW = -1 + ENOUGH_REL_THRESHOLD = 100 + MAX_REL_THRESHOLD = 1000 + MINIMUM_DATA_THRESHOLD = 4 + + def __init__(self): + self._total_rel = None + self._rel_sample = None + self._need_to_skip_char_num = None + self._last_char_order = None + self._done = None + self.reset() + + def reset(self): + self._total_rel = 0 # total sequence received + # category counters, each integer counts sequence in its category + self._rel_sample = [0] * self.NUM_OF_CATEGORY + # if last byte in current buffer is not the last byte of a character, + # we need to know how many bytes to skip in next buffer + self._need_to_skip_char_num = 0 + self._last_char_order = -1 # The order of previous char + # If this flag is set to True, detection is done and conclusion has + # been made + self._done = False + + def feed(self, byte_str, num_bytes): + if self._done: + return + + # The buffer we got is byte oriented, and a character may span in more than one + # buffers. In case the last one or two byte in last buffer is not + # complete, we record how many byte needed to complete that character + # and skip these bytes here. We can choose to record those bytes as + # well and analyse the character once it is complete, but since a + # character will not make much difference, by simply skipping + # this character will simply our logic and improve performance. 
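+ # get_order() -- overridden by the SJIS/EUC-JP subclasses below -- returns
+ # (order, char_len): order is the hiragana index of the character starting
+ # at this position (-1 if it is not hiragana) and char_len is the number of
+ # bytes that character occupies.  Every pair of consecutive hiragana is
+ # classified through the jp2CharContext table above and the matching
+ # category counter in self._rel_sample is incremented; once more than
+ # MAX_REL_THRESHOLD pairs have been seen, detection stops early.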
+ i = self._need_to_skip_char_num + while i < num_bytes: + order, char_len = self.get_order(byte_str[i:i + 2]) + i += char_len + if i > num_bytes: + self._need_to_skip_char_num = i - num_bytes + self._last_char_order = -1 + else: + if (order != -1) and (self._last_char_order != -1): + self._total_rel += 1 + if self._total_rel > self.MAX_REL_THRESHOLD: + self._done = True + break + self._rel_sample[jp2CharContext[self._last_char_order][order]] += 1 + self._last_char_order = order + + def got_enough_data(self): + return self._total_rel > self.ENOUGH_REL_THRESHOLD + + def get_confidence(self): + # This is just one way to calculate confidence. It works well for me. + if self._total_rel > self.MINIMUM_DATA_THRESHOLD: + return (self._total_rel - self._rel_sample[0]) / self._total_rel + else: + return self.DONT_KNOW + + def get_order(self, byte_str): + return -1, 1 + +class SJISContextAnalysis(JapaneseContextAnalysis): + def __init__(self): + super(SJISContextAnalysis, self).__init__() + self._charset_name = "SHIFT_JIS" + + @property + def charset_name(self): + return self._charset_name + + def get_order(self, byte_str): + if not byte_str: + return -1, 1 + # find out current char's byte length + first_char = byte_str[0] + if (0x81 <= first_char <= 0x9F) or (0xE0 <= first_char <= 0xFC): + char_len = 2 + if (first_char == 0x87) or (0xFA <= first_char <= 0xFC): + self._charset_name = "CP932" + else: + char_len = 1 + + # return its order if it is hiragana + if len(byte_str) > 1: + second_char = byte_str[1] + if (first_char == 202) and (0x9F <= second_char <= 0xF1): + return second_char - 0x9F, char_len + + return -1, char_len + +class EUCJPContextAnalysis(JapaneseContextAnalysis): + def get_order(self, byte_str): + if not byte_str: + return -1, 1 + # find out current char's byte length + first_char = byte_str[0] + if (first_char == 0x8E) or (0xA1 <= first_char <= 0xFE): + char_len = 2 + elif first_char == 0x8F: + char_len = 3 + else: + char_len = 1 + + # return its order if it is hiragana + if len(byte_str) > 1: + second_char = byte_str[1] + if (first_char == 0xA4) and (0xA1 <= second_char <= 0xF3): + return second_char - 0xA1, char_len + + return -1, char_len + + diff --git a/env/lib/python3.6/site-packages/chardet/langbulgarianmodel.py b/env/lib/python3.6/site-packages/chardet/langbulgarianmodel.py new file mode 100644 index 0000000..2aa4fb2 --- /dev/null +++ b/env/lib/python3.6/site-packages/chardet/langbulgarianmodel.py @@ -0,0 +1,228 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# 255: Control characters that usually does not exist in any text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 + +# Character Mapping Table: +# this table is modified base on win1251BulgarianCharToOrderMap, so +# only number <64 is sure valid + +Latin5_BulgarianCharToOrderMap = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253, 77, 90, 99,100, 72,109,107,101, 79,185, 81,102, 76, 94, 82, # 40 +110,186,108, 91, 74,119, 84, 96,111,187,115,253,253,253,253,253, # 50 +253, 65, 69, 70, 66, 63, 68,112,103, 92,194,104, 95, 86, 87, 71, # 60 +116,195, 85, 93, 97,113,196,197,198,199,200,253,253,253,253,253, # 70 +194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209, # 80 +210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225, # 90 + 81,226,227,228,229,230,105,231,232,233,234,235,236, 45,237,238, # a0 + 31, 32, 35, 43, 37, 44, 55, 47, 40, 59, 33, 46, 38, 36, 41, 30, # b0 + 39, 28, 34, 51, 48, 49, 53, 50, 54, 57, 61,239, 67,240, 60, 56, # c0 + 1, 18, 9, 20, 11, 3, 23, 15, 2, 26, 12, 10, 14, 6, 4, 13, # d0 + 7, 8, 5, 19, 29, 25, 22, 21, 27, 24, 17, 75, 52,241, 42, 16, # e0 + 62,242,243,244, 58,245, 98,246,247,248,249,250,251, 91,252,253, # f0 +) + +win1251BulgarianCharToOrderMap = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253, 77, 90, 99,100, 72,109,107,101, 79,185, 81,102, 76, 94, 82, # 40 +110,186,108, 91, 74,119, 84, 96,111,187,115,253,253,253,253,253, # 50 +253, 65, 69, 70, 66, 63, 68,112,103, 92,194,104, 95, 86, 87, 71, # 60 +116,195, 85, 93, 97,113,196,197,198,199,200,253,253,253,253,253, # 70 +206,207,208,209,210,211,212,213,120,214,215,216,217,218,219,220, # 80 +221, 78, 64, 83,121, 98,117,105,222,223,224,225,226,227,228,229, # 90 + 88,230,231,232,233,122, 89,106,234,235,236,237,238, 45,239,240, # a0 + 73, 80,118,114,241,242,243,244,245, 62, 58,246,247,248,249,250, # b0 + 31, 32, 35, 43, 37, 44, 55, 47, 40, 59, 33, 46, 38, 36, 41, 30, # c0 + 39, 28, 34, 51, 48, 49, 53, 50, 54, 57, 61,251, 67,252, 60, 56, # d0 + 1, 18, 9, 20, 11, 3, 23, 15, 2, 26, 12, 10, 14, 6, 4, 13, # e0 + 7, 8, 5, 19, 29, 25, 22, 21, 27, 24, 17, 75, 52,253, 42, 16, # f0 +) + +# Model Table: +# total sequences: 100% +# first 512 sequences: 96.9392% +# first 1024 sequences:3.0618% +# rest sequences: 0.2992% +# negative sequences: 0.0020% +BulgarianLangModel = ( +0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,2,3,3,3,3,3, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,2,2,3,2,2,1,2,2, +3,1,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,3,3,3,3,3,3,3,0,3,0,1, +0,0,0,0,0,0,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,2,3,3,3,3,3,3,3,3,0,3,1,0, +0,1,0,0,0,0,0,0,0,0,1,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, 
+3,2,2,2,3,3,3,3,3,3,3,3,3,3,3,3,3,1,3,2,3,3,3,3,3,3,3,3,0,3,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,2,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,1,3,2,3,3,3,3,3,3,3,3,0,3,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,3,2,3,2,2,1,3,3,3,3,2,2,2,1,1,2,0,1,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,3,3,2,3,2,2,3,3,1,1,2,3,3,2,3,3,3,3,2,1,2,0,2,0,3,0,0, +0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,3,3,1,3,3,3,3,3,2,3,2,3,3,3,3,3,2,3,3,1,3,0,3,0,2,0,0, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,3,3,3,1,3,3,2,3,3,3,1,3,3,2,3,2,2,2,0,0,2,0,2,0,2,0,0, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,3,3,3,3,0,3,3,3,2,2,3,3,3,1,2,2,3,2,1,1,2,0,2,0,0,0,0, +1,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,3,3,2,3,3,1,2,3,2,2,2,3,3,3,3,3,2,2,3,1,2,0,2,1,2,0,0, +0,0,0,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,1,3,3,3,3,3,2,3,3,3,2,3,3,2,3,2,2,2,3,1,2,0,1,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,3,3,3,3,3,3,1,1,1,2,2,1,3,1,3,2,2,3,0,0,1,0,1,0,1,0,0, +0,0,0,1,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,2,2,3,2,2,3,1,2,1,1,1,2,3,1,3,1,2,2,0,1,1,1,1,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,1,3,2,2,3,3,1,2,3,1,1,3,3,3,3,1,2,2,1,1,1,0,2,0,2,0,1, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,2,2,3,3,3,2,2,1,1,2,0,2,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,0,1,2,1,3,3,2,3,3,3,3,3,2,3,2,1,0,3,1,2,1,2,1,2,3,2,1,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,1,1,2,3,3,3,3,3,3,3,3,3,3,3,3,0,0,3,1,3,3,2,3,3,2,2,2,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,3,3,3,3,0,3,3,3,3,3,2,1,1,2,1,3,3,0,3,1,1,1,1,3,2,0,1,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,3,2,2,2,3,3,3,3,3,3,3,3,3,3,3,1,1,3,1,3,3,2,3,2,2,2,3,0,2,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,2,3,3,2,2,3,2,1,1,1,1,1,3,1,3,1,1,0,0,0,1,0,0,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,2,3,2,0,3,2,0,3,0,2,0,0,2,1,3,1,0,0,1,0,0,0,1,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,2,1,1,1,1,2,1,1,2,1,1,1,2,2,1,2,1,1,1,0,1,1,0,1,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,2,1,3,1,1,2,1,3,2,1,1,0,1,2,3,2,1,1,1,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,3,3,3,3,2,2,1,0,1,0,0,1,0,0,0,2,1,0,3,0,0,1,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,2,3,2,3,3,1,3,2,1,1,1,2,1,1,2,1,3,0,1,0,0,0,1,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,1,1,2,2,3,3,2,3,2,2,2,3,1,2,2,1,1,2,1,1,2,2,0,1,1,0,1,0,2,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,2,1,3,1,0,2,2,1,3,2,1,0,0,2,0,2,0,1,0,0,0,0,0,0,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,3,1,2,0,2,3,1,2,3,2,0,1,3,1,2,1,1,1,0,0,1,0,0,2,2,2,3, +2,2,2,2,1,2,1,1,2,2,1,1,2,0,1,1,1,0,0,1,1,0,0,1,1,0,0,0,1,1,0,1, +3,3,3,3,3,2,1,2,2,1,2,0,2,0,1,0,1,2,1,2,1,1,0,0,0,1,0,1,0,0,0,0, 
+0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, +3,3,2,3,3,1,1,3,1,0,3,2,1,0,0,0,1,2,0,2,0,1,0,0,0,1,0,1,2,1,2,2, +1,1,1,1,1,1,1,2,2,2,1,1,1,1,1,1,1,0,1,2,1,1,1,0,0,0,0,0,1,1,0,0, +3,1,0,1,0,2,3,2,2,2,3,2,2,2,2,2,1,0,2,1,2,1,1,1,0,1,2,1,2,2,2,1, +1,1,2,2,2,2,1,2,1,1,0,1,2,1,2,2,2,1,1,1,0,1,1,1,1,2,0,1,0,0,0,0, +2,3,2,3,3,0,0,2,1,0,2,1,0,0,0,0,2,3,0,2,0,0,0,0,0,1,0,0,2,0,1,2, +2,1,2,1,2,2,1,1,1,2,1,1,1,0,1,2,2,1,1,1,1,1,0,1,1,1,0,0,1,2,0,0, +3,3,2,2,3,0,2,3,1,1,2,0,0,0,1,0,0,2,0,2,0,0,0,1,0,1,0,1,2,0,2,2, +1,1,1,1,2,1,0,1,2,2,2,1,1,1,1,1,1,1,0,1,1,1,0,0,0,0,0,0,1,1,0,0, +2,3,2,3,3,0,0,3,0,1,1,0,1,0,0,0,2,2,1,2,0,0,0,0,0,0,0,0,2,0,1,2, +2,2,1,1,1,1,1,2,2,2,1,0,2,0,1,0,1,0,0,1,0,1,0,0,1,0,0,0,0,1,0,0, +3,3,3,3,2,2,2,2,2,0,2,1,1,1,1,2,1,2,1,1,0,2,0,1,0,1,0,0,2,0,1,2, +1,1,1,1,1,1,1,2,2,1,1,0,2,0,1,0,2,0,0,1,1,1,0,0,2,0,0,0,1,1,0,0, +2,3,3,3,3,1,0,0,0,0,0,0,0,0,0,0,2,0,0,1,1,0,0,0,0,0,0,1,2,0,1,2, +2,2,2,1,1,2,1,1,2,2,2,1,2,0,1,1,1,1,1,1,0,1,1,1,1,0,0,1,1,1,0,0, +2,3,3,3,3,0,2,2,0,2,1,0,0,0,1,1,1,2,0,2,0,0,0,3,0,0,0,0,2,0,2,2, +1,1,1,2,1,2,1,1,2,2,2,1,2,0,1,1,1,0,1,1,1,1,0,2,1,0,0,0,1,1,0,0, +2,3,3,3,3,0,2,1,0,0,2,0,0,0,0,0,1,2,0,2,0,0,0,0,0,0,0,0,2,0,1,2, +1,1,1,2,1,1,1,1,2,2,2,0,1,0,1,1,1,0,0,1,1,1,0,0,1,0,0,0,0,1,0,0, +3,3,2,2,3,0,1,0,1,0,0,0,0,0,0,0,1,1,0,3,0,0,0,0,0,0,0,0,1,0,2,2, +1,1,1,1,1,2,1,1,2,2,1,2,2,1,0,1,1,1,1,1,0,1,0,0,1,0,0,0,1,1,0,0, +3,1,0,1,0,2,2,2,2,3,2,1,1,1,2,3,0,0,1,0,2,1,1,0,1,1,1,1,2,1,1,1, +1,2,2,1,2,1,2,2,1,1,0,1,2,1,2,2,1,1,1,0,0,1,1,1,2,1,0,1,0,0,0,0, +2,1,0,1,0,3,1,2,2,2,2,1,2,2,1,1,1,0,2,1,2,2,1,1,2,1,1,0,2,1,1,1, +1,2,2,2,2,2,2,2,1,2,0,1,1,0,2,1,1,1,1,1,0,0,1,1,1,1,0,1,0,0,0,0, +2,1,1,1,1,2,2,2,2,1,2,2,2,1,2,2,1,1,2,1,2,3,2,2,1,1,1,1,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,2,2,3,2,0,1,2,0,1,2,1,1,0,1,0,1,2,1,2,0,0,0,1,1,0,0,0,1,0,0,2, +1,1,0,0,1,1,0,1,1,1,1,0,2,0,1,1,1,0,0,1,1,0,0,0,0,1,0,0,0,1,0,0, +2,0,0,0,0,1,2,2,2,2,2,2,2,1,2,1,1,1,1,1,1,1,0,1,1,1,1,1,2,1,1,1, +1,2,2,2,2,1,1,2,1,2,1,1,1,0,2,1,2,1,1,1,0,2,1,1,1,1,0,1,0,0,0,0, +3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0, +1,1,0,1,0,1,1,1,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,2,2,3,2,0,0,0,0,1,0,0,0,0,0,0,1,1,0,2,0,0,0,0,0,0,0,0,1,0,1,2, +1,1,1,1,1,1,0,0,2,2,2,2,2,0,1,1,0,1,1,1,1,1,0,0,1,0,0,0,1,1,0,1, +2,3,1,2,1,0,1,1,0,2,2,2,0,0,1,0,0,1,1,1,1,0,0,0,0,0,0,0,1,0,1,2, +1,1,1,1,2,1,1,1,1,1,1,1,1,0,1,1,0,1,0,1,0,1,0,0,1,0,0,0,0,1,0,0, +2,2,2,2,2,0,0,2,0,0,2,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,2,0,2,2, +1,1,1,1,1,0,0,1,2,1,1,0,1,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, +1,2,2,2,2,0,0,2,0,1,1,0,0,0,1,0,0,2,0,2,0,0,0,0,0,0,0,0,0,0,1,1, +0,0,0,1,1,1,1,1,1,1,1,1,1,0,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0, +1,2,2,3,2,0,0,1,0,0,1,0,0,0,0,0,0,1,0,2,0,0,0,1,0,0,0,0,0,0,0,2, +1,1,0,0,1,0,0,0,1,1,0,0,1,0,1,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, +2,1,2,2,2,1,2,1,2,2,1,1,2,1,1,1,0,1,1,1,1,2,0,1,0,1,1,1,1,0,1,1, +1,1,2,1,1,1,1,1,1,0,0,1,2,1,1,1,1,1,1,0,0,1,1,1,0,0,0,0,0,0,0,0, +1,0,0,1,3,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,2,2,2,1,0,0,1,0,2,0,0,0,0,0,1,1,1,0,1,0,0,0,0,0,0,0,0,2,0,0,1, +0,2,0,1,0,0,1,1,2,0,1,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0, +1,2,2,2,2,0,1,1,0,2,1,0,1,1,1,0,0,1,0,2,0,1,0,0,0,0,0,0,0,0,0,1, +0,1,0,0,1,0,0,0,1,1,0,0,1,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, +2,2,2,2,2,0,0,1,0,0,0,1,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1, +0,1,0,1,1,1,0,0,1,1,1,0,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, 
+2,0,1,0,0,1,2,1,1,1,1,1,1,2,2,1,0,0,1,0,1,0,0,0,0,1,1,1,1,0,0,0, +1,1,2,1,1,1,1,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,2,1,2,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1, +0,0,0,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,0,1,2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0, +0,1,1,0,1,1,1,0,0,1,0,0,1,0,1,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0, +1,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,2,0,0,2,0,1,0,0,1,0,0,1, +1,1,0,0,1,1,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0, +1,1,1,1,1,1,1,2,0,0,0,0,0,0,2,1,0,1,1,0,0,1,1,1,0,1,0,0,0,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,0,1,1,1,1,1,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +) + +Latin5BulgarianModel = { + 'char_to_order_map': Latin5_BulgarianCharToOrderMap, + 'precedence_matrix': BulgarianLangModel, + 'typical_positive_ratio': 0.969392, + 'keep_english_letter': False, + 'charset_name': "ISO-8859-5", + 'language': 'Bulgairan', +} + +Win1251BulgarianModel = { + 'char_to_order_map': win1251BulgarianCharToOrderMap, + 'precedence_matrix': BulgarianLangModel, + 'typical_positive_ratio': 0.969392, + 'keep_english_letter': False, + 'charset_name': "windows-1251", + 'language': 'Bulgarian', +} diff --git a/env/lib/python3.6/site-packages/chardet/langcyrillicmodel.py b/env/lib/python3.6/site-packages/chardet/langcyrillicmodel.py new file mode 100644 index 0000000..e5f9a1f --- /dev/null +++ b/env/lib/python3.6/site-packages/chardet/langcyrillicmodel.py @@ -0,0 +1,333 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# KOI8-R language model +# Character Mapping Table: +KOI8R_char_to_order_map = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 +155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 +253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 + 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 +191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, # 80 +207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, # 90 +223,224,225, 68,226,227,228,229,230,231,232,233,234,235,236,237, # a0 +238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253, # b0 + 27, 3, 21, 28, 13, 2, 39, 19, 26, 4, 23, 11, 8, 12, 5, 1, # c0 + 15, 16, 9, 7, 6, 14, 24, 10, 17, 18, 20, 25, 30, 29, 22, 54, # d0 + 59, 37, 44, 58, 41, 48, 53, 46, 55, 42, 60, 36, 49, 38, 31, 34, # e0 + 35, 43, 45, 32, 40, 52, 56, 33, 61, 62, 51, 57, 47, 63, 50, 70, # f0 +) + +win1251_char_to_order_map = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 +155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 +253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 + 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 +191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, +207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, +223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, +239,240,241,242,243,244,245,246, 68,247,248,249,250,251,252,253, + 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, + 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43, + 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, + 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16, +) + +latin5_char_to_order_map = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 +155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 +253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 + 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 +191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, +207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, +223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, + 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, + 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 
43, + 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, + 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16, +239, 68,240,241,242,243,244,245,246,247,248,249,250,251,252,255, +) + +macCyrillic_char_to_order_map = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 +155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 +253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 + 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 + 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, + 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43, +191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, +207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, +223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, +239,240,241,242,243,244,245,246,247,248,249,250,251,252, 68, 16, + 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, + 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27,255, +) + +IBM855_char_to_order_map = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 +155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 +253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 + 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 +191,192,193,194, 68,195,196,197,198,199,200,201,202,203,204,205, +206,207,208,209,210,211,212,213,214,215,216,217, 27, 59, 54, 70, + 3, 37, 21, 44, 28, 58, 13, 41, 2, 48, 39, 53, 19, 46,218,219, +220,221,222,223,224, 26, 55, 4, 42,225,226,227,228, 23, 60,229, +230,231,232,233,234,235, 11, 36,236,237,238,239,240,241,242,243, + 8, 49, 12, 38, 5, 31, 1, 34, 15,244,245,246,247, 35, 16,248, + 43, 9, 45, 7, 32, 6, 40, 14, 52, 24, 56, 10, 33, 17, 61,249, +250, 18, 62, 20, 51, 25, 57, 30, 47, 29, 63, 22, 50,251,252,255, +) + +IBM866_char_to_order_map = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 +155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 +253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 + 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 + 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, + 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43, + 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, +191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, +207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, +223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, + 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16, +239, 
68,240,241,242,243,244,245,246,247,248,249,250,251,252,255, +) + +# Model Table: +# total sequences: 100% +# first 512 sequences: 97.6601% +# first 1024 sequences: 2.3389% +# rest sequences: 0.1237% +# negative sequences: 0.0009% +RussianLangModel = ( +0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,1,3,3,3,3,1,3,3,3,2,3,2,3,3, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,2,2,2,2,2,0,0,2, +3,3,3,2,3,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,3,2,3,2,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,2,2,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,2,3,3,1,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,2,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,0,0,3,3,3,3,3,3,3,3,3,3,3,2,1, +0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,3,3,3,2,1, +0,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,2,2,2,3,1,3,3,1,3,3,3,3,2,2,3,0,2,2,2,3,3,2,1,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,2,3,3,3,3,3,2,2,3,2,3,3,3,2,1,2,2,0,1,2,2,2,2,2,2,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,3,0,2,2,3,3,2,1,2,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,2,3,3,1,2,3,2,2,3,2,3,3,3,3,2,2,3,0,3,2,2,3,1,1,1,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,2,3,3,3,3,2,2,2,0,3,3,3,2,2,2,2,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,2,3,2,3,3,3,3,3,3,2,3,2,2,0,1,3,2,1,2,2,1,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,3,2,1,1,3,0,1,1,1,1,2,1,1,0,2,2,2,1,2,0,1,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,2,3,3,2,2,2,2,1,3,2,3,2,3,2,1,2,2,0,1,1,2,1,2,1,2,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,3,3,2,2,3,2,3,3,3,2,2,2,2,0,2,2,2,2,3,1,1,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, +3,2,3,2,2,3,3,3,3,3,3,3,3,3,1,3,2,0,0,3,3,3,3,2,3,3,3,3,2,3,2,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,3,3,3,3,3,2,2,3,3,0,2,1,0,3,2,3,2,3,0,0,1,2,0,0,1,0,1,2,1,1,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,3,0,2,3,3,3,3,2,3,3,3,3,1,2,2,0,0,2,3,2,2,2,3,2,3,2,2,3,0,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,2,3,0,2,3,2,3,0,1,2,3,3,2,0,2,3,0,0,2,3,2,2,0,1,3,1,3,2,2,1,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,1,3,0,2,3,3,3,3,3,3,3,3,2,1,3,2,0,0,2,2,3,3,3,2,3,3,0,2,2,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,2,2,3,3,2,2,2,3,3,0,0,1,1,1,1,1,2,0,0,1,1,1,1,0,1,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,2,2,3,3,3,3,3,3,3,0,3,2,3,3,2,3,2,0,2,1,0,1,1,0,1,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,2,3,3,3,2,2,2,2,3,1,3,2,3,1,1,2,1,0,2,2,2,2,1,3,1,0, +0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, +2,2,3,3,3,3,3,1,2,2,1,3,1,0,3,0,0,3,0,0,0,1,1,0,1,2,1,0,0,0,0,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,2,2,1,1,3,3,3,2,2,1,2,2,3,1,1,2,0,0,2,2,1,3,0,0,2,1,1,2,1,1,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,2,3,3,3,3,1,2,2,2,1,2,1,3,3,1,1,2,1,2,1,2,2,0,2,0,0,1,1,0,1,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
+2,3,3,3,3,3,2,1,3,2,2,3,2,0,3,2,0,3,0,1,0,1,1,0,0,1,1,1,1,0,1,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,2,3,3,3,2,2,2,3,3,1,2,1,2,1,0,1,0,1,1,0,1,0,0,2,1,1,1,0,1,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, +3,1,1,2,1,2,3,3,2,2,1,2,2,3,0,2,1,0,0,2,2,3,2,1,2,2,2,2,2,3,1,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,1,1,0,1,1,2,2,1,1,3,0,0,1,3,1,1,1,0,0,0,1,0,1,1,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,1,3,3,3,2,0,0,0,2,1,0,1,0,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,0,1,0,0,2,3,2,2,2,1,2,2,2,1,2,1,0,0,1,1,1,0,2,0,1,1,1,0,0,1,1, +1,0,0,0,0,0,1,2,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0, +2,3,3,3,3,0,0,0,0,1,0,0,0,0,3,0,1,2,1,0,0,0,0,0,0,0,1,1,0,0,1,1, +1,0,1,0,1,2,0,0,1,1,2,1,0,1,1,1,1,0,1,1,1,1,0,1,0,0,1,0,0,1,1,0, +2,2,3,2,2,2,3,1,2,2,2,2,2,2,2,2,1,1,1,1,1,1,1,0,1,0,1,1,1,0,2,1, +1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,0,1,1,1,0,1,1,0, +3,3,3,2,2,2,2,3,2,2,1,1,2,2,2,2,1,1,3,1,2,1,2,0,0,1,1,0,1,0,2,1, +1,1,1,1,1,2,1,0,1,1,1,1,0,1,0,0,1,1,0,0,1,0,1,0,0,1,0,0,0,1,1,0, +2,0,0,1,0,3,2,2,2,2,1,2,1,2,1,2,0,0,0,2,1,2,2,1,1,2,2,0,1,1,0,2, +1,1,1,1,1,0,1,1,1,2,1,1,1,2,1,0,1,2,1,1,1,1,0,1,1,1,0,0,1,0,0,1, +1,3,2,2,2,1,1,1,2,3,0,0,0,0,2,0,2,2,1,0,0,0,0,0,0,1,0,0,0,0,1,1, +1,0,1,1,0,1,0,1,1,0,1,1,0,2,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0, +2,3,2,3,2,1,2,2,2,2,1,0,0,0,2,0,0,1,1,0,0,0,0,0,0,0,1,1,0,0,2,1, +1,1,2,1,0,2,0,0,1,0,1,0,0,1,0,0,1,1,0,1,1,0,0,0,0,0,1,0,0,0,0,0, +3,0,0,1,0,2,2,2,3,2,2,2,2,2,2,2,0,0,0,2,1,2,1,1,1,2,2,0,0,0,1,2, +1,1,1,1,1,0,1,2,1,1,1,1,1,1,1,0,1,1,1,1,1,1,0,1,1,1,1,1,1,0,0,1, +2,3,2,3,3,2,0,1,1,1,0,0,1,0,2,0,1,1,3,1,0,0,0,0,0,0,0,1,0,0,2,1, +1,1,1,1,1,1,1,0,1,0,1,1,1,1,0,1,1,1,0,0,1,1,0,1,0,0,0,0,0,0,1,0, +2,3,3,3,3,1,2,2,2,2,0,1,1,0,2,1,1,1,2,1,0,1,1,0,0,1,0,1,0,0,2,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,3,3,3,2,0,0,1,1,2,2,1,0,0,2,0,1,1,3,0,0,1,0,0,0,0,0,1,0,1,2,1, +1,1,2,0,1,1,1,0,1,0,1,1,0,1,0,1,1,1,1,0,1,0,0,0,0,0,0,1,0,1,1,0, +1,3,2,3,2,1,0,0,2,2,2,0,1,0,2,0,1,1,1,0,1,0,0,0,3,0,1,1,0,0,2,1, +1,1,1,0,1,1,0,0,0,0,1,1,0,1,0,0,2,1,1,0,1,0,0,0,1,0,1,0,0,1,1,0, +3,1,2,1,1,2,2,2,2,2,2,1,2,2,1,1,0,0,0,2,2,2,0,0,0,1,2,1,0,1,0,1, +2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,2,1,1,1,0,1,0,1,1,0,1,1,1,0,0,1, +3,0,0,0,0,2,0,1,1,1,1,1,1,1,0,1,0,0,0,1,1,1,0,1,0,1,1,0,0,1,0,1, +1,1,0,0,1,0,0,0,1,0,1,1,0,0,1,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0,0,1, +1,3,3,2,2,0,0,0,2,2,0,0,0,1,2,0,1,1,2,0,0,0,0,0,0,0,0,1,0,0,2,1, +0,1,1,0,0,1,1,0,0,0,1,1,0,1,1,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0, +2,3,2,3,2,0,0,0,0,1,1,0,0,0,2,0,2,0,2,0,0,0,0,0,1,0,0,1,0,0,1,1, +1,1,2,0,1,2,1,0,1,1,2,1,1,1,1,1,2,1,1,0,1,0,0,1,1,1,1,1,0,1,1,0, +1,3,2,2,2,1,0,0,2,2,1,0,1,2,2,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,1,1, +0,0,1,1,0,1,1,0,0,1,1,0,1,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0, +1,0,0,1,0,2,3,1,2,2,2,2,2,2,1,1,0,0,0,1,0,1,0,2,1,1,1,0,0,0,0,1, +1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0, +2,0,2,0,0,1,0,3,2,1,2,1,2,2,0,1,0,0,0,2,1,0,0,2,1,1,1,1,0,2,0,2, +2,1,1,1,1,1,1,1,1,1,1,1,1,2,1,0,1,1,1,1,0,0,0,1,1,1,1,0,1,0,0,1, +1,2,2,2,2,1,0,0,1,0,0,0,0,0,2,0,1,1,1,1,0,0,0,0,1,0,1,2,0,0,2,0, +1,0,1,1,1,2,1,0,1,0,1,1,0,0,1,0,1,1,1,0,1,0,0,0,1,0,0,1,0,1,1,0, +2,1,2,2,2,0,3,0,1,1,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +0,0,0,1,1,1,0,0,1,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0, +1,2,2,3,2,2,0,0,1,1,2,0,1,2,1,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1, 
+0,1,1,0,0,1,1,0,0,1,1,0,0,1,1,0,1,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0, +2,2,1,1,2,1,2,2,2,2,2,1,2,2,0,1,0,0,0,1,2,2,2,1,2,1,1,1,1,1,2,1, +1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,0,1,1,1,0,0,0,0,1,1,1,0,1,1,0,0,1, +1,2,2,2,2,0,1,0,2,2,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0, +0,0,1,0,0,1,0,0,0,0,1,0,1,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0, +0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,2,2,2,2,0,0,0,2,2,2,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,1, +0,1,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,2,2,2,2,0,0,0,0,1,0,0,1,1,2,0,0,0,0,1,0,1,0,0,1,0,0,2,0,0,0,1, +0,0,1,0,0,1,0,0,0,1,1,0,0,0,0,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, +1,2,2,2,1,1,2,0,2,1,1,1,1,0,2,2,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1,1, +0,0,1,0,1,1,0,0,0,0,1,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0, +1,0,2,1,2,0,0,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0, +0,0,1,0,1,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0, +1,0,0,0,0,2,0,1,2,1,0,1,1,1,0,1,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,1, +0,0,0,0,0,1,0,0,1,1,0,0,1,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1, +2,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +1,0,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +1,1,1,0,1,0,1,0,0,1,1,1,1,0,0,0,1,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0, +1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +1,1,0,1,1,0,1,0,1,0,0,0,0,1,1,0,1,1,0,0,0,0,0,1,0,1,1,0,1,0,0,0, +0,1,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0, +) + +Koi8rModel = { + 'char_to_order_map': KOI8R_char_to_order_map, + 'precedence_matrix': RussianLangModel, + 'typical_positive_ratio': 0.976601, + 'keep_english_letter': False, + 'charset_name': "KOI8-R", + 'language': 'Russian', +} + +Win1251CyrillicModel = { + 'char_to_order_map': win1251_char_to_order_map, + 'precedence_matrix': RussianLangModel, + 'typical_positive_ratio': 0.976601, + 'keep_english_letter': False, + 'charset_name': "windows-1251", + 'language': 'Russian', +} + +Latin5CyrillicModel = { + 'char_to_order_map': latin5_char_to_order_map, + 'precedence_matrix': RussianLangModel, + 'typical_positive_ratio': 0.976601, + 'keep_english_letter': False, + 'charset_name': "ISO-8859-5", + 'language': 'Russian', +} + +MacCyrillicModel = { + 'char_to_order_map': macCyrillic_char_to_order_map, + 'precedence_matrix': RussianLangModel, + 'typical_positive_ratio': 0.976601, + 'keep_english_letter': False, + 'charset_name': "MacCyrillic", + 'language': 'Russian', +} + +Ibm866Model = { + 'char_to_order_map': IBM866_char_to_order_map, + 'precedence_matrix': RussianLangModel, + 'typical_positive_ratio': 0.976601, + 'keep_english_letter': False, + 'charset_name': "IBM866", + 'language': 'Russian', +} + +Ibm855Model = { + 'char_to_order_map': IBM855_char_to_order_map, + 'precedence_matrix': RussianLangModel, + 'typical_positive_ratio': 0.976601, + 'keep_english_letter': False, + 'charset_name': "IBM855", + 'language': 'Russian', +} diff --git a/env/lib/python3.6/site-packages/chardet/langgreekmodel.py b/env/lib/python3.6/site-packages/chardet/langgreekmodel.py new file mode 100644 index 0000000..5332221 --- /dev/null +++ b/env/lib/python3.6/site-packages/chardet/langgreekmodel.py @@ -0,0 +1,225 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. 
+# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# 255: Control characters that usually does not exist in any text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 + +# Character Mapping Table: +Latin7_char_to_order_map = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253, 82,100,104, 94, 98,101,116,102,111,187,117, 92, 88,113, 85, # 40 + 79,118,105, 83, 67,114,119, 95, 99,109,188,253,253,253,253,253, # 50 +253, 72, 70, 80, 81, 60, 96, 93, 89, 68,120, 97, 77, 86, 69, 55, # 60 + 78,115, 65, 66, 58, 76,106,103, 87,107,112,253,253,253,253,253, # 70 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 80 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 90 +253,233, 90,253,253,253,253,253,253,253,253,253,253, 74,253,253, # a0 +253,253,253,253,247,248, 61, 36, 46, 71, 73,253, 54,253,108,123, # b0 +110, 31, 51, 43, 41, 34, 91, 40, 52, 47, 44, 53, 38, 49, 59, 39, # c0 + 35, 48,250, 37, 33, 45, 56, 50, 84, 57,120,121, 17, 18, 22, 15, # d0 +124, 1, 29, 20, 21, 3, 32, 13, 25, 5, 11, 16, 10, 6, 30, 4, # e0 + 9, 8, 14, 7, 2, 12, 28, 23, 42, 24, 64, 75, 19, 26, 27,253, # f0 +) + +win1253_char_to_order_map = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253, 82,100,104, 94, 98,101,116,102,111,187,117, 92, 88,113, 85, # 40 + 79,118,105, 83, 67,114,119, 95, 99,109,188,253,253,253,253,253, # 50 +253, 72, 70, 80, 81, 60, 96, 93, 89, 68,120, 97, 77, 86, 69, 55, # 60 + 78,115, 65, 66, 58, 76,106,103, 87,107,112,253,253,253,253,253, # 70 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 80 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 90 +253,233, 61,253,253,253,253,253,253,253,253,253,253, 74,253,253, # a0 +253,253,253,253,247,253,253, 36, 46, 71, 73,253, 54,253,108,123, # b0 +110, 31, 51, 43, 41, 34, 91, 40, 52, 47, 44, 53, 38, 49, 59, 39, # c0 + 35, 48,250, 37, 33, 45, 56, 50, 84, 57,120,121, 17, 18, 22, 15, # d0 +124, 1, 29, 20, 21, 3, 32, 13, 25, 5, 11, 16, 10, 6, 30, 4, # e0 + 9, 8, 14, 7, 2, 12, 28, 23, 42, 
24, 64, 75, 19, 26, 27,253, # f0 +) + +# Model Table: +# total sequences: 100% +# first 512 sequences: 98.2851% +# first 1024 sequences:1.7001% +# rest sequences: 0.0359% +# negative sequences: 0.0148% +GreekLangModel = ( +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,3,2,2,3,3,3,3,3,3,3,3,1,3,3,3,0,2,2,3,3,0,3,0,3,2,0,3,3,3,0, +3,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,3,0,3,3,0,3,2,3,3,0,3,2,3,3,3,0,0,3,0,3,0,3,3,2,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0, +0,2,3,2,2,3,3,3,3,3,3,3,3,0,3,3,3,3,0,2,3,3,0,3,3,3,3,2,3,3,3,0, +2,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,2,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,0,2,1,3,3,3,3,2,3,3,2,3,3,2,0, +0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,0,3,3,3,3,3,3,0,3,3,0,3,3,3,3,3,3,3,3,3,3,0,3,2,3,3,0, +2,0,1,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, +0,3,3,3,3,3,2,3,0,0,0,0,3,3,0,3,1,3,3,3,0,3,3,0,3,3,3,3,0,0,0,0, +2,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,3,0,3,0,3,3,3,3,3,0,3,2,2,2,3,0,2,3,3,3,3,3,2,3,3,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,3,3,2,2,2,3,3,3,3,0,3,1,3,3,3,3,2,3,3,3,3,3,3,3,2,2,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,3,2,0,3,0,0,0,3,3,2,3,3,3,3,3,0,0,3,2,3,0,2,3,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,0,3,3,3,3,0,0,3,3,0,2,3,0,3,0,3,3,3,0,0,3,0,3,0,2,2,3,3,0,0, +0,0,1,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,3,2,0,3,2,3,3,3,3,0,3,3,3,3,3,0,3,3,2,3,2,3,3,2,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,2,3,2,3,3,3,3,3,3,0,2,3,2,3,2,2,2,3,2,3,3,2,3,0,2,2,2,3,0, +2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,3,0,0,0,3,3,3,2,3,3,0,0,3,0,3,0,0,0,3,2,0,3,0,3,0,0,2,0,2,0, +0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,0,3,3,3,3,3,3,0,3,3,0,3,0,0,0,3,3,0,3,3,3,0,0,1,2,3,0, +3,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,3,2,0,0,3,2,2,3,3,0,3,3,3,3,3,2,1,3,0,3,2,3,3,2,1,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,3,3,0,2,3,3,3,3,3,3,0,0,3,0,3,0,0,0,3,3,0,3,2,3,0,0,3,3,3,0, +3,0,0,0,2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,0,3,3,3,3,3,3,0,0,3,0,3,0,0,0,3,2,0,3,2,3,0,0,3,2,3,0, +2,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,3,1,2,2,3,3,3,3,3,3,0,2,3,0,3,0,0,0,3,3,0,3,0,2,0,0,2,3,1,0, +2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,0,3,3,3,3,0,3,0,3,3,2,3,0,3,3,3,3,3,3,0,3,3,3,0,2,3,0,0,3,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,0,3,3,3,0,0,3,0,0,0,3,3,0,3,0,2,3,3,0,0,3,0,3,0,3,3,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,3,0,0,0,3,3,3,3,3,3,0,0,3,0,2,0,0,0,3,3,0,3,0,3,0,0,2,0,2,0, +0,0,0,0,1,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,3,3,0,3,0,2,0,3,2,0,3,2,3,2,3,0,0,3,2,3,2,3,3,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,3,0,0,2,3,3,3,3,3,0,0,0,3,0,2,1,0,0,3,2,2,2,0,3,0,0,2,2,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
+0,3,0,3,3,3,2,0,3,0,3,0,3,3,0,2,1,2,3,3,0,0,3,0,3,0,3,3,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,2,3,3,3,0,3,3,3,3,3,3,0,2,3,0,3,0,0,0,2,1,0,2,2,3,0,0,2,2,2,0, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,3,0,0,2,3,3,3,2,3,0,0,1,3,0,2,0,0,0,0,3,0,1,0,2,0,0,1,1,1,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,3,1,0,3,0,0,0,3,2,0,3,2,3,3,3,0,0,3,0,3,2,2,2,1,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,0,3,3,3,0,0,3,0,0,0,0,2,0,2,3,3,2,2,2,2,3,0,2,0,2,2,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,2,0,0,0,0,0,0,2,3,0,2,0,2,3,2,0,0,3,0,3,0,3,1,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,3,2,3,3,2,2,3,0,2,0,3,0,0,0,2,0,0,0,0,1,2,0,2,0,2,0, +0,2,0,2,0,2,2,0,0,1,0,2,2,2,0,2,2,2,0,2,2,2,0,0,2,0,0,1,0,0,0,0, +0,2,0,3,3,2,0,0,0,0,0,0,1,3,0,2,0,2,2,2,0,0,2,0,3,0,0,2,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,0,2,3,2,0,2,2,0,2,0,2,2,0,2,0,2,2,2,0,0,0,0,0,0,2,3,0,0,0,2, +0,1,2,0,0,0,0,2,2,0,0,0,2,1,0,2,2,0,0,0,0,0,0,1,0,2,0,0,0,0,0,0, +0,0,2,1,0,2,3,2,2,3,2,3,2,0,0,3,3,3,0,0,3,2,0,0,0,1,1,0,2,0,2,2, +0,2,0,2,0,2,2,0,0,2,0,2,2,2,0,2,2,2,2,0,0,2,0,0,0,2,0,1,0,0,0,0, +0,3,0,3,3,2,2,0,3,0,0,0,2,2,0,2,2,2,1,2,0,0,1,2,2,0,0,3,0,0,0,2, +0,1,2,0,0,0,1,2,0,0,0,0,0,0,0,2,2,0,1,0,0,2,0,0,0,2,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,2,3,3,2,2,0,0,0,2,0,2,3,3,0,2,0,0,0,0,0,0,2,2,2,0,2,2,0,2,0,2, +0,2,2,0,0,2,2,2,2,1,0,0,2,2,0,2,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0, +0,2,0,3,2,3,0,0,0,3,0,0,2,2,0,2,0,2,2,2,0,0,2,0,0,0,0,0,0,0,0,2, +0,0,2,2,0,0,2,2,2,0,0,0,0,0,0,2,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,2,0,0,3,2,0,2,2,2,2,2,0,0,0,2,0,0,0,0,2,0,1,0,0,2,0,1,0,0,0, +0,2,2,2,0,2,2,0,1,2,0,2,2,2,0,2,2,2,2,1,2,2,0,0,2,0,0,0,0,0,0,0, +0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, +0,2,0,2,0,2,2,0,0,0,0,1,2,1,0,0,2,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,3,2,3,0,0,2,0,0,0,2,2,0,2,0,0,0,1,0,0,2,0,2,0,2,2,0,0,0,0, +0,0,2,0,0,0,0,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0, +0,2,2,3,2,2,0,0,0,0,0,0,1,3,0,2,0,2,2,0,0,0,1,0,2,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,2,0,2,0,3,2,0,2,0,0,0,0,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +0,0,2,0,0,0,0,1,1,0,0,2,1,2,0,2,2,0,1,0,0,1,0,0,0,2,0,0,0,0,0,0, +0,3,0,2,2,2,0,0,2,0,0,0,2,0,0,0,2,3,0,2,0,0,0,0,0,0,2,2,0,0,0,2, +0,1,2,0,0,0,1,2,2,1,0,0,0,2,0,0,2,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,2,1,2,0,2,2,0,2,0,0,2,0,0,0,0,1,2,1,0,2,1,0,0,0,0,0,0,0,0,0,0, +0,0,2,0,0,0,3,1,2,2,0,2,0,0,0,0,2,0,0,0,2,0,0,3,0,0,0,0,2,2,2,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,2,1,0,2,0,1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,2, +0,2,2,0,0,2,2,2,2,2,0,1,2,0,0,0,2,2,0,1,0,2,0,0,2,2,0,0,0,0,0,0, +0,0,0,0,1,0,0,0,0,0,0,0,3,0,0,2,0,0,0,0,0,0,0,0,2,0,2,0,0,0,0,2, +0,1,2,0,0,0,0,2,2,1,0,1,0,1,0,2,2,2,1,0,0,0,0,0,0,1,0,0,0,0,0,0, +0,2,0,1,2,0,0,0,0,0,0,0,0,0,0,2,0,0,2,2,0,0,0,0,1,0,0,0,0,0,0,2, +0,2,2,0,0,0,0,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,2,0,0,0, +0,2,2,2,2,0,0,0,3,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0,1, +0,0,2,0,0,0,0,1,2,0,0,0,0,0,0,2,2,1,1,0,0,0,0,0,0,1,0,0,0,0,0,0, +0,2,0,2,2,2,0,0,2,0,0,0,0,0,0,0,2,2,2,0,0,0,2,0,0,0,0,0,0,0,0,2, 
+0,0,1,0,0,0,0,2,1,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0, +0,3,0,2,0,0,0,0,0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,2, +0,0,2,0,0,0,0,2,2,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,2,0,2,2,1,0,0,0,0,0,0,2,0,0,2,0,2,2,2,0,0,0,0,0,0,2,0,0,0,0,2, +0,0,2,0,0,2,0,2,2,0,0,0,0,2,0,2,0,0,0,0,0,2,0,0,0,2,0,0,0,0,0,0, +0,0,3,0,0,0,2,2,0,2,2,0,0,0,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,2,0,0,0,0,0, +0,2,2,2,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1, +0,0,0,0,0,0,0,2,1,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,2,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, +0,2,0,0,0,2,0,0,0,0,0,1,0,0,0,0,2,2,0,0,0,1,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,2,0,0,0, +0,2,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,2,0,2,0,0,0, +0,0,0,0,0,0,0,0,2,1,0,0,0,0,0,0,2,0,0,0,1,2,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +) + +Latin7GreekModel = { + 'char_to_order_map': Latin7_char_to_order_map, + 'precedence_matrix': GreekLangModel, + 'typical_positive_ratio': 0.982851, + 'keep_english_letter': False, + 'charset_name': "ISO-8859-7", + 'language': 'Greek', +} + +Win1253GreekModel = { + 'char_to_order_map': win1253_char_to_order_map, + 'precedence_matrix': GreekLangModel, + 'typical_positive_ratio': 0.982851, + 'keep_english_letter': False, + 'charset_name': "windows-1253", + 'language': 'Greek', +} diff --git a/env/lib/python3.6/site-packages/chardet/langhebrewmodel.py b/env/lib/python3.6/site-packages/chardet/langhebrewmodel.py new file mode 100644 index 0000000..58f4c87 --- /dev/null +++ b/env/lib/python3.6/site-packages/chardet/langhebrewmodel.py @@ -0,0 +1,200 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Simon Montagu +# Portions created by the Initial Developer are Copyright (C) 2005 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# Shoshannah Forbes - original C code (?) +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# 255: Control characters that usually does not exist in any text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 + +# Windows-1255 language model +# Character Mapping Table: +WIN1255_CHAR_TO_ORDER_MAP = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253, 69, 91, 79, 80, 92, 89, 97, 90, 68,111,112, 82, 73, 95, 85, # 40 + 78,121, 86, 71, 67,102,107, 84,114,103,115,253,253,253,253,253, # 50 +253, 50, 74, 60, 61, 42, 76, 70, 64, 53,105, 93, 56, 65, 54, 49, # 60 + 66,110, 51, 43, 44, 63, 81, 77, 98, 75,108,253,253,253,253,253, # 70 +124,202,203,204,205, 40, 58,206,207,208,209,210,211,212,213,214, +215, 83, 52, 47, 46, 72, 32, 94,216,113,217,109,218,219,220,221, + 34,116,222,118,100,223,224,117,119,104,125,225,226, 87, 99,227, +106,122,123,228, 55,229,230,101,231,232,120,233, 48, 39, 57,234, + 30, 59, 41, 88, 33, 37, 36, 31, 29, 35,235, 62, 28,236,126,237, +238, 38, 45,239,240,241,242,243,127,244,245,246,247,248,249,250, + 9, 8, 20, 16, 3, 2, 24, 14, 22, 1, 25, 15, 4, 11, 6, 23, + 12, 19, 13, 26, 18, 27, 21, 17, 7, 10, 5,251,252,128, 96,253, +) + +# Model Table: +# total sequences: 100% +# first 512 sequences: 98.4004% +# first 1024 sequences: 1.5981% +# rest sequences: 0.087% +# negative sequences: 0.0015% +HEBREW_LANG_MODEL = ( +0,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,2,3,2,1,2,0,1,0,0, +3,0,3,1,0,0,1,3,2,0,1,1,2,0,2,2,2,1,1,1,1,2,1,1,1,2,0,0,2,2,0,1, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2, +1,2,1,2,1,2,0,0,2,0,0,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2, +1,2,1,3,1,1,0,0,2,0,0,0,1,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,0,1,2,2,1,3, +1,2,1,1,2,2,0,0,2,2,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,1,0,1,1,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,2,2,2,2,3,2, +1,2,1,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,2,3,2,2,3,2,2,2,1,2,2,2,2, +1,2,1,1,2,2,0,1,2,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,0,2,2,2,2,2, +0,2,0,2,2,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,0,2,2,2, +0,2,1,2,2,2,0,0,2,1,0,0,0,0,1,0,1,0,0,0,0,0,0,2,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,2,1,2,3,2,2,2, +1,2,1,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,1,0, +3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,1,0,2,0,2, +0,2,1,2,2,2,0,0,1,2,0,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,2,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,2,3,2,2,3,2,1,2,1,1,1, +0,1,1,1,1,1,3,0,1,0,0,0,0,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, +3,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,1,1,0,0,1,0,0,1,0,0,0,0, +0,0,1,0,0,0,0,0,2,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,2,2,2, +0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, 
+3,3,3,3,3,3,3,3,3,2,3,3,3,2,1,2,3,3,2,3,3,3,3,2,3,2,1,2,0,2,1,2, +0,2,0,2,2,2,0,0,1,2,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0, +3,3,3,3,3,3,3,3,3,2,3,3,3,1,2,2,3,3,2,3,2,3,2,2,3,1,2,2,0,2,2,2, +0,2,1,2,2,2,0,0,1,2,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,1,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,2,2,2,3,3,3,3,1,3,2,2,2, +0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,3,3,3,2,3,2,2,2,1,2,2,0,2,2,2,2, +0,2,0,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,1,3,2,3,3,2,3,3,2,2,1,2,2,2,2,2,2, +0,2,1,2,1,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,2,3,2,3,3,2,3,3,3,3,2,3,2,3,3,3,3,3,2,2,2,2,2,2,2,1, +0,2,0,1,2,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,2,1,2,3,3,3,3,3,3,3,2,3,2,3,2,1,2,3,0,2,1,2,2, +0,2,1,1,2,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,2,0, +3,3,3,3,3,3,3,3,3,2,3,3,3,3,2,1,3,1,2,2,2,1,2,3,3,1,2,1,2,2,2,2, +0,1,1,1,1,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,0,2,3,3,3,1,3,3,3,1,2,2,2,2,1,1,2,2,2,2,2,2, +0,2,0,1,1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,2,3,3,3,2,2,3,3,3,2,1,2,3,2,3,2,2,2,2,1,2,1,1,1,2,2, +0,2,1,1,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, +3,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,1,0,0,0,0,0, +1,0,1,0,0,0,0,0,2,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,2,3,3,2,3,1,2,2,2,2,3,2,3,1,1,2,2,1,2,2,1,1,0,2,2,2,2, +0,1,0,1,2,2,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, +3,0,0,1,1,0,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,2,2,0, +0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,1,0,1,0,1,1,0,1,1,0,0,0,1,1,0,1,1,1,0,0,0,0,0,0,1,0,0,0,0,0, +0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,0,0,1,1,0,1,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, +3,2,2,1,2,2,2,2,2,2,2,1,2,2,1,2,2,1,1,1,1,1,1,1,1,2,1,1,0,3,3,3, +0,3,0,2,2,2,2,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, +2,2,2,3,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,1,2,2,1,2,2,2,1,1,1,2,0,1, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,2,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,2,2,2,2,2,2,0,2,2,0,0,0,0,0,0, +0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,3,1,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,1,2,1,0,2,1,0, +0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, +0,3,1,1,2,2,2,2,2,1,2,2,2,1,1,2,2,2,2,2,2,2,1,2,2,1,0,1,1,1,1,0, +0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,2,1,1,1,1,2,1,1,2,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,0,0,0,0, +0,0,2,0,0,0,0,0,0,0,0,1,1,0,0,0,0,1,1,0,0,1,1,0,0,0,0,0,0,1,0,0, +2,1,1,2,2,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,1,2,1,2,1,1,1,1,0,0,0,0, +0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,2,1,2,2,2,2,2,2,2,2,2,2,1,2,1,2,1,1,2,1,1,1,2,1,2,1,2,0,1,0,1, +0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,1,2,2,2,1,2,2,2,2,2,2,2,2,1,2,1,1,1,1,1,1,2,1,2,1,1,0,1,0,1, +0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,1,2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2, +0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, +3,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0, 
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,1,1,1,1,1,1,1,0,1,1,0,1,0,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,2,0,1,1,1,0,1,0,0,0,1,1,0,1,1,0,0,0,0,0,1,1,0,0, +0,1,1,1,2,1,2,2,2,0,2,0,2,0,1,1,2,1,1,1,1,2,1,0,1,1,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,1,0,0,0,0,0,1,0,1,2,2,0,1,0,0,1,1,2,2,1,2,0,2,0,0,0,1,2,0,1, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,2,0,2,1,2,0,2,0,0,1,1,1,1,1,1,0,1,0,0,0,1,0,0,1, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,1,0,0,0,0,0,1,0,2,1,1,0,1,0,0,1,1,1,2,2,0,0,1,0,0,0,1,0,0,1, +1,1,2,1,0,1,1,1,0,1,0,1,1,1,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,2,2,1, +0,2,0,1,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,1,0,0,1,0,1,1,1,1,0,0,0,0,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,1,1,1,1,1,1,1,1,2,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,1,1,1,0,0,0,0,1,1,1,0,1,1,0,1,0,0,0,1,1,0,1, +2,0,1,0,1,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,1,0,1,1,1,0,1,0,0,1,1,2,1,1,2,0,1,0,0,0,1,1,0,1, +1,0,0,1,0,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,0,0,2,1,1,2,0,2,0,0,0,1,1,0,1, +1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,1,0,2,1,1,0,1,0,0,2,2,1,2,1,1,0,1,0,0,0,1,1,0,1, +2,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,1,2,2,0,0,0,0,0,1,1,0,1,0,0,1,0,0,0,0,1,0,1, +1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,1,2,2,0,0,0,0,2,1,1,1,0,2,1,1,0,0,0,2,1,0,1, +1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,1,1,0,2,1,1,0,1,0,0,0,1,1,0,1, +2,2,1,1,1,0,1,1,0,1,1,0,1,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,1,0,2,1,1,0,1,0,0,1,1,0,1,2,1,0,2,0,0,0,1,1,0,1, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0, +0,1,0,0,2,0,2,1,1,0,1,0,1,0,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,1,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,1,1,1,0,1,0,0,1,0,0,0,1,0,0,1, +1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,0,0,0,0,0,0,1,0,1,1,0,0,1,0,0,2,1,1,1,1,1,0,1,0,0,0,0,1,0,1, +0,1,1,1,2,1,1,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,1,2,1,0,0,0,0,0,1,1,1,1,1,0,1,0,0,0,1,1,0,0, +) + +Win1255HebrewModel = { + 'char_to_order_map': WIN1255_CHAR_TO_ORDER_MAP, + 'precedence_matrix': HEBREW_LANG_MODEL, + 'typical_positive_ratio': 0.984004, + 'keep_english_letter': False, + 'charset_name': "windows-1255", + 'language': 'Hebrew', +} diff --git 
a/env/lib/python3.6/site-packages/chardet/langhungarianmodel.py b/env/lib/python3.6/site-packages/chardet/langhungarianmodel.py new file mode 100644 index 0000000..bb7c095 --- /dev/null +++ b/env/lib/python3.6/site-packages/chardet/langhungarianmodel.py @@ -0,0 +1,225 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# 255: Control characters that usually does not exist in any text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 + +# Character Mapping Table: +Latin2_HungarianCharToOrderMap = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253, 28, 40, 54, 45, 32, 50, 49, 38, 39, 53, 36, 41, 34, 35, 47, + 46, 71, 43, 33, 37, 57, 48, 64, 68, 55, 52,253,253,253,253,253, +253, 2, 18, 26, 17, 1, 27, 12, 20, 9, 22, 7, 6, 13, 4, 8, + 23, 67, 10, 5, 3, 21, 19, 65, 62, 16, 11,253,253,253,253,253, +159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174, +175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190, +191,192,193,194,195,196,197, 75,198,199,200,201,202,203,204,205, + 79,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220, +221, 51, 81,222, 78,223,224,225,226, 44,227,228,229, 61,230,231, +232,233,234, 58,235, 66, 59,236,237,238, 60, 69, 63,239,240,241, + 82, 14, 74,242, 70, 80,243, 72,244, 15, 83, 77, 84, 30, 76, 85, +245,246,247, 25, 73, 42, 24,248,249,250, 31, 56, 29,251,252,253, +) + +win1250HungarianCharToOrderMap = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253, 28, 40, 54, 45, 32, 50, 49, 38, 39, 53, 36, 41, 34, 35, 47, + 46, 72, 43, 33, 37, 57, 48, 64, 68, 55, 52,253,253,253,253,253, +253, 2, 18, 26, 17, 1, 27, 12, 20, 9, 22, 7, 6, 13, 4, 8, + 23, 67, 10, 5, 3, 21, 19, 65, 62, 16, 11,253,253,253,253,253, +161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176, +177,178,179,180, 78,181, 69,182,183,184,185,186,187,188,189,190, +191,192,193,194,195,196,197, 76,198,199,200,201,202,203,204,205, + 
81,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220, +221, 51, 83,222, 80,223,224,225,226, 44,227,228,229, 61,230,231, +232,233,234, 58,235, 66, 59,236,237,238, 60, 70, 63,239,240,241, + 84, 14, 75,242, 71, 82,243, 73,244, 15, 85, 79, 86, 30, 77, 87, +245,246,247, 25, 74, 42, 24,248,249,250, 31, 56, 29,251,252,253, +) + +# Model Table: +# total sequences: 100% +# first 512 sequences: 94.7368% +# first 1024 sequences:5.2623% +# rest sequences: 0.8894% +# negative sequences: 0.0009% +HungarianLangModel = ( +0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3, +3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,2,2,3,3,1,1,2,2,2,2,2,1,2, +3,2,2,3,3,3,3,3,2,3,3,3,3,3,3,1,2,3,3,3,3,2,3,3,1,1,3,3,0,1,1,1, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0, +3,2,1,3,3,3,3,3,2,3,3,3,3,3,1,1,2,3,3,3,3,3,3,3,1,1,3,2,0,1,1,1, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,1,1,2,3,3,3,1,3,3,3,3,3,1,3,3,2,2,0,3,2,3, +0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0, +3,3,3,3,3,3,2,3,3,3,2,3,3,2,3,3,3,3,3,2,3,3,2,2,3,2,3,2,0,3,2,2, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0, +3,3,3,3,3,3,2,3,3,3,3,3,2,3,3,3,1,2,3,2,2,3,1,2,3,3,2,2,0,3,3,3, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,3,2,3,3,3,3,2,3,3,3,3,0,2,3,2, +0,0,0,1,1,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,3,1,1,1,3,3,2,1,3,2,2,3,2,1,3,2,2,1,0,3,3,1, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,2,2,3,3,3,3,3,1,2,3,3,3,3,1,2,1,3,3,3,3,2,2,3,1,1,3,2,0,1,1,1, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,2,1,3,3,3,3,3,2,2,1,3,3,3,0,1,1,2, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,2,3,3,3,2,0,3,2,3, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,1,0, +3,3,3,3,3,3,2,3,3,3,2,3,2,3,3,3,1,3,2,2,2,3,1,1,3,3,1,1,0,3,3,2, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,2,3,3,3,2,3,2,3,3,3,2,3,3,3,3,3,1,2,3,2,2,0,2,2,2, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,3,3,2,2,2,3,1,3,3,2,2,1,3,3,3,1,1,3,1,2,3,2,3,2,2,2,1,0,2,2,2, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0, +3,1,1,3,3,3,3,3,1,2,3,3,3,3,1,2,1,3,3,3,2,2,3,2,1,0,3,2,0,1,1,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,1,1,3,3,3,3,3,1,2,3,3,3,3,1,1,0,3,3,3,3,0,2,3,0,0,2,1,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,2,2,3,3,2,2,2,2,3,3,0,1,2,3,2,3,2,2,3,2,1,2,0,2,2,2, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0, +3,3,3,3,3,3,1,2,3,3,3,2,1,2,3,3,2,2,2,3,2,3,3,1,3,3,1,1,0,2,3,2, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,3,3,1,2,2,2,2,3,3,3,1,1,1,3,3,1,1,3,1,1,3,2,1,2,3,1,1,0,2,2,2, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,3,3,2,1,2,1,1,3,3,1,1,1,1,3,3,1,1,2,2,1,2,1,1,2,2,1,1,0,2,2,1, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,3,3,1,1,2,1,1,3,3,1,0,1,1,3,3,2,0,1,1,2,3,1,0,2,2,1,0,0,1,3,2, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,2,1,3,3,3,3,3,1,2,3,2,3,3,2,1,1,3,2,3,2,1,2,2,0,1,2,1,0,0,1,1, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, +3,3,3,3,2,2,2,2,3,1,2,2,1,1,3,3,0,3,2,1,2,3,2,1,3,3,1,1,0,2,1,3, 
+0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,3,3,2,2,2,3,2,3,3,3,2,1,1,3,3,1,1,1,2,2,3,2,3,2,2,2,1,0,2,2,1, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +1,0,0,3,3,3,3,3,0,0,3,3,2,3,0,0,0,2,3,3,1,0,1,2,0,0,1,1,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,1,2,3,3,3,3,3,1,2,3,3,2,2,1,1,0,3,3,2,2,1,2,2,1,0,2,2,0,1,1,1, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,2,2,1,3,1,2,3,3,2,2,1,1,2,2,1,1,1,1,3,2,1,1,1,1,2,1,0,1,2,1, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0, +2,3,3,1,1,1,1,1,3,3,3,0,1,1,3,3,1,1,1,1,1,2,2,0,3,1,1,2,0,2,1,1, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,1,0,1,2,1,2,2,0,1,2,3,1,2,0,0,0,2,1,1,1,1,1,2,0,0,1,1,0,0,0,0, +1,2,1,2,2,2,1,2,1,2,0,2,0,2,2,1,1,2,1,1,2,1,1,1,0,1,0,0,0,1,1,0, +1,1,1,2,3,2,3,3,0,1,2,2,3,1,0,1,0,2,1,2,2,0,1,1,0,0,1,1,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,0,3,3,2,2,1,0,0,3,2,3,2,0,0,0,1,1,3,0,0,1,1,0,0,2,1,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,1,1,2,2,3,3,1,0,1,3,2,3,1,1,1,0,1,1,1,1,1,3,1,0,0,2,2,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,1,1,1,2,2,2,1,0,1,2,3,3,2,0,0,0,2,1,1,1,2,1,1,1,0,1,1,1,0,0,0, +1,2,2,2,2,2,1,1,1,2,0,2,1,1,1,1,1,2,1,1,1,1,1,1,0,1,1,1,0,0,1,1, +3,2,2,1,0,0,1,1,2,2,0,3,0,1,2,1,1,0,0,1,1,1,0,1,1,1,1,0,2,1,1,1, +2,2,1,1,1,2,1,2,1,1,1,1,1,1,1,2,1,1,1,2,3,1,1,1,1,1,1,1,1,1,0,1, +2,3,3,0,1,0,0,0,3,3,1,0,0,1,2,2,1,0,0,0,0,2,0,0,1,1,1,0,2,1,1,1, +2,1,1,1,1,1,1,2,1,1,0,1,1,0,1,1,1,0,1,2,1,1,0,1,1,1,1,1,1,1,0,1, +2,3,3,0,1,0,0,0,2,2,0,0,0,0,1,2,2,0,0,0,0,1,0,0,1,1,0,0,2,0,1,0, +2,1,1,1,1,2,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,2,0,1,1,1,1,1,0,1, +3,2,2,0,1,0,1,0,2,3,2,0,0,1,2,2,1,0,0,1,1,1,0,0,2,1,0,1,2,2,1,1, +2,1,1,1,1,1,1,2,1,1,1,1,1,1,0,2,1,0,1,1,0,1,1,1,0,1,1,2,1,1,0,1, +2,2,2,0,0,1,0,0,2,2,1,1,0,0,2,1,1,0,0,0,1,2,0,0,2,1,0,0,2,1,1,1, +2,1,1,1,1,2,1,2,1,1,1,2,2,1,1,2,1,1,1,2,1,1,1,1,1,1,1,1,1,1,0,1, +1,2,3,0,0,0,1,0,3,2,1,0,0,1,2,1,1,0,0,0,0,2,1,0,1,1,0,0,2,1,2,1, +1,1,0,0,0,1,0,1,1,1,1,1,2,0,0,1,0,0,0,2,0,0,1,1,1,1,1,1,1,1,0,1, +3,0,0,2,1,2,2,1,0,0,2,1,2,2,0,0,0,2,1,1,1,0,1,1,0,0,1,1,2,0,0,0, +1,2,1,2,2,1,1,2,1,2,0,1,1,1,1,1,1,1,1,1,2,1,1,0,0,1,1,1,1,0,0,1, +1,3,2,0,0,0,1,0,2,2,2,0,0,0,2,2,1,0,0,0,0,3,1,1,1,1,0,0,2,1,1,1, +2,1,0,1,1,1,0,1,1,1,1,1,1,1,0,2,1,0,0,1,0,1,1,0,1,1,1,1,1,1,0,1, +2,3,2,0,0,0,1,0,2,2,0,0,0,0,2,1,1,0,0,0,0,2,1,0,1,1,0,0,2,1,1,0, +2,1,1,1,1,2,1,2,1,2,0,1,1,1,0,2,1,1,1,2,1,1,1,1,0,1,1,1,1,1,0,1, +3,1,1,2,2,2,3,2,1,1,2,2,1,1,0,1,0,2,2,1,1,1,1,1,0,0,1,1,0,1,1,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,2,2,0,0,0,0,0,2,2,0,0,0,0,2,2,1,0,0,0,1,1,0,0,1,2,0,0,2,1,1,1, +2,2,1,1,1,2,1,2,1,1,0,1,1,1,1,2,1,1,1,2,1,1,1,1,0,1,2,1,1,1,0,1, +1,0,0,1,2,3,2,1,0,0,2,0,1,1,0,0,0,1,1,1,1,0,1,1,0,0,1,0,0,0,0,0, +1,2,1,2,1,2,1,1,1,2,0,2,1,1,1,0,1,2,0,0,1,1,1,0,0,0,0,0,0,0,0,0, +2,3,2,0,0,0,0,0,1,1,2,1,0,0,1,1,1,0,0,0,0,2,0,0,1,1,0,0,2,1,1,1, +2,1,1,1,1,1,1,2,1,0,1,1,1,1,0,2,1,1,1,1,1,1,0,1,0,1,1,1,1,1,0,1, +1,2,2,0,1,1,1,0,2,2,2,0,0,0,3,2,1,0,0,0,1,1,0,0,1,1,0,1,1,1,0,0, +1,1,0,1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,2,1,1,1,0,0,1,1,1,0,1,0,1, +2,1,0,2,1,1,2,2,1,1,2,1,1,1,0,0,0,1,1,0,1,1,1,1,0,0,1,1,1,0,0,0, +1,2,2,2,2,2,1,1,1,2,0,2,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,0,1,0, +1,2,3,0,0,0,1,0,2,2,0,0,0,0,2,2,0,0,0,0,0,1,0,0,1,0,0,0,2,0,1,0, +2,1,1,1,1,1,0,2,0,0,0,1,2,1,1,1,1,0,1,2,0,1,0,1,0,1,1,1,0,1,0,1, 
+2,2,2,0,0,0,1,0,2,1,2,0,0,0,1,1,2,0,0,0,0,1,0,0,1,1,0,0,2,1,0,1, +2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,0,1,1,1,1,1,0,1, +1,2,2,0,0,0,1,0,2,2,2,0,0,0,1,1,0,0,0,0,0,1,1,0,2,0,0,1,1,1,0,1, +1,0,1,1,1,1,1,1,0,1,1,1,1,0,0,1,0,0,1,1,0,1,0,1,1,1,1,1,0,0,0,1, +1,0,0,1,0,1,2,1,0,0,1,1,1,2,0,0,0,1,1,0,1,0,1,1,0,0,1,0,0,0,0,0, +0,2,1,2,1,1,1,1,1,2,0,2,0,1,1,0,1,2,1,0,1,1,1,0,0,0,0,0,0,1,0,0, +2,1,1,0,1,2,0,0,1,1,1,0,0,0,1,1,0,0,0,0,0,1,0,0,1,0,0,0,2,1,0,1, +2,2,1,1,1,1,1,2,1,1,0,1,1,1,1,2,1,1,1,2,1,1,0,1,0,1,1,1,1,1,0,1, +1,2,2,0,0,0,0,0,1,1,0,0,0,0,2,1,0,0,0,0,0,2,0,0,2,2,0,0,2,0,0,1, +2,1,1,1,1,1,1,1,0,1,1,0,1,1,0,1,0,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1, +1,1,2,0,0,3,1,0,2,1,1,1,0,0,1,1,1,0,0,0,1,1,0,0,0,1,0,0,1,0,1,0, +1,2,1,0,1,1,1,2,1,1,0,1,1,1,1,1,0,0,0,1,1,1,1,1,0,1,0,0,0,1,0,0, +2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,2,0,0,0, +2,1,1,1,1,1,1,1,1,1,0,1,1,1,1,1,1,1,1,1,2,1,1,0,0,1,1,1,1,1,0,1, +2,1,1,1,2,1,1,1,0,1,1,2,1,0,0,0,0,1,1,1,1,0,1,0,0,0,0,1,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,1,0,1,1,1,1,1,0,0,1,1,2,1,0,0,0,1,1,0,0,0,1,1,0,0,1,0,1,0,0,0, +1,2,1,1,1,1,1,1,1,1,0,1,0,1,1,1,1,1,1,0,1,1,1,0,0,0,0,0,0,1,0,0, +2,0,0,0,1,1,1,1,0,0,1,1,0,0,0,0,0,1,1,1,2,0,0,1,0,0,1,0,1,0,0,0, +0,1,1,1,1,1,1,1,1,2,0,1,1,1,1,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0, +1,0,0,1,1,1,1,1,0,0,2,1,0,1,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0, +0,1,1,1,1,1,1,0,1,1,0,1,0,1,1,0,1,1,0,0,1,1,1,0,0,0,0,0,0,0,0,0, +1,0,0,1,1,1,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, +0,1,1,1,1,1,0,0,1,1,0,1,0,1,0,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0, +0,0,0,1,0,0,0,0,0,0,1,1,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,1,1,1,0,1,0,0,1,1,0,1,0,1,1,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0, +2,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,0,0,1,0,0,1,0,1,0,1,1,1,0,0,1,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,0,1,1,1,1,0,0,0,1,1,1,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0, +0,1,1,1,1,1,1,0,1,1,0,1,0,1,0,0,1,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0, +) + +Latin2HungarianModel = { + 'char_to_order_map': Latin2_HungarianCharToOrderMap, + 'precedence_matrix': HungarianLangModel, + 'typical_positive_ratio': 0.947368, + 'keep_english_letter': True, + 'charset_name': "ISO-8859-2", + 'language': 'Hungarian', +} + +Win1250HungarianModel = { + 'char_to_order_map': win1250HungarianCharToOrderMap, + 'precedence_matrix': HungarianLangModel, + 'typical_positive_ratio': 0.947368, + 'keep_english_letter': True, + 'charset_name': "windows-1250", + 'language': 'Hungarian', +} diff --git a/env/lib/python3.6/site-packages/chardet/langthaimodel.py b/env/lib/python3.6/site-packages/chardet/langthaimodel.py new file mode 100644 index 0000000..15f94c2 --- /dev/null +++ b/env/lib/python3.6/site-packages/chardet/langthaimodel.py @@ -0,0 +1,199 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. 
+# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# 255: Control characters that usually does not exist in any text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 + +# The following result for thai was collected from a limited sample (1M). + +# Character Mapping Table: +TIS620CharToOrderMap = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253,182,106,107,100,183,184,185,101, 94,186,187,108,109,110,111, # 40 +188,189,190, 89, 95,112,113,191,192,193,194,253,253,253,253,253, # 50 +253, 64, 72, 73,114, 74,115,116,102, 81,201,117, 90,103, 78, 82, # 60 + 96,202, 91, 79, 84,104,105, 97, 98, 92,203,253,253,253,253,253, # 70 +209,210,211,212,213, 88,214,215,216,217,218,219,220,118,221,222, +223,224, 99, 85, 83,225,226,227,228,229,230,231,232,233,234,235, +236, 5, 30,237, 24,238, 75, 8, 26, 52, 34, 51,119, 47, 58, 57, + 49, 53, 55, 43, 20, 19, 44, 14, 48, 3, 17, 25, 39, 62, 31, 54, + 45, 9, 16, 2, 61, 15,239, 12, 42, 46, 18, 21, 76, 4, 66, 63, + 22, 10, 1, 36, 23, 13, 40, 27, 32, 35, 86,240,241,242,243,244, + 11, 28, 41, 29, 33,245, 50, 37, 6, 7, 67, 77, 38, 93,246,247, + 68, 56, 59, 65, 69, 60, 70, 80, 71, 87,248,249,250,251,252,253, +) + +# Model Table: +# total sequences: 100% +# first 512 sequences: 92.6386% +# first 1024 sequences:7.3177% +# rest sequences: 1.0230% +# negative sequences: 0.0436% +ThaiLangModel = ( +0,1,3,3,3,3,0,0,3,3,0,3,3,0,3,3,3,3,3,3,3,3,0,0,3,3,3,0,3,3,3,3, +0,3,3,0,0,0,1,3,0,3,3,2,3,3,0,1,2,3,3,3,3,0,2,0,2,0,0,3,2,1,2,2, +3,0,3,3,2,3,0,0,3,3,0,3,3,0,3,3,3,3,3,3,3,3,3,0,3,2,3,0,2,2,2,3, +0,2,3,0,0,0,0,1,0,1,2,3,1,1,3,2,2,0,1,1,0,0,1,0,0,0,0,0,0,0,1,1, +3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,2,2,2,3,3,2,3,2,3,3,2,2,2, +3,1,2,3,0,3,3,2,2,1,2,3,3,1,2,0,1,3,0,1,0,0,1,0,0,0,0,0,0,0,1,1, +3,3,2,2,3,3,3,3,1,2,3,3,3,3,3,2,2,2,2,3,3,2,2,3,3,2,2,3,2,3,2,2, +3,3,1,2,3,1,2,2,3,3,1,0,2,1,0,0,3,1,2,1,0,0,1,0,0,0,0,0,0,1,0,1, +3,3,3,3,3,3,2,2,3,3,3,3,2,3,2,2,3,3,2,2,3,2,2,2,2,1,1,3,1,2,1,1, +3,2,1,0,2,1,0,1,0,1,1,0,1,1,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0, +3,3,3,2,3,2,3,3,2,2,3,2,3,3,2,3,1,1,2,3,2,2,2,3,2,2,2,2,2,1,2,1, +2,2,1,1,3,3,2,1,0,1,2,2,0,1,3,0,0,0,1,1,0,0,0,0,0,2,3,0,0,2,1,1, +3,3,2,3,3,2,0,0,3,3,0,3,3,0,2,2,3,1,2,2,1,1,1,0,2,2,2,0,2,2,1,1, +0,2,1,0,2,0,0,2,0,1,0,0,1,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,1,0, +3,3,2,3,3,2,0,0,3,3,0,2,3,0,2,1,2,2,2,2,1,2,0,0,2,2,2,0,2,2,1,1, +0,2,1,0,2,0,0,2,0,1,1,0,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0, +3,3,2,3,2,3,2,0,2,2,1,3,2,1,3,2,1,2,3,2,2,3,0,2,3,2,2,1,2,2,2,2, +1,2,2,0,0,0,0,2,0,1,2,0,1,1,1,0,1,0,3,1,1,0,0,0,0,0,0,0,0,0,1,0, +3,3,2,3,3,2,3,2,2,2,3,2,2,3,2,2,1,2,3,2,2,3,1,3,2,2,2,3,2,2,2,3, +3,2,1,3,0,1,1,1,0,2,1,1,1,1,1,0,1,0,1,1,0,0,0,0,0,0,0,0,0,2,0,0, +1,0,0,3,0,3,3,3,3,3,0,0,3,0,2,2,3,3,3,3,3,0,0,0,1,1,3,0,0,0,0,2, 
+0,0,1,0,0,0,0,0,0,0,2,3,0,0,0,3,0,2,0,0,0,0,0,3,0,0,0,0,0,0,0,0, +2,0,3,3,3,3,0,0,2,3,0,0,3,0,3,3,2,3,3,3,3,3,0,0,3,3,3,0,0,0,3,3, +0,0,3,0,0,0,0,2,0,0,2,1,1,3,0,0,1,0,0,2,3,0,1,0,0,0,0,0,0,0,1,0, +3,3,3,3,2,3,3,3,3,3,3,3,1,2,1,3,3,2,2,1,2,2,2,3,1,1,2,0,2,1,2,1, +2,2,1,0,0,0,1,1,0,1,0,1,1,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0, +3,0,2,1,2,3,3,3,0,2,0,2,2,0,2,1,3,2,2,1,2,1,0,0,2,2,1,0,2,1,2,2, +0,1,1,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,2,1,3,3,1,1,3,0,2,3,1,1,3,2,1,1,2,0,2,2,3,2,1,1,1,1,1,2, +3,0,0,1,3,1,2,1,2,0,3,0,0,0,1,0,3,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0, +3,3,1,1,3,2,3,3,3,1,3,2,1,3,2,1,3,2,2,2,2,1,3,3,1,2,1,3,1,2,3,0, +2,1,1,3,2,2,2,1,2,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2, +3,3,2,3,2,3,3,2,3,2,3,2,3,3,2,1,0,3,2,2,2,1,2,2,2,1,2,2,1,2,1,1, +2,2,2,3,0,1,3,1,1,1,1,0,1,1,0,2,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,2,3,2,2,1,1,3,2,3,2,3,2,0,3,2,2,1,2,0,2,2,2,1,2,2,2,2,1, +3,2,1,2,2,1,0,2,0,1,0,0,1,1,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,2,3,1,2,3,3,2,2,3,0,1,1,2,0,3,3,2,2,3,0,1,1,3,0,0,0,0, +3,1,0,3,3,0,2,0,2,1,0,0,3,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,2,3,2,3,3,0,1,3,1,1,2,1,2,1,1,3,1,1,0,2,3,1,1,1,1,1,1,1,1, +3,1,1,2,2,2,2,1,1,1,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +3,2,2,1,1,2,1,3,3,2,3,2,2,3,2,2,3,1,2,2,1,2,0,3,2,1,2,2,2,2,2,1, +3,2,1,2,2,2,1,1,1,1,0,0,1,1,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,1,3,3,0,2,1,0,3,2,0,0,3,1,0,1,1,0,1,0,0,0,0,0,1, +1,0,0,1,0,3,2,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,2,2,2,3,0,0,1,3,0,3,2,0,3,2,2,3,3,3,3,3,1,0,2,2,2,0,2,2,1,2, +0,2,3,0,0,0,0,1,0,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +3,0,2,3,1,3,3,2,3,3,0,3,3,0,3,2,2,3,2,3,3,3,0,0,2,2,3,0,1,1,1,3, +0,0,3,0,0,0,2,2,0,1,3,0,1,2,2,2,3,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1, +3,2,3,3,2,0,3,3,2,2,3,1,3,2,1,3,2,0,1,2,2,0,2,3,2,1,0,3,0,0,0,0, +3,0,0,2,3,1,3,0,0,3,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,1,3,2,2,2,1,2,0,1,3,1,1,3,1,3,0,0,2,1,1,1,1,2,1,1,1,0,2,1,0,1, +1,2,0,0,0,3,1,1,0,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,0,3,1,0,0,0,1,0, +3,3,3,3,2,2,2,2,2,1,3,1,1,1,2,0,1,1,2,1,2,1,3,2,0,0,3,1,1,1,1,1, +3,1,0,2,3,0,0,0,3,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,2,3,0,3,3,0,2,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,0,0,0,0,0, +0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,2,3,1,3,0,0,1,2,0,0,2,0,3,3,2,3,3,3,2,3,0,0,2,2,2,0,0,0,2,2, +0,0,1,0,0,0,0,3,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, +0,0,0,3,0,2,0,0,0,0,0,0,0,0,0,0,1,2,3,1,3,3,0,0,1,0,3,0,0,0,0,0, +0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,1,2,3,1,2,3,1,0,3,0,2,2,1,0,2,1,1,2,0,1,0,0,1,1,1,1,0,1,0,0, +1,0,0,0,0,1,1,0,3,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,2,1,0,1,1,1,3,1,2,2,2,2,2,2,1,1,1,1,0,3,1,0,1,3,1,1,1,1, +1,1,0,2,0,1,3,1,1,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,1, +3,0,2,2,1,3,3,2,3,3,0,1,1,0,2,2,1,2,1,3,3,1,0,0,3,2,0,0,0,0,2,1, +0,1,0,0,0,0,1,2,0,1,1,3,1,1,2,2,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, +0,0,3,0,0,1,0,0,0,3,0,0,3,0,3,1,0,1,1,1,3,2,0,0,0,3,0,0,0,0,2,0, +0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0, +3,3,1,3,2,1,3,3,1,2,2,0,1,2,1,0,1,2,0,0,0,0,0,3,0,0,0,3,0,0,0,0, +3,0,0,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,1,2,0,3,3,3,2,2,0,1,1,0,1,3,0,0,0,2,2,0,0,0,0,3,1,0,1,0,0,0, +0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,2,3,1,2,0,0,2,1,0,3,1,0,1,2,0,1,1,1,1,3,0,0,3,1,1,0,2,2,1,1, +0,2,0,0,0,0,0,1,0,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
+3,0,0,3,1,2,0,0,2,2,0,1,2,0,1,0,1,3,1,2,1,0,0,0,2,0,3,0,0,0,1,0, +0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,1,1,2,2,0,0,0,2,0,2,1,0,1,1,0,1,1,1,2,1,0,0,1,1,1,0,2,1,1,1, +0,1,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,1, +0,0,0,2,0,1,3,1,1,1,1,0,0,0,0,3,2,0,1,0,0,0,1,2,0,0,0,1,0,0,0,0, +0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,3,3,3,3,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,2,3,2,2,0,0,0,1,0,0,0,0,2,3,2,1,2,2,3,0,0,0,2,3,1,0,0,0,1,1, +0,0,1,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,0, +3,3,2,2,0,1,0,0,0,0,2,0,2,0,1,0,0,0,1,1,0,0,0,2,1,0,1,0,1,1,0,0, +0,1,0,2,0,0,1,0,3,0,1,0,0,0,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,1,0,0,1,0,0,0,0,0,1,1,2,0,0,0,0,1,0,0,1,3,1,0,0,0,0,1,1,0,0, +0,1,0,0,0,0,3,0,0,0,0,0,0,3,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0, +3,3,1,1,1,1,2,3,0,0,2,1,1,1,1,1,0,2,1,1,0,0,0,2,1,0,1,2,1,1,0,1, +2,1,0,3,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,3,1,0,0,0,0,0,0,0,3,0,0,0,3,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,1, +0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,2,0,0,0,0,0,0,1,2,1,0,1,1,0,2,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,2,0,0,0,1,3,0,1,0,0,0,2,0,0,0,0,0,0,0,1,2,0,0,0,0,0, +3,3,0,0,1,1,2,0,0,1,2,1,0,1,1,1,0,1,1,0,0,2,1,1,0,1,0,0,1,1,1,0, +0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,2,2,1,0,0,0,0,1,0,0,0,0,3,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0, +2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,3,0,0,1,1,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,1,0,1,2,0,1,2,0,0,1,1,0,2,0,1,0,0,1,0,0,0,0,1,0,0,0,2,0,0,0,0, +1,0,0,1,0,1,1,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,1,0,0,0,0,0,0,0,1,1,0,1,1,0,2,1,3,0,0,0,0,1,1,0,0,0,0,0,0,0,3, +1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,0,1,0,1,0,0,2,0,0,2,0,0,1,1,2,0,0,1,1,0,0,0,1,0,0,0,1,1,0,0,0, +1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, +1,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,1,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,0,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,3,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,0,0, +1,0,0,0,0,0,0,0,0,1,0,0,0,0,2,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,1,1,0,0,2,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +) + +TIS620ThaiModel = { + 'char_to_order_map': TIS620CharToOrderMap, + 'precedence_matrix': ThaiLangModel, + 'typical_positive_ratio': 0.926386, + 'keep_english_letter': False, + 'charset_name': "TIS-620", + 'language': 'Thai', +} diff --git a/env/lib/python3.6/site-packages/chardet/langturkishmodel.py b/env/lib/python3.6/site-packages/chardet/langturkishmodel.py new file mode 100644 index 0000000..a427a45 --- /dev/null +++ b/env/lib/python3.6/site-packages/chardet/langturkishmodel.py @@ -0,0 +1,193 @@ +# -*- coding: utf-8 -*- +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Özgür Baskın - Turkish Language Model +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# 255: Control characters that usually does not exist in any text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 + +# Character Mapping Table: +Latin5_TurkishCharToOrderMap = ( +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, +255, 23, 37, 47, 39, 29, 52, 36, 45, 53, 60, 16, 49, 20, 46, 42, + 48, 69, 44, 35, 31, 51, 38, 62, 65, 43, 56,255,255,255,255,255, +255, 1, 21, 28, 12, 2, 18, 27, 25, 3, 24, 10, 5, 13, 4, 15, + 26, 64, 7, 8, 9, 14, 32, 57, 58, 11, 22,255,255,255,255,255, +180,179,178,177,176,175,174,173,172,171,170,169,168,167,166,165, +164,163,162,161,160,159,101,158,157,156,155,154,153,152,151,106, +150,149,148,147,146,145,144,100,143,142,141,140,139,138,137,136, + 94, 80, 93,135,105,134,133, 63,132,131,130,129,128,127,126,125, +124,104, 73, 99, 79, 85,123, 54,122, 98, 92,121,120, 91,103,119, + 68,118,117, 97,116,115, 50, 90,114,113,112,111, 55, 41, 40, 86, + 89, 70, 59, 78, 71, 82, 88, 33, 77, 66, 84, 83,110, 75, 61, 96, + 30, 67,109, 74, 87,102, 34, 95, 81,108, 76, 72, 17, 6, 19,107, +) + +TurkishLangModel = ( +3,2,3,3,3,1,3,3,3,3,3,3,3,3,2,1,1,3,3,1,3,3,0,3,3,3,3,3,0,3,1,3, +3,2,1,0,0,1,1,0,0,0,1,0,0,1,1,1,1,0,0,0,0,0,0,0,2,2,0,0,1,0,0,1, +3,2,2,3,3,0,3,3,3,3,3,3,3,2,3,1,0,3,3,1,3,3,0,3,3,3,3,3,0,3,0,3, +3,1,1,0,1,0,1,0,0,0,0,0,0,1,1,1,1,0,0,0,0,0,0,0,2,2,0,0,0,1,0,1, +3,3,2,3,3,0,3,3,3,3,3,3,3,2,3,1,1,3,3,0,3,3,1,2,3,3,3,3,0,3,0,3, +3,1,1,0,0,0,1,0,0,0,0,1,1,0,1,2,1,0,0,0,1,0,0,0,0,2,0,0,0,0,0,1, 
+3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,1,3,3,2,0,3,2,1,2,2,1,3,3,0,0,0,2, +2,2,0,1,0,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,1,0,1,0,0,1, +3,3,3,2,3,3,1,2,3,3,3,3,3,3,3,1,3,2,1,0,3,2,0,1,2,3,3,2,1,0,0,2, +2,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,2,0,0,0, +1,0,1,3,3,1,3,3,3,3,3,3,3,1,2,0,0,2,3,0,2,3,0,0,2,2,2,3,0,3,0,1, +2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,0,3,2,0,2,3,2,3,3,1,0,0,2, +3,2,0,0,1,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,1,1,1,0,2,0,0,1, +3,3,3,2,3,3,2,3,3,3,3,2,3,3,3,0,3,3,0,0,2,1,0,0,2,3,2,2,0,0,0,2, +2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1,0,1,0,2,0,0,1, +3,3,3,2,3,3,3,3,3,3,3,2,3,3,3,0,3,2,0,1,3,2,1,1,3,2,3,2,1,0,0,2, +2,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0, +3,3,3,2,3,3,3,3,3,3,3,2,3,3,3,0,3,2,2,0,2,3,0,0,2,2,2,2,0,0,0,2, +3,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,2,0,1,0,0,0, +3,3,3,3,3,3,3,2,2,2,2,3,2,3,3,0,3,3,1,1,2,2,0,0,2,2,3,2,0,0,1,3, +0,3,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,1, +3,3,3,2,3,3,3,2,1,2,2,3,2,3,3,0,3,2,0,0,1,1,0,1,1,2,1,2,0,0,0,1, +0,3,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,1,0,0,0, +3,3,3,2,3,3,2,3,2,2,2,3,3,3,3,1,3,1,1,0,3,2,1,1,3,3,2,3,1,0,0,1, +1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,2,0,0,1, +3,2,2,3,3,0,3,3,3,3,3,3,3,2,2,1,0,3,3,1,3,3,0,1,3,3,2,3,0,3,0,3, +2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0, +2,2,2,3,3,0,3,3,3,3,3,3,3,3,3,0,0,3,2,0,3,3,0,3,2,3,3,3,0,3,1,3, +2,0,0,0,0,0,0,0,0,0,0,1,0,1,2,0,1,0,0,0,0,0,0,0,2,2,0,0,1,0,0,1, +3,3,3,1,2,3,3,1,0,0,1,0,0,3,3,2,3,0,0,2,0,0,2,0,2,0,0,0,2,0,2,0, +0,3,1,0,1,0,0,0,2,2,1,0,1,1,2,1,2,2,2,0,2,1,1,0,0,0,2,0,0,0,0,0, +1,2,1,3,3,0,3,3,3,3,3,2,3,0,0,0,0,2,3,0,2,3,1,0,2,3,1,3,0,3,0,2, +3,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,1,3,3,2,2,3,2,2,0,1,2,3,0,1,2,1,0,1,0,0,0,1,0,2,2,0,0,0,1, +1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,1,0,0,1,0,0,0, +3,3,3,1,3,3,1,1,3,3,1,1,3,3,1,0,2,1,2,0,2,1,0,0,1,1,2,1,0,0,0,2, +2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,1,0,2,1,3,0,0,2,0,0,3,3,0,3,0,0,1,0,1,2,0,0,1,1,2,2,0,1,0, +0,1,2,1,1,0,1,0,1,1,1,1,1,0,1,1,1,2,2,1,2,0,1,0,0,0,0,0,0,1,0,0, +3,3,3,2,3,2,3,3,0,2,2,2,3,3,3,0,3,0,0,0,2,2,0,1,2,1,1,1,0,0,0,1, +0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0, +3,3,3,3,3,3,2,1,2,2,3,3,3,3,2,0,2,0,0,0,2,2,0,0,2,1,3,3,0,0,1,1, +1,1,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0, +1,1,2,3,3,0,3,3,3,3,3,3,2,2,0,2,0,2,3,2,3,2,2,2,2,2,2,2,1,3,2,3, +2,0,2,1,2,2,2,2,1,1,2,2,1,2,2,1,2,0,0,2,1,1,0,2,1,0,0,1,0,0,0,1, +2,3,3,1,1,1,0,1,1,1,2,3,2,1,1,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0, +0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,2,2,2,3,2,3,2,2,1,3,3,3,0,2,1,2,0,2,1,0,0,1,1,1,1,1,0,0,1, +2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,2,0,1,0,0,0, +3,3,3,2,3,3,3,3,3,2,3,1,2,3,3,1,2,0,0,0,0,0,0,0,3,2,1,1,0,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0, +3,3,3,2,2,3,3,2,1,1,1,1,1,3,3,0,3,1,0,0,1,1,0,0,3,1,2,1,0,0,0,0, +0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0, +3,3,3,2,2,3,2,2,2,3,2,1,1,3,3,0,3,0,0,0,0,1,0,0,3,1,1,2,0,0,0,1, +1,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +1,1,1,3,3,0,3,3,3,3,3,2,2,2,1,2,0,2,1,2,2,1,1,0,1,2,2,2,2,2,2,2, +0,0,2,1,2,1,2,1,0,1,1,3,1,2,1,1,2,0,0,2,0,1,0,1,0,1,0,0,0,1,0,1, +3,3,3,1,3,3,3,0,1,1,0,2,2,3,1,0,3,0,0,0,1,0,0,0,1,0,0,1,0,1,0,0, 
+1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,2,0,0,2,2,1,0,0,1,0,0,3,3,1,3,0,0,1,1,0,2,0,3,0,0,0,2,0,1,1, +0,1,2,0,1,2,2,0,2,2,2,2,1,0,2,1,1,0,2,0,2,1,2,0,0,0,0,0,0,0,0,0, +3,3,3,1,3,2,3,2,0,2,2,2,1,3,2,0,2,1,2,0,1,2,0,0,1,0,2,2,0,0,0,2, +1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,1,0,0,0, +3,3,3,0,3,3,1,1,2,3,1,0,3,2,3,0,3,0,0,0,1,0,0,0,1,0,1,0,0,0,0,0, +1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,3,3,0,3,3,2,3,3,2,2,0,0,0,0,1,2,0,1,3,0,0,0,3,1,1,0,3,0,2, +2,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,1,2,2,1,0,3,1,1,1,1,3,3,2,3,0,0,1,0,1,2,0,2,2,0,2,2,0,2,1, +0,2,2,1,1,1,1,0,2,1,1,0,1,1,1,1,2,1,2,1,2,0,1,0,1,0,0,0,0,0,0,0, +3,3,3,0,1,1,3,0,0,1,1,0,0,2,2,0,3,0,0,1,1,0,1,0,0,0,0,0,2,0,0,0, +0,3,1,0,1,0,1,0,2,0,0,1,0,1,0,1,1,1,2,1,1,0,2,0,0,0,0,0,0,0,0,0, +3,3,3,0,2,0,2,0,1,1,1,0,0,3,3,0,2,0,0,1,0,0,2,1,1,0,1,0,1,0,1,0, +0,2,0,1,2,0,2,0,2,1,1,0,1,0,2,1,1,0,2,1,1,0,1,0,0,0,1,1,0,0,0,0, +3,2,3,0,1,0,0,0,0,0,0,0,0,1,2,0,1,0,0,1,0,0,1,0,0,0,0,0,2,0,0,0, +0,0,1,1,0,0,1,0,1,0,0,1,0,0,0,2,1,0,1,0,2,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,0,0,2,3,0,0,1,0,1,0,2,3,2,3,0,0,1,3,0,2,1,0,0,0,0,2,0,1,0, +0,2,1,0,0,1,1,0,2,1,0,0,1,0,0,1,1,0,1,1,2,0,1,0,0,0,0,1,0,0,0,0, +3,2,2,0,0,1,1,0,0,0,0,0,0,3,1,1,1,0,0,0,0,0,1,0,0,0,0,0,2,0,1,0, +0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0, +0,0,0,3,3,0,2,3,2,2,1,2,2,1,1,2,0,1,3,2,2,2,0,0,2,2,0,0,0,1,2,1, +3,0,2,1,1,0,1,1,1,0,1,2,2,2,1,1,2,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0, +0,1,1,2,3,0,3,3,3,2,2,2,2,1,0,1,0,1,0,1,2,2,0,0,2,2,1,3,1,1,2,1, +0,0,1,1,2,0,1,1,0,0,1,2,0,2,1,1,2,0,0,1,0,0,0,1,0,1,0,1,0,0,0,0, +3,3,2,0,0,3,1,0,0,0,0,0,0,3,2,1,2,0,0,1,0,0,2,0,0,0,0,0,2,0,1,0, +0,2,1,1,0,0,1,0,1,2,0,0,1,1,0,0,2,1,1,1,1,0,2,0,0,0,0,0,0,0,0,0, +3,3,2,0,0,1,0,0,0,0,1,0,0,3,3,2,2,0,0,1,0,0,2,0,1,0,0,0,2,0,1,0, +0,0,1,1,0,0,2,0,2,1,0,0,1,1,2,1,2,0,2,1,2,1,1,1,0,0,1,1,0,0,0,0, +3,3,2,0,0,2,2,0,0,0,1,1,0,2,2,1,3,1,0,1,0,1,2,0,0,0,0,0,1,0,1,0, +0,1,1,0,0,0,0,0,1,0,0,1,0,0,0,1,1,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,2,0,0,0,1,0,0,1,0,0,2,3,1,2,0,0,1,0,0,2,0,0,0,1,0,2,0,2,0, +0,1,1,2,2,1,2,0,2,1,1,0,0,1,1,0,1,1,1,1,2,1,1,0,0,0,0,0,0,0,0,0, +3,3,3,0,2,1,2,1,0,0,1,1,0,3,3,1,2,0,0,1,0,0,2,0,2,0,1,1,2,0,0,0, +0,0,1,1,1,1,2,0,1,1,0,1,1,1,1,0,0,0,1,1,1,0,1,0,0,0,1,0,0,0,0,0, +3,3,3,0,2,2,3,2,0,0,1,0,0,2,3,1,0,0,0,0,0,0,2,0,2,0,0,0,2,0,0,0, +0,1,1,0,0,0,1,0,0,1,0,1,1,0,1,0,1,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0, +3,2,3,0,0,0,0,0,0,0,1,0,0,2,2,2,2,0,0,1,0,0,2,0,0,0,0,0,2,0,1,0, +0,0,2,1,1,0,1,0,2,1,1,0,0,1,1,2,1,0,2,0,2,0,1,0,0,0,2,0,0,0,0,0, +0,0,0,2,2,0,2,1,1,1,1,2,2,0,0,1,0,1,0,0,1,3,0,0,0,0,1,0,0,2,1,0, +0,0,1,0,1,0,0,0,0,0,2,1,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0, +2,0,0,2,3,0,2,3,1,2,2,0,2,0,0,2,0,2,1,1,1,2,1,0,0,1,2,1,1,2,1,0, +1,0,2,0,1,0,1,1,0,0,2,2,1,2,1,1,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,0,2,1,2,0,0,0,1,0,0,3,2,0,1,0,0,1,0,0,2,0,0,0,1,2,1,0,1,0, +0,0,0,0,1,0,1,0,0,1,0,0,0,0,1,0,1,0,1,1,1,0,1,0,0,0,0,0,0,0,0,0, +0,0,0,2,2,0,2,2,1,1,0,1,1,1,1,1,0,0,1,2,1,1,1,0,1,0,0,0,1,1,1,1, +0,0,2,1,0,1,1,1,0,1,1,2,1,2,1,1,2,0,1,1,2,1,0,2,0,0,0,0,0,0,0,0, +3,2,2,0,0,2,0,0,0,0,0,0,0,2,2,0,2,0,0,1,0,0,2,0,0,0,0,0,2,0,0,0, +0,2,1,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0, +0,0,0,3,2,0,2,2,0,1,1,0,1,0,0,1,0,0,0,1,0,1,0,0,0,0,0,1,0,0,0,0, +2,0,1,0,1,0,1,1,0,0,1,2,0,1,0,1,1,0,0,1,0,1,0,2,0,0,0,0,0,0,0,0, +2,2,2,0,1,1,0,0,0,1,0,0,0,1,2,0,1,0,0,1,0,0,1,0,0,0,0,1,2,0,1,0, +0,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,1,0,1,0,2,0,0,0,0,0,0,0,0,0,0,0, 
+2,2,2,2,1,0,1,1,1,0,0,0,0,1,2,0,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0, +1,1,2,0,1,0,0,0,1,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,2,0,0,0,0,0,1, +0,0,1,2,2,0,2,1,2,1,1,2,2,0,0,0,0,1,0,0,1,1,0,0,2,0,0,0,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0, +2,2,2,0,0,0,1,0,0,0,0,0,0,2,2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, +0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,1,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,2,2,0,1,0,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,1,0,0,0,0,0,0,0,0,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +) + +Latin5TurkishModel = { + 'char_to_order_map': Latin5_TurkishCharToOrderMap, + 'precedence_matrix': TurkishLangModel, + 'typical_positive_ratio': 0.970290, + 'keep_english_letter': True, + 'charset_name': "ISO-8859-9", + 'language': 'Turkish', +} diff --git a/env/lib/python3.6/site-packages/chardet/latin1prober.py b/env/lib/python3.6/site-packages/chardet/latin1prober.py new file mode 100644 index 0000000..7d1e8c2 --- /dev/null +++ b/env/lib/python3.6/site-packages/chardet/latin1prober.py @@ -0,0 +1,145 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetprober import CharSetProber +from .enums import ProbingState + +FREQ_CAT_NUM = 4 + +UDF = 0 # undefined +OTH = 1 # other +ASC = 2 # ascii capital letter +ASS = 3 # ascii small letter +ACV = 4 # accent capital vowel +ACO = 5 # accent capital other +ASV = 6 # accent small vowel +ASO = 7 # accent small other +CLASS_NUM = 8 # total classes + +Latin1_CharToClass = ( + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 00 - 07 + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 08 - 0F + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 10 - 17 + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 18 - 1F + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 20 - 27 + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 28 - 2F + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 30 - 37 + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 38 - 3F + OTH, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 40 - 47 + ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 48 - 4F + ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 50 - 57 + ASC, ASC, ASC, OTH, OTH, OTH, OTH, OTH, # 58 - 5F + OTH, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 60 - 67 + ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 68 - 6F + ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 70 - 77 + ASS, ASS, ASS, OTH, OTH, OTH, OTH, OTH, # 78 - 7F + OTH, UDF, OTH, ASO, OTH, OTH, OTH, OTH, # 80 - 87 + OTH, OTH, ACO, OTH, ACO, UDF, ACO, UDF, # 88 - 8F + UDF, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 90 - 97 + OTH, OTH, ASO, OTH, ASO, UDF, ASO, ACO, # 98 - 9F + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # A0 - A7 + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # A8 - AF + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # B0 - B7 + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # B8 - BF + ACV, ACV, ACV, ACV, ACV, ACV, ACO, ACO, # C0 - C7 + ACV, ACV, ACV, ACV, ACV, ACV, ACV, ACV, # C8 - CF + ACO, ACO, ACV, ACV, ACV, ACV, ACV, OTH, # D0 - D7 + ACV, ACV, ACV, ACV, ACV, ACO, ACO, ACO, # D8 - DF + ASV, ASV, ASV, ASV, ASV, ASV, ASO, ASO, # E0 - E7 + ASV, ASV, ASV, ASV, ASV, ASV, ASV, ASV, # E8 - EF + ASO, ASO, ASV, ASV, ASV, ASV, ASV, OTH, # F0 - F7 + ASV, ASV, ASV, ASV, ASV, ASO, ASO, ASO, # F8 - FF +) + +# 0 : illegal +# 1 : very unlikely +# 2 : normal +# 3 : very likely +Latin1ClassModel = ( +# UDF OTH ASC ASS ACV ACO ASV ASO + 0, 0, 0, 0, 0, 0, 0, 0, # UDF + 0, 3, 3, 3, 3, 3, 3, 3, # OTH + 0, 3, 3, 3, 3, 3, 3, 3, # ASC + 0, 3, 3, 3, 1, 1, 3, 3, # ASS + 0, 3, 3, 3, 1, 2, 1, 2, # ACV + 0, 3, 3, 3, 3, 3, 3, 3, # ACO + 0, 3, 1, 3, 1, 1, 1, 3, # ASV + 0, 3, 1, 3, 1, 1, 3, 3, # ASO +) + + +class Latin1Prober(CharSetProber): + def __init__(self): + super(Latin1Prober, self).__init__() + self._last_char_class = None + self._freq_counter = None + self.reset() + + def reset(self): + self._last_char_class = OTH + self._freq_counter = [0] * FREQ_CAT_NUM + CharSetProber.reset(self) + + @property + def charset_name(self): + return "ISO-8859-1" + + @property + def language(self): + return "" + + def feed(self, byte_str): + byte_str = self.filter_with_english_letters(byte_str) + for c in byte_str: + char_class = Latin1_CharToClass[c] + freq = Latin1ClassModel[(self._last_char_class * CLASS_NUM) + + char_class] + if freq == 0: + self._state = ProbingState.NOT_ME + break + self._freq_counter[freq] += 1 + self._last_char_class = char_class + + return self.state + + def get_confidence(self): + if self.state == 
ProbingState.NOT_ME: + return 0.01 + + total = sum(self._freq_counter) + if total < 0.01: + confidence = 0.0 + else: + confidence = ((self._freq_counter[3] - self._freq_counter[1] * 20.0) + / total) + if confidence < 0.0: + confidence = 0.0 + # lower the confidence of latin1 so that other more accurate + # detector can take priority. + confidence = confidence * 0.73 + return confidence diff --git a/env/lib/python3.6/site-packages/chardet/mbcharsetprober.py b/env/lib/python3.6/site-packages/chardet/mbcharsetprober.py new file mode 100644 index 0000000..6256ecf --- /dev/null +++ b/env/lib/python3.6/site-packages/chardet/mbcharsetprober.py @@ -0,0 +1,91 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# Proofpoint, Inc. +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetprober import CharSetProber +from .enums import ProbingState, MachineState + + +class MultiByteCharSetProber(CharSetProber): + """ + MultiByteCharSetProber + """ + + def __init__(self, lang_filter=None): + super(MultiByteCharSetProber, self).__init__(lang_filter=lang_filter) + self.distribution_analyzer = None + self.coding_sm = None + self._last_char = [0, 0] + + def reset(self): + super(MultiByteCharSetProber, self).reset() + if self.coding_sm: + self.coding_sm.reset() + if self.distribution_analyzer: + self.distribution_analyzer.reset() + self._last_char = [0, 0] + + @property + def charset_name(self): + raise NotImplementedError + + @property + def language(self): + raise NotImplementedError + + def feed(self, byte_str): + for i in range(len(byte_str)): + coding_state = self.coding_sm.next_state(byte_str[i]) + if coding_state == MachineState.ERROR: + self.logger.debug('%s %s prober hit error at byte %s', + self.charset_name, self.language, i) + self._state = ProbingState.NOT_ME + break + elif coding_state == MachineState.ITS_ME: + self._state = ProbingState.FOUND_IT + break + elif coding_state == MachineState.START: + char_len = self.coding_sm.get_current_charlen() + if i == 0: + self._last_char[1] = byte_str[0] + self.distribution_analyzer.feed(self._last_char, char_len) + else: + self.distribution_analyzer.feed(byte_str[i - 1:i + 1], + char_len) + + self._last_char[0] = byte_str[-1] + + if self.state == ProbingState.DETECTING: + if (self.distribution_analyzer.got_enough_data() and + (self.get_confidence() > self.SHORTCUT_THRESHOLD)): + self._state = ProbingState.FOUND_IT + + return 
self.state + + def get_confidence(self): + return self.distribution_analyzer.get_confidence() diff --git a/env/lib/python3.6/site-packages/chardet/mbcsgroupprober.py b/env/lib/python3.6/site-packages/chardet/mbcsgroupprober.py new file mode 100644 index 0000000..530abe7 --- /dev/null +++ b/env/lib/python3.6/site-packages/chardet/mbcsgroupprober.py @@ -0,0 +1,54 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# Proofpoint, Inc. +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetgroupprober import CharSetGroupProber +from .utf8prober import UTF8Prober +from .sjisprober import SJISProber +from .eucjpprober import EUCJPProber +from .gb2312prober import GB2312Prober +from .euckrprober import EUCKRProber +from .cp949prober import CP949Prober +from .big5prober import Big5Prober +from .euctwprober import EUCTWProber + + +class MBCSGroupProber(CharSetGroupProber): + def __init__(self, lang_filter=None): + super(MBCSGroupProber, self).__init__(lang_filter=lang_filter) + self.probers = [ + UTF8Prober(), + SJISProber(), + EUCJPProber(), + GB2312Prober(), + EUCKRProber(), + CP949Prober(), + Big5Prober(), + EUCTWProber() + ] + self.reset() diff --git a/env/lib/python3.6/site-packages/chardet/mbcssm.py b/env/lib/python3.6/site-packages/chardet/mbcssm.py new file mode 100644 index 0000000..8360d0f --- /dev/null +++ b/env/lib/python3.6/site-packages/chardet/mbcssm.py @@ -0,0 +1,572 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .enums import MachineState + +# BIG5 + +BIG5_CLS = ( + 1,1,1,1,1,1,1,1, # 00 - 07 #allow 0x00 as legal value + 1,1,1,1,1,1,0,0, # 08 - 0f + 1,1,1,1,1,1,1,1, # 10 - 17 + 1,1,1,0,1,1,1,1, # 18 - 1f + 1,1,1,1,1,1,1,1, # 20 - 27 + 1,1,1,1,1,1,1,1, # 28 - 2f + 1,1,1,1,1,1,1,1, # 30 - 37 + 1,1,1,1,1,1,1,1, # 38 - 3f + 2,2,2,2,2,2,2,2, # 40 - 47 + 2,2,2,2,2,2,2,2, # 48 - 4f + 2,2,2,2,2,2,2,2, # 50 - 57 + 2,2,2,2,2,2,2,2, # 58 - 5f + 2,2,2,2,2,2,2,2, # 60 - 67 + 2,2,2,2,2,2,2,2, # 68 - 6f + 2,2,2,2,2,2,2,2, # 70 - 77 + 2,2,2,2,2,2,2,1, # 78 - 7f + 4,4,4,4,4,4,4,4, # 80 - 87 + 4,4,4,4,4,4,4,4, # 88 - 8f + 4,4,4,4,4,4,4,4, # 90 - 97 + 4,4,4,4,4,4,4,4, # 98 - 9f + 4,3,3,3,3,3,3,3, # a0 - a7 + 3,3,3,3,3,3,3,3, # a8 - af + 3,3,3,3,3,3,3,3, # b0 - b7 + 3,3,3,3,3,3,3,3, # b8 - bf + 3,3,3,3,3,3,3,3, # c0 - c7 + 3,3,3,3,3,3,3,3, # c8 - cf + 3,3,3,3,3,3,3,3, # d0 - d7 + 3,3,3,3,3,3,3,3, # d8 - df + 3,3,3,3,3,3,3,3, # e0 - e7 + 3,3,3,3,3,3,3,3, # e8 - ef + 3,3,3,3,3,3,3,3, # f0 - f7 + 3,3,3,3,3,3,3,0 # f8 - ff +) + +BIG5_ST = ( + MachineState.ERROR,MachineState.START,MachineState.START, 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 + MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,#08-0f + MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START#10-17 +) + +BIG5_CHAR_LEN_TABLE = (0, 1, 1, 2, 0) + +BIG5_SM_MODEL = {'class_table': BIG5_CLS, + 'class_factor': 5, + 'state_table': BIG5_ST, + 'char_len_table': BIG5_CHAR_LEN_TABLE, + 'name': 'Big5'} + +# CP949 + +CP949_CLS = ( + 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,0,0, # 00 - 0f + 1,1,1,1,1,1,1,1, 1,1,1,0,1,1,1,1, # 10 - 1f + 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, # 20 - 2f + 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, # 30 - 3f + 1,4,4,4,4,4,4,4, 4,4,4,4,4,4,4,4, # 40 - 4f + 4,4,5,5,5,5,5,5, 5,5,5,1,1,1,1,1, # 50 - 5f + 1,5,5,5,5,5,5,5, 5,5,5,5,5,5,5,5, # 60 - 6f + 5,5,5,5,5,5,5,5, 5,5,5,1,1,1,1,1, # 70 - 7f + 0,6,6,6,6,6,6,6, 6,6,6,6,6,6,6,6, # 80 - 8f + 6,6,6,6,6,6,6,6, 6,6,6,6,6,6,6,6, # 90 - 9f + 6,7,7,7,7,7,7,7, 7,7,7,7,7,8,8,8, # a0 - af + 7,7,7,7,7,7,7,7, 7,7,7,7,7,7,7,7, # b0 - bf + 7,7,7,7,7,7,9,2, 2,3,2,2,2,2,2,2, # c0 - cf + 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2, # d0 - df + 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2, # e0 - ef + 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,0, # f0 - ff +) + +CP949_ST = ( +#cls= 0 1 2 3 4 5 6 7 8 9 # previous state = + MachineState.ERROR,MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START, 4, 5,MachineState.ERROR, 6, # MachineState.START + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, # MachineState.ERROR + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME, # MachineState.ITS_ME + 
MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START, # 3 + MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START, # 4 + MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START, # 5 + MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START, # 6 +) + +CP949_CHAR_LEN_TABLE = (0, 1, 2, 0, 1, 1, 2, 2, 0, 2) + +CP949_SM_MODEL = {'class_table': CP949_CLS, + 'class_factor': 10, + 'state_table': CP949_ST, + 'char_len_table': CP949_CHAR_LEN_TABLE, + 'name': 'CP949'} + +# EUC-JP + +EUCJP_CLS = ( + 4,4,4,4,4,4,4,4, # 00 - 07 + 4,4,4,4,4,4,5,5, # 08 - 0f + 4,4,4,4,4,4,4,4, # 10 - 17 + 4,4,4,5,4,4,4,4, # 18 - 1f + 4,4,4,4,4,4,4,4, # 20 - 27 + 4,4,4,4,4,4,4,4, # 28 - 2f + 4,4,4,4,4,4,4,4, # 30 - 37 + 4,4,4,4,4,4,4,4, # 38 - 3f + 4,4,4,4,4,4,4,4, # 40 - 47 + 4,4,4,4,4,4,4,4, # 48 - 4f + 4,4,4,4,4,4,4,4, # 50 - 57 + 4,4,4,4,4,4,4,4, # 58 - 5f + 4,4,4,4,4,4,4,4, # 60 - 67 + 4,4,4,4,4,4,4,4, # 68 - 6f + 4,4,4,4,4,4,4,4, # 70 - 77 + 4,4,4,4,4,4,4,4, # 78 - 7f + 5,5,5,5,5,5,5,5, # 80 - 87 + 5,5,5,5,5,5,1,3, # 88 - 8f + 5,5,5,5,5,5,5,5, # 90 - 97 + 5,5,5,5,5,5,5,5, # 98 - 9f + 5,2,2,2,2,2,2,2, # a0 - a7 + 2,2,2,2,2,2,2,2, # a8 - af + 2,2,2,2,2,2,2,2, # b0 - b7 + 2,2,2,2,2,2,2,2, # b8 - bf + 2,2,2,2,2,2,2,2, # c0 - c7 + 2,2,2,2,2,2,2,2, # c8 - cf + 2,2,2,2,2,2,2,2, # d0 - d7 + 2,2,2,2,2,2,2,2, # d8 - df + 0,0,0,0,0,0,0,0, # e0 - e7 + 0,0,0,0,0,0,0,0, # e8 - ef + 0,0,0,0,0,0,0,0, # f0 - f7 + 0,0,0,0,0,0,0,5 # f8 - ff +) + +EUCJP_ST = ( + 3, 4, 3, 5,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.START,MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#10-17 + MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 3,MachineState.ERROR,#18-1f + 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START#20-27 +) + +EUCJP_CHAR_LEN_TABLE = (2, 2, 2, 3, 1, 0) + +EUCJP_SM_MODEL = {'class_table': EUCJP_CLS, + 'class_factor': 6, + 'state_table': EUCJP_ST, + 'char_len_table': EUCJP_CHAR_LEN_TABLE, + 'name': 'EUC-JP'} + +# EUC-KR + +EUCKR_CLS = ( + 1,1,1,1,1,1,1,1, # 00 - 07 + 1,1,1,1,1,1,0,0, # 08 - 0f + 1,1,1,1,1,1,1,1, # 10 - 17 + 1,1,1,0,1,1,1,1, # 18 - 1f + 1,1,1,1,1,1,1,1, # 20 - 27 + 1,1,1,1,1,1,1,1, # 28 - 2f + 1,1,1,1,1,1,1,1, # 30 - 37 + 1,1,1,1,1,1,1,1, # 38 - 3f + 1,1,1,1,1,1,1,1, # 40 - 47 + 1,1,1,1,1,1,1,1, # 48 - 4f + 1,1,1,1,1,1,1,1, # 50 - 57 + 1,1,1,1,1,1,1,1, # 58 - 5f + 1,1,1,1,1,1,1,1, # 60 - 67 + 1,1,1,1,1,1,1,1, # 68 - 6f + 1,1,1,1,1,1,1,1, # 70 - 77 + 1,1,1,1,1,1,1,1, # 78 - 7f + 0,0,0,0,0,0,0,0, # 80 - 87 + 0,0,0,0,0,0,0,0, # 88 - 8f + 0,0,0,0,0,0,0,0, # 90 - 97 + 0,0,0,0,0,0,0,0, # 98 - 9f + 0,2,2,2,2,2,2,2, # a0 - a7 + 2,2,2,2,2,3,3,3, # 
a8 - af + 2,2,2,2,2,2,2,2, # b0 - b7 + 2,2,2,2,2,2,2,2, # b8 - bf + 2,2,2,2,2,2,2,2, # c0 - c7 + 2,3,2,2,2,2,2,2, # c8 - cf + 2,2,2,2,2,2,2,2, # d0 - d7 + 2,2,2,2,2,2,2,2, # d8 - df + 2,2,2,2,2,2,2,2, # e0 - e7 + 2,2,2,2,2,2,2,2, # e8 - ef + 2,2,2,2,2,2,2,2, # f0 - f7 + 2,2,2,2,2,2,2,0 # f8 - ff +) + +EUCKR_ST = ( + MachineState.ERROR,MachineState.START, 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START #08-0f +) + +EUCKR_CHAR_LEN_TABLE = (0, 1, 2, 0) + +EUCKR_SM_MODEL = {'class_table': EUCKR_CLS, + 'class_factor': 4, + 'state_table': EUCKR_ST, + 'char_len_table': EUCKR_CHAR_LEN_TABLE, + 'name': 'EUC-KR'} + +# EUC-TW + +EUCTW_CLS = ( + 2,2,2,2,2,2,2,2, # 00 - 07 + 2,2,2,2,2,2,0,0, # 08 - 0f + 2,2,2,2,2,2,2,2, # 10 - 17 + 2,2,2,0,2,2,2,2, # 18 - 1f + 2,2,2,2,2,2,2,2, # 20 - 27 + 2,2,2,2,2,2,2,2, # 28 - 2f + 2,2,2,2,2,2,2,2, # 30 - 37 + 2,2,2,2,2,2,2,2, # 38 - 3f + 2,2,2,2,2,2,2,2, # 40 - 47 + 2,2,2,2,2,2,2,2, # 48 - 4f + 2,2,2,2,2,2,2,2, # 50 - 57 + 2,2,2,2,2,2,2,2, # 58 - 5f + 2,2,2,2,2,2,2,2, # 60 - 67 + 2,2,2,2,2,2,2,2, # 68 - 6f + 2,2,2,2,2,2,2,2, # 70 - 77 + 2,2,2,2,2,2,2,2, # 78 - 7f + 0,0,0,0,0,0,0,0, # 80 - 87 + 0,0,0,0,0,0,6,0, # 88 - 8f + 0,0,0,0,0,0,0,0, # 90 - 97 + 0,0,0,0,0,0,0,0, # 98 - 9f + 0,3,4,4,4,4,4,4, # a0 - a7 + 5,5,1,1,1,1,1,1, # a8 - af + 1,1,1,1,1,1,1,1, # b0 - b7 + 1,1,1,1,1,1,1,1, # b8 - bf + 1,1,3,1,3,3,3,3, # c0 - c7 + 3,3,3,3,3,3,3,3, # c8 - cf + 3,3,3,3,3,3,3,3, # d0 - d7 + 3,3,3,3,3,3,3,3, # d8 - df + 3,3,3,3,3,3,3,3, # e0 - e7 + 3,3,3,3,3,3,3,3, # e8 - ef + 3,3,3,3,3,3,3,3, # f0 - f7 + 3,3,3,3,3,3,3,0 # f8 - ff +) + +EUCTW_ST = ( + MachineState.ERROR,MachineState.ERROR,MachineState.START, 3, 3, 3, 4,MachineState.ERROR,#00-07 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.START,MachineState.ERROR,#10-17 + MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#18-1f + 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.START,MachineState.START,#20-27 + MachineState.START,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START #28-2f +) + +EUCTW_CHAR_LEN_TABLE = (0, 0, 1, 2, 2, 2, 3) + +EUCTW_SM_MODEL = {'class_table': EUCTW_CLS, + 'class_factor': 7, + 'state_table': EUCTW_ST, + 'char_len_table': EUCTW_CHAR_LEN_TABLE, + 'name': 'x-euc-tw'} + +# GB2312 + +GB2312_CLS = ( + 1,1,1,1,1,1,1,1, # 00 - 07 + 1,1,1,1,1,1,0,0, # 08 - 0f + 1,1,1,1,1,1,1,1, # 10 - 17 + 1,1,1,0,1,1,1,1, # 18 - 1f + 1,1,1,1,1,1,1,1, # 20 - 27 + 1,1,1,1,1,1,1,1, # 28 - 2f + 3,3,3,3,3,3,3,3, # 30 - 37 + 3,3,1,1,1,1,1,1, # 38 - 3f + 2,2,2,2,2,2,2,2, # 40 - 47 + 2,2,2,2,2,2,2,2, # 48 - 4f + 2,2,2,2,2,2,2,2, # 50 - 57 + 2,2,2,2,2,2,2,2, # 58 - 5f + 2,2,2,2,2,2,2,2, # 60 - 67 + 2,2,2,2,2,2,2,2, # 68 - 6f + 2,2,2,2,2,2,2,2, # 70 - 77 + 2,2,2,2,2,2,2,4, # 78 - 7f + 5,6,6,6,6,6,6,6, # 80 - 87 + 6,6,6,6,6,6,6,6, # 88 - 8f + 6,6,6,6,6,6,6,6, # 90 - 97 + 6,6,6,6,6,6,6,6, # 98 - 9f + 6,6,6,6,6,6,6,6, # a0 - a7 + 
6,6,6,6,6,6,6,6, # a8 - af + 6,6,6,6,6,6,6,6, # b0 - b7 + 6,6,6,6,6,6,6,6, # b8 - bf + 6,6,6,6,6,6,6,6, # c0 - c7 + 6,6,6,6,6,6,6,6, # c8 - cf + 6,6,6,6,6,6,6,6, # d0 - d7 + 6,6,6,6,6,6,6,6, # d8 - df + 6,6,6,6,6,6,6,6, # e0 - e7 + 6,6,6,6,6,6,6,6, # e8 - ef + 6,6,6,6,6,6,6,6, # f0 - f7 + 6,6,6,6,6,6,6,0 # f8 - ff +) + +GB2312_ST = ( + MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START, 3,MachineState.ERROR,#00-07 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,#10-17 + 4,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#18-1f + MachineState.ERROR,MachineState.ERROR, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,#20-27 + MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START #28-2f +) + +# To be accurate, the length of class 6 can be either 2 or 4. +# But it is not necessary to discriminate between the two since +# it is used for frequency analysis only, and we are validating +# each code range there as well. So it is safe to set it to be +# 2 here. +GB2312_CHAR_LEN_TABLE = (0, 1, 1, 1, 1, 1, 2) + +GB2312_SM_MODEL = {'class_table': GB2312_CLS, + 'class_factor': 7, + 'state_table': GB2312_ST, + 'char_len_table': GB2312_CHAR_LEN_TABLE, + 'name': 'GB2312'} + +# Shift_JIS + +SJIS_CLS = ( + 1,1,1,1,1,1,1,1, # 00 - 07 + 1,1,1,1,1,1,0,0, # 08 - 0f + 1,1,1,1,1,1,1,1, # 10 - 17 + 1,1,1,0,1,1,1,1, # 18 - 1f + 1,1,1,1,1,1,1,1, # 20 - 27 + 1,1,1,1,1,1,1,1, # 28 - 2f + 1,1,1,1,1,1,1,1, # 30 - 37 + 1,1,1,1,1,1,1,1, # 38 - 3f + 2,2,2,2,2,2,2,2, # 40 - 47 + 2,2,2,2,2,2,2,2, # 48 - 4f + 2,2,2,2,2,2,2,2, # 50 - 57 + 2,2,2,2,2,2,2,2, # 58 - 5f + 2,2,2,2,2,2,2,2, # 60 - 67 + 2,2,2,2,2,2,2,2, # 68 - 6f + 2,2,2,2,2,2,2,2, # 70 - 77 + 2,2,2,2,2,2,2,1, # 78 - 7f + 3,3,3,3,3,2,2,3, # 80 - 87 + 3,3,3,3,3,3,3,3, # 88 - 8f + 3,3,3,3,3,3,3,3, # 90 - 97 + 3,3,3,3,3,3,3,3, # 98 - 9f + #0xa0 is illegal in sjis encoding, but some pages does + #contain such byte. We need to be more error forgiven. 
+ 2,2,2,2,2,2,2,2, # a0 - a7 + 2,2,2,2,2,2,2,2, # a8 - af + 2,2,2,2,2,2,2,2, # b0 - b7 + 2,2,2,2,2,2,2,2, # b8 - bf + 2,2,2,2,2,2,2,2, # c0 - c7 + 2,2,2,2,2,2,2,2, # c8 - cf + 2,2,2,2,2,2,2,2, # d0 - d7 + 2,2,2,2,2,2,2,2, # d8 - df + 3,3,3,3,3,3,3,3, # e0 - e7 + 3,3,3,3,3,4,4,4, # e8 - ef + 3,3,3,3,3,3,3,3, # f0 - f7 + 3,3,3,3,3,0,0,0) # f8 - ff + + +SJIS_ST = ( + MachineState.ERROR,MachineState.START,MachineState.START, 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START #10-17 +) + +SJIS_CHAR_LEN_TABLE = (0, 1, 1, 2, 0, 0) + +SJIS_SM_MODEL = {'class_table': SJIS_CLS, + 'class_factor': 6, + 'state_table': SJIS_ST, + 'char_len_table': SJIS_CHAR_LEN_TABLE, + 'name': 'Shift_JIS'} + +# UCS2-BE + +UCS2BE_CLS = ( + 0,0,0,0,0,0,0,0, # 00 - 07 + 0,0,1,0,0,2,0,0, # 08 - 0f + 0,0,0,0,0,0,0,0, # 10 - 17 + 0,0,0,3,0,0,0,0, # 18 - 1f + 0,0,0,0,0,0,0,0, # 20 - 27 + 0,3,3,3,3,3,0,0, # 28 - 2f + 0,0,0,0,0,0,0,0, # 30 - 37 + 0,0,0,0,0,0,0,0, # 38 - 3f + 0,0,0,0,0,0,0,0, # 40 - 47 + 0,0,0,0,0,0,0,0, # 48 - 4f + 0,0,0,0,0,0,0,0, # 50 - 57 + 0,0,0,0,0,0,0,0, # 58 - 5f + 0,0,0,0,0,0,0,0, # 60 - 67 + 0,0,0,0,0,0,0,0, # 68 - 6f + 0,0,0,0,0,0,0,0, # 70 - 77 + 0,0,0,0,0,0,0,0, # 78 - 7f + 0,0,0,0,0,0,0,0, # 80 - 87 + 0,0,0,0,0,0,0,0, # 88 - 8f + 0,0,0,0,0,0,0,0, # 90 - 97 + 0,0,0,0,0,0,0,0, # 98 - 9f + 0,0,0,0,0,0,0,0, # a0 - a7 + 0,0,0,0,0,0,0,0, # a8 - af + 0,0,0,0,0,0,0,0, # b0 - b7 + 0,0,0,0,0,0,0,0, # b8 - bf + 0,0,0,0,0,0,0,0, # c0 - c7 + 0,0,0,0,0,0,0,0, # c8 - cf + 0,0,0,0,0,0,0,0, # d0 - d7 + 0,0,0,0,0,0,0,0, # d8 - df + 0,0,0,0,0,0,0,0, # e0 - e7 + 0,0,0,0,0,0,0,0, # e8 - ef + 0,0,0,0,0,0,0,0, # f0 - f7 + 0,0,0,0,0,0,4,5 # f8 - ff +) + +UCS2BE_ST = ( + 5, 7, 7,MachineState.ERROR, 4, 3,MachineState.ERROR,MachineState.ERROR,#00-07 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f + MachineState.ITS_ME,MachineState.ITS_ME, 6, 6, 6, 6,MachineState.ERROR,MachineState.ERROR,#10-17 + 6, 6, 6, 6, 6,MachineState.ITS_ME, 6, 6,#18-1f + 6, 6, 6, 6, 5, 7, 7,MachineState.ERROR,#20-27 + 5, 8, 6, 6,MachineState.ERROR, 6, 6, 6,#28-2f + 6, 6, 6, 6,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START #30-37 +) + +UCS2BE_CHAR_LEN_TABLE = (2, 2, 2, 0, 2, 2) + +UCS2BE_SM_MODEL = {'class_table': UCS2BE_CLS, + 'class_factor': 6, + 'state_table': UCS2BE_ST, + 'char_len_table': UCS2BE_CHAR_LEN_TABLE, + 'name': 'UTF-16BE'} + +# UCS2-LE + +UCS2LE_CLS = ( + 0,0,0,0,0,0,0,0, # 00 - 07 + 0,0,1,0,0,2,0,0, # 08 - 0f + 0,0,0,0,0,0,0,0, # 10 - 17 + 0,0,0,3,0,0,0,0, # 18 - 1f + 0,0,0,0,0,0,0,0, # 20 - 27 + 0,3,3,3,3,3,0,0, # 28 - 2f + 0,0,0,0,0,0,0,0, # 30 - 37 + 0,0,0,0,0,0,0,0, # 38 - 3f + 0,0,0,0,0,0,0,0, # 40 - 47 + 0,0,0,0,0,0,0,0, # 48 - 4f + 0,0,0,0,0,0,0,0, # 50 - 57 + 0,0,0,0,0,0,0,0, # 58 - 5f + 0,0,0,0,0,0,0,0, # 60 - 67 + 0,0,0,0,0,0,0,0, # 68 - 6f + 0,0,0,0,0,0,0,0, # 70 - 77 + 0,0,0,0,0,0,0,0, # 78 - 7f + 0,0,0,0,0,0,0,0, # 80 - 87 + 0,0,0,0,0,0,0,0, # 88 - 8f + 0,0,0,0,0,0,0,0, # 90 - 97 + 0,0,0,0,0,0,0,0, # 98 - 9f + 0,0,0,0,0,0,0,0, # a0 - a7 + 0,0,0,0,0,0,0,0, # a8 - af + 0,0,0,0,0,0,0,0, # b0 - b7 + 0,0,0,0,0,0,0,0, # b8 - bf + 
0,0,0,0,0,0,0,0, # c0 - c7 + 0,0,0,0,0,0,0,0, # c8 - cf + 0,0,0,0,0,0,0,0, # d0 - d7 + 0,0,0,0,0,0,0,0, # d8 - df + 0,0,0,0,0,0,0,0, # e0 - e7 + 0,0,0,0,0,0,0,0, # e8 - ef + 0,0,0,0,0,0,0,0, # f0 - f7 + 0,0,0,0,0,0,4,5 # f8 - ff +) + +UCS2LE_ST = ( + 6, 6, 7, 6, 4, 3,MachineState.ERROR,MachineState.ERROR,#00-07 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f + MachineState.ITS_ME,MachineState.ITS_ME, 5, 5, 5,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,#10-17 + 5, 5, 5,MachineState.ERROR, 5,MachineState.ERROR, 6, 6,#18-1f + 7, 6, 8, 8, 5, 5, 5,MachineState.ERROR,#20-27 + 5, 5, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 5, 5,#28-2f + 5, 5, 5,MachineState.ERROR, 5,MachineState.ERROR,MachineState.START,MachineState.START #30-37 +) + +UCS2LE_CHAR_LEN_TABLE = (2, 2, 2, 2, 2, 2) + +UCS2LE_SM_MODEL = {'class_table': UCS2LE_CLS, + 'class_factor': 6, + 'state_table': UCS2LE_ST, + 'char_len_table': UCS2LE_CHAR_LEN_TABLE, + 'name': 'UTF-16LE'} + +# UTF-8 + +UTF8_CLS = ( + 1,1,1,1,1,1,1,1, # 00 - 07 #allow 0x00 as a legal value + 1,1,1,1,1,1,0,0, # 08 - 0f + 1,1,1,1,1,1,1,1, # 10 - 17 + 1,1,1,0,1,1,1,1, # 18 - 1f + 1,1,1,1,1,1,1,1, # 20 - 27 + 1,1,1,1,1,1,1,1, # 28 - 2f + 1,1,1,1,1,1,1,1, # 30 - 37 + 1,1,1,1,1,1,1,1, # 38 - 3f + 1,1,1,1,1,1,1,1, # 40 - 47 + 1,1,1,1,1,1,1,1, # 48 - 4f + 1,1,1,1,1,1,1,1, # 50 - 57 + 1,1,1,1,1,1,1,1, # 58 - 5f + 1,1,1,1,1,1,1,1, # 60 - 67 + 1,1,1,1,1,1,1,1, # 68 - 6f + 1,1,1,1,1,1,1,1, # 70 - 77 + 1,1,1,1,1,1,1,1, # 78 - 7f + 2,2,2,2,3,3,3,3, # 80 - 87 + 4,4,4,4,4,4,4,4, # 88 - 8f + 4,4,4,4,4,4,4,4, # 90 - 97 + 4,4,4,4,4,4,4,4, # 98 - 9f + 5,5,5,5,5,5,5,5, # a0 - a7 + 5,5,5,5,5,5,5,5, # a8 - af + 5,5,5,5,5,5,5,5, # b0 - b7 + 5,5,5,5,5,5,5,5, # b8 - bf + 0,0,6,6,6,6,6,6, # c0 - c7 + 6,6,6,6,6,6,6,6, # c8 - cf + 6,6,6,6,6,6,6,6, # d0 - d7 + 6,6,6,6,6,6,6,6, # d8 - df + 7,8,8,8,8,8,8,8, # e0 - e7 + 8,8,8,8,8,9,8,8, # e8 - ef + 10,11,11,11,11,11,11,11, # f0 - f7 + 12,13,13,13,14,15,0,0 # f8 - ff +) + +UTF8_ST = ( + MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 12, 10,#00-07 + 9, 11, 8, 7, 6, 5, 4, 3,#08-0f + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#10-17 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#18-1f + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#20-27 + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#28-2f + MachineState.ERROR,MachineState.ERROR, 5, 5, 5, 5,MachineState.ERROR,MachineState.ERROR,#30-37 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#38-3f + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 5, 5, 5,MachineState.ERROR,MachineState.ERROR,#40-47 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#48-4f + MachineState.ERROR,MachineState.ERROR, 7, 7, 7, 
7,MachineState.ERROR,MachineState.ERROR,#50-57 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#58-5f + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 7, 7,MachineState.ERROR,MachineState.ERROR,#60-67 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#68-6f + MachineState.ERROR,MachineState.ERROR, 9, 9, 9, 9,MachineState.ERROR,MachineState.ERROR,#70-77 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#78-7f + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 9,MachineState.ERROR,MachineState.ERROR,#80-87 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#88-8f + MachineState.ERROR,MachineState.ERROR, 12, 12, 12, 12,MachineState.ERROR,MachineState.ERROR,#90-97 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#98-9f + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 12,MachineState.ERROR,MachineState.ERROR,#a0-a7 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#a8-af + MachineState.ERROR,MachineState.ERROR, 12, 12, 12,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#b0-b7 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#b8-bf + MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,#c0-c7 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR #c8-cf +) + +UTF8_CHAR_LEN_TABLE = (0, 1, 0, 0, 0, 0, 2, 3, 3, 3, 4, 4, 5, 5, 6, 6) + +UTF8_SM_MODEL = {'class_table': UTF8_CLS, + 'class_factor': 16, + 'state_table': UTF8_ST, + 'char_len_table': UTF8_CHAR_LEN_TABLE, + 'name': 'UTF-8'} diff --git a/env/lib/python3.6/site-packages/chardet/sbcharsetprober.py b/env/lib/python3.6/site-packages/chardet/sbcharsetprober.py new file mode 100644 index 0000000..0adb51d --- /dev/null +++ b/env/lib/python3.6/site-packages/chardet/sbcharsetprober.py @@ -0,0 +1,132 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. 
+# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetprober import CharSetProber +from .enums import CharacterCategory, ProbingState, SequenceLikelihood + + +class SingleByteCharSetProber(CharSetProber): + SAMPLE_SIZE = 64 + SB_ENOUGH_REL_THRESHOLD = 1024 # 0.25 * SAMPLE_SIZE^2 + POSITIVE_SHORTCUT_THRESHOLD = 0.95 + NEGATIVE_SHORTCUT_THRESHOLD = 0.05 + + def __init__(self, model, reversed=False, name_prober=None): + super(SingleByteCharSetProber, self).__init__() + self._model = model + # TRUE if we need to reverse every pair in the model lookup + self._reversed = reversed + # Optional auxiliary prober for name decision + self._name_prober = name_prober + self._last_order = None + self._seq_counters = None + self._total_seqs = None + self._total_char = None + self._freq_char = None + self.reset() + + def reset(self): + super(SingleByteCharSetProber, self).reset() + # char order of last character + self._last_order = 255 + self._seq_counters = [0] * SequenceLikelihood.get_num_categories() + self._total_seqs = 0 + self._total_char = 0 + # characters that fall in our sampling range + self._freq_char = 0 + + @property + def charset_name(self): + if self._name_prober: + return self._name_prober.charset_name + else: + return self._model['charset_name'] + + @property + def language(self): + if self._name_prober: + return self._name_prober.language + else: + return self._model.get('language') + + def feed(self, byte_str): + if not self._model['keep_english_letter']: + byte_str = self.filter_international_words(byte_str) + if not byte_str: + return self.state + char_to_order_map = self._model['char_to_order_map'] + for i, c in enumerate(byte_str): + # XXX: Order is in range 1-64, so one would think we want 0-63 here, + # but that leads to 27 more test failures than before. + order = char_to_order_map[c] + # XXX: This was SYMBOL_CAT_ORDER before, with a value of 250, but + # CharacterCategory.SYMBOL is actually 253, so we use CONTROL + # to make it closer to the original intent. The only difference + # is whether or not we count digits and control characters for + # _total_char purposes. 
+ if order < CharacterCategory.CONTROL: + self._total_char += 1 + if order < self.SAMPLE_SIZE: + self._freq_char += 1 + if self._last_order < self.SAMPLE_SIZE: + self._total_seqs += 1 + if not self._reversed: + i = (self._last_order * self.SAMPLE_SIZE) + order + model = self._model['precedence_matrix'][i] + else: # reverse the order of the letters in the lookup + i = (order * self.SAMPLE_SIZE) + self._last_order + model = self._model['precedence_matrix'][i] + self._seq_counters[model] += 1 + self._last_order = order + + charset_name = self._model['charset_name'] + if self.state == ProbingState.DETECTING: + if self._total_seqs > self.SB_ENOUGH_REL_THRESHOLD: + confidence = self.get_confidence() + if confidence > self.POSITIVE_SHORTCUT_THRESHOLD: + self.logger.debug('%s confidence = %s, we have a winner', + charset_name, confidence) + self._state = ProbingState.FOUND_IT + elif confidence < self.NEGATIVE_SHORTCUT_THRESHOLD: + self.logger.debug('%s confidence = %s, below negative ' + 'shortcut threshhold %s', charset_name, + confidence, + self.NEGATIVE_SHORTCUT_THRESHOLD) + self._state = ProbingState.NOT_ME + + return self.state + + def get_confidence(self): + r = 0.01 + if self._total_seqs > 0: + r = ((1.0 * self._seq_counters[SequenceLikelihood.POSITIVE]) / + self._total_seqs / self._model['typical_positive_ratio']) + r = r * self._freq_char / self._total_char + if r >= 1.0: + r = 0.99 + return r diff --git a/env/lib/python3.6/site-packages/chardet/sbcsgroupprober.py b/env/lib/python3.6/site-packages/chardet/sbcsgroupprober.py new file mode 100644 index 0000000..98e95dc --- /dev/null +++ b/env/lib/python3.6/site-packages/chardet/sbcsgroupprober.py @@ -0,0 +1,73 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetgroupprober import CharSetGroupProber +from .sbcharsetprober import SingleByteCharSetProber +from .langcyrillicmodel import (Win1251CyrillicModel, Koi8rModel, + Latin5CyrillicModel, MacCyrillicModel, + Ibm866Model, Ibm855Model) +from .langgreekmodel import Latin7GreekModel, Win1253GreekModel +from .langbulgarianmodel import Latin5BulgarianModel, Win1251BulgarianModel +# from .langhungarianmodel import Latin2HungarianModel, Win1250HungarianModel +from .langthaimodel import TIS620ThaiModel +from .langhebrewmodel import Win1255HebrewModel +from .hebrewprober import HebrewProber +from .langturkishmodel import Latin5TurkishModel + + +class SBCSGroupProber(CharSetGroupProber): + def __init__(self): + super(SBCSGroupProber, self).__init__() + self.probers = [ + SingleByteCharSetProber(Win1251CyrillicModel), + SingleByteCharSetProber(Koi8rModel), + SingleByteCharSetProber(Latin5CyrillicModel), + SingleByteCharSetProber(MacCyrillicModel), + SingleByteCharSetProber(Ibm866Model), + SingleByteCharSetProber(Ibm855Model), + SingleByteCharSetProber(Latin7GreekModel), + SingleByteCharSetProber(Win1253GreekModel), + SingleByteCharSetProber(Latin5BulgarianModel), + SingleByteCharSetProber(Win1251BulgarianModel), + # TODO: Restore Hungarian encodings (iso-8859-2 and windows-1250) + # after we retrain model. + # SingleByteCharSetProber(Latin2HungarianModel), + # SingleByteCharSetProber(Win1250HungarianModel), + SingleByteCharSetProber(TIS620ThaiModel), + SingleByteCharSetProber(Latin5TurkishModel), + ] + hebrew_prober = HebrewProber() + logical_hebrew_prober = SingleByteCharSetProber(Win1255HebrewModel, + False, hebrew_prober) + visual_hebrew_prober = SingleByteCharSetProber(Win1255HebrewModel, True, + hebrew_prober) + hebrew_prober.set_model_probers(logical_hebrew_prober, visual_hebrew_prober) + self.probers.extend([hebrew_prober, logical_hebrew_prober, + visual_hebrew_prober]) + + self.reset() diff --git a/env/lib/python3.6/site-packages/chardet/sjisprober.py b/env/lib/python3.6/site-packages/chardet/sjisprober.py new file mode 100644 index 0000000..9e29623 --- /dev/null +++ b/env/lib/python3.6/site-packages/chardet/sjisprober.py @@ -0,0 +1,92 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .mbcharsetprober import MultiByteCharSetProber +from .codingstatemachine import CodingStateMachine +from .chardistribution import SJISDistributionAnalysis +from .jpcntx import SJISContextAnalysis +from .mbcssm import SJIS_SM_MODEL +from .enums import ProbingState, MachineState + + +class SJISProber(MultiByteCharSetProber): + def __init__(self): + super(SJISProber, self).__init__() + self.coding_sm = CodingStateMachine(SJIS_SM_MODEL) + self.distribution_analyzer = SJISDistributionAnalysis() + self.context_analyzer = SJISContextAnalysis() + self.reset() + + def reset(self): + super(SJISProber, self).reset() + self.context_analyzer.reset() + + @property + def charset_name(self): + return self.context_analyzer.charset_name + + @property + def language(self): + return "Japanese" + + def feed(self, byte_str): + for i in range(len(byte_str)): + coding_state = self.coding_sm.next_state(byte_str[i]) + if coding_state == MachineState.ERROR: + self.logger.debug('%s %s prober hit error at byte %s', + self.charset_name, self.language, i) + self._state = ProbingState.NOT_ME + break + elif coding_state == MachineState.ITS_ME: + self._state = ProbingState.FOUND_IT + break + elif coding_state == MachineState.START: + char_len = self.coding_sm.get_current_charlen() + if i == 0: + self._last_char[1] = byte_str[0] + self.context_analyzer.feed(self._last_char[2 - char_len:], + char_len) + self.distribution_analyzer.feed(self._last_char, char_len) + else: + self.context_analyzer.feed(byte_str[i + 1 - char_len:i + 3 + - char_len], char_len) + self.distribution_analyzer.feed(byte_str[i - 1:i + 1], + char_len) + + self._last_char[0] = byte_str[-1] + + if self.state == ProbingState.DETECTING: + if (self.context_analyzer.got_enough_data() and + (self.get_confidence() > self.SHORTCUT_THRESHOLD)): + self._state = ProbingState.FOUND_IT + + return self.state + + def get_confidence(self): + context_conf = self.context_analyzer.get_confidence() + distrib_conf = self.distribution_analyzer.get_confidence() + return max(context_conf, distrib_conf) diff --git a/env/lib/python3.6/site-packages/chardet/universaldetector.py b/env/lib/python3.6/site-packages/chardet/universaldetector.py new file mode 100644 index 0000000..7b4e92d --- /dev/null +++ b/env/lib/python3.6/site-packages/chardet/universaldetector.py @@ -0,0 +1,286 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### +""" +Module containing the UniversalDetector detector class, which is the primary +class a user of ``chardet`` should use. + +:author: Mark Pilgrim (initial port to Python) +:author: Shy Shalom (original C code) +:author: Dan Blanchard (major refactoring for 3.0) +:author: Ian Cordasco +""" + + +import codecs +import logging +import re + +from .charsetgroupprober import CharSetGroupProber +from .enums import InputState, LanguageFilter, ProbingState +from .escprober import EscCharSetProber +from .latin1prober import Latin1Prober +from .mbcsgroupprober import MBCSGroupProber +from .sbcsgroupprober import SBCSGroupProber + + +class UniversalDetector(object): + """ + The ``UniversalDetector`` class underlies the ``chardet.detect`` function + and coordinates all of the different charset probers. + + To get a ``dict`` containing an encoding and its confidence, you can simply + run: + + .. code:: + + u = UniversalDetector() + u.feed(some_bytes) + u.close() + detected = u.result + + """ + + MINIMUM_THRESHOLD = 0.20 + HIGH_BYTE_DETECTOR = re.compile(b'[\x80-\xFF]') + ESC_DETECTOR = re.compile(b'(\033|~{)') + WIN_BYTE_DETECTOR = re.compile(b'[\x80-\x9F]') + ISO_WIN_MAP = {'iso-8859-1': 'Windows-1252', + 'iso-8859-2': 'Windows-1250', + 'iso-8859-5': 'Windows-1251', + 'iso-8859-6': 'Windows-1256', + 'iso-8859-7': 'Windows-1253', + 'iso-8859-8': 'Windows-1255', + 'iso-8859-9': 'Windows-1254', + 'iso-8859-13': 'Windows-1257'} + + def __init__(self, lang_filter=LanguageFilter.ALL): + self._esc_charset_prober = None + self._charset_probers = [] + self.result = None + self.done = None + self._got_data = None + self._input_state = None + self._last_char = None + self.lang_filter = lang_filter + self.logger = logging.getLogger(__name__) + self._has_win_bytes = None + self.reset() + + def reset(self): + """ + Reset the UniversalDetector and all of its probers back to their + initial states. This is called by ``__init__``, so you only need to + call this directly in between analyses of different documents. + """ + self.result = {'encoding': None, 'confidence': 0.0, 'language': None} + self.done = False + self._got_data = False + self._has_win_bytes = False + self._input_state = InputState.PURE_ASCII + self._last_char = b'' + if self._esc_charset_prober: + self._esc_charset_prober.reset() + for prober in self._charset_probers: + prober.reset() + + def feed(self, byte_str): + """ + Takes a chunk of a document and feeds it through all of the relevant + charset probers. + + After calling ``feed``, you can check the value of the ``done`` + attribute to see if you need to continue feeding the + ``UniversalDetector`` more data, or if it has made a prediction + (in the ``result`` attribute). + + .. note:: + You should always call ``close`` when you're done feeding in your + document if ``done`` is not already ``True``. 
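+
+        A minimal sketch of incremental feeding, stopping early once
+        ``done`` flips to ``True`` (``fp`` stands for a hypothetical binary
+        file object and is not part of this module):
+
+        .. code::
+
+              detector = UniversalDetector()
+              for chunk in iter(lambda: fp.read(4096), b''):
+                  detector.feed(chunk)
+                  if detector.done:
+                      break
+              detector.close()
+              print(detector.result)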
+ """ + if self.done: + return + + if not len(byte_str): + return + + if not isinstance(byte_str, bytearray): + byte_str = bytearray(byte_str) + + # First check for known BOMs, since these are guaranteed to be correct + if not self._got_data: + # If the data starts with BOM, we know it is UTF + if byte_str.startswith(codecs.BOM_UTF8): + # EF BB BF UTF-8 with BOM + self.result = {'encoding': "UTF-8-SIG", + 'confidence': 1.0, + 'language': ''} + elif byte_str.startswith((codecs.BOM_UTF32_LE, + codecs.BOM_UTF32_BE)): + # FF FE 00 00 UTF-32, little-endian BOM + # 00 00 FE FF UTF-32, big-endian BOM + self.result = {'encoding': "UTF-32", + 'confidence': 1.0, + 'language': ''} + elif byte_str.startswith(b'\xFE\xFF\x00\x00'): + # FE FF 00 00 UCS-4, unusual octet order BOM (3412) + self.result = {'encoding': "X-ISO-10646-UCS-4-3412", + 'confidence': 1.0, + 'language': ''} + elif byte_str.startswith(b'\x00\x00\xFF\xFE'): + # 00 00 FF FE UCS-4, unusual octet order BOM (2143) + self.result = {'encoding': "X-ISO-10646-UCS-4-2143", + 'confidence': 1.0, + 'language': ''} + elif byte_str.startswith((codecs.BOM_LE, codecs.BOM_BE)): + # FF FE UTF-16, little endian BOM + # FE FF UTF-16, big endian BOM + self.result = {'encoding': "UTF-16", + 'confidence': 1.0, + 'language': ''} + + self._got_data = True + if self.result['encoding'] is not None: + self.done = True + return + + # If none of those matched and we've only see ASCII so far, check + # for high bytes and escape sequences + if self._input_state == InputState.PURE_ASCII: + if self.HIGH_BYTE_DETECTOR.search(byte_str): + self._input_state = InputState.HIGH_BYTE + elif self._input_state == InputState.PURE_ASCII and \ + self.ESC_DETECTOR.search(self._last_char + byte_str): + self._input_state = InputState.ESC_ASCII + + self._last_char = byte_str[-1:] + + # If we've seen escape sequences, use the EscCharSetProber, which + # uses a simple state machine to check for known escape sequences in + # HZ and ISO-2022 encodings, since those are the only encodings that + # use such sequences. + if self._input_state == InputState.ESC_ASCII: + if not self._esc_charset_prober: + self._esc_charset_prober = EscCharSetProber(self.lang_filter) + if self._esc_charset_prober.feed(byte_str) == ProbingState.FOUND_IT: + self.result = {'encoding': + self._esc_charset_prober.charset_name, + 'confidence': + self._esc_charset_prober.get_confidence(), + 'language': + self._esc_charset_prober.language} + self.done = True + # If we've seen high bytes (i.e., those with values greater than 127), + # we need to do more complicated checks using all our multi-byte and + # single-byte probers that are left. The single-byte probers + # use character bigram distributions to determine the encoding, whereas + # the multi-byte probers use a combination of character unigram and + # bigram distributions. 
+ elif self._input_state == InputState.HIGH_BYTE: + if not self._charset_probers: + self._charset_probers = [MBCSGroupProber(self.lang_filter)] + # If we're checking non-CJK encodings, use single-byte prober + if self.lang_filter & LanguageFilter.NON_CJK: + self._charset_probers.append(SBCSGroupProber()) + self._charset_probers.append(Latin1Prober()) + for prober in self._charset_probers: + if prober.feed(byte_str) == ProbingState.FOUND_IT: + self.result = {'encoding': prober.charset_name, + 'confidence': prober.get_confidence(), + 'language': prober.language} + self.done = True + break + if self.WIN_BYTE_DETECTOR.search(byte_str): + self._has_win_bytes = True + + def close(self): + """ + Stop analyzing the current document and come up with a final + prediction. + + :returns: The ``result`` attribute, a ``dict`` with the keys + `encoding`, `confidence`, and `language`. + """ + # Don't bother with checks if we're already done + if self.done: + return self.result + self.done = True + + if not self._got_data: + self.logger.debug('no data received!') + + # Default to ASCII if it is all we've seen so far + elif self._input_state == InputState.PURE_ASCII: + self.result = {'encoding': 'ascii', + 'confidence': 1.0, + 'language': ''} + + # If we have seen non-ASCII, return the best that met MINIMUM_THRESHOLD + elif self._input_state == InputState.HIGH_BYTE: + prober_confidence = None + max_prober_confidence = 0.0 + max_prober = None + for prober in self._charset_probers: + if not prober: + continue + prober_confidence = prober.get_confidence() + if prober_confidence > max_prober_confidence: + max_prober_confidence = prober_confidence + max_prober = prober + if max_prober and (max_prober_confidence > self.MINIMUM_THRESHOLD): + charset_name = max_prober.charset_name + lower_charset_name = max_prober.charset_name.lower() + confidence = max_prober.get_confidence() + # Use Windows encoding name instead of ISO-8859 if we saw any + # extra Windows-specific bytes + if lower_charset_name.startswith('iso-8859'): + if self._has_win_bytes: + charset_name = self.ISO_WIN_MAP.get(lower_charset_name, + charset_name) + self.result = {'encoding': charset_name, + 'confidence': confidence, + 'language': max_prober.language} + + # Log all prober confidences if none met MINIMUM_THRESHOLD + if self.logger.getEffectiveLevel() == logging.DEBUG: + if self.result['encoding'] is None: + self.logger.debug('no probers hit minimum threshold') + for group_prober in self._charset_probers: + if not group_prober: + continue + if isinstance(group_prober, CharSetGroupProber): + for prober in group_prober.probers: + self.logger.debug('%s %s confidence = %s', + prober.charset_name, + prober.language, + prober.get_confidence()) + else: + self.logger.debug('%s %s confidence = %s', + prober.charset_name, + prober.language, + prober.get_confidence()) + return self.result diff --git a/env/lib/python3.6/site-packages/chardet/utf8prober.py b/env/lib/python3.6/site-packages/chardet/utf8prober.py new file mode 100644 index 0000000..6c3196c --- /dev/null +++ b/env/lib/python3.6/site-packages/chardet/utf8prober.py @@ -0,0 +1,82 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. 
+# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetprober import CharSetProber +from .enums import ProbingState, MachineState +from .codingstatemachine import CodingStateMachine +from .mbcssm import UTF8_SM_MODEL + + + +class UTF8Prober(CharSetProber): + ONE_CHAR_PROB = 0.5 + + def __init__(self): + super(UTF8Prober, self).__init__() + self.coding_sm = CodingStateMachine(UTF8_SM_MODEL) + self._num_mb_chars = None + self.reset() + + def reset(self): + super(UTF8Prober, self).reset() + self.coding_sm.reset() + self._num_mb_chars = 0 + + @property + def charset_name(self): + return "utf-8" + + @property + def language(self): + return "" + + def feed(self, byte_str): + for c in byte_str: + coding_state = self.coding_sm.next_state(c) + if coding_state == MachineState.ERROR: + self._state = ProbingState.NOT_ME + break + elif coding_state == MachineState.ITS_ME: + self._state = ProbingState.FOUND_IT + break + elif coding_state == MachineState.START: + if self.coding_sm.get_current_charlen() >= 2: + self._num_mb_chars += 1 + + if self.state == ProbingState.DETECTING: + if self.get_confidence() > self.SHORTCUT_THRESHOLD: + self._state = ProbingState.FOUND_IT + + return self.state + + def get_confidence(self): + unlike = 0.99 + if self._num_mb_chars < 6: + unlike *= self.ONE_CHAR_PROB ** self._num_mb_chars + return 1.0 - unlike + else: + return unlike diff --git a/env/lib/python3.6/site-packages/chardet/version.py b/env/lib/python3.6/site-packages/chardet/version.py new file mode 100644 index 0000000..bb2a34a --- /dev/null +++ b/env/lib/python3.6/site-packages/chardet/version.py @@ -0,0 +1,9 @@ +""" +This module exists only to simplify retrieving the version number of chardet +from within setup.py and from chardet subpackages. + +:author: Dan Blanchard (dan.blanchard@gmail.com) +""" + +__version__ = "3.0.4" +VERSION = __version__.split('.') diff --git a/env/lib/python3.6/site-packages/dateutil/_common.py b/env/lib/python3.6/site-packages/dateutil/_common.py new file mode 100644 index 0000000..e8b4af7 --- /dev/null +++ b/env/lib/python3.6/site-packages/dateutil/_common.py @@ -0,0 +1,34 @@ +""" +Common code used in multiple modules. 
+""" + + +class weekday(object): + __slots__ = ["weekday", "n"] + + def __init__(self, weekday, n=None): + self.weekday = weekday + self.n = n + + def __call__(self, n): + if n == self.n: + return self + else: + return self.__class__(self.weekday, n) + + def __eq__(self, other): + try: + if self.weekday != other.weekday or self.n != other.n: + return False + except AttributeError: + return False + return True + + __hash__ = None + + def __repr__(self): + s = ("MO", "TU", "WE", "TH", "FR", "SA", "SU")[self.weekday] + if not self.n: + return s + else: + return "%s(%+d)" % (s, self.n) diff --git a/env/lib/python3.6/site-packages/dateutil/_version.py b/env/lib/python3.6/site-packages/dateutil/_version.py new file mode 100644 index 0000000..c1a0357 --- /dev/null +++ b/env/lib/python3.6/site-packages/dateutil/_version.py @@ -0,0 +1,10 @@ +""" +Contains information about the dateutil version. +""" + +VERSION_MAJOR = 2 +VERSION_MINOR = 6 +VERSION_PATCH = 1 + +VERSION_TUPLE = (VERSION_MAJOR, VERSION_MINOR, VERSION_PATCH) +VERSION = '.'.join(map(str, VERSION_TUPLE)) diff --git a/env/lib/python3.6/site-packages/dateutil/easter.py b/env/lib/python3.6/site-packages/dateutil/easter.py new file mode 100644 index 0000000..e4def97 --- /dev/null +++ b/env/lib/python3.6/site-packages/dateutil/easter.py @@ -0,0 +1,89 @@ +# -*- coding: utf-8 -*- +""" +This module offers a generic easter computing method for any given year, using +Western, Orthodox or Julian algorithms. +""" + +import datetime + +__all__ = ["easter", "EASTER_JULIAN", "EASTER_ORTHODOX", "EASTER_WESTERN"] + +EASTER_JULIAN = 1 +EASTER_ORTHODOX = 2 +EASTER_WESTERN = 3 + + +def easter(year, method=EASTER_WESTERN): + """ + This method was ported from the work done by GM Arts, + on top of the algorithm by Claus Tondering, which was + based in part on the algorithm of Ouding (1940), as + quoted in "Explanatory Supplement to the Astronomical + Almanac", P. Kenneth Seidelmann, editor. + + This algorithm implements three different easter + calculation methods: + + 1 - Original calculation in Julian calendar, valid in + dates after 326 AD + 2 - Original method, with date converted to Gregorian + calendar, valid in years 1583 to 4099 + 3 - Revised method, in Gregorian calendar, valid in + years 1583 to 4099 as well + + These methods are represented by the constants: + + * ``EASTER_JULIAN = 1`` + * ``EASTER_ORTHODOX = 2`` + * ``EASTER_WESTERN = 3`` + + The default method is method 3. 
+ + More about the algorithm may be found at: + + http://users.chariot.net.au/~gmarts/eastalg.htm + + and + + http://www.tondering.dk/claus/calendar.html + + """ + + if not (1 <= method <= 3): + raise ValueError("invalid method") + + # g - Golden year - 1 + # c - Century + # h - (23 - Epact) mod 30 + # i - Number of days from March 21 to Paschal Full Moon + # j - Weekday for PFM (0=Sunday, etc) + # p - Number of days from March 21 to Sunday on or before PFM + # (-6 to 28 methods 1 & 3, to 56 for method 2) + # e - Extra days to add for method 2 (converting Julian + # date to Gregorian date) + + y = year + g = y % 19 + e = 0 + if method < 3: + # Old method + i = (19*g + 15) % 30 + j = (y + y//4 + i) % 7 + if method == 2: + # Extra dates to convert Julian to Gregorian date + e = 10 + if y > 1600: + e = e + y//100 - 16 - (y//100 - 16)//4 + else: + # New method + c = y//100 + h = (c - c//4 - (8*c + 13)//25 + 19*g + 15) % 30 + i = h - (h//28)*(1 - (h//28)*(29//(h + 1))*((21 - g)//11)) + j = (y + y//4 + i + 2 - c + c//4) % 7 + + # p can be from -6 to 56 corresponding to dates 22 March to 23 May + # (later dates apply to method 2, although 23 May never actually occurs) + p = i - j + e + d = 1 + (p + 27 + (p + 6)//40) % 31 + m = 3 + (p + 26)//30 + return datetime.date(int(y), int(m), int(d)) diff --git a/env/lib/python3.6/site-packages/dateutil/parser.py b/env/lib/python3.6/site-packages/dateutil/parser.py new file mode 100644 index 0000000..595331f --- /dev/null +++ b/env/lib/python3.6/site-packages/dateutil/parser.py @@ -0,0 +1,1374 @@ +# -*- coding: utf-8 -*- +""" +This module offers a generic date/time string parser which is able to parse +most known formats to represent a date and/or time. + +This module attempts to be forgiving with regards to unlikely input formats, +returning a datetime object even for dates which are ambiguous. If an element +of a date/time stamp is omitted, the following rules are applied: +- If AM or PM is left unspecified, a 24-hour clock is assumed, however, an hour + on a 12-hour clock (``0 <= hour <= 12``) *must* be specified if AM or PM is + specified. +- If a time zone is omitted, a timezone-naive datetime is returned. + +If any other elements are missing, they are taken from the +:class:`datetime.datetime` object passed to the parameter ``default``. If this +results in a day number exceeding the valid number of days per month, the +value falls back to the end of the month. + +Additional resources about date/time string formats can be found below: + +- `A summary of the international standard date and time notation + `_ +- `W3C Date and Time Formats `_ +- `Time Formats (Planetary Rings Node) `_ +- `CPAN ParseDate module + `_ +- `Java SimpleDateFormat Class + `_ +""" +from __future__ import unicode_literals + +import datetime +import string +import time +import collections +import re +from io import StringIO +from calendar import monthrange + +from six import text_type, binary_type, integer_types + +from . import relativedelta +from . 
import tz + +__all__ = ["parse", "parserinfo"] + + +class _timelex(object): + # Fractional seconds are sometimes split by a comma + _split_decimal = re.compile("([.,])") + + def __init__(self, instream): + if isinstance(instream, binary_type): + instream = instream.decode() + + if isinstance(instream, text_type): + instream = StringIO(instream) + + if getattr(instream, 'read', None) is None: + raise TypeError('Parser must be a string or character stream, not ' + '{itype}'.format(itype=instream.__class__.__name__)) + + self.instream = instream + self.charstack = [] + self.tokenstack = [] + self.eof = False + + def get_token(self): + """ + This function breaks the time string into lexical units (tokens), which + can be parsed by the parser. Lexical units are demarcated by changes in + the character set, so any continuous string of letters is considered + one unit, any continuous string of numbers is considered one unit. + + The main complication arises from the fact that dots ('.') can be used + both as separators (e.g. "Sep.20.2009") or decimal points (e.g. + "4:30:21.447"). As such, it is necessary to read the full context of + any dot-separated strings before breaking it into tokens; as such, this + function maintains a "token stack", for when the ambiguous context + demands that multiple tokens be parsed at once. + """ + if self.tokenstack: + return self.tokenstack.pop(0) + + seenletters = False + token = None + state = None + + while not self.eof: + # We only realize that we've reached the end of a token when we + # find a character that's not part of the current token - since + # that character may be part of the next token, it's stored in the + # charstack. + if self.charstack: + nextchar = self.charstack.pop(0) + else: + nextchar = self.instream.read(1) + while nextchar == '\x00': + nextchar = self.instream.read(1) + + if not nextchar: + self.eof = True + break + elif not state: + # First character of the token - determines if we're starting + # to parse a word, a number or something else. + token = nextchar + if self.isword(nextchar): + state = 'a' + elif self.isnum(nextchar): + state = '0' + elif self.isspace(nextchar): + token = ' ' + break # emit token + else: + break # emit token + elif state == 'a': + # If we've already started reading a word, we keep reading + # letters until we find something that's not part of a word. + seenletters = True + if self.isword(nextchar): + token += nextchar + elif nextchar == '.': + token += nextchar + state = 'a.' + else: + self.charstack.append(nextchar) + break # emit token + elif state == '0': + # If we've already started reading a number, we keep reading + # numbers until we find something that doesn't fit. + if self.isnum(nextchar): + token += nextchar + elif nextchar == '.' or (nextchar == ',' and len(token) >= 2): + token += nextchar + state = '0.' + else: + self.charstack.append(nextchar) + break # emit token + elif state == 'a.': + # If we've seen some letters and a dot separator, continue + # parsing, and the tokens will be broken up later. + seenletters = True + if nextchar == '.' or self.isword(nextchar): + token += nextchar + elif self.isnum(nextchar) and token[-1] == '.': + token += nextchar + state = '0.' + else: + self.charstack.append(nextchar) + break # emit token + elif state == '0.': + # If we've seen at least one dot separator, keep going, we'll + # break up the tokens later. + if nextchar == '.' or self.isnum(nextchar): + token += nextchar + elif self.isword(nextchar) and token[-1] == '.': + token += nextchar + state = 'a.' 
+ else: + self.charstack.append(nextchar) + break # emit token + + if (state in ('a.', '0.') and (seenletters or token.count('.') > 1 or + token[-1] in '.,')): + l = self._split_decimal.split(token) + token = l[0] + for tok in l[1:]: + if tok: + self.tokenstack.append(tok) + + if state == '0.' and token.count('.') == 0: + token = token.replace(',', '.') + + return token + + def __iter__(self): + return self + + def __next__(self): + token = self.get_token() + if token is None: + raise StopIteration + + return token + + def next(self): + return self.__next__() # Python 2.x support + + @classmethod + def split(cls, s): + return list(cls(s)) + + @classmethod + def isword(cls, nextchar): + """ Whether or not the next character is part of a word """ + return nextchar.isalpha() + + @classmethod + def isnum(cls, nextchar): + """ Whether the next character is part of a number """ + return nextchar.isdigit() + + @classmethod + def isspace(cls, nextchar): + """ Whether the next character is whitespace """ + return nextchar.isspace() + + +class _resultbase(object): + + def __init__(self): + for attr in self.__slots__: + setattr(self, attr, None) + + def _repr(self, classname): + l = [] + for attr in self.__slots__: + value = getattr(self, attr) + if value is not None: + l.append("%s=%s" % (attr, repr(value))) + return "%s(%s)" % (classname, ", ".join(l)) + + def __len__(self): + return (sum(getattr(self, attr) is not None + for attr in self.__slots__)) + + def __repr__(self): + return self._repr(self.__class__.__name__) + + +class parserinfo(object): + """ + Class which handles what inputs are accepted. Subclass this to customize + the language and acceptable values for each parameter. + + :param dayfirst: + Whether to interpret the first value in an ambiguous 3-integer date + (e.g. 01/05/09) as the day (``True``) or month (``False``). If + ``yearfirst`` is set to ``True``, this distinguishes between YDM + and YMD. Default is ``False``. + + :param yearfirst: + Whether to interpret the first value in an ambiguous 3-integer date + (e.g. 01/05/09) as the year. If ``True``, the first number is taken + to be the year, otherwise the last number is taken to be the year. + Default is ``False``. 
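+
+    A rough illustration of the effect of ``dayfirst``, using the
+    module-level ``parse`` helper this module exports (results shown
+    assume the default ``parserinfo``):
+
+        >>> parse("01/05/09")
+        datetime.datetime(2009, 1, 5, 0, 0)
+        >>> parse("01/05/09", dayfirst=True)
+        datetime.datetime(2009, 5, 1, 0, 0)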
+ """ + + # m from a.m/p.m, t from ISO T separator + JUMP = [" ", ".", ",", ";", "-", "/", "'", + "at", "on", "and", "ad", "m", "t", "of", + "st", "nd", "rd", "th"] + + WEEKDAYS = [("Mon", "Monday"), + ("Tue", "Tuesday"), + ("Wed", "Wednesday"), + ("Thu", "Thursday"), + ("Fri", "Friday"), + ("Sat", "Saturday"), + ("Sun", "Sunday")] + MONTHS = [("Jan", "January"), + ("Feb", "February"), + ("Mar", "March"), + ("Apr", "April"), + ("May", "May"), + ("Jun", "June"), + ("Jul", "July"), + ("Aug", "August"), + ("Sep", "Sept", "September"), + ("Oct", "October"), + ("Nov", "November"), + ("Dec", "December")] + HMS = [("h", "hour", "hours"), + ("m", "minute", "minutes"), + ("s", "second", "seconds")] + AMPM = [("am", "a"), + ("pm", "p")] + UTCZONE = ["UTC", "GMT", "Z"] + PERTAIN = ["of"] + TZOFFSET = {} + + def __init__(self, dayfirst=False, yearfirst=False): + self._jump = self._convert(self.JUMP) + self._weekdays = self._convert(self.WEEKDAYS) + self._months = self._convert(self.MONTHS) + self._hms = self._convert(self.HMS) + self._ampm = self._convert(self.AMPM) + self._utczone = self._convert(self.UTCZONE) + self._pertain = self._convert(self.PERTAIN) + + self.dayfirst = dayfirst + self.yearfirst = yearfirst + + self._year = time.localtime().tm_year + self._century = self._year // 100 * 100 + + def _convert(self, lst): + dct = {} + for i, v in enumerate(lst): + if isinstance(v, tuple): + for v in v: + dct[v.lower()] = i + else: + dct[v.lower()] = i + return dct + + def jump(self, name): + return name.lower() in self._jump + + def weekday(self, name): + if len(name) >= min(len(n) for n in self._weekdays.keys()): + try: + return self._weekdays[name.lower()] + except KeyError: + pass + return None + + def month(self, name): + if len(name) >= min(len(n) for n in self._months.keys()): + try: + return self._months[name.lower()] + 1 + except KeyError: + pass + return None + + def hms(self, name): + try: + return self._hms[name.lower()] + except KeyError: + return None + + def ampm(self, name): + try: + return self._ampm[name.lower()] + except KeyError: + return None + + def pertain(self, name): + return name.lower() in self._pertain + + def utczone(self, name): + return name.lower() in self._utczone + + def tzoffset(self, name): + if name in self._utczone: + return 0 + + return self.TZOFFSET.get(name) + + def convertyear(self, year, century_specified=False): + if year < 100 and not century_specified: + year += self._century + if abs(year - self._year) >= 50: + if year < self._year: + year += 100 + else: + year -= 100 + return year + + def validate(self, res): + # move to info + if res.year is not None: + res.year = self.convertyear(res.year, res.century_specified) + + if res.tzoffset == 0 and not res.tzname or res.tzname == 'Z': + res.tzname = "UTC" + res.tzoffset = 0 + elif res.tzoffset != 0 and res.tzname and self.utczone(res.tzname): + res.tzoffset = 0 + return True + + +class _ymd(list): + def __init__(self, tzstr, *args, **kwargs): + super(self.__class__, self).__init__(*args, **kwargs) + self.century_specified = False + self.tzstr = tzstr + + @staticmethod + def token_could_be_year(token, year): + try: + return int(token) == year + except ValueError: + return False + + @staticmethod + def find_potential_year_tokens(year, tokens): + return [token for token in tokens if _ymd.token_could_be_year(token, year)] + + def find_probable_year_index(self, tokens): + """ + attempt to deduce if a pre 100 year was lost + due to padded zeros being taken off + """ + for index, token in enumerate(self): + 
potential_year_tokens = _ymd.find_potential_year_tokens(token, tokens) + if len(potential_year_tokens) == 1 and len(potential_year_tokens[0]) > 2: + return index + + def append(self, val): + if hasattr(val, '__len__'): + if val.isdigit() and len(val) > 2: + self.century_specified = True + elif val > 100: + self.century_specified = True + + super(self.__class__, self).append(int(val)) + + def resolve_ymd(self, mstridx, yearfirst, dayfirst): + len_ymd = len(self) + year, month, day = (None, None, None) + + if len_ymd > 3: + raise ValueError("More than three YMD values") + elif len_ymd == 1 or (mstridx != -1 and len_ymd == 2): + # One member, or two members with a month string + if mstridx != -1: + month = self[mstridx] + del self[mstridx] + + if len_ymd > 1 or mstridx == -1: + if self[0] > 31: + year = self[0] + else: + day = self[0] + + elif len_ymd == 2: + # Two members with numbers + if self[0] > 31: + # 99-01 + year, month = self + elif self[1] > 31: + # 01-99 + month, year = self + elif dayfirst and self[1] <= 12: + # 13-01 + day, month = self + else: + # 01-13 + month, day = self + + elif len_ymd == 3: + # Three members + if mstridx == 0: + month, day, year = self + elif mstridx == 1: + if self[0] > 31 or (yearfirst and self[2] <= 31): + # 99-Jan-01 + year, month, day = self + else: + # 01-Jan-01 + # Give precendence to day-first, since + # two-digit years is usually hand-written. + day, month, year = self + + elif mstridx == 2: + # WTF!? + if self[1] > 31: + # 01-99-Jan + day, year, month = self + else: + # 99-01-Jan + year, day, month = self + + else: + if self[0] > 31 or \ + self.find_probable_year_index(_timelex.split(self.tzstr)) == 0 or \ + (yearfirst and self[1] <= 12 and self[2] <= 31): + # 99-01-01 + if dayfirst and self[2] <= 12: + year, day, month = self + else: + year, month, day = self + elif self[0] > 12 or (dayfirst and self[1] <= 12): + # 13-01-01 + day, month, year = self + else: + # 01-13-01 + month, day, year = self + + return year, month, day + + +class parser(object): + def __init__(self, info=None): + self.info = info or parserinfo() + + def parse(self, timestr, default=None, ignoretz=False, tzinfos=None, **kwargs): + """ + Parse the date/time string into a :class:`datetime.datetime` object. + + :param timestr: + Any date/time string using the supported formats. + + :param default: + The default datetime object, if this is a datetime object and not + ``None``, elements specified in ``timestr`` replace elements in the + default object. + + :param ignoretz: + If set ``True``, time zones in parsed strings are ignored and a + naive :class:`datetime.datetime` object is returned. + + :param tzinfos: + Additional time zone names / aliases which may be present in the + string. This argument maps time zone names (and optionally offsets + from those time zones) to time zones. This parameter can be a + dictionary with timezone aliases mapping time zone names to time + zones or a function taking two parameters (``tzname`` and + ``tzoffset``) and returning a time zone. + + The timezones to which the names are mapped can be an integer + offset from UTC in minutes or a :class:`tzinfo` object. + + .. 
doctest:: + :options: +NORMALIZE_WHITESPACE + + >>> from dateutil.parser import parse + >>> from dateutil.tz import gettz + >>> tzinfos = {"BRST": -10800, "CST": gettz("America/Chicago")} + >>> parse("2012-01-19 17:21:00 BRST", tzinfos=tzinfos) + datetime.datetime(2012, 1, 19, 17, 21, tzinfo=tzoffset(u'BRST', -10800)) + >>> parse("2012-01-19 17:21:00 CST", tzinfos=tzinfos) + datetime.datetime(2012, 1, 19, 17, 21, + tzinfo=tzfile('/usr/share/zoneinfo/America/Chicago')) + + This parameter is ignored if ``ignoretz`` is set. + + :param **kwargs: + Keyword arguments as passed to ``_parse()``. + + :return: + Returns a :class:`datetime.datetime` object or, if the + ``fuzzy_with_tokens`` option is ``True``, returns a tuple, the + first element being a :class:`datetime.datetime` object, the second + a tuple containing the fuzzy tokens. + + :raises ValueError: + Raised for invalid or unknown string format, if the provided + :class:`tzinfo` is not in a valid format, or if an invalid date + would be created. + + :raises TypeError: + Raised for non-string or character stream input. + + :raises OverflowError: + Raised if the parsed date exceeds the largest valid C integer on + your system. + """ + + if default is None: + default = datetime.datetime.now().replace(hour=0, minute=0, + second=0, microsecond=0) + + res, skipped_tokens = self._parse(timestr, **kwargs) + + if res is None: + raise ValueError("Unknown string format") + + if len(res) == 0: + raise ValueError("String does not contain a date.") + + repl = {} + for attr in ("year", "month", "day", "hour", + "minute", "second", "microsecond"): + value = getattr(res, attr) + if value is not None: + repl[attr] = value + + if 'day' not in repl: + # If the default day exceeds the last day of the month, fall back to + # the end of the month. + cyear = default.year if res.year is None else res.year + cmonth = default.month if res.month is None else res.month + cday = default.day if res.day is None else res.day + + if cday > monthrange(cyear, cmonth)[1]: + repl['day'] = monthrange(cyear, cmonth)[1] + + ret = default.replace(**repl) + + if res.weekday is not None and not res.day: + ret = ret+relativedelta.relativedelta(weekday=res.weekday) + + if not ignoretz: + if (isinstance(tzinfos, collections.Callable) or + tzinfos and res.tzname in tzinfos): + + if isinstance(tzinfos, collections.Callable): + tzdata = tzinfos(res.tzname, res.tzoffset) + else: + tzdata = tzinfos.get(res.tzname) + + if isinstance(tzdata, datetime.tzinfo): + tzinfo = tzdata + elif isinstance(tzdata, text_type): + tzinfo = tz.tzstr(tzdata) + elif isinstance(tzdata, integer_types): + tzinfo = tz.tzoffset(res.tzname, tzdata) + else: + raise ValueError("Offset must be tzinfo subclass, " + "tz string, or int offset.") + ret = ret.replace(tzinfo=tzinfo) + elif res.tzname and res.tzname in time.tzname: + ret = ret.replace(tzinfo=tz.tzlocal()) + elif res.tzoffset == 0: + ret = ret.replace(tzinfo=tz.tzutc()) + elif res.tzoffset: + ret = ret.replace(tzinfo=tz.tzoffset(res.tzname, res.tzoffset)) + + if kwargs.get('fuzzy_with_tokens', False): + return ret, skipped_tokens + else: + return ret + + class _result(_resultbase): + __slots__ = ["year", "month", "day", "weekday", + "hour", "minute", "second", "microsecond", + "tzname", "tzoffset", "ampm"] + + def _parse(self, timestr, dayfirst=None, yearfirst=None, fuzzy=False, + fuzzy_with_tokens=False): + """ + Private method which performs the heavy lifting of parsing, called from + ``parse()``, which passes on its ``kwargs`` to this function. 
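+
+        The string is first split into lexer tokens by ``_timelex``; for
+        illustration, "Sep 25 2003" becomes roughly
+        ['Sep', ' ', '25', ' ', '2003'], and the loop below walks these
+        tokens while filling in a ``_result`` instance.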
+ + :param timestr: + The string to parse. + + :param dayfirst: + Whether to interpret the first value in an ambiguous 3-integer date + (e.g. 01/05/09) as the day (``True``) or month (``False``). If + ``yearfirst`` is set to ``True``, this distinguishes between YDM + and YMD. If set to ``None``, this value is retrieved from the + current :class:`parserinfo` object (which itself defaults to + ``False``). + + :param yearfirst: + Whether to interpret the first value in an ambiguous 3-integer date + (e.g. 01/05/09) as the year. If ``True``, the first number is taken + to be the year, otherwise the last number is taken to be the year. + If this is set to ``None``, the value is retrieved from the current + :class:`parserinfo` object (which itself defaults to ``False``). + + :param fuzzy: + Whether to allow fuzzy parsing, allowing for string like "Today is + January 1, 2047 at 8:21:00AM". + + :param fuzzy_with_tokens: + If ``True``, ``fuzzy`` is automatically set to True, and the parser + will return a tuple where the first element is the parsed + :class:`datetime.datetime` datetimestamp and the second element is + a tuple containing the portions of the string which were ignored: + + .. doctest:: + + >>> from dateutil.parser import parse + >>> parse("Today is January 1, 2047 at 8:21:00AM", fuzzy_with_tokens=True) + (datetime.datetime(2047, 1, 1, 8, 21), (u'Today is ', u' ', u'at ')) + + """ + if fuzzy_with_tokens: + fuzzy = True + + info = self.info + + if dayfirst is None: + dayfirst = info.dayfirst + + if yearfirst is None: + yearfirst = info.yearfirst + + res = self._result() + l = _timelex.split(timestr) # Splits the timestr into tokens + + # keep up with the last token skipped so we can recombine + # consecutively skipped tokens (-2 for when i begins at 0). 
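+        # ``res`` accumulates the parsed fields; the ``_ymd`` helper created
+        # just below collects ambiguous date numbers so that resolve_ymd()
+        # can assign them to year/month/day once all tokens have been seen.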
+ last_skipped_token_i = -2 + skipped_tokens = list() + + try: + # year/month/day list + ymd = _ymd(timestr) + + # Index of the month string in ymd + mstridx = -1 + + len_l = len(l) + i = 0 + while i < len_l: + + # Check if it's a number + try: + value_repr = l[i] + value = float(value_repr) + except ValueError: + value = None + + if value is not None: + # Token is a number + len_li = len(l[i]) + i += 1 + + if (len(ymd) == 3 and len_li in (2, 4) + and res.hour is None and (i >= len_l or (l[i] != ':' and + info.hms(l[i]) is None))): + # 19990101T23[59] + s = l[i-1] + res.hour = int(s[:2]) + + if len_li == 4: + res.minute = int(s[2:]) + + elif len_li == 6 or (len_li > 6 and l[i-1].find('.') == 6): + # YYMMDD or HHMMSS[.ss] + s = l[i-1] + + if not ymd and l[i-1].find('.') == -1: + #ymd.append(info.convertyear(int(s[:2]))) + + ymd.append(s[:2]) + ymd.append(s[2:4]) + ymd.append(s[4:]) + else: + # 19990101T235959[.59] + res.hour = int(s[:2]) + res.minute = int(s[2:4]) + res.second, res.microsecond = _parsems(s[4:]) + + elif len_li in (8, 12, 14): + # YYYYMMDD + s = l[i-1] + ymd.append(s[:4]) + ymd.append(s[4:6]) + ymd.append(s[6:8]) + + if len_li > 8: + res.hour = int(s[8:10]) + res.minute = int(s[10:12]) + + if len_li > 12: + res.second = int(s[12:]) + + elif ((i < len_l and info.hms(l[i]) is not None) or + (i+1 < len_l and l[i] == ' ' and + info.hms(l[i+1]) is not None)): + + # HH[ ]h or MM[ ]m or SS[.ss][ ]s + if l[i] == ' ': + i += 1 + + idx = info.hms(l[i]) + + while True: + if idx == 0: + res.hour = int(value) + + if value % 1: + res.minute = int(60*(value % 1)) + + elif idx == 1: + res.minute = int(value) + + if value % 1: + res.second = int(60*(value % 1)) + + elif idx == 2: + res.second, res.microsecond = \ + _parsems(value_repr) + + i += 1 + + if i >= len_l or idx == 2: + break + + # 12h00 + try: + value_repr = l[i] + value = float(value_repr) + except ValueError: + break + else: + i += 1 + idx += 1 + + if i < len_l: + newidx = info.hms(l[i]) + + if newidx is not None: + idx = newidx + + elif (i == len_l and l[i-2] == ' ' and + info.hms(l[i-3]) is not None): + # X h MM or X m SS + idx = info.hms(l[i-3]) + + if idx == 0: # h + res.minute = int(value) + + sec_remainder = value % 1 + if sec_remainder: + res.second = int(60 * sec_remainder) + elif idx == 1: # m + res.second, res.microsecond = \ + _parsems(value_repr) + + # We don't need to advance the tokens here because the + # i == len_l call indicates that we're looking at all + # the tokens already. 
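+                    # The remaining numeric branches handle colon-separated
+                    # clock times (HH:MM[:SS[.ss]]), date separators
+                    # ('-', '/', '.') and bare numbers followed by an
+                    # am/pm token.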
+ + elif i+1 < len_l and l[i] == ':': + # HH:MM[:SS[.ss]] + res.hour = int(value) + i += 1 + value = float(l[i]) + res.minute = int(value) + + if value % 1: + res.second = int(60*(value % 1)) + + i += 1 + + if i < len_l and l[i] == ':': + res.second, res.microsecond = _parsems(l[i+1]) + i += 2 + + elif i < len_l and l[i] in ('-', '/', '.'): + sep = l[i] + ymd.append(value_repr) + i += 1 + + if i < len_l and not info.jump(l[i]): + try: + # 01-01[-01] + ymd.append(l[i]) + except ValueError: + # 01-Jan[-01] + value = info.month(l[i]) + + if value is not None: + ymd.append(value) + assert mstridx == -1 + mstridx = len(ymd)-1 + else: + return None, None + + i += 1 + + if i < len_l and l[i] == sep: + # We have three members + i += 1 + value = info.month(l[i]) + + if value is not None: + ymd.append(value) + mstridx = len(ymd)-1 + assert mstridx == -1 + else: + ymd.append(l[i]) + + i += 1 + elif i >= len_l or info.jump(l[i]): + if i+1 < len_l and info.ampm(l[i+1]) is not None: + # 12 am + res.hour = int(value) + + if res.hour < 12 and info.ampm(l[i+1]) == 1: + res.hour += 12 + elif res.hour == 12 and info.ampm(l[i+1]) == 0: + res.hour = 0 + + i += 1 + else: + # Year, month or day + ymd.append(value) + i += 1 + elif info.ampm(l[i]) is not None: + + # 12am + res.hour = int(value) + + if res.hour < 12 and info.ampm(l[i]) == 1: + res.hour += 12 + elif res.hour == 12 and info.ampm(l[i]) == 0: + res.hour = 0 + i += 1 + + elif not fuzzy: + return None, None + else: + i += 1 + continue + + # Check weekday + value = info.weekday(l[i]) + if value is not None: + res.weekday = value + i += 1 + continue + + # Check month name + value = info.month(l[i]) + if value is not None: + ymd.append(value) + assert mstridx == -1 + mstridx = len(ymd)-1 + + i += 1 + if i < len_l: + if l[i] in ('-', '/'): + # Jan-01[-99] + sep = l[i] + i += 1 + ymd.append(l[i]) + i += 1 + + if i < len_l and l[i] == sep: + # Jan-01-99 + i += 1 + ymd.append(l[i]) + i += 1 + + elif (i+3 < len_l and l[i] == l[i+2] == ' ' + and info.pertain(l[i+1])): + # Jan of 01 + # In this case, 01 is clearly year + try: + value = int(l[i+3]) + except ValueError: + # Wrong guess + pass + else: + # Convert it here to become unambiguous + ymd.append(str(info.convertyear(value))) + i += 4 + continue + + # Check am/pm + value = info.ampm(l[i]) + if value is not None: + # For fuzzy parsing, 'a' or 'am' (both valid English words) + # may erroneously trigger the AM/PM flag. Deal with that + # here. + val_is_ampm = True + + # If there's already an AM/PM flag, this one isn't one. 
+ if fuzzy and res.ampm is not None: + val_is_ampm = False + + # If AM/PM is found and hour is not, raise a ValueError + if res.hour is None: + if fuzzy: + val_is_ampm = False + else: + raise ValueError('No hour specified with ' + + 'AM or PM flag.') + elif not 0 <= res.hour <= 12: + # If AM/PM is found, it's a 12 hour clock, so raise + # an error for invalid range + if fuzzy: + val_is_ampm = False + else: + raise ValueError('Invalid hour specified for ' + + '12-hour clock.') + + if val_is_ampm: + if value == 1 and res.hour < 12: + res.hour += 12 + elif value == 0 and res.hour == 12: + res.hour = 0 + + res.ampm = value + + elif fuzzy: + last_skipped_token_i = self._skip_token(skipped_tokens, + last_skipped_token_i, i, l) + i += 1 + continue + + # Check for a timezone name + if (res.hour is not None and len(l[i]) <= 5 and + res.tzname is None and res.tzoffset is None and + not [x for x in l[i] if x not in + string.ascii_uppercase]): + res.tzname = l[i] + res.tzoffset = info.tzoffset(res.tzname) + i += 1 + + # Check for something like GMT+3, or BRST+3. Notice + # that it doesn't mean "I am 3 hours after GMT", but + # "my time +3 is GMT". If found, we reverse the + # logic so that timezone parsing code will get it + # right. + if i < len_l and l[i] in ('+', '-'): + l[i] = ('+', '-')[l[i] == '+'] + res.tzoffset = None + if info.utczone(res.tzname): + # With something like GMT+3, the timezone + # is *not* GMT. + res.tzname = None + + continue + + # Check for a numbered timezone + if res.hour is not None and l[i] in ('+', '-'): + signal = (-1, 1)[l[i] == '+'] + i += 1 + len_li = len(l[i]) + + if len_li == 4: + # -0300 + res.tzoffset = int(l[i][:2])*3600+int(l[i][2:])*60 + elif i+1 < len_l and l[i+1] == ':': + # -03:00 + res.tzoffset = int(l[i])*3600+int(l[i+2])*60 + i += 2 + elif len_li <= 2: + # -[0]3 + res.tzoffset = int(l[i][:2])*3600 + else: + return None, None + i += 1 + + res.tzoffset *= signal + + # Look for a timezone name between parenthesis + if (i+3 < len_l and + info.jump(l[i]) and l[i+1] == '(' and l[i+3] == ')' and + 3 <= len(l[i+2]) <= 5 and + not [x for x in l[i+2] + if x not in string.ascii_uppercase]): + # -0300 (BRST) + res.tzname = l[i+2] + i += 4 + continue + + # Check jumps + if not (info.jump(l[i]) or fuzzy): + return None, None + + last_skipped_token_i = self._skip_token(skipped_tokens, + last_skipped_token_i, i, l) + i += 1 + + # Process year/month/day + year, month, day = ymd.resolve_ymd(mstridx, yearfirst, dayfirst) + if year is not None: + res.year = year + res.century_specified = ymd.century_specified + + if month is not None: + res.month = month + + if day is not None: + res.day = day + + except (IndexError, ValueError, AssertionError): + return None, None + + if not info.validate(res): + return None, None + + if fuzzy_with_tokens: + return res, tuple(skipped_tokens) + else: + return res, None + + @staticmethod + def _skip_token(skipped_tokens, last_skipped_token_i, i, l): + if last_skipped_token_i == i - 1: + # recombine the tokens + skipped_tokens[-1] += l[i] + else: + # just append + skipped_tokens.append(l[i]) + last_skipped_token_i = i + return last_skipped_token_i + + +DEFAULTPARSER = parser() + + +def parse(timestr, parserinfo=None, **kwargs): + """ + + Parse a string in one of the supported formats, using the + ``parserinfo`` parameters. + + :param timestr: + A string containing a date/time stamp. + + :param parserinfo: + A :class:`parserinfo` object containing parameters for the parser. 
+ If ``None``, the default arguments to the :class:`parserinfo` + constructor are used. + + The ``**kwargs`` parameter takes the following keyword arguments: + + :param default: + The default datetime object, if this is a datetime object and not + ``None``, elements specified in ``timestr`` replace elements in the + default object. + + :param ignoretz: + If set ``True``, time zones in parsed strings are ignored and a naive + :class:`datetime` object is returned. + + :param tzinfos: + Additional time zone names / aliases which may be present in the + string. This argument maps time zone names (and optionally offsets + from those time zones) to time zones. This parameter can be a + dictionary with timezone aliases mapping time zone names to time + zones or a function taking two parameters (``tzname`` and + ``tzoffset``) and returning a time zone. + + The timezones to which the names are mapped can be an integer + offset from UTC in minutes or a :class:`tzinfo` object. + + .. doctest:: + :options: +NORMALIZE_WHITESPACE + + >>> from dateutil.parser import parse + >>> from dateutil.tz import gettz + >>> tzinfos = {"BRST": -10800, "CST": gettz("America/Chicago")} + >>> parse("2012-01-19 17:21:00 BRST", tzinfos=tzinfos) + datetime.datetime(2012, 1, 19, 17, 21, tzinfo=tzoffset(u'BRST', -10800)) + >>> parse("2012-01-19 17:21:00 CST", tzinfos=tzinfos) + datetime.datetime(2012, 1, 19, 17, 21, + tzinfo=tzfile('/usr/share/zoneinfo/America/Chicago')) + + This parameter is ignored if ``ignoretz`` is set. + + :param dayfirst: + Whether to interpret the first value in an ambiguous 3-integer date + (e.g. 01/05/09) as the day (``True``) or month (``False``). If + ``yearfirst`` is set to ``True``, this distinguishes between YDM and + YMD. If set to ``None``, this value is retrieved from the current + :class:`parserinfo` object (which itself defaults to ``False``). + + :param yearfirst: + Whether to interpret the first value in an ambiguous 3-integer date + (e.g. 01/05/09) as the year. If ``True``, the first number is taken to + be the year, otherwise the last number is taken to be the year. If + this is set to ``None``, the value is retrieved from the current + :class:`parserinfo` object (which itself defaults to ``False``). + + :param fuzzy: + Whether to allow fuzzy parsing, allowing for string like "Today is + January 1, 2047 at 8:21:00AM". + + :param fuzzy_with_tokens: + If ``True``, ``fuzzy`` is automatically set to True, and the parser + will return a tuple where the first element is the parsed + :class:`datetime.datetime` datetimestamp and the second element is + a tuple containing the portions of the string which were ignored: + + .. doctest:: + + >>> from dateutil.parser import parse + >>> parse("Today is January 1, 2047 at 8:21:00AM", fuzzy_with_tokens=True) + (datetime.datetime(2047, 1, 1, 8, 21), (u'Today is ', u' ', u'at ')) + + :return: + Returns a :class:`datetime.datetime` object or, if the + ``fuzzy_with_tokens`` option is ``True``, returns a tuple, the + first element being a :class:`datetime.datetime` object, the second + a tuple containing the fuzzy tokens. + + :raises ValueError: + Raised for invalid or unknown string format, if the provided + :class:`tzinfo` is not in a valid format, or if an invalid date + would be created. + + :raises OverflowError: + Raised if the parsed date exceeds the largest valid C integer on + your system. 
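+
+    A further illustrative sketch (the outputs are what the ``default`` and
+    ``dayfirst`` behaviour documented above would produce)::
+
+        >>> from datetime import datetime
+        >>> from dateutil.parser import parse
+        >>> parse("10:36", default=datetime(2003, 9, 25))
+        datetime.datetime(2003, 9, 25, 10, 36)
+        >>> parse("01/05/09", dayfirst=True)
+        datetime.datetime(2009, 5, 1, 0, 0)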
+ """ + if parserinfo: + return parser(parserinfo).parse(timestr, **kwargs) + else: + return DEFAULTPARSER.parse(timestr, **kwargs) + + +class _tzparser(object): + + class _result(_resultbase): + + __slots__ = ["stdabbr", "stdoffset", "dstabbr", "dstoffset", + "start", "end"] + + class _attr(_resultbase): + __slots__ = ["month", "week", "weekday", + "yday", "jyday", "day", "time"] + + def __repr__(self): + return self._repr("") + + def __init__(self): + _resultbase.__init__(self) + self.start = self._attr() + self.end = self._attr() + + def parse(self, tzstr): + res = self._result() + l = _timelex.split(tzstr) + try: + + len_l = len(l) + + i = 0 + while i < len_l: + # BRST+3[BRDT[+2]] + j = i + while j < len_l and not [x for x in l[j] + if x in "0123456789:,-+"]: + j += 1 + if j != i: + if not res.stdabbr: + offattr = "stdoffset" + res.stdabbr = "".join(l[i:j]) + else: + offattr = "dstoffset" + res.dstabbr = "".join(l[i:j]) + i = j + if (i < len_l and (l[i] in ('+', '-') or l[i][0] in + "0123456789")): + if l[i] in ('+', '-'): + # Yes, that's right. See the TZ variable + # documentation. + signal = (1, -1)[l[i] == '+'] + i += 1 + else: + signal = -1 + len_li = len(l[i]) + if len_li == 4: + # -0300 + setattr(res, offattr, (int(l[i][:2])*3600 + + int(l[i][2:])*60)*signal) + elif i+1 < len_l and l[i+1] == ':': + # -03:00 + setattr(res, offattr, + (int(l[i])*3600+int(l[i+2])*60)*signal) + i += 2 + elif len_li <= 2: + # -[0]3 + setattr(res, offattr, + int(l[i][:2])*3600*signal) + else: + return None + i += 1 + if res.dstabbr: + break + else: + break + + if i < len_l: + for j in range(i, len_l): + if l[j] == ';': + l[j] = ',' + + assert l[i] == ',' + + i += 1 + + if i >= len_l: + pass + elif (8 <= l.count(',') <= 9 and + not [y for x in l[i:] if x != ',' + for y in x if y not in "0123456789"]): + # GMT0BST,3,0,30,3600,10,0,26,7200[,3600] + for x in (res.start, res.end): + x.month = int(l[i]) + i += 2 + if l[i] == '-': + value = int(l[i+1])*-1 + i += 1 + else: + value = int(l[i]) + i += 2 + if value: + x.week = value + x.weekday = (int(l[i])-1) % 7 + else: + x.day = int(l[i]) + i += 2 + x.time = int(l[i]) + i += 2 + if i < len_l: + if l[i] in ('-', '+'): + signal = (-1, 1)[l[i] == "+"] + i += 1 + else: + signal = 1 + res.dstoffset = (res.stdoffset+int(l[i]))*signal + elif (l.count(',') == 2 and l[i:].count('/') <= 2 and + not [y for x in l[i:] if x not in (',', '/', 'J', 'M', + '.', '-', ':') + for y in x if y not in "0123456789"]): + for x in (res.start, res.end): + if l[i] == 'J': + # non-leap year day (1 based) + i += 1 + x.jyday = int(l[i]) + elif l[i] == 'M': + # month[-.]week[-.]weekday + i += 1 + x.month = int(l[i]) + i += 1 + assert l[i] in ('-', '.') + i += 1 + x.week = int(l[i]) + if x.week == 5: + x.week = -1 + i += 1 + assert l[i] in ('-', '.') + i += 1 + x.weekday = (int(l[i])-1) % 7 + else: + # year day (zero based) + x.yday = int(l[i])+1 + + i += 1 + + if i < len_l and l[i] == '/': + i += 1 + # start time + len_li = len(l[i]) + if len_li == 4: + # -0300 + x.time = (int(l[i][:2])*3600+int(l[i][2:])*60) + elif i+1 < len_l and l[i+1] == ':': + # -03:00 + x.time = int(l[i])*3600+int(l[i+2])*60 + i += 2 + if i+1 < len_l and l[i+1] == ':': + i += 2 + x.time += int(l[i]) + elif len_li <= 2: + # -[0]3 + x.time = (int(l[i][:2])*3600) + else: + return None + i += 1 + + assert i == len_l or l[i] == ',' + + i += 1 + + assert i >= len_l + + except (IndexError, ValueError, AssertionError): + return None + + return res + + +DEFAULTTZPARSER = _tzparser() + + +def _parsetz(tzstr): + return 
DEFAULTTZPARSER.parse(tzstr) + + +def _parsems(value): + """Parse a I[.F] seconds value into (seconds, microseconds).""" + if "." not in value: + return int(value), 0 + else: + i, f = value.split(".") + return int(i), int(f.ljust(6, "0")[:6]) + + +# vim:ts=4:sw=4:et diff --git a/env/lib/python3.6/site-packages/dateutil/relativedelta.py b/env/lib/python3.6/site-packages/dateutil/relativedelta.py new file mode 100644 index 0000000..0e66afc --- /dev/null +++ b/env/lib/python3.6/site-packages/dateutil/relativedelta.py @@ -0,0 +1,549 @@ +# -*- coding: utf-8 -*- +import datetime +import calendar + +import operator +from math import copysign + +from six import integer_types +from warnings import warn + +from ._common import weekday + +MO, TU, WE, TH, FR, SA, SU = weekdays = tuple(weekday(x) for x in range(7)) + +__all__ = ["relativedelta", "MO", "TU", "WE", "TH", "FR", "SA", "SU"] + + +class relativedelta(object): + """ + The relativedelta type is based on the specification of the excellent + work done by M.-A. Lemburg in his + `mx.DateTime `_ extension. + However, notice that this type does *NOT* implement the same algorithm as + his work. Do *NOT* expect it to behave like mx.DateTime's counterpart. + + There are two different ways to build a relativedelta instance. The + first one is passing it two date/datetime classes:: + + relativedelta(datetime1, datetime2) + + The second one is passing it any number of the following keyword arguments:: + + relativedelta(arg1=x,arg2=y,arg3=z...) + + year, month, day, hour, minute, second, microsecond: + Absolute information (argument is singular); adding or subtracting a + relativedelta with absolute information does not perform an aritmetic + operation, but rather REPLACES the corresponding value in the + original datetime with the value(s) in relativedelta. + + years, months, weeks, days, hours, minutes, seconds, microseconds: + Relative information, may be negative (argument is plural); adding + or subtracting a relativedelta with relative information performs + the corresponding aritmetic operation on the original datetime value + with the information in the relativedelta. + + weekday: + One of the weekday instances (MO, TU, etc). These instances may + receive a parameter N, specifying the Nth weekday, which could + be positive or negative (like MO(+1) or MO(-2). Not specifying + it is the same as specifying +1. You can also use an integer, + where 0=MO. + + leapdays: + Will add given days to the date found, if year is a leap + year, and the date found is post 28 of february. + + yearday, nlyearday: + Set the yearday or the non-leap year day (jump leap days). + These are converted to day/month/leapdays information. + + Here is the behavior of operations with relativedelta: + + 1. Calculate the absolute year, using the 'year' argument, or the + original datetime year, if the argument is not present. + + 2. Add the relative 'years' argument to the absolute year. + + 3. Do steps 1 and 2 for month/months. + + 4. Calculate the absolute day, using the 'day' argument, or the + original datetime day, if the argument is not present. Then, + subtract from the day until it fits in the year and month + found after their operations. + + 5. Add the relative 'days' argument to the absolute day. Notice + that the 'weeks' argument is multiplied by 7 and added to + 'days'. + + 6. Do steps 1 and 2 for hour/hours, minute/minutes, second/seconds, + microsecond/microseconds. + + 7. 
If the 'weekday' argument is present, calculate the weekday, + with the given (wday, nth) tuple. wday is the index of the + weekday (0-6, 0=Mon), and nth is the number of weeks to add + forward or backward, depending on its signal. Notice that if + the calculated date is already Monday, for example, using + (0, 1) or (0, -1) won't change the day. + """ + + def __init__(self, dt1=None, dt2=None, + years=0, months=0, days=0, leapdays=0, weeks=0, + hours=0, minutes=0, seconds=0, microseconds=0, + year=None, month=None, day=None, weekday=None, + yearday=None, nlyearday=None, + hour=None, minute=None, second=None, microsecond=None): + + # Check for non-integer values in integer-only quantities + if any(x is not None and x != int(x) for x in (years, months)): + raise ValueError("Non-integer years and months are " + "ambiguous and not currently supported.") + + if dt1 and dt2: + # datetime is a subclass of date. So both must be date + if not (isinstance(dt1, datetime.date) and + isinstance(dt2, datetime.date)): + raise TypeError("relativedelta only diffs datetime/date") + + # We allow two dates, or two datetimes, so we coerce them to be + # of the same type + if (isinstance(dt1, datetime.datetime) != + isinstance(dt2, datetime.datetime)): + if not isinstance(dt1, datetime.datetime): + dt1 = datetime.datetime.fromordinal(dt1.toordinal()) + elif not isinstance(dt2, datetime.datetime): + dt2 = datetime.datetime.fromordinal(dt2.toordinal()) + + self.years = 0 + self.months = 0 + self.days = 0 + self.leapdays = 0 + self.hours = 0 + self.minutes = 0 + self.seconds = 0 + self.microseconds = 0 + self.year = None + self.month = None + self.day = None + self.weekday = None + self.hour = None + self.minute = None + self.second = None + self.microsecond = None + self._has_time = 0 + + # Get year / month delta between the two + months = (dt1.year - dt2.year) * 12 + (dt1.month - dt2.month) + self._set_months(months) + + # Remove the year/month delta so the timedelta is just well-defined + # time units (seconds, days and microseconds) + dtm = self.__radd__(dt2) + + # If we've overshot our target, make an adjustment + if dt1 < dt2: + compare = operator.gt + increment = 1 + else: + compare = operator.lt + increment = -1 + + while compare(dt1, dtm): + months += increment + self._set_months(months) + dtm = self.__radd__(dt2) + + # Get the timedelta between the "months-adjusted" date and dt1 + delta = dt1 - dtm + self.seconds = delta.seconds + delta.days * 86400 + self.microseconds = delta.microseconds + else: + # Relative information + self.years = years + self.months = months + self.days = days + weeks * 7 + self.leapdays = leapdays + self.hours = hours + self.minutes = minutes + self.seconds = seconds + self.microseconds = microseconds + + # Absolute information + self.year = year + self.month = month + self.day = day + self.hour = hour + self.minute = minute + self.second = second + self.microsecond = microsecond + + if any(x is not None and int(x) != x + for x in (year, month, day, hour, + minute, second, microsecond)): + # For now we'll deprecate floats - later it'll be an error. + warn("Non-integer value passed as absolute information. 
" + + "This is not a well-defined condition and will raise " + + "errors in future versions.", DeprecationWarning) + + if isinstance(weekday, integer_types): + self.weekday = weekdays[weekday] + else: + self.weekday = weekday + + yday = 0 + if nlyearday: + yday = nlyearday + elif yearday: + yday = yearday + if yearday > 59: + self.leapdays = -1 + if yday: + ydayidx = [31, 59, 90, 120, 151, 181, 212, + 243, 273, 304, 334, 366] + for idx, ydays in enumerate(ydayidx): + if yday <= ydays: + self.month = idx+1 + if idx == 0: + self.day = yday + else: + self.day = yday-ydayidx[idx-1] + break + else: + raise ValueError("invalid year day (%d)" % yday) + + self._fix() + + def _fix(self): + if abs(self.microseconds) > 999999: + s = _sign(self.microseconds) + div, mod = divmod(self.microseconds * s, 1000000) + self.microseconds = mod * s + self.seconds += div * s + if abs(self.seconds) > 59: + s = _sign(self.seconds) + div, mod = divmod(self.seconds * s, 60) + self.seconds = mod * s + self.minutes += div * s + if abs(self.minutes) > 59: + s = _sign(self.minutes) + div, mod = divmod(self.minutes * s, 60) + self.minutes = mod * s + self.hours += div * s + if abs(self.hours) > 23: + s = _sign(self.hours) + div, mod = divmod(self.hours * s, 24) + self.hours = mod * s + self.days += div * s + if abs(self.months) > 11: + s = _sign(self.months) + div, mod = divmod(self.months * s, 12) + self.months = mod * s + self.years += div * s + if (self.hours or self.minutes or self.seconds or self.microseconds + or self.hour is not None or self.minute is not None or + self.second is not None or self.microsecond is not None): + self._has_time = 1 + else: + self._has_time = 0 + + @property + def weeks(self): + return self.days // 7 + + @weeks.setter + def weeks(self, value): + self.days = self.days - (self.weeks * 7) + value * 7 + + def _set_months(self, months): + self.months = months + if abs(self.months) > 11: + s = _sign(self.months) + div, mod = divmod(self.months * s, 12) + self.months = mod * s + self.years = div * s + else: + self.years = 0 + + def normalized(self): + """ + Return a version of this object represented entirely using integer + values for the relative attributes. + + >>> relativedelta(days=1.5, hours=2).normalized() + relativedelta(days=1, hours=14) + + :return: + Returns a :class:`dateutil.relativedelta.relativedelta` object. 
+ """ + # Cascade remainders down (rounding each to roughly nearest microsecond) + days = int(self.days) + + hours_f = round(self.hours + 24 * (self.days - days), 11) + hours = int(hours_f) + + minutes_f = round(self.minutes + 60 * (hours_f - hours), 10) + minutes = int(minutes_f) + + seconds_f = round(self.seconds + 60 * (minutes_f - minutes), 8) + seconds = int(seconds_f) + + microseconds = round(self.microseconds + 1e6 * (seconds_f - seconds)) + + # Constructor carries overflow back up with call to _fix() + return self.__class__(years=self.years, months=self.months, + days=days, hours=hours, minutes=minutes, + seconds=seconds, microseconds=microseconds, + leapdays=self.leapdays, year=self.year, + month=self.month, day=self.day, + weekday=self.weekday, hour=self.hour, + minute=self.minute, second=self.second, + microsecond=self.microsecond) + + def __add__(self, other): + if isinstance(other, relativedelta): + return self.__class__(years=other.years + self.years, + months=other.months + self.months, + days=other.days + self.days, + hours=other.hours + self.hours, + minutes=other.minutes + self.minutes, + seconds=other.seconds + self.seconds, + microseconds=(other.microseconds + + self.microseconds), + leapdays=other.leapdays or self.leapdays, + year=(other.year if other.year is not None + else self.year), + month=(other.month if other.month is not None + else self.month), + day=(other.day if other.day is not None + else self.day), + weekday=(other.weekday if other.weekday is not None + else self.weekday), + hour=(other.hour if other.hour is not None + else self.hour), + minute=(other.minute if other.minute is not None + else self.minute), + second=(other.second if other.second is not None + else self.second), + microsecond=(other.microsecond if other.microsecond + is not None else + self.microsecond)) + if isinstance(other, datetime.timedelta): + return self.__class__(years=self.years, + months=self.months, + days=self.days + other.days, + hours=self.hours, + minutes=self.minutes, + seconds=self.seconds + other.seconds, + microseconds=self.microseconds + other.microseconds, + leapdays=self.leapdays, + year=self.year, + month=self.month, + day=self.day, + weekday=self.weekday, + hour=self.hour, + minute=self.minute, + second=self.second, + microsecond=self.microsecond) + if not isinstance(other, datetime.date): + return NotImplemented + elif self._has_time and not isinstance(other, datetime.datetime): + other = datetime.datetime.fromordinal(other.toordinal()) + year = (self.year or other.year)+self.years + month = self.month or other.month + if self.months: + assert 1 <= abs(self.months) <= 12 + month += self.months + if month > 12: + year += 1 + month -= 12 + elif month < 1: + year -= 1 + month += 12 + day = min(calendar.monthrange(year, month)[1], + self.day or other.day) + repl = {"year": year, "month": month, "day": day} + for attr in ["hour", "minute", "second", "microsecond"]: + value = getattr(self, attr) + if value is not None: + repl[attr] = value + days = self.days + if self.leapdays and month > 2 and calendar.isleap(year): + days += self.leapdays + ret = (other.replace(**repl) + + datetime.timedelta(days=days, + hours=self.hours, + minutes=self.minutes, + seconds=self.seconds, + microseconds=self.microseconds)) + if self.weekday: + weekday, nth = self.weekday.weekday, self.weekday.n or 1 + jumpdays = (abs(nth) - 1) * 7 + if nth > 0: + jumpdays += (7 - ret.weekday() + weekday) % 7 + else: + jumpdays += (ret.weekday() - weekday) % 7 + jumpdays *= -1 + ret += 
datetime.timedelta(days=jumpdays) + return ret + + def __radd__(self, other): + return self.__add__(other) + + def __rsub__(self, other): + return self.__neg__().__radd__(other) + + def __sub__(self, other): + if not isinstance(other, relativedelta): + return NotImplemented # In case the other object defines __rsub__ + return self.__class__(years=self.years - other.years, + months=self.months - other.months, + days=self.days - other.days, + hours=self.hours - other.hours, + minutes=self.minutes - other.minutes, + seconds=self.seconds - other.seconds, + microseconds=self.microseconds - other.microseconds, + leapdays=self.leapdays or other.leapdays, + year=(self.year if self.year is not None + else other.year), + month=(self.month if self.month is not None else + other.month), + day=(self.day if self.day is not None else + other.day), + weekday=(self.weekday if self.weekday is not None else + other.weekday), + hour=(self.hour if self.hour is not None else + other.hour), + minute=(self.minute if self.minute is not None else + other.minute), + second=(self.second if self.second is not None else + other.second), + microsecond=(self.microsecond if self.microsecond + is not None else + other.microsecond)) + + def __neg__(self): + return self.__class__(years=-self.years, + months=-self.months, + days=-self.days, + hours=-self.hours, + minutes=-self.minutes, + seconds=-self.seconds, + microseconds=-self.microseconds, + leapdays=self.leapdays, + year=self.year, + month=self.month, + day=self.day, + weekday=self.weekday, + hour=self.hour, + minute=self.minute, + second=self.second, + microsecond=self.microsecond) + + def __bool__(self): + return not (not self.years and + not self.months and + not self.days and + not self.hours and + not self.minutes and + not self.seconds and + not self.microseconds and + not self.leapdays and + self.year is None and + self.month is None and + self.day is None and + self.weekday is None and + self.hour is None and + self.minute is None and + self.second is None and + self.microsecond is None) + # Compatibility with Python 2.x + __nonzero__ = __bool__ + + def __mul__(self, other): + try: + f = float(other) + except TypeError: + return NotImplemented + + return self.__class__(years=int(self.years * f), + months=int(self.months * f), + days=int(self.days * f), + hours=int(self.hours * f), + minutes=int(self.minutes * f), + seconds=int(self.seconds * f), + microseconds=int(self.microseconds * f), + leapdays=self.leapdays, + year=self.year, + month=self.month, + day=self.day, + weekday=self.weekday, + hour=self.hour, + minute=self.minute, + second=self.second, + microsecond=self.microsecond) + + __rmul__ = __mul__ + + def __eq__(self, other): + if not isinstance(other, relativedelta): + return NotImplemented + if self.weekday or other.weekday: + if not self.weekday or not other.weekday: + return False + if self.weekday.weekday != other.weekday.weekday: + return False + n1, n2 = self.weekday.n, other.weekday.n + if n1 != n2 and not ((not n1 or n1 == 1) and (not n2 or n2 == 1)): + return False + return (self.years == other.years and + self.months == other.months and + self.days == other.days and + self.hours == other.hours and + self.minutes == other.minutes and + self.seconds == other.seconds and + self.microseconds == other.microseconds and + self.leapdays == other.leapdays and + self.year == other.year and + self.month == other.month and + self.day == other.day and + self.hour == other.hour and + self.minute == other.minute and + self.second == other.second and + 
self.microsecond == other.microsecond) + + __hash__ = None + + def __ne__(self, other): + return not self.__eq__(other) + + def __div__(self, other): + try: + reciprocal = 1 / float(other) + except TypeError: + return NotImplemented + + return self.__mul__(reciprocal) + + __truediv__ = __div__ + + def __repr__(self): + l = [] + for attr in ["years", "months", "days", "leapdays", + "hours", "minutes", "seconds", "microseconds"]: + value = getattr(self, attr) + if value: + l.append("{attr}={value:+g}".format(attr=attr, value=value)) + for attr in ["year", "month", "day", "weekday", + "hour", "minute", "second", "microsecond"]: + value = getattr(self, attr) + if value is not None: + l.append("{attr}={value}".format(attr=attr, value=repr(value))) + return "{classname}({attrs})".format(classname=self.__class__.__name__, + attrs=", ".join(l)) + + +def _sign(x): + return int(copysign(1, x)) + +# vim:ts=4:sw=4:et diff --git a/env/lib/python3.6/site-packages/dateutil/rrule.py b/env/lib/python3.6/site-packages/dateutil/rrule.py new file mode 100644 index 0000000..429f8fc --- /dev/null +++ b/env/lib/python3.6/site-packages/dateutil/rrule.py @@ -0,0 +1,1610 @@ +# -*- coding: utf-8 -*- +""" +The rrule module offers a small, complete, and very fast, implementation of +the recurrence rules documented in the +`iCalendar RFC `_, +including support for caching of results. +""" +import itertools +import datetime +import calendar +import sys + +try: + from math import gcd +except ImportError: + from fractions import gcd + +from six import advance_iterator, integer_types +from six.moves import _thread, range +import heapq + +from ._common import weekday as weekdaybase + +# For warning about deprecation of until and count +from warnings import warn + +__all__ = ["rrule", "rruleset", "rrulestr", + "YEARLY", "MONTHLY", "WEEKLY", "DAILY", + "HOURLY", "MINUTELY", "SECONDLY", + "MO", "TU", "WE", "TH", "FR", "SA", "SU"] + +# Every mask is 7 days longer to handle cross-year weekly periods. +M366MASK = tuple([1]*31+[2]*29+[3]*31+[4]*30+[5]*31+[6]*30 + + [7]*31+[8]*31+[9]*30+[10]*31+[11]*30+[12]*31+[1]*7) +M365MASK = list(M366MASK) +M29, M30, M31 = list(range(1, 30)), list(range(1, 31)), list(range(1, 32)) +MDAY366MASK = tuple(M31+M29+M31+M30+M31+M30+M31+M31+M30+M31+M30+M31+M31[:7]) +MDAY365MASK = list(MDAY366MASK) +M29, M30, M31 = list(range(-29, 0)), list(range(-30, 0)), list(range(-31, 0)) +NMDAY366MASK = tuple(M31+M29+M31+M30+M31+M30+M31+M31+M30+M31+M30+M31+M31[:7]) +NMDAY365MASK = list(NMDAY366MASK) +M366RANGE = (0, 31, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335, 366) +M365RANGE = (0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334, 365) +WDAYMASK = [0, 1, 2, 3, 4, 5, 6]*55 +del M29, M30, M31, M365MASK[59], MDAY365MASK[59], NMDAY365MASK[31] +MDAY365MASK = tuple(MDAY365MASK) +M365MASK = tuple(M365MASK) + +FREQNAMES = ['YEARLY', 'MONTHLY', 'WEEKLY', 'DAILY', 'HOURLY', 'MINUTELY', 'SECONDLY'] + +(YEARLY, + MONTHLY, + WEEKLY, + DAILY, + HOURLY, + MINUTELY, + SECONDLY) = list(range(7)) + +# Imported on demand. +easter = None +parser = None + + +class weekday(weekdaybase): + """ + This version of weekday does not allow n = 0. + """ + def __init__(self, wkday, n=None): + if n == 0: + raise ValueError("Can't create weekday with n==0") + + super(weekday, self).__init__(wkday, n) + + +MO, TU, WE, TH, FR, SA, SU = weekdays = tuple(weekday(x) for x in range(7)) + + +def _invalidates_cache(f): + """ + Decorator for rruleset methods which may invalidate the + cached length. 
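+
+    A sketch of the intended use (the method name and body below are purely
+    illustrative, not taken from this module)::
+
+        @_invalidates_cache
+        def some_mutator(self, value):    # hypothetical method, illustration only
+            self._values.append(value)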
+ """ + def inner_func(self, *args, **kwargs): + rv = f(self, *args, **kwargs) + self._invalidate_cache() + return rv + + return inner_func + + +class rrulebase(object): + def __init__(self, cache=False): + if cache: + self._cache = [] + self._cache_lock = _thread.allocate_lock() + self._invalidate_cache() + else: + self._cache = None + self._cache_complete = False + self._len = None + + def __iter__(self): + if self._cache_complete: + return iter(self._cache) + elif self._cache is None: + return self._iter() + else: + return self._iter_cached() + + def _invalidate_cache(self): + if self._cache is not None: + self._cache = [] + self._cache_complete = False + self._cache_gen = self._iter() + + if self._cache_lock.locked(): + self._cache_lock.release() + + self._len = None + + def _iter_cached(self): + i = 0 + gen = self._cache_gen + cache = self._cache + acquire = self._cache_lock.acquire + release = self._cache_lock.release + while gen: + if i == len(cache): + acquire() + if self._cache_complete: + break + try: + for j in range(10): + cache.append(advance_iterator(gen)) + except StopIteration: + self._cache_gen = gen = None + self._cache_complete = True + break + release() + yield cache[i] + i += 1 + while i < self._len: + yield cache[i] + i += 1 + + def __getitem__(self, item): + if self._cache_complete: + return self._cache[item] + elif isinstance(item, slice): + if item.step and item.step < 0: + return list(iter(self))[item] + else: + return list(itertools.islice(self, + item.start or 0, + item.stop or sys.maxsize, + item.step or 1)) + elif item >= 0: + gen = iter(self) + try: + for i in range(item+1): + res = advance_iterator(gen) + except StopIteration: + raise IndexError + return res + else: + return list(iter(self))[item] + + def __contains__(self, item): + if self._cache_complete: + return item in self._cache + else: + for i in self: + if i == item: + return True + elif i > item: + return False + return False + + # __len__() introduces a large performance penality. + def count(self): + """ Returns the number of recurrences in this set. It will have go + trough the whole recurrence, if this hasn't been done before. """ + if self._len is None: + for x in self: + pass + return self._len + + def before(self, dt, inc=False): + """ Returns the last recurrence before the given datetime instance. The + inc keyword defines what happens if dt is an occurrence. With + inc=True, if dt itself is an occurrence, it will be returned. """ + if self._cache_complete: + gen = self._cache + else: + gen = self + last = None + if inc: + for i in gen: + if i > dt: + break + last = i + else: + for i in gen: + if i >= dt: + break + last = i + return last + + def after(self, dt, inc=False): + """ Returns the first recurrence after the given datetime instance. The + inc keyword defines what happens if dt is an occurrence. With + inc=True, if dt itself is an occurrence, it will be returned. """ + if self._cache_complete: + gen = self._cache + else: + gen = self + if inc: + for i in gen: + if i >= dt: + return i + else: + for i in gen: + if i > dt: + return i + return None + + def xafter(self, dt, count=None, inc=False): + """ + Generator which yields up to `count` recurrences after the given + datetime instance, equivalent to `after`. + + :param dt: + The datetime at which to start generating recurrences. + + :param count: + The maximum number of recurrences to generate. If `None` (default), + dates are generated until the recurrence rule is exhausted. 
+ + :param inc: + If `dt` is an instance of the rule and `inc` is `True`, it is + included in the output. + + :yields: Yields a sequence of `datetime` objects. + """ + + if self._cache_complete: + gen = self._cache + else: + gen = self + + # Select the comparison function + if inc: + comp = lambda dc, dtc: dc >= dtc + else: + comp = lambda dc, dtc: dc > dtc + + # Generate dates + n = 0 + for d in gen: + if comp(d, dt): + if count is not None: + n += 1 + if n > count: + break + + yield d + + def between(self, after, before, inc=False, count=1): + """ Returns all the occurrences of the rrule between after and before. + The inc keyword defines what happens if after and/or before are + themselves occurrences. With inc=True, they will be included in the + list, if they are found in the recurrence set. """ + if self._cache_complete: + gen = self._cache + else: + gen = self + started = False + l = [] + if inc: + for i in gen: + if i > before: + break + elif not started: + if i >= after: + started = True + l.append(i) + else: + l.append(i) + else: + for i in gen: + if i >= before: + break + elif not started: + if i > after: + started = True + l.append(i) + else: + l.append(i) + return l + + +class rrule(rrulebase): + """ + That's the base of the rrule operation. It accepts all the keywords + defined in the RFC as its constructor parameters (except byday, + which was renamed to byweekday) and more. The constructor prototype is:: + + rrule(freq) + + Where freq must be one of YEARLY, MONTHLY, WEEKLY, DAILY, HOURLY, MINUTELY, + or SECONDLY. + + .. note:: + Per RFC section 3.3.10, recurrence instances falling on invalid dates + and times are ignored rather than coerced: + + Recurrence rules may generate recurrence instances with an invalid + date (e.g., February 30) or nonexistent local time (e.g., 1:30 AM + on a day where the local time is moved forward by an hour at 1:00 + AM). Such recurrence instances MUST be ignored and MUST NOT be + counted as part of the recurrence set. + + This can lead to possibly surprising behavior when, for example, the + start date occurs at the end of the month: + + >>> from dateutil.rrule import rrule, MONTHLY + >>> from datetime import datetime + >>> start_date = datetime(2014, 12, 31) + >>> list(rrule(freq=MONTHLY, count=4, dtstart=start_date)) + ... # doctest: +NORMALIZE_WHITESPACE + [datetime.datetime(2014, 12, 31, 0, 0), + datetime.datetime(2015, 1, 31, 0, 0), + datetime.datetime(2015, 3, 31, 0, 0), + datetime.datetime(2015, 5, 31, 0, 0)] + + Additionally, it supports the following keyword arguments: + + :param cache: + If given, it must be a boolean value specifying to enable or disable + caching of results. If you will use the same rrule instance multiple + times, enabling caching will improve the performance considerably. + :param dtstart: + The recurrence start. Besides being the base for the recurrence, + missing parameters in the final recurrence instances will also be + extracted from this date. If not given, datetime.now() will be used + instead. + :param interval: + The interval between each freq iteration. For example, when using + YEARLY, an interval of 2 means once every two years, but with HOURLY, + it means once every two hours. The default interval is 1. + :param wkst: + The week start day. Must be one of the MO, TU, WE constants, or an + integer, specifying the first day of the week. This will affect + recurrences based on weekly periods. 
The default week start is got + from calendar.firstweekday(), and may be modified by + calendar.setfirstweekday(). + :param count: + How many occurrences will be generated. + + .. note:: + As of version 2.5.0, the use of the ``until`` keyword together + with the ``count`` keyword is deprecated per RFC-2445 Sec. 4.3.10. + :param until: + If given, this must be a datetime instance, that will specify the + limit of the recurrence. The last recurrence in the rule is the greatest + datetime that is less than or equal to the value specified in the + ``until`` parameter. + + .. note:: + As of version 2.5.0, the use of the ``until`` keyword together + with the ``count`` keyword is deprecated per RFC-2445 Sec. 4.3.10. + :param bysetpos: + If given, it must be either an integer, or a sequence of integers, + positive or negative. Each given integer will specify an occurrence + number, corresponding to the nth occurrence of the rule inside the + frequency period. For example, a bysetpos of -1 if combined with a + MONTHLY frequency, and a byweekday of (MO, TU, WE, TH, FR), will + result in the last work day of every month. + :param bymonth: + If given, it must be either an integer, or a sequence of integers, + meaning the months to apply the recurrence to. + :param bymonthday: + If given, it must be either an integer, or a sequence of integers, + meaning the month days to apply the recurrence to. + :param byyearday: + If given, it must be either an integer, or a sequence of integers, + meaning the year days to apply the recurrence to. + :param byweekno: + If given, it must be either an integer, or a sequence of integers, + meaning the week numbers to apply the recurrence to. Week numbers + have the meaning described in ISO8601, that is, the first week of + the year is that containing at least four days of the new year. + :param byweekday: + If given, it must be either an integer (0 == MO), a sequence of + integers, one of the weekday constants (MO, TU, etc), or a sequence + of these constants. When given, these variables will define the + weekdays where the recurrence will be applied. It's also possible to + use an argument n for the weekday instances, which will mean the nth + occurrence of this weekday in the period. For example, with MONTHLY, + or with YEARLY and BYMONTH, using FR(+1) in byweekday will specify the + first friday of the month where the recurrence happens. Notice that in + the RFC documentation, this is specified as BYDAY, but was renamed to + avoid the ambiguity of that keyword. + :param byhour: + If given, it must be either an integer, or a sequence of integers, + meaning the hours to apply the recurrence to. + :param byminute: + If given, it must be either an integer, or a sequence of integers, + meaning the minutes to apply the recurrence to. + :param bysecond: + If given, it must be either an integer, or a sequence of integers, + meaning the seconds to apply the recurrence to. + :param byeaster: + If given, it must be either an integer, or a sequence of integers, + positive or negative. Each integer will define an offset from the + Easter Sunday. Passing the offset 0 to byeaster will yield the Easter + Sunday itself. This is an extension to the RFC specification. 
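+
+    A further illustrative sketch combining some of the keywords described
+    above (``interval``, ``count``, ``byweekday`` and ``dtstart``):
+
+        >>> from dateutil.rrule import rrule, WEEKLY, FR
+        >>> from datetime import datetime
+        >>> list(rrule(WEEKLY, interval=2, count=3, byweekday=FR, dtstart=datetime(2014, 12, 31)))
+        ... # doctest: +NORMALIZE_WHITESPACE
+        [datetime.datetime(2015, 1, 2, 0, 0),
+         datetime.datetime(2015, 1, 16, 0, 0),
+         datetime.datetime(2015, 1, 30, 0, 0)]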
+ """ + def __init__(self, freq, dtstart=None, + interval=1, wkst=None, count=None, until=None, bysetpos=None, + bymonth=None, bymonthday=None, byyearday=None, byeaster=None, + byweekno=None, byweekday=None, + byhour=None, byminute=None, bysecond=None, + cache=False): + super(rrule, self).__init__(cache) + global easter + if not dtstart: + dtstart = datetime.datetime.now().replace(microsecond=0) + elif not isinstance(dtstart, datetime.datetime): + dtstart = datetime.datetime.fromordinal(dtstart.toordinal()) + else: + dtstart = dtstart.replace(microsecond=0) + self._dtstart = dtstart + self._tzinfo = dtstart.tzinfo + self._freq = freq + self._interval = interval + self._count = count + + # Cache the original byxxx rules, if they are provided, as the _byxxx + # attributes do not necessarily map to the inputs, and this can be + # a problem in generating the strings. Only store things if they've + # been supplied (the string retrieval will just use .get()) + self._original_rule = {} + + if until and not isinstance(until, datetime.datetime): + until = datetime.datetime.fromordinal(until.toordinal()) + self._until = until + + if count is not None and until: + warn("Using both 'count' and 'until' is inconsistent with RFC 2445" + " and has been deprecated in dateutil. Future versions will " + "raise an error.", DeprecationWarning) + + if wkst is None: + self._wkst = calendar.firstweekday() + elif isinstance(wkst, integer_types): + self._wkst = wkst + else: + self._wkst = wkst.weekday + + if bysetpos is None: + self._bysetpos = None + elif isinstance(bysetpos, integer_types): + if bysetpos == 0 or not (-366 <= bysetpos <= 366): + raise ValueError("bysetpos must be between 1 and 366, " + "or between -366 and -1") + self._bysetpos = (bysetpos,) + else: + self._bysetpos = tuple(bysetpos) + for pos in self._bysetpos: + if pos == 0 or not (-366 <= pos <= 366): + raise ValueError("bysetpos must be between 1 and 366, " + "or between -366 and -1") + + if self._bysetpos: + self._original_rule['bysetpos'] = self._bysetpos + + if (byweekno is None and byyearday is None and bymonthday is None and + byweekday is None and byeaster is None): + if freq == YEARLY: + if bymonth is None: + bymonth = dtstart.month + self._original_rule['bymonth'] = None + bymonthday = dtstart.day + self._original_rule['bymonthday'] = None + elif freq == MONTHLY: + bymonthday = dtstart.day + self._original_rule['bymonthday'] = None + elif freq == WEEKLY: + byweekday = dtstart.weekday() + self._original_rule['byweekday'] = None + + # bymonth + if bymonth is None: + self._bymonth = None + else: + if isinstance(bymonth, integer_types): + bymonth = (bymonth,) + + self._bymonth = tuple(sorted(set(bymonth))) + + if 'bymonth' not in self._original_rule: + self._original_rule['bymonth'] = self._bymonth + + # byyearday + if byyearday is None: + self._byyearday = None + else: + if isinstance(byyearday, integer_types): + byyearday = (byyearday,) + + self._byyearday = tuple(sorted(set(byyearday))) + self._original_rule['byyearday'] = self._byyearday + + # byeaster + if byeaster is not None: + if not easter: + from dateutil import easter + if isinstance(byeaster, integer_types): + self._byeaster = (byeaster,) + else: + self._byeaster = tuple(sorted(byeaster)) + + self._original_rule['byeaster'] = self._byeaster + else: + self._byeaster = None + + # bymonthday + if bymonthday is None: + self._bymonthday = () + self._bynmonthday = () + else: + if isinstance(bymonthday, integer_types): + bymonthday = (bymonthday,) + + bymonthday = set(bymonthday) # 
Ensure it's unique + + self._bymonthday = tuple(sorted(x for x in bymonthday if x > 0)) + self._bynmonthday = tuple(sorted(x for x in bymonthday if x < 0)) + + # Storing positive numbers first, then negative numbers + if 'bymonthday' not in self._original_rule: + self._original_rule['bymonthday'] = tuple( + itertools.chain(self._bymonthday, self._bynmonthday)) + + # byweekno + if byweekno is None: + self._byweekno = None + else: + if isinstance(byweekno, integer_types): + byweekno = (byweekno,) + + self._byweekno = tuple(sorted(set(byweekno))) + + self._original_rule['byweekno'] = self._byweekno + + # byweekday / bynweekday + if byweekday is None: + self._byweekday = None + self._bynweekday = None + else: + # If it's one of the valid non-sequence types, convert to a + # single-element sequence before the iterator that builds the + # byweekday set. + if isinstance(byweekday, integer_types) or hasattr(byweekday, "n"): + byweekday = (byweekday,) + + self._byweekday = set() + self._bynweekday = set() + for wday in byweekday: + if isinstance(wday, integer_types): + self._byweekday.add(wday) + elif not wday.n or freq > MONTHLY: + self._byweekday.add(wday.weekday) + else: + self._bynweekday.add((wday.weekday, wday.n)) + + if not self._byweekday: + self._byweekday = None + elif not self._bynweekday: + self._bynweekday = None + + if self._byweekday is not None: + self._byweekday = tuple(sorted(self._byweekday)) + orig_byweekday = [weekday(x) for x in self._byweekday] + else: + orig_byweekday = tuple() + + if self._bynweekday is not None: + self._bynweekday = tuple(sorted(self._bynweekday)) + orig_bynweekday = [weekday(*x) for x in self._bynweekday] + else: + orig_bynweekday = tuple() + + if 'byweekday' not in self._original_rule: + self._original_rule['byweekday'] = tuple(itertools.chain( + orig_byweekday, orig_bynweekday)) + + # byhour + if byhour is None: + if freq < HOURLY: + self._byhour = set((dtstart.hour,)) + else: + self._byhour = None + else: + if isinstance(byhour, integer_types): + byhour = (byhour,) + + if freq == HOURLY: + self._byhour = self.__construct_byset(start=dtstart.hour, + byxxx=byhour, + base=24) + else: + self._byhour = set(byhour) + + self._byhour = tuple(sorted(self._byhour)) + self._original_rule['byhour'] = self._byhour + + # byminute + if byminute is None: + if freq < MINUTELY: + self._byminute = set((dtstart.minute,)) + else: + self._byminute = None + else: + if isinstance(byminute, integer_types): + byminute = (byminute,) + + if freq == MINUTELY: + self._byminute = self.__construct_byset(start=dtstart.minute, + byxxx=byminute, + base=60) + else: + self._byminute = set(byminute) + + self._byminute = tuple(sorted(self._byminute)) + self._original_rule['byminute'] = self._byminute + + # bysecond + if bysecond is None: + if freq < SECONDLY: + self._bysecond = ((dtstart.second,)) + else: + self._bysecond = None + else: + if isinstance(bysecond, integer_types): + bysecond = (bysecond,) + + self._bysecond = set(bysecond) + + if freq == SECONDLY: + self._bysecond = self.__construct_byset(start=dtstart.second, + byxxx=bysecond, + base=60) + else: + self._bysecond = set(bysecond) + + self._bysecond = tuple(sorted(self._bysecond)) + self._original_rule['bysecond'] = self._bysecond + + if self._freq >= HOURLY: + self._timeset = None + else: + self._timeset = [] + for hour in self._byhour: + for minute in self._byminute: + for second in self._bysecond: + self._timeset.append( + datetime.time(hour, minute, second, + tzinfo=self._tzinfo)) + self._timeset.sort() + self._timeset = 
tuple(self._timeset) + + def __str__(self): + """ + Output a string that would generate this RRULE if passed to rrulestr. + This is mostly compatible with RFC2445, except for the + dateutil-specific extension BYEASTER. + """ + + output = [] + h, m, s = [None] * 3 + if self._dtstart: + output.append(self._dtstart.strftime('DTSTART:%Y%m%dT%H%M%S')) + h, m, s = self._dtstart.timetuple()[3:6] + + parts = ['FREQ=' + FREQNAMES[self._freq]] + if self._interval != 1: + parts.append('INTERVAL=' + str(self._interval)) + + if self._wkst: + parts.append('WKST=' + repr(weekday(self._wkst))[0:2]) + + if self._count is not None: + parts.append('COUNT=' + str(self._count)) + + if self._until: + parts.append(self._until.strftime('UNTIL=%Y%m%dT%H%M%S')) + + if self._original_rule.get('byweekday') is not None: + # The str() method on weekday objects doesn't generate + # RFC2445-compliant strings, so we should modify that. + original_rule = dict(self._original_rule) + wday_strings = [] + for wday in original_rule['byweekday']: + if wday.n: + wday_strings.append('{n:+d}{wday}'.format( + n=wday.n, + wday=repr(wday)[0:2])) + else: + wday_strings.append(repr(wday)) + + original_rule['byweekday'] = wday_strings + else: + original_rule = self._original_rule + + partfmt = '{name}={vals}' + for name, key in [('BYSETPOS', 'bysetpos'), + ('BYMONTH', 'bymonth'), + ('BYMONTHDAY', 'bymonthday'), + ('BYYEARDAY', 'byyearday'), + ('BYWEEKNO', 'byweekno'), + ('BYDAY', 'byweekday'), + ('BYHOUR', 'byhour'), + ('BYMINUTE', 'byminute'), + ('BYSECOND', 'bysecond'), + ('BYEASTER', 'byeaster')]: + value = original_rule.get(key) + if value: + parts.append(partfmt.format(name=name, vals=(','.join(str(v) + for v in value)))) + + output.append(';'.join(parts)) + return '\n'.join(output) + + def replace(self, **kwargs): + """Return new rrule with same attributes except for those attributes given new + values by whichever keyword arguments are specified.""" + new_kwargs = {"interval": self._interval, + "count": self._count, + "dtstart": self._dtstart, + "freq": self._freq, + "until": self._until, + "wkst": self._wkst, + "cache": False if self._cache is None else True } + new_kwargs.update(self._original_rule) + new_kwargs.update(kwargs) + return rrule(**new_kwargs) + + def _iter(self): + year, month, day, hour, minute, second, weekday, yearday, _ = \ + self._dtstart.timetuple() + + # Some local variables to speed things up a bit + freq = self._freq + interval = self._interval + wkst = self._wkst + until = self._until + bymonth = self._bymonth + byweekno = self._byweekno + byyearday = self._byyearday + byweekday = self._byweekday + byeaster = self._byeaster + bymonthday = self._bymonthday + bynmonthday = self._bynmonthday + bysetpos = self._bysetpos + byhour = self._byhour + byminute = self._byminute + bysecond = self._bysecond + + ii = _iterinfo(self) + ii.rebuild(year, month) + + getdayset = {YEARLY: ii.ydayset, + MONTHLY: ii.mdayset, + WEEKLY: ii.wdayset, + DAILY: ii.ddayset, + HOURLY: ii.ddayset, + MINUTELY: ii.ddayset, + SECONDLY: ii.ddayset}[freq] + + if freq < HOURLY: + timeset = self._timeset + else: + gettimeset = {HOURLY: ii.htimeset, + MINUTELY: ii.mtimeset, + SECONDLY: ii.stimeset}[freq] + if ((freq >= HOURLY and + self._byhour and hour not in self._byhour) or + (freq >= MINUTELY and + self._byminute and minute not in self._byminute) or + (freq >= SECONDLY and + self._bysecond and second not in self._bysecond)): + timeset = () + else: + timeset = gettimeset(hour, minute, second) + + total = 0 + count = self._count + while True: 
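+            # Each pass of this loop handles one frequency period: build the
+            # candidate day set for the period, filter it against the BYXXX
+            # masks computed by _iterinfo, yield the surviving datetimes
+            # (honouring bysetpos, count and until), then advance to the next
+            # period by `interval`.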
+ # Get dayset with the right frequency + dayset, start, end = getdayset(year, month, day) + + # Do the "hard" work ;-) + filtered = False + for i in dayset[start:end]: + if ((bymonth and ii.mmask[i] not in bymonth) or + (byweekno and not ii.wnomask[i]) or + (byweekday and ii.wdaymask[i] not in byweekday) or + (ii.nwdaymask and not ii.nwdaymask[i]) or + (byeaster and not ii.eastermask[i]) or + ((bymonthday or bynmonthday) and + ii.mdaymask[i] not in bymonthday and + ii.nmdaymask[i] not in bynmonthday) or + (byyearday and + ((i < ii.yearlen and i+1 not in byyearday and + -ii.yearlen+i not in byyearday) or + (i >= ii.yearlen and i+1-ii.yearlen not in byyearday and + -ii.nextyearlen+i-ii.yearlen not in byyearday)))): + dayset[i] = None + filtered = True + + # Output results + if bysetpos and timeset: + poslist = [] + for pos in bysetpos: + if pos < 0: + daypos, timepos = divmod(pos, len(timeset)) + else: + daypos, timepos = divmod(pos-1, len(timeset)) + try: + i = [x for x in dayset[start:end] + if x is not None][daypos] + time = timeset[timepos] + except IndexError: + pass + else: + date = datetime.date.fromordinal(ii.yearordinal+i) + res = datetime.datetime.combine(date, time) + if res not in poslist: + poslist.append(res) + poslist.sort() + for res in poslist: + if until and res > until: + self._len = total + return + elif res >= self._dtstart: + if count is not None: + count -= 1 + if count < 0: + self._len = total + return + total += 1 + yield res + else: + for i in dayset[start:end]: + if i is not None: + date = datetime.date.fromordinal(ii.yearordinal + i) + for time in timeset: + res = datetime.datetime.combine(date, time) + if until and res > until: + self._len = total + return + elif res >= self._dtstart: + if count is not None: + count -= 1 + if count < 0: + self._len = total + return + + total += 1 + yield res + + # Handle frequency and interval + fixday = False + if freq == YEARLY: + year += interval + if year > datetime.MAXYEAR: + self._len = total + return + ii.rebuild(year, month) + elif freq == MONTHLY: + month += interval + if month > 12: + div, mod = divmod(month, 12) + month = mod + year += div + if month == 0: + month = 12 + year -= 1 + if year > datetime.MAXYEAR: + self._len = total + return + ii.rebuild(year, month) + elif freq == WEEKLY: + if wkst > weekday: + day += -(weekday+1+(6-wkst))+self._interval*7 + else: + day += -(weekday-wkst)+self._interval*7 + weekday = wkst + fixday = True + elif freq == DAILY: + day += interval + fixday = True + elif freq == HOURLY: + if filtered: + # Jump to one iteration before next day + hour += ((23-hour)//interval)*interval + + if byhour: + ndays, hour = self.__mod_distance(value=hour, + byxxx=self._byhour, + base=24) + else: + ndays, hour = divmod(hour+interval, 24) + + if ndays: + day += ndays + fixday = True + + timeset = gettimeset(hour, minute, second) + elif freq == MINUTELY: + if filtered: + # Jump to one iteration before next day + minute += ((1439-(hour*60+minute))//interval)*interval + + valid = False + rep_rate = (24*60) + for j in range(rep_rate // gcd(interval, rep_rate)): + if byminute: + nhours, minute = \ + self.__mod_distance(value=minute, + byxxx=self._byminute, + base=60) + else: + nhours, minute = divmod(minute+interval, 60) + + div, hour = divmod(hour+nhours, 24) + if div: + day += div + fixday = True + filtered = False + + if not byhour or hour in byhour: + valid = True + break + + if not valid: + raise ValueError('Invalid combination of interval and ' + + 'byhour resulting in empty rule.') + + timeset = 
gettimeset(hour, minute, second) + elif freq == SECONDLY: + if filtered: + # Jump to one iteration before next day + second += (((86399 - (hour * 3600 + minute * 60 + second)) + // interval) * interval) + + rep_rate = (24 * 3600) + valid = False + for j in range(0, rep_rate // gcd(interval, rep_rate)): + if bysecond: + nminutes, second = \ + self.__mod_distance(value=second, + byxxx=self._bysecond, + base=60) + else: + nminutes, second = divmod(second+interval, 60) + + div, minute = divmod(minute+nminutes, 60) + if div: + hour += div + div, hour = divmod(hour, 24) + if div: + day += div + fixday = True + + if ((not byhour or hour in byhour) and + (not byminute or minute in byminute) and + (not bysecond or second in bysecond)): + valid = True + break + + if not valid: + raise ValueError('Invalid combination of interval, ' + + 'byhour and byminute resulting in empty' + + ' rule.') + + timeset = gettimeset(hour, minute, second) + + if fixday and day > 28: + daysinmonth = calendar.monthrange(year, month)[1] + if day > daysinmonth: + while day > daysinmonth: + day -= daysinmonth + month += 1 + if month == 13: + month = 1 + year += 1 + if year > datetime.MAXYEAR: + self._len = total + return + daysinmonth = calendar.monthrange(year, month)[1] + ii.rebuild(year, month) + + def __construct_byset(self, start, byxxx, base): + """ + If a `BYXXX` sequence is passed to the constructor at the same level as + `FREQ` (e.g. `FREQ=HOURLY,BYHOUR={2,4,7},INTERVAL=3`), there are some + specifications which cannot be reached given some starting conditions. + + This occurs whenever the interval is not coprime with the base of a + given unit and the difference between the starting position and the + ending position is not coprime with the greatest common denominator + between the interval and the base. For example, with a FREQ of hourly + starting at 17:00 and an interval of 4, the only valid values for + BYHOUR would be {21, 1, 5, 9, 13, 17}, because 4 and 24 are not + coprime. + + :param start: + Specifies the starting position. + :param byxxx: + An iterable containing the list of allowed values. + :param base: + The largest allowable value for the specified frequency (e.g. + 24 hours, 60 minutes). + + This does not preserve the type of the iterable, returning a set, since + the values should be unique and the order is irrelevant, this will + speed up later lookups. + + In the event of an empty set, raises a :exception:`ValueError`, as this + results in an empty rrule. + """ + + cset = set() + + # Support a single byxxx value. + if isinstance(byxxx, integer_types): + byxxx = (byxxx, ) + + for num in byxxx: + i_gcd = gcd(self._interval, base) + # Use divmod rather than % because we need to wrap negative nums. + if i_gcd == 1 or divmod(num - start, i_gcd)[1] == 0: + cset.add(num) + + if len(cset) == 0: + raise ValueError("Invalid rrule byxxx generates an empty set.") + + return cset + + def __mod_distance(self, value, byxxx, base): + """ + Calculates the next value in a sequence where the `FREQ` parameter is + specified along with a `BYXXX` parameter at the same "level" + (e.g. `HOURLY` specified with `BYHOUR`). + + :param value: + The old value of the component. + :param byxxx: + The `BYXXX` set, which should have been generated by + `rrule._construct_byset`, or something else which checks that a + valid rule is present. + :param base: + The largest allowable value for the specified frequency (e.g. + 24 hours, 60 minutes). 
+ + If a valid value is not found after `base` iterations (the maximum + number before the sequence would start to repeat), this raises a + :exception:`ValueError`, as no valid values were found. + + This returns a tuple of `divmod(n*interval, base)`, where `n` is the + smallest number of `interval` repetitions until the next specified + value in `byxxx` is found. + """ + accumulator = 0 + for ii in range(1, base + 1): + # Using divmod() over % to account for negative intervals + div, value = divmod(value + self._interval, base) + accumulator += div + if value in byxxx: + return (accumulator, value) + + +class _iterinfo(object): + __slots__ = ["rrule", "lastyear", "lastmonth", + "yearlen", "nextyearlen", "yearordinal", "yearweekday", + "mmask", "mrange", "mdaymask", "nmdaymask", + "wdaymask", "wnomask", "nwdaymask", "eastermask"] + + def __init__(self, rrule): + for attr in self.__slots__: + setattr(self, attr, None) + self.rrule = rrule + + def rebuild(self, year, month): + # Every mask is 7 days longer to handle cross-year weekly periods. + rr = self.rrule + if year != self.lastyear: + self.yearlen = 365 + calendar.isleap(year) + self.nextyearlen = 365 + calendar.isleap(year + 1) + firstyday = datetime.date(year, 1, 1) + self.yearordinal = firstyday.toordinal() + self.yearweekday = firstyday.weekday() + + wday = datetime.date(year, 1, 1).weekday() + if self.yearlen == 365: + self.mmask = M365MASK + self.mdaymask = MDAY365MASK + self.nmdaymask = NMDAY365MASK + self.wdaymask = WDAYMASK[wday:] + self.mrange = M365RANGE + else: + self.mmask = M366MASK + self.mdaymask = MDAY366MASK + self.nmdaymask = NMDAY366MASK + self.wdaymask = WDAYMASK[wday:] + self.mrange = M366RANGE + + if not rr._byweekno: + self.wnomask = None + else: + self.wnomask = [0]*(self.yearlen+7) + # no1wkst = firstwkst = self.wdaymask.index(rr._wkst) + no1wkst = firstwkst = (7-self.yearweekday+rr._wkst) % 7 + if no1wkst >= 4: + no1wkst = 0 + # Number of days in the year, plus the days we got + # from last year. + wyearlen = self.yearlen+(self.yearweekday-rr._wkst) % 7 + else: + # Number of days in the year, minus the days we + # left in last year. + wyearlen = self.yearlen-no1wkst + div, mod = divmod(wyearlen, 7) + numweeks = div+mod//4 + for n in rr._byweekno: + if n < 0: + n += numweeks+1 + if not (0 < n <= numweeks): + continue + if n > 1: + i = no1wkst+(n-1)*7 + if no1wkst != firstwkst: + i -= 7-firstwkst + else: + i = no1wkst + for j in range(7): + self.wnomask[i] = 1 + i += 1 + if self.wdaymask[i] == rr._wkst: + break + if 1 in rr._byweekno: + # Check week number 1 of next year as well + # TODO: Check -numweeks for next year. + i = no1wkst+numweeks*7 + if no1wkst != firstwkst: + i -= 7-firstwkst + if i < self.yearlen: + # If week starts in next year, we + # don't care about it. + for j in range(7): + self.wnomask[i] = 1 + i += 1 + if self.wdaymask[i] == rr._wkst: + break + if no1wkst: + # Check last week number of last year as + # well. If no1wkst is 0, either the year + # started on week start, or week number 1 + # got days from last year, so there are no + # days from last year's last week number in + # this year. 
+ if -1 not in rr._byweekno: + lyearweekday = datetime.date(year-1, 1, 1).weekday() + lno1wkst = (7-lyearweekday+rr._wkst) % 7 + lyearlen = 365+calendar.isleap(year-1) + if lno1wkst >= 4: + lno1wkst = 0 + lnumweeks = 52+(lyearlen + + (lyearweekday-rr._wkst) % 7) % 7//4 + else: + lnumweeks = 52+(self.yearlen-no1wkst) % 7//4 + else: + lnumweeks = -1 + if lnumweeks in rr._byweekno: + for i in range(no1wkst): + self.wnomask[i] = 1 + + if (rr._bynweekday and (month != self.lastmonth or + year != self.lastyear)): + ranges = [] + if rr._freq == YEARLY: + if rr._bymonth: + for month in rr._bymonth: + ranges.append(self.mrange[month-1:month+1]) + else: + ranges = [(0, self.yearlen)] + elif rr._freq == MONTHLY: + ranges = [self.mrange[month-1:month+1]] + if ranges: + # Weekly frequency won't get here, so we may not + # care about cross-year weekly periods. + self.nwdaymask = [0]*self.yearlen + for first, last in ranges: + last -= 1 + for wday, n in rr._bynweekday: + if n < 0: + i = last+(n+1)*7 + i -= (self.wdaymask[i]-wday) % 7 + else: + i = first+(n-1)*7 + i += (7-self.wdaymask[i]+wday) % 7 + if first <= i <= last: + self.nwdaymask[i] = 1 + + if rr._byeaster: + self.eastermask = [0]*(self.yearlen+7) + eyday = easter.easter(year).toordinal()-self.yearordinal + for offset in rr._byeaster: + self.eastermask[eyday+offset] = 1 + + self.lastyear = year + self.lastmonth = month + + def ydayset(self, year, month, day): + return list(range(self.yearlen)), 0, self.yearlen + + def mdayset(self, year, month, day): + dset = [None]*self.yearlen + start, end = self.mrange[month-1:month+1] + for i in range(start, end): + dset[i] = i + return dset, start, end + + def wdayset(self, year, month, day): + # We need to handle cross-year weeks here. + dset = [None]*(self.yearlen+7) + i = datetime.date(year, month, day).toordinal()-self.yearordinal + start = i + for j in range(7): + dset[i] = i + i += 1 + # if (not (0 <= i < self.yearlen) or + # self.wdaymask[i] == self.rrule._wkst): + # This will cross the year boundary, if necessary. + if self.wdaymask[i] == self.rrule._wkst: + break + return dset, start, i + + def ddayset(self, year, month, day): + dset = [None] * self.yearlen + i = datetime.date(year, month, day).toordinal() - self.yearordinal + dset[i] = i + return dset, i, i + 1 + + def htimeset(self, hour, minute, second): + tset = [] + rr = self.rrule + for minute in rr._byminute: + for second in rr._bysecond: + tset.append(datetime.time(hour, minute, second, + tzinfo=rr._tzinfo)) + tset.sort() + return tset + + def mtimeset(self, hour, minute, second): + tset = [] + rr = self.rrule + for second in rr._bysecond: + tset.append(datetime.time(hour, minute, second, tzinfo=rr._tzinfo)) + tset.sort() + return tset + + def stimeset(self, hour, minute, second): + return (datetime.time(hour, minute, second, + tzinfo=self.rrule._tzinfo),) + + +class rruleset(rrulebase): + """ The rruleset type allows more complex recurrence setups, mixing + multiple rules, dates, exclusion rules, and exclusion dates. The type + constructor takes the following keyword arguments: + + :param cache: If True, caching of results will be enabled, improving + performance of multiple queries considerably. 
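+        A short illustrative example (dates assume a ``dtstart`` of
+        2024-01-01, a Monday): the weekdays of the first week of 2024,
+        obtained by excluding weekends from a daily rule:
+
+            >>> import datetime
+            >>> from dateutil.rrule import rrule, rruleset, DAILY, SA, SU
+            >>> rset = rruleset()
+            >>> rset.rrule(rrule(DAILY, count=7,
+            ...                  dtstart=datetime.datetime(2024, 1, 1)))
+            >>> rset.exrule(rrule(DAILY, byweekday=(SA, SU),
+            ...                   dtstart=datetime.datetime(2024, 1, 1)))
+            >>> [d.day for d in rset]
+            [1, 2, 3, 4, 5]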
""" + + class _genitem(object): + def __init__(self, genlist, gen): + try: + self.dt = advance_iterator(gen) + genlist.append(self) + except StopIteration: + pass + self.genlist = genlist + self.gen = gen + + def __next__(self): + try: + self.dt = advance_iterator(self.gen) + except StopIteration: + if self.genlist[0] is self: + heapq.heappop(self.genlist) + else: + self.genlist.remove(self) + heapq.heapify(self.genlist) + + next = __next__ + + def __lt__(self, other): + return self.dt < other.dt + + def __gt__(self, other): + return self.dt > other.dt + + def __eq__(self, other): + return self.dt == other.dt + + def __ne__(self, other): + return self.dt != other.dt + + def __init__(self, cache=False): + super(rruleset, self).__init__(cache) + self._rrule = [] + self._rdate = [] + self._exrule = [] + self._exdate = [] + + @_invalidates_cache + def rrule(self, rrule): + """ Include the given :py:class:`rrule` instance in the recurrence set + generation. """ + self._rrule.append(rrule) + + @_invalidates_cache + def rdate(self, rdate): + """ Include the given :py:class:`datetime` instance in the recurrence + set generation. """ + self._rdate.append(rdate) + + @_invalidates_cache + def exrule(self, exrule): + """ Include the given rrule instance in the recurrence set exclusion + list. Dates which are part of the given recurrence rules will not + be generated, even if some inclusive rrule or rdate matches them. + """ + self._exrule.append(exrule) + + @_invalidates_cache + def exdate(self, exdate): + """ Include the given datetime instance in the recurrence set + exclusion list. Dates included that way will not be generated, + even if some inclusive rrule or rdate matches them. """ + self._exdate.append(exdate) + + def _iter(self): + rlist = [] + self._rdate.sort() + self._genitem(rlist, iter(self._rdate)) + for gen in [iter(x) for x in self._rrule]: + self._genitem(rlist, gen) + exlist = [] + self._exdate.sort() + self._genitem(exlist, iter(self._exdate)) + for gen in [iter(x) for x in self._exrule]: + self._genitem(exlist, gen) + lastdt = None + total = 0 + heapq.heapify(rlist) + heapq.heapify(exlist) + while rlist: + ritem = rlist[0] + if not lastdt or lastdt != ritem.dt: + while exlist and exlist[0] < ritem: + exitem = exlist[0] + advance_iterator(exitem) + if exlist and exlist[0] is exitem: + heapq.heapreplace(exlist, exitem) + if not exlist or ritem != exlist[0]: + total += 1 + yield ritem.dt + lastdt = ritem.dt + advance_iterator(ritem) + if rlist and rlist[0] is ritem: + heapq.heapreplace(rlist, ritem) + self._len = total + + +class _rrulestr(object): + + _freq_map = {"YEARLY": YEARLY, + "MONTHLY": MONTHLY, + "WEEKLY": WEEKLY, + "DAILY": DAILY, + "HOURLY": HOURLY, + "MINUTELY": MINUTELY, + "SECONDLY": SECONDLY} + + _weekday_map = {"MO": 0, "TU": 1, "WE": 2, "TH": 3, + "FR": 4, "SA": 5, "SU": 6} + + def _handle_int(self, rrkwargs, name, value, **kwargs): + rrkwargs[name.lower()] = int(value) + + def _handle_int_list(self, rrkwargs, name, value, **kwargs): + rrkwargs[name.lower()] = [int(x) for x in value.split(',')] + + _handle_INTERVAL = _handle_int + _handle_COUNT = _handle_int + _handle_BYSETPOS = _handle_int_list + _handle_BYMONTH = _handle_int_list + _handle_BYMONTHDAY = _handle_int_list + _handle_BYYEARDAY = _handle_int_list + _handle_BYEASTER = _handle_int_list + _handle_BYWEEKNO = _handle_int_list + _handle_BYHOUR = _handle_int_list + _handle_BYMINUTE = _handle_int_list + _handle_BYSECOND = _handle_int_list + + def _handle_FREQ(self, rrkwargs, name, value, **kwargs): + 
rrkwargs["freq"] = self._freq_map[value] + + def _handle_UNTIL(self, rrkwargs, name, value, **kwargs): + global parser + if not parser: + from dateutil import parser + try: + rrkwargs["until"] = parser.parse(value, + ignoretz=kwargs.get("ignoretz"), + tzinfos=kwargs.get("tzinfos")) + except ValueError: + raise ValueError("invalid until date") + + def _handle_WKST(self, rrkwargs, name, value, **kwargs): + rrkwargs["wkst"] = self._weekday_map[value] + + def _handle_BYWEEKDAY(self, rrkwargs, name, value, **kwargs): + """ + Two ways to specify this: +1MO or MO(+1) + """ + l = [] + for wday in value.split(','): + if '(' in wday: + # If it's of the form TH(+1), etc. + splt = wday.split('(') + w = splt[0] + n = int(splt[1][:-1]) + elif len(wday): + # If it's of the form +1MO + for i in range(len(wday)): + if wday[i] not in '+-0123456789': + break + n = wday[:i] or None + w = wday[i:] + if n: + n = int(n) + else: + raise ValueError("Invalid (empty) BYDAY specification.") + + l.append(weekdays[self._weekday_map[w]](n)) + rrkwargs["byweekday"] = l + + _handle_BYDAY = _handle_BYWEEKDAY + + def _parse_rfc_rrule(self, line, + dtstart=None, + cache=False, + ignoretz=False, + tzinfos=None): + if line.find(':') != -1: + name, value = line.split(':') + if name != "RRULE": + raise ValueError("unknown parameter name") + else: + value = line + rrkwargs = {} + for pair in value.split(';'): + name, value = pair.split('=') + name = name.upper() + value = value.upper() + try: + getattr(self, "_handle_"+name)(rrkwargs, name, value, + ignoretz=ignoretz, + tzinfos=tzinfos) + except AttributeError: + raise ValueError("unknown parameter '%s'" % name) + except (KeyError, ValueError): + raise ValueError("invalid '%s': %s" % (name, value)) + return rrule(dtstart=dtstart, cache=cache, **rrkwargs) + + def _parse_rfc(self, s, + dtstart=None, + cache=False, + unfold=False, + forceset=False, + compatible=False, + ignoretz=False, + tzinfos=None): + global parser + if compatible: + forceset = True + unfold = True + s = s.upper() + if not s.strip(): + raise ValueError("empty string") + if unfold: + lines = s.splitlines() + i = 0 + while i < len(lines): + line = lines[i].rstrip() + if not line: + del lines[i] + elif i > 0 and line[0] == " ": + lines[i-1] += line[1:] + del lines[i] + else: + i += 1 + else: + lines = s.split() + if (not forceset and len(lines) == 1 and (s.find(':') == -1 or + s.startswith('RRULE:'))): + return self._parse_rfc_rrule(lines[0], cache=cache, + dtstart=dtstart, ignoretz=ignoretz, + tzinfos=tzinfos) + else: + rrulevals = [] + rdatevals = [] + exrulevals = [] + exdatevals = [] + for line in lines: + if not line: + continue + if line.find(':') == -1: + name = "RRULE" + value = line + else: + name, value = line.split(':', 1) + parms = name.split(';') + if not parms: + raise ValueError("empty property name") + name = parms[0] + parms = parms[1:] + if name == "RRULE": + for parm in parms: + raise ValueError("unsupported RRULE parm: "+parm) + rrulevals.append(value) + elif name == "RDATE": + for parm in parms: + if parm != "VALUE=DATE-TIME": + raise ValueError("unsupported RDATE parm: "+parm) + rdatevals.append(value) + elif name == "EXRULE": + for parm in parms: + raise ValueError("unsupported EXRULE parm: "+parm) + exrulevals.append(value) + elif name == "EXDATE": + for parm in parms: + if parm != "VALUE=DATE-TIME": + raise ValueError("unsupported EXDATE parm: "+parm) + exdatevals.append(value) + elif name == "DTSTART": + for parm in parms: + raise ValueError("unsupported DTSTART parm: "+parm) + if not 
parser: + from dateutil import parser + dtstart = parser.parse(value, ignoretz=ignoretz, + tzinfos=tzinfos) + else: + raise ValueError("unsupported property: "+name) + if (forceset or len(rrulevals) > 1 or rdatevals + or exrulevals or exdatevals): + if not parser and (rdatevals or exdatevals): + from dateutil import parser + rset = rruleset(cache=cache) + for value in rrulevals: + rset.rrule(self._parse_rfc_rrule(value, dtstart=dtstart, + ignoretz=ignoretz, + tzinfos=tzinfos)) + for value in rdatevals: + for datestr in value.split(','): + rset.rdate(parser.parse(datestr, + ignoretz=ignoretz, + tzinfos=tzinfos)) + for value in exrulevals: + rset.exrule(self._parse_rfc_rrule(value, dtstart=dtstart, + ignoretz=ignoretz, + tzinfos=tzinfos)) + for value in exdatevals: + for datestr in value.split(','): + rset.exdate(parser.parse(datestr, + ignoretz=ignoretz, + tzinfos=tzinfos)) + if compatible and dtstart: + rset.rdate(dtstart) + return rset + else: + return self._parse_rfc_rrule(rrulevals[0], + dtstart=dtstart, + cache=cache, + ignoretz=ignoretz, + tzinfos=tzinfos) + + def __call__(self, s, **kwargs): + return self._parse_rfc(s, **kwargs) + + +rrulestr = _rrulestr() + +# vim:ts=4:sw=4:et diff --git a/env/lib/python3.6/site-packages/dateutil/tz/_common.py b/env/lib/python3.6/site-packages/dateutil/tz/_common.py new file mode 100644 index 0000000..f1cf2af --- /dev/null +++ b/env/lib/python3.6/site-packages/dateutil/tz/_common.py @@ -0,0 +1,394 @@ +from six import PY3 + +from functools import wraps + +from datetime import datetime, timedelta, tzinfo + + +ZERO = timedelta(0) + +__all__ = ['tzname_in_python2', 'enfold'] + + +def tzname_in_python2(namefunc): + """Change unicode output into bytestrings in Python 2 + + tzname() API changed in Python 3. It used to return bytes, but was changed + to unicode strings + """ + def adjust_encoding(*args, **kwargs): + name = namefunc(*args, **kwargs) + if name is not None and not PY3: + name = name.encode() + + return name + + return adjust_encoding + + +# The following is adapted from Alexander Belopolsky's tz library +# https://github.com/abalkin/tz +if hasattr(datetime, 'fold'): + # This is the pre-python 3.6 fold situation + def enfold(dt, fold=1): + """ + Provides a unified interface for assigning the ``fold`` attribute to + datetimes both before and after the implementation of PEP-495. + + :param fold: + The value for the ``fold`` attribute in the returned datetime. This + should be either 0 or 1. + + :return: + Returns an object for which ``getattr(dt, 'fold', 0)`` returns + ``fold`` for all versions of Python. In versions prior to + Python 3.6, this is a ``_DatetimeWithFold`` object, which is a + subclass of :py:class:`datetime.datetime` with the ``fold`` + attribute added, if ``fold`` is 1. + + .. versionadded:: 2.6.0 + """ + return dt.replace(fold=fold) + +else: + class _DatetimeWithFold(datetime): + """ + This is a class designed to provide a PEP 495-compliant interface for + Python versions before 3.6. It is used only for dates in a fold, so + the ``fold`` attribute is fixed at ``1``. + + .. versionadded:: 2.6.0 + """ + __slots__ = () + + @property + def fold(self): + return 1 + + def enfold(dt, fold=1): + """ + Provides a unified interface for assigning the ``fold`` attribute to + datetimes both before and after the implementation of PEP-495. + + :param fold: + The value for the ``fold`` attribute in the returned datetime. This + should be either 0 or 1. 
+ + :return: + Returns an object for which ``getattr(dt, 'fold', 0)`` returns + ``fold`` for all versions of Python. In versions prior to + Python 3.6, this is a ``_DatetimeWithFold`` object, which is a + subclass of :py:class:`datetime.datetime` with the ``fold`` + attribute added, if ``fold`` is 1. + + .. versionadded:: 2.6.0 + """ + if getattr(dt, 'fold', 0) == fold: + return dt + + args = dt.timetuple()[:6] + args += (dt.microsecond, dt.tzinfo) + + if fold: + return _DatetimeWithFold(*args) + else: + return datetime(*args) + + +def _validate_fromutc_inputs(f): + """ + The CPython version of ``fromutc`` checks that the input is a ``datetime`` + object and that ``self`` is attached as its ``tzinfo``. + """ + @wraps(f) + def fromutc(self, dt): + if not isinstance(dt, datetime): + raise TypeError("fromutc() requires a datetime argument") + if dt.tzinfo is not self: + raise ValueError("dt.tzinfo is not self") + + return f(self, dt) + + return fromutc + + +class _tzinfo(tzinfo): + """ + Base class for all ``dateutil`` ``tzinfo`` objects. + """ + + def is_ambiguous(self, dt): + """ + Whether or not the "wall time" of a given datetime is ambiguous in this + zone. + + :param dt: + A :py:class:`datetime.datetime`, naive or time zone aware. + + + :return: + Returns ``True`` if ambiguous, ``False`` otherwise. + + .. versionadded:: 2.6.0 + """ + + dt = dt.replace(tzinfo=self) + + wall_0 = enfold(dt, fold=0) + wall_1 = enfold(dt, fold=1) + + same_offset = wall_0.utcoffset() == wall_1.utcoffset() + same_dt = wall_0.replace(tzinfo=None) == wall_1.replace(tzinfo=None) + + return same_dt and not same_offset + + def _fold_status(self, dt_utc, dt_wall): + """ + Determine the fold status of a "wall" datetime, given a representation + of the same datetime as a (naive) UTC datetime. This is calculated based + on the assumption that ``dt.utcoffset() - dt.dst()`` is constant for all + datetimes, and that this offset is the actual number of hours separating + ``dt_utc`` and ``dt_wall``. + + :param dt_utc: + Representation of the datetime as UTC + + :param dt_wall: + Representation of the datetime as "wall time". This parameter must + either have a `fold` attribute or have a fold-naive + :class:`datetime.tzinfo` attached, otherwise the calculation may + fail. + """ + if self.is_ambiguous(dt_wall): + delta_wall = dt_wall - dt_utc + _fold = int(delta_wall == (dt_utc.utcoffset() - dt_utc.dst())) + else: + _fold = 0 + + return _fold + + def _fold(self, dt): + return getattr(dt, 'fold', 0) + + def _fromutc(self, dt): + """ + Given a timezone-aware datetime in a given timezone, calculates a + timezone-aware datetime in a new timezone. + + Since this is the one time that we *know* we have an unambiguous + datetime object, we take this opportunity to determine whether the + datetime is ambiguous and in a "fold" state (e.g. if it's the first + occurence, chronologically, of the ambiguous datetime). + + :param dt: + A timezone-aware :class:`datetime.datetime` object. + """ + + # Re-implement the algorithm from Python's datetime.py + dtoff = dt.utcoffset() + if dtoff is None: + raise ValueError("fromutc() requires a non-None utcoffset() " + "result") + + # The original datetime.py code assumes that `dst()` defaults to + # zero during ambiguous times. PEP 495 inverts this presumption, so + # for pre-PEP 495 versions of python, we need to tweak the algorithm. 
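+        # (Illustration: in a zone such as US/Eastern, utcoffset() during
+        # DST is -4:00 and dst() is 1:00, so the delta computed below,
+        # utcoffset() - dst(), is the standard offset of -5:00.)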
+ dtdst = dt.dst() + if dtdst is None: + raise ValueError("fromutc() requires a non-None dst() result") + delta = dtoff - dtdst + + dt += delta + # Set fold=1 so we can default to being in the fold for + # ambiguous dates. + dtdst = enfold(dt, fold=1).dst() + if dtdst is None: + raise ValueError("fromutc(): dt.dst gave inconsistent " + "results; cannot convert") + return dt + dtdst + + @_validate_fromutc_inputs + def fromutc(self, dt): + """ + Given a timezone-aware datetime in a given timezone, calculates a + timezone-aware datetime in a new timezone. + + Since this is the one time that we *know* we have an unambiguous + datetime object, we take this opportunity to determine whether the + datetime is ambiguous and in a "fold" state (e.g. if it's the first + occurance, chronologically, of the ambiguous datetime). + + :param dt: + A timezone-aware :class:`datetime.datetime` object. + """ + dt_wall = self._fromutc(dt) + + # Calculate the fold status given the two datetimes. + _fold = self._fold_status(dt, dt_wall) + + # Set the default fold value for ambiguous dates + return enfold(dt_wall, fold=_fold) + + +class tzrangebase(_tzinfo): + """ + This is an abstract base class for time zones represented by an annual + transition into and out of DST. Child classes should implement the following + methods: + + * ``__init__(self, *args, **kwargs)`` + * ``transitions(self, year)`` - this is expected to return a tuple of + datetimes representing the DST on and off transitions in standard + time. + + A fully initialized ``tzrangebase`` subclass should also provide the + following attributes: + * ``hasdst``: Boolean whether or not the zone uses DST. + * ``_dst_offset`` / ``_std_offset``: :class:`datetime.timedelta` objects + representing the respective UTC offsets. + * ``_dst_abbr`` / ``_std_abbr``: Strings representing the timezone short + abbreviations in DST and STD, respectively. + * ``_hasdst``: Whether or not the zone has DST. + + .. versionadded:: 2.6.0 + """ + def __init__(self): + raise NotImplementedError('tzrangebase is an abstract base class') + + def utcoffset(self, dt): + isdst = self._isdst(dt) + + if isdst is None: + return None + elif isdst: + return self._dst_offset + else: + return self._std_offset + + def dst(self, dt): + isdst = self._isdst(dt) + + if isdst is None: + return None + elif isdst: + return self._dst_base_offset + else: + return ZERO + + @tzname_in_python2 + def tzname(self, dt): + if self._isdst(dt): + return self._dst_abbr + else: + return self._std_abbr + + def fromutc(self, dt): + """ Given a datetime in UTC, return local time """ + if not isinstance(dt, datetime): + raise TypeError("fromutc() requires a datetime argument") + + if dt.tzinfo is not self: + raise ValueError("dt.tzinfo is not self") + + # Get transitions - if there are none, fixed offset + transitions = self.transitions(dt.year) + if transitions is None: + return dt + self.utcoffset(dt) + + # Get the transition times in UTC + dston, dstoff = transitions + + dston -= self._std_offset + dstoff -= self._std_offset + + utc_transitions = (dston, dstoff) + dt_utc = dt.replace(tzinfo=None) + + isdst = self._naive_isdst(dt_utc, utc_transitions) + + if isdst: + dt_wall = dt + self._dst_offset + else: + dt_wall = dt + self._std_offset + + _fold = int(not isdst and self.is_ambiguous(dt_wall)) + + return enfold(dt_wall, fold=_fold) + + def is_ambiguous(self, dt): + """ + Whether or not the "wall time" of a given datetime is ambiguous in this + zone. 
+ + :param dt: + A :py:class:`datetime.datetime`, naive or time zone aware. + + + :return: + Returns ``True`` if ambiguous, ``False`` otherwise. + + .. versionadded:: 2.6.0 + """ + if not self.hasdst: + return False + + start, end = self.transitions(dt.year) + + dt = dt.replace(tzinfo=None) + return (end <= dt < end + self._dst_base_offset) + + def _isdst(self, dt): + if not self.hasdst: + return False + elif dt is None: + return None + + transitions = self.transitions(dt.year) + + if transitions is None: + return False + + dt = dt.replace(tzinfo=None) + + isdst = self._naive_isdst(dt, transitions) + + # Handle ambiguous dates + if not isdst and self.is_ambiguous(dt): + return not self._fold(dt) + else: + return isdst + + def _naive_isdst(self, dt, transitions): + dston, dstoff = transitions + + dt = dt.replace(tzinfo=None) + + if dston < dstoff: + isdst = dston <= dt < dstoff + else: + isdst = not dstoff <= dt < dston + + return isdst + + @property + def _dst_base_offset(self): + return self._dst_offset - self._std_offset + + __hash__ = None + + def __ne__(self, other): + return not (self == other) + + def __repr__(self): + return "%s(...)" % self.__class__.__name__ + + __reduce__ = object.__reduce__ + + +def _total_seconds(td): + # Python 2.6 doesn't have a total_seconds() method on timedelta objects + return ((td.seconds + td.days * 86400) * 1000000 + + td.microseconds) // 1000000 + + +_total_seconds = getattr(timedelta, 'total_seconds', _total_seconds) diff --git a/env/lib/python3.6/site-packages/dateutil/tz/tz.py b/env/lib/python3.6/site-packages/dateutil/tz/tz.py new file mode 100644 index 0000000..9468282 --- /dev/null +++ b/env/lib/python3.6/site-packages/dateutil/tz/tz.py @@ -0,0 +1,1511 @@ +# -*- coding: utf-8 -*- +""" +This module offers timezone implementations subclassing the abstract +:py:`datetime.tzinfo` type. There are classes to handle tzfile format files +(usually are in :file:`/etc/localtime`, :file:`/usr/share/zoneinfo`, etc), TZ +environment string (in all known formats), given ranges (with help from +relative deltas), local machine timezone, fixed offset timezone, and UTC +timezone. +""" +import datetime +import struct +import time +import sys +import os +import bisect + +from six import string_types +from ._common import tzname_in_python2, _tzinfo, _total_seconds +from ._common import tzrangebase, enfold +from ._common import _validate_fromutc_inputs + +try: + from .win import tzwin, tzwinlocal +except ImportError: + tzwin = tzwinlocal = None + +ZERO = datetime.timedelta(0) +EPOCH = datetime.datetime.utcfromtimestamp(0) +EPOCHORDINAL = EPOCH.toordinal() + + +class tzutc(datetime.tzinfo): + """ + This is a tzinfo object that represents the UTC time zone. + """ + def utcoffset(self, dt): + return ZERO + + def dst(self, dt): + return ZERO + + @tzname_in_python2 + def tzname(self, dt): + return "UTC" + + def is_ambiguous(self, dt): + """ + Whether or not the "wall time" of a given datetime is ambiguous in this + zone. + + :param dt: + A :py:class:`datetime.datetime`, naive or time zone aware. + + + :return: + Returns ``True`` if ambiguous, ``False`` otherwise. + + .. versionadded:: 2.6.0 + """ + return False + + @_validate_fromutc_inputs + def fromutc(self, dt): + """ + Fast track version of fromutc() returns the original ``dt`` object for + any valid :py:class:`datetime.datetime` object. 
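+        For example (illustrative), if ``UTC = tzutc()`` and ``dt`` is an
+        aware datetime whose ``tzinfo`` is that same ``UTC`` instance, then
+        ``UTC.fromutc(dt)`` returns ``dt`` itself.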
+ """ + return dt + + def __eq__(self, other): + if not isinstance(other, (tzutc, tzoffset)): + return NotImplemented + + return (isinstance(other, tzutc) or + (isinstance(other, tzoffset) and other._offset == ZERO)) + + __hash__ = None + + def __ne__(self, other): + return not (self == other) + + def __repr__(self): + return "%s()" % self.__class__.__name__ + + __reduce__ = object.__reduce__ + + +class tzoffset(datetime.tzinfo): + """ + A simple class for representing a fixed offset from UTC. + + :param name: + The timezone name, to be returned when ``tzname()`` is called. + + :param offset: + The time zone offset in seconds, or (since version 2.6.0, represented + as a :py:class:`datetime.timedelta` object. + """ + def __init__(self, name, offset): + self._name = name + + try: + # Allow a timedelta + offset = _total_seconds(offset) + except (TypeError, AttributeError): + pass + self._offset = datetime.timedelta(seconds=offset) + + def utcoffset(self, dt): + return self._offset + + def dst(self, dt): + return ZERO + + @tzname_in_python2 + def tzname(self, dt): + return self._name + + @_validate_fromutc_inputs + def fromutc(self, dt): + return dt + self._offset + + def is_ambiguous(self, dt): + """ + Whether or not the "wall time" of a given datetime is ambiguous in this + zone. + + :param dt: + A :py:class:`datetime.datetime`, naive or time zone aware. + + + :return: + Returns ``True`` if ambiguous, ``False`` otherwise. + + .. versionadded:: 2.6.0 + """ + return False + + def __eq__(self, other): + if not isinstance(other, tzoffset): + return NotImplemented + + return self._offset == other._offset + + __hash__ = None + + def __ne__(self, other): + return not (self == other) + + def __repr__(self): + return "%s(%s, %s)" % (self.__class__.__name__, + repr(self._name), + int(_total_seconds(self._offset))) + + __reduce__ = object.__reduce__ + + +class tzlocal(_tzinfo): + """ + A :class:`tzinfo` subclass built around the ``time`` timezone functions. + """ + def __init__(self): + super(tzlocal, self).__init__() + + self._std_offset = datetime.timedelta(seconds=-time.timezone) + if time.daylight: + self._dst_offset = datetime.timedelta(seconds=-time.altzone) + else: + self._dst_offset = self._std_offset + + self._dst_saved = self._dst_offset - self._std_offset + self._hasdst = bool(self._dst_saved) + + def utcoffset(self, dt): + if dt is None and self._hasdst: + return None + + if self._isdst(dt): + return self._dst_offset + else: + return self._std_offset + + def dst(self, dt): + if dt is None and self._hasdst: + return None + + if self._isdst(dt): + return self._dst_offset - self._std_offset + else: + return ZERO + + @tzname_in_python2 + def tzname(self, dt): + return time.tzname[self._isdst(dt)] + + def is_ambiguous(self, dt): + """ + Whether or not the "wall time" of a given datetime is ambiguous in this + zone. + + :param dt: + A :py:class:`datetime.datetime`, naive or time zone aware. + + + :return: + Returns ``True`` if ambiguous, ``False`` otherwise. + + .. versionadded:: 2.6.0 + """ + naive_dst = self._naive_is_dst(dt) + return (not naive_dst and + (naive_dst != self._naive_is_dst(dt - self._dst_saved))) + + def _naive_is_dst(self, dt): + timestamp = _datetime_to_timestamp(dt) + return time.localtime(timestamp + time.timezone).tm_isdst + + def _isdst(self, dt, fold_naive=True): + # We can't use mktime here. It is unstable when deciding if + # the hour near to a change is DST or not. 
+ # + # timestamp = time.mktime((dt.year, dt.month, dt.day, dt.hour, + # dt.minute, dt.second, dt.weekday(), 0, -1)) + # return time.localtime(timestamp).tm_isdst + # + # The code above yields the following result: + # + # >>> import tz, datetime + # >>> t = tz.tzlocal() + # >>> datetime.datetime(2003,2,15,23,tzinfo=t).tzname() + # 'BRDT' + # >>> datetime.datetime(2003,2,16,0,tzinfo=t).tzname() + # 'BRST' + # >>> datetime.datetime(2003,2,15,23,tzinfo=t).tzname() + # 'BRST' + # >>> datetime.datetime(2003,2,15,22,tzinfo=t).tzname() + # 'BRDT' + # >>> datetime.datetime(2003,2,15,23,tzinfo=t).tzname() + # 'BRDT' + # + # Here is a more stable implementation: + # + if not self._hasdst: + return False + + # Check for ambiguous times: + dstval = self._naive_is_dst(dt) + fold = getattr(dt, 'fold', None) + + if self.is_ambiguous(dt): + if fold is not None: + return not self._fold(dt) + else: + return True + + return dstval + + def __eq__(self, other): + if not isinstance(other, tzlocal): + return NotImplemented + + return (self._std_offset == other._std_offset and + self._dst_offset == other._dst_offset) + + __hash__ = None + + def __ne__(self, other): + return not (self == other) + + def __repr__(self): + return "%s()" % self.__class__.__name__ + + __reduce__ = object.__reduce__ + + +class _ttinfo(object): + __slots__ = ["offset", "delta", "isdst", "abbr", + "isstd", "isgmt", "dstoffset"] + + def __init__(self): + for attr in self.__slots__: + setattr(self, attr, None) + + def __repr__(self): + l = [] + for attr in self.__slots__: + value = getattr(self, attr) + if value is not None: + l.append("%s=%s" % (attr, repr(value))) + return "%s(%s)" % (self.__class__.__name__, ", ".join(l)) + + def __eq__(self, other): + if not isinstance(other, _ttinfo): + return NotImplemented + + return (self.offset == other.offset and + self.delta == other.delta and + self.isdst == other.isdst and + self.abbr == other.abbr and + self.isstd == other.isstd and + self.isgmt == other.isgmt and + self.dstoffset == other.dstoffset) + + __hash__ = None + + def __ne__(self, other): + return not (self == other) + + def __getstate__(self): + state = {} + for name in self.__slots__: + state[name] = getattr(self, name, None) + return state + + def __setstate__(self, state): + for name in self.__slots__: + if name in state: + setattr(self, name, state[name]) + + +class _tzfile(object): + """ + Lightweight class for holding the relevant transition and time zone + information read from binary tzfiles. + """ + attrs = ['trans_list', 'trans_list_utc', 'trans_idx', 'ttinfo_list', + 'ttinfo_std', 'ttinfo_dst', 'ttinfo_before', 'ttinfo_first'] + + def __init__(self, **kwargs): + for attr in self.attrs: + setattr(self, attr, kwargs.get(attr, None)) + + +class tzfile(_tzinfo): + """ + This is a ``tzinfo`` subclass thant allows one to use the ``tzfile(5)`` + format timezone files to extract current and historical zone information. + + :param fileobj: + This can be an opened file stream or a file name that the time zone + information can be read from. + + :param filename: + This is an optional parameter specifying the source of the time zone + information in the event that ``fileobj`` is a file object. If omitted + and ``fileobj`` is a file stream, this parameter will be set either to + ``fileobj``'s ``name`` attribute or to ``repr(fileobj)``. + + See `Sources for Time Zone and Daylight Saving Time Data + `_ for more information. 
Time zone + files can be compiled from the `IANA Time Zone database files + `_ with the `zic time zone compiler + `_ + """ + + def __init__(self, fileobj, filename=None): + super(tzfile, self).__init__() + + file_opened_here = False + if isinstance(fileobj, string_types): + self._filename = fileobj + fileobj = open(fileobj, 'rb') + file_opened_here = True + elif filename is not None: + self._filename = filename + elif hasattr(fileobj, "name"): + self._filename = fileobj.name + else: + self._filename = repr(fileobj) + + if fileobj is not None: + if not file_opened_here: + fileobj = _ContextWrapper(fileobj) + + with fileobj as file_stream: + tzobj = self._read_tzfile(file_stream) + + self._set_tzdata(tzobj) + + def _set_tzdata(self, tzobj): + """ Set the time zone data of this object from a _tzfile object """ + # Copy the relevant attributes over as private attributes + for attr in _tzfile.attrs: + setattr(self, '_' + attr, getattr(tzobj, attr)) + + def _read_tzfile(self, fileobj): + out = _tzfile() + + # From tzfile(5): + # + # The time zone information files used by tzset(3) + # begin with the magic characters "TZif" to identify + # them as time zone information files, followed by + # sixteen bytes reserved for future use, followed by + # six four-byte values of type long, written in a + # ``standard'' byte order (the high-order byte + # of the value is written first). + if fileobj.read(4).decode() != "TZif": + raise ValueError("magic not found") + + fileobj.read(16) + + ( + # The number of UTC/local indicators stored in the file. + ttisgmtcnt, + + # The number of standard/wall indicators stored in the file. + ttisstdcnt, + + # The number of leap seconds for which data is + # stored in the file. + leapcnt, + + # The number of "transition times" for which data + # is stored in the file. + timecnt, + + # The number of "local time types" for which data + # is stored in the file (must not be zero). + typecnt, + + # The number of characters of "time zone + # abbreviation strings" stored in the file. + charcnt, + + ) = struct.unpack(">6l", fileobj.read(24)) + + # The above header is followed by tzh_timecnt four-byte + # values of type long, sorted in ascending order. + # These values are written in ``standard'' byte order. + # Each is used as a transition time (as returned by + # time(2)) at which the rules for computing local time + # change. + + if timecnt: + out.trans_list_utc = list(struct.unpack(">%dl" % timecnt, + fileobj.read(timecnt*4))) + else: + out.trans_list_utc = [] + + # Next come tzh_timecnt one-byte values of type unsigned + # char; each one tells which of the different types of + # ``local time'' types described in the file is associated + # with the same-indexed transition time. These values + # serve as indices into an array of ttinfo structures that + # appears next in the file. + + if timecnt: + out.trans_idx = struct.unpack(">%dB" % timecnt, + fileobj.read(timecnt)) + else: + out.trans_idx = [] + + # Each ttinfo structure is written as a four-byte value + # for tt_gmtoff of type long, in a standard byte + # order, followed by a one-byte value for tt_isdst + # and a one-byte value for tt_abbrind. In each + # structure, tt_gmtoff gives the number of + # seconds to be added to UTC, tt_isdst tells whether + # tm_isdst should be set by localtime(3), and + # tt_abbrind serves as an index into the array of + # time zone abbreviation characters that follow the + # ttinfo structure(s) in the file. 
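+        # Each ttinfo record is therefore 6 bytes: ">lbb" unpacks a
+        # big-endian four-byte signed tt_gmtoff followed by the one-byte
+        # tt_isdst and tt_abbrind fields.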
+ + ttinfo = [] + + for i in range(typecnt): + ttinfo.append(struct.unpack(">lbb", fileobj.read(6))) + + abbr = fileobj.read(charcnt).decode() + + # Then there are tzh_leapcnt pairs of four-byte + # values, written in standard byte order; the + # first value of each pair gives the time (as + # returned by time(2)) at which a leap second + # occurs; the second gives the total number of + # leap seconds to be applied after the given time. + # The pairs of values are sorted in ascending order + # by time. + + # Not used, for now (but seek for correct file position) + if leapcnt: + fileobj.seek(leapcnt * 8, os.SEEK_CUR) + + # Then there are tzh_ttisstdcnt standard/wall + # indicators, each stored as a one-byte value; + # they tell whether the transition times associated + # with local time types were specified as standard + # time or wall clock time, and are used when + # a time zone file is used in handling POSIX-style + # time zone environment variables. + + if ttisstdcnt: + isstd = struct.unpack(">%db" % ttisstdcnt, + fileobj.read(ttisstdcnt)) + + # Finally, there are tzh_ttisgmtcnt UTC/local + # indicators, each stored as a one-byte value; + # they tell whether the transition times associated + # with local time types were specified as UTC or + # local time, and are used when a time zone file + # is used in handling POSIX-style time zone envi- + # ronment variables. + + if ttisgmtcnt: + isgmt = struct.unpack(">%db" % ttisgmtcnt, + fileobj.read(ttisgmtcnt)) + + # Build ttinfo list + out.ttinfo_list = [] + for i in range(typecnt): + gmtoff, isdst, abbrind = ttinfo[i] + # Round to full-minutes if that's not the case. Python's + # datetime doesn't accept sub-minute timezones. Check + # http://python.org/sf/1447945 for some information. + gmtoff = 60 * ((gmtoff + 30) // 60) + tti = _ttinfo() + tti.offset = gmtoff + tti.dstoffset = datetime.timedelta(0) + tti.delta = datetime.timedelta(seconds=gmtoff) + tti.isdst = isdst + tti.abbr = abbr[abbrind:abbr.find('\x00', abbrind)] + tti.isstd = (ttisstdcnt > i and isstd[i] != 0) + tti.isgmt = (ttisgmtcnt > i and isgmt[i] != 0) + out.ttinfo_list.append(tti) + + # Replace ttinfo indexes for ttinfo objects. + out.trans_idx = [out.ttinfo_list[idx] for idx in out.trans_idx] + + # Set standard, dst, and before ttinfos. before will be + # used when a given time is before any transitions, + # and will be set to the first non-dst ttinfo, or to + # the first dst, if all of them are dst. + out.ttinfo_std = None + out.ttinfo_dst = None + out.ttinfo_before = None + if out.ttinfo_list: + if not out.trans_list_utc: + out.ttinfo_std = out.ttinfo_first = out.ttinfo_list[0] + else: + for i in range(timecnt-1, -1, -1): + tti = out.trans_idx[i] + if not out.ttinfo_std and not tti.isdst: + out.ttinfo_std = tti + elif not out.ttinfo_dst and tti.isdst: + out.ttinfo_dst = tti + + if out.ttinfo_std and out.ttinfo_dst: + break + else: + if out.ttinfo_dst and not out.ttinfo_std: + out.ttinfo_std = out.ttinfo_dst + + for tti in out.ttinfo_list: + if not tti.isdst: + out.ttinfo_before = tti + break + else: + out.ttinfo_before = out.ttinfo_list[0] + + # Now fix transition times to become relative to wall time. + # + # I'm not sure about this. In my tests, the tz source file + # is setup to wall time, and in the binary file isstd and + # isgmt are off, so it should be in wall time. OTOH, it's + # always in gmt time. Let me know if you have comments + # about this. 
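+        # Concretely: walk the transitions in order, remembering the most
+        # recent standard (non-DST) offset; each UTC transition time is
+        # shifted by that offset to obtain a wall-time transition, and DST
+        # entries record their extra dstoffset relative to it.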
+ laststdoffset = None + out.trans_list = [] + for i, tti in enumerate(out.trans_idx): + if not tti.isdst: + offset = tti.offset + laststdoffset = offset + else: + if laststdoffset is not None: + # Store the DST offset as well and update it in the list + tti.dstoffset = tti.offset - laststdoffset + out.trans_idx[i] = tti + + offset = laststdoffset or 0 + + out.trans_list.append(out.trans_list_utc[i] + offset) + + # In case we missed any DST offsets on the way in for some reason, make + # a second pass over the list, looking for the /next/ DST offset. + laststdoffset = None + for i in reversed(range(len(out.trans_idx))): + tti = out.trans_idx[i] + if tti.isdst: + if not (tti.dstoffset or laststdoffset is None): + tti.dstoffset = tti.offset - laststdoffset + else: + laststdoffset = tti.offset + + if not isinstance(tti.dstoffset, datetime.timedelta): + tti.dstoffset = datetime.timedelta(seconds=tti.dstoffset) + + out.trans_idx[i] = tti + + out.trans_idx = tuple(out.trans_idx) + out.trans_list = tuple(out.trans_list) + out.trans_list_utc = tuple(out.trans_list_utc) + + return out + + def _find_last_transition(self, dt, in_utc=False): + # If there's no list, there are no transitions to find + if not self._trans_list: + return None + + timestamp = _datetime_to_timestamp(dt) + + # Find where the timestamp fits in the transition list - if the + # timestamp is a transition time, it's part of the "after" period. + trans_list = self._trans_list_utc if in_utc else self._trans_list + idx = bisect.bisect_right(trans_list, timestamp) + + # We want to know when the previous transition was, so subtract off 1 + return idx - 1 + + def _get_ttinfo(self, idx): + # For no list or after the last transition, default to _ttinfo_std + if idx is None or (idx + 1) >= len(self._trans_list): + return self._ttinfo_std + + # If there is a list and the time is before it, return _ttinfo_before + if idx < 0: + return self._ttinfo_before + + return self._trans_idx[idx] + + def _find_ttinfo(self, dt): + idx = self._resolve_ambiguous_time(dt) + + return self._get_ttinfo(idx) + + def fromutc(self, dt): + """ + The ``tzfile`` implementation of :py:func:`datetime.tzinfo.fromutc`. + + :param dt: + A :py:class:`datetime.datetime` object. + + :raises TypeError: + Raised if ``dt`` is not a :py:class:`datetime.datetime` object. + + :raises ValueError: + Raised if this is called with a ``dt`` which does not have this + ``tzinfo`` attached. + + :return: + Returns a :py:class:`datetime.datetime` object representing the + wall time in ``self``'s time zone. + """ + # These isinstance checks are in datetime.tzinfo, so we'll preserve + # them, even if we don't care about duck typing. + if not isinstance(dt, datetime.datetime): + raise TypeError("fromutc() requires a datetime argument") + + if dt.tzinfo is not self: + raise ValueError("dt.tzinfo is not self") + + # First treat UTC as wall time and get the transition we're in. + idx = self._find_last_transition(dt, in_utc=True) + tti = self._get_ttinfo(idx) + + dt_out = dt + datetime.timedelta(seconds=tti.offset) + + fold = self.is_ambiguous(dt_out, idx=idx) + + return enfold(dt_out, fold=int(fold)) + + def is_ambiguous(self, dt, idx=None): + """ + Whether or not the "wall time" of a given datetime is ambiguous in this + zone. + + :param dt: + A :py:class:`datetime.datetime`, naive or time zone aware. + + + :return: + Returns ``True`` if ambiguous, ``False`` otherwise. + + .. 
versionadded:: 2.6.0 + """ + if idx is None: + idx = self._find_last_transition(dt) + + # Calculate the difference in offsets from current to previous + timestamp = _datetime_to_timestamp(dt) + tti = self._get_ttinfo(idx) + + if idx is None or idx <= 0: + return False + + od = self._get_ttinfo(idx - 1).offset - tti.offset + tt = self._trans_list[idx] # Transition time + + return timestamp < tt + od + + def _resolve_ambiguous_time(self, dt): + idx = self._find_last_transition(dt) + + # If we have no transitions, return the index + _fold = self._fold(dt) + if idx is None or idx == 0: + return idx + + # If it's ambiguous and we're in a fold, shift to a different index. + idx_offset = int(not _fold and self.is_ambiguous(dt, idx)) + + return idx - idx_offset + + def utcoffset(self, dt): + if dt is None: + return None + + if not self._ttinfo_std: + return ZERO + + return self._find_ttinfo(dt).delta + + def dst(self, dt): + if dt is None: + return None + + if not self._ttinfo_dst: + return ZERO + + tti = self._find_ttinfo(dt) + + if not tti.isdst: + return ZERO + + # The documentation says that utcoffset()-dst() must + # be constant for every dt. + return tti.dstoffset + + @tzname_in_python2 + def tzname(self, dt): + if not self._ttinfo_std or dt is None: + return None + return self._find_ttinfo(dt).abbr + + def __eq__(self, other): + if not isinstance(other, tzfile): + return NotImplemented + return (self._trans_list == other._trans_list and + self._trans_idx == other._trans_idx and + self._ttinfo_list == other._ttinfo_list) + + __hash__ = None + + def __ne__(self, other): + return not (self == other) + + def __repr__(self): + return "%s(%s)" % (self.__class__.__name__, repr(self._filename)) + + def __reduce__(self): + return self.__reduce_ex__(None) + + def __reduce_ex__(self, protocol): + return (self.__class__, (None, self._filename), self.__dict__) + + +class tzrange(tzrangebase): + """ + The ``tzrange`` object is a time zone specified by a set of offsets and + abbreviations, equivalent to the way the ``TZ`` variable can be specified + in POSIX-like systems, but using Python delta objects to specify DST + start, end and offsets. + + :param stdabbr: + The abbreviation for standard time (e.g. ``'EST'``). + + :param stdoffset: + An integer or :class:`datetime.timedelta` object or equivalent + specifying the base offset from UTC. + + If unspecified, +00:00 is used. + + :param dstabbr: + The abbreviation for DST / "Summer" time (e.g. ``'EDT'``). + + If specified, with no other DST information, DST is assumed to occur + and the default behavior or ``dstoffset``, ``start`` and ``end`` is + used. If unspecified and no other DST information is specified, it + is assumed that this zone has no DST. + + If this is unspecified and other DST information is *is* specified, + DST occurs in the zone but the time zone abbreviation is left + unchanged. + + :param dstoffset: + A an integer or :class:`datetime.timedelta` object or equivalent + specifying the UTC offset during DST. If unspecified and any other DST + information is specified, it is assumed to be the STD offset +1 hour. + + :param start: + A :class:`relativedelta.relativedelta` object or equivalent specifying + the time and time of year that daylight savings time starts. To specify, + for example, that DST starts at 2AM on the 2nd Sunday in March, pass: + + ``relativedelta(hours=2, month=3, day=1, weekday=SU(+2))`` + + If unspecified and any other DST information is specified, the default + value is 2 AM on the first Sunday in April. 
+ + :param end: + A :class:`relativedelta.relativedelta` object or equivalent representing + the time and time of year that daylight savings time ends, with the + same specification method as in ``start``. One note is that this should + point to the first time in the *standard* zone, so if a transition + occurs at 2AM in the DST zone and the clocks are set back 1 hour to 1AM, + set the `hours` parameter to +1. + + + **Examples:** + + .. testsetup:: tzrange + + from dateutil.tz import tzrange, tzstr + + .. doctest:: tzrange + + >>> tzstr('EST5EDT') == tzrange("EST", -18000, "EDT") + True + + >>> from dateutil.relativedelta import * + >>> range1 = tzrange("EST", -18000, "EDT") + >>> range2 = tzrange("EST", -18000, "EDT", -14400, + ... relativedelta(hours=+2, month=4, day=1, + ... weekday=SU(+1)), + ... relativedelta(hours=+1, month=10, day=31, + ... weekday=SU(-1))) + >>> tzstr('EST5EDT') == range1 == range2 + True + + """ + def __init__(self, stdabbr, stdoffset=None, + dstabbr=None, dstoffset=None, + start=None, end=None): + + global relativedelta + from dateutil import relativedelta + + self._std_abbr = stdabbr + self._dst_abbr = dstabbr + + try: + stdoffset = _total_seconds(stdoffset) + except (TypeError, AttributeError): + pass + + try: + dstoffset = _total_seconds(dstoffset) + except (TypeError, AttributeError): + pass + + if stdoffset is not None: + self._std_offset = datetime.timedelta(seconds=stdoffset) + else: + self._std_offset = ZERO + + if dstoffset is not None: + self._dst_offset = datetime.timedelta(seconds=dstoffset) + elif dstabbr and stdoffset is not None: + self._dst_offset = self._std_offset + datetime.timedelta(hours=+1) + else: + self._dst_offset = ZERO + + if dstabbr and start is None: + self._start_delta = relativedelta.relativedelta( + hours=+2, month=4, day=1, weekday=relativedelta.SU(+1)) + else: + self._start_delta = start + + if dstabbr and end is None: + self._end_delta = relativedelta.relativedelta( + hours=+1, month=10, day=31, weekday=relativedelta.SU(-1)) + else: + self._end_delta = end + + self._dst_base_offset_ = self._dst_offset - self._std_offset + self.hasdst = bool(self._start_delta) + + def transitions(self, year): + """ + For a given year, get the DST on and off transition times, expressed + always on the standard time side. For zones with no transitions, this + function returns ``None``. + + :param year: + The year whose transitions you would like to query. + + :return: + Returns a :class:`tuple` of :class:`datetime.datetime` objects, + ``(dston, dstoff)`` for zones with an annual DST transition, or + ``None`` for fixed offset zones. + """ + if not self.hasdst: + return None + + base_year = datetime.datetime(year, 1, 1) + + start = base_year + self._start_delta + end = base_year + self._end_delta + + return (start, end) + + def __eq__(self, other): + if not isinstance(other, tzrange): + return NotImplemented + + return (self._std_abbr == other._std_abbr and + self._dst_abbr == other._dst_abbr and + self._std_offset == other._std_offset and + self._dst_offset == other._dst_offset and + self._start_delta == other._start_delta and + self._end_delta == other._end_delta) + + @property + def _dst_base_offset(self): + return self._dst_base_offset_ + + +class tzstr(tzrange): + """ + ``tzstr`` objects are time zone objects specified by a time-zone string as + it would be passed to a ``TZ`` variable on POSIX-style systems (see + the `GNU C Library: TZ Variable`_ for more details). 
+ + There is one notable exception, which is that POSIX-style time zones use an + inverted offset format, so normally ``GMT+3`` would be parsed as an offset + 3 hours *behind* GMT. The ``tzstr`` time zone object will parse this as an + offset 3 hours *ahead* of GMT. If you would like to maintain the POSIX + behavior, pass a ``True`` value to ``posix_offset``. + + The :class:`tzrange` object provides the same functionality, but is + specified using :class:`relativedelta.relativedelta` objects. rather than + strings. + + :param s: + A time zone string in ``TZ`` variable format. This can be a + :class:`bytes` (2.x: :class:`str`), :class:`str` (2.x: :class:`unicode`) + or a stream emitting unicode characters (e.g. :class:`StringIO`). + + :param posix_offset: + Optional. If set to ``True``, interpret strings such as ``GMT+3`` or + ``UTC+3`` as being 3 hours *behind* UTC rather than ahead, per the + POSIX standard. + + .. _`GNU C Library: TZ Variable`: + https://www.gnu.org/software/libc/manual/html_node/TZ-Variable.html + """ + def __init__(self, s, posix_offset=False): + global parser + from dateutil import parser + + self._s = s + + res = parser._parsetz(s) + if res is None: + raise ValueError("unknown string format") + + # Here we break the compatibility with the TZ variable handling. + # GMT-3 actually *means* the timezone -3. + if res.stdabbr in ("GMT", "UTC") and not posix_offset: + res.stdoffset *= -1 + + # We must initialize it first, since _delta() needs + # _std_offset and _dst_offset set. Use False in start/end + # to avoid building it two times. + tzrange.__init__(self, res.stdabbr, res.stdoffset, + res.dstabbr, res.dstoffset, + start=False, end=False) + + if not res.dstabbr: + self._start_delta = None + self._end_delta = None + else: + self._start_delta = self._delta(res.start) + if self._start_delta: + self._end_delta = self._delta(res.end, isend=1) + + self.hasdst = bool(self._start_delta) + + def _delta(self, x, isend=0): + from dateutil import relativedelta + kwargs = {} + if x.month is not None: + kwargs["month"] = x.month + if x.weekday is not None: + kwargs["weekday"] = relativedelta.weekday(x.weekday, x.week) + if x.week > 0: + kwargs["day"] = 1 + else: + kwargs["day"] = 31 + elif x.day: + kwargs["day"] = x.day + elif x.yday is not None: + kwargs["yearday"] = x.yday + elif x.jyday is not None: + kwargs["nlyearday"] = x.jyday + if not kwargs: + # Default is to start on first sunday of april, and end + # on last sunday of october. + if not isend: + kwargs["month"] = 4 + kwargs["day"] = 1 + kwargs["weekday"] = relativedelta.SU(+1) + else: + kwargs["month"] = 10 + kwargs["day"] = 31 + kwargs["weekday"] = relativedelta.SU(-1) + if x.time is not None: + kwargs["seconds"] = x.time + else: + # Default is 2AM. + kwargs["seconds"] = 7200 + if isend: + # Convert to standard time, to follow the documented way + # of working with the extra hour. See the documentation + # of the tzinfo class. 
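+ #
+ # Worked example: for a typical US-style rule such as EST5EDT
+ # (std -5h, dst -4h) the delta below is one hour, so a default end
+ # time of 7200 seconds (2 AM on the DST clock) is stored as
+ # 7200 - 3600 = 3600 seconds, i.e. 1 AM on the standard clock.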
+ delta = self._dst_offset - self._std_offset + kwargs["seconds"] -= delta.seconds + delta.days * 86400 + return relativedelta.relativedelta(**kwargs) + + def __repr__(self): + return "%s(%s)" % (self.__class__.__name__, repr(self._s)) + + +class _tzicalvtzcomp(object): + def __init__(self, tzoffsetfrom, tzoffsetto, isdst, + tzname=None, rrule=None): + self.tzoffsetfrom = datetime.timedelta(seconds=tzoffsetfrom) + self.tzoffsetto = datetime.timedelta(seconds=tzoffsetto) + self.tzoffsetdiff = self.tzoffsetto - self.tzoffsetfrom + self.isdst = isdst + self.tzname = tzname + self.rrule = rrule + + +class _tzicalvtz(_tzinfo): + def __init__(self, tzid, comps=[]): + super(_tzicalvtz, self).__init__() + + self._tzid = tzid + self._comps = comps + self._cachedate = [] + self._cachecomp = [] + + def _find_comp(self, dt): + if len(self._comps) == 1: + return self._comps[0] + + dt = dt.replace(tzinfo=None) + + try: + return self._cachecomp[self._cachedate.index((dt, self._fold(dt)))] + except ValueError: + pass + + lastcompdt = None + lastcomp = None + + for comp in self._comps: + compdt = self._find_compdt(comp, dt) + + if compdt and (not lastcompdt or lastcompdt < compdt): + lastcompdt = compdt + lastcomp = comp + + if not lastcomp: + # RFC says nothing about what to do when a given + # time is before the first onset date. We'll look for the + # first standard component, or the first component, if + # none is found. + for comp in self._comps: + if not comp.isdst: + lastcomp = comp + break + else: + lastcomp = comp[0] + + self._cachedate.insert(0, (dt, self._fold(dt))) + self._cachecomp.insert(0, lastcomp) + + if len(self._cachedate) > 10: + self._cachedate.pop() + self._cachecomp.pop() + + return lastcomp + + def _find_compdt(self, comp, dt): + if comp.tzoffsetdiff < ZERO and self._fold(dt): + dt -= comp.tzoffsetdiff + + compdt = comp.rrule.before(dt, inc=True) + + return compdt + + def utcoffset(self, dt): + if dt is None: + return None + + return self._find_comp(dt).tzoffsetto + + def dst(self, dt): + comp = self._find_comp(dt) + if comp.isdst: + return comp.tzoffsetdiff + else: + return ZERO + + @tzname_in_python2 + def tzname(self, dt): + return self._find_comp(dt).tzname + + def __repr__(self): + return "" % repr(self._tzid) + + __reduce__ = object.__reduce__ + + +class tzical(object): + """ + This object is designed to parse an iCalendar-style ``VTIMEZONE`` structure + as set out in `RFC 2445`_ Section 4.6.5 into one or more `tzinfo` objects. + + :param `fileobj`: + A file or stream in iCalendar format, which should be UTF-8 encoded + with CRLF endings. + + .. _`RFC 2445`: https://www.ietf.org/rfc/rfc2445.txt + """ + def __init__(self, fileobj): + global rrule + from dateutil import rrule + + if isinstance(fileobj, string_types): + self._s = fileobj + # ical should be encoded in UTF-8 with CRLF + fileobj = open(fileobj, 'r') + else: + self._s = getattr(fileobj, 'name', repr(fileobj)) + fileobj = _ContextWrapper(fileobj) + + self._vtz = {} + + with fileobj as fobj: + self._parse_rfc(fobj.read()) + + def keys(self): + """ + Retrieves the available time zones as a list. + """ + return list(self._vtz.keys()) + + def get(self, tzid=None): + """ + Retrieve a :py:class:`datetime.tzinfo` object by its ``tzid``. + + :param tzid: + If there is exactly one time zone available, omitting ``tzid`` + or passing :py:const:`None` value returns it. Otherwise a valid + key (which can be retrieved from :func:`keys`) is required. 
+ + :raises ValueError: + Raised if ``tzid`` is not specified but there are either more + or fewer than 1 zone defined. + + :returns: + Returns either a :py:class:`datetime.tzinfo` object representing + the relevant time zone or :py:const:`None` if the ``tzid`` was + not found. + """ + if tzid is None: + if len(self._vtz) == 0: + raise ValueError("no timezones defined") + elif len(self._vtz) > 1: + raise ValueError("more than one timezone available") + tzid = next(iter(self._vtz)) + + return self._vtz.get(tzid) + + def _parse_offset(self, s): + s = s.strip() + if not s: + raise ValueError("empty offset") + if s[0] in ('+', '-'): + signal = (-1, +1)[s[0] == '+'] + s = s[1:] + else: + signal = +1 + if len(s) == 4: + return (int(s[:2]) * 3600 + int(s[2:]) * 60) * signal + elif len(s) == 6: + return (int(s[:2]) * 3600 + int(s[2:4]) * 60 + int(s[4:])) * signal + else: + raise ValueError("invalid offset: " + s) + + def _parse_rfc(self, s): + lines = s.splitlines() + if not lines: + raise ValueError("empty string") + + # Unfold + i = 0 + while i < len(lines): + line = lines[i].rstrip() + if not line: + del lines[i] + elif i > 0 and line[0] == " ": + lines[i-1] += line[1:] + del lines[i] + else: + i += 1 + + tzid = None + comps = [] + invtz = False + comptype = None + for line in lines: + if not line: + continue + name, value = line.split(':', 1) + parms = name.split(';') + if not parms: + raise ValueError("empty property name") + name = parms[0].upper() + parms = parms[1:] + if invtz: + if name == "BEGIN": + if value in ("STANDARD", "DAYLIGHT"): + # Process component + pass + else: + raise ValueError("unknown component: "+value) + comptype = value + founddtstart = False + tzoffsetfrom = None + tzoffsetto = None + rrulelines = [] + tzname = None + elif name == "END": + if value == "VTIMEZONE": + if comptype: + raise ValueError("component not closed: "+comptype) + if not tzid: + raise ValueError("mandatory TZID not found") + if not comps: + raise ValueError( + "at least one component is needed") + # Process vtimezone + self._vtz[tzid] = _tzicalvtz(tzid, comps) + invtz = False + elif value == comptype: + if not founddtstart: + raise ValueError("mandatory DTSTART not found") + if tzoffsetfrom is None: + raise ValueError( + "mandatory TZOFFSETFROM not found") + if tzoffsetto is None: + raise ValueError( + "mandatory TZOFFSETFROM not found") + # Process component + rr = None + if rrulelines: + rr = rrule.rrulestr("\n".join(rrulelines), + compatible=True, + ignoretz=True, + cache=True) + comp = _tzicalvtzcomp(tzoffsetfrom, tzoffsetto, + (comptype == "DAYLIGHT"), + tzname, rr) + comps.append(comp) + comptype = None + else: + raise ValueError("invalid component end: "+value) + elif comptype: + if name == "DTSTART": + rrulelines.append(line) + founddtstart = True + elif name in ("RRULE", "RDATE", "EXRULE", "EXDATE"): + rrulelines.append(line) + elif name == "TZOFFSETFROM": + if parms: + raise ValueError( + "unsupported %s parm: %s " % (name, parms[0])) + tzoffsetfrom = self._parse_offset(value) + elif name == "TZOFFSETTO": + if parms: + raise ValueError( + "unsupported TZOFFSETTO parm: "+parms[0]) + tzoffsetto = self._parse_offset(value) + elif name == "TZNAME": + if parms: + raise ValueError( + "unsupported TZNAME parm: "+parms[0]) + tzname = value + elif name == "COMMENT": + pass + else: + raise ValueError("unsupported property: "+name) + else: + if name == "TZID": + if parms: + raise ValueError( + "unsupported TZID parm: "+parms[0]) + tzid = value + elif name in ("TZURL", "LAST-MODIFIED", "COMMENT"): + 
pass + else: + raise ValueError("unsupported property: "+name) + elif name == "BEGIN" and value == "VTIMEZONE": + tzid = None + comps = [] + invtz = True + + def __repr__(self): + return "%s(%s)" % (self.__class__.__name__, repr(self._s)) + + +if sys.platform != "win32": + TZFILES = ["/etc/localtime", "localtime"] + TZPATHS = ["/usr/share/zoneinfo", + "/usr/lib/zoneinfo", + "/usr/share/lib/zoneinfo", + "/etc/zoneinfo"] +else: + TZFILES = [] + TZPATHS = [] + + +def gettz(name=None): + tz = None + if not name: + try: + name = os.environ["TZ"] + except KeyError: + pass + if name is None or name == ":": + for filepath in TZFILES: + if not os.path.isabs(filepath): + filename = filepath + for path in TZPATHS: + filepath = os.path.join(path, filename) + if os.path.isfile(filepath): + break + else: + continue + if os.path.isfile(filepath): + try: + tz = tzfile(filepath) + break + except (IOError, OSError, ValueError): + pass + else: + tz = tzlocal() + else: + if name.startswith(":"): + name = name[:-1] + if os.path.isabs(name): + if os.path.isfile(name): + tz = tzfile(name) + else: + tz = None + else: + for path in TZPATHS: + filepath = os.path.join(path, name) + if not os.path.isfile(filepath): + filepath = filepath.replace(' ', '_') + if not os.path.isfile(filepath): + continue + try: + tz = tzfile(filepath) + break + except (IOError, OSError, ValueError): + pass + else: + tz = None + if tzwin is not None: + try: + tz = tzwin(name) + except WindowsError: + tz = None + + if not tz: + from dateutil.zoneinfo import get_zonefile_instance + tz = get_zonefile_instance().get(name) + + if not tz: + for c in name: + # name must have at least one offset to be a tzstr + if c in "0123456789": + try: + tz = tzstr(name) + except ValueError: + pass + break + else: + if name in ("GMT", "UTC"): + tz = tzutc() + elif name in time.tzname: + tz = tzlocal() + return tz + + +def datetime_exists(dt, tz=None): + """ + Given a datetime and a time zone, determine whether or not a given datetime + would fall in a gap. + + :param dt: + A :class:`datetime.datetime` (whose time zone will be ignored if ``tz`` + is provided.) + + :param tz: + A :class:`datetime.tzinfo` with support for the ``fold`` attribute. If + ``None`` or not provided, the datetime's own time zone will be used. + + :return: + Returns a boolean value whether or not the "wall time" exists in ``tz``. + """ + if tz is None: + if dt.tzinfo is None: + raise ValueError('Datetime is naive and no time zone provided.') + tz = dt.tzinfo + + dt = dt.replace(tzinfo=None) + + # This is essentially a test of whether or not the datetime can survive + # a round trip to UTC. + dt_rt = dt.replace(tzinfo=tz).astimezone(tzutc()).astimezone(tz) + dt_rt = dt_rt.replace(tzinfo=None) + + return dt == dt_rt + + +def datetime_ambiguous(dt, tz=None): + """ + Given a datetime and a time zone, determine whether or not a given datetime + is ambiguous (i.e if there are two times differentiated only by their DST + status). + + :param dt: + A :class:`datetime.datetime` (whose time zone will be ignored if ``tz`` + is provided.) + + :param tz: + A :class:`datetime.tzinfo` with support for the ``fold`` attribute. If + ``None`` or not provided, the datetime's own time zone will be used. + + :return: + Returns a boolean value whether or not the "wall time" is ambiguous in + ``tz``. + + .. 
versionadded:: 2.6.0 + """ + if tz is None: + if dt.tzinfo is None: + raise ValueError('Datetime is naive and no time zone provided.') + + tz = dt.tzinfo + + # If a time zone defines its own "is_ambiguous" function, we'll use that. + is_ambiguous_fn = getattr(tz, 'is_ambiguous', None) + if is_ambiguous_fn is not None: + try: + return tz.is_ambiguous(dt) + except: + pass + + # If it doesn't come out and tell us it's ambiguous, we'll just check if + # the fold attribute has any effect on this particular date and time. + dt = dt.replace(tzinfo=tz) + wall_0 = enfold(dt, fold=0) + wall_1 = enfold(dt, fold=1) + + same_offset = wall_0.utcoffset() == wall_1.utcoffset() + same_dst = wall_0.dst() == wall_1.dst() + + return not (same_offset and same_dst) + + +def _datetime_to_timestamp(dt): + """ + Convert a :class:`datetime.datetime` object to an epoch timestamp in seconds + since January 1, 1970, ignoring the time zone. + """ + return _total_seconds((dt.replace(tzinfo=None) - EPOCH)) + + +class _ContextWrapper(object): + """ + Class for wrapping contexts so that they are passed through in a + with statement. + """ + def __init__(self, context): + self.context = context + + def __enter__(self): + return self.context + + def __exit__(*args, **kwargs): + pass + +# vim:ts=4:sw=4:et diff --git a/env/lib/python3.6/site-packages/dateutil/tz/win.py b/env/lib/python3.6/site-packages/dateutil/tz/win.py new file mode 100644 index 0000000..36a1c26 --- /dev/null +++ b/env/lib/python3.6/site-packages/dateutil/tz/win.py @@ -0,0 +1,332 @@ +# This code was originally contributed by Jeffrey Harris. +import datetime +import struct + +from six.moves import winreg +from six import text_type + +try: + import ctypes + from ctypes import wintypes +except ValueError: + # ValueError is raised on non-Windows systems for some horrible reason. + raise ImportError("Running tzwin on non-Windows system") + +from ._common import tzrangebase + +__all__ = ["tzwin", "tzwinlocal", "tzres"] + +ONEWEEK = datetime.timedelta(7) + +TZKEYNAMENT = r"SOFTWARE\Microsoft\Windows NT\CurrentVersion\Time Zones" +TZKEYNAME9X = r"SOFTWARE\Microsoft\Windows\CurrentVersion\Time Zones" +TZLOCALKEYNAME = r"SYSTEM\CurrentControlSet\Control\TimeZoneInformation" + + +def _settzkeyname(): + handle = winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) + try: + winreg.OpenKey(handle, TZKEYNAMENT).Close() + TZKEYNAME = TZKEYNAMENT + except WindowsError: + TZKEYNAME = TZKEYNAME9X + handle.Close() + return TZKEYNAME + + +TZKEYNAME = _settzkeyname() + + +class tzres(object): + """ + Class for accessing `tzres.dll`, which contains timezone name related + resources. + + .. versionadded:: 2.5.0 + """ + p_wchar = ctypes.POINTER(wintypes.WCHAR) # Pointer to a wide char + + def __init__(self, tzres_loc='tzres.dll'): + # Load the user32 DLL so we can load strings from tzres + user32 = ctypes.WinDLL('user32') + + # Specify the LoadStringW function + user32.LoadStringW.argtypes = (wintypes.HINSTANCE, + wintypes.UINT, + wintypes.LPWSTR, + ctypes.c_int) + + self.LoadStringW = user32.LoadStringW + self._tzres = ctypes.WinDLL(tzres_loc) + self.tzres_loc = tzres_loc + + def load_name(self, offset): + """ + Load a timezone name from a DLL offset (integer). + + >>> from dateutil.tzwin import tzres + >>> tzr = tzres() + >>> print(tzr.load_name(112)) + 'Eastern Standard Time' + + :param offset: + A positive integer value referring to a string from the tzres dll. + + ..note: + Offsets found in the registry are generally of the form + `@tzres.dll,-114`. 
The offset in this case if 114, not -114. + + """ + resource = self.p_wchar() + lpBuffer = ctypes.cast(ctypes.byref(resource), wintypes.LPWSTR) + nchar = self.LoadStringW(self._tzres._handle, offset, lpBuffer, 0) + return resource[:nchar] + + def name_from_string(self, tzname_str): + """ + Parse strings as returned from the Windows registry into the time zone + name as defined in the registry. + + >>> from dateutil.tzwin import tzres + >>> tzr = tzres() + >>> print(tzr.name_from_string('@tzres.dll,-251')) + 'Dateline Daylight Time' + >>> print(tzr.name_from_string('Eastern Standard Time')) + 'Eastern Standard Time' + + :param tzname_str: + A timezone name string as returned from a Windows registry key. + + :return: + Returns the localized timezone string from tzres.dll if the string + is of the form `@tzres.dll,-offset`, else returns the input string. + """ + if not tzname_str.startswith('@'): + return tzname_str + + name_splt = tzname_str.split(',-') + try: + offset = int(name_splt[1]) + except: + raise ValueError("Malformed timezone string.") + + return self.load_name(offset) + + +class tzwinbase(tzrangebase): + """tzinfo class based on win32's timezones available in the registry.""" + def __init__(self): + raise NotImplementedError('tzwinbase is an abstract base class') + + def __eq__(self, other): + # Compare on all relevant dimensions, including name. + if not isinstance(other, tzwinbase): + return NotImplemented + + return (self._std_offset == other._std_offset and + self._dst_offset == other._dst_offset and + self._stddayofweek == other._stddayofweek and + self._dstdayofweek == other._dstdayofweek and + self._stdweeknumber == other._stdweeknumber and + self._dstweeknumber == other._dstweeknumber and + self._stdhour == other._stdhour and + self._dsthour == other._dsthour and + self._stdminute == other._stdminute and + self._dstminute == other._dstminute and + self._std_abbr == other._std_abbr and + self._dst_abbr == other._dst_abbr) + + @staticmethod + def list(): + """Return a list of all time zones known to the system.""" + with winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) as handle: + with winreg.OpenKey(handle, TZKEYNAME) as tzkey: + result = [winreg.EnumKey(tzkey, i) + for i in range(winreg.QueryInfoKey(tzkey)[0])] + return result + + def display(self): + return self._display + + def transitions(self, year): + """ + For a given year, get the DST on and off transition times, expressed + always on the standard time side. For zones with no transitions, this + function returns ``None``. + + :param year: + The year whose transitions you would like to query. + + :return: + Returns a :class:`tuple` of :class:`datetime.datetime` objects, + ``(dston, dstoff)`` for zones with an annual DST transition, or + ``None`` for fixed offset zones. 
+ """ + + if not self.hasdst: + return None + + dston = picknthweekday(year, self._dstmonth, self._dstdayofweek, + self._dsthour, self._dstminute, + self._dstweeknumber) + + dstoff = picknthweekday(year, self._stdmonth, self._stddayofweek, + self._stdhour, self._stdminute, + self._stdweeknumber) + + # Ambiguous dates default to the STD side + dstoff -= self._dst_base_offset + + return dston, dstoff + + def _get_hasdst(self): + return self._dstmonth != 0 + + @property + def _dst_base_offset(self): + return self._dst_base_offset_ + + +class tzwin(tzwinbase): + + def __init__(self, name): + self._name = name + + # multiple contexts only possible in 2.7 and 3.1, we still support 2.6 + with winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) as handle: + tzkeyname = text_type("{kn}\\{name}").format(kn=TZKEYNAME, name=name) + with winreg.OpenKey(handle, tzkeyname) as tzkey: + keydict = valuestodict(tzkey) + + self._std_abbr = keydict["Std"] + self._dst_abbr = keydict["Dlt"] + + self._display = keydict["Display"] + + # See http://ww_winreg.jsiinc.com/SUBA/tip0300/rh0398.htm + tup = struct.unpack("=3l16h", keydict["TZI"]) + stdoffset = -tup[0]-tup[1] # Bias + StandardBias * -1 + dstoffset = stdoffset-tup[2] # + DaylightBias * -1 + self._std_offset = datetime.timedelta(minutes=stdoffset) + self._dst_offset = datetime.timedelta(minutes=dstoffset) + + # for the meaning see the win32 TIME_ZONE_INFORMATION structure docs + # http://msdn.microsoft.com/en-us/library/windows/desktop/ms725481(v=vs.85).aspx + (self._stdmonth, + self._stddayofweek, # Sunday = 0 + self._stdweeknumber, # Last = 5 + self._stdhour, + self._stdminute) = tup[4:9] + + (self._dstmonth, + self._dstdayofweek, # Sunday = 0 + self._dstweeknumber, # Last = 5 + self._dsthour, + self._dstminute) = tup[12:17] + + self._dst_base_offset_ = self._dst_offset - self._std_offset + self.hasdst = self._get_hasdst() + + def __repr__(self): + return "tzwin(%s)" % repr(self._name) + + def __reduce__(self): + return (self.__class__, (self._name,)) + + +class tzwinlocal(tzwinbase): + def __init__(self): + with winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) as handle: + with winreg.OpenKey(handle, TZLOCALKEYNAME) as tzlocalkey: + keydict = valuestodict(tzlocalkey) + + self._std_abbr = keydict["StandardName"] + self._dst_abbr = keydict["DaylightName"] + + try: + tzkeyname = text_type('{kn}\\{sn}').format(kn=TZKEYNAME, + sn=self._std_abbr) + with winreg.OpenKey(handle, tzkeyname) as tzkey: + _keydict = valuestodict(tzkey) + self._display = _keydict["Display"] + except OSError: + self._display = None + + stdoffset = -keydict["Bias"]-keydict["StandardBias"] + dstoffset = stdoffset-keydict["DaylightBias"] + + self._std_offset = datetime.timedelta(minutes=stdoffset) + self._dst_offset = datetime.timedelta(minutes=dstoffset) + + # For reasons unclear, in this particular key, the day of week has been + # moved to the END of the SYSTEMTIME structure. 
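+ # The "StandardStart"/"DaylightStart" values unpack into eight
+ # native-order 16-bit integers; indices 1-4 are read below as month,
+ # week-of-month, hour and minute, while the day of week is taken from
+ # index 7 instead of its usual SYSTEMTIME position.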
+ tup = struct.unpack("=8h", keydict["StandardStart"]) + + (self._stdmonth, + self._stdweeknumber, # Last = 5 + self._stdhour, + self._stdminute) = tup[1:5] + + self._stddayofweek = tup[7] + + tup = struct.unpack("=8h", keydict["DaylightStart"]) + + (self._dstmonth, + self._dstweeknumber, # Last = 5 + self._dsthour, + self._dstminute) = tup[1:5] + + self._dstdayofweek = tup[7] + + self._dst_base_offset_ = self._dst_offset - self._std_offset + self.hasdst = self._get_hasdst() + + def __repr__(self): + return "tzwinlocal()" + + def __str__(self): + # str will return the standard name, not the daylight name. + return "tzwinlocal(%s)" % repr(self._std_abbr) + + def __reduce__(self): + return (self.__class__, ()) + + +def picknthweekday(year, month, dayofweek, hour, minute, whichweek): + """ dayofweek == 0 means Sunday, whichweek 5 means last instance """ + first = datetime.datetime(year, month, 1, hour, minute) + + # This will work if dayofweek is ISO weekday (1-7) or Microsoft-style (0-6), + # Because 7 % 7 = 0 + weekdayone = first.replace(day=((dayofweek - first.isoweekday()) % 7) + 1) + wd = weekdayone + ((whichweek - 1) * ONEWEEK) + if (wd.month != month): + wd -= ONEWEEK + + return wd + + +def valuestodict(key): + """Convert a registry key's values to a dictionary.""" + dout = {} + size = winreg.QueryInfoKey(key)[1] + tz_res = None + + for i in range(size): + key_name, value, dtype = winreg.EnumValue(key, i) + if dtype == winreg.REG_DWORD or dtype == winreg.REG_DWORD_LITTLE_ENDIAN: + # If it's a DWORD (32-bit integer), it's stored as unsigned - convert + # that to a proper signed integer + if value & (1 << 31): + value = value - (1 << 32) + elif dtype == winreg.REG_SZ: + # If it's a reference to the tzres DLL, load the actual string + if value.startswith('@tzres'): + tz_res = tz_res or tzres() + value = tz_res.name_from_string(value) + + value = value.rstrip('\x00') # Remove trailing nulls + + dout[key_name] = value + + return dout diff --git a/env/lib/python3.6/site-packages/dateutil/tzwin.py b/env/lib/python3.6/site-packages/dateutil/tzwin.py new file mode 100644 index 0000000..cebc673 --- /dev/null +++ b/env/lib/python3.6/site-packages/dateutil/tzwin.py @@ -0,0 +1,2 @@ +# tzwin has moved to dateutil.tz.win +from .tz.win import * diff --git a/env/lib/python3.6/site-packages/dateutil/zoneinfo/dateutil-zoneinfo.tar.gz b/env/lib/python3.6/site-packages/dateutil/zoneinfo/dateutil-zoneinfo.tar.gz new file mode 100644 index 0000000000000000000000000000000000000000..613c0ff3b4ef910935629a7d145354f40314150c GIT binary patch literal 138881 zcmX`SRa6{Z*R>r&0s#U9_r~4b-95Ow1$VbV65QPh?(QDk-QC^YyT7`h_aFbk9J}_K z*Ic`*PrAB#7h(99FZIM+S|1^%MlNQqE|xa*UiNlomUia$3@%1aAI|h(4bk@(GQU~< zfR?fsl@}Q;v@b>=Cd*;?=Cvv$@`d~>@$V@~;cwyu=)whv;-4&YV5tXSgjYV!gww){ zoqQxA0DDhjv4vcQcEWD^n@C!xp9qFAe zB?k*b?Znm-_6tLmWLIAqOJ|+rz$K>OW|{rjzaKC2IE=nO@b4={2P+iDhDH?qOsaC; zN!~krpI&|QL-EiFBhd&@(ot_dL?Vid{c0j*-r;kHH1#H|o3!7j5{VT8m1vMROE~uhf zPdyc3e{~Ia7annUixhSjUU+%BKFD~|)OFEBpo8NN&Z~;R$HyW-6ZA;oxx3cGTjmsK zR5U$!$HbA$ZdUBM+$6w#eX_=#x{ihRhq&EUKXqHV9Juem3{u{O>D8!Iue9lrNAEdGI1xapoI!Ir?ZXrWnAA=+Z zB+VkAnwXUz&X+e@k?^?Av>;J})Baw!k1vXs9u}vN+h%c>$n(?Ny;TOB=&>s2uyG`m zZpBCE=tk!auQ%^L|1Akar%?(+%uSwoyG`Qo=Sypnx<_8{>+7rOtsug)0oiVM(*n-w zJiAxga#_#AGGbL^ipuWIIXCAo<3oibmsKv7t0GV37>+71$*73u_mdtNfnh?7%j7pD z=*78c+TWUs^mMkg7vP!>u@bbTIL~Mu*$wIp?Gqnj@8jL>B$l>~hU9e{u2K5*6FR|D z42wV1qL~_1R05~oX+ZYI7j8mo4r~M>XsG_PIeO~&zC&w&MjpZoH8XK3mK~{ z>1@la%?%}MA6MaumWSWk*4uUs5A_vRnQ*jfRi8xJ#pTsHylvcG`X(J-I8t`+tT}s* 
zZ1y8-T;3C=0~Bl9XRf#pCI{+T&xN_~DGh~qZ}QPu)|5-<<3I2ZCEe6~i_QwB+C%aV z{T|iM?=Q(bw41wGT$qZ}D6+cJi<9w~04|Z_(xsSaOt~ks7^eN&G~faxx`|?Qj^Ul-#~TLRj$!g5A9)LyfkWX&h3qk^JeQ=e`~ z?l+j**JoXrtxv#Rj`N)gwO2|WAS=VmUp@Ji;O<$V-sG>=vB&yRZ7oCuBeC7|(rdYl zMuN+-M$!sv>vh;Cw};S1heSz@Jw|@ZN=Up{8ejPI!WFr_S6tcuxIMedtnplN)cMdv~iLLcdsB{{6e*{_LA^ zN_UR8$ioxSG^}y27{s%v(TZH*iE8wMWv?R6XVDcX`_g<5Hb)O2RU+bjA6E6=vXj4~ zMS9U(uA`oH?-j%CEF>RE5KWfJbfh7T;1zS&zy=%M!iMPh(tlo-o>#O`=o87lp?px} zs>h$rt?VMNp7A@nL}r53I;861s*3*~_VZL+iZs{y5^soS-BNazH>=``?voi|EeI)j zXhCff^oOJUBG=Qm%zYoAz+EWdlCd7W0|hpqz$&$B(4QRgOFjq#pBv)R#LCV-BpK^O z6fz^kw814L!k0|QTv^&LUb_E^>gN5o%xQi%#Ou#Np?{zd6BN3eSQ0fye(AvrW_^E@ zG{y6M;VQJeLHqACOMlwz^)4<{(@m3u%;0C=-_|mmV#)6)j^y==JfKo`@Q6+UJD!m7)D?7{m*N30Wno{bIB%6bOR?ba4zm0vSw1 z22lUUjWcim>SvNiqaiy2vYDX17}U3i`ioG1;IBR#)R%(-QBVL01*D*WFVrU$MA2M^ z`dQ@B!3cvG$Yq0EImnHH>Kq}^{bi_r9jZTq>akFO6ACCnfpn;j{+vg2gFW}6-D3nx z+^4(s+_FAukW~bA4WVu))P=j*UbItq%gmAh`>t?HtVu`jP)(YJ!2V|Q8EWd*kL-rY zxXy~*UM?Fu3y1bY@owN92FNCAfcW(r17l#SnSj$k%!DE7chKib#ckc(JOz2b0h+xLlel{H3%N3NQs+(dOfkuw;~mYr4a2(eR0xH69W=# zz=Wy;yLr+=vWqSCTj81VCQP}W>BN3>N^>EaXTVP>R=&q$`%lDfw^oCGC{z+UH)i^u zktJN0me_7K&TJm*pR<7d0?i2kGmFGJ#Q!?UaPt4x$9D-nP)k5wO9Yjz!qOLUrzg1W zvx#7);Cn58lL3!hG{p*)ssl)Go=lPK0wKC=+t&Z(n={Y;%hy->o8e!!wph09 z*8j3?;A=xxSn!bTeT##G`V%QzgCj!7 z9`?W9D|5l8!o2Gzx&Vs+0LWJC-DKHUm}9)&A%&;*11sj~b3`yS7TCE(gv}8ACa1v> z>BN7X3(|&&b>P(V_1CJh=7&RM{@oTid3N{z0HRGs4W zX8vYv&OFUblIp|6OriBjJPw``8Rq_qk^kva+o8Ml9z?-r^YA58faj0{(2e}tcXG&Gv@f<1 zEc~FBCXltm1j&5dT>=gu1Z&v~`I^?%rB{<2+%cX3L`lP-VE zPV7NW+E4vIpsF92CjE;ecT0)U4+(WIC-<3C8AmkQpenCD{KO^G>z1l3>WaX}3%1aqlY=tSV zcMcd<+Cx}p2iMQ8XsNwTOh9lg2w2}Z50{`cr||Ji3KfQ9Ha{J?Z{Nmd%24Um8jtk_ zMtN!-xYgAvYn?n6s9k^FHteu!(s>0vGBMWVP)HZCW2qK=Wm1gupu{?!R>_cYDQL;cq`KR@2kbsy|g&9XS~Umf%&7tBSvKRX!P_E9!E?Q!pl z`-UzE@NnWPi23t#-pPIAmxD^Ofk#u&!=6V@;#kyBUX9L9`3O-)?l<~BRmt3Th}?TmA2`tq`_ts;2>&L#i+ z?uE)C$6YpT+O|;n%G1)ks9pFN-b?bAW7Gt-3cS>pP>|nQMgx?w$T4=nAUp z)rsRMk#)zUWeKf)A@}SMcL5wNdQ(zrxwdHNrr|?euThj}!c~!;u*fcN6zmS$2+=;P;Z+NSf(eq@0n1a~-t?*Jbwmsk;>gFP~~J zhl~Z%Kav#rlVzg(}4hpO8{WWu*D`c!dQEXpfj+j#$yS7Q@hM9!^@KOrP&rr1T1 z0bf>{>ZM6TKe^Ke$M&#dZDMA3-FzA^zA$MDAFf}=jXAXkSov<`qYTf(dYQ)HHh%>5 zc{YwKXhq)e#FsRz4L1$n7&OUz4{i%YP+w$YXZF^gogur9Jbe@GLoY}nXo%_)Aa4ss zP+w-_U`Hz|hDLv&7yP@t)UZjWIk+tpK~2lX!G~6S8J<`}FX$_1c-g24E-z^dM^H1e zafqN5t-=$>=>;1E4c|aU4tZN7g8CX8hXh*DGd%Hx9zk>yMXoQyQXj1EgCHhm^E-n+ zOBzmoksiTvefDBF`89fk^i5RovjQTPhG2a^1o1gGzw_v` zwBh9Z^a%Z%s8* zD#x7h$s}h9Lik3ae3;Q^?}ffUg)`VnXMZ*a*DE0E>DbQPM4vSXCvT!hgbSirjAWkuIIJKxo$}i+w^y`_ zP2+HwORTTw@O@IF@YKe$>CY=zdhM79sQ_Rd*)2^%G))@==LmK6xKuZ{zC z#J?LTeXt`}|9TucT#|IbjiIaQ3|~HRT4U7x`yGQUeB#UwaPsJF+ZzUq5(vTsq+L1ts^f^G(LSAMTLG`_r zCUf(hH!*>we(30uDu%w{SJAmzOick~m7lY5|1d=EMJLq>8&m2wnp{bVbwo`?J47Du} zR&;zl*HOMcoq&X*4OF~o1r^3_NQ$--spvN+KrrM)3U77&1|F0_B_4Ko81DdWsC>{d z57h1EK{9MVaGVF5LeZeYejYrBMvWlfg{c*gLH?$2G>D361;_O8fKiFJ&x5r`@PPDh zbW&WgV2TGuX~Y6>fwgEs764e~2%ysU1gybendgtii?M+rXYt^l$P*y??nH|H84cwQ z$dDlb;&&(DS;z@CSCJs4hyqf>(gfT$83Oib#Jqc`6QkT;`&UVRMEvtdY@Sn{t*F|~uIAg(SWklkO-Ee%i&4P#x? 
zz$S_hyQb88rzoa$_On0}72)}f7*LdxED_tDd@T7?9D6(R*iy-?;3iIJNq6GeT*@o* z{W;EpAEl}F&cc^0HP;l%x}-v#{GY!O(_?W5!+N=9t`GAMMjiEeXv6k&!m>zHxwIaN zXj_!72o`KBMEQEP_9gMQo*BFOz|sOq?98fSNlQl|njYt@EzR?V3}M(&wbVdcN~3)` z{=mZPbh^pxFN}M+_RESS>%c<< zG@h#XD3#FKStSpw7>5tJbpv6w&wR zQSbZ|bKh=|p?`%y*1~34D|JA0jx36q?Onn+>f*|X0%Ou?-tEqcg10wMs0C+g5`-Jy3Pc_bQT5HVXwC8~zGf=x z&8zeKbKo!P{QIfAM}L_;mw?_`efs0c9OT>pFcJ6BUVyb zAMsjhGhAGp4snT&R;h2t3{w)TO$y$+m}cDe8bO)=m?6w|j*Rr>pYYGF+p~SVqa=J; zLb%IHrDnPGf%7s$HpAfvQNZQ1v0;9~)O%4USh~gC7{z(O1V=wkzbHoI<0)1m;snbr zN055T29PZ=U^5>coe&J<1JAU=vBnv30+b*KkfQM5=rSH4Wp_YM>j8;i8;I>e`@I3! zNH&1exDIggC{Xlj9(;ue6=gmTkaMlT4AQS~bg&7-1JW2qJa#mYDC7wNcLloit3|%~ zaDr8cIl*4o_m+2E1ipXfK~yL_AQV(U$^>CeJs1u-NT+(?poIfhV7L_m>J-(P`vfdb zh9IR0{XazkWF{QDFChZyUI$W7V4d#`K(fPRZoC0KGp%6wN*myGfTp0?YG_W1rtL2t zJ@~$L7DCVMd7uNcR3EXVLBjZ99uSFGQ9#Lw`TT$ImDmb;*r8Wh0(b(k;Ke*hy*@?d zCriMR!c;V15HFx}`RgDRLh2IiC8O&*paRD3Q9OLZY95UJhf15Wbx8ln6C4sE+`Szj zBT6F)0Ry6T>d#iN3DZ-8>ER$QA@gFu6n_lpp>6|3@G~I_(FS_h{>3CL3Mwn^fYY{v z*LH6pPQ(D9bse6*NveRiCs;EG()JPmB2C=CNE3eoykW?t^B|h}k5YVFx%h z7_jnnpyVmb%JKLAeGVX{g8Dy@bY%%ksc^H|H3JO)i%1953AjC}Fvn}0g{~Gw3$MW| z)Gmm3Ec|MAxy*6uz%E&8Tn9oDle$4&(V}C}L`C!zrOYXhd=S_EMtF4j8>JLR!txC-c^R^WZECV??E0cJ? zm{-j2If|6ESX4x=&RfvZrHO0~)jMfRdlkr56Df35<&>(4tZHk)XOYS!z1wH20ajjro?k}qcza{tgHCgl5G0A>=^3ri< zveAmTYcYMCVVMlsLa<=3a(tqmnq{%?qrzXWsVwX%k1J<~KAN8@mAO#5n)=?}qHee> zS21@2=`3a&DGNVy zB*f2XthQT~>vS-FSZe1!QRxhMY1nqB{7I*EjAh%z&WA)7(^8Y860#-+_-&18_tPt8 z3&)zKQDM|e_Zl?s33t`xx%Zkkum60mD75vJ_&hr~2OWty$08Dsm5Mn5fOj6;fmHVb z9{YrM0r=|A18R{SAn_6oWlK1;D&fSYWH{XQ@Z3Bbz>dojrkZux5F_W#FtmYcsEmUn zwFzG6l1vnj3D{yZ0h$ITAf1EM$iR(d=ikrIwhoQ zkWT`wDM3_Q(}php0+VJd04#67lo=}kG4cd#LBNSr4xCjO!+IQa6cKg;oau^vK%fsK?+K}20G!9bF>J#Mq@=HdDL8qOiaR~Nabq6LLH}~luQ>Ey zq4Do5>cK-o>grvZt2^Kr1___-{0mpa|G^dF-`N(N2SIdeg!uS(0z-obgK!%3sTCqr z9rA402{s8$z|}(EXJSrp`p~B)^w|x4LiE(@fD{55_U{Ca0z%F0Ko$s`dgal;R z>O7K0Vh7~u%mXTmd4R+~oJxy<5Dl}=g}6130=T~zJo12m4Us2EO+sr4Fa~x!=!G%d zYy$`IbTmF9OesMhtKi9ik-;~<19C4yD%lRu{C3;0y$yr`BBrF_l}+MP?6o#vmkh48Vz(w_ z5zn6z^iIeUMTYTu6MA0RzizF!4W+d&!p+Dq630^h0lq+N<)o-&A8F7dUzVf({M4)p z12%u!ot_j~3<5^kDfZOiIJgic_@{a?($wWlJHDD*-BRE4%y3St?8_dU^Wp;)yJ|qt z+wp|Th_Z8OCJzj*1zyI!JyxQpo z5`nl%x`DLqDB~3OJJRCRB)fB~#LE3xUF(?2$yiHhJ??v1UMW`oCQ{iHAiV(2q6;{M z)@|ph_wH&7f+9~VR-FBhXER-Dn_PJB3jrrlvV?*$)6cfnu20>LJGBgU-zec-B#+S( zZ`Lxcys87zB6UAmdF*Q%ctkOuM2?(WeL6K2rxUN$SlIaVq}mHL;G$!BC~?3~I~k>y zUIXUt?Pa=(ff$F!67>2il97*_0OTNu9>>+ZdKlTf4{bUvUb(e>cFf!~4SX` zpLD{miENoY*&pASRQ3rZ^)8v=E-W1L`c$9iJ9b*u(c#ncjr0{JV_N(YN-~-%U^9Bn zI&HF<@dC@)HsA0>Jd zC3*`bDvS~pMTy=2oF#XwJdh&l*=`RVg&_8&_VyMqzXvbpcbJL&Y zev|b5CVc~P;@83Vzh62-hIk#+_1TBW(2<)Z_}7Q?u^7c+<8n_?_S6Bc(-= z(xFJ{QKSqgQpShmqB8&74!nd3rg!~^_3)A)5kin0A$SQPNRJR?VR-NiWvGubv~w7| zmMkeTQ&GDo;c4p%Y)P>T@~g2Q4DxnPund6i8g2V)u=d^{*+%I??uWd-U2NNaluMSG z$*+9#xb15TuYlb>c!%4bWb>q$v{^&ws};ivyyi@g$NHXM?Ju}C3m&pH?h&NmC=3L) z3>=4>6Y1k_qN|Zf0UHU8*kgi@cT}B z(Vb(l!aI30s((Idt~K`GiW+r#eD5(X_b6zrc9APda-n%&Vm5S0;%h3YXu+yl`Q?mr z3cnKa15J6@LWd4%pLFk3aV!?`n0h}lx>IPHzN;l#ENvn!H#sVo9K}t6%B4VYpGW1MM{!?3|AWf?2gOZ^%B4hcUqt0zL~&E0 za;Y9ZW085mD)Ry*^MXz0#dVn%>@qL1b9J)RYaRvF$wx6*dBt2i279vZV%sPQi8$8f zjuV3YsTj*d%zGNKPw;hb0>c1_@Y}@(Qe6mDP%75$FYx`I_W1ovhD>R9AY};%Be5mB z+$$1=L!jq9zXT8x=6#R~=HRZy5;nO6@RUsz6xBWZ4cAoQlEL%H6u5f&ip{~kgzC?= zsofp;W5v>1E#cbFH7bK2mmWmy9X+g6F2de1J$>QGyXpl$*ns-{R~fk^iLXbedS^Sp zGO+tCr{CMWg2(NQ0vkEBTA#e7Zk1do2qnK5@pBBvJtoB=(H|IFa4(Y(M!xvu=qM`J z3fz!GhZ=d7b%Lss6RDR#^-9?DKl=hUSW7JxDa#+Oa?a>06*w;8!@fQ2ZT4qHKq3=f zXztznT6*y~zX-5-N2pK5As>1C&eE_JJ|#cs?ou>RX~rh}PZ__S+to)qwRbknjf}cMsb*mg6?_ z?9`zL)P89UHPx_D`c#LswzZ}+Ti=rwOh)u+V8 
zt#EB1v)8by6<9q>^pK2plJ#TVvI?xs?f2kg1vais>Nhx-dh`1#13}=r@C#eOZH=`I zlo*Ur<=RwEIRz*TLEZeg#Z>#bm3gF9@54n92yK^bq$->DV>>E9jxJC&Kb8~E{?)(H zcI`O`Sl-i%E61k(;q|a6U)n15N=e>>^=x3X-rb;bapiR$!r5u?q)kcSeyV2@ z8e0h70-=%?^`W!TJK-Qec6mcDZUbr3_1>@`dsph=>yC9`6#8=)mopD17ClV*1njc< z+CgH3STHLDf;aSlfO~W!E7->G{H;-o74{w=2b) z^$%cs@MjM@*aoy@rw&!Cm+^dXdi_U6;hOlOp#40vK!_D-$yn_Z=}4O}bflkbQ3tPk z=H;(J4cT*7XV+ZE?l#+MYhBf%c)^TWaZA7JBT#cyrsjI#hhcxmci-JXM|;qlIb8Ww zkI*jVg%hfV@80|qKu-_o6`T~Q|LMA|`sc$2!0i;{i|Xc`Hh66KRT z@oU{!Yn{lQ{UW9vG24l>n%(Wb6TXfFZtdZvg*?Zdx7I!UA>%2+=n+94F^46c+P69f z;gRDLTG=65O@W*q( z)41nUo`xrx=7BpcBSSxgaMy2`t(h>6jHbp4-0q>4^*DXncT{@m*&O+~b6tYyU2w5nO5icg{ndTKcJ-=W` zr8VF^)pDh&U@dQds{fmQz0jk^`a#MIvu67U(k}2>f9u!2pPL2AZgfgnK>8xG>$?_~ z^ZM4gMSsUl3f<-lA{NDRAs?C8#d`Kksp>7KtPe=ae^7f58Rm&gpOYH>E^>q5_KC=8 z+cII8&*mWJyKdLY9Sh@o{qirj5$%{;XK@d?3Tq3+{4a++{FD{H)OA<8^nn{8FPL8K zAJ)=Kf;E=}n=T17{@Anoch4c*nfu>XyEb3AT~dEl_b=X9xP52 z4Tj5+Kyz@xrqysy*$Wi>um1kDLKHgLJ5T11glU~@xU8%@}!C}_%k5R3MYx6_dvs!1W zvh%hZu6-RloRQuyc+d7beGByIuArB9_Vh@Mr7!#Mey#Jx$nyr_a_%g|Up#fE2zA9r#Z;qx#o zYScpJih0RByCkUWQ|l1#@3s`K-byAq?2C5{nn&Ks`hAR@5{>iG8!Mkv6gY`U+HT8F zum801OnmW3$PFyJ;!O%`Df0ZeyPAu+leb?#!rVQh)zDe+U|QozSCX9QTjy$ew|1BK zH)K>Y4-!?w_;s{{NP-YaeUnJhu_M(r8Vpa5Ow_*#gWm{)PAmBWM+YBb2*db{_w@cY z&y)86e3cV1hY=T+t1t@0)i{FgdEPak#sVnVay#1naY8?X05vO{^QjQ>rctZV(H4$s znB)8W3nuw`lbbJ@f0w*sbr^MUhY_Ra!#YpZ?25>9 z7F{)nwNn(gb$rV|*j%n%d+&2b7as2y{}ddYz-SE$xUCs?oMK(hU$hlxt)kv8u8REd zOeFAf^>}wedUuvX@3gd1{RHq~QZjpep(=9dC41QPi*{w}r)HpJx!?HZnvvz$DjliB z0~y0>Z!jHSMG@8f4FMBJM@De!TyKaFphW;dLYUUCBd7XoynfoITdOsee#*X>^;F)i zH)UM0D*Sz=e}e&=*`2{?=S@9+bt#)^w_7AX8HL8s@Rw0u@q}7xh&zjTVUJPZYE?OR zMN1}Jhdl2BK=8V>)zBi^$d@b;p@N8v+uo`6AIU=BgF2$#SRj-u6)2`%h|=5a$b*H} zZ$ZK89dHQNOU7yssr7ulpC2(vg8tZ1#quU^7aGN*_E_he7%sXaz49vhkf|TeNvND$ zPDJZIefnatU(&cMx}DW&wxtvIma5exxk~HM1`92OX|^_Ejm2hxMNwb;{X3mOf25#YPMmszPXf zR|(S$<;gWEw)bD?IMnoh>(S!`PE6u5eNuX~2`hSZFtNXnFt zBGZq@n8ZPzdSsQYjD|$f{BXJF&=-VYVmL$86VSInxUm#eDw-q$HR=;wj!WJ+U~VPa zIJWP|2qs%mu1`zzi{Nod!17Tss5Rs=c5&0>;@&=)KU zVUo(w7i?hSpMP1RNI^K?;W;wUbuie263q`3-G%Sh2bVL2zTgM{paOjhhi|@x`ZI$2 zLD9Q#IalZlG1PF+X?Dymvt|`Z-@Tunfz5A08TxF|S38e+GxUFD9teb@UCIZca?nqI zYg$q-DLunMnM=KCR_R6My75u%tai^_$=3y%1Ks-BUzJNIU9YHuFhJdb3{PC z>Xp)Sz*)KAtUPd5J~%5soK+Cc`WntE0%sM2vr52OCE=`6a8@Xs^)3FJD58(C6#hI6 zS+5zgBpI^g7_yWZvOY0neP+lqXUMW=$Z}`M`pS?6XUK|T$Vw;|Bk&J~XZ0vEA~Wk4 zvf3E3`WUjt7_w#=vQ`g?nNRhHy=hlD!*6gjp z4aTfRfTBnsJ~g-h4uBnUY-l7}n{LZeT^f;sN}&)9b7ADI%ESGMV0WXV{)Wz9osY;$ zgxzrEhvAxhb#$QfM&Tall)Ai4r;P`+fR!F~A01~~-&vC~2>?;*QR8d=$7+e1L%+>SlkL=GC9ad4*m8oRlWyzH!k@Eu;m& zMjFnr)Mz;OJ*z~!vf z_K%RS38}c*4vb^d=-AiupmuxfRqK4Se{$i=jl4sB(c<`%w{#_+^|A^|?8r@*$83I< zm77|&Cl^bouOPN9oR9`aQ}2(dhUxC|@aXbpWR^c-KlF#Dh9|M_wKgUKi^2 zfV$Zieoqsv$0l`3Pips!$R92T1c~TMcQ+zUAG2`Ja9Y2dTxc*j6~0SWhFwN_2e?U) zQX%ud+FcwlXP$Nmxpy{6TgLc)ztv%G`X$)>Dx~%Dh-J$3J*{ezTS}%NBsI6Hz$tS6 z3-Ba&>suqLydvYz>H8ksmdF~F`QLvzZ@%sPticQSclhug=_*T*saNadeg9_V35_=G z)-il@QWUv3;D)#j+XLosks}b-!p93MA4Pob{O3Kk9CHVgfsg3ND9+vn=O0x35?X7y zw^H^i6ii)T$fdk|210PqF;pMSC7KLdh(7K_5%&v_fguHhZuhMtme=u0XX49N1y|lD z(k(N4>66Pb`a8#WqrU1CS1sda>monHMX$YdIR%aujmwGdRS8;F{yt~0tqeGBg?yxl z8f8PDaV1}O^EwY0y8acA?xDwf{h&z0r>YT*qR6HY6qJ5f0Q*Ei>EKUSzl|-xYnkTX zOY5okT=%o8n&r%|$&lKGhY{V_zi!Jf^+);(Jq4ZDWI8E=*HNez=JJEuSs6ZP@G*md{;qcRNIBx{pAp%|=0Y8m^^G3oQMlGMgeMBs` zRIl7NA5n{7(b9v-iJ}Sd{Yc<_c)@K1f`904+L`YdWoKbe#M-%6aqoBA$f29~`|lnv zml<2;;9U37K{tuOXK=$mQja!Cz>cB0&{a|}iqP#g2>x6en#KU0A&Rzx_X$HcX~2$z zpxY4mb10OS4}%K~rlUk-0Y%5d``$x0S-_6ZKpvrRT^VQ#7;FZLu7&sMK{vUl4xVNczf13q#QvmFk?18^3bWYLc0NoS@JJNt|Bj71!Y>(U{;fu0RBx4p9eb!U@ 
zEHe5mTKX(z`YbLU3>!(Pk0dl)5}GOrEtZ5fOG1Ywp-Ym`!_W0T;P*InJb?rv{_foj z2h9AKkE=03mzW>{ERYKps2U4&i3JkC2DxB^sZC zMNbNKCZ!#m1YQvm;`KbhR{ir9Cj9SSn9aUC#M(U^orm&Bzyg%e7!$Tg2>vNtlYBG%*e>b?UKgovj^}p_8sXWmwny^27}nbWA2)X9Rbj zAvzslSYMLv6775XIb5_p> zPN$u%T(_(N)wFYgNyxW*{VR@1b=n5m5Lr zX3_gI0jS*E5-vgXFjga4vDlJ7T$3(~xY7Y0o2F~$bbGj{&pC&7E_Q%Ud`AEE)de9U zURD#YBv1lTs>Gjjo;-!W7Wq~$_LX!4o;&`r7G1pORj$ZDJ)JG-2;?Gae7o5Uqjk~l zXUz!f6osc+{ll__RO4Oxm`Ex@#=DkV32zBm8tzza9dI7BEbyhq&%b%P``w@1oyYp4 zEZnKbJ9Vrpk&~<|get5n*QBf~E4`{MD~cX;NBCR!>1qSlzr?oguh6#c>D&jdi!cYS zeRd^{>a22~@&`A{-nQV;nB%*ZZ~FDP3$d-tWZFotmZ-GA;GUzaM=C8q97=Mjwm{_Z zBv)-!TY#Jkf84tB-$J-Y5|{GoYIwWnJmTgE!co|JDno_j-2%r4!lzu>z#4<+!w%cs zv8Fa~eBV-dncf&WeqjCavG%c>M{(=%eX>5P=Z^oF$0;b5b*a0y89IaLRh$K_m1*Yq)X(knzLf~A*JJ1 zJC2#x8B)2U2ob531?0NJU%N>yjy4fkw%(ty(4YwZLvAByyvmTVrAs&7gLx|;URt>NTmF3R~eh?vp3 z{D`E}9dd7t(c`J#qswbf-Yc3;HLyHIQJ^sCe&E=9Ia+q|@3nZ*(ClR>j}zj#$@Dou zYD(xHoPPdQx~^fPbTVY&;f*%ti(4=5^i4Akq|h^GImTc~rY^h*hOa7cIrd;lPG}Cf zbOr@@93S-SElr0g#vYMG8#1Cw8oI|T!;$v%@iS;sSr%OZ9PVuVsVQqkPV@!608sKs zrVy;3@bq!440Ik0cG0p#zXOAVK;Jb_7=MDINAUf|;pBOcN7Y)`Zl}1u8L!uaND-1Q z&>T^)3N6SY5g!2W$RRZCTT|_T%KqlW9`Q1YJ2aL98p{cd z<$}g?Lt}a3dX;-t74{TQVWl>5E`#&cI68?k_nB4ZkifJXH8@cD0YvkzoO1f&naznT!*#10@jNN3@p zD_mg_dy3k|BFgh4DO<(vo*m0S5kNLf79%vgQcm3jf`IJk^z)%8E<~c68|CQTbLEi| zi4C!9X&NL#x-!LP*(>mR@L_Up+Cn8P^FF0^#cF3^YGuPZ-CTJd#zb`tztO7S*l-I! zuugMpamBLFzx~=xNGY0XUErMd97PpvyNb?o!0+n9wH0vW+uo<(;ccJnZq_H4O{{J9 zgO`KPk5II7n8Mb2FyuISH|)lzmN*gkTD}l(aE|(7J6z3fXn2ZH340xS#~zEzIX(3( zOy_%j#Jd4ez_h(4U&#oC9iF|cc6;A;3*NsqxVN{vYIn`Y^hPuBSIj3=Aw4NCGy z8FGp^VrBW`v)gEY`|~FFr$(D$Y?NW{dv;#!zOwPWpA-VdpDZTi!)W<_zYW6Qf5Awp zAwrCu{SDtrhp&W4-T0H+L?{=Wi|@C=x$&pQvR0#05O$1O%(OSA~eO($qw6f%TWXCbGvglH< zQC?3Pl`N_!A*7V@YiGNn{zASUq*JD7`+F>I@_g~9 z028Vgkl$5Ev(vOJV5@k4=z8IXp~v<^iLURR{5K48;-h4Zs&09lBH#B%Qg9(sT{6gcK627cX66$82603hJ1UwVu!bD;T{%6Df= zKuo}3=A_l(c1(ZE<#uM=wxO3h9l3JY*H@7@m!~5xh1$slTwb=)76LIk?y<=D?3La_ zMK&N1?2m)62Yv}g4J<}6_x$MFEALf%6cy2+k;p3#(ZBV>qMTs;&AT3kVrS8~i~z$aR@(rV~xj=5s17j1{}aBQVe z^!#*Zpc0TarzqWT&A$N@u3&uwg_Z!Ung;i&nC@d+B0Fr?+P3{eS|_ z+2<%#X@npRYyq=0!ur>M=)A@^&HPco^bj(ZWL)#1G!8Ku5}Z5`1_b`yfTXFQI1D|P zG0tHvD?jYcU}7kyK{Pc3MS>F&l}=f`CUb?E-~{@tlgZrnPT~QtGrO4a9p|bKhI8c0 z$qY>p%{06*p_dI{;NCcA-agsKN_p!9(qY{2^nnns`xe5S3QUb5KSr=c#AaSSw+a== z{}P9x=Y}N2!6A6c>3}qf6&MuhPKvgkT4+66fVppXSz85_U^dR*m40Tu;$8xTr653~ z$T=)E%MP)JIyy)NYR<}o5IvOAK|(ss`J4Bg&(0uZx3#v(xh>Ue!dr*0tb&FWB(_#= z52#%fN+K&R56S#RJTof}=gfi*#a&NCP00&HePEidT9!DCxT2@WSKpLZT#DDO7aKph zLwtkIkdfz;k?Ttc*#Wl8&tKo&zJ4k5P_3zloY`ts1svdSZXUWEw-hH>Nkt(5!X?T6 z>T{icNMQPkSQxNBl_|CMih23LK4JAi7nW;_9a8zQP9Trn`GtjAbkZ@!`tZ{ma50T) zgQ#fz)3em{=kJU{bwt@Z^4>Jr7qn1`7N-$=8Ats2{(-h3SoTnxiB`aB!>ZP6pOT+d z6t|9eDaZW>QS1bk#s?u*LH6z)Y(op5RD#LIIzRcOyfCwABsPdTj7Avu*I%KKSeye0CN-y9%G(#m`&=m7h{d zJ)-|YME`}JC-Adm*VoTa-uS`&{NU~F4Ik(wDWsxe_XI-zK@>VseRKJ%B%_luu4hj& z)?>D{$Z;`JG~c_0!%yRD!teLEDZbOVRq1-i7Tq(qnF@sMr%+yuuj5UexMme z1!+ne8n-K9u9=zCb|bCTuy|Ew+H8we_y=Edlb2)+g}Zd8$%lJH+)P%bCpRRXKJ~Ki zQoJ;3A2RLq5T3pdSQr{amly1u!qkzGj>b8!do-^RrDG?==a66#zGBLs&LW8tz{ZcO z<768JH)H0@a!skXfh284hq8B zN48R-&eTzh6V<1Slv_Y1e5Nt^|AScn&q1uZjdqQD=tB#lru>e#`sSO0Y|j_=al@Y{ zh@flNJwBm%h2N4*QYZ0sW~%43+eX)Og%e`xUp{>5HhTo0y@Jmo;S}g_3T!w99-M*@ zPC){vAcs>><1{kIVi>wuz&)(s9yazm1`Vws?9)&c^LdO49+`Dw4OvtO0(Dhmx?gR~Ufrhy;KO#EkkfT2W@m_&=k)UXF zcpo}+6AgUk*G*^d&v1eUs&Dte74}};YIT>b!&BfmP_4AFh`1PVk1X0mp%tPk?t%St zSfKzAzZmmJ|6=hg3X2&YU6h>1Qq*%`hdYZzbh?pOrB}%ve%98rmj$e=jViJ65TfB~ z4|oRr*f-9q=nyr^RINB+7|Z@pH*LXTy8g} zwdjyOR~YiyX2$cu-bu<<`4YIB@n<{19LBI6AAPdkP?shSkfALLMcv% zxiP=_auHUeOd2iUCw|9K1+$o)-&g{DuO*~&Vy@eLs@*AwfIu-JXg6{QF 
zN(vQ64*zqjnr=Z->wM2-RZ%2!&XGXUp^F6RLFp8al1TZxgr=@idYcsn7x|}}Q7zl= zr+5)XXbx*JdukIS{`kH40+n-km|6kHA1iU)5}d&Kc*4DDLOV4S?3=cUb|az_IteX9 zwrLKttV1c{ZDQ=L!8~5N86G3!c7dUjQ>{LAOLQ-?w*5QEZ19KgVn5lbq#NRxc>XA1 z8Iw;U!hdZlNWYta^R8A0q*;55pa0o`*7bChEh$7mUFvU_{Nrv@{xO zjFyztkI^)$&zRYV(eyG3=Vnroc@=a;g%b>aVe|wl4Xt1RuMtH%!{x-FFX+I;M4)$Y zI1~!y1A`er(Xnv3e+O&Q^sL|?u?$2%F&U$NG7yo0-hGF6$w1G*;D6_8@PC`$S72f) z(7SMW*ITILD~x~VYLsYyK+y;IekX7_Pv{E?@DDl=<|B~IBT&$zKiS?=$Xuj{PrHT928OUMATks1?Y;UB^CG3+hLmxcE*+1S4T4&k3s;N%&gS(F3Z zuU->fO5PiEqsOUY>I+!8*DozJEAXup9d!>sj=J?3IuR$v-kI9w}m4B-zB69X6bsFf`V6h z|3Zem-Mxos6d~hN%)hLncJCf6Wh>V%Srr_Y8kd4Xl#V@9n?@n^7AHF;dHW#==)WVW zL~W=)A3b-OBt2VJ+HUz)$iBzbC@jLD7iu9IdKofP`0=~&TWGBp!NOPD_js`fnklrQ~+$r=! z&k9$faafYLvT$nrtD}49HNq#ma$d%M7bA|j81#zK;P{(7{&w!P%(;Is(_Y8e{$-P0 zKZ^BhH2Co<)hy2=U2JNsuU3YgkL<;V?Air*n@09u=7%?&?!!F0vSgQl zw-$xq?M&*5^mtX@YJL+{vdYHQY8>E(sr!eF+PFNGiZky{s66NBkZpb+@uj?Vo8^OuLm!{VJ2y_76^ohJ z=LVcU!*Ak^+%uWE7FUxj9W5v$a!ZCx7pi}^dzAd{;Oqb1$ss_wPJ2uVtJ(WY+Pyb0YFb*w|9(0PGta&|wV*OZZr zN#~EzkHFBjdlET`>I;8;{h8-fjNhfRt5of*-gYUsuNcP zo@#Oj|7&Kc03B7OvjkehYrm_lg>Z~tN1gFp@Dw26YcuJpS^)5(Sy~AqPgI5l+YXkunIQl zZvgz66qJeq{F4GLl_**T-%kTB#~Cci4bAy)1~nM&EDarC03Q-Xd!~dqj$->7`a{(2 zR(FGLSLUWU7M@vYn>^FeyhJ5fwqZfg-5mgx#=BWQ><*p?cAA$N#3%K-OkXwqM^7y&I!Mc%CxVP0mxlF2 zbZNl)43}L`tCf`wF7^Kof1-YN4q@iBND$!cOVatta)lzbXpqpZwsdtkz?Nb%*(SdJMX=^)SWP^&%LRP<@&TRX_8FeW@eO(-nL@|PY1Q?=dd;2XeGW&S5kF7 zef8?re{`5gLX^6dZz5K&3418$VR1Rs)ovx(o25?#tinJr9 zHM2M~ZK$_6ABoFT7&b*Q%?zggrN%?cv#G_f27b2{K_Pwf%9A`d{bbvj!G0`8K6IUT z@w}!p&cc`VgCkFOpN)orb!i8+?7!i!@qtcNC_A<)vWU16NrUX?ek&1ir84MBToaZ6 z!Ig33HJ{3I=kga`?ueIk0IP7meL!mM*9eSo4Q7nGMG^&k@A1jwFj(%~3Zry%?@kdc z`KdQSl1{zjT5M_j!Mo+!VFtC$deO3LVD536twUmP=BlOdd3fCw-@0JyVe*$V#K6bx z=T>!!0bwh0U2HH3l>S^}TdC?d!6*a~{CcLk#Bv|m071bX|9VV@9EUMSW2tA{H98FS(5Z^!SPzFW@YZ`B%-T2 zF4d*^>2av8@!aLoOvp;JPx$_ln;VVq=>EqUFYXrin}?0W62i^YZ*;cQNAv70QM=+u zyzkd}Xij+hY6{;q!SWYadiMbutxY{~bgktL1=|;f+JDUH7u5POf;skn;CRhA%_s~! zU-p^4#^n48Fa8SWY_ECnpZ?7EgtO!AV5T}zxe0~TK94jWXBUw+4oD}Ip^6M#LjkU# z0@pkT*U*A%=)pB$aLo&F4Kuig6Kq^_j%#R8~!qzetPh1jWH1^$^wp$askC&h`k64 z1>?E|ji&Wn@cI^?-IMsCI}Bp;Zgzc{>sQkwNX2uq(&YvXQKb)Xmh`)o~h+lv}K&N3VjWp_;xrbhXprJvR$qj+fZ z+u(8=t5y~uQqgE(`iKD-rRWW7PR5>TwtiMi9BP_?F<9zsFqm2Zp^Z|UxCSEjT94|1Q%7g`rhnA5OPJA}UO zxl@DHMj$9oDEsNBGwMK{GaxD7$KJ7U>m+-hLG+u7{hqG0oT(EVzpFok^)6-q=E^7$ z?OFY^wWWTdxjRCxvqr37+Z~CG2Eg_EqS1zLN&Y(HwdcK-Jqx4{b=wwOw_nyVy#5jOb*Lv4tu+!bzwL$ z-9txqPFilg-{*?cwjBKrv`6NIb!|5uYai`sa2@AM+T7`UTnzIGuTC6gdRM6rDIRWV zjpW|}+4IQsm*!~}qp%buA6rl-vK*o}r&_n`+vs>qi{7iGEVdG5?@k-4yT~v)&D$pM zS5E(!)H2kyz+Y;S7#f%JV#2uu1s{QWBDnlmjg~x#EbtlTKN*NYpMzy+oZb_A?A2p! z|6)MvS5@MP%9mh_&Ob7vS9++2rZvR)iCxn1pP>DDmv;B+iYhzS!xFBBveoWag$zdL zBi}G9iH9&MJwz*}-C2ASs|Ls9r!ht6`zkUkrFQ8XslDPRh#n@&O7^gT+XoYE4XH}! 
zyiy|gGa@Dc6+53KxEjDrIP6 zX!SQ;EUA5`7fp=E3g#12I1E)Zxe=~^Va$OFuD$*1`Xi_Rfl1n(3u;LBkm`!PqUpxF zkGt8OV~~Kz?ZzZ$Iv>?Lzuf#%q;lcl!eKVEs!2t_vAtUC+&_Dc z9z&V@76%H+1lQISFT!=ohN3QBb88*^XcrNsEsQ-4-gJ1QoC>A3Zcdg|{}QdbfIyp) zGw@h92+6?xORpqy$1HRkF5)d*0ey(M|M9q{_)b~b_9kkDE|LT#Vg5YX|F zcSSSr3zA5-+Z!7-UTDrLhlKPTo9X3tYy7W-75b07K#O8GLIvgb7+-B@2j#cbEaQuB z7%Ct;%CXb00_?kwypW_&&kP3Ab8*xJLnsOot~Ux2(mR$3f4%EQd`=AYh|Fho!aCI~ zTK;|xtiHQ)wz8}a19VY9rGGgD)X{?-ijeaEipXi0>B!QULcQ)gb+i1dFySk(kn7Ro zPH`PF`wf+8>K}3Mw>eHRD_oG?7T&AWj*f+T@)mWUYPz3BS?4=$ssWO0E_PYg*R~vU z2K!t}A`aR~O~yG4#Tg(Wru<|EpYR^xw^f30JVJZb_b^Jn=lk_TrUq0MSlnwBxTF2T z(ROmey%7WToMa01IyD0xW{x?32Q~U$J!O%9WwydSI&jQ2OluaiBeuq_URcL*jyfg^ z*@QhYiUSy8wU8n9rnhubNn5ZdRwzXkN^$Z}k%CefrDcCBt;o>p^Qx^ z&}=>}WAE``+?6#EKKt^7|I001K^Kay^Bclo_+-!w-b;!HOG4tV{rvI$f>BOMwNTM3P|=N1(fd(RV^CvQ27xrCKi_YGW7Brs zUBE=I_}#~=-(rNjK=>1ar_HkfwUt7x$)|}6ghAp15OKkw^EQ~Xlz4dqmCM44*8}Xv zXZ|fK?7{)U(d@hca2uYkGfw~+u1>!gF%dIxlk1`j1ueHkeQliqD?O81_c7Q<>Tv$* z?K#vQ71+tuw}WI|>bom8>|1;!d0seab_DBg3DA`P$o7<1#JuDgi8!>W@f3rtM!JB) zYui^YWw@)ZQ?LY>fx*$wjOt;XVv(V~bxZ8N-PULxfg0r#X>z$5g!)R)V+EblbxwqP zFnkSlb(0m0xUZD+Cj)2Qzd%rL4Uc;IplM!Dq{lPGX{EJL<~K z4Arnl#cV!wcb@_r`_E+G=5jpZQbNv*0u$#j6gAb5ip45RK-W#8$9P6~(`)wwivFfe zG6%U){qeB_gKonXo7}*)bU4mVa}Fra+o7x0SNX>GH`7svzgxB-sdY9k5m z;@|nYSn`ZAn*Rr4A>{GLD*k(5-<;eliD2(6Gx(m9l|{LI-RH)99D9RWti9%1w_ zx-9+^O#c-uE{hjg2to&W8X+50qN+)yH=KM62G`lE%Y6JK%m5|4A{-QHgbY4%Q+K^` zSv<%=8Na6y;#Nk6+~h?h9}>GPZho6|sFcr&Nah8*EDmKqrG|U$)g{WI2vE~SZ0}S< z2u{9=*fuC5u_csy-#_<)Y+|}BI&T+jY}dV|5hAdd#yANSvGqmG!aFm9dM`?PS-E$Z zZLjPx5+x;2lJ;Zou=Y0*Tjo$y963}RBr1-vDiYfl^+0X_m>R15e!KE&npy7v=r7Ax z8%4qx*4E;;0cF*|7QoD98<>|_>28P|CFs6wy={_?BSXhdxV&p!-v^R@o;;EfF;Iz@ zzhYK}oNW>QY)YH0OJQky{if^FpL>i8iN{LEBquM9)YT#F-HbI8F_jZVOaOL>9vA9l zrd6B?aO3jywdzN}>c#C9MTNh!YyyGJvi0i(O(hc3+7pMqK9@3~|E^bw_}rq&WDh2S zxffen6{2SvKu?yxh|B0JMytZ%(>q;ANL);<>#+Szo^d)_{*|E^B_bmBDi38aa=XDYT!OVxyne17IEn0Dk8i* z<}s*k79UW#)n@WNMQ=uHGh=SFJJ`z8}Q1}#rVyZfZEsD zDzrbc15!p3cG(<~mcLn@_=K*dXCRxY(EAOlnRdNs01#s@FmACMl>bFNQAz zxR)r}4BjUI-J}LPJ_UJ%z!!}kZrdPxqd-bC-5C+69uD`0LO<|f=!3yRlxPr8boLPD zV*BBzaGLGY6}GYsN@EAZwkNf zNuA;bc415pVO~GKZ`nA?TedkG>n=$+-9-jYMusXXL@>&${+}B^oxD?YYA?tn7Zn69 zfSN`o$`@J~zwhdo(vPF&h|i6v{)c}-NZ&ug^vOO&|A`tn^5>>q9yJ|L*$r$b6VzMU zKurhd-&@*4HGEMGV-X8PM$FLC!Ixch7@U8fy5Iq?CG-L4`*y{#0F`}f%4A)kk@i4m zk6`(qBp~4fX}Vgu-z0$Af`^UqN~eREqFf{%rzpU}uL}W2jqZT6&@>@w^n=1za(9g8 zSlK)?7R0GfdOKMc)*?*`}14%shW(!xV_W-qy&!FfaF|D-hakM#NvrRh!W ze<)3l@(kR1Qr-)Qg(?(krkGo{HBA}~>QYs#@Z(^-P5jQO;GO8hQ^FFepkYTbtQPb> zwg6XW2pJsxUj0+3Um?$4O&4qa%diGc-uYj+-=hm~&v}(e-!8>bJg5bItYxbBTF65+ zCoPE5F?#jk7Iv5$6VAO+pAC7gZrDqp7WAi&wj$_1Ce!~lp)Ks#wPpBTfSbLf5cU4Q z2BH59icXp@z6SkM=*w`oN>8hh3dUr=Jdi1Dp^C`1wfM}dZiptUp3s7bC@iD4zg;5u zr8ZOZvUKlZg;hs8$!LDl`5r@O>b@1Oa19XPMXtI4n&HUOO{pVFnNb}uX05YjyH8n$ zpP~2_T%^0enN?azxyYvT?NGC_t_D@2QRA4qv9U&C=0C-azvU=<$d~}9v`|? 
zOFRtrxW3bb^M3HTBG?@Fxw^Rjo?j)F@t5nP+qVXVi_h`yB)gwXJbO^Pk-V_cvizL+ zA9vd4h0{RLsmT?Lubun9bMgHnl#OC75{=Sj_g!XRibC<1bA2fWUhUJC@rh-)r8X?9 zv!@++C17K(S?}`kt)cJSy z7F+uya6Rt|^jQyFZR!yHSxeiZ7PAVih?t?~w-+skyh8cZC}+-e@94Ld`%!tD8XD4v zY1-)dlG&G3PE-MyS`1{zjN|RjL>#@2#YU9`p9PMl5O~)Lk0tuG+sa*3@?&_lcQ_*z z^qxkLqsPCGX(;(l7GN6r<3@(2+e#nSw1BCxI)!RHJ0DDrn$I^c$4(E7mH6~sM+jZT zqs(>U7@|^@yd@xRIraPF$H;YlS+fQya8d6MZRxm?q%k|5Xc-wkg$ebbU$7c;wFwE9xW{=M*u%+H9~^@5dHK1V zb@9s!@lI>_`YNotIgq=cbmgT zD36f}o5Q`|H;404{p@bQ>{$De(d$|OBN`rf`sB@SR@cII`MUyHFc=3XfPPOv0W9@` zky~w`>at1Gs=zb0LSFoiyC#?OGPG7F^65aXe7=iCQRc)|Llx~UY)7=w`i0yiGrMWN zsF`VQ_>uR{^@HzD%CUFfFm`C)a58S+aQfL7o5jpT5%AWgk_QvjYfi_r@;Vs-2mQ^> zX8Z4Z?_(%7H(OphIu`rkIi8u7`3+X$t3^yb-lhc<p&a}?{u={jWxof5%a-}O z^zluYZJf}h;-J7GVE@7+NX`jBaBjU__^>!^`pz*t=SlT;8(FBCnp3rgs9Ub<`9Ps& z_O>_8Op)c13@-V`Qk^$tmwz5Gmbx1IJsNoB0LrY+p!0xU8L; zKL3GgvU7S7|D|s6U6q`s_t6n)Qsbp+lEICnxdW4&MZHt-*q$-Ril^3(F;i9g6<0^X zH$aVMW-}b^!S(D77pQ*jOl{T6&7A$BziH_DvV^(oO0=tY`A&M*?k4IO_N$O;X7wv^?Xi%{>QeQ< zV9|%d04{_yrP!saOW%mGi8(^bWws&>m#$KaXN01Y(5_jMBATMJ*GrLz7(9Q5qiH}! z>{EF9O;+^DJ5pdZW?G2ot${>$q}G`Jd*q&wiFI6UQpIV`kNM-0mqWK|lxE74Rg*#T z4Lh^~*)ve4jUzWHjT1D9{SI40{}~M5j**`oUCG?%r5wr3iXsz!qiHFuO%1t^As*(j zJSL#dS0j%~Nkd)LoZ0r2$kzG@%Vs$bWAWLO`&p)T=VCgnX>gk^i_<%m&`yG#;I(Uy z+N#1wA0=b`{vb1QK`w%@(PFH7(}E9iz&4IfeQg~5j<2Hc38|Jb3Q=V2M5>k3=+9A6 z5z0{^9LaH3h?0Df9OLgL$vl3_k|na!e@BGMl6fdflHKw~a{TxhEz>g0_#N>^hOGYM z8?t&O88Tr_l%$$tbPzI*_QEudet2daP4}b^9DtIqFpQ(i#VYzbszqb9qz^0}B~Dz+nIc2087;%58YQDo5+xIgTOp?fMj zRn=Yf?W+F3PVaP0bpo4vFK9MK8#5(&_vtUW9^H2iq&8WCM;yz9;g=+7hUzgIQr0n= zMo;Q~_@+;eMraMeY`6x52=A$496|rPr)XnpzMLfWBex>)BuWQ?K*PTy;3yZ)hCo6) zMx#DX!>+wfsz=`b|r|cSJD>Ec172N`c;zPu`-lR9h@)@%|vPQ0-j)Y z%cKozf4YkZHg~}$Sp$g zj%AKwma`+yVveGp1`uu{Y5??wI|!2!Y~R^e!=%Yj5Ea&TrAc{tF-7UK6RfjK>U2vj zn1eM@nY5r>Dg4vmF1D(C0i&07$=OTx(q1#6diyC)ed!Qe=e}(xhj8*F(2;@^E=7Kw;v|H;4$Yc2Vs9AmJjAmEwo~SA61o@{8=aPLVkYh z(^3g(>fnfXSx%O^c%zRZ52X{A>t>;qTZp8RBTEgVYs?F!`(+@LtedbNMyZzkDF9v0 zTohYw%ln1`8m+%&@CmnXJCV+KGl}kOFOhEV;uEgUod1O|UDUh~U6ctcD2{!?71v0l z>!qfZGYsEJe4tGpR`I2adc%`E9PvJRcn_peAay279{vDQYmmlw%i!`SKPV=C#6qPU zplH?6s{~#tD14f2(8#DAy#+$2jVWV+qgE)y?h3$27wn!es5^wYC@9~*PJemCm1+~o zZaarDEP8w#;Ri+*a+S7<(fUUG@?88Ii^wBRP5&*;2~_Xs5Z-&wOL}3|P@Dp5>hnBb zZWTX$mwkINdI;HiLcZy%*9K0RZpA&=mRj=7{-l$a?XJEr9cn(xcEz&QxYO+0_+0L( zvkW0sKAPc@U2e`VIGSN0U3NL65X_rQT6W=df?Gn# zfWv1Ldqn$UbD#jG1R$17zH#fMtiHE~T-nt>A9_RXqGQ_5CeT&xGA6w}J193y>nT(} zFYV=9LDqa$OX62YU;_K0hxg1~rQ+%yiU9d{_0IL`L*B~JU!yyJSCy)W z^v96`GmSj^GFci&=>+#G#+CRL($eA>?we)x5^_7rSPTT4w3eKHHpc8X{ba9s8vd+( z>!5Ao#}v8CA~8yfnkRJ zR&c20Yx&Rsx+&1iqh)@824vz%;2oSkp>c&#)7FZ<*Dkj^{l{&o z8Gd(Rg%`#j1mHb@#nX7%*x0Gf6w$9C;@{vfYJi*j#fQGI3*c@RAMjQ$WgL-1u=Ai% z_^REjDO5X!_S@EqxG>EUkEc}pRcqF5M;7o*sv%QiZB1X2CJF6Jd=U?QCGZS!9eE>1 zn<8U--A7&k@qsQc;EB)&M+4BXo1mck0*Io$wfq-uW@&%eoj)valre6Kh#);HJv}j5 zt!Z{c$Hep{azN)8IOT$FOr`@Wea-z2f70XTuADd?pCt09+|B#Wawc+*ypu$0pVJ@P z%*6e_{29-^H5|&nQeNo&Lfe#^x^`!{*NjQmiJYD^coWC+YKl&ywlYbN&zOiM#oxNxKx*iMz%r ziMw^9iuULM8K3cUWUwY*0mmtcnW^S2v*$jcj~kp^7cl6?Ql{8QCbzlfppQlL7vRnp zhM26bo*W+januJE!Uwnn!py>xONdiiA2;G>#rbIM6px6CEiNG9J)FD1ee}Hg_%$vKk2-x{bqPt; z3b2M%iZ%9C-D0yC^n^(m2}!sMbGj&zjpfu@D@0=J_&FVBz8V}^c@TP)uIMiCZKM*d zj%)r1mXpBArTm27;kEu4zz9}pCM}>o0fQTAyj*Exldus%)T>zr%g+HdfWB@nWdvHSlYC3JpEGBIHk4*5mY;+bicECnop?UQV8) zWtG3FS{-Ya9(4+8dR=}hU8zB8eH#As;}T4(toq*7YV_*~8L!qPcxcMzSc>ALMzXH4 z86$7DCdRnPO8!A(3#gBv0w4wwFaR?FDjqEbtH;2(ik zIL_AZa8_Rda05#*O661Y&{=)LJ`jX_3}Ag&dD@uU0lxEd1D1|QkiBjpxyId>sll$Q z`PlO`_ig;vZr6L`PqdslkmW;fQX~@m zVelcy$9+%)zL$Nf$Aaj`Yw`JqU6b5>M}LWJ^|0=4PTsS&>i3x*eU-w(^QyQvxg}Uk z?9)s!_H4>mug+5fweUX2Sl2U* 
z)pGVbnQuJ+M4T90^>+7-* zn~aw~IjJchi=_ijK03lf-RuRoQ`$2uSyzJIgxCA-C}SJOnZpGp83RVk!r_3kHbhzP zcNQSVocV0#E(J5!G&&QPn!L`)Ufbi4?Jk7st|`FAN97(nenWOx@i|#^VBQk2-K!w_ z`y2hoqY(ayR-@+*6T!3X#a)N?jlG&ZNC8jGGp#2BuwiD=e%vE-4IbDs^e;a{?y5j* z^#amymk4zJG3vP0aCp4S(w@kZ(%S7ZsABSU2ujI{sM_M&E-`e5qL+3Ty5m2X_}ZKy z?N_Z1>zEU|I!}po=e!H4u8G}Wo1MuJbw|0<`ffe0wPeL|vL)29QUfo48+E#yd|WP0 z*R?oKFkYMCIpf?sF4}Rr^X1Qz?^1H$TaooUg_M*RTH|)|pZh76VF~l65`B;3AMiQX zvFmJRuzI@zuP%dY_f<+=kLOYk;tq0wv&pmeT8MMGaCNf?C;AMQTwWGASQyMb!??J4 zXe%ta>wDY`_PIWzFf_qC?d$KJ-LENsxT?2b2uNz-_H_cs)ww_7T-CsC`ncc9ykGVZ_upplIvi5b` z_wgJ^et++CL}E2IQ*`z7L2shLikgSAIa_dsXb{UqG=(y0IVtxIpZ~nDc+Yf{!p+#^>MH*6NG7VE#t@tQtmbvZJ$ryyYjiJ7e}bW|qLXb{bqgn3 zznU2mTw#Cb9Aia_9R%!bkWO%I{$C@%n zzpx=%I*#!~febO3fF<#ug;Y8WWDci5#uS^hiEpQo8hm_3+LT&;50|%`xQmQT+T=6d z&bJt~j+iNuL<4pmopMuIX7DtDv>?(?3|(u&Ofwb`NjgNIhv!LE1F6)j&O| z2r{w%K*knihFu@M?Ua&1a8gjZNdy^h5V<8CWLQB)GYw?${ZxvwM+~))E$SGpVmtCx zx%R5uiNNyVqnXF(*cNp;KCvAfn_PQO*dxa1B^Fbj?-RI}9(hI#QLl(Rrjxn%vMWue z_?1U8kKsUwQ}*YGwANaxFP0WltDh>gY6qA0ON9oC_!qA-!isSl||ojcfJ(jLdX zqypL>eFj^u!CqM+5B`1gDNn#Dwqs`UN9J+%DmcOz$n-kKcHGT^%n`_-{HJ3rFk<*j z4|?+%WWqock~|RZ-8hj)AfZLArTkE)gTRRX_`(3hifmJ z9UL?cY$yU_uwf?hz)IrY>&ylrUV{+4zOfx%dt7_(Nd9x;N2UYce@;YmF9{Gi&}*=b z80t}2)S(K;c6?Xo+VfKz$#lR6C+!);{stMui{;k2QOS%3@reI32dC66wl%pDoVywLY6wqTw3cQ6FkgDpw`dQUi>(^#J=C&@~_b0^D#q4>k!>u<85x=j+ za7KKKp1yFFduCI^1pS8B;{*P6>ZYIU&gSF>>#4I-;wt5Gb@ylGWD!TrhxpaG9c7QZ zi@c2C3F^w~ho6<(?HoO2*JhW29d%B!m;a2g{$5x4lUE&bWo^VSbANoXHlwz8&HVPI zf>P5qg88*88wn2MOer(2zq9iGND&QjMIp`ju4+4Tz z^Qx1+f4{ENWVh_^XV zXSnXmaKAl^VSre$~jxp@HNyzdyTYu9t-+J?a02ME;O9KIf-7gXDEd zuiKJRcsKvKkP_@8etTciv7WrtTkawlz|P#E*2msPnm_@Z#(eR15j@P#TwUOO{;m>J zVOY==)11@QI^`5(rtTHPO3^`IaGe(*@~xFxaElHiGEwa1yimr`*0m4ITD3TAI60Va zO}1!~YM!*5AxwYxLx!Jsy(V_#>d8E!R;ergVu@<@IV@z2JO^iB%SGoy!^1MQUm!}| z_@BId_7IB+xSj*;AM5v0;|)NqprOe=2b%i_`-O7covAAZX}_a zw@Q9o=ksTQX(;aIH)0b6W4>(#xLE4|^sm2SIpKM-FX>HXczcn4+KyxdlT1J0nigjn zBRHGFZ`zh+1e3KQ9rPnC?=IbE;QXg92C8q$g6eQ~pgI+(PE`V8e&L%}fJ@Rxhz4me zNaHgvr@s!xo-cozdbmp%igl^~6w8P-YW99W5SO>tbDL}1AZ#5m*nH&PkIp)vXU24- zjpXGVu~1z>0-v{cW6mE=XX@eXeVBo3KoA;lZy9gqcJ~!IM&4n?24QR#Na&aEQ2P4{_fs5#dgurTl}?}1esWXV4LH7 zeWYE%oMXhO$;IL?JqzJBM@%#K*&GkZq%?7#Z6!SiScWogjI_s4gH2G-Y+>d{y8@vf z&Rfb6qc2t#e@nG_MvNqzTiYj_lhJ=$-}Q+%JA9x-odW`Szj&&UrzM{eT^1Bdi03@0 z%!XgmYVc}orNl8`X2aTMuM^}x$&u%zCMT+5JVGE+ArA?CV1+8f>`O|*Nm66*sZTqw zeWcw2G~4;%yZz|eM_Q%$$;bUve&eygf@#4;O$iwjWpx>?%>NP>Y$LAJyM+d z>$av-a;!6Om49L;TGAK<#X&h9g{Z0FdlRbRGgejjHKKB~Q&;JnaKoK_J;L{Y6>RX$ zZ_!Vs`2X|4ZqZXd$wlB0SXmGZfYok3DJV@eOrA6K!g!I0O@aELz||4DIHI&vTgApz z!ooxGC#XMNiKS&c5pQE{UOpsoKs9zh;Smy62FSx6M@u9tl_cHW?d2XJ5)z5$VYlKz z;O`rV>@VP&eQ7;m9%!5W$nA~YTGA=*QX4tJc4eAt4XsKi1z0tLa=TOqboD;()uH1{ z)kRy0FcjW?WwW1sC|B>(|Mgf&?(4f3E1RTdYc0cEsZ(Uxa{Lwrb-nRPw-@)zmA4Xv zQ3ww9mH8t5J`|o0xtD+xKNMn)K?ah7IXt0Dt!xBFSTwV%qb6EVHS(MMU8>OE!MI^) z`iSzcXrR+5ce98-8JBgb$PbXu_SWit+UoA^l5&@T(oR^qJ98^p>A0;SwUgeDWCbP_S-d03KHM)aR{s3*e-PM0>aS zRQozyQFL;Km-!|Mi1N=#38{>2H#3Gm`iQdmC->oazd?4O^heVsl3>#8(W+S-X6aAyn^-WLpRj+0B-3#p!OD0{*-5~ zd>mf|2n5XJnh8|dh!XPJq;(t#dW+-F?v_+<80#|Sn=qmidMV@e<)kq%O`=3?F#f=R z0|<)|B7j$cAG-Mp!~BH5AzoXkBHl69~2WOTN+BmqVzV$zj?$>gI#j*~h|l^E9q8oeb{BYM?yE zyrK9DHhLEJ;rvB-ZWJiI-=z4gm5&*Ov&qo@?i>|GXFsqQvyB41^b;cjm=R{4_7JSS13seh=gL81_i*t1m;0#EQbieEq8EE4` zZcMbpX@uL(OZMOakMi^2xhr502A^F2Cieg6n0FZxT+6E+wfxTF3&@nkEWe|b>;Fw$ ze8rA}AU;SytTy{G@aK?pq-Rnz!@GQiUzcnlDPh$KsE;O&lI~Xx4JEPVS%^W7P#qI0 zp!yWh?S$4R?<$#%xEt*}xPS_yzZTm#uA+RC_SnUGBT&Oi z!m$EVe8WBGt`;65H23cHDe+~X#P&x^;5E+||!2~@opEl&FZ3Ua? 
zc%ZYK8}p$yvrZ%HW@;ZM7c+Q(816Fq61p<6#f{5{2a|y(6>%p3NA@2iN_rRayK^SM zGxx_7s<`tD+Fi=up{{N{+5W$00gFm`b+xR%Xj`kBt@9d{5gP|;7>cf`qM4gvtyws= zJuM!7_vdw99UH4}ys?p%#tOXh>dja1y|EXxM{TDr^oG&nNYmkfW?gE2VdkFj2NSbn z;1>2l7a(?4oV9g&RIr222F>;<*sJ@6<^_#zKlKqKQ0xFOco<3?Ac6*%WEKFj+37CV zsfu6;0PSvnDg9M;eQ%%iu5gn6n8#yl2LI!7p}o5-_>(-T)2uFol}2BO9%5@X(cgC9 z?a2D9VRv*9pOxtN)68F|M)NWDacRhh&yPFEmAE;y+WG+ZF_T977)4R+f;ynGQq2UYhyXH5w&FJU`Lo&Xv=yx0lR$uJV zsEsH;{VGR|;r_#0l3(D7>yM?J+>ZRN(L0hty`aY3+~oYXx?>+$>#nU@{sjwl7?z>m z=!V6?!C1#0sA=B>1v$4$#0U|;E}<1X`TKs(S$!&!s}*!w_~1%ZNB7K!2-!m!D`Gc~xsoI~XMN>&I{ zwq4=XKW^^u(c%8vl!H9!h@(6VPY>1ekLqiP&R_k?BhYVWu>5d1z!(ikwILrokfLepKDy*9~yArttFp2)X^3RLE@o z7~OFC{RrKv3REa;{jXY)?E4Y^f<35g{n)y}6b*3VrtqN$u~MdJY!YvDgJ~P!+kM9@m@Oy&!^HYXNIIw1uK|vS@rJq$^dIFI*LZ;)$v**aT`WGa&WCRdZpGs}S_0 zIhv-pp)h@OYwZA9LmChwBxRMjOx~bar{bS6{Ls_hbhJ zVAnWtg7J+@;4a7h`M%9BplBRDi&#z{fcT38`IQ!LTL!G7yVH&C^mo?ErQqJF^O2x%T3U0{^B+=b3gOiq7K7O@nnvDVEC^jND+P`* z>z;=1CGF|hckQurakq&1v#J+oh}64;XMOqo_{)_`ISkj_hun|8gU1}b^u%C_x39wpgasX zTjvq<_E%Vf4*Gh;u!h#{&KT`!Guq_na~haAH#7fO@VX+kep$^O7yMOnuW--%DBE3! zb6_xUc@edQ`t&l3w=<_id153*2({o1>@ryLJjLleGuLtJ4Oh4~WeD~w_rbox*R`I8^ndUBSeD{J-(<2D8 zI^`0T3SnDcM9}8ixhE%NvySq!S-tbcg2NNFi1<&exQyGO^y#0M+#S~2v1w)hY7??# z>6CeY_-9U|zlkafufCp&5>qQ1E+*ci%SbOPxqX=~hTL$)=_S^wq#GQJSl`Zn679E|enfn~dcN`i^b|#o(QIzB_uz~`LOxs<*{3U2oFQ;#qw(IBpCFoNxf!{^3F7`6SeQE26@FPKF_?gZ;s|g{A5e0g+a+blME^VfRT z{Wu-WIb0c-O--Y_5q-(vG1s)u=5EBhgK&HX1WkIh0OI*kB7T_5`M&s1AqL(dU6yA6 z8R_8({{T3v{%@TOt@i}ZQ7VA!EHwc6;LQ23eAa-p#)1I(Fwgq;Vp@)>tVxhzA((h! z>UDN%1twmso_qr)cdo2mJm~KXG)iw}Z=H{nHLR>6|GbKhgjMW#3Y$&u>-xhK8H6 zmHL1vIgBzTG0Bn4g037F+=>lB1|L}j{{}YH9w7BlXkQ)rBShowEFH~geT9InxuRpn zu1v(hS<=er(QUc}LxwWN!$q*t+?GBiMoh=umv>4tN|ZrMzfWPmEW31KLMG+;PjQXx zjG;zaE$N@CV}XO^A0B_~o9T>hqYfS8O8AVXLtb=eD|C%&y*@ae?uERomkBI=m)8B+ zbCS5EX&e%x*T>_@-@i@vX7f@MeKRR{?J%pfUpFO`E<(1#=pWj9ce3A2s+J{0dXBJ_ zE?1b}3F%J#Q|Fiom=SZm1g;>L(8#ZJY9zu!!l3x9KTYANAx;d`kXvqQ$PWxYBGoaI zMg%Mrg8!+&fBBzEAvHvFf(DZ3APMVCl7@ADh=h7_#6Y#?s3A%=NquF9iG6n#Nqr+n ziG3)IVP3YwO-*1;C9VaznIZ=#rMEXXII;Y8c_ zeLmnlt0>21osJ9O_;JDZ>u({PMlcrxKs0h(2QX5Hf@i{$0G!Xz4Pqy`jG#WRV>OM5 zo}0Pt^3-yTr{<8koAr2z=~?cJNA=?^*Q=X;zZ!9>j#B?BV?p?mfzp zYoOxq_ycuq<;^9pG)|!=bTE@oFW!ZH)US{GgtxzzB+1e8!CdzDH~6XPS*dF^Z%n9X z#i_pDL5}UFc;b~?&j|}fA(Kz%EwO4tog||r)l;{=+oqwl zz~AZRr~01oTEOt5PWibfoa-K|oQLjC=Zh6l|eht5e?6LzPni?40$FThZg|J)&IW_yn0U1U@oYhcqHfs89W(LFpIv8ci zkN;qgpa6N};`l$wW2@~S0y&)+PK-Esw=RPC0dJefrjA)2WQUW8@1spG>{P5*XeAS4 z1hdQ_>S3H4^t{Iz_RokIS!&|yB97hcUVrb>SL|kx8?SLcEIt4l+kltTo7O?C?f?EZ zh+#ndS16=C7z*kB3hk=~X(31}gQ4OvA7JM!R6v47B4F|+5oohX1P?|9lSDj_KYzR9 z5z~=i-T^Sepa88O&esSiz>fbY4mNVe0S?d&V<?MN&|CDLuGS#hSH01P~{+m3nVy6yTpfjlkCP#)YELqbOxCZ;c1GM!u{EH$ZcwL!<9eoP4y>e{0+O1R|pGK>0T zjLD>oHU}2f5)B^Jk}%;|R@EX^T?&gLu^*(I-!TYAu(TvCPl6*ZrP5v#OKGAfxrNKy z(j>@NkVeYez6L3AqGvJ zc(0Bsx>KSW`l*&Gx*QJ+b%ZQ^Dnl{u%1|+GbebA^?UovP+_oyZ(!Lt{zd8{v>;EHO z|3|+3A4yk3_noK+XSl2ghYD#*O8IC?@(XK9<{gU0dT2^w`D#kGA7jVf5E6J$(A60Km~@J7Ak-6EmpoFI#k{kT&$T$(YMIR(0`Wv zK+mTD4x$Ir|Kwa*dcG8}FAAjDQuH_23Gz%J<;$0)U-j zaz_Nn-iJEjkxvSoy`Je^W=V9~Ivo=5yf+Wb9il5ZhF5P^P%7Cgx0hqT>-xklxRI(~ z`=nV3|-b(!G+_WA>O#P^r(^WAZ zg;(V19+Z>$+oiYTaO+rHXs#f*_M#bXsF;sJx^0!7;>b|uH<>gIuR@=&`Xz9Z%A z)X%ox_?`l4_^eN>IvyiTU#)hCX69givq>C8Gq$d|eha|D_Ma?VzRr_q3v>6(!S zJtxspwp&y$$u!bABwtFA%N4yt{A+u!xke}TyDibVYSQ%-1{FhkC0~c=VIeIho!;Su z?)P7X;r#~Wjo+JSnu>Df!jJ18zl@VEe@S_pAI+SnWg_y){)(ihWsi}l(N-$VfJ$-=)#F{N(i}GP?9hPD?pfQ#Q$I_fg zkKY6#8dKQ=!yi#D3@6g9EkL`>rc%qlO=R!0zR_G-H*Ed_U9T@o_Yts?%g=Oy!zp(> ze78s1KhJFNE&{ajnMnV-r1AaZ`+_F=t(R2xH%Nt!hgwsDaJZ`9KmH7Eq7Ov_0W{$- 
z$0YP{KcL#9O9msS7CJ^zAzLXRv4f$0{qP6(1J*=3n)`nSVW0sOh=K=B4HY;wNJ{^y zNP?#L@6d5AiI3$}-xx!5{?(I7S z#(?xBbf0f7j2qZHDrc7;UqrJHh}bTT0ZNMrU5k~>8?}*|RNOsC_!IsK@V4X>(9DL` zf7?}JWSDG?0Ho9Wpelfc%FFiRHxJ|WR@qM?PknpNtQR!q=Cw2l`7zgvJB$Qs`VHug zQySY6Ru9@zof#?x@>ov=7(ZE%5wPr7=V= zK&x5>zQm}NaGPvD{Mo8CKvgdqHFag6)+?7*@@0Ig6`uj!^vff{z+6mb@B*lLBacES~9q0xK%CP3Zza5IpXX1;QbxV zzXK|?BE#?%^vZAqWbkZ4h(`GeWT70h^vduAWQc4+U_SHfhD|!t0k!_n=3wN=qDil) zd0RGZi$;I)#j_1^&(bTMGL zgwLWgG25|iAE*GA-sfR20OC!^2XF-|4V%?pzwn!MU(=*3x3Wm@f;fx)>ujB;g#I?k zxH1X2XXrbv9*5pZ9jB5TtZRAIy_7dOsC`f7^{8p`A5^uPtuUTgZ+~rNQc~X}-k#)M zsE4&%sDr%w^RDfYbF^;)e~k1d)kmd(-<`eQMasS_f7>3=;vYSh$M2ZwXh`goT~9xD z60BUOoYp$#3^a1$5-|#-7BOC=ztKU()rFSK=hC%*Ox3}Dz7u?*YF z5Rkh&Z_j^Sm=zA4q8_&4qZZvp`WSO7C|1mGvfj8eMEHZceEYfIl`nnNP?Pm^<#tjS2no9Dp zd_}BZqTktS5;{0u4cAO%4BtYaew%@oG)7IgMMeF3BYH@ro_5%*oCp{cM>pe&qBUCj z8g*+z_!g2)AK&?wlYyLsHePH)iL6;bMp29_G%@76`NWZ{Btlix;Acvn$2RJ>5PF`& zX3Ie!V%!qK__ik|vouJF;i2FM`jw5Pxj0*|x$#Gan_Kg{t?8|5(!H-Kj2y4ayZMzr zv+Nu8O2UfG=?QfRmnNKj+Wr5Evm}dqez%~swUL}VtoqgoMD+JAA?g15BZ;SJNEj;r>kow?9=!%1vAb2H4OV(t8SA4YORGfZ4 zo;`Y9Arhn4qY*q!E7N{&Nz4t6><&z!Q{e-Aoj{dt&ueQLcR~*teR;y>f6tOXT9u3!SyOa z5C2U>R{7V#S0XcrWkp;=@z=plOj8Zp?@h@tcu$0IbR!2H-|C0@GEJ?Usb;cU{hztS z|I8a+{|{O5#fY-4t`rbFL7(%toxHoEOYP20FUfr&QQY7OVF^1@wOVEesM%|+n)$-^jq5oU z99;LK+4rv>%)T3gRPUpifGkKQK&ouXXUZwoK&bmpwbKqC69AuVyr0X=S7QOwY;H^> z5>2Rzuj3Bp-o0mL{{a3LhQRDbVoPo;Ez%qDZVDh0A^Ye5J<8E~u~`*8WS?wBg@ZKO z_p5)AA7$KYWYoSfSBYMqTT)|O*7!AWa=4fC2hvtGnP1rwEP71WU<1kTZ_?RDW-rhb zxR(1YQWz>L+|?8kNnuH)aim!Gf{}`*>9@>JXpyzbF5M*16G*WoPzSyuNH^)pZ{Yiz z%AO+4X57T6!)_^>XUnhUTWmXJEfXyV_)ghtwssm_SJ{TmWYYomfl!A~d_A_N)beI{@zc+h%F89xXxu3Q0}lf{BhV$*A04`z0}6kwbHhm<9^0PFNa!fAZ;Hv1A0P$`=I zcope3^^f$|RFY^QF}tjRwR=m1joqm~m*S5;iu~A9bt?Gh3Z{h9kymfommZVrIjeL& zHf;CS3FXQ7kLi}B$J#V-mD%0c;QUs!ms4c#ESB~rV~W!AA(`)uY%L7i)Qpg|V(P-1 zQWeAeGw%N+ZlxQ`&CX2Uy7O2tphBry2%B9p*@j>z1J7duKpWjdS;Nl48>>Uu(2;)u zFSJR|J7Sl?<;O6jur9MZ9|CbrY~bnSfH3_jT%UZ~n_r~s5n*EVzVK(K!~qszprq)Z zal&JSTZpZ829*~DjSFmat0C}oC=T1dWjS*Ga-nuBlwYy9vA+T-D8U26x&}4C!I{D%^&`CgStX5t5>cjw&V`q5jGQ>8^TYNuMiwqin;Z8hD17Qi ztR^Cy6GwgBSZj{NRg?~gp$ms^kKPcB>B<)b6^SCjpWzm`mjjwSaL-bc{#;TZi)Mp2ZWZ_(9`VP zxqR_}ADhPL#jzF|^8X&6Nf9t_fJ<;i&9V59;KV^cnXM4=Fk5xA;1`*2yO*$IxFt$j zKQKyDXA{! 
z;7@WBU6O`#LZyu0h$6-LAWNb710>?PitO2#itL1_ifr#9dMQJV>1_MonjXv*dp56f z^0Tna*`b^)qQYjnN1te#)#Y;m{6PTgCoY()s_s53PQy#hPA_WqE$l!%j1#O9nh%i^ z1!QR}`*@@${Y+}DL0I*ZEpV74K`O92$I6{b>J1vni;q({nN_hFY< z-!(fs)!anHRqCKR-CMoaaQlr_@qRc8#IKW4Brh!kC}x^3 zoqM(8vIh%G<}eWIqkZc#kB5V2Uw-Bxg__nKj;&ff0CLwgeC2S z+9Uolb#~0Phjl^Ct0&}B{FZjg(&|~nX3~TUj-3OV0mI$sg;7E4x|0nxbp-wL$uCcK z(FBYSq|c$t1e$oeJ`;gPX(Q4v@c>{1?;0Pg~8(oVK{|d;%A(GdbHOw1| z5?}Dq%Akl&FQ$a3c-&fESk%G%pY}JA_;ggrLiUcLcunh!|8dEh$PSRbP+%8hb@|2$ z)C`V%AC}`<;mRAb9pa-X#wr$y6`;E!a>`_}X!(bQT#Q9)1vW4dIeks#UU^+)+ENq| z`|Hadh79RnutT%#e$$k1MI!fAF;<#ztU&v(4ODE|Vl0*FB6c^$Sb;yU>dz@ME*kesnMzEz)BHzDTdF4}oC39erUKENn9DZ!E?>N^k$u=EJk zN|3HiDQsTL^MO+ws`mX9@w4No-9Njt)y?e_+6TjXT#D)ImViW~s}H*qI^lz~gzpVm z|ByNFUR0boCxrdPpBVVelTOa<+(PV)uWzZDM?kjNlpiLQd!z>}_?_N_-o64j;H(vT z3JUUR82I*QmTE=Fa$Nr&FXh!6+)G6j*V-sH`N_b@iSFje3HvqrL)(68Sr1!KEE=G_ z|B5S#x^@Zo9v8hR^_u=9L>+cDW#WsC~Y_vKN+5z&pkk*t|7YLHZ6^E zQAD0}&)ca ztLFxqW}pQQuNQ=X>tVp3j^6UmsW-SI^(>|f{38W3Azkrc;=kP2(6#}GEilW*HV1kM z6gw2yL;DVM-bI`()wn@qqh3};)_0Y>L+Nc|y??EV5dW=CDfKW>I?kH%_N;-}_jpyg zhz|8(HXA2o>iQ$YYLna5VEw>=&nsws)GXlxW8KwQzqlK@EH*TZbuPHi)4?L{&Y=Ym ze)S;E`B2Uc>?Z)SZwPY#ADX@bs;%aE8z}DXUR;V7D_-2)TBOAt3N0R>P$(34cPkWk z3l7EIU5h)xlKbWT{m*v}d-vJdoymp^*}aq5XZYUam6*fJ zo2i0n>JVVT*E&UL{X*}0eKQ~MOGd>TY`akGc!r0szd5`b#dI1|Id<`>FD$ z{K<6fo3uM460)qq_<}Yul38zY4q0E$2_Y%-!YlisnYIMG}|~^9(J}AqVus7_=cBxa_k8!gVs2v8KQRk^V3sr zJU*Y8l#pd^oyn!w{t>RO^R4F=$f15{O!Z(bS6xZu3aKBFohDo_|@(&7{Y0rq`SbeH+IbAxkw(q=$$#EXyrJ_Mm}aS;ovJ zmz&~6ARcdH8n;rUfxV=L zUBPkBjPwTdkuZU1_SeH#q!{E_qbVhl=_gn+O z`1jN1`gMubu*XNDTL7~p`{C&KNLK0OPL_$;&~4+#*dKGS(Z8o&VSmfNOypJjS(#{b zp4||i9wp~nXja28?{6^;YDx`1hTY%#)-1l7^V3*jehWPDww&FBD=&8Vq@&B$$tn>>Wgh~+n%(R&gUEKFtc1b7Rb&8Q77aYN%H zFJLnggExDTiMep#MIBy#1%(XAOZV)k0}|xDxB2mwze4tFf5 z+&u(=^N_AU&d}z*F(98=3=lx0_;BN%GbBXU?)~AWHe{$P$Mv$I+IEF2?EdalK%u8A zhfifI+laYk!%p(SKX2yTKyWiKJoS2}^{s#SJZjhO+ms8z&bOP7>zj{pp;C`i<@!l) zwhtd_2!78r{>7DicN#h~?juS!?Ojjk?wm=e9~eyNbt+N(Jbb}L=sdbV&&j=EUilP8|6x_2tg<$H_)>e%|O-&>VnxIG{= zq#ou$YYQF6oOoI#&blMZV}3-=K<9^)&1upysKcP~ILR5Pq#gf=81Dzea*R!`VquAb4%J?Eow+QNnA zZiY8cllQlpS}cJ^Q0aon#IW`)D1)!h4125g1`I-k(jh;C?QTHj<3KW(YlUFtt>+=o z>n9=p^g>6rP;YTLixfA29?*2lY4)$aM*2_!(Nhyjd?+KvLcwPCY#ELH(y7!Xhi9Wc zlzREHDJ3MzN5UaZEeD`I95udxKOX}p>smibJtDKFd2h{1 z;IF`3$;+WSAil6Pe+!ly2l9_7><9S@PivoV;D?3a(Jqgm@Hv3aH_Z0kf6ifIDH28g z#;+jEgrBu4C9FPiO;!lBWhvNG9FNovypB)cqFq4wvm#4e#oV4w8u0z)1^&QXwzvSK z9=w>(UzZG9wl>{@%!rQwlH`LTyRXzRmZz0;t`MnKcy zDPr#GYsTkDFifCZIARRwdSwDRD;)=zdjMbZ#6+D0ozp}_UoJp^_#a?@IeRQc69(MX z?SoX;9A{yV;V4NetI+_`O99G>Rgi`iO_@=lG%Sxde!g4*#pnoq00a0(;?TmOmg@n#D` z#j98o_tf}hCKwWT?fCg(UMKE&@;&&Q^HanSu3qrQDc7_wS$etF?lu{GDm{XII%L-G zkBiUw^lvWZeaZiP%frau46yN@lhvG7`y&R2{)T@2p=Ogg{Qe##y?=v&Xi10PC*Xh1 zWi?suFb^xjw#B=;f-mwXW;zgGQQ}g=4zzSW2V~uif-k*Qp2^qdo1Defx0ld@;YshA z+;UYZ*PzAeCT{xk3$l5#L*}@%G3dtE$M=5mXvBu(L}~#bEhgtdWz@7rc2*;DI}6L2 zX;OKs?*A@RB#!kG{N&}#uM(P&8NdH`d6j6wFG3TgypX?%gYEeXMDhL?NT~RJgOZH@ zGTs=*&RE~U$XNf56EIvQr7=zng+A84!ZttiGbB-cjS!2*$B`d+lrGpUS>E`$EWs~_! 
zKig*$SziVAOE7}e_k3yC(0?36YKua{$~)WaX}n|<)Zxq9HLVdCRi z_yW6lQ%A+=hi6Ue#R4a0+XEwONND~@j&^5Kr#r()my)?P$nEvMbUo#WOf{xT z;SsXzE{6a8>#61FZX5jIF&i~W4PNW|oiFRruJ^g7{=tn!JQ+#Pei1#sGitbti5dxw z8^1KmQ(NjA7rL7cwt6PFYW(e#CG|XgR?yPn2}70rLaYNAoHs_j-)2&st)X$X;5YV1cZYDo39^u|-k^}KVRE&Q8oBY*Z2iZa zW%pWs$6-l{?|qL#WY>~(d<%_{o5TvdrhCcmMuBr-g`gNBW1TA*zDsAjNc+JqU;K($OIP4eVlu!a{dXxJSib#h{se`FXbpActTYQXDoH*DIGNQ~ zc*o~i0m7M`=;B%+lR?Xz7gqQO;GTcbB!S0`=bBVeeS(#$pC{;+qVY(y4?sKh+t*V6 z*za`tTXKI(Bo_0NUr>iY!UpRN?}5PplEZh;-@DJwD>AV&ItOhbr;Tzv6K02X9`|jI zJnf)|-G*hl$CoYGbWi{CNp+0pW(|wO{ov;)+f3VrEa=q`jtz+MeVc2>;!uFVVlQ;| z*1le9w!Ixh+ddzlI`4RHD#tRm@lTK&_>k8bwg3y@8jBh-Sr!TjYrrRE# zlJomPLVfBXm7?$bCY$9@FQP~B@70%CPw~!qsJC3p(5Onv!`lLTNm44ratxv*rUl+# z+qo-V8VSk%zMgEW^x`3Kri-tms2E8_^d8%mZSQP1R#AL+T94;BeIHrHo2%ezAR`(4 zq5S|{W$_~;;aGHPk7iQeXfW!4vEwH9>Wj#C!4qXYV>a#yW+kI+eBWgs>|0l_cT1 z_;|#yU|#>n;vC+Ox1Iwe|3B> zms=s;RH^^%4IRs=$t|AH;Ya;-2?pbJ0f&e-;i}TM>Ux1`kS{zLF3NFFOKj-(O+Eeq zW52kkeZ1O{?K@=$L~fB2;rZVvoUV>TTN?=(wEk}#$T&z-6#eGh!6&s9QI1DWuTatY z^opK|Q*Q5&o#fsLIfHjsco{=00eGe79;{e{_77p)_ z&_g7AkwLN3(nfrfPg;XcvQ@#s(Ty-Vk<#~1!h(pSTT2MgNR6I<5&tMw(K5SFPC{bw zngK2%DMP_7#Ejb_*AGG-)k36|RprGY*$SUSYRX4?m-k6#_z-TVaSmxAAL#|H6KzZm zC9M)J#UGu>kZ@O24+$=NxxJ;WiQ|w837;cWL&j=R;_gvs!;jkf98b*9GuBJYu*KSO zevwqpE~if6FjC`0Tws@IUCW}1HWx$+C40-kF2O#KcEtO*Ne?_fOAWFVQ_P;tW=M|$ zCbwWd(LS?PDG3AMnRFOS~*^R#h@9lNq= zn!f_N_Ju)vWO_&bwY}t9$pwDohSL9Kmt+22QK(tQD*mWqu9$71IS*#Ya_Q2NfJU)p z!@p63&H}o3ll9L@rW>rGrl!Ib&91Jb=Uira8=XuQKGm*-Y0%s{#PYOjv~<#WzJ5(!E1*ZMwZEk<5F zWQ2}uOa`MS9Qt?xij@SC5r39!UlRXk)Rg{-nk>IJO&UW6N5H0p2m#w1?pITN`KcAOCWzVEImKW53v zHuv)5eAE-Z^8DTQvsrgu{awxhULsDcK@>7dnF*6poF+Hndjza91kyM}uDn-9(&!Et zDnTek2v~M9+%niTL6mcZdk9$n5J~Zox#S7F5fi$j@-Uz*7zJ9AEyS5Vh;>0Io`_iJ zuSm6i1oj}?cO$K6sp6w?<>4Ah)BX;`c+$F4#gCX#4w2P^5;7GWr{)ppgOTIQo|wzt z&-2Kzz=w!w!-uHof-|1x=c-?oZyq}ScAt7FzW3#L;t~5I+q<+~o^JoMI3Bp;nF5%* zH7fwGq^He4DJLe<$|YUkz~=d8U504>9xmw5Ua^BF5|>|!=qSV# zXKm=isNYnD(IhhzsW}#nwE872$h;rvLSub=A?hM}t5@zKvCF!rSuvY?TO&%QfOan5 zMsZRwE<2J1%k6XwcKq!EprK?(h~bd*jCK?-wuyW9u@^Arsa^$&6XCO<4$x-S{6=u?HX-+``=D8gMXt);k8)O)oq4phJ9yO%@7A@-8v&QUHlIGno z{PMyd0Lo*|!ZqXhnu|M84B!fe&GwiK28bC=v}mG*^PkW5=xhXtwKSuYerHv6lc`w> zneDmZ5~18kebQ)+gxl?& z!8Q2&j@0}CnMUH1pxMK{W2~D1exNjIRALg8jCBY0PX`*=8hv|FVjNArx%K_SU5Qpz ztXjRTnCtRMiI-^DC5QYi%AsxC`ZE(8sC%OO=)q?y$rI`5PRF??om?4=@(GtvCO9=u z;Q`&{)Z4qx&SKU^eIR&zpz(?x$weyJPrl|&sXpoH9DOCFY{gK7Jv^Ipogq?h)?_Uu zSH*Z}%11O$PZ-Y&4FpUyMUdu;^r)dCQ}>3m{bSeOR|+3L1A|b*-&=BcA_2pl57790 z!M_ZW@`+?ShJrqiB#U+-AZ-E-|6f&qnF0-%`851vBdzk2*uOJUsHP*~>A{aa3!^g{ z;rGm&VzI~BG%D$q-R^7NNPWIdH#3EObA-|rvQ-1t9NRC7=EtB#kF$>54Ac{{4HFn7 zO4R`B%!gy@B>FPES^aW*(j0E4YAcnVu5GDKQt`+)QR*W-5~vV(4{lO9RJ{M#fbr$E z{y5e@6|}Wo8n#}=;^LVTVS7wGw7J3W$j$0{lP)3Bi`J!uznN%rUzhZ7G$i1&-lMxS zR4w8!48~pT)i#R-4l)1c)6aNvyDJLM&e_XB@fV{heVYPA>rz#)g@kJ}nJJV~N0d%G z1$k4AqecwrpKQI_`3dnfh^wQEuaSxVF@>RvE_W!qpfzgzUAjD3_jlauxpdfMX5uy{ z)O;Go6SiUvEE;`0|3#cFlY0`4#M}$xc?U3m9P&AONHskGaTF^;HOqCDfc?>zNiVd& zQsj$J^ZiLsqQ;)t(n22+mIv2~rqWwdur2JN8PcjU8S)%H{dMQ6d3mydXB$@CbrA*r zSF8GBo%!d}()`th??na>u)!vIr%HEoL{}OC-fUP8yoaftwkm=T0<}CFp+;_lFx?x8 zQ;?1(FYtILaTCDqpkM_0mO8)NpM2etw9)urbj0}7W-dq9&pST3?q3^plf1fG-I!Bu z0bV%8a&7oCv+dnaRcmI07j?ofdZ{MX_r8M&|6PMZ_yNJifZWYS2?WvXw8=cmTi|?g zMV_Ez#S2X8EiAvY9kjI{v~?7;br!UB6|{91wDlOY1r6#(261_eyks;P~mkM@UJ zf0C^_srF*JFmV9OK@>$#05U7~<|om01_#X*2vuYUv8cJXXA_uN-zL ze(lOF=IVFSw^&JMu2LVsxB;i-;9*^jb-}Es@iUnW__m34`pp&9kqYW6wkmSUzxpt1 zm>&1d%qqi)Vo3O-XtZk+(+{%AE*6&@SWxFcTIv}D$1fB|zoV2y@Z}@FCUS93-Y}8O zkg{dN13Z6-8;xYdHI?+;X3+IvB7ughhG^&hIGm39)Yl;0G2Pto9GqVF?XHkSL&`({ z9_9POw2;M?p=Tf36?bGmxJ-vtQo+_QBDa-n5J4wFu_buJi=;T 
zMLnt~P)lG5oDzV-KM&C7JI0&)>L$R=w*dhYD-fR41RlnpvIcE}+>&tv>)b zq*#N1e38zdK~Psn0Dx7!+XXx~d!If7Mf$=wtdko2bdnnz{#JpYo6>55gTmAI&6Hk4 z)guF}w;)wX)e}q6bh||VjjqD|8-~U4T#zjhh|eK>FA98O+I8Hiay_5in4qgJ`nNn6 zU=@YRIgT!d$EtLRdJkNmRRJblevQffO$QPCzbr%dpHGqgF7runAL@*!(Csm#(4loP zKC%~t?@8sClQL|!?jCaeek?P8>dxhO$nI{VMQwNAt#%Y7!tWaw7)Tg)Z`Wk8V#d zI}+MI(%TkMZZ0&XN}k)Nc9^c|t{2u2vBh!0vJ@(8{RggFJ70EVG9lfQfv z0{%oVBZ4R)7gA6?N4L2VvPCdT6v9@C&lW!yQk+u{BZ4^hJUbG?heHG8Ah#S-0)*ZG z7kLD9w2cP%%e-%9L5>%=!Z#{M=-fE6B^WzY zY-4BKQZOHqCuj*`ij}-m7`IKtGT%SI>kshSJC$+UOf2gC1H88QuT75MLXQ7HjxUma zFc_MO7@Ub1oO^Uiv~4H${N6ibLXx1Wq+T<$`ahA=f1=C@F@kea4>N{C5)U(TGCKT_ zYQ@VxJN`*upoTvf6mw6Csdo)-q((5|f!jB^0U3ahhkxIqZp;ZONfqER7brewSJhf4 zkQ{sA^Fx&H%KYVLbm0K22IpCBv3y!S{nTMVtg7VX%iovBd;cF*h$kfHO{kRv)3#@_ z0gdgC9s7Af(hs%4pJk*68b5VBTcqL7jjI!ESZUy)V*-R|7V(MusBD^^-dVszI}IwG*K#o_s|=S96Q=Ke9V zKVY3SfIzs)X__osh0!HCsxdUfETodu>W;?k-^IuAS%{~#V>oQ_7~9vC8~7~?wT*=W zn8|8WfaDyYPQ7{AxafZN4)`{(?gqQa&r-@%?Baj=r7rRep*3qJ)jQk8w(rbU2#yYs z@9BDX(v^cv-B>q^oxN!NO1XG)n*@)yHQUKZtXmLqm(^v_njE5%l6)XurFWf@sInG| zMBCcyPJD^E&1M0JVv@7fsXQ{}t8b%QFlZ8A_xrj3&BqsTWwm*F2l3nkz2^70zH43f zHQxh~%z+}}mG`aTyRmyYg%lT!syM)>0?_pm6@B`^qmHmq2g@CM4H#C0LCw3yfPom- z5g_J7BMt!bvOh(9nQ(ngG~oPqQdLr~VfFrR_@wUwz$tPnEYz)kmSZ)C11t6#j}7yG zAboVR=zKzy!J(90a-oMW=Z)pVg~o{`GL8D_k;6XBBzG#Pnr!_AP3l>b&dm71ek$oH z6QJmM- zteD=7r&8WnrdCOf;i&l?=$+5V_>8rU_iL1vWEg8N_yxYw%a#FWJkQ`1zN)*;f6LWC z9c^T8w|>*c#e1^a$9fdi0oK=Md>~0E318(d;PEhYz=PMpT6xs23Xy81`F8$YU}8Jz zW>9g22fzFw>E^+K%#KL3YkPFB#pRsi_--`j3@MAsL;EN&<_K&@f##N)Q91s~u)y&$ zLVQ}tyTABfhWL&_m8n&CCmSId(ZUkO7gpIix6&j>bZieFk&3r-M}5j!d#wo_h@1i< z=B19uBid!E+CTjKW!7ohV3T>HT(<@&my~lrjP~!*1JDn0Pn4$@>h*W7)Y?pAz!c$$ zPyMZ#IKpwtWA2CZTA1#eJrMk6e>h2zfX@)+(Y)BJ6Xzq-`0N`8(_d6p?L5V`LVEMl z3w4V;3kDF9FMs4>j7y7HePlmB!l7+2M4J@3Y^EL;)ELG=FKW$19mfN>@xbSf+Ak z9NVCrWv6gs$><~q?>+sKM!Sz36Sizh5WBppKwIBD|IWrW_BySzbI{`6#*)|1FS6(K zi~zM>*y^pD^w`EAUs@;5XI{T3c-P6qnx{&*2%C+257+okMBxlqj0mU38TITZ(r-xv zT<2cgHtzCVHtw2^d{L(3pGXbM;6i;W8+SaTFfNVZotKNP#qKAWJIbxT&nF2tbpHxH z`O6k)LrJ?zG;+d@GEc6m5kDOY<@dq=Nd`{p4C(VLelbZH>9Dna6FvaRVTV0FQiyIW zBYi!AntR0FfIcBZFI1ry96tA=Rpl^qVZnINn<7p_ZJY?Q*c7EB+L{5^FUjYKClnUd zPqZ~7CkW@A?xZIOfwf2{2((YMCkR~+X?`@_Tto|q)N@0HxLf)0&P=qAV@arv^Q zEFMnTqsiZP2_N}n6Ws@|bGB;WM;{e9{3<9w?LM1sq}#2iLP6@IF*CKy*z5Q)hYmu^ z9SyWqiX1|^uYHqQ^0B^nWTqG&euK`weMnVJ?Rq9~n|RDhMF`fd*^)P`5<}q$a$S2H zM6gjljmq1rys?a6P~QgXc6RXs(1%**p|9bqBP7;sL;VCsfa_WDtk_b+;ROWIRU}Oi zPE?j{&|R`-cQ8>ObI~KcMfjkiy@bx9W`k*e`7seSk)E8Ge6EaAU*cbp?JF^eu~bWK zHSI*1a*k@$g^$hDgyV3AYU=lh51{arg9@%dUu3O^zYu}q32uX@wHGJ4abQ@Vzj){+ zR;LU&{BNH>8yK>i@n#X!IzC$4-rQg*l|ErC;ZeHf>vVPs!D#vxz+g`G=tpk2+bMay z%mf=}u63oLdmc9;ys3K#yeLzur232JeH0kDwc7d6VvQL%^+nxhyfWowi&=Z@zi-SR zZdsUOpqZKxV5ObK)tAeOA?3BF$o@)UrH&QFPxB^(rElZN@5FNJ9QcYQRLpU%w)yR~ ztrcGH%^YxY=;0>gY~LmEv;7@N`Xrk@S?t&(_in*y7dS>1OiJ*m*Ig_u!W{H+;>l}c z;Ty9jJgT9xmw@5C!mK&^bw!HuR3jh+#)}co_hh#YSf-015@1dmsu*C)U3E113{gu~w zktVbn>~%-4m#zCa@_p~Ro>9$Sw1b0ui@FrCZRBX+Sa?BeSp{1sEWsHh1RCQL&Yx)?Fu#O8dj^BRY!l{a<&cr#FAY)IB;zx{mfGXaUb|#2Q)9G zyx!O`S#M5s=6+t=cv{gmrLw_UJh;Vv`!ekK)$$i)Vme)IKiqlYBYF8GFZaUqeCVfU ze$*I8YRj^pPh3JE!#fNYX*A29oO*)jSS~V)XatLSN^9RINC?UDmDWP5>RGE~7sm*{ z&DQ=l+Z#z^K0LYmA-|D<`?suuYE40iiB;FYY37|$j)Yws=f70i(R^&R=U?IQ2s;K{0t}$0`xI2LB=2B61#zhpF_t7 zdq*!NKr)|)2;jJ8RK1Zp1z1C?$4BSgDY}jT%pI@^#~AQn`C@L4d}aRC^vb;M6>No4 zZ0$WP=QV6pWf%ObAC!&{oGbx~2iE%sWZt&ud8k+B+=j1#*s1Zwqq`8muN~06J-YG2 zdQkxWS^%l=(K^79XtwJFki?P1AA3h93l+{C1fF7CZK=Z@+hB#}FP?9MI_msW1wf^f zo{aV)Tw9YKN$q8~nrQqJy}H(mE?-XaZ6!rQbQm&l3A;^uBO9oM=WhM$IXZ;22`>|Q z+x~kD>_qMSPfAa^>ua(a*Mebj~;|%aITX5K&1_=hXRpsB5Jk5Li+pC7#;oO>A 
zqJM8Nnr80XCZh*!&a2@YK=hq}$!|Dw17&AECucWRpHPP={TjkR#@hQ!glpX_6Z&u+ zKyzxJ%!>R)9vB$jV**kKY>&S1Q#Yrirlf7)JLZujagn3GNOQ+1QrNY#Pf#W)W z^`D2w?PJ#!5IG1~@iV}lHy<|dKam58n+p%QcH%FMG(`X1L+V}7^~{x&&xZ!JH}4PH zVt{&2lrAxVQ$gDd;7sCA-U0#>fM$;mx=`>F1oE{}^r}?orBio+pwz~A)n&ze?Yvd0 z;f&5FzoqY$xjpl;tD~64a_L#pZAZx$0B7ZmHlBT(0@uP<$I4mH_7R8IpqNEZspjF8 z-R)-~iOJX+g(G6hl3t-Y?3)*Yy$FOXAAHgDUQtu6&h~HC3=hO^IJwC9k~sAKjM5K! zL6?8+W@IF3=>27-ACiJDl@YgRk$PECLp5>GbE)wqCFEz2f~GKorU-+kD1)XL1(5{=1%obFtWLfbI_v^|WMJ#z2T#~;8_GW*_9i^- za}{p$VBmdTqj{dv>ST@KEN}{Hd^K3PsOAJI_S%vC={0lLT$wnF>QL%`ljY^$k9OAZ zjkZDKf;unz>IIwA^rF(B*(u%t0!3SCFrIC+zi^(l!Jf4ffL#PQ+B9pdjFFiBc|r{~ z3Apz-Ii(Anvtd2@BRX_!eE6Q(_=~xO?Ksx>j7NoJgGZ{O15WGepC5^x4Jd`Q&7`Le zzq85e)x|ID+ZUFe`2O$#p=2^tA)+jr z4pTNdI*oF?CkwqNoBT|9&{Qp40ZA53mnjRM~iP40>`*dU7m!a%_5X9C~tGdU8B^a{OH)tPq59 zdPE%=Es8}>d6pCI!gc{aPKPGZdOhH%&daC%+CKqcfA?TcP#A~xa>2SQb@yQir0@|? z)Ryj~61E`^&fssrl^ZlRNdHCYoD6tCBL{6rhkg~_U2JEk{qEE+H$<BSrfgfx#AJ#R#&V(B`{Y6`- z(KRz2(O5UL%DlH?5npdudHb3t-nt&sscl;Obc|rv**+kn=4k!1Gle=lJ~|`zvu(0q zlQoR8&nDn8N*&aJBmO-8x#_f^!fB$udi5X;Z29F-%6W7bw}#rZeExh2K`L!)k%Lz%m*gRy}v172C7c zf_YPruhzTbAJ)}T-g%7_NoyssLHl-(PSU*dyk&Q$T7Nc2@~wzQdDKah&?sL$ax)Qu#%GSKm_=Erl3es?Z4hMt(0sQ|YGB@g<$b&4X z1vi;ckrvEJb2UvCuQ;a3) z&Z6W)e2du_c+Lum8SiNb;Vb-D2gT*y_ZM);9f%{{WC_qMg;w(@%^u-ME(4`;qwfxZ zP$(Go7AB{OqlZJo7$TEy&44efrhv|%d9B7955oO=nzh`&uPWqS9Dah!u@<+3d|zi|zF}MAp0A*Jq%f%Y3(< z#(g}dmrDX3uW!i(JOpkp5~xB%(|z|0atlhoCs6g#ghc;~7Rnc1pl&Ig$!v!VW{ezz z$T4k-_9*zTZQqnR0uX9IU)dTyn;F2-1gI!j+UQ@8W{t{n8SC~~7*bgam>-uK_17uG zJUDydQ8fHl3~4C_{BnTe&?h>$Rwj_htMd8atP;=#@L(%DT4FWUU&n5zz&{o3A<eU%+qPPq?YosX~=*Wx@N-~_1hSo2Xo1a;AV=oNOjS2ieI2XaKZmSc@^ z2cR7Ufj2(1N}qPkj7D!)@)7v-*ZxrJwzkO2Bc5dJ-5MKouDaQvY@d71tRSN|qg40& z(PnB!LvJ3Qn~~T^E|)0>POt)+;72$LZ_@yhW6*;-%zV?GMg=NmHV}y=`Lp=Wd+Fm= zSS8Bfeo3r_8?Ewbns|b$s%@OrypQ;ySJ`u%g#-WHm+SjW$~7e2K0kC?>A*-LgT$6c zfIOR_8sQ#ct`iu|TQYsT1Nie{so)1tXax}U?Y*a~q}?~8C=@)^^OH*_qklz$%KxHK zEdM;QD49TQ&)RA<#8j8^A5VHu226XKHBHE_0t{76bpfxl4M#is+9Mu*Elye9!bv*5MIno#xC)W~X*CPC@M&r)l;G+`*3s{V7&;TM*j7omrj zUak+KG~9_|hn2+vBs0hUV@)JC6T@=9c0a%BRYVJo#Y0y|Kp&Tm7et{KM3bLk3Yy{~ zR-Q$Wz0qP)f4`b0NYsfjqZc&wA!y1fXv!&Q$}?!nKWHj6XzEwcR8r7X7O`?7l5D*W zllthK$0|~;%5La%x_Ab#3*Y=0w~0zlgGlCq6j|t`c4FbKBDMy}!{#$os)d((RUDJPIhz^0NzI4+1pE#_4#g__9T+p%!`jDb8?@LBSnFJZTsM-Y02gR(LFPxh;hSi{Pz#XfCOZZ_ z>j3&D07@u1*J|VC`Sqx-_k0U1N-T*d*;}A%$1^oK?Nf;K@6RIOeF}VM zuqfEO?nH}Nkg8#@x5k4 z>(1;WTSL$-6h|+4o-4HO9lZ5`<;G>^Sf&}IPD2|=-!dU zF;FUpj4vT<2+O?MNX-x9OOr5QLGIKvt5Ps+>@fdOMNgfb@}pVd!kod{;SwgB+>-eK z`aO}Cc~SVdiFpiEdXJ7GVNn}GMUc999{0DT%QcXPzE<3 z0CBdZi={%>1lgg~DCY=(3_yedJOx0L7BT0Eex0R;cXYeFmi|SHA|8Lz z(gh_(!t<5`jt<7={EiU*nGS)9wE!UIC-52xer(zdIq-?s@VK;@0&u-IEWa4SkZ0&P z0{U+2JHL+H|2YP!&U;MW(1qQB;qPqGctz&d=*z{^vl)*8#MN0B^hidvD*tz|>rp6k zt|OKEXx_%@+7GbS#s2PG_?E^}F`TZ!p3WzE9(I@Dtai`Z#8*mcqN@w~qzf5uiZ%=! 
[GIT binary patch payload omitted (base85-encoded binary data)]

diff --git a/env/lib/python3.6/site-packages/dateutil/zoneinfo/rebuild.py b/env/lib/python3.6/site-packages/dateutil/zoneinfo/rebuild.py
new file mode 100644
index 0000000..9d53bb8
--- /dev/null
+++ b/env/lib/python3.6/site-packages/dateutil/zoneinfo/rebuild.py
@@ -0,0 +1,52 @@
+import logging
+import os
+import tempfile
+import shutil
+import json
+from subprocess import check_call
+
+from dateutil.zoneinfo import tar_open, METADATA_FN, ZONEFILENAME
+
+
+def rebuild(filename, tag=None, format="gz", zonegroups=[], metadata=None):
+    """Rebuild the internal timezone info in dateutil/zoneinfo/zoneinfo*tar*
+
+    filename is the timezone tarball from ftp.iana.org/tz.
+
+    """
+    tmpdir = tempfile.mkdtemp()
+    zonedir = os.path.join(tmpdir, "zoneinfo")
+    moduledir = os.path.dirname(__file__)
+    try:
+        with tar_open(filename) as tf:
+            for name in zonegroups:
+                tf.extract(name, tmpdir)
+            filepaths = [os.path.join(tmpdir, n) for n in zonegroups]
+            try:
+                check_call(["zic", "-d", zonedir] + filepaths)
+            except OSError as e:
+                _print_on_nosuchfile(e)
+                raise
+        # write metadata file
+        with open(os.path.join(zonedir, METADATA_FN), 'w') as f:
+            json.dump(metadata, f, indent=4, sort_keys=True)
+        target = os.path.join(moduledir, ZONEFILENAME)
+        with tar_open(target, "w:%s" % format) as tf:
+            for entry in os.listdir(zonedir):
+                entrypath = os.path.join(zonedir, entry)
+                tf.add(entrypath, entry)
+    finally:
+        shutil.rmtree(tmpdir)
+
+
+def _print_on_nosuchfile(e):
+    """Print helpful troubleshooting message
+
+    e is an exception raised by subprocess.check_call()
+
+    """
+    if e.errno == 2:
+        logging.error(
+            "Could not find zic. Perhaps you need to install "
+            "libc-bin or some other package that provides it, "
+            "or it's not in your PATH?")
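The vendored rebuild() helper above regenerates dateutil's bundled zoneinfo archive by running zic over zone files extracted from an IANA tzdata tarball, then repacking the compiled zones together with a JSON metadata file. A minimal usage sketch, assuming a locally downloaded tarball and illustrative zone groups and metadata (none of these values appear in the patch itself):

from dateutil.zoneinfo.rebuild import rebuild

# Illustrative values only: the tarball name, zone groups and metadata are
# placeholders, and `zic` must be on PATH for the check_call inside rebuild().
rebuild(
    "tzdata-latest.tar.gz",
    zonegroups=["europe", "northamerica", "southamerica", "etcetera"],
    metadata={"tzversion": "latest"},
)

Whatever mapping is passed as metadata is written verbatim (via json.dump) into METADATA_FN inside the rebuilt archive.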
diff --git a/env/lib/python3.6/site-packages/easy_install.py b/env/lib/python3.6/site-packages/easy_install.py
new file mode 100644
index 0000000..d87e984
--- /dev/null
+++ b/env/lib/python3.6/site-packages/easy_install.py
@@ -0,0 +1,5 @@
+"""Run the EasyInstall command"""
+
+if __name__ == '__main__':
+    from setuptools.command.easy_install import main
+    main()
diff --git a/env/lib/python3.6/site-packages/examples/oauth.py b/env/lib/python3.6/site-packages/examples/oauth.py
new file mode 100644
index 0000000..b69ff3a
--- /dev/null
+++ b/env/lib/python3.6/site-packages/examples/oauth.py
@@ -0,0 +1,33 @@
+from __future__ import absolute_import, print_function
+
+import tweepy
+
+# == OAuth Authentication ==
+#
+# This mode of authentication is the new preferred way
+# of authenticating with Twitter.
+
+# The consumer keys can be found on your application's Details
+# page located at https://dev.twitter.com/apps (under "OAuth settings")
+consumer_key=""
+consumer_secret=""
+
+# The access tokens can be found on your applications's Details
+# page located at https://dev.twitter.com/apps (located
+# under "Your access token")
+access_token=""
+access_token_secret=""
+
+auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
+auth.set_access_token(access_token, access_token_secret)
+
+api = tweepy.API(auth)
+
+# If the authentication was successful, you should
+# see the name of the account print out
+print(api.me().name)
+
+# If the application settings are set for "Read and Write" then
+# this line should tweet out the message to your account's
+# timeline. The "Read and Write" setting is on https://dev.twitter.com/apps
+api.update_status(status='Updating using OAuth authentication via Tweepy!')
diff --git a/env/lib/python3.6/site-packages/examples/streaming.py b/env/lib/python3.6/site-packages/examples/streaming.py
new file mode 100644
index 0000000..e90bc5e
--- /dev/null
+++ b/env/lib/python3.6/site-packages/examples/streaming.py
@@ -0,0 +1,35 @@
+from __future__ import absolute_import, print_function
+
+from tweepy.streaming import StreamListener
+from tweepy import OAuthHandler
+from tweepy import Stream
+
+# Go to http://apps.twitter.com and create an app.
+# The consumer key and secret will be generated for you after
+consumer_key=""
+consumer_secret=""
+
+# After the step above, you will be redirected to your app's page.
+# Create an access token under the the "Your access token" section
+access_token=""
+access_token_secret=""
+
+class StdOutListener(StreamListener):
+    """ A listener handles tweets that are received from the stream.
+    This is a basic listener that just prints received tweets to stdout.
+
+    """
+    def on_data(self, data):
+        print(data)
+        return True
+
+    def on_error(self, status):
+        print(status)
+
+if __name__ == '__main__':
+    l = StdOutListener()
+    auth = OAuthHandler(consumer_key, consumer_secret)
+    auth.set_access_token(access_token, access_token_secret)
+
+    stream = Stream(auth, l)
+    stream.filter(track=['basketball'])
diff --git a/env/lib/python3.6/site-packages/httplib2-0.10.3-py3.6.egg-info/PKG-INFO b/env/lib/python3.6/site-packages/httplib2-0.10.3-py3.6.egg-info/PKG-INFO
new file mode 100644
index 0000000..7c3e4d7
--- /dev/null
+++ b/env/lib/python3.6/site-packages/httplib2-0.10.3-py3.6.egg-info/PKG-INFO
@@ -0,0 +1,63 @@
+Metadata-Version: 1.1
+Name: httplib2
+Version: 0.10.3
+Summary: A comprehensive HTTP client library.
+Home-page: https://github.com/httplib2/httplib2
+Author: Joe Gregorio
+Author-email: joe@bitworking.org
+License: MIT
+Description:
+
+        A comprehensive HTTP client library, ``httplib2`` supports many features left out of other HTTP libraries.
+
+        **HTTP and HTTPS**
+          HTTPS support is only available if the socket module was compiled with SSL support.
+
+
+        **Keep-Alive**
+          Supports HTTP 1.1 Keep-Alive, keeping the socket open and performing multiple requests over the same connection if possible.
+
+
+        **Authentication**
+          The following three types of HTTP Authentication are supported. These can be used over both HTTP and HTTPS.
+
+          * Digest
+          * Basic
+          * WSSE
+
+        **Caching**
+          The module can optionally operate with a private cache that understands the Cache-Control:
+          header and uses both the ETag and Last-Modified cache validators. Both file system
+          and memcached based caches are supported.
+
+
+        **All Methods**
+          The module can handle any HTTP request method, not just GET and POST.
+
+
+        **Redirects**
+          Automatically follows 3XX redirects on GETs.
+
+
+        **Compression**
+          Handles both 'deflate' and 'gzip' types of compression.
+
+
+        **Lost update support**
+          Automatically adds back ETags into PUT requests to resources we have already cached. This implements Section 3.2 of Detecting the Lost Update Problem Using Unreserved Checkout
+
+
+        **Unit Tested**
+          A large and growing set of unit tests.
+
+
+Platform: UNKNOWN
+Classifier: Development Status :: 4 - Beta
+Classifier: Environment :: Web Environment
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Topic :: Internet :: WWW/HTTP
+Classifier: Topic :: Software Development :: Libraries
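The PKG-INFO description above corresponds to httplib2's single Http entry point; a small sketch of the cached GET behaviour it advertises (the cache directory and URL below are illustrative, not taken from this patch):

import httplib2

# A directory-backed cache enables the Cache-Control / ETag / Last-Modified
# handling described above; response.fromcache reports whether it was used.
http = httplib2.Http(".httplib2-cache")
response, content = http.request("https://www.example.org/", "GET")
print(response.status, response.fromcache, len(content))

Basic or Digest credentials, when needed, are registered beforehand with http.add_credentials(user, password).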
diff --git a/env/lib/python3.6/site-packages/httplib2-0.10.3-py3.6.egg-info/SOURCES.txt b/env/lib/python3.6/site-packages/httplib2-0.10.3-py3.6.egg-info/SOURCES.txt
new file mode 100644
index 0000000..3ace320
--- /dev/null
+++ b/env/lib/python3.6/site-packages/httplib2-0.10.3-py3.6.egg-info/SOURCES.txt
@@ -0,0 +1,28 @@
+setup.py
+python2/httplib2test.py
+python2/httplib2test_appengine.py
+python2/ssl_protocol_test.py
+python2/httplib2/__init__.py
+python2/httplib2/cacerts.txt
+python2/httplib2/iri2uri.py
+python2/httplib2/socks.py
+python2/httplib2.egg-info/SOURCES.txt
+python2/httplib2.egg-info/dependency_links.txt
+python2/httplib2.egg-info/top_level.txt
+python2/httplib2/test/__init__.py
+python2/httplib2/test/miniserver.py
+python2/httplib2/test/other_cacerts.txt
+python2/httplib2/test/smoke_test.py
+python2/httplib2/test/test_no_socket.py
+python2/httplib2/test/test_ssl_context.py
+python2/httplib2/test/brokensocket/socket.py
+python2/httplib2/test/functional/test_proxies.py
+python3/httplib2test.py
+python3/httplib2/__init__.py
+python3/httplib2/cacerts.txt
+python3/httplib2/iri2uri.py
+python3/httplib2.egg-info/PKG-INFO
+python3/httplib2.egg-info/SOURCES.txt
+python3/httplib2.egg-info/dependency_links.txt
+python3/httplib2.egg-info/top_level.txt
+python3/httplib2/test/other_cacerts.txt
\ No newline at end of file
diff --git a/env/lib/python3.6/site-packages/httplib2-0.10.3-py3.6.egg-info/dependency_links.txt b/env/lib/python3.6/site-packages/httplib2-0.10.3-py3.6.egg-info/dependency_links.txt
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/env/lib/python3.6/site-packages/httplib2-0.10.3-py3.6.egg-info/dependency_links.txt
@@ -0,0 +1 @@
+
diff --git a/env/lib/python3.6/site-packages/httplib2-0.10.3-py3.6.egg-info/installed-files.txt b/env/lib/python3.6/site-packages/httplib2-0.10.3-py3.6.egg-info/installed-files.txt
new file mode 100644
index 0000000..f6e49f2
--- /dev/null
+++ b/env/lib/python3.6/site-packages/httplib2-0.10.3-py3.6.egg-info/installed-files.txt
@@ -0,0 +1,9 @@
+../httplib2/iri2uri.py
+../httplib2/__init__.py
+../httplib2/cacerts.txt
+../httplib2/__pycache__/iri2uri.cpython-36.pyc
+../httplib2/__pycache__/__init__.cpython-36.pyc
+PKG-INFO
+SOURCES.txt
+top_level.txt
+dependency_links.txt
diff --git a/env/lib/python3.6/site-packages/httplib2-0.10.3-py3.6.egg-info/top_level.txt b/env/lib/python3.6/site-packages/httplib2-0.10.3-py3.6.egg-info/top_level.txt
new file mode 100644
index 0000000..fb881ec
--- /dev/null
+++ b/env/lib/python3.6/site-packages/httplib2-0.10.3-py3.6.egg-info/top_level.txt
@@ -0,0 +1 @@
+httplib2 diff --git a/env/lib/python3.6/site-packages/httplib2/cacerts.txt b/env/lib/python3.6/site-packages/httplib2/cacerts.txt new file mode 100644 index 0000000..b2fe5f1 --- /dev/null +++ b/env/lib/python3.6/site-packages/httplib2/cacerts.txt @@ -0,0 +1,2167 @@ +# Issuer: CN=GTE CyberTrust Global Root O=GTE Corporation OU=GTE CyberTrust Solutions, Inc. +# Subject: CN=GTE CyberTrust Global Root O=GTE Corporation OU=GTE CyberTrust Solutions, Inc. +# Label: "GTE CyberTrust Global Root" +# Serial: 421 +# MD5 Fingerprint: ca:3d:d3:68:f1:03:5c:d0:32:fa:b8:2b:59:e8:5a:db +# SHA1 Fingerprint: 97:81:79:50:d8:1c:96:70:cc:34:d8:09:cf:79:44:31:36:7e:f4:74 +# SHA256 Fingerprint: a5:31:25:18:8d:21:10:aa:96:4b:02:c7:b7:c6:da:32:03:17:08:94:e5:fb:71:ff:fb:66:67:d5:e6:81:0a:36 +-----BEGIN CERTIFICATE----- +MIICWjCCAcMCAgGlMA0GCSqGSIb3DQEBBAUAMHUxCzAJBgNVBAYTAlVTMRgwFgYD +VQQKEw9HVEUgQ29ycG9yYXRpb24xJzAlBgNVBAsTHkdURSBDeWJlclRydXN0IFNv +bHV0aW9ucywgSW5jLjEjMCEGA1UEAxMaR1RFIEN5YmVyVHJ1c3QgR2xvYmFsIFJv +b3QwHhcNOTgwODEzMDAyOTAwWhcNMTgwODEzMjM1OTAwWjB1MQswCQYDVQQGEwJV +UzEYMBYGA1UEChMPR1RFIENvcnBvcmF0aW9uMScwJQYDVQQLEx5HVEUgQ3liZXJU +cnVzdCBTb2x1dGlvbnMsIEluYy4xIzAhBgNVBAMTGkdURSBDeWJlclRydXN0IEds +b2JhbCBSb290MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQCVD6C28FCc6HrH +iM3dFw4usJTQGz0O9pTAipTHBsiQl8i4ZBp6fmw8U+E3KHNgf7KXUwefU/ltWJTS +r41tiGeA5u2ylc9yMcqlHHK6XALnZELn+aks1joNrI1CqiQBOeacPwGFVw1Yh0X4 +04Wqk2kmhXBIgD8SFcd5tB8FLztimQIDAQABMA0GCSqGSIb3DQEBBAUAA4GBAG3r +GwnpXtlR22ciYaQqPEh346B8pt5zohQDhT37qw4wxYMWM4ETCJ57NE7fQMh017l9 +3PR2VX2bY1QY6fDq81yx2YtCHrnAlU66+tXifPVoYb+O7AWXX1uw16OFNMQkpw0P +lZPvy5TYnh+dXIVtx6quTx8itc2VrbqnzPmrC3p/ +-----END CERTIFICATE----- + +# Issuer: CN=Thawte Server CA O=Thawte Consulting cc OU=Certification Services Division +# Subject: CN=Thawte Server CA O=Thawte Consulting cc OU=Certification Services Division +# Label: "Thawte Server CA" +# Serial: 1 +# MD5 Fingerprint: c5:70:c4:a2:ed:53:78:0c:c8:10:53:81:64:cb:d0:1d +# SHA1 Fingerprint: 23:e5:94:94:51:95:f2:41:48:03:b4:d5:64:d2:a3:a3:f5:d8:8b:8c +# SHA256 Fingerprint: b4:41:0b:73:e2:e6:ea:ca:47:fb:c4:2f:8f:a4:01:8a:f4:38:1d:c5:4c:fa:a8:44:50:46:1e:ed:09:45:4d:e9 +-----BEGIN CERTIFICATE----- +MIIDEzCCAnygAwIBAgIBATANBgkqhkiG9w0BAQQFADCBxDELMAkGA1UEBhMCWkEx +FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYD +VQQKExRUaGF3dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlv +biBTZXJ2aWNlcyBEaXZpc2lvbjEZMBcGA1UEAxMQVGhhd3RlIFNlcnZlciBDQTEm +MCQGCSqGSIb3DQEJARYXc2VydmVyLWNlcnRzQHRoYXd0ZS5jb20wHhcNOTYwODAx +MDAwMDAwWhcNMjAxMjMxMjM1OTU5WjCBxDELMAkGA1UEBhMCWkExFTATBgNVBAgT +DFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYDVQQKExRUaGF3 +dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNl +cyBEaXZpc2lvbjEZMBcGA1UEAxMQVGhhd3RlIFNlcnZlciBDQTEmMCQGCSqGSIb3 +DQEJARYXc2VydmVyLWNlcnRzQHRoYXd0ZS5jb20wgZ8wDQYJKoZIhvcNAQEBBQAD +gY0AMIGJAoGBANOkUG7I/1Zr5s9dtuoMaHVHoqrC2oQl/Kj0R1HahbUgdJSGHg91 +yekIYfUGbTBuFRkC6VLAYttNmZ7iagxEOM3+vuNkCXDF/rFrKbYvScg71CcEJRCX +L+eQbcAoQpnXTEPew/UhbVSfXcNY4cDk2VuwuNy0e982OsK1ZiIS1ocNAgMBAAGj +EzARMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEEBQADgYEAB/pMaVz7lcxG +7oWDTSEwjsrZqG9JGubaUeNgcGyEYRGhGshIPllDfU+VPaGLtwtimHp1it2ITk6e +QNuozDJ0uW8NxuOzRAvZim+aKZuZGCg70eNAKJpaPNW15yAbi8qkq43pUdniTCxZ +qdq5snUb9kLy78fyGPmJvKP/iiMucEc= +-----END CERTIFICATE----- + +# Issuer: CN=Thawte Premium Server CA O=Thawte Consulting cc OU=Certification Services Division +# Subject: CN=Thawte Premium Server CA O=Thawte Consulting cc OU=Certification Services Division +# Label: "Thawte Premium Server CA" +# Serial: 1 +# 
MD5 Fingerprint: 06:9f:69:79:16:66:90:02:1b:8c:8c:a2:c3:07:6f:3a +# SHA1 Fingerprint: 62:7f:8d:78:27:65:63:99:d2:7d:7f:90:44:c9:fe:b3:f3:3e:fa:9a +# SHA256 Fingerprint: ab:70:36:36:5c:71:54:aa:29:c2:c2:9f:5d:41:91:16:3b:16:2a:22:25:01:13:57:d5:6d:07:ff:a7:bc:1f:72 +-----BEGIN CERTIFICATE----- +MIIDJzCCApCgAwIBAgIBATANBgkqhkiG9w0BAQQFADCBzjELMAkGA1UEBhMCWkEx +FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYD +VQQKExRUaGF3dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlv +biBTZXJ2aWNlcyBEaXZpc2lvbjEhMB8GA1UEAxMYVGhhd3RlIFByZW1pdW0gU2Vy +dmVyIENBMSgwJgYJKoZIhvcNAQkBFhlwcmVtaXVtLXNlcnZlckB0aGF3dGUuY29t +MB4XDTk2MDgwMTAwMDAwMFoXDTIwMTIzMTIzNTk1OVowgc4xCzAJBgNVBAYTAlpB +MRUwEwYDVQQIEwxXZXN0ZXJuIENhcGUxEjAQBgNVBAcTCUNhcGUgVG93bjEdMBsG +A1UEChMUVGhhd3RlIENvbnN1bHRpbmcgY2MxKDAmBgNVBAsTH0NlcnRpZmljYXRp +b24gU2VydmljZXMgRGl2aXNpb24xITAfBgNVBAMTGFRoYXd0ZSBQcmVtaXVtIFNl +cnZlciBDQTEoMCYGCSqGSIb3DQEJARYZcHJlbWl1bS1zZXJ2ZXJAdGhhd3RlLmNv +bTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA0jY2aovXwlue2oFBYo847kkE +VdbQ7xwblRZH7xhINTpS9CtqBo87L+pW46+GjZ4X9560ZXUCTe/LCaIhUdib0GfQ +ug2SBhRz1JPLlyoAnFxODLz6FVL88kRu2hFKbgifLy3j+ao6hnO2RlNYyIkFvYMR +uHM/qgeN9EJN50CdHDcCAwEAAaMTMBEwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG +9w0BAQQFAAOBgQAmSCwWwlj66BZ0DKqqX1Q/8tfJeGBeXm43YyJ3Nn6yF8Q0ufUI +hfzJATj/Tb7yFkJD57taRvvBxhEf8UqwKEbJw8RCfbz6q1lu1bdRiBHjpIUZa4JM +pAwSremkrj/xw0llmozFyD4lt5SZu5IycQfwhl7tUCemDaYj+bvLpgcUQg== +-----END CERTIFICATE----- + +# Issuer: O=Equifax OU=Equifax Secure Certificate Authority +# Subject: O=Equifax OU=Equifax Secure Certificate Authority +# Label: "Equifax Secure CA" +# Serial: 903804111 +# MD5 Fingerprint: 67:cb:9d:c0:13:24:8a:82:9b:b2:17:1e:d1:1b:ec:d4 +# SHA1 Fingerprint: d2:32:09:ad:23:d3:14:23:21:74:e4:0d:7f:9d:62:13:97:86:63:3a +# SHA256 Fingerprint: 08:29:7a:40:47:db:a2:36:80:c7:31:db:6e:31:76:53:ca:78:48:e1:be:bd:3a:0b:01:79:a7:07:f9:2c:f1:78 +-----BEGIN CERTIFICATE----- +MIIDIDCCAomgAwIBAgIENd70zzANBgkqhkiG9w0BAQUFADBOMQswCQYDVQQGEwJV +UzEQMA4GA1UEChMHRXF1aWZheDEtMCsGA1UECxMkRXF1aWZheCBTZWN1cmUgQ2Vy +dGlmaWNhdGUgQXV0aG9yaXR5MB4XDTk4MDgyMjE2NDE1MVoXDTE4MDgyMjE2NDE1 +MVowTjELMAkGA1UEBhMCVVMxEDAOBgNVBAoTB0VxdWlmYXgxLTArBgNVBAsTJEVx +dWlmYXggU2VjdXJlIENlcnRpZmljYXRlIEF1dGhvcml0eTCBnzANBgkqhkiG9w0B +AQEFAAOBjQAwgYkCgYEAwV2xWGcIYu6gmi0fCG2RFGiYCh7+2gRvE4RiIcPRfM6f +BeC4AfBONOziipUEZKzxa1NfBbPLZ4C/QgKO/t0BCezhABRP/PvwDN1Dulsr4R+A +cJkVV5MW8Q+XarfCaCMczE1ZMKxRHjuvK9buY0V7xdlfUNLjUA86iOe/FP3gx7kC +AwEAAaOCAQkwggEFMHAGA1UdHwRpMGcwZaBjoGGkXzBdMQswCQYDVQQGEwJVUzEQ +MA4GA1UEChMHRXF1aWZheDEtMCsGA1UECxMkRXF1aWZheCBTZWN1cmUgQ2VydGlm +aWNhdGUgQXV0aG9yaXR5MQ0wCwYDVQQDEwRDUkwxMBoGA1UdEAQTMBGBDzIwMTgw +ODIyMTY0MTUxWjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAUSOZo+SvSspXXR9gj +IBBPM5iQn9QwHQYDVR0OBBYEFEjmaPkr0rKV10fYIyAQTzOYkJ/UMAwGA1UdEwQF +MAMBAf8wGgYJKoZIhvZ9B0EABA0wCxsFVjMuMGMDAgbAMA0GCSqGSIb3DQEBBQUA +A4GBAFjOKer89961zgK5F7WF0bnj4JXMJTENAKaSbn+2kmOeUJXRmm/kEd5jhW6Y +7qj/WsjTVbJmcVfewCHrPSqnI0kBBIZCe/zuf6IWUrVnZ9NA2zsmWLIodz2uFHdh +1voqZiegDfqnc1zqcPGUIWVEX/r87yloqaKHee9570+sB3c4 +-----END CERTIFICATE----- + +# Issuer: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority - G2/(c) 1998 VeriSign, Inc. - For authorized use only/VeriSign Trust Network +# Subject: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority - G2/(c) 1998 VeriSign, Inc. 
- For authorized use only/VeriSign Trust Network +# Label: "Verisign Class 3 Public Primary Certification Authority - G2" +# Serial: 167285380242319648451154478808036881606 +# MD5 Fingerprint: a2:33:9b:4c:74:78:73:d4:6c:e7:c1:f3:8d:cb:5c:e9 +# SHA1 Fingerprint: 85:37:1c:a6:e5:50:14:3d:ce:28:03:47:1b:de:3a:09:e8:f8:77:0f +# SHA256 Fingerprint: 83:ce:3c:12:29:68:8a:59:3d:48:5f:81:97:3c:0f:91:95:43:1e:da:37:cc:5e:36:43:0e:79:c7:a8:88:63:8b +-----BEGIN CERTIFICATE----- +MIIDAjCCAmsCEH3Z/gfPqB63EHln+6eJNMYwDQYJKoZIhvcNAQEFBQAwgcExCzAJ +BgNVBAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xh +c3MgMyBQdWJsaWMgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcy +MTowOAYDVQQLEzEoYykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3Jp +emVkIHVzZSBvbmx5MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMB4X +DTk4MDUxODAwMDAwMFoXDTI4MDgwMTIzNTk1OVowgcExCzAJBgNVBAYTAlVTMRcw +FQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xhc3MgMyBQdWJsaWMg +UHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcyMTowOAYDVQQLEzEo +YykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5 +MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMIGfMA0GCSqGSIb3DQEB +AQUAA4GNADCBiQKBgQDMXtERXVxp0KvTuWpMmR9ZmDCOFoUgRm1HP9SFIIThbbP4 +pO0M8RcPO/mn+SXXwc+EY/J8Y8+iR/LGWzOOZEAEaMGAuWQcRXfH2G71lSk8UOg0 +13gfqLptQ5GVj0VXXn7F+8qkBOvqlzdUMG+7AUcyM83cV5tkaWH4mx0ciU9cZwID +AQABMA0GCSqGSIb3DQEBBQUAA4GBAFFNzb5cy5gZnBWyATl4Lk0PZ3BwmcYQWpSk +U01UbSuvDV1Ai2TT1+7eVmGSX6bEHRBhNtMsJzzoKQm5EWR0zLVznxxIqbxhAe7i +F6YM40AIOw7n60RzKprxaZLvcRTDOaxxp5EJb+RxBrO6WVcmeQD2+A2iMzAo1KpY +oJ2daZH9 +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA +# Subject: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA +# Label: "GlobalSign Root CA" +# Serial: 4835703278459707669005204 +# MD5 Fingerprint: 3e:45:52:15:09:51:92:e1:b7:5d:37:9f:b1:87:29:8a +# SHA1 Fingerprint: b1:bc:96:8b:d4:f4:9d:62:2a:a8:9a:81:f2:15:01:52:a4:1d:82:9c +# SHA256 Fingerprint: eb:d4:10:40:e4:bb:3e:c7:42:c9:e3:81:d3:1e:f2:a4:1a:48:b6:68:5c:96:e7:ce:f3:c1:df:6c:d4:33:1c:99 +-----BEGIN CERTIFICATE----- +MIIDdTCCAl2gAwIBAgILBAAAAAABFUtaw5QwDQYJKoZIhvcNAQEFBQAwVzELMAkG +A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv +b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw05ODA5MDExMjAw +MDBaFw0yODAxMjgxMjAwMDBaMFcxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9i +YWxTaWduIG52LXNhMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJHbG9iYWxT +aWduIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDaDuaZ +jc6j40+Kfvvxi4Mla+pIH/EqsLmVEQS98GPR4mdmzxzdzxtIK+6NiY6arymAZavp +xy0Sy6scTHAHoT0KMM0VjU/43dSMUBUc71DuxC73/OlS8pF94G3VNTCOXkNz8kHp +1Wrjsok6Vjk4bwY8iGlbKk3Fp1S4bInMm/k8yuX9ifUSPJJ4ltbcdG6TRGHRjcdG +snUOhugZitVtbNV4FpWi6cgKOOvyJBNPc1STE4U6G7weNLWLBYy5d4ux2x8gkasJ +U26Qzns3dLlwR5EiUWMWea6xrkEmCMgZK9FGqkjWZCrXgzT/LCrBbBlDSgeF59N8 +9iFo7+ryUp9/k5DPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8E +BTADAQH/MB0GA1UdDgQWBBRge2YaRQ2XyolQL30EzTSo//z9SzANBgkqhkiG9w0B +AQUFAAOCAQEA1nPnfE920I2/7LqivjTFKDK1fPxsnCwrvQmeU79rXqoRSLblCKOz +yj1hTdNGCbM+w6DjY1Ub8rrvrTnhQ7k4o+YviiY776BQVvnGCv04zcQLcFGUl5gE +38NflNUVyRRBnMRddWQVDf9VMOyGj/8N7yy5Y0b2qvzfvGn9LhJIZJrglfCm7ymP +AbEVtQwdpf5pLGkkeB6zpxxxYu7KyJesF12KwvhHhm4qxFYxldBniYUr+WymXUad +DKqC5JlR3XC321Y9YeRq4VzW9v493kHMB65jUr9TU/Qr6cf9tveCX4XSQRjbgbME +HMUfpIBvFSDJ3gyICh3WZlXi/EjJKSZp4A== +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2 +# Label: "GlobalSign Root CA - R2" +# Serial: 4835703278459682885658125 +# MD5 Fingerprint: 
94:14:77:7e:3e:5e:fd:8f:30:bd:41:b0:cf:e7:d0:30 +# SHA1 Fingerprint: 75:e0:ab:b6:13:85:12:27:1c:04:f8:5f:dd:de:38:e4:b7:24:2e:fe +# SHA256 Fingerprint: ca:42:dd:41:74:5f:d0:b8:1e:b9:02:36:2c:f9:d8:bf:71:9d:a1:bd:1b:1e:fc:94:6f:5b:4c:99:f4:2c:1b:9e +-----BEGIN CERTIFICATE----- +MIIDujCCAqKgAwIBAgILBAAAAAABD4Ym5g0wDQYJKoZIhvcNAQEFBQAwTDEgMB4G +A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjIxEzARBgNVBAoTCkdsb2JhbFNp +Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDYxMjE1MDgwMDAwWhcNMjExMjE1 +MDgwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMjETMBEG +A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAKbPJA6+Lm8omUVCxKs+IVSbC9N/hHD6ErPL +v4dfxn+G07IwXNb9rfF73OX4YJYJkhD10FPe+3t+c4isUoh7SqbKSaZeqKeMWhG8 +eoLrvozps6yWJQeXSpkqBy+0Hne/ig+1AnwblrjFuTosvNYSuetZfeLQBoZfXklq +tTleiDTsvHgMCJiEbKjNS7SgfQx5TfC4LcshytVsW33hoCmEofnTlEnLJGKRILzd +C9XZzPnqJworc5HGnRusyMvo4KD0L5CLTfuwNhv2GXqF4G3yYROIXJ/gkwpRl4pa +zq+r1feqCapgvdzZX99yqWATXgAByUr6P6TqBwMhAo6CygPCm48CAwEAAaOBnDCB +mTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUm+IH +V2ccHsBqBt5ZtJot39wZhi4wNgYDVR0fBC8wLTAroCmgJ4YlaHR0cDovL2NybC5n +bG9iYWxzaWduLm5ldC9yb290LXIyLmNybDAfBgNVHSMEGDAWgBSb4gdXZxwewGoG +3lm0mi3f3BmGLjANBgkqhkiG9w0BAQUFAAOCAQEAmYFThxxol4aR7OBKuEQLq4Gs +J0/WwbgcQ3izDJr86iw8bmEbTUsp9Z8FHSbBuOmDAGJFtqkIk7mpM0sYmsL4h4hO +291xNBrBVNpGP+DTKqttVCL1OmLNIG+6KYnX3ZHu01yiPqFbQfXf5WRDLenVOavS +ot+3i9DAgBkcRcAtjOj4LaR0VknFBbVPFd5uRHg5h6h+u/N5GJG79G+dwfCMNYxd +AfvDbbnvRG15RjF+Cv6pgsH/76tuIMRQyV+dTZsXjAzlAcmgQWpzU/qlULRuJQ/7 +TBj0/VLZjmmx6BEP3ojY+x1J96relc8geMJgEtslQIxq/H5COEBkEveegeGTLg== +-----END CERTIFICATE----- + +# Issuer: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 1 Policy Validation Authority +# Subject: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 1 Policy Validation Authority +# Label: "ValiCert Class 1 VA" +# Serial: 1 +# MD5 Fingerprint: 65:58:ab:15:ad:57:6c:1e:a8:a7:b5:69:ac:bf:ff:eb +# SHA1 Fingerprint: e5:df:74:3c:b6:01:c4:9b:98:43:dc:ab:8c:e8:6a:81:10:9f:e4:8e +# SHA256 Fingerprint: f4:c1:49:55:1a:30:13:a3:5b:c7:bf:fe:17:a7:f3:44:9b:c1:ab:5b:5a:0a:e7:4b:06:c2:3b:90:00:4c:01:04 +-----BEGIN CERTIFICATE----- +MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0 +IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz +BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDEgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y +aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG +9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNTIyMjM0OFoXDTE5MDYy +NTIyMjM0OFowgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y +azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs +YXNzIDEgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw +Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl +cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDYWYJ6ibiWuqYvaG9Y +LqdUHAZu9OqNSLwxlBfw8068srg1knaw0KWlAdcAAxIiGQj4/xEjm84H9b9pGib+ +TunRf50sQB1ZaG6m+FiwnRqP0z/x3BkGgagO4DrdyFNFCQbmD3DD+kCmDuJWBQ8Y +TfwggtFzVXSNdnKgHZ0dwN0/cQIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAFBoPUn0 +LBwGlN+VYH+Wexf+T3GtZMjdd9LvWVXoP+iOBSoh8gfStadS/pyxtuJbdxdA6nLW +I8sogTLDAHkY7FkXicnGah5xyf23dKUlRWnFSKsZ4UWKJWsZ7uW7EvV/96aNUcPw +nXS3qT6gpf+2SQMT2iLM7XGCK5nPOrf1LXLI +-----END CERTIFICATE----- + +# Issuer: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 2 Policy Validation Authority +# Subject: CN=http://www.valicert.com/ O=ValiCert, Inc. 
OU=ValiCert Class 2 Policy Validation Authority +# Label: "ValiCert Class 2 VA" +# Serial: 1 +# MD5 Fingerprint: a9:23:75:9b:ba:49:36:6e:31:c2:db:f2:e7:66:ba:87 +# SHA1 Fingerprint: 31:7a:2a:d0:7f:2b:33:5e:f5:a1:c3:4e:4b:57:e8:b7:d8:f1:fc:a6 +# SHA256 Fingerprint: 58:d0:17:27:9c:d4:dc:63:ab:dd:b1:96:a6:c9:90:6c:30:c4:e0:87:83:ea:e8:c1:60:99:54:d6:93:55:59:6b +-----BEGIN CERTIFICATE----- +MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0 +IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz +BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDIgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y +aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG +9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNjAwMTk1NFoXDTE5MDYy +NjAwMTk1NFowgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y +azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs +YXNzIDIgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw +Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl +cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDOOnHK5avIWZJV16vY +dA757tn2VUdZZUcOBVXc65g2PFxTXdMwzzjsvUGJ7SVCCSRrCl6zfN1SLUzm1NZ9 +WlmpZdRJEy0kTRxQb7XBhVQ7/nHk01xC+YDgkRoKWzk2Z/M/VXwbP7RfZHM047QS +v4dk+NoS/zcnwbNDu+97bi5p9wIDAQABMA0GCSqGSIb3DQEBBQUAA4GBADt/UG9v +UJSZSWI4OB9L+KXIPqeCgfYrx+jFzug6EILLGACOTb2oWH+heQC1u+mNr0HZDzTu +IYEZoDJJKPTEjlbVUjP9UNV+mWwD5MlM/Mtsq2azSiGM5bUMMj4QssxsodyamEwC +W/POuZ6lcg5Ktz885hZo+L7tdEy8W9ViH0Pd +-----END CERTIFICATE----- + +# Issuer: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 3 Policy Validation Authority +# Subject: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 3 Policy Validation Authority +# Label: "RSA Root Certificate 1" +# Serial: 1 +# MD5 Fingerprint: a2:6f:53:b7:ee:40:db:4a:68:e7:fa:18:d9:10:4b:72 +# SHA1 Fingerprint: 69:bd:8c:f4:9c:d3:00:fb:59:2e:17:93:ca:55:6a:f3:ec:aa:35:fb +# SHA256 Fingerprint: bc:23:f9:8a:31:3c:b9:2d:e3:bb:fc:3a:5a:9f:44:61:ac:39:49:4c:4a:e1:5a:9e:9d:f1:31:e9:9b:73:01:9a +-----BEGIN CERTIFICATE----- +MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0 +IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz +BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDMgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y +aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG +9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNjAwMjIzM1oXDTE5MDYy +NjAwMjIzM1owgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y +azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs +YXNzIDMgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw +Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl +cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDjmFGWHOjVsQaBalfD +cnWTq8+epvzzFlLWLU2fNUSoLgRNB0mKOCn1dzfnt6td3zZxFJmP3MKS8edgkpfs +2Ejcv8ECIMYkpChMMFp2bbFc893enhBxoYjHW5tBbcqwuI4V7q0zK89HBFx1cQqY +JJgpp0lZpd34t0NiYfPT4tBVPwIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAFa7AliE +Zwgs3x/be0kz9dNnnfS0ChCzycUs4pJqcXgn8nCDQtM+z6lU9PHYkhaM0QTLS6vJ +n0WuPIqpsHEzXcjFV9+vqDWzf4mH6eglkrh/hXqu1rweN1gqZ8mRzyqBPu3GOd/A +PhmcGcwTTYJBtYze4D1gCCAPRX5ron+jjBXu +-----END CERTIFICATE----- + +# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only +# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. 
- For authorized use only +# Label: "Verisign Class 3 Public Primary Certification Authority - G3" +# Serial: 206684696279472310254277870180966723415 +# MD5 Fingerprint: cd:68:b6:a7:c7:c4:ce:75:e0:1d:4f:57:44:61:92:09 +# SHA1 Fingerprint: 13:2d:0d:45:53:4b:69:97:cd:b2:d5:c3:39:e2:55:76:60:9b:5c:c6 +# SHA256 Fingerprint: eb:04:cf:5e:b1:f3:9a:fa:76:2f:2b:b1:20:f2:96:cb:a5:20:c1:b9:7d:b1:58:95:65:b8:1c:b9:a1:7b:72:44 +-----BEGIN CERTIFICATE----- +MIIEGjCCAwICEQCbfgZJoz5iudXukEhxKe9XMA0GCSqGSIb3DQEBBQUAMIHKMQsw +CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl +cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu +LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT +aWduIENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp +dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD +VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT +aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ +bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu +IENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg +LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMu6nFL8eB8aHm8b +N3O9+MlrlBIwT/A2R/XQkQr1F8ilYcEWQE37imGQ5XYgwREGfassbqb1EUGO+i2t +KmFZpGcmTNDovFJbcCAEWNF6yaRpvIMXZK0Fi7zQWM6NjPXr8EJJC52XJ2cybuGu +kxUccLwgTS8Y3pKI6GyFVxEa6X7jJhFUokWWVYPKMIno3Nij7SqAP395ZVc+FSBm +CC+Vk7+qRy+oRpfwEuL+wgorUeZ25rdGt+INpsyow0xZVYnm6FNcHOqd8GIWC6fJ +Xwzw3sJ2zq/3avL6QaaiMxTJ5Xpj055iN9WFZZ4O5lMkdBteHRJTW8cs54NJOxWu +imi5V5cCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAERSWwauSCPc/L8my/uRan2Te +2yFPhpk0djZX3dAVL8WtfxUfN2JzPtTnX84XA9s1+ivbrmAJXx5fj267Cz3qWhMe +DGBvtcC1IyIuBwvLqXTLR7sdwdela8wv0kL9Sd2nic9TutoAWii/gt/4uhMdUIaC +/Y4wjylGsB49Ndo4YhYYSq3mtlFs3q9i6wHQHiT+eo8SGhJouPtmmRQURVyu565p +F4ErWjfJXir0xuKhXFSbplQAz/DxwceYMBo7Nhbbo27q/a2ywtrvAkcTisDxszGt +TxzhT5yvDwyd93gN2PQ1VoDat20Xj50egWTh/sVFuq1ruQp6Tk9LhO5L8X3dEQ== +-----END CERTIFICATE----- + +# Issuer: CN=VeriSign Class 4 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only +# Subject: CN=VeriSign Class 4 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. 
- For authorized use only +# Label: "Verisign Class 4 Public Primary Certification Authority - G3" +# Serial: 314531972711909413743075096039378935511 +# MD5 Fingerprint: db:c8:f2:27:2e:b1:ea:6a:29:23:5d:fe:56:3e:33:df +# SHA1 Fingerprint: c8:ec:8c:87:92:69:cb:4b:ab:39:e9:8d:7e:57:67:f3:14:95:73:9d +# SHA256 Fingerprint: e3:89:36:0d:0f:db:ae:b3:d2:50:58:4b:47:30:31:4e:22:2f:39:c1:56:a0:20:14:4e:8d:96:05:61:79:15:06 +-----BEGIN CERTIFICATE----- +MIIEGjCCAwICEQDsoKeLbnVqAc/EfMwvlF7XMA0GCSqGSIb3DQEBBQUAMIHKMQsw +CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl +cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu +LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT +aWduIENsYXNzIDQgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp +dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD +VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT +aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ +bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu +IENsYXNzIDQgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg +LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAK3LpRFpxlmr8Y+1 +GQ9Wzsy1HyDkniYlS+BzZYlZ3tCD5PUPtbut8XzoIfzk6AzufEUiGXaStBO3IFsJ ++mGuqPKljYXCKtbeZjbSmwL0qJJgfJxptI8kHtCGUvYynEFYHiK9zUVilQhu0Gbd +U6LM8BDcVHOLBKFGMzNcF0C5nk3T875Vg+ixiY5afJqWIpA7iCXy0lOIAgwLePLm +NxdLMEYH5IBtptiWLugs+BGzOA1mppvqySNb247i8xOOGlktqgLw7KSHZtzBP/XY +ufTsgsbSPZUd5cBPhMnZo0QoBmrXRazwa2rvTl/4EYIeOGM0ZlDUPpNz+jDDZq3/ +ky2X7wMCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAj/ola09b5KROJ1WrIhVZPMq1 +CtRK26vdoV9TxaBXOcLORyu+OshWv8LZJxA6sQU8wHcxuzrTBXttmhwwjIDLk5Mq +g6sFUYICABFna/OIYUdfA5PVWw3g8dShMjWFsjrbsIKr0csKvE+MW8VLADsfKoKm +fjaF3H48ZwC15DtS4KjrXRX5xm3wrR0OhbepmnMUWluPQSjA1egtTaRezarZ7c7c +2NU8Qh0XwRJdRTjDOPP8hS6DRkiy1yBfkjaP53kPmF6Z6PDQpLv1U70qzlmwr25/ +bLvSHgCwIe34QWKCudiyxLtGUPMxxY8BqHTr9Xgn2uf3ZkPznoM+IKrDNWCRzg== +-----END CERTIFICATE----- + +# Issuer: CN=Entrust.net Secure Server Certification Authority O=Entrust.net OU=www.entrust.net/CPS incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited +# Subject: CN=Entrust.net Secure Server Certification Authority O=Entrust.net OU=www.entrust.net/CPS incorp. by ref. 
(limits liab.)/(c) 1999 Entrust.net Limited +# Label: "Entrust.net Secure Server CA" +# Serial: 927650371 +# MD5 Fingerprint: df:f2:80:73:cc:f1:e6:61:73:fc:f5:42:e9:c5:7c:ee +# SHA1 Fingerprint: 99:a6:9b:e6:1a:fe:88:6b:4d:2b:82:00:7c:b8:54:fc:31:7e:15:39 +# SHA256 Fingerprint: 62:f2:40:27:8c:56:4c:4d:d8:bf:7d:9d:4f:6f:36:6e:a8:94:d2:2f:5f:34:d9:89:a9:83:ac:ec:2f:ff:ed:50 +-----BEGIN CERTIFICATE----- +MIIE2DCCBEGgAwIBAgIEN0rSQzANBgkqhkiG9w0BAQUFADCBwzELMAkGA1UEBhMC +VVMxFDASBgNVBAoTC0VudHJ1c3QubmV0MTswOQYDVQQLEzJ3d3cuZW50cnVzdC5u +ZXQvQ1BTIGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxpYWIuKTElMCMGA1UECxMc +KGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDE6MDgGA1UEAxMxRW50cnVzdC5u +ZXQgU2VjdXJlIFNlcnZlciBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw05OTA1 +MjUxNjA5NDBaFw0xOTA1MjUxNjM5NDBaMIHDMQswCQYDVQQGEwJVUzEUMBIGA1UE +ChMLRW50cnVzdC5uZXQxOzA5BgNVBAsTMnd3dy5lbnRydXN0Lm5ldC9DUFMgaW5j +b3JwLiBieSByZWYuIChsaW1pdHMgbGlhYi4pMSUwIwYDVQQLExwoYykgMTk5OSBF +bnRydXN0Lm5ldCBMaW1pdGVkMTowOAYDVQQDEzFFbnRydXN0Lm5ldCBTZWN1cmUg +U2VydmVyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGdMA0GCSqGSIb3DQEBAQUA +A4GLADCBhwKBgQDNKIM0VBuJ8w+vN5Ex/68xYMmo6LIQaO2f55M28Qpku0f1BBc/ +I0dNxScZgSYMVHINiC3ZH5oSn7yzcdOAGT9HZnuMNSjSuQrfJNqc1lB5gXpa0zf3 +wkrYKZImZNHkmGw6AIr1NJtl+O3jEP/9uElY3KDegjlrgbEWGWG5VLbmQwIBA6OC +AdcwggHTMBEGCWCGSAGG+EIBAQQEAwIABzCCARkGA1UdHwSCARAwggEMMIHeoIHb +oIHYpIHVMIHSMQswCQYDVQQGEwJVUzEUMBIGA1UEChMLRW50cnVzdC5uZXQxOzA5 +BgNVBAsTMnd3dy5lbnRydXN0Lm5ldC9DUFMgaW5jb3JwLiBieSByZWYuIChsaW1p +dHMgbGlhYi4pMSUwIwYDVQQLExwoYykgMTk5OSBFbnRydXN0Lm5ldCBMaW1pdGVk +MTowOAYDVQQDEzFFbnRydXN0Lm5ldCBTZWN1cmUgU2VydmVyIENlcnRpZmljYXRp +b24gQXV0aG9yaXR5MQ0wCwYDVQQDEwRDUkwxMCmgJ6AlhiNodHRwOi8vd3d3LmVu +dHJ1c3QubmV0L0NSTC9uZXQxLmNybDArBgNVHRAEJDAigA8xOTk5MDUyNTE2MDk0 +MFqBDzIwMTkwNTI1MTYwOTQwWjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAU8Bdi +E1U9s/8KAGv7UISX8+1i0BowHQYDVR0OBBYEFPAXYhNVPbP/CgBr+1CEl/PtYtAa +MAwGA1UdEwQFMAMBAf8wGQYJKoZIhvZ9B0EABAwwChsEVjQuMAMCBJAwDQYJKoZI +hvcNAQEFBQADgYEAkNwwAvpkdMKnCqV8IY00F6j7Rw7/JXyNEwr75Ji174z4xRAN +95K+8cPV1ZVqBLssziY2ZcgxxufuP+NXdYR6Ee9GTxj005i7qIcyunL2POI9n9cd +2cNgQ4xYDiKWL2KjLB+6rQXvqzJ4h6BUcxm1XAX5Uj5tLUUL9wqT6u0G+bI= +-----END CERTIFICATE----- + +# Issuer: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited +# Subject: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. 
(limits liab.)/(c) 1999 Entrust.net Limited +# Label: "Entrust.net Premium 2048 Secure Server CA" +# Serial: 946059622 +# MD5 Fingerprint: ba:21:ea:20:d6:dd:db:8f:c1:57:8b:40:ad:a1:fc:fc +# SHA1 Fingerprint: 80:1d:62:d0:7b:44:9d:5c:5c:03:5c:98:ea:61:fa:44:3c:2a:58:fe +# SHA256 Fingerprint: d1:c3:39:ea:27:84:eb:87:0f:93:4f:c5:63:4e:4a:a9:ad:55:05:01:64:01:f2:64:65:d3:7a:57:46:63:35:9f +-----BEGIN CERTIFICATE----- +MIIEXDCCA0SgAwIBAgIEOGO5ZjANBgkqhkiG9w0BAQUFADCBtDEUMBIGA1UEChML +RW50cnVzdC5uZXQxQDA+BgNVBAsUN3d3dy5lbnRydXN0Lm5ldC9DUFNfMjA0OCBp +bmNvcnAuIGJ5IHJlZi4gKGxpbWl0cyBsaWFiLikxJTAjBgNVBAsTHChjKSAxOTk5 +IEVudHJ1c3QubmV0IExpbWl0ZWQxMzAxBgNVBAMTKkVudHJ1c3QubmV0IENlcnRp +ZmljYXRpb24gQXV0aG9yaXR5ICgyMDQ4KTAeFw05OTEyMjQxNzUwNTFaFw0xOTEy +MjQxODIwNTFaMIG0MRQwEgYDVQQKEwtFbnRydXN0Lm5ldDFAMD4GA1UECxQ3d3d3 +LmVudHJ1c3QubmV0L0NQU18yMDQ4IGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxp +YWIuKTElMCMGA1UECxMcKGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDEzMDEG +A1UEAxMqRW50cnVzdC5uZXQgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgKDIwNDgp +MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArU1LqRKGsuqjIAcVFmQq +K0vRvwtKTY7tgHalZ7d4QMBzQshowNtTK91euHaYNZOLGp18EzoOH1u3Hs/lJBQe +sYGpjX24zGtLA/ECDNyrpUAkAH90lKGdCCmziAv1h3edVc3kw37XamSrhRSGlVuX +MlBvPci6Zgzj/L24ScF2iUkZ/cCovYmjZy/Gn7xxGWC4LeksyZB2ZnuU4q941mVT +XTzWnLLPKQP5L6RQstRIzgUyVYr9smRMDuSYB3Xbf9+5CFVghTAp+XtIpGmG4zU/ +HoZdenoVve8AjhUiVBcAkCaTvA5JaJG/+EfTnZVCwQ5N328mz8MYIWJmQ3DW1cAH +4QIDAQABo3QwcjARBglghkgBhvhCAQEEBAMCAAcwHwYDVR0jBBgwFoAUVeSB0RGA +vtiJuQijMfmhJAkWuXAwHQYDVR0OBBYEFFXkgdERgL7YibkIozH5oSQJFrlwMB0G +CSqGSIb2fQdBAAQQMA4bCFY1LjA6NC4wAwIEkDANBgkqhkiG9w0BAQUFAAOCAQEA +WUesIYSKF8mciVMeuoCFGsY8Tj6xnLZ8xpJdGGQC49MGCBFhfGPjK50xA3B20qMo +oPS7mmNz7W3lKtvtFKkrxjYR0CvrB4ul2p5cGZ1WEvVUKcgF7bISKo30Axv/55IQ +h7A6tcOdBTcSo8f0FbnVpDkWm1M6I5HxqIKiaohowXkCIryqptau37AUX7iH0N18 +f3v/rxzP5tsHrV7bhZ3QKw0z2wTR5klAEyt2+z7pnIkPFc4YsIV4IU9rTw76NmfN +B/L/CNDi3tm/Kq+4h4YhPATKt5Rof8886ZjXOP/swNlQ8C5LWK5Gb9Auw2DaclVy +vUxFnmG6v4SBkgPR0ml8xQ== +-----END CERTIFICATE----- + +# Issuer: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust +# Subject: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust +# Label: "Baltimore CyberTrust Root" +# Serial: 33554617 +# MD5 Fingerprint: ac:b6:94:a5:9c:17:e0:d7:91:52:9b:b1:97:06:a6:e4 +# SHA1 Fingerprint: d4:de:20:d0:5e:66:fc:53:fe:1a:50:88:2c:78:db:28:52:ca:e4:74 +# SHA256 Fingerprint: 16:af:57:a9:f6:76:b0:ab:12:60:95:aa:5e:ba:de:f2:2a:b3:11:19:d6:44:ac:95:cd:4b:93:db:f3:f2:6a:eb +-----BEGIN CERTIFICATE----- +MIIDdzCCAl+gAwIBAgIEAgAAuTANBgkqhkiG9w0BAQUFADBaMQswCQYDVQQGEwJJ +RTESMBAGA1UEChMJQmFsdGltb3JlMRMwEQYDVQQLEwpDeWJlclRydXN0MSIwIAYD +VQQDExlCYWx0aW1vcmUgQ3liZXJUcnVzdCBSb290MB4XDTAwMDUxMjE4NDYwMFoX +DTI1MDUxMjIzNTkwMFowWjELMAkGA1UEBhMCSUUxEjAQBgNVBAoTCUJhbHRpbW9y +ZTETMBEGA1UECxMKQ3liZXJUcnVzdDEiMCAGA1UEAxMZQmFsdGltb3JlIEN5YmVy +VHJ1c3QgUm9vdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKMEuyKr +mD1X6CZymrV51Cni4eiVgLGw41uOKymaZN+hXe2wCQVt2yguzmKiYv60iNoS6zjr +IZ3AQSsBUnuId9Mcj8e6uYi1agnnc+gRQKfRzMpijS3ljwumUNKoUMMo6vWrJYeK +mpYcqWe4PwzV9/lSEy/CG9VwcPCPwBLKBsua4dnKM3p31vjsufFoREJIE9LAwqSu +XmD+tqYF/LTdB1kC1FkYmGP1pWPgkAx9XbIGevOF6uvUA65ehD5f/xXtabz5OTZy +dc93Uk3zyZAsuT3lySNTPx8kmCFcB5kpvcY67Oduhjprl3RjM71oGDHweI12v/ye +jl0qhqdNkNwnGjkCAwEAAaNFMEMwHQYDVR0OBBYEFOWdWTCCR1jMrPoIVDaGezq1 +BE3wMBIGA1UdEwEB/wQIMAYBAf8CAQMwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3 +DQEBBQUAA4IBAQCFDF2O5G9RaEIFoN27TyclhAO992T9Ldcw46QQF+vaKSm2eT92 +9hkTI7gQCvlYpNRhcL0EYWoSihfVCr3FvDB81ukMJY2GQE/szKN+OMY3EU/t3Wgx +jkzSswF07r51XgdIGn9w/xZchMB5hbgF/X++ZRGjD8ACtPhSNzkE1akxehi/oCr0 
+Epn3o0WC4zxe9Z2etciefC7IpJ5OCBRLbf1wbWsaY71k5h+3zvDyny67G7fyUIhz +ksLi4xaNmjICq44Y3ekQEe5+NauQrz4wlHrQMz2nZQ/1/I6eYs9HRCwBXbsdtTLS +R9I4LtD+gdwyah617jzV/OeBHRnDJELqYzmp +-----END CERTIFICATE----- + +# Issuer: CN=Equifax Secure Global eBusiness CA-1 O=Equifax Secure Inc. +# Subject: CN=Equifax Secure Global eBusiness CA-1 O=Equifax Secure Inc. +# Label: "Equifax Secure Global eBusiness CA" +# Serial: 1 +# MD5 Fingerprint: 8f:5d:77:06:27:c4:98:3c:5b:93:78:e7:d7:7d:9b:cc +# SHA1 Fingerprint: 7e:78:4a:10:1c:82:65:cc:2d:e1:f1:6d:47:b4:40:ca:d9:0a:19:45 +# SHA256 Fingerprint: 5f:0b:62:ea:b5:e3:53:ea:65:21:65:16:58:fb:b6:53:59:f4:43:28:0a:4a:fb:d1:04:d7:7d:10:f9:f0:4c:07 +-----BEGIN CERTIFICATE----- +MIICkDCCAfmgAwIBAgIBATANBgkqhkiG9w0BAQQFADBaMQswCQYDVQQGEwJVUzEc +MBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5jLjEtMCsGA1UEAxMkRXF1aWZheCBT +ZWN1cmUgR2xvYmFsIGVCdXNpbmVzcyBDQS0xMB4XDTk5MDYyMTA0MDAwMFoXDTIw +MDYyMTA0MDAwMFowWjELMAkGA1UEBhMCVVMxHDAaBgNVBAoTE0VxdWlmYXggU2Vj +dXJlIEluYy4xLTArBgNVBAMTJEVxdWlmYXggU2VjdXJlIEdsb2JhbCBlQnVzaW5l +c3MgQ0EtMTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAuucXkAJlsTRVPEnC +UdXfp9E3j9HngXNBUmCbnaEXJnitx7HoJpQytd4zjTov2/KaelpzmKNc6fuKcxtc +58O/gGzNqfTWK8D3+ZmqY6KxRwIP1ORROhI8bIpaVIRw28HFkM9yRcuoWcDNM50/ +o5brhTMhHD4ePmBudpxnhcXIw2ECAwEAAaNmMGQwEQYJYIZIAYb4QgEBBAQDAgAH +MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUvqigdHJQa0S3ySPY+6j/s1dr +aGwwHQYDVR0OBBYEFL6ooHRyUGtEt8kj2Puo/7NXa2hsMA0GCSqGSIb3DQEBBAUA +A4GBADDiAVGqx+pf2rnQZQ8w1j7aDRRJbpGTJxQx78T3LUX47Me/okENI7SS+RkA +Z70Br83gcfxaz2TE4JaY0KNA4gGK7ycH8WUBikQtBmV1UsCGECAhX2xrD2yuCRyv +8qIYNMR1pHMc8Y3c7635s3a0kr/clRAevsvIO1qEYBlWlKlV +-----END CERTIFICATE----- + +# Issuer: CN=Equifax Secure eBusiness CA-1 O=Equifax Secure Inc. +# Subject: CN=Equifax Secure eBusiness CA-1 O=Equifax Secure Inc. +# Label: "Equifax Secure eBusiness CA 1" +# Serial: 4 +# MD5 Fingerprint: 64:9c:ef:2e:44:fc:c6:8f:52:07:d0:51:73:8f:cb:3d +# SHA1 Fingerprint: da:40:18:8b:91:89:a3:ed:ee:ae:da:97:fe:2f:9d:f5:b7:d1:8a:41 +# SHA256 Fingerprint: cf:56:ff:46:a4:a1:86:10:9d:d9:65:84:b5:ee:b5:8a:51:0c:42:75:b0:e5:f9:4f:40:bb:ae:86:5e:19:f6:73 +-----BEGIN CERTIFICATE----- +MIICgjCCAeugAwIBAgIBBDANBgkqhkiG9w0BAQQFADBTMQswCQYDVQQGEwJVUzEc +MBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5jLjEmMCQGA1UEAxMdRXF1aWZheCBT +ZWN1cmUgZUJ1c2luZXNzIENBLTEwHhcNOTkwNjIxMDQwMDAwWhcNMjAwNjIxMDQw +MDAwWjBTMQswCQYDVQQGEwJVUzEcMBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5j +LjEmMCQGA1UEAxMdRXF1aWZheCBTZWN1cmUgZUJ1c2luZXNzIENBLTEwgZ8wDQYJ +KoZIhvcNAQEBBQADgY0AMIGJAoGBAM4vGbwXt3fek6lfWg0XTzQaDJj0ItlZ1MRo +RvC0NcWFAyDGr0WlIVFFQesWWDYyb+JQYmT5/VGcqiTZ9J2DKocKIdMSODRsjQBu +WqDZQu4aIZX5UkxVWsUPOE9G+m34LjXWHXzr4vCwdYDIqROsvojvOm6rXyo4YgKw +Env+j6YDAgMBAAGjZjBkMBEGCWCGSAGG+EIBAQQEAwIABzAPBgNVHRMBAf8EBTAD +AQH/MB8GA1UdIwQYMBaAFEp4MlIR21kWNl7fwRQ2QGpHfEyhMB0GA1UdDgQWBBRK +eDJSEdtZFjZe38EUNkBqR3xMoTANBgkqhkiG9w0BAQQFAAOBgQB1W6ibAxHm6VZM +zfmpTMANmvPMZWnmJXbMWbfWVMMdzZmsGd20hdXgPfxiIKeES1hl8eL5lSE/9dR+ +WB5Hh1Q+WKG1tfgq73HnvMP2sUlG4tega+VWeponmHxGYhTnyfxuAxJ5gDgdSIKN +/Bf+KpYrtWKmpj29f5JZzVoqgrI3eQ== +-----END CERTIFICATE----- + +# Issuer: O=Equifax Secure OU=Equifax Secure eBusiness CA-2 +# Subject: O=Equifax Secure OU=Equifax Secure eBusiness CA-2 +# Label: "Equifax Secure eBusiness CA 2" +# Serial: 930140085 +# MD5 Fingerprint: aa:bf:bf:64:97:da:98:1d:6f:c6:08:3a:95:70:33:ca +# SHA1 Fingerprint: 39:4f:f6:85:0b:06:be:52:e5:18:56:cc:10:e1:80:e8:82:b3:85:cc +# SHA256 Fingerprint: 2f:27:4e:48:ab:a4:ac:7b:76:59:33:10:17:75:50:6d:c3:0e:e3:8e:f6:ac:d5:c0:49:32:cf:e0:41:23:42:20 +-----BEGIN CERTIFICATE----- 
+MIIDIDCCAomgAwIBAgIEN3DPtTANBgkqhkiG9w0BAQUFADBOMQswCQYDVQQGEwJV +UzEXMBUGA1UEChMORXF1aWZheCBTZWN1cmUxJjAkBgNVBAsTHUVxdWlmYXggU2Vj +dXJlIGVCdXNpbmVzcyBDQS0yMB4XDTk5MDYyMzEyMTQ0NVoXDTE5MDYyMzEyMTQ0 +NVowTjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDkVxdWlmYXggU2VjdXJlMSYwJAYD +VQQLEx1FcXVpZmF4IFNlY3VyZSBlQnVzaW5lc3MgQ0EtMjCBnzANBgkqhkiG9w0B +AQEFAAOBjQAwgYkCgYEA5Dk5kx5SBhsoNviyoynF7Y6yEb3+6+e0dMKP/wXn2Z0G +vxLIPw7y1tEkshHe0XMJitSxLJgJDR5QRrKDpkWNYmi7hRsgcDKqQM2mll/EcTc/ +BPO3QSQ5BxoeLmFYoBIL5aXfxavqN3HMHMg3OrmXUqesxWoklE6ce8/AatbfIb0C +AwEAAaOCAQkwggEFMHAGA1UdHwRpMGcwZaBjoGGkXzBdMQswCQYDVQQGEwJVUzEX +MBUGA1UEChMORXF1aWZheCBTZWN1cmUxJjAkBgNVBAsTHUVxdWlmYXggU2VjdXJl +IGVCdXNpbmVzcyBDQS0yMQ0wCwYDVQQDEwRDUkwxMBoGA1UdEAQTMBGBDzIwMTkw +NjIzMTIxNDQ1WjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAUUJ4L6q9euSBIplBq +y/3YIHqngnYwHQYDVR0OBBYEFFCeC+qvXrkgSKZQasv92CB6p4J2MAwGA1UdEwQF +MAMBAf8wGgYJKoZIhvZ9B0EABA0wCxsFVjMuMGMDAgbAMA0GCSqGSIb3DQEBBQUA +A4GBAAyGgq3oThr1jokn4jVYPSm0B482UJW/bsGe68SQsoWou7dC4A8HOd/7npCy +0cE+U58DRLB+S/Rv5Hwf5+Kx5Lia78O9zt4LMjTZ3ijtM2vE1Nc9ElirfQkty3D1 +E4qUoSek1nDFbZS1yX2doNLGCEnZZpum0/QL3MUmV+GRMOrN +-----END CERTIFICATE----- + +# Issuer: CN=AddTrust Class 1 CA Root O=AddTrust AB OU=AddTrust TTP Network +# Subject: CN=AddTrust Class 1 CA Root O=AddTrust AB OU=AddTrust TTP Network +# Label: "AddTrust Low-Value Services Root" +# Serial: 1 +# MD5 Fingerprint: 1e:42:95:02:33:92:6b:b9:5f:c0:7f:da:d6:b2:4b:fc +# SHA1 Fingerprint: cc:ab:0e:a0:4c:23:01:d6:69:7b:dd:37:9f:cd:12:eb:24:e3:94:9d +# SHA256 Fingerprint: 8c:72:09:27:9a:c0:4e:27:5e:16:d0:7f:d3:b7:75:e8:01:54:b5:96:80:46:e3:1f:52:dd:25:76:63:24:e9:a7 +-----BEGIN CERTIFICATE----- +MIIEGDCCAwCgAwIBAgIBATANBgkqhkiG9w0BAQUFADBlMQswCQYDVQQGEwJTRTEU +MBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3 +b3JrMSEwHwYDVQQDExhBZGRUcnVzdCBDbGFzcyAxIENBIFJvb3QwHhcNMDAwNTMw +MTAzODMxWhcNMjAwNTMwMTAzODMxWjBlMQswCQYDVQQGEwJTRTEUMBIGA1UEChML +QWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3b3JrMSEwHwYD +VQQDExhBZGRUcnVzdCBDbGFzcyAxIENBIFJvb3QwggEiMA0GCSqGSIb3DQEBAQUA +A4IBDwAwggEKAoIBAQCWltQhSWDia+hBBwzexODcEyPNwTXH+9ZOEQpnXvUGW2ul +CDtbKRY654eyNAbFvAWlA3yCyykQruGIgb3WntP+LVbBFc7jJp0VLhD7Bo8wBN6n +tGO0/7Gcrjyvd7ZWxbWroulpOj0OM3kyP3CCkplhbY0wCI9xP6ZIVxn4JdxLZlyl +dI+Yrsj5wAYi56xz36Uu+1LcsRVlIPo1Zmne3yzxbrww2ywkEtvrNTVokMsAsJch +PXQhI2U0K7t4WaPW4XY5mqRJjox0r26kmqPZm9I4XJuiGMx1I4S+6+JNM3GOGvDC ++Mcdoq0Dlyz4zyXG9rgkMbFjXZJ/Y/AlyVMuH79NAgMBAAGjgdIwgc8wHQYDVR0O +BBYEFJWxtPCUtr3H2tERCSG+wa9J/RB7MAsGA1UdDwQEAwIBBjAPBgNVHRMBAf8E +BTADAQH/MIGPBgNVHSMEgYcwgYSAFJWxtPCUtr3H2tERCSG+wa9J/RB7oWmkZzBl +MQswCQYDVQQGEwJTRTEUMBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFk +ZFRydXN0IFRUUCBOZXR3b3JrMSEwHwYDVQQDExhBZGRUcnVzdCBDbGFzcyAxIENB +IFJvb3SCAQEwDQYJKoZIhvcNAQEFBQADggEBACxtZBsfzQ3duQH6lmM0MkhHma6X +7f1yFqZzR1r0693p9db7RcwpiURdv0Y5PejuvE1Uhh4dbOMXJ0PhiVYrqW9yTkkz +43J8KiOavD7/KCrto/8cI7pDVwlnTUtiBi34/2ydYB7YHEt9tTEv2dB8Xfjea4MY +eDdXL+gzB2ffHsdrKpV2ro9Xo/D0UrSpUwjP4E/TelOL/bscVjby/rK25Xa71SJl +pz/+0WatC7xrmYbvP33zGDLKe8bjq2RGlfgmadlVg3sslgf/WSxEo8bl6ancoWOA +WiFeIc9TVPC6b4nbqKqVz4vjccweGyBECMB6tkD9xOQ14R0WHNC8K47Wcdk= +-----END CERTIFICATE----- + +# Issuer: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network +# Subject: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network +# Label: "AddTrust External Root" +# Serial: 1 +# MD5 Fingerprint: 1d:35:54:04:85:78:b0:3f:42:42:4d:bf:20:73:0a:3f +# SHA1 Fingerprint: 02:fa:f3:e2:91:43:54:68:60:78:57:69:4d:f5:e4:5b:68:85:18:68 +# SHA256 Fingerprint: 
68:7f:a4:51:38:22:78:ff:f0:c8:b1:1f:8d:43:d5:76:67:1c:6e:b2:bc:ea:b4:13:fb:83:d9:65:d0:6d:2f:f2 +-----BEGIN CERTIFICATE----- +MIIENjCCAx6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBvMQswCQYDVQQGEwJTRTEU +MBIGA1UEChMLQWRkVHJ1c3QgQUIxJjAkBgNVBAsTHUFkZFRydXN0IEV4dGVybmFs +IFRUUCBOZXR3b3JrMSIwIAYDVQQDExlBZGRUcnVzdCBFeHRlcm5hbCBDQSBSb290 +MB4XDTAwMDUzMDEwNDgzOFoXDTIwMDUzMDEwNDgzOFowbzELMAkGA1UEBhMCU0Ux +FDASBgNVBAoTC0FkZFRydXN0IEFCMSYwJAYDVQQLEx1BZGRUcnVzdCBFeHRlcm5h +bCBUVFAgTmV0d29yazEiMCAGA1UEAxMZQWRkVHJ1c3QgRXh0ZXJuYWwgQ0EgUm9v +dDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALf3GjPm8gAELTngTlvt +H7xsD821+iO2zt6bETOXpClMfZOfvUq8k+0DGuOPz+VtUFrWlymUWoCwSXrbLpX9 +uMq/NzgtHj6RQa1wVsfwTz/oMp50ysiQVOnGXw94nZpAPA6sYapeFI+eh6FqUNzX +mk6vBbOmcZSccbNQYArHE504B4YCqOmoaSYYkKtMsE8jqzpPhNjfzp/haW+710LX +a0Tkx63ubUFfclpxCDezeWWkWaCUN/cALw3CknLa0Dhy2xSoRcRdKn23tNbE7qzN +E0S3ySvdQwAl+mG5aWpYIxG3pzOPVnVZ9c0p10a3CitlttNCbxWyuHv77+ldU9U0 +WicCAwEAAaOB3DCB2TAdBgNVHQ4EFgQUrb2YejS0Jvf6xCZU7wO94CTLVBowCwYD +VR0PBAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wgZkGA1UdIwSBkTCBjoAUrb2YejS0 +Jvf6xCZU7wO94CTLVBqhc6RxMG8xCzAJBgNVBAYTAlNFMRQwEgYDVQQKEwtBZGRU +cnVzdCBBQjEmMCQGA1UECxMdQWRkVHJ1c3QgRXh0ZXJuYWwgVFRQIE5ldHdvcmsx +IjAgBgNVBAMTGUFkZFRydXN0IEV4dGVybmFsIENBIFJvb3SCAQEwDQYJKoZIhvcN +AQEFBQADggEBALCb4IUlwtYj4g+WBpKdQZic2YR5gdkeWxQHIzZlj7DYd7usQWxH +YINRsPkyPef89iYTx4AWpb9a/IfPeHmJIZriTAcKhjW88t5RxNKWt9x+Tu5w/Rw5 +6wwCURQtjr0W4MHfRnXnJK3s9EK0hZNwEGe6nQY1ShjTK3rMUUKhemPR5ruhxSvC +Nr4TDea9Y355e6cJDUCrat2PisP29owaQgVR1EX1n6diIWgVIEM8med8vSTYqZEX +c4g/VhsxOBi0cQ+azcgOno4uG+GMmIPLHzHxREzGBHNJdmAPx/i9F4BrLunMTA5a +mnkPIAou1Z5jJh5VkpTYghdae9C8x49OhgQ= +-----END CERTIFICATE----- + +# Issuer: CN=AddTrust Public CA Root O=AddTrust AB OU=AddTrust TTP Network +# Subject: CN=AddTrust Public CA Root O=AddTrust AB OU=AddTrust TTP Network +# Label: "AddTrust Public Services Root" +# Serial: 1 +# MD5 Fingerprint: c1:62:3e:23:c5:82:73:9c:03:59:4b:2b:e9:77:49:7f +# SHA1 Fingerprint: 2a:b6:28:48:5e:78:fb:f3:ad:9e:79:10:dd:6b:df:99:72:2c:96:e5 +# SHA256 Fingerprint: 07:91:ca:07:49:b2:07:82:aa:d3:c7:d7:bd:0c:df:c9:48:58:35:84:3e:b2:d7:99:60:09:ce:43:ab:6c:69:27 +-----BEGIN CERTIFICATE----- +MIIEFTCCAv2gAwIBAgIBATANBgkqhkiG9w0BAQUFADBkMQswCQYDVQQGEwJTRTEU +MBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3 +b3JrMSAwHgYDVQQDExdBZGRUcnVzdCBQdWJsaWMgQ0EgUm9vdDAeFw0wMDA1MzAx +MDQxNTBaFw0yMDA1MzAxMDQxNTBaMGQxCzAJBgNVBAYTAlNFMRQwEgYDVQQKEwtB +ZGRUcnVzdCBBQjEdMBsGA1UECxMUQWRkVHJ1c3QgVFRQIE5ldHdvcmsxIDAeBgNV +BAMTF0FkZFRydXN0IFB1YmxpYyBDQSBSb290MIIBIjANBgkqhkiG9w0BAQEFAAOC +AQ8AMIIBCgKCAQEA6Rowj4OIFMEg2Dybjxt+A3S72mnTRqX4jsIMEZBRpS9mVEBV +6tsfSlbunyNu9DnLoblv8n75XYcmYZ4c+OLspoH4IcUkzBEMP9smcnrHAZcHF/nX +GCwwfQ56HmIexkvA/X1id9NEHif2P0tEs7c42TkfYNVRknMDtABp4/MUTu7R3AnP +dzRGULD4EfL+OHn3Bzn+UZKXC1sIXzSGAa2Il+tmzV7R/9x98oTaunet3IAIx6eH +1lWfl2royBFkuucZKT8Rs3iQhCBSWxHveNCD9tVIkNAwHM+A+WD+eeSI8t0A65RF +62WUaUC6wNW0uLp9BBGo6zEFlpROWCGOn9Bg/QIDAQABo4HRMIHOMB0GA1UdDgQW +BBSBPjfYkrAfd59ctKtzquf2NGAv+jALBgNVHQ8EBAMCAQYwDwYDVR0TAQH/BAUw +AwEB/zCBjgYDVR0jBIGGMIGDgBSBPjfYkrAfd59ctKtzquf2NGAv+qFopGYwZDEL +MAkGA1UEBhMCU0UxFDASBgNVBAoTC0FkZFRydXN0IEFCMR0wGwYDVQQLExRBZGRU +cnVzdCBUVFAgTmV0d29yazEgMB4GA1UEAxMXQWRkVHJ1c3QgUHVibGljIENBIFJv +b3SCAQEwDQYJKoZIhvcNAQEFBQADggEBAAP3FUr4JNojVhaTdt02KLmuG7jD8WS6 +IBh4lSknVwW8fCr0uVFV2ocC3g8WFzH4qnkuCRO7r7IgGRLlk/lL+YPoRNWyQSW/ +iHVv/xD8SlTQX/D67zZzfRs2RcYhbbQVuE7PnFylPVoAjgbjPGsye/Kf8Lb93/Ao +GEjwxrzQvzSAlsJKsW2Ox5BF3i9nrEUEo3rcVZLJR2bYGozH7ZxOmuASu7VqTITh +4SINhwBk/ox9Yjllpu9CtoAlEmEBqCQTcAARJl/6NVDFSMwGR+gn2HCNX2TmoUQm 
+XiLsks3/QppEIW1cxeMiHV9HEufOX1362KqxMy3ZdvJOOjMMK7MtkAY= +-----END CERTIFICATE----- + +# Issuer: CN=AddTrust Qualified CA Root O=AddTrust AB OU=AddTrust TTP Network +# Subject: CN=AddTrust Qualified CA Root O=AddTrust AB OU=AddTrust TTP Network +# Label: "AddTrust Qualified Certificates Root" +# Serial: 1 +# MD5 Fingerprint: 27:ec:39:47:cd:da:5a:af:e2:9a:01:65:21:a9:4c:bb +# SHA1 Fingerprint: 4d:23:78:ec:91:95:39:b5:00:7f:75:8f:03:3b:21:1e:c5:4d:8b:cf +# SHA256 Fingerprint: 80:95:21:08:05:db:4b:bc:35:5e:44:28:d8:fd:6e:c2:cd:e3:ab:5f:b9:7a:99:42:98:8e:b8:f4:dc:d0:60:16 +-----BEGIN CERTIFICATE----- +MIIEHjCCAwagAwIBAgIBATANBgkqhkiG9w0BAQUFADBnMQswCQYDVQQGEwJTRTEU +MBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3 +b3JrMSMwIQYDVQQDExpBZGRUcnVzdCBRdWFsaWZpZWQgQ0EgUm9vdDAeFw0wMDA1 +MzAxMDQ0NTBaFw0yMDA1MzAxMDQ0NTBaMGcxCzAJBgNVBAYTAlNFMRQwEgYDVQQK +EwtBZGRUcnVzdCBBQjEdMBsGA1UECxMUQWRkVHJ1c3QgVFRQIE5ldHdvcmsxIzAh +BgNVBAMTGkFkZFRydXN0IFF1YWxpZmllZCBDQSBSb290MIIBIjANBgkqhkiG9w0B +AQEFAAOCAQ8AMIIBCgKCAQEA5B6a/twJWoekn0e+EV+vhDTbYjx5eLfpMLXsDBwq +xBb/4Oxx64r1EW7tTw2R0hIYLUkVAcKkIhPHEWT/IhKauY5cLwjPcWqzZwFZ8V1G +87B4pfYOQnrjfxvM0PC3KP0q6p6zsLkEqv32x7SxuCqg+1jxGaBvcCV+PmlKfw8i +2O+tCBGaKZnhqkRFmhJePp1tUvznoD1oL/BLcHwTOK28FSXx1s6rosAx1i+f4P8U +WfyEk9mHfExUE+uf0S0R+Bg6Ot4l2ffTQO2kBhLEO+GRwVY18BTcZTYJbqukB8c1 +0cIDMzZbdSZtQvESa0NvS3GU+jQd7RNuyoB/mC9suWXY6QIDAQABo4HUMIHRMB0G +A1UdDgQWBBQ5lYtii1zJ1IC6WA+XPxUIQ8yYpzALBgNVHQ8EBAMCAQYwDwYDVR0T +AQH/BAUwAwEB/zCBkQYDVR0jBIGJMIGGgBQ5lYtii1zJ1IC6WA+XPxUIQ8yYp6Fr +pGkwZzELMAkGA1UEBhMCU0UxFDASBgNVBAoTC0FkZFRydXN0IEFCMR0wGwYDVQQL +ExRBZGRUcnVzdCBUVFAgTmV0d29yazEjMCEGA1UEAxMaQWRkVHJ1c3QgUXVhbGlm +aWVkIENBIFJvb3SCAQEwDQYJKoZIhvcNAQEFBQADggEBABmrder4i2VhlRO6aQTv +hsoToMeqT2QbPxj2qC0sVY8FtzDqQmodwCVRLae/DLPt7wh/bDxGGuoYQ992zPlm +hpwsaPXpF/gxsxjE1kh9I0xowX67ARRvxdlu3rsEQmr49lx95dr6h+sNNVJn0J6X +dgWTP5XHAeZpVTh/EGGZyeNfpso+gmNIquIISD6q8rKFYqa0p9m9N5xotS1WfbC3 +P6CxB9bpT9zeRXEwMn8bLgn5v1Kh7sKAPgZcLlVAwRv1cEWw3F369nJad9Jjzc9Y +iQBCYz95OdBEsIJuQRno3eDBiFrRHnGTHyQwdOUeqN48Jzd/g66ed8/wMLH/S5no +xqE= +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. +# Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. 
+# Label: "Entrust Root Certification Authority" +# Serial: 1164660820 +# MD5 Fingerprint: d6:a5:c3:ed:5d:dd:3e:00:c1:3d:87:92:1f:1d:3f:e4 +# SHA1 Fingerprint: b3:1e:b1:b7:40:e3:6c:84:02:da:dc:37:d4:4d:f5:d4:67:49:52:f9 +# SHA256 Fingerprint: 73:c1:76:43:4f:1b:c6:d5:ad:f4:5b:0e:76:e7:27:28:7c:8d:e5:76:16:c1:e6:e6:14:1a:2b:2c:bc:7d:8e:4c +-----BEGIN CERTIFICATE----- +MIIEkTCCA3mgAwIBAgIERWtQVDANBgkqhkiG9w0BAQUFADCBsDELMAkGA1UEBhMC +VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xOTA3BgNVBAsTMHd3dy5lbnRydXN0 +Lm5ldC9DUFMgaXMgaW5jb3Jwb3JhdGVkIGJ5IHJlZmVyZW5jZTEfMB0GA1UECxMW +KGMpIDIwMDYgRW50cnVzdCwgSW5jLjEtMCsGA1UEAxMkRW50cnVzdCBSb290IENl +cnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA2MTEyNzIwMjM0MloXDTI2MTEyNzIw +NTM0MlowgbAxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMTkw +NwYDVQQLEzB3d3cuZW50cnVzdC5uZXQvQ1BTIGlzIGluY29ycG9yYXRlZCBieSBy +ZWZlcmVuY2UxHzAdBgNVBAsTFihjKSAyMDA2IEVudHJ1c3QsIEluYy4xLTArBgNV +BAMTJEVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASIwDQYJ +KoZIhvcNAQEBBQADggEPADCCAQoCggEBALaVtkNC+sZtKm9I35RMOVcF7sN5EUFo +Nu3s/poBj6E4KPz3EEZmLk0eGrEaTsbRwJWIsMn/MYszA9u3g3s+IIRe7bJWKKf4 +4LlAcTfFy0cOlypowCKVYhXbR9n10Cv/gkvJrT7eTNuQgFA/CYqEAOwwCj0Yzfv9 +KlmaI5UXLEWeH25DeW0MXJj+SKfFI0dcXv1u5x609mhF0YaDW6KKjbHjKYD+JXGI +rb68j6xSlkuqUY3kEzEZ6E5Nn9uss2rVvDlUccp6en+Q3X0dgNmBu1kmwhH+5pPi +94DkZfs0Nw4pgHBNrziGLp5/V6+eF67rHMsoIV+2HNjnogQi+dPa2MsCAwEAAaOB +sDCBrTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zArBgNVHRAEJDAi +gA8yMDA2MTEyNzIwMjM0MlqBDzIwMjYxMTI3MjA1MzQyWjAfBgNVHSMEGDAWgBRo +kORnpKZTgMeGZqTx90tD+4S9bTAdBgNVHQ4EFgQUaJDkZ6SmU4DHhmak8fdLQ/uE +vW0wHQYJKoZIhvZ9B0EABBAwDhsIVjcuMTo0LjADAgSQMA0GCSqGSIb3DQEBBQUA +A4IBAQCT1DCw1wMgKtD5Y+iRDAUgqV8ZyntyTtSx29CW+1RaGSwMCPeyvIWonX9t +O1KzKtvn1ISMY/YPyyYBkVBs9F8U4pN0wBOeMDpQ47RgxRzwIkSNcUesyBrJ6Zua +AGAT/3B+XxFNSRuzFVJ7yVTav52Vr2ua2J7p8eRDjeIRRDq/r72DQnNSi6q7pynP +9WQcCk3RvKqsnyrQ/39/2n3qse0wJcGE2jTSW3iDVuycNsMm4hH2Z0kdkquM++v/ +eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0tHuu2guQOHXvgR1m +0vdXcDazv/wor3ElhVsT/h5/WrQ8 +-----END CERTIFICATE----- + +# Issuer: CN=GeoTrust Global CA O=GeoTrust Inc. +# Subject: CN=GeoTrust Global CA O=GeoTrust Inc. 
+# Label: "GeoTrust Global CA" +# Serial: 144470 +# MD5 Fingerprint: f7:75:ab:29:fb:51:4e:b7:77:5e:ff:05:3c:99:8e:f5 +# SHA1 Fingerprint: de:28:f4:a4:ff:e5:b9:2f:a3:c5:03:d1:a3:49:a7:f9:96:2a:82:12 +# SHA256 Fingerprint: ff:85:6a:2d:25:1d:cd:88:d3:66:56:f4:50:12:67:98:cf:ab:aa:de:40:79:9c:72:2d:e4:d2:b5:db:36:a7:3a +-----BEGIN CERTIFICATE----- +MIIDVDCCAjygAwIBAgIDAjRWMA0GCSqGSIb3DQEBBQUAMEIxCzAJBgNVBAYTAlVT +MRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMRswGQYDVQQDExJHZW9UcnVzdCBHbG9i +YWwgQ0EwHhcNMDIwNTIxMDQwMDAwWhcNMjIwNTIxMDQwMDAwWjBCMQswCQYDVQQG +EwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEbMBkGA1UEAxMSR2VvVHJ1c3Qg +R2xvYmFsIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2swYYzD9 +9BcjGlZ+W988bDjkcbd4kdS8odhM+KhDtgPpTSEHCIjaWC9mOSm9BXiLnTjoBbdq +fnGk5sRgprDvgOSJKA+eJdbtg/OtppHHmMlCGDUUna2YRpIuT8rxh0PBFpVXLVDv +iS2Aelet8u5fa9IAjbkU+BQVNdnARqN7csiRv8lVK83Qlz6cJmTM386DGXHKTubU +1XupGc1V3sjs0l44U+VcT4wt/lAjNvxm5suOpDkZALeVAjmRCw7+OC7RHQWa9k0+ +bw8HHa8sHo9gOeL6NlMTOdReJivbPagUvTLrGAMoUgRx5aszPeE4uwc2hGKceeoW +MPRfwCvocWvk+QIDAQABo1MwUTAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTA +ephojYn7qwVkDBF9qn1luMrMTjAfBgNVHSMEGDAWgBTAephojYn7qwVkDBF9qn1l +uMrMTjANBgkqhkiG9w0BAQUFAAOCAQEANeMpauUvXVSOKVCUn5kaFOSPeCpilKIn +Z57QzxpeR+nBsqTP3UEaBU6bS+5Kb1VSsyShNwrrZHYqLizz/Tt1kL/6cdjHPTfS +tQWVYrmm3ok9Nns4d0iXrKYgjy6myQzCsplFAMfOEVEiIuCl6rYVSAlk6l5PdPcF +PseKUgzbFbS9bZvlxrFUaKnjaZC2mqUPuLk/IH2uSrW4nOQdtqvmlKXBx4Ot2/Un +hw4EbNX/3aBd7YdStysVAq45pmp06drE57xNNB6pXE0zX5IJL4hmXXeXxx12E6nV +5fEWCRE11azbJHFwLJhWC9kXtNHjUStedejV0NxPNO3CBWaAocvmMw== +-----END CERTIFICATE----- + +# Issuer: CN=GeoTrust Global CA 2 O=GeoTrust Inc. +# Subject: CN=GeoTrust Global CA 2 O=GeoTrust Inc. +# Label: "GeoTrust Global CA 2" +# Serial: 1 +# MD5 Fingerprint: 0e:40:a7:6c:de:03:5d:8f:d1:0f:e4:d1:8d:f9:6c:a9 +# SHA1 Fingerprint: a9:e9:78:08:14:37:58:88:f2:05:19:b0:6d:2b:0d:2b:60:16:90:7d +# SHA256 Fingerprint: ca:2d:82:a0:86:77:07:2f:8a:b6:76:4f:f0:35:67:6c:fe:3e:5e:32:5e:01:21:72:df:3f:92:09:6d:b7:9b:85 +-----BEGIN CERTIFICATE----- +MIIDZjCCAk6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBEMQswCQYDVQQGEwJVUzEW +MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEdMBsGA1UEAxMUR2VvVHJ1c3QgR2xvYmFs +IENBIDIwHhcNMDQwMzA0MDUwMDAwWhcNMTkwMzA0MDUwMDAwWjBEMQswCQYDVQQG +EwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEdMBsGA1UEAxMUR2VvVHJ1c3Qg +R2xvYmFsIENBIDIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDvPE1A +PRDfO1MA4Wf+lGAVPoWI8YkNkMgoI5kF6CsgncbzYEbYwbLVjDHZ3CB5JIG/NTL8 +Y2nbsSpr7iFY8gjpeMtvy/wWUsiRxP89c96xPqfCfWbB9X5SJBri1WeR0IIQ13hL +TytCOb1kLUCgsBDTOEhGiKEMuzozKmKY+wCdE1l/bztyqu6mD4b5BWHqZ38MN5aL +5mkWRxHCJ1kDs6ZgwiFAVvqgx306E+PsV8ez1q6diYD3Aecs9pYrEw15LNnA5IZ7 +S4wMcoKK+xfNAGw6EzywhIdLFnopsk/bHdQL82Y3vdj2V7teJHq4PIu5+pIaGoSe +2HSPqht/XvT+RSIhAgMBAAGjYzBhMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYE +FHE4NvICMVNHK266ZUapEBVYIAUJMB8GA1UdIwQYMBaAFHE4NvICMVNHK266ZUap +EBVYIAUJMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQUFAAOCAQEAA/e1K6td +EPx7srJerJsOflN4WT5CBP51o62sgU7XAotexC3IUnbHLB/8gTKY0UvGkpMzNTEv +/NgdRN3ggX+d6YvhZJFiCzkIjKx0nVnZellSlxG5FntvRdOW2TF9AjYPnDtuzywN +A0ZF66D0f0hExghAzN4bcLUprbqLOzRldRtxIR0sFAqwlpW41uryZfspuk/qkZN0 +abby/+Ea0AzRdoXLiiW9l14sbxWZJue2Kf8i7MkCx1YAzUm5s2x7UwQa4qjJqhIF +I8LO57sEAszAR6LkxCkvW0VXiVHuPOtSCP8HNR6fNWpHSlaY0VqFH4z1Ir+rzoPz +4iIprn2DQKi6bA== +-----END CERTIFICATE----- + +# Issuer: CN=GeoTrust Universal CA O=GeoTrust Inc. +# Subject: CN=GeoTrust Universal CA O=GeoTrust Inc. 
+# Label: "GeoTrust Universal CA" +# Serial: 1 +# MD5 Fingerprint: 92:65:58:8b:a2:1a:31:72:73:68:5c:b4:a5:7a:07:48 +# SHA1 Fingerprint: e6:21:f3:35:43:79:05:9a:4b:68:30:9d:8a:2f:74:22:15:87:ec:79 +# SHA256 Fingerprint: a0:45:9b:9f:63:b2:25:59:f5:fa:5d:4c:6d:b3:f9:f7:2f:f1:93:42:03:35:78:f0:73:bf:1d:1b:46:cb:b9:12 +-----BEGIN CERTIFICATE----- +MIIFaDCCA1CgAwIBAgIBATANBgkqhkiG9w0BAQUFADBFMQswCQYDVQQGEwJVUzEW +MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEeMBwGA1UEAxMVR2VvVHJ1c3QgVW5pdmVy +c2FsIENBMB4XDTA0MDMwNDA1MDAwMFoXDTI5MDMwNDA1MDAwMFowRTELMAkGA1UE +BhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xHjAcBgNVBAMTFUdlb1RydXN0 +IFVuaXZlcnNhbCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAKYV +VaCjxuAfjJ0hUNfBvitbtaSeodlyWL0AG0y/YckUHUWCq8YdgNY96xCcOq9tJPi8 +cQGeBvV8Xx7BDlXKg5pZMK4ZyzBIle0iN430SppyZj6tlcDgFgDgEB8rMQ7XlFTT +QjOgNB0eRXbdT8oYN+yFFXoZCPzVx5zw8qkuEKmS5j1YPakWaDwvdSEYfyh3peFh +F7em6fgemdtzbvQKoiFs7tqqhZJmr/Z6a4LauiIINQ/PQvE1+mrufislzDoR5G2v +c7J2Ha3QsnhnGqQ5HFELZ1aD/ThdDc7d8Lsrlh/eezJS/R27tQahsiFepdaVaH/w +mZ7cRQg+59IJDTWU3YBOU5fXtQlEIGQWFwMCTFMNaN7VqnJNk22CDtucvc+081xd +VHppCZbW2xHBjXWotM85yM48vCR85mLK4b19p71XZQvk/iXttmkQ3CgaRr0BHdCX +teGYO8A3ZNY9lO4L4fUorgtWv3GLIylBjobFS1J72HGrH4oVpjuDWtdYAVHGTEHZ +f9hBZ3KiKN9gg6meyHv8U3NyWfWTehd2Ds735VzZC1U0oqpbtWpU5xPKV+yXbfRe +Bi9Fi1jUIxaS5BZuKGNZMN9QAZxjiRqf2xeUgnA3wySemkfWWspOqGmJch+RbNt+ +nhutxx9z3SxPGWX9f5NAEC7S8O08ni4oPmkmM8V7AgMBAAGjYzBhMA8GA1UdEwEB +/wQFMAMBAf8wHQYDVR0OBBYEFNq7LqqwDLiIJlF0XG0D08DYj3rWMB8GA1UdIwQY +MBaAFNq7LqqwDLiIJlF0XG0D08DYj3rWMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG +9w0BAQUFAAOCAgEAMXjmx7XfuJRAyXHEqDXsRh3ChfMoWIawC/yOsjmPRFWrZIRc +aanQmjg8+uUfNeVE44B5lGiku8SfPeE0zTBGi1QrlaXv9z+ZhP015s8xxtxqv6fX +IwjhmF7DWgh2qaavdy+3YL1ERmrvl/9zlcGO6JP7/TG37FcREUWbMPEaiDnBTzyn +ANXH/KttgCJwpQzgXQQpAvvLoJHRfNbDflDVnVi+QTjruXU8FdmbyUqDWcDaU/0z +uzYYm4UPFd3uLax2k7nZAY1IEKj79TiG8dsKxr2EoyNB3tZ3b4XUhRxQ4K5RirqN +Pnbiucon8l+f725ZDQbYKxek0nxru18UGkiPGkzns0ccjkxFKyDuSN/n3QmOGKja +QI2SJhFTYXNd673nxE0pN2HrrDktZy4W1vUAg4WhzH92xH3kt0tm7wNFYGm2DFKW +koRepqO1pD4r2czYG0eq8kTaT/kD6PAUyz/zg97QwVTjt+gKN02LIFkDMBmhLMi9 +ER/frslKxfMnZmaGrGiR/9nmUxwPi1xpZQomyB40w11Re9epnAahNt3ViZS82eQt +DF4JbAiXfKM9fJP/P6EUp8+1Xevb2xzEdt+Iub1FBZUbrvxGakyvSOPOrg/Sfuvm +bJxPgWp6ZKy7PtXny3YuxadIwVyQD8vIP/rmMuGNG2+k5o7Y+SlIis5z/iw= +-----END CERTIFICATE----- + +# Issuer: CN=GeoTrust Universal CA 2 O=GeoTrust Inc. +# Subject: CN=GeoTrust Universal CA 2 O=GeoTrust Inc. 
+# Label: "GeoTrust Universal CA 2" +# Serial: 1 +# MD5 Fingerprint: 34:fc:b8:d0:36:db:9e:14:b3:c2:f2:db:8f:e4:94:c7 +# SHA1 Fingerprint: 37:9a:19:7b:41:85:45:35:0c:a6:03:69:f3:3c:2e:af:47:4f:20:79 +# SHA256 Fingerprint: a0:23:4f:3b:c8:52:7c:a5:62:8e:ec:81:ad:5d:69:89:5d:a5:68:0d:c9:1d:1c:b8:47:7f:33:f8:78:b9:5b:0b +-----BEGIN CERTIFICATE----- +MIIFbDCCA1SgAwIBAgIBATANBgkqhkiG9w0BAQUFADBHMQswCQYDVQQGEwJVUzEW +MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1c3QgVW5pdmVy +c2FsIENBIDIwHhcNMDQwMzA0MDUwMDAwWhcNMjkwMzA0MDUwMDAwWjBHMQswCQYD +VQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1 +c3QgVW5pdmVyc2FsIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC +AQCzVFLByT7y2dyxUxpZKeexw0Uo5dfR7cXFS6GqdHtXr0om/Nj1XqduGdt0DE81 +WzILAePb63p3NeqqWuDW6KFXlPCQo3RWlEQwAx5cTiuFJnSCegx2oG9NzkEtoBUG +FF+3Qs17j1hhNNwqCPkuwwGmIkQcTAeC5lvO0Ep8BNMZcyfwqph/Lq9O64ceJHdq +XbboW0W63MOhBW9Wjo8QJqVJwy7XQYci4E+GymC16qFjwAGXEHm9ADwSbSsVsaxL +se4YuU6W3Nx2/zu+z18DwPw76L5GG//aQMJS9/7jOvdqdzXQ2o3rXhhqMcceujwb +KNZrVMaqW9eiLBsZzKIC9ptZvTdrhrVtgrrY6slWvKk2WP0+GfPtDCapkzj4T8Fd +IgbQl+rhrcZV4IErKIM6+vR7IVEAvlI4zs1meaj0gVbi0IMJR1FbUGrP20gaXT73 +y/Zl92zxlfgCOzJWgjl6W70viRu/obTo/3+NjN8D8WBOWBFM66M/ECuDmgFz2ZRt +hAAnZqzwcEAJQpKtT5MNYQlRJNiS1QuUYbKHsu3/mjX/hVTK7URDrBs8FmtISgoc +QIgfksILAAX/8sgCSqSqqcyZlpwvWOB94b67B9xfBHJcMTTD7F8t4D1kkCLm0ey4 +Lt1ZrtmhN79UNdxzMk+MBB4zsslG8dhcyFVQyWi9qLo2CQIDAQABo2MwYTAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAfBgNV +HSMEGDAWgBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAOBgNVHQ8BAf8EBAMCAYYwDQYJ +KoZIhvcNAQEFBQADggIBAGbBxiPz2eAubl/oz66wsCVNK/g7WJtAJDday6sWSf+z +dXkzoS9tcBc0kf5nfo/sm+VegqlVHy/c1FEHEv6sFj4sNcZj/NwQ6w2jqtB8zNHQ +L1EuxBRa3ugZ4T7GzKQp5y6EqgYweHZUcyiYWTjgAA1i00J9IZ+uPTqM1fp3DRgr +Fg5fNuH8KrUwJM/gYwx7WBr+mbpCErGR9Hxo4sjoryzqyX6uuyo9DRXcNJW2GHSo +ag/HtPQTxORb7QrSpJdMKu0vbBKJPfEncKpqA1Ihn0CoZ1Dy81of398j9tx4TuaY +T1U6U+Pv8vSfx3zYWK8pIpe44L2RLrB27FcRz+8pRPPphXpgY+RdM4kX2TGq2tbz +GDVyz4crL2MjhF2EjD9XoIj8mZEoJmmZ1I+XRL6O1UixpCgp8RW04eWe3fiPpm8m +1wk8OhwRDqZsN/etRIcsKMfYdIKz0G9KV7s1KSegi+ghp4dkNl3M2Basx7InQJJV +OCiNUW7dFGdTbHFcJoRNdVq2fmBWqU2t+5sel/MN2dKXVHfaPRK34B7vCAas+YWH +6aLcr34YEoP9VhdBLtUpgn2Z9DH2canPLAEnpQW5qrJITirvn5NSUZU8UnOOVkwX +QMAJKOSLakhT2+zNVVXxxvjpoixMptEmX36vWkzaH6byHCx+rgIW0lbQL1dTR+iS +-----END CERTIFICATE----- + +# Issuer: CN=America Online Root Certification Authority 1 O=America Online Inc. +# Subject: CN=America Online Root Certification Authority 1 O=America Online Inc. 
+# Label: "America Online Root Certification Authority 1" +# Serial: 1 +# MD5 Fingerprint: 14:f1:08:ad:9d:fa:64:e2:89:e7:1c:cf:a8:ad:7d:5e +# SHA1 Fingerprint: 39:21:c1:15:c1:5d:0e:ca:5c:cb:5b:c4:f0:7d:21:d8:05:0b:56:6a +# SHA256 Fingerprint: 77:40:73:12:c6:3a:15:3d:5b:c0:0b:4e:51:75:9c:df:da:c2:37:dc:2a:33:b6:79:46:e9:8e:9b:fa:68:0a:e3 +-----BEGIN CERTIFICATE----- +MIIDpDCCAoygAwIBAgIBATANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEc +MBoGA1UEChMTQW1lcmljYSBPbmxpbmUgSW5jLjE2MDQGA1UEAxMtQW1lcmljYSBP +bmxpbmUgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAxMB4XDTAyMDUyODA2 +MDAwMFoXDTM3MTExOTIwNDMwMFowYzELMAkGA1UEBhMCVVMxHDAaBgNVBAoTE0Ft +ZXJpY2EgT25saW5lIEluYy4xNjA0BgNVBAMTLUFtZXJpY2EgT25saW5lIFJvb3Qg +Q2VydGlmaWNhdGlvbiBBdXRob3JpdHkgMTCCASIwDQYJKoZIhvcNAQEBBQADggEP +ADCCAQoCggEBAKgv6KRpBgNHw+kqmP8ZonCaxlCyfqXfaE0bfA+2l2h9LaaLl+lk +hsmj76CGv2BlnEtUiMJIxUo5vxTjWVXlGbR0yLQFOVwWpeKVBeASrlmLojNoWBym +1BW32J/X3HGrfpq/m44zDyL9Hy7nBzbvYjnF3cu6JRQj3gzGPTzOggjmZj7aUTsW +OqMFf6Dch9Wc/HKpoH145LcxVR5lu9RhsCFg7RAycsWSJR74kEoYeEfffjA3PlAb +2xzTa5qGUwew76wGePiEmf4hjUyAtgyC9mZweRrTT6PP8c9GsEsPPt2IYriMqQko +O3rHl+Ee5fSfwMCuJKDIodkP1nsmgmkyPacCAwEAAaNjMGEwDwYDVR0TAQH/BAUw +AwEB/zAdBgNVHQ4EFgQUAK3Zo/Z59m50qX8zPYEX10zPM94wHwYDVR0jBBgwFoAU +AK3Zo/Z59m50qX8zPYEX10zPM94wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEB +BQUAA4IBAQB8itEfGDeC4Liwo+1WlchiYZwFos3CYiZhzRAW18y0ZTTQEYqtqKkF +Zu90821fnZmv9ov761KyBZiibyrFVL0lvV+uyIbqRizBs73B6UlwGBaXCBOMIOAb +LjpHyx7kADCVW/RFo8AasAFOq73AI25jP4BKxQft3OJvx8Fi8eNy1gTIdGcL+oir +oQHIb/AUr9KZzVGTfu0uOMe9zkZQPXLjeSWdm4grECDdpbgyn43gKd8hdIaC2y+C +MMbHNYaz+ZZfRtsMRf3zUMNvxsNIrUam4SdHCh0Om7bCd39j8uB9Gr784N/Xx6ds +sPmuujz9dLQR6FgNgLzTqIA6me11zEZ7 +-----END CERTIFICATE----- + +# Issuer: CN=America Online Root Certification Authority 2 O=America Online Inc. +# Subject: CN=America Online Root Certification Authority 2 O=America Online Inc. 
+# Label: "America Online Root Certification Authority 2" +# Serial: 1 +# MD5 Fingerprint: d6:ed:3c:ca:e2:66:0f:af:10:43:0d:77:9b:04:09:bf +# SHA1 Fingerprint: 85:b5:ff:67:9b:0c:79:96:1f:c8:6e:44:22:00:46:13:db:17:92:84 +# SHA256 Fingerprint: 7d:3b:46:5a:60:14:e5:26:c0:af:fc:ee:21:27:d2:31:17:27:ad:81:1c:26:84:2d:00:6a:f3:73:06:cc:80:bd +-----BEGIN CERTIFICATE----- +MIIFpDCCA4ygAwIBAgIBATANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEc +MBoGA1UEChMTQW1lcmljYSBPbmxpbmUgSW5jLjE2MDQGA1UEAxMtQW1lcmljYSBP +bmxpbmUgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAyMB4XDTAyMDUyODA2 +MDAwMFoXDTM3MDkyOTE0MDgwMFowYzELMAkGA1UEBhMCVVMxHDAaBgNVBAoTE0Ft +ZXJpY2EgT25saW5lIEluYy4xNjA0BgNVBAMTLUFtZXJpY2EgT25saW5lIFJvb3Qg +Q2VydGlmaWNhdGlvbiBBdXRob3JpdHkgMjCCAiIwDQYJKoZIhvcNAQEBBQADggIP +ADCCAgoCggIBAMxBRR3pPU0Q9oyxQcngXssNt79Hc9PwVU3dxgz6sWYFas14tNwC +206B89enfHG8dWOgXeMHDEjsJcQDIPT/DjsS/5uN4cbVG7RtIuOx238hZK+GvFci +KtZHgVdEglZTvYYUAQv8f3SkWq7xuhG1m1hagLQ3eAkzfDJHA1zEpYNI9FdWboE2 +JxhP7JsowtS013wMPgwr38oE18aO6lhOqKSlGBxsRZijQdEt0sdtjRnxrXm3gT+9 +BoInLRBYBbV4Bbkv2wxrkJB+FFk4u5QkE+XRnRTf04JNRvCAOVIyD+OEsnpD8l7e +Xz8d3eOyG6ChKiMDbi4BFYdcpnV1x5dhvt6G3NRI270qv0pV2uh9UPu0gBe4lL8B +PeraunzgWGcXuVjgiIZGZ2ydEEdYMtA1fHkqkKJaEBEjNa0vzORKW6fIJ/KD3l67 +Xnfn6KVuY8INXWHQjNJsWiEOyiijzirplcdIz5ZvHZIlyMbGwcEMBawmxNJ10uEq +Z8A9W6Wa6897GqidFEXlD6CaZd4vKL3Ob5Rmg0gp2OpljK+T2WSfVVcmv2/LNzGZ +o2C7HK2JNDJiuEMhBnIMoVxtRsX6Kc8w3onccVvdtjc+31D1uAclJuW8tf48ArO3 ++L5DwYcRlJ4jbBeKuIonDFRH8KmzwICMoCfrHRnjB453cMor9H124HhnAgMBAAGj +YzBhMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFE1FwWg4u3OpaaEg5+31IqEj +FNeeMB8GA1UdIwQYMBaAFE1FwWg4u3OpaaEg5+31IqEjFNeeMA4GA1UdDwEB/wQE +AwIBhjANBgkqhkiG9w0BAQUFAAOCAgEAZ2sGuV9FOypLM7PmG2tZTiLMubekJcmn +xPBUlgtk87FYT15R/LKXeydlwuXK5w0MJXti4/qftIe3RUavg6WXSIylvfEWK5t2 +LHo1YGwRgJfMqZJS5ivmae2p+DYtLHe/YUjRYwu5W1LtGLBDQiKmsXeu3mnFzccc +obGlHBD7GL4acN3Bkku+KVqdPzW+5X1R+FXgJXUjhx5c3LqdsKyzadsXg8n33gy8 +CNyRnqjQ1xU3c6U1uPx+xURABsPr+CKAXEfOAuMRn0T//ZoyzH1kUQ7rVyZ2OuMe +IjzCpjbdGe+n/BLzJsBZMYVMnNjP36TMzCmT/5RtdlwTCJfy7aULTd3oyWgOZtMA +DjMSW7yV5TKQqLPGbIOtd+6Lfn6xqavT4fG2wLHqiMDn05DpKJKUe2h7lyoKZy2F +AjgQ5ANh1NolNscIWC2hp1GvMApJ9aZphwctREZ2jirlmjvXGKL8nDgQzMY70rUX +Om/9riW99XJZZLF0KjhfGEzfz3EEWjbUvy+ZnOjZurGV5gJLIaFb1cFPj65pbVPb +AZO1XB4Y3WRayhgoPmMEEf0cjQAPuDffZ4qdZqkCapH/E8ovXYO8h5Ns3CRRFgQl +Zvqz2cK6Kb6aSDiCmfS/O0oxGfm/jiEzFMpPVF/7zvuPcX/9XhmgD0uRuMRUvAaw +RY8mkaKO/qk= +-----END CERTIFICATE----- + +# Issuer: CN=AAA Certificate Services O=Comodo CA Limited +# Subject: CN=AAA Certificate Services O=Comodo CA Limited +# Label: "Comodo AAA Services root" +# Serial: 1 +# MD5 Fingerprint: 49:79:04:b0:eb:87:19:ac:47:b0:bc:11:51:9b:74:d0 +# SHA1 Fingerprint: d1:eb:23:a4:6d:17:d6:8f:d9:25:64:c2:f1:f1:60:17:64:d8:e3:49 +# SHA256 Fingerprint: d7:a7:a0:fb:5d:7e:27:31:d7:71:e9:48:4e:bc:de:f7:1d:5f:0c:3e:0a:29:48:78:2b:c8:3e:e0:ea:69:9e:f4 +-----BEGIN CERTIFICATE----- +MIIEMjCCAxqgAwIBAgIBATANBgkqhkiG9w0BAQUFADB7MQswCQYDVQQGEwJHQjEb +MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow +GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEhMB8GA1UEAwwYQUFBIENlcnRpZmlj +YXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVowezEL +MAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE +BwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxITAfBgNVBAMM +GEFBQSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEBBQADggEP +ADCCAQoCggEBAL5AnfRu4ep2hxxNRUSOvkbIgwadwSr+GB+O5AL686tdUIoWMQua +BtDFcCLNSS1UY8y2bmhGC1Pqy0wkwLxyTurxFa70VJoSCsN6sjNg4tqJVfMiWPPe +3M/vg4aijJRPn2jymJBGhCfHdr/jzDUsi14HZGWCwEiwqJH5YZ92IFCokcdmtet4 
+YgNW8IoaE+oxox6gmf049vYnMlhvB/VruPsUK6+3qszWY19zjNoFmag4qMsXeDZR +rOme9Hg6jc8P2ULimAyrL58OAd7vn5lJ8S3frHRNG5i1R8XlKdH5kBjHYpy+g8cm +ez6KJcfA3Z3mNWgQIJ2P2N7Sw4ScDV7oL8kCAwEAAaOBwDCBvTAdBgNVHQ4EFgQU +oBEKIz6W8Qfs4q8p74Klf9AwpLQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQF +MAMBAf8wewYDVR0fBHQwcjA4oDagNIYyaHR0cDovL2NybC5jb21vZG9jYS5jb20v +QUFBQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmwwNqA0oDKGMGh0dHA6Ly9jcmwuY29t +b2RvLm5ldC9BQUFDZXJ0aWZpY2F0ZVNlcnZpY2VzLmNybDANBgkqhkiG9w0BAQUF +AAOCAQEACFb8AvCb6P+k+tZ7xkSAzk/ExfYAWMymtrwUSWgEdujm7l3sAg9g1o1Q +GE8mTgHj5rCl7r+8dFRBv/38ErjHT1r0iWAFf2C3BUrz9vHCv8S5dIa2LX1rzNLz +Rt0vxuBqw8M0Ayx9lt1awg6nCpnBBYurDC/zXDrPbDdVCYfeU0BsWO/8tqtlbgT2 +G9w84FoVxp7Z8VlIMCFlA2zs6SFz7JsDoeA3raAVGI/6ugLOpyypEBMs1OUIJqsi +l2D4kF501KKaU73yqWjgom7C12yxow+ev+to51byrvLjKzg6CYG1a4XXvi3tPxq3 +smPi9WIsgtRqAEFQ8TmDn5XpNpaYbg== +-----END CERTIFICATE----- + +# Issuer: CN=Secure Certificate Services O=Comodo CA Limited +# Subject: CN=Secure Certificate Services O=Comodo CA Limited +# Label: "Comodo Secure Services root" +# Serial: 1 +# MD5 Fingerprint: d3:d9:bd:ae:9f:ac:67:24:b3:c8:1b:52:e1:b9:a9:bd +# SHA1 Fingerprint: 4a:65:d5:f4:1d:ef:39:b8:b8:90:4a:4a:d3:64:81:33:cf:c7:a1:d1 +# SHA256 Fingerprint: bd:81:ce:3b:4f:65:91:d1:1a:67:b5:fc:7a:47:fd:ef:25:52:1b:f9:aa:4e:18:b9:e3:df:2e:34:a7:80:3b:e8 +-----BEGIN CERTIFICATE----- +MIIEPzCCAyegAwIBAgIBATANBgkqhkiG9w0BAQUFADB+MQswCQYDVQQGEwJHQjEb +MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow +GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEkMCIGA1UEAwwbU2VjdXJlIENlcnRp +ZmljYXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVow +fjELMAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G +A1UEBwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxJDAiBgNV +BAMMG1NlY3VyZSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEB +BQADggEPADCCAQoCggEBAMBxM4KK0HDrc4eCQNUd5MvJDkKQ+d40uaG6EfQlhfPM +cm3ye5drswfxdySRXyWP9nQ95IDC+DwN879A6vfIUtFyb+/Iq0G4bi4XKpVpDM3S +HpR7LZQdqnXXs5jLrLxkU0C8j6ysNstcrbvd4JQX7NFc0L/vpZXJkMWwrPsbQ996 +CF23uPJAGysnnlDOXmWCiIxe004MeuoIkbY2qitC++rCoznl2yY4rYsK7hljxxwk +3wN42ubqwUcaCwtGCd0C/N7Lh1/XMGNooa7cMqG6vv5Eq2i2pRcV/b3Vp6ea5EQz +6YiO/O1R65NxTq0B50SOqy3LqP4BSUjwwN3HaNiS/j0CAwEAAaOBxzCBxDAdBgNV +HQ4EFgQUPNiTiMLAggnMAZkGkyDpnnAJY08wDgYDVR0PAQH/BAQDAgEGMA8GA1Ud +EwEB/wQFMAMBAf8wgYEGA1UdHwR6MHgwO6A5oDeGNWh0dHA6Ly9jcmwuY29tb2Rv +Y2EuY29tL1NlY3VyZUNlcnRpZmljYXRlU2VydmljZXMuY3JsMDmgN6A1hjNodHRw +Oi8vY3JsLmNvbW9kby5uZXQvU2VjdXJlQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmww +DQYJKoZIhvcNAQEFBQADggEBAIcBbSMdflsXfcFhMs+P5/OKlFlm4J4oqF7Tt/Q0 +5qo5spcWxYJvMqTpjOev/e/C6LlLqqP05tqNZSH7uoDrJiiFGv45jN5bBAS0VPmj +Z55B+glSzAVIqMk/IQQezkhr/IXownuvf7fM+F86/TXGDe+X3EyrEeFryzHRbPtI +gKvcnDe4IRRLDXE97IMzbtFuMhbsmMcWi1mmNKsFVy2T96oTy9IT4rcuO81rUBcJ +aD61JlfutuC23bkpgHl9j6PwpCikFcSF9CfUa7/lXORlAnZUtOM3ZiTTGWHIUhDl +izeauan5Hb/qmZJhlv8BzaFfDbxxvA6sCx1HRR3B7Hzs/Sk= +-----END CERTIFICATE----- + +# Issuer: CN=Trusted Certificate Services O=Comodo CA Limited +# Subject: CN=Trusted Certificate Services O=Comodo CA Limited +# Label: "Comodo Trusted Services root" +# Serial: 1 +# MD5 Fingerprint: 91:1b:3f:6e:cd:9e:ab:ee:07:fe:1f:71:d2:b3:61:27 +# SHA1 Fingerprint: e1:9f:e3:0e:8b:84:60:9e:80:9b:17:0d:72:a8:c5:ba:6e:14:09:bd +# SHA256 Fingerprint: 3f:06:e5:56:81:d4:96:f5:be:16:9e:b5:38:9f:9f:2b:8f:f6:1e:17:08:df:68:81:72:48:49:cd:5d:27:cb:69 +-----BEGIN CERTIFICATE----- +MIIEQzCCAyugAwIBAgIBATANBgkqhkiG9w0BAQUFADB/MQswCQYDVQQGEwJHQjEb +MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow +GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDElMCMGA1UEAwwcVHJ1c3RlZCBDZXJ0 
+aWZpY2F0ZSBTZXJ2aWNlczAeFw0wNDAxMDEwMDAwMDBaFw0yODEyMzEyMzU5NTla +MH8xCzAJBgNVBAYTAkdCMRswGQYDVQQIDBJHcmVhdGVyIE1hbmNoZXN0ZXIxEDAO +BgNVBAcMB1NhbGZvcmQxGjAYBgNVBAoMEUNvbW9kbyBDQSBMaW1pdGVkMSUwIwYD +VQQDDBxUcnVzdGVkIENlcnRpZmljYXRlIFNlcnZpY2VzMIIBIjANBgkqhkiG9w0B +AQEFAAOCAQ8AMIIBCgKCAQEA33FvNlhTWvI2VFeAxHQIIO0Yfyod5jWaHiWsnOWW +fnJSoBVC21ndZHoa0Lh73TkVvFVIxO06AOoxEbrycXQaZ7jPM8yoMa+j49d/vzMt +TGo87IvDktJTdyR0nAducPy9C1t2ul/y/9c3S0pgePfw+spwtOpZqqPOSC+pw7IL +fhdyFgymBwwbOM/JYrc/oJOlh0Hyt3BAd9i+FHzjqMB6juljatEPmsbS9Is6FARW +1O24zG71++IsWL1/T2sr92AkWCTOJu80kTrV44HQsvAEAtdbtz6SrGsSivnkBbA7 +kUlcsutT6vifR4buv5XAwAaf0lteERv0xwQ1KdJVXOTt6wIDAQABo4HJMIHGMB0G +A1UdDgQWBBTFe1i97doladL3WRaoszLAeydb9DAOBgNVHQ8BAf8EBAMCAQYwDwYD +VR0TAQH/BAUwAwEB/zCBgwYDVR0fBHwwejA8oDqgOIY2aHR0cDovL2NybC5jb21v +ZG9jYS5jb20vVHJ1c3RlZENlcnRpZmljYXRlU2VydmljZXMuY3JsMDqgOKA2hjRo +dHRwOi8vY3JsLmNvbW9kby5uZXQvVHJ1c3RlZENlcnRpZmljYXRlU2VydmljZXMu +Y3JsMA0GCSqGSIb3DQEBBQUAA4IBAQDIk4E7ibSvuIQSTI3S8NtwuleGFTQQuS9/ +HrCoiWChisJ3DFBKmwCL2Iv0QeLQg4pKHBQGsKNoBXAxMKdTmw7pSqBYaWcOrp32 +pSxBvzwGa+RZzG0Q8ZZvH9/0BAKkn0U+yNj6NkZEUD+Cl5EfKNsYEYwq5GWDVxIS +jBc/lDb+XbDABHcTuPQV1T84zJQ6VdCsmPW6AF/ghhmBeC8owH7TzEIK9a5QoNE+ +xqFx7D+gIIxmOom0jtTYsU0lR+4viMi14QVFwL4Ucd56/Y57fU0IlqUSc/Atyjcn +dBInTMu2l+nZrghtWjlA3QVHdWpaIbOjGM9O9y5Xt5hwXsjEeLBi +-----END CERTIFICATE----- + +# Issuer: CN=UTN - DATACorp SGC O=The USERTRUST Network OU=http://www.usertrust.com +# Subject: CN=UTN - DATACorp SGC O=The USERTRUST Network OU=http://www.usertrust.com +# Label: "UTN DATACorp SGC Root CA" +# Serial: 91374294542884689855167577680241077609 +# MD5 Fingerprint: b3:a5:3e:77:21:6d:ac:4a:c0:c9:fb:d5:41:3d:ca:06 +# SHA1 Fingerprint: 58:11:9f:0e:12:82:87:ea:50:fd:d9:87:45:6f:4f:78:dc:fa:d6:d4 +# SHA256 Fingerprint: 85:fb:2f:91:dd:12:27:5a:01:45:b6:36:53:4f:84:02:4a:d6:8b:69:b8:ee:88:68:4f:f7:11:37:58:05:b3:48 +-----BEGIN CERTIFICATE----- +MIIEXjCCA0agAwIBAgIQRL4Mi1AAIbQR0ypoBqmtaTANBgkqhkiG9w0BAQUFADCB +kzELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2Ug +Q2l0eTEeMBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExho +dHRwOi8vd3d3LnVzZXJ0cnVzdC5jb20xGzAZBgNVBAMTElVUTiAtIERBVEFDb3Jw +IFNHQzAeFw05OTA2MjQxODU3MjFaFw0xOTA2MjQxOTA2MzBaMIGTMQswCQYDVQQG +EwJVUzELMAkGA1UECBMCVVQxFzAVBgNVBAcTDlNhbHQgTGFrZSBDaXR5MR4wHAYD +VQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxITAfBgNVBAsTGGh0dHA6Ly93d3cu +dXNlcnRydXN0LmNvbTEbMBkGA1UEAxMSVVROIC0gREFUQUNvcnAgU0dDMIIBIjAN +BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA3+5YEKIrblXEjr8uRgnn4AgPLit6 +E5Qbvfa2gI5lBZMAHryv4g+OGQ0SR+ysraP6LnD43m77VkIVni5c7yPeIbkFdicZ +D0/Ww5y0vpQZY/KmEQrrU0icvvIpOxboGqBMpsn0GFlowHDyUwDAXlCCpVZvNvlK +4ESGoE1O1kduSUrLZ9emxAW5jh70/P/N5zbgnAVssjMiFdC04MwXwLLA9P4yPykq +lXvY8qdOD1R8oQ2AswkDwf9c3V6aPryuvEeKaq5xyh+xKrhfQgUL7EYw0XILyulW +bfXv33i+Ybqypa4ETLyorGkVl73v67SMvzX41MPRKA5cOp9wGDMgd8SirwIDAQAB +o4GrMIGoMAsGA1UdDwQEAwIBxjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRT +MtGzz3/64PGgXYVOktKeRR20TzA9BgNVHR8ENjA0MDKgMKAuhixodHRwOi8vY3Js +LnVzZXJ0cnVzdC5jb20vVVROLURBVEFDb3JwU0dDLmNybDAqBgNVHSUEIzAhBggr +BgEFBQcDAQYKKwYBBAGCNwoDAwYJYIZIAYb4QgQBMA0GCSqGSIb3DQEBBQUAA4IB +AQAnNZcAiosovcYzMB4p/OL31ZjUQLtgyr+rFywJNn9Q+kHcrpY6CiM+iVnJowft +Gzet/Hy+UUla3joKVAgWRcKZsYfNjGjgaQPpxE6YsjuMFrMOoAyYUJuTqXAJyCyj +j98C5OBxOvG0I3KgqgHf35g+FFCgMSa9KOlaMCZ1+XtgHI3zzVAmbQQnmt/VDUVH +KWss5nbZqSl9Mt3JNjy9rjXxEZ4du5A/EkdOjtd+D2JzHVImOBwYSf0wdJrE5SIv +2MCN7ZF6TACPcn9d2t0bi0Vr591pl6jFVkwPDPafepE39peC4N1xaf92P2BNPM/3 +mfnGV/TJVTl4uix5yaaIK/QI +-----END CERTIFICATE----- + +# Issuer: CN=UTN-USERFirst-Hardware O=The USERTRUST Network OU=http://www.usertrust.com 
+# Subject: CN=UTN-USERFirst-Hardware O=The USERTRUST Network OU=http://www.usertrust.com +# Label: "UTN USERFirst Hardware Root CA" +# Serial: 91374294542884704022267039221184531197 +# MD5 Fingerprint: 4c:56:41:e5:0d:bb:2b:e8:ca:a3:ed:18:08:ad:43:39 +# SHA1 Fingerprint: 04:83:ed:33:99:ac:36:08:05:87:22:ed:bc:5e:46:00:e3:be:f9:d7 +# SHA256 Fingerprint: 6e:a5:47:41:d0:04:66:7e:ed:1b:48:16:63:4a:a3:a7:9e:6e:4b:96:95:0f:82:79:da:fc:8d:9b:d8:81:21:37 +-----BEGIN CERTIFICATE----- +MIIEdDCCA1ygAwIBAgIQRL4Mi1AAJLQR0zYq/mUK/TANBgkqhkiG9w0BAQUFADCB +lzELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2Ug +Q2l0eTEeMBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExho +dHRwOi8vd3d3LnVzZXJ0cnVzdC5jb20xHzAdBgNVBAMTFlVUTi1VU0VSRmlyc3Qt +SGFyZHdhcmUwHhcNOTkwNzA5MTgxMDQyWhcNMTkwNzA5MTgxOTIyWjCBlzELMAkG +A1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2UgQ2l0eTEe +MBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExhodHRwOi8v +d3d3LnVzZXJ0cnVzdC5jb20xHzAdBgNVBAMTFlVUTi1VU0VSRmlyc3QtSGFyZHdh +cmUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCx98M4P7Sof885glFn +0G2f0v9Y8+efK+wNiVSZuTiZFvfgIXlIwrthdBKWHTxqctU8EGc6Oe0rE81m65UJ +M6Rsl7HoxuzBdXmcRl6Nq9Bq/bkqVRcQVLMZ8Jr28bFdtqdt++BxF2uiiPsA3/4a +MXcMmgF6sTLjKwEHOG7DpV4jvEWbe1DByTCP2+UretNb+zNAHqDVmBe8i4fDidNd +oI6yqqr2jmmIBsX6iSHzCJ1pLgkzmykNRg+MzEk0sGlRvfkGzWitZky8PqxhvQqI +DsjfPe58BEydCl5rkdbux+0ojatNh4lz0G6k0B4WixThdkQDf2Os5M1JnMWS9Ksy +oUhbAgMBAAGjgbkwgbYwCwYDVR0PBAQDAgHGMA8GA1UdEwEB/wQFMAMBAf8wHQYD +VR0OBBYEFKFyXyYbKJhDlV0HN9WFlp1L0sNFMEQGA1UdHwQ9MDswOaA3oDWGM2h0 +dHA6Ly9jcmwudXNlcnRydXN0LmNvbS9VVE4tVVNFUkZpcnN0LUhhcmR3YXJlLmNy +bDAxBgNVHSUEKjAoBggrBgEFBQcDAQYIKwYBBQUHAwUGCCsGAQUFBwMGBggrBgEF +BQcDBzANBgkqhkiG9w0BAQUFAAOCAQEARxkP3nTGmZev/K0oXnWO6y1n7k57K9cM +//bey1WiCuFMVGWTYGufEpytXoMs61quwOQt9ABjHbjAbPLPSbtNk28Gpgoiskli +CE7/yMgUsogWXecB5BKV5UU0s4tpvc+0hY91UZ59Ojg6FEgSxvunOxqNDYJAB+gE +CJChicsZUN/KHAG8HQQZexB2lzvukJDKxA4fFm517zP4029bHpbj4HR3dHuKom4t +3XbWOTCC8KucUvIqx69JXn7HaOWCgchqJ/kniCrVWFCVH/A7HFe7fRQ5YiuayZSS +KqMiDP+JJn1fIytH1xUdqWqeUQ0qUZ6B+dQ7XnASfxAynB67nfhmqA== +-----END CERTIFICATE----- + +# Issuer: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com +# Subject: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com +# Label: "XRamp Global CA Root" +# Serial: 107108908803651509692980124233745014957 +# MD5 Fingerprint: a1:0b:44:b3:ca:10:d8:00:6e:9d:0f:d8:0f:92:0a:d1 +# SHA1 Fingerprint: b8:01:86:d1:eb:9c:86:a5:41:04:cf:30:54:f3:4c:52:b7:e5:58:c6 +# SHA256 Fingerprint: ce:cd:dc:90:50:99:d8:da:df:c5:b1:d2:09:b7:37:cb:e2:c1:8c:fb:2c:10:c0:ff:0b:cf:0d:32:86:fc:1a:a2 +-----BEGIN CERTIFICATE----- +MIIEMDCCAxigAwIBAgIQUJRs7Bjq1ZxN1ZfvdY+grTANBgkqhkiG9w0BAQUFADCB +gjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3dy54cmFtcHNlY3VyaXR5LmNvbTEk +MCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2VydmljZXMgSW5jMS0wKwYDVQQDEyRY +UmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQxMTAxMTcx +NDA0WhcNMzUwMTAxMDUzNzE5WjCBgjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3 +dy54cmFtcHNlY3VyaXR5LmNvbTEkMCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2Vy +dmljZXMgSW5jMS0wKwYDVQQDEyRYUmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBB +dXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCYJB69FbS6 +38eMpSe2OAtp87ZOqCwuIR1cRN8hXX4jdP5efrRKt6atH67gBhbim1vZZ3RrXYCP +KZ2GG9mcDZhtdhAoWORlsH9KmHmf4MMxfoArtYzAQDsRhtDLooY2YKTVMIJt2W7Q +DxIEM5dfT2Fa8OT5kavnHTu86M/0ay00fOJIYRyO82FEzG+gSqmUsE3a56k0enI4 +qEHMPJQRfevIpoy3hsvKMzvZPTeL+3o+hiznc9cKV6xkmxnr9A8ECIqsAxcZZPRa +JSKNNCyy9mgdEm3Tih4U2sSPpuIjhdV6Db1q4Ons7Be7QhtnqiXtRYMh/MHJfNVi 
+PvryxS3T/dRlAgMBAAGjgZ8wgZwwEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0P +BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMZPoj0GY4QJnM5i5ASs +jVy16bYbMDYGA1UdHwQvMC0wK6ApoCeGJWh0dHA6Ly9jcmwueHJhbXBzZWN1cml0 +eS5jb20vWEdDQS5jcmwwEAYJKwYBBAGCNxUBBAMCAQEwDQYJKoZIhvcNAQEFBQAD +ggEBAJEVOQMBG2f7Shz5CmBbodpNl2L5JFMn14JkTpAuw0kbK5rc/Kh4ZzXxHfAR +vbdI4xD2Dd8/0sm2qlWkSLoC295ZLhVbO50WfUfXN+pfTXYSNrsf16GBBEYgoyxt +qZ4Bfj8pzgCT3/3JknOJiWSe5yvkHJEs0rnOfc5vMZnT5r7SHpDwCRR5XCOrTdLa +IR9NmXmd4c8nnxCbHIgNsIpkQTG4DmyQJKSbXHGPurt+HBvbaoAPIbzp26a3QPSy +i6mx5O+aGtA9aZnuqCij4Tyz8LIRnM98QObd50N9otg6tamN8jSZxNQQ4Qb9CYQQ +O+7ETPTsJ3xCwnR8gooJybQDJbw= +-----END CERTIFICATE----- + +# Issuer: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority +# Subject: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority +# Label: "Go Daddy Class 2 CA" +# Serial: 0 +# MD5 Fingerprint: 91:de:06:25:ab:da:fd:32:17:0c:bb:25:17:2a:84:67 +# SHA1 Fingerprint: 27:96:ba:e6:3f:18:01:e2:77:26:1b:a0:d7:77:70:02:8f:20:ee:e4 +# SHA256 Fingerprint: c3:84:6b:f2:4b:9e:93:ca:64:27:4c:0e:c6:7c:1e:cc:5e:02:4f:fc:ac:d2:d7:40:19:35:0e:81:fe:54:6a:e4 +-----BEGIN CERTIFICATE----- +MIIEADCCAuigAwIBAgIBADANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEh +MB8GA1UEChMYVGhlIEdvIERhZGR5IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBE +YWRkeSBDbGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA0MDYyOTE3 +MDYyMFoXDTM0MDYyOTE3MDYyMFowYzELMAkGA1UEBhMCVVMxITAfBgNVBAoTGFRo +ZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28gRGFkZHkgQ2xhc3Mg +MiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQADggEN +ADCCAQgCggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCA +PVYYYwhv2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6w +wdhFJ2+qN1j3hybX2C32qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXi +EqITLdiOr18SPaAIBQi2XKVlOARFmR6jYGB0xUGlcmIbYsUfb18aQr4CUWWoriMY +avx4A6lNf4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmYvLEHZ6IVDd2gWMZEewo+ +YihfukEHU1jPEX44dMX4/7VpkI+EdOqXG68CAQOjgcAwgb0wHQYDVR0OBBYEFNLE +sNKR1EwRcbNhyz2h/t2oatTjMIGNBgNVHSMEgYUwgYKAFNLEsNKR1EwRcbNhyz2h +/t2oatTjoWekZTBjMQswCQYDVQQGEwJVUzEhMB8GA1UEChMYVGhlIEdvIERhZGR5 +IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBEYWRkeSBDbGFzcyAyIENlcnRpZmlj +YXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQAD +ggEBADJL87LKPpH8EsahB4yOd6AzBhRckB4Y9wimPQoZ+YeAEW5p5JYXMP80kWNy +OO7MHAGjHZQopDH2esRU1/blMVgDoszOYtuURXO1v0XJJLXVggKtI3lpjbi2Tc7P +TMozI+gciKqdi0FuFskg5YmezTvacPd+mSYgFFQlq25zheabIZ0KbIIOqPjCDPoQ +HmyW74cNxA9hi63ugyuV+I6ShHI56yDqg+2DzZduCLzrTia2cyvk0/ZM/iZx4mER +dEr/VxqHD3VILs9RaRegAhJhldXRQLIQTO7ErBBDpqWeCtWVYpoNz4iCxTIM5Cuf +ReYNnyicsbkqWletNw+vHX/bvZ8= +-----END CERTIFICATE----- + +# Issuer: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority +# Subject: O=Starfield Technologies, Inc. 
OU=Starfield Class 2 Certification Authority +# Label: "Starfield Class 2 CA" +# Serial: 0 +# MD5 Fingerprint: 32:4a:4b:bb:c8:63:69:9b:be:74:9a:c6:dd:1d:46:24 +# SHA1 Fingerprint: ad:7e:1c:28:b0:64:ef:8f:60:03:40:20:14:c3:d0:e3:37:0e:b5:8a +# SHA256 Fingerprint: 14:65:fa:20:53:97:b8:76:fa:a6:f0:a9:95:8e:55:90:e4:0f:cc:7f:aa:4f:b7:c2:c8:67:75:21:fb:5f:b6:58 +-----BEGIN CERTIFICATE----- +MIIEDzCCAvegAwIBAgIBADANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJVUzEl +MCMGA1UEChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMp +U3RhcmZpZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQw +NjI5MTczOTE2WhcNMzQwNjI5MTczOTE2WjBoMQswCQYDVQQGEwJVUzElMCMGA1UE +ChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMpU3RhcmZp +ZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggEgMA0GCSqGSIb3 +DQEBAQUAA4IBDQAwggEIAoIBAQC3Msj+6XGmBIWtDBFk385N78gDGIc/oav7PKaf +8MOh2tTYbitTkPskpD6E8J7oX+zlJ0T1KKY/e97gKvDIr1MvnsoFAZMej2YcOadN ++lq2cwQlZut3f+dZxkqZJRRU6ybH838Z1TBwj6+wRir/resp7defqgSHo9T5iaU0 +X9tDkYI22WY8sbi5gv2cOj4QyDvvBmVmepsZGD3/cVE8MC5fvj13c7JdBmzDI1aa +K4UmkhynArPkPw2vCHmCuDY96pzTNbO8acr1zJ3o/WSNF4Azbl5KXZnJHoe0nRrA +1W4TNSNe35tfPe/W93bC6j67eA0cQmdrBNj41tpvi/JEoAGrAgEDo4HFMIHCMB0G +A1UdDgQWBBS/X7fRzt0fhvRbVazc1xDCDqmI5zCBkgYDVR0jBIGKMIGHgBS/X7fR +zt0fhvRbVazc1xDCDqmI56FspGowaDELMAkGA1UEBhMCVVMxJTAjBgNVBAoTHFN0 +YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAsTKVN0YXJmaWVsZCBD +bGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8w +DQYJKoZIhvcNAQEFBQADggEBAAWdP4id0ckaVaGsafPzWdqbAYcaT1epoXkJKtv3 +L7IezMdeatiDh6GX70k1PncGQVhiv45YuApnP+yz3SFmH8lU+nLMPUxA2IGvd56D +eruix/U0F47ZEUD0/CwqTRV/p2JdLiXTAAsgGh1o+Re49L2L7ShZ3U0WixeDyLJl +xy16paq8U4Zt3VekyvggQQto8PT7dL5WXXp59fkdheMtlb71cZBDzI0fmgAKhynp +VSJYACPq4xJDKVtHCN2MQWplBqjlIapBtJUhlbl90TSrE9atvNziPTnNvT51cKEY +WQPJIrSPnNVeKtelttQKbfi3QBFGmh95DmK/D5fs4C8fF5Q= +-----END CERTIFICATE----- + +# Issuer: CN=StartCom Certification Authority O=StartCom Ltd. OU=Secure Digital Certificate Signing +# Subject: CN=StartCom Certification Authority O=StartCom Ltd. 
OU=Secure Digital Certificate Signing +# Label: "StartCom Certification Authority" +# Serial: 1 +# MD5 Fingerprint: 22:4d:8f:8a:fc:f7:35:c2:bb:57:34:90:7b:8b:22:16 +# SHA1 Fingerprint: 3e:2b:f7:f2:03:1b:96:f3:8c:e6:c4:d8:a8:5d:3e:2d:58:47:6a:0f +# SHA256 Fingerprint: c7:66:a9:be:f2:d4:07:1c:86:3a:31:aa:49:20:e8:13:b2:d1:98:60:8c:b7:b7:cf:e2:11:43:b8:36:df:09:ea +-----BEGIN CERTIFICATE----- +MIIHyTCCBbGgAwIBAgIBATANBgkqhkiG9w0BAQUFADB9MQswCQYDVQQGEwJJTDEW +MBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMiU2VjdXJlIERpZ2l0YWwg +Q2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3RhcnRDb20gQ2VydGlmaWNh +dGlvbiBBdXRob3JpdHkwHhcNMDYwOTE3MTk0NjM2WhcNMzYwOTE3MTk0NjM2WjB9 +MQswCQYDVQQGEwJJTDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMi +U2VjdXJlIERpZ2l0YWwgQ2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3Rh +cnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUA +A4ICDwAwggIKAoICAQDBiNsJvGxGfHiflXu1M5DycmLWwTYgIiRezul38kMKogZk +pMyONvg45iPwbm2xPN1yo4UcodM9tDMr0y+v/uqwQVlntsQGfQqedIXWeUyAN3rf +OQVSWff0G0ZDpNKFhdLDcfN1YjS6LIp/Ho/u7TTQEceWzVI9ujPW3U3eCztKS5/C +Ji/6tRYccjV3yjxd5srhJosaNnZcAdt0FCX+7bWgiA/deMotHweXMAEtcnn6RtYT +Kqi5pquDSR3l8u/d5AGOGAqPY1MWhWKpDhk6zLVmpsJrdAfkK+F2PrRt2PZE4XNi +HzvEvqBTViVsUQn3qqvKv3b9bZvzndu/PWa8DFaqr5hIlTpL36dYUNk4dalb6kMM +Av+Z6+hsTXBbKWWc3apdzK8BMewM69KN6Oqce+Zu9ydmDBpI125C4z/eIT574Q1w ++2OqqGwaVLRcJXrJosmLFqa7LH4XXgVNWG4SHQHuEhANxjJ/GP/89PrNbpHoNkm+ +Gkhpi8KWTRoSsmkXwQqQ1vp5Iki/untp+HDH+no32NgN0nZPV/+Qt+OR0t3vwmC3 +Zzrd/qqc8NSLf3Iizsafl7b4r4qgEKjZ+xjGtrVcUjyJthkqcwEKDwOzEmDyei+B +26Nu/yYwl/WL3YlXtq09s68rxbd2AvCl1iuahhQqcvbjM4xdCUsT37uMdBNSSwID +AQABo4ICUjCCAk4wDAYDVR0TBAUwAwEB/zALBgNVHQ8EBAMCAa4wHQYDVR0OBBYE +FE4L7xqkQFulF2mHMMo0aEPQQa7yMGQGA1UdHwRdMFswLKAqoCiGJmh0dHA6Ly9j +ZXJ0LnN0YXJ0Y29tLm9yZy9zZnNjYS1jcmwuY3JsMCugKaAnhiVodHRwOi8vY3Js +LnN0YXJ0Y29tLm9yZy9zZnNjYS1jcmwuY3JsMIIBXQYDVR0gBIIBVDCCAVAwggFM +BgsrBgEEAYG1NwEBATCCATswLwYIKwYBBQUHAgEWI2h0dHA6Ly9jZXJ0LnN0YXJ0 +Y29tLm9yZy9wb2xpY3kucGRmMDUGCCsGAQUFBwIBFilodHRwOi8vY2VydC5zdGFy +dGNvbS5vcmcvaW50ZXJtZWRpYXRlLnBkZjCB0AYIKwYBBQUHAgIwgcMwJxYgU3Rh +cnQgQ29tbWVyY2lhbCAoU3RhcnRDb20pIEx0ZC4wAwIBARqBl0xpbWl0ZWQgTGlh +YmlsaXR5LCByZWFkIHRoZSBzZWN0aW9uICpMZWdhbCBMaW1pdGF0aW9ucyogb2Yg +dGhlIFN0YXJ0Q29tIENlcnRpZmljYXRpb24gQXV0aG9yaXR5IFBvbGljeSBhdmFp +bGFibGUgYXQgaHR0cDovL2NlcnQuc3RhcnRjb20ub3JnL3BvbGljeS5wZGYwEQYJ +YIZIAYb4QgEBBAQDAgAHMDgGCWCGSAGG+EIBDQQrFilTdGFydENvbSBGcmVlIFNT +TCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTANBgkqhkiG9w0BAQUFAAOCAgEAFmyZ +9GYMNPXQhV59CuzaEE44HF7fpiUFS5Eyweg78T3dRAlbB0mKKctmArexmvclmAk8 +jhvh3TaHK0u7aNM5Zj2gJsfyOZEdUauCe37Vzlrk4gNXcGmXCPleWKYK34wGmkUW +FjgKXlf2Ysd6AgXmvB618p70qSmD+LIU424oh0TDkBreOKk8rENNZEXO3SipXPJz +ewT4F+irsfMuXGRuczE6Eri8sxHkfY+BUZo7jYn0TZNmezwD7dOaHZrzZVD1oNB1 +ny+v8OqCQ5j4aZyJecRDjkZy42Q2Eq/3JR44iZB3fsNrarnDy0RLrHiQi+fHLB5L +EUTINFInzQpdn4XBidUaePKVEFMy3YCEZnXZtWgo+2EuvoSoOMCZEoalHmdkrQYu +L6lwhceWD3yJZfWOQ1QOq92lgDmUYMA0yZZwLKMS9R9Ie70cfmu3nZD0Ijuu+Pwq +yvqCUqDvr0tVk+vBtfAii6w0TiYiBKGHLHVKt+V9E9e4DGTANtLJL4YSjCMJwRuC +O3NJo2pXh5Tl1njFmUNj403gdy3hZZlyaQQaRwnmDwFWJPsfvw55qVguucQJAX6V +um0ABj6y6koQOdjQK/W/7HW/lwLFCRsI3FU34oH7N4RDYiDK51ZLZer+bMEkkySh +NOsF/5oirpt9P/FlUQqmMGqz9IgcgA38corog14= +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Assured ID Root CA" +# Serial: 17154717934120587862167794914071425081 +# MD5 Fingerprint: 87:ce:0b:7b:2a:0e:49:00:e1:58:71:9b:37:a8:93:72 +# SHA1 Fingerprint: 
05:63:b8:63:0d:62:d7:5a:bb:c8:ab:1e:4b:df:b5:a8:99:b2:4d:43 +# SHA256 Fingerprint: 3e:90:99:b5:01:5e:8f:48:6c:00:bc:ea:9d:11:1e:e7:21:fa:ba:35:5a:89:bc:f1:df:69:56:1e:3d:c6:32:5c +-----BEGIN CERTIFICATE----- +MIIDtzCCAp+gAwIBAgIQDOfg5RfYRv6P5WD8G/AwOTANBgkqhkiG9w0BAQUFADBl +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv +b3QgQ0EwHhcNMDYxMTEwMDAwMDAwWhcNMzExMTEwMDAwMDAwWjBlMQswCQYDVQQG +EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl +cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwggEi +MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtDhXO5EOAXLGH87dg+XESpa7c +JpSIqvTO9SA5KFhgDPiA2qkVlTJhPLWxKISKityfCgyDF3qPkKyK53lTXDGEKvYP +mDI2dsze3Tyoou9q+yHyUmHfnyDXH+Kx2f4YZNISW1/5WBg1vEfNoTb5a3/UsDg+ +wRvDjDPZ2C8Y/igPs6eD1sNuRMBhNZYW/lmci3Zt1/GiSw0r/wty2p5g0I6QNcZ4 +VYcgoc/lbQrISXwxmDNsIumH0DJaoroTghHtORedmTpyoeb6pNnVFzF1roV9Iq4/ +AUaG9ih5yLHa5FcXxH4cDrC0kqZWs72yl+2qp/C3xag/lRbQ/6GW6whfGHdPAgMB +AAGjYzBhMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW +BBRF66Kv9JLLgjEtUYunpyGd823IDzAfBgNVHSMEGDAWgBRF66Kv9JLLgjEtUYun +pyGd823IDzANBgkqhkiG9w0BAQUFAAOCAQEAog683+Lt8ONyc3pklL/3cmbYMuRC +dWKuh+vy1dneVrOfzM4UKLkNl2BcEkxY5NM9g0lFWJc1aRqoR+pWxnmrEthngYTf +fwk8lOa4JiwgvT2zKIn3X/8i4peEH+ll74fg38FnSbNd67IJKusm7Xi+fT8r87cm +NW1fiQG2SVufAQWbqz0lwcy2f8Lxb4bG+mRo64EtlOtCt/qMHt1i8b5QZ7dsvfPx +H2sMNgcWfzd8qVttevESRmCD1ycEvkvOl77DZypoEd+A5wwzZr8TDRRu838fYxAe ++o0bJW1sj6W3YQGx0qMmoRBxna3iw/nDmVG3KwcIzi7mULKn+gpFL6Lw8g== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Global Root CA" +# Serial: 10944719598952040374951832963794454346 +# MD5 Fingerprint: 79:e4:a9:84:0d:7d:3a:96:d7:c0:4f:e2:43:4c:89:2e +# SHA1 Fingerprint: a8:98:5d:3a:65:e5:e5:c4:b2:d7:d6:6d:40:c6:dd:2f:b1:9c:54:36 +# SHA256 Fingerprint: 43:48:a0:e9:44:4c:78:cb:26:5e:05:8d:5e:89:44:b4:d8:4f:96:62:bd:26:db:25:7f:89:34:a4:43:c7:01:61 +-----BEGIN CERTIFICATE----- +MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBh +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBD +QTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAwMDAwMDBaMGExCzAJBgNVBAYTAlVT +MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j +b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkqhkiG +9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsB +CSDMAZOnTjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97 +nh6Vfe63SKMI2tavegw5BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt +43C/dxC//AH2hdmoRBBYMql1GNXRor5H4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7P +T19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y7vrTC0LUq7dBMtoM1O/4 +gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQABo2MwYTAO +BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbR +TLtm8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUw +DQYJKoZIhvcNAQEFBQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/Esr +hMAtudXH/vTBH1jLuG2cenTnmCmrEbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg +06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIttep3Sp+dWOIrWcBAI+0tKIJF +PnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886UAb3LujEV0ls +YSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk +CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4= +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert High Assurance EV Root CA 
O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert High Assurance EV Root CA" +# Serial: 3553400076410547919724730734378100087 +# MD5 Fingerprint: d4:74:de:57:5c:39:b2:d3:9c:85:83:c5:c0:65:49:8a +# SHA1 Fingerprint: 5f:b7:ee:06:33:e2:59:db:ad:0c:4c:9a:e6:d3:8f:1a:61:c7:dc:25 +# SHA256 Fingerprint: 74:31:e5:f4:c3:c1:ce:46:90:77:4f:0b:61:e0:54:40:88:3b:a9:a0:1e:d0:0b:a6:ab:d7:80:6e:d3:b1:18:cf +-----BEGIN CERTIFICATE----- +MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j +ZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAwMFoXDTMxMTExMDAwMDAwMFowbDEL +MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3 +LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug +RVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm ++9S75S0tMqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTW +PNt0OKRKzE0lgvdKpVMSOO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEM +xChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFB +Ik5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQNAQTXKFx01p8VdteZOE3 +hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUeh10aUAsg +EsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQF +MAMBAf8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaA +FLE+w2kD+L9HAdSYJhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3Nec +nzyIZgYIVyHbIUf4KmeqvxgydkAQV8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6z +eM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFpmyPInngiK3BD41VHMWEZ71jF +hS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkKmNEVX58Svnw2 +Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe +vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep ++OkuE6N36B9K +-----END CERTIFICATE----- + +# Issuer: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc. +# Subject: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc. 
+# Label: "GeoTrust Primary Certification Authority" +# Serial: 32798226551256963324313806436981982369 +# MD5 Fingerprint: 02:26:c3:01:5e:08:30:37:43:a9:d0:7d:cf:37:e6:bf +# SHA1 Fingerprint: 32:3c:11:8e:1b:f7:b8:b6:52:54:e2:e2:10:0d:d6:02:90:37:f0:96 +# SHA256 Fingerprint: 37:d5:10:06:c5:12:ea:ab:62:64:21:f1:ec:8c:92:01:3f:c5:f8:2a:e9:8e:e5:33:eb:46:19:b8:de:b4:d0:6c +-----BEGIN CERTIFICATE----- +MIIDfDCCAmSgAwIBAgIQGKy1av1pthU6Y2yv2vrEoTANBgkqhkiG9w0BAQUFADBY +MQswCQYDVQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjExMC8GA1UEAxMo +R2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEx +MjcwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMFgxCzAJBgNVBAYTAlVTMRYwFAYDVQQK +Ew1HZW9UcnVzdCBJbmMuMTEwLwYDVQQDEyhHZW9UcnVzdCBQcmltYXJ5IENlcnRp +ZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEAvrgVe//UfH1nrYNke8hCUy3f9oQIIGHWAVlqnEQRr+92/ZV+zmEwu3qDXwK9 +AWbK7hWNb6EwnL2hhZ6UOvNWiAAxz9juapYC2e0DjPt1befquFUWBRaa9OBesYjA +ZIVcFU2Ix7e64HXprQU9nceJSOC7KMgD4TCTZF5SwFlwIjVXiIrxlQqD17wxcwE0 +7e9GceBrAqg1cmuXm2bgyxx5X9gaBGgeRwLmnWDiNpcB3841kt++Z8dtd1k7j53W +kBWUvEI0EME5+bEnPn7WinXFsq+W06Lem+SYvn3h6YGttm/81w7a4DSwDRp35+MI +mO9Y+pyEtzavwt+s0vQQBnBxNQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G +A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQULNVQQZcVi/CPNmFbSvtr2ZnJM5IwDQYJ +KoZIhvcNAQEFBQADggEBAFpwfyzdtzRP9YZRqSa+S7iq8XEN3GHHoOo0Hnp3DwQ1 +6CePbJC/kRYkRj5KTs4rFtULUh38H2eiAkUxT87z+gOneZ1TatnaYzr4gNfTmeGl +4b7UVXGYNTq+k+qurUKykG/g/CFNNWMziUnWm07Kx+dOCQD32sfvmWKZd7aVIl6K +oKv0uHiYyjgZmclynnjNS6yvGaBzEi38wkG6gZHaFloxt/m0cYASSJlyc1pZU8Fj +UjPtp8nSOQJw+uCxQmYpqptR7TBUIhRf2asdweSU8Pj1K/fqynhG1riR/aYNKxoU +AT6A8EKglQdebc3MS6RFjasS6LPeWuWgfOgPIh1a6Vk= +-----END CERTIFICATE----- + +# Issuer: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. - For authorized use only +# Subject: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. 
- For authorized use only +# Label: "thawte Primary Root CA" +# Serial: 69529181992039203566298953787712940909 +# MD5 Fingerprint: 8c:ca:dc:0b:22:ce:f5:be:72:ac:41:1a:11:a8:d8:12 +# SHA1 Fingerprint: 91:c6:d6:ee:3e:8a:c8:63:84:e5:48:c2:99:29:5c:75:6c:81:7b:81 +# SHA256 Fingerprint: 8d:72:2f:81:a9:c1:13:c0:79:1d:f1:36:a2:96:6d:b2:6c:95:0a:97:1d:b4:6b:41:99:f4:ea:54:b7:8b:fb:9f +-----BEGIN CERTIFICATE----- +MIIEIDCCAwigAwIBAgIQNE7VVyDV7exJ9C/ON9srbTANBgkqhkiG9w0BAQUFADCB +qTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf +Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw +MDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxHzAdBgNV +BAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwHhcNMDYxMTE3MDAwMDAwWhcNMzYw +NzE2MjM1OTU5WjCBqTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5j +LjEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYG +A1UECxMvKGMpIDIwMDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNl +IG9ubHkxHzAdBgNVBAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwggEiMA0GCSqG +SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCsoPD7gFnUnMekz52hWXMJEEUMDSxuaPFs +W0hoSVk3/AszGcJ3f8wQLZU0HObrTQmnHNK4yZc2AreJ1CRfBsDMRJSUjQJib+ta +3RGNKJpchJAQeg29dGYvajig4tVUROsdB58Hum/u6f1OCyn1PoSgAfGcq/gcfomk +6KHYcWUNo1F77rzSImANuVud37r8UVsLr5iy6S7pBOhih94ryNdOwUxkHt3Ph1i6 +Sk/KaAcdHJ1KxtUvkcx8cXIcxcBn6zL9yZJclNqFwJu/U30rCfSMnZEfl2pSy94J +NqR32HuHUETVPm4pafs5SSYeCaWAe0At6+gnhcn+Yf1+5nyXHdWdAgMBAAGjQjBA +MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBR7W0XP +r87Lev0xkhpqtvNG61dIUDANBgkqhkiG9w0BAQUFAAOCAQEAeRHAS7ORtvzw6WfU +DW5FvlXok9LOAz/t2iWwHVfLHjp2oEzsUHboZHIMpKnxuIvW1oeEuzLlQRHAd9mz +YJ3rG9XRbkREqaYB7FViHXe4XI5ISXycO1cRrK1zN44veFyQaEfZYGDm/Ac9IiAX +xPcW6cTYcvnIc3zfFi8VqT79aie2oetaupgf1eNNZAqdE8hhuvU5HIe6uL17In/2 +/qxAeeWsEG89jxt5dovEN7MhGITlNgDrYyCZuen+MwS7QcjBAvlEYyCegc5C09Y/ +LHbTY5xZ3Y+m4Q6gLkH3LpVHz7z9M/P2C2F+fpErgUfCJzDupxBdN49cOSvkBPB7 +jVaMaA== +-----END CERTIFICATE----- + +# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. - For authorized use only +# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. 
- For authorized use only +# Label: "VeriSign Class 3 Public Primary Certification Authority - G5" +# Serial: 33037644167568058970164719475676101450 +# MD5 Fingerprint: cb:17:e4:31:67:3e:e2:09:fe:45:57:93:f3:0a:fa:1c +# SHA1 Fingerprint: 4e:b6:d5:78:49:9b:1c:cf:5f:58:1e:ad:56:be:3d:9b:67:44:a5:e5 +# SHA256 Fingerprint: 9a:cf:ab:7e:43:c8:d8:80:d0:6b:26:2a:94:de:ee:e4:b4:65:99:89:c3:d0:ca:f1:9b:af:64:05:e4:1a:b7:df +-----BEGIN CERTIFICATE----- +MIIE0zCCA7ugAwIBAgIQGNrRniZ96LtKIVjNzGs7SjANBgkqhkiG9w0BAQUFADCB +yjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL +ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJp +U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxW +ZXJpU2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0 +aG9yaXR5IC0gRzUwHhcNMDYxMTA4MDAwMDAwWhcNMzYwNzE2MjM1OTU5WjCByjEL +MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW +ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJpU2ln +biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp +U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y +aXR5IC0gRzUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvJAgIKXo1 +nmAMqudLO07cfLw8RRy7K+D+KQL5VwijZIUVJ/XxrcgxiV0i6CqqpkKzj/i5Vbex +t0uz/o9+B1fs70PbZmIVYc9gDaTY3vjgw2IIPVQT60nKWVSFJuUrjxuf6/WhkcIz +SdhDY2pSS9KP6HBRTdGJaXvHcPaz3BJ023tdS1bTlr8Vd6Gw9KIl8q8ckmcY5fQG +BO+QueQA5N06tRn/Arr0PO7gi+s3i+z016zy9vA9r911kTMZHRxAy3QkGSGT2RT+ +rCpSx4/VBEnkjWNHiDxpg8v+R70rfk/Fla4OndTRQ8Bnc+MUCH7lP59zuDMKz10/ +NIeWiu5T6CUVAgMBAAGjgbIwga8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8E +BAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJaW1hZ2UvZ2lmMCEwHzAH +BgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYjaHR0cDovL2xvZ28udmVy +aXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFH/TZafC3ey78DAJ80M5+gKv +MzEzMA0GCSqGSIb3DQEBBQUAA4IBAQCTJEowX2LP2BqYLz3q3JktvXf2pXkiOOzE +p6B4Eq1iDkVwZMXnl2YtmAl+X6/WzChl8gGqCBpH3vn5fJJaCGkgDdk+bW48DW7Y +5gaRQBi5+MHt39tBquCWIMnNZBU4gcmU7qKEKQsTb47bDN0lAtukixlE0kF6BWlK +WE9gyn6CagsCqiUXObXbf+eEZSqVir2G3l6BFoMtEMze/aiCKm0oHw0LxOXnGiYZ +4fQRbxC1lfznQgUy286dUV4otp6F01vvpX1FQHKOtw5rDgb7MzVIcbidJ4vEZV8N +hnacRHr2lVz2XTIIM6RUthg/aFzyQkqFOFSDX9HoLPKsEdao7WNq +-----END CERTIFICATE----- + +# Issuer: CN=COMODO Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO Certification Authority O=COMODO CA Limited +# Label: "COMODO Certification Authority" +# Serial: 104350513648249232941998508985834464573 +# MD5 Fingerprint: 5c:48:dc:f7:42:72:ec:56:94:6d:1c:cc:71:35:80:75 +# SHA1 Fingerprint: 66:31:bf:9e:f7:4f:9e:b6:c9:d5:a6:0c:ba:6a:be:d1:f7:bd:ef:7b +# SHA256 Fingerprint: 0c:2c:d6:3d:f7:80:6f:a3:99:ed:e8:09:11:6b:57:5b:f8:79:89:f0:65:18:f9:80:8c:86:05:03:17:8b:af:66 +-----BEGIN CERTIFICATE----- +MIIEHTCCAwWgAwIBAgIQToEtioJl4AsC7j41AkblPTANBgkqhkiG9w0BAQUFADCB +gTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G +A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxJzAlBgNV +BAMTHkNPTU9ETyBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEyMDEwMDAw +MDBaFw0yOTEyMzEyMzU5NTlaMIGBMQswCQYDVQQGEwJHQjEbMBkGA1UECBMSR3Jl +YXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHEwdTYWxmb3JkMRowGAYDVQQKExFDT01P +RE8gQ0EgTGltaXRlZDEnMCUGA1UEAxMeQ09NT0RPIENlcnRpZmljYXRpb24gQXV0 +aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0ECLi3LjkRv3 +UcEbVASY06m/weaKXTuH+7uIzg3jLz8GlvCiKVCZrts7oVewdFFxze1CkU1B/qnI +2GqGd0S7WWaXUF601CxwRM/aN5VCaTwwxHGzUvAhTaHYujl8HJ6jJJ3ygxaYqhZ8 +Q5sVW7euNJH+1GImGEaaP+vB+fGQV+useg2L23IwambV4EajcNxo2f8ESIl33rXp ++2dtQem8Ob0y2WIC8bGoPW43nOIv4tOiJovGuFVDiOEjPqXSJDlqR6sA1KGzqSX+ 
+DT+nHbrTUcELpNqsOO9VUCQFZUaTNE8tja3G1CEZ0o7KBWFxB3NH5YoZEr0ETc5O +nKVIrLsm9wIDAQABo4GOMIGLMB0GA1UdDgQWBBQLWOWLxkwVN6RAqTCpIb5HNlpW +/zAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zBJBgNVHR8EQjBAMD6g +PKA6hjhodHRwOi8vY3JsLmNvbW9kb2NhLmNvbS9DT01PRE9DZXJ0aWZpY2F0aW9u +QXV0aG9yaXR5LmNybDANBgkqhkiG9w0BAQUFAAOCAQEAPpiem/Yb6dc5t3iuHXIY +SdOH5EOC6z/JqvWote9VfCFSZfnVDeFs9D6Mk3ORLgLETgdxb8CPOGEIqB6BCsAv +IC9Bi5HcSEW88cbeunZrM8gALTFGTO3nnc+IlP8zwFboJIYmuNg4ON8qa90SzMc/ +RxdMosIGlgnW2/4/PEZB31jiVg88O8EckzXZOFKs7sjsLjBOlDW0JB9LeGna8gI4 +zJVSk/BwJVmcIGfE7vmLV2H0knZ9P4SNVbfo5azV8fUZVqZa+5Acr5Pr5RzUZ5dd +BA6+C4OmF4O5MBKgxTMVBbkN+8cFduPYSo38NBejxiEovjBFMR7HeL5YYTisO+IB +ZQ== +-----END CERTIFICATE----- + +# Issuer: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C. +# Subject: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C. +# Label: "Network Solutions Certificate Authority" +# Serial: 116697915152937497490437556386812487904 +# MD5 Fingerprint: d3:f3:a6:16:c0:fa:6b:1d:59:b1:2d:96:4d:0e:11:2e +# SHA1 Fingerprint: 74:f8:a3:c3:ef:e7:b3:90:06:4b:83:90:3c:21:64:60:20:e5:df:ce +# SHA256 Fingerprint: 15:f0:ba:00:a3:ac:7a:f3:ac:88:4c:07:2b:10:11:a0:77:bd:77:c0:97:f4:01:64:b2:f8:59:8a:bd:83:86:0c +-----BEGIN CERTIFICATE----- +MIID5jCCAs6gAwIBAgIQV8szb8JcFuZHFhfjkDFo4DANBgkqhkiG9w0BAQUFADBi +MQswCQYDVQQGEwJVUzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMu +MTAwLgYDVQQDEydOZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3Jp +dHkwHhcNMDYxMjAxMDAwMDAwWhcNMjkxMjMxMjM1OTU5WjBiMQswCQYDVQQGEwJV +UzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMuMTAwLgYDVQQDEydO +ZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwggEiMA0GCSqG +SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDkvH6SMG3G2I4rC7xGzuAnlt7e+foS0zwz +c7MEL7xxjOWftiJgPl9dzgn/ggwbmlFQGiaJ3dVhXRncEg8tCqJDXRfQNJIg6nPP +OCwGJgl6cvf6UDL4wpPTaaIjzkGxzOTVHzbRijr4jGPiFFlp7Q3Tf2vouAPlT2rl +mGNpSAW+Lv8ztumXWWn4Zxmuk2GWRBXTcrA/vGp97Eh/jcOrqnErU2lBUzS1sLnF +BgrEsEX1QV1uiUV7PTsmjHTC5dLRfbIR1PtYMiKagMnc/Qzpf14Dl847ABSHJ3A4 +qY5usyd2mFHgBeMhqxrVhSI8KbWaFsWAqPS7azCPL0YCorEMIuDTAgMBAAGjgZcw +gZQwHQYDVR0OBBYEFCEwyfsA106Y2oeqKtCnLrFAMadMMA4GA1UdDwEB/wQEAwIB +BjAPBgNVHRMBAf8EBTADAQH/MFIGA1UdHwRLMEkwR6BFoEOGQWh0dHA6Ly9jcmwu +bmV0c29sc3NsLmNvbS9OZXR3b3JrU29sdXRpb25zQ2VydGlmaWNhdGVBdXRob3Jp +dHkuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQC7rkvnt1frf6ott3NHhWrB5KUd5Oc8 +6fRZZXe1eltajSU24HqXLjjAV2CDmAaDn7l2em5Q4LqILPxFzBiwmZVRDuwduIj/ +h1AcgsLj4DKAv6ALR8jDMe+ZZzKATxcheQxpXN5eNK4CtSbqUN9/GGUsyfJj4akH +/nxxH2szJGoeBfcFaMBqEssuXmHLrijTfsK0ZpEmXzwuJF/LWA/rKOyvEZbz3Htv +wKeI8lN3s2Berq4o2jUsbzRF0ybh3uxbTydrFny9RAQYgrOJeRcQcT16ohZO9QHN +pGxlaKFJdlxDydi8NmdspZS11My5vWo1ViHe2MPr+8ukYEywVaCge1ey +-----END CERTIFICATE----- + +# Issuer: CN=COMODO ECC Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO ECC Certification Authority O=COMODO CA Limited +# Label: "COMODO ECC Certification Authority" +# Serial: 41578283867086692638256921589707938090 +# MD5 Fingerprint: 7c:62:ff:74:9d:31:53:5e:68:4a:d5:78:aa:1e:bf:23 +# SHA1 Fingerprint: 9f:74:4e:9f:2b:4d:ba:ec:0f:31:2c:50:b6:56:3b:8e:2d:93:c3:11 +# SHA256 Fingerprint: 17:93:92:7a:06:14:54:97:89:ad:ce:2f:8f:34:f7:f0:b6:6d:0f:3a:e3:a3:b8:4d:21:ec:15:db:ba:4f:ad:c7 +-----BEGIN CERTIFICATE----- +MIICiTCCAg+gAwIBAgIQH0evqmIAcFBUTAGem2OZKjAKBggqhkjOPQQDAzCBhTEL +MAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE +BxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMT +IkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwMzA2MDAw +MDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdy 
+ZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09N +T0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlv +biBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQDR3svdcmCFYX7deSR +FtSrYpn1PlILBs5BAH+X4QokPB0BBO490o0JlwzgdeT6+3eKKvUDYEs2ixYjFq0J +cfRK9ChQtP6IHG4/bC8vCVlbpVsLM5niwz2J+Wos77LTBumjQjBAMB0GA1UdDgQW +BBR1cacZSBm8nZ3qQUfflMRId5nTeTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ +BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjEA7wNbeqy3eApyt4jf/7VGFAkK+qDm +fQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdv +GDeAU/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY= +-----END CERTIFICATE----- + +# Issuer: CN=TC TrustCenter Class 2 CA II O=TC TrustCenter GmbH OU=TC TrustCenter Class 2 CA +# Subject: CN=TC TrustCenter Class 2 CA II O=TC TrustCenter GmbH OU=TC TrustCenter Class 2 CA +# Label: "TC TrustCenter Class 2 CA II" +# Serial: 941389028203453866782103406992443 +# MD5 Fingerprint: ce:78:33:5c:59:78:01:6e:18:ea:b9:36:a0:b9:2e:23 +# SHA1 Fingerprint: ae:50:83:ed:7c:f4:5c:bc:8f:61:c6:21:fe:68:5d:79:42:21:15:6e +# SHA256 Fingerprint: e6:b8:f8:76:64:85:f8:07:ae:7f:8d:ac:16:70:46:1f:07:c0:a1:3e:ef:3a:1f:f7:17:53:8d:7a:ba:d3:91:b4 +-----BEGIN CERTIFICATE----- +MIIEqjCCA5KgAwIBAgIOLmoAAQACH9dSISwRXDswDQYJKoZIhvcNAQEFBQAwdjEL +MAkGA1UEBhMCREUxHDAaBgNVBAoTE1RDIFRydXN0Q2VudGVyIEdtYkgxIjAgBgNV +BAsTGVRDIFRydXN0Q2VudGVyIENsYXNzIDIgQ0ExJTAjBgNVBAMTHFRDIFRydXN0 +Q2VudGVyIENsYXNzIDIgQ0EgSUkwHhcNMDYwMTEyMTQzODQzWhcNMjUxMjMxMjI1 +OTU5WjB2MQswCQYDVQQGEwJERTEcMBoGA1UEChMTVEMgVHJ1c3RDZW50ZXIgR21i +SDEiMCAGA1UECxMZVEMgVHJ1c3RDZW50ZXIgQ2xhc3MgMiBDQTElMCMGA1UEAxMc +VEMgVHJ1c3RDZW50ZXIgQ2xhc3MgMiBDQSBJSTCCASIwDQYJKoZIhvcNAQEBBQAD +ggEPADCCAQoCggEBAKuAh5uO8MN8h9foJIIRszzdQ2Lu+MNF2ujhoF/RKrLqk2jf +tMjWQ+nEdVl//OEd+DFwIxuInie5e/060smp6RQvkL4DUsFJzfb95AhmC1eKokKg +uNV/aVyQMrKXDcpK3EY+AlWJU+MaWss2xgdW94zPEfRMuzBwBJWl9jmM/XOBCH2J +XjIeIqkiRUuwZi4wzJ9l/fzLganx4Duvo4bRierERXlQXa7pIXSSTYtZgo+U4+lK +8edJsBTj9WLL1XK9H7nSn6DNqPoByNkN39r8R52zyFTfSUrxIan+GE7uSNQZu+99 +5OKdy1u2bv/jzVrndIIFuoAlOMvkaZ6vQaoahPUCAwEAAaOCATQwggEwMA8GA1Ud +EwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTjq1RMgKHbVkO3 +kUrL84J6E1wIqzCB7QYDVR0fBIHlMIHiMIHfoIHcoIHZhjVodHRwOi8vd3d3LnRy +dXN0Y2VudGVyLmRlL2NybC92Mi90Y19jbGFzc18yX2NhX0lJLmNybIaBn2xkYXA6 +Ly93d3cudHJ1c3RjZW50ZXIuZGUvQ049VEMlMjBUcnVzdENlbnRlciUyMENsYXNz +JTIwMiUyMENBJTIwSUksTz1UQyUyMFRydXN0Q2VudGVyJTIwR21iSCxPVT1yb290 +Y2VydHMsREM9dHJ1c3RjZW50ZXIsREM9ZGU/Y2VydGlmaWNhdGVSZXZvY2F0aW9u +TGlzdD9iYXNlPzANBgkqhkiG9w0BAQUFAAOCAQEAjNfffu4bgBCzg/XbEeprS6iS +GNn3Bzn1LL4GdXpoUxUc6krtXvwjshOg0wn/9vYua0Fxec3ibf2uWWuFHbhOIprt +ZjluS5TmVfwLG4t3wVMTZonZKNaL80VKY7f9ewthXbhtvsPcW3nS7Yblok2+XnR8 +au0WOB9/WIFaGusyiC2y8zl3gK9etmF1KdsjTYjKUCjLhdLTEKJZbtOTVAB6okaV +hgWcqRmY5TFyDADiZ9lA4CQze28suVyrZZ0srHbqNZn1l7kPJOzHdiEoZa5X6AeI +dUpWoNIFOqTmjZKILPPy4cHGYdtBxceb9w4aUUXCYWvcZCcXjFq32nQozZfkvQ== +-----END CERTIFICATE----- + +# Issuer: CN=TC TrustCenter Class 3 CA II O=TC TrustCenter GmbH OU=TC TrustCenter Class 3 CA +# Subject: CN=TC TrustCenter Class 3 CA II O=TC TrustCenter GmbH OU=TC TrustCenter Class 3 CA +# Label: "TC TrustCenter Class 3 CA II" +# Serial: 1506523511417715638772220530020799 +# MD5 Fingerprint: 56:5f:aa:80:61:12:17:f6:67:21:e6:2b:6d:61:56:8e +# SHA1 Fingerprint: 80:25:ef:f4:6e:70:c8:d4:72:24:65:84:fe:40:3b:8a:8d:6a:db:f5 +# SHA256 Fingerprint: 8d:a0:84:fc:f9:9c:e0:77:22:f8:9b:32:05:93:98:06:fa:5c:b8:11:e1:c8:13:f6:a1:08:c7:d3:36:b3:40:8e +-----BEGIN CERTIFICATE----- +MIIEqjCCA5KgAwIBAgIOSkcAAQAC5aBd1j8AUb8wDQYJKoZIhvcNAQEFBQAwdjEL +MAkGA1UEBhMCREUxHDAaBgNVBAoTE1RDIFRydXN0Q2VudGVyIEdtYkgxIjAgBgNV 
+BAsTGVRDIFRydXN0Q2VudGVyIENsYXNzIDMgQ0ExJTAjBgNVBAMTHFRDIFRydXN0 +Q2VudGVyIENsYXNzIDMgQ0EgSUkwHhcNMDYwMTEyMTQ0MTU3WhcNMjUxMjMxMjI1 +OTU5WjB2MQswCQYDVQQGEwJERTEcMBoGA1UEChMTVEMgVHJ1c3RDZW50ZXIgR21i +SDEiMCAGA1UECxMZVEMgVHJ1c3RDZW50ZXIgQ2xhc3MgMyBDQTElMCMGA1UEAxMc +VEMgVHJ1c3RDZW50ZXIgQ2xhc3MgMyBDQSBJSTCCASIwDQYJKoZIhvcNAQEBBQAD +ggEPADCCAQoCggEBALTgu1G7OVyLBMVMeRwjhjEQY0NVJz/GRcekPewJDRoeIMJW +Ht4bNwcwIi9v8Qbxq63WyKthoy9DxLCyLfzDlml7forkzMA5EpBCYMnMNWju2l+Q +Vl/NHE1bWEnrDgFPZPosPIlY2C8u4rBo6SI7dYnWRBpl8huXJh0obazovVkdKyT2 +1oQDZogkAHhg8fir/gKya/si+zXmFtGt9i4S5Po1auUZuV3bOx4a+9P/FRQI2Alq +ukWdFHlgfa9Aigdzs5OW03Q0jTo3Kd5c7PXuLjHCINy+8U9/I1LZW+Jk2ZyqBwi1 +Rb3R0DHBq1SfqdLDYmAD8bs5SpJKPQq5ncWg/jcCAwEAAaOCATQwggEwMA8GA1Ud +EwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTUovyfs8PYA9NX +XAek0CSnwPIA1DCB7QYDVR0fBIHlMIHiMIHfoIHcoIHZhjVodHRwOi8vd3d3LnRy +dXN0Y2VudGVyLmRlL2NybC92Mi90Y19jbGFzc18zX2NhX0lJLmNybIaBn2xkYXA6 +Ly93d3cudHJ1c3RjZW50ZXIuZGUvQ049VEMlMjBUcnVzdENlbnRlciUyMENsYXNz +JTIwMyUyMENBJTIwSUksTz1UQyUyMFRydXN0Q2VudGVyJTIwR21iSCxPVT1yb290 +Y2VydHMsREM9dHJ1c3RjZW50ZXIsREM9ZGU/Y2VydGlmaWNhdGVSZXZvY2F0aW9u +TGlzdD9iYXNlPzANBgkqhkiG9w0BAQUFAAOCAQEANmDkcPcGIEPZIxpC8vijsrlN +irTzwppVMXzEO2eatN9NDoqTSheLG43KieHPOh6sHfGcMrSOWXaiQYUlN6AT0PV8 +TtXqluJucsG7Kv5sbviRmEb8yRtXW+rIGjs/sFGYPAfaLFkB2otE6OF0/ado3VS6 +g0bsyEa1+K+XwDsJHI/OcpY9M1ZwvJbL2NV9IJqDnxrcOfHFcqMRA/07QlIp2+gB +95tejNaNhk4Z+rwcvsUhpYeeeC422wlxo3I0+GzjBgnyXlal092Y+tTmBvTwtiBj +S+opvaqCZh77gaqnN60TGOaSw4HBM7uIHqHn4rS9MWwOUT1v+5ZWgOI2F9Hc5A== +-----END CERTIFICATE----- + +# Issuer: CN=TC TrustCenter Universal CA I O=TC TrustCenter GmbH OU=TC TrustCenter Universal CA +# Subject: CN=TC TrustCenter Universal CA I O=TC TrustCenter GmbH OU=TC TrustCenter Universal CA +# Label: "TC TrustCenter Universal CA I" +# Serial: 601024842042189035295619584734726 +# MD5 Fingerprint: 45:e1:a5:72:c5:a9:36:64:40:9e:f5:e4:58:84:67:8c +# SHA1 Fingerprint: 6b:2f:34:ad:89:58:be:62:fd:b0:6b:5c:ce:bb:9d:d9:4f:4e:39:f3 +# SHA256 Fingerprint: eb:f3:c0:2a:87:89:b1:fb:7d:51:19:95:d6:63:b7:29:06:d9:13:ce:0d:5e:10:56:8a:8a:77:e2:58:61:67:e7 +-----BEGIN CERTIFICATE----- +MIID3TCCAsWgAwIBAgIOHaIAAQAC7LdggHiNtgYwDQYJKoZIhvcNAQEFBQAweTEL +MAkGA1UEBhMCREUxHDAaBgNVBAoTE1RDIFRydXN0Q2VudGVyIEdtYkgxJDAiBgNV +BAsTG1RDIFRydXN0Q2VudGVyIFVuaXZlcnNhbCBDQTEmMCQGA1UEAxMdVEMgVHJ1 +c3RDZW50ZXIgVW5pdmVyc2FsIENBIEkwHhcNMDYwMzIyMTU1NDI4WhcNMjUxMjMx +MjI1OTU5WjB5MQswCQYDVQQGEwJERTEcMBoGA1UEChMTVEMgVHJ1c3RDZW50ZXIg +R21iSDEkMCIGA1UECxMbVEMgVHJ1c3RDZW50ZXIgVW5pdmVyc2FsIENBMSYwJAYD +VQQDEx1UQyBUcnVzdENlbnRlciBVbml2ZXJzYWwgQ0EgSTCCASIwDQYJKoZIhvcN +AQEBBQADggEPADCCAQoCggEBAKR3I5ZEr5D0MacQ9CaHnPM42Q9e3s9B6DGtxnSR +JJZ4Hgmgm5qVSkr1YnwCqMqs+1oEdjneX/H5s7/zA1hV0qq34wQi0fiU2iIIAI3T +fCZdzHd55yx4Oagmcw6iXSVphU9VDprvxrlE4Vc93x9UIuVvZaozhDrzznq+VZeu +jRIPFDPiUHDDSYcTvFHe15gSWu86gzOSBnWLknwSaHtwag+1m7Z3W0hZneTvWq3z +wZ7U10VOylY0Ibw+F1tvdwxIAUMpsN0/lm7mlaoMwCC2/T42J5zjXM9OgdwZu5GQ +fezmlwQek8wiSdeXhrYTCjxDI3d+8NzmzSQfO4ObNDqDNOMCAwEAAaNjMGEwHwYD +VR0jBBgwFoAUkqR1LKSevoFE63n8isWVpesQdXMwDwYDVR0TAQH/BAUwAwEB/zAO +BgNVHQ8BAf8EBAMCAYYwHQYDVR0OBBYEFJKkdSyknr6BROt5/IrFlaXrEHVzMA0G +CSqGSIb3DQEBBQUAA4IBAQAo0uCG1eb4e/CX3CJrO5UUVg8RMKWaTzqwOuAGy2X1 +7caXJ/4l8lfmXpWMPmRgFVp/Lw0BxbFg/UU1z/CyvwbZ71q+s2IhtNerNXxTPqYn +8aEt2hojnczd7Dwtnic0XQ/CNnm8yUpiLe1r2X1BQ3y2qsrtYbE3ghUJGooWMNjs +ydZHcnhLEEYUjl8Or+zHL6sQ17bxbuyGssLoDZJz3KL0Dzq/YSMQiZxIQG5wALPT +ujdEWBF6AmqI8Dc08BnprNRlc/ZpjGSUOnmFKbAWKwyCPwacx/0QK54PLLae4xW/ +2TYcuiUaUj0a7CIMHOCkoj3w6DnPgcB77V0fb8XQC9eY +-----END CERTIFICATE----- + +# Issuer: CN=Cybertrust Global 
Root O=Cybertrust, Inc +# Subject: CN=Cybertrust Global Root O=Cybertrust, Inc +# Label: "Cybertrust Global Root" +# Serial: 4835703278459682877484360 +# MD5 Fingerprint: 72:e4:4a:87:e3:69:40:80:77:ea:bc:e3:f4:ff:f0:e1 +# SHA1 Fingerprint: 5f:43:e5:b1:bf:f8:78:8c:ac:1c:c7:ca:4a:9a:c6:22:2b:cc:34:c6 +# SHA256 Fingerprint: 96:0a:df:00:63:e9:63:56:75:0c:29:65:dd:0a:08:67:da:0b:9c:bd:6e:77:71:4a:ea:fb:23:49:ab:39:3d:a3 +-----BEGIN CERTIFICATE----- +MIIDoTCCAomgAwIBAgILBAAAAAABD4WqLUgwDQYJKoZIhvcNAQEFBQAwOzEYMBYG +A1UEChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2Jh +bCBSb290MB4XDTA2MTIxNTA4MDAwMFoXDTIxMTIxNTA4MDAwMFowOzEYMBYGA1UE +ChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2JhbCBS +b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA+Mi8vRRQZhP/8NN5 +7CPytxrHjoXxEnOmGaoQ25yiZXRadz5RfVb23CO21O1fWLE3TdVJDm71aofW0ozS +J8bi/zafmGWgE07GKmSb1ZASzxQG9Dvj1Ci+6A74q05IlG2OlTEQXO2iLb3VOm2y +HLtgwEZLAfVJrn5GitB0jaEMAs7u/OePuGtm839EAL9mJRQr3RAwHQeWP032a7iP +t3sMpTjr3kfb1V05/Iin89cqdPHoWqI7n1C6poxFNcJQZZXcY4Lv3b93TZxiyWNz +FtApD0mpSPCzqrdsxacwOUBdrsTiXSZT8M4cIwhhqJQZugRiQOwfOHB3EgZxpzAY +XSUnpQIDAQABo4GlMIGiMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/ +MB0GA1UdDgQWBBS2CHsNesysIEyGVjJez6tuhS1wVzA/BgNVHR8EODA2MDSgMqAw +hi5odHRwOi8vd3d3Mi5wdWJsaWMtdHJ1c3QuY29tL2NybC9jdC9jdHJvb3QuY3Js +MB8GA1UdIwQYMBaAFLYIew16zKwgTIZWMl7Pq26FLXBXMA0GCSqGSIb3DQEBBQUA +A4IBAQBW7wojoFROlZfJ+InaRcHUowAl9B8Tq7ejhVhpwjCt2BWKLePJzYFa+HMj +Wqd8BfP9IjsO0QbE2zZMcwSO5bAi5MXzLqXZI+O4Tkogp24CJJ8iYGd7ix1yCcUx +XOl5n4BHPa2hCwcUPUf/A2kaDAtE52Mlp3+yybh2hO0j9n0Hq0V+09+zv+mKts2o +omcrUtW3ZfA5TGOgkXmTUg9U3YO7n9GPp1Nzw8v/MOx8BLjYRB+TX3EJIrduPuoc +A06dGiBh+4E37F78CkWr1+cXVdCg6mCbpvbjjFspwgZgFJ0tl0ypkxWdYcQBX0jW +WL1WMRJOEcgh4LMRkWXbtKaIOM5V +-----END CERTIFICATE----- + +# Issuer: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. - For authorized use only +# Subject: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. 
- For authorized use only +# Label: "GeoTrust Primary Certification Authority - G3" +# Serial: 28809105769928564313984085209975885599 +# MD5 Fingerprint: b5:e8:34:36:c9:10:44:58:48:70:6d:2e:83:d4:b8:05 +# SHA1 Fingerprint: 03:9e:ed:b8:0b:e7:a0:3c:69:53:89:3b:20:d2:d9:32:3a:4c:2a:fd +# SHA256 Fingerprint: b4:78:b8:12:25:0d:f8:78:63:5c:2a:a7:ec:7d:15:5e:aa:62:5e:e8:29:16:e2:cd:29:43:61:88:6c:d1:fb:d4 +-----BEGIN CERTIFICATE----- +MIID/jCCAuagAwIBAgIQFaxulBmyeUtB9iepwxgPHzANBgkqhkiG9w0BAQsFADCB +mDELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsT +MChjKSAyMDA4IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25s +eTE2MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhv +cml0eSAtIEczMB4XDTA4MDQwMjAwMDAwMFoXDTM3MTIwMTIzNTk1OVowgZgxCzAJ +BgNVBAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykg +MjAwOCBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0 +BgNVBAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg +LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANziXmJYHTNXOTIz ++uvLh4yn1ErdBojqZI4xmKU4kB6Yzy5jK/BGvESyiaHAKAxJcCGVn2TAppMSAmUm +hsalifD614SgcK9PGpc/BkTVyetyEH3kMSj7HGHmKAdEc5IiaacDiGydY8hS2pgn +5whMcD60yRLBxWeDXTPzAxHsatBT4tG6NmCUgLthY2xbF37fQJQeqw3CIShwiP/W +JmxsYAQlTlV+fe+/lEjetx3dcI0FX4ilm/LC7urRQEFtYjgdVgbFA0dRIBn8exAL +DmKudlW/X3e+PkkBUz2YJQN2JFodtNuJ6nnltrM7P7pMKEF/BqxqjsHQ9gUdfeZC +huOl1UcCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYw +HQYDVR0OBBYEFMR5yo6hTgMdHNxr2zFblD4/MH8tMA0GCSqGSIb3DQEBCwUAA4IB +AQAtxRPPVoB7eni9n64smefv2t+UXglpp+duaIy9cr5HqQ6XErhK8WTTOd8lNNTB +zU6B8A8ExCSzNJbGpqow32hhc9f5joWJ7w5elShKKiePEI4ufIbEAp7aDHdlDkQN +kv39sxY2+hENHYwOB4lqKVb3cvTdFZx3NWZXqxNT2I7BQMXXExZacse3aQHEerGD +AWh9jUGhlBjBJVz88P6DAod8DQ3PLghcSkANPuyBYeYk28rgDi0Hsj5W3I31QYUH +SJsMC8tJP33st/3LjWeJGqvtux6jAAgIFyqCXDFdRootD4abdNlF+9RAsXqqaC2G +spki4cErx5z481+oghLrGREt +-----END CERTIFICATE----- + +# Issuer: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only +# Subject: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only +# Label: "thawte Primary Root CA - G2" +# Serial: 71758320672825410020661621085256472406 +# MD5 Fingerprint: 74:9d:ea:60:24:c4:fd:22:53:3e:cc:3a:72:d9:29:4f +# SHA1 Fingerprint: aa:db:bc:22:23:8f:c4:01:a1:27:bb:38:dd:f4:1d:db:08:9e:f0:12 +# SHA256 Fingerprint: a4:31:0d:50:af:18:a6:44:71:90:37:2a:86:af:af:8b:95:1f:fb:43:1d:83:7f:1e:56:88:b4:59:71:ed:15:57 +-----BEGIN CERTIFICATE----- +MIICiDCCAg2gAwIBAgIQNfwmXNmET8k9Jj1Xm67XVjAKBggqhkjOPQQDAzCBhDEL +MAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjE4MDYGA1UECxMvKGMp +IDIwMDcgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAi +BgNVBAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMjAeFw0wNzExMDUwMDAw +MDBaFw0zODAxMTgyMzU5NTlaMIGEMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhh +d3RlLCBJbmMuMTgwNgYDVQQLEy8oYykgMjAwNyB0aGF3dGUsIEluYy4gLSBGb3Ig +YXV0aG9yaXplZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9v +dCBDQSAtIEcyMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEotWcgnuVnfFSeIf+iha/ +BebfowJPDQfGAFG6DAJSLSKkQjnE/o/qycG+1E3/n3qe4rF8mq2nhglzh9HnmuN6 +papu+7qzcMBniKI11KOasf2twu8x+qi58/sIxpHR+ymVo0IwQDAPBgNVHRMBAf8E +BTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUmtgAMADna3+FGO6Lts6K +DPgR4bswCgYIKoZIzj0EAwMDaQAwZgIxAN344FdHW6fmCsO99YCKlzUNG4k8VIZ3 +KMqh9HneteY4sPBlcIx/AlTCv//YoT7ZzwIxAMSNlPzcU9LcnXgWHxUzI1NS41ox +XZ3Krr0TKUQNJ1uo52icEvdYPy5yAlejj6EULg== +-----END CERTIFICATE----- + +# Issuer: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. 
- For authorized use only +# Subject: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. - For authorized use only +# Label: "thawte Primary Root CA - G3" +# Serial: 127614157056681299805556476275995414779 +# MD5 Fingerprint: fb:1b:5d:43:8a:94:cd:44:c6:76:f2:43:4b:47:e7:31 +# SHA1 Fingerprint: f1:8b:53:8d:1b:e9:03:b6:a6:f0:56:43:5b:17:15:89:ca:f3:6b:f2 +# SHA256 Fingerprint: 4b:03:f4:58:07:ad:70:f2:1b:fc:2c:ae:71:c9:fd:e4:60:4c:06:4c:f5:ff:b6:86:ba:e5:db:aa:d7:fd:d3:4c +-----BEGIN CERTIFICATE----- +MIIEKjCCAxKgAwIBAgIQYAGXt0an6rS0mtZLL/eQ+zANBgkqhkiG9w0BAQsFADCB +rjELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf +Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw +MDggdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAiBgNV +BAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMzAeFw0wODA0MDIwMDAwMDBa +Fw0zNzEyMDEyMzU5NTlaMIGuMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhhd3Rl +LCBJbmMuMSgwJgYDVQQLEx9DZXJ0aWZpY2F0aW9uIFNlcnZpY2VzIERpdmlzaW9u +MTgwNgYDVQQLEy8oYykgMjAwOCB0aGF3dGUsIEluYy4gLSBGb3IgYXV0aG9yaXpl +ZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9vdCBDQSAtIEcz +MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsr8nLPvb2FvdeHsbnndm +gcs+vHyu86YnmjSjaDFxODNi5PNxZnmxqWWjpYvVj2AtP0LMqmsywCPLLEHd5N/8 +YZzic7IilRFDGF/Eth9XbAoFWCLINkw6fKXRz4aviKdEAhN0cXMKQlkC+BsUa0Lf +b1+6a4KinVvnSr0eAXLbS3ToO39/fR8EtCab4LRarEc9VbjXsCZSKAExQGbY2SS9 +9irY7CFJXJv2eul/VTV+lmuNk5Mny5K76qxAwJ/C+IDPXfRa3M50hqY+bAtTyr2S +zhkGcuYMXDhpxwTWvGzOW/b3aJzcJRVIiKHpqfiYnODz1TEoYRFsZ5aNOZnLwkUk +OQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNV +HQ4EFgQUrWyqlGCc7eT/+j4KdCtjA/e2Wb8wDQYJKoZIhvcNAQELBQADggEBABpA +2JVlrAmSicY59BDlqQ5mU1143vokkbvnRFHfxhY0Cu9qRFHqKweKA3rD6z8KLFIW +oCtDuSWQP3CpMyVtRRooOyfPqsMpQhvfO0zAMzRbQYi/aytlryjvsvXDqmbOe1bu +t8jLZ8HJnBoYuMTDSQPxYA5QzUbF83d597YV4Djbxy8ooAw/dyZ02SUS2jHaGh7c +KUGRIjxpp7sC8rZcJwOJ9Abqm+RyguOhCcHpABnTPtRwa7pxpqpYrvS76Wy274fM +m7v/OeZWYdMKp8RcTGB7BXcmer/YB1IsYvdwY9k5vG8cwnncdimvzsUsZAReiDZu +MdRAGmI0Nj81Aa6sY6A= +-----END CERTIFICATE----- + +# Issuer: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. - For authorized use only +# Subject: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. 
- For authorized use only +# Label: "GeoTrust Primary Certification Authority - G2" +# Serial: 80682863203381065782177908751794619243 +# MD5 Fingerprint: 01:5e:d8:6b:bd:6f:3d:8e:a1:31:f8:12:e0:98:73:6a +# SHA1 Fingerprint: 8d:17:84:d5:37:f3:03:7d:ec:70:fe:57:8b:51:9a:99:e6:10:d7:b0 +# SHA256 Fingerprint: 5e:db:7a:c4:3b:82:a0:6a:87:61:e8:d7:be:49:79:eb:f2:61:1f:7d:d7:9b:f9:1c:1c:6b:56:6a:21:9e:d7:66 +-----BEGIN CERTIFICATE----- +MIICrjCCAjWgAwIBAgIQPLL0SAoA4v7rJDteYD7DazAKBggqhkjOPQQDAzCBmDEL +MAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsTMChj +KSAyMDA3IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25seTE2 +MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0 +eSAtIEcyMB4XDTA3MTEwNTAwMDAwMFoXDTM4MDExODIzNTk1OVowgZgxCzAJBgNV +BAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykgMjAw +NyBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0BgNV +BAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBH +MjB2MBAGByqGSM49AgEGBSuBBAAiA2IABBWx6P0DFUPlrOuHNxFi79KDNlJ9RVcL +So17VDs6bl8VAsBQps8lL33KSLjHUGMcKiEIfJo22Av+0SbFWDEwKCXzXV2juLal +tJLtbCyf691DiaI8S0iRHVDsJt/WYC69IaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO +BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBVfNVdRVfslsq0DafwBo/q+EVXVMAoG +CCqGSM49BAMDA2cAMGQCMGSWWaboCd6LuvpaiIjwH5HTRqjySkwCY/tsXzjbLkGT +qQ7mndwxHLKgpxgceeHHNgIwOlavmnRs9vuD4DPTCF+hnMJbn0bWtsuRBmOiBucz +rD6ogRLQy7rQkgu2npaqBA+K +-----END CERTIFICATE----- + +# Issuer: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only +# Subject: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only +# Label: "VeriSign Universal Root Certification Authority" +# Serial: 85209574734084581917763752644031726877 +# MD5 Fingerprint: 8e:ad:b5:01:aa:4d:81:e4:8c:1d:d1:e1:14:00:95:19 +# SHA1 Fingerprint: 36:79:ca:35:66:87:72:30:4d:30:a5:fb:87:3b:0f:a7:7b:b7:0d:54 +# SHA256 Fingerprint: 23:99:56:11:27:a5:71:25:de:8c:ef:ea:61:0d:df:2f:a0:78:b5:c8:06:7f:4e:82:82:90:bf:b8:60:e8:4b:3c +-----BEGIN CERTIFICATE----- +MIIEuTCCA6GgAwIBAgIQQBrEZCGzEyEDDrvkEhrFHTANBgkqhkiG9w0BAQsFADCB +vTELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL +ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwOCBWZXJp +U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MTgwNgYDVQQDEy9W +ZXJpU2lnbiBVbml2ZXJzYWwgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe +Fw0wODA0MDIwMDAwMDBaFw0zNzEyMDEyMzU5NTlaMIG9MQswCQYDVQQGEwJVUzEX +MBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlTaWduIFRydXN0 +IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAyMDA4IFZlcmlTaWduLCBJbmMuIC0gRm9y +IGF1dGhvcml6ZWQgdXNlIG9ubHkxODA2BgNVBAMTL1ZlcmlTaWduIFVuaXZlcnNh +bCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEF +AAOCAQ8AMIIBCgKCAQEAx2E3XrEBNNti1xWb/1hajCMj1mCOkdeQmIN65lgZOIzF +9uVkhbSicfvtvbnazU0AtMgtc6XHaXGVHzk8skQHnOgO+k1KxCHfKWGPMiJhgsWH +H26MfF8WIFFE0XBPV+rjHOPMee5Y2A7Cs0WTwCznmhcrewA3ekEzeOEz4vMQGn+H +LL729fdC4uW/h2KJXwBL38Xd5HVEMkE6HnFuacsLdUYI0crSK5XQz/u5QGtkjFdN +/BMReYTtXlT2NJ8IAfMQJQYXStrxHXpma5hgZqTZ79IugvHw7wnqRMkVauIDbjPT +rJ9VAMf2CGqUuV/c4DPxhGD5WycRtPwW8rtWaoAljQIDAQABo4GyMIGvMA8GA1Ud +EwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMG0GCCsGAQUFBwEMBGEwX6FdoFsw +WTBXMFUWCWltYWdlL2dpZjAhMB8wBwYFKw4DAhoEFI/l0xqGrI2Oa8PPgGrUSBgs +exkuMCUWI2h0dHA6Ly9sb2dvLnZlcmlzaWduLmNvbS92c2xvZ28uZ2lmMB0GA1Ud +DgQWBBS2d/ppSEefUxLVwuoHMnYH0ZcHGTANBgkqhkiG9w0BAQsFAAOCAQEASvj4 +sAPmLGd75JR3Y8xuTPl9Dg3cyLk1uXBPY/ok+myDjEedO2Pzmvl2MpWRsXe8rJq+ 
+seQxIcaBlVZaDrHC1LGmWazxY8u4TB1ZkErvkBYoH1quEPuBUDgMbMzxPcP1Y+Oz +4yHJJDnp/RVmRvQbEdBNc6N9Rvk97ahfYtTxP/jgdFcrGJ2BtMQo2pSXpXDrrB2+ +BxHw1dvd5Yzw1TKwg+ZX4o+/vqGqvz0dtdQ46tewXDpPaj+PwGZsY6rp2aQW9IHR +lRQOfc2VNNnSj3BzgXucfr2YYdhFh5iQxeuGMMY1v/D/w1WIg0vvBZIGcfK4mJO3 +7M2CYfE45k+XmCpajQ== +-----END CERTIFICATE----- + +# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only +# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only +# Label: "VeriSign Class 3 Public Primary Certification Authority - G4" +# Serial: 63143484348153506665311985501458640051 +# MD5 Fingerprint: 3a:52:e1:e7:fd:6f:3a:e3:6f:f3:6f:99:1b:f9:22:41 +# SHA1 Fingerprint: 22:d5:d8:df:8f:02:31:d1:8d:f7:9d:b7:cf:8a:2d:64:c9:3f:6c:3a +# SHA256 Fingerprint: 69:dd:d7:ea:90:bb:57:c9:3e:13:5d:c8:5e:a6:fc:d5:48:0b:60:32:39:bd:c4:54:fc:75:8b:2a:26:cf:7f:79 +-----BEGIN CERTIFICATE----- +MIIDhDCCAwqgAwIBAgIQL4D+I4wOIg9IZxIokYesszAKBggqhkjOPQQDAzCByjEL +MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW +ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2ln +biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp +U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y +aXR5IC0gRzQwHhcNMDcxMTA1MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCByjELMAkG +A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZWZXJp +U2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2lnbiwg +SW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJpU2ln +biBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5 +IC0gRzQwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAASnVnp8Utpkmw4tXNherJI9/gHm +GUo9FANL+mAnINmDiWn6VMaaGF5VKmTeBvaNSjutEDxlPZCIBIngMGGzrl0Bp3ve +fLK+ymVhAIau2o970ImtTR1ZmkGxvEeA3J5iw/mjgbIwga8wDwYDVR0TAQH/BAUw +AwEB/zAOBgNVHQ8BAf8EBAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJ +aW1hZ2UvZ2lmMCEwHzAHBgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYj +aHR0cDovL2xvZ28udmVyaXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFLMW +kf3upm7ktS5Jj4d4gYDs5bG1MAoGCCqGSM49BAMDA2gAMGUCMGYhDBgmYFo4e1ZC +4Kf8NoRRkSAsdk1DPcQdhCPQrNZ8NQbOzWm9kA3bbEhCHQ6qQgIxAJw9SDkjOVga +FRJZap7v1VmyHVIsmXHNxynfGyphe3HR3vPA5Q06Sqotp9iGKt0uEA== +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 +# Label: "GlobalSign Root CA - R3" +# Serial: 4835703278459759426209954 +# MD5 Fingerprint: c5:df:b8:49:ca:05:13:55:ee:2d:ba:1a:c3:3e:b0:28 +# SHA1 Fingerprint: d6:9b:56:11:48:f0:1c:77:c5:45:78:c1:09:26:df:5b:85:69:76:ad +# SHA256 Fingerprint: cb:b5:22:d7:b7:f1:27:ad:6a:01:13:86:5b:df:1c:d4:10:2e:7d:07:59:af:63:5a:7c:f4:72:0d:c9:63:c5:3b +-----BEGIN CERTIFICATE----- +MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4G +A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNp +Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4 +MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEG +A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8 +RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsT +gHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmm +KPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zd +QQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZ 
+XriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAw +DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+o +LkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZU +RUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMp +jjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK +6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQX +mcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecs +Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH +WD9f +-----END CERTIFICATE----- + +# Issuer: CN=TC TrustCenter Universal CA III O=TC TrustCenter GmbH OU=TC TrustCenter Universal CA +# Subject: CN=TC TrustCenter Universal CA III O=TC TrustCenter GmbH OU=TC TrustCenter Universal CA +# Label: "TC TrustCenter Universal CA III" +# Serial: 2010889993983507346460533407902964 +# MD5 Fingerprint: 9f:dd:db:ab:ff:8e:ff:45:21:5f:f0:6c:9d:8f:fe:2b +# SHA1 Fingerprint: 96:56:cd:7b:57:96:98:95:d0:e1:41:46:68:06:fb:b8:c6:11:06:87 +# SHA256 Fingerprint: 30:9b:4a:87:f6:ca:56:c9:31:69:aa:a9:9c:6d:98:88:54:d7:89:2b:d5:43:7e:2d:07:b2:9c:be:da:55:d3:5d +-----BEGIN CERTIFICATE----- +MIID4TCCAsmgAwIBAgIOYyUAAQACFI0zFQLkbPQwDQYJKoZIhvcNAQEFBQAwezEL +MAkGA1UEBhMCREUxHDAaBgNVBAoTE1RDIFRydXN0Q2VudGVyIEdtYkgxJDAiBgNV +BAsTG1RDIFRydXN0Q2VudGVyIFVuaXZlcnNhbCBDQTEoMCYGA1UEAxMfVEMgVHJ1 +c3RDZW50ZXIgVW5pdmVyc2FsIENBIElJSTAeFw0wOTA5MDkwODE1MjdaFw0yOTEy +MzEyMzU5NTlaMHsxCzAJBgNVBAYTAkRFMRwwGgYDVQQKExNUQyBUcnVzdENlbnRl +ciBHbWJIMSQwIgYDVQQLExtUQyBUcnVzdENlbnRlciBVbml2ZXJzYWwgQ0ExKDAm +BgNVBAMTH1RDIFRydXN0Q2VudGVyIFVuaXZlcnNhbCBDQSBJSUkwggEiMA0GCSqG +SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDC2pxisLlxErALyBpXsq6DFJmzNEubkKLF +5+cvAqBNLaT6hdqbJYUtQCggbergvbFIgyIpRJ9Og+41URNzdNW88jBmlFPAQDYv +DIRlzg9uwliT6CwLOunBjvvya8o84pxOjuT5fdMnnxvVZ3iHLX8LR7PH6MlIfK8v +zArZQe+f/prhsq75U7Xl6UafYOPfjdN/+5Z+s7Vy+EutCHnNaYlAJ/Uqwa1D7KRT +yGG299J5KmcYdkhtWyUB0SbFt1dpIxVbYYqt8Bst2a9c8SaQaanVDED1M4BDj5yj +dipFtK+/fz6HP3bFzSreIMUWWMv5G/UPyw0RUmS40nZid4PxWJ//AgMBAAGjYzBh +MB8GA1UdIwQYMBaAFFbn4VslQ4Dg9ozhcbyO5YAvxEjiMA8GA1UdEwEB/wQFMAMB +Af8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRW5+FbJUOA4PaM4XG8juWAL8RI +4jANBgkqhkiG9w0BAQUFAAOCAQEAg8ev6n9NCjw5sWi+e22JLumzCecYV42Fmhfz +dkJQEw/HkG8zrcVJYCtsSVgZ1OK+t7+rSbyUyKu+KGwWaODIl0YgoGhnYIg5IFHY +aAERzqf2EQf27OysGh+yZm5WZ2B6dF7AbZc2rrUNXWZzwCUyRdhKBgePxLcHsU0G +DeGl6/R1yrqc0L2z0zIkTO5+4nYES0lT2PLpVDP85XEfPRRclkvxOvIAu2y0+pZV +CIgJwcyRGSmwIC3/yzikQOEXvnlhgP8HA4ZMTnsGnxGGjYnuJ8Tb4rwZjgvDwxPH +LQNjO9Po5KIqwoIIlBZU8O8fJ5AluA0OKBtHd0e9HKgl8ZS0Zg== +-----END CERTIFICATE----- + +# Issuer: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. +# Subject: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. 
+# Label: "Go Daddy Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: 80:3a:bc:22:c1:e6:fb:8d:9b:3b:27:4a:32:1b:9a:01 +# SHA1 Fingerprint: 47:be:ab:c9:22:ea:e8:0e:78:78:34:62:a7:9f:45:c2:54:fd:e6:8b +# SHA256 Fingerprint: 45:14:0b:32:47:eb:9c:c8:c5:b4:f0:d7:b5:30:91:f7:32:92:08:9e:6e:5a:63:e2:74:9d:d3:ac:a9:19:8e:da +-----BEGIN CERTIFICATE----- +MIIDxTCCAq2gAwIBAgIBADANBgkqhkiG9w0BAQsFADCBgzELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxGjAYBgNVBAoT +EUdvRGFkZHkuY29tLCBJbmMuMTEwLwYDVQQDEyhHbyBEYWRkeSBSb290IENlcnRp +ZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIz +NTk1OVowgYMxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQH +EwpTY290dHNkYWxlMRowGAYDVQQKExFHb0RhZGR5LmNvbSwgSW5jLjExMC8GA1UE +AxMoR28gRGFkZHkgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIw +DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL9xYgjx+lk09xvJGKP3gElY6SKD +E6bFIEMBO4Tx5oVJnyfq9oQbTqC023CYxzIBsQU+B07u9PpPL1kwIuerGVZr4oAH +/PMWdYA5UXvl+TW2dE6pjYIT5LY/qQOD+qK+ihVqf94Lw7YZFAXK6sOoBJQ7Rnwy +DfMAZiLIjWltNowRGLfTshxgtDj6AozO091GB94KPutdfMh8+7ArU6SSYmlRJQVh +GkSBjCypQ5Yj36w6gZoOKcUcqeldHraenjAKOc7xiID7S13MMuyFYkMlNAJWJwGR +tDtwKj9useiciAF9n9T521NtYJ2/LOdYq7hfRvzOxBsDPAnrSTFcaUaz4EcCAwEA +AaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE +FDqahQcQZyi27/a9BUFuIMGU2g/eMA0GCSqGSIb3DQEBCwUAA4IBAQCZ21151fmX +WWcDYfF+OwYxdS2hII5PZYe096acvNjpL9DbWu7PdIxztDhC2gV7+AJ1uP2lsdeu +9tfeE8tTEH6KRtGX+rcuKxGrkLAngPnon1rpN5+r5N9ss4UXnT3ZJE95kTXWXwTr +gIOrmgIttRD02JDHBHNA7XIloKmf7J6raBKZV8aPEjoJpL1E/QYVN8Gb5DKj7Tjo +2GTzLH4U/ALqn83/B2gX2yKQOC16jdFU8WnjXzPKej17CuPKf1855eJ1usV2GDPO +LPAvTK33sefOT6jEm0pUBsV/fdUID+Ic/n4XuKxe9tQWskMJDE32p2u0mYRlynqI +4uJEvlz36hz1 +-----END CERTIFICATE----- + +# Issuer: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. +# Subject: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
+# Label: "Starfield Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: d6:39:81:c6:52:7e:96:69:fc:fc:ca:66:ed:05:f2:96 +# SHA1 Fingerprint: b5:1c:06:7c:ee:2b:0c:3d:f8:55:ab:2d:92:f4:fe:39:d4:e7:0f:0e +# SHA256 Fingerprint: 2c:e1:cb:0b:f9:d2:f9:e1:02:99:3f:be:21:51:52:c3:b2:dd:0c:ab:de:1c:68:e5:31:9b:83:91:54:db:b7:f5 +-----BEGIN CERTIFICATE----- +MIID3TCCAsWgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBjzELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT +HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAMTKVN0YXJmaWVs +ZCBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAw +MFoXDTM3MTIzMTIzNTk1OVowgY8xCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6 +b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFyZmllbGQgVGVj +aG5vbG9naWVzLCBJbmMuMTIwMAYDVQQDEylTdGFyZmllbGQgUm9vdCBDZXJ0aWZp +Y2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC +ggEBAL3twQP89o/8ArFvW59I2Z154qK3A2FWGMNHttfKPTUuiUP3oWmb3ooa/RMg +nLRJdzIpVv257IzdIvpy3Cdhl+72WoTsbhm5iSzchFvVdPtrX8WJpRBSiUZV9Lh1 +HOZ/5FSuS/hVclcCGfgXcVnrHigHdMWdSL5stPSksPNkN3mSwOxGXn/hbVNMYq/N +Hwtjuzqd+/x5AJhhdM8mgkBj87JyahkNmcrUDnXMN/uLicFZ8WJ/X7NfZTD4p7dN +dloedl40wOiWVpmKs/B/pM293DIxfJHP4F8R+GuqSVzRmZTRouNjWwl2tVZi4Ut0 +HZbUJtQIBFnQmA4O5t78w+wfkPECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO +BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFHwMMh+n2TB/xH1oo2Kooc6rB1snMA0G +CSqGSIb3DQEBCwUAA4IBAQARWfolTwNvlJk7mh+ChTnUdgWUXuEok21iXQnCoKjU +sHU48TRqneSfioYmUeYs0cYtbpUgSpIB7LiKZ3sx4mcujJUDJi5DnUox9g61DLu3 +4jd/IroAow57UvtruzvE03lRTs2Q9GcHGcg8RnoNAX3FWOdt5oUwF5okxBDgBPfg +8n/Uqgr/Qh037ZTlZFkSIHc40zI+OIF1lnP6aI+xy84fxez6nH7PfrHxBy22/L/K +pL/QlwVKvOoYKAKQvVR4CSFx09F9HdkWsKlhPdAKACL8x3vLCWRFCztAgfd9fDL1 +mMpYjn0q7pBZc2T5NnReJaH1ZgUufzkVqSr7UIuOhWn0 +-----END CERTIFICATE----- + +# Issuer: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. +# Subject: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
+# Label: "Starfield Services Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: 17:35:74:af:7b:61:1c:eb:f4:f9:3c:e2:ee:40:f9:a2 +# SHA1 Fingerprint: 92:5a:8f:8d:2c:6d:04:e0:66:5f:59:6a:ff:22:d8:63:e8:25:6f:3f +# SHA256 Fingerprint: 56:8d:69:05:a2:c8:87:08:a4:b3:02:51:90:ed:cf:ed:b1:97:4a:60:6a:13:c6:e5:29:0f:cb:2a:e6:3e:da:b5 +-----BEGIN CERTIFICATE----- +MIID7zCCAtegAwIBAgIBADANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT +HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xOzA5BgNVBAMTMlN0YXJmaWVs +ZCBTZXJ2aWNlcyBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5 +MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgZgxCzAJBgNVBAYTAlVTMRAwDgYD +VQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFy +ZmllbGQgVGVjaG5vbG9naWVzLCBJbmMuMTswOQYDVQQDEzJTdGFyZmllbGQgU2Vy +dmljZXMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBANUMOsQq+U7i9b4Zl1+OiFOxHz/Lz58gE20p +OsgPfTz3a3Y4Y9k2YKibXlwAgLIvWX/2h/klQ4bnaRtSmpDhcePYLQ1Ob/bISdm2 +8xpWriu2dBTrz/sm4xq6HZYuajtYlIlHVv8loJNwU4PahHQUw2eeBGg6345AWh1K +Ts9DkTvnVtYAcMtS7nt9rjrnvDH5RfbCYM8TWQIrgMw0R9+53pBlbQLPLJGmpufe +hRhJfGZOozptqbXuNC66DQO4M99H67FrjSXZm86B0UVGMpZwh94CDklDhbZsc7tk +6mFBrMnUVN+HL8cisibMn1lUaJ/8viovxFUcdUBgF4UCVTmLfwUCAwEAAaNCMEAw +DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJxfAN+q +AdcwKziIorhtSpzyEZGDMA0GCSqGSIb3DQEBCwUAA4IBAQBLNqaEd2ndOxmfZyMI +bw5hyf2E3F/YNoHN2BtBLZ9g3ccaaNnRbobhiCPPE95Dz+I0swSdHynVv/heyNXB +ve6SbzJ08pGCL72CQnqtKrcgfU28elUSwhXqvfdqlS5sdJ/PHLTyxQGjhdByPq1z +qwubdQxtRbeOlKyWN7Wg0I8VRw7j6IPdj/3vQQF3zCepYoUz8jcI73HPdwbeyBkd +iEDPfUYd/x7H4c7/I9vG+o1VTqkC50cRRj70/b17KSa7qWFiNyi2LSr2EIZkyXCn +0q23KXB56jzaYyWf/Wi3MOxw+3WKt21gZ7IeyLnp2KhvAotnDU0mV3HaIPzBSlCN +sSi6 +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Commercial O=AffirmTrust +# Subject: CN=AffirmTrust Commercial O=AffirmTrust +# Label: "AffirmTrust Commercial" +# Serial: 8608355977964138876 +# MD5 Fingerprint: 82:92:ba:5b:ef:cd:8a:6f:a6:3d:55:f9:84:f6:d6:b7 +# SHA1 Fingerprint: f9:b5:b6:32:45:5f:9c:be:ec:57:5f:80:dc:e9:6e:2c:c7:b2:78:b7 +# SHA256 Fingerprint: 03:76:ab:1d:54:c5:f9:80:3c:e4:b2:e2:01:a0:ee:7e:ef:7b:57:b6:36:e8:a9:3c:9b:8d:48:60:c9:6f:5f:a7 +-----BEGIN CERTIFICATE----- +MIIDTDCCAjSgAwIBAgIId3cGJyapsXwwDQYJKoZIhvcNAQELBQAwRDELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz +dCBDb21tZXJjaWFsMB4XDTEwMDEyOTE0MDYwNloXDTMwMTIzMTE0MDYwNlowRDEL +MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp +cm1UcnVzdCBDb21tZXJjaWFsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEA9htPZwcroRX1BiLLHwGy43NFBkRJLLtJJRTWzsO3qyxPxkEylFf6EqdbDuKP +Hx6GGaeqtS25Xw2Kwq+FNXkyLbscYjfysVtKPcrNcV/pQr6U6Mje+SJIZMblq8Yr +ba0F8PrVC8+a5fBQpIs7R6UjW3p6+DM/uO+Zl+MgwdYoic+U+7lF7eNAFxHUdPAL +MeIrJmqbTFeurCA+ukV6BfO9m2kVrn1OIGPENXY6BwLJN/3HR+7o8XYdcxXyl6S1 +yHp52UKqK39c/s4mT6NmgTWvRLpUHhwwMmWd5jyTXlBOeuM61G7MGvv50jeuJCqr +VwMiKA1JdX+3KNp1v47j3A55MQIDAQABo0IwQDAdBgNVHQ4EFgQUnZPGU4teyq8/ +nx4P5ZmVvCT2lI8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ +KoZIhvcNAQELBQADggEBAFis9AQOzcAN/wr91LoWXym9e2iZWEnStB03TX8nfUYG +XUPGhi4+c7ImfU+TqbbEKpqrIZcUsd6M06uJFdhrJNTxFq7YpFzUf1GO7RgBsZNj +vbz4YYCanrHOQnDiqX0GJX0nof5v7LMeJNrjS1UaADs1tDvZ110w/YETifLCBivt +Z8SOyUOyXGsViQK8YvxO8rUzqrJv0wqiUOP2O+guRMLbZjipM1ZI8W0bM40NjD9g +N53Tym1+NH4Nn3J2ixufcv1SNUFFApYvHLKac0khsUlHRUe072o0EclNmsxZt9YC +nlpOZbWUrhvfKbAW8b8Angc6F2S1BLUjIZkKlTuXfO8= +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Networking O=AffirmTrust +# Subject: CN=AffirmTrust Networking 
O=AffirmTrust +# Label: "AffirmTrust Networking" +# Serial: 8957382827206547757 +# MD5 Fingerprint: 42:65:ca:be:01:9a:9a:4c:a9:8c:41:49:cd:c0:d5:7f +# SHA1 Fingerprint: 29:36:21:02:8b:20:ed:02:f5:66:c5:32:d1:d6:ed:90:9f:45:00:2f +# SHA256 Fingerprint: 0a:81:ec:5a:92:97:77:f1:45:90:4a:f3:8d:5d:50:9f:66:b5:e2:c5:8f:cd:b5:31:05:8b:0e:17:f3:f0:b4:1b +-----BEGIN CERTIFICATE----- +MIIDTDCCAjSgAwIBAgIIfE8EORzUmS0wDQYJKoZIhvcNAQEFBQAwRDELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz +dCBOZXR3b3JraW5nMB4XDTEwMDEyOTE0MDgyNFoXDTMwMTIzMTE0MDgyNFowRDEL +MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp +cm1UcnVzdCBOZXR3b3JraW5nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEAtITMMxcua5Rsa2FSoOujz3mUTOWUgJnLVWREZY9nZOIG41w3SfYvm4SEHi3y +YJ0wTsyEheIszx6e/jarM3c1RNg1lho9Nuh6DtjVR6FqaYvZ/Ls6rnla1fTWcbua +kCNrmreIdIcMHl+5ni36q1Mr3Lt2PpNMCAiMHqIjHNRqrSK6mQEubWXLviRmVSRL +QESxG9fhwoXA3hA/Pe24/PHxI1Pcv2WXb9n5QHGNfb2V1M6+oF4nI979ptAmDgAp +6zxG8D1gvz9Q0twmQVGeFDdCBKNwV6gbh+0t+nvujArjqWaJGctB+d1ENmHP4ndG +yH329JKBNv3bNPFyfvMMFr20FQIDAQABo0IwQDAdBgNVHQ4EFgQUBx/S55zawm6i +QLSwelAQUHTEyL0wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ +KoZIhvcNAQEFBQADggEBAIlXshZ6qML91tmbmzTCnLQyFE2npN/svqe++EPbkTfO +tDIuUFUaNU52Q3Eg75N3ThVwLofDwR1t3Mu1J9QsVtFSUzpE0nPIxBsFZVpikpzu +QY0x2+c06lkh1QF612S4ZDnNye2v7UsDSKegmQGA3GWjNq5lWUhPgkvIZfFXHeVZ +Lgo/bNjR9eUJtGxUAArgFU2HdW23WJZa3W3SAKD0m0i+wzekujbgfIeFlxoVot4u +olu9rxj5kFDNcFn4J2dHy8egBzp90SxdbBk6ZrV9/ZFvgrG+CJPbFEfxojfHRZ48 +x3evZKiT3/Zpg4Jg8klCNO1aAFSFHBY2kgxc+qatv9s= +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Premium O=AffirmTrust +# Subject: CN=AffirmTrust Premium O=AffirmTrust +# Label: "AffirmTrust Premium" +# Serial: 7893706540734352110 +# MD5 Fingerprint: c4:5d:0e:48:b6:ac:28:30:4e:0a:bc:f9:38:16:87:57 +# SHA1 Fingerprint: d8:a6:33:2c:e0:03:6f:b1:85:f6:63:4f:7d:6a:06:65:26:32:28:27 +# SHA256 Fingerprint: 70:a7:3f:7f:37:6b:60:07:42:48:90:45:34:b1:14:82:d5:bf:0e:69:8e:cc:49:8d:f5:25:77:eb:f2:e9:3b:9a +-----BEGIN CERTIFICATE----- +MIIFRjCCAy6gAwIBAgIIbYwURrGmCu4wDQYJKoZIhvcNAQEMBQAwQTELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1UcnVz +dCBQcmVtaXVtMB4XDTEwMDEyOTE0MTAzNloXDTQwMTIzMTE0MTAzNlowQTELMAkG +A1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1U +cnVzdCBQcmVtaXVtMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxBLf +qV/+Qd3d9Z+K4/as4Tx4mrzY8H96oDMq3I0gW64tb+eT2TZwamjPjlGjhVtnBKAQ +JG9dKILBl1fYSCkTtuG+kU3fhQxTGJoeJKJPj/CihQvL9Cl/0qRY7iZNyaqoe5rZ ++jjeRFcV5fiMyNlI4g0WJx0eyIOFJbe6qlVBzAMiSy2RjYvmia9mx+n/K+k8rNrS +s8PhaJyJ+HoAVt70VZVs+7pk3WKL3wt3MutizCaam7uqYoNMtAZ6MMgpv+0GTZe5 +HMQxK9VfvFMSF5yZVylmd2EhMQcuJUmdGPLu8ytxjLW6OQdJd/zvLpKQBY0tL3d7 +70O/Nbua2Plzpyzy0FfuKE4mX4+QaAkvuPjcBukumj5Rp9EixAqnOEhss/n/fauG +V+O61oV4d7pD6kh/9ti+I20ev9E2bFhc8e6kGVQa9QPSdubhjL08s9NIS+LI+H+S +qHZGnEJlPqQewQcDWkYtuJfzt9WyVSHvutxMAJf7FJUnM7/oQ0dG0giZFmA7mn7S +5u046uwBHjxIVkkJx0w3AJ6IDsBz4W9m6XJHMD4Q5QsDyZpCAGzFlH5hxIrff4Ia +C1nEWTJ3s7xgaVY5/bQGeyzWZDbZvUjthB9+pSKPKrhC9IK31FOQeE4tGv2Bb0TX +OwF0lkLgAOIua+rF7nKsu7/+6qqo+Nz2snmKtmcCAwEAAaNCMEAwHQYDVR0OBBYE +FJ3AZ6YMItkm9UWrpmVSESfYRaxjMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ +BAQDAgEGMA0GCSqGSIb3DQEBDAUAA4ICAQCzV00QYk465KzquByvMiPIs0laUZx2 +KI15qldGF9X1Uva3ROgIRL8YhNILgM3FEv0AVQVhh0HctSSePMTYyPtwni94loMg +Nt58D2kTiKV1NpgIpsbfrM7jWNa3Pt668+s0QNiigfV4Py/VpfzZotReBA4Xrf5B +8OWycvpEgjNC6C1Y91aMYj+6QrCcDFx+LmUmXFNPALJ4fqENmS2NuB2OosSw/WDQ +MKSOyARiqcTtNd56l+0OOF6SL5Nwpamcb6d9Ex1+xghIsV5n61EIJenmJWtSKZGc +0jlzCFfemQa0W50QBuHCAKi4HEoCChTQwUHK+4w1IX2COPKpVJEZNZOUbWo6xbLQ 
+u4mGk+ibyQ86p3q4ofB4Rvr8Ny/lioTz3/4E2aFooC8k4gmVBtWVyuEklut89pMF +u+1z6S3RdTnX5yTb2E5fQ4+e0BQ5v1VwSJlXMbSc7kqYA5YwH2AG7hsj/oFgIxpH +YoWlzBk0gG+zrBrjn/B7SK3VAdlntqlyk+otZrWyuOQ9PLLvTIzq6we/qzWaVYa8 +GKa1qF60g2xraUDTn9zxw2lrueFtCfTxqlB2Cnp9ehehVZZCmTEJ3WARjQUwfuaO +RtGdFNrHF+QFlozEJLUbzxQHskD4o55BhrwE0GuWyCqANP2/7waj3VjFhT0+j/6e +KeC2uAloGRwYQw== +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Premium ECC O=AffirmTrust +# Subject: CN=AffirmTrust Premium ECC O=AffirmTrust +# Label: "AffirmTrust Premium ECC" +# Serial: 8401224907861490260 +# MD5 Fingerprint: 64:b0:09:55:cf:b1:d5:99:e2:be:13:ab:a6:5d:ea:4d +# SHA1 Fingerprint: b8:23:6b:00:2f:1d:16:86:53:01:55:6c:11:a4:37:ca:eb:ff:c3:bb +# SHA256 Fingerprint: bd:71:fd:f6:da:97:e4:cf:62:d1:64:7a:dd:25:81:b0:7d:79:ad:f8:39:7e:b4:ec:ba:9c:5e:84:88:82:14:23 +-----BEGIN CERTIFICATE----- +MIIB/jCCAYWgAwIBAgIIdJclisc/elQwCgYIKoZIzj0EAwMwRTELMAkGA1UEBhMC +VVMxFDASBgNVBAoMC0FmZmlybVRydXN0MSAwHgYDVQQDDBdBZmZpcm1UcnVzdCBQ +cmVtaXVtIEVDQzAeFw0xMDAxMjkxNDIwMjRaFw00MDEyMzExNDIwMjRaMEUxCzAJ +BgNVBAYTAlVTMRQwEgYDVQQKDAtBZmZpcm1UcnVzdDEgMB4GA1UEAwwXQWZmaXJt +VHJ1c3QgUHJlbWl1bSBFQ0MwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQNMF4bFZ0D +0KF5Nbc6PJJ6yhUczWLznCZcBz3lVPqj1swS6vQUX+iOGasvLkjmrBhDeKzQN8O9 +ss0s5kfiGuZjuD0uL3jET9v0D6RoTFVya5UdThhClXjMNzyR4ptlKymjQjBAMB0G +A1UdDgQWBBSaryl6wBE1NSZRMADDav5A1a7WPDAPBgNVHRMBAf8EBTADAQH/MA4G +A1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNnADBkAjAXCfOHiFBar8jAQr9HX/Vs +aobgxCd05DhT1wV/GzTjxi+zygk8N53X57hG8f2h4nECMEJZh0PUUd+60wkyWs6I +flc9nF9Ca/UHLbXwgpP5WW+uZPpY5Yse42O+tYHNbwKMeQ== +-----END CERTIFICATE----- + +# Issuer: CN=StartCom Certification Authority O=StartCom Ltd. OU=Secure Digital Certificate Signing +# Subject: CN=StartCom Certification Authority O=StartCom Ltd. OU=Secure Digital Certificate Signing +# Label: "StartCom Certification Authority" +# Serial: 45 +# MD5 Fingerprint: c9:3b:0d:84:41:fc:a4:76:79:23:08:57:de:10:19:16 +# SHA1 Fingerprint: a3:f1:33:3f:e2:42:bf:cf:c5:d1:4e:8f:39:42:98:40:68:10:d1:a0 +# SHA256 Fingerprint: e1:78:90:ee:09:a3:fb:f4:f4:8b:9c:41:4a:17:d6:37:b7:a5:06:47:e9:bc:75:23:22:72:7f:cc:17:42:a9:11 +-----BEGIN CERTIFICATE----- +MIIHhzCCBW+gAwIBAgIBLTANBgkqhkiG9w0BAQsFADB9MQswCQYDVQQGEwJJTDEW +MBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMiU2VjdXJlIERpZ2l0YWwg +Q2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3RhcnRDb20gQ2VydGlmaWNh +dGlvbiBBdXRob3JpdHkwHhcNMDYwOTE3MTk0NjM3WhcNMzYwOTE3MTk0NjM2WjB9 +MQswCQYDVQQGEwJJTDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMi +U2VjdXJlIERpZ2l0YWwgQ2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3Rh +cnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUA +A4ICDwAwggIKAoICAQDBiNsJvGxGfHiflXu1M5DycmLWwTYgIiRezul38kMKogZk +pMyONvg45iPwbm2xPN1yo4UcodM9tDMr0y+v/uqwQVlntsQGfQqedIXWeUyAN3rf +OQVSWff0G0ZDpNKFhdLDcfN1YjS6LIp/Ho/u7TTQEceWzVI9ujPW3U3eCztKS5/C +Ji/6tRYccjV3yjxd5srhJosaNnZcAdt0FCX+7bWgiA/deMotHweXMAEtcnn6RtYT +Kqi5pquDSR3l8u/d5AGOGAqPY1MWhWKpDhk6zLVmpsJrdAfkK+F2PrRt2PZE4XNi +HzvEvqBTViVsUQn3qqvKv3b9bZvzndu/PWa8DFaqr5hIlTpL36dYUNk4dalb6kMM +Av+Z6+hsTXBbKWWc3apdzK8BMewM69KN6Oqce+Zu9ydmDBpI125C4z/eIT574Q1w ++2OqqGwaVLRcJXrJosmLFqa7LH4XXgVNWG4SHQHuEhANxjJ/GP/89PrNbpHoNkm+ +Gkhpi8KWTRoSsmkXwQqQ1vp5Iki/untp+HDH+no32NgN0nZPV/+Qt+OR0t3vwmC3 +Zzrd/qqc8NSLf3Iizsafl7b4r4qgEKjZ+xjGtrVcUjyJthkqcwEKDwOzEmDyei+B +26Nu/yYwl/WL3YlXtq09s68rxbd2AvCl1iuahhQqcvbjM4xdCUsT37uMdBNSSwID +AQABo4ICEDCCAgwwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD +VR0OBBYEFE4L7xqkQFulF2mHMMo0aEPQQa7yMB8GA1UdIwQYMBaAFE4L7xqkQFul +F2mHMMo0aEPQQa7yMIIBWgYDVR0gBIIBUTCCAU0wggFJBgsrBgEEAYG1NwEBATCC 
+ATgwLgYIKwYBBQUHAgEWImh0dHA6Ly93d3cuc3RhcnRzc2wuY29tL3BvbGljeS5w +ZGYwNAYIKwYBBQUHAgEWKGh0dHA6Ly93d3cuc3RhcnRzc2wuY29tL2ludGVybWVk +aWF0ZS5wZGYwgc8GCCsGAQUFBwICMIHCMCcWIFN0YXJ0IENvbW1lcmNpYWwgKFN0 +YXJ0Q29tKSBMdGQuMAMCAQEagZZMaW1pdGVkIExpYWJpbGl0eSwgcmVhZCB0aGUg +c2VjdGlvbiAqTGVnYWwgTGltaXRhdGlvbnMqIG9mIHRoZSBTdGFydENvbSBDZXJ0 +aWZpY2F0aW9uIEF1dGhvcml0eSBQb2xpY3kgYXZhaWxhYmxlIGF0IGh0dHA6Ly93 +d3cuc3RhcnRzc2wuY29tL3BvbGljeS5wZGYwEQYJYIZIAYb4QgEBBAQDAgAHMDgG +CWCGSAGG+EIBDQQrFilTdGFydENvbSBGcmVlIFNTTCBDZXJ0aWZpY2F0aW9uIEF1 +dGhvcml0eTANBgkqhkiG9w0BAQsFAAOCAgEAjo/n3JR5fPGFf59Jb2vKXfuM/gTF +wWLRfUKKvFO3lANmMD+x5wqnUCBVJX92ehQN6wQOQOY+2IirByeDqXWmN3PH/UvS +Ta0XQMhGvjt/UfzDtgUx3M2FIk5xt/JxXrAaxrqTi3iSSoX4eA+D/i+tLPfkpLst +0OcNOrg+zvZ49q5HJMqjNTbOx8aHmNrs++myziebiMMEofYLWWivydsQD032ZGNc +pRJvkrKTlMeIFw6Ttn5ii5B/q06f/ON1FE8qMt9bDeD1e5MNq6HPh+GlBEXoPBKl +CcWw0bdT82AUuoVpaiF8H3VhFyAXe2w7QSlc4axa0c2Mm+tgHRns9+Ww2vl5GKVF +P0lDV9LdJNUso/2RjSe15esUBppMeyG7Oq0wBhjA2MFrLH9ZXF2RsXAiV+uKa0hK +1Q8p7MZAwC+ITGgBF3f0JBlPvfrhsiAhS90a2Cl9qrjeVOwhVYBsHvUwyKMQ5bLm +KhQxw4UtjJixhlpPiVktucf3HMiKf8CdBUrmQk9io20ppB+Fq9vlgcitKj1MXVuE +JnHEhV5xJMqlG2zYYdMa4FTbzrqpMrUi9nNBCV24F10OD5mQ1kfabwo6YigUZ4LZ +8dCAWZvLMdibD4x3TrVoivJs9iQOLWxwxXPR3hTQcY+203sC9uO41Alua551hDnm +fyWl8kgAwKQB2j8= +-----END CERTIFICATE----- + +# Issuer: CN=StartCom Certification Authority G2 O=StartCom Ltd. +# Subject: CN=StartCom Certification Authority G2 O=StartCom Ltd. +# Label: "StartCom Certification Authority G2" +# Serial: 59 +# MD5 Fingerprint: 78:4b:fb:9e:64:82:0a:d3:b8:4c:62:f3:64:f2:90:64 +# SHA1 Fingerprint: 31:f1:fd:68:22:63:20:ee:c6:3b:3f:9d:ea:4a:3e:53:7c:7c:39:17 +# SHA256 Fingerprint: c7:ba:65:67:de:93:a7:98:ae:1f:aa:79:1e:71:2d:37:8f:ae:1f:93:c4:39:7f:ea:44:1b:b7:cb:e6:fd:59:95 +-----BEGIN CERTIFICATE----- +MIIFYzCCA0ugAwIBAgIBOzANBgkqhkiG9w0BAQsFADBTMQswCQYDVQQGEwJJTDEW +MBQGA1UEChMNU3RhcnRDb20gTHRkLjEsMCoGA1UEAxMjU3RhcnRDb20gQ2VydGlm +aWNhdGlvbiBBdXRob3JpdHkgRzIwHhcNMTAwMTAxMDEwMDAxWhcNMzkxMjMxMjM1 +OTAxWjBTMQswCQYDVQQGEwJJTDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjEsMCoG +A1UEAxMjU3RhcnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgRzIwggIiMA0G +CSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2iTZbB7cgNr2Cu+EWIAOVeq8Oo1XJ +JZlKxdBWQYeQTSFgpBSHO839sj60ZwNq7eEPS8CRhXBF4EKe3ikj1AENoBB5uNsD +vfOpL9HG4A/LnooUCri99lZi8cVytjIl2bLzvWXFDSxu1ZJvGIsAQRSCb0AgJnoo +D/Uefyf3lLE3PbfHkffiAez9lInhzG7TNtYKGXmu1zSCZf98Qru23QumNK9LYP5/ +Q0kGi4xDuFby2X8hQxfqp0iVAXV16iulQ5XqFYSdCI0mblWbq9zSOdIxHWDirMxW +RST1HFSr7obdljKF+ExP6JV2tgXdNiNnvP8V4so75qbsO+wmETRIjfaAKxojAuuK +HDp2KntWFhxyKrOq42ClAJ8Em+JvHhRYW6Vsi1g8w7pOOlz34ZYrPu8HvKTlXcxN +nw3h3Kq74W4a7I/htkxNeXJdFzULHdfBR9qWJODQcqhaX2YtENwvKhOuJv4KHBnM +0D4LnMgJLvlblnpHnOl68wVQdJVznjAJ85eCXuaPOQgeWeU1FEIT/wCc976qUM/i +UUjXuG+v+E5+M5iSFGI6dWPPe/regjupuznixL0sAA7IF6wT700ljtizkC+p2il9 +Ha90OrInwMEePnWjFqmveiJdnxMaz6eg6+OGCtP95paV1yPIN93EfKo2rJgaErHg +TuixO/XWb/Ew1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQE +AwIBBjAdBgNVHQ4EFgQUS8W0QGutHLOlHGVuRjaJhwUMDrYwDQYJKoZIhvcNAQEL +BQADggIBAHNXPyzVlTJ+N9uWkusZXn5T50HsEbZH77Xe7XRcxfGOSeD8bpkTzZ+K +2s06Ctg6Wgk/XzTQLwPSZh0avZyQN8gMjgdalEVGKua+etqhqaRpEpKwfTbURIfX +UfEpY9Z1zRbkJ4kd+MIySP3bmdCPX1R0zKxnNBFi2QwKN4fRoxdIjtIXHfbX/dtl +6/2o1PXWT6RbdejF0mCy2wl+JYt7ulKSnj7oxXehPOBKc2thz4bcQ///If4jXSRK +9dNtD2IEBVeC2m6kMyV5Sy5UGYvMLD0w6dEG/+gyRr61M3Z3qAFdlsHB1b6uJcDJ +HgoJIIihDsnzb02CVAAgp9KP5DlUFy6NHrgbuxu9mk47EDTcnIhT76IxW1hPkWLI +wpqazRVdOKnWvvgTtZ8SafJQYqz7Fzf07rh1Z2AQ+4NQ+US1dZxAF7L+/XldblhY +XzD8AK6vM8EOTmy6p6ahfzLbOOCxchcKK5HsamMm7YnUeMx0HgX4a/6ManY5Ka5l 
+IxKVCCIcl85bBu4M4ru8H0ST9tg4RQUh7eStqxK2A6RCLi3ECToDZ2mEmuFZkIoo +hdVddLHRDiBYmxOlsGOm7XtH/UVVMKTumtTm4ofvmMkyghEpIrwACjFeLQ/Ajulr +so8uBtjRkcfGEvRM/TAXw8HaOFvjqermobp573PYtlNXLfbQ4ddI +-----END CERTIFICATE----- + +# Issuer: O=Digital Signature Trust Co., CN=DST Root CA X3 +# Subject: O=Digital Signature Trust Co., CN=DST Root CA X3 +# Label: "IdenTrust DST Root CA X3" +# Serial: 44AFB080D6A327BA893039862EF8406B +# MD5 Fingerprint: 41:03:52:DC:0F:F7:50:1B:16:F0:02:8E:BA:6F:45:C5 +# SHA1 Fingerprint: DA:C9:02:4F:54:D8:F6:DF:94:93:5F:B1:73:26:38:CA:6A:D7:7C:13 +# SHA256 Fingerprint: 06:87:26:03:31:A7:24:03:D9:09:F1:05:E6:9B:CF:0D:32:E1:BD:24:93:FF:C6:D9:20:6D:11:BC:D6:77:07:39 +-----BEGIN CERTIFICATE----- +MIIDSjCCAjKgAwIBAgIQRK+wgNajJ7qJMDmGLvhAazANBgkqhkiG9w0BAQUFADA/ +MSQwIgYDVQQKExtEaWdpdGFsIFNpZ25hdHVyZSBUcnVzdCBDby4xFzAVBgNVBAMT +DkRTVCBSb290IENBIFgzMB4XDTAwMDkzMDIxMTIxOVoXDTIxMDkzMDE0MDExNVow +PzEkMCIGA1UEChMbRGlnaXRhbCBTaWduYXR1cmUgVHJ1c3QgQ28uMRcwFQYDVQQD +Ew5EU1QgUm9vdCBDQSBYMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB +AN+v6ZdQCINXtMxiZfaQguzH0yxrMMpb7NnDfcdAwRgUi+DoM3ZJKuM/IUmTrE4O +rz5Iy2Xu/NMhD2XSKtkyj4zl93ewEnu1lcCJo6m67XMuegwGMoOifooUMM0RoOEq +OLl5CjH9UL2AZd+3UWODyOKIYepLYYHsUmu5ouJLGiifSKOeDNoJjj4XLh7dIN9b +xiqKqy69cK3FCxolkHRyxXtqqzTWMIn/5WgTe1QLyNau7Fqckh49ZLOMxt+/yUFw +7BZy1SbsOFU5Q9D8/RhcQPGX69Wam40dutolucbY38EVAjqr2m7xPi71XAicPNaD +aeQQmxkqtilX4+U9m5/wAl0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNV +HQ8BAf8EBAMCAQYwHQYDVR0OBBYEFMSnsaR7LHH62+FLkHX/xBVghYkQMA0GCSqG +SIb3DQEBBQUAA4IBAQCjGiybFwBcqR7uKGY3Or+Dxz9LwwmglSBd49lZRNI+DT69 +ikugdB/OEIKcdBodfpga3csTS7MgROSR6cz8faXbauX+5v3gTt23ADq1cEmv8uXr +AvHRAosZy5Q6XkjEGB5YGV8eAlrwDPGxrancWYaLbumR9YbK+rlmM6pZW87ipxZz +R8srzJmwN0jP41ZL9c8PDHIyh8bwRLtTcm1D9SZImlJnt1ir/md2cXjbDaJWFBM5 +JDGFoqgCWjBH4d1QB7wCCZAA62RjYJsWvIjJEubSfZGL+T0yjWW06XyxV3bqxbYo +Ob8VZRzI9neWagqNdwvYkQsEjgfbKbYK7p2CNTUQ +-----END CERTIFICATE----- diff --git a/env/lib/python3.6/site-packages/httplib2/iri2uri.py b/env/lib/python3.6/site-packages/httplib2/iri2uri.py new file mode 100644 index 0000000..98985f8 --- /dev/null +++ b/env/lib/python3.6/site-packages/httplib2/iri2uri.py @@ -0,0 +1,110 @@ +""" +iri2uri + +Converts an IRI to a URI. 
+ +""" +__author__ = "Joe Gregorio (joe@bitworking.org)" +__copyright__ = "Copyright 2006, Joe Gregorio" +__contributors__ = [] +__version__ = "1.0.0" +__license__ = "MIT" +__history__ = """ +""" + +import urllib.parse + + +# Convert an IRI to a URI following the rules in RFC 3987 +# +# The characters we need to encode and escape are defined in the spec: +# +# iprivate = %xE000-F8FF / %xF0000-FFFFD / %x100000-10FFFD +# ucschar = %xA0-D7FF / %xF900-FDCF / %xFDF0-FFEF +# / %x10000-1FFFD / %x20000-2FFFD / %x30000-3FFFD +# / %x40000-4FFFD / %x50000-5FFFD / %x60000-6FFFD +# / %x70000-7FFFD / %x80000-8FFFD / %x90000-9FFFD +# / %xA0000-AFFFD / %xB0000-BFFFD / %xC0000-CFFFD +# / %xD0000-DFFFD / %xE1000-EFFFD + +escape_range = [ + (0xA0, 0xD7FF), + (0xE000, 0xF8FF), + (0xF900, 0xFDCF), + (0xFDF0, 0xFFEF), + (0x10000, 0x1FFFD), + (0x20000, 0x2FFFD), + (0x30000, 0x3FFFD), + (0x40000, 0x4FFFD), + (0x50000, 0x5FFFD), + (0x60000, 0x6FFFD), + (0x70000, 0x7FFFD), + (0x80000, 0x8FFFD), + (0x90000, 0x9FFFD), + (0xA0000, 0xAFFFD), + (0xB0000, 0xBFFFD), + (0xC0000, 0xCFFFD), + (0xD0000, 0xDFFFD), + (0xE1000, 0xEFFFD), + (0xF0000, 0xFFFFD), + (0x100000, 0x10FFFD), +] + +def encode(c): + retval = c + i = ord(c) + for low, high in escape_range: + if i < low: + break + if i >= low and i <= high: + retval = "".join(["%%%2X" % o for o in c.encode('utf-8')]) + break + return retval + + +def iri2uri(uri): + """Convert an IRI to a URI. Note that IRIs must be + passed in as unicode strings. That is, do not utf-8 encode + the IRI before passing it into the function.""" + if isinstance(uri, str): + (scheme, authority, path, query, fragment) = urllib.parse.urlsplit(uri) + authority = authority.encode('idna').decode('utf-8') + # For each character in 'ucschar' or 'iprivate' + # 1. encode as utf-8 + # 2. 
then %-encode each octet of that utf-8 + uri = urllib.parse.urlunsplit((scheme, authority, path, query, fragment)) + uri = "".join([encode(c) for c in uri]) + return uri + +if __name__ == "__main__": + import unittest + + class Test(unittest.TestCase): + + def test_uris(self): + """Test that URIs are invariant under the transformation.""" + invariant = [ + "ftp://ftp.is.co.za/rfc/rfc1808.txt", + "http://www.ietf.org/rfc/rfc2396.txt", + "ldap://[2001:db8::7]/c=GB?objectClass?one", + "mailto:John.Doe@example.com", + "news:comp.infosystems.www.servers.unix", + "tel:+1-816-555-1212", + "telnet://192.0.2.16:80/", + "urn:oasis:names:specification:docbook:dtd:xml:4.1.2" ] + for uri in invariant: + self.assertEqual(uri, iri2uri(uri)) + + def test_iri(self): + """ Test that the right type of escaping is done for each part of the URI.""" + self.assertEqual("http://xn--o3h.com/%E2%98%84", iri2uri("http://\N{COMET}.com/\N{COMET}")) + self.assertEqual("http://bitworking.org/?fred=%E2%98%84", iri2uri("http://bitworking.org/?fred=\N{COMET}")) + self.assertEqual("http://bitworking.org/#%E2%98%84", iri2uri("http://bitworking.org/#\N{COMET}")) + self.assertEqual("#%E2%98%84", iri2uri("#\N{COMET}")) + self.assertEqual("/fred?bar=%E2%98%9A#%E2%98%84", iri2uri("/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}")) + self.assertEqual("/fred?bar=%E2%98%9A#%E2%98%84", iri2uri(iri2uri("/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}"))) + self.assertNotEqual("/fred?bar=%E2%98%9A#%E2%98%84", iri2uri("/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}".encode('utf-8'))) + + unittest.main() + + diff --git a/env/lib/python3.6/site-packages/idna-2.7.dist-info/INSTALLER b/env/lib/python3.6/site-packages/idna-2.7.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/env/lib/python3.6/site-packages/idna-2.7.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/env/lib/python3.6/site-packages/idna-2.7.dist-info/LICENSE.txt b/env/lib/python3.6/site-packages/idna-2.7.dist-info/LICENSE.txt new file mode 100755 index 0000000..3ee64fb --- /dev/null +++ b/env/lib/python3.6/site-packages/idna-2.7.dist-info/LICENSE.txt @@ -0,0 +1,80 @@ +License +------- + +Copyright (c) 2013-2018, Kim Davies. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +#. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +#. Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided with + the distribution. + +#. Neither the name of the copyright holder nor the names of the + contributors may be used to endorse or promote products derived + from this software without specific prior written permission. + +#. THIS SOFTWARE IS PROVIDED BY THE CONTRIBUTORS "AS IS" AND ANY + EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR + PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR + CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE + USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH + DAMAGE. + +Portions of the codec implementation and unit tests are derived from the +Python standard library, which carries the `Python Software Foundation +License `_: + + Copyright (c) 2001-2014 Python Software Foundation; All Rights Reserved + +Portions of the unit tests are derived from the Unicode standard, which +is subject to the Unicode, Inc. License Agreement: + + Copyright (c) 1991-2014 Unicode, Inc. All rights reserved. + Distributed under the Terms of Use in + . + + Permission is hereby granted, free of charge, to any person obtaining + a copy of the Unicode data files and any associated documentation + (the "Data Files") or Unicode software and any associated documentation + (the "Software") to deal in the Data Files or Software + without restriction, including without limitation the rights to use, + copy, modify, merge, publish, distribute, and/or sell copies of + the Data Files or Software, and to permit persons to whom the Data Files + or Software are furnished to do so, provided that + + (a) this copyright and permission notice appear with all copies + of the Data Files or Software, + + (b) this copyright and permission notice appear in associated + documentation, and + + (c) there is clear notice in each modified Data File or in the Software + as well as in the documentation associated with the Data File(s) or + Software that the data or software has been modified. + + THE DATA FILES AND SOFTWARE ARE PROVIDED "AS IS", WITHOUT WARRANTY OF + ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE + WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + NONINFRINGEMENT OF THIRD PARTY RIGHTS. + IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS + NOTICE BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL + DAMAGES, OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, + DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER + TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR + PERFORMANCE OF THE DATA FILES OR SOFTWARE. + + Except as contained in this notice, the name of a copyright holder + shall not be used in advertising or otherwise to promote the sale, + use or other dealings in these Data Files or Software without prior + written authorization of the copyright holder. 
diff --git a/env/lib/python3.6/site-packages/idna-2.7.dist-info/METADATA b/env/lib/python3.6/site-packages/idna-2.7.dist-info/METADATA new file mode 100755 index 0000000..b4ef4c7 --- /dev/null +++ b/env/lib/python3.6/site-packages/idna-2.7.dist-info/METADATA @@ -0,0 +1,239 @@ +Metadata-Version: 2.1 +Name: idna +Version: 2.7 +Summary: Internationalized Domain Names in Applications (IDNA) +Home-page: https://github.com/kjd/idna +Author: Kim Davies +Author-email: kim@cynosure.com.au +License: BSD-like +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: System Administrators +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Topic :: Internet :: Name Service (DNS) +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: Utilities + +Internationalized Domain Names in Applications (IDNA) +===================================================== + +Support for the Internationalised Domain Names in Applications +(IDNA) protocol as specified in `RFC 5891 `_. +This is the latest version of the protocol and is sometimes referred to as +“IDNA 2008”. + +This library also provides support for Unicode Technical Standard 46, +`Unicode IDNA Compatibility Processing `_. + +This acts as a suitable replacement for the “encodings.idna” module that +comes with the Python standard library, but only supports the +old, deprecated IDNA specification (`RFC 3490 `_). + +Basic functions are simply executed: + +.. code-block:: pycon + + # Python 3 + >>> import idna + >>> idna.encode('ドメイン.テスト') + b'xn--eckwd4c7c.xn--zckzah' + >>> print(idna.decode('xn--eckwd4c7c.xn--zckzah')) + ドメイン.テスト + + # Python 2 + >>> import idna + >>> idna.encode(u'ドメイン.テスト') + 'xn--eckwd4c7c.xn--zckzah' + >>> print idna.decode('xn--eckwd4c7c.xn--zckzah') + ドメイン.テスト + +Packages +-------- + +The latest tagged release version is published in the PyPI repository: + +.. image:: https://badge.fury.io/py/idna.svg + :target: http://badge.fury.io/py/idna + + +Installation +------------ + +To install this library, you can use pip: + +.. code-block:: bash + + $ pip install idna + +Alternatively, you can install the package using the bundled setup script: + +.. code-block:: bash + + $ python setup.py install + +This library works with Python 2.6 or later, and Python 3.3 or later. + + +Usage +----- + +For typical usage, the ``encode`` and ``decode`` functions will take a domain +name argument and perform a conversion to A-labels or U-labels respectively. + +.. code-block:: pycon + + # Python 3 + >>> import idna + >>> idna.encode('ドメイン.テスト') + b'xn--eckwd4c7c.xn--zckzah' + >>> print(idna.decode('xn--eckwd4c7c.xn--zckzah')) + ドメイン.テスト + +You may use the codec encoding and decoding methods using the +``idna.codec`` module: + +.. 
code-block:: pycon + + # Python 2 + >>> import idna.codec + >>> print u'домена.испытание'.encode('idna') + xn--80ahd1agd.xn--80akhbyknj4f + >>> print 'xn--80ahd1agd.xn--80akhbyknj4f'.decode('idna') + домена.испытание + +Conversions can be applied at a per-label basis using the ``ulabel`` or ``alabel`` +functions if necessary: + +.. code-block:: pycon + + # Python 2 + >>> idna.alabel(u'测试') + 'xn--0zwm56d' + +Compatibility Mapping (UTS #46) ++++++++++++++++++++++++++++++++ + +As described in `RFC 5895 `_, the IDNA +specification no longer normalizes input from different potential ways a user +may input a domain name. This functionality, known as a “mapping”, is now +considered by the specification to be a local user-interface issue distinct +from IDNA conversion functionality. + +This library provides one such mapping, that was developed by the Unicode +Consortium. Known as `Unicode IDNA Compatibility Processing `_, +it provides for both a regular mapping for typical applications, as well as +a transitional mapping to help migrate from older IDNA 2003 applications. + +For example, “Königsgäßchen” is not a permissible label as *LATIN CAPITAL +LETTER K* is not allowed (nor are capital letters in general). UTS 46 will +convert this into lower case prior to applying the IDNA conversion. + +.. code-block:: pycon + + # Python 3 + >>> import idna + >>> idna.encode(u'Königsgäßchen') + ... + idna.core.InvalidCodepoint: Codepoint U+004B at position 1 of 'Königsgäßchen' not allowed + >>> idna.encode('Königsgäßchen', uts46=True) + b'xn--knigsgchen-b4a3dun' + >>> print(idna.decode('xn--knigsgchen-b4a3dun')) + königsgäßchen + +Transitional processing provides conversions to help transition from the older +2003 standard to the current standard. For example, in the original IDNA +specification, the *LATIN SMALL LETTER SHARP S* (ß) was converted into two +*LATIN SMALL LETTER S* (ss), whereas in the current IDNA specification this +conversion is not performed. + +.. code-block:: pycon + + # Python 2 + >>> idna.encode(u'Königsgäßchen', uts46=True, transitional=True) + 'xn--knigsgsschen-lcb0w' + +Implementors should use transitional processing with caution, only in rare +cases where conversion from legacy labels to current labels must be performed +(i.e. IDNA implementations that pre-date 2008). For typical applications +that just need to convert labels, transitional processing is unlikely to be +beneficial and could produce unexpected incompatible results. + +``encodings.idna`` Compatibility +++++++++++++++++++++++++++++++++ + +Function calls from the Python built-in ``encodings.idna`` module are +mapped to their IDNA 2008 equivalents using the ``idna.compat`` module. +Simply substitute the ``import`` clause in your code to refer to the +new module name. + +Exceptions +---------- + +All errors raised during the conversion following the specification should +raise an exception derived from the ``idna.IDNAError`` base class. + +More specific exceptions that may be generated as ``idna.IDNABidiError`` +when the error reflects an illegal combination of left-to-right and right-to-left +characters in a label; ``idna.InvalidCodepoint`` when a specific codepoint is +an illegal character in an IDN label (i.e. INVALID); and ``idna.InvalidCodepointContext`` +when the codepoint is illegal based on its positional context (i.e. it is CONTEXTO +or CONTEXTJ but the contextual requirements are not satisfied.) 
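+
+A minimal sketch of catching these exceptions, assuming the exception
+classes are importable from the top-level ``idna`` package as the examples
+above suggest (the error message shown is the one quoted earlier for
+“Königsgäßchen” without UTS 46 processing):
+
+.. code-block:: pycon
+
+   # Python 3
+   >>> import idna
+   >>> try:
+   ...     idna.encode('Königsgäßchen')
+   ... except idna.InvalidCodepoint as exc:
+   ...     # More specific exceptions derive from idna.IDNAError, so catch them first
+   ...     print('Disallowed codepoint:', exc)
+   ... except idna.IDNAError as exc:
+   ...     print('Other IDNA error:', exc)
+   ...
+   Disallowed codepoint: Codepoint U+004B at position 1 of 'Königsgäßchen' not allowed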
+ +Building and Diagnostics +------------------------ + +The IDNA and UTS 46 functionality relies upon pre-calculated lookup tables for +performance. These tables are derived from computing against eligibility criteria +in the respective standards. These tables are computed using the command-line +script ``tools/idna-data``. + +This tool will fetch relevant tables from the Unicode Consortium and perform the +required calculations to identify eligibility. It has three main modes: + +* ``idna-data make-libdata``. Generates ``idnadata.py`` and ``uts46data.py``, + the pre-calculated lookup tables using for IDNA and UTS 46 conversions. Implementors + who wish to track this library against a different Unicode version may use this tool + to manually generate a different version of the ``idnadata.py`` and ``uts46data.py`` + files. + +* ``idna-data make-table``. Generate a table of the IDNA disposition + (e.g. PVALID, CONTEXTJ, CONTEXTO) in the format found in Appendix B.1 of RFC + 5892 and the pre-computed tables published by `IANA `_. + +* ``idna-data U+0061``. Prints debugging output on the various properties + associated with an individual Unicode codepoint (in this case, U+0061), that are + used to assess the IDNA and UTS 46 status of a codepoint. This is helpful in debugging + or analysis. + +The tool accepts a number of arguments, described using ``idna-data -h``. Most notably, +the ``--version`` argument allows the specification of the version of Unicode to use +in computing the table data. For example, ``idna-data --version 9.0.0 make-libdata`` +will generate library data against Unicode 9.0.0. + +Note that this script requires Python 3, but all generated library data will work +in Python 2.6+. + + +Testing +------- + +The library has a test suite based on each rule of the IDNA specification, as +well as tests that are provided as part of the Unicode Technical Standard 46, +`Unicode IDNA Compatibility Processing `_. + +The tests are run automatically on each commit at Travis CI: + +.. 
image:: https://travis-ci.org/kjd/idna.svg?branch=master + :target: https://travis-ci.org/kjd/idna + + diff --git a/env/lib/python3.6/site-packages/idna-2.7.dist-info/RECORD b/env/lib/python3.6/site-packages/idna-2.7.dist-info/RECORD new file mode 100644 index 0000000..ffe0cdc --- /dev/null +++ b/env/lib/python3.6/site-packages/idna-2.7.dist-info/RECORD @@ -0,0 +1,22 @@ +idna/__init__.py,sha256=9Nt7xpyet3DmOrPUGooDdAwmHZZu1qUAy2EaJ93kGiQ,58 +idna/codec.py,sha256=lvYb7yu7PhAqFaAIAdWcwgaWI2UmgseUua-1c0AsG0A,3299 +idna/compat.py,sha256=R-h29D-6mrnJzbXxymrWUW7iZUvy-26TQwZ0ij57i4U,232 +idna/core.py,sha256=OwI5R_uuXU4PlOSoG8cjaMPA1hhdGGjjZ8I2MZhSPxo,11858 +idna/idnadata.py,sha256=zwxvoSsYqPHNa6xzXWHizXpDC28JJMGXRinhJ4Gkcz0,39285 +idna/intranges.py,sha256=TY1lpxZIQWEP6tNqjZkFA5hgoMWOj1OBmnUG8ihT87E,1749 +idna/package_data.py,sha256=Vt9rtr32BzO7O25rypo8nzAs3syTJhG1ojU3J-s2RFo,21 +idna/uts46data.py,sha256=czULzYN5Lr9K5MmOH-1g3CJY7QPjGeHjYmC3saJ_BHk,197803 +idna-2.7.dist-info/LICENSE.txt,sha256=DUvHq9SNz7FOJCVO5AQGZzf_AWcUTiIpFKIRO4eUaD4,3947 +idna-2.7.dist-info/METADATA,sha256=rRIGQN4TE52ADjNub-LTnazbzjxmsviAzZia-v_nZHs,8866 +idna-2.7.dist-info/RECORD,, +idna-2.7.dist-info/WHEEL,sha256=J3CsTk7Mf2JNUyhImI-mjX-fmI4oDjyiXgWT4qgZiCE,110 +idna-2.7.dist-info/top_level.txt,sha256=jSag9sEDqvSPftxOQy-ABfGV_RSy7oFh4zZJpODV8k0,5 +idna-2.7.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +idna/__pycache__/codec.cpython-36.pyc,, +idna/__pycache__/compat.cpython-36.pyc,, +idna/__pycache__/idnadata.cpython-36.pyc,, +idna/__pycache__/package_data.cpython-36.pyc,, +idna/__pycache__/intranges.cpython-36.pyc,, +idna/__pycache__/core.cpython-36.pyc,, +idna/__pycache__/uts46data.cpython-36.pyc,, +idna/__pycache__/__init__.cpython-36.pyc,, diff --git a/env/lib/python3.6/site-packages/idna-2.7.dist-info/WHEEL b/env/lib/python3.6/site-packages/idna-2.7.dist-info/WHEEL new file mode 100755 index 0000000..f21b51c --- /dev/null +++ b/env/lib/python3.6/site-packages/idna-2.7.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.31.0) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/env/lib/python3.6/site-packages/idna-2.7.dist-info/top_level.txt b/env/lib/python3.6/site-packages/idna-2.7.dist-info/top_level.txt new file mode 100755 index 0000000..c40472e --- /dev/null +++ b/env/lib/python3.6/site-packages/idna-2.7.dist-info/top_level.txt @@ -0,0 +1 @@ +idna diff --git a/env/lib/python3.6/site-packages/idna/codec.py b/env/lib/python3.6/site-packages/idna/codec.py new file mode 100755 index 0000000..98c65ea --- /dev/null +++ b/env/lib/python3.6/site-packages/idna/codec.py @@ -0,0 +1,118 @@ +from .core import encode, decode, alabel, ulabel, IDNAError +import codecs +import re + +_unicode_dots_re = re.compile(u'[\u002e\u3002\uff0e\uff61]') + +class Codec(codecs.Codec): + + def encode(self, data, errors='strict'): + + if errors != 'strict': + raise IDNAError("Unsupported error handling \"{0}\"".format(errors)) + + if not data: + return "", 0 + + return encode(data), len(data) + + def decode(self, data, errors='strict'): + + if errors != 'strict': + raise IDNAError("Unsupported error handling \"{0}\"".format(errors)) + + if not data: + return u"", 0 + + return decode(data), len(data) + +class IncrementalEncoder(codecs.BufferedIncrementalEncoder): + def _buffer_encode(self, data, errors, final): + if errors != 'strict': + raise IDNAError("Unsupported error handling \"{0}\"".format(errors)) + + if not data: + return ("", 0) + + labels = 
_unicode_dots_re.split(data) + trailing_dot = u'' + if labels: + if not labels[-1]: + trailing_dot = '.' + del labels[-1] + elif not final: + # Keep potentially unfinished label until the next call + del labels[-1] + if labels: + trailing_dot = '.' + + result = [] + size = 0 + for label in labels: + result.append(alabel(label)) + if size: + size += 1 + size += len(label) + + # Join with U+002E + result = ".".join(result) + trailing_dot + size += len(trailing_dot) + return (result, size) + +class IncrementalDecoder(codecs.BufferedIncrementalDecoder): + def _buffer_decode(self, data, errors, final): + if errors != 'strict': + raise IDNAError("Unsupported error handling \"{0}\"".format(errors)) + + if not data: + return (u"", 0) + + # IDNA allows decoding to operate on Unicode strings, too. + if isinstance(data, unicode): + labels = _unicode_dots_re.split(data) + else: + # Must be ASCII string + data = str(data) + unicode(data, "ascii") + labels = data.split(".") + + trailing_dot = u'' + if labels: + if not labels[-1]: + trailing_dot = u'.' + del labels[-1] + elif not final: + # Keep potentially unfinished label until the next call + del labels[-1] + if labels: + trailing_dot = u'.' + + result = [] + size = 0 + for label in labels: + result.append(ulabel(label)) + if size: + size += 1 + size += len(label) + + result = u".".join(result) + trailing_dot + size += len(trailing_dot) + return (result, size) + + +class StreamWriter(Codec, codecs.StreamWriter): + pass + +class StreamReader(Codec, codecs.StreamReader): + pass + +def getregentry(): + return codecs.CodecInfo( + name='idna', + encode=Codec().encode, + decode=Codec().decode, + incrementalencoder=IncrementalEncoder, + incrementaldecoder=IncrementalDecoder, + streamwriter=StreamWriter, + streamreader=StreamReader, + ) diff --git a/env/lib/python3.6/site-packages/idna/compat.py b/env/lib/python3.6/site-packages/idna/compat.py new file mode 100755 index 0000000..4d47f33 --- /dev/null +++ b/env/lib/python3.6/site-packages/idna/compat.py @@ -0,0 +1,12 @@ +from .core import * +from .codec import * + +def ToASCII(label): + return encode(label) + +def ToUnicode(label): + return decode(label) + +def nameprep(s): + raise NotImplementedError("IDNA 2008 does not utilise nameprep protocol") + diff --git a/env/lib/python3.6/site-packages/idna/core.py b/env/lib/python3.6/site-packages/idna/core.py new file mode 100755 index 0000000..090c2c1 --- /dev/null +++ b/env/lib/python3.6/site-packages/idna/core.py @@ -0,0 +1,399 @@ +from . 
import idnadata +import bisect +import unicodedata +import re +import sys +from .intranges import intranges_contain + +_virama_combining_class = 9 +_alabel_prefix = b'xn--' +_unicode_dots_re = re.compile(u'[\u002e\u3002\uff0e\uff61]') + +if sys.version_info[0] == 3: + unicode = str + unichr = chr + +class IDNAError(UnicodeError): + """ Base exception for all IDNA-encoding related problems """ + pass + + +class IDNABidiError(IDNAError): + """ Exception when bidirectional requirements are not satisfied """ + pass + + +class InvalidCodepoint(IDNAError): + """ Exception when a disallowed or unallocated codepoint is used """ + pass + + +class InvalidCodepointContext(IDNAError): + """ Exception when the codepoint is not valid in the context it is used """ + pass + + +def _combining_class(cp): + v = unicodedata.combining(unichr(cp)) + if v == 0: + if not unicodedata.name(unichr(cp)): + raise ValueError("Unknown character in unicodedata") + return v + +def _is_script(cp, script): + return intranges_contain(ord(cp), idnadata.scripts[script]) + +def _punycode(s): + return s.encode('punycode') + +def _unot(s): + return 'U+{0:04X}'.format(s) + + +def valid_label_length(label): + + if len(label) > 63: + return False + return True + + +def valid_string_length(label, trailing_dot): + + if len(label) > (254 if trailing_dot else 253): + return False + return True + + +def check_bidi(label, check_ltr=False): + + # Bidi rules should only be applied if string contains RTL characters + bidi_label = False + for (idx, cp) in enumerate(label, 1): + direction = unicodedata.bidirectional(cp) + if direction == '': + # String likely comes from a newer version of Unicode + raise IDNABidiError('Unknown directionality in label {0} at position {1}'.format(repr(label), idx)) + if direction in ['R', 'AL', 'AN']: + bidi_label = True + if not bidi_label and not check_ltr: + return True + + # Bidi rule 1 + direction = unicodedata.bidirectional(label[0]) + if direction in ['R', 'AL']: + rtl = True + elif direction == 'L': + rtl = False + else: + raise IDNABidiError('First codepoint in label {0} must be directionality L, R or AL'.format(repr(label))) + + valid_ending = False + number_type = False + for (idx, cp) in enumerate(label, 1): + direction = unicodedata.bidirectional(cp) + + if rtl: + # Bidi rule 2 + if not direction in ['R', 'AL', 'AN', 'EN', 'ES', 'CS', 'ET', 'ON', 'BN', 'NSM']: + raise IDNABidiError('Invalid direction for codepoint at position {0} in a right-to-left label'.format(idx)) + # Bidi rule 3 + if direction in ['R', 'AL', 'EN', 'AN']: + valid_ending = True + elif direction != 'NSM': + valid_ending = False + # Bidi rule 4 + if direction in ['AN', 'EN']: + if not number_type: + number_type = direction + else: + if number_type != direction: + raise IDNABidiError('Can not mix numeral types in a right-to-left label') + else: + # Bidi rule 5 + if not direction in ['L', 'EN', 'ES', 'CS', 'ET', 'ON', 'BN', 'NSM']: + raise IDNABidiError('Invalid direction for codepoint at position {0} in a left-to-right label'.format(idx)) + # Bidi rule 6 + if direction in ['L', 'EN']: + valid_ending = True + elif direction != 'NSM': + valid_ending = False + + if not valid_ending: + raise IDNABidiError('Label ends with illegal codepoint directionality') + + return True + + +def check_initial_combiner(label): + + if unicodedata.category(label[0])[0] == 'M': + raise IDNAError('Label begins with an illegal combining character') + return True + + +def check_hyphen_ok(label): + + if label[2:4] == '--': + raise IDNAError('Label has 
disallowed hyphens in 3rd and 4th position') + if label[0] == '-' or label[-1] == '-': + raise IDNAError('Label must not start or end with a hyphen') + return True + + +def check_nfc(label): + + if unicodedata.normalize('NFC', label) != label: + raise IDNAError('Label must be in Normalization Form C') + + +def valid_contextj(label, pos): + + cp_value = ord(label[pos]) + + if cp_value == 0x200c: + + if pos > 0: + if _combining_class(ord(label[pos - 1])) == _virama_combining_class: + return True + + ok = False + for i in range(pos-1, -1, -1): + joining_type = idnadata.joining_types.get(ord(label[i])) + if joining_type == ord('T'): + continue + if joining_type in [ord('L'), ord('D')]: + ok = True + break + + if not ok: + return False + + ok = False + for i in range(pos+1, len(label)): + joining_type = idnadata.joining_types.get(ord(label[i])) + if joining_type == ord('T'): + continue + if joining_type in [ord('R'), ord('D')]: + ok = True + break + return ok + + if cp_value == 0x200d: + + if pos > 0: + if _combining_class(ord(label[pos - 1])) == _virama_combining_class: + return True + return False + + else: + + return False + + +def valid_contexto(label, pos, exception=False): + + cp_value = ord(label[pos]) + + if cp_value == 0x00b7: + if 0 < pos < len(label)-1: + if ord(label[pos - 1]) == 0x006c and ord(label[pos + 1]) == 0x006c: + return True + return False + + elif cp_value == 0x0375: + if pos < len(label)-1 and len(label) > 1: + return _is_script(label[pos + 1], 'Greek') + return False + + elif cp_value == 0x05f3 or cp_value == 0x05f4: + if pos > 0: + return _is_script(label[pos - 1], 'Hebrew') + return False + + elif cp_value == 0x30fb: + for cp in label: + if cp == u'\u30fb': + continue + if _is_script(cp, 'Hiragana') or _is_script(cp, 'Katakana') or _is_script(cp, 'Han'): + return True + return False + + elif 0x660 <= cp_value <= 0x669: + for cp in label: + if 0x6f0 <= ord(cp) <= 0x06f9: + return False + return True + + elif 0x6f0 <= cp_value <= 0x6f9: + for cp in label: + if 0x660 <= ord(cp) <= 0x0669: + return False + return True + + +def check_label(label): + + if isinstance(label, (bytes, bytearray)): + label = label.decode('utf-8') + if len(label) == 0: + raise IDNAError('Empty Label') + + check_nfc(label) + check_hyphen_ok(label) + check_initial_combiner(label) + + for (pos, cp) in enumerate(label): + cp_value = ord(cp) + if intranges_contain(cp_value, idnadata.codepoint_classes['PVALID']): + continue + elif intranges_contain(cp_value, idnadata.codepoint_classes['CONTEXTJ']): + try: + if not valid_contextj(label, pos): + raise InvalidCodepointContext('Joiner {0} not allowed at position {1} in {2}'.format( + _unot(cp_value), pos+1, repr(label))) + except ValueError: + raise IDNAError('Unknown codepoint adjacent to joiner {0} at position {1} in {2}'.format( + _unot(cp_value), pos+1, repr(label))) + elif intranges_contain(cp_value, idnadata.codepoint_classes['CONTEXTO']): + if not valid_contexto(label, pos): + raise InvalidCodepointContext('Codepoint {0} not allowed at position {1} in {2}'.format(_unot(cp_value), pos+1, repr(label))) + else: + raise InvalidCodepoint('Codepoint {0} at position {1} of {2} not allowed'.format(_unot(cp_value), pos+1, repr(label))) + + check_bidi(label) + + +def alabel(label): + + try: + label = label.encode('ascii') + try: + ulabel(label) + except IDNAError: + raise IDNAError('The label {0} is not a valid A-label'.format(label)) + if not valid_label_length(label): + raise IDNAError('Label too long') + return label + except UnicodeEncodeError: + pass + + 
if not label: + raise IDNAError('No Input') + + label = unicode(label) + check_label(label) + label = _punycode(label) + label = _alabel_prefix + label + + if not valid_label_length(label): + raise IDNAError('Label too long') + + return label + + +def ulabel(label): + + if not isinstance(label, (bytes, bytearray)): + try: + label = label.encode('ascii') + except UnicodeEncodeError: + check_label(label) + return label + + label = label.lower() + if label.startswith(_alabel_prefix): + label = label[len(_alabel_prefix):] + else: + check_label(label) + return label.decode('ascii') + + label = label.decode('punycode') + check_label(label) + return label + + +def uts46_remap(domain, std3_rules=True, transitional=False): + """Re-map the characters in the string according to UTS46 processing.""" + from .uts46data import uts46data + output = u"" + try: + for pos, char in enumerate(domain): + code_point = ord(char) + uts46row = uts46data[code_point if code_point < 256 else + bisect.bisect_left(uts46data, (code_point, "Z")) - 1] + status = uts46row[1] + replacement = uts46row[2] if len(uts46row) == 3 else None + if (status == "V" or + (status == "D" and not transitional) or + (status == "3" and not std3_rules and replacement is None)): + output += char + elif replacement is not None and (status == "M" or + (status == "3" and not std3_rules) or + (status == "D" and transitional)): + output += replacement + elif status != "I": + raise IndexError() + return unicodedata.normalize("NFC", output) + except IndexError: + raise InvalidCodepoint( + "Codepoint {0} not allowed at position {1} in {2}".format( + _unot(code_point), pos + 1, repr(domain))) + + +def encode(s, strict=False, uts46=False, std3_rules=False, transitional=False): + + if isinstance(s, (bytes, bytearray)): + s = s.decode("ascii") + if uts46: + s = uts46_remap(s, std3_rules, transitional) + trailing_dot = False + result = [] + if strict: + labels = s.split('.') + else: + labels = _unicode_dots_re.split(s) + if not labels or labels == ['']: + raise IDNAError('Empty domain') + if labels[-1] == '': + del labels[-1] + trailing_dot = True + for label in labels: + s = alabel(label) + if s: + result.append(s) + else: + raise IDNAError('Empty label') + if trailing_dot: + result.append(b'') + s = b'.'.join(result) + if not valid_string_length(s, trailing_dot): + raise IDNAError('Domain too long') + return s + + +def decode(s, strict=False, uts46=False, std3_rules=False): + + if isinstance(s, (bytes, bytearray)): + s = s.decode("ascii") + if uts46: + s = uts46_remap(s, std3_rules, False) + trailing_dot = False + result = [] + if not strict: + labels = _unicode_dots_re.split(s) + else: + labels = s.split(u'.') + if not labels or labels == ['']: + raise IDNAError('Empty domain') + if not labels[-1]: + del labels[-1] + trailing_dot = True + for label in labels: + s = ulabel(label) + if s: + result.append(s) + else: + raise IDNAError('Empty label') + if trailing_dot: + result.append(u'') + return u'.'.join(result) diff --git a/env/lib/python3.6/site-packages/idna/idnadata.py b/env/lib/python3.6/site-packages/idna/idnadata.py new file mode 100755 index 0000000..17974e2 --- /dev/null +++ b/env/lib/python3.6/site-packages/idna/idnadata.py @@ -0,0 +1,1893 @@ +# This file is automatically generated by tools/idna-data + +__version__ = "10.0.0" +scripts = { + 'Greek': ( + 0x37000000374, + 0x37500000378, + 0x37a0000037e, + 0x37f00000380, + 0x38400000385, + 0x38600000387, + 0x3880000038b, + 0x38c0000038d, + 0x38e000003a2, + 0x3a3000003e2, + 0x3f000000400, + 
0x1d2600001d2b, + 0x1d5d00001d62, + 0x1d6600001d6b, + 0x1dbf00001dc0, + 0x1f0000001f16, + 0x1f1800001f1e, + 0x1f2000001f46, + 0x1f4800001f4e, + 0x1f5000001f58, + 0x1f5900001f5a, + 0x1f5b00001f5c, + 0x1f5d00001f5e, + 0x1f5f00001f7e, + 0x1f8000001fb5, + 0x1fb600001fc5, + 0x1fc600001fd4, + 0x1fd600001fdc, + 0x1fdd00001ff0, + 0x1ff200001ff5, + 0x1ff600001fff, + 0x212600002127, + 0xab650000ab66, + 0x101400001018f, + 0x101a0000101a1, + 0x1d2000001d246, + ), + 'Han': ( + 0x2e8000002e9a, + 0x2e9b00002ef4, + 0x2f0000002fd6, + 0x300500003006, + 0x300700003008, + 0x30210000302a, + 0x30380000303c, + 0x340000004db6, + 0x4e0000009feb, + 0xf9000000fa6e, + 0xfa700000fada, + 0x200000002a6d7, + 0x2a7000002b735, + 0x2b7400002b81e, + 0x2b8200002cea2, + 0x2ceb00002ebe1, + 0x2f8000002fa1e, + ), + 'Hebrew': ( + 0x591000005c8, + 0x5d0000005eb, + 0x5f0000005f5, + 0xfb1d0000fb37, + 0xfb380000fb3d, + 0xfb3e0000fb3f, + 0xfb400000fb42, + 0xfb430000fb45, + 0xfb460000fb50, + ), + 'Hiragana': ( + 0x304100003097, + 0x309d000030a0, + 0x1b0010001b11f, + 0x1f2000001f201, + ), + 'Katakana': ( + 0x30a1000030fb, + 0x30fd00003100, + 0x31f000003200, + 0x32d0000032ff, + 0x330000003358, + 0xff660000ff70, + 0xff710000ff9e, + 0x1b0000001b001, + ), +} +joining_types = { + 0x600: 85, + 0x601: 85, + 0x602: 85, + 0x603: 85, + 0x604: 85, + 0x605: 85, + 0x608: 85, + 0x60b: 85, + 0x620: 68, + 0x621: 85, + 0x622: 82, + 0x623: 82, + 0x624: 82, + 0x625: 82, + 0x626: 68, + 0x627: 82, + 0x628: 68, + 0x629: 82, + 0x62a: 68, + 0x62b: 68, + 0x62c: 68, + 0x62d: 68, + 0x62e: 68, + 0x62f: 82, + 0x630: 82, + 0x631: 82, + 0x632: 82, + 0x633: 68, + 0x634: 68, + 0x635: 68, + 0x636: 68, + 0x637: 68, + 0x638: 68, + 0x639: 68, + 0x63a: 68, + 0x63b: 68, + 0x63c: 68, + 0x63d: 68, + 0x63e: 68, + 0x63f: 68, + 0x640: 67, + 0x641: 68, + 0x642: 68, + 0x643: 68, + 0x644: 68, + 0x645: 68, + 0x646: 68, + 0x647: 68, + 0x648: 82, + 0x649: 68, + 0x64a: 68, + 0x66e: 68, + 0x66f: 68, + 0x671: 82, + 0x672: 82, + 0x673: 82, + 0x674: 85, + 0x675: 82, + 0x676: 82, + 0x677: 82, + 0x678: 68, + 0x679: 68, + 0x67a: 68, + 0x67b: 68, + 0x67c: 68, + 0x67d: 68, + 0x67e: 68, + 0x67f: 68, + 0x680: 68, + 0x681: 68, + 0x682: 68, + 0x683: 68, + 0x684: 68, + 0x685: 68, + 0x686: 68, + 0x687: 68, + 0x688: 82, + 0x689: 82, + 0x68a: 82, + 0x68b: 82, + 0x68c: 82, + 0x68d: 82, + 0x68e: 82, + 0x68f: 82, + 0x690: 82, + 0x691: 82, + 0x692: 82, + 0x693: 82, + 0x694: 82, + 0x695: 82, + 0x696: 82, + 0x697: 82, + 0x698: 82, + 0x699: 82, + 0x69a: 68, + 0x69b: 68, + 0x69c: 68, + 0x69d: 68, + 0x69e: 68, + 0x69f: 68, + 0x6a0: 68, + 0x6a1: 68, + 0x6a2: 68, + 0x6a3: 68, + 0x6a4: 68, + 0x6a5: 68, + 0x6a6: 68, + 0x6a7: 68, + 0x6a8: 68, + 0x6a9: 68, + 0x6aa: 68, + 0x6ab: 68, + 0x6ac: 68, + 0x6ad: 68, + 0x6ae: 68, + 0x6af: 68, + 0x6b0: 68, + 0x6b1: 68, + 0x6b2: 68, + 0x6b3: 68, + 0x6b4: 68, + 0x6b5: 68, + 0x6b6: 68, + 0x6b7: 68, + 0x6b8: 68, + 0x6b9: 68, + 0x6ba: 68, + 0x6bb: 68, + 0x6bc: 68, + 0x6bd: 68, + 0x6be: 68, + 0x6bf: 68, + 0x6c0: 82, + 0x6c1: 68, + 0x6c2: 68, + 0x6c3: 82, + 0x6c4: 82, + 0x6c5: 82, + 0x6c6: 82, + 0x6c7: 82, + 0x6c8: 82, + 0x6c9: 82, + 0x6ca: 82, + 0x6cb: 82, + 0x6cc: 68, + 0x6cd: 82, + 0x6ce: 68, + 0x6cf: 82, + 0x6d0: 68, + 0x6d1: 68, + 0x6d2: 82, + 0x6d3: 82, + 0x6d5: 82, + 0x6dd: 85, + 0x6ee: 82, + 0x6ef: 82, + 0x6fa: 68, + 0x6fb: 68, + 0x6fc: 68, + 0x6ff: 68, + 0x710: 82, + 0x712: 68, + 0x713: 68, + 0x714: 68, + 0x715: 82, + 0x716: 82, + 0x717: 82, + 0x718: 82, + 0x719: 82, + 0x71a: 68, + 0x71b: 68, + 0x71c: 68, + 0x71d: 68, + 0x71e: 82, + 0x71f: 68, + 0x720: 68, + 0x721: 68, + 0x722: 
68, + 0x723: 68, + 0x724: 68, + 0x725: 68, + 0x726: 68, + 0x727: 68, + 0x728: 82, + 0x729: 68, + 0x72a: 82, + 0x72b: 68, + 0x72c: 82, + 0x72d: 68, + 0x72e: 68, + 0x72f: 82, + 0x74d: 82, + 0x74e: 68, + 0x74f: 68, + 0x750: 68, + 0x751: 68, + 0x752: 68, + 0x753: 68, + 0x754: 68, + 0x755: 68, + 0x756: 68, + 0x757: 68, + 0x758: 68, + 0x759: 82, + 0x75a: 82, + 0x75b: 82, + 0x75c: 68, + 0x75d: 68, + 0x75e: 68, + 0x75f: 68, + 0x760: 68, + 0x761: 68, + 0x762: 68, + 0x763: 68, + 0x764: 68, + 0x765: 68, + 0x766: 68, + 0x767: 68, + 0x768: 68, + 0x769: 68, + 0x76a: 68, + 0x76b: 82, + 0x76c: 82, + 0x76d: 68, + 0x76e: 68, + 0x76f: 68, + 0x770: 68, + 0x771: 82, + 0x772: 68, + 0x773: 82, + 0x774: 82, + 0x775: 68, + 0x776: 68, + 0x777: 68, + 0x778: 82, + 0x779: 82, + 0x77a: 68, + 0x77b: 68, + 0x77c: 68, + 0x77d: 68, + 0x77e: 68, + 0x77f: 68, + 0x7ca: 68, + 0x7cb: 68, + 0x7cc: 68, + 0x7cd: 68, + 0x7ce: 68, + 0x7cf: 68, + 0x7d0: 68, + 0x7d1: 68, + 0x7d2: 68, + 0x7d3: 68, + 0x7d4: 68, + 0x7d5: 68, + 0x7d6: 68, + 0x7d7: 68, + 0x7d8: 68, + 0x7d9: 68, + 0x7da: 68, + 0x7db: 68, + 0x7dc: 68, + 0x7dd: 68, + 0x7de: 68, + 0x7df: 68, + 0x7e0: 68, + 0x7e1: 68, + 0x7e2: 68, + 0x7e3: 68, + 0x7e4: 68, + 0x7e5: 68, + 0x7e6: 68, + 0x7e7: 68, + 0x7e8: 68, + 0x7e9: 68, + 0x7ea: 68, + 0x7fa: 67, + 0x840: 82, + 0x841: 68, + 0x842: 68, + 0x843: 68, + 0x844: 68, + 0x845: 68, + 0x846: 82, + 0x847: 82, + 0x848: 68, + 0x849: 82, + 0x84a: 68, + 0x84b: 68, + 0x84c: 68, + 0x84d: 68, + 0x84e: 68, + 0x84f: 68, + 0x850: 68, + 0x851: 68, + 0x852: 68, + 0x853: 68, + 0x854: 82, + 0x855: 68, + 0x856: 85, + 0x857: 85, + 0x858: 85, + 0x860: 68, + 0x861: 85, + 0x862: 68, + 0x863: 68, + 0x864: 68, + 0x865: 68, + 0x866: 85, + 0x867: 82, + 0x868: 68, + 0x869: 82, + 0x86a: 82, + 0x8a0: 68, + 0x8a1: 68, + 0x8a2: 68, + 0x8a3: 68, + 0x8a4: 68, + 0x8a5: 68, + 0x8a6: 68, + 0x8a7: 68, + 0x8a8: 68, + 0x8a9: 68, + 0x8aa: 82, + 0x8ab: 82, + 0x8ac: 82, + 0x8ad: 85, + 0x8ae: 82, + 0x8af: 68, + 0x8b0: 68, + 0x8b1: 82, + 0x8b2: 82, + 0x8b3: 68, + 0x8b4: 68, + 0x8b6: 68, + 0x8b7: 68, + 0x8b8: 68, + 0x8b9: 82, + 0x8ba: 68, + 0x8bb: 68, + 0x8bc: 68, + 0x8bd: 68, + 0x8e2: 85, + 0x1806: 85, + 0x1807: 68, + 0x180a: 67, + 0x180e: 85, + 0x1820: 68, + 0x1821: 68, + 0x1822: 68, + 0x1823: 68, + 0x1824: 68, + 0x1825: 68, + 0x1826: 68, + 0x1827: 68, + 0x1828: 68, + 0x1829: 68, + 0x182a: 68, + 0x182b: 68, + 0x182c: 68, + 0x182d: 68, + 0x182e: 68, + 0x182f: 68, + 0x1830: 68, + 0x1831: 68, + 0x1832: 68, + 0x1833: 68, + 0x1834: 68, + 0x1835: 68, + 0x1836: 68, + 0x1837: 68, + 0x1838: 68, + 0x1839: 68, + 0x183a: 68, + 0x183b: 68, + 0x183c: 68, + 0x183d: 68, + 0x183e: 68, + 0x183f: 68, + 0x1840: 68, + 0x1841: 68, + 0x1842: 68, + 0x1843: 68, + 0x1844: 68, + 0x1845: 68, + 0x1846: 68, + 0x1847: 68, + 0x1848: 68, + 0x1849: 68, + 0x184a: 68, + 0x184b: 68, + 0x184c: 68, + 0x184d: 68, + 0x184e: 68, + 0x184f: 68, + 0x1850: 68, + 0x1851: 68, + 0x1852: 68, + 0x1853: 68, + 0x1854: 68, + 0x1855: 68, + 0x1856: 68, + 0x1857: 68, + 0x1858: 68, + 0x1859: 68, + 0x185a: 68, + 0x185b: 68, + 0x185c: 68, + 0x185d: 68, + 0x185e: 68, + 0x185f: 68, + 0x1860: 68, + 0x1861: 68, + 0x1862: 68, + 0x1863: 68, + 0x1864: 68, + 0x1865: 68, + 0x1866: 68, + 0x1867: 68, + 0x1868: 68, + 0x1869: 68, + 0x186a: 68, + 0x186b: 68, + 0x186c: 68, + 0x186d: 68, + 0x186e: 68, + 0x186f: 68, + 0x1870: 68, + 0x1871: 68, + 0x1872: 68, + 0x1873: 68, + 0x1874: 68, + 0x1875: 68, + 0x1876: 68, + 0x1877: 68, + 0x1880: 85, + 0x1881: 85, + 0x1882: 85, + 0x1883: 85, + 0x1884: 85, + 0x1885: 84, + 0x1886: 84, + 0x1887: 68, + 0x1888: 68, + 
0x1889: 68, + 0x188a: 68, + 0x188b: 68, + 0x188c: 68, + 0x188d: 68, + 0x188e: 68, + 0x188f: 68, + 0x1890: 68, + 0x1891: 68, + 0x1892: 68, + 0x1893: 68, + 0x1894: 68, + 0x1895: 68, + 0x1896: 68, + 0x1897: 68, + 0x1898: 68, + 0x1899: 68, + 0x189a: 68, + 0x189b: 68, + 0x189c: 68, + 0x189d: 68, + 0x189e: 68, + 0x189f: 68, + 0x18a0: 68, + 0x18a1: 68, + 0x18a2: 68, + 0x18a3: 68, + 0x18a4: 68, + 0x18a5: 68, + 0x18a6: 68, + 0x18a7: 68, + 0x18a8: 68, + 0x18aa: 68, + 0x200c: 85, + 0x200d: 67, + 0x202f: 85, + 0x2066: 85, + 0x2067: 85, + 0x2068: 85, + 0x2069: 85, + 0xa840: 68, + 0xa841: 68, + 0xa842: 68, + 0xa843: 68, + 0xa844: 68, + 0xa845: 68, + 0xa846: 68, + 0xa847: 68, + 0xa848: 68, + 0xa849: 68, + 0xa84a: 68, + 0xa84b: 68, + 0xa84c: 68, + 0xa84d: 68, + 0xa84e: 68, + 0xa84f: 68, + 0xa850: 68, + 0xa851: 68, + 0xa852: 68, + 0xa853: 68, + 0xa854: 68, + 0xa855: 68, + 0xa856: 68, + 0xa857: 68, + 0xa858: 68, + 0xa859: 68, + 0xa85a: 68, + 0xa85b: 68, + 0xa85c: 68, + 0xa85d: 68, + 0xa85e: 68, + 0xa85f: 68, + 0xa860: 68, + 0xa861: 68, + 0xa862: 68, + 0xa863: 68, + 0xa864: 68, + 0xa865: 68, + 0xa866: 68, + 0xa867: 68, + 0xa868: 68, + 0xa869: 68, + 0xa86a: 68, + 0xa86b: 68, + 0xa86c: 68, + 0xa86d: 68, + 0xa86e: 68, + 0xa86f: 68, + 0xa870: 68, + 0xa871: 68, + 0xa872: 76, + 0xa873: 85, + 0x10ac0: 68, + 0x10ac1: 68, + 0x10ac2: 68, + 0x10ac3: 68, + 0x10ac4: 68, + 0x10ac5: 82, + 0x10ac6: 85, + 0x10ac7: 82, + 0x10ac8: 85, + 0x10ac9: 82, + 0x10aca: 82, + 0x10acb: 85, + 0x10acc: 85, + 0x10acd: 76, + 0x10ace: 82, + 0x10acf: 82, + 0x10ad0: 82, + 0x10ad1: 82, + 0x10ad2: 82, + 0x10ad3: 68, + 0x10ad4: 68, + 0x10ad5: 68, + 0x10ad6: 68, + 0x10ad7: 76, + 0x10ad8: 68, + 0x10ad9: 68, + 0x10ada: 68, + 0x10adb: 68, + 0x10adc: 68, + 0x10add: 82, + 0x10ade: 68, + 0x10adf: 68, + 0x10ae0: 68, + 0x10ae1: 82, + 0x10ae2: 85, + 0x10ae3: 85, + 0x10ae4: 82, + 0x10aeb: 68, + 0x10aec: 68, + 0x10aed: 68, + 0x10aee: 68, + 0x10aef: 82, + 0x10b80: 68, + 0x10b81: 82, + 0x10b82: 68, + 0x10b83: 82, + 0x10b84: 82, + 0x10b85: 82, + 0x10b86: 68, + 0x10b87: 68, + 0x10b88: 68, + 0x10b89: 82, + 0x10b8a: 68, + 0x10b8b: 68, + 0x10b8c: 82, + 0x10b8d: 68, + 0x10b8e: 82, + 0x10b8f: 82, + 0x10b90: 68, + 0x10b91: 82, + 0x10ba9: 82, + 0x10baa: 82, + 0x10bab: 82, + 0x10bac: 82, + 0x10bad: 68, + 0x10bae: 68, + 0x10baf: 85, + 0x1e900: 68, + 0x1e901: 68, + 0x1e902: 68, + 0x1e903: 68, + 0x1e904: 68, + 0x1e905: 68, + 0x1e906: 68, + 0x1e907: 68, + 0x1e908: 68, + 0x1e909: 68, + 0x1e90a: 68, + 0x1e90b: 68, + 0x1e90c: 68, + 0x1e90d: 68, + 0x1e90e: 68, + 0x1e90f: 68, + 0x1e910: 68, + 0x1e911: 68, + 0x1e912: 68, + 0x1e913: 68, + 0x1e914: 68, + 0x1e915: 68, + 0x1e916: 68, + 0x1e917: 68, + 0x1e918: 68, + 0x1e919: 68, + 0x1e91a: 68, + 0x1e91b: 68, + 0x1e91c: 68, + 0x1e91d: 68, + 0x1e91e: 68, + 0x1e91f: 68, + 0x1e920: 68, + 0x1e921: 68, + 0x1e922: 68, + 0x1e923: 68, + 0x1e924: 68, + 0x1e925: 68, + 0x1e926: 68, + 0x1e927: 68, + 0x1e928: 68, + 0x1e929: 68, + 0x1e92a: 68, + 0x1e92b: 68, + 0x1e92c: 68, + 0x1e92d: 68, + 0x1e92e: 68, + 0x1e92f: 68, + 0x1e930: 68, + 0x1e931: 68, + 0x1e932: 68, + 0x1e933: 68, + 0x1e934: 68, + 0x1e935: 68, + 0x1e936: 68, + 0x1e937: 68, + 0x1e938: 68, + 0x1e939: 68, + 0x1e93a: 68, + 0x1e93b: 68, + 0x1e93c: 68, + 0x1e93d: 68, + 0x1e93e: 68, + 0x1e93f: 68, + 0x1e940: 68, + 0x1e941: 68, + 0x1e942: 68, + 0x1e943: 68, +} +codepoint_classes = { + 'PVALID': ( + 0x2d0000002e, + 0x300000003a, + 0x610000007b, + 0xdf000000f7, + 0xf800000100, + 0x10100000102, + 0x10300000104, + 0x10500000106, + 0x10700000108, + 0x1090000010a, + 0x10b0000010c, + 0x10d0000010e, + 
0x10f00000110, + 0x11100000112, + 0x11300000114, + 0x11500000116, + 0x11700000118, + 0x1190000011a, + 0x11b0000011c, + 0x11d0000011e, + 0x11f00000120, + 0x12100000122, + 0x12300000124, + 0x12500000126, + 0x12700000128, + 0x1290000012a, + 0x12b0000012c, + 0x12d0000012e, + 0x12f00000130, + 0x13100000132, + 0x13500000136, + 0x13700000139, + 0x13a0000013b, + 0x13c0000013d, + 0x13e0000013f, + 0x14200000143, + 0x14400000145, + 0x14600000147, + 0x14800000149, + 0x14b0000014c, + 0x14d0000014e, + 0x14f00000150, + 0x15100000152, + 0x15300000154, + 0x15500000156, + 0x15700000158, + 0x1590000015a, + 0x15b0000015c, + 0x15d0000015e, + 0x15f00000160, + 0x16100000162, + 0x16300000164, + 0x16500000166, + 0x16700000168, + 0x1690000016a, + 0x16b0000016c, + 0x16d0000016e, + 0x16f00000170, + 0x17100000172, + 0x17300000174, + 0x17500000176, + 0x17700000178, + 0x17a0000017b, + 0x17c0000017d, + 0x17e0000017f, + 0x18000000181, + 0x18300000184, + 0x18500000186, + 0x18800000189, + 0x18c0000018e, + 0x19200000193, + 0x19500000196, + 0x1990000019c, + 0x19e0000019f, + 0x1a1000001a2, + 0x1a3000001a4, + 0x1a5000001a6, + 0x1a8000001a9, + 0x1aa000001ac, + 0x1ad000001ae, + 0x1b0000001b1, + 0x1b4000001b5, + 0x1b6000001b7, + 0x1b9000001bc, + 0x1bd000001c4, + 0x1ce000001cf, + 0x1d0000001d1, + 0x1d2000001d3, + 0x1d4000001d5, + 0x1d6000001d7, + 0x1d8000001d9, + 0x1da000001db, + 0x1dc000001de, + 0x1df000001e0, + 0x1e1000001e2, + 0x1e3000001e4, + 0x1e5000001e6, + 0x1e7000001e8, + 0x1e9000001ea, + 0x1eb000001ec, + 0x1ed000001ee, + 0x1ef000001f1, + 0x1f5000001f6, + 0x1f9000001fa, + 0x1fb000001fc, + 0x1fd000001fe, + 0x1ff00000200, + 0x20100000202, + 0x20300000204, + 0x20500000206, + 0x20700000208, + 0x2090000020a, + 0x20b0000020c, + 0x20d0000020e, + 0x20f00000210, + 0x21100000212, + 0x21300000214, + 0x21500000216, + 0x21700000218, + 0x2190000021a, + 0x21b0000021c, + 0x21d0000021e, + 0x21f00000220, + 0x22100000222, + 0x22300000224, + 0x22500000226, + 0x22700000228, + 0x2290000022a, + 0x22b0000022c, + 0x22d0000022e, + 0x22f00000230, + 0x23100000232, + 0x2330000023a, + 0x23c0000023d, + 0x23f00000241, + 0x24200000243, + 0x24700000248, + 0x2490000024a, + 0x24b0000024c, + 0x24d0000024e, + 0x24f000002b0, + 0x2b9000002c2, + 0x2c6000002d2, + 0x2ec000002ed, + 0x2ee000002ef, + 0x30000000340, + 0x34200000343, + 0x3460000034f, + 0x35000000370, + 0x37100000372, + 0x37300000374, + 0x37700000378, + 0x37b0000037e, + 0x39000000391, + 0x3ac000003cf, + 0x3d7000003d8, + 0x3d9000003da, + 0x3db000003dc, + 0x3dd000003de, + 0x3df000003e0, + 0x3e1000003e2, + 0x3e3000003e4, + 0x3e5000003e6, + 0x3e7000003e8, + 0x3e9000003ea, + 0x3eb000003ec, + 0x3ed000003ee, + 0x3ef000003f0, + 0x3f3000003f4, + 0x3f8000003f9, + 0x3fb000003fd, + 0x43000000460, + 0x46100000462, + 0x46300000464, + 0x46500000466, + 0x46700000468, + 0x4690000046a, + 0x46b0000046c, + 0x46d0000046e, + 0x46f00000470, + 0x47100000472, + 0x47300000474, + 0x47500000476, + 0x47700000478, + 0x4790000047a, + 0x47b0000047c, + 0x47d0000047e, + 0x47f00000480, + 0x48100000482, + 0x48300000488, + 0x48b0000048c, + 0x48d0000048e, + 0x48f00000490, + 0x49100000492, + 0x49300000494, + 0x49500000496, + 0x49700000498, + 0x4990000049a, + 0x49b0000049c, + 0x49d0000049e, + 0x49f000004a0, + 0x4a1000004a2, + 0x4a3000004a4, + 0x4a5000004a6, + 0x4a7000004a8, + 0x4a9000004aa, + 0x4ab000004ac, + 0x4ad000004ae, + 0x4af000004b0, + 0x4b1000004b2, + 0x4b3000004b4, + 0x4b5000004b6, + 0x4b7000004b8, + 0x4b9000004ba, + 0x4bb000004bc, + 0x4bd000004be, + 0x4bf000004c0, + 0x4c2000004c3, + 0x4c4000004c5, + 0x4c6000004c7, + 0x4c8000004c9, + 
0x4ca000004cb, + 0x4cc000004cd, + 0x4ce000004d0, + 0x4d1000004d2, + 0x4d3000004d4, + 0x4d5000004d6, + 0x4d7000004d8, + 0x4d9000004da, + 0x4db000004dc, + 0x4dd000004de, + 0x4df000004e0, + 0x4e1000004e2, + 0x4e3000004e4, + 0x4e5000004e6, + 0x4e7000004e8, + 0x4e9000004ea, + 0x4eb000004ec, + 0x4ed000004ee, + 0x4ef000004f0, + 0x4f1000004f2, + 0x4f3000004f4, + 0x4f5000004f6, + 0x4f7000004f8, + 0x4f9000004fa, + 0x4fb000004fc, + 0x4fd000004fe, + 0x4ff00000500, + 0x50100000502, + 0x50300000504, + 0x50500000506, + 0x50700000508, + 0x5090000050a, + 0x50b0000050c, + 0x50d0000050e, + 0x50f00000510, + 0x51100000512, + 0x51300000514, + 0x51500000516, + 0x51700000518, + 0x5190000051a, + 0x51b0000051c, + 0x51d0000051e, + 0x51f00000520, + 0x52100000522, + 0x52300000524, + 0x52500000526, + 0x52700000528, + 0x5290000052a, + 0x52b0000052c, + 0x52d0000052e, + 0x52f00000530, + 0x5590000055a, + 0x56100000587, + 0x591000005be, + 0x5bf000005c0, + 0x5c1000005c3, + 0x5c4000005c6, + 0x5c7000005c8, + 0x5d0000005eb, + 0x5f0000005f3, + 0x6100000061b, + 0x62000000640, + 0x64100000660, + 0x66e00000675, + 0x679000006d4, + 0x6d5000006dd, + 0x6df000006e9, + 0x6ea000006f0, + 0x6fa00000700, + 0x7100000074b, + 0x74d000007b2, + 0x7c0000007f6, + 0x8000000082e, + 0x8400000085c, + 0x8600000086b, + 0x8a0000008b5, + 0x8b6000008be, + 0x8d4000008e2, + 0x8e300000958, + 0x96000000964, + 0x96600000970, + 0x97100000984, + 0x9850000098d, + 0x98f00000991, + 0x993000009a9, + 0x9aa000009b1, + 0x9b2000009b3, + 0x9b6000009ba, + 0x9bc000009c5, + 0x9c7000009c9, + 0x9cb000009cf, + 0x9d7000009d8, + 0x9e0000009e4, + 0x9e6000009f2, + 0x9fc000009fd, + 0xa0100000a04, + 0xa0500000a0b, + 0xa0f00000a11, + 0xa1300000a29, + 0xa2a00000a31, + 0xa3200000a33, + 0xa3500000a36, + 0xa3800000a3a, + 0xa3c00000a3d, + 0xa3e00000a43, + 0xa4700000a49, + 0xa4b00000a4e, + 0xa5100000a52, + 0xa5c00000a5d, + 0xa6600000a76, + 0xa8100000a84, + 0xa8500000a8e, + 0xa8f00000a92, + 0xa9300000aa9, + 0xaaa00000ab1, + 0xab200000ab4, + 0xab500000aba, + 0xabc00000ac6, + 0xac700000aca, + 0xacb00000ace, + 0xad000000ad1, + 0xae000000ae4, + 0xae600000af0, + 0xaf900000b00, + 0xb0100000b04, + 0xb0500000b0d, + 0xb0f00000b11, + 0xb1300000b29, + 0xb2a00000b31, + 0xb3200000b34, + 0xb3500000b3a, + 0xb3c00000b45, + 0xb4700000b49, + 0xb4b00000b4e, + 0xb5600000b58, + 0xb5f00000b64, + 0xb6600000b70, + 0xb7100000b72, + 0xb8200000b84, + 0xb8500000b8b, + 0xb8e00000b91, + 0xb9200000b96, + 0xb9900000b9b, + 0xb9c00000b9d, + 0xb9e00000ba0, + 0xba300000ba5, + 0xba800000bab, + 0xbae00000bba, + 0xbbe00000bc3, + 0xbc600000bc9, + 0xbca00000bce, + 0xbd000000bd1, + 0xbd700000bd8, + 0xbe600000bf0, + 0xc0000000c04, + 0xc0500000c0d, + 0xc0e00000c11, + 0xc1200000c29, + 0xc2a00000c3a, + 0xc3d00000c45, + 0xc4600000c49, + 0xc4a00000c4e, + 0xc5500000c57, + 0xc5800000c5b, + 0xc6000000c64, + 0xc6600000c70, + 0xc8000000c84, + 0xc8500000c8d, + 0xc8e00000c91, + 0xc9200000ca9, + 0xcaa00000cb4, + 0xcb500000cba, + 0xcbc00000cc5, + 0xcc600000cc9, + 0xcca00000cce, + 0xcd500000cd7, + 0xcde00000cdf, + 0xce000000ce4, + 0xce600000cf0, + 0xcf100000cf3, + 0xd0000000d04, + 0xd0500000d0d, + 0xd0e00000d11, + 0xd1200000d45, + 0xd4600000d49, + 0xd4a00000d4f, + 0xd5400000d58, + 0xd5f00000d64, + 0xd6600000d70, + 0xd7a00000d80, + 0xd8200000d84, + 0xd8500000d97, + 0xd9a00000db2, + 0xdb300000dbc, + 0xdbd00000dbe, + 0xdc000000dc7, + 0xdca00000dcb, + 0xdcf00000dd5, + 0xdd600000dd7, + 0xdd800000de0, + 0xde600000df0, + 0xdf200000df4, + 0xe0100000e33, + 0xe3400000e3b, + 0xe4000000e4f, + 0xe5000000e5a, + 0xe8100000e83, + 0xe8400000e85, + 0xe8700000e89, + 
0xe8a00000e8b, + 0xe8d00000e8e, + 0xe9400000e98, + 0xe9900000ea0, + 0xea100000ea4, + 0xea500000ea6, + 0xea700000ea8, + 0xeaa00000eac, + 0xead00000eb3, + 0xeb400000eba, + 0xebb00000ebe, + 0xec000000ec5, + 0xec600000ec7, + 0xec800000ece, + 0xed000000eda, + 0xede00000ee0, + 0xf0000000f01, + 0xf0b00000f0c, + 0xf1800000f1a, + 0xf2000000f2a, + 0xf3500000f36, + 0xf3700000f38, + 0xf3900000f3a, + 0xf3e00000f43, + 0xf4400000f48, + 0xf4900000f4d, + 0xf4e00000f52, + 0xf5300000f57, + 0xf5800000f5c, + 0xf5d00000f69, + 0xf6a00000f6d, + 0xf7100000f73, + 0xf7400000f75, + 0xf7a00000f81, + 0xf8200000f85, + 0xf8600000f93, + 0xf9400000f98, + 0xf9900000f9d, + 0xf9e00000fa2, + 0xfa300000fa7, + 0xfa800000fac, + 0xfad00000fb9, + 0xfba00000fbd, + 0xfc600000fc7, + 0x10000000104a, + 0x10500000109e, + 0x10d0000010fb, + 0x10fd00001100, + 0x120000001249, + 0x124a0000124e, + 0x125000001257, + 0x125800001259, + 0x125a0000125e, + 0x126000001289, + 0x128a0000128e, + 0x1290000012b1, + 0x12b2000012b6, + 0x12b8000012bf, + 0x12c0000012c1, + 0x12c2000012c6, + 0x12c8000012d7, + 0x12d800001311, + 0x131200001316, + 0x13180000135b, + 0x135d00001360, + 0x138000001390, + 0x13a0000013f6, + 0x14010000166d, + 0x166f00001680, + 0x16810000169b, + 0x16a0000016eb, + 0x16f1000016f9, + 0x17000000170d, + 0x170e00001715, + 0x172000001735, + 0x174000001754, + 0x17600000176d, + 0x176e00001771, + 0x177200001774, + 0x1780000017b4, + 0x17b6000017d4, + 0x17d7000017d8, + 0x17dc000017de, + 0x17e0000017ea, + 0x18100000181a, + 0x182000001878, + 0x1880000018ab, + 0x18b0000018f6, + 0x19000000191f, + 0x19200000192c, + 0x19300000193c, + 0x19460000196e, + 0x197000001975, + 0x1980000019ac, + 0x19b0000019ca, + 0x19d0000019da, + 0x1a0000001a1c, + 0x1a2000001a5f, + 0x1a6000001a7d, + 0x1a7f00001a8a, + 0x1a9000001a9a, + 0x1aa700001aa8, + 0x1ab000001abe, + 0x1b0000001b4c, + 0x1b5000001b5a, + 0x1b6b00001b74, + 0x1b8000001bf4, + 0x1c0000001c38, + 0x1c4000001c4a, + 0x1c4d00001c7e, + 0x1cd000001cd3, + 0x1cd400001cfa, + 0x1d0000001d2c, + 0x1d2f00001d30, + 0x1d3b00001d3c, + 0x1d4e00001d4f, + 0x1d6b00001d78, + 0x1d7900001d9b, + 0x1dc000001dfa, + 0x1dfb00001e00, + 0x1e0100001e02, + 0x1e0300001e04, + 0x1e0500001e06, + 0x1e0700001e08, + 0x1e0900001e0a, + 0x1e0b00001e0c, + 0x1e0d00001e0e, + 0x1e0f00001e10, + 0x1e1100001e12, + 0x1e1300001e14, + 0x1e1500001e16, + 0x1e1700001e18, + 0x1e1900001e1a, + 0x1e1b00001e1c, + 0x1e1d00001e1e, + 0x1e1f00001e20, + 0x1e2100001e22, + 0x1e2300001e24, + 0x1e2500001e26, + 0x1e2700001e28, + 0x1e2900001e2a, + 0x1e2b00001e2c, + 0x1e2d00001e2e, + 0x1e2f00001e30, + 0x1e3100001e32, + 0x1e3300001e34, + 0x1e3500001e36, + 0x1e3700001e38, + 0x1e3900001e3a, + 0x1e3b00001e3c, + 0x1e3d00001e3e, + 0x1e3f00001e40, + 0x1e4100001e42, + 0x1e4300001e44, + 0x1e4500001e46, + 0x1e4700001e48, + 0x1e4900001e4a, + 0x1e4b00001e4c, + 0x1e4d00001e4e, + 0x1e4f00001e50, + 0x1e5100001e52, + 0x1e5300001e54, + 0x1e5500001e56, + 0x1e5700001e58, + 0x1e5900001e5a, + 0x1e5b00001e5c, + 0x1e5d00001e5e, + 0x1e5f00001e60, + 0x1e6100001e62, + 0x1e6300001e64, + 0x1e6500001e66, + 0x1e6700001e68, + 0x1e6900001e6a, + 0x1e6b00001e6c, + 0x1e6d00001e6e, + 0x1e6f00001e70, + 0x1e7100001e72, + 0x1e7300001e74, + 0x1e7500001e76, + 0x1e7700001e78, + 0x1e7900001e7a, + 0x1e7b00001e7c, + 0x1e7d00001e7e, + 0x1e7f00001e80, + 0x1e8100001e82, + 0x1e8300001e84, + 0x1e8500001e86, + 0x1e8700001e88, + 0x1e8900001e8a, + 0x1e8b00001e8c, + 0x1e8d00001e8e, + 0x1e8f00001e90, + 0x1e9100001e92, + 0x1e9300001e94, + 0x1e9500001e9a, + 0x1e9c00001e9e, + 0x1e9f00001ea0, + 0x1ea100001ea2, + 0x1ea300001ea4, + 0x1ea500001ea6, 
+ 0x1ea700001ea8, + 0x1ea900001eaa, + 0x1eab00001eac, + 0x1ead00001eae, + 0x1eaf00001eb0, + 0x1eb100001eb2, + 0x1eb300001eb4, + 0x1eb500001eb6, + 0x1eb700001eb8, + 0x1eb900001eba, + 0x1ebb00001ebc, + 0x1ebd00001ebe, + 0x1ebf00001ec0, + 0x1ec100001ec2, + 0x1ec300001ec4, + 0x1ec500001ec6, + 0x1ec700001ec8, + 0x1ec900001eca, + 0x1ecb00001ecc, + 0x1ecd00001ece, + 0x1ecf00001ed0, + 0x1ed100001ed2, + 0x1ed300001ed4, + 0x1ed500001ed6, + 0x1ed700001ed8, + 0x1ed900001eda, + 0x1edb00001edc, + 0x1edd00001ede, + 0x1edf00001ee0, + 0x1ee100001ee2, + 0x1ee300001ee4, + 0x1ee500001ee6, + 0x1ee700001ee8, + 0x1ee900001eea, + 0x1eeb00001eec, + 0x1eed00001eee, + 0x1eef00001ef0, + 0x1ef100001ef2, + 0x1ef300001ef4, + 0x1ef500001ef6, + 0x1ef700001ef8, + 0x1ef900001efa, + 0x1efb00001efc, + 0x1efd00001efe, + 0x1eff00001f08, + 0x1f1000001f16, + 0x1f2000001f28, + 0x1f3000001f38, + 0x1f4000001f46, + 0x1f5000001f58, + 0x1f6000001f68, + 0x1f7000001f71, + 0x1f7200001f73, + 0x1f7400001f75, + 0x1f7600001f77, + 0x1f7800001f79, + 0x1f7a00001f7b, + 0x1f7c00001f7d, + 0x1fb000001fb2, + 0x1fb600001fb7, + 0x1fc600001fc7, + 0x1fd000001fd3, + 0x1fd600001fd8, + 0x1fe000001fe3, + 0x1fe400001fe8, + 0x1ff600001ff7, + 0x214e0000214f, + 0x218400002185, + 0x2c3000002c5f, + 0x2c6100002c62, + 0x2c6500002c67, + 0x2c6800002c69, + 0x2c6a00002c6b, + 0x2c6c00002c6d, + 0x2c7100002c72, + 0x2c7300002c75, + 0x2c7600002c7c, + 0x2c8100002c82, + 0x2c8300002c84, + 0x2c8500002c86, + 0x2c8700002c88, + 0x2c8900002c8a, + 0x2c8b00002c8c, + 0x2c8d00002c8e, + 0x2c8f00002c90, + 0x2c9100002c92, + 0x2c9300002c94, + 0x2c9500002c96, + 0x2c9700002c98, + 0x2c9900002c9a, + 0x2c9b00002c9c, + 0x2c9d00002c9e, + 0x2c9f00002ca0, + 0x2ca100002ca2, + 0x2ca300002ca4, + 0x2ca500002ca6, + 0x2ca700002ca8, + 0x2ca900002caa, + 0x2cab00002cac, + 0x2cad00002cae, + 0x2caf00002cb0, + 0x2cb100002cb2, + 0x2cb300002cb4, + 0x2cb500002cb6, + 0x2cb700002cb8, + 0x2cb900002cba, + 0x2cbb00002cbc, + 0x2cbd00002cbe, + 0x2cbf00002cc0, + 0x2cc100002cc2, + 0x2cc300002cc4, + 0x2cc500002cc6, + 0x2cc700002cc8, + 0x2cc900002cca, + 0x2ccb00002ccc, + 0x2ccd00002cce, + 0x2ccf00002cd0, + 0x2cd100002cd2, + 0x2cd300002cd4, + 0x2cd500002cd6, + 0x2cd700002cd8, + 0x2cd900002cda, + 0x2cdb00002cdc, + 0x2cdd00002cde, + 0x2cdf00002ce0, + 0x2ce100002ce2, + 0x2ce300002ce5, + 0x2cec00002ced, + 0x2cee00002cf2, + 0x2cf300002cf4, + 0x2d0000002d26, + 0x2d2700002d28, + 0x2d2d00002d2e, + 0x2d3000002d68, + 0x2d7f00002d97, + 0x2da000002da7, + 0x2da800002daf, + 0x2db000002db7, + 0x2db800002dbf, + 0x2dc000002dc7, + 0x2dc800002dcf, + 0x2dd000002dd7, + 0x2dd800002ddf, + 0x2de000002e00, + 0x2e2f00002e30, + 0x300500003008, + 0x302a0000302e, + 0x303c0000303d, + 0x304100003097, + 0x30990000309b, + 0x309d0000309f, + 0x30a1000030fb, + 0x30fc000030ff, + 0x31050000312f, + 0x31a0000031bb, + 0x31f000003200, + 0x340000004db6, + 0x4e0000009feb, + 0xa0000000a48d, + 0xa4d00000a4fe, + 0xa5000000a60d, + 0xa6100000a62c, + 0xa6410000a642, + 0xa6430000a644, + 0xa6450000a646, + 0xa6470000a648, + 0xa6490000a64a, + 0xa64b0000a64c, + 0xa64d0000a64e, + 0xa64f0000a650, + 0xa6510000a652, + 0xa6530000a654, + 0xa6550000a656, + 0xa6570000a658, + 0xa6590000a65a, + 0xa65b0000a65c, + 0xa65d0000a65e, + 0xa65f0000a660, + 0xa6610000a662, + 0xa6630000a664, + 0xa6650000a666, + 0xa6670000a668, + 0xa6690000a66a, + 0xa66b0000a66c, + 0xa66d0000a670, + 0xa6740000a67e, + 0xa67f0000a680, + 0xa6810000a682, + 0xa6830000a684, + 0xa6850000a686, + 0xa6870000a688, + 0xa6890000a68a, + 0xa68b0000a68c, + 0xa68d0000a68e, + 0xa68f0000a690, + 0xa6910000a692, + 0xa6930000a694, + 
0xa6950000a696, + 0xa6970000a698, + 0xa6990000a69a, + 0xa69b0000a69c, + 0xa69e0000a6e6, + 0xa6f00000a6f2, + 0xa7170000a720, + 0xa7230000a724, + 0xa7250000a726, + 0xa7270000a728, + 0xa7290000a72a, + 0xa72b0000a72c, + 0xa72d0000a72e, + 0xa72f0000a732, + 0xa7330000a734, + 0xa7350000a736, + 0xa7370000a738, + 0xa7390000a73a, + 0xa73b0000a73c, + 0xa73d0000a73e, + 0xa73f0000a740, + 0xa7410000a742, + 0xa7430000a744, + 0xa7450000a746, + 0xa7470000a748, + 0xa7490000a74a, + 0xa74b0000a74c, + 0xa74d0000a74e, + 0xa74f0000a750, + 0xa7510000a752, + 0xa7530000a754, + 0xa7550000a756, + 0xa7570000a758, + 0xa7590000a75a, + 0xa75b0000a75c, + 0xa75d0000a75e, + 0xa75f0000a760, + 0xa7610000a762, + 0xa7630000a764, + 0xa7650000a766, + 0xa7670000a768, + 0xa7690000a76a, + 0xa76b0000a76c, + 0xa76d0000a76e, + 0xa76f0000a770, + 0xa7710000a779, + 0xa77a0000a77b, + 0xa77c0000a77d, + 0xa77f0000a780, + 0xa7810000a782, + 0xa7830000a784, + 0xa7850000a786, + 0xa7870000a789, + 0xa78c0000a78d, + 0xa78e0000a790, + 0xa7910000a792, + 0xa7930000a796, + 0xa7970000a798, + 0xa7990000a79a, + 0xa79b0000a79c, + 0xa79d0000a79e, + 0xa79f0000a7a0, + 0xa7a10000a7a2, + 0xa7a30000a7a4, + 0xa7a50000a7a6, + 0xa7a70000a7a8, + 0xa7a90000a7aa, + 0xa7b50000a7b6, + 0xa7b70000a7b8, + 0xa7f70000a7f8, + 0xa7fa0000a828, + 0xa8400000a874, + 0xa8800000a8c6, + 0xa8d00000a8da, + 0xa8e00000a8f8, + 0xa8fb0000a8fc, + 0xa8fd0000a8fe, + 0xa9000000a92e, + 0xa9300000a954, + 0xa9800000a9c1, + 0xa9cf0000a9da, + 0xa9e00000a9ff, + 0xaa000000aa37, + 0xaa400000aa4e, + 0xaa500000aa5a, + 0xaa600000aa77, + 0xaa7a0000aac3, + 0xaadb0000aade, + 0xaae00000aaf0, + 0xaaf20000aaf7, + 0xab010000ab07, + 0xab090000ab0f, + 0xab110000ab17, + 0xab200000ab27, + 0xab280000ab2f, + 0xab300000ab5b, + 0xab600000ab66, + 0xabc00000abeb, + 0xabec0000abee, + 0xabf00000abfa, + 0xac000000d7a4, + 0xfa0e0000fa10, + 0xfa110000fa12, + 0xfa130000fa15, + 0xfa1f0000fa20, + 0xfa210000fa22, + 0xfa230000fa25, + 0xfa270000fa2a, + 0xfb1e0000fb1f, + 0xfe200000fe30, + 0xfe730000fe74, + 0x100000001000c, + 0x1000d00010027, + 0x100280001003b, + 0x1003c0001003e, + 0x1003f0001004e, + 0x100500001005e, + 0x10080000100fb, + 0x101fd000101fe, + 0x102800001029d, + 0x102a0000102d1, + 0x102e0000102e1, + 0x1030000010320, + 0x1032d00010341, + 0x103420001034a, + 0x103500001037b, + 0x103800001039e, + 0x103a0000103c4, + 0x103c8000103d0, + 0x104280001049e, + 0x104a0000104aa, + 0x104d8000104fc, + 0x1050000010528, + 0x1053000010564, + 0x1060000010737, + 0x1074000010756, + 0x1076000010768, + 0x1080000010806, + 0x1080800010809, + 0x1080a00010836, + 0x1083700010839, + 0x1083c0001083d, + 0x1083f00010856, + 0x1086000010877, + 0x108800001089f, + 0x108e0000108f3, + 0x108f4000108f6, + 0x1090000010916, + 0x109200001093a, + 0x10980000109b8, + 0x109be000109c0, + 0x10a0000010a04, + 0x10a0500010a07, + 0x10a0c00010a14, + 0x10a1500010a18, + 0x10a1900010a34, + 0x10a3800010a3b, + 0x10a3f00010a40, + 0x10a6000010a7d, + 0x10a8000010a9d, + 0x10ac000010ac8, + 0x10ac900010ae7, + 0x10b0000010b36, + 0x10b4000010b56, + 0x10b6000010b73, + 0x10b8000010b92, + 0x10c0000010c49, + 0x10cc000010cf3, + 0x1100000011047, + 0x1106600011070, + 0x1107f000110bb, + 0x110d0000110e9, + 0x110f0000110fa, + 0x1110000011135, + 0x1113600011140, + 0x1115000011174, + 0x1117600011177, + 0x11180000111c5, + 0x111ca000111cd, + 0x111d0000111db, + 0x111dc000111dd, + 0x1120000011212, + 0x1121300011238, + 0x1123e0001123f, + 0x1128000011287, + 0x1128800011289, + 0x1128a0001128e, + 0x1128f0001129e, + 0x1129f000112a9, + 0x112b0000112eb, + 0x112f0000112fa, + 0x1130000011304, + 0x113050001130d, 
+ 0x1130f00011311, + 0x1131300011329, + 0x1132a00011331, + 0x1133200011334, + 0x113350001133a, + 0x1133c00011345, + 0x1134700011349, + 0x1134b0001134e, + 0x1135000011351, + 0x1135700011358, + 0x1135d00011364, + 0x113660001136d, + 0x1137000011375, + 0x114000001144b, + 0x114500001145a, + 0x11480000114c6, + 0x114c7000114c8, + 0x114d0000114da, + 0x11580000115b6, + 0x115b8000115c1, + 0x115d8000115de, + 0x1160000011641, + 0x1164400011645, + 0x116500001165a, + 0x11680000116b8, + 0x116c0000116ca, + 0x117000001171a, + 0x1171d0001172c, + 0x117300001173a, + 0x118c0000118ea, + 0x118ff00011900, + 0x11a0000011a3f, + 0x11a4700011a48, + 0x11a5000011a84, + 0x11a8600011a9a, + 0x11ac000011af9, + 0x11c0000011c09, + 0x11c0a00011c37, + 0x11c3800011c41, + 0x11c5000011c5a, + 0x11c7200011c90, + 0x11c9200011ca8, + 0x11ca900011cb7, + 0x11d0000011d07, + 0x11d0800011d0a, + 0x11d0b00011d37, + 0x11d3a00011d3b, + 0x11d3c00011d3e, + 0x11d3f00011d48, + 0x11d5000011d5a, + 0x120000001239a, + 0x1248000012544, + 0x130000001342f, + 0x1440000014647, + 0x1680000016a39, + 0x16a4000016a5f, + 0x16a6000016a6a, + 0x16ad000016aee, + 0x16af000016af5, + 0x16b0000016b37, + 0x16b4000016b44, + 0x16b5000016b5a, + 0x16b6300016b78, + 0x16b7d00016b90, + 0x16f0000016f45, + 0x16f5000016f7f, + 0x16f8f00016fa0, + 0x16fe000016fe2, + 0x17000000187ed, + 0x1880000018af3, + 0x1b0000001b11f, + 0x1b1700001b2fc, + 0x1bc000001bc6b, + 0x1bc700001bc7d, + 0x1bc800001bc89, + 0x1bc900001bc9a, + 0x1bc9d0001bc9f, + 0x1da000001da37, + 0x1da3b0001da6d, + 0x1da750001da76, + 0x1da840001da85, + 0x1da9b0001daa0, + 0x1daa10001dab0, + 0x1e0000001e007, + 0x1e0080001e019, + 0x1e01b0001e022, + 0x1e0230001e025, + 0x1e0260001e02b, + 0x1e8000001e8c5, + 0x1e8d00001e8d7, + 0x1e9220001e94b, + 0x1e9500001e95a, + 0x200000002a6d7, + 0x2a7000002b735, + 0x2b7400002b81e, + 0x2b8200002cea2, + 0x2ceb00002ebe1, + ), + 'CONTEXTJ': ( + 0x200c0000200e, + ), + 'CONTEXTO': ( + 0xb7000000b8, + 0x37500000376, + 0x5f3000005f5, + 0x6600000066a, + 0x6f0000006fa, + 0x30fb000030fc, + ), +} diff --git a/env/lib/python3.6/site-packages/idna/intranges.py b/env/lib/python3.6/site-packages/idna/intranges.py new file mode 100755 index 0000000..fa8a735 --- /dev/null +++ b/env/lib/python3.6/site-packages/idna/intranges.py @@ -0,0 +1,53 @@ +""" +Given a list of integers, made up of (hopefully) a small number of long runs +of consecutive integers, compute a representation of the form +((start1, end1), (start2, end2) ...). Then answer the question "was x present +in the original list?" in time O(log(# runs)). +""" + +import bisect + +def intranges_from_list(list_): + """Represent a list of integers as a sequence of ranges: + ((start_0, end_0), (start_1, end_1), ...), such that the original + integers are exactly those x such that start_i <= x < end_i for some i. + + Ranges are encoded as single integers (start << 32 | end), not as tuples. 
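+
+    For example, intranges_from_list([1, 2, 3, 10]) yields two encoded
+    ranges, (1 << 32) | 4 and (10 << 32) | 11, covering the half-open
+    intervals [1, 4) and [10, 11).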
+ """ + + sorted_list = sorted(list_) + ranges = [] + last_write = -1 + for i in range(len(sorted_list)): + if i+1 < len(sorted_list): + if sorted_list[i] == sorted_list[i+1]-1: + continue + current_range = sorted_list[last_write+1:i+1] + ranges.append(_encode_range(current_range[0], current_range[-1] + 1)) + last_write = i + + return tuple(ranges) + +def _encode_range(start, end): + return (start << 32) | end + +def _decode_range(r): + return (r >> 32), (r & ((1 << 32) - 1)) + + +def intranges_contain(int_, ranges): + """Determine if `int_` falls into one of the ranges in `ranges`.""" + tuple_ = _encode_range(int_, 0) + pos = bisect.bisect_left(ranges, tuple_) + # we could be immediately ahead of a tuple (start, end) + # with start < int_ <= end + if pos > 0: + left, right = _decode_range(ranges[pos-1]) + if left <= int_ < right: + return True + # or we could be immediately behind a tuple (int_, end) + if pos < len(ranges): + left, _ = _decode_range(ranges[pos]) + if left == int_: + return True + return False diff --git a/env/lib/python3.6/site-packages/idna/package_data.py b/env/lib/python3.6/site-packages/idna/package_data.py new file mode 100755 index 0000000..39c192b --- /dev/null +++ b/env/lib/python3.6/site-packages/idna/package_data.py @@ -0,0 +1,2 @@ +__version__ = '2.7' + diff --git a/env/lib/python3.6/site-packages/idna/uts46data.py b/env/lib/python3.6/site-packages/idna/uts46data.py new file mode 100755 index 0000000..79731cb --- /dev/null +++ b/env/lib/python3.6/site-packages/idna/uts46data.py @@ -0,0 +1,8179 @@ +# This file is automatically generated by tools/idna-data +# vim: set fileencoding=utf-8 : + +"""IDNA Mapping Table from UTS46.""" + + +__version__ = "10.0.0" +def _seg_0(): + return [ + (0x0, '3'), + (0x1, '3'), + (0x2, '3'), + (0x3, '3'), + (0x4, '3'), + (0x5, '3'), + (0x6, '3'), + (0x7, '3'), + (0x8, '3'), + (0x9, '3'), + (0xA, '3'), + (0xB, '3'), + (0xC, '3'), + (0xD, '3'), + (0xE, '3'), + (0xF, '3'), + (0x10, '3'), + (0x11, '3'), + (0x12, '3'), + (0x13, '3'), + (0x14, '3'), + (0x15, '3'), + (0x16, '3'), + (0x17, '3'), + (0x18, '3'), + (0x19, '3'), + (0x1A, '3'), + (0x1B, '3'), + (0x1C, '3'), + (0x1D, '3'), + (0x1E, '3'), + (0x1F, '3'), + (0x20, '3'), + (0x21, '3'), + (0x22, '3'), + (0x23, '3'), + (0x24, '3'), + (0x25, '3'), + (0x26, '3'), + (0x27, '3'), + (0x28, '3'), + (0x29, '3'), + (0x2A, '3'), + (0x2B, '3'), + (0x2C, '3'), + (0x2D, 'V'), + (0x2E, 'V'), + (0x2F, '3'), + (0x30, 'V'), + (0x31, 'V'), + (0x32, 'V'), + (0x33, 'V'), + (0x34, 'V'), + (0x35, 'V'), + (0x36, 'V'), + (0x37, 'V'), + (0x38, 'V'), + (0x39, 'V'), + (0x3A, '3'), + (0x3B, '3'), + (0x3C, '3'), + (0x3D, '3'), + (0x3E, '3'), + (0x3F, '3'), + (0x40, '3'), + (0x41, 'M', u'a'), + (0x42, 'M', u'b'), + (0x43, 'M', u'c'), + (0x44, 'M', u'd'), + (0x45, 'M', u'e'), + (0x46, 'M', u'f'), + (0x47, 'M', u'g'), + (0x48, 'M', u'h'), + (0x49, 'M', u'i'), + (0x4A, 'M', u'j'), + (0x4B, 'M', u'k'), + (0x4C, 'M', u'l'), + (0x4D, 'M', u'm'), + (0x4E, 'M', u'n'), + (0x4F, 'M', u'o'), + (0x50, 'M', u'p'), + (0x51, 'M', u'q'), + (0x52, 'M', u'r'), + (0x53, 'M', u's'), + (0x54, 'M', u't'), + (0x55, 'M', u'u'), + (0x56, 'M', u'v'), + (0x57, 'M', u'w'), + (0x58, 'M', u'x'), + (0x59, 'M', u'y'), + (0x5A, 'M', u'z'), + (0x5B, '3'), + (0x5C, '3'), + (0x5D, '3'), + (0x5E, '3'), + (0x5F, '3'), + (0x60, '3'), + (0x61, 'V'), + (0x62, 'V'), + (0x63, 'V'), + ] + +def _seg_1(): + return [ + (0x64, 'V'), + (0x65, 'V'), + (0x66, 'V'), + (0x67, 'V'), + (0x68, 'V'), + (0x69, 'V'), + (0x6A, 'V'), + (0x6B, 'V'), + (0x6C, 'V'), + (0x6D, 
'V'), + (0x6E, 'V'), + (0x6F, 'V'), + (0x70, 'V'), + (0x71, 'V'), + (0x72, 'V'), + (0x73, 'V'), + (0x74, 'V'), + (0x75, 'V'), + (0x76, 'V'), + (0x77, 'V'), + (0x78, 'V'), + (0x79, 'V'), + (0x7A, 'V'), + (0x7B, '3'), + (0x7C, '3'), + (0x7D, '3'), + (0x7E, '3'), + (0x7F, '3'), + (0x80, 'X'), + (0x81, 'X'), + (0x82, 'X'), + (0x83, 'X'), + (0x84, 'X'), + (0x85, 'X'), + (0x86, 'X'), + (0x87, 'X'), + (0x88, 'X'), + (0x89, 'X'), + (0x8A, 'X'), + (0x8B, 'X'), + (0x8C, 'X'), + (0x8D, 'X'), + (0x8E, 'X'), + (0x8F, 'X'), + (0x90, 'X'), + (0x91, 'X'), + (0x92, 'X'), + (0x93, 'X'), + (0x94, 'X'), + (0x95, 'X'), + (0x96, 'X'), + (0x97, 'X'), + (0x98, 'X'), + (0x99, 'X'), + (0x9A, 'X'), + (0x9B, 'X'), + (0x9C, 'X'), + (0x9D, 'X'), + (0x9E, 'X'), + (0x9F, 'X'), + (0xA0, '3', u' '), + (0xA1, 'V'), + (0xA2, 'V'), + (0xA3, 'V'), + (0xA4, 'V'), + (0xA5, 'V'), + (0xA6, 'V'), + (0xA7, 'V'), + (0xA8, '3', u' ̈'), + (0xA9, 'V'), + (0xAA, 'M', u'a'), + (0xAB, 'V'), + (0xAC, 'V'), + (0xAD, 'I'), + (0xAE, 'V'), + (0xAF, '3', u' ̄'), + (0xB0, 'V'), + (0xB1, 'V'), + (0xB2, 'M', u'2'), + (0xB3, 'M', u'3'), + (0xB4, '3', u' ́'), + (0xB5, 'M', u'μ'), + (0xB6, 'V'), + (0xB7, 'V'), + (0xB8, '3', u' ̧'), + (0xB9, 'M', u'1'), + (0xBA, 'M', u'o'), + (0xBB, 'V'), + (0xBC, 'M', u'1⁄4'), + (0xBD, 'M', u'1⁄2'), + (0xBE, 'M', u'3⁄4'), + (0xBF, 'V'), + (0xC0, 'M', u'à'), + (0xC1, 'M', u'á'), + (0xC2, 'M', u'â'), + (0xC3, 'M', u'ã'), + (0xC4, 'M', u'ä'), + (0xC5, 'M', u'å'), + (0xC6, 'M', u'æ'), + (0xC7, 'M', u'ç'), + ] + +def _seg_2(): + return [ + (0xC8, 'M', u'è'), + (0xC9, 'M', u'é'), + (0xCA, 'M', u'ê'), + (0xCB, 'M', u'ë'), + (0xCC, 'M', u'ì'), + (0xCD, 'M', u'í'), + (0xCE, 'M', u'î'), + (0xCF, 'M', u'ï'), + (0xD0, 'M', u'ð'), + (0xD1, 'M', u'ñ'), + (0xD2, 'M', u'ò'), + (0xD3, 'M', u'ó'), + (0xD4, 'M', u'ô'), + (0xD5, 'M', u'õ'), + (0xD6, 'M', u'ö'), + (0xD7, 'V'), + (0xD8, 'M', u'ø'), + (0xD9, 'M', u'ù'), + (0xDA, 'M', u'ú'), + (0xDB, 'M', u'û'), + (0xDC, 'M', u'ü'), + (0xDD, 'M', u'ý'), + (0xDE, 'M', u'þ'), + (0xDF, 'D', u'ss'), + (0xE0, 'V'), + (0xE1, 'V'), + (0xE2, 'V'), + (0xE3, 'V'), + (0xE4, 'V'), + (0xE5, 'V'), + (0xE6, 'V'), + (0xE7, 'V'), + (0xE8, 'V'), + (0xE9, 'V'), + (0xEA, 'V'), + (0xEB, 'V'), + (0xEC, 'V'), + (0xED, 'V'), + (0xEE, 'V'), + (0xEF, 'V'), + (0xF0, 'V'), + (0xF1, 'V'), + (0xF2, 'V'), + (0xF3, 'V'), + (0xF4, 'V'), + (0xF5, 'V'), + (0xF6, 'V'), + (0xF7, 'V'), + (0xF8, 'V'), + (0xF9, 'V'), + (0xFA, 'V'), + (0xFB, 'V'), + (0xFC, 'V'), + (0xFD, 'V'), + (0xFE, 'V'), + (0xFF, 'V'), + (0x100, 'M', u'ā'), + (0x101, 'V'), + (0x102, 'M', u'ă'), + (0x103, 'V'), + (0x104, 'M', u'ą'), + (0x105, 'V'), + (0x106, 'M', u'ć'), + (0x107, 'V'), + (0x108, 'M', u'ĉ'), + (0x109, 'V'), + (0x10A, 'M', u'ċ'), + (0x10B, 'V'), + (0x10C, 'M', u'č'), + (0x10D, 'V'), + (0x10E, 'M', u'ď'), + (0x10F, 'V'), + (0x110, 'M', u'đ'), + (0x111, 'V'), + (0x112, 'M', u'ē'), + (0x113, 'V'), + (0x114, 'M', u'ĕ'), + (0x115, 'V'), + (0x116, 'M', u'ė'), + (0x117, 'V'), + (0x118, 'M', u'ę'), + (0x119, 'V'), + (0x11A, 'M', u'ě'), + (0x11B, 'V'), + (0x11C, 'M', u'ĝ'), + (0x11D, 'V'), + (0x11E, 'M', u'ğ'), + (0x11F, 'V'), + (0x120, 'M', u'ġ'), + (0x121, 'V'), + (0x122, 'M', u'ģ'), + (0x123, 'V'), + (0x124, 'M', u'ĥ'), + (0x125, 'V'), + (0x126, 'M', u'ħ'), + (0x127, 'V'), + (0x128, 'M', u'ĩ'), + (0x129, 'V'), + (0x12A, 'M', u'ī'), + (0x12B, 'V'), + ] + +def _seg_3(): + return [ + (0x12C, 'M', u'ĭ'), + (0x12D, 'V'), + (0x12E, 'M', u'į'), + (0x12F, 'V'), + (0x130, 'M', u'i̇'), + (0x131, 'V'), + (0x132, 'M', u'ij'), + (0x134, 'M', u'ĵ'), + (0x135, 'V'), + 
(0x136, 'M', u'ķ'), + (0x137, 'V'), + (0x139, 'M', u'ĺ'), + (0x13A, 'V'), + (0x13B, 'M', u'ļ'), + (0x13C, 'V'), + (0x13D, 'M', u'ľ'), + (0x13E, 'V'), + (0x13F, 'M', u'l·'), + (0x141, 'M', u'ł'), + (0x142, 'V'), + (0x143, 'M', u'ń'), + (0x144, 'V'), + (0x145, 'M', u'ņ'), + (0x146, 'V'), + (0x147, 'M', u'ň'), + (0x148, 'V'), + (0x149, 'M', u'ʼn'), + (0x14A, 'M', u'ŋ'), + (0x14B, 'V'), + (0x14C, 'M', u'ō'), + (0x14D, 'V'), + (0x14E, 'M', u'ŏ'), + (0x14F, 'V'), + (0x150, 'M', u'ő'), + (0x151, 'V'), + (0x152, 'M', u'œ'), + (0x153, 'V'), + (0x154, 'M', u'ŕ'), + (0x155, 'V'), + (0x156, 'M', u'ŗ'), + (0x157, 'V'), + (0x158, 'M', u'ř'), + (0x159, 'V'), + (0x15A, 'M', u'ś'), + (0x15B, 'V'), + (0x15C, 'M', u'ŝ'), + (0x15D, 'V'), + (0x15E, 'M', u'ş'), + (0x15F, 'V'), + (0x160, 'M', u'š'), + (0x161, 'V'), + (0x162, 'M', u'ţ'), + (0x163, 'V'), + (0x164, 'M', u'ť'), + (0x165, 'V'), + (0x166, 'M', u'ŧ'), + (0x167, 'V'), + (0x168, 'M', u'ũ'), + (0x169, 'V'), + (0x16A, 'M', u'ū'), + (0x16B, 'V'), + (0x16C, 'M', u'ŭ'), + (0x16D, 'V'), + (0x16E, 'M', u'ů'), + (0x16F, 'V'), + (0x170, 'M', u'ű'), + (0x171, 'V'), + (0x172, 'M', u'ų'), + (0x173, 'V'), + (0x174, 'M', u'ŵ'), + (0x175, 'V'), + (0x176, 'M', u'ŷ'), + (0x177, 'V'), + (0x178, 'M', u'ÿ'), + (0x179, 'M', u'ź'), + (0x17A, 'V'), + (0x17B, 'M', u'ż'), + (0x17C, 'V'), + (0x17D, 'M', u'ž'), + (0x17E, 'V'), + (0x17F, 'M', u's'), + (0x180, 'V'), + (0x181, 'M', u'ɓ'), + (0x182, 'M', u'ƃ'), + (0x183, 'V'), + (0x184, 'M', u'ƅ'), + (0x185, 'V'), + (0x186, 'M', u'ɔ'), + (0x187, 'M', u'ƈ'), + (0x188, 'V'), + (0x189, 'M', u'ɖ'), + (0x18A, 'M', u'ɗ'), + (0x18B, 'M', u'ƌ'), + (0x18C, 'V'), + (0x18E, 'M', u'ǝ'), + (0x18F, 'M', u'ə'), + (0x190, 'M', u'ɛ'), + (0x191, 'M', u'ƒ'), + (0x192, 'V'), + (0x193, 'M', u'ɠ'), + ] + +def _seg_4(): + return [ + (0x194, 'M', u'ɣ'), + (0x195, 'V'), + (0x196, 'M', u'ɩ'), + (0x197, 'M', u'ɨ'), + (0x198, 'M', u'ƙ'), + (0x199, 'V'), + (0x19C, 'M', u'ɯ'), + (0x19D, 'M', u'ɲ'), + (0x19E, 'V'), + (0x19F, 'M', u'ɵ'), + (0x1A0, 'M', u'ơ'), + (0x1A1, 'V'), + (0x1A2, 'M', u'ƣ'), + (0x1A3, 'V'), + (0x1A4, 'M', u'ƥ'), + (0x1A5, 'V'), + (0x1A6, 'M', u'ʀ'), + (0x1A7, 'M', u'ƨ'), + (0x1A8, 'V'), + (0x1A9, 'M', u'ʃ'), + (0x1AA, 'V'), + (0x1AC, 'M', u'ƭ'), + (0x1AD, 'V'), + (0x1AE, 'M', u'ʈ'), + (0x1AF, 'M', u'ư'), + (0x1B0, 'V'), + (0x1B1, 'M', u'ʊ'), + (0x1B2, 'M', u'ʋ'), + (0x1B3, 'M', u'ƴ'), + (0x1B4, 'V'), + (0x1B5, 'M', u'ƶ'), + (0x1B6, 'V'), + (0x1B7, 'M', u'ʒ'), + (0x1B8, 'M', u'ƹ'), + (0x1B9, 'V'), + (0x1BC, 'M', u'ƽ'), + (0x1BD, 'V'), + (0x1C4, 'M', u'dž'), + (0x1C7, 'M', u'lj'), + (0x1CA, 'M', u'nj'), + (0x1CD, 'M', u'ǎ'), + (0x1CE, 'V'), + (0x1CF, 'M', u'ǐ'), + (0x1D0, 'V'), + (0x1D1, 'M', u'ǒ'), + (0x1D2, 'V'), + (0x1D3, 'M', u'ǔ'), + (0x1D4, 'V'), + (0x1D5, 'M', u'ǖ'), + (0x1D6, 'V'), + (0x1D7, 'M', u'ǘ'), + (0x1D8, 'V'), + (0x1D9, 'M', u'ǚ'), + (0x1DA, 'V'), + (0x1DB, 'M', u'ǜ'), + (0x1DC, 'V'), + (0x1DE, 'M', u'ǟ'), + (0x1DF, 'V'), + (0x1E0, 'M', u'ǡ'), + (0x1E1, 'V'), + (0x1E2, 'M', u'ǣ'), + (0x1E3, 'V'), + (0x1E4, 'M', u'ǥ'), + (0x1E5, 'V'), + (0x1E6, 'M', u'ǧ'), + (0x1E7, 'V'), + (0x1E8, 'M', u'ǩ'), + (0x1E9, 'V'), + (0x1EA, 'M', u'ǫ'), + (0x1EB, 'V'), + (0x1EC, 'M', u'ǭ'), + (0x1ED, 'V'), + (0x1EE, 'M', u'ǯ'), + (0x1EF, 'V'), + (0x1F1, 'M', u'dz'), + (0x1F4, 'M', u'ǵ'), + (0x1F5, 'V'), + (0x1F6, 'M', u'ƕ'), + (0x1F7, 'M', u'ƿ'), + (0x1F8, 'M', u'ǹ'), + (0x1F9, 'V'), + (0x1FA, 'M', u'ǻ'), + (0x1FB, 'V'), + (0x1FC, 'M', u'ǽ'), + (0x1FD, 'V'), + (0x1FE, 'M', u'ǿ'), + (0x1FF, 'V'), + (0x200, 'M', u'ȁ'), + (0x201, 'V'), + (0x202, 'M', 
u'ȃ'), + (0x203, 'V'), + (0x204, 'M', u'ȅ'), + (0x205, 'V'), + (0x206, 'M', u'ȇ'), + (0x207, 'V'), + (0x208, 'M', u'ȉ'), + (0x209, 'V'), + (0x20A, 'M', u'ȋ'), + (0x20B, 'V'), + (0x20C, 'M', u'ȍ'), + ] + +def _seg_5(): + return [ + (0x20D, 'V'), + (0x20E, 'M', u'ȏ'), + (0x20F, 'V'), + (0x210, 'M', u'ȑ'), + (0x211, 'V'), + (0x212, 'M', u'ȓ'), + (0x213, 'V'), + (0x214, 'M', u'ȕ'), + (0x215, 'V'), + (0x216, 'M', u'ȗ'), + (0x217, 'V'), + (0x218, 'M', u'ș'), + (0x219, 'V'), + (0x21A, 'M', u'ț'), + (0x21B, 'V'), + (0x21C, 'M', u'ȝ'), + (0x21D, 'V'), + (0x21E, 'M', u'ȟ'), + (0x21F, 'V'), + (0x220, 'M', u'ƞ'), + (0x221, 'V'), + (0x222, 'M', u'ȣ'), + (0x223, 'V'), + (0x224, 'M', u'ȥ'), + (0x225, 'V'), + (0x226, 'M', u'ȧ'), + (0x227, 'V'), + (0x228, 'M', u'ȩ'), + (0x229, 'V'), + (0x22A, 'M', u'ȫ'), + (0x22B, 'V'), + (0x22C, 'M', u'ȭ'), + (0x22D, 'V'), + (0x22E, 'M', u'ȯ'), + (0x22F, 'V'), + (0x230, 'M', u'ȱ'), + (0x231, 'V'), + (0x232, 'M', u'ȳ'), + (0x233, 'V'), + (0x23A, 'M', u'ⱥ'), + (0x23B, 'M', u'ȼ'), + (0x23C, 'V'), + (0x23D, 'M', u'ƚ'), + (0x23E, 'M', u'ⱦ'), + (0x23F, 'V'), + (0x241, 'M', u'ɂ'), + (0x242, 'V'), + (0x243, 'M', u'ƀ'), + (0x244, 'M', u'ʉ'), + (0x245, 'M', u'ʌ'), + (0x246, 'M', u'ɇ'), + (0x247, 'V'), + (0x248, 'M', u'ɉ'), + (0x249, 'V'), + (0x24A, 'M', u'ɋ'), + (0x24B, 'V'), + (0x24C, 'M', u'ɍ'), + (0x24D, 'V'), + (0x24E, 'M', u'ɏ'), + (0x24F, 'V'), + (0x2B0, 'M', u'h'), + (0x2B1, 'M', u'ɦ'), + (0x2B2, 'M', u'j'), + (0x2B3, 'M', u'r'), + (0x2B4, 'M', u'ɹ'), + (0x2B5, 'M', u'ɻ'), + (0x2B6, 'M', u'ʁ'), + (0x2B7, 'M', u'w'), + (0x2B8, 'M', u'y'), + (0x2B9, 'V'), + (0x2D8, '3', u' ̆'), + (0x2D9, '3', u' ̇'), + (0x2DA, '3', u' ̊'), + (0x2DB, '3', u' ̨'), + (0x2DC, '3', u' ̃'), + (0x2DD, '3', u' ̋'), + (0x2DE, 'V'), + (0x2E0, 'M', u'ɣ'), + (0x2E1, 'M', u'l'), + (0x2E2, 'M', u's'), + (0x2E3, 'M', u'x'), + (0x2E4, 'M', u'ʕ'), + (0x2E5, 'V'), + (0x340, 'M', u'̀'), + (0x341, 'M', u'́'), + (0x342, 'V'), + (0x343, 'M', u'̓'), + (0x344, 'M', u'̈́'), + (0x345, 'M', u'ι'), + (0x346, 'V'), + (0x34F, 'I'), + (0x350, 'V'), + (0x370, 'M', u'ͱ'), + (0x371, 'V'), + (0x372, 'M', u'ͳ'), + (0x373, 'V'), + (0x374, 'M', u'ʹ'), + (0x375, 'V'), + (0x376, 'M', u'ͷ'), + (0x377, 'V'), + ] + +def _seg_6(): + return [ + (0x378, 'X'), + (0x37A, '3', u' ι'), + (0x37B, 'V'), + (0x37E, '3', u';'), + (0x37F, 'M', u'ϳ'), + (0x380, 'X'), + (0x384, '3', u' ́'), + (0x385, '3', u' ̈́'), + (0x386, 'M', u'ά'), + (0x387, 'M', u'·'), + (0x388, 'M', u'έ'), + (0x389, 'M', u'ή'), + (0x38A, 'M', u'ί'), + (0x38B, 'X'), + (0x38C, 'M', u'ό'), + (0x38D, 'X'), + (0x38E, 'M', u'ύ'), + (0x38F, 'M', u'ώ'), + (0x390, 'V'), + (0x391, 'M', u'α'), + (0x392, 'M', u'β'), + (0x393, 'M', u'γ'), + (0x394, 'M', u'δ'), + (0x395, 'M', u'ε'), + (0x396, 'M', u'ζ'), + (0x397, 'M', u'η'), + (0x398, 'M', u'θ'), + (0x399, 'M', u'ι'), + (0x39A, 'M', u'κ'), + (0x39B, 'M', u'λ'), + (0x39C, 'M', u'μ'), + (0x39D, 'M', u'ν'), + (0x39E, 'M', u'ξ'), + (0x39F, 'M', u'ο'), + (0x3A0, 'M', u'π'), + (0x3A1, 'M', u'ρ'), + (0x3A2, 'X'), + (0x3A3, 'M', u'σ'), + (0x3A4, 'M', u'τ'), + (0x3A5, 'M', u'υ'), + (0x3A6, 'M', u'φ'), + (0x3A7, 'M', u'χ'), + (0x3A8, 'M', u'ψ'), + (0x3A9, 'M', u'ω'), + (0x3AA, 'M', u'ϊ'), + (0x3AB, 'M', u'ϋ'), + (0x3AC, 'V'), + (0x3C2, 'D', u'σ'), + (0x3C3, 'V'), + (0x3CF, 'M', u'ϗ'), + (0x3D0, 'M', u'β'), + (0x3D1, 'M', u'θ'), + (0x3D2, 'M', u'υ'), + (0x3D3, 'M', u'ύ'), + (0x3D4, 'M', u'ϋ'), + (0x3D5, 'M', u'φ'), + (0x3D6, 'M', u'π'), + (0x3D7, 'V'), + (0x3D8, 'M', u'ϙ'), + (0x3D9, 'V'), + (0x3DA, 'M', u'ϛ'), + (0x3DB, 'V'), + (0x3DC, 'M', u'ϝ'), + 
(0x3DD, 'V'), + (0x3DE, 'M', u'ϟ'), + (0x3DF, 'V'), + (0x3E0, 'M', u'ϡ'), + (0x3E1, 'V'), + (0x3E2, 'M', u'ϣ'), + (0x3E3, 'V'), + (0x3E4, 'M', u'ϥ'), + (0x3E5, 'V'), + (0x3E6, 'M', u'ϧ'), + (0x3E7, 'V'), + (0x3E8, 'M', u'ϩ'), + (0x3E9, 'V'), + (0x3EA, 'M', u'ϫ'), + (0x3EB, 'V'), + (0x3EC, 'M', u'ϭ'), + (0x3ED, 'V'), + (0x3EE, 'M', u'ϯ'), + (0x3EF, 'V'), + (0x3F0, 'M', u'κ'), + (0x3F1, 'M', u'ρ'), + (0x3F2, 'M', u'σ'), + (0x3F3, 'V'), + (0x3F4, 'M', u'θ'), + (0x3F5, 'M', u'ε'), + (0x3F6, 'V'), + (0x3F7, 'M', u'ϸ'), + (0x3F8, 'V'), + (0x3F9, 'M', u'σ'), + (0x3FA, 'M', u'ϻ'), + (0x3FB, 'V'), + (0x3FD, 'M', u'ͻ'), + (0x3FE, 'M', u'ͼ'), + (0x3FF, 'M', u'ͽ'), + (0x400, 'M', u'ѐ'), + (0x401, 'M', u'ё'), + (0x402, 'M', u'ђ'), + ] + +def _seg_7(): + return [ + (0x403, 'M', u'ѓ'), + (0x404, 'M', u'є'), + (0x405, 'M', u'ѕ'), + (0x406, 'M', u'і'), + (0x407, 'M', u'ї'), + (0x408, 'M', u'ј'), + (0x409, 'M', u'љ'), + (0x40A, 'M', u'њ'), + (0x40B, 'M', u'ћ'), + (0x40C, 'M', u'ќ'), + (0x40D, 'M', u'ѝ'), + (0x40E, 'M', u'ў'), + (0x40F, 'M', u'џ'), + (0x410, 'M', u'а'), + (0x411, 'M', u'б'), + (0x412, 'M', u'в'), + (0x413, 'M', u'г'), + (0x414, 'M', u'д'), + (0x415, 'M', u'е'), + (0x416, 'M', u'ж'), + (0x417, 'M', u'з'), + (0x418, 'M', u'и'), + (0x419, 'M', u'й'), + (0x41A, 'M', u'к'), + (0x41B, 'M', u'л'), + (0x41C, 'M', u'м'), + (0x41D, 'M', u'н'), + (0x41E, 'M', u'о'), + (0x41F, 'M', u'п'), + (0x420, 'M', u'р'), + (0x421, 'M', u'с'), + (0x422, 'M', u'т'), + (0x423, 'M', u'у'), + (0x424, 'M', u'ф'), + (0x425, 'M', u'х'), + (0x426, 'M', u'ц'), + (0x427, 'M', u'ч'), + (0x428, 'M', u'ш'), + (0x429, 'M', u'щ'), + (0x42A, 'M', u'ъ'), + (0x42B, 'M', u'ы'), + (0x42C, 'M', u'ь'), + (0x42D, 'M', u'э'), + (0x42E, 'M', u'ю'), + (0x42F, 'M', u'я'), + (0x430, 'V'), + (0x460, 'M', u'ѡ'), + (0x461, 'V'), + (0x462, 'M', u'ѣ'), + (0x463, 'V'), + (0x464, 'M', u'ѥ'), + (0x465, 'V'), + (0x466, 'M', u'ѧ'), + (0x467, 'V'), + (0x468, 'M', u'ѩ'), + (0x469, 'V'), + (0x46A, 'M', u'ѫ'), + (0x46B, 'V'), + (0x46C, 'M', u'ѭ'), + (0x46D, 'V'), + (0x46E, 'M', u'ѯ'), + (0x46F, 'V'), + (0x470, 'M', u'ѱ'), + (0x471, 'V'), + (0x472, 'M', u'ѳ'), + (0x473, 'V'), + (0x474, 'M', u'ѵ'), + (0x475, 'V'), + (0x476, 'M', u'ѷ'), + (0x477, 'V'), + (0x478, 'M', u'ѹ'), + (0x479, 'V'), + (0x47A, 'M', u'ѻ'), + (0x47B, 'V'), + (0x47C, 'M', u'ѽ'), + (0x47D, 'V'), + (0x47E, 'M', u'ѿ'), + (0x47F, 'V'), + (0x480, 'M', u'ҁ'), + (0x481, 'V'), + (0x48A, 'M', u'ҋ'), + (0x48B, 'V'), + (0x48C, 'M', u'ҍ'), + (0x48D, 'V'), + (0x48E, 'M', u'ҏ'), + (0x48F, 'V'), + (0x490, 'M', u'ґ'), + (0x491, 'V'), + (0x492, 'M', u'ғ'), + (0x493, 'V'), + (0x494, 'M', u'ҕ'), + (0x495, 'V'), + (0x496, 'M', u'җ'), + (0x497, 'V'), + (0x498, 'M', u'ҙ'), + (0x499, 'V'), + (0x49A, 'M', u'қ'), + (0x49B, 'V'), + (0x49C, 'M', u'ҝ'), + (0x49D, 'V'), + ] + +def _seg_8(): + return [ + (0x49E, 'M', u'ҟ'), + (0x49F, 'V'), + (0x4A0, 'M', u'ҡ'), + (0x4A1, 'V'), + (0x4A2, 'M', u'ң'), + (0x4A3, 'V'), + (0x4A4, 'M', u'ҥ'), + (0x4A5, 'V'), + (0x4A6, 'M', u'ҧ'), + (0x4A7, 'V'), + (0x4A8, 'M', u'ҩ'), + (0x4A9, 'V'), + (0x4AA, 'M', u'ҫ'), + (0x4AB, 'V'), + (0x4AC, 'M', u'ҭ'), + (0x4AD, 'V'), + (0x4AE, 'M', u'ү'), + (0x4AF, 'V'), + (0x4B0, 'M', u'ұ'), + (0x4B1, 'V'), + (0x4B2, 'M', u'ҳ'), + (0x4B3, 'V'), + (0x4B4, 'M', u'ҵ'), + (0x4B5, 'V'), + (0x4B6, 'M', u'ҷ'), + (0x4B7, 'V'), + (0x4B8, 'M', u'ҹ'), + (0x4B9, 'V'), + (0x4BA, 'M', u'һ'), + (0x4BB, 'V'), + (0x4BC, 'M', u'ҽ'), + (0x4BD, 'V'), + (0x4BE, 'M', u'ҿ'), + (0x4BF, 'V'), + (0x4C0, 'X'), + (0x4C1, 'M', u'ӂ'), + (0x4C2, 'V'), + (0x4C3, 'M', u'ӄ'), + 
(0x4C4, 'V'), + (0x4C5, 'M', u'ӆ'), + (0x4C6, 'V'), + (0x4C7, 'M', u'ӈ'), + (0x4C8, 'V'), + (0x4C9, 'M', u'ӊ'), + (0x4CA, 'V'), + (0x4CB, 'M', u'ӌ'), + (0x4CC, 'V'), + (0x4CD, 'M', u'ӎ'), + (0x4CE, 'V'), + (0x4D0, 'M', u'ӑ'), + (0x4D1, 'V'), + (0x4D2, 'M', u'ӓ'), + (0x4D3, 'V'), + (0x4D4, 'M', u'ӕ'), + (0x4D5, 'V'), + (0x4D6, 'M', u'ӗ'), + (0x4D7, 'V'), + (0x4D8, 'M', u'ә'), + (0x4D9, 'V'), + (0x4DA, 'M', u'ӛ'), + (0x4DB, 'V'), + (0x4DC, 'M', u'ӝ'), + (0x4DD, 'V'), + (0x4DE, 'M', u'ӟ'), + (0x4DF, 'V'), + (0x4E0, 'M', u'ӡ'), + (0x4E1, 'V'), + (0x4E2, 'M', u'ӣ'), + (0x4E3, 'V'), + (0x4E4, 'M', u'ӥ'), + (0x4E5, 'V'), + (0x4E6, 'M', u'ӧ'), + (0x4E7, 'V'), + (0x4E8, 'M', u'ө'), + (0x4E9, 'V'), + (0x4EA, 'M', u'ӫ'), + (0x4EB, 'V'), + (0x4EC, 'M', u'ӭ'), + (0x4ED, 'V'), + (0x4EE, 'M', u'ӯ'), + (0x4EF, 'V'), + (0x4F0, 'M', u'ӱ'), + (0x4F1, 'V'), + (0x4F2, 'M', u'ӳ'), + (0x4F3, 'V'), + (0x4F4, 'M', u'ӵ'), + (0x4F5, 'V'), + (0x4F6, 'M', u'ӷ'), + (0x4F7, 'V'), + (0x4F8, 'M', u'ӹ'), + (0x4F9, 'V'), + (0x4FA, 'M', u'ӻ'), + (0x4FB, 'V'), + (0x4FC, 'M', u'ӽ'), + (0x4FD, 'V'), + (0x4FE, 'M', u'ӿ'), + (0x4FF, 'V'), + (0x500, 'M', u'ԁ'), + (0x501, 'V'), + (0x502, 'M', u'ԃ'), + ] + +def _seg_9(): + return [ + (0x503, 'V'), + (0x504, 'M', u'ԅ'), + (0x505, 'V'), + (0x506, 'M', u'ԇ'), + (0x507, 'V'), + (0x508, 'M', u'ԉ'), + (0x509, 'V'), + (0x50A, 'M', u'ԋ'), + (0x50B, 'V'), + (0x50C, 'M', u'ԍ'), + (0x50D, 'V'), + (0x50E, 'M', u'ԏ'), + (0x50F, 'V'), + (0x510, 'M', u'ԑ'), + (0x511, 'V'), + (0x512, 'M', u'ԓ'), + (0x513, 'V'), + (0x514, 'M', u'ԕ'), + (0x515, 'V'), + (0x516, 'M', u'ԗ'), + (0x517, 'V'), + (0x518, 'M', u'ԙ'), + (0x519, 'V'), + (0x51A, 'M', u'ԛ'), + (0x51B, 'V'), + (0x51C, 'M', u'ԝ'), + (0x51D, 'V'), + (0x51E, 'M', u'ԟ'), + (0x51F, 'V'), + (0x520, 'M', u'ԡ'), + (0x521, 'V'), + (0x522, 'M', u'ԣ'), + (0x523, 'V'), + (0x524, 'M', u'ԥ'), + (0x525, 'V'), + (0x526, 'M', u'ԧ'), + (0x527, 'V'), + (0x528, 'M', u'ԩ'), + (0x529, 'V'), + (0x52A, 'M', u'ԫ'), + (0x52B, 'V'), + (0x52C, 'M', u'ԭ'), + (0x52D, 'V'), + (0x52E, 'M', u'ԯ'), + (0x52F, 'V'), + (0x530, 'X'), + (0x531, 'M', u'ա'), + (0x532, 'M', u'բ'), + (0x533, 'M', u'գ'), + (0x534, 'M', u'դ'), + (0x535, 'M', u'ե'), + (0x536, 'M', u'զ'), + (0x537, 'M', u'է'), + (0x538, 'M', u'ը'), + (0x539, 'M', u'թ'), + (0x53A, 'M', u'ժ'), + (0x53B, 'M', u'ի'), + (0x53C, 'M', u'լ'), + (0x53D, 'M', u'խ'), + (0x53E, 'M', u'ծ'), + (0x53F, 'M', u'կ'), + (0x540, 'M', u'հ'), + (0x541, 'M', u'ձ'), + (0x542, 'M', u'ղ'), + (0x543, 'M', u'ճ'), + (0x544, 'M', u'մ'), + (0x545, 'M', u'յ'), + (0x546, 'M', u'ն'), + (0x547, 'M', u'շ'), + (0x548, 'M', u'ո'), + (0x549, 'M', u'չ'), + (0x54A, 'M', u'պ'), + (0x54B, 'M', u'ջ'), + (0x54C, 'M', u'ռ'), + (0x54D, 'M', u'ս'), + (0x54E, 'M', u'վ'), + (0x54F, 'M', u'տ'), + (0x550, 'M', u'ր'), + (0x551, 'M', u'ց'), + (0x552, 'M', u'ւ'), + (0x553, 'M', u'փ'), + (0x554, 'M', u'ք'), + (0x555, 'M', u'օ'), + (0x556, 'M', u'ֆ'), + (0x557, 'X'), + (0x559, 'V'), + (0x560, 'X'), + (0x561, 'V'), + (0x587, 'M', u'եւ'), + (0x588, 'X'), + (0x589, 'V'), + (0x58B, 'X'), + (0x58D, 'V'), + (0x590, 'X'), + (0x591, 'V'), + (0x5C8, 'X'), + (0x5D0, 'V'), + (0x5EB, 'X'), + (0x5F0, 'V'), + (0x5F5, 'X'), + ] + +def _seg_10(): + return [ + (0x606, 'V'), + (0x61C, 'X'), + (0x61E, 'V'), + (0x675, 'M', u'اٴ'), + (0x676, 'M', u'وٴ'), + (0x677, 'M', u'ۇٴ'), + (0x678, 'M', u'يٴ'), + (0x679, 'V'), + (0x6DD, 'X'), + (0x6DE, 'V'), + (0x70E, 'X'), + (0x710, 'V'), + (0x74B, 'X'), + (0x74D, 'V'), + (0x7B2, 'X'), + (0x7C0, 'V'), + (0x7FB, 'X'), + (0x800, 'V'), + (0x82E, 'X'), + (0x830, 
'V'), + (0x83F, 'X'), + (0x840, 'V'), + (0x85C, 'X'), + (0x85E, 'V'), + (0x85F, 'X'), + (0x860, 'V'), + (0x86B, 'X'), + (0x8A0, 'V'), + (0x8B5, 'X'), + (0x8B6, 'V'), + (0x8BE, 'X'), + (0x8D4, 'V'), + (0x8E2, 'X'), + (0x8E3, 'V'), + (0x958, 'M', u'क़'), + (0x959, 'M', u'ख़'), + (0x95A, 'M', u'ग़'), + (0x95B, 'M', u'ज़'), + (0x95C, 'M', u'ड़'), + (0x95D, 'M', u'ढ़'), + (0x95E, 'M', u'फ़'), + (0x95F, 'M', u'य़'), + (0x960, 'V'), + (0x984, 'X'), + (0x985, 'V'), + (0x98D, 'X'), + (0x98F, 'V'), + (0x991, 'X'), + (0x993, 'V'), + (0x9A9, 'X'), + (0x9AA, 'V'), + (0x9B1, 'X'), + (0x9B2, 'V'), + (0x9B3, 'X'), + (0x9B6, 'V'), + (0x9BA, 'X'), + (0x9BC, 'V'), + (0x9C5, 'X'), + (0x9C7, 'V'), + (0x9C9, 'X'), + (0x9CB, 'V'), + (0x9CF, 'X'), + (0x9D7, 'V'), + (0x9D8, 'X'), + (0x9DC, 'M', u'ড়'), + (0x9DD, 'M', u'ঢ়'), + (0x9DE, 'X'), + (0x9DF, 'M', u'য়'), + (0x9E0, 'V'), + (0x9E4, 'X'), + (0x9E6, 'V'), + (0x9FE, 'X'), + (0xA01, 'V'), + (0xA04, 'X'), + (0xA05, 'V'), + (0xA0B, 'X'), + (0xA0F, 'V'), + (0xA11, 'X'), + (0xA13, 'V'), + (0xA29, 'X'), + (0xA2A, 'V'), + (0xA31, 'X'), + (0xA32, 'V'), + (0xA33, 'M', u'ਲ਼'), + (0xA34, 'X'), + (0xA35, 'V'), + (0xA36, 'M', u'ਸ਼'), + (0xA37, 'X'), + (0xA38, 'V'), + (0xA3A, 'X'), + (0xA3C, 'V'), + (0xA3D, 'X'), + (0xA3E, 'V'), + (0xA43, 'X'), + (0xA47, 'V'), + (0xA49, 'X'), + (0xA4B, 'V'), + (0xA4E, 'X'), + (0xA51, 'V'), + (0xA52, 'X'), + ] + +def _seg_11(): + return [ + (0xA59, 'M', u'ਖ਼'), + (0xA5A, 'M', u'ਗ਼'), + (0xA5B, 'M', u'ਜ਼'), + (0xA5C, 'V'), + (0xA5D, 'X'), + (0xA5E, 'M', u'ਫ਼'), + (0xA5F, 'X'), + (0xA66, 'V'), + (0xA76, 'X'), + (0xA81, 'V'), + (0xA84, 'X'), + (0xA85, 'V'), + (0xA8E, 'X'), + (0xA8F, 'V'), + (0xA92, 'X'), + (0xA93, 'V'), + (0xAA9, 'X'), + (0xAAA, 'V'), + (0xAB1, 'X'), + (0xAB2, 'V'), + (0xAB4, 'X'), + (0xAB5, 'V'), + (0xABA, 'X'), + (0xABC, 'V'), + (0xAC6, 'X'), + (0xAC7, 'V'), + (0xACA, 'X'), + (0xACB, 'V'), + (0xACE, 'X'), + (0xAD0, 'V'), + (0xAD1, 'X'), + (0xAE0, 'V'), + (0xAE4, 'X'), + (0xAE6, 'V'), + (0xAF2, 'X'), + (0xAF9, 'V'), + (0xB00, 'X'), + (0xB01, 'V'), + (0xB04, 'X'), + (0xB05, 'V'), + (0xB0D, 'X'), + (0xB0F, 'V'), + (0xB11, 'X'), + (0xB13, 'V'), + (0xB29, 'X'), + (0xB2A, 'V'), + (0xB31, 'X'), + (0xB32, 'V'), + (0xB34, 'X'), + (0xB35, 'V'), + (0xB3A, 'X'), + (0xB3C, 'V'), + (0xB45, 'X'), + (0xB47, 'V'), + (0xB49, 'X'), + (0xB4B, 'V'), + (0xB4E, 'X'), + (0xB56, 'V'), + (0xB58, 'X'), + (0xB5C, 'M', u'ଡ଼'), + (0xB5D, 'M', u'ଢ଼'), + (0xB5E, 'X'), + (0xB5F, 'V'), + (0xB64, 'X'), + (0xB66, 'V'), + (0xB78, 'X'), + (0xB82, 'V'), + (0xB84, 'X'), + (0xB85, 'V'), + (0xB8B, 'X'), + (0xB8E, 'V'), + (0xB91, 'X'), + (0xB92, 'V'), + (0xB96, 'X'), + (0xB99, 'V'), + (0xB9B, 'X'), + (0xB9C, 'V'), + (0xB9D, 'X'), + (0xB9E, 'V'), + (0xBA0, 'X'), + (0xBA3, 'V'), + (0xBA5, 'X'), + (0xBA8, 'V'), + (0xBAB, 'X'), + (0xBAE, 'V'), + (0xBBA, 'X'), + (0xBBE, 'V'), + (0xBC3, 'X'), + (0xBC6, 'V'), + (0xBC9, 'X'), + (0xBCA, 'V'), + (0xBCE, 'X'), + (0xBD0, 'V'), + (0xBD1, 'X'), + (0xBD7, 'V'), + (0xBD8, 'X'), + (0xBE6, 'V'), + (0xBFB, 'X'), + (0xC00, 'V'), + (0xC04, 'X'), + ] + +def _seg_12(): + return [ + (0xC05, 'V'), + (0xC0D, 'X'), + (0xC0E, 'V'), + (0xC11, 'X'), + (0xC12, 'V'), + (0xC29, 'X'), + (0xC2A, 'V'), + (0xC3A, 'X'), + (0xC3D, 'V'), + (0xC45, 'X'), + (0xC46, 'V'), + (0xC49, 'X'), + (0xC4A, 'V'), + (0xC4E, 'X'), + (0xC55, 'V'), + (0xC57, 'X'), + (0xC58, 'V'), + (0xC5B, 'X'), + (0xC60, 'V'), + (0xC64, 'X'), + (0xC66, 'V'), + (0xC70, 'X'), + (0xC78, 'V'), + (0xC84, 'X'), + (0xC85, 'V'), + (0xC8D, 'X'), + (0xC8E, 'V'), + (0xC91, 'X'), + (0xC92, 'V'), + 
(0xCA9, 'X'), + (0xCAA, 'V'), + (0xCB4, 'X'), + (0xCB5, 'V'), + (0xCBA, 'X'), + (0xCBC, 'V'), + (0xCC5, 'X'), + (0xCC6, 'V'), + (0xCC9, 'X'), + (0xCCA, 'V'), + (0xCCE, 'X'), + (0xCD5, 'V'), + (0xCD7, 'X'), + (0xCDE, 'V'), + (0xCDF, 'X'), + (0xCE0, 'V'), + (0xCE4, 'X'), + (0xCE6, 'V'), + (0xCF0, 'X'), + (0xCF1, 'V'), + (0xCF3, 'X'), + (0xD00, 'V'), + (0xD04, 'X'), + (0xD05, 'V'), + (0xD0D, 'X'), + (0xD0E, 'V'), + (0xD11, 'X'), + (0xD12, 'V'), + (0xD45, 'X'), + (0xD46, 'V'), + (0xD49, 'X'), + (0xD4A, 'V'), + (0xD50, 'X'), + (0xD54, 'V'), + (0xD64, 'X'), + (0xD66, 'V'), + (0xD80, 'X'), + (0xD82, 'V'), + (0xD84, 'X'), + (0xD85, 'V'), + (0xD97, 'X'), + (0xD9A, 'V'), + (0xDB2, 'X'), + (0xDB3, 'V'), + (0xDBC, 'X'), + (0xDBD, 'V'), + (0xDBE, 'X'), + (0xDC0, 'V'), + (0xDC7, 'X'), + (0xDCA, 'V'), + (0xDCB, 'X'), + (0xDCF, 'V'), + (0xDD5, 'X'), + (0xDD6, 'V'), + (0xDD7, 'X'), + (0xDD8, 'V'), + (0xDE0, 'X'), + (0xDE6, 'V'), + (0xDF0, 'X'), + (0xDF2, 'V'), + (0xDF5, 'X'), + (0xE01, 'V'), + (0xE33, 'M', u'ํา'), + (0xE34, 'V'), + (0xE3B, 'X'), + (0xE3F, 'V'), + (0xE5C, 'X'), + (0xE81, 'V'), + (0xE83, 'X'), + (0xE84, 'V'), + (0xE85, 'X'), + ] + +def _seg_13(): + return [ + (0xE87, 'V'), + (0xE89, 'X'), + (0xE8A, 'V'), + (0xE8B, 'X'), + (0xE8D, 'V'), + (0xE8E, 'X'), + (0xE94, 'V'), + (0xE98, 'X'), + (0xE99, 'V'), + (0xEA0, 'X'), + (0xEA1, 'V'), + (0xEA4, 'X'), + (0xEA5, 'V'), + (0xEA6, 'X'), + (0xEA7, 'V'), + (0xEA8, 'X'), + (0xEAA, 'V'), + (0xEAC, 'X'), + (0xEAD, 'V'), + (0xEB3, 'M', u'ໍາ'), + (0xEB4, 'V'), + (0xEBA, 'X'), + (0xEBB, 'V'), + (0xEBE, 'X'), + (0xEC0, 'V'), + (0xEC5, 'X'), + (0xEC6, 'V'), + (0xEC7, 'X'), + (0xEC8, 'V'), + (0xECE, 'X'), + (0xED0, 'V'), + (0xEDA, 'X'), + (0xEDC, 'M', u'ຫນ'), + (0xEDD, 'M', u'ຫມ'), + (0xEDE, 'V'), + (0xEE0, 'X'), + (0xF00, 'V'), + (0xF0C, 'M', u'་'), + (0xF0D, 'V'), + (0xF43, 'M', u'གྷ'), + (0xF44, 'V'), + (0xF48, 'X'), + (0xF49, 'V'), + (0xF4D, 'M', u'ཌྷ'), + (0xF4E, 'V'), + (0xF52, 'M', u'དྷ'), + (0xF53, 'V'), + (0xF57, 'M', u'བྷ'), + (0xF58, 'V'), + (0xF5C, 'M', u'ཛྷ'), + (0xF5D, 'V'), + (0xF69, 'M', u'ཀྵ'), + (0xF6A, 'V'), + (0xF6D, 'X'), + (0xF71, 'V'), + (0xF73, 'M', u'ཱི'), + (0xF74, 'V'), + (0xF75, 'M', u'ཱུ'), + (0xF76, 'M', u'ྲྀ'), + (0xF77, 'M', u'ྲཱྀ'), + (0xF78, 'M', u'ླྀ'), + (0xF79, 'M', u'ླཱྀ'), + (0xF7A, 'V'), + (0xF81, 'M', u'ཱྀ'), + (0xF82, 'V'), + (0xF93, 'M', u'ྒྷ'), + (0xF94, 'V'), + (0xF98, 'X'), + (0xF99, 'V'), + (0xF9D, 'M', u'ྜྷ'), + (0xF9E, 'V'), + (0xFA2, 'M', u'ྡྷ'), + (0xFA3, 'V'), + (0xFA7, 'M', u'ྦྷ'), + (0xFA8, 'V'), + (0xFAC, 'M', u'ྫྷ'), + (0xFAD, 'V'), + (0xFB9, 'M', u'ྐྵ'), + (0xFBA, 'V'), + (0xFBD, 'X'), + (0xFBE, 'V'), + (0xFCD, 'X'), + (0xFCE, 'V'), + (0xFDB, 'X'), + (0x1000, 'V'), + (0x10A0, 'X'), + (0x10C7, 'M', u'ⴧ'), + (0x10C8, 'X'), + (0x10CD, 'M', u'ⴭ'), + (0x10CE, 'X'), + (0x10D0, 'V'), + (0x10FC, 'M', u'ნ'), + (0x10FD, 'V'), + (0x115F, 'X'), + (0x1161, 'V'), + (0x1249, 'X'), + (0x124A, 'V'), + (0x124E, 'X'), + (0x1250, 'V'), + (0x1257, 'X'), + ] + +def _seg_14(): + return [ + (0x1258, 'V'), + (0x1259, 'X'), + (0x125A, 'V'), + (0x125E, 'X'), + (0x1260, 'V'), + (0x1289, 'X'), + (0x128A, 'V'), + (0x128E, 'X'), + (0x1290, 'V'), + (0x12B1, 'X'), + (0x12B2, 'V'), + (0x12B6, 'X'), + (0x12B8, 'V'), + (0x12BF, 'X'), + (0x12C0, 'V'), + (0x12C1, 'X'), + (0x12C2, 'V'), + (0x12C6, 'X'), + (0x12C8, 'V'), + (0x12D7, 'X'), + (0x12D8, 'V'), + (0x1311, 'X'), + (0x1312, 'V'), + (0x1316, 'X'), + (0x1318, 'V'), + (0x135B, 'X'), + (0x135D, 'V'), + (0x137D, 'X'), + (0x1380, 'V'), + (0x139A, 'X'), + (0x13A0, 'V'), + (0x13F6, 'X'), + 
(0x13F8, 'M', u'Ᏸ'), + (0x13F9, 'M', u'Ᏹ'), + (0x13FA, 'M', u'Ᏺ'), + (0x13FB, 'M', u'Ᏻ'), + (0x13FC, 'M', u'Ᏼ'), + (0x13FD, 'M', u'Ᏽ'), + (0x13FE, 'X'), + (0x1400, 'V'), + (0x1680, 'X'), + (0x1681, 'V'), + (0x169D, 'X'), + (0x16A0, 'V'), + (0x16F9, 'X'), + (0x1700, 'V'), + (0x170D, 'X'), + (0x170E, 'V'), + (0x1715, 'X'), + (0x1720, 'V'), + (0x1737, 'X'), + (0x1740, 'V'), + (0x1754, 'X'), + (0x1760, 'V'), + (0x176D, 'X'), + (0x176E, 'V'), + (0x1771, 'X'), + (0x1772, 'V'), + (0x1774, 'X'), + (0x1780, 'V'), + (0x17B4, 'X'), + (0x17B6, 'V'), + (0x17DE, 'X'), + (0x17E0, 'V'), + (0x17EA, 'X'), + (0x17F0, 'V'), + (0x17FA, 'X'), + (0x1800, 'V'), + (0x1806, 'X'), + (0x1807, 'V'), + (0x180B, 'I'), + (0x180E, 'X'), + (0x1810, 'V'), + (0x181A, 'X'), + (0x1820, 'V'), + (0x1878, 'X'), + (0x1880, 'V'), + (0x18AB, 'X'), + (0x18B0, 'V'), + (0x18F6, 'X'), + (0x1900, 'V'), + (0x191F, 'X'), + (0x1920, 'V'), + (0x192C, 'X'), + (0x1930, 'V'), + (0x193C, 'X'), + (0x1940, 'V'), + (0x1941, 'X'), + (0x1944, 'V'), + (0x196E, 'X'), + (0x1970, 'V'), + (0x1975, 'X'), + (0x1980, 'V'), + (0x19AC, 'X'), + (0x19B0, 'V'), + (0x19CA, 'X'), + (0x19D0, 'V'), + (0x19DB, 'X'), + (0x19DE, 'V'), + (0x1A1C, 'X'), + ] + +def _seg_15(): + return [ + (0x1A1E, 'V'), + (0x1A5F, 'X'), + (0x1A60, 'V'), + (0x1A7D, 'X'), + (0x1A7F, 'V'), + (0x1A8A, 'X'), + (0x1A90, 'V'), + (0x1A9A, 'X'), + (0x1AA0, 'V'), + (0x1AAE, 'X'), + (0x1AB0, 'V'), + (0x1ABF, 'X'), + (0x1B00, 'V'), + (0x1B4C, 'X'), + (0x1B50, 'V'), + (0x1B7D, 'X'), + (0x1B80, 'V'), + (0x1BF4, 'X'), + (0x1BFC, 'V'), + (0x1C38, 'X'), + (0x1C3B, 'V'), + (0x1C4A, 'X'), + (0x1C4D, 'V'), + (0x1C80, 'M', u'в'), + (0x1C81, 'M', u'д'), + (0x1C82, 'M', u'о'), + (0x1C83, 'M', u'с'), + (0x1C84, 'M', u'т'), + (0x1C86, 'M', u'ъ'), + (0x1C87, 'M', u'ѣ'), + (0x1C88, 'M', u'ꙋ'), + (0x1C89, 'X'), + (0x1CC0, 'V'), + (0x1CC8, 'X'), + (0x1CD0, 'V'), + (0x1CFA, 'X'), + (0x1D00, 'V'), + (0x1D2C, 'M', u'a'), + (0x1D2D, 'M', u'æ'), + (0x1D2E, 'M', u'b'), + (0x1D2F, 'V'), + (0x1D30, 'M', u'd'), + (0x1D31, 'M', u'e'), + (0x1D32, 'M', u'ǝ'), + (0x1D33, 'M', u'g'), + (0x1D34, 'M', u'h'), + (0x1D35, 'M', u'i'), + (0x1D36, 'M', u'j'), + (0x1D37, 'M', u'k'), + (0x1D38, 'M', u'l'), + (0x1D39, 'M', u'm'), + (0x1D3A, 'M', u'n'), + (0x1D3B, 'V'), + (0x1D3C, 'M', u'o'), + (0x1D3D, 'M', u'ȣ'), + (0x1D3E, 'M', u'p'), + (0x1D3F, 'M', u'r'), + (0x1D40, 'M', u't'), + (0x1D41, 'M', u'u'), + (0x1D42, 'M', u'w'), + (0x1D43, 'M', u'a'), + (0x1D44, 'M', u'ɐ'), + (0x1D45, 'M', u'ɑ'), + (0x1D46, 'M', u'ᴂ'), + (0x1D47, 'M', u'b'), + (0x1D48, 'M', u'd'), + (0x1D49, 'M', u'e'), + (0x1D4A, 'M', u'ə'), + (0x1D4B, 'M', u'ɛ'), + (0x1D4C, 'M', u'ɜ'), + (0x1D4D, 'M', u'g'), + (0x1D4E, 'V'), + (0x1D4F, 'M', u'k'), + (0x1D50, 'M', u'm'), + (0x1D51, 'M', u'ŋ'), + (0x1D52, 'M', u'o'), + (0x1D53, 'M', u'ɔ'), + (0x1D54, 'M', u'ᴖ'), + (0x1D55, 'M', u'ᴗ'), + (0x1D56, 'M', u'p'), + (0x1D57, 'M', u't'), + (0x1D58, 'M', u'u'), + (0x1D59, 'M', u'ᴝ'), + (0x1D5A, 'M', u'ɯ'), + (0x1D5B, 'M', u'v'), + (0x1D5C, 'M', u'ᴥ'), + (0x1D5D, 'M', u'β'), + (0x1D5E, 'M', u'γ'), + (0x1D5F, 'M', u'δ'), + (0x1D60, 'M', u'φ'), + (0x1D61, 'M', u'χ'), + (0x1D62, 'M', u'i'), + (0x1D63, 'M', u'r'), + (0x1D64, 'M', u'u'), + (0x1D65, 'M', u'v'), + (0x1D66, 'M', u'β'), + (0x1D67, 'M', u'γ'), + (0x1D68, 'M', u'ρ'), + (0x1D69, 'M', u'φ'), + (0x1D6A, 'M', u'χ'), + ] + +def _seg_16(): + return [ + (0x1D6B, 'V'), + (0x1D78, 'M', u'н'), + (0x1D79, 'V'), + (0x1D9B, 'M', u'ɒ'), + (0x1D9C, 'M', u'c'), + (0x1D9D, 'M', u'ɕ'), + (0x1D9E, 'M', u'ð'), + (0x1D9F, 'M', u'ɜ'), + (0x1DA0, 'M', 
u'f'), + (0x1DA1, 'M', u'ɟ'), + (0x1DA2, 'M', u'ɡ'), + (0x1DA3, 'M', u'ɥ'), + (0x1DA4, 'M', u'ɨ'), + (0x1DA5, 'M', u'ɩ'), + (0x1DA6, 'M', u'ɪ'), + (0x1DA7, 'M', u'ᵻ'), + (0x1DA8, 'M', u'ʝ'), + (0x1DA9, 'M', u'ɭ'), + (0x1DAA, 'M', u'ᶅ'), + (0x1DAB, 'M', u'ʟ'), + (0x1DAC, 'M', u'ɱ'), + (0x1DAD, 'M', u'ɰ'), + (0x1DAE, 'M', u'ɲ'), + (0x1DAF, 'M', u'ɳ'), + (0x1DB0, 'M', u'ɴ'), + (0x1DB1, 'M', u'ɵ'), + (0x1DB2, 'M', u'ɸ'), + (0x1DB3, 'M', u'ʂ'), + (0x1DB4, 'M', u'ʃ'), + (0x1DB5, 'M', u'ƫ'), + (0x1DB6, 'M', u'ʉ'), + (0x1DB7, 'M', u'ʊ'), + (0x1DB8, 'M', u'ᴜ'), + (0x1DB9, 'M', u'ʋ'), + (0x1DBA, 'M', u'ʌ'), + (0x1DBB, 'M', u'z'), + (0x1DBC, 'M', u'ʐ'), + (0x1DBD, 'M', u'ʑ'), + (0x1DBE, 'M', u'ʒ'), + (0x1DBF, 'M', u'θ'), + (0x1DC0, 'V'), + (0x1DFA, 'X'), + (0x1DFB, 'V'), + (0x1E00, 'M', u'ḁ'), + (0x1E01, 'V'), + (0x1E02, 'M', u'ḃ'), + (0x1E03, 'V'), + (0x1E04, 'M', u'ḅ'), + (0x1E05, 'V'), + (0x1E06, 'M', u'ḇ'), + (0x1E07, 'V'), + (0x1E08, 'M', u'ḉ'), + (0x1E09, 'V'), + (0x1E0A, 'M', u'ḋ'), + (0x1E0B, 'V'), + (0x1E0C, 'M', u'ḍ'), + (0x1E0D, 'V'), + (0x1E0E, 'M', u'ḏ'), + (0x1E0F, 'V'), + (0x1E10, 'M', u'ḑ'), + (0x1E11, 'V'), + (0x1E12, 'M', u'ḓ'), + (0x1E13, 'V'), + (0x1E14, 'M', u'ḕ'), + (0x1E15, 'V'), + (0x1E16, 'M', u'ḗ'), + (0x1E17, 'V'), + (0x1E18, 'M', u'ḙ'), + (0x1E19, 'V'), + (0x1E1A, 'M', u'ḛ'), + (0x1E1B, 'V'), + (0x1E1C, 'M', u'ḝ'), + (0x1E1D, 'V'), + (0x1E1E, 'M', u'ḟ'), + (0x1E1F, 'V'), + (0x1E20, 'M', u'ḡ'), + (0x1E21, 'V'), + (0x1E22, 'M', u'ḣ'), + (0x1E23, 'V'), + (0x1E24, 'M', u'ḥ'), + (0x1E25, 'V'), + (0x1E26, 'M', u'ḧ'), + (0x1E27, 'V'), + (0x1E28, 'M', u'ḩ'), + (0x1E29, 'V'), + (0x1E2A, 'M', u'ḫ'), + (0x1E2B, 'V'), + (0x1E2C, 'M', u'ḭ'), + (0x1E2D, 'V'), + (0x1E2E, 'M', u'ḯ'), + (0x1E2F, 'V'), + (0x1E30, 'M', u'ḱ'), + (0x1E31, 'V'), + (0x1E32, 'M', u'ḳ'), + (0x1E33, 'V'), + (0x1E34, 'M', u'ḵ'), + (0x1E35, 'V'), + (0x1E36, 'M', u'ḷ'), + (0x1E37, 'V'), + (0x1E38, 'M', u'ḹ'), + ] + +def _seg_17(): + return [ + (0x1E39, 'V'), + (0x1E3A, 'M', u'ḻ'), + (0x1E3B, 'V'), + (0x1E3C, 'M', u'ḽ'), + (0x1E3D, 'V'), + (0x1E3E, 'M', u'ḿ'), + (0x1E3F, 'V'), + (0x1E40, 'M', u'ṁ'), + (0x1E41, 'V'), + (0x1E42, 'M', u'ṃ'), + (0x1E43, 'V'), + (0x1E44, 'M', u'ṅ'), + (0x1E45, 'V'), + (0x1E46, 'M', u'ṇ'), + (0x1E47, 'V'), + (0x1E48, 'M', u'ṉ'), + (0x1E49, 'V'), + (0x1E4A, 'M', u'ṋ'), + (0x1E4B, 'V'), + (0x1E4C, 'M', u'ṍ'), + (0x1E4D, 'V'), + (0x1E4E, 'M', u'ṏ'), + (0x1E4F, 'V'), + (0x1E50, 'M', u'ṑ'), + (0x1E51, 'V'), + (0x1E52, 'M', u'ṓ'), + (0x1E53, 'V'), + (0x1E54, 'M', u'ṕ'), + (0x1E55, 'V'), + (0x1E56, 'M', u'ṗ'), + (0x1E57, 'V'), + (0x1E58, 'M', u'ṙ'), + (0x1E59, 'V'), + (0x1E5A, 'M', u'ṛ'), + (0x1E5B, 'V'), + (0x1E5C, 'M', u'ṝ'), + (0x1E5D, 'V'), + (0x1E5E, 'M', u'ṟ'), + (0x1E5F, 'V'), + (0x1E60, 'M', u'ṡ'), + (0x1E61, 'V'), + (0x1E62, 'M', u'ṣ'), + (0x1E63, 'V'), + (0x1E64, 'M', u'ṥ'), + (0x1E65, 'V'), + (0x1E66, 'M', u'ṧ'), + (0x1E67, 'V'), + (0x1E68, 'M', u'ṩ'), + (0x1E69, 'V'), + (0x1E6A, 'M', u'ṫ'), + (0x1E6B, 'V'), + (0x1E6C, 'M', u'ṭ'), + (0x1E6D, 'V'), + (0x1E6E, 'M', u'ṯ'), + (0x1E6F, 'V'), + (0x1E70, 'M', u'ṱ'), + (0x1E71, 'V'), + (0x1E72, 'M', u'ṳ'), + (0x1E73, 'V'), + (0x1E74, 'M', u'ṵ'), + (0x1E75, 'V'), + (0x1E76, 'M', u'ṷ'), + (0x1E77, 'V'), + (0x1E78, 'M', u'ṹ'), + (0x1E79, 'V'), + (0x1E7A, 'M', u'ṻ'), + (0x1E7B, 'V'), + (0x1E7C, 'M', u'ṽ'), + (0x1E7D, 'V'), + (0x1E7E, 'M', u'ṿ'), + (0x1E7F, 'V'), + (0x1E80, 'M', u'ẁ'), + (0x1E81, 'V'), + (0x1E82, 'M', u'ẃ'), + (0x1E83, 'V'), + (0x1E84, 'M', u'ẅ'), + (0x1E85, 'V'), + (0x1E86, 'M', u'ẇ'), + (0x1E87, 'V'), + (0x1E88, 'M', u'ẉ'), + 
(0x1E89, 'V'), + (0x1E8A, 'M', u'ẋ'), + (0x1E8B, 'V'), + (0x1E8C, 'M', u'ẍ'), + (0x1E8D, 'V'), + (0x1E8E, 'M', u'ẏ'), + (0x1E8F, 'V'), + (0x1E90, 'M', u'ẑ'), + (0x1E91, 'V'), + (0x1E92, 'M', u'ẓ'), + (0x1E93, 'V'), + (0x1E94, 'M', u'ẕ'), + (0x1E95, 'V'), + (0x1E9A, 'M', u'aʾ'), + (0x1E9B, 'M', u'ṡ'), + (0x1E9C, 'V'), + (0x1E9E, 'M', u'ss'), + (0x1E9F, 'V'), + (0x1EA0, 'M', u'ạ'), + (0x1EA1, 'V'), + ] + +def _seg_18(): + return [ + (0x1EA2, 'M', u'ả'), + (0x1EA3, 'V'), + (0x1EA4, 'M', u'ấ'), + (0x1EA5, 'V'), + (0x1EA6, 'M', u'ầ'), + (0x1EA7, 'V'), + (0x1EA8, 'M', u'ẩ'), + (0x1EA9, 'V'), + (0x1EAA, 'M', u'ẫ'), + (0x1EAB, 'V'), + (0x1EAC, 'M', u'ậ'), + (0x1EAD, 'V'), + (0x1EAE, 'M', u'ắ'), + (0x1EAF, 'V'), + (0x1EB0, 'M', u'ằ'), + (0x1EB1, 'V'), + (0x1EB2, 'M', u'ẳ'), + (0x1EB3, 'V'), + (0x1EB4, 'M', u'ẵ'), + (0x1EB5, 'V'), + (0x1EB6, 'M', u'ặ'), + (0x1EB7, 'V'), + (0x1EB8, 'M', u'ẹ'), + (0x1EB9, 'V'), + (0x1EBA, 'M', u'ẻ'), + (0x1EBB, 'V'), + (0x1EBC, 'M', u'ẽ'), + (0x1EBD, 'V'), + (0x1EBE, 'M', u'ế'), + (0x1EBF, 'V'), + (0x1EC0, 'M', u'ề'), + (0x1EC1, 'V'), + (0x1EC2, 'M', u'ể'), + (0x1EC3, 'V'), + (0x1EC4, 'M', u'ễ'), + (0x1EC5, 'V'), + (0x1EC6, 'M', u'ệ'), + (0x1EC7, 'V'), + (0x1EC8, 'M', u'ỉ'), + (0x1EC9, 'V'), + (0x1ECA, 'M', u'ị'), + (0x1ECB, 'V'), + (0x1ECC, 'M', u'ọ'), + (0x1ECD, 'V'), + (0x1ECE, 'M', u'ỏ'), + (0x1ECF, 'V'), + (0x1ED0, 'M', u'ố'), + (0x1ED1, 'V'), + (0x1ED2, 'M', u'ồ'), + (0x1ED3, 'V'), + (0x1ED4, 'M', u'ổ'), + (0x1ED5, 'V'), + (0x1ED6, 'M', u'ỗ'), + (0x1ED7, 'V'), + (0x1ED8, 'M', u'ộ'), + (0x1ED9, 'V'), + (0x1EDA, 'M', u'ớ'), + (0x1EDB, 'V'), + (0x1EDC, 'M', u'ờ'), + (0x1EDD, 'V'), + (0x1EDE, 'M', u'ở'), + (0x1EDF, 'V'), + (0x1EE0, 'M', u'ỡ'), + (0x1EE1, 'V'), + (0x1EE2, 'M', u'ợ'), + (0x1EE3, 'V'), + (0x1EE4, 'M', u'ụ'), + (0x1EE5, 'V'), + (0x1EE6, 'M', u'ủ'), + (0x1EE7, 'V'), + (0x1EE8, 'M', u'ứ'), + (0x1EE9, 'V'), + (0x1EEA, 'M', u'ừ'), + (0x1EEB, 'V'), + (0x1EEC, 'M', u'ử'), + (0x1EED, 'V'), + (0x1EEE, 'M', u'ữ'), + (0x1EEF, 'V'), + (0x1EF0, 'M', u'ự'), + (0x1EF1, 'V'), + (0x1EF2, 'M', u'ỳ'), + (0x1EF3, 'V'), + (0x1EF4, 'M', u'ỵ'), + (0x1EF5, 'V'), + (0x1EF6, 'M', u'ỷ'), + (0x1EF7, 'V'), + (0x1EF8, 'M', u'ỹ'), + (0x1EF9, 'V'), + (0x1EFA, 'M', u'ỻ'), + (0x1EFB, 'V'), + (0x1EFC, 'M', u'ỽ'), + (0x1EFD, 'V'), + (0x1EFE, 'M', u'ỿ'), + (0x1EFF, 'V'), + (0x1F08, 'M', u'ἀ'), + (0x1F09, 'M', u'ἁ'), + (0x1F0A, 'M', u'ἂ'), + (0x1F0B, 'M', u'ἃ'), + (0x1F0C, 'M', u'ἄ'), + (0x1F0D, 'M', u'ἅ'), + ] + +def _seg_19(): + return [ + (0x1F0E, 'M', u'ἆ'), + (0x1F0F, 'M', u'ἇ'), + (0x1F10, 'V'), + (0x1F16, 'X'), + (0x1F18, 'M', u'ἐ'), + (0x1F19, 'M', u'ἑ'), + (0x1F1A, 'M', u'ἒ'), + (0x1F1B, 'M', u'ἓ'), + (0x1F1C, 'M', u'ἔ'), + (0x1F1D, 'M', u'ἕ'), + (0x1F1E, 'X'), + (0x1F20, 'V'), + (0x1F28, 'M', u'ἠ'), + (0x1F29, 'M', u'ἡ'), + (0x1F2A, 'M', u'ἢ'), + (0x1F2B, 'M', u'ἣ'), + (0x1F2C, 'M', u'ἤ'), + (0x1F2D, 'M', u'ἥ'), + (0x1F2E, 'M', u'ἦ'), + (0x1F2F, 'M', u'ἧ'), + (0x1F30, 'V'), + (0x1F38, 'M', u'ἰ'), + (0x1F39, 'M', u'ἱ'), + (0x1F3A, 'M', u'ἲ'), + (0x1F3B, 'M', u'ἳ'), + (0x1F3C, 'M', u'ἴ'), + (0x1F3D, 'M', u'ἵ'), + (0x1F3E, 'M', u'ἶ'), + (0x1F3F, 'M', u'ἷ'), + (0x1F40, 'V'), + (0x1F46, 'X'), + (0x1F48, 'M', u'ὀ'), + (0x1F49, 'M', u'ὁ'), + (0x1F4A, 'M', u'ὂ'), + (0x1F4B, 'M', u'ὃ'), + (0x1F4C, 'M', u'ὄ'), + (0x1F4D, 'M', u'ὅ'), + (0x1F4E, 'X'), + (0x1F50, 'V'), + (0x1F58, 'X'), + (0x1F59, 'M', u'ὑ'), + (0x1F5A, 'X'), + (0x1F5B, 'M', u'ὓ'), + (0x1F5C, 'X'), + (0x1F5D, 'M', u'ὕ'), + (0x1F5E, 'X'), + (0x1F5F, 'M', u'ὗ'), + (0x1F60, 'V'), + (0x1F68, 'M', u'ὠ'), + (0x1F69, 'M', u'ὡ'), + 
(0x1F6A, 'M', u'ὢ'), + (0x1F6B, 'M', u'ὣ'), + (0x1F6C, 'M', u'ὤ'), + (0x1F6D, 'M', u'ὥ'), + (0x1F6E, 'M', u'ὦ'), + (0x1F6F, 'M', u'ὧ'), + (0x1F70, 'V'), + (0x1F71, 'M', u'ά'), + (0x1F72, 'V'), + (0x1F73, 'M', u'έ'), + (0x1F74, 'V'), + (0x1F75, 'M', u'ή'), + (0x1F76, 'V'), + (0x1F77, 'M', u'ί'), + (0x1F78, 'V'), + (0x1F79, 'M', u'ό'), + (0x1F7A, 'V'), + (0x1F7B, 'M', u'ύ'), + (0x1F7C, 'V'), + (0x1F7D, 'M', u'ώ'), + (0x1F7E, 'X'), + (0x1F80, 'M', u'ἀι'), + (0x1F81, 'M', u'ἁι'), + (0x1F82, 'M', u'ἂι'), + (0x1F83, 'M', u'ἃι'), + (0x1F84, 'M', u'ἄι'), + (0x1F85, 'M', u'ἅι'), + (0x1F86, 'M', u'ἆι'), + (0x1F87, 'M', u'ἇι'), + (0x1F88, 'M', u'ἀι'), + (0x1F89, 'M', u'ἁι'), + (0x1F8A, 'M', u'ἂι'), + (0x1F8B, 'M', u'ἃι'), + (0x1F8C, 'M', u'ἄι'), + (0x1F8D, 'M', u'ἅι'), + (0x1F8E, 'M', u'ἆι'), + (0x1F8F, 'M', u'ἇι'), + (0x1F90, 'M', u'ἠι'), + (0x1F91, 'M', u'ἡι'), + (0x1F92, 'M', u'ἢι'), + (0x1F93, 'M', u'ἣι'), + (0x1F94, 'M', u'ἤι'), + (0x1F95, 'M', u'ἥι'), + (0x1F96, 'M', u'ἦι'), + (0x1F97, 'M', u'ἧι'), + (0x1F98, 'M', u'ἠι'), + (0x1F99, 'M', u'ἡι'), + (0x1F9A, 'M', u'ἢι'), + (0x1F9B, 'M', u'ἣι'), + (0x1F9C, 'M', u'ἤι'), + ] + +def _seg_20(): + return [ + (0x1F9D, 'M', u'ἥι'), + (0x1F9E, 'M', u'ἦι'), + (0x1F9F, 'M', u'ἧι'), + (0x1FA0, 'M', u'ὠι'), + (0x1FA1, 'M', u'ὡι'), + (0x1FA2, 'M', u'ὢι'), + (0x1FA3, 'M', u'ὣι'), + (0x1FA4, 'M', u'ὤι'), + (0x1FA5, 'M', u'ὥι'), + (0x1FA6, 'M', u'ὦι'), + (0x1FA7, 'M', u'ὧι'), + (0x1FA8, 'M', u'ὠι'), + (0x1FA9, 'M', u'ὡι'), + (0x1FAA, 'M', u'ὢι'), + (0x1FAB, 'M', u'ὣι'), + (0x1FAC, 'M', u'ὤι'), + (0x1FAD, 'M', u'ὥι'), + (0x1FAE, 'M', u'ὦι'), + (0x1FAF, 'M', u'ὧι'), + (0x1FB0, 'V'), + (0x1FB2, 'M', u'ὰι'), + (0x1FB3, 'M', u'αι'), + (0x1FB4, 'M', u'άι'), + (0x1FB5, 'X'), + (0x1FB6, 'V'), + (0x1FB7, 'M', u'ᾶι'), + (0x1FB8, 'M', u'ᾰ'), + (0x1FB9, 'M', u'ᾱ'), + (0x1FBA, 'M', u'ὰ'), + (0x1FBB, 'M', u'ά'), + (0x1FBC, 'M', u'αι'), + (0x1FBD, '3', u' ̓'), + (0x1FBE, 'M', u'ι'), + (0x1FBF, '3', u' ̓'), + (0x1FC0, '3', u' ͂'), + (0x1FC1, '3', u' ̈͂'), + (0x1FC2, 'M', u'ὴι'), + (0x1FC3, 'M', u'ηι'), + (0x1FC4, 'M', u'ήι'), + (0x1FC5, 'X'), + (0x1FC6, 'V'), + (0x1FC7, 'M', u'ῆι'), + (0x1FC8, 'M', u'ὲ'), + (0x1FC9, 'M', u'έ'), + (0x1FCA, 'M', u'ὴ'), + (0x1FCB, 'M', u'ή'), + (0x1FCC, 'M', u'ηι'), + (0x1FCD, '3', u' ̓̀'), + (0x1FCE, '3', u' ̓́'), + (0x1FCF, '3', u' ̓͂'), + (0x1FD0, 'V'), + (0x1FD3, 'M', u'ΐ'), + (0x1FD4, 'X'), + (0x1FD6, 'V'), + (0x1FD8, 'M', u'ῐ'), + (0x1FD9, 'M', u'ῑ'), + (0x1FDA, 'M', u'ὶ'), + (0x1FDB, 'M', u'ί'), + (0x1FDC, 'X'), + (0x1FDD, '3', u' ̔̀'), + (0x1FDE, '3', u' ̔́'), + (0x1FDF, '3', u' ̔͂'), + (0x1FE0, 'V'), + (0x1FE3, 'M', u'ΰ'), + (0x1FE4, 'V'), + (0x1FE8, 'M', u'ῠ'), + (0x1FE9, 'M', u'ῡ'), + (0x1FEA, 'M', u'ὺ'), + (0x1FEB, 'M', u'ύ'), + (0x1FEC, 'M', u'ῥ'), + (0x1FED, '3', u' ̈̀'), + (0x1FEE, '3', u' ̈́'), + (0x1FEF, '3', u'`'), + (0x1FF0, 'X'), + (0x1FF2, 'M', u'ὼι'), + (0x1FF3, 'M', u'ωι'), + (0x1FF4, 'M', u'ώι'), + (0x1FF5, 'X'), + (0x1FF6, 'V'), + (0x1FF7, 'M', u'ῶι'), + (0x1FF8, 'M', u'ὸ'), + (0x1FF9, 'M', u'ό'), + (0x1FFA, 'M', u'ὼ'), + (0x1FFB, 'M', u'ώ'), + (0x1FFC, 'M', u'ωι'), + (0x1FFD, '3', u' ́'), + (0x1FFE, '3', u' ̔'), + (0x1FFF, 'X'), + (0x2000, '3', u' '), + (0x200B, 'I'), + (0x200C, 'D', u''), + (0x200E, 'X'), + (0x2010, 'V'), + (0x2011, 'M', u'‐'), + (0x2012, 'V'), + (0x2017, '3', u' ̳'), + (0x2018, 'V'), + (0x2024, 'X'), + (0x2027, 'V'), + (0x2028, 'X'), + ] + +def _seg_21(): + return [ + (0x202F, '3', u' '), + (0x2030, 'V'), + (0x2033, 'M', u'′′'), + (0x2034, 'M', u'′′′'), + (0x2035, 'V'), + (0x2036, 'M', u'‵‵'), + (0x2037, 
'M', u'‵‵‵'), + (0x2038, 'V'), + (0x203C, '3', u'!!'), + (0x203D, 'V'), + (0x203E, '3', u' ̅'), + (0x203F, 'V'), + (0x2047, '3', u'??'), + (0x2048, '3', u'?!'), + (0x2049, '3', u'!?'), + (0x204A, 'V'), + (0x2057, 'M', u'′′′′'), + (0x2058, 'V'), + (0x205F, '3', u' '), + (0x2060, 'I'), + (0x2061, 'X'), + (0x2064, 'I'), + (0x2065, 'X'), + (0x2070, 'M', u'0'), + (0x2071, 'M', u'i'), + (0x2072, 'X'), + (0x2074, 'M', u'4'), + (0x2075, 'M', u'5'), + (0x2076, 'M', u'6'), + (0x2077, 'M', u'7'), + (0x2078, 'M', u'8'), + (0x2079, 'M', u'9'), + (0x207A, '3', u'+'), + (0x207B, 'M', u'−'), + (0x207C, '3', u'='), + (0x207D, '3', u'('), + (0x207E, '3', u')'), + (0x207F, 'M', u'n'), + (0x2080, 'M', u'0'), + (0x2081, 'M', u'1'), + (0x2082, 'M', u'2'), + (0x2083, 'M', u'3'), + (0x2084, 'M', u'4'), + (0x2085, 'M', u'5'), + (0x2086, 'M', u'6'), + (0x2087, 'M', u'7'), + (0x2088, 'M', u'8'), + (0x2089, 'M', u'9'), + (0x208A, '3', u'+'), + (0x208B, 'M', u'−'), + (0x208C, '3', u'='), + (0x208D, '3', u'('), + (0x208E, '3', u')'), + (0x208F, 'X'), + (0x2090, 'M', u'a'), + (0x2091, 'M', u'e'), + (0x2092, 'M', u'o'), + (0x2093, 'M', u'x'), + (0x2094, 'M', u'ə'), + (0x2095, 'M', u'h'), + (0x2096, 'M', u'k'), + (0x2097, 'M', u'l'), + (0x2098, 'M', u'm'), + (0x2099, 'M', u'n'), + (0x209A, 'M', u'p'), + (0x209B, 'M', u's'), + (0x209C, 'M', u't'), + (0x209D, 'X'), + (0x20A0, 'V'), + (0x20A8, 'M', u'rs'), + (0x20A9, 'V'), + (0x20C0, 'X'), + (0x20D0, 'V'), + (0x20F1, 'X'), + (0x2100, '3', u'a/c'), + (0x2101, '3', u'a/s'), + (0x2102, 'M', u'c'), + (0x2103, 'M', u'°c'), + (0x2104, 'V'), + (0x2105, '3', u'c/o'), + (0x2106, '3', u'c/u'), + (0x2107, 'M', u'ɛ'), + (0x2108, 'V'), + (0x2109, 'M', u'°f'), + (0x210A, 'M', u'g'), + (0x210B, 'M', u'h'), + (0x210F, 'M', u'ħ'), + (0x2110, 'M', u'i'), + (0x2112, 'M', u'l'), + (0x2114, 'V'), + (0x2115, 'M', u'n'), + (0x2116, 'M', u'no'), + (0x2117, 'V'), + (0x2119, 'M', u'p'), + (0x211A, 'M', u'q'), + (0x211B, 'M', u'r'), + (0x211E, 'V'), + (0x2120, 'M', u'sm'), + (0x2121, 'M', u'tel'), + (0x2122, 'M', u'tm'), + ] + +def _seg_22(): + return [ + (0x2123, 'V'), + (0x2124, 'M', u'z'), + (0x2125, 'V'), + (0x2126, 'M', u'ω'), + (0x2127, 'V'), + (0x2128, 'M', u'z'), + (0x2129, 'V'), + (0x212A, 'M', u'k'), + (0x212B, 'M', u'å'), + (0x212C, 'M', u'b'), + (0x212D, 'M', u'c'), + (0x212E, 'V'), + (0x212F, 'M', u'e'), + (0x2131, 'M', u'f'), + (0x2132, 'X'), + (0x2133, 'M', u'm'), + (0x2134, 'M', u'o'), + (0x2135, 'M', u'א'), + (0x2136, 'M', u'ב'), + (0x2137, 'M', u'ג'), + (0x2138, 'M', u'ד'), + (0x2139, 'M', u'i'), + (0x213A, 'V'), + (0x213B, 'M', u'fax'), + (0x213C, 'M', u'π'), + (0x213D, 'M', u'γ'), + (0x213F, 'M', u'π'), + (0x2140, 'M', u'∑'), + (0x2141, 'V'), + (0x2145, 'M', u'd'), + (0x2147, 'M', u'e'), + (0x2148, 'M', u'i'), + (0x2149, 'M', u'j'), + (0x214A, 'V'), + (0x2150, 'M', u'1⁄7'), + (0x2151, 'M', u'1⁄9'), + (0x2152, 'M', u'1⁄10'), + (0x2153, 'M', u'1⁄3'), + (0x2154, 'M', u'2⁄3'), + (0x2155, 'M', u'1⁄5'), + (0x2156, 'M', u'2⁄5'), + (0x2157, 'M', u'3⁄5'), + (0x2158, 'M', u'4⁄5'), + (0x2159, 'M', u'1⁄6'), + (0x215A, 'M', u'5⁄6'), + (0x215B, 'M', u'1⁄8'), + (0x215C, 'M', u'3⁄8'), + (0x215D, 'M', u'5⁄8'), + (0x215E, 'M', u'7⁄8'), + (0x215F, 'M', u'1⁄'), + (0x2160, 'M', u'i'), + (0x2161, 'M', u'ii'), + (0x2162, 'M', u'iii'), + (0x2163, 'M', u'iv'), + (0x2164, 'M', u'v'), + (0x2165, 'M', u'vi'), + (0x2166, 'M', u'vii'), + (0x2167, 'M', u'viii'), + (0x2168, 'M', u'ix'), + (0x2169, 'M', u'x'), + (0x216A, 'M', u'xi'), + (0x216B, 'M', u'xii'), + (0x216C, 'M', u'l'), + (0x216D, 'M', u'c'), + (0x216E, 
'M', u'd'), + (0x216F, 'M', u'm'), + (0x2170, 'M', u'i'), + (0x2171, 'M', u'ii'), + (0x2172, 'M', u'iii'), + (0x2173, 'M', u'iv'), + (0x2174, 'M', u'v'), + (0x2175, 'M', u'vi'), + (0x2176, 'M', u'vii'), + (0x2177, 'M', u'viii'), + (0x2178, 'M', u'ix'), + (0x2179, 'M', u'x'), + (0x217A, 'M', u'xi'), + (0x217B, 'M', u'xii'), + (0x217C, 'M', u'l'), + (0x217D, 'M', u'c'), + (0x217E, 'M', u'd'), + (0x217F, 'M', u'm'), + (0x2180, 'V'), + (0x2183, 'X'), + (0x2184, 'V'), + (0x2189, 'M', u'0⁄3'), + (0x218A, 'V'), + (0x218C, 'X'), + (0x2190, 'V'), + (0x222C, 'M', u'∫∫'), + (0x222D, 'M', u'∫∫∫'), + (0x222E, 'V'), + (0x222F, 'M', u'∮∮'), + (0x2230, 'M', u'∮∮∮'), + (0x2231, 'V'), + (0x2260, '3'), + (0x2261, 'V'), + (0x226E, '3'), + (0x2270, 'V'), + (0x2329, 'M', u'〈'), + ] + +def _seg_23(): + return [ + (0x232A, 'M', u'〉'), + (0x232B, 'V'), + (0x2427, 'X'), + (0x2440, 'V'), + (0x244B, 'X'), + (0x2460, 'M', u'1'), + (0x2461, 'M', u'2'), + (0x2462, 'M', u'3'), + (0x2463, 'M', u'4'), + (0x2464, 'M', u'5'), + (0x2465, 'M', u'6'), + (0x2466, 'M', u'7'), + (0x2467, 'M', u'8'), + (0x2468, 'M', u'9'), + (0x2469, 'M', u'10'), + (0x246A, 'M', u'11'), + (0x246B, 'M', u'12'), + (0x246C, 'M', u'13'), + (0x246D, 'M', u'14'), + (0x246E, 'M', u'15'), + (0x246F, 'M', u'16'), + (0x2470, 'M', u'17'), + (0x2471, 'M', u'18'), + (0x2472, 'M', u'19'), + (0x2473, 'M', u'20'), + (0x2474, '3', u'(1)'), + (0x2475, '3', u'(2)'), + (0x2476, '3', u'(3)'), + (0x2477, '3', u'(4)'), + (0x2478, '3', u'(5)'), + (0x2479, '3', u'(6)'), + (0x247A, '3', u'(7)'), + (0x247B, '3', u'(8)'), + (0x247C, '3', u'(9)'), + (0x247D, '3', u'(10)'), + (0x247E, '3', u'(11)'), + (0x247F, '3', u'(12)'), + (0x2480, '3', u'(13)'), + (0x2481, '3', u'(14)'), + (0x2482, '3', u'(15)'), + (0x2483, '3', u'(16)'), + (0x2484, '3', u'(17)'), + (0x2485, '3', u'(18)'), + (0x2486, '3', u'(19)'), + (0x2487, '3', u'(20)'), + (0x2488, 'X'), + (0x249C, '3', u'(a)'), + (0x249D, '3', u'(b)'), + (0x249E, '3', u'(c)'), + (0x249F, '3', u'(d)'), + (0x24A0, '3', u'(e)'), + (0x24A1, '3', u'(f)'), + (0x24A2, '3', u'(g)'), + (0x24A3, '3', u'(h)'), + (0x24A4, '3', u'(i)'), + (0x24A5, '3', u'(j)'), + (0x24A6, '3', u'(k)'), + (0x24A7, '3', u'(l)'), + (0x24A8, '3', u'(m)'), + (0x24A9, '3', u'(n)'), + (0x24AA, '3', u'(o)'), + (0x24AB, '3', u'(p)'), + (0x24AC, '3', u'(q)'), + (0x24AD, '3', u'(r)'), + (0x24AE, '3', u'(s)'), + (0x24AF, '3', u'(t)'), + (0x24B0, '3', u'(u)'), + (0x24B1, '3', u'(v)'), + (0x24B2, '3', u'(w)'), + (0x24B3, '3', u'(x)'), + (0x24B4, '3', u'(y)'), + (0x24B5, '3', u'(z)'), + (0x24B6, 'M', u'a'), + (0x24B7, 'M', u'b'), + (0x24B8, 'M', u'c'), + (0x24B9, 'M', u'd'), + (0x24BA, 'M', u'e'), + (0x24BB, 'M', u'f'), + (0x24BC, 'M', u'g'), + (0x24BD, 'M', u'h'), + (0x24BE, 'M', u'i'), + (0x24BF, 'M', u'j'), + (0x24C0, 'M', u'k'), + (0x24C1, 'M', u'l'), + (0x24C2, 'M', u'm'), + (0x24C3, 'M', u'n'), + (0x24C4, 'M', u'o'), + (0x24C5, 'M', u'p'), + (0x24C6, 'M', u'q'), + (0x24C7, 'M', u'r'), + (0x24C8, 'M', u's'), + (0x24C9, 'M', u't'), + (0x24CA, 'M', u'u'), + (0x24CB, 'M', u'v'), + (0x24CC, 'M', u'w'), + (0x24CD, 'M', u'x'), + (0x24CE, 'M', u'y'), + (0x24CF, 'M', u'z'), + (0x24D0, 'M', u'a'), + (0x24D1, 'M', u'b'), + ] + +def _seg_24(): + return [ + (0x24D2, 'M', u'c'), + (0x24D3, 'M', u'd'), + (0x24D4, 'M', u'e'), + (0x24D5, 'M', u'f'), + (0x24D6, 'M', u'g'), + (0x24D7, 'M', u'h'), + (0x24D8, 'M', u'i'), + (0x24D9, 'M', u'j'), + (0x24DA, 'M', u'k'), + (0x24DB, 'M', u'l'), + (0x24DC, 'M', u'm'), + (0x24DD, 'M', u'n'), + (0x24DE, 'M', u'o'), + (0x24DF, 'M', u'p'), + (0x24E0, 'M', 
u'q'), + (0x24E1, 'M', u'r'), + (0x24E2, 'M', u's'), + (0x24E3, 'M', u't'), + (0x24E4, 'M', u'u'), + (0x24E5, 'M', u'v'), + (0x24E6, 'M', u'w'), + (0x24E7, 'M', u'x'), + (0x24E8, 'M', u'y'), + (0x24E9, 'M', u'z'), + (0x24EA, 'M', u'0'), + (0x24EB, 'V'), + (0x2A0C, 'M', u'∫∫∫∫'), + (0x2A0D, 'V'), + (0x2A74, '3', u'::='), + (0x2A75, '3', u'=='), + (0x2A76, '3', u'==='), + (0x2A77, 'V'), + (0x2ADC, 'M', u'⫝̸'), + (0x2ADD, 'V'), + (0x2B74, 'X'), + (0x2B76, 'V'), + (0x2B96, 'X'), + (0x2B98, 'V'), + (0x2BBA, 'X'), + (0x2BBD, 'V'), + (0x2BC9, 'X'), + (0x2BCA, 'V'), + (0x2BD3, 'X'), + (0x2BEC, 'V'), + (0x2BF0, 'X'), + (0x2C00, 'M', u'ⰰ'), + (0x2C01, 'M', u'ⰱ'), + (0x2C02, 'M', u'ⰲ'), + (0x2C03, 'M', u'ⰳ'), + (0x2C04, 'M', u'ⰴ'), + (0x2C05, 'M', u'ⰵ'), + (0x2C06, 'M', u'ⰶ'), + (0x2C07, 'M', u'ⰷ'), + (0x2C08, 'M', u'ⰸ'), + (0x2C09, 'M', u'ⰹ'), + (0x2C0A, 'M', u'ⰺ'), + (0x2C0B, 'M', u'ⰻ'), + (0x2C0C, 'M', u'ⰼ'), + (0x2C0D, 'M', u'ⰽ'), + (0x2C0E, 'M', u'ⰾ'), + (0x2C0F, 'M', u'ⰿ'), + (0x2C10, 'M', u'ⱀ'), + (0x2C11, 'M', u'ⱁ'), + (0x2C12, 'M', u'ⱂ'), + (0x2C13, 'M', u'ⱃ'), + (0x2C14, 'M', u'ⱄ'), + (0x2C15, 'M', u'ⱅ'), + (0x2C16, 'M', u'ⱆ'), + (0x2C17, 'M', u'ⱇ'), + (0x2C18, 'M', u'ⱈ'), + (0x2C19, 'M', u'ⱉ'), + (0x2C1A, 'M', u'ⱊ'), + (0x2C1B, 'M', u'ⱋ'), + (0x2C1C, 'M', u'ⱌ'), + (0x2C1D, 'M', u'ⱍ'), + (0x2C1E, 'M', u'ⱎ'), + (0x2C1F, 'M', u'ⱏ'), + (0x2C20, 'M', u'ⱐ'), + (0x2C21, 'M', u'ⱑ'), + (0x2C22, 'M', u'ⱒ'), + (0x2C23, 'M', u'ⱓ'), + (0x2C24, 'M', u'ⱔ'), + (0x2C25, 'M', u'ⱕ'), + (0x2C26, 'M', u'ⱖ'), + (0x2C27, 'M', u'ⱗ'), + (0x2C28, 'M', u'ⱘ'), + (0x2C29, 'M', u'ⱙ'), + (0x2C2A, 'M', u'ⱚ'), + (0x2C2B, 'M', u'ⱛ'), + (0x2C2C, 'M', u'ⱜ'), + (0x2C2D, 'M', u'ⱝ'), + (0x2C2E, 'M', u'ⱞ'), + (0x2C2F, 'X'), + (0x2C30, 'V'), + (0x2C5F, 'X'), + (0x2C60, 'M', u'ⱡ'), + (0x2C61, 'V'), + (0x2C62, 'M', u'ɫ'), + (0x2C63, 'M', u'ᵽ'), + (0x2C64, 'M', u'ɽ'), + ] + +def _seg_25(): + return [ + (0x2C65, 'V'), + (0x2C67, 'M', u'ⱨ'), + (0x2C68, 'V'), + (0x2C69, 'M', u'ⱪ'), + (0x2C6A, 'V'), + (0x2C6B, 'M', u'ⱬ'), + (0x2C6C, 'V'), + (0x2C6D, 'M', u'ɑ'), + (0x2C6E, 'M', u'ɱ'), + (0x2C6F, 'M', u'ɐ'), + (0x2C70, 'M', u'ɒ'), + (0x2C71, 'V'), + (0x2C72, 'M', u'ⱳ'), + (0x2C73, 'V'), + (0x2C75, 'M', u'ⱶ'), + (0x2C76, 'V'), + (0x2C7C, 'M', u'j'), + (0x2C7D, 'M', u'v'), + (0x2C7E, 'M', u'ȿ'), + (0x2C7F, 'M', u'ɀ'), + (0x2C80, 'M', u'ⲁ'), + (0x2C81, 'V'), + (0x2C82, 'M', u'ⲃ'), + (0x2C83, 'V'), + (0x2C84, 'M', u'ⲅ'), + (0x2C85, 'V'), + (0x2C86, 'M', u'ⲇ'), + (0x2C87, 'V'), + (0x2C88, 'M', u'ⲉ'), + (0x2C89, 'V'), + (0x2C8A, 'M', u'ⲋ'), + (0x2C8B, 'V'), + (0x2C8C, 'M', u'ⲍ'), + (0x2C8D, 'V'), + (0x2C8E, 'M', u'ⲏ'), + (0x2C8F, 'V'), + (0x2C90, 'M', u'ⲑ'), + (0x2C91, 'V'), + (0x2C92, 'M', u'ⲓ'), + (0x2C93, 'V'), + (0x2C94, 'M', u'ⲕ'), + (0x2C95, 'V'), + (0x2C96, 'M', u'ⲗ'), + (0x2C97, 'V'), + (0x2C98, 'M', u'ⲙ'), + (0x2C99, 'V'), + (0x2C9A, 'M', u'ⲛ'), + (0x2C9B, 'V'), + (0x2C9C, 'M', u'ⲝ'), + (0x2C9D, 'V'), + (0x2C9E, 'M', u'ⲟ'), + (0x2C9F, 'V'), + (0x2CA0, 'M', u'ⲡ'), + (0x2CA1, 'V'), + (0x2CA2, 'M', u'ⲣ'), + (0x2CA3, 'V'), + (0x2CA4, 'M', u'ⲥ'), + (0x2CA5, 'V'), + (0x2CA6, 'M', u'ⲧ'), + (0x2CA7, 'V'), + (0x2CA8, 'M', u'ⲩ'), + (0x2CA9, 'V'), + (0x2CAA, 'M', u'ⲫ'), + (0x2CAB, 'V'), + (0x2CAC, 'M', u'ⲭ'), + (0x2CAD, 'V'), + (0x2CAE, 'M', u'ⲯ'), + (0x2CAF, 'V'), + (0x2CB0, 'M', u'ⲱ'), + (0x2CB1, 'V'), + (0x2CB2, 'M', u'ⲳ'), + (0x2CB3, 'V'), + (0x2CB4, 'M', u'ⲵ'), + (0x2CB5, 'V'), + (0x2CB6, 'M', u'ⲷ'), + (0x2CB7, 'V'), + (0x2CB8, 'M', u'ⲹ'), + (0x2CB9, 'V'), + (0x2CBA, 'M', u'ⲻ'), + (0x2CBB, 'V'), + (0x2CBC, 'M', u'ⲽ'), + (0x2CBD, 'V'), + 
(0x2CBE, 'M', u'ⲿ'), + (0x2CBF, 'V'), + (0x2CC0, 'M', u'ⳁ'), + (0x2CC1, 'V'), + (0x2CC2, 'M', u'ⳃ'), + (0x2CC3, 'V'), + (0x2CC4, 'M', u'ⳅ'), + (0x2CC5, 'V'), + (0x2CC6, 'M', u'ⳇ'), + (0x2CC7, 'V'), + (0x2CC8, 'M', u'ⳉ'), + (0x2CC9, 'V'), + (0x2CCA, 'M', u'ⳋ'), + (0x2CCB, 'V'), + (0x2CCC, 'M', u'ⳍ'), + (0x2CCD, 'V'), + (0x2CCE, 'M', u'ⳏ'), + (0x2CCF, 'V'), + ] + +def _seg_26(): + return [ + (0x2CD0, 'M', u'ⳑ'), + (0x2CD1, 'V'), + (0x2CD2, 'M', u'ⳓ'), + (0x2CD3, 'V'), + (0x2CD4, 'M', u'ⳕ'), + (0x2CD5, 'V'), + (0x2CD6, 'M', u'ⳗ'), + (0x2CD7, 'V'), + (0x2CD8, 'M', u'ⳙ'), + (0x2CD9, 'V'), + (0x2CDA, 'M', u'ⳛ'), + (0x2CDB, 'V'), + (0x2CDC, 'M', u'ⳝ'), + (0x2CDD, 'V'), + (0x2CDE, 'M', u'ⳟ'), + (0x2CDF, 'V'), + (0x2CE0, 'M', u'ⳡ'), + (0x2CE1, 'V'), + (0x2CE2, 'M', u'ⳣ'), + (0x2CE3, 'V'), + (0x2CEB, 'M', u'ⳬ'), + (0x2CEC, 'V'), + (0x2CED, 'M', u'ⳮ'), + (0x2CEE, 'V'), + (0x2CF2, 'M', u'ⳳ'), + (0x2CF3, 'V'), + (0x2CF4, 'X'), + (0x2CF9, 'V'), + (0x2D26, 'X'), + (0x2D27, 'V'), + (0x2D28, 'X'), + (0x2D2D, 'V'), + (0x2D2E, 'X'), + (0x2D30, 'V'), + (0x2D68, 'X'), + (0x2D6F, 'M', u'ⵡ'), + (0x2D70, 'V'), + (0x2D71, 'X'), + (0x2D7F, 'V'), + (0x2D97, 'X'), + (0x2DA0, 'V'), + (0x2DA7, 'X'), + (0x2DA8, 'V'), + (0x2DAF, 'X'), + (0x2DB0, 'V'), + (0x2DB7, 'X'), + (0x2DB8, 'V'), + (0x2DBF, 'X'), + (0x2DC0, 'V'), + (0x2DC7, 'X'), + (0x2DC8, 'V'), + (0x2DCF, 'X'), + (0x2DD0, 'V'), + (0x2DD7, 'X'), + (0x2DD8, 'V'), + (0x2DDF, 'X'), + (0x2DE0, 'V'), + (0x2E4A, 'X'), + (0x2E80, 'V'), + (0x2E9A, 'X'), + (0x2E9B, 'V'), + (0x2E9F, 'M', u'母'), + (0x2EA0, 'V'), + (0x2EF3, 'M', u'龟'), + (0x2EF4, 'X'), + (0x2F00, 'M', u'一'), + (0x2F01, 'M', u'丨'), + (0x2F02, 'M', u'丶'), + (0x2F03, 'M', u'丿'), + (0x2F04, 'M', u'乙'), + (0x2F05, 'M', u'亅'), + (0x2F06, 'M', u'二'), + (0x2F07, 'M', u'亠'), + (0x2F08, 'M', u'人'), + (0x2F09, 'M', u'儿'), + (0x2F0A, 'M', u'入'), + (0x2F0B, 'M', u'八'), + (0x2F0C, 'M', u'冂'), + (0x2F0D, 'M', u'冖'), + (0x2F0E, 'M', u'冫'), + (0x2F0F, 'M', u'几'), + (0x2F10, 'M', u'凵'), + (0x2F11, 'M', u'刀'), + (0x2F12, 'M', u'力'), + (0x2F13, 'M', u'勹'), + (0x2F14, 'M', u'匕'), + (0x2F15, 'M', u'匚'), + (0x2F16, 'M', u'匸'), + (0x2F17, 'M', u'十'), + (0x2F18, 'M', u'卜'), + (0x2F19, 'M', u'卩'), + (0x2F1A, 'M', u'厂'), + (0x2F1B, 'M', u'厶'), + (0x2F1C, 'M', u'又'), + (0x2F1D, 'M', u'口'), + (0x2F1E, 'M', u'囗'), + (0x2F1F, 'M', u'土'), + (0x2F20, 'M', u'士'), + (0x2F21, 'M', u'夂'), + (0x2F22, 'M', u'夊'), + ] + +def _seg_27(): + return [ + (0x2F23, 'M', u'夕'), + (0x2F24, 'M', u'大'), + (0x2F25, 'M', u'女'), + (0x2F26, 'M', u'子'), + (0x2F27, 'M', u'宀'), + (0x2F28, 'M', u'寸'), + (0x2F29, 'M', u'小'), + (0x2F2A, 'M', u'尢'), + (0x2F2B, 'M', u'尸'), + (0x2F2C, 'M', u'屮'), + (0x2F2D, 'M', u'山'), + (0x2F2E, 'M', u'巛'), + (0x2F2F, 'M', u'工'), + (0x2F30, 'M', u'己'), + (0x2F31, 'M', u'巾'), + (0x2F32, 'M', u'干'), + (0x2F33, 'M', u'幺'), + (0x2F34, 'M', u'广'), + (0x2F35, 'M', u'廴'), + (0x2F36, 'M', u'廾'), + (0x2F37, 'M', u'弋'), + (0x2F38, 'M', u'弓'), + (0x2F39, 'M', u'彐'), + (0x2F3A, 'M', u'彡'), + (0x2F3B, 'M', u'彳'), + (0x2F3C, 'M', u'心'), + (0x2F3D, 'M', u'戈'), + (0x2F3E, 'M', u'戶'), + (0x2F3F, 'M', u'手'), + (0x2F40, 'M', u'支'), + (0x2F41, 'M', u'攴'), + (0x2F42, 'M', u'文'), + (0x2F43, 'M', u'斗'), + (0x2F44, 'M', u'斤'), + (0x2F45, 'M', u'方'), + (0x2F46, 'M', u'无'), + (0x2F47, 'M', u'日'), + (0x2F48, 'M', u'曰'), + (0x2F49, 'M', u'月'), + (0x2F4A, 'M', u'木'), + (0x2F4B, 'M', u'欠'), + (0x2F4C, 'M', u'止'), + (0x2F4D, 'M', u'歹'), + (0x2F4E, 'M', u'殳'), + (0x2F4F, 'M', u'毋'), + (0x2F50, 'M', u'比'), + (0x2F51, 'M', u'毛'), + (0x2F52, 'M', u'氏'), + (0x2F53, 'M', 
u'气'), + (0x2F54, 'M', u'水'), + (0x2F55, 'M', u'火'), + (0x2F56, 'M', u'爪'), + (0x2F57, 'M', u'父'), + (0x2F58, 'M', u'爻'), + (0x2F59, 'M', u'爿'), + (0x2F5A, 'M', u'片'), + (0x2F5B, 'M', u'牙'), + (0x2F5C, 'M', u'牛'), + (0x2F5D, 'M', u'犬'), + (0x2F5E, 'M', u'玄'), + (0x2F5F, 'M', u'玉'), + (0x2F60, 'M', u'瓜'), + (0x2F61, 'M', u'瓦'), + (0x2F62, 'M', u'甘'), + (0x2F63, 'M', u'生'), + (0x2F64, 'M', u'用'), + (0x2F65, 'M', u'田'), + (0x2F66, 'M', u'疋'), + (0x2F67, 'M', u'疒'), + (0x2F68, 'M', u'癶'), + (0x2F69, 'M', u'白'), + (0x2F6A, 'M', u'皮'), + (0x2F6B, 'M', u'皿'), + (0x2F6C, 'M', u'目'), + (0x2F6D, 'M', u'矛'), + (0x2F6E, 'M', u'矢'), + (0x2F6F, 'M', u'石'), + (0x2F70, 'M', u'示'), + (0x2F71, 'M', u'禸'), + (0x2F72, 'M', u'禾'), + (0x2F73, 'M', u'穴'), + (0x2F74, 'M', u'立'), + (0x2F75, 'M', u'竹'), + (0x2F76, 'M', u'米'), + (0x2F77, 'M', u'糸'), + (0x2F78, 'M', u'缶'), + (0x2F79, 'M', u'网'), + (0x2F7A, 'M', u'羊'), + (0x2F7B, 'M', u'羽'), + (0x2F7C, 'M', u'老'), + (0x2F7D, 'M', u'而'), + (0x2F7E, 'M', u'耒'), + (0x2F7F, 'M', u'耳'), + (0x2F80, 'M', u'聿'), + (0x2F81, 'M', u'肉'), + (0x2F82, 'M', u'臣'), + (0x2F83, 'M', u'自'), + (0x2F84, 'M', u'至'), + (0x2F85, 'M', u'臼'), + (0x2F86, 'M', u'舌'), + ] + +def _seg_28(): + return [ + (0x2F87, 'M', u'舛'), + (0x2F88, 'M', u'舟'), + (0x2F89, 'M', u'艮'), + (0x2F8A, 'M', u'色'), + (0x2F8B, 'M', u'艸'), + (0x2F8C, 'M', u'虍'), + (0x2F8D, 'M', u'虫'), + (0x2F8E, 'M', u'血'), + (0x2F8F, 'M', u'行'), + (0x2F90, 'M', u'衣'), + (0x2F91, 'M', u'襾'), + (0x2F92, 'M', u'見'), + (0x2F93, 'M', u'角'), + (0x2F94, 'M', u'言'), + (0x2F95, 'M', u'谷'), + (0x2F96, 'M', u'豆'), + (0x2F97, 'M', u'豕'), + (0x2F98, 'M', u'豸'), + (0x2F99, 'M', u'貝'), + (0x2F9A, 'M', u'赤'), + (0x2F9B, 'M', u'走'), + (0x2F9C, 'M', u'足'), + (0x2F9D, 'M', u'身'), + (0x2F9E, 'M', u'車'), + (0x2F9F, 'M', u'辛'), + (0x2FA0, 'M', u'辰'), + (0x2FA1, 'M', u'辵'), + (0x2FA2, 'M', u'邑'), + (0x2FA3, 'M', u'酉'), + (0x2FA4, 'M', u'釆'), + (0x2FA5, 'M', u'里'), + (0x2FA6, 'M', u'金'), + (0x2FA7, 'M', u'長'), + (0x2FA8, 'M', u'門'), + (0x2FA9, 'M', u'阜'), + (0x2FAA, 'M', u'隶'), + (0x2FAB, 'M', u'隹'), + (0x2FAC, 'M', u'雨'), + (0x2FAD, 'M', u'靑'), + (0x2FAE, 'M', u'非'), + (0x2FAF, 'M', u'面'), + (0x2FB0, 'M', u'革'), + (0x2FB1, 'M', u'韋'), + (0x2FB2, 'M', u'韭'), + (0x2FB3, 'M', u'音'), + (0x2FB4, 'M', u'頁'), + (0x2FB5, 'M', u'風'), + (0x2FB6, 'M', u'飛'), + (0x2FB7, 'M', u'食'), + (0x2FB8, 'M', u'首'), + (0x2FB9, 'M', u'香'), + (0x2FBA, 'M', u'馬'), + (0x2FBB, 'M', u'骨'), + (0x2FBC, 'M', u'高'), + (0x2FBD, 'M', u'髟'), + (0x2FBE, 'M', u'鬥'), + (0x2FBF, 'M', u'鬯'), + (0x2FC0, 'M', u'鬲'), + (0x2FC1, 'M', u'鬼'), + (0x2FC2, 'M', u'魚'), + (0x2FC3, 'M', u'鳥'), + (0x2FC4, 'M', u'鹵'), + (0x2FC5, 'M', u'鹿'), + (0x2FC6, 'M', u'麥'), + (0x2FC7, 'M', u'麻'), + (0x2FC8, 'M', u'黃'), + (0x2FC9, 'M', u'黍'), + (0x2FCA, 'M', u'黑'), + (0x2FCB, 'M', u'黹'), + (0x2FCC, 'M', u'黽'), + (0x2FCD, 'M', u'鼎'), + (0x2FCE, 'M', u'鼓'), + (0x2FCF, 'M', u'鼠'), + (0x2FD0, 'M', u'鼻'), + (0x2FD1, 'M', u'齊'), + (0x2FD2, 'M', u'齒'), + (0x2FD3, 'M', u'龍'), + (0x2FD4, 'M', u'龜'), + (0x2FD5, 'M', u'龠'), + (0x2FD6, 'X'), + (0x3000, '3', u' '), + (0x3001, 'V'), + (0x3002, 'M', u'.'), + (0x3003, 'V'), + (0x3036, 'M', u'〒'), + (0x3037, 'V'), + (0x3038, 'M', u'十'), + (0x3039, 'M', u'卄'), + (0x303A, 'M', u'卅'), + (0x303B, 'V'), + (0x3040, 'X'), + (0x3041, 'V'), + (0x3097, 'X'), + (0x3099, 'V'), + (0x309B, '3', u' ゙'), + (0x309C, '3', u' ゚'), + (0x309D, 'V'), + (0x309F, 'M', u'より'), + (0x30A0, 'V'), + (0x30FF, 'M', u'コト'), + ] + +def _seg_29(): + return [ + (0x3100, 'X'), + (0x3105, 'V'), + (0x312F, 'X'), + (0x3131, 'M', 
u'ᄀ'), + (0x3132, 'M', u'ᄁ'), + (0x3133, 'M', u'ᆪ'), + (0x3134, 'M', u'ᄂ'), + (0x3135, 'M', u'ᆬ'), + (0x3136, 'M', u'ᆭ'), + (0x3137, 'M', u'ᄃ'), + (0x3138, 'M', u'ᄄ'), + (0x3139, 'M', u'ᄅ'), + (0x313A, 'M', u'ᆰ'), + (0x313B, 'M', u'ᆱ'), + (0x313C, 'M', u'ᆲ'), + (0x313D, 'M', u'ᆳ'), + (0x313E, 'M', u'ᆴ'), + (0x313F, 'M', u'ᆵ'), + (0x3140, 'M', u'ᄚ'), + (0x3141, 'M', u'ᄆ'), + (0x3142, 'M', u'ᄇ'), + (0x3143, 'M', u'ᄈ'), + (0x3144, 'M', u'ᄡ'), + (0x3145, 'M', u'ᄉ'), + (0x3146, 'M', u'ᄊ'), + (0x3147, 'M', u'ᄋ'), + (0x3148, 'M', u'ᄌ'), + (0x3149, 'M', u'ᄍ'), + (0x314A, 'M', u'ᄎ'), + (0x314B, 'M', u'ᄏ'), + (0x314C, 'M', u'ᄐ'), + (0x314D, 'M', u'ᄑ'), + (0x314E, 'M', u'ᄒ'), + (0x314F, 'M', u'ᅡ'), + (0x3150, 'M', u'ᅢ'), + (0x3151, 'M', u'ᅣ'), + (0x3152, 'M', u'ᅤ'), + (0x3153, 'M', u'ᅥ'), + (0x3154, 'M', u'ᅦ'), + (0x3155, 'M', u'ᅧ'), + (0x3156, 'M', u'ᅨ'), + (0x3157, 'M', u'ᅩ'), + (0x3158, 'M', u'ᅪ'), + (0x3159, 'M', u'ᅫ'), + (0x315A, 'M', u'ᅬ'), + (0x315B, 'M', u'ᅭ'), + (0x315C, 'M', u'ᅮ'), + (0x315D, 'M', u'ᅯ'), + (0x315E, 'M', u'ᅰ'), + (0x315F, 'M', u'ᅱ'), + (0x3160, 'M', u'ᅲ'), + (0x3161, 'M', u'ᅳ'), + (0x3162, 'M', u'ᅴ'), + (0x3163, 'M', u'ᅵ'), + (0x3164, 'X'), + (0x3165, 'M', u'ᄔ'), + (0x3166, 'M', u'ᄕ'), + (0x3167, 'M', u'ᇇ'), + (0x3168, 'M', u'ᇈ'), + (0x3169, 'M', u'ᇌ'), + (0x316A, 'M', u'ᇎ'), + (0x316B, 'M', u'ᇓ'), + (0x316C, 'M', u'ᇗ'), + (0x316D, 'M', u'ᇙ'), + (0x316E, 'M', u'ᄜ'), + (0x316F, 'M', u'ᇝ'), + (0x3170, 'M', u'ᇟ'), + (0x3171, 'M', u'ᄝ'), + (0x3172, 'M', u'ᄞ'), + (0x3173, 'M', u'ᄠ'), + (0x3174, 'M', u'ᄢ'), + (0x3175, 'M', u'ᄣ'), + (0x3176, 'M', u'ᄧ'), + (0x3177, 'M', u'ᄩ'), + (0x3178, 'M', u'ᄫ'), + (0x3179, 'M', u'ᄬ'), + (0x317A, 'M', u'ᄭ'), + (0x317B, 'M', u'ᄮ'), + (0x317C, 'M', u'ᄯ'), + (0x317D, 'M', u'ᄲ'), + (0x317E, 'M', u'ᄶ'), + (0x317F, 'M', u'ᅀ'), + (0x3180, 'M', u'ᅇ'), + (0x3181, 'M', u'ᅌ'), + (0x3182, 'M', u'ᇱ'), + (0x3183, 'M', u'ᇲ'), + (0x3184, 'M', u'ᅗ'), + (0x3185, 'M', u'ᅘ'), + (0x3186, 'M', u'ᅙ'), + (0x3187, 'M', u'ᆄ'), + (0x3188, 'M', u'ᆅ'), + (0x3189, 'M', u'ᆈ'), + (0x318A, 'M', u'ᆑ'), + (0x318B, 'M', u'ᆒ'), + (0x318C, 'M', u'ᆔ'), + (0x318D, 'M', u'ᆞ'), + (0x318E, 'M', u'ᆡ'), + (0x318F, 'X'), + (0x3190, 'V'), + (0x3192, 'M', u'一'), + ] + +def _seg_30(): + return [ + (0x3193, 'M', u'二'), + (0x3194, 'M', u'三'), + (0x3195, 'M', u'四'), + (0x3196, 'M', u'上'), + (0x3197, 'M', u'中'), + (0x3198, 'M', u'下'), + (0x3199, 'M', u'甲'), + (0x319A, 'M', u'乙'), + (0x319B, 'M', u'丙'), + (0x319C, 'M', u'丁'), + (0x319D, 'M', u'天'), + (0x319E, 'M', u'地'), + (0x319F, 'M', u'人'), + (0x31A0, 'V'), + (0x31BB, 'X'), + (0x31C0, 'V'), + (0x31E4, 'X'), + (0x31F0, 'V'), + (0x3200, '3', u'(ᄀ)'), + (0x3201, '3', u'(ᄂ)'), + (0x3202, '3', u'(ᄃ)'), + (0x3203, '3', u'(ᄅ)'), + (0x3204, '3', u'(ᄆ)'), + (0x3205, '3', u'(ᄇ)'), + (0x3206, '3', u'(ᄉ)'), + (0x3207, '3', u'(ᄋ)'), + (0x3208, '3', u'(ᄌ)'), + (0x3209, '3', u'(ᄎ)'), + (0x320A, '3', u'(ᄏ)'), + (0x320B, '3', u'(ᄐ)'), + (0x320C, '3', u'(ᄑ)'), + (0x320D, '3', u'(ᄒ)'), + (0x320E, '3', u'(가)'), + (0x320F, '3', u'(나)'), + (0x3210, '3', u'(다)'), + (0x3211, '3', u'(라)'), + (0x3212, '3', u'(마)'), + (0x3213, '3', u'(바)'), + (0x3214, '3', u'(사)'), + (0x3215, '3', u'(아)'), + (0x3216, '3', u'(자)'), + (0x3217, '3', u'(차)'), + (0x3218, '3', u'(카)'), + (0x3219, '3', u'(타)'), + (0x321A, '3', u'(파)'), + (0x321B, '3', u'(하)'), + (0x321C, '3', u'(주)'), + (0x321D, '3', u'(오전)'), + (0x321E, '3', u'(오후)'), + (0x321F, 'X'), + (0x3220, '3', u'(一)'), + (0x3221, '3', u'(二)'), + (0x3222, '3', u'(三)'), + (0x3223, '3', u'(四)'), + (0x3224, '3', u'(五)'), + (0x3225, '3', 
u'(六)'), + (0x3226, '3', u'(七)'), + (0x3227, '3', u'(八)'), + (0x3228, '3', u'(九)'), + (0x3229, '3', u'(十)'), + (0x322A, '3', u'(月)'), + (0x322B, '3', u'(火)'), + (0x322C, '3', u'(水)'), + (0x322D, '3', u'(木)'), + (0x322E, '3', u'(金)'), + (0x322F, '3', u'(土)'), + (0x3230, '3', u'(日)'), + (0x3231, '3', u'(株)'), + (0x3232, '3', u'(有)'), + (0x3233, '3', u'(社)'), + (0x3234, '3', u'(名)'), + (0x3235, '3', u'(特)'), + (0x3236, '3', u'(財)'), + (0x3237, '3', u'(祝)'), + (0x3238, '3', u'(労)'), + (0x3239, '3', u'(代)'), + (0x323A, '3', u'(呼)'), + (0x323B, '3', u'(学)'), + (0x323C, '3', u'(監)'), + (0x323D, '3', u'(企)'), + (0x323E, '3', u'(資)'), + (0x323F, '3', u'(協)'), + (0x3240, '3', u'(祭)'), + (0x3241, '3', u'(休)'), + (0x3242, '3', u'(自)'), + (0x3243, '3', u'(至)'), + (0x3244, 'M', u'問'), + (0x3245, 'M', u'幼'), + (0x3246, 'M', u'文'), + (0x3247, 'M', u'箏'), + (0x3248, 'V'), + (0x3250, 'M', u'pte'), + (0x3251, 'M', u'21'), + (0x3252, 'M', u'22'), + (0x3253, 'M', u'23'), + (0x3254, 'M', u'24'), + (0x3255, 'M', u'25'), + (0x3256, 'M', u'26'), + (0x3257, 'M', u'27'), + (0x3258, 'M', u'28'), + ] + +def _seg_31(): + return [ + (0x3259, 'M', u'29'), + (0x325A, 'M', u'30'), + (0x325B, 'M', u'31'), + (0x325C, 'M', u'32'), + (0x325D, 'M', u'33'), + (0x325E, 'M', u'34'), + (0x325F, 'M', u'35'), + (0x3260, 'M', u'ᄀ'), + (0x3261, 'M', u'ᄂ'), + (0x3262, 'M', u'ᄃ'), + (0x3263, 'M', u'ᄅ'), + (0x3264, 'M', u'ᄆ'), + (0x3265, 'M', u'ᄇ'), + (0x3266, 'M', u'ᄉ'), + (0x3267, 'M', u'ᄋ'), + (0x3268, 'M', u'ᄌ'), + (0x3269, 'M', u'ᄎ'), + (0x326A, 'M', u'ᄏ'), + (0x326B, 'M', u'ᄐ'), + (0x326C, 'M', u'ᄑ'), + (0x326D, 'M', u'ᄒ'), + (0x326E, 'M', u'가'), + (0x326F, 'M', u'나'), + (0x3270, 'M', u'다'), + (0x3271, 'M', u'라'), + (0x3272, 'M', u'마'), + (0x3273, 'M', u'바'), + (0x3274, 'M', u'사'), + (0x3275, 'M', u'아'), + (0x3276, 'M', u'자'), + (0x3277, 'M', u'차'), + (0x3278, 'M', u'카'), + (0x3279, 'M', u'타'), + (0x327A, 'M', u'파'), + (0x327B, 'M', u'하'), + (0x327C, 'M', u'참고'), + (0x327D, 'M', u'주의'), + (0x327E, 'M', u'우'), + (0x327F, 'V'), + (0x3280, 'M', u'一'), + (0x3281, 'M', u'二'), + (0x3282, 'M', u'三'), + (0x3283, 'M', u'四'), + (0x3284, 'M', u'五'), + (0x3285, 'M', u'六'), + (0x3286, 'M', u'七'), + (0x3287, 'M', u'八'), + (0x3288, 'M', u'九'), + (0x3289, 'M', u'十'), + (0x328A, 'M', u'月'), + (0x328B, 'M', u'火'), + (0x328C, 'M', u'水'), + (0x328D, 'M', u'木'), + (0x328E, 'M', u'金'), + (0x328F, 'M', u'土'), + (0x3290, 'M', u'日'), + (0x3291, 'M', u'株'), + (0x3292, 'M', u'有'), + (0x3293, 'M', u'社'), + (0x3294, 'M', u'名'), + (0x3295, 'M', u'特'), + (0x3296, 'M', u'財'), + (0x3297, 'M', u'祝'), + (0x3298, 'M', u'労'), + (0x3299, 'M', u'秘'), + (0x329A, 'M', u'男'), + (0x329B, 'M', u'女'), + (0x329C, 'M', u'適'), + (0x329D, 'M', u'優'), + (0x329E, 'M', u'印'), + (0x329F, 'M', u'注'), + (0x32A0, 'M', u'項'), + (0x32A1, 'M', u'休'), + (0x32A2, 'M', u'写'), + (0x32A3, 'M', u'正'), + (0x32A4, 'M', u'上'), + (0x32A5, 'M', u'中'), + (0x32A6, 'M', u'下'), + (0x32A7, 'M', u'左'), + (0x32A8, 'M', u'右'), + (0x32A9, 'M', u'医'), + (0x32AA, 'M', u'宗'), + (0x32AB, 'M', u'学'), + (0x32AC, 'M', u'監'), + (0x32AD, 'M', u'企'), + (0x32AE, 'M', u'資'), + (0x32AF, 'M', u'協'), + (0x32B0, 'M', u'夜'), + (0x32B1, 'M', u'36'), + (0x32B2, 'M', u'37'), + (0x32B3, 'M', u'38'), + (0x32B4, 'M', u'39'), + (0x32B5, 'M', u'40'), + (0x32B6, 'M', u'41'), + (0x32B7, 'M', u'42'), + (0x32B8, 'M', u'43'), + (0x32B9, 'M', u'44'), + (0x32BA, 'M', u'45'), + (0x32BB, 'M', u'46'), + (0x32BC, 'M', u'47'), + ] + +def _seg_32(): + return [ + (0x32BD, 'M', u'48'), + (0x32BE, 'M', u'49'), + (0x32BF, 'M', u'50'), + (0x32C0, 'M', 
u'1月'), + (0x32C1, 'M', u'2月'), + (0x32C2, 'M', u'3月'), + (0x32C3, 'M', u'4月'), + (0x32C4, 'M', u'5月'), + (0x32C5, 'M', u'6月'), + (0x32C6, 'M', u'7月'), + (0x32C7, 'M', u'8月'), + (0x32C8, 'M', u'9月'), + (0x32C9, 'M', u'10月'), + (0x32CA, 'M', u'11月'), + (0x32CB, 'M', u'12月'), + (0x32CC, 'M', u'hg'), + (0x32CD, 'M', u'erg'), + (0x32CE, 'M', u'ev'), + (0x32CF, 'M', u'ltd'), + (0x32D0, 'M', u'ア'), + (0x32D1, 'M', u'イ'), + (0x32D2, 'M', u'ウ'), + (0x32D3, 'M', u'エ'), + (0x32D4, 'M', u'オ'), + (0x32D5, 'M', u'カ'), + (0x32D6, 'M', u'キ'), + (0x32D7, 'M', u'ク'), + (0x32D8, 'M', u'ケ'), + (0x32D9, 'M', u'コ'), + (0x32DA, 'M', u'サ'), + (0x32DB, 'M', u'シ'), + (0x32DC, 'M', u'ス'), + (0x32DD, 'M', u'セ'), + (0x32DE, 'M', u'ソ'), + (0x32DF, 'M', u'タ'), + (0x32E0, 'M', u'チ'), + (0x32E1, 'M', u'ツ'), + (0x32E2, 'M', u'テ'), + (0x32E3, 'M', u'ト'), + (0x32E4, 'M', u'ナ'), + (0x32E5, 'M', u'ニ'), + (0x32E6, 'M', u'ヌ'), + (0x32E7, 'M', u'ネ'), + (0x32E8, 'M', u'ノ'), + (0x32E9, 'M', u'ハ'), + (0x32EA, 'M', u'ヒ'), + (0x32EB, 'M', u'フ'), + (0x32EC, 'M', u'ヘ'), + (0x32ED, 'M', u'ホ'), + (0x32EE, 'M', u'マ'), + (0x32EF, 'M', u'ミ'), + (0x32F0, 'M', u'ム'), + (0x32F1, 'M', u'メ'), + (0x32F2, 'M', u'モ'), + (0x32F3, 'M', u'ヤ'), + (0x32F4, 'M', u'ユ'), + (0x32F5, 'M', u'ヨ'), + (0x32F6, 'M', u'ラ'), + (0x32F7, 'M', u'リ'), + (0x32F8, 'M', u'ル'), + (0x32F9, 'M', u'レ'), + (0x32FA, 'M', u'ロ'), + (0x32FB, 'M', u'ワ'), + (0x32FC, 'M', u'ヰ'), + (0x32FD, 'M', u'ヱ'), + (0x32FE, 'M', u'ヲ'), + (0x32FF, 'X'), + (0x3300, 'M', u'アパート'), + (0x3301, 'M', u'アルファ'), + (0x3302, 'M', u'アンペア'), + (0x3303, 'M', u'アール'), + (0x3304, 'M', u'イニング'), + (0x3305, 'M', u'インチ'), + (0x3306, 'M', u'ウォン'), + (0x3307, 'M', u'エスクード'), + (0x3308, 'M', u'エーカー'), + (0x3309, 'M', u'オンス'), + (0x330A, 'M', u'オーム'), + (0x330B, 'M', u'カイリ'), + (0x330C, 'M', u'カラット'), + (0x330D, 'M', u'カロリー'), + (0x330E, 'M', u'ガロン'), + (0x330F, 'M', u'ガンマ'), + (0x3310, 'M', u'ギガ'), + (0x3311, 'M', u'ギニー'), + (0x3312, 'M', u'キュリー'), + (0x3313, 'M', u'ギルダー'), + (0x3314, 'M', u'キロ'), + (0x3315, 'M', u'キログラム'), + (0x3316, 'M', u'キロメートル'), + (0x3317, 'M', u'キロワット'), + (0x3318, 'M', u'グラム'), + (0x3319, 'M', u'グラムトン'), + (0x331A, 'M', u'クルゼイロ'), + (0x331B, 'M', u'クローネ'), + (0x331C, 'M', u'ケース'), + (0x331D, 'M', u'コルナ'), + (0x331E, 'M', u'コーポ'), + (0x331F, 'M', u'サイクル'), + (0x3320, 'M', u'サンチーム'), + ] + +def _seg_33(): + return [ + (0x3321, 'M', u'シリング'), + (0x3322, 'M', u'センチ'), + (0x3323, 'M', u'セント'), + (0x3324, 'M', u'ダース'), + (0x3325, 'M', u'デシ'), + (0x3326, 'M', u'ドル'), + (0x3327, 'M', u'トン'), + (0x3328, 'M', u'ナノ'), + (0x3329, 'M', u'ノット'), + (0x332A, 'M', u'ハイツ'), + (0x332B, 'M', u'パーセント'), + (0x332C, 'M', u'パーツ'), + (0x332D, 'M', u'バーレル'), + (0x332E, 'M', u'ピアストル'), + (0x332F, 'M', u'ピクル'), + (0x3330, 'M', u'ピコ'), + (0x3331, 'M', u'ビル'), + (0x3332, 'M', u'ファラッド'), + (0x3333, 'M', u'フィート'), + (0x3334, 'M', u'ブッシェル'), + (0x3335, 'M', u'フラン'), + (0x3336, 'M', u'ヘクタール'), + (0x3337, 'M', u'ペソ'), + (0x3338, 'M', u'ペニヒ'), + (0x3339, 'M', u'ヘルツ'), + (0x333A, 'M', u'ペンス'), + (0x333B, 'M', u'ページ'), + (0x333C, 'M', u'ベータ'), + (0x333D, 'M', u'ポイント'), + (0x333E, 'M', u'ボルト'), + (0x333F, 'M', u'ホン'), + (0x3340, 'M', u'ポンド'), + (0x3341, 'M', u'ホール'), + (0x3342, 'M', u'ホーン'), + (0x3343, 'M', u'マイクロ'), + (0x3344, 'M', u'マイル'), + (0x3345, 'M', u'マッハ'), + (0x3346, 'M', u'マルク'), + (0x3347, 'M', u'マンション'), + (0x3348, 'M', u'ミクロン'), + (0x3349, 'M', u'ミリ'), + (0x334A, 'M', u'ミリバール'), + (0x334B, 'M', u'メガ'), + (0x334C, 'M', u'メガトン'), + (0x334D, 'M', u'メートル'), + (0x334E, 'M', u'ヤード'), + (0x334F, 'M', u'ヤール'), + (0x3350, 
'M', u'ユアン'), + (0x3351, 'M', u'リットル'), + (0x3352, 'M', u'リラ'), + (0x3353, 'M', u'ルピー'), + (0x3354, 'M', u'ルーブル'), + (0x3355, 'M', u'レム'), + (0x3356, 'M', u'レントゲン'), + (0x3357, 'M', u'ワット'), + (0x3358, 'M', u'0点'), + (0x3359, 'M', u'1点'), + (0x335A, 'M', u'2点'), + (0x335B, 'M', u'3点'), + (0x335C, 'M', u'4点'), + (0x335D, 'M', u'5点'), + (0x335E, 'M', u'6点'), + (0x335F, 'M', u'7点'), + (0x3360, 'M', u'8点'), + (0x3361, 'M', u'9点'), + (0x3362, 'M', u'10点'), + (0x3363, 'M', u'11点'), + (0x3364, 'M', u'12点'), + (0x3365, 'M', u'13点'), + (0x3366, 'M', u'14点'), + (0x3367, 'M', u'15点'), + (0x3368, 'M', u'16点'), + (0x3369, 'M', u'17点'), + (0x336A, 'M', u'18点'), + (0x336B, 'M', u'19点'), + (0x336C, 'M', u'20点'), + (0x336D, 'M', u'21点'), + (0x336E, 'M', u'22点'), + (0x336F, 'M', u'23点'), + (0x3370, 'M', u'24点'), + (0x3371, 'M', u'hpa'), + (0x3372, 'M', u'da'), + (0x3373, 'M', u'au'), + (0x3374, 'M', u'bar'), + (0x3375, 'M', u'ov'), + (0x3376, 'M', u'pc'), + (0x3377, 'M', u'dm'), + (0x3378, 'M', u'dm2'), + (0x3379, 'M', u'dm3'), + (0x337A, 'M', u'iu'), + (0x337B, 'M', u'平成'), + (0x337C, 'M', u'昭和'), + (0x337D, 'M', u'大正'), + (0x337E, 'M', u'明治'), + (0x337F, 'M', u'株式会社'), + (0x3380, 'M', u'pa'), + (0x3381, 'M', u'na'), + (0x3382, 'M', u'μa'), + (0x3383, 'M', u'ma'), + (0x3384, 'M', u'ka'), + ] + +def _seg_34(): + return [ + (0x3385, 'M', u'kb'), + (0x3386, 'M', u'mb'), + (0x3387, 'M', u'gb'), + (0x3388, 'M', u'cal'), + (0x3389, 'M', u'kcal'), + (0x338A, 'M', u'pf'), + (0x338B, 'M', u'nf'), + (0x338C, 'M', u'μf'), + (0x338D, 'M', u'μg'), + (0x338E, 'M', u'mg'), + (0x338F, 'M', u'kg'), + (0x3390, 'M', u'hz'), + (0x3391, 'M', u'khz'), + (0x3392, 'M', u'mhz'), + (0x3393, 'M', u'ghz'), + (0x3394, 'M', u'thz'), + (0x3395, 'M', u'μl'), + (0x3396, 'M', u'ml'), + (0x3397, 'M', u'dl'), + (0x3398, 'M', u'kl'), + (0x3399, 'M', u'fm'), + (0x339A, 'M', u'nm'), + (0x339B, 'M', u'μm'), + (0x339C, 'M', u'mm'), + (0x339D, 'M', u'cm'), + (0x339E, 'M', u'km'), + (0x339F, 'M', u'mm2'), + (0x33A0, 'M', u'cm2'), + (0x33A1, 'M', u'm2'), + (0x33A2, 'M', u'km2'), + (0x33A3, 'M', u'mm3'), + (0x33A4, 'M', u'cm3'), + (0x33A5, 'M', u'm3'), + (0x33A6, 'M', u'km3'), + (0x33A7, 'M', u'm∕s'), + (0x33A8, 'M', u'm∕s2'), + (0x33A9, 'M', u'pa'), + (0x33AA, 'M', u'kpa'), + (0x33AB, 'M', u'mpa'), + (0x33AC, 'M', u'gpa'), + (0x33AD, 'M', u'rad'), + (0x33AE, 'M', u'rad∕s'), + (0x33AF, 'M', u'rad∕s2'), + (0x33B0, 'M', u'ps'), + (0x33B1, 'M', u'ns'), + (0x33B2, 'M', u'μs'), + (0x33B3, 'M', u'ms'), + (0x33B4, 'M', u'pv'), + (0x33B5, 'M', u'nv'), + (0x33B6, 'M', u'μv'), + (0x33B7, 'M', u'mv'), + (0x33B8, 'M', u'kv'), + (0x33B9, 'M', u'mv'), + (0x33BA, 'M', u'pw'), + (0x33BB, 'M', u'nw'), + (0x33BC, 'M', u'μw'), + (0x33BD, 'M', u'mw'), + (0x33BE, 'M', u'kw'), + (0x33BF, 'M', u'mw'), + (0x33C0, 'M', u'kω'), + (0x33C1, 'M', u'mω'), + (0x33C2, 'X'), + (0x33C3, 'M', u'bq'), + (0x33C4, 'M', u'cc'), + (0x33C5, 'M', u'cd'), + (0x33C6, 'M', u'c∕kg'), + (0x33C7, 'X'), + (0x33C8, 'M', u'db'), + (0x33C9, 'M', u'gy'), + (0x33CA, 'M', u'ha'), + (0x33CB, 'M', u'hp'), + (0x33CC, 'M', u'in'), + (0x33CD, 'M', u'kk'), + (0x33CE, 'M', u'km'), + (0x33CF, 'M', u'kt'), + (0x33D0, 'M', u'lm'), + (0x33D1, 'M', u'ln'), + (0x33D2, 'M', u'log'), + (0x33D3, 'M', u'lx'), + (0x33D4, 'M', u'mb'), + (0x33D5, 'M', u'mil'), + (0x33D6, 'M', u'mol'), + (0x33D7, 'M', u'ph'), + (0x33D8, 'X'), + (0x33D9, 'M', u'ppm'), + (0x33DA, 'M', u'pr'), + (0x33DB, 'M', u'sr'), + (0x33DC, 'M', u'sv'), + (0x33DD, 'M', u'wb'), + (0x33DE, 'M', u'v∕m'), + (0x33DF, 'M', u'a∕m'), + (0x33E0, 'M', u'1日'), + 
(0x33E1, 'M', u'2日'), + (0x33E2, 'M', u'3日'), + (0x33E3, 'M', u'4日'), + (0x33E4, 'M', u'5日'), + (0x33E5, 'M', u'6日'), + (0x33E6, 'M', u'7日'), + (0x33E7, 'M', u'8日'), + (0x33E8, 'M', u'9日'), + ] + +def _seg_35(): + return [ + (0x33E9, 'M', u'10日'), + (0x33EA, 'M', u'11日'), + (0x33EB, 'M', u'12日'), + (0x33EC, 'M', u'13日'), + (0x33ED, 'M', u'14日'), + (0x33EE, 'M', u'15日'), + (0x33EF, 'M', u'16日'), + (0x33F0, 'M', u'17日'), + (0x33F1, 'M', u'18日'), + (0x33F2, 'M', u'19日'), + (0x33F3, 'M', u'20日'), + (0x33F4, 'M', u'21日'), + (0x33F5, 'M', u'22日'), + (0x33F6, 'M', u'23日'), + (0x33F7, 'M', u'24日'), + (0x33F8, 'M', u'25日'), + (0x33F9, 'M', u'26日'), + (0x33FA, 'M', u'27日'), + (0x33FB, 'M', u'28日'), + (0x33FC, 'M', u'29日'), + (0x33FD, 'M', u'30日'), + (0x33FE, 'M', u'31日'), + (0x33FF, 'M', u'gal'), + (0x3400, 'V'), + (0x4DB6, 'X'), + (0x4DC0, 'V'), + (0x9FEB, 'X'), + (0xA000, 'V'), + (0xA48D, 'X'), + (0xA490, 'V'), + (0xA4C7, 'X'), + (0xA4D0, 'V'), + (0xA62C, 'X'), + (0xA640, 'M', u'ꙁ'), + (0xA641, 'V'), + (0xA642, 'M', u'ꙃ'), + (0xA643, 'V'), + (0xA644, 'M', u'ꙅ'), + (0xA645, 'V'), + (0xA646, 'M', u'ꙇ'), + (0xA647, 'V'), + (0xA648, 'M', u'ꙉ'), + (0xA649, 'V'), + (0xA64A, 'M', u'ꙋ'), + (0xA64B, 'V'), + (0xA64C, 'M', u'ꙍ'), + (0xA64D, 'V'), + (0xA64E, 'M', u'ꙏ'), + (0xA64F, 'V'), + (0xA650, 'M', u'ꙑ'), + (0xA651, 'V'), + (0xA652, 'M', u'ꙓ'), + (0xA653, 'V'), + (0xA654, 'M', u'ꙕ'), + (0xA655, 'V'), + (0xA656, 'M', u'ꙗ'), + (0xA657, 'V'), + (0xA658, 'M', u'ꙙ'), + (0xA659, 'V'), + (0xA65A, 'M', u'ꙛ'), + (0xA65B, 'V'), + (0xA65C, 'M', u'ꙝ'), + (0xA65D, 'V'), + (0xA65E, 'M', u'ꙟ'), + (0xA65F, 'V'), + (0xA660, 'M', u'ꙡ'), + (0xA661, 'V'), + (0xA662, 'M', u'ꙣ'), + (0xA663, 'V'), + (0xA664, 'M', u'ꙥ'), + (0xA665, 'V'), + (0xA666, 'M', u'ꙧ'), + (0xA667, 'V'), + (0xA668, 'M', u'ꙩ'), + (0xA669, 'V'), + (0xA66A, 'M', u'ꙫ'), + (0xA66B, 'V'), + (0xA66C, 'M', u'ꙭ'), + (0xA66D, 'V'), + (0xA680, 'M', u'ꚁ'), + (0xA681, 'V'), + (0xA682, 'M', u'ꚃ'), + (0xA683, 'V'), + (0xA684, 'M', u'ꚅ'), + (0xA685, 'V'), + (0xA686, 'M', u'ꚇ'), + (0xA687, 'V'), + (0xA688, 'M', u'ꚉ'), + (0xA689, 'V'), + (0xA68A, 'M', u'ꚋ'), + (0xA68B, 'V'), + (0xA68C, 'M', u'ꚍ'), + (0xA68D, 'V'), + (0xA68E, 'M', u'ꚏ'), + (0xA68F, 'V'), + (0xA690, 'M', u'ꚑ'), + (0xA691, 'V'), + (0xA692, 'M', u'ꚓ'), + (0xA693, 'V'), + (0xA694, 'M', u'ꚕ'), + ] + +def _seg_36(): + return [ + (0xA695, 'V'), + (0xA696, 'M', u'ꚗ'), + (0xA697, 'V'), + (0xA698, 'M', u'ꚙ'), + (0xA699, 'V'), + (0xA69A, 'M', u'ꚛ'), + (0xA69B, 'V'), + (0xA69C, 'M', u'ъ'), + (0xA69D, 'M', u'ь'), + (0xA69E, 'V'), + (0xA6F8, 'X'), + (0xA700, 'V'), + (0xA722, 'M', u'ꜣ'), + (0xA723, 'V'), + (0xA724, 'M', u'ꜥ'), + (0xA725, 'V'), + (0xA726, 'M', u'ꜧ'), + (0xA727, 'V'), + (0xA728, 'M', u'ꜩ'), + (0xA729, 'V'), + (0xA72A, 'M', u'ꜫ'), + (0xA72B, 'V'), + (0xA72C, 'M', u'ꜭ'), + (0xA72D, 'V'), + (0xA72E, 'M', u'ꜯ'), + (0xA72F, 'V'), + (0xA732, 'M', u'ꜳ'), + (0xA733, 'V'), + (0xA734, 'M', u'ꜵ'), + (0xA735, 'V'), + (0xA736, 'M', u'ꜷ'), + (0xA737, 'V'), + (0xA738, 'M', u'ꜹ'), + (0xA739, 'V'), + (0xA73A, 'M', u'ꜻ'), + (0xA73B, 'V'), + (0xA73C, 'M', u'ꜽ'), + (0xA73D, 'V'), + (0xA73E, 'M', u'ꜿ'), + (0xA73F, 'V'), + (0xA740, 'M', u'ꝁ'), + (0xA741, 'V'), + (0xA742, 'M', u'ꝃ'), + (0xA743, 'V'), + (0xA744, 'M', u'ꝅ'), + (0xA745, 'V'), + (0xA746, 'M', u'ꝇ'), + (0xA747, 'V'), + (0xA748, 'M', u'ꝉ'), + (0xA749, 'V'), + (0xA74A, 'M', u'ꝋ'), + (0xA74B, 'V'), + (0xA74C, 'M', u'ꝍ'), + (0xA74D, 'V'), + (0xA74E, 'M', u'ꝏ'), + (0xA74F, 'V'), + (0xA750, 'M', u'ꝑ'), + (0xA751, 'V'), + (0xA752, 'M', u'ꝓ'), + (0xA753, 'V'), + (0xA754, 'M', 
u'ꝕ'), + (0xA755, 'V'), + (0xA756, 'M', u'ꝗ'), + (0xA757, 'V'), + (0xA758, 'M', u'ꝙ'), + (0xA759, 'V'), + (0xA75A, 'M', u'ꝛ'), + (0xA75B, 'V'), + (0xA75C, 'M', u'ꝝ'), + (0xA75D, 'V'), + (0xA75E, 'M', u'ꝟ'), + (0xA75F, 'V'), + (0xA760, 'M', u'ꝡ'), + (0xA761, 'V'), + (0xA762, 'M', u'ꝣ'), + (0xA763, 'V'), + (0xA764, 'M', u'ꝥ'), + (0xA765, 'V'), + (0xA766, 'M', u'ꝧ'), + (0xA767, 'V'), + (0xA768, 'M', u'ꝩ'), + (0xA769, 'V'), + (0xA76A, 'M', u'ꝫ'), + (0xA76B, 'V'), + (0xA76C, 'M', u'ꝭ'), + (0xA76D, 'V'), + (0xA76E, 'M', u'ꝯ'), + (0xA76F, 'V'), + (0xA770, 'M', u'ꝯ'), + (0xA771, 'V'), + (0xA779, 'M', u'ꝺ'), + (0xA77A, 'V'), + (0xA77B, 'M', u'ꝼ'), + (0xA77C, 'V'), + (0xA77D, 'M', u'ᵹ'), + (0xA77E, 'M', u'ꝿ'), + (0xA77F, 'V'), + (0xA780, 'M', u'ꞁ'), + (0xA781, 'V'), + (0xA782, 'M', u'ꞃ'), + ] + +def _seg_37(): + return [ + (0xA783, 'V'), + (0xA784, 'M', u'ꞅ'), + (0xA785, 'V'), + (0xA786, 'M', u'ꞇ'), + (0xA787, 'V'), + (0xA78B, 'M', u'ꞌ'), + (0xA78C, 'V'), + (0xA78D, 'M', u'ɥ'), + (0xA78E, 'V'), + (0xA790, 'M', u'ꞑ'), + (0xA791, 'V'), + (0xA792, 'M', u'ꞓ'), + (0xA793, 'V'), + (0xA796, 'M', u'ꞗ'), + (0xA797, 'V'), + (0xA798, 'M', u'ꞙ'), + (0xA799, 'V'), + (0xA79A, 'M', u'ꞛ'), + (0xA79B, 'V'), + (0xA79C, 'M', u'ꞝ'), + (0xA79D, 'V'), + (0xA79E, 'M', u'ꞟ'), + (0xA79F, 'V'), + (0xA7A0, 'M', u'ꞡ'), + (0xA7A1, 'V'), + (0xA7A2, 'M', u'ꞣ'), + (0xA7A3, 'V'), + (0xA7A4, 'M', u'ꞥ'), + (0xA7A5, 'V'), + (0xA7A6, 'M', u'ꞧ'), + (0xA7A7, 'V'), + (0xA7A8, 'M', u'ꞩ'), + (0xA7A9, 'V'), + (0xA7AA, 'M', u'ɦ'), + (0xA7AB, 'M', u'ɜ'), + (0xA7AC, 'M', u'ɡ'), + (0xA7AD, 'M', u'ɬ'), + (0xA7AE, 'M', u'ɪ'), + (0xA7AF, 'X'), + (0xA7B0, 'M', u'ʞ'), + (0xA7B1, 'M', u'ʇ'), + (0xA7B2, 'M', u'ʝ'), + (0xA7B3, 'M', u'ꭓ'), + (0xA7B4, 'M', u'ꞵ'), + (0xA7B5, 'V'), + (0xA7B6, 'M', u'ꞷ'), + (0xA7B7, 'V'), + (0xA7B8, 'X'), + (0xA7F7, 'V'), + (0xA7F8, 'M', u'ħ'), + (0xA7F9, 'M', u'œ'), + (0xA7FA, 'V'), + (0xA82C, 'X'), + (0xA830, 'V'), + (0xA83A, 'X'), + (0xA840, 'V'), + (0xA878, 'X'), + (0xA880, 'V'), + (0xA8C6, 'X'), + (0xA8CE, 'V'), + (0xA8DA, 'X'), + (0xA8E0, 'V'), + (0xA8FE, 'X'), + (0xA900, 'V'), + (0xA954, 'X'), + (0xA95F, 'V'), + (0xA97D, 'X'), + (0xA980, 'V'), + (0xA9CE, 'X'), + (0xA9CF, 'V'), + (0xA9DA, 'X'), + (0xA9DE, 'V'), + (0xA9FF, 'X'), + (0xAA00, 'V'), + (0xAA37, 'X'), + (0xAA40, 'V'), + (0xAA4E, 'X'), + (0xAA50, 'V'), + (0xAA5A, 'X'), + (0xAA5C, 'V'), + (0xAAC3, 'X'), + (0xAADB, 'V'), + (0xAAF7, 'X'), + (0xAB01, 'V'), + (0xAB07, 'X'), + (0xAB09, 'V'), + (0xAB0F, 'X'), + (0xAB11, 'V'), + (0xAB17, 'X'), + (0xAB20, 'V'), + (0xAB27, 'X'), + (0xAB28, 'V'), + (0xAB2F, 'X'), + (0xAB30, 'V'), + (0xAB5C, 'M', u'ꜧ'), + (0xAB5D, 'M', u'ꬷ'), + (0xAB5E, 'M', u'ɫ'), + (0xAB5F, 'M', u'ꭒ'), + (0xAB60, 'V'), + (0xAB66, 'X'), + ] + +def _seg_38(): + return [ + (0xAB70, 'M', u'Ꭰ'), + (0xAB71, 'M', u'Ꭱ'), + (0xAB72, 'M', u'Ꭲ'), + (0xAB73, 'M', u'Ꭳ'), + (0xAB74, 'M', u'Ꭴ'), + (0xAB75, 'M', u'Ꭵ'), + (0xAB76, 'M', u'Ꭶ'), + (0xAB77, 'M', u'Ꭷ'), + (0xAB78, 'M', u'Ꭸ'), + (0xAB79, 'M', u'Ꭹ'), + (0xAB7A, 'M', u'Ꭺ'), + (0xAB7B, 'M', u'Ꭻ'), + (0xAB7C, 'M', u'Ꭼ'), + (0xAB7D, 'M', u'Ꭽ'), + (0xAB7E, 'M', u'Ꭾ'), + (0xAB7F, 'M', u'Ꭿ'), + (0xAB80, 'M', u'Ꮀ'), + (0xAB81, 'M', u'Ꮁ'), + (0xAB82, 'M', u'Ꮂ'), + (0xAB83, 'M', u'Ꮃ'), + (0xAB84, 'M', u'Ꮄ'), + (0xAB85, 'M', u'Ꮅ'), + (0xAB86, 'M', u'Ꮆ'), + (0xAB87, 'M', u'Ꮇ'), + (0xAB88, 'M', u'Ꮈ'), + (0xAB89, 'M', u'Ꮉ'), + (0xAB8A, 'M', u'Ꮊ'), + (0xAB8B, 'M', u'Ꮋ'), + (0xAB8C, 'M', u'Ꮌ'), + (0xAB8D, 'M', u'Ꮍ'), + (0xAB8E, 'M', u'Ꮎ'), + (0xAB8F, 'M', u'Ꮏ'), + (0xAB90, 'M', u'Ꮐ'), + (0xAB91, 'M', u'Ꮑ'), + (0xAB92, 'M', 
u'Ꮒ'), + (0xAB93, 'M', u'Ꮓ'), + (0xAB94, 'M', u'Ꮔ'), + (0xAB95, 'M', u'Ꮕ'), + (0xAB96, 'M', u'Ꮖ'), + (0xAB97, 'M', u'Ꮗ'), + (0xAB98, 'M', u'Ꮘ'), + (0xAB99, 'M', u'Ꮙ'), + (0xAB9A, 'M', u'Ꮚ'), + (0xAB9B, 'M', u'Ꮛ'), + (0xAB9C, 'M', u'Ꮜ'), + (0xAB9D, 'M', u'Ꮝ'), + (0xAB9E, 'M', u'Ꮞ'), + (0xAB9F, 'M', u'Ꮟ'), + (0xABA0, 'M', u'Ꮠ'), + (0xABA1, 'M', u'Ꮡ'), + (0xABA2, 'M', u'Ꮢ'), + (0xABA3, 'M', u'Ꮣ'), + (0xABA4, 'M', u'Ꮤ'), + (0xABA5, 'M', u'Ꮥ'), + (0xABA6, 'M', u'Ꮦ'), + (0xABA7, 'M', u'Ꮧ'), + (0xABA8, 'M', u'Ꮨ'), + (0xABA9, 'M', u'Ꮩ'), + (0xABAA, 'M', u'Ꮪ'), + (0xABAB, 'M', u'Ꮫ'), + (0xABAC, 'M', u'Ꮬ'), + (0xABAD, 'M', u'Ꮭ'), + (0xABAE, 'M', u'Ꮮ'), + (0xABAF, 'M', u'Ꮯ'), + (0xABB0, 'M', u'Ꮰ'), + (0xABB1, 'M', u'Ꮱ'), + (0xABB2, 'M', u'Ꮲ'), + (0xABB3, 'M', u'Ꮳ'), + (0xABB4, 'M', u'Ꮴ'), + (0xABB5, 'M', u'Ꮵ'), + (0xABB6, 'M', u'Ꮶ'), + (0xABB7, 'M', u'Ꮷ'), + (0xABB8, 'M', u'Ꮸ'), + (0xABB9, 'M', u'Ꮹ'), + (0xABBA, 'M', u'Ꮺ'), + (0xABBB, 'M', u'Ꮻ'), + (0xABBC, 'M', u'Ꮼ'), + (0xABBD, 'M', u'Ꮽ'), + (0xABBE, 'M', u'Ꮾ'), + (0xABBF, 'M', u'Ꮿ'), + (0xABC0, 'V'), + (0xABEE, 'X'), + (0xABF0, 'V'), + (0xABFA, 'X'), + (0xAC00, 'V'), + (0xD7A4, 'X'), + (0xD7B0, 'V'), + (0xD7C7, 'X'), + (0xD7CB, 'V'), + (0xD7FC, 'X'), + (0xF900, 'M', u'豈'), + (0xF901, 'M', u'更'), + (0xF902, 'M', u'車'), + (0xF903, 'M', u'賈'), + (0xF904, 'M', u'滑'), + (0xF905, 'M', u'串'), + (0xF906, 'M', u'句'), + (0xF907, 'M', u'龜'), + (0xF909, 'M', u'契'), + (0xF90A, 'M', u'金'), + ] + +def _seg_39(): + return [ + (0xF90B, 'M', u'喇'), + (0xF90C, 'M', u'奈'), + (0xF90D, 'M', u'懶'), + (0xF90E, 'M', u'癩'), + (0xF90F, 'M', u'羅'), + (0xF910, 'M', u'蘿'), + (0xF911, 'M', u'螺'), + (0xF912, 'M', u'裸'), + (0xF913, 'M', u'邏'), + (0xF914, 'M', u'樂'), + (0xF915, 'M', u'洛'), + (0xF916, 'M', u'烙'), + (0xF917, 'M', u'珞'), + (0xF918, 'M', u'落'), + (0xF919, 'M', u'酪'), + (0xF91A, 'M', u'駱'), + (0xF91B, 'M', u'亂'), + (0xF91C, 'M', u'卵'), + (0xF91D, 'M', u'欄'), + (0xF91E, 'M', u'爛'), + (0xF91F, 'M', u'蘭'), + (0xF920, 'M', u'鸞'), + (0xF921, 'M', u'嵐'), + (0xF922, 'M', u'濫'), + (0xF923, 'M', u'藍'), + (0xF924, 'M', u'襤'), + (0xF925, 'M', u'拉'), + (0xF926, 'M', u'臘'), + (0xF927, 'M', u'蠟'), + (0xF928, 'M', u'廊'), + (0xF929, 'M', u'朗'), + (0xF92A, 'M', u'浪'), + (0xF92B, 'M', u'狼'), + (0xF92C, 'M', u'郎'), + (0xF92D, 'M', u'來'), + (0xF92E, 'M', u'冷'), + (0xF92F, 'M', u'勞'), + (0xF930, 'M', u'擄'), + (0xF931, 'M', u'櫓'), + (0xF932, 'M', u'爐'), + (0xF933, 'M', u'盧'), + (0xF934, 'M', u'老'), + (0xF935, 'M', u'蘆'), + (0xF936, 'M', u'虜'), + (0xF937, 'M', u'路'), + (0xF938, 'M', u'露'), + (0xF939, 'M', u'魯'), + (0xF93A, 'M', u'鷺'), + (0xF93B, 'M', u'碌'), + (0xF93C, 'M', u'祿'), + (0xF93D, 'M', u'綠'), + (0xF93E, 'M', u'菉'), + (0xF93F, 'M', u'錄'), + (0xF940, 'M', u'鹿'), + (0xF941, 'M', u'論'), + (0xF942, 'M', u'壟'), + (0xF943, 'M', u'弄'), + (0xF944, 'M', u'籠'), + (0xF945, 'M', u'聾'), + (0xF946, 'M', u'牢'), + (0xF947, 'M', u'磊'), + (0xF948, 'M', u'賂'), + (0xF949, 'M', u'雷'), + (0xF94A, 'M', u'壘'), + (0xF94B, 'M', u'屢'), + (0xF94C, 'M', u'樓'), + (0xF94D, 'M', u'淚'), + (0xF94E, 'M', u'漏'), + (0xF94F, 'M', u'累'), + (0xF950, 'M', u'縷'), + (0xF951, 'M', u'陋'), + (0xF952, 'M', u'勒'), + (0xF953, 'M', u'肋'), + (0xF954, 'M', u'凜'), + (0xF955, 'M', u'凌'), + (0xF956, 'M', u'稜'), + (0xF957, 'M', u'綾'), + (0xF958, 'M', u'菱'), + (0xF959, 'M', u'陵'), + (0xF95A, 'M', u'讀'), + (0xF95B, 'M', u'拏'), + (0xF95C, 'M', u'樂'), + (0xF95D, 'M', u'諾'), + (0xF95E, 'M', u'丹'), + (0xF95F, 'M', u'寧'), + (0xF960, 'M', u'怒'), + (0xF961, 'M', u'率'), + (0xF962, 'M', u'異'), + (0xF963, 'M', u'北'), + (0xF964, 'M', u'磻'), + 
(0xF965, 'M', u'便'), + (0xF966, 'M', u'復'), + (0xF967, 'M', u'不'), + (0xF968, 'M', u'泌'), + (0xF969, 'M', u'數'), + (0xF96A, 'M', u'索'), + (0xF96B, 'M', u'參'), + (0xF96C, 'M', u'塞'), + (0xF96D, 'M', u'省'), + (0xF96E, 'M', u'葉'), + ] + +def _seg_40(): + return [ + (0xF96F, 'M', u'說'), + (0xF970, 'M', u'殺'), + (0xF971, 'M', u'辰'), + (0xF972, 'M', u'沈'), + (0xF973, 'M', u'拾'), + (0xF974, 'M', u'若'), + (0xF975, 'M', u'掠'), + (0xF976, 'M', u'略'), + (0xF977, 'M', u'亮'), + (0xF978, 'M', u'兩'), + (0xF979, 'M', u'凉'), + (0xF97A, 'M', u'梁'), + (0xF97B, 'M', u'糧'), + (0xF97C, 'M', u'良'), + (0xF97D, 'M', u'諒'), + (0xF97E, 'M', u'量'), + (0xF97F, 'M', u'勵'), + (0xF980, 'M', u'呂'), + (0xF981, 'M', u'女'), + (0xF982, 'M', u'廬'), + (0xF983, 'M', u'旅'), + (0xF984, 'M', u'濾'), + (0xF985, 'M', u'礪'), + (0xF986, 'M', u'閭'), + (0xF987, 'M', u'驪'), + (0xF988, 'M', u'麗'), + (0xF989, 'M', u'黎'), + (0xF98A, 'M', u'力'), + (0xF98B, 'M', u'曆'), + (0xF98C, 'M', u'歷'), + (0xF98D, 'M', u'轢'), + (0xF98E, 'M', u'年'), + (0xF98F, 'M', u'憐'), + (0xF990, 'M', u'戀'), + (0xF991, 'M', u'撚'), + (0xF992, 'M', u'漣'), + (0xF993, 'M', u'煉'), + (0xF994, 'M', u'璉'), + (0xF995, 'M', u'秊'), + (0xF996, 'M', u'練'), + (0xF997, 'M', u'聯'), + (0xF998, 'M', u'輦'), + (0xF999, 'M', u'蓮'), + (0xF99A, 'M', u'連'), + (0xF99B, 'M', u'鍊'), + (0xF99C, 'M', u'列'), + (0xF99D, 'M', u'劣'), + (0xF99E, 'M', u'咽'), + (0xF99F, 'M', u'烈'), + (0xF9A0, 'M', u'裂'), + (0xF9A1, 'M', u'說'), + (0xF9A2, 'M', u'廉'), + (0xF9A3, 'M', u'念'), + (0xF9A4, 'M', u'捻'), + (0xF9A5, 'M', u'殮'), + (0xF9A6, 'M', u'簾'), + (0xF9A7, 'M', u'獵'), + (0xF9A8, 'M', u'令'), + (0xF9A9, 'M', u'囹'), + (0xF9AA, 'M', u'寧'), + (0xF9AB, 'M', u'嶺'), + (0xF9AC, 'M', u'怜'), + (0xF9AD, 'M', u'玲'), + (0xF9AE, 'M', u'瑩'), + (0xF9AF, 'M', u'羚'), + (0xF9B0, 'M', u'聆'), + (0xF9B1, 'M', u'鈴'), + (0xF9B2, 'M', u'零'), + (0xF9B3, 'M', u'靈'), + (0xF9B4, 'M', u'領'), + (0xF9B5, 'M', u'例'), + (0xF9B6, 'M', u'禮'), + (0xF9B7, 'M', u'醴'), + (0xF9B8, 'M', u'隸'), + (0xF9B9, 'M', u'惡'), + (0xF9BA, 'M', u'了'), + (0xF9BB, 'M', u'僚'), + (0xF9BC, 'M', u'寮'), + (0xF9BD, 'M', u'尿'), + (0xF9BE, 'M', u'料'), + (0xF9BF, 'M', u'樂'), + (0xF9C0, 'M', u'燎'), + (0xF9C1, 'M', u'療'), + (0xF9C2, 'M', u'蓼'), + (0xF9C3, 'M', u'遼'), + (0xF9C4, 'M', u'龍'), + (0xF9C5, 'M', u'暈'), + (0xF9C6, 'M', u'阮'), + (0xF9C7, 'M', u'劉'), + (0xF9C8, 'M', u'杻'), + (0xF9C9, 'M', u'柳'), + (0xF9CA, 'M', u'流'), + (0xF9CB, 'M', u'溜'), + (0xF9CC, 'M', u'琉'), + (0xF9CD, 'M', u'留'), + (0xF9CE, 'M', u'硫'), + (0xF9CF, 'M', u'紐'), + (0xF9D0, 'M', u'類'), + (0xF9D1, 'M', u'六'), + (0xF9D2, 'M', u'戮'), + ] + +def _seg_41(): + return [ + (0xF9D3, 'M', u'陸'), + (0xF9D4, 'M', u'倫'), + (0xF9D5, 'M', u'崙'), + (0xF9D6, 'M', u'淪'), + (0xF9D7, 'M', u'輪'), + (0xF9D8, 'M', u'律'), + (0xF9D9, 'M', u'慄'), + (0xF9DA, 'M', u'栗'), + (0xF9DB, 'M', u'率'), + (0xF9DC, 'M', u'隆'), + (0xF9DD, 'M', u'利'), + (0xF9DE, 'M', u'吏'), + (0xF9DF, 'M', u'履'), + (0xF9E0, 'M', u'易'), + (0xF9E1, 'M', u'李'), + (0xF9E2, 'M', u'梨'), + (0xF9E3, 'M', u'泥'), + (0xF9E4, 'M', u'理'), + (0xF9E5, 'M', u'痢'), + (0xF9E6, 'M', u'罹'), + (0xF9E7, 'M', u'裏'), + (0xF9E8, 'M', u'裡'), + (0xF9E9, 'M', u'里'), + (0xF9EA, 'M', u'離'), + (0xF9EB, 'M', u'匿'), + (0xF9EC, 'M', u'溺'), + (0xF9ED, 'M', u'吝'), + (0xF9EE, 'M', u'燐'), + (0xF9EF, 'M', u'璘'), + (0xF9F0, 'M', u'藺'), + (0xF9F1, 'M', u'隣'), + (0xF9F2, 'M', u'鱗'), + (0xF9F3, 'M', u'麟'), + (0xF9F4, 'M', u'林'), + (0xF9F5, 'M', u'淋'), + (0xF9F6, 'M', u'臨'), + (0xF9F7, 'M', u'立'), + (0xF9F8, 'M', u'笠'), + (0xF9F9, 'M', u'粒'), + (0xF9FA, 'M', u'狀'), + (0xF9FB, 'M', u'炙'), + (0xF9FC, 'M', 
u'識'), + (0xF9FD, 'M', u'什'), + (0xF9FE, 'M', u'茶'), + (0xF9FF, 'M', u'刺'), + (0xFA00, 'M', u'切'), + (0xFA01, 'M', u'度'), + (0xFA02, 'M', u'拓'), + (0xFA03, 'M', u'糖'), + (0xFA04, 'M', u'宅'), + (0xFA05, 'M', u'洞'), + (0xFA06, 'M', u'暴'), + (0xFA07, 'M', u'輻'), + (0xFA08, 'M', u'行'), + (0xFA09, 'M', u'降'), + (0xFA0A, 'M', u'見'), + (0xFA0B, 'M', u'廓'), + (0xFA0C, 'M', u'兀'), + (0xFA0D, 'M', u'嗀'), + (0xFA0E, 'V'), + (0xFA10, 'M', u'塚'), + (0xFA11, 'V'), + (0xFA12, 'M', u'晴'), + (0xFA13, 'V'), + (0xFA15, 'M', u'凞'), + (0xFA16, 'M', u'猪'), + (0xFA17, 'M', u'益'), + (0xFA18, 'M', u'礼'), + (0xFA19, 'M', u'神'), + (0xFA1A, 'M', u'祥'), + (0xFA1B, 'M', u'福'), + (0xFA1C, 'M', u'靖'), + (0xFA1D, 'M', u'精'), + (0xFA1E, 'M', u'羽'), + (0xFA1F, 'V'), + (0xFA20, 'M', u'蘒'), + (0xFA21, 'V'), + (0xFA22, 'M', u'諸'), + (0xFA23, 'V'), + (0xFA25, 'M', u'逸'), + (0xFA26, 'M', u'都'), + (0xFA27, 'V'), + (0xFA2A, 'M', u'飯'), + (0xFA2B, 'M', u'飼'), + (0xFA2C, 'M', u'館'), + (0xFA2D, 'M', u'鶴'), + (0xFA2E, 'M', u'郞'), + (0xFA2F, 'M', u'隷'), + (0xFA30, 'M', u'侮'), + (0xFA31, 'M', u'僧'), + (0xFA32, 'M', u'免'), + (0xFA33, 'M', u'勉'), + (0xFA34, 'M', u'勤'), + (0xFA35, 'M', u'卑'), + (0xFA36, 'M', u'喝'), + (0xFA37, 'M', u'嘆'), + (0xFA38, 'M', u'器'), + (0xFA39, 'M', u'塀'), + (0xFA3A, 'M', u'墨'), + (0xFA3B, 'M', u'層'), + ] + +def _seg_42(): + return [ + (0xFA3C, 'M', u'屮'), + (0xFA3D, 'M', u'悔'), + (0xFA3E, 'M', u'慨'), + (0xFA3F, 'M', u'憎'), + (0xFA40, 'M', u'懲'), + (0xFA41, 'M', u'敏'), + (0xFA42, 'M', u'既'), + (0xFA43, 'M', u'暑'), + (0xFA44, 'M', u'梅'), + (0xFA45, 'M', u'海'), + (0xFA46, 'M', u'渚'), + (0xFA47, 'M', u'漢'), + (0xFA48, 'M', u'煮'), + (0xFA49, 'M', u'爫'), + (0xFA4A, 'M', u'琢'), + (0xFA4B, 'M', u'碑'), + (0xFA4C, 'M', u'社'), + (0xFA4D, 'M', u'祉'), + (0xFA4E, 'M', u'祈'), + (0xFA4F, 'M', u'祐'), + (0xFA50, 'M', u'祖'), + (0xFA51, 'M', u'祝'), + (0xFA52, 'M', u'禍'), + (0xFA53, 'M', u'禎'), + (0xFA54, 'M', u'穀'), + (0xFA55, 'M', u'突'), + (0xFA56, 'M', u'節'), + (0xFA57, 'M', u'練'), + (0xFA58, 'M', u'縉'), + (0xFA59, 'M', u'繁'), + (0xFA5A, 'M', u'署'), + (0xFA5B, 'M', u'者'), + (0xFA5C, 'M', u'臭'), + (0xFA5D, 'M', u'艹'), + (0xFA5F, 'M', u'著'), + (0xFA60, 'M', u'褐'), + (0xFA61, 'M', u'視'), + (0xFA62, 'M', u'謁'), + (0xFA63, 'M', u'謹'), + (0xFA64, 'M', u'賓'), + (0xFA65, 'M', u'贈'), + (0xFA66, 'M', u'辶'), + (0xFA67, 'M', u'逸'), + (0xFA68, 'M', u'難'), + (0xFA69, 'M', u'響'), + (0xFA6A, 'M', u'頻'), + (0xFA6B, 'M', u'恵'), + (0xFA6C, 'M', u'𤋮'), + (0xFA6D, 'M', u'舘'), + (0xFA6E, 'X'), + (0xFA70, 'M', u'並'), + (0xFA71, 'M', u'况'), + (0xFA72, 'M', u'全'), + (0xFA73, 'M', u'侀'), + (0xFA74, 'M', u'充'), + (0xFA75, 'M', u'冀'), + (0xFA76, 'M', u'勇'), + (0xFA77, 'M', u'勺'), + (0xFA78, 'M', u'喝'), + (0xFA79, 'M', u'啕'), + (0xFA7A, 'M', u'喙'), + (0xFA7B, 'M', u'嗢'), + (0xFA7C, 'M', u'塚'), + (0xFA7D, 'M', u'墳'), + (0xFA7E, 'M', u'奄'), + (0xFA7F, 'M', u'奔'), + (0xFA80, 'M', u'婢'), + (0xFA81, 'M', u'嬨'), + (0xFA82, 'M', u'廒'), + (0xFA83, 'M', u'廙'), + (0xFA84, 'M', u'彩'), + (0xFA85, 'M', u'徭'), + (0xFA86, 'M', u'惘'), + (0xFA87, 'M', u'慎'), + (0xFA88, 'M', u'愈'), + (0xFA89, 'M', u'憎'), + (0xFA8A, 'M', u'慠'), + (0xFA8B, 'M', u'懲'), + (0xFA8C, 'M', u'戴'), + (0xFA8D, 'M', u'揄'), + (0xFA8E, 'M', u'搜'), + (0xFA8F, 'M', u'摒'), + (0xFA90, 'M', u'敖'), + (0xFA91, 'M', u'晴'), + (0xFA92, 'M', u'朗'), + (0xFA93, 'M', u'望'), + (0xFA94, 'M', u'杖'), + (0xFA95, 'M', u'歹'), + (0xFA96, 'M', u'殺'), + (0xFA97, 'M', u'流'), + (0xFA98, 'M', u'滛'), + (0xFA99, 'M', u'滋'), + (0xFA9A, 'M', u'漢'), + (0xFA9B, 'M', u'瀞'), + (0xFA9C, 'M', u'煮'), + (0xFA9D, 'M', u'瞧'), + (0xFA9E, 'M', 
u'爵'), + (0xFA9F, 'M', u'犯'), + (0xFAA0, 'M', u'猪'), + (0xFAA1, 'M', u'瑱'), + ] + +def _seg_43(): + return [ + (0xFAA2, 'M', u'甆'), + (0xFAA3, 'M', u'画'), + (0xFAA4, 'M', u'瘝'), + (0xFAA5, 'M', u'瘟'), + (0xFAA6, 'M', u'益'), + (0xFAA7, 'M', u'盛'), + (0xFAA8, 'M', u'直'), + (0xFAA9, 'M', u'睊'), + (0xFAAA, 'M', u'着'), + (0xFAAB, 'M', u'磌'), + (0xFAAC, 'M', u'窱'), + (0xFAAD, 'M', u'節'), + (0xFAAE, 'M', u'类'), + (0xFAAF, 'M', u'絛'), + (0xFAB0, 'M', u'練'), + (0xFAB1, 'M', u'缾'), + (0xFAB2, 'M', u'者'), + (0xFAB3, 'M', u'荒'), + (0xFAB4, 'M', u'華'), + (0xFAB5, 'M', u'蝹'), + (0xFAB6, 'M', u'襁'), + (0xFAB7, 'M', u'覆'), + (0xFAB8, 'M', u'視'), + (0xFAB9, 'M', u'調'), + (0xFABA, 'M', u'諸'), + (0xFABB, 'M', u'請'), + (0xFABC, 'M', u'謁'), + (0xFABD, 'M', u'諾'), + (0xFABE, 'M', u'諭'), + (0xFABF, 'M', u'謹'), + (0xFAC0, 'M', u'變'), + (0xFAC1, 'M', u'贈'), + (0xFAC2, 'M', u'輸'), + (0xFAC3, 'M', u'遲'), + (0xFAC4, 'M', u'醙'), + (0xFAC5, 'M', u'鉶'), + (0xFAC6, 'M', u'陼'), + (0xFAC7, 'M', u'難'), + (0xFAC8, 'M', u'靖'), + (0xFAC9, 'M', u'韛'), + (0xFACA, 'M', u'響'), + (0xFACB, 'M', u'頋'), + (0xFACC, 'M', u'頻'), + (0xFACD, 'M', u'鬒'), + (0xFACE, 'M', u'龜'), + (0xFACF, 'M', u'𢡊'), + (0xFAD0, 'M', u'𢡄'), + (0xFAD1, 'M', u'𣏕'), + (0xFAD2, 'M', u'㮝'), + (0xFAD3, 'M', u'䀘'), + (0xFAD4, 'M', u'䀹'), + (0xFAD5, 'M', u'𥉉'), + (0xFAD6, 'M', u'𥳐'), + (0xFAD7, 'M', u'𧻓'), + (0xFAD8, 'M', u'齃'), + (0xFAD9, 'M', u'龎'), + (0xFADA, 'X'), + (0xFB00, 'M', u'ff'), + (0xFB01, 'M', u'fi'), + (0xFB02, 'M', u'fl'), + (0xFB03, 'M', u'ffi'), + (0xFB04, 'M', u'ffl'), + (0xFB05, 'M', u'st'), + (0xFB07, 'X'), + (0xFB13, 'M', u'մն'), + (0xFB14, 'M', u'մե'), + (0xFB15, 'M', u'մի'), + (0xFB16, 'M', u'վն'), + (0xFB17, 'M', u'մխ'), + (0xFB18, 'X'), + (0xFB1D, 'M', u'יִ'), + (0xFB1E, 'V'), + (0xFB1F, 'M', u'ײַ'), + (0xFB20, 'M', u'ע'), + (0xFB21, 'M', u'א'), + (0xFB22, 'M', u'ד'), + (0xFB23, 'M', u'ה'), + (0xFB24, 'M', u'כ'), + (0xFB25, 'M', u'ל'), + (0xFB26, 'M', u'ם'), + (0xFB27, 'M', u'ר'), + (0xFB28, 'M', u'ת'), + (0xFB29, '3', u'+'), + (0xFB2A, 'M', u'שׁ'), + (0xFB2B, 'M', u'שׂ'), + (0xFB2C, 'M', u'שּׁ'), + (0xFB2D, 'M', u'שּׂ'), + (0xFB2E, 'M', u'אַ'), + (0xFB2F, 'M', u'אָ'), + (0xFB30, 'M', u'אּ'), + (0xFB31, 'M', u'בּ'), + (0xFB32, 'M', u'גּ'), + (0xFB33, 'M', u'דּ'), + (0xFB34, 'M', u'הּ'), + (0xFB35, 'M', u'וּ'), + (0xFB36, 'M', u'זּ'), + (0xFB37, 'X'), + (0xFB38, 'M', u'טּ'), + (0xFB39, 'M', u'יּ'), + (0xFB3A, 'M', u'ךּ'), + ] + +def _seg_44(): + return [ + (0xFB3B, 'M', u'כּ'), + (0xFB3C, 'M', u'לּ'), + (0xFB3D, 'X'), + (0xFB3E, 'M', u'מּ'), + (0xFB3F, 'X'), + (0xFB40, 'M', u'נּ'), + (0xFB41, 'M', u'סּ'), + (0xFB42, 'X'), + (0xFB43, 'M', u'ףּ'), + (0xFB44, 'M', u'פּ'), + (0xFB45, 'X'), + (0xFB46, 'M', u'צּ'), + (0xFB47, 'M', u'קּ'), + (0xFB48, 'M', u'רּ'), + (0xFB49, 'M', u'שּ'), + (0xFB4A, 'M', u'תּ'), + (0xFB4B, 'M', u'וֹ'), + (0xFB4C, 'M', u'בֿ'), + (0xFB4D, 'M', u'כֿ'), + (0xFB4E, 'M', u'פֿ'), + (0xFB4F, 'M', u'אל'), + (0xFB50, 'M', u'ٱ'), + (0xFB52, 'M', u'ٻ'), + (0xFB56, 'M', u'پ'), + (0xFB5A, 'M', u'ڀ'), + (0xFB5E, 'M', u'ٺ'), + (0xFB62, 'M', u'ٿ'), + (0xFB66, 'M', u'ٹ'), + (0xFB6A, 'M', u'ڤ'), + (0xFB6E, 'M', u'ڦ'), + (0xFB72, 'M', u'ڄ'), + (0xFB76, 'M', u'ڃ'), + (0xFB7A, 'M', u'چ'), + (0xFB7E, 'M', u'ڇ'), + (0xFB82, 'M', u'ڍ'), + (0xFB84, 'M', u'ڌ'), + (0xFB86, 'M', u'ڎ'), + (0xFB88, 'M', u'ڈ'), + (0xFB8A, 'M', u'ژ'), + (0xFB8C, 'M', u'ڑ'), + (0xFB8E, 'M', u'ک'), + (0xFB92, 'M', u'گ'), + (0xFB96, 'M', u'ڳ'), + (0xFB9A, 'M', u'ڱ'), + (0xFB9E, 'M', u'ں'), + (0xFBA0, 'M', u'ڻ'), + (0xFBA4, 'M', u'ۀ'), + (0xFBA6, 'M', u'ہ'), + (0xFBAA, 
'M', u'ھ'), + (0xFBAE, 'M', u'ے'), + (0xFBB0, 'M', u'ۓ'), + (0xFBB2, 'V'), + (0xFBC2, 'X'), + (0xFBD3, 'M', u'ڭ'), + (0xFBD7, 'M', u'ۇ'), + (0xFBD9, 'M', u'ۆ'), + (0xFBDB, 'M', u'ۈ'), + (0xFBDD, 'M', u'ۇٴ'), + (0xFBDE, 'M', u'ۋ'), + (0xFBE0, 'M', u'ۅ'), + (0xFBE2, 'M', u'ۉ'), + (0xFBE4, 'M', u'ې'), + (0xFBE8, 'M', u'ى'), + (0xFBEA, 'M', u'ئا'), + (0xFBEC, 'M', u'ئە'), + (0xFBEE, 'M', u'ئو'), + (0xFBF0, 'M', u'ئۇ'), + (0xFBF2, 'M', u'ئۆ'), + (0xFBF4, 'M', u'ئۈ'), + (0xFBF6, 'M', u'ئې'), + (0xFBF9, 'M', u'ئى'), + (0xFBFC, 'M', u'ی'), + (0xFC00, 'M', u'ئج'), + (0xFC01, 'M', u'ئح'), + (0xFC02, 'M', u'ئم'), + (0xFC03, 'M', u'ئى'), + (0xFC04, 'M', u'ئي'), + (0xFC05, 'M', u'بج'), + (0xFC06, 'M', u'بح'), + (0xFC07, 'M', u'بخ'), + (0xFC08, 'M', u'بم'), + (0xFC09, 'M', u'بى'), + (0xFC0A, 'M', u'بي'), + (0xFC0B, 'M', u'تج'), + (0xFC0C, 'M', u'تح'), + (0xFC0D, 'M', u'تخ'), + (0xFC0E, 'M', u'تم'), + (0xFC0F, 'M', u'تى'), + (0xFC10, 'M', u'تي'), + (0xFC11, 'M', u'ثج'), + (0xFC12, 'M', u'ثم'), + (0xFC13, 'M', u'ثى'), + (0xFC14, 'M', u'ثي'), + (0xFC15, 'M', u'جح'), + (0xFC16, 'M', u'جم'), + (0xFC17, 'M', u'حج'), + (0xFC18, 'M', u'حم'), + (0xFC19, 'M', u'خج'), + (0xFC1A, 'M', u'خح'), + (0xFC1B, 'M', u'خم'), + ] + +def _seg_45(): + return [ + (0xFC1C, 'M', u'سج'), + (0xFC1D, 'M', u'سح'), + (0xFC1E, 'M', u'سخ'), + (0xFC1F, 'M', u'سم'), + (0xFC20, 'M', u'صح'), + (0xFC21, 'M', u'صم'), + (0xFC22, 'M', u'ضج'), + (0xFC23, 'M', u'ضح'), + (0xFC24, 'M', u'ضخ'), + (0xFC25, 'M', u'ضم'), + (0xFC26, 'M', u'طح'), + (0xFC27, 'M', u'طم'), + (0xFC28, 'M', u'ظم'), + (0xFC29, 'M', u'عج'), + (0xFC2A, 'M', u'عم'), + (0xFC2B, 'M', u'غج'), + (0xFC2C, 'M', u'غم'), + (0xFC2D, 'M', u'فج'), + (0xFC2E, 'M', u'فح'), + (0xFC2F, 'M', u'فخ'), + (0xFC30, 'M', u'فم'), + (0xFC31, 'M', u'فى'), + (0xFC32, 'M', u'في'), + (0xFC33, 'M', u'قح'), + (0xFC34, 'M', u'قم'), + (0xFC35, 'M', u'قى'), + (0xFC36, 'M', u'قي'), + (0xFC37, 'M', u'كا'), + (0xFC38, 'M', u'كج'), + (0xFC39, 'M', u'كح'), + (0xFC3A, 'M', u'كخ'), + (0xFC3B, 'M', u'كل'), + (0xFC3C, 'M', u'كم'), + (0xFC3D, 'M', u'كى'), + (0xFC3E, 'M', u'كي'), + (0xFC3F, 'M', u'لج'), + (0xFC40, 'M', u'لح'), + (0xFC41, 'M', u'لخ'), + (0xFC42, 'M', u'لم'), + (0xFC43, 'M', u'لى'), + (0xFC44, 'M', u'لي'), + (0xFC45, 'M', u'مج'), + (0xFC46, 'M', u'مح'), + (0xFC47, 'M', u'مخ'), + (0xFC48, 'M', u'مم'), + (0xFC49, 'M', u'مى'), + (0xFC4A, 'M', u'مي'), + (0xFC4B, 'M', u'نج'), + (0xFC4C, 'M', u'نح'), + (0xFC4D, 'M', u'نخ'), + (0xFC4E, 'M', u'نم'), + (0xFC4F, 'M', u'نى'), + (0xFC50, 'M', u'ني'), + (0xFC51, 'M', u'هج'), + (0xFC52, 'M', u'هم'), + (0xFC53, 'M', u'هى'), + (0xFC54, 'M', u'هي'), + (0xFC55, 'M', u'يج'), + (0xFC56, 'M', u'يح'), + (0xFC57, 'M', u'يخ'), + (0xFC58, 'M', u'يم'), + (0xFC59, 'M', u'يى'), + (0xFC5A, 'M', u'يي'), + (0xFC5B, 'M', u'ذٰ'), + (0xFC5C, 'M', u'رٰ'), + (0xFC5D, 'M', u'ىٰ'), + (0xFC5E, '3', u' ٌّ'), + (0xFC5F, '3', u' ٍّ'), + (0xFC60, '3', u' َّ'), + (0xFC61, '3', u' ُّ'), + (0xFC62, '3', u' ِّ'), + (0xFC63, '3', u' ّٰ'), + (0xFC64, 'M', u'ئر'), + (0xFC65, 'M', u'ئز'), + (0xFC66, 'M', u'ئم'), + (0xFC67, 'M', u'ئن'), + (0xFC68, 'M', u'ئى'), + (0xFC69, 'M', u'ئي'), + (0xFC6A, 'M', u'بر'), + (0xFC6B, 'M', u'بز'), + (0xFC6C, 'M', u'بم'), + (0xFC6D, 'M', u'بن'), + (0xFC6E, 'M', u'بى'), + (0xFC6F, 'M', u'بي'), + (0xFC70, 'M', u'تر'), + (0xFC71, 'M', u'تز'), + (0xFC72, 'M', u'تم'), + (0xFC73, 'M', u'تن'), + (0xFC74, 'M', u'تى'), + (0xFC75, 'M', u'تي'), + (0xFC76, 'M', u'ثر'), + (0xFC77, 'M', u'ثز'), + (0xFC78, 'M', u'ثم'), + (0xFC79, 'M', u'ثن'), + (0xFC7A, 'M', u'ثى'), + (0xFC7B, 'M', u'ثي'), + 
(0xFC7C, 'M', u'فى'), + (0xFC7D, 'M', u'في'), + (0xFC7E, 'M', u'قى'), + (0xFC7F, 'M', u'قي'), + ] + +def _seg_46(): + return [ + (0xFC80, 'M', u'كا'), + (0xFC81, 'M', u'كل'), + (0xFC82, 'M', u'كم'), + (0xFC83, 'M', u'كى'), + (0xFC84, 'M', u'كي'), + (0xFC85, 'M', u'لم'), + (0xFC86, 'M', u'لى'), + (0xFC87, 'M', u'لي'), + (0xFC88, 'M', u'ما'), + (0xFC89, 'M', u'مم'), + (0xFC8A, 'M', u'نر'), + (0xFC8B, 'M', u'نز'), + (0xFC8C, 'M', u'نم'), + (0xFC8D, 'M', u'نن'), + (0xFC8E, 'M', u'نى'), + (0xFC8F, 'M', u'ني'), + (0xFC90, 'M', u'ىٰ'), + (0xFC91, 'M', u'ير'), + (0xFC92, 'M', u'يز'), + (0xFC93, 'M', u'يم'), + (0xFC94, 'M', u'ين'), + (0xFC95, 'M', u'يى'), + (0xFC96, 'M', u'يي'), + (0xFC97, 'M', u'ئج'), + (0xFC98, 'M', u'ئح'), + (0xFC99, 'M', u'ئخ'), + (0xFC9A, 'M', u'ئم'), + (0xFC9B, 'M', u'ئه'), + (0xFC9C, 'M', u'بج'), + (0xFC9D, 'M', u'بح'), + (0xFC9E, 'M', u'بخ'), + (0xFC9F, 'M', u'بم'), + (0xFCA0, 'M', u'به'), + (0xFCA1, 'M', u'تج'), + (0xFCA2, 'M', u'تح'), + (0xFCA3, 'M', u'تخ'), + (0xFCA4, 'M', u'تم'), + (0xFCA5, 'M', u'ته'), + (0xFCA6, 'M', u'ثم'), + (0xFCA7, 'M', u'جح'), + (0xFCA8, 'M', u'جم'), + (0xFCA9, 'M', u'حج'), + (0xFCAA, 'M', u'حم'), + (0xFCAB, 'M', u'خج'), + (0xFCAC, 'M', u'خم'), + (0xFCAD, 'M', u'سج'), + (0xFCAE, 'M', u'سح'), + (0xFCAF, 'M', u'سخ'), + (0xFCB0, 'M', u'سم'), + (0xFCB1, 'M', u'صح'), + (0xFCB2, 'M', u'صخ'), + (0xFCB3, 'M', u'صم'), + (0xFCB4, 'M', u'ضج'), + (0xFCB5, 'M', u'ضح'), + (0xFCB6, 'M', u'ضخ'), + (0xFCB7, 'M', u'ضم'), + (0xFCB8, 'M', u'طح'), + (0xFCB9, 'M', u'ظم'), + (0xFCBA, 'M', u'عج'), + (0xFCBB, 'M', u'عم'), + (0xFCBC, 'M', u'غج'), + (0xFCBD, 'M', u'غم'), + (0xFCBE, 'M', u'فج'), + (0xFCBF, 'M', u'فح'), + (0xFCC0, 'M', u'فخ'), + (0xFCC1, 'M', u'فم'), + (0xFCC2, 'M', u'قح'), + (0xFCC3, 'M', u'قم'), + (0xFCC4, 'M', u'كج'), + (0xFCC5, 'M', u'كح'), + (0xFCC6, 'M', u'كخ'), + (0xFCC7, 'M', u'كل'), + (0xFCC8, 'M', u'كم'), + (0xFCC9, 'M', u'لج'), + (0xFCCA, 'M', u'لح'), + (0xFCCB, 'M', u'لخ'), + (0xFCCC, 'M', u'لم'), + (0xFCCD, 'M', u'له'), + (0xFCCE, 'M', u'مج'), + (0xFCCF, 'M', u'مح'), + (0xFCD0, 'M', u'مخ'), + (0xFCD1, 'M', u'مم'), + (0xFCD2, 'M', u'نج'), + (0xFCD3, 'M', u'نح'), + (0xFCD4, 'M', u'نخ'), + (0xFCD5, 'M', u'نم'), + (0xFCD6, 'M', u'نه'), + (0xFCD7, 'M', u'هج'), + (0xFCD8, 'M', u'هم'), + (0xFCD9, 'M', u'هٰ'), + (0xFCDA, 'M', u'يج'), + (0xFCDB, 'M', u'يح'), + (0xFCDC, 'M', u'يخ'), + (0xFCDD, 'M', u'يم'), + (0xFCDE, 'M', u'يه'), + (0xFCDF, 'M', u'ئم'), + (0xFCE0, 'M', u'ئه'), + (0xFCE1, 'M', u'بم'), + (0xFCE2, 'M', u'به'), + (0xFCE3, 'M', u'تم'), + ] + +def _seg_47(): + return [ + (0xFCE4, 'M', u'ته'), + (0xFCE5, 'M', u'ثم'), + (0xFCE6, 'M', u'ثه'), + (0xFCE7, 'M', u'سم'), + (0xFCE8, 'M', u'سه'), + (0xFCE9, 'M', u'شم'), + (0xFCEA, 'M', u'شه'), + (0xFCEB, 'M', u'كل'), + (0xFCEC, 'M', u'كم'), + (0xFCED, 'M', u'لم'), + (0xFCEE, 'M', u'نم'), + (0xFCEF, 'M', u'نه'), + (0xFCF0, 'M', u'يم'), + (0xFCF1, 'M', u'يه'), + (0xFCF2, 'M', u'ـَّ'), + (0xFCF3, 'M', u'ـُّ'), + (0xFCF4, 'M', u'ـِّ'), + (0xFCF5, 'M', u'طى'), + (0xFCF6, 'M', u'طي'), + (0xFCF7, 'M', u'عى'), + (0xFCF8, 'M', u'عي'), + (0xFCF9, 'M', u'غى'), + (0xFCFA, 'M', u'غي'), + (0xFCFB, 'M', u'سى'), + (0xFCFC, 'M', u'سي'), + (0xFCFD, 'M', u'شى'), + (0xFCFE, 'M', u'شي'), + (0xFCFF, 'M', u'حى'), + (0xFD00, 'M', u'حي'), + (0xFD01, 'M', u'جى'), + (0xFD02, 'M', u'جي'), + (0xFD03, 'M', u'خى'), + (0xFD04, 'M', u'خي'), + (0xFD05, 'M', u'صى'), + (0xFD06, 'M', u'صي'), + (0xFD07, 'M', u'ضى'), + (0xFD08, 'M', u'ضي'), + (0xFD09, 'M', u'شج'), + (0xFD0A, 'M', u'شح'), + (0xFD0B, 'M', u'شخ'), + (0xFD0C, 'M', u'شم'), + 
(0xFD0D, 'M', u'شر'), + (0xFD0E, 'M', u'سر'), + (0xFD0F, 'M', u'صر'), + (0xFD10, 'M', u'ضر'), + (0xFD11, 'M', u'طى'), + (0xFD12, 'M', u'طي'), + (0xFD13, 'M', u'عى'), + (0xFD14, 'M', u'عي'), + (0xFD15, 'M', u'غى'), + (0xFD16, 'M', u'غي'), + (0xFD17, 'M', u'سى'), + (0xFD18, 'M', u'سي'), + (0xFD19, 'M', u'شى'), + (0xFD1A, 'M', u'شي'), + (0xFD1B, 'M', u'حى'), + (0xFD1C, 'M', u'حي'), + (0xFD1D, 'M', u'جى'), + (0xFD1E, 'M', u'جي'), + (0xFD1F, 'M', u'خى'), + (0xFD20, 'M', u'خي'), + (0xFD21, 'M', u'صى'), + (0xFD22, 'M', u'صي'), + (0xFD23, 'M', u'ضى'), + (0xFD24, 'M', u'ضي'), + (0xFD25, 'M', u'شج'), + (0xFD26, 'M', u'شح'), + (0xFD27, 'M', u'شخ'), + (0xFD28, 'M', u'شم'), + (0xFD29, 'M', u'شر'), + (0xFD2A, 'M', u'سر'), + (0xFD2B, 'M', u'صر'), + (0xFD2C, 'M', u'ضر'), + (0xFD2D, 'M', u'شج'), + (0xFD2E, 'M', u'شح'), + (0xFD2F, 'M', u'شخ'), + (0xFD30, 'M', u'شم'), + (0xFD31, 'M', u'سه'), + (0xFD32, 'M', u'شه'), + (0xFD33, 'M', u'طم'), + (0xFD34, 'M', u'سج'), + (0xFD35, 'M', u'سح'), + (0xFD36, 'M', u'سخ'), + (0xFD37, 'M', u'شج'), + (0xFD38, 'M', u'شح'), + (0xFD39, 'M', u'شخ'), + (0xFD3A, 'M', u'طم'), + (0xFD3B, 'M', u'ظم'), + (0xFD3C, 'M', u'اً'), + (0xFD3E, 'V'), + (0xFD40, 'X'), + (0xFD50, 'M', u'تجم'), + (0xFD51, 'M', u'تحج'), + (0xFD53, 'M', u'تحم'), + (0xFD54, 'M', u'تخم'), + (0xFD55, 'M', u'تمج'), + (0xFD56, 'M', u'تمح'), + (0xFD57, 'M', u'تمخ'), + (0xFD58, 'M', u'جمح'), + (0xFD5A, 'M', u'حمي'), + ] + +def _seg_48(): + return [ + (0xFD5B, 'M', u'حمى'), + (0xFD5C, 'M', u'سحج'), + (0xFD5D, 'M', u'سجح'), + (0xFD5E, 'M', u'سجى'), + (0xFD5F, 'M', u'سمح'), + (0xFD61, 'M', u'سمج'), + (0xFD62, 'M', u'سمم'), + (0xFD64, 'M', u'صحح'), + (0xFD66, 'M', u'صمم'), + (0xFD67, 'M', u'شحم'), + (0xFD69, 'M', u'شجي'), + (0xFD6A, 'M', u'شمخ'), + (0xFD6C, 'M', u'شمم'), + (0xFD6E, 'M', u'ضحى'), + (0xFD6F, 'M', u'ضخم'), + (0xFD71, 'M', u'طمح'), + (0xFD73, 'M', u'طمم'), + (0xFD74, 'M', u'طمي'), + (0xFD75, 'M', u'عجم'), + (0xFD76, 'M', u'عمم'), + (0xFD78, 'M', u'عمى'), + (0xFD79, 'M', u'غمم'), + (0xFD7A, 'M', u'غمي'), + (0xFD7B, 'M', u'غمى'), + (0xFD7C, 'M', u'فخم'), + (0xFD7E, 'M', u'قمح'), + (0xFD7F, 'M', u'قمم'), + (0xFD80, 'M', u'لحم'), + (0xFD81, 'M', u'لحي'), + (0xFD82, 'M', u'لحى'), + (0xFD83, 'M', u'لجج'), + (0xFD85, 'M', u'لخم'), + (0xFD87, 'M', u'لمح'), + (0xFD89, 'M', u'محج'), + (0xFD8A, 'M', u'محم'), + (0xFD8B, 'M', u'محي'), + (0xFD8C, 'M', u'مجح'), + (0xFD8D, 'M', u'مجم'), + (0xFD8E, 'M', u'مخج'), + (0xFD8F, 'M', u'مخم'), + (0xFD90, 'X'), + (0xFD92, 'M', u'مجخ'), + (0xFD93, 'M', u'همج'), + (0xFD94, 'M', u'همم'), + (0xFD95, 'M', u'نحم'), + (0xFD96, 'M', u'نحى'), + (0xFD97, 'M', u'نجم'), + (0xFD99, 'M', u'نجى'), + (0xFD9A, 'M', u'نمي'), + (0xFD9B, 'M', u'نمى'), + (0xFD9C, 'M', u'يمم'), + (0xFD9E, 'M', u'بخي'), + (0xFD9F, 'M', u'تجي'), + (0xFDA0, 'M', u'تجى'), + (0xFDA1, 'M', u'تخي'), + (0xFDA2, 'M', u'تخى'), + (0xFDA3, 'M', u'تمي'), + (0xFDA4, 'M', u'تمى'), + (0xFDA5, 'M', u'جمي'), + (0xFDA6, 'M', u'جحى'), + (0xFDA7, 'M', u'جمى'), + (0xFDA8, 'M', u'سخى'), + (0xFDA9, 'M', u'صحي'), + (0xFDAA, 'M', u'شحي'), + (0xFDAB, 'M', u'ضحي'), + (0xFDAC, 'M', u'لجي'), + (0xFDAD, 'M', u'لمي'), + (0xFDAE, 'M', u'يحي'), + (0xFDAF, 'M', u'يجي'), + (0xFDB0, 'M', u'يمي'), + (0xFDB1, 'M', u'ممي'), + (0xFDB2, 'M', u'قمي'), + (0xFDB3, 'M', u'نحي'), + (0xFDB4, 'M', u'قمح'), + (0xFDB5, 'M', u'لحم'), + (0xFDB6, 'M', u'عمي'), + (0xFDB7, 'M', u'كمي'), + (0xFDB8, 'M', u'نجح'), + (0xFDB9, 'M', u'مخي'), + (0xFDBA, 'M', u'لجم'), + (0xFDBB, 'M', u'كمم'), + (0xFDBC, 'M', u'لجم'), + (0xFDBD, 'M', u'نجح'), + (0xFDBE, 'M', u'جحي'), + (0xFDBF, 'M', 
u'حجي'), + (0xFDC0, 'M', u'مجي'), + (0xFDC1, 'M', u'فمي'), + (0xFDC2, 'M', u'بحي'), + (0xFDC3, 'M', u'كمم'), + (0xFDC4, 'M', u'عجم'), + (0xFDC5, 'M', u'صمم'), + (0xFDC6, 'M', u'سخي'), + (0xFDC7, 'M', u'نجي'), + (0xFDC8, 'X'), + (0xFDF0, 'M', u'صلے'), + (0xFDF1, 'M', u'قلے'), + (0xFDF2, 'M', u'الله'), + (0xFDF3, 'M', u'اكبر'), + (0xFDF4, 'M', u'محمد'), + (0xFDF5, 'M', u'صلعم'), + ] + +def _seg_49(): + return [ + (0xFDF6, 'M', u'رسول'), + (0xFDF7, 'M', u'عليه'), + (0xFDF8, 'M', u'وسلم'), + (0xFDF9, 'M', u'صلى'), + (0xFDFA, '3', u'صلى الله عليه وسلم'), + (0xFDFB, '3', u'جل جلاله'), + (0xFDFC, 'M', u'ریال'), + (0xFDFD, 'V'), + (0xFDFE, 'X'), + (0xFE00, 'I'), + (0xFE10, '3', u','), + (0xFE11, 'M', u'、'), + (0xFE12, 'X'), + (0xFE13, '3', u':'), + (0xFE14, '3', u';'), + (0xFE15, '3', u'!'), + (0xFE16, '3', u'?'), + (0xFE17, 'M', u'〖'), + (0xFE18, 'M', u'〗'), + (0xFE19, 'X'), + (0xFE20, 'V'), + (0xFE30, 'X'), + (0xFE31, 'M', u'—'), + (0xFE32, 'M', u'–'), + (0xFE33, '3', u'_'), + (0xFE35, '3', u'('), + (0xFE36, '3', u')'), + (0xFE37, '3', u'{'), + (0xFE38, '3', u'}'), + (0xFE39, 'M', u'〔'), + (0xFE3A, 'M', u'〕'), + (0xFE3B, 'M', u'【'), + (0xFE3C, 'M', u'】'), + (0xFE3D, 'M', u'《'), + (0xFE3E, 'M', u'》'), + (0xFE3F, 'M', u'〈'), + (0xFE40, 'M', u'〉'), + (0xFE41, 'M', u'「'), + (0xFE42, 'M', u'」'), + (0xFE43, 'M', u'『'), + (0xFE44, 'M', u'』'), + (0xFE45, 'V'), + (0xFE47, '3', u'['), + (0xFE48, '3', u']'), + (0xFE49, '3', u' ̅'), + (0xFE4D, '3', u'_'), + (0xFE50, '3', u','), + (0xFE51, 'M', u'、'), + (0xFE52, 'X'), + (0xFE54, '3', u';'), + (0xFE55, '3', u':'), + (0xFE56, '3', u'?'), + (0xFE57, '3', u'!'), + (0xFE58, 'M', u'—'), + (0xFE59, '3', u'('), + (0xFE5A, '3', u')'), + (0xFE5B, '3', u'{'), + (0xFE5C, '3', u'}'), + (0xFE5D, 'M', u'〔'), + (0xFE5E, 'M', u'〕'), + (0xFE5F, '3', u'#'), + (0xFE60, '3', u'&'), + (0xFE61, '3', u'*'), + (0xFE62, '3', u'+'), + (0xFE63, 'M', u'-'), + (0xFE64, '3', u'<'), + (0xFE65, '3', u'>'), + (0xFE66, '3', u'='), + (0xFE67, 'X'), + (0xFE68, '3', u'\\'), + (0xFE69, '3', u'$'), + (0xFE6A, '3', u'%'), + (0xFE6B, '3', u'@'), + (0xFE6C, 'X'), + (0xFE70, '3', u' ً'), + (0xFE71, 'M', u'ـً'), + (0xFE72, '3', u' ٌ'), + (0xFE73, 'V'), + (0xFE74, '3', u' ٍ'), + (0xFE75, 'X'), + (0xFE76, '3', u' َ'), + (0xFE77, 'M', u'ـَ'), + (0xFE78, '3', u' ُ'), + (0xFE79, 'M', u'ـُ'), + (0xFE7A, '3', u' ِ'), + (0xFE7B, 'M', u'ـِ'), + (0xFE7C, '3', u' ّ'), + (0xFE7D, 'M', u'ـّ'), + (0xFE7E, '3', u' ْ'), + (0xFE7F, 'M', u'ـْ'), + (0xFE80, 'M', u'ء'), + (0xFE81, 'M', u'آ'), + (0xFE83, 'M', u'أ'), + (0xFE85, 'M', u'ؤ'), + (0xFE87, 'M', u'إ'), + (0xFE89, 'M', u'ئ'), + (0xFE8D, 'M', u'ا'), + (0xFE8F, 'M', u'ب'), + (0xFE93, 'M', u'ة'), + (0xFE95, 'M', u'ت'), + ] + +def _seg_50(): + return [ + (0xFE99, 'M', u'ث'), + (0xFE9D, 'M', u'ج'), + (0xFEA1, 'M', u'ح'), + (0xFEA5, 'M', u'خ'), + (0xFEA9, 'M', u'د'), + (0xFEAB, 'M', u'ذ'), + (0xFEAD, 'M', u'ر'), + (0xFEAF, 'M', u'ز'), + (0xFEB1, 'M', u'س'), + (0xFEB5, 'M', u'ش'), + (0xFEB9, 'M', u'ص'), + (0xFEBD, 'M', u'ض'), + (0xFEC1, 'M', u'ط'), + (0xFEC5, 'M', u'ظ'), + (0xFEC9, 'M', u'ع'), + (0xFECD, 'M', u'غ'), + (0xFED1, 'M', u'ف'), + (0xFED5, 'M', u'ق'), + (0xFED9, 'M', u'ك'), + (0xFEDD, 'M', u'ل'), + (0xFEE1, 'M', u'م'), + (0xFEE5, 'M', u'ن'), + (0xFEE9, 'M', u'ه'), + (0xFEED, 'M', u'و'), + (0xFEEF, 'M', u'ى'), + (0xFEF1, 'M', u'ي'), + (0xFEF5, 'M', u'لآ'), + (0xFEF7, 'M', u'لأ'), + (0xFEF9, 'M', u'لإ'), + (0xFEFB, 'M', u'لا'), + (0xFEFD, 'X'), + (0xFEFF, 'I'), + (0xFF00, 'X'), + (0xFF01, '3', u'!'), + (0xFF02, '3', u'"'), + (0xFF03, '3', u'#'), + (0xFF04, '3', 
u'$'), + (0xFF05, '3', u'%'), + (0xFF06, '3', u'&'), + (0xFF07, '3', u'\''), + (0xFF08, '3', u'('), + (0xFF09, '3', u')'), + (0xFF0A, '3', u'*'), + (0xFF0B, '3', u'+'), + (0xFF0C, '3', u','), + (0xFF0D, 'M', u'-'), + (0xFF0E, 'M', u'.'), + (0xFF0F, '3', u'/'), + (0xFF10, 'M', u'0'), + (0xFF11, 'M', u'1'), + (0xFF12, 'M', u'2'), + (0xFF13, 'M', u'3'), + (0xFF14, 'M', u'4'), + (0xFF15, 'M', u'5'), + (0xFF16, 'M', u'6'), + (0xFF17, 'M', u'7'), + (0xFF18, 'M', u'8'), + (0xFF19, 'M', u'9'), + (0xFF1A, '3', u':'), + (0xFF1B, '3', u';'), + (0xFF1C, '3', u'<'), + (0xFF1D, '3', u'='), + (0xFF1E, '3', u'>'), + (0xFF1F, '3', u'?'), + (0xFF20, '3', u'@'), + (0xFF21, 'M', u'a'), + (0xFF22, 'M', u'b'), + (0xFF23, 'M', u'c'), + (0xFF24, 'M', u'd'), + (0xFF25, 'M', u'e'), + (0xFF26, 'M', u'f'), + (0xFF27, 'M', u'g'), + (0xFF28, 'M', u'h'), + (0xFF29, 'M', u'i'), + (0xFF2A, 'M', u'j'), + (0xFF2B, 'M', u'k'), + (0xFF2C, 'M', u'l'), + (0xFF2D, 'M', u'm'), + (0xFF2E, 'M', u'n'), + (0xFF2F, 'M', u'o'), + (0xFF30, 'M', u'p'), + (0xFF31, 'M', u'q'), + (0xFF32, 'M', u'r'), + (0xFF33, 'M', u's'), + (0xFF34, 'M', u't'), + (0xFF35, 'M', u'u'), + (0xFF36, 'M', u'v'), + (0xFF37, 'M', u'w'), + (0xFF38, 'M', u'x'), + (0xFF39, 'M', u'y'), + (0xFF3A, 'M', u'z'), + (0xFF3B, '3', u'['), + (0xFF3C, '3', u'\\'), + (0xFF3D, '3', u']'), + (0xFF3E, '3', u'^'), + (0xFF3F, '3', u'_'), + (0xFF40, '3', u'`'), + (0xFF41, 'M', u'a'), + (0xFF42, 'M', u'b'), + (0xFF43, 'M', u'c'), + ] + +def _seg_51(): + return [ + (0xFF44, 'M', u'd'), + (0xFF45, 'M', u'e'), + (0xFF46, 'M', u'f'), + (0xFF47, 'M', u'g'), + (0xFF48, 'M', u'h'), + (0xFF49, 'M', u'i'), + (0xFF4A, 'M', u'j'), + (0xFF4B, 'M', u'k'), + (0xFF4C, 'M', u'l'), + (0xFF4D, 'M', u'm'), + (0xFF4E, 'M', u'n'), + (0xFF4F, 'M', u'o'), + (0xFF50, 'M', u'p'), + (0xFF51, 'M', u'q'), + (0xFF52, 'M', u'r'), + (0xFF53, 'M', u's'), + (0xFF54, 'M', u't'), + (0xFF55, 'M', u'u'), + (0xFF56, 'M', u'v'), + (0xFF57, 'M', u'w'), + (0xFF58, 'M', u'x'), + (0xFF59, 'M', u'y'), + (0xFF5A, 'M', u'z'), + (0xFF5B, '3', u'{'), + (0xFF5C, '3', u'|'), + (0xFF5D, '3', u'}'), + (0xFF5E, '3', u'~'), + (0xFF5F, 'M', u'⦅'), + (0xFF60, 'M', u'⦆'), + (0xFF61, 'M', u'.'), + (0xFF62, 'M', u'「'), + (0xFF63, 'M', u'」'), + (0xFF64, 'M', u'、'), + (0xFF65, 'M', u'・'), + (0xFF66, 'M', u'ヲ'), + (0xFF67, 'M', u'ァ'), + (0xFF68, 'M', u'ィ'), + (0xFF69, 'M', u'ゥ'), + (0xFF6A, 'M', u'ェ'), + (0xFF6B, 'M', u'ォ'), + (0xFF6C, 'M', u'ャ'), + (0xFF6D, 'M', u'ュ'), + (0xFF6E, 'M', u'ョ'), + (0xFF6F, 'M', u'ッ'), + (0xFF70, 'M', u'ー'), + (0xFF71, 'M', u'ア'), + (0xFF72, 'M', u'イ'), + (0xFF73, 'M', u'ウ'), + (0xFF74, 'M', u'エ'), + (0xFF75, 'M', u'オ'), + (0xFF76, 'M', u'カ'), + (0xFF77, 'M', u'キ'), + (0xFF78, 'M', u'ク'), + (0xFF79, 'M', u'ケ'), + (0xFF7A, 'M', u'コ'), + (0xFF7B, 'M', u'サ'), + (0xFF7C, 'M', u'シ'), + (0xFF7D, 'M', u'ス'), + (0xFF7E, 'M', u'セ'), + (0xFF7F, 'M', u'ソ'), + (0xFF80, 'M', u'タ'), + (0xFF81, 'M', u'チ'), + (0xFF82, 'M', u'ツ'), + (0xFF83, 'M', u'テ'), + (0xFF84, 'M', u'ト'), + (0xFF85, 'M', u'ナ'), + (0xFF86, 'M', u'ニ'), + (0xFF87, 'M', u'ヌ'), + (0xFF88, 'M', u'ネ'), + (0xFF89, 'M', u'ノ'), + (0xFF8A, 'M', u'ハ'), + (0xFF8B, 'M', u'ヒ'), + (0xFF8C, 'M', u'フ'), + (0xFF8D, 'M', u'ヘ'), + (0xFF8E, 'M', u'ホ'), + (0xFF8F, 'M', u'マ'), + (0xFF90, 'M', u'ミ'), + (0xFF91, 'M', u'ム'), + (0xFF92, 'M', u'メ'), + (0xFF93, 'M', u'モ'), + (0xFF94, 'M', u'ヤ'), + (0xFF95, 'M', u'ユ'), + (0xFF96, 'M', u'ヨ'), + (0xFF97, 'M', u'ラ'), + (0xFF98, 'M', u'リ'), + (0xFF99, 'M', u'ル'), + (0xFF9A, 'M', u'レ'), + (0xFF9B, 'M', u'ロ'), + (0xFF9C, 'M', u'ワ'), + (0xFF9D, 'M', 
u'ン'), + (0xFF9E, 'M', u'゙'), + (0xFF9F, 'M', u'゚'), + (0xFFA0, 'X'), + (0xFFA1, 'M', u'ᄀ'), + (0xFFA2, 'M', u'ᄁ'), + (0xFFA3, 'M', u'ᆪ'), + (0xFFA4, 'M', u'ᄂ'), + (0xFFA5, 'M', u'ᆬ'), + (0xFFA6, 'M', u'ᆭ'), + (0xFFA7, 'M', u'ᄃ'), + ] + +def _seg_52(): + return [ + (0xFFA8, 'M', u'ᄄ'), + (0xFFA9, 'M', u'ᄅ'), + (0xFFAA, 'M', u'ᆰ'), + (0xFFAB, 'M', u'ᆱ'), + (0xFFAC, 'M', u'ᆲ'), + (0xFFAD, 'M', u'ᆳ'), + (0xFFAE, 'M', u'ᆴ'), + (0xFFAF, 'M', u'ᆵ'), + (0xFFB0, 'M', u'ᄚ'), + (0xFFB1, 'M', u'ᄆ'), + (0xFFB2, 'M', u'ᄇ'), + (0xFFB3, 'M', u'ᄈ'), + (0xFFB4, 'M', u'ᄡ'), + (0xFFB5, 'M', u'ᄉ'), + (0xFFB6, 'M', u'ᄊ'), + (0xFFB7, 'M', u'ᄋ'), + (0xFFB8, 'M', u'ᄌ'), + (0xFFB9, 'M', u'ᄍ'), + (0xFFBA, 'M', u'ᄎ'), + (0xFFBB, 'M', u'ᄏ'), + (0xFFBC, 'M', u'ᄐ'), + (0xFFBD, 'M', u'ᄑ'), + (0xFFBE, 'M', u'ᄒ'), + (0xFFBF, 'X'), + (0xFFC2, 'M', u'ᅡ'), + (0xFFC3, 'M', u'ᅢ'), + (0xFFC4, 'M', u'ᅣ'), + (0xFFC5, 'M', u'ᅤ'), + (0xFFC6, 'M', u'ᅥ'), + (0xFFC7, 'M', u'ᅦ'), + (0xFFC8, 'X'), + (0xFFCA, 'M', u'ᅧ'), + (0xFFCB, 'M', u'ᅨ'), + (0xFFCC, 'M', u'ᅩ'), + (0xFFCD, 'M', u'ᅪ'), + (0xFFCE, 'M', u'ᅫ'), + (0xFFCF, 'M', u'ᅬ'), + (0xFFD0, 'X'), + (0xFFD2, 'M', u'ᅭ'), + (0xFFD3, 'M', u'ᅮ'), + (0xFFD4, 'M', u'ᅯ'), + (0xFFD5, 'M', u'ᅰ'), + (0xFFD6, 'M', u'ᅱ'), + (0xFFD7, 'M', u'ᅲ'), + (0xFFD8, 'X'), + (0xFFDA, 'M', u'ᅳ'), + (0xFFDB, 'M', u'ᅴ'), + (0xFFDC, 'M', u'ᅵ'), + (0xFFDD, 'X'), + (0xFFE0, 'M', u'¢'), + (0xFFE1, 'M', u'£'), + (0xFFE2, 'M', u'¬'), + (0xFFE3, '3', u' ̄'), + (0xFFE4, 'M', u'¦'), + (0xFFE5, 'M', u'¥'), + (0xFFE6, 'M', u'₩'), + (0xFFE7, 'X'), + (0xFFE8, 'M', u'│'), + (0xFFE9, 'M', u'←'), + (0xFFEA, 'M', u'↑'), + (0xFFEB, 'M', u'→'), + (0xFFEC, 'M', u'↓'), + (0xFFED, 'M', u'■'), + (0xFFEE, 'M', u'○'), + (0xFFEF, 'X'), + (0x10000, 'V'), + (0x1000C, 'X'), + (0x1000D, 'V'), + (0x10027, 'X'), + (0x10028, 'V'), + (0x1003B, 'X'), + (0x1003C, 'V'), + (0x1003E, 'X'), + (0x1003F, 'V'), + (0x1004E, 'X'), + (0x10050, 'V'), + (0x1005E, 'X'), + (0x10080, 'V'), + (0x100FB, 'X'), + (0x10100, 'V'), + (0x10103, 'X'), + (0x10107, 'V'), + (0x10134, 'X'), + (0x10137, 'V'), + (0x1018F, 'X'), + (0x10190, 'V'), + (0x1019C, 'X'), + (0x101A0, 'V'), + (0x101A1, 'X'), + (0x101D0, 'V'), + (0x101FE, 'X'), + (0x10280, 'V'), + (0x1029D, 'X'), + (0x102A0, 'V'), + (0x102D1, 'X'), + (0x102E0, 'V'), + (0x102FC, 'X'), + (0x10300, 'V'), + (0x10324, 'X'), + (0x1032D, 'V'), + ] + +def _seg_53(): + return [ + (0x1034B, 'X'), + (0x10350, 'V'), + (0x1037B, 'X'), + (0x10380, 'V'), + (0x1039E, 'X'), + (0x1039F, 'V'), + (0x103C4, 'X'), + (0x103C8, 'V'), + (0x103D6, 'X'), + (0x10400, 'M', u'𐐨'), + (0x10401, 'M', u'𐐩'), + (0x10402, 'M', u'𐐪'), + (0x10403, 'M', u'𐐫'), + (0x10404, 'M', u'𐐬'), + (0x10405, 'M', u'𐐭'), + (0x10406, 'M', u'𐐮'), + (0x10407, 'M', u'𐐯'), + (0x10408, 'M', u'𐐰'), + (0x10409, 'M', u'𐐱'), + (0x1040A, 'M', u'𐐲'), + (0x1040B, 'M', u'𐐳'), + (0x1040C, 'M', u'𐐴'), + (0x1040D, 'M', u'𐐵'), + (0x1040E, 'M', u'𐐶'), + (0x1040F, 'M', u'𐐷'), + (0x10410, 'M', u'𐐸'), + (0x10411, 'M', u'𐐹'), + (0x10412, 'M', u'𐐺'), + (0x10413, 'M', u'𐐻'), + (0x10414, 'M', u'𐐼'), + (0x10415, 'M', u'𐐽'), + (0x10416, 'M', u'𐐾'), + (0x10417, 'M', u'𐐿'), + (0x10418, 'M', u'𐑀'), + (0x10419, 'M', u'𐑁'), + (0x1041A, 'M', u'𐑂'), + (0x1041B, 'M', u'𐑃'), + (0x1041C, 'M', u'𐑄'), + (0x1041D, 'M', u'𐑅'), + (0x1041E, 'M', u'𐑆'), + (0x1041F, 'M', u'𐑇'), + (0x10420, 'M', u'𐑈'), + (0x10421, 'M', u'𐑉'), + (0x10422, 'M', u'𐑊'), + (0x10423, 'M', u'𐑋'), + (0x10424, 'M', u'𐑌'), + (0x10425, 'M', u'𐑍'), + (0x10426, 'M', u'𐑎'), + (0x10427, 'M', u'𐑏'), + (0x10428, 'V'), + (0x1049E, 'X'), + (0x104A0, 
'V'), + (0x104AA, 'X'), + (0x104B0, 'M', u'𐓘'), + (0x104B1, 'M', u'𐓙'), + (0x104B2, 'M', u'𐓚'), + (0x104B3, 'M', u'𐓛'), + (0x104B4, 'M', u'𐓜'), + (0x104B5, 'M', u'𐓝'), + (0x104B6, 'M', u'𐓞'), + (0x104B7, 'M', u'𐓟'), + (0x104B8, 'M', u'𐓠'), + (0x104B9, 'M', u'𐓡'), + (0x104BA, 'M', u'𐓢'), + (0x104BB, 'M', u'𐓣'), + (0x104BC, 'M', u'𐓤'), + (0x104BD, 'M', u'𐓥'), + (0x104BE, 'M', u'𐓦'), + (0x104BF, 'M', u'𐓧'), + (0x104C0, 'M', u'𐓨'), + (0x104C1, 'M', u'𐓩'), + (0x104C2, 'M', u'𐓪'), + (0x104C3, 'M', u'𐓫'), + (0x104C4, 'M', u'𐓬'), + (0x104C5, 'M', u'𐓭'), + (0x104C6, 'M', u'𐓮'), + (0x104C7, 'M', u'𐓯'), + (0x104C8, 'M', u'𐓰'), + (0x104C9, 'M', u'𐓱'), + (0x104CA, 'M', u'𐓲'), + (0x104CB, 'M', u'𐓳'), + (0x104CC, 'M', u'𐓴'), + (0x104CD, 'M', u'𐓵'), + (0x104CE, 'M', u'𐓶'), + (0x104CF, 'M', u'𐓷'), + (0x104D0, 'M', u'𐓸'), + (0x104D1, 'M', u'𐓹'), + (0x104D2, 'M', u'𐓺'), + (0x104D3, 'M', u'𐓻'), + (0x104D4, 'X'), + (0x104D8, 'V'), + (0x104FC, 'X'), + (0x10500, 'V'), + (0x10528, 'X'), + (0x10530, 'V'), + (0x10564, 'X'), + (0x1056F, 'V'), + (0x10570, 'X'), + (0x10600, 'V'), + (0x10737, 'X'), + ] + +def _seg_54(): + return [ + (0x10740, 'V'), + (0x10756, 'X'), + (0x10760, 'V'), + (0x10768, 'X'), + (0x10800, 'V'), + (0x10806, 'X'), + (0x10808, 'V'), + (0x10809, 'X'), + (0x1080A, 'V'), + (0x10836, 'X'), + (0x10837, 'V'), + (0x10839, 'X'), + (0x1083C, 'V'), + (0x1083D, 'X'), + (0x1083F, 'V'), + (0x10856, 'X'), + (0x10857, 'V'), + (0x1089F, 'X'), + (0x108A7, 'V'), + (0x108B0, 'X'), + (0x108E0, 'V'), + (0x108F3, 'X'), + (0x108F4, 'V'), + (0x108F6, 'X'), + (0x108FB, 'V'), + (0x1091C, 'X'), + (0x1091F, 'V'), + (0x1093A, 'X'), + (0x1093F, 'V'), + (0x10940, 'X'), + (0x10980, 'V'), + (0x109B8, 'X'), + (0x109BC, 'V'), + (0x109D0, 'X'), + (0x109D2, 'V'), + (0x10A04, 'X'), + (0x10A05, 'V'), + (0x10A07, 'X'), + (0x10A0C, 'V'), + (0x10A14, 'X'), + (0x10A15, 'V'), + (0x10A18, 'X'), + (0x10A19, 'V'), + (0x10A34, 'X'), + (0x10A38, 'V'), + (0x10A3B, 'X'), + (0x10A3F, 'V'), + (0x10A48, 'X'), + (0x10A50, 'V'), + (0x10A59, 'X'), + (0x10A60, 'V'), + (0x10AA0, 'X'), + (0x10AC0, 'V'), + (0x10AE7, 'X'), + (0x10AEB, 'V'), + (0x10AF7, 'X'), + (0x10B00, 'V'), + (0x10B36, 'X'), + (0x10B39, 'V'), + (0x10B56, 'X'), + (0x10B58, 'V'), + (0x10B73, 'X'), + (0x10B78, 'V'), + (0x10B92, 'X'), + (0x10B99, 'V'), + (0x10B9D, 'X'), + (0x10BA9, 'V'), + (0x10BB0, 'X'), + (0x10C00, 'V'), + (0x10C49, 'X'), + (0x10C80, 'M', u'𐳀'), + (0x10C81, 'M', u'𐳁'), + (0x10C82, 'M', u'𐳂'), + (0x10C83, 'M', u'𐳃'), + (0x10C84, 'M', u'𐳄'), + (0x10C85, 'M', u'𐳅'), + (0x10C86, 'M', u'𐳆'), + (0x10C87, 'M', u'𐳇'), + (0x10C88, 'M', u'𐳈'), + (0x10C89, 'M', u'𐳉'), + (0x10C8A, 'M', u'𐳊'), + (0x10C8B, 'M', u'𐳋'), + (0x10C8C, 'M', u'𐳌'), + (0x10C8D, 'M', u'𐳍'), + (0x10C8E, 'M', u'𐳎'), + (0x10C8F, 'M', u'𐳏'), + (0x10C90, 'M', u'𐳐'), + (0x10C91, 'M', u'𐳑'), + (0x10C92, 'M', u'𐳒'), + (0x10C93, 'M', u'𐳓'), + (0x10C94, 'M', u'𐳔'), + (0x10C95, 'M', u'𐳕'), + (0x10C96, 'M', u'𐳖'), + (0x10C97, 'M', u'𐳗'), + (0x10C98, 'M', u'𐳘'), + (0x10C99, 'M', u'𐳙'), + (0x10C9A, 'M', u'𐳚'), + (0x10C9B, 'M', u'𐳛'), + (0x10C9C, 'M', u'𐳜'), + (0x10C9D, 'M', u'𐳝'), + ] + +def _seg_55(): + return [ + (0x10C9E, 'M', u'𐳞'), + (0x10C9F, 'M', u'𐳟'), + (0x10CA0, 'M', u'𐳠'), + (0x10CA1, 'M', u'𐳡'), + (0x10CA2, 'M', u'𐳢'), + (0x10CA3, 'M', u'𐳣'), + (0x10CA4, 'M', u'𐳤'), + (0x10CA5, 'M', u'𐳥'), + (0x10CA6, 'M', u'𐳦'), + (0x10CA7, 'M', u'𐳧'), + (0x10CA8, 'M', u'𐳨'), + (0x10CA9, 'M', u'𐳩'), + (0x10CAA, 'M', u'𐳪'), + (0x10CAB, 'M', u'𐳫'), + (0x10CAC, 'M', u'𐳬'), + (0x10CAD, 'M', u'𐳭'), + (0x10CAE, 'M', u'𐳮'), + (0x10CAF, 
'M', u'𐳯'), + (0x10CB0, 'M', u'𐳰'), + (0x10CB1, 'M', u'𐳱'), + (0x10CB2, 'M', u'𐳲'), + (0x10CB3, 'X'), + (0x10CC0, 'V'), + (0x10CF3, 'X'), + (0x10CFA, 'V'), + (0x10D00, 'X'), + (0x10E60, 'V'), + (0x10E7F, 'X'), + (0x11000, 'V'), + (0x1104E, 'X'), + (0x11052, 'V'), + (0x11070, 'X'), + (0x1107F, 'V'), + (0x110BD, 'X'), + (0x110BE, 'V'), + (0x110C2, 'X'), + (0x110D0, 'V'), + (0x110E9, 'X'), + (0x110F0, 'V'), + (0x110FA, 'X'), + (0x11100, 'V'), + (0x11135, 'X'), + (0x11136, 'V'), + (0x11144, 'X'), + (0x11150, 'V'), + (0x11177, 'X'), + (0x11180, 'V'), + (0x111CE, 'X'), + (0x111D0, 'V'), + (0x111E0, 'X'), + (0x111E1, 'V'), + (0x111F5, 'X'), + (0x11200, 'V'), + (0x11212, 'X'), + (0x11213, 'V'), + (0x1123F, 'X'), + (0x11280, 'V'), + (0x11287, 'X'), + (0x11288, 'V'), + (0x11289, 'X'), + (0x1128A, 'V'), + (0x1128E, 'X'), + (0x1128F, 'V'), + (0x1129E, 'X'), + (0x1129F, 'V'), + (0x112AA, 'X'), + (0x112B0, 'V'), + (0x112EB, 'X'), + (0x112F0, 'V'), + (0x112FA, 'X'), + (0x11300, 'V'), + (0x11304, 'X'), + (0x11305, 'V'), + (0x1130D, 'X'), + (0x1130F, 'V'), + (0x11311, 'X'), + (0x11313, 'V'), + (0x11329, 'X'), + (0x1132A, 'V'), + (0x11331, 'X'), + (0x11332, 'V'), + (0x11334, 'X'), + (0x11335, 'V'), + (0x1133A, 'X'), + (0x1133C, 'V'), + (0x11345, 'X'), + (0x11347, 'V'), + (0x11349, 'X'), + (0x1134B, 'V'), + (0x1134E, 'X'), + (0x11350, 'V'), + (0x11351, 'X'), + (0x11357, 'V'), + (0x11358, 'X'), + (0x1135D, 'V'), + (0x11364, 'X'), + (0x11366, 'V'), + (0x1136D, 'X'), + (0x11370, 'V'), + (0x11375, 'X'), + ] + +def _seg_56(): + return [ + (0x11400, 'V'), + (0x1145A, 'X'), + (0x1145B, 'V'), + (0x1145C, 'X'), + (0x1145D, 'V'), + (0x1145E, 'X'), + (0x11480, 'V'), + (0x114C8, 'X'), + (0x114D0, 'V'), + (0x114DA, 'X'), + (0x11580, 'V'), + (0x115B6, 'X'), + (0x115B8, 'V'), + (0x115DE, 'X'), + (0x11600, 'V'), + (0x11645, 'X'), + (0x11650, 'V'), + (0x1165A, 'X'), + (0x11660, 'V'), + (0x1166D, 'X'), + (0x11680, 'V'), + (0x116B8, 'X'), + (0x116C0, 'V'), + (0x116CA, 'X'), + (0x11700, 'V'), + (0x1171A, 'X'), + (0x1171D, 'V'), + (0x1172C, 'X'), + (0x11730, 'V'), + (0x11740, 'X'), + (0x118A0, 'M', u'𑣀'), + (0x118A1, 'M', u'𑣁'), + (0x118A2, 'M', u'𑣂'), + (0x118A3, 'M', u'𑣃'), + (0x118A4, 'M', u'𑣄'), + (0x118A5, 'M', u'𑣅'), + (0x118A6, 'M', u'𑣆'), + (0x118A7, 'M', u'𑣇'), + (0x118A8, 'M', u'𑣈'), + (0x118A9, 'M', u'𑣉'), + (0x118AA, 'M', u'𑣊'), + (0x118AB, 'M', u'𑣋'), + (0x118AC, 'M', u'𑣌'), + (0x118AD, 'M', u'𑣍'), + (0x118AE, 'M', u'𑣎'), + (0x118AF, 'M', u'𑣏'), + (0x118B0, 'M', u'𑣐'), + (0x118B1, 'M', u'𑣑'), + (0x118B2, 'M', u'𑣒'), + (0x118B3, 'M', u'𑣓'), + (0x118B4, 'M', u'𑣔'), + (0x118B5, 'M', u'𑣕'), + (0x118B6, 'M', u'𑣖'), + (0x118B7, 'M', u'𑣗'), + (0x118B8, 'M', u'𑣘'), + (0x118B9, 'M', u'𑣙'), + (0x118BA, 'M', u'𑣚'), + (0x118BB, 'M', u'𑣛'), + (0x118BC, 'M', u'𑣜'), + (0x118BD, 'M', u'𑣝'), + (0x118BE, 'M', u'𑣞'), + (0x118BF, 'M', u'𑣟'), + (0x118C0, 'V'), + (0x118F3, 'X'), + (0x118FF, 'V'), + (0x11900, 'X'), + (0x11A00, 'V'), + (0x11A48, 'X'), + (0x11A50, 'V'), + (0x11A84, 'X'), + (0x11A86, 'V'), + (0x11A9D, 'X'), + (0x11A9E, 'V'), + (0x11AA3, 'X'), + (0x11AC0, 'V'), + (0x11AF9, 'X'), + (0x11C00, 'V'), + (0x11C09, 'X'), + (0x11C0A, 'V'), + (0x11C37, 'X'), + (0x11C38, 'V'), + (0x11C46, 'X'), + (0x11C50, 'V'), + (0x11C6D, 'X'), + (0x11C70, 'V'), + (0x11C90, 'X'), + (0x11C92, 'V'), + (0x11CA8, 'X'), + (0x11CA9, 'V'), + (0x11CB7, 'X'), + (0x11D00, 'V'), + (0x11D07, 'X'), + (0x11D08, 'V'), + (0x11D0A, 'X'), + (0x11D0B, 'V'), + (0x11D37, 'X'), + (0x11D3A, 'V'), + (0x11D3B, 'X'), + (0x11D3C, 'V'), + (0x11D3E, 'X'), + ] + +def _seg_57(): + 
return [ + (0x11D3F, 'V'), + (0x11D48, 'X'), + (0x11D50, 'V'), + (0x11D5A, 'X'), + (0x12000, 'V'), + (0x1239A, 'X'), + (0x12400, 'V'), + (0x1246F, 'X'), + (0x12470, 'V'), + (0x12475, 'X'), + (0x12480, 'V'), + (0x12544, 'X'), + (0x13000, 'V'), + (0x1342F, 'X'), + (0x14400, 'V'), + (0x14647, 'X'), + (0x16800, 'V'), + (0x16A39, 'X'), + (0x16A40, 'V'), + (0x16A5F, 'X'), + (0x16A60, 'V'), + (0x16A6A, 'X'), + (0x16A6E, 'V'), + (0x16A70, 'X'), + (0x16AD0, 'V'), + (0x16AEE, 'X'), + (0x16AF0, 'V'), + (0x16AF6, 'X'), + (0x16B00, 'V'), + (0x16B46, 'X'), + (0x16B50, 'V'), + (0x16B5A, 'X'), + (0x16B5B, 'V'), + (0x16B62, 'X'), + (0x16B63, 'V'), + (0x16B78, 'X'), + (0x16B7D, 'V'), + (0x16B90, 'X'), + (0x16F00, 'V'), + (0x16F45, 'X'), + (0x16F50, 'V'), + (0x16F7F, 'X'), + (0x16F8F, 'V'), + (0x16FA0, 'X'), + (0x16FE0, 'V'), + (0x16FE2, 'X'), + (0x17000, 'V'), + (0x187ED, 'X'), + (0x18800, 'V'), + (0x18AF3, 'X'), + (0x1B000, 'V'), + (0x1B11F, 'X'), + (0x1B170, 'V'), + (0x1B2FC, 'X'), + (0x1BC00, 'V'), + (0x1BC6B, 'X'), + (0x1BC70, 'V'), + (0x1BC7D, 'X'), + (0x1BC80, 'V'), + (0x1BC89, 'X'), + (0x1BC90, 'V'), + (0x1BC9A, 'X'), + (0x1BC9C, 'V'), + (0x1BCA0, 'I'), + (0x1BCA4, 'X'), + (0x1D000, 'V'), + (0x1D0F6, 'X'), + (0x1D100, 'V'), + (0x1D127, 'X'), + (0x1D129, 'V'), + (0x1D15E, 'M', u'𝅗𝅥'), + (0x1D15F, 'M', u'𝅘𝅥'), + (0x1D160, 'M', u'𝅘𝅥𝅮'), + (0x1D161, 'M', u'𝅘𝅥𝅯'), + (0x1D162, 'M', u'𝅘𝅥𝅰'), + (0x1D163, 'M', u'𝅘𝅥𝅱'), + (0x1D164, 'M', u'𝅘𝅥𝅲'), + (0x1D165, 'V'), + (0x1D173, 'X'), + (0x1D17B, 'V'), + (0x1D1BB, 'M', u'𝆹𝅥'), + (0x1D1BC, 'M', u'𝆺𝅥'), + (0x1D1BD, 'M', u'𝆹𝅥𝅮'), + (0x1D1BE, 'M', u'𝆺𝅥𝅮'), + (0x1D1BF, 'M', u'𝆹𝅥𝅯'), + (0x1D1C0, 'M', u'𝆺𝅥𝅯'), + (0x1D1C1, 'V'), + (0x1D1E9, 'X'), + (0x1D200, 'V'), + (0x1D246, 'X'), + (0x1D300, 'V'), + (0x1D357, 'X'), + (0x1D360, 'V'), + (0x1D372, 'X'), + (0x1D400, 'M', u'a'), + (0x1D401, 'M', u'b'), + (0x1D402, 'M', u'c'), + (0x1D403, 'M', u'd'), + (0x1D404, 'M', u'e'), + (0x1D405, 'M', u'f'), + ] + +def _seg_58(): + return [ + (0x1D406, 'M', u'g'), + (0x1D407, 'M', u'h'), + (0x1D408, 'M', u'i'), + (0x1D409, 'M', u'j'), + (0x1D40A, 'M', u'k'), + (0x1D40B, 'M', u'l'), + (0x1D40C, 'M', u'm'), + (0x1D40D, 'M', u'n'), + (0x1D40E, 'M', u'o'), + (0x1D40F, 'M', u'p'), + (0x1D410, 'M', u'q'), + (0x1D411, 'M', u'r'), + (0x1D412, 'M', u's'), + (0x1D413, 'M', u't'), + (0x1D414, 'M', u'u'), + (0x1D415, 'M', u'v'), + (0x1D416, 'M', u'w'), + (0x1D417, 'M', u'x'), + (0x1D418, 'M', u'y'), + (0x1D419, 'M', u'z'), + (0x1D41A, 'M', u'a'), + (0x1D41B, 'M', u'b'), + (0x1D41C, 'M', u'c'), + (0x1D41D, 'M', u'd'), + (0x1D41E, 'M', u'e'), + (0x1D41F, 'M', u'f'), + (0x1D420, 'M', u'g'), + (0x1D421, 'M', u'h'), + (0x1D422, 'M', u'i'), + (0x1D423, 'M', u'j'), + (0x1D424, 'M', u'k'), + (0x1D425, 'M', u'l'), + (0x1D426, 'M', u'm'), + (0x1D427, 'M', u'n'), + (0x1D428, 'M', u'o'), + (0x1D429, 'M', u'p'), + (0x1D42A, 'M', u'q'), + (0x1D42B, 'M', u'r'), + (0x1D42C, 'M', u's'), + (0x1D42D, 'M', u't'), + (0x1D42E, 'M', u'u'), + (0x1D42F, 'M', u'v'), + (0x1D430, 'M', u'w'), + (0x1D431, 'M', u'x'), + (0x1D432, 'M', u'y'), + (0x1D433, 'M', u'z'), + (0x1D434, 'M', u'a'), + (0x1D435, 'M', u'b'), + (0x1D436, 'M', u'c'), + (0x1D437, 'M', u'd'), + (0x1D438, 'M', u'e'), + (0x1D439, 'M', u'f'), + (0x1D43A, 'M', u'g'), + (0x1D43B, 'M', u'h'), + (0x1D43C, 'M', u'i'), + (0x1D43D, 'M', u'j'), + (0x1D43E, 'M', u'k'), + (0x1D43F, 'M', u'l'), + (0x1D440, 'M', u'm'), + (0x1D441, 'M', u'n'), + (0x1D442, 'M', u'o'), + (0x1D443, 'M', u'p'), + (0x1D444, 'M', u'q'), + (0x1D445, 'M', u'r'), + (0x1D446, 'M', u's'), + (0x1D447, 'M', 
u't'), + (0x1D448, 'M', u'u'), + (0x1D449, 'M', u'v'), + (0x1D44A, 'M', u'w'), + (0x1D44B, 'M', u'x'), + (0x1D44C, 'M', u'y'), + (0x1D44D, 'M', u'z'), + (0x1D44E, 'M', u'a'), + (0x1D44F, 'M', u'b'), + (0x1D450, 'M', u'c'), + (0x1D451, 'M', u'd'), + (0x1D452, 'M', u'e'), + (0x1D453, 'M', u'f'), + (0x1D454, 'M', u'g'), + (0x1D455, 'X'), + (0x1D456, 'M', u'i'), + (0x1D457, 'M', u'j'), + (0x1D458, 'M', u'k'), + (0x1D459, 'M', u'l'), + (0x1D45A, 'M', u'm'), + (0x1D45B, 'M', u'n'), + (0x1D45C, 'M', u'o'), + (0x1D45D, 'M', u'p'), + (0x1D45E, 'M', u'q'), + (0x1D45F, 'M', u'r'), + (0x1D460, 'M', u's'), + (0x1D461, 'M', u't'), + (0x1D462, 'M', u'u'), + (0x1D463, 'M', u'v'), + (0x1D464, 'M', u'w'), + (0x1D465, 'M', u'x'), + (0x1D466, 'M', u'y'), + (0x1D467, 'M', u'z'), + (0x1D468, 'M', u'a'), + (0x1D469, 'M', u'b'), + ] + +def _seg_59(): + return [ + (0x1D46A, 'M', u'c'), + (0x1D46B, 'M', u'd'), + (0x1D46C, 'M', u'e'), + (0x1D46D, 'M', u'f'), + (0x1D46E, 'M', u'g'), + (0x1D46F, 'M', u'h'), + (0x1D470, 'M', u'i'), + (0x1D471, 'M', u'j'), + (0x1D472, 'M', u'k'), + (0x1D473, 'M', u'l'), + (0x1D474, 'M', u'm'), + (0x1D475, 'M', u'n'), + (0x1D476, 'M', u'o'), + (0x1D477, 'M', u'p'), + (0x1D478, 'M', u'q'), + (0x1D479, 'M', u'r'), + (0x1D47A, 'M', u's'), + (0x1D47B, 'M', u't'), + (0x1D47C, 'M', u'u'), + (0x1D47D, 'M', u'v'), + (0x1D47E, 'M', u'w'), + (0x1D47F, 'M', u'x'), + (0x1D480, 'M', u'y'), + (0x1D481, 'M', u'z'), + (0x1D482, 'M', u'a'), + (0x1D483, 'M', u'b'), + (0x1D484, 'M', u'c'), + (0x1D485, 'M', u'd'), + (0x1D486, 'M', u'e'), + (0x1D487, 'M', u'f'), + (0x1D488, 'M', u'g'), + (0x1D489, 'M', u'h'), + (0x1D48A, 'M', u'i'), + (0x1D48B, 'M', u'j'), + (0x1D48C, 'M', u'k'), + (0x1D48D, 'M', u'l'), + (0x1D48E, 'M', u'm'), + (0x1D48F, 'M', u'n'), + (0x1D490, 'M', u'o'), + (0x1D491, 'M', u'p'), + (0x1D492, 'M', u'q'), + (0x1D493, 'M', u'r'), + (0x1D494, 'M', u's'), + (0x1D495, 'M', u't'), + (0x1D496, 'M', u'u'), + (0x1D497, 'M', u'v'), + (0x1D498, 'M', u'w'), + (0x1D499, 'M', u'x'), + (0x1D49A, 'M', u'y'), + (0x1D49B, 'M', u'z'), + (0x1D49C, 'M', u'a'), + (0x1D49D, 'X'), + (0x1D49E, 'M', u'c'), + (0x1D49F, 'M', u'd'), + (0x1D4A0, 'X'), + (0x1D4A2, 'M', u'g'), + (0x1D4A3, 'X'), + (0x1D4A5, 'M', u'j'), + (0x1D4A6, 'M', u'k'), + (0x1D4A7, 'X'), + (0x1D4A9, 'M', u'n'), + (0x1D4AA, 'M', u'o'), + (0x1D4AB, 'M', u'p'), + (0x1D4AC, 'M', u'q'), + (0x1D4AD, 'X'), + (0x1D4AE, 'M', u's'), + (0x1D4AF, 'M', u't'), + (0x1D4B0, 'M', u'u'), + (0x1D4B1, 'M', u'v'), + (0x1D4B2, 'M', u'w'), + (0x1D4B3, 'M', u'x'), + (0x1D4B4, 'M', u'y'), + (0x1D4B5, 'M', u'z'), + (0x1D4B6, 'M', u'a'), + (0x1D4B7, 'M', u'b'), + (0x1D4B8, 'M', u'c'), + (0x1D4B9, 'M', u'd'), + (0x1D4BA, 'X'), + (0x1D4BB, 'M', u'f'), + (0x1D4BC, 'X'), + (0x1D4BD, 'M', u'h'), + (0x1D4BE, 'M', u'i'), + (0x1D4BF, 'M', u'j'), + (0x1D4C0, 'M', u'k'), + (0x1D4C1, 'M', u'l'), + (0x1D4C2, 'M', u'm'), + (0x1D4C3, 'M', u'n'), + (0x1D4C4, 'X'), + (0x1D4C5, 'M', u'p'), + (0x1D4C6, 'M', u'q'), + (0x1D4C7, 'M', u'r'), + (0x1D4C8, 'M', u's'), + (0x1D4C9, 'M', u't'), + (0x1D4CA, 'M', u'u'), + (0x1D4CB, 'M', u'v'), + (0x1D4CC, 'M', u'w'), + (0x1D4CD, 'M', u'x'), + (0x1D4CE, 'M', u'y'), + (0x1D4CF, 'M', u'z'), + (0x1D4D0, 'M', u'a'), + ] + +def _seg_60(): + return [ + (0x1D4D1, 'M', u'b'), + (0x1D4D2, 'M', u'c'), + (0x1D4D3, 'M', u'd'), + (0x1D4D4, 'M', u'e'), + (0x1D4D5, 'M', u'f'), + (0x1D4D6, 'M', u'g'), + (0x1D4D7, 'M', u'h'), + (0x1D4D8, 'M', u'i'), + (0x1D4D9, 'M', u'j'), + (0x1D4DA, 'M', u'k'), + (0x1D4DB, 'M', u'l'), + (0x1D4DC, 'M', u'm'), + (0x1D4DD, 'M', u'n'), + 
(0x1D4DE, 'M', u'o'), + (0x1D4DF, 'M', u'p'), + (0x1D4E0, 'M', u'q'), + (0x1D4E1, 'M', u'r'), + (0x1D4E2, 'M', u's'), + (0x1D4E3, 'M', u't'), + (0x1D4E4, 'M', u'u'), + (0x1D4E5, 'M', u'v'), + (0x1D4E6, 'M', u'w'), + (0x1D4E7, 'M', u'x'), + (0x1D4E8, 'M', u'y'), + (0x1D4E9, 'M', u'z'), + (0x1D4EA, 'M', u'a'), + (0x1D4EB, 'M', u'b'), + (0x1D4EC, 'M', u'c'), + (0x1D4ED, 'M', u'd'), + (0x1D4EE, 'M', u'e'), + (0x1D4EF, 'M', u'f'), + (0x1D4F0, 'M', u'g'), + (0x1D4F1, 'M', u'h'), + (0x1D4F2, 'M', u'i'), + (0x1D4F3, 'M', u'j'), + (0x1D4F4, 'M', u'k'), + (0x1D4F5, 'M', u'l'), + (0x1D4F6, 'M', u'm'), + (0x1D4F7, 'M', u'n'), + (0x1D4F8, 'M', u'o'), + (0x1D4F9, 'M', u'p'), + (0x1D4FA, 'M', u'q'), + (0x1D4FB, 'M', u'r'), + (0x1D4FC, 'M', u's'), + (0x1D4FD, 'M', u't'), + (0x1D4FE, 'M', u'u'), + (0x1D4FF, 'M', u'v'), + (0x1D500, 'M', u'w'), + (0x1D501, 'M', u'x'), + (0x1D502, 'M', u'y'), + (0x1D503, 'M', u'z'), + (0x1D504, 'M', u'a'), + (0x1D505, 'M', u'b'), + (0x1D506, 'X'), + (0x1D507, 'M', u'd'), + (0x1D508, 'M', u'e'), + (0x1D509, 'M', u'f'), + (0x1D50A, 'M', u'g'), + (0x1D50B, 'X'), + (0x1D50D, 'M', u'j'), + (0x1D50E, 'M', u'k'), + (0x1D50F, 'M', u'l'), + (0x1D510, 'M', u'm'), + (0x1D511, 'M', u'n'), + (0x1D512, 'M', u'o'), + (0x1D513, 'M', u'p'), + (0x1D514, 'M', u'q'), + (0x1D515, 'X'), + (0x1D516, 'M', u's'), + (0x1D517, 'M', u't'), + (0x1D518, 'M', u'u'), + (0x1D519, 'M', u'v'), + (0x1D51A, 'M', u'w'), + (0x1D51B, 'M', u'x'), + (0x1D51C, 'M', u'y'), + (0x1D51D, 'X'), + (0x1D51E, 'M', u'a'), + (0x1D51F, 'M', u'b'), + (0x1D520, 'M', u'c'), + (0x1D521, 'M', u'd'), + (0x1D522, 'M', u'e'), + (0x1D523, 'M', u'f'), + (0x1D524, 'M', u'g'), + (0x1D525, 'M', u'h'), + (0x1D526, 'M', u'i'), + (0x1D527, 'M', u'j'), + (0x1D528, 'M', u'k'), + (0x1D529, 'M', u'l'), + (0x1D52A, 'M', u'm'), + (0x1D52B, 'M', u'n'), + (0x1D52C, 'M', u'o'), + (0x1D52D, 'M', u'p'), + (0x1D52E, 'M', u'q'), + (0x1D52F, 'M', u'r'), + (0x1D530, 'M', u's'), + (0x1D531, 'M', u't'), + (0x1D532, 'M', u'u'), + (0x1D533, 'M', u'v'), + (0x1D534, 'M', u'w'), + (0x1D535, 'M', u'x'), + ] + +def _seg_61(): + return [ + (0x1D536, 'M', u'y'), + (0x1D537, 'M', u'z'), + (0x1D538, 'M', u'a'), + (0x1D539, 'M', u'b'), + (0x1D53A, 'X'), + (0x1D53B, 'M', u'd'), + (0x1D53C, 'M', u'e'), + (0x1D53D, 'M', u'f'), + (0x1D53E, 'M', u'g'), + (0x1D53F, 'X'), + (0x1D540, 'M', u'i'), + (0x1D541, 'M', u'j'), + (0x1D542, 'M', u'k'), + (0x1D543, 'M', u'l'), + (0x1D544, 'M', u'm'), + (0x1D545, 'X'), + (0x1D546, 'M', u'o'), + (0x1D547, 'X'), + (0x1D54A, 'M', u's'), + (0x1D54B, 'M', u't'), + (0x1D54C, 'M', u'u'), + (0x1D54D, 'M', u'v'), + (0x1D54E, 'M', u'w'), + (0x1D54F, 'M', u'x'), + (0x1D550, 'M', u'y'), + (0x1D551, 'X'), + (0x1D552, 'M', u'a'), + (0x1D553, 'M', u'b'), + (0x1D554, 'M', u'c'), + (0x1D555, 'M', u'd'), + (0x1D556, 'M', u'e'), + (0x1D557, 'M', u'f'), + (0x1D558, 'M', u'g'), + (0x1D559, 'M', u'h'), + (0x1D55A, 'M', u'i'), + (0x1D55B, 'M', u'j'), + (0x1D55C, 'M', u'k'), + (0x1D55D, 'M', u'l'), + (0x1D55E, 'M', u'm'), + (0x1D55F, 'M', u'n'), + (0x1D560, 'M', u'o'), + (0x1D561, 'M', u'p'), + (0x1D562, 'M', u'q'), + (0x1D563, 'M', u'r'), + (0x1D564, 'M', u's'), + (0x1D565, 'M', u't'), + (0x1D566, 'M', u'u'), + (0x1D567, 'M', u'v'), + (0x1D568, 'M', u'w'), + (0x1D569, 'M', u'x'), + (0x1D56A, 'M', u'y'), + (0x1D56B, 'M', u'z'), + (0x1D56C, 'M', u'a'), + (0x1D56D, 'M', u'b'), + (0x1D56E, 'M', u'c'), + (0x1D56F, 'M', u'd'), + (0x1D570, 'M', u'e'), + (0x1D571, 'M', u'f'), + (0x1D572, 'M', u'g'), + (0x1D573, 'M', u'h'), + (0x1D574, 'M', u'i'), + (0x1D575, 'M', u'j'), + 
(0x1D576, 'M', u'k'), + (0x1D577, 'M', u'l'), + (0x1D578, 'M', u'm'), + (0x1D579, 'M', u'n'), + (0x1D57A, 'M', u'o'), + (0x1D57B, 'M', u'p'), + (0x1D57C, 'M', u'q'), + (0x1D57D, 'M', u'r'), + (0x1D57E, 'M', u's'), + (0x1D57F, 'M', u't'), + (0x1D580, 'M', u'u'), + (0x1D581, 'M', u'v'), + (0x1D582, 'M', u'w'), + (0x1D583, 'M', u'x'), + (0x1D584, 'M', u'y'), + (0x1D585, 'M', u'z'), + (0x1D586, 'M', u'a'), + (0x1D587, 'M', u'b'), + (0x1D588, 'M', u'c'), + (0x1D589, 'M', u'd'), + (0x1D58A, 'M', u'e'), + (0x1D58B, 'M', u'f'), + (0x1D58C, 'M', u'g'), + (0x1D58D, 'M', u'h'), + (0x1D58E, 'M', u'i'), + (0x1D58F, 'M', u'j'), + (0x1D590, 'M', u'k'), + (0x1D591, 'M', u'l'), + (0x1D592, 'M', u'm'), + (0x1D593, 'M', u'n'), + (0x1D594, 'M', u'o'), + (0x1D595, 'M', u'p'), + (0x1D596, 'M', u'q'), + (0x1D597, 'M', u'r'), + (0x1D598, 'M', u's'), + (0x1D599, 'M', u't'), + (0x1D59A, 'M', u'u'), + (0x1D59B, 'M', u'v'), + ] + +def _seg_62(): + return [ + (0x1D59C, 'M', u'w'), + (0x1D59D, 'M', u'x'), + (0x1D59E, 'M', u'y'), + (0x1D59F, 'M', u'z'), + (0x1D5A0, 'M', u'a'), + (0x1D5A1, 'M', u'b'), + (0x1D5A2, 'M', u'c'), + (0x1D5A3, 'M', u'd'), + (0x1D5A4, 'M', u'e'), + (0x1D5A5, 'M', u'f'), + (0x1D5A6, 'M', u'g'), + (0x1D5A7, 'M', u'h'), + (0x1D5A8, 'M', u'i'), + (0x1D5A9, 'M', u'j'), + (0x1D5AA, 'M', u'k'), + (0x1D5AB, 'M', u'l'), + (0x1D5AC, 'M', u'm'), + (0x1D5AD, 'M', u'n'), + (0x1D5AE, 'M', u'o'), + (0x1D5AF, 'M', u'p'), + (0x1D5B0, 'M', u'q'), + (0x1D5B1, 'M', u'r'), + (0x1D5B2, 'M', u's'), + (0x1D5B3, 'M', u't'), + (0x1D5B4, 'M', u'u'), + (0x1D5B5, 'M', u'v'), + (0x1D5B6, 'M', u'w'), + (0x1D5B7, 'M', u'x'), + (0x1D5B8, 'M', u'y'), + (0x1D5B9, 'M', u'z'), + (0x1D5BA, 'M', u'a'), + (0x1D5BB, 'M', u'b'), + (0x1D5BC, 'M', u'c'), + (0x1D5BD, 'M', u'd'), + (0x1D5BE, 'M', u'e'), + (0x1D5BF, 'M', u'f'), + (0x1D5C0, 'M', u'g'), + (0x1D5C1, 'M', u'h'), + (0x1D5C2, 'M', u'i'), + (0x1D5C3, 'M', u'j'), + (0x1D5C4, 'M', u'k'), + (0x1D5C5, 'M', u'l'), + (0x1D5C6, 'M', u'm'), + (0x1D5C7, 'M', u'n'), + (0x1D5C8, 'M', u'o'), + (0x1D5C9, 'M', u'p'), + (0x1D5CA, 'M', u'q'), + (0x1D5CB, 'M', u'r'), + (0x1D5CC, 'M', u's'), + (0x1D5CD, 'M', u't'), + (0x1D5CE, 'M', u'u'), + (0x1D5CF, 'M', u'v'), + (0x1D5D0, 'M', u'w'), + (0x1D5D1, 'M', u'x'), + (0x1D5D2, 'M', u'y'), + (0x1D5D3, 'M', u'z'), + (0x1D5D4, 'M', u'a'), + (0x1D5D5, 'M', u'b'), + (0x1D5D6, 'M', u'c'), + (0x1D5D7, 'M', u'd'), + (0x1D5D8, 'M', u'e'), + (0x1D5D9, 'M', u'f'), + (0x1D5DA, 'M', u'g'), + (0x1D5DB, 'M', u'h'), + (0x1D5DC, 'M', u'i'), + (0x1D5DD, 'M', u'j'), + (0x1D5DE, 'M', u'k'), + (0x1D5DF, 'M', u'l'), + (0x1D5E0, 'M', u'm'), + (0x1D5E1, 'M', u'n'), + (0x1D5E2, 'M', u'o'), + (0x1D5E3, 'M', u'p'), + (0x1D5E4, 'M', u'q'), + (0x1D5E5, 'M', u'r'), + (0x1D5E6, 'M', u's'), + (0x1D5E7, 'M', u't'), + (0x1D5E8, 'M', u'u'), + (0x1D5E9, 'M', u'v'), + (0x1D5EA, 'M', u'w'), + (0x1D5EB, 'M', u'x'), + (0x1D5EC, 'M', u'y'), + (0x1D5ED, 'M', u'z'), + (0x1D5EE, 'M', u'a'), + (0x1D5EF, 'M', u'b'), + (0x1D5F0, 'M', u'c'), + (0x1D5F1, 'M', u'd'), + (0x1D5F2, 'M', u'e'), + (0x1D5F3, 'M', u'f'), + (0x1D5F4, 'M', u'g'), + (0x1D5F5, 'M', u'h'), + (0x1D5F6, 'M', u'i'), + (0x1D5F7, 'M', u'j'), + (0x1D5F8, 'M', u'k'), + (0x1D5F9, 'M', u'l'), + (0x1D5FA, 'M', u'm'), + (0x1D5FB, 'M', u'n'), + (0x1D5FC, 'M', u'o'), + (0x1D5FD, 'M', u'p'), + (0x1D5FE, 'M', u'q'), + (0x1D5FF, 'M', u'r'), + ] + +def _seg_63(): + return [ + (0x1D600, 'M', u's'), + (0x1D601, 'M', u't'), + (0x1D602, 'M', u'u'), + (0x1D603, 'M', u'v'), + (0x1D604, 'M', u'w'), + (0x1D605, 'M', u'x'), + (0x1D606, 'M', u'y'), + 
(0x1D607, 'M', u'z'), + (0x1D608, 'M', u'a'), + (0x1D609, 'M', u'b'), + (0x1D60A, 'M', u'c'), + (0x1D60B, 'M', u'd'), + (0x1D60C, 'M', u'e'), + (0x1D60D, 'M', u'f'), + (0x1D60E, 'M', u'g'), + (0x1D60F, 'M', u'h'), + (0x1D610, 'M', u'i'), + (0x1D611, 'M', u'j'), + (0x1D612, 'M', u'k'), + (0x1D613, 'M', u'l'), + (0x1D614, 'M', u'm'), + (0x1D615, 'M', u'n'), + (0x1D616, 'M', u'o'), + (0x1D617, 'M', u'p'), + (0x1D618, 'M', u'q'), + (0x1D619, 'M', u'r'), + (0x1D61A, 'M', u's'), + (0x1D61B, 'M', u't'), + (0x1D61C, 'M', u'u'), + (0x1D61D, 'M', u'v'), + (0x1D61E, 'M', u'w'), + (0x1D61F, 'M', u'x'), + (0x1D620, 'M', u'y'), + (0x1D621, 'M', u'z'), + (0x1D622, 'M', u'a'), + (0x1D623, 'M', u'b'), + (0x1D624, 'M', u'c'), + (0x1D625, 'M', u'd'), + (0x1D626, 'M', u'e'), + (0x1D627, 'M', u'f'), + (0x1D628, 'M', u'g'), + (0x1D629, 'M', u'h'), + (0x1D62A, 'M', u'i'), + (0x1D62B, 'M', u'j'), + (0x1D62C, 'M', u'k'), + (0x1D62D, 'M', u'l'), + (0x1D62E, 'M', u'm'), + (0x1D62F, 'M', u'n'), + (0x1D630, 'M', u'o'), + (0x1D631, 'M', u'p'), + (0x1D632, 'M', u'q'), + (0x1D633, 'M', u'r'), + (0x1D634, 'M', u's'), + (0x1D635, 'M', u't'), + (0x1D636, 'M', u'u'), + (0x1D637, 'M', u'v'), + (0x1D638, 'M', u'w'), + (0x1D639, 'M', u'x'), + (0x1D63A, 'M', u'y'), + (0x1D63B, 'M', u'z'), + (0x1D63C, 'M', u'a'), + (0x1D63D, 'M', u'b'), + (0x1D63E, 'M', u'c'), + (0x1D63F, 'M', u'd'), + (0x1D640, 'M', u'e'), + (0x1D641, 'M', u'f'), + (0x1D642, 'M', u'g'), + (0x1D643, 'M', u'h'), + (0x1D644, 'M', u'i'), + (0x1D645, 'M', u'j'), + (0x1D646, 'M', u'k'), + (0x1D647, 'M', u'l'), + (0x1D648, 'M', u'm'), + (0x1D649, 'M', u'n'), + (0x1D64A, 'M', u'o'), + (0x1D64B, 'M', u'p'), + (0x1D64C, 'M', u'q'), + (0x1D64D, 'M', u'r'), + (0x1D64E, 'M', u's'), + (0x1D64F, 'M', u't'), + (0x1D650, 'M', u'u'), + (0x1D651, 'M', u'v'), + (0x1D652, 'M', u'w'), + (0x1D653, 'M', u'x'), + (0x1D654, 'M', u'y'), + (0x1D655, 'M', u'z'), + (0x1D656, 'M', u'a'), + (0x1D657, 'M', u'b'), + (0x1D658, 'M', u'c'), + (0x1D659, 'M', u'd'), + (0x1D65A, 'M', u'e'), + (0x1D65B, 'M', u'f'), + (0x1D65C, 'M', u'g'), + (0x1D65D, 'M', u'h'), + (0x1D65E, 'M', u'i'), + (0x1D65F, 'M', u'j'), + (0x1D660, 'M', u'k'), + (0x1D661, 'M', u'l'), + (0x1D662, 'M', u'm'), + (0x1D663, 'M', u'n'), + ] + +def _seg_64(): + return [ + (0x1D664, 'M', u'o'), + (0x1D665, 'M', u'p'), + (0x1D666, 'M', u'q'), + (0x1D667, 'M', u'r'), + (0x1D668, 'M', u's'), + (0x1D669, 'M', u't'), + (0x1D66A, 'M', u'u'), + (0x1D66B, 'M', u'v'), + (0x1D66C, 'M', u'w'), + (0x1D66D, 'M', u'x'), + (0x1D66E, 'M', u'y'), + (0x1D66F, 'M', u'z'), + (0x1D670, 'M', u'a'), + (0x1D671, 'M', u'b'), + (0x1D672, 'M', u'c'), + (0x1D673, 'M', u'd'), + (0x1D674, 'M', u'e'), + (0x1D675, 'M', u'f'), + (0x1D676, 'M', u'g'), + (0x1D677, 'M', u'h'), + (0x1D678, 'M', u'i'), + (0x1D679, 'M', u'j'), + (0x1D67A, 'M', u'k'), + (0x1D67B, 'M', u'l'), + (0x1D67C, 'M', u'm'), + (0x1D67D, 'M', u'n'), + (0x1D67E, 'M', u'o'), + (0x1D67F, 'M', u'p'), + (0x1D680, 'M', u'q'), + (0x1D681, 'M', u'r'), + (0x1D682, 'M', u's'), + (0x1D683, 'M', u't'), + (0x1D684, 'M', u'u'), + (0x1D685, 'M', u'v'), + (0x1D686, 'M', u'w'), + (0x1D687, 'M', u'x'), + (0x1D688, 'M', u'y'), + (0x1D689, 'M', u'z'), + (0x1D68A, 'M', u'a'), + (0x1D68B, 'M', u'b'), + (0x1D68C, 'M', u'c'), + (0x1D68D, 'M', u'd'), + (0x1D68E, 'M', u'e'), + (0x1D68F, 'M', u'f'), + (0x1D690, 'M', u'g'), + (0x1D691, 'M', u'h'), + (0x1D692, 'M', u'i'), + (0x1D693, 'M', u'j'), + (0x1D694, 'M', u'k'), + (0x1D695, 'M', u'l'), + (0x1D696, 'M', u'm'), + (0x1D697, 'M', u'n'), + (0x1D698, 'M', u'o'), + (0x1D699, 'M', 
u'p'), + (0x1D69A, 'M', u'q'), + (0x1D69B, 'M', u'r'), + (0x1D69C, 'M', u's'), + (0x1D69D, 'M', u't'), + (0x1D69E, 'M', u'u'), + (0x1D69F, 'M', u'v'), + (0x1D6A0, 'M', u'w'), + (0x1D6A1, 'M', u'x'), + (0x1D6A2, 'M', u'y'), + (0x1D6A3, 'M', u'z'), + (0x1D6A4, 'M', u'ı'), + (0x1D6A5, 'M', u'ȷ'), + (0x1D6A6, 'X'), + (0x1D6A8, 'M', u'α'), + (0x1D6A9, 'M', u'β'), + (0x1D6AA, 'M', u'γ'), + (0x1D6AB, 'M', u'δ'), + (0x1D6AC, 'M', u'ε'), + (0x1D6AD, 'M', u'ζ'), + (0x1D6AE, 'M', u'η'), + (0x1D6AF, 'M', u'θ'), + (0x1D6B0, 'M', u'ι'), + (0x1D6B1, 'M', u'κ'), + (0x1D6B2, 'M', u'λ'), + (0x1D6B3, 'M', u'μ'), + (0x1D6B4, 'M', u'ν'), + (0x1D6B5, 'M', u'ξ'), + (0x1D6B6, 'M', u'ο'), + (0x1D6B7, 'M', u'π'), + (0x1D6B8, 'M', u'ρ'), + (0x1D6B9, 'M', u'θ'), + (0x1D6BA, 'M', u'σ'), + (0x1D6BB, 'M', u'τ'), + (0x1D6BC, 'M', u'υ'), + (0x1D6BD, 'M', u'φ'), + (0x1D6BE, 'M', u'χ'), + (0x1D6BF, 'M', u'ψ'), + (0x1D6C0, 'M', u'ω'), + (0x1D6C1, 'M', u'∇'), + (0x1D6C2, 'M', u'α'), + (0x1D6C3, 'M', u'β'), + (0x1D6C4, 'M', u'γ'), + (0x1D6C5, 'M', u'δ'), + (0x1D6C6, 'M', u'ε'), + (0x1D6C7, 'M', u'ζ'), + (0x1D6C8, 'M', u'η'), + ] + +def _seg_65(): + return [ + (0x1D6C9, 'M', u'θ'), + (0x1D6CA, 'M', u'ι'), + (0x1D6CB, 'M', u'κ'), + (0x1D6CC, 'M', u'λ'), + (0x1D6CD, 'M', u'μ'), + (0x1D6CE, 'M', u'ν'), + (0x1D6CF, 'M', u'ξ'), + (0x1D6D0, 'M', u'ο'), + (0x1D6D1, 'M', u'π'), + (0x1D6D2, 'M', u'ρ'), + (0x1D6D3, 'M', u'σ'), + (0x1D6D5, 'M', u'τ'), + (0x1D6D6, 'M', u'υ'), + (0x1D6D7, 'M', u'φ'), + (0x1D6D8, 'M', u'χ'), + (0x1D6D9, 'M', u'ψ'), + (0x1D6DA, 'M', u'ω'), + (0x1D6DB, 'M', u'∂'), + (0x1D6DC, 'M', u'ε'), + (0x1D6DD, 'M', u'θ'), + (0x1D6DE, 'M', u'κ'), + (0x1D6DF, 'M', u'φ'), + (0x1D6E0, 'M', u'ρ'), + (0x1D6E1, 'M', u'π'), + (0x1D6E2, 'M', u'α'), + (0x1D6E3, 'M', u'β'), + (0x1D6E4, 'M', u'γ'), + (0x1D6E5, 'M', u'δ'), + (0x1D6E6, 'M', u'ε'), + (0x1D6E7, 'M', u'ζ'), + (0x1D6E8, 'M', u'η'), + (0x1D6E9, 'M', u'θ'), + (0x1D6EA, 'M', u'ι'), + (0x1D6EB, 'M', u'κ'), + (0x1D6EC, 'M', u'λ'), + (0x1D6ED, 'M', u'μ'), + (0x1D6EE, 'M', u'ν'), + (0x1D6EF, 'M', u'ξ'), + (0x1D6F0, 'M', u'ο'), + (0x1D6F1, 'M', u'π'), + (0x1D6F2, 'M', u'ρ'), + (0x1D6F3, 'M', u'θ'), + (0x1D6F4, 'M', u'σ'), + (0x1D6F5, 'M', u'τ'), + (0x1D6F6, 'M', u'υ'), + (0x1D6F7, 'M', u'φ'), + (0x1D6F8, 'M', u'χ'), + (0x1D6F9, 'M', u'ψ'), + (0x1D6FA, 'M', u'ω'), + (0x1D6FB, 'M', u'∇'), + (0x1D6FC, 'M', u'α'), + (0x1D6FD, 'M', u'β'), + (0x1D6FE, 'M', u'γ'), + (0x1D6FF, 'M', u'δ'), + (0x1D700, 'M', u'ε'), + (0x1D701, 'M', u'ζ'), + (0x1D702, 'M', u'η'), + (0x1D703, 'M', u'θ'), + (0x1D704, 'M', u'ι'), + (0x1D705, 'M', u'κ'), + (0x1D706, 'M', u'λ'), + (0x1D707, 'M', u'μ'), + (0x1D708, 'M', u'ν'), + (0x1D709, 'M', u'ξ'), + (0x1D70A, 'M', u'ο'), + (0x1D70B, 'M', u'π'), + (0x1D70C, 'M', u'ρ'), + (0x1D70D, 'M', u'σ'), + (0x1D70F, 'M', u'τ'), + (0x1D710, 'M', u'υ'), + (0x1D711, 'M', u'φ'), + (0x1D712, 'M', u'χ'), + (0x1D713, 'M', u'ψ'), + (0x1D714, 'M', u'ω'), + (0x1D715, 'M', u'∂'), + (0x1D716, 'M', u'ε'), + (0x1D717, 'M', u'θ'), + (0x1D718, 'M', u'κ'), + (0x1D719, 'M', u'φ'), + (0x1D71A, 'M', u'ρ'), + (0x1D71B, 'M', u'π'), + (0x1D71C, 'M', u'α'), + (0x1D71D, 'M', u'β'), + (0x1D71E, 'M', u'γ'), + (0x1D71F, 'M', u'δ'), + (0x1D720, 'M', u'ε'), + (0x1D721, 'M', u'ζ'), + (0x1D722, 'M', u'η'), + (0x1D723, 'M', u'θ'), + (0x1D724, 'M', u'ι'), + (0x1D725, 'M', u'κ'), + (0x1D726, 'M', u'λ'), + (0x1D727, 'M', u'μ'), + (0x1D728, 'M', u'ν'), + (0x1D729, 'M', u'ξ'), + (0x1D72A, 'M', u'ο'), + (0x1D72B, 'M', u'π'), + (0x1D72C, 'M', u'ρ'), + (0x1D72D, 'M', u'θ'), + (0x1D72E, 'M', u'σ'), + ] + +def 
_seg_66(): + return [ + (0x1D72F, 'M', u'τ'), + (0x1D730, 'M', u'υ'), + (0x1D731, 'M', u'φ'), + (0x1D732, 'M', u'χ'), + (0x1D733, 'M', u'ψ'), + (0x1D734, 'M', u'ω'), + (0x1D735, 'M', u'∇'), + (0x1D736, 'M', u'α'), + (0x1D737, 'M', u'β'), + (0x1D738, 'M', u'γ'), + (0x1D739, 'M', u'δ'), + (0x1D73A, 'M', u'ε'), + (0x1D73B, 'M', u'ζ'), + (0x1D73C, 'M', u'η'), + (0x1D73D, 'M', u'θ'), + (0x1D73E, 'M', u'ι'), + (0x1D73F, 'M', u'κ'), + (0x1D740, 'M', u'λ'), + (0x1D741, 'M', u'μ'), + (0x1D742, 'M', u'ν'), + (0x1D743, 'M', u'ξ'), + (0x1D744, 'M', u'ο'), + (0x1D745, 'M', u'π'), + (0x1D746, 'M', u'ρ'), + (0x1D747, 'M', u'σ'), + (0x1D749, 'M', u'τ'), + (0x1D74A, 'M', u'υ'), + (0x1D74B, 'M', u'φ'), + (0x1D74C, 'M', u'χ'), + (0x1D74D, 'M', u'ψ'), + (0x1D74E, 'M', u'ω'), + (0x1D74F, 'M', u'∂'), + (0x1D750, 'M', u'ε'), + (0x1D751, 'M', u'θ'), + (0x1D752, 'M', u'κ'), + (0x1D753, 'M', u'φ'), + (0x1D754, 'M', u'ρ'), + (0x1D755, 'M', u'π'), + (0x1D756, 'M', u'α'), + (0x1D757, 'M', u'β'), + (0x1D758, 'M', u'γ'), + (0x1D759, 'M', u'δ'), + (0x1D75A, 'M', u'ε'), + (0x1D75B, 'M', u'ζ'), + (0x1D75C, 'M', u'η'), + (0x1D75D, 'M', u'θ'), + (0x1D75E, 'M', u'ι'), + (0x1D75F, 'M', u'κ'), + (0x1D760, 'M', u'λ'), + (0x1D761, 'M', u'μ'), + (0x1D762, 'M', u'ν'), + (0x1D763, 'M', u'ξ'), + (0x1D764, 'M', u'ο'), + (0x1D765, 'M', u'π'), + (0x1D766, 'M', u'ρ'), + (0x1D767, 'M', u'θ'), + (0x1D768, 'M', u'σ'), + (0x1D769, 'M', u'τ'), + (0x1D76A, 'M', u'υ'), + (0x1D76B, 'M', u'φ'), + (0x1D76C, 'M', u'χ'), + (0x1D76D, 'M', u'ψ'), + (0x1D76E, 'M', u'ω'), + (0x1D76F, 'M', u'∇'), + (0x1D770, 'M', u'α'), + (0x1D771, 'M', u'β'), + (0x1D772, 'M', u'γ'), + (0x1D773, 'M', u'δ'), + (0x1D774, 'M', u'ε'), + (0x1D775, 'M', u'ζ'), + (0x1D776, 'M', u'η'), + (0x1D777, 'M', u'θ'), + (0x1D778, 'M', u'ι'), + (0x1D779, 'M', u'κ'), + (0x1D77A, 'M', u'λ'), + (0x1D77B, 'M', u'μ'), + (0x1D77C, 'M', u'ν'), + (0x1D77D, 'M', u'ξ'), + (0x1D77E, 'M', u'ο'), + (0x1D77F, 'M', u'π'), + (0x1D780, 'M', u'ρ'), + (0x1D781, 'M', u'σ'), + (0x1D783, 'M', u'τ'), + (0x1D784, 'M', u'υ'), + (0x1D785, 'M', u'φ'), + (0x1D786, 'M', u'χ'), + (0x1D787, 'M', u'ψ'), + (0x1D788, 'M', u'ω'), + (0x1D789, 'M', u'∂'), + (0x1D78A, 'M', u'ε'), + (0x1D78B, 'M', u'θ'), + (0x1D78C, 'M', u'κ'), + (0x1D78D, 'M', u'φ'), + (0x1D78E, 'M', u'ρ'), + (0x1D78F, 'M', u'π'), + (0x1D790, 'M', u'α'), + (0x1D791, 'M', u'β'), + (0x1D792, 'M', u'γ'), + (0x1D793, 'M', u'δ'), + (0x1D794, 'M', u'ε'), + ] + +def _seg_67(): + return [ + (0x1D795, 'M', u'ζ'), + (0x1D796, 'M', u'η'), + (0x1D797, 'M', u'θ'), + (0x1D798, 'M', u'ι'), + (0x1D799, 'M', u'κ'), + (0x1D79A, 'M', u'λ'), + (0x1D79B, 'M', u'μ'), + (0x1D79C, 'M', u'ν'), + (0x1D79D, 'M', u'ξ'), + (0x1D79E, 'M', u'ο'), + (0x1D79F, 'M', u'π'), + (0x1D7A0, 'M', u'ρ'), + (0x1D7A1, 'M', u'θ'), + (0x1D7A2, 'M', u'σ'), + (0x1D7A3, 'M', u'τ'), + (0x1D7A4, 'M', u'υ'), + (0x1D7A5, 'M', u'φ'), + (0x1D7A6, 'M', u'χ'), + (0x1D7A7, 'M', u'ψ'), + (0x1D7A8, 'M', u'ω'), + (0x1D7A9, 'M', u'∇'), + (0x1D7AA, 'M', u'α'), + (0x1D7AB, 'M', u'β'), + (0x1D7AC, 'M', u'γ'), + (0x1D7AD, 'M', u'δ'), + (0x1D7AE, 'M', u'ε'), + (0x1D7AF, 'M', u'ζ'), + (0x1D7B0, 'M', u'η'), + (0x1D7B1, 'M', u'θ'), + (0x1D7B2, 'M', u'ι'), + (0x1D7B3, 'M', u'κ'), + (0x1D7B4, 'M', u'λ'), + (0x1D7B5, 'M', u'μ'), + (0x1D7B6, 'M', u'ν'), + (0x1D7B7, 'M', u'ξ'), + (0x1D7B8, 'M', u'ο'), + (0x1D7B9, 'M', u'π'), + (0x1D7BA, 'M', u'ρ'), + (0x1D7BB, 'M', u'σ'), + (0x1D7BD, 'M', u'τ'), + (0x1D7BE, 'M', u'υ'), + (0x1D7BF, 'M', u'φ'), + (0x1D7C0, 'M', u'χ'), + (0x1D7C1, 'M', u'ψ'), + (0x1D7C2, 'M', u'ω'), + (0x1D7C3, 'M', 
u'∂'), + (0x1D7C4, 'M', u'ε'), + (0x1D7C5, 'M', u'θ'), + (0x1D7C6, 'M', u'κ'), + (0x1D7C7, 'M', u'φ'), + (0x1D7C8, 'M', u'ρ'), + (0x1D7C9, 'M', u'π'), + (0x1D7CA, 'M', u'ϝ'), + (0x1D7CC, 'X'), + (0x1D7CE, 'M', u'0'), + (0x1D7CF, 'M', u'1'), + (0x1D7D0, 'M', u'2'), + (0x1D7D1, 'M', u'3'), + (0x1D7D2, 'M', u'4'), + (0x1D7D3, 'M', u'5'), + (0x1D7D4, 'M', u'6'), + (0x1D7D5, 'M', u'7'), + (0x1D7D6, 'M', u'8'), + (0x1D7D7, 'M', u'9'), + (0x1D7D8, 'M', u'0'), + (0x1D7D9, 'M', u'1'), + (0x1D7DA, 'M', u'2'), + (0x1D7DB, 'M', u'3'), + (0x1D7DC, 'M', u'4'), + (0x1D7DD, 'M', u'5'), + (0x1D7DE, 'M', u'6'), + (0x1D7DF, 'M', u'7'), + (0x1D7E0, 'M', u'8'), + (0x1D7E1, 'M', u'9'), + (0x1D7E2, 'M', u'0'), + (0x1D7E3, 'M', u'1'), + (0x1D7E4, 'M', u'2'), + (0x1D7E5, 'M', u'3'), + (0x1D7E6, 'M', u'4'), + (0x1D7E7, 'M', u'5'), + (0x1D7E8, 'M', u'6'), + (0x1D7E9, 'M', u'7'), + (0x1D7EA, 'M', u'8'), + (0x1D7EB, 'M', u'9'), + (0x1D7EC, 'M', u'0'), + (0x1D7ED, 'M', u'1'), + (0x1D7EE, 'M', u'2'), + (0x1D7EF, 'M', u'3'), + (0x1D7F0, 'M', u'4'), + (0x1D7F1, 'M', u'5'), + (0x1D7F2, 'M', u'6'), + (0x1D7F3, 'M', u'7'), + (0x1D7F4, 'M', u'8'), + (0x1D7F5, 'M', u'9'), + (0x1D7F6, 'M', u'0'), + (0x1D7F7, 'M', u'1'), + (0x1D7F8, 'M', u'2'), + (0x1D7F9, 'M', u'3'), + (0x1D7FA, 'M', u'4'), + (0x1D7FB, 'M', u'5'), + ] + +def _seg_68(): + return [ + (0x1D7FC, 'M', u'6'), + (0x1D7FD, 'M', u'7'), + (0x1D7FE, 'M', u'8'), + (0x1D7FF, 'M', u'9'), + (0x1D800, 'V'), + (0x1DA8C, 'X'), + (0x1DA9B, 'V'), + (0x1DAA0, 'X'), + (0x1DAA1, 'V'), + (0x1DAB0, 'X'), + (0x1E000, 'V'), + (0x1E007, 'X'), + (0x1E008, 'V'), + (0x1E019, 'X'), + (0x1E01B, 'V'), + (0x1E022, 'X'), + (0x1E023, 'V'), + (0x1E025, 'X'), + (0x1E026, 'V'), + (0x1E02B, 'X'), + (0x1E800, 'V'), + (0x1E8C5, 'X'), + (0x1E8C7, 'V'), + (0x1E8D7, 'X'), + (0x1E900, 'M', u'𞤢'), + (0x1E901, 'M', u'𞤣'), + (0x1E902, 'M', u'𞤤'), + (0x1E903, 'M', u'𞤥'), + (0x1E904, 'M', u'𞤦'), + (0x1E905, 'M', u'𞤧'), + (0x1E906, 'M', u'𞤨'), + (0x1E907, 'M', u'𞤩'), + (0x1E908, 'M', u'𞤪'), + (0x1E909, 'M', u'𞤫'), + (0x1E90A, 'M', u'𞤬'), + (0x1E90B, 'M', u'𞤭'), + (0x1E90C, 'M', u'𞤮'), + (0x1E90D, 'M', u'𞤯'), + (0x1E90E, 'M', u'𞤰'), + (0x1E90F, 'M', u'𞤱'), + (0x1E910, 'M', u'𞤲'), + (0x1E911, 'M', u'𞤳'), + (0x1E912, 'M', u'𞤴'), + (0x1E913, 'M', u'𞤵'), + (0x1E914, 'M', u'𞤶'), + (0x1E915, 'M', u'𞤷'), + (0x1E916, 'M', u'𞤸'), + (0x1E917, 'M', u'𞤹'), + (0x1E918, 'M', u'𞤺'), + (0x1E919, 'M', u'𞤻'), + (0x1E91A, 'M', u'𞤼'), + (0x1E91B, 'M', u'𞤽'), + (0x1E91C, 'M', u'𞤾'), + (0x1E91D, 'M', u'𞤿'), + (0x1E91E, 'M', u'𞥀'), + (0x1E91F, 'M', u'𞥁'), + (0x1E920, 'M', u'𞥂'), + (0x1E921, 'M', u'𞥃'), + (0x1E922, 'V'), + (0x1E94B, 'X'), + (0x1E950, 'V'), + (0x1E95A, 'X'), + (0x1E95E, 'V'), + (0x1E960, 'X'), + (0x1EE00, 'M', u'ا'), + (0x1EE01, 'M', u'ب'), + (0x1EE02, 'M', u'ج'), + (0x1EE03, 'M', u'د'), + (0x1EE04, 'X'), + (0x1EE05, 'M', u'و'), + (0x1EE06, 'M', u'ز'), + (0x1EE07, 'M', u'ح'), + (0x1EE08, 'M', u'ط'), + (0x1EE09, 'M', u'ي'), + (0x1EE0A, 'M', u'ك'), + (0x1EE0B, 'M', u'ل'), + (0x1EE0C, 'M', u'م'), + (0x1EE0D, 'M', u'ن'), + (0x1EE0E, 'M', u'س'), + (0x1EE0F, 'M', u'ع'), + (0x1EE10, 'M', u'ف'), + (0x1EE11, 'M', u'ص'), + (0x1EE12, 'M', u'ق'), + (0x1EE13, 'M', u'ر'), + (0x1EE14, 'M', u'ش'), + (0x1EE15, 'M', u'ت'), + (0x1EE16, 'M', u'ث'), + (0x1EE17, 'M', u'خ'), + (0x1EE18, 'M', u'ذ'), + (0x1EE19, 'M', u'ض'), + (0x1EE1A, 'M', u'ظ'), + (0x1EE1B, 'M', u'غ'), + (0x1EE1C, 'M', u'ٮ'), + (0x1EE1D, 'M', u'ں'), + (0x1EE1E, 'M', u'ڡ'), + (0x1EE1F, 'M', u'ٯ'), + (0x1EE20, 'X'), + (0x1EE21, 'M', u'ب'), + (0x1EE22, 'M', u'ج'), + (0x1EE23, 
'X'), + ] + +def _seg_69(): + return [ + (0x1EE24, 'M', u'ه'), + (0x1EE25, 'X'), + (0x1EE27, 'M', u'ح'), + (0x1EE28, 'X'), + (0x1EE29, 'M', u'ي'), + (0x1EE2A, 'M', u'ك'), + (0x1EE2B, 'M', u'ل'), + (0x1EE2C, 'M', u'م'), + (0x1EE2D, 'M', u'ن'), + (0x1EE2E, 'M', u'س'), + (0x1EE2F, 'M', u'ع'), + (0x1EE30, 'M', u'ف'), + (0x1EE31, 'M', u'ص'), + (0x1EE32, 'M', u'ق'), + (0x1EE33, 'X'), + (0x1EE34, 'M', u'ش'), + (0x1EE35, 'M', u'ت'), + (0x1EE36, 'M', u'ث'), + (0x1EE37, 'M', u'خ'), + (0x1EE38, 'X'), + (0x1EE39, 'M', u'ض'), + (0x1EE3A, 'X'), + (0x1EE3B, 'M', u'غ'), + (0x1EE3C, 'X'), + (0x1EE42, 'M', u'ج'), + (0x1EE43, 'X'), + (0x1EE47, 'M', u'ح'), + (0x1EE48, 'X'), + (0x1EE49, 'M', u'ي'), + (0x1EE4A, 'X'), + (0x1EE4B, 'M', u'ل'), + (0x1EE4C, 'X'), + (0x1EE4D, 'M', u'ن'), + (0x1EE4E, 'M', u'س'), + (0x1EE4F, 'M', u'ع'), + (0x1EE50, 'X'), + (0x1EE51, 'M', u'ص'), + (0x1EE52, 'M', u'ق'), + (0x1EE53, 'X'), + (0x1EE54, 'M', u'ش'), + (0x1EE55, 'X'), + (0x1EE57, 'M', u'خ'), + (0x1EE58, 'X'), + (0x1EE59, 'M', u'ض'), + (0x1EE5A, 'X'), + (0x1EE5B, 'M', u'غ'), + (0x1EE5C, 'X'), + (0x1EE5D, 'M', u'ں'), + (0x1EE5E, 'X'), + (0x1EE5F, 'M', u'ٯ'), + (0x1EE60, 'X'), + (0x1EE61, 'M', u'ب'), + (0x1EE62, 'M', u'ج'), + (0x1EE63, 'X'), + (0x1EE64, 'M', u'ه'), + (0x1EE65, 'X'), + (0x1EE67, 'M', u'ح'), + (0x1EE68, 'M', u'ط'), + (0x1EE69, 'M', u'ي'), + (0x1EE6A, 'M', u'ك'), + (0x1EE6B, 'X'), + (0x1EE6C, 'M', u'م'), + (0x1EE6D, 'M', u'ن'), + (0x1EE6E, 'M', u'س'), + (0x1EE6F, 'M', u'ع'), + (0x1EE70, 'M', u'ف'), + (0x1EE71, 'M', u'ص'), + (0x1EE72, 'M', u'ق'), + (0x1EE73, 'X'), + (0x1EE74, 'M', u'ش'), + (0x1EE75, 'M', u'ت'), + (0x1EE76, 'M', u'ث'), + (0x1EE77, 'M', u'خ'), + (0x1EE78, 'X'), + (0x1EE79, 'M', u'ض'), + (0x1EE7A, 'M', u'ظ'), + (0x1EE7B, 'M', u'غ'), + (0x1EE7C, 'M', u'ٮ'), + (0x1EE7D, 'X'), + (0x1EE7E, 'M', u'ڡ'), + (0x1EE7F, 'X'), + (0x1EE80, 'M', u'ا'), + (0x1EE81, 'M', u'ب'), + (0x1EE82, 'M', u'ج'), + (0x1EE83, 'M', u'د'), + (0x1EE84, 'M', u'ه'), + (0x1EE85, 'M', u'و'), + (0x1EE86, 'M', u'ز'), + (0x1EE87, 'M', u'ح'), + (0x1EE88, 'M', u'ط'), + (0x1EE89, 'M', u'ي'), + (0x1EE8A, 'X'), + (0x1EE8B, 'M', u'ل'), + (0x1EE8C, 'M', u'م'), + (0x1EE8D, 'M', u'ن'), + (0x1EE8E, 'M', u'س'), + (0x1EE8F, 'M', u'ع'), + (0x1EE90, 'M', u'ف'), + (0x1EE91, 'M', u'ص'), + (0x1EE92, 'M', u'ق'), + ] + +def _seg_70(): + return [ + (0x1EE93, 'M', u'ر'), + (0x1EE94, 'M', u'ش'), + (0x1EE95, 'M', u'ت'), + (0x1EE96, 'M', u'ث'), + (0x1EE97, 'M', u'خ'), + (0x1EE98, 'M', u'ذ'), + (0x1EE99, 'M', u'ض'), + (0x1EE9A, 'M', u'ظ'), + (0x1EE9B, 'M', u'غ'), + (0x1EE9C, 'X'), + (0x1EEA1, 'M', u'ب'), + (0x1EEA2, 'M', u'ج'), + (0x1EEA3, 'M', u'د'), + (0x1EEA4, 'X'), + (0x1EEA5, 'M', u'و'), + (0x1EEA6, 'M', u'ز'), + (0x1EEA7, 'M', u'ح'), + (0x1EEA8, 'M', u'ط'), + (0x1EEA9, 'M', u'ي'), + (0x1EEAA, 'X'), + (0x1EEAB, 'M', u'ل'), + (0x1EEAC, 'M', u'م'), + (0x1EEAD, 'M', u'ن'), + (0x1EEAE, 'M', u'س'), + (0x1EEAF, 'M', u'ع'), + (0x1EEB0, 'M', u'ف'), + (0x1EEB1, 'M', u'ص'), + (0x1EEB2, 'M', u'ق'), + (0x1EEB3, 'M', u'ر'), + (0x1EEB4, 'M', u'ش'), + (0x1EEB5, 'M', u'ت'), + (0x1EEB6, 'M', u'ث'), + (0x1EEB7, 'M', u'خ'), + (0x1EEB8, 'M', u'ذ'), + (0x1EEB9, 'M', u'ض'), + (0x1EEBA, 'M', u'ظ'), + (0x1EEBB, 'M', u'غ'), + (0x1EEBC, 'X'), + (0x1EEF0, 'V'), + (0x1EEF2, 'X'), + (0x1F000, 'V'), + (0x1F02C, 'X'), + (0x1F030, 'V'), + (0x1F094, 'X'), + (0x1F0A0, 'V'), + (0x1F0AF, 'X'), + (0x1F0B1, 'V'), + (0x1F0C0, 'X'), + (0x1F0C1, 'V'), + (0x1F0D0, 'X'), + (0x1F0D1, 'V'), + (0x1F0F6, 'X'), + (0x1F101, '3', u'0,'), + (0x1F102, '3', u'1,'), + (0x1F103, '3', u'2,'), + (0x1F104, '3', 
u'3,'), + (0x1F105, '3', u'4,'), + (0x1F106, '3', u'5,'), + (0x1F107, '3', u'6,'), + (0x1F108, '3', u'7,'), + (0x1F109, '3', u'8,'), + (0x1F10A, '3', u'9,'), + (0x1F10B, 'V'), + (0x1F10D, 'X'), + (0x1F110, '3', u'(a)'), + (0x1F111, '3', u'(b)'), + (0x1F112, '3', u'(c)'), + (0x1F113, '3', u'(d)'), + (0x1F114, '3', u'(e)'), + (0x1F115, '3', u'(f)'), + (0x1F116, '3', u'(g)'), + (0x1F117, '3', u'(h)'), + (0x1F118, '3', u'(i)'), + (0x1F119, '3', u'(j)'), + (0x1F11A, '3', u'(k)'), + (0x1F11B, '3', u'(l)'), + (0x1F11C, '3', u'(m)'), + (0x1F11D, '3', u'(n)'), + (0x1F11E, '3', u'(o)'), + (0x1F11F, '3', u'(p)'), + (0x1F120, '3', u'(q)'), + (0x1F121, '3', u'(r)'), + (0x1F122, '3', u'(s)'), + (0x1F123, '3', u'(t)'), + (0x1F124, '3', u'(u)'), + (0x1F125, '3', u'(v)'), + (0x1F126, '3', u'(w)'), + (0x1F127, '3', u'(x)'), + (0x1F128, '3', u'(y)'), + (0x1F129, '3', u'(z)'), + (0x1F12A, 'M', u'〔s〕'), + (0x1F12B, 'M', u'c'), + (0x1F12C, 'M', u'r'), + (0x1F12D, 'M', u'cd'), + (0x1F12E, 'M', u'wz'), + (0x1F12F, 'X'), + (0x1F130, 'M', u'a'), + (0x1F131, 'M', u'b'), + (0x1F132, 'M', u'c'), + (0x1F133, 'M', u'd'), + ] + +def _seg_71(): + return [ + (0x1F134, 'M', u'e'), + (0x1F135, 'M', u'f'), + (0x1F136, 'M', u'g'), + (0x1F137, 'M', u'h'), + (0x1F138, 'M', u'i'), + (0x1F139, 'M', u'j'), + (0x1F13A, 'M', u'k'), + (0x1F13B, 'M', u'l'), + (0x1F13C, 'M', u'm'), + (0x1F13D, 'M', u'n'), + (0x1F13E, 'M', u'o'), + (0x1F13F, 'M', u'p'), + (0x1F140, 'M', u'q'), + (0x1F141, 'M', u'r'), + (0x1F142, 'M', u's'), + (0x1F143, 'M', u't'), + (0x1F144, 'M', u'u'), + (0x1F145, 'M', u'v'), + (0x1F146, 'M', u'w'), + (0x1F147, 'M', u'x'), + (0x1F148, 'M', u'y'), + (0x1F149, 'M', u'z'), + (0x1F14A, 'M', u'hv'), + (0x1F14B, 'M', u'mv'), + (0x1F14C, 'M', u'sd'), + (0x1F14D, 'M', u'ss'), + (0x1F14E, 'M', u'ppv'), + (0x1F14F, 'M', u'wc'), + (0x1F150, 'V'), + (0x1F16A, 'M', u'mc'), + (0x1F16B, 'M', u'md'), + (0x1F16C, 'X'), + (0x1F170, 'V'), + (0x1F190, 'M', u'dj'), + (0x1F191, 'V'), + (0x1F1AD, 'X'), + (0x1F1E6, 'V'), + (0x1F200, 'M', u'ほか'), + (0x1F201, 'M', u'ココ'), + (0x1F202, 'M', u'サ'), + (0x1F203, 'X'), + (0x1F210, 'M', u'手'), + (0x1F211, 'M', u'字'), + (0x1F212, 'M', u'双'), + (0x1F213, 'M', u'デ'), + (0x1F214, 'M', u'二'), + (0x1F215, 'M', u'多'), + (0x1F216, 'M', u'解'), + (0x1F217, 'M', u'天'), + (0x1F218, 'M', u'交'), + (0x1F219, 'M', u'映'), + (0x1F21A, 'M', u'無'), + (0x1F21B, 'M', u'料'), + (0x1F21C, 'M', u'前'), + (0x1F21D, 'M', u'後'), + (0x1F21E, 'M', u'再'), + (0x1F21F, 'M', u'新'), + (0x1F220, 'M', u'初'), + (0x1F221, 'M', u'終'), + (0x1F222, 'M', u'生'), + (0x1F223, 'M', u'販'), + (0x1F224, 'M', u'声'), + (0x1F225, 'M', u'吹'), + (0x1F226, 'M', u'演'), + (0x1F227, 'M', u'投'), + (0x1F228, 'M', u'捕'), + (0x1F229, 'M', u'一'), + (0x1F22A, 'M', u'三'), + (0x1F22B, 'M', u'遊'), + (0x1F22C, 'M', u'左'), + (0x1F22D, 'M', u'中'), + (0x1F22E, 'M', u'右'), + (0x1F22F, 'M', u'指'), + (0x1F230, 'M', u'走'), + (0x1F231, 'M', u'打'), + (0x1F232, 'M', u'禁'), + (0x1F233, 'M', u'空'), + (0x1F234, 'M', u'合'), + (0x1F235, 'M', u'満'), + (0x1F236, 'M', u'有'), + (0x1F237, 'M', u'月'), + (0x1F238, 'M', u'申'), + (0x1F239, 'M', u'割'), + (0x1F23A, 'M', u'営'), + (0x1F23B, 'M', u'配'), + (0x1F23C, 'X'), + (0x1F240, 'M', u'〔本〕'), + (0x1F241, 'M', u'〔三〕'), + (0x1F242, 'M', u'〔二〕'), + (0x1F243, 'M', u'〔安〕'), + (0x1F244, 'M', u'〔点〕'), + (0x1F245, 'M', u'〔打〕'), + (0x1F246, 'M', u'〔盗〕'), + (0x1F247, 'M', u'〔勝〕'), + (0x1F248, 'M', u'〔敗〕'), + (0x1F249, 'X'), + (0x1F250, 'M', u'得'), + (0x1F251, 'M', u'可'), + (0x1F252, 'X'), + (0x1F260, 'V'), + ] + +def _seg_72(): + return [ + (0x1F266, 
'X'), + (0x1F300, 'V'), + (0x1F6D5, 'X'), + (0x1F6E0, 'V'), + (0x1F6ED, 'X'), + (0x1F6F0, 'V'), + (0x1F6F9, 'X'), + (0x1F700, 'V'), + (0x1F774, 'X'), + (0x1F780, 'V'), + (0x1F7D5, 'X'), + (0x1F800, 'V'), + (0x1F80C, 'X'), + (0x1F810, 'V'), + (0x1F848, 'X'), + (0x1F850, 'V'), + (0x1F85A, 'X'), + (0x1F860, 'V'), + (0x1F888, 'X'), + (0x1F890, 'V'), + (0x1F8AE, 'X'), + (0x1F900, 'V'), + (0x1F90C, 'X'), + (0x1F910, 'V'), + (0x1F93F, 'X'), + (0x1F940, 'V'), + (0x1F94D, 'X'), + (0x1F950, 'V'), + (0x1F96C, 'X'), + (0x1F980, 'V'), + (0x1F998, 'X'), + (0x1F9C0, 'V'), + (0x1F9C1, 'X'), + (0x1F9D0, 'V'), + (0x1F9E7, 'X'), + (0x20000, 'V'), + (0x2A6D7, 'X'), + (0x2A700, 'V'), + (0x2B735, 'X'), + (0x2B740, 'V'), + (0x2B81E, 'X'), + (0x2B820, 'V'), + (0x2CEA2, 'X'), + (0x2CEB0, 'V'), + (0x2EBE1, 'X'), + (0x2F800, 'M', u'丽'), + (0x2F801, 'M', u'丸'), + (0x2F802, 'M', u'乁'), + (0x2F803, 'M', u'𠄢'), + (0x2F804, 'M', u'你'), + (0x2F805, 'M', u'侮'), + (0x2F806, 'M', u'侻'), + (0x2F807, 'M', u'倂'), + (0x2F808, 'M', u'偺'), + (0x2F809, 'M', u'備'), + (0x2F80A, 'M', u'僧'), + (0x2F80B, 'M', u'像'), + (0x2F80C, 'M', u'㒞'), + (0x2F80D, 'M', u'𠘺'), + (0x2F80E, 'M', u'免'), + (0x2F80F, 'M', u'兔'), + (0x2F810, 'M', u'兤'), + (0x2F811, 'M', u'具'), + (0x2F812, 'M', u'𠔜'), + (0x2F813, 'M', u'㒹'), + (0x2F814, 'M', u'內'), + (0x2F815, 'M', u'再'), + (0x2F816, 'M', u'𠕋'), + (0x2F817, 'M', u'冗'), + (0x2F818, 'M', u'冤'), + (0x2F819, 'M', u'仌'), + (0x2F81A, 'M', u'冬'), + (0x2F81B, 'M', u'况'), + (0x2F81C, 'M', u'𩇟'), + (0x2F81D, 'M', u'凵'), + (0x2F81E, 'M', u'刃'), + (0x2F81F, 'M', u'㓟'), + (0x2F820, 'M', u'刻'), + (0x2F821, 'M', u'剆'), + (0x2F822, 'M', u'割'), + (0x2F823, 'M', u'剷'), + (0x2F824, 'M', u'㔕'), + (0x2F825, 'M', u'勇'), + (0x2F826, 'M', u'勉'), + (0x2F827, 'M', u'勤'), + (0x2F828, 'M', u'勺'), + (0x2F829, 'M', u'包'), + (0x2F82A, 'M', u'匆'), + (0x2F82B, 'M', u'北'), + (0x2F82C, 'M', u'卉'), + (0x2F82D, 'M', u'卑'), + (0x2F82E, 'M', u'博'), + (0x2F82F, 'M', u'即'), + (0x2F830, 'M', u'卽'), + (0x2F831, 'M', u'卿'), + (0x2F834, 'M', u'𠨬'), + (0x2F835, 'M', u'灰'), + (0x2F836, 'M', u'及'), + (0x2F837, 'M', u'叟'), + (0x2F838, 'M', u'𠭣'), + ] + +def _seg_73(): + return [ + (0x2F839, 'M', u'叫'), + (0x2F83A, 'M', u'叱'), + (0x2F83B, 'M', u'吆'), + (0x2F83C, 'M', u'咞'), + (0x2F83D, 'M', u'吸'), + (0x2F83E, 'M', u'呈'), + (0x2F83F, 'M', u'周'), + (0x2F840, 'M', u'咢'), + (0x2F841, 'M', u'哶'), + (0x2F842, 'M', u'唐'), + (0x2F843, 'M', u'啓'), + (0x2F844, 'M', u'啣'), + (0x2F845, 'M', u'善'), + (0x2F847, 'M', u'喙'), + (0x2F848, 'M', u'喫'), + (0x2F849, 'M', u'喳'), + (0x2F84A, 'M', u'嗂'), + (0x2F84B, 'M', u'圖'), + (0x2F84C, 'M', u'嘆'), + (0x2F84D, 'M', u'圗'), + (0x2F84E, 'M', u'噑'), + (0x2F84F, 'M', u'噴'), + (0x2F850, 'M', u'切'), + (0x2F851, 'M', u'壮'), + (0x2F852, 'M', u'城'), + (0x2F853, 'M', u'埴'), + (0x2F854, 'M', u'堍'), + (0x2F855, 'M', u'型'), + (0x2F856, 'M', u'堲'), + (0x2F857, 'M', u'報'), + (0x2F858, 'M', u'墬'), + (0x2F859, 'M', u'𡓤'), + (0x2F85A, 'M', u'売'), + (0x2F85B, 'M', u'壷'), + (0x2F85C, 'M', u'夆'), + (0x2F85D, 'M', u'多'), + (0x2F85E, 'M', u'夢'), + (0x2F85F, 'M', u'奢'), + (0x2F860, 'M', u'𡚨'), + (0x2F861, 'M', u'𡛪'), + (0x2F862, 'M', u'姬'), + (0x2F863, 'M', u'娛'), + (0x2F864, 'M', u'娧'), + (0x2F865, 'M', u'姘'), + (0x2F866, 'M', u'婦'), + (0x2F867, 'M', u'㛮'), + (0x2F868, 'X'), + (0x2F869, 'M', u'嬈'), + (0x2F86A, 'M', u'嬾'), + (0x2F86C, 'M', u'𡧈'), + (0x2F86D, 'M', u'寃'), + (0x2F86E, 'M', u'寘'), + (0x2F86F, 'M', u'寧'), + (0x2F870, 'M', u'寳'), + (0x2F871, 'M', u'𡬘'), + (0x2F872, 'M', u'寿'), + (0x2F873, 'M', u'将'), + (0x2F874, 'X'), + (0x2F875, 'M', u'尢'), 
+ (0x2F876, 'M', u'㞁'), + (0x2F877, 'M', u'屠'), + (0x2F878, 'M', u'屮'), + (0x2F879, 'M', u'峀'), + (0x2F87A, 'M', u'岍'), + (0x2F87B, 'M', u'𡷤'), + (0x2F87C, 'M', u'嵃'), + (0x2F87D, 'M', u'𡷦'), + (0x2F87E, 'M', u'嵮'), + (0x2F87F, 'M', u'嵫'), + (0x2F880, 'M', u'嵼'), + (0x2F881, 'M', u'巡'), + (0x2F882, 'M', u'巢'), + (0x2F883, 'M', u'㠯'), + (0x2F884, 'M', u'巽'), + (0x2F885, 'M', u'帨'), + (0x2F886, 'M', u'帽'), + (0x2F887, 'M', u'幩'), + (0x2F888, 'M', u'㡢'), + (0x2F889, 'M', u'𢆃'), + (0x2F88A, 'M', u'㡼'), + (0x2F88B, 'M', u'庰'), + (0x2F88C, 'M', u'庳'), + (0x2F88D, 'M', u'庶'), + (0x2F88E, 'M', u'廊'), + (0x2F88F, 'M', u'𪎒'), + (0x2F890, 'M', u'廾'), + (0x2F891, 'M', u'𢌱'), + (0x2F893, 'M', u'舁'), + (0x2F894, 'M', u'弢'), + (0x2F896, 'M', u'㣇'), + (0x2F897, 'M', u'𣊸'), + (0x2F898, 'M', u'𦇚'), + (0x2F899, 'M', u'形'), + (0x2F89A, 'M', u'彫'), + (0x2F89B, 'M', u'㣣'), + (0x2F89C, 'M', u'徚'), + (0x2F89D, 'M', u'忍'), + (0x2F89E, 'M', u'志'), + (0x2F89F, 'M', u'忹'), + (0x2F8A0, 'M', u'悁'), + ] + +def _seg_74(): + return [ + (0x2F8A1, 'M', u'㤺'), + (0x2F8A2, 'M', u'㤜'), + (0x2F8A3, 'M', u'悔'), + (0x2F8A4, 'M', u'𢛔'), + (0x2F8A5, 'M', u'惇'), + (0x2F8A6, 'M', u'慈'), + (0x2F8A7, 'M', u'慌'), + (0x2F8A8, 'M', u'慎'), + (0x2F8A9, 'M', u'慌'), + (0x2F8AA, 'M', u'慺'), + (0x2F8AB, 'M', u'憎'), + (0x2F8AC, 'M', u'憲'), + (0x2F8AD, 'M', u'憤'), + (0x2F8AE, 'M', u'憯'), + (0x2F8AF, 'M', u'懞'), + (0x2F8B0, 'M', u'懲'), + (0x2F8B1, 'M', u'懶'), + (0x2F8B2, 'M', u'成'), + (0x2F8B3, 'M', u'戛'), + (0x2F8B4, 'M', u'扝'), + (0x2F8B5, 'M', u'抱'), + (0x2F8B6, 'M', u'拔'), + (0x2F8B7, 'M', u'捐'), + (0x2F8B8, 'M', u'𢬌'), + (0x2F8B9, 'M', u'挽'), + (0x2F8BA, 'M', u'拼'), + (0x2F8BB, 'M', u'捨'), + (0x2F8BC, 'M', u'掃'), + (0x2F8BD, 'M', u'揤'), + (0x2F8BE, 'M', u'𢯱'), + (0x2F8BF, 'M', u'搢'), + (0x2F8C0, 'M', u'揅'), + (0x2F8C1, 'M', u'掩'), + (0x2F8C2, 'M', u'㨮'), + (0x2F8C3, 'M', u'摩'), + (0x2F8C4, 'M', u'摾'), + (0x2F8C5, 'M', u'撝'), + (0x2F8C6, 'M', u'摷'), + (0x2F8C7, 'M', u'㩬'), + (0x2F8C8, 'M', u'敏'), + (0x2F8C9, 'M', u'敬'), + (0x2F8CA, 'M', u'𣀊'), + (0x2F8CB, 'M', u'旣'), + (0x2F8CC, 'M', u'書'), + (0x2F8CD, 'M', u'晉'), + (0x2F8CE, 'M', u'㬙'), + (0x2F8CF, 'M', u'暑'), + (0x2F8D0, 'M', u'㬈'), + (0x2F8D1, 'M', u'㫤'), + (0x2F8D2, 'M', u'冒'), + (0x2F8D3, 'M', u'冕'), + (0x2F8D4, 'M', u'最'), + (0x2F8D5, 'M', u'暜'), + (0x2F8D6, 'M', u'肭'), + (0x2F8D7, 'M', u'䏙'), + (0x2F8D8, 'M', u'朗'), + (0x2F8D9, 'M', u'望'), + (0x2F8DA, 'M', u'朡'), + (0x2F8DB, 'M', u'杞'), + (0x2F8DC, 'M', u'杓'), + (0x2F8DD, 'M', u'𣏃'), + (0x2F8DE, 'M', u'㭉'), + (0x2F8DF, 'M', u'柺'), + (0x2F8E0, 'M', u'枅'), + (0x2F8E1, 'M', u'桒'), + (0x2F8E2, 'M', u'梅'), + (0x2F8E3, 'M', u'𣑭'), + (0x2F8E4, 'M', u'梎'), + (0x2F8E5, 'M', u'栟'), + (0x2F8E6, 'M', u'椔'), + (0x2F8E7, 'M', u'㮝'), + (0x2F8E8, 'M', u'楂'), + (0x2F8E9, 'M', u'榣'), + (0x2F8EA, 'M', u'槪'), + (0x2F8EB, 'M', u'檨'), + (0x2F8EC, 'M', u'𣚣'), + (0x2F8ED, 'M', u'櫛'), + (0x2F8EE, 'M', u'㰘'), + (0x2F8EF, 'M', u'次'), + (0x2F8F0, 'M', u'𣢧'), + (0x2F8F1, 'M', u'歔'), + (0x2F8F2, 'M', u'㱎'), + (0x2F8F3, 'M', u'歲'), + (0x2F8F4, 'M', u'殟'), + (0x2F8F5, 'M', u'殺'), + (0x2F8F6, 'M', u'殻'), + (0x2F8F7, 'M', u'𣪍'), + (0x2F8F8, 'M', u'𡴋'), + (0x2F8F9, 'M', u'𣫺'), + (0x2F8FA, 'M', u'汎'), + (0x2F8FB, 'M', u'𣲼'), + (0x2F8FC, 'M', u'沿'), + (0x2F8FD, 'M', u'泍'), + (0x2F8FE, 'M', u'汧'), + (0x2F8FF, 'M', u'洖'), + (0x2F900, 'M', u'派'), + (0x2F901, 'M', u'海'), + (0x2F902, 'M', u'流'), + (0x2F903, 'M', u'浩'), + (0x2F904, 'M', u'浸'), + ] + +def _seg_75(): + return [ + (0x2F905, 'M', u'涅'), + (0x2F906, 'M', u'𣴞'), + (0x2F907, 'M', u'洴'), + (0x2F908, 'M', u'港'), + 
(0x2F909, 'M', u'湮'), + (0x2F90A, 'M', u'㴳'), + (0x2F90B, 'M', u'滋'), + (0x2F90C, 'M', u'滇'), + (0x2F90D, 'M', u'𣻑'), + (0x2F90E, 'M', u'淹'), + (0x2F90F, 'M', u'潮'), + (0x2F910, 'M', u'𣽞'), + (0x2F911, 'M', u'𣾎'), + (0x2F912, 'M', u'濆'), + (0x2F913, 'M', u'瀹'), + (0x2F914, 'M', u'瀞'), + (0x2F915, 'M', u'瀛'), + (0x2F916, 'M', u'㶖'), + (0x2F917, 'M', u'灊'), + (0x2F918, 'M', u'災'), + (0x2F919, 'M', u'灷'), + (0x2F91A, 'M', u'炭'), + (0x2F91B, 'M', u'𠔥'), + (0x2F91C, 'M', u'煅'), + (0x2F91D, 'M', u'𤉣'), + (0x2F91E, 'M', u'熜'), + (0x2F91F, 'X'), + (0x2F920, 'M', u'爨'), + (0x2F921, 'M', u'爵'), + (0x2F922, 'M', u'牐'), + (0x2F923, 'M', u'𤘈'), + (0x2F924, 'M', u'犀'), + (0x2F925, 'M', u'犕'), + (0x2F926, 'M', u'𤜵'), + (0x2F927, 'M', u'𤠔'), + (0x2F928, 'M', u'獺'), + (0x2F929, 'M', u'王'), + (0x2F92A, 'M', u'㺬'), + (0x2F92B, 'M', u'玥'), + (0x2F92C, 'M', u'㺸'), + (0x2F92E, 'M', u'瑇'), + (0x2F92F, 'M', u'瑜'), + (0x2F930, 'M', u'瑱'), + (0x2F931, 'M', u'璅'), + (0x2F932, 'M', u'瓊'), + (0x2F933, 'M', u'㼛'), + (0x2F934, 'M', u'甤'), + (0x2F935, 'M', u'𤰶'), + (0x2F936, 'M', u'甾'), + (0x2F937, 'M', u'𤲒'), + (0x2F938, 'M', u'異'), + (0x2F939, 'M', u'𢆟'), + (0x2F93A, 'M', u'瘐'), + (0x2F93B, 'M', u'𤾡'), + (0x2F93C, 'M', u'𤾸'), + (0x2F93D, 'M', u'𥁄'), + (0x2F93E, 'M', u'㿼'), + (0x2F93F, 'M', u'䀈'), + (0x2F940, 'M', u'直'), + (0x2F941, 'M', u'𥃳'), + (0x2F942, 'M', u'𥃲'), + (0x2F943, 'M', u'𥄙'), + (0x2F944, 'M', u'𥄳'), + (0x2F945, 'M', u'眞'), + (0x2F946, 'M', u'真'), + (0x2F948, 'M', u'睊'), + (0x2F949, 'M', u'䀹'), + (0x2F94A, 'M', u'瞋'), + (0x2F94B, 'M', u'䁆'), + (0x2F94C, 'M', u'䂖'), + (0x2F94D, 'M', u'𥐝'), + (0x2F94E, 'M', u'硎'), + (0x2F94F, 'M', u'碌'), + (0x2F950, 'M', u'磌'), + (0x2F951, 'M', u'䃣'), + (0x2F952, 'M', u'𥘦'), + (0x2F953, 'M', u'祖'), + (0x2F954, 'M', u'𥚚'), + (0x2F955, 'M', u'𥛅'), + (0x2F956, 'M', u'福'), + (0x2F957, 'M', u'秫'), + (0x2F958, 'M', u'䄯'), + (0x2F959, 'M', u'穀'), + (0x2F95A, 'M', u'穊'), + (0x2F95B, 'M', u'穏'), + (0x2F95C, 'M', u'𥥼'), + (0x2F95D, 'M', u'𥪧'), + (0x2F95F, 'X'), + (0x2F960, 'M', u'䈂'), + (0x2F961, 'M', u'𥮫'), + (0x2F962, 'M', u'篆'), + (0x2F963, 'M', u'築'), + (0x2F964, 'M', u'䈧'), + (0x2F965, 'M', u'𥲀'), + (0x2F966, 'M', u'糒'), + (0x2F967, 'M', u'䊠'), + (0x2F968, 'M', u'糨'), + (0x2F969, 'M', u'糣'), + (0x2F96A, 'M', u'紀'), + (0x2F96B, 'M', u'𥾆'), + ] + +def _seg_76(): + return [ + (0x2F96C, 'M', u'絣'), + (0x2F96D, 'M', u'䌁'), + (0x2F96E, 'M', u'緇'), + (0x2F96F, 'M', u'縂'), + (0x2F970, 'M', u'繅'), + (0x2F971, 'M', u'䌴'), + (0x2F972, 'M', u'𦈨'), + (0x2F973, 'M', u'𦉇'), + (0x2F974, 'M', u'䍙'), + (0x2F975, 'M', u'𦋙'), + (0x2F976, 'M', u'罺'), + (0x2F977, 'M', u'𦌾'), + (0x2F978, 'M', u'羕'), + (0x2F979, 'M', u'翺'), + (0x2F97A, 'M', u'者'), + (0x2F97B, 'M', u'𦓚'), + (0x2F97C, 'M', u'𦔣'), + (0x2F97D, 'M', u'聠'), + (0x2F97E, 'M', u'𦖨'), + (0x2F97F, 'M', u'聰'), + (0x2F980, 'M', u'𣍟'), + (0x2F981, 'M', u'䏕'), + (0x2F982, 'M', u'育'), + (0x2F983, 'M', u'脃'), + (0x2F984, 'M', u'䐋'), + (0x2F985, 'M', u'脾'), + (0x2F986, 'M', u'媵'), + (0x2F987, 'M', u'𦞧'), + (0x2F988, 'M', u'𦞵'), + (0x2F989, 'M', u'𣎓'), + (0x2F98A, 'M', u'𣎜'), + (0x2F98B, 'M', u'舁'), + (0x2F98C, 'M', u'舄'), + (0x2F98D, 'M', u'辞'), + (0x2F98E, 'M', u'䑫'), + (0x2F98F, 'M', u'芑'), + (0x2F990, 'M', u'芋'), + (0x2F991, 'M', u'芝'), + (0x2F992, 'M', u'劳'), + (0x2F993, 'M', u'花'), + (0x2F994, 'M', u'芳'), + (0x2F995, 'M', u'芽'), + (0x2F996, 'M', u'苦'), + (0x2F997, 'M', u'𦬼'), + (0x2F998, 'M', u'若'), + (0x2F999, 'M', u'茝'), + (0x2F99A, 'M', u'荣'), + (0x2F99B, 'M', u'莭'), + (0x2F99C, 'M', u'茣'), + (0x2F99D, 'M', u'莽'), + (0x2F99E, 'M', u'菧'), + 
(0x2F99F, 'M', u'著'), + (0x2F9A0, 'M', u'荓'), + (0x2F9A1, 'M', u'菊'), + (0x2F9A2, 'M', u'菌'), + (0x2F9A3, 'M', u'菜'), + (0x2F9A4, 'M', u'𦰶'), + (0x2F9A5, 'M', u'𦵫'), + (0x2F9A6, 'M', u'𦳕'), + (0x2F9A7, 'M', u'䔫'), + (0x2F9A8, 'M', u'蓱'), + (0x2F9A9, 'M', u'蓳'), + (0x2F9AA, 'M', u'蔖'), + (0x2F9AB, 'M', u'𧏊'), + (0x2F9AC, 'M', u'蕤'), + (0x2F9AD, 'M', u'𦼬'), + (0x2F9AE, 'M', u'䕝'), + (0x2F9AF, 'M', u'䕡'), + (0x2F9B0, 'M', u'𦾱'), + (0x2F9B1, 'M', u'𧃒'), + (0x2F9B2, 'M', u'䕫'), + (0x2F9B3, 'M', u'虐'), + (0x2F9B4, 'M', u'虜'), + (0x2F9B5, 'M', u'虧'), + (0x2F9B6, 'M', u'虩'), + (0x2F9B7, 'M', u'蚩'), + (0x2F9B8, 'M', u'蚈'), + (0x2F9B9, 'M', u'蜎'), + (0x2F9BA, 'M', u'蛢'), + (0x2F9BB, 'M', u'蝹'), + (0x2F9BC, 'M', u'蜨'), + (0x2F9BD, 'M', u'蝫'), + (0x2F9BE, 'M', u'螆'), + (0x2F9BF, 'X'), + (0x2F9C0, 'M', u'蟡'), + (0x2F9C1, 'M', u'蠁'), + (0x2F9C2, 'M', u'䗹'), + (0x2F9C3, 'M', u'衠'), + (0x2F9C4, 'M', u'衣'), + (0x2F9C5, 'M', u'𧙧'), + (0x2F9C6, 'M', u'裗'), + (0x2F9C7, 'M', u'裞'), + (0x2F9C8, 'M', u'䘵'), + (0x2F9C9, 'M', u'裺'), + (0x2F9CA, 'M', u'㒻'), + (0x2F9CB, 'M', u'𧢮'), + (0x2F9CC, 'M', u'𧥦'), + (0x2F9CD, 'M', u'䚾'), + (0x2F9CE, 'M', u'䛇'), + (0x2F9CF, 'M', u'誠'), + ] + +def _seg_77(): + return [ + (0x2F9D0, 'M', u'諭'), + (0x2F9D1, 'M', u'變'), + (0x2F9D2, 'M', u'豕'), + (0x2F9D3, 'M', u'𧲨'), + (0x2F9D4, 'M', u'貫'), + (0x2F9D5, 'M', u'賁'), + (0x2F9D6, 'M', u'贛'), + (0x2F9D7, 'M', u'起'), + (0x2F9D8, 'M', u'𧼯'), + (0x2F9D9, 'M', u'𠠄'), + (0x2F9DA, 'M', u'跋'), + (0x2F9DB, 'M', u'趼'), + (0x2F9DC, 'M', u'跰'), + (0x2F9DD, 'M', u'𠣞'), + (0x2F9DE, 'M', u'軔'), + (0x2F9DF, 'M', u'輸'), + (0x2F9E0, 'M', u'𨗒'), + (0x2F9E1, 'M', u'𨗭'), + (0x2F9E2, 'M', u'邔'), + (0x2F9E3, 'M', u'郱'), + (0x2F9E4, 'M', u'鄑'), + (0x2F9E5, 'M', u'𨜮'), + (0x2F9E6, 'M', u'鄛'), + (0x2F9E7, 'M', u'鈸'), + (0x2F9E8, 'M', u'鋗'), + (0x2F9E9, 'M', u'鋘'), + (0x2F9EA, 'M', u'鉼'), + (0x2F9EB, 'M', u'鏹'), + (0x2F9EC, 'M', u'鐕'), + (0x2F9ED, 'M', u'𨯺'), + (0x2F9EE, 'M', u'開'), + (0x2F9EF, 'M', u'䦕'), + (0x2F9F0, 'M', u'閷'), + (0x2F9F1, 'M', u'𨵷'), + (0x2F9F2, 'M', u'䧦'), + (0x2F9F3, 'M', u'雃'), + (0x2F9F4, 'M', u'嶲'), + (0x2F9F5, 'M', u'霣'), + (0x2F9F6, 'M', u'𩅅'), + (0x2F9F7, 'M', u'𩈚'), + (0x2F9F8, 'M', u'䩮'), + (0x2F9F9, 'M', u'䩶'), + (0x2F9FA, 'M', u'韠'), + (0x2F9FB, 'M', u'𩐊'), + (0x2F9FC, 'M', u'䪲'), + (0x2F9FD, 'M', u'𩒖'), + (0x2F9FE, 'M', u'頋'), + (0x2FA00, 'M', u'頩'), + (0x2FA01, 'M', u'𩖶'), + (0x2FA02, 'M', u'飢'), + (0x2FA03, 'M', u'䬳'), + (0x2FA04, 'M', u'餩'), + (0x2FA05, 'M', u'馧'), + (0x2FA06, 'M', u'駂'), + (0x2FA07, 'M', u'駾'), + (0x2FA08, 'M', u'䯎'), + (0x2FA09, 'M', u'𩬰'), + (0x2FA0A, 'M', u'鬒'), + (0x2FA0B, 'M', u'鱀'), + (0x2FA0C, 'M', u'鳽'), + (0x2FA0D, 'M', u'䳎'), + (0x2FA0E, 'M', u'䳭'), + (0x2FA0F, 'M', u'鵧'), + (0x2FA10, 'M', u'𪃎'), + (0x2FA11, 'M', u'䳸'), + (0x2FA12, 'M', u'𪄅'), + (0x2FA13, 'M', u'𪈎'), + (0x2FA14, 'M', u'𪊑'), + (0x2FA15, 'M', u'麻'), + (0x2FA16, 'M', u'䵖'), + (0x2FA17, 'M', u'黹'), + (0x2FA18, 'M', u'黾'), + (0x2FA19, 'M', u'鼅'), + (0x2FA1A, 'M', u'鼏'), + (0x2FA1B, 'M', u'鼖'), + (0x2FA1C, 'M', u'鼻'), + (0x2FA1D, 'M', u'𪘀'), + (0x2FA1E, 'X'), + (0xE0100, 'I'), + (0xE01F0, 'X'), + ] + +uts46data = tuple( + _seg_0() + + _seg_1() + + _seg_2() + + _seg_3() + + _seg_4() + + _seg_5() + + _seg_6() + + _seg_7() + + _seg_8() + + _seg_9() + + _seg_10() + + _seg_11() + + _seg_12() + + _seg_13() + + _seg_14() + + _seg_15() + + _seg_16() + + _seg_17() + + _seg_18() + + _seg_19() + + _seg_20() + + _seg_21() + + _seg_22() + + _seg_23() + + _seg_24() + + _seg_25() + + _seg_26() + + _seg_27() + + _seg_28() + + _seg_29() + + _seg_30() + + 
_seg_31() + + _seg_32() + + _seg_33() + + _seg_34() + + _seg_35() + + _seg_36() + + _seg_37() + + _seg_38() + + _seg_39() + + _seg_40() + + _seg_41() + + _seg_42() + + _seg_43() + + _seg_44() + + _seg_45() + + _seg_46() + + _seg_47() + + _seg_48() + + _seg_49() + + _seg_50() + + _seg_51() + + _seg_52() + + _seg_53() + + _seg_54() + + _seg_55() + + _seg_56() + + _seg_57() + + _seg_58() + + _seg_59() + + _seg_60() + + _seg_61() + + _seg_62() + + _seg_63() + + _seg_64() + + _seg_65() + + _seg_66() + + _seg_67() + + _seg_68() + + _seg_69() + + _seg_70() + + _seg_71() + + _seg_72() + + _seg_73() + + _seg_74() + + _seg_75() + + _seg_76() + + _seg_77() +) diff --git a/env/lib/python3.6/site-packages/oauthlib-2.1.0.dist-info/INSTALLER b/env/lib/python3.6/site-packages/oauthlib-2.1.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/env/lib/python3.6/site-packages/oauthlib-2.1.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/env/lib/python3.6/site-packages/oauthlib-2.1.0.dist-info/LICENSE.txt b/env/lib/python3.6/site-packages/oauthlib-2.1.0.dist-info/LICENSE.txt new file mode 100644 index 0000000..c10d256 --- /dev/null +++ b/env/lib/python3.6/site-packages/oauthlib-2.1.0.dist-info/LICENSE.txt @@ -0,0 +1,27 @@ +Copyright (c) 2011 Idan Gazit and contributors +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + 1. Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + 2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + 3. Neither the name of this project nor the names of its contributors may + be used to endorse or promote products derived from this software without + specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
\ No newline at end of file diff --git a/env/lib/python3.6/site-packages/oauthlib-2.1.0.dist-info/METADATA b/env/lib/python3.6/site-packages/oauthlib-2.1.0.dist-info/METADATA new file mode 100644 index 0000000..d6e1d05 --- /dev/null +++ b/env/lib/python3.6/site-packages/oauthlib-2.1.0.dist-info/METADATA @@ -0,0 +1,167 @@ +Metadata-Version: 2.1 +Name: oauthlib +Version: 2.1.0 +Summary: A generic, spec-compliant, thorough implementation of the OAuth request-signing logic +Home-page: https://github.com/oauthlib/oauthlib +Author: Idan Gazit +Author-email: idan@gazit.me +Maintainer: Ib Lundgren +Maintainer-email: ib.lundgren@gmail.com +License: BSD +Platform: any +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: MacOS +Classifier: Operating System :: POSIX +Classifier: Operating System :: POSIX :: Linux +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: Implementation +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Provides-Extra: test +Provides-Extra: signals +Provides-Extra: signedtoken +Provides-Extra: rsa +Provides-Extra: rsa +Requires-Dist: cryptography; extra == 'rsa' +Provides-Extra: signals +Requires-Dist: blinker; extra == 'signals' +Provides-Extra: signedtoken +Requires-Dist: cryptography; extra == 'signedtoken' +Requires-Dist: pyjwt (>=1.0.0); extra == 'signedtoken' +Provides-Extra: test +Requires-Dist: nose; extra == 'test' +Requires-Dist: unittest2; extra == 'test' +Requires-Dist: cryptography; extra == 'test' +Requires-Dist: mock; extra == 'test' +Requires-Dist: pyjwt (>=1.0.0); extra == 'test' +Requires-Dist: blinker; extra == 'test' + +OAuthLib +======== + +*A generic, spec-compliant, thorough implementation of the OAuth request-signing +logic for Python 2.7 and 3.4+.* + +.. image:: https://travis-ci.org/oauthlib/oauthlib.svg?branch=master + :target: https://travis-ci.org/oauthlib/oauthlib + :alt: Travis +.. image:: https://coveralls.io/repos/oauthlib/oauthlib/badge.svg?branch=master + :target: https://coveralls.io/r/oauthlib/oauthlib + :alt: Coveralls +.. image:: https://img.shields.io/pypi/pyversions/oauthlib.svg + :target: https://pypi.python.org/pypi/oauthlib + :alt: Download from PyPi +.. image:: https://img.shields.io/pypi/l/oauthlib.svg + :target: https://pypi.python.org/pypi/oauthlib + :alt: License +.. image:: https://img.shields.io/readthedocs/oauthlib.svg + :target: https://oauthlib.readthedocs.io/en/latest/index.html + :alt: Read the Docs +.. image:: https://badges.gitter.im/oauthlib/oauthlib.svg + :target: https://gitter.im/oauthlib/Lobby + :alt: Chat on Gitter + +OAuth often seems complicated and difficult-to-implement. There are several +prominent libraries for handling OAuth requests, but they all suffer from one or +both of the following: + +1. They predate the `OAuth 1.0 spec`_, AKA RFC 5849. +2. They predate the `OAuth 2.0 spec`_, AKA RFC 6749. 
+3. They assume the usage of a specific HTTP request library. + +.. _`OAuth 1.0 spec`: https://tools.ietf.org/html/rfc5849 +.. _`OAuth 2.0 spec`: https://tools.ietf.org/html/rfc6749 + +OAuthLib is a generic utility which implements the logic of OAuth without +assuming a specific HTTP request object or web framework. Use it to graft OAuth +client support onto your favorite HTTP library, or provide support onto your +favourite web framework. If you're a maintainer of such a library, write a thin +veneer on top of OAuthLib and get OAuth support for very little effort. + + +Documentation +-------------- + +Full documentation is available on `Read the Docs`_. All contributions are very +welcome! The documentation is still quite sparse, please open an issue for what +you'd like to know, or discuss it in our `Gitter community`_, or even better, send a +pull request! + +.. _`Gitter community`: https://gitter.im/oauthlib/Lobby +.. _`Read the Docs`: https://oauthlib.readthedocs.io/en/latest/index.html + +Interested in making OAuth requests? +------------------------------------ + +Then you might be more interested in using `requests`_ which has OAuthLib +powered OAuth support provided by the `requests-oauthlib`_ library. + +.. _`requests`: https://github.com/requests/requests +.. _`requests-oauthlib`: https://github.com/requests/requests-oauthlib + +Which web frameworks are supported? +----------------------------------- + +The following packages provide OAuth support using OAuthLib. + +- For Django there is `django-oauth-toolkit`_, which includes `Django REST framework`_ support. +- For Flask there is `flask-oauthlib`_ and `Flask-Dance`_. +- For Pyramid there is `pyramid-oauthlib`_. +- For Bottle there is `bottle-oauthlib`_. + +If you have written an OAuthLib package that supports your favorite framework, +please open a Pull Request, updating the documentation. + +.. _`django-oauth-toolkit`: https://github.com/evonove/django-oauth-toolkit +.. _`flask-oauthlib`: https://github.com/lepture/flask-oauthlib +.. _`Django REST framework`: http://django-rest-framework.org +.. _`Flask-Dance`: https://github.com/singingwolfboy/flask-dance +.. _`pyramid-oauthlib`: https://github.com/tilgovi/pyramid-oauthlib +.. _`bottle-oauthlib`: https://github.com/thomsonreuters/bottle-oauthlib + +Using OAuthLib? Please get in touch! +------------------------------------ +Patching OAuth support onto an http request framework? Creating an OAuth +provider extension for a web framework? Simply using OAuthLib to Get Things Done +or to learn? + +No matter which we'd love to hear from you in our `Gitter community`_ or if you have +anything in particular you would like to have, change or comment on don't +hesitate for a second to send a pull request or open an issue. We might be quite +busy and therefore slow to reply but we love feedback! + +Chances are you have run into something annoying that you wish there was +documentation for, if you wish to gain eternal fame and glory, and a drink if we +have the pleasure to run into eachother, please send a docs pull request =) + +.. _`Gitter community`: https://gitter.im/oauthlib/Lobby + +License +------- + +OAuthLib is yours to use and abuse according to the terms of the BSD license. +Check the LICENSE file for full details. + +Changelog +--------- + +*OAuthLib is in active development, with the core of both OAuth 1 and 2 +completed, for providers as well as clients.* See `supported features`_ for +details. + +.. 
_`supported features`: https://oauthlib.readthedocs.io/en/latest/feature_matrix.html + +For a full changelog see ``CHANGELOG.rst``. + + diff --git a/env/lib/python3.6/site-packages/oauthlib-2.1.0.dist-info/RECORD b/env/lib/python3.6/site-packages/oauthlib-2.1.0.dist-info/RECORD new file mode 100644 index 0000000..0e5dead --- /dev/null +++ b/env/lib/python3.6/site-packages/oauthlib-2.1.0.dist-info/RECORD @@ -0,0 +1,102 @@ +oauthlib/__init__.py,sha256=FRYrT5p6QoskGGBc1xuzi6xw8mxJSjZxUbRremTMVtQ,516 +oauthlib/common.py,sha256=Of_YKjXlEUCoV-Ghxpz6-0LT74beOPn88xcAnTrugBI,14705 +oauthlib/signals.py,sha256=WdwbOYf_bsZAIWAfxxEiKixqm9a56rCkjAmgyw1JQiE,1521 +oauthlib/uri_validate.py,sha256=PpEpmrRxA1w-H3oicm0vqsg9dM9hjGRx1wxh96Xnl7k,7611 +oauthlib/oauth1/__init__.py,sha256=DHwYb0S5eNIqm6VXql7SaoFywtW6d9MgtGZAzZyOTk4,842 +oauthlib/oauth1/rfc5849/__init__.py,sha256=0Gtt8q6cdLIh7Tuk96PRy46qEwhmVFoNpRdWWl_QhtE,15247 +oauthlib/oauth1/rfc5849/errors.py,sha256=9BqHAtzoVUQ7v0SBkT3COFt2JuwMZlOOEXtL0pv7d4I,2543 +oauthlib/oauth1/rfc5849/parameters.py,sha256=tZDwYfs0ZThyFw20yhskgoMvUQ3FGAHk3dWG4owEX5U,4961 +oauthlib/oauth1/rfc5849/request_validator.py,sha256=41vU3L8typle2pcRuF1AspgJRBEfk4NvFMbgduBRLgc,30462 +oauthlib/oauth1/rfc5849/signature.py,sha256=Tb4HBi-0-SJjOSoyVpVO_ppKrAsZ-Uek15rq4i7g40c,23617 +oauthlib/oauth1/rfc5849/utils.py,sha256=xQRvb2qnMX_mEKFYDQiDY-mQeguUk9Wf3ezDGAULBTs,2782 +oauthlib/oauth1/rfc5849/endpoints/__init__.py,sha256=d1ZrqNd-gGcMFKILLIE3yVoQVR-bx9AQaj0OqPrX-qc,352 +oauthlib/oauth1/rfc5849/endpoints/access_token.py,sha256=CZUCG45c5pPVNM7X9jTFYfBgUez4IplLmabKFLITUTU,9345 +oauthlib/oauth1/rfc5849/endpoints/authorization.py,sha256=MaSkwWYuClN3JH-7DNsKJFwDUT3HbJhPvKynkNQ6J2o,6823 +oauthlib/oauth1/rfc5849/endpoints/base.py,sha256=5t4h5oHydbRNVwHe5akXr-UEtzmtKBkFEDk5yNCIIp0,10355 +oauthlib/oauth1/rfc5849/endpoints/pre_configured.py,sha256=ttawzUkqmjvu1OlCw-fMDEFJNJYXk0OAtZYPBQDZc4E,605 +oauthlib/oauth1/rfc5849/endpoints/request_token.py,sha256=xZZAz4_AS_OcT84-OxA8j6X77Agu7Ur-uy0NEC7_IWA,9285 +oauthlib/oauth1/rfc5849/endpoints/resource.py,sha256=YIYfNO5bgXXF758qMskXY-E-CbkZxbdS-J6IbGiOYGc,7434 +oauthlib/oauth1/rfc5849/endpoints/signature_only.py,sha256=1gVLRB6MFpa-hTlzaR0wfAKSpD53t-7sGww_xwTpV4o,3385 +oauthlib/oauth2/__init__.py,sha256=h-aY5iK2_DiM4M0yOTyCfCqQKxrK4WASGA19b0B6w6k,2141 +oauthlib/oauth2/rfc6749/__init__.py,sha256=ByRccoeZxOuCI5LVDLkzJKzyrzSYyqX6yNbAJrRWSnQ,1728 +oauthlib/oauth2/rfc6749/errors.py,sha256=ps9pEcHNT0vkquAGyJpUOesYyixLIH6EMvnWSCH6pCc,12866 +oauthlib/oauth2/rfc6749/parameters.py,sha256=vti9Dd_xHVg7ZrmzfuhEHGWJrFGYOCSmWKPHa0XwouQ,15929 +oauthlib/oauth2/rfc6749/request_validator.py,sha256=fTGi7AeH2_84SAN30N83WhkkdOPdyi3Pgt7SY5V-KhM,24587 +oauthlib/oauth2/rfc6749/tokens.py,sha256=qNIhqAJIXDHTI5xX3IM-CiZ4fUXmgReGEBvUgjJnpnk,9805 +oauthlib/oauth2/rfc6749/utils.py,sha256=TbyPVOw_hfBrf3A82HI94bEpHb0HA5At_uMg4neskEo,2494 +oauthlib/oauth2/rfc6749/clients/__init__.py,sha256=MBwNpK8to49QlYYMxhHooU4UnPG6hsKpAVwoCm-ufE4,562 +oauthlib/oauth2/rfc6749/clients/backend_application.py,sha256=lxloTG9PEfD8m9NdvxeWrAtMSA4soHOd2xASo5Cb4As,2477 +oauthlib/oauth2/rfc6749/clients/base.py,sha256=BujIxA0YU0i9w8OKaTu7G_xnrmhuTF1MV_IW68x9ogk,20833 +oauthlib/oauth2/rfc6749/clients/legacy_application.py,sha256=S0WoSiACk8gdiJKR9qZMQxlnHWI3MmnsbM79YpGIJtM,3316 +oauthlib/oauth2/rfc6749/clients/mobile_application.py,sha256=fuj2ozuMr4mEddzcxuxVV8OIK2_YymgLnpGu18ZILBU,8781 +oauthlib/oauth2/rfc6749/clients/service_application.py,sha256=ojsYVvuaf46_t3v_R0-w8ryyHZIz4B85de7h2UKfBdk,6984 
+oauthlib/oauth2/rfc6749/clients/web_application.py,sha256=8T73IkN-5xV4TIiKsQWTfYB_gYP6LIKWIEMHYWlJtNw,9212 +oauthlib/oauth2/rfc6749/endpoints/__init__.py,sha256=2cFcinSijL_H9ToMCwc8aYTYQhpMnNvUtXY8LNGMYZ0,648 +oauthlib/oauth2/rfc6749/endpoints/authorization.py,sha256=5Rc-2QMuBeDowq2QJQD9PaYhf_meUoJlL9OvmNf-Dt8,4666 +oauthlib/oauth2/rfc6749/endpoints/base.py,sha256=L2QooEesOsDIlMvR2jPStG31p1_bu9qX0Ab3VCQw_So,1733 +oauthlib/oauth2/rfc6749/endpoints/pre_configured.py,sha256=OccCP8eMhrT6ReyKQs8PHxH1bDjirfyj8wWKM-tn7MA,12327 +oauthlib/oauth2/rfc6749/endpoints/resource.py,sha256=4Br4ZvzTc4mhVW9Bsw_gHQNA-BwAyr0_-r6GotaCifA,3316 +oauthlib/oauth2/rfc6749/endpoints/revocation.py,sha256=1nCTzF4nIP_riZCj_ECySPN1hXaQxIwM2bFofAFVNxE,5802 +oauthlib/oauth2/rfc6749/endpoints/token.py,sha256=XozL_QXZRbGg353Bd3Qs2j9Jt0iYU0NRUrXBrhWTex4,4517 +oauthlib/oauth2/rfc6749/grant_types/__init__.py,sha256=Y4ejhY509WtGNwi6S2_cCiY_xdEIAxnvsQGDMdT1XCc,728 +oauthlib/oauth2/rfc6749/grant_types/authorization_code.py,sha256=QoSF-XCqfShT4Hol08dz0qY2ltFjbz1jxKkLxCoYLQA,21597 +oauthlib/oauth2/rfc6749/grant_types/base.py,sha256=uc1-nu-V6RvCvCQEMvrEj_exo00sxvuMCoNrYjdXBtg,7576 +oauthlib/oauth2/rfc6749/grant_types/client_credentials.py,sha256=qOHns6SaCVIePsDHMdixylYupIqwqw-67E8MSqJAnYE,4951 +oauthlib/oauth2/rfc6749/grant_types/implicit.py,sha256=Ypjgk-saAtEzviU68DRqxBNIo2vdotE3dHMNErILpX8,17639 +oauthlib/oauth2/rfc6749/grant_types/openid_connect.py,sha256=GgcuURNoDYKihZ0xOidM8cciqwo9L6Alv21oAlxYBxY,18890 +oauthlib/oauth2/rfc6749/grant_types/refresh_token.py,sha256=jpeZliXaE03qziO2sWIet9XX-l-q9t0rbM2G1u1Ujhk,5649 +oauthlib/oauth2/rfc6749/grant_types/resource_owner_password_credentials.py,sha256=T9gNakVJEBfXo4ndiCC74VCNbhIWFiu8YScD9tJ1P6A,8418 +oauthlib-2.1.0.dist-info/LICENSE.txt,sha256=nhrocl9YFXONrbbLsJ9FBFPZTgfHte-u6GT1RqYgmpU,1534 +oauthlib-2.1.0.dist-info/METADATA,sha256=49wqCxm7pm0jfLlt1eI7HOu33Ec1Vz1OJkLpzw5B4Nw,6693 +oauthlib-2.1.0.dist-info/RECORD,, +oauthlib-2.1.0.dist-info/WHEEL,sha256=gduuPyBvFJQSQ0zdyxF7k0zynDXbIbvg5ZBHoXum5uk,110 +oauthlib-2.1.0.dist-info/top_level.txt,sha256=gz2py0fFs1AhG1O7KpHPcIXOgXOwdIiCaSnmLkiR12Q,9 +oauthlib-2.1.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +oauthlib/oauth2/rfc6749/clients/__pycache__/mobile_application.cpython-36.pyc,, +oauthlib/oauth2/rfc6749/clients/__pycache__/legacy_application.cpython-36.pyc,, +oauthlib/oauth2/rfc6749/clients/__pycache__/web_application.cpython-36.pyc,, +oauthlib/oauth2/rfc6749/clients/__pycache__/base.cpython-36.pyc,, +oauthlib/oauth2/rfc6749/clients/__pycache__/__init__.cpython-36.pyc,, +oauthlib/oauth2/rfc6749/clients/__pycache__/service_application.cpython-36.pyc,, +oauthlib/oauth2/rfc6749/clients/__pycache__/backend_application.cpython-36.pyc,, +oauthlib/oauth2/rfc6749/endpoints/__pycache__/authorization.cpython-36.pyc,, +oauthlib/oauth2/rfc6749/endpoints/__pycache__/resource.cpython-36.pyc,, +oauthlib/oauth2/rfc6749/endpoints/__pycache__/token.cpython-36.pyc,, +oauthlib/oauth2/rfc6749/endpoints/__pycache__/base.cpython-36.pyc,, +oauthlib/oauth2/rfc6749/endpoints/__pycache__/revocation.cpython-36.pyc,, +oauthlib/oauth2/rfc6749/endpoints/__pycache__/__init__.cpython-36.pyc,, +oauthlib/oauth2/rfc6749/endpoints/__pycache__/pre_configured.cpython-36.pyc,, +oauthlib/oauth2/rfc6749/__pycache__/tokens.cpython-36.pyc,, +oauthlib/oauth2/rfc6749/__pycache__/request_validator.cpython-36.pyc,, +oauthlib/oauth2/rfc6749/__pycache__/parameters.cpython-36.pyc,, +oauthlib/oauth2/rfc6749/__pycache__/errors.cpython-36.pyc,, 
+oauthlib/oauth2/rfc6749/__pycache__/utils.cpython-36.pyc,, +oauthlib/oauth2/rfc6749/__pycache__/__init__.cpython-36.pyc,, +oauthlib/oauth2/rfc6749/grant_types/__pycache__/refresh_token.cpython-36.pyc,, +oauthlib/oauth2/rfc6749/grant_types/__pycache__/authorization_code.cpython-36.pyc,, +oauthlib/oauth2/rfc6749/grant_types/__pycache__/client_credentials.cpython-36.pyc,, +oauthlib/oauth2/rfc6749/grant_types/__pycache__/resource_owner_password_credentials.cpython-36.pyc,, +oauthlib/oauth2/rfc6749/grant_types/__pycache__/base.cpython-36.pyc,, +oauthlib/oauth2/rfc6749/grant_types/__pycache__/implicit.cpython-36.pyc,, +oauthlib/oauth2/rfc6749/grant_types/__pycache__/openid_connect.cpython-36.pyc,, +oauthlib/oauth2/rfc6749/grant_types/__pycache__/__init__.cpython-36.pyc,, +oauthlib/oauth2/__pycache__/__init__.cpython-36.pyc,, +oauthlib/__pycache__/signals.cpython-36.pyc,, +oauthlib/__pycache__/common.cpython-36.pyc,, +oauthlib/__pycache__/uri_validate.cpython-36.pyc,, +oauthlib/__pycache__/__init__.cpython-36.pyc,, +oauthlib/oauth1/rfc5849/endpoints/__pycache__/authorization.cpython-36.pyc,, +oauthlib/oauth1/rfc5849/endpoints/__pycache__/signature_only.cpython-36.pyc,, +oauthlib/oauth1/rfc5849/endpoints/__pycache__/resource.cpython-36.pyc,, +oauthlib/oauth1/rfc5849/endpoints/__pycache__/request_token.cpython-36.pyc,, +oauthlib/oauth1/rfc5849/endpoints/__pycache__/base.cpython-36.pyc,, +oauthlib/oauth1/rfc5849/endpoints/__pycache__/__init__.cpython-36.pyc,, +oauthlib/oauth1/rfc5849/endpoints/__pycache__/pre_configured.cpython-36.pyc,, +oauthlib/oauth1/rfc5849/endpoints/__pycache__/access_token.cpython-36.pyc,, +oauthlib/oauth1/rfc5849/__pycache__/request_validator.cpython-36.pyc,, +oauthlib/oauth1/rfc5849/__pycache__/parameters.cpython-36.pyc,, +oauthlib/oauth1/rfc5849/__pycache__/errors.cpython-36.pyc,, +oauthlib/oauth1/rfc5849/__pycache__/utils.cpython-36.pyc,, +oauthlib/oauth1/rfc5849/__pycache__/__init__.cpython-36.pyc,, +oauthlib/oauth1/rfc5849/__pycache__/signature.cpython-36.pyc,, +oauthlib/oauth1/__pycache__/__init__.cpython-36.pyc,, diff --git a/env/lib/python3.6/site-packages/oauthlib-2.1.0.dist-info/WHEEL b/env/lib/python3.6/site-packages/oauthlib-2.1.0.dist-info/WHEEL new file mode 100644 index 0000000..1316c41 --- /dev/null +++ b/env/lib/python3.6/site-packages/oauthlib-2.1.0.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.31.1) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/env/lib/python3.6/site-packages/oauthlib-2.1.0.dist-info/top_level.txt b/env/lib/python3.6/site-packages/oauthlib-2.1.0.dist-info/top_level.txt new file mode 100644 index 0000000..b5f3f0e --- /dev/null +++ b/env/lib/python3.6/site-packages/oauthlib-2.1.0.dist-info/top_level.txt @@ -0,0 +1 @@ +oauthlib diff --git a/env/lib/python3.6/site-packages/oauthlib/common.py b/env/lib/python3.6/site-packages/oauthlib/common.py new file mode 100644 index 0000000..f25656f --- /dev/null +++ b/env/lib/python3.6/site-packages/oauthlib/common.py @@ -0,0 +1,465 @@ +# -*- coding: utf-8 -*- +""" +oauthlib.common +~~~~~~~~~~~~~~ + +This module provides data structures and utilities common +to all implementations of OAuth. 
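+
+A minimal usage sketch of the helpers defined below (``generate_token``,
+``urlencode`` and ``Request``); the URL and parameter names are purely
+illustrative::
+
+    from oauthlib.common import Request, generate_token, urlencode
+
+    token = generate_token(length=30)
+    body = urlencode([('oauth_token', token)])
+    request = Request('https://example.com/request_token',
+                      http_method='POST', body=body)
+    assert dict(request.decoded_body)['oauth_token'] == token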
+""" +from __future__ import absolute_import, unicode_literals + +import collections +import datetime +import logging +import re +import sys +import time + +try: + from secrets import randbits + from secrets import SystemRandom +except ImportError: + from random import getrandbits as randbits + from random import SystemRandom +try: + from urllib import quote as _quote + from urllib import unquote as _unquote + from urllib import urlencode as _urlencode +except ImportError: + from urllib.parse import quote as _quote + from urllib.parse import unquote as _unquote + from urllib.parse import urlencode as _urlencode +try: + import urlparse +except ImportError: + import urllib.parse as urlparse + +UNICODE_ASCII_CHARACTER_SET = ('abcdefghijklmnopqrstuvwxyz' + 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' + '0123456789') + +CLIENT_ID_CHARACTER_SET = (r' !"#$%&\'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMN' + 'OPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}') + +SANITIZE_PATTERN = re.compile(r'([^&;]*(?:password|token)[^=]*=)[^&;]+', re.IGNORECASE) +INVALID_HEX_PATTERN = re.compile(r'%[^0-9A-Fa-f]|%[0-9A-Fa-f][^0-9A-Fa-f]') + +always_safe = ('ABCDEFGHIJKLMNOPQRSTUVWXYZ' + 'abcdefghijklmnopqrstuvwxyz' + '0123456789' '_.-') + +log = logging.getLogger('oauthlib') + +PY3 = sys.version_info[0] == 3 + +if PY3: + unicode_type = str + bytes_type = bytes +else: + unicode_type = unicode + bytes_type = str + + +# 'safe' must be bytes (Python 2.6 requires bytes, other versions allow either) +def quote(s, safe=b'/'): + s = s.encode('utf-8') if isinstance(s, unicode_type) else s + s = _quote(s, safe) + # PY3 always returns unicode. PY2 may return either, depending on whether + # it had to modify the string. + if isinstance(s, bytes_type): + s = s.decode('utf-8') + return s + + +def unquote(s): + s = _unquote(s) + # PY3 always returns unicode. PY2 seems to always return what you give it, + # which differs from quote's behavior. Just to be safe, make sure it is + # unicode before we return. + if isinstance(s, bytes_type): + s = s.decode('utf-8') + return s + + +def urlencode(params): + utf8_params = encode_params_utf8(params) + urlencoded = _urlencode(utf8_params) + if isinstance(urlencoded, unicode_type): # PY3 returns unicode + return urlencoded + else: + return urlencoded.decode("utf-8") + + +def encode_params_utf8(params): + """Ensures that all parameters in a list of 2-element tuples are encoded to + bytestrings using UTF-8 + """ + encoded = [] + for k, v in params: + encoded.append(( + k.encode('utf-8') if isinstance(k, unicode_type) else k, + v.encode('utf-8') if isinstance(v, unicode_type) else v)) + return encoded + + +def decode_params_utf8(params): + """Ensures that all parameters in a list of 2-element tuples are decoded to + unicode using UTF-8. + """ + decoded = [] + for k, v in params: + decoded.append(( + k.decode('utf-8') if isinstance(k, bytes_type) else k, + v.decode('utf-8') if isinstance(v, bytes_type) else v)) + return decoded + + +urlencoded = set(always_safe) | set('=&;:%+~,*@!()/?') + + +def urldecode(query): + """Decode a query string in x-www-form-urlencoded format into a sequence + of two-element tuples. + + Unlike urlparse.parse_qsl(..., strict_parsing=True) urldecode will enforce + correct formatting of the query string by validation. If validation fails + a ValueError will be raised. urllib.parse_qsl will only raise errors if + any of name-value pairs omits the equals sign. 
+ """ + # Check if query contains invalid characters + if query and not set(query) <= urlencoded: + error = ("Error trying to decode a non urlencoded string. " + "Found invalid characters: %s " + "in the string: '%s'. " + "Please ensure the request/response body is " + "x-www-form-urlencoded.") + raise ValueError(error % (set(query) - urlencoded, query)) + + # Check for correctly hex encoded values using a regular expression + # All encoded values begin with % followed by two hex characters + # correct = %00, %A0, %0A, %FF + # invalid = %G0, %5H, %PO + if INVALID_HEX_PATTERN.search(query): + raise ValueError('Invalid hex encoding in query string.') + + # We encode to utf-8 prior to parsing because parse_qsl behaves + # differently on unicode input in python 2 and 3. + # Python 2.7 + # >>> urlparse.parse_qsl(u'%E5%95%A6%E5%95%A6') + # u'\xe5\x95\xa6\xe5\x95\xa6' + # Python 2.7, non unicode input gives the same + # >>> urlparse.parse_qsl('%E5%95%A6%E5%95%A6') + # '\xe5\x95\xa6\xe5\x95\xa6' + # but now we can decode it to unicode + # >>> urlparse.parse_qsl('%E5%95%A6%E5%95%A6').decode('utf-8') + # u'\u5566\u5566' + # Python 3.3 however + # >>> urllib.parse.parse_qsl(u'%E5%95%A6%E5%95%A6') + # u'\u5566\u5566' + query = query.encode( + 'utf-8') if not PY3 and isinstance(query, unicode_type) else query + # We want to allow queries such as "c2" whereas urlparse.parse_qsl + # with the strict_parsing flag will not. + params = urlparse.parse_qsl(query, keep_blank_values=True) + + # unicode all the things + return decode_params_utf8(params) + + +def extract_params(raw): + """Extract parameters and return them as a list of 2-tuples. + + Will successfully extract parameters from urlencoded query strings, + dicts, or lists of 2-tuples. Empty strings/dicts/lists will return an + empty list of parameters. Any other input will result in a return + value of None. + """ + if isinstance(raw, bytes_type) or isinstance(raw, unicode_type): + try: + params = urldecode(raw) + except ValueError: + params = None + elif hasattr(raw, '__iter__'): + try: + dict(raw) + except ValueError: + params = None + except TypeError: + params = None + else: + params = list(raw.items() if isinstance(raw, dict) else raw) + params = decode_params_utf8(params) + else: + params = None + + return params + + +def generate_nonce(): + """Generate pseudorandom nonce that is unlikely to repeat. + + Per `section 3.3`_ of the OAuth 1 RFC 5849 spec. + Per `section 3.2.1`_ of the MAC Access Authentication spec. + + A random 64-bit number is appended to the epoch timestamp for both + randomness and to decrease the likelihood of collisions. + + .. _`section 3.2.1`: https://tools.ietf.org/html/draft-ietf-oauth-v2-http-mac-01#section-3.2.1 + .. _`section 3.3`: https://tools.ietf.org/html/rfc5849#section-3.3 + """ + return unicode_type(unicode_type(randbits(64)) + generate_timestamp()) + + +def generate_timestamp(): + """Get seconds since epoch (UTC). + + Per `section 3.3`_ of the OAuth 1 RFC 5849 spec. + Per `section 3.2.1`_ of the MAC Access Authentication spec. + + .. _`section 3.2.1`: https://tools.ietf.org/html/draft-ietf-oauth-v2-http-mac-01#section-3.2.1 + .. _`section 3.3`: https://tools.ietf.org/html/rfc5849#section-3.3 + """ + return unicode_type(int(time.time())) + + +def generate_token(length=30, chars=UNICODE_ASCII_CHARACTER_SET): + """Generates a non-guessable OAuth token + + OAuth (1 and 2) does not specify the format of tokens except that they + should be strings of random characters. 
Tokens should not be guessable + and entropy when generating the random characters is important. Which is + why SystemRandom is used instead of the default random.choice method. + """ + rand = SystemRandom() + return ''.join(rand.choice(chars) for x in range(length)) + + +def generate_signed_token(private_pem, request): + import jwt + + now = datetime.datetime.utcnow() + + claims = { + 'scope': request.scope, + 'exp': now + datetime.timedelta(seconds=request.expires_in) + } + + claims.update(request.claims) + + token = jwt.encode(claims, private_pem, 'RS256') + token = to_unicode(token, "UTF-8") + + return token + + +def verify_signed_token(public_pem, token): + import jwt + + return jwt.decode(token, public_pem, algorithms=['RS256']) + + +def generate_client_id(length=30, chars=CLIENT_ID_CHARACTER_SET): + """Generates an OAuth client_id + + OAuth 2 specify the format of client_id in + https://tools.ietf.org/html/rfc6749#appendix-A. + """ + return generate_token(length, chars) + + +def add_params_to_qs(query, params): + """Extend a query with a list of two-tuples.""" + if isinstance(params, dict): + params = params.items() + queryparams = urlparse.parse_qsl(query, keep_blank_values=True) + queryparams.extend(params) + return urlencode(queryparams) + + +def add_params_to_uri(uri, params, fragment=False): + """Add a list of two-tuples to the uri query components.""" + sch, net, path, par, query, fra = urlparse.urlparse(uri) + if fragment: + fra = add_params_to_qs(fra, params) + else: + query = add_params_to_qs(query, params) + return urlparse.urlunparse((sch, net, path, par, query, fra)) + + +def safe_string_equals(a, b): + """ Near-constant time string comparison. + + Used in order to avoid timing attacks on sensitive information such + as secret keys during request verification (`rootLabs`_). + + .. 
_`rootLabs`: http://rdist.root.org/2010/01/07/timing-independent-array-comparison/ + + """ + if len(a) != len(b): + return False + + result = 0 + for x, y in zip(a, b): + result |= ord(x) ^ ord(y) + return result == 0 + + +def to_unicode(data, encoding='UTF-8'): + """Convert a number of different types of objects to unicode.""" + if isinstance(data, unicode_type): + return data + + if isinstance(data, bytes_type): + return unicode_type(data, encoding=encoding) + + if hasattr(data, '__iter__'): + try: + dict(data) + except TypeError: + pass + except ValueError: + # Assume it's a one dimensional data structure + return (to_unicode(i, encoding) for i in data) + else: + # We support 2.6 which lacks dict comprehensions + if hasattr(data, 'items'): + data = data.items() + return dict(((to_unicode(k, encoding), to_unicode(v, encoding)) for k, v in data)) + + return data + + +class CaseInsensitiveDict(dict): + + """Basic case insensitive dict with strings only keys.""" + + proxy = {} + + def __init__(self, data): + self.proxy = dict((k.lower(), k) for k in data) + for k in data: + self[k] = data[k] + + def __contains__(self, k): + return k.lower() in self.proxy + + def __delitem__(self, k): + key = self.proxy[k.lower()] + super(CaseInsensitiveDict, self).__delitem__(key) + del self.proxy[k.lower()] + + def __getitem__(self, k): + key = self.proxy[k.lower()] + return super(CaseInsensitiveDict, self).__getitem__(key) + + def get(self, k, default=None): + return self[k] if k in self else default + + def __setitem__(self, k, v): + super(CaseInsensitiveDict, self).__setitem__(k, v) + self.proxy[k.lower()] = k + + def update(self, *args, **kwargs): + super(CaseInsensitiveDict, self).update(*args, **kwargs) + for k in dict(*args, **kwargs): + self.proxy[k.lower()] = k + + +class Request(object): + + """A malleable representation of a signable HTTP request. + + Body argument may contain any data, but parameters will only be decoded if + they are one of: + + * urlencoded query string + * dict + * list of 2-tuples + + Anything else will be treated as raw body data to be passed through + unmolested. 
+ """ + + def __init__(self, uri, http_method='GET', body=None, headers=None, + encoding='utf-8'): + # Convert to unicode using encoding if given, else assume unicode + encode = lambda x: to_unicode(x, encoding) if encoding else x + + self.uri = encode(uri) + self.http_method = encode(http_method) + self.headers = CaseInsensitiveDict(encode(headers or {})) + self.body = encode(body) + self.decoded_body = extract_params(self.body) + self.oauth_params = [] + self.validator_log = {} + + self._params = { + "access_token": None, + "client": None, + "client_id": None, + "client_secret": None, + "code": None, + "extra_credentials": None, + "grant_type": None, + "redirect_uri": None, + "refresh_token": None, + "request_token": None, + "response_type": None, + "scope": None, + "scopes": None, + "state": None, + "token": None, + "user": None, + "token_type_hint": None, + + # OpenID Connect + "response_mode": None, + "nonce": None, + "display": None, + "prompt": None, + "claims": None, + "max_age": None, + "ui_locales": None, + "id_token_hint": None, + "login_hint": None, + "acr_values": None + } + self._params.update(dict(urldecode(self.uri_query))) + self._params.update(dict(self.decoded_body or [])) + self._params.update(self.headers) + + def __getattr__(self, name): + if name in self._params: + return self._params[name] + else: + raise AttributeError(name) + + def __repr__(self): + body = self.body + headers = self.headers.copy() + if body: + body = SANITIZE_PATTERN.sub('\1', str(body)) + if 'Authorization' in headers: + headers['Authorization'] = '' + return '' % ( + self.uri, self.http_method, headers, body) + + @property + def uri_query(self): + return urlparse.urlparse(self.uri).query + + @property + def uri_query_params(self): + if not self.uri_query: + return [] + return urlparse.parse_qsl(self.uri_query, keep_blank_values=True, + strict_parsing=True) + + @property + def duplicate_params(self): + seen_keys = collections.defaultdict(int) + all_keys = (p[0] + for p in (self.decoded_body or []) + self.uri_query_params) + for k in all_keys: + seen_keys[k] += 1 + return [k for k, c in seen_keys.items() if c > 1] diff --git a/env/lib/python3.6/site-packages/oauthlib/oauth1/rfc5849/endpoints/access_token.py b/env/lib/python3.6/site-packages/oauthlib/oauth1/rfc5849/endpoints/access_token.py new file mode 100644 index 0000000..12d13e9 --- /dev/null +++ b/env/lib/python3.6/site-packages/oauthlib/oauth1/rfc5849/endpoints/access_token.py @@ -0,0 +1,215 @@ +# -*- coding: utf-8 -*- +""" +oauthlib.oauth1.rfc5849.endpoints.access_token +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +This module is an implementation of the access token provider logic of +OAuth 1.0 RFC 5849. It validates the correctness of access token requests, +creates and persists tokens as well as create the proper response to be +returned to the client. +""" +from __future__ import absolute_import, unicode_literals + +import logging + +from oauthlib.common import urlencode + +from .. import errors +from .base import BaseEndpoint + +log = logging.getLogger(__name__) + + +class AccessTokenEndpoint(BaseEndpoint): + + """An endpoint responsible for providing OAuth 1 access tokens. + + Typical use is to instantiate with a request validator and invoke the + ``create_access_token_response`` from a view function. The tuple returned + has all information necessary (body, status, headers) to quickly form + and return a proper response. See :doc:`/oauth1/validator` for details on which + validator methods to implement for this endpoint. 
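+
+    A rough sketch of such a view function; ``your_validator`` and the
+    framework ``Response`` object are placeholders::
+
+        from oauthlib.oauth1 import AccessTokenEndpoint
+        endpoint = AccessTokenEndpoint(your_validator)
+
+        def access_token(request):
+            headers, body, status = endpoint.create_access_token_response(
+                request.url, http_method=request.method,
+                body=request.body, headers=request.headers)
+            return Response(body, status=status, headers=headers)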
+ """ + + def create_access_token(self, request, credentials): + """Create and save a new access token. + + Similar to OAuth 2, indication of granted scopes will be included as a + space separated list in ``oauth_authorized_realms``. + + :param request: An oauthlib.common.Request object. + :returns: The token as an urlencoded string. + """ + request.realms = self.request_validator.get_realms( + request.resource_owner_key, request) + token = { + 'oauth_token': self.token_generator(), + 'oauth_token_secret': self.token_generator(), + # Backport the authorized scopes indication used in OAuth2 + 'oauth_authorized_realms': ' '.join(request.realms) + } + token.update(credentials) + self.request_validator.save_access_token(token, request) + return urlencode(token.items()) + + def create_access_token_response(self, uri, http_method='GET', body=None, + headers=None, credentials=None): + """Create an access token response, with a new request token if valid. + + :param uri: The full URI of the token request. + :param http_method: A valid HTTP verb, i.e. GET, POST, PUT, HEAD, etc. + :param body: The request body as a string. + :param headers: The request headers as a dict. + :param credentials: A list of extra credentials to include in the token. + :returns: A tuple of 3 elements. + 1. A dict of headers to set on the response. + 2. The response body as a string. + 3. The response status code as an integer. + + An example of a valid request:: + + >>> from your_validator import your_validator + >>> from oauthlib.oauth1 import AccessTokenEndpoint + >>> endpoint = AccessTokenEndpoint(your_validator) + >>> h, b, s = endpoint.create_access_token_response( + ... 'https://your.provider/access_token?foo=bar', + ... headers={ + ... 'Authorization': 'OAuth oauth_token=234lsdkf....' + ... }, + ... credentials={ + ... 'my_specific': 'argument', + ... }) + >>> h + {'Content-Type': 'application/x-www-form-urlencoded'} + >>> b + 'oauth_token=lsdkfol23w54jlksdef&oauth_token_secret=qwe089234lkjsdf&oauth_authorized_realms=movies+pics&my_specific=argument' + >>> s + 200 + + An response to invalid request would have a different body and status:: + + >>> b + 'error=invalid_request&description=missing+resource+owner+key' + >>> s + 400 + + The same goes for an an unauthorized request: + + >>> b + '' + >>> s + 401 + """ + resp_headers = {'Content-Type': 'application/x-www-form-urlencoded'} + try: + request = self._create_request(uri, http_method, body, headers) + valid, processed_request = self.validate_access_token_request( + request) + if valid: + token = self.create_access_token(request, credentials or {}) + self.request_validator.invalidate_request_token( + request.client_key, + request.resource_owner_key, + request) + return resp_headers, token, 200 + else: + return {}, None, 401 + except errors.OAuth1Error as e: + return resp_headers, e.urlencoded, e.status_code + + def validate_access_token_request(self, request): + """Validate an access token request. + + :param request: An oauthlib.common.Request object. + :raises: OAuth1Error if the request is invalid. + :returns: A tuple of 2 elements. + 1. The validation result (True or False). + 2. The request object. 
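+
+        For illustration, a provider wanting to inspect why validation
+        failed could read the ``validator_log`` populated below
+        (``request`` being an ``oauthlib.common.Request``, ``log`` any
+        logger of your choice)::
+
+            valid, request = endpoint.validate_access_token_request(request)
+            if not valid:
+                log.debug('access token request rejected: %s',
+                          request.validator_log)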
+ """ + self._check_transport_security(request) + self._check_mandatory_parameters(request) + + if not request.resource_owner_key: + raise errors.InvalidRequestError( + description='Missing resource owner.') + + if not self.request_validator.check_request_token( + request.resource_owner_key): + raise errors.InvalidRequestError( + description='Invalid resource owner key format.') + + if not request.verifier: + raise errors.InvalidRequestError( + description='Missing verifier.') + + if not self.request_validator.check_verifier(request.verifier): + raise errors.InvalidRequestError( + description='Invalid verifier format.') + + if not self.request_validator.validate_timestamp_and_nonce( + request.client_key, request.timestamp, request.nonce, request, + request_token=request.resource_owner_key): + return False, request + + # The server SHOULD return a 401 (Unauthorized) status code when + # receiving a request with invalid client credentials. + # Note: This is postponed in order to avoid timing attacks, instead + # a dummy client is assigned and used to maintain near constant + # time request verification. + # + # Note that early exit would enable client enumeration + valid_client = self.request_validator.validate_client_key( + request.client_key, request) + if not valid_client: + request.client_key = self.request_validator.dummy_client + + # The server SHOULD return a 401 (Unauthorized) status code when + # receiving a request with invalid or expired token. + # Note: This is postponed in order to avoid timing attacks, instead + # a dummy token is assigned and used to maintain near constant + # time request verification. + # + # Note that early exit would enable resource owner enumeration + valid_resource_owner = self.request_validator.validate_request_token( + request.client_key, request.resource_owner_key, request) + if not valid_resource_owner: + request.resource_owner_key = self.request_validator.dummy_request_token + + # The server MUST verify (Section 3.2) the validity of the request, + # ensure that the resource owner has authorized the provisioning of + # token credentials to the client, and ensure that the temporary + # credentials have not expired or been used before. The server MUST + # also verify the verification code received from the client. + # .. _`Section 3.2`: https://tools.ietf.org/html/rfc5849#section-3.2 + # + # Note that early exit would enable resource owner authorization + # verifier enumertion. + valid_verifier = self.request_validator.validate_verifier( + request.client_key, + request.resource_owner_key, + request.verifier, + request) + + valid_signature = self._check_signature(request, is_token_request=True) + + # log the results to the validator_log + # this lets us handle internal reporting and analysis + request.validator_log['client'] = valid_client + request.validator_log['resource_owner'] = valid_resource_owner + request.validator_log['verifier'] = valid_verifier + request.validator_log['signature'] = valid_signature + + # We delay checking validity until the very end, using dummy values for + # calculations and fetching secrets/keys to ensure the flow of every + # request remains almost identical regardless of whether valid values + # have been supplied. 
This ensures near constant time execution and + # prevents malicious users from guessing sensitive information + v = all((valid_client, valid_resource_owner, valid_verifier, + valid_signature)) + if not v: + log.info("[Failure] request verification failed.") + log.info("Valid client:, %s", valid_client) + log.info("Valid token:, %s", valid_resource_owner) + log.info("Valid verifier:, %s", valid_verifier) + log.info("Valid signature:, %s", valid_signature) + return v, request diff --git a/env/lib/python3.6/site-packages/oauthlib/oauth1/rfc5849/endpoints/authorization.py b/env/lib/python3.6/site-packages/oauthlib/oauth1/rfc5849/endpoints/authorization.py new file mode 100644 index 0000000..1751a45 --- /dev/null +++ b/env/lib/python3.6/site-packages/oauthlib/oauth1/rfc5849/endpoints/authorization.py @@ -0,0 +1,162 @@ +# -*- coding: utf-8 -*- +""" +oauthlib.oauth1.rfc5849.endpoints.authorization +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +This module is an implementation of various logic needed +for signing and checking OAuth 1.0 RFC 5849 requests. +""" +from __future__ import absolute_import, unicode_literals + +from oauthlib.common import Request, add_params_to_uri + +from .. import errors +from .base import BaseEndpoint + +try: + from urllib import urlencode +except ImportError: + from urllib.parse import urlencode + + +class AuthorizationEndpoint(BaseEndpoint): + + """An endpoint responsible for letting authenticated users authorize access + to their protected resources to a client. + + Typical use would be to have two views, one for displaying the authorization + form and one to process said form on submission. + + The first view will want to utilize ``get_realms_and_credentials`` to fetch + requested realms and useful client credentials, such as name and + description, to be used when creating the authorization form. + + During form processing you can use ``create_authorization_response`` to + validate the request, create a verifier as well as prepare the final + redirection URI used to send the user back to the client. + + See :doc:`/oauth1/validator` for details on which validator methods to implement + for this endpoint. + """ + + def create_verifier(self, request, credentials): + """Create and save a new request token. + + :param request: An oauthlib.common.Request object. + :param credentials: A dict of extra token credentials. + :returns: The verifier as a dict. + """ + verifier = { + 'oauth_token': request.resource_owner_key, + 'oauth_verifier': self.token_generator(), + } + verifier.update(credentials) + self.request_validator.save_verifier( + request.resource_owner_key, verifier, request) + return verifier + + def create_authorization_response(self, uri, http_method='GET', body=None, + headers=None, realms=None, credentials=None): + """Create an authorization response, with a new request token if valid. + + :param uri: The full URI of the token request. + :param http_method: A valid HTTP verb, i.e. GET, POST, PUT, HEAD, etc. + :param body: The request body as a string. + :param headers: The request headers as a dict. + :param credentials: A list of credentials to include in the verifier. + :returns: A tuple of 3 elements. + 1. A dict of headers to set on the response. + 2. The response body as a string. + 3. The response status code as an integer. + + If the callback URI tied to the current token is "oob", a response with + a 200 status code will be returned. In this case, it may be desirable to + modify the response to better display the verifier to the client. 
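+
+        For instance, a view could branch on the returned status code;
+        ``render_verifier_page`` and ``redirect`` stand in for whatever
+        your framework provides::
+
+            headers, body, status = endpoint.create_authorization_response(
+                uri, http_method, body, headers, realms, credentials)
+            if status == 200:   # "oob" callback, show verifier to the user
+                return render_verifier_page(body)
+            return redirect(headers['Location'])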
+ + An example of an authorization request:: + + >>> from your_validator import your_validator + >>> from oauthlib.oauth1 import AuthorizationEndpoint + >>> endpoint = AuthorizationEndpoint(your_validator) + >>> h, b, s = endpoint.create_authorization_response( + ... 'https://your.provider/authorize?oauth_token=...', + ... credentials={ + ... 'extra': 'argument', + ... }) + >>> h + {'Location': 'https://the.client/callback?oauth_verifier=...&extra=argument'} + >>> b + None + >>> s + 302 + + An example of a request with an "oob" callback:: + + >>> from your_validator import your_validator + >>> from oauthlib.oauth1 import AuthorizationEndpoint + >>> endpoint = AuthorizationEndpoint(your_validator) + >>> h, b, s = endpoint.create_authorization_response( + ... 'https://your.provider/authorize?foo=bar', + ... credentials={ + ... 'extra': 'argument', + ... }) + >>> h + {'Content-Type': 'application/x-www-form-urlencoded'} + >>> b + 'oauth_verifier=...&extra=argument' + >>> s + 200 + """ + request = self._create_request(uri, http_method=http_method, body=body, + headers=headers) + + if not request.resource_owner_key: + raise errors.InvalidRequestError( + 'Missing mandatory parameter oauth_token.') + if not self.request_validator.verify_request_token( + request.resource_owner_key, request): + raise errors.InvalidClientError() + + request.realms = realms + if (request.realms and not self.request_validator.verify_realms( + request.resource_owner_key, request.realms, request)): + raise errors.InvalidRequestError( + description=('User granted access to realms outside of ' + 'what the client may request.')) + + verifier = self.create_verifier(request, credentials or {}) + redirect_uri = self.request_validator.get_redirect_uri( + request.resource_owner_key, request) + if redirect_uri == 'oob': + response_headers = { + 'Content-Type': 'application/x-www-form-urlencoded'} + response_body = urlencode(verifier) + return response_headers, response_body, 200 + else: + populated_redirect = add_params_to_uri( + redirect_uri, verifier.items()) + return {'Location': populated_redirect}, None, 302 + + def get_realms_and_credentials(self, uri, http_method='GET', body=None, + headers=None): + """Fetch realms and credentials for the presented request token. + + :param uri: The full URI of the token request. + :param http_method: A valid HTTP verb, i.e. GET, POST, PUT, HEAD, etc. + :param body: The request body as a string. + :param headers: The request headers as a dict. + :returns: A tuple of 2 elements. + 1. A list of request realms. + 2. A dict of credentials which may be useful in creating the + authorization form. 
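+
+        A sketch of how the result is typically fed into the authorization
+        form; the ``render`` call and template name are illustrative::
+
+            realms, credentials = endpoint.get_realms_and_credentials(
+                request.url, http_method=request.method,
+                body=request.body, headers=request.headers)
+            return render('authorize.html', realms=realms,
+                          token=credentials['resource_owner_key'])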
+ """ + request = self._create_request(uri, http_method=http_method, body=body, + headers=headers) + + if not self.request_validator.verify_request_token( + request.resource_owner_key, request): + raise errors.InvalidClientError() + + realms = self.request_validator.get_realms( + request.resource_owner_key, request) + return realms, {'resource_owner_key': request.resource_owner_key} diff --git a/env/lib/python3.6/site-packages/oauthlib/oauth1/rfc5849/endpoints/base.py b/env/lib/python3.6/site-packages/oauthlib/oauth1/rfc5849/endpoints/base.py new file mode 100644 index 0000000..9702939 --- /dev/null +++ b/env/lib/python3.6/site-packages/oauthlib/oauth1/rfc5849/endpoints/base.py @@ -0,0 +1,213 @@ +# -*- coding: utf-8 -*- +""" +oauthlib.oauth1.rfc5849.endpoints.base +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +This module is an implementation of various logic needed +for signing and checking OAuth 1.0 RFC 5849 requests. +""" +from __future__ import absolute_import, unicode_literals + +import time + +from oauthlib.common import Request, generate_token + +from .. import (CONTENT_TYPE_FORM_URLENCODED, SIGNATURE_HMAC, SIGNATURE_RSA, + SIGNATURE_TYPE_AUTH_HEADER, SIGNATURE_TYPE_BODY, + SIGNATURE_TYPE_QUERY, errors, signature, utils) + + +class BaseEndpoint(object): + + def __init__(self, request_validator, token_generator=None): + self.request_validator = request_validator + self.token_generator = token_generator or generate_token + + def _get_signature_type_and_params(self, request): + """Extracts parameters from query, headers and body. Signature type + is set to the source in which parameters were found. + """ + # Per RFC5849, only the Authorization header may contain the 'realm' + # optional parameter. + header_params = signature.collect_parameters(headers=request.headers, + exclude_oauth_signature=False, with_realm=True) + body_params = signature.collect_parameters(body=request.body, + exclude_oauth_signature=False) + query_params = signature.collect_parameters(uri_query=request.uri_query, + exclude_oauth_signature=False) + + params = [] + params.extend(header_params) + params.extend(body_params) + params.extend(query_params) + signature_types_with_oauth_params = list(filter(lambda s: s[2], ( + (SIGNATURE_TYPE_AUTH_HEADER, params, + utils.filter_oauth_params(header_params)), + (SIGNATURE_TYPE_BODY, params, + utils.filter_oauth_params(body_params)), + (SIGNATURE_TYPE_QUERY, params, + utils.filter_oauth_params(query_params)) + ))) + + if len(signature_types_with_oauth_params) > 1: + found_types = [s[0] for s in signature_types_with_oauth_params] + raise errors.InvalidRequestError( + description=('oauth_ params must come from only 1 signature' + 'type but were found in %s', + ', '.join(found_types))) + + try: + signature_type, params, oauth_params = signature_types_with_oauth_params[ + 0] + except IndexError: + raise errors.InvalidRequestError( + description='Missing mandatory OAuth parameters.') + + return signature_type, params, oauth_params + + def _create_request(self, uri, http_method, body, headers): + # Only include body data from x-www-form-urlencoded requests + headers = headers or {} + if ("Content-Type" in headers and + CONTENT_TYPE_FORM_URLENCODED in headers["Content-Type"]): + request = Request(uri, http_method, body, headers) + else: + request = Request(uri, http_method, '', headers) + + signature_type, params, oauth_params = ( + self._get_signature_type_and_params(request)) + + # The server SHOULD return a 400 (Bad Request) status code when + # receiving a request with duplicated 
protocol parameters. + if len(dict(oauth_params)) != len(oauth_params): + raise errors.InvalidRequestError( + description='Duplicate OAuth1 entries.') + + oauth_params = dict(oauth_params) + request.signature = oauth_params.get('oauth_signature') + request.client_key = oauth_params.get('oauth_consumer_key') + request.resource_owner_key = oauth_params.get('oauth_token') + request.nonce = oauth_params.get('oauth_nonce') + request.timestamp = oauth_params.get('oauth_timestamp') + request.redirect_uri = oauth_params.get('oauth_callback') + request.verifier = oauth_params.get('oauth_verifier') + request.signature_method = oauth_params.get('oauth_signature_method') + request.realm = dict(params).get('realm') + request.oauth_params = oauth_params + + # Parameters to Client depend on signature method which may vary + # for each request. Note that HMAC-SHA1 and PLAINTEXT share parameters + request.params = [(k, v) for k, v in params if k != "oauth_signature"] + + if 'realm' in request.headers.get('Authorization', ''): + request.params = [(k, v) + for k, v in request.params if k != "realm"] + + return request + + def _check_transport_security(self, request): + # TODO: move into oauthlib.common from oauth2.utils + if (self.request_validator.enforce_ssl and + not request.uri.lower().startswith("https://")): + raise errors.InsecureTransportError() + + def _check_mandatory_parameters(self, request): + # The server SHOULD return a 400 (Bad Request) status code when + # receiving a request with missing parameters. + if not all((request.signature, request.client_key, + request.nonce, request.timestamp, + request.signature_method)): + raise errors.InvalidRequestError( + description='Missing mandatory OAuth parameters.') + + # OAuth does not mandate a particular signature method, as each + # implementation can have its own unique requirements. Servers are + # free to implement and document their own custom methods. + # Recommending any particular method is beyond the scope of this + # specification. Implementers should review the Security + # Considerations section (`Section 4`_) before deciding on which + # method to support. + # .. _`Section 4`: https://tools.ietf.org/html/rfc5849#section-4 + if (not request.signature_method in + self.request_validator.allowed_signature_methods): + raise errors.InvalidSignatureMethodError( + description="Invalid signature, %s not in %r." % ( + request.signature_method, + self.request_validator.allowed_signature_methods)) + + # Servers receiving an authenticated request MUST validate it by: + # If the "oauth_version" parameter is present, ensuring its value is + # "1.0". + if ('oauth_version' in request.oauth_params and + request.oauth_params['oauth_version'] != '1.0'): + raise errors.InvalidRequestError( + description='Invalid OAuth version.') + + # The timestamp value MUST be a positive integer. Unless otherwise + # specified by the server's documentation, the timestamp is expressed + # in the number of seconds since January 1, 1970 00:00:00 GMT. + if len(request.timestamp) != 10: + raise errors.InvalidRequestError( + description='Invalid timestamp size') + + try: + ts = int(request.timestamp) + + except ValueError: + raise errors.InvalidRequestError( + description='Timestamp must be an integer.') + + else: + # To avoid the need to retain an infinite number of nonce values for + # future checks, servers MAY choose to restrict the time period after + # which a request with an old timestamp is rejected. 
+ if abs(time.time() - ts) > self.request_validator.timestamp_lifetime: + raise errors.InvalidRequestError( + description=('Timestamp given is invalid, differ from ' + 'allowed by over %s seconds.' % ( + self.request_validator.timestamp_lifetime))) + + # Provider specific validation of parameters, used to enforce + # restrictions such as character set and length. + if not self.request_validator.check_client_key(request.client_key): + raise errors.InvalidRequestError( + description='Invalid client key format.') + + if not self.request_validator.check_nonce(request.nonce): + raise errors.InvalidRequestError( + description='Invalid nonce format.') + + def _check_signature(self, request, is_token_request=False): + # ---- RSA Signature verification ---- + if request.signature_method == SIGNATURE_RSA: + # The server verifies the signature per `[RFC3447] section 8.2.2`_ + # .. _`[RFC3447] section 8.2.2`: https://tools.ietf.org/html/rfc3447#section-8.2.1 + rsa_key = self.request_validator.get_rsa_key( + request.client_key, request) + valid_signature = signature.verify_rsa_sha1(request, rsa_key) + + # ---- HMAC or Plaintext Signature verification ---- + else: + # Servers receiving an authenticated request MUST validate it by: + # Recalculating the request signature independently as described in + # `Section 3.4`_ and comparing it to the value received from the + # client via the "oauth_signature" parameter. + # .. _`Section 3.4`: https://tools.ietf.org/html/rfc5849#section-3.4 + client_secret = self.request_validator.get_client_secret( + request.client_key, request) + resource_owner_secret = None + if request.resource_owner_key: + if is_token_request: + resource_owner_secret = self.request_validator.get_request_token_secret( + request.client_key, request.resource_owner_key, request) + else: + resource_owner_secret = self.request_validator.get_access_token_secret( + request.client_key, request.resource_owner_key, request) + + if request.signature_method == SIGNATURE_HMAC: + valid_signature = signature.verify_hmac_sha1(request, + client_secret, resource_owner_secret) + else: + valid_signature = signature.verify_plaintext(request, + client_secret, resource_owner_secret) + return valid_signature diff --git a/env/lib/python3.6/site-packages/oauthlib/oauth1/rfc5849/endpoints/pre_configured.py b/env/lib/python3.6/site-packages/oauthlib/oauth1/rfc5849/endpoints/pre_configured.py new file mode 100644 index 0000000..f89393a --- /dev/null +++ b/env/lib/python3.6/site-packages/oauthlib/oauth1/rfc5849/endpoints/pre_configured.py @@ -0,0 +1,14 @@ +from __future__ import absolute_import, unicode_literals + +from . 
import (AccessTokenEndpoint, AuthorizationEndpoint, + RequestTokenEndpoint, ResourceEndpoint) + + +class WebApplicationServer(RequestTokenEndpoint, AuthorizationEndpoint, + AccessTokenEndpoint, ResourceEndpoint): + + def __init__(self, request_validator): + RequestTokenEndpoint.__init__(self, request_validator) + AuthorizationEndpoint.__init__(self, request_validator) + AccessTokenEndpoint.__init__(self, request_validator) + ResourceEndpoint.__init__(self, request_validator) diff --git a/env/lib/python3.6/site-packages/oauthlib/oauth1/rfc5849/endpoints/request_token.py b/env/lib/python3.6/site-packages/oauthlib/oauth1/rfc5849/endpoints/request_token.py new file mode 100644 index 0000000..88fd6c0 --- /dev/null +++ b/env/lib/python3.6/site-packages/oauthlib/oauth1/rfc5849/endpoints/request_token.py @@ -0,0 +1,209 @@ +# -*- coding: utf-8 -*- +""" +oauthlib.oauth1.rfc5849.endpoints.request_token +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +This module is an implementation of the request token provider logic of +OAuth 1.0 RFC 5849. It validates the correctness of request token requests, +creates and persists tokens as well as create the proper response to be +returned to the client. +""" +from __future__ import absolute_import, unicode_literals + +import logging + +from oauthlib.common import urlencode + +from .. import errors +from .base import BaseEndpoint + +log = logging.getLogger(__name__) + + +class RequestTokenEndpoint(BaseEndpoint): + + """An endpoint responsible for providing OAuth 1 request tokens. + + Typical use is to instantiate with a request validator and invoke the + ``create_request_token_response`` from a view function. The tuple returned + has all information necessary (body, status, headers) to quickly form + and return a proper response. See :doc:`/oauth1/validator` for details on which + validator methods to implement for this endpoint. + """ + + def create_request_token(self, request, credentials): + """Create and save a new request token. + + :param request: An oauthlib.common.Request object. + :param credentials: A dict of extra token credentials. + :returns: The token as an urlencoded string. + """ + token = { + 'oauth_token': self.token_generator(), + 'oauth_token_secret': self.token_generator(), + 'oauth_callback_confirmed': 'true' + } + token.update(credentials) + self.request_validator.save_request_token(token, request) + return urlencode(token.items()) + + def create_request_token_response(self, uri, http_method='GET', body=None, + headers=None, credentials=None): + """Create a request token response, with a new request token if valid. + + :param uri: The full URI of the token request. + :param http_method: A valid HTTP verb, i.e. GET, POST, PUT, HEAD, etc. + :param body: The request body as a string. + :param headers: The request headers as a dict. + :param credentials: A list of extra credentials to include in the token. + :returns: A tuple of 3 elements. + 1. A dict of headers to set on the response. + 2. The response body as a string. + 3. The response status code as an integer. + + An example of a valid request:: + + >>> from your_validator import your_validator + >>> from oauthlib.oauth1 import RequestTokenEndpoint + >>> endpoint = RequestTokenEndpoint(your_validator) + >>> h, b, s = endpoint.create_request_token_response( + ... 'https://your.provider/request_token?foo=bar', + ... headers={ + ... 'Authorization': 'OAuth realm=movies user, oauth_....' + ... }, + ... credentials={ + ... 'my_specific': 'argument', + ... 
}) + >>> h + {'Content-Type': 'application/x-www-form-urlencoded'} + >>> b + 'oauth_token=lsdkfol23w54jlksdef&oauth_token_secret=qwe089234lkjsdf&oauth_callback_confirmed=true&my_specific=argument' + >>> s + 200 + + An response to invalid request would have a different body and status:: + + >>> b + 'error=invalid_request&description=missing+callback+uri' + >>> s + 400 + + The same goes for an an unauthorized request: + + >>> b + '' + >>> s + 401 + """ + resp_headers = {'Content-Type': 'application/x-www-form-urlencoded'} + try: + request = self._create_request(uri, http_method, body, headers) + valid, processed_request = self.validate_request_token_request( + request) + if valid: + token = self.create_request_token(request, credentials or {}) + return resp_headers, token, 200 + else: + return {}, None, 401 + except errors.OAuth1Error as e: + return resp_headers, e.urlencoded, e.status_code + + def validate_request_token_request(self, request): + """Validate a request token request. + + :param request: An oauthlib.common.Request object. + :raises: OAuth1Error if the request is invalid. + :returns: A tuple of 2 elements. + 1. The validation result (True or False). + 2. The request object. + """ + self._check_transport_security(request) + self._check_mandatory_parameters(request) + + if request.realm: + request.realms = request.realm.split(' ') + else: + request.realms = self.request_validator.get_default_realms( + request.client_key, request) + if not self.request_validator.check_realms(request.realms): + raise errors.InvalidRequestError( + description='Invalid realm %s. Allowed are %r.' % ( + request.realms, self.request_validator.realms)) + + if not request.redirect_uri: + raise errors.InvalidRequestError( + description='Missing callback URI.') + + if not self.request_validator.validate_timestamp_and_nonce( + request.client_key, request.timestamp, request.nonce, request, + request_token=request.resource_owner_key): + return False, request + + # The server SHOULD return a 401 (Unauthorized) status code when + # receiving a request with invalid client credentials. + # Note: This is postponed in order to avoid timing attacks, instead + # a dummy client is assigned and used to maintain near constant + # time request verification. + # + # Note that early exit would enable client enumeration + valid_client = self.request_validator.validate_client_key( + request.client_key, request) + if not valid_client: + request.client_key = self.request_validator.dummy_client + + # Note that `realm`_ is only used in authorization headers and how + # it should be interepreted is not included in the OAuth spec. + # However they could be seen as a scope or realm to which the + # client has access and as such every client should be checked + # to ensure it is authorized access to that scope or realm. + # .. _`realm`: https://tools.ietf.org/html/rfc2617#section-1.2 + # + # Note that early exit would enable client realm access enumeration. + # + # The require_realm indicates this is the first step in the OAuth + # workflow where a client requests access to a specific realm. + # This first step (obtaining request token) need not require a realm + # and can then be identified by checking the require_resource_owner + # flag and abscence of realm. + # + # Clients obtaining an access token will not supply a realm and it will + # not be checked. Instead the previously requested realm should be + # transferred from the request token to the access token. 
+ # + # Access to protected resources will always validate the realm but note + # that the realm is now tied to the access token and not provided by + # the client. + valid_realm = self.request_validator.validate_requested_realms( + request.client_key, request.realms, request) + + # Callback is normally never required, except for requests for + # a Temporary Credential as described in `Section 2.1`_ + # .._`Section 2.1`: https://tools.ietf.org/html/rfc5849#section-2.1 + valid_redirect = self.request_validator.validate_redirect_uri( + request.client_key, request.redirect_uri, request) + if not request.redirect_uri: + raise NotImplementedError('Redirect URI must either be provided ' + 'or set to a default during validation.') + + valid_signature = self._check_signature(request) + + # log the results to the validator_log + # this lets us handle internal reporting and analysis + request.validator_log['client'] = valid_client + request.validator_log['realm'] = valid_realm + request.validator_log['callback'] = valid_redirect + request.validator_log['signature'] = valid_signature + + # We delay checking validity until the very end, using dummy values for + # calculations and fetching secrets/keys to ensure the flow of every + # request remains almost identical regardless of whether valid values + # have been supplied. This ensures near constant time execution and + # prevents malicious users from guessing sensitive information + v = all((valid_client, valid_realm, valid_redirect, valid_signature)) + if not v: + log.info("[Failure] request verification failed.") + log.info("Valid client: %s.", valid_client) + log.info("Valid realm: %s.", valid_realm) + log.info("Valid callback: %s.", valid_redirect) + log.info("Valid signature: %s.", valid_signature) + return v, request diff --git a/env/lib/python3.6/site-packages/oauthlib/oauth1/rfc5849/endpoints/resource.py b/env/lib/python3.6/site-packages/oauthlib/oauth1/rfc5849/endpoints/resource.py new file mode 100644 index 0000000..f82e8b1 --- /dev/null +++ b/env/lib/python3.6/site-packages/oauthlib/oauth1/rfc5849/endpoints/resource.py @@ -0,0 +1,165 @@ +# -*- coding: utf-8 -*- +""" +oauthlib.oauth1.rfc5849.endpoints.resource +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +This module is an implementation of the resource protection provider logic of +OAuth 1.0 RFC 5849. +""" +from __future__ import absolute_import, unicode_literals + +import logging + +from .. import errors +from .base import BaseEndpoint + +log = logging.getLogger(__name__) + + +class ResourceEndpoint(BaseEndpoint): + + """An endpoint responsible for protecting resources. + + Typical use is to instantiate with a request validator and invoke the + ``validate_protected_resource_request`` in a decorator around a view + function. If the request is valid, invoke and return the response of the + view. If invalid create and return an error response directly from the + decorator. + + See :doc:`/oauth1/validator` for details on which validator methods to implement + for this endpoint. 
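+    A direct call, outside any decorator, might look like the sketch below
+    (the URI, header and realm values are illustrative placeholders)::
+
+        from your_validator import your_validator
+        from oauthlib.oauth1 import ResourceEndpoint
+        endpoint = ResourceEndpoint(your_validator)
+        valid, request = endpoint.validate_protected_resource_request(
+            'https://your.provider/photos/42',
+            http_method='GET',
+            headers={'Authorization': 'OAuth oauth_consumer_key="...", oauth_...'},
+            realms=['photos'])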
+ + An example decorator:: + + from functools import wraps + from your_validator import your_validator + from oauthlib.oauth1 import ResourceEndpoint + endpoint = ResourceEndpoint(your_validator) + + def require_oauth(realms=None): + def decorator(f): + @wraps(f) + def wrapper(request, *args, **kwargs): + v, r = provider.validate_protected_resource_request( + request.url, + http_method=request.method, + body=request.data, + headers=request.headers, + realms=realms or []) + if v: + return f(*args, **kwargs) + else: + return abort(403) + """ + + def validate_protected_resource_request(self, uri, http_method='GET', + body=None, headers=None, realms=None): + """Create a request token response, with a new request token if valid. + + :param uri: The full URI of the token request. + :param http_method: A valid HTTP verb, i.e. GET, POST, PUT, HEAD, etc. + :param body: The request body as a string. + :param headers: The request headers as a dict. + :param realms: A list of realms the resource is protected under. + This will be supplied to the ``validate_realms`` + method of the request validator. + :returns: A tuple of 2 elements. + 1. True if valid, False otherwise. + 2. An oauthlib.common.Request object. + """ + try: + request = self._create_request(uri, http_method, body, headers) + except errors.OAuth1Error: + return False, None + + try: + self._check_transport_security(request) + self._check_mandatory_parameters(request) + except errors.OAuth1Error: + return False, request + + if not request.resource_owner_key: + return False, request + + if not self.request_validator.check_access_token( + request.resource_owner_key): + return False, request + + if not self.request_validator.validate_timestamp_and_nonce( + request.client_key, request.timestamp, request.nonce, request, + access_token=request.resource_owner_key): + return False, request + + # The server SHOULD return a 401 (Unauthorized) status code when + # receiving a request with invalid client credentials. + # Note: This is postponed in order to avoid timing attacks, instead + # a dummy client is assigned and used to maintain near constant + # time request verification. + # + # Note that early exit would enable client enumeration + valid_client = self.request_validator.validate_client_key( + request.client_key, request) + if not valid_client: + request.client_key = self.request_validator.dummy_client + + # The server SHOULD return a 401 (Unauthorized) status code when + # receiving a request with invalid or expired token. + # Note: This is postponed in order to avoid timing attacks, instead + # a dummy token is assigned and used to maintain near constant + # time request verification. + # + # Note that early exit would enable resource owner enumeration + valid_resource_owner = self.request_validator.validate_access_token( + request.client_key, request.resource_owner_key, request) + if not valid_resource_owner: + request.resource_owner_key = self.request_validator.dummy_access_token + + # Note that `realm`_ is only used in authorization headers and how + # it should be interepreted is not included in the OAuth spec. + # However they could be seen as a scope or realm to which the + # client has access and as such every client should be checked + # to ensure it is authorized access to that scope or realm. + # .. _`realm`: https://tools.ietf.org/html/rfc2617#section-1.2 + # + # Note that early exit would enable client realm access enumeration. 
+ # + # The require_realm indicates this is the first step in the OAuth + # workflow where a client requests access to a specific realm. + # This first step (obtaining request token) need not require a realm + # and can then be identified by checking the require_resource_owner + # flag and abscence of realm. + # + # Clients obtaining an access token will not supply a realm and it will + # not be checked. Instead the previously requested realm should be + # transferred from the request token to the access token. + # + # Access to protected resources will always validate the realm but note + # that the realm is now tied to the access token and not provided by + # the client. + valid_realm = self.request_validator.validate_realms(request.client_key, + request.resource_owner_key, request, uri=request.uri, + realms=realms) + + valid_signature = self._check_signature(request) + + # log the results to the validator_log + # this lets us handle internal reporting and analysis + request.validator_log['client'] = valid_client + request.validator_log['resource_owner'] = valid_resource_owner + request.validator_log['realm'] = valid_realm + request.validator_log['signature'] = valid_signature + + # We delay checking validity until the very end, using dummy values for + # calculations and fetching secrets/keys to ensure the flow of every + # request remains almost identical regardless of whether valid values + # have been supplied. This ensures near constant time execution and + # prevents malicious users from guessing sensitive information + v = all((valid_client, valid_resource_owner, valid_realm, + valid_signature)) + if not v: + log.info("[Failure] request verification failed.") + log.info("Valid client: %s", valid_client) + log.info("Valid token: %s", valid_resource_owner) + log.info("Valid realm: %s", valid_realm) + log.info("Valid signature: %s", valid_signature) + return v, request diff --git a/env/lib/python3.6/site-packages/oauthlib/oauth1/rfc5849/endpoints/signature_only.py b/env/lib/python3.6/site-packages/oauthlib/oauth1/rfc5849/endpoints/signature_only.py new file mode 100644 index 0000000..4297770 --- /dev/null +++ b/env/lib/python3.6/site-packages/oauthlib/oauth1/rfc5849/endpoints/signature_only.py @@ -0,0 +1,84 @@ +# -*- coding: utf-8 -*- +""" +oauthlib.oauth1.rfc5849.endpoints.signature_only +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +This module is an implementation of the signing logic of OAuth 1.0 RFC 5849. +""" + +from __future__ import absolute_import, unicode_literals + +import logging + +from .. import errors +from .base import BaseEndpoint + +log = logging.getLogger(__name__) + + +class SignatureOnlyEndpoint(BaseEndpoint): + + """An endpoint only responsible for verifying an oauth signature.""" + + def validate_request(self, uri, http_method='GET', + body=None, headers=None): + """Validate a signed OAuth request. + + :param uri: The full URI of the token request. + :param http_method: A valid HTTP verb, i.e. GET, POST, PUT, HEAD, etc. + :param body: The request body as a string. + :param headers: The request headers as a dict. + :returns: A tuple of 2 elements. + 1. True if valid, False otherwise. + 2. An oauthlib.common.Request object. + """ + try: + request = self._create_request(uri, http_method, body, headers) + except errors.OAuth1Error as err: + log.info( + 'Exception caught while validating request, %s.' 
% err) + return False, None + + try: + self._check_transport_security(request) + self._check_mandatory_parameters(request) + except errors.OAuth1Error as err: + log.info( + 'Exception caught while validating request, %s.' % err) + return False, request + + if not self.request_validator.validate_timestamp_and_nonce( + request.client_key, request.timestamp, request.nonce, request): + log.debug('[Failure] verification failed: timestamp/nonce') + return False, request + + # The server SHOULD return a 401 (Unauthorized) status code when + # receiving a request with invalid client credentials. + # Note: This is postponed in order to avoid timing attacks, instead + # a dummy client is assigned and used to maintain near constant + # time request verification. + # + # Note that early exit would enable client enumeration + valid_client = self.request_validator.validate_client_key( + request.client_key, request) + if not valid_client: + request.client_key = self.request_validator.dummy_client + + valid_signature = self._check_signature(request) + + # log the results to the validator_log + # this lets us handle internal reporting and analysis + request.validator_log['client'] = valid_client + request.validator_log['signature'] = valid_signature + + # We delay checking validity until the very end, using dummy values for + # calculations and fetching secrets/keys to ensure the flow of every + # request remains almost identical regardless of whether valid values + # have been supplied. This ensures near constant time execution and + # prevents malicious users from guessing sensitive information + v = all((valid_client, valid_signature)) + if not v: + log.info("[Failure] request verification failed.") + log.info("Valid client: %s", valid_client) + log.info("Valid signature: %s", valid_signature) + return v, request diff --git a/env/lib/python3.6/site-packages/oauthlib/oauth1/rfc5849/errors.py b/env/lib/python3.6/site-packages/oauthlib/oauth1/rfc5849/errors.py new file mode 100644 index 0000000..a5c59bd --- /dev/null +++ b/env/lib/python3.6/site-packages/oauthlib/oauth1/rfc5849/errors.py @@ -0,0 +1,79 @@ +# coding=utf-8 +""" +oauthlib.oauth1.rfc5849.errors +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Error used both by OAuth 1 clients and provicers to represent the spec +defined error responses for all four core grant types. +""" +from __future__ import unicode_literals + +from oauthlib.common import add_params_to_uri, urlencode + + +class OAuth1Error(Exception): + error = None + description = '' + + def __init__(self, description=None, uri=None, status_code=400, + request=None): + """ + description: A human-readable ASCII [USASCII] text providing + additional information, used to assist the client + developer in understanding the error that occurred. + Values for the "error_description" parameter MUST NOT + include characters outside the set + x20-21 / x23-5B / x5D-7E. + + uri: A URI identifying a human-readable web page with information + about the error, used to provide the client developer with + additional information about the error. Values for the + "error_uri" parameter MUST conform to the URI- Reference + syntax, and thus MUST NOT include characters outside the set + x21 / x23-5B / x5D-7E. + + state: A CSRF protection value received from the client. 
+ + request: Oauthlib Request object + """ + self.description = description or self.description + message = '(%s) %s' % (self.error, self.description) + if request: + message += ' ' + repr(request) + super(OAuth1Error, self).__init__(message) + + self.uri = uri + self.status_code = status_code + + def in_uri(self, uri): + return add_params_to_uri(uri, self.twotuples) + + @property + def twotuples(self): + error = [('error', self.error)] + if self.description: + error.append(('error_description', self.description)) + if self.uri: + error.append(('error_uri', self.uri)) + return error + + @property + def urlencoded(self): + return urlencode(self.twotuples) + + +class InsecureTransportError(OAuth1Error): + error = 'insecure_transport_protocol' + description = 'Only HTTPS connections are permitted.' + + +class InvalidSignatureMethodError(OAuth1Error): + error = 'invalid_signature_method' + + +class InvalidRequestError(OAuth1Error): + error = 'invalid_request' + + +class InvalidClientError(OAuth1Error): + error = 'invalid_client' diff --git a/env/lib/python3.6/site-packages/oauthlib/oauth1/rfc5849/parameters.py b/env/lib/python3.6/site-packages/oauthlib/oauth1/rfc5849/parameters.py new file mode 100644 index 0000000..2f068a7 --- /dev/null +++ b/env/lib/python3.6/site-packages/oauthlib/oauth1/rfc5849/parameters.py @@ -0,0 +1,139 @@ +# -*- coding: utf-8 -*- +""" +oauthlib.parameters +~~~~~~~~~~~~~~~~~~~ + +This module contains methods related to `section 3.5`_ of the OAuth 1.0a spec. + +.. _`section 3.5`: https://tools.ietf.org/html/rfc5849#section-3.5 +""" +from __future__ import absolute_import, unicode_literals + +from oauthlib.common import extract_params, urlencode + +from . import utils + +try: + from urlparse import urlparse, urlunparse +except ImportError: + from urllib.parse import urlparse, urlunparse + + +# TODO: do we need filter_params now that oauth_params are handled by Request? +# We can easily pass in just oauth protocol params. +@utils.filter_params +def prepare_headers(oauth_params, headers=None, realm=None): + """**Prepare the Authorization header.** + Per `section 3.5.1`_ of the spec. + + Protocol parameters can be transmitted using the HTTP "Authorization" + header field as defined by `RFC2617`_ with the auth-scheme name set to + "OAuth" (case insensitive). + + For example:: + + Authorization: OAuth realm="Example", + oauth_consumer_key="0685bd9184jfhq22", + oauth_token="ad180jjd733klru7", + oauth_signature_method="HMAC-SHA1", + oauth_signature="wOJIO9A2W5mFwDgiDvZbTSMK%2FPY%3D", + oauth_timestamp="137131200", + oauth_nonce="4572616e48616d6d65724c61686176", + oauth_version="1.0" + + + .. _`section 3.5.1`: https://tools.ietf.org/html/rfc5849#section-3.5.1 + .. _`RFC2617`: https://tools.ietf.org/html/rfc2617 + """ + headers = headers or {} + + # Protocol parameters SHALL be included in the "Authorization" header + # field as follows: + authorization_header_parameters_parts = [] + for oauth_parameter_name, value in oauth_params: + # 1. Parameter names and values are encoded per Parameter Encoding + # (`Section 3.6`_) + # + # .. _`Section 3.6`: https://tools.ietf.org/html/rfc5849#section-3.6 + escaped_name = utils.escape(oauth_parameter_name) + escaped_value = utils.escape(value) + + # 2. Each parameter's name is immediately followed by an "=" character + # (ASCII code 61), a """ character (ASCII code 34), the parameter + # value (MAY be empty), and another """ character (ASCII code 34). 
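+        # e.g. ('oauth_nonce', '7d8f3e4a') becomes: oauth_nonce="7d8f3e4a"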
+ part = '{0}="{1}"'.format(escaped_name, escaped_value) + + authorization_header_parameters_parts.append(part) + + # 3. Parameters are separated by a "," character (ASCII code 44) and + # OPTIONAL linear whitespace per `RFC2617`_. + # + # .. _`RFC2617`: https://tools.ietf.org/html/rfc2617 + authorization_header_parameters = ', '.join( + authorization_header_parameters_parts) + + # 4. The OPTIONAL "realm" parameter MAY be added and interpreted per + # `RFC2617 section 1.2`_. + # + # .. _`RFC2617 section 1.2`: https://tools.ietf.org/html/rfc2617#section-1.2 + if realm: + # NOTE: realm should *not* be escaped + authorization_header_parameters = ('realm="%s", ' % realm + + authorization_header_parameters) + + # the auth-scheme name set to "OAuth" (case insensitive). + authorization_header = 'OAuth %s' % authorization_header_parameters + + # contribute the Authorization header to the given headers + full_headers = {} + full_headers.update(headers) + full_headers['Authorization'] = authorization_header + return full_headers + + +def _append_params(oauth_params, params): + """Append OAuth params to an existing set of parameters. + + Both params and oauth_params is must be lists of 2-tuples. + + Per `section 3.5.2`_ and `3.5.3`_ of the spec. + + .. _`section 3.5.2`: https://tools.ietf.org/html/rfc5849#section-3.5.2 + .. _`3.5.3`: https://tools.ietf.org/html/rfc5849#section-3.5.3 + + """ + merged = list(params) + merged.extend(oauth_params) + # The request URI / entity-body MAY include other request-specific + # parameters, in which case, the protocol parameters SHOULD be appended + # following the request-specific parameters, properly separated by an "&" + # character (ASCII code 38) + merged.sort(key=lambda i: i[0].startswith('oauth_')) + return merged + + +def prepare_form_encoded_body(oauth_params, body): + """Prepare the Form-Encoded Body. + + Per `section 3.5.2`_ of the spec. + + .. _`section 3.5.2`: https://tools.ietf.org/html/rfc5849#section-3.5.2 + + """ + # append OAuth params to the existing body + return _append_params(oauth_params, body) + + +def prepare_request_uri_query(oauth_params, uri): + """Prepare the Request URI Query. + + Per `section 3.5.3`_ of the spec. + + .. _`section 3.5.3`: https://tools.ietf.org/html/rfc5849#section-3.5.3 + + """ + # append OAuth params to the existing set of query components + sch, net, path, par, query, fra = urlparse(uri) + query = urlencode( + _append_params(oauth_params, extract_params(query) or [])) + return urlunparse((sch, net, path, par, query, fra)) diff --git a/env/lib/python3.6/site-packages/oauthlib/oauth1/rfc5849/request_validator.py b/env/lib/python3.6/site-packages/oauthlib/oauth1/rfc5849/request_validator.py new file mode 100644 index 0000000..bc62ea0 --- /dev/null +++ b/env/lib/python3.6/site-packages/oauthlib/oauth1/rfc5849/request_validator.py @@ -0,0 +1,833 @@ +# -*- coding: utf-8 -*- +""" +oauthlib.oauth1.rfc5849 +~~~~~~~~~~~~~~ + +This module is an implementation of various logic needed +for signing and checking OAuth 1.0 RFC 5849 requests. +""" +from __future__ import absolute_import, unicode_literals + +import sys + +from . import SIGNATURE_METHODS, utils + + +class RequestValidator(object): + + """A validator/datastore interaction base class for OAuth 1 providers. + + OAuth providers should inherit from RequestValidator and implement the + methods and properties outlined below. Further details are provided in the + documentation for each method and property. + + Methods used to check the format of input parameters. 
Common tests include + length, character set, membership, range or pattern. These tests are + referred to as `whitelisting or blacklisting`_. Whitelisting is better + but blacklisting can be usefull to spot malicious activity. + The following have methods a default implementation: + + - check_client_key + - check_request_token + - check_access_token + - check_nonce + - check_verifier + - check_realms + + The methods above default to whitelist input parameters, checking that they + are alphanumerical and between a minimum and maximum length. Rather than + overloading the methods a few properties can be used to configure these + methods. + + * @safe_characters -> (character set) + * @client_key_length -> (min, max) + * @request_token_length -> (min, max) + * @access_token_length -> (min, max) + * @nonce_length -> (min, max) + * @verifier_length -> (min, max) + * @realms -> [list, of, realms] + + Methods used to validate/invalidate input parameters. These checks usually + hit either persistent or temporary storage such as databases or the + filesystem. See each methods documentation for detailed usage. + The following methods must be implemented: + + - validate_client_key + - validate_request_token + - validate_access_token + - validate_timestamp_and_nonce + - validate_redirect_uri + - validate_requested_realms + - validate_realms + - validate_verifier + - invalidate_request_token + + Methods used to retrieve sensitive information from storage. + The following methods must be implemented: + + - get_client_secret + - get_request_token_secret + - get_access_token_secret + - get_rsa_key + - get_realms + - get_default_realms + - get_redirect_uri + + Methods used to save credentials. + The following methods must be implemented: + + - save_request_token + - save_verifier + - save_access_token + + Methods used to verify input parameters. This methods are used during + authorizing request token by user (AuthorizationEndpoint), to check if + parameters are valid. During token authorization request is not signed, + thus 'validation' methods can not be used. The following methods must be + implemented: + + - verify_realms + - verify_request_token + + To prevent timing attacks it is necessary to not exit early even if the + client key or resource owner key is invalid. Instead dummy values should + be used during the remaining verification process. It is very important + that the dummy client and token are valid input parameters to the methods + get_client_secret, get_rsa_key and get_(access/request)_token_secret and + that the running time of those methods when given a dummy value remain + equivalent to the running time when given a valid client/resource owner. + The following properties must be implemented: + + * @dummy_client + * @dummy_request_token + * @dummy_access_token + + Example implementations have been provided, note that the database used is + a simple dictionary and serves only an illustrative purpose. Use whichever + database suits your project and how to access it is entirely up to you. + The methods are introduced in an order which should make understanding + their use more straightforward and as such it could be worth reading what + follows in chronological order. + + .. 
_`whitelisting or blacklisting`: https://www.schneier.com/blog/archives/2011/01/whitelisting_vs.html + """ + + def __init__(self): + pass + + @property + def allowed_signature_methods(self): + return SIGNATURE_METHODS + + @property + def safe_characters(self): + return set(utils.UNICODE_ASCII_CHARACTER_SET) + + @property + def client_key_length(self): + return 20, 30 + + @property + def request_token_length(self): + return 20, 30 + + @property + def access_token_length(self): + return 20, 30 + + @property + def timestamp_lifetime(self): + return 600 + + @property + def nonce_length(self): + return 20, 30 + + @property + def verifier_length(self): + return 20, 30 + + @property + def realms(self): + return [] + + @property + def enforce_ssl(self): + return True + + def check_client_key(self, client_key): + """Check that the client key only contains safe characters + and is no shorter than lower and no longer than upper. + """ + lower, upper = self.client_key_length + return (set(client_key) <= self.safe_characters and + lower <= len(client_key) <= upper) + + def check_request_token(self, request_token): + """Checks that the request token contains only safe characters + and is no shorter than lower and no longer than upper. + """ + lower, upper = self.request_token_length + return (set(request_token) <= self.safe_characters and + lower <= len(request_token) <= upper) + + def check_access_token(self, request_token): + """Checks that the token contains only safe characters + and is no shorter than lower and no longer than upper. + """ + lower, upper = self.access_token_length + return (set(request_token) <= self.safe_characters and + lower <= len(request_token) <= upper) + + def check_nonce(self, nonce): + """Checks that the nonce only contains only safe characters + and is no shorter than lower and no longer than upper. + """ + lower, upper = self.nonce_length + return (set(nonce) <= self.safe_characters and + lower <= len(nonce) <= upper) + + def check_verifier(self, verifier): + """Checks that the verifier contains only safe characters + and is no shorter than lower and no longer than upper. + """ + lower, upper = self.verifier_length + return (set(verifier) <= self.safe_characters and + lower <= len(verifier) <= upper) + + def check_realms(self, realms): + """Check that the realm is one of a set allowed realms.""" + return all((r in self.realms for r in realms)) + + def _subclass_must_implement(self, fn): + """ + Returns a NotImplementedError for a function that should be implemented. + :param fn: name of the function + """ + m = "Missing function implementation in {}: {}".format(type(self), fn) + return NotImplementedError(m) + + @property + def dummy_client(self): + """Dummy client used when an invalid client key is supplied. + + :returns: The dummy client key string. + + The dummy client should be associated with either a client secret, + a rsa key or both depending on which signature methods are supported. + Providers should make sure that + + get_client_secret(dummy_client) + get_rsa_key(dummy_client) + + return a valid secret or key for the dummy client. + + This method is used by + + * AccessTokenEndpoint + * RequestTokenEndpoint + * ResourceEndpoint + * SignatureOnlyEndpoint + """ + raise self._subclass_must_implement("dummy_client") + + @property + def dummy_request_token(self): + """Dummy request token used when an invalid token was supplied. + + :returns: The dummy request token string. 
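+        As an illustration only (placeholder value, not part of oauthlib), a
+        provider might simply return a fixed, pre-registered token::
+
+            @property
+            def dummy_request_token(self):
+                return 'dummyrequesttoken0000000'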
+ + The dummy request token should be associated with a request token + secret such that get_request_token_secret(.., dummy_request_token) + returns a valid secret. + + This method is used by + + * AccessTokenEndpoint + """ + raise self._subclass_must_implement("dummy_request_token") + + @property + def dummy_access_token(self): + """Dummy access token used when an invalid token was supplied. + + :returns: The dummy access token string. + + The dummy access token should be associated with an access token + secret such that get_access_token_secret(.., dummy_access_token) + returns a valid secret. + + This method is used by + + * ResourceEndpoint + """ + raise self._subclass_must_implement("dummy_access_token") + + def get_client_secret(self, client_key, request): + """Retrieves the client secret associated with the client key. + + :param client_key: The client/consumer key. + :param request: An oauthlib.common.Request object. + :returns: The client secret as a string. + + This method must allow the use of a dummy client_key value. + Fetching the secret using the dummy key must take the same amount of + time as fetching a secret for a valid client:: + + # Unlikely to be near constant time as it uses two database + # lookups for a valid client, and only one for an invalid. + from your_datastore import ClientSecret + if ClientSecret.has(client_key): + return ClientSecret.get(client_key) + else: + return 'dummy' + + # Aim to mimic number of latency inducing operations no matter + # whether the client is valid or not. + from your_datastore import ClientSecret + return ClientSecret.get(client_key, 'dummy') + + Note that the returned key must be in plaintext. + + This method is used by + + * AccessTokenEndpoint + * RequestTokenEndpoint + * ResourceEndpoint + * SignatureOnlyEndpoint + """ + raise self._subclass_must_implement('get_client_secret') + + def get_request_token_secret(self, client_key, token, request): + """Retrieves the shared secret associated with the request token. + + :param client_key: The client/consumer key. + :param token: The request token string. + :param request: An oauthlib.common.Request object. + :returns: The token secret as a string. + + This method must allow the use of a dummy values and the running time + must be roughly equivalent to that of the running time of valid values:: + + # Unlikely to be near constant time as it uses two database + # lookups for a valid client, and only one for an invalid. + from your_datastore import RequestTokenSecret + if RequestTokenSecret.has(client_key): + return RequestTokenSecret.get((client_key, request_token)) + else: + return 'dummy' + + # Aim to mimic number of latency inducing operations no matter + # whether the client is valid or not. + from your_datastore import RequestTokenSecret + return ClientSecret.get((client_key, request_token), 'dummy') + + Note that the returned key must be in plaintext. + + This method is used by + + * AccessTokenEndpoint + """ + raise self._subclass_must_implement('get_request_token_secret') + + def get_access_token_secret(self, client_key, token, request): + """Retrieves the shared secret associated with the access token. + + :param client_key: The client/consumer key. + :param token: The access token string. + :param request: An oauthlib.common.Request object. + :returns: The token secret as a string. 
+ + This method must allow the use of a dummy values and the running time + must be roughly equivalent to that of the running time of valid values:: + + # Unlikely to be near constant time as it uses two database + # lookups for a valid client, and only one for an invalid. + from your_datastore import AccessTokenSecret + if AccessTokenSecret.has(client_key): + return AccessTokenSecret.get((client_key, request_token)) + else: + return 'dummy' + + # Aim to mimic number of latency inducing operations no matter + # whether the client is valid or not. + from your_datastore import AccessTokenSecret + return ClientSecret.get((client_key, request_token), 'dummy') + + Note that the returned key must be in plaintext. + + This method is used by + + * ResourceEndpoint + """ + raise self._subclass_must_implement("get_access_token_secret") + + def get_default_realms(self, client_key, request): + """Get the default realms for a client. + + :param client_key: The client/consumer key. + :param request: An oauthlib.common.Request object. + :returns: The list of default realms associated with the client. + + The list of default realms will be set during client registration and + is outside the scope of OAuthLib. + + This method is used by + + * RequestTokenEndpoint + """ + raise self._subclass_must_implement("get_default_realms") + + def get_realms(self, token, request): + """Get realms associated with a request token. + + :param token: The request token string. + :param request: An oauthlib.common.Request object. + :returns: The list of realms associated with the request token. + + This method is used by + + * AuthorizationEndpoint + * AccessTokenEndpoint + """ + raise self._subclass_must_implement("get_realms") + + def get_redirect_uri(self, token, request): + """Get the redirect URI associated with a request token. + + :param token: The request token string. + :param request: An oauthlib.common.Request object. + :returns: The redirect URI associated with the request token. + + It may be desirable to return a custom URI if the redirect is set to "oob". + In this case, the user will be redirected to the returned URI and at that + endpoint the verifier can be displayed. + + This method is used by + + * AuthorizationEndpoint + """ + raise self._subclass_must_implement("get_redirect_uri") + + def get_rsa_key(self, client_key, request): + """Retrieves a previously stored client provided RSA key. + + :param client_key: The client/consumer key. + :param request: An oauthlib.common.Request object. + :returns: The rsa public key as a string. + + This method must allow the use of a dummy client_key value. Fetching + the rsa key using the dummy key must take the same amount of time + as fetching a key for a valid client. The dummy key must also be of + the same bit length as client keys. + + Note that the key must be returned in plaintext. + + This method is used by + + * AccessTokenEndpoint + * RequestTokenEndpoint + * ResourceEndpoint + * SignatureOnlyEndpoint + """ + raise self._subclass_must_implement("get_rsa_key") + + def invalidate_request_token(self, client_key, request_token, request): + """Invalidates a used request token. + + :param client_key: The client/consumer key. + :param request_token: The request token string. + :param request: An oauthlib.common.Request object. + :returns: None + + Per `Section 2.3`__ of the spec: + + "The server MUST (...) ensure that the temporary + credentials have not expired or been used before." + + .. 
_`Section 2.3`: https://tools.ietf.org/html/rfc5849#section-2.3 + + This method should ensure that provided token won't validate anymore. + It can be simply removing RequestToken from storage or setting + specific flag that makes it invalid (note that such flag should be + also validated during request token validation). + + This method is used by + + * AccessTokenEndpoint + """ + raise self._subclass_must_implement("invalidate_request_token") + + def validate_client_key(self, client_key, request): + """Validates that supplied client key is a registered and valid client. + + :param client_key: The client/consumer key. + :param request: An oauthlib.common.Request object. + :returns: True or False + + Note that if the dummy client is supplied it should validate in same + or nearly the same amount of time as a valid one. + + Ensure latency inducing tasks are mimiced even for dummy clients. + For example, use:: + + from your_datastore import Client + try: + return Client.exists(client_key, access_token) + except DoesNotExist: + return False + + Rather than:: + + from your_datastore import Client + if access_token == self.dummy_access_token: + return False + else: + return Client.exists(client_key, access_token) + + This method is used by + + * AccessTokenEndpoint + * RequestTokenEndpoint + * ResourceEndpoint + * SignatureOnlyEndpoint + """ + raise self._subclass_must_implement("validate_client_key") + + def validate_request_token(self, client_key, token, request): + """Validates that supplied request token is registered and valid. + + :param client_key: The client/consumer key. + :param token: The request token string. + :param request: An oauthlib.common.Request object. + :returns: True or False + + Note that if the dummy request_token is supplied it should validate in + the same nearly the same amount of time as a valid one. + + Ensure latency inducing tasks are mimiced even for dummy clients. + For example, use:: + + from your_datastore import RequestToken + try: + return RequestToken.exists(client_key, access_token) + except DoesNotExist: + return False + + Rather than:: + + from your_datastore import RequestToken + if access_token == self.dummy_access_token: + return False + else: + return RequestToken.exists(client_key, access_token) + + This method is used by + + * AccessTokenEndpoint + """ + raise self._subclass_must_implement("validate_request_token") + + def validate_access_token(self, client_key, token, request): + """Validates that supplied access token is registered and valid. + + :param client_key: The client/consumer key. + :param token: The access token string. + :param request: An oauthlib.common.Request object. + :returns: True or False + + Note that if the dummy access token is supplied it should validate in + the same or nearly the same amount of time as a valid one. + + Ensure latency inducing tasks are mimiced even for dummy clients. + For example, use:: + + from your_datastore import AccessToken + try: + return AccessToken.exists(client_key, access_token) + except DoesNotExist: + return False + + Rather than:: + + from your_datastore import AccessToken + if access_token == self.dummy_access_token: + return False + else: + return AccessToken.exists(client_key, access_token) + + This method is used by + + * ResourceEndpoint + """ + raise self._subclass_must_implement("validate_access_token") + + def validate_timestamp_and_nonce(self, client_key, timestamp, nonce, + request, request_token=None, access_token=None): + """Validates that the nonce has not been used before. 
+ + :param client_key: The client/consumer key. + :param timestamp: The ``oauth_timestamp`` parameter. + :param nonce: The ``oauth_nonce`` parameter. + :param request_token: Request token string, if any. + :param access_token: Access token string, if any. + :param request: An oauthlib.common.Request object. + :returns: True or False + + Per `Section 3.3`_ of the spec. + + "A nonce is a random string, uniquely generated by the client to allow + the server to verify that a request has never been made before and + helps prevent replay attacks when requests are made over a non-secure + channel. The nonce value MUST be unique across all requests with the + same timestamp, client credentials, and token combinations." + + .. _`Section 3.3`: https://tools.ietf.org/html/rfc5849#section-3.3 + + One of the first validation checks that will be made is for the validity + of the nonce and timestamp, which are associated with a client key and + possibly a token. If invalid then immediately fail the request + by returning False. If the nonce/timestamp pair has been used before and + you may just have detected a replay attack. Therefore it is an essential + part of OAuth security that you not allow nonce/timestamp reuse. + Note that this validation check is done before checking the validity of + the client and token.:: + + nonces_and_timestamps_database = [ + (u'foo', 1234567890, u'rannoMstrInghere', u'bar') + ] + + def validate_timestamp_and_nonce(self, client_key, timestamp, nonce, + request_token=None, access_token=None): + + return ((client_key, timestamp, nonce, request_token or access_token) + not in self.nonces_and_timestamps_database) + + This method is used by + + * AccessTokenEndpoint + * RequestTokenEndpoint + * ResourceEndpoint + * SignatureOnlyEndpoint + """ + raise self._subclass_must_implement("validate_timestamp_and_nonce") + + def validate_redirect_uri(self, client_key, redirect_uri, request): + """Validates the client supplied redirection URI. + + :param client_key: The client/consumer key. + :param redirect_uri: The URI the client which to redirect back to after + authorization is successful. + :param request: An oauthlib.common.Request object. + :returns: True or False + + It is highly recommended that OAuth providers require their clients + to register all redirection URIs prior to using them in requests and + register them as absolute URIs. See `CWE-601`_ for more information + about open redirection attacks. + + By requiring registration of all redirection URIs it should be + straightforward for the provider to verify whether the supplied + redirect_uri is valid or not. + + Alternatively per `Section 2.1`_ of the spec: + + "If the client is unable to receive callbacks or a callback URI has + been established via other means, the parameter value MUST be set to + "oob" (case sensitive), to indicate an out-of-band configuration." + + .. _`CWE-601`: http://cwe.mitre.org/top25/index.html#CWE-601 + .. _`Section 2.1`: https://tools.ietf.org/html/rfc5849#section-2.1 + + This method is used by + + * RequestTokenEndpoint + """ + raise self._subclass_must_implement("validate_redirect_uri") + + def validate_requested_realms(self, client_key, realms, request): + """Validates that the client may request access to the realm. + + :param client_key: The client/consumer key. + :param realms: The list of realms that client is requesting access to. + :param request: An oauthlib.common.Request object. 
+ :returns: True or False + + This method is invoked when obtaining a request token and should + tie a realm to the request token and after user authorization + this realm restriction should transfer to the access token. + + This method is used by + + * RequestTokenEndpoint + """ + raise self._subclass_must_implement("validate_requested_realms") + + def validate_realms(self, client_key, token, request, uri=None, + realms=None): + """Validates access to the request realm. + + :param client_key: The client/consumer key. + :param token: A request token string. + :param request: An oauthlib.common.Request object. + :param uri: The URI the realms is protecting. + :param realms: A list of realms that must have been granted to + the access token. + :returns: True or False + + How providers choose to use the realm parameter is outside the OAuth + specification but it is commonly used to restrict access to a subset + of protected resources such as "photos". + + realms is a convenience parameter which can be used to provide + a per view method pre-defined list of allowed realms. + + Can be as simple as:: + + from your_datastore import RequestToken + request_token = RequestToken.get(token, None) + + if not request_token: + return False + return set(request_token.realms).issuperset(set(realms)) + + This method is used by + + * ResourceEndpoint + """ + raise self._subclass_must_implement("validate_realms") + + def validate_verifier(self, client_key, token, verifier, request): + """Validates a verification code. + + :param client_key: The client/consumer key. + :param token: A request token string. + :param verifier: The authorization verifier string. + :param request: An oauthlib.common.Request object. + :returns: True or False + + OAuth providers issue a verification code to clients after the + resource owner authorizes access. This code is used by the client to + obtain token credentials and the provider must verify that the + verifier is valid and associated with the client as well as the + resource owner. + + Verifier validation should be done in near constant time + (to avoid verifier enumeration). To achieve this we need a + constant time string comparison which is provided by OAuthLib + in ``oauthlib.common.safe_string_equals``:: + + from your_datastore import Verifier + correct_verifier = Verifier.get(client_key, request_token) + from oauthlib.common import safe_string_equals + return safe_string_equals(verifier, correct_verifier) + + This method is used by + + * AccessTokenEndpoint + """ + raise self._subclass_must_implement("validate_verifier") + + def verify_request_token(self, token, request): + """Verify that the given OAuth1 request token is valid. + + :param token: A request token string. + :param request: An oauthlib.common.Request object. + :returns: True or False + + This method is used only in AuthorizationEndpoint to check whether the + oauth_token given in the authorization URL is valid or not. + This request is not signed and thus similar ``validate_request_token`` + method can not be used. + + This method is used by + + * AuthorizationEndpoint + """ + raise self._subclass_must_implement("verify_request_token") + + def verify_realms(self, token, realms, request): + """Verify authorized realms to see if they match those given to token. + + :param token: An access token string. + :param realms: A list of realms the client attempts to access. + :param request: An oauthlib.common.Request object. 
+ :returns: True or False + + This prevents the list of authorized realms sent by the client during + the authorization step to be altered to include realms outside what + was bound with the request token. + + Can be as simple as:: + + valid_realms = self.get_realms(token) + return all((r in valid_realms for r in realms)) + + This method is used by + + * AuthorizationEndpoint + """ + raise self._subclass_must_implement("verify_realms") + + def save_access_token(self, token, request): + """Save an OAuth1 access token. + + :param token: A dict with token credentials. + :param request: An oauthlib.common.Request object. + + The token dictionary will at minimum include + + * ``oauth_token`` the access token string. + * ``oauth_token_secret`` the token specific secret used in signing. + * ``oauth_authorized_realms`` a space separated list of realms. + + Client key can be obtained from ``request.client_key``. + + The list of realms (not joined string) can be obtained from + ``request.realm``. + + This method is used by + + * AccessTokenEndpoint + """ + raise self._subclass_must_implement("save_access_token") + + def save_request_token(self, token, request): + """Save an OAuth1 request token. + + :param token: A dict with token credentials. + :param request: An oauthlib.common.Request object. + + The token dictionary will at minimum include + + * ``oauth_token`` the request token string. + * ``oauth_token_secret`` the token specific secret used in signing. + * ``oauth_callback_confirmed`` the string ``true``. + + Client key can be obtained from ``request.client_key``. + + This method is used by + + * RequestTokenEndpoint + """ + raise self._subclass_must_implement("save_request_token") + + def save_verifier(self, token, verifier, request): + """Associate an authorization verifier with a request token. + + :param token: A request token string. + :param verifier A dictionary containing the oauth_verifier and + oauth_token + :param request: An oauthlib.common.Request object. + + We need to associate verifiers with tokens for validation during the + access token request. + + Note that unlike save_x_token token here is the ``oauth_token`` token + string from the request token saved previously. + + This method is used by + + * AuthorizationEndpoint + """ + raise self._subclass_must_implement("save_verifier") diff --git a/env/lib/python3.6/site-packages/oauthlib/oauth1/rfc5849/signature.py b/env/lib/python3.6/site-packages/oauthlib/oauth1/rfc5849/signature.py new file mode 100644 index 0000000..881d8d4 --- /dev/null +++ b/env/lib/python3.6/site-packages/oauthlib/oauth1/rfc5849/signature.py @@ -0,0 +1,627 @@ +# -*- coding: utf-8 -*- +""" +oauthlib.oauth1.rfc5849.signature +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +This module represents a direct implementation of `section 3.4`_ of the spec. + +Terminology: + * Client: software interfacing with an OAuth API + * Server: the API provider + * Resource Owner: the user who is granting authorization to the client + +Steps for signing a request: + +1. Collect parameters from the uri query, auth header, & body +2. Normalize those parameters +3. Normalize the uri +4. Pass the normalized uri, normalized parameters, and http method to + construct the base string +5. Pass the base string and any keys needed to a signing function + +.. 
_`section 3.4`: https://tools.ietf.org/html/rfc5849#section-3.4 +""" +from __future__ import absolute_import, unicode_literals + +import binascii +import hashlib +import hmac +import logging + +from oauthlib.common import (bytes_type, extract_params, safe_string_equals, + unicode_type, urldecode) + +from . import utils + +try: + import urlparse +except ImportError: + import urllib.parse as urlparse + +log = logging.getLogger(__name__) + +def construct_base_string(http_method, base_string_uri, + normalized_encoded_request_parameters): + """**String Construction** + Per `section 3.4.1.1`_ of the spec. + + For example, the HTTP request:: + + POST /request?b5=%3D%253D&a3=a&c%40=&a2=r%20b HTTP/1.1 + Host: example.com + Content-Type: application/x-www-form-urlencoded + Authorization: OAuth realm="Example", + oauth_consumer_key="9djdj82h48djs9d2", + oauth_token="kkk9d7dh3k39sjv7", + oauth_signature_method="HMAC-SHA1", + oauth_timestamp="137131201", + oauth_nonce="7d8f3e4a", + oauth_signature="bYT5CMsGcbgUdFHObYMEfcx6bsw%3D" + + c2&a3=2+q + + is represented by the following signature base string (line breaks + are for display purposes only):: + + POST&http%3A%2F%2Fexample.com%2Frequest&a2%3Dr%2520b%26a3%3D2%2520q + %26a3%3Da%26b5%3D%253D%25253D%26c%2540%3D%26c2%3D%26oauth_consumer_ + key%3D9djdj82h48djs9d2%26oauth_nonce%3D7d8f3e4a%26oauth_signature_m + ethod%3DHMAC-SHA1%26oauth_timestamp%3D137131201%26oauth_token%3Dkkk + 9d7dh3k39sjv7 + + .. _`section 3.4.1.1`: https://tools.ietf.org/html/rfc5849#section-3.4.1.1 + """ + + # The signature base string is constructed by concatenating together, + # in order, the following HTTP request elements: + + # 1. The HTTP request method in uppercase. For example: "HEAD", + # "GET", "POST", etc. If the request uses a custom HTTP method, it + # MUST be encoded (`Section 3.6`_). + # + # .. _`Section 3.6`: https://tools.ietf.org/html/rfc5849#section-3.6 + base_string = utils.escape(http_method.upper()) + + # 2. An "&" character (ASCII code 38). + base_string += '&' + + # 3. The base string URI from `Section 3.4.1.2`_, after being encoded + # (`Section 3.6`_). + # + # .. _`Section 3.4.1.2`: https://tools.ietf.org/html/rfc5849#section-3.4.1.2 + # .. _`Section 3.4.6`: https://tools.ietf.org/html/rfc5849#section-3.4.6 + base_string += utils.escape(base_string_uri) + + # 4. An "&" character (ASCII code 38). + base_string += '&' + + # 5. The request parameters as normalized in `Section 3.4.1.3.2`_, after + # being encoded (`Section 3.6`). + # + # .. _`Section 3.4.1.3.2`: https://tools.ietf.org/html/rfc5849#section-3.4.1.3.2 + # .. _`Section 3.4.6`: https://tools.ietf.org/html/rfc5849#section-3.4.6 + base_string += utils.escape(normalized_encoded_request_parameters) + + return base_string + + +def normalize_base_string_uri(uri, host=None): + """**Base String URI** + Per `section 3.4.1.2`_ of the spec. + + For example, the HTTP request:: + + GET /r%20v/X?id=123 HTTP/1.1 + Host: EXAMPLE.COM:80 + + is represented by the base string URI: "http://example.com/r%20v/X". + + In another example, the HTTPS request:: + + GET /?q=1 HTTP/1.1 + Host: www.example.net:8080 + + is represented by the base string URI: "https://www.example.net:8080/". + + .. _`section 3.4.1.2`: https://tools.ietf.org/html/rfc5849#section-3.4.1.2 + + The host argument overrides the netloc part of the uri argument. 
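+    For illustration, direct calls mirroring the two examples above would be
+    expected to give::
+
+        >>> normalize_base_string_uri('http://EXAMPLE.COM:80/r%20v/X')
+        'http://example.com/r%20v/X'
+        >>> normalize_base_string_uri('https://www.example.net:8080/?q=1')
+        'https://www.example.net:8080/'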
+ """ + if not isinstance(uri, unicode_type): + raise ValueError('uri must be a unicode object.') + + # FIXME: urlparse does not support unicode + scheme, netloc, path, params, query, fragment = urlparse.urlparse(uri) + + # The scheme, authority, and path of the request resource URI `RFC3986` + # are included by constructing an "http" or "https" URI representing + # the request resource (without the query or fragment) as follows: + # + # .. _`RFC3986`: https://tools.ietf.org/html/rfc3986 + + if not scheme or not netloc: + raise ValueError('uri must include a scheme and netloc') + + # Per `RFC 2616 section 5.1.2`_: + # + # Note that the absolute path cannot be empty; if none is present in + # the original URI, it MUST be given as "/" (the server root). + # + # .. _`RFC 2616 section 5.1.2`: https://tools.ietf.org/html/rfc2616#section-5.1.2 + if not path: + path = '/' + + # 1. The scheme and host MUST be in lowercase. + scheme = scheme.lower() + netloc = netloc.lower() + + # 2. The host and port values MUST match the content of the HTTP + # request "Host" header field. + if host is not None: + netloc = host.lower() + + # 3. The port MUST be included if it is not the default port for the + # scheme, and MUST be excluded if it is the default. Specifically, + # the port MUST be excluded when making an HTTP request `RFC2616`_ + # to port 80 or when making an HTTPS request `RFC2818`_ to port 443. + # All other non-default port numbers MUST be included. + # + # .. _`RFC2616`: https://tools.ietf.org/html/rfc2616 + # .. _`RFC2818`: https://tools.ietf.org/html/rfc2818 + default_ports = ( + ('http', '80'), + ('https', '443'), + ) + if ':' in netloc: + host, port = netloc.split(':', 1) + if (scheme, port) in default_ports: + netloc = host + + return urlparse.urlunparse((scheme, netloc, path, params, '', '')) + + +# ** Request Parameters ** +# +# Per `section 3.4.1.3`_ of the spec. +# +# In order to guarantee a consistent and reproducible representation of +# the request parameters, the parameters are collected and decoded to +# their original decoded form. They are then sorted and encoded in a +# particular manner that is often different from their original +# encoding scheme, and concatenated into a single string. +# +# .. _`section 3.4.1.3`: https://tools.ietf.org/html/rfc5849#section-3.4.1.3 + +def collect_parameters(uri_query='', body=[], headers=None, + exclude_oauth_signature=True, with_realm=False): + """**Parameter Sources** + + Parameters starting with `oauth_` will be unescaped. + + Body parameters must be supplied as a dict, a list of 2-tuples, or a + formencoded query string. + + Headers must be supplied as a dict. + + Per `section 3.4.1.3.1`_ of the spec. 
+ + For example, the HTTP request:: + + POST /request?b5=%3D%253D&a3=a&c%40=&a2=r%20b HTTP/1.1 + Host: example.com + Content-Type: application/x-www-form-urlencoded + Authorization: OAuth realm="Example", + oauth_consumer_key="9djdj82h48djs9d2", + oauth_token="kkk9d7dh3k39sjv7", + oauth_signature_method="HMAC-SHA1", + oauth_timestamp="137131201", + oauth_nonce="7d8f3e4a", + oauth_signature="djosJKDKJSD8743243%2Fjdk33klY%3D" + + c2&a3=2+q + + contains the following (fully decoded) parameters used in the + signature base sting:: + + +------------------------+------------------+ + | Name | Value | + +------------------------+------------------+ + | b5 | =%3D | + | a3 | a | + | c@ | | + | a2 | r b | + | oauth_consumer_key | 9djdj82h48djs9d2 | + | oauth_token | kkk9d7dh3k39sjv7 | + | oauth_signature_method | HMAC-SHA1 | + | oauth_timestamp | 137131201 | + | oauth_nonce | 7d8f3e4a | + | c2 | | + | a3 | 2 q | + +------------------------+------------------+ + + Note that the value of "b5" is "=%3D" and not "==". Both "c@" and + "c2" have empty values. While the encoding rules specified in this + specification for the purpose of constructing the signature base + string exclude the use of a "+" character (ASCII code 43) to + represent an encoded space character (ASCII code 32), this practice + is widely used in "application/x-www-form-urlencoded" encoded values, + and MUST be properly decoded, as demonstrated by one of the "a3" + parameter instances (the "a3" parameter is used twice in this + request). + + .. _`section 3.4.1.3.1`: https://tools.ietf.org/html/rfc5849#section-3.4.1.3.1 + """ + headers = headers or {} + params = [] + + # The parameters from the following sources are collected into a single + # list of name/value pairs: + + # * The query component of the HTTP request URI as defined by + # `RFC3986, Section 3.4`_. The query component is parsed into a list + # of name/value pairs by treating it as an + # "application/x-www-form-urlencoded" string, separating the names + # and values and decoding them as defined by + # `W3C.REC-html40-19980424`_, Section 17.13.4. + # + # .. _`RFC3986, Section 3.4`: https://tools.ietf.org/html/rfc3986#section-3.4 + # .. _`W3C.REC-html40-19980424`: https://tools.ietf.org/html/rfc5849#ref-W3C.REC-html40-19980424 + if uri_query: + params.extend(urldecode(uri_query)) + + # * The OAuth HTTP "Authorization" header field (`Section 3.5.1`_) if + # present. The header's content is parsed into a list of name/value + # pairs excluding the "realm" parameter if present. The parameter + # values are decoded as defined by `Section 3.5.1`_. + # + # .. _`Section 3.5.1`: https://tools.ietf.org/html/rfc5849#section-3.5.1 + if headers: + headers_lower = dict((k.lower(), v) for k, v in headers.items()) + authorization_header = headers_lower.get('authorization') + if authorization_header is not None: + params.extend([i for i in utils.parse_authorization_header( + authorization_header) if with_realm or i[0] != 'realm']) + + # * The HTTP request entity-body, but only if all of the following + # conditions are met: + # * The entity-body is single-part. + # + # * The entity-body follows the encoding requirements of the + # "application/x-www-form-urlencoded" content-type as defined by + # `W3C.REC-html40-19980424`_. + + # * The HTTP request entity-header includes the "Content-Type" + # header field set to "application/x-www-form-urlencoded". 
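+    # Note: per the docstring above, ``body`` may be a dict, a list of
+    # 2-tuples or a form-encoded string; anything extract_params cannot
+    # interpret contributes no parameters here (see also the TODO below).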
+ # + # .._`W3C.REC-html40-19980424`: https://tools.ietf.org/html/rfc5849#ref-W3C.REC-html40-19980424 + + # TODO: enforce header param inclusion conditions + bodyparams = extract_params(body) or [] + params.extend(bodyparams) + + # ensure all oauth params are unescaped + unescaped_params = [] + for k, v in params: + if k.startswith('oauth_'): + v = utils.unescape(v) + unescaped_params.append((k, v)) + + # The "oauth_signature" parameter MUST be excluded from the signature + # base string if present. + if exclude_oauth_signature: + unescaped_params = list(filter(lambda i: i[0] != 'oauth_signature', + unescaped_params)) + + return unescaped_params + + +def normalize_parameters(params): + """**Parameters Normalization** + Per `section 3.4.1.3.2`_ of the spec. + + For example, the list of parameters from the previous section would + be normalized as follows: + + Encoded:: + + +------------------------+------------------+ + | Name | Value | + +------------------------+------------------+ + | b5 | %3D%253D | + | a3 | a | + | c%40 | | + | a2 | r%20b | + | oauth_consumer_key | 9djdj82h48djs9d2 | + | oauth_token | kkk9d7dh3k39sjv7 | + | oauth_signature_method | HMAC-SHA1 | + | oauth_timestamp | 137131201 | + | oauth_nonce | 7d8f3e4a | + | c2 | | + | a3 | 2%20q | + +------------------------+------------------+ + + Sorted:: + + +------------------------+------------------+ + | Name | Value | + +------------------------+------------------+ + | a2 | r%20b | + | a3 | 2%20q | + | a3 | a | + | b5 | %3D%253D | + | c%40 | | + | c2 | | + | oauth_consumer_key | 9djdj82h48djs9d2 | + | oauth_nonce | 7d8f3e4a | + | oauth_signature_method | HMAC-SHA1 | + | oauth_timestamp | 137131201 | + | oauth_token | kkk9d7dh3k39sjv7 | + +------------------------+------------------+ + + Concatenated Pairs:: + + +-------------------------------------+ + | Name=Value | + +-------------------------------------+ + | a2=r%20b | + | a3=2%20q | + | a3=a | + | b5=%3D%253D | + | c%40= | + | c2= | + | oauth_consumer_key=9djdj82h48djs9d2 | + | oauth_nonce=7d8f3e4a | + | oauth_signature_method=HMAC-SHA1 | + | oauth_timestamp=137131201 | + | oauth_token=kkk9d7dh3k39sjv7 | + +-------------------------------------+ + + and concatenated together into a single string (line breaks are for + display purposes only):: + + a2=r%20b&a3=2%20q&a3=a&b5=%3D%253D&c%40=&c2=&oauth_consumer_key=9dj + dj82h48djs9d2&oauth_nonce=7d8f3e4a&oauth_signature_method=HMAC-SHA1 + &oauth_timestamp=137131201&oauth_token=kkk9d7dh3k39sjv7 + + .. _`section 3.4.1.3.2`: https://tools.ietf.org/html/rfc5849#section-3.4.1.3.2 + """ + + # The parameters collected in `Section 3.4.1.3`_ are normalized into a + # single string as follows: + # + # .. _`Section 3.4.1.3`: https://tools.ietf.org/html/rfc5849#section-3.4.1.3 + + # 1. First, the name and value of each parameter are encoded + # (`Section 3.6`_). + # + # .. _`Section 3.6`: https://tools.ietf.org/html/rfc5849#section-3.6 + key_values = [(utils.escape(k), utils.escape(v)) for k, v in params] + + # 2. The parameters are sorted by name, using ascending byte value + # ordering. If two or more parameters share the same name, they + # are sorted by their value. + key_values.sort() + + # 3. The name of each parameter is concatenated to its corresponding + # value using an "=" character (ASCII code 61) as a separator, even + # if the value is empty. + parameter_parts = ['{0}={1}'.format(k, v) for k, v in key_values] + + # 4. 
The sorted name/value pairs are concatenated together into a + # single string by using an "&" character (ASCII code 38) as + # separator. + return '&'.join(parameter_parts) + + +def sign_hmac_sha1_with_client(base_string, client): + return sign_hmac_sha1(base_string, + client.client_secret, + client.resource_owner_secret + ) + + +def sign_hmac_sha1(base_string, client_secret, resource_owner_secret): + """**HMAC-SHA1** + + The "HMAC-SHA1" signature method uses the HMAC-SHA1 signature + algorithm as defined in `RFC2104`_:: + + digest = HMAC-SHA1 (key, text) + + Per `section 3.4.2`_ of the spec. + + .. _`RFC2104`: https://tools.ietf.org/html/rfc2104 + .. _`section 3.4.2`: https://tools.ietf.org/html/rfc5849#section-3.4.2 + """ + + # The HMAC-SHA1 function variables are used in following way: + + # text is set to the value of the signature base string from + # `Section 3.4.1.1`_. + # + # .. _`Section 3.4.1.1`: https://tools.ietf.org/html/rfc5849#section-3.4.1.1 + text = base_string + + # key is set to the concatenated values of: + # 1. The client shared-secret, after being encoded (`Section 3.6`_). + # + # .. _`Section 3.6`: https://tools.ietf.org/html/rfc5849#section-3.6 + key = utils.escape(client_secret or '') + + # 2. An "&" character (ASCII code 38), which MUST be included + # even when either secret is empty. + key += '&' + + # 3. The token shared-secret, after being encoded (`Section 3.6`_). + # + # .. _`Section 3.6`: https://tools.ietf.org/html/rfc5849#section-3.6 + key += utils.escape(resource_owner_secret or '') + + # FIXME: HMAC does not support unicode! + key_utf8 = key.encode('utf-8') + text_utf8 = text.encode('utf-8') + signature = hmac.new(key_utf8, text_utf8, hashlib.sha1) + + # digest is used to set the value of the "oauth_signature" protocol + # parameter, after the result octet string is base64-encoded + # per `RFC2045, Section 6.8`. + # + # .. _`RFC2045, Section 6.8`: https://tools.ietf.org/html/rfc2045#section-6.8 + return binascii.b2a_base64(signature.digest())[:-1].decode('utf-8') + +_jwtrs1 = None + +#jwt has some nice pycrypto/cryptography abstractions +def _jwt_rs1_signing_algorithm(): + global _jwtrs1 + if _jwtrs1 is None: + import jwt.algorithms as jwtalgo + _jwtrs1 = jwtalgo.RSAAlgorithm(jwtalgo.hashes.SHA1) + return _jwtrs1 + +def sign_rsa_sha1(base_string, rsa_private_key): + """**RSA-SHA1** + + Per `section 3.4.3`_ of the spec. + + The "RSA-SHA1" signature method uses the RSASSA-PKCS1-v1_5 signature + algorithm as defined in `RFC3447, Section 8.2`_ (also known as + PKCS#1), using SHA-1 as the hash function for EMSA-PKCS1-v1_5. To + use this method, the client MUST have established client credentials + with the server that included its RSA public key (in a manner that is + beyond the scope of this specification). + + .. _`section 3.4.3`: https://tools.ietf.org/html/rfc5849#section-3.4.3 + .. _`RFC3447, Section 8.2`: https://tools.ietf.org/html/rfc3447#section-8.2 + + """ + if isinstance(base_string, unicode_type): + base_string = base_string.encode('utf-8') + # TODO: finish RSA documentation + alg = _jwt_rs1_signing_algorithm() + key = _prepare_key_plus(alg, rsa_private_key) + s=alg.sign(base_string, key) + return binascii.b2a_base64(s)[:-1].decode('utf-8') + + +def sign_rsa_sha1_with_client(base_string, client): + if not client.rsa_key: + raise ValueError('rsa_key is required when using RSA signature method.') + return sign_rsa_sha1(base_string, client.rsa_key) + + +def sign_plaintext(client_secret, resource_owner_secret): + """Sign a request using plaintext. 
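Combining the escape/sort/join steps above with the key construction described in sign_hmac_sha1, the whole HMAC-SHA1 signing rule can be sketched with the standard library alone; illustrative only, and the base string and secrets below are placeholders::

    import base64
    import hashlib
    import hmac
    from urllib.parse import quote

    def hmac_sha1_sketch(base_string, client_secret='', resource_owner_secret=''):
        # key = escaped client secret, '&', escaped token secret (section 3.4.2)
        key = '&'.join((quote(client_secret, safe='~'),
                        quote(resource_owner_secret, safe='~')))
        digest = hmac.new(key.encode('utf-8'),
                          base_string.encode('utf-8'), hashlib.sha1).digest()
        return base64.b64encode(digest).decode('utf-8')

    print(hmac_sha1_sketch('GET&http%3A%2F%2Fexample.com%2F&a2%3Dr%2520b', 'secret'))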
+ + Per `section 3.4.4`_ of the spec. + + The "PLAINTEXT" method does not employ a signature algorithm. It + MUST be used with a transport-layer mechanism such as TLS or SSL (or + sent over a secure channel with equivalent protections). It does not + utilize the signature base string or the "oauth_timestamp" and + "oauth_nonce" parameters. + + .. _`section 3.4.4`: https://tools.ietf.org/html/rfc5849#section-3.4.4 + + """ + + # The "oauth_signature" protocol parameter is set to the concatenated + # value of: + + # 1. The client shared-secret, after being encoded (`Section 3.6`_). + # + # .. _`Section 3.6`: https://tools.ietf.org/html/rfc5849#section-3.6 + signature = utils.escape(client_secret or '') + + # 2. An "&" character (ASCII code 38), which MUST be included even + # when either secret is empty. + signature += '&' + + # 3. The token shared-secret, after being encoded (`Section 3.6`_). + # + # .. _`Section 3.6`: https://tools.ietf.org/html/rfc5849#section-3.6 + signature += utils.escape(resource_owner_secret or '') + + return signature + + +def sign_plaintext_with_client(base_string, client): + return sign_plaintext(client.client_secret, client.resource_owner_secret) + + +def verify_hmac_sha1(request, client_secret=None, + resource_owner_secret=None): + """Verify a HMAC-SHA1 signature. + + Per `section 3.4`_ of the spec. + + .. _`section 3.4`: https://tools.ietf.org/html/rfc5849#section-3.4 + + To satisfy `RFC2616 section 5.2`_ item 1, the request argument's uri + attribute MUST be an absolute URI whose netloc part identifies the + origin server or gateway on which the resource resides. Any Host + item of the request argument's headers dict attribute will be + ignored. + + .. _`RFC2616 section 5.2`: https://tools.ietf.org/html/rfc2616#section-5.2 + + """ + norm_params = normalize_parameters(request.params) + uri = normalize_base_string_uri(request.uri) + base_string = construct_base_string(request.http_method, uri, norm_params) + signature = sign_hmac_sha1(base_string, client_secret, + resource_owner_secret) + match = safe_string_equals(signature, request.signature) + if not match: + log.debug('Verify HMAC-SHA1 failed: sig base string: %s', base_string) + return match + + +def _prepare_key_plus(alg, keystr): + if isinstance(keystr, bytes_type): + keystr = keystr.decode('utf-8') + return alg.prepare_key(keystr) + +def verify_rsa_sha1(request, rsa_public_key): + """Verify a RSASSA-PKCS #1 v1.5 base64 encoded signature. + + Per `section 3.4.3`_ of the spec. + + Note this method requires the jwt and cryptography libraries. + + .. _`section 3.4.3`: https://tools.ietf.org/html/rfc5849#section-3.4.3 + + To satisfy `RFC2616 section 5.2`_ item 1, the request argument's uri + attribute MUST be an absolute URI whose netloc part identifies the + origin server or gateway on which the resource resides. Any Host + item of the request argument's headers dict attribute will be + ignored. + + .. 
_`RFC2616 section 5.2`: https://tools.ietf.org/html/rfc2616#section-5.2 + """ + norm_params = normalize_parameters(request.params) + uri = normalize_base_string_uri(request.uri) + message = construct_base_string(request.http_method, uri, norm_params).encode('utf-8') + sig = binascii.a2b_base64(request.signature.encode('utf-8')) + + alg = _jwt_rs1_signing_algorithm() + key = _prepare_key_plus(alg, rsa_public_key) + + verify_ok = alg.verify(message, key, sig) + if not verify_ok: + log.debug('Verify RSA-SHA1 failed: sig base string: %s', message) + return verify_ok + + +def verify_plaintext(request, client_secret=None, resource_owner_secret=None): + """Verify a PLAINTEXT signature. + + Per `section 3.4`_ of the spec. + + .. _`section 3.4`: https://tools.ietf.org/html/rfc5849#section-3.4 + """ + signature = sign_plaintext(client_secret, resource_owner_secret) + match = safe_string_equals(signature, request.signature) + if not match: + log.debug('Verify PLAINTEXT failed') + return match diff --git a/env/lib/python3.6/site-packages/oauthlib/oauth1/rfc5849/utils.py b/env/lib/python3.6/site-packages/oauthlib/oauth1/rfc5849/utils.py new file mode 100644 index 0000000..3762e3b --- /dev/null +++ b/env/lib/python3.6/site-packages/oauthlib/oauth1/rfc5849/utils.py @@ -0,0 +1,90 @@ +# -*- coding: utf-8 -*- +""" +oauthlib.utils +~~~~~~~~~~~~~~ + +This module contains utility methods used by various parts of the OAuth +spec. +""" +from __future__ import absolute_import, unicode_literals + +from oauthlib.common import bytes_type, quote, unicode_type, unquote + +try: + import urllib2 +except ImportError: + import urllib.request as urllib2 + + +UNICODE_ASCII_CHARACTER_SET = ('abcdefghijklmnopqrstuvwxyz' + 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' + '0123456789') + + +def filter_params(target): + """Decorator which filters params to remove non-oauth_* parameters + + Assumes the decorated method takes a params dict or list of tuples as its + first argument. + """ + def wrapper(params, *args, **kwargs): + params = filter_oauth_params(params) + return target(params, *args, **kwargs) + + wrapper.__doc__ = target.__doc__ + return wrapper + + +def filter_oauth_params(params): + """Removes all non oauth parameters from a dict or a list of params.""" + is_oauth = lambda kv: kv[0].startswith("oauth_") + if isinstance(params, dict): + return list(filter(is_oauth, list(params.items()))) + else: + return list(filter(is_oauth, params)) + + +def escape(u): + """Escape a unicode string in an OAuth-compatible fashion. + + Per `section 3.6`_ of the spec. + + .. _`section 3.6`: https://tools.ietf.org/html/rfc5849#section-3.6 + + """ + if not isinstance(u, unicode_type): + raise ValueError('Only unicode objects are escapable. ' + + 'Got %r of type %s.' % (u, type(u))) + # Letters, digits, and the characters '_.-' are already treated as safe + # by urllib.quote(). We need to add '~' to fully support rfc5849. 
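    # As an illustrative check with the standard library (not specific to
    # oauthlib), quoting with safe='~' leaves only the RFC 5849 unreserved
    # characters unescaped:
    #
    #     >>> from urllib.parse import quote
    #     >>> quote('Dogs, Cats & Mice', safe='~')
    #     'Dogs%2C%20Cats%20%26%20Mice'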
+ return quote(u, safe=b'~') + + +def unescape(u): + if not isinstance(u, unicode_type): + raise ValueError('Only unicode objects are unescapable.') + return unquote(u) + + +def parse_keqv_list(l): + """A unicode-safe version of urllib2.parse_keqv_list""" + # With Python 2.6, parse_http_list handles unicode fine + return urllib2.parse_keqv_list(l) + + +def parse_http_list(u): + """A unicode-safe version of urllib2.parse_http_list""" + # With Python 2.6, parse_http_list handles unicode fine + return urllib2.parse_http_list(u) + + +def parse_authorization_header(authorization_header): + """Parse an OAuth authorization header into a list of 2-tuples""" + auth_scheme = 'OAuth '.lower() + if authorization_header[:len(auth_scheme)].lower().startswith(auth_scheme): + items = parse_http_list(authorization_header[len(auth_scheme):]) + try: + return list(parse_keqv_list(items).items()) + except (IndexError, ValueError): + pass + raise ValueError('Malformed authorization header') diff --git a/env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/clients/backend_application.py b/env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/clients/backend_application.py new file mode 100644 index 0000000..cbad8b7 --- /dev/null +++ b/env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/clients/backend_application.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +""" +oauthlib.oauth2.rfc6749 +~~~~~~~~~~~~~~~~~~~~~~~ + +This module is an implementation of various logic needed +for consuming and providing OAuth 2.0 RFC6749. +""" +from __future__ import absolute_import, unicode_literals + +from ..parameters import parse_token_response, prepare_token_request +from .base import Client + + +class BackendApplicationClient(Client): + + """A public client utilizing the client credentials grant workflow. + + The client can request an access token using only its client + credentials (or other supported means of authentication) when the + client is requesting access to the protected resources under its + control, or those of another resource owner which has been previously + arranged with the authorization server (the method of which is beyond + the scope of this specification). + + The client credentials grant type MUST only be used by confidential + clients. + + Since the client authentication is used as the authorization grant, + no additional authorization request is needed. + """ + + def prepare_request_body(self, body='', scope=None, **kwargs): + """Add the client credentials to the request body. + + The client makes a request to the token endpoint by adding the + following parameters using the "application/x-www-form-urlencoded" + format per `Appendix B`_ in the HTTP request entity-body: + + :param scope: The scope of the access request as described by + `Section 3.3`_. + :param kwargs: Extra credentials to include in the token request. + + The client MUST authenticate with the authorization server as + described in `Section 3.2.1`_. + + The prepared body will include all provided credentials as well as + the ``grant_type`` parameter set to ``client_credentials``:: + + >>> from oauthlib.oauth2 import BackendApplicationClient + >>> client = BackendApplicationClient('your_id') + >>> client.prepare_request_body(scope=['hello', 'world']) + 'grant_type=client_credentials&scope=hello+world' + + .. _`Appendix B`: https://tools.ietf.org/html/rfc6749#appendix-B + .. _`Section 3.3`: https://tools.ietf.org/html/rfc6749#section-3.3 + .. 
_`Section 3.2.1`: https://tools.ietf.org/html/rfc6749#section-3.2.1 + """ + return prepare_token_request('client_credentials', body=body, + scope=scope, **kwargs) diff --git a/env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/clients/base.py b/env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/clients/base.py new file mode 100644 index 0000000..07ef894 --- /dev/null +++ b/env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/clients/base.py @@ -0,0 +1,499 @@ +# -*- coding: utf-8 -*- +""" +oauthlib.oauth2.rfc6749 +~~~~~~~~~~~~~~~~~~~~~~~ + +This module is an implementation of various logic needed +for consuming OAuth 2.0 RFC6749. +""" +from __future__ import absolute_import, unicode_literals + +import time +import warnings + +from oauthlib.common import generate_token +from oauthlib.oauth2.rfc6749 import tokens +from oauthlib.oauth2.rfc6749.errors import (InsecureTransportError, + TokenExpiredError) +from oauthlib.oauth2.rfc6749.parameters import (parse_token_response, + prepare_token_request, + prepare_token_revocation_request) +from oauthlib.oauth2.rfc6749.utils import is_secure_transport + +AUTH_HEADER = 'auth_header' +URI_QUERY = 'query' +BODY = 'body' + +FORM_ENC_HEADERS = { + 'Content-Type': 'application/x-www-form-urlencoded' +} + +class Client(object): + + """Base OAuth2 client responsible for access token management. + + This class also acts as a generic interface providing methods common to all + client types such as ``prepare_authorization_request`` and + ``prepare_token_revocation_request``. The ``prepare_x_request`` methods are + the recommended way of interacting with clients (as opposed to the abstract + prepare uri/body/etc methods). They are recommended over the older set + because they are easier to use (more consistent) and add a few additional + security checks, such as HTTPS and state checking. + + Some of these methods require further implementation only provided by the + specific purpose clients such as + :py:class:`oauthlib.oauth2.MobileApplicationClient` and thus you should always + seek to use the client class matching the OAuth workflow you need. For + Python, this is usually :py:class:`oauthlib.oauth2.WebApplicationClient`. + + """ + + def __init__(self, client_id, + default_token_placement=AUTH_HEADER, + token_type='Bearer', + access_token=None, + refresh_token=None, + mac_key=None, + mac_algorithm=None, + token=None, + scope=None, + state=None, + redirect_url=None, + state_generator=generate_token, + **kwargs): + """Initialize a client with commonly used attributes. + + :param client_id: Client identifier given by the OAuth provider upon + registration. + + :param default_token_placement: Tokens can be supplied in the Authorization + header (default), the URL query component (``query``) or the request + body (``body``). + + :param token_type: OAuth 2 token type. Defaults to Bearer. Change this + if you specify the ``access_token`` parameter and know it is of a + different token type, such as a MAC, JWT or SAML token. Can + also be supplied as ``token_type`` inside the ``token`` dict parameter. + + :param access_token: An access token (string) used to authenticate + requests to protected resources. Can also be supplied inside the + ``token`` dict parameter. + + :param refresh_token: A refresh token (string) used to refresh expired + tokens. Can also be supplied inside the ``token`` dict parameter. + + :param mac_key: Encryption key used with MAC tokens. + + :param mac_algorithm: Hashing algorithm for MAC tokens. 
+ + :param token: A dict of token attributes such as ``access_token``, + ``token_type`` and ``expires_at``. + + :param scope: A list of default scopes to request authorization for. + + :param state: A CSRF protection string used during authorization. + + :param redirect_url: The redirection endpoint on the client side to which + the user returns after authorization. + + :param state_generator: A no argument state generation callable. Defaults + to :py:meth:`oauthlib.common.generate_token`. + """ + + self.client_id = client_id + self.default_token_placement = default_token_placement + self.token_type = token_type + self.access_token = access_token + self.refresh_token = refresh_token + self.mac_key = mac_key + self.mac_algorithm = mac_algorithm + self.token = token or {} + self.scope = scope + self.state_generator = state_generator + self.state = state + self.redirect_url = redirect_url + self.code = None + self.expires_in = None + self._expires_at = None + self.populate_token_attributes(self.token) + + @property + def token_types(self): + """Supported token types and their respective methods + + Additional tokens can be supported by extending this dictionary. + + The Bearer token spec is stable and safe to use. + + The MAC token spec is not yet stable and support for MAC tokens + is experimental and currently matching version 00 of the spec. + """ + return { + 'Bearer': self._add_bearer_token, + 'MAC': self._add_mac_token + } + + def prepare_request_uri(self, *args, **kwargs): + """Abstract method used to create request URIs.""" + raise NotImplementedError("Must be implemented by inheriting classes.") + + def prepare_request_body(self, *args, **kwargs): + """Abstract method used to create request bodies.""" + raise NotImplementedError("Must be implemented by inheriting classes.") + + def parse_request_uri_response(self, *args, **kwargs): + """Abstract method used to parse redirection responses.""" + + def add_token(self, uri, http_method='GET', body=None, headers=None, + token_placement=None, **kwargs): + """Add token to the request uri, body or authorization header. + + The access token type provides the client with the information + required to successfully utilize the access token to make a protected + resource request (along with type-specific attributes). The client + MUST NOT use an access token if it does not understand the token + type. + + For example, the "bearer" token type defined in + [`I-D.ietf-oauth-v2-bearer`_] is utilized by simply including the access + token string in the request: + + .. code-block:: http + + GET /resource/1 HTTP/1.1 + Host: example.com + Authorization: Bearer mF_9.B5f-4.1JqM + + while the "mac" token type defined in [`I-D.ietf-oauth-v2-http-mac`_] is + utilized by issuing a MAC key together with the access token which is + used to sign certain components of the HTTP requests: + + .. code-block:: http + + GET /resource/1 HTTP/1.1 + Host: example.com + Authorization: MAC id="h480djs93hd8", + nonce="274312:dj83hs9s", + mac="kDZvddkndxvhGRXZhvuDjEWhGeE=" + + .. _`I-D.ietf-oauth-v2-bearer`: https://tools.ietf.org/html/rfc6749#section-12.2 + .. 
_`I-D.ietf-oauth-v2-http-mac`: https://tools.ietf.org/html/rfc6749#section-12.2 + """ + if not is_secure_transport(uri): + raise InsecureTransportError() + + token_placement = token_placement or self.default_token_placement + + case_insensitive_token_types = dict( + (k.lower(), v) for k, v in self.token_types.items()) + if not self.token_type.lower() in case_insensitive_token_types: + raise ValueError("Unsupported token type: %s" % self.token_type) + + if not (self.access_token or self.token.get('access_token')): + raise ValueError("Missing access token.") + + if self._expires_at and self._expires_at < time.time(): + raise TokenExpiredError() + + return case_insensitive_token_types[self.token_type.lower()](uri, http_method, body, + headers, token_placement, **kwargs) + + def prepare_authorization_request(self, authorization_url, state=None, + redirect_url=None, scope=None, **kwargs): + """Prepare the authorization request. + + This is the first step in many OAuth flows in which the user is + redirected to a certain authorization URL. This method adds + required parameters to the authorization URL. + + :param authorization_url: Provider authorization endpoint URL. + + :param state: CSRF protection string. Will be automatically created if + not provided. The generated state is available via the ``state`` + attribute. Clients should verify that the state is unchanged and + present in the authorization response. This verification is done + automatically if using the ``authorization_response`` parameter + with ``prepare_token_request``. + + :param redirect_url: Redirect URL to which the user will be returned + after authorization. Must be provided unless previously setup with + the provider. If provided then it must also be provided in the + token request. + + :param kwargs: Additional parameters to included in the request. + + :returns: The prepared request tuple with (url, headers, body). + """ + if not is_secure_transport(authorization_url): + raise InsecureTransportError() + + self.state = state or self.state_generator() + self.redirect_url = redirect_url or self.redirect_url + self.scope = scope or self.scope + auth_url = self.prepare_request_uri( + authorization_url, redirect_uri=self.redirect_url, + scope=self.scope, state=self.state, **kwargs) + return auth_url, FORM_ENC_HEADERS, '' + + def prepare_token_request(self, token_url, authorization_response=None, + redirect_url=None, state=None, body='', **kwargs): + """Prepare a token creation request. + + Note that these requests usually require client authentication, either + by including client_id or a set of provider specific authentication + credentials. + + :param token_url: Provider token creation endpoint URL. + + :param authorization_response: The full redirection URL string, i.e. + the location to which the user was redirected after successfull + authorization. Used to mine credentials needed to obtain a token + in this step, such as authorization code. + + :param redirect_url: The redirect_url supplied with the authorization + request (if there was one). + + :param body: Request body (URL encoded string). + + :param kwargs: Additional parameters to included in the request. + + :returns: The prepared request tuple with (url, headers, body). 
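For instance, with the WebApplicationClient added later in this patch, the two prepare_* helpers above chain together roughly as follows; the endpoint and callback URLs and the authorization code are placeholders::

    from oauthlib.oauth2 import WebApplicationClient

    client = WebApplicationClient('your_id')
    url, headers, body = client.prepare_authorization_request(
        'https://provider.example/authorize',
        redirect_url='https://client.example/cb', scope=['profile'])
    # ...the resource owner authorizes and is redirected back to the client...
    response = 'https://client.example/cb?code=sh35ksdf09sf&state=' + client.state
    url, headers, body = client.prepare_token_request(
        'https://provider.example/token',
        authorization_response=response,
        redirect_url='https://client.example/cb')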
+ """ + if not is_secure_transport(token_url): + raise InsecureTransportError() + + state = state or self.state + if authorization_response: + self.parse_request_uri_response( + authorization_response, state=state) + self.redirect_url = redirect_url or self.redirect_url + body = self.prepare_request_body(body=body, + redirect_uri=self.redirect_url, **kwargs) + + return token_url, FORM_ENC_HEADERS, body + + def prepare_refresh_token_request(self, token_url, refresh_token=None, + body='', scope=None, **kwargs): + """Prepare an access token refresh request. + + Expired access tokens can be replaced by new access tokens without + going through the OAuth dance if the client obtained a refresh token. + This refresh token and authentication credentials can be used to + obtain a new access token, and possibly a new refresh token. + + :param token_url: Provider token refresh endpoint URL. + + :param refresh_token: Refresh token string. + + :param body: Request body (URL encoded string). + + :param scope: List of scopes to request. Must be equal to + or a subset of the scopes granted when obtaining the refresh + token. + + :param kwargs: Additional parameters to included in the request. + + :returns: The prepared request tuple with (url, headers, body). + """ + if not is_secure_transport(token_url): + raise InsecureTransportError() + + self.scope = scope or self.scope + body = self.prepare_refresh_body(body=body, + refresh_token=refresh_token, scope=self.scope, **kwargs) + return token_url, FORM_ENC_HEADERS, body + + def prepare_token_revocation_request(self, revocation_url, token, + token_type_hint="access_token", body='', callback=None, **kwargs): + """Prepare a token revocation request. + + :param revocation_url: Provider token revocation endpoint URL. + + :param token: The access or refresh token to be revoked (string). + + :param token_type_hint: ``"access_token"`` (default) or + ``"refresh_token"``. This is optional and if you wish to not pass it you + must provide ``token_type_hint=None``. + + :param callback: A jsonp callback such as ``package.callback`` to be invoked + upon receiving the response. Not that it should not include a () suffix. + + :param kwargs: Additional parameters to included in the request. + + :returns: The prepared request tuple with (url, headers, body). + + Note that JSONP request may use GET requests as the parameters will + be added to the request URL query as opposed to the request body. + + An example of a revocation request + + .. code-block: http + + POST /revoke HTTP/1.1 + Host: server.example.com + Content-Type: application/x-www-form-urlencoded + Authorization: Basic czZCaGRSa3F0MzpnWDFmQmF0M2JW + + token=45ghiukldjahdnhzdauz&token_type_hint=refresh_token + + An example of a jsonp revocation request + + .. code-block: http + + GET /revoke?token=agabcdefddddafdd&callback=package.myCallback HTTP/1.1 + Host: server.example.com + Content-Type: application/x-www-form-urlencoded + Authorization: Basic czZCaGRSa3F0MzpnWDFmQmF0M2JW + + and an error response + + .. code-block: http + + package.myCallback({"error":"unsupported_token_type"}); + + Note that these requests usually require client credentials, client_id in + the case for public clients and provider specific authentication + credentials for confidential clients. 
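A short usage sketch of the revocation helper above; the endpoint URL and token value are placeholders::

    from oauthlib.oauth2 import WebApplicationClient

    client = WebApplicationClient('your_id')
    url, headers, body = client.prepare_token_revocation_request(
        'https://provider.example/revoke',
        '45ghiukldjahdnhzdauz',
        token_type_hint='refresh_token')
    # body is a form-encoded string carrying the token and token_type_hint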
+ """ + if not is_secure_transport(revocation_url): + raise InsecureTransportError() + + return prepare_token_revocation_request(revocation_url, token, + token_type_hint=token_type_hint, body=body, callback=callback, + **kwargs) + + def parse_request_body_response(self, body, scope=None, **kwargs): + """Parse the JSON response body. + + If the access token request is valid and authorized, the + authorization server issues an access token as described in + `Section 5.1`_. A refresh token SHOULD NOT be included. If the request + failed client authentication or is invalid, the authorization server + returns an error response as described in `Section 5.2`_. + + :param body: The response body from the token request. + :param scope: Scopes originally requested. + :return: Dictionary of token parameters. + :raises: Warning if scope has changed. OAuth2Error if response is invalid. + + These response are json encoded and could easily be parsed without + the assistance of OAuthLib. However, there are a few subtle issues + to be aware of regarding the response which are helpfully addressed + through the raising of various errors. + + A successful response should always contain + + **access_token** + The access token issued by the authorization server. Often + a random string. + + **token_type** + The type of the token issued as described in `Section 7.1`_. + Commonly ``Bearer``. + + While it is not mandated it is recommended that the provider include + + **expires_in** + The lifetime in seconds of the access token. For + example, the value "3600" denotes that the access token will + expire in one hour from the time the response was generated. + If omitted, the authorization server SHOULD provide the + expiration time via other means or document the default value. + + **scope** + Providers may supply this in all responses but are required to only + if it has changed since the authorization request. + + .. _`Section 5.1`: https://tools.ietf.org/html/rfc6749#section-5.1 + .. _`Section 5.2`: https://tools.ietf.org/html/rfc6749#section-5.2 + .. _`Section 7.1`: https://tools.ietf.org/html/rfc6749#section-7.1 + """ + self.token = parse_token_response(body, scope=scope) + self.populate_token_attributes(self.token) + return self.token + + def prepare_refresh_body(self, body='', refresh_token=None, scope=None, **kwargs): + """Prepare an access token request, using a refresh token. + + If the authorization server issued a refresh token to the client, the + client makes a refresh request to the token endpoint by adding the + following parameters using the "application/x-www-form-urlencoded" + format in the HTTP request entity-body: + + grant_type + REQUIRED. Value MUST be set to "refresh_token". + refresh_token + REQUIRED. The refresh token issued to the client. + scope + OPTIONAL. The scope of the access request as described by + Section 3.3. The requested scope MUST NOT include any scope + not originally granted by the resource owner, and if omitted is + treated as equal to the scope originally granted by the + resource owner. 
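And a matching refresh sketch for the helpers described above; the URL and token values are placeholders::

    from oauthlib.oauth2 import WebApplicationClient

    client = WebApplicationClient('your_id', refresh_token='Xkjk3sd8')
    url, headers, body = client.prepare_refresh_token_request(
        'https://provider.example/token', scope=['profile'])
    # body carries grant_type=refresh_token plus the stored refresh token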
+ """ + refresh_token = refresh_token or self.refresh_token + return prepare_token_request('refresh_token', body=body, scope=scope, + refresh_token=refresh_token, **kwargs) + + def _add_bearer_token(self, uri, http_method='GET', body=None, + headers=None, token_placement=None): + """Add a bearer token to the request uri, body or authorization header.""" + if token_placement == AUTH_HEADER: + headers = tokens.prepare_bearer_headers(self.access_token, headers) + + elif token_placement == URI_QUERY: + uri = tokens.prepare_bearer_uri(self.access_token, uri) + + elif token_placement == BODY: + body = tokens.prepare_bearer_body(self.access_token, body) + + else: + raise ValueError("Invalid token placement.") + return uri, headers, body + + def _add_mac_token(self, uri, http_method='GET', body=None, + headers=None, token_placement=AUTH_HEADER, ext=None, **kwargs): + """Add a MAC token to the request authorization header. + + Warning: MAC token support is experimental as the spec is not yet stable. + """ + headers = tokens.prepare_mac_header(self.access_token, uri, + self.mac_key, http_method, headers=headers, body=body, ext=ext, + hash_algorithm=self.mac_algorithm, **kwargs) + return uri, headers, body + + def _populate_attributes(self, response): + warnings.warn("Please switch to the public method " + "populate_token_attributes.", DeprecationWarning) + return self.populate_token_attributes(response) + + def populate_code_attributes(self, response): + """Add attributes from an auth code response to self.""" + + if 'code' in response: + self.code = response.get('code') + + def populate_token_attributes(self, response): + """Add attributes from a token exchange response to self.""" + + if 'access_token' in response: + self.access_token = response.get('access_token') + + if 'refresh_token' in response: + self.refresh_token = response.get('refresh_token') + + if 'token_type' in response: + self.token_type = response.get('token_type') + + if 'expires_in' in response: + self.expires_in = response.get('expires_in') + self._expires_at = time.time() + int(self.expires_in) + + if 'expires_at' in response: + self._expires_at = int(response.get('expires_at')) + + if 'mac_key' in response: + self.mac_key = response.get('mac_key') + + if 'mac_algorithm' in response: + self.mac_algorithm = response.get('mac_algorithm') diff --git a/env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/clients/legacy_application.py b/env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/clients/legacy_application.py new file mode 100644 index 0000000..b16fc9f --- /dev/null +++ b/env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/clients/legacy_application.py @@ -0,0 +1,72 @@ +# -*- coding: utf-8 -*- +""" +oauthlib.oauth2.rfc6749 +~~~~~~~~~~~~~~~~~~~~~~~ + +This module is an implementation of various logic needed +for consuming and providing OAuth 2.0 RFC6749. +""" +from __future__ import absolute_import, unicode_literals + +from ..parameters import parse_token_response, prepare_token_request +from .base import Client + + +class LegacyApplicationClient(Client): + + """A public client using the resource owner password and username directly. + + The resource owner password credentials grant type is suitable in + cases where the resource owner has a trust relationship with the + client, such as the device operating system or a highly privileged + application. The authorization server should take special care when + enabling this grant type, and only allow it when other flows are not + viable. 
+ + The grant type is suitable for clients capable of obtaining the + resource owner's credentials (username and password, typically using + an interactive form). It is also used to migrate existing clients + using direct authentication schemes such as HTTP Basic or Digest + authentication to OAuth by converting the stored credentials to an + access token. + + The method through which the client obtains the resource owner + credentials is beyond the scope of this specification. The client + MUST discard the credentials once an access token has been obtained. + """ + + def __init__(self, client_id, **kwargs): + super(LegacyApplicationClient, self).__init__(client_id, **kwargs) + + def prepare_request_body(self, username, password, body='', scope=None, **kwargs): + """Add the resource owner password and username to the request body. + + The client makes a request to the token endpoint by adding the + following parameters using the "application/x-www-form-urlencoded" + format per `Appendix B`_ in the HTTP request entity-body: + + :param username: The resource owner username. + :param password: The resource owner password. + :param scope: The scope of the access request as described by + `Section 3.3`_. + :param kwargs: Extra credentials to include in the token request. + + If the client type is confidential or the client was issued client + credentials (or assigned other authentication requirements), the + client MUST authenticate with the authorization server as described + in `Section 3.2.1`_. + + The prepared body will include all provided credentials as well as + the ``grant_type`` parameter set to ``password``:: + + >>> from oauthlib.oauth2 import LegacyApplicationClient + >>> client = LegacyApplicationClient('your_id') + >>> client.prepare_request_body(username='foo', password='bar', scope=['hello', 'world']) + 'grant_type=password&username=foo&scope=hello+world&password=bar' + + .. _`Appendix B`: https://tools.ietf.org/html/rfc6749#appendix-B + .. _`Section 3.3`: https://tools.ietf.org/html/rfc6749#section-3.3 + .. _`Section 3.2.1`: https://tools.ietf.org/html/rfc6749#section-3.2.1 + """ + return prepare_token_request('password', body=body, username=username, + password=password, scope=scope, **kwargs) diff --git a/env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/clients/mobile_application.py b/env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/clients/mobile_application.py new file mode 100644 index 0000000..aa20daa --- /dev/null +++ b/env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/clients/mobile_application.py @@ -0,0 +1,172 @@ +# -*- coding: utf-8 -*- +""" +oauthlib.oauth2.rfc6749 +~~~~~~~~~~~~~~~~~~~~~~~ + +This module is an implementation of various logic needed +for consuming and providing OAuth 2.0 RFC6749. +""" +from __future__ import absolute_import, unicode_literals + +from ..parameters import parse_implicit_response, prepare_grant_uri +from .base import Client + + +class MobileApplicationClient(Client): + + """A public client utilizing the implicit code grant workflow. + + A user-agent-based application is a public client in which the + client code is downloaded from a web server and executes within a + user-agent (e.g. web browser) on the device used by the resource + owner. Protocol data and credentials are easily accessible (and + often visible) to the resource owner. Since such applications + reside within the user-agent, they can make seamless use of the + user-agent capabilities when requesting authorization. 
+ + The implicit grant type is used to obtain access tokens (it does not + support the issuance of refresh tokens) and is optimized for public + clients known to operate a particular redirection URI. These clients + are typically implemented in a browser using a scripting language + such as JavaScript. + + As a redirection-based flow, the client must be capable of + interacting with the resource owner's user-agent (typically a web + browser) and capable of receiving incoming requests (via redirection) + from the authorization server. + + Unlike the authorization code grant type in which the client makes + separate requests for authorization and access token, the client + receives the access token as the result of the authorization request. + + The implicit grant type does not include client authentication, and + relies on the presence of the resource owner and the registration of + the redirection URI. Because the access token is encoded into the + redirection URI, it may be exposed to the resource owner and other + applications residing on the same device. + """ + + def prepare_request_uri(self, uri, redirect_uri=None, scope=None, + state=None, **kwargs): + """Prepare the implicit grant request URI. + + The client constructs the request URI by adding the following + parameters to the query component of the authorization endpoint URI + using the "application/x-www-form-urlencoded" format, per `Appendix B`_: + + :param redirect_uri: OPTIONAL. The redirect URI must be an absolute URI + and it should have been registerd with the OAuth + provider prior to use. As described in `Section 3.1.2`_. + + :param scope: OPTIONAL. The scope of the access request as described by + Section 3.3`_. These may be any string but are commonly + URIs or various categories such as ``videos`` or ``documents``. + + :param state: RECOMMENDED. An opaque value used by the client to maintain + state between the request and callback. The authorization + server includes this value when redirecting the user-agent back + to the client. The parameter SHOULD be used for preventing + cross-site request forgery as described in `Section 10.12`_. + + :param kwargs: Extra arguments to include in the request URI. + + In addition to supplied parameters, OAuthLib will append the ``client_id`` + that was provided in the constructor as well as the mandatory ``response_type`` + argument, set to ``token``:: + + >>> from oauthlib.oauth2 import MobileApplicationClient + >>> client = MobileApplicationClient('your_id') + >>> client.prepare_request_uri('https://example.com') + 'https://example.com?client_id=your_id&response_type=token' + >>> client.prepare_request_uri('https://example.com', redirect_uri='https://a.b/callback') + 'https://example.com?client_id=your_id&response_type=token&redirect_uri=https%3A%2F%2Fa.b%2Fcallback' + >>> client.prepare_request_uri('https://example.com', scope=['profile', 'pictures']) + 'https://example.com?client_id=your_id&response_type=token&scope=profile+pictures' + >>> client.prepare_request_uri('https://example.com', foo='bar') + 'https://example.com?client_id=your_id&response_type=token&foo=bar' + + .. _`Appendix B`: https://tools.ietf.org/html/rfc6749#appendix-B + .. _`Section 2.2`: https://tools.ietf.org/html/rfc6749#section-2.2 + .. _`Section 3.1.2`: https://tools.ietf.org/html/rfc6749#section-3.1.2 + .. _`Section 3.3`: https://tools.ietf.org/html/rfc6749#section-3.3 + .. 
_`Section 10.12`: https://tools.ietf.org/html/rfc6749#section-10.12 + """ + return prepare_grant_uri(uri, self.client_id, 'token', + redirect_uri=redirect_uri, state=state, scope=scope, **kwargs) + + def parse_request_uri_response(self, uri, state=None, scope=None): + """Parse the response URI fragment. + + If the resource owner grants the access request, the authorization + server issues an access token and delivers it to the client by adding + the following parameters to the fragment component of the redirection + URI using the "application/x-www-form-urlencoded" format: + + :param uri: The callback URI that resulted from the user being redirected + back from the provider to you, the client. + :param state: The state provided in the authorization request. + :param scope: The scopes provided in the authorization request. + :return: Dictionary of token parameters. + :raises: OAuth2Error if response is invalid. + + A successful response should always contain + + **access_token** + The access token issued by the authorization server. Often + a random string. + + **token_type** + The type of the token issued as described in `Section 7.1`_. + Commonly ``Bearer``. + + **state** + If you provided the state parameter in the authorization phase, then + the provider is required to include that exact state value in the + response. + + While it is not mandated it is recommended that the provider include + + **expires_in** + The lifetime in seconds of the access token. For + example, the value "3600" denotes that the access token will + expire in one hour from the time the response was generated. + If omitted, the authorization server SHOULD provide the + expiration time via other means or document the default value. + + **scope** + Providers may supply this in all responses but are required to only + if it has changed since the authorization request. + + A few example responses can be seen below:: + + >>> response_uri = 'https://example.com/callback#access_token=sdlfkj452&state=ss345asyht&token_type=Bearer&scope=hello+world' + >>> from oauthlib.oauth2 import MobileApplicationClient + >>> client = MobileApplicationClient('your_id') + >>> client.parse_request_uri_response(response_uri) + { + 'access_token': 'sdlfkj452', + 'token_type': 'Bearer', + 'state': 'ss345asyht', + 'scope': [u'hello', u'world'] + } + >>> client.parse_request_uri_response(response_uri, state='other') + Traceback (most recent call last): + File "", line 1, in + File "oauthlib/oauth2/rfc6749/__init__.py", line 598, in parse_request_uri_response + **scope** + File "oauthlib/oauth2/rfc6749/parameters.py", line 197, in parse_implicit_response + raise ValueError("Mismatching or missing state in params.") + ValueError: Mismatching or missing state in params. + >>> def alert_scope_changed(message, old, new): + ... print(message, old, new) + ... + >>> oauthlib.signals.scope_changed.connect(alert_scope_changed) + >>> client.parse_request_body_response(response_body, scope=['other']) + ('Scope has changed from "other" to "hello world".', ['other'], ['hello', 'world']) + + .. _`Section 7.1`: https://tools.ietf.org/html/rfc6749#section-7.1 + .. 
_`Section 3.3`: https://tools.ietf.org/html/rfc6749#section-3.3 + """ + self.token = parse_implicit_response(uri, state=state, scope=scope) + self.populate_token_attributes(self.token) + return self.token diff --git a/env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/clients/service_application.py b/env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/clients/service_application.py new file mode 100644 index 0000000..7f336bb --- /dev/null +++ b/env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/clients/service_application.py @@ -0,0 +1,175 @@ +# -*- coding: utf-8 -*- +""" +oauthlib.oauth2.rfc6749 +~~~~~~~~~~~~~~~~~~~~~~~ + +This module is an implementation of various logic needed +for consuming and providing OAuth 2.0 RFC6749. +""" +from __future__ import absolute_import, unicode_literals + +import time + +from oauthlib.common import to_unicode + +from ..parameters import parse_token_response, prepare_token_request +from .base import Client + + +class ServiceApplicationClient(Client): + """A public client utilizing the JWT bearer grant. + + JWT bearer tokes can be used to request an access token when a client + wishes to utilize an existing trust relationship, expressed through the + semantics of (and digital signature or keyed message digest calculated + over) the JWT, without a direct user approval step at the authorization + server. + + This grant type does not involve an authorization step. It may be + used by both public and confidential clients. + """ + + grant_type = 'urn:ietf:params:oauth:grant-type:jwt-bearer' + + def __init__(self, client_id, private_key=None, subject=None, issuer=None, + audience=None, **kwargs): + """Initalize a JWT client with defaults for implicit use later. + + :param client_id: Client identifier given by the OAuth provider upon + registration. + + :param private_key: Private key used for signing and encrypting. + Must be given as a string. + + :param subject: The principal that is the subject of the JWT, i.e. + which user is the token requested on behalf of. + For example, ``foo@example.com. + + :param issuer: The JWT MUST contain an "iss" (issuer) claim that + contains a unique identifier for the entity that issued + the JWT. For example, ``your-client@provider.com``. + + :param audience: A value identifying the authorization server as an + intended audience, e.g. + ``https://provider.com/oauth2/token``. + + :param kwargs: Additional arguments to pass to base client, such as + state and token. See Client.__init__.__doc__ for + details. + """ + super(ServiceApplicationClient, self).__init__(client_id, **kwargs) + self.private_key = private_key + self.subject = subject + self.issuer = issuer + self.audience = audience + + def prepare_request_body(self, + private_key=None, + subject=None, + issuer=None, + audience=None, + expires_at=None, + issued_at=None, + extra_claims=None, + body='', + scope=None, + **kwargs): + """Create and add a JWT assertion to the request body. + + :param private_key: Private key used for signing and encrypting. + Must be given as a string. + + :param subject: (sub) The principal that is the subject of the JWT, + i.e. which user is the token requested on behalf of. + For example, ``foo@example.com. + + :param issuer: (iss) The JWT MUST contain an "iss" (issuer) claim that + contains a unique identifier for the entity that issued + the JWT. For example, ``your-client@provider.com``. + + :param audience: (aud) A value identifying the authorization server as an + intended audience, e.g. 
+ ``https://provider.com/oauth2/token``. + + :param expires_at: A unix expiration timestamp for the JWT. Defaults + to an hour from now, i.e. ``time.time() + 3600``. + + :param issued_at: A unix timestamp of when the JWT was created. + Defaults to now, i.e. ``time.time()``. + + :param not_before: A unix timestamp after which the JWT may be used. + Not included unless provided. + + :param jwt_id: A unique JWT token identifier. Not included unless + provided. + + :param extra_claims: A dict of additional claims to include in the JWT. + + :param scope: The scope of the access request. + + :param body: Request body (string) with extra parameters. + + :param kwargs: Extra credentials to include in the token request. + + The "scope" parameter may be used, as defined in the Assertion + Framework for OAuth 2.0 Client Authentication and Authorization Grants + [I-D.ietf-oauth-assertions] specification, to indicate the requested + scope. + + Authentication of the client is optional, as described in + `Section 3.2.1`_ of OAuth 2.0 [RFC6749] and consequently, the + "client_id" is only needed when a form of client authentication that + relies on the parameter is used. + + The following non-normative example demonstrates an Access Token + Request with a JWT as an authorization grant (with extra line breaks + for display purposes only): + + .. code-block: http + + POST /token.oauth2 HTTP/1.1 + Host: as.example.com + Content-Type: application/x-www-form-urlencoded + + grant_type=urn%3Aietf%3Aparams%3Aoauth%3Agrant-type%3Ajwt-bearer + &assertion=eyJhbGciOiJFUzI1NiJ9. + eyJpc3Mi[...omitted for brevity...]. + J9l-ZhwP[...omitted for brevity...] + + .. _`Section 3.2.1`: https://tools.ietf.org/html/rfc6749#section-3.2.1 + """ + import jwt + + key = private_key or self.private_key + if not key: + raise ValueError('An encryption key must be supplied to make JWT' + ' token requests.') + claim = { + 'iss': issuer or self.issuer, + 'aud': audience or self.audience, + 'sub': subject or self.subject, + 'exp': int(expires_at or time.time() + 3600), + 'iat': int(issued_at or time.time()), + } + + for attr in ('iss', 'aud', 'sub'): + if claim[attr] is None: + raise ValueError( + 'Claim must include %s but none was given.' % attr) + + if 'not_before' in kwargs: + claim['nbf'] = kwargs.pop('not_before') + + if 'jwt_id' in kwargs: + claim['jti'] = kwargs.pop('jwt_id') + + claim.update(extra_claims or {}) + + assertion = jwt.encode(claim, key, 'RS256') + assertion = to_unicode(assertion) + + return prepare_token_request(self.grant_type, + body=body, + assertion=assertion, + scope=scope, + **kwargs) diff --git a/env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/clients/web_application.py b/env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/clients/web_application.py new file mode 100644 index 0000000..25280bf --- /dev/null +++ b/env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/clients/web_application.py @@ -0,0 +1,176 @@ +# -*- coding: utf-8 -*- +""" +oauthlib.oauth2.rfc6749 +~~~~~~~~~~~~~~~~~~~~~~~ + +This module is an implementation of various logic needed +for consuming and providing OAuth 2.0 RFC6749. +""" +from __future__ import absolute_import, unicode_literals + +from ..parameters import (parse_authorization_code_response, + parse_token_response, prepare_grant_uri, + prepare_token_request) +from .base import Client + + +class WebApplicationClient(Client): + + """A client utilizing the authorization code grant workflow. + + A web application is a confidential client running on a web + server. 
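Returning to the JWT-bearer flow implemented by ServiceApplicationClient above, a rough end-to-end sketch; it assumes the optional PyJWT and cryptography packages are available, and all identifiers and URLs are placeholders::

    import time

    from cryptography.hazmat.primitives import serialization
    from cryptography.hazmat.primitives.asymmetric import rsa
    from oauthlib.oauth2 import ServiceApplicationClient

    key = rsa.generate_private_key(public_exponent=65537, key_size=2048)
    pem = key.private_bytes(
        serialization.Encoding.PEM,
        serialization.PrivateFormat.PKCS8,
        serialization.NoEncryption()).decode('utf-8')

    client = ServiceApplicationClient(
        'your_id', private_key=pem, subject='user@example.com',
        issuer='your_id', audience='https://provider.example/oauth2/token')
    body = client.prepare_request_body(issued_at=time.time())
    # body -> 'grant_type=urn%3A...%3Ajwt-bearer&assertion=<signed JWT>'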
Resource owners access the client via an HTML user + interface rendered in a user-agent on the device used by the + resource owner. The client credentials as well as any access + token issued to the client are stored on the web server and are + not exposed to or accessible by the resource owner. + + The authorization code grant type is used to obtain both access + tokens and refresh tokens and is optimized for confidential clients. + As a redirection-based flow, the client must be capable of + interacting with the resource owner's user-agent (typically a web + browser) and capable of receiving incoming requests (via redirection) + from the authorization server. + """ + + def __init__(self, client_id, code=None, **kwargs): + super(WebApplicationClient, self).__init__(client_id, **kwargs) + self.code = code + + def prepare_request_uri(self, uri, redirect_uri=None, scope=None, + state=None, **kwargs): + """Prepare the authorization code request URI + + The client constructs the request URI by adding the following + parameters to the query component of the authorization endpoint URI + using the "application/x-www-form-urlencoded" format, per `Appendix B`_: + + :param redirect_uri: OPTIONAL. The redirect URI must be an absolute URI + and it should have been registerd with the OAuth + provider prior to use. As described in `Section 3.1.2`_. + + :param scope: OPTIONAL. The scope of the access request as described by + Section 3.3`_. These may be any string but are commonly + URIs or various categories such as ``videos`` or ``documents``. + + :param state: RECOMMENDED. An opaque value used by the client to maintain + state between the request and callback. The authorization + server includes this value when redirecting the user-agent back + to the client. The parameter SHOULD be used for preventing + cross-site request forgery as described in `Section 10.12`_. + + :param kwargs: Extra arguments to include in the request URI. + + In addition to supplied parameters, OAuthLib will append the ``client_id`` + that was provided in the constructor as well as the mandatory ``response_type`` + argument, set to ``code``:: + + >>> from oauthlib.oauth2 import WebApplicationClient + >>> client = WebApplicationClient('your_id') + >>> client.prepare_request_uri('https://example.com') + 'https://example.com?client_id=your_id&response_type=code' + >>> client.prepare_request_uri('https://example.com', redirect_uri='https://a.b/callback') + 'https://example.com?client_id=your_id&response_type=code&redirect_uri=https%3A%2F%2Fa.b%2Fcallback' + >>> client.prepare_request_uri('https://example.com', scope=['profile', 'pictures']) + 'https://example.com?client_id=your_id&response_type=code&scope=profile+pictures' + >>> client.prepare_request_uri('https://example.com', foo='bar') + 'https://example.com?client_id=your_id&response_type=code&foo=bar' + + .. _`Appendix B`: https://tools.ietf.org/html/rfc6749#appendix-B + .. _`Section 2.2`: https://tools.ietf.org/html/rfc6749#section-2.2 + .. _`Section 3.1.2`: https://tools.ietf.org/html/rfc6749#section-3.1.2 + .. _`Section 3.3`: https://tools.ietf.org/html/rfc6749#section-3.3 + .. _`Section 10.12`: https://tools.ietf.org/html/rfc6749#section-10.12 + """ + return prepare_grant_uri(uri, self.client_id, 'code', + redirect_uri=redirect_uri, scope=scope, state=state, **kwargs) + + def prepare_request_body(self, client_id=None, code=None, body='', + redirect_uri=None, **kwargs): + """Prepare the access token request body. 
+ + The client makes a request to the token endpoint by adding the + following parameters using the "application/x-www-form-urlencoded" + format in the HTTP request entity-body: + + :param client_id: REQUIRED, if the client is not authenticating with the + authorization server as described in `Section 3.2.1`_. + + :param code: REQUIRED. The authorization code received from the + authorization server. + + :param redirect_uri: REQUIRED, if the "redirect_uri" parameter was included in the + authorization request as described in `Section 4.1.1`_, and their + values MUST be identical. + + :param kwargs: Extra parameters to include in the token request. + + In addition OAuthLib will add the ``grant_type`` parameter set to + ``authorization_code``. + + If the client type is confidential or the client was issued client + credentials (or assigned other authentication requirements), the + client MUST authenticate with the authorization server as described + in `Section 3.2.1`_:: + + >>> from oauthlib.oauth2 import WebApplicationClient + >>> client = WebApplicationClient('your_id') + >>> client.prepare_request_body(code='sh35ksdf09sf') + 'grant_type=authorization_code&code=sh35ksdf09sf' + >>> client.prepare_request_body(code='sh35ksdf09sf', foo='bar') + 'grant_type=authorization_code&code=sh35ksdf09sf&foo=bar' + + .. _`Section 4.1.1`: https://tools.ietf.org/html/rfc6749#section-4.1.1 + .. _`Section 3.2.1`: https://tools.ietf.org/html/rfc6749#section-3.2.1 + """ + code = code or self.code + return prepare_token_request('authorization_code', code=code, body=body, + client_id=self.client_id, redirect_uri=redirect_uri, **kwargs) + + def parse_request_uri_response(self, uri, state=None): + """Parse the URI query for code and state. + + If the resource owner grants the access request, the authorization + server issues an authorization code and delivers it to the client by + adding the following parameters to the query component of the + redirection URI using the "application/x-www-form-urlencoded" format: + + :param uri: The callback URI that resulted from the user being redirected + back from the provider to you, the client. + :param state: The state provided in the authorization request. + + **code** + The authorization code generated by the authorization server. + The authorization code MUST expire shortly after it is issued + to mitigate the risk of leaks. A maximum authorization code + lifetime of 10 minutes is RECOMMENDED. The client MUST NOT + use the authorization code more than once. If an authorization + code is used more than once, the authorization server MUST deny + the request and SHOULD revoke (when possible) all tokens + previously issued based on that authorization code. + The authorization code is bound to the client identifier and + redirection URI. + + **state** + If the "state" parameter was present in the authorization request. 
+ + This method is mainly intended to enforce strict state checking with + the added benefit of easily extracting parameters from the URI:: + + >>> from oauthlib.oauth2 import WebApplicationClient + >>> client = WebApplicationClient('your_id') + >>> uri = 'https://example.com/callback?code=sdfkjh345&state=sfetw45' + >>> client.parse_request_uri_response(uri, state='sfetw45') + {'state': 'sfetw45', 'code': 'sdfkjh345'} + >>> client.parse_request_uri_response(uri, state='other') + Traceback (most recent call last): + File "", line 1, in + File "oauthlib/oauth2/rfc6749/__init__.py", line 357, in parse_request_uri_response + back from the provider to you, the client. + File "oauthlib/oauth2/rfc6749/parameters.py", line 153, in parse_authorization_code_response + raise MismatchingStateError() + oauthlib.oauth2.rfc6749.errors.MismatchingStateError + """ + response = parse_authorization_code_response(uri, state=state) + self.populate_code_attributes(response) + return response diff --git a/env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/endpoints/authorization.py b/env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/endpoints/authorization.py new file mode 100644 index 0000000..92cde34 --- /dev/null +++ b/env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/endpoints/authorization.py @@ -0,0 +1,117 @@ +# -*- coding: utf-8 -*- +""" +oauthlib.oauth2.rfc6749 +~~~~~~~~~~~~~~~~~~~~~~~ + +This module is an implementation of various logic needed +for consuming and providing OAuth 2.0 RFC6749. +""" +from __future__ import absolute_import, unicode_literals + +import logging + +from oauthlib.common import Request +from oauthlib.oauth2.rfc6749 import utils + +from .base import BaseEndpoint, catch_errors_and_unavailability + +log = logging.getLogger(__name__) + + +class AuthorizationEndpoint(BaseEndpoint): + + """Authorization endpoint - used by the client to obtain authorization + from the resource owner via user-agent redirection. + + The authorization endpoint is used to interact with the resource + owner and obtain an authorization grant. The authorization server + MUST first verify the identity of the resource owner. The way in + which the authorization server authenticates the resource owner (e.g. + username and password login, session cookies) is beyond the scope of + this specification. + + The endpoint URI MAY include an "application/x-www-form-urlencoded" + formatted (per `Appendix B`_) query component, + which MUST be retained when adding additional query parameters. The + endpoint URI MUST NOT include a fragment component:: + + https://example.com/path?query=component # OK + https://example.com/path?query=component#fragment # Not OK + + Since requests to the authorization endpoint result in user + authentication and the transmission of clear-text credentials (in the + HTTP response), the authorization server MUST require the use of TLS + as described in Section 1.6 when sending requests to the + authorization endpoint:: + + # We will deny any request which URI schema is not with https + + The authorization server MUST support the use of the HTTP "GET" + method [RFC2616] for the authorization endpoint, and MAY support the + use of the "POST" method as well:: + + # HTTP method is currently not enforced + + Parameters sent without a value MUST be treated as if they were + omitted from the request. The authorization server MUST ignore + unrecognized request parameters. 
Request and response parameters + MUST NOT be included more than once:: + + # Enforced through the design of oauthlib.common.Request + + .. _`Appendix B`: https://tools.ietf.org/html/rfc6749#appendix-B + """ + + def __init__(self, default_response_type, default_token_type, + response_types): + BaseEndpoint.__init__(self) + self._response_types = response_types + self._default_response_type = default_response_type + self._default_token_type = default_token_type + + @property + def response_types(self): + return self._response_types + + @property + def default_response_type(self): + return self._default_response_type + + @property + def default_response_type_handler(self): + return self.response_types.get(self.default_response_type) + + @property + def default_token_type(self): + return self._default_token_type + + @catch_errors_and_unavailability + def create_authorization_response(self, uri, http_method='GET', body=None, + headers=None, scopes=None, credentials=None): + """Extract response_type and route to the designated handler.""" + request = Request( + uri, http_method=http_method, body=body, headers=headers) + request.scopes = scopes + # TODO: decide whether this should be a required argument + request.user = None # TODO: explain this in docs + for k, v in (credentials or {}).items(): + setattr(request, k, v) + response_type_handler = self.response_types.get( + request.response_type, self.default_response_type_handler) + log.debug('Dispatching response_type %s request to %r.', + request.response_type, response_type_handler) + return response_type_handler.create_authorization_response( + request, self.default_token_type) + + @catch_errors_and_unavailability + def validate_authorization_request(self, uri, http_method='GET', body=None, + headers=None): + """Extract response_type and route to the designated handler.""" + request = Request( + uri, http_method=http_method, body=body, headers=headers) + + request.scopes = utils.scope_to_list(request.scope) + + response_type_handler = self.response_types.get( + request.response_type, self.default_response_type_handler) + return response_type_handler.validate_authorization_request(request) diff --git a/env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/endpoints/base.py b/env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/endpoints/base.py new file mode 100644 index 0000000..cdb015f --- /dev/null +++ b/env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/endpoints/base.py @@ -0,0 +1,65 @@ +# -*- coding: utf-8 -*- +""" +oauthlib.oauth2.rfc6749 +~~~~~~~~~~~~~~~~~~~~~~~ + +This module is an implementation of various logic needed +for consuming and providing OAuth 2.0 RFC6749. 
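A sketch of how this endpoint is typically driven from a web framework view. The your_validator import is hypothetical (as in the doctests above), the URIs are placeholders, and AuthorizationCodeGrant and BearerToken are the grant and token types added later in this patch.

    from oauthlib.oauth2 import AuthorizationCodeGrant, BearerToken
    from oauthlib.oauth2.rfc6749.endpoints.authorization import AuthorizationEndpoint
    from your_validator import your_validator  # hypothetical validator instance

    bearer = BearerToken(your_validator)
    endpoint = AuthorizationEndpoint(
        default_response_type='code',
        default_token_type=bearer,
        response_types={'code': AuthorizationCodeGrant(your_validator)})

    uri = ('https://provider.example/authorize?client_id=valid'
           '&response_type=code&redirect_uri=https%3A%2F%2Fclient.example%2Fcb')

    # Before showing a consent page: check the request and remember what was asked.
    scopes, request_info = endpoint.validate_authorization_request(uri)

    # After the resource owner approves: issue the redirect carrying the code.
    request_info.pop('request', None)  # keep only the plain credential values
    headers, body, status = endpoint.create_authorization_response(
        uri, scopes=scopes, credentials=request_info)
    # headers['Location'] now carries ?code=...&state=... back to the client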
+""" +from __future__ import absolute_import, unicode_literals + +import functools +import logging + +from ..errors import (FatalClientError, OAuth2Error, ServerError, + TemporarilyUnavailableError) + +log = logging.getLogger(__name__) + + +class BaseEndpoint(object): + + def __init__(self): + self._available = True + self._catch_errors = False + + @property + def available(self): + return self._available + + @available.setter + def available(self, available): + self._available = available + + @property + def catch_errors(self): + return self._catch_errors + + @catch_errors.setter + def catch_errors(self, catch_errors): + self._catch_errors = catch_errors + + +def catch_errors_and_unavailability(f): + @functools.wraps(f) + def wrapper(endpoint, uri, *args, **kwargs): + if not endpoint.available: + e = TemporarilyUnavailableError() + log.info('Endpoint unavailable, ignoring request %s.' % uri) + return {}, e.json, 503 + + if endpoint.catch_errors: + try: + return f(endpoint, uri, *args, **kwargs) + except OAuth2Error: + raise + except FatalClientError: + raise + except Exception as e: + error = ServerError() + log.warning( + 'Exception caught while processing request, %s.' % e) + return {}, error.json, 500 + else: + return f(endpoint, uri, *args, **kwargs) + return wrapper diff --git a/env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/endpoints/pre_configured.py b/env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/endpoints/pre_configured.py new file mode 100644 index 0000000..6428b8d --- /dev/null +++ b/env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/endpoints/pre_configured.py @@ -0,0 +1,223 @@ +# -*- coding: utf-8 -*- +""" +oauthlib.oauth2.rfc6749 +~~~~~~~~~~~~~~~~~~~~~~~ + +This module is an implementation of various logic needed +for consuming and providing OAuth 2.0 RFC6749. +""" +from __future__ import absolute_import, unicode_literals + +from ..grant_types import (AuthCodeGrantDispatcher, AuthorizationCodeGrant, + ClientCredentialsGrant, ImplicitGrant, + OpenIDConnectAuthCode, OpenIDConnectImplicit, + RefreshTokenGrant, + ResourceOwnerPasswordCredentialsGrant) +from ..tokens import BearerToken +from .authorization import AuthorizationEndpoint +from .resource import ResourceEndpoint +from .revocation import RevocationEndpoint +from .token import TokenEndpoint + + +class Server(AuthorizationEndpoint, TokenEndpoint, ResourceEndpoint, + RevocationEndpoint): + + """An all-in-one endpoint featuring all four major grant types.""" + + def __init__(self, request_validator, token_expires_in=None, + token_generator=None, refresh_token_generator=None, + *args, **kwargs): + """Construct a new all-grants-in-one server. + + :param request_validator: An implementation of + oauthlib.oauth2.RequestValidator. + :param token_expires_in: An int or a function to generate a token + expiration offset (in seconds) given a + oauthlib.common.Request object. + :param token_generator: A function to generate a token from a request. + :param refresh_token_generator: A function to generate a token from a + request for the refresh token. + :param kwargs: Extra parameters to pass to authorization-, + token-, resource-, and revocation-endpoint constructors. 
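The two flags above are the whole public surface of BaseEndpoint, and the decorator gives every composed endpoint a uniform failure mode. A small illustrative sketch; the PingEndpoint class is made up for demonstration only.

    from oauthlib.oauth2.rfc6749.endpoints.base import (
        BaseEndpoint, catch_errors_and_unavailability)

    class PingEndpoint(BaseEndpoint):  # hypothetical, for illustration only
        @catch_errors_and_unavailability
        def ping(self, uri):
            return {}, 'pong', 200

    endpoint = PingEndpoint()
    endpoint.ping('https://provider.example/ping')   # ({}, 'pong', 200)

    endpoint.available = False                       # maintenance switch
    endpoint.ping('https://provider.example/ping')   # ({}, temporarily_unavailable json, 503)

    endpoint.available = True
    endpoint.catch_errors = True   # unexpected exceptions become server_error / 500
                                   # instead of propagating to the web framework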
+ """ + auth_grant = AuthorizationCodeGrant(request_validator) + implicit_grant = ImplicitGrant(request_validator) + password_grant = ResourceOwnerPasswordCredentialsGrant( + request_validator) + credentials_grant = ClientCredentialsGrant(request_validator) + refresh_grant = RefreshTokenGrant(request_validator) + openid_connect_auth = OpenIDConnectAuthCode(request_validator) + openid_connect_implicit = OpenIDConnectImplicit(request_validator) + + bearer = BearerToken(request_validator, token_generator, + token_expires_in, refresh_token_generator) + + auth_grant_choice = AuthCodeGrantDispatcher( default_auth_grant=auth_grant, oidc_auth_grant=openid_connect_auth) + + # See http://openid.net/specs/oauth-v2-multiple-response-types-1_0.html#Combinations for valid combinations + # internally our AuthorizationEndpoint will ensure they can appear in any order for any valid combination + AuthorizationEndpoint.__init__(self, default_response_type='code', + response_types={ + 'code': auth_grant_choice, + 'token': implicit_grant, + 'id_token': openid_connect_implicit, + 'id_token token': openid_connect_implicit, + 'code token': openid_connect_auth, + 'code id_token': openid_connect_auth, + 'code token id_token': openid_connect_auth, + 'none': auth_grant + }, + default_token_type=bearer) + TokenEndpoint.__init__(self, default_grant_type='authorization_code', + grant_types={ + 'authorization_code': auth_grant, + 'password': password_grant, + 'client_credentials': credentials_grant, + 'refresh_token': refresh_grant, + 'openid': openid_connect_auth + }, + default_token_type=bearer) + ResourceEndpoint.__init__(self, default_token='Bearer', + token_types={'Bearer': bearer}) + RevocationEndpoint.__init__(self, request_validator) + + +class WebApplicationServer(AuthorizationEndpoint, TokenEndpoint, ResourceEndpoint, + RevocationEndpoint): + + """An all-in-one endpoint featuring Authorization code grant and Bearer tokens.""" + + def __init__(self, request_validator, token_generator=None, + token_expires_in=None, refresh_token_generator=None, **kwargs): + """Construct a new web application server. + + :param request_validator: An implementation of + oauthlib.oauth2.RequestValidator. + :param token_expires_in: An int or a function to generate a token + expiration offset (in seconds) given a + oauthlib.common.Request object. + :param token_generator: A function to generate a token from a request. + :param refresh_token_generator: A function to generate a token from a + request for the refresh token. + :param kwargs: Extra parameters to pass to authorization-, + token-, resource-, and revocation-endpoint constructors. 
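A construction sketch for the all-in-one Server showing the shape of the optional generator arguments; the validator is hypothetical, the URLs and lifetime policy are only examples, and client credentials are elided.

    from oauthlib.common import generate_token
    from oauthlib.oauth2 import Server
    from your_validator import your_validator  # hypothetical validator instance

    def expires_in(request):
        # token_expires_in may be a plain int, or a callable receiving the
        # oauthlib.common.Request and returning a lifetime in seconds.
        return 300 if request.grant_type == 'client_credentials' else 3600

    server = Server(
        your_validator,
        token_expires_in=expires_in,
        token_generator=lambda request: generate_token(length=40),
        refresh_token_generator=lambda request: generate_token(length=40))

    # The same instance now answers for all four endpoint roles, e.g. token issuing
    # (client credentials would normally arrive in an Authorization header or body):
    headers, body, status = server.create_token_response(
        'https://provider.example/token',
        http_method='POST',
        body='grant_type=authorization_code&code=sdfkjh345'
             '&redirect_uri=https%3A%2F%2Fclient.example%2Fcb',
        headers={'Content-Type': 'application/x-www-form-urlencoded'})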
+ """ + auth_grant = AuthorizationCodeGrant(request_validator) + refresh_grant = RefreshTokenGrant(request_validator) + bearer = BearerToken(request_validator, token_generator, + token_expires_in, refresh_token_generator) + AuthorizationEndpoint.__init__(self, default_response_type='code', + response_types={'code': auth_grant}, + default_token_type=bearer) + TokenEndpoint.__init__(self, default_grant_type='authorization_code', + grant_types={ + 'authorization_code': auth_grant, + 'refresh_token': refresh_grant, + }, + default_token_type=bearer) + ResourceEndpoint.__init__(self, default_token='Bearer', + token_types={'Bearer': bearer}) + RevocationEndpoint.__init__(self, request_validator) + + +class MobileApplicationServer(AuthorizationEndpoint, ResourceEndpoint, + RevocationEndpoint): + + """An all-in-one endpoint featuring Implicit code grant and Bearer tokens.""" + + def __init__(self, request_validator, token_generator=None, + token_expires_in=None, refresh_token_generator=None, **kwargs): + """Construct a new implicit grant server. + + :param request_validator: An implementation of + oauthlib.oauth2.RequestValidator. + :param token_expires_in: An int or a function to generate a token + expiration offset (in seconds) given a + oauthlib.common.Request object. + :param token_generator: A function to generate a token from a request. + :param refresh_token_generator: A function to generate a token from a + request for the refresh token. + :param kwargs: Extra parameters to pass to authorization-, + token-, resource-, and revocation-endpoint constructors. + """ + implicit_grant = ImplicitGrant(request_validator) + bearer = BearerToken(request_validator, token_generator, + token_expires_in, refresh_token_generator) + AuthorizationEndpoint.__init__(self, default_response_type='token', + response_types={ + 'token': implicit_grant}, + default_token_type=bearer) + ResourceEndpoint.__init__(self, default_token='Bearer', + token_types={'Bearer': bearer}) + RevocationEndpoint.__init__(self, request_validator, + supported_token_types=['access_token']) + + +class LegacyApplicationServer(TokenEndpoint, ResourceEndpoint, + RevocationEndpoint): + + """An all-in-one endpoint featuring Resource Owner Password Credentials grant and Bearer tokens.""" + + def __init__(self, request_validator, token_generator=None, + token_expires_in=None, refresh_token_generator=None, **kwargs): + """Construct a resource owner password credentials grant server. + + :param request_validator: An implementation of + oauthlib.oauth2.RequestValidator. + :param token_expires_in: An int or a function to generate a token + expiration offset (in seconds) given a + oauthlib.common.Request object. + :param token_generator: A function to generate a token from a request. + :param refresh_token_generator: A function to generate a token from a + request for the refresh token. + :param kwargs: Extra parameters to pass to authorization-, + token-, resource-, and revocation-endpoint constructors. 
+ """ + password_grant = ResourceOwnerPasswordCredentialsGrant( + request_validator) + refresh_grant = RefreshTokenGrant(request_validator) + bearer = BearerToken(request_validator, token_generator, + token_expires_in, refresh_token_generator) + TokenEndpoint.__init__(self, default_grant_type='password', + grant_types={ + 'password': password_grant, + 'refresh_token': refresh_grant, + }, + default_token_type=bearer) + ResourceEndpoint.__init__(self, default_token='Bearer', + token_types={'Bearer': bearer}) + RevocationEndpoint.__init__(self, request_validator) + + +class BackendApplicationServer(TokenEndpoint, ResourceEndpoint, + RevocationEndpoint): + + """An all-in-one endpoint featuring Client Credentials grant and Bearer tokens.""" + + def __init__(self, request_validator, token_generator=None, + token_expires_in=None, refresh_token_generator=None, **kwargs): + """Construct a client credentials grant server. + + :param request_validator: An implementation of + oauthlib.oauth2.RequestValidator. + :param token_expires_in: An int or a function to generate a token + expiration offset (in seconds) given a + oauthlib.common.Request object. + :param token_generator: A function to generate a token from a request. + :param refresh_token_generator: A function to generate a token from a + request for the refresh token. + :param kwargs: Extra parameters to pass to authorization-, + token-, resource-, and revocation-endpoint constructors. + """ + credentials_grant = ClientCredentialsGrant(request_validator) + bearer = BearerToken(request_validator, token_generator, + token_expires_in, refresh_token_generator) + TokenEndpoint.__init__(self, default_grant_type='client_credentials', + grant_types={ + 'client_credentials': credentials_grant}, + default_token_type=bearer) + ResourceEndpoint.__init__(self, default_token='Bearer', + token_types={'Bearer': bearer}) + RevocationEndpoint.__init__(self, request_validator, + supported_token_types=['access_token']) diff --git a/env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/endpoints/resource.py b/env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/endpoints/resource.py new file mode 100644 index 0000000..d03ed21 --- /dev/null +++ b/env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/endpoints/resource.py @@ -0,0 +1,87 @@ +# -*- coding: utf-8 -*- +""" +oauthlib.oauth2.rfc6749 +~~~~~~~~~~~~~~~~~~~~~~~ + +This module is an implementation of various logic needed +for consuming and providing OAuth 2.0 RFC6749. +""" +from __future__ import absolute_import, unicode_literals + +import logging + +from oauthlib.common import Request + +from .base import BaseEndpoint, catch_errors_and_unavailability + +log = logging.getLogger(__name__) + + +class ResourceEndpoint(BaseEndpoint): + + """Authorizes access to protected resources. + + The client accesses protected resources by presenting the access + token to the resource server. The resource server MUST validate the + access token and ensure that it has not expired and that its scope + covers the requested resource. The methods used by the resource + server to validate the access token (as well as any error responses) + are beyond the scope of this specification but generally involve an + interaction or coordination between the resource server and the + authorization server:: + + # For most cases, returning a 403 should suffice. + + The method in which the client utilizes the access token to + authenticate with the resource server depends on the type of access + token issued by the authorization server. 
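The remaining pre-configured servers differ only in which grant types they wire up, so picking one is a matter of which flows the provider wants to expose. A quick comparison sketch with a hypothetical validator:

    from oauthlib.oauth2 import (BackendApplicationServer, LegacyApplicationServer,
                                 MobileApplicationServer, WebApplicationServer)
    from your_validator import your_validator  # hypothetical validator instance

    web = WebApplicationServer(your_validator)         # authorization_code + refresh_token
    browser = MobileApplicationServer(your_validator)  # implicit grant, no token endpoint
    legacy = LegacyApplicationServer(your_validator)   # password credentials + refresh_token
    m2m = BackendApplicationServer(your_validator)     # client_credentials only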
Typically, it involves + using the HTTP "Authorization" request header field [RFC2617] with an + authentication scheme defined by the specification of the access + token type used, such as [RFC6750]:: + + # Access tokens may also be provided in query and body + https://example.com/protected?access_token=kjfch2345sdf # Query + access_token=sdf23409df # Body + """ + + def __init__(self, default_token, token_types): + BaseEndpoint.__init__(self) + self._tokens = token_types + self._default_token = default_token + + @property + def default_token(self): + return self._default_token + + @property + def default_token_type_handler(self): + return self.tokens.get(self.default_token) + + @property + def tokens(self): + return self._tokens + + @catch_errors_and_unavailability + def verify_request(self, uri, http_method='GET', body=None, headers=None, + scopes=None): + """Validate client, code etc, return body + headers""" + request = Request(uri, http_method, body, headers) + request.token_type = self.find_token_type(request) + request.scopes = scopes + token_type_handler = self.tokens.get(request.token_type, + self.default_token_type_handler) + log.debug('Dispatching token_type %s request to %r.', + request.token_type, token_type_handler) + return token_type_handler.validate_request(request), request + + def find_token_type(self, request): + """Token type identification. + + RFC 6749 does not provide a method for easily differentiating between + different token types during protected resource access. We estimate + the most likely token type (if any) by asking each known token type + to give an estimation based on the request. + """ + estimates = sorted(((t.estimate_type(request), n) + for n, t in self.tokens.items())) + return estimates[0][1] if len(estimates) else None diff --git a/env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/endpoints/revocation.py b/env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/endpoints/revocation.py new file mode 100644 index 0000000..d5b5b78 --- /dev/null +++ b/env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/endpoints/revocation.py @@ -0,0 +1,134 @@ +# -*- coding: utf-8 -*- +""" +oauthlib.oauth2.rfc6749.endpoint.revocation +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +An implementation of the OAuth 2 `Token Revocation`_ spec (draft 11). + +.. _`Token Revocation`: https://tools.ietf.org/html/draft-ietf-oauth-revocation-11 +""" +from __future__ import absolute_import, unicode_literals + +import logging + +from oauthlib.common import Request + +from ..errors import (InvalidClientError, InvalidRequestError, OAuth2Error, + UnsupportedTokenTypeError) +from .base import BaseEndpoint, catch_errors_and_unavailability + +log = logging.getLogger(__name__) + + +class RevocationEndpoint(BaseEndpoint): + + """Token revocation endpoint. + + Endpoint used by authenticated clients to revoke access and refresh tokens. + Commonly this will be part of the Authorization Endpoint. + """ + + valid_token_types = ('access_token', 'refresh_token') + + def __init__(self, request_validator, supported_token_types=None, + enable_jsonp=False): + BaseEndpoint.__init__(self) + self.request_validator = request_validator + self.supported_token_types = ( + supported_token_types or self.valid_token_types) + self.enable_jsonp = enable_jsonp + + @catch_errors_and_unavailability + def create_revocation_response(self, uri, http_method='POST', body=None, + headers=None): + """Revoke supplied access or refresh token. 
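A resource-server sketch built directly on this class. The validator is hypothetical and must implement Bearer token validation for verify_request to succeed, and the token value is a placeholder.

    from oauthlib.oauth2 import BearerToken
    from oauthlib.oauth2.rfc6749.endpoints.resource import ResourceEndpoint
    from your_validator import your_validator  # hypothetical validator instance

    endpoint = ResourceEndpoint(
        default_token='Bearer',
        token_types={'Bearer': BearerToken(your_validator)})

    valid, request = endpoint.verify_request(
        'https://provider.example/protected',
        http_method='GET',
        headers={'Authorization': 'Bearer kjfch2345sdf'},
        scopes=['profile'])

    if not valid:
        pass  # respond with 401/403; for most cases 403 suffices (see above)
    # On success the validator is expected to have populated request.user,
    # request.client and request.scopes while validating the token.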
+ + + The authorization server responds with HTTP status code 200 if the + token has been revoked successfully or if the client submitted an + invalid token. + + Note: invalid tokens do not cause an error response since the client + cannot handle such an error in a reasonable way. Moreover, the purpose + of the revocation request, invalidating the particular token, is + already achieved. + + The content of the response body is ignored by the client as all + necessary information is conveyed in the response code. + + An invalid token type hint value is ignored by the authorization server + and does not influence the revocation response. + """ + request = Request( + uri, http_method=http_method, body=body, headers=headers) + try: + self.validate_revocation_request(request) + log.debug('Token revocation valid for %r.', request) + except OAuth2Error as e: + log.debug('Client error during validation of %r. %r.', request, e) + response_body = e.json + if self.enable_jsonp and request.callback: + response_body = '%s(%s);' % (request.callback, response_body) + return {}, response_body, e.status_code + + self.request_validator.revoke_token(request.token, + request.token_type_hint, request) + + response_body = '' + if self.enable_jsonp and request.callback: + response_body = request.callback + '();' + return {}, response_body, 200 + + def validate_revocation_request(self, request): + """Ensure the request is valid. + + The client constructs the request by including the following parameters + using the "application/x-www-form-urlencoded" format in the HTTP + request entity-body: + + token (REQUIRED). The token that the client wants to get revoked. + + token_type_hint (OPTIONAL). A hint about the type of the token + submitted for revocation. Clients MAY pass this parameter in order to + help the authorization server to optimize the token lookup. If the + server is unable to locate the token using the given hint, it MUST + extend its search across all of its supported token types. An + authorization server MAY ignore this parameter, particularly if it is + able to detect the token type automatically. This specification + defines two such values: + + * access_token: An Access Token as defined in [RFC6749], + `section 1.4`_ + + * refresh_token: A Refresh Token as defined in [RFC6749], + `section 1.5`_ + + Specific implementations, profiles, and extensions of this + specification MAY define other values for this parameter using + the registry defined in `Section 4.1.2`_. + + The client also includes its authentication credentials as described in + `Section 2.3`_ of [`RFC6749`_]. + + .. _`section 1.4`: https://tools.ietf.org/html/rfc6749#section-1.4 + .. _`section 1.5`: https://tools.ietf.org/html/rfc6749#section-1.5 + .. _`section 2.3`: https://tools.ietf.org/html/rfc6749#section-2.3 + .. _`Section 4.1.2`: https://tools.ietf.org/html/draft-ietf-oauth-revocation-11#section-4.1.2 + ..
_`RFC6749`: https://tools.ietf.org/html/rfc6749 + """ + if not request.token: + raise InvalidRequestError(request=request, + description='Missing token parameter.') + + if self.request_validator.client_authentication_required(request): + if not self.request_validator.authenticate_client(request): + log.debug('Client authentication failed, %r.', request) + raise InvalidClientError(request=request) + elif not self.request_validator.authenticate_client_id(request.client_id, request): + log.debug('Client authentication failed, %r.', request) + raise InvalidClientError(request=request) + + if (request.token_type_hint and + request.token_type_hint in self.valid_token_types and + request.token_type_hint not in self.supported_token_types): + raise UnsupportedTokenTypeError(request=request) diff --git a/env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/endpoints/token.py b/env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/endpoints/token.py new file mode 100644 index 0000000..90fb16f --- /dev/null +++ b/env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/endpoints/token.py @@ -0,0 +1,117 @@ +# -*- coding: utf-8 -*- +""" +oauthlib.oauth2.rfc6749 +~~~~~~~~~~~~~~~~~~~~~~~ + +This module is an implementation of various logic needed +for consuming and providing OAuth 2.0 RFC6749. +""" +from __future__ import absolute_import, unicode_literals + +import logging + +from oauthlib.common import Request +from oauthlib.oauth2.rfc6749 import utils + +from .base import BaseEndpoint, catch_errors_and_unavailability + +log = logging.getLogger(__name__) + + +class TokenEndpoint(BaseEndpoint): + + """Token issuing endpoint. + + The token endpoint is used by the client to obtain an access token by + presenting its authorization grant or refresh token. The token + endpoint is used with every authorization grant except for the + implicit grant type (since an access token is issued directly). + + The means through which the client obtains the location of the token + endpoint are beyond the scope of this specification, but the location + is typically provided in the service documentation. + + The endpoint URI MAY include an "application/x-www-form-urlencoded" + formatted (per `Appendix B`_) query component, + which MUST be retained when adding additional query parameters. The + endpoint URI MUST NOT include a fragment component:: + + https://example.com/path?query=component # OK + https://example.com/path?query=component#fragment # Not OK + + Since requests to the token endpoint result in the transmission of + clear-text credentials (in the HTTP request and response), the + authorization server MUST require the use of TLS as described in + Section 1.6 when sending requests to the token endpoint:: + + # We will deny any request which URI schema is not with https + + The client MUST use the HTTP "POST" method when making access token + requests:: + + # HTTP method is currently not enforced + + Parameters sent without a value MUST be treated as if they were + omitted from the request. The authorization server MUST ignore + unrecognized request parameters. Request and response parameters + MUST NOT be included more than once:: + + # Delegated to each grant type. + + ..
_`Appendix B`: https://tools.ietf.org/html/rfc6749#appendix-B + """ + + def __init__(self, default_grant_type, default_token_type, grant_types): + BaseEndpoint.__init__(self) + self._grant_types = grant_types + self._default_token_type = default_token_type + self._default_grant_type = default_grant_type + + @property + def grant_types(self): + return self._grant_types + + @property + def default_grant_type(self): + return self._default_grant_type + + @property + def default_grant_type_handler(self): + return self.grant_types.get(self.default_grant_type) + + @property + def default_token_type(self): + return self._default_token_type + + @catch_errors_and_unavailability + def create_token_response(self, uri, http_method='GET', body=None, + headers=None, credentials=None, grant_type_for_scope=None, + claims=None): + """Extract grant_type and route to the designated handler.""" + request = Request( + uri, http_method=http_method, body=body, headers=headers) + + # 'scope' is an allowed Token Request param in both the "Resource Owner Password Credentials Grant" + # and "Client Credentials Grant" flows + # https://tools.ietf.org/html/rfc6749#section-4.3.2 + # https://tools.ietf.org/html/rfc6749#section-4.4.2 + request.scopes = utils.scope_to_list(request.scope) + + request.extra_credentials = credentials + if grant_type_for_scope: + request.grant_type = grant_type_for_scope + + # OpenID Connect claims, if provided. The server using oauthlib might choose + # to implement the claims parameter of the Authorization Request. In this case + # it should retrieve those claims and pass them via the claims argument here, + # as a dict. + if claims: + request.claims = claims + + grant_type_handler = self.grant_types.get(request.grant_type, + self.default_grant_type_handler) + log.debug('Dispatching grant_type %s request to %r.', + request.grant_type, grant_type_handler) + return grant_type_handler.create_token_response( + request, self.default_token_type) diff --git a/env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/errors.py b/env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/errors.py new file mode 100644 index 0000000..180f636 --- /dev/null +++ b/env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/errors.py @@ -0,0 +1,415 @@ +# coding=utf-8 +""" +oauthlib.oauth2.rfc6749.errors +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Error used both by OAuth 2 clients and providers to represent the spec +defined error responses for all four core grant types. +""" +from __future__ import unicode_literals + +import json + +from oauthlib.common import add_params_to_uri, urlencode + + +class OAuth2Error(Exception): + error = None + status_code = 400 + description = '' + + def __init__(self, description=None, uri=None, state=None, + status_code=None, request=None): + """ + description: A human-readable ASCII [USASCII] text providing + additional information, used to assist the client + developer in understanding the error that occurred. + Values for the "error_description" parameter MUST NOT + include characters outside the set + x20-21 / x23-5B / x5D-7E. + + uri: A URI identifying a human-readable web page with information + about the error, used to provide the client developer with + additional information about the error. Values for the + "error_uri" parameter MUST conform to the URI- Reference + syntax, and thus MUST NOT include characters outside the set + x21 / x23-5B / x5D-7E. + + state: A CSRF protection value received from the client. 
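Returning to the revocation endpoint described above, a provider-side sketch of handling a revocation call. The validator is hypothetical, and the token value, URL and client credentials are placeholders.

    from oauthlib.oauth2.rfc6749.endpoints.revocation import RevocationEndpoint
    from your_validator import your_validator  # hypothetical validator instance

    endpoint = RevocationEndpoint(your_validator,
                                  supported_token_types=['access_token'])

    headers, body, status = endpoint.create_revocation_response(
        'https://provider.example/revoke',
        http_method='POST',
        body='token=kjfch2345sdf&token_type_hint=access_token',
        headers={'Content-Type': 'application/x-www-form-urlencoded',
                 'Authorization': 'Basic ...'})  # client credentials, elided
    # status is 200 both after a successful revocation and for an unknown token;
    # failed client authentication comes back as error json instead.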
+ + request: Oauthlib Request object + """ + if description is not None: + self.description = description + + message = '(%s) %s' % (self.error, self.description) + if request: + message += ' ' + repr(request) + super(OAuth2Error, self).__init__(message) + + self.uri = uri + self.state = state + + if status_code: + self.status_code = status_code + + if request: + self.redirect_uri = request.redirect_uri + self.client_id = request.client_id + self.scopes = request.scopes + self.response_type = request.response_type + self.response_mode = request.response_mode + self.grant_type = request.grant_type + if not state: + self.state = request.state + else: + self.redirect_uri = None + self.client_id = None + self.scopes = None + self.response_type = None + self.response_mode = None + self.grant_type = None + + def in_uri(self, uri): + fragment = self.response_mode == "fragment" + return add_params_to_uri(uri, self.twotuples, fragment) + + @property + def twotuples(self): + error = [('error', self.error)] + if self.description: + error.append(('error_description', self.description)) + if self.uri: + error.append(('error_uri', self.uri)) + if self.state: + error.append(('state', self.state)) + return error + + @property + def urlencoded(self): + return urlencode(self.twotuples) + + @property + def json(self): + return json.dumps(dict(self.twotuples)) + + +class TokenExpiredError(OAuth2Error): + error = 'token_expired' + + +class InsecureTransportError(OAuth2Error): + error = 'insecure_transport' + description = 'OAuth 2 MUST utilize https.' + + +class MismatchingStateError(OAuth2Error): + error = 'mismatching_state' + description = 'CSRF Warning! State not equal in request and response.' + + +class MissingCodeError(OAuth2Error): + error = 'missing_code' + + +class MissingTokenError(OAuth2Error): + error = 'missing_token' + + +class MissingTokenTypeError(OAuth2Error): + error = 'missing_token_type' + + +class FatalClientError(OAuth2Error): + """ + Errors during authorization where user should not be redirected back. + + If the request fails due to a missing, invalid, or mismatching + redirection URI, or if the client identifier is missing or invalid, + the authorization server SHOULD inform the resource owner of the + error and MUST NOT automatically redirect the user-agent to the + invalid redirection URI. + + Instead the user should be informed of the error by the provider itself. + """ + pass + + +class InvalidRequestFatalError(FatalClientError): + """ + For fatal errors, the request is missing a required parameter, includes + an invalid parameter value, includes a parameter more than once, or is + otherwise malformed. + """ + error = 'invalid_request' + + +class InvalidRedirectURIError(InvalidRequestFatalError): + description = 'Invalid redirect URI.' + + +class MissingRedirectURIError(InvalidRequestFatalError): + description = 'Missing redirect URI.' + + +class MismatchingRedirectURIError(InvalidRequestFatalError): + description = 'Mismatching redirect URI.' + + +class InvalidClientIdError(InvalidRequestFatalError): + description = 'Invalid client_id parameter value.' + + +class MissingClientIdError(InvalidRequestFatalError): + description = 'Missing client_id parameter.' + + +class InvalidRequestError(OAuth2Error): + """ + The request is missing a required parameter, includes an invalid + parameter value, includes a parameter more than once, or is + otherwise malformed. 
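The serialization helpers above are what the endpoints use to deliver these errors, either appended to a redirect or as a response body. A small sketch using one of the error classes defined in this module:

    from oauthlib.oauth2.rfc6749.errors import InsecureTransportError

    e = InsecureTransportError(state='xyz')
    e.twotuples     # [('error', 'insecure_transport'), ('error_description', ...), ('state', 'xyz')]
    e.urlencoded    # the same pairs form-encoded, ready for a redirect query string
    e.json          # the same pairs as a JSON object, ready for a token endpoint body
    e.in_uri('https://client.example/cb')   # appends the pairs to a redirect URI
    e.status_code   # 400 here; subclasses override it (401, 403, ...) where appropriate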
+ """ + error = 'invalid_request' + + +class MissingResponseTypeError(InvalidRequestError): + description = 'Missing response_type parameter.' + + +class AccessDeniedError(OAuth2Error): + """ + The resource owner or authorization server denied the request. + """ + error = 'access_denied' + status_code = 401 + + +class UnsupportedResponseTypeError(OAuth2Error): + """ + The authorization server does not support obtaining an authorization + code using this method. + """ + error = 'unsupported_response_type' + + +class InvalidScopeError(OAuth2Error): + """ + The requested scope is invalid, unknown, or malformed. + """ + error = 'invalid_scope' + status_code = 401 + + +class ServerError(OAuth2Error): + """ + The authorization server encountered an unexpected condition that + prevented it from fulfilling the request. (This error code is needed + because a 500 Internal Server Error HTTP status code cannot be returned + to the client via a HTTP redirect.) + """ + error = 'server_error' + + +class TemporarilyUnavailableError(OAuth2Error): + """ + The authorization server is currently unable to handle the request + due to a temporary overloading or maintenance of the server. + (This error code is needed because a 503 Service Unavailable HTTP + status code cannot be returned to the client via a HTTP redirect.) + """ + error = 'temporarily_unavailable' + + +class InvalidClientError(OAuth2Error): + """ + Client authentication failed (e.g. unknown client, no client + authentication included, or unsupported authentication method). + The authorization server MAY return an HTTP 401 (Unauthorized) status + code to indicate which HTTP authentication schemes are supported. + If the client attempted to authenticate via the "Authorization" request + header field, the authorization server MUST respond with an + HTTP 401 (Unauthorized) status code, and include the "WWW-Authenticate" + response header field matching the authentication scheme used by the + client. + """ + error = 'invalid_client' + status_code = 401 + + +class InvalidGrantError(OAuth2Error): + """ + The provided authorization grant (e.g. authorization code, resource + owner credentials) or refresh token is invalid, expired, revoked, does + not match the redirection URI used in the authorization request, or was + issued to another client. + """ + error = 'invalid_grant' + status_code = 401 + + +class UnauthorizedClientError(OAuth2Error): + """ + The authenticated client is not authorized to use this authorization + grant type. + """ + error = 'unauthorized_client' + status_code = 401 + + +class UnsupportedGrantTypeError(OAuth2Error): + """ + The authorization grant type is not supported by the authorization + server. + """ + error = 'unsupported_grant_type' + + +class UnsupportedTokenTypeError(OAuth2Error): + """ + The authorization server does not support the revocation of the + presented token type. I.e. the client tried to revoke an access token + on a server not supporting this feature. + """ + error = 'unsupported_token_type' + + +class FatalOpenIDClientError(FatalClientError): + pass + + +class OpenIDClientError(OAuth2Error): + pass + + +class InteractionRequired(OpenIDClientError): + """ + The Authorization Server requires End-User interaction to proceed. + + This error MAY be returned when the prompt parameter value in the + Authentication Request is none, but the Authentication Request cannot be + completed without displaying a user interface for End-User interaction. 
+ """ + error = 'interaction_required' + status_code = 401 + + +class LoginRequired(OpenIDClientError): + """ + The Authorization Server requires End-User authentication. + + This error MAY be returned when the prompt parameter value in the + Authentication Request is none, but the Authentication Request cannot be + completed without displaying a user interface for End-User authentication. + """ + error = 'login_required' + status_code = 401 + + +class AccountSelectionRequired(OpenIDClientError): + """ + The End-User is REQUIRED to select a session at the Authorization Server. + + The End-User MAY be authenticated at the Authorization Server with + different associated accounts, but the End-User did not select a session. + This error MAY be returned when the prompt parameter value in the + Authentication Request is none, but the Authentication Request cannot be + completed without displaying a user interface to prompt for a session to + use. + """ + error = 'account_selection_required' + + +class ConsentRequired(OpenIDClientError): + """ + The Authorization Server requires End-User consent. + + This error MAY be returned when the prompt parameter value in the + Authentication Request is none, but the Authentication Request cannot be + completed without displaying a user interface for End-User consent. + """ + error = 'consent_required' + status_code = 401 + + +class InvalidRequestURI(OpenIDClientError): + """ + The request_uri in the Authorization Request returns an error or + contains invalid data. + """ + error = 'invalid_request_uri' + description = 'The request_uri in the Authorization Request returns an ' \ + 'error or contains invalid data.' + + +class InvalidRequestObject(OpenIDClientError): + """ + The request parameter contains an invalid Request Object. + """ + error = 'invalid_request_object' + description = 'The request parameter contains an invalid Request Object.' + + +class RequestNotSupported(OpenIDClientError): + """ + The OP does not support use of the request parameter. + """ + error = 'request_not_supported' + description = 'The request parameter is not supported.' + + +class RequestURINotSupported(OpenIDClientError): + """ + The OP does not support use of the request_uri parameter. + """ + error = 'request_uri_not_supported' + description = 'The request_uri parameter is not supported.' + + +class RegistrationNotSupported(OpenIDClientError): + """ + The OP does not support use of the registration parameter. + """ + error = 'registration_not_supported' + description = 'The registration parameter is not supported.' + + +class InvalidTokenError(OAuth2Error): + """ + The access token provided is expired, revoked, malformed, or + invalid for other reasons. The resource SHOULD respond with + the HTTP 401 (Unauthorized) status code. The client MAY + request a new access token and retry the protected resource + request. + """ + error = 'invalid_token' + status_code = 401 + description = ("The access token provided is expired, revoked, malformed, " + "or invalid for other reasons.") + + +class InsufficientScopeError(OAuth2Error): + """ + The request requires higher privileges than provided by the + access token. The resource server SHOULD respond with the HTTP + 403 (Forbidden) status code and MAY include the "scope" + attribute with the scope necessary to access the protected + resource. 
+ """ + error = 'insufficient_scope' + status_code = 403 + description = ("The request requires higher privileges than provided by " + "the access token.") + + +def raise_from_error(error, params=None): + import inspect + import sys + kwargs = { + 'description': params.get('error_description'), + 'uri': params.get('error_uri'), + 'state': params.get('state') + } + for _, cls in inspect.getmembers(sys.modules[__name__], inspect.isclass): + if cls.error == error: + raise cls(**kwargs) diff --git a/env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/grant_types/authorization_code.py b/env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/grant_types/authorization_code.py new file mode 100644 index 0000000..0660263 --- /dev/null +++ b/env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/grant_types/authorization_code.py @@ -0,0 +1,431 @@ +# -*- coding: utf-8 -*- +""" +oauthlib.oauth2.rfc6749.grant_types +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +""" +from __future__ import absolute_import, unicode_literals + +import json +import logging + +from oauthlib import common +from oauthlib.uri_validate import is_absolute_uri + +from .. import errors +from .base import GrantTypeBase + +log = logging.getLogger(__name__) + + +class AuthorizationCodeGrant(GrantTypeBase): + + """`Authorization Code Grant`_ + + The authorization code grant type is used to obtain both access + tokens and refresh tokens and is optimized for confidential clients. + Since this is a redirection-based flow, the client must be capable of + interacting with the resource owner's user-agent (typically a web + browser) and capable of receiving incoming requests (via redirection) + from the authorization server:: + + +----------+ + | Resource | + | Owner | + | | + +----------+ + ^ + | + (B) + +----|-----+ Client Identifier +---------------+ + | -+----(A)-- & Redirection URI ---->| | + | User- | | Authorization | + | Agent -+----(B)-- User authenticates --->| Server | + | | | | + | -+----(C)-- Authorization Code ---<| | + +-|----|---+ +---------------+ + | | ^ v + (A) (C) | | + | | | | + ^ v | | + +---------+ | | + | |>---(D)-- Authorization Code ---------' | + | Client | & Redirection URI | + | | | + | |<---(E)----- Access Token -------------------' + +---------+ (w/ Optional Refresh Token) + + Note: The lines illustrating steps (A), (B), and (C) are broken into + two parts as they pass through the user-agent. + + Figure 3: Authorization Code Flow + + The flow illustrated in Figure 3 includes the following steps: + + (A) The client initiates the flow by directing the resource owner's + user-agent to the authorization endpoint. The client includes + its client identifier, requested scope, local state, and a + redirection URI to which the authorization server will send the + user-agent back once access is granted (or denied). + + (B) The authorization server authenticates the resource owner (via + the user-agent) and establishes whether the resource owner + grants or denies the client's access request. + + (C) Assuming the resource owner grants access, the authorization + server redirects the user-agent back to the client using the + redirection URI provided earlier (in the request or during + client registration). The redirection URI includes an + authorization code and any local state provided by the client + earlier. + + (D) The client requests an access token from the authorization + server's token endpoint by including the authorization code + received in the previous step. 
When making the request, the + client authenticates with the authorization server. The client + includes the redirection URI used to obtain the authorization + code for verification. + + (E) The authorization server authenticates the client, validates the + authorization code, and ensures that the redirection URI + received matches the URI used to redirect the client in + step (C). If valid, the authorization server responds back with + an access token and, optionally, a refresh token. + + .. _`Authorization Code Grant`: https://tools.ietf.org/html/rfc6749#section-4.1 + """ + + default_response_mode = 'query' + response_types = ['code'] + + def create_authorization_code(self, request): + """Generates an authorization grant represented as a dictionary.""" + grant = {'code': common.generate_token()} + if hasattr(request, 'state') and request.state: + grant['state'] = request.state + log.debug('Created authorization code grant %r for request %r.', + grant, request) + return grant + + def create_authorization_response(self, request, token_handler): + """ + The client constructs the request URI by adding the following + parameters to the query component of the authorization endpoint URI + using the "application/x-www-form-urlencoded" format, per `Appendix B`_: + + response_type + REQUIRED. Value MUST be set to "code" for standard OAuth2 + authorization flow. For OpenID Connect it must be one of + "code token", "code id_token", or "code token id_token" - we + essentially test that "code" appears in the response_type. + client_id + REQUIRED. The client identifier as described in `Section 2.2`_. + redirect_uri + OPTIONAL. As described in `Section 3.1.2`_. + scope + OPTIONAL. The scope of the access request as described by + `Section 3.3`_. + state + RECOMMENDED. An opaque value used by the client to maintain + state between the request and callback. The authorization + server includes this value when redirecting the user-agent back + to the client. The parameter SHOULD be used for preventing + cross-site request forgery as described in `Section 10.12`_. + + The client directs the resource owner to the constructed URI using an + HTTP redirection response, or by other means available to it via the + user-agent. + + :param request: oauthlib.commong.Request + :param token_handler: A token handler instace, for example of type + oauthlib.oauth2.BearerToken. + :returns: headers, body, status + :raises: FatalClientError on invalid redirect URI or client id. + ValueError if scopes are not set on the request object. + + A few examples:: + + >>> from your_validator import your_validator + >>> request = Request('https://example.com/authorize?client_id=valid' + ... '&redirect_uri=http%3A%2F%2Fclient.com%2F') + >>> from oauthlib.common import Request + >>> from oauthlib.oauth2 import AuthorizationCodeGrant, BearerToken + >>> token = BearerToken(your_validator) + >>> grant = AuthorizationCodeGrant(your_validator) + >>> grant.create_authorization_response(request, token) + Traceback (most recent call last): + File "", line 1, in + File "oauthlib/oauth2/rfc6749/grant_types.py", line 513, in create_authorization_response + raise ValueError('Scopes must be set on post auth.') + ValueError: Scopes must be set on post auth. 
+ >>> request.scopes = ['authorized', 'in', 'some', 'form'] + >>> grant.create_authorization_response(request, token) + (u'http://client.com/?error=invalid_request&error_description=Missing+response_type+parameter.', None, None, 400) + >>> request = Request('https://example.com/authorize?client_id=valid' + ... '&redirect_uri=http%3A%2F%2Fclient.com%2F' + ... '&response_type=code') + >>> request.scopes = ['authorized', 'in', 'some', 'form'] + >>> grant.create_authorization_response(request, token) + (u'http://client.com/?code=u3F05aEObJuP2k7DordviIgW5wl52N', None, None, 200) + >>> # If the client id or redirect uri fails validation + >>> grant.create_authorization_response(request, token) + Traceback (most recent call last): + File "", line 1, in + File "oauthlib/oauth2/rfc6749/grant_types.py", line 515, in create_authorization_response + >>> grant.create_authorization_response(request, token) + File "oauthlib/oauth2/rfc6749/grant_types.py", line 591, in validate_authorization_request + oauthlib.oauth2.rfc6749.errors.InvalidClientIdError + + .. _`Appendix B`: https://tools.ietf.org/html/rfc6749#appendix-B + .. _`Section 2.2`: https://tools.ietf.org/html/rfc6749#section-2.2 + .. _`Section 3.1.2`: https://tools.ietf.org/html/rfc6749#section-3.1.2 + .. _`Section 3.3`: https://tools.ietf.org/html/rfc6749#section-3.3 + .. _`Section 10.12`: https://tools.ietf.org/html/rfc6749#section-10.12 + """ + try: + # request.scopes is only mandated in post auth and both pre and + # post auth use validate_authorization_request + if not request.scopes: + raise ValueError('Scopes must be set on post auth.') + + self.validate_authorization_request(request) + log.debug('Pre resource owner authorization validation ok for %r.', + request) + + # If the request fails due to a missing, invalid, or mismatching + # redirection URI, or if the client identifier is missing or invalid, + # the authorization server SHOULD inform the resource owner of the + # error and MUST NOT automatically redirect the user-agent to the + # invalid redirection URI. + except errors.FatalClientError as e: + log.debug('Fatal client error during validation of %r. %r.', + request, e) + raise + + # If the resource owner denies the access request or if the request + # fails for reasons other than a missing or invalid redirection URI, + # the authorization server informs the client by adding the following + # parameters to the query component of the redirection URI using the + # "application/x-www-form-urlencoded" format, per Appendix B: + # https://tools.ietf.org/html/rfc6749#appendix-B + except errors.OAuth2Error as e: + log.debug('Client error during validation of %r. %r.', request, e) + request.redirect_uri = request.redirect_uri or self.error_uri + redirect_uri = common.add_params_to_uri( + request.redirect_uri, e.twotuples, + fragment=request.response_mode == "fragment") + return {'Location': redirect_uri}, None, 302 + + grant = self.create_authorization_code(request) + for modifier in self._code_modifiers: + grant = modifier(grant, token_handler, request) + log.debug('Saving grant %r for %r.', grant, request) + self.request_validator.save_authorization_code( + request.client_id, grant, request) + return self.prepare_authorization_response( + request, grant, {}, None, 302) + + def create_token_response(self, request, token_handler): + """Validate the authorization code. + + The client MUST NOT use the authorization code more than once. 
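The doctest above drives the authorization leg; the sketch below shows both legs at the grant level, pairing the grant with a BearerToken. The validator import is hypothetical, the URIs and code are placeholders, and a fully implemented validator is assumed.

    from oauthlib.common import Request
    from oauthlib.oauth2 import AuthorizationCodeGrant, BearerToken
    from your_validator import your_validator  # hypothetical validator instance

    grant = AuthorizationCodeGrant(your_validator)
    bearer = BearerToken(your_validator)

    # Authorization leg: returns the 302 redirect carrying the code.
    auth_request = Request('https://provider.example/authorize?client_id=valid'
                           '&response_type=code'
                           '&redirect_uri=https%3A%2F%2Fclient.example%2Fcb')
    auth_request.scopes = ['profile']
    headers, body, status = grant.create_authorization_response(auth_request, bearer)

    # Token leg: exchanges the code; the JSON body is returned with
    # Cache-Control: no-store and Pragma: no-cache headers.
    token_request = Request('https://provider.example/token',
                            http_method='POST',
                            body='grant_type=authorization_code&code=sdfkjh345'
                                 '&redirect_uri=https%3A%2F%2Fclient.example%2Fcb')
    headers, body, status = grant.create_token_response(token_request, bearer)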
If an + authorization code is used more than once, the authorization server + MUST deny the request and SHOULD revoke (when possible) all tokens + previously issued based on that authorization code. The authorization + code is bound to the client identifier and redirection URI. + """ + headers = { + 'Content-Type': 'application/json', + 'Cache-Control': 'no-store', + 'Pragma': 'no-cache', + } + try: + self.validate_token_request(request) + log.debug('Token request validation ok for %r.', request) + except errors.OAuth2Error as e: + log.debug('Client error during validation of %r. %r.', request, e) + return headers, e.json, e.status_code + + token = token_handler.create_token(request, refresh_token=self.refresh_token, save_token=False) + for modifier in self._token_modifiers: + token = modifier(token, token_handler, request) + self.request_validator.save_token(token, request) + self.request_validator.invalidate_authorization_code( + request.client_id, request.code, request) + return headers, json.dumps(token), 200 + + def validate_authorization_request(self, request): + """Check the authorization request for normal and fatal errors. + + A normal error could be a missing response_type parameter or the client + attempting to access scope it is not allowed to ask authorization for. + Normal errors can safely be included in the redirection URI and + sent back to the client. + + Fatal errors occur when the client_id or redirect_uri is invalid or + missing. These must be caught by the provider and handled, how this + is done is outside of the scope of OAuthLib but showing an error + page describing the issue is a good idea. + """ + + # First check for fatal errors + + # If the request fails due to a missing, invalid, or mismatching + # redirection URI, or if the client identifier is missing or invalid, + # the authorization server SHOULD inform the resource owner of the + # error and MUST NOT automatically redirect the user-agent to the + # invalid redirection URI. + + # First check duplicate parameters + for param in ('client_id', 'response_type', 'redirect_uri', 'scope', 'state'): + try: + duplicate_params = request.duplicate_params + except ValueError: + raise errors.InvalidRequestFatalError(description='Unable to parse query string', request=request) + if param in duplicate_params: + raise errors.InvalidRequestFatalError(description='Duplicate %s parameter.' % param, request=request) + + # REQUIRED. The client identifier as described in Section 2.2. + # https://tools.ietf.org/html/rfc6749#section-2.2 + if not request.client_id: + raise errors.MissingClientIdError(request=request) + + if not self.request_validator.validate_client_id(request.client_id, request): + raise errors.InvalidClientIdError(request=request) + + # OPTIONAL. As described in Section 3.1.2. 
+ # https://tools.ietf.org/html/rfc6749#section-3.1.2 + log.debug('Validating redirection uri %s for client %s.', + request.redirect_uri, request.client_id) + if request.redirect_uri is not None: + request.using_default_redirect_uri = False + log.debug('Using provided redirect_uri %s', request.redirect_uri) + if not is_absolute_uri(request.redirect_uri): + raise errors.InvalidRedirectURIError(request=request) + + if not self.request_validator.validate_redirect_uri( + request.client_id, request.redirect_uri, request): + raise errors.MismatchingRedirectURIError(request=request) + else: + request.redirect_uri = self.request_validator.get_default_redirect_uri( + request.client_id, request) + request.using_default_redirect_uri = True + log.debug('Using default redirect_uri %s.', request.redirect_uri) + if not request.redirect_uri: + raise errors.MissingRedirectURIError(request=request) + + # Then check for normal errors. + + # If the resource owner denies the access request or if the request + # fails for reasons other than a missing or invalid redirection URI, + # the authorization server informs the client by adding the following + # parameters to the query component of the redirection URI using the + # "application/x-www-form-urlencoded" format, per Appendix B. + # https://tools.ietf.org/html/rfc6749#appendix-B + + # Note that the correct parameters to be added are automatically + # populated through the use of specific exceptions. + + request_info = {} + for validator in self.custom_validators.pre_auth: + request_info.update(validator(request)) + + # REQUIRED. + if request.response_type is None: + raise errors.MissingResponseTypeError(request=request) + # Value MUST be set to "code" or one of the OpenID authorization code including + # response_types "code token", "code id_token", "code token id_token" + elif not 'code' in request.response_type and request.response_type != 'none': + raise errors.UnsupportedResponseTypeError(request=request) + + if not self.request_validator.validate_response_type(request.client_id, + request.response_type, + request.client, request): + + log.debug('Client %s is not authorized to use response_type %s.', + request.client_id, request.response_type) + raise errors.UnauthorizedClientError(request=request) + + # OPTIONAL. The scope of the access request as described by Section 3.3 + # https://tools.ietf.org/html/rfc6749#section-3.3 + self.validate_scopes(request) + + request_info.update({ + 'client_id': request.client_id, + 'redirect_uri': request.redirect_uri, + 'response_type': request.response_type, + 'state': request.state, + 'request': request + }) + + for validator in self.custom_validators.post_auth: + request_info.update(validator(request)) + + return request.scopes, request_info + + def validate_token_request(self, request): + # REQUIRED. Value MUST be set to "authorization_code". + if request.grant_type not in ('authorization_code', 'openid'): + raise errors.UnsupportedGrantTypeError(request=request) + + for validator in self.custom_validators.pre_token: + validator(request) + + if request.code is None: + raise errors.InvalidRequestError( + description='Missing code parameter.', request=request) + + for param in ('client_id', 'grant_type', 'redirect_uri'): + if param in request.duplicate_params: + raise errors.InvalidRequestError(description='Duplicate %s parameter.' 
% param, + request=request) + + if self.request_validator.client_authentication_required(request): + # If the client type is confidential or the client was issued client + # credentials (or assigned other authentication requirements), the + # client MUST authenticate with the authorization server as described + # in Section 3.2.1. + # https://tools.ietf.org/html/rfc6749#section-3.2.1 + if not self.request_validator.authenticate_client(request): + log.debug('Client authentication failed, %r.', request) + raise errors.InvalidClientError(request=request) + elif not self.request_validator.authenticate_client_id(request.client_id, request): + # REQUIRED, if the client is not authenticating with the + # authorization server as described in Section 3.2.1. + # https://tools.ietf.org/html/rfc6749#section-3.2.1 + log.debug('Client authentication failed, %r.', request) + raise errors.InvalidClientError(request=request) + + if not hasattr(request.client, 'client_id'): + raise NotImplementedError('Authenticate client must set the ' + 'request.client.client_id attribute ' + 'in authenticate_client.') + + request.client_id = request.client_id or request.client.client_id + + # Ensure client is authorized use of this grant type + self.validate_grant_type(request) + + # REQUIRED. The authorization code received from the + # authorization server. + if not self.request_validator.validate_code(request.client_id, + request.code, request.client, request): + log.debug('Client, %r (%r), is not allowed access to scopes %r.', + request.client_id, request.client, request.scopes) + raise errors.InvalidGrantError(request=request) + + for attr in ('user', 'scopes'): + if getattr(request, attr, None) is None: + log.debug('request.%s was not set on code validation.', attr) + + # REQUIRED, if the "redirect_uri" parameter was included in the + # authorization request as described in Section 4.1.1, and their + # values MUST be identical. + if not self.request_validator.confirm_redirect_uri(request.client_id, request.code, + request.redirect_uri, request.client, + request): + log.debug('Redirect_uri (%r) invalid for client %r (%r).', + request.redirect_uri, request.client_id, request.client) + raise errors.MismatchingRedirectURIError(request=request) + + for validator in self.custom_validators.post_token: + validator(request) diff --git a/env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/grant_types/base.py b/env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/grant_types/base.py new file mode 100644 index 0000000..e5d8ddd --- /dev/null +++ b/env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/grant_types/base.py @@ -0,0 +1,185 @@ +# -*- coding: utf-8 -*- +""" +oauthlib.oauth2.rfc6749.grant_types +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +""" +from __future__ import absolute_import, unicode_literals + +import logging +from itertools import chain + +from oauthlib.common import add_params_to_uri +from oauthlib.oauth2.rfc6749 import errors, utils + +from ..request_validator import RequestValidator + +log = logging.getLogger(__name__) + +class ValidatorsContainer(object): + """ + Container object for holding custom validator callables to be invoked + as part of the grant type `validate_authorization_request()` or + `validate_authorization_request()` methods on the various grant types. + + Authorization validators must be callables that take a request object and + return a dict, which may contain items to be added to the `request_info` + returned from the grant_type after validation. 
+ + Token validators must be callables that take a request object and + return None. + + Both authorization validators and token validators may raise OAuth2 + exceptions if validation conditions fail. + + Authorization validators added to `pre_auth` will be run BEFORE + the standard validations (but after the critical ones that raise + fatal errors) as part of `validate_authorization_request()` + + Authorization validators added to `post_auth` will be run AFTER + the standard validations as part of `validate_authorization_request()` + + Token validators added to `pre_token` will be run BEFORE + the standard validations as part of `validate_token_request()` + + Token validators added to `post_token` will be run AFTER + the standard validations as part of `validate_token_request()` + + For example: + + >>> def my_auth_validator(request): + ... return {'myval': True} + >>> auth_code_grant = AuthorizationCodeGrant(request_validator) + >>> auth_code_grant.custom_validators.pre_auth.append(my_auth_validator) + >>> def my_token_validator(request): + ... if not request.everything_okay: + ... raise errors.OAuth2Error("uh-oh") + >>> auth_code_grant.custom_validators.post_token.append(my_token_validator) + """ + + def __init__(self, post_auth, post_token, + pre_auth, pre_token): + self.pre_auth = pre_auth + self.post_auth = post_auth + self.pre_token = pre_token + self.post_token = post_token + + @property + def all_pre(self): + return chain(self.pre_auth, self.pre_token) + + @property + def all_post(self): + return chain(self.post_auth, self.post_token) + + +class GrantTypeBase(object): + error_uri = None + request_validator = None + default_response_mode = 'fragment' + refresh_token = True + response_types = ['code'] + + def __init__(self, request_validator=None, **kwargs): + self.request_validator = request_validator or RequestValidator() + + # Transforms class variables into instance variables: + self.response_types = self.response_types + self.refresh_token = self.refresh_token + self._setup_custom_validators(kwargs) + self._code_modifiers = [] + self._token_modifiers = [] + + for kw, val in kwargs.items(): + setattr(self, kw, val) + + def _setup_custom_validators(self, kwargs): + post_auth = kwargs.get('post_auth', []) + post_token = kwargs.get('post_token', []) + pre_auth = kwargs.get('pre_auth', []) + pre_token = kwargs.get('pre_token', []) + if not hasattr(self, 'validate_authorization_request'): + if post_auth or pre_auth: + msg = ("{} does not support authorization validators. 
Use " + "token validators instead.").format(self.__class__.__name__) + raise ValueError(msg) + # Using tuples here because they can't be appended to: + post_auth, pre_auth = (), () + self.custom_validators = ValidatorsContainer(post_auth, post_token, + pre_auth, pre_token) + + def register_response_type(self, response_type): + self.response_types.append(response_type) + + def register_code_modifier(self, modifier): + self._code_modifiers.append(modifier) + + def register_token_modifier(self, modifier): + self._token_modifiers.append(modifier) + + + def create_authorization_response(self, request, token_handler): + raise NotImplementedError('Subclasses must implement this method.') + + def create_token_response(self, request, token_handler): + raise NotImplementedError('Subclasses must implement this method.') + + def add_token(self, token, token_handler, request): + # Only add a hybrid access token on auth step if asked for + if not request.response_type in ["token", "code token", "id_token token", "code id_token token"]: + return token + + token.update(token_handler.create_token(request, refresh_token=False)) + return token + + def validate_grant_type(self, request): + client_id = getattr(request, 'client_id', None) + if not self.request_validator.validate_grant_type(client_id, + request.grant_type, request.client, request): + log.debug('Unauthorized from %r (%r) access to grant type %s.', + request.client_id, request.client, request.grant_type) + raise errors.UnauthorizedClientError(request=request) + + def validate_scopes(self, request): + if not request.scopes: + request.scopes = utils.scope_to_list(request.scope) or utils.scope_to_list( + self.request_validator.get_default_scopes(request.client_id, request)) + log.debug('Validating access to scopes %r for client %r (%r).', + request.scopes, request.client_id, request.client) + if not self.request_validator.validate_scopes(request.client_id, + request.scopes, request.client, request): + raise errors.InvalidScopeError(request=request) + + def prepare_authorization_response(self, request, token, headers, body, status): + """Place token according to response mode. + + Base classes can define a default response mode for their authorization + response by overriding the static `default_response_mode` member. 
+ """ + request.response_mode = request.response_mode or self.default_response_mode + + if request.response_mode not in ('query', 'fragment'): + log.debug('Overriding invalid response mode %s with %s', + request.response_mode, self.default_response_mode) + request.response_mode = self.default_response_mode + + token_items = token.items() + + if request.response_type == 'none': + state = token.get('state', None) + if state: + token_items = [('state', state)] + else: + token_items = [] + + if request.response_mode == 'query': + headers['Location'] = add_params_to_uri( + request.redirect_uri, token_items, fragment=False) + return headers, body, status + + if request.response_mode == 'fragment': + headers['Location'] = add_params_to_uri( + request.redirect_uri, token_items, fragment=True) + return headers, body, status + + raise NotImplementedError( + 'Subclasses must set a valid default_response_mode') diff --git a/env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/grant_types/client_credentials.py b/env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/grant_types/client_credentials.py new file mode 100644 index 0000000..4c50a78 --- /dev/null +++ b/env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/grant_types/client_credentials.py @@ -0,0 +1,120 @@ +# -*- coding: utf-8 -*- +""" +oauthlib.oauth2.rfc6749.grant_types +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +""" +from __future__ import absolute_import, unicode_literals + +import json +import logging + +from .. import errors +from ..request_validator import RequestValidator +from .base import GrantTypeBase + +log = logging.getLogger(__name__) + + +class ClientCredentialsGrant(GrantTypeBase): + + """`Client Credentials Grant`_ + + The client can request an access token using only its client + credentials (or other supported means of authentication) when the + client is requesting access to the protected resources under its + control, or those of another resource owner that have been previously + arranged with the authorization server (the method of which is beyond + the scope of this specification). + + The client credentials grant type MUST only be used by confidential + clients:: + + +---------+ +---------------+ + : : : : + : :>-- A - Client Authentication --->: Authorization : + : Client : : Server : + : :<-- B ---- Access Token ---------<: : + : : : : + +---------+ +---------------+ + + Figure 6: Client Credentials Flow + + The flow illustrated in Figure 6 includes the following steps: + + (A) The client authenticates with the authorization server and + requests an access token from the token endpoint. + + (B) The authorization server authenticates the client, and if valid, + issues an access token. + + .. _`Client Credentials Grant`: https://tools.ietf.org/html/rfc6749#section-4.4 + """ + + def create_token_response(self, request, token_handler): + """Return token or error in JSON format. + + If the access token request is valid and authorized, the + authorization server issues an access token as described in + `Section 5.1`_. A refresh token SHOULD NOT be included. If the request + failed client authentication or is invalid, the authorization server + returns an error response as described in `Section 5.2`_. + + .. _`Section 5.1`: https://tools.ietf.org/html/rfc6749#section-5.1 + .. 
_`Section 5.2`: https://tools.ietf.org/html/rfc6749#section-5.2 + """ + headers = { + 'Content-Type': 'application/json', + 'Cache-Control': 'no-store', + 'Pragma': 'no-cache', + } + try: + log.debug('Validating access token request, %r.', request) + self.validate_token_request(request) + except errors.OAuth2Error as e: + log.debug('Client error in token request. %s.', e) + return headers, e.json, e.status_code + + token = token_handler.create_token(request, refresh_token=False, save_token=False) + + for modifier in self._token_modifiers: + token = modifier(token) + self.request_validator.save_token(token, request) + + log.debug('Issuing token to client id %r (%r), %r.', + request.client_id, request.client, token) + return headers, json.dumps(token), 200 + + def validate_token_request(self, request): + for validator in self.custom_validators.pre_token: + validator(request) + + if not getattr(request, 'grant_type', None): + raise errors.InvalidRequestError('Request is missing grant type.', + request=request) + + if not request.grant_type == 'client_credentials': + raise errors.UnsupportedGrantTypeError(request=request) + + for param in ('grant_type', 'scope'): + if param in request.duplicate_params: + raise errors.InvalidRequestError(description='Duplicate %s parameter.' % param, + request=request) + + log.debug('Authenticating client, %r.', request) + if not self.request_validator.authenticate_client(request): + log.debug('Client authentication failed, %r.', request) + raise errors.InvalidClientError(request=request) + else: + if not hasattr(request.client, 'client_id'): + raise NotImplementedError('Authenticate client must set the ' + 'request.client.client_id attribute ' + 'in authenticate_client.') + # Ensure client is authorized use of this grant type + self.validate_grant_type(request) + + log.debug('Authorizing access to user %r.', request.user) + request.client_id = request.client_id or request.client.client_id + self.validate_scopes(request) + + for validator in self.custom_validators.post_token: + validator(request) diff --git a/env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/grant_types/implicit.py b/env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/grant_types/implicit.py new file mode 100644 index 0000000..569282e --- /dev/null +++ b/env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/grant_types/implicit.py @@ -0,0 +1,382 @@ +# -*- coding: utf-8 -*- +""" +oauthlib.oauth2.rfc6749.grant_types +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +""" +from __future__ import absolute_import, unicode_literals + +import logging + +from oauthlib import common +from oauthlib.uri_validate import is_absolute_uri + +from .. import errors +from ..request_validator import RequestValidator +from .base import GrantTypeBase + +log = logging.getLogger(__name__) + + +class ImplicitGrant(GrantTypeBase): + + """`Implicit Grant`_ + + The implicit grant type is used to obtain access tokens (it does not + support the issuance of refresh tokens) and is optimized for public + clients known to operate a particular redirection URI. These clients + are typically implemented in a browser using a scripting language + such as JavaScript. + + Unlike the authorization code grant type, in which the client makes + separate requests for authorization and for an access token, the + client receives the access token as the result of the authorization + request. 
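For the ClientCredentialsGrant defined above, the request body it validates can be produced with oauthlib's client-side BackendApplicationClient. A sketch with a placeholder client id follows; the actual client credentials would normally travel in an HTTP Basic Authorization header rather than in this body.

```python
from oauthlib.oauth2 import BackendApplicationClient

client = BackendApplicationClient('s6BhdRkqt3')  # placeholder client id
body = client.prepare_request_body(scope='profile photos')
print(body)
# e.g. grant_type=client_credentials&scope=profile+photos
```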
+ + The implicit grant type does not include client authentication, and + relies on the presence of the resource owner and the registration of + the redirection URI. Because the access token is encoded into the + redirection URI, it may be exposed to the resource owner and other + applications residing on the same device:: + + +----------+ + | Resource | + | Owner | + | | + +----------+ + ^ + | + (B) + +----|-----+ Client Identifier +---------------+ + | -+----(A)-- & Redirection URI --->| | + | User- | | Authorization | + | Agent -|----(B)-- User authenticates -->| Server | + | | | | + | |<---(C)--- Redirection URI ----<| | + | | with Access Token +---------------+ + | | in Fragment + | | +---------------+ + | |----(D)--- Redirection URI ---->| Web-Hosted | + | | without Fragment | Client | + | | | Resource | + | (F) |<---(E)------- Script ---------<| | + | | +---------------+ + +-|--------+ + | | + (A) (G) Access Token + | | + ^ v + +---------+ + | | + | Client | + | | + +---------+ + + Note: The lines illustrating steps (A) and (B) are broken into two + parts as they pass through the user-agent. + + Figure 4: Implicit Grant Flow + + The flow illustrated in Figure 4 includes the following steps: + + (A) The client initiates the flow by directing the resource owner's + user-agent to the authorization endpoint. The client includes + its client identifier, requested scope, local state, and a + redirection URI to which the authorization server will send the + user-agent back once access is granted (or denied). + + (B) The authorization server authenticates the resource owner (via + the user-agent) and establishes whether the resource owner + grants or denies the client's access request. + + (C) Assuming the resource owner grants access, the authorization + server redirects the user-agent back to the client using the + redirection URI provided earlier. The redirection URI includes + the access token in the URI fragment. + + (D) The user-agent follows the redirection instructions by making a + request to the web-hosted client resource (which does not + include the fragment per [RFC2616]). The user-agent retains the + fragment information locally. + + (E) The web-hosted client resource returns a web page (typically an + HTML document with an embedded script) capable of accessing the + full redirection URI including the fragment retained by the + user-agent, and extracting the access token (and other + parameters) contained in the fragment. + + (F) The user-agent executes the script provided by the web-hosted + client resource locally, which extracts the access token. + + (G) The user-agent passes the access token to the client. + + See `Section 10.3`_ and `Section 10.16`_ for important security considerations + when using the implicit grant. + + .. _`Implicit Grant`: https://tools.ietf.org/html/rfc6749#section-4.2 + .. _`Section 10.3`: https://tools.ietf.org/html/rfc6749#section-10.3 + .. _`Section 10.16`: https://tools.ietf.org/html/rfc6749#section-10.16 + """ + + response_types = ['token'] + grant_allows_refresh_token = False + + def create_authorization_response(self, request, token_handler): + """Create an authorization response. + The client constructs the request URI by adding the following + parameters to the query component of the authorization endpoint URI + using the "application/x-www-form-urlencoded" format, per `Appendix B`_: + + response_type + REQUIRED. 
Value MUST be set to "token" for standard OAuth2 implicit flow + or "id_token token" or just "id_token" for OIDC implicit flow + + client_id + REQUIRED. The client identifier as described in `Section 2.2`_. + + redirect_uri + OPTIONAL. As described in `Section 3.1.2`_. + + scope + OPTIONAL. The scope of the access request as described by + `Section 3.3`_. + + state + RECOMMENDED. An opaque value used by the client to maintain + state between the request and callback. The authorization + server includes this value when redirecting the user-agent back + to the client. The parameter SHOULD be used for preventing + cross-site request forgery as described in `Section 10.12`_. + + The authorization server validates the request to ensure that all + required parameters are present and valid. The authorization server + MUST verify that the redirection URI to which it will redirect the + access token matches a redirection URI registered by the client as + described in `Section 3.1.2`_. + + .. _`Section 2.2`: https://tools.ietf.org/html/rfc6749#section-2.2 + .. _`Section 3.1.2`: https://tools.ietf.org/html/rfc6749#section-3.1.2 + .. _`Section 3.3`: https://tools.ietf.org/html/rfc6749#section-3.3 + .. _`Section 10.12`: https://tools.ietf.org/html/rfc6749#section-10.12 + .. _`Appendix B`: https://tools.ietf.org/html/rfc6749#appendix-B + """ + return self.create_token_response(request, token_handler) + + def create_token_response(self, request, token_handler): + """Return token or error embedded in the URI fragment. + + If the resource owner grants the access request, the authorization + server issues an access token and delivers it to the client by adding + the following parameters to the fragment component of the redirection + URI using the "application/x-www-form-urlencoded" format, per + `Appendix B`_: + + access_token + REQUIRED. The access token issued by the authorization server. + + token_type + REQUIRED. The type of the token issued as described in + `Section 7.1`_. Value is case insensitive. + + expires_in + RECOMMENDED. The lifetime in seconds of the access token. For + example, the value "3600" denotes that the access token will + expire in one hour from the time the response was generated. + If omitted, the authorization server SHOULD provide the + expiration time via other means or document the default value. + + scope + OPTIONAL, if identical to the scope requested by the client; + otherwise, REQUIRED. The scope of the access token as + described by `Section 3.3`_. + + state + REQUIRED if the "state" parameter was present in the client + authorization request. The exact value received from the + client. + + The authorization server MUST NOT issue a refresh token. + + .. _`Appendix B`: https://tools.ietf.org/html/rfc6749#appendix-B + .. _`Section 3.3`: https://tools.ietf.org/html/rfc6749#section-3.3 + .. _`Section 7.1`: https://tools.ietf.org/html/rfc6749#section-7.1 + """ + try: + # request.scopes is only mandated in post auth and both pre and + # post auth use validate_authorization_request + if not request.scopes: + raise ValueError('Scopes must be set on post auth.') + + self.validate_token_request(request) + + # If the request fails due to a missing, invalid, or mismatching + # redirection URI, or if the client identifier is missing or invalid, + # the authorization server SHOULD inform the resource owner of the + # error and MUST NOT automatically redirect the user-agent to the + # invalid redirection URI. 
+ except errors.FatalClientError as e: + log.debug('Fatal client error during validation of %r. %r.', + request, e) + raise + + # If the resource owner denies the access request or if the request + # fails for reasons other than a missing or invalid redirection URI, + # the authorization server informs the client by adding the following + # parameters to the fragment component of the redirection URI using the + # "application/x-www-form-urlencoded" format, per Appendix B: + # https://tools.ietf.org/html/rfc6749#appendix-B + except errors.OAuth2Error as e: + log.debug('Client error during validation of %r. %r.', request, e) + return {'Location': common.add_params_to_uri(request.redirect_uri, e.twotuples, + fragment=True)}, None, 302 + + # In OIDC implicit flow it is possible to have a request_type that does not include the access token! + # "id_token token" - return the access token and the id token + # "id_token" - don't return the access token + if "token" in request.response_type.split(): + token = token_handler.create_token(request, refresh_token=False, save_token=False) + else: + token = {} + + for modifier in self._token_modifiers: + token = modifier(token, token_handler, request) + self.request_validator.save_token(token, request) + return self.prepare_authorization_response( + request, token, {}, None, 302) + + def validate_authorization_request(self, request): + return self.validate_token_request(request) + + def validate_token_request(self, request): + """Check the token request for normal and fatal errors. + + This method is very similar to validate_authorization_request in + the AuthorizationCodeGrant but differ in a few subtle areas. + + A normal error could be a missing response_type parameter or the client + attempting to access scope it is not allowed to ask authorization for. + Normal errors can safely be included in the redirection URI and + sent back to the client. + + Fatal errors occur when the client_id or redirect_uri is invalid or + missing. These must be caught by the provider and handled, how this + is done is outside of the scope of OAuthLib but showing an error + page describing the issue is a good idea. + """ + + # First check for fatal errors + + # If the request fails due to a missing, invalid, or mismatching + # redirection URI, or if the client identifier is missing or invalid, + # the authorization server SHOULD inform the resource owner of the + # error and MUST NOT automatically redirect the user-agent to the + # invalid redirection URI. + + # First check duplicate parameters + for param in ('client_id', 'response_type', 'redirect_uri', 'scope', 'state'): + try: + duplicate_params = request.duplicate_params + except ValueError: + raise errors.InvalidRequestFatalError(description='Unable to parse query string', request=request) + if param in duplicate_params: + raise errors.InvalidRequestFatalError(description='Duplicate %s parameter.' % param, request=request) + + # REQUIRED. The client identifier as described in Section 2.2. + # https://tools.ietf.org/html/rfc6749#section-2.2 + if not request.client_id: + raise errors.MissingClientIdError(request=request) + + if not self.request_validator.validate_client_id(request.client_id, request): + raise errors.InvalidClientIdError(request=request) + + # OPTIONAL. As described in Section 3.1.2. 
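On the client side, the fragment-encoded response assembled above is usually parsed back into a token dict. The sketch below assumes oauthlib's MobileApplicationClient as the consuming client; the client id, URI, token, and state values are made up.

```python
from oauthlib.oauth2 import MobileApplicationClient

client = MobileApplicationClient('s6BhdRkqt3')       # made-up client id
response_uri = ('https://client.example.com/cb'
                '#access_token=2YotnFZFEjr1zCsicMWpAA'
                '&token_type=Bearer&expires_in=3600&state=xyz')

# The state argument is checked against the value echoed in the fragment.
token = client.parse_request_uri_response(response_uri, state='xyz')
print(token['access_token'], token['token_type'])
```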
+ # https://tools.ietf.org/html/rfc6749#section-3.1.2 + if request.redirect_uri is not None: + request.using_default_redirect_uri = False + log.debug('Using provided redirect_uri %s', request.redirect_uri) + if not is_absolute_uri(request.redirect_uri): + raise errors.InvalidRedirectURIError(request=request) + + # The authorization server MUST verify that the redirection URI + # to which it will redirect the access token matches a + # redirection URI registered by the client as described in + # Section 3.1.2. + # https://tools.ietf.org/html/rfc6749#section-3.1.2 + if not self.request_validator.validate_redirect_uri( + request.client_id, request.redirect_uri, request): + raise errors.MismatchingRedirectURIError(request=request) + else: + request.redirect_uri = self.request_validator.get_default_redirect_uri( + request.client_id, request) + request.using_default_redirect_uri = True + log.debug('Using default redirect_uri %s.', request.redirect_uri) + if not request.redirect_uri: + raise errors.MissingRedirectURIError(request=request) + if not is_absolute_uri(request.redirect_uri): + raise errors.InvalidRedirectURIError(request=request) + + # Then check for normal errors. + + request_info = self._run_custom_validators(request, + self.custom_validators.all_pre) + + + # If the resource owner denies the access request or if the request + # fails for reasons other than a missing or invalid redirection URI, + # the authorization server informs the client by adding the following + # parameters to the fragment component of the redirection URI using the + # "application/x-www-form-urlencoded" format, per Appendix B. + # https://tools.ietf.org/html/rfc6749#appendix-B + + # Note that the correct parameters to be added are automatically + # populated through the use of specific exceptions + + # REQUIRED. + if request.response_type is None: + raise errors.MissingResponseTypeError(request=request) + # Value MUST be one of our registered types: "token" by default or if using OIDC "id_token" or "id_token token" + elif not set(request.response_type.split()).issubset(self.response_types): + raise errors.UnsupportedResponseTypeError(request=request) + + log.debug('Validating use of response_type token for client %r (%r).', + request.client_id, request.client) + if not self.request_validator.validate_response_type(request.client_id, + request.response_type, + request.client, request): + + log.debug('Client %s is not authorized to use response_type %s.', + request.client_id, request.response_type) + raise errors.UnauthorizedClientError(request=request) + + # OPTIONAL. The scope of the access request as described by Section 3.3 + # https://tools.ietf.org/html/rfc6749#section-3.3 + self.validate_scopes(request) + + request_info.update({ + 'client_id': request.client_id, + 'redirect_uri': request.redirect_uri, + 'response_type': request.response_type, + 'state': request.state, + 'request': request, + }) + + request_info = self._run_custom_validators(request, + self.custom_validators.all_post, + request_info) + + return request.scopes, request_info + + + def _run_custom_validators(self, + request, + validations, + request_info=None): + # Make a copy so we don't modify the existing request_info dict + request_info = {} if request_info is None else request_info.copy() + # For implicit grant, auth_validators and token_validators are + # basically equivalent since the token is returned from the + # authorization endpoint. 
+ for validator in validations: + result = validator(request) + if result is not None: + request_info.update(result) + return request_info diff --git a/env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/grant_types/openid_connect.py b/env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/grant_types/openid_connect.py new file mode 100644 index 0000000..4c98864 --- /dev/null +++ b/env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/grant_types/openid_connect.py @@ -0,0 +1,390 @@ +# -*- coding: utf-8 -*- +""" +oauthlib.oauth2.rfc6749.grant_types.openid_connect +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +""" +from __future__ import absolute_import, unicode_literals + +import datetime +import logging +from json import loads + +from ..errors import ConsentRequired, InvalidRequestError, LoginRequired +from ..request_validator import RequestValidator +from .authorization_code import AuthorizationCodeGrant +from .base import GrantTypeBase +from .implicit import ImplicitGrant + +log = logging.getLogger(__name__) + +class OIDCNoPrompt(Exception): + """Exception used to inform users that no explicit authorization is needed. + + Normally users authorize requests after validation of the request is done. + Then post-authorization validation is again made and a response containing + an auth code or token is created. However, when OIDC clients request + no prompting of user authorization the final response is created directly. + + Example (without the shortcut for no prompt) + + scopes, req_info = endpoint.validate_authorization_request(url, ...) + authorization_view = create_fancy_auth_form(scopes, req_info) + return authorization_view + + Example (with the no prompt shortcut) + try: + scopes, req_info = endpoint.validate_authorization_request(url, ...) + authorization_view = create_fancy_auth_form(scopes, req_info) + return authorization_view + except OIDCNoPrompt: + # Note: Location will be set for you + headers, body, status = endpoint.create_authorization_response(url, ...) + redirect_view = create_redirect(headers, body, status) + return redirect_view + """ + + def __init__(self): + msg = ("OIDC request for no user interaction received. Do not ask user " + "for authorization, it should been done using silent " + "authentication through create_authorization_response. " + "See OIDCNoPrompt.__doc__ for more details.") + super(OIDCNoPrompt, self).__init__(msg) + + +class AuthCodeGrantDispatcher(object): + """ + This is an adapter class that will route simple Authorization Code requests, those that have response_type=code and a scope + including 'openid' to either the default_auth_grant or the oidc_auth_grant based on the scopes requested. 
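A sketch of how the dispatcher described above might be wired, assuming a bare RequestValidator stands in for a real implementation:

```python
from oauthlib.oauth2 import RequestValidator
from oauthlib.oauth2.rfc6749.grant_types import AuthorizationCodeGrant
from oauthlib.oauth2.rfc6749.grant_types.openid_connect import (
    AuthCodeGrantDispatcher, OpenIDConnectAuthCode)

validator = RequestValidator()   # stand-in; subclass this in a real provider
dispatcher = AuthCodeGrantDispatcher(
    default_auth_grant=AuthorizationCodeGrant(request_validator=validator),
    oidc_auth_grant=OpenIDConnectAuthCode(request_validator=validator))
# Requests whose scopes include 'openid' are routed to the OIDC grant,
# all other authorization-code requests go to the plain OAuth 2 grant.
```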
+ """ + def __init__(self, default_auth_grant=None, oidc_auth_grant=None): + self.default_auth_grant = default_auth_grant + self.oidc_auth_grant = oidc_auth_grant + + def _handler_for_request(self, request): + handler = self.default_auth_grant + + if request.scopes and "openid" in request.scopes: + handler = self.oidc_auth_grant + + log.debug('Selecting handler for request %r.', handler) + return handler + + def create_authorization_response(self, request, token_handler): + return self._handler_for_request(request).create_authorization_response(request, token_handler) + + def validate_authorization_request(self, request): + return self._handler_for_request(request).validate_authorization_request(request) + + +class OpenIDConnectBase(object): + + # Just proxy the majority of method calls through to the + # proxy_target grant type handler, which will usually be either + # the standard OAuth2 AuthCode or Implicit grant types. + def __getattr__(self, attr): + return getattr(self.proxy_target, attr) + + def __setattr__(self, attr, value): + proxied_attrs = set(('refresh_token', 'response_types')) + if attr in proxied_attrs: + setattr(self.proxy_target, attr, value) + else: + super(OpenIDConnectBase, self).__setattr__(attr, value) + + def validate_authorization_request(self, request): + """Validates the OpenID Connect authorization request parameters. + + :returns: (list of scopes, dict of request info) + """ + # If request.prompt is 'none' then no login/authorization form should + # be presented to the user. Instead, a silent login/authorization + # should be performed. + if request.prompt == 'none': + raise OIDCNoPrompt() + else: + return self.proxy_target.validate_authorization_request(request) + + def _inflate_claims(self, request): + # this may be called multiple times in a single request so make sure we only de-serialize the claims once + if request.claims and not isinstance(request.claims, dict): + # specific claims are requested during the Authorization Request and may be requested for inclusion + # in either the id_token or the UserInfo endpoint response + # see http://openid.net/specs/openid-connect-core-1_0.html#ClaimsParameter + try: + request.claims = loads(request.claims) + except Exception as ex: + raise InvalidRequestError(description="Malformed claims parameter", + uri="http://openid.net/specs/openid-connect-core-1_0.html#ClaimsParameter") + + def add_id_token(self, token, token_handler, request): + # Treat it as normal OAuth 2 auth code request if openid is not present + if not request.scopes or 'openid' not in request.scopes: + return token + + # Only add an id token on auth/token step if asked for. + if request.response_type and 'id_token' not in request.response_type: + return token + + if 'state' not in token: + token['state'] = request.state + + if request.max_age: + d = datetime.datetime.utcnow() + token['auth_time'] = d.isoformat("T") + "Z" + + # TODO: acr claims (probably better handled by server code using oauthlib in get_id_token) + + token['id_token'] = self.request_validator.get_id_token(token, token_handler, request) + + return token + + def openid_authorization_validator(self, request): + """Perform OpenID Connect specific authorization request validation. + + nonce + OPTIONAL. String value used to associate a Client session with + an ID Token, and to mitigate replay attacks. The value is + passed through unmodified from the Authentication Request to + the ID Token. 
Sufficient entropy MUST be present in the nonce + values used to prevent attackers from guessing values + + display + OPTIONAL. ASCII string value that specifies how the + Authorization Server displays the authentication and consent + user interface pages to the End-User. The defined values are: + + page - The Authorization Server SHOULD display the + authentication and consent UI consistent with a full User + Agent page view. If the display parameter is not specified, + this is the default display mode. + + popup - The Authorization Server SHOULD display the + authentication and consent UI consistent with a popup User + Agent window. The popup User Agent window should be of an + appropriate size for a login-focused dialog and should not + obscure the entire window that it is popping up over. + + touch - The Authorization Server SHOULD display the + authentication and consent UI consistent with a device that + leverages a touch interface. + + wap - The Authorization Server SHOULD display the + authentication and consent UI consistent with a "feature + phone" type display. + + The Authorization Server MAY also attempt to detect the + capabilities of the User Agent and present an appropriate + display. + + prompt + OPTIONAL. Space delimited, case sensitive list of ASCII string + values that specifies whether the Authorization Server prompts + the End-User for reauthentication and consent. The defined + values are: + + none - The Authorization Server MUST NOT display any + authentication or consent user interface pages. An error is + returned if an End-User is not already authenticated or the + Client does not have pre-configured consent for the + requested Claims or does not fulfill other conditions for + processing the request. The error code will typically be + login_required, interaction_required, or another code + defined in Section 3.1.2.6. This can be used as a method to + check for existing authentication and/or consent. + + login - The Authorization Server SHOULD prompt the End-User + for reauthentication. If it cannot reauthenticate the + End-User, it MUST return an error, typically + login_required. + + consent - The Authorization Server SHOULD prompt the + End-User for consent before returning information to the + Client. If it cannot obtain consent, it MUST return an + error, typically consent_required. + + select_account - The Authorization Server SHOULD prompt the + End-User to select a user account. This enables an End-User + who has multiple accounts at the Authorization Server to + select amongst the multiple accounts that they might have + current sessions for. If it cannot obtain an account + selection choice made by the End-User, it MUST return an + error, typically account_selection_required. + + The prompt parameter can be used by the Client to make sure + that the End-User is still present for the current session or + to bring attention to the request. If this parameter contains + none with any other value, an error is returned. + + max_age + OPTIONAL. Maximum Authentication Age. Specifies the allowable + elapsed time in seconds since the last time the End-User was + actively authenticated by the OP. If the elapsed time is + greater than this value, the OP MUST attempt to actively + re-authenticate the End-User. (The max_age request parameter + corresponds to the OpenID 2.0 PAPE [OpenID.PAPE] max_auth_age + request parameter.) When max_age is used, the ID Token returned + MUST include an auth_time Claim Value. + + ui_locales + OPTIONAL. 
End-User's preferred languages and scripts for the + user interface, represented as a space-separated list of BCP47 + [RFC5646] language tag values, ordered by preference. For + instance, the value "fr-CA fr en" represents a preference for + French as spoken in Canada, then French (without a region + designation), followed by English (without a region + designation). An error SHOULD NOT result if some or all of the + requested locales are not supported by the OpenID Provider. + + id_token_hint + OPTIONAL. ID Token previously issued by the Authorization + Server being passed as a hint about the End-User's current or + past authenticated session with the Client. If the End-User + identified by the ID Token is logged in or is logged in by the + request, then the Authorization Server returns a positive + response; otherwise, it SHOULD return an error, such as + login_required. When possible, an id_token_hint SHOULD be + present when prompt=none is used and an invalid_request error + MAY be returned if it is not; however, the server SHOULD + respond successfully when possible, even if it is not present. + The Authorization Server need not be listed as an audience of + the ID Token when it is used as an id_token_hint value. If the + ID Token received by the RP from the OP is encrypted, to use it + as an id_token_hint, the Client MUST decrypt the signed ID + Token contained within the encrypted ID Token. The Client MAY + re-encrypt the signed ID token to the Authentication Server + using a key that enables the server to decrypt the ID Token, + and use the re-encrypted ID token as the id_token_hint value. + + login_hint + OPTIONAL. Hint to the Authorization Server about the login + identifier the End-User might use to log in (if necessary). + This hint can be used by an RP if it first asks the End-User + for their e-mail address (or other identifier) and then wants + to pass that value as a hint to the discovered authorization + service. It is RECOMMENDED that the hint value match the value + used for discovery. This value MAY also be a phone number in + the format specified for the phone_number Claim. The use of + this parameter is left to the OP's discretion. + + acr_values + OPTIONAL. Requested Authentication Context Class Reference + values. Space-separated string that specifies the acr values + that the Authorization Server is being requested to use for + processing this Authentication Request, with the values + appearing in order of preference. The Authentication Context + Class satisfied by the authentication performed is returned as + the acr Claim Value, as specified in Section 2. The acr Claim + is requested as a Voluntary Claim by this parameter. + """ + + # Treat it as normal OAuth 2 auth code request if openid is not present + if not request.scopes or 'openid' not in request.scopes: + return {} + + prompt = request.prompt if request.prompt else [] + if hasattr(prompt, 'split'): + prompt = prompt.strip().split() + prompt = set(prompt) + + if 'none' in prompt: + + if len(prompt) > 1: + msg = "Prompt none is mutually exclusive with other values." + raise InvalidRequestError(request=request, description=msg) + + # prompt other than 'none' should be handled by the server code that + # uses oauthlib + if not request.id_token_hint: + msg = "Prompt is set to none yet id_token_hint is missing." 
+ raise InvalidRequestError(request=request, description=msg) + + if not self.request_validator.validate_silent_login(request): + raise LoginRequired(request=request) + + if not self.request_validator.validate_silent_authorization(request): + raise ConsentRequired(request=request) + + self._inflate_claims(request) + + if not self.request_validator.validate_user_match( + request.id_token_hint, request.scopes, request.claims, request): + msg = "Session user does not match client supplied user." + raise LoginRequired(request=request, description=msg) + + request_info = { + 'display': request.display, + 'nonce': request.nonce, + 'prompt': prompt, + 'ui_locales': request.ui_locales.split() if request.ui_locales else [], + 'id_token_hint': request.id_token_hint, + 'login_hint': request.login_hint, + 'claims': request.claims + } + + return request_info + + def openid_implicit_authorization_validator(self, request): + """Additional validation when following the implicit flow. + """ + # Undefined in OpenID Connect, fall back to OAuth2 definition. + if request.response_type == 'token': + return {} + + # Treat it as normal OAuth 2 auth code request if openid is not present + if not request.scopes or 'openid' not in request.scopes: + return {} + + # REQUIRED. String value used to associate a Client session with an ID + # Token, and to mitigate replay attacks. The value is passed through + # unmodified from the Authentication Request to the ID Token. + # Sufficient entropy MUST be present in the nonce values used to + # prevent attackers from guessing values. For implementation notes, see + # Section 15.5.2. + if not request.nonce: + desc = 'Request is missing mandatory nonce parameter.' + raise InvalidRequestError(request=request, description=desc) + + return {} + + +class OpenIDConnectAuthCode(OpenIDConnectBase): + + def __init__(self, request_validator=None, **kwargs): + self.proxy_target = AuthorizationCodeGrant( + request_validator=request_validator, **kwargs) + self.custom_validators.post_auth.append( + self.openid_authorization_validator) + self.register_token_modifier(self.add_id_token) + +class OpenIDConnectImplicit(OpenIDConnectBase): + + def __init__(self, request_validator=None, **kwargs): + self.proxy_target = ImplicitGrant( + request_validator=request_validator, **kwargs) + self.register_response_type('id_token') + self.register_response_type('id_token token') + self.custom_validators.post_auth.append( + self.openid_authorization_validator) + self.custom_validators.post_auth.append( + self.openid_implicit_authorization_validator) + self.register_token_modifier(self.add_id_token) + +class OpenIDConnectHybrid(OpenIDConnectBase): + + def __init__(self, request_validator=None, **kwargs): + self.request_validator = request_validator or RequestValidator() + + self.proxy_target = AuthorizationCodeGrant( + request_validator=request_validator, **kwargs) + # All hybrid response types should be fragment-encoded. 
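The prompt=none branch above leans on a handful of validator hooks. The sketch below lists the ones this module calls, with trivially permissive placeholder bodies that a real deployment would replace; it is not a working validator.

```python
from oauthlib.oauth2 import RequestValidator

class OIDCValidator(RequestValidator):
    # Hook names follow the calls made in this module; return values are
    # permissive placeholders for illustration only.

    def validate_silent_login(self, request):
        # True only if the end-user already has an authenticated session.
        return True

    def validate_silent_authorization(self, request):
        # True only if consent for the requested scopes is already on record.
        return True

    def validate_user_match(self, id_token_hint, scopes, claims, request):
        # Compare the session user against the subject of id_token_hint.
        return True

    def get_id_token(self, token, token_handler, request):
        # A real provider returns a signed JWT here.
        return 'placeholder.id.token'
```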
+ self.proxy_target.default_response_mode = "fragment" + self.register_response_type('code id_token') + self.register_response_type('code token') + self.register_response_type('code id_token token') + self.custom_validators.post_auth.append( + self.openid_authorization_validator) + # Hybrid flows can return the id_token from the authorization + # endpoint as part of the 'code' response + self.register_code_modifier(self.add_token) + self.register_code_modifier(self.add_id_token) + self.register_token_modifier(self.add_id_token) diff --git a/env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/grant_types/refresh_token.py b/env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/grant_types/refresh_token.py new file mode 100644 index 0000000..c2d86f7 --- /dev/null +++ b/env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/grant_types/refresh_token.py @@ -0,0 +1,131 @@ +# -*- coding: utf-8 -*- +""" +oauthlib.oauth2.rfc6749.grant_types +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +""" +from __future__ import absolute_import, unicode_literals + +import json +import logging + +from .. import errors, utils +from ..request_validator import RequestValidator +from .base import GrantTypeBase + +log = logging.getLogger(__name__) + + +class RefreshTokenGrant(GrantTypeBase): + + """`Refresh token grant`_ + + .. _`Refresh token grant`: https://tools.ietf.org/html/rfc6749#section-6 + """ + + def __init__(self, request_validator=None, + issue_new_refresh_tokens=True, + **kwargs): + super(RefreshTokenGrant, self).__init__( + request_validator, + issue_new_refresh_tokens=issue_new_refresh_tokens, + **kwargs) + + def create_token_response(self, request, token_handler): + """Create a new access token from a refresh_token. + + If valid and authorized, the authorization server issues an access + token as described in `Section 5.1`_. If the request failed + verification or is invalid, the authorization server returns an error + response as described in `Section 5.2`_. + + The authorization server MAY issue a new refresh token, in which case + the client MUST discard the old refresh token and replace it with the + new refresh token. The authorization server MAY revoke the old + refresh token after issuing a new refresh token to the client. If a + new refresh token is issued, the refresh token scope MUST be + identical to that of the refresh token included by the client in the + request. + + .. _`Section 5.1`: https://tools.ietf.org/html/rfc6749#section-5.1 + .. _`Section 5.2`: https://tools.ietf.org/html/rfc6749#section-5.2 + """ + headers = { + 'Content-Type': 'application/json', + 'Cache-Control': 'no-store', + 'Pragma': 'no-cache', + } + try: + log.debug('Validating refresh token request, %r.', request) + self.validate_token_request(request) + except errors.OAuth2Error as e: + return headers, e.json, e.status_code + + token = token_handler.create_token(request, + refresh_token=self.issue_new_refresh_tokens, save_token=False) + + for modifier in self._token_modifiers: + token = modifier(token) + self.request_validator.save_token(token, request) + + log.debug('Issuing new token to client id %r (%r), %r.', + request.client_id, request.client, token) + return headers, json.dumps(token), 200 + + def validate_token_request(self, request): + # REQUIRED. Value MUST be set to "refresh_token". 
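The body such a refresh request carries can be built with the prepare_token_request() helper that this patch adds in oauthlib/oauth2/rfc6749/parameters.py; the refresh token and scope below are placeholders.

```python
from oauthlib.oauth2.rfc6749.parameters import prepare_token_request

body = prepare_token_request('refresh_token',
                             refresh_token='tGzv3JOkF0XG5Qx2TlKWIA',
                             scope='read')
print(body)
# e.g. grant_type=refresh_token&refresh_token=tGzv3JOkF0XG5Qx2TlKWIA&scope=read
```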
+ if request.grant_type != 'refresh_token': + raise errors.UnsupportedGrantTypeError(request=request) + + for validator in self.custom_validators.pre_token: + validator(request) + + if request.refresh_token is None: + raise errors.InvalidRequestError( + description='Missing refresh token parameter.', + request=request) + + # Because refresh tokens are typically long-lasting credentials used to + # request additional access tokens, the refresh token is bound to the + # client to which it was issued. If the client type is confidential or + # the client was issued client credentials (or assigned other + # authentication requirements), the client MUST authenticate with the + # authorization server as described in Section 3.2.1. + # https://tools.ietf.org/html/rfc6749#section-3.2.1 + if self.request_validator.client_authentication_required(request): + log.debug('Authenticating client, %r.', request) + if not self.request_validator.authenticate_client(request): + log.debug('Invalid client (%r), denying access.', request) + raise errors.InvalidClientError(request=request) + elif not self.request_validator.authenticate_client_id(request.client_id, request): + log.debug('Client authentication failed, %r.', request) + raise errors.InvalidClientError(request=request) + + # Ensure client is authorized use of this grant type + self.validate_grant_type(request) + + # REQUIRED. The refresh token issued to the client. + log.debug('Validating refresh token %s for client %r.', + request.refresh_token, request.client) + if not self.request_validator.validate_refresh_token( + request.refresh_token, request.client, request): + log.debug('Invalid refresh token, %s, for client %r.', + request.refresh_token, request.client) + raise errors.InvalidGrantError(request=request) + + original_scopes = utils.scope_to_list( + self.request_validator.get_original_scopes( + request.refresh_token, request)) + + if request.scope: + request.scopes = utils.scope_to_list(request.scope) + if (not all((s in original_scopes for s in request.scopes)) + and not self.request_validator.is_within_original_scope( + request.scopes, request.refresh_token, request)): + log.debug('Refresh token %s lack requested scopes, %r.', + request.refresh_token, request.scopes) + raise errors.InvalidScopeError(request=request) + else: + request.scopes = original_scopes + + for validator in self.custom_validators.post_token: + validator(request) diff --git a/env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/grant_types/resource_owner_password_credentials.py b/env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/grant_types/resource_owner_password_credentials.py new file mode 100644 index 0000000..e5f04af --- /dev/null +++ b/env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/grant_types/resource_owner_password_credentials.py @@ -0,0 +1,197 @@ +# -*- coding: utf-8 -*- +""" +oauthlib.oauth2.rfc6749.grant_types +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +""" +from __future__ import absolute_import, unicode_literals + +import json +import logging + +from .. import errors +from ..request_validator import RequestValidator +from .base import GrantTypeBase + +log = logging.getLogger(__name__) + + +class ResourceOwnerPasswordCredentialsGrant(GrantTypeBase): + + """`Resource Owner Password Credentials Grant`_ + + The resource owner password credentials grant type is suitable in + cases where the resource owner has a trust relationship with the + client, such as the device operating system or a highly privileged + application. 
The authorization server should take special care when + enabling this grant type and only allow it when other flows are not + viable. + + This grant type is suitable for clients capable of obtaining the + resource owner's credentials (username and password, typically using + an interactive form). It is also used to migrate existing clients + using direct authentication schemes such as HTTP Basic or Digest + authentication to OAuth by converting the stored credentials to an + access token:: + + +----------+ + | Resource | + | Owner | + | | + +----------+ + v + | Resource Owner + (A) Password Credentials + | + v + +---------+ +---------------+ + | |>--(B)---- Resource Owner ------->| | + | | Password Credentials | Authorization | + | Client | | Server | + | |<--(C)---- Access Token ---------<| | + | | (w/ Optional Refresh Token) | | + +---------+ +---------------+ + + Figure 5: Resource Owner Password Credentials Flow + + The flow illustrated in Figure 5 includes the following steps: + + (A) The resource owner provides the client with its username and + password. + + (B) The client requests an access token from the authorization + server's token endpoint by including the credentials received + from the resource owner. When making the request, the client + authenticates with the authorization server. + + (C) The authorization server authenticates the client and validates + the resource owner credentials, and if valid, issues an access + token. + + .. _`Resource Owner Password Credentials Grant`: https://tools.ietf.org/html/rfc6749#section-4.3 + """ + + def create_token_response(self, request, token_handler): + """Return token or error in json format. + + If the access token request is valid and authorized, the + authorization server issues an access token and optional refresh + token as described in `Section 5.1`_. If the request failed client + authentication or is invalid, the authorization server returns an + error response as described in `Section 5.2`_. + + .. _`Section 5.1`: https://tools.ietf.org/html/rfc6749#section-5.1 + .. 
_`Section 5.2`: https://tools.ietf.org/html/rfc6749#section-5.2 + """ + headers = { + 'Content-Type': 'application/json', + 'Cache-Control': 'no-store', + 'Pragma': 'no-cache', + } + try: + if self.request_validator.client_authentication_required(request): + log.debug('Authenticating client, %r.', request) + if not self.request_validator.authenticate_client(request): + log.debug('Client authentication failed, %r.', request) + raise errors.InvalidClientError(request=request) + elif not self.request_validator.authenticate_client_id(request.client_id, request): + log.debug('Client authentication failed, %r.', request) + raise errors.InvalidClientError(request=request) + log.debug('Validating access token request, %r.', request) + self.validate_token_request(request) + except errors.OAuth2Error as e: + log.debug('Client error in token request, %s.', e) + return headers, e.json, e.status_code + + token = token_handler.create_token(request, self.refresh_token, save_token=False) + + for modifier in self._token_modifiers: + token = modifier(token) + self.request_validator.save_token(token, request) + + log.debug('Issuing token %r to client id %r (%r) and username %s.', + token, request.client_id, request.client, request.username) + return headers, json.dumps(token), 200 + + def validate_token_request(self, request): + """ + The client makes a request to the token endpoint by adding the + following parameters using the "application/x-www-form-urlencoded" + format per Appendix B with a character encoding of UTF-8 in the HTTP + request entity-body: + + grant_type + REQUIRED. Value MUST be set to "password". + + username + REQUIRED. The resource owner username. + + password + REQUIRED. The resource owner password. + + scope + OPTIONAL. The scope of the access request as described by + `Section 3.3`_. + + If the client type is confidential or the client was issued client + credentials (or assigned other authentication requirements), the + client MUST authenticate with the authorization server as described + in `Section 3.2.1`_. + + The authorization server MUST: + + o require client authentication for confidential clients or for any + client that was issued client credentials (or with other + authentication requirements), + + o authenticate the client if client authentication is included, and + + o validate the resource owner password credentials using its + existing password validation algorithm. + + Since this access token request utilizes the resource owner's + password, the authorization server MUST protect the endpoint against + brute force attacks (e.g., using rate-limitation or generating + alerts). + + .. _`Section 3.3`: https://tools.ietf.org/html/rfc6749#section-3.3 + .. _`Section 3.2.1`: https://tools.ietf.org/html/rfc6749#section-3.2.1 + """ + for validator in self.custom_validators.pre_token: + validator(request) + + for param in ('grant_type', 'username', 'password'): + if not getattr(request, param, None): + raise errors.InvalidRequestError( + 'Request is missing %s parameter.' % param, request=request) + + for param in ('grant_type', 'username', 'password', 'scope'): + if param in request.duplicate_params: + raise errors.InvalidRequestError(description='Duplicate %s parameter.' % param, request=request) + + # This error should rarely (if ever) occur if requests are routed to + # grant type handlers based on the grant_type parameter. 
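On the client side, oauthlib's LegacyApplicationClient builds the matching request body for this grant; the client id, username, and password below are placeholders.

```python
from oauthlib.oauth2 import LegacyApplicationClient

client = LegacyApplicationClient('s6BhdRkqt3')
body = client.prepare_request_body(username='johndoe',
                                   password='A3ddj3w',
                                   scope='read')
print(body)
# e.g. grant_type=password&username=johndoe&password=A3ddj3w&scope=read
```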
+ if not request.grant_type == 'password': + raise errors.UnsupportedGrantTypeError(request=request) + + log.debug('Validating username %s.', request.username) + if not self.request_validator.validate_user(request.username, + request.password, request.client, request): + raise errors.InvalidGrantError( + 'Invalid credentials given.', request=request) + else: + if not hasattr(request.client, 'client_id'): + raise NotImplementedError( + 'Validate user must set the ' + 'request.client.client_id attribute ' + 'in authenticate_client.') + log.debug('Authorizing access to user %r.', request.user) + + # Ensure client is authorized use of this grant type + self.validate_grant_type(request) + + if request.client: + request.client_id = request.client_id or request.client.client_id + self.validate_scopes(request) + + for validator in self.custom_validators.post_token: + validator(request) diff --git a/env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/parameters.py b/env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/parameters.py new file mode 100644 index 0000000..0107933 --- /dev/null +++ b/env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/parameters.py @@ -0,0 +1,409 @@ +# -*- coding: utf-8 -*- +""" +oauthlib.oauth2.rfc6749.parameters +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +This module contains methods related to `Section 4`_ of the OAuth 2 RFC. + +.. _`Section 4`: https://tools.ietf.org/html/rfc6749#section-4 +""" +from __future__ import absolute_import, unicode_literals + +import json +import os +import time + +from oauthlib.common import add_params_to_qs, add_params_to_uri, unicode_type +from oauthlib.signals import scope_changed + +from .errors import (InsecureTransportError, MismatchingStateError, + MissingCodeError, MissingTokenError, + MissingTokenTypeError, raise_from_error) +from .tokens import OAuth2Token +from .utils import is_secure_transport, list_to_scope, scope_to_list + +try: + import urlparse +except ImportError: + import urllib.parse as urlparse + + +def prepare_grant_uri(uri, client_id, response_type, redirect_uri=None, + scope=None, state=None, **kwargs): + """Prepare the authorization grant request URI. + + The client constructs the request URI by adding the following + parameters to the query component of the authorization endpoint URI + using the ``application/x-www-form-urlencoded`` format as defined by + [`W3C.REC-html401-19991224`_]: + + :param response_type: To indicate which OAuth 2 grant/flow is required, + "code" and "token". + :param client_id: The client identifier as described in `Section 2.2`_. + :param redirect_uri: The client provided URI to redirect back to after + authorization as described in `Section 3.1.2`_. + :param scope: The scope of the access request as described by + `Section 3.3`_. + + :param state: An opaque value used by the client to maintain + state between the request and callback. The authorization + server includes this value when redirecting the user-agent + back to the client. The parameter SHOULD be used for + preventing cross-site request forgery as described in + `Section 10.12`_. + :param kwargs: Extra arguments to embed in the grant/authorization URL. + + An example of an authorization code grant authorization URL: + + .. code-block:: http + + GET /authorize?response_type=code&client_id=s6BhdRkqt3&state=xyz + &redirect_uri=https%3A%2F%2Fclient%2Eexample%2Ecom%2Fcb HTTP/1.1 + Host: server.example.com + + .. _`W3C.REC-html401-19991224`: https://tools.ietf.org/html/rfc6749#ref-W3C.REC-html401-19991224 + .. 
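As a client-side counterpart to the password grant handled above, the ``prepare_token_request`` helper added just below in ``parameters.py`` builds the urlencoded body of step (B); the username, password and scope values here are purely illustrative::

    from oauthlib.oauth2.rfc6749.parameters import prepare_token_request

    # Step (B): forward the resource owner's credentials to the token endpoint.
    body = prepare_token_request('password', username='alice',
                                 password='s3cret', scope='profile')
    # body is an application/x-www-form-urlencoded string along the lines of
    # 'grant_type=password&username=alice&password=s3cret&scope=profile'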
_`Section 2.2`: https://tools.ietf.org/html/rfc6749#section-2.2 + .. _`Section 3.1.2`: https://tools.ietf.org/html/rfc6749#section-3.1.2 + .. _`Section 3.3`: https://tools.ietf.org/html/rfc6749#section-3.3 + .. _`section 10.12`: https://tools.ietf.org/html/rfc6749#section-10.12 + """ + if not is_secure_transport(uri): + raise InsecureTransportError() + + params = [(('response_type', response_type)), + (('client_id', client_id))] + + if redirect_uri: + params.append(('redirect_uri', redirect_uri)) + if scope: + params.append(('scope', list_to_scope(scope))) + if state: + params.append(('state', state)) + + for k in kwargs: + if kwargs[k]: + params.append((unicode_type(k), kwargs[k])) + + return add_params_to_uri(uri, params) + + +def prepare_token_request(grant_type, body='', **kwargs): + """Prepare the access token request. + + The client makes a request to the token endpoint by adding the + following parameters using the ``application/x-www-form-urlencoded`` + format in the HTTP request entity-body: + + :param grant_type: To indicate grant type being used, i.e. "password", + "authorization_code" or "client_credentials". + :param body: Existing request body to embed parameters in. + :param code: If using authorization code grant, pass the previously + obtained authorization code as the ``code`` argument. + :param redirect_uri: If the "redirect_uri" parameter was included in the + authorization request as described in + `Section 4.1.1`_, and their values MUST be identical. + :param kwargs: Extra arguments to embed in the request body. + + An example of an authorization code token request body: + + .. code-block:: http + + grant_type=authorization_code&code=SplxlOBeZQQYbYS6WxSbIA + &redirect_uri=https%3A%2F%2Fclient%2Eexample%2Ecom%2Fcb + + .. _`Section 4.1.1`: https://tools.ietf.org/html/rfc6749#section-4.1.1 + """ + params = [('grant_type', grant_type)] + + if 'scope' in kwargs: + kwargs['scope'] = list_to_scope(kwargs['scope']) + + for k in kwargs: + if kwargs[k]: + params.append((unicode_type(k), kwargs[k])) + + return add_params_to_qs(body, params) + + +def prepare_token_revocation_request(url, token, token_type_hint="access_token", + callback=None, body='', **kwargs): + """Prepare a token revocation request. + + The client constructs the request by including the following parameters + using the "application/x-www-form-urlencoded" format in the HTTP request + entity-body: + + token REQUIRED. The token that the client wants to get revoked. + + token_type_hint OPTIONAL. A hint about the type of the token submitted + for revocation. Clients MAY pass this parameter in order to help the + authorization server to optimize the token lookup. If the server is unable + to locate the token using the given hint, it MUST extend its search across + all of its supported token types. An authorization server MAY ignore this + parameter, particularly if it is able to detect the token type + automatically. This specification defines two such values: + + * access_token: An access token as defined in [RFC6749], + `Section 1.4`_ + + * refresh_token: A refresh token as defined in [RFC6749], + `Section 1.5`_ + + Specific implementations, profiles, and extensions of this + specification MAY define other values for this parameter using the + registry defined in `Section 4.1.2`_. + + .. _`Section 1.4`: https://tools.ietf.org/html/rfc6749#section-1.4 + .. _`Section 1.5`: https://tools.ietf.org/html/rfc6749#section-1.5 + .. 
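A quick usage sketch of the request-building helpers in this module (the client identifier, redirect URI and token string are illustrative values only)::

    from oauthlib.oauth2.rfc6749.parameters import (
        prepare_grant_uri, prepare_token_revocation_request)

    # Authorization request for the authorization code flow.
    auth_url = prepare_grant_uri(
        'https://server.example.com/authorize', client_id='s6BhdRkqt3',
        response_type='code', redirect_uri='https://client.example.com/cb',
        scope=['profile'], state='xyz')

    # RFC 7009 revocation request; returns the URL, headers and body to send.
    url, headers, body = prepare_token_revocation_request(
        'https://server.example.com/revoke', '2YotnFZFEjr1zCsicMWpAA',
        token_type_hint='refresh_token')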
_`Section 4.1.2`: https://tools.ietf.org/html/rfc7009#section-4.1.2 + + """ + if not is_secure_transport(url): + raise InsecureTransportError() + + params = [('token', token)] + + if token_type_hint: + params.append(('token_type_hint', token_type_hint)) + + for k in kwargs: + if kwargs[k]: + params.append((unicode_type(k), kwargs[k])) + + headers = {'Content-Type': 'application/x-www-form-urlencoded'} + + if callback: + params.append(('callback', callback)) + return add_params_to_uri(url, params), headers, body + else: + return url, headers, add_params_to_qs(body, params) + + +def parse_authorization_code_response(uri, state=None): + """Parse authorization grant response URI into a dict. + + If the resource owner grants the access request, the authorization + server issues an authorization code and delivers it to the client by + adding the following parameters to the query component of the + redirection URI using the ``application/x-www-form-urlencoded`` format: + + **code** + REQUIRED. The authorization code generated by the + authorization server. The authorization code MUST expire + shortly after it is issued to mitigate the risk of leaks. A + maximum authorization code lifetime of 10 minutes is + RECOMMENDED. The client MUST NOT use the authorization code + more than once. If an authorization code is used more than + once, the authorization server MUST deny the request and SHOULD + revoke (when possible) all tokens previously issued based on + that authorization code. The authorization code is bound to + the client identifier and redirection URI. + + **state** + REQUIRED if the "state" parameter was present in the client + authorization request. The exact value received from the + client. + + :param uri: The full redirect URL back to the client. + :param state: The state parameter from the authorization request. + + For example, the authorization server redirects the user-agent by + sending the following HTTP response: + + .. code-block:: http + + HTTP/1.1 302 Found + Location: https://client.example.com/cb?code=SplxlOBeZQQYbYS6WxSbIA + &state=xyz + + """ + if not is_secure_transport(uri): + raise InsecureTransportError() + + query = urlparse.urlparse(uri).query + params = dict(urlparse.parse_qsl(query)) + + if not 'code' in params: + raise MissingCodeError("Missing code parameter in response.") + + if state and params.get('state', None) != state: + raise MismatchingStateError() + + return params + + +def parse_implicit_response(uri, state=None, scope=None): + """Parse the implicit token response URI into a dict. + + If the resource owner grants the access request, the authorization + server issues an access token and delivers it to the client by adding + the following parameters to the fragment component of the redirection + URI using the ``application/x-www-form-urlencoded`` format: + + **access_token** + REQUIRED. The access token issued by the authorization server. + + **token_type** + REQUIRED. The type of the token issued as described in + Section 7.1. Value is case insensitive. + + **expires_in** + RECOMMENDED. The lifetime in seconds of the access token. For + example, the value "3600" denotes that the access token will + expire in one hour from the time the response was generated. + If omitted, the authorization server SHOULD provide the + expiration time via other means or document the default value. + + **scope** + OPTIONAL, if identical to the scope requested by the client, + otherwise REQUIRED. The scope of the access token as described + by Section 3.3. 
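Parsing the code-flow redirect described above looks like this (the code and state are the RFC's own example values)::

    from oauthlib.oauth2.rfc6749.parameters import parse_authorization_code_response

    redirect = ('https://client.example.com/cb'
                '?code=SplxlOBeZQQYbYS6WxSbIA&state=xyz')
    params = parse_authorization_code_response(redirect, state='xyz')
    # {'code': 'SplxlOBeZQQYbYS6WxSbIA', 'state': 'xyz'}
    # A missing code raises MissingCodeError; a state mismatch raises
    # MismatchingStateError.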
+ + **state** + REQUIRED if the "state" parameter was present in the client + authorization request. The exact value received from the + client. + + Similar to the authorization code response, but with a full token provided + in the URL fragment: + + .. code-block:: http + + HTTP/1.1 302 Found + Location: http://example.com/cb#access_token=2YotnFZFEjr1zCsicMWpAA + &state=xyz&token_type=example&expires_in=3600 + """ + if not is_secure_transport(uri): + raise InsecureTransportError() + + fragment = urlparse.urlparse(uri).fragment + params = dict(urlparse.parse_qsl(fragment, keep_blank_values=True)) + + if 'scope' in params: + params['scope'] = scope_to_list(params['scope']) + + if 'expires_in' in params: + params['expires_at'] = time.time() + int(params['expires_in']) + + if state and params.get('state', None) != state: + raise ValueError("Mismatching or missing state in params.") + + params = OAuth2Token(params, old_scope=scope) + validate_token_parameters(params) + return params + + +def parse_token_response(body, scope=None): + """Parse the JSON token response body into a dict. + + The authorization server issues an access token and optional refresh + token, and constructs the response by adding the following parameters + to the entity body of the HTTP response with a 200 (OK) status code: + + access_token + REQUIRED. The access token issued by the authorization server. + token_type + REQUIRED. The type of the token issued as described in + `Section 7.1`_. Value is case insensitive. + expires_in + RECOMMENDED. The lifetime in seconds of the access token. For + example, the value "3600" denotes that the access token will + expire in one hour from the time the response was generated. + If omitted, the authorization server SHOULD provide the + expiration time via other means or document the default value. + refresh_token + OPTIONAL. The refresh token which can be used to obtain new + access tokens using the same authorization grant as described + in `Section 6`_. + scope + OPTIONAL, if identical to the scope requested by the client, + otherwise REQUIRED. The scope of the access token as described + by `Section 3.3`_. + + The parameters are included in the entity body of the HTTP response + using the "application/json" media type as defined by [`RFC4627`_]. The + parameters are serialized into a JSON structure by adding each + parameter at the highest structure level. Parameter names and string + values are included as JSON strings. Numerical values are included + as JSON numbers. The order of parameters does not matter and can + vary. + + :param body: The full json encoded response body. + :param scope: The scope requested during authorization. + + For example: + + .. code-block:: http + + HTTP/1.1 200 OK + Content-Type: application/json + Cache-Control: no-store + Pragma: no-cache + + { + "access_token":"2YotnFZFEjr1zCsicMWpAA", + "token_type":"example", + "expires_in":3600, + "refresh_token":"tGzv3JOkF0XG5Qx2TlKWIA", + "example_parameter":"example_value" + } + + .. _`Section 7.1`: https://tools.ietf.org/html/rfc6749#section-7.1 + .. _`Section 6`: https://tools.ietf.org/html/rfc6749#section-6 + .. _`Section 3.3`: https://tools.ietf.org/html/rfc6749#section-3.3 + .. _`RFC4627`: https://tools.ietf.org/html/rfc4627 + """ + try: + params = json.loads(body) + except ValueError: + + # Fall back to URL-encoded string, to support old implementations, + # including (at time of writing) Facebook. 
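For the implicit flow response described above, a fragment URI can be parsed the same way (values follow the RFC example, switched to https so the transport check passes)::

    from oauthlib.oauth2.rfc6749.parameters import parse_implicit_response

    uri = ('https://client.example.com/cb#access_token=2YotnFZFEjr1zCsicMWpAA'
           '&state=xyz&token_type=example&expires_in=3600')
    token = parse_implicit_response(uri, state='xyz')
    # token['access_token'], token['token_type'] and a computed
    # token['expires_at'] are now available.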
See: + # https://github.com/oauthlib/oauthlib/issues/267 + + params = dict(urlparse.parse_qsl(body)) + for key in ('expires_in', 'expires'): + if key in params: # cast a couple things to int + params[key] = int(params[key]) + + if 'scope' in params: + params['scope'] = scope_to_list(params['scope']) + + if 'expires' in params: + params['expires_in'] = params.pop('expires') + + if 'expires_in' in params: + params['expires_at'] = time.time() + int(params['expires_in']) + + params = OAuth2Token(params, old_scope=scope) + validate_token_parameters(params) + return params + + +def validate_token_parameters(params): + """Ensures token precence, token type, expiration and scope in params.""" + if 'error' in params: + raise_from_error(params.get('error'), params) + + if not 'access_token' in params: + raise MissingTokenError(description="Missing access token parameter.") + + if not 'token_type' in params: + if os.environ.get('OAUTHLIB_STRICT_TOKEN_TYPE'): + raise MissingTokenTypeError() + + # If the issued access token scope is different from the one requested by + # the client, the authorization server MUST include the "scope" response + # parameter to inform the client of the actual scope granted. + # https://tools.ietf.org/html/rfc6749#section-3.3 + if params.scope_changed: + message = 'Scope has changed from "{old}" to "{new}".'.format( + old=params.old_scope, new=params.scope, + ) + scope_changed.send(message=message, old=params.old_scopes, new=params.scopes) + if not os.environ.get('OAUTHLIB_RELAX_TOKEN_SCOPE', None): + w = Warning(message) + w.token = params + w.old_scope = params.old_scopes + w.new_scope = params.scopes + raise w diff --git a/env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/request_validator.py b/env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/request_validator.py new file mode 100644 index 0000000..fee7b8c --- /dev/null +++ b/env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/request_validator.py @@ -0,0 +1,573 @@ +# -*- coding: utf-8 -*- +""" +oauthlib.oauth2.rfc6749.grant_types +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +""" +from __future__ import absolute_import, unicode_literals + +import logging + +log = logging.getLogger(__name__) + + +class RequestValidator(object): + + def client_authentication_required(self, request, *args, **kwargs): + """Determine if client authentication is required for current request. + + According to the rfc6749, client authentication is required in the following cases: + - Resource Owner Password Credentials Grant, when Client type is Confidential or when + Client was issued client credentials or whenever Client provided client + authentication, see `Section 4.3.2`_. + - Authorization Code Grant, when Client type is Confidential or when Client was issued + client credentials or whenever Client provided client authentication, + see `Section 4.1.3`_. + - Refresh Token Grant, when Client type is Confidential or when Client was issued + client credentials or whenever Client provided client authentication, see + `Section 6`_ + + :param request: oauthlib.common.Request + :rtype: True or False + + Method is used by: + - Authorization Code Grant + - Resource Owner Password Credentials Grant + - Refresh Token Grant + + .. _`Section 4.3.2`: https://tools.ietf.org/html/rfc6749#section-4.3.2 + .. _`Section 4.1.3`: https://tools.ietf.org/html/rfc6749#section-4.1.3 + .. 
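Tying the two functions above together: a narrowed scope normally raises a ``Warning`` carrying the token, while setting ``OAUTHLIB_RELAX_TOKEN_SCOPE`` downgrades that to the ``scope_changed`` signal (token values are illustrative)::

    import json
    import os

    from oauthlib.oauth2.rfc6749.parameters import parse_token_response

    body = json.dumps({'access_token': '2YotnFZFEjr1zCsicMWpAA',
                       'token_type': 'example', 'expires_in': 3600,
                       'scope': 'read'})

    # The client originally asked for "read write"; the server granted "read".
    os.environ['OAUTHLIB_RELAX_TOKEN_SCOPE'] = '1'
    token = parse_token_response(body, scope='read write')
    # token.scope_changed is True and token.missing_scopes == ['write']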
_`Section 6`: https://tools.ietf.org/html/rfc6749#section-6 + """ + return True + + def authenticate_client(self, request, *args, **kwargs): + """Authenticate client through means outside the OAuth 2 spec. + + Means of authentication is negotiated beforehand and may for example + be `HTTP Basic Authentication Scheme`_ which utilizes the Authorization + header. + + Headers may be accesses through request.headers and parameters found in + both body and query can be obtained by direct attribute access, i.e. + request.client_id for client_id in the URL query. + + :param request: oauthlib.common.Request + :rtype: True or False + + Method is used by: + - Authorization Code Grant + - Resource Owner Password Credentials Grant (may be disabled) + - Client Credentials Grant + - Refresh Token Grant + + .. _`HTTP Basic Authentication Scheme`: https://tools.ietf.org/html/rfc1945#section-11.1 + """ + raise NotImplementedError('Subclasses must implement this method.') + + def authenticate_client_id(self, client_id, request, *args, **kwargs): + """Ensure client_id belong to a non-confidential client. + + A non-confidential client is one that is not required to authenticate + through other means, such as using HTTP Basic. + + Note, while not strictly necessary it can often be very convenient + to set request.client to the client object associated with the + given client_id. + + :param request: oauthlib.common.Request + :rtype: True or False + + Method is used by: + - Authorization Code Grant + """ + raise NotImplementedError('Subclasses must implement this method.') + + def confirm_redirect_uri(self, client_id, code, redirect_uri, client, request, + *args, **kwargs): + """Ensure that the authorization process represented by this authorization + code began with this 'redirect_uri'. + + If the client specifies a redirect_uri when obtaining code then that + redirect URI must be bound to the code and verified equal in this + method, according to RFC 6749 section 4.1.3. Do not compare against + the client's allowed redirect URIs, but against the URI used when the + code was saved. + + :param client_id: Unicode client identifier + :param code: Unicode authorization_code. + :param redirect_uri: Unicode absolute URI + :param client: Client object set by you, see authenticate_client. + :param request: The HTTP Request (oauthlib.common.Request) + :rtype: True or False + + Method is used by: + - Authorization Code Grant (during token request) + """ + raise NotImplementedError('Subclasses must implement this method.') + + def get_default_redirect_uri(self, client_id, request, *args, **kwargs): + """Get the default redirect URI for the client. + + :param client_id: Unicode client identifier + :param request: The HTTP Request (oauthlib.common.Request) + :rtype: The default redirect URI for the client + + Method is used by: + - Authorization Code Grant + - Implicit Grant + """ + raise NotImplementedError('Subclasses must implement this method.') + + def get_default_scopes(self, client_id, request, *args, **kwargs): + """Get the default scopes for the client. 
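A minimal, illustrative subclass wiring up the client-facing hooks documented above; the client id, redirect URI and the decision to treat every client as public are invented for this sketch::

    from oauthlib.oauth2.rfc6749.request_validator import RequestValidator

    class ExampleValidator(RequestValidator):
        # Illustrative stubs; a real validator consults persistent client storage.
        CLIENTS = {'s6BhdRkqt3': 'https://client.example.com/cb'}

        def client_authentication_required(self, request, *args, **kwargs):
            return False          # treat every client as public in this sketch

        def authenticate_client_id(self, client_id, request, *args, **kwargs):
            # Remember the client object, as recommended by the docstring above.
            request.client = type('Client', (), {'client_id': client_id})()
            return client_id in self.CLIENTS

        def get_default_redirect_uri(self, client_id, request, *args, **kwargs):
            return self.CLIENTS.get(client_id)

        def confirm_redirect_uri(self, client_id, code, redirect_uri, client,
                                 request, *args, **kwargs):
            # A real implementation compares against the URI stored with the code.
            return redirect_uri == self.CLIENTS.get(client_id)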
+ + :param client_id: Unicode client identifier + :param request: The HTTP Request (oauthlib.common.Request) + :rtype: List of default scopes + + Method is used by all core grant types: + - Authorization Code Grant + - Implicit Grant + - Resource Owner Password Credentials Grant + - Client Credentials grant + """ + raise NotImplementedError('Subclasses must implement this method.') + + def get_original_scopes(self, refresh_token, request, *args, **kwargs): + """Get the list of scopes associated with the refresh token. + + :param refresh_token: Unicode refresh token + :param request: The HTTP Request (oauthlib.common.Request) + :rtype: List of scopes. + + Method is used by: + - Refresh token grant + """ + raise NotImplementedError('Subclasses must implement this method.') + + def is_within_original_scope(self, request_scopes, refresh_token, request, *args, **kwargs): + """Check if requested scopes are within a scope of the refresh token. + + When access tokens are refreshed the scope of the new token + needs to be within the scope of the original token. This is + ensured by checking that all requested scopes strings are on + the list returned by the get_original_scopes. If this check + fails, is_within_original_scope is called. The method can be + used in situations where returning all valid scopes from the + get_original_scopes is not practical. + + :param request_scopes: A list of scopes that were requested by client + :param refresh_token: Unicode refresh_token + :param request: The HTTP Request (oauthlib.common.Request) + :rtype: True or False + + Method is used by: + - Refresh token grant + """ + return False + + def invalidate_authorization_code(self, client_id, code, request, *args, **kwargs): + """Invalidate an authorization code after use. + + :param client_id: Unicode client identifier + :param code: The authorization code grant (request.code). + :param request: The HTTP Request (oauthlib.common.Request) + + Method is used by: + - Authorization Code Grant + """ + raise NotImplementedError('Subclasses must implement this method.') + + def revoke_token(self, token, token_type_hint, request, *args, **kwargs): + """Revoke an access or refresh token. + + :param token: The token string. + :param token_type_hint: access_token or refresh_token. + :param request: The HTTP Request (oauthlib.common.Request) + + Method is used by: + - Revocation Endpoint + """ + raise NotImplementedError('Subclasses must implement this method.') + + def rotate_refresh_token(self, request): + """Determine whether to rotate the refresh token. Default, yes. + + When access tokens are refreshed the old refresh token can be kept + or replaced with a new one (rotated). Return True to rotate and + and False for keeping original. + + :param request: oauthlib.common.Request + :rtype: True or False + + Method is used by: + - Refresh Token Grant + """ + return True + + def save_authorization_code(self, client_id, code, request, *args, **kwargs): + """Persist the authorization_code. + + The code should at minimum be stored with: + - the client_id (client_id) + - the redirect URI used (request.redirect_uri) + - a resource owner / user (request.user) + - the authorized scopes (request.scopes) + - the client state, if given (code.get('state')) + + The 'code' argument is actually a dictionary, containing at least a + 'code' key with the actual authorization code: + + {'code': 'sdf345jsdf0934f'} + + It may also have a 'state' key containing a nonce for the client, if it + chose to send one. 
That value should be saved and used in + 'validate_code'. + + It may also have a 'claims' parameter which, when present, will be a dict + deserialized from JSON as described at + http://openid.net/specs/openid-connect-core-1_0.html#ClaimsParameter + This value should be saved in this method and used again in 'validate_code'. + + :param client_id: Unicode client identifier + :param code: A dict of the authorization code grant and, optionally, state. + :param request: The HTTP Request (oauthlib.common.Request) + + Method is used by: + - Authorization Code Grant + """ + raise NotImplementedError('Subclasses must implement this method.') + + def save_token(self, token, request, *args, **kwargs): + """Persist the token with a token type specific method. + + Currently, only save_bearer_token is supported. + """ + return self.save_bearer_token(token, request, *args, **kwargs) + + def save_bearer_token(self, token, request, *args, **kwargs): + """Persist the Bearer token. + + The Bearer token should at minimum be associated with: + - a client and it's client_id, if available + - a resource owner / user (request.user) + - authorized scopes (request.scopes) + - an expiration time + - a refresh token, if issued + - a claims document, if present in request.claims + + The Bearer token dict may hold a number of items:: + + { + 'token_type': 'Bearer', + 'access_token': 'askfjh234as9sd8', + 'expires_in': 3600, + 'scope': 'string of space separated authorized scopes', + 'refresh_token': '23sdf876234', # if issued + 'state': 'given_by_client', # if supplied by client + } + + Note that while "scope" is a string-separated list of authorized scopes, + the original list is still available in request.scopes + + Also note that if an Authorization Code grant request included a valid claims + parameter (for OpenID Connect) then the request.claims property will contain + the claims dict, which should be saved for later use when generating the + id_token and/or UserInfo response content. + + :param client_id: Unicode client identifier + :param token: A Bearer token dict + :param request: The HTTP Request (oauthlib.common.Request) + :rtype: The default redirect URI for the client + + Method is used by all core grant types issuing Bearer tokens: + - Authorization Code Grant + - Implicit Grant + - Resource Owner Password Credentials Grant (might not associate a client) + - Client Credentials grant + """ + raise NotImplementedError('Subclasses must implement this method.') + + def get_id_token(self, token, token_handler, request): + """ + In the OpenID Connect workflows when an ID Token is requested this method is called. + Subclasses should implement the construction, signing and optional encryption of the + ID Token as described in the OpenID Connect spec. + + In addition to the standard OAuth2 request properties, the request may also contain + these OIDC specific properties which are useful to this method: + + - nonce, if workflow is implicit or hybrid and it was provided + - claims, if provided to the original Authorization Code request + + The token parameter is a dict which may contain an ``access_token`` entry, in which + case the resulting ID Token *should* include a calculated ``at_hash`` claim. + + Similarly, when the request parameter has a ``code`` property defined, the ID Token + *should* include a calculated ``c_hash`` claim. + + http://openid.net/specs/openid-connect-core-1_0.html (sections `3.1.3.6`_, `3.2.2.10`_, `3.3.2.11`_) + + .. 
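A sketch of the two persistence hooks described above, using in-memory dictionaries in place of a database (purely illustrative)::

    from oauthlib.oauth2.rfc6749.request_validator import RequestValidator

    class StoringValidator(RequestValidator):
        # Class-level dicts stand in for persistent storage in this sketch.
        codes = {}
        tokens = {}

        def save_authorization_code(self, client_id, code, request, *args, **kwargs):
            self.codes[code['code']] = {
                'client_id': client_id,
                'redirect_uri': request.redirect_uri,
                'user': request.user,
                'scopes': request.scopes,
                'state': code.get('state'),
            }

        def save_bearer_token(self, token, request, *args, **kwargs):
            self.tokens[token['access_token']] = {
                'client_id': request.client_id,
                'user': request.user,
                'scopes': request.scopes,
                'refresh_token': token.get('refresh_token'),
            }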
_`3.1.3.6`: http://openid.net/specs/openid-connect-core-1_0.html#CodeIDToken + .. _`3.2.2.10`: http://openid.net/specs/openid-connect-core-1_0.html#ImplicitIDToken + .. _`3.3.2.11`: http://openid.net/specs/openid-connect-core-1_0.html#HybridIDToken + + :param token: A Bearer token dict + :param token_handler: the token handler (BearerToken class) + :param request: the HTTP Request (oauthlib.common.Request) + :return: The ID Token (a JWS signed JWT) + """ + # the request.scope should be used by the get_id_token() method to determine which claims to include in the resulting id_token + raise NotImplementedError('Subclasses must implement this method.') + + def validate_bearer_token(self, token, scopes, request): + """Ensure the Bearer token is valid and authorized access to scopes. + + :param token: A string of random characters. + :param scopes: A list of scopes associated with the protected resource. + :param request: The HTTP Request (oauthlib.common.Request) + + A key to OAuth 2 security and restricting impact of leaked tokens is + the short expiration time of tokens, *always ensure the token has not + expired!*. + + Two different approaches to scope validation: + + 1) all(scopes). The token must be authorized access to all scopes + associated with the resource. For example, the + token has access to ``read-only`` and ``images``, + thus the client can view images but not upload new. + Allows for fine grained access control through + combining various scopes. + + 2) any(scopes). The token must be authorized access to one of the + scopes associated with the resource. For example, + token has access to ``read-only-images``. + Allows for fine grained, although arguably less + convenient, access control. + + A powerful way to use scopes would mimic UNIX ACLs and see a scope + as a group with certain privileges. For a restful API these might + map to HTTP verbs instead of read, write and execute. + + Note, the request.user attribute can be set to the resource owner + associated with this token. Similarly the request.client and + request.scopes attribute can be set to associated client object + and authorized scopes. If you then use a decorator such as the + one provided for django these attributes will be made available + in all protected views as keyword arguments. + + :param token: Unicode Bearer token + :param scopes: List of scopes (defined by you) + :param request: The HTTP Request (oauthlib.common.Request) + :rtype: True or False + + Method is indirectly used by all core Bearer token issuing grant types: + - Authorization Code Grant + - Implicit Grant + - Resource Owner Password Credentials Grant + - Client Credentials Grant + """ + raise NotImplementedError('Subclasses must implement this method.') + + def validate_client_id(self, client_id, request, *args, **kwargs): + """Ensure client_id belong to a valid and active client. + + Note, while not strictly necessary it can often be very convenient + to set request.client to the client object associated with the + given client_id. + + :param request: oauthlib.common.Request + :rtype: True or False + + Method is used by: + - Authorization Code Grant + - Implicit Grant + """ + raise NotImplementedError('Subclasses must implement this method.') + + def validate_code(self, client_id, code, client, request, *args, **kwargs): + """Verify that the authorization_code is valid and assigned to the given + client. 
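One way to implement the all(scopes) strategy described above, assuming (for this sketch only) that each stored token record carries an ``expires_at`` timestamp::

    import time

    from oauthlib.oauth2.rfc6749.request_validator import RequestValidator

    class ResourceValidator(RequestValidator):
        tokens = {}   # access token string -> record saved by save_bearer_token

        def validate_bearer_token(self, token, scopes, request):
            record = self.tokens.get(token)
            if not record or record['expires_at'] < time.time():
                return False          # always reject missing or expired tokens
            # Strategy 1): the token must be authorized for *all* required scopes.
            if not all(s in record['scopes'] for s in scopes or []):
                return False
            request.user = record['user']       # expose resource owner to views
            request.scopes = record['scopes']
            return True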
+ + Before returning true, set the following based on the information stored + with the code in 'save_authorization_code': + + - request.user + - request.state (if given) + - request.scopes + - request.claims (if given) + OBS! The request.user attribute should be set to the resource owner + associated with this authorization code. Similarly request.scopes + must also be set. + + The request.claims property, if it was given, should assigned a dict. + + :param client_id: Unicode client identifier + :param code: Unicode authorization code + :param client: Client object set by you, see authenticate_client. + :param request: The HTTP Request (oauthlib.common.Request) + :rtype: True or False + + Method is used by: + - Authorization Code Grant + """ + raise NotImplementedError('Subclasses must implement this method.') + + def validate_grant_type(self, client_id, grant_type, client, request, *args, **kwargs): + """Ensure client is authorized to use the grant_type requested. + + :param client_id: Unicode client identifier + :param grant_type: Unicode grant type, i.e. authorization_code, password. + :param client: Client object set by you, see authenticate_client. + :param request: The HTTP Request (oauthlib.common.Request) + :rtype: True or False + + Method is used by: + - Authorization Code Grant + - Resource Owner Password Credentials Grant + - Client Credentials Grant + - Refresh Token Grant + """ + raise NotImplementedError('Subclasses must implement this method.') + + def validate_redirect_uri(self, client_id, redirect_uri, request, *args, **kwargs): + """Ensure client is authorized to redirect to the redirect_uri requested. + + All clients should register the absolute URIs of all URIs they intend + to redirect to. The registration is outside of the scope of oauthlib. + + :param client_id: Unicode client identifier + :param redirect_uri: Unicode absolute URI + :param request: The HTTP Request (oauthlib.common.Request) + :rtype: True or False + + Method is used by: + - Authorization Code Grant + - Implicit Grant + """ + raise NotImplementedError('Subclasses must implement this method.') + + def validate_refresh_token(self, refresh_token, client, request, *args, **kwargs): + """Ensure the Bearer token is valid and authorized access to scopes. + + OBS! The request.user attribute should be set to the resource owner + associated with this refresh token. + + :param refresh_token: Unicode refresh token + :param client: Client object set by you, see authenticate_client. + :param request: The HTTP Request (oauthlib.common.Request) + :rtype: True or False + + Method is used by: + - Authorization Code Grant (indirectly by issuing refresh tokens) + - Resource Owner Password Credentials Grant (also indirectly) + - Refresh Token Grant + """ + raise NotImplementedError('Subclasses must implement this method.') + + def validate_response_type(self, client_id, response_type, client, request, *args, **kwargs): + """Ensure client is authorized to use the response_type requested. + + :param client_id: Unicode client identifier + :param response_type: Unicode response type, i.e. code, token. + :param client: Client object set by you, see authenticate_client. 
+ :param request: The HTTP Request (oauthlib.common.Request) + :rtype: True or False + + Method is used by: + - Authorization Code Grant + - Implicit Grant + """ + raise NotImplementedError('Subclasses must implement this method.') + + def validate_scopes(self, client_id, scopes, client, request, *args, **kwargs): + """Ensure the client is authorized access to requested scopes. + + :param client_id: Unicode client identifier + :param scopes: List of scopes (defined by you) + :param client: Client object set by you, see authenticate_client. + :param request: The HTTP Request (oauthlib.common.Request) + :rtype: True or False + + Method is used by all core grant types: + - Authorization Code Grant + - Implicit Grant + - Resource Owner Password Credentials Grant + - Client Credentials Grant + """ + raise NotImplementedError('Subclasses must implement this method.') + + def validate_silent_authorization(self, request): + """Ensure the logged in user has authorized silent OpenID authorization. + + Silent OpenID authorization allows access tokens and id tokens to be + granted to clients without any user prompt or interaction. + + :param request: The HTTP Request (oauthlib.common.Request) + :rtype: True or False + + Method is used by: + - OpenIDConnectAuthCode + - OpenIDConnectImplicit + - OpenIDConnectHybrid + """ + raise NotImplementedError('Subclasses must implement this method.') + + def validate_silent_login(self, request): + """Ensure session user has authorized silent OpenID login. + + If no user is logged in or has not authorized silent login, this + method should return False. + + If the user is logged in but associated with multiple accounts and + not selected which one to link to the token then this method should + raise an oauthlib.oauth2.AccountSelectionRequired error. + + :param request: The HTTP Request (oauthlib.common.Request) + :rtype: True or False + + Method is used by: + - OpenIDConnectAuthCode + - OpenIDConnectImplicit + - OpenIDConnectHybrid + """ + raise NotImplementedError('Subclasses must implement this method.') + + def validate_user(self, username, password, client, request, *args, **kwargs): + """Ensure the username and password is valid. + + OBS! The validation should also set the user attribute of the request + to a valid resource owner, i.e. request.user = username or similar. If + not set you will be unable to associate a token with a user in the + persistance method used (commonly, save_bearer_token). + + :param username: Unicode username + :param password: Unicode password + :param client: Client object set by you, see authenticate_client. + :param request: The HTTP Request (oauthlib.common.Request) + :rtype: True or False + + Method is used by: + - Resource Owner Password Credentials Grant + """ + raise NotImplementedError('Subclasses must implement this method.') + + def validate_user_match(self, id_token_hint, scopes, claims, request): + """Ensure client supplied user id hint matches session user. + + If the sub claim or id_token_hint is supplied then the session + user must match the given ID. + + :param id_token_hint: User identifier string. + :param scopes: List of OAuth 2 scopes and OpenID claims (strings). + :param claims: OpenID Connect claims dict. 
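For the password grant, ``validate_user`` above must both check the credentials and set ``request.user``; a toy example with a hard-coded user table and scope list (both invented for illustration)::

    from oauthlib.oauth2.rfc6749.request_validator import RequestValidator

    class PasswordGrantValidator(RequestValidator):
        # Plain-text passwords are for illustration only; store salted hashes.
        USERS = {'alice': 's3cret'}

        def validate_user(self, username, password, client, request, *args, **kwargs):
            if self.USERS.get(username) == password:
                request.user = username  # required to associate the token with a user
                return True
            return False

        def validate_scopes(self, client_id, scopes, client, request, *args, **kwargs):
            return all(s in ('profile', 'email') for s in scopes or [])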
+ :param request: The HTTP Request (oauthlib.common.Request) + :rtype: True or False + + Method is used by: + - OpenIDConnectAuthCode + - OpenIDConnectImplicit + - OpenIDConnectHybrid + """ + raise NotImplementedError('Subclasses must implement this method.') diff --git a/env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/tokens.py b/env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/tokens.py new file mode 100644 index 0000000..6cfa642 --- /dev/null +++ b/env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/tokens.py @@ -0,0 +1,305 @@ +""" +oauthlib.oauth2.rfc6749.tokens +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +This module contains methods for adding two types of access tokens to requests. + +- Bearer https://tools.ietf.org/html/rfc6750 +- MAC https://tools.ietf.org/html/draft-ietf-oauth-v2-http-mac-01 +""" +from __future__ import absolute_import, unicode_literals + +import hashlib +import hmac +from binascii import b2a_base64 + +from oauthlib import common +from oauthlib.common import add_params_to_qs, add_params_to_uri, unicode_type + +from . import utils + +try: + from urlparse import urlparse +except ImportError: + from urllib.parse import urlparse + + + + +class OAuth2Token(dict): + + def __init__(self, params, old_scope=None): + super(OAuth2Token, self).__init__(params) + self._new_scope = None + if 'scope' in params and params['scope']: + self._new_scope = set(utils.scope_to_list(params['scope'])) + if old_scope is not None: + self._old_scope = set(utils.scope_to_list(old_scope)) + if self._new_scope is None: + # the rfc says that if the scope hasn't changed, it's optional + # in params so set the new scope to the old scope + self._new_scope = self._old_scope + else: + self._old_scope = self._new_scope + + @property + def scope_changed(self): + return self._new_scope != self._old_scope + + @property + def old_scope(self): + return utils.list_to_scope(self._old_scope) + + @property + def old_scopes(self): + return list(self._old_scope) + + @property + def scope(self): + return utils.list_to_scope(self._new_scope) + + @property + def scopes(self): + return list(self._new_scope) + + @property + def missing_scopes(self): + return list(self._old_scope - self._new_scope) + + @property + def additional_scopes(self): + return list(self._new_scope - self._old_scope) + + +def prepare_mac_header(token, uri, key, http_method, + nonce=None, + headers=None, + body=None, + ext='', + hash_algorithm='hmac-sha-1', + issue_time=None, + draft=0): + """Add an `MAC Access Authentication`_ signature to headers. + + Unlike OAuth 1, this HMAC signature does not require inclusion of the + request payload/body, neither does it use a combination of client_secret + and token_secret but rather a mac_key provided together with the access + token. + + Currently two algorithms are supported, "hmac-sha-1" and "hmac-sha-256", + `extension algorithms`_ are not supported. + + Example MAC Authorization header, linebreaks added for clarity + + Authorization: MAC id="h480djs93hd8", + nonce="1336363200:dj83hs9s", + mac="bhCQXTVyfj5cmA9uKkPFx1zeOXM=" + + .. _`MAC Access Authentication`: https://tools.ietf.org/html/draft-ietf-oauth-v2-http-mac-01 + .. _`extension algorithms`: https://tools.ietf.org/html/draft-ietf-oauth-v2-http-mac-01#section-7.1 + + :param uri: Request URI. + :param headers: Request headers as a dictionary. + :param http_method: HTTP Request method. + :param key: MAC given provided by token endpoint. + :param hash_algorithm: HMAC algorithm provided by token endpoint. 
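The scope bookkeeping of ``OAuth2Token`` above can be exercised directly (token values illustrative)::

    from oauthlib.oauth2.rfc6749.tokens import OAuth2Token

    token = OAuth2Token({'access_token': 'abc', 'token_type': 'Bearer',
                         'scope': 'read'}, old_scope='read write')
    token.scope_changed     # True - the server narrowed the grant
    token.missing_scopes    # ['write']
    token.scopes            # ['read']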
+ :param issue_time: Time when the MAC credentials were issued (datetime). + :param draft: MAC authentication specification version. + :return: headers dictionary with the authorization field added. + """ + http_method = http_method.upper() + host, port = utils.host_from_uri(uri) + + if hash_algorithm.lower() == 'hmac-sha-1': + h = hashlib.sha1 + elif hash_algorithm.lower() == 'hmac-sha-256': + h = hashlib.sha256 + else: + raise ValueError('unknown hash algorithm') + + if draft == 0: + nonce = nonce or '{0}:{1}'.format(utils.generate_age(issue_time), + common.generate_nonce()) + else: + ts = common.generate_timestamp() + nonce = common.generate_nonce() + + sch, net, path, par, query, fra = urlparse(uri) + + if query: + request_uri = path + '?' + query + else: + request_uri = path + + # Hash the body/payload + if body is not None and draft == 0: + body = body.encode('utf-8') + bodyhash = b2a_base64(h(body).digest())[:-1].decode('utf-8') + else: + bodyhash = '' + + # Create the normalized base string + base = [] + if draft == 0: + base.append(nonce) + else: + base.append(ts) + base.append(nonce) + base.append(http_method.upper()) + base.append(request_uri) + base.append(host) + base.append(port) + if draft == 0: + base.append(bodyhash) + base.append(ext or '') + base_string = '\n'.join(base) + '\n' + + # hmac struggles with unicode strings - http://bugs.python.org/issue5285 + if isinstance(key, unicode_type): + key = key.encode('utf-8') + sign = hmac.new(key, base_string.encode('utf-8'), h) + sign = b2a_base64(sign.digest())[:-1].decode('utf-8') + + header = [] + header.append('MAC id="%s"' % token) + if draft != 0: + header.append('ts="%s"' % ts) + header.append('nonce="%s"' % nonce) + if bodyhash: + header.append('bodyhash="%s"' % bodyhash) + if ext: + header.append('ext="%s"' % ext) + header.append('mac="%s"' % sign) + + headers = headers or {} + headers['Authorization'] = ', '.join(header) + return headers + + +def prepare_bearer_uri(token, uri): + """Add a `Bearer Token`_ to the request URI. + Not recommended, use only if client can't use authorization header or body. + + http://www.example.com/path?access_token=h480djs93hd8 + + .. _`Bearer Token`: https://tools.ietf.org/html/rfc6750 + """ + return add_params_to_uri(uri, [(('access_token', token))]) + + +def prepare_bearer_headers(token, headers=None): + """Add a `Bearer Token`_ to the request URI. + Recommended method of passing bearer tokens. + + Authorization: Bearer h480djs93hd8 + + .. _`Bearer Token`: https://tools.ietf.org/html/rfc6750 + """ + headers = headers or {} + headers['Authorization'] = 'Bearer %s' % token + return headers + + +def prepare_bearer_body(token, body=''): + """Add a `Bearer Token`_ to the request body. + + access_token=h480djs93hd8 + + .. 
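Quick usage of the Bearer helpers above (token value illustrative)::

    from oauthlib.oauth2.rfc6749.tokens import (
        prepare_bearer_body, prepare_bearer_headers, prepare_bearer_uri)

    token = '2YotnFZFEjr1zCsicMWpAA'
    prepare_bearer_headers(token)  # {'Authorization': 'Bearer 2YotnFZFEjr1zCsicMWpAA'}
    prepare_bearer_body(token)     # 'access_token=2YotnFZFEjr1zCsicMWpAA'
    prepare_bearer_uri(token, 'https://api.example.com/me')
    # 'https://api.example.com/me?access_token=2YotnFZFEjr1zCsicMWpAA'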
_`Bearer Token`: https://tools.ietf.org/html/rfc6750 + """ + return add_params_to_qs(body, [(('access_token', token))]) + + +def random_token_generator(request, refresh_token=False): + return common.generate_token() + + +def signed_token_generator(private_pem, **kwargs): + def signed_token_generator(request): + request.claims = kwargs + return common.generate_signed_token(private_pem, request) + + return signed_token_generator + + +class TokenBase(object): + + def __call__(self, request, refresh_token=False): + raise NotImplementedError('Subclasses must implement this method.') + + def validate_request(self, request): + raise NotImplementedError('Subclasses must implement this method.') + + def estimate_type(self, request): + raise NotImplementedError('Subclasses must implement this method.') + + +class BearerToken(TokenBase): + __slots__ = ( + 'request_validator', 'token_generator', + 'refresh_token_generator', 'expires_in' + ) + + def __init__(self, request_validator=None, token_generator=None, + expires_in=None, refresh_token_generator=None): + self.request_validator = request_validator + self.token_generator = token_generator or random_token_generator + self.refresh_token_generator = ( + refresh_token_generator or self.token_generator + ) + self.expires_in = expires_in or 3600 + + def create_token(self, request, refresh_token=False, save_token=True): + """Create a BearerToken, by default without refresh token.""" + + if callable(self.expires_in): + expires_in = self.expires_in(request) + else: + expires_in = self.expires_in + + request.expires_in = expires_in + + token = { + 'access_token': self.token_generator(request), + 'expires_in': expires_in, + 'token_type': 'Bearer', + } + + # If provided, include - this is optional in some cases https://tools.ietf.org/html/rfc6749#section-3.3 but + # there is currently no mechanism to coordinate issuing a token for only a subset of the requested scopes so + # all tokens issued are for the entire set of requested scopes. + if request.scopes is not None: + token['scope'] = ' '.join(request.scopes) + + if request.state is not None: + token['state'] = request.state + + if refresh_token: + if (request.refresh_token and + not self.request_validator.rotate_refresh_token(request)): + token['refresh_token'] = request.refresh_token + else: + token['refresh_token'] = self.refresh_token_generator(request) + + token.update(request.extra_credentials or {}) + token = OAuth2Token(token) + if save_token: + self.request_validator.save_bearer_token(token, request) + return token + + def validate_request(self, request): + token = None + if 'Authorization' in request.headers: + token = request.headers.get('Authorization')[7:] + else: + token = request.access_token + return self.request_validator.validate_bearer_token( + token, request.scopes, request) + + def estimate_type(self, request): + if request.headers.get('Authorization', '').startswith('Bearer'): + return 9 + elif request.access_token is not None: + return 5 + else: + return 0 diff --git a/env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/utils.py b/env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/utils.py new file mode 100644 index 0000000..f67019d --- /dev/null +++ b/env/lib/python3.6/site-packages/oauthlib/oauth2/rfc6749/utils.py @@ -0,0 +1,94 @@ +# -*- coding: utf-8 -*- +""" +oauthlib.utils +~~~~~~~~~~~~~~ + +This module contains utility methods used by various parts of the OAuth 2 spec. 
+""" +from __future__ import absolute_import, unicode_literals + +import datetime +import os + +from oauthlib.common import unicode_type, urldecode + +try: + from urllib import quote +except ImportError: + from urllib.parse import quote +try: + from urlparse import urlparse +except ImportError: + from urllib.parse import urlparse + + +def list_to_scope(scope): + """Convert a list of scopes to a space separated string.""" + if isinstance(scope, unicode_type) or scope is None: + return scope + elif isinstance(scope, (set, tuple, list)): + return " ".join([unicode_type(s) for s in scope]) + else: + raise ValueError("Invalid scope (%s), must be string, tuple, set, or list." % scope) + + +def scope_to_list(scope): + """Convert a space separated string to a list of scopes.""" + if isinstance(scope, (tuple, list, set)): + return [unicode_type(s) for s in scope] + elif scope is None: + return None + else: + return scope.strip().split(" ") + + +def params_from_uri(uri): + params = dict(urldecode(urlparse(uri).query)) + if 'scope' in params: + params['scope'] = scope_to_list(params['scope']) + return params + + +def host_from_uri(uri): + """Extract hostname and port from URI. + + Will use default port for HTTP and HTTPS if none is present in the URI. + """ + default_ports = { + 'HTTP': '80', + 'HTTPS': '443', + } + + sch, netloc, path, par, query, fra = urlparse(uri) + if ':' in netloc: + netloc, port = netloc.split(':', 1) + else: + port = default_ports.get(sch.upper()) + + return netloc, port + + +def escape(u): + """Escape a string in an OAuth-compatible fashion. + + TODO: verify whether this can in fact be used for OAuth 2 + + """ + if not isinstance(u, unicode_type): + raise ValueError('Only unicode objects are escapable.') + return quote(u.encode('utf-8'), safe=b'~') + + +def generate_age(issue_time): + """Generate a age parameter for MAC authentication draft 00.""" + td = datetime.datetime.now() - issue_time + age = (td.microseconds + (td.seconds + td.days * 24 * 3600) + * 10 ** 6) / 10 ** 6 + return unicode_type(age) + + +def is_secure_transport(uri): + """Check if the uri is over ssl.""" + if os.environ.get('OAUTHLIB_INSECURE_TRANSPORT'): + return True + return uri.lower().startswith('https://') diff --git a/env/lib/python3.6/site-packages/oauthlib/signals.py b/env/lib/python3.6/site-packages/oauthlib/signals.py new file mode 100644 index 0000000..2f86650 --- /dev/null +++ b/env/lib/python3.6/site-packages/oauthlib/signals.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +""" + Implements signals based on blinker if available, otherwise + falls silently back to a noop. Shamelessly stolen from flask.signals: + https://github.com/mitsuhiko/flask/blob/master/flask/signals.py +""" +signals_available = False +try: + from blinker import Namespace + signals_available = True +except ImportError: + class Namespace(object): + def signal(self, name, doc=None): + return _FakeSignal(name, doc) + + class _FakeSignal(object): + """If blinker is unavailable, create a fake class with the same + interface that allows sending of signals but will fail with an + error on anything else. Instead of doing anything on send, it + will just ignore the arguments and do nothing instead. 
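The utility helpers above are handy on their own; for example (URIs illustrative)::

    from oauthlib.oauth2.rfc6749.utils import (
        host_from_uri, is_secure_transport, list_to_scope, scope_to_list)

    scope_to_list('read write')                # ['read', 'write']
    list_to_scope(['read', 'write'])           # 'read write'
    host_from_uri('https://example.com/cb')    # ('example.com', '443')
    is_secure_transport('http://example.com')  # False, unless
                                               # OAUTHLIB_INSECURE_TRANSPORT is set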
+ """ + + def __init__(self, name, doc=None): + self.name = name + self.__doc__ = doc + def _fail(self, *args, **kwargs): + raise RuntimeError('signalling support is unavailable ' + 'because the blinker library is ' + 'not installed.') + send = lambda *a, **kw: None + connect = disconnect = has_receivers_for = receivers_for = \ + temporarily_connected_to = connected_to = _fail + del _fail + +# The namespace for code signals. If you are not oauthlib code, do +# not put signals in here. Create your own namespace instead. +_signals = Namespace() + + +# Core signals. +scope_changed = _signals.signal('scope-changed') diff --git a/env/lib/python3.6/site-packages/oauthlib/uri_validate.py b/env/lib/python3.6/site-packages/oauthlib/uri_validate.py new file mode 100644 index 0000000..ce8ea40 --- /dev/null +++ b/env/lib/python3.6/site-packages/oauthlib/uri_validate.py @@ -0,0 +1,216 @@ +""" +Regex for URIs + +These regex are directly derived from the collected ABNF in RFC3986 +(except for DIGIT, ALPHA and HEXDIG, defined by RFC2234). + +They should be processed with re.VERBOSE. + +Thanks Mark Nottingham for this code - https://gist.github.com/138549 +""" +from __future__ import unicode_literals + +import re + +# basics + +DIGIT = r"[\x30-\x39]" + +ALPHA = r"[\x41-\x5A\x61-\x7A]" + +HEXDIG = r"[\x30-\x39A-Fa-f]" + +# pct-encoded = "%" HEXDIG HEXDIG +pct_encoded = r" %% %(HEXDIG)s %(HEXDIG)s" % locals() + +# unreserved = ALPHA / DIGIT / "-" / "." / "_" / "~" +unreserved = r"(?: %(ALPHA)s | %(DIGIT)s | \- | \. | _ | ~ )" % locals() + +# gen-delims = ":" / "/" / "?" / "#" / "[" / "]" / "@" +gen_delims = r"(?: : | / | \? | \# | \[ | \] | @ )" + +# sub-delims = "!" / "$" / "&" / "'" / "(" / ")" +# / "*" / "+" / "," / ";" / "=" +sub_delims = r"""(?: ! | \$ | & | ' | \( | \) | + \* | \+ | , | ; | = )""" + +# pchar = unreserved / pct-encoded / sub-delims / ":" / "@" +pchar = r"(?: %(unreserved)s | %(pct_encoded)s | %(sub_delims)s | : | @ )" % locals( +) + +# reserved = gen-delims / sub-delims +reserved = r"(?: %(gen_delims)s | %(sub_delims)s )" % locals() + + +# scheme + +# scheme = ALPHA *( ALPHA / DIGIT / "+" / "-" / "." ) +scheme = r"%(ALPHA)s (?: %(ALPHA)s | %(DIGIT)s | \+ | \- | \. )*" % locals() + + +# authority + +# dec-octet = DIGIT ; 0-9 +# / %x31-39 DIGIT ; 10-99 +# / "1" 2DIGIT ; 100-199 +# / "2" %x30-34 DIGIT ; 200-249 +# / "25" %x30-35 ; 250-255 +dec_octet = r"""(?: %(DIGIT)s | + [\x31-\x39] %(DIGIT)s | + 1 %(DIGIT)s{2} | + 2 [\x30-\x34] %(DIGIT)s | + 25 [\x30-\x35] + ) +""" % locals() + +# IPv4address = dec-octet "." dec-octet "." dec-octet "." dec-octet +IPv4address = r"%(dec_octet)s \. %(dec_octet)s \. %(dec_octet)s \. 
%(dec_octet)s" % locals( +) + +# h16 = 1*4HEXDIG +h16 = r"(?: %(HEXDIG)s ){1,4}" % locals() + +# ls32 = ( h16 ":" h16 ) / IPv4address +ls32 = r"(?: (?: %(h16)s : %(h16)s ) | %(IPv4address)s )" % locals() + +# IPv6address = 6( h16 ":" ) ls32 +# / "::" 5( h16 ":" ) ls32 +# / [ h16 ] "::" 4( h16 ":" ) ls32 +# / [ *1( h16 ":" ) h16 ] "::" 3( h16 ":" ) ls32 +# / [ *2( h16 ":" ) h16 ] "::" 2( h16 ":" ) ls32 +# / [ *3( h16 ":" ) h16 ] "::" h16 ":" ls32 +# / [ *4( h16 ":" ) h16 ] "::" ls32 +# / [ *5( h16 ":" ) h16 ] "::" h16 +# / [ *6( h16 ":" ) h16 ] "::" +IPv6address = r"""(?: (?: %(h16)s : ){6} %(ls32)s | + :: (?: %(h16)s : ){5} %(ls32)s | + %(h16)s :: (?: %(h16)s : ){4} %(ls32)s | + (?: %(h16)s : ) %(h16)s :: (?: %(h16)s : ){3} %(ls32)s | + (?: %(h16)s : ){2} %(h16)s :: (?: %(h16)s : ){2} %(ls32)s | + (?: %(h16)s : ){3} %(h16)s :: %(h16)s : %(ls32)s | + (?: %(h16)s : ){4} %(h16)s :: %(ls32)s | + (?: %(h16)s : ){5} %(h16)s :: %(h16)s | + (?: %(h16)s : ){6} %(h16)s :: + ) +""" % locals() + +# IPvFuture = "v" 1*HEXDIG "." 1*( unreserved / sub-delims / ":" ) +IPvFuture = r"v %(HEXDIG)s+ \. (?: %(unreserved)s | %(sub_delims)s | : )+" % locals() + +# IP-literal = "[" ( IPv6address / IPvFuture ) "]" +IP_literal = r"\[ (?: %(IPv6address)s | %(IPvFuture)s ) \]" % locals() + +# reg-name = *( unreserved / pct-encoded / sub-delims ) +reg_name = r"(?: %(unreserved)s | %(pct_encoded)s | %(sub_delims)s )*" % locals() + +# userinfo = *( unreserved / pct-encoded / sub-delims / ":" ) +userinfo = r"(?: %(unreserved)s | %(pct_encoded)s | %(sub_delims)s | : )" % locals( +) + +# host = IP-literal / IPv4address / reg-name +host = r"(?: %(IP_literal)s | %(IPv4address)s | %(reg_name)s )" % locals() + +# port = *DIGIT +port = r"(?: %(DIGIT)s )*" % locals() + +# authority = [ userinfo "@" ] host [ ":" port ] +authority = r"(?: %(userinfo)s @)? %(host)s (?: : %(port)s)?" % locals() + +# Path + +# segment = *pchar +segment = r"%(pchar)s*" % locals() + +# segment-nz = 1*pchar +segment_nz = r"%(pchar)s+" % locals() + +# segment-nz-nc = 1*( unreserved / pct-encoded / sub-delims / "@" ) +# ; non-zero-length segment without any colon ":" +segment_nz_nc = r"(?: %(unreserved)s | %(pct_encoded)s | %(sub_delims)s | @ )+" % locals() + +# path-abempty = *( "/" segment ) +path_abempty = r"(?: / %(segment)s )*" % locals() + +# path-absolute = "/" [ segment-nz *( "/" segment ) ] +path_absolute = r"/ (?: %(segment_nz)s (?: / %(segment)s )* )?" % locals() + +# path-noscheme = segment-nz-nc *( "/" segment ) +path_noscheme = r"%(segment_nz_nc)s (?: / %(segment)s )*" % locals() + +# path-rootless = segment-nz *( "/" segment ) +path_rootless = r"%(segment_nz)s (?: / %(segment)s )*" % locals() + +# path-empty = 0 +path_empty = r"" # FIXME + +# path = path-abempty ; begins with "/" or is empty +# / path-absolute ; begins with "/" but not "//" +# / path-noscheme ; begins with a non-colon segment +# / path-rootless ; begins with a segment +# / path-empty ; zero characters +path = r"""(?: %(path_abempty)s | + %(path_absolute)s | + %(path_noscheme)s | + %(path_rootless)s | + %(path_empty)s + ) +""" % locals() + +### Query and Fragment + +# query = *( pchar / "/" / "?" ) +query = r"(?: %(pchar)s | / | \? )*" % locals() + +# fragment = *( pchar / "/" / "?" ) +fragment = r"(?: %(pchar)s | / | \? 
)*" % locals() + +# URIs + +# hier-part = "//" authority path-abempty +# / path-absolute +# / path-rootless +# / path-empty +hier_part = r"""(?: (?: // %(authority)s %(path_abempty)s ) | + %(path_absolute)s | + %(path_rootless)s | + %(path_empty)s + ) +""" % locals() + +# relative-part = "//" authority path-abempty +# / path-absolute +# / path-noscheme +# / path-empty +relative_part = r"""(?: (?: // %(authority)s %(path_abempty)s ) | + %(path_absolute)s | + %(path_noscheme)s | + %(path_empty)s + ) +""" % locals() + +# relative-ref = relative-part [ "?" query ] [ "#" fragment ] +relative_ref = r"%(relative_part)s (?: \? %(query)s)? (?: \# %(fragment)s)?" % locals( +) + +# URI = scheme ":" hier-part [ "?" query ] [ "#" fragment ] +URI = r"^(?: %(scheme)s : %(hier_part)s (?: \? %(query)s )? (?: \# %(fragment)s )? )$" % locals( +) + +# URI-reference = URI / relative-ref +URI_reference = r"^(?: %(URI)s | %(relative_ref)s )$" % locals() + +# absolute-URI = scheme ":" hier-part [ "?" query ] +absolute_URI = r"^(?: %(scheme)s : %(hier_part)s (?: \? %(query)s )? )$" % locals( +) + + +def is_uri(uri): + return re.match(URI, uri, re.VERBOSE) + + +def is_uri_reference(uri): + return re.match(URI_reference, uri, re.VERBOSE) + + +def is_absolute_uri(uri): + return re.match(absolute_URI, uri, re.VERBOSE) diff --git a/env/lib/python3.6/site-packages/pip-18.0.dist-info/INSTALLER b/env/lib/python3.6/site-packages/pip-18.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/env/lib/python3.6/site-packages/pip-18.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/env/lib/python3.6/site-packages/pip-18.0.dist-info/LICENSE.txt b/env/lib/python3.6/site-packages/pip-18.0.dist-info/LICENSE.txt new file mode 100644 index 0000000..d3379fa --- /dev/null +++ b/env/lib/python3.6/site-packages/pip-18.0.dist-info/LICENSE.txt @@ -0,0 +1,20 @@ +Copyright (c) 2008-2018 The pip developers (see AUTHORS.txt file) + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/env/lib/python3.6/site-packages/pip-18.0.dist-info/METADATA b/env/lib/python3.6/site-packages/pip-18.0.dist-info/METADATA new file mode 100644 index 0000000..e1ebba8 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip-18.0.dist-info/METADATA @@ -0,0 +1,69 @@ +Metadata-Version: 2.1 +Name: pip +Version: 18.0 +Summary: The PyPA recommended tool for installing Python packages. 
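Returning to ``uri_validate`` above: the ABNF-derived patterns are exposed through small predicate helpers, for example (URIs illustrative)::

    from oauthlib.uri_validate import is_absolute_uri, is_uri

    bool(is_uri('https://client.example.com/cb?code=xyz'))   # True
    bool(is_absolute_uri('https://client.example.com/cb'))   # True
    bool(is_absolute_uri('/cb'))                             # False - no scheme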
+Home-page: https://pip.pypa.io/ +Author: The pip developers +Author-email: pypa-dev@groups.google.com +License: MIT +Keywords: distutils easy_install egg setuptools wheel virtualenv +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Topic :: Software Development :: Build Tools +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Requires-Python: >=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.* + +pip +=== + +The `PyPA recommended`_ tool for installing Python packages. + +.. image:: https://img.shields.io/pypi/v/pip.svg + :target: https://pypi.org/project/pip/ + +.. image:: https://img.shields.io/travis/pypa/pip/master.svg?label=travis-ci + :target: https://travis-ci.org/pypa/pip + +.. image:: https://img.shields.io/appveyor/ci/pypa/pip.svg?label=appveyor-ci + :target: https://ci.appveyor.com/project/pypa/pip/history + +.. image:: https://readthedocs.org/projects/pip/badge/?version=latest + :target: https://pip.pypa.io/en/latest + +* `Installation`_ +* `Documentation`_ +* `Changelog`_ +* `GitHub Page`_ +* `Issue Tracking`_ +* `User mailing list`_ +* `Dev mailing list`_ +* User IRC: #pypa on Freenode. +* Dev IRC: #pypa-dev on Freenode. + +Code of Conduct +--------------- + +Everyone interacting in the pip project's codebases, issue trackers, chat +rooms and mailing lists is expected to follow the `PyPA Code of Conduct`_. + +.. _PyPA recommended: https://packaging.python.org/en/latest/current/ +.. _Installation: https://pip.pypa.io/en/stable/installing.html +.. _Documentation: https://pip.pypa.io/en/stable/ +.. _Changelog: https://pip.pypa.io/en/stable/news.html +.. _GitHub Page: https://github.com/pypa/pip +.. _Issue Tracking: https://github.com/pypa/pip/issues +.. _User mailing list: https://groups.google.com/forum/#!forum/python-virtualenv +.. _Dev mailing list: https://groups.google.com/forum/#!forum/pypa-dev +.. 
_PyPA Code of Conduct: https://www.pypa.io/en/latest/code-of-conduct/ + + diff --git a/env/lib/python3.6/site-packages/pip-18.0.dist-info/RECORD b/env/lib/python3.6/site-packages/pip-18.0.dist-info/RECORD new file mode 100644 index 0000000..5d1a1d8 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip-18.0.dist-info/RECORD @@ -0,0 +1,582 @@ +pip/__init__.py,sha256=IKM5mKN1HOw4yPACjY238eZg9mv0kYUOquDTItUjtP4,21 +pip/__main__.py,sha256=L3IHqBeasELUHvwy5CT_izVEMhM12tve289qut49DvU,623 +pip/_internal/__init__.py,sha256=wzTPPJ7Lieb-khuxuRLbXXOtx1greP5hZtJTnwUFPqk,11250 +pip/_internal/basecommand.py,sha256=m_PKP9SP7nRi3tWo1oeTU6sIf5gq1bRKv5SS-MdCJPM,9780 +pip/_internal/baseparser.py,sha256=vzVUbYmVqJlyfiJ_3Phpi33lcdZrRWQIRmYlzt-071g,8524 +pip/_internal/build_env.py,sha256=cI5lEav68fCpfiy-txgO4lc2b1aUtUSer4EzPJXPOYY,4209 +pip/_internal/cache.py,sha256=hurE4ppJjTd4j4PVwjg5zATjajNZLiiL2g1DzzFnE8U,6821 +pip/_internal/cmdoptions.py,sha256=goksRkLvAuxbfocy1pjotEFj3np0kd9g8BgvlvQpVzk,16319 +pip/_internal/compat.py,sha256=oxa3m7C2qQ4cNDi14QPKHV612YGpVoN5Q4cxe04phjc,7698 +pip/_internal/configuration.py,sha256=U0Yd9p131xJh_9DEl75Xlb82rwFzNIaGJqsJP7o2qGY,12982 +pip/_internal/download.py,sha256=c5Hkimq39eJdZ6DN0_0etjK43-0a5CK_W_3sVLqH87g,33300 +pip/_internal/exceptions.py,sha256=sgmH3aFK-mR9mGRlsVAJ8HfhRFf8J_zUbLg5vuZcmxM,8221 +pip/_internal/index.py,sha256=jgcLUbIWiwOyS9NPqLNuhau8QLtre1a6EQ-2f9p9lM8,41067 +pip/_internal/locations.py,sha256=rkI3KWmFhMXhPTtoglXf7uZKfJ3em1GF9n7e8mOiiPo,6301 +pip/_internal/pep425tags.py,sha256=luGuXXWlZHEn8rGb0V_jSwWVuO-mcMzOVpezA72YkFQ,10798 +pip/_internal/resolve.py,sha256=YZ6W49hQV3ybjvcj-IdBN-XVaV_SdW7jIePuVWMyKzE,13582 +pip/_internal/status_codes.py,sha256=F6uDG6Gj7RNKQJUDnd87QKqI16Us-t-B0wPF_4QMpWc,156 +pip/_internal/wheel.py,sha256=3IlAsd9COXe2vUWccdv6ia1eCB-rE5vki3bhk18dMao,31844 +pip/_internal/commands/__init__.py,sha256=ICge5zPnW7CdvsioRLqPXpiaGtsN0FR0p5mIZ-moGtU,2246 +pip/_internal/commands/check.py,sha256=XDaA8RbTfhRr0IOmqFrQN6hhnJdXfLgHLBg7zMFNma8,1393 +pip/_internal/commands/completion.py,sha256=WPxXNj2bCfDtqP5Oe04AfzeFbfOHDnmLI41wz4GsruI,2924 +pip/_internal/commands/configuration.py,sha256=kzwz6y3tfreGjIcrXMAa0MvZMvPuIKkbvwzwJ4J-wk4,7116 +pip/_internal/commands/download.py,sha256=oNdLQccS-jxfSS-mrJbh47-32kdPIHMmvzIgucSsvao,9058 +pip/_internal/commands/freeze.py,sha256=87qc7ggLKhEC73x1Vul1Crf9-c3wSMSxS0fZOpJOxb0,3224 +pip/_internal/commands/hash.py,sha256=bskoCvz28Ayp7y2ZmJFjcACxisIjTPjzSfexfT24Ph4,1672 +pip/_internal/commands/help.py,sha256=6a_crHnyoNX0L63W-XMCXG6dseiH4-qPZDVWvf5jXKo,1043 +pip/_internal/commands/install.py,sha256=aTH1-QI_ywbQT0xwVVbdZOPba2CfzNBDb1aP1P6c0ZE,20294 +pip/_internal/commands/list.py,sha256=hrDoyY6hPIo94DrEKyVZl69uKH8uGbnSVOJroh-8yhk,10250 +pip/_internal/commands/search.py,sha256=47OjdUeQDMNUTI-y3hWnCE4iCHbWWAnjKigOY74QgwU,4713 +pip/_internal/commands/show.py,sha256=2aguOk_tp4bqeVUBhqdqtso5pJOBS1YmO_5H8qyO5SM,6280 +pip/_internal/commands/uninstall.py,sha256=rWNszUL0_r1XtZCzemNxDiX5U_PLf9CM1FYfrAU_HJI,2920 +pip/_internal/commands/wheel.py,sha256=bwV-ag37ZAodaqs58WnmQ2sruiFTVwR4ilFJFS8DMC8,7011 +pip/_internal/models/__init__.py,sha256=3DHUd_qxpPozfzouoqa9g9ts1Czr5qaHfFxbnxriepM,63 +pip/_internal/models/index.py,sha256=40lauIZxYWPay3T8igBQwjE8SlDAsebphQfRA2WjfJI,418 +pip/_internal/operations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_internal/operations/check.py,sha256=ahcOg5p68nNow6_wy5prYYK0KZq22lm0CsJn8AyDMCI,4937 +pip/_internal/operations/freeze.py,sha256=ww6GQnFB2Gs6IV8ObPA53EUy1JvOjgd_XEa6AVTvKMc,10066 
+pip/_internal/operations/prepare.py,sha256=Sp8_aGb26jCaPLEt1Ymf_QUstKSW0SQMWopOlbltvS0,14334 +pip/_internal/req/__init__.py,sha256=JnNZWvKUQuqAwHh64LCD3zprzWIVQEXChTo2UGHzVqo,2093 +pip/_internal/req/req_file.py,sha256=0AwcYfriooNxlQJjYPOKJywCCSk_C8PcC732LFHN7QQ,11910 +pip/_internal/req/req_install.py,sha256=mzefObnBkthKgc_5SQvmsgZQGWGP1m0gBqUVco4Gab0,43743 +pip/_internal/req/req_set.py,sha256=5nZEZ4C3HUzR7zegLB7DJuUspiTxxi8LM4wj-PH-Yn8,7007 +pip/_internal/req/req_tracker.py,sha256=zH28YHV5TXAVh1ZOEZi6Z1Edkiu26dN2tXfR6VbQ3B4,2370 +pip/_internal/req/req_uninstall.py,sha256=IDBXvY563qXDlObo3uI2MLhNfTYA57JBwdt4q5rRx9Y,16636 +pip/_internal/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_internal/utils/appdirs.py,sha256=TC14M6zMndoMHrC_ZZB-1b85AtX5RVt-kTAs_vX5mg4,9114 +pip/_internal/utils/deprecation.py,sha256=yQTe6dyWlBfxSBrOv_MdRXF1RPLER_EWOp-pa2zLoZc,3021 +pip/_internal/utils/encoding.py,sha256=D8tmfStCah6xh9OLhH9mWLr77q4akhg580YHJMKpq3Y,1025 +pip/_internal/utils/filesystem.py,sha256=buZGDuxZDbWtaW7CWFP9imqx_7EDsqmL7kZdJ5wFPfw,909 +pip/_internal/utils/glibc.py,sha256=prOrsBjmgkDE-hY4Pl120yF5MIlkkmGrFLs8XfIyT-w,3004 +pip/_internal/utils/hashes.py,sha256=rJk-gj6F-sHggXAG97dhynqUHFFgApyZLWgaG2xCHME,2900 +pip/_internal/utils/logging.py,sha256=McWNwXiFT5iXBBcdEZ1f3ntDWNLnAuhCUDL2fLcPIOU,6289 +pip/_internal/utils/misc.py,sha256=G56WFsA2bEl8bX-56hAdo24R1t0KrC4SxoWVnz9GIBg,28604 +pip/_internal/utils/outdated.py,sha256=MQ8vzB7b5SD4Yfe7uskhkvREbShbssicJ97mHXiCF4k,5276 +pip/_internal/utils/packaging.py,sha256=v5aEmCFeq5cJEKU7PwKzAhReDgA4aGeUONSuvkl-Iy0,2277 +pip/_internal/utils/setuptools_build.py,sha256=0blfscmNJW_iZ5DcswJeDB_PbtTEjfK9RL1R1WEDW2E,278 +pip/_internal/utils/temp_dir.py,sha256=n2FkVlwRX_hS61fYt3nSAh2e2V6CcZn_dfbPId1pAQE,2615 +pip/_internal/utils/typing.py,sha256=ztYtZAcqjCYDwP-WlF6EiAAskAsZBMMXtuqvfgZIlgQ,1139 +pip/_internal/utils/ui.py,sha256=JYw7ny1ytKQJdawBCwE54hS6n1eGGw9hjimBwnFQ52Y,13651 +pip/_internal/vcs/__init__.py,sha256=tFMlJNMnLcaDZB_nkbLBYbk0UbRrKihnKspzfosOHOA,15980 +pip/_internal/vcs/bazaar.py,sha256=UIcvDihmoALsdDjo2gyUtxQpEXimiH5Drrv7KkeoeXY,3582 +pip/_internal/vcs/git.py,sha256=BLbXbsv8punlVEQPz5Rqj9od4-w6TjXZSFgeJfSCFjQ,11275 +pip/_internal/vcs/mercurial.py,sha256=karAzCCaUbtGGYs4mR88yscWTs84BKYIQLejkB34BRs,3438 +pip/_internal/vcs/subversion.py,sha256=RgOv6cHC9nfILa3jjVj4MWaqVjpg8K93T1_2HoB6MRo,8863 +pip/_vendor/__init__.py,sha256=XnhkujjE1qUGRlYGYbIRrEGYYYBcNLBraE27HH48wYw,4756 +pip/_vendor/appdirs.py,sha256=BENKsvcA08IpccD9345-rMrg3aXWFA1q6BFEglnHg6I,24547 +pip/_vendor/distro.py,sha256=dOMrjIXv-3GmEbtP-NJc057Sv19P7ZAdke-v0TBeNio,42455 +pip/_vendor/ipaddress.py,sha256=2OgbkeAD2rLkcXqbcvof3J5R7lRwjNLoBySyTkBtKnc,79852 +pip/_vendor/pyparsing.py,sha256=YmrfFWmUgne4-FjtSotxVOJvtxea3TOtsrOtrhozkXs,225348 +pip/_vendor/retrying.py,sha256=k3fflf5_Mm0XcIJYhB7Tj34bqCCPhUDkYbx1NvW2FPE,9972 +pip/_vendor/six.py,sha256=A08MPb-Gi9FfInI3IW7HimXFmEH2T2IPzHgDvdhZPRA,30888 +pip/_vendor/cachecontrol/__init__.py,sha256=6cRPchVqkAkeUtYTSW8qCetjSqJo-GxP-n4VMVDbvmc,302 +pip/_vendor/cachecontrol/_cmd.py,sha256=URGE0KrA87QekCG3SGPatlSPT571dZTDjNa-ZXX3pDc,1295 +pip/_vendor/cachecontrol/adapter.py,sha256=eBGAtVNRZgtl_Kj5JV54miqL9YND-D0JZPahwY8kFtY,4863 +pip/_vendor/cachecontrol/cache.py,sha256=1fc4wJP8HYt1ycnJXeEw5pCpeBL2Cqxx6g9Fb0AYDWQ,805 +pip/_vendor/cachecontrol/compat.py,sha256=kHNvMRdt6s_Xwqq_9qJmr9ou3wYMOMUMxPPcwNxT8Mc,695 +pip/_vendor/cachecontrol/controller.py,sha256=U7g-YwizQ2O5NRgK_MZreF1ntM4E49C3PuF3od-Vwz4,13698 
+pip/_vendor/cachecontrol/filewrapper.py,sha256=vACKO8Llzu_ZWyjV1Fxn1MA4TGU60N5N3GSrAFdAY2Q,2533 +pip/_vendor/cachecontrol/heuristics.py,sha256=BFGHJ3yQcxvZizfo90LLZ04T_Z5XSCXvFotrp7Us0sc,4070 +pip/_vendor/cachecontrol/serialize.py,sha256=GebE34fgToyWwAsRPguh8hEPN6CqoG-5hRMXRsjVABQ,6954 +pip/_vendor/cachecontrol/wrapper.py,sha256=sfr9YHWx-5TwNz1H5rT6QOo8ggII6v3vbEDjQFwR6wc,671 +pip/_vendor/cachecontrol/caches/__init__.py,sha256=-gHNKYvaeD0kOk5M74eOrsSgIKUtC6i6GfbmugGweEo,86 +pip/_vendor/cachecontrol/caches/file_cache.py,sha256=8vrSzzGcdfEfICago1uSFbkumNJMGLbCdEkXsmUIExw,4177 +pip/_vendor/cachecontrol/caches/redis_cache.py,sha256=HxelMpNCo-dYr2fiJDwM3hhhRmxUYtB5tXm1GpAAT4Y,856 +pip/_vendor/certifi/__init__.py,sha256=KHDlQtQQTRmOG0TJi12ZIE5WWq2tYHM5ax30EX6UJ04,63 +pip/_vendor/certifi/__main__.py,sha256=FiOYt1Fltst7wk9DRa6GCoBr8qBUxlNQu_MKJf04E6s,41 +pip/_vendor/certifi/cacert.pem,sha256=0lwMLbfi4umzDdOmdLMdrNkgZxw-5y6PCE10PrnJy-k,268839 +pip/_vendor/certifi/core.py,sha256=xPQDdG_siy5A7BfqGWa7RJhcA61xXEqPiSrw9GNyhHE,836 +pip/_vendor/chardet/__init__.py,sha256=YsP5wQlsHJ2auF1RZJfypiSrCA7_bQiRm3ES_NI76-Y,1559 +pip/_vendor/chardet/big5freq.py,sha256=D_zK5GyzoVsRes0HkLJziltFQX0bKCLOrFe9_xDvO_8,31254 +pip/_vendor/chardet/big5prober.py,sha256=kBxHbdetBpPe7xrlb-e990iot64g_eGSLd32lB7_h3M,1757 +pip/_vendor/chardet/chardistribution.py,sha256=3woWS62KrGooKyqz4zQSnjFbJpa6V7g02daAibTwcl8,9411 +pip/_vendor/chardet/charsetgroupprober.py,sha256=6bDu8YIiRuScX4ca9Igb0U69TA2PGXXDej6Cc4_9kO4,3787 +pip/_vendor/chardet/charsetprober.py,sha256=KSmwJErjypyj0bRZmC5F5eM7c8YQgLYIjZXintZNstg,5110 +pip/_vendor/chardet/codingstatemachine.py,sha256=VYp_6cyyki5sHgXDSZnXW4q1oelHc3cu9AyQTX7uug8,3590 +pip/_vendor/chardet/compat.py,sha256=PKTzHkSbtbHDqS9PyujMbX74q1a8mMpeQTDVsQhZMRw,1134 +pip/_vendor/chardet/cp949prober.py,sha256=TZ434QX8zzBsnUvL_8wm4AQVTZ2ZkqEEQL_lNw9f9ow,1855 +pip/_vendor/chardet/enums.py,sha256=Aimwdb9as1dJKZaFNUH2OhWIVBVd6ZkJJ_WK5sNY8cU,1661 +pip/_vendor/chardet/escprober.py,sha256=kkyqVg1Yw3DIOAMJ2bdlyQgUFQhuHAW8dUGskToNWSc,3950 +pip/_vendor/chardet/escsm.py,sha256=RuXlgNvTIDarndvllNCk5WZBIpdCxQ0kcd9EAuxUh84,10510 +pip/_vendor/chardet/eucjpprober.py,sha256=iD8Jdp0ISRjgjiVN7f0e8xGeQJ5GM2oeZ1dA8nbSeUw,3749 +pip/_vendor/chardet/euckrfreq.py,sha256=-7GdmvgWez4-eO4SuXpa7tBiDi5vRXQ8WvdFAzVaSfo,13546 +pip/_vendor/chardet/euckrprober.py,sha256=MqFMTQXxW4HbzIpZ9lKDHB3GN8SP4yiHenTmf8g_PxY,1748 +pip/_vendor/chardet/euctwfreq.py,sha256=No1WyduFOgB5VITUA7PLyC5oJRNzRyMbBxaKI1l16MA,31621 +pip/_vendor/chardet/euctwprober.py,sha256=13p6EP4yRaxqnP4iHtxHOJ6R2zxHq1_m8hTRjzVZ95c,1747 +pip/_vendor/chardet/gb2312freq.py,sha256=JX8lsweKLmnCwmk8UHEQsLgkr_rP_kEbvivC4qPOrlc,20715 +pip/_vendor/chardet/gb2312prober.py,sha256=gGvIWi9WhDjE-xQXHvNIyrnLvEbMAYgyUSZ65HUfylw,1754 +pip/_vendor/chardet/hebrewprober.py,sha256=c3SZ-K7hvyzGY6JRAZxJgwJ_sUS9k0WYkvMY00YBYFo,13838 +pip/_vendor/chardet/jisfreq.py,sha256=vpmJv2Bu0J8gnMVRPHMFefTRvo_ha1mryLig8CBwgOg,25777 +pip/_vendor/chardet/jpcntx.py,sha256=PYlNqRUQT8LM3cT5FmHGP0iiscFlTWED92MALvBungo,19643 +pip/_vendor/chardet/langbulgarianmodel.py,sha256=1HqQS9Pbtnj1xQgxitJMvw8X6kKr5OockNCZWfEQrPE,12839 +pip/_vendor/chardet/langcyrillicmodel.py,sha256=LODajvsetH87yYDDQKA2CULXUH87tI223dhfjh9Zx9c,17948 +pip/_vendor/chardet/langgreekmodel.py,sha256=8YAW7bU8YwSJap0kIJSbPMw1BEqzGjWzqcqf0WgUKAA,12688 +pip/_vendor/chardet/langhebrewmodel.py,sha256=JSnqmE5E62tDLTPTvLpQsg5gOMO4PbdWRvV7Avkc0HA,11345 +pip/_vendor/chardet/langhungarianmodel.py,sha256=RhapYSG5l0ZaO-VV4Fan5sW0WRGQqhwBM61yx3yxyOA,12592 
+pip/_vendor/chardet/langthaimodel.py,sha256=8l0173Gu_W6G8mxmQOTEF4ls2YdE7FxWf3QkSxEGXJQ,11290 +pip/_vendor/chardet/langturkishmodel.py,sha256=W22eRNJsqI6uWAfwXSKVWWnCerYqrI8dZQTm_M0lRFk,11102 +pip/_vendor/chardet/latin1prober.py,sha256=S2IoORhFk39FEFOlSFWtgVybRiP6h7BlLldHVclNkU8,5370 +pip/_vendor/chardet/mbcharsetprober.py,sha256=AR95eFH9vuqSfvLQZN-L5ijea25NOBCoXqw8s5O9xLQ,3413 +pip/_vendor/chardet/mbcsgroupprober.py,sha256=h6TRnnYq2OxG1WdD5JOyxcdVpn7dG0q-vB8nWr5mbh4,2012 +pip/_vendor/chardet/mbcssm.py,sha256=SY32wVIF3HzcjY3BaEspy9metbNSKxIIB0RKPn7tjpI,25481 +pip/_vendor/chardet/sbcharsetprober.py,sha256=LDSpCldDCFlYwUkGkwD2oFxLlPWIWXT09akH_2PiY74,5657 +pip/_vendor/chardet/sbcsgroupprober.py,sha256=1IprcCB_k1qfmnxGC6MBbxELlKqD3scW6S8YIwdeyXA,3546 +pip/_vendor/chardet/sjisprober.py,sha256=IIt-lZj0WJqK4rmUZzKZP4GJlE8KUEtFYVuY96ek5MQ,3774 +pip/_vendor/chardet/universaldetector.py,sha256=qL0174lSZE442eB21nnktT9_VcAye07laFWUeUrjttY,12485 +pip/_vendor/chardet/utf8prober.py,sha256=IdD8v3zWOsB8OLiyPi-y_fqwipRFxV9Nc1eKBLSuIEw,2766 +pip/_vendor/chardet/version.py,sha256=sp3B08mrDXB-pf3K9fqJ_zeDHOCLC8RrngQyDFap_7g,242 +pip/_vendor/chardet/cli/__init__.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1 +pip/_vendor/chardet/cli/chardetect.py,sha256=DI8dlV3FBD0c0XA_y3sQ78z754DUv1J8n34RtDjOXNw,2774 +pip/_vendor/colorama/__init__.py,sha256=V3-Hv_vOa-2lE5Q_0mGkdhZo-9e4XrGTW_44cU81qQY,240 +pip/_vendor/colorama/ansi.py,sha256=Fi0un-QLqRm-v7o_nKiOqyC8PapBJK7DLV_q9LKtTO0,2524 +pip/_vendor/colorama/ansitowin32.py,sha256=QrieYX2tsaWIO19P6biMa1zUCt-_abudoEp2_IdqZZU,9668 +pip/_vendor/colorama/initialise.py,sha256=cHqVJtb82OG7HUCxvQ2joG7N_CoxbIKbI_fgryZkj20,1917 +pip/_vendor/colorama/win32.py,sha256=5Hc7L1LabubrYDhdWAfRyzlt14ErP3YKDvf_zYaarLk,5426 +pip/_vendor/colorama/winterm.py,sha256=V7U7ojwG1q4n6PKripjEvW_htYQi5ueXSM3LUUoqqDY,6290 +pip/_vendor/distlib/__init__.py,sha256=GxRrh1augb66Eo9NB9jrdwQS02KcBypj9o_-C3oyKZI,581 +pip/_vendor/distlib/compat.py,sha256=xdNZmqFN5HwF30HjRn5M415pcC2kgXRBXn767xS8v-M,41404 +pip/_vendor/distlib/database.py,sha256=LqTcNkDyV4bWcc_qDxiYJHnXaNxFs1O1bFSAg_reaI0,50868 +pip/_vendor/distlib/index.py,sha256=Dd1kIV06XIdynNpKxHMMRRIKsXuoUsG7QIzntfVtZCI,21073 +pip/_vendor/distlib/locators.py,sha256=e4UaQSzNg5iG3PfQRH6lnVMfLOwhm2sVmGGRdjdB3ik,51657 +pip/_vendor/distlib/manifest.py,sha256=nQEhYmgoreaBZzyFzwYsXxJARu3fo4EkunU163U16iE,14811 +pip/_vendor/distlib/markers.py,sha256=6Ac3cCfFBERexiESWIOXmg-apIP8l2esafNSX3KMy-8,4387 +pip/_vendor/distlib/metadata.py,sha256=Ns92dqeMxopDPQsiEWnhMtd4RagJaA58lz8O_vjCxyk,39986 +pip/_vendor/distlib/resources.py,sha256=2FGv0ZHF14KXjLIlL0R991lyQQGcewOS4mJ-5n-JVnc,10766 +pip/_vendor/distlib/scripts.py,sha256=WEqXkpRvqR6oe-QlMRYg8gEJxXRWJeWn1GPc0ihZ4N0,16585 +pip/_vendor/distlib/t32.exe,sha256=ftub1bsSPUCOnBn-eCtcarKTk0N0CBEP53BumkIxWJE,92672 +pip/_vendor/distlib/t64.exe,sha256=iChOG627LWTHY8-jzSwlo9SYU5a-0JHwQu4AqDz8I68,102400 +pip/_vendor/distlib/util.py,sha256=FnzjaibVcIg1xOtET6QPNeqTnn3LcWLCjNOficMyGKA,59494 +pip/_vendor/distlib/version.py,sha256=_n7F6juvQGAcn769E_SHa7fOcf5ERlEVymJ_EjPRwGw,23391 +pip/_vendor/distlib/w32.exe,sha256=NPYPpt7PIjVqABEu1CzabbDyHHkJpuw-_qZq_48H0j0,89088 +pip/_vendor/distlib/w64.exe,sha256=Yb-qr1OQEzL8KRGTk-XHUZDwMSljfQeZnVoTk-K4e7E,99328 +pip/_vendor/distlib/wheel.py,sha256=W9aKwi4CQL_bQFYb8IcwH-c6WK-yku5P8SY3RGPv-Mk,39506 +pip/_vendor/distlib/_backport/__init__.py,sha256=bqS_dTOH6uW9iGgd0uzfpPjo6vZ4xpPZ7kyfZJ2vNaw,274 +pip/_vendor/distlib/_backport/misc.py,sha256=KWecINdbFNOxSOP1fGF680CJnaC6S4fBRgEtaYTw0ig,971 
+pip/_vendor/distlib/_backport/shutil.py,sha256=VW1t3uYqUjWZH7jV-6QiimLhnldoV5uIpH4EuiT1jfw,25647 +pip/_vendor/distlib/_backport/sysconfig.cfg,sha256=swZKxq9RY5e9r3PXCrlvQPMsvOdiWZBTHLEbqS8LJLU,2617 +pip/_vendor/distlib/_backport/sysconfig.py,sha256=JdJ9ztRy4Hc-b5-VS74x3nUtdEIVr_OBvMsIb8O2sjc,26964 +pip/_vendor/distlib/_backport/tarfile.py,sha256=Ihp7rXRcjbIKw8COm9wSePV9ARGXbSF9gGXAMn2Q-KU,92628 +pip/_vendor/html5lib/__init__.py,sha256=Ztrn7UvF-wIFAgRBBa0ML-Gu5AffH3BPX_INJx4SaBI,1162 +pip/_vendor/html5lib/_ihatexml.py,sha256=3LBtJMlzgwM8vpQiU1TvGmEEmNH72sV0yD8yS53y07A,16705 +pip/_vendor/html5lib/_inputstream.py,sha256=bPUWcAfJScK4xkjQQaG_HsI2BvEVbFvI0AsodDYPQj0,32552 +pip/_vendor/html5lib/_tokenizer.py,sha256=YAaOEBD6qc5ISq9Xt9Nif1OFgcybTTfMdwqBkZhpAq4,76580 +pip/_vendor/html5lib/_utils.py,sha256=ismpASeqa2jqEPQjHUj8vReAf7yIoKnvLN5fuOw6nv0,4015 +pip/_vendor/html5lib/constants.py,sha256=4lmZWLtEPRLnl8NzftOoYTJdo6jpeMtP6dqQC0g_bWQ,83518 +pip/_vendor/html5lib/html5parser.py,sha256=g5g2ezkusHxhi7b23vK_-d6K6BfIJRbqIQmvQ9z4EgI,118963 +pip/_vendor/html5lib/serializer.py,sha256=yfcfBHse2wDs6ojxn-kieJjLT5s1ipilQJ0gL3-rJis,15758 +pip/_vendor/html5lib/_trie/__init__.py,sha256=8VR1bcgD2OpeS2XExpu5yBhP_Q1K-lwKbBKICBPf1kU,289 +pip/_vendor/html5lib/_trie/_base.py,sha256=uJHVhzif9S0MJXgy9F98iEev5evi_rgUk5BmEbUSp8c,930 +pip/_vendor/html5lib/_trie/datrie.py,sha256=EQpqSfkZRuTbE-DuhW7xMdVDxdZNZ0CfmnYfHA_3zxM,1178 +pip/_vendor/html5lib/_trie/py.py,sha256=wXmQLrZRf4MyWNyg0m3h81m9InhLR7GJ002mIIZh-8o,1775 +pip/_vendor/html5lib/filters/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_vendor/html5lib/filters/alphabeticalattributes.py,sha256=lViZc2JMCclXi_5gduvmdzrRxtO5Xo9ONnbHBVCsykU,919 +pip/_vendor/html5lib/filters/base.py,sha256=z-IU9ZAYjpsVsqmVt7kuWC63jR11hDMr6CVrvuao8W0,286 +pip/_vendor/html5lib/filters/inject_meta_charset.py,sha256=egDXUEHXmAG9504xz0K6ALDgYkvUrC2q15YUVeNlVQg,2945 +pip/_vendor/html5lib/filters/lint.py,sha256=jk6q56xY0ojiYfvpdP-OZSm9eTqcAdRqhCoPItemPYA,3643 +pip/_vendor/html5lib/filters/optionaltags.py,sha256=8lWT75J0aBOHmPgfmqTHSfPpPMp01T84NKu0CRedxcE,10588 +pip/_vendor/html5lib/filters/sanitizer.py,sha256=4ON02KNjuqda1lCw5_JCUZxb0BzWR5M7ON84dtJ7dm0,26248 +pip/_vendor/html5lib/filters/whitespace.py,sha256=8eWqZxd4UC4zlFGW6iyY6f-2uuT8pOCSALc3IZt7_t4,1214 +pip/_vendor/html5lib/treeadapters/__init__.py,sha256=A0rY5gXIe4bJOiSGRO_j_tFhngRBO8QZPzPtPw5dFzo,679 +pip/_vendor/html5lib/treeadapters/genshi.py,sha256=CH27pAsDKmu4ZGkAUrwty7u0KauGLCZRLPMzaO3M5vo,1715 +pip/_vendor/html5lib/treeadapters/sax.py,sha256=BKS8woQTnKiqeffHsxChUqL4q2ZR_wb5fc9MJ3zQC8s,1776 +pip/_vendor/html5lib/treebuilders/__init__.py,sha256=AysSJyvPfikCMMsTVvaxwkgDieELD5dfR8FJIAuq7hY,3592 +pip/_vendor/html5lib/treebuilders/base.py,sha256=wQGp5yy22TNG8tJ6aREe4UUeTR7A99dEz0BXVaedWb4,14579 +pip/_vendor/html5lib/treebuilders/dom.py,sha256=SY3MsijXyzdNPc8aK5IQsupBoM8J67y56DgNtGvsb9g,8835 +pip/_vendor/html5lib/treebuilders/etree.py,sha256=aqIBOGj_dFYqBURIcTegGNBhAIJOw5iFDHb4jrkYH-8,12764 +pip/_vendor/html5lib/treebuilders/etree_lxml.py,sha256=9V0dXxbJYYq-Skgb5-_OL2NkVYpjioEb4CHajo0e9yI,14122 +pip/_vendor/html5lib/treewalkers/__init__.py,sha256=yhXxHpjlSqfQyUag3v8-vWjMPriFBU8YRAPNpDgBTn8,5714 +pip/_vendor/html5lib/treewalkers/base.py,sha256=ouiOsuSzvI0KgzdWP8PlxIaSNs9falhbiinAEc_UIJY,7476 +pip/_vendor/html5lib/treewalkers/dom.py,sha256=EHyFR8D8lYNnyDU9lx_IKigVJRyecUGua0mOi7HBukc,1413 +pip/_vendor/html5lib/treewalkers/etree.py,sha256=sz1o6mmE93NQ53qJFDO7HKyDtuwgK-Ay3qSFZPC6u00,4550 
+pip/_vendor/html5lib/treewalkers/etree_lxml.py,sha256=sY6wfRshWTllu6n48TPWpKsQRPp-0CQrT0hj_AdzHSU,6309 +pip/_vendor/html5lib/treewalkers/genshi.py,sha256=4D2PECZ5n3ZN3qu3jMl9yY7B81jnQApBQSVlfaIuYbA,2309 +pip/_vendor/idna/__init__.py,sha256=9Nt7xpyet3DmOrPUGooDdAwmHZZu1qUAy2EaJ93kGiQ,58 +pip/_vendor/idna/codec.py,sha256=lvYb7yu7PhAqFaAIAdWcwgaWI2UmgseUua-1c0AsG0A,3299 +pip/_vendor/idna/compat.py,sha256=R-h29D-6mrnJzbXxymrWUW7iZUvy-26TQwZ0ij57i4U,232 +pip/_vendor/idna/core.py,sha256=OwI5R_uuXU4PlOSoG8cjaMPA1hhdGGjjZ8I2MZhSPxo,11858 +pip/_vendor/idna/idnadata.py,sha256=zwxvoSsYqPHNa6xzXWHizXpDC28JJMGXRinhJ4Gkcz0,39285 +pip/_vendor/idna/intranges.py,sha256=TY1lpxZIQWEP6tNqjZkFA5hgoMWOj1OBmnUG8ihT87E,1749 +pip/_vendor/idna/package_data.py,sha256=Vt9rtr32BzO7O25rypo8nzAs3syTJhG1ojU3J-s2RFo,21 +pip/_vendor/idna/uts46data.py,sha256=czULzYN5Lr9K5MmOH-1g3CJY7QPjGeHjYmC3saJ_BHk,197803 +pip/_vendor/lockfile/__init__.py,sha256=Tqpz90DwKYfhPsfzVOJl84TL87pdFE5ePNHdXAxs4Tk,9371 +pip/_vendor/lockfile/linklockfile.py,sha256=C7OH3H4GdK68u4FQgp8fkP2kO4fyUTSyj3X6blgfobc,2652 +pip/_vendor/lockfile/mkdirlockfile.py,sha256=e3qgIL-etZMLsS-3ft19iW_8IQ360HNkGOqE3yBKsUw,3096 +pip/_vendor/lockfile/pidlockfile.py,sha256=ukH9uk6NFuxyVmG5QiWw4iKq3fT7MjqUguX95avYPIY,6090 +pip/_vendor/lockfile/sqlitelockfile.py,sha256=o2TMkMRY0iwn-iL1XMRRIFStMUkS4i3ajceeYNntKFg,5506 +pip/_vendor/lockfile/symlinklockfile.py,sha256=ABwXXmvTHvCl5viPblShL3PG-gGsLiT1roAMfDRwhi8,2616 +pip/_vendor/msgpack/__init__.py,sha256=y0bk2YbzK6J2e0J_dyreN6nD7yM2IezT6m_tU2h-Mdg,1677 +pip/_vendor/msgpack/_version.py,sha256=dN7wVIjbyuQIJ35B2o6gymQNDLPlj_7-uTfgCv7KErM,20 +pip/_vendor/msgpack/exceptions.py,sha256=lPkAi_u12NlFajDz4FELSHEdfU8hrR3zeTvKX8aQuz4,1056 +pip/_vendor/msgpack/fallback.py,sha256=h0ll8xnq12mI9PuQ9Qd_Ihtt08Sp8L0JqhG9KY8Vyjk,36411 +pip/_vendor/packaging/__about__.py,sha256=keVv6YdCEOUPM4CfpYUjc_mYRH0kiV1Yd_t2kcw4Fck,720 +pip/_vendor/packaging/__init__.py,sha256=_vNac5TrzwsrzbOFIbF-5cHqc_Y2aPT2D7zrIR06BOo,513 +pip/_vendor/packaging/_compat.py,sha256=Vi_A0rAQeHbU-a9X0tt1yQm9RqkgQbDSxzRw8WlU9kA,860 +pip/_vendor/packaging/_structures.py,sha256=DCpKtb7u94_oqgVsIJQTrTyZcb3Gz7sSGbk9vYDMME0,1418 +pip/_vendor/packaging/markers.py,sha256=ftZegBU5oEmulEKApDGEPgti2lYIchFQHAfH9tZy3_U,8221 +pip/_vendor/packaging/requirements.py,sha256=GIeW8Qx6DmjvbsumBHPUtn4JrWtHYUyLtNKLAsVmUF8,4441 +pip/_vendor/packaging/specifiers.py,sha256=cDlvKb4hRNwZ72VKE7KP9QbNOmqDPIvRrUFOwhz1e3Q,28026 +pip/_vendor/packaging/utils.py,sha256=c9obOpok2CpKDApkc2M5ma0YFnT-jtt4I6XI4F0jYiI,1580 +pip/_vendor/packaging/version.py,sha256=MKL8nbKLPLGPouIwFvwSVnYRzNpkMo5AIcsa6LGqDF8,12219 +pip/_vendor/pkg_resources/__init__.py,sha256=eyzgYXSzEXhTkYxCorE8r8yu4F5UhIaD8t73eyUiX_U,103750 +pip/_vendor/pkg_resources/py31compat.py,sha256=-ysVqoxLetAnL94uM0kHkomKQTC1JZLN2ZUjqUhMeKE,600 +pip/_vendor/progress/__init__.py,sha256=Hv3Y8Hr6RyM34NdZkrZQWMURjS2h5sONRHJSvZXWZgQ,3188 +pip/_vendor/progress/bar.py,sha256=hlkDAEv9pRRiWqR5XL6vIAgMG4u_dBGEW_8klQhBRq0,2942 +pip/_vendor/progress/counter.py,sha256=XtBuZY4yYmr50E2A_fAzjWhm0IkwaVwxNsNVYDE7nsw,1528 +pip/_vendor/progress/helpers.py,sha256=6FsBLh_xUlKiVua-zZIutCjxth-IO8FtyUj6I2tx9fg,2952 +pip/_vendor/progress/spinner.py,sha256=m7bASI2GUbLFG-PbAefdHtrrWWlJLFhhSBbw70gp2TY,1439 +pip/_vendor/pytoml/__init__.py,sha256=q12Xv23Tta44gtK4HGK68Gr4tKfciILidFPmPuoIqIo,92 +pip/_vendor/pytoml/core.py,sha256=9CrLLTs1PdWjEwRnYzt_i4dhHcZvGxs_GsMlYAX3iY4,509 +pip/_vendor/pytoml/parser.py,sha256=DSFFTgP0RhBE989f3cTlTvJP6pRCROYMHYBXYg-TZNk,11239 
+pip/_vendor/pytoml/writer.py,sha256=-mSOVGaiGLrpj5BRR7czmquZXJGflcElHrwAd33J48A,3815 +pip/_vendor/requests/__init__.py,sha256=o2ykqAY7qT5lS007T6tDksb5uBB12oov39xXZ-E88vQ,4203 +pip/_vendor/requests/__version__.py,sha256=rJ2xgNOLhjspGkNPfgXTBctqqvsf2uJMFTaE0rlVtbI,436 +pip/_vendor/requests/_internal_utils.py,sha256=Zx3PnEUccyfsB-ie11nZVAW8qClJy0gx1qNME7rgT18,1096 +pip/_vendor/requests/adapters.py,sha256=y5DISepvSsGlu3II_VUsdgKBej1dGY4b5beRrTE2tsI,21428 +pip/_vendor/requests/api.py,sha256=zub9ENcEUT2m9gwgBgqH5RIRDfrx2kwRpZ7L6hX3mcw,6261 +pip/_vendor/requests/auth.py,sha256=oRSQkBYcLkTEssudkzoR1UW1Glb1ts3p1RWusgHf1YU,10208 +pip/_vendor/requests/certs.py,sha256=nXRVq9DtGmv_1AYbwjTu9UrgAcdJv05ZvkNeaoLOZxY,465 +pip/_vendor/requests/compat.py,sha256=7EC6fZY4dJDxuBQnqUGwe13OTZ3VLGO3QfOApE5lE3I,1998 +pip/_vendor/requests/cookies.py,sha256=olUaLeNci_z1K-Bn5PeEKllSspmQqN9-s8Ug7CasaPE,18346 +pip/_vendor/requests/exceptions.py,sha256=-mLam3TAx80V09EaH3H-ZxR61eAVuLRZ8zgBBSLjK44,3197 +pip/_vendor/requests/help.py,sha256=T4K-Oo_FS9fxF8NHVR8hxMwFo71gIkRM7UddCc9vH7Y,3669 +pip/_vendor/requests/hooks.py,sha256=HXAHoC1FNTFRZX6-lNdvPM7Tst4kvGwYTN-AOKRxoRU,767 +pip/_vendor/requests/models.py,sha256=3fmmYdDW7U18SrVeZaseHuk8KPI-7vLp_435DY3ejes,34160 +pip/_vendor/requests/packages.py,sha256=njJmVifY4aSctuW3PP5EFRCxjEwMRDO6J_feG2dKWsI,695 +pip/_vendor/requests/sessions.py,sha256=71MK2HCadovka1vAx9dyDFWAuw69KgRPRBpd0HWSEAo,27829 +pip/_vendor/requests/status_codes.py,sha256=pgw-xlnxO5zHQWn3fKps2cxwQehKzPxEbdhIrMQe6Ec,4128 +pip/_vendor/requests/structures.py,sha256=zoP8qly2Jak5e89HwpqjN1z2diztI-_gaqts1raJJBc,2981 +pip/_vendor/requests/utils.py,sha256=3OxbbLUQFVdm84fdBD9nduXvhw6hIzj59mhvBomKuJI,30156 +pip/_vendor/urllib3/__init__.py,sha256=DZucS8tlzGYKmK5FIsyUViMghpCq_0_0Ouvm_Jp9GNc,2853 +pip/_vendor/urllib3/_collections.py,sha256=iNeAU_we9L3lMGRUKKdq24Mf7o050TXP5U4Jm9AzAY4,10841 +pip/_vendor/urllib3/connection.py,sha256=My76qeWMDkV-KP1l3iChXHOE7J-ZCaUdP3sPIiLA2uE,14485 +pip/_vendor/urllib3/connectionpool.py,sha256=w20OwKdIqk6f8FIl6QGgn6jf9gZ0-tmgH7VPxnpEgkQ,35464 +pip/_vendor/urllib3/exceptions.py,sha256=rFeIfBNKC8KJ61ux-MtJyJlEC9G9ggkmCeF751JwVR4,6604 +pip/_vendor/urllib3/fields.py,sha256=D_TE_SK15YatdbhWDMN0OE3X6UCJn1RTkANINCYOobE,5943 +pip/_vendor/urllib3/filepost.py,sha256=40CROlpRKVBpFUkD0R6wJf_PpvbcRQRFUu0OOQlFkKM,2436 +pip/_vendor/urllib3/poolmanager.py,sha256=FHBjb7odbP2LyQRzeitgpuh1AQAPyegzmrm2b3gSZlY,16821 +pip/_vendor/urllib3/request.py,sha256=fwjlq5nQfcUa7aoncR25z6-fJAX_oNTcPksKKGjBm38,5996 +pip/_vendor/urllib3/response.py,sha256=uAuOTZSuTodzvQWIDCZghDoKmZ2bKbgIRCfaVIIZfn8,24667 +pip/_vendor/urllib3/contrib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_vendor/urllib3/contrib/appengine.py,sha256=Q3BDy5C_TrI-3cSyo0ELNGlNiK2eSVptQAQMdz4PH9Q,11197 +pip/_vendor/urllib3/contrib/ntlmpool.py,sha256=Q9-rO5Rh2-IqyEd4ZicpTDfMnOlf0IPPCkjhChBCjV4,4478 +pip/_vendor/urllib3/contrib/pyopenssl.py,sha256=cM7fVZJRrdLZsprcdWe3meM_hvq8LR73UNDveIMa-20,15480 +pip/_vendor/urllib3/contrib/securetransport.py,sha256=BqXSlChN9_hjCWgyN6JdcgvBUdc37QCCX4u3_8zE_9o,30309 +pip/_vendor/urllib3/contrib/socks.py,sha256=Iom0snbHkCuZbZ7Sle2Kueha1W0jYAJ0SyCOtePLaio,6391 +pip/_vendor/urllib3/contrib/_securetransport/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_vendor/urllib3/contrib/_securetransport/bindings.py,sha256=x2kLSh-ASZKsun0FxtraBuLVe3oHuth4YW6yZ5Vof-w,17560 +pip/_vendor/urllib3/contrib/_securetransport/low_level.py,sha256=Umy5u-3Z957GirdapnicXVOpHaM4xdOZABJuJxfaeJA,12162 
+pip/_vendor/urllib3/packages/__init__.py,sha256=nlChrGzkjCkmhCX9HrF_qHPUgosfsPQkVIJxiiLhk9g,109 +pip/_vendor/urllib3/packages/ordered_dict.py,sha256=VQaPONfhVMsb8B63Xg7ZOydJqIE_jzeMhVN3Pec6ogw,8935 +pip/_vendor/urllib3/packages/six.py,sha256=A6hdJZVjI3t_geebZ9BzUvwRrIXo0lfwzQlM2LcKyas,30098 +pip/_vendor/urllib3/packages/backports/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_vendor/urllib3/packages/backports/makefile.py,sha256=r1IADol_pBBq2Y1ub4CPyuS2hXuShK47nfFngZRcRhI,1461 +pip/_vendor/urllib3/packages/ssl_match_hostname/__init__.py,sha256=WBVbxQBojNAxfZwNavkox3BgJiMA9BJmm-_fwd0jD_o,688 +pip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.py,sha256=XCW0ydHg171GfOqNbvUAnRzQ0lc0twp5-dIlolgf4RM,5719 +pip/_vendor/urllib3/util/__init__.py,sha256=6Ran4oAVIy40Cu_oEPWnNV9bwF5rXx6G1DUZ7oehjPY,1044 +pip/_vendor/urllib3/util/connection.py,sha256=8K1VXm8BHsM3QATJJGBNRa_MStkzDy1Da2IaPAaCU8c,4279 +pip/_vendor/urllib3/util/queue.py,sha256=myTX3JDHntglKQNBf3b6dasHH-uF-W59vzGSQiFdAfI,497 +pip/_vendor/urllib3/util/request.py,sha256=H5_lrHvtwl2U2BbT1UYN9HpruNc1gsNFlz2njQmhPrQ,3705 +pip/_vendor/urllib3/util/response.py,sha256=SSNL888W-MQ8t3HAi44kNGgF682p6H__ytEXzBYxV_M,2343 +pip/_vendor/urllib3/util/retry.py,sha256=tlxiEq8OU2BSenPpPjYYO1URne8A-qTEgaykam6rZPg,15104 +pip/_vendor/urllib3/util/ssl_.py,sha256=iHJopgSv8_vXfmGg3lOsTS3ldMD9zhe130huHZxQEGU,14022 +pip/_vendor/urllib3/util/timeout.py,sha256=7lHNrgL5YH2cI1j-yZnzV_J8jBlRVdmFhQaNyM1_2b8,9757 +pip/_vendor/urllib3/util/url.py,sha256=qCY_HHUXvo05wAsEERALgExtlgxLnAHSQ7ce1b-g3SM,6487 +pip/_vendor/urllib3/util/wait.py,sha256=_4vvsT1BTTpqxQYK-2kXVfGsUsVRiuc4R4F-0Bf5BPc,5468 +pip/_vendor/webencodings/__init__.py,sha256=qOBJIuPy_4ByYH6W_bNgJF-qYQ2DoU-dKsDu5yRWCXg,10579 +pip/_vendor/webencodings/labels.py,sha256=4AO_KxTddqGtrL9ns7kAPjb0CcN6xsCIxbK37HY9r3E,8979 +pip/_vendor/webencodings/mklabels.py,sha256=GYIeywnpaLnP0GSic8LFWgd0UVvO_l1Nc6YoF-87R_4,1305 +pip/_vendor/webencodings/tests.py,sha256=OtGLyjhNY1fvkW1GvLJ_FV9ZoqC9Anyjr7q3kxTbzNs,6563 +pip/_vendor/webencodings/x_user_defined.py,sha256=yOqWSdmpytGfUgh_Z6JYgDNhoc-BAHyyeeT15Fr42tM,4307 +pip-18.0.dist-info/LICENSE.txt,sha256=ORqHhOMZ2uVDFHfUzJvFBPxdcf2eieHIDxzThV9dfPo,1090 +pip-18.0.dist-info/METADATA,sha256=2D-DQBbJs1zVe4qxVESZ4cxB8gEWzsrbMQDAamJSZeQ,2538 +pip-18.0.dist-info/RECORD,, +pip-18.0.dist-info/WHEEL,sha256=gduuPyBvFJQSQ0zdyxF7k0zynDXbIbvg5ZBHoXum5uk,110 +pip-18.0.dist-info/entry_points.txt,sha256=S_zfxY25QtQDVY1BiLAmOKSkkI5llzCKPLiYOSEupsY,98 +pip-18.0.dist-info/top_level.txt,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +../../../bin/pip,sha256=S4E7T_TFbJZ3e-6ZQlDUOLANleSl8krTQBqXbLaWtAQ,281 +../../../bin/pip3,sha256=S4E7T_TFbJZ3e-6ZQlDUOLANleSl8krTQBqXbLaWtAQ,281 +../../../bin/pip3.6,sha256=S4E7T_TFbJZ3e-6ZQlDUOLANleSl8krTQBqXbLaWtAQ,281 +pip-18.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +pip/_internal/utils/__pycache__/logging.cpython-36.pyc,, +pip/_internal/utils/__pycache__/appdirs.cpython-36.pyc,, +pip/_internal/utils/__pycache__/deprecation.cpython-36.pyc,, +pip/_internal/utils/__pycache__/filesystem.cpython-36.pyc,, +pip/_internal/utils/__pycache__/glibc.cpython-36.pyc,, +pip/_internal/utils/__pycache__/outdated.cpython-36.pyc,, +pip/_internal/utils/__pycache__/encoding.cpython-36.pyc,, +pip/_internal/utils/__pycache__/hashes.cpython-36.pyc,, +pip/_internal/utils/__pycache__/typing.cpython-36.pyc,, +pip/_internal/utils/__pycache__/packaging.cpython-36.pyc,, +pip/_internal/utils/__pycache__/temp_dir.cpython-36.pyc,, 
+pip/_internal/utils/__pycache__/ui.cpython-36.pyc,, +pip/_internal/utils/__pycache__/setuptools_build.cpython-36.pyc,, +pip/_internal/utils/__pycache__/__init__.cpython-36.pyc,, +pip/_internal/utils/__pycache__/misc.cpython-36.pyc,, +pip/_internal/models/__pycache__/index.cpython-36.pyc,, +pip/_internal/models/__pycache__/__init__.cpython-36.pyc,, +pip/_internal/__pycache__/build_env.cpython-36.pyc,, +pip/_internal/__pycache__/exceptions.cpython-36.pyc,, +pip/_internal/__pycache__/wheel.cpython-36.pyc,, +pip/_internal/__pycache__/status_codes.cpython-36.pyc,, +pip/_internal/__pycache__/index.cpython-36.pyc,, +pip/_internal/__pycache__/download.cpython-36.pyc,, +pip/_internal/__pycache__/resolve.cpython-36.pyc,, +pip/_internal/__pycache__/basecommand.cpython-36.pyc,, +pip/_internal/__pycache__/locations.cpython-36.pyc,, +pip/_internal/__pycache__/compat.cpython-36.pyc,, +pip/_internal/__pycache__/pep425tags.cpython-36.pyc,, +pip/_internal/__pycache__/configuration.cpython-36.pyc,, +pip/_internal/__pycache__/cache.cpython-36.pyc,, +pip/_internal/__pycache__/__init__.cpython-36.pyc,, +pip/_internal/__pycache__/baseparser.cpython-36.pyc,, +pip/_internal/__pycache__/cmdoptions.cpython-36.pyc,, +pip/_internal/operations/__pycache__/prepare.cpython-36.pyc,, +pip/_internal/operations/__pycache__/check.cpython-36.pyc,, +pip/_internal/operations/__pycache__/freeze.cpython-36.pyc,, +pip/_internal/operations/__pycache__/__init__.cpython-36.pyc,, +pip/_internal/req/__pycache__/req_tracker.cpython-36.pyc,, +pip/_internal/req/__pycache__/req_set.cpython-36.pyc,, +pip/_internal/req/__pycache__/req_uninstall.cpython-36.pyc,, +pip/_internal/req/__pycache__/req_install.cpython-36.pyc,, +pip/_internal/req/__pycache__/req_file.cpython-36.pyc,, +pip/_internal/req/__pycache__/__init__.cpython-36.pyc,, +pip/_internal/vcs/__pycache__/subversion.cpython-36.pyc,, +pip/_internal/vcs/__pycache__/git.cpython-36.pyc,, +pip/_internal/vcs/__pycache__/__init__.cpython-36.pyc,, +pip/_internal/vcs/__pycache__/mercurial.cpython-36.pyc,, +pip/_internal/vcs/__pycache__/bazaar.cpython-36.pyc,, +pip/_internal/commands/__pycache__/show.cpython-36.pyc,, +pip/_internal/commands/__pycache__/help.cpython-36.pyc,, +pip/_internal/commands/__pycache__/wheel.cpython-36.pyc,, +pip/_internal/commands/__pycache__/completion.cpython-36.pyc,, +pip/_internal/commands/__pycache__/check.cpython-36.pyc,, +pip/_internal/commands/__pycache__/download.cpython-36.pyc,, +pip/_internal/commands/__pycache__/hash.cpython-36.pyc,, +pip/_internal/commands/__pycache__/list.cpython-36.pyc,, +pip/_internal/commands/__pycache__/configuration.cpython-36.pyc,, +pip/_internal/commands/__pycache__/install.cpython-36.pyc,, +pip/_internal/commands/__pycache__/search.cpython-36.pyc,, +pip/_internal/commands/__pycache__/freeze.cpython-36.pyc,, +pip/_internal/commands/__pycache__/uninstall.cpython-36.pyc,, +pip/_internal/commands/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/packaging/__pycache__/_structures.cpython-36.pyc,, +pip/_vendor/packaging/__pycache__/version.cpython-36.pyc,, +pip/_vendor/packaging/__pycache__/requirements.cpython-36.pyc,, +pip/_vendor/packaging/__pycache__/markers.cpython-36.pyc,, +pip/_vendor/packaging/__pycache__/_compat.cpython-36.pyc,, +pip/_vendor/packaging/__pycache__/specifiers.cpython-36.pyc,, +pip/_vendor/packaging/__pycache__/__about__.cpython-36.pyc,, +pip/_vendor/packaging/__pycache__/utils.cpython-36.pyc,, +pip/_vendor/packaging/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/pytoml/__pycache__/parser.cpython-36.pyc,, 
+pip/_vendor/pytoml/__pycache__/writer.cpython-36.pyc,, +pip/_vendor/pytoml/__pycache__/core.cpython-36.pyc,, +pip/_vendor/pytoml/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/msgpack/__pycache__/exceptions.cpython-36.pyc,, +pip/_vendor/msgpack/__pycache__/_version.cpython-36.pyc,, +pip/_vendor/msgpack/__pycache__/fallback.cpython-36.pyc,, +pip/_vendor/msgpack/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/langgreekmodel.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/sjisprober.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/mbcharsetprober.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/eucjpprober.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/cp949prober.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/euctwfreq.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/gb2312prober.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/sbcsgroupprober.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/langturkishmodel.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/langhebrewmodel.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/version.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/euckrprober.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/euckrfreq.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/chardistribution.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/escsm.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/euctwprober.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/big5freq.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/compat.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/hebrewprober.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/latin1prober.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/universaldetector.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/escprober.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/codingstatemachine.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/utf8prober.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/langhungarianmodel.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/gb2312freq.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/enums.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/langthaimodel.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/jpcntx.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/mbcssm.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/big5prober.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/sbcharsetprober.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/langcyrillicmodel.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/jisfreq.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/charsetgroupprober.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/langbulgarianmodel.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/mbcsgroupprober.cpython-36.pyc,, +pip/_vendor/chardet/__pycache__/charsetprober.cpython-36.pyc,, +pip/_vendor/chardet/cli/__pycache__/chardetect.cpython-36.pyc,, +pip/_vendor/chardet/cli/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/webencodings/__pycache__/labels.cpython-36.pyc,, +pip/_vendor/webencodings/__pycache__/tests.cpython-36.pyc,, +pip/_vendor/webencodings/__pycache__/mklabels.cpython-36.pyc,, +pip/_vendor/webencodings/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/webencodings/__pycache__/x_user_defined.cpython-36.pyc,, +pip/_vendor/progress/__pycache__/bar.cpython-36.pyc,, +pip/_vendor/progress/__pycache__/counter.cpython-36.pyc,, +pip/_vendor/progress/__pycache__/spinner.cpython-36.pyc,, +pip/_vendor/progress/__pycache__/helpers.cpython-36.pyc,, 
+pip/_vendor/progress/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/distlib/_backport/__pycache__/shutil.cpython-36.pyc,, +pip/_vendor/distlib/_backport/__pycache__/sysconfig.cpython-36.pyc,, +pip/_vendor/distlib/_backport/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/distlib/_backport/__pycache__/misc.cpython-36.pyc,, +pip/_vendor/distlib/_backport/__pycache__/tarfile.cpython-36.pyc,, +pip/_vendor/distlib/__pycache__/manifest.cpython-36.pyc,, +pip/_vendor/distlib/__pycache__/util.cpython-36.pyc,, +pip/_vendor/distlib/__pycache__/database.cpython-36.pyc,, +pip/_vendor/distlib/__pycache__/scripts.cpython-36.pyc,, +pip/_vendor/distlib/__pycache__/wheel.cpython-36.pyc,, +pip/_vendor/distlib/__pycache__/locators.cpython-36.pyc,, +pip/_vendor/distlib/__pycache__/version.cpython-36.pyc,, +pip/_vendor/distlib/__pycache__/index.cpython-36.pyc,, +pip/_vendor/distlib/__pycache__/markers.cpython-36.pyc,, +pip/_vendor/distlib/__pycache__/resources.cpython-36.pyc,, +pip/_vendor/distlib/__pycache__/compat.cpython-36.pyc,, +pip/_vendor/distlib/__pycache__/metadata.cpython-36.pyc,, +pip/_vendor/distlib/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/colorama/__pycache__/win32.cpython-36.pyc,, +pip/_vendor/colorama/__pycache__/initialise.cpython-36.pyc,, +pip/_vendor/colorama/__pycache__/ansi.cpython-36.pyc,, +pip/_vendor/colorama/__pycache__/winterm.cpython-36.pyc,, +pip/_vendor/colorama/__pycache__/ansitowin32.cpython-36.pyc,, +pip/_vendor/colorama/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/lockfile/__pycache__/mkdirlockfile.cpython-36.pyc,, +pip/_vendor/lockfile/__pycache__/pidlockfile.cpython-36.pyc,, +pip/_vendor/lockfile/__pycache__/linklockfile.cpython-36.pyc,, +pip/_vendor/lockfile/__pycache__/symlinklockfile.cpython-36.pyc,, +pip/_vendor/lockfile/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/lockfile/__pycache__/sqlitelockfile.cpython-36.pyc,, +pip/_vendor/cachecontrol/__pycache__/wrapper.cpython-36.pyc,, +pip/_vendor/cachecontrol/__pycache__/controller.cpython-36.pyc,, +pip/_vendor/cachecontrol/__pycache__/_cmd.cpython-36.pyc,, +pip/_vendor/cachecontrol/__pycache__/filewrapper.cpython-36.pyc,, +pip/_vendor/cachecontrol/__pycache__/serialize.cpython-36.pyc,, +pip/_vendor/cachecontrol/__pycache__/compat.cpython-36.pyc,, +pip/_vendor/cachecontrol/__pycache__/heuristics.cpython-36.pyc,, +pip/_vendor/cachecontrol/__pycache__/cache.cpython-36.pyc,, +pip/_vendor/cachecontrol/__pycache__/adapter.cpython-36.pyc,, +pip/_vendor/cachecontrol/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/cachecontrol/caches/__pycache__/redis_cache.cpython-36.pyc,, +pip/_vendor/cachecontrol/caches/__pycache__/file_cache.cpython-36.pyc,, +pip/_vendor/cachecontrol/caches/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/idna/__pycache__/codec.cpython-36.pyc,, +pip/_vendor/idna/__pycache__/compat.cpython-36.pyc,, +pip/_vendor/idna/__pycache__/idnadata.cpython-36.pyc,, +pip/_vendor/idna/__pycache__/package_data.cpython-36.pyc,, +pip/_vendor/idna/__pycache__/intranges.cpython-36.pyc,, +pip/_vendor/idna/__pycache__/core.cpython-36.pyc,, +pip/_vendor/idna/__pycache__/uts46data.cpython-36.pyc,, +pip/_vendor/idna/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/__pycache__/appdirs.cpython-36.pyc,, +pip/_vendor/__pycache__/ipaddress.cpython-36.pyc,, +pip/_vendor/__pycache__/six.cpython-36.pyc,, +pip/_vendor/__pycache__/retrying.cpython-36.pyc,, +pip/_vendor/__pycache__/distro.cpython-36.pyc,, +pip/_vendor/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/__pycache__/pyparsing.cpython-36.pyc,, 
+pip/_vendor/requests/__pycache__/help.cpython-36.pyc,, +pip/_vendor/requests/__pycache__/_internal_utils.cpython-36.pyc,, +pip/_vendor/requests/__pycache__/__version__.cpython-36.pyc,, +pip/_vendor/requests/__pycache__/exceptions.cpython-36.pyc,, +pip/_vendor/requests/__pycache__/certs.cpython-36.pyc,, +pip/_vendor/requests/__pycache__/structures.cpython-36.pyc,, +pip/_vendor/requests/__pycache__/sessions.cpython-36.pyc,, +pip/_vendor/requests/__pycache__/status_codes.cpython-36.pyc,, +pip/_vendor/requests/__pycache__/models.cpython-36.pyc,, +pip/_vendor/requests/__pycache__/compat.cpython-36.pyc,, +pip/_vendor/requests/__pycache__/adapters.cpython-36.pyc,, +pip/_vendor/requests/__pycache__/packages.cpython-36.pyc,, +pip/_vendor/requests/__pycache__/hooks.cpython-36.pyc,, +pip/_vendor/requests/__pycache__/auth.cpython-36.pyc,, +pip/_vendor/requests/__pycache__/utils.cpython-36.pyc,, +pip/_vendor/requests/__pycache__/cookies.cpython-36.pyc,, +pip/_vendor/requests/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/requests/__pycache__/api.cpython-36.pyc,, +pip/_vendor/html5lib/filters/__pycache__/whitespace.cpython-36.pyc,, +pip/_vendor/html5lib/filters/__pycache__/lint.cpython-36.pyc,, +pip/_vendor/html5lib/filters/__pycache__/optionaltags.cpython-36.pyc,, +pip/_vendor/html5lib/filters/__pycache__/base.cpython-36.pyc,, +pip/_vendor/html5lib/filters/__pycache__/inject_meta_charset.cpython-36.pyc,, +pip/_vendor/html5lib/filters/__pycache__/alphabeticalattributes.cpython-36.pyc,, +pip/_vendor/html5lib/filters/__pycache__/sanitizer.cpython-36.pyc,, +pip/_vendor/html5lib/filters/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/html5lib/treewalkers/__pycache__/dom.cpython-36.pyc,, +pip/_vendor/html5lib/treewalkers/__pycache__/etree_lxml.cpython-36.pyc,, +pip/_vendor/html5lib/treewalkers/__pycache__/base.cpython-36.pyc,, +pip/_vendor/html5lib/treewalkers/__pycache__/etree.cpython-36.pyc,, +pip/_vendor/html5lib/treewalkers/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/html5lib/treewalkers/__pycache__/genshi.cpython-36.pyc,, +pip/_vendor/html5lib/treebuilders/__pycache__/dom.cpython-36.pyc,, +pip/_vendor/html5lib/treebuilders/__pycache__/etree_lxml.cpython-36.pyc,, +pip/_vendor/html5lib/treebuilders/__pycache__/base.cpython-36.pyc,, +pip/_vendor/html5lib/treebuilders/__pycache__/etree.cpython-36.pyc,, +pip/_vendor/html5lib/treebuilders/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/html5lib/__pycache__/constants.cpython-36.pyc,, +pip/_vendor/html5lib/__pycache__/serializer.cpython-36.pyc,, +pip/_vendor/html5lib/__pycache__/_tokenizer.cpython-36.pyc,, +pip/_vendor/html5lib/__pycache__/html5parser.cpython-36.pyc,, +pip/_vendor/html5lib/__pycache__/_utils.cpython-36.pyc,, +pip/_vendor/html5lib/__pycache__/_inputstream.cpython-36.pyc,, +pip/_vendor/html5lib/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/html5lib/__pycache__/_ihatexml.cpython-36.pyc,, +pip/_vendor/html5lib/_trie/__pycache__/datrie.cpython-36.pyc,, +pip/_vendor/html5lib/_trie/__pycache__/_base.cpython-36.pyc,, +pip/_vendor/html5lib/_trie/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/html5lib/_trie/__pycache__/py.cpython-36.pyc,, +pip/_vendor/html5lib/treeadapters/__pycache__/sax.cpython-36.pyc,, +pip/_vendor/html5lib/treeadapters/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/html5lib/treeadapters/__pycache__/genshi.cpython-36.pyc,, +pip/_vendor/certifi/__pycache__/__main__.cpython-36.pyc,, +pip/_vendor/certifi/__pycache__/core.cpython-36.pyc,, +pip/_vendor/certifi/__pycache__/__init__.cpython-36.pyc,, 
+pip/_vendor/urllib3/util/__pycache__/timeout.cpython-36.pyc,, +pip/_vendor/urllib3/util/__pycache__/response.cpython-36.pyc,, +pip/_vendor/urllib3/util/__pycache__/url.cpython-36.pyc,, +pip/_vendor/urllib3/util/__pycache__/request.cpython-36.pyc,, +pip/_vendor/urllib3/util/__pycache__/connection.cpython-36.pyc,, +pip/_vendor/urllib3/util/__pycache__/wait.cpython-36.pyc,, +pip/_vendor/urllib3/util/__pycache__/retry.cpython-36.pyc,, +pip/_vendor/urllib3/util/__pycache__/ssl_.cpython-36.pyc,, +pip/_vendor/urllib3/util/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/urllib3/util/__pycache__/queue.cpython-36.pyc,, +pip/_vendor/urllib3/__pycache__/poolmanager.cpython-36.pyc,, +pip/_vendor/urllib3/__pycache__/response.cpython-36.pyc,, +pip/_vendor/urllib3/__pycache__/exceptions.cpython-36.pyc,, +pip/_vendor/urllib3/__pycache__/request.cpython-36.pyc,, +pip/_vendor/urllib3/__pycache__/filepost.cpython-36.pyc,, +pip/_vendor/urllib3/__pycache__/connectionpool.cpython-36.pyc,, +pip/_vendor/urllib3/__pycache__/connection.cpython-36.pyc,, +pip/_vendor/urllib3/__pycache__/_collections.cpython-36.pyc,, +pip/_vendor/urllib3/__pycache__/fields.cpython-36.pyc,, +pip/_vendor/urllib3/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/urllib3/contrib/__pycache__/ntlmpool.cpython-36.pyc,, +pip/_vendor/urllib3/contrib/__pycache__/appengine.cpython-36.pyc,, +pip/_vendor/urllib3/contrib/__pycache__/securetransport.cpython-36.pyc,, +pip/_vendor/urllib3/contrib/__pycache__/socks.cpython-36.pyc,, +pip/_vendor/urllib3/contrib/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/urllib3/contrib/__pycache__/pyopenssl.cpython-36.pyc,, +pip/_vendor/urllib3/contrib/_securetransport/__pycache__/low_level.cpython-36.pyc,, +pip/_vendor/urllib3/contrib/_securetransport/__pycache__/bindings.cpython-36.pyc,, +pip/_vendor/urllib3/contrib/_securetransport/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/urllib3/packages/__pycache__/six.cpython-36.pyc,, +pip/_vendor/urllib3/packages/__pycache__/ordered_dict.cpython-36.pyc,, +pip/_vendor/urllib3/packages/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/urllib3/packages/backports/__pycache__/makefile.cpython-36.pyc,, +pip/_vendor/urllib3/packages/backports/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/urllib3/packages/ssl_match_hostname/__pycache__/_implementation.cpython-36.pyc,, +pip/_vendor/urllib3/packages/ssl_match_hostname/__pycache__/__init__.cpython-36.pyc,, +pip/_vendor/pkg_resources/__pycache__/py31compat.cpython-36.pyc,, +pip/_vendor/pkg_resources/__pycache__/__init__.cpython-36.pyc,, +pip/__pycache__/__main__.cpython-36.pyc,, +pip/__pycache__/__init__.cpython-36.pyc,, diff --git a/env/lib/python3.6/site-packages/pip-18.0.dist-info/WHEEL b/env/lib/python3.6/site-packages/pip-18.0.dist-info/WHEEL new file mode 100644 index 0000000..1316c41 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip-18.0.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.31.1) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/env/lib/python3.6/site-packages/pip-18.0.dist-info/entry_points.txt b/env/lib/python3.6/site-packages/pip-18.0.dist-info/entry_points.txt new file mode 100644 index 0000000..f5809cb --- /dev/null +++ b/env/lib/python3.6/site-packages/pip-18.0.dist-info/entry_points.txt @@ -0,0 +1,5 @@ +[console_scripts] +pip = pip._internal:main +pip3 = pip._internal:main +pip3.7 = pip._internal:main + diff --git a/env/lib/python3.6/site-packages/pip-18.0.dist-info/top_level.txt 
b/env/lib/python3.6/site-packages/pip-18.0.dist-info/top_level.txt new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/env/lib/python3.6/site-packages/pip-18.0.dist-info/top_level.txt @@ -0,0 +1 @@ +pip diff --git a/env/lib/python3.6/site-packages/pip/_internal/basecommand.py b/env/lib/python3.6/site-packages/pip/_internal/basecommand.py new file mode 100644 index 0000000..f0fe37e --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_internal/basecommand.py @@ -0,0 +1,274 @@ +"""Base Command class, and related routines""" +from __future__ import absolute_import + +import logging +import logging.config +import optparse +import os +import sys + +from pip._internal import cmdoptions +from pip._internal.baseparser import ( + ConfigOptionParser, UpdatingDefaultsHelpFormatter, +) +from pip._internal.download import PipSession +from pip._internal.exceptions import ( + BadCommand, CommandError, InstallationError, PreviousBuildDirError, + UninstallationError, +) +from pip._internal.index import PackageFinder +from pip._internal.locations import running_under_virtualenv +from pip._internal.req.req_file import parse_requirements +from pip._internal.req.req_install import InstallRequirement +from pip._internal.status_codes import ( + ERROR, PREVIOUS_BUILD_DIR_ERROR, SUCCESS, UNKNOWN_ERROR, + VIRTUALENV_NOT_FOUND, +) +from pip._internal.utils.logging import setup_logging +from pip._internal.utils.misc import get_prog, normalize_path +from pip._internal.utils.outdated import pip_version_check +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Optional # noqa: F401 + +__all__ = ['Command'] + +logger = logging.getLogger(__name__) + + +class Command(object): + name = None # type: Optional[str] + usage = None # type: Optional[str] + hidden = False # type: bool + ignore_require_venv = False # type: bool + + def __init__(self, isolated=False): + parser_kw = { + 'usage': self.usage, + 'prog': '%s %s' % (get_prog(), self.name), + 'formatter': UpdatingDefaultsHelpFormatter(), + 'add_help_option': False, + 'name': self.name, + 'description': self.__doc__, + 'isolated': isolated, + } + + self.parser = ConfigOptionParser(**parser_kw) + + # Commands should add options to this option group + optgroup_name = '%s Options' % self.name.capitalize() + self.cmd_opts = optparse.OptionGroup(self.parser, optgroup_name) + + # Add the general options + gen_opts = cmdoptions.make_option_group( + cmdoptions.general_group, + self.parser, + ) + self.parser.add_option_group(gen_opts) + + def _build_session(self, options, retries=None, timeout=None): + session = PipSession( + cache=( + normalize_path(os.path.join(options.cache_dir, "http")) + if options.cache_dir else None + ), + retries=retries if retries is not None else options.retries, + insecure_hosts=options.trusted_hosts, + ) + + # Handle custom ca-bundles from the user + if options.cert: + session.verify = options.cert + + # Handle SSL client certificate + if options.client_cert: + session.cert = options.client_cert + + # Handle timeouts + if options.timeout or timeout: + session.timeout = ( + timeout if timeout is not None else options.timeout + ) + + # Handle configured proxies + if options.proxy: + session.proxies = { + "http": options.proxy, + "https": options.proxy, + } + + # Determine if we can prompt the user for authentication or not + session.auth.prompting = not options.no_input + + return session + + def parse_args(self, args): + # factored out for testability + return 
self.parser.parse_args(args) + + def main(self, args): + options, args = self.parse_args(args) + + # Set verbosity so that it can be used elsewhere. + self.verbosity = options.verbose - options.quiet + + setup_logging( + verbosity=self.verbosity, + no_color=options.no_color, + user_log_file=options.log, + ) + + # TODO: Try to get these passing down from the command? + # without resorting to os.environ to hold these. + # This also affects isolated builds and it should. + + if options.no_input: + os.environ['PIP_NO_INPUT'] = '1' + + if options.exists_action: + os.environ['PIP_EXISTS_ACTION'] = ' '.join(options.exists_action) + + if options.require_venv and not self.ignore_require_venv: + # If a venv is required check if it can really be found + if not running_under_virtualenv(): + logger.critical( + 'Could not find an activated virtualenv (required).' + ) + sys.exit(VIRTUALENV_NOT_FOUND) + + try: + status = self.run(options, args) + # FIXME: all commands should return an exit status + # and when it is done, isinstance is not needed anymore + if isinstance(status, int): + return status + except PreviousBuildDirError as exc: + logger.critical(str(exc)) + logger.debug('Exception information:', exc_info=True) + + return PREVIOUS_BUILD_DIR_ERROR + except (InstallationError, UninstallationError, BadCommand) as exc: + logger.critical(str(exc)) + logger.debug('Exception information:', exc_info=True) + + return ERROR + except CommandError as exc: + logger.critical('ERROR: %s', exc) + logger.debug('Exception information:', exc_info=True) + + return ERROR + except KeyboardInterrupt: + logger.critical('Operation cancelled by user') + logger.debug('Exception information:', exc_info=True) + + return ERROR + except BaseException: + logger.critical('Exception:', exc_info=True) + + return UNKNOWN_ERROR + finally: + # Check if we're using the latest version of pip available + skip_version_check = ( + options.disable_pip_version_check or + getattr(options, "no_index", False) + ) + if not skip_version_check: + session = self._build_session( + options, + retries=0, + timeout=min(5, options.timeout) + ) + with session: + pip_version_check(session, options) + + # Shutdown the logging module + logging.shutdown() + + return SUCCESS + + +class RequirementCommand(Command): + + @staticmethod + def populate_requirement_set(requirement_set, args, options, finder, + session, name, wheel_cache): + """ + Marshal cmd line args into a requirement set. 
+ """ + # NOTE: As a side-effect, options.require_hashes and + # requirement_set.require_hashes may be updated + + for filename in options.constraints: + for req_to_add in parse_requirements( + filename, + constraint=True, finder=finder, options=options, + session=session, wheel_cache=wheel_cache): + req_to_add.is_direct = True + requirement_set.add_requirement(req_to_add) + + for req in args: + req_to_add = InstallRequirement.from_line( + req, None, isolated=options.isolated_mode, + wheel_cache=wheel_cache + ) + req_to_add.is_direct = True + requirement_set.add_requirement(req_to_add) + + for req in options.editables: + req_to_add = InstallRequirement.from_editable( + req, + isolated=options.isolated_mode, + wheel_cache=wheel_cache + ) + req_to_add.is_direct = True + requirement_set.add_requirement(req_to_add) + + for filename in options.requirements: + for req_to_add in parse_requirements( + filename, + finder=finder, options=options, session=session, + wheel_cache=wheel_cache): + req_to_add.is_direct = True + requirement_set.add_requirement(req_to_add) + # If --require-hashes was a line in a requirements file, tell + # RequirementSet about it: + requirement_set.require_hashes = options.require_hashes + + if not (args or options.editables or options.requirements): + opts = {'name': name} + if options.find_links: + raise CommandError( + 'You must give at least one requirement to %(name)s ' + '(maybe you meant "pip %(name)s %(links)s"?)' % + dict(opts, links=' '.join(options.find_links))) + else: + raise CommandError( + 'You must give at least one requirement to %(name)s ' + '(see "pip help %(name)s")' % opts) + + def _build_package_finder(self, options, session, + platform=None, python_versions=None, + abi=None, implementation=None): + """ + Create a package finder appropriate to this requirement command. 
+ """ + index_urls = [options.index_url] + options.extra_index_urls + if options.no_index: + logger.debug('Ignoring indexes: %s', ','.join(index_urls)) + index_urls = [] + + return PackageFinder( + find_links=options.find_links, + format_control=options.format_control, + index_urls=index_urls, + trusted_hosts=options.trusted_hosts, + allow_all_prereleases=options.pre, + process_dependency_links=options.process_dependency_links, + session=session, + platform=platform, + versions=python_versions, + abi=abi, + implementation=implementation, + prefer_binary=options.prefer_binary, + ) diff --git a/env/lib/python3.6/site-packages/pip/_internal/baseparser.py b/env/lib/python3.6/site-packages/pip/_internal/baseparser.py new file mode 100644 index 0000000..9a8d129 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_internal/baseparser.py @@ -0,0 +1,240 @@ +"""Base option parser setup""" +from __future__ import absolute_import + +import logging +import optparse +import sys +import textwrap +from distutils.util import strtobool + +from pip._vendor.six import string_types + +from pip._internal.compat import get_terminal_size +from pip._internal.configuration import Configuration, ConfigurationError + +logger = logging.getLogger(__name__) + + +class PrettyHelpFormatter(optparse.IndentedHelpFormatter): + """A prettier/less verbose help formatter for optparse.""" + + def __init__(self, *args, **kwargs): + # help position must be aligned with __init__.parseopts.description + kwargs['max_help_position'] = 30 + kwargs['indent_increment'] = 1 + kwargs['width'] = get_terminal_size()[0] - 2 + optparse.IndentedHelpFormatter.__init__(self, *args, **kwargs) + + def format_option_strings(self, option): + return self._format_option_strings(option, ' <%s>', ', ') + + def _format_option_strings(self, option, mvarfmt=' <%s>', optsep=', '): + """ + Return a comma-separated list of option strings and metavars. + + :param option: tuple of (short opt, long opt), e.g: ('-f', '--format') + :param mvarfmt: metavar format string - evaluated as mvarfmt % metavar + :param optsep: separator + """ + opts = [] + + if option._short_opts: + opts.append(option._short_opts[0]) + if option._long_opts: + opts.append(option._long_opts[0]) + if len(opts) > 1: + opts.insert(1, optsep) + + if option.takes_value(): + metavar = option.metavar or option.dest.lower() + opts.append(mvarfmt % metavar.lower()) + + return ''.join(opts) + + def format_heading(self, heading): + if heading == 'Options': + return '' + return heading + ':\n' + + def format_usage(self, usage): + """ + Ensure there is only one newline between usage and the first heading + if there is no description. 
+ """ + msg = '\nUsage: %s\n' % self.indent_lines(textwrap.dedent(usage), " ") + return msg + + def format_description(self, description): + # leave full control over description to us + if description: + if hasattr(self.parser, 'main'): + label = 'Commands' + else: + label = 'Description' + # some doc strings have initial newlines, some don't + description = description.lstrip('\n') + # some doc strings have final newlines and spaces, some don't + description = description.rstrip() + # dedent, then reindent + description = self.indent_lines(textwrap.dedent(description), " ") + description = '%s:\n%s\n' % (label, description) + return description + else: + return '' + + def format_epilog(self, epilog): + # leave full control over epilog to us + if epilog: + return epilog + else: + return '' + + def indent_lines(self, text, indent): + new_lines = [indent + line for line in text.split('\n')] + return "\n".join(new_lines) + + +class UpdatingDefaultsHelpFormatter(PrettyHelpFormatter): + """Custom help formatter for use in ConfigOptionParser. + + This is updates the defaults before expanding them, allowing + them to show up correctly in the help listing. + """ + + def expand_default(self, option): + if self.parser is not None: + self.parser._update_defaults(self.parser.defaults) + return optparse.IndentedHelpFormatter.expand_default(self, option) + + +class CustomOptionParser(optparse.OptionParser): + + def insert_option_group(self, idx, *args, **kwargs): + """Insert an OptionGroup at a given position.""" + group = self.add_option_group(*args, **kwargs) + + self.option_groups.pop() + self.option_groups.insert(idx, group) + + return group + + @property + def option_list_all(self): + """Get a list of all options, including those in option groups.""" + res = self.option_list[:] + for i in self.option_groups: + res.extend(i.option_list) + + return res + + +class ConfigOptionParser(CustomOptionParser): + """Custom option parser which updates its defaults by checking the + configuration files and environmental variables""" + + def __init__(self, *args, **kwargs): + self.name = kwargs.pop('name') + + isolated = kwargs.pop("isolated", False) + self.config = Configuration(isolated) + + assert self.name + optparse.OptionParser.__init__(self, *args, **kwargs) + + def check_default(self, option, key, val): + try: + return option.check_value(key, val) + except optparse.OptionValueError as exc: + print("An error occurred during configuration: %s" % exc) + sys.exit(3) + + def _get_ordered_configuration_items(self): + # Configuration gives keys in an unordered manner. Order them. + override_order = ["global", self.name, ":env:"] + + # Pool the options into different groups + section_items = {name: [] for name in override_order} + for section_key, val in self.config.items(): + # ignore empty values + if not val: + logger.debug( + "Ignoring configuration key '%s' as it's value is empty.", + section_key + ) + continue + + section, key = section_key.split(".", 1) + if section in override_order: + section_items[section].append((key, val)) + + # Yield each group in their override order + for section in override_order: + for key, val in section_items[section]: + yield key, val + + def _update_defaults(self, defaults): + """Updates the given defaults with values from the config files and + the environ. Does a little special handling for certain types of + options (lists).""" + + # Accumulate complex default state. 
+ self.values = optparse.Values(self.defaults) + late_eval = set() + # Then set the options with those values + for key, val in self._get_ordered_configuration_items(): + # '--' because configuration supports only long names + option = self.get_option('--' + key) + + # Ignore options not present in this parser. E.g. non-globals put + # in [global] by users that want them to apply to all applicable + # commands. + if option is None: + continue + + if option.action in ('store_true', 'store_false', 'count'): + val = strtobool(val) + elif option.action == 'append': + val = val.split() + val = [self.check_default(option, key, v) for v in val] + elif option.action == 'callback': + late_eval.add(option.dest) + opt_str = option.get_opt_string() + val = option.convert_value(opt_str, val) + # From take_action + args = option.callback_args or () + kwargs = option.callback_kwargs or {} + option.callback(option, opt_str, val, self, *args, **kwargs) + else: + val = self.check_default(option, key, val) + + defaults[option.dest] = val + + for key in late_eval: + defaults[key] = getattr(self.values, key) + self.values = None + return defaults + + def get_default_values(self): + """Overriding to make updating the defaults after instantiation of + the option parser possible, _update_defaults() does the dirty work.""" + if not self.process_default_values: + # Old, pre-Optik 1.5 behaviour. + return optparse.Values(self.defaults) + + # Load the configuration, or error out in case of an error + try: + self.config.load() + except ConfigurationError as err: + self.exit(2, err.args[0]) + + defaults = self._update_defaults(self.defaults.copy()) # ours + for option in self._get_all_options(): + default = defaults.get(option.dest) + if isinstance(default, string_types): + opt_str = option.get_opt_string() + defaults[option.dest] = option.check_value(opt_str, default) + return optparse.Values(defaults) + + def error(self, msg): + self.print_usage(sys.stderr) + self.exit(2, "%s\n" % msg) diff --git a/env/lib/python3.6/site-packages/pip/_internal/build_env.py b/env/lib/python3.6/site-packages/pip/_internal/build_env.py new file mode 100644 index 0000000..f225f76 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_internal/build_env.py @@ -0,0 +1,126 @@ +"""Build Environment used for isolation during sdist building +""" + +import logging +import os +import sys +from distutils.sysconfig import get_python_lib +from sysconfig import get_paths + +from pip._internal.utils.misc import call_subprocess +from pip._internal.utils.temp_dir import TempDirectory +from pip._internal.utils.ui import open_spinner + +logger = logging.getLogger(__name__) + + +class BuildEnvironment(object): + """Creates and manages an isolated environment to install build deps + """ + + def __init__(self): + self._temp_dir = TempDirectory(kind="build-env") + self._temp_dir.create() + + @property + def path(self): + return self._temp_dir.path + + def __enter__(self): + self.save_path = os.environ.get('PATH', None) + self.save_pythonpath = os.environ.get('PYTHONPATH', None) + self.save_nousersite = os.environ.get('PYTHONNOUSERSITE', None) + + install_scheme = 'nt' if (os.name == 'nt') else 'posix_prefix' + install_dirs = get_paths(install_scheme, vars={ + 'base': self.path, + 'platbase': self.path, + }) + + scripts = install_dirs['scripts'] + if self.save_path: + os.environ['PATH'] = scripts + os.pathsep + self.save_path + else: + os.environ['PATH'] = scripts + os.pathsep + os.defpath + + # Note: prefer distutils' sysconfig to get the + # library paths 
so PyPy is correctly supported. + purelib = get_python_lib(plat_specific=0, prefix=self.path) + platlib = get_python_lib(plat_specific=1, prefix=self.path) + if purelib == platlib: + lib_dirs = purelib + else: + lib_dirs = purelib + os.pathsep + platlib + if self.save_pythonpath: + os.environ['PYTHONPATH'] = lib_dirs + os.pathsep + \ + self.save_pythonpath + else: + os.environ['PYTHONPATH'] = lib_dirs + + os.environ['PYTHONNOUSERSITE'] = '1' + + return self.path + + def __exit__(self, exc_type, exc_val, exc_tb): + def restore_var(varname, old_value): + if old_value is None: + os.environ.pop(varname, None) + else: + os.environ[varname] = old_value + + restore_var('PATH', self.save_path) + restore_var('PYTHONPATH', self.save_pythonpath) + restore_var('PYTHONNOUSERSITE', self.save_nousersite) + + def cleanup(self): + self._temp_dir.cleanup() + + def install_requirements(self, finder, requirements, message): + args = [ + sys.executable, '-m', 'pip', 'install', '--ignore-installed', + '--no-user', '--prefix', self.path, '--no-warn-script-location', + ] + if logger.getEffectiveLevel() <= logging.DEBUG: + args.append('-v') + for format_control in ('no_binary', 'only_binary'): + formats = getattr(finder.format_control, format_control) + args.extend(('--' + format_control.replace('_', '-'), + ','.join(sorted(formats or {':none:'})))) + if finder.index_urls: + args.extend(['-i', finder.index_urls[0]]) + for extra_index in finder.index_urls[1:]: + args.extend(['--extra-index-url', extra_index]) + else: + args.append('--no-index') + for link in finder.find_links: + args.extend(['--find-links', link]) + for _, host, _ in finder.secure_origins: + args.extend(['--trusted-host', host]) + if finder.allow_all_prereleases: + args.append('--pre') + if finder.process_dependency_links: + args.append('--process-dependency-links') + args.append('--') + args.extend(requirements) + with open_spinner(message) as spinner: + call_subprocess(args, show_stdout=False, spinner=spinner) + + +class NoOpBuildEnvironment(BuildEnvironment): + """A no-op drop-in replacement for BuildEnvironment + """ + + def __init__(self): + pass + + def __enter__(self): + pass + + def __exit__(self, exc_type, exc_val, exc_tb): + pass + + def cleanup(self): + pass + + def install_requirements(self, finder, requirements, message): + raise NotImplementedError() diff --git a/env/lib/python3.6/site-packages/pip/_internal/cache.py b/env/lib/python3.6/site-packages/pip/_internal/cache.py new file mode 100644 index 0000000..1aa17aa --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_internal/cache.py @@ -0,0 +1,202 @@ +"""Cache Management +""" + +import errno +import hashlib +import logging +import os + +from pip._vendor.packaging.utils import canonicalize_name + +from pip._internal import index +from pip._internal.compat import expanduser +from pip._internal.download import path_to_url +from pip._internal.utils.temp_dir import TempDirectory +from pip._internal.wheel import InvalidWheelFilename, Wheel + +logger = logging.getLogger(__name__) + + +class Cache(object): + """An abstract class - provides cache directories for data from links + + + :param cache_dir: The root of the cache. + :param format_control: A pip.index.FormatControl object to limit + binaries being read from the cache. + :param allowed_formats: which formats of files the cache should store. 
+ ('binary' and 'source' are the only allowed values) + """ + + def __init__(self, cache_dir, format_control, allowed_formats): + super(Cache, self).__init__() + self.cache_dir = expanduser(cache_dir) if cache_dir else None + self.format_control = format_control + self.allowed_formats = allowed_formats + + _valid_formats = {"source", "binary"} + assert self.allowed_formats.union(_valid_formats) == _valid_formats + + def _get_cache_path_parts(self, link): + """Get parts of part that must be os.path.joined with cache_dir + """ + + # We want to generate an url to use as our cache key, we don't want to + # just re-use the URL because it might have other items in the fragment + # and we don't care about those. + key_parts = [link.url_without_fragment] + if link.hash_name is not None and link.hash is not None: + key_parts.append("=".join([link.hash_name, link.hash])) + key_url = "#".join(key_parts) + + # Encode our key url with sha224, we'll use this because it has similar + # security properties to sha256, but with a shorter total output (and + # thus less secure). However the differences don't make a lot of + # difference for our use case here. + hashed = hashlib.sha224(key_url.encode()).hexdigest() + + # We want to nest the directories some to prevent having a ton of top + # level directories where we might run out of sub directories on some + # FS. + parts = [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]] + + return parts + + def _get_candidates(self, link, package_name): + can_not_cache = ( + not self.cache_dir or + not package_name or + not link + ) + if can_not_cache: + return [] + + canonical_name = canonicalize_name(package_name) + formats = index.fmt_ctl_formats( + self.format_control, canonical_name + ) + if not self.allowed_formats.intersection(formats): + return [] + + root = self.get_path_for_link(link) + try: + return os.listdir(root) + except OSError as err: + if err.errno in {errno.ENOENT, errno.ENOTDIR}: + return [] + raise + + def get_path_for_link(self, link): + """Return a directory to store cached items in for link. + """ + raise NotImplementedError() + + def get(self, link, package_name): + """Returns a link to a cached item if it exists, otherwise returns the + passed link. + """ + raise NotImplementedError() + + def _link_for_candidate(self, link, candidate): + root = self.get_path_for_link(link) + path = os.path.join(root, candidate) + + return index.Link(path_to_url(path)) + + def cleanup(self): + pass + + +class SimpleWheelCache(Cache): + """A cache of wheels for future installs. + """ + + def __init__(self, cache_dir, format_control): + super(SimpleWheelCache, self).__init__( + cache_dir, format_control, {"binary"} + ) + + def get_path_for_link(self, link): + """Return a directory to store cached wheels for link + + Because there are M wheels for any one sdist, we provide a directory + to cache them in, and then consult that directory when looking up + cache hits. + + We only insert things into the cache if they have plausible version + numbers, so that we don't contaminate the cache with things that were + not unique. E.g. ./package might have dozens of installs done for it + and build a version of 0.0...and if we built and cached a wheel, we'd + end up using the same wheel even if the source has been edited. + + :param link: The link of the sdist for which this will cache wheels. 
+ """ + parts = self._get_cache_path_parts(link) + + # Store wheels within the root cache_dir + return os.path.join(self.cache_dir, "wheels", *parts) + + def get(self, link, package_name): + candidates = [] + + for wheel_name in self._get_candidates(link, package_name): + try: + wheel = Wheel(wheel_name) + except InvalidWheelFilename: + continue + if not wheel.supported(): + # Built for a different python/arch/etc + continue + candidates.append((wheel.support_index_min(), wheel_name)) + + if not candidates: + return link + + return self._link_for_candidate(link, min(candidates)[1]) + + +class EphemWheelCache(SimpleWheelCache): + """A SimpleWheelCache that creates it's own temporary cache directory + """ + + def __init__(self, format_control): + self._temp_dir = TempDirectory(kind="ephem-wheel-cache") + self._temp_dir.create() + + super(EphemWheelCache, self).__init__( + self._temp_dir.path, format_control + ) + + def cleanup(self): + self._temp_dir.cleanup() + + +class WheelCache(Cache): + """Wraps EphemWheelCache and SimpleWheelCache into a single Cache + + This Cache allows for gracefully degradation, using the ephem wheel cache + when a certain link is not found in the simple wheel cache first. + """ + + def __init__(self, cache_dir, format_control): + super(WheelCache, self).__init__( + cache_dir, format_control, {'binary'} + ) + self._wheel_cache = SimpleWheelCache(cache_dir, format_control) + self._ephem_cache = EphemWheelCache(format_control) + + def get_path_for_link(self, link): + return self._wheel_cache.get_path_for_link(link) + + def get_ephem_path_for_link(self, link): + return self._ephem_cache.get_path_for_link(link) + + def get(self, link, package_name): + retval = self._wheel_cache.get(link, package_name) + if retval is link: + retval = self._ephem_cache.get(link, package_name) + return retval + + def cleanup(self): + self._wheel_cache.cleanup() + self._ephem_cache.cleanup() diff --git a/env/lib/python3.6/site-packages/pip/_internal/cmdoptions.py b/env/lib/python3.6/site-packages/pip/_internal/cmdoptions.py new file mode 100644 index 0000000..d9c8dd7 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_internal/cmdoptions.py @@ -0,0 +1,619 @@ +""" +shared options and groups + +The principle here is to define options once, but *not* instantiate them +globally. One reason being that options with action='append' can carry state +between parses. pip parses general options twice internally, and shouldn't +pass on state. To be consistent, all options will follow this design. 
+ +""" +from __future__ import absolute_import + +import warnings +from functools import partial +from optparse import SUPPRESS_HELP, Option, OptionGroup + +from pip._internal.index import ( + FormatControl, fmt_ctl_handle_mutual_exclude, fmt_ctl_no_binary, +) +from pip._internal.locations import USER_CACHE_DIR, src_prefix +from pip._internal.models.index import PyPI +from pip._internal.utils.hashes import STRONG_HASHES +from pip._internal.utils.typing import MYPY_CHECK_RUNNING +from pip._internal.utils.ui import BAR_TYPES + +if MYPY_CHECK_RUNNING: + from typing import Any # noqa: F401 + + +def make_option_group(group, parser): + """ + Return an OptionGroup object + group -- assumed to be dict with 'name' and 'options' keys + parser -- an optparse Parser + """ + option_group = OptionGroup(parser, group['name']) + for option in group['options']: + option_group.add_option(option()) + return option_group + + +def check_install_build_global(options, check_options=None): + """Disable wheels if per-setup.py call options are set. + + :param options: The OptionParser options to update. + :param check_options: The options to check, if not supplied defaults to + options. + """ + if check_options is None: + check_options = options + + def getname(n): + return getattr(check_options, n, None) + names = ["build_options", "global_options", "install_options"] + if any(map(getname, names)): + control = options.format_control + fmt_ctl_no_binary(control) + warnings.warn( + 'Disabling all use of wheels due to the use of --build-options ' + '/ --global-options / --install-options.', stacklevel=2, + ) + + +########### +# options # +########### + +help_ = partial( + Option, + '-h', '--help', + dest='help', + action='help', + help='Show help.', +) # type: Any + +isolated_mode = partial( + Option, + "--isolated", + dest="isolated_mode", + action="store_true", + default=False, + help=( + "Run pip in an isolated mode, ignoring environment variables and user " + "configuration." + ), +) + +require_virtualenv = partial( + Option, + # Run only if inside a virtualenv, bail if not. + '--require-virtualenv', '--require-venv', + dest='require_venv', + action='store_true', + default=False, + help=SUPPRESS_HELP +) # type: Any + +verbose = partial( + Option, + '-v', '--verbose', + dest='verbose', + action='count', + default=0, + help='Give more output. Option is additive, and can be used up to 3 times.' +) + +no_color = partial( + Option, + '--no-color', + dest='no_color', + action='store_true', + default=False, + help="Suppress colored output", +) + +version = partial( + Option, + '-V', '--version', + dest='version', + action='store_true', + help='Show version and exit.', +) # type: Any + +quiet = partial( + Option, + '-q', '--quiet', + dest='quiet', + action='count', + default=0, + help=( + 'Give less output. Option is additive, and can be used up to 3' + ' times (corresponding to WARNING, ERROR, and CRITICAL logging' + ' levels).' + ), +) # type: Any + +progress_bar = partial( + Option, + '--progress-bar', + dest='progress_bar', + type='choice', + choices=list(BAR_TYPES.keys()), + default='on', + help=( + 'Specify type of progress to be displayed [' + + '|'.join(BAR_TYPES.keys()) + '] (default: %default)' + ), +) # type: Any + +log = partial( + Option, + "--log", "--log-file", "--local-log", + dest="log", + metavar="path", + help="Path to a verbose appending log." 
+) # type: Any + +no_input = partial( + Option, + # Don't ask for input + '--no-input', + dest='no_input', + action='store_true', + default=False, + help=SUPPRESS_HELP +) # type: Any + +proxy = partial( + Option, + '--proxy', + dest='proxy', + type='str', + default='', + help="Specify a proxy in the form [user:passwd@]proxy.server:port." +) # type: Any + +retries = partial( + Option, + '--retries', + dest='retries', + type='int', + default=5, + help="Maximum number of retries each connection should attempt " + "(default %default times).", +) # type: Any + +timeout = partial( + Option, + '--timeout', '--default-timeout', + metavar='sec', + dest='timeout', + type='float', + default=15, + help='Set the socket timeout (default %default seconds).', +) # type: Any + +skip_requirements_regex = partial( + Option, + # A regex to be used to skip requirements + '--skip-requirements-regex', + dest='skip_requirements_regex', + type='str', + default='', + help=SUPPRESS_HELP, +) # type: Any + + +def exists_action(): + return Option( + # Option when path already exist + '--exists-action', + dest='exists_action', + type='choice', + choices=['s', 'i', 'w', 'b', 'a'], + default=[], + action='append', + metavar='action', + help="Default action when a path already exists: " + "(s)witch, (i)gnore, (w)ipe, (b)ackup, (a)bort).", + ) + + +cert = partial( + Option, + '--cert', + dest='cert', + type='str', + metavar='path', + help="Path to alternate CA bundle.", +) # type: Any + +client_cert = partial( + Option, + '--client-cert', + dest='client_cert', + type='str', + default=None, + metavar='path', + help="Path to SSL client certificate, a single file containing the " + "private key and the certificate in PEM format.", +) # type: Any + +index_url = partial( + Option, + '-i', '--index-url', '--pypi-url', + dest='index_url', + metavar='URL', + default=PyPI.simple_url, + help="Base URL of Python Package Index (default %default). " + "This should point to a repository compliant with PEP 503 " + "(the simple repository API) or a local directory laid out " + "in the same format.", +) # type: Any + + +def extra_index_url(): + return Option( + '--extra-index-url', + dest='extra_index_urls', + metavar='URL', + action='append', + default=[], + help="Extra URLs of package indexes to use in addition to " + "--index-url. Should follow the same rules as " + "--index-url.", + ) + + +no_index = partial( + Option, + '--no-index', + dest='no_index', + action='store_true', + default=False, + help='Ignore package index (only looking at --find-links URLs instead).', +) # type: Any + + +def find_links(): + return Option( + '-f', '--find-links', + dest='find_links', + action='append', + default=[], + metavar='url', + help="If a url or path to an html file, then parse for links to " + "archives. 
If a local path or file:// url that's a directory, " + "then look for archives in the directory listing.", + ) + + +def trusted_host(): + return Option( + "--trusted-host", + dest="trusted_hosts", + action="append", + metavar="HOSTNAME", + default=[], + help="Mark this host as trusted, even though it does not have valid " + "or any HTTPS.", + ) + + +# Remove after 1.5 +process_dependency_links = partial( + Option, + "--process-dependency-links", + dest="process_dependency_links", + action="store_true", + default=False, + help="Enable the processing of dependency links.", +) # type: Any + + +def constraints(): + return Option( + '-c', '--constraint', + dest='constraints', + action='append', + default=[], + metavar='file', + help='Constrain versions using the given constraints file. ' + 'This option can be used multiple times.' + ) + + +def requirements(): + return Option( + '-r', '--requirement', + dest='requirements', + action='append', + default=[], + metavar='file', + help='Install from the given requirements file. ' + 'This option can be used multiple times.' + ) + + +def editable(): + return Option( + '-e', '--editable', + dest='editables', + action='append', + default=[], + metavar='path/url', + help=('Install a project in editable mode (i.e. setuptools ' + '"develop mode") from a local project path or a VCS url.'), + ) + + +src = partial( + Option, + '--src', '--source', '--source-dir', '--source-directory', + dest='src_dir', + metavar='dir', + default=src_prefix, + help='Directory to check out editable projects into. ' + 'The default in a virtualenv is "/src". ' + 'The default for global installs is "/src".' +) # type: Any + + +def _get_format_control(values, option): + """Get a format_control object.""" + return getattr(values, option.dest) + + +def _handle_no_binary(option, opt_str, value, parser): + existing = getattr(parser.values, option.dest) + fmt_ctl_handle_mutual_exclude( + value, existing.no_binary, existing.only_binary, + ) + + +def _handle_only_binary(option, opt_str, value, parser): + existing = getattr(parser.values, option.dest) + fmt_ctl_handle_mutual_exclude( + value, existing.only_binary, existing.no_binary, + ) + + +def no_binary(): + return Option( + "--no-binary", dest="format_control", action="callback", + callback=_handle_no_binary, type="str", + default=FormatControl(set(), set()), + help="Do not use binary packages. Can be supplied multiple times, and " + "each time adds to the existing value. Accepts either :all: to " + "disable all binary packages, :none: to empty the set, or one or " + "more package names with commas between them. Note that some " + "packages are tricky to compile and may fail to install when " + "this option is used on them.", + ) + + +def only_binary(): + return Option( + "--only-binary", dest="format_control", action="callback", + callback=_handle_only_binary, type="str", + default=FormatControl(set(), set()), + help="Do not use source packages. Can be supplied multiple times, and " + "each time adds to the existing value. Accepts either :all: to " + "disable all source packages, :none: to empty the set, or one or " + "more package names with commas between them. Packages without " + "binary distributions will fail to install when this option is " + "used on them.", + ) + + +def prefer_binary(): + return Option( + "--prefer-binary", + dest="prefer_binary", + action="store_true", + default=False, + help="Prefer older binary packages over newer source packages." 
+ ) + + +cache_dir = partial( + Option, + "--cache-dir", + dest="cache_dir", + default=USER_CACHE_DIR, + metavar="dir", + help="Store the cache data in ." +) + +no_cache = partial( + Option, + "--no-cache-dir", + dest="cache_dir", + action="store_false", + help="Disable the cache.", +) + +no_deps = partial( + Option, + '--no-deps', '--no-dependencies', + dest='ignore_dependencies', + action='store_true', + default=False, + help="Don't install package dependencies.", +) # type: Any + +build_dir = partial( + Option, + '-b', '--build', '--build-dir', '--build-directory', + dest='build_dir', + metavar='dir', + help='Directory to unpack packages into and build in. Note that ' + 'an initial build still takes place in a temporary directory. ' + 'The location of temporary directories can be controlled by setting ' + 'the TMPDIR environment variable (TEMP on Windows) appropriately. ' + 'When passed, build directories are not cleaned in case of failures.' +) # type: Any + +ignore_requires_python = partial( + Option, + '--ignore-requires-python', + dest='ignore_requires_python', + action='store_true', + help='Ignore the Requires-Python information.' +) # type: Any + +no_build_isolation = partial( + Option, + '--no-build-isolation', + dest='build_isolation', + action='store_false', + default=True, + help='Disable isolation when building a modern source distribution. ' + 'Build dependencies specified by PEP 518 must be already installed ' + 'if this option is used.' +) # type: Any + +install_options = partial( + Option, + '--install-option', + dest='install_options', + action='append', + metavar='options', + help="Extra arguments to be supplied to the setup.py install " + "command (use like --install-option=\"--install-scripts=/usr/local/" + "bin\"). Use multiple --install-option options to pass multiple " + "options to setup.py install. If you are using an option with a " + "directory path, be sure to use absolute path.", +) # type: Any + +global_options = partial( + Option, + '--global-option', + dest='global_options', + action='append', + metavar='options', + help="Extra global options to be supplied to the setup.py " + "call before the install command.", +) # type: Any + +no_clean = partial( + Option, + '--no-clean', + action='store_true', + default=False, + help="Don't clean up build directories)." +) # type: Any + +pre = partial( + Option, + '--pre', + action='store_true', + default=False, + help="Include pre-release and development versions. By default, " + "pip only finds stable versions.", +) # type: Any + +disable_pip_version_check = partial( + Option, + "--disable-pip-version-check", + dest="disable_pip_version_check", + action="store_true", + default=False, + help="Don't periodically check PyPI to determine whether a new version " + "of pip is available for download. Implied with --no-index.", +) # type: Any + + +# Deprecated, Remove later +always_unzip = partial( + Option, + '-Z', '--always-unzip', + dest='always_unzip', + action='store_true', + help=SUPPRESS_HELP, +) # type: Any + + +def _merge_hash(option, opt_str, value, parser): + """Given a value spelled "algo:digest", append the digest to a list + pointed to in a dict by the algo name.""" + if not parser.values.hashes: + parser.values.hashes = {} + try: + algo, digest = value.split(':', 1) + except ValueError: + parser.error('Arguments to %s must be a hash name ' + 'followed by a value, like --hash=sha256:abcde...' % + opt_str) + if algo not in STRONG_HASHES: + parser.error('Allowed hash algorithms for %s are %s.' 
% + (opt_str, ', '.join(STRONG_HASHES))) + parser.values.hashes.setdefault(algo, []).append(digest) + + +hash = partial( + Option, + '--hash', + # Hash values eventually end up in InstallRequirement.hashes due to + # __dict__ copying in process_line(). + dest='hashes', + action='callback', + callback=_merge_hash, + type='string', + help="Verify that the package's archive matches this " + 'hash before installing. Example: --hash=sha256:abcdef...', +) # type: Any + + +require_hashes = partial( + Option, + '--require-hashes', + dest='require_hashes', + action='store_true', + default=False, + help='Require a hash to check each requirement against, for ' + 'repeatable installs. This option is implied when any package in a ' + 'requirements file has a --hash option.', +) # type: Any + + +########## +# groups # +########## + +general_group = { + 'name': 'General Options', + 'options': [ + help_, + isolated_mode, + require_virtualenv, + verbose, + version, + quiet, + log, + no_input, + proxy, + retries, + timeout, + skip_requirements_regex, + exists_action, + trusted_host, + cert, + client_cert, + cache_dir, + no_cache, + disable_pip_version_check, + no_color, + ] +} + +index_group = { + 'name': 'Package Index Options', + 'options': [ + index_url, + extra_index_url, + no_index, + find_links, + process_dependency_links, + ] +} diff --git a/env/lib/python3.6/site-packages/pip/_internal/commands/check.py b/env/lib/python3.6/site-packages/pip/_internal/commands/check.py new file mode 100644 index 0000000..e667075 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_internal/commands/check.py @@ -0,0 +1,41 @@ +import logging + +from pip._internal.basecommand import Command +from pip._internal.operations.check import ( + check_package_set, create_package_set_from_installed, +) + +logger = logging.getLogger(__name__) + + +class CheckCommand(Command): + """Verify installed packages have compatible dependencies.""" + name = 'check' + usage = """ + %prog [options]""" + summary = 'Verify installed packages have compatible dependencies.' 
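# Illustrative sketch (not part of pip's source): CheckCommand.run, shown
# next, logs every missing or conflicting dependency and returns 1 when
# anything is broken, otherwise it reports "No broken requirements found."
# A script can consume that result through the supported CLI entry point:
import subprocess
import sys

result = subprocess.run([sys.executable, '-m', 'pip', 'check'])
if result.returncode != 0:
    print('dependency problems detected; see the pip check output above')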
+ + def run(self, options, args): + package_set = create_package_set_from_installed() + missing, conflicting = check_package_set(package_set) + + for project_name in missing: + version = package_set[project_name].version + for dependency in missing[project_name]: + logger.info( + "%s %s requires %s, which is not installed.", + project_name, version, dependency[0], + ) + + for project_name in conflicting: + version = package_set[project_name].version + for dep_name, dep_version, req in conflicting[project_name]: + logger.info( + "%s %s has requirement %s, but you have %s %s.", + project_name, version, req, dep_name, dep_version, + ) + + if missing or conflicting: + return 1 + else: + logger.info("No broken requirements found.") diff --git a/env/lib/python3.6/site-packages/pip/_internal/commands/completion.py b/env/lib/python3.6/site-packages/pip/_internal/commands/completion.py new file mode 100644 index 0000000..c4b3873 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_internal/commands/completion.py @@ -0,0 +1,94 @@ +from __future__ import absolute_import + +import sys +import textwrap + +from pip._internal.basecommand import Command +from pip._internal.utils.misc import get_prog + +BASE_COMPLETION = """ +# pip %(shell)s completion start%(script)s# pip %(shell)s completion end +""" + +COMPLETION_SCRIPTS = { + 'bash': """ + _pip_completion() + { + COMPREPLY=( $( COMP_WORDS="${COMP_WORDS[*]}" \\ + COMP_CWORD=$COMP_CWORD \\ + PIP_AUTO_COMPLETE=1 $1 ) ) + } + complete -o default -F _pip_completion %(prog)s + """, + 'zsh': """ + function _pip_completion { + local words cword + read -Ac words + read -cn cword + reply=( $( COMP_WORDS="$words[*]" \\ + COMP_CWORD=$(( cword-1 )) \\ + PIP_AUTO_COMPLETE=1 $words[1] ) ) + } + compctl -K _pip_completion %(prog)s + """, + 'fish': """ + function __fish_complete_pip + set -lx COMP_WORDS (commandline -o) "" + set -lx COMP_CWORD ( \\ + math (contains -i -- (commandline -t) $COMP_WORDS)-1 \\ + ) + set -lx PIP_AUTO_COMPLETE 1 + string split \\ -- (eval $COMP_WORDS[1]) + end + complete -fa "(__fish_complete_pip)" -c %(prog)s + """, +} + + +class CompletionCommand(Command): + """A helper command to be used for command completion.""" + name = 'completion' + summary = 'A helper command used for command completion.' 
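# Illustrative sketch (not part of pip's source): CompletionCommand simply
# dedents the per-shell template from COMPLETION_SCRIPTS above, substitutes
# the program name, and wraps the result in the BASE_COMPLETION markers.
# Rendering the bash variant by hand (with 'pip' standing in for get_prog())
# looks roughly like this:
import textwrap

BASE = "\n# pip %(shell)s completion start%(script)s# pip %(shell)s completion end\n"
BASH_TEMPLATE = """
    _pip_completion()
    {
        COMPREPLY=( $( COMP_WORDS="${COMP_WORDS[*]}" \\
                       COMP_CWORD=$COMP_CWORD \\
                       PIP_AUTO_COMPLETE=1 $1 ) )
    }
    complete -o default -F _pip_completion %(prog)s
"""
script = textwrap.dedent(BASH_TEMPLATE % {'prog': 'pip'})
print(BASE % {'script': script, 'shell': 'bash'})
# Typical usage of the real command: `pip completion --bash >> ~/.bashrc`.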
+ ignore_require_venv = True + + def __init__(self, *args, **kw): + super(CompletionCommand, self).__init__(*args, **kw) + + cmd_opts = self.cmd_opts + + cmd_opts.add_option( + '--bash', '-b', + action='store_const', + const='bash', + dest='shell', + help='Emit completion code for bash') + cmd_opts.add_option( + '--zsh', '-z', + action='store_const', + const='zsh', + dest='shell', + help='Emit completion code for zsh') + cmd_opts.add_option( + '--fish', '-f', + action='store_const', + const='fish', + dest='shell', + help='Emit completion code for fish') + + self.parser.insert_option_group(0, cmd_opts) + + def run(self, options, args): + """Prints the completion code of the given shell""" + shells = COMPLETION_SCRIPTS.keys() + shell_options = ['--' + shell for shell in sorted(shells)] + if options.shell in shells: + script = textwrap.dedent( + COMPLETION_SCRIPTS.get(options.shell, '') % { + 'prog': get_prog(), + } + ) + print(BASE_COMPLETION % {'script': script, 'shell': options.shell}) + else: + sys.stderr.write( + 'ERROR: You must pass %s\n' % ' or '.join(shell_options) + ) diff --git a/env/lib/python3.6/site-packages/pip/_internal/commands/configuration.py b/env/lib/python3.6/site-packages/pip/_internal/commands/configuration.py new file mode 100644 index 0000000..57448cb --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_internal/commands/configuration.py @@ -0,0 +1,227 @@ +import logging +import os +import subprocess + +from pip._internal.basecommand import Command +from pip._internal.configuration import Configuration, kinds +from pip._internal.exceptions import PipError +from pip._internal.locations import venv_config_file +from pip._internal.status_codes import ERROR, SUCCESS +from pip._internal.utils.misc import get_prog + +logger = logging.getLogger(__name__) + + +class ConfigurationCommand(Command): + """Manage local and global configuration. + + Subcommands: + + list: List the active configuration (or from the file specified) + edit: Edit the configuration file in an editor + get: Get the value associated with name + set: Set the name=value + unset: Unset the value associated with name + + If none of --user, --global and --venv are passed, a virtual + environment configuration file is used if one is active and the file + exists. Otherwise, all modifications happen on the to the user file by + default. + """ + + name = 'config' + usage = """ + %prog [] list + %prog [] [--editor ] edit + + %prog [] get name + %prog [] set name value + %prog [] unset name + """ + + summary = "Manage local and global configuration." + + def __init__(self, *args, **kwargs): + super(ConfigurationCommand, self).__init__(*args, **kwargs) + + self.configuration = None + + self.cmd_opts.add_option( + '--editor', + dest='editor', + action='store', + default=None, + help=( + 'Editor to use to edit the file. Uses VISUAL or EDITOR ' + 'environment variables if not provided.' 
+ ) + ) + + self.cmd_opts.add_option( + '--global', + dest='global_file', + action='store_true', + default=False, + help='Use the system-wide configuration file only' + ) + + self.cmd_opts.add_option( + '--user', + dest='user_file', + action='store_true', + default=False, + help='Use the user configuration file only' + ) + + self.cmd_opts.add_option( + '--venv', + dest='venv_file', + action='store_true', + default=False, + help='Use the virtualenv configuration file only' + ) + + self.parser.insert_option_group(0, self.cmd_opts) + + def run(self, options, args): + handlers = { + "list": self.list_values, + "edit": self.open_in_editor, + "get": self.get_name, + "set": self.set_name_value, + "unset": self.unset_name + } + + # Determine action + if not args or args[0] not in handlers: + logger.error("Need an action ({}) to perform.".format( + ", ".join(sorted(handlers))) + ) + return ERROR + + action = args[0] + + # Determine which configuration files are to be loaded + # Depends on whether the command is modifying. + try: + load_only = self._determine_file( + options, need_value=(action in ["get", "set", "unset", "edit"]) + ) + except PipError as e: + logger.error(e.args[0]) + return ERROR + + # Load a new configuration + self.configuration = Configuration( + isolated=options.isolated_mode, load_only=load_only + ) + self.configuration.load() + + # Error handling happens here, not in the action-handlers. + try: + handlers[action](options, args[1:]) + except PipError as e: + logger.error(e.args[0]) + return ERROR + + return SUCCESS + + def _determine_file(self, options, need_value): + file_options = { + kinds.USER: options.user_file, + kinds.GLOBAL: options.global_file, + kinds.VENV: options.venv_file + } + + if sum(file_options.values()) == 0: + if not need_value: + return None + # Default to user, unless there's a virtualenv file. + elif os.path.exists(venv_config_file): + return kinds.VENV + else: + return kinds.USER + elif sum(file_options.values()) == 1: + # There's probably a better expression for this. + return [key for key in file_options if file_options[key]][0] + + raise PipError( + "Need exactly one file to operate upon " + "(--user, --venv, --global) to perform." + ) + + def list_values(self, options, args): + self._get_n_args(args, "list", n=0) + + for key, value in sorted(self.configuration.items()): + logger.info("%s=%r", key, value) + + def get_name(self, options, args): + key = self._get_n_args(args, "get [name]", n=1) + value = self.configuration.get_value(key) + + logger.info("%s", value) + + def set_name_value(self, options, args): + key, value = self._get_n_args(args, "set [name] [value]", n=2) + self.configuration.set_value(key, value) + + self._save_configuration() + + def unset_name(self, options, args): + key = self._get_n_args(args, "unset [name]", n=1) + self.configuration.unset_value(key) + + self._save_configuration() + + def open_in_editor(self, options, args): + editor = self._determine_editor(options) + + fname = self.configuration.get_file_to_edit() + if fname is None: + raise PipError("Could not determine appropriate file.") + + try: + subprocess.check_call([editor, fname]) + except subprocess.CalledProcessError as e: + raise PipError( + "Editor Subprocess exited with exit code {}" + .format(e.returncode) + ) + + def _get_n_args(self, args, example, n): + """Helper to make sure the command got the right number of arguments + """ + if len(args) != n: + msg = ( + 'Got unexpected number of arguments, expected {}. 
' + '(example: "{} config {}")' + ).format(n, get_prog(), example) + raise PipError(msg) + + if n == 1: + return args[0] + else: + return args + + def _save_configuration(self): + # We successfully ran a modifying command. Need to save the + # configuration. + try: + self.configuration.save() + except Exception: + logger.error( + "Unable to save configuration. Please report this as a bug.", + exc_info=1 + ) + raise PipError("Internal Error.") + + def _determine_editor(self, options): + if options.editor is not None: + return options.editor + elif "VISUAL" in os.environ: + return os.environ["VISUAL"] + elif "EDITOR" in os.environ: + return os.environ["EDITOR"] + else: + raise PipError("Could not determine editor to use.") diff --git a/env/lib/python3.6/site-packages/pip/_internal/commands/download.py b/env/lib/python3.6/site-packages/pip/_internal/commands/download.py new file mode 100644 index 0000000..cf4827c --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_internal/commands/download.py @@ -0,0 +1,236 @@ +from __future__ import absolute_import + +import logging +import os + +from pip._internal import cmdoptions +from pip._internal.basecommand import RequirementCommand +from pip._internal.exceptions import CommandError +from pip._internal.index import FormatControl +from pip._internal.operations.prepare import RequirementPreparer +from pip._internal.req import RequirementSet +from pip._internal.req.req_tracker import RequirementTracker +from pip._internal.resolve import Resolver +from pip._internal.utils.filesystem import check_path_owner +from pip._internal.utils.misc import ensure_dir, normalize_path +from pip._internal.utils.temp_dir import TempDirectory + +logger = logging.getLogger(__name__) + + +class DownloadCommand(RequirementCommand): + """ + Download packages from: + + - PyPI (and other indexes) using requirement specifiers. + - VCS project urls. + - Local project directories. + - Local or remote source archives. + + pip also supports downloading from "requirements files", which provide + an easy way to specify a whole environment to be downloaded. + """ + name = 'download' + + usage = """ + %prog [options] [package-index-options] ... + %prog [options] -r [package-index-options] ... + %prog [options] ... + %prog [options] ... + %prog [options] ...""" + + summary = 'Download packages.' + + def __init__(self, *args, **kw): + super(DownloadCommand, self).__init__(*args, **kw) + + cmd_opts = self.cmd_opts + + cmd_opts.add_option(cmdoptions.constraints()) + cmd_opts.add_option(cmdoptions.requirements()) + cmd_opts.add_option(cmdoptions.build_dir()) + cmd_opts.add_option(cmdoptions.no_deps()) + cmd_opts.add_option(cmdoptions.global_options()) + cmd_opts.add_option(cmdoptions.no_binary()) + cmd_opts.add_option(cmdoptions.only_binary()) + cmd_opts.add_option(cmdoptions.prefer_binary()) + cmd_opts.add_option(cmdoptions.src()) + cmd_opts.add_option(cmdoptions.pre()) + cmd_opts.add_option(cmdoptions.no_clean()) + cmd_opts.add_option(cmdoptions.require_hashes()) + cmd_opts.add_option(cmdoptions.progress_bar()) + cmd_opts.add_option(cmdoptions.no_build_isolation()) + + cmd_opts.add_option( + '-d', '--dest', '--destination-dir', '--destination-directory', + dest='download_dir', + metavar='dir', + default=os.curdir, + help=("Download packages into ."), + ) + + cmd_opts.add_option( + '--platform', + dest='platform', + metavar='platform', + default=None, + help=("Only download wheels compatible with . 
" + "Defaults to the platform of the running system."), + ) + + cmd_opts.add_option( + '--python-version', + dest='python_version', + metavar='python_version', + default=None, + help=("Only download wheels compatible with Python " + "interpreter version . If not specified, then the " + "current system interpreter minor version is used. A major " + "version (e.g. '2') can be specified to match all " + "minor revs of that major version. A minor version " + "(e.g. '34') can also be specified."), + ) + + cmd_opts.add_option( + '--implementation', + dest='implementation', + metavar='implementation', + default=None, + help=("Only download wheels compatible with Python " + "implementation , e.g. 'pp', 'jy', 'cp', " + " or 'ip'. If not specified, then the current " + "interpreter implementation is used. Use 'py' to force " + "implementation-agnostic wheels."), + ) + + cmd_opts.add_option( + '--abi', + dest='abi', + metavar='abi', + default=None, + help=("Only download wheels compatible with Python " + "abi , e.g. 'pypy_41'. If not specified, then the " + "current interpreter abi tag is used. Generally " + "you will need to specify --implementation, " + "--platform, and --python-version when using " + "this option."), + ) + + index_opts = cmdoptions.make_option_group( + cmdoptions.index_group, + self.parser, + ) + + self.parser.insert_option_group(0, index_opts) + self.parser.insert_option_group(0, cmd_opts) + + def run(self, options, args): + options.ignore_installed = True + # editable doesn't really make sense for `pip download`, but the bowels + # of the RequirementSet code require that property. + options.editables = [] + + if options.python_version: + python_versions = [options.python_version] + else: + python_versions = None + + dist_restriction_set = any([ + options.python_version, + options.platform, + options.abi, + options.implementation, + ]) + binary_only = FormatControl(set(), {':all:'}) + no_sdist_dependencies = ( + options.format_control != binary_only and + not options.ignore_dependencies + ) + if dist_restriction_set and no_sdist_dependencies: + raise CommandError( + "When restricting platform and interpreter constraints using " + "--python-version, --platform, --abi, or --implementation, " + "either --no-deps must be set, or --only-binary=:all: must be " + "set and --no-binary must not be set (or must be set to " + ":none:)." + ) + + options.src_dir = os.path.abspath(options.src_dir) + options.download_dir = normalize_path(options.download_dir) + + ensure_dir(options.download_dir) + + with self._build_session(options) as session: + finder = self._build_package_finder( + options=options, + session=session, + platform=options.platform, + python_versions=python_versions, + abi=options.abi, + implementation=options.implementation, + ) + build_delete = (not (options.no_clean or options.build_dir)) + if options.cache_dir and not check_path_owner(options.cache_dir): + logger.warning( + "The directory '%s' or its parent directory is not owned " + "by the current user and caching wheels has been " + "disabled. check the permissions and owner of that " + "directory. 
If executing pip with sudo, you may want " + "sudo's -H flag.", + options.cache_dir, + ) + options.cache_dir = None + + with RequirementTracker() as req_tracker, TempDirectory( + options.build_dir, delete=build_delete, kind="download" + ) as directory: + + requirement_set = RequirementSet( + require_hashes=options.require_hashes, + ) + self.populate_requirement_set( + requirement_set, + args, + options, + finder, + session, + self.name, + None + ) + + preparer = RequirementPreparer( + build_dir=directory.path, + src_dir=options.src_dir, + download_dir=options.download_dir, + wheel_download_dir=None, + progress_bar=options.progress_bar, + build_isolation=options.build_isolation, + req_tracker=req_tracker, + ) + + resolver = Resolver( + preparer=preparer, + finder=finder, + session=session, + wheel_cache=None, + use_user_site=False, + upgrade_strategy="to-satisfy-only", + force_reinstall=False, + ignore_dependencies=options.ignore_dependencies, + ignore_requires_python=False, + ignore_installed=True, + isolated=options.isolated_mode, + ) + resolver.resolve(requirement_set) + + downloaded = ' '.join([ + req.name for req in requirement_set.successfully_downloaded + ]) + if downloaded: + logger.info('Successfully downloaded %s', downloaded) + + # Clean up + if not options.no_clean: + requirement_set.cleanup_files() + + return requirement_set diff --git a/env/lib/python3.6/site-packages/pip/_internal/commands/freeze.py b/env/lib/python3.6/site-packages/pip/_internal/commands/freeze.py new file mode 100644 index 0000000..0d3d4ae --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_internal/commands/freeze.py @@ -0,0 +1,96 @@ +from __future__ import absolute_import + +import sys + +from pip._internal import index +from pip._internal.basecommand import Command +from pip._internal.cache import WheelCache +from pip._internal.compat import stdlib_pkgs +from pip._internal.operations.freeze import freeze + +DEV_PKGS = {'pip', 'setuptools', 'distribute', 'wheel'} + + +class FreezeCommand(Command): + """ + Output installed packages in requirements format. + + packages are listed in a case-insensitive sorted order. + """ + name = 'freeze' + usage = """ + %prog [options]""" + summary = 'Output installed packages in requirements format.' + log_streams = ("ext://sys.stderr", "ext://sys.stderr") + + def __init__(self, *args, **kw): + super(FreezeCommand, self).__init__(*args, **kw) + + self.cmd_opts.add_option( + '-r', '--requirement', + dest='requirements', + action='append', + default=[], + metavar='file', + help="Use the order in the given requirements file and its " + "comments when generating output. 
This option can be " + "used multiple times.") + self.cmd_opts.add_option( + '-f', '--find-links', + dest='find_links', + action='append', + default=[], + metavar='URL', + help='URL for finding packages, which will be added to the ' + 'output.') + self.cmd_opts.add_option( + '-l', '--local', + dest='local', + action='store_true', + default=False, + help='If in a virtualenv that has global access, do not output ' + 'globally-installed packages.') + self.cmd_opts.add_option( + '--user', + dest='user', + action='store_true', + default=False, + help='Only output packages installed in user-site.') + self.cmd_opts.add_option( + '--all', + dest='freeze_all', + action='store_true', + help='Do not skip these packages in the output:' + ' %s' % ', '.join(DEV_PKGS)) + self.cmd_opts.add_option( + '--exclude-editable', + dest='exclude_editable', + action='store_true', + help='Exclude editable package from output.') + + self.parser.insert_option_group(0, self.cmd_opts) + + def run(self, options, args): + format_control = index.FormatControl(set(), set()) + wheel_cache = WheelCache(options.cache_dir, format_control) + skip = set(stdlib_pkgs) + if not options.freeze_all: + skip.update(DEV_PKGS) + + freeze_kwargs = dict( + requirement=options.requirements, + find_links=options.find_links, + local_only=options.local, + user_only=options.user, + skip_regex=options.skip_requirements_regex, + isolated=options.isolated_mode, + wheel_cache=wheel_cache, + skip=skip, + exclude_editable=options.exclude_editable, + ) + + try: + for line in freeze(**freeze_kwargs): + sys.stdout.write(line + '\n') + finally: + wheel_cache.cleanup() diff --git a/env/lib/python3.6/site-packages/pip/_internal/commands/hash.py b/env/lib/python3.6/site-packages/pip/_internal/commands/hash.py new file mode 100644 index 0000000..95353b0 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_internal/commands/hash.py @@ -0,0 +1,57 @@ +from __future__ import absolute_import + +import hashlib +import logging +import sys + +from pip._internal.basecommand import Command +from pip._internal.status_codes import ERROR +from pip._internal.utils.hashes import FAVORITE_HASH, STRONG_HASHES +from pip._internal.utils.misc import read_chunks + +logger = logging.getLogger(__name__) + + +class HashCommand(Command): + """ + Compute a hash of a local package archive. + + These can be used with --hash in a requirements file to do repeatable + installs. + + """ + name = 'hash' + usage = '%prog [options] ...' + summary = 'Compute hashes of package archives.' 
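# Illustrative sketch (not part of pip's source): the hash command, shown
# next, prints "--hash=<algorithm>:<hexdigest>" lines that can be pasted
# into a requirements file next to a pinned requirement. Computing an
# equivalent digest with only the standard library looks roughly like this
# ('example.tar.gz' is a placeholder path, sha256 an example algorithm):
import hashlib

def file_digest(path, algorithm='sha256', chunk_size=8192):
    digest = hashlib.new(algorithm)
    with open(path, 'rb') as archive:
        for chunk in iter(lambda: archive.read(chunk_size), b''):
            digest.update(chunk)
    return digest.hexdigest()

# print('--hash=sha256:%s' % file_digest('example.tar.gz'))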
+ ignore_require_venv = True + + def __init__(self, *args, **kw): + super(HashCommand, self).__init__(*args, **kw) + self.cmd_opts.add_option( + '-a', '--algorithm', + dest='algorithm', + choices=STRONG_HASHES, + action='store', + default=FAVORITE_HASH, + help='The hash algorithm to use: one of %s' % + ', '.join(STRONG_HASHES)) + self.parser.insert_option_group(0, self.cmd_opts) + + def run(self, options, args): + if not args: + self.parser.print_usage(sys.stderr) + return ERROR + + algorithm = options.algorithm + for path in args: + logger.info('%s:\n--hash=%s:%s', + path, algorithm, _hash_of_file(path, algorithm)) + + +def _hash_of_file(path, algorithm): + """Return the hash digest of a file.""" + with open(path, 'rb') as archive: + hash = hashlib.new(algorithm) + for chunk in read_chunks(archive): + hash.update(chunk) + return hash.hexdigest() diff --git a/env/lib/python3.6/site-packages/pip/_internal/commands/help.py b/env/lib/python3.6/site-packages/pip/_internal/commands/help.py new file mode 100644 index 0000000..06ca2c1 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_internal/commands/help.py @@ -0,0 +1,36 @@ +from __future__ import absolute_import + +from pip._internal.basecommand import SUCCESS, Command +from pip._internal.exceptions import CommandError + + +class HelpCommand(Command): + """Show help for commands""" + name = 'help' + usage = """ + %prog """ + summary = 'Show help for commands.' + ignore_require_venv = True + + def run(self, options, args): + from pip._internal.commands import commands_dict, get_similar_commands + + try: + # 'pip help' with no args is handled by pip.__init__.parseopt() + cmd_name = args[0] # the command we need help for + except IndexError: + return SUCCESS + + if cmd_name not in commands_dict: + guess = get_similar_commands(cmd_name) + + msg = ['unknown command "%s"' % cmd_name] + if guess: + msg.append('maybe you meant "%s"' % guess) + + raise CommandError(' - '.join(msg)) + + command = commands_dict[cmd_name]() + command.parser.print_help() + + return SUCCESS diff --git a/env/lib/python3.6/site-packages/pip/_internal/commands/install.py b/env/lib/python3.6/site-packages/pip/_internal/commands/install.py new file mode 100644 index 0000000..f42a1d1 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_internal/commands/install.py @@ -0,0 +1,516 @@ +from __future__ import absolute_import + +import errno +import logging +import operator +import os +import shutil +from optparse import SUPPRESS_HELP + +from pip._vendor import pkg_resources + +from pip._internal import cmdoptions +from pip._internal.basecommand import RequirementCommand +from pip._internal.cache import WheelCache +from pip._internal.exceptions import ( + CommandError, InstallationError, PreviousBuildDirError, +) +from pip._internal.locations import distutils_scheme, virtualenv_no_global +from pip._internal.operations.check import check_install_conflicts +from pip._internal.operations.prepare import RequirementPreparer +from pip._internal.req import RequirementSet, install_given_reqs +from pip._internal.req.req_tracker import RequirementTracker +from pip._internal.resolve import Resolver +from pip._internal.status_codes import ERROR +from pip._internal.utils.filesystem import check_path_owner +from pip._internal.utils.misc import ( + ensure_dir, get_installed_version, + protect_pip_from_modification_on_windows, +) +from pip._internal.utils.temp_dir import TempDirectory +from pip._internal.wheel import WheelBuilder + +try: + import wheel +except ImportError: + wheel = None + 
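# Illustrative sketch (not part of pip's source): the try/except import just
# above is the usual optional-dependency pattern -- bind the module if it is
# importable, otherwise set the name to None and guard on it later (compare
# the "if wheel and options.cache_dir:" check further down in run()). In
# isolation:
try:
    import wheel            # optional; only needed for building .whl files
except ImportError:
    wheel = None

def wheel_building_available():
    # Mirrors the kind of guard InstallCommand.run applies before using
    # WheelBuilder.
    return wheel is not None

print(wheel_building_available())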
+ +logger = logging.getLogger(__name__) + + +class InstallCommand(RequirementCommand): + """ + Install packages from: + + - PyPI (and other indexes) using requirement specifiers. + - VCS project urls. + - Local project directories. + - Local or remote source archives. + + pip also supports installing from "requirements files", which provide + an easy way to specify a whole environment to be installed. + """ + name = 'install' + + usage = """ + %prog [options] [package-index-options] ... + %prog [options] -r [package-index-options] ... + %prog [options] [-e] ... + %prog [options] [-e] ... + %prog [options] ...""" + + summary = 'Install packages.' + + def __init__(self, *args, **kw): + super(InstallCommand, self).__init__(*args, **kw) + + cmd_opts = self.cmd_opts + + cmd_opts.add_option(cmdoptions.requirements()) + cmd_opts.add_option(cmdoptions.constraints()) + cmd_opts.add_option(cmdoptions.no_deps()) + cmd_opts.add_option(cmdoptions.pre()) + + cmd_opts.add_option(cmdoptions.editable()) + cmd_opts.add_option( + '-t', '--target', + dest='target_dir', + metavar='dir', + default=None, + help='Install packages into . ' + 'By default this will not replace existing files/folders in ' + '. Use --upgrade to replace existing packages in ' + 'with new versions.' + ) + cmd_opts.add_option( + '--user', + dest='use_user_site', + action='store_true', + help="Install to the Python user install directory for your " + "platform. Typically ~/.local/, or %APPDATA%\\Python on " + "Windows. (See the Python documentation for site.USER_BASE " + "for full details.)") + cmd_opts.add_option( + '--no-user', + dest='use_user_site', + action='store_false', + help=SUPPRESS_HELP) + cmd_opts.add_option( + '--root', + dest='root_path', + metavar='dir', + default=None, + help="Install everything relative to this alternate root " + "directory.") + cmd_opts.add_option( + '--prefix', + dest='prefix_path', + metavar='dir', + default=None, + help="Installation prefix where lib, bin and other top-level " + "folders are placed") + + cmd_opts.add_option(cmdoptions.build_dir()) + + cmd_opts.add_option(cmdoptions.src()) + + cmd_opts.add_option( + '-U', '--upgrade', + dest='upgrade', + action='store_true', + help='Upgrade all specified packages to the newest available ' + 'version. The handling of dependencies depends on the ' + 'upgrade-strategy used.' + ) + + cmd_opts.add_option( + '--upgrade-strategy', + dest='upgrade_strategy', + default='only-if-needed', + choices=['only-if-needed', 'eager'], + help='Determines how dependency upgrading should be handled ' + '[default: %default]. ' + '"eager" - dependencies are upgraded regardless of ' + 'whether the currently installed version satisfies the ' + 'requirements of the upgraded package(s). ' + '"only-if-needed" - are upgraded only when they do not ' + 'satisfy the requirements of the upgraded package(s).' 
+ ) + + cmd_opts.add_option( + '--force-reinstall', + dest='force_reinstall', + action='store_true', + help='Reinstall all packages even if they are already ' + 'up-to-date.') + + cmd_opts.add_option( + '-I', '--ignore-installed', + dest='ignore_installed', + action='store_true', + help='Ignore the installed packages (reinstalling instead).') + + cmd_opts.add_option(cmdoptions.ignore_requires_python()) + cmd_opts.add_option(cmdoptions.no_build_isolation()) + + cmd_opts.add_option(cmdoptions.install_options()) + cmd_opts.add_option(cmdoptions.global_options()) + + cmd_opts.add_option( + "--compile", + action="store_true", + dest="compile", + default=True, + help="Compile Python source files to bytecode", + ) + + cmd_opts.add_option( + "--no-compile", + action="store_false", + dest="compile", + help="Do not compile Python source files to bytecode", + ) + + cmd_opts.add_option( + "--no-warn-script-location", + action="store_false", + dest="warn_script_location", + default=True, + help="Do not warn when installing scripts outside PATH", + ) + cmd_opts.add_option( + "--no-warn-conflicts", + action="store_false", + dest="warn_about_conflicts", + default=True, + help="Do not warn about broken dependencies", + ) + + cmd_opts.add_option(cmdoptions.no_binary()) + cmd_opts.add_option(cmdoptions.only_binary()) + cmd_opts.add_option(cmdoptions.prefer_binary()) + cmd_opts.add_option(cmdoptions.no_clean()) + cmd_opts.add_option(cmdoptions.require_hashes()) + cmd_opts.add_option(cmdoptions.progress_bar()) + + index_opts = cmdoptions.make_option_group( + cmdoptions.index_group, + self.parser, + ) + + self.parser.insert_option_group(0, index_opts) + self.parser.insert_option_group(0, cmd_opts) + + def run(self, options, args): + cmdoptions.check_install_build_global(options) + + upgrade_strategy = "to-satisfy-only" + if options.upgrade: + upgrade_strategy = options.upgrade_strategy + + if options.build_dir: + options.build_dir = os.path.abspath(options.build_dir) + + options.src_dir = os.path.abspath(options.src_dir) + install_options = options.install_options or [] + if options.use_user_site: + if options.prefix_path: + raise CommandError( + "Can not combine '--user' and '--prefix' as they imply " + "different installation locations" + ) + if virtualenv_no_global(): + raise InstallationError( + "Can not perform a '--user' install. User site-packages " + "are not visible in this virtualenv." + ) + install_options.append('--user') + install_options.append('--prefix=') + + target_temp_dir = TempDirectory(kind="target") + if options.target_dir: + options.ignore_installed = True + options.target_dir = os.path.abspath(options.target_dir) + if (os.path.exists(options.target_dir) and not + os.path.isdir(options.target_dir)): + raise CommandError( + "Target path exists but is not a directory, will not " + "continue." + ) + + # Create a target directory for using with the target option + target_temp_dir.create() + install_options.append('--home=' + target_temp_dir.path) + + global_options = options.global_options or [] + + with self._build_session(options) as session: + finder = self._build_package_finder(options, session) + build_delete = (not (options.no_clean or options.build_dir)) + wheel_cache = WheelCache(options.cache_dir, options.format_control) + + if options.cache_dir and not check_path_owner(options.cache_dir): + logger.warning( + "The directory '%s' or its parent directory is not owned " + "by the current user and caching wheels has been " + "disabled. 
check the permissions and owner of that " + "directory. If executing pip with sudo, you may want " + "sudo's -H flag.", + options.cache_dir, + ) + options.cache_dir = None + + with RequirementTracker() as req_tracker, TempDirectory( + options.build_dir, delete=build_delete, kind="install" + ) as directory: + requirement_set = RequirementSet( + require_hashes=options.require_hashes, + ) + + try: + self.populate_requirement_set( + requirement_set, args, options, finder, session, + self.name, wheel_cache + ) + preparer = RequirementPreparer( + build_dir=directory.path, + src_dir=options.src_dir, + download_dir=None, + wheel_download_dir=None, + progress_bar=options.progress_bar, + build_isolation=options.build_isolation, + req_tracker=req_tracker, + ) + + resolver = Resolver( + preparer=preparer, + finder=finder, + session=session, + wheel_cache=wheel_cache, + use_user_site=options.use_user_site, + upgrade_strategy=upgrade_strategy, + force_reinstall=options.force_reinstall, + ignore_dependencies=options.ignore_dependencies, + ignore_requires_python=options.ignore_requires_python, + ignore_installed=options.ignore_installed, + isolated=options.isolated_mode, + ) + resolver.resolve(requirement_set) + + protect_pip_from_modification_on_windows( + modifying_pip=requirement_set.has_requirement("pip") + ) + + # If caching is disabled or wheel is not installed don't + # try to build wheels. + if wheel and options.cache_dir: + # build wheels before install. + wb = WheelBuilder( + finder, preparer, wheel_cache, + build_options=[], global_options=[], + ) + # Ignore the result: a failed wheel will be + # installed from the sdist/vcs whatever. + wb.build( + requirement_set.requirements.values(), + session=session, autobuilding=True + ) + + to_install = resolver.get_installation_order( + requirement_set + ) + + # Consistency Checking of the package set we're installing. 
+ should_warn_about_conflicts = ( + not options.ignore_dependencies and + options.warn_about_conflicts + ) + if should_warn_about_conflicts: + self._warn_about_conflicts(to_install) + + # Don't warn about script install locations if + # --target has been specified + warn_script_location = options.warn_script_location + if options.target_dir: + warn_script_location = False + + installed = install_given_reqs( + to_install, + install_options, + global_options, + root=options.root_path, + home=target_temp_dir.path, + prefix=options.prefix_path, + pycompile=options.compile, + warn_script_location=warn_script_location, + use_user_site=options.use_user_site, + ) + + lib_locations = get_lib_location_guesses( + user=options.use_user_site, + home=target_temp_dir.path, + root=options.root_path, + prefix=options.prefix_path, + isolated=options.isolated_mode, + ) + working_set = pkg_resources.WorkingSet(lib_locations) + + reqs = sorted(installed, key=operator.attrgetter('name')) + items = [] + for req in reqs: + item = req.name + try: + installed_version = get_installed_version( + req.name, working_set=working_set + ) + if installed_version: + item += '-' + installed_version + except Exception: + pass + items.append(item) + installed = ' '.join(items) + if installed: + logger.info('Successfully installed %s', installed) + except EnvironmentError as error: + show_traceback = (self.verbosity >= 1) + + message = create_env_error_message( + error, show_traceback, options.use_user_site, + ) + logger.error(message, exc_info=show_traceback) + + return ERROR + except PreviousBuildDirError: + options.no_clean = True + raise + finally: + # Clean up + if not options.no_clean: + requirement_set.cleanup_files() + wheel_cache.cleanup() + + if options.target_dir: + self._handle_target_dir( + options.target_dir, target_temp_dir, options.upgrade + ) + return requirement_set + + def _handle_target_dir(self, target_dir, target_temp_dir, upgrade): + ensure_dir(target_dir) + + # Checking both purelib and platlib directories for installed + # packages to be moved to target directory + lib_dir_list = [] + + with target_temp_dir: + # Checking both purelib and platlib directories for installed + # packages to be moved to target directory + scheme = distutils_scheme('', home=target_temp_dir.path) + purelib_dir = scheme['purelib'] + platlib_dir = scheme['platlib'] + data_dir = scheme['data'] + + if os.path.exists(purelib_dir): + lib_dir_list.append(purelib_dir) + if os.path.exists(platlib_dir) and platlib_dir != purelib_dir: + lib_dir_list.append(platlib_dir) + if os.path.exists(data_dir): + lib_dir_list.append(data_dir) + + for lib_dir in lib_dir_list: + for item in os.listdir(lib_dir): + if lib_dir == data_dir: + ddir = os.path.join(data_dir, item) + if any(s.startswith(ddir) for s in lib_dir_list[:-1]): + continue + target_item_dir = os.path.join(target_dir, item) + if os.path.exists(target_item_dir): + if not upgrade: + logger.warning( + 'Target directory %s already exists. Specify ' + '--upgrade to force replacement.', + target_item_dir + ) + continue + if os.path.islink(target_item_dir): + logger.warning( + 'Target directory %s already exists and is ' + 'a link. 
Pip will not automatically replace ' + 'links, please remove if replacement is ' + 'desired.', + target_item_dir + ) + continue + if os.path.isdir(target_item_dir): + shutil.rmtree(target_item_dir) + else: + os.remove(target_item_dir) + + shutil.move( + os.path.join(lib_dir, item), + target_item_dir + ) + + def _warn_about_conflicts(self, to_install): + package_set, _dep_info = check_install_conflicts(to_install) + missing, conflicting = _dep_info + + # NOTE: There is some duplication here from pip check + for project_name in missing: + version = package_set[project_name][0] + for dependency in missing[project_name]: + logger.critical( + "%s %s requires %s, which is not installed.", + project_name, version, dependency[1], + ) + + for project_name in conflicting: + version = package_set[project_name][0] + for dep_name, dep_version, req in conflicting[project_name]: + logger.critical( + "%s %s has requirement %s, but you'll have %s %s which is " + "incompatible.", + project_name, version, req, dep_name, dep_version, + ) + + +def get_lib_location_guesses(*args, **kwargs): + scheme = distutils_scheme('', *args, **kwargs) + return [scheme['purelib'], scheme['platlib']] + + +def create_env_error_message(error, show_traceback, using_user_site): + """Format an error message for an EnvironmentError + + It may occur anytime during the execution of the install command. + """ + parts = [] + + # Mention the error if we are not going to show a traceback + parts.append("Could not install packages due to an EnvironmentError") + if not show_traceback: + parts.append(": ") + parts.append(str(error)) + else: + parts.append(".") + + # Spilt the error indication from a helper message (if any) + parts[-1] += "\n" + + # Suggest useful actions to the user: + # (1) using user site-packages or (2) verifying the permissions + if error.errno == errno.EACCES: + user_option_part = "Consider using the `--user` option" + permissions_part = "Check the permissions" + + if not using_user_site: + parts.extend([ + user_option_part, " or ", + permissions_part.lower(), + ]) + else: + parts.append(permissions_part) + parts.append(".\n") + + return "".join(parts).strip() + "\n" diff --git a/env/lib/python3.6/site-packages/pip/_internal/commands/list.py b/env/lib/python3.6/site-packages/pip/_internal/commands/list.py new file mode 100644 index 0000000..53fb015 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_internal/commands/list.py @@ -0,0 +1,304 @@ +from __future__ import absolute_import + +import json +import logging + +from pip._vendor import six +from pip._vendor.six.moves import zip_longest + +from pip._internal.basecommand import Command +from pip._internal.cmdoptions import index_group, make_option_group +from pip._internal.exceptions import CommandError +from pip._internal.index import PackageFinder +from pip._internal.utils.misc import ( + dist_is_editable, get_installed_distributions, +) +from pip._internal.utils.packaging import get_installer + +logger = logging.getLogger(__name__) + + +class ListCommand(Command): + """ + List installed packages, including editables. + + Packages are listed in a case-insensitive sorted order. + """ + name = 'list' + usage = """ + %prog [options]""" + summary = 'List installed packages.' 
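# The target-directory handling and get_lib_location_guesses() above are built
# around the distutils install scheme's "purelib" and "platlib" directories.
# As a rough standalone illustration (using the stdlib sysconfig module rather
# than pip's internal distutils_scheme helper), the equivalent locations for
# the running interpreter can be inspected like this:
import sysconfig

paths = sysconfig.get_paths()
print(paths["purelib"])  # where pure-Python packages are installed
print(paths["platlib"])  # where platform-specific (compiled) packages go
# On many systems the two paths are identical, which is why the code above
# only appends platlib_dir when it differs from purelib_dir.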
+ + def __init__(self, *args, **kw): + super(ListCommand, self).__init__(*args, **kw) + + cmd_opts = self.cmd_opts + + cmd_opts.add_option( + '-o', '--outdated', + action='store_true', + default=False, + help='List outdated packages') + cmd_opts.add_option( + '-u', '--uptodate', + action='store_true', + default=False, + help='List uptodate packages') + cmd_opts.add_option( + '-e', '--editable', + action='store_true', + default=False, + help='List editable projects.') + cmd_opts.add_option( + '-l', '--local', + action='store_true', + default=False, + help=('If in a virtualenv that has global access, do not list ' + 'globally-installed packages.'), + ) + self.cmd_opts.add_option( + '--user', + dest='user', + action='store_true', + default=False, + help='Only output packages installed in user-site.') + + cmd_opts.add_option( + '--pre', + action='store_true', + default=False, + help=("Include pre-release and development versions. By default, " + "pip only finds stable versions."), + ) + + cmd_opts.add_option( + '--format', + action='store', + dest='list_format', + default="columns", + choices=('columns', 'freeze', 'json'), + help="Select the output format among: columns (default), freeze, " + "or json", + ) + + cmd_opts.add_option( + '--not-required', + action='store_true', + dest='not_required', + help="List packages that are not dependencies of " + "installed packages.", + ) + + cmd_opts.add_option( + '--exclude-editable', + action='store_false', + dest='include_editable', + help='Exclude editable package from output.', + ) + cmd_opts.add_option( + '--include-editable', + action='store_true', + dest='include_editable', + help='Include editable package from output.', + default=True, + ) + index_opts = make_option_group(index_group, self.parser) + + self.parser.insert_option_group(0, index_opts) + self.parser.insert_option_group(0, cmd_opts) + + def _build_package_finder(self, options, index_urls, session): + """ + Create a package finder appropriate to this list command. 
+ """ + return PackageFinder( + find_links=options.find_links, + index_urls=index_urls, + allow_all_prereleases=options.pre, + trusted_hosts=options.trusted_hosts, + process_dependency_links=options.process_dependency_links, + session=session, + ) + + def run(self, options, args): + if options.outdated and options.uptodate: + raise CommandError( + "Options --outdated and --uptodate cannot be combined.") + + packages = get_installed_distributions( + local_only=options.local, + user_only=options.user, + editables_only=options.editable, + include_editables=options.include_editable, + ) + + if options.outdated: + packages = self.get_outdated(packages, options) + elif options.uptodate: + packages = self.get_uptodate(packages, options) + + if options.not_required: + packages = self.get_not_required(packages, options) + + self.output_package_listing(packages, options) + + def get_outdated(self, packages, options): + return [ + dist for dist in self.iter_packages_latest_infos(packages, options) + if dist.latest_version > dist.parsed_version + ] + + def get_uptodate(self, packages, options): + return [ + dist for dist in self.iter_packages_latest_infos(packages, options) + if dist.latest_version == dist.parsed_version + ] + + def get_not_required(self, packages, options): + dep_keys = set() + for dist in packages: + dep_keys.update(requirement.key for requirement in dist.requires()) + return {pkg for pkg in packages if pkg.key not in dep_keys} + + def iter_packages_latest_infos(self, packages, options): + index_urls = [options.index_url] + options.extra_index_urls + if options.no_index: + logger.debug('Ignoring indexes: %s', ','.join(index_urls)) + index_urls = [] + + dependency_links = [] + for dist in packages: + if dist.has_metadata('dependency_links.txt'): + dependency_links.extend( + dist.get_metadata_lines('dependency_links.txt'), + ) + + with self._build_session(options) as session: + finder = self._build_package_finder(options, index_urls, session) + finder.add_dependency_links(dependency_links) + + for dist in packages: + typ = 'unknown' + all_candidates = finder.find_all_candidates(dist.key) + if not options.pre: + # Remove prereleases + all_candidates = [candidate for candidate in all_candidates + if not candidate.version.is_prerelease] + + if not all_candidates: + continue + best_candidate = max(all_candidates, + key=finder._candidate_sort_key) + remote_version = best_candidate.version + if best_candidate.location.is_wheel: + typ = 'wheel' + else: + typ = 'sdist' + # This is dirty but makes the rest of the code much cleaner + dist.latest_version = remote_version + dist.latest_filetype = typ + yield dist + + def output_package_listing(self, packages, options): + packages = sorted( + packages, + key=lambda dist: dist.project_name.lower(), + ) + if options.list_format == 'columns' and packages: + data, header = format_for_columns(packages, options) + self.output_package_listing_columns(data, header) + elif options.list_format == 'freeze': + for dist in packages: + if options.verbose >= 1: + logger.info("%s==%s (%s)", dist.project_name, + dist.version, dist.location) + else: + logger.info("%s==%s", dist.project_name, dist.version) + elif options.list_format == 'json': + logger.info(format_for_json(packages, options)) + + def output_package_listing_columns(self, data, header): + # insert the header first: we need to know the size of column names + if len(data) > 0: + data.insert(0, header) + + pkg_strings, sizes = tabulate(data) + + # Create and add a separator. 
+ if len(data) > 0: + pkg_strings.insert(1, " ".join(map(lambda x: '-' * x, sizes))) + + for val in pkg_strings: + logger.info(val) + + +def tabulate(vals): + # From pfmoore on GitHub: + # https://github.com/pypa/pip/issues/3651#issuecomment-216932564 + assert len(vals) > 0 + + sizes = [0] * max(len(x) for x in vals) + for row in vals: + sizes = [max(s, len(str(c))) for s, c in zip_longest(sizes, row)] + + result = [] + for row in vals: + display = " ".join([str(c).ljust(s) if c is not None else '' + for s, c in zip_longest(sizes, row)]) + result.append(display) + + return result, sizes + + +def format_for_columns(pkgs, options): + """ + Convert the package data into something usable + by output_package_listing_columns. + """ + running_outdated = options.outdated + # Adjust the header for the `pip list --outdated` case. + if running_outdated: + header = ["Package", "Version", "Latest", "Type"] + else: + header = ["Package", "Version"] + + data = [] + if options.verbose >= 1 or any(dist_is_editable(x) for x in pkgs): + header.append("Location") + if options.verbose >= 1: + header.append("Installer") + + for proj in pkgs: + # if we're working on the 'outdated' list, separate out the + # latest_version and type + row = [proj.project_name, proj.version] + + if running_outdated: + row.append(proj.latest_version) + row.append(proj.latest_filetype) + + if options.verbose >= 1 or dist_is_editable(proj): + row.append(proj.location) + if options.verbose >= 1: + row.append(get_installer(proj)) + + data.append(row) + + return data, header + + +def format_for_json(packages, options): + data = [] + for dist in packages: + info = { + 'name': dist.project_name, + 'version': six.text_type(dist.version), + } + if options.verbose >= 1: + info['location'] = dist.location + info['installer'] = get_installer(dist) + if options.outdated: + info['latest_version'] = six.text_type(dist.latest_version) + info['latest_filetype'] = dist.latest_filetype + data.append(info) + return json.dumps(data) diff --git a/env/lib/python3.6/site-packages/pip/_internal/commands/search.py b/env/lib/python3.6/site-packages/pip/_internal/commands/search.py new file mode 100644 index 0000000..a47a9a7 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_internal/commands/search.py @@ -0,0 +1,135 @@ +from __future__ import absolute_import + +import logging +import sys +import textwrap +from collections import OrderedDict + +from pip._vendor import pkg_resources +from pip._vendor.packaging.version import parse as parse_version +# NOTE: XMLRPC Client is not annotated in typeshed as on 2017-07-17, which is +# why we ignore the type on this import +from pip._vendor.six.moves import xmlrpc_client # type: ignore + +from pip._internal.basecommand import SUCCESS, Command +from pip._internal.compat import get_terminal_size +from pip._internal.download import PipXmlrpcTransport +from pip._internal.exceptions import CommandError +from pip._internal.models.index import PyPI +from pip._internal.status_codes import NO_MATCHES_FOUND +from pip._internal.utils.logging import indent_log + +logger = logging.getLogger(__name__) + + +class SearchCommand(Command): + """Search for PyPI packages whose name or summary contains .""" + name = 'search' + usage = """ + %prog [options] """ + summary = 'Search PyPI for packages.' 
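# A small self-contained sketch of the column-sizing idea behind tabulate()
# in list.py above: zip_longest pads short rows with None, each column width
# becomes the length of its longest cell, and ljust() aligns the output.
# The sample rows are made up for illustration.
from itertools import zip_longest

rows = [["Package", "Version"], ["requests", "2.19.1"], ["pip", "10.0.1"]]
sizes = [0] * max(len(row) for row in rows)
for row in rows:
    sizes = [max(size, len(str(cell))) for size, cell in zip_longest(sizes, row)]
for row in rows:
    print(" ".join(str(cell).ljust(size) if cell is not None else ""
                   for size, cell in zip_longest(sizes, row)))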
+ ignore_require_venv = True + + def __init__(self, *args, **kw): + super(SearchCommand, self).__init__(*args, **kw) + self.cmd_opts.add_option( + '-i', '--index', + dest='index', + metavar='URL', + default=PyPI.pypi_url, + help='Base URL of Python Package Index (default %default)') + + self.parser.insert_option_group(0, self.cmd_opts) + + def run(self, options, args): + if not args: + raise CommandError('Missing required argument (search query).') + query = args + pypi_hits = self.search(query, options) + hits = transform_hits(pypi_hits) + + terminal_width = None + if sys.stdout.isatty(): + terminal_width = get_terminal_size()[0] + + print_results(hits, terminal_width=terminal_width) + if pypi_hits: + return SUCCESS + return NO_MATCHES_FOUND + + def search(self, query, options): + index_url = options.index + with self._build_session(options) as session: + transport = PipXmlrpcTransport(index_url, session) + pypi = xmlrpc_client.ServerProxy(index_url, transport) + hits = pypi.search({'name': query, 'summary': query}, 'or') + return hits + + +def transform_hits(hits): + """ + The list from pypi is really a list of versions. We want a list of + packages with the list of versions stored inline. This converts the + list from pypi into one we can use. + """ + packages = OrderedDict() + for hit in hits: + name = hit['name'] + summary = hit['summary'] + version = hit['version'] + + if name not in packages.keys(): + packages[name] = { + 'name': name, + 'summary': summary, + 'versions': [version], + } + else: + packages[name]['versions'].append(version) + + # if this is the highest version, replace summary and score + if version == highest_version(packages[name]['versions']): + packages[name]['summary'] = summary + + return list(packages.values()) + + +def print_results(hits, name_column_width=None, terminal_width=None): + if not hits: + return + if name_column_width is None: + name_column_width = max([ + len(hit['name']) + len(highest_version(hit.get('versions', ['-']))) + for hit in hits + ]) + 4 + + installed_packages = [p.project_name for p in pkg_resources.working_set] + for hit in hits: + name = hit['name'] + summary = hit['summary'] or '' + latest = highest_version(hit.get('versions', ['-'])) + if terminal_width is not None: + target_width = terminal_width - name_column_width - 5 + if target_width > 10: + # wrap and indent summary to fit terminal + summary = textwrap.wrap(summary, target_width) + summary = ('\n' + ' ' * (name_column_width + 3)).join(summary) + + line = '%-*s - %s' % (name_column_width, + '%s (%s)' % (name, latest), summary) + try: + logger.info(line) + if name in installed_packages: + dist = pkg_resources.get_distribution(name) + with indent_log(): + if dist.version == latest: + logger.info('INSTALLED: %s (latest)', dist.version) + else: + logger.info('INSTALLED: %s', dist.version) + logger.info('LATEST: %s', latest) + except UnicodeEncodeError: + pass + + +def highest_version(versions): + return max(versions, key=parse_version) diff --git a/env/lib/python3.6/site-packages/pip/_internal/commands/show.py b/env/lib/python3.6/site-packages/pip/_internal/commands/show.py new file mode 100644 index 0000000..4136133 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_internal/commands/show.py @@ -0,0 +1,168 @@ +from __future__ import absolute_import + +import logging +import os +from email.parser import FeedParser # type: ignore + +from pip._vendor import pkg_resources +from pip._vendor.packaging.utils import canonicalize_name + +from pip._internal.basecommand import Command 
+from pip._internal.status_codes import ERROR, SUCCESS + +logger = logging.getLogger(__name__) + + +class ShowCommand(Command): + """ + Show information about one or more installed packages. + + The output is in RFC-compliant mail header format. + """ + name = 'show' + usage = """ + %prog [options] ...""" + summary = 'Show information about installed packages.' + ignore_require_venv = True + + def __init__(self, *args, **kw): + super(ShowCommand, self).__init__(*args, **kw) + self.cmd_opts.add_option( + '-f', '--files', + dest='files', + action='store_true', + default=False, + help='Show the full list of installed files for each package.') + + self.parser.insert_option_group(0, self.cmd_opts) + + def run(self, options, args): + if not args: + logger.warning('ERROR: Please provide a package name or names.') + return ERROR + query = args + + results = search_packages_info(query) + if not print_results( + results, list_files=options.files, verbose=options.verbose): + return ERROR + return SUCCESS + + +def search_packages_info(query): + """ + Gather details from installed distributions. Print distribution name, + version, location, and installed files. Installed files requires a + pip generated 'installed-files.txt' in the distributions '.egg-info' + directory. + """ + installed = {} + for p in pkg_resources.working_set: + installed[canonicalize_name(p.project_name)] = p + + query_names = [canonicalize_name(name) for name in query] + + for dist in [installed[pkg] for pkg in query_names if pkg in installed]: + package = { + 'name': dist.project_name, + 'version': dist.version, + 'location': dist.location, + 'requires': [dep.project_name for dep in dist.requires()], + } + file_list = None + metadata = None + if isinstance(dist, pkg_resources.DistInfoDistribution): + # RECORDs should be part of .dist-info metadatas + if dist.has_metadata('RECORD'): + lines = dist.get_metadata_lines('RECORD') + paths = [l.split(',')[0] for l in lines] + paths = [os.path.join(dist.location, p) for p in paths] + file_list = [os.path.relpath(p, dist.location) for p in paths] + + if dist.has_metadata('METADATA'): + metadata = dist.get_metadata('METADATA') + else: + # Otherwise use pip's log for .egg-info's + if dist.has_metadata('installed-files.txt'): + paths = dist.get_metadata_lines('installed-files.txt') + paths = [os.path.join(dist.egg_info, p) for p in paths] + file_list = [os.path.relpath(p, dist.location) for p in paths] + + if dist.has_metadata('PKG-INFO'): + metadata = dist.get_metadata('PKG-INFO') + + if dist.has_metadata('entry_points.txt'): + entry_points = dist.get_metadata_lines('entry_points.txt') + package['entry_points'] = entry_points + + if dist.has_metadata('INSTALLER'): + for line in dist.get_metadata_lines('INSTALLER'): + if line.strip(): + package['installer'] = line.strip() + break + + # @todo: Should pkg_resources.Distribution have a + # `get_pkg_info` method? 
+ feed_parser = FeedParser() + feed_parser.feed(metadata) + pkg_info_dict = feed_parser.close() + for key in ('metadata-version', 'summary', + 'home-page', 'author', 'author-email', 'license'): + package[key] = pkg_info_dict.get(key) + + # It looks like FeedParser cannot deal with repeated headers + classifiers = [] + for line in metadata.splitlines(): + if line.startswith('Classifier: '): + classifiers.append(line[len('Classifier: '):]) + package['classifiers'] = classifiers + + if file_list: + package['files'] = sorted(file_list) + yield package + + +def print_results(distributions, list_files=False, verbose=False): + """ + Print the informations from installed distributions found. + """ + results_printed = False + for i, dist in enumerate(distributions): + results_printed = True + if i > 0: + logger.info("---") + + name = dist.get('name', '') + required_by = [ + pkg.project_name for pkg in pkg_resources.working_set + if name in [required.name for required in pkg.requires()] + ] + + logger.info("Name: %s", name) + logger.info("Version: %s", dist.get('version', '')) + logger.info("Summary: %s", dist.get('summary', '')) + logger.info("Home-page: %s", dist.get('home-page', '')) + logger.info("Author: %s", dist.get('author', '')) + logger.info("Author-email: %s", dist.get('author-email', '')) + logger.info("License: %s", dist.get('license', '')) + logger.info("Location: %s", dist.get('location', '')) + logger.info("Requires: %s", ', '.join(dist.get('requires', []))) + logger.info("Required-by: %s", ', '.join(required_by)) + + if verbose: + logger.info("Metadata-Version: %s", + dist.get('metadata-version', '')) + logger.info("Installer: %s", dist.get('installer', '')) + logger.info("Classifiers:") + for classifier in dist.get('classifiers', []): + logger.info(" %s", classifier) + logger.info("Entry-points:") + for entry in dist.get('entry_points', []): + logger.info(" %s", entry.strip()) + if list_files: + logger.info("Files:") + for line in dist.get('files', []): + logger.info(" %s", line.strip()) + if "files" not in dist: + logger.info("Cannot locate installed-files.txt") + return results_printed diff --git a/env/lib/python3.6/site-packages/pip/_internal/commands/uninstall.py b/env/lib/python3.6/site-packages/pip/_internal/commands/uninstall.py new file mode 100644 index 0000000..ba5a2f5 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_internal/commands/uninstall.py @@ -0,0 +1,77 @@ +from __future__ import absolute_import + +from pip._vendor.packaging.utils import canonicalize_name + +from pip._internal.basecommand import Command +from pip._internal.exceptions import InstallationError +from pip._internal.req import InstallRequirement, parse_requirements +from pip._internal.utils.misc import protect_pip_from_modification_on_windows + + +class UninstallCommand(Command): + """ + Uninstall packages. + + pip is able to uninstall most installed packages. Known exceptions are: + + - Pure distutils packages installed with ``python setup.py install``, which + leave behind no metadata to determine what files were installed. + - Script wrappers installed by ``python setup.py develop``. + """ + name = 'uninstall' + usage = """ + %prog [options] ... + %prog [options] -r ...""" + summary = 'Uninstall packages.' 
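# A minimal sketch of the FeedParser-based metadata parsing used by
# search_packages_info() in show.py above: METADATA/PKG-INFO files are
# RFC 822-style header blocks, so the stdlib email parser can read them.
# The metadata text below is invented for illustration.
from email.parser import FeedParser

metadata = (
    "Metadata-Version: 1.2\n"
    "Name: example-package\n"
    "Summary: An example package\n"
    "Classifier: Programming Language :: Python :: 3\n"
    "Classifier: License :: OSI Approved :: MIT License\n"
)
parser = FeedParser()
parser.feed(metadata)
msg = parser.close()
print(msg.get("summary"))          # header lookups are case-insensitive
print(msg.get_all("Classifier"))   # repeated headers: get_all() returns every value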
+ + def __init__(self, *args, **kw): + super(UninstallCommand, self).__init__(*args, **kw) + self.cmd_opts.add_option( + '-r', '--requirement', + dest='requirements', + action='append', + default=[], + metavar='file', + help='Uninstall all the packages listed in the given requirements ' + 'file. This option can be used multiple times.', + ) + self.cmd_opts.add_option( + '-y', '--yes', + dest='yes', + action='store_true', + help="Don't ask for confirmation of uninstall deletions.") + + self.parser.insert_option_group(0, self.cmd_opts) + + def run(self, options, args): + with self._build_session(options) as session: + reqs_to_uninstall = {} + for name in args: + req = InstallRequirement.from_line( + name, isolated=options.isolated_mode, + ) + if req.name: + reqs_to_uninstall[canonicalize_name(req.name)] = req + for filename in options.requirements: + for req in parse_requirements( + filename, + options=options, + session=session): + if req.name: + reqs_to_uninstall[canonicalize_name(req.name)] = req + if not reqs_to_uninstall: + raise InstallationError( + 'You must give at least one requirement to %(name)s (see ' + '"pip help %(name)s")' % dict(name=self.name) + ) + + protect_pip_from_modification_on_windows( + modifying_pip="pip" in reqs_to_uninstall + ) + + for req in reqs_to_uninstall.values(): + uninstall_pathset = req.uninstall( + auto_confirm=options.yes, verbose=self.verbosity > 0, + ) + if uninstall_pathset: + uninstall_pathset.commit() diff --git a/env/lib/python3.6/site-packages/pip/_internal/commands/wheel.py b/env/lib/python3.6/site-packages/pip/_internal/commands/wheel.py new file mode 100644 index 0000000..4189387 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_internal/commands/wheel.py @@ -0,0 +1,183 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import + +import logging +import os + +from pip._internal import cmdoptions +from pip._internal.basecommand import RequirementCommand +from pip._internal.cache import WheelCache +from pip._internal.exceptions import CommandError, PreviousBuildDirError +from pip._internal.operations.prepare import RequirementPreparer +from pip._internal.req import RequirementSet +from pip._internal.req.req_tracker import RequirementTracker +from pip._internal.resolve import Resolver +from pip._internal.utils.temp_dir import TempDirectory +from pip._internal.wheel import WheelBuilder + +logger = logging.getLogger(__name__) + + +class WheelCommand(RequirementCommand): + """ + Build Wheel archives for your requirements and dependencies. + + Wheel is a built-package format, and offers the advantage of not + recompiling your software during every install. For more details, see the + wheel docs: https://wheel.readthedocs.io/en/latest/ + + Requirements: setuptools>=0.8, and wheel. + + 'pip wheel' uses the bdist_wheel setuptools extension from the wheel + package to build individual wheels. + + """ + + name = 'wheel' + usage = """ + %prog [options] ... + %prog [options] -r ... + %prog [options] [-e] ... + %prog [options] [-e] ... + %prog [options] ...""" + + summary = 'Build wheels from your requirements.' 
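# A quick illustration of the canonicalize_name() normalization relied on
# above when keying reqs_to_uninstall: names that differ only in case, dots,
# underscores, or runs of hyphens map to the same PEP 503 key. Standalone
# code would import it from the "packaging" project rather than pip's
# vendored copy.
from packaging.utils import canonicalize_name

print(canonicalize_name("Django_REST.framework"))                   # django-rest-framework
print(canonicalize_name("Pillow") == canonicalize_name("pillow"))   # True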
+ + def __init__(self, *args, **kw): + super(WheelCommand, self).__init__(*args, **kw) + + cmd_opts = self.cmd_opts + + cmd_opts.add_option( + '-w', '--wheel-dir', + dest='wheel_dir', + metavar='dir', + default=os.curdir, + help=("Build wheels into , where the default is the " + "current working directory."), + ) + cmd_opts.add_option(cmdoptions.no_binary()) + cmd_opts.add_option(cmdoptions.only_binary()) + cmd_opts.add_option(cmdoptions.prefer_binary()) + cmd_opts.add_option( + '--build-option', + dest='build_options', + metavar='options', + action='append', + help="Extra arguments to be supplied to 'setup.py bdist_wheel'.", + ) + cmd_opts.add_option(cmdoptions.no_build_isolation()) + cmd_opts.add_option(cmdoptions.constraints()) + cmd_opts.add_option(cmdoptions.editable()) + cmd_opts.add_option(cmdoptions.requirements()) + cmd_opts.add_option(cmdoptions.src()) + cmd_opts.add_option(cmdoptions.ignore_requires_python()) + cmd_opts.add_option(cmdoptions.no_deps()) + cmd_opts.add_option(cmdoptions.build_dir()) + cmd_opts.add_option(cmdoptions.progress_bar()) + + cmd_opts.add_option( + '--global-option', + dest='global_options', + action='append', + metavar='options', + help="Extra global options to be supplied to the setup.py " + "call before the 'bdist_wheel' command.") + + cmd_opts.add_option( + '--pre', + action='store_true', + default=False, + help=("Include pre-release and development versions. By default, " + "pip only finds stable versions."), + ) + + cmd_opts.add_option(cmdoptions.no_clean()) + cmd_opts.add_option(cmdoptions.require_hashes()) + + index_opts = cmdoptions.make_option_group( + cmdoptions.index_group, + self.parser, + ) + + self.parser.insert_option_group(0, index_opts) + self.parser.insert_option_group(0, cmd_opts) + + def run(self, options, args): + cmdoptions.check_install_build_global(options) + + index_urls = [options.index_url] + options.extra_index_urls + if options.no_index: + logger.debug('Ignoring indexes: %s', ','.join(index_urls)) + index_urls = [] + + if options.build_dir: + options.build_dir = os.path.abspath(options.build_dir) + + options.src_dir = os.path.abspath(options.src_dir) + + with self._build_session(options) as session: + finder = self._build_package_finder(options, session) + build_delete = (not (options.no_clean or options.build_dir)) + wheel_cache = WheelCache(options.cache_dir, options.format_control) + + with RequirementTracker() as req_tracker, TempDirectory( + options.build_dir, delete=build_delete, kind="wheel" + ) as directory: + + requirement_set = RequirementSet( + require_hashes=options.require_hashes, + ) + + try: + self.populate_requirement_set( + requirement_set, args, options, finder, session, + self.name, wheel_cache + ) + + preparer = RequirementPreparer( + build_dir=directory.path, + src_dir=options.src_dir, + download_dir=None, + wheel_download_dir=options.wheel_dir, + progress_bar=options.progress_bar, + build_isolation=options.build_isolation, + req_tracker=req_tracker, + ) + + resolver = Resolver( + preparer=preparer, + finder=finder, + session=session, + wheel_cache=wheel_cache, + use_user_site=False, + upgrade_strategy="to-satisfy-only", + force_reinstall=False, + ignore_dependencies=options.ignore_dependencies, + ignore_requires_python=options.ignore_requires_python, + ignore_installed=True, + isolated=options.isolated_mode, + ) + resolver.resolve(requirement_set) + + # build wheels + wb = WheelBuilder( + finder, preparer, wheel_cache, + build_options=options.build_options or [], + global_options=options.global_options 
or [], + no_clean=options.no_clean, + ) + wheels_built_successfully = wb.build( + requirement_set.requirements.values(), session=session, + ) + if not wheels_built_successfully: + raise CommandError( + "Failed to build one or more wheels" + ) + except PreviousBuildDirError: + options.no_clean = True + raise + finally: + if not options.no_clean: + requirement_set.cleanup_files() + wheel_cache.cleanup() diff --git a/env/lib/python3.6/site-packages/pip/_internal/compat.py b/env/lib/python3.6/site-packages/pip/_internal/compat.py new file mode 100644 index 0000000..e6c008d --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_internal/compat.py @@ -0,0 +1,235 @@ +"""Stuff that differs in different Python versions and platform +distributions.""" +from __future__ import absolute_import, division + +import codecs +import locale +import logging +import os +import shutil +import sys + +from pip._vendor.six import text_type + +try: + import ipaddress +except ImportError: + try: + from pip._vendor import ipaddress # type: ignore + except ImportError: + import ipaddr as ipaddress # type: ignore + ipaddress.ip_address = ipaddress.IPAddress + ipaddress.ip_network = ipaddress.IPNetwork + + +__all__ = [ + "ipaddress", "uses_pycache", "console_to_str", "native_str", + "get_path_uid", "stdlib_pkgs", "WINDOWS", "samefile", "get_terminal_size", +] + + +logger = logging.getLogger(__name__) + +if sys.version_info >= (3, 4): + uses_pycache = True + from importlib.util import cache_from_source +else: + import imp + + try: + cache_from_source = imp.cache_from_source # type: ignore + except AttributeError: + # does not use __pycache__ + cache_from_source = None + + uses_pycache = cache_from_source is not None + + +if sys.version_info >= (3, 5): + backslashreplace_decode = "backslashreplace" +else: + # In version 3.4 and older, backslashreplace exists + # but does not support use for decoding. + # We implement our own replace handler for this + # situation, so that we can consistently use + # backslash replacement for all versions. + def backslashreplace_decode_fn(err): + raw_bytes = (err.object[i] for i in range(err.start, err.end)) + if sys.version_info[0] == 2: + # Python 2 gave us characters - convert to numeric bytes + raw_bytes = (ord(b) for b in raw_bytes) + return u"".join(u"\\x%x" % c for c in raw_bytes), err.end + codecs.register_error( + "backslashreplace_decode", + backslashreplace_decode_fn, + ) + backslashreplace_decode = "backslashreplace_decode" + + +def console_to_str(data): + """Return a string, safe for output, of subprocess output. + + We assume the data is in the locale preferred encoding. + If it won't decode properly, we warn the user but decode as + best we can. + + We also ensure that the output can be safely written to + standard output without encoding errors. + """ + + # First, get the encoding we assume. This is the preferred + # encoding for the locale, unless that is not found, or + # it is ASCII, in which case assume UTF-8 + encoding = locale.getpreferredencoding() + if (not encoding) or codecs.lookup(encoding).name == "ascii": + encoding = "utf-8" + + # Now try to decode the data - if we fail, warn the user and + # decode with replacement. 
+ try: + s = data.decode(encoding) + except UnicodeDecodeError: + logger.warning( + "Subprocess output does not appear to be encoded as %s", + encoding, + ) + s = data.decode(encoding, errors=backslashreplace_decode) + + # Make sure we can print the output, by encoding it to the output + # encoding with replacement of unencodable characters, and then + # decoding again. + # We use stderr's encoding because it's less likely to be + # redirected and if we don't find an encoding we skip this + # step (on the assumption that output is wrapped by something + # that won't fail). + # The double getattr is to deal with the possibility that we're + # being called in a situation where sys.__stderr__ doesn't exist, + # or doesn't have an encoding attribute. Neither of these cases + # should occur in normal pip use, but there's no harm in checking + # in case people use pip in (unsupported) unusual situations. + output_encoding = getattr(getattr(sys, "__stderr__", None), + "encoding", None) + + if output_encoding: + s = s.encode(output_encoding, errors="backslashreplace") + s = s.decode(output_encoding) + + return s + + +if sys.version_info >= (3,): + def native_str(s, replace=False): + if isinstance(s, bytes): + return s.decode('utf-8', 'replace' if replace else 'strict') + return s + +else: + def native_str(s, replace=False): + # Replace is ignored -- unicode to UTF-8 can't fail + if isinstance(s, text_type): + return s.encode('utf-8') + return s + + +def get_path_uid(path): + """ + Return path's uid. + + Does not follow symlinks: + https://github.com/pypa/pip/pull/935#discussion_r5307003 + + Placed this function in compat due to differences on AIX and + Jython, that should eventually go away. + + :raises OSError: When path is a symlink or can't be read. + """ + if hasattr(os, 'O_NOFOLLOW'): + fd = os.open(path, os.O_RDONLY | os.O_NOFOLLOW) + file_uid = os.fstat(fd).st_uid + os.close(fd) + else: # AIX and Jython + # WARNING: time of check vulnerability, but best we can do w/o NOFOLLOW + if not os.path.islink(path): + # older versions of Jython don't have `os.fstat` + file_uid = os.stat(path).st_uid + else: + # raise OSError for parity with os.O_NOFOLLOW above + raise OSError( + "%s is a symlink; Will not return uid for symlinks" % path + ) + return file_uid + + +def expanduser(path): + """ + Expand ~ and ~user constructions. + + Includes a workaround for https://bugs.python.org/issue14768 + """ + expanded = os.path.expanduser(path) + if path.startswith('~/') and expanded.startswith('//'): + expanded = expanded[1:] + return expanded + + +# packages in the stdlib that may have installation metadata, but should not be +# considered 'installed'. 
this theoretically could be determined based on +# dist.location (py27:`sysconfig.get_paths()['stdlib']`, +# py26:sysconfig.get_config_vars('LIBDEST')), but fear platform variation may +# make this ineffective, so hard-coding +stdlib_pkgs = {"python", "wsgiref", "argparse"} + + +# windows detection, covers cpython and ironpython +WINDOWS = (sys.platform.startswith("win") or + (sys.platform == 'cli' and os.name == 'nt')) + + +def samefile(file1, file2): + """Provide an alternative for os.path.samefile on Windows/Python2""" + if hasattr(os.path, 'samefile'): + return os.path.samefile(file1, file2) + else: + path1 = os.path.normcase(os.path.abspath(file1)) + path2 = os.path.normcase(os.path.abspath(file2)) + return path1 == path2 + + +if hasattr(shutil, 'get_terminal_size'): + def get_terminal_size(): + """ + Returns a tuple (x, y) representing the width(x) and the height(y) + in characters of the terminal window. + """ + return tuple(shutil.get_terminal_size()) +else: + def get_terminal_size(): + """ + Returns a tuple (x, y) representing the width(x) and the height(y) + in characters of the terminal window. + """ + def ioctl_GWINSZ(fd): + try: + import fcntl + import termios + import struct + cr = struct.unpack_from( + 'hh', + fcntl.ioctl(fd, termios.TIOCGWINSZ, '12345678') + ) + except Exception: + return None + if cr == (0, 0): + return None + return cr + cr = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2) + if not cr: + try: + fd = os.open(os.ctermid(), os.O_RDONLY) + cr = ioctl_GWINSZ(fd) + os.close(fd) + except Exception: + pass + if not cr: + cr = (os.environ.get('LINES', 25), os.environ.get('COLUMNS', 80)) + return int(cr[1]), int(cr[0]) diff --git a/env/lib/python3.6/site-packages/pip/_internal/configuration.py b/env/lib/python3.6/site-packages/pip/_internal/configuration.py new file mode 100644 index 0000000..32133de --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_internal/configuration.py @@ -0,0 +1,380 @@ +"""Configuration management setup + +Some terminology: +- name + As written in config files. +- value + Value associated with a name +- key + Name combined with it's section (section.name) +- variant + A single word describing where the configuration key-value pair came from +""" + +import locale +import logging +import os + +from pip._vendor import six +from pip._vendor.six.moves import configparser + +from pip._internal.exceptions import ConfigurationError +from pip._internal.locations import ( + legacy_config_file, new_config_file, running_under_virtualenv, + site_config_files, venv_config_file, +) +from pip._internal.utils.misc import ensure_dir, enum +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import ( # noqa: F401 + Any, Dict, Iterable, List, NewType, Optional, Tuple + ) + + RawConfigParser = configparser.RawConfigParser # Shorthand + Kind = NewType("Kind", str) + +logger = logging.getLogger(__name__) + + +# NOTE: Maybe use the optionx attribute to normalize keynames. +def _normalize_name(name): + # type: (str) -> str + """Make a name consistent regardless of source (environment or file) + """ + name = name.lower().replace('_', '-') + if name.startswith('--'): + name = name[2:] # only prefer long opts + return name + + +def _disassemble_key(name): + # type: (str) -> List[str] + return name.split(".", 1) + + +# The kinds of configurations there are. 
+kinds = enum( + USER="user", # User Specific + GLOBAL="global", # System Wide + VENV="venv", # Virtual Environment Specific + ENV="env", # from PIP_CONFIG_FILE + ENV_VAR="env-var", # from Environment Variables +) + + +class Configuration(object): + """Handles management of configuration. + + Provides an interface to accessing and managing configuration files. + + This class converts provides an API that takes "section.key-name" style + keys and stores the value associated with it as "key-name" under the + section "section". + + This allows for a clean interface wherein the both the section and the + key-name are preserved in an easy to manage form in the configuration files + and the data stored is also nice. + """ + + def __init__(self, isolated, load_only=None): + # type: (bool, Kind) -> None + super(Configuration, self).__init__() + + _valid_load_only = [kinds.USER, kinds.GLOBAL, kinds.VENV, None] + if load_only not in _valid_load_only: + raise ConfigurationError( + "Got invalid value for load_only - should be one of {}".format( + ", ".join(map(repr, _valid_load_only[:-1])) + ) + ) + self.isolated = isolated # type: bool + self.load_only = load_only # type: Optional[Kind] + + # The order here determines the override order. + self._override_order = [ + kinds.GLOBAL, kinds.USER, kinds.VENV, kinds.ENV, kinds.ENV_VAR + ] + + self._ignore_env_names = ["version", "help"] + + # Because we keep track of where we got the data from + self._parsers = { + variant: [] for variant in self._override_order + } # type: Dict[Kind, List[Tuple[str, RawConfigParser]]] + self._config = { + variant: {} for variant in self._override_order + } # type: Dict[Kind, Dict[str, Any]] + self._modified_parsers = [] # type: List[Tuple[str, RawConfigParser]] + + def load(self): + # type: () -> None + """Loads configuration from configuration files and environment + """ + self._load_config_files() + if not self.isolated: + self._load_environment_vars() + + def get_file_to_edit(self): + # type: () -> Optional[str] + """Returns the file with highest priority in configuration + """ + assert self.load_only is not None, \ + "Need to be specified a file to be editing" + + try: + return self._get_parser_to_modify()[0] + except IndexError: + return None + + def items(self): + # type: () -> Iterable[Tuple[str, Any]] + """Returns key-value pairs like dict.items() representing the loaded + configuration + """ + return self._dictionary.items() + + def get_value(self, key): + # type: (str) -> Any + """Get a value from the configuration. + """ + try: + return self._dictionary[key] + except KeyError: + raise ConfigurationError("No such key - {}".format(key)) + + def set_value(self, key, value): + # type: (str, Any) -> None + """Modify a value in the configuration. + """ + self._ensure_have_load_only() + + fname, parser = self._get_parser_to_modify() + + if parser is not None: + section, name = _disassemble_key(key) + + # Modify the parser and the configuration + if not parser.has_section(section): + parser.add_section(section) + parser.set(section, name, value) + + self._config[self.load_only][key] = value + self._mark_as_modified(fname, parser) + + def unset_value(self, key): + # type: (str) -> None + """Unset a value in the configuration. 
+ """ + self._ensure_have_load_only() + + if key not in self._config[self.load_only]: + raise ConfigurationError("No such key - {}".format(key)) + + fname, parser = self._get_parser_to_modify() + + if parser is not None: + section, name = _disassemble_key(key) + + # Remove the key in the parser + modified_something = False + if parser.has_section(section): + # Returns whether the option was removed or not + modified_something = parser.remove_option(section, name) + + if modified_something: + # name removed from parser, section may now be empty + section_iter = iter(parser.items(section)) + try: + val = six.next(section_iter) + except StopIteration: + val = None + + if val is None: + parser.remove_section(section) + + self._mark_as_modified(fname, parser) + else: + raise ConfigurationError( + "Fatal Internal error [id=1]. Please report as a bug." + ) + + del self._config[self.load_only][key] + + def save(self): + # type: () -> None + """Save the currentin-memory state. + """ + self._ensure_have_load_only() + + for fname, parser in self._modified_parsers: + logger.info("Writing to %s", fname) + + # Ensure directory exists. + ensure_dir(os.path.dirname(fname)) + + with open(fname, "w") as f: + parser.write(f) # type: ignore + + # + # Private routines + # + + def _ensure_have_load_only(self): + # type: () -> None + if self.load_only is None: + raise ConfigurationError("Needed a specific file to be modifying.") + logger.debug("Will be working with %s variant only", self.load_only) + + @property + def _dictionary(self): + # type: () -> Dict[str, Any] + """A dictionary representing the loaded configuration. + """ + # NOTE: Dictionaries are not populated if not loaded. So, conditionals + # are not needed here. + retval = {} + + for variant in self._override_order: + retval.update(self._config[variant]) + + return retval + + def _load_config_files(self): + # type: () -> None + """Loads configuration from configuration files + """ + config_files = dict(self._iter_config_files()) + if config_files[kinds.ENV][0:1] == [os.devnull]: + logger.debug( + "Skipping loading configuration files due to " + "environment's PIP_CONFIG_FILE being os.devnull" + ) + return + + for variant, files in config_files.items(): + for fname in files: + # If there's specific variant set in `load_only`, load only + # that variant, not the others. + if self.load_only is not None and variant != self.load_only: + logger.debug( + "Skipping file '%s' (variant: %s)", fname, variant + ) + continue + + parser = self._load_file(variant, fname) + + # Keeping track of the parsers used + self._parsers[variant].append((fname, parser)) + + def _load_file(self, variant, fname): + # type: (Kind, str) -> RawConfigParser + logger.debug("For variant '%s', will try loading '%s'", variant, fname) + parser = self._construct_parser(fname) + + for section in parser.sections(): + items = parser.items(section) + self._config[variant].update(self._normalized_keys(section, items)) + + return parser + + def _construct_parser(self, fname): + # type: (str) -> RawConfigParser + parser = configparser.RawConfigParser() + # If there is no such file, don't bother reading it but create the + # parser anyway, to hold the data. + # Doing this is useful when modifying and saving files, where we don't + # need to construct a parser. 
+ if os.path.exists(fname): + try: + parser.read(fname) + except UnicodeDecodeError: + raise ConfigurationError(( + "ERROR: " + "Configuration file contains invalid %s characters.\n" + "Please fix your configuration, located at %s\n" + ) % (locale.getpreferredencoding(False), fname)) + return parser + + def _load_environment_vars(self): + # type: () -> None + """Loads configuration from environment variables + """ + self._config[kinds.ENV_VAR].update( + self._normalized_keys(":env:", self._get_environ_vars()) + ) + + def _normalized_keys(self, section, items): + # type: (str, Iterable[Tuple[str, Any]]) -> Dict[str, Any] + """Normalizes items to construct a dictionary with normalized keys. + + This routine is where the names become keys and are made the same + regardless of source - configuration files or environment. + """ + normalized = {} + for name, val in items: + key = section + "." + _normalize_name(name) + normalized[key] = val + return normalized + + def _get_environ_vars(self): + # type: () -> Iterable[Tuple[str, str]] + """Returns a generator with all environmental vars with prefix PIP_""" + for key, val in os.environ.items(): + should_be_yielded = ( + key.startswith("PIP_") and + key[4:].lower() not in self._ignore_env_names + ) + if should_be_yielded: + yield key[4:].lower(), val + + # XXX: This is patched in the tests. + def _iter_config_files(self): + # type: () -> Iterable[Tuple[Kind, List[str]]] + """Yields variant and configuration files associated with it. + + This should be treated like items of a dictionary. + """ + # SMELL: Move the conditions out of this function + + # environment variables have the lowest priority + config_file = os.environ.get('PIP_CONFIG_FILE', None) + if config_file is not None: + yield kinds.ENV, [config_file] + else: + yield kinds.ENV, [] + + # at the base we have any global configuration + yield kinds.GLOBAL, list(site_config_files) + + # per-user configuration next + should_load_user_config = not self.isolated and not ( + config_file and os.path.exists(config_file) + ) + if should_load_user_config: + # The legacy config file is overridden by the new config file + yield kinds.USER, [legacy_config_file, new_config_file] + + # finally virtualenv configuration first trumping others + if running_under_virtualenv(): + yield kinds.VENV, [venv_config_file] + + def _get_parser_to_modify(self): + # type: () -> Tuple[str, RawConfigParser] + # Determine which parser to modify + parsers = self._parsers[self.load_only] + if not parsers: + # This should not happen if everything works correctly. + raise ConfigurationError( + "Fatal Internal error [id=2]. Please report as a bug." + ) + + # Use the highest priority parser. + return parsers[-1] + + # XXX: This is patched in the tests. 
+ def _mark_as_modified(self, fname, parser): + # type: (str, RawConfigParser) -> None + file_parser_tuple = (fname, parser) + if file_parser_tuple not in self._modified_parsers: + self._modified_parsers.append(file_parser_tuple) diff --git a/env/lib/python3.6/site-packages/pip/_internal/download.py b/env/lib/python3.6/site-packages/pip/_internal/download.py new file mode 100644 index 0000000..96f3b65 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_internal/download.py @@ -0,0 +1,921 @@ +from __future__ import absolute_import + +import cgi +import email.utils +import getpass +import json +import logging +import mimetypes +import os +import platform +import re +import shutil +import sys + +from pip._vendor import requests, six, urllib3 +from pip._vendor.cachecontrol import CacheControlAdapter +from pip._vendor.cachecontrol.caches import FileCache +from pip._vendor.lockfile import LockError +from pip._vendor.requests.adapters import BaseAdapter, HTTPAdapter +from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth +from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response +from pip._vendor.requests.structures import CaseInsensitiveDict +from pip._vendor.requests.utils import get_netrc_auth +# NOTE: XMLRPC Client is not annotated in typeshed as on 2017-07-17, which is +# why we ignore the type on this import +from pip._vendor.six.moves import xmlrpc_client # type: ignore +from pip._vendor.six.moves.urllib import parse as urllib_parse +from pip._vendor.six.moves.urllib import request as urllib_request +from pip._vendor.six.moves.urllib.parse import unquote as urllib_unquote +from pip._vendor.urllib3.util import IS_PYOPENSSL + +import pip +from pip._internal.exceptions import HashMismatch, InstallationError +from pip._internal.locations import write_delete_marker_file +from pip._internal.models.index import PyPI +from pip._internal.utils.encoding import auto_decode +from pip._internal.utils.filesystem import check_path_owner +from pip._internal.utils.glibc import libc_ver +from pip._internal.utils.logging import indent_log +from pip._internal.utils.misc import ( + ARCHIVE_EXTENSIONS, ask_path_exists, backup_dir, call_subprocess, consume, + display_path, format_size, get_installed_version, rmtree, splitext, + unpack_file, +) +from pip._internal.utils.setuptools_build import SETUPTOOLS_SHIM +from pip._internal.utils.temp_dir import TempDirectory +from pip._internal.utils.ui import DownloadProgressProvider +from pip._internal.vcs import vcs + +try: + import ssl # noqa +except ImportError: + ssl = None + +HAS_TLS = (ssl is not None) or IS_PYOPENSSL + +__all__ = ['get_file_content', + 'is_url', 'url_to_path', 'path_to_url', + 'is_archive_file', 'unpack_vcs_link', + 'unpack_file_url', 'is_vcs_url', 'is_file_url', + 'unpack_http_url', 'unpack_url'] + + +logger = logging.getLogger(__name__) + + +def user_agent(): + """ + Return a string representing the user agent. 
+ """ + data = { + "installer": {"name": "pip", "version": pip.__version__}, + "python": platform.python_version(), + "implementation": { + "name": platform.python_implementation(), + }, + } + + if data["implementation"]["name"] == 'CPython': + data["implementation"]["version"] = platform.python_version() + elif data["implementation"]["name"] == 'PyPy': + if sys.pypy_version_info.releaselevel == 'final': + pypy_version_info = sys.pypy_version_info[:3] + else: + pypy_version_info = sys.pypy_version_info + data["implementation"]["version"] = ".".join( + [str(x) for x in pypy_version_info] + ) + elif data["implementation"]["name"] == 'Jython': + # Complete Guess + data["implementation"]["version"] = platform.python_version() + elif data["implementation"]["name"] == 'IronPython': + # Complete Guess + data["implementation"]["version"] = platform.python_version() + + if sys.platform.startswith("linux"): + from pip._vendor import distro + distro_infos = dict(filter( + lambda x: x[1], + zip(["name", "version", "id"], distro.linux_distribution()), + )) + libc = dict(filter( + lambda x: x[1], + zip(["lib", "version"], libc_ver()), + )) + if libc: + distro_infos["libc"] = libc + if distro_infos: + data["distro"] = distro_infos + + if sys.platform.startswith("darwin") and platform.mac_ver()[0]: + data["distro"] = {"name": "macOS", "version": platform.mac_ver()[0]} + + if platform.system(): + data.setdefault("system", {})["name"] = platform.system() + + if platform.release(): + data.setdefault("system", {})["release"] = platform.release() + + if platform.machine(): + data["cpu"] = platform.machine() + + if HAS_TLS: + data["openssl_version"] = ssl.OPENSSL_VERSION + + setuptools_version = get_installed_version("setuptools") + if setuptools_version is not None: + data["setuptools_version"] = setuptools_version + + return "{data[installer][name]}/{data[installer][version]} {json}".format( + data=data, + json=json.dumps(data, separators=(",", ":"), sort_keys=True), + ) + + +class MultiDomainBasicAuth(AuthBase): + + def __init__(self, prompting=True): + self.prompting = prompting + self.passwords = {} + + def __call__(self, req): + parsed = urllib_parse.urlparse(req.url) + + # Get the netloc without any embedded credentials + netloc = parsed.netloc.rsplit("@", 1)[-1] + + # Set the url of the request to the url without any credentials + req.url = urllib_parse.urlunparse(parsed[:1] + (netloc,) + parsed[2:]) + + # Use any stored credentials that we have for this netloc + username, password = self.passwords.get(netloc, (None, None)) + + # Extract credentials embedded in the url if we have none stored + if username is None: + username, password = self.parse_credentials(parsed.netloc) + + # Get creds from netrc if we still don't have them + if username is None and password is None: + netrc_auth = get_netrc_auth(req.url) + username, password = netrc_auth if netrc_auth else (None, None) + + if username or password: + # Store the username and password + self.passwords[netloc] = (username, password) + + # Send the basic auth with this request + req = HTTPBasicAuth(username or "", password or "")(req) + + # Attach a hook to handle 401 responses + req.register_hook("response", self.handle_401) + + return req + + def handle_401(self, resp, **kwargs): + # We only care about 401 responses, anything else we want to just + # pass through the actual response + if resp.status_code != 401: + return resp + + # We are not able to prompt the user so simply return the response + if not self.prompting: + return resp + + parsed = 
urllib_parse.urlparse(resp.url) + + # Prompt the user for a new username and password + username = six.moves.input("User for %s: " % parsed.netloc) + password = getpass.getpass("Password: ") + + # Store the new username and password to use for future requests + if username or password: + self.passwords[parsed.netloc] = (username, password) + + # Consume content and release the original connection to allow our new + # request to reuse the same one. + resp.content + resp.raw.release_conn() + + # Add our new username and password to the request + req = HTTPBasicAuth(username or "", password or "")(resp.request) + + # Send our new request + new_resp = resp.connection.send(req, **kwargs) + new_resp.history.append(resp) + + return new_resp + + def parse_credentials(self, netloc): + if "@" in netloc: + userinfo = netloc.rsplit("@", 1)[0] + if ":" in userinfo: + user, pwd = userinfo.split(":", 1) + return (urllib_unquote(user), urllib_unquote(pwd)) + return urllib_unquote(userinfo), None + return None, None + + +class LocalFSAdapter(BaseAdapter): + + def send(self, request, stream=None, timeout=None, verify=None, cert=None, + proxies=None): + pathname = url_to_path(request.url) + + resp = Response() + resp.status_code = 200 + resp.url = request.url + + try: + stats = os.stat(pathname) + except OSError as exc: + resp.status_code = 404 + resp.raw = exc + else: + modified = email.utils.formatdate(stats.st_mtime, usegmt=True) + content_type = mimetypes.guess_type(pathname)[0] or "text/plain" + resp.headers = CaseInsensitiveDict({ + "Content-Type": content_type, + "Content-Length": stats.st_size, + "Last-Modified": modified, + }) + + resp.raw = open(pathname, "rb") + resp.close = resp.raw.close + + return resp + + def close(self): + pass + + +class SafeFileCache(FileCache): + """ + A file based cache which is safe to use even when the target directory may + not be accessible or writable. + """ + + def __init__(self, *args, **kwargs): + super(SafeFileCache, self).__init__(*args, **kwargs) + + # Check to ensure that the directory containing our cache directory + # is owned by the user current executing pip. If it does not exist + # we will check the parent directory until we find one that does exist. + # If it is not owned by the user executing pip then we will disable + # the cache and log a warning. + if not check_path_owner(self.directory): + logger.warning( + "The directory '%s' or its parent directory is not owned by " + "the current user and the cache has been disabled. Please " + "check the permissions and owner of that directory. If " + "executing pip with sudo, you may want sudo's -H flag.", + self.directory, + ) + + # Set our directory to None to disable the Cache + self.directory = None + + def get(self, *args, **kwargs): + # If we don't have a directory, then the cache should be a no-op. + if self.directory is None: + return + + try: + return super(SafeFileCache, self).get(*args, **kwargs) + except (LockError, OSError, IOError): + # We intentionally silence this error, if we can't access the cache + # then we can just skip caching and process the request as if + # caching wasn't enabled. + pass + + def set(self, *args, **kwargs): + # If we don't have a directory, then the cache should be a no-op. 
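+        # (self.directory is set to None in __init__ above when the cache
+        # location is not owned by the current user, which disables caching.)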
+ if self.directory is None: + return + + try: + return super(SafeFileCache, self).set(*args, **kwargs) + except (LockError, OSError, IOError): + # We intentionally silence this error, if we can't access the cache + # then we can just skip caching and process the request as if + # caching wasn't enabled. + pass + + def delete(self, *args, **kwargs): + # If we don't have a directory, then the cache should be a no-op. + if self.directory is None: + return + + try: + return super(SafeFileCache, self).delete(*args, **kwargs) + except (LockError, OSError, IOError): + # We intentionally silence this error, if we can't access the cache + # then we can just skip caching and process the request as if + # caching wasn't enabled. + pass + + +class InsecureHTTPAdapter(HTTPAdapter): + + def cert_verify(self, conn, url, verify, cert): + conn.cert_reqs = 'CERT_NONE' + conn.ca_certs = None + + +class PipSession(requests.Session): + + timeout = None + + def __init__(self, *args, **kwargs): + retries = kwargs.pop("retries", 0) + cache = kwargs.pop("cache", None) + insecure_hosts = kwargs.pop("insecure_hosts", []) + + super(PipSession, self).__init__(*args, **kwargs) + + # Attach our User Agent to the request + self.headers["User-Agent"] = user_agent() + + # Attach our Authentication handler to the session + self.auth = MultiDomainBasicAuth() + + # Create our urllib3.Retry instance which will allow us to customize + # how we handle retries. + retries = urllib3.Retry( + # Set the total number of retries that a particular request can + # have. + total=retries, + + # A 503 error from PyPI typically means that the Fastly -> Origin + # connection got interrupted in some way. A 503 error in general + # is typically considered a transient error so we'll go ahead and + # retry it. + # A 500 may indicate transient error in Amazon S3 + # A 520 or 527 - may indicate transient error in CloudFlare + status_forcelist=[500, 503, 520, 527], + + # Add a small amount of back off between failed requests in + # order to prevent hammering the service. + backoff_factor=0.25, + ) + + # We want to _only_ cache responses on securely fetched origins. We do + # this because we can't validate the response of an insecurely fetched + # origin, and we don't want someone to be able to poison the cache and + # require manual eviction from the cache to fix it. + if cache: + secure_adapter = CacheControlAdapter( + cache=SafeFileCache(cache, use_dir_lock=True), + max_retries=retries, + ) + else: + secure_adapter = HTTPAdapter(max_retries=retries) + + # Our Insecure HTTPAdapter disables HTTPS validation. It does not + # support caching (see above) so we'll use it for all http:// URLs as + # well as any https:// host that we've marked as ignoring TLS errors + # for. + insecure_adapter = InsecureHTTPAdapter(max_retries=retries) + + self.mount("https://", secure_adapter) + self.mount("http://", insecure_adapter) + + # Enable file:// urls + self.mount("file://", LocalFSAdapter()) + + # We want to use a non-validating adapter for any requests which are + # deemed insecure. + for host in insecure_hosts: + self.mount("https://{}/".format(host), insecure_adapter) + + def request(self, method, url, *args, **kwargs): + # Allow setting a default timeout on a session + kwargs.setdefault("timeout", self.timeout) + + # Dispatch the actual request + return super(PipSession, self).request(method, url, *args, **kwargs) + + +def get_file_content(url, comes_from=None, session=None): + """Gets the content of a file; it may be a filename, file: URL, or + http: URL. 
Returns (location, content). Content is unicode. + + :param url: File path or url. + :param comes_from: Origin description of requirements. + :param session: Instance of pip.download.PipSession. + """ + if session is None: + raise TypeError( + "get_file_content() missing 1 required keyword argument: 'session'" + ) + + match = _scheme_re.search(url) + if match: + scheme = match.group(1).lower() + if (scheme == 'file' and comes_from and + comes_from.startswith('http')): + raise InstallationError( + 'Requirements file %s references URL %s, which is local' + % (comes_from, url)) + if scheme == 'file': + path = url.split(':', 1)[1] + path = path.replace('\\', '/') + match = _url_slash_drive_re.match(path) + if match: + path = match.group(1) + ':' + path.split('|', 1)[1] + path = urllib_parse.unquote(path) + if path.startswith('/'): + path = '/' + path.lstrip('/') + url = path + else: + # FIXME: catch some errors + resp = session.get(url) + resp.raise_for_status() + return resp.url, resp.text + try: + with open(url, 'rb') as f: + content = auto_decode(f.read()) + except IOError as exc: + raise InstallationError( + 'Could not open requirements file: %s' % str(exc) + ) + return url, content + + +_scheme_re = re.compile(r'^(http|https|file):', re.I) +_url_slash_drive_re = re.compile(r'/*([a-z])\|', re.I) + + +def is_url(name): + """Returns true if the name looks like a URL""" + if ':' not in name: + return False + scheme = name.split(':', 1)[0].lower() + return scheme in ['http', 'https', 'file', 'ftp'] + vcs.all_schemes + + +def url_to_path(url): + """ + Convert a file: URL to a path. + """ + assert url.startswith('file:'), ( + "You can only turn file: urls into filenames (not %r)" % url) + + _, netloc, path, _, _ = urllib_parse.urlsplit(url) + + # if we have a UNC path, prepend UNC share notation + if netloc: + netloc = '\\\\' + netloc + + path = urllib_request.url2pathname(netloc + path) + return path + + +def path_to_url(path): + """ + Convert a path to a file: URL. The path will be made absolute and have + quoted path parts. + """ + path = os.path.normpath(os.path.abspath(path)) + url = urllib_parse.urljoin('file:', urllib_request.pathname2url(path)) + return url + + +def is_archive_file(name): + """Return True if `name` is a considered as an archive file.""" + ext = splitext(name)[1].lower() + if ext in ARCHIVE_EXTENSIONS: + return True + return False + + +def unpack_vcs_link(link, location): + vcs_backend = _get_used_vcs_backend(link) + vcs_backend.unpack(location) + + +def _get_used_vcs_backend(link): + for backend in vcs.backends: + if link.scheme in backend.schemes: + vcs_backend = backend(link.url) + return vcs_backend + + +def is_vcs_url(link): + return bool(_get_used_vcs_backend(link)) + + +def is_file_url(link): + return link.url.lower().startswith('file:') + + +def is_dir_url(link): + """Return whether a file:// Link points to a directory. + + ``link`` must not have any other scheme but file://. Call is_file_url() + first. 
+ + """ + link_path = url_to_path(link.url_without_fragment) + return os.path.isdir(link_path) + + +def _progress_indicator(iterable, *args, **kwargs): + return iterable + + +def _download_url(resp, link, content_file, hashes, progress_bar): + try: + total_length = int(resp.headers['content-length']) + except (ValueError, KeyError, TypeError): + total_length = 0 + + cached_resp = getattr(resp, "from_cache", False) + if logger.getEffectiveLevel() > logging.INFO: + show_progress = False + elif cached_resp: + show_progress = False + elif total_length > (40 * 1000): + show_progress = True + elif not total_length: + show_progress = True + else: + show_progress = False + + show_url = link.show_url + + def resp_read(chunk_size): + try: + # Special case for urllib3. + for chunk in resp.raw.stream( + chunk_size, + # We use decode_content=False here because we don't + # want urllib3 to mess with the raw bytes we get + # from the server. If we decompress inside of + # urllib3 then we cannot verify the checksum + # because the checksum will be of the compressed + # file. This breakage will only occur if the + # server adds a Content-Encoding header, which + # depends on how the server was configured: + # - Some servers will notice that the file isn't a + # compressible file and will leave the file alone + # and with an empty Content-Encoding + # - Some servers will notice that the file is + # already compressed and will leave the file + # alone and will add a Content-Encoding: gzip + # header + # - Some servers won't notice anything at all and + # will take a file that's already been compressed + # and compress it again and set the + # Content-Encoding: gzip header + # + # By setting this not to decode automatically we + # hope to eliminate problems with the second case. + decode_content=False): + yield chunk + except AttributeError: + # Standard file-like object. + while True: + chunk = resp.raw.read(chunk_size) + if not chunk: + break + yield chunk + + def written_chunks(chunks): + for chunk in chunks: + content_file.write(chunk) + yield chunk + + progress_indicator = _progress_indicator + + if link.netloc == PyPI.netloc: + url = show_url + else: + url = link.url_without_fragment + + if show_progress: # We don't show progress on cached responses + progress_indicator = DownloadProgressProvider(progress_bar, + max=total_length) + if total_length: + logger.info("Downloading %s (%s)", url, format_size(total_length)) + else: + logger.info("Downloading %s", url) + elif cached_resp: + logger.info("Using cached %s", url) + else: + logger.info("Downloading %s", url) + + logger.debug('Downloading from URL %s', link) + + downloaded_chunks = written_chunks( + progress_indicator( + resp_read(CONTENT_CHUNK_SIZE), + CONTENT_CHUNK_SIZE + ) + ) + if hashes: + hashes.check_against_chunks(downloaded_chunks) + else: + consume(downloaded_chunks) + + +def _copy_file(filename, location, link): + copy = True + download_location = os.path.join(location, link.filename) + if os.path.exists(download_location): + response = ask_path_exists( + 'The file %s exists. 
(i)gnore, (w)ipe, (b)ackup, (a)abort' % + display_path(download_location), ('i', 'w', 'b', 'a')) + if response == 'i': + copy = False + elif response == 'w': + logger.warning('Deleting %s', display_path(download_location)) + os.remove(download_location) + elif response == 'b': + dest_file = backup_dir(download_location) + logger.warning( + 'Backing up %s to %s', + display_path(download_location), + display_path(dest_file), + ) + shutil.move(download_location, dest_file) + elif response == 'a': + sys.exit(-1) + if copy: + shutil.copy(filename, download_location) + logger.info('Saved %s', display_path(download_location)) + + +def unpack_http_url(link, location, download_dir=None, + session=None, hashes=None, progress_bar="on"): + if session is None: + raise TypeError( + "unpack_http_url() missing 1 required keyword argument: 'session'" + ) + + with TempDirectory(kind="unpack") as temp_dir: + # If a download dir is specified, is the file already downloaded there? + already_downloaded_path = None + if download_dir: + already_downloaded_path = _check_download_dir(link, + download_dir, + hashes) + + if already_downloaded_path: + from_path = already_downloaded_path + content_type = mimetypes.guess_type(from_path)[0] + else: + # let's download to a tmp dir + from_path, content_type = _download_http_url(link, + session, + temp_dir.path, + hashes, + progress_bar) + + # unpack the archive to the build dir location. even when only + # downloading archives, they have to be unpacked to parse dependencies + unpack_file(from_path, location, content_type, link) + + # a download dir is specified; let's copy the archive there + if download_dir and not already_downloaded_path: + _copy_file(from_path, download_dir, link) + + if not already_downloaded_path: + os.unlink(from_path) + + +def unpack_file_url(link, location, download_dir=None, hashes=None): + """Unpack link into location. + + If download_dir is provided and link points to a file, make a copy + of the link file inside download_dir. + """ + link_path = url_to_path(link.url_without_fragment) + + # If it's a url to a local directory + if is_dir_url(link): + if os.path.isdir(location): + rmtree(location) + shutil.copytree(link_path, location, symlinks=True) + if download_dir: + logger.info('Link is a directory, ignoring download_dir') + return + + # If --require-hashes is off, `hashes` is either empty, the + # link's embedded hash, or MissingHashes; it is required to + # match. If --require-hashes is on, we are satisfied by any + # hash in `hashes` matching: a URL-based or an option-based + # one; no internet-sourced hash will be in `hashes`. + if hashes: + hashes.check_against_path(link_path) + + # If a download dir is specified, is the file already there and valid? + already_downloaded_path = None + if download_dir: + already_downloaded_path = _check_download_dir(link, + download_dir, + hashes) + + if already_downloaded_path: + from_path = already_downloaded_path + else: + from_path = link_path + + content_type = mimetypes.guess_type(from_path)[0] + + # unpack the archive to the build dir location. even when only downloading + # archives, they have to be unpacked to parse dependencies + unpack_file(from_path, location, content_type, link) + + # a download dir is specified and not already downloaded + if download_dir and not already_downloaded_path: + _copy_file(from_path, download_dir, link) + + +def _copy_dist_from_dir(link_path, location): + """Copy distribution files in `link_path` to `location`. 
+ + Invoked when user requests to install a local directory. E.g.: + + pip install . + pip install ~/dev/git-repos/python-prompt-toolkit + + """ + + # Note: This is currently VERY SLOW if you have a lot of data in the + # directory, because it copies everything with `shutil.copytree`. + # What it should really do is build an sdist and install that. + # See https://github.com/pypa/pip/issues/2195 + + if os.path.isdir(location): + rmtree(location) + + # build an sdist + setup_py = 'setup.py' + sdist_args = [sys.executable] + sdist_args.append('-c') + sdist_args.append(SETUPTOOLS_SHIM % setup_py) + sdist_args.append('sdist') + sdist_args += ['--dist-dir', location] + logger.info('Running setup.py sdist for %s', link_path) + + with indent_log(): + call_subprocess(sdist_args, cwd=link_path, show_stdout=False) + + # unpack sdist into `location` + sdist = os.path.join(location, os.listdir(location)[0]) + logger.info('Unpacking sdist %s into %s', sdist, location) + unpack_file(sdist, location, content_type=None, link=None) + + +class PipXmlrpcTransport(xmlrpc_client.Transport): + """Provide a `xmlrpclib.Transport` implementation via a `PipSession` + object. + """ + + def __init__(self, index_url, session, use_datetime=False): + xmlrpc_client.Transport.__init__(self, use_datetime) + index_parts = urllib_parse.urlparse(index_url) + self._scheme = index_parts.scheme + self._session = session + + def request(self, host, handler, request_body, verbose=False): + parts = (self._scheme, host, handler, None, None, None) + url = urllib_parse.urlunparse(parts) + try: + headers = {'Content-Type': 'text/xml'} + response = self._session.post(url, data=request_body, + headers=headers, stream=True) + response.raise_for_status() + self.verbose = verbose + return self.parse_response(response.raw) + except requests.HTTPError as exc: + logger.critical( + "HTTP error %s while getting %s", + exc.response.status_code, url, + ) + raise + + +def unpack_url(link, location, download_dir=None, + only_download=False, session=None, hashes=None, + progress_bar="on"): + """Unpack link. + If link is a VCS link: + if only_download, export into download_dir and ignore location + else unpack into location + for other types of link: + - unpack into location + - if download_dir, copy the file into download_dir + - if only_download, mark location for deletion + + :param hashes: A Hashes object, one of whose embedded hashes must match, + or HashMismatch will be raised. If the Hashes is empty, no matches are + required, and unhashable types of requirements (like VCS ones, which + would ordinarily raise HashUnsupported) are allowed. + """ + # non-editable vcs urls + if is_vcs_url(link): + unpack_vcs_link(link, location) + + # file urls + elif is_file_url(link): + unpack_file_url(link, location, download_dir, hashes=hashes) + + # http urls + else: + if session is None: + session = PipSession() + + unpack_http_url( + link, + location, + download_dir, + session, + hashes=hashes, + progress_bar=progress_bar + ) + if only_download: + write_delete_marker_file(location) + + +def _download_http_url(link, session, temp_dir, hashes, progress_bar): + """Download link url into temp_dir using provided session""" + target_url = link.url.split('#', 1)[0] + try: + resp = session.get( + target_url, + # We use Accept-Encoding: identity here because requests + # defaults to accepting compressed responses. This breaks in + # a variety of ways depending on how the server is configured. 
+ # - Some servers will notice that the file isn't a compressible + # file and will leave the file alone and with an empty + # Content-Encoding + # - Some servers will notice that the file is already + # compressed and will leave the file alone and will add a + # Content-Encoding: gzip header + # - Some servers won't notice anything at all and will take + # a file that's already been compressed and compress it again + # and set the Content-Encoding: gzip header + # By setting this to request only the identity encoding We're + # hoping to eliminate the third case. Hopefully there does not + # exist a server which when given a file will notice it is + # already compressed and that you're not asking for a + # compressed file and will then decompress it before sending + # because if that's the case I don't think it'll ever be + # possible to make this work. + headers={"Accept-Encoding": "identity"}, + stream=True, + ) + resp.raise_for_status() + except requests.HTTPError as exc: + logger.critical( + "HTTP error %s while getting %s", exc.response.status_code, link, + ) + raise + + content_type = resp.headers.get('content-type', '') + filename = link.filename # fallback + # Have a look at the Content-Disposition header for a better guess + content_disposition = resp.headers.get('content-disposition') + if content_disposition: + type, params = cgi.parse_header(content_disposition) + # We use ``or`` here because we don't want to use an "empty" value + # from the filename param. + filename = params.get('filename') or filename + ext = splitext(filename)[1] + if not ext: + ext = mimetypes.guess_extension(content_type) + if ext: + filename += ext + if not ext and link.url != resp.url: + ext = os.path.splitext(resp.url)[1] + if ext: + filename += ext + file_path = os.path.join(temp_dir, filename) + with open(file_path, 'wb') as content_file: + _download_url(resp, link, content_file, hashes, progress_bar) + return file_path, content_type + + +def _check_download_dir(link, download_dir, hashes): + """ Check download_dir for previously downloaded file with correct hash + If a correct file is found return its path else None + """ + download_path = os.path.join(download_dir, link.filename) + if os.path.exists(download_path): + # If already downloaded, does its hash match? + logger.info('File was already downloaded %s', download_path) + if hashes: + try: + hashes.check_against_path(download_path) + except HashMismatch: + logger.warning( + 'Previously-downloaded file %s has bad hash. 
' + 'Re-downloading.', + download_path + ) + os.unlink(download_path) + return None + return download_path + return None diff --git a/env/lib/python3.6/site-packages/pip/_internal/exceptions.py b/env/lib/python3.6/site-packages/pip/_internal/exceptions.py new file mode 100644 index 0000000..ad6f412 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_internal/exceptions.py @@ -0,0 +1,249 @@ +"""Exceptions used throughout package""" +from __future__ import absolute_import + +from itertools import chain, groupby, repeat + +from pip._vendor.six import iteritems + + +class PipError(Exception): + """Base pip exception""" + + +class ConfigurationError(PipError): + """General exception in configuration""" + + +class InstallationError(PipError): + """General exception during installation""" + + +class UninstallationError(PipError): + """General exception during uninstallation""" + + +class DistributionNotFound(InstallationError): + """Raised when a distribution cannot be found to satisfy a requirement""" + + +class RequirementsFileParseError(InstallationError): + """Raised when a general error occurs parsing a requirements file line.""" + + +class BestVersionAlreadyInstalled(PipError): + """Raised when the most up-to-date version of a package is already + installed.""" + + +class BadCommand(PipError): + """Raised when virtualenv or a command is not found""" + + +class CommandError(PipError): + """Raised when there is an error in command-line arguments""" + + +class PreviousBuildDirError(PipError): + """Raised when there's a previous conflicting build directory""" + + +class InvalidWheelFilename(InstallationError): + """Invalid wheel filename.""" + + +class UnsupportedWheel(InstallationError): + """Unsupported wheel.""" + + +class HashErrors(InstallationError): + """Multiple HashError instances rolled into one for reporting""" + + def __init__(self): + self.errors = [] + + def append(self, error): + self.errors.append(error) + + def __str__(self): + lines = [] + self.errors.sort(key=lambda e: e.order) + for cls, errors_of_cls in groupby(self.errors, lambda e: e.__class__): + lines.append(cls.head) + lines.extend(e.body() for e in errors_of_cls) + if lines: + return '\n'.join(lines) + + def __nonzero__(self): + return bool(self.errors) + + def __bool__(self): + return self.__nonzero__() + + +class HashError(InstallationError): + """ + A failure to verify a package against known-good hashes + + :cvar order: An int sorting hash exception classes by difficulty of + recovery (lower being harder), so the user doesn't bother fretting + about unpinned packages when he has deeper issues, like VCS + dependencies, to deal with. Also keeps error reports in a + deterministic order. + :cvar head: A section heading for display above potentially many + exceptions of this kind + :ivar req: The InstallRequirement that triggered this error. This is + pasted on after the exception is instantiated, because it's not + typically available earlier. + + """ + req = None + head = '' + + def body(self): + """Return a summary of me for display under the heading. + + This default implementation simply prints a description of the + triggering requirement. + + :param req: The InstallRequirement that provoked this error, with + populate_link() having already been called + + """ + return ' %s' % self._requirement_name() + + def __str__(self): + return '%s\n%s' % (self.head, self.body()) + + def _requirement_name(self): + """Return a description of the requirement that triggered me. 
+ + This default implementation returns long description of the req, with + line numbers + + """ + return str(self.req) if self.req else 'unknown package' + + +class VcsHashUnsupported(HashError): + """A hash was provided for a version-control-system-based requirement, but + we don't have a method for hashing those.""" + + order = 0 + head = ("Can't verify hashes for these requirements because we don't " + "have a way to hash version control repositories:") + + +class DirectoryUrlHashUnsupported(HashError): + """A hash was provided for a version-control-system-based requirement, but + we don't have a method for hashing those.""" + + order = 1 + head = ("Can't verify hashes for these file:// requirements because they " + "point to directories:") + + +class HashMissing(HashError): + """A hash was needed for a requirement but is absent.""" + + order = 2 + head = ('Hashes are required in --require-hashes mode, but they are ' + 'missing from some requirements. Here is a list of those ' + 'requirements along with the hashes their downloaded archives ' + 'actually had. Add lines like these to your requirements files to ' + 'prevent tampering. (If you did not enable --require-hashes ' + 'manually, note that it turns on automatically when any package ' + 'has a hash.)') + + def __init__(self, gotten_hash): + """ + :param gotten_hash: The hash of the (possibly malicious) archive we + just downloaded + """ + self.gotten_hash = gotten_hash + + def body(self): + # Dodge circular import. + from pip._internal.utils.hashes import FAVORITE_HASH + + package = None + if self.req: + # In the case of URL-based requirements, display the original URL + # seen in the requirements file rather than the package name, + # so the output can be directly copied into the requirements file. + package = (self.req.original_link if self.req.original_link + # In case someone feeds something downright stupid + # to InstallRequirement's constructor. + else getattr(self.req, 'req', None)) + return ' %s --hash=%s:%s' % (package or 'unknown package', + FAVORITE_HASH, + self.gotten_hash) + + +class HashUnpinned(HashError): + """A requirement had a hash specified but was not pinned to a specific + version.""" + + order = 3 + head = ('In --require-hashes mode, all requirements must have their ' + 'versions pinned with ==. These do not:') + + +class HashMismatch(HashError): + """ + Distribution file hash values don't match. + + :ivar package_name: The name of the package that triggered the hash + mismatch. Feel free to write to this after the exception is raise to + improve its error message. + + """ + order = 4 + head = ('THESE PACKAGES DO NOT MATCH THE HASHES FROM THE REQUIREMENTS ' + 'FILE. If you have updated the package versions, please update ' + 'the hashes. Otherwise, examine the package contents carefully; ' + 'someone may have tampered with them.') + + def __init__(self, allowed, gots): + """ + :param allowed: A dict of algorithm names pointing to lists of allowed + hex digests + :param gots: A dict of algorithm names pointing to hashes we + actually got from the files under suspicion + """ + self.allowed = allowed + self.gots = gots + + def body(self): + return ' %s:\n%s' % (self._requirement_name(), + self._hash_comparison()) + + def _hash_comparison(self): + """ + Return a comparison of actual and expected hash values. 
+ + Example:: + + Expected sha256 abcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcde + or 123451234512345123451234512345123451234512345 + Got bcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdef + + """ + def hash_then_or(hash_name): + # For now, all the decent hashes have 6-char names, so we can get + # away with hard-coding space literals. + return chain([hash_name], repeat(' or')) + + lines = [] + for hash_name, expecteds in iteritems(self.allowed): + prefix = hash_then_or(hash_name) + lines.extend((' Expected %s %s' % (next(prefix), e)) + for e in expecteds) + lines.append(' Got %s\n' % + self.gots[hash_name].hexdigest()) + prefix = ' or' + return '\n'.join(lines) + + +class UnsupportedPythonVersion(InstallationError): + """Unsupported python version according to Requires-Python package + metadata.""" diff --git a/env/lib/python3.6/site-packages/pip/_internal/index.py b/env/lib/python3.6/site-packages/pip/_internal/index.py new file mode 100644 index 0000000..8c0ec82 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_internal/index.py @@ -0,0 +1,1127 @@ +"""Routines related to PyPI, indexes""" +from __future__ import absolute_import + +import cgi +import itertools +import logging +import mimetypes +import os +import posixpath +import re +import sys +from collections import namedtuple + +from pip._vendor import html5lib, requests, six +from pip._vendor.distlib.compat import unescape +from pip._vendor.packaging import specifiers +from pip._vendor.packaging.utils import canonicalize_name +from pip._vendor.packaging.version import parse as parse_version +from pip._vendor.requests.exceptions import SSLError +from pip._vendor.six.moves.urllib import parse as urllib_parse +from pip._vendor.six.moves.urllib import request as urllib_request + +from pip._internal.compat import ipaddress +from pip._internal.download import HAS_TLS, is_url, path_to_url, url_to_path +from pip._internal.exceptions import ( + BestVersionAlreadyInstalled, DistributionNotFound, InvalidWheelFilename, + UnsupportedWheel, +) +from pip._internal.models.index import PyPI +from pip._internal.pep425tags import get_supported +from pip._internal.utils.deprecation import deprecated +from pip._internal.utils.logging import indent_log +from pip._internal.utils.misc import ( + ARCHIVE_EXTENSIONS, SUPPORTED_EXTENSIONS, cached_property, normalize_path, + remove_auth_from_url, splitext, +) +from pip._internal.utils.packaging import check_requires_python +from pip._internal.wheel import Wheel, wheel_ext + +__all__ = ['FormatControl', 'fmt_ctl_handle_mutual_exclude', 'PackageFinder'] + + +SECURE_ORIGINS = [ + # protocol, hostname, port + # Taken from Chrome's list of secure origins (See: http://bit.ly/1qrySKC) + ("https", "*", "*"), + ("*", "localhost", "*"), + ("*", "127.0.0.0/8", "*"), + ("*", "::1/128", "*"), + ("file", "*", None), + # ssh is always secure. 
+ ("ssh", "*", "*"), +] + + +logger = logging.getLogger(__name__) + + +class InstallationCandidate(object): + + def __init__(self, project, version, location): + self.project = project + self.version = parse_version(version) + self.location = location + self._key = (self.project, self.version, self.location) + + def __repr__(self): + return "".format( + self.project, self.version, self.location, + ) + + def __hash__(self): + return hash(self._key) + + def __lt__(self, other): + return self._compare(other, lambda s, o: s < o) + + def __le__(self, other): + return self._compare(other, lambda s, o: s <= o) + + def __eq__(self, other): + return self._compare(other, lambda s, o: s == o) + + def __ge__(self, other): + return self._compare(other, lambda s, o: s >= o) + + def __gt__(self, other): + return self._compare(other, lambda s, o: s > o) + + def __ne__(self, other): + return self._compare(other, lambda s, o: s != o) + + def _compare(self, other, method): + if not isinstance(other, InstallationCandidate): + return NotImplemented + + return method(self._key, other._key) + + +class PackageFinder(object): + """This finds packages. + + This is meant to match easy_install's technique for looking for + packages, by reading pages and looking for appropriate links. + """ + + def __init__(self, find_links, index_urls, allow_all_prereleases=False, + trusted_hosts=None, process_dependency_links=False, + session=None, format_control=None, platform=None, + versions=None, abi=None, implementation=None, + prefer_binary=False): + """Create a PackageFinder. + + :param format_control: A FormatControl object or None. Used to control + the selection of source packages / binary packages when consulting + the index and links. + :param platform: A string or None. If None, searches for packages + that are supported by the current system. Otherwise, will find + packages that can be built on the platform passed in. These + packages will only be downloaded for distribution: they will + not be built locally. + :param versions: A list of strings or None. This is passed directly + to pep425tags.py in the get_supported() method. + :param abi: A string or None. This is passed directly + to pep425tags.py in the get_supported() method. + :param implementation: A string or None. This is passed directly + to pep425tags.py in the get_supported() method. + """ + if session is None: + raise TypeError( + "PackageFinder() missing 1 required keyword argument: " + "'session'" + ) + + # Build find_links. If an argument starts with ~, it may be + # a local file relative to a home directory. So try normalizing + # it and if it exists, use the normalized version. + # This is deliberately conservative - it might be fine just to + # blindly normalize anything starting with a ~... + self.find_links = [] + for link in find_links: + if link.startswith('~'): + new_link = normalize_path(link) + if os.path.exists(new_link): + link = new_link + self.find_links.append(link) + + self.index_urls = index_urls + self.dependency_links = [] + + # These are boring links that have already been logged somehow: + self.logged_links = set() + + self.format_control = format_control or FormatControl(set(), set()) + + # Domains that we won't emit warnings for when not using HTTPS + self.secure_origins = [ + ("*", host, "*") + for host in (trusted_hosts if trusted_hosts else []) + ] + + # Do we want to allow _all_ pre-releases? + self.allow_all_prereleases = allow_all_prereleases + + # Do we process dependency links? 
+ self.process_dependency_links = process_dependency_links + + # The Session we'll use to make requests + self.session = session + + # The valid tags to check potential found wheel candidates against + self.valid_tags = get_supported( + versions=versions, + platform=platform, + abi=abi, + impl=implementation, + ) + + # Do we prefer old, but valid, binary dist over new source dist + self.prefer_binary = prefer_binary + + # If we don't have TLS enabled, then WARN if anyplace we're looking + # relies on TLS. + if not HAS_TLS: + for link in itertools.chain(self.index_urls, self.find_links): + parsed = urllib_parse.urlparse(link) + if parsed.scheme == "https": + logger.warning( + "pip is configured with locations that require " + "TLS/SSL, however the ssl module in Python is not " + "available." + ) + break + + def get_formatted_locations(self): + lines = [] + if self.index_urls and self.index_urls != [PyPI.simple_url]: + lines.append( + "Looking in indexes: {}".format(", ".join( + remove_auth_from_url(url) for url in self.index_urls)) + ) + if self.find_links: + lines.append( + "Looking in links: {}".format(", ".join(self.find_links)) + ) + return "\n".join(lines) + + def add_dependency_links(self, links): + # # FIXME: this shouldn't be global list this, it should only + # # apply to requirements of the package that specifies the + # # dependency_links value + # # FIXME: also, we should track comes_from (i.e., use Link) + if self.process_dependency_links: + deprecated( + "Dependency Links processing has been deprecated and will be " + "removed in a future release.", + replacement=None, + gone_in="18.2", + issue=4187, + ) + self.dependency_links.extend(links) + + @staticmethod + def _sort_locations(locations, expand_dir=False): + """ + Sort locations into "files" (archives) and "urls", and return + a pair of lists (files,urls) + """ + files = [] + urls = [] + + # puts the url for the given file path into the appropriate list + def sort_path(path): + url = path_to_url(path) + if mimetypes.guess_type(url, strict=False)[0] == 'text/html': + urls.append(url) + else: + files.append(url) + + for url in locations: + + is_local_path = os.path.exists(url) + is_file_url = url.startswith('file:') + + if is_local_path or is_file_url: + if is_local_path: + path = url + else: + path = url_to_path(url) + if os.path.isdir(path): + if expand_dir: + path = os.path.realpath(path) + for item in os.listdir(path): + sort_path(os.path.join(path, item)) + elif is_file_url: + urls.append(url) + elif os.path.isfile(path): + sort_path(path) + else: + logger.warning( + "Url '%s' is ignored: it is neither a file " + "nor a directory.", url, + ) + elif is_url(url): + # Only add url with clear scheme + urls.append(url) + else: + logger.warning( + "Url '%s' is ignored. It is either a non-existing " + "path or lacks a specific scheme.", url, + ) + + return files, urls + + def _candidate_sort_key(self, candidate): + """ + Function used to generate link sort key for link tuples. + The greater the return value, the more preferred it is. + If not finding wheels, then sorted by version only. + If finding wheels, then the sort order is by version, then: + 1. existing installs + 2. wheels ordered via Wheel.support_index_min(self.valid_tags) + 3. source archives + If prefer_binary was set, then all wheels are sorted above sources. 
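+        The computed key is (binary_preference, version, build_tag, priority).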
+ Note: it was considered to embed this logic into the Link + comparison operators, but then different sdist links + with the same version, would have to be considered equal + """ + support_num = len(self.valid_tags) + build_tag = tuple() + binary_preference = 0 + if candidate.location.is_wheel: + # can raise InvalidWheelFilename + wheel = Wheel(candidate.location.filename) + if not wheel.supported(self.valid_tags): + raise UnsupportedWheel( + "%s is not a supported wheel for this platform. It " + "can't be sorted." % wheel.filename + ) + if self.prefer_binary: + binary_preference = 1 + pri = -(wheel.support_index_min(self.valid_tags)) + if wheel.build_tag is not None: + match = re.match(r'^(\d+)(.*)$', wheel.build_tag) + build_tag_groups = match.groups() + build_tag = (int(build_tag_groups[0]), build_tag_groups[1]) + else: # sdist + pri = -(support_num) + return (binary_preference, candidate.version, build_tag, pri) + + def _validate_secure_origin(self, logger, location): + # Determine if this url used a secure transport mechanism + parsed = urllib_parse.urlparse(str(location)) + origin = (parsed.scheme, parsed.hostname, parsed.port) + + # The protocol to use to see if the protocol matches. + # Don't count the repository type as part of the protocol: in + # cases such as "git+ssh", only use "ssh". (I.e., Only verify against + # the last scheme.) + protocol = origin[0].rsplit('+', 1)[-1] + + # Determine if our origin is a secure origin by looking through our + # hardcoded list of secure origins, as well as any additional ones + # configured on this PackageFinder instance. + for secure_origin in (SECURE_ORIGINS + self.secure_origins): + if protocol != secure_origin[0] and secure_origin[0] != "*": + continue + + try: + # We need to do this decode dance to ensure that we have a + # unicode object, even on Python 2.x. + addr = ipaddress.ip_address( + origin[1] + if ( + isinstance(origin[1], six.text_type) or + origin[1] is None + ) + else origin[1].decode("utf8") + ) + network = ipaddress.ip_network( + secure_origin[1] + if isinstance(secure_origin[1], six.text_type) + else secure_origin[1].decode("utf8") + ) + except ValueError: + # We don't have both a valid address or a valid network, so + # we'll check this origin against hostnames. + if (origin[1] and + origin[1].lower() != secure_origin[1].lower() and + secure_origin[1] != "*"): + continue + else: + # We have a valid address and network, so see if the address + # is contained within the network. + if addr not in network: + continue + + # Check to see if the port patches + if (origin[2] != secure_origin[2] and + secure_origin[2] != "*" and + secure_origin[2] is not None): + continue + + # If we've gotten here, then this origin matches the current + # secure origin and we should return True + return True + + # If we've gotten to this point, then the origin isn't secure and we + # will not accept it as a valid location to search. We will however + # log a warning that we are ignoring it. + logger.warning( + "The repository located at %s is not a trusted or secure host and " + "is being ignored. 
If this repository is available via HTTPS we " + "recommend you use HTTPS instead, otherwise you may silence " + "this warning and allow it anyway with '--trusted-host %s'.", + parsed.hostname, + parsed.hostname, + ) + + return False + + def _get_index_urls_locations(self, project_name): + """Returns the locations found via self.index_urls + + Checks the url_name on the main (first in the list) index and + use this url_name to produce all locations + """ + + def mkurl_pypi_url(url): + loc = posixpath.join( + url, + urllib_parse.quote(canonicalize_name(project_name))) + # For maximum compatibility with easy_install, ensure the path + # ends in a trailing slash. Although this isn't in the spec + # (and PyPI can handle it without the slash) some other index + # implementations might break if they relied on easy_install's + # behavior. + if not loc.endswith('/'): + loc = loc + '/' + return loc + + return [mkurl_pypi_url(url) for url in self.index_urls] + + def find_all_candidates(self, project_name): + """Find all available InstallationCandidate for project_name + + This checks index_urls, find_links and dependency_links. + All versions found are returned as an InstallationCandidate list. + + See _link_package_versions for details on which files are accepted + """ + index_locations = self._get_index_urls_locations(project_name) + index_file_loc, index_url_loc = self._sort_locations(index_locations) + fl_file_loc, fl_url_loc = self._sort_locations( + self.find_links, expand_dir=True, + ) + dep_file_loc, dep_url_loc = self._sort_locations(self.dependency_links) + + file_locations = (Link(url) for url in itertools.chain( + index_file_loc, fl_file_loc, dep_file_loc, + )) + + # We trust every url that the user has given us whether it was given + # via --index-url or --find-links + # We explicitly do not trust links that came from dependency_links + # We want to filter out any thing which does not have a secure origin. 
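+        # ("secure" here means the link passes _validate_secure_origin above:
+        # HTTPS, localhost, file:, ssh, or a host given via --trusted-host.)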
+ url_locations = [ + link for link in itertools.chain( + (Link(url) for url in index_url_loc), + (Link(url) for url in fl_url_loc), + (Link(url) for url in dep_url_loc), + ) + if self._validate_secure_origin(logger, link) + ] + + logger.debug('%d location(s) to search for versions of %s:', + len(url_locations), project_name) + + for location in url_locations: + logger.debug('* %s', location) + + canonical_name = canonicalize_name(project_name) + formats = fmt_ctl_formats(self.format_control, canonical_name) + search = Search(project_name, canonical_name, formats) + find_links_versions = self._package_versions( + # We trust every directly linked archive in find_links + (Link(url, '-f') for url in self.find_links), + search + ) + + page_versions = [] + for page in self._get_pages(url_locations, project_name): + logger.debug('Analyzing links from page %s', page.url) + with indent_log(): + page_versions.extend( + self._package_versions(page.links, search) + ) + + dependency_versions = self._package_versions( + (Link(url) for url in self.dependency_links), search + ) + if dependency_versions: + logger.debug( + 'dependency_links found: %s', + ', '.join([ + version.location.url for version in dependency_versions + ]) + ) + + file_versions = self._package_versions(file_locations, search) + if file_versions: + file_versions.sort(reverse=True) + logger.debug( + 'Local files found: %s', + ', '.join([ + url_to_path(candidate.location.url) + for candidate in file_versions + ]) + ) + + # This is an intentional priority ordering + return ( + file_versions + find_links_versions + page_versions + + dependency_versions + ) + + def find_requirement(self, req, upgrade): + """Try to find a Link matching req + + Expects req, an InstallRequirement and upgrade, a boolean + Returns a Link if found, + Raises DistributionNotFound or BestVersionAlreadyInstalled otherwise + """ + all_candidates = self.find_all_candidates(req.name) + + # Filter out anything which doesn't match our specifier + compatible_versions = set( + req.specifier.filter( + # We turn the version object into a str here because otherwise + # when we're debundled but setuptools isn't, Python will see + # packaging.version.Version and + # pkg_resources._vendor.packaging.version.Version as different + # types. This way we'll use a str as a common data interchange + # format. If we stop using the pkg_resources provided specifier + # and start using our own, we can drop the cast to str(). + [str(c.version) for c in all_candidates], + prereleases=( + self.allow_all_prereleases + if self.allow_all_prereleases else None + ), + ) + ) + applicable_candidates = [ + # Again, converting to str to deal with debundling. 
+ c for c in all_candidates if str(c.version) in compatible_versions + ] + + if applicable_candidates: + best_candidate = max(applicable_candidates, + key=self._candidate_sort_key) + else: + best_candidate = None + + if req.satisfied_by is not None: + installed_version = parse_version(req.satisfied_by.version) + else: + installed_version = None + + if installed_version is None and best_candidate is None: + logger.critical( + 'Could not find a version that satisfies the requirement %s ' + '(from versions: %s)', + req, + ', '.join( + sorted( + {str(c.version) for c in all_candidates}, + key=parse_version, + ) + ) + ) + + raise DistributionNotFound( + 'No matching distribution found for %s' % req + ) + + best_installed = False + if installed_version and ( + best_candidate is None or + best_candidate.version <= installed_version): + best_installed = True + + if not upgrade and installed_version is not None: + if best_installed: + logger.debug( + 'Existing installed version (%s) is most up-to-date and ' + 'satisfies requirement', + installed_version, + ) + else: + logger.debug( + 'Existing installed version (%s) satisfies requirement ' + '(most up-to-date version is %s)', + installed_version, + best_candidate.version, + ) + return None + + if best_installed: + # We have an existing version, and its the best version + logger.debug( + 'Installed version (%s) is most up-to-date (past versions: ' + '%s)', + installed_version, + ', '.join(sorted(compatible_versions, key=parse_version)) or + "none", + ) + raise BestVersionAlreadyInstalled + + logger.debug( + 'Using version %s (newest of versions: %s)', + best_candidate.version, + ', '.join(sorted(compatible_versions, key=parse_version)) + ) + return best_candidate.location + + def _get_pages(self, locations, project_name): + """ + Yields (page, page_url) from the given locations, skipping + locations that have errors. 
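+        Each location is fetched at most once; duplicates are skipped via a
+        local ``seen`` set.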
+ """ + seen = set() + for location in locations: + if location in seen: + continue + seen.add(location) + + page = self._get_page(location) + if page is None: + continue + + yield page + + _py_version_re = re.compile(r'-py([123]\.?[0-9]?)$') + + def _sort_links(self, links): + """ + Returns elements of links in order, non-egg links first, egg links + second, while eliminating duplicates + """ + eggs, no_eggs = [], [] + seen = set() + for link in links: + if link not in seen: + seen.add(link) + if link.egg_fragment: + eggs.append(link) + else: + no_eggs.append(link) + return no_eggs + eggs + + def _package_versions(self, links, search): + result = [] + for link in self._sort_links(links): + v = self._link_package_versions(link, search) + if v is not None: + result.append(v) + return result + + def _log_skipped_link(self, link, reason): + if link not in self.logged_links: + logger.debug('Skipping link %s; %s', link, reason) + self.logged_links.add(link) + + def _link_package_versions(self, link, search): + """Return an InstallationCandidate or None""" + version = None + if link.egg_fragment: + egg_info = link.egg_fragment + ext = link.ext + else: + egg_info, ext = link.splitext() + if not ext: + self._log_skipped_link(link, 'not a file') + return + if ext not in SUPPORTED_EXTENSIONS: + self._log_skipped_link( + link, 'unsupported archive format: %s' % ext, + ) + return + if "binary" not in search.formats and ext == wheel_ext: + self._log_skipped_link( + link, 'No binaries permitted for %s' % search.supplied, + ) + return + if "macosx10" in link.path and ext == '.zip': + self._log_skipped_link(link, 'macosx10 one') + return + if ext == wheel_ext: + try: + wheel = Wheel(link.filename) + except InvalidWheelFilename: + self._log_skipped_link(link, 'invalid wheel filename') + return + if canonicalize_name(wheel.name) != search.canonical: + self._log_skipped_link( + link, 'wrong project name (not %s)' % search.supplied) + return + + if not wheel.supported(self.valid_tags): + self._log_skipped_link( + link, 'it is not compatible with this Python') + return + + version = wheel.version + + # This should be up by the search.ok_binary check, but see issue 2700. + if "source" not in search.formats and ext != wheel_ext: + self._log_skipped_link( + link, 'No sources permitted for %s' % search.supplied, + ) + return + + if not version: + version = egg_info_matches(egg_info, search.supplied, link) + if version is None: + self._log_skipped_link( + link, 'Missing project version for %s' % search.supplied) + return + + match = self._py_version_re.search(version) + if match: + version = version[:match.start()] + py_version = match.group(1) + if py_version != sys.version[:3]: + self._log_skipped_link( + link, 'Python version is incorrect') + return + try: + support_this_python = check_requires_python(link.requires_python) + except specifiers.InvalidSpecifier: + logger.debug("Package %s has an invalid Requires-Python entry: %s", + link.filename, link.requires_python) + support_this_python = True + + if not support_this_python: + logger.debug("The package %s is incompatible with the python" + "version in use. 
Acceptable python versions are:%s", + link, link.requires_python) + return + logger.debug('Found link %s, version: %s', link, version) + + return InstallationCandidate(search.supplied, version, link) + + def _get_page(self, link): + return HTMLPage.get_page(link, session=self.session) + + +def egg_info_matches( + egg_info, search_name, link, + _egg_info_re=re.compile(r'([a-z0-9_.]+)-([a-z0-9_.!+-]+)', re.I)): + """Pull the version part out of a string. + + :param egg_info: The string to parse. E.g. foo-2.1 + :param search_name: The name of the package this belongs to. None to + infer the name. Note that this cannot unambiguously parse strings + like foo-2-2 which might be foo, 2-2 or foo-2, 2. + :param link: The link the string came from, for logging on failure. + """ + match = _egg_info_re.search(egg_info) + if not match: + logger.debug('Could not parse version from link: %s', link) + return None + if search_name is None: + full_match = match.group(0) + return full_match[full_match.index('-'):] + name = match.group(0).lower() + # To match the "safe" name that pkg_resources creates: + name = name.replace('_', '-') + # project name and version must be separated by a dash + look_for = search_name.lower() + "-" + if name.startswith(look_for): + return match.group(0)[len(look_for):] + else: + return None + + +class HTMLPage(object): + """Represents one page, along with its URL""" + + def __init__(self, content, url, headers=None): + # Determine if we have any encoding information in our headers + encoding = None + if headers and "Content-Type" in headers: + content_type, params = cgi.parse_header(headers["Content-Type"]) + + if "charset" in params: + encoding = params['charset'] + + self.content = content + self.parsed = html5lib.parse( + self.content, + transport_encoding=encoding, + namespaceHTMLElements=False, + ) + self.url = url + self.headers = headers + + def __str__(self): + return self.url + + @classmethod + def get_page(cls, link, skip_archives=True, session=None): + if session is None: + raise TypeError( + "get_page() missing 1 required keyword argument: 'session'" + ) + + url = link.url + url = url.split('#', 1)[0] + + # Check for VCS schemes that do not support lookup as web pages. + from pip._internal.vcs import VcsSupport + for scheme in VcsSupport.schemes: + if url.lower().startswith(scheme) and url[len(scheme)] in '+:': + logger.debug('Cannot look at %s URL %s', scheme, link) + return None + + try: + if skip_archives: + filename = link.filename + for bad_ext in ARCHIVE_EXTENSIONS: + if filename.endswith(bad_ext): + content_type = cls._get_content_type( + url, session=session, + ) + if content_type.lower().startswith('text/html'): + break + else: + logger.debug( + 'Skipping page %s because of Content-Type: %s', + link, + content_type, + ) + return + + logger.debug('Getting page %s', url) + + # Tack index.html onto file:// URLs that point to directories + (scheme, netloc, path, params, query, fragment) = \ + urllib_parse.urlparse(url) + if (scheme == 'file' and + os.path.isdir(urllib_request.url2pathname(path))): + # add trailing slash if not present so urljoin doesn't trim + # final segment + if not url.endswith('/'): + url += '/' + url = urllib_parse.urljoin(url, 'index.html') + logger.debug(' file: URL is directory, getting %s', url) + + resp = session.get( + url, + headers={ + "Accept": "text/html", + "Cache-Control": "max-age=600", + }, + ) + resp.raise_for_status() + + # The check for archives above only works if the url ends with + # something that looks like an archive. 
However that is not a + # requirement of an url. Unless we issue a HEAD request on every + # url we cannot know ahead of time for sure if something is HTML + # or not. However we can check after we've downloaded it. + content_type = resp.headers.get('Content-Type', 'unknown') + if not content_type.lower().startswith("text/html"): + logger.debug( + 'Skipping page %s because of Content-Type: %s', + link, + content_type, + ) + return + + inst = cls(resp.content, resp.url, resp.headers) + except requests.HTTPError as exc: + cls._handle_fail(link, exc, url) + except SSLError as exc: + reason = "There was a problem confirming the ssl certificate: " + reason += str(exc) + cls._handle_fail(link, reason, url, meth=logger.info) + except requests.ConnectionError as exc: + cls._handle_fail(link, "connection error: %s" % exc, url) + except requests.Timeout: + cls._handle_fail(link, "timed out", url) + else: + return inst + + @staticmethod + def _handle_fail(link, reason, url, meth=None): + if meth is None: + meth = logger.debug + + meth("Could not fetch URL %s: %s - skipping", link, reason) + + @staticmethod + def _get_content_type(url, session): + """Get the Content-Type of the given url, using a HEAD request""" + scheme, netloc, path, query, fragment = urllib_parse.urlsplit(url) + if scheme not in {'http', 'https'}: + # FIXME: some warning or something? + # assertion error? + return '' + + resp = session.head(url, allow_redirects=True) + resp.raise_for_status() + + return resp.headers.get("Content-Type", "") + + @cached_property + def base_url(self): + bases = [ + x for x in self.parsed.findall(".//base") + if x.get("href") is not None + ] + if bases and bases[0].get("href"): + return bases[0].get("href") + else: + return self.url + + @property + def links(self): + """Yields all links in the page""" + for anchor in self.parsed.findall(".//a"): + if anchor.get("href"): + href = anchor.get("href") + url = self.clean_link( + urllib_parse.urljoin(self.base_url, href) + ) + pyrequire = anchor.get('data-requires-python') + pyrequire = unescape(pyrequire) if pyrequire else None + yield Link(url, self, requires_python=pyrequire) + + _clean_re = re.compile(r'[^a-z0-9$&+,/:;=?@.#%_\\|-]', re.I) + + def clean_link(self, url): + """Makes sure a link is fully encoded. That is, if a ' ' shows up in + the link, it will be rewritten to %20 (while not over-quoting + % or other characters).""" + return self._clean_re.sub( + lambda match: '%%%2x' % ord(match.group(0)), url) + + +class Link(object): + + def __init__(self, url, comes_from=None, requires_python=None): + """ + Object representing a parsed link from https://pypi.org/simple/* + + url: + url of the resource pointed to (href of the link) + comes_from: + instance of HTMLPage where the link was found, or string. + requires_python: + String containing the `Requires-Python` metadata field, specified + in PEP 345. This may be specified by a data-requires-python + attribute in the HTML link tag, as described in PEP 503. 
+ """ + + # url can be a UNC windows share + if url.startswith('\\\\'): + url = path_to_url(url) + + self.url = url + self.comes_from = comes_from + self.requires_python = requires_python if requires_python else None + + def __str__(self): + if self.requires_python: + rp = ' (requires-python:%s)' % self.requires_python + else: + rp = '' + if self.comes_from: + return '%s (from %s)%s' % (self.url, self.comes_from, rp) + else: + return str(self.url) + + def __repr__(self): + return '<Link %s>' % self + + def __eq__(self, other): + if not isinstance(other, Link): + return NotImplemented + return self.url == other.url + + def __ne__(self, other): + if not isinstance(other, Link): + return NotImplemented + return self.url != other.url + + def __lt__(self, other): + if not isinstance(other, Link): + return NotImplemented + return self.url < other.url + + def __le__(self, other): + if not isinstance(other, Link): + return NotImplemented + return self.url <= other.url + + def __gt__(self, other): + if not isinstance(other, Link): + return NotImplemented + return self.url > other.url + + def __ge__(self, other): + if not isinstance(other, Link): + return NotImplemented + return self.url >= other.url + + def __hash__(self): + return hash(self.url) + + @property + def filename(self): + _, netloc, path, _, _ = urllib_parse.urlsplit(self.url) + name = posixpath.basename(path.rstrip('/')) or netloc + name = urllib_parse.unquote(name) + assert name, ('URL %r produced no filename' % self.url) + return name + + @property + def scheme(self): + return urllib_parse.urlsplit(self.url)[0] + + @property + def netloc(self): + return urllib_parse.urlsplit(self.url)[1] + + @property + def path(self): + return urllib_parse.unquote(urllib_parse.urlsplit(self.url)[2]) + + def splitext(self): + return splitext(posixpath.basename(self.path.rstrip('/'))) + + @property + def ext(self): + return self.splitext()[1] + + @property + def url_without_fragment(self): + scheme, netloc, path, query, fragment = urllib_parse.urlsplit(self.url) + return urllib_parse.urlunsplit((scheme, netloc, path, query, None)) + + _egg_fragment_re = re.compile(r'[#&]egg=([^&]*)') + + @property + def egg_fragment(self): + match = self._egg_fragment_re.search(self.url) + if not match: + return None + return match.group(1) + + _subdirectory_fragment_re = re.compile(r'[#&]subdirectory=([^&]*)') + + @property + def subdirectory_fragment(self): + match = self._subdirectory_fragment_re.search(self.url) + if not match: + return None + return match.group(1) + + _hash_re = re.compile( + r'(sha1|sha224|sha384|sha256|sha512|md5)=([a-f0-9]+)' + ) + + @property + def hash(self): + match = self._hash_re.search(self.url) + if match: + return match.group(2) + return None + + @property + def hash_name(self): + match = self._hash_re.search(self.url) + if match: + return match.group(1) + return None + + @property + def show_url(self): + return posixpath.basename(self.url.split('#', 1)[0].split('?', 1)[0]) + + @property + def is_wheel(self): + return self.ext == wheel_ext + + @property + def is_artifact(self): + """ + Determines if this points to an actual artifact (e.g. a tarball) or if + it points to an "abstract" thing like a path or a VCS location. + """ + from pip._internal.vcs import vcs + + if self.scheme in vcs.all_schemes: + return False + + return True + + +FormatControl = namedtuple('FormatControl', 'no_binary only_binary') +"""This object has two fields, no_binary and only_binary. + +If a field is falsy, it isn't set.
If it is {':all:'}, it should match all +packages except those listed in the other field. Only one field can be set +to {':all:'} at a time. The rest of the time exact package name matches +are listed, with any given package only showing up in one field at a time. +""" + + +def fmt_ctl_handle_mutual_exclude(value, target, other): + new = value.split(',') + while ':all:' in new: + other.clear() + target.clear() + target.add(':all:') + del new[:new.index(':all:') + 1] + if ':none:' not in new: + # Without a none, we want to discard everything as :all: covers it + return + for name in new: + if name == ':none:': + target.clear() + continue + name = canonicalize_name(name) + other.discard(name) + target.add(name) + + +def fmt_ctl_formats(fmt_ctl, canonical_name): + result = {"binary", "source"} + if canonical_name in fmt_ctl.only_binary: + result.discard('source') + elif canonical_name in fmt_ctl.no_binary: + result.discard('binary') + elif ':all:' in fmt_ctl.only_binary: + result.discard('source') + elif ':all:' in fmt_ctl.no_binary: + result.discard('binary') + return frozenset(result) + + +def fmt_ctl_no_binary(fmt_ctl): + fmt_ctl_handle_mutual_exclude( + ':all:', fmt_ctl.no_binary, fmt_ctl.only_binary, + ) + + +Search = namedtuple('Search', 'supplied canonical formats') +"""Capture key aspects of a search. + +:attribute supplied: The user supplied package. +:attribute canonical: The canonical package name. +:attribute formats: The formats allowed for this package. Should be a set + with 'binary' or 'source' or both in it. +""" diff --git a/env/lib/python3.6/site-packages/pip/_internal/locations.py b/env/lib/python3.6/site-packages/pip/_internal/locations.py new file mode 100644 index 0000000..4a7b61e --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_internal/locations.py @@ -0,0 +1,194 @@ +"""Locations where we look for configs, install stuff, etc""" +from __future__ import absolute_import + +import os +import os.path +import platform +import site +import sys +import sysconfig +from distutils import sysconfig as distutils_sysconfig +from distutils.command.install import SCHEME_KEYS # type: ignore + +from pip._internal.compat import WINDOWS, expanduser +from pip._internal.utils import appdirs + +# Application Directories +USER_CACHE_DIR = appdirs.user_cache_dir("pip") + + +DELETE_MARKER_MESSAGE = '''\ +This file is placed here by pip to indicate the source was put +here by pip. + +Once this package is successfully installed this source code will be +deleted (unless you remove this file). +''' +PIP_DELETE_MARKER_FILENAME = 'pip-delete-this-directory.txt' + + +def write_delete_marker_file(directory): + """ + Write the pip delete marker file into this directory. + """ + filepath = os.path.join(directory, PIP_DELETE_MARKER_FILENAME) + with open(filepath, 'w') as marker_fp: + marker_fp.write(DELETE_MARKER_MESSAGE) + + +def running_under_virtualenv(): + """ + Return True if we're running inside a virtualenv, False otherwise. + + """ + if hasattr(sys, 'real_prefix'): + return True + elif sys.prefix != getattr(sys, "base_prefix", sys.prefix): + return True + + return False + + +def virtualenv_no_global(): + """ + Return True if in a venv and no system site packages. 
+ """ + # this mirrors the logic in virtualenv.py for locating the + # no-global-site-packages.txt file + site_mod_dir = os.path.dirname(os.path.abspath(site.__file__)) + no_global_file = os.path.join(site_mod_dir, 'no-global-site-packages.txt') + if running_under_virtualenv() and os.path.isfile(no_global_file): + return True + + +if running_under_virtualenv(): + src_prefix = os.path.join(sys.prefix, 'src') +else: + # FIXME: keep src in cwd for now (it is not a temporary folder) + try: + src_prefix = os.path.join(os.getcwd(), 'src') + except OSError: + # In case the current working directory has been renamed or deleted + sys.exit( + "The folder you are executing pip from can no longer be found." + ) + +# under macOS + virtualenv sys.prefix is not properly resolved +# it is something like /path/to/python/bin/.. +# Note: using realpath due to tmp dirs on OSX being symlinks +src_prefix = os.path.abspath(src_prefix) + +# FIXME doesn't account for venv linked to global site-packages + +site_packages = sysconfig.get_path("purelib") +# This is because of a bug in PyPy's sysconfig module, see +# https://bitbucket.org/pypy/pypy/issues/2506/sysconfig-returns-incorrect-paths +# for more information. +if platform.python_implementation().lower() == "pypy": + site_packages = distutils_sysconfig.get_python_lib() +try: + # Use getusersitepackages if this is present, as it ensures that the + # value is initialised properly. + user_site = site.getusersitepackages() +except AttributeError: + user_site = site.USER_SITE +user_dir = expanduser('~') +if WINDOWS: + bin_py = os.path.join(sys.prefix, 'Scripts') + bin_user = os.path.join(user_site, 'Scripts') + # buildout uses 'bin' on Windows too? + if not os.path.exists(bin_py): + bin_py = os.path.join(sys.prefix, 'bin') + bin_user = os.path.join(user_site, 'bin') + + config_basename = 'pip.ini' + + legacy_storage_dir = os.path.join(user_dir, 'pip') + legacy_config_file = os.path.join( + legacy_storage_dir, + config_basename, + ) +else: + bin_py = os.path.join(sys.prefix, 'bin') + bin_user = os.path.join(user_site, 'bin') + + config_basename = 'pip.conf' + + legacy_storage_dir = os.path.join(user_dir, '.pip') + legacy_config_file = os.path.join( + legacy_storage_dir, + config_basename, + ) + # Forcing to use /usr/local/bin for standard macOS framework installs + # Also log to ~/Library/Logs/ for use with the Console.app log viewer + if sys.platform[:6] == 'darwin' and sys.prefix[:16] == '/System/Library/': + bin_py = '/usr/local/bin' + +site_config_files = [ + os.path.join(path, config_basename) + for path in appdirs.site_config_dirs('pip') +] + +venv_config_file = os.path.join(sys.prefix, config_basename) +new_config_file = os.path.join(appdirs.user_config_dir("pip"), config_basename) + + +def distutils_scheme(dist_name, user=False, home=None, root=None, + isolated=False, prefix=None): + """ + Return a distutils install scheme + """ + from distutils.dist import Distribution + + scheme = {} + + if isolated: + extra_dist_args = {"script_args": ["--no-user-cfg"]} + else: + extra_dist_args = {} + dist_args = {'name': dist_name} + dist_args.update(extra_dist_args) + + d = Distribution(dist_args) + d.parse_config_files() + i = d.get_command_obj('install', create=True) + # NOTE: setting user or home has the side-effect of creating the home dir + # or user base for installations during finalize_options() + # ideally, we'd prefer a scheme class that has no side-effects. 
+ assert not (user and prefix), "user={} prefix={}".format(user, prefix) + i.user = user or i.user + if user: + i.prefix = "" + i.prefix = prefix or i.prefix + i.home = home or i.home + i.root = root or i.root + i.finalize_options() + for key in SCHEME_KEYS: + scheme[key] = getattr(i, 'install_' + key) + + # install_lib specified in setup.cfg should install *everything* + # into there (i.e. it takes precedence over both purelib and + # platlib). Note, i.install_lib is *always* set after + # finalize_options(); we only want to override here if the user + # has explicitly requested it hence going back to the config + if 'install_lib' in d.get_option_dict('install'): + scheme.update(dict(purelib=i.install_lib, platlib=i.install_lib)) + + if running_under_virtualenv(): + scheme['headers'] = os.path.join( + sys.prefix, + 'include', + 'site', + 'python' + sys.version[:3], + dist_name, + ) + + if root is not None: + path_no_drive = os.path.splitdrive( + os.path.abspath(scheme["headers"]))[1] + scheme["headers"] = os.path.join( + root, + path_no_drive[1:], + ) + + return scheme diff --git a/env/lib/python3.6/site-packages/pip/_internal/models/index.py b/env/lib/python3.6/site-packages/pip/_internal/models/index.py new file mode 100644 index 0000000..a7f10c8 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_internal/models/index.py @@ -0,0 +1,15 @@ +from pip._vendor.six.moves.urllib import parse as urllib_parse + + +class Index(object): + def __init__(self, url): + self.url = url + self.netloc = urllib_parse.urlsplit(url).netloc + self.simple_url = self.url_to_path('simple') + self.pypi_url = self.url_to_path('pypi') + + def url_to_path(self, path): + return urllib_parse.urljoin(self.url, path) + + +PyPI = Index('https://pypi.org/') diff --git a/env/lib/python3.6/site-packages/pip/_internal/operations/check.py b/env/lib/python3.6/site-packages/pip/_internal/operations/check.py new file mode 100644 index 0000000..799257a --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_internal/operations/check.py @@ -0,0 +1,148 @@ +"""Validation of dependencies of packages +""" + +from collections import namedtuple + +from pip._vendor.packaging.utils import canonicalize_name + +from pip._internal.operations.prepare import make_abstract_dist +from pip._internal.utils.misc import get_installed_distributions +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from pip._internal.req.req_install import InstallRequirement # noqa: F401 + from typing import ( # noqa: F401 + Any, Callable, Dict, Iterator, Optional, Set, Tuple, List + ) + + # Shorthands + PackageSet = Dict[str, 'PackageDetails'] + Missing = Tuple[str, Any] + Conflicting = Tuple[str, str, Any] + + MissingDict = Dict[str, List[Missing]] + ConflictingDict = Dict[str, List[Conflicting]] + CheckResult = Tuple[MissingDict, ConflictingDict] + +PackageDetails = namedtuple('PackageDetails', ['version', 'requires']) + + +def create_package_set_from_installed(**kwargs): + # type: (**Any) -> PackageSet + """Converts a list of distributions into a PackageSet. 
+ """ + # Default to using all packages installed on the system + if kwargs == {}: + kwargs = {"local_only": False, "skip": ()} + + package_set = {} + for dist in get_installed_distributions(**kwargs): + name = canonicalize_name(dist.project_name) + package_set[name] = PackageDetails(dist.version, dist.requires()) + return package_set + + +def check_package_set(package_set, should_ignore=None): + # type: (PackageSet, Optional[Callable[[str], bool]]) -> CheckResult + """Check if a package set is consistent + + If should_ignore is passed, it should be a callable that takes a + package name and returns a boolean. + """ + if should_ignore is None: + def should_ignore(name): + return False + + missing = dict() + conflicting = dict() + + for package_name in package_set: + # Info about dependencies of package_name + missing_deps = set() # type: Set[Missing] + conflicting_deps = set() # type: Set[Conflicting] + + if should_ignore(package_name): + continue + + for req in package_set[package_name].requires: + name = canonicalize_name(req.project_name) # type: str + + # Check if it's missing + if name not in package_set: + missed = True + if req.marker is not None: + missed = req.marker.evaluate() + if missed: + missing_deps.add((name, req)) + continue + + # Check if there's a conflict + version = package_set[name].version # type: str + if not req.specifier.contains(version, prereleases=True): + conflicting_deps.add((name, version, req)) + + if missing_deps: + missing[package_name] = sorted(missing_deps, key=str) + if conflicting_deps: + conflicting[package_name] = sorted(conflicting_deps, key=str) + + return missing, conflicting + + +def check_install_conflicts(to_install): + # type: (List[InstallRequirement]) -> Tuple[PackageSet, CheckResult] + """For checking if the dependency graph would be consistent after \ + installing given requirements + """ + # Start from the current state + package_set = create_package_set_from_installed() + # Install packages + would_be_installed = _simulate_installation_of(to_install, package_set) + + # Only warn about directly-dependent packages; create a whitelist of them + whitelist = _create_whitelist(would_be_installed, package_set) + + return ( + package_set, + check_package_set( + package_set, should_ignore=lambda name: name not in whitelist + ) + ) + + +# NOTE from @pradyunsg +# This required a minor update in dependency link handling logic over at +# operations.prepare.IsSDist.dist() to get it working +def _simulate_installation_of(to_install, package_set): + # type: (List[InstallRequirement], PackageSet) -> Set[str] + """Computes the version of packages after installing to_install. 
+ """ + + # Keep track of packages that were installed + installed = set() + + # Modify it as installing requirement_set would (assuming no errors) + for inst_req in to_install: + dist = make_abstract_dist(inst_req).dist(finder=None) + name = canonicalize_name(dist.key) + package_set[name] = PackageDetails(dist.version, dist.requires()) + + installed.add(name) + + return installed + + +def _create_whitelist(would_be_installed, package_set): + # type: (Set[str], PackageSet) -> Set[str] + packages_affected = set(would_be_installed) + + for package_name in package_set: + if package_name in packages_affected: + continue + + for req in package_set[package_name].requires: + if canonicalize_name(req.name) in packages_affected: + packages_affected.add(package_name) + break + + return packages_affected diff --git a/env/lib/python3.6/site-packages/pip/_internal/operations/freeze.py b/env/lib/python3.6/site-packages/pip/_internal/operations/freeze.py new file mode 100644 index 0000000..4bbc27b --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_internal/operations/freeze.py @@ -0,0 +1,253 @@ +from __future__ import absolute_import + +import collections +import logging +import os +import re + +from pip._vendor import pkg_resources, six +from pip._vendor.packaging.utils import canonicalize_name +from pip._vendor.pkg_resources import RequirementParseError + +from pip._internal.exceptions import InstallationError +from pip._internal.req import InstallRequirement +from pip._internal.req.req_file import COMMENT_RE +from pip._internal.utils.deprecation import deprecated +from pip._internal.utils.misc import ( + dist_is_editable, get_installed_distributions, +) + +logger = logging.getLogger(__name__) + + +def freeze( + requirement=None, + find_links=None, local_only=None, user_only=None, skip_regex=None, + isolated=False, + wheel_cache=None, + exclude_editable=False, + skip=()): + find_links = find_links or [] + skip_match = None + + if skip_regex: + skip_match = re.compile(skip_regex).search + + dependency_links = [] + + for dist in pkg_resources.working_set: + if dist.has_metadata('dependency_links.txt'): + dependency_links.extend( + dist.get_metadata_lines('dependency_links.txt') + ) + for link in find_links: + if '#egg=' in link: + dependency_links.append(link) + for link in find_links: + yield '-f %s' % link + installations = {} + for dist in get_installed_distributions(local_only=local_only, + skip=(), + user_only=user_only): + try: + req = FrozenRequirement.from_dist( + dist, + dependency_links + ) + except RequirementParseError: + logger.warning( + "Could not parse requirement: %s", + dist.project_name + ) + continue + if exclude_editable and req.editable: + continue + installations[req.name] = req + + if requirement: + # the options that don't get turned into an InstallRequirement + # should only be emitted once, even if the same option is in multiple + # requirements files, so we need to keep track of what has been emitted + # so that we don't emit it again if it's seen again + emitted_options = set() + # keep track of which files a requirement is in so that we can + # give an accurate warning if a requirement appears multiple times. 
+ req_files = collections.defaultdict(list) + for req_file_path in requirement: + with open(req_file_path) as req_file: + for line in req_file: + if (not line.strip() or + line.strip().startswith('#') or + (skip_match and skip_match(line)) or + line.startswith(( + '-r', '--requirement', + '-Z', '--always-unzip', + '-f', '--find-links', + '-i', '--index-url', + '--pre', + '--trusted-host', + '--process-dependency-links', + '--extra-index-url'))): + line = line.rstrip() + if line not in emitted_options: + emitted_options.add(line) + yield line + continue + + if line.startswith('-e') or line.startswith('--editable'): + if line.startswith('-e'): + line = line[2:].strip() + else: + line = line[len('--editable'):].strip().lstrip('=') + line_req = InstallRequirement.from_editable( + line, + isolated=isolated, + wheel_cache=wheel_cache, + ) + else: + line_req = InstallRequirement.from_line( + COMMENT_RE.sub('', line).strip(), + isolated=isolated, + wheel_cache=wheel_cache, + ) + + if not line_req.name: + logger.info( + "Skipping line in requirement file [%s] because " + "it's not clear what it would install: %s", + req_file_path, line.strip(), + ) + logger.info( + " (add #egg=PackageName to the URL to avoid" + " this warning)" + ) + elif line_req.name not in installations: + # either it's not installed, or it is installed + # but has been processed already + if not req_files[line_req.name]: + logger.warning( + "Requirement file [%s] contains %s, but that " + "package is not installed", + req_file_path, + COMMENT_RE.sub('', line).strip(), + ) + else: + req_files[line_req.name].append(req_file_path) + else: + yield str(installations[line_req.name]).rstrip() + del installations[line_req.name] + req_files[line_req.name].append(req_file_path) + + # Warn about requirements that were included multiple times (in a + # single requirements file or in different requirements files). + for name, files in six.iteritems(req_files): + if len(files) > 1: + logger.warning("Requirement %s included multiple times [%s]", + name, ', '.join(sorted(set(files)))) + + yield( + '## The following requirements were added by ' + 'pip freeze:' + ) + for installation in sorted( + installations.values(), key=lambda x: x.name.lower()): + if canonicalize_name(installation.name) not in skip: + yield str(installation).rstrip() + + +class FrozenRequirement(object): + def __init__(self, name, req, editable, comments=()): + self.name = name + self.req = req + self.editable = editable + self.comments = comments + + _rev_re = re.compile(r'-r(\d+)$') + _date_re = re.compile(r'-(20\d\d\d\d\d\d)$') + + @classmethod + def from_dist(cls, dist, dependency_links): + location = os.path.normcase(os.path.abspath(dist.location)) + comments = [] + from pip._internal.vcs import vcs, get_src_requirement + if dist_is_editable(dist) and vcs.get_backend_name(location): + editable = True + try: + req = get_src_requirement(dist, location) + except InstallationError as exc: + logger.warning( + "Error when trying to get requirement for VCS system %s, " + "falling back to uneditable format", exc + ) + req = None + if req is None: + logger.warning( + 'Could not determine repository location of %s', location + ) + comments.append( + '## !! 
Could not determine repository location' + ) + req = dist.as_requirement() + editable = False + else: + editable = False + req = dist.as_requirement() + specs = req.specs + assert len(specs) == 1 and specs[0][0] in ["==", "==="], \ + 'Expected 1 spec with == or ===; specs = %r; dist = %r' % \ + (specs, dist) + version = specs[0][1] + ver_match = cls._rev_re.search(version) + date_match = cls._date_re.search(version) + if ver_match or date_match: + svn_backend = vcs.get_backend('svn') + if svn_backend: + svn_location = svn_backend().get_location( + dist, + dependency_links, + ) + if not svn_location: + logger.warning( + 'Warning: cannot find svn location for %s', req, + ) + comments.append( + '## FIXME: could not find svn URL in dependency_links ' + 'for this package:' + ) + else: + deprecated( + "SVN editable detection based on dependency links " + "will be dropped in the future.", + replacement=None, + gone_in="18.2", + issue=4187, + ) + comments.append( + '# Installing as editable to satisfy requirement %s:' % + req + ) + if ver_match: + rev = ver_match.group(1) + else: + rev = '{%s}' % date_match.group(1) + editable = True + req = '%s@%s#egg=%s' % ( + svn_location, + rev, + cls.egg_name(dist) + ) + return cls(dist.project_name, req, editable, comments) + + @staticmethod + def egg_name(dist): + name = dist.egg_name() + match = re.search(r'-py\d\.\d$', name) + if match: + name = name[:match.start()] + return name + + def __str__(self): + req = self.req + if self.editable: + req = '-e %s' % req + return '\n'.join(list(self.comments) + [str(req)]) + '\n' diff --git a/env/lib/python3.6/site-packages/pip/_internal/operations/prepare.py b/env/lib/python3.6/site-packages/pip/_internal/operations/prepare.py new file mode 100644 index 0000000..7740c28 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_internal/operations/prepare.py @@ -0,0 +1,355 @@ +"""Prepares a distribution for installation +""" + +import logging +import os + +from pip._vendor import pkg_resources, requests + +from pip._internal.build_env import BuildEnvironment +from pip._internal.compat import expanduser +from pip._internal.download import ( + is_dir_url, is_file_url, is_vcs_url, unpack_url, url_to_path, +) +from pip._internal.exceptions import ( + DirectoryUrlHashUnsupported, HashUnpinned, InstallationError, + PreviousBuildDirError, VcsHashUnsupported, +) +from pip._internal.utils.hashes import MissingHashes +from pip._internal.utils.logging import indent_log +from pip._internal.utils.misc import display_path, normalize_path +from pip._internal.vcs import vcs + +logger = logging.getLogger(__name__) + + +def make_abstract_dist(req): + """Factory to make an abstract dist object. + + Preconditions: Either an editable req with a source_dir, or satisfied_by or + a wheel link, or a non-editable req with a source_dir. + + :return: A concrete DistAbstraction. + """ + if req.editable: + return IsSDist(req) + elif req.link and req.link.is_wheel: + return IsWheel(req) + else: + return IsSDist(req) + + +class DistAbstraction(object): + """Abstracts out the wheel vs non-wheel Resolver.resolve() logic. + + The requirements for anything installable are as follows: + - we must be able to determine the requirement name + (or we can't correctly handle the non-upgrade case). 
+ - we must be able to generate a list of run-time dependencies + without installing any additional packages (or we would + have to either burn time by doing temporary isolated installs + or alternatively violate pips 'don't start installing unless + all requirements are available' rule - neither of which are + desirable). + - for packages with setup requirements, we must also be able + to determine their requirements without installing additional + packages (for the same reason as run-time dependencies) + - we must be able to create a Distribution object exposing the + above metadata. + """ + + def __init__(self, req): + self.req = req + + def dist(self, finder): + """Return a setuptools Dist object.""" + raise NotImplementedError(self.dist) + + def prep_for_dist(self, finder): + """Ensure that we can get a Dist for this requirement.""" + raise NotImplementedError(self.dist) + + +class IsWheel(DistAbstraction): + + def dist(self, finder): + return list(pkg_resources.find_distributions( + self.req.source_dir))[0] + + def prep_for_dist(self, finder, build_isolation): + # FIXME:https://github.com/pypa/pip/issues/1112 + pass + + +class IsSDist(DistAbstraction): + + def dist(self, finder): + dist = self.req.get_dist() + # FIXME: shouldn't be globally added. + if finder and dist.has_metadata('dependency_links.txt'): + finder.add_dependency_links( + dist.get_metadata_lines('dependency_links.txt') + ) + return dist + + def prep_for_dist(self, finder, build_isolation): + # Before calling "setup.py egg_info", we need to set-up the build + # environment. + build_requirements = self.req.get_pep_518_info() + should_isolate = build_isolation and build_requirements is not None + + if should_isolate: + # Haven't implemented PEP 517 yet, so spew a warning about it if + # build-requirements don't include setuptools and wheel. + missing_requirements = {'setuptools', 'wheel'} - { + pkg_resources.Requirement(r).key for r in build_requirements + } + if missing_requirements: + logger.warning( + "Missing build requirements in pyproject.toml for %s.", + self.req, + ) + logger.warning( + "This version of pip does not implement PEP 517 so it " + "cannot build a wheel without %s.", + " and ".join(map(repr, sorted(missing_requirements))) + ) + + # Isolate in a BuildEnvironment and install the build-time + # requirements. + self.req.build_env = BuildEnvironment() + self.req.build_env.install_requirements( + finder, build_requirements, + "Installing build dependencies" + ) + + self.req.run_egg_info() + self.req.assert_source_matches_version() + + +class Installed(DistAbstraction): + + def dist(self, finder): + return self.req.satisfied_by + + def prep_for_dist(self, finder): + pass + + +class RequirementPreparer(object): + """Prepares a Requirement + """ + + def __init__(self, build_dir, download_dir, src_dir, wheel_download_dir, + progress_bar, build_isolation, req_tracker): + super(RequirementPreparer, self).__init__() + + self.src_dir = src_dir + self.build_dir = build_dir + self.req_tracker = req_tracker + + # Where still packed archives should be written to. If None, they are + # not saved, and are deleted immediately after unpacking. + self.download_dir = download_dir + + # Where still-packed .whl files should be written to. If None, they are + # written to the download_dir parameter. Separate to download_dir to + # permit only keeping wheel archives for pip wheel. 
+ if wheel_download_dir: + wheel_download_dir = normalize_path(wheel_download_dir) + self.wheel_download_dir = wheel_download_dir + + # NOTE + # download_dir and wheel_download_dir overlap semantically and may + # be combined if we're willing to have non-wheel archives present in + # the wheelhouse output by 'pip wheel'. + + self.progress_bar = progress_bar + + # Is build isolation allowed? + self.build_isolation = build_isolation + + @property + def _download_should_save(self): + # TODO: Modify to reduce indentation needed + if self.download_dir: + self.download_dir = expanduser(self.download_dir) + if os.path.exists(self.download_dir): + return True + else: + logger.critical('Could not find download directory') + raise InstallationError( + "Could not find or access download directory '%s'" + % display_path(self.download_dir)) + return False + + def prepare_linked_requirement(self, req, session, finder, + upgrade_allowed, require_hashes): + """Prepare a requirement that would be obtained from req.link + """ + # TODO: Breakup into smaller functions + if req.link and req.link.scheme == 'file': + path = url_to_path(req.link.url) + logger.info('Processing %s', display_path(path)) + else: + logger.info('Collecting %s', req) + + with indent_log(): + # @@ if filesystem packages are not marked + # editable in a req, a non deterministic error + # occurs when the script attempts to unpack the + # build directory + req.ensure_has_source_dir(self.build_dir) + # If a checkout exists, it's unwise to keep going. version + # inconsistencies are logged later, but do not fail the + # installation. + # FIXME: this won't upgrade when there's an existing + # package unpacked in `req.source_dir` + # package unpacked in `req.source_dir` + if os.path.exists(os.path.join(req.source_dir, 'setup.py')): + raise PreviousBuildDirError( + "pip can't proceed with requirements '%s' due to a" + " pre-existing build directory (%s). This is " + "likely due to a previous installation that failed" + ". pip is being responsible and not assuming it " + "can delete this. Please delete it and try again." + % (req, req.source_dir) + ) + req.populate_link(finder, upgrade_allowed, require_hashes) + + # We can't hit this spot and have populate_link return None. + # req.satisfied_by is None here (because we're + # guarded) and upgrade has no impact except when satisfied_by + # is not None. + # Then inside find_requirement existing_applicable -> False + # If no new versions are found, DistributionNotFound is raised, + # otherwise a result is guaranteed. + assert req.link + link = req.link + + # Now that we have the real link, we can tell what kind of + # requirements we have and raise some more informative errors + # than otherwise. (For example, we can raise VcsHashUnsupported + # for a VCS URL rather than HashMissing.) + if require_hashes: + # We could check these first 2 conditions inside + # unpack_url and save repetition of conditions, but then + # we would report less-useful error messages for + # unhashable requirements, complaining that there's no + # hash provided. + if is_vcs_url(link): + raise VcsHashUnsupported() + elif is_file_url(link) and is_dir_url(link): + raise DirectoryUrlHashUnsupported() + if not req.original_link and not req.is_pinned: + # Unpinned packages are asking for trouble when a new + # version is uploaded. This isn't a security check, but + # it saves users a surprising hash mismatch in the + # future. + # + # file:/// URLs aren't pinnable, so don't complain + # about them not being pinned. 
+ raise HashUnpinned() + + hashes = req.hashes(trust_internet=not require_hashes) + if require_hashes and not hashes: + # Known-good hashes are missing for this requirement, so + # shim it with a facade object that will provoke hash + # computation and then raise a HashMissing exception + # showing the user what the hash should be. + hashes = MissingHashes() + + try: + download_dir = self.download_dir + # We always delete unpacked sdists after pip ran. + autodelete_unpacked = True + if req.link.is_wheel and self.wheel_download_dir: + # when doing 'pip wheel` we download wheels to a + # dedicated dir. + download_dir = self.wheel_download_dir + if req.link.is_wheel: + if download_dir: + # When downloading, we only unpack wheels to get + # metadata. + autodelete_unpacked = True + else: + # When installing a wheel, we use the unpacked + # wheel. + autodelete_unpacked = False + unpack_url( + req.link, req.source_dir, + download_dir, autodelete_unpacked, + session=session, hashes=hashes, + progress_bar=self.progress_bar + ) + except requests.HTTPError as exc: + logger.critical( + 'Could not install requirement %s because of error %s', + req, + exc, + ) + raise InstallationError( + 'Could not install requirement %s because of HTTP ' + 'error %s for URL %s' % + (req, exc, req.link) + ) + abstract_dist = make_abstract_dist(req) + with self.req_tracker.track(req): + abstract_dist.prep_for_dist(finder, self.build_isolation) + if self._download_should_save: + # Make a .zip of the source_dir we already created. + if req.link.scheme in vcs.all_schemes: + req.archive(self.download_dir) + return abstract_dist + + def prepare_editable_requirement(self, req, require_hashes, use_user_site, + finder): + """Prepare an editable requirement + """ + assert req.editable, "cannot prepare a non-editable req as editable" + + logger.info('Obtaining %s', req) + + with indent_log(): + if require_hashes: + raise InstallationError( + 'The editable requirement %s cannot be installed when ' + 'requiring hashes, because there is no single file to ' + 'hash.' % req + ) + req.ensure_has_source_dir(self.src_dir) + req.update_editable(not self._download_should_save) + + abstract_dist = make_abstract_dist(req) + with self.req_tracker.track(req): + abstract_dist.prep_for_dist(finder, self.build_isolation) + + if self._download_should_save: + req.archive(self.download_dir) + req.check_if_exists(use_user_site) + + return abstract_dist + + def prepare_installed_requirement(self, req, require_hashes, skip_reason): + """Prepare an already-installed requirement + """ + assert req.satisfied_by, "req should have been satisfied but isn't" + assert skip_reason is not None, ( + "did not get skip reason skipped but req.satisfied_by " + "is set to %r" % (req.satisfied_by,) + ) + logger.info( + 'Requirement %s: %s (%s)', + skip_reason, req, req.satisfied_by.version + ) + with indent_log(): + if require_hashes: + logger.debug( + 'Since it is already installed, we are trusting this ' + 'package without checking its hash. To ensure a ' + 'completely repeatable environment, install into an ' + 'empty virtualenv.' 
+ ) + abstract_dist = Installed(req) + + return abstract_dist diff --git a/env/lib/python3.6/site-packages/pip/_internal/pep425tags.py b/env/lib/python3.6/site-packages/pip/_internal/pep425tags.py new file mode 100644 index 0000000..0b5c783 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_internal/pep425tags.py @@ -0,0 +1,317 @@ +"""Generate and work with PEP 425 Compatibility Tags.""" +from __future__ import absolute_import + +import distutils.util +import logging +import platform +import re +import sys +import sysconfig +import warnings +from collections import OrderedDict + +import pip._internal.utils.glibc + +logger = logging.getLogger(__name__) + +_osx_arch_pat = re.compile(r'(.+)_(\d+)_(\d+)_(.+)') + + +def get_config_var(var): + try: + return sysconfig.get_config_var(var) + except IOError as e: # Issue #1074 + warnings.warn("{}".format(e), RuntimeWarning) + return None + + +def get_abbr_impl(): + """Return abbreviated implementation name.""" + if hasattr(sys, 'pypy_version_info'): + pyimpl = 'pp' + elif sys.platform.startswith('java'): + pyimpl = 'jy' + elif sys.platform == 'cli': + pyimpl = 'ip' + else: + pyimpl = 'cp' + return pyimpl + + +def get_impl_ver(): + """Return implementation version.""" + impl_ver = get_config_var("py_version_nodot") + if not impl_ver or get_abbr_impl() == 'pp': + impl_ver = ''.join(map(str, get_impl_version_info())) + return impl_ver + + +def get_impl_version_info(): + """Return sys.version_info-like tuple for use in decrementing the minor + version.""" + if get_abbr_impl() == 'pp': + # as per https://github.com/pypa/pip/issues/2882 + return (sys.version_info[0], sys.pypy_version_info.major, + sys.pypy_version_info.minor) + else: + return sys.version_info[0], sys.version_info[1] + + +def get_impl_tag(): + """ + Returns the Tag for this specific implementation. + """ + return "{}{}".format(get_abbr_impl(), get_impl_ver()) + + +def get_flag(var, fallback, expected=True, warn=True): + """Use a fallback method for determining SOABI flags if the needed config + var is unset or unavailable.""" + val = get_config_var(var) + if val is None: + if warn: + logger.debug("Config variable '%s' is unset, Python ABI tag may " + "be incorrect", var) + return fallback() + return val == expected + + +def get_abi_tag(): + """Return the ABI tag based on SOABI (if available) or emulate SOABI + (CPython 2, PyPy).""" + soabi = get_config_var('SOABI') + impl = get_abbr_impl() + if not soabi and impl in {'cp', 'pp'} and hasattr(sys, 'maxunicode'): + d = '' + m = '' + u = '' + if get_flag('Py_DEBUG', + lambda: hasattr(sys, 'gettotalrefcount'), + warn=(impl == 'cp')): + d = 'd' + if get_flag('WITH_PYMALLOC', + lambda: impl == 'cp', + warn=(impl == 'cp')): + m = 'm' + if get_flag('Py_UNICODE_SIZE', + lambda: sys.maxunicode == 0x10ffff, + expected=4, + warn=(impl == 'cp' and + sys.version_info < (3, 3))) \ + and sys.version_info < (3, 3): + u = 'u' + abi = '%s%s%s%s%s' % (impl, get_impl_ver(), d, m, u) + elif soabi and soabi.startswith('cpython-'): + abi = 'cp' + soabi.split('-')[1] + elif soabi: + abi = soabi.replace('.', '_').replace('-', '_') + else: + abi = None + return abi + + +def _is_running_32bit(): + return sys.maxsize == 2147483647 + + +def get_platform(): + """Return our platform name 'win32', 'linux_x86_64'""" + if sys.platform == 'darwin': + # distutils.util.get_platform() returns the release based on the value + # of MACOSX_DEPLOYMENT_TARGET on which Python was built, which may + # be significantly older than the user's current machine. 
+ release, _, machine = platform.mac_ver() + split_ver = release.split('.') + + if machine == "x86_64" and _is_running_32bit(): + machine = "i386" + elif machine == "ppc64" and _is_running_32bit(): + machine = "ppc" + + return 'macosx_{}_{}_{}'.format(split_ver[0], split_ver[1], machine) + + # XXX remove distutils dependency + result = distutils.util.get_platform().replace('.', '_').replace('-', '_') + if result == "linux_x86_64" and _is_running_32bit(): + # 32 bit Python program (running on a 64 bit Linux): pip should only + # install and run 32 bit compiled extensions in that case. + result = "linux_i686" + + return result + + +def is_manylinux1_compatible(): + # Only Linux, and only x86-64 / i686 + if get_platform() not in {"linux_x86_64", "linux_i686"}: + return False + + # Check for presence of _manylinux module + try: + import _manylinux + return bool(_manylinux.manylinux1_compatible) + except (ImportError, AttributeError): + # Fall through to heuristic check below + pass + + # Check glibc version. CentOS 5 uses glibc 2.5. + return pip._internal.utils.glibc.have_compatible_glibc(2, 5) + + +def get_darwin_arches(major, minor, machine): + """Return a list of supported arches (including group arches) for + the given major, minor and machine architecture of an macOS machine. + """ + arches = [] + + def _supports_arch(major, minor, arch): + # Looking at the application support for macOS versions in the chart + # provided by https://en.wikipedia.org/wiki/OS_X#Versions it appears + # our timeline looks roughly like: + # + # 10.0 - Introduces ppc support. + # 10.4 - Introduces ppc64, i386, and x86_64 support, however the ppc64 + # and x86_64 support is CLI only, and cannot be used for GUI + # applications. + # 10.5 - Extends ppc64 and x86_64 support to cover GUI applications. + # 10.6 - Drops support for ppc64 + # 10.7 - Drops support for ppc + # + # Given that we do not know if we're installing a CLI or a GUI + # application, we must be conservative and assume it might be a GUI + # application and behave as if ppc64 and x86_64 support did not occur + # until 10.5. + # + # Note: The above information is taken from the "Application support" + # column in the chart not the "Processor support" since I believe + # that we care about what instruction sets an application can use + # not which processors the OS supports. + if arch == 'ppc': + return (major, minor) <= (10, 5) + if arch == 'ppc64': + return (major, minor) == (10, 5) + if arch == 'i386': + return (major, minor) >= (10, 4) + if arch == 'x86_64': + return (major, minor) >= (10, 5) + if arch in groups: + for garch in groups[arch]: + if _supports_arch(major, minor, garch): + return True + return False + + groups = OrderedDict([ + ("fat", ("i386", "ppc")), + ("intel", ("x86_64", "i386")), + ("fat64", ("x86_64", "ppc64")), + ("fat32", ("x86_64", "i386", "ppc")), + ]) + + if _supports_arch(major, minor, machine): + arches.append(machine) + + for garch in groups: + if machine in groups[garch] and _supports_arch(major, minor, garch): + arches.append(garch) + + arches.append('universal') + + return arches + + +def get_supported(versions=None, noarch=False, platform=None, + impl=None, abi=None): + """Return a list of supported tags for each version specified in + `versions`. + + :param versions: a list of string versions, of the form ["33", "32"], + or None. The first version will be assumed to support our ABI. + :param platform: specify the exact platform you want valid + tags for, or None. If None, use the local system platform. 
+ :param impl: specify the exact implementation you want valid + tags for, or None. If None, use the local interpreter impl. + :param abi: specify the exact abi you want valid + tags for, or None. If None, use the local interpreter abi. + """ + supported = [] + + # Versions must be given with respect to the preference + if versions is None: + versions = [] + version_info = get_impl_version_info() + major = version_info[:-1] + # Support all previous minor Python versions. + for minor in range(version_info[-1], -1, -1): + versions.append(''.join(map(str, major + (minor,)))) + + impl = impl or get_abbr_impl() + + abis = [] + + abi = abi or get_abi_tag() + if abi: + abis[0:0] = [abi] + + abi3s = set() + import imp + for suffix in imp.get_suffixes(): + if suffix[0].startswith('.abi'): + abi3s.add(suffix[0].split('.', 2)[1]) + + abis.extend(sorted(list(abi3s))) + + abis.append('none') + + if not noarch: + arch = platform or get_platform() + if arch.startswith('macosx'): + # support macosx-10.6-intel on macosx-10.9-x86_64 + match = _osx_arch_pat.match(arch) + if match: + name, major, minor, actual_arch = match.groups() + tpl = '{}_{}_%i_%s'.format(name, major) + arches = [] + for m in reversed(range(int(minor) + 1)): + for a in get_darwin_arches(int(major), m, actual_arch): + arches.append(tpl % (m, a)) + else: + # arch pattern didn't match (?!) + arches = [arch] + elif platform is None and is_manylinux1_compatible(): + arches = [arch.replace('linux', 'manylinux1'), arch] + else: + arches = [arch] + + # Current version, current API (built specifically for our Python): + for abi in abis: + for arch in arches: + supported.append(('%s%s' % (impl, versions[0]), abi, arch)) + + # abi3 modules compatible with older version of Python + for version in versions[1:]: + # abi3 was introduced in Python 3.2 + if version in {'31', '30'}: + break + for abi in abi3s: # empty set if not Python 3 + for arch in arches: + supported.append(("%s%s" % (impl, version), abi, arch)) + + # Has binaries, does not use the Python API: + for arch in arches: + supported.append(('py%s' % (versions[0][0]), 'none', arch)) + + # No abi / arch, but requires our implementation: + supported.append(('%s%s' % (impl, versions[0]), 'none', 'any')) + # Tagged specifically as being cross-version compatible + # (with just the major version specified) + supported.append(('%s%s' % (impl, versions[0][0]), 'none', 'any')) + + # No abi / arch, generic Python + for i, version in enumerate(versions): + supported.append(('py%s' % (version,), 'none', 'any')) + if i == 0: + supported.append(('py%s' % (version[0]), 'none', 'any')) + + return supported + + +implementation_tag = get_impl_tag() diff --git a/env/lib/python3.6/site-packages/pip/_internal/req/req_file.py b/env/lib/python3.6/site-packages/pip/_internal/req/req_file.py new file mode 100644 index 0000000..f868497 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_internal/req/req_file.py @@ -0,0 +1,338 @@ +""" +Requirements file parsing +""" + +from __future__ import absolute_import + +import optparse +import os +import re +import shlex +import sys + +from pip._vendor.six.moves import filterfalse +from pip._vendor.six.moves.urllib import parse as urllib_parse + +from pip._internal import cmdoptions +from pip._internal.download import get_file_content +from pip._internal.exceptions import RequirementsFileParseError +from pip._internal.req.req_install import InstallRequirement + +__all__ = ['parse_requirements'] + +SCHEME_RE = re.compile(r'^(http|https|file):', re.I) +COMMENT_RE = 
re.compile(r'(^|\s)+#.*$') + +# Matches environment variable-style values in '${MY_VARIABLE_1}' with the +# variable name consisting of only uppercase letters, digits or the '_' +# (underscore). This follows the POSIX standard defined in IEEE Std 1003.1, +# 2013 Edition. +ENV_VAR_RE = re.compile(r'(?P<var>\$\{(?P<name>[A-Z0-9_]+)\})') + +SUPPORTED_OPTIONS = [ + cmdoptions.constraints, + cmdoptions.editable, + cmdoptions.requirements, + cmdoptions.no_index, + cmdoptions.index_url, + cmdoptions.find_links, + cmdoptions.extra_index_url, + cmdoptions.always_unzip, + cmdoptions.no_binary, + cmdoptions.only_binary, + cmdoptions.pre, + cmdoptions.process_dependency_links, + cmdoptions.trusted_host, + cmdoptions.require_hashes, +] + +# options to be passed to requirements +SUPPORTED_OPTIONS_REQ = [ + cmdoptions.install_options, + cmdoptions.global_options, + cmdoptions.hash, +] + +# the 'dest' string values +SUPPORTED_OPTIONS_REQ_DEST = [o().dest for o in SUPPORTED_OPTIONS_REQ] + + +def parse_requirements(filename, finder=None, comes_from=None, options=None, + session=None, constraint=False, wheel_cache=None): + """Parse a requirements file and yield InstallRequirement instances. + + :param filename: Path or url of requirements file. + :param finder: Instance of pip.index.PackageFinder. + :param comes_from: Origin description of requirements. + :param options: cli options. + :param session: Instance of pip.download.PipSession. + :param constraint: If true, parsing a constraint file rather than + requirements file. + :param wheel_cache: Instance of pip.wheel.WheelCache + """ + if session is None: + raise TypeError( + "parse_requirements() missing 1 required keyword argument: " + "'session'" + ) + + _, content = get_file_content( + filename, comes_from=comes_from, session=session + ) + + lines_enum = preprocess(content, options) + + for line_number, line in lines_enum: + req_iter = process_line(line, filename, line_number, finder, + comes_from, options, session, wheel_cache, + constraint=constraint) + for req in req_iter: + yield req + + +def preprocess(content, options): + """Split, filter, and join lines, and return a line iterator + + :param content: the content of the requirements file + :param options: cli options + """ + lines_enum = enumerate(content.splitlines(), start=1) + lines_enum = join_lines(lines_enum) + lines_enum = ignore_comments(lines_enum) + lines_enum = skip_regex(lines_enum, options) + lines_enum = expand_env_variables(lines_enum) + return lines_enum + + +def process_line(line, filename, line_number, finder=None, comes_from=None, + options=None, session=None, wheel_cache=None, + constraint=False): + """Process a single requirements line; This can result in creating/yielding + requirements, or updating the finder. + + For lines that contain requirements, the only options that have an effect + are from SUPPORTED_OPTIONS_REQ, and they are scoped to the + requirement. Other options from SUPPORTED_OPTIONS may be present, but are + ignored. + + For lines that do not contain requirements, the only options that have an + effect are from SUPPORTED_OPTIONS. Options from SUPPORTED_OPTIONS_REQ may + be present, but are ignored. These lines may contain multiple options + (although our docs imply only one is supported), and all are parsed and + affect the finder. + + :param constraint: If True, parsing a constraints file.
+ :param options: OptionParser options that we may update + """ + parser = build_parser(line) + defaults = parser.get_default_values() + defaults.index_url = None + if finder: + # `finder.format_control` will be updated during parsing + defaults.format_control = finder.format_control + args_str, options_str = break_args_options(line) + if sys.version_info < (2, 7, 3): + # Prior to 2.7.3, shlex cannot deal with unicode entries + options_str = options_str.encode('utf8') + opts, _ = parser.parse_args(shlex.split(options_str), defaults) + + # preserve for the nested code path + line_comes_from = '%s %s (line %s)' % ( + '-c' if constraint else '-r', filename, line_number, + ) + + # yield a line requirement + if args_str: + isolated = options.isolated_mode if options else False + if options: + cmdoptions.check_install_build_global(options, opts) + # get the options that apply to requirements + req_options = {} + for dest in SUPPORTED_OPTIONS_REQ_DEST: + if dest in opts.__dict__ and opts.__dict__[dest]: + req_options[dest] = opts.__dict__[dest] + yield InstallRequirement.from_line( + args_str, line_comes_from, constraint=constraint, + isolated=isolated, options=req_options, wheel_cache=wheel_cache + ) + + # yield an editable requirement + elif opts.editables: + isolated = options.isolated_mode if options else False + yield InstallRequirement.from_editable( + opts.editables[0], comes_from=line_comes_from, + constraint=constraint, isolated=isolated, wheel_cache=wheel_cache + ) + + # parse a nested requirements file + elif opts.requirements or opts.constraints: + if opts.requirements: + req_path = opts.requirements[0] + nested_constraint = False + else: + req_path = opts.constraints[0] + nested_constraint = True + # original file is over http + if SCHEME_RE.search(filename): + # do a url join so relative paths work + req_path = urllib_parse.urljoin(filename, req_path) + # original file and nested file are paths + elif not SCHEME_RE.search(req_path): + # do a join so relative paths work + req_path = os.path.join(os.path.dirname(filename), req_path) + # TODO: Why not use `comes_from='-r {} (line {})'` here as well? + parser = parse_requirements( + req_path, finder, comes_from, options, session, + constraint=nested_constraint, wheel_cache=wheel_cache + ) + for req in parser: + yield req + + # percolate hash-checking option upward + elif opts.require_hashes: + options.require_hashes = opts.require_hashes + + # set finder options + elif finder: + if opts.index_url: + finder.index_urls = [opts.index_url] + if opts.no_index is True: + finder.index_urls = [] + if opts.extra_index_urls: + finder.index_urls.extend(opts.extra_index_urls) + if opts.find_links: + # FIXME: it would be nice to keep track of the source + # of the find_links: support a find-links local path + # relative to a requirements file. + value = opts.find_links[0] + req_dir = os.path.dirname(os.path.abspath(filename)) + relative_to_reqs_file = os.path.join(req_dir, value) + if os.path.exists(relative_to_reqs_file): + value = relative_to_reqs_file + finder.find_links.append(value) + if opts.pre: + finder.allow_all_prereleases = True + if opts.process_dependency_links: + finder.process_dependency_links = True + if opts.trusted_hosts: + finder.secure_origins.extend( + ("*", host, "*") for host in opts.trusted_hosts) + + +def break_args_options(line): + """Break up the line into an args and options string. We only want to shlex + (and then optparse) the options, not the args. args can contain markers + which are corrupted by shlex. 
+ """ + tokens = line.split(' ') + args = [] + options = tokens[:] + for token in tokens: + if token.startswith('-') or token.startswith('--'): + break + else: + args.append(token) + options.pop(0) + return ' '.join(args), ' '.join(options) + + +def build_parser(line): + """ + Return a parser for parsing requirement lines + """ + parser = optparse.OptionParser(add_help_option=False) + + option_factories = SUPPORTED_OPTIONS + SUPPORTED_OPTIONS_REQ + for option_factory in option_factories: + option = option_factory() + parser.add_option(option) + + # By default optparse sys.exits on parsing errors. We want to wrap + # that in our own exception. + def parser_exit(self, msg): + # add offending line + msg = 'Invalid requirement: %s\n%s' % (line, msg) + raise RequirementsFileParseError(msg) + parser.exit = parser_exit + + return parser + + +def join_lines(lines_enum): + """Joins a line ending in '\' with the previous line (except when following + comments). The joined line takes on the index of the first line. + """ + primary_line_number = None + new_line = [] + for line_number, line in lines_enum: + if not line.endswith('\\') or COMMENT_RE.match(line): + if COMMENT_RE.match(line): + # this ensures comments are always matched later + line = ' ' + line + if new_line: + new_line.append(line) + yield primary_line_number, ''.join(new_line) + new_line = [] + else: + yield line_number, line + else: + if not new_line: + primary_line_number = line_number + new_line.append(line.strip('\\')) + + # last line contains \ + if new_line: + yield primary_line_number, ''.join(new_line) + + # TODO: handle space after '\'. + + +def ignore_comments(lines_enum): + """ + Strips comments and filter empty lines. + """ + for line_number, line in lines_enum: + line = COMMENT_RE.sub('', line) + line = line.strip() + if line: + yield line_number, line + + +def skip_regex(lines_enum, options): + """ + Skip lines that match '--skip-requirements-regex' pattern + + Note: the regex pattern is only built once + """ + skip_regex = options.skip_requirements_regex if options else None + if skip_regex: + pattern = re.compile(skip_regex) + lines_enum = filterfalse(lambda e: pattern.search(e[1]), lines_enum) + return lines_enum + + +def expand_env_variables(lines_enum): + """Replace all environment variables that can be retrieved via `os.getenv`. + + The only allowed format for environment variables defined in the + requirement file is `${MY_VARIABLE_1}` to ensure two things: + + 1. Strings that contain a `$` aren't accidentally (partially) expanded. + 2. Ensure consistency across platforms for requirement files. + + These points are the result of a discusssion on the `github pull + request #3514 `_. + + Valid characters in variable names follow the `POSIX standard + `_ and are limited + to uppercase letter, digits and the `_` (underscore). 
+ """ + for line_number, line in lines_enum: + for env_var, var_name in ENV_VAR_RE.findall(line): + value = os.getenv(var_name) + if not value: + continue + + line = line.replace(env_var, value) + + yield line_number, line diff --git a/env/lib/python3.6/site-packages/pip/_internal/req/req_install.py b/env/lib/python3.6/site-packages/pip/_internal/req/req_install.py new file mode 100644 index 0000000..462c80a --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_internal/req/req_install.py @@ -0,0 +1,1142 @@ +from __future__ import absolute_import + +import io +import logging +import os +import re +import shutil +import sys +import sysconfig +import traceback +import zipfile +from distutils.util import change_root +from email.parser import FeedParser # type: ignore + +from pip._vendor import pkg_resources, pytoml, six +from pip._vendor.packaging import specifiers +from pip._vendor.packaging.markers import Marker +from pip._vendor.packaging.requirements import InvalidRequirement, Requirement +from pip._vendor.packaging.utils import canonicalize_name +from pip._vendor.packaging.version import Version +from pip._vendor.packaging.version import parse as parse_version +from pip._vendor.pkg_resources import RequirementParseError, parse_requirements + +from pip._internal import wheel +from pip._internal.build_env import NoOpBuildEnvironment +from pip._internal.compat import native_str +from pip._internal.download import ( + is_archive_file, is_url, path_to_url, url_to_path, +) +from pip._internal.exceptions import InstallationError +from pip._internal.locations import ( + PIP_DELETE_MARKER_FILENAME, running_under_virtualenv, +) +from pip._internal.req.req_uninstall import UninstallPathSet +from pip._internal.utils.hashes import Hashes +from pip._internal.utils.logging import indent_log +from pip._internal.utils.misc import ( + _make_build_dir, ask_path_exists, backup_dir, call_subprocess, + display_path, dist_in_site_packages, dist_in_usersite, ensure_dir, + get_installed_version, is_installable_dir, read_text_file, rmtree, +) +from pip._internal.utils.setuptools_build import SETUPTOOLS_SHIM +from pip._internal.utils.temp_dir import TempDirectory +from pip._internal.utils.ui import open_spinner +from pip._internal.vcs import vcs +from pip._internal.wheel import Wheel, move_wheel_files + +logger = logging.getLogger(__name__) + +operators = specifiers.Specifier._operators.keys() + + +def _strip_extras(path): + m = re.match(r'^(.+)(\[[^\]]+\])$', path) + extras = None + if m: + path_no_extras = m.group(1) + extras = m.group(2) + else: + path_no_extras = path + + return path_no_extras, extras + + +class InstallRequirement(object): + """ + Represents something that may be installed later on, may have information + about where to fetch the relavant requirement and also contains logic for + installing the said requirement. 
+ """ + + def __init__(self, req, comes_from, source_dir=None, editable=False, + link=None, update=True, markers=None, + isolated=False, options=None, wheel_cache=None, + constraint=False, extras=()): + assert req is None or isinstance(req, Requirement), req + self.req = req + self.comes_from = comes_from + self.constraint = constraint + if source_dir is not None: + self.source_dir = os.path.normpath(os.path.abspath(source_dir)) + else: + self.source_dir = None + self.editable = editable + + self._wheel_cache = wheel_cache + if link is not None: + self.link = self.original_link = link + else: + from pip._internal.index import Link + self.link = self.original_link = req and req.url and Link(req.url) + + if extras: + self.extras = extras + elif req: + self.extras = { + pkg_resources.safe_extra(extra) for extra in req.extras + } + else: + self.extras = set() + if markers is not None: + self.markers = markers + else: + self.markers = req and req.marker + self._egg_info_path = None + # This holds the pkg_resources.Distribution object if this requirement + # is already available: + self.satisfied_by = None + # This hold the pkg_resources.Distribution object if this requirement + # conflicts with another installed distribution: + self.conflicts_with = None + # Temporary build location + self._temp_build_dir = TempDirectory(kind="req-build") + # Used to store the global directory where the _temp_build_dir should + # have been created. Cf _correct_build_location method. + self._ideal_build_dir = None + # True if the editable should be updated: + self.update = update + # Set to True after successful installation + self.install_succeeded = None + # UninstallPathSet of uninstalled distribution (for possible rollback) + self.uninstalled_pathset = None + self.options = options if options else {} + # Set to True after successful preparation of this requirement + self.prepared = False + self.is_direct = False + + self.isolated = isolated + self.build_env = NoOpBuildEnvironment() + + # Constructors + # TODO: Move these out of this class into custom methods. + @classmethod + def from_editable(cls, editable_req, comes_from=None, isolated=False, + options=None, wheel_cache=None, constraint=False): + from pip._internal.index import Link + + name, url, extras_override = parse_editable(editable_req) + if url.startswith('file:'): + source_dir = url_to_path(url) + else: + source_dir = None + + if name is not None: + try: + req = Requirement(name) + except InvalidRequirement: + raise InstallationError("Invalid requirement: '%s'" % name) + else: + req = None + return cls( + req, comes_from, source_dir=source_dir, + editable=True, + link=Link(url), + constraint=constraint, + isolated=isolated, + options=options if options else {}, + wheel_cache=wheel_cache, + extras=extras_override or (), + ) + + @classmethod + def from_req(cls, req, comes_from=None, isolated=False, wheel_cache=None): + try: + req = Requirement(req) + except InvalidRequirement: + raise InstallationError("Invalid requirement: '%s'" % req) + if req.url: + raise InstallationError( + "Direct url requirement (like %s) are not allowed for " + "dependencies" % req + ) + return cls(req, comes_from, isolated=isolated, wheel_cache=wheel_cache) + + @classmethod + def from_line( + cls, name, comes_from=None, isolated=False, options=None, + wheel_cache=None, constraint=False): + """Creates an InstallRequirement from a name, which might be a + requirement, directory containing 'setup.py', filename, or URL. 
+ """ + from pip._internal.index import Link + + if is_url(name): + marker_sep = '; ' + else: + marker_sep = ';' + if marker_sep in name: + name, markers = name.split(marker_sep, 1) + markers = markers.strip() + if not markers: + markers = None + else: + markers = Marker(markers) + else: + markers = None + name = name.strip() + req = None + path = os.path.normpath(os.path.abspath(name)) + link = None + extras = None + + if is_url(name): + link = Link(name) + else: + p, extras = _strip_extras(path) + looks_like_dir = os.path.isdir(p) and ( + os.path.sep in name or + (os.path.altsep is not None and os.path.altsep in name) or + name.startswith('.') + ) + if looks_like_dir: + if not is_installable_dir(p): + raise InstallationError( + "Directory %r is not installable. File 'setup.py' " + "not found." % name + ) + link = Link(path_to_url(p)) + elif is_archive_file(p): + if not os.path.isfile(p): + logger.warning( + 'Requirement %r looks like a filename, but the ' + 'file does not exist', + name + ) + link = Link(path_to_url(p)) + + # it's a local file, dir, or url + if link: + # Handle relative file URLs + if link.scheme == 'file' and re.search(r'\.\./', link.url): + link = Link( + path_to_url(os.path.normpath(os.path.abspath(link.path)))) + # wheel file + if link.is_wheel: + wheel = Wheel(link.filename) # can raise InvalidWheelFilename + req = "%s==%s" % (wheel.name, wheel.version) + else: + # set the req to the egg fragment. when it's not there, this + # will become an 'unnamed' requirement + req = link.egg_fragment + + # a requirement specifier + else: + req = name + + if extras: + extras = Requirement("placeholder" + extras.lower()).extras + else: + extras = () + if req is not None: + try: + req = Requirement(req) + except InvalidRequirement: + if os.path.sep in req: + add_msg = "It looks like a path." + add_msg += deduce_helpful_msg(req) + elif '=' in req and not any(op in req for op in operators): + add_msg = "= is not a valid operator. Did you mean == ?" + else: + add_msg = traceback.format_exc() + raise InstallationError( + "Invalid requirement: '%s'\n%s" % (req, add_msg)) + return cls( + req, comes_from, link=link, markers=markers, + isolated=isolated, + options=options if options else {}, + wheel_cache=wheel_cache, + constraint=constraint, + extras=extras, + ) + + def __str__(self): + if self.req: + s = str(self.req) + if self.link: + s += ' from %s' % self.link.url + else: + s = self.link.url if self.link else None + if self.satisfied_by is not None: + s += ' in %s' % display_path(self.satisfied_by.location) + if self.comes_from: + if isinstance(self.comes_from, six.string_types): + comes_from = self.comes_from + else: + comes_from = self.comes_from.from_path() + if comes_from: + s += ' (from %s)' % comes_from + return s + + def __repr__(self): + return '<%s object: %s editable=%r>' % ( + self.__class__.__name__, str(self), self.editable) + + def populate_link(self, finder, upgrade, require_hashes): + """Ensure that if a link can be found for this, that it is found. + + Note that self.link may still be None - if Upgrade is False and the + requirement is already installed. + + If require_hashes is True, don't use the wheel cache, because cached + wheels, always built locally, have different hashes than the files + downloaded from the index server and thus throw false hash mismatches. + Furthermore, cached wheels at present have undeterministic contents due + to file modification times. 
+ """ + if self.link is None: + self.link = finder.find_requirement(self, upgrade) + if self._wheel_cache is not None and not require_hashes: + old_link = self.link + self.link = self._wheel_cache.get(self.link, self.name) + if old_link != self.link: + logger.debug('Using cached wheel link: %s', self.link) + + # Things that are valid for all kinds of requirements? + @property + def name(self): + if self.req is None: + return None + return native_str(pkg_resources.safe_name(self.req.name)) + + @property + def specifier(self): + return self.req.specifier + + @property + def is_pinned(self): + """Return whether I am pinned to an exact version. + + For example, some-package==1.2 is pinned; some-package>1.2 is not. + """ + specifiers = self.specifier + return (len(specifiers) == 1 and + next(iter(specifiers)).operator in {'==', '==='}) + + @property + def installed_version(self): + return get_installed_version(self.name) + + def match_markers(self, extras_requested=None): + if not extras_requested: + # Provide an extra to safely evaluate the markers + # without matching any extra + extras_requested = ('',) + if self.markers is not None: + return any( + self.markers.evaluate({'extra': extra}) + for extra in extras_requested) + else: + return True + + @property + def has_hash_options(self): + """Return whether any known-good hashes are specified as options. + + These activate --require-hashes mode; hashes specified as part of a + URL do not. + + """ + return bool(self.options.get('hashes', {})) + + def hashes(self, trust_internet=True): + """Return a hash-comparer that considers my option- and URL-based + hashes to be known-good. + + Hashes in URLs--ones embedded in the requirements file, not ones + downloaded from an index server--are almost peers with ones from + flags. They satisfy --require-hashes (whether it was implicitly or + explicitly activated) but do not activate it. md5 and sha224 are not + allowed in flags, which should nudge people toward good algos. We + always OR all hashes together, even ones from URLs. + + :param trust_internet: Whether to trust URL-based (#md5=...) hashes + downloaded from the internet, as by populate_link() + + """ + good_hashes = self.options.get('hashes', {}).copy() + link = self.link if trust_internet else self.original_link + if link and link.hash: + good_hashes.setdefault(link.hash_name, []).append(link.hash) + return Hashes(good_hashes) + + def from_path(self): + """Format a nice indicator to show where this "comes from" + """ + if self.req is None: + return None + s = str(self.req) + if self.comes_from: + if isinstance(self.comes_from, six.string_types): + comes_from = self.comes_from + else: + comes_from = self.comes_from.from_path() + if comes_from: + s += '->' + comes_from + return s + + def build_location(self, build_dir): + assert build_dir is not None + if self._temp_build_dir.path is not None: + return self._temp_build_dir.path + if self.req is None: + # for requirement via a path to a directory: the name of the + # package is not available yet so we create a temp directory + # Once run_egg_info will have run, we'll be able + # to fix it via _correct_build_location + # Some systems have /tmp as a symlink which confuses custom + # builds (such as numpy). Thus, we ensure that the real path + # is returned. + self._temp_build_dir.create() + self._ideal_build_dir = build_dir + + return self._temp_build_dir.path + if self.editable: + name = self.name.lower() + else: + name = self.name + # FIXME: Is there a better place to create the build_dir? 
(hg and bzr + # need this) + if not os.path.exists(build_dir): + logger.debug('Creating directory %s', build_dir) + _make_build_dir(build_dir) + return os.path.join(build_dir, name) + + def _correct_build_location(self): + """Move self._temp_build_dir to self._ideal_build_dir/self.req.name + + For some requirements (e.g. a path to a directory), the name of the + package is not available until we run egg_info, so the build_location + will return a temporary directory and store the _ideal_build_dir. + + This is only called by self.egg_info_path to fix the temporary build + directory. + """ + if self.source_dir is not None: + return + assert self.req is not None + assert self._temp_build_dir.path + assert self._ideal_build_dir.path + old_location = self._temp_build_dir.path + self._temp_build_dir.path = None + + new_location = self.build_location(self._ideal_build_dir) + if os.path.exists(new_location): + raise InstallationError( + 'A package already exists in %s; please remove it to continue' + % display_path(new_location)) + logger.debug( + 'Moving package %s from %s to new location %s', + self, display_path(old_location), display_path(new_location), + ) + shutil.move(old_location, new_location) + self._temp_build_dir.path = new_location + self._ideal_build_dir = None + self.source_dir = os.path.normpath(os.path.abspath(new_location)) + self._egg_info_path = None + + def remove_temporary_source(self): + """Remove the source files from this requirement, if they are marked + for deletion""" + if self.source_dir and os.path.exists( + os.path.join(self.source_dir, PIP_DELETE_MARKER_FILENAME)): + logger.debug('Removing source in %s', self.source_dir) + rmtree(self.source_dir) + self.source_dir = None + self._temp_build_dir.cleanup() + self.build_env.cleanup() + + def check_if_exists(self, use_user_site): + """Find an installed distribution that satisfies or conflicts + with this requirement, and set self.satisfied_by or + self.conflicts_with appropriately. + """ + if self.req is None: + return False + try: + # get_distribution() will resolve the entire list of requirements + # anyway, and we've already determined that we need the requirement + # in question, so strip the marker so that we don't try to + # evaluate it. 
+ no_marker = Requirement(str(self.req)) + no_marker.marker = None + self.satisfied_by = pkg_resources.get_distribution(str(no_marker)) + if self.editable and self.satisfied_by: + self.conflicts_with = self.satisfied_by + # when installing editables, nothing pre-existing should ever + # satisfy + self.satisfied_by = None + return True + except pkg_resources.DistributionNotFound: + return False + except pkg_resources.VersionConflict: + existing_dist = pkg_resources.get_distribution( + self.req.name + ) + if use_user_site: + if dist_in_usersite(existing_dist): + self.conflicts_with = existing_dist + elif (running_under_virtualenv() and + dist_in_site_packages(existing_dist)): + raise InstallationError( + "Will not install to the user site because it will " + "lack sys.path precedence to %s in %s" % + (existing_dist.project_name, existing_dist.location) + ) + else: + self.conflicts_with = existing_dist + return True + + # Things valid for wheels + @property + def is_wheel(self): + return self.link and self.link.is_wheel + + def move_wheel_files(self, wheeldir, root=None, home=None, prefix=None, + warn_script_location=True, use_user_site=False, + pycompile=True): + move_wheel_files( + self.name, self.req, wheeldir, + user=use_user_site, + home=home, + root=root, + prefix=prefix, + pycompile=pycompile, + isolated=self.isolated, + warn_script_location=warn_script_location, + ) + + # Things valid for sdists + @property + def setup_py_dir(self): + return os.path.join( + self.source_dir, + self.link and self.link.subdirectory_fragment or '') + + @property + def setup_py(self): + assert self.source_dir, "No source dir for %s" % self + + setup_py = os.path.join(self.setup_py_dir, 'setup.py') + + # Python2 __file__ should not be unicode + if six.PY2 and isinstance(setup_py, six.text_type): + setup_py = setup_py.encode(sys.getfilesystemencoding()) + + return setup_py + + @property + def pyproject_toml(self): + assert self.source_dir, "No source dir for %s" % self + + pp_toml = os.path.join(self.setup_py_dir, 'pyproject.toml') + + # Python2 __file__ should not be unicode + if six.PY2 and isinstance(pp_toml, six.text_type): + pp_toml = pp_toml.encode(sys.getfilesystemencoding()) + + return pp_toml + + def get_pep_518_info(self): + """Get PEP 518 build-time requirements. + + Returns the list of the packages required to build the project, + specified as per PEP 518 within the package. If `pyproject.toml` is not + present, returns None to signify not using the same. + """ + # If pyproject.toml does not exist, don't do anything. + if not os.path.isfile(self.pyproject_toml): + return None + + error_template = ( + "{package} has a pyproject.toml file that does not comply " + "with PEP 518: {reason}" + ) + + with io.open(self.pyproject_toml, encoding="utf-8") as f: + pp_toml = pytoml.load(f) + + # If there is no build-system table, just use setuptools and wheel. 
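A sketch of the PEP 518 lookup described above, assuming Python 3.11+ so the standard-library tomllib can stand in for the vendored pytoml parser used here. The fallback to setuptools and wheel mirrors the comment above; validation is simplified.

import tomllib

def build_requires(pyproject_path):
    with open(pyproject_path, 'rb') as f:
        data = tomllib.load(f)
    if 'build-system' not in data:
        return ['setuptools', 'wheel']  # historical default
    requires = data['build-system'].get('requires')
    if not (isinstance(requires, list) and
            all(isinstance(r, str) for r in requires)):
        raise ValueError("'build-system.requires' must be a list of strings")
    return requires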
+ if "build-system" not in pp_toml: + return ["setuptools", "wheel"] + + # Specifying the build-system table but not the requires key is invalid + build_system = pp_toml["build-system"] + if "requires" not in build_system: + raise InstallationError( + error_template.format(package=self, reason=( + "it has a 'build-system' table but not " + "'build-system.requires' which is mandatory in the table" + )) + ) + + # Error out if it's not a list of strings + requires = build_system["requires"] + if not _is_list_of_str(requires): + raise InstallationError(error_template.format( + package=self, + reason="'build-system.requires' is not a list of strings.", + )) + + return requires + + def run_egg_info(self): + assert self.source_dir + if self.name: + logger.debug( + 'Running setup.py (path:%s) egg_info for package %s', + self.setup_py, self.name, + ) + else: + logger.debug( + 'Running setup.py (path:%s) egg_info for package from %s', + self.setup_py, self.link, + ) + + with indent_log(): + script = SETUPTOOLS_SHIM % self.setup_py + base_cmd = [sys.executable, '-c', script] + if self.isolated: + base_cmd += ["--no-user-cfg"] + egg_info_cmd = base_cmd + ['egg_info'] + # We can't put the .egg-info files at the root, because then the + # source code will be mistaken for an installed egg, causing + # problems + if self.editable: + egg_base_option = [] + else: + egg_info_dir = os.path.join(self.setup_py_dir, 'pip-egg-info') + ensure_dir(egg_info_dir) + egg_base_option = ['--egg-base', 'pip-egg-info'] + with self.build_env: + call_subprocess( + egg_info_cmd + egg_base_option, + cwd=self.setup_py_dir, + show_stdout=False, + command_desc='python setup.py egg_info') + + if not self.req: + if isinstance(parse_version(self.pkg_info()["Version"]), Version): + op = "==" + else: + op = "===" + self.req = Requirement( + "".join([ + self.pkg_info()["Name"], + op, + self.pkg_info()["Version"], + ]) + ) + self._correct_build_location() + else: + metadata_name = canonicalize_name(self.pkg_info()["Name"]) + if canonicalize_name(self.req.name) != metadata_name: + logger.warning( + 'Running setup.py (path:%s) egg_info for package %s ' + 'produced metadata for project name %s. Fix your ' + '#egg=%s fragments.', + self.setup_py, self.name, metadata_name, self.name + ) + self.req = Requirement(metadata_name) + + def egg_info_data(self, filename): + if self.satisfied_by is not None: + if not self.satisfied_by.has_metadata(filename): + return None + return self.satisfied_by.get_metadata(filename) + assert self.source_dir + filename = self.egg_info_path(filename) + if not os.path.exists(filename): + return None + data = read_text_file(filename) + return data + + def egg_info_path(self, filename): + if self._egg_info_path is None: + if self.editable: + base = self.source_dir + else: + base = os.path.join(self.setup_py_dir, 'pip-egg-info') + filenames = os.listdir(base) + if self.editable: + filenames = [] + for root, dirs, files in os.walk(base): + for dir in vcs.dirnames: + if dir in dirs: + dirs.remove(dir) + # Iterate over a copy of ``dirs``, since mutating + # a list while iterating over it can cause trouble. + # (See https://github.com/pypa/pip/pull/462.) 
+ for dir in list(dirs): + # Don't search in anything that looks like a virtualenv + # environment + if ( + os.path.lexists( + os.path.join(root, dir, 'bin', 'python') + ) or + os.path.exists( + os.path.join( + root, dir, 'Scripts', 'Python.exe' + ) + )): + dirs.remove(dir) + # Also don't search through tests + elif dir == 'test' or dir == 'tests': + dirs.remove(dir) + filenames.extend([os.path.join(root, dir) + for dir in dirs]) + filenames = [f for f in filenames if f.endswith('.egg-info')] + + if not filenames: + raise InstallationError( + "Files/directories (from %s) not found in %s" + % (filename, base) + ) + # if we have more than one match, we pick the toplevel one. This + # can easily be the case if there is a dist folder which contains + # an extracted tarball for testing purposes. + if len(filenames) > 1: + filenames.sort( + key=lambda x: x.count(os.path.sep) + + (os.path.altsep and x.count(os.path.altsep) or 0) + ) + self._egg_info_path = os.path.join(base, filenames[0]) + return os.path.join(self._egg_info_path, filename) + + def pkg_info(self): + p = FeedParser() + data = self.egg_info_data('PKG-INFO') + if not data: + logger.warning( + 'No PKG-INFO file found in %s', + display_path(self.egg_info_path('PKG-INFO')), + ) + p.feed(data or '') + return p.close() + + _requirements_section_re = re.compile(r'\[(.*?)\]') + + def get_dist(self): + """Return a pkg_resources.Distribution built from self.egg_info_path""" + egg_info = self.egg_info_path('').rstrip(os.path.sep) + base_dir = os.path.dirname(egg_info) + metadata = pkg_resources.PathMetadata(base_dir, egg_info) + dist_name = os.path.splitext(os.path.basename(egg_info))[0] + return pkg_resources.Distribution( + os.path.dirname(egg_info), + project_name=dist_name, + metadata=metadata, + ) + + def assert_source_matches_version(self): + assert self.source_dir + version = self.pkg_info()['version'] + if self.req.specifier and version not in self.req.specifier: + logger.warning( + 'Requested %s, but installing version %s', + self, + version, + ) + else: + logger.debug( + 'Source in %s has version %s, which satisfies requirement %s', + display_path(self.source_dir), + version, + self, + ) + + # For both source distributions and editables + def ensure_has_source_dir(self, parent_dir): + """Ensure that a source_dir is set. + + This will create a temporary build dir if the name of the requirement + isn't known yet. + + :param parent_dir: The ideal pip parent_dir for the source_dir. + Generally src_dir for editables and build_dir for sdists. + :return: self.source_dir + """ + if self.source_dir is None: + self.source_dir = self.build_location(parent_dir) + return self.source_dir + + # For editable installations + def install_editable(self, install_options, + global_options=(), prefix=None): + logger.info('Running setup.py develop for %s', self.name) + + if self.isolated: + global_options = list(global_options) + ["--no-user-cfg"] + + if prefix: + prefix_param = ['--prefix={}'.format(prefix)] + install_options = list(install_options) + prefix_param + + with indent_log(): + # FIXME: should we do --install-headers here too? 
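pkg_info() above feeds the egg-info PKG-INFO text into email.parser.FeedParser, since the metadata file is RFC 822 style. A small self-contained example with made-up metadata values.

from email.parser import FeedParser

pkg_info_text = (
    "Metadata-Version: 1.2\n"
    "Name: example-dist\n"
    "Version: 1.0.3\n"
)
parser = FeedParser()
parser.feed(pkg_info_text)
meta = parser.close()
print(meta['Name'], meta['Version'])  # example-dist 1.0.3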
+ with self.build_env: + call_subprocess( + [ + sys.executable, + '-c', + SETUPTOOLS_SHIM % self.setup_py + ] + + list(global_options) + + ['develop', '--no-deps'] + + list(install_options), + + cwd=self.setup_py_dir, + show_stdout=False, + ) + + self.install_succeeded = True + + def update_editable(self, obtain=True): + if not self.link: + logger.debug( + "Cannot update repository at %s; repository location is " + "unknown", + self.source_dir, + ) + return + assert self.editable + assert self.source_dir + if self.link.scheme == 'file': + # Static paths don't get updated + return + assert '+' in self.link.url, "bad url: %r" % self.link.url + if not self.update: + return + vc_type, url = self.link.url.split('+', 1) + backend = vcs.get_backend(vc_type) + if backend: + vcs_backend = backend(self.link.url) + if obtain: + vcs_backend.obtain(self.source_dir) + else: + vcs_backend.export(self.source_dir) + else: + assert 0, ( + 'Unexpected version control type (in %s): %s' + % (self.link, vc_type)) + + # Top-level Actions + def uninstall(self, auto_confirm=False, verbose=False, + use_user_site=False): + """ + Uninstall the distribution currently satisfying this requirement. + + Prompts before removing or modifying files unless + ``auto_confirm`` is True. + + Refuses to delete or modify files outside of ``sys.prefix`` - + thus uninstallation within a virtual environment can only + modify that virtual environment, even if the virtualenv is + linked to global site-packages. + + """ + if not self.check_if_exists(use_user_site): + logger.warning("Skipping %s as it is not installed.", self.name) + return + dist = self.satisfied_by or self.conflicts_with + + uninstalled_pathset = UninstallPathSet.from_dist(dist) + uninstalled_pathset.remove(auto_confirm, verbose) + return uninstalled_pathset + + def _clean_zip_name(self, name, prefix): # only used by archive. + assert name.startswith(prefix + os.path.sep), ( + "name %r doesn't start with prefix %r" % (name, prefix) + ) + name = name[len(prefix) + 1:] + name = name.replace(os.path.sep, '/') + return name + + # TODO: Investigate if this should be kept in InstallRequirement + # Seems to be used only when VCS + downloads + def archive(self, build_dir): + assert self.source_dir + create_archive = True + archive_name = '%s-%s.zip' % (self.name, self.pkg_info()["version"]) + archive_path = os.path.join(build_dir, archive_name) + if os.path.exists(archive_path): + response = ask_path_exists( + 'The file %s exists. 
(i)gnore, (w)ipe, (b)ackup, (a)bort ' % + display_path(archive_path), ('i', 'w', 'b', 'a')) + if response == 'i': + create_archive = False + elif response == 'w': + logger.warning('Deleting %s', display_path(archive_path)) + os.remove(archive_path) + elif response == 'b': + dest_file = backup_dir(archive_path) + logger.warning( + 'Backing up %s to %s', + display_path(archive_path), + display_path(dest_file), + ) + shutil.move(archive_path, dest_file) + elif response == 'a': + sys.exit(-1) + if create_archive: + zip = zipfile.ZipFile( + archive_path, 'w', zipfile.ZIP_DEFLATED, + allowZip64=True + ) + dir = os.path.normcase(os.path.abspath(self.setup_py_dir)) + for dirpath, dirnames, filenames in os.walk(dir): + if 'pip-egg-info' in dirnames: + dirnames.remove('pip-egg-info') + for dirname in dirnames: + dirname = os.path.join(dirpath, dirname) + name = self._clean_zip_name(dirname, dir) + zipdir = zipfile.ZipInfo(self.name + '/' + name + '/') + zipdir.external_attr = 0x1ED << 16 # 0o755 + zip.writestr(zipdir, '') + for filename in filenames: + if filename == PIP_DELETE_MARKER_FILENAME: + continue + filename = os.path.join(dirpath, filename) + name = self._clean_zip_name(filename, dir) + zip.write(filename, self.name + '/' + name) + zip.close() + logger.info('Saved %s', display_path(archive_path)) + + def install(self, install_options, global_options=None, root=None, + home=None, prefix=None, warn_script_location=True, + use_user_site=False, pycompile=True): + global_options = global_options if global_options is not None else [] + if self.editable: + self.install_editable( + install_options, global_options, prefix=prefix, + ) + return + if self.is_wheel: + version = wheel.wheel_version(self.source_dir) + wheel.check_compatibility(version, self.name) + + self.move_wheel_files( + self.source_dir, root=root, prefix=prefix, home=home, + warn_script_location=warn_script_location, + use_user_site=use_user_site, pycompile=pycompile, + ) + self.install_succeeded = True + return + + # Extend the list of global and install options passed on to + # the setup.py call with the ones from the requirements file. + # Options specified in requirements file override those + # specified on the command line, since the last option given + # to setup.py is the one that is used. 
+ global_options = list(global_options) + \ + self.options.get('global_options', []) + install_options = list(install_options) + \ + self.options.get('install_options', []) + + if self.isolated: + global_options = global_options + ["--no-user-cfg"] + + with TempDirectory(kind="record") as temp_dir: + record_filename = os.path.join(temp_dir.path, 'install-record.txt') + install_args = self.get_install_args( + global_options, record_filename, root, prefix, pycompile, + ) + msg = 'Running setup.py install for %s' % (self.name,) + with open_spinner(msg) as spinner: + with indent_log(): + with self.build_env: + call_subprocess( + install_args + install_options, + cwd=self.setup_py_dir, + show_stdout=False, + spinner=spinner, + ) + + if not os.path.exists(record_filename): + logger.debug('Record file %s not found', record_filename) + return + self.install_succeeded = True + + def prepend_root(path): + if root is None or not os.path.isabs(path): + return path + else: + return change_root(root, path) + + with open(record_filename) as f: + for line in f: + directory = os.path.dirname(line) + if directory.endswith('.egg-info'): + egg_info_dir = prepend_root(directory) + break + else: + logger.warning( + 'Could not find .egg-info directory in install record' + ' for %s', + self, + ) + # FIXME: put the record somewhere + # FIXME: should this be an error? + return + new_lines = [] + with open(record_filename) as f: + for line in f: + filename = line.strip() + if os.path.isdir(filename): + filename += os.path.sep + new_lines.append( + os.path.relpath(prepend_root(filename), egg_info_dir) + ) + new_lines.sort() + ensure_dir(egg_info_dir) + inst_files_path = os.path.join(egg_info_dir, 'installed-files.txt') + with open(inst_files_path, 'w') as f: + f.write('\n'.join(new_lines) + '\n') + + def get_install_args(self, global_options, record_filename, root, prefix, + pycompile): + install_args = [sys.executable, "-u"] + install_args.append('-c') + install_args.append(SETUPTOOLS_SHIM % self.setup_py) + install_args += list(global_options) + \ + ['install', '--record', record_filename] + install_args += ['--single-version-externally-managed'] + + if root is not None: + install_args += ['--root', root] + if prefix is not None: + install_args += ['--prefix', prefix] + + if pycompile: + install_args += ["--compile"] + else: + install_args += ["--no-compile"] + + if running_under_virtualenv(): + py_ver_str = 'python' + sysconfig.get_python_version() + install_args += ['--install-headers', + os.path.join(sys.prefix, 'include', 'site', + py_ver_str, self.name)] + + return install_args + + +def parse_editable(editable_req): + """Parses an editable requirement into: + - a requirement name + - an URL + - extras + - editable options + Accepted requirements: + svn+http://blahblah@rev#egg=Foobar[baz]&subdirectory=version_subdir + .[some_extra] + """ + + from pip._internal.index import Link + + url = editable_req + + # If a file path is specified with extras, strip off the extras. + url_no_extras, extras = _strip_extras(url) + + if os.path.isdir(url_no_extras): + if not os.path.exists(os.path.join(url_no_extras, 'setup.py')): + raise InstallationError( + "Directory %r is not installable. File 'setup.py' not found." 
% + url_no_extras + ) + # Treating it as code that has already been checked out + url_no_extras = path_to_url(url_no_extras) + + if url_no_extras.lower().startswith('file:'): + package_name = Link(url_no_extras).egg_fragment + if extras: + return ( + package_name, + url_no_extras, + Requirement("placeholder" + extras.lower()).extras, + ) + else: + return package_name, url_no_extras, None + + for version_control in vcs: + if url.lower().startswith('%s:' % version_control): + url = '%s+%s' % (version_control, url) + break + + if '+' not in url: + raise InstallationError( + '%s should either be a path to a local project or a VCS url ' + 'beginning with svn+, git+, hg+, or bzr+' % + editable_req + ) + + vc_type = url.split('+', 1)[0].lower() + + if not vcs.get_backend(vc_type): + error_message = 'For --editable=%s only ' % editable_req + \ + ', '.join([backend.name + '+URL' for backend in vcs.backends]) + \ + ' is currently supported' + raise InstallationError(error_message) + + package_name = Link(url).egg_fragment + if not package_name: + raise InstallationError( + "Could not detect requirement name for '%s', please specify one " + "with #egg=your_package_name" % editable_req + ) + return package_name, url, None + + +def deduce_helpful_msg(req): + """Returns helpful msg in case requirements file does not exist, + or cannot be parsed. + + :params req: Requirements file path + """ + msg = "" + if os.path.exists(req): + msg = " It does exist." + # Try to parse and check if it is a requirements file. + try: + with open(req, 'r') as fp: + # parse first line only + next(parse_requirements(fp.read())) + msg += " The argument you provided " + \ + "(%s) appears to be a" % (req) + \ + " requirements file. If that is the" + \ + " case, use the '-r' flag to install" + \ + " the packages specified within it." + except RequirementParseError: + logger.debug("Cannot parse '%s' as requirements \ + file" % (req), exc_info=1) + else: + msg += " File '%s' does not exist." % (req) + return msg + + +def _is_list_of_str(obj): + return ( + isinstance(obj, list) and + all(isinstance(item, six.string_types) for item in obj) + ) diff --git a/env/lib/python3.6/site-packages/pip/_internal/req/req_set.py b/env/lib/python3.6/site-packages/pip/_internal/req/req_set.py new file mode 100644 index 0000000..2bc6b74 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_internal/req/req_set.py @@ -0,0 +1,161 @@ +from __future__ import absolute_import + +import logging +from collections import OrderedDict + +from pip._internal.exceptions import InstallationError +from pip._internal.utils.logging import indent_log +from pip._internal.wheel import Wheel + +logger = logging.getLogger(__name__) + + +class RequirementSet(object): + + def __init__(self, require_hashes=False): + """Create a RequirementSet. 
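parse_editable above relies on the "#egg=" fragment of a VCS URL to learn the project name. A standalone illustration of that extraction (not pip's Link class; the URL is made up).

from urllib.parse import parse_qs, urlsplit

def egg_fragment(url):
    fragment = urlsplit(url).fragment
    return parse_qs(fragment).get('egg', [None])[0]

print(egg_fragment('git+https://example.com/repo.git#egg=Foobar'))  # Foobar
print(egg_fragment('git+https://example.com/repo.git'))             # None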
+ """ + + self.requirements = OrderedDict() + self.require_hashes = require_hashes + + # Mapping of alias: real_name + self.requirement_aliases = {} + self.unnamed_requirements = [] + self.successfully_downloaded = [] + self.reqs_to_cleanup = [] + + def __str__(self): + reqs = [req for req in self.requirements.values() + if not req.comes_from] + reqs.sort(key=lambda req: req.name.lower()) + return ' '.join([str(req.req) for req in reqs]) + + def __repr__(self): + reqs = [req for req in self.requirements.values()] + reqs.sort(key=lambda req: req.name.lower()) + reqs_str = ', '.join([str(req.req) for req in reqs]) + return ('<%s object; %d requirement(s): %s>' + % (self.__class__.__name__, len(reqs), reqs_str)) + + def add_requirement(self, install_req, parent_req_name=None, + extras_requested=None): + """Add install_req as a requirement to install. + + :param parent_req_name: The name of the requirement that needed this + added. The name is used because when multiple unnamed requirements + resolve to the same name, we could otherwise end up with dependency + links that point outside the Requirements set. parent_req must + already be added. Note that None implies that this is a user + supplied requirement, vs an inferred one. + :param extras_requested: an iterable of extras used to evaluate the + environment markers. + :return: Additional requirements to scan. That is either [] if + the requirement is not applicable, or [install_req] if the + requirement is applicable and has just been added. + """ + name = install_req.name + if not install_req.match_markers(extras_requested): + logger.info("Ignoring %s: markers '%s' don't match your " + "environment", install_req.name, + install_req.markers) + return [], None + + # This check has to come after we filter requirements with the + # environment markers. + if install_req.link and install_req.link.is_wheel: + wheel = Wheel(install_req.link.filename) + if not wheel.supported(): + raise InstallationError( + "%s is not a supported wheel on this platform." % + wheel.filename + ) + + # This next bit is really a sanity check. + assert install_req.is_direct == (parent_req_name is None), ( + "a direct req shouldn't have a parent and also, " + "a non direct req should have a parent" + ) + + if not name: + # url or path requirement w/o an egg fragment + self.unnamed_requirements.append(install_req) + return [install_req], None + else: + try: + existing_req = self.get_requirement(name) + except KeyError: + existing_req = None + if (parent_req_name is None and existing_req and not + existing_req.constraint and + existing_req.extras == install_req.extras and not + existing_req.req.specifier == install_req.req.specifier): + raise InstallationError( + 'Double requirement given: %s (already in %s, name=%r)' + % (install_req, existing_req, name)) + if not existing_req: + # Add requirement + self.requirements[name] = install_req + # FIXME: what about other normalizations? E.g., _ vs. -? + if name.lower() != name: + self.requirement_aliases[name.lower()] = name + result = [install_req] + else: + # Assume there's no need to scan, and that we've already + # encountered this for scanning. 
+                result = []
+                if not install_req.constraint and existing_req.constraint:
+                    if (install_req.link and not (existing_req.link and
+                       install_req.link.path == existing_req.link.path)):
+                        self.reqs_to_cleanup.append(install_req)
+                        raise InstallationError(
+                            "Could not satisfy constraints for '%s': "
+                            "installation from path or url cannot be "
+                            "constrained to a version" % name,
+                        )
+                    # If we're now installing a constraint, mark the existing
+                    # object for real installation.
+                    existing_req.constraint = False
+                    existing_req.extras = tuple(
+                        sorted(set(existing_req.extras).union(
+                            set(install_req.extras))))
+                    logger.debug("Setting %s extras to: %s",
+                                 existing_req, existing_req.extras)
+                    # And now we need to scan this.
+                    result = [existing_req]
+                    # Canonicalise to the already-added object for the backref
+                    # check below.
+                    install_req = existing_req
+
+            # We return install_req here to allow for the caller to add it to
+            # the dependency information for the parent package.
+            return result, install_req
+
+    def has_requirement(self, project_name):
+        name = project_name.lower()
+        if (name in self.requirements and
+                not self.requirements[name].constraint or
+                name in self.requirement_aliases and
+                not self.requirements[self.requirement_aliases[name]].constraint):
+            return True
+        return False
+
+    @property
+    def has_requirements(self):
+        return list(req for req in self.requirements.values() if not
+                    req.constraint) or self.unnamed_requirements
+
+    def get_requirement(self, project_name):
+        for name in project_name, project_name.lower():
+            if name in self.requirements:
+                return self.requirements[name]
+            if name in self.requirement_aliases:
+                return self.requirements[self.requirement_aliases[name]]
+        raise KeyError("No project with the name %r" % project_name)
+
+    def cleanup_files(self):
+        """Clean up files, remove builds."""
+        logger.debug('Cleaning up...')
+        with indent_log():
+            for req in self.reqs_to_cleanup:
+                req.remove_temporary_source()
diff --git a/env/lib/python3.6/site-packages/pip/_internal/req/req_tracker.py b/env/lib/python3.6/site-packages/pip/_internal/req/req_tracker.py
new file mode 100644
index 0000000..0a86f4c
--- /dev/null
+++ b/env/lib/python3.6/site-packages/pip/_internal/req/req_tracker.py
@@ -0,0 +1,76 @@
+from __future__ import absolute_import
+
+import contextlib
+import errno
+import hashlib
+import logging
+import os
+
+from pip._internal.utils.temp_dir import TempDirectory
+
+logger = logging.getLogger(__name__)
+
+
+class RequirementTracker(object):
+
+    def __init__(self):
+        self._root = os.environ.get('PIP_REQ_TRACKER')
+        if self._root is None:
+            self._temp_dir = TempDirectory(delete=False, kind='req-tracker')
+            self._temp_dir.create()
+            self._root = os.environ['PIP_REQ_TRACKER'] = self._temp_dir.path
+            logger.debug('Created requirements tracker %r', self._root)
+        else:
+            self._temp_dir = None
+            logger.debug('Re-using requirements tracker %r', self._root)
+        self._entries = set()
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        self.cleanup()
+
+    def _entry_path(self, link):
+        hashed = hashlib.sha224(link.url_without_fragment.encode()).hexdigest()
+        return os.path.join(self._root, hashed)
+
+    def add(self, req):
+        link = req.link
+        info = str(req)
+        entry_path = self._entry_path(link)
+        try:
+            with open(entry_path) as fp:
+                # Error, there's already a build in progress.
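The tracker above records each build in progress as a file whose name is the SHA-224 of the link URL, so a second attempt to build the same URL can be detected. A minimal sketch of that bookkeeping, with a made-up URL and a throwaway directory.

import hashlib
import os
import tempfile

root = tempfile.mkdtemp(prefix='req-tracker-')
url = 'https://example.com/pkgs/example-1.0.tar.gz'
entry = os.path.join(root, hashlib.sha224(url.encode()).hexdigest())

with open(entry, 'w') as fp:      # mark the build as in progress
    fp.write(url)
print(os.path.exists(entry))      # True -> a concurrent build would bail out
os.unlink(entry)                  # build finished: remove the marker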
+ raise LookupError('%s is already being built: %s' + % (link, fp.read())) + except IOError as e: + if e.errno != errno.ENOENT: + raise + assert req not in self._entries + with open(entry_path, 'w') as fp: + fp.write(info) + self._entries.add(req) + logger.debug('Added %s to build tracker %r', req, self._root) + + def remove(self, req): + link = req.link + self._entries.remove(req) + os.unlink(self._entry_path(link)) + logger.debug('Removed %s from build tracker %r', req, self._root) + + def cleanup(self): + for req in set(self._entries): + self.remove(req) + remove = self._temp_dir is not None + if remove: + self._temp_dir.cleanup() + logger.debug('%s build tracker %r', + 'Removed' if remove else 'Cleaned', + self._root) + + @contextlib.contextmanager + def track(self, req): + self.add(req) + yield + self.remove(req) diff --git a/env/lib/python3.6/site-packages/pip/_internal/req/req_uninstall.py b/env/lib/python3.6/site-packages/pip/_internal/req/req_uninstall.py new file mode 100644 index 0000000..f7ec3a8 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_internal/req/req_uninstall.py @@ -0,0 +1,457 @@ +from __future__ import absolute_import + +import csv +import functools +import logging +import os +import sys +import sysconfig + +from pip._vendor import pkg_resources + +from pip._internal.compat import WINDOWS, cache_from_source, uses_pycache +from pip._internal.exceptions import UninstallationError +from pip._internal.locations import bin_py, bin_user +from pip._internal.utils.logging import indent_log +from pip._internal.utils.misc import ( + FakeFile, ask, dist_in_usersite, dist_is_local, egg_link_path, is_local, + normalize_path, renames, +) +from pip._internal.utils.temp_dir import TempDirectory + +logger = logging.getLogger(__name__) + + +def _script_names(dist, script_name, is_gui): + """Create the fully qualified name of the files created by + {console,gui}_scripts for the given ``dist``. + Returns the list of file names + """ + if dist_in_usersite(dist): + bin_dir = bin_user + else: + bin_dir = bin_py + exe_name = os.path.join(bin_dir, script_name) + paths_to_remove = [exe_name] + if WINDOWS: + paths_to_remove.append(exe_name + '.exe') + paths_to_remove.append(exe_name + '.exe.manifest') + if is_gui: + paths_to_remove.append(exe_name + '-script.pyw') + else: + paths_to_remove.append(exe_name + '-script.py') + return paths_to_remove + + +def _unique(fn): + @functools.wraps(fn) + def unique(*args, **kw): + seen = set() + for item in fn(*args, **kw): + if item not in seen: + seen.add(item) + yield item + return unique + + +@_unique +def uninstallation_paths(dist): + """ + Yield all the uninstallation paths for dist based on RECORD-without-.py[co] + + Yield paths to all the files in RECORD. For each .py file in RECORD, add + the .pyc and .pyo in the same directory. + + UninstallPathSet.add() takes care of the __pycache__ .py[co]. + """ + r = csv.reader(FakeFile(dist.get_metadata_lines('RECORD'))) + for row in r: + path = os.path.join(dist.location, row[0]) + yield path + if path.endswith('.py'): + dn, fn = os.path.split(path) + base = fn[:-3] + path = os.path.join(dn, base + '.pyc') + yield path + path = os.path.join(dn, base + '.pyo') + yield path + + +def compact(paths): + """Compact a path set to contain the minimal number of paths + necessary to contain all paths in the set. 
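uninstallation_paths() above expands each ".py" entry from RECORD into its ".pyc"/".pyo" companions. A compact example of that expansion, with fabricated RECORD rows.

import csv
import io

record = io.StringIO("pkg/__init__.py,sha256=...,123\npkg/data.json,,\n")

def record_paths(fp):
    for row in csv.reader(fp):
        path = row[0]
        yield path
        if path.endswith('.py'):
            yield path[:-3] + '.pyc'
            yield path[:-3] + '.pyo'

print(list(record_paths(record)))
# ['pkg/__init__.py', 'pkg/__init__.pyc', 'pkg/__init__.pyo', 'pkg/data.json']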
If /a/path/ and + /a/path/to/a/file.txt are both in the set, leave only the + shorter path.""" + + sep = os.path.sep + short_paths = set() + for path in sorted(paths, key=len): + should_add = any( + path.startswith(shortpath.rstrip("*")) and + path[len(shortpath.rstrip("*").rstrip(sep))] == sep + for shortpath in short_paths + ) + if not should_add: + short_paths.add(path) + return short_paths + + +def compress_for_output_listing(paths): + """Returns a tuple of 2 sets of which paths to display to user + + The first set contains paths that would be deleted. Files of a package + are not added and the top-level directory of the package has a '*' added + at the end - to signify that all it's contents are removed. + + The second set contains files that would have been skipped in the above + folders. + """ + + will_remove = list(paths) + will_skip = set() + + # Determine folders and files + folders = set() + files = set() + for path in will_remove: + if path.endswith(".pyc"): + continue + if path.endswith("__init__.py") or ".dist-info" in path: + folders.add(os.path.dirname(path)) + files.add(path) + + folders = compact(folders) + + # This walks the tree using os.walk to not miss extra folders + # that might get added. + for folder in folders: + for dirpath, _, dirfiles in os.walk(folder): + for fname in dirfiles: + if fname.endswith(".pyc"): + continue + + file_ = os.path.normcase(os.path.join(dirpath, fname)) + if os.path.isfile(file_) and file_ not in files: + # We are skipping this file. Add it to the set. + will_skip.add(file_) + + will_remove = files | { + os.path.join(folder, "*") for folder in folders + } + + return will_remove, will_skip + + +class UninstallPathSet(object): + """A set of file paths to be removed in the uninstallation of a + requirement.""" + def __init__(self, dist): + self.paths = set() + self._refuse = set() + self.pth = {} + self.dist = dist + self.save_dir = TempDirectory(kind="uninstall") + self._moved_paths = [] + + def _permitted(self, path): + """ + Return True if the given path is one we are permitted to + remove/modify, False otherwise. + + """ + return is_local(path) + + def add(self, path): + head, tail = os.path.split(path) + + # we normalize the head to resolve parent directory symlinks, but not + # the tail, since we only want to uninstall symlinks, not their targets + path = os.path.join(normalize_path(head), os.path.normcase(tail)) + + if not os.path.exists(path): + return + if self._permitted(path): + self.paths.add(path) + else: + self._refuse.add(path) + + # __pycache__ files can show up after 'installed-files.txt' is created, + # due to imports + if os.path.splitext(path)[1] == '.py' and uses_pycache: + self.add(cache_from_source(path)) + + def add_pth(self, pth_file, entry): + pth_file = normalize_path(pth_file) + if self._permitted(pth_file): + if pth_file not in self.pth: + self.pth[pth_file] = UninstallPthEntries(pth_file) + self.pth[pth_file].add(entry) + else: + self._refuse.add(pth_file) + + def _stash(self, path): + return os.path.join( + self.save_dir.path, os.path.splitdrive(path)[1].lstrip(os.path.sep) + ) + + def remove(self, auto_confirm=False, verbose=False): + """Remove paths in ``self.paths`` with confirmation (unless + ``auto_confirm`` is True).""" + + if not self.paths: + logger.info( + "Can't uninstall '%s'. 
No files were found to uninstall.", + self.dist.project_name, + ) + return + + dist_name_version = ( + self.dist.project_name + "-" + self.dist.version + ) + logger.info('Uninstalling %s:', dist_name_version) + + with indent_log(): + if auto_confirm or self._allowed_to_proceed(verbose): + self.save_dir.create() + + for path in sorted(compact(self.paths)): + new_path = self._stash(path) + logger.debug('Removing file or directory %s', path) + self._moved_paths.append(path) + renames(path, new_path) + for pth in self.pth.values(): + pth.remove() + + logger.info('Successfully uninstalled %s', dist_name_version) + + def _allowed_to_proceed(self, verbose): + """Display which files would be deleted and prompt for confirmation + """ + + def _display(msg, paths): + if not paths: + return + + logger.info(msg) + with indent_log(): + for path in sorted(compact(paths)): + logger.info(path) + + if not verbose: + will_remove, will_skip = compress_for_output_listing(self.paths) + else: + # In verbose mode, display all the files that are going to be + # deleted. + will_remove = list(self.paths) + will_skip = set() + + _display('Would remove:', will_remove) + _display('Would not remove (might be manually added):', will_skip) + _display('Would not remove (outside of prefix):', self._refuse) + + return ask('Proceed (y/n)? ', ('y', 'n')) == 'y' + + def rollback(self): + """Rollback the changes previously made by remove().""" + if self.save_dir.path is None: + logger.error( + "Can't roll back %s; was not uninstalled", + self.dist.project_name, + ) + return False + logger.info('Rolling back uninstall of %s', self.dist.project_name) + for path in self._moved_paths: + tmp_path = self._stash(path) + logger.debug('Replacing %s', path) + renames(tmp_path, path) + for pth in self.pth.values(): + pth.rollback() + + def commit(self): + """Remove temporary save dir: rollback will no longer be possible.""" + self.save_dir.cleanup() + self._moved_paths = [] + + @classmethod + def from_dist(cls, dist): + dist_path = normalize_path(dist.location) + if not dist_is_local(dist): + logger.info( + "Not uninstalling %s at %s, outside environment %s", + dist.key, + dist_path, + sys.prefix, + ) + return cls(dist) + + if dist_path in {p for p in {sysconfig.get_path("stdlib"), + sysconfig.get_path("platstdlib")} + if p}: + logger.info( + "Not uninstalling %s at %s, as it is in the standard library.", + dist.key, + dist_path, + ) + return cls(dist) + + paths_to_remove = cls(dist) + develop_egg_link = egg_link_path(dist) + develop_egg_link_egg_info = '{}.egg-info'.format( + pkg_resources.to_filename(dist.project_name)) + egg_info_exists = dist.egg_info and os.path.exists(dist.egg_info) + # Special case for distutils installed package + distutils_egg_info = getattr(dist._provider, 'path', None) + + # Uninstall cases order do matter as in the case of 2 installs of the + # same package, pip needs to uninstall the currently detected version + if (egg_info_exists and dist.egg_info.endswith('.egg-info') and + not dist.egg_info.endswith(develop_egg_link_egg_info)): + # if dist.egg_info.endswith(develop_egg_link_egg_info), we + # are in fact in the develop_egg_link case + paths_to_remove.add(dist.egg_info) + if dist.has_metadata('installed-files.txt'): + for installed_file in dist.get_metadata( + 'installed-files.txt').splitlines(): + path = os.path.normpath( + os.path.join(dist.egg_info, installed_file) + ) + paths_to_remove.add(path) + # FIXME: need a test for this elif block + # occurs with --single-version-externally-managed/--record 
outside + # of pip + elif dist.has_metadata('top_level.txt'): + if dist.has_metadata('namespace_packages.txt'): + namespaces = dist.get_metadata('namespace_packages.txt') + else: + namespaces = [] + for top_level_pkg in [ + p for p + in dist.get_metadata('top_level.txt').splitlines() + if p and p not in namespaces]: + path = os.path.join(dist.location, top_level_pkg) + paths_to_remove.add(path) + paths_to_remove.add(path + '.py') + paths_to_remove.add(path + '.pyc') + paths_to_remove.add(path + '.pyo') + + elif distutils_egg_info: + raise UninstallationError( + "Cannot uninstall {!r}. It is a distutils installed project " + "and thus we cannot accurately determine which files belong " + "to it which would lead to only a partial uninstall.".format( + dist.project_name, + ) + ) + + elif dist.location.endswith('.egg'): + # package installed by easy_install + # We cannot match on dist.egg_name because it can slightly vary + # i.e. setuptools-0.6c11-py2.6.egg vs setuptools-0.6rc11-py2.6.egg + paths_to_remove.add(dist.location) + easy_install_egg = os.path.split(dist.location)[1] + easy_install_pth = os.path.join(os.path.dirname(dist.location), + 'easy-install.pth') + paths_to_remove.add_pth(easy_install_pth, './' + easy_install_egg) + + elif egg_info_exists and dist.egg_info.endswith('.dist-info'): + for path in uninstallation_paths(dist): + paths_to_remove.add(path) + + elif develop_egg_link: + # develop egg + with open(develop_egg_link, 'r') as fh: + link_pointer = os.path.normcase(fh.readline().strip()) + assert (link_pointer == dist.location), ( + 'Egg-link %s does not match installed location of %s ' + '(at %s)' % (link_pointer, dist.project_name, dist.location) + ) + paths_to_remove.add(develop_egg_link) + easy_install_pth = os.path.join(os.path.dirname(develop_egg_link), + 'easy-install.pth') + paths_to_remove.add_pth(easy_install_pth, dist.location) + + else: + logger.debug( + 'Not sure how to uninstall: %s - Check: %s', + dist, dist.location, + ) + + # find distutils scripts= scripts + if dist.has_metadata('scripts') and dist.metadata_isdir('scripts'): + for script in dist.metadata_listdir('scripts'): + if dist_in_usersite(dist): + bin_dir = bin_user + else: + bin_dir = bin_py + paths_to_remove.add(os.path.join(bin_dir, script)) + if WINDOWS: + paths_to_remove.add(os.path.join(bin_dir, script) + '.bat') + + # find console_scripts + _scripts_to_remove = [] + console_scripts = dist.get_entry_map(group='console_scripts') + for name in console_scripts.keys(): + _scripts_to_remove.extend(_script_names(dist, name, False)) + # find gui_scripts + gui_scripts = dist.get_entry_map(group='gui_scripts') + for name in gui_scripts.keys(): + _scripts_to_remove.extend(_script_names(dist, name, True)) + + for s in _scripts_to_remove: + paths_to_remove.add(s) + + return paths_to_remove + + +class UninstallPthEntries(object): + def __init__(self, pth_file): + if not os.path.isfile(pth_file): + raise UninstallationError( + "Cannot remove entries from nonexistent file %s" % pth_file + ) + self.file = pth_file + self.entries = set() + self._saved_lines = None + + def add(self, entry): + entry = os.path.normcase(entry) + # On Windows, os.path.normcase converts the entry to use + # backslashes. This is correct for entries that describe absolute + # paths outside of site-packages, but all the others use forward + # slashes. 
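The comment above explains why relative .pth entries are flipped back to forward slashes after os.path.normcase. A sketch of that normalisation, using ntpath so the Windows behaviour can be reproduced on any platform; the entry value is illustrative.

import ntpath

entry = 'django/../django'
normalized = ntpath.normcase(entry)        # 'django\\..\\django'
if not ntpath.splitdrive(normalized)[0]:   # relative entry: no drive letter
    normalized = normalized.replace('\\', '/')
print(normalized)                          # django/../django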
+ if WINDOWS and not os.path.splitdrive(entry)[0]: + entry = entry.replace('\\', '/') + self.entries.add(entry) + + def remove(self): + logger.debug('Removing pth entries from %s:', self.file) + with open(self.file, 'rb') as fh: + # windows uses '\r\n' with py3k, but uses '\n' with py2.x + lines = fh.readlines() + self._saved_lines = lines + if any(b'\r\n' in line for line in lines): + endline = '\r\n' + else: + endline = '\n' + # handle missing trailing newline + if lines and not lines[-1].endswith(endline.encode("utf-8")): + lines[-1] = lines[-1] + endline.encode("utf-8") + for entry in self.entries: + try: + logger.debug('Removing entry: %s', entry) + lines.remove((entry + endline).encode("utf-8")) + except ValueError: + pass + with open(self.file, 'wb') as fh: + fh.writelines(lines) + + def rollback(self): + if self._saved_lines is None: + logger.error( + 'Cannot roll back changes to %s, none were made', self.file + ) + return False + logger.debug('Rolling %s back to previous state', self.file) + with open(self.file, 'wb') as fh: + fh.writelines(self._saved_lines) + return True diff --git a/env/lib/python3.6/site-packages/pip/_internal/resolve.py b/env/lib/python3.6/site-packages/pip/_internal/resolve.py new file mode 100644 index 0000000..8480e48 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_internal/resolve.py @@ -0,0 +1,353 @@ +"""Dependency Resolution + +The dependency resolution in pip is performed as follows: + +for top-level requirements: + a. only one spec allowed per project, regardless of conflicts or not. + otherwise a "double requirement" exception is raised + b. they override sub-dependency requirements. +for sub-dependencies + a. "first found, wins" (where the order is breadth first) +""" + +import logging +from collections import defaultdict +from itertools import chain + +from pip._internal.exceptions import ( + BestVersionAlreadyInstalled, DistributionNotFound, HashError, HashErrors, + UnsupportedPythonVersion, +) +from pip._internal.req.req_install import InstallRequirement +from pip._internal.utils.logging import indent_log +from pip._internal.utils.misc import dist_in_usersite, ensure_dir +from pip._internal.utils.packaging import check_dist_requires_python + +logger = logging.getLogger(__name__) + + +class Resolver(object): + """Resolves which packages need to be installed/uninstalled to perform \ + the requested operation without breaking the requirements of any package. + """ + + _allowed_strategies = {"eager", "only-if-needed", "to-satisfy-only"} + + def __init__(self, preparer, session, finder, wheel_cache, use_user_site, + ignore_dependencies, ignore_installed, ignore_requires_python, + force_reinstall, isolated, upgrade_strategy): + super(Resolver, self).__init__() + assert upgrade_strategy in self._allowed_strategies + + self.preparer = preparer + self.finder = finder + self.session = session + + # NOTE: This would eventually be replaced with a cache that can give + # information about both sdist and wheels transparently. 
+ self.wheel_cache = wheel_cache + + self.require_hashes = None # This is set in resolve + + self.upgrade_strategy = upgrade_strategy + self.force_reinstall = force_reinstall + self.isolated = isolated + self.ignore_dependencies = ignore_dependencies + self.ignore_installed = ignore_installed + self.ignore_requires_python = ignore_requires_python + self.use_user_site = use_user_site + + self._discovered_dependencies = defaultdict(list) + + def resolve(self, requirement_set): + """Resolve what operations need to be done + + As a side-effect of this method, the packages (and their dependencies) + are downloaded, unpacked and prepared for installation. This + preparation is done by ``pip.operations.prepare``. + + Once PyPI has static dependency metadata available, it would be + possible to move the preparation to become a step separated from + dependency resolution. + """ + # make the wheelhouse + if self.preparer.wheel_download_dir: + ensure_dir(self.preparer.wheel_download_dir) + + # If any top-level requirement has a hash specified, enter + # hash-checking mode, which requires hashes from all. + root_reqs = ( + requirement_set.unnamed_requirements + + list(requirement_set.requirements.values()) + ) + self.require_hashes = ( + requirement_set.require_hashes or + any(req.has_hash_options for req in root_reqs) + ) + + # Display where finder is looking for packages + locations = self.finder.get_formatted_locations() + if locations: + logger.info(locations) + + # Actually prepare the files, and collect any exceptions. Most hash + # exceptions cannot be checked ahead of time, because + # req.populate_link() needs to be called before we can make decisions + # based on link type. + discovered_reqs = [] + hash_errors = HashErrors() + for req in chain(root_reqs, discovered_reqs): + try: + discovered_reqs.extend( + self._resolve_one(requirement_set, req) + ) + except HashError as exc: + exc.req = req + hash_errors.append(exc) + + if hash_errors: + raise hash_errors + + def _is_upgrade_allowed(self, req): + if self.upgrade_strategy == "to-satisfy-only": + return False + elif self.upgrade_strategy == "eager": + return True + else: + assert self.upgrade_strategy == "only-if-needed" + return req.is_direct + + def _set_req_to_reinstall(self, req): + """ + Set a requirement to be installed. + """ + # Don't uninstall the conflict if doing a user install and the + # conflict is not a user install. + if not self.use_user_site or dist_in_usersite(req.satisfied_by): + req.conflicts_with = req.satisfied_by + req.satisfied_by = None + + # XXX: Stop passing requirement_set for options + def _check_skip_installed(self, req_to_install): + """Check if req_to_install should be skipped. + + This will check if the req is installed, and whether we should upgrade + or reinstall it, taking into account all the relevant user options. + + After calling this req_to_install will only have satisfied_by set to + None if the req_to_install is to be upgraded/reinstalled etc. Any + other value will be a dist recording the current thing installed that + satisfies the requirement. + + Note that for vcs urls and the like we can't assess skipping in this + routine - we simply identify that we need to pull the thing down, + then later on it is pulled down and introspected to assess upgrade/ + reinstalls etc. + + :return: A text reason for why it was skipped, or None. 
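# Illustrative sketch (not from the pip sources in this patch): a stripped-down
# version of the _is_upgrade_allowed() decision above. `is_direct` stands in
# for req.is_direct, i.e. True for requirements named on the command line and
# False for discovered sub-dependencies.
def upgrade_allowed(strategy, is_direct):
    if strategy == "to-satisfy-only":
        return False        # never upgrade something that already satisfies
    if strategy == "eager":
        return True         # upgrade everything, including sub-dependencies
    assert strategy == "only-if-needed"
    return is_direct        # upgrade only what the user asked for directly


assert upgrade_allowed("only-if-needed", is_direct=True)
assert not upgrade_allowed("only-if-needed", is_direct=False)
assert upgrade_allowed("eager", is_direct=False)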
+ """ + if self.ignore_installed: + return None + + req_to_install.check_if_exists(self.use_user_site) + if not req_to_install.satisfied_by: + return None + + if self.force_reinstall: + self._set_req_to_reinstall(req_to_install) + return None + + if not self._is_upgrade_allowed(req_to_install): + if self.upgrade_strategy == "only-if-needed": + return 'already satisfied, skipping upgrade' + return 'already satisfied' + + # Check for the possibility of an upgrade. For link-based + # requirements we have to pull the tree down and inspect to assess + # the version #, so it's handled way down. + if not req_to_install.link: + try: + self.finder.find_requirement(req_to_install, upgrade=True) + except BestVersionAlreadyInstalled: + # Then the best version is installed. + return 'already up-to-date' + except DistributionNotFound: + # No distribution found, so we squash the error. It will + # be raised later when we re-try later to do the install. + # Why don't we just raise here? + pass + + self._set_req_to_reinstall(req_to_install) + return None + + def _get_abstract_dist_for(self, req): + """Takes a InstallRequirement and returns a single AbstractDist \ + representing a prepared variant of the same. + """ + assert self.require_hashes is not None, ( + "require_hashes should have been set in Resolver.resolve()" + ) + + if req.editable: + return self.preparer.prepare_editable_requirement( + req, self.require_hashes, self.use_user_site, self.finder, + ) + + # satisfied_by is only evaluated by calling _check_skip_installed, + # so it must be None here. + assert req.satisfied_by is None + skip_reason = self._check_skip_installed(req) + + if req.satisfied_by: + return self.preparer.prepare_installed_requirement( + req, self.require_hashes, skip_reason + ) + + upgrade_allowed = self._is_upgrade_allowed(req) + abstract_dist = self.preparer.prepare_linked_requirement( + req, self.session, self.finder, upgrade_allowed, + self.require_hashes + ) + + # NOTE + # The following portion is for determining if a certain package is + # going to be re-installed/upgraded or not and reporting to the user. + # This should probably get cleaned up in a future refactor. + + # req.req is only avail after unpack for URL + # pkgs repeat check_if_exists to uninstall-on-upgrade + # (#14) + if not self.ignore_installed: + req.check_if_exists(self.use_user_site) + + if req.satisfied_by: + should_modify = ( + self.upgrade_strategy != "to-satisfy-only" or + self.force_reinstall or + self.ignore_installed or + req.link.scheme == 'file' + ) + if should_modify: + self._set_req_to_reinstall(req) + else: + logger.info( + 'Requirement already satisfied (use --upgrade to upgrade):' + ' %s', req, + ) + + return abstract_dist + + def _resolve_one(self, requirement_set, req_to_install): + """Prepare a single requirements file. + + :return: A list of additional InstallRequirements to also install. 
+ """ + # Tell user what we are doing for this requirement: + # obtain (editable), skipping, processing (local url), collecting + # (remote url or package name) + if req_to_install.constraint or req_to_install.prepared: + return [] + + req_to_install.prepared = True + + # register tmp src for cleanup in case something goes wrong + requirement_set.reqs_to_cleanup.append(req_to_install) + + abstract_dist = self._get_abstract_dist_for(req_to_install) + + # Parse and return dependencies + dist = abstract_dist.dist(self.finder) + try: + check_dist_requires_python(dist) + except UnsupportedPythonVersion as err: + if self.ignore_requires_python: + logger.warning(err.args[0]) + else: + raise + + more_reqs = [] + + def add_req(subreq, extras_requested): + sub_install_req = InstallRequirement.from_req( + str(subreq), + req_to_install, + isolated=self.isolated, + wheel_cache=self.wheel_cache, + ) + parent_req_name = req_to_install.name + to_scan_again, add_to_parent = requirement_set.add_requirement( + sub_install_req, + parent_req_name=parent_req_name, + extras_requested=extras_requested, + ) + if parent_req_name and add_to_parent: + self._discovered_dependencies[parent_req_name].append( + add_to_parent + ) + more_reqs.extend(to_scan_again) + + with indent_log(): + # We add req_to_install before its dependencies, so that we + # can refer to it when adding dependencies. + if not requirement_set.has_requirement(req_to_install.name): + # 'unnamed' requirements will get added here + req_to_install.is_direct = True + requirement_set.add_requirement( + req_to_install, parent_req_name=None, + ) + + if not self.ignore_dependencies: + if req_to_install.extras: + logger.debug( + "Installing extra requirements: %r", + ','.join(req_to_install.extras), + ) + missing_requested = sorted( + set(req_to_install.extras) - set(dist.extras) + ) + for missing in missing_requested: + logger.warning( + '%s does not provide the extra \'%s\'', + dist, missing + ) + + available_requested = sorted( + set(dist.extras) & set(req_to_install.extras) + ) + for subreq in dist.requires(available_requested): + add_req(subreq, extras_requested=available_requested) + + if not req_to_install.editable and not req_to_install.satisfied_by: + # XXX: --no-install leads this to report 'Successfully + # downloaded' for only non-editable reqs, even though we took + # action on them. + requirement_set.successfully_downloaded.append(req_to_install) + + return more_reqs + + def get_installation_order(self, req_set): + """Create the installation order. + + The installation order is topological - requirements are installed + before the requiring thing. We break cycles at an arbitrary point, + and make no other guarantees. + """ + # The current implementation, which we may change at any point + # installs the user specified things in the order given, except when + # dependencies must come earlier to achieve topological order. 
+ order = [] + ordered_reqs = set() + + def schedule(req): + if req.satisfied_by or req in ordered_reqs: + return + if req.constraint: + return + ordered_reqs.add(req) + for dep in self._discovered_dependencies[req.name]: + schedule(dep) + order.append(req) + + for install_req in req_set.requirements.values(): + schedule(install_req) + return order diff --git a/env/lib/python3.6/site-packages/pip/_internal/status_codes.py b/env/lib/python3.6/site-packages/pip/_internal/status_codes.py new file mode 100644 index 0000000..275360a --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_internal/status_codes.py @@ -0,0 +1,8 @@ +from __future__ import absolute_import + +SUCCESS = 0 +ERROR = 1 +UNKNOWN_ERROR = 2 +VIRTUALENV_NOT_FOUND = 3 +PREVIOUS_BUILD_DIR_ERROR = 4 +NO_MATCHES_FOUND = 23 diff --git a/env/lib/python3.6/site-packages/pip/_internal/utils/appdirs.py b/env/lib/python3.6/site-packages/pip/_internal/utils/appdirs.py new file mode 100644 index 0000000..28c5d4b --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_internal/utils/appdirs.py @@ -0,0 +1,258 @@ +""" +This code was taken from https://github.com/ActiveState/appdirs and modified +to suit our purposes. +""" +from __future__ import absolute_import + +import os +import sys + +from pip._vendor.six import PY2, text_type + +from pip._internal.compat import WINDOWS, expanduser + + +def user_cache_dir(appname): + r""" + Return full path to the user-specific cache dir for this application. + + "appname" is the name of application. + + Typical user cache directories are: + macOS: ~/Library/Caches/ + Unix: ~/.cache/ (XDG default) + Windows: C:\Users\\AppData\Local\\Cache + + On Windows the only suggestion in the MSDN docs is that local settings go + in the `CSIDL_LOCAL_APPDATA` directory. This is identical to the + non-roaming app data dir (the default returned by `user_data_dir`). Apps + typically put cache data somewhere *under* the given dir here. Some + examples: + ...\Mozilla\Firefox\Profiles\\Cache + ...\Acme\SuperApp\Cache\1.0 + + OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value. + """ + if WINDOWS: + # Get the base path + path = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA")) + + # When using Python 2, return paths as bytes on Windows like we do on + # other operating systems. See helper function docs for more details. + if PY2 and isinstance(path, text_type): + path = _win_path_to_bytes(path) + + # Add our app name and Cache directory to it + path = os.path.join(path, appname, "Cache") + elif sys.platform == "darwin": + # Get the base path + path = expanduser("~/Library/Caches") + + # Add our app name to it + path = os.path.join(path, appname) + else: + # Get the base path + path = os.getenv("XDG_CACHE_HOME", expanduser("~/.cache")) + + # Add our app name to it + path = os.path.join(path, appname) + + return path + + +def user_data_dir(appname, roaming=False): + r""" + Return full path to the user-specific data dir for this application. + + "appname" is the name of application. + If None, just the system directory is returned. + "roaming" (boolean, default False) can be set True to use the Windows + roaming appdata directory. That means that for users on a Windows + network setup for roaming profiles, this user data will be + sync'd on login. See + + for a discussion of issues. 
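# Usage example (not from the pip sources in this patch): the non-Windows,
# non-macOS branch of user_cache_dir() above reduces to an XDG lookup with a
# fallback, which is easy to reproduce and inspect on its own.
import os


def linux_cache_dir(appname):
    base = os.getenv("XDG_CACHE_HOME", os.path.expanduser("~/.cache"))
    return os.path.join(base, appname)


os.environ.pop("XDG_CACHE_HOME", None)
print(linux_cache_dir("pip"))   # e.g. /home/user/.cache/pip, pip's usual Linux cache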
+ + Typical user data directories are: + macOS: ~/Library/Application Support/ + if it exists, else ~/.config/ + Unix: ~/.local/share/ # or in + $XDG_DATA_HOME, if defined + Win XP (not roaming): C:\Documents and Settings\\ ... + ...Application Data\ + Win XP (roaming): C:\Documents and Settings\\Local ... + ...Settings\Application Data\ + Win 7 (not roaming): C:\\Users\\AppData\Local\ + Win 7 (roaming): C:\\Users\\AppData\Roaming\ + + For Unix, we follow the XDG spec and support $XDG_DATA_HOME. + That means, by default "~/.local/share/". + """ + if WINDOWS: + const = roaming and "CSIDL_APPDATA" or "CSIDL_LOCAL_APPDATA" + path = os.path.join(os.path.normpath(_get_win_folder(const)), appname) + elif sys.platform == "darwin": + path = os.path.join( + expanduser('~/Library/Application Support/'), + appname, + ) if os.path.isdir(os.path.join( + expanduser('~/Library/Application Support/'), + appname, + ) + ) else os.path.join( + expanduser('~/.config/'), + appname, + ) + else: + path = os.path.join( + os.getenv('XDG_DATA_HOME', expanduser("~/.local/share")), + appname, + ) + + return path + + +def user_config_dir(appname, roaming=True): + """Return full path to the user-specific config dir for this application. + + "appname" is the name of application. + If None, just the system directory is returned. + "roaming" (boolean, default True) can be set False to not use the + Windows roaming appdata directory. That means that for users on a + Windows network setup for roaming profiles, this user data will be + sync'd on login. See + + for a discussion of issues. + + Typical user data directories are: + macOS: same as user_data_dir + Unix: ~/.config/ + Win *: same as user_data_dir + + For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME. + That means, by default "~/.config/". + """ + if WINDOWS: + path = user_data_dir(appname, roaming=roaming) + elif sys.platform == "darwin": + path = user_data_dir(appname) + else: + path = os.getenv('XDG_CONFIG_HOME', expanduser("~/.config")) + path = os.path.join(path, appname) + + return path + + +# for the discussion regarding site_config_dirs locations +# see +def site_config_dirs(appname): + r"""Return a list of potential user-shared config dirs for this application. + + "appname" is the name of application. + + Typical user config directories are: + macOS: /Library/Application Support// + Unix: /etc or $XDG_CONFIG_DIRS[i]// for each value in + $XDG_CONFIG_DIRS + Win XP: C:\Documents and Settings\All Users\Application ... + ...Data\\ + Vista: (Fail! "C:\ProgramData" is a hidden *system* directory + on Vista.) + Win 7: Hidden, but writeable on Win 7: + C:\ProgramData\\ + """ + if WINDOWS: + path = os.path.normpath(_get_win_folder("CSIDL_COMMON_APPDATA")) + pathlist = [os.path.join(path, appname)] + elif sys.platform == 'darwin': + pathlist = [os.path.join('/Library/Application Support', appname)] + else: + # try looking in $XDG_CONFIG_DIRS + xdg_config_dirs = os.getenv('XDG_CONFIG_DIRS', '/etc/xdg') + if xdg_config_dirs: + pathlist = [ + os.path.join(expanduser(x), appname) + for x in xdg_config_dirs.split(os.pathsep) + ] + else: + pathlist = [] + + # always look in /etc directly as well + pathlist.append('/etc') + + return pathlist + + +# -- Windows support functions -- + +def _get_win_folder_from_registry(csidl_name): + """ + This is a fallback technique at best. I'm not sure if using the + registry for this guarantees us the correct answer for all CSIDL_* + names. 
+ """ + import _winreg + + shell_folder_name = { + "CSIDL_APPDATA": "AppData", + "CSIDL_COMMON_APPDATA": "Common AppData", + "CSIDL_LOCAL_APPDATA": "Local AppData", + }[csidl_name] + + key = _winreg.OpenKey( + _winreg.HKEY_CURRENT_USER, + r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders" + ) + directory, _type = _winreg.QueryValueEx(key, shell_folder_name) + return directory + + +def _get_win_folder_with_ctypes(csidl_name): + csidl_const = { + "CSIDL_APPDATA": 26, + "CSIDL_COMMON_APPDATA": 35, + "CSIDL_LOCAL_APPDATA": 28, + }[csidl_name] + + buf = ctypes.create_unicode_buffer(1024) + ctypes.windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf) + + # Downgrade to short path name if have highbit chars. See + # . + has_high_char = False + for c in buf: + if ord(c) > 255: + has_high_char = True + break + if has_high_char: + buf2 = ctypes.create_unicode_buffer(1024) + if ctypes.windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024): + buf = buf2 + + return buf.value + + +if WINDOWS: + try: + import ctypes + _get_win_folder = _get_win_folder_with_ctypes + except ImportError: + _get_win_folder = _get_win_folder_from_registry + + +def _win_path_to_bytes(path): + """Encode Windows paths to bytes. Only used on Python 2. + + Motivation is to be consistent with other operating systems where paths + are also returned as bytes. This avoids problems mixing bytes and Unicode + elsewhere in the codebase. For more details and discussion see + . + + If encoding using ASCII and MBCS fails, return the original Unicode path. + """ + for encoding in ('ASCII', 'MBCS'): + try: + return path.encode(encoding) + except (UnicodeEncodeError, LookupError): + pass + return path diff --git a/env/lib/python3.6/site-packages/pip/_internal/utils/deprecation.py b/env/lib/python3.6/site-packages/pip/_internal/utils/deprecation.py new file mode 100644 index 0000000..bd744cf --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_internal/utils/deprecation.py @@ -0,0 +1,89 @@ +""" +A module that implements tooling to enable easy warnings about deprecations. +""" +from __future__ import absolute_import + +import logging +import warnings + +from pip._vendor.packaging.version import parse + +from pip import __version__ as current_version +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Any, Optional # noqa: F401 + + +class PipDeprecationWarning(Warning): + pass + + +_original_showwarning = None # type: Any + + +# Warnings <-> Logging Integration +def _showwarning(message, category, filename, lineno, file=None, line=None): + if file is not None: + if _original_showwarning is not None: + _original_showwarning( + message, category, filename, lineno, file, line, + ) + elif issubclass(category, PipDeprecationWarning): + # We use a specially named logger which will handle all of the + # deprecation messages for pip. 
+ logger = logging.getLogger("pip._internal.deprecations") + logger.warning(message) + else: + _original_showwarning( + message, category, filename, lineno, file, line, + ) + + +def install_warning_logger(): + # Enable our Deprecation Warnings + warnings.simplefilter("default", PipDeprecationWarning, append=True) + + global _original_showwarning + + if _original_showwarning is None: + _original_showwarning = warnings.showwarning + warnings.showwarning = _showwarning + + +def deprecated(reason, replacement, gone_in, issue=None): + # type: (str, Optional[str], Optional[str], Optional[int]) -> None + """Helper to deprecate existing functionality. + + reason: + Textual reason shown to the user about why this functionality has + been deprecated. + replacement: + Textual suggestion shown to the user about what alternative + functionality they can use. + gone_in: + The version of pip does this functionality should get removed in. + Raises errors if pip's current version is greater than or equal to + this. + issue: + Issue number on the tracker that would serve as a useful place for + users to find related discussion and provide feedback. + + Always pass replacement, gone_in and issue as keyword arguments for clarity + at the call site. + """ + + # Construct a nice message. + # This is purposely eagerly formatted as we want it to appear as if someone + # typed this entire message out. + message = "DEPRECATION: " + reason + if replacement is not None: + message += " A possible replacement is {}.".format(replacement) + if issue is not None: + url = "https://github.com/pypa/pip/issues/" + str(issue) + message += " You can find discussion regarding this at {}.".format(url) + + # Raise as an error if it has to be removed. + if gone_in is not None and parse(current_version) >= parse(gone_in): + raise PipDeprecationWarning(message) + warnings.warn(message, category=PipDeprecationWarning, stacklevel=2) diff --git a/env/lib/python3.6/site-packages/pip/_internal/utils/encoding.py b/env/lib/python3.6/site-packages/pip/_internal/utils/encoding.py new file mode 100644 index 0000000..56f6036 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_internal/utils/encoding.py @@ -0,0 +1,33 @@ +import codecs +import locale +import re +import sys + +BOMS = [ + (codecs.BOM_UTF8, 'utf8'), + (codecs.BOM_UTF16, 'utf16'), + (codecs.BOM_UTF16_BE, 'utf16-be'), + (codecs.BOM_UTF16_LE, 'utf16-le'), + (codecs.BOM_UTF32, 'utf32'), + (codecs.BOM_UTF32_BE, 'utf32-be'), + (codecs.BOM_UTF32_LE, 'utf32-le'), +] + +ENCODING_RE = re.compile(br'coding[:=]\s*([-\w.]+)') + + +def auto_decode(data): + """Check a bytes string for a BOM to correctly detect the encoding + + Fallback to locale.getpreferredencoding(False) like open() on Python3""" + for bom, encoding in BOMS: + if data.startswith(bom): + return data[len(bom):].decode(encoding) + # Lets check the first two lines as in PEP263 + for line in data.split(b'\n')[:2]: + if line[0:1] == b'#' and ENCODING_RE.search(line): + encoding = ENCODING_RE.search(line).groups()[0].decode('ascii') + return data.decode(encoding) + return data.decode( + locale.getpreferredencoding(False) or sys.getdefaultencoding(), + ) diff --git a/env/lib/python3.6/site-packages/pip/_internal/utils/filesystem.py b/env/lib/python3.6/site-packages/pip/_internal/utils/filesystem.py new file mode 100644 index 0000000..ee45501 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_internal/utils/filesystem.py @@ -0,0 +1,28 @@ +import os +import os.path + +from pip._internal.compat import get_path_uid + + +def 
check_path_owner(path): + # If we don't have a way to check the effective uid of this process, then + # we'll just assume that we own the directory. + if not hasattr(os, "geteuid"): + return True + + previous = None + while path != previous: + if os.path.lexists(path): + # Check if path is writable by current user. + if os.geteuid() == 0: + # Special handling for root user in order to handle properly + # cases where users use sudo without -H flag. + try: + path_uid = get_path_uid(path) + except OSError: + return False + return path_uid == 0 + else: + return os.access(path, os.W_OK) + else: + previous, path = path, os.path.dirname(path) diff --git a/env/lib/python3.6/site-packages/pip/_internal/utils/glibc.py b/env/lib/python3.6/site-packages/pip/_internal/utils/glibc.py new file mode 100644 index 0000000..ebcfc5b --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_internal/utils/glibc.py @@ -0,0 +1,84 @@ +from __future__ import absolute_import + +import ctypes +import re +import warnings + + +def glibc_version_string(): + "Returns glibc version string, or None if not using glibc." + + # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen + # manpage says, "If filename is NULL, then the returned handle is for the + # main program". This way we can let the linker do the work to figure out + # which libc our process is actually using. + process_namespace = ctypes.CDLL(None) + try: + gnu_get_libc_version = process_namespace.gnu_get_libc_version + except AttributeError: + # Symbol doesn't exist -> therefore, we are not linked to + # glibc. + return None + + # Call gnu_get_libc_version, which returns a string like "2.5" + gnu_get_libc_version.restype = ctypes.c_char_p + version_str = gnu_get_libc_version() + # py2 / py3 compatibility: + if not isinstance(version_str, str): + version_str = version_str.decode("ascii") + + return version_str + + +# Separated out from have_compatible_glibc for easier unit testing +def check_glibc_version(version_str, required_major, minimum_minor): + # Parse string and check against requested version. + # + # We use a regexp instead of str.split because we want to discard any + # random junk that might come after the minor version -- this might happen + # in patched/forked versions of glibc (e.g. Linaro's version of glibc + # uses version strings like "2.20-2014.11"). See gh-3588. + m = re.match(r"(?P[0-9]+)\.(?P[0-9]+)", version_str) + if not m: + warnings.warn("Expected glibc version with 2 components major.minor," + " got: %s" % version_str, RuntimeWarning) + return False + return (int(m.group("major")) == required_major and + int(m.group("minor")) >= minimum_minor) + + +def have_compatible_glibc(required_major, minimum_minor): + version_str = glibc_version_string() + if version_str is None: + return False + return check_glibc_version(version_str, required_major, minimum_minor) + + +# platform.libc_ver regularly returns completely nonsensical glibc +# versions. E.g. on my computer, platform says: +# +# ~$ python2.7 -c 'import platform; print(platform.libc_ver())' +# ('glibc', '2.7') +# ~$ python3.5 -c 'import platform; print(platform.libc_ver())' +# ('glibc', '2.9') +# +# But the truth is: +# +# ~$ ldd --version +# ldd (Debian GLIBC 2.22-11) 2.22 +# +# This is unfortunate, because it means that the linehaul data on libc +# versions that was generated by pip 8.1.2 and earlier is useless and +# misleading. Solution: instead of using platform, use our code that actually +# works. 
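# Usage example (assumes the vendored pip._internal package added by this
# patch is importable): check_glibc_version() only looks at the leading
# "major.minor" pair, which is why the Linaro-style suffix mentioned above is
# tolerated.
from pip._internal.utils.glibc import check_glibc_version

assert check_glibc_version("2.20-2014.11", 2, 17)       # suffix is ignored
assert not check_glibc_version("2.12", 2, 17)           # minor version too old
assert not check_glibc_version("garbage", 2, 17)        # warns and returns False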
+def libc_ver(): + """Try to determine the glibc version + + Returns a tuple of strings (lib, version) which default to empty strings + in case the lookup fails. + """ + glibc_version = glibc_version_string() + if glibc_version is None: + return ("", "") + else: + return ("glibc", glibc_version) diff --git a/env/lib/python3.6/site-packages/pip/_internal/utils/hashes.py b/env/lib/python3.6/site-packages/pip/_internal/utils/hashes.py new file mode 100644 index 0000000..8b909ba --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_internal/utils/hashes.py @@ -0,0 +1,94 @@ +from __future__ import absolute_import + +import hashlib + +from pip._vendor.six import iteritems, iterkeys, itervalues + +from pip._internal.exceptions import ( + HashMismatch, HashMissing, InstallationError, +) +from pip._internal.utils.misc import read_chunks + +# The recommended hash algo of the moment. Change this whenever the state of +# the art changes; it won't hurt backward compatibility. +FAVORITE_HASH = 'sha256' + + +# Names of hashlib algorithms allowed by the --hash option and ``pip hash`` +# Currently, those are the ones at least as collision-resistant as sha256. +STRONG_HASHES = ['sha256', 'sha384', 'sha512'] + + +class Hashes(object): + """A wrapper that builds multiple hashes at once and checks them against + known-good values + + """ + def __init__(self, hashes=None): + """ + :param hashes: A dict of algorithm names pointing to lists of allowed + hex digests + """ + self._allowed = {} if hashes is None else hashes + + def check_against_chunks(self, chunks): + """Check good hashes against ones built from iterable of chunks of + data. + + Raise HashMismatch if none match. + + """ + gots = {} + for hash_name in iterkeys(self._allowed): + try: + gots[hash_name] = hashlib.new(hash_name) + except (ValueError, TypeError): + raise InstallationError('Unknown hash name: %s' % hash_name) + + for chunk in chunks: + for hash in itervalues(gots): + hash.update(chunk) + + for hash_name, got in iteritems(gots): + if got.hexdigest() in self._allowed[hash_name]: + return + self._raise(gots) + + def _raise(self, gots): + raise HashMismatch(self._allowed, gots) + + def check_against_file(self, file): + """Check good hashes against a file-like object + + Raise HashMismatch if none match. + + """ + return self.check_against_chunks(read_chunks(file)) + + def check_against_path(self, path): + with open(path, 'rb') as file: + return self.check_against_file(file) + + def __nonzero__(self): + """Return whether I know any known-good hashes.""" + return bool(self._allowed) + + def __bool__(self): + return self.__nonzero__() + + +class MissingHashes(Hashes): + """A workalike for Hashes used when we're missing a hash for a requirement + + It computes the actual hash of the requirement and raises a HashMissing + exception showing it to the user. + + """ + def __init__(self): + """Don't offer the ``hashes`` kwarg.""" + # Pass our favorite hash in to generate a "gotten hash". With the + # empty list, it will never match, so an error will always raise. 
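# Usage example (assumes the vendored pip._internal package added by this
# patch is importable): a matching digest passes silently, a non-matching one
# raises HashMismatch.
import hashlib

from pip._internal.exceptions import HashMismatch
from pip._internal.utils.hashes import Hashes

data = b"example payload"
good = hashlib.sha256(data).hexdigest()

Hashes({"sha256": [good]}).check_against_chunks(iter([data]))   # no exception

try:
    Hashes({"sha256": ["0" * 64]}).check_against_chunks(iter([data]))
except HashMismatch:
    print("hash mismatch detected, as expected")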
+ super(MissingHashes, self).__init__(hashes={FAVORITE_HASH: []}) + + def _raise(self, gots): + raise HashMissing(gots[FAVORITE_HASH].hexdigest()) diff --git a/env/lib/python3.6/site-packages/pip/_internal/utils/logging.py b/env/lib/python3.6/site-packages/pip/_internal/utils/logging.py new file mode 100644 index 0000000..66c1d39 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_internal/utils/logging.py @@ -0,0 +1,225 @@ +from __future__ import absolute_import + +import contextlib +import logging +import logging.handlers +import os + +from pip._internal.compat import WINDOWS +from pip._internal.utils.misc import ensure_dir + +try: + import threading +except ImportError: + import dummy_threading as threading # type: ignore + + +try: + from pip._vendor import colorama +# Lots of different errors can come from this, including SystemError and +# ImportError. +except Exception: + colorama = None + + +_log_state = threading.local() +_log_state.indentation = 0 + + +@contextlib.contextmanager +def indent_log(num=2): + """ + A context manager which will cause the log output to be indented for any + log messages emitted inside it. + """ + _log_state.indentation += num + try: + yield + finally: + _log_state.indentation -= num + + +def get_indentation(): + return getattr(_log_state, 'indentation', 0) + + +class IndentingFormatter(logging.Formatter): + + def format(self, record): + """ + Calls the standard formatter, but will indent all of the log messages + by our current indentation level. + """ + formatted = logging.Formatter.format(self, record) + formatted = "".join([ + (" " * get_indentation()) + line + for line in formatted.splitlines(True) + ]) + return formatted + + +def _color_wrap(*colors): + def wrapped(inp): + return "".join(list(colors) + [inp, colorama.Style.RESET_ALL]) + return wrapped + + +class ColorizedStreamHandler(logging.StreamHandler): + + # Don't build up a list of colors if we don't have colorama + if colorama: + COLORS = [ + # This needs to be in order from highest logging level to lowest. 
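# Standalone sketch (not from the pip sources in this patch) of the
# indentation pattern above: a thread-local counter, a context manager that
# bumps it, and a formatter that prefixes every line with the current indent.
import contextlib
import logging
import threading

_state = threading.local()
_state.indent = 0


@contextlib.contextmanager
def indented(num=2):
    _state.indent += num
    try:
        yield
    finally:
        _state.indent -= num


class IndentFormatter(logging.Formatter):
    def format(self, record):
        prefix = " " * getattr(_state, "indent", 0)
        return prefix + logging.Formatter.format(self, record)


handler = logging.StreamHandler()
handler.setFormatter(IndentFormatter("%(message)s"))
log = logging.getLogger("demo")
log.addHandler(handler)
log.setLevel(logging.INFO)

log.info("Collecting example")        # flush left
with indented():
    log.info("Downloading example")   # indented by two spaces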
+ (logging.ERROR, _color_wrap(colorama.Fore.RED)), + (logging.WARNING, _color_wrap(colorama.Fore.YELLOW)), + ] + else: + COLORS = [] + + def __init__(self, stream=None, no_color=None): + logging.StreamHandler.__init__(self, stream) + self._no_color = no_color + + if WINDOWS and colorama: + self.stream = colorama.AnsiToWin32(self.stream) + + def should_color(self): + # Don't colorize things if we do not have colorama or if told not to + if not colorama or self._no_color: + return False + + real_stream = ( + self.stream if not isinstance(self.stream, colorama.AnsiToWin32) + else self.stream.wrapped + ) + + # If the stream is a tty we should color it + if hasattr(real_stream, "isatty") and real_stream.isatty(): + return True + + # If we have an ANSI term we should color it + if os.environ.get("TERM") == "ANSI": + return True + + # If anything else we should not color it + return False + + def format(self, record): + msg = logging.StreamHandler.format(self, record) + + if self.should_color(): + for level, color in self.COLORS: + if record.levelno >= level: + msg = color(msg) + break + + return msg + + +class BetterRotatingFileHandler(logging.handlers.RotatingFileHandler): + + def _open(self): + ensure_dir(os.path.dirname(self.baseFilename)) + return logging.handlers.RotatingFileHandler._open(self) + + +class MaxLevelFilter(logging.Filter): + + def __init__(self, level): + self.level = level + + def filter(self, record): + return record.levelno < self.level + + +def setup_logging(verbosity, no_color, user_log_file): + """Configures and sets up all of the logging + """ + + # Determine the level to be logging at. + if verbosity >= 1: + level = "DEBUG" + elif verbosity == -1: + level = "WARNING" + elif verbosity == -2: + level = "ERROR" + elif verbosity <= -3: + level = "CRITICAL" + else: + level = "INFO" + + # The "root" logger should match the "console" level *unless* we also need + # to log to a user log file. + include_user_log = user_log_file is not None + if include_user_log: + additional_log_file = user_log_file + root_level = "DEBUG" + else: + additional_log_file = "/dev/null" + root_level = level + + # Disable any logging besides WARNING unless we have DEBUG level logging + # enabled for vendored libraries. 
+ vendored_log_level = "WARNING" if level in ["INFO", "ERROR"] else "DEBUG" + + # Shorthands for clarity + log_streams = { + "stdout": "ext://sys.stdout", + "stderr": "ext://sys.stderr", + } + handler_classes = { + "stream": "pip._internal.utils.logging.ColorizedStreamHandler", + "file": "pip._internal.utils.logging.BetterRotatingFileHandler", + } + + logging.config.dictConfig({ + "version": 1, + "disable_existing_loggers": False, + "filters": { + "exclude_warnings": { + "()": "pip._internal.utils.logging.MaxLevelFilter", + "level": logging.WARNING, + }, + }, + "formatters": { + "indent": { + "()": IndentingFormatter, + "format": "%(message)s", + }, + }, + "handlers": { + "console": { + "level": level, + "class": handler_classes["stream"], + "no_color": no_color, + "stream": log_streams["stdout"], + "filters": ["exclude_warnings"], + "formatter": "indent", + }, + "console_errors": { + "level": "WARNING", + "class": handler_classes["stream"], + "no_color": no_color, + "stream": log_streams["stderr"], + "formatter": "indent", + }, + "user_log": { + "level": "DEBUG", + "class": handler_classes["file"], + "filename": additional_log_file, + "delay": True, + "formatter": "indent", + }, + }, + "root": { + "level": root_level, + "handlers": ["console", "console_errors"] + ( + ["user_log"] if include_user_log else [] + ), + }, + "loggers": { + "pip._vendor": { + "level": vendored_log_level + } + }, + }) diff --git a/env/lib/python3.6/site-packages/pip/_internal/utils/misc.py b/env/lib/python3.6/site-packages/pip/_internal/utils/misc.py new file mode 100644 index 0000000..3236af6 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_internal/utils/misc.py @@ -0,0 +1,899 @@ +from __future__ import absolute_import + +import contextlib +import errno +import io +import locale +# we have a submodule named 'logging' which would shadow this if we used the +# regular name: +import logging as std_logging +import os +import posixpath +import re +import shutil +import stat +import subprocess +import sys +import tarfile +import zipfile +from collections import deque + +from pip._vendor import pkg_resources +# NOTE: retrying is not annotated in typeshed as on 2017-07-17, which is +# why we ignore the type on this import. 
+from pip._vendor.retrying import retry # type: ignore +from pip._vendor.six import PY2 +from pip._vendor.six.moves import input +from pip._vendor.six.moves.urllib import parse as urllib_parse + +from pip._internal.compat import ( + WINDOWS, console_to_str, expanduser, stdlib_pkgs, +) +from pip._internal.exceptions import CommandError, InstallationError +from pip._internal.locations import ( + running_under_virtualenv, site_packages, user_site, virtualenv_no_global, + write_delete_marker_file, +) + +if PY2: + from io import BytesIO as StringIO +else: + from io import StringIO + +__all__ = ['rmtree', 'display_path', 'backup_dir', + 'ask', 'splitext', + 'format_size', 'is_installable_dir', + 'is_svn_page', 'file_contents', + 'split_leading_dir', 'has_leading_dir', + 'normalize_path', + 'renames', 'get_prog', + 'unzip_file', 'untar_file', 'unpack_file', 'call_subprocess', + 'captured_stdout', 'ensure_dir', + 'ARCHIVE_EXTENSIONS', 'SUPPORTED_EXTENSIONS', + 'get_installed_version', 'remove_auth_from_url'] + + +logger = std_logging.getLogger(__name__) + +BZ2_EXTENSIONS = ('.tar.bz2', '.tbz') +XZ_EXTENSIONS = ('.tar.xz', '.txz', '.tlz', '.tar.lz', '.tar.lzma') +ZIP_EXTENSIONS = ('.zip', '.whl') +TAR_EXTENSIONS = ('.tar.gz', '.tgz', '.tar') +ARCHIVE_EXTENSIONS = ( + ZIP_EXTENSIONS + BZ2_EXTENSIONS + TAR_EXTENSIONS + XZ_EXTENSIONS) +SUPPORTED_EXTENSIONS = ZIP_EXTENSIONS + TAR_EXTENSIONS +try: + import bz2 # noqa + SUPPORTED_EXTENSIONS += BZ2_EXTENSIONS +except ImportError: + logger.debug('bz2 module is not available') + +try: + # Only for Python 3.3+ + import lzma # noqa + SUPPORTED_EXTENSIONS += XZ_EXTENSIONS +except ImportError: + logger.debug('lzma module is not available') + + +def import_or_raise(pkg_or_module_string, ExceptionType, *args, **kwargs): + try: + return __import__(pkg_or_module_string) + except ImportError: + raise ExceptionType(*args, **kwargs) + + +def ensure_dir(path): + """os.path.makedirs without EEXIST.""" + try: + os.makedirs(path) + except OSError as e: + if e.errno != errno.EEXIST: + raise + + +def get_prog(): + try: + prog = os.path.basename(sys.argv[0]) + if prog in ('__main__.py', '-c'): + return "%s -m pip" % sys.executable + else: + return prog + except (AttributeError, TypeError, IndexError): + pass + return 'pip' + + +# Retry every half second for up to 3 seconds +@retry(stop_max_delay=3000, wait_fixed=500) +def rmtree(dir, ignore_errors=False): + shutil.rmtree(dir, ignore_errors=ignore_errors, + onerror=rmtree_errorhandler) + + +def rmtree_errorhandler(func, path, exc_info): + """On Windows, the files in .svn are read-only, so when rmtree() tries to + remove them, an exception is thrown. We catch that here, remove the + read-only attribute, and hopefully continue without problems.""" + # if file type currently read only + if os.stat(path).st_mode & stat.S_IREAD: + # convert to read/write + os.chmod(path, stat.S_IWRITE) + # use the original function to repeat the operation + func(path) + return + else: + raise + + +def display_path(path): + """Gives the display value for a given path, making it relative to cwd + if possible.""" + path = os.path.normcase(os.path.abspath(path)) + if sys.version_info[0] == 2: + path = path.decode(sys.getfilesystemencoding(), 'replace') + path = path.encode(sys.getdefaultencoding(), 'replace') + if path.startswith(os.getcwd() + os.path.sep): + path = '.' 
+ path[len(os.getcwd()):] + return path + + +def backup_dir(dir, ext='.bak'): + """Figure out the name of a directory to back up the given dir to + (adding .bak, .bak2, etc)""" + n = 1 + extension = ext + while os.path.exists(dir + extension): + n += 1 + extension = ext + str(n) + return dir + extension + + +def ask_path_exists(message, options): + for action in os.environ.get('PIP_EXISTS_ACTION', '').split(): + if action in options: + return action + return ask(message, options) + + +def ask(message, options): + """Ask the message interactively, with the given possible responses""" + while 1: + if os.environ.get('PIP_NO_INPUT'): + raise Exception( + 'No input was expected ($PIP_NO_INPUT set); question: %s' % + message + ) + response = input(message) + response = response.strip().lower() + if response not in options: + print( + 'Your response (%r) was not one of the expected responses: ' + '%s' % (response, ', '.join(options)) + ) + else: + return response + + +def format_size(bytes): + if bytes > 1000 * 1000: + return '%.1fMB' % (bytes / 1000.0 / 1000) + elif bytes > 10 * 1000: + return '%ikB' % (bytes / 1000) + elif bytes > 1000: + return '%.1fkB' % (bytes / 1000.0) + else: + return '%ibytes' % bytes + + +def is_installable_dir(path): + """Return True if `path` is a directory containing a setup.py file.""" + if not os.path.isdir(path): + return False + setup_py = os.path.join(path, 'setup.py') + if os.path.isfile(setup_py): + return True + return False + + +def is_svn_page(html): + """ + Returns true if the page appears to be the index page of an svn repository + """ + return (re.search(r'[^<]*Revision \d+:', html) and + re.search(r'Powered by (?:<a[^>]*?>)?Subversion', html, re.I)) + + +def file_contents(filename): + with open(filename, 'rb') as fp: + return fp.read().decode('utf-8') + + +def read_chunks(file, size=io.DEFAULT_BUFFER_SIZE): + """Yield pieces of data from a file-like object until EOF.""" + while True: + chunk = file.read(size) + if not chunk: + break + yield chunk + + +def split_leading_dir(path): + path = path.lstrip('/').lstrip('\\') + if '/' in path and (('\\' in path and path.find('/') < path.find('\\')) or + '\\' not in path): + return path.split('/', 1) + elif '\\' in path: + return path.split('\\', 1) + else: + return path, '' + + +def has_leading_dir(paths): + """Returns true if all the paths have the same leading path name + (i.e., everything is in one subdirectory in an archive)""" + common_prefix = None + for path in paths: + prefix, rest = split_leading_dir(path) + if not prefix: + return False + elif common_prefix is None: + common_prefix = prefix + elif prefix != common_prefix: + return False + return True + + +def normalize_path(path, resolve_symlinks=True): + """ + Convert a path to its canonical, case-normalized, absolute version. + + """ + path = expanduser(path) + if resolve_symlinks: + path = os.path.realpath(path) + else: + path = os.path.abspath(path) + return os.path.normcase(path) + + +def splitext(path): + """Like os.path.splitext, but take off .tar too""" + base, ext = posixpath.splitext(path) + if base.lower().endswith('.tar'): + ext = base[-4:] + ext + base = base[:-4] + return base, ext + + +def renames(old, new): + """Like os.renames(), but handles renaming across devices.""" + # Implementation borrowed from os.renames(). 
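# Usage example (assumes the vendored pip._internal package added by this
# patch is importable): a few of the small helpers above in action.
from pip._internal.utils.misc import format_size, split_leading_dir, splitext

assert splitext("pip-18.0.tar.gz") == ("pip-18.0", ".tar.gz")    # keeps ".tar"
assert split_leading_dir("pip-18.0/setup.py") == ["pip-18.0", "setup.py"]
assert format_size(1536) == "1.5kB"          # decimal (1000-based) units
assert format_size(2500000) == "2.5MB"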
+ head, tail = os.path.split(new) + if head and tail and not os.path.exists(head): + os.makedirs(head) + + shutil.move(old, new) + + head, tail = os.path.split(old) + if head and tail: + try: + os.removedirs(head) + except OSError: + pass + + +def is_local(path): + """ + Return True if path is within sys.prefix, if we're running in a virtualenv. + + If we're not in a virtualenv, all paths are considered "local." + + """ + if not running_under_virtualenv(): + return True + return normalize_path(path).startswith(normalize_path(sys.prefix)) + + +def dist_is_local(dist): + """ + Return True if given Distribution object is installed locally + (i.e. within current virtualenv). + + Always True if we're not in a virtualenv. + + """ + return is_local(dist_location(dist)) + + +def dist_in_usersite(dist): + """ + Return True if given Distribution is installed in user site. + """ + norm_path = normalize_path(dist_location(dist)) + return norm_path.startswith(normalize_path(user_site)) + + +def dist_in_site_packages(dist): + """ + Return True if given Distribution is installed in + sysconfig.get_python_lib(). + """ + return normalize_path( + dist_location(dist) + ).startswith(normalize_path(site_packages)) + + +def dist_is_editable(dist): + """Is distribution an editable install?""" + for path_item in sys.path: + egg_link = os.path.join(path_item, dist.project_name + '.egg-link') + if os.path.isfile(egg_link): + return True + return False + + +def get_installed_distributions(local_only=True, + skip=stdlib_pkgs, + include_editables=True, + editables_only=False, + user_only=False): + """ + Return a list of installed Distribution objects. + + If ``local_only`` is True (default), only return installations + local to the current virtualenv, if in a virtualenv. + + ``skip`` argument is an iterable of lower-case project names to + ignore; defaults to stdlib_pkgs + + If ``include_editables`` is False, don't report editables. + + If ``editables_only`` is True , only report editables. + + If ``user_only`` is True , only report installations in the user + site directory. + + """ + if local_only: + local_test = dist_is_local + else: + def local_test(d): + return True + + if include_editables: + def editable_test(d): + return True + else: + def editable_test(d): + return not dist_is_editable(d) + + if editables_only: + def editables_only_test(d): + return dist_is_editable(d) + else: + def editables_only_test(d): + return True + + if user_only: + user_test = dist_in_usersite + else: + def user_test(d): + return True + + return [d for d in pkg_resources.working_set + if local_test(d) and + d.key not in skip and + editable_test(d) and + editables_only_test(d) and + user_test(d) + ] + + +def egg_link_path(dist): + """ + Return the path for the .egg-link file if it exists, otherwise, None. + + There's 3 scenarios: + 1) not in a virtualenv + try to find in site.USER_SITE, then site_packages + 2) in a no-global virtualenv + try to find in site_packages + 3) in a yes-global virtualenv + try to find in site_packages, then site.USER_SITE + (don't look in global location) + + For #1 and #3, there could be odd cases, where there's an egg-link in 2 + locations. + + This method will just return the first one found. 
+ """ + sites = [] + if running_under_virtualenv(): + if virtualenv_no_global(): + sites.append(site_packages) + else: + sites.append(site_packages) + if user_site: + sites.append(user_site) + else: + if user_site: + sites.append(user_site) + sites.append(site_packages) + + for site in sites: + egglink = os.path.join(site, dist.project_name) + '.egg-link' + if os.path.isfile(egglink): + return egglink + + +def dist_location(dist): + """ + Get the site-packages location of this distribution. Generally + this is dist.location, except in the case of develop-installed + packages, where dist.location is the source code location, and we + want to know where the egg-link file is. + + """ + egg_link = egg_link_path(dist) + if egg_link: + return egg_link + return dist.location + + +def current_umask(): + """Get the current umask which involves having to set it temporarily.""" + mask = os.umask(0) + os.umask(mask) + return mask + + +def unzip_file(filename, location, flatten=True): + """ + Unzip the file (with path `filename`) to the destination `location`. All + files are written based on system defaults and umask (i.e. permissions are + not preserved), except that regular file members with any execute + permissions (user, group, or world) have "chmod +x" applied after being + written. Note that for windows, any execute changes using os.chmod are + no-ops per the python docs. + """ + ensure_dir(location) + zipfp = open(filename, 'rb') + try: + zip = zipfile.ZipFile(zipfp, allowZip64=True) + leading = has_leading_dir(zip.namelist()) and flatten + for info in zip.infolist(): + name = info.filename + data = zip.read(name) + fn = name + if leading: + fn = split_leading_dir(name)[1] + fn = os.path.join(location, fn) + dir = os.path.dirname(fn) + if fn.endswith('/') or fn.endswith('\\'): + # A directory + ensure_dir(fn) + else: + ensure_dir(dir) + fp = open(fn, 'wb') + try: + fp.write(data) + finally: + fp.close() + mode = info.external_attr >> 16 + # if mode and regular file and any execute permissions for + # user/group/world? + if mode and stat.S_ISREG(mode) and mode & 0o111: + # make dest file have execute for user/group/world + # (chmod +x) no-op on windows per python docs + os.chmod(fn, (0o777 - current_umask() | 0o111)) + finally: + zipfp.close() + + +def untar_file(filename, location): + """ + Untar the file (with path `filename`) to the destination `location`. + All files are written based on system defaults and umask (i.e. permissions + are not preserved), except that regular file members with any execute + permissions (user, group, or world) have "chmod +x" applied after being + written. Note that for windows, any execute changes using os.chmod are + no-ops per the python docs. 
+ """ + ensure_dir(location) + if filename.lower().endswith('.gz') or filename.lower().endswith('.tgz'): + mode = 'r:gz' + elif filename.lower().endswith(BZ2_EXTENSIONS): + mode = 'r:bz2' + elif filename.lower().endswith(XZ_EXTENSIONS): + mode = 'r:xz' + elif filename.lower().endswith('.tar'): + mode = 'r' + else: + logger.warning( + 'Cannot determine compression type for file %s', filename, + ) + mode = 'r:*' + tar = tarfile.open(filename, mode) + try: + # note: python<=2.5 doesn't seem to know about pax headers, filter them + leading = has_leading_dir([ + member.name for member in tar.getmembers() + if member.name != 'pax_global_header' + ]) + for member in tar.getmembers(): + fn = member.name + if fn == 'pax_global_header': + continue + if leading: + fn = split_leading_dir(fn)[1] + path = os.path.join(location, fn) + if member.isdir(): + ensure_dir(path) + elif member.issym(): + try: + tar._extract_member(member, path) + except Exception as exc: + # Some corrupt tar files seem to produce this + # (specifically bad symlinks) + logger.warning( + 'In the tar file %s the member %s is invalid: %s', + filename, member.name, exc, + ) + continue + else: + try: + fp = tar.extractfile(member) + except (KeyError, AttributeError) as exc: + # Some corrupt tar files seem to produce this + # (specifically bad symlinks) + logger.warning( + 'In the tar file %s the member %s is invalid: %s', + filename, member.name, exc, + ) + continue + ensure_dir(os.path.dirname(path)) + with open(path, 'wb') as destfp: + shutil.copyfileobj(fp, destfp) + fp.close() + # Update the timestamp (useful for cython compiled files) + tar.utime(member, path) + # member have any execute permissions for user/group/world? + if member.mode & 0o111: + # make dest file have execute for user/group/world + # no-op on windows per python docs + os.chmod(path, (0o777 - current_umask() | 0o111)) + finally: + tar.close() + + +def unpack_file(filename, location, content_type, link): + filename = os.path.realpath(filename) + if (content_type == 'application/zip' or + filename.lower().endswith(ZIP_EXTENSIONS) or + zipfile.is_zipfile(filename)): + unzip_file( + filename, + location, + flatten=not filename.endswith('.whl') + ) + elif (content_type == 'application/x-gzip' or + tarfile.is_tarfile(filename) or + filename.lower().endswith( + TAR_EXTENSIONS + BZ2_EXTENSIONS + XZ_EXTENSIONS)): + untar_file(filename, location) + elif (content_type and content_type.startswith('text/html') and + is_svn_page(file_contents(filename))): + # We don't really care about this + from pip._internal.vcs.subversion import Subversion + Subversion('svn+' + link.url).unpack(location) + else: + # FIXME: handle? + # FIXME: magic signatures? + logger.critical( + 'Cannot unpack file %s (downloaded from %s, content-type: %s); ' + 'cannot detect archive format', + filename, location, content_type, + ) + raise InstallationError( + 'Cannot determine archive format of %s' % location + ) + + +def call_subprocess(cmd, show_stdout=True, cwd=None, + on_returncode='raise', + command_desc=None, + extra_environ=None, unset_environ=None, spinner=None): + """ + Args: + unset_environ: an iterable of environment variable names to unset + prior to calling subprocess.Popen(). + """ + if unset_environ is None: + unset_environ = [] + # This function's handling of subprocess output is confusing and I + # previously broke it terribly, so as penance I will write a long comment + # explaining things. + # + # The obvious thing that affects output is the show_stdout= + # kwarg. 
show_stdout=True means, let the subprocess write directly to our + # stdout. Even though it is nominally the default, it is almost never used + # inside pip (and should not be used in new code without a very good + # reason); as of 2016-02-22 it is only used in a few places inside the VCS + # wrapper code. Ideally we should get rid of it entirely, because it + # creates a lot of complexity here for a rarely used feature. + # + # Most places in pip set show_stdout=False. What this means is: + # - We connect the child stdout to a pipe, which we read. + # - By default, we hide the output but show a spinner -- unless the + # subprocess exits with an error, in which case we show the output. + # - If the --verbose option was passed (= loglevel is DEBUG), then we show + # the output unconditionally. (But in this case we don't want to show + # the output a second time if it turns out that there was an error.) + # + # stderr is always merged with stdout (even if show_stdout=True). + if show_stdout: + stdout = None + else: + stdout = subprocess.PIPE + if command_desc is None: + cmd_parts = [] + for part in cmd: + if ' ' in part or '\n' in part or '"' in part or "'" in part: + part = '"%s"' % part.replace('"', '\\"') + cmd_parts.append(part) + command_desc = ' '.join(cmd_parts) + logger.debug("Running command %s", command_desc) + env = os.environ.copy() + if extra_environ: + env.update(extra_environ) + for name in unset_environ: + env.pop(name, None) + try: + proc = subprocess.Popen( + cmd, stderr=subprocess.STDOUT, stdin=subprocess.PIPE, + stdout=stdout, cwd=cwd, env=env, + ) + proc.stdin.close() + except Exception as exc: + logger.critical( + "Error %s while executing command %s", exc, command_desc, + ) + raise + all_output = [] + if stdout is not None: + while True: + line = console_to_str(proc.stdout.readline()) + if not line: + break + line = line.rstrip() + all_output.append(line + '\n') + if logger.getEffectiveLevel() <= std_logging.DEBUG: + # Show the line immediately + logger.debug(line) + else: + # Update the spinner + if spinner is not None: + spinner.spin() + try: + proc.wait() + finally: + if proc.stdout: + proc.stdout.close() + if spinner is not None: + if proc.returncode: + spinner.finish("error") + else: + spinner.finish("done") + if proc.returncode: + if on_returncode == 'raise': + if (logger.getEffectiveLevel() > std_logging.DEBUG and + not show_stdout): + logger.info( + 'Complete output from command %s:', command_desc, + ) + logger.info( + ''.join(all_output) + + '\n----------------------------------------' + ) + raise InstallationError( + 'Command "%s" failed with error code %s in %s' + % (command_desc, proc.returncode, cwd)) + elif on_returncode == 'warn': + logger.warning( + 'Command "%s" had error code %s in %s', + command_desc, proc.returncode, cwd, + ) + elif on_returncode == 'ignore': + pass + else: + raise ValueError('Invalid value: on_returncode=%s' % + repr(on_returncode)) + if not show_stdout: + return ''.join(all_output) + + +def read_text_file(filename): + """Return the contents of *filename*. + + Try to decode the file contents with utf-8, the preferred system encoding + (e.g., cp1252 on some Windows machines), and latin1, in that order. + Decoding a byte string with latin1 will never raise an error. In the worst + case, the returned string will contain some garbage characters. 
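# Illustrative sketch (not from the pip sources in this patch) of the fallback
# order just described: utf-8 first, then the preferred locale encoding, then
# latin1, which by construction can decode any byte sequence.
import locale

raw = b"caf\xe9"        # latin1-encoded "café"; invalid as utf-8
for enc in ["utf-8", locale.getpreferredencoding(False), "latin1"]:
    try:
        text = raw.decode(enc)
    except UnicodeDecodeError:
        continue
    break
print(text)             # decoded without ever raising, "café" on latin1/cp1252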
+ + """ + with open(filename, 'rb') as fp: + data = fp.read() + + encodings = ['utf-8', locale.getpreferredencoding(False), 'latin1'] + for enc in encodings: + try: + data = data.decode(enc) + except UnicodeDecodeError: + continue + break + + assert type(data) != bytes # Latin1 should have worked. + return data + + +def _make_build_dir(build_dir): + os.makedirs(build_dir) + write_delete_marker_file(build_dir) + + +class FakeFile(object): + """Wrap a list of lines in an object with readline() to make + ConfigParser happy.""" + def __init__(self, lines): + self._gen = (l for l in lines) + + def readline(self): + try: + try: + return next(self._gen) + except NameError: + return self._gen.next() + except StopIteration: + return '' + + def __iter__(self): + return self._gen + + +class StreamWrapper(StringIO): + + @classmethod + def from_stream(cls, orig_stream): + cls.orig_stream = orig_stream + return cls() + + # compileall.compile_dir() needs stdout.encoding to print to stdout + @property + def encoding(self): + return self.orig_stream.encoding + + +@contextlib.contextmanager +def captured_output(stream_name): + """Return a context manager used by captured_stdout/stdin/stderr + that temporarily replaces the sys stream *stream_name* with a StringIO. + + Taken from Lib/support/__init__.py in the CPython repo. + """ + orig_stdout = getattr(sys, stream_name) + setattr(sys, stream_name, StreamWrapper.from_stream(orig_stdout)) + try: + yield getattr(sys, stream_name) + finally: + setattr(sys, stream_name, orig_stdout) + + +def captured_stdout(): + """Capture the output of sys.stdout: + + with captured_stdout() as stdout: + print('hello') + self.assertEqual(stdout.getvalue(), 'hello\n') + + Taken from Lib/support/__init__.py in the CPython repo. + """ + return captured_output('stdout') + + +class cached_property(object): + """A property that is only computed once per instance and then replaces + itself with an ordinary attribute. Deleting the attribute resets the + property. + + Source: https://github.com/bottlepy/bottle/blob/0.11.5/bottle.py#L175 + """ + + def __init__(self, func): + self.__doc__ = getattr(func, '__doc__') + self.func = func + + def __get__(self, obj, cls): + if obj is None: + # We're being accessed from the class itself, not from an object + return self + value = obj.__dict__[self.func.__name__] = self.func(obj) + return value + + +def get_installed_version(dist_name, working_set=None): + """Get the installed version of dist_name avoiding pkg_resources cache""" + # Create a requirement that we'll look for inside of setuptools. + req = pkg_resources.Requirement.parse(dist_name) + + if working_set is None: + # We want to avoid having this cached, so we need to construct a new + # working set each time. + working_set = pkg_resources.WorkingSet() + + # Get the installed distribution from our working set + dist = working_set.find(req) + + # Check to see if we got an installed distribution or not, if we did + # we want to return it's version. + return dist.version if dist else None + + +def consume(iterator): + """Consume an iterable at C speed.""" + deque(iterator, maxlen=0) + + +# Simulates an enum +def enum(*sequential, **named): + enums = dict(zip(sequential, range(len(sequential))), **named) + reverse = {value: key for key, value in enums.items()} + enums['reverse_mapping'] = reverse + return type('Enum', (), enums) + + +def remove_auth_from_url(url): + # Return a copy of url with 'username:password@' removed. 
+ # username/pass params are passed to subversion through flags + # and are not recognized in the url. + + # parsed url + purl = urllib_parse.urlsplit(url) + stripped_netloc = \ + purl.netloc.split('@')[-1] + + # stripped url + url_pieces = ( + purl.scheme, stripped_netloc, purl.path, purl.query, purl.fragment + ) + surl = urllib_parse.urlunsplit(url_pieces) + return surl + + +def protect_pip_from_modification_on_windows(modifying_pip): + """Protection of pip.exe from modification on Windows + + On Windows, any operation modifying pip should be run as: + python -m pip ... + """ + pip_names = [ + "pip.exe", + "pip{}.exe".format(sys.version_info[0]), + "pip{}.{}.exe".format(*sys.version_info[:2]) + ] + + # See https://github.com/pypa/pip/issues/1299 for more discussion + should_show_use_python_msg = ( + modifying_pip and + WINDOWS and + os.path.basename(sys.argv[0]) in pip_names + ) + + if should_show_use_python_msg: + new_command = [ + sys.executable, "-m", "pip" + ] + sys.argv[1:] + raise CommandError( + 'To modify pip, please run the following command:\n{}' + .format(" ".join(new_command)) + ) diff --git a/env/lib/python3.6/site-packages/pip/_internal/utils/outdated.py b/env/lib/python3.6/site-packages/pip/_internal/utils/outdated.py new file mode 100644 index 0000000..2b3fa95 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_internal/utils/outdated.py @@ -0,0 +1,145 @@ +from __future__ import absolute_import + +import datetime +import json +import logging +import os.path +import sys + +from pip._vendor import lockfile, pkg_resources +from pip._vendor.packaging import version as packaging_version + +from pip._internal.compat import WINDOWS +from pip._internal.index import PackageFinder +from pip._internal.utils.filesystem import check_path_owner +from pip._internal.utils.misc import ensure_dir, get_installed_version + +SELFCHECK_DATE_FMT = "%Y-%m-%dT%H:%M:%SZ" + + +logger = logging.getLogger(__name__) + + +class SelfCheckState(object): + def __init__(self, cache_dir): + self.statefile_path = os.path.join(cache_dir, "selfcheck.json") + + # Load the existing state + try: + with open(self.statefile_path) as statefile: + self.state = json.load(statefile)[sys.prefix] + except (IOError, ValueError, KeyError): + self.state = {} + + def save(self, pypi_version, current_time): + # Check to make sure that we own the directory + if not check_path_owner(os.path.dirname(self.statefile_path)): + return + + # Now that we've ensured the directory is owned by this user, we'll go + # ahead and make sure that all our directories are created. + ensure_dir(os.path.dirname(self.statefile_path)) + + # Attempt to write out our version check file + with lockfile.LockFile(self.statefile_path): + if os.path.exists(self.statefile_path): + with open(self.statefile_path) as statefile: + state = json.load(statefile) + else: + state = {} + + state[sys.prefix] = { + "last_check": current_time.strftime(SELFCHECK_DATE_FMT), + "pypi_version": pypi_version, + } + + with open(self.statefile_path, "w") as statefile: + json.dump(state, statefile, sort_keys=True, + separators=(",", ":")) + + +def was_installed_by_pip(pkg): + """Checks whether pkg was installed by pip + + This is used not to display the upgrade message when pip is in fact + installed by system package manager, such as dnf on Fedora. 
+ """ + try: + dist = pkg_resources.get_distribution(pkg) + return (dist.has_metadata('INSTALLER') and + 'pip' in dist.get_metadata_lines('INSTALLER')) + except pkg_resources.DistributionNotFound: + return False + + +def pip_version_check(session, options): + """Check for an update for pip. + + Limit the frequency of checks to once per week. State is stored either in + the active virtualenv or in the user's USER_CACHE_DIR keyed off the prefix + of the pip script path. + """ + installed_version = get_installed_version("pip") + if not installed_version: + return + + pip_version = packaging_version.parse(installed_version) + pypi_version = None + + try: + state = SelfCheckState(cache_dir=options.cache_dir) + + current_time = datetime.datetime.utcnow() + # Determine if we need to refresh the state + if "last_check" in state.state and "pypi_version" in state.state: + last_check = datetime.datetime.strptime( + state.state["last_check"], + SELFCHECK_DATE_FMT + ) + if (current_time - last_check).total_seconds() < 7 * 24 * 60 * 60: + pypi_version = state.state["pypi_version"] + + # Refresh the version if we need to or just see if we need to warn + if pypi_version is None: + # Lets use PackageFinder to see what the latest pip version is + finder = PackageFinder( + find_links=options.find_links, + index_urls=[options.index_url] + options.extra_index_urls, + allow_all_prereleases=False, # Explicitly set to False + trusted_hosts=options.trusted_hosts, + process_dependency_links=options.process_dependency_links, + session=session, + ) + all_candidates = finder.find_all_candidates("pip") + if not all_candidates: + return + pypi_version = str( + max(all_candidates, key=lambda c: c.version).version + ) + + # save that we've performed a check + state.save(pypi_version, current_time) + + remote_version = packaging_version.parse(pypi_version) + + # Determine if our pypi_version is older + if (pip_version < remote_version and + pip_version.base_version != remote_version.base_version and + was_installed_by_pip('pip')): + # Advise "python -m pip" on Windows to avoid issues + # with overwriting pip.exe. + if WINDOWS: + pip_cmd = "python -m pip" + else: + pip_cmd = "pip" + logger.warning( + "You are using pip version %s, however version %s is " + "available.\nYou should consider upgrading via the " + "'%s install --upgrade pip' command.", + pip_version, pypi_version, pip_cmd + ) + except Exception: + logger.debug( + "There was an error checking the latest version of pip", + exc_info=True, + ) diff --git a/env/lib/python3.6/site-packages/pip/_internal/utils/packaging.py b/env/lib/python3.6/site-packages/pip/_internal/utils/packaging.py new file mode 100644 index 0000000..5f9bb93 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_internal/utils/packaging.py @@ -0,0 +1,70 @@ +from __future__ import absolute_import + +import logging +import sys +from email.parser import FeedParser # type: ignore + +from pip._vendor import pkg_resources +from pip._vendor.packaging import specifiers, version + +from pip._internal import exceptions + +logger = logging.getLogger(__name__) + + +def check_requires_python(requires_python): + """ + Check if the python version in use match the `requires_python` specifier. + + Returns `True` if the version of python in use matches the requirement. + Returns `False` if the version of python in use does not matches the + requirement. + + Raises an InvalidSpecifier if `requires_python` have an invalid format. 
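+
+    Illustrative example: on CPython 3.6.5 this returns `True` for
+    `requires_python=">=3.4"` and `False` for `requires_python="<3"`.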
+ """ + if requires_python is None: + # The package provides no information + return True + requires_python_specifier = specifiers.SpecifierSet(requires_python) + + # We only use major.minor.micro + python_version = version.parse('.'.join(map(str, sys.version_info[:3]))) + return python_version in requires_python_specifier + + +def get_metadata(dist): + if (isinstance(dist, pkg_resources.DistInfoDistribution) and + dist.has_metadata('METADATA')): + return dist.get_metadata('METADATA') + elif dist.has_metadata('PKG-INFO'): + return dist.get_metadata('PKG-INFO') + + +def check_dist_requires_python(dist): + metadata = get_metadata(dist) + feed_parser = FeedParser() + feed_parser.feed(metadata) + pkg_info_dict = feed_parser.close() + requires_python = pkg_info_dict.get('Requires-Python') + try: + if not check_requires_python(requires_python): + raise exceptions.UnsupportedPythonVersion( + "%s requires Python '%s' but the running Python is %s" % ( + dist.project_name, + requires_python, + '.'.join(map(str, sys.version_info[:3])),) + ) + except specifiers.InvalidSpecifier as e: + logger.warning( + "Package %s has an invalid Requires-Python entry %s - %s", + dist.project_name, requires_python, e, + ) + return + + +def get_installer(dist): + if dist.has_metadata('INSTALLER'): + for line in dist.get_metadata_lines('INSTALLER'): + if line.strip(): + return line.strip() + return '' diff --git a/env/lib/python3.6/site-packages/pip/_internal/utils/setuptools_build.py b/env/lib/python3.6/site-packages/pip/_internal/utils/setuptools_build.py new file mode 100644 index 0000000..03973e9 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_internal/utils/setuptools_build.py @@ -0,0 +1,8 @@ +# Shim to wrap setup.py invocation with setuptools +SETUPTOOLS_SHIM = ( + "import setuptools, tokenize;__file__=%r;" + "f=getattr(tokenize, 'open', open)(__file__);" + "code=f.read().replace('\\r\\n', '\\n');" + "f.close();" + "exec(compile(code, __file__, 'exec'))" +) diff --git a/env/lib/python3.6/site-packages/pip/_internal/utils/temp_dir.py b/env/lib/python3.6/site-packages/pip/_internal/utils/temp_dir.py new file mode 100644 index 0000000..edc506b --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_internal/utils/temp_dir.py @@ -0,0 +1,82 @@ +from __future__ import absolute_import + +import logging +import os.path +import tempfile + +from pip._internal.utils.misc import rmtree + +logger = logging.getLogger(__name__) + + +class TempDirectory(object): + """Helper class that owns and cleans up a temporary directory. + + This class can be used as a context manager or as an OO representation of a + temporary directory. + + Attributes: + path + Location to the created temporary directory or None + delete + Whether the directory should be deleted when exiting + (when used as a contextmanager) + + Methods: + create() + Creates a temporary directory and stores its path in the path + attribute. + cleanup() + Deletes the temporary directory and sets path attribute to None + + When used as a context manager, a temporary directory is created on + entering the context and, if the delete attribute is True, on exiting the + context the created directory is deleted. + """ + + def __init__(self, path=None, delete=None, kind="temp"): + super(TempDirectory, self).__init__() + + if path is None and delete is None: + # If we were not given an explicit directory, and we were not given + # an explicit delete option, then we'll default to deleting. 
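+            # (Illustrative: TempDirectory() both creates and later deletes its
+            # directory, while TempDirectory(path=some_existing_dir) keeps the
+            # caller-supplied path on exit unless delete=True is passed explicitly.)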
+ delete = True + + self.path = path + self.delete = delete + self.kind = kind + + def __repr__(self): + return "<{} {!r}>".format(self.__class__.__name__, self.path) + + def __enter__(self): + self.create() + return self + + def __exit__(self, exc, value, tb): + if self.delete: + self.cleanup() + + def create(self): + """Create a temporary directory and store it's path in self.path + """ + if self.path is not None: + logger.debug( + "Skipped creation of temporary directory: {}".format(self.path) + ) + return + # We realpath here because some systems have their default tmpdir + # symlinked to another directory. This tends to confuse build + # scripts, so we canonicalize the path by traversing potential + # symlinks here. + self.path = os.path.realpath( + tempfile.mkdtemp(prefix="pip-{}-".format(self.kind)) + ) + logger.debug("Created temporary directory: {}".format(self.path)) + + def cleanup(self): + """Remove the temporary directory created and reset state + """ + if self.path is not None and os.path.exists(self.path): + rmtree(self.path) + self.path = None diff --git a/env/lib/python3.6/site-packages/pip/_internal/utils/typing.py b/env/lib/python3.6/site-packages/pip/_internal/utils/typing.py new file mode 100644 index 0000000..e085cdf --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_internal/utils/typing.py @@ -0,0 +1,29 @@ +"""For neatly implementing static typing in pip. + +`mypy` - the static type analysis tool we use - uses the `typing` module, which +provides core functionality fundamental to mypy's functioning. + +Generally, `typing` would be imported at runtime and used in that fashion - +it acts as a no-op at runtime and does not have any run-time overhead by +design. + +As it turns out, `typing` is not vendorable - it uses separate sources for +Python 2/Python 3. Thus, this codebase can not expect it to be present. +To work around this, mypy allows the typing import to be behind a False-y +optional to prevent it from running at runtime and type-comments can be used +to remove the need for the types to be accessible directly during runtime. + +This module provides the False-y guard in a nicely named fashion so that a +curious maintainer can reach here to read this. + +In pip, all static-typing related imports should be guarded as follows: + + from pip._internal.utils.typing import MYPY_CHECK_RUNNING + + if MYPY_CHECK_RUNNING: + from typing import ... 
# noqa: F401 + +Ref: https://github.com/python/mypy/issues/3216 +""" + +MYPY_CHECK_RUNNING = False diff --git a/env/lib/python3.6/site-packages/pip/_internal/utils/ui.py b/env/lib/python3.6/site-packages/pip/_internal/utils/ui.py new file mode 100644 index 0000000..9429aae --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_internal/utils/ui.py @@ -0,0 +1,421 @@ +from __future__ import absolute_import, division + +import contextlib +import itertools +import logging +import sys +import time +from signal import SIGINT, default_int_handler, signal + +from pip._vendor import six +from pip._vendor.progress.bar import ( + Bar, ChargingBar, FillingCirclesBar, FillingSquaresBar, IncrementalBar, + ShadyBar, +) +from pip._vendor.progress.helpers import HIDE_CURSOR, SHOW_CURSOR, WritelnMixin +from pip._vendor.progress.spinner import Spinner + +from pip._internal.compat import WINDOWS +from pip._internal.utils.logging import get_indentation +from pip._internal.utils.misc import format_size +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Any # noqa: F401 + +try: + from pip._vendor import colorama +# Lots of different errors can come from this, including SystemError and +# ImportError. +except Exception: + colorama = None + +logger = logging.getLogger(__name__) + + +def _select_progress_class(preferred, fallback): + encoding = getattr(preferred.file, "encoding", None) + + # If we don't know what encoding this file is in, then we'll just assume + # that it doesn't support unicode and use the ASCII bar. + if not encoding: + return fallback + + # Collect all of the possible characters we want to use with the preferred + # bar. + characters = [ + getattr(preferred, "empty_fill", six.text_type()), + getattr(preferred, "fill", six.text_type()), + ] + characters += list(getattr(preferred, "phases", [])) + + # Try to decode the characters we're using for the bar using the encoding + # of the given file, if this works then we'll assume that we can use the + # fancier bar and if not we'll fall back to the plaintext bar. + try: + six.text_type().join(characters).encode(encoding) + except UnicodeEncodeError: + return fallback + else: + return preferred + + +_BaseBar = _select_progress_class(IncrementalBar, Bar) # type: Any + + +class InterruptibleMixin(object): + """ + Helper to ensure that self.finish() gets called on keyboard interrupt. + + This allows downloads to be interrupted without leaving temporary state + (like hidden cursors) behind. + + This class is similar to the progress library's existing SigIntMixin + helper, but as of version 1.2, that helper has the following problems: + + 1. It calls sys.exit(). + 2. It discards the existing SIGINT handler completely. + 3. It leaves its own handler in place even after an uninterrupted finish, + which will have unexpected delayed effects if the user triggers an + unrelated keyboard interrupt some time after a progress-displaying + download has already completed, for example. + """ + + def __init__(self, *args, **kwargs): + """ + Save the original SIGINT handler for later. + """ + super(InterruptibleMixin, self).__init__(*args, **kwargs) + + self.original_handler = signal(SIGINT, self.handle_sigint) + + # If signal() returns None, the previous handler was not installed from + # Python, and we cannot restore it. This probably should not happen, + # but if it does, we must restore something sensible instead, at least. 
+ # The least bad option should be Python's default SIGINT handler, which + # just raises KeyboardInterrupt. + if self.original_handler is None: + self.original_handler = default_int_handler + + def finish(self): + """ + Restore the original SIGINT handler after finishing. + + This should happen regardless of whether the progress display finishes + normally, or gets interrupted. + """ + super(InterruptibleMixin, self).finish() + signal(SIGINT, self.original_handler) + + def handle_sigint(self, signum, frame): + """ + Call self.finish() before delegating to the original SIGINT handler. + + This handler should only be in place while the progress display is + active. + """ + self.finish() + self.original_handler(signum, frame) + + +class SilentBar(Bar): + + def update(self): + pass + + +class BlueEmojiBar(IncrementalBar): + + suffix = "%(percent)d%%" + bar_prefix = " " + bar_suffix = " " + phases = (u"\U0001F539", u"\U0001F537", u"\U0001F535") # type: Any + + +class DownloadProgressMixin(object): + + def __init__(self, *args, **kwargs): + super(DownloadProgressMixin, self).__init__(*args, **kwargs) + self.message = (" " * (get_indentation() + 2)) + self.message + + @property + def downloaded(self): + return format_size(self.index) + + @property + def download_speed(self): + # Avoid zero division errors... + if self.avg == 0.0: + return "..." + return format_size(1 / self.avg) + "/s" + + @property + def pretty_eta(self): + if self.eta: + return "eta %s" % self.eta_td + return "" + + def iter(self, it, n=1): + for x in it: + yield x + self.next(n) + self.finish() + + +class WindowsMixin(object): + + def __init__(self, *args, **kwargs): + # The Windows terminal does not support the hide/show cursor ANSI codes + # even with colorama. So we'll ensure that hide_cursor is False on + # Windows. + # This call neds to go before the super() call, so that hide_cursor + # is set in time. The base progress bar class writes the "hide cursor" + # code to the terminal in its init, so if we don't set this soon + # enough, we get a "hide" with no corresponding "show"... + if WINDOWS and self.hide_cursor: + self.hide_cursor = False + + super(WindowsMixin, self).__init__(*args, **kwargs) + + # Check if we are running on Windows and we have the colorama module, + # if we do then wrap our file with it. + if WINDOWS and colorama: + self.file = colorama.AnsiToWin32(self.file) + # The progress code expects to be able to call self.file.isatty() + # but the colorama.AnsiToWin32() object doesn't have that, so we'll + # add it. + self.file.isatty = lambda: self.file.wrapped.isatty() + # The progress code expects to be able to call self.file.flush() + # but the colorama.AnsiToWin32() object doesn't have that, so we'll + # add it. 
+ self.file.flush = lambda: self.file.wrapped.flush() + + +class BaseDownloadProgressBar(WindowsMixin, InterruptibleMixin, + DownloadProgressMixin): + + file = sys.stdout + message = "%(percent)d%%" + suffix = "%(downloaded)s %(download_speed)s %(pretty_eta)s" + +# NOTE: The "type: ignore" comments on the following classes are there to +# work around https://github.com/python/typing/issues/241 + + +class DefaultDownloadProgressBar(BaseDownloadProgressBar, + _BaseBar): # type: ignore + pass + + +class DownloadSilentBar(BaseDownloadProgressBar, SilentBar): # type: ignore + pass + + +class DownloadIncrementalBar(BaseDownloadProgressBar, # type: ignore + IncrementalBar): + pass + + +class DownloadChargingBar(BaseDownloadProgressBar, # type: ignore + ChargingBar): + pass + + +class DownloadShadyBar(BaseDownloadProgressBar, ShadyBar): # type: ignore + pass + + +class DownloadFillingSquaresBar(BaseDownloadProgressBar, # type: ignore + FillingSquaresBar): + pass + + +class DownloadFillingCirclesBar(BaseDownloadProgressBar, # type: ignore + FillingCirclesBar): + pass + + +class DownloadBlueEmojiProgressBar(BaseDownloadProgressBar, # type: ignore + BlueEmojiBar): + pass + + +class DownloadProgressSpinner(WindowsMixin, InterruptibleMixin, + DownloadProgressMixin, WritelnMixin, Spinner): + + file = sys.stdout + suffix = "%(downloaded)s %(download_speed)s" + + def next_phase(self): + if not hasattr(self, "_phaser"): + self._phaser = itertools.cycle(self.phases) + return next(self._phaser) + + def update(self): + message = self.message % self + phase = self.next_phase() + suffix = self.suffix % self + line = ''.join([ + message, + " " if message else "", + phase, + " " if suffix else "", + suffix, + ]) + + self.writeln(line) + + +BAR_TYPES = { + "off": (DownloadSilentBar, DownloadSilentBar), + "on": (DefaultDownloadProgressBar, DownloadProgressSpinner), + "ascii": (DownloadIncrementalBar, DownloadProgressSpinner), + "pretty": (DownloadFillingCirclesBar, DownloadProgressSpinner), + "emoji": (DownloadBlueEmojiProgressBar, DownloadProgressSpinner) +} + + +def DownloadProgressProvider(progress_bar, max=None): + if max is None or max == 0: + return BAR_TYPES[progress_bar][1]().iter + else: + return BAR_TYPES[progress_bar][0](max=max).iter + + +################################################################ +# Generic "something is happening" spinners +# +# We don't even try using progress.spinner.Spinner here because it's actually +# simpler to reimplement from scratch than to coerce their code into doing +# what we need. +################################################################ + +@contextlib.contextmanager +def hidden_cursor(file): + # The Windows terminal does not support the hide/show cursor ANSI codes, + # even via colorama. So don't even try. + if WINDOWS: + yield + # We don't want to clutter the output with control characters if we're + # writing to a file, or if the user is running with --quiet. 
+ # See https://github.com/pypa/pip/issues/3418 + elif not file.isatty() or logger.getEffectiveLevel() > logging.INFO: + yield + else: + file.write(HIDE_CURSOR) + try: + yield + finally: + file.write(SHOW_CURSOR) + + +class RateLimiter(object): + def __init__(self, min_update_interval_seconds): + self._min_update_interval_seconds = min_update_interval_seconds + self._last_update = 0 + + def ready(self): + now = time.time() + delta = now - self._last_update + return delta >= self._min_update_interval_seconds + + def reset(self): + self._last_update = time.time() + + +class InteractiveSpinner(object): + def __init__(self, message, file=None, spin_chars="-\\|/", + # Empirically, 8 updates/second looks nice + min_update_interval_seconds=0.125): + self._message = message + if file is None: + file = sys.stdout + self._file = file + self._rate_limiter = RateLimiter(min_update_interval_seconds) + self._finished = False + + self._spin_cycle = itertools.cycle(spin_chars) + + self._file.write(" " * get_indentation() + self._message + " ... ") + self._width = 0 + + def _write(self, status): + assert not self._finished + # Erase what we wrote before by backspacing to the beginning, writing + # spaces to overwrite the old text, and then backspacing again + backup = "\b" * self._width + self._file.write(backup + " " * self._width + backup) + # Now we have a blank slate to add our status + self._file.write(status) + self._width = len(status) + self._file.flush() + self._rate_limiter.reset() + + def spin(self): + if self._finished: + return + if not self._rate_limiter.ready(): + return + self._write(next(self._spin_cycle)) + + def finish(self, final_status): + if self._finished: + return + self._write(final_status) + self._file.write("\n") + self._file.flush() + self._finished = True + + +# Used for dumb terminals, non-interactive installs (no tty), etc. +# We still print updates occasionally (once every 60 seconds by default) to +# act as a keep-alive for systems like Travis-CI that take lack-of-output as +# an indication that a task has frozen. +class NonInteractiveSpinner(object): + def __init__(self, message, min_update_interval_seconds=60): + self._message = message + self._finished = False + self._rate_limiter = RateLimiter(min_update_interval_seconds) + self._update("started") + + def _update(self, status): + assert not self._finished + self._rate_limiter.reset() + logger.info("%s: %s", self._message, status) + + def spin(self): + if self._finished: + return + if not self._rate_limiter.ready(): + return + self._update("still running...") + + def finish(self, final_status): + if self._finished: + return + self._update("finished with status '%s'" % (final_status,)) + self._finished = True + + +@contextlib.contextmanager +def open_spinner(message): + # Interactive spinner goes directly to sys.stdout rather than being routed + # through the logging system, but it acts like it has level INFO, + # i.e. it's only displayed if we're at level INFO or better. + # Non-interactive spinner goes through the logging system, so it is always + # in sync with logging configuration. 
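+    # Illustrative usage (hypothetical command):
+    #
+    #     with open_spinner("Running setup.py bdist_wheel for foo") as spinner:
+    #         call_subprocess(cmd, show_stdout=False, spinner=spinner)
+    #
+    # call_subprocess() calls spinner.spin() for each line of hidden output and
+    # the context manager finishes the spinner with "done", "error" or
+    # "canceled".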
+ if sys.stdout.isatty() and logger.getEffectiveLevel() <= logging.INFO: + spinner = InteractiveSpinner(message) + else: + spinner = NonInteractiveSpinner(message) + try: + with hidden_cursor(sys.stdout): + yield spinner + except KeyboardInterrupt: + spinner.finish("canceled") + raise + except Exception: + spinner.finish("error") + raise + else: + spinner.finish("done") diff --git a/env/lib/python3.6/site-packages/pip/_internal/vcs/bazaar.py b/env/lib/python3.6/site-packages/pip/_internal/vcs/bazaar.py new file mode 100644 index 0000000..93b7616 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_internal/vcs/bazaar.py @@ -0,0 +1,110 @@ +from __future__ import absolute_import + +import logging +import os + +from pip._vendor.six.moves.urllib import parse as urllib_parse + +from pip._internal.download import path_to_url +from pip._internal.utils.misc import display_path, rmtree +from pip._internal.utils.temp_dir import TempDirectory +from pip._internal.vcs import VersionControl, vcs + +logger = logging.getLogger(__name__) + + +class Bazaar(VersionControl): + name = 'bzr' + dirname = '.bzr' + repo_name = 'branch' + schemes = ( + 'bzr', 'bzr+http', 'bzr+https', 'bzr+ssh', 'bzr+sftp', 'bzr+ftp', + 'bzr+lp', + ) + + def __init__(self, url=None, *args, **kwargs): + super(Bazaar, self).__init__(url, *args, **kwargs) + # This is only needed for python <2.7.5 + # Register lp but do not expose as a scheme to support bzr+lp. + if getattr(urllib_parse, 'uses_fragment', None): + urllib_parse.uses_fragment.extend(['lp']) + + def get_base_rev_args(self, rev): + return ['-r', rev] + + def export(self, location): + """ + Export the Bazaar repository at the url to the destination location + """ + # Remove the location to make sure Bazaar can export it correctly + if os.path.exists(location): + rmtree(location) + + with TempDirectory(kind="export") as temp_dir: + self.unpack(temp_dir.path) + + self.run_command( + ['export', location], + cwd=temp_dir.path, show_stdout=False, + ) + + def fetch_new(self, dest, url, rev_options): + rev_display = rev_options.to_display() + logger.info( + 'Checking out %s%s to %s', + url, + rev_display, + display_path(dest), + ) + cmd_args = ['branch', '-q'] + rev_options.to_args() + [url, dest] + self.run_command(cmd_args) + + def switch(self, dest, url, rev_options): + self.run_command(['switch', url], cwd=dest) + + def update(self, dest, rev_options): + cmd_args = ['pull', '-q'] + rev_options.to_args() + self.run_command(cmd_args, cwd=dest) + + def get_url_rev(self, url): + # hotfix the URL scheme after removing bzr+ from bzr+ssh:// readd it + url, rev = super(Bazaar, self).get_url_rev(url) + if url.startswith('ssh://'): + url = 'bzr+' + url + return url, rev + + def get_url(self, location): + urls = self.run_command(['info'], show_stdout=False, cwd=location) + for line in urls.splitlines(): + line = line.strip() + for x in ('checkout of branch: ', + 'parent branch: '): + if line.startswith(x): + repo = line.split(x)[1] + if self._is_local_repository(repo): + return path_to_url(repo) + return repo + return None + + def get_revision(self, location): + revision = self.run_command( + ['revno'], show_stdout=False, cwd=location, + ) + return revision.splitlines()[-1] + + def get_src_requirement(self, dist, location): + repo = self.get_url(location) + if not repo: + return None + if not repo.lower().startswith('bzr:'): + repo = 'bzr+' + repo + egg_project_name = dist.egg_name().split('-', 1)[0] + current_rev = self.get_revision(location) + return '%s@%s#egg=%s' % (repo, 
current_rev, egg_project_name) + + def is_commit_id_equal(self, dest, name): + """Always assume the versions don't match""" + return False + + +vcs.register(Bazaar) diff --git a/env/lib/python3.6/site-packages/pip/_internal/vcs/git.py b/env/lib/python3.6/site-packages/pip/_internal/vcs/git.py new file mode 100644 index 0000000..9ee2e01 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_internal/vcs/git.py @@ -0,0 +1,309 @@ +from __future__ import absolute_import + +import logging +import os.path +import re + +from pip._vendor.packaging.version import parse as parse_version +from pip._vendor.six.moves.urllib import parse as urllib_parse +from pip._vendor.six.moves.urllib import request as urllib_request + +from pip._internal.compat import samefile +from pip._internal.exceptions import BadCommand +from pip._internal.utils.misc import display_path +from pip._internal.utils.temp_dir import TempDirectory +from pip._internal.vcs import VersionControl, vcs + +urlsplit = urllib_parse.urlsplit +urlunsplit = urllib_parse.urlunsplit + + +logger = logging.getLogger(__name__) + + +HASH_REGEX = re.compile('[a-fA-F0-9]{40}') + + +def looks_like_hash(sha): + return bool(HASH_REGEX.match(sha)) + + +class Git(VersionControl): + name = 'git' + dirname = '.git' + repo_name = 'clone' + schemes = ( + 'git', 'git+http', 'git+https', 'git+ssh', 'git+git', 'git+file', + ) + # Prevent the user's environment variables from interfering with pip: + # https://github.com/pypa/pip/issues/1130 + unset_environ = ('GIT_DIR', 'GIT_WORK_TREE') + default_arg_rev = 'HEAD' + + def __init__(self, url=None, *args, **kwargs): + + # Works around an apparent Git bug + # (see https://article.gmane.org/gmane.comp.version-control.git/146500) + if url: + scheme, netloc, path, query, fragment = urlsplit(url) + if scheme.endswith('file'): + initial_slashes = path[:-len(path.lstrip('/'))] + newpath = ( + initial_slashes + + urllib_request.url2pathname(path) + .replace('\\', '/').lstrip('/') + ) + url = urlunsplit((scheme, netloc, newpath, query, fragment)) + after_plus = scheme.find('+') + 1 + url = scheme[:after_plus] + urlunsplit( + (scheme[after_plus:], netloc, newpath, query, fragment), + ) + + super(Git, self).__init__(url, *args, **kwargs) + + def get_base_rev_args(self, rev): + return [rev] + + def get_git_version(self): + VERSION_PFX = 'git version ' + version = self.run_command(['version'], show_stdout=False) + if version.startswith(VERSION_PFX): + version = version[len(VERSION_PFX):].split()[0] + else: + version = '' + # get first 3 positions of the git version becasue + # on windows it is x.y.z.windows.t, and this parses as + # LegacyVersion which always smaller than a Version. + version = '.'.join(version.split('.')[:3]) + return parse_version(version) + + def export(self, location): + """Export the Git repository at the url to the destination location""" + if not location.endswith('/'): + location = location + '/' + + with TempDirectory(kind="export") as temp_dir: + self.unpack(temp_dir.path) + self.run_command( + ['checkout-index', '-a', '-f', '--prefix', location], + show_stdout=False, cwd=temp_dir.path + ) + + def get_revision_sha(self, dest, rev): + """ + Return a commit hash for the given revision if it names a remote + branch or tag. Otherwise, return None. + + Args: + dest: the repository directory. + rev: the revision name. + """ + # Pass rev to pre-filter the list. 
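+        # Illustrative 'git show-ref 1.0' output (hypothetical SHAs):
+        #
+        #   0123456789abcdef0123456789abcdef01234567 refs/remotes/origin/1.0
+        #   89abcdef0123456789abcdef0123456789abcdef refs/tags/1.0
+        #
+        # The loop below turns such lines into a {ref: sha} mapping; a branch
+        # ref is preferred over a tag ref of the same name.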
+ output = self.run_command(['show-ref', rev], cwd=dest, + show_stdout=False, on_returncode='ignore') + refs = {} + for line in output.strip().splitlines(): + try: + sha, ref = line.split() + except ValueError: + # Include the offending line to simplify troubleshooting if + # this error ever occurs. + raise ValueError('unexpected show-ref line: {!r}'.format(line)) + + refs[ref] = sha + + branch_ref = 'refs/remotes/origin/{}'.format(rev) + tag_ref = 'refs/tags/{}'.format(rev) + + return refs.get(branch_ref) or refs.get(tag_ref) + + def check_rev_options(self, dest, rev_options): + """Check the revision options before checkout. + + Returns a new RevOptions object for the SHA1 of the branch or tag + if found. + + Args: + rev_options: a RevOptions object. + """ + rev = rev_options.arg_rev + sha = self.get_revision_sha(dest, rev) + + if sha is not None: + return rev_options.make_new(sha) + + # Do not show a warning for the common case of something that has + # the form of a Git commit hash. + if not looks_like_hash(rev): + logger.warning( + "Did not find branch or tag '%s', assuming revision or ref.", + rev, + ) + return rev_options + + def is_commit_id_equal(self, dest, name): + """ + Return whether the current commit hash equals the given name. + + Args: + dest: the repository directory. + name: a string name. + """ + if not name: + # Then avoid an unnecessary subprocess call. + return False + + return self.get_revision(dest) == name + + def fetch_new(self, dest, url, rev_options): + rev_display = rev_options.to_display() + logger.info( + 'Cloning %s%s to %s', url, rev_display, display_path(dest), + ) + self.run_command(['clone', '-q', url, dest]) + + if rev_options.rev: + # Then a specific revision was requested. + rev_options = self.check_rev_options(dest, rev_options) + # Only do a checkout if the current commit id doesn't match + # the requested revision. 
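+            # (At this point rev_options.rev is typically either a 40-character
+            # SHA resolved by check_rev_options above, or a raw ref such as
+            # 'refs/heads/stable' that still needs to be fetched; see the
+            # rev.startswith('refs/') branch below.)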
+ if not self.is_commit_id_equal(dest, rev_options.rev): + rev = rev_options.rev + # Only fetch the revision if it's a ref + if rev.startswith('refs/'): + self.run_command( + ['fetch', '-q', url] + rev_options.to_args(), + cwd=dest, + ) + # Change the revision to the SHA of the ref we fetched + rev = 'FETCH_HEAD' + self.run_command(['checkout', '-q', rev], cwd=dest) + + #: repo may contain submodules + self.update_submodules(dest) + + def switch(self, dest, url, rev_options): + self.run_command(['config', 'remote.origin.url', url], cwd=dest) + cmd_args = ['checkout', '-q'] + rev_options.to_args() + self.run_command(cmd_args, cwd=dest) + + self.update_submodules(dest) + + def update(self, dest, rev_options): + # First fetch changes from the default remote + if self.get_git_version() >= parse_version('1.9.0'): + # fetch tags in addition to everything else + self.run_command(['fetch', '-q', '--tags'], cwd=dest) + else: + self.run_command(['fetch', '-q'], cwd=dest) + # Then reset to wanted revision (maybe even origin/master) + rev_options = self.check_rev_options(dest, rev_options) + cmd_args = ['reset', '--hard', '-q'] + rev_options.to_args() + self.run_command(cmd_args, cwd=dest) + #: update submodules + self.update_submodules(dest) + + def get_url(self, location): + """Return URL of the first remote encountered.""" + remotes = self.run_command( + ['config', '--get-regexp', r'remote\..*\.url'], + show_stdout=False, cwd=location, + ) + remotes = remotes.splitlines() + found_remote = remotes[0] + for remote in remotes: + if remote.startswith('remote.origin.url '): + found_remote = remote + break + url = found_remote.split(' ')[1] + return url.strip() + + def get_revision(self, location): + current_rev = self.run_command( + ['rev-parse', 'HEAD'], show_stdout=False, cwd=location, + ) + return current_rev.strip() + + def _get_subdirectory(self, location): + """Return the relative path of setup.py to the git repo root.""" + # find the repo root + git_dir = self.run_command(['rev-parse', '--git-dir'], + show_stdout=False, cwd=location).strip() + if not os.path.isabs(git_dir): + git_dir = os.path.join(location, git_dir) + root_dir = os.path.join(git_dir, '..') + # find setup.py + orig_location = location + while not os.path.exists(os.path.join(location, 'setup.py')): + last_location = location + location = os.path.dirname(location) + if location == last_location: + # We've traversed up to the root of the filesystem without + # finding setup.py + logger.warning( + "Could not find setup.py for directory %s (tried all " + "parent directories)", + orig_location, + ) + return None + # relative path of setup.py to repo root + if samefile(root_dir, location): + return None + return os.path.relpath(location, root_dir) + + def get_src_requirement(self, dist, location): + repo = self.get_url(location) + if not repo.lower().startswith('git:'): + repo = 'git+' + repo + egg_project_name = dist.egg_name().split('-', 1)[0] + if not repo: + return None + current_rev = self.get_revision(location) + req = '%s@%s#egg=%s' % (repo, current_rev, egg_project_name) + subdirectory = self._get_subdirectory(location) + if subdirectory: + req += '&subdirectory=' + subdirectory + return req + + def get_url_rev(self, url): + """ + Prefixes stub URLs like 'user@hostname:user/repo.git' with 'ssh://'. + That's required because although they use SSH they sometimes don't + work with a ssh:// scheme (e.g. GitHub). But we need a scheme for + parsing. Hence we remove it again afterwards and return it as a stub. 
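+
+        Illustrative example: 'git+git@github.com:pypa/pip.git' is parsed by
+        temporarily rewriting it to 'git+ssh://git@github.com:pypa/pip.git',
+        after which the 'ssh://' prefix is stripped from the returned URL.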
+ """ + if '://' not in url: + assert 'file:' not in url + url = url.replace('git+', 'git+ssh://') + url, rev = super(Git, self).get_url_rev(url) + url = url.replace('ssh://', '') + else: + url, rev = super(Git, self).get_url_rev(url) + + return url, rev + + def update_submodules(self, location): + if not os.path.exists(os.path.join(location, '.gitmodules')): + return + self.run_command( + ['submodule', 'update', '--init', '--recursive', '-q'], + cwd=location, + ) + + @classmethod + def controls_location(cls, location): + if super(Git, cls).controls_location(location): + return True + try: + r = cls().run_command(['rev-parse'], + cwd=location, + show_stdout=False, + on_returncode='ignore') + return not r + except BadCommand: + logger.debug("could not determine if %s is under git control " + "because git is not available", location) + return False + + +vcs.register(Git) diff --git a/env/lib/python3.6/site-packages/pip/_internal/vcs/mercurial.py b/env/lib/python3.6/site-packages/pip/_internal/vcs/mercurial.py new file mode 100644 index 0000000..2d0750c --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_internal/vcs/mercurial.py @@ -0,0 +1,102 @@ +from __future__ import absolute_import + +import logging +import os + +from pip._vendor.six.moves import configparser + +from pip._internal.download import path_to_url +from pip._internal.utils.misc import display_path +from pip._internal.utils.temp_dir import TempDirectory +from pip._internal.vcs import VersionControl, vcs + +logger = logging.getLogger(__name__) + + +class Mercurial(VersionControl): + name = 'hg' + dirname = '.hg' + repo_name = 'clone' + schemes = ('hg', 'hg+http', 'hg+https', 'hg+ssh', 'hg+static-http') + + def get_base_rev_args(self, rev): + return [rev] + + def export(self, location): + """Export the Hg repository at the url to the destination location""" + with TempDirectory(kind="export") as temp_dir: + self.unpack(temp_dir.path) + + self.run_command( + ['archive', location], show_stdout=False, cwd=temp_dir.path + ) + + def fetch_new(self, dest, url, rev_options): + rev_display = rev_options.to_display() + logger.info( + 'Cloning hg %s%s to %s', + url, + rev_display, + display_path(dest), + ) + self.run_command(['clone', '--noupdate', '-q', url, dest]) + cmd_args = ['update', '-q'] + rev_options.to_args() + self.run_command(cmd_args, cwd=dest) + + def switch(self, dest, url, rev_options): + repo_config = os.path.join(dest, self.dirname, 'hgrc') + config = configparser.SafeConfigParser() + try: + config.read(repo_config) + config.set('paths', 'default', url) + with open(repo_config, 'w') as config_file: + config.write(config_file) + except (OSError, configparser.NoSectionError) as exc: + logger.warning( + 'Could not switch Mercurial repository to %s: %s', url, exc, + ) + else: + cmd_args = ['update', '-q'] + rev_options.to_args() + self.run_command(cmd_args, cwd=dest) + + def update(self, dest, rev_options): + self.run_command(['pull', '-q'], cwd=dest) + cmd_args = ['update', '-q'] + rev_options.to_args() + self.run_command(cmd_args, cwd=dest) + + def get_url(self, location): + url = self.run_command( + ['showconfig', 'paths.default'], + show_stdout=False, cwd=location).strip() + if self._is_local_repository(url): + url = path_to_url(url) + return url.strip() + + def get_revision(self, location): + current_revision = self.run_command( + ['parents', '--template={rev}'], + show_stdout=False, cwd=location).strip() + return current_revision + + def get_revision_hash(self, location): + current_rev_hash = self.run_command( + 
['parents', '--template={node}'], + show_stdout=False, cwd=location).strip() + return current_rev_hash + + def get_src_requirement(self, dist, location): + repo = self.get_url(location) + if not repo.lower().startswith('hg:'): + repo = 'hg+' + repo + egg_project_name = dist.egg_name().split('-', 1)[0] + if not repo: + return None + current_rev_hash = self.get_revision_hash(location) + return '%s@%s#egg=%s' % (repo, current_rev_hash, egg_project_name) + + def is_commit_id_equal(self, dest, name): + """Always assume the versions don't match""" + return False + + +vcs.register(Mercurial) diff --git a/env/lib/python3.6/site-packages/pip/_internal/vcs/subversion.py b/env/lib/python3.6/site-packages/pip/_internal/vcs/subversion.py new file mode 100644 index 0000000..7078775 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_internal/vcs/subversion.py @@ -0,0 +1,254 @@ +from __future__ import absolute_import + +import logging +import os +import re + +from pip._vendor.six.moves.urllib import parse as urllib_parse + +from pip._internal.index import Link +from pip._internal.utils.logging import indent_log +from pip._internal.utils.misc import display_path, remove_auth_from_url, rmtree +from pip._internal.vcs import VersionControl, vcs + +_svn_xml_url_re = re.compile('url="([^"]+)"') +_svn_rev_re = re.compile(r'committed-rev="(\d+)"') +_svn_url_re = re.compile(r'URL: (.+)') +_svn_revision_re = re.compile(r'Revision: (.+)') +_svn_info_xml_rev_re = re.compile(r'\s*revision="(\d+)"') +_svn_info_xml_url_re = re.compile(r'<url>(.*)</url>') + + +logger = logging.getLogger(__name__) + + +class Subversion(VersionControl): + name = 'svn' + dirname = '.svn' + repo_name = 'checkout' + schemes = ('svn', 'svn+ssh', 'svn+http', 'svn+https', 'svn+svn') + + def get_base_rev_args(self, rev): + return ['-r', rev] + + def get_info(self, location): + """Returns (url, revision), where both are strings""" + assert not location.rstrip('/').endswith(self.dirname), \ + 'Bad directory: %s' % location + output = self.run_command( + ['info', location], + show_stdout=False, + extra_environ={'LANG': 'C'}, + ) + match = _svn_url_re.search(output) + if not match: + logger.warning( + 'Cannot determine URL of svn checkout %s', + display_path(location), + ) + logger.debug('Output that cannot be parsed: \n%s', output) + return None, None + url = match.group(1).strip() + match = _svn_revision_re.search(output) + if not match: + logger.warning( + 'Cannot determine revision of svn checkout %s', + display_path(location), + ) + logger.debug('Output that cannot be parsed: \n%s', output) + return url, None + return url, match.group(1) + + def export(self, location): + """Export the svn repository at the url to the destination location""" + url, rev_options = self.get_url_rev_options(self.url) + + logger.info('Exporting svn repository %s to %s', url, location) + with indent_log(): + if os.path.exists(location): + # Subversion doesn't like to check out over an existing + # directory --force fixes this, but was only added in svn 1.5 + rmtree(location) + cmd_args = ['export'] + rev_options.to_args() + [url, location] + self.run_command(cmd_args, show_stdout=False) + + def fetch_new(self, dest, url, rev_options): + rev_display = rev_options.to_display() + logger.info( + 'Checking out %s%s to %s', + url, + rev_display, + display_path(dest), + ) + cmd_args = ['checkout', '-q'] + rev_options.to_args() + [url, dest] + self.run_command(cmd_args) + + def switch(self, dest, url, rev_options): + cmd_args = ['switch'] + rev_options.to_args() + [url, 
dest] + self.run_command(cmd_args) + + def update(self, dest, rev_options): + cmd_args = ['update'] + rev_options.to_args() + [dest] + self.run_command(cmd_args) + + def get_location(self, dist, dependency_links): + for url in dependency_links: + egg_fragment = Link(url).egg_fragment + if not egg_fragment: + continue + if '-' in egg_fragment: + # FIXME: will this work when a package has - in the name? + key = '-'.join(egg_fragment.split('-')[:-1]).lower() + else: + key = egg_fragment + if key == dist.key: + return url.split('#', 1)[0] + return None + + def get_revision(self, location): + """ + Return the maximum revision for all files under a given location + """ + # Note: taken from setuptools.command.egg_info + revision = 0 + + for base, dirs, files in os.walk(location): + if self.dirname not in dirs: + dirs[:] = [] + continue # no sense walking uncontrolled subdirs + dirs.remove(self.dirname) + entries_fn = os.path.join(base, self.dirname, 'entries') + if not os.path.exists(entries_fn): + # FIXME: should we warn? + continue + + dirurl, localrev = self._get_svn_url_rev(base) + + if base == location: + base = dirurl + '/' # save the root url + elif not dirurl or not dirurl.startswith(base): + dirs[:] = [] + continue # not part of the same svn tree, skip it + revision = max(revision, localrev) + return revision + + def get_url_rev(self, url): + # hotfix the URL scheme after removing svn+ from svn+ssh:// readd it + url, rev = super(Subversion, self).get_url_rev(url) + if url.startswith('ssh://'): + url = 'svn+' + url + return url, rev + + def get_url_rev_args(self, url): + extra_args = get_rev_options_args(url) + url = remove_auth_from_url(url) + + return url, extra_args + + def get_url(self, location): + # In cases where the source is in a subdirectory, not alongside + # setup.py we have to look up in the location until we find a real + # setup.py + orig_location = location + while not os.path.exists(os.path.join(location, 'setup.py')): + last_location = location + location = os.path.dirname(location) + if location == last_location: + # We've traversed up to the root of the filesystem without + # finding setup.py + logger.warning( + "Could not find setup.py for directory %s (tried all " + "parent directories)", + orig_location, + ) + return None + + return self._get_svn_url_rev(location)[0] + + def _get_svn_url_rev(self, location): + from pip._internal.exceptions import InstallationError + + entries_path = os.path.join(location, self.dirname, 'entries') + if os.path.exists(entries_path): + with open(entries_path) as f: + data = f.read() + else: # subversion >= 1.7 does not have the 'entries' file + data = '' + + if (data.startswith('8') or + data.startswith('9') or + data.startswith('10')): + data = list(map(str.splitlines, data.split('\n\x0c\n'))) + del data[0][0] # get rid of the '8' + url = data[0][3] + revs = [int(d[9]) for d in data if len(d) > 9 and d[9]] + [0] + elif data.startswith('<?xml'): + match = _svn_xml_url_re.search(data) + if not match: + raise ValueError('Badly formatted data: %r' % data) + url = match.group(1) # get repository URL + revs = [int(m.group(1)) for m in _svn_rev_re.finditer(data)] + [0] + else: + try: + # subversion >= 1.7 + xml = self.run_command( + ['info', '--xml', location], + show_stdout=False, + ) + url = _svn_info_xml_url_re.search(xml).group(1) + revs = [ + int(m.group(1)) for m in _svn_info_xml_rev_re.finditer(xml) + ] + except InstallationError: + url, revs = None, [] + + if revs: + rev = max(revs) + else: + rev = 0 + + return url, rev + + def 
get_src_requirement(self, dist, location): + repo = self.get_url(location) + if repo is None: + return None + # FIXME: why not project name? + egg_project_name = dist.egg_name().split('-', 1)[0] + rev = self.get_revision(location) + return 'svn+%s@%s#egg=%s' % (repo, rev, egg_project_name) + + def is_commit_id_equal(self, dest, name): + """Always assume the versions don't match""" + return False + + +def get_rev_options_args(url): + """ + Return the extra arguments to pass to RevOptions. + """ + r = urllib_parse.urlsplit(url) + if hasattr(r, 'username'): + # >= Python-2.5 + username, password = r.username, r.password + else: + netloc = r[1] + if '@' in netloc: + auth = netloc.split('@')[0] + if ':' in auth: + username, password = auth.split(':', 1) + else: + username, password = auth, None + else: + username, password = None, None + + extra_args = [] + if username: + extra_args += ['--username', username] + if password: + extra_args += ['--password', password] + + return extra_args + + +vcs.register(Subversion) diff --git a/env/lib/python3.6/site-packages/pip/_internal/wheel.py b/env/lib/python3.6/site-packages/pip/_internal/wheel.py new file mode 100644 index 0000000..fcf9d3d --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_internal/wheel.py @@ -0,0 +1,827 @@ +""" +Support for installing and building the "wheel" binary package format. +""" +from __future__ import absolute_import + +import collections +import compileall +import csv +import hashlib +import logging +import os.path +import re +import shutil +import stat +import sys +import warnings +from base64 import urlsafe_b64encode +from email.parser import Parser + +from pip._vendor import pkg_resources +from pip._vendor.distlib.scripts import ScriptMaker +from pip._vendor.packaging.utils import canonicalize_name +from pip._vendor.six import StringIO + +from pip._internal import pep425tags +from pip._internal.download import path_to_url, unpack_url +from pip._internal.exceptions import ( + InstallationError, InvalidWheelFilename, UnsupportedWheel, +) +from pip._internal.locations import ( + PIP_DELETE_MARKER_FILENAME, distutils_scheme, +) +from pip._internal.utils.logging import indent_log +from pip._internal.utils.misc import ( + call_subprocess, captured_stdout, ensure_dir, read_chunks, +) +from pip._internal.utils.setuptools_build import SETUPTOOLS_SHIM +from pip._internal.utils.temp_dir import TempDirectory +from pip._internal.utils.typing import MYPY_CHECK_RUNNING +from pip._internal.utils.ui import open_spinner + +if MYPY_CHECK_RUNNING: + from typing import Dict, List, Optional # noqa: F401 + +wheel_ext = '.whl' + +VERSION_COMPATIBLE = (1, 0) + + +logger = logging.getLogger(__name__) + + +def rehash(path, blocksize=1 << 20): + """Return (hash, length) for path using hashlib.sha256()""" + h = hashlib.sha256() + length = 0 + with open(path, 'rb') as f: + for block in read_chunks(f, size=blocksize): + length += len(block) + h.update(block) + digest = 'sha256=' + urlsafe_b64encode( + h.digest() + ).decode('latin1').rstrip('=') + return (digest, length) + + +def open_for_csv(name, mode): + if sys.version_info[0] < 3: + nl = {} + bin = 'b' + else: + nl = {'newline': ''} + bin = '' + return open(name, mode + bin, **nl) + + +def fix_script(path): + """Replace #!python with #!/path/to/python + Return True if file was changed.""" + # XXX RECORD hashes will need to be updated + if os.path.isfile(path): + with open(path, 'rb') as script: + firstline = script.readline() + if not firstline.startswith(b'#!python'): + return False + 
exename = sys.executable.encode(sys.getfilesystemencoding()) + firstline = b'#!' + exename + os.linesep.encode("ascii") + rest = script.read() + with open(path, 'wb') as script: + script.write(firstline) + script.write(rest) + return True + + +dist_info_re = re.compile(r"""^(?P<namever>(?P<name>.+?)(-(?P<ver>.+?))?) + \.dist-info$""", re.VERBOSE) + + +def root_is_purelib(name, wheeldir): + """ + Return True if the extracted wheel in wheeldir should go into purelib. + """ + name_folded = name.replace("-", "_") + for item in os.listdir(wheeldir): + match = dist_info_re.match(item) + if match and match.group('name') == name_folded: + with open(os.path.join(wheeldir, item, 'WHEEL')) as wheel: + for line in wheel: + line = line.lower().rstrip() + if line == "root-is-purelib: true": + return True + return False + + +def get_entrypoints(filename): + if not os.path.exists(filename): + return {}, {} + + # This is done because you can pass a string to entry_points wrappers which + # means that they may or may not be valid INI files. The attempt here is to + # strip leading and trailing whitespace in order to make them valid INI + # files. + with open(filename) as fp: + data = StringIO() + for line in fp: + data.write(line.strip()) + data.write("\n") + data.seek(0) + + # get the entry points and then the script names + entry_points = pkg_resources.EntryPoint.parse_map(data) + console = entry_points.get('console_scripts', {}) + gui = entry_points.get('gui_scripts', {}) + + def _split_ep(s): + """get the string representation of EntryPoint, remove space and split + on '='""" + return str(s).replace(" ", "").split("=") + + # convert the EntryPoint objects into strings with module:function + console = dict(_split_ep(v) for v in console.values()) + gui = dict(_split_ep(v) for v in gui.values()) + return console, gui + + +def message_about_scripts_not_on_PATH(scripts): + # type: (List[str]) -> Optional[str] + """Determine if any scripts are not on PATH and format a warning. + + Returns a warning message if one or more scripts are not on PATH, + otherwise None. + """ + if not scripts: + return None + + # Group scripts by the path they were installed in + grouped_by_dir = collections.defaultdict(set) # type: Dict[str, set] + for destfile in scripts: + parent_dir = os.path.dirname(destfile) + script_name = os.path.basename(destfile) + grouped_by_dir[parent_dir].add(script_name) + + # We don't want to warn for directories that are on PATH. + not_warn_dirs = [ + os.path.normcase(i).rstrip(os.sep) for i in + os.environ.get("PATH", "").split(os.pathsep) + ] + # If an executable sits with sys.executable, we don't warn for it. + # This covers the case of venv invocations without activating the venv. + not_warn_dirs.append(os.path.normcase(os.path.dirname(sys.executable))) + warn_for = { + parent_dir: scripts for parent_dir, scripts in grouped_by_dir.items() + if os.path.normcase(parent_dir) not in not_warn_dirs + } + if not warn_for: + return None + + # Format a message + msg_lines = [] + for parent_dir, scripts in warn_for.items(): + scripts = sorted(scripts) + if len(scripts) == 1: + start_text = "script {} is".format(scripts[0]) + else: + start_text = "scripts {} are".format( + ", ".join(scripts[:-1]) + " and " + scripts[-1] + ) + + msg_lines.append( + "The {} installed in '{}' which is not on PATH." + .format(start_text, parent_dir) + ) + + last_line_fmt = ( + "Consider adding {} to PATH or, if you prefer " + "to suppress this warning, use --no-warn-script-location." 
+ ) + if len(msg_lines) == 1: + msg_lines.append(last_line_fmt.format("this directory")) + else: + msg_lines.append(last_line_fmt.format("these directories")) + + # Returns the formatted multiline message + return "\n".join(msg_lines) + + +def move_wheel_files(name, req, wheeldir, user=False, home=None, root=None, + pycompile=True, scheme=None, isolated=False, prefix=None, + warn_script_location=True): + """Install a wheel""" + + if not scheme: + scheme = distutils_scheme( + name, user=user, home=home, root=root, isolated=isolated, + prefix=prefix, + ) + + if root_is_purelib(name, wheeldir): + lib_dir = scheme['purelib'] + else: + lib_dir = scheme['platlib'] + + info_dir = [] + data_dirs = [] + source = wheeldir.rstrip(os.path.sep) + os.path.sep + + # Record details of the files moved + # installed = files copied from the wheel to the destination + # changed = files changed while installing (scripts #! line typically) + # generated = files newly generated during the install (script wrappers) + installed = {} + changed = set() + generated = [] + + # Compile all of the pyc files that we're going to be installing + if pycompile: + with captured_stdout() as stdout: + with warnings.catch_warnings(): + warnings.filterwarnings('ignore') + compileall.compile_dir(source, force=True, quiet=True) + logger.debug(stdout.getvalue()) + + def normpath(src, p): + return os.path.relpath(src, p).replace(os.path.sep, '/') + + def record_installed(srcfile, destfile, modified=False): + """Map archive RECORD paths to installation RECORD paths.""" + oldpath = normpath(srcfile, wheeldir) + newpath = normpath(destfile, lib_dir) + installed[oldpath] = newpath + if modified: + changed.add(destfile) + + def clobber(source, dest, is_base, fixer=None, filter=None): + ensure_dir(dest) # common for the 'include' path + + for dir, subdirs, files in os.walk(source): + basedir = dir[len(source):].lstrip(os.path.sep) + destdir = os.path.join(dest, basedir) + if is_base and basedir.split(os.path.sep, 1)[0].endswith('.data'): + continue + for s in subdirs: + destsubdir = os.path.join(dest, basedir, s) + if is_base and basedir == '' and destsubdir.endswith('.data'): + data_dirs.append(s) + continue + elif (is_base and + s.endswith('.dist-info') and + canonicalize_name(s).startswith( + canonicalize_name(req.name))): + assert not info_dir, ('Multiple .dist-info directories: ' + + destsubdir + ', ' + + ', '.join(info_dir)) + info_dir.append(destsubdir) + for f in files: + # Skip unwanted files + if filter and filter(f): + continue + srcfile = os.path.join(dir, f) + destfile = os.path.join(dest, basedir, f) + # directory creation is lazy and after the file filtering above + # to ensure we don't install empty dirs; empty dirs can't be + # uninstalled. + ensure_dir(destdir) + + # copyfile (called below) truncates the destination if it + # exists and then writes the new contents. This is fine in most + # cases, but can cause a segfault if pip has loaded a shared + # object (e.g. from pyopenssl through its vendored urllib3) + # Since the shared object is mmap'd an attempt to call a + # symbol in it will then cause a segfault. Unlinking the file + # allows writing of new contents while allowing the process to + # continue to use the old copy. 
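+                # (On POSIX, unlinking only removes the directory entry; a
+                # process that already mmap'd the old file keeps it alive until
+                # released, while copyfile below writes to a fresh inode.)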
+ if os.path.exists(destfile): + os.unlink(destfile) + + # We use copyfile (not move, copy, or copy2) to be extra sure + # that we are not moving directories over (copyfile fails for + # directories) as well as to ensure that we are not copying + # over any metadata because we want more control over what + # metadata we actually copy over. + shutil.copyfile(srcfile, destfile) + + # Copy over the metadata for the file, currently this only + # includes the atime and mtime. + st = os.stat(srcfile) + if hasattr(os, "utime"): + os.utime(destfile, (st.st_atime, st.st_mtime)) + + # If our file is executable, then make our destination file + # executable. + if os.access(srcfile, os.X_OK): + st = os.stat(srcfile) + permissions = ( + st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH + ) + os.chmod(destfile, permissions) + + changed = False + if fixer: + changed = fixer(destfile) + record_installed(srcfile, destfile, changed) + + clobber(source, lib_dir, True) + + assert info_dir, "%s .dist-info directory not found" % req + + # Get the defined entry points + ep_file = os.path.join(info_dir[0], 'entry_points.txt') + console, gui = get_entrypoints(ep_file) + + def is_entrypoint_wrapper(name): + # EP, EP.exe and EP-script.py are scripts generated for + # entry point EP by setuptools + if name.lower().endswith('.exe'): + matchname = name[:-4] + elif name.lower().endswith('-script.py'): + matchname = name[:-10] + elif name.lower().endswith(".pya"): + matchname = name[:-4] + else: + matchname = name + # Ignore setuptools-generated scripts + return (matchname in console or matchname in gui) + + for datadir in data_dirs: + fixer = None + filter = None + for subdir in os.listdir(os.path.join(wheeldir, datadir)): + fixer = None + if subdir == 'scripts': + fixer = fix_script + filter = is_entrypoint_wrapper + source = os.path.join(wheeldir, datadir, subdir) + dest = scheme[subdir] + clobber(source, dest, False, fixer=fixer, filter=filter) + + maker = ScriptMaker(None, scheme['scripts']) + + # Ensure old scripts are overwritten. + # See https://github.com/pypa/pip/issues/1800 + maker.clobber = True + + # Ensure we don't generate any variants for scripts because this is almost + # never what somebody wants. + # See https://bitbucket.org/pypa/distlib/issue/35/ + maker.variants = {''} + + # This is required because otherwise distlib creates scripts that are not + # executable. + # See https://bitbucket.org/pypa/distlib/issue/32/ + maker.set_mode = True + + # Simplify the script and fix the fact that the default script swallows + # every single stack trace. + # See https://bitbucket.org/pypa/distlib/issue/34/ + # See https://bitbucket.org/pypa/distlib/issue/33/ + def _get_script_text(entry): + if entry.suffix is None: + raise InstallationError( + "Invalid script entry point: %s for req: %s - A callable " + "suffix is required. Cf https://packaging.python.org/en/" + "latest/distributing.html#console-scripts for more " + "information." 
% (entry, req) + ) + return maker.script_template % { + "module": entry.prefix, + "import_name": entry.suffix.split(".")[0], + "func": entry.suffix, + } + + maker._get_script_text = _get_script_text + maker.script_template = r"""# -*- coding: utf-8 -*- +import re +import sys + +from %(module)s import %(import_name)s + +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) + sys.exit(%(func)s()) +""" + + # Special case pip and setuptools to generate versioned wrappers + # + # The issue is that some projects (specifically, pip and setuptools) use + # code in setup.py to create "versioned" entry points - pip2.7 on Python + # 2.7, pip3.3 on Python 3.3, etc. But these entry points are baked into + # the wheel metadata at build time, and so if the wheel is installed with + # a *different* version of Python the entry points will be wrong. The + # correct fix for this is to enhance the metadata to be able to describe + # such versioned entry points, but that won't happen till Metadata 2.0 is + # available. + # In the meantime, projects using versioned entry points will either have + # incorrect versioned entry points, or they will not be able to distribute + # "universal" wheels (i.e., they will need a wheel per Python version). + # + # Because setuptools and pip are bundled with _ensurepip and virtualenv, + # we need to use universal wheels. So, as a stopgap until Metadata 2.0, we + # override the versioned entry points in the wheel and generate the + # correct ones. This code is purely a short-term measure until Metadata 2.0 + # is available. + # + # To add the level of hack in this section of code, in order to support + # ensurepip this code will look for an ``ENSUREPIP_OPTIONS`` environment + # variable which will control which version scripts get installed. + # + # ENSUREPIP_OPTIONS=altinstall + # - Only pipX.Y and easy_install-X.Y will be generated and installed + # ENSUREPIP_OPTIONS=install + # - pipX.Y, pipX, easy_install-X.Y will be generated and installed. Note + # that this option is technically if ENSUREPIP_OPTIONS is set and is + # not altinstall + # DEFAULT + # - The default behavior is to install pip, pipX, pipX.Y, easy_install + # and easy_install-X.Y. 
+ pip_script = console.pop('pip', None) + if pip_script: + if "ENSUREPIP_OPTIONS" not in os.environ: + spec = 'pip = ' + pip_script + generated.extend(maker.make(spec)) + + if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall": + spec = 'pip%s = %s' % (sys.version[:1], pip_script) + generated.extend(maker.make(spec)) + + spec = 'pip%s = %s' % (sys.version[:3], pip_script) + generated.extend(maker.make(spec)) + # Delete any other versioned pip entry points + pip_ep = [k for k in console if re.match(r'pip(\d(\.\d)?)?$', k)] + for k in pip_ep: + del console[k] + easy_install_script = console.pop('easy_install', None) + if easy_install_script: + if "ENSUREPIP_OPTIONS" not in os.environ: + spec = 'easy_install = ' + easy_install_script + generated.extend(maker.make(spec)) + + spec = 'easy_install-%s = %s' % (sys.version[:3], easy_install_script) + generated.extend(maker.make(spec)) + # Delete any other versioned easy_install entry points + easy_install_ep = [ + k for k in console if re.match(r'easy_install(-\d\.\d)?$', k) + ] + for k in easy_install_ep: + del console[k] + + # Generate the console and GUI entry points specified in the wheel + if len(console) > 0: + generated_console_scripts = maker.make_multiple( + ['%s = %s' % kv for kv in console.items()] + ) + generated.extend(generated_console_scripts) + + if warn_script_location: + msg = message_about_scripts_not_on_PATH(generated_console_scripts) + if msg is not None: + logger.warn(msg) + + if len(gui) > 0: + generated.extend( + maker.make_multiple( + ['%s = %s' % kv for kv in gui.items()], + {'gui': True} + ) + ) + + # Record pip as the installer + installer = os.path.join(info_dir[0], 'INSTALLER') + temp_installer = os.path.join(info_dir[0], 'INSTALLER.pip') + with open(temp_installer, 'wb') as installer_file: + installer_file.write(b'pip\n') + shutil.move(temp_installer, installer) + generated.append(installer) + + # Record details of all files installed + record = os.path.join(info_dir[0], 'RECORD') + temp_record = os.path.join(info_dir[0], 'RECORD.pip') + with open_for_csv(record, 'r') as record_in: + with open_for_csv(temp_record, 'w+') as record_out: + reader = csv.reader(record_in) + writer = csv.writer(record_out) + for row in reader: + row[0] = installed.pop(row[0], row[0]) + if row[0] in changed: + row[1], row[2] = rehash(row[0]) + writer.writerow(row) + for f in generated: + digest, length = rehash(f) + writer.writerow((normpath(f, lib_dir), digest, length)) + for f in installed: + writer.writerow((installed[f], '', '')) + shutil.move(temp_record, record) + + +def wheel_version(source_dir): + """ + Return the Wheel-Version of an extracted wheel, if possible. + + Otherwise, return False if we couldn't parse / extract it. + """ + try: + dist = [d for d in pkg_resources.find_on_path(None, source_dir)][0] + + wheel_data = dist.get_metadata('WHEEL') + wheel_data = Parser().parsestr(wheel_data) + + version = wheel_data['Wheel-Version'].strip() + version = tuple(map(int, version.split('.'))) + return version + except Exception: + return False + + +def check_compatibility(version, name): + """ + Raises errors or warns if called with an incompatible Wheel-Version. + + Pip should refuse to install a Wheel-Version that's a major series + ahead of what it's compatible with (e.g 2.0 > 1.1); and warn when + installing a version only minor version ahead (e.g 1.2 > 1.1). 
+ + version: a 2-tuple representing a Wheel-Version (Major, Minor) + name: name of wheel or package to raise exception about + + :raises UnsupportedWheel: when an incompatible Wheel-Version is given + """ + if not version: + raise UnsupportedWheel( + "%s is in an unsupported or invalid wheel" % name + ) + if version[0] > VERSION_COMPATIBLE[0]: + raise UnsupportedWheel( + "%s's Wheel-Version (%s) is not compatible with this version " + "of pip" % (name, '.'.join(map(str, version))) + ) + elif version > VERSION_COMPATIBLE: + logger.warning( + 'Installing from a newer Wheel-Version (%s)', + '.'.join(map(str, version)), + ) + + +class Wheel(object): + """A wheel file""" + + # TODO: maybe move the install code into this class + + wheel_file_re = re.compile( + r"""^(?P<namever>(?P<name>.+?)-(?P<ver>.*?)) + ((-(?P<build>\d[^-]*?))?-(?P<pyver>.+?)-(?P<abi>.+?)-(?P<plat>.+?) + \.whl|\.dist-info)$""", + re.VERBOSE + ) + + def __init__(self, filename): + """ + :raises InvalidWheelFilename: when the filename is invalid for a wheel + """ + wheel_info = self.wheel_file_re.match(filename) + if not wheel_info: + raise InvalidWheelFilename( + "%s is not a valid wheel filename." % filename + ) + self.filename = filename + self.name = wheel_info.group('name').replace('_', '-') + # we'll assume "_" means "-" due to wheel naming scheme + # (https://github.com/pypa/pip/issues/1150) + self.version = wheel_info.group('ver').replace('_', '-') + self.build_tag = wheel_info.group('build') + self.pyversions = wheel_info.group('pyver').split('.') + self.abis = wheel_info.group('abi').split('.') + self.plats = wheel_info.group('plat').split('.') + + # All the tag combinations from this file + self.file_tags = { + (x, y, z) for x in self.pyversions + for y in self.abis for z in self.plats + } + + def support_index_min(self, tags=None): + """ + Return the lowest index that one of the wheel's file_tag combinations + achieves in the supported_tags list e.g. if there are 8 supported tags, + and one of the file tags is first in the list, then return 0. Returns + None is the wheel is not supported. + """ + if tags is None: # for mock + tags = pep425tags.get_supported() + indexes = [tags.index(c) for c in self.file_tags if c in tags] + return min(indexes) if indexes else None + + def supported(self, tags=None): + """Is this wheel supported on this system?""" + if tags is None: # for mock + tags = pep425tags.get_supported() + return bool(set(tags).intersection(self.file_tags)) + + +class WheelBuilder(object): + """Build wheels from a RequirementSet.""" + + def __init__(self, finder, preparer, wheel_cache, + build_options=None, global_options=None, no_clean=False): + self.finder = finder + self.preparer = preparer + self.wheel_cache = wheel_cache + + self._wheel_dir = preparer.wheel_download_dir + + self.build_options = build_options or [] + self.global_options = global_options or [] + self.no_clean = no_clean + + def _build_one(self, req, output_dir, python_tag=None): + """Build one wheel. + + :return: The filename of the built wheel, or None if the build failed. 
+ """ + # Install build deps into temporary directory (PEP 518) + with req.build_env: + return self._build_one_inside_env(req, output_dir, + python_tag=python_tag) + + def _build_one_inside_env(self, req, output_dir, python_tag=None): + with TempDirectory(kind="wheel") as temp_dir: + if self.__build_one(req, temp_dir.path, python_tag=python_tag): + try: + wheel_name = os.listdir(temp_dir.path)[0] + wheel_path = os.path.join(output_dir, wheel_name) + shutil.move( + os.path.join(temp_dir.path, wheel_name), wheel_path + ) + logger.info('Stored in directory: %s', output_dir) + return wheel_path + except Exception: + pass + # Ignore return, we can't do anything else useful. + self._clean_one(req) + return None + + def _base_setup_args(self, req): + # NOTE: Eventually, we'd want to also -S to the flags here, when we're + # isolating. Currently, it breaks Python in virtualenvs, because it + # relies on site.py to find parts of the standard library outside the + # virtualenv. + return [ + sys.executable, '-u', '-c', + SETUPTOOLS_SHIM % req.setup_py + ] + list(self.global_options) + + def __build_one(self, req, tempd, python_tag=None): + base_args = self._base_setup_args(req) + + spin_message = 'Running setup.py bdist_wheel for %s' % (req.name,) + with open_spinner(spin_message) as spinner: + logger.debug('Destination directory: %s', tempd) + wheel_args = base_args + ['bdist_wheel', '-d', tempd] \ + + self.build_options + + if python_tag is not None: + wheel_args += ["--python-tag", python_tag] + + try: + call_subprocess(wheel_args, cwd=req.setup_py_dir, + show_stdout=False, spinner=spinner) + return True + except Exception: + spinner.finish("error") + logger.error('Failed building wheel for %s', req.name) + return False + + def _clean_one(self, req): + base_args = self._base_setup_args(req) + + logger.info('Running setup.py clean for %s', req.name) + clean_args = base_args + ['clean', '--all'] + try: + call_subprocess(clean_args, cwd=req.source_dir, show_stdout=False) + return True + except Exception: + logger.error('Failed cleaning build dir for %s', req.name) + return False + + def build(self, requirements, session, autobuilding=False): + """Build wheels. + + :param unpack: If True, replace the sdist we built from with the + newly built wheel, in preparation for installation. + :return: True if all the wheels built correctly. + """ + from pip._internal import index + + building_is_possible = self._wheel_dir or ( + autobuilding and self.wheel_cache.cache_dir + ) + assert building_is_possible + + buildset = [] + for req in requirements: + if req.constraint: + continue + if req.is_wheel: + if not autobuilding: + logger.info( + 'Skipping %s, due to already being wheel.', req.name, + ) + elif autobuilding and req.editable: + pass + elif autobuilding and not req.source_dir: + pass + elif autobuilding and req.link and not req.link.is_artifact: + # VCS checkout. Build wheel just for this run. + buildset.append((req, True)) + else: + ephem_cache = False + if autobuilding: + link = req.link + base, ext = link.splitext() + if index.egg_info_matches(base, None, link) is None: + # E.g. local directory. Build wheel just for this run. + ephem_cache = True + if "binary" not in index.fmt_ctl_formats( + self.finder.format_control, + canonicalize_name(req.name)): + logger.info( + "Skipping bdist_wheel for %s, due to binaries " + "being disabled for it.", req.name, + ) + continue + buildset.append((req, ephem_cache)) + + if not buildset: + return True + + # Build the wheels. 
+ logger.info( + 'Building wheels for collected packages: %s', + ', '.join([req.name for (req, _) in buildset]), + ) + _cache = self.wheel_cache # shorter name + with indent_log(): + build_success, build_failure = [], [] + for req, ephem in buildset: + python_tag = None + if autobuilding: + python_tag = pep425tags.implementation_tag + if ephem: + output_dir = _cache.get_ephem_path_for_link(req.link) + else: + output_dir = _cache.get_path_for_link(req.link) + try: + ensure_dir(output_dir) + except OSError as e: + logger.warning("Building wheel for %s failed: %s", + req.name, e) + build_failure.append(req) + continue + else: + output_dir = self._wheel_dir + wheel_file = self._build_one( + req, output_dir, + python_tag=python_tag, + ) + if wheel_file: + build_success.append(req) + if autobuilding: + # XXX: This is mildly duplicative with prepare_files, + # but not close enough to pull out to a single common + # method. + # The code below assumes temporary source dirs - + # prevent it doing bad things. + if req.source_dir and not os.path.exists(os.path.join( + req.source_dir, PIP_DELETE_MARKER_FILENAME)): + raise AssertionError( + "bad source dir - missing marker") + # Delete the source we built the wheel from + req.remove_temporary_source() + # set the build directory again - name is known from + # the work prepare_files did. + req.source_dir = req.build_location( + self.preparer.build_dir + ) + # Update the link for this. + req.link = index.Link(path_to_url(wheel_file)) + assert req.link.is_wheel + # extract the wheel into the dir + unpack_url( + req.link, req.source_dir, None, False, + session=session, + ) + else: + build_failure.append(req) + + # notify success/failure + if build_success: + logger.info( + 'Successfully built %s', + ' '.join([req.name for req in build_success]), + ) + if build_failure: + logger.info( + 'Failed to build %s', + ' '.join([req.name for req in build_failure]), + ) + # Return True if all builds were successful + return len(build_failure) == 0 diff --git a/env/lib/python3.6/site-packages/pip/_vendor/appdirs.py b/env/lib/python3.6/site-packages/pip/_vendor/appdirs.py new file mode 100644 index 0000000..2bd3911 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/appdirs.py @@ -0,0 +1,604 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2005-2010 ActiveState Software Inc. +# Copyright (c) 2013 Eddy Petrișor + +"""Utilities for determining application-specific dirs. + +See <http://github.com/ActiveState/appdirs> for details and usage. +""" +# Dev Notes: +# - MSDN on where to store app data files: +# http://support.microsoft.com/default.aspx?scid=kb;en-us;310294#XSLTH3194121123120121120120 +# - Mac OS X: http://developer.apple.com/documentation/MacOSX/Conceptual/BPFileSystem/index.html +# - XDG spec for Un*x: http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html + +__version_info__ = (1, 4, 3) +__version__ = '.'.join(map(str, __version_info__)) + + +import sys +import os + +PY3 = sys.version_info[0] == 3 + +if PY3: + unicode = str + +if sys.platform.startswith('java'): + import platform + os_name = platform.java_ver()[3][0] + if os_name.startswith('Windows'): # "Windows XP", "Windows 7", etc. + system = 'win32' + elif os_name.startswith('Mac'): # "Mac OS X", etc. + system = 'darwin' + else: # "Linux", "SunOS", "FreeBSD", etc. + # Setting this to "linux2" is not ideal, but only Windows or Mac + # are actually checked for and the rest of the module expects + # *sys.platform* style strings. 
+ system = 'linux2' +else: + system = sys.platform + + + +def user_data_dir(appname=None, appauthor=None, version=None, roaming=False): + r"""Return full path to the user-specific data dir for this application. + + "appname" is the name of application. + If None, just the system directory is returned. + "appauthor" (only used on Windows) is the name of the + appauthor or distributing body for this application. Typically + it is the owning company name. This falls back to appname. You may + pass False to disable it. + "version" is an optional version path element to append to the + path. You might want to use this if you want multiple versions + of your app to be able to run independently. If used, this + would typically be "<major>.<minor>". + Only applied when appname is present. + "roaming" (boolean, default False) can be set True to use the Windows + roaming appdata directory. That means that for users on a Windows + network setup for roaming profiles, this user data will be + sync'd on login. See + <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx> + for a discussion of issues. + + Typical user data directories are: + Mac OS X: ~/Library/Application Support/<AppName> + Unix: ~/.local/share/<AppName> # or in $XDG_DATA_HOME, if defined + Win XP (not roaming): C:\Documents and Settings\<username>\Application Data\<AppAuthor>\<AppName> + Win XP (roaming): C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName> + Win 7 (not roaming): C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName> + Win 7 (roaming): C:\Users\<username>\AppData\Roaming\<AppAuthor>\<AppName> + + For Unix, we follow the XDG spec and support $XDG_DATA_HOME. + That means, by default "~/.local/share/<AppName>". + """ + if system == "win32": + if appauthor is None: + appauthor = appname + const = roaming and "CSIDL_APPDATA" or "CSIDL_LOCAL_APPDATA" + path = os.path.normpath(_get_win_folder(const)) + if appname: + if appauthor is not False: + path = os.path.join(path, appauthor, appname) + else: + path = os.path.join(path, appname) + elif system == 'darwin': + path = os.path.expanduser('~/Library/Application Support/') + if appname: + path = os.path.join(path, appname) + else: + path = os.getenv('XDG_DATA_HOME', os.path.expanduser("~/.local/share")) + if appname: + path = os.path.join(path, appname) + if appname and version: + path = os.path.join(path, version) + return path + + +def site_data_dir(appname=None, appauthor=None, version=None, multipath=False): + r"""Return full path to the user-shared data dir for this application. + + "appname" is the name of application. + If None, just the system directory is returned. + "appauthor" (only used on Windows) is the name of the + appauthor or distributing body for this application. Typically + it is the owning company name. This falls back to appname. You may + pass False to disable it. + "version" is an optional version path element to append to the + path. You might want to use this if you want multiple versions + of your app to be able to run independently. If used, this + would typically be "<major>.<minor>". + Only applied when appname is present. + "multipath" is an optional parameter only applicable to *nix + which indicates that the entire list of data dirs should be + returned. 
By default, the first item from XDG_DATA_DIRS is + returned, or '/usr/local/share/<AppName>', + if XDG_DATA_DIRS is not set + + Typical site data directories are: + Mac OS X: /Library/Application Support/<AppName> + Unix: /usr/local/share/<AppName> or /usr/share/<AppName> + Win XP: C:\Documents and Settings\All Users\Application Data\<AppAuthor>\<AppName> + Vista: (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.) + Win 7: C:\ProgramData\<AppAuthor>\<AppName> # Hidden, but writeable on Win 7. + + For Unix, this is using the $XDG_DATA_DIRS[0] default. + + WARNING: Do not use this on Windows. See the Vista-Fail note above for why. + """ + if system == "win32": + if appauthor is None: + appauthor = appname + path = os.path.normpath(_get_win_folder("CSIDL_COMMON_APPDATA")) + if appname: + if appauthor is not False: + path = os.path.join(path, appauthor, appname) + else: + path = os.path.join(path, appname) + elif system == 'darwin': + path = os.path.expanduser('/Library/Application Support') + if appname: + path = os.path.join(path, appname) + else: + # XDG default for $XDG_DATA_DIRS + # only first, if multipath is False + path = os.getenv('XDG_DATA_DIRS', + os.pathsep.join(['/usr/local/share', '/usr/share'])) + pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)] + if appname: + if version: + appname = os.path.join(appname, version) + pathlist = [os.sep.join([x, appname]) for x in pathlist] + + if multipath: + path = os.pathsep.join(pathlist) + else: + path = pathlist[0] + return path + + if appname and version: + path = os.path.join(path, version) + return path + + +def user_config_dir(appname=None, appauthor=None, version=None, roaming=False): + r"""Return full path to the user-specific config dir for this application. + + "appname" is the name of application. + If None, just the system directory is returned. + "appauthor" (only used on Windows) is the name of the + appauthor or distributing body for this application. Typically + it is the owning company name. This falls back to appname. You may + pass False to disable it. + "version" is an optional version path element to append to the + path. You might want to use this if you want multiple versions + of your app to be able to run independently. If used, this + would typically be "<major>.<minor>". + Only applied when appname is present. + "roaming" (boolean, default False) can be set True to use the Windows + roaming appdata directory. That means that for users on a Windows + network setup for roaming profiles, this user data will be + sync'd on login. See + <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx> + for a discussion of issues. + + Typical user config directories are: + Mac OS X: same as user_data_dir + Unix: ~/.config/<AppName> # or in $XDG_CONFIG_HOME, if defined + Win *: same as user_data_dir + + For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME. + That means, by default "~/.config/<AppName>". + """ + if system in ["win32", "darwin"]: + path = user_data_dir(appname, appauthor, None, roaming) + else: + path = os.getenv('XDG_CONFIG_HOME', os.path.expanduser("~/.config")) + if appname: + path = os.path.join(path, appname) + if appname and version: + path = os.path.join(path, version) + return path + + +def site_config_dir(appname=None, appauthor=None, version=None, multipath=False): + r"""Return full path to the user-shared data dir for this application. + + "appname" is the name of application. + If None, just the system directory is returned. 
+ "appauthor" (only used on Windows) is the name of the + appauthor or distributing body for this application. Typically + it is the owning company name. This falls back to appname. You may + pass False to disable it. + "version" is an optional version path element to append to the + path. You might want to use this if you want multiple versions + of your app to be able to run independently. If used, this + would typically be "<major>.<minor>". + Only applied when appname is present. + "multipath" is an optional parameter only applicable to *nix + which indicates that the entire list of config dirs should be + returned. By default, the first item from XDG_CONFIG_DIRS is + returned, or '/etc/xdg/<AppName>', if XDG_CONFIG_DIRS is not set + + Typical site config directories are: + Mac OS X: same as site_data_dir + Unix: /etc/xdg/<AppName> or $XDG_CONFIG_DIRS[i]/<AppName> for each value in + $XDG_CONFIG_DIRS + Win *: same as site_data_dir + Vista: (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.) + + For Unix, this is using the $XDG_CONFIG_DIRS[0] default, if multipath=False + + WARNING: Do not use this on Windows. See the Vista-Fail note above for why. + """ + if system in ["win32", "darwin"]: + path = site_data_dir(appname, appauthor) + if appname and version: + path = os.path.join(path, version) + else: + # XDG default for $XDG_CONFIG_DIRS + # only first, if multipath is False + path = os.getenv('XDG_CONFIG_DIRS', '/etc/xdg') + pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)] + if appname: + if version: + appname = os.path.join(appname, version) + pathlist = [os.sep.join([x, appname]) for x in pathlist] + + if multipath: + path = os.pathsep.join(pathlist) + else: + path = pathlist[0] + return path + + +def user_cache_dir(appname=None, appauthor=None, version=None, opinion=True): + r"""Return full path to the user-specific cache dir for this application. + + "appname" is the name of application. + If None, just the system directory is returned. + "appauthor" (only used on Windows) is the name of the + appauthor or distributing body for this application. Typically + it is the owning company name. This falls back to appname. You may + pass False to disable it. + "version" is an optional version path element to append to the + path. You might want to use this if you want multiple versions + of your app to be able to run independently. If used, this + would typically be "<major>.<minor>". + Only applied when appname is present. + "opinion" (boolean) can be False to disable the appending of + "Cache" to the base app data dir for Windows. See + discussion below. + + Typical user cache directories are: + Mac OS X: ~/Library/Caches/<AppName> + Unix: ~/.cache/<AppName> (XDG default) + Win XP: C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Cache + Vista: C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Cache + + On Windows the only suggestion in the MSDN docs is that local settings go in + the `CSIDL_LOCAL_APPDATA` directory. This is identical to the non-roaming + app data dir (the default returned by `user_data_dir` above). Apps typically + put cache data somewhere *under* the given dir here. Some examples: + ...\Mozilla\Firefox\Profiles\<ProfileName>\Cache + ...\Acme\SuperApp\Cache\1.0 + OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value. + This can be disabled with the `opinion=False` option. 
+ """ + if system == "win32": + if appauthor is None: + appauthor = appname + path = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA")) + if appname: + if appauthor is not False: + path = os.path.join(path, appauthor, appname) + else: + path = os.path.join(path, appname) + if opinion: + path = os.path.join(path, "Cache") + elif system == 'darwin': + path = os.path.expanduser('~/Library/Caches') + if appname: + path = os.path.join(path, appname) + else: + path = os.getenv('XDG_CACHE_HOME', os.path.expanduser('~/.cache')) + if appname: + path = os.path.join(path, appname) + if appname and version: + path = os.path.join(path, version) + return path + + +def user_state_dir(appname=None, appauthor=None, version=None, roaming=False): + r"""Return full path to the user-specific state dir for this application. + + "appname" is the name of application. + If None, just the system directory is returned. + "appauthor" (only used on Windows) is the name of the + appauthor or distributing body for this application. Typically + it is the owning company name. This falls back to appname. You may + pass False to disable it. + "version" is an optional version path element to append to the + path. You might want to use this if you want multiple versions + of your app to be able to run independently. If used, this + would typically be "<major>.<minor>". + Only applied when appname is present. + "roaming" (boolean, default False) can be set True to use the Windows + roaming appdata directory. That means that for users on a Windows + network setup for roaming profiles, this user data will be + sync'd on login. See + <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx> + for a discussion of issues. + + Typical user state directories are: + Mac OS X: same as user_data_dir + Unix: ~/.local/state/<AppName> # or in $XDG_STATE_HOME, if defined + Win *: same as user_data_dir + + For Unix, we follow this Debian proposal <https://wiki.debian.org/XDGBaseDirectorySpecification#state> + to extend the XDG spec and support $XDG_STATE_HOME. + + That means, by default "~/.local/state/<AppName>". + """ + if system in ["win32", "darwin"]: + path = user_data_dir(appname, appauthor, None, roaming) + else: + path = os.getenv('XDG_STATE_HOME', os.path.expanduser("~/.local/state")) + if appname: + path = os.path.join(path, appname) + if appname and version: + path = os.path.join(path, version) + return path + + +def user_log_dir(appname=None, appauthor=None, version=None, opinion=True): + r"""Return full path to the user-specific log dir for this application. + + "appname" is the name of application. + If None, just the system directory is returned. + "appauthor" (only used on Windows) is the name of the + appauthor or distributing body for this application. Typically + it is the owning company name. This falls back to appname. You may + pass False to disable it. + "version" is an optional version path element to append to the + path. You might want to use this if you want multiple versions + of your app to be able to run independently. If used, this + would typically be "<major>.<minor>". + Only applied when appname is present. + "opinion" (boolean) can be False to disable the appending of + "Logs" to the base app data dir for Windows, and "log" to the + base cache dir for Unix. See discussion below. 
+ + Typical user log directories are: + Mac OS X: ~/Library/Logs/<AppName> + Unix: ~/.cache/<AppName>/log # or under $XDG_CACHE_HOME if defined + Win XP: C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Logs + Vista: C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Logs + + On Windows the only suggestion in the MSDN docs is that local settings + go in the `CSIDL_LOCAL_APPDATA` directory. (Note: I'm interested in + examples of what some windows apps use for a logs dir.) + + OPINION: This function appends "Logs" to the `CSIDL_LOCAL_APPDATA` + value for Windows and appends "log" to the user cache dir for Unix. + This can be disabled with the `opinion=False` option. + """ + if system == "darwin": + path = os.path.join( + os.path.expanduser('~/Library/Logs'), + appname) + elif system == "win32": + path = user_data_dir(appname, appauthor, version) + version = False + if opinion: + path = os.path.join(path, "Logs") + else: + path = user_cache_dir(appname, appauthor, version) + version = False + if opinion: + path = os.path.join(path, "log") + if appname and version: + path = os.path.join(path, version) + return path + + +class AppDirs(object): + """Convenience wrapper for getting application dirs.""" + def __init__(self, appname=None, appauthor=None, version=None, + roaming=False, multipath=False): + self.appname = appname + self.appauthor = appauthor + self.version = version + self.roaming = roaming + self.multipath = multipath + + @property + def user_data_dir(self): + return user_data_dir(self.appname, self.appauthor, + version=self.version, roaming=self.roaming) + + @property + def site_data_dir(self): + return site_data_dir(self.appname, self.appauthor, + version=self.version, multipath=self.multipath) + + @property + def user_config_dir(self): + return user_config_dir(self.appname, self.appauthor, + version=self.version, roaming=self.roaming) + + @property + def site_config_dir(self): + return site_config_dir(self.appname, self.appauthor, + version=self.version, multipath=self.multipath) + + @property + def user_cache_dir(self): + return user_cache_dir(self.appname, self.appauthor, + version=self.version) + + @property + def user_state_dir(self): + return user_state_dir(self.appname, self.appauthor, + version=self.version) + + @property + def user_log_dir(self): + return user_log_dir(self.appname, self.appauthor, + version=self.version) + + +#---- internal support stuff + +def _get_win_folder_from_registry(csidl_name): + """This is a fallback technique at best. I'm not sure if using the + registry for this guarantees us the correct answer for all CSIDL_* + names. + """ + if PY3: + import winreg as _winreg + else: + import _winreg + + shell_folder_name = { + "CSIDL_APPDATA": "AppData", + "CSIDL_COMMON_APPDATA": "Common AppData", + "CSIDL_LOCAL_APPDATA": "Local AppData", + }[csidl_name] + + key = _winreg.OpenKey( + _winreg.HKEY_CURRENT_USER, + r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders" + ) + dir, type = _winreg.QueryValueEx(key, shell_folder_name) + return dir + + +def _get_win_folder_with_pywin32(csidl_name): + from win32com.shell import shellcon, shell + dir = shell.SHGetFolderPath(0, getattr(shellcon, csidl_name), 0, 0) + # Try to make this a unicode path because SHGetFolderPath does + # not return unicode strings when there is unicode data in the + # path. + try: + dir = unicode(dir) + + # Downgrade to short path name if have highbit chars. See + # <http://bugs.activestate.com/show_bug.cgi?id=85099>. 
+ has_high_char = False + for c in dir: + if ord(c) > 255: + has_high_char = True + break + if has_high_char: + try: + import win32api + dir = win32api.GetShortPathName(dir) + except ImportError: + pass + except UnicodeError: + pass + return dir + + +def _get_win_folder_with_ctypes(csidl_name): + import ctypes + + csidl_const = { + "CSIDL_APPDATA": 26, + "CSIDL_COMMON_APPDATA": 35, + "CSIDL_LOCAL_APPDATA": 28, + }[csidl_name] + + buf = ctypes.create_unicode_buffer(1024) + ctypes.windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf) + + # Downgrade to short path name if have highbit chars. See + # <http://bugs.activestate.com/show_bug.cgi?id=85099>. + has_high_char = False + for c in buf: + if ord(c) > 255: + has_high_char = True + break + if has_high_char: + buf2 = ctypes.create_unicode_buffer(1024) + if ctypes.windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024): + buf = buf2 + + return buf.value + +def _get_win_folder_with_jna(csidl_name): + import array + from com.sun import jna + from com.sun.jna.platform import win32 + + buf_size = win32.WinDef.MAX_PATH * 2 + buf = array.zeros('c', buf_size) + shell = win32.Shell32.INSTANCE + shell.SHGetFolderPath(None, getattr(win32.ShlObj, csidl_name), None, win32.ShlObj.SHGFP_TYPE_CURRENT, buf) + dir = jna.Native.toString(buf.tostring()).rstrip("\0") + + # Downgrade to short path name if have highbit chars. See + # <http://bugs.activestate.com/show_bug.cgi?id=85099>. + has_high_char = False + for c in dir: + if ord(c) > 255: + has_high_char = True + break + if has_high_char: + buf = array.zeros('c', buf_size) + kernel = win32.Kernel32.INSTANCE + if kernel.GetShortPathName(dir, buf, buf_size): + dir = jna.Native.toString(buf.tostring()).rstrip("\0") + + return dir + +if system == "win32": + try: + from ctypes import windll + _get_win_folder = _get_win_folder_with_ctypes + except ImportError: + try: + import com.sun.jna + _get_win_folder = _get_win_folder_with_jna + except ImportError: + _get_win_folder = _get_win_folder_from_registry + + +#---- self test code + +if __name__ == "__main__": + appname = "MyApp" + appauthor = "MyCompany" + + props = ("user_data_dir", + "user_config_dir", + "user_cache_dir", + "user_state_dir", + "user_log_dir", + "site_data_dir", + "site_config_dir") + + print("-- app dirs %s --" % __version__) + + print("-- app dirs (with optional 'version')") + dirs = AppDirs(appname, appauthor, version="1.0") + for prop in props: + print("%s: %s" % (prop, getattr(dirs, prop))) + + print("\n-- app dirs (without optional 'version')") + dirs = AppDirs(appname, appauthor) + for prop in props: + print("%s: %s" % (prop, getattr(dirs, prop))) + + print("\n-- app dirs (without optional 'appauthor')") + dirs = AppDirs(appname) + for prop in props: + print("%s: %s" % (prop, getattr(dirs, prop))) + + print("\n-- app dirs (with disabled 'appauthor')") + dirs = AppDirs(appname, appauthor=False) + for prop in props: + print("%s: %s" % (prop, getattr(dirs, prop))) diff --git a/env/lib/python3.6/site-packages/pip/_vendor/cachecontrol/_cmd.py b/env/lib/python3.6/site-packages/pip/_vendor/cachecontrol/_cmd.py new file mode 100644 index 0000000..f1e0ad9 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/cachecontrol/_cmd.py @@ -0,0 +1,57 @@ +import logging + +from pip._vendor import requests + +from pip._vendor.cachecontrol.adapter import CacheControlAdapter +from pip._vendor.cachecontrol.cache import DictCache +from pip._vendor.cachecontrol.controller import logger + +from argparse import ArgumentParser + + +def 
setup_logging(): + logger.setLevel(logging.DEBUG) + handler = logging.StreamHandler() + logger.addHandler(handler) + + +def get_session(): + adapter = CacheControlAdapter( + DictCache(), cache_etags=True, serializer=None, heuristic=None + ) + sess = requests.Session() + sess.mount("http://", adapter) + sess.mount("https://", adapter) + + sess.cache_controller = adapter.controller + return sess + + +def get_args(): + parser = ArgumentParser() + parser.add_argument("url", help="The URL to try and cache") + return parser.parse_args() + + +def main(args=None): + args = get_args() + sess = get_session() + + # Make a request to get a response + resp = sess.get(args.url) + + # Turn on logging + setup_logging() + + # try setting the cache + sess.cache_controller.cache_response(resp.request, resp.raw) + + # Now try to get it + if sess.cache_controller.cached_request(resp.request): + print("Cached!") + else: + print("Not cached :(") + + +if __name__ == "__main__": + main() diff --git a/env/lib/python3.6/site-packages/pip/_vendor/cachecontrol/adapter.py b/env/lib/python3.6/site-packages/pip/_vendor/cachecontrol/adapter.py new file mode 100644 index 0000000..780eb28 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/cachecontrol/adapter.py @@ -0,0 +1,133 @@ +import types +import functools +import zlib + +from pip._vendor.requests.adapters import HTTPAdapter + +from .controller import CacheController +from .cache import DictCache +from .filewrapper import CallbackFileWrapper + + +class CacheControlAdapter(HTTPAdapter): + invalidating_methods = {"PUT", "DELETE"} + + def __init__( + self, + cache=None, + cache_etags=True, + controller_class=None, + serializer=None, + heuristic=None, + cacheable_methods=None, + *args, + **kw + ): + super(CacheControlAdapter, self).__init__(*args, **kw) + self.cache = cache or DictCache() + self.heuristic = heuristic + self.cacheable_methods = cacheable_methods or ("GET",) + + controller_factory = controller_class or CacheController + self.controller = controller_factory( + self.cache, cache_etags=cache_etags, serializer=serializer + ) + + def send(self, request, cacheable_methods=None, **kw): + """ + Send a request. Use the request information to see if it + exists in the cache and cache the response if we need to and can. + """ + cacheable = cacheable_methods or self.cacheable_methods + if request.method in cacheable: + try: + cached_response = self.controller.cached_request(request) + except zlib.error: + cached_response = None + if cached_response: + return self.build_response(request, cached_response, from_cache=True) + + # check for etags and add headers if appropriate + request.headers.update(self.controller.conditional_headers(request)) + + resp = super(CacheControlAdapter, self).send(request, **kw) + + return resp + + def build_response( + self, request, response, from_cache=False, cacheable_methods=None + ): + """ + Build a response by making a request or using the cache. + + This will end up calling send and returning a potentially + cached response + """ + cacheable = cacheable_methods or self.cacheable_methods + if not from_cache and request.method in cacheable: + # Check for any heuristics that might update headers + # before trying to cache. + if self.heuristic: + response = self.heuristic.apply(response) + + # apply any expiration heuristics + if response.status == 304: + # We must have sent an ETag request. This could mean + # that we've been expired already or that we simply + # have an etag. 
In either case, we want to try and + # update the cache if that is the case. + cached_response = self.controller.update_cached_response( + request, response + ) + + if cached_response is not response: + from_cache = True + + # We are done with the server response, read a + # possible response body (compliant servers will + # not return one, but we cannot be 100% sure) and + # release the connection back to the pool. + response.read(decode_content=False) + response.release_conn() + + response = cached_response + + # We always cache the 301 responses + elif response.status == 301: + self.controller.cache_response(request, response) + else: + # Wrap the response file with a wrapper that will cache the + # response when the stream has been consumed. + response._fp = CallbackFileWrapper( + response._fp, + functools.partial( + self.controller.cache_response, request, response + ), + ) + if response.chunked: + super_update_chunk_length = response._update_chunk_length + + def _update_chunk_length(self): + super_update_chunk_length() + if self.chunk_left == 0: + self._fp._close() + + response._update_chunk_length = types.MethodType( + _update_chunk_length, response + ) + + resp = super(CacheControlAdapter, self).build_response(request, response) + + # See if we should invalidate the cache. + if request.method in self.invalidating_methods and resp.ok: + cache_url = self.controller.cache_url(request.url) + self.cache.delete(cache_url) + + # Give the request a from_cache attr to let people use it + resp.from_cache = from_cache + + return resp + + def close(self): + self.cache.close() + super(CacheControlAdapter, self).close() diff --git a/env/lib/python3.6/site-packages/pip/_vendor/cachecontrol/cache.py b/env/lib/python3.6/site-packages/pip/_vendor/cachecontrol/cache.py new file mode 100644 index 0000000..94e0773 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/cachecontrol/cache.py @@ -0,0 +1,39 @@ +""" +The cache object API for implementing caches. The default is a thread +safe in-memory dictionary. +""" +from threading import Lock + + +class BaseCache(object): + + def get(self, key): + raise NotImplementedError() + + def set(self, key, value): + raise NotImplementedError() + + def delete(self, key): + raise NotImplementedError() + + def close(self): + pass + + +class DictCache(BaseCache): + + def __init__(self, init_dict=None): + self.lock = Lock() + self.data = init_dict or {} + + def get(self, key): + return self.data.get(key, None) + + def set(self, key, value): + with self.lock: + self.data.update({key: value}) + + def delete(self, key): + with self.lock: + if key in self.data: + self.data.pop(key) diff --git a/env/lib/python3.6/site-packages/pip/_vendor/cachecontrol/caches/file_cache.py b/env/lib/python3.6/site-packages/pip/_vendor/cachecontrol/caches/file_cache.py new file mode 100644 index 0000000..1ba0080 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/cachecontrol/caches/file_cache.py @@ -0,0 +1,146 @@ +import hashlib +import os +from textwrap import dedent + +from ..cache import BaseCache +from ..controller import CacheController + +try: + FileNotFoundError +except NameError: + # py2.X + FileNotFoundError = (IOError, OSError) + + +def _secure_open_write(filename, fmode): + # We only want to write to this file, so open it in write only mode + flags = os.O_WRONLY + + # os.O_CREAT | os.O_EXCL will fail if the file already exists, so we only + # will open *new* files. 
+ # We specify this because we want to ensure that the mode we pass is the + # mode of the file. + flags |= os.O_CREAT | os.O_EXCL + + # Do not follow symlinks to prevent someone from making a symlink that + # we follow and insecurely open a cache file. + if hasattr(os, "O_NOFOLLOW"): + flags |= os.O_NOFOLLOW + + # On Windows we'll mark this file as binary + if hasattr(os, "O_BINARY"): + flags |= os.O_BINARY + + # Before we open our file, we want to delete any existing file that is + # there + try: + os.remove(filename) + except (IOError, OSError): + # The file must not exist already, so we can just skip ahead to opening + pass + + # Open our file, the use of os.O_CREAT | os.O_EXCL will ensure that if a + # race condition happens between the os.remove and this line, that an + # error will be raised. Because we utilize a lockfile this should only + # happen if someone is attempting to attack us. + fd = os.open(filename, flags, fmode) + try: + return os.fdopen(fd, "wb") + + except: + # An error occurred wrapping our FD in a file object + os.close(fd) + raise + + +class FileCache(BaseCache): + + def __init__( + self, + directory, + forever=False, + filemode=0o0600, + dirmode=0o0700, + use_dir_lock=None, + lock_class=None, + ): + + if use_dir_lock is not None and lock_class is not None: + raise ValueError("Cannot use use_dir_lock and lock_class together") + + try: + from pip._vendor.lockfile import LockFile + from pip._vendor.lockfile.mkdirlockfile import MkdirLockFile + except ImportError: + notice = dedent( + """ + NOTE: In order to use the FileCache you must have + lockfile installed. You can install it via pip: + pip install lockfile + """ + ) + raise ImportError(notice) + + else: + if use_dir_lock: + lock_class = MkdirLockFile + + elif lock_class is None: + lock_class = LockFile + + self.directory = directory + self.forever = forever + self.filemode = filemode + self.dirmode = dirmode + self.lock_class = lock_class + + @staticmethod + def encode(x): + return hashlib.sha224(x.encode()).hexdigest() + + def _fn(self, name): + # NOTE: This method should not change as some may depend on it. + # See: https://github.com/ionrock/cachecontrol/issues/63 + hashed = self.encode(name) + parts = list(hashed[:5]) + [hashed] + return os.path.join(self.directory, *parts) + + def get(self, key): + name = self._fn(key) + try: + with open(name, "rb") as fh: + return fh.read() + + except FileNotFoundError: + return None + + def set(self, key, value): + name = self._fn(key) + + # Make sure the directory exists + try: + os.makedirs(os.path.dirname(name), self.dirmode) + except (IOError, OSError): + pass + + with self.lock_class(name) as lock: + # Write our actual file + with _secure_open_write(lock.path, self.filemode) as fh: + fh.write(value) + + def delete(self, key): + name = self._fn(key) + if not self.forever: + try: + os.remove(name) + except FileNotFoundError: + pass + + +def url_to_file_path(url, filecache): + """Return the file cache path based on the URL. + + This does not ensure the file exists! 
+ """ + key = CacheController.cache_url(url) + return filecache._fn(key) diff --git a/env/lib/python3.6/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.py b/env/lib/python3.6/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.py new file mode 100644 index 0000000..ed705ce --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.py @@ -0,0 +1,33 @@ +from __future__ import division + +from datetime import datetime +from pip._vendor.cachecontrol.cache import BaseCache + + +class RedisCache(BaseCache): + + def __init__(self, conn): + self.conn = conn + + def get(self, key): + return self.conn.get(key) + + def set(self, key, value, expires=None): + if not expires: + self.conn.set(key, value) + else: + expires = expires - datetime.utcnow() + self.conn.setex(key, int(expires.total_seconds()), value) + + def delete(self, key): + self.conn.delete(key) + + def clear(self): + """Helper for clearing all the keys in a database. Use with + caution!""" + for key in self.conn.keys(): + self.conn.delete(key) + + def close(self): + """Redis uses connection pooling, no need to close the connection.""" + pass diff --git a/env/lib/python3.6/site-packages/pip/_vendor/cachecontrol/compat.py b/env/lib/python3.6/site-packages/pip/_vendor/cachecontrol/compat.py new file mode 100644 index 0000000..33b5aed --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/cachecontrol/compat.py @@ -0,0 +1,29 @@ +try: + from urllib.parse import urljoin +except ImportError: + from urlparse import urljoin + + +try: + import cPickle as pickle +except ImportError: + import pickle + + +# Handle the case where the requests module has been patched to not have +# urllib3 bundled as part of its source. +try: + from pip._vendor.requests.packages.urllib3.response import HTTPResponse +except ImportError: + from pip._vendor.urllib3.response import HTTPResponse + +try: + from pip._vendor.requests.packages.urllib3.util import is_fp_closed +except ImportError: + from pip._vendor.urllib3.util import is_fp_closed + +# Replicate some six behaviour +try: + text_type = unicode +except NameError: + text_type = str diff --git a/env/lib/python3.6/site-packages/pip/_vendor/cachecontrol/controller.py b/env/lib/python3.6/site-packages/pip/_vendor/cachecontrol/controller.py new file mode 100644 index 0000000..1b2b943 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/cachecontrol/controller.py @@ -0,0 +1,367 @@ +""" +The httplib2 algorithms ported for use with requests. +""" +import logging +import re +import calendar +import time +from email.utils import parsedate_tz + +from pip._vendor.requests.structures import CaseInsensitiveDict + +from .cache import DictCache +from .serialize import Serializer + + +logger = logging.getLogger(__name__) + +URI = re.compile(r"^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?") + + +def parse_uri(uri): + """Parses a URI using the regex given in Appendix B of RFC 3986. + + (scheme, authority, path, query, fragment) = parse_uri(uri) + """ + groups = URI.match(uri).groups() + return (groups[1], groups[3], groups[4], groups[6], groups[8]) + + +class CacheController(object): + """An interface to see if request should cached or not. 
+ """ + + def __init__( + self, cache=None, cache_etags=True, serializer=None, status_codes=None + ): + self.cache = cache or DictCache() + self.cache_etags = cache_etags + self.serializer = serializer or Serializer() + self.cacheable_status_codes = status_codes or (200, 203, 300, 301) + + @classmethod + def _urlnorm(cls, uri): + """Normalize the URL to create a safe key for the cache""" + (scheme, authority, path, query, fragment) = parse_uri(uri) + if not scheme or not authority: + raise Exception("Only absolute URIs are allowed. uri = %s" % uri) + + scheme = scheme.lower() + authority = authority.lower() + + if not path: + path = "/" + + # Could do syntax based normalization of the URI before + # computing the digest. See Section 6.2.2 of Std 66. + request_uri = query and "?".join([path, query]) or path + defrag_uri = scheme + "://" + authority + request_uri + + return defrag_uri + + @classmethod + def cache_url(cls, uri): + return cls._urlnorm(uri) + + def parse_cache_control(self, headers): + known_directives = { + # https://tools.ietf.org/html/rfc7234#section-5.2 + "max-age": (int, True), + "max-stale": (int, False), + "min-fresh": (int, True), + "no-cache": (None, False), + "no-store": (None, False), + "no-transform": (None, False), + "only-if-cached": (None, False), + "must-revalidate": (None, False), + "public": (None, False), + "private": (None, False), + "proxy-revalidate": (None, False), + "s-maxage": (int, True), + } + + cc_headers = headers.get("cache-control", headers.get("Cache-Control", "")) + + retval = {} + + for cc_directive in cc_headers.split(","): + if not cc_directive.strip(): + continue + + parts = cc_directive.split("=", 1) + directive = parts[0].strip() + + try: + typ, required = known_directives[directive] + except KeyError: + logger.debug("Ignoring unknown cache-control directive: %s", directive) + continue + + if not typ or not required: + retval[directive] = None + if typ: + try: + retval[directive] = typ(parts[1].strip()) + except IndexError: + if required: + logger.debug( + "Missing value for cache-control " "directive: %s", + directive, + ) + except ValueError: + logger.debug( + "Invalid value for cache-control directive " "%s, must be %s", + directive, + typ.__name__, + ) + + return retval + + def cached_request(self, request): + """ + Return a cached response if it exists in the cache, otherwise + return False. + """ + cache_url = self.cache_url(request.url) + logger.debug('Looking up "%s" in the cache', cache_url) + cc = self.parse_cache_control(request.headers) + + # Bail out if the request insists on fresh data + if "no-cache" in cc: + logger.debug('Request header has "no-cache", cache bypassed') + return False + + if "max-age" in cc and cc["max-age"] == 0: + logger.debug('Request header has "max_age" as 0, cache bypassed') + return False + + # Request allows serving from the cache, let's see if we find something + cache_data = self.cache.get(cache_url) + if cache_data is None: + logger.debug("No cache entry available") + return False + + # Check whether it can be deserialized + resp = self.serializer.loads(request, cache_data) + if not resp: + logger.warning("Cache entry deserialization failed, entry ignored") + return False + + # If we have a cached 301, return it immediately. We don't + # need to test our response for other headers b/c it is + # intrinsically "cacheable" as it is Permanent. 
+ # See: + # https://tools.ietf.org/html/rfc7231#section-6.4.2 + # + # Client can try to refresh the value by repeating the request + # with cache busting headers as usual (ie no-cache). + if resp.status == 301: + msg = ( + 'Returning cached "301 Moved Permanently" response ' + "(ignoring date and etag information)" + ) + logger.debug(msg) + return resp + + headers = CaseInsensitiveDict(resp.headers) + if not headers or "date" not in headers: + if "etag" not in headers: + # Without date or etag, the cached response can never be used + # and should be deleted. + logger.debug("Purging cached response: no date or etag") + self.cache.delete(cache_url) + logger.debug("Ignoring cached response: no date") + return False + + now = time.time() + date = calendar.timegm(parsedate_tz(headers["date"])) + current_age = max(0, now - date) + logger.debug("Current age based on date: %i", current_age) + + # TODO: There is an assumption that the result will be a + # urllib3 response object. This may not be best since we + # could probably avoid instantiating or constructing the + # response until we know we need it. + resp_cc = self.parse_cache_control(headers) + + # determine freshness + freshness_lifetime = 0 + + # Check the max-age pragma in the cache control header + if "max-age" in resp_cc: + freshness_lifetime = resp_cc["max-age"] + logger.debug("Freshness lifetime from max-age: %i", freshness_lifetime) + + # If there isn't a max-age, check for an expires header + elif "expires" in headers: + expires = parsedate_tz(headers["expires"]) + if expires is not None: + expire_time = calendar.timegm(expires) - date + freshness_lifetime = max(0, expire_time) + logger.debug("Freshness lifetime from expires: %i", freshness_lifetime) + + # Determine if we are setting freshness limit in the + # request. Note, this overrides what was in the response. + if "max-age" in cc: + freshness_lifetime = cc["max-age"] + logger.debug( + "Freshness lifetime from request max-age: %i", freshness_lifetime + ) + + if "min-fresh" in cc: + min_fresh = cc["min-fresh"] + # adjust our current age by our min fresh + current_age += min_fresh + logger.debug("Adjusted current age from min-fresh: %i", current_age) + + # Return entry if it is fresh enough + if freshness_lifetime > current_age: + logger.debug('The response is "fresh", returning cached response') + logger.debug("%i > %i", freshness_lifetime, current_age) + return resp + + # we're not fresh. If we don't have an Etag, clear it out + if "etag" not in headers: + logger.debug('The cached response is "stale" with no etag, purging') + self.cache.delete(cache_url) + + # return the original handler + return False + + def conditional_headers(self, request): + cache_url = self.cache_url(request.url) + resp = self.serializer.loads(request, self.cache.get(cache_url)) + new_headers = {} + + if resp: + headers = CaseInsensitiveDict(resp.headers) + + if "etag" in headers: + new_headers["If-None-Match"] = headers["ETag"] + + if "last-modified" in headers: + new_headers["If-Modified-Since"] = headers["Last-Modified"] + + return new_headers + + def cache_response(self, request, response, body=None, status_codes=None): + """ + Algorithm for caching requests. + + This assumes a requests Response object. 
+ """ + # From httplib2: Don't cache 206's since we aren't going to + # handle byte range requests + cacheable_status_codes = status_codes or self.cacheable_status_codes + if response.status not in cacheable_status_codes: + logger.debug( + "Status code %s not in %s", response.status, cacheable_status_codes + ) + return + + response_headers = CaseInsensitiveDict(response.headers) + + # If we've been given a body, our response has a Content-Length, that + # Content-Length is valid then we can check to see if the body we've + # been given matches the expected size, and if it doesn't we'll just + # skip trying to cache it. + if ( + body is not None + and "content-length" in response_headers + and response_headers["content-length"].isdigit() + and int(response_headers["content-length"]) != len(body) + ): + return + + cc_req = self.parse_cache_control(request.headers) + cc = self.parse_cache_control(response_headers) + + cache_url = self.cache_url(request.url) + logger.debug('Updating cache with response from "%s"', cache_url) + + # Delete it from the cache if we happen to have it stored there + no_store = False + if "no-store" in cc: + no_store = True + logger.debug('Response header has "no-store"') + if "no-store" in cc_req: + no_store = True + logger.debug('Request header has "no-store"') + if no_store and self.cache.get(cache_url): + logger.debug('Purging existing cache entry to honor "no-store"') + self.cache.delete(cache_url) + if no_store: + return + + # If we've been given an etag, then keep the response + if self.cache_etags and "etag" in response_headers: + logger.debug("Caching due to etag") + self.cache.set( + cache_url, self.serializer.dumps(request, response, body=body) + ) + + # Add to the cache any 301s. We do this before looking that + # the Date headers. + elif response.status == 301: + logger.debug("Caching permanant redirect") + self.cache.set(cache_url, self.serializer.dumps(request, response)) + + # Add to the cache if the response headers demand it. If there + # is no date header then we can't do anything about expiring + # the cache. + elif "date" in response_headers: + # cache when there is a max-age > 0 + if "max-age" in cc and cc["max-age"] > 0: + logger.debug("Caching b/c date exists and max-age > 0") + self.cache.set( + cache_url, self.serializer.dumps(request, response, body=body) + ) + + # If the request can expire, it means we should cache it + # in the meantime. + elif "expires" in response_headers: + if response_headers["expires"]: + logger.debug("Caching b/c of expires header") + self.cache.set( + cache_url, self.serializer.dumps(request, response, body=body) + ) + + def update_cached_response(self, request, response): + """On a 304 we will get a new set of headers that we want to + update our cached value with, assuming we have one. + + This should only ever be called when we've sent an ETag and + gotten a 304 as the response. + """ + cache_url = self.cache_url(request.url) + + cached_response = self.serializer.loads(request, self.cache.get(cache_url)) + + if not cached_response: + # we didn't have a cached response + return response + + # Lets update our headers with the headers from the new request: + # http://tools.ietf.org/html/draft-ietf-httpbis-p4-conditional-26#section-4.1 + # + # The server isn't supposed to send headers that would make + # the cached body invalid. But... just in case, we'll be sure + # to strip out ones we know that might be problmatic due to + # typical assumptions. 
+        excluded_headers = ["content-length"]
+
+        cached_response.headers.update(
+            dict(
+                (k, v)
+                for k, v in response.headers.items()
+                if k.lower() not in excluded_headers
+            )
+        )
+
+        # we want a 200 b/c we have content via the cache
+        cached_response.status = 200
+
+        # update our cache
+        self.cache.set(cache_url, self.serializer.dumps(request, cached_response))
+
+        return cached_response
diff --git a/env/lib/python3.6/site-packages/pip/_vendor/cachecontrol/filewrapper.py b/env/lib/python3.6/site-packages/pip/_vendor/cachecontrol/filewrapper.py
new file mode 100644
index 0000000..30ed4c5
--- /dev/null
+++ b/env/lib/python3.6/site-packages/pip/_vendor/cachecontrol/filewrapper.py
@@ -0,0 +1,80 @@
+from io import BytesIO
+
+
+class CallbackFileWrapper(object):
+    """
+    Small wrapper around an fp object which will tee everything read into a
+    buffer, and when that file is closed it will execute a callback with the
+    contents of that buffer.
+
+    All attributes are proxied to the underlying file object.
+
+    This class uses members with a double underscore (__) leading prefix so as
+    not to accidentally shadow an attribute.
+    """
+
+    def __init__(self, fp, callback):
+        self.__buf = BytesIO()
+        self.__fp = fp
+        self.__callback = callback
+
+    def __getattr__(self, name):
+        # The vagaries of garbage collection mean that self.__fp is
+        # not always set. Using __getattribute__ and the private
+        # name[0] allows looking up the attribute value and raising an
+        # AttributeError when it doesn't exist. This stops things from
+        # recursing infinitely into getattr in the case where
+        # self.__fp hasn't been set.
+        #
+        # [0] https://docs.python.org/2/reference/expressions.html#atom-identifiers
+        fp = self.__getattribute__("_CallbackFileWrapper__fp")
+        return getattr(fp, name)
+
+    def __is_fp_closed(self):
+        try:
+            return self.__fp.fp is None
+
+        except AttributeError:
+            pass
+
+        try:
+            return self.__fp.closed
+
+        except AttributeError:
+            pass
+
+        # We just don't cache it then.
+        # TODO: Add some logging here...
+        return False
+
+    def _close(self):
+        if self.__callback:
+            self.__callback(self.__buf.getvalue())
+
+        # We assign this to None here, because otherwise we can get into
+        # really tricky problems where the CPython interpreter deadlocks
+        # because the callback is holding a reference to something which
+        # has a __del__ method. Setting this to None breaks the cycle
+        # and allows the garbage collector to do its thing normally.
+        self.__callback = None
+
+    def read(self, amt=None):
+        data = self.__fp.read(amt)
+        self.__buf.write(data)
+        if self.__is_fp_closed():
+            self._close()
+
+        return data
+
+    def _safe_read(self, amt):
+        data = self.__fp._safe_read(amt)
+        if amt == 2 and data == b"\r\n":
+            # urllib executes this read to toss the CRLF at the end
+            # of the chunk.
+            return data
+
+        self.__buf.write(data)
+        if self.__is_fp_closed():
+            self._close()
+
+        return data
diff --git a/env/lib/python3.6/site-packages/pip/_vendor/cachecontrol/heuristics.py b/env/lib/python3.6/site-packages/pip/_vendor/cachecontrol/heuristics.py
new file mode 100644
index 0000000..6c0e979
--- /dev/null
+++ b/env/lib/python3.6/site-packages/pip/_vendor/cachecontrol/heuristics.py
@@ -0,0 +1,135 @@
+import calendar
+import time
+
+from email.utils import formatdate, parsedate, parsedate_tz
+
+from datetime import datetime, timedelta
+
+TIME_FMT = "%a, %d %b %Y %H:%M:%S GMT"
+
+
+def expire_after(delta, date=None):
+    date = date or datetime.utcnow()
+    return date + delta
+
+
+def datetime_to_header(dt):
+    return formatdate(calendar.timegm(dt.timetuple()))
+
+
+class BaseHeuristic(object):
+
+    def warning(self, response):
+        """
+        Return a valid 1xx warning header value describing the cache
+        adjustments.
+
+        The response is provided to allow warnings like 113
+        http://tools.ietf.org/html/rfc7234#section-5.5.4 where we need
+        to explicitly say the response is over 24 hours old.
+        """
+        return '110 - "Response is Stale"'
+
+    def update_headers(self, response):
+        """Update the response headers with any new headers.
+
+        NOTE: This SHOULD always include some Warning header to
+              signify that the response was cached by the client, not
+              by way of the provided headers.
+        """
+        return {}
+
+    def apply(self, response):
+        updated_headers = self.update_headers(response)
+
+        if updated_headers:
+            response.headers.update(updated_headers)
+            warning_header_value = self.warning(response)
+            if warning_header_value is not None:
+                response.headers.update({"Warning": warning_header_value})
+
+        return response
+
+
+class OneDayCache(BaseHeuristic):
+    """
+    Cache the response by providing an expires header 1 day in the
+    future.
+    """
+
+    def update_headers(self, response):
+        headers = {}
+
+        if "expires" not in response.headers:
+            date = parsedate(response.headers["date"])
+            expires = expire_after(timedelta(days=1), date=datetime(*date[:6]))
+            headers["expires"] = datetime_to_header(expires)
+            headers["cache-control"] = "public"
+        return headers
+
+
+class ExpiresAfter(BaseHeuristic):
+    """
+    Cache **all** requests for a defined time period.
+    """

+    def __init__(self, **kw):
+        self.delta = timedelta(**kw)
+
+    def update_headers(self, response):
+        expires = expire_after(self.delta)
+        return {"expires": datetime_to_header(expires), "cache-control": "public"}
+
+    def warning(self, response):
+        tmpl = "110 - Automatically cached for %s. Response might be stale"
+        return tmpl % self.delta
+
+
+class LastModified(BaseHeuristic):
+    """
+    If there is no Expires header already, fall back on Last-Modified
+    using the heuristic from
+    http://tools.ietf.org/html/rfc7234#section-4.2.2
+    to calculate a reasonable value.
+
+    Firefox also does something like this per
+    https://developer.mozilla.org/en-US/docs/Web/HTTP/Caching_FAQ
+    http://lxr.mozilla.org/mozilla-release/source/netwerk/protocol/http/nsHttpResponseHead.cpp#397
+    Unlike Mozilla, we limit this to 24 hours.
+ """ + cacheable_by_default_statuses = { + 200, 203, 204, 206, 300, 301, 404, 405, 410, 414, 501 + } + + def update_headers(self, resp): + headers = resp.headers + + if "expires" in headers: + return {} + + if "cache-control" in headers and headers["cache-control"] != "public": + return {} + + if resp.status not in self.cacheable_by_default_statuses: + return {} + + if "date" not in headers or "last-modified" not in headers: + return {} + + date = calendar.timegm(parsedate_tz(headers["date"])) + last_modified = parsedate(headers["last-modified"]) + if date is None or last_modified is None: + return {} + + now = time.time() + current_age = max(0, now - date) + delta = date - calendar.timegm(last_modified) + freshness_lifetime = max(0, min(delta / 10, 24 * 3600)) + if freshness_lifetime <= current_age: + return {} + + expires = date + freshness_lifetime + return {"expires": time.strftime(TIME_FMT, time.gmtime(expires))} + + def warning(self, resp): + return None diff --git a/env/lib/python3.6/site-packages/pip/_vendor/cachecontrol/serialize.py b/env/lib/python3.6/site-packages/pip/_vendor/cachecontrol/serialize.py new file mode 100644 index 0000000..ec43ff2 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/cachecontrol/serialize.py @@ -0,0 +1,186 @@ +import base64 +import io +import json +import zlib + +from pip._vendor import msgpack +from pip._vendor.requests.structures import CaseInsensitiveDict + +from .compat import HTTPResponse, pickle, text_type + + +def _b64_decode_bytes(b): + return base64.b64decode(b.encode("ascii")) + + +def _b64_decode_str(s): + return _b64_decode_bytes(s).decode("utf8") + + +class Serializer(object): + + def dumps(self, request, response, body=None): + response_headers = CaseInsensitiveDict(response.headers) + + if body is None: + body = response.read(decode_content=False) + + # NOTE: 99% sure this is dead code. I'm only leaving it + # here b/c I don't have a test yet to prove + # it. Basically, before using + # `cachecontrol.filewrapper.CallbackFileWrapper`, + # this made an effort to reset the file handle. The + # `CallbackFileWrapper` short circuits this code by + # setting the body as the content is consumed, the + # result being a `body` argument is *always* passed + # into cache_response, and in turn, + # `Serializer.dump`. + response._fp = io.BytesIO(body) + + # NOTE: This is all a bit weird, but it's really important that on + # Python 2.x these objects are unicode and not str, even when + # they contain only ascii. The problem here is that msgpack + # understands the difference between unicode and bytes and we + # have it set to differentiate between them, however Python 2 + # doesn't know the difference. Forcing these to unicode will be + # enough to have msgpack know the difference. 
+ data = { + u"response": { + u"body": body, + u"headers": dict( + (text_type(k), text_type(v)) for k, v in response.headers.items() + ), + u"status": response.status, + u"version": response.version, + u"reason": text_type(response.reason), + u"strict": response.strict, + u"decode_content": response.decode_content, + } + } + + # Construct our vary headers + data[u"vary"] = {} + if u"vary" in response_headers: + varied_headers = response_headers[u"vary"].split(",") + for header in varied_headers: + header = text_type(header).strip() + header_value = request.headers.get(header, None) + if header_value is not None: + header_value = text_type(header_value) + data[u"vary"][header] = header_value + + return b",".join([b"cc=4", msgpack.dumps(data, use_bin_type=True)]) + + def loads(self, request, data): + # Short circuit if we've been given an empty set of data + if not data: + return + + # Determine what version of the serializer the data was serialized + # with + try: + ver, data = data.split(b",", 1) + except ValueError: + ver = b"cc=0" + + # Make sure that our "ver" is actually a version and isn't a false + # positive from a , being in the data stream. + if ver[:3] != b"cc=": + data = ver + data + ver = b"cc=0" + + # Get the version number out of the cc=N + ver = ver.split(b"=", 1)[-1].decode("ascii") + + # Dispatch to the actual load method for the given version + try: + return getattr(self, "_loads_v{}".format(ver))(request, data) + + except AttributeError: + # This is a version we don't have a loads function for, so we'll + # just treat it as a miss and return None + return + + def prepare_response(self, request, cached): + """Verify our vary headers match and construct a real urllib3 + HTTPResponse object. + """ + # Special case the '*' Vary value as it means we cannot actually + # determine if the cached response is suitable for this request. + if "*" in cached.get("vary", {}): + return + + # Ensure that the Vary headers for the cached response match our + # request + for header, value in cached.get("vary", {}).items(): + if request.headers.get(header, None) != value: + return + + body_raw = cached["response"].pop("body") + + headers = CaseInsensitiveDict(data=cached["response"]["headers"]) + if headers.get("transfer-encoding", "") == "chunked": + headers.pop("transfer-encoding") + + cached["response"]["headers"] = headers + + try: + body = io.BytesIO(body_raw) + except TypeError: + # This can happen if cachecontrol serialized to v1 format (pickle) + # using Python 2. A Python 2 str(byte string) will be unpickled as + # a Python 3 str (unicode string), which will cause the above to + # fail with: + # + # TypeError: 'str' does not support the buffer interface + body = io.BytesIO(body_raw.encode("utf8")) + + return HTTPResponse(body=body, preload_content=False, **cached["response"]) + + def _loads_v0(self, request, data): + # The original legacy cache data. This doesn't contain enough + # information to construct everything we need, so we'll treat this as + # a miss. 
+ return + + def _loads_v1(self, request, data): + try: + cached = pickle.loads(data) + except ValueError: + return + + return self.prepare_response(request, cached) + + def _loads_v2(self, request, data): + try: + cached = json.loads(zlib.decompress(data).decode("utf8")) + except (ValueError, zlib.error): + return + + # We need to decode the items that we've base64 encoded + cached["response"]["body"] = _b64_decode_bytes(cached["response"]["body"]) + cached["response"]["headers"] = dict( + (_b64_decode_str(k), _b64_decode_str(v)) + for k, v in cached["response"]["headers"].items() + ) + cached["response"]["reason"] = _b64_decode_str(cached["response"]["reason"]) + cached["vary"] = dict( + (_b64_decode_str(k), _b64_decode_str(v) if v is not None else v) + for k, v in cached["vary"].items() + ) + + return self.prepare_response(request, cached) + + def _loads_v3(self, request, data): + # Due to Python 2 encoding issues, it's impossible to know for sure + # exactly how to load v3 entries, thus we'll treat these as a miss so + # that they get rewritten out as v4 entries. + return + + def _loads_v4(self, request, data): + try: + cached = msgpack.loads(data, encoding="utf-8") + except ValueError: + return + + return self.prepare_response(request, cached) diff --git a/env/lib/python3.6/site-packages/pip/_vendor/cachecontrol/wrapper.py b/env/lib/python3.6/site-packages/pip/_vendor/cachecontrol/wrapper.py new file mode 100644 index 0000000..265bfc8 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/cachecontrol/wrapper.py @@ -0,0 +1,29 @@ +from .adapter import CacheControlAdapter +from .cache import DictCache + + +def CacheControl( + sess, + cache=None, + cache_etags=True, + serializer=None, + heuristic=None, + controller_class=None, + adapter_class=None, + cacheable_methods=None, +): + + cache = cache or DictCache() + adapter_class = adapter_class or CacheControlAdapter + adapter = adapter_class( + cache, + cache_etags=cache_etags, + serializer=serializer, + heuristic=heuristic, + controller_class=controller_class, + cacheable_methods=cacheable_methods, + ) + sess.mount("http://", adapter) + sess.mount("https://", adapter) + + return sess diff --git a/env/lib/python3.6/site-packages/pip/_vendor/certifi/cacert.pem b/env/lib/python3.6/site-packages/pip/_vendor/certifi/cacert.pem new file mode 100644 index 0000000..2713f54 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/certifi/cacert.pem @@ -0,0 +1,4400 @@ + +# Issuer: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA +# Subject: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA +# Label: "GlobalSign Root CA" +# Serial: 4835703278459707669005204 +# MD5 Fingerprint: 3e:45:52:15:09:51:92:e1:b7:5d:37:9f:b1:87:29:8a +# SHA1 Fingerprint: b1:bc:96:8b:d4:f4:9d:62:2a:a8:9a:81:f2:15:01:52:a4:1d:82:9c +# SHA256 Fingerprint: eb:d4:10:40:e4:bb:3e:c7:42:c9:e3:81:d3:1e:f2:a4:1a:48:b6:68:5c:96:e7:ce:f3:c1:df:6c:d4:33:1c:99 +-----BEGIN CERTIFICATE----- +MIIDdTCCAl2gAwIBAgILBAAAAAABFUtaw5QwDQYJKoZIhvcNAQEFBQAwVzELMAkG +A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv +b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw05ODA5MDExMjAw +MDBaFw0yODAxMjgxMjAwMDBaMFcxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9i +YWxTaWduIG52LXNhMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJHbG9iYWxT +aWduIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDaDuaZ +jc6j40+Kfvvxi4Mla+pIH/EqsLmVEQS98GPR4mdmzxzdzxtIK+6NiY6arymAZavp +xy0Sy6scTHAHoT0KMM0VjU/43dSMUBUc71DuxC73/OlS8pF94G3VNTCOXkNz8kHp 
+1Wrjsok6Vjk4bwY8iGlbKk3Fp1S4bInMm/k8yuX9ifUSPJJ4ltbcdG6TRGHRjcdG +snUOhugZitVtbNV4FpWi6cgKOOvyJBNPc1STE4U6G7weNLWLBYy5d4ux2x8gkasJ +U26Qzns3dLlwR5EiUWMWea6xrkEmCMgZK9FGqkjWZCrXgzT/LCrBbBlDSgeF59N8 +9iFo7+ryUp9/k5DPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8E +BTADAQH/MB0GA1UdDgQWBBRge2YaRQ2XyolQL30EzTSo//z9SzANBgkqhkiG9w0B +AQUFAAOCAQEA1nPnfE920I2/7LqivjTFKDK1fPxsnCwrvQmeU79rXqoRSLblCKOz +yj1hTdNGCbM+w6DjY1Ub8rrvrTnhQ7k4o+YviiY776BQVvnGCv04zcQLcFGUl5gE +38NflNUVyRRBnMRddWQVDf9VMOyGj/8N7yy5Y0b2qvzfvGn9LhJIZJrglfCm7ymP +AbEVtQwdpf5pLGkkeB6zpxxxYu7KyJesF12KwvhHhm4qxFYxldBniYUr+WymXUad +DKqC5JlR3XC321Y9YeRq4VzW9v493kHMB65jUr9TU/Qr6cf9tveCX4XSQRjbgbME +HMUfpIBvFSDJ3gyICh3WZlXi/EjJKSZp4A== +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2 +# Label: "GlobalSign Root CA - R2" +# Serial: 4835703278459682885658125 +# MD5 Fingerprint: 94:14:77:7e:3e:5e:fd:8f:30:bd:41:b0:cf:e7:d0:30 +# SHA1 Fingerprint: 75:e0:ab:b6:13:85:12:27:1c:04:f8:5f:dd:de:38:e4:b7:24:2e:fe +# SHA256 Fingerprint: ca:42:dd:41:74:5f:d0:b8:1e:b9:02:36:2c:f9:d8:bf:71:9d:a1:bd:1b:1e:fc:94:6f:5b:4c:99:f4:2c:1b:9e +-----BEGIN CERTIFICATE----- +MIIDujCCAqKgAwIBAgILBAAAAAABD4Ym5g0wDQYJKoZIhvcNAQEFBQAwTDEgMB4G +A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjIxEzARBgNVBAoTCkdsb2JhbFNp +Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDYxMjE1MDgwMDAwWhcNMjExMjE1 +MDgwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMjETMBEG +A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAKbPJA6+Lm8omUVCxKs+IVSbC9N/hHD6ErPL +v4dfxn+G07IwXNb9rfF73OX4YJYJkhD10FPe+3t+c4isUoh7SqbKSaZeqKeMWhG8 +eoLrvozps6yWJQeXSpkqBy+0Hne/ig+1AnwblrjFuTosvNYSuetZfeLQBoZfXklq +tTleiDTsvHgMCJiEbKjNS7SgfQx5TfC4LcshytVsW33hoCmEofnTlEnLJGKRILzd +C9XZzPnqJworc5HGnRusyMvo4KD0L5CLTfuwNhv2GXqF4G3yYROIXJ/gkwpRl4pa +zq+r1feqCapgvdzZX99yqWATXgAByUr6P6TqBwMhAo6CygPCm48CAwEAAaOBnDCB +mTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUm+IH +V2ccHsBqBt5ZtJot39wZhi4wNgYDVR0fBC8wLTAroCmgJ4YlaHR0cDovL2NybC5n +bG9iYWxzaWduLm5ldC9yb290LXIyLmNybDAfBgNVHSMEGDAWgBSb4gdXZxwewGoG +3lm0mi3f3BmGLjANBgkqhkiG9w0BAQUFAAOCAQEAmYFThxxol4aR7OBKuEQLq4Gs +J0/WwbgcQ3izDJr86iw8bmEbTUsp9Z8FHSbBuOmDAGJFtqkIk7mpM0sYmsL4h4hO +291xNBrBVNpGP+DTKqttVCL1OmLNIG+6KYnX3ZHu01yiPqFbQfXf5WRDLenVOavS +ot+3i9DAgBkcRcAtjOj4LaR0VknFBbVPFd5uRHg5h6h+u/N5GJG79G+dwfCMNYxd +AfvDbbnvRG15RjF+Cv6pgsH/76tuIMRQyV+dTZsXjAzlAcmgQWpzU/qlULRuJQ/7 +TBj0/VLZjmmx6BEP3ojY+x1J96relc8geMJgEtslQIxq/H5COEBkEveegeGTLg== +-----END CERTIFICATE----- + +# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only +# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. 
- For authorized use only +# Label: "Verisign Class 3 Public Primary Certification Authority - G3" +# Serial: 206684696279472310254277870180966723415 +# MD5 Fingerprint: cd:68:b6:a7:c7:c4:ce:75:e0:1d:4f:57:44:61:92:09 +# SHA1 Fingerprint: 13:2d:0d:45:53:4b:69:97:cd:b2:d5:c3:39:e2:55:76:60:9b:5c:c6 +# SHA256 Fingerprint: eb:04:cf:5e:b1:f3:9a:fa:76:2f:2b:b1:20:f2:96:cb:a5:20:c1:b9:7d:b1:58:95:65:b8:1c:b9:a1:7b:72:44 +-----BEGIN CERTIFICATE----- +MIIEGjCCAwICEQCbfgZJoz5iudXukEhxKe9XMA0GCSqGSIb3DQEBBQUAMIHKMQsw +CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl +cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu +LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT +aWduIENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp +dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD +VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT +aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ +bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu +IENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg +LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMu6nFL8eB8aHm8b +N3O9+MlrlBIwT/A2R/XQkQr1F8ilYcEWQE37imGQ5XYgwREGfassbqb1EUGO+i2t +KmFZpGcmTNDovFJbcCAEWNF6yaRpvIMXZK0Fi7zQWM6NjPXr8EJJC52XJ2cybuGu +kxUccLwgTS8Y3pKI6GyFVxEa6X7jJhFUokWWVYPKMIno3Nij7SqAP395ZVc+FSBm +CC+Vk7+qRy+oRpfwEuL+wgorUeZ25rdGt+INpsyow0xZVYnm6FNcHOqd8GIWC6fJ +Xwzw3sJ2zq/3avL6QaaiMxTJ5Xpj055iN9WFZZ4O5lMkdBteHRJTW8cs54NJOxWu +imi5V5cCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAERSWwauSCPc/L8my/uRan2Te +2yFPhpk0djZX3dAVL8WtfxUfN2JzPtTnX84XA9s1+ivbrmAJXx5fj267Cz3qWhMe +DGBvtcC1IyIuBwvLqXTLR7sdwdela8wv0kL9Sd2nic9TutoAWii/gt/4uhMdUIaC +/Y4wjylGsB49Ndo4YhYYSq3mtlFs3q9i6wHQHiT+eo8SGhJouPtmmRQURVyu565p +F4ErWjfJXir0xuKhXFSbplQAz/DxwceYMBo7Nhbbo27q/a2ywtrvAkcTisDxszGt +TxzhT5yvDwyd93gN2PQ1VoDat20Xj50egWTh/sVFuq1ruQp6Tk9LhO5L8X3dEQ== +-----END CERTIFICATE----- + +# Issuer: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited +# Subject: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. 
(limits liab.)/(c) 1999 Entrust.net Limited +# Label: "Entrust.net Premium 2048 Secure Server CA" +# Serial: 946069240 +# MD5 Fingerprint: ee:29:31:bc:32:7e:9a:e6:e8:b5:f7:51:b4:34:71:90 +# SHA1 Fingerprint: 50:30:06:09:1d:97:d4:f5:ae:39:f7:cb:e7:92:7d:7d:65:2d:34:31 +# SHA256 Fingerprint: 6d:c4:71:72:e0:1c:bc:b0:bf:62:58:0d:89:5f:e2:b8:ac:9a:d4:f8:73:80:1e:0c:10:b9:c8:37:d2:1e:b1:77 +-----BEGIN CERTIFICATE----- +MIIEKjCCAxKgAwIBAgIEOGPe+DANBgkqhkiG9w0BAQUFADCBtDEUMBIGA1UEChML +RW50cnVzdC5uZXQxQDA+BgNVBAsUN3d3dy5lbnRydXN0Lm5ldC9DUFNfMjA0OCBp +bmNvcnAuIGJ5IHJlZi4gKGxpbWl0cyBsaWFiLikxJTAjBgNVBAsTHChjKSAxOTk5 +IEVudHJ1c3QubmV0IExpbWl0ZWQxMzAxBgNVBAMTKkVudHJ1c3QubmV0IENlcnRp +ZmljYXRpb24gQXV0aG9yaXR5ICgyMDQ4KTAeFw05OTEyMjQxNzUwNTFaFw0yOTA3 +MjQxNDE1MTJaMIG0MRQwEgYDVQQKEwtFbnRydXN0Lm5ldDFAMD4GA1UECxQ3d3d3 +LmVudHJ1c3QubmV0L0NQU18yMDQ4IGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxp +YWIuKTElMCMGA1UECxMcKGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDEzMDEG +A1UEAxMqRW50cnVzdC5uZXQgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgKDIwNDgp +MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArU1LqRKGsuqjIAcVFmQq +K0vRvwtKTY7tgHalZ7d4QMBzQshowNtTK91euHaYNZOLGp18EzoOH1u3Hs/lJBQe +sYGpjX24zGtLA/ECDNyrpUAkAH90lKGdCCmziAv1h3edVc3kw37XamSrhRSGlVuX +MlBvPci6Zgzj/L24ScF2iUkZ/cCovYmjZy/Gn7xxGWC4LeksyZB2ZnuU4q941mVT +XTzWnLLPKQP5L6RQstRIzgUyVYr9smRMDuSYB3Xbf9+5CFVghTAp+XtIpGmG4zU/ +HoZdenoVve8AjhUiVBcAkCaTvA5JaJG/+EfTnZVCwQ5N328mz8MYIWJmQ3DW1cAH +4QIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV +HQ4EFgQUVeSB0RGAvtiJuQijMfmhJAkWuXAwDQYJKoZIhvcNAQEFBQADggEBADub +j1abMOdTmXx6eadNl9cZlZD7Bh/KM3xGY4+WZiT6QBshJ8rmcnPyT/4xmf3IDExo +U8aAghOY+rat2l098c5u9hURlIIM7j+VrxGrD9cv3h8Dj1csHsm7mhpElesYT6Yf +zX1XEC+bBAlahLVu2B064dae0Wx5XnkcFMXj0EyTO2U87d89vqbllRrDtRnDvV5b +u/8j72gZyxKTJ1wDLW8w0B62GqzeWvfRqqgnpv55gcR5mTNXuhKwqeBCbJPKVt7+ +bYQLCIt+jerXmCHG8+c8eS9enNFMFY3h7CI3zJpDC5fcgJCNs2ebb0gIFVbPv/Er +fF6adulZkMV8gzURZVE= +-----END CERTIFICATE----- + +# Issuer: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust +# Subject: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust +# Label: "Baltimore CyberTrust Root" +# Serial: 33554617 +# MD5 Fingerprint: ac:b6:94:a5:9c:17:e0:d7:91:52:9b:b1:97:06:a6:e4 +# SHA1 Fingerprint: d4:de:20:d0:5e:66:fc:53:fe:1a:50:88:2c:78:db:28:52:ca:e4:74 +# SHA256 Fingerprint: 16:af:57:a9:f6:76:b0:ab:12:60:95:aa:5e:ba:de:f2:2a:b3:11:19:d6:44:ac:95:cd:4b:93:db:f3:f2:6a:eb +-----BEGIN CERTIFICATE----- +MIIDdzCCAl+gAwIBAgIEAgAAuTANBgkqhkiG9w0BAQUFADBaMQswCQYDVQQGEwJJ +RTESMBAGA1UEChMJQmFsdGltb3JlMRMwEQYDVQQLEwpDeWJlclRydXN0MSIwIAYD +VQQDExlCYWx0aW1vcmUgQ3liZXJUcnVzdCBSb290MB4XDTAwMDUxMjE4NDYwMFoX +DTI1MDUxMjIzNTkwMFowWjELMAkGA1UEBhMCSUUxEjAQBgNVBAoTCUJhbHRpbW9y +ZTETMBEGA1UECxMKQ3liZXJUcnVzdDEiMCAGA1UEAxMZQmFsdGltb3JlIEN5YmVy +VHJ1c3QgUm9vdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKMEuyKr +mD1X6CZymrV51Cni4eiVgLGw41uOKymaZN+hXe2wCQVt2yguzmKiYv60iNoS6zjr +IZ3AQSsBUnuId9Mcj8e6uYi1agnnc+gRQKfRzMpijS3ljwumUNKoUMMo6vWrJYeK +mpYcqWe4PwzV9/lSEy/CG9VwcPCPwBLKBsua4dnKM3p31vjsufFoREJIE9LAwqSu +XmD+tqYF/LTdB1kC1FkYmGP1pWPgkAx9XbIGevOF6uvUA65ehD5f/xXtabz5OTZy +dc93Uk3zyZAsuT3lySNTPx8kmCFcB5kpvcY67Oduhjprl3RjM71oGDHweI12v/ye +jl0qhqdNkNwnGjkCAwEAAaNFMEMwHQYDVR0OBBYEFOWdWTCCR1jMrPoIVDaGezq1 +BE3wMBIGA1UdEwEB/wQIMAYBAf8CAQMwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3 +DQEBBQUAA4IBAQCFDF2O5G9RaEIFoN27TyclhAO992T9Ldcw46QQF+vaKSm2eT92 +9hkTI7gQCvlYpNRhcL0EYWoSihfVCr3FvDB81ukMJY2GQE/szKN+OMY3EU/t3Wgx +jkzSswF07r51XgdIGn9w/xZchMB5hbgF/X++ZRGjD8ACtPhSNzkE1akxehi/oCr0 +Epn3o0WC4zxe9Z2etciefC7IpJ5OCBRLbf1wbWsaY71k5h+3zvDyny67G7fyUIhz 
+ksLi4xaNmjICq44Y3ekQEe5+NauQrz4wlHrQMz2nZQ/1/I6eYs9HRCwBXbsdtTLS +R9I4LtD+gdwyah617jzV/OeBHRnDJELqYzmp +-----END CERTIFICATE----- + +# Issuer: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network +# Subject: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network +# Label: "AddTrust External Root" +# Serial: 1 +# MD5 Fingerprint: 1d:35:54:04:85:78:b0:3f:42:42:4d:bf:20:73:0a:3f +# SHA1 Fingerprint: 02:fa:f3:e2:91:43:54:68:60:78:57:69:4d:f5:e4:5b:68:85:18:68 +# SHA256 Fingerprint: 68:7f:a4:51:38:22:78:ff:f0:c8:b1:1f:8d:43:d5:76:67:1c:6e:b2:bc:ea:b4:13:fb:83:d9:65:d0:6d:2f:f2 +-----BEGIN CERTIFICATE----- +MIIENjCCAx6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBvMQswCQYDVQQGEwJTRTEU +MBIGA1UEChMLQWRkVHJ1c3QgQUIxJjAkBgNVBAsTHUFkZFRydXN0IEV4dGVybmFs +IFRUUCBOZXR3b3JrMSIwIAYDVQQDExlBZGRUcnVzdCBFeHRlcm5hbCBDQSBSb290 +MB4XDTAwMDUzMDEwNDgzOFoXDTIwMDUzMDEwNDgzOFowbzELMAkGA1UEBhMCU0Ux +FDASBgNVBAoTC0FkZFRydXN0IEFCMSYwJAYDVQQLEx1BZGRUcnVzdCBFeHRlcm5h +bCBUVFAgTmV0d29yazEiMCAGA1UEAxMZQWRkVHJ1c3QgRXh0ZXJuYWwgQ0EgUm9v +dDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALf3GjPm8gAELTngTlvt +H7xsD821+iO2zt6bETOXpClMfZOfvUq8k+0DGuOPz+VtUFrWlymUWoCwSXrbLpX9 +uMq/NzgtHj6RQa1wVsfwTz/oMp50ysiQVOnGXw94nZpAPA6sYapeFI+eh6FqUNzX +mk6vBbOmcZSccbNQYArHE504B4YCqOmoaSYYkKtMsE8jqzpPhNjfzp/haW+710LX +a0Tkx63ubUFfclpxCDezeWWkWaCUN/cALw3CknLa0Dhy2xSoRcRdKn23tNbE7qzN +E0S3ySvdQwAl+mG5aWpYIxG3pzOPVnVZ9c0p10a3CitlttNCbxWyuHv77+ldU9U0 +WicCAwEAAaOB3DCB2TAdBgNVHQ4EFgQUrb2YejS0Jvf6xCZU7wO94CTLVBowCwYD +VR0PBAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wgZkGA1UdIwSBkTCBjoAUrb2YejS0 +Jvf6xCZU7wO94CTLVBqhc6RxMG8xCzAJBgNVBAYTAlNFMRQwEgYDVQQKEwtBZGRU +cnVzdCBBQjEmMCQGA1UECxMdQWRkVHJ1c3QgRXh0ZXJuYWwgVFRQIE5ldHdvcmsx +IjAgBgNVBAMTGUFkZFRydXN0IEV4dGVybmFsIENBIFJvb3SCAQEwDQYJKoZIhvcN +AQEFBQADggEBALCb4IUlwtYj4g+WBpKdQZic2YR5gdkeWxQHIzZlj7DYd7usQWxH +YINRsPkyPef89iYTx4AWpb9a/IfPeHmJIZriTAcKhjW88t5RxNKWt9x+Tu5w/Rw5 +6wwCURQtjr0W4MHfRnXnJK3s9EK0hZNwEGe6nQY1ShjTK3rMUUKhemPR5ruhxSvC +Nr4TDea9Y355e6cJDUCrat2PisP29owaQgVR1EX1n6diIWgVIEM8med8vSTYqZEX +c4g/VhsxOBi0cQ+azcgOno4uG+GMmIPLHzHxREzGBHNJdmAPx/i9F4BrLunMTA5a +mnkPIAou1Z5jJh5VkpTYghdae9C8x49OhgQ= +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. +# Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. 
+# Label: "Entrust Root Certification Authority" +# Serial: 1164660820 +# MD5 Fingerprint: d6:a5:c3:ed:5d:dd:3e:00:c1:3d:87:92:1f:1d:3f:e4 +# SHA1 Fingerprint: b3:1e:b1:b7:40:e3:6c:84:02:da:dc:37:d4:4d:f5:d4:67:49:52:f9 +# SHA256 Fingerprint: 73:c1:76:43:4f:1b:c6:d5:ad:f4:5b:0e:76:e7:27:28:7c:8d:e5:76:16:c1:e6:e6:14:1a:2b:2c:bc:7d:8e:4c +-----BEGIN CERTIFICATE----- +MIIEkTCCA3mgAwIBAgIERWtQVDANBgkqhkiG9w0BAQUFADCBsDELMAkGA1UEBhMC +VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xOTA3BgNVBAsTMHd3dy5lbnRydXN0 +Lm5ldC9DUFMgaXMgaW5jb3Jwb3JhdGVkIGJ5IHJlZmVyZW5jZTEfMB0GA1UECxMW +KGMpIDIwMDYgRW50cnVzdCwgSW5jLjEtMCsGA1UEAxMkRW50cnVzdCBSb290IENl +cnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA2MTEyNzIwMjM0MloXDTI2MTEyNzIw +NTM0MlowgbAxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMTkw +NwYDVQQLEzB3d3cuZW50cnVzdC5uZXQvQ1BTIGlzIGluY29ycG9yYXRlZCBieSBy +ZWZlcmVuY2UxHzAdBgNVBAsTFihjKSAyMDA2IEVudHJ1c3QsIEluYy4xLTArBgNV +BAMTJEVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASIwDQYJ +KoZIhvcNAQEBBQADggEPADCCAQoCggEBALaVtkNC+sZtKm9I35RMOVcF7sN5EUFo +Nu3s/poBj6E4KPz3EEZmLk0eGrEaTsbRwJWIsMn/MYszA9u3g3s+IIRe7bJWKKf4 +4LlAcTfFy0cOlypowCKVYhXbR9n10Cv/gkvJrT7eTNuQgFA/CYqEAOwwCj0Yzfv9 +KlmaI5UXLEWeH25DeW0MXJj+SKfFI0dcXv1u5x609mhF0YaDW6KKjbHjKYD+JXGI +rb68j6xSlkuqUY3kEzEZ6E5Nn9uss2rVvDlUccp6en+Q3X0dgNmBu1kmwhH+5pPi +94DkZfs0Nw4pgHBNrziGLp5/V6+eF67rHMsoIV+2HNjnogQi+dPa2MsCAwEAAaOB +sDCBrTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zArBgNVHRAEJDAi +gA8yMDA2MTEyNzIwMjM0MlqBDzIwMjYxMTI3MjA1MzQyWjAfBgNVHSMEGDAWgBRo +kORnpKZTgMeGZqTx90tD+4S9bTAdBgNVHQ4EFgQUaJDkZ6SmU4DHhmak8fdLQ/uE +vW0wHQYJKoZIhvZ9B0EABBAwDhsIVjcuMTo0LjADAgSQMA0GCSqGSIb3DQEBBQUA +A4IBAQCT1DCw1wMgKtD5Y+iRDAUgqV8ZyntyTtSx29CW+1RaGSwMCPeyvIWonX9t +O1KzKtvn1ISMY/YPyyYBkVBs9F8U4pN0wBOeMDpQ47RgxRzwIkSNcUesyBrJ6Zua +AGAT/3B+XxFNSRuzFVJ7yVTav52Vr2ua2J7p8eRDjeIRRDq/r72DQnNSi6q7pynP +9WQcCk3RvKqsnyrQ/39/2n3qse0wJcGE2jTSW3iDVuycNsMm4hH2Z0kdkquM++v/ +eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0tHuu2guQOHXvgR1m +0vdXcDazv/wor3ElhVsT/h5/WrQ8 +-----END CERTIFICATE----- + +# Issuer: CN=GeoTrust Global CA O=GeoTrust Inc. +# Subject: CN=GeoTrust Global CA O=GeoTrust Inc. 
+# Label: "GeoTrust Global CA" +# Serial: 144470 +# MD5 Fingerprint: f7:75:ab:29:fb:51:4e:b7:77:5e:ff:05:3c:99:8e:f5 +# SHA1 Fingerprint: de:28:f4:a4:ff:e5:b9:2f:a3:c5:03:d1:a3:49:a7:f9:96:2a:82:12 +# SHA256 Fingerprint: ff:85:6a:2d:25:1d:cd:88:d3:66:56:f4:50:12:67:98:cf:ab:aa:de:40:79:9c:72:2d:e4:d2:b5:db:36:a7:3a +-----BEGIN CERTIFICATE----- +MIIDVDCCAjygAwIBAgIDAjRWMA0GCSqGSIb3DQEBBQUAMEIxCzAJBgNVBAYTAlVT +MRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMRswGQYDVQQDExJHZW9UcnVzdCBHbG9i +YWwgQ0EwHhcNMDIwNTIxMDQwMDAwWhcNMjIwNTIxMDQwMDAwWjBCMQswCQYDVQQG +EwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEbMBkGA1UEAxMSR2VvVHJ1c3Qg +R2xvYmFsIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2swYYzD9 +9BcjGlZ+W988bDjkcbd4kdS8odhM+KhDtgPpTSEHCIjaWC9mOSm9BXiLnTjoBbdq +fnGk5sRgprDvgOSJKA+eJdbtg/OtppHHmMlCGDUUna2YRpIuT8rxh0PBFpVXLVDv +iS2Aelet8u5fa9IAjbkU+BQVNdnARqN7csiRv8lVK83Qlz6cJmTM386DGXHKTubU +1XupGc1V3sjs0l44U+VcT4wt/lAjNvxm5suOpDkZALeVAjmRCw7+OC7RHQWa9k0+ +bw8HHa8sHo9gOeL6NlMTOdReJivbPagUvTLrGAMoUgRx5aszPeE4uwc2hGKceeoW +MPRfwCvocWvk+QIDAQABo1MwUTAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTA +ephojYn7qwVkDBF9qn1luMrMTjAfBgNVHSMEGDAWgBTAephojYn7qwVkDBF9qn1l +uMrMTjANBgkqhkiG9w0BAQUFAAOCAQEANeMpauUvXVSOKVCUn5kaFOSPeCpilKIn +Z57QzxpeR+nBsqTP3UEaBU6bS+5Kb1VSsyShNwrrZHYqLizz/Tt1kL/6cdjHPTfS +tQWVYrmm3ok9Nns4d0iXrKYgjy6myQzCsplFAMfOEVEiIuCl6rYVSAlk6l5PdPcF +PseKUgzbFbS9bZvlxrFUaKnjaZC2mqUPuLk/IH2uSrW4nOQdtqvmlKXBx4Ot2/Un +hw4EbNX/3aBd7YdStysVAq45pmp06drE57xNNB6pXE0zX5IJL4hmXXeXxx12E6nV +5fEWCRE11azbJHFwLJhWC9kXtNHjUStedejV0NxPNO3CBWaAocvmMw== +-----END CERTIFICATE----- + +# Issuer: CN=GeoTrust Universal CA O=GeoTrust Inc. +# Subject: CN=GeoTrust Universal CA O=GeoTrust Inc. +# Label: "GeoTrust Universal CA" +# Serial: 1 +# MD5 Fingerprint: 92:65:58:8b:a2:1a:31:72:73:68:5c:b4:a5:7a:07:48 +# SHA1 Fingerprint: e6:21:f3:35:43:79:05:9a:4b:68:30:9d:8a:2f:74:22:15:87:ec:79 +# SHA256 Fingerprint: a0:45:9b:9f:63:b2:25:59:f5:fa:5d:4c:6d:b3:f9:f7:2f:f1:93:42:03:35:78:f0:73:bf:1d:1b:46:cb:b9:12 +-----BEGIN CERTIFICATE----- +MIIFaDCCA1CgAwIBAgIBATANBgkqhkiG9w0BAQUFADBFMQswCQYDVQQGEwJVUzEW +MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEeMBwGA1UEAxMVR2VvVHJ1c3QgVW5pdmVy +c2FsIENBMB4XDTA0MDMwNDA1MDAwMFoXDTI5MDMwNDA1MDAwMFowRTELMAkGA1UE +BhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xHjAcBgNVBAMTFUdlb1RydXN0 +IFVuaXZlcnNhbCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAKYV +VaCjxuAfjJ0hUNfBvitbtaSeodlyWL0AG0y/YckUHUWCq8YdgNY96xCcOq9tJPi8 +cQGeBvV8Xx7BDlXKg5pZMK4ZyzBIle0iN430SppyZj6tlcDgFgDgEB8rMQ7XlFTT +QjOgNB0eRXbdT8oYN+yFFXoZCPzVx5zw8qkuEKmS5j1YPakWaDwvdSEYfyh3peFh +F7em6fgemdtzbvQKoiFs7tqqhZJmr/Z6a4LauiIINQ/PQvE1+mrufislzDoR5G2v +c7J2Ha3QsnhnGqQ5HFELZ1aD/ThdDc7d8Lsrlh/eezJS/R27tQahsiFepdaVaH/w +mZ7cRQg+59IJDTWU3YBOU5fXtQlEIGQWFwMCTFMNaN7VqnJNk22CDtucvc+081xd +VHppCZbW2xHBjXWotM85yM48vCR85mLK4b19p71XZQvk/iXttmkQ3CgaRr0BHdCX +teGYO8A3ZNY9lO4L4fUorgtWv3GLIylBjobFS1J72HGrH4oVpjuDWtdYAVHGTEHZ +f9hBZ3KiKN9gg6meyHv8U3NyWfWTehd2Ds735VzZC1U0oqpbtWpU5xPKV+yXbfRe +Bi9Fi1jUIxaS5BZuKGNZMN9QAZxjiRqf2xeUgnA3wySemkfWWspOqGmJch+RbNt+ +nhutxx9z3SxPGWX9f5NAEC7S8O08ni4oPmkmM8V7AgMBAAGjYzBhMA8GA1UdEwEB +/wQFMAMBAf8wHQYDVR0OBBYEFNq7LqqwDLiIJlF0XG0D08DYj3rWMB8GA1UdIwQY +MBaAFNq7LqqwDLiIJlF0XG0D08DYj3rWMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG +9w0BAQUFAAOCAgEAMXjmx7XfuJRAyXHEqDXsRh3ChfMoWIawC/yOsjmPRFWrZIRc +aanQmjg8+uUfNeVE44B5lGiku8SfPeE0zTBGi1QrlaXv9z+ZhP015s8xxtxqv6fX +IwjhmF7DWgh2qaavdy+3YL1ERmrvl/9zlcGO6JP7/TG37FcREUWbMPEaiDnBTzyn +ANXH/KttgCJwpQzgXQQpAvvLoJHRfNbDflDVnVi+QTjruXU8FdmbyUqDWcDaU/0z +uzYYm4UPFd3uLax2k7nZAY1IEKj79TiG8dsKxr2EoyNB3tZ3b4XUhRxQ4K5RirqN 
+Pnbiucon8l+f725ZDQbYKxek0nxru18UGkiPGkzns0ccjkxFKyDuSN/n3QmOGKja +QI2SJhFTYXNd673nxE0pN2HrrDktZy4W1vUAg4WhzH92xH3kt0tm7wNFYGm2DFKW +koRepqO1pD4r2czYG0eq8kTaT/kD6PAUyz/zg97QwVTjt+gKN02LIFkDMBmhLMi9 +ER/frslKxfMnZmaGrGiR/9nmUxwPi1xpZQomyB40w11Re9epnAahNt3ViZS82eQt +DF4JbAiXfKM9fJP/P6EUp8+1Xevb2xzEdt+Iub1FBZUbrvxGakyvSOPOrg/Sfuvm +bJxPgWp6ZKy7PtXny3YuxadIwVyQD8vIP/rmMuGNG2+k5o7Y+SlIis5z/iw= +-----END CERTIFICATE----- + +# Issuer: CN=GeoTrust Universal CA 2 O=GeoTrust Inc. +# Subject: CN=GeoTrust Universal CA 2 O=GeoTrust Inc. +# Label: "GeoTrust Universal CA 2" +# Serial: 1 +# MD5 Fingerprint: 34:fc:b8:d0:36:db:9e:14:b3:c2:f2:db:8f:e4:94:c7 +# SHA1 Fingerprint: 37:9a:19:7b:41:85:45:35:0c:a6:03:69:f3:3c:2e:af:47:4f:20:79 +# SHA256 Fingerprint: a0:23:4f:3b:c8:52:7c:a5:62:8e:ec:81:ad:5d:69:89:5d:a5:68:0d:c9:1d:1c:b8:47:7f:33:f8:78:b9:5b:0b +-----BEGIN CERTIFICATE----- +MIIFbDCCA1SgAwIBAgIBATANBgkqhkiG9w0BAQUFADBHMQswCQYDVQQGEwJVUzEW +MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1c3QgVW5pdmVy +c2FsIENBIDIwHhcNMDQwMzA0MDUwMDAwWhcNMjkwMzA0MDUwMDAwWjBHMQswCQYD +VQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1 +c3QgVW5pdmVyc2FsIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC +AQCzVFLByT7y2dyxUxpZKeexw0Uo5dfR7cXFS6GqdHtXr0om/Nj1XqduGdt0DE81 +WzILAePb63p3NeqqWuDW6KFXlPCQo3RWlEQwAx5cTiuFJnSCegx2oG9NzkEtoBUG +FF+3Qs17j1hhNNwqCPkuwwGmIkQcTAeC5lvO0Ep8BNMZcyfwqph/Lq9O64ceJHdq +XbboW0W63MOhBW9Wjo8QJqVJwy7XQYci4E+GymC16qFjwAGXEHm9ADwSbSsVsaxL +se4YuU6W3Nx2/zu+z18DwPw76L5GG//aQMJS9/7jOvdqdzXQ2o3rXhhqMcceujwb +KNZrVMaqW9eiLBsZzKIC9ptZvTdrhrVtgrrY6slWvKk2WP0+GfPtDCapkzj4T8Fd +IgbQl+rhrcZV4IErKIM6+vR7IVEAvlI4zs1meaj0gVbi0IMJR1FbUGrP20gaXT73 +y/Zl92zxlfgCOzJWgjl6W70viRu/obTo/3+NjN8D8WBOWBFM66M/ECuDmgFz2ZRt +hAAnZqzwcEAJQpKtT5MNYQlRJNiS1QuUYbKHsu3/mjX/hVTK7URDrBs8FmtISgoc +QIgfksILAAX/8sgCSqSqqcyZlpwvWOB94b67B9xfBHJcMTTD7F8t4D1kkCLm0ey4 +Lt1ZrtmhN79UNdxzMk+MBB4zsslG8dhcyFVQyWi9qLo2CQIDAQABo2MwYTAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAfBgNV +HSMEGDAWgBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAOBgNVHQ8BAf8EBAMCAYYwDQYJ +KoZIhvcNAQEFBQADggIBAGbBxiPz2eAubl/oz66wsCVNK/g7WJtAJDday6sWSf+z +dXkzoS9tcBc0kf5nfo/sm+VegqlVHy/c1FEHEv6sFj4sNcZj/NwQ6w2jqtB8zNHQ +L1EuxBRa3ugZ4T7GzKQp5y6EqgYweHZUcyiYWTjgAA1i00J9IZ+uPTqM1fp3DRgr +Fg5fNuH8KrUwJM/gYwx7WBr+mbpCErGR9Hxo4sjoryzqyX6uuyo9DRXcNJW2GHSo +ag/HtPQTxORb7QrSpJdMKu0vbBKJPfEncKpqA1Ihn0CoZ1Dy81of398j9tx4TuaY +T1U6U+Pv8vSfx3zYWK8pIpe44L2RLrB27FcRz+8pRPPphXpgY+RdM4kX2TGq2tbz +GDVyz4crL2MjhF2EjD9XoIj8mZEoJmmZ1I+XRL6O1UixpCgp8RW04eWe3fiPpm8m +1wk8OhwRDqZsN/etRIcsKMfYdIKz0G9KV7s1KSegi+ghp4dkNl3M2Basx7InQJJV +OCiNUW7dFGdTbHFcJoRNdVq2fmBWqU2t+5sel/MN2dKXVHfaPRK34B7vCAas+YWH +6aLcr34YEoP9VhdBLtUpgn2Z9DH2canPLAEnpQW5qrJITirvn5NSUZU8UnOOVkwX +QMAJKOSLakhT2+zNVVXxxvjpoixMptEmX36vWkzaH6byHCx+rgIW0lbQL1dTR+iS +-----END CERTIFICATE----- + +# Issuer: CN=Visa eCommerce Root O=VISA OU=Visa International Service Association +# Subject: CN=Visa eCommerce Root O=VISA OU=Visa International Service Association +# Label: "Visa eCommerce Root" +# Serial: 25952180776285836048024890241505565794 +# MD5 Fingerprint: fc:11:b8:d8:08:93:30:00:6d:23:f9:7e:eb:52:1e:02 +# SHA1 Fingerprint: 70:17:9b:86:8c:00:a4:fa:60:91:52:22:3f:9f:3e:32:bd:e0:05:62 +# SHA256 Fingerprint: 69:fa:c9:bd:55:fb:0a:c7:8d:53:bb:ee:5c:f1:d5:97:98:9f:d0:aa:ab:20:a2:51:51:bd:f1:73:3e:e7:d1:22 +-----BEGIN CERTIFICATE----- +MIIDojCCAoqgAwIBAgIQE4Y1TR0/BvLB+WUF1ZAcYjANBgkqhkiG9w0BAQUFADBr +MQswCQYDVQQGEwJVUzENMAsGA1UEChMEVklTQTEvMC0GA1UECxMmVmlzYSBJbnRl 
+cm5hdGlvbmFsIFNlcnZpY2UgQXNzb2NpYXRpb24xHDAaBgNVBAMTE1Zpc2EgZUNv +bW1lcmNlIFJvb3QwHhcNMDIwNjI2MDIxODM2WhcNMjIwNjI0MDAxNjEyWjBrMQsw +CQYDVQQGEwJVUzENMAsGA1UEChMEVklTQTEvMC0GA1UECxMmVmlzYSBJbnRlcm5h +dGlvbmFsIFNlcnZpY2UgQXNzb2NpYXRpb24xHDAaBgNVBAMTE1Zpc2EgZUNvbW1l +cmNlIFJvb3QwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvV95WHm6h +2mCxlCfLF9sHP4CFT8icttD0b0/Pmdjh28JIXDqsOTPHH2qLJj0rNfVIsZHBAk4E +lpF7sDPwsRROEW+1QK8bRaVK7362rPKgH1g/EkZgPI2h4H3PVz4zHvtH8aoVlwdV +ZqW1LS7YgFmypw23RuwhY/81q6UCzyr0TP579ZRdhE2o8mCP2w4lPJ9zcc+U30rq +299yOIzzlr3xF7zSujtFWsan9sYXiwGd/BmoKoMWuDpI/k4+oKsGGelT84ATB+0t +vz8KPFUgOSwsAGl0lUq8ILKpeeUYiZGo3BxN77t+Nwtd/jmliFKMAGzsGHxBvfaL +dXe6YJ2E5/4tAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD +AgEGMB0GA1UdDgQWBBQVOIMPPyw/cDMezUb+B4wg4NfDtzANBgkqhkiG9w0BAQUF +AAOCAQEAX/FBfXxcCLkr4NWSR/pnXKUTwwMhmytMiUbPWU3J/qVAtmPN3XEolWcR +zCSs00Rsca4BIGsDoo8Ytyk6feUWYFN4PMCvFYP3j1IzJL1kk5fui/fbGKhtcbP3 +LBfQdCVp9/5rPJS+TUtBjE7ic9DjkCJzQ83z7+pzzkWKsKZJ/0x9nXGIxHYdkFsd +7v3M9+79YKWxehZx0RbQfBI8bGmX265fOZpwLwU8GUYEmSA20GBuYQa7FkKMcPcw +++DbZqMAAb3mLNqRX6BGi01qnD093QVG/na/oAo85ADmJ7f/hC3euiInlhBx6yLt +398znM/jra6O1I7mT1GvFpLgXPYHDw== +-----END CERTIFICATE----- + +# Issuer: CN=AAA Certificate Services O=Comodo CA Limited +# Subject: CN=AAA Certificate Services O=Comodo CA Limited +# Label: "Comodo AAA Services root" +# Serial: 1 +# MD5 Fingerprint: 49:79:04:b0:eb:87:19:ac:47:b0:bc:11:51:9b:74:d0 +# SHA1 Fingerprint: d1:eb:23:a4:6d:17:d6:8f:d9:25:64:c2:f1:f1:60:17:64:d8:e3:49 +# SHA256 Fingerprint: d7:a7:a0:fb:5d:7e:27:31:d7:71:e9:48:4e:bc:de:f7:1d:5f:0c:3e:0a:29:48:78:2b:c8:3e:e0:ea:69:9e:f4 +-----BEGIN CERTIFICATE----- +MIIEMjCCAxqgAwIBAgIBATANBgkqhkiG9w0BAQUFADB7MQswCQYDVQQGEwJHQjEb +MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow +GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEhMB8GA1UEAwwYQUFBIENlcnRpZmlj +YXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVowezEL +MAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE +BwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxITAfBgNVBAMM +GEFBQSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEBBQADggEP +ADCCAQoCggEBAL5AnfRu4ep2hxxNRUSOvkbIgwadwSr+GB+O5AL686tdUIoWMQua +BtDFcCLNSS1UY8y2bmhGC1Pqy0wkwLxyTurxFa70VJoSCsN6sjNg4tqJVfMiWPPe +3M/vg4aijJRPn2jymJBGhCfHdr/jzDUsi14HZGWCwEiwqJH5YZ92IFCokcdmtet4 +YgNW8IoaE+oxox6gmf049vYnMlhvB/VruPsUK6+3qszWY19zjNoFmag4qMsXeDZR +rOme9Hg6jc8P2ULimAyrL58OAd7vn5lJ8S3frHRNG5i1R8XlKdH5kBjHYpy+g8cm +ez6KJcfA3Z3mNWgQIJ2P2N7Sw4ScDV7oL8kCAwEAAaOBwDCBvTAdBgNVHQ4EFgQU +oBEKIz6W8Qfs4q8p74Klf9AwpLQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQF +MAMBAf8wewYDVR0fBHQwcjA4oDagNIYyaHR0cDovL2NybC5jb21vZG9jYS5jb20v +QUFBQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmwwNqA0oDKGMGh0dHA6Ly9jcmwuY29t +b2RvLm5ldC9BQUFDZXJ0aWZpY2F0ZVNlcnZpY2VzLmNybDANBgkqhkiG9w0BAQUF +AAOCAQEACFb8AvCb6P+k+tZ7xkSAzk/ExfYAWMymtrwUSWgEdujm7l3sAg9g1o1Q +GE8mTgHj5rCl7r+8dFRBv/38ErjHT1r0iWAFf2C3BUrz9vHCv8S5dIa2LX1rzNLz +Rt0vxuBqw8M0Ayx9lt1awg6nCpnBBYurDC/zXDrPbDdVCYfeU0BsWO/8tqtlbgT2 +G9w84FoVxp7Z8VlIMCFlA2zs6SFz7JsDoeA3raAVGI/6ugLOpyypEBMs1OUIJqsi +l2D4kF501KKaU73yqWjgom7C12yxow+ev+to51byrvLjKzg6CYG1a4XXvi3tPxq3 +smPi9WIsgtRqAEFQ8TmDn5XpNpaYbg== +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root Certification Authority O=QuoVadis Limited OU=Root Certification Authority +# Subject: CN=QuoVadis Root Certification Authority O=QuoVadis Limited OU=Root Certification Authority +# Label: "QuoVadis Root CA" +# Serial: 985026699 +# MD5 Fingerprint: 27:de:36:fe:72:b7:00:03:00:9d:f4:f0:1e:6c:04:24 +# SHA1 Fingerprint: 
de:3f:40:bd:50:93:d3:9b:6c:60:f6:da:bc:07:62:01:00:89:76:c9 +# SHA256 Fingerprint: a4:5e:de:3b:bb:f0:9c:8a:e1:5c:72:ef:c0:72:68:d6:93:a2:1c:99:6f:d5:1e:67:ca:07:94:60:fd:6d:88:73 +-----BEGIN CERTIFICATE----- +MIIF0DCCBLigAwIBAgIEOrZQizANBgkqhkiG9w0BAQUFADB/MQswCQYDVQQGEwJC +TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDElMCMGA1UECxMcUm9vdCBDZXJ0 +aWZpY2F0aW9uIEF1dGhvcml0eTEuMCwGA1UEAxMlUXVvVmFkaXMgUm9vdCBDZXJ0 +aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wMTAzMTkxODMzMzNaFw0yMTAzMTcxODMz +MzNaMH8xCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMSUw +IwYDVQQLExxSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MS4wLAYDVQQDEyVR +dW9WYWRpcyBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG +9w0BAQEFAAOCAQ8AMIIBCgKCAQEAv2G1lVO6V/z68mcLOhrfEYBklbTRvM16z/Yp +li4kVEAkOPcahdxYTMukJ0KX0J+DisPkBgNbAKVRHnAEdOLB1Dqr1607BxgFjv2D +rOpm2RgbaIr1VxqYuvXtdj182d6UajtLF8HVj71lODqV0D1VNk7feVcxKh7YWWVJ +WCCYfqtffp/p1k3sg3Spx2zY7ilKhSoGFPlU5tPaZQeLYzcS19Dsw3sgQUSj7cug +F+FxZc4dZjH3dgEZyH0DWLaVSR2mEiboxgx24ONmy+pdpibu5cxfvWenAScOospU +xbF6lR1xHkopigPcakXBpBlebzbNw6Kwt/5cOOJSvPhEQ+aQuwIDAQABo4ICUjCC +Ak4wPQYIKwYBBQUHAQEEMTAvMC0GCCsGAQUFBzABhiFodHRwczovL29jc3AucXVv +dmFkaXNvZmZzaG9yZS5jb20wDwYDVR0TAQH/BAUwAwEB/zCCARoGA1UdIASCAREw +ggENMIIBCQYJKwYBBAG+WAABMIH7MIHUBggrBgEFBQcCAjCBxxqBxFJlbGlhbmNl +IG9uIHRoZSBRdW9WYWRpcyBSb290IENlcnRpZmljYXRlIGJ5IGFueSBwYXJ0eSBh +c3N1bWVzIGFjY2VwdGFuY2Ugb2YgdGhlIHRoZW4gYXBwbGljYWJsZSBzdGFuZGFy +ZCB0ZXJtcyBhbmQgY29uZGl0aW9ucyBvZiB1c2UsIGNlcnRpZmljYXRpb24gcHJh +Y3RpY2VzLCBhbmQgdGhlIFF1b1ZhZGlzIENlcnRpZmljYXRlIFBvbGljeS4wIgYI +KwYBBQUHAgEWFmh0dHA6Ly93d3cucXVvdmFkaXMuYm0wHQYDVR0OBBYEFItLbe3T +KbkGGew5Oanwl4Rqy+/fMIGuBgNVHSMEgaYwgaOAFItLbe3TKbkGGew5Oanwl4Rq +y+/foYGEpIGBMH8xCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1p +dGVkMSUwIwYDVQQLExxSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MS4wLAYD +VQQDEyVRdW9WYWRpcyBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggQ6tlCL +MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOCAQEAitQUtf70mpKnGdSk +fnIYj9lofFIk3WdvOXrEql494liwTXCYhGHoG+NpGA7O+0dQoE7/8CQfvbLO9Sf8 +7C9TqnN7Az10buYWnuulLsS/VidQK2K6vkscPFVcQR0kvoIgR13VRH56FmjffU1R +cHhXHTMe/QKZnAzNCgVPx7uOpHX6Sm2xgI4JVrmcGmD+XcHXetwReNDWXcG31a0y +mQM6isxUJTkxgXsTIlG6Rmyhu576BGxJJnSP0nPrzDCi5upZIof4l/UO/erMkqQW +xFIY6iHOsfHmhIHluqmGKPJDWl0Snawe2ajlCmqnf6CHKc/yiU3U7MXi5nrQNiOK +SnQ2+Q== +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 2 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 2 O=QuoVadis Limited +# Label: "QuoVadis Root CA 2" +# Serial: 1289 +# MD5 Fingerprint: 5e:39:7b:dd:f8:ba:ec:82:e9:ac:62:ba:0c:54:00:2b +# SHA1 Fingerprint: ca:3a:fb:cf:12:40:36:4b:44:b2:16:20:88:80:48:39:19:93:7c:f7 +# SHA256 Fingerprint: 85:a0:dd:7d:d7:20:ad:b7:ff:05:f8:3d:54:2b:20:9d:c7:ff:45:28:f7:d6:77:b1:83:89:fe:a5:e5:c4:9e:86 +-----BEGIN CERTIFICATE----- +MIIFtzCCA5+gAwIBAgICBQkwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x +GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv +b3QgQ0EgMjAeFw0wNjExMjQxODI3MDBaFw0zMTExMjQxODIzMzNaMEUxCzAJBgNV +BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W +YWRpcyBSb290IENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCa +GMpLlA0ALa8DKYrwD4HIrkwZhR0In6spRIXzL4GtMh6QRr+jhiYaHv5+HBg6XJxg +Fyo6dIMzMH1hVBHL7avg5tKifvVrbxi3Cgst/ek+7wrGsxDp3MJGF/hd/aTa/55J +WpzmM+Yklvc/ulsrHHo1wtZn/qtmUIttKGAr79dgw8eTvI02kfN/+NsRE8Scd3bB +rrcCaoF6qUWD4gXmuVbBlDePSHFjIuwXZQeVikvfj8ZaCuWw419eaxGrDPmF60Tp ++ARz8un+XJiM9XOva7R+zdRcAitMOeGylZUtQofX1bOQQ7dsE/He3fbE+Ik/0XX1 +ksOR1YqI0JDs3G3eicJlcZaLDQP9nL9bFqyS2+r+eXyt66/3FsvbzSUr5R/7mp/i +Ucw6UwxI5g69ybR2BlLmEROFcmMDBOAENisgGQLodKcftslWZvB1JdxnwQ5hYIiz 
+PtGo/KPaHbDRsSNU30R2be1B2MGyIrZTHN81Hdyhdyox5C315eXbyOD/5YDXC2Og +/zOhD7osFRXql7PSorW+8oyWHhqPHWykYTe5hnMz15eWniN9gqRMgeKh0bpnX5UH +oycR7hYQe7xFSkyyBNKr79X9DFHOUGoIMfmR2gyPZFwDwzqLID9ujWc9Otb+fVuI +yV77zGHcizN300QyNQliBJIWENieJ0f7OyHj+OsdWwIDAQABo4GwMIGtMA8GA1Ud +EwEB/wQFMAMBAf8wCwYDVR0PBAQDAgEGMB0GA1UdDgQWBBQahGK8SEwzJQTU7tD2 +A8QZRtGUazBuBgNVHSMEZzBlgBQahGK8SEwzJQTU7tD2A8QZRtGUa6FJpEcwRTEL +MAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMT +ElF1b1ZhZGlzIFJvb3QgQ0EgMoICBQkwDQYJKoZIhvcNAQEFBQADggIBAD4KFk2f +BluornFdLwUvZ+YTRYPENvbzwCYMDbVHZF34tHLJRqUDGCdViXh9duqWNIAXINzn +g/iN/Ae42l9NLmeyhP3ZRPx3UIHmfLTJDQtyU/h2BwdBR5YM++CCJpNVjP4iH2Bl +fF/nJrP3MpCYUNQ3cVX2kiF495V5+vgtJodmVjB3pjd4M1IQWK4/YY7yarHvGH5K +WWPKjaJW1acvvFYfzznB4vsKqBUsfU16Y8Zsl0Q80m/DShcK+JDSV6IZUaUtl0Ha +B0+pUNqQjZRG4T7wlP0QADj1O+hA4bRuVhogzG9Yje0uRY/W6ZM/57Es3zrWIozc +hLsib9D45MY56QSIPMO661V6bYCZJPVsAfv4l7CUW+v90m/xd2gNNWQjrLhVoQPR +TUIZ3Ph1WVaj+ahJefivDrkRoHy3au000LYmYjgahwz46P0u05B/B5EqHdZ+XIWD +mbA4CD/pXvk1B+TJYm5Xf6dQlfe6yJvmjqIBxdZmv3lh8zwc4bmCXF2gw+nYSL0Z +ohEUGW6yhhtoPkg3Goi3XZZenMfvJ2II4pEZXNLxId26F0KCl3GBUzGpn/Z9Yr9y +4aOTHcyKJloJONDO1w2AFrR4pTqHTI2KpdVGl/IsELm8VCLAAVBpQ570su9t+Oza +8eOx79+Rj1QqCyXBJhnEUhAFZdWCEOrCMc0u +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 3" +# Serial: 1478 +# MD5 Fingerprint: 31:85:3c:62:94:97:63:b9:aa:fd:89:4e:af:6f:e0:cf +# SHA1 Fingerprint: 1f:49:14:f7:d8:74:95:1d:dd:ae:02:c0:be:fd:3a:2d:82:75:51:85 +# SHA256 Fingerprint: 18:f1:fc:7f:20:5d:f8:ad:dd:eb:7f:e0:07:dd:57:e3:af:37:5a:9c:4d:8d:73:54:6b:f4:f1:fe:d1:e1:8d:35 +-----BEGIN CERTIFICATE----- +MIIGnTCCBIWgAwIBAgICBcYwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x +GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv +b3QgQ0EgMzAeFw0wNjExMjQxOTExMjNaFw0zMTExMjQxOTA2NDRaMEUxCzAJBgNV +BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W +YWRpcyBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDM +V0IWVJzmmNPTTe7+7cefQzlKZbPoFog02w1ZkXTPkrgEQK0CSzGrvI2RaNggDhoB +4hp7Thdd4oq3P5kazethq8Jlph+3t723j/z9cI8LoGe+AaJZz3HmDyl2/7FWeUUr +H556VOijKTVopAFPD6QuN+8bv+OPEKhyq1hX51SGyMnzW9os2l2ObjyjPtr7guXd +8lyyBTNvijbO0BNO/79KDDRMpsMhvVAEVeuxu537RR5kFd5VAYwCdrXLoT9Cabwv +vWhDFlaJKjdhkf2mrk7AyxRllDdLkgbvBNDInIjbC3uBr7E9KsRlOni27tyAsdLT +mZw67mtaa7ONt9XOnMK+pUsvFrGeaDsGb659n/je7Mwpp5ijJUMv7/FfJuGITfhe +btfZFG4ZM2mnO4SJk8RTVROhUXhA+LjJou57ulJCg54U7QVSWllWp5f8nT8KKdjc +T5EOE7zelaTfi5m+rJsziO+1ga8bxiJTyPbH7pcUsMV8eFLI8M5ud2CEpukqdiDt +WAEXMJPpGovgc2PZapKUSU60rUqFxKMiMPwJ7Wgic6aIDFUhWMXhOp8q3crhkODZ +c6tsgLjoC2SToJyMGf+z0gzskSaHirOi4XCPLArlzW1oUevaPwV/izLmE1xr/l9A +4iLItLRkT9a6fUg+qGkM17uGcclzuD87nSVL2v9A6wIDAQABo4IBlTCCAZEwDwYD +VR0TAQH/BAUwAwEB/zCB4QYDVR0gBIHZMIHWMIHTBgkrBgEEAb5YAAMwgcUwgZMG +CCsGAQUFBwICMIGGGoGDQW55IHVzZSBvZiB0aGlzIENlcnRpZmljYXRlIGNvbnN0 +aXR1dGVzIGFjY2VwdGFuY2Ugb2YgdGhlIFF1b1ZhZGlzIFJvb3QgQ0EgMyBDZXJ0 +aWZpY2F0ZSBQb2xpY3kgLyBDZXJ0aWZpY2F0aW9uIFByYWN0aWNlIFN0YXRlbWVu +dC4wLQYIKwYBBQUHAgEWIWh0dHA6Ly93d3cucXVvdmFkaXNnbG9iYWwuY29tL2Nw +czALBgNVHQ8EBAMCAQYwHQYDVR0OBBYEFPLAE+CCQz777i9nMpY1XNu4ywLQMG4G +A1UdIwRnMGWAFPLAE+CCQz777i9nMpY1XNu4ywLQoUmkRzBFMQswCQYDVQQGEwJC +TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDEbMBkGA1UEAxMSUXVvVmFkaXMg +Um9vdCBDQSAzggIFxjANBgkqhkiG9w0BAQUFAAOCAgEAT62gLEz6wPJv92ZVqyM0 +7ucp2sNbtrCD2dDQ4iH782CnO11gUyeim/YIIirnv6By5ZwkajGxkHon24QRiSem +d1o417+shvzuXYO8BsbRd2sPbSQvS3pspweWyuOEn62Iix2rFo1bZhfZFvSLgNLd ++LJ2w/w4E6oM3kJpK27zPOuAJ9v1pkQNn1pVWQvVDVJIxa6f8i+AxeoyUDUSly7B 
+4f/xI4hROJ/yZlZ25w9Rl6VSDE1JUZU2Pb+iSwwQHYaZTKrzchGT5Or2m9qoXadN +t54CrnMAyNojA+j56hl0YgCUyyIgvpSnWbWCar6ZeXqp8kokUvd0/bpO5qgdAm6x +DYBEwa7TIzdfu4V8K5Iu6H6li92Z4b8nby1dqnuH/grdS/yO9SbkbnBCbjPsMZ57 +k8HkyWkaPcBrTiJt7qtYTcbQQcEr6k8Sh17rRdhs9ZgC06DYVYoGmRmioHfRMJ6s +zHXug/WwYjnPbFfiTNKRCw51KBuav/0aQ/HKd/s7j2G4aSgWQgRecCocIdiP4b0j +Wy10QJLZYxkNc91pvGJHvOB0K7Lrfb5BG7XARsWhIstfTsEokt4YutUqKLsRixeT +mJlglFwjz1onl14LBQaTNx47aTbrqZ5hHY8y2o4M1nQ+ewkk2gF3R8Q7zTSMmfXK +4SVhM7JZG+Ju1zdXtg2pEto= +-----END CERTIFICATE----- + +# Issuer: O=SECOM Trust.net OU=Security Communication RootCA1 +# Subject: O=SECOM Trust.net OU=Security Communication RootCA1 +# Label: "Security Communication Root CA" +# Serial: 0 +# MD5 Fingerprint: f1:bc:63:6a:54:e0:b5:27:f5:cd:e7:1a:e3:4d:6e:4a +# SHA1 Fingerprint: 36:b1:2b:49:f9:81:9e:d7:4c:9e:bc:38:0f:c6:56:8f:5d:ac:b2:f7 +# SHA256 Fingerprint: e7:5e:72:ed:9f:56:0e:ec:6e:b4:80:00:73:a4:3f:c3:ad:19:19:5a:39:22:82:01:78:95:97:4a:99:02:6b:6c +-----BEGIN CERTIFICATE----- +MIIDWjCCAkKgAwIBAgIBADANBgkqhkiG9w0BAQUFADBQMQswCQYDVQQGEwJKUDEY +MBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYDVQQLEx5TZWN1cml0eSBDb21t +dW5pY2F0aW9uIFJvb3RDQTEwHhcNMDMwOTMwMDQyMDQ5WhcNMjMwOTMwMDQyMDQ5 +WjBQMQswCQYDVQQGEwJKUDEYMBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYD +VQQLEx5TZWN1cml0eSBDb21tdW5pY2F0aW9uIFJvb3RDQTEwggEiMA0GCSqGSIb3 +DQEBAQUAA4IBDwAwggEKAoIBAQCzs/5/022x7xZ8V6UMbXaKL0u/ZPtM7orw8yl8 +9f/uKuDp6bpbZCKamm8sOiZpUQWZJtzVHGpxxpp9Hp3dfGzGjGdnSj74cbAZJ6kJ +DKaVv0uMDPpVmDvY6CKhS3E4eayXkmmziX7qIWgGmBSWh9JhNrxtJ1aeV+7AwFb9 +Ms+k2Y7CI9eNqPPYJayX5HA49LY6tJ07lyZDo6G8SVlyTCMwhwFY9k6+HGhWZq/N +QV3Is00qVUarH9oe4kA92819uZKAnDfdDJZkndwi92SL32HeFZRSFaB9UslLqCHJ +xrHty8OVYNEP8Ktw+N/LTX7s1vqr2b1/VPKl6Xn62dZ2JChzAgMBAAGjPzA9MB0G +A1UdDgQWBBSgc0mZaNyFW2XjmygvV5+9M7wHSDALBgNVHQ8EBAMCAQYwDwYDVR0T +AQH/BAUwAwEB/zANBgkqhkiG9w0BAQUFAAOCAQEAaECpqLvkT115swW1F7NgE+vG +kl3g0dNq/vu+m22/xwVtWSDEHPC32oRYAmP6SBbvT6UL90qY8j+eG61Ha2POCEfr +Uj94nK9NrvjVT8+amCoQQTlSxN3Zmw7vkwGusi7KaEIkQmywszo+zenaSMQVy+n5 +Bw+SUEmK3TGXX8npN6o7WWWXlDLJs58+OmJYxUmtYg5xpTKqL8aJdkNAExNnPaJU +JRDL8Try2frbSVa7pv6nQTXD4IhhyYjH3zYQIphZ6rBK+1YWc26sTfcioU+tHXot +RSflMMFe8toTyyVCUZVHA4xsIcx0Qu1T/zOLjw9XARYvz6buyXAiFL39vmwLAw== +-----END CERTIFICATE----- + +# Issuer: CN=Sonera Class2 CA O=Sonera +# Subject: CN=Sonera Class2 CA O=Sonera +# Label: "Sonera Class 2 Root CA" +# Serial: 29 +# MD5 Fingerprint: a3:ec:75:0f:2e:88:df:fa:48:01:4e:0b:5c:48:6f:fb +# SHA1 Fingerprint: 37:f7:6d:e6:07:7c:90:c5:b1:3e:93:1a:b7:41:10:b4:f2:e4:9a:27 +# SHA256 Fingerprint: 79:08:b4:03:14:c1:38:10:0b:51:8d:07:35:80:7f:fb:fc:f8:51:8a:00:95:33:71:05:ba:38:6b:15:3d:d9:27 +-----BEGIN CERTIFICATE----- +MIIDIDCCAgigAwIBAgIBHTANBgkqhkiG9w0BAQUFADA5MQswCQYDVQQGEwJGSTEP +MA0GA1UEChMGU29uZXJhMRkwFwYDVQQDExBTb25lcmEgQ2xhc3MyIENBMB4XDTAx +MDQwNjA3Mjk0MFoXDTIxMDQwNjA3Mjk0MFowOTELMAkGA1UEBhMCRkkxDzANBgNV +BAoTBlNvbmVyYTEZMBcGA1UEAxMQU29uZXJhIENsYXNzMiBDQTCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAJAXSjWdyvANlsdE+hY3/Ei9vX+ALTU74W+o +Z6m/AxxNjG8yR9VBaKQTBME1DJqEQ/xcHf+Js+gXGM2RX/uJ4+q/Tl18GybTdXnt +5oTjV+WtKcT0OijnpXuENmmz/V52vaMtmdOQTiMofRhj8VQ7Jp12W5dCsv+u8E7s +3TmVToMGf+dJQMjFAbJUWmYdPfz56TwKnoG4cPABi+QjVHzIrviQHgCWctRUz2Ej +vOr7nQKV0ba5cTppCD8PtOFCx4j1P5iop7oc4HFx71hXgVB6XGt0Rg6DA5jDjqhu +8nYybieDwnPz3BjotJPqdURrBGAgcVeHnfO+oJAjPYok4doh28MCAwEAAaMzMDEw +DwYDVR0TAQH/BAUwAwEB/zARBgNVHQ4ECgQISqCqWITTXjwwCwYDVR0PBAQDAgEG +MA0GCSqGSIb3DQEBBQUAA4IBAQBazof5FnIVV0sd2ZvnoiYw7JNn39Yt0jSv9zil +zqsWuasvfDXLrNAPtEwr/IDva4yRXzZ299uzGxnq9LIR/WFxRL8oszodv7ND6J+/ 
+3DEIcbCdjdY0RzKQxmUk96BKfARzjzlvF4xytb1LyHr4e4PDKE6cCepnP7JnBBvD +FNr450kkkdAdavphOe9r5yF1BgfYErQhIHBCcYHaPJo2vqZbDWpsmh+Re/n570K6 +Tk6ezAyNlNzZRZxe7EJQY670XcSxEtzKO6gunRRaBXW37Ndj4ro1tgQIkejanZz2 +ZrUYrAqmVCY0M9IbwdR/GjqOC6oybtv8TyWf2TLHllpwrN9M +-----END CERTIFICATE----- + +# Issuer: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com +# Subject: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com +# Label: "XRamp Global CA Root" +# Serial: 107108908803651509692980124233745014957 +# MD5 Fingerprint: a1:0b:44:b3:ca:10:d8:00:6e:9d:0f:d8:0f:92:0a:d1 +# SHA1 Fingerprint: b8:01:86:d1:eb:9c:86:a5:41:04:cf:30:54:f3:4c:52:b7:e5:58:c6 +# SHA256 Fingerprint: ce:cd:dc:90:50:99:d8:da:df:c5:b1:d2:09:b7:37:cb:e2:c1:8c:fb:2c:10:c0:ff:0b:cf:0d:32:86:fc:1a:a2 +-----BEGIN CERTIFICATE----- +MIIEMDCCAxigAwIBAgIQUJRs7Bjq1ZxN1ZfvdY+grTANBgkqhkiG9w0BAQUFADCB +gjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3dy54cmFtcHNlY3VyaXR5LmNvbTEk +MCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2VydmljZXMgSW5jMS0wKwYDVQQDEyRY +UmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQxMTAxMTcx +NDA0WhcNMzUwMTAxMDUzNzE5WjCBgjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3 +dy54cmFtcHNlY3VyaXR5LmNvbTEkMCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2Vy +dmljZXMgSW5jMS0wKwYDVQQDEyRYUmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBB +dXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCYJB69FbS6 +38eMpSe2OAtp87ZOqCwuIR1cRN8hXX4jdP5efrRKt6atH67gBhbim1vZZ3RrXYCP +KZ2GG9mcDZhtdhAoWORlsH9KmHmf4MMxfoArtYzAQDsRhtDLooY2YKTVMIJt2W7Q +DxIEM5dfT2Fa8OT5kavnHTu86M/0ay00fOJIYRyO82FEzG+gSqmUsE3a56k0enI4 +qEHMPJQRfevIpoy3hsvKMzvZPTeL+3o+hiznc9cKV6xkmxnr9A8ECIqsAxcZZPRa +JSKNNCyy9mgdEm3Tih4U2sSPpuIjhdV6Db1q4Ons7Be7QhtnqiXtRYMh/MHJfNVi +PvryxS3T/dRlAgMBAAGjgZ8wgZwwEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0P +BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMZPoj0GY4QJnM5i5ASs +jVy16bYbMDYGA1UdHwQvMC0wK6ApoCeGJWh0dHA6Ly9jcmwueHJhbXBzZWN1cml0 +eS5jb20vWEdDQS5jcmwwEAYJKwYBBAGCNxUBBAMCAQEwDQYJKoZIhvcNAQEFBQAD +ggEBAJEVOQMBG2f7Shz5CmBbodpNl2L5JFMn14JkTpAuw0kbK5rc/Kh4ZzXxHfAR +vbdI4xD2Dd8/0sm2qlWkSLoC295ZLhVbO50WfUfXN+pfTXYSNrsf16GBBEYgoyxt +qZ4Bfj8pzgCT3/3JknOJiWSe5yvkHJEs0rnOfc5vMZnT5r7SHpDwCRR5XCOrTdLa +IR9NmXmd4c8nnxCbHIgNsIpkQTG4DmyQJKSbXHGPurt+HBvbaoAPIbzp26a3QPSy +i6mx5O+aGtA9aZnuqCij4Tyz8LIRnM98QObd50N9otg6tamN8jSZxNQQ4Qb9CYQQ +O+7ETPTsJ3xCwnR8gooJybQDJbw= +-----END CERTIFICATE----- + +# Issuer: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority +# Subject: O=The Go Daddy Group, Inc. 
OU=Go Daddy Class 2 Certification Authority +# Label: "Go Daddy Class 2 CA" +# Serial: 0 +# MD5 Fingerprint: 91:de:06:25:ab:da:fd:32:17:0c:bb:25:17:2a:84:67 +# SHA1 Fingerprint: 27:96:ba:e6:3f:18:01:e2:77:26:1b:a0:d7:77:70:02:8f:20:ee:e4 +# SHA256 Fingerprint: c3:84:6b:f2:4b:9e:93:ca:64:27:4c:0e:c6:7c:1e:cc:5e:02:4f:fc:ac:d2:d7:40:19:35:0e:81:fe:54:6a:e4 +-----BEGIN CERTIFICATE----- +MIIEADCCAuigAwIBAgIBADANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEh +MB8GA1UEChMYVGhlIEdvIERhZGR5IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBE +YWRkeSBDbGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA0MDYyOTE3 +MDYyMFoXDTM0MDYyOTE3MDYyMFowYzELMAkGA1UEBhMCVVMxITAfBgNVBAoTGFRo +ZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28gRGFkZHkgQ2xhc3Mg +MiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQADggEN +ADCCAQgCggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCA +PVYYYwhv2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6w +wdhFJ2+qN1j3hybX2C32qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXi +EqITLdiOr18SPaAIBQi2XKVlOARFmR6jYGB0xUGlcmIbYsUfb18aQr4CUWWoriMY +avx4A6lNf4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmYvLEHZ6IVDd2gWMZEewo+ +YihfukEHU1jPEX44dMX4/7VpkI+EdOqXG68CAQOjgcAwgb0wHQYDVR0OBBYEFNLE +sNKR1EwRcbNhyz2h/t2oatTjMIGNBgNVHSMEgYUwgYKAFNLEsNKR1EwRcbNhyz2h +/t2oatTjoWekZTBjMQswCQYDVQQGEwJVUzEhMB8GA1UEChMYVGhlIEdvIERhZGR5 +IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBEYWRkeSBDbGFzcyAyIENlcnRpZmlj +YXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQAD +ggEBADJL87LKPpH8EsahB4yOd6AzBhRckB4Y9wimPQoZ+YeAEW5p5JYXMP80kWNy +OO7MHAGjHZQopDH2esRU1/blMVgDoszOYtuURXO1v0XJJLXVggKtI3lpjbi2Tc7P +TMozI+gciKqdi0FuFskg5YmezTvacPd+mSYgFFQlq25zheabIZ0KbIIOqPjCDPoQ +HmyW74cNxA9hi63ugyuV+I6ShHI56yDqg+2DzZduCLzrTia2cyvk0/ZM/iZx4mER +dEr/VxqHD3VILs9RaRegAhJhldXRQLIQTO7ErBBDpqWeCtWVYpoNz4iCxTIM5Cuf +ReYNnyicsbkqWletNw+vHX/bvZ8= +-----END CERTIFICATE----- + +# Issuer: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority +# Subject: O=Starfield Technologies, Inc. 
OU=Starfield Class 2 Certification Authority +# Label: "Starfield Class 2 CA" +# Serial: 0 +# MD5 Fingerprint: 32:4a:4b:bb:c8:63:69:9b:be:74:9a:c6:dd:1d:46:24 +# SHA1 Fingerprint: ad:7e:1c:28:b0:64:ef:8f:60:03:40:20:14:c3:d0:e3:37:0e:b5:8a +# SHA256 Fingerprint: 14:65:fa:20:53:97:b8:76:fa:a6:f0:a9:95:8e:55:90:e4:0f:cc:7f:aa:4f:b7:c2:c8:67:75:21:fb:5f:b6:58 +-----BEGIN CERTIFICATE----- +MIIEDzCCAvegAwIBAgIBADANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJVUzEl +MCMGA1UEChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMp +U3RhcmZpZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQw +NjI5MTczOTE2WhcNMzQwNjI5MTczOTE2WjBoMQswCQYDVQQGEwJVUzElMCMGA1UE +ChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMpU3RhcmZp +ZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggEgMA0GCSqGSIb3 +DQEBAQUAA4IBDQAwggEIAoIBAQC3Msj+6XGmBIWtDBFk385N78gDGIc/oav7PKaf +8MOh2tTYbitTkPskpD6E8J7oX+zlJ0T1KKY/e97gKvDIr1MvnsoFAZMej2YcOadN ++lq2cwQlZut3f+dZxkqZJRRU6ybH838Z1TBwj6+wRir/resp7defqgSHo9T5iaU0 +X9tDkYI22WY8sbi5gv2cOj4QyDvvBmVmepsZGD3/cVE8MC5fvj13c7JdBmzDI1aa +K4UmkhynArPkPw2vCHmCuDY96pzTNbO8acr1zJ3o/WSNF4Azbl5KXZnJHoe0nRrA +1W4TNSNe35tfPe/W93bC6j67eA0cQmdrBNj41tpvi/JEoAGrAgEDo4HFMIHCMB0G +A1UdDgQWBBS/X7fRzt0fhvRbVazc1xDCDqmI5zCBkgYDVR0jBIGKMIGHgBS/X7fR +zt0fhvRbVazc1xDCDqmI56FspGowaDELMAkGA1UEBhMCVVMxJTAjBgNVBAoTHFN0 +YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAsTKVN0YXJmaWVsZCBD +bGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8w +DQYJKoZIhvcNAQEFBQADggEBAAWdP4id0ckaVaGsafPzWdqbAYcaT1epoXkJKtv3 +L7IezMdeatiDh6GX70k1PncGQVhiv45YuApnP+yz3SFmH8lU+nLMPUxA2IGvd56D +eruix/U0F47ZEUD0/CwqTRV/p2JdLiXTAAsgGh1o+Re49L2L7ShZ3U0WixeDyLJl +xy16paq8U4Zt3VekyvggQQto8PT7dL5WXXp59fkdheMtlb71cZBDzI0fmgAKhynp +VSJYACPq4xJDKVtHCN2MQWplBqjlIapBtJUhlbl90TSrE9atvNziPTnNvT51cKEY +WQPJIrSPnNVeKtelttQKbfi3QBFGmh95DmK/D5fs4C8fF5Q= +-----END CERTIFICATE----- + +# Issuer: O=Government Root Certification Authority +# Subject: O=Government Root Certification Authority +# Label: "Taiwan GRCA" +# Serial: 42023070807708724159991140556527066870 +# MD5 Fingerprint: 37:85:44:53:32:45:1f:20:f0:f3:95:e1:25:c4:43:4e +# SHA1 Fingerprint: f4:8b:11:bf:de:ab:be:94:54:20:71:e6:41:de:6b:be:88:2b:40:b9 +# SHA256 Fingerprint: 76:00:29:5e:ef:e8:5b:9e:1f:d6:24:db:76:06:2a:aa:ae:59:81:8a:54:d2:77:4c:d4:c0:b2:c0:11:31:e1:b3 +-----BEGIN CERTIFICATE----- +MIIFcjCCA1qgAwIBAgIQH51ZWtcvwgZEpYAIaeNe9jANBgkqhkiG9w0BAQUFADA/ +MQswCQYDVQQGEwJUVzEwMC4GA1UECgwnR292ZXJubWVudCBSb290IENlcnRpZmlj +YXRpb24gQXV0aG9yaXR5MB4XDTAyMTIwNTEzMjMzM1oXDTMyMTIwNTEzMjMzM1ow +PzELMAkGA1UEBhMCVFcxMDAuBgNVBAoMJ0dvdmVybm1lbnQgUm9vdCBDZXJ0aWZp +Y2F0aW9uIEF1dGhvcml0eTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB +AJoluOzMonWoe/fOW1mKydGGEghU7Jzy50b2iPN86aXfTEc2pBsBHH8eV4qNw8XR +IePaJD9IK/ufLqGU5ywck9G/GwGHU5nOp/UKIXZ3/6m3xnOUT0b3EEk3+qhZSV1q +gQdW8or5BtD3cCJNtLdBuTK4sfCxw5w/cP1T3YGq2GN49thTbqGsaoQkclSGxtKy +yhwOeYHWtXBiCAEuTk8O1RGvqa/lmr/czIdtJuTJV6L7lvnM4T9TjGxMfptTCAts +F/tnyMKtsc2AtJfcdgEWFelq16TheEfOhtX7MfP6Mb40qij7cEwdScevLJ1tZqa2 +jWR+tSBqnTuBto9AAGdLiYa4zGX+FVPpBMHWXx1E1wovJ5pGfaENda1UhhXcSTvx +ls4Pm6Dso3pdvtUqdULle96ltqqvKKyskKw4t9VoNSZ63Pc78/1Fm9G7Q3hub/FC +VGqY8A2tl+lSXunVanLeavcbYBT0peS2cWeqH+riTcFCQP5nRhc4L0c/cZyu5SHK +YS1tB6iEfC3uUSXxY5Ce/eFXiGvviiNtsea9P63RPZYLhY3Naye7twWb7LuRqQoH +EgKXTiCQ8P8NHuJBO9NAOueNXdpm5AKwB1KYXA6OM5zCppX7VRluTI6uSw+9wThN +Xo+EHWbNxWCWtFJaBYmOlXqYwZE8lSOyDvR5tMl8wUohAgMBAAGjajBoMB0GA1Ud +DgQWBBTMzO/MKWCkO7GStjz6MmKPrCUVOzAMBgNVHRMEBTADAQH/MDkGBGcqBwAE +MTAvMC0CAQAwCQYFKw4DAhoFADAHBgVnKgMAAAQUA5vwIhP/lSg209yewDL7MTqK 
+UWUwDQYJKoZIhvcNAQEFBQADggIBAECASvomyc5eMN1PhnR2WPWus4MzeKR6dBcZ +TulStbngCnRiqmjKeKBMmo4sIy7VahIkv9Ro04rQ2JyftB8M3jh+Vzj8jeJPXgyf +qzvS/3WXy6TjZwj/5cAWtUgBfen5Cv8b5Wppv3ghqMKnI6mGq3ZW6A4M9hPdKmaK +ZEk9GhiHkASfQlK3T8v+R0F2Ne//AHY2RTKbxkaFXeIksB7jSJaYV0eUVXoPQbFE +JPPB/hprv4j9wabak2BegUqZIJxIZhm1AHlUD7gsL0u8qV1bYH+Mh6XgUmMqvtg7 +hUAV/h62ZT/FS9p+tXo1KaMuephgIqP0fSdOLeq0dDzpD6QzDxARvBMB1uUO07+1 +EqLhRSPAzAhuYbeJq4PjJB7mXQfnHyA+z2fI56wwbSdLaG5LKlwCCDTb+HbkZ6Mm +nD+iMsJKxYEYMRBWqoTvLQr/uB930r+lWKBi5NdLkXWNiYCYfm3LU05er/ayl4WX +udpVBrkk7tfGOB5jGxI7leFYrPLfhNVfmS8NVVvmONsuP3LpSIXLuykTjx44Vbnz +ssQwmSNOXfJIoRIM3BKQCZBUkQM8R+XVyWXgt0t97EfTsws+rZ7QdAAO671RrcDe +LMDDav7v3Aun+kbfYNucpllQdSNpc5Oy+fwC00fmcc4QAu4njIT/rEUNE1yDMuAl +pYYsfPQS +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Assured ID Root CA" +# Serial: 17154717934120587862167794914071425081 +# MD5 Fingerprint: 87:ce:0b:7b:2a:0e:49:00:e1:58:71:9b:37:a8:93:72 +# SHA1 Fingerprint: 05:63:b8:63:0d:62:d7:5a:bb:c8:ab:1e:4b:df:b5:a8:99:b2:4d:43 +# SHA256 Fingerprint: 3e:90:99:b5:01:5e:8f:48:6c:00:bc:ea:9d:11:1e:e7:21:fa:ba:35:5a:89:bc:f1:df:69:56:1e:3d:c6:32:5c +-----BEGIN CERTIFICATE----- +MIIDtzCCAp+gAwIBAgIQDOfg5RfYRv6P5WD8G/AwOTANBgkqhkiG9w0BAQUFADBl +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv +b3QgQ0EwHhcNMDYxMTEwMDAwMDAwWhcNMzExMTEwMDAwMDAwWjBlMQswCQYDVQQG +EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl +cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwggEi +MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtDhXO5EOAXLGH87dg+XESpa7c +JpSIqvTO9SA5KFhgDPiA2qkVlTJhPLWxKISKityfCgyDF3qPkKyK53lTXDGEKvYP +mDI2dsze3Tyoou9q+yHyUmHfnyDXH+Kx2f4YZNISW1/5WBg1vEfNoTb5a3/UsDg+ +wRvDjDPZ2C8Y/igPs6eD1sNuRMBhNZYW/lmci3Zt1/GiSw0r/wty2p5g0I6QNcZ4 +VYcgoc/lbQrISXwxmDNsIumH0DJaoroTghHtORedmTpyoeb6pNnVFzF1roV9Iq4/ +AUaG9ih5yLHa5FcXxH4cDrC0kqZWs72yl+2qp/C3xag/lRbQ/6GW6whfGHdPAgMB +AAGjYzBhMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW +BBRF66Kv9JLLgjEtUYunpyGd823IDzAfBgNVHSMEGDAWgBRF66Kv9JLLgjEtUYun +pyGd823IDzANBgkqhkiG9w0BAQUFAAOCAQEAog683+Lt8ONyc3pklL/3cmbYMuRC +dWKuh+vy1dneVrOfzM4UKLkNl2BcEkxY5NM9g0lFWJc1aRqoR+pWxnmrEthngYTf +fwk8lOa4JiwgvT2zKIn3X/8i4peEH+ll74fg38FnSbNd67IJKusm7Xi+fT8r87cm +NW1fiQG2SVufAQWbqz0lwcy2f8Lxb4bG+mRo64EtlOtCt/qMHt1i8b5QZ7dsvfPx +H2sMNgcWfzd8qVttevESRmCD1ycEvkvOl77DZypoEd+A5wwzZr8TDRRu838fYxAe ++o0bJW1sj6W3YQGx0qMmoRBxna3iw/nDmVG3KwcIzi7mULKn+gpFL6Lw8g== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Global Root CA" +# Serial: 10944719598952040374951832963794454346 +# MD5 Fingerprint: 79:e4:a9:84:0d:7d:3a:96:d7:c0:4f:e2:43:4c:89:2e +# SHA1 Fingerprint: a8:98:5d:3a:65:e5:e5:c4:b2:d7:d6:6d:40:c6:dd:2f:b1:9c:54:36 +# SHA256 Fingerprint: 43:48:a0:e9:44:4c:78:cb:26:5e:05:8d:5e:89:44:b4:d8:4f:96:62:bd:26:db:25:7f:89:34:a4:43:c7:01:61 +-----BEGIN CERTIFICATE----- +MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBh +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBD +QTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAwMDAwMDBaMGExCzAJBgNVBAYTAlVT +MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j 
+b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkqhkiG +9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsB +CSDMAZOnTjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97 +nh6Vfe63SKMI2tavegw5BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt +43C/dxC//AH2hdmoRBBYMql1GNXRor5H4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7P +T19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y7vrTC0LUq7dBMtoM1O/4 +gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQABo2MwYTAO +BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbR +TLtm8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUw +DQYJKoZIhvcNAQEFBQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/Esr +hMAtudXH/vTBH1jLuG2cenTnmCmrEbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg +06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIttep3Sp+dWOIrWcBAI+0tKIJF +PnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886UAb3LujEV0ls +YSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk +CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4= +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert High Assurance EV Root CA" +# Serial: 3553400076410547919724730734378100087 +# MD5 Fingerprint: d4:74:de:57:5c:39:b2:d3:9c:85:83:c5:c0:65:49:8a +# SHA1 Fingerprint: 5f:b7:ee:06:33:e2:59:db:ad:0c:4c:9a:e6:d3:8f:1a:61:c7:dc:25 +# SHA256 Fingerprint: 74:31:e5:f4:c3:c1:ce:46:90:77:4f:0b:61:e0:54:40:88:3b:a9:a0:1e:d0:0b:a6:ab:d7:80:6e:d3:b1:18:cf +-----BEGIN CERTIFICATE----- +MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j +ZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAwMFoXDTMxMTExMDAwMDAwMFowbDEL +MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3 +LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug +RVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm ++9S75S0tMqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTW +PNt0OKRKzE0lgvdKpVMSOO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEM +xChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFB +Ik5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQNAQTXKFx01p8VdteZOE3 +hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUeh10aUAsg +EsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQF +MAMBAf8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaA +FLE+w2kD+L9HAdSYJhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3Nec +nzyIZgYIVyHbIUf4KmeqvxgydkAQV8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6z +eM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFpmyPInngiK3BD41VHMWEZ71jF +hS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkKmNEVX58Svnw2 +Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe +vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep ++OkuE6N36B9K +-----END CERTIFICATE----- + +# Issuer: CN=Class 2 Primary CA O=Certplus +# Subject: CN=Class 2 Primary CA O=Certplus +# Label: "Certplus Class 2 Primary CA" +# Serial: 177770208045934040241468760488327595043 +# MD5 Fingerprint: 88:2c:8c:52:b8:a2:3c:f3:f7:bb:03:ea:ae:ac:42:0b +# SHA1 Fingerprint: 74:20:74:41:72:9c:dd:92:ec:79:31:d8:23:10:8d:c2:81:92:e2:bb +# SHA256 Fingerprint: 0f:99:3c:8a:ef:97:ba:af:56:87:14:0e:d5:9a:d1:82:1b:b4:af:ac:f0:aa:9a:58:b5:d5:7a:33:8a:3a:fb:cb +-----BEGIN CERTIFICATE----- +MIIDkjCCAnqgAwIBAgIRAIW9S/PY2uNp9pTXX8OlRCMwDQYJKoZIhvcNAQEFBQAw +PTELMAkGA1UEBhMCRlIxETAPBgNVBAoTCENlcnRwbHVzMRswGQYDVQQDExJDbGFz 
+cyAyIFByaW1hcnkgQ0EwHhcNOTkwNzA3MTcwNTAwWhcNMTkwNzA2MjM1OTU5WjA9 +MQswCQYDVQQGEwJGUjERMA8GA1UEChMIQ2VydHBsdXMxGzAZBgNVBAMTEkNsYXNz +IDIgUHJpbWFyeSBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANxQ +ltAS+DXSCHh6tlJw/W/uz7kRy1134ezpfgSN1sxvc0NXYKwzCkTsA18cgCSR5aiR +VhKC9+Ar9NuuYS6JEI1rbLqzAr3VNsVINyPi8Fo3UjMXEuLRYE2+L0ER4/YXJQyL +kcAbmXuZVg2v7tK8R1fjeUl7NIknJITesezpWE7+Tt9avkGtrAjFGA7v0lPubNCd +EgETjdyAYveVqUSISnFOYFWe2yMZeVYHDD9jC1yw4r5+FfyUM1hBOHTE4Y+L3yas +H7WLO7dDWWuwJKZtkIvEcupdM5i3y95ee++U8Rs+yskhwcWYAqqi9lt3m/V+llU0 +HGdpwPFC40es/CgcZlUCAwEAAaOBjDCBiTAPBgNVHRMECDAGAQH/AgEKMAsGA1Ud +DwQEAwIBBjAdBgNVHQ4EFgQU43Mt38sOKAze3bOkynm4jrvoMIkwEQYJYIZIAYb4 +QgEBBAQDAgEGMDcGA1UdHwQwMC4wLKAqoCiGJmh0dHA6Ly93d3cuY2VydHBsdXMu +Y29tL0NSTC9jbGFzczIuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQCnVM+IRBnL39R/ +AN9WM2K191EBkOvDP9GIROkkXe/nFL0gt5o8AP5tn9uQ3Nf0YtaLcF3n5QRIqWh8 +yfFC82x/xXp8HVGIutIKPidd3i1RTtMTZGnkLuPT55sJmabglZvOGtd/vjzOUrMR +FcEPF80Du5wlFbqidon8BvEY0JNLDnyCt6X09l/+7UCmnYR0ObncHoUW2ikbhiMA +ybuJfm6AiB4vFLQDJKgybwOaRywwvlbGp0ICcBvqQNi6BQNwB6SW//1IMwrh3KWB +kJtN3X3n57LNXMhqlfil9o3EXXgIvnsG1knPGTZQIy4I5p4FTUcY1Rbpsda2ENW7 +l7+ijrRU +-----END CERTIFICATE----- + +# Issuer: CN=DST Root CA X3 O=Digital Signature Trust Co. +# Subject: CN=DST Root CA X3 O=Digital Signature Trust Co. +# Label: "DST Root CA X3" +# Serial: 91299735575339953335919266965803778155 +# MD5 Fingerprint: 41:03:52:dc:0f:f7:50:1b:16:f0:02:8e:ba:6f:45:c5 +# SHA1 Fingerprint: da:c9:02:4f:54:d8:f6:df:94:93:5f:b1:73:26:38:ca:6a:d7:7c:13 +# SHA256 Fingerprint: 06:87:26:03:31:a7:24:03:d9:09:f1:05:e6:9b:cf:0d:32:e1:bd:24:93:ff:c6:d9:20:6d:11:bc:d6:77:07:39 +-----BEGIN CERTIFICATE----- +MIIDSjCCAjKgAwIBAgIQRK+wgNajJ7qJMDmGLvhAazANBgkqhkiG9w0BAQUFADA/ +MSQwIgYDVQQKExtEaWdpdGFsIFNpZ25hdHVyZSBUcnVzdCBDby4xFzAVBgNVBAMT +DkRTVCBSb290IENBIFgzMB4XDTAwMDkzMDIxMTIxOVoXDTIxMDkzMDE0MDExNVow +PzEkMCIGA1UEChMbRGlnaXRhbCBTaWduYXR1cmUgVHJ1c3QgQ28uMRcwFQYDVQQD +Ew5EU1QgUm9vdCBDQSBYMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB +AN+v6ZdQCINXtMxiZfaQguzH0yxrMMpb7NnDfcdAwRgUi+DoM3ZJKuM/IUmTrE4O +rz5Iy2Xu/NMhD2XSKtkyj4zl93ewEnu1lcCJo6m67XMuegwGMoOifooUMM0RoOEq +OLl5CjH9UL2AZd+3UWODyOKIYepLYYHsUmu5ouJLGiifSKOeDNoJjj4XLh7dIN9b +xiqKqy69cK3FCxolkHRyxXtqqzTWMIn/5WgTe1QLyNau7Fqckh49ZLOMxt+/yUFw +7BZy1SbsOFU5Q9D8/RhcQPGX69Wam40dutolucbY38EVAjqr2m7xPi71XAicPNaD +aeQQmxkqtilX4+U9m5/wAl0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNV +HQ8BAf8EBAMCAQYwHQYDVR0OBBYEFMSnsaR7LHH62+FLkHX/xBVghYkQMA0GCSqG +SIb3DQEBBQUAA4IBAQCjGiybFwBcqR7uKGY3Or+Dxz9LwwmglSBd49lZRNI+DT69 +ikugdB/OEIKcdBodfpga3csTS7MgROSR6cz8faXbauX+5v3gTt23ADq1cEmv8uXr +AvHRAosZy5Q6XkjEGB5YGV8eAlrwDPGxrancWYaLbumR9YbK+rlmM6pZW87ipxZz +R8srzJmwN0jP41ZL9c8PDHIyh8bwRLtTcm1D9SZImlJnt1ir/md2cXjbDaJWFBM5 +JDGFoqgCWjBH4d1QB7wCCZAA62RjYJsWvIjJEubSfZGL+T0yjWW06XyxV3bqxbYo +Ob8VZRzI9neWagqNdwvYkQsEjgfbKbYK7p2CNTUQ +-----END CERTIFICATE----- + +# Issuer: CN=SwissSign Gold CA - G2 O=SwissSign AG +# Subject: CN=SwissSign Gold CA - G2 O=SwissSign AG +# Label: "SwissSign Gold CA - G2" +# Serial: 13492815561806991280 +# MD5 Fingerprint: 24:77:d9:a8:91:d1:3b:fa:88:2d:c2:ff:f8:cd:33:93 +# SHA1 Fingerprint: d8:c5:38:8a:b7:30:1b:1b:6e:d4:7a:e6:45:25:3a:6f:9f:1a:27:61 +# SHA256 Fingerprint: 62:dd:0b:e9:b9:f5:0a:16:3e:a0:f8:e7:5c:05:3b:1e:ca:57:ea:55:c8:68:8f:64:7c:68:81:f2:c8:35:7b:95 +-----BEGIN CERTIFICATE----- +MIIFujCCA6KgAwIBAgIJALtAHEP1Xk+wMA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNV +BAYTAkNIMRUwEwYDVQQKEwxTd2lzc1NpZ24gQUcxHzAdBgNVBAMTFlN3aXNzU2ln +biBHb2xkIENBIC0gRzIwHhcNMDYxMDI1MDgzMDM1WhcNMzYxMDI1MDgzMDM1WjBF 
+MQswCQYDVQQGEwJDSDEVMBMGA1UEChMMU3dpc3NTaWduIEFHMR8wHQYDVQQDExZT +d2lzc1NpZ24gR29sZCBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC +CgKCAgEAr+TufoskDhJuqVAtFkQ7kpJcyrhdhJJCEyq8ZVeCQD5XJM1QiyUqt2/8 +76LQwB8CJEoTlo8jE+YoWACjR8cGp4QjK7u9lit/VcyLwVcfDmJlD909Vopz2q5+ +bbqBHH5CjCA12UNNhPqE21Is8w4ndwtrvxEvcnifLtg+5hg3Wipy+dpikJKVyh+c +6bM8K8vzARO/Ws/BtQpgvd21mWRTuKCWs2/iJneRjOBiEAKfNA+k1ZIzUd6+jbqE +emA8atufK+ze3gE/bk3lUIbLtK/tREDFylqM2tIrfKjuvqblCqoOpd8FUrdVxyJd +MmqXl2MT28nbeTZ7hTpKxVKJ+STnnXepgv9VHKVxaSvRAiTysybUa9oEVeXBCsdt +MDeQKuSeFDNeFhdVxVu1yzSJkvGdJo+hB9TGsnhQ2wwMC3wLjEHXuendjIj3o02y +MszYF9rNt85mndT9Xv+9lz4pded+p2JYryU0pUHHPbwNUMoDAw8IWh+Vc3hiv69y +FGkOpeUDDniOJihC8AcLYiAQZzlG+qkDzAQ4embvIIO1jEpWjpEA/I5cgt6IoMPi +aG59je883WX0XaxR7ySArqpWl2/5rX3aYT+YdzylkbYcjCbaZaIJbcHiVOO5ykxM +gI93e2CaHt+28kgeDrpOVG2Y4OGiGqJ3UM/EY5LsRxmd6+ZrzsECAwEAAaOBrDCB +qTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUWyV7 +lqRlUX64OfPAeGZe6Drn8O4wHwYDVR0jBBgwFoAUWyV7lqRlUX64OfPAeGZe6Drn +8O4wRgYDVR0gBD8wPTA7BglghXQBWQECAQEwLjAsBggrBgEFBQcCARYgaHR0cDov +L3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIBACe6 +45R88a7A3hfm5djV9VSwg/S7zV4Fe0+fdWavPOhWfvxyeDgD2StiGwC5+OlgzczO +UYrHUDFu4Up+GC9pWbY9ZIEr44OE5iKHjn3g7gKZYbge9LgriBIWhMIxkziWMaa5 +O1M/wySTVltpkuzFwbs4AOPsF6m43Md8AYOfMke6UiI0HTJ6CVanfCU2qT1L2sCC +bwq7EsiHSycR+R4tx5M/nttfJmtS2S6K8RTGRI0Vqbe/vd6mGu6uLftIdxf+u+yv +GPUqUfA5hJeVbG4bwyvEdGB5JbAKJ9/fXtI5z0V9QkvfsywexcZdylU6oJxpmo/a +77KwPJ+HbBIrZXAVUjEaJM9vMSNQH4xPjyPDdEFjHFWoFN0+4FFQz/EbMFYOkrCC +hdiDyyJkvC24JdVUorgG6q2SpCSgwYa1ShNqR88uC1aVVMvOmttqtKay20EIhid3 +92qgQmwLOM7XdVAyksLfKzAiSNDVQTglXaTpXZ/GlHXQRf0wl0OPkKsKx4ZzYEpp +Ld6leNcG2mqeSz53OiATIgHQv2ieY2BrNU0LbbqhPcCT4H8js1WtciVORvnSFu+w +ZMEBnunKoGqYDs/YYPIvSbjkQuE4NRb0yG5P94FW6LqjviOvrv1vA+ACOzB2+htt +Qc8Bsem4yWb02ybzOqR08kkkW8mw0FfB+j564ZfJ +-----END CERTIFICATE----- + +# Issuer: CN=SwissSign Silver CA - G2 O=SwissSign AG +# Subject: CN=SwissSign Silver CA - G2 O=SwissSign AG +# Label: "SwissSign Silver CA - G2" +# Serial: 5700383053117599563 +# MD5 Fingerprint: e0:06:a1:c9:7d:cf:c9:fc:0d:c0:56:75:96:d8:62:13 +# SHA1 Fingerprint: 9b:aa:e5:9f:56:ee:21:cb:43:5a:be:25:93:df:a7:f0:40:d1:1d:cb +# SHA256 Fingerprint: be:6c:4d:a2:bb:b9:ba:59:b6:f3:93:97:68:37:42:46:c3:c0:05:99:3f:a9:8f:02:0d:1d:ed:be:d4:8a:81:d5 +-----BEGIN CERTIFICATE----- +MIIFvTCCA6WgAwIBAgIITxvUL1S7L0swDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UE +BhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMYU3dpc3NTaWdu +IFNpbHZlciBDQSAtIEcyMB4XDTA2MTAyNTA4MzI0NloXDTM2MTAyNTA4MzI0Nlow +RzELMAkGA1UEBhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMY +U3dpc3NTaWduIFNpbHZlciBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8A +MIICCgKCAgEAxPGHf9N4Mfc4yfjDmUO8x/e8N+dOcbpLj6VzHVxumK4DV644N0Mv +Fz0fyM5oEMF4rhkDKxD6LHmD9ui5aLlV8gREpzn5/ASLHvGiTSf5YXu6t+WiE7br +YT7QbNHm+/pe7R20nqA1W6GSy/BJkv6FCgU+5tkL4k+73JU3/JHpMjUi0R86TieF +nbAVlDLaYQ1HTWBCrpJH6INaUFjpiou5XaHc3ZlKHzZnu0jkg7Y360g6rw9njxcH +6ATK72oxh9TAtvmUcXtnZLi2kUpCe2UuMGoM9ZDulebyzYLs2aFK7PayS+VFheZt +eJMELpyCbTapxDFkH4aDCyr0NQp4yVXPQbBH6TCfmb5hqAaEuSh6XzjZG6k4sIN/ +c8HDO0gqgg8hm7jMqDXDhBuDsz6+pJVpATqJAHgE2cn0mRmrVn5bi4Y5FZGkECwJ +MoBgs5PAKrYYC51+jUnyEEp/+dVGLxmSo5mnJqy7jDzmDrxHB9xzUfFwZC8I+bRH +HTBsROopN4WSaGa8gzj+ezku01DwH/teYLappvonQfGbGHLy9YR0SslnxFSuSGTf +jNFusB3hB48IHpmccelM2KX3RxIfdNFRnobzwqIjQAtz20um53MGjMGg6cFZrEb6 +5i/4z3GcRm25xBWNOHkDRUjvxF3XCO6HOSKGsg0PWEP3calILv3q1h8CAwEAAaOB +rDCBqTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU +F6DNweRBtjpbO8tFnb0cwpj6hlgwHwYDVR0jBBgwFoAUF6DNweRBtjpbO8tFnb0c 
+wpj6hlgwRgYDVR0gBD8wPTA7BglghXQBWQEDAQEwLjAsBggrBgEFBQcCARYgaHR0 +cDovL3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIB +AHPGgeAn0i0P4JUw4ppBf1AsX19iYamGamkYDHRJ1l2E6kFSGG9YrVBWIGrGvShp +WJHckRE1qTodvBqlYJ7YH39FkWnZfrt4csEGDyrOj4VwYaygzQu4OSlWhDJOhrs9 +xCrZ1x9y7v5RoSJBsXECYxqCsGKrXlcSH9/L3XWgwF15kIwb4FDm3jH+mHtwX6WQ +2K34ArZv02DdQEsixT2tOnqfGhpHkXkzuoLcMmkDlm4fS/Bx/uNncqCxv1yL5PqZ +IseEuRuNI5c/7SXgz2W79WEE790eslpBIlqhn10s6FvJbakMDHiqYMZWjwFaDGi8 +aRl5xB9+lwW/xekkUV7U1UtT7dkjWjYDZaPBA61BMPNGG4WQr2W11bHkFlt4dR2X +em1ZqSqPe97Dh4kQmUlzeMg9vVE1dCrV8X5pGyq7O70luJpaPXJhkGaH7gzWTdQR +dAtq/gsD/KNVV4n+SsuuWxcFyPKNIzFTONItaj+CuY0IavdeQXRuwxF+B6wpYJE/ +OMpXEA29MC/HpeZBoNquBYeaoKRlbEwJDIm6uNO5wJOKMPqN5ZprFQFOZ6raYlY+ +hAhm0sQ2fac+EPyI4NSA5QC9qvNOBqN6avlicuMJT+ubDgEj8Z+7fNzcbBGXJbLy +tGMU0gYqZ4yD9c7qB9iaah7s5Aq7KkzrCWA5zspi2C5u +-----END CERTIFICATE----- + +# Issuer: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc. +# Subject: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc. +# Label: "GeoTrust Primary Certification Authority" +# Serial: 32798226551256963324313806436981982369 +# MD5 Fingerprint: 02:26:c3:01:5e:08:30:37:43:a9:d0:7d:cf:37:e6:bf +# SHA1 Fingerprint: 32:3c:11:8e:1b:f7:b8:b6:52:54:e2:e2:10:0d:d6:02:90:37:f0:96 +# SHA256 Fingerprint: 37:d5:10:06:c5:12:ea:ab:62:64:21:f1:ec:8c:92:01:3f:c5:f8:2a:e9:8e:e5:33:eb:46:19:b8:de:b4:d0:6c +-----BEGIN CERTIFICATE----- +MIIDfDCCAmSgAwIBAgIQGKy1av1pthU6Y2yv2vrEoTANBgkqhkiG9w0BAQUFADBY +MQswCQYDVQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjExMC8GA1UEAxMo +R2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEx +MjcwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMFgxCzAJBgNVBAYTAlVTMRYwFAYDVQQK +Ew1HZW9UcnVzdCBJbmMuMTEwLwYDVQQDEyhHZW9UcnVzdCBQcmltYXJ5IENlcnRp +ZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEAvrgVe//UfH1nrYNke8hCUy3f9oQIIGHWAVlqnEQRr+92/ZV+zmEwu3qDXwK9 +AWbK7hWNb6EwnL2hhZ6UOvNWiAAxz9juapYC2e0DjPt1befquFUWBRaa9OBesYjA +ZIVcFU2Ix7e64HXprQU9nceJSOC7KMgD4TCTZF5SwFlwIjVXiIrxlQqD17wxcwE0 +7e9GceBrAqg1cmuXm2bgyxx5X9gaBGgeRwLmnWDiNpcB3841kt++Z8dtd1k7j53W +kBWUvEI0EME5+bEnPn7WinXFsq+W06Lem+SYvn3h6YGttm/81w7a4DSwDRp35+MI +mO9Y+pyEtzavwt+s0vQQBnBxNQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G +A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQULNVQQZcVi/CPNmFbSvtr2ZnJM5IwDQYJ +KoZIhvcNAQEFBQADggEBAFpwfyzdtzRP9YZRqSa+S7iq8XEN3GHHoOo0Hnp3DwQ1 +6CePbJC/kRYkRj5KTs4rFtULUh38H2eiAkUxT87z+gOneZ1TatnaYzr4gNfTmeGl +4b7UVXGYNTq+k+qurUKykG/g/CFNNWMziUnWm07Kx+dOCQD32sfvmWKZd7aVIl6K +oKv0uHiYyjgZmclynnjNS6yvGaBzEi38wkG6gZHaFloxt/m0cYASSJlyc1pZU8Fj +UjPtp8nSOQJw+uCxQmYpqptR7TBUIhRf2asdweSU8Pj1K/fqynhG1riR/aYNKxoU +AT6A8EKglQdebc3MS6RFjasS6LPeWuWgfOgPIh1a6Vk= +-----END CERTIFICATE----- + +# Issuer: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. - For authorized use only +# Subject: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. 
- For authorized use only +# Label: "thawte Primary Root CA" +# Serial: 69529181992039203566298953787712940909 +# MD5 Fingerprint: 8c:ca:dc:0b:22:ce:f5:be:72:ac:41:1a:11:a8:d8:12 +# SHA1 Fingerprint: 91:c6:d6:ee:3e:8a:c8:63:84:e5:48:c2:99:29:5c:75:6c:81:7b:81 +# SHA256 Fingerprint: 8d:72:2f:81:a9:c1:13:c0:79:1d:f1:36:a2:96:6d:b2:6c:95:0a:97:1d:b4:6b:41:99:f4:ea:54:b7:8b:fb:9f +-----BEGIN CERTIFICATE----- +MIIEIDCCAwigAwIBAgIQNE7VVyDV7exJ9C/ON9srbTANBgkqhkiG9w0BAQUFADCB +qTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf +Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw +MDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxHzAdBgNV +BAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwHhcNMDYxMTE3MDAwMDAwWhcNMzYw +NzE2MjM1OTU5WjCBqTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5j +LjEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYG +A1UECxMvKGMpIDIwMDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNl +IG9ubHkxHzAdBgNVBAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwggEiMA0GCSqG +SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCsoPD7gFnUnMekz52hWXMJEEUMDSxuaPFs +W0hoSVk3/AszGcJ3f8wQLZU0HObrTQmnHNK4yZc2AreJ1CRfBsDMRJSUjQJib+ta +3RGNKJpchJAQeg29dGYvajig4tVUROsdB58Hum/u6f1OCyn1PoSgAfGcq/gcfomk +6KHYcWUNo1F77rzSImANuVud37r8UVsLr5iy6S7pBOhih94ryNdOwUxkHt3Ph1i6 +Sk/KaAcdHJ1KxtUvkcx8cXIcxcBn6zL9yZJclNqFwJu/U30rCfSMnZEfl2pSy94J +NqR32HuHUETVPm4pafs5SSYeCaWAe0At6+gnhcn+Yf1+5nyXHdWdAgMBAAGjQjBA +MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBR7W0XP +r87Lev0xkhpqtvNG61dIUDANBgkqhkiG9w0BAQUFAAOCAQEAeRHAS7ORtvzw6WfU +DW5FvlXok9LOAz/t2iWwHVfLHjp2oEzsUHboZHIMpKnxuIvW1oeEuzLlQRHAd9mz +YJ3rG9XRbkREqaYB7FViHXe4XI5ISXycO1cRrK1zN44veFyQaEfZYGDm/Ac9IiAX +xPcW6cTYcvnIc3zfFi8VqT79aie2oetaupgf1eNNZAqdE8hhuvU5HIe6uL17In/2 +/qxAeeWsEG89jxt5dovEN7MhGITlNgDrYyCZuen+MwS7QcjBAvlEYyCegc5C09Y/ +LHbTY5xZ3Y+m4Q6gLkH3LpVHz7z9M/P2C2F+fpErgUfCJzDupxBdN49cOSvkBPB7 +jVaMaA== +-----END CERTIFICATE----- + +# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. - For authorized use only +# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. 
- For authorized use only +# Label: "VeriSign Class 3 Public Primary Certification Authority - G5" +# Serial: 33037644167568058970164719475676101450 +# MD5 Fingerprint: cb:17:e4:31:67:3e:e2:09:fe:45:57:93:f3:0a:fa:1c +# SHA1 Fingerprint: 4e:b6:d5:78:49:9b:1c:cf:5f:58:1e:ad:56:be:3d:9b:67:44:a5:e5 +# SHA256 Fingerprint: 9a:cf:ab:7e:43:c8:d8:80:d0:6b:26:2a:94:de:ee:e4:b4:65:99:89:c3:d0:ca:f1:9b:af:64:05:e4:1a:b7:df +-----BEGIN CERTIFICATE----- +MIIE0zCCA7ugAwIBAgIQGNrRniZ96LtKIVjNzGs7SjANBgkqhkiG9w0BAQUFADCB +yjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL +ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJp +U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxW +ZXJpU2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0 +aG9yaXR5IC0gRzUwHhcNMDYxMTA4MDAwMDAwWhcNMzYwNzE2MjM1OTU5WjCByjEL +MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW +ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJpU2ln +biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp +U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y +aXR5IC0gRzUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvJAgIKXo1 +nmAMqudLO07cfLw8RRy7K+D+KQL5VwijZIUVJ/XxrcgxiV0i6CqqpkKzj/i5Vbex +t0uz/o9+B1fs70PbZmIVYc9gDaTY3vjgw2IIPVQT60nKWVSFJuUrjxuf6/WhkcIz +SdhDY2pSS9KP6HBRTdGJaXvHcPaz3BJ023tdS1bTlr8Vd6Gw9KIl8q8ckmcY5fQG +BO+QueQA5N06tRn/Arr0PO7gi+s3i+z016zy9vA9r911kTMZHRxAy3QkGSGT2RT+ +rCpSx4/VBEnkjWNHiDxpg8v+R70rfk/Fla4OndTRQ8Bnc+MUCH7lP59zuDMKz10/ +NIeWiu5T6CUVAgMBAAGjgbIwga8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8E +BAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJaW1hZ2UvZ2lmMCEwHzAH +BgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYjaHR0cDovL2xvZ28udmVy +aXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFH/TZafC3ey78DAJ80M5+gKv +MzEzMA0GCSqGSIb3DQEBBQUAA4IBAQCTJEowX2LP2BqYLz3q3JktvXf2pXkiOOzE +p6B4Eq1iDkVwZMXnl2YtmAl+X6/WzChl8gGqCBpH3vn5fJJaCGkgDdk+bW48DW7Y +5gaRQBi5+MHt39tBquCWIMnNZBU4gcmU7qKEKQsTb47bDN0lAtukixlE0kF6BWlK +WE9gyn6CagsCqiUXObXbf+eEZSqVir2G3l6BFoMtEMze/aiCKm0oHw0LxOXnGiYZ +4fQRbxC1lfznQgUy286dUV4otp6F01vvpX1FQHKOtw5rDgb7MzVIcbidJ4vEZV8N +hnacRHr2lVz2XTIIM6RUthg/aFzyQkqFOFSDX9HoLPKsEdao7WNq +-----END CERTIFICATE----- + +# Issuer: CN=SecureTrust CA O=SecureTrust Corporation +# Subject: CN=SecureTrust CA O=SecureTrust Corporation +# Label: "SecureTrust CA" +# Serial: 17199774589125277788362757014266862032 +# MD5 Fingerprint: dc:32:c3:a7:6d:25:57:c7:68:09:9d:ea:2d:a9:a2:d1 +# SHA1 Fingerprint: 87:82:c6:c3:04:35:3b:cf:d2:96:92:d2:59:3e:7d:44:d9:34:ff:11 +# SHA256 Fingerprint: f1:c1:b5:0a:e5:a2:0d:d8:03:0e:c9:f6:bc:24:82:3d:d3:67:b5:25:57:59:b4:e7:1b:61:fc:e9:f7:37:5d:73 +-----BEGIN CERTIFICATE----- +MIIDuDCCAqCgAwIBAgIQDPCOXAgWpa1Cf/DrJxhZ0DANBgkqhkiG9w0BAQUFADBI +MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x +FzAVBgNVBAMTDlNlY3VyZVRydXN0IENBMB4XDTA2MTEwNzE5MzExOFoXDTI5MTIz +MTE5NDA1NVowSDELMAkGA1UEBhMCVVMxIDAeBgNVBAoTF1NlY3VyZVRydXN0IENv +cnBvcmF0aW9uMRcwFQYDVQQDEw5TZWN1cmVUcnVzdCBDQTCCASIwDQYJKoZIhvcN +AQEBBQADggEPADCCAQoCggEBAKukgeWVzfX2FI7CT8rU4niVWJxB4Q2ZQCQXOZEz +Zum+4YOvYlyJ0fwkW2Gz4BERQRwdbvC4u/jep4G6pkjGnx29vo6pQT64lO0pGtSO +0gMdA+9tDWccV9cGrcrI9f4Or2YlSASWC12juhbDCE/RRvgUXPLIXgGZbf2IzIao +wW8xQmxSPmjL8xk037uHGFaAJsTQ3MBv396gwpEWoGQRS0S8Hvbn+mPeZqx2pHGj +7DaUaHp3pLHnDi+BeuK1cobvomuL8A/b01k/unK8RCSc43Oz969XL0Imnal0ugBS +8kvNU3xHCzaFDmapCJcWNFfBZveA4+1wVMeT4C4oFVmHursCAwEAAaOBnTCBmjAT +BgkrBgEEAYI3FAIEBh4EAEMAQTALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB +/zAdBgNVHQ4EFgQUQjK2FvoE/f5dS3rD/fdMQB1aQ68wNAYDVR0fBC0wKzApoCeg 
+JYYjaHR0cDovL2NybC5zZWN1cmV0cnVzdC5jb20vU1RDQS5jcmwwEAYJKwYBBAGC +NxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBADDtT0rhWDpSclu1pqNlGKa7UTt3 +6Z3q059c4EVlew3KW+JwULKUBRSuSceNQQcSc5R+DCMh/bwQf2AQWnL1mA6s7Ll/ +3XpvXdMc9P+IBWlCqQVxyLesJugutIxq/3HcuLHfmbx8IVQr5Fiiu1cprp6poxkm +D5kuCLDv/WnPmRoJjeOnnyvJNjR7JLN4TJUXpAYmHrZkUjZfYGfZnMUFdAvnZyPS +CPyI6a6Lf+Ew9Dd+/cYy2i2eRDAwbO4H3tI0/NL/QPZL9GZGBlSm8jIKYyYwa5vR +3ItHuuG51WLQoqD0ZwV4KWMabwTW+MZMo5qxN7SN5ShLHZ4swrhovO0C7jE= +-----END CERTIFICATE----- + +# Issuer: CN=Secure Global CA O=SecureTrust Corporation +# Subject: CN=Secure Global CA O=SecureTrust Corporation +# Label: "Secure Global CA" +# Serial: 9751836167731051554232119481456978597 +# MD5 Fingerprint: cf:f4:27:0d:d4:ed:dc:65:16:49:6d:3d:da:bf:6e:de +# SHA1 Fingerprint: 3a:44:73:5a:e5:81:90:1f:24:86:61:46:1e:3b:9c:c4:5f:f5:3a:1b +# SHA256 Fingerprint: 42:00:f5:04:3a:c8:59:0e:bb:52:7d:20:9e:d1:50:30:29:fb:cb:d4:1c:a1:b5:06:ec:27:f1:5a:de:7d:ac:69 +-----BEGIN CERTIFICATE----- +MIIDvDCCAqSgAwIBAgIQB1YipOjUiolN9BPI8PjqpTANBgkqhkiG9w0BAQUFADBK +MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x +GTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwHhcNMDYxMTA3MTk0MjI4WhcNMjkx +MjMxMTk1MjA2WjBKMQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3Qg +Q29ycG9yYXRpb24xGTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwggEiMA0GCSqG +SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvNS7YrGxVaQZx5RNoJLNP2MwhR/jxYDiJ +iQPpvepeRlMJ3Fz1Wuj3RSoC6zFh1ykzTM7HfAo3fg+6MpjhHZevj8fcyTiW89sa +/FHtaMbQbqR8JNGuQsiWUGMu4P51/pinX0kuleM5M2SOHqRfkNJnPLLZ/kG5VacJ +jnIFHovdRIWCQtBJwB1g8NEXLJXr9qXBkqPFwqcIYA1gBBCWeZ4WNOaptvolRTnI +HmX5k/Wq8VLcmZg9pYYaDDUz+kulBAYVHDGA76oYa8J719rO+TMg1fW9ajMtgQT7 +sFzUnKPiXB3jqUJ1XnvUd+85VLrJChgbEplJL4hL/VBi0XPnj3pDAgMBAAGjgZ0w +gZowEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0PBAQDAgGGMA8GA1UdEwEB/wQF +MAMBAf8wHQYDVR0OBBYEFK9EBMJBfkiD2045AuzshHrmzsmkMDQGA1UdHwQtMCsw +KaAnoCWGI2h0dHA6Ly9jcmwuc2VjdXJldHJ1c3QuY29tL1NHQ0EuY3JsMBAGCSsG +AQQBgjcVAQQDAgEAMA0GCSqGSIb3DQEBBQUAA4IBAQBjGghAfaReUw132HquHw0L +URYD7xh8yOOvaliTFGCRsoTciE6+OYo68+aCiV0BN7OrJKQVDpI1WkpEXk5X+nXO +H0jOZvQ8QCaSmGwb7iRGDBezUqXbpZGRzzfTb+cnCDpOGR86p1hcF895P4vkp9Mm +I50mD1hp/Ed+stCNi5O/KU9DaXR2Z0vPB4zmAve14bRDtUstFJ/53CYNv6ZHdAbY +iNE6KTCEztI5gGIbqMdXSbxqVVFnFUq+NQfk1XWYN3kwFNspnWzFacxHVaIw98xc +f8LDmBxrThaA63p4ZUWiABqvDA1VZDRIuJK58bRQKfJPIx/abKwfROHdI3hRW8cW +-----END CERTIFICATE----- + +# Issuer: CN=COMODO Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO Certification Authority O=COMODO CA Limited +# Label: "COMODO Certification Authority" +# Serial: 104350513648249232941998508985834464573 +# MD5 Fingerprint: 5c:48:dc:f7:42:72:ec:56:94:6d:1c:cc:71:35:80:75 +# SHA1 Fingerprint: 66:31:bf:9e:f7:4f:9e:b6:c9:d5:a6:0c:ba:6a:be:d1:f7:bd:ef:7b +# SHA256 Fingerprint: 0c:2c:d6:3d:f7:80:6f:a3:99:ed:e8:09:11:6b:57:5b:f8:79:89:f0:65:18:f9:80:8c:86:05:03:17:8b:af:66 +-----BEGIN CERTIFICATE----- +MIIEHTCCAwWgAwIBAgIQToEtioJl4AsC7j41AkblPTANBgkqhkiG9w0BAQUFADCB +gTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G +A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxJzAlBgNV +BAMTHkNPTU9ETyBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEyMDEwMDAw +MDBaFw0yOTEyMzEyMzU5NTlaMIGBMQswCQYDVQQGEwJHQjEbMBkGA1UECBMSR3Jl +YXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHEwdTYWxmb3JkMRowGAYDVQQKExFDT01P +RE8gQ0EgTGltaXRlZDEnMCUGA1UEAxMeQ09NT0RPIENlcnRpZmljYXRpb24gQXV0 +aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0ECLi3LjkRv3 +UcEbVASY06m/weaKXTuH+7uIzg3jLz8GlvCiKVCZrts7oVewdFFxze1CkU1B/qnI +2GqGd0S7WWaXUF601CxwRM/aN5VCaTwwxHGzUvAhTaHYujl8HJ6jJJ3ygxaYqhZ8 
+Q5sVW7euNJH+1GImGEaaP+vB+fGQV+useg2L23IwambV4EajcNxo2f8ESIl33rXp ++2dtQem8Ob0y2WIC8bGoPW43nOIv4tOiJovGuFVDiOEjPqXSJDlqR6sA1KGzqSX+ +DT+nHbrTUcELpNqsOO9VUCQFZUaTNE8tja3G1CEZ0o7KBWFxB3NH5YoZEr0ETc5O +nKVIrLsm9wIDAQABo4GOMIGLMB0GA1UdDgQWBBQLWOWLxkwVN6RAqTCpIb5HNlpW +/zAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zBJBgNVHR8EQjBAMD6g +PKA6hjhodHRwOi8vY3JsLmNvbW9kb2NhLmNvbS9DT01PRE9DZXJ0aWZpY2F0aW9u +QXV0aG9yaXR5LmNybDANBgkqhkiG9w0BAQUFAAOCAQEAPpiem/Yb6dc5t3iuHXIY +SdOH5EOC6z/JqvWote9VfCFSZfnVDeFs9D6Mk3ORLgLETgdxb8CPOGEIqB6BCsAv +IC9Bi5HcSEW88cbeunZrM8gALTFGTO3nnc+IlP8zwFboJIYmuNg4ON8qa90SzMc/ +RxdMosIGlgnW2/4/PEZB31jiVg88O8EckzXZOFKs7sjsLjBOlDW0JB9LeGna8gI4 +zJVSk/BwJVmcIGfE7vmLV2H0knZ9P4SNVbfo5azV8fUZVqZa+5Acr5Pr5RzUZ5dd +BA6+C4OmF4O5MBKgxTMVBbkN+8cFduPYSo38NBejxiEovjBFMR7HeL5YYTisO+IB +ZQ== +-----END CERTIFICATE----- + +# Issuer: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C. +# Subject: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C. +# Label: "Network Solutions Certificate Authority" +# Serial: 116697915152937497490437556386812487904 +# MD5 Fingerprint: d3:f3:a6:16:c0:fa:6b:1d:59:b1:2d:96:4d:0e:11:2e +# SHA1 Fingerprint: 74:f8:a3:c3:ef:e7:b3:90:06:4b:83:90:3c:21:64:60:20:e5:df:ce +# SHA256 Fingerprint: 15:f0:ba:00:a3:ac:7a:f3:ac:88:4c:07:2b:10:11:a0:77:bd:77:c0:97:f4:01:64:b2:f8:59:8a:bd:83:86:0c +-----BEGIN CERTIFICATE----- +MIID5jCCAs6gAwIBAgIQV8szb8JcFuZHFhfjkDFo4DANBgkqhkiG9w0BAQUFADBi +MQswCQYDVQQGEwJVUzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMu +MTAwLgYDVQQDEydOZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3Jp +dHkwHhcNMDYxMjAxMDAwMDAwWhcNMjkxMjMxMjM1OTU5WjBiMQswCQYDVQQGEwJV +UzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMuMTAwLgYDVQQDEydO +ZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwggEiMA0GCSqG +SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDkvH6SMG3G2I4rC7xGzuAnlt7e+foS0zwz +c7MEL7xxjOWftiJgPl9dzgn/ggwbmlFQGiaJ3dVhXRncEg8tCqJDXRfQNJIg6nPP +OCwGJgl6cvf6UDL4wpPTaaIjzkGxzOTVHzbRijr4jGPiFFlp7Q3Tf2vouAPlT2rl +mGNpSAW+Lv8ztumXWWn4Zxmuk2GWRBXTcrA/vGp97Eh/jcOrqnErU2lBUzS1sLnF +BgrEsEX1QV1uiUV7PTsmjHTC5dLRfbIR1PtYMiKagMnc/Qzpf14Dl847ABSHJ3A4 +qY5usyd2mFHgBeMhqxrVhSI8KbWaFsWAqPS7azCPL0YCorEMIuDTAgMBAAGjgZcw +gZQwHQYDVR0OBBYEFCEwyfsA106Y2oeqKtCnLrFAMadMMA4GA1UdDwEB/wQEAwIB +BjAPBgNVHRMBAf8EBTADAQH/MFIGA1UdHwRLMEkwR6BFoEOGQWh0dHA6Ly9jcmwu +bmV0c29sc3NsLmNvbS9OZXR3b3JrU29sdXRpb25zQ2VydGlmaWNhdGVBdXRob3Jp +dHkuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQC7rkvnt1frf6ott3NHhWrB5KUd5Oc8 +6fRZZXe1eltajSU24HqXLjjAV2CDmAaDn7l2em5Q4LqILPxFzBiwmZVRDuwduIj/ +h1AcgsLj4DKAv6ALR8jDMe+ZZzKATxcheQxpXN5eNK4CtSbqUN9/GGUsyfJj4akH +/nxxH2szJGoeBfcFaMBqEssuXmHLrijTfsK0ZpEmXzwuJF/LWA/rKOyvEZbz3Htv +wKeI8lN3s2Berq4o2jUsbzRF0ybh3uxbTydrFny9RAQYgrOJeRcQcT16ohZO9QHN +pGxlaKFJdlxDydi8NmdspZS11My5vWo1ViHe2MPr+8ukYEywVaCge1ey +-----END CERTIFICATE----- + +# Issuer: CN=COMODO ECC Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO ECC Certification Authority O=COMODO CA Limited +# Label: "COMODO ECC Certification Authority" +# Serial: 41578283867086692638256921589707938090 +# MD5 Fingerprint: 7c:62:ff:74:9d:31:53:5e:68:4a:d5:78:aa:1e:bf:23 +# SHA1 Fingerprint: 9f:74:4e:9f:2b:4d:ba:ec:0f:31:2c:50:b6:56:3b:8e:2d:93:c3:11 +# SHA256 Fingerprint: 17:93:92:7a:06:14:54:97:89:ad:ce:2f:8f:34:f7:f0:b6:6d:0f:3a:e3:a3:b8:4d:21:ec:15:db:ba:4f:ad:c7 +-----BEGIN CERTIFICATE----- +MIICiTCCAg+gAwIBAgIQH0evqmIAcFBUTAGem2OZKjAKBggqhkjOPQQDAzCBhTEL +MAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE +BxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMT 
+IkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwMzA2MDAw +MDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdy +ZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09N +T0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlv +biBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQDR3svdcmCFYX7deSR +FtSrYpn1PlILBs5BAH+X4QokPB0BBO490o0JlwzgdeT6+3eKKvUDYEs2ixYjFq0J +cfRK9ChQtP6IHG4/bC8vCVlbpVsLM5niwz2J+Wos77LTBumjQjBAMB0GA1UdDgQW +BBR1cacZSBm8nZ3qQUfflMRId5nTeTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ +BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjEA7wNbeqy3eApyt4jf/7VGFAkK+qDm +fQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdv +GDeAU/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY= +-----END CERTIFICATE----- + +# Issuer: CN=OISTE WISeKey Global Root GA CA O=WISeKey OU=Copyright (c) 2005/OISTE Foundation Endorsed +# Subject: CN=OISTE WISeKey Global Root GA CA O=WISeKey OU=Copyright (c) 2005/OISTE Foundation Endorsed +# Label: "OISTE WISeKey Global Root GA CA" +# Serial: 86718877871133159090080555911823548314 +# MD5 Fingerprint: bc:6c:51:33:a7:e9:d3:66:63:54:15:72:1b:21:92:93 +# SHA1 Fingerprint: 59:22:a1:e1:5a:ea:16:35:21:f8:98:39:6a:46:46:b0:44:1b:0f:a9 +# SHA256 Fingerprint: 41:c9:23:86:6a:b4:ca:d6:b7:ad:57:80:81:58:2e:02:07:97:a6:cb:df:4f:ff:78:ce:83:96:b3:89:37:d7:f5 +-----BEGIN CERTIFICATE----- +MIID8TCCAtmgAwIBAgIQQT1yx/RrH4FDffHSKFTfmjANBgkqhkiG9w0BAQUFADCB +ijELMAkGA1UEBhMCQ0gxEDAOBgNVBAoTB1dJU2VLZXkxGzAZBgNVBAsTEkNvcHly +aWdodCAoYykgMjAwNTEiMCAGA1UECxMZT0lTVEUgRm91bmRhdGlvbiBFbmRvcnNl +ZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9iYWwgUm9vdCBHQSBDQTAeFw0w +NTEyMTExNjAzNDRaFw0zNzEyMTExNjA5NTFaMIGKMQswCQYDVQQGEwJDSDEQMA4G +A1UEChMHV0lTZUtleTEbMBkGA1UECxMSQ29weXJpZ2h0IChjKSAyMDA1MSIwIAYD +VQQLExlPSVNURSBGb3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBX +SVNlS2V5IEdsb2JhbCBSb290IEdBIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A +MIIBCgKCAQEAy0+zAJs9Nt350UlqaxBJH+zYK7LG+DKBKUOVTJoZIyEVRd7jyBxR +VVuuk+g3/ytr6dTqvirdqFEr12bDYVxgAsj1znJ7O7jyTmUIms2kahnBAbtzptf2 +w93NvKSLtZlhuAGio9RN1AU9ka34tAhxZK9w8RxrfvbDd50kc3vkDIzh2TbhmYsF +mQvtRTEJysIA2/dyoJaqlYfQjse2YXMNdmaM3Bu0Y6Kff5MTMPGhJ9vZ/yxViJGg +4E8HsChWjBgbl0SOid3gF27nKu+POQoxhILYQBRJLnpB5Kf+42TMwVlxSywhp1t9 +4B3RLoGbw9ho972WG6xwsRYUC9tguSYBBQIDAQABo1EwTzALBgNVHQ8EBAMCAYYw +DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUswN+rja8sHnR3JQmthG+IbJphpQw +EAYJKwYBBAGCNxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBAEuh/wuHbrP5wUOx +SPMowB0uyQlB+pQAHKSkq0lPjz0e701vvbyk9vImMMkQyh2I+3QZH4VFvbBsUfk2 +ftv1TDI6QU9bR8/oCy22xBmddMVHxjtqD6wU2zz0c5ypBd8A3HR4+vg1YFkCExh8 +vPtNsCBtQ7tgMHpnM1zFmdH4LTlSc/uMqpclXHLZCB6rTjzjgTGfA6b7wP4piFXa +hNVQA7bihKOmNqoROgHhGEvWRGizPflTdISzRpFGlgC3gCy24eMQ4tui5yiPAZZi +Fj4A4xylNoEYokxSdsARo27mHbrjWr42U8U+dY+GaSlYU7Wcu2+fXMUY7N0v4ZjJ +/L7fCg0= +-----END CERTIFICATE----- + +# Issuer: CN=Certigna O=Dhimyotis +# Subject: CN=Certigna O=Dhimyotis +# Label: "Certigna" +# Serial: 18364802974209362175 +# MD5 Fingerprint: ab:57:a6:5b:7d:42:82:19:b5:d8:58:26:28:5e:fd:ff +# SHA1 Fingerprint: b1:2e:13:63:45:86:a4:6f:1a:b2:60:68:37:58:2d:c4:ac:fd:94:97 +# SHA256 Fingerprint: e3:b6:a2:db:2e:d7:ce:48:84:2f:7a:c5:32:41:c7:b7:1d:54:14:4b:fb:40:c1:1f:3f:1d:0b:42:f5:ee:a1:2d +-----BEGIN CERTIFICATE----- +MIIDqDCCApCgAwIBAgIJAP7c4wEPyUj/MA0GCSqGSIb3DQEBBQUAMDQxCzAJBgNV +BAYTAkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hMB4X +DTA3MDYyOTE1MTMwNVoXDTI3MDYyOTE1MTMwNVowNDELMAkGA1UEBhMCRlIxEjAQ +BgNVBAoMCURoaW15b3RpczERMA8GA1UEAwwIQ2VydGlnbmEwggEiMA0GCSqGSIb3 +DQEBAQUAA4IBDwAwggEKAoIBAQDIaPHJ1tazNHUmgh7stL7qXOEm7RFHYeGifBZ4 
+QCHkYJ5ayGPhxLGWkv8YbWkj4Sti993iNi+RB7lIzw7sebYs5zRLcAglozyHGxny +gQcPOJAZ0xH+hrTy0V4eHpbNgGzOOzGTtvKg0KmVEn2lmsxryIRWijOp5yIVUxbw +zBfsV1/pogqYCd7jX5xv3EjjhQsVWqa6n6xI4wmy9/Qy3l40vhx4XUJbzg4ij02Q +130yGLMLLGq/jj8UEYkgDncUtT2UCIf3JR7VsmAA7G8qKCVuKj4YYxclPz5EIBb2 +JsglrgVKtOdjLPOMFlN+XPsRGgjBRmKfIrjxwo1p3Po6WAbfAgMBAAGjgbwwgbkw +DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUGu3+QTmQtCRZvgHyUtVF9lo53BEw +ZAYDVR0jBF0wW4AUGu3+QTmQtCRZvgHyUtVF9lo53BGhOKQ2MDQxCzAJBgNVBAYT +AkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hggkA/tzj +AQ/JSP8wDgYDVR0PAQH/BAQDAgEGMBEGCWCGSAGG+EIBAQQEAwIABzANBgkqhkiG +9w0BAQUFAAOCAQEAhQMeknH2Qq/ho2Ge6/PAD/Kl1NqV5ta+aDY9fm4fTIrv0Q8h +bV6lUmPOEvjvKtpv6zf+EwLHyzs+ImvaYS5/1HI93TDhHkxAGYwP15zRgzB7mFnc +fca5DClMoTOi62c6ZYTTluLtdkVwj7Ur3vkj1kluPBS1xp81HlDQwY9qcEQCYsuu +HWhBp6pX6FOqB9IG9tUUBguRA3UsbHK1YZWaDYu5Def131TN3ubY1gkIl2PlwS6w +t0QmwCbAr1UwnjvVNioZBPRcHv/PLLf/0P2HQBHVESO7SMAhqaQoLf0V+LBOK/Qw +WyH8EZE0vkHve52Xdf+XlcCWWC/qu0bXu+TZLg== +-----END CERTIFICATE----- + +# Issuer: CN=Deutsche Telekom Root CA 2 O=Deutsche Telekom AG OU=T-TeleSec Trust Center +# Subject: CN=Deutsche Telekom Root CA 2 O=Deutsche Telekom AG OU=T-TeleSec Trust Center +# Label: "Deutsche Telekom Root CA 2" +# Serial: 38 +# MD5 Fingerprint: 74:01:4a:91:b1:08:c4:58:ce:47:cd:f0:dd:11:53:08 +# SHA1 Fingerprint: 85:a4:08:c0:9c:19:3e:5d:51:58:7d:cd:d6:13:30:fd:8c:de:37:bf +# SHA256 Fingerprint: b6:19:1a:50:d0:c3:97:7f:7d:a9:9b:cd:aa:c8:6a:22:7d:ae:b9:67:9e:c7:0b:a3:b0:c9:d9:22:71:c1:70:d3 +-----BEGIN CERTIFICATE----- +MIIDnzCCAoegAwIBAgIBJjANBgkqhkiG9w0BAQUFADBxMQswCQYDVQQGEwJERTEc +MBoGA1UEChMTRGV1dHNjaGUgVGVsZWtvbSBBRzEfMB0GA1UECxMWVC1UZWxlU2Vj +IFRydXN0IENlbnRlcjEjMCEGA1UEAxMaRGV1dHNjaGUgVGVsZWtvbSBSb290IENB +IDIwHhcNOTkwNzA5MTIxMTAwWhcNMTkwNzA5MjM1OTAwWjBxMQswCQYDVQQGEwJE +RTEcMBoGA1UEChMTRGV1dHNjaGUgVGVsZWtvbSBBRzEfMB0GA1UECxMWVC1UZWxl +U2VjIFRydXN0IENlbnRlcjEjMCEGA1UEAxMaRGV1dHNjaGUgVGVsZWtvbSBSb290 +IENBIDIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCrC6M14IspFLEU +ha88EOQ5bzVdSq7d6mGNlUn0b2SjGmBmpKlAIoTZ1KXleJMOaAGtuU1cOs7TuKhC +QN/Po7qCWWqSG6wcmtoIKyUn+WkjR/Hg6yx6m/UTAtB+NHzCnjwAWav12gz1Mjwr +rFDa1sPeg5TKqAyZMg4ISFZbavva4VhYAUlfckE8FQYBjl2tqriTtM2e66foai1S +NNs671x1Udrb8zH57nGYMsRUFUQM+ZtV7a3fGAigo4aKSe5TBY8ZTNXeWHmb0moc +QqvF1afPaA+W5OFhmHZhyJF81j4A4pFQh+GdCuatl9Idxjp9y7zaAzTVjlsB9WoH +txa2bkp/AgMBAAGjQjBAMB0GA1UdDgQWBBQxw3kbuvVT1xfgiXotF2wKsyudMzAP +BgNVHRMECDAGAQH/AgEFMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOC +AQEAlGRZrTlk5ynrE/5aw4sTV8gEJPB0d8Bg42f76Ymmg7+Wgnxu1MM9756Abrsp +tJh6sTtU6zkXR34ajgv8HzFZMQSyzhfzLMdiNlXiItiJVbSYSKpk+tYcNthEeFpa +IzpXl/V6ME+un2pMSyuOoAPjPuCp1NJ70rOo4nI8rZ7/gFnkm0W09juwzTkZmDLl +6iFhkOQxIY40sfcvNUqFENrnijchvllj4PKFiDFT1FQUhXB59C4Gdyd1Lx+4ivn+ +xbrYNuSD7Odlt79jWvNGr4GUN9RBjNYj1h7P9WgbRGOiWrqnNVmh5XAFmw4jV5mU +Cm26OWMohpLzGITY+9HPBVZkVw== +-----END CERTIFICATE----- + +# Issuer: CN=Cybertrust Global Root O=Cybertrust, Inc +# Subject: CN=Cybertrust Global Root O=Cybertrust, Inc +# Label: "Cybertrust Global Root" +# Serial: 4835703278459682877484360 +# MD5 Fingerprint: 72:e4:4a:87:e3:69:40:80:77:ea:bc:e3:f4:ff:f0:e1 +# SHA1 Fingerprint: 5f:43:e5:b1:bf:f8:78:8c:ac:1c:c7:ca:4a:9a:c6:22:2b:cc:34:c6 +# SHA256 Fingerprint: 96:0a:df:00:63:e9:63:56:75:0c:29:65:dd:0a:08:67:da:0b:9c:bd:6e:77:71:4a:ea:fb:23:49:ab:39:3d:a3 +-----BEGIN CERTIFICATE----- +MIIDoTCCAomgAwIBAgILBAAAAAABD4WqLUgwDQYJKoZIhvcNAQEFBQAwOzEYMBYG +A1UEChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2Jh +bCBSb290MB4XDTA2MTIxNTA4MDAwMFoXDTIxMTIxNTA4MDAwMFowOzEYMBYGA1UE 
+ChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2JhbCBS +b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA+Mi8vRRQZhP/8NN5 +7CPytxrHjoXxEnOmGaoQ25yiZXRadz5RfVb23CO21O1fWLE3TdVJDm71aofW0ozS +J8bi/zafmGWgE07GKmSb1ZASzxQG9Dvj1Ci+6A74q05IlG2OlTEQXO2iLb3VOm2y +HLtgwEZLAfVJrn5GitB0jaEMAs7u/OePuGtm839EAL9mJRQr3RAwHQeWP032a7iP +t3sMpTjr3kfb1V05/Iin89cqdPHoWqI7n1C6poxFNcJQZZXcY4Lv3b93TZxiyWNz +FtApD0mpSPCzqrdsxacwOUBdrsTiXSZT8M4cIwhhqJQZugRiQOwfOHB3EgZxpzAY +XSUnpQIDAQABo4GlMIGiMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/ +MB0GA1UdDgQWBBS2CHsNesysIEyGVjJez6tuhS1wVzA/BgNVHR8EODA2MDSgMqAw +hi5odHRwOi8vd3d3Mi5wdWJsaWMtdHJ1c3QuY29tL2NybC9jdC9jdHJvb3QuY3Js +MB8GA1UdIwQYMBaAFLYIew16zKwgTIZWMl7Pq26FLXBXMA0GCSqGSIb3DQEBBQUA +A4IBAQBW7wojoFROlZfJ+InaRcHUowAl9B8Tq7ejhVhpwjCt2BWKLePJzYFa+HMj +Wqd8BfP9IjsO0QbE2zZMcwSO5bAi5MXzLqXZI+O4Tkogp24CJJ8iYGd7ix1yCcUx +XOl5n4BHPa2hCwcUPUf/A2kaDAtE52Mlp3+yybh2hO0j9n0Hq0V+09+zv+mKts2o +omcrUtW3ZfA5TGOgkXmTUg9U3YO7n9GPp1Nzw8v/MOx8BLjYRB+TX3EJIrduPuoc +A06dGiBh+4E37F78CkWr1+cXVdCg6mCbpvbjjFspwgZgFJ0tl0ypkxWdYcQBX0jW +WL1WMRJOEcgh4LMRkWXbtKaIOM5V +-----END CERTIFICATE----- + +# Issuer: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority +# Subject: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority +# Label: "ePKI Root Certification Authority" +# Serial: 28956088682735189655030529057352760477 +# MD5 Fingerprint: 1b:2e:00:ca:26:06:90:3d:ad:fe:6f:15:68:d3:6b:b3 +# SHA1 Fingerprint: 67:65:0d:f1:7e:8e:7e:5b:82:40:a4:f4:56:4b:cf:e2:3d:69:c6:f0 +# SHA256 Fingerprint: c0:a6:f4:dc:63:a2:4b:fd:cf:54:ef:2a:6a:08:2a:0a:72:de:35:80:3e:2f:f5:ff:52:7a:e5:d8:72:06:df:d5 +-----BEGIN CERTIFICATE----- +MIIFsDCCA5igAwIBAgIQFci9ZUdcr7iXAF7kBtK8nTANBgkqhkiG9w0BAQUFADBe +MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0 +ZC4xKjAoBgNVBAsMIWVQS0kgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe +Fw0wNDEyMjAwMjMxMjdaFw0zNDEyMjAwMjMxMjdaMF4xCzAJBgNVBAYTAlRXMSMw +IQYDVQQKDBpDaHVuZ2h3YSBUZWxlY29tIENvLiwgTHRkLjEqMCgGA1UECwwhZVBL +SSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIICIjANBgkqhkiG9w0BAQEF +AAOCAg8AMIICCgKCAgEA4SUP7o3biDN1Z82tH306Tm2d0y8U82N0ywEhajfqhFAH +SyZbCUNsIZ5qyNUD9WBpj8zwIuQf5/dqIjG3LBXy4P4AakP/h2XGtRrBp0xtInAh +ijHyl3SJCRImHJ7K2RKilTza6We/CKBk49ZCt0Xvl/T29de1ShUCWH2YWEtgvM3X +DZoTM1PRYfl61dd4s5oz9wCGzh1NlDivqOx4UXCKXBCDUSH3ET00hl7lSM2XgYI1 +TBnsZfZrxQWh7kcT1rMhJ5QQCtkkO7q+RBNGMD+XPNjX12ruOzjjK9SXDrkb5wdJ +fzcq+Xd4z1TtW0ado4AOkUPB1ltfFLqfpo0kR0BZv3I4sjZsN/+Z0V0OWQqraffA +sgRFelQArr5T9rXn4fg8ozHSqf4hUmTFpmfwdQcGlBSBVcYn5AGPF8Fqcde+S/uU +WH1+ETOxQvdibBjWzwloPn9s9h6PYq2lY9sJpx8iQkEeb5mKPtf5P0B6ebClAZLS +nT0IFaUQAS2zMnaolQ2zepr7BxB4EW/hj8e6DyUadCrlHJhBmd8hh+iVBmoKs2pH +dmX2Os+PYhcZewoozRrSgx4hxyy/vv9haLdnG7t4TY3OZ+XkwY63I2binZB1NJip +NiuKmpS5nezMirH4JYlcWrYvjB9teSSnUmjDhDXiZo1jDiVN1Rmy5nk3pyKdVDEC +AwEAAaNqMGgwHQYDVR0OBBYEFB4M97Zn8uGSJglFwFU5Lnc/QkqiMAwGA1UdEwQF +MAMBAf8wOQYEZyoHAAQxMC8wLQIBADAJBgUrDgMCGgUAMAcGBWcqAwAABBRFsMLH +ClZ87lt4DJX5GFPBphzYEDANBgkqhkiG9w0BAQUFAAOCAgEACbODU1kBPpVJufGB +uvl2ICO1J2B01GqZNF5sAFPZn/KmsSQHRGoqxqWOeBLoR9lYGxMqXnmbnwoqZ6Yl +PwZpVnPDimZI+ymBV3QGypzqKOg4ZyYr8dW1P2WT+DZdjo2NQCCHGervJ8A9tDkP +JXtoUHRVnAxZfVo9QZQlUgjgRywVMRnVvwdVxrsStZf0X4OFunHB2WyBEXYKCrC/ +gpf36j36+uwtqSiUO1bd0lEursC9CBWMd1I0ltabrNMdjmEPNXubrjlpC2JgQCA2 +j6/7Nu4tCEoduL+bXPjqpRugc6bY+G7gMwRfaKonh+3ZwZCc7b3jajWvY9+rGNm6 +5ulK6lCKD2GTHuItGeIwlDWSXQ62B68ZgI9HkFFLLk3dheLSClIKF5r8GrBQAuUB +o2M3IUxExJtRmREOc5wGj1QupyheRDmHVi03vYVElOEMSyycw5KFNGHLD7ibSkNS +/jQ6fbjpKdx2qcgw+BRxgMYeNkh0IkFch4LoGHGLQYlE535YW6i4jRPpp2zDR+2z 
+Gp1iro2C6pSe3VkQw63d4k3jMdXH7OjysP6SHhYKGvzZ8/gntsm+HbRsZJB/9OTE +W9c3rkIO3aQab3yIVMUWbuF6aC74Or8NpDyJO3inTmODBCEIZ43ygknQW/2xzQ+D +hNQ+IIX3Sj0rnP0qCglN6oH4EZw= +-----END CERTIFICATE----- + +# Issuer: O=certSIGN OU=certSIGN ROOT CA +# Subject: O=certSIGN OU=certSIGN ROOT CA +# Label: "certSIGN ROOT CA" +# Serial: 35210227249154 +# MD5 Fingerprint: 18:98:c0:d6:e9:3a:fc:f9:b0:f5:0c:f7:4b:01:44:17 +# SHA1 Fingerprint: fa:b7:ee:36:97:26:62:fb:2d:b0:2a:f6:bf:03:fd:e8:7c:4b:2f:9b +# SHA256 Fingerprint: ea:a9:62:c4:fa:4a:6b:af:eb:e4:15:19:6d:35:1c:cd:88:8d:4f:53:f3:fa:8a:e6:d7:c4:66:a9:4e:60:42:bb +-----BEGIN CERTIFICATE----- +MIIDODCCAiCgAwIBAgIGIAYFFnACMA0GCSqGSIb3DQEBBQUAMDsxCzAJBgNVBAYT +AlJPMREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBD +QTAeFw0wNjA3MDQxNzIwMDRaFw0zMTA3MDQxNzIwMDRaMDsxCzAJBgNVBAYTAlJP +MREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBDQTCC +ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALczuX7IJUqOtdu0KBuqV5Do +0SLTZLrTk+jUrIZhQGpgV2hUhE28alQCBf/fm5oqrl0Hj0rDKH/v+yv6efHHrfAQ +UySQi2bJqIirr1qjAOm+ukbuW3N7LBeCgV5iLKECZbO9xSsAfsT8AzNXDe3i+s5d +RdY4zTW2ssHQnIFKquSyAVwdj1+ZxLGt24gh65AIgoDzMKND5pCCrlUoSe1b16kQ +OA7+j0xbm0bqQfWwCHTD0IgztnzXdN/chNFDDnU5oSVAKOp4yw4sLjmdjItuFhwv +JoIQ4uNllAoEwF73XVv4EOLQunpL+943AAAaWyjj0pxzPjKHmKHJUS/X3qwzs08C +AwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAcYwHQYDVR0O +BBYEFOCMm9slSbPxfIbWskKHC9BroNnkMA0GCSqGSIb3DQEBBQUAA4IBAQA+0hyJ +LjX8+HXd5n9liPRyTMks1zJO890ZeUe9jjtbkw9QSSQTaxQGcu8J06Gh40CEyecY +MnQ8SG4Pn0vU9x7Tk4ZkVJdjclDVVc/6IJMCopvDI5NOFlV2oHB5bc0hH88vLbwZ +44gx+FkagQnIl6Z0x2DEW8xXjrJ1/RsCCdtZb3KTafcxQdaIOL+Hsr0Wefmq5L6I +Jd1hJyMctTEHBDa0GpC9oHRxUIltvBTjD4au8as+x6AJzKNI0eDbZOeStc+vckNw +i/nDhDwTqn6Sm1dTk/pwwpEOMfmbZ13pljheX7NzTogVZ96edhBiIL5VaZVDADlN +9u6wWk5JRFRYX0KD +-----END CERTIFICATE----- + +# Issuer: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. - For authorized use only +# Subject: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. 
- For authorized use only +# Label: "GeoTrust Primary Certification Authority - G3" +# Serial: 28809105769928564313984085209975885599 +# MD5 Fingerprint: b5:e8:34:36:c9:10:44:58:48:70:6d:2e:83:d4:b8:05 +# SHA1 Fingerprint: 03:9e:ed:b8:0b:e7:a0:3c:69:53:89:3b:20:d2:d9:32:3a:4c:2a:fd +# SHA256 Fingerprint: b4:78:b8:12:25:0d:f8:78:63:5c:2a:a7:ec:7d:15:5e:aa:62:5e:e8:29:16:e2:cd:29:43:61:88:6c:d1:fb:d4 +-----BEGIN CERTIFICATE----- +MIID/jCCAuagAwIBAgIQFaxulBmyeUtB9iepwxgPHzANBgkqhkiG9w0BAQsFADCB +mDELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsT +MChjKSAyMDA4IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25s +eTE2MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhv +cml0eSAtIEczMB4XDTA4MDQwMjAwMDAwMFoXDTM3MTIwMTIzNTk1OVowgZgxCzAJ +BgNVBAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykg +MjAwOCBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0 +BgNVBAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg +LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANziXmJYHTNXOTIz ++uvLh4yn1ErdBojqZI4xmKU4kB6Yzy5jK/BGvESyiaHAKAxJcCGVn2TAppMSAmUm +hsalifD614SgcK9PGpc/BkTVyetyEH3kMSj7HGHmKAdEc5IiaacDiGydY8hS2pgn +5whMcD60yRLBxWeDXTPzAxHsatBT4tG6NmCUgLthY2xbF37fQJQeqw3CIShwiP/W +JmxsYAQlTlV+fe+/lEjetx3dcI0FX4ilm/LC7urRQEFtYjgdVgbFA0dRIBn8exAL +DmKudlW/X3e+PkkBUz2YJQN2JFodtNuJ6nnltrM7P7pMKEF/BqxqjsHQ9gUdfeZC +huOl1UcCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYw +HQYDVR0OBBYEFMR5yo6hTgMdHNxr2zFblD4/MH8tMA0GCSqGSIb3DQEBCwUAA4IB +AQAtxRPPVoB7eni9n64smefv2t+UXglpp+duaIy9cr5HqQ6XErhK8WTTOd8lNNTB +zU6B8A8ExCSzNJbGpqow32hhc9f5joWJ7w5elShKKiePEI4ufIbEAp7aDHdlDkQN +kv39sxY2+hENHYwOB4lqKVb3cvTdFZx3NWZXqxNT2I7BQMXXExZacse3aQHEerGD +AWh9jUGhlBjBJVz88P6DAod8DQ3PLghcSkANPuyBYeYk28rgDi0Hsj5W3I31QYUH +SJsMC8tJP33st/3LjWeJGqvtux6jAAgIFyqCXDFdRootD4abdNlF+9RAsXqqaC2G +spki4cErx5z481+oghLrGREt +-----END CERTIFICATE----- + +# Issuer: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only +# Subject: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only +# Label: "thawte Primary Root CA - G2" +# Serial: 71758320672825410020661621085256472406 +# MD5 Fingerprint: 74:9d:ea:60:24:c4:fd:22:53:3e:cc:3a:72:d9:29:4f +# SHA1 Fingerprint: aa:db:bc:22:23:8f:c4:01:a1:27:bb:38:dd:f4:1d:db:08:9e:f0:12 +# SHA256 Fingerprint: a4:31:0d:50:af:18:a6:44:71:90:37:2a:86:af:af:8b:95:1f:fb:43:1d:83:7f:1e:56:88:b4:59:71:ed:15:57 +-----BEGIN CERTIFICATE----- +MIICiDCCAg2gAwIBAgIQNfwmXNmET8k9Jj1Xm67XVjAKBggqhkjOPQQDAzCBhDEL +MAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjE4MDYGA1UECxMvKGMp +IDIwMDcgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAi +BgNVBAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMjAeFw0wNzExMDUwMDAw +MDBaFw0zODAxMTgyMzU5NTlaMIGEMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhh +d3RlLCBJbmMuMTgwNgYDVQQLEy8oYykgMjAwNyB0aGF3dGUsIEluYy4gLSBGb3Ig +YXV0aG9yaXplZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9v +dCBDQSAtIEcyMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEotWcgnuVnfFSeIf+iha/ +BebfowJPDQfGAFG6DAJSLSKkQjnE/o/qycG+1E3/n3qe4rF8mq2nhglzh9HnmuN6 +papu+7qzcMBniKI11KOasf2twu8x+qi58/sIxpHR+ymVo0IwQDAPBgNVHRMBAf8E +BTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUmtgAMADna3+FGO6Lts6K +DPgR4bswCgYIKoZIzj0EAwMDaQAwZgIxAN344FdHW6fmCsO99YCKlzUNG4k8VIZ3 +KMqh9HneteY4sPBlcIx/AlTCv//YoT7ZzwIxAMSNlPzcU9LcnXgWHxUzI1NS41ox +XZ3Krr0TKUQNJ1uo52icEvdYPy5yAlejj6EULg== +-----END CERTIFICATE----- + +# Issuer: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. 
- For authorized use only +# Subject: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. - For authorized use only +# Label: "thawte Primary Root CA - G3" +# Serial: 127614157056681299805556476275995414779 +# MD5 Fingerprint: fb:1b:5d:43:8a:94:cd:44:c6:76:f2:43:4b:47:e7:31 +# SHA1 Fingerprint: f1:8b:53:8d:1b:e9:03:b6:a6:f0:56:43:5b:17:15:89:ca:f3:6b:f2 +# SHA256 Fingerprint: 4b:03:f4:58:07:ad:70:f2:1b:fc:2c:ae:71:c9:fd:e4:60:4c:06:4c:f5:ff:b6:86:ba:e5:db:aa:d7:fd:d3:4c +-----BEGIN CERTIFICATE----- +MIIEKjCCAxKgAwIBAgIQYAGXt0an6rS0mtZLL/eQ+zANBgkqhkiG9w0BAQsFADCB +rjELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf +Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw +MDggdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAiBgNV +BAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMzAeFw0wODA0MDIwMDAwMDBa +Fw0zNzEyMDEyMzU5NTlaMIGuMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhhd3Rl +LCBJbmMuMSgwJgYDVQQLEx9DZXJ0aWZpY2F0aW9uIFNlcnZpY2VzIERpdmlzaW9u +MTgwNgYDVQQLEy8oYykgMjAwOCB0aGF3dGUsIEluYy4gLSBGb3IgYXV0aG9yaXpl +ZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9vdCBDQSAtIEcz +MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsr8nLPvb2FvdeHsbnndm +gcs+vHyu86YnmjSjaDFxODNi5PNxZnmxqWWjpYvVj2AtP0LMqmsywCPLLEHd5N/8 +YZzic7IilRFDGF/Eth9XbAoFWCLINkw6fKXRz4aviKdEAhN0cXMKQlkC+BsUa0Lf +b1+6a4KinVvnSr0eAXLbS3ToO39/fR8EtCab4LRarEc9VbjXsCZSKAExQGbY2SS9 +9irY7CFJXJv2eul/VTV+lmuNk5Mny5K76qxAwJ/C+IDPXfRa3M50hqY+bAtTyr2S +zhkGcuYMXDhpxwTWvGzOW/b3aJzcJRVIiKHpqfiYnODz1TEoYRFsZ5aNOZnLwkUk +OQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNV +HQ4EFgQUrWyqlGCc7eT/+j4KdCtjA/e2Wb8wDQYJKoZIhvcNAQELBQADggEBABpA +2JVlrAmSicY59BDlqQ5mU1143vokkbvnRFHfxhY0Cu9qRFHqKweKA3rD6z8KLFIW +oCtDuSWQP3CpMyVtRRooOyfPqsMpQhvfO0zAMzRbQYi/aytlryjvsvXDqmbOe1bu +t8jLZ8HJnBoYuMTDSQPxYA5QzUbF83d597YV4Djbxy8ooAw/dyZ02SUS2jHaGh7c +KUGRIjxpp7sC8rZcJwOJ9Abqm+RyguOhCcHpABnTPtRwa7pxpqpYrvS76Wy274fM +m7v/OeZWYdMKp8RcTGB7BXcmer/YB1IsYvdwY9k5vG8cwnncdimvzsUsZAReiDZu +MdRAGmI0Nj81Aa6sY6A= +-----END CERTIFICATE----- + +# Issuer: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. - For authorized use only +# Subject: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. 
- For authorized use only +# Label: "GeoTrust Primary Certification Authority - G2" +# Serial: 80682863203381065782177908751794619243 +# MD5 Fingerprint: 01:5e:d8:6b:bd:6f:3d:8e:a1:31:f8:12:e0:98:73:6a +# SHA1 Fingerprint: 8d:17:84:d5:37:f3:03:7d:ec:70:fe:57:8b:51:9a:99:e6:10:d7:b0 +# SHA256 Fingerprint: 5e:db:7a:c4:3b:82:a0:6a:87:61:e8:d7:be:49:79:eb:f2:61:1f:7d:d7:9b:f9:1c:1c:6b:56:6a:21:9e:d7:66 +-----BEGIN CERTIFICATE----- +MIICrjCCAjWgAwIBAgIQPLL0SAoA4v7rJDteYD7DazAKBggqhkjOPQQDAzCBmDEL +MAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsTMChj +KSAyMDA3IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25seTE2 +MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0 +eSAtIEcyMB4XDTA3MTEwNTAwMDAwMFoXDTM4MDExODIzNTk1OVowgZgxCzAJBgNV +BAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykgMjAw +NyBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0BgNV +BAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBH +MjB2MBAGByqGSM49AgEGBSuBBAAiA2IABBWx6P0DFUPlrOuHNxFi79KDNlJ9RVcL +So17VDs6bl8VAsBQps8lL33KSLjHUGMcKiEIfJo22Av+0SbFWDEwKCXzXV2juLal +tJLtbCyf691DiaI8S0iRHVDsJt/WYC69IaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO +BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBVfNVdRVfslsq0DafwBo/q+EVXVMAoG +CCqGSM49BAMDA2cAMGQCMGSWWaboCd6LuvpaiIjwH5HTRqjySkwCY/tsXzjbLkGT +qQ7mndwxHLKgpxgceeHHNgIwOlavmnRs9vuD4DPTCF+hnMJbn0bWtsuRBmOiBucz +rD6ogRLQy7rQkgu2npaqBA+K +-----END CERTIFICATE----- + +# Issuer: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only +# Subject: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only +# Label: "VeriSign Universal Root Certification Authority" +# Serial: 85209574734084581917763752644031726877 +# MD5 Fingerprint: 8e:ad:b5:01:aa:4d:81:e4:8c:1d:d1:e1:14:00:95:19 +# SHA1 Fingerprint: 36:79:ca:35:66:87:72:30:4d:30:a5:fb:87:3b:0f:a7:7b:b7:0d:54 +# SHA256 Fingerprint: 23:99:56:11:27:a5:71:25:de:8c:ef:ea:61:0d:df:2f:a0:78:b5:c8:06:7f:4e:82:82:90:bf:b8:60:e8:4b:3c +-----BEGIN CERTIFICATE----- +MIIEuTCCA6GgAwIBAgIQQBrEZCGzEyEDDrvkEhrFHTANBgkqhkiG9w0BAQsFADCB +vTELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL +ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwOCBWZXJp +U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MTgwNgYDVQQDEy9W +ZXJpU2lnbiBVbml2ZXJzYWwgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe +Fw0wODA0MDIwMDAwMDBaFw0zNzEyMDEyMzU5NTlaMIG9MQswCQYDVQQGEwJVUzEX +MBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlTaWduIFRydXN0 +IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAyMDA4IFZlcmlTaWduLCBJbmMuIC0gRm9y +IGF1dGhvcml6ZWQgdXNlIG9ubHkxODA2BgNVBAMTL1ZlcmlTaWduIFVuaXZlcnNh +bCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEF +AAOCAQ8AMIIBCgKCAQEAx2E3XrEBNNti1xWb/1hajCMj1mCOkdeQmIN65lgZOIzF +9uVkhbSicfvtvbnazU0AtMgtc6XHaXGVHzk8skQHnOgO+k1KxCHfKWGPMiJhgsWH +H26MfF8WIFFE0XBPV+rjHOPMee5Y2A7Cs0WTwCznmhcrewA3ekEzeOEz4vMQGn+H +LL729fdC4uW/h2KJXwBL38Xd5HVEMkE6HnFuacsLdUYI0crSK5XQz/u5QGtkjFdN +/BMReYTtXlT2NJ8IAfMQJQYXStrxHXpma5hgZqTZ79IugvHw7wnqRMkVauIDbjPT +rJ9VAMf2CGqUuV/c4DPxhGD5WycRtPwW8rtWaoAljQIDAQABo4GyMIGvMA8GA1Ud +EwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMG0GCCsGAQUFBwEMBGEwX6FdoFsw +WTBXMFUWCWltYWdlL2dpZjAhMB8wBwYFKw4DAhoEFI/l0xqGrI2Oa8PPgGrUSBgs +exkuMCUWI2h0dHA6Ly9sb2dvLnZlcmlzaWduLmNvbS92c2xvZ28uZ2lmMB0GA1Ud +DgQWBBS2d/ppSEefUxLVwuoHMnYH0ZcHGTANBgkqhkiG9w0BAQsFAAOCAQEASvj4 +sAPmLGd75JR3Y8xuTPl9Dg3cyLk1uXBPY/ok+myDjEedO2Pzmvl2MpWRsXe8rJq+ 
+seQxIcaBlVZaDrHC1LGmWazxY8u4TB1ZkErvkBYoH1quEPuBUDgMbMzxPcP1Y+Oz +4yHJJDnp/RVmRvQbEdBNc6N9Rvk97ahfYtTxP/jgdFcrGJ2BtMQo2pSXpXDrrB2+ +BxHw1dvd5Yzw1TKwg+ZX4o+/vqGqvz0dtdQ46tewXDpPaj+PwGZsY6rp2aQW9IHR +lRQOfc2VNNnSj3BzgXucfr2YYdhFh5iQxeuGMMY1v/D/w1WIg0vvBZIGcfK4mJO3 +7M2CYfE45k+XmCpajQ== +-----END CERTIFICATE----- + +# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only +# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only +# Label: "VeriSign Class 3 Public Primary Certification Authority - G4" +# Serial: 63143484348153506665311985501458640051 +# MD5 Fingerprint: 3a:52:e1:e7:fd:6f:3a:e3:6f:f3:6f:99:1b:f9:22:41 +# SHA1 Fingerprint: 22:d5:d8:df:8f:02:31:d1:8d:f7:9d:b7:cf:8a:2d:64:c9:3f:6c:3a +# SHA256 Fingerprint: 69:dd:d7:ea:90:bb:57:c9:3e:13:5d:c8:5e:a6:fc:d5:48:0b:60:32:39:bd:c4:54:fc:75:8b:2a:26:cf:7f:79 +-----BEGIN CERTIFICATE----- +MIIDhDCCAwqgAwIBAgIQL4D+I4wOIg9IZxIokYesszAKBggqhkjOPQQDAzCByjEL +MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW +ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2ln +biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp +U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y +aXR5IC0gRzQwHhcNMDcxMTA1MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCByjELMAkG +A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZWZXJp +U2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2lnbiwg +SW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJpU2ln +biBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5 +IC0gRzQwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAASnVnp8Utpkmw4tXNherJI9/gHm +GUo9FANL+mAnINmDiWn6VMaaGF5VKmTeBvaNSjutEDxlPZCIBIngMGGzrl0Bp3ve +fLK+ymVhAIau2o970ImtTR1ZmkGxvEeA3J5iw/mjgbIwga8wDwYDVR0TAQH/BAUw +AwEB/zAOBgNVHQ8BAf8EBAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJ +aW1hZ2UvZ2lmMCEwHzAHBgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYj +aHR0cDovL2xvZ28udmVyaXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFLMW +kf3upm7ktS5Jj4d4gYDs5bG1MAoGCCqGSM49BAMDA2gAMGUCMGYhDBgmYFo4e1ZC +4Kf8NoRRkSAsdk1DPcQdhCPQrNZ8NQbOzWm9kA3bbEhCHQ6qQgIxAJw9SDkjOVga +FRJZap7v1VmyHVIsmXHNxynfGyphe3HR3vPA5Q06Sqotp9iGKt0uEA== +-----END CERTIFICATE----- + +# Issuer: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services) +# Subject: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. 
OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services) +# Label: "NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny" +# Serial: 80544274841616 +# MD5 Fingerprint: c5:a1:b7:ff:73:dd:d6:d7:34:32:18:df:fc:3c:ad:88 +# SHA1 Fingerprint: 06:08:3f:59:3f:15:a1:04:a0:69:a4:6b:a9:03:d0:06:b7:97:09:91 +# SHA256 Fingerprint: 6c:61:da:c3:a2:de:f0:31:50:6b:e0:36:d2:a6:fe:40:19:94:fb:d1:3d:f9:c8:d4:66:59:92:74:c4:46:ec:98 +-----BEGIN CERTIFICATE----- +MIIEFTCCAv2gAwIBAgIGSUEs5AAQMA0GCSqGSIb3DQEBCwUAMIGnMQswCQYDVQQG +EwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFTATBgNVBAoMDE5ldExvY2sgS2Z0LjE3 +MDUGA1UECwwuVGFuw7pzw610dsOhbnlraWFkw7NrIChDZXJ0aWZpY2F0aW9uIFNl +cnZpY2VzKTE1MDMGA1UEAwwsTmV0TG9jayBBcmFueSAoQ2xhc3MgR29sZCkgRsWR +dGFuw7pzw610dsOhbnkwHhcNMDgxMjExMTUwODIxWhcNMjgxMjA2MTUwODIxWjCB +pzELMAkGA1UEBhMCSFUxETAPBgNVBAcMCEJ1ZGFwZXN0MRUwEwYDVQQKDAxOZXRM +b2NrIEtmdC4xNzA1BgNVBAsMLlRhbsO6c8OtdHbDoW55a2lhZMOzayAoQ2VydGlm +aWNhdGlvbiBTZXJ2aWNlcykxNTAzBgNVBAMMLE5ldExvY2sgQXJhbnkgKENsYXNz +IEdvbGQpIEbFkXRhbsO6c8OtdHbDoW55MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A +MIIBCgKCAQEAxCRec75LbRTDofTjl5Bu0jBFHjzuZ9lk4BqKf8owyoPjIMHj9DrT +lF8afFttvzBPhCf2nx9JvMaZCpDyD/V/Q4Q3Y1GLeqVw/HpYzY6b7cNGbIRwXdrz +AZAj/E4wqX7hJ2Pn7WQ8oLjJM2P+FpD/sLj916jAwJRDC7bVWaaeVtAkH3B5r9s5 +VA1lddkVQZQBr17s9o3x/61k/iCa11zr/qYfCGSji3ZVrR47KGAuhyXoqq8fxmRG +ILdwfzzeSNuWU7c5d+Qa4scWhHaXWy+7GRWF+GmF9ZmnqfI0p6m2pgP8b4Y9VHx2 +BJtr+UBdADTHLpl1neWIA6pN+APSQnbAGwIDAKiLo0UwQzASBgNVHRMBAf8ECDAG +AQH/AgEEMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUzPpnk/C2uNClwB7zU/2M +U9+D15YwDQYJKoZIhvcNAQELBQADggEBAKt/7hwWqZw8UQCgwBEIBaeZ5m8BiFRh +bvG5GK1Krf6BQCOUL/t1fC8oS2IkgYIL9WHxHG64YTjrgfpioTtaYtOUZcTh5m2C ++C8lcLIhJsFyUR+MLMOEkMNaj7rP9KdlpeuY0fsFskZ1FSNqb4VjMIDw1Z4fKRzC +bLBQWV2QWzuoDTDPv31/zvGdg73JRm4gpvlhUbohL3u+pRVjodSVh/GeufOJ8z2F +uLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2 +XjG4Kvte9nHfRCaexOYNkbQudZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E= +-----END CERTIFICATE----- + +# Issuer: CN=Staat der Nederlanden Root CA - G2 O=Staat der Nederlanden +# Subject: CN=Staat der Nederlanden Root CA - G2 O=Staat der Nederlanden +# Label: "Staat der Nederlanden Root CA - G2" +# Serial: 10000012 +# MD5 Fingerprint: 7c:a5:0f:f8:5b:9a:7d:6d:30:ae:54:5a:e3:42:a2:8a +# SHA1 Fingerprint: 59:af:82:79:91:86:c7:b4:75:07:cb:cf:03:57:46:eb:04:dd:b7:16 +# SHA256 Fingerprint: 66:8c:83:94:7d:a6:3b:72:4b:ec:e1:74:3c:31:a0:e6:ae:d0:db:8e:c5:b3:1b:e3:77:bb:78:4f:91:b6:71:6f +-----BEGIN CERTIFICATE----- +MIIFyjCCA7KgAwIBAgIEAJiWjDANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJO +TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSswKQYDVQQDDCJTdGFh +dCBkZXIgTmVkZXJsYW5kZW4gUm9vdCBDQSAtIEcyMB4XDTA4MDMyNjExMTgxN1oX +DTIwMDMyNTExMDMxMFowWjELMAkGA1UEBhMCTkwxHjAcBgNVBAoMFVN0YWF0IGRl +ciBOZWRlcmxhbmRlbjErMCkGA1UEAwwiU3RhYXQgZGVyIE5lZGVybGFuZGVuIFJv +b3QgQ0EgLSBHMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMVZ5291 +qj5LnLW4rJ4L5PnZyqtdj7U5EILXr1HgO+EASGrP2uEGQxGZqhQlEq0i6ABtQ8Sp +uOUfiUtnvWFI7/3S4GCI5bkYYCjDdyutsDeqN95kWSpGV+RLufg3fNU254DBtvPU +Z5uW6M7XxgpT0GtJlvOjCwV3SPcl5XCsMBQgJeN/dVrlSPhOewMHBPqCYYdu8DvE +pMfQ9XQ+pV0aCPKbJdL2rAQmPlU6Yiile7Iwr/g3wtG61jj99O9JMDeZJiFIhQGp +5Rbn3JBV3w/oOM2ZNyFPXfUib2rFEhZgF1XyZWampzCROME4HYYEhLoaJXhena/M +UGDWE4dS7WMfbWV9whUYdMrhfmQpjHLYFhN9C0lK8SgbIHRrxT3dsKpICT0ugpTN +GmXZK4iambwYfp/ufWZ8Pr2UuIHOzZgweMFvZ9C+X+Bo7d7iscksWXiSqt8rYGPy +5V6548r6f1CGPqI0GAwJaCgRHOThuVw+R7oyPxjMW4T182t0xHJ04eOLoEq9jWYv +6q012iDTiIJh8BIitrzQ1aTsr1SIJSQ8p22xcik/Plemf1WvbibG/ufMQFxRRIEK +eN5KzlW/HdXZt1bv8Hb/C3m1r737qWmRRpdogBQ2HbN/uymYNqUg+oJgYjOk7Na6 +B6duxc8UpufWkjTYgfX8HV2qXB72o007uPc5AgMBAAGjgZcwgZQwDwYDVR0TAQH/ 
+BAUwAwEB/zBSBgNVHSAESzBJMEcGBFUdIAAwPzA9BggrBgEFBQcCARYxaHR0cDov +L3d3dy5wa2lvdmVyaGVpZC5ubC9wb2xpY2llcy9yb290LXBvbGljeS1HMjAOBgNV +HQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJFoMocVHYnitfGsNig0jQt8YojrMA0GCSqG +SIb3DQEBCwUAA4ICAQCoQUpnKpKBglBu4dfYszk78wIVCVBR7y29JHuIhjv5tLyS +CZa59sCrI2AGeYwRTlHSeYAz+51IvuxBQ4EffkdAHOV6CMqqi3WtFMTC6GY8ggen +5ieCWxjmD27ZUD6KQhgpxrRW/FYQoAUXvQwjf/ST7ZwaUb7dRUG/kSS0H4zpX897 +IZmflZ85OkYcbPnNe5yQzSipx6lVu6xiNGI1E0sUOlWDuYaNkqbG9AclVMwWVxJK +gnjIFNkXgiYtXSAfea7+1HAWFpWD2DU5/1JddRwWxRNVz0fMdWVSSt7wsKfkCpYL ++63C4iWEst3kvX5ZbJvw8NjnyvLplzh+ib7M+zkXYT9y2zqR2GUBGR2tUKRXCnxL +vJxxcypFURmFzI79R6d0lR2o0a9OF7FpJsKqeFdbxU2n5Z4FF5TKsl+gSRiNNOkm +bEgeqmiSBeGCc1qb3AdbCG19ndeNIdn8FCCqwkXfP+cAslHkwvgFuXkajDTznlvk +N1trSt8sV4pAWja63XVECDdCcAz+3F4hoKOKwJCcaNpQ5kUQR3i2TtJlycM33+FC +Y7BXN0Ute4qcvwXqZVUz9zkQxSgqIXobisQk+T8VyJoVIPVVYpbtbZNQvOSqeK3Z +ywplh6ZmwcSBo3c6WB4L7oOLnR7SUqTMHW+wmG2UMbX4cQrcufx9MmDm66+KAQ== +-----END CERTIFICATE----- + +# Issuer: CN=Hongkong Post Root CA 1 O=Hongkong Post +# Subject: CN=Hongkong Post Root CA 1 O=Hongkong Post +# Label: "Hongkong Post Root CA 1" +# Serial: 1000 +# MD5 Fingerprint: a8:0d:6f:39:78:b9:43:6d:77:42:6d:98:5a:cc:23:ca +# SHA1 Fingerprint: d6:da:a8:20:8d:09:d2:15:4d:24:b5:2f:cb:34:6e:b2:58:b2:8a:58 +# SHA256 Fingerprint: f9:e6:7d:33:6c:51:00:2a:c0:54:c6:32:02:2d:66:dd:a2:e7:e3:ff:f1:0a:d0:61:ed:31:d8:bb:b4:10:cf:b2 +-----BEGIN CERTIFICATE----- +MIIDMDCCAhigAwIBAgICA+gwDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UEBhMCSEsx +FjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdrb25nIFBvc3Qg +Um9vdCBDQSAxMB4XDTAzMDUxNTA1MTMxNFoXDTIzMDUxNTA0NTIyOVowRzELMAkG +A1UEBhMCSEsxFjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdr +b25nIFBvc3QgUm9vdCBDQSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEArP84tulmAknjorThkPlAj3n54r15/gK97iSSHSL22oVyaf7XPwnU3ZG1ApzQ +jVrhVcNQhrkpJsLj2aDxaQMoIIBFIi1WpztUlVYiWR8o3x8gPW2iNr4joLFutbEn +PzlTCeqrauh0ssJlXI6/fMN4hM2eFvz1Lk8gKgifd/PFHsSaUmYeSF7jEAaPIpjh +ZY4bXSNmO7ilMlHIhqqhqZ5/dpTCpmy3QfDVyAY45tQM4vM7TG1QjMSDJ8EThFk9 +nnV0ttgCXjqQesBCNnLsak3c78QA3xMYV18meMjWCnl3v/evt3a5pQuEF10Q6m/h +q5URX208o1xNg1vysxmKgIsLhwIDAQABoyYwJDASBgNVHRMBAf8ECDAGAQH/AgED +MA4GA1UdDwEB/wQEAwIBxjANBgkqhkiG9w0BAQUFAAOCAQEADkbVPK7ih9legYsC +mEEIjEy82tvuJxuC52pF7BaLT4Wg87JwvVqWuspube5Gi27nKi6Wsxkz67SfqLI3 +7piol7Yutmcn1KZJ/RyTZXaeQi/cImyaT/JaFTmxcdcrUehtHJjA2Sr0oYJ71clB +oiMBdDhViw+5LmeiIAQ32pwL0xch4I+XeTRvhEgCIDMb5jREn5Fw9IBehEPCKdJs +EhTkYY2sEJCehFC78JZvRZ+K88psT/oROhUVRsPNH4NbLUES7VBnQRM9IauUiqpO +fMGx+6fWtScvl6tu4B3i0RwsH0Ti/L6RoZz71ilTc4afU9hDDl3WY4JxHYB0yvbi +AmvZWg== +-----END CERTIFICATE----- + +# Issuer: CN=SecureSign RootCA11 O=Japan Certification Services, Inc. +# Subject: CN=SecureSign RootCA11 O=Japan Certification Services, Inc. 
+# Label: "SecureSign RootCA11" +# Serial: 1 +# MD5 Fingerprint: b7:52:74:e2:92:b4:80:93:f2:75:e4:cc:d7:f2:ea:26 +# SHA1 Fingerprint: 3b:c4:9f:48:f8:f3:73:a0:9c:1e:bd:f8:5b:b1:c3:65:c7:d8:11:b3 +# SHA256 Fingerprint: bf:0f:ee:fb:9e:3a:58:1a:d5:f9:e9:db:75:89:98:57:43:d2:61:08:5c:4d:31:4f:6f:5d:72:59:aa:42:16:12 +-----BEGIN CERTIFICATE----- +MIIDbTCCAlWgAwIBAgIBATANBgkqhkiG9w0BAQUFADBYMQswCQYDVQQGEwJKUDEr +MCkGA1UEChMiSmFwYW4gQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcywgSW5jLjEcMBoG +A1UEAxMTU2VjdXJlU2lnbiBSb290Q0ExMTAeFw0wOTA0MDgwNDU2NDdaFw0yOTA0 +MDgwNDU2NDdaMFgxCzAJBgNVBAYTAkpQMSswKQYDVQQKEyJKYXBhbiBDZXJ0aWZp +Y2F0aW9uIFNlcnZpY2VzLCBJbmMuMRwwGgYDVQQDExNTZWN1cmVTaWduIFJvb3RD +QTExMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA/XeqpRyQBTvLTJsz +i1oURaTnkBbR31fSIRCkF/3frNYfp+TbfPfs37gD2pRY/V1yfIw/XwFndBWW4wI8 +h9uuywGOwvNmxoVF9ALGOrVisq/6nL+k5tSAMJjzDbaTj6nU2DbysPyKyiyhFTOV +MdrAG/LuYpmGYz+/3ZMqg6h2uRMft85OQoWPIucuGvKVCbIFtUROd6EgvanyTgp9 +UK31BQ1FT0Zx/Sg+U/sE2C3XZR1KG/rPO7AxmjVuyIsG0wCR8pQIZUyxNAYAeoni +8McDWc/V1uinMrPmmECGxc0nEovMe863ETxiYAcjPitAbpSACW22s293bzUIUPsC +h8U+iQIDAQABo0IwQDAdBgNVHQ4EFgQUW/hNT7KlhtQ60vFjmqC+CfZXt94wDgYD +VR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEB +AKChOBZmLqdWHyGcBvod7bkixTgm2E5P7KN/ed5GIaGHd48HCJqypMWvDzKYC3xm +KbabfSVSSUOrTC4rbnpwrxYO4wJs+0LmGJ1F2FXI6Dvd5+H0LgscNFxsWEr7jIhQ +X5Ucv+2rIrVls4W6ng+4reV6G4pQOh29Dbx7VFALuUKvVaAYga1lme++5Jy/xIWr +QbJUb9wlze144o4MjQlJ3WN7WmmWAiGovVJZ6X01y8hSyn+B/tlr0/cR7SXf+Of5 +pPpyl4RTDaXQMhhRdlkUbA/r7F+AjHVDg8OFmP9Mni0N5HeDk061lgeLKBObjBmN +QSdJQO7e5iNEOdyhIta6A/I= +-----END CERTIFICATE----- + +# Issuer: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. +# Subject: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. +# Label: "Microsec e-Szigno Root CA 2009" +# Serial: 14014712776195784473 +# MD5 Fingerprint: f8:49:f4:03:bc:44:2d:83:be:48:69:7d:29:64:fc:b1 +# SHA1 Fingerprint: 89:df:74:fe:5c:f4:0f:4a:80:f9:e3:37:7d:54:da:91:e1:01:31:8e +# SHA256 Fingerprint: 3c:5f:81:fe:a5:fa:b8:2c:64:bf:a2:ea:ec:af:cd:e8:e0:77:fc:86:20:a7:ca:e5:37:16:3d:f3:6e:db:f3:78 +-----BEGIN CERTIFICATE----- +MIIECjCCAvKgAwIBAgIJAMJ+QwRORz8ZMA0GCSqGSIb3DQEBCwUAMIGCMQswCQYD +VQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFjAUBgNVBAoMDU1pY3Jvc2VjIEx0 +ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3ppZ25vIFJvb3QgQ0EgMjAwOTEfMB0G +CSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5odTAeFw0wOTA2MTYxMTMwMThaFw0y +OTEyMzAxMTMwMThaMIGCMQswCQYDVQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3Qx +FjAUBgNVBAoMDU1pY3Jvc2VjIEx0ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3pp +Z25vIFJvb3QgQ0EgMjAwOTEfMB0GCSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5o +dTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAOn4j/NjrdqG2KfgQvvP +kd6mJviZpWNwrZuuyjNAfW2WbqEORO7hE52UQlKavXWFdCyoDh2Tthi3jCyoz/tc +cbna7P7ofo/kLx2yqHWH2Leh5TvPmUpG0IMZfcChEhyVbUr02MelTTMuhTlAdX4U +fIASmFDHQWe4oIBhVKZsTh/gnQ4H6cm6M+f+wFUoLAKApxn1ntxVUwOXewdI/5n7 +N4okxFnMUBBjjqqpGrCEGob5X7uxUG6k0QrM1XF+H6cbfPVTbiJfyyvm1HxdrtbC +xkzlBQHZ7Vf8wSN5/PrIJIOV87VqUQHQd9bpEqH5GoP7ghu5sJf0dgYzQ0mg/wu1 ++rUCAwEAAaOBgDB+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G +A1UdDgQWBBTLD8bfQkPMPcu1SCOhGnqmKrs0aDAfBgNVHSMEGDAWgBTLD8bfQkPM +Pcu1SCOhGnqmKrs0aDAbBgNVHREEFDASgRBpbmZvQGUtc3ppZ25vLmh1MA0GCSqG +SIb3DQEBCwUAA4IBAQDJ0Q5eLtXMs3w+y/w9/w0olZMEyL/azXm4Q5DwpL7v8u8h +mLzU1F0G9u5C7DBsoKqpyvGvivo/C3NqPuouQH4frlRheesuCDfXI/OMn74dseGk +ddug4lQUsbocKaQY9hK6ohQU4zE1yED/t+AFdlfBHFny+L/k7SViXITwfn4fs775 +tyERzAMBVnCnEJIeGzSBHq2cGsMEPO0CYdYeBvNfOofyK/FFh+U9rNHHV4S9a67c +2Pm2G2JwCz02yULyMtd6YebS2z3PyKnJm9zbWETXbzivf3jTo60adbocwTZ8jx5t +HMN1Rq41Bab2XD0h7lbwyYIiLXpUq3DDfSJlgnCW +-----END CERTIFICATE----- + +# Issuer: 
CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 +# Label: "GlobalSign Root CA - R3" +# Serial: 4835703278459759426209954 +# MD5 Fingerprint: c5:df:b8:49:ca:05:13:55:ee:2d:ba:1a:c3:3e:b0:28 +# SHA1 Fingerprint: d6:9b:56:11:48:f0:1c:77:c5:45:78:c1:09:26:df:5b:85:69:76:ad +# SHA256 Fingerprint: cb:b5:22:d7:b7:f1:27:ad:6a:01:13:86:5b:df:1c:d4:10:2e:7d:07:59:af:63:5a:7c:f4:72:0d:c9:63:c5:3b +-----BEGIN CERTIFICATE----- +MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4G +A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNp +Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4 +MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEG +A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8 +RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsT +gHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmm +KPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zd +QQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZ +XriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAw +DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+o +LkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZU +RUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMp +jjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK +6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQX +mcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecs +Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH +WD9f +-----END CERTIFICATE----- + +# Issuer: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 +# Subject: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 +# Label: "Autoridad de Certificacion Firmaprofesional CIF A62634068" +# Serial: 6047274297262753887 +# MD5 Fingerprint: 73:3a:74:7a:ec:bb:a3:96:a6:c2:e4:e2:c8:9b:c0:c3 +# SHA1 Fingerprint: ae:c5:fb:3f:c8:e1:bf:c4:e5:4f:03:07:5a:9a:e8:00:b7:f7:b6:fa +# SHA256 Fingerprint: 04:04:80:28:bf:1f:28:64:d4:8f:9a:d4:d8:32:94:36:6a:82:88:56:55:3f:3b:14:30:3f:90:14:7f:5d:40:ef +-----BEGIN CERTIFICATE----- +MIIGFDCCA/ygAwIBAgIIU+w77vuySF8wDQYJKoZIhvcNAQEFBQAwUTELMAkGA1UE +BhMCRVMxQjBABgNVBAMMOUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1h +cHJvZmVzaW9uYWwgQ0lGIEE2MjYzNDA2ODAeFw0wOTA1MjAwODM4MTVaFw0zMDEy +MzEwODM4MTVaMFExCzAJBgNVBAYTAkVTMUIwQAYDVQQDDDlBdXRvcmlkYWQgZGUg +Q2VydGlmaWNhY2lvbiBGaXJtYXByb2Zlc2lvbmFsIENJRiBBNjI2MzQwNjgwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKlmuO6vj78aI14H9M2uDDUtd9 +thDIAl6zQyrET2qyyhxdKJp4ERppWVevtSBC5IsP5t9bpgOSL/UR5GLXMnE42QQM +cas9UX4PB99jBVzpv5RvwSmCwLTaUbDBPLutN0pcyvFLNg4kq7/DhHf9qFD0sefG +L9ItWY16Ck6WaVICqjaY7Pz6FIMMNx/Jkjd/14Et5cS54D40/mf0PmbR0/RAz15i +NA9wBj4gGFrO93IbJWyTdBSTo3OxDqqHECNZXyAFGUftaI6SEspd/NYrspI8IM/h +X68gvqB2f3bl7BqGYTM+53u0P6APjqK5am+5hyZvQWyIplD9amML9ZMWGxmPsu2b +m8mQ9QEM3xk9Dz44I8kvjwzRAv4bVdZO0I08r0+k8/6vKtMFnXkIoctXMbScyJCy +Z/QYFpM6/EfY0XiWMR+6KwxfXZmtY4laJCB22N/9q06mIqqdXuYnin1oKaPnirja +EbsXLZmdEyRG98Xi2J+Of8ePdG1asuhy9azuJBCtLxTa/y2aRnFHvkLfuwHb9H/T +KI8xWVvTyQKmtFLKbpf7Q8UIJm+K9Lv9nyiqDdVF8xM6HdjAeI9BZzwelGSuewvF +6NkBiDkal4ZkQdU7hwxu+g/GvUgUvzlN1J5Bto+WHWOWk9mVBngxaJ43BjuAiUVh +OSPHG0SjFeUc+JIwuwIDAQABo4HvMIHsMBIGA1UdEwEB/wQIMAYBAf8CAQEwDgYD +VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRlzeurNR4APn7VdMActHNHDhpkLzCBpgYD +VR0gBIGeMIGbMIGYBgRVHSAAMIGPMC8GCCsGAQUFBwIBFiNodHRwOi8vd3d3LmZp 
+cm1hcHJvZmVzaW9uYWwuY29tL2NwczBcBggrBgEFBQcCAjBQHk4AUABhAHMAZQBv +ACAAZABlACAAbABhACAAQgBvAG4AYQBuAG8AdgBhACAANAA3ACAAQgBhAHIAYwBl +AGwAbwBuAGEAIAAwADgAMAAxADcwDQYJKoZIhvcNAQEFBQADggIBABd9oPm03cXF +661LJLWhAqvdpYhKsg9VSytXjDvlMd3+xDLx51tkljYyGOylMnfX40S2wBEqgLk9 +am58m9Ot/MPWo+ZkKXzR4Tgegiv/J2Wv+xYVxC5xhOW1//qkR71kMrv2JYSiJ0L1 +ILDCExARzRAVukKQKtJE4ZYm6zFIEv0q2skGz3QeqUvVhyj5eTSSPi5E6PaPT481 +PyWzOdxjKpBrIF/EUhJOlywqrJ2X3kjyo2bbwtKDlaZmp54lD+kLM5FlClrD2VQS +3a/DTg4fJl4N3LON7NWBcN7STyQF82xO9UxJZo3R/9ILJUFI/lGExkKvgATP0H5k +SeTy36LssUzAKh3ntLFlosS88Zj0qnAHY7S42jtM+kAiMFsRpvAFDsYCA0irhpuF +3dvd6qJ2gHN99ZwExEWN57kci57q13XRcrHedUTnQn3iV2t93Jm8PYMo6oCTjcVM +ZcFwgbg4/EMxsvYDNEeyrPsiBsse3RdHHF9mudMaotoRsaS8I8nkvof/uZS2+F0g +StRf571oe2XyFR7SOqkt6dhrJKyXWERHrVkY8SFlcN7ONGCoQPHzPKTDKCOM/icz +Q0CgFzzr6juwcqajuUpLXhZI9LK8yIySxZ2frHI2vDSANGupi5LAuBft7HZT9SQB +jLMi6Et8Vcad+qMUu2WFbm5PEn4KPJ2V +-----END CERTIFICATE----- + +# Issuer: CN=Izenpe.com O=IZENPE S.A. +# Subject: CN=Izenpe.com O=IZENPE S.A. +# Label: "Izenpe.com" +# Serial: 917563065490389241595536686991402621 +# MD5 Fingerprint: a6:b0:cd:85:80:da:5c:50:34:a3:39:90:2f:55:67:73 +# SHA1 Fingerprint: 2f:78:3d:25:52:18:a7:4a:65:39:71:b5:2c:a2:9c:45:15:6f:e9:19 +# SHA256 Fingerprint: 25:30:cc:8e:98:32:15:02:ba:d9:6f:9b:1f:ba:1b:09:9e:2d:29:9e:0f:45:48:bb:91:4f:36:3b:c0:d4:53:1f +-----BEGIN CERTIFICATE----- +MIIF8TCCA9mgAwIBAgIQALC3WhZIX7/hy/WL1xnmfTANBgkqhkiG9w0BAQsFADA4 +MQswCQYDVQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6 +ZW5wZS5jb20wHhcNMDcxMjEzMTMwODI4WhcNMzcxMjEzMDgyNzI1WjA4MQswCQYD +VQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6ZW5wZS5j +b20wggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDJ03rKDx6sp4boFmVq +scIbRTJxldn+EFvMr+eleQGPicPK8lVx93e+d5TzcqQsRNiekpsUOqHnJJAKClaO +xdgmlOHZSOEtPtoKct2jmRXagaKH9HtuJneJWK3W6wyyQXpzbm3benhB6QiIEn6H +LmYRY2xU+zydcsC8Lv/Ct90NduM61/e0aL6i9eOBbsFGb12N4E3GVFWJGjMxCrFX +uaOKmMPsOzTFlUFpfnXCPCDFYbpRR6AgkJOhkEvzTnyFRVSa0QUmQbC1TR0zvsQD +yCV8wXDbO/QJLVQnSKwv4cSsPsjLkkxTOTcj7NMB+eAJRE1NZMDhDVqHIrytG6P+ +JrUV86f8hBnp7KGItERphIPzidF0BqnMC9bC3ieFUCbKF7jJeodWLBoBHmy+E60Q +rLUk9TiRodZL2vG70t5HtfG8gfZZa88ZU+mNFctKy6lvROUbQc/hhqfK0GqfvEyN +BjNaooXlkDWgYlwWTvDjovoDGrQscbNYLN57C9saD+veIR8GdwYDsMnvmfzAuU8L +hij+0rnq49qlw0dpEuDb8PYZi+17cNcC1u2HGCgsBCRMd+RIihrGO5rUD8r6ddIB +QFqNeb+Lz0vPqhbBleStTIo+F5HUsWLlguWABKQDfo2/2n+iD5dPDNMN+9fR5XJ+ +HMh3/1uaD7euBUbl8agW7EekFwIDAQABo4H2MIHzMIGwBgNVHREEgagwgaWBD2lu +Zm9AaXplbnBlLmNvbaSBkTCBjjFHMEUGA1UECgw+SVpFTlBFIFMuQS4gLSBDSUYg +QTAxMzM3MjYwLVJNZXJjLlZpdG9yaWEtR2FzdGVpeiBUMTA1NSBGNjIgUzgxQzBB +BgNVBAkMOkF2ZGEgZGVsIE1lZGl0ZXJyYW5lbyBFdG9yYmlkZWEgMTQgLSAwMTAx +MCBWaXRvcmlhLUdhc3RlaXowDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC +AQYwHQYDVR0OBBYEFB0cZQ6o8iV7tJHP5LGx5r1VdGwFMA0GCSqGSIb3DQEBCwUA +A4ICAQB4pgwWSp9MiDrAyw6lFn2fuUhfGI8NYjb2zRlrrKvV9pF9rnHzP7MOeIWb +laQnIUdCSnxIOvVFfLMMjlF4rJUT3sb9fbgakEyrkgPH7UIBzg/YsfqikuFgba56 +awmqxinuaElnMIAkejEWOVt+8Rwu3WwJrfIxwYJOubv5vr8qhT/AQKM6WfxZSzwo +JNu0FXWuDYi6LnPAvViH5ULy617uHjAimcs30cQhbIHsvm0m5hzkQiCeR7Csg1lw +LDXWrzY0tM07+DKo7+N4ifuNRSzanLh+QBxh5z6ikixL8s36mLYp//Pye6kfLqCT +VyvehQP5aTfLnnhqBbTFMXiJ7HqnheG5ezzevh55hM6fcA5ZwjUukCox2eRFekGk +LhObNA5me0mrZJfQRsN5nXJQY6aYWwa9SG3YOYNw6DXwBdGqvOPbyALqfP2C2sJb +UjWumDqtujWTI6cfSN01RpiyEGjkpTHCClguGYEQyVB1/OpaFs4R1+7vUIgtYf8/ +QnMFlEPVjjxOAToZpR9GTnfQXeWBIiGH/pR9hNiTrdZoQ0iy2+tzJOeRf1SktoA+ +naM8THLCV8Sg1Mw4J87VBp6iSNnpn86CcDaTmjvfliHjWbcM2pE38P1ZWrOZyGls +QyYBNWNgVYkDOnXYukrZVP/u3oDYLdE41V4tC5h9Pmzb/CaIxw== +-----END CERTIFICATE----- + +# Issuer: CN=Chambers of Commerce Root - 2008 O=AC 
Camerfirma S.A. +# Subject: CN=Chambers of Commerce Root - 2008 O=AC Camerfirma S.A. +# Label: "Chambers of Commerce Root - 2008" +# Serial: 11806822484801597146 +# MD5 Fingerprint: 5e:80:9e:84:5a:0e:65:0b:17:02:f3:55:18:2a:3e:d7 +# SHA1 Fingerprint: 78:6a:74:ac:76:ab:14:7f:9c:6a:30:50:ba:9e:a8:7e:fe:9a:ce:3c +# SHA256 Fingerprint: 06:3e:4a:fa:c4:91:df:d3:32:f3:08:9b:85:42:e9:46:17:d8:93:d7:fe:94:4e:10:a7:93:7e:e2:9d:96:93:c0 +-----BEGIN CERTIFICATE----- +MIIHTzCCBTegAwIBAgIJAKPaQn6ksa7aMA0GCSqGSIb3DQEBBQUAMIGuMQswCQYD +VQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3VycmVudCBhZGRyZXNzIGF0 +IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAGA1UEBRMJQTgyNzQzMjg3 +MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xKTAnBgNVBAMTIENoYW1iZXJz +IG9mIENvbW1lcmNlIFJvb3QgLSAyMDA4MB4XDTA4MDgwMTEyMjk1MFoXDTM4MDcz +MTEyMjk1MFowga4xCzAJBgNVBAYTAkVVMUMwQQYDVQQHEzpNYWRyaWQgKHNlZSBj +dXJyZW50IGFkZHJlc3MgYXQgd3d3LmNhbWVyZmlybWEuY29tL2FkZHJlc3MpMRIw +EAYDVQQFEwlBODI3NDMyODcxGzAZBgNVBAoTEkFDIENhbWVyZmlybWEgUy5BLjEp +MCcGA1UEAxMgQ2hhbWJlcnMgb2YgQ29tbWVyY2UgUm9vdCAtIDIwMDgwggIiMA0G +CSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCvAMtwNyuAWko6bHiUfaN/Gh/2NdW9 +28sNRHI+JrKQUrpjOyhYb6WzbZSm891kDFX29ufyIiKAXuFixrYp4YFs8r/lfTJq +VKAyGVn+H4vXPWCGhSRv4xGzdz4gljUha7MI2XAuZPeEklPWDrCQiorjh40G072Q +DuKZoRuGDtqaCrsLYVAGUvGef3bsyw/QHg3PmTA9HMRFEFis1tPo1+XqxQEHd9ZR +5gN/ikilTWh1uem8nk4ZcfUyS5xtYBkL+8ydddy/Js2Pk3g5eXNeJQ7KXOt3EgfL +ZEFHcpOrUMPrCXZkNNI5t3YRCQ12RcSprj1qr7V9ZS+UWBDsXHyvfuK2GNnQm05a +Sd+pZgvMPMZ4fKecHePOjlO+Bd5gD2vlGts/4+EhySnB8esHnFIbAURRPHsl18Tl +UlRdJQfKFiC4reRB7noI/plvg6aRArBsNlVq5331lubKgdaX8ZSD6e2wsWsSaR6s ++12pxZjptFtYer49okQ6Y1nUCyXeG0+95QGezdIp1Z8XGQpvvwyQ0wlf2eOKNcx5 +Wk0ZN5K3xMGtr/R5JJqyAQuxr1yW84Ay+1w9mPGgP0revq+ULtlVmhduYJ1jbLhj +ya6BXBg14JC7vjxPNyK5fuvPnnchpj04gftI2jE9K+OJ9dC1vX7gUMQSibMjmhAx +hduub+84Mxh2EQIDAQABo4IBbDCCAWgwEgYDVR0TAQH/BAgwBgEB/wIBDDAdBgNV +HQ4EFgQU+SSsD7K1+HnA+mCIG8TZTQKeFxkwgeMGA1UdIwSB2zCB2IAU+SSsD7K1 ++HnA+mCIG8TZTQKeFxmhgbSkgbEwga4xCzAJBgNVBAYTAkVVMUMwQQYDVQQHEzpN +YWRyaWQgKHNlZSBjdXJyZW50IGFkZHJlc3MgYXQgd3d3LmNhbWVyZmlybWEuY29t +L2FkZHJlc3MpMRIwEAYDVQQFEwlBODI3NDMyODcxGzAZBgNVBAoTEkFDIENhbWVy +ZmlybWEgUy5BLjEpMCcGA1UEAxMgQ2hhbWJlcnMgb2YgQ29tbWVyY2UgUm9vdCAt +IDIwMDiCCQCj2kJ+pLGu2jAOBgNVHQ8BAf8EBAMCAQYwPQYDVR0gBDYwNDAyBgRV +HSAAMCowKAYIKwYBBQUHAgEWHGh0dHA6Ly9wb2xpY3kuY2FtZXJmaXJtYS5jb20w +DQYJKoZIhvcNAQEFBQADggIBAJASryI1wqM58C7e6bXpeHxIvj99RZJe6dqxGfwW +PJ+0W2aeaufDuV2I6A+tzyMP3iU6XsxPpcG1Lawk0lgH3qLPaYRgM+gQDROpI9CF +5Y57pp49chNyM/WqfcZjHwj0/gF/JM8rLFQJ3uIrbZLGOU8W6jx+ekbURWpGqOt1 +glanq6B8aBMz9p0w8G8nOSQjKpD9kCk18pPfNKXG9/jvjA9iSnyu0/VU+I22mlaH +FoI6M6taIgj3grrqLuBHmrS1RaMFO9ncLkVAO+rcf+g769HsJtg1pDDFOqxXnrN2 +pSB7+R5KBWIBpih1YJeSDW4+TTdDDZIVnBgizVGZoCkaPF+KMjNbMMeJL0eYD6MD +xvbxrN8y8NmBGuScvfaAFPDRLLmF9dijscilIeUcE5fuDr3fKanvNFNb0+RqE4QG +tjICxFKuItLcsiFCGtpA8CnJ7AoMXOLQusxI0zcKzBIKinmwPQN/aUv0NCB9szTq +jktk9T79syNnFQ0EuPAtwQlRPLJsFfClI9eDdOTlLsn+mCdCxqvGnrDQWzilm1De +fhiYtUU79nm06PcaewaD+9CL2rvHvRirCG88gGtAPxkZumWK5r7VXNM21+9AUiRg +OGcEMeyP84LG3rlV8zsxkVrctQgVrXYlCg17LofiDKYGvCYQbTed7N14jHyAxfDZ +d0jQ +-----END CERTIFICATE----- + +# Issuer: CN=Global Chambersign Root - 2008 O=AC Camerfirma S.A. +# Subject: CN=Global Chambersign Root - 2008 O=AC Camerfirma S.A. 
+# Label: "Global Chambersign Root - 2008" +# Serial: 14541511773111788494 +# MD5 Fingerprint: 9e:80:ff:78:01:0c:2e:c1:36:bd:fe:96:90:6e:08:f3 +# SHA1 Fingerprint: 4a:bd:ee:ec:95:0d:35:9c:89:ae:c7:52:a1:2c:5b:29:f6:d6:aa:0c +# SHA256 Fingerprint: 13:63:35:43:93:34:a7:69:80:16:a0:d3:24:de:72:28:4e:07:9d:7b:52:20:bb:8f:bd:74:78:16:ee:be:ba:ca +-----BEGIN CERTIFICATE----- +MIIHSTCCBTGgAwIBAgIJAMnN0+nVfSPOMA0GCSqGSIb3DQEBBQUAMIGsMQswCQYD +VQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3VycmVudCBhZGRyZXNzIGF0 +IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAGA1UEBRMJQTgyNzQzMjg3 +MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xJzAlBgNVBAMTHkdsb2JhbCBD +aGFtYmVyc2lnbiBSb290IC0gMjAwODAeFw0wODA4MDExMjMxNDBaFw0zODA3MzEx +MjMxNDBaMIGsMQswCQYDVQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3Vy +cmVudCBhZGRyZXNzIGF0IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAG +A1UEBRMJQTgyNzQzMjg3MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xJzAl +BgNVBAMTHkdsb2JhbCBDaGFtYmVyc2lnbiBSb290IC0gMjAwODCCAiIwDQYJKoZI +hvcNAQEBBQADggIPADCCAgoCggIBAMDfVtPkOpt2RbQT2//BthmLN0EYlVJH6xed +KYiONWwGMi5HYvNJBL99RDaxccy9Wglz1dmFRP+RVyXfXjaOcNFccUMd2drvXNL7 +G706tcuto8xEpw2uIRU/uXpbknXYpBI4iRmKt4DS4jJvVpyR1ogQC7N0ZJJ0YPP2 +zxhPYLIj0Mc7zmFLmY/CDNBAspjcDahOo7kKrmCgrUVSY7pmvWjg+b4aqIG7HkF4 +ddPB/gBVsIdU6CeQNR1MM62X/JcumIS/LMmjv9GYERTtY/jKmIhYF5ntRQOXfjyG +HoiMvvKRhI9lNNgATH23MRdaKXoKGCQwoze1eqkBfSbW+Q6OWfH9GzO1KTsXO0G2 +Id3UwD2ln58fQ1DJu7xsepeY7s2MH/ucUa6LcL0nn3HAa6x9kGbo1106DbDVwo3V +yJ2dwW3Q0L9R5OP4wzg2rtandeavhENdk5IMagfeOx2YItaswTXbo6Al/3K1dh3e +beksZixShNBFks4c5eUzHdwHU1SjqoI7mjcv3N2gZOnm3b2u/GSFHTynyQbehP9r +6GsaPMWis0L7iwk+XwhSx2LE1AVxv8Rk5Pihg+g+EpuoHtQ2TS9x9o0o9oOpE9Jh +wZG7SMA0j0GMS0zbaRL/UJScIINZc+18ofLx/d33SdNDWKBWY8o9PeU1VlnpDsog +zCtLkykPAgMBAAGjggFqMIIBZjASBgNVHRMBAf8ECDAGAQH/AgEMMB0GA1UdDgQW +BBS5CcqcHtvTbDprru1U8VuTBjUuXjCB4QYDVR0jBIHZMIHWgBS5CcqcHtvTbDpr +ru1U8VuTBjUuXqGBsqSBrzCBrDELMAkGA1UEBhMCRVUxQzBBBgNVBAcTOk1hZHJp +ZCAoc2VlIGN1cnJlbnQgYWRkcmVzcyBhdCB3d3cuY2FtZXJmaXJtYS5jb20vYWRk +cmVzcykxEjAQBgNVBAUTCUE4Mjc0MzI4NzEbMBkGA1UEChMSQUMgQ2FtZXJmaXJt +YSBTLkEuMScwJQYDVQQDEx5HbG9iYWwgQ2hhbWJlcnNpZ24gUm9vdCAtIDIwMDiC +CQDJzdPp1X0jzjAOBgNVHQ8BAf8EBAMCAQYwPQYDVR0gBDYwNDAyBgRVHSAAMCow +KAYIKwYBBQUHAgEWHGh0dHA6Ly9wb2xpY3kuY2FtZXJmaXJtYS5jb20wDQYJKoZI +hvcNAQEFBQADggIBAICIf3DekijZBZRG/5BXqfEv3xoNa/p8DhxJJHkn2EaqbylZ +UohwEurdPfWbU1Rv4WCiqAm57OtZfMY18dwY6fFn5a+6ReAJ3spED8IXDneRRXoz +X1+WLGiLwUePmJs9wOzL9dWCkoQ10b42OFZyMVtHLaoXpGNR6woBrX/sdZ7LoR/x +fxKxueRkf2fWIyr0uDldmOghp+G9PUIadJpwr2hsUF1Jz//7Dl3mLEfXgTpZALVz +a2Mg9jFFCDkO9HB+QHBaP9BrQql0PSgvAm11cpUJjUhjxsYjV5KTXjXBjfkK9yyd +Yhz2rXzdpjEetrHHfoUm+qRqtdpjMNHvkzeyZi99Bffnt0uYlDXA2TopwZ2yUDMd +SqlapskD7+3056huirRXhOukP9DuqqqHW2Pok+JrqNS4cnhrG+055F3Lm6qH1U9O +AP7Zap88MQ8oAgF9mOinsKJknnn4SPIVqczmyETrP3iZ8ntxPjzxmKfFGBI/5rso +M0LpRQp8bfKGeS/Fghl9CYl8slR2iK7ewfPM4W7bMdaTrpmg7yVqc5iJWzouE4ge +v8CSlDQb4ye3ix5vQv/n6TebUB0tovkC7stYWDpxvGjjqsGvHCgfotwjZT+B6q6Z +09gwzxMNTxXJhLynSC34MCN32EZLeW32jO06f2ARePTpm67VVMB0gNELQp/B +-----END CERTIFICATE----- + +# Issuer: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. +# Subject: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. 
+# Label: "Go Daddy Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: 80:3a:bc:22:c1:e6:fb:8d:9b:3b:27:4a:32:1b:9a:01 +# SHA1 Fingerprint: 47:be:ab:c9:22:ea:e8:0e:78:78:34:62:a7:9f:45:c2:54:fd:e6:8b +# SHA256 Fingerprint: 45:14:0b:32:47:eb:9c:c8:c5:b4:f0:d7:b5:30:91:f7:32:92:08:9e:6e:5a:63:e2:74:9d:d3:ac:a9:19:8e:da +-----BEGIN CERTIFICATE----- +MIIDxTCCAq2gAwIBAgIBADANBgkqhkiG9w0BAQsFADCBgzELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxGjAYBgNVBAoT +EUdvRGFkZHkuY29tLCBJbmMuMTEwLwYDVQQDEyhHbyBEYWRkeSBSb290IENlcnRp +ZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIz +NTk1OVowgYMxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQH +EwpTY290dHNkYWxlMRowGAYDVQQKExFHb0RhZGR5LmNvbSwgSW5jLjExMC8GA1UE +AxMoR28gRGFkZHkgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIw +DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL9xYgjx+lk09xvJGKP3gElY6SKD +E6bFIEMBO4Tx5oVJnyfq9oQbTqC023CYxzIBsQU+B07u9PpPL1kwIuerGVZr4oAH +/PMWdYA5UXvl+TW2dE6pjYIT5LY/qQOD+qK+ihVqf94Lw7YZFAXK6sOoBJQ7Rnwy +DfMAZiLIjWltNowRGLfTshxgtDj6AozO091GB94KPutdfMh8+7ArU6SSYmlRJQVh +GkSBjCypQ5Yj36w6gZoOKcUcqeldHraenjAKOc7xiID7S13MMuyFYkMlNAJWJwGR +tDtwKj9useiciAF9n9T521NtYJ2/LOdYq7hfRvzOxBsDPAnrSTFcaUaz4EcCAwEA +AaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE +FDqahQcQZyi27/a9BUFuIMGU2g/eMA0GCSqGSIb3DQEBCwUAA4IBAQCZ21151fmX +WWcDYfF+OwYxdS2hII5PZYe096acvNjpL9DbWu7PdIxztDhC2gV7+AJ1uP2lsdeu +9tfeE8tTEH6KRtGX+rcuKxGrkLAngPnon1rpN5+r5N9ss4UXnT3ZJE95kTXWXwTr +gIOrmgIttRD02JDHBHNA7XIloKmf7J6raBKZV8aPEjoJpL1E/QYVN8Gb5DKj7Tjo +2GTzLH4U/ALqn83/B2gX2yKQOC16jdFU8WnjXzPKej17CuPKf1855eJ1usV2GDPO +LPAvTK33sefOT6jEm0pUBsV/fdUID+Ic/n4XuKxe9tQWskMJDE32p2u0mYRlynqI +4uJEvlz36hz1 +-----END CERTIFICATE----- + +# Issuer: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. +# Subject: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
+# Label: "Starfield Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: d6:39:81:c6:52:7e:96:69:fc:fc:ca:66:ed:05:f2:96 +# SHA1 Fingerprint: b5:1c:06:7c:ee:2b:0c:3d:f8:55:ab:2d:92:f4:fe:39:d4:e7:0f:0e +# SHA256 Fingerprint: 2c:e1:cb:0b:f9:d2:f9:e1:02:99:3f:be:21:51:52:c3:b2:dd:0c:ab:de:1c:68:e5:31:9b:83:91:54:db:b7:f5 +-----BEGIN CERTIFICATE----- +MIID3TCCAsWgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBjzELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT +HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAMTKVN0YXJmaWVs +ZCBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAw +MFoXDTM3MTIzMTIzNTk1OVowgY8xCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6 +b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFyZmllbGQgVGVj +aG5vbG9naWVzLCBJbmMuMTIwMAYDVQQDEylTdGFyZmllbGQgUm9vdCBDZXJ0aWZp +Y2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC +ggEBAL3twQP89o/8ArFvW59I2Z154qK3A2FWGMNHttfKPTUuiUP3oWmb3ooa/RMg +nLRJdzIpVv257IzdIvpy3Cdhl+72WoTsbhm5iSzchFvVdPtrX8WJpRBSiUZV9Lh1 +HOZ/5FSuS/hVclcCGfgXcVnrHigHdMWdSL5stPSksPNkN3mSwOxGXn/hbVNMYq/N +Hwtjuzqd+/x5AJhhdM8mgkBj87JyahkNmcrUDnXMN/uLicFZ8WJ/X7NfZTD4p7dN +dloedl40wOiWVpmKs/B/pM293DIxfJHP4F8R+GuqSVzRmZTRouNjWwl2tVZi4Ut0 +HZbUJtQIBFnQmA4O5t78w+wfkPECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO +BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFHwMMh+n2TB/xH1oo2Kooc6rB1snMA0G +CSqGSIb3DQEBCwUAA4IBAQARWfolTwNvlJk7mh+ChTnUdgWUXuEok21iXQnCoKjU +sHU48TRqneSfioYmUeYs0cYtbpUgSpIB7LiKZ3sx4mcujJUDJi5DnUox9g61DLu3 +4jd/IroAow57UvtruzvE03lRTs2Q9GcHGcg8RnoNAX3FWOdt5oUwF5okxBDgBPfg +8n/Uqgr/Qh037ZTlZFkSIHc40zI+OIF1lnP6aI+xy84fxez6nH7PfrHxBy22/L/K +pL/QlwVKvOoYKAKQvVR4CSFx09F9HdkWsKlhPdAKACL8x3vLCWRFCztAgfd9fDL1 +mMpYjn0q7pBZc2T5NnReJaH1ZgUufzkVqSr7UIuOhWn0 +-----END CERTIFICATE----- + +# Issuer: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. +# Subject: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
+# Label: "Starfield Services Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: 17:35:74:af:7b:61:1c:eb:f4:f9:3c:e2:ee:40:f9:a2 +# SHA1 Fingerprint: 92:5a:8f:8d:2c:6d:04:e0:66:5f:59:6a:ff:22:d8:63:e8:25:6f:3f +# SHA256 Fingerprint: 56:8d:69:05:a2:c8:87:08:a4:b3:02:51:90:ed:cf:ed:b1:97:4a:60:6a:13:c6:e5:29:0f:cb:2a:e6:3e:da:b5 +-----BEGIN CERTIFICATE----- +MIID7zCCAtegAwIBAgIBADANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT +HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xOzA5BgNVBAMTMlN0YXJmaWVs +ZCBTZXJ2aWNlcyBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5 +MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgZgxCzAJBgNVBAYTAlVTMRAwDgYD +VQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFy +ZmllbGQgVGVjaG5vbG9naWVzLCBJbmMuMTswOQYDVQQDEzJTdGFyZmllbGQgU2Vy +dmljZXMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBANUMOsQq+U7i9b4Zl1+OiFOxHz/Lz58gE20p +OsgPfTz3a3Y4Y9k2YKibXlwAgLIvWX/2h/klQ4bnaRtSmpDhcePYLQ1Ob/bISdm2 +8xpWriu2dBTrz/sm4xq6HZYuajtYlIlHVv8loJNwU4PahHQUw2eeBGg6345AWh1K +Ts9DkTvnVtYAcMtS7nt9rjrnvDH5RfbCYM8TWQIrgMw0R9+53pBlbQLPLJGmpufe +hRhJfGZOozptqbXuNC66DQO4M99H67FrjSXZm86B0UVGMpZwh94CDklDhbZsc7tk +6mFBrMnUVN+HL8cisibMn1lUaJ/8viovxFUcdUBgF4UCVTmLfwUCAwEAAaNCMEAw +DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJxfAN+q +AdcwKziIorhtSpzyEZGDMA0GCSqGSIb3DQEBCwUAA4IBAQBLNqaEd2ndOxmfZyMI +bw5hyf2E3F/YNoHN2BtBLZ9g3ccaaNnRbobhiCPPE95Dz+I0swSdHynVv/heyNXB +ve6SbzJ08pGCL72CQnqtKrcgfU28elUSwhXqvfdqlS5sdJ/PHLTyxQGjhdByPq1z +qwubdQxtRbeOlKyWN7Wg0I8VRw7j6IPdj/3vQQF3zCepYoUz8jcI73HPdwbeyBkd +iEDPfUYd/x7H4c7/I9vG+o1VTqkC50cRRj70/b17KSa7qWFiNyi2LSr2EIZkyXCn +0q23KXB56jzaYyWf/Wi3MOxw+3WKt21gZ7IeyLnp2KhvAotnDU0mV3HaIPzBSlCN +sSi6 +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Commercial O=AffirmTrust +# Subject: CN=AffirmTrust Commercial O=AffirmTrust +# Label: "AffirmTrust Commercial" +# Serial: 8608355977964138876 +# MD5 Fingerprint: 82:92:ba:5b:ef:cd:8a:6f:a6:3d:55:f9:84:f6:d6:b7 +# SHA1 Fingerprint: f9:b5:b6:32:45:5f:9c:be:ec:57:5f:80:dc:e9:6e:2c:c7:b2:78:b7 +# SHA256 Fingerprint: 03:76:ab:1d:54:c5:f9:80:3c:e4:b2:e2:01:a0:ee:7e:ef:7b:57:b6:36:e8:a9:3c:9b:8d:48:60:c9:6f:5f:a7 +-----BEGIN CERTIFICATE----- +MIIDTDCCAjSgAwIBAgIId3cGJyapsXwwDQYJKoZIhvcNAQELBQAwRDELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz +dCBDb21tZXJjaWFsMB4XDTEwMDEyOTE0MDYwNloXDTMwMTIzMTE0MDYwNlowRDEL +MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp +cm1UcnVzdCBDb21tZXJjaWFsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEA9htPZwcroRX1BiLLHwGy43NFBkRJLLtJJRTWzsO3qyxPxkEylFf6EqdbDuKP +Hx6GGaeqtS25Xw2Kwq+FNXkyLbscYjfysVtKPcrNcV/pQr6U6Mje+SJIZMblq8Yr +ba0F8PrVC8+a5fBQpIs7R6UjW3p6+DM/uO+Zl+MgwdYoic+U+7lF7eNAFxHUdPAL +MeIrJmqbTFeurCA+ukV6BfO9m2kVrn1OIGPENXY6BwLJN/3HR+7o8XYdcxXyl6S1 +yHp52UKqK39c/s4mT6NmgTWvRLpUHhwwMmWd5jyTXlBOeuM61G7MGvv50jeuJCqr +VwMiKA1JdX+3KNp1v47j3A55MQIDAQABo0IwQDAdBgNVHQ4EFgQUnZPGU4teyq8/ +nx4P5ZmVvCT2lI8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ +KoZIhvcNAQELBQADggEBAFis9AQOzcAN/wr91LoWXym9e2iZWEnStB03TX8nfUYG +XUPGhi4+c7ImfU+TqbbEKpqrIZcUsd6M06uJFdhrJNTxFq7YpFzUf1GO7RgBsZNj +vbz4YYCanrHOQnDiqX0GJX0nof5v7LMeJNrjS1UaADs1tDvZ110w/YETifLCBivt +Z8SOyUOyXGsViQK8YvxO8rUzqrJv0wqiUOP2O+guRMLbZjipM1ZI8W0bM40NjD9g +N53Tym1+NH4Nn3J2ixufcv1SNUFFApYvHLKac0khsUlHRUe072o0EclNmsxZt9YC +nlpOZbWUrhvfKbAW8b8Angc6F2S1BLUjIZkKlTuXfO8= +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Networking O=AffirmTrust +# Subject: CN=AffirmTrust Networking 
O=AffirmTrust +# Label: "AffirmTrust Networking" +# Serial: 8957382827206547757 +# MD5 Fingerprint: 42:65:ca:be:01:9a:9a:4c:a9:8c:41:49:cd:c0:d5:7f +# SHA1 Fingerprint: 29:36:21:02:8b:20:ed:02:f5:66:c5:32:d1:d6:ed:90:9f:45:00:2f +# SHA256 Fingerprint: 0a:81:ec:5a:92:97:77:f1:45:90:4a:f3:8d:5d:50:9f:66:b5:e2:c5:8f:cd:b5:31:05:8b:0e:17:f3:f0:b4:1b +-----BEGIN CERTIFICATE----- +MIIDTDCCAjSgAwIBAgIIfE8EORzUmS0wDQYJKoZIhvcNAQEFBQAwRDELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz +dCBOZXR3b3JraW5nMB4XDTEwMDEyOTE0MDgyNFoXDTMwMTIzMTE0MDgyNFowRDEL +MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp +cm1UcnVzdCBOZXR3b3JraW5nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEAtITMMxcua5Rsa2FSoOujz3mUTOWUgJnLVWREZY9nZOIG41w3SfYvm4SEHi3y +YJ0wTsyEheIszx6e/jarM3c1RNg1lho9Nuh6DtjVR6FqaYvZ/Ls6rnla1fTWcbua +kCNrmreIdIcMHl+5ni36q1Mr3Lt2PpNMCAiMHqIjHNRqrSK6mQEubWXLviRmVSRL +QESxG9fhwoXA3hA/Pe24/PHxI1Pcv2WXb9n5QHGNfb2V1M6+oF4nI979ptAmDgAp +6zxG8D1gvz9Q0twmQVGeFDdCBKNwV6gbh+0t+nvujArjqWaJGctB+d1ENmHP4ndG +yH329JKBNv3bNPFyfvMMFr20FQIDAQABo0IwQDAdBgNVHQ4EFgQUBx/S55zawm6i +QLSwelAQUHTEyL0wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ +KoZIhvcNAQEFBQADggEBAIlXshZ6qML91tmbmzTCnLQyFE2npN/svqe++EPbkTfO +tDIuUFUaNU52Q3Eg75N3ThVwLofDwR1t3Mu1J9QsVtFSUzpE0nPIxBsFZVpikpzu +QY0x2+c06lkh1QF612S4ZDnNye2v7UsDSKegmQGA3GWjNq5lWUhPgkvIZfFXHeVZ +Lgo/bNjR9eUJtGxUAArgFU2HdW23WJZa3W3SAKD0m0i+wzekujbgfIeFlxoVot4u +olu9rxj5kFDNcFn4J2dHy8egBzp90SxdbBk6ZrV9/ZFvgrG+CJPbFEfxojfHRZ48 +x3evZKiT3/Zpg4Jg8klCNO1aAFSFHBY2kgxc+qatv9s= +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Premium O=AffirmTrust +# Subject: CN=AffirmTrust Premium O=AffirmTrust +# Label: "AffirmTrust Premium" +# Serial: 7893706540734352110 +# MD5 Fingerprint: c4:5d:0e:48:b6:ac:28:30:4e:0a:bc:f9:38:16:87:57 +# SHA1 Fingerprint: d8:a6:33:2c:e0:03:6f:b1:85:f6:63:4f:7d:6a:06:65:26:32:28:27 +# SHA256 Fingerprint: 70:a7:3f:7f:37:6b:60:07:42:48:90:45:34:b1:14:82:d5:bf:0e:69:8e:cc:49:8d:f5:25:77:eb:f2:e9:3b:9a +-----BEGIN CERTIFICATE----- +MIIFRjCCAy6gAwIBAgIIbYwURrGmCu4wDQYJKoZIhvcNAQEMBQAwQTELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1UcnVz +dCBQcmVtaXVtMB4XDTEwMDEyOTE0MTAzNloXDTQwMTIzMTE0MTAzNlowQTELMAkG +A1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1U +cnVzdCBQcmVtaXVtMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxBLf +qV/+Qd3d9Z+K4/as4Tx4mrzY8H96oDMq3I0gW64tb+eT2TZwamjPjlGjhVtnBKAQ +JG9dKILBl1fYSCkTtuG+kU3fhQxTGJoeJKJPj/CihQvL9Cl/0qRY7iZNyaqoe5rZ ++jjeRFcV5fiMyNlI4g0WJx0eyIOFJbe6qlVBzAMiSy2RjYvmia9mx+n/K+k8rNrS +s8PhaJyJ+HoAVt70VZVs+7pk3WKL3wt3MutizCaam7uqYoNMtAZ6MMgpv+0GTZe5 +HMQxK9VfvFMSF5yZVylmd2EhMQcuJUmdGPLu8ytxjLW6OQdJd/zvLpKQBY0tL3d7 +70O/Nbua2Plzpyzy0FfuKE4mX4+QaAkvuPjcBukumj5Rp9EixAqnOEhss/n/fauG +V+O61oV4d7pD6kh/9ti+I20ev9E2bFhc8e6kGVQa9QPSdubhjL08s9NIS+LI+H+S +qHZGnEJlPqQewQcDWkYtuJfzt9WyVSHvutxMAJf7FJUnM7/oQ0dG0giZFmA7mn7S +5u046uwBHjxIVkkJx0w3AJ6IDsBz4W9m6XJHMD4Q5QsDyZpCAGzFlH5hxIrff4Ia +C1nEWTJ3s7xgaVY5/bQGeyzWZDbZvUjthB9+pSKPKrhC9IK31FOQeE4tGv2Bb0TX +OwF0lkLgAOIua+rF7nKsu7/+6qqo+Nz2snmKtmcCAwEAAaNCMEAwHQYDVR0OBBYE +FJ3AZ6YMItkm9UWrpmVSESfYRaxjMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ +BAQDAgEGMA0GCSqGSIb3DQEBDAUAA4ICAQCzV00QYk465KzquByvMiPIs0laUZx2 +KI15qldGF9X1Uva3ROgIRL8YhNILgM3FEv0AVQVhh0HctSSePMTYyPtwni94loMg +Nt58D2kTiKV1NpgIpsbfrM7jWNa3Pt668+s0QNiigfV4Py/VpfzZotReBA4Xrf5B +8OWycvpEgjNC6C1Y91aMYj+6QrCcDFx+LmUmXFNPALJ4fqENmS2NuB2OosSw/WDQ +MKSOyARiqcTtNd56l+0OOF6SL5Nwpamcb6d9Ex1+xghIsV5n61EIJenmJWtSKZGc +0jlzCFfemQa0W50QBuHCAKi4HEoCChTQwUHK+4w1IX2COPKpVJEZNZOUbWo6xbLQ 
+u4mGk+ibyQ86p3q4ofB4Rvr8Ny/lioTz3/4E2aFooC8k4gmVBtWVyuEklut89pMF +u+1z6S3RdTnX5yTb2E5fQ4+e0BQ5v1VwSJlXMbSc7kqYA5YwH2AG7hsj/oFgIxpH +YoWlzBk0gG+zrBrjn/B7SK3VAdlntqlyk+otZrWyuOQ9PLLvTIzq6we/qzWaVYa8 +GKa1qF60g2xraUDTn9zxw2lrueFtCfTxqlB2Cnp9ehehVZZCmTEJ3WARjQUwfuaO +RtGdFNrHF+QFlozEJLUbzxQHskD4o55BhrwE0GuWyCqANP2/7waj3VjFhT0+j/6e +KeC2uAloGRwYQw== +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Premium ECC O=AffirmTrust +# Subject: CN=AffirmTrust Premium ECC O=AffirmTrust +# Label: "AffirmTrust Premium ECC" +# Serial: 8401224907861490260 +# MD5 Fingerprint: 64:b0:09:55:cf:b1:d5:99:e2:be:13:ab:a6:5d:ea:4d +# SHA1 Fingerprint: b8:23:6b:00:2f:1d:16:86:53:01:55:6c:11:a4:37:ca:eb:ff:c3:bb +# SHA256 Fingerprint: bd:71:fd:f6:da:97:e4:cf:62:d1:64:7a:dd:25:81:b0:7d:79:ad:f8:39:7e:b4:ec:ba:9c:5e:84:88:82:14:23 +-----BEGIN CERTIFICATE----- +MIIB/jCCAYWgAwIBAgIIdJclisc/elQwCgYIKoZIzj0EAwMwRTELMAkGA1UEBhMC +VVMxFDASBgNVBAoMC0FmZmlybVRydXN0MSAwHgYDVQQDDBdBZmZpcm1UcnVzdCBQ +cmVtaXVtIEVDQzAeFw0xMDAxMjkxNDIwMjRaFw00MDEyMzExNDIwMjRaMEUxCzAJ +BgNVBAYTAlVTMRQwEgYDVQQKDAtBZmZpcm1UcnVzdDEgMB4GA1UEAwwXQWZmaXJt +VHJ1c3QgUHJlbWl1bSBFQ0MwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQNMF4bFZ0D +0KF5Nbc6PJJ6yhUczWLznCZcBz3lVPqj1swS6vQUX+iOGasvLkjmrBhDeKzQN8O9 +ss0s5kfiGuZjuD0uL3jET9v0D6RoTFVya5UdThhClXjMNzyR4ptlKymjQjBAMB0G +A1UdDgQWBBSaryl6wBE1NSZRMADDav5A1a7WPDAPBgNVHRMBAf8EBTADAQH/MA4G +A1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNnADBkAjAXCfOHiFBar8jAQr9HX/Vs +aobgxCd05DhT1wV/GzTjxi+zygk8N53X57hG8f2h4nECMEJZh0PUUd+60wkyWs6I +flc9nF9Ca/UHLbXwgpP5WW+uZPpY5Yse42O+tYHNbwKMeQ== +-----END CERTIFICATE----- + +# Issuer: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority +# Subject: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority +# Label: "Certum Trusted Network CA" +# Serial: 279744 +# MD5 Fingerprint: d5:e9:81:40:c5:18:69:fc:46:2c:89:75:62:0f:aa:78 +# SHA1 Fingerprint: 07:e0:32:e0:20:b7:2c:3f:19:2f:06:28:a2:59:3a:19:a7:0f:06:9e +# SHA256 Fingerprint: 5c:58:46:8d:55:f5:8e:49:7e:74:39:82:d2:b5:00:10:b6:d1:65:37:4a:cf:83:a7:d4:a3:2d:b7:68:c4:40:8e +-----BEGIN CERTIFICATE----- +MIIDuzCCAqOgAwIBAgIDBETAMA0GCSqGSIb3DQEBBQUAMH4xCzAJBgNVBAYTAlBM +MSIwIAYDVQQKExlVbml6ZXRvIFRlY2hub2xvZ2llcyBTLkEuMScwJQYDVQQLEx5D +ZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxIjAgBgNVBAMTGUNlcnR1bSBU +cnVzdGVkIE5ldHdvcmsgQ0EwHhcNMDgxMDIyMTIwNzM3WhcNMjkxMjMxMTIwNzM3 +WjB+MQswCQYDVQQGEwJQTDEiMCAGA1UEChMZVW5pemV0byBUZWNobm9sb2dpZXMg +Uy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MSIw +IAYDVQQDExlDZXJ0dW0gVHJ1c3RlZCBOZXR3b3JrIENBMIIBIjANBgkqhkiG9w0B +AQEFAAOCAQ8AMIIBCgKCAQEA4/t9o3K6wvDJFIf1awFO4W5AB7ptJ11/91sts1rH +UV+rpDKmYYe2bg+G0jACl/jXaVehGDldamR5xgFZrDwxSjh80gTSSyjoIF87B6LM +TXPb865Px1bVWqeWifrzq2jUI4ZZJ88JJ7ysbnKDHDBy3+Ci6dLhdHUZvSqeexVU +BBvXQzmtVSjF4hq79MDkrjhJM8x2hZ85RdKknvISjFH4fOQtf/WsX+sWn7Et0brM +kUJ3TCXJkDhv2/DM+44el1k+1WBO5gUo7Ul5E0u6SNsv+XLTOcr+H9g0cvW0QM8x +AcPs3hEtF10fuFDRXhmnad4HMyjKUJX5p1TLVIZQRan5SQIDAQABo0IwQDAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBQIds3LB/8k9sXN7buQvOKEN0Z19zAOBgNV +HQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEFBQADggEBAKaorSLOAT2mo/9i0Eidi15y +sHhE49wcrwn9I0j6vSrEuVUEtRCjjSfeC4Jj0O7eDDd5QVsisrCaQVymcODU0HfL +I9MA4GxWL+FpDQ3Zqr8hgVDZBqWo/5U30Kr+4rP1mS1FhIrlQgnXdAIv94nYmem8 +J9RHjboNRhx3zxSkHLmkMcScKHQDNP8zGSal6Q10tz6XxnboJ5ajZt3hrvJBW8qY +VoNzcOSGGtIxQbovvi0TWnZvTuhOgQ4/WwMioBK+ZlgRSssDxLQqKi2WF+A5VLxI +03YnnZotBqbJ7DnSq9ufmgsnAjUpsUCV5/nonFWIGUbWtzT1fs45mtk48VH3Tyw= +-----END CERTIFICATE----- + +# Issuer: CN=TWCA Root Certification Authority O=TAIWAN-CA 
OU=Root CA +# Subject: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA +# Label: "TWCA Root Certification Authority" +# Serial: 1 +# MD5 Fingerprint: aa:08:8f:f6:f9:7b:b7:f2:b1:a7:1e:9b:ea:ea:bd:79 +# SHA1 Fingerprint: cf:9e:87:6d:d3:eb:fc:42:26:97:a3:b5:a3:7a:a0:76:a9:06:23:48 +# SHA256 Fingerprint: bf:d8:8f:e1:10:1c:41:ae:3e:80:1b:f8:be:56:35:0e:e9:ba:d1:a6:b9:bd:51:5e:dc:5c:6d:5b:87:11:ac:44 +-----BEGIN CERTIFICATE----- +MIIDezCCAmOgAwIBAgIBATANBgkqhkiG9w0BAQUFADBfMQswCQYDVQQGEwJUVzES +MBAGA1UECgwJVEFJV0FOLUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFU +V0NBIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwODI4MDcyNDMz +WhcNMzAxMjMxMTU1OTU5WjBfMQswCQYDVQQGEwJUVzESMBAGA1UECgwJVEFJV0FO +LUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFUV0NBIFJvb3QgQ2VydGlm +aWNhdGlvbiBBdXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB +AQCwfnK4pAOU5qfeCTiRShFAh6d8WWQUe7UREN3+v9XAu1bihSX0NXIP+FPQQeFE +AcK0HMMxQhZHhTMidrIKbw/lJVBPhYa+v5guEGcevhEFhgWQxFnQfHgQsIBct+HH +K3XLfJ+utdGdIzdjp9xCoi2SBBtQwXu4PhvJVgSLL1KbralW6cH/ralYhzC2gfeX +RfwZVzsrb+RH9JlF/h3x+JejiB03HFyP4HYlmlD4oFT/RJB2I9IyxsOrBr/8+7/z +rX2SYgJbKdM1o5OaQ2RgXbL6Mv87BK9NQGr5x+PvI/1ry+UPizgN7gr8/g+YnzAx +3WxSZfmLgb4i4RxYA7qRG4kHAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqOFsmjd6LWvJPelSDGRjjCDWmujANBgkq +hkiG9w0BAQUFAAOCAQEAPNV3PdrfibqHDAhUaiBQkr6wQT25JmSDCi/oQMCXKCeC +MErJk/9q56YAf4lCmtYR5VPOL8zy2gXE/uJQxDqGfczafhAJO5I1KlOy/usrBdls +XebQ79NqZp4VKIV66IIArB6nCWlWQtNoURi+VJq/REG6Sb4gumlc7rh3zc5sH62D +lhh9DrUUOYTxKOkto557HnpyWoOzeW/vtPzQCqVYT0bf+215WfKEIlKuD8z7fDvn +aspHYcN6+NOSBB+4IIThNlQWx0DeO4pz3N/GCUzf7Nr/1FNCocnyYh0igzyXxfkZ +YiesZSLX0zzG5Y6yU8xJzrww/nsOM5D77dIUkR8Hrw== +-----END CERTIFICATE----- + +# Issuer: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2 +# Subject: O=SECOM Trust Systems CO.,LTD. 
OU=Security Communication RootCA2 +# Label: "Security Communication RootCA2" +# Serial: 0 +# MD5 Fingerprint: 6c:39:7d:a4:0e:55:59:b2:3f:d6:41:b1:12:50:de:43 +# SHA1 Fingerprint: 5f:3b:8c:f2:f8:10:b3:7d:78:b4:ce:ec:19:19:c3:73:34:b9:c7:74 +# SHA256 Fingerprint: 51:3b:2c:ec:b8:10:d4:cd:e5:dd:85:39:1a:df:c6:c2:dd:60:d8:7b:b7:36:d2:b5:21:48:4a:a4:7a:0e:be:f6 +-----BEGIN CERTIFICATE----- +MIIDdzCCAl+gAwIBAgIBADANBgkqhkiG9w0BAQsFADBdMQswCQYDVQQGEwJKUDEl +MCMGA1UEChMcU0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UECxMe +U2VjdXJpdHkgQ29tbXVuaWNhdGlvbiBSb290Q0EyMB4XDTA5MDUyOTA1MDAzOVoX +DTI5MDUyOTA1MDAzOVowXTELMAkGA1UEBhMCSlAxJTAjBgNVBAoTHFNFQ09NIFRy +dXN0IFN5c3RlbXMgQ08uLExURC4xJzAlBgNVBAsTHlNlY3VyaXR5IENvbW11bmlj +YXRpb24gUm9vdENBMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANAV +OVKxUrO6xVmCxF1SrjpDZYBLx/KWvNs2l9amZIyoXvDjChz335c9S672XewhtUGr +zbl+dp+++T42NKA7wfYxEUV0kz1XgMX5iZnK5atq1LXaQZAQwdbWQonCv/Q4EpVM +VAX3NuRFg3sUZdbcDE3R3n4MqzvEFb46VqZab3ZpUql6ucjrappdUtAtCms1FgkQ +hNBqyjoGADdH5H5XTz+L62e4iKrFvlNVspHEfbmwhRkGeC7bYRr6hfVKkaHnFtWO +ojnflLhwHyg/i/xAXmODPIMqGplrz95Zajv8bxbXH/1KEOtOghY6rCcMU/Gt1SSw +awNQwS08Ft1ENCcadfsCAwEAAaNCMEAwHQYDVR0OBBYEFAqFqXdlBZh8QIH4D5cs +OPEK7DzPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3 +DQEBCwUAA4IBAQBMOqNErLlFsceTfsgLCkLfZOoc7llsCLqJX2rKSpWeeo8HxdpF +coJxDjrSzG+ntKEju/Ykn8sX/oymzsLS28yN/HH8AynBbF0zX2S2ZTuJbxh2ePXc +okgfGT+Ok+vx+hfuzU7jBBJV1uXk3fs+BXziHV7Gp7yXT2g69ekuCkO2r1dcYmh8 +t/2jioSgrGK+KwmHNPBqAbubKVY8/gA3zyNs8U6qtnRGEmyR7jTV7JqR50S+kDFy +1UkC9gLl9B/rfNmWVan/7Ir5mUf/NVoCqgTLiluHcSmRvaS0eg29mvVXIwAHIRc/ +SjnRBUkLp7Y3gaVdjKozXoEofKd9J+sAro03 +-----END CERTIFICATE----- + +# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. Authority +# Subject: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. 
Authority +# Label: "Hellenic Academic and Research Institutions RootCA 2011" +# Serial: 0 +# MD5 Fingerprint: 73:9f:4c:4b:73:5b:79:e9:fa:ba:1c:ef:6e:cb:d5:c9 +# SHA1 Fingerprint: fe:45:65:9b:79:03:5b:98:a1:61:b5:51:2e:ac:da:58:09:48:22:4d +# SHA256 Fingerprint: bc:10:4f:15:a4:8b:e7:09:dc:a5:42:a7:e1:d4:b9:df:6f:05:45:27:e8:02:ea:a9:2d:59:54:44:25:8a:fe:71 +-----BEGIN CERTIFICATE----- +MIIEMTCCAxmgAwIBAgIBADANBgkqhkiG9w0BAQUFADCBlTELMAkGA1UEBhMCR1Ix +RDBCBgNVBAoTO0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1 +dGlvbnMgQ2VydC4gQXV0aG9yaXR5MUAwPgYDVQQDEzdIZWxsZW5pYyBBY2FkZW1p +YyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25zIFJvb3RDQSAyMDExMB4XDTExMTIw +NjEzNDk1MloXDTMxMTIwMTEzNDk1MlowgZUxCzAJBgNVBAYTAkdSMUQwQgYDVQQK +EztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25zIENl +cnQuIEF1dGhvcml0eTFAMD4GA1UEAxM3SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl +c2VhcmNoIEluc3RpdHV0aW9ucyBSb290Q0EgMjAxMTCCASIwDQYJKoZIhvcNAQEB +BQADggEPADCCAQoCggEBAKlTAOMupvaO+mDYLZU++CwqVE7NuYRhlFhPjz2L5EPz +dYmNUeTDN9KKiE15HrcS3UN4SoqS5tdI1Q+kOilENbgH9mgdVc04UfCMJDGFr4PJ +fel3r+0ae50X+bOdOFAPplp5kYCvN66m0zH7tSYJnTxa71HFK9+WXesyHgLacEns +bgzImjeN9/E2YEsmLIKe0HjzDQ9jpFEw4fkrJxIH2Oq9GGKYsFk3fb7u8yBRQlqD +75O6aRXxYp2fmTmCobd0LovUxQt7L/DICto9eQqakxylKHJzkUOap9FNhYS5qXSP +FEDH3N6sQWRstBmbAmNtJGSPRLIl6s5ddAxjMlyNh+UCAwEAAaOBiTCBhjAPBgNV +HRMBAf8EBTADAQH/MAsGA1UdDwQEAwIBBjAdBgNVHQ4EFgQUppFC/RNhSiOeCKQp +5dgTBCPuQSUwRwYDVR0eBEAwPqA8MAWCAy5ncjAFggMuZXUwBoIELmVkdTAGggQu +b3JnMAWBAy5ncjAFgQMuZXUwBoEELmVkdTAGgQQub3JnMA0GCSqGSIb3DQEBBQUA +A4IBAQAf73lB4XtuP7KMhjdCSk4cNx6NZrokgclPEg8hwAOXhiVtXdMiKahsog2p +6z0GW5k6x8zDmjR/qw7IThzh+uTczQ2+vyT+bOdrwg3IBp5OjWEopmr95fZi6hg8 +TqBTnbI6nOulnJEWtk2C4AwFSKls9cz4y51JtPACpf1wA+2KIaWuE4ZJwzNzvoc7 +dIsXRSZMFpGD/md9zU1jZ/rzAxKWeAaNsWftjj++n08C9bMJL/NMh98qy5V8Acys +Nnq/onN694/BtZqhFLKPM58N7yLcZnuEvUUXBj08yrl3NI/K6s8/MT7jiOOASSXI +l7WdmplNsDz4SgCbZN2fOUvRJ9e4 +-----END CERTIFICATE----- + +# Issuer: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 +# Subject: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 +# Label: "Actalis Authentication Root CA" +# Serial: 6271844772424770508 +# MD5 Fingerprint: 69:c1:0d:4f:07:a3:1b:c3:fe:56:3d:04:bc:11:f6:a6 +# SHA1 Fingerprint: f3:73:b3:87:06:5a:28:84:8a:f2:f3:4a:ce:19:2b:dd:c7:8e:9c:ac +# SHA256 Fingerprint: 55:92:60:84:ec:96:3a:64:b9:6e:2a:be:01:ce:0b:a8:6a:64:fb:fe:bc:c7:aa:b5:af:c1:55:b3:7f:d7:60:66 +-----BEGIN CERTIFICATE----- +MIIFuzCCA6OgAwIBAgIIVwoRl0LE48wwDQYJKoZIhvcNAQELBQAwazELMAkGA1UE +BhMCSVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8w +MzM1ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290 +IENBMB4XDTExMDkyMjExMjIwMloXDTMwMDkyMjExMjIwMlowazELMAkGA1UEBhMC +SVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8wMzM1 +ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290IENB +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAp8bEpSmkLO/lGMWwUKNv +UTufClrJwkg4CsIcoBh/kbWHuUA/3R1oHwiD1S0eiKD4j1aPbZkCkpAW1V8IbInX +4ay8IMKx4INRimlNAJZaby/ARH6jDuSRzVju3PvHHkVH3Se5CAGfpiEd9UEtL0z9 +KK3giq0itFZljoZUj5NDKd45RnijMCO6zfB9E1fAXdKDa0hMxKufgFpbOr3JpyI/ +gCczWw63igxdBzcIy2zSekciRDXFzMwujt0q7bd9Zg1fYVEiVRvjRuPjPdA1Yprb +rxTIW6HMiRvhMCb8oJsfgadHHwTrozmSBp+Z07/T6k9QnBn+locePGX2oxgkg4YQ +51Q+qDp2JE+BIcXjDwL4k5RHILv+1A7TaLndxHqEguNTVHnd25zS8gebLra8Pu2F +be8lEfKXGkJh90qX6IuxEAf6ZYGyojnP9zz/GPvG8VqLWeICrHuS0E4UT1lF9gxe +KF+w6D9Fz8+vm2/7hNN3WpVvrJSEnu68wEqPSpP4RCHiMUVhUE4Q2OM1fEwZtN4F +v6MGn8i1zeQf1xcGDXqVdFUNaBr8EBtiZJ1t4JWgw5QHVw0U5r0F+7if5t+L4sbn +fpb2U8WANFAoWPASUHEXMLrmeGO89LKtmyuy/uE5jF66CyCU3nuDuP/jVo23Eek7 
+jPKxwV2dpAtMK9myGPW1n0sCAwEAAaNjMGEwHQYDVR0OBBYEFFLYiDrIn3hm7Ynz +ezhwlMkCAjbQMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUUtiIOsifeGbt +ifN7OHCUyQICNtAwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAL +e3KHwGCmSUyIWOYdiPcUZEim2FgKDk8TNd81HdTtBjHIgT5q1d07GjLukD0R0i70 +jsNjLiNmsGe+b7bAEzlgqqI0JZN1Ut6nna0Oh4lScWoWPBkdg/iaKWW+9D+a2fDz +WochcYBNy+A4mz+7+uAwTc+G02UQGRjRlwKxK3JCaKygvU5a2hi/a5iB0P2avl4V +SM0RFbnAKVy06Ij3Pjaut2L9HmLecHgQHEhb2rykOLpn7VU+Xlff1ANATIGk0k9j +pwlCCRT8AKnCgHNPLsBA2RF7SOp6AsDT6ygBJlh0wcBzIm2Tlf05fbsq4/aC4yyX +X04fkZT6/iyj2HYauE2yOE+b+h1IYHkm4vP9qdCa6HCPSXrW5b0KDtst842/6+Ok +fcvHlXHo2qN8xcL4dJIEG4aspCJTQLas/kx2z/uUMsA1n3Y/buWQbqCmJqK4LL7R +K4X9p2jIugErsWx0Hbhzlefut8cl8ABMALJ+tguLHPPAUJ4lueAI3jZm/zel0btU +ZCzJJ7VLkn5l/9Mt4blOvH+kQSGQQXemOR/qnuOf0GZvBeyqdn6/axag67XH/JJU +LysRJyU3eExRarDzzFhdFPFqSBX/wge2sY0PjlxQRrM9vwGYT7JZVEc+NHt4bVaT +LnPqZih4zR0Uv6CPLy64Lo7yFIrM6bV8+2ydDKXhlg== +-----END CERTIFICATE----- + +# Issuer: O=Trustis Limited OU=Trustis FPS Root CA +# Subject: O=Trustis Limited OU=Trustis FPS Root CA +# Label: "Trustis FPS Root CA" +# Serial: 36053640375399034304724988975563710553 +# MD5 Fingerprint: 30:c9:e7:1e:6b:e6:14:eb:65:b2:16:69:20:31:67:4d +# SHA1 Fingerprint: 3b:c0:38:0b:33:c3:f6:a6:0c:86:15:22:93:d9:df:f5:4b:81:c0:04 +# SHA256 Fingerprint: c1:b4:82:99:ab:a5:20:8f:e9:63:0a:ce:55:ca:68:a0:3e:da:5a:51:9c:88:02:a0:d3:a6:73:be:8f:8e:55:7d +-----BEGIN CERTIFICATE----- +MIIDZzCCAk+gAwIBAgIQGx+ttiD5JNM2a/fH8YygWTANBgkqhkiG9w0BAQUFADBF +MQswCQYDVQQGEwJHQjEYMBYGA1UEChMPVHJ1c3RpcyBMaW1pdGVkMRwwGgYDVQQL +ExNUcnVzdGlzIEZQUyBSb290IENBMB4XDTAzMTIyMzEyMTQwNloXDTI0MDEyMTEx +MzY1NFowRTELMAkGA1UEBhMCR0IxGDAWBgNVBAoTD1RydXN0aXMgTGltaXRlZDEc +MBoGA1UECxMTVHJ1c3RpcyBGUFMgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQAD +ggEPADCCAQoCggEBAMVQe547NdDfxIzNjpvto8A2mfRC6qc+gIMPpqdZh8mQRUN+ +AOqGeSoDvT03mYlmt+WKVoaTnGhLaASMk5MCPjDSNzoiYYkchU59j9WvezX2fihH +iTHcDnlkH5nSW7r+f2C/revnPDgpai/lkQtV/+xvWNUtyd5MZnGPDNcE2gfmHhjj +vSkCqPoc4Vu5g6hBSLwacY3nYuUtsuvffM/bq1rKMfFMIvMFE/eC+XN5DL7XSxzA +0RU8k0Fk0ea+IxciAIleH2ulrG6nS4zto3Lmr2NNL4XSFDWaLk6M6jKYKIahkQlB +OrTh4/L68MkKokHdqeMDx4gVOxzUGpTXn2RZEm0CAwEAAaNTMFEwDwYDVR0TAQH/ +BAUwAwEB/zAfBgNVHSMEGDAWgBS6+nEleYtXQSUhhgtx67JkDoshZzAdBgNVHQ4E +FgQUuvpxJXmLV0ElIYYLceuyZA6LIWcwDQYJKoZIhvcNAQEFBQADggEBAH5Y//01 +GX2cGE+esCu8jowU/yyg2kdbw++BLa8F6nRIW/M+TgfHbcWzk88iNVy2P3UnXwmW +zaD+vkAMXBJV+JOCyinpXj9WV4s4NvdFGkwozZ5BuO1WTISkQMi4sKUraXAEasP4 +1BIy+Q7DsdwyhEQsb8tGD+pmQQ9P8Vilpg0ND2HepZ5dfWWhPBfnqFVO76DH7cZE +f1T1o+CP8HxVIo8ptoGj4W1OLBuAZ+ytIJ8MYmHVl/9D7S3B2l0pKoU/rGXuhg8F +jZBf3+6f9L/uHfuY5H+QK4R4EA5sSVPvFVtlRkpdr7r7OnIdzfYliB6XzCGcKQEN +ZetX2fNXlrtIzYE= +-----END CERTIFICATE----- + +# Issuer: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 +# Subject: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 +# Label: "Buypass Class 2 Root CA" +# Serial: 2 +# MD5 Fingerprint: 46:a7:d2:fe:45:fb:64:5a:a8:59:90:9b:78:44:9b:29 +# SHA1 Fingerprint: 49:0a:75:74:de:87:0a:47:fe:58:ee:f6:c7:6b:eb:c6:0b:12:40:99 +# SHA256 Fingerprint: 9a:11:40:25:19:7c:5b:b9:5d:94:e6:3d:55:cd:43:79:08:47:b6:46:b2:3c:df:11:ad:a4:a0:0e:ff:15:fb:48 +-----BEGIN CERTIFICATE----- +MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd +MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg +Q2xhc3MgMiBSb290IENBMB4XDTEwMTAyNjA4MzgwM1oXDTQwMTAyNjA4MzgwM1ow +TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw +HgYDVQQDDBdCdXlwYXNzIENsYXNzIDIgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB +BQADggIPADCCAgoCggIBANfHXvfBB9R3+0Mh9PT1aeTuMgHbo4Yf5FkNuud1g1Lr 
+6hxhFUi7HQfKjK6w3Jad6sNgkoaCKHOcVgb/S2TwDCo3SbXlzwx87vFKu3MwZfPV +L4O2fuPn9Z6rYPnT8Z2SdIrkHJasW4DptfQxh6NR/Md+oW+OU3fUl8FVM5I+GC91 +1K2GScuVr1QGbNgGE41b/+EmGVnAJLqBcXmQRFBoJJRfuLMR8SlBYaNByyM21cHx +MlAQTn/0hpPshNOOvEu/XAFOBz3cFIqUCqTqc/sLUegTBxj6DvEr0VQVfTzh97QZ +QmdiXnfgolXsttlpF9U6r0TtSsWe5HonfOV116rLJeffawrbD02TTqigzXsu8lkB +arcNuAeBfos4GzjmCleZPe4h6KP1DBbdi+w0jpwqHAAVF41og9JwnxgIzRFo1clr +Us3ERo/ctfPYV3Me6ZQ5BL/T3jjetFPsaRyifsSP5BtwrfKi+fv3FmRmaZ9JUaLi +FRhnBkp/1Wy1TbMz4GHrXb7pmA8y1x1LPC5aAVKRCfLf6o3YBkBjqhHk/sM3nhRS +P/TizPJhk9H9Z2vXUq6/aKtAQ6BXNVN48FP4YUIHZMbXb5tMOA1jrGKvNouicwoN +9SG9dKpN6nIDSdvHXx1iY8f93ZHsM+71bbRuMGjeyNYmsHVee7QHIJihdjK4TWxP +AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMmAd+BikoL1Rpzz +uvdMw964o605MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAU18h +9bqwOlI5LJKwbADJ784g7wbylp7ppHR/ehb8t/W2+xUbP6umwHJdELFx7rxP462s +A20ucS6vxOOto70MEae0/0qyexAQH6dXQbLArvQsWdZHEIjzIVEpMMpghq9Gqx3t +OluwlN5E40EIosHsHdb9T7bWR9AUC8rmyrV7d35BH16Dx7aMOZawP5aBQW9gkOLo ++fsicdl9sz1Gv7SEr5AcD48Saq/v7h56rgJKihcrdv6sVIkkLE8/trKnToyokZf7 +KcZ7XC25y2a2t6hbElGFtQl+Ynhw/qlqYLYdDnkM/crqJIByw5c/8nerQyIKx+u2 +DISCLIBrQYoIwOula9+ZEsuK1V6ADJHgJgg2SMX6OBE1/yWDLfJ6v9r9jv6ly0Us +H8SIU653DtmadsWOLB2jutXsMq7Aqqz30XpN69QH4kj3Io6wpJ9qzo6ysmD0oyLQ +I+uUWnpp3Q+/QFesa1lQ2aOZ4W7+jQF5JyMV3pKdewlNWudLSDBaGOYKbeaP4NK7 +5t98biGCwWg5TbSYWGZizEqQXsP6JwSxeRV0mcy+rSDeJmAc61ZRpqPq5KM/p/9h +3PFaTWwyI0PurKju7koSCTxdccK+efrCh2gdC/1cacwG0Jp9VJkqyTkaGa9LKkPz +Y11aWOIv4x3kqdbQCtCev9eBCfHJxyYNrJgWVqA= +-----END CERTIFICATE----- + +# Issuer: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 +# Subject: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 +# Label: "Buypass Class 3 Root CA" +# Serial: 2 +# MD5 Fingerprint: 3d:3b:18:9e:2c:64:5a:e8:d5:88:ce:0e:f9:37:c2:ec +# SHA1 Fingerprint: da:fa:f7:fa:66:84:ec:06:8f:14:50:bd:c7:c2:81:a5:bc:a9:64:57 +# SHA256 Fingerprint: ed:f7:eb:bc:a2:7a:2a:38:4d:38:7b:7d:40:10:c6:66:e2:ed:b4:84:3e:4c:29:b4:ae:1d:5b:93:32:e6:b2:4d +-----BEGIN CERTIFICATE----- +MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd +MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg +Q2xhc3MgMyBSb290IENBMB4XDTEwMTAyNjA4Mjg1OFoXDTQwMTAyNjA4Mjg1OFow +TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw +HgYDVQQDDBdCdXlwYXNzIENsYXNzIDMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB +BQADggIPADCCAgoCggIBAKXaCpUWUOOV8l6ddjEGMnqb8RB2uACatVI2zSRHsJ8Y +ZLya9vrVediQYkwiL944PdbgqOkcLNt4EemOaFEVcsfzM4fkoF0LXOBXByow9c3E +N3coTRiR5r/VUv1xLXA+58bEiuPwKAv0dpihi4dVsjoT/Lc+JzeOIuOoTyrvYLs9 +tznDDgFHmV0ST9tD+leh7fmdvhFHJlsTmKtdFoqwNxxXnUX/iJY2v7vKB3tvh2PX +0DJq1l1sDPGzbjniazEuOQAnFN44wOwZZoYS6J1yFhNkUsepNxz9gjDthBgd9K5c +/3ATAOux9TN6S9ZV+AWNS2mw9bMoNlwUxFFzTWsL8TQH2xc519woe2v1n/MuwU8X +KhDzzMro6/1rqy6any2CbgTUUgGTLT2G/H783+9CHaZr77kgxve9oKeV/afmiSTY +zIw0bOIjL9kSGiG5VZFvC5F5GQytQIgLcOJ60g7YaEi7ghM5EFjp2CoHxhLbWNvS +O1UQRwUVZ2J+GGOmRj8JDlQyXr8NYnon74Do29lLBlo3WiXQCBJ31G8JUJc9yB3D +34xFMFbG02SrZvPAXpacw8Tvw3xrizp5f7NJzz3iiZ+gMEuFuZyUJHmPfWupRWgP +K9Dx2hzLabjKSWJtyNBjYt1gD1iqj6G8BaVmos8bdrKEZLFMOVLAMLrwjEsCsLa3 +AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFEe4zf/lb+74suwv +Tg75JbCOPGvDMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAACAj +QTUEkMJAYmDv4jVM1z+s4jSQuKFvdvoWFqRINyzpkMLyPPgKn9iB5btb2iUspKdV +cSQy9sgL8rxq+JOssgfCX5/bzMiKqr5qb+FJEMwx14C7u8jYog5kV+qi9cKpMRXS +IGrs/CIBKM+GuIAeqcwRpTzyFrNHnfzSgCHEy9BHcEGhyoMZCCxt8l13nIoUE9Q2 +HJLw5QY33KbmkJs4j1xrG0aGQ0JfPgEHU1RdZX33inOhmlRaHylDFCfChQ+1iHsa +O5S3HWCntZznKWlXWpuTekMwGwPXYshApqr8ZORK15FTAaggiG6cX0S5y2CBNOxv 
+033aSF/rtJC8LakcC6wc1aJoIIAE1vyxjy+7SjENSoYc6+I2KSb12tjE8nVhz36u +dmNKekBlk4f4HoCMhuWG1o8O/FMsYOgWYRqiPkN7zTlgVGr18okmAWiDSKIz6MkE +kbIRNBE+6tBDGR8Dk5AM/1E9V/RBbuHLoL7ryWPNbczk+DaqaJ3tvV2XcEQNtg41 +3OEMXbugUZTLfhbrES+jkkXITHHZvMmZUldGL1DPvTVp9D0VzgalLA8+9oG6lLvD +u79leNKGef9JOxqDDPDeeOzI8k1MGt6CKfjBWtrt7uYnXuhF0J0cUahoq0Tj0Itq +4/g7u9xN12TyUb7mqqta6THuBrxzvxNiCp/HuZc= +-----END CERTIFICATE----- + +# Issuer: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Subject: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Label: "T-TeleSec GlobalRoot Class 3" +# Serial: 1 +# MD5 Fingerprint: ca:fb:40:a8:4e:39:92:8a:1d:fe:8e:2f:c4:27:ea:ef +# SHA1 Fingerprint: 55:a6:72:3e:cb:f2:ec:cd:c3:23:74:70:19:9d:2a:be:11:e3:81:d1 +# SHA256 Fingerprint: fd:73:da:d3:1c:64:4f:f1:b4:3b:ef:0c:cd:da:96:71:0b:9c:d9:87:5e:ca:7e:31:70:7a:f3:e9:6d:52:2b:bd +-----BEGIN CERTIFICATE----- +MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx +KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd +BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl +YyBHbG9iYWxSb290IENsYXNzIDMwHhcNMDgxMDAxMTAyOTU2WhcNMzMxMDAxMjM1 +OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy +aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 +ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDMwggEiMA0G +CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC9dZPwYiJvJK7genasfb3ZJNW4t/zN +8ELg63iIVl6bmlQdTQyK9tPPcPRStdiTBONGhnFBSivwKixVA9ZIw+A5OO3yXDw/ +RLyTPWGrTs0NvvAgJ1gORH8EGoel15YUNpDQSXuhdfsaa3Ox+M6pCSzyU9XDFES4 +hqX2iys52qMzVNn6chr3IhUciJFrf2blw2qAsCTz34ZFiP0Zf3WHHx+xGwpzJFu5 +ZeAsVMhg02YXP+HMVDNzkQI6pn97djmiH5a2OK61yJN0HZ65tOVgnS9W0eDrXltM +EnAMbEQgqxHY9Bn20pxSN+f6tsIxO0rUFJmtxxr1XV/6B7h8DR/Wgx6zAgMBAAGj +QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS1 +A/d2O2GCahKqGFPrAyGUv/7OyjANBgkqhkiG9w0BAQsFAAOCAQEAVj3vlNW92nOy +WL6ukK2YJ5f+AbGwUgC4TeQbIXQbfsDuXmkqJa9c1h3a0nnJ85cp4IaH3gRZD/FZ +1GSFS5mvJQQeyUapl96Cshtwn5z2r3Ex3XsFpSzTucpH9sry9uetuUg/vBa3wW30 +6gmv7PO15wWeph6KU1HWk4HMdJP2udqmJQV0eVp+QD6CSyYRMG7hP0HHRwA11fXT +91Q+gT3aSWqas+8QPebrb9HIIkfLzM8BMZLZGOMivgkeGj5asuRrDFR6fUNOuIml +e9eiPZaGzPImNC1qkp2aGtAw4l1OBLBfiyB+d8E9lYLRRpo7PHi4b6HQDWSieB4p +TpPDpFQUWw== +-----END CERTIFICATE----- + +# Issuer: CN=EE Certification Centre Root CA O=AS Sertifitseerimiskeskus +# Subject: CN=EE Certification Centre Root CA O=AS Sertifitseerimiskeskus +# Label: "EE Certification Centre Root CA" +# Serial: 112324828676200291871926431888494945866 +# MD5 Fingerprint: 43:5e:88:d4:7d:1a:4a:7e:fd:84:2e:52:eb:01:d4:6f +# SHA1 Fingerprint: c9:a8:b9:e7:55:80:5e:58:e3:53:77:a7:25:eb:af:c3:7b:27:cc:d7 +# SHA256 Fingerprint: 3e:84:ba:43:42:90:85:16:e7:75:73:c0:99:2f:09:79:ca:08:4e:46:85:68:1f:f1:95:cc:ba:8a:22:9b:8a:76 +-----BEGIN CERTIFICATE----- +MIIEAzCCAuugAwIBAgIQVID5oHPtPwBMyonY43HmSjANBgkqhkiG9w0BAQUFADB1 +MQswCQYDVQQGEwJFRTEiMCAGA1UECgwZQVMgU2VydGlmaXRzZWVyaW1pc2tlc2t1 +czEoMCYGA1UEAwwfRUUgQ2VydGlmaWNhdGlvbiBDZW50cmUgUm9vdCBDQTEYMBYG +CSqGSIb3DQEJARYJcGtpQHNrLmVlMCIYDzIwMTAxMDMwMTAxMDMwWhgPMjAzMDEy +MTcyMzU5NTlaMHUxCzAJBgNVBAYTAkVFMSIwIAYDVQQKDBlBUyBTZXJ0aWZpdHNl +ZXJpbWlza2Vza3VzMSgwJgYDVQQDDB9FRSBDZXJ0aWZpY2F0aW9uIENlbnRyZSBS +b290IENBMRgwFgYJKoZIhvcNAQkBFglwa2lAc2suZWUwggEiMA0GCSqGSIb3DQEB +AQUAA4IBDwAwggEKAoIBAQDIIMDs4MVLqwd4lfNE7vsLDP90jmG7sWLqI9iroWUy +euuOF0+W2Ap7kaJjbMeMTC55v6kF/GlclY1i+blw7cNRfdCT5mzrMEvhvH2/UpvO +bntl8jixwKIy72KyaOBhU8E2lf/slLo2rpwcpzIP5Xy0xm90/XsY6KxX7QYgSzIw 
+WFv9zajmofxwvI6Sc9uXp3whrj3B9UiHbCe9nyV0gVWw93X2PaRka9ZP585ArQ/d +MtO8ihJTmMmJ+xAdTX7Nfh9WDSFwhfYggx/2uh8Ej+p3iDXE/+pOoYtNP2MbRMNE +1CV2yreN1x5KZmTNXMWcg+HCCIia7E6j8T4cLNlsHaFLAgMBAAGjgYowgYcwDwYD +VR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBLyWj7qVhy/ +zQas8fElyalL1BSZMEUGA1UdJQQ+MDwGCCsGAQUFBwMCBggrBgEFBQcDAQYIKwYB +BQUHAwMGCCsGAQUFBwMEBggrBgEFBQcDCAYIKwYBBQUHAwkwDQYJKoZIhvcNAQEF +BQADggEBAHv25MANqhlHt01Xo/6tu7Fq1Q+e2+RjxY6hUFaTlrg4wCQiZrxTFGGV +v9DHKpY5P30osxBAIWrEr7BSdxjhlthWXePdNl4dp1BUoMUq5KqMlIpPnTX/dqQG +E5Gion0ARD9V04I8GtVbvFZMIi5GQ4okQC3zErg7cBqklrkar4dBGmoYDQZPxz5u +uSlNDUmJEYcyW+ZLBMjkXOZ0c5RdFpgTlf7727FE5TpwrDdr5rMzcijJs1eg9gIW +iAYLtqZLICjU3j2LrTcFU3T+bsy8QxdxXvnFzBqpYe73dgzzcvRyrc9yAjYHR8/v +GVCJYMzpJJUPwssd8m92kMfMdcGWxZ0= +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH +# Subject: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH +# Label: "D-TRUST Root Class 3 CA 2 2009" +# Serial: 623603 +# MD5 Fingerprint: cd:e0:25:69:8d:47:ac:9c:89:35:90:f7:fd:51:3d:2f +# SHA1 Fingerprint: 58:e8:ab:b0:36:15:33:fb:80:f7:9b:1b:6d:29:d3:ff:8d:5f:00:f0 +# SHA256 Fingerprint: 49:e7:a4:42:ac:f0:ea:62:87:05:00:54:b5:25:64:b6:50:e4:f4:9e:42:e3:48:d6:aa:38:e0:39:e9:57:b1:c1 +-----BEGIN CERTIFICATE----- +MIIEMzCCAxugAwIBAgIDCYPzMA0GCSqGSIb3DQEBCwUAME0xCzAJBgNVBAYTAkRF +MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMMHkQtVFJVU1QgUm9vdCBD +bGFzcyAzIENBIDIgMjAwOTAeFw0wOTExMDUwODM1NThaFw0yOTExMDUwODM1NTha +ME0xCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMM +HkQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgMjAwOTCCASIwDQYJKoZIhvcNAQEB +BQADggEPADCCAQoCggEBANOySs96R+91myP6Oi/WUEWJNTrGa9v+2wBoqOADER03 +UAifTUpolDWzU9GUY6cgVq/eUXjsKj3zSEhQPgrfRlWLJ23DEE0NkVJD2IfgXU42 +tSHKXzlABF9bfsyjxiupQB7ZNoTWSPOSHjRGICTBpFGOShrvUD9pXRl/RcPHAY9R +ySPocq60vFYJfxLLHLGvKZAKyVXMD9O0Gu1HNVpK7ZxzBCHQqr0ME7UAyiZsxGsM +lFqVlNpQmvH/pStmMaTJOKDfHR+4CS7zp+hnUquVH+BGPtikw8paxTGA6Eian5Rp +/hnd2HN8gcqW3o7tszIFZYQ05ub9VxC1X3a/L7AQDcUCAwEAAaOCARowggEWMA8G +A1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFP3aFMSfMN4hvR5COfyrYyNJ4PGEMA4G +A1UdDwEB/wQEAwIBBjCB0wYDVR0fBIHLMIHIMIGAoH6gfIZ6bGRhcDovL2RpcmVj +dG9yeS5kLXRydXN0Lm5ldC9DTj1ELVRSVVNUJTIwUm9vdCUyMENsYXNzJTIwMyUy +MENBJTIwMiUyMDIwMDksTz1ELVRydXN0JTIwR21iSCxDPURFP2NlcnRpZmljYXRl +cmV2b2NhdGlvbmxpc3QwQ6BBoD+GPWh0dHA6Ly93d3cuZC10cnVzdC5uZXQvY3Js +L2QtdHJ1c3Rfcm9vdF9jbGFzc18zX2NhXzJfMjAwOS5jcmwwDQYJKoZIhvcNAQEL +BQADggEBAH+X2zDI36ScfSF6gHDOFBJpiBSVYEQBrLLpME+bUMJm2H6NMLVwMeni +acfzcNsgFYbQDfC+rAF1hM5+n02/t2A7nPPKHeJeaNijnZflQGDSNiH+0LS4F9p0 +o3/U37CYAqxva2ssJSRyoWXuJVrl5jLn8t+rSfrzkGkj2wTZ51xY/GXUl77M/C4K +zCUqNQT4YJEVdT1B/yMfGchs64JTBKbkTCJNjYy6zltz7GRUUG3RnFX7acM2w4y8 +PIWmawomDeCTmGCufsYkl4phX5GOZpIJhzbNi5stPvZR1FDUWSi9g/LMKHtThm3Y +Johw1+qRzT65ysCQblrGXnRl11z+o+I= +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH +# Subject: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH +# Label: "D-TRUST Root Class 3 CA 2 EV 2009" +# Serial: 623604 +# MD5 Fingerprint: aa:c6:43:2c:5e:2d:cd:c4:34:c0:50:4f:11:02:4f:b6 +# SHA1 Fingerprint: 96:c9:1b:0b:95:b4:10:98:42:fa:d0:d8:22:79:fe:60:fa:b9:16:83 +# SHA256 Fingerprint: ee:c5:49:6b:98:8c:e9:86:25:b9:34:09:2e:ec:29:08:be:d0:b0:f3:16:c2:d4:73:0c:84:ea:f1:f3:d3:48:81 +-----BEGIN CERTIFICATE----- +MIIEQzCCAyugAwIBAgIDCYP0MA0GCSqGSIb3DQEBCwUAMFAxCzAJBgNVBAYTAkRF +MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNVBAMMIUQtVFJVU1QgUm9vdCBD +bGFzcyAzIENBIDIgRVYgMjAwOTAeFw0wOTExMDUwODUwNDZaFw0yOTExMDUwODUw +NDZaMFAxCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNV 
+BAMMIUQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgRVYgMjAwOTCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAJnxhDRwui+3MKCOvXwEz75ivJn9gpfSegpn +ljgJ9hBOlSJzmY3aFS3nBfwZcyK3jpgAvDw9rKFs+9Z5JUut8Mxk2og+KbgPCdM0 +3TP1YtHhzRnp7hhPTFiu4h7WDFsVWtg6uMQYZB7jM7K1iXdODL/ZlGsTl28So/6Z +qQTMFexgaDbtCHu39b+T7WYxg4zGcTSHThfqr4uRjRxWQa4iN1438h3Z0S0NL2lR +p75mpoo6Kr3HGrHhFPC+Oh25z1uxav60sUYgovseO3Dvk5h9jHOW8sXvhXCtKSb8 +HgQ+HKDYD8tSg2J87otTlZCpV6LqYQXY+U3EJ/pure3511H3a6UCAwEAAaOCASQw +ggEgMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNOUikxiEyoZLsyvcop9Ntea +HNxnMA4GA1UdDwEB/wQEAwIBBjCB3QYDVR0fBIHVMIHSMIGHoIGEoIGBhn9sZGFw +Oi8vZGlyZWN0b3J5LmQtdHJ1c3QubmV0L0NOPUQtVFJVU1QlMjBSb290JTIwQ2xh +c3MlMjAzJTIwQ0ElMjAyJTIwRVYlMjAyMDA5LE89RC1UcnVzdCUyMEdtYkgsQz1E +RT9jZXJ0aWZpY2F0ZXJldm9jYXRpb25saXN0MEagRKBChkBodHRwOi8vd3d3LmQt +dHJ1c3QubmV0L2NybC9kLXRydXN0X3Jvb3RfY2xhc3NfM19jYV8yX2V2XzIwMDku +Y3JsMA0GCSqGSIb3DQEBCwUAA4IBAQA07XtaPKSUiO8aEXUHL7P+PPoeUSbrh/Yp +3uDx1MYkCenBz1UbtDDZzhr+BlGmFaQt77JLvyAoJUnRpjZ3NOhk31KxEcdzes05 +nsKtjHEh8lprr988TlWvsoRlFIm5d8sqMb7Po23Pb0iUMkZv53GMoKaEGTcH8gNF +CSuGdXzfX2lXANtu2KZyIktQ1HWYVt+3GP9DQ1CuekR78HlR10M9p9OB0/DJT7na +xpeG0ILD5EJt/rDiZE4OJudANCa1CInXCGNjOCd1HjPqbqjdn5lPdE2BiYBL3ZqX +KVwvvoFBuYz/6n1gBp7N1z3TLqMVvKjmJuVvw9y4AyHqnxbxLFS1 +-----END CERTIFICATE----- + +# Issuer: CN=CA Disig Root R2 O=Disig a.s. +# Subject: CN=CA Disig Root R2 O=Disig a.s. +# Label: "CA Disig Root R2" +# Serial: 10572350602393338211 +# MD5 Fingerprint: 26:01:fb:d8:27:a7:17:9a:45:54:38:1a:43:01:3b:03 +# SHA1 Fingerprint: b5:61:eb:ea:a4:de:e4:25:4b:69:1a:98:a5:57:47:c2:34:c7:d9:71 +# SHA256 Fingerprint: e2:3d:4a:03:6d:7b:70:e9:f5:95:b1:42:20:79:d2:b9:1e:df:bb:1f:b6:51:a0:63:3e:aa:8a:9d:c5:f8:07:03 +-----BEGIN CERTIFICATE----- +MIIFaTCCA1GgAwIBAgIJAJK4iNuwisFjMA0GCSqGSIb3DQEBCwUAMFIxCzAJBgNV +BAYTAlNLMRMwEQYDVQQHEwpCcmF0aXNsYXZhMRMwEQYDVQQKEwpEaXNpZyBhLnMu +MRkwFwYDVQQDExBDQSBEaXNpZyBSb290IFIyMB4XDTEyMDcxOTA5MTUzMFoXDTQy +MDcxOTA5MTUzMFowUjELMAkGA1UEBhMCU0sxEzARBgNVBAcTCkJyYXRpc2xhdmEx +EzARBgNVBAoTCkRpc2lnIGEucy4xGTAXBgNVBAMTEENBIERpc2lnIFJvb3QgUjIw +ggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCio8QACdaFXS1tFPbCw3Oe +NcJxVX6B+6tGUODBfEl45qt5WDza/3wcn9iXAng+a0EE6UG9vgMsRfYvZNSrXaNH +PWSb6WiaxswbP7q+sos0Ai6YVRn8jG+qX9pMzk0DIaPY0jSTVpbLTAwAFjxfGs3I +x2ymrdMxp7zo5eFm1tL7A7RBZckQrg4FY8aAamkw/dLukO8NJ9+flXP04SXabBbe +QTg06ov80egEFGEtQX6sx3dOy1FU+16SGBsEWmjGycT6txOgmLcRK7fWV8x8nhfR +yyX+hk4kLlYMeE2eARKmK6cBZW58Yh2EhN/qwGu1pSqVg8NTEQxzHQuyRpDRQjrO +QG6Vrf/GlK1ul4SOfW+eioANSW1z4nuSHsPzwfPrLgVv2RvPN3YEyLRa5Beny912 +H9AZdugsBbPWnDTYltxhh5EF5EQIM8HauQhl1K6yNg3ruji6DOWbnuuNZt2Zz9aJ +QfYEkoopKW1rOhzndX0CcQ7zwOe9yxndnWCywmZgtrEE7snmhrmaZkCo5xHtgUUD +i/ZnWejBBhG93c+AAk9lQHhcR1DIm+YfgXvkRKhbhZri3lrVx/k6RGZL5DJUfORs +nLMOPReisjQS1n6yqEm70XooQL6iFh/f5DcfEXP7kAplQ6INfPgGAVUzfbANuPT1 +rqVCV3w2EYx7XsQDnYx5nQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1Ud +DwEB/wQEAwIBBjAdBgNVHQ4EFgQUtZn4r7CU9eMg1gqtzk5WpC5uQu0wDQYJKoZI +hvcNAQELBQADggIBACYGXnDnZTPIgm7ZnBc6G3pmsgH2eDtpXi/q/075KMOYKmFM +tCQSin1tERT3nLXK5ryeJ45MGcipvXrA1zYObYVybqjGom32+nNjf7xueQgcnYqf +GopTpti72TVVsRHFqQOzVju5hJMiXn7B9hJSi+osZ7z+Nkz1uM/Rs0mSO9MpDpkb +lvdhuDvEK7Z4bLQjb/D907JedR+Zlais9trhxTF7+9FGs9K8Z7RiVLoJ92Owk6Ka ++elSLotgEqv89WBW7xBci8QaQtyDW2QOy7W81k/BfDxujRNt+3vrMNDcTa/F1bal +TFtxyegxvug4BkihGuLq0t4SOVga/4AOgnXmt8kHbA7v/zjxmHHEt38OFdAlab0i +nSvtBfZGR6ztwPDUO+Ls7pZbkBNOHlY667DvlruWIxG68kOGdGSVyCh13x01utI3 +gzhTODY7z2zp+WsO0PsE6E9312UBeIYMej4hYvF/Y3EMyZ9E26gnonW+boE+18Dr +G5gPcFw0sorMwIUY6256s/daoQe/qUKS82Ail+QUoQebTnbAjn39pCXHR+3/H3Os 
+zMOl6W8KjptlwlCFtaOgUxLMVYdh84GuEEZhvUQhuMI9dM9+JDX6HAcOmz0iyu8x +L4ysEr3vQCj8KWefshNPZiTEUxnpHikV7+ZtsH8tZ/3zbBt1RqPlShfppNcL +-----END CERTIFICATE----- + +# Issuer: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV +# Subject: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV +# Label: "ACCVRAIZ1" +# Serial: 6828503384748696800 +# MD5 Fingerprint: d0:a0:5a:ee:05:b6:09:94:21:a1:7d:f1:b2:29:82:02 +# SHA1 Fingerprint: 93:05:7a:88:15:c6:4f:ce:88:2f:fa:91:16:52:28:78:bc:53:64:17 +# SHA256 Fingerprint: 9a:6e:c0:12:e1:a7:da:9d:be:34:19:4d:47:8a:d7:c0:db:18:22:fb:07:1d:f1:29:81:49:6e:d1:04:38:41:13 +-----BEGIN CERTIFICATE----- +MIIH0zCCBbugAwIBAgIIXsO3pkN/pOAwDQYJKoZIhvcNAQEFBQAwQjESMBAGA1UE +AwwJQUNDVlJBSVoxMRAwDgYDVQQLDAdQS0lBQ0NWMQ0wCwYDVQQKDARBQ0NWMQsw +CQYDVQQGEwJFUzAeFw0xMTA1MDUwOTM3MzdaFw0zMDEyMzEwOTM3MzdaMEIxEjAQ +BgNVBAMMCUFDQ1ZSQUlaMTEQMA4GA1UECwwHUEtJQUNDVjENMAsGA1UECgwEQUND +VjELMAkGA1UEBhMCRVMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCb +qau/YUqXry+XZpp0X9DZlv3P4uRm7x8fRzPCRKPfmt4ftVTdFXxpNRFvu8gMjmoY +HtiP2Ra8EEg2XPBjs5BaXCQ316PWywlxufEBcoSwfdtNgM3802/J+Nq2DoLSRYWo +G2ioPej0RGy9ocLLA76MPhMAhN9KSMDjIgro6TenGEyxCQ0jVn8ETdkXhBilyNpA +lHPrzg5XPAOBOp0KoVdDaaxXbXmQeOW1tDvYvEyNKKGno6e6Ak4l0Squ7a4DIrhr +IA8wKFSVf+DuzgpmndFALW4ir50awQUZ0m/A8p/4e7MCQvtQqR0tkw8jq8bBD5L/ +0KIV9VMJcRz/RROE5iZe+OCIHAr8Fraocwa48GOEAqDGWuzndN9wrqODJerWx5eH +k6fGioozl2A3ED6XPm4pFdahD9GILBKfb6qkxkLrQaLjlUPTAYVtjrs78yM2x/47 +4KElB0iryYl0/wiPgL/AlmXz7uxLaL2diMMxs0Dx6M/2OLuc5NF/1OVYm3z61PMO +m3WR5LpSLhl+0fXNWhn8ugb2+1KoS5kE3fj5tItQo05iifCHJPqDQsGH+tUtKSpa +cXpkatcnYGMN285J9Y0fkIkyF/hzQ7jSWpOGYdbhdQrqeWZ2iE9x6wQl1gpaepPl +uUsXQA+xtrn13k/c4LOsOxFwYIRKQ26ZIMApcQrAZQIDAQABo4ICyzCCAscwfQYI +KwYBBQUHAQEEcTBvMEwGCCsGAQUFBzAChkBodHRwOi8vd3d3LmFjY3YuZXMvZmls +ZWFkbWluL0FyY2hpdm9zL2NlcnRpZmljYWRvcy9yYWl6YWNjdjEuY3J0MB8GCCsG +AQUFBzABhhNodHRwOi8vb2NzcC5hY2N2LmVzMB0GA1UdDgQWBBTSh7Tj3zcnk1X2 +VuqB5TbMjB4/vTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFNKHtOPfNyeT +VfZW6oHlNsyMHj+9MIIBcwYDVR0gBIIBajCCAWYwggFiBgRVHSAAMIIBWDCCASIG +CCsGAQUFBwICMIIBFB6CARAAQQB1AHQAbwByAGkAZABhAGQAIABkAGUAIABDAGUA +cgB0AGkAZgBpAGMAYQBjAGkA8wBuACAAUgBhAO0AegAgAGQAZQAgAGwAYQAgAEEA +QwBDAFYAIAAoAEEAZwBlAG4AYwBpAGEAIABkAGUAIABUAGUAYwBuAG8AbABvAGcA +7QBhACAAeQAgAEMAZQByAHQAaQBmAGkAYwBhAGMAaQDzAG4AIABFAGwAZQBjAHQA +cgDzAG4AaQBjAGEALAAgAEMASQBGACAAUQA0ADYAMAAxADEANQA2AEUAKQAuACAA +QwBQAFMAIABlAG4AIABoAHQAdABwADoALwAvAHcAdwB3AC4AYQBjAGMAdgAuAGUA +czAwBggrBgEFBQcCARYkaHR0cDovL3d3dy5hY2N2LmVzL2xlZ2lzbGFjaW9uX2Mu +aHRtMFUGA1UdHwROMEwwSqBIoEaGRGh0dHA6Ly93d3cuYWNjdi5lcy9maWxlYWRt +aW4vQXJjaGl2b3MvY2VydGlmaWNhZG9zL3JhaXphY2N2MV9kZXIuY3JsMA4GA1Ud +DwEB/wQEAwIBBjAXBgNVHREEEDAOgQxhY2N2QGFjY3YuZXMwDQYJKoZIhvcNAQEF +BQADggIBAJcxAp/n/UNnSEQU5CmH7UwoZtCPNdpNYbdKl02125DgBS4OxnnQ8pdp +D70ER9m+27Up2pvZrqmZ1dM8MJP1jaGo/AaNRPTKFpV8M9xii6g3+CfYCS0b78gU +JyCpZET/LtZ1qmxNYEAZSUNUY9rizLpm5U9EelvZaoErQNV/+QEnWCzI7UiRfD+m +AM/EKXMRNt6GGT6d7hmKG9Ww7Y49nCrADdg9ZuM8Db3VlFzi4qc1GwQA9j9ajepD +vV+JHanBsMyZ4k0ACtrJJ1vnE5Bc5PUzolVt3OAJTS+xJlsndQAJxGJ3KQhfnlms +tn6tn1QwIgPBHnFk/vk4CpYY3QIUrCPLBhwepH2NDd4nQeit2hW3sCPdK6jT2iWH +7ehVRE2I9DZ+hJp4rPcOVkkO1jMl1oRQQmwgEh0q1b688nCBpHBgvgW1m54ERL5h +I6zppSSMEYCUWqKiuUnSwdzRp+0xESyeGabu4VXhwOrPDYTkF7eifKXeVSUG7szA +h1xA2syVP1XgNce4hL60Xc16gwFy7ofmXx2utYXGJt/mwZrpHgJHnyqobalbz+xF +d3+YJ5oyXSrjhO7FmGYvliAd3djDJ9ew+f7Zfc3Qn48LFFhRny+Lwzgt3uiP1o2H +pPVWQxaZLPSkVrQ0uGE3ycJYgBugl6H8WY3pEfbRD0tVNEYqi4Y7 +-----END CERTIFICATE----- + +# Issuer: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA +# Subject: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA +# Label: "TWCA Global Root CA" +# Serial: 3262 +# MD5 
Fingerprint: f9:03:7e:cf:e6:9e:3c:73:7a:2a:90:07:69:ff:2b:96 +# SHA1 Fingerprint: 9c:bb:48:53:f6:a4:f6:d3:52:a4:e8:32:52:55:60:13:f5:ad:af:65 +# SHA256 Fingerprint: 59:76:90:07:f7:68:5d:0f:cd:50:87:2f:9f:95:d5:75:5a:5b:2b:45:7d:81:f3:69:2b:61:0a:98:67:2f:0e:1b +-----BEGIN CERTIFICATE----- +MIIFQTCCAymgAwIBAgICDL4wDQYJKoZIhvcNAQELBQAwUTELMAkGA1UEBhMCVFcx +EjAQBgNVBAoTCVRBSVdBTi1DQTEQMA4GA1UECxMHUm9vdCBDQTEcMBoGA1UEAxMT +VFdDQSBHbG9iYWwgUm9vdCBDQTAeFw0xMjA2MjcwNjI4MzNaFw0zMDEyMzExNTU5 +NTlaMFExCzAJBgNVBAYTAlRXMRIwEAYDVQQKEwlUQUlXQU4tQ0ExEDAOBgNVBAsT +B1Jvb3QgQ0ExHDAaBgNVBAMTE1RXQ0EgR2xvYmFsIFJvb3QgQ0EwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCwBdvI64zEbooh745NnHEKH1Jw7W2CnJfF +10xORUnLQEK1EjRsGcJ0pDFfhQKX7EMzClPSnIyOt7h52yvVavKOZsTuKwEHktSz +0ALfUPZVr2YOy+BHYC8rMjk1Ujoog/h7FsYYuGLWRyWRzvAZEk2tY/XTP3VfKfCh +MBwqoJimFb3u/Rk28OKRQ4/6ytYQJ0lM793B8YVwm8rqqFpD/G2Gb3PpN0Wp8DbH +zIh1HrtsBv+baz4X7GGqcXzGHaL3SekVtTzWoWH1EfcFbx39Eb7QMAfCKbAJTibc +46KokWofwpFFiFzlmLhxpRUZyXx1EcxwdE8tmx2RRP1WKKD+u4ZqyPpcC1jcxkt2 +yKsi2XMPpfRaAok/T54igu6idFMqPVMnaR1sjjIsZAAmY2E2TqNGtz99sy2sbZCi +laLOz9qC5wc0GZbpuCGqKX6mOL6OKUohZnkfs8O1CWfe1tQHRvMq2uYiN2DLgbYP +oA/pyJV/v1WRBXrPPRXAb94JlAGD1zQbzECl8LibZ9WYkTunhHiVJqRaCPgrdLQA +BDzfuBSO6N+pjWxnkjMdwLfS7JLIvgm/LCkFbwJrnu+8vyq8W8BQj0FwcYeyTbcE +qYSjMq+u7msXi7Kx/mzhkIyIqJdIzshNy/MGz19qCkKxHh53L46g5pIOBvwFItIm +4TFRfTLcDwIDAQABoyMwITAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB +/zANBgkqhkiG9w0BAQsFAAOCAgEAXzSBdu+WHdXltdkCY4QWwa6gcFGn90xHNcgL +1yg9iXHZqjNB6hQbbCEAwGxCGX6faVsgQt+i0trEfJdLjbDorMjupWkEmQqSpqsn +LhpNgb+E1HAerUf+/UqdM+DyucRFCCEK2mlpc3INvjT+lIutwx4116KD7+U4x6WF +H6vPNOw/KP4M8VeGTslV9xzU2KV9Bnpv1d8Q34FOIWWxtuEXeZVFBs5fzNxGiWNo +RI2T9GRwoD2dKAXDOXC4Ynsg/eTb6QihuJ49CcdP+yz4k3ZB3lLg4VfSnQO8d57+ +nile98FRYB/e2guyLXW3Q0iT5/Z5xoRdgFlglPx4mI88k1HtQJAH32RjJMtOcQWh +15QaiDLxInQirqWm2BJpTGCjAu4r7NRjkgtevi92a6O2JryPA9gK8kxkRr05YuWW +6zRjESjMlfGt7+/cgFhI6Uu46mWs6fyAtbXIRfmswZ/ZuepiiI7E8UuDEq3mi4TW +nsLrgxifarsbJGAzcMzs9zLzXNl5fe+epP7JI8Mk7hWSsT2RTyaGvWZzJBPqpK5j +wa19hAM8EHiGG3njxPPyBJUgriOCxLM6AGK/5jYk4Ve6xx6QddVfP5VhK8E7zeWz +aGHQRiapIVJpLesux+t3zqY6tQMzT3bR51xUAV3LePTJDL/PEo4XLSNolOer/qmy +KwbQBM0= +-----END CERTIFICATE----- + +# Issuer: CN=TeliaSonera Root CA v1 O=TeliaSonera +# Subject: CN=TeliaSonera Root CA v1 O=TeliaSonera +# Label: "TeliaSonera Root CA v1" +# Serial: 199041966741090107964904287217786801558 +# MD5 Fingerprint: 37:41:49:1b:18:56:9a:26:f5:ad:c2:66:fb:40:a5:4c +# SHA1 Fingerprint: 43:13:bb:96:f1:d5:86:9b:c1:4e:6a:92:f6:cf:f6:34:69:87:82:37 +# SHA256 Fingerprint: dd:69:36:fe:21:f8:f0:77:c1:23:a1:a5:21:c1:22:24:f7:22:55:b7:3e:03:a7:26:06:93:e8:a2:4b:0f:a3:89 +-----BEGIN CERTIFICATE----- +MIIFODCCAyCgAwIBAgIRAJW+FqD3LkbxezmCcvqLzZYwDQYJKoZIhvcNAQEFBQAw +NzEUMBIGA1UECgwLVGVsaWFTb25lcmExHzAdBgNVBAMMFlRlbGlhU29uZXJhIFJv +b3QgQ0EgdjEwHhcNMDcxMDE4MTIwMDUwWhcNMzIxMDE4MTIwMDUwWjA3MRQwEgYD +VQQKDAtUZWxpYVNvbmVyYTEfMB0GA1UEAwwWVGVsaWFTb25lcmEgUm9vdCBDQSB2 +MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMK+6yfwIaPzaSZVfp3F +VRaRXP3vIb9TgHot0pGMYzHw7CTww6XScnwQbfQ3t+XmfHnqjLWCi65ItqwA3GV1 +7CpNX8GH9SBlK4GoRz6JI5UwFpB/6FcHSOcZrr9FZ7E3GwYq/t75rH2D+1665I+X +Z75Ljo1kB1c4VWk0Nj0TSO9P4tNmHqTPGrdeNjPUtAa9GAH9d4RQAEX1jF3oI7x+ +/jXh7VB7qTCNGdMJjmhnXb88lxhTuylixcpecsHHltTbLaC0H2kD7OriUPEMPPCs +81Mt8Bz17Ww5OXOAFshSsCPN4D7c3TxHoLs1iuKYaIu+5b9y7tL6pe0S7fyYGKkm +dtwoSxAgHNN/Fnct7W+A90m7UwW7XWjH1Mh1Fj+JWov3F0fUTPHSiXk+TT2YqGHe +Oh7S+F4D4MHJHIzTjU3TlTazN19jY5szFPAtJmtTfImMMsJu7D0hADnJoWjiUIMu +sDor8zagrC/kb2HCUQk5PotTubtn2txTuXZZNp1D5SDgPTJghSJRt8czu90VL6R4 
+pgd7gUY2BIbdeTXHlSw7sKMXNeVzH7RcWe/a6hBle3rQf5+ztCo3O3CLm1u5K7fs +slESl1MpWtTwEhDcTwK7EpIvYtQ/aUN8Ddb8WHUBiJ1YFkveupD/RwGJBmr2X7KQ +arMCpgKIv7NHfirZ1fpoeDVNAgMBAAGjPzA9MA8GA1UdEwEB/wQFMAMBAf8wCwYD +VR0PBAQDAgEGMB0GA1UdDgQWBBTwj1k4ALP1j5qWDNXr+nuqF+gTEjANBgkqhkiG +9w0BAQUFAAOCAgEAvuRcYk4k9AwI//DTDGjkk0kiP0Qnb7tt3oNmzqjMDfz1mgbl +dxSR651Be5kqhOX//CHBXfDkH1e3damhXwIm/9fH907eT/j3HEbAek9ALCI18Bmx +0GtnLLCo4MBANzX2hFxc469CeP6nyQ1Q6g2EdvZR74NTxnr/DlZJLo961gzmJ1Tj +TQpgcmLNkQfWpb/ImWvtxBnmq0wROMVvMeJuScg/doAmAyYp4Db29iBT4xdwNBed +Y2gea+zDTYa4EzAvXUYNR0PVG6pZDrlcjQZIrXSHX8f8MVRBE+LHIQ6e4B4N4cB7 +Q4WQxYpYxmUKeFfyxiMPAdkgS94P+5KFdSpcc41teyWRyu5FrgZLAMzTsVlQ2jqI +OylDRl6XK1TOU2+NSueW+r9xDkKLfP0ooNBIytrEgUy7onOTJsjrDNYmiLbAJM+7 +vVvrdX3pCI6GMyx5dwlppYn8s3CQh3aP0yK7Qs69cwsgJirQmz1wHiRszYd2qReW +t88NkvuOGKmYSdGe/mBEciG5Ge3C9THxOUiIkCR1VBatzvT4aRRkOfujuLpwQMcn +HL/EVlP6Y2XQ8xwOFvVrhlhNGNTkDY6lnVuR3HYkUD/GKvvZt5y11ubQ2egZixVx +SK236thZiNSQvxaz2emsWWFUyBy6ysHK4bkgTI86k4mloMy/0/Z1pHWWbVY= +-----END CERTIFICATE----- + +# Issuer: CN=E-Tugra Certification Authority O=E-Tu\u011fra EBG Bili\u015fim Teknolojileri ve Hizmetleri A.\u015e. OU=E-Tugra Sertifikasyon Merkezi +# Subject: CN=E-Tugra Certification Authority O=E-Tu\u011fra EBG Bili\u015fim Teknolojileri ve Hizmetleri A.\u015e. OU=E-Tugra Sertifikasyon Merkezi +# Label: "E-Tugra Certification Authority" +# Serial: 7667447206703254355 +# MD5 Fingerprint: b8:a1:03:63:b0:bd:21:71:70:8a:6f:13:3a:bb:79:49 +# SHA1 Fingerprint: 51:c6:e7:08:49:06:6e:f3:92:d4:5c:a0:0d:6d:a3:62:8f:c3:52:39 +# SHA256 Fingerprint: b0:bf:d5:2b:b0:d7:d9:bd:92:bf:5d:4d:c1:3d:a2:55:c0:2c:54:2f:37:83:65:ea:89:39:11:f5:5e:55:f2:3c +-----BEGIN CERTIFICATE----- +MIIGSzCCBDOgAwIBAgIIamg+nFGby1MwDQYJKoZIhvcNAQELBQAwgbIxCzAJBgNV +BAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+BgNVBAoMN0UtVHXEn3JhIEVCRyBC +aWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhpem1ldGxlcmkgQS7Fni4xJjAkBgNV +BAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBNZXJrZXppMSgwJgYDVQQDDB9FLVR1 +Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTEzMDMwNTEyMDk0OFoXDTIz +MDMwMzEyMDk0OFowgbIxCzAJBgNVBAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+ +BgNVBAoMN0UtVHXEn3JhIEVCRyBCaWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhp +em1ldGxlcmkgQS7Fni4xJjAkBgNVBAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBN +ZXJrZXppMSgwJgYDVQQDDB9FLVR1Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5 +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA4vU/kwVRHoViVF56C/UY +B4Oufq9899SKa6VjQzm5S/fDxmSJPZQuVIBSOTkHS0vdhQd2h8y/L5VMzH2nPbxH +D5hw+IyFHnSOkm0bQNGZDbt1bsipa5rAhDGvykPL6ys06I+XawGb1Q5KCKpbknSF +Q9OArqGIW66z6l7LFpp3RMih9lRozt6Plyu6W0ACDGQXwLWTzeHxE2bODHnv0ZEo +q1+gElIwcxmOj+GMB6LDu0rw6h8VqO4lzKRG+Bsi77MOQ7osJLjFLFzUHPhdZL3D +k14opz8n8Y4e0ypQBaNV2cvnOVPAmJ6MVGKLJrD3fY185MaeZkJVgkfnsliNZvcH +fC425lAcP9tDJMW/hkd5s3kc91r0E+xs+D/iWR+V7kI+ua2oMoVJl0b+SzGPWsut +dEcf6ZG33ygEIqDUD13ieU/qbIWGvaimzuT6w+Gzrt48Ue7LE3wBf4QOXVGUnhMM +ti6lTPk5cDZvlsouDERVxcr6XQKj39ZkjFqzAQqptQpHF//vkUAqjqFGOjGY5RH8 +zLtJVor8udBhmm9lbObDyz51Sf6Pp+KJxWfXnUYTTjF2OySznhFlhqt/7x3U+Lzn +rFpct1pHXFXOVbQicVtbC/DP3KBhZOqp12gKY6fgDT+gr9Oq0n7vUaDmUStVkhUX +U8u3Zg5mTPj5dUyQ5xJwx0UCAwEAAaNjMGEwHQYDVR0OBBYEFC7j27JJ0JxUeVz6 +Jyr+zE7S6E5UMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAULuPbsknQnFR5 +XPonKv7MTtLoTlQwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAF +Nzr0TbdF4kV1JI+2d1LoHNgQk2Xz8lkGpD4eKexd0dCrfOAKkEh47U6YA5n+KGCR +HTAduGN8qOY1tfrTYXbm1gdLymmasoR6d5NFFxWfJNCYExL/u6Au/U5Mh/jOXKqY +GwXgAEZKgoClM4so3O0409/lPun++1ndYYRP0lSWE2ETPo+Aab6TR7U1Q9Jauz1c +77NCR807VRMGsAnb/WP2OogKmW9+4c4bU2pEZiNRCHu8W1Ki/QY3OEBhj0qWuJA3 ++GbHeJAAFS6LrVE1Uweoa2iu+U48BybNCAVwzDk/dr2l02cmAYamU9JgO3xDf1WK 
+vJUawSg5TB9D0pH0clmKuVb8P7Sd2nCcdlqMQ1DujjByTd//SffGqWfZbawCEeI6 +FiWnWAjLb1NBnEg4R2gz0dfHj9R0IdTDBZB6/86WiLEVKV0jq9BgoRJP3vQXzTLl +yb/IQ639Lo7xr+L0mPoSHyDYwKcMhcWQ9DstliaxLL5Mq+ux0orJ23gTDx4JnW2P +AJ8C2sH6H3p6CcRK5ogql5+Ji/03X186zjhZhkuvcQu02PJwT58yE+Owp1fl2tpD +y4Q08ijE6m30Ku/Ba3ba+367hTzSU8JNvnHhRdH9I2cNE3X7z2VnIp2usAnRCf8d +NL/+I5c30jn6PQ0GC7TbO6Orb1wdtn7os4I07QZcJA== +-----END CERTIFICATE----- + +# Issuer: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Subject: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Label: "T-TeleSec GlobalRoot Class 2" +# Serial: 1 +# MD5 Fingerprint: 2b:9b:9e:e4:7b:6c:1f:00:72:1a:cc:c1:77:79:df:6a +# SHA1 Fingerprint: 59:0d:2d:7d:88:4f:40:2e:61:7e:a5:62:32:17:65:cf:17:d8:94:e9 +# SHA256 Fingerprint: 91:e2:f5:78:8d:58:10:eb:a7:ba:58:73:7d:e1:54:8a:8e:ca:cd:01:45:98:bc:0b:14:3e:04:1b:17:05:25:52 +-----BEGIN CERTIFICATE----- +MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx +KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd +BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl +YyBHbG9iYWxSb290IENsYXNzIDIwHhcNMDgxMDAxMTA0MDE0WhcNMzMxMDAxMjM1 +OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy +aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 +ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDIwggEiMA0G +CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCqX9obX+hzkeXaXPSi5kfl82hVYAUd +AqSzm1nzHoqvNK38DcLZSBnuaY/JIPwhqgcZ7bBcrGXHX+0CfHt8LRvWurmAwhiC +FoT6ZrAIxlQjgeTNuUk/9k9uN0goOA/FvudocP05l03Sx5iRUKrERLMjfTlH6VJi +1hKTXrcxlkIF+3anHqP1wvzpesVsqXFP6st4vGCvx9702cu+fjOlbpSD8DT6Iavq +jnKgP6TeMFvvhk1qlVtDRKgQFRzlAVfFmPHmBiiRqiDFt1MmUUOyCxGVWOHAD3bZ +wI18gfNycJ5v/hqO2V81xrJvNHy+SE/iWjnX2J14np+GPgNeGYtEotXHAgMBAAGj +QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS/ +WSA2AHmgoCJrjNXyYdK4LMuCSjANBgkqhkiG9w0BAQsFAAOCAQEAMQOiYQsfdOhy +NsZt+U2e+iKo4YFWz827n+qrkRk4r6p8FU3ztqONpfSO9kSpp+ghla0+AGIWiPAC +uvxhI+YzmzB6azZie60EI4RYZeLbK4rnJVM3YlNfvNoBYimipidx5joifsFvHZVw +IEoHNN/q/xWA5brXethbdXwFeilHfkCoMRN3zUA7tFFHei4R40cR3p1m0IvVVGb6 +g1XqfMIpiRvpb7PO4gWEyS8+eIVibslfwXhjdFjASBgMmTnrpMwatXlajRWc2BQN +9noHV8cigwUtPJslJj0Ys6lDfMjIq2SPDqO/nBudMNva0Bkuqjzx+zOAduTNrRlP +BSeOE6Fuwg== +-----END CERTIFICATE----- + +# Issuer: CN=Atos TrustedRoot 2011 O=Atos +# Subject: CN=Atos TrustedRoot 2011 O=Atos +# Label: "Atos TrustedRoot 2011" +# Serial: 6643877497813316402 +# MD5 Fingerprint: ae:b9:c4:32:4b:ac:7f:5d:66:cc:77:94:bb:2a:77:56 +# SHA1 Fingerprint: 2b:b1:f5:3e:55:0c:1d:c5:f1:d4:e6:b7:6a:46:4b:55:06:02:ac:21 +# SHA256 Fingerprint: f3:56:be:a2:44:b7:a9:1e:b3:5d:53:ca:9a:d7:86:4a:ce:01:8e:2d:35:d5:f8:f9:6d:df:68:a6:f4:1a:a4:74 +-----BEGIN CERTIFICATE----- +MIIDdzCCAl+gAwIBAgIIXDPLYixfszIwDQYJKoZIhvcNAQELBQAwPDEeMBwGA1UE +AwwVQXRvcyBUcnVzdGVkUm9vdCAyMDExMQ0wCwYDVQQKDARBdG9zMQswCQYDVQQG +EwJERTAeFw0xMTA3MDcxNDU4MzBaFw0zMDEyMzEyMzU5NTlaMDwxHjAcBgNVBAMM +FUF0b3MgVHJ1c3RlZFJvb3QgMjAxMTENMAsGA1UECgwEQXRvczELMAkGA1UEBhMC +REUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCVhTuXbyo7LjvPpvMp +Nb7PGKw+qtn4TaA+Gke5vJrf8v7MPkfoepbCJI419KkM/IL9bcFyYie96mvr54rM +VD6QUM+A1JX76LWC1BTFtqlVJVfbsVD2sGBkWXppzwO3bw2+yj5vdHLqqjAqc2K+ +SZFhyBH+DgMq92og3AIVDV4VavzjgsG1xZ1kCWyjWZgHJ8cblithdHFsQ/H3NYkQ +4J7sVaE3IqKHBAUsR320HLliKWYoyrfhk/WklAOZuXCFteZI6o1Q/NnezG8HDt0L +cp2AMBYHlT8oDv3FdU9T1nSatCQujgKRz3bFmx5VdJx4IbHwLfELn8LVlhgf8FQi +eowHAgMBAAGjfTB7MB0GA1UdDgQWBBSnpQaxLKYJYO7Rl+lwrrw7GWzbITAPBgNV 
+HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFKelBrEspglg7tGX6XCuvDsZbNshMBgG +A1UdIAQRMA8wDQYLKwYBBAGwLQMEAQEwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3 +DQEBCwUAA4IBAQAmdzTblEiGKkGdLD4GkGDEjKwLVLgfuXvTBznk+j57sj1O7Z8j +vZfza1zv7v1Apt+hk6EKhqzvINB5Ab149xnYJDE0BAGmuhWawyfc2E8PzBhj/5kP +DpFrdRbhIfzYJsdHt6bPWHJxfrrhTZVHO8mvbaG0weyJ9rQPOLXiZNwlz6bb65pc +maHFCN795trV1lpFDMS3wrUU77QR/w4VtfX128a961qn8FYiqTxlVMYVqL2Gns2D +lmh6cYGJ4Qvh6hEbaAjMaZ7snkGeRDImeuKHCnE96+RapNLbxc3G3mB/ufNPRJLv +KrcYPqcZ2Qt9sTdBQrC6YB3y/gkRsPCHe6ed +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 1 G3" +# Serial: 687049649626669250736271037606554624078720034195 +# MD5 Fingerprint: a4:bc:5b:3f:fe:37:9a:fa:64:f0:e2:fa:05:3d:0b:ab +# SHA1 Fingerprint: 1b:8e:ea:57:96:29:1a:c9:39:ea:b8:0a:81:1a:73:73:c0:93:79:67 +# SHA256 Fingerprint: 8a:86:6f:d1:b2:76:b5:7e:57:8e:92:1c:65:82:8a:2b:ed:58:e9:f2:f2:88:05:41:34:b7:f1:f4:bf:c9:cc:74 +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIUeFhfLq0sGUvjNwc1NBMotZbUZZMwDQYJKoZIhvcNAQEL +BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc +BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMSBHMzAeFw0xMjAxMTIxNzI3NDRaFw00 +MjAxMTIxNzI3NDRaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM +aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDEgRzMwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCgvlAQjunybEC0BJyFuTHK3C3kEakEPBtV +wedYMB0ktMPvhd6MLOHBPd+C5k+tR4ds7FtJwUrVu4/sh6x/gpqG7D0DmVIB0jWe +rNrwU8lmPNSsAgHaJNM7qAJGr6Qc4/hzWHa39g6QDbXwz8z6+cZM5cOGMAqNF341 +68Xfuw6cwI2H44g4hWf6Pser4BOcBRiYz5P1sZK0/CPTz9XEJ0ngnjybCKOLXSoh +4Pw5qlPafX7PGglTvF0FBM+hSo+LdoINofjSxxR3W5A2B4GbPgb6Ul5jxaYA/qXp +UhtStZI5cgMJYr2wYBZupt0lwgNm3fME0UDiTouG9G/lg6AnhF4EwfWQvTA9xO+o +abw4m6SkltFi2mnAAZauy8RRNOoMqv8hjlmPSlzkYZqn0ukqeI1RPToV7qJZjqlc +3sX5kCLliEVx3ZGZbHqfPT2YfF72vhZooF6uCyP8Wg+qInYtyaEQHeTTRCOQiJ/G +KubX9ZqzWB4vMIkIG1SitZgj7Ah3HJVdYdHLiZxfokqRmu8hqkkWCKi9YSgxyXSt +hfbZxbGL0eUQMk1fiyA6PEkfM4VZDdvLCXVDaXP7a3F98N/ETH3Goy7IlXnLc6KO +Tk0k+17kBL5yG6YnLUlamXrXXAkgt3+UuU/xDRxeiEIbEbfnkduebPRq34wGmAOt +zCjvpUfzUwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +BjAdBgNVHQ4EFgQUo5fW816iEOGrRZ88F2Q87gFwnMwwDQYJKoZIhvcNAQELBQAD +ggIBABj6W3X8PnrHX3fHyt/PX8MSxEBd1DKquGrX1RUVRpgjpeaQWxiZTOOtQqOC +MTaIzen7xASWSIsBx40Bz1szBpZGZnQdT+3Btrm0DWHMY37XLneMlhwqI2hrhVd2 +cDMT/uFPpiN3GPoajOi9ZcnPP/TJF9zrx7zABC4tRi9pZsMbj/7sPtPKlL92CiUN +qXsCHKnQO18LwIE6PWThv6ctTr1NxNgpxiIY0MWscgKCP6o6ojoilzHdCGPDdRS5 +YCgtW2jgFqlmgiNR9etT2DGbe+m3nUvriBbP+V04ikkwj+3x6xn0dxoxGE1nVGwv +b2X52z3sIexe9PSLymBlVNFxZPT5pqOBMzYzcfCkeF9OrYMh3jRJjehZrJ3ydlo2 +8hP0r+AJx2EqbPfgna67hkooby7utHnNkDPDs3b69fBsnQGQ+p6Q9pxyz0fawx/k +NSBT8lTR32GDpgLiJTjehTItXnOQUl1CxM49S+H5GYQd1aJQzEH7QRTDvdbJWqNj +ZgKAvQU6O0ec7AAmTPWIUb+oI38YB7AL7YsmoWTTYUrrXJ/es69nA7Mf3W1daWhp +q1467HxpvMc7hU6eFbm0FU/DlXpY18ls6Wy58yljXrQs8C097Vpl4KlbQMJImYFt +nh8GKjwStIsPm6Ik8KaN1nrgS7ZklmOVhMJKzRwuJIczYOXD +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 2 G3" +# Serial: 390156079458959257446133169266079962026824725800 +# MD5 Fingerprint: af:0c:86:6e:bf:40:2d:7f:0b:3e:12:50:ba:12:3d:06 +# SHA1 Fingerprint: 09:3c:61:f3:8b:8b:dc:7d:55:df:75:38:02:05:00:e1:25:f5:c8:36 +# SHA256 Fingerprint: 8f:e4:fb:0a:f9:3a:4d:0d:67:db:0b:eb:b2:3e:37:c7:1b:f3:25:dc:bc:dd:24:0e:a0:4d:af:58:b4:7e:18:40 +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIURFc0JFuBiZs18s64KztbpybwdSgwDQYJKoZIhvcNAQEL 
+BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc +BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMiBHMzAeFw0xMjAxMTIxODU5MzJaFw00 +MjAxMTIxODU5MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM +aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDIgRzMwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQChriWyARjcV4g/Ruv5r+LrI3HimtFhZiFf +qq8nUeVuGxbULX1QsFN3vXg6YOJkApt8hpvWGo6t/x8Vf9WVHhLL5hSEBMHfNrMW +n4rjyduYNM7YMxcoRvynyfDStNVNCXJJ+fKH46nafaF9a7I6JaltUkSs+L5u+9ym +c5GQYaYDFCDy54ejiK2toIz/pgslUiXnFgHVy7g1gQyjO/Dh4fxaXc6AcW34Sas+ +O7q414AB+6XrW7PFXmAqMaCvN+ggOp+oMiwMzAkd056OXbxMmO7FGmh77FOm6RQ1 +o9/NgJ8MSPsc9PG/Srj61YxxSscfrf5BmrODXfKEVu+lV0POKa2Mq1W/xPtbAd0j +IaFYAI7D0GoT7RPjEiuA3GfmlbLNHiJuKvhB1PLKFAeNilUSxmn1uIZoL1NesNKq +IcGY5jDjZ1XHm26sGahVpkUG0CM62+tlXSoREfA7T8pt9DTEceT/AFr2XK4jYIVz +8eQQsSWu1ZK7E8EM4DnatDlXtas1qnIhO4M15zHfeiFuuDIIfR0ykRVKYnLP43eh +vNURG3YBZwjgQQvD6xVu+KQZ2aKrr+InUlYrAoosFCT5v0ICvybIxo/gbjh9Uy3l +7ZizlWNof/k19N+IxWA1ksB8aRxhlRbQ694Lrz4EEEVlWFA4r0jyWbYW8jwNkALG +cC4BrTwV1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +BjAdBgNVHQ4EFgQU7edvdlq/YOxJW8ald7tyFnGbxD0wDQYJKoZIhvcNAQELBQAD +ggIBAJHfgD9DCX5xwvfrs4iP4VGyvD11+ShdyLyZm3tdquXK4Qr36LLTn91nMX66 +AarHakE7kNQIXLJgapDwyM4DYvmL7ftuKtwGTTwpD4kWilhMSA/ohGHqPHKmd+RC +roijQ1h5fq7KpVMNqT1wvSAZYaRsOPxDMuHBR//47PERIjKWnML2W2mWeyAMQ0Ga +W/ZZGYjeVYg3UQt4XAoeo0L9x52ID8DyeAIkVJOviYeIyUqAHerQbj5hLja7NQ4n +lv1mNDthcnPxFlxHBlRJAHpYErAK74X9sbgzdWqTHBLmYF5vHX/JHyPLhGGfHoJE ++V+tYlUkmlKY7VHnoX6XOuYvHxHaU4AshZ6rNRDbIl9qxV6XU/IyAgkwo1jwDQHV +csaxfGl7w/U2Rcxhbl5MlMVerugOXou/983g7aEOGzPuVBj+D77vfoRrQ+NwmNtd +dbINWQeFFSM51vHfqSYP1kjHs6Yi9TM3WpVHn3u6GBVv/9YUZINJ0gpnIdsPNWNg +KCLjsZWDzYWm3S8P52dSbrsvhXz1SnPnxT7AvSESBT/8twNJAlvIJebiVDj1eYeM +HVOyToV7BjjHLPj4sHKNJeV3UvQDHEimUF+IIDBu8oJDqz2XhOdT+yHBTw8imoa4 +WSr2Rz0ZiC3oheGe7IUIarFsNMkd7EgrO3jtZsSOeWmD3n+M +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 3 G3" +# Serial: 268090761170461462463995952157327242137089239581 +# MD5 Fingerprint: df:7d:b9:ad:54:6f:68:a1:df:89:57:03:97:43:b0:d7 +# SHA1 Fingerprint: 48:12:bd:92:3c:a8:c4:39:06:e7:30:6d:27:96:e6:a4:cf:22:2e:7d +# SHA256 Fingerprint: 88:ef:81:de:20:2e:b0:18:45:2e:43:f8:64:72:5c:ea:5f:bd:1f:c2:d9:d2:05:73:07:09:c5:d8:b8:69:0f:46 +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIULvWbAiin23r/1aOp7r0DoM8Sah0wDQYJKoZIhvcNAQEL +BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc +BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMyBHMzAeFw0xMjAxMTIyMDI2MzJaFw00 +MjAxMTIyMDI2MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM +aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDMgRzMwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCzyw4QZ47qFJenMioKVjZ/aEzHs286IxSR +/xl/pcqs7rN2nXrpixurazHb+gtTTK/FpRp5PIpM/6zfJd5O2YIyC0TeytuMrKNu +FoM7pmRLMon7FhY4futD4tN0SsJiCnMK3UmzV9KwCoWdcTzeo8vAMvMBOSBDGzXR +U7Ox7sWTaYI+FrUoRqHe6okJ7UO4BUaKhvVZR74bbwEhELn9qdIoyhA5CcoTNs+c +ra1AdHkrAj80//ogaX3T7mH1urPnMNA3I4ZyYUUpSFlob3emLoG+B01vr87ERROR +FHAGjx+f+IdpsQ7vw4kZ6+ocYfx6bIrc1gMLnia6Et3UVDmrJqMz6nWB2i3ND0/k +A9HvFZcba5DFApCTZgIhsUfei5pKgLlVj7WiL8DWM2fafsSntARE60f75li59wzw +eyuxwHApw0BiLTtIadwjPEjrewl5qW3aqDCYz4ByA4imW0aucnl8CAMhZa634Ryl +sSqiMd5mBPfAdOhx3v89WcyWJhKLhZVXGqtrdQtEPREoPHtht+KPZ0/l7DxMYIBp +VzgeAVuNVejH38DMdyM0SXV89pgR6y3e7UEuFAUCf+D+IOs15xGsIs5XPd7JMG0Q +A4XN8f+MFrXBsj6IbGB/kE+V9/YtrQE5BwT6dYB9v0lQ7e/JxHwc64B+27bQ3RP+ +ydOc17KXqQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +BjAdBgNVHQ4EFgQUxhfQvKjqAkPyGwaZXSuQILnXnOQwDQYJKoZIhvcNAQELBQAD 
+ggIBADRh2Va1EodVTd2jNTFGu6QHcrxfYWLopfsLN7E8trP6KZ1/AvWkyaiTt3px +KGmPc+FSkNrVvjrlt3ZqVoAh313m6Tqe5T72omnHKgqwGEfcIHB9UqM+WXzBusnI +FUBhynLWcKzSt/Ac5IYp8M7vaGPQtSCKFWGafoaYtMnCdvvMujAWzKNhxnQT5Wvv +oxXqA/4Ti2Tk08HS6IT7SdEQTXlm66r99I0xHnAUrdzeZxNMgRVhvLfZkXdxGYFg +u/BYpbWcC/ePIlUnwEsBbTuZDdQdm2NnL9DuDcpmvJRPpq3t/O5jrFc/ZSXPsoaP +0Aj/uHYUbt7lJ+yreLVTubY/6CD50qi+YUbKh4yE8/nxoGibIh6BJpsQBJFxwAYf +3KDTuVan45gtf4Od34wrnDKOMpTwATwiKp9Dwi7DmDkHOHv8XgBCH/MyJnmDhPbl +8MFREsALHgQjDFSlTC9JxUrRtm5gDWv8a4uFJGS3iQ6rJUdbPM9+Sb3H6QrG2vd+ +DhcI00iX0HGS8A85PjRqHH3Y8iKuu2n0M7SmSFXRDw4m6Oy2Cy2nhTXN/VnIn9HN +PlopNLk9hM6xZdRZkZFWdSHBd575euFgndOtBBj0fOtek49TSiIp+EgrPk2GrFt/ +ywaZWWDYWGWVjUTR939+J399roD1B0y2PpxxVJkES/1Y+Zj0 +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Assured ID Root G2" +# Serial: 15385348160840213938643033620894905419 +# MD5 Fingerprint: 92:38:b9:f8:63:24:82:65:2c:57:33:e6:fe:81:8f:9d +# SHA1 Fingerprint: a1:4b:48:d9:43:ee:0a:0e:40:90:4f:3c:e0:a4:c0:91:93:51:5d:3f +# SHA256 Fingerprint: 7d:05:eb:b6:82:33:9f:8c:94:51:ee:09:4e:eb:fe:fa:79:53:a1:14:ed:b2:f4:49:49:45:2f:ab:7d:2f:c1:85 +-----BEGIN CERTIFICATE----- +MIIDljCCAn6gAwIBAgIQC5McOtY5Z+pnI7/Dr5r0SzANBgkqhkiG9w0BAQsFADBl +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv +b3QgRzIwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQG +EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl +cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzIwggEi +MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDZ5ygvUj82ckmIkzTz+GoeMVSA +n61UQbVH35ao1K+ALbkKz3X9iaV9JPrjIgwrvJUXCzO/GU1BBpAAvQxNEP4Htecc +biJVMWWXvdMX0h5i89vqbFCMP4QMls+3ywPgym2hFEwbid3tALBSfK+RbLE4E9Hp +EgjAALAcKxHad3A2m67OeYfcgnDmCXRwVWmvo2ifv922ebPynXApVfSr/5Vh88lA +bx3RvpO704gqu52/clpWcTs/1PPRCv4o76Pu2ZmvA9OPYLfykqGxvYmJHzDNw6Yu +YjOuFgJ3RFrngQo8p0Quebg/BLxcoIfhG69Rjs3sLPr4/m3wOnyqi+RnlTGNAgMB +AAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQW +BBTOw0q5mVXyuNtgv6l+vVa1lzan1jANBgkqhkiG9w0BAQsFAAOCAQEAyqVVjOPI +QW5pJ6d1Ee88hjZv0p3GeDgdaZaikmkuOGybfQTUiaWxMTeKySHMq2zNixya1r9I +0jJmwYrA8y8678Dj1JGG0VDjA9tzd29KOVPt3ibHtX2vK0LRdWLjSisCx1BL4Gni +lmwORGYQRI+tBev4eaymG+g3NJ1TyWGqolKvSnAWhsI6yLETcDbYz+70CjTVW0z9 +B5yiutkBclzzTcHdDrEcDcRjvq30FPuJ7KJBDkzMyFdA0G4Dqs0MjomZmWzwPDCv +ON9vvKO+KSAnq3T/EyJ43pdSVR6DtVQgA+6uwE9W3jfMw3+qBCe703e4YtsXfJwo +IhNzbM8m9Yop5w== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Assured ID Root G3" +# Serial: 15459312981008553731928384953135426796 +# MD5 Fingerprint: 7c:7f:65:31:0c:81:df:8d:ba:3e:99:e2:5c:ad:6e:fb +# SHA1 Fingerprint: f5:17:a2:4f:9a:48:c6:c9:f8:a2:00:26:9f:dc:0f:48:2c:ab:30:89 +# SHA256 Fingerprint: 7e:37:cb:8b:4c:47:09:0c:ab:36:55:1b:a6:f4:5d:b8:40:68:0f:ba:16:6a:95:2d:b1:00:71:7f:43:05:3f:c2 +-----BEGIN CERTIFICATE----- +MIICRjCCAc2gAwIBAgIQC6Fa+h3foLVJRK/NJKBs7DAKBggqhkjOPQQDAzBlMQsw +CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu +ZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3Qg +RzMwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQGEwJV +UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu +Y29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzMwdjAQBgcq 
+hkjOPQIBBgUrgQQAIgNiAAQZ57ysRGXtzbg/WPuNsVepRC0FFfLvC/8QdJ+1YlJf +Zn4f5dwbRXkLzMZTCp2NXQLZqVneAlr2lSoOjThKiknGvMYDOAdfVdp+CW7if17Q +RSAPWXYQ1qAk8C3eNvJsKTmjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ +BAQDAgGGMB0GA1UdDgQWBBTL0L2p4ZgFUaFNN6KDec6NHSrkhDAKBggqhkjOPQQD +AwNnADBkAjAlpIFFAmsSS3V0T8gj43DydXLefInwz5FyYZ5eEJJZVrmDxxDnOOlY +JjZ91eQ0hjkCMHw2U/Aw5WJjOpnitqM7mzT6HtoQknFekROn3aRukswy1vUhZscv +6pZjamVFkpUBtA== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Global Root G2" +# Serial: 4293743540046975378534879503202253541 +# MD5 Fingerprint: e4:a6:8a:c8:54:ac:52:42:46:0a:fd:72:48:1b:2a:44 +# SHA1 Fingerprint: df:3c:24:f9:bf:d6:66:76:1b:26:80:73:fe:06:d1:cc:8d:4f:82:a4 +# SHA256 Fingerprint: cb:3c:cb:b7:60:31:e5:e0:13:8f:8d:d3:9a:23:f9:de:47:ff:c3:5e:43:c1:14:4c:ea:27:d4:6a:5a:b1:cb:5f +-----BEGIN CERTIFICATE----- +MIIDjjCCAnagAwIBAgIQAzrx5qcRqaC7KGSxHQn65TANBgkqhkiG9w0BAQsFADBh +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBH +MjAeFw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVT +MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j +b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEcyMIIBIjANBgkqhkiG +9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuzfNNNx7a8myaJCtSnX/RrohCgiN9RlUyfuI +2/Ou8jqJkTx65qsGGmvPrC3oXgkkRLpimn7Wo6h+4FR1IAWsULecYxpsMNzaHxmx +1x7e/dfgy5SDN67sH0NO3Xss0r0upS/kqbitOtSZpLYl6ZtrAGCSYP9PIUkY92eQ +q2EGnI/yuum06ZIya7XzV+hdG82MHauVBJVJ8zUtluNJbd134/tJS7SsVQepj5Wz +tCO7TG1F8PapspUwtP1MVYwnSlcUfIKdzXOS0xZKBgyMUNGPHgm+F6HmIcr9g+UQ +vIOlCsRnKPZzFBQ9RnbDhxSJITRNrw9FDKZJobq7nMWxM4MphQIDAQABo0IwQDAP +BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUTiJUIBiV +5uNu5g/6+rkS7QYXjzkwDQYJKoZIhvcNAQELBQADggEBAGBnKJRvDkhj6zHd6mcY +1Yl9PMWLSn/pvtsrF9+wX3N3KjITOYFnQoQj8kVnNeyIv/iPsGEMNKSuIEyExtv4 +NeF22d+mQrvHRAiGfzZ0JFrabA0UWTW98kndth/Jsw1HKj2ZL7tcu7XUIOGZX1NG +Fdtom/DzMNU+MeKNhJ7jitralj41E6Vf8PlwUHBHQRFXGU7Aj64GxJUTFy8bJZ91 +8rGOmaFvE7FBcf6IKshPECBV1/MUReXgRPTqh5Uykw7+U0b6LJ3/iyK5S9kJRaTe +pLiaWN0bfVKfjllDiIGknibVb63dDcY3fe0Dkhvld1927jyNxF1WW6LZZm6zNTfl +MrY= +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Global Root G3" +# Serial: 7089244469030293291760083333884364146 +# MD5 Fingerprint: f5:5d:a4:50:a5:fb:28:7e:1e:0f:0d:cc:96:57:56:ca +# SHA1 Fingerprint: 7e:04:de:89:6a:3e:66:6d:00:e6:87:d3:3f:fa:d9:3b:e8:3d:34:9e +# SHA256 Fingerprint: 31:ad:66:48:f8:10:41:38:c7:38:f3:9e:a4:32:01:33:39:3e:3a:18:cc:02:29:6e:f9:7c:2a:c9:ef:67:31:d0 +-----BEGIN CERTIFICATE----- +MIICPzCCAcWgAwIBAgIQBVVWvPJepDU1w6QP1atFcjAKBggqhkjOPQQDAzBhMQsw +CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu +ZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBHMzAe +Fw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVTMRUw +EwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5jb20x +IDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEczMHYwEAYHKoZIzj0CAQYF +K4EEACIDYgAE3afZu4q4C/sLfyHS8L6+c/MzXRq8NOrexpu80JX28MzQC7phW1FG +fp4tn+6OYwwX7Adw9c+ELkCDnOg/QW07rdOkFFk2eJ0DQ+4QE2xy3q6Ip6FrtUPO +Z9wj/wMco+I+o0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAd +BgNVHQ4EFgQUs9tIpPmhxdiuNkHMEWNpYim8S8YwCgYIKoZIzj0EAwMDaAAwZQIx +AK288mw/EkrRLTnDCgmXc/SINoyIJ7vmiI1Qhadj+Z4y3maTD/HMsQmP3Wyr+mt/ 
+oAIwOWZbwmSNuJ5Q3KjVSaLtx9zRSX8XAbjIho9OjIgrqJqpisXRAL34VOKa5Vt8 +sycX +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Trusted Root G4" +# Serial: 7451500558977370777930084869016614236 +# MD5 Fingerprint: 78:f2:fc:aa:60:1f:2f:b4:eb:c9:37:ba:53:2e:75:49 +# SHA1 Fingerprint: dd:fb:16:cd:49:31:c9:73:a2:03:7d:3f:c8:3a:4d:7d:77:5d:05:e4 +# SHA256 Fingerprint: 55:2f:7b:dc:f1:a7:af:9e:6c:e6:72:01:7f:4f:12:ab:f7:72:40:c7:8e:76:1a:c2:03:d1:d9:d2:0a:c8:99:88 +-----BEGIN CERTIFICATE----- +MIIFkDCCA3igAwIBAgIQBZsbV56OITLiOQe9p3d1XDANBgkqhkiG9w0BAQwFADBi +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3Qg +RzQwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBiMQswCQYDVQQGEwJV +UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu +Y29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3QgRzQwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQC/5pBzaN675F1KPDAiMGkz7MKnJS7JIT3y +ithZwuEppz1Yq3aaza57G4QNxDAf8xukOBbrVsaXbR2rsnnyyhHS5F/WBTxSD1If +xp4VpX6+n6lXFllVcq9ok3DCsrp1mWpzMpTREEQQLt+C8weE5nQ7bXHiLQwb7iDV +ySAdYyktzuxeTsiT+CFhmzTrBcZe7FsavOvJz82sNEBfsXpm7nfISKhmV1efVFiO +DCu3T6cw2Vbuyntd463JT17lNecxy9qTXtyOj4DatpGYQJB5w3jHtrHEtWoYOAMQ +jdjUN6QuBX2I9YI+EJFwq1WCQTLX2wRzKm6RAXwhTNS8rhsDdV14Ztk6MUSaM0C/ +CNdaSaTC5qmgZ92kJ7yhTzm1EVgX9yRcRo9k98FpiHaYdj1ZXUJ2h4mXaXpI8OCi +EhtmmnTK3kse5w5jrubU75KSOp493ADkRSWJtppEGSt+wJS00mFt6zPZxd9LBADM +fRyVw4/3IbKyEbe7f/LVjHAsQWCqsWMYRJUadmJ+9oCw++hkpjPRiQfhvbfmQ6QY +uKZ3AeEPlAwhHbJUKSWJbOUOUlFHdL4mrLZBdd56rF+NP8m800ERElvlEFDrMcXK +chYiCd98THU/Y+whX8QgUWtvsauGi0/C1kVfnSD8oR7FwI+isX4KJpn15GkvmB0t +9dmpsh3lGwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +hjAdBgNVHQ4EFgQU7NfjgtJxXWRM3y5nP+e6mK4cD08wDQYJKoZIhvcNAQEMBQAD +ggIBALth2X2pbL4XxJEbw6GiAI3jZGgPVs93rnD5/ZpKmbnJeFwMDF/k5hQpVgs2 +SV1EY+CtnJYYZhsjDT156W1r1lT40jzBQ0CuHVD1UvyQO7uYmWlrx8GnqGikJ9yd ++SeuMIW59mdNOj6PWTkiU0TryF0Dyu1Qen1iIQqAyHNm0aAFYF/opbSnr6j3bTWc +fFqK1qI4mfN4i/RN0iAL3gTujJtHgXINwBQy7zBZLq7gcfJW5GqXb5JQbZaNaHqa +sjYUegbyJLkJEVDXCLG4iXqEI2FCKeWjzaIgQdfRnGTZ6iahixTXTBmyUEFxPT9N +cCOGDErcgdLMMpSEDQgJlxxPwO5rIHQw0uA5NBCFIRUBCOhVMt5xSdkoF1BN5r5N +0XWs0Mr7QbhDparTwwVETyw2m+L64kW4I1NsBm9nVX9GtUw/bihaeSbSpKhil9Ie +4u1Ki7wb/UdKDd9nZn6yW0HQO+T0O/QEY+nvwlQAUaCKKsnOeMzV6ocEGLPOr0mI +r/OSmbaz5mEP0oUA51Aa5BuVnRmhuZyxm7EAHu/QD09CbMkKvO5D+jpxpchNJqU1 +/YldvIViHTLSoCtU7ZpXwdv6EM8Zt4tKG48BtieVU+i2iW1bvGjUI+iLUaJW+fCm +gKDWHrO8Dw9TdSmq6hN35N6MgSGtBxBHEa2HPQfRdbzP82Z+ +-----END CERTIFICATE----- + +# Issuer: CN=COMODO RSA Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO RSA Certification Authority O=COMODO CA Limited +# Label: "COMODO RSA Certification Authority" +# Serial: 101909084537582093308941363524873193117 +# MD5 Fingerprint: 1b:31:b0:71:40:36:cc:14:36:91:ad:c4:3e:fd:ec:18 +# SHA1 Fingerprint: af:e5:d2:44:a8:d1:19:42:30:ff:47:9f:e2:f8:97:bb:cd:7a:8c:b4 +# SHA256 Fingerprint: 52:f0:e1:c4:e5:8e:c6:29:29:1b:60:31:7f:07:46:71:b8:5d:7e:a8:0d:5b:07:27:34:63:53:4b:32:b4:02:34 +-----BEGIN CERTIFICATE----- +MIIF2DCCA8CgAwIBAgIQTKr5yttjb+Af907YWwOGnTANBgkqhkiG9w0BAQwFADCB +hTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G +A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNV +BAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMTE5 +MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgT 
+EkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMR +Q09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNh +dGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCR +6FSS0gpWsawNJN3Fz0RndJkrN6N9I3AAcbxT38T6KhKPS38QVr2fcHK3YX/JSw8X +pz3jsARh7v8Rl8f0hj4K+j5c+ZPmNHrZFGvnnLOFoIJ6dq9xkNfs/Q36nGz637CC +9BR++b7Epi9Pf5l/tfxnQ3K9DADWietrLNPtj5gcFKt+5eNu/Nio5JIk2kNrYrhV +/erBvGy2i/MOjZrkm2xpmfh4SDBF1a3hDTxFYPwyllEnvGfDyi62a+pGx8cgoLEf +Zd5ICLqkTqnyg0Y3hOvozIFIQ2dOciqbXL1MGyiKXCJ7tKuY2e7gUYPDCUZObT6Z ++pUX2nwzV0E8jVHtC7ZcryxjGt9XyD+86V3Em69FmeKjWiS0uqlWPc9vqv9JWL7w +qP/0uK3pN/u6uPQLOvnoQ0IeidiEyxPx2bvhiWC4jChWrBQdnArncevPDt09qZah +SL0896+1DSJMwBGB7FY79tOi4lu3sgQiUpWAk2nojkxl8ZEDLXB0AuqLZxUpaVIC +u9ffUGpVRr+goyhhf3DQw6KqLCGqR84onAZFdr+CGCe01a60y1Dma/RMhnEw6abf +Fobg2P9A3fvQQoh/ozM6LlweQRGBY84YcWsr7KaKtzFcOmpH4MN5WdYgGq/yapiq +crxXStJLnbsQ/LBMQeXtHT1eKJ2czL+zUdqnR+WEUwIDAQABo0IwQDAdBgNVHQ4E +FgQUu69+Aj36pvE8hI6t7jiY7NkyMtQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB +/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAArx1UaEt65Ru2yyTUEUAJNMnMvl +wFTPoCWOAvn9sKIN9SCYPBMtrFaisNZ+EZLpLrqeLppysb0ZRGxhNaKatBYSaVqM +4dc+pBroLwP0rmEdEBsqpIt6xf4FpuHA1sj+nq6PK7o9mfjYcwlYRm6mnPTXJ9OV +2jeDchzTc+CiR5kDOF3VSXkAKRzH7JsgHAckaVd4sjn8OoSgtZx8jb8uk2Intzna +FxiuvTwJaP+EmzzV1gsD41eeFPfR60/IvYcjt7ZJQ3mFXLrrkguhxuhoqEwWsRqZ +CuhTLJK7oQkYdQxlqHvLI7cawiiFwxv/0Cti76R7CZGYZ4wUAc1oBmpjIXUDgIiK +boHGhfKppC3n9KUkEEeDys30jXlYsQab5xoq2Z0B15R97QNKyvDb6KkBPvVWmcke +jkk9u+UJueBPSZI9FoJAzMxZxuY67RIuaTxslbH9qh17f4a+Hg4yRvv7E491f0yL +S0Zj/gA0QHDBw7mh3aZw4gSzQbzpgJHqZJx64SIDqZxubw5lT2yHh17zbqD5daWb +QOhTsiedSrnAdyGN/4fy3ryM7xfft0kL0fJuMAsaDk527RH89elWsn2/x20Kk4yl +0MC2Hb46TpSi125sC8KKfPog88Tk5c0NqMuRkrF8hey1FGlmDoLnzc7ILaZRfyHB +NVOFBkpdn627G190 +-----END CERTIFICATE----- + +# Issuer: CN=USERTrust RSA Certification Authority O=The USERTRUST Network +# Subject: CN=USERTrust RSA Certification Authority O=The USERTRUST Network +# Label: "USERTrust RSA Certification Authority" +# Serial: 2645093764781058787591871645665788717 +# MD5 Fingerprint: 1b:fe:69:d1:91:b7:19:33:a3:72:a8:0f:e1:55:e5:b5 +# SHA1 Fingerprint: 2b:8f:1b:57:33:0d:bb:a2:d0:7a:6c:51:f7:0e:e9:0d:da:b9:ad:8e +# SHA256 Fingerprint: e7:93:c9:b0:2f:d8:aa:13:e2:1c:31:22:8a:cc:b0:81:19:64:3b:74:9c:89:89:64:b1:74:6d:46:c3:d4:cb:d2 +-----BEGIN CERTIFICATE----- +MIIF3jCCA8agAwIBAgIQAf1tMPyjylGoG7xkDjUDLTANBgkqhkiG9w0BAQwFADCB +iDELMAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0pl +cnNleSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNV +BAMTJVVTRVJUcnVzdCBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAw +MjAxMDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNV +BAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVU +aGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBSU0EgQ2Vy +dGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK +AoICAQCAEmUXNg7D2wiz0KxXDXbtzSfTTK1Qg2HiqiBNCS1kCdzOiZ/MPans9s/B +3PHTsdZ7NygRK0faOca8Ohm0X6a9fZ2jY0K2dvKpOyuR+OJv0OwWIJAJPuLodMkY +tJHUYmTbf6MG8YgYapAiPLz+E/CHFHv25B+O1ORRxhFnRghRy4YUVD+8M/5+bJz/ +Fp0YvVGONaanZshyZ9shZrHUm3gDwFA66Mzw3LyeTP6vBZY1H1dat//O+T23LLb2 +VN3I5xI6Ta5MirdcmrS3ID3KfyI0rn47aGYBROcBTkZTmzNg95S+UzeQc0PzMsNT +79uq/nROacdrjGCT3sTHDN/hMq7MkztReJVni+49Vv4M0GkPGw/zJSZrM233bkf6 +c0Plfg6lZrEpfDKEY1WJxA3Bk1QwGROs0303p+tdOmw1XNtB1xLaqUkL39iAigmT +Yo61Zs8liM2EuLE/pDkP2QKe6xJMlXzzawWpXhaDzLhn4ugTncxbgtNMs+1b/97l +c6wjOy0AvzVVdAlJ2ElYGn+SNuZRkg7zJn0cTRe8yexDJtC/QV9AqURE9JnnV4ee +UB9XVKg+/XRjL7FQZQnmWEIuQxpMtPAlR1n6BB6T1CZGSlCBst6+eLf8ZxXhyVeE 
+Hg9j1uliutZfVS7qXMYoCAQlObgOK6nyTJccBz8NUvXt7y+CDwIDAQABo0IwQDAd +BgNVHQ4EFgQUU3m/WqorSs9UgOHYm8Cd8rIDZsswDgYDVR0PAQH/BAQDAgEGMA8G +A1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAFzUfA3P9wF9QZllDHPF +Up/L+M+ZBn8b2kMVn54CVVeWFPFSPCeHlCjtHzoBN6J2/FNQwISbxmtOuowhT6KO +VWKR82kV2LyI48SqC/3vqOlLVSoGIG1VeCkZ7l8wXEskEVX/JJpuXior7gtNn3/3 +ATiUFJVDBwn7YKnuHKsSjKCaXqeYalltiz8I+8jRRa8YFWSQEg9zKC7F4iRO/Fjs +8PRF/iKz6y+O0tlFYQXBl2+odnKPi4w2r78NBc5xjeambx9spnFixdjQg3IM8WcR +iQycE0xyNN+81XHfqnHd4blsjDwSXWXavVcStkNr/+XeTWYRUc+ZruwXtuhxkYze +Sf7dNXGiFSeUHM9h4ya7b6NnJSFd5t0dCy5oGzuCr+yDZ4XUmFF0sbmZgIn/f3gZ +XHlKYC6SQK5MNyosycdiyA5d9zZbyuAlJQG03RoHnHcAP9Dc1ew91Pq7P8yF1m9/ +qS3fuQL39ZeatTXaw2ewh0qpKJ4jjv9cJ2vhsE/zB+4ALtRZh8tSQZXq9EfX7mRB +VXyNWQKV3WKdwrnuWih0hKWbt5DHDAff9Yk2dDLWKMGwsAvgnEzDHNb842m1R0aB +L6KCq9NjRHDEjf8tM7qtj3u1cIiuPhnPQCjY/MiQu12ZIvVS5ljFH4gxQ+6IHdfG +jjxDah2nGN59PRbxYvnKkKj9 +-----END CERTIFICATE----- + +# Issuer: CN=USERTrust ECC Certification Authority O=The USERTRUST Network +# Subject: CN=USERTrust ECC Certification Authority O=The USERTRUST Network +# Label: "USERTrust ECC Certification Authority" +# Serial: 123013823720199481456569720443997572134 +# MD5 Fingerprint: fa:68:bc:d9:b5:7f:ad:fd:c9:1d:06:83:28:cc:24:c1 +# SHA1 Fingerprint: d1:cb:ca:5d:b2:d5:2a:7f:69:3b:67:4d:e5:f0:5a:1d:0c:95:7d:f0 +# SHA256 Fingerprint: 4f:f4:60:d5:4b:9c:86:da:bf:bc:fc:57:12:e0:40:0d:2b:ed:3f:bc:4d:4f:bd:aa:86:e0:6a:dc:d2:a9:ad:7a +-----BEGIN CERTIFICATE----- +MIICjzCCAhWgAwIBAgIQXIuZxVqUxdJxVt7NiYDMJjAKBggqhkjOPQQDAzCBiDEL +MAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNl +eSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMT +JVVTRVJUcnVzdCBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMjAx +MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNVBAgT +Ck5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVUaGUg +VVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBFQ0MgQ2VydGlm +aWNhdGlvbiBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQarFRaqflo +I+d61SRvU8Za2EurxtW20eZzca7dnNYMYf3boIkDuAUU7FfO7l0/4iGzzvfUinng +o4N+LZfQYcTxmdwlkWOrfzCjtHDix6EznPO/LlxTsV+zfTJ/ijTjeXmjQjBAMB0G +A1UdDgQWBBQ64QmG1M8ZwpZ2dEl23OA1xmNjmjAOBgNVHQ8BAf8EBAMCAQYwDwYD +VR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjA2Z6EWCNzklwBBHU6+4WMB +zzuqQhFkoJ2UOQIReVx7Hfpkue4WQrO/isIJxOzksU0CMQDpKmFHjFJKS04YcPbW +RNZu9YO6bVi9JNlWSOrvxKJGgYhqOkbRqZtNyWHa0V1Xahg= +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 +# Label: "GlobalSign ECC Root CA - R4" +# Serial: 14367148294922964480859022125800977897474 +# MD5 Fingerprint: 20:f0:27:68:d1:7e:a0:9d:0e:e6:2a:ca:df:5c:89:8e +# SHA1 Fingerprint: 69:69:56:2e:40:80:f4:24:a1:e7:19:9f:14:ba:f3:ee:58:ab:6a:bb +# SHA256 Fingerprint: be:c9:49:11:c2:95:56:76:db:6c:0a:55:09:86:d7:6e:3b:a0:05:66:7c:44:2c:97:62:b4:fb:b7:73:de:22:8c +-----BEGIN CERTIFICATE----- +MIIB4TCCAYegAwIBAgIRKjikHJYKBN5CsiilC+g0mAIwCgYIKoZIzj0EAwIwUDEk +MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI0MRMwEQYDVQQKEwpH +bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX +DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD +QSAtIFI0MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu +MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEuMZ5049sJQ6fLjkZHAOkrprlOQcJ +FspjsbmG+IpXwVfOQvpzofdlQv8ewQCybnMO/8ch5RikqtlxP6jUuc6MHaNCMEAw +DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFFSwe61F +uOJAf/sKbvu+M8k8o4TVMAoGCCqGSM49BAMCA0gAMEUCIQDckqGgE6bPA7DmxCGX 
+kPoUVy0D7O48027KqGx2vKLeuwIgJ6iFJzWbVsaj8kfSt24bAgAXqmemFZHe+pTs +ewv4n4Q= +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 +# Label: "GlobalSign ECC Root CA - R5" +# Serial: 32785792099990507226680698011560947931244 +# MD5 Fingerprint: 9f:ad:3b:1c:02:1e:8a:ba:17:74:38:81:0c:a2:bc:08 +# SHA1 Fingerprint: 1f:24:c6:30:cd:a4:18:ef:20:69:ff:ad:4f:dd:5f:46:3a:1b:69:aa +# SHA256 Fingerprint: 17:9f:bc:14:8a:3d:d0:0f:d2:4e:a1:34:58:cc:43:bf:a7:f5:9c:81:82:d7:83:a5:13:f6:eb:ec:10:0c:89:24 +-----BEGIN CERTIFICATE----- +MIICHjCCAaSgAwIBAgIRYFlJ4CYuu1X5CneKcflK2GwwCgYIKoZIzj0EAwMwUDEk +MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI1MRMwEQYDVQQKEwpH +bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX +DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD +QSAtIFI1MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu +MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAER0UOlvt9Xb/pOdEh+J8LttV7HpI6SFkc +8GIxLcB6KP4ap1yztsyX50XUWPrRd21DosCHZTQKH3rd6zwzocWdTaRvQZU4f8ke +hOvRnkmSh5SHDDqFSmafnVmTTZdhBoZKo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYD +VR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUPeYpSJvqB8ohREom3m7e0oPQn1kwCgYI +KoZIzj0EAwMDaAAwZQIxAOVpEslu28YxuglB4Zf4+/2a4n0Sye18ZNPLBSWLVtmg +515dTguDnFt2KaAJJiFqYgIwcdK1j1zqO+F4CYWodZI7yFz9SO8NdCKoCOJuxUnO +xwy8p2Fp8fc74SrL+SvzZpA3 +-----END CERTIFICATE----- + +# Issuer: CN=Staat der Nederlanden Root CA - G3 O=Staat der Nederlanden +# Subject: CN=Staat der Nederlanden Root CA - G3 O=Staat der Nederlanden +# Label: "Staat der Nederlanden Root CA - G3" +# Serial: 10003001 +# MD5 Fingerprint: 0b:46:67:07:db:10:2f:19:8c:35:50:60:d1:0b:f4:37 +# SHA1 Fingerprint: d8:eb:6b:41:51:92:59:e0:f3:e7:85:00:c0:3d:b6:88:97:c9:ee:fc +# SHA256 Fingerprint: 3c:4f:b0:b9:5a:b8:b3:00:32:f4:32:b8:6f:53:5f:e1:72:c1:85:d0:fd:39:86:58:37:cf:36:18:7f:a6:f4:28 +-----BEGIN CERTIFICATE----- +MIIFdDCCA1ygAwIBAgIEAJiiOTANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJO +TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSswKQYDVQQDDCJTdGFh +dCBkZXIgTmVkZXJsYW5kZW4gUm9vdCBDQSAtIEczMB4XDTEzMTExNDExMjg0MloX +DTI4MTExMzIzMDAwMFowWjELMAkGA1UEBhMCTkwxHjAcBgNVBAoMFVN0YWF0IGRl +ciBOZWRlcmxhbmRlbjErMCkGA1UEAwwiU3RhYXQgZGVyIE5lZGVybGFuZGVuIFJv +b3QgQ0EgLSBHMzCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAL4yolQP +cPssXFnrbMSkUeiFKrPMSjTysF/zDsccPVMeiAho2G89rcKezIJnByeHaHE6n3WW +IkYFsO2tx1ueKt6c/DrGlaf1F2cY5y9JCAxcz+bMNO14+1Cx3Gsy8KL+tjzk7FqX +xz8ecAgwoNzFs21v0IJyEavSgWhZghe3eJJg+szeP4TrjTgzkApyI/o1zCZxMdFy +KJLZWyNtZrVtB0LrpjPOktvA9mxjeM3KTj215VKb8b475lRgsGYeCasH/lSJEULR +9yS6YHgamPfJEf0WwTUaVHXvQ9Plrk7O53vDxk5hUUurmkVLoR9BvUhTFXFkC4az +5S6+zqQbwSmEorXLCCN2QyIkHxcE1G6cxvx/K2Ya7Irl1s9N9WMJtxU51nus6+N8 +6U78dULI7ViVDAZCopz35HCz33JvWjdAidiFpNfxC95DGdRKWCyMijmev4SH8RY7 +Ngzp07TKbBlBUgmhHbBqv4LvcFEhMtwFdozL92TkA1CvjJFnq8Xy7ljY3r735zHP +bMk7ccHViLVlvMDoFxcHErVc0qsgk7TmgoNwNsXNo42ti+yjwUOH5kPiNL6VizXt +BznaqB16nzaeErAMZRKQFWDZJkBE41ZgpRDUajz9QdwOWke275dhdU/Z/seyHdTt +XUmzqWrLZoQT1Vyg3N9udwbRcXXIV2+vD3dbAgMBAAGjQjBAMA8GA1UdEwEB/wQF +MAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRUrfrHkleuyjWcLhL75Lpd +INyUVzANBgkqhkiG9w0BAQsFAAOCAgEAMJmdBTLIXg47mAE6iqTnB/d6+Oea31BD +U5cqPco8R5gu4RV78ZLzYdqQJRZlwJ9UXQ4DO1t3ApyEtg2YXzTdO2PCwyiBwpwp +LiniyMMB8jPqKqrMCQj3ZWfGzd/TtiunvczRDnBfuCPRy5FOCvTIeuXZYzbB1N/8 +Ipf3YF3qKS9Ysr1YvY2WTxB1v0h7PVGHoTx0IsL8B3+A3MSs/mrBcDCw6Y5p4ixp +gZQJut3+TcCDjJRYwEYgr5wfAvg1VUkvRtTA8KCWAg8zxXHzniN9lLf9OtMJgwYh +/WA9rjLA0u6NpvDntIJ8CsxwyXmA+P5M9zWEGYox+wrZ13+b8KKaa8MFSu1BYBQw 
+0aoRQm7TIwIEC8Zl3d1Sd9qBa7Ko+gE4uZbqKmxnl4mUnrzhVNXkanjvSr0rmj1A +fsbAddJu+2gw7OyLnflJNZoaLNmzlTnVHpL3prllL+U9bTpITAjc5CgSKL59NVzq +4BZ+Extq1z7XnvwtdbLBFNUjA9tbbws+eC8N3jONFrdI54OagQ97wUNNVQQXOEpR +1VmiiXTTn74eS9fGbbeIJG9gkaSChVtWQbzQRKtqE77RLFi3EjNYsjdj3BP1lB0/ +QFH1T/U67cjF68IeHRaVesd+QnGTbksVtzDfqu1XhUisHWrdOWnk4Xl4vs4Fv6EM +94B7IWcnMFk= +-----END CERTIFICATE----- + +# Issuer: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden +# Subject: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden +# Label: "Staat der Nederlanden EV Root CA" +# Serial: 10000013 +# MD5 Fingerprint: fc:06:af:7b:e8:1a:f1:9a:b4:e8:d2:70:1f:c0:f5:ba +# SHA1 Fingerprint: 76:e2:7e:c1:4f:db:82:c1:c0:a6:75:b5:05:be:3d:29:b4:ed:db:bb +# SHA256 Fingerprint: 4d:24:91:41:4c:fe:95:67:46:ec:4c:ef:a6:cf:6f:72:e2:8a:13:29:43:2f:9d:8a:90:7a:c4:cb:5d:ad:c1:5a +-----BEGIN CERTIFICATE----- +MIIFcDCCA1igAwIBAgIEAJiWjTANBgkqhkiG9w0BAQsFADBYMQswCQYDVQQGEwJO +TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSkwJwYDVQQDDCBTdGFh +dCBkZXIgTmVkZXJsYW5kZW4gRVYgUm9vdCBDQTAeFw0xMDEyMDgxMTE5MjlaFw0y +MjEyMDgxMTEwMjhaMFgxCzAJBgNVBAYTAk5MMR4wHAYDVQQKDBVTdGFhdCBkZXIg +TmVkZXJsYW5kZW4xKTAnBgNVBAMMIFN0YWF0IGRlciBOZWRlcmxhbmRlbiBFViBS +b290IENBMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA48d+ifkkSzrS +M4M1LGns3Amk41GoJSt5uAg94JG6hIXGhaTK5skuU6TJJB79VWZxXSzFYGgEt9nC +UiY4iKTWO0Cmws0/zZiTs1QUWJZV1VD+hq2kY39ch/aO5ieSZxeSAgMs3NZmdO3d +Z//BYY1jTw+bbRcwJu+r0h8QoPnFfxZpgQNH7R5ojXKhTbImxrpsX23Wr9GxE46p +rfNeaXUmGD5BKyF/7otdBwadQ8QpCiv8Kj6GyzyDOvnJDdrFmeK8eEEzduG/L13l +pJhQDBXd4Pqcfzho0LKmeqfRMb1+ilgnQ7O6M5HTp5gVXJrm0w912fxBmJc+qiXb +j5IusHsMX/FjqTf5m3VpTCgmJdrV8hJwRVXj33NeN/UhbJCONVrJ0yPr08C+eKxC +KFhmpUZtcALXEPlLVPxdhkqHz3/KRawRWrUgUY0viEeXOcDPusBCAUCZSCELa6fS +/ZbV0b5GnUngC6agIk440ME8MLxwjyx1zNDFjFE7PZQIZCZhfbnDZY8UnCHQqv0X +cgOPvZuM5l5Tnrmd74K74bzickFbIZTTRTeU0d8JOV3nI6qaHcptqAqGhYqCvkIH +1vI4gnPah1vlPNOePqc7nvQDs/nxfRN0Av+7oeX6AHkcpmZBiFxgV6YuCcS6/ZrP +px9Aw7vMWgpVSzs4dlG4Y4uElBbmVvMCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB +/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFP6rAJCYniT8qcwaivsnuL8wbqg7 +MA0GCSqGSIb3DQEBCwUAA4ICAQDPdyxuVr5Os7aEAJSrR8kN0nbHhp8dB9O2tLsI +eK9p0gtJ3jPFrK3CiAJ9Brc1AsFgyb/E6JTe1NOpEyVa/m6irn0F3H3zbPB+po3u +2dfOWBfoqSmuc0iH55vKbimhZF8ZE/euBhD/UcabTVUlT5OZEAFTdfETzsemQUHS +v4ilf0X8rLiltTMMgsT7B/Zq5SWEXwbKwYY5EdtYzXc7LMJMD16a4/CrPmEbUCTC +wPTxGfARKbalGAKb12NMcIxHowNDXLldRqANb/9Zjr7dn3LDWyvfjFvO5QxGbJKy +CqNMVEIYFRIYvdr8unRu/8G2oGTYqV9Vrp9canaW2HNnh/tNf1zuacpzEPuKqf2e +vTY4SUmH9A4U8OmHuD+nT3pajnnUk+S7aFKErGzp85hwVXIy+TSrK0m1zSBi5Dp6 +Z2Orltxtrpfs/J92VoguZs9btsmksNcFuuEnL5O7Jiqik7Ab846+HUCjuTaPPoIa +Gl6I6lD4WeKDRikL40Rc4ZW2aZCaFG+XroHPaO+Zmr615+F/+PoTRxZMzG0IQOeL +eG9QgkRQP2YGiqtDhFZKDyAthg710tvSeopLzaXoTvFeJiUBWSOgftL2fiFX1ye8 +FVdMpEbB4IMeDExNH08GGeL5qPQ6gqGyeUN51q1veieQA6TqJIc/2b3Z6fJfUEkc +7uzXLg== +-----END CERTIFICATE----- + +# Issuer: CN=IdenTrust Commercial Root CA 1 O=IdenTrust +# Subject: CN=IdenTrust Commercial Root CA 1 O=IdenTrust +# Label: "IdenTrust Commercial Root CA 1" +# Serial: 13298821034946342390520003877796839426 +# MD5 Fingerprint: b3:3e:77:73:75:ee:a0:d3:e3:7e:49:63:49:59:bb:c7 +# SHA1 Fingerprint: df:71:7e:aa:4a:d9:4e:c9:55:84:99:60:2d:48:de:5f:bc:f0:3a:25 +# SHA256 Fingerprint: 5d:56:49:9b:e4:d2:e0:8b:cf:ca:d0:8a:3e:38:72:3d:50:50:3b:de:70:69:48:e4:2f:55:60:30:19:e5:28:ae +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIQCgFCgAAAAUUjyES1AAAAAjANBgkqhkiG9w0BAQsFADBK +MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScwJQYDVQQDEx5JZGVu +VHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwHhcNMTQwMTE2MTgxMjIzWhcNMzQw 
+MTE2MTgxMjIzWjBKMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScw +JQYDVQQDEx5JZGVuVHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCnUBneP5k91DNG8W9RYYKyqU+PZ4ldhNlT +3Qwo2dfw/66VQ3KZ+bVdfIrBQuExUHTRgQ18zZshq0PirK1ehm7zCYofWjK9ouuU ++ehcCuz/mNKvcbO0U59Oh++SvL3sTzIwiEsXXlfEU8L2ApeN2WIrvyQfYo3fw7gp +S0l4PJNgiCL8mdo2yMKi1CxUAGc1bnO/AljwpN3lsKImesrgNqUZFvX9t++uP0D1 +bVoE/c40yiTcdCMbXTMTEl3EASX2MN0CXZ/g1Ue9tOsbobtJSdifWwLziuQkkORi +T0/Br4sOdBeo0XKIanoBScy0RnnGF7HamB4HWfp1IYVl3ZBWzvurpWCdxJ35UrCL +vYf5jysjCiN2O/cz4ckA82n5S6LgTrx+kzmEB/dEcH7+B1rlsazRGMzyNeVJSQjK +Vsk9+w8YfYs7wRPCTY/JTw436R+hDmrfYi7LNQZReSzIJTj0+kuniVyc0uMNOYZK +dHzVWYfCP04MXFL0PfdSgvHqo6z9STQaKPNBiDoT7uje/5kdX7rL6B7yuVBgwDHT +c+XvvqDtMwt0viAgxGds8AgDelWAf0ZOlqf0Hj7h9tgJ4TNkK2PXMl6f+cB7D3hv +l7yTmvmcEpB4eoCHFddydJxVdHixuuFucAS6T6C6aMN7/zHwcz09lCqxC0EOoP5N +iGVreTO01wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB +/zAdBgNVHQ4EFgQU7UQZwNPwBovupHu+QucmVMiONnYwDQYJKoZIhvcNAQELBQAD +ggIBAA2ukDL2pkt8RHYZYR4nKM1eVO8lvOMIkPkp165oCOGUAFjvLi5+U1KMtlwH +6oi6mYtQlNeCgN9hCQCTrQ0U5s7B8jeUeLBfnLOic7iPBZM4zY0+sLj7wM+x8uwt +LRvM7Kqas6pgghstO8OEPVeKlh6cdbjTMM1gCIOQ045U8U1mwF10A0Cj7oV+wh93 +nAbowacYXVKV7cndJZ5t+qntozo00Fl72u1Q8zW/7esUTTHHYPTa8Yec4kjixsU3 ++wYQ+nVZZjFHKdp2mhzpgq7vmrlR94gjmmmVYjzlVYA211QC//G5Xc7UI2/YRYRK +W2XviQzdFKcgyxilJbQN+QHwotL0AMh0jqEqSI5l2xPE4iUXfeu+h1sXIFRRk0pT +AwvsXcoz7WL9RccvW9xYoIA55vrX/hMUpu09lEpCdNTDd1lzzY9GvlU47/rokTLq +l1gEIt44w8y8bckzOmoKaT+gyOpyj4xjhiO9bTyWnpXgSUyqorkqG5w2gXjtw+hG +4iZZRHUe2XWJUc0QhJ1hYMtd+ZciTY6Y5uN/9lu7rs3KSoFrXgvzUeF0K+l+J6fZ +mUlO+KWA2yUPHGNiiskzZ2s8EIPGrd6ozRaOjfAHN3Gf8qv8QfXBi+wAN10J5U6A +7/qxXDgGpRtK4dw4LTzcqx+QGtVKnO7RcGzM7vRX+Bi6hG6H +-----END CERTIFICATE----- + +# Issuer: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust +# Subject: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust +# Label: "IdenTrust Public Sector Root CA 1" +# Serial: 13298821034946342390521976156843933698 +# MD5 Fingerprint: 37:06:a5:b0:fc:89:9d:ba:f4:6b:8c:1a:64:cd:d5:ba +# SHA1 Fingerprint: ba:29:41:60:77:98:3f:f4:f3:ef:f2:31:05:3b:2e:ea:6d:4d:45:fd +# SHA256 Fingerprint: 30:d0:89:5a:9a:44:8a:26:20:91:63:55:22:d1:f5:20:10:b5:86:7a:ca:e1:2c:78:ef:95:8f:d4:f4:38:9f:2f +-----BEGIN CERTIFICATE----- +MIIFZjCCA06gAwIBAgIQCgFCgAAAAUUjz0Z8AAAAAjANBgkqhkiG9w0BAQsFADBN +MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MSowKAYDVQQDEyFJZGVu +VHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwHhcNMTQwMTE2MTc1MzMyWhcN +MzQwMTE2MTc1MzMyWjBNMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0 +MSowKAYDVQQDEyFJZGVuVHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2IpT8pEiv6EdrCvsnduTyP4o7 +ekosMSqMjbCpwzFrqHd2hCa2rIFCDQjrVVi7evi8ZX3yoG2LqEfpYnYeEe4IFNGy +RBb06tD6Hi9e28tzQa68ALBKK0CyrOE7S8ItneShm+waOh7wCLPQ5CQ1B5+ctMlS +bdsHyo+1W/CD80/HLaXIrcuVIKQxKFdYWuSNG5qrng0M8gozOSI5Cpcu81N3uURF +/YTLNiCBWS2ab21ISGHKTN9T0a9SvESfqy9rg3LvdYDaBjMbXcjaY8ZNzaxmMc3R +3j6HEDbhuaR672BQssvKplbgN6+rNBM5Jeg5ZuSYeqoSmJxZZoY+rfGwyj4GD3vw +EUs3oERte8uojHH01bWRNszwFcYr3lEXsZdMUD2xlVl8BX0tIdUAvwFnol57plzy +9yLxkA2T26pEUWbMfXYD62qoKjgZl3YNa4ph+bz27nb9cCvdKTz4Ch5bQhyLVi9V +GxyhLrXHFub4qjySjmm2AcG1hp2JDws4lFTo6tyePSW8Uybt1as5qsVATFSrsrTZ +2fjXctscvG29ZV/viDUqZi/u9rNl8DONfJhBaUYPQxxp+pu10GFqzcpL2UyQRqsV +WaFHVCkugyhfHMKiq3IXAAaOReyL4jM9f9oZRORicsPfIsbyVtTdX5Vy7W1f90gD +W/3FKqD2cyOEEBsB5wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ +BAUwAwEB/zAdBgNVHQ4EFgQU43HgntinQtnbcZFrlJPrw6PRFKMwDQYJKoZIhvcN +AQELBQADggIBAEf63QqwEZE4rU1d9+UOl1QZgkiHVIyqZJnYWv6IAcVYpZmxI1Qj 
+t2odIFflAWJBF9MJ23XLblSQdf4an4EKwt3X9wnQW3IV5B4Jaj0z8yGa5hV+rVHV +DRDtfULAj+7AmgjVQdZcDiFpboBhDhXAuM/FSRJSzL46zNQuOAXeNf0fb7iAaJg9 +TaDKQGXSc3z1i9kKlT/YPyNtGtEqJBnZhbMX73huqVjRI9PHE+1yJX9dsXNw0H8G +lwmEKYBhHfpe/3OsoOOJuBxxFcbeMX8S3OFtm6/n6J91eEyrRjuazr8FGF1NFTwW +mhlQBJqymm9li1JfPFgEKCXAZmExfrngdbkaqIHWchezxQMxNRF4eKLg6TCMf4Df +WN88uieW4oA0beOY02QnrEh+KHdcxiVhJfiFDGX6xDIvpZgF5PgLZxYWxoK4Mhn5 ++bl53B/N66+rDt0b20XkeucC4pVd/GnwU2lhlXV5C15V5jgclKlZM57IcXR5f1GJ +tshquDDIajjDbp7hNxbqBWJMWxJH7ae0s1hWx0nzfxJoCTFx8G34Tkf71oXuxVhA +GaQdp/lLQzfcaFpPz+vCZHTetBXZ9FRUGi8c15dxVJCO2SCdUyt/q4/i6jC8UDfv +8Ue1fXwsBOxonbRJRBD0ckscZOf85muQ3Wl9af0AVqW3rLatt8o+Ae+c +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only +# Subject: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only +# Label: "Entrust Root Certification Authority - G2" +# Serial: 1246989352 +# MD5 Fingerprint: 4b:e2:c9:91:96:65:0c:f4:0e:5a:93:92:a0:0a:fe:b2 +# SHA1 Fingerprint: 8c:f4:27:fd:79:0c:3a:d1:66:06:8d:e8:1e:57:ef:bb:93:22:72:d4 +# SHA256 Fingerprint: 43:df:57:74:b0:3e:7f:ef:5f:e4:0d:93:1a:7b:ed:f1:bb:2e:6b:42:73:8c:4e:6d:38:41:10:3d:3a:a7:f3:39 +-----BEGIN CERTIFICATE----- +MIIEPjCCAyagAwIBAgIESlOMKDANBgkqhkiG9w0BAQsFADCBvjELMAkGA1UEBhMC +VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50 +cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3Qs +IEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVz +dCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRzIwHhcNMDkwNzA3MTcy +NTU0WhcNMzAxMjA3MTc1NTU0WjCBvjELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUVu +dHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50cnVzdC5uZXQvbGVnYWwt +dGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3QsIEluYy4gLSBmb3IgYXV0 +aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVzdCBSb290IENlcnRpZmlj +YXRpb24gQXV0aG9yaXR5IC0gRzIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK +AoIBAQC6hLZy254Ma+KZ6TABp3bqMriVQRrJ2mFOWHLP/vaCeb9zYQYKpSfYs1/T +RU4cctZOMvJyig/3gxnQaoCAAEUesMfnmr8SVycco2gvCoe9amsOXmXzHHfV1IWN +cCG0szLni6LVhjkCsbjSR87kyUnEO6fe+1R9V77w6G7CebI6C1XiUJgWMhNcL3hW +wcKUs/Ja5CeanyTXxuzQmyWC48zCxEXFjJd6BmsqEZ+pCm5IO2/b1BEZQvePB7/1 +U1+cPvQXLOZprE4yTGJ36rfo5bs0vBmLrpxR57d+tVOxMyLlbc9wPBr64ptntoP0 +jaWvYkxN4FisZDQSA/i2jZRjJKRxAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAP +BgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqciZ60B7vfec7aVHUbI2fkBJmqzAN +BgkqhkiG9w0BAQsFAAOCAQEAeZ8dlsa2eT8ijYfThwMEYGprmi5ZiXMRrEPR9RP/ +jTkrwPK9T3CMqS/qF8QLVJ7UG5aYMzyorWKiAHarWWluBh1+xLlEjZivEtRh2woZ +Rkfz6/djwUAFQKXSt/S1mja/qYh2iARVBCuch38aNzx+LaUa2NSJXsq9rD1s2G2v +1fN2D807iDginWyTmsQ9v4IbZT+mD12q/OWyFcq1rca8PdCE6OoGcrBNOTJ4vz4R +nAuknZoh8/CbCzB428Hch0P+vGOaysXCHMnHjf87ElgI5rY97HosTvuDls4MPGmH +VHOkc8KT/1EQrBVUAdj8BbGJoX90g5pJ19xOe4pIb4tF9g== +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only +# Subject: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. 
- for authorized use only +# Label: "Entrust Root Certification Authority - EC1" +# Serial: 51543124481930649114116133369 +# MD5 Fingerprint: b6:7e:1d:f0:58:c5:49:6c:24:3b:3d:ed:98:18:ed:bc +# SHA1 Fingerprint: 20:d8:06:40:df:9b:25:f5:12:25:3a:11:ea:f7:59:8a:eb:14:b5:47 +# SHA256 Fingerprint: 02:ed:0e:b2:8c:14:da:45:16:5c:56:67:91:70:0d:64:51:d7:fb:56:f0:b2:ab:1d:3b:8e:b0:70:e5:6e:df:f5 +-----BEGIN CERTIFICATE----- +MIIC+TCCAoCgAwIBAgINAKaLeSkAAAAAUNCR+TAKBggqhkjOPQQDAzCBvzELMAkG +A1UEBhMCVVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3 +d3cuZW50cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDEyIEVu +dHJ1c3QsIEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEzMDEGA1UEAxMq +RW50cnVzdCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRUMxMB4XDTEy +MTIxODE1MjUzNloXDTM3MTIxODE1NTUzNlowgb8xCzAJBgNVBAYTAlVTMRYwFAYD +VQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1c3QubmV0 +L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxMiBFbnRydXN0LCBJbmMuIC0g +Zm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMzAxBgNVBAMTKkVudHJ1c3QgUm9vdCBD +ZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEVDMTB2MBAGByqGSM49AgEGBSuBBAAi +A2IABIQTydC6bUF74mzQ61VfZgIaJPRbiWlH47jCffHyAsWfoPZb1YsGGYZPUxBt +ByQnoaD41UcZYUx9ypMn6nQM72+WCf5j7HBdNq1nd67JnXxVRDqiY1Ef9eNi1KlH +Bz7MIKNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O +BBYEFLdj5xrdjekIplWDpOBqUEFlEUJJMAoGCCqGSM49BAMDA2cAMGQCMGF52OVC +R98crlOZF7ZvHH3hvxGU0QOIdeSNiaSKd0bebWHvAvX7td/M/k7//qnmpwIwW5nX +hTcGtXsI/esni0qU+eH6p44mCOh8kmhtc9hvJqwhAriZtyZBWyVgrtBIGu4G +-----END CERTIFICATE----- + +# Issuer: CN=CFCA EV ROOT O=China Financial Certification Authority +# Subject: CN=CFCA EV ROOT O=China Financial Certification Authority +# Label: "CFCA EV ROOT" +# Serial: 407555286 +# MD5 Fingerprint: 74:e1:b6:ed:26:7a:7a:44:30:33:94:ab:7b:27:81:30 +# SHA1 Fingerprint: e2:b8:29:4b:55:84:ab:6b:58:c2:90:46:6c:ac:3f:b8:39:8f:84:83 +# SHA256 Fingerprint: 5c:c3:d7:8e:4e:1d:5e:45:54:7a:04:e6:87:3e:64:f9:0c:f9:53:6d:1c:cc:2e:f8:00:f3:55:c4:c5:fd:70:fd +-----BEGIN CERTIFICATE----- +MIIFjTCCA3WgAwIBAgIEGErM1jANBgkqhkiG9w0BAQsFADBWMQswCQYDVQQGEwJD +TjEwMC4GA1UECgwnQ2hpbmEgRmluYW5jaWFsIENlcnRpZmljYXRpb24gQXV0aG9y +aXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJPT1QwHhcNMTIwODA4MDMwNzAxWhcNMjkx +MjMxMDMwNzAxWjBWMQswCQYDVQQGEwJDTjEwMC4GA1UECgwnQ2hpbmEgRmluYW5j +aWFsIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJP +T1QwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDXXWvNED8fBVnVBU03 +sQ7smCuOFR36k0sXgiFxEFLXUWRwFsJVaU2OFW2fvwwbwuCjZ9YMrM8irq93VCpL +TIpTUnrD7i7es3ElweldPe6hL6P3KjzJIx1qqx2hp/Hz7KDVRM8Vz3IvHWOX6Jn5 +/ZOkVIBMUtRSqy5J35DNuF++P96hyk0g1CXohClTt7GIH//62pCfCqktQT+x8Rgp +7hZZLDRJGqgG16iI0gNyejLi6mhNbiyWZXvKWfry4t3uMCz7zEasxGPrb382KzRz +EpR/38wmnvFyXVBlWY9ps4deMm/DGIq1lY+wejfeWkU7xzbh72fROdOXW3NiGUgt +hxwG+3SYIElz8AXSG7Ggo7cbcNOIabla1jj0Ytwli3i/+Oh+uFzJlU9fpy25IGvP +a931DfSCt/SyZi4QKPaXWnuWFo8BGS1sbn85WAZkgwGDg8NNkt0yxoekN+kWzqot +aK8KgWU6cMGbrU1tVMoqLUuFG7OA5nBFDWteNfB/O7ic5ARwiRIlk9oKmSJgamNg +TnYGmE69g60dWIolhdLHZR4tjsbftsbhf4oEIRUpdPA+nJCdDC7xij5aqgwJHsfV +PKPtl8MeNPo4+QgO48BdK4PRVmrJtqhUUy54Mmc9gn900PvhtgVguXDbjgv5E1hv +cWAQUhC5wUEJ73IfZzF4/5YFjQIDAQABo2MwYTAfBgNVHSMEGDAWgBTj/i39KNAL +tbq2osS/BqoFjJP7LzAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAd +BgNVHQ4EFgQU4/4t/SjQC7W6tqLEvwaqBYyT+y8wDQYJKoZIhvcNAQELBQADggIB +ACXGumvrh8vegjmWPfBEp2uEcwPenStPuiB/vHiyz5ewG5zz13ku9Ui20vsXiObT +ej/tUxPQ4i9qecsAIyjmHjdXNYmEwnZPNDatZ8POQQaIxffu2Bq41gt/UP+TqhdL +jOztUmCypAbqTuv0axn96/Ua4CUqmtzHQTb3yHQFhDmVOdYLO6Qn+gjYXB74BGBS +ESgoA//vU2YApUo0FmZ8/Qmkrp5nGm9BC2sGE5uPhnEFtC+NiWYzKXZUmhH4J/qy 
+P5Hgzg0b8zAarb8iXRvTvyUFTeGSGn+ZnzxEk8rUQElsgIfXBDrDMlI1Dlb4pd19 +xIsNER9Tyx6yF7Zod1rg1MvIB671Oi6ON7fQAUtDKXeMOZePglr4UeWJoBjnaH9d +Ci77o0cOPaYjesYBx4/IXr9tgFa+iiS6M+qf4TIRnvHST4D2G0CvOJ4RUHlzEhLN +5mydLIhyPDCBBpEi6lmt2hkuIsKNuYyH4Ga8cyNfIWRjgEj1oDwYPZTISEEdQLpe +/v5WOaHIz16eGWRGENoXkbcFgKyLmZJ956LYBws2J+dIeWCKw9cTXPhyQN9Ky8+Z +AAoACxGV2lZFA4gKn2fQ1XmxqI1AbQ3CekD6819kR5LLU7m7Wc5P/dAVUwHY3+vZ +5nbv0CO7O6l5s9UCKc2Jo5YPSjXnTkLAdc0Hz+Ys63su +-----END CERTIFICATE----- + +# Issuer: CN=Certinomis - Root CA O=Certinomis OU=0002 433998903 +# Subject: CN=Certinomis - Root CA O=Certinomis OU=0002 433998903 +# Label: "Certinomis - Root CA" +# Serial: 1 +# MD5 Fingerprint: 14:0a:fd:8d:a8:28:b5:38:69:db:56:7e:61:22:03:3f +# SHA1 Fingerprint: 9d:70:bb:01:a5:a4:a0:18:11:2e:f7:1c:01:b9:32:c5:34:e7:88:a8 +# SHA256 Fingerprint: 2a:99:f5:bc:11:74:b7:3c:bb:1d:62:08:84:e0:1c:34:e5:1c:cb:39:78:da:12:5f:0e:33:26:88:83:bf:41:58 +-----BEGIN CERTIFICATE----- +MIIFkjCCA3qgAwIBAgIBATANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJGUjET +MBEGA1UEChMKQ2VydGlub21pczEXMBUGA1UECxMOMDAwMiA0MzM5OTg5MDMxHTAb +BgNVBAMTFENlcnRpbm9taXMgLSBSb290IENBMB4XDTEzMTAyMTA5MTcxOFoXDTMz +MTAyMTA5MTcxOFowWjELMAkGA1UEBhMCRlIxEzARBgNVBAoTCkNlcnRpbm9taXMx +FzAVBgNVBAsTDjAwMDIgNDMzOTk4OTAzMR0wGwYDVQQDExRDZXJ0aW5vbWlzIC0g +Um9vdCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANTMCQosP5L2 +fxSeC5yaah1AMGT9qt8OHgZbn1CF6s2Nq0Nn3rD6foCWnoR4kkjW4znuzuRZWJfl +LieY6pOod5tK8O90gC3rMB+12ceAnGInkYjwSond3IjmFPnVAy//ldu9n+ws+hQV +WZUKxkd8aRi5pwP5ynapz8dvtF4F/u7BUrJ1Mofs7SlmO/NKFoL21prbcpjp3vDF +TKWrteoB4owuZH9kb/2jJZOLyKIOSY008B/sWEUuNKqEUL3nskoTuLAPrjhdsKkb +5nPJWqHZZkCqqU2mNAKthH6yI8H7KsZn9DS2sJVqM09xRLWtwHkziOC/7aOgFLSc +CbAK42C++PhmiM1b8XcF4LVzbsF9Ri6OSyemzTUK/eVNfaoqoynHWmgE6OXWk6Ri +wsXm9E/G+Z8ajYJJGYrKWUM66A0ywfRMEwNvbqY/kXPLynNvEiCL7sCCeN5LLsJJ +wx3tFvYk9CcbXFcx3FXuqB5vbKziRcxXV4p1VxngtViZSTYxPDMBbRZKzbgqg4SG +m/lg0h9tkQPTYKbVPZrdd5A9NaSfD171UkRpucC63M9933zZxKyGIjK8e2uR73r4 +F2iw4lNVYC2vPsKD2NkJK/DAZNuHi5HMkesE/Xa0lZrmFAYb1TQdvtj/dBxThZng +WVJKYe2InmtJiUZ+IFrZ50rlau7SZRFDAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIB +BjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTvkUz1pcMw6C8I6tNxIqSSaHh0 +2TAfBgNVHSMEGDAWgBTvkUz1pcMw6C8I6tNxIqSSaHh02TANBgkqhkiG9w0BAQsF +AAOCAgEAfj1U2iJdGlg+O1QnurrMyOMaauo++RLrVl89UM7g6kgmJs95Vn6RHJk/ +0KGRHCwPT5iVWVO90CLYiF2cN/z7ZMF4jIuaYAnq1fohX9B0ZedQxb8uuQsLrbWw +F6YSjNRieOpWauwK0kDDPAUwPk2Ut59KA9N9J0u2/kTO+hkzGm2kQtHdzMjI1xZS +g081lLMSVX3l4kLr5JyTCcBMWwerx20RoFAXlCOotQqSD7J6wWAsOMwaplv/8gzj +qh8c3LigkyfeY+N/IZ865Z764BNqdeuWXGKRlI5nU7aJ+BIJy29SWwNyhlCVCNSN +h4YVH5Uk2KRvms6knZtt0rJ2BobGVgjF6wnaNsIbW0G+YSrjcOa4pvi2WsS9Iff/ +ql+hbHY5ZtbqTFXhADObE5hjyW/QASAJN1LnDE8+zbz1X5YnpyACleAu6AdBBR8V +btaw5BngDwKTACdyxYvRVB9dSsNAl35VpnzBMwQUAR1JIGkLGZOdblgi90AMRgwj +Y/M50n92Uaf0yKHxDHYiI0ZSKS3io0EHVmmY0gUJvGnHWmHNj4FgFU2A3ZDifcRQ +8ow7bkrHxuaAKzyBvBGAFhAn1/DNP3nMcyrDflOR1m749fPH0FFNjkulW+YZFzvW +gQncItzujrnEj1PhZ7szuIgVRs/taTX/dQ1G885x4cVrhkIGuUE= +-----END CERTIFICATE----- + +# Issuer: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed +# Subject: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed +# Label: "OISTE WISeKey Global Root GB CA" +# Serial: 157768595616588414422159278966750757568 +# MD5 Fingerprint: a4:eb:b9:61:28:2e:b7:2f:98:b0:35:26:90:99:51:1d +# SHA1 Fingerprint: 0f:f9:40:76:18:d3:d7:6a:4b:98:f0:a8:35:9e:0c:fd:27:ac:cc:ed +# SHA256 Fingerprint: 6b:9c:08:e8:6e:b0:f7:67:cf:ad:65:cd:98:b6:21:49:e5:49:4a:67:f5:84:5e:7b:d1:ed:01:9f:27:b8:6b:d6 +-----BEGIN CERTIFICATE----- 
+MIIDtTCCAp2gAwIBAgIQdrEgUnTwhYdGs/gjGvbCwDANBgkqhkiG9w0BAQsFADBt +MQswCQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUg +Rm91bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9i +YWwgUm9vdCBHQiBDQTAeFw0xNDEyMDExNTAwMzJaFw0zOTEyMDExNTEwMzFaMG0x +CzAJBgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBG +b3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2Jh +bCBSb290IEdCIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2Be3 +HEokKtaXscriHvt9OO+Y9bI5mE4nuBFde9IllIiCFSZqGzG7qFshISvYD06fWvGx +WuR51jIjK+FTzJlFXHtPrby/h0oLS5daqPZI7H17Dc0hBt+eFf1Biki3IPShehtX +1F1Q/7pn2COZH8g/497/b1t3sWtuuMlk9+HKQUYOKXHQuSP8yYFfTvdv37+ErXNk +u7dCjmn21HYdfp2nuFeKUWdy19SouJVUQHMD9ur06/4oQnc/nSMbsrY9gBQHTC5P +99UKFg29ZkM3fiNDecNAhvVMKdqOmq0NpQSHiB6F4+lT1ZvIiwNjeOvgGUpuuy9r +M2RYk61pv48b74JIxwIDAQABo1EwTzALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUw +AwEB/zAdBgNVHQ4EFgQUNQ/INmNe4qPs+TtmFc5RUuORmj0wEAYJKwYBBAGCNxUB +BAMCAQAwDQYJKoZIhvcNAQELBQADggEBAEBM+4eymYGQfp3FsLAmzYh7KzKNbrgh +cViXfa43FK8+5/ea4n32cZiZBKpDdHij40lhPnOMTZTg+XHEthYOU3gf1qKHLwI5 +gSk8rxWYITD+KJAAjNHhy/peyP34EEY7onhCkRd0VQreUGdNZtGn//3ZwLWoo4rO +ZvUPQ82nK1d7Y0Zqqi5S2PTt4W2tKZB4SLrhI6qjiey1q5bAtEuiHZeeevJuQHHf +aPFlTc58Bd9TZaml8LGXBHAVRgOY1NK/VLSgWH1Sb9pWJmLU2NuJMW8c8CLC02Ic +Nc1MaRVUGpCY3useX8p3x8uOPUNpnJpY0CQ73xtAln41rYHHTnG6iBM= +-----END CERTIFICATE----- + +# Issuer: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A. +# Subject: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A. +# Label: "SZAFIR ROOT CA2" +# Serial: 357043034767186914217277344587386743377558296292 +# MD5 Fingerprint: 11:64:c1:89:b0:24:b1:8c:b1:07:7e:89:9e:51:9e:99 +# SHA1 Fingerprint: e2:52:fa:95:3f:ed:db:24:60:bd:6e:28:f3:9c:cc:cf:5e:b3:3f:de +# SHA256 Fingerprint: a1:33:9d:33:28:1a:0b:56:e5:57:d3:d3:2b:1c:e7:f9:36:7e:b0:94:bd:5f:a7:2a:7e:50:04:c8:de:d7:ca:fe +-----BEGIN CERTIFICATE----- +MIIDcjCCAlqgAwIBAgIUPopdB+xV0jLVt+O2XwHrLdzk1uQwDQYJKoZIhvcNAQEL +BQAwUTELMAkGA1UEBhMCUEwxKDAmBgNVBAoMH0tyYWpvd2EgSXpiYSBSb3psaWN6 +ZW5pb3dhIFMuQS4xGDAWBgNVBAMMD1NaQUZJUiBST09UIENBMjAeFw0xNTEwMTkw +NzQzMzBaFw0zNTEwMTkwNzQzMzBaMFExCzAJBgNVBAYTAlBMMSgwJgYDVQQKDB9L +cmFqb3dhIEl6YmEgUm96bGljemVuaW93YSBTLkEuMRgwFgYDVQQDDA9TWkFGSVIg +Uk9PVCBDQTIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC3vD5QqEvN +QLXOYeeWyrSh2gwisPq1e3YAd4wLz32ohswmUeQgPYUM1ljj5/QqGJ3a0a4m7utT +3PSQ1hNKDJA8w/Ta0o4NkjrcsbH/ON7Dui1fgLkCvUqdGw+0w8LBZwPd3BucPbOw +3gAeqDRHu5rr/gsUvTaE2g0gv/pby6kWIK05YO4vdbbnl5z5Pv1+TW9NL++IDWr6 +3fE9biCloBK0TXC5ztdyO4mTp4CEHCdJckm1/zuVnsHMyAHs6A6KCpbns6aH5db5 +BSsNl0BwPLqsdVqc1U2dAgrSS5tmS0YHF2Wtn2yIANwiieDhZNRnvDF5YTy7ykHN +XGoAyDw4jlivAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD +AgEGMB0GA1UdDgQWBBQuFqlKGLXLzPVvUPMjX/hd56zwyDANBgkqhkiG9w0BAQsF +AAOCAQEAtXP4A9xZWx126aMqe5Aosk3AM0+qmrHUuOQn/6mWmc5G4G18TKI4pAZw +8PRBEew/R40/cof5O/2kbytTAOD/OblqBw7rHRz2onKQy4I9EYKL0rufKq8h5mOG +nXkZ7/e7DDWQw4rtTw/1zBLZpD67oPwglV9PJi8RI4NOdQcPv5vRtB3pEAT+ymCP +oky4rc/hkA/NrgrHXXu3UNLUYfrVFdvXn4dRVOul4+vJhaAlIDf7js4MNIThPIGy +d05DpYhfhmehPea0XGG2Ptv+tyjFogeutcrKjSoS75ftwjCkySp6+/NNIxuZMzSg +LvWpCz/UXeHPhJ/iGcJfitYgHuNztw== +-----END CERTIFICATE----- + +# Issuer: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. OU=Certum Certification Authority +# Subject: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. 
OU=Certum Certification Authority +# Label: "Certum Trusted Network CA 2" +# Serial: 44979900017204383099463764357512596969 +# MD5 Fingerprint: 6d:46:9e:d9:25:6d:08:23:5b:5e:74:7d:1e:27:db:f2 +# SHA1 Fingerprint: d3:dd:48:3e:2b:bf:4c:05:e8:af:10:f5:fa:76:26:cf:d3:dc:30:92 +# SHA256 Fingerprint: b6:76:f2:ed:da:e8:77:5c:d3:6c:b0:f6:3c:d1:d4:60:39:61:f4:9e:62:65:ba:01:3a:2f:03:07:b6:d0:b8:04 +-----BEGIN CERTIFICATE----- +MIIF0jCCA7qgAwIBAgIQIdbQSk8lD8kyN/yqXhKN6TANBgkqhkiG9w0BAQ0FADCB +gDELMAkGA1UEBhMCUEwxIjAgBgNVBAoTGVVuaXpldG8gVGVjaG5vbG9naWVzIFMu +QS4xJzAlBgNVBAsTHkNlcnR1bSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTEkMCIG +A1UEAxMbQ2VydHVtIFRydXN0ZWQgTmV0d29yayBDQSAyMCIYDzIwMTExMDA2MDgz +OTU2WhgPMjA0NjEwMDYwODM5NTZaMIGAMQswCQYDVQQGEwJQTDEiMCAGA1UEChMZ +VW5pemV0byBUZWNobm9sb2dpZXMgUy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRp +ZmljYXRpb24gQXV0aG9yaXR5MSQwIgYDVQQDExtDZXJ0dW0gVHJ1c3RlZCBOZXR3 +b3JrIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC9+Xj45tWA +DGSdhhuWZGc/IjoedQF97/tcZ4zJzFxrqZHmuULlIEub2pt7uZld2ZuAS9eEQCsn +0+i6MLs+CRqnSZXvK0AkwpfHp+6bJe+oCgCXhVqqndwpyeI1B+twTUrWwbNWuKFB +OJvR+zF/j+Bf4bE/D44WSWDXBo0Y+aomEKsq09DRZ40bRr5HMNUuctHFY9rnY3lE +fktjJImGLjQ/KUxSiyqnwOKRKIm5wFv5HdnnJ63/mgKXwcZQkpsCLL2puTRZCr+E +Sv/f/rOf69me4Jgj7KZrdxYq28ytOxykh9xGc14ZYmhFV+SQgkK7QtbwYeDBoz1m +o130GO6IyY0XRSmZMnUCMe4pJshrAua1YkV/NxVaI2iJ1D7eTiew8EAMvE0Xy02i +sx7QBlrd9pPPV3WZ9fqGGmd4s7+W/jTcvedSVuWz5XV710GRBdxdaeOVDUO5/IOW +OZV7bIBaTxNyxtd9KXpEulKkKtVBRgkg/iKgtlswjbyJDNXXcPiHUv3a76xRLgez +Tv7QCdpw75j6VuZt27VXS9zlLCUVyJ4ueE742pyehizKV/Ma5ciSixqClnrDvFAS +adgOWkaLOusm+iPJtrCBvkIApPjW/jAux9JG9uWOdf3yzLnQh1vMBhBgu4M1t15n +3kfsmUjxpKEV/q2MYo45VU85FrmxY53/twIDAQABo0IwQDAPBgNVHRMBAf8EBTAD +AQH/MB0GA1UdDgQWBBS2oVQ5AsOgP46KvPrU+Bym0ToO/TAOBgNVHQ8BAf8EBAMC +AQYwDQYJKoZIhvcNAQENBQADggIBAHGlDs7k6b8/ONWJWsQCYftMxRQXLYtPU2sQ +F/xlhMcQSZDe28cmk4gmb3DWAl45oPePq5a1pRNcgRRtDoGCERuKTsZPpd1iHkTf +CVn0W3cLN+mLIMb4Ck4uWBzrM9DPhmDJ2vuAL55MYIR4PSFk1vtBHxgP58l1cb29 +XN40hz5BsA72udY/CROWFC/emh1auVbONTqwX3BNXuMp8SMoclm2q8KMZiYcdywm +djWLKKdpoPk79SPdhRB0yZADVpHnr7pH1BKXESLjokmUbOe3lEu6LaTaM4tMpkT/ +WjzGHWTYtTHkpjx6qFcL2+1hGsvxznN3Y6SHb0xRONbkX8eftoEq5IVIeVheO/jb +AoJnwTnbw3RLPTYe+SmTiGhbqEQZIfCn6IENLOiTNrQ3ssqwGyZ6miUfmpqAnksq +P/ujmv5zMnHCnsZy4YpoJ/HkD7TETKVhk/iXEAcqMCWpuchxuO9ozC1+9eB+D4Ko +b7a6bINDd82Kkhehnlt4Fj1F4jNy3eFmypnTycUm/Q1oBEauttmbjL4ZvrHG8hnj +XALKLNhvSgfZyTXaQHXyxKcZb55CEJh15pWLYLztxRLXis7VmFxWlgPF7ncGNf/P +5O4/E2Hu29othfDNrp2yGAlFw5Khchf8R7agCyzxxN5DaAhqXzvwdmP7zAYspsbi +DrW5viSP +-----END CERTIFICATE----- + +# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority +# Subject: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. 
Authority +# Label: "Hellenic Academic and Research Institutions RootCA 2015" +# Serial: 0 +# MD5 Fingerprint: ca:ff:e2:db:03:d9:cb:4b:e9:0f:ad:84:fd:7b:18:ce +# SHA1 Fingerprint: 01:0c:06:95:a6:98:19:14:ff:bf:5f:c6:b0:b6:95:ea:29:e9:12:a6 +# SHA256 Fingerprint: a0:40:92:9a:02:ce:53:b4:ac:f4:f2:ff:c6:98:1c:e4:49:6f:75:5e:6d:45:fe:0b:2a:69:2b:cd:52:52:3f:36 +-----BEGIN CERTIFICATE----- +MIIGCzCCA/OgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBpjELMAkGA1UEBhMCR1Ix +DzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5k +IFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxQDA+BgNVBAMT +N0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgUm9v +dENBIDIwMTUwHhcNMTUwNzA3MTAxMTIxWhcNNDAwNjMwMTAxMTIxWjCBpjELMAkG +A1UEBhMCR1IxDzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNh +ZGVtaWMgYW5kIFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkx +QDA+BgNVBAMTN0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1 +dGlvbnMgUm9vdENBIDIwMTUwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC +AQDC+Kk/G4n8PDwEXT2QNrCROnk8ZlrvbTkBSRq0t89/TSNTt5AA4xMqKKYx8ZEA +4yjsriFBzh/a/X0SWwGDD7mwX5nh8hKDgE0GPt+sr+ehiGsxr/CL0BgzuNtFajT0 +AoAkKAoCFZVedioNmToUW/bLy1O8E00BiDeUJRtCvCLYjqOWXjrZMts+6PAQZe10 +4S+nfK8nNLspfZu2zwnI5dMK/IhlZXQK3HMcXM1AsRzUtoSMTFDPaI6oWa7CJ06C +ojXdFPQf/7J31Ycvqm59JCfnxssm5uX+Zwdj2EUN3TpZZTlYepKZcj2chF6IIbjV +9Cz82XBST3i4vTwri5WY9bPRaM8gFH5MXF/ni+X1NYEZN9cRCLdmvtNKzoNXADrD +gfgXy5I2XdGj2HUb4Ysn6npIQf1FGQatJ5lOwXBH3bWfgVMS5bGMSF0xQxfjjMZ6 +Y5ZLKTBOhE5iGV48zpeQpX8B653g+IuJ3SWYPZK2fu/Z8VFRfS0myGlZYeCsargq +NhEEelC9MoS+L9xy1dcdFkfkR2YgP/SWxa+OAXqlD3pk9Q0Yh9muiNX6hME6wGko +LfINaFGq46V3xqSQDqE3izEjR8EJCOtu93ib14L8hCCZSRm2Ekax+0VVFqmjZayc +Bw/qa9wfLgZy7IaIEuQt218FL+TwA9MmM+eAws1CoRc0CwIDAQABo0IwQDAPBgNV +HRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUcRVnyMjJvXVd +ctA4GGqd83EkVAswDQYJKoZIhvcNAQELBQADggIBAHW7bVRLqhBYRjTyYtcWNl0I +XtVsyIe9tC5G8jH4fOpCtZMWVdyhDBKg2mF+D1hYc2Ryx+hFjtyp8iY/xnmMsVMI +M4GwVhO+5lFc2JsKT0ucVlMC6U/2DWDqTUJV6HwbISHTGzrMd/K4kPFox/la/vot +9L/J9UUbzjgQKjeKeaO04wlshYaT/4mWJ3iBj2fjRnRUjtkNaeJK9E10A/+yd+2V +Z5fkscWrv2oj6NSU4kQoYsRL4vDY4ilrGnB+JGGTe08DMiUNRSQrlrRGar9KC/ea +j8GsGsVn82800vpzY4zvFrCopEYq+OsS7HK07/grfoxSwIuEVPkvPuNVqNxmsdnh +X9izjFk0WaSrT2y7HxjbdavYy5LNlDhhDgcGH0tGEPEVvo2FXDtKK4F5D7Rpn0lQ +l033DlZdwJVqwjbDG2jJ9SrcR5q+ss7FJej6A7na+RZukYT1HCjI/CbM1xyQVqdf +bzoEvM14iQuODy+jqk+iGxI9FghAD/FGTNeqewjBCvVtJ94Cj8rDtSvK6evIIVM4 +pcw72Hc3MKJP2W/R8kCtQXoXxdZKNYm3QdV8hn9VTYNKpXMgwDqvkPGaJI7ZjnHK +e7iG2rKPmT4dEw0SEe7Uq/DpFXYC5ODfqiAeW2GFZECpkJcNrVPSWh2HagCXZWK0 +vm9qp/UsQu0yrbYhnr68 +-----END CERTIFICATE----- + +# Issuer: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority +# Subject: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. 
Authority +# Label: "Hellenic Academic and Research Institutions ECC RootCA 2015" +# Serial: 0 +# MD5 Fingerprint: 81:e5:b4:17:eb:c2:f5:e1:4b:0d:41:7b:49:92:fe:ef +# SHA1 Fingerprint: 9f:f1:71:8d:92:d5:9a:f3:7d:74:97:b4:bc:6f:84:68:0b:ba:b6:66 +# SHA256 Fingerprint: 44:b5:45:aa:8a:25:e6:5a:73:ca:15:dc:27:fc:36:d2:4c:1c:b9:95:3a:06:65:39:b1:15:82:dc:48:7b:48:33 +-----BEGIN CERTIFICATE----- +MIICwzCCAkqgAwIBAgIBADAKBggqhkjOPQQDAjCBqjELMAkGA1UEBhMCR1IxDzAN +BgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl +c2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxRDBCBgNVBAMTO0hl +bGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgRUNDIFJv +b3RDQSAyMDE1MB4XDTE1MDcwNzEwMzcxMloXDTQwMDYzMDEwMzcxMlowgaoxCzAJ +BgNVBAYTAkdSMQ8wDQYDVQQHEwZBdGhlbnMxRDBCBgNVBAoTO0hlbGxlbmljIEFj +YWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgQ2VydC4gQXV0aG9yaXR5 +MUQwQgYDVQQDEztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0 +dXRpb25zIEVDQyBSb290Q0EgMjAxNTB2MBAGByqGSM49AgEGBSuBBAAiA2IABJKg +QehLgoRc4vgxEZmGZE4JJS+dQS8KrjVPdJWyUWRrjWvmP3CV8AVER6ZyOFB2lQJa +jq4onvktTpnvLEhvTCUp6NFxW98dwXU3tNf6e3pCnGoKVlp8aQuqgAkkbH7BRqNC +MEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFLQi +C4KZJAEOnLvkDv2/+5cgk5kqMAoGCCqGSM49BAMCA2cAMGQCMGfOFmI4oqxiRaep +lSTAGiecMjvAwNW6qef4BENThe5SId6d9SWDPp5YSy/XZxMOIQIwBeF1Ad5o7Sof +TUwJCA3sS61kFyjndc5FZXIhF8siQQ6ME5g4mlRtm8rifOoCWCKR +-----END CERTIFICATE----- + +# Issuer: CN=Certplus Root CA G1 O=Certplus +# Subject: CN=Certplus Root CA G1 O=Certplus +# Label: "Certplus Root CA G1" +# Serial: 1491911565779898356709731176965615564637713 +# MD5 Fingerprint: 7f:09:9c:f7:d9:b9:5c:69:69:56:d5:37:3e:14:0d:42 +# SHA1 Fingerprint: 22:fd:d0:b7:fd:a2:4e:0d:ac:49:2c:a0:ac:a6:7b:6a:1f:e3:f7:66 +# SHA256 Fingerprint: 15:2a:40:2b:fc:df:2c:d5:48:05:4d:22:75:b3:9c:7f:ca:3e:c0:97:80:78:b0:f0:ea:76:e5:61:a6:c7:43:3e +-----BEGIN CERTIFICATE----- +MIIFazCCA1OgAwIBAgISESBVg+QtPlRWhS2DN7cs3EYRMA0GCSqGSIb3DQEBDQUA +MD4xCzAJBgNVBAYTAkZSMREwDwYDVQQKDAhDZXJ0cGx1czEcMBoGA1UEAwwTQ2Vy +dHBsdXMgUm9vdCBDQSBHMTAeFw0xNDA1MjYwMDAwMDBaFw0zODAxMTUwMDAwMDBa +MD4xCzAJBgNVBAYTAkZSMREwDwYDVQQKDAhDZXJ0cGx1czEcMBoGA1UEAwwTQ2Vy +dHBsdXMgUm9vdCBDQSBHMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB +ANpQh7bauKk+nWT6VjOaVj0W5QOVsjQcmm1iBdTYj+eJZJ+622SLZOZ5KmHNr49a +iZFluVj8tANfkT8tEBXgfs+8/H9DZ6itXjYj2JizTfNDnjl8KvzsiNWI7nC9hRYt +6kuJPKNxQv4c/dMcLRC4hlTqQ7jbxofaqK6AJc96Jh2qkbBIb6613p7Y1/oA/caP +0FG7Yn2ksYyy/yARujVjBYZHYEMzkPZHogNPlk2dT8Hq6pyi/jQu3rfKG3akt62f +6ajUeD94/vI4CTYd0hYCyOwqaK/1jpTvLRN6HkJKHRUxrgwEV/xhc/MxVoYxgKDE +EW4wduOU8F8ExKyHcomYxZ3MVwia9Az8fXoFOvpHgDm2z4QTd28n6v+WZxcIbekN +1iNQMLAVdBM+5S//Ds3EC0pd8NgAM0lm66EYfFkuPSi5YXHLtaW6uOrc4nBvCGrc +h2c0798wct3zyT8j/zXhviEpIDCB5BmlIOklynMxdCm+4kLV87ImZsdo/Rmz5yCT +mehd4F6H50boJZwKKSTUzViGUkAksnsPmBIgJPaQbEfIDbsYIC7Z/fyL8inqh3SV +4EJQeIQEQWGw9CEjjy3LKCHyamz0GqbFFLQ3ZU+V/YDI+HLlJWvEYLF7bY5KinPO +WftwenMGE9nTdDckQQoRb5fc5+R+ob0V8rqHDz1oihYHAgMBAAGjYzBhMA4GA1Ud +DwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBSowcCbkahDFXxd +Bie0KlHYlwuBsTAfBgNVHSMEGDAWgBSowcCbkahDFXxdBie0KlHYlwuBsTANBgkq +hkiG9w0BAQ0FAAOCAgEAnFZvAX7RvUz1isbwJh/k4DgYzDLDKTudQSk0YcbX8ACh +66Ryj5QXvBMsdbRX7gp8CXrc1cqh0DQT+Hern+X+2B50ioUHj3/MeXrKls3N/U/7 +/SMNkPX0XtPGYX2eEeAC7gkE2Qfdpoq3DIMku4NQkv5gdRE+2J2winq14J2by5BS +S7CTKtQ+FjPlnsZlFT5kOwQ/2wyPX1wdaR+v8+khjPPvl/aatxm2hHSco1S1cE5j +2FddUyGbQJJD+tZ3VTNPZNX70Cxqjm0lpu+F6ALEUz65noe8zDUa3qHpimOHZR4R +Kttjd5cUvpoUmRGywO6wT/gUITJDT5+rosuoD6o7BlXGEilXCNQ314cnrUlZp5Gr +RHpejXDbl85IULFzk/bwg2D5zfHhMf1bfHEhYxQUqq/F3pN+aLHsIqKqkHWetUNy 
+6mSjhEv9DKgma3GX7lZjZuhCVPnHHd/Qj1vfyDBviP4NxDMcU6ij/UgQ8uQKTuEV +V/xuZDDCVRHc6qnNSlSsKWNEz0pAoNZoWRsz+e86i9sgktxChL8Bq4fA1SCC28a5 +g4VCXA9DO2pJNdWY9BW/+mGBDAkgGNLQFwzLSABQ6XaCjGTXOqAHVcweMcDvOrRl +++O/QmueD6i9a5jc2NvLi6Td11n0bt3+qsOR0C5CB8AMTVPNJLFMWx5R9N/pkvo= +-----END CERTIFICATE----- + +# Issuer: CN=Certplus Root CA G2 O=Certplus +# Subject: CN=Certplus Root CA G2 O=Certplus +# Label: "Certplus Root CA G2" +# Serial: 1492087096131536844209563509228951875861589 +# MD5 Fingerprint: a7:ee:c4:78:2d:1b:ee:2d:b9:29:ce:d6:a7:96:32:31 +# SHA1 Fingerprint: 4f:65:8e:1f:e9:06:d8:28:02:e9:54:47:41:c9:54:25:5d:69:cc:1a +# SHA256 Fingerprint: 6c:c0:50:41:e6:44:5e:74:69:6c:4c:fb:c9:f8:0f:54:3b:7e:ab:bb:44:b4:ce:6f:78:7c:6a:99:71:c4:2f:17 +-----BEGIN CERTIFICATE----- +MIICHDCCAaKgAwIBAgISESDZkc6uo+jF5//pAq/Pc7xVMAoGCCqGSM49BAMDMD4x +CzAJBgNVBAYTAkZSMREwDwYDVQQKDAhDZXJ0cGx1czEcMBoGA1UEAwwTQ2VydHBs +dXMgUm9vdCBDQSBHMjAeFw0xNDA1MjYwMDAwMDBaFw0zODAxMTUwMDAwMDBaMD4x +CzAJBgNVBAYTAkZSMREwDwYDVQQKDAhDZXJ0cGx1czEcMBoGA1UEAwwTQ2VydHBs +dXMgUm9vdCBDQSBHMjB2MBAGByqGSM49AgEGBSuBBAAiA2IABM0PW1aC3/BFGtat +93nwHcmsltaeTpwftEIRyoa/bfuFo8XlGVzX7qY/aWfYeOKmycTbLXku54uNAm8x +Ik0G42ByRZ0OQneezs/lf4WbGOT8zC5y0xaTTsqZY1yhBSpsBqNjMGEwDgYDVR0P +AQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNqDYwJ5jtpMxjwj +FNiPwyCrKGBZMB8GA1UdIwQYMBaAFNqDYwJ5jtpMxjwjFNiPwyCrKGBZMAoGCCqG +SM49BAMDA2gAMGUCMHD+sAvZ94OX7PNVHdTcswYO/jOYnYs5kGuUIe22113WTNch +p+e/IQ8rzfcq3IUHnQIxAIYUFuXcsGXCwI4Un78kFmjlvPl5adytRSv3tjFzzAal +U5ORGpOucGpnutee5WEaXw== +-----END CERTIFICATE----- + +# Issuer: CN=OpenTrust Root CA G1 O=OpenTrust +# Subject: CN=OpenTrust Root CA G1 O=OpenTrust +# Label: "OpenTrust Root CA G1" +# Serial: 1492036577811947013770400127034825178844775 +# MD5 Fingerprint: 76:00:cc:81:29:cd:55:5e:88:6a:7a:2e:f7:4d:39:da +# SHA1 Fingerprint: 79:91:e8:34:f7:e2:ee:dd:08:95:01:52:e9:55:2d:14:e9:58:d5:7e +# SHA256 Fingerprint: 56:c7:71:28:d9:8c:18:d9:1b:4c:fd:ff:bc:25:ee:91:03:d4:75:8e:a2:ab:ad:82:6a:90:f3:45:7d:46:0e:b4 +-----BEGIN CERTIFICATE----- +MIIFbzCCA1egAwIBAgISESCzkFU5fX82bWTCp59rY45nMA0GCSqGSIb3DQEBCwUA +MEAxCzAJBgNVBAYTAkZSMRIwEAYDVQQKDAlPcGVuVHJ1c3QxHTAbBgNVBAMMFE9w +ZW5UcnVzdCBSb290IENBIEcxMB4XDTE0MDUyNjA4NDU1MFoXDTM4MDExNTAwMDAw +MFowQDELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCU9wZW5UcnVzdDEdMBsGA1UEAwwU +T3BlblRydXN0IFJvb3QgQ0EgRzEwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK +AoICAQD4eUbalsUwXopxAy1wpLuwxQjczeY1wICkES3d5oeuXT2R0odsN7faYp6b +wiTXj/HbpqbfRm9RpnHLPhsxZ2L3EVs0J9V5ToybWL0iEA1cJwzdMOWo010hOHQX +/uMftk87ay3bfWAfjH1MBcLrARYVmBSO0ZB3Ij/swjm4eTrwSSTilZHcYTSSjFR0 +77F9jAHiOH3BX2pfJLKOYheteSCtqx234LSWSE9mQxAGFiQD4eCcjsZGT44ameGP +uY4zbGneWK2gDqdkVBFpRGZPTBKnjix9xNRbxQA0MMHZmf4yzgeEtE7NCv82TWLx +p2NX5Ntqp66/K7nJ5rInieV+mhxNaMbBGN4zK1FGSxyO9z0M+Yo0FMT7MzUj8czx +Kselu7Cizv5Ta01BG2Yospb6p64KTrk5M0ScdMGTHPjgniQlQ/GbI4Kq3ywgsNw2 +TgOzfALU5nsaqocTvz6hdLubDuHAk5/XpGbKuxs74zD0M1mKB3IDVedzagMxbm+W +G+Oin6+Sx+31QrclTDsTBM8clq8cIqPQqwWyTBIjUtz9GVsnnB47ev1CI9sjgBPw +vFEVVJSmdz7QdFG9URQIOTfLHzSpMJ1ShC5VkLG631UAC9hWLbFJSXKAqWLXwPYY +EQRVzXR7z2FwefR7LFxckvzluFqrTJOVoSfupb7PcSNCupt2LQIDAQABo2MwYTAO +BgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUl0YhVyE1 +2jZVx/PxN3DlCPaTKbYwHwYDVR0jBBgwFoAUl0YhVyE12jZVx/PxN3DlCPaTKbYw +DQYJKoZIhvcNAQELBQADggIBAB3dAmB84DWn5ph76kTOZ0BP8pNuZtQ5iSas000E +PLuHIT839HEl2ku6q5aCgZG27dmxpGWX4m9kWaSW7mDKHyP7Rbr/jyTwyqkxf3kf +gLMtMrpkZ2CvuVnN35pJ06iCsfmYlIrM4LvgBBuZYLFGZdwIorJGnkSI6pN+VxbS +FXJfLkur1J1juONI5f6ELlgKn0Md/rcYkoZDSw6cMoYsYPXpSOqV7XAp8dUv/TW0 +V8/bhUiZucJvbI/NeJWsZCj9VrDDb8O+WVLhX4SPgPL0DTatdrOjteFkdjpY3H1P 
+XlZs5VVZV6Xf8YpmMIzUUmI4d7S+KNfKNsSbBfD4Fdvb8e80nR14SohWZ25g/4/I +i+GOvUKpMwpZQhISKvqxnUOOBZuZ2mKtVzazHbYNeS2WuOvyDEsMpZTGMKcmGS3t +TAZQMPH9WD25SxdfGbRqhFS0OE85og2WaMMolP3tLR9Ka0OWLpABEPs4poEL0L91 +09S5zvE/bw4cHjdx5RiHdRk/ULlepEU0rbDK5uUTdg8xFKmOLZTW1YVNcxVPS/Ky +Pu1svf0OnWZzsD2097+o4BGkxK51CUpjAEggpsadCwmKtODmzj7HPiY46SvepghJ +AwSQiumPv+i2tCqjI40cHLI5kqiPAlxAOXXUc0ECd97N4EOH1uS6SsNsEn/+KuYj +1oxx +-----END CERTIFICATE----- + +# Issuer: CN=OpenTrust Root CA G2 O=OpenTrust +# Subject: CN=OpenTrust Root CA G2 O=OpenTrust +# Label: "OpenTrust Root CA G2" +# Serial: 1492012448042702096986875987676935573415441 +# MD5 Fingerprint: 57:24:b6:59:24:6b:ae:c8:fe:1c:0c:20:f2:c0:4e:eb +# SHA1 Fingerprint: 79:5f:88:60:c5:ab:7c:3d:92:e6:cb:f4:8d:e1:45:cd:11:ef:60:0b +# SHA256 Fingerprint: 27:99:58:29:fe:6a:75:15:c1:bf:e8:48:f9:c4:76:1d:b1:6c:22:59:29:25:7b:f4:0d:08:94:f2:9e:a8:ba:f2 +-----BEGIN CERTIFICATE----- +MIIFbzCCA1egAwIBAgISESChaRu/vbm9UpaPI+hIvyYRMA0GCSqGSIb3DQEBDQUA +MEAxCzAJBgNVBAYTAkZSMRIwEAYDVQQKDAlPcGVuVHJ1c3QxHTAbBgNVBAMMFE9w +ZW5UcnVzdCBSb290IENBIEcyMB4XDTE0MDUyNjAwMDAwMFoXDTM4MDExNTAwMDAw +MFowQDELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCU9wZW5UcnVzdDEdMBsGA1UEAwwU +T3BlblRydXN0IFJvb3QgQ0EgRzIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK +AoICAQDMtlelM5QQgTJT32F+D3Y5z1zCU3UdSXqWON2ic2rxb95eolq5cSG+Ntmh +/LzubKh8NBpxGuga2F8ORAbtp+Dz0mEL4DKiltE48MLaARf85KxP6O6JHnSrT78e +CbY2albz4e6WiWYkBuTNQjpK3eCasMSCRbP+yatcfD7J6xcvDH1urqWPyKwlCm/6 +1UWY0jUJ9gNDlP7ZvyCVeYCYitmJNbtRG6Q3ffyZO6v/v6wNj0OxmXsWEH4db0fE +FY8ElggGQgT4hNYdvJGmQr5J1WqIP7wtUdGejeBSzFfdNTVY27SPJIjki9/ca1TS +gSuyzpJLHB9G+h3Ykst2Z7UJmQnlrBcUVXDGPKBWCgOz3GIZ38i1MH/1PCZ1Eb3X +G7OHngevZXHloM8apwkQHZOJZlvoPGIytbU6bumFAYueQ4xncyhZW+vj3CzMpSZy +YhK05pyDRPZRpOLAeiRXyg6lPzq1O4vldu5w5pLeFlwoW5cZJ5L+epJUzpM5ChaH +vGOz9bGTXOBut9Dq+WIyiET7vycotjCVXRIouZW+j1MY5aIYFuJWpLIsEPUdN6b4 +t/bQWVyJ98LVtZR00dX+G7bw5tYee9I8y6jj9RjzIR9u701oBnstXW5DiabA+aC/ +gh7PU3+06yzbXfZqfUAkBXKJOAGTy3HCOV0GEfZvePg3DTmEJwIDAQABo2MwYTAO +BgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUajn6QiL3 +5okATV59M4PLuG53hq8wHwYDVR0jBBgwFoAUajn6QiL35okATV59M4PLuG53hq8w +DQYJKoZIhvcNAQENBQADggIBAJjLq0A85TMCl38th6aP1F5Kr7ge57tx+4BkJamz +Gj5oXScmp7oq4fBXgwpkTx4idBvpkF/wrM//T2h6OKQQbA2xx6R3gBi2oihEdqc0 +nXGEL8pZ0keImUEiyTCYYW49qKgFbdEfwFFEVn8nNQLdXpgKQuswv42hm1GqO+qT +RmTFAHneIWv2V6CG1wZy7HBGS4tz3aAhdT7cHcCP009zHIXZ/n9iyJVvttN7jLpT +wm+bREx50B1ws9efAvSyB7DH5fitIw6mVskpEndI2S9G/Tvw/HRwkqWOOAgfZDC2 +t0v7NqwQjqBSM2OdAzVWxWm9xiNaJ5T2pBL4LTM8oValX9YZ6e18CL13zSdkzJTa +TkZQh+D5wVOAHrut+0dSixv9ovneDiK3PTNZbNTe9ZUGMg1RGUFcPk8G97krgCf2 +o6p6fAbhQ8MTOWIaNr3gKC6UAuQpLmBVrkA9sHSSXvAgZJY/X0VdiLWK2gKgW0VU +3jg9CcCoSmVGFvyqv1ROTVu+OEO3KMqLM6oaJbolXCkvW0pujOotnCr2BXbgd5eA +iN1nE28daCSLT7d0geX0YJ96Vdc+N9oWaz53rK4YcJUIeSkDiv7BO7M/Gg+kO14f +WKGVyasvc0rQLW6aWQ9VGHgtPFGml4vmu7JwqkwR3v98KzfUetF3NI/n+UL3PIEM +S1IK +-----END CERTIFICATE----- + +# Issuer: CN=OpenTrust Root CA G3 O=OpenTrust +# Subject: CN=OpenTrust Root CA G3 O=OpenTrust +# Label: "OpenTrust Root CA G3" +# Serial: 1492104908271485653071219941864171170455615 +# MD5 Fingerprint: 21:37:b4:17:16:92:7b:67:46:70:a9:96:d7:a8:13:24 +# SHA1 Fingerprint: 6e:26:64:f3:56:bf:34:55:bf:d1:93:3f:7c:01:de:d8:13:da:8a:a6 +# SHA256 Fingerprint: b7:c3:62:31:70:6e:81:07:8c:36:7c:b8:96:19:8f:1e:32:08:dd:92:69:49:dd:8f:57:09:a4:10:f7:5b:62:92 +-----BEGIN CERTIFICATE----- +MIICITCCAaagAwIBAgISESDm+Ez8JLC+BUCs2oMbNGA/MAoGCCqGSM49BAMDMEAx +CzAJBgNVBAYTAkZSMRIwEAYDVQQKDAlPcGVuVHJ1c3QxHTAbBgNVBAMMFE9wZW5U +cnVzdCBSb290IENBIEczMB4XDTE0MDUyNjAwMDAwMFoXDTM4MDExNTAwMDAwMFow 
+QDELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCU9wZW5UcnVzdDEdMBsGA1UEAwwUT3Bl +blRydXN0IFJvb3QgQ0EgRzMwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAARK7liuTcpm +3gY6oxH84Bjwbhy6LTAMidnW7ptzg6kjFYwvWYpa3RTqnVkrQ7cG7DK2uu5Bta1d +oYXM6h0UZqNnfkbilPPntlahFVmhTzeXuSIevRHr9LIfXsMUmuXZl5mjYzBhMA4G +A1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRHd8MUi2I5 +DMlv4VBN0BBY3JWIbTAfBgNVHSMEGDAWgBRHd8MUi2I5DMlv4VBN0BBY3JWIbTAK +BggqhkjOPQQDAwNpADBmAjEAj6jcnboMBBf6Fek9LykBl7+BFjNAk2z8+e2AcG+q +j9uEwov1NcoG3GRvaBbhj5G5AjEA2Euly8LQCGzpGPta3U1fJAuwACEl74+nBCZx +4nxp5V2a+EEfOzmTk51V6s2N8fvB +-----END CERTIFICATE----- + +# Issuer: CN=ISRG Root X1 O=Internet Security Research Group +# Subject: CN=ISRG Root X1 O=Internet Security Research Group +# Label: "ISRG Root X1" +# Serial: 172886928669790476064670243504169061120 +# MD5 Fingerprint: 0c:d2:f9:e0:da:17:73:e9:ed:86:4d:a5:e3:70:e7:4e +# SHA1 Fingerprint: ca:bd:2a:79:a1:07:6a:31:f2:1d:25:36:35:cb:03:9d:43:29:a5:e8 +# SHA256 Fingerprint: 96:bc:ec:06:26:49:76:f3:74:60:77:9a:cf:28:c5:a7:cf:e8:a3:c0:aa:e1:1a:8f:fc:ee:05:c0:bd:df:08:c6 +-----BEGIN CERTIFICATE----- +MIIFazCCA1OgAwIBAgIRAIIQz7DSQONZRGPgu2OCiwAwDQYJKoZIhvcNAQELBQAw +TzELMAkGA1UEBhMCVVMxKTAnBgNVBAoTIEludGVybmV0IFNlY3VyaXR5IFJlc2Vh +cmNoIEdyb3VwMRUwEwYDVQQDEwxJU1JHIFJvb3QgWDEwHhcNMTUwNjA0MTEwNDM4 +WhcNMzUwNjA0MTEwNDM4WjBPMQswCQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJu +ZXQgU2VjdXJpdHkgUmVzZWFyY2ggR3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBY +MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK3oJHP0FDfzm54rVygc +h77ct984kIxuPOZXoHj3dcKi/vVqbvYATyjb3miGbESTtrFj/RQSa78f0uoxmyF+ +0TM8ukj13Xnfs7j/EvEhmkvBioZxaUpmZmyPfjxwv60pIgbz5MDmgK7iS4+3mX6U +A5/TR5d8mUgjU+g4rk8Kb4Mu0UlXjIB0ttov0DiNewNwIRt18jA8+o+u3dpjq+sW +T8KOEUt+zwvo/7V3LvSye0rgTBIlDHCNAymg4VMk7BPZ7hm/ELNKjD+Jo2FR3qyH +B5T0Y3HsLuJvW5iB4YlcNHlsdu87kGJ55tukmi8mxdAQ4Q7e2RCOFvu396j3x+UC +B5iPNgiV5+I3lg02dZ77DnKxHZu8A/lJBdiB3QW0KtZB6awBdpUKD9jf1b0SHzUv +KBds0pjBqAlkd25HN7rOrFleaJ1/ctaJxQZBKT5ZPt0m9STJEadao0xAH0ahmbWn +OlFuhjuefXKnEgV4We0+UXgVCwOPjdAvBbI+e0ocS3MFEvzG6uBQE3xDk3SzynTn +jh8BCNAw1FtxNrQHusEwMFxIt4I7mKZ9YIqioymCzLq9gwQbooMDQaHWBfEbwrbw +qHyGO0aoSCqI3Haadr8faqU9GY/rOPNk3sgrDQoo//fb4hVC1CLQJ13hef4Y53CI +rU7m2Ys6xt0nUW7/vGT1M0NPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR5tFnme7bl5AFzgAiIyBpY9umbbjANBgkq +hkiG9w0BAQsFAAOCAgEAVR9YqbyyqFDQDLHYGmkgJykIrGF1XIpu+ILlaS/V9lZL +ubhzEFnTIZd+50xx+7LSYK05qAvqFyFWhfFQDlnrzuBZ6brJFe+GnY+EgPbk6ZGQ +3BebYhtF8GaV0nxvwuo77x/Py9auJ/GpsMiu/X1+mvoiBOv/2X/qkSsisRcOj/KK +NFtY2PwByVS5uCbMiogziUwthDyC3+6WVwW6LLv3xLfHTjuCvjHIInNzktHCgKQ5 +ORAzI4JMPJ+GslWYHb4phowim57iaztXOoJwTdwJx4nLCgdNbOhdjsnvzqvHu7Ur +TkXWStAmzOVyyghqpZXjFaH3pO3JLF+l+/+sKAIuvtd7u+Nxe5AW0wdeRlN8NwdC +jNPElpzVmbUq4JUagEiuTDkHzsxHpFKVK7q4+63SM1N95R1NbdWhscdCb+ZAJzVc +oyi3B43njTOQ5yOf+1CceWxG1bQVs5ZufpsMljq4Ui0/1lvh+wjChP4kqKOJ2qxq +4RgqsahDYVvTH9w7jXbyLeiNdd8XM2w9U/t7y0Ff/9yi0GE44Za4rF2LN9d11TPA +mRGunUHBcnWEvgJBQl9nJEiU0Zsnvgc/ubhPgXRR4Xq37Z0j4r7g1SgEEzwxA57d +emyPxgcYxn/eR44/KJ4EBs+lVDR3veyJm+kXQ99b21/+jh5Xos1AnX5iItreGCc= +-----END CERTIFICATE----- + +# Issuer: O=FNMT-RCM OU=AC RAIZ FNMT-RCM +# Subject: O=FNMT-RCM OU=AC RAIZ FNMT-RCM +# Label: "AC RAIZ FNMT-RCM" +# Serial: 485876308206448804701554682760554759 +# MD5 Fingerprint: e2:09:04:b4:d3:bd:d1:a0:14:fd:1a:d2:47:c4:57:1d +# SHA1 Fingerprint: ec:50:35:07:b2:15:c4:95:62:19:e2:a8:9a:5b:42:99:2c:4c:2c:20 +# SHA256 Fingerprint: eb:c5:57:0c:29:01:8c:4d:67:b1:aa:12:7b:af:12:f7:03:b4:61:1e:bc:17:b7:da:b5:57:38:94:17:9b:93:fa +-----BEGIN CERTIFICATE----- +MIIFgzCCA2ugAwIBAgIPXZONMGc2yAYdGsdUhGkHMA0GCSqGSIb3DQEBCwUAMDsx 
+CzAJBgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJ +WiBGTk1ULVJDTTAeFw0wODEwMjkxNTU5NTZaFw0zMDAxMDEwMDAwMDBaMDsxCzAJ +BgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJWiBG +Tk1ULVJDTTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBALpxgHpMhm5/ +yBNtwMZ9HACXjywMI7sQmkCpGreHiPibVmr75nuOi5KOpyVdWRHbNi63URcfqQgf +BBckWKo3Shjf5TnUV/3XwSyRAZHiItQDwFj8d0fsjz50Q7qsNI1NOHZnjrDIbzAz +WHFctPVrbtQBULgTfmxKo0nRIBnuvMApGGWn3v7v3QqQIecaZ5JCEJhfTzC8PhxF +tBDXaEAUwED653cXeuYLj2VbPNmaUtu1vZ5Gzz3rkQUCwJaydkxNEJY7kvqcfw+Z +374jNUUeAlz+taibmSXaXvMiwzn15Cou08YfxGyqxRxqAQVKL9LFwag0Jl1mpdIC +IfkYtwb1TplvqKtMUejPUBjFd8g5CSxJkjKZqLsXF3mwWsXmo8RZZUc1g16p6DUL +mbvkzSDGm0oGObVo/CK67lWMK07q87Hj/LaZmtVC+nFNCM+HHmpxffnTtOmlcYF7 +wk5HlqX2doWjKI/pgG6BU6VtX7hI+cL5NqYuSf+4lsKMB7ObiFj86xsc3i1w4peS +MKGJ47xVqCfWS+2QrYv6YyVZLag13cqXM7zlzced0ezvXg5KkAYmY6252TUtB7p2 +ZSysV4999AeU14ECll2jB0nVetBX+RvnU0Z1qrB5QstocQjpYL05ac70r8NWQMet +UqIJ5G+GR4of6ygnXYMgrwTJbFaai0b1AgMBAAGjgYMwgYAwDwYDVR0TAQH/BAUw +AwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFPd9xf3E6Jobd2Sn9R2gzL+H +YJptMD4GA1UdIAQ3MDUwMwYEVR0gADArMCkGCCsGAQUFBwIBFh1odHRwOi8vd3d3 +LmNlcnQuZm5tdC5lcy9kcGNzLzANBgkqhkiG9w0BAQsFAAOCAgEAB5BK3/MjTvDD +nFFlm5wioooMhfNzKWtN/gHiqQxjAb8EZ6WdmF/9ARP67Jpi6Yb+tmLSbkyU+8B1 +RXxlDPiyN8+sD8+Nb/kZ94/sHvJwnvDKuO+3/3Y3dlv2bojzr2IyIpMNOmqOFGYM +LVN0V2Ue1bLdI4E7pWYjJ2cJj+F3qkPNZVEI7VFY/uY5+ctHhKQV8Xa7pO6kO8Rf +77IzlhEYt8llvhjho6Tc+hj507wTmzl6NLrTQfv6MooqtyuGC2mDOL7Nii4LcK2N +JpLuHvUBKwrZ1pebbuCoGRw6IYsMHkCtA+fdZn71uSANA+iW+YJF1DngoABd15jm +fZ5nc8OaKveri6E6FO80vFIOiZiaBECEHX5FaZNXzuvO+FB8TxxuBEOb+dY7Ixjp +6o7RTUaN8Tvkasq6+yO3m/qZASlaWFot4/nUbQ4mrcFuNLwy+AwF+mWj2zs3gyLp +1txyM/1d8iC9djwj2ij3+RvrWWTV3F9yfiD8zYm1kGdNYno/Tq0dwzn+evQoFt9B +9kiABdcPUXmsEKvU7ANm5mqwujGSQkBqvjrTcuFqN1W8rB2Vt2lh8kORdOag0wok +RqEIr9baRRmW1FMdW4R58MD3R++Lj8UGrp1MYp3/RgT408m2ECVAdf4WqslKYIYv +uu8wd+RU4riEmViAqhOLUTpPSPaLtrM= +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 1 O=Amazon +# Subject: CN=Amazon Root CA 1 O=Amazon +# Label: "Amazon Root CA 1" +# Serial: 143266978916655856878034712317230054538369994 +# MD5 Fingerprint: 43:c6:bf:ae:ec:fe:ad:2f:18:c6:88:68:30:fc:c8:e6 +# SHA1 Fingerprint: 8d:a7:f9:65:ec:5e:fc:37:91:0f:1c:6e:59:fd:c1:cc:6a:6e:de:16 +# SHA256 Fingerprint: 8e:cd:e6:88:4f:3d:87:b1:12:5b:a3:1a:c3:fc:b1:3d:70:16:de:7f:57:cc:90:4f:e1:cb:97:c6:ae:98:19:6e +-----BEGIN CERTIFICATE----- +MIIDQTCCAimgAwIBAgITBmyfz5m/jAo54vB4ikPmljZbyjANBgkqhkiG9w0BAQsF +ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6 +b24gUm9vdCBDQSAxMB4XDTE1MDUyNjAwMDAwMFoXDTM4MDExNzAwMDAwMFowOTEL +MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv +b3QgQ0EgMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALJ4gHHKeNXj +ca9HgFB0fW7Y14h29Jlo91ghYPl0hAEvrAIthtOgQ3pOsqTQNroBvo3bSMgHFzZM +9O6II8c+6zf1tRn4SWiw3te5djgdYZ6k/oI2peVKVuRF4fn9tBb6dNqcmzU5L/qw +IFAGbHrQgLKm+a/sRxmPUDgH3KKHOVj4utWp+UhnMJbulHheb4mjUcAwhmahRWa6 +VOujw5H5SNz/0egwLX0tdHA114gk957EWW67c4cX8jJGKLhD+rcdqsq08p8kDi1L +93FcXmn/6pUCyziKrlA4b9v7LWIbxcceVOF34GfID5yHI9Y/QCB/IIDEgEw+OyQm +jgSubJrIqg0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC +AYYwHQYDVR0OBBYEFIQYzIU07LwMlJQuCFmcx7IQTgoIMA0GCSqGSIb3DQEBCwUA +A4IBAQCY8jdaQZChGsV2USggNiMOruYou6r4lK5IpDB/G/wkjUu0yKGX9rbxenDI +U5PMCCjjmCXPI6T53iHTfIUJrU6adTrCC2qJeHZERxhlbI1Bjjt/msv0tadQ1wUs +N+gDS63pYaACbvXy8MWy7Vu33PqUXHeeE6V/Uq2V8viTO96LXFvKWlJbYK8U90vv +o/ufQJVtMVT8QtPHRh8jrdkPSHCa2XV4cdFyQzR1bldZwgJcJmApzyMZFo6IQ6XU +5MsI+yMRQ+hDKXJioaldXgjUkK642M4UwtBV8ob2xJNDd2ZhwLnoQdeXeGADbkpy +rqXRfboQnoZsG4q5WTP468SQvvG5 +-----END CERTIFICATE----- 
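+
+# Note (illustrative sketch, standard library only): each "SHA256 Fingerprint"
+# header in this bundle is the SHA-256 digest of the certificate's DER encoding,
+# and can be reproduced from a single PEM block copied out of this file; the
+# file name "one_cert.pem" below is a placeholder:
+#
+#     import hashlib
+#     import ssl
+#
+#     pem = open("one_cert.pem").read()        # one BEGIN/END CERTIFICATE block
+#     der = ssl.PEM_cert_to_DER_cert(pem)      # strip PEM armor, decode base64 to DER
+#     digest = hashlib.sha256(der).hexdigest()
+#     print(":".join(digest[i:i + 2] for i in range(0, len(digest), 2)))
+#
+# The output matches the colon-separated lowercase form used in these comments.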
+ +# Issuer: CN=Amazon Root CA 2 O=Amazon +# Subject: CN=Amazon Root CA 2 O=Amazon +# Label: "Amazon Root CA 2" +# Serial: 143266982885963551818349160658925006970653239 +# MD5 Fingerprint: c8:e5:8d:ce:a8:42:e2:7a:c0:2a:5c:7c:9e:26:bf:66 +# SHA1 Fingerprint: 5a:8c:ef:45:d7:a6:98:59:76:7a:8c:8b:44:96:b5:78:cf:47:4b:1a +# SHA256 Fingerprint: 1b:a5:b2:aa:8c:65:40:1a:82:96:01:18:f8:0b:ec:4f:62:30:4d:83:ce:c4:71:3a:19:c3:9c:01:1e:a4:6d:b4 +-----BEGIN CERTIFICATE----- +MIIFQTCCAymgAwIBAgITBmyf0pY1hp8KD+WGePhbJruKNzANBgkqhkiG9w0BAQwF +ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6 +b24gUm9vdCBDQSAyMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTEL +MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv +b3QgQ0EgMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK2Wny2cSkxK +gXlRmeyKy2tgURO8TW0G/LAIjd0ZEGrHJgw12MBvIITplLGbhQPDW9tK6Mj4kHbZ +W0/jTOgGNk3Mmqw9DJArktQGGWCsN0R5hYGCrVo34A3MnaZMUnbqQ523BNFQ9lXg +1dKmSYXpN+nKfq5clU1Imj+uIFptiJXZNLhSGkOQsL9sBbm2eLfq0OQ6PBJTYv9K +8nu+NQWpEjTj82R0Yiw9AElaKP4yRLuH3WUnAnE72kr3H9rN9yFVkE8P7K6C4Z9r +2UXTu/Bfh+08LDmG2j/e7HJV63mjrdvdfLC6HM783k81ds8P+HgfajZRRidhW+me +z/CiVX18JYpvL7TFz4QuK/0NURBs+18bvBt+xa47mAExkv8LV/SasrlX6avvDXbR +8O70zoan4G7ptGmh32n2M8ZpLpcTnqWHsFcQgTfJU7O7f/aS0ZzQGPSSbtqDT6Zj +mUyl+17vIWR6IF9sZIUVyzfpYgwLKhbcAS4y2j5L9Z469hdAlO+ekQiG+r5jqFoz +7Mt0Q5X5bGlSNscpb/xVA1wf+5+9R+vnSUeVC06JIglJ4PVhHvG/LopyboBZ/1c6 ++XUyo05f7O0oYtlNc/LMgRdg7c3r3NunysV+Ar3yVAhU/bQtCSwXVEqY0VThUWcI +0u1ufm8/0i2BWSlmy5A5lREedCf+3euvAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMB +Af8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSwDPBMMPQFWAJI/TPlUq9LhONm +UjANBgkqhkiG9w0BAQwFAAOCAgEAqqiAjw54o+Ci1M3m9Zh6O+oAA7CXDpO8Wqj2 +LIxyh6mx/H9z/WNxeKWHWc8w4Q0QshNabYL1auaAn6AFC2jkR2vHat+2/XcycuUY ++gn0oJMsXdKMdYV2ZZAMA3m3MSNjrXiDCYZohMr/+c8mmpJ5581LxedhpxfL86kS +k5Nrp+gvU5LEYFiwzAJRGFuFjWJZY7attN6a+yb3ACfAXVU3dJnJUH/jWS5E4ywl +7uxMMne0nxrpS10gxdr9HIcWxkPo1LsmmkVwXqkLN1PiRnsn/eBG8om3zEK2yygm +btmlyTrIQRNg91CMFa6ybRoVGld45pIq2WWQgj9sAq+uEjonljYE1x2igGOpm/Hl +urR8FLBOybEfdF849lHqm/osohHUqS0nGkWxr7JOcQ3AWEbWaQbLU8uz/mtBzUF+ +fUwPfHJ5elnNXkoOrJupmHN5fLT0zLm4BwyydFy4x2+IoZCn9Kr5v2c69BoVYh63 +n749sSmvZ6ES8lgQGVMDMBu4Gon2nL2XA46jCfMdiyHxtN/kHNGfZQIG6lzWE7OE +76KlXIx3KadowGuuQNKotOrN8I1LOJwZmhsoVLiJkO/KdYE+HvJkJMcYr07/R54H +9jVlpNMKVv/1F2Rs76giJUmTtt8AF9pYfl3uxRuw0dFfIRDH+fO6AgonB8Xx1sfT +4PsJYGw= +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 3 O=Amazon +# Subject: CN=Amazon Root CA 3 O=Amazon +# Label: "Amazon Root CA 3" +# Serial: 143266986699090766294700635381230934788665930 +# MD5 Fingerprint: a0:d4:ef:0b:f7:b5:d8:49:95:2a:ec:f5:c4:fc:81:87 +# SHA1 Fingerprint: 0d:44:dd:8c:3c:8c:1a:1a:58:75:64:81:e9:0f:2e:2a:ff:b3:d2:6e +# SHA256 Fingerprint: 18:ce:6c:fe:7b:f1:4e:60:b2:e3:47:b8:df:e8:68:cb:31:d0:2e:bb:3a:da:27:15:69:f5:03:43:b4:6d:b3:a4 +-----BEGIN CERTIFICATE----- +MIIBtjCCAVugAwIBAgITBmyf1XSXNmY/Owua2eiedgPySjAKBggqhkjOPQQDAjA5 +MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g +Um9vdCBDQSAzMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG +A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg +Q0EgMzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABCmXp8ZBf8ANm+gBG1bG8lKl +ui2yEujSLtf6ycXYqm0fc4E7O5hrOXwzpcVOho6AF2hiRVd9RFgdszflZwjrZt6j +QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSr +ttvXBp43rDCGB5Fwx5zEGbF4wDAKBggqhkjOPQQDAgNJADBGAiEA4IWSoxe3jfkr +BqWTrBqYaGFy+uGh0PsceGCmQ5nFuMQCIQCcAu/xlJyzlvnrxir4tiz+OpAUFteM +YyRIHN8wfdVoOw== +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 4 O=Amazon +# Subject: CN=Amazon Root CA 4 O=Amazon +# Label: 
"Amazon Root CA 4" +# Serial: 143266989758080763974105200630763877849284878 +# MD5 Fingerprint: 89:bc:27:d5:eb:17:8d:06:6a:69:d5:fd:89:47:b4:cd +# SHA1 Fingerprint: f6:10:84:07:d6:f8:bb:67:98:0c:c2:e2:44:c2:eb:ae:1c:ef:63:be +# SHA256 Fingerprint: e3:5d:28:41:9e:d0:20:25:cf:a6:90:38:cd:62:39:62:45:8d:a5:c6:95:fb:de:a3:c2:2b:0b:fb:25:89:70:92 +-----BEGIN CERTIFICATE----- +MIIB8jCCAXigAwIBAgITBmyf18G7EEwpQ+Vxe3ssyBrBDjAKBggqhkjOPQQDAzA5 +MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g +Um9vdCBDQSA0MB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG +A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg +Q0EgNDB2MBAGByqGSM49AgEGBSuBBAAiA2IABNKrijdPo1MN/sGKe0uoe0ZLY7Bi +9i0b2whxIdIA6GO9mif78DluXeo9pcmBqqNbIJhFXRbb/egQbeOc4OO9X4Ri83Bk +M6DLJC9wuoihKqB1+IGuYgbEgds5bimwHvouXKNCMEAwDwYDVR0TAQH/BAUwAwEB +/zAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0OBBYEFNPsxzplbszh2naaVvuc84ZtV+WB +MAoGCCqGSM49BAMDA2gAMGUCMDqLIfG9fhGt0O9Yli/W651+kI0rz2ZVwyzjKKlw +CkcO8DdZEv8tmZQoTipPNU0zWgIxAOp1AE47xDqUEpHJWEadIRNyp4iciuRMStuW +1KyLa2tJElMzrdfkviT8tQp21KW8EA== +-----END CERTIFICATE----- + +# Issuer: CN=LuxTrust Global Root 2 O=LuxTrust S.A. +# Subject: CN=LuxTrust Global Root 2 O=LuxTrust S.A. +# Label: "LuxTrust Global Root 2" +# Serial: 59914338225734147123941058376788110305822489521 +# MD5 Fingerprint: b2:e1:09:00:61:af:f7:f1:91:6f:c4:ad:8d:5e:3b:7c +# SHA1 Fingerprint: 1e:0e:56:19:0a:d1:8b:25:98:b2:04:44:ff:66:8a:04:17:99:5f:3f +# SHA256 Fingerprint: 54:45:5f:71:29:c2:0b:14:47:c4:18:f9:97:16:8f:24:c5:8f:c5:02:3b:f5:da:5b:e2:eb:6e:1d:d8:90:2e:d5 +-----BEGIN CERTIFICATE----- +MIIFwzCCA6ugAwIBAgIUCn6m30tEntpqJIWe5rgV0xZ/u7EwDQYJKoZIhvcNAQEL +BQAwRjELMAkGA1UEBhMCTFUxFjAUBgNVBAoMDUx1eFRydXN0IFMuQS4xHzAdBgNV +BAMMFkx1eFRydXN0IEdsb2JhbCBSb290IDIwHhcNMTUwMzA1MTMyMTU3WhcNMzUw +MzA1MTMyMTU3WjBGMQswCQYDVQQGEwJMVTEWMBQGA1UECgwNTHV4VHJ1c3QgUy5B +LjEfMB0GA1UEAwwWTHV4VHJ1c3QgR2xvYmFsIFJvb3QgMjCCAiIwDQYJKoZIhvcN +AQEBBQADggIPADCCAgoCggIBANeFl78RmOnwYoNMPIf5U2o3C/IPPIfOb9wmKb3F +ibrJgz337spbxm1Jc7TJRqMbNBM/wYlFV/TZsfs2ZUv7COJIcRHIbjuend+JZTem +hfY7RBi2xjcwYkSSl2l9QjAk5A0MiWtj3sXh306pFGxT4GHO9hcvHTy95iJMHZP1 +EMShduxq3sVs35a0VkBCwGKSMKEtFZSg0iAGCW5qbeXrt77U8PEVfIvmTroTzEsn +Xpk8F12PgX8zPU/TPxvsXD/wPEx1bvKm1Z3aLQdjAsZy6ZS8TEmVT4hSyNvoaYL4 +zDRbIvCGp4m9SAptZoFtyMhk+wHh9OHe2Z7d21vUKpkmFRseTJIpgp7VkoGSQXAZ +96Tlk0u8d2cx3Rz9MXANF5kM+Qw5GSoXtTBxVdUPrljhPS80m8+f9niFwpN6cj5m +j5wWEWCPnolvZ77gR1o7DJpni89Gxq44o/KnvObWhWszJHAiS8sIm7vI+AIpHb4g +DEa/a4ebsypmQjVGbKq6rfmYe+lQVRQxv7HaLe2ArWgk+2mr2HETMOZns4dA/Yl+ +8kPREd8vZS9kzl8UubG/Mb2HeFpZZYiq/FkySIbWTLkpS5XTdvN3JW1CHDiDTf2j +X5t/Lax5Gw5CMZdjpPuKadUiDTSQMC6otOBttpSsvItO13D8xTiOZCXhTTmQzsmH +hFhxAgMBAAGjgagwgaUwDwYDVR0TAQH/BAUwAwEB/zBCBgNVHSAEOzA5MDcGByuB +KwEBAQowLDAqBggrBgEFBQcCARYeaHR0cHM6Ly9yZXBvc2l0b3J5Lmx1eHRydXN0 +Lmx1MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBT/GCh2+UgFLKGu8SsbK7JT ++Et8szAdBgNVHQ4EFgQU/xgodvlIBSyhrvErGyuyU/hLfLMwDQYJKoZIhvcNAQEL +BQADggIBAGoZFO1uecEsh9QNcH7X9njJCwROxLHOk3D+sFTAMs2ZMGQXvw/l4jP9 +BzZAcg4atmpZ1gDlaCDdLnINH2pkMSCEfUmmWjfrRcmF9dTHF5kH5ptV5AzoqbTO +jFu1EVzPig4N1qx3gf4ynCSecs5U89BvolbW7MM3LGVYvlcAGvI1+ut7MV3CwRI9 +loGIlonBWVx65n9wNOeD4rHh4bhY79SV5GCc8JaXcozrhAIuZY+kt9J/Z93I055c +qqmkoCUUBpvsT34tC38ddfEz2O3OuHVtPlu5mB0xDVbYQw8wkbIEa91WvpWAVWe+ +2M2D2RjuLg+GLZKecBPs3lHJQ3gCpU3I+V/EkVhGFndadKpAvAefMLmx9xIX3eP/ +JEAdemrRTxgKqpAd60Ae36EeRJIQmvKN4dFLRp7oRUKX6kWZ8+xm1QL68qZKJKre +zrnK+T+Tb/mjuuqlPpmt/f97mfVl7vBZKGfXkJWkE4SphMHozs51k2MavDzq1WQf +LSoSOcbDWjLtR5EWDrw4wVDej8oqkDQc7kGUnF4ZLvhFSZl0kbAEb+MEWrGrKqv+ 
+x9CWttrhSmQGbmBNvUJO/3jaJMobtNeWOWyu8Q6qp31IiyBMz2TWuJdGsE7RKlY6 +oJO9r4Ak4Ap+58rVyuiFVdw2KuGUaJPHZnJED4AhMmwlxyOAgwrr +-----END CERTIFICATE----- + +# Issuer: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM +# Subject: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM +# Label: "TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1" +# Serial: 1 +# MD5 Fingerprint: dc:00:81:dc:69:2f:3e:2f:b0:3b:f6:3d:5a:91:8e:49 +# SHA1 Fingerprint: 31:43:64:9b:ec:ce:27:ec:ed:3a:3f:0b:8f:0d:e4:e8:91:dd:ee:ca +# SHA256 Fingerprint: 46:ed:c3:68:90:46:d5:3a:45:3f:b3:10:4a:b8:0d:ca:ec:65:8b:26:60:ea:16:29:dd:7e:86:79:90:64:87:16 +-----BEGIN CERTIFICATE----- +MIIEYzCCA0ugAwIBAgIBATANBgkqhkiG9w0BAQsFADCB0jELMAkGA1UEBhMCVFIx +GDAWBgNVBAcTD0dlYnplIC0gS29jYWVsaTFCMEAGA1UEChM5VHVya2l5ZSBCaWxp +bXNlbCB2ZSBUZWtub2xvamlrIEFyYXN0aXJtYSBLdXJ1bXUgLSBUVUJJVEFLMS0w +KwYDVQQLEyRLYW11IFNlcnRpZmlrYXN5b24gTWVya2V6aSAtIEthbXUgU00xNjA0 +BgNVBAMTLVRVQklUQUsgS2FtdSBTTSBTU0wgS29rIFNlcnRpZmlrYXNpIC0gU3Vy +dW0gMTAeFw0xMzExMjUwODI1NTVaFw00MzEwMjUwODI1NTVaMIHSMQswCQYDVQQG +EwJUUjEYMBYGA1UEBxMPR2ViemUgLSBLb2NhZWxpMUIwQAYDVQQKEzlUdXJraXll +IEJpbGltc2VsIHZlIFRla25vbG9qaWsgQXJhc3Rpcm1hIEt1cnVtdSAtIFRVQklU +QUsxLTArBgNVBAsTJEthbXUgU2VydGlmaWthc3lvbiBNZXJrZXppIC0gS2FtdSBT +TTE2MDQGA1UEAxMtVFVCSVRBSyBLYW11IFNNIFNTTCBLb2sgU2VydGlmaWthc2kg +LSBTdXJ1bSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAr3UwM6q7 +a9OZLBI3hNmNe5eA027n/5tQlT6QlVZC1xl8JoSNkvoBHToP4mQ4t4y86Ij5iySr +LqP1N+RAjhgleYN1Hzv/bKjFxlb4tO2KRKOrbEz8HdDc72i9z+SqzvBV96I01INr +N3wcwv61A+xXzry0tcXtAA9TNypN9E8Mg/uGz8v+jE69h/mniyFXnHrfA2eJLJ2X +YacQuFWQfw4tJzh03+f92k4S400VIgLI4OD8D62K18lUUMw7D8oWgITQUVbDjlZ/ +iSIzL+aFCr2lqBs23tPcLG07xxO9WSMs5uWk99gL7eqQQESolbuT1dCANLZGeA4f +AJNG4e7p+exPFwIDAQABo0IwQDAdBgNVHQ4EFgQUZT/HiobGPN08VFw1+DrtUgxH +V8gwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL +BQADggEBACo/4fEyjq7hmFxLXs9rHmoJ0iKpEsdeV31zVmSAhHqT5Am5EM2fKifh +AHe+SMg1qIGf5LgsyX8OsNJLN13qudULXjS99HMpw+0mFZx+CFOKWI3QSyjfwbPf +IPP54+M638yclNhOT8NrF7f3cuitZjO1JVOr4PhMqZ398g26rrnZqsZr+ZO7rqu4 +lzwDGrpDxpa5RXI4s6ehlj2Re37AIVNMh+3yC1SVUZPVIqUNivGTDj5UDrDYyU7c +8jEyVupk+eq1nRZmQnLzf9OxMUP8pI4X8W0jq5Rm+K37DwhuJi1/FwcJsoz7UMCf +lo3Ptv0AnVoUmr8CRPXBwp8iXqIPoeM= +-----END CERTIFICATE----- + +# Issuer: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD. +# Subject: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD. 
+# Label: "GDCA TrustAUTH R5 ROOT" +# Serial: 9009899650740120186 +# MD5 Fingerprint: 63:cc:d9:3d:34:35:5c:6f:53:a3:e2:08:70:48:1f:b4 +# SHA1 Fingerprint: 0f:36:38:5b:81:1a:25:c3:9b:31:4e:83:ca:e9:34:66:70:cc:74:b4 +# SHA256 Fingerprint: bf:ff:8f:d0:44:33:48:7d:6a:8a:a6:0c:1a:29:76:7a:9f:c2:bb:b0:5e:42:0f:71:3a:13:b9:92:89:1d:38:93 +-----BEGIN CERTIFICATE----- +MIIFiDCCA3CgAwIBAgIIfQmX/vBH6nowDQYJKoZIhvcNAQELBQAwYjELMAkGA1UE +BhMCQ04xMjAwBgNVBAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZ +IENPLixMVEQuMR8wHQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMB4XDTE0 +MTEyNjA1MTMxNVoXDTQwMTIzMTE1NTk1OVowYjELMAkGA1UEBhMCQ04xMjAwBgNV +BAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZIENPLixMVEQuMR8w +HQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMIICIjANBgkqhkiG9w0BAQEF +AAOCAg8AMIICCgKCAgEA2aMW8Mh0dHeb7zMNOwZ+Vfy1YI92hhJCfVZmPoiC7XJj +Dp6L3TQsAlFRwxn9WVSEyfFrs0yw6ehGXTjGoqcuEVe6ghWinI9tsJlKCvLriXBj +TnnEt1u9ol2x8kECK62pOqPseQrsXzrj/e+APK00mxqriCZ7VqKChh/rNYmDf1+u +KU49tm7srsHwJ5uu4/Ts765/94Y9cnrrpftZTqfrlYwiOXnhLQiPzLyRuEH3FMEj +qcOtmkVEs7LXLM3GKeJQEK5cy4KOFxg2fZfmiJqwTTQJ9Cy5WmYqsBebnh52nUpm +MUHfP/vFBu8btn4aRjb3ZGM74zkYI+dndRTVdVeSN72+ahsmUPI2JgaQxXABZG12 +ZuGR224HwGGALrIuL4xwp9E7PLOR5G62xDtw8mySlwnNR30YwPO7ng/Wi64HtloP +zgsMR6flPri9fcebNaBhlzpBdRfMK5Z3KpIhHtmVdiBnaM8Nvd/WHwlqmuLMc3Gk +L30SgLdTMEZeS1SZD2fJpcjyIMGC7J0R38IC+xo70e0gmu9lZJIQDSri3nDxGGeC +jGHeuLzRL5z7D9Ar7Rt2ueQ5Vfj4oR24qoAATILnsn8JuLwwoC8N9VKejveSswoA +HQBUlwbgsQfZxw9cZX08bVlX5O2ljelAU58VS6Bx9hoh49pwBiFYFIeFd3mqgnkC +AwEAAaNCMEAwHQYDVR0OBBYEFOLJQJ9NzuiaoXzPDj9lxSmIahlRMA8GA1UdEwEB +/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQDRSVfg +p8xoWLoBDysZzY2wYUWsEe1jUGn4H3++Fo/9nesLqjJHdtJnJO29fDMylyrHBYZm +DRd9FBUb1Ov9H5r2XpdptxolpAqzkT9fNqyL7FeoPueBihhXOYV0GkLH6VsTX4/5 +COmSdI31R9KrO9b7eGZONn356ZLpBN79SWP8bfsUcZNnL0dKt7n/HipzcEYwv1ry +L3ml4Y0M2fmyYzeMN2WFcGpcWwlyua1jPLHd+PwyvzeG5LuOmCd+uh8W4XAR8gPf +JWIyJyYYMoSf/wA6E7qaTfRPuBRwIrHKK5DOKcFw9C+df/KQHtZa37dG/OaG+svg +IHZ6uqbL9XzeYqWxi+7egmaKTjowHz+Ay60nugxe19CxVsp3cbK1daFQqUBDF8Io +2c9Si1vIY9RCPqAzekYu9wogRlR+ak8x8YF+QnQ4ZXMn7sZ8uI7XpTrXmKGcjBBV +09tL7ECQ8s1uV9JiDnxXk7Gnbc2dg7sq5+W2O3FYrf3RRbxake5TFW/TRQl1brqQ +XR4EzzffHqhmsYzmIGrv/EhOdJhCrylvLmrH+33RZjEizIYAfmaDDEL0vTSSwxrq +T8p+ck0LcIymSLumoRT2+1hEmRSuqguTaaApJUqlyyvdimYHFngVV3Eb7PVHhPOe +MTd61X8kreS8/f3MboPoDKi3QWwH3b08hpcv0g== +-----END CERTIFICATE----- + +# Issuer: CN=TrustCor RootCert CA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority +# Subject: CN=TrustCor RootCert CA-1 O=TrustCor Systems S. de R.L. 
OU=TrustCor Certificate Authority +# Label: "TrustCor RootCert CA-1" +# Serial: 15752444095811006489 +# MD5 Fingerprint: 6e:85:f1:dc:1a:00:d3:22:d5:b2:b2:ac:6b:37:05:45 +# SHA1 Fingerprint: ff:bd:cd:e7:82:c8:43:5e:3c:6f:26:86:5c:ca:a8:3a:45:5b:c3:0a +# SHA256 Fingerprint: d4:0e:9c:86:cd:8f:e4:68:c1:77:69:59:f4:9e:a7:74:fa:54:86:84:b6:c4:06:f3:90:92:61:f4:dc:e2:57:5c +-----BEGIN CERTIFICATE----- +MIIEMDCCAxigAwIBAgIJANqb7HHzA7AZMA0GCSqGSIb3DQEBCwUAMIGkMQswCQYD +VQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEgQ2l0eTEk +MCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYDVQQLDB5U +cnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxHzAdBgNVBAMMFlRydXN0Q29y +IFJvb3RDZXJ0IENBLTEwHhcNMTYwMjA0MTIzMjE2WhcNMjkxMjMxMTcyMzE2WjCB +pDELMAkGA1UEBhMCUEExDzANBgNVBAgMBlBhbmFtYTEUMBIGA1UEBwwLUGFuYW1h +IENpdHkxJDAiBgNVBAoMG1RydXN0Q29yIFN5c3RlbXMgUy4gZGUgUi5MLjEnMCUG +A1UECwweVHJ1c3RDb3IgQ2VydGlmaWNhdGUgQXV0aG9yaXR5MR8wHQYDVQQDDBZU +cnVzdENvciBSb290Q2VydCBDQS0xMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB +CgKCAQEAv463leLCJhJrMxnHQFgKq1mqjQCj/IDHUHuO1CAmujIS2CNUSSUQIpid +RtLByZ5OGy4sDjjzGiVoHKZaBeYei0i/mJZ0PmnK6bV4pQa81QBeCQryJ3pS/C3V +seq0iWEk8xoT26nPUu0MJLq5nux+AHT6k61sKZKuUbS701e/s/OojZz0JEsq1pme +9J7+wH5COucLlVPat2gOkEz7cD+PSiyU8ybdY2mplNgQTsVHCJCZGxdNuWxu72CV +EY4hgLW9oHPY0LJ3xEXqWib7ZnZ2+AYfYW0PVcWDtxBWcgYHpfOxGgMFZA6dWorW +hnAbJN7+KIor0Gqw/Hqi3LJ5DotlDwIDAQABo2MwYTAdBgNVHQ4EFgQU7mtJPHo/ +DeOxCbeKyKsZn3MzUOcwHwYDVR0jBBgwFoAU7mtJPHo/DeOxCbeKyKsZn3MzUOcw +DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQAD +ggEBACUY1JGPE+6PHh0RU9otRCkZoB5rMZ5NDp6tPVxBb5UrJKF5mDo4Nvu7Zp5I +/5CQ7z3UuJu0h3U/IJvOcs+hVcFNZKIZBqEHMwwLKeXx6quj7LUKdJDHfXLy11yf +ke+Ri7fc7Waiz45mO7yfOgLgJ90WmMCV1Aqk5IGadZQ1nJBfiDcGrVmVCrDRZ9MZ +yonnMlo2HD6CqFqTvsbQZJG2z9m2GM/bftJlo6bEjhcxwft+dtvTheNYsnd6djts +L1Ac59v2Z3kf9YKVmgenFK+P3CghZwnS1k1aHBkcjndcw5QkPTJrS37UeJSDvjdN +zl/HHk484IkzlQsPpTLWPFp5LBk= +-----END CERTIFICATE----- + +# Issuer: CN=TrustCor RootCert CA-2 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority +# Subject: CN=TrustCor RootCert CA-2 O=TrustCor Systems S. de R.L. 
OU=TrustCor Certificate Authority +# Label: "TrustCor RootCert CA-2" +# Serial: 2711694510199101698 +# MD5 Fingerprint: a2:e1:f8:18:0b:ba:45:d5:c7:41:2a:bb:37:52:45:64 +# SHA1 Fingerprint: b8:be:6d:cb:56:f1:55:b9:63:d4:12:ca:4e:06:34:c7:94:b2:1c:c0 +# SHA256 Fingerprint: 07:53:e9:40:37:8c:1b:d5:e3:83:6e:39:5d:ae:a5:cb:83:9e:50:46:f1:bd:0e:ae:19:51:cf:10:fe:c7:c9:65 +-----BEGIN CERTIFICATE----- +MIIGLzCCBBegAwIBAgIIJaHfyjPLWQIwDQYJKoZIhvcNAQELBQAwgaQxCzAJBgNV +BAYTAlBBMQ8wDQYDVQQIDAZQYW5hbWExFDASBgNVBAcMC1BhbmFtYSBDaXR5MSQw +IgYDVQQKDBtUcnVzdENvciBTeXN0ZW1zIFMuIGRlIFIuTC4xJzAlBgNVBAsMHlRy +dXN0Q29yIENlcnRpZmljYXRlIEF1dGhvcml0eTEfMB0GA1UEAwwWVHJ1c3RDb3Ig +Um9vdENlcnQgQ0EtMjAeFw0xNjAyMDQxMjMyMjNaFw0zNDEyMzExNzI2MzlaMIGk +MQswCQYDVQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEg +Q2l0eTEkMCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYD +VQQLDB5UcnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxHzAdBgNVBAMMFlRy +dXN0Q29yIFJvb3RDZXJ0IENBLTIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK +AoICAQCnIG7CKqJiJJWQdsg4foDSq8GbZQWU9MEKENUCrO2fk8eHyLAnK0IMPQo+ +QVqedd2NyuCb7GgypGmSaIwLgQ5WoD4a3SwlFIIvl9NkRvRUqdw6VC0xK5mC8tkq +1+9xALgxpL56JAfDQiDyitSSBBtlVkxs1Pu2YVpHI7TYabS3OtB0PAx1oYxOdqHp +2yqlO/rOsP9+aij9JxzIsekp8VduZLTQwRVtDr4uDkbIXvRR/u8OYzo7cbrPb1nK +DOObXUm4TOJXsZiKQlecdu/vvdFoqNL0Cbt3Nb4lggjEFixEIFapRBF37120Hape +az6LMvYHL1cEksr1/p3C6eizjkxLAjHZ5DxIgif3GIJ2SDpxsROhOdUuxTTCHWKF +3wP+TfSvPd9cW436cOGlfifHhi5qjxLGhF5DUVCcGZt45vz27Ud+ez1m7xMTiF88 +oWP7+ayHNZ/zgp6kPwqcMWmLmaSISo5uZk3vFsQPeSghYA2FFn3XVDjxklb9tTNM +g9zXEJ9L/cb4Qr26fHMC4P99zVvh1Kxhe1fVSntb1IVYJ12/+CtgrKAmrhQhJ8Z3 +mjOAPF5GP/fDsaOGM8boXg25NSyqRsGFAnWAoOsk+xWq5Gd/bnc/9ASKL3x74xdh +8N0JqSDIvgmk0H5Ew7IwSjiqqewYmgeCK9u4nBit2uBGF6zPXQIDAQABo2MwYTAd +BgNVHQ4EFgQU2f4hQG6UnrybPZx9mCAZ5YwwYrIwHwYDVR0jBBgwFoAU2f4hQG6U +nrybPZx9mCAZ5YwwYrIwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYw +DQYJKoZIhvcNAQELBQADggIBAJ5Fngw7tu/hOsh80QA9z+LqBrWyOrsGS2h60COX +dKcs8AjYeVrXWoSK2BKaG9l9XE1wxaX5q+WjiYndAfrs3fnpkpfbsEZC89NiqpX+ +MWcUaViQCqoL7jcjx1BRtPV+nuN79+TMQjItSQzL/0kMmx40/W5ulop5A7Zv2wnL +/V9lFDfhOPXzYRZY5LVtDQsEGz9QLX+zx3oaFoBg+Iof6Rsqxvm6ARppv9JYx1RX +CI/hOWB3S6xZhBqI8d3LT3jX5+EzLfzuQfogsL7L9ziUwOHQhQ+77Sxzq+3+knYa +ZH9bDTMJBzN7Bj8RpFxwPIXAz+OQqIN3+tvmxYxoZxBnpVIt8MSZj3+/0WvitUfW +2dCFmU2Umw9Lje4AWkcdEQOsQRivh7dvDDqPys/cA8GiCcjl/YBeyGBCARsaU1q7 +N6a3vLqE6R5sGtRk2tRD/pOLS/IseRYQ1JMLiI+h2IYURpFHmygk71dSTlxCnKr3 +Sewn6EAes6aJInKc9Q0ztFijMDvd1GpUk74aTfOTlPf8hAs/hCBcNANExdqtvArB +As8e5ZTZ845b2EzwnexhF7sUMlQMAimTHpKG9n/v55IFDlndmQguLvqcAFLTxWYp +5KeXRKQOKIETNcX2b2TmQcTVL8w0RSXPQQCWPUouwpaYT05KnJe32x+SMsj/D1Fu +1uwJ +-----END CERTIFICATE----- + +# Issuer: CN=TrustCor ECA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority +# Subject: CN=TrustCor ECA-1 O=TrustCor Systems S. de R.L. 
OU=TrustCor Certificate Authority +# Label: "TrustCor ECA-1" +# Serial: 9548242946988625984 +# MD5 Fingerprint: 27:92:23:1d:0a:f5:40:7c:e9:e6:6b:9d:d8:f5:e7:6c +# SHA1 Fingerprint: 58:d1:df:95:95:67:6b:63:c0:f0:5b:1c:17:4d:8b:84:0b:c8:78:bd +# SHA256 Fingerprint: 5a:88:5d:b1:9c:01:d9:12:c5:75:93:88:93:8c:af:bb:df:03:1a:b2:d4:8e:91:ee:15:58:9b:42:97:1d:03:9c +-----BEGIN CERTIFICATE----- +MIIEIDCCAwigAwIBAgIJAISCLF8cYtBAMA0GCSqGSIb3DQEBCwUAMIGcMQswCQYD +VQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEgQ2l0eTEk +MCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYDVQQLDB5U +cnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxFzAVBgNVBAMMDlRydXN0Q29y +IEVDQS0xMB4XDTE2MDIwNDEyMzIzM1oXDTI5MTIzMTE3MjgwN1owgZwxCzAJBgNV +BAYTAlBBMQ8wDQYDVQQIDAZQYW5hbWExFDASBgNVBAcMC1BhbmFtYSBDaXR5MSQw +IgYDVQQKDBtUcnVzdENvciBTeXN0ZW1zIFMuIGRlIFIuTC4xJzAlBgNVBAsMHlRy +dXN0Q29yIENlcnRpZmljYXRlIEF1dGhvcml0eTEXMBUGA1UEAwwOVHJ1c3RDb3Ig +RUNBLTEwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDPj+ARtZ+odnbb +3w9U73NjKYKtR8aja+3+XzP4Q1HpGjORMRegdMTUpwHmspI+ap3tDvl0mEDTPwOA +BoJA6LHip1GnHYMma6ve+heRK9jGrB6xnhkB1Zem6g23xFUfJ3zSCNV2HykVh0A5 +3ThFEXXQmqc04L/NyFIduUd+Dbi7xgz2c1cWWn5DkR9VOsZtRASqnKmcp0yJF4Ou +owReUoCLHhIlERnXDH19MURB6tuvsBzvgdAsxZohmz3tQjtQJvLsznFhBmIhVE5/ +wZ0+fyCMgMsq2JdiyIMzkX2woloPV+g7zPIlstR8L+xNxqE6FXrntl019fZISjZF +ZtS6mFjBAgMBAAGjYzBhMB0GA1UdDgQWBBREnkj1zG1I1KBLf/5ZJC+Dl5mahjAf +BgNVHSMEGDAWgBREnkj1zG1I1KBLf/5ZJC+Dl5mahjAPBgNVHRMBAf8EBTADAQH/ +MA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAQEABT41XBVwm8nHc2Fv +civUwo/yQ10CzsSUuZQRg2dd4mdsdXa/uwyqNsatR5Nj3B5+1t4u/ukZMjgDfxT2 +AHMsWbEhBuH7rBiVDKP/mZb3Kyeb1STMHd3BOuCYRLDE5D53sXOpZCz2HAF8P11F +hcCF5yWPldwX8zyfGm6wyuMdKulMY/okYWLW2n62HGz1Ah3UKt1VkOsqEUc8Ll50 +soIipX1TH0XsJ5F95yIW6MBoNtjG8U+ARDL54dHRHareqKucBK+tIA5kmE2la8BI +WJZpTdwHjFGTot+fDz2LYLSCjaoITmJF4PkL0uDgPFveXHEnJcLmA4GLEFPjx1Wi +tJ/X5g== +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com Root Certification Authority RSA O=SSL Corporation +# Subject: CN=SSL.com Root Certification Authority RSA O=SSL Corporation +# Label: "SSL.com Root Certification Authority RSA" +# Serial: 8875640296558310041 +# MD5 Fingerprint: 86:69:12:c0:70:f1:ec:ac:ac:c2:d5:bc:a5:5b:a1:29 +# SHA1 Fingerprint: b7:ab:33:08:d1:ea:44:77:ba:14:80:12:5a:6f:bd:a9:36:49:0c:bb +# SHA256 Fingerprint: 85:66:6a:56:2e:e0:be:5c:e9:25:c1:d8:89:0a:6f:76:a8:7e:c1:6d:4d:7d:5f:29:ea:74:19:cf:20:12:3b:69 +-----BEGIN CERTIFICATE----- +MIIF3TCCA8WgAwIBAgIIeyyb0xaAMpkwDQYJKoZIhvcNAQELBQAwfDELMAkGA1UE +BhMCVVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQK +DA9TU0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZp +Y2F0aW9uIEF1dGhvcml0eSBSU0EwHhcNMTYwMjEyMTczOTM5WhcNNDEwMjEyMTcz +OTM5WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv +dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNv +bSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IFJTQTCCAiIwDQYJKoZIhvcN +AQEBBQADggIPADCCAgoCggIBAPkP3aMrfcvQKv7sZ4Wm5y4bunfh4/WvpOz6Sl2R +xFdHaxh3a3by/ZPkPQ/CFp4LZsNWlJ4Xg4XOVu/yFv0AYvUiCVToZRdOQbngT0aX +qhvIuG5iXmmxX9sqAn78bMrzQdjt0Oj8P2FI7bADFB0QDksZ4LtO7IZl/zbzXmcC +C52GVWH9ejjt/uIZALdvoVBidXQ8oPrIJZK0bnoix/geoeOy3ZExqysdBP+lSgQ3 +6YWkMyv94tZVNHwZpEpox7Ko07fKoZOI68GXvIz5HdkihCR0xwQ9aqkpk8zruFvh +/l8lqjRYyMEjVJ0bmBHDOJx+PYZspQ9AhnwC9FwCTyjLrnGfDzrIM/4RJTXq/LrF +YD3ZfBjVsqnTdXgDciLKOsMf7yzlLqn6niy2UUb9rwPW6mBo6oUWNmuF6R7As93E +JNyAKoFBbZQ+yODJgUEAnl6/f8UImKIYLEJAs/lvOCdLToD0PYFH4Ih86hzOtXVc +US4cK38acijnALXRdMbX5J+tB5O2UzU1/Dfkw/ZdFr4hc96SCvigY2q8lpJqPvi8 +ZVWb3vUNiSYE/CUapiVpy8JtynziWV+XrOvvLsi81xtZPCvM8hnIk2snYxnP/Okm 
++Mpxm3+T/jRnhE6Z6/yzeAkzcLpmpnbtG3PrGqUNxCITIJRWCk4sbE6x/c+cCbqi +M+2HAgMBAAGjYzBhMB0GA1UdDgQWBBTdBAkHovV6fVJTEpKV7jiAJQ2mWTAPBgNV +HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFN0ECQei9Xp9UlMSkpXuOIAlDaZZMA4G +A1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAIBgRlCn7Jp0cHh5wYfGV +cpNxJK1ok1iOMq8bs3AD/CUrdIWQPXhq9LmLpZc7tRiRux6n+UBbkflVma8eEdBc +Hadm47GUBwwyOabqG7B52B2ccETjit3E+ZUfijhDPwGFpUenPUayvOUiaPd7nNgs +PgohyC0zrL/FgZkxdMF1ccW+sfAjRfSda/wZY52jvATGGAslu1OJD7OAUN5F7kR/ +q5R4ZJjT9ijdh9hwZXT7DrkT66cPYakylszeu+1jTBi7qUD3oFRuIIhxdRjqerQ0 +cuAjJ3dctpDqhiVAq+8zD8ufgr6iIPv2tS0a5sKFsXQP+8hlAqRSAUfdSSLBv9jr +a6x+3uxjMxW3IwiPxg+NQVrdjsW5j+VFP3jbutIbQLH+cU0/4IGiul607BXgk90I +H37hVZkLId6Tngr75qNJvTYw/ud3sqB1l7UtgYgXZSD32pAAn8lSzDLKNXz1PQ/Y +K9f1JmzJBjSWFupwWRoyeXkLtoh/D1JIPb9s2KJELtFOt3JY04kTlf5Eq/jXixtu +nLwsoFvVagCvXzfh1foQC5ichucmj87w7G6KVwuA406ywKBjYZC6VWg3dGq2ktuf +oYYitmUnDuy2n0Jg5GfCtdpBC8TTi2EbvPofkSvXRAdeuims2cXp71NIWuuA8ShY +Ic2wBlX7Jz9TkHCpBB5XJ7k= +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com Root Certification Authority ECC O=SSL Corporation +# Subject: CN=SSL.com Root Certification Authority ECC O=SSL Corporation +# Label: "SSL.com Root Certification Authority ECC" +# Serial: 8495723813297216424 +# MD5 Fingerprint: 2e:da:e4:39:7f:9c:8f:37:d1:70:9f:26:17:51:3a:8e +# SHA1 Fingerprint: c3:19:7c:39:24:e6:54:af:1b:c4:ab:20:95:7a:e2:c3:0e:13:02:6a +# SHA256 Fingerprint: 34:17:bb:06:cc:60:07:da:1b:96:1c:92:0b:8a:b4:ce:3f:ad:82:0e:4a:a3:0b:9a:cb:c4:a7:4e:bd:ce:bc:65 +-----BEGIN CERTIFICATE----- +MIICjTCCAhSgAwIBAgIIdebfy8FoW6gwCgYIKoZIzj0EAwIwfDELMAkGA1UEBhMC +VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T +U0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZpY2F0 +aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNDAzWhcNNDEwMjEyMTgxNDAz +WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hvdXN0 +b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNvbSBS +b290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49AgEGBSuB +BAAiA2IABEVuqVDEpiM2nl8ojRfLliJkP9x6jh3MCLOicSS6jkm5BBtHllirLZXI +7Z4INcgn64mMU1jrYor+8FsPazFSY0E7ic3s7LaNGdM0B9y7xgZ/wkWV7Mt/qCPg +CemB+vNH06NjMGEwHQYDVR0OBBYEFILRhXMw5zUE044CkvvlpNHEIejNMA8GA1Ud +EwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUgtGFczDnNQTTjgKS++Wk0cQh6M0wDgYD +VR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2cAMGQCMG/n61kRpGDPYbCWe+0F+S8T +kdzt5fxQaxFGRrMcIQBiu77D5+jNB5n5DQtdcj7EqgIwH7y6C+IwJPt8bYBVCpk+ +gA0z5Wajs6O7pdWLjwkspl1+4vAHCGht0nxpbl/f5Wpl +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation +# Subject: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation +# Label: "SSL.com EV Root Certification Authority RSA R2" +# Serial: 6248227494352943350 +# MD5 Fingerprint: e1:1e:31:58:1a:ae:54:53:02:f6:17:6a:11:7b:4d:95 +# SHA1 Fingerprint: 74:3a:f0:52:9b:d0:32:a0:f4:4a:83:cd:d4:ba:a9:7b:7c:2e:c4:9a +# SHA256 Fingerprint: 2e:7b:f1:6c:c2:24:85:a7:bb:e2:aa:86:96:75:07:61:b0:ae:39:be:3b:2f:e9:d0:cc:6d:4e:f7:34:91:42:5c +-----BEGIN CERTIFICATE----- +MIIF6zCCA9OgAwIBAgIIVrYpzTS8ePYwDQYJKoZIhvcNAQELBQAwgYIxCzAJBgNV +BAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4GA1UEBwwHSG91c3RvbjEYMBYGA1UE +CgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQDDC5TU0wuY29tIEVWIFJvb3QgQ2Vy +dGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIyMB4XDTE3MDUzMTE4MTQzN1oXDTQy +MDUzMDE4MTQzN1owgYIxCzAJBgNVBAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4G +A1UEBwwHSG91c3RvbjEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQD +DC5TU0wuY29tIEVWIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIy +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAjzZlQOHWTcDXtOlG2mvq 
+M0fNTPl9fb69LT3w23jhhqXZuglXaO1XPqDQCEGD5yhBJB/jchXQARr7XnAjssuf +OePPxU7Gkm0mxnu7s9onnQqG6YE3Bf7wcXHswxzpY6IXFJ3vG2fThVUCAtZJycxa +4bH3bzKfydQ7iEGonL3Lq9ttewkfokxykNorCPzPPFTOZw+oz12WGQvE43LrrdF9 +HSfvkusQv1vrO6/PgN3B0pYEW3p+pKk8OHakYo6gOV7qd89dAFmPZiw+B6KjBSYR +aZfqhbcPlgtLyEDhULouisv3D5oi53+aNxPN8k0TayHRwMwi8qFG9kRpnMphNQcA +b9ZhCBHqurj26bNg5U257J8UZslXWNvNh2n4ioYSA0e/ZhN2rHd9NCSFg83XqpyQ +Gp8hLH94t2S42Oim9HizVcuE0jLEeK6jj2HdzghTreyI/BXkmg3mnxp3zkyPuBQV +PWKchjgGAGYS5Fl2WlPAApiiECtoRHuOec4zSnaqW4EWG7WK2NAAe15itAnWhmMO +pgWVSbooi4iTsjQc2KRVbrcc0N6ZVTsj9CLg+SlmJuwgUHfbSguPvuUCYHBBXtSu +UDkiFCbLsjtzdFVHB3mBOagwE0TlBIqulhMlQg+5U8Sb/M3kHN48+qvWBkofZ6aY +MBzdLNvcGJVXZsb/XItW9XcCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNV +HSMEGDAWgBT5YLvU49U09rj1BoAlp3PbRmmonjAdBgNVHQ4EFgQU+WC71OPVNPa4 +9QaAJadz20ZpqJ4wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQBW +s47LCp1Jjr+kxJG7ZhcFUZh1++VQLHqe8RT6q9OKPv+RKY9ji9i0qVQBDb6Thi/5 +Sm3HXvVX+cpVHBK+Rw82xd9qt9t1wkclf7nxY/hoLVUE0fKNsKTPvDxeH3jnpaAg +cLAExbf3cqfeIg29MyVGjGSSJuM+LmOW2puMPfgYCdcDzH2GguDKBAdRUNf/ktUM +79qGn5nX67evaOI5JpS6aLe/g9Pqemc9YmeuJeVy6OLk7K4S9ksrPJ/psEDzOFSz +/bdoyNrGj1E8svuR3Bznm53htw1yj+KkxKl4+esUrMZDBcJlOSgYAsOCsp0FvmXt +ll9ldDz7CTUue5wT/RsPXcdtgTpWD8w74a8CLyKsRspGPKAcTNZEtF4uXBVmCeEm +Kf7GUmG6sXP/wwyc5WxqlD8UykAWlYTzWamsX0xhk23RO8yilQwipmdnRC652dKK +QbNmC1r7fSOl8hqw/96bg5Qu0T/fkreRrwU7ZcegbLHNYhLDkBvjJc40vG93drEQ +w/cFGsDWr3RiSBd3kmmQYRzelYB0VI8YHMPzA9C/pEN1hlMYegouCRw2n5H9gooi +S9EOUCXdywMMF8mDAAhONU2Ki+3wApRmLER/y5UnlhetCTCstnEXbosX9hwJ1C07 +mKVx01QT2WDz9UtmT/rx7iASjbSsV7FFY6GsdqnC+w== +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation +# Subject: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation +# Label: "SSL.com EV Root Certification Authority ECC" +# Serial: 3182246526754555285 +# MD5 Fingerprint: 59:53:22:65:83:42:01:54:c0:ce:42:b9:5a:7c:f2:90 +# SHA1 Fingerprint: 4c:dd:51:a3:d1:f5:20:32:14:b0:c6:c5:32:23:03:91:c7:46:42:6d +# SHA256 Fingerprint: 22:a2:c1:f7:bd:ed:70:4c:c1:e7:01:b5:f4:08:c3:10:88:0f:e9:56:b5:de:2a:4a:44:f9:9c:87:3a:25:a7:c8 +-----BEGIN CERTIFICATE----- +MIIClDCCAhqgAwIBAgIILCmcWxbtBZUwCgYIKoZIzj0EAwIwfzELMAkGA1UEBhMC +VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T +U0wgQ29ycG9yYXRpb24xNDAyBgNVBAMMK1NTTC5jb20gRVYgUm9vdCBDZXJ0aWZp +Y2F0aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNTIzWhcNNDEwMjEyMTgx +NTIzWjB/MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv +dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjE0MDIGA1UEAwwrU1NMLmNv +bSBFViBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49 +AgEGBSuBBAAiA2IABKoSR5CYG/vvw0AHgyBO8TCCogbR8pKGYfL2IWjKAMTH6kMA +VIbc/R/fALhBYlzccBYy3h+Z1MzFB8gIH2EWB1E9fVwHU+M1OIzfzZ/ZLg1Kthku +WnBaBu2+8KGwytAJKaNjMGEwHQYDVR0OBBYEFFvKXuXe0oGqzagtZFG22XKbl+ZP +MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUW8pe5d7SgarNqC1kUbbZcpuX +5k8wDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2gAMGUCMQCK5kCJN+vp1RPZ +ytRrJPOwPYdGWBrssd9v+1a6cGvHOMzosYxPD/fxZ3YOg9AeUY8CMD32IygmTMZg +h5Mmm7I1HrrW9zzRHM76JTymGoEVW/MSD2zuZYrJh6j5B+BimoxcSg== +-----END CERTIFICATE----- diff --git a/env/lib/python3.6/site-packages/pip/_vendor/certifi/core.py b/env/lib/python3.6/site-packages/pip/_vendor/certifi/core.py new file mode 100644 index 0000000..eab9d1d --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/certifi/core.py @@ -0,0 +1,37 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +""" +certifi.py +~~~~~~~~~~ + +This module returns the installation location of cacert.pem. 
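+
+Illustrative usage sketch (the concrete path varies with the installation)::
+
+    from pip._vendor import certifi   # ``import certifi`` for the standalone package
+
+    ca_bundle = certifi.where()       # absolute path to the bundled cacert.pem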
+""" +import os +import warnings + + +class DeprecatedBundleWarning(DeprecationWarning): + """ + The weak security bundle is being deprecated. Please bother your service + provider to get them to stop using cross-signed roots. + """ + + +def where(): + f = os.path.dirname(__file__) + + return os.path.join(f, 'cacert.pem') + + +def old_where(): + warnings.warn( + "The weak security bundle has been removed. certifi.old_where() is now an alias " + "of certifi.where(). Please update your code to use certifi.where() instead. " + "certifi.old_where() will be removed in 2018.", + DeprecatedBundleWarning + ) + return where() + +if __name__ == '__main__': + print(where()) diff --git a/env/lib/python3.6/site-packages/pip/_vendor/chardet/big5freq.py b/env/lib/python3.6/site-packages/pip/_vendor/chardet/big5freq.py new file mode 100644 index 0000000..38f3251 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/chardet/big5freq.py @@ -0,0 +1,386 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# Big5 frequency table +# by Taiwan's Mandarin Promotion Council +# <http://www.edu.tw:81/mandr/> +# +# 128 --> 0.42261 +# 256 --> 0.57851 +# 512 --> 0.74851 +# 1024 --> 0.89384 +# 2048 --> 0.97583 +# +# Ideal Distribution Ratio = 0.74851/(1-0.74851) =2.98 +# Random Distribution Ration = 512/(5401-512)=0.105 +# +# Typical Distribution Ratio about 25% of Ideal one, still much higher than RDR + +BIG5_TYPICAL_DISTRIBUTION_RATIO = 0.75 + +#Char to FreqOrder table +BIG5_TABLE_SIZE = 5376 + +BIG5_CHAR_TO_FREQ_ORDER = ( + 1,1801,1506, 255,1431, 198, 9, 82, 6,5008, 177, 202,3681,1256,2821, 110, # 16 +3814, 33,3274, 261, 76, 44,2114, 16,2946,2187,1176, 659,3971, 26,3451,2653, # 32 +1198,3972,3350,4202, 410,2215, 302, 590, 361,1964, 8, 204, 58,4510,5009,1932, # 48 + 63,5010,5011, 317,1614, 75, 222, 159,4203,2417,1480,5012,3555,3091, 224,2822, # 64 +3682, 3, 10,3973,1471, 29,2787,1135,2866,1940, 873, 130,3275,1123, 312,5013, # 80 +4511,2052, 507, 252, 682,5014, 142,1915, 124, 206,2947, 34,3556,3204, 64, 604, # 96 +5015,2501,1977,1978, 155,1991, 645, 641,1606,5016,3452, 337, 72, 406,5017, 80, # 112 + 630, 238,3205,1509, 263, 939,1092,2654, 756,1440,1094,3453, 449, 69,2987, 591, # 128 + 179,2096, 471, 115,2035,1844, 60, 50,2988, 134, 806,1869, 734,2036,3454, 180, # 144 + 995,1607, 156, 537,2907, 688,5018, 319,1305, 779,2145, 514,2379, 298,4512, 359, # 160 +2502, 90,2716,1338, 663, 11, 906,1099,2553, 20,2441, 182, 532,1716,5019, 732, # 176 +1376,4204,1311,1420,3206, 25,2317,1056, 113, 399, 382,1950, 242,3455,2474, 529, # 192 +3276, 475,1447,3683,5020, 117, 21, 656, 810,1297,2300,2334,3557,5021, 126,4205, # 208 + 706, 456, 150, 613,4513, 71,1118,2037,4206, 145,3092, 85, 835, 486,2115,1246, # 224 +1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,5022,2128,2359, 347,3815, 221, # 240 +3558,3135,5023,1956,1153,4207, 83, 296,1199,3093, 192, 624, 93,5024, 822,1898, # 256 +2823,3136, 795,2065, 991,1554,1542,1592, 27, 43,2867, 859, 139,1456, 860,4514, # 272 + 437, 712,3974, 164,2397,3137, 695, 211,3037,2097, 195,3975,1608,3559,3560,3684, # 288 +3976, 234, 811,2989,2098,3977,2233,1441,3561,1615,2380, 668,2077,1638, 305, 228, # 304 +1664,4515, 467, 415,5025, 262,2099,1593, 239, 108, 300, 200,1033, 512,1247,2078, # 320 +5026,5027,2176,3207,3685,2682, 593, 845,1062,3277, 88,1723,2038,3978,1951, 212, # 336 + 266, 152, 149, 468,1899,4208,4516, 77, 187,5028,3038, 37, 5,2990,5029,3979, # 352 +5030,5031, 39,2524,4517,2908,3208,2079, 55, 148, 74,4518, 545, 483,1474,1029, # 368 +1665, 217,1870,1531,3138,1104,2655,4209, 24, 172,3562, 900,3980,3563,3564,4519, # 384 + 32,1408,2824,1312, 329, 487,2360,2251,2717, 784,2683, 4,3039,3351,1427,1789, # 400 + 188, 109, 499,5032,3686,1717,1790, 888,1217,3040,4520,5033,3565,5034,3352,1520, # 416 +3687,3981, 196,1034, 775,5035,5036, 929,1816, 249, 439, 38,5037,1063,5038, 794, # 432 +3982,1435,2301, 46, 178,3278,2066,5039,2381,5040, 214,1709,4521, 804, 35, 707, # 448 + 324,3688,1601,2554, 140, 459,4210,5041,5042,1365, 839, 272, 978,2262,2580,3456, # 464 +2129,1363,3689,1423, 697, 100,3094, 48, 70,1231, 495,3139,2196,5043,1294,5044, # 480 +2080, 462, 586,1042,3279, 853, 256, 988, 185,2382,3457,1698, 434,1084,5045,3458, # 496 + 314,2625,2788,4522,2335,2336, 569,2285, 637,1817,2525, 
757,1162,1879,1616,3459, # 512 + 287,1577,2116, 768,4523,1671,2868,3566,2526,1321,3816, 909,2418,5046,4211, 933, # 528 +3817,4212,2053,2361,1222,4524, 765,2419,1322, 786,4525,5047,1920,1462,1677,2909, # 544 +1699,5048,4526,1424,2442,3140,3690,2600,3353,1775,1941,3460,3983,4213, 309,1369, # 560 +1130,2825, 364,2234,1653,1299,3984,3567,3985,3986,2656, 525,1085,3041, 902,2001, # 576 +1475, 964,4527, 421,1845,1415,1057,2286, 940,1364,3141, 376,4528,4529,1381, 7, # 592 +2527, 983,2383, 336,1710,2684,1846, 321,3461, 559,1131,3042,2752,1809,1132,1313, # 608 + 265,1481,1858,5049, 352,1203,2826,3280, 167,1089, 420,2827, 776, 792,1724,3568, # 624 +4214,2443,3281,5050,4215,5051, 446, 229, 333,2753, 901,3818,1200,1557,4530,2657, # 640 +1921, 395,2754,2685,3819,4216,1836, 125, 916,3209,2626,4531,5052,5053,3820,5054, # 656 +5055,5056,4532,3142,3691,1133,2555,1757,3462,1510,2318,1409,3569,5057,2146, 438, # 672 +2601,2910,2384,3354,1068, 958,3043, 461, 311,2869,2686,4217,1916,3210,4218,1979, # 688 + 383, 750,2755,2627,4219, 274, 539, 385,1278,1442,5058,1154,1965, 384, 561, 210, # 704 + 98,1295,2556,3570,5059,1711,2420,1482,3463,3987,2911,1257, 129,5060,3821, 642, # 720 + 523,2789,2790,2658,5061, 141,2235,1333, 68, 176, 441, 876, 907,4220, 603,2602, # 736 + 710, 171,3464, 404, 549, 18,3143,2398,1410,3692,1666,5062,3571,4533,2912,4534, # 752 +5063,2991, 368,5064, 146, 366, 99, 871,3693,1543, 748, 807,1586,1185, 22,2263, # 768 + 379,3822,3211,5065,3212, 505,1942,2628,1992,1382,2319,5066, 380,2362, 218, 702, # 784 +1818,1248,3465,3044,3572,3355,3282,5067,2992,3694, 930,3283,3823,5068, 59,5069, # 800 + 585, 601,4221, 497,3466,1112,1314,4535,1802,5070,1223,1472,2177,5071, 749,1837, # 816 + 690,1900,3824,1773,3988,1476, 429,1043,1791,2236,2117, 917,4222, 447,1086,1629, # 832 +5072, 556,5073,5074,2021,1654, 844,1090, 105, 550, 966,1758,2828,1008,1783, 686, # 848 +1095,5075,2287, 793,1602,5076,3573,2603,4536,4223,2948,2302,4537,3825, 980,2503, # 864 + 544, 353, 527,4538, 908,2687,2913,5077, 381,2629,1943,1348,5078,1341,1252, 560, # 880 +3095,5079,3467,2870,5080,2054, 973, 886,2081, 143,4539,5081,5082, 157,3989, 496, # 896 +4224, 57, 840, 540,2039,4540,4541,3468,2118,1445, 970,2264,1748,1966,2082,4225, # 912 +3144,1234,1776,3284,2829,3695, 773,1206,2130,1066,2040,1326,3990,1738,1725,4226, # 928 + 279,3145, 51,1544,2604, 423,1578,2131,2067, 173,4542,1880,5083,5084,1583, 264, # 944 + 610,3696,4543,2444, 280, 154,5085,5086,5087,1739, 338,1282,3096, 693,2871,1411, # 960 +1074,3826,2445,5088,4544,5089,5090,1240, 952,2399,5091,2914,1538,2688, 685,1483, # 976 +4227,2475,1436, 953,4228,2055,4545, 671,2400, 79,4229,2446,3285, 608, 567,2689, # 992 +3469,4230,4231,1691, 393,1261,1792,2401,5092,4546,5093,5094,5095,5096,1383,1672, # 1008 +3827,3213,1464, 522,1119, 661,1150, 216, 675,4547,3991,1432,3574, 609,4548,2690, # 1024 +2402,5097,5098,5099,4232,3045, 0,5100,2476, 315, 231,2447, 301,3356,4549,2385, # 1040 +5101, 233,4233,3697,1819,4550,4551,5102, 96,1777,1315,2083,5103, 257,5104,1810, # 1056 +3698,2718,1139,1820,4234,2022,1124,2164,2791,1778,2659,5105,3097, 363,1655,3214, # 1072 +5106,2993,5107,5108,5109,3992,1567,3993, 718, 103,3215, 849,1443, 341,3357,2949, # 1088 +1484,5110,1712, 127, 67, 339,4235,2403, 679,1412, 821,5111,5112, 834, 738, 351, # 1104 +2994,2147, 846, 235,1497,1881, 418,1993,3828,2719, 186,1100,2148,2756,3575,1545, # 1120 +1355,2950,2872,1377, 583,3994,4236,2581,2995,5113,1298,3699,1078,2557,3700,2363, # 1136 + 78,3829,3830, 267,1289,2100,2002,1594,4237, 348, 369,1274,2197,2178,1838,4552, # 1152 
+1821,2830,3701,2757,2288,2003,4553,2951,2758, 144,3358, 882,4554,3995,2759,3470, # 1168 +4555,2915,5114,4238,1726, 320,5115,3996,3046, 788,2996,5116,2831,1774,1327,2873, # 1184 +3997,2832,5117,1306,4556,2004,1700,3831,3576,2364,2660, 787,2023, 506, 824,3702, # 1200 + 534, 323,4557,1044,3359,2024,1901, 946,3471,5118,1779,1500,1678,5119,1882,4558, # 1216 + 165, 243,4559,3703,2528, 123, 683,4239, 764,4560, 36,3998,1793, 589,2916, 816, # 1232 + 626,1667,3047,2237,1639,1555,1622,3832,3999,5120,4000,2874,1370,1228,1933, 891, # 1248 +2084,2917, 304,4240,5121, 292,2997,2720,3577, 691,2101,4241,1115,4561, 118, 662, # 1264 +5122, 611,1156, 854,2386,1316,2875, 2, 386, 515,2918,5123,5124,3286, 868,2238, # 1280 +1486, 855,2661, 785,2216,3048,5125,1040,3216,3578,5126,3146, 448,5127,1525,5128, # 1296 +2165,4562,5129,3833,5130,4242,2833,3579,3147, 503, 818,4001,3148,1568, 814, 676, # 1312 +1444, 306,1749,5131,3834,1416,1030, 197,1428, 805,2834,1501,4563,5132,5133,5134, # 1328 +1994,5135,4564,5136,5137,2198, 13,2792,3704,2998,3149,1229,1917,5138,3835,2132, # 1344 +5139,4243,4565,2404,3580,5140,2217,1511,1727,1120,5141,5142, 646,3836,2448, 307, # 1360 +5143,5144,1595,3217,5145,5146,5147,3705,1113,1356,4002,1465,2529,2530,5148, 519, # 1376 +5149, 128,2133, 92,2289,1980,5150,4003,1512, 342,3150,2199,5151,2793,2218,1981, # 1392 +3360,4244, 290,1656,1317, 789, 827,2365,5152,3837,4566, 562, 581,4004,5153, 401, # 1408 +4567,2252, 94,4568,5154,1399,2794,5155,1463,2025,4569,3218,1944,5156, 828,1105, # 1424 +4245,1262,1394,5157,4246, 605,4570,5158,1784,2876,5159,2835, 819,2102, 578,2200, # 1440 +2952,5160,1502, 436,3287,4247,3288,2836,4005,2919,3472,3473,5161,2721,2320,5162, # 1456 +5163,2337,2068, 23,4571, 193, 826,3838,2103, 699,1630,4248,3098, 390,1794,1064, # 1472 +3581,5164,1579,3099,3100,1400,5165,4249,1839,1640,2877,5166,4572,4573, 137,4250, # 1488 + 598,3101,1967, 780, 104, 974,2953,5167, 278, 899, 253, 402, 572, 504, 493,1339, # 1504 +5168,4006,1275,4574,2582,2558,5169,3706,3049,3102,2253, 565,1334,2722, 863, 41, # 1520 +5170,5171,4575,5172,1657,2338, 19, 463,2760,4251, 606,5173,2999,3289,1087,2085, # 1536 +1323,2662,3000,5174,1631,1623,1750,4252,2691,5175,2878, 791,2723,2663,2339, 232, # 1552 +2421,5176,3001,1498,5177,2664,2630, 755,1366,3707,3290,3151,2026,1609, 119,1918, # 1568 +3474, 862,1026,4253,5178,4007,3839,4576,4008,4577,2265,1952,2477,5179,1125, 817, # 1584 +4254,4255,4009,1513,1766,2041,1487,4256,3050,3291,2837,3840,3152,5180,5181,1507, # 1600 +5182,2692, 733, 40,1632,1106,2879, 345,4257, 841,2531, 230,4578,3002,1847,3292, # 1616 +3475,5183,1263, 986,3476,5184, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562, # 1632 +4010,4011,2954, 967,2761,2665,1349, 592,2134,1692,3361,3003,1995,4258,1679,4012, # 1648 +1902,2188,5185, 739,3708,2724,1296,1290,5186,4259,2201,2202,1922,1563,2605,2559, # 1664 +1871,2762,3004,5187, 435,5188, 343,1108, 596, 17,1751,4579,2239,3477,3709,5189, # 1680 +4580, 294,3582,2955,1693, 477, 979, 281,2042,3583, 643,2043,3710,2631,2795,2266, # 1696 +1031,2340,2135,2303,3584,4581, 367,1249,2560,5190,3585,5191,4582,1283,3362,2005, # 1712 + 240,1762,3363,4583,4584, 836,1069,3153, 474,5192,2149,2532, 268,3586,5193,3219, # 1728 +1521,1284,5194,1658,1546,4260,5195,3587,3588,5196,4261,3364,2693,1685,4262, 961, # 1744 +1673,2632, 190,2006,2203,3841,4585,4586,5197, 570,2504,3711,1490,5198,4587,2633, # 1760 +3293,1957,4588, 584,1514, 396,1045,1945,5199,4589,1968,2449,5200,5201,4590,4013, # 1776 + 619,5202,3154,3294, 215,2007,2796,2561,3220,4591,3221,4592, 763,4263,3842,4593, # 1792 
+5203,5204,1958,1767,2956,3365,3712,1174, 452,1477,4594,3366,3155,5205,2838,1253, # 1808 +2387,2189,1091,2290,4264, 492,5206, 638,1169,1825,2136,1752,4014, 648, 926,1021, # 1824 +1324,4595, 520,4596, 997, 847,1007, 892,4597,3843,2267,1872,3713,2405,1785,4598, # 1840 +1953,2957,3103,3222,1728,4265,2044,3714,4599,2008,1701,3156,1551, 30,2268,4266, # 1856 +5207,2027,4600,3589,5208, 501,5209,4267, 594,3478,2166,1822,3590,3479,3591,3223, # 1872 + 829,2839,4268,5210,1680,3157,1225,4269,5211,3295,4601,4270,3158,2341,5212,4602, # 1888 +4271,5213,4015,4016,5214,1848,2388,2606,3367,5215,4603, 374,4017, 652,4272,4273, # 1904 + 375,1140, 798,5216,5217,5218,2366,4604,2269, 546,1659, 138,3051,2450,4605,5219, # 1920 +2254, 612,1849, 910, 796,3844,1740,1371, 825,3845,3846,5220,2920,2562,5221, 692, # 1936 + 444,3052,2634, 801,4606,4274,5222,1491, 244,1053,3053,4275,4276, 340,5223,4018, # 1952 +1041,3005, 293,1168, 87,1357,5224,1539, 959,5225,2240, 721, 694,4277,3847, 219, # 1968 +1478, 644,1417,3368,2666,1413,1401,1335,1389,4019,5226,5227,3006,2367,3159,1826, # 1984 + 730,1515, 184,2840, 66,4607,5228,1660,2958, 246,3369, 378,1457, 226,3480, 975, # 2000 +4020,2959,1264,3592, 674, 696,5229, 163,5230,1141,2422,2167, 713,3593,3370,4608, # 2016 +4021,5231,5232,1186, 15,5233,1079,1070,5234,1522,3224,3594, 276,1050,2725, 758, # 2032 +1126, 653,2960,3296,5235,2342, 889,3595,4022,3104,3007, 903,1250,4609,4023,3481, # 2048 +3596,1342,1681,1718, 766,3297, 286, 89,2961,3715,5236,1713,5237,2607,3371,3008, # 2064 +5238,2962,2219,3225,2880,5239,4610,2505,2533, 181, 387,1075,4024, 731,2190,3372, # 2080 +5240,3298, 310, 313,3482,2304, 770,4278, 54,3054, 189,4611,3105,3848,4025,5241, # 2096 +1230,1617,1850, 355,3597,4279,4612,3373, 111,4280,3716,1350,3160,3483,3055,4281, # 2112 +2150,3299,3598,5242,2797,4026,4027,3009, 722,2009,5243,1071, 247,1207,2343,2478, # 2128 +1378,4613,2010, 864,1437,1214,4614, 373,3849,1142,2220, 667,4615, 442,2763,2563, # 2144 +3850,4028,1969,4282,3300,1840, 837, 170,1107, 934,1336,1883,5244,5245,2119,4283, # 2160 +2841, 743,1569,5246,4616,4284, 582,2389,1418,3484,5247,1803,5248, 357,1395,1729, # 2176 +3717,3301,2423,1564,2241,5249,3106,3851,1633,4617,1114,2086,4285,1532,5250, 482, # 2192 +2451,4618,5251,5252,1492, 833,1466,5253,2726,3599,1641,2842,5254,1526,1272,3718, # 2208 +4286,1686,1795, 416,2564,1903,1954,1804,5255,3852,2798,3853,1159,2321,5256,2881, # 2224 +4619,1610,1584,3056,2424,2764, 443,3302,1163,3161,5257,5258,4029,5259,4287,2506, # 2240 +3057,4620,4030,3162,2104,1647,3600,2011,1873,4288,5260,4289, 431,3485,5261, 250, # 2256 + 97, 81,4290,5262,1648,1851,1558, 160, 848,5263, 866, 740,1694,5264,2204,2843, # 2272 +3226,4291,4621,3719,1687, 950,2479, 426, 469,3227,3720,3721,4031,5265,5266,1188, # 2288 + 424,1996, 861,3601,4292,3854,2205,2694, 168,1235,3602,4293,5267,2087,1674,4622, # 2304 +3374,3303, 220,2565,1009,5268,3855, 670,3010, 332,1208, 717,5269,5270,3603,2452, # 2320 +4032,3375,5271, 513,5272,1209,2882,3376,3163,4623,1080,5273,5274,5275,5276,2534, # 2336 +3722,3604, 815,1587,4033,4034,5277,3605,3486,3856,1254,4624,1328,3058,1390,4035, # 2352 +1741,4036,3857,4037,5278, 236,3858,2453,3304,5279,5280,3723,3859,1273,3860,4625, # 2368 +5281, 308,5282,4626, 245,4627,1852,2480,1307,2583, 430, 715,2137,2454,5283, 270, # 2384 + 199,2883,4038,5284,3606,2727,1753, 761,1754, 725,1661,1841,4628,3487,3724,5285, # 2400 +5286, 587, 14,3305, 227,2608, 326, 480,2270, 943,2765,3607, 291, 650,1884,5287, # 2416 +1702,1226, 102,1547, 62,3488, 904,4629,3489,1164,4294,5288,5289,1224,1548,2766, # 2432 + 
391, 498,1493,5290,1386,1419,5291,2056,1177,4630, 813, 880,1081,2368, 566,1145, # 2448 +4631,2291,1001,1035,2566,2609,2242, 394,1286,5292,5293,2069,5294, 86,1494,1730, # 2464 +4039, 491,1588, 745, 897,2963, 843,3377,4040,2767,2884,3306,1768, 998,2221,2070, # 2480 + 397,1827,1195,1970,3725,3011,3378, 284,5295,3861,2507,2138,2120,1904,5296,4041, # 2496 +2151,4042,4295,1036,3490,1905, 114,2567,4296, 209,1527,5297,5298,2964,2844,2635, # 2512 +2390,2728,3164, 812,2568,5299,3307,5300,1559, 737,1885,3726,1210, 885, 28,2695, # 2528 +3608,3862,5301,4297,1004,1780,4632,5302, 346,1982,2222,2696,4633,3863,1742, 797, # 2544 +1642,4043,1934,1072,1384,2152, 896,4044,3308,3727,3228,2885,3609,5303,2569,1959, # 2560 +4634,2455,1786,5304,5305,5306,4045,4298,1005,1308,3728,4299,2729,4635,4636,1528, # 2576 +2610, 161,1178,4300,1983, 987,4637,1101,4301, 631,4046,1157,3229,2425,1343,1241, # 2592 +1016,2243,2570, 372, 877,2344,2508,1160, 555,1935, 911,4047,5307, 466,1170, 169, # 2608 +1051,2921,2697,3729,2481,3012,1182,2012,2571,1251,2636,5308, 992,2345,3491,1540, # 2624 +2730,1201,2071,2406,1997,2482,5309,4638, 528,1923,2191,1503,1874,1570,2369,3379, # 2640 +3309,5310, 557,1073,5311,1828,3492,2088,2271,3165,3059,3107, 767,3108,2799,4639, # 2656 +1006,4302,4640,2346,1267,2179,3730,3230, 778,4048,3231,2731,1597,2667,5312,4641, # 2672 +5313,3493,5314,5315,5316,3310,2698,1433,3311, 131, 95,1504,4049, 723,4303,3166, # 2688 +1842,3610,2768,2192,4050,2028,2105,3731,5317,3013,4051,1218,5318,3380,3232,4052, # 2704 +4304,2584, 248,1634,3864, 912,5319,2845,3732,3060,3865, 654, 53,5320,3014,5321, # 2720 +1688,4642, 777,3494,1032,4053,1425,5322, 191, 820,2121,2846, 971,4643, 931,3233, # 2736 + 135, 664, 783,3866,1998, 772,2922,1936,4054,3867,4644,2923,3234, 282,2732, 640, # 2752 +1372,3495,1127, 922, 325,3381,5323,5324, 711,2045,5325,5326,4055,2223,2800,1937, # 2768 +4056,3382,2224,2255,3868,2305,5327,4645,3869,1258,3312,4057,3235,2139,2965,4058, # 2784 +4059,5328,2225, 258,3236,4646, 101,1227,5329,3313,1755,5330,1391,3314,5331,2924, # 2800 +2057, 893,5332,5333,5334,1402,4305,2347,5335,5336,3237,3611,5337,5338, 878,1325, # 2816 +1781,2801,4647, 259,1385,2585, 744,1183,2272,4648,5339,4060,2509,5340, 684,1024, # 2832 +4306,5341, 472,3612,3496,1165,3315,4061,4062, 322,2153, 881, 455,1695,1152,1340, # 2848 + 660, 554,2154,4649,1058,4650,4307, 830,1065,3383,4063,4651,1924,5342,1703,1919, # 2864 +5343, 932,2273, 122,5344,4652, 947, 677,5345,3870,2637, 297,1906,1925,2274,4653, # 2880 +2322,3316,5346,5347,4308,5348,4309, 84,4310, 112, 989,5349, 547,1059,4064, 701, # 2896 +3613,1019,5350,4311,5351,3497, 942, 639, 457,2306,2456, 993,2966, 407, 851, 494, # 2912 +4654,3384, 927,5352,1237,5353,2426,3385, 573,4312, 680, 921,2925,1279,1875, 285, # 2928 + 790,1448,1984, 719,2168,5354,5355,4655,4065,4066,1649,5356,1541, 563,5357,1077, # 2944 +5358,3386,3061,3498, 511,3015,4067,4068,3733,4069,1268,2572,3387,3238,4656,4657, # 2960 +5359, 535,1048,1276,1189,2926,2029,3167,1438,1373,2847,2967,1134,2013,5360,4313, # 2976 +1238,2586,3109,1259,5361, 700,5362,2968,3168,3734,4314,5363,4315,1146,1876,1907, # 2992 +4658,2611,4070, 781,2427, 132,1589, 203, 147, 273,2802,2407, 898,1787,2155,4071, # 3008 +4072,5364,3871,2803,5365,5366,4659,4660,5367,3239,5368,1635,3872, 965,5369,1805, # 3024 +2699,1516,3614,1121,1082,1329,3317,4073,1449,3873, 65,1128,2848,2927,2769,1590, # 3040 +3874,5370,5371, 12,2668, 45, 976,2587,3169,4661, 517,2535,1013,1037,3240,5372, # 3056 +3875,2849,5373,3876,5374,3499,5375,2612, 614,1999,2323,3877,3110,2733,2638,5376, # 3072 
+2588,4316, 599,1269,5377,1811,3735,5378,2700,3111, 759,1060, 489,1806,3388,3318, # 3088 +1358,5379,5380,2391,1387,1215,2639,2256, 490,5381,5382,4317,1759,2392,2348,5383, # 3104 +4662,3878,1908,4074,2640,1807,3241,4663,3500,3319,2770,2349, 874,5384,5385,3501, # 3120 +3736,1859, 91,2928,3737,3062,3879,4664,5386,3170,4075,2669,5387,3502,1202,1403, # 3136 +3880,2969,2536,1517,2510,4665,3503,2511,5388,4666,5389,2701,1886,1495,1731,4076, # 3152 +2370,4667,5390,2030,5391,5392,4077,2702,1216, 237,2589,4318,2324,4078,3881,4668, # 3168 +4669,2703,3615,3504, 445,4670,5393,5394,5395,5396,2771, 61,4079,3738,1823,4080, # 3184 +5397, 687,2046, 935, 925, 405,2670, 703,1096,1860,2734,4671,4081,1877,1367,2704, # 3200 +3389, 918,2106,1782,2483, 334,3320,1611,1093,4672, 564,3171,3505,3739,3390, 945, # 3216 +2641,2058,4673,5398,1926, 872,4319,5399,3506,2705,3112, 349,4320,3740,4082,4674, # 3232 +3882,4321,3741,2156,4083,4675,4676,4322,4677,2408,2047, 782,4084, 400, 251,4323, # 3248 +1624,5400,5401, 277,3742, 299,1265, 476,1191,3883,2122,4324,4325,1109, 205,5402, # 3264 +2590,1000,2157,3616,1861,5403,5404,5405,4678,5406,4679,2573, 107,2484,2158,4085, # 3280 +3507,3172,5407,1533, 541,1301, 158, 753,4326,2886,3617,5408,1696, 370,1088,4327, # 3296 +4680,3618, 579, 327, 440, 162,2244, 269,1938,1374,3508, 968,3063, 56,1396,3113, # 3312 +2107,3321,3391,5409,1927,2159,4681,3016,5410,3619,5411,5412,3743,4682,2485,5413, # 3328 +2804,5414,1650,4683,5415,2613,5416,5417,4086,2671,3392,1149,3393,4087,3884,4088, # 3344 +5418,1076, 49,5419, 951,3242,3322,3323, 450,2850, 920,5420,1812,2805,2371,4328, # 3360 +1909,1138,2372,3885,3509,5421,3243,4684,1910,1147,1518,2428,4685,3886,5422,4686, # 3376 +2393,2614, 260,1796,3244,5423,5424,3887,3324, 708,5425,3620,1704,5426,3621,1351, # 3392 +1618,3394,3017,1887, 944,4329,3395,4330,3064,3396,4331,5427,3744, 422, 413,1714, # 3408 +3325, 500,2059,2350,4332,2486,5428,1344,1911, 954,5429,1668,5430,5431,4089,2409, # 3424 +4333,3622,3888,4334,5432,2307,1318,2512,3114, 133,3115,2887,4687, 629, 31,2851, # 3440 +2706,3889,4688, 850, 949,4689,4090,2970,1732,2089,4335,1496,1853,5433,4091, 620, # 3456 +3245, 981,1242,3745,3397,1619,3746,1643,3326,2140,2457,1971,1719,3510,2169,5434, # 3472 +3246,5435,5436,3398,1829,5437,1277,4690,1565,2048,5438,1636,3623,3116,5439, 869, # 3488 +2852, 655,3890,3891,3117,4092,3018,3892,1310,3624,4691,5440,5441,5442,1733, 558, # 3504 +4692,3747, 335,1549,3065,1756,4336,3748,1946,3511,1830,1291,1192, 470,2735,2108, # 3520 +2806, 913,1054,4093,5443,1027,5444,3066,4094,4693, 982,2672,3399,3173,3512,3247, # 3536 +3248,1947,2807,5445, 571,4694,5446,1831,5447,3625,2591,1523,2429,5448,2090, 984, # 3552 +4695,3749,1960,5449,3750, 852, 923,2808,3513,3751, 969,1519, 999,2049,2325,1705, # 3568 +5450,3118, 615,1662, 151, 597,4095,2410,2326,1049, 275,4696,3752,4337, 568,3753, # 3584 +3626,2487,4338,3754,5451,2430,2275, 409,3249,5452,1566,2888,3514,1002, 769,2853, # 3600 + 194,2091,3174,3755,2226,3327,4339, 628,1505,5453,5454,1763,2180,3019,4096, 521, # 3616 +1161,2592,1788,2206,2411,4697,4097,1625,4340,4341, 412, 42,3119, 464,5455,2642, # 3632 +4698,3400,1760,1571,2889,3515,2537,1219,2207,3893,2643,2141,2373,4699,4700,3328, # 3648 +1651,3401,3627,5456,5457,3628,2488,3516,5458,3756,5459,5460,2276,2092, 460,5461, # 3664 +4701,5462,3020, 962, 588,3629, 289,3250,2644,1116, 52,5463,3067,1797,5464,5465, # 3680 +5466,1467,5467,1598,1143,3757,4342,1985,1734,1067,4702,1280,3402, 465,4703,1572, # 3696 + 510,5468,1928,2245,1813,1644,3630,5469,4704,3758,5470,5471,2673,1573,1534,5472, # 3712 
+5473, 536,1808,1761,3517,3894,3175,2645,5474,5475,5476,4705,3518,2929,1912,2809, # 3728 +5477,3329,1122, 377,3251,5478, 360,5479,5480,4343,1529, 551,5481,2060,3759,1769, # 3744 +2431,5482,2930,4344,3330,3120,2327,2109,2031,4706,1404, 136,1468,1479, 672,1171, # 3760 +3252,2308, 271,3176,5483,2772,5484,2050, 678,2736, 865,1948,4707,5485,2014,4098, # 3776 +2971,5486,2737,2227,1397,3068,3760,4708,4709,1735,2931,3403,3631,5487,3895, 509, # 3792 +2854,2458,2890,3896,5488,5489,3177,3178,4710,4345,2538,4711,2309,1166,1010, 552, # 3808 + 681,1888,5490,5491,2972,2973,4099,1287,1596,1862,3179, 358, 453, 736, 175, 478, # 3824 +1117, 905,1167,1097,5492,1854,1530,5493,1706,5494,2181,3519,2292,3761,3520,3632, # 3840 +4346,2093,4347,5495,3404,1193,2489,4348,1458,2193,2208,1863,1889,1421,3331,2932, # 3856 +3069,2182,3521, 595,2123,5496,4100,5497,5498,4349,1707,2646, 223,3762,1359, 751, # 3872 +3121, 183,3522,5499,2810,3021, 419,2374, 633, 704,3897,2394, 241,5500,5501,5502, # 3888 + 838,3022,3763,2277,2773,2459,3898,1939,2051,4101,1309,3122,2246,1181,5503,1136, # 3904 +2209,3899,2375,1446,4350,2310,4712,5504,5505,4351,1055,2615, 484,3764,5506,4102, # 3920 + 625,4352,2278,3405,1499,4353,4103,5507,4104,4354,3253,2279,2280,3523,5508,5509, # 3936 +2774, 808,2616,3765,3406,4105,4355,3123,2539, 526,3407,3900,4356, 955,5510,1620, # 3952 +4357,2647,2432,5511,1429,3766,1669,1832, 994, 928,5512,3633,1260,5513,5514,5515, # 3968 +1949,2293, 741,2933,1626,4358,2738,2460, 867,1184, 362,3408,1392,5516,5517,4106, # 3984 +4359,1770,1736,3254,2934,4713,4714,1929,2707,1459,1158,5518,3070,3409,2891,1292, # 4000 +1930,2513,2855,3767,1986,1187,2072,2015,2617,4360,5519,2574,2514,2170,3768,2490, # 4016 +3332,5520,3769,4715,5521,5522, 666,1003,3023,1022,3634,4361,5523,4716,1814,2257, # 4032 + 574,3901,1603, 295,1535, 705,3902,4362, 283, 858, 417,5524,5525,3255,4717,4718, # 4048 +3071,1220,1890,1046,2281,2461,4107,1393,1599, 689,2575, 388,4363,5526,2491, 802, # 4064 +5527,2811,3903,2061,1405,2258,5528,4719,3904,2110,1052,1345,3256,1585,5529, 809, # 4080 +5530,5531,5532, 575,2739,3524, 956,1552,1469,1144,2328,5533,2329,1560,2462,3635, # 4096 +3257,4108, 616,2210,4364,3180,2183,2294,5534,1833,5535,3525,4720,5536,1319,3770, # 4112 +3771,1211,3636,1023,3258,1293,2812,5537,5538,5539,3905, 607,2311,3906, 762,2892, # 4128 +1439,4365,1360,4721,1485,3072,5540,4722,1038,4366,1450,2062,2648,4367,1379,4723, # 4144 +2593,5541,5542,4368,1352,1414,2330,2935,1172,5543,5544,3907,3908,4724,1798,1451, # 4160 +5545,5546,5547,5548,2936,4109,4110,2492,2351, 411,4111,4112,3637,3333,3124,4725, # 4176 +1561,2674,1452,4113,1375,5549,5550, 47,2974, 316,5551,1406,1591,2937,3181,5552, # 4192 +1025,2142,3125,3182, 354,2740, 884,2228,4369,2412, 508,3772, 726,3638, 996,2433, # 4208 +3639, 729,5553, 392,2194,1453,4114,4726,3773,5554,5555,2463,3640,2618,1675,2813, # 4224 + 919,2352,2975,2353,1270,4727,4115, 73,5556,5557, 647,5558,3259,2856,2259,1550, # 4240 +1346,3024,5559,1332, 883,3526,5560,5561,5562,5563,3334,2775,5564,1212, 831,1347, # 4256 +4370,4728,2331,3909,1864,3073, 720,3910,4729,4730,3911,5565,4371,5566,5567,4731, # 4272 +5568,5569,1799,4732,3774,2619,4733,3641,1645,2376,4734,5570,2938, 669,2211,2675, # 4288 +2434,5571,2893,5572,5573,1028,3260,5574,4372,2413,5575,2260,1353,5576,5577,4735, # 4304 +3183, 518,5578,4116,5579,4373,1961,5580,2143,4374,5581,5582,3025,2354,2355,3912, # 4320 + 516,1834,1454,4117,2708,4375,4736,2229,2620,1972,1129,3642,5583,2776,5584,2976, # 4336 +1422, 577,1470,3026,1524,3410,5585,5586, 432,4376,3074,3527,5587,2594,1455,2515, # 
4352 +2230,1973,1175,5588,1020,2741,4118,3528,4737,5589,2742,5590,1743,1361,3075,3529, # 4368 +2649,4119,4377,4738,2295, 895, 924,4378,2171, 331,2247,3076, 166,1627,3077,1098, # 4384 +5591,1232,2894,2231,3411,4739, 657, 403,1196,2377, 542,3775,3412,1600,4379,3530, # 4400 +5592,4740,2777,3261, 576, 530,1362,4741,4742,2540,2676,3776,4120,5593, 842,3913, # 4416 +5594,2814,2032,1014,4121, 213,2709,3413, 665, 621,4380,5595,3777,2939,2435,5596, # 4432 +2436,3335,3643,3414,4743,4381,2541,4382,4744,3644,1682,4383,3531,1380,5597, 724, # 4448 +2282, 600,1670,5598,1337,1233,4745,3126,2248,5599,1621,4746,5600, 651,4384,5601, # 4464 +1612,4385,2621,5602,2857,5603,2743,2312,3078,5604, 716,2464,3079, 174,1255,2710, # 4480 +4122,3645, 548,1320,1398, 728,4123,1574,5605,1891,1197,3080,4124,5606,3081,3082, # 4496 +3778,3646,3779, 747,5607, 635,4386,4747,5608,5609,5610,4387,5611,5612,4748,5613, # 4512 +3415,4749,2437, 451,5614,3780,2542,2073,4388,2744,4389,4125,5615,1764,4750,5616, # 4528 +4390, 350,4751,2283,2395,2493,5617,4391,4126,2249,1434,4127, 488,4752, 458,4392, # 4544 +4128,3781, 771,1330,2396,3914,2576,3184,2160,2414,1553,2677,3185,4393,5618,2494, # 4560 +2895,2622,1720,2711,4394,3416,4753,5619,2543,4395,5620,3262,4396,2778,5621,2016, # 4576 +2745,5622,1155,1017,3782,3915,5623,3336,2313, 201,1865,4397,1430,5624,4129,5625, # 4592 +5626,5627,5628,5629,4398,1604,5630, 414,1866, 371,2595,4754,4755,3532,2017,3127, # 4608 +4756,1708, 960,4399, 887, 389,2172,1536,1663,1721,5631,2232,4130,2356,2940,1580, # 4624 +5632,5633,1744,4757,2544,4758,4759,5634,4760,5635,2074,5636,4761,3647,3417,2896, # 4640 +4400,5637,4401,2650,3418,2815, 673,2712,2465, 709,3533,4131,3648,4402,5638,1148, # 4656 + 502, 634,5639,5640,1204,4762,3649,1575,4763,2623,3783,5641,3784,3128, 948,3263, # 4672 + 121,1745,3916,1110,5642,4403,3083,2516,3027,4132,3785,1151,1771,3917,1488,4133, # 4688 +1987,5643,2438,3534,5644,5645,2094,5646,4404,3918,1213,1407,2816, 531,2746,2545, # 4704 +3264,1011,1537,4764,2779,4405,3129,1061,5647,3786,3787,1867,2897,5648,2018, 120, # 4720 +4406,4407,2063,3650,3265,2314,3919,2678,3419,1955,4765,4134,5649,3535,1047,2713, # 4736 +1266,5650,1368,4766,2858, 649,3420,3920,2546,2747,1102,2859,2679,5651,5652,2000, # 4752 +5653,1111,3651,2977,5654,2495,3921,3652,2817,1855,3421,3788,5655,5656,3422,2415, # 4768 +2898,3337,3266,3653,5657,2577,5658,3654,2818,4135,1460, 856,5659,3655,5660,2899, # 4784 +2978,5661,2900,3922,5662,4408, 632,2517, 875,3923,1697,3924,2296,5663,5664,4767, # 4800 +3028,1239, 580,4768,4409,5665, 914, 936,2075,1190,4136,1039,2124,5666,5667,5668, # 4816 +5669,3423,1473,5670,1354,4410,3925,4769,2173,3084,4137, 915,3338,4411,4412,3339, # 4832 +1605,1835,5671,2748, 398,3656,4413,3926,4138, 328,1913,2860,4139,3927,1331,4414, # 4848 +3029, 937,4415,5672,3657,4140,4141,3424,2161,4770,3425, 524, 742, 538,3085,1012, # 4864 +5673,5674,3928,2466,5675, 658,1103, 225,3929,5676,5677,4771,5678,4772,5679,3267, # 4880 +1243,5680,4142, 963,2250,4773,5681,2714,3658,3186,5682,5683,2596,2332,5684,4774, # 4896 +5685,5686,5687,3536, 957,3426,2547,2033,1931,2941,2467, 870,2019,3659,1746,2780, # 4912 +2781,2439,2468,5688,3930,5689,3789,3130,3790,3537,3427,3791,5690,1179,3086,5691, # 4928 +3187,2378,4416,3792,2548,3188,3131,2749,4143,5692,3428,1556,2549,2297, 977,2901, # 4944 +2034,4144,1205,3429,5693,1765,3430,3189,2125,1271, 714,1689,4775,3538,5694,2333, # 4960 +3931, 533,4417,3660,2184, 617,5695,2469,3340,3539,2315,5696,5697,3190,5698,5699, # 4976 +3932,1988, 618, 
427,2651,3540,3431,5700,5701,1244,1690,5702,2819,4418,4776,5703, # 4992 +3541,4777,5704,2284,1576, 473,3661,4419,3432, 972,5705,3662,5706,3087,5707,5708, # 5008 +4778,4779,5709,3793,4145,4146,5710, 153,4780, 356,5711,1892,2902,4420,2144, 408, # 5024 + 803,2357,5712,3933,5713,4421,1646,2578,2518,4781,4782,3934,5714,3935,4422,5715, # 5040 +2416,3433, 752,5716,5717,1962,3341,2979,5718, 746,3030,2470,4783,4423,3794, 698, # 5056 +4784,1893,4424,3663,2550,4785,3664,3936,5719,3191,3434,5720,1824,1302,4147,2715, # 5072 +3937,1974,4425,5721,4426,3192, 823,1303,1288,1236,2861,3542,4148,3435, 774,3938, # 5088 +5722,1581,4786,1304,2862,3939,4787,5723,2440,2162,1083,3268,4427,4149,4428, 344, # 5104 +1173, 288,2316, 454,1683,5724,5725,1461,4788,4150,2597,5726,5727,4789, 985, 894, # 5120 +5728,3436,3193,5729,1914,2942,3795,1989,5730,2111,1975,5731,4151,5732,2579,1194, # 5136 + 425,5733,4790,3194,1245,3796,4429,5734,5735,2863,5736, 636,4791,1856,3940, 760, # 5152 +1800,5737,4430,2212,1508,4792,4152,1894,1684,2298,5738,5739,4793,4431,4432,2213, # 5168 + 479,5740,5741, 832,5742,4153,2496,5743,2980,2497,3797, 990,3132, 627,1815,2652, # 5184 +4433,1582,4434,2126,2112,3543,4794,5744, 799,4435,3195,5745,4795,2113,1737,3031, # 5200 +1018, 543, 754,4436,3342,1676,4796,4797,4154,4798,1489,5746,3544,5747,2624,2903, # 5216 +4155,5748,5749,2981,5750,5751,5752,5753,3196,4799,4800,2185,1722,5754,3269,3270, # 5232 +1843,3665,1715, 481, 365,1976,1857,5755,5756,1963,2498,4801,5757,2127,3666,3271, # 5248 + 433,1895,2064,2076,5758, 602,2750,5759,5760,5761,5762,5763,3032,1628,3437,5764, # 5264 +3197,4802,4156,2904,4803,2519,5765,2551,2782,5766,5767,5768,3343,4804,2905,5769, # 5280 +4805,5770,2864,4806,4807,1221,2982,4157,2520,5771,5772,5773,1868,1990,5774,5775, # 5296 +5776,1896,5777,5778,4808,1897,4158, 318,5779,2095,4159,4437,5780,5781, 485,5782, # 5312 + 938,3941, 553,2680, 116,5783,3942,3667,5784,3545,2681,2783,3438,3344,2820,5785, # 5328 +3668,2943,4160,1747,2944,2983,5786,5787, 207,5788,4809,5789,4810,2521,5790,3033, # 5344 + 890,3669,3943,5791,1878,3798,3439,5792,2186,2358,3440,1652,5793,5794,5795, 941, # 5360 +2299, 208,3546,4161,2020, 330,4438,3944,2906,2499,3799,4439,4811,5796,5797,5798, # 5376 +) + diff --git a/env/lib/python3.6/site-packages/pip/_vendor/chardet/big5prober.py b/env/lib/python3.6/site-packages/pip/_vendor/chardet/big5prober.py new file mode 100644 index 0000000..98f9970 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/chardet/big5prober.py @@ -0,0 +1,47 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .mbcharsetprober import MultiByteCharSetProber +from .codingstatemachine import CodingStateMachine +from .chardistribution import Big5DistributionAnalysis +from .mbcssm import BIG5_SM_MODEL + + +class Big5Prober(MultiByteCharSetProber): + def __init__(self): + super(Big5Prober, self).__init__() + self.coding_sm = CodingStateMachine(BIG5_SM_MODEL) + self.distribution_analyzer = Big5DistributionAnalysis() + self.reset() + + @property + def charset_name(self): + return "Big5" + + @property + def language(self): + return "Chinese" diff --git a/env/lib/python3.6/site-packages/pip/_vendor/chardet/chardistribution.py b/env/lib/python3.6/site-packages/pip/_vendor/chardet/chardistribution.py new file mode 100644 index 0000000..c0395f4 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/chardet/chardistribution.py @@ -0,0 +1,233 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .euctwfreq import (EUCTW_CHAR_TO_FREQ_ORDER, EUCTW_TABLE_SIZE, + EUCTW_TYPICAL_DISTRIBUTION_RATIO) +from .euckrfreq import (EUCKR_CHAR_TO_FREQ_ORDER, EUCKR_TABLE_SIZE, + EUCKR_TYPICAL_DISTRIBUTION_RATIO) +from .gb2312freq import (GB2312_CHAR_TO_FREQ_ORDER, GB2312_TABLE_SIZE, + GB2312_TYPICAL_DISTRIBUTION_RATIO) +from .big5freq import (BIG5_CHAR_TO_FREQ_ORDER, BIG5_TABLE_SIZE, + BIG5_TYPICAL_DISTRIBUTION_RATIO) +from .jisfreq import (JIS_CHAR_TO_FREQ_ORDER, JIS_TABLE_SIZE, + JIS_TYPICAL_DISTRIBUTION_RATIO) + + +class CharDistributionAnalysis(object): + ENOUGH_DATA_THRESHOLD = 1024 + SURE_YES = 0.99 + SURE_NO = 0.01 + MINIMUM_DATA_THRESHOLD = 3 + + def __init__(self): + # Mapping table to get frequency order from char order (get from + # GetOrder()) + self._char_to_freq_order = None + self._table_size = None # Size of above table + # This is a constant value which varies from language to language, + # used in calculating confidence. See + # http://www.mozilla.org/projects/intl/UniversalCharsetDetection.html + # for further detail. 
+ self.typical_distribution_ratio = None + self._done = None + self._total_chars = None + self._freq_chars = None + self.reset() + + def reset(self): + """reset analyser, clear any state""" + # If this flag is set to True, detection is done and conclusion has + # been made + self._done = False + self._total_chars = 0 # Total characters encountered + # The number of characters whose frequency order is less than 512 + self._freq_chars = 0 + + def feed(self, char, char_len): + """feed a character with known length""" + if char_len == 2: + # we only care about 2-bytes character in our distribution analysis + order = self.get_order(char) + else: + order = -1 + if order >= 0: + self._total_chars += 1 + # order is valid + if order < self._table_size: + if 512 > self._char_to_freq_order[order]: + self._freq_chars += 1 + + def get_confidence(self): + """return confidence based on existing data""" + # if we didn't receive any character in our consideration range, + # return negative answer + if self._total_chars <= 0 or self._freq_chars <= self.MINIMUM_DATA_THRESHOLD: + return self.SURE_NO + + if self._total_chars != self._freq_chars: + r = (self._freq_chars / ((self._total_chars - self._freq_chars) + * self.typical_distribution_ratio)) + if r < self.SURE_YES: + return r + + # normalize confidence (we don't want to be 100% sure) + return self.SURE_YES + + def got_enough_data(self): + # It is not necessary to receive all data to draw conclusion. + # For charset detection, certain amount of data is enough + return self._total_chars > self.ENOUGH_DATA_THRESHOLD + + def get_order(self, byte_str): + # We do not handle characters based on the original encoding string, + # but convert this encoding string to a number, here called order. + # This allows multiple encodings of a language to share one frequency + # table. + return -1 + + +class EUCTWDistributionAnalysis(CharDistributionAnalysis): + def __init__(self): + super(EUCTWDistributionAnalysis, self).__init__() + self._char_to_freq_order = EUCTW_CHAR_TO_FREQ_ORDER + self._table_size = EUCTW_TABLE_SIZE + self.typical_distribution_ratio = EUCTW_TYPICAL_DISTRIBUTION_RATIO + + def get_order(self, byte_str): + # for euc-TW encoding, we are interested + # first byte range: 0xc4 -- 0xfe + # second byte range: 0xa1 -- 0xfe + # no validation needed here. State machine has done that + first_char = byte_str[0] + if first_char >= 0xC4: + return 94 * (first_char - 0xC4) + byte_str[1] - 0xA1 + else: + return -1 + + +class EUCKRDistributionAnalysis(CharDistributionAnalysis): + def __init__(self): + super(EUCKRDistributionAnalysis, self).__init__() + self._char_to_freq_order = EUCKR_CHAR_TO_FREQ_ORDER + self._table_size = EUCKR_TABLE_SIZE + self.typical_distribution_ratio = EUCKR_TYPICAL_DISTRIBUTION_RATIO + + def get_order(self, byte_str): + # for euc-KR encoding, we are interested + # first byte range: 0xb0 -- 0xfe + # second byte range: 0xa1 -- 0xfe + # no validation needed here. 
State machine has done that + first_char = byte_str[0] + if first_char >= 0xB0: + return 94 * (first_char - 0xB0) + byte_str[1] - 0xA1 + else: + return -1 + + +class GB2312DistributionAnalysis(CharDistributionAnalysis): + def __init__(self): + super(GB2312DistributionAnalysis, self).__init__() + self._char_to_freq_order = GB2312_CHAR_TO_FREQ_ORDER + self._table_size = GB2312_TABLE_SIZE + self.typical_distribution_ratio = GB2312_TYPICAL_DISTRIBUTION_RATIO + + def get_order(self, byte_str): + # for GB2312 encoding, we are interested + # first byte range: 0xb0 -- 0xfe + # second byte range: 0xa1 -- 0xfe + # no validation needed here. State machine has done that + first_char, second_char = byte_str[0], byte_str[1] + if (first_char >= 0xB0) and (second_char >= 0xA1): + return 94 * (first_char - 0xB0) + second_char - 0xA1 + else: + return -1 + + +class Big5DistributionAnalysis(CharDistributionAnalysis): + def __init__(self): + super(Big5DistributionAnalysis, self).__init__() + self._char_to_freq_order = BIG5_CHAR_TO_FREQ_ORDER + self._table_size = BIG5_TABLE_SIZE + self.typical_distribution_ratio = BIG5_TYPICAL_DISTRIBUTION_RATIO + + def get_order(self, byte_str): + # for big5 encoding, we are interested + # first byte range: 0xa4 -- 0xfe + # second byte range: 0x40 -- 0x7e , 0xa1 -- 0xfe + # no validation needed here. State machine has done that + first_char, second_char = byte_str[0], byte_str[1] + if first_char >= 0xA4: + if second_char >= 0xA1: + return 157 * (first_char - 0xA4) + second_char - 0xA1 + 63 + else: + return 157 * (first_char - 0xA4) + second_char - 0x40 + else: + return -1 + + +class SJISDistributionAnalysis(CharDistributionAnalysis): + def __init__(self): + super(SJISDistributionAnalysis, self).__init__() + self._char_to_freq_order = JIS_CHAR_TO_FREQ_ORDER + self._table_size = JIS_TABLE_SIZE + self.typical_distribution_ratio = JIS_TYPICAL_DISTRIBUTION_RATIO + + def get_order(self, byte_str): + # for sjis encoding, we are interested + # first byte range: 0x81 -- 0x9f , 0xe0 -- 0xfe + # second byte range: 0x40 -- 0x7e, 0x81 -- oxfe + # no validation needed here. State machine has done that + first_char, second_char = byte_str[0], byte_str[1] + if (first_char >= 0x81) and (first_char <= 0x9F): + order = 188 * (first_char - 0x81) + elif (first_char >= 0xE0) and (first_char <= 0xEF): + order = 188 * (first_char - 0xE0 + 31) + else: + return -1 + order = order + second_char - 0x40 + if second_char > 0x7F: + order = -1 + return order + + +class EUCJPDistributionAnalysis(CharDistributionAnalysis): + def __init__(self): + super(EUCJPDistributionAnalysis, self).__init__() + self._char_to_freq_order = JIS_CHAR_TO_FREQ_ORDER + self._table_size = JIS_TABLE_SIZE + self.typical_distribution_ratio = JIS_TYPICAL_DISTRIBUTION_RATIO + + def get_order(self, byte_str): + # for euc-JP encoding, we are interested + # first byte range: 0xa0 -- 0xfe + # second byte range: 0xa1 -- 0xfe + # no validation needed here. State machine has done that + char = byte_str[0] + if char >= 0xA0: + return 94 * (char - 0xA1) + byte_str[1] - 0xa1 + else: + return -1 diff --git a/env/lib/python3.6/site-packages/pip/_vendor/chardet/charsetgroupprober.py b/env/lib/python3.6/site-packages/pip/_vendor/chardet/charsetgroupprober.py new file mode 100644 index 0000000..8b3738e --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/chardet/charsetgroupprober.py @@ -0,0 +1,106 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. 
+# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .enums import ProbingState +from .charsetprober import CharSetProber + + +class CharSetGroupProber(CharSetProber): + def __init__(self, lang_filter=None): + super(CharSetGroupProber, self).__init__(lang_filter=lang_filter) + self._active_num = 0 + self.probers = [] + self._best_guess_prober = None + + def reset(self): + super(CharSetGroupProber, self).reset() + self._active_num = 0 + for prober in self.probers: + if prober: + prober.reset() + prober.active = True + self._active_num += 1 + self._best_guess_prober = None + + @property + def charset_name(self): + if not self._best_guess_prober: + self.get_confidence() + if not self._best_guess_prober: + return None + return self._best_guess_prober.charset_name + + @property + def language(self): + if not self._best_guess_prober: + self.get_confidence() + if not self._best_guess_prober: + return None + return self._best_guess_prober.language + + def feed(self, byte_str): + for prober in self.probers: + if not prober: + continue + if not prober.active: + continue + state = prober.feed(byte_str) + if not state: + continue + if state == ProbingState.FOUND_IT: + self._best_guess_prober = prober + return self.state + elif state == ProbingState.NOT_ME: + prober.active = False + self._active_num -= 1 + if self._active_num <= 0: + self._state = ProbingState.NOT_ME + return self.state + return self.state + + def get_confidence(self): + state = self.state + if state == ProbingState.FOUND_IT: + return 0.99 + elif state == ProbingState.NOT_ME: + return 0.01 + best_conf = 0.0 + self._best_guess_prober = None + for prober in self.probers: + if not prober: + continue + if not prober.active: + self.logger.debug('%s not active', prober.charset_name) + continue + conf = prober.get_confidence() + self.logger.debug('%s %s confidence = %s', prober.charset_name, prober.language, conf) + if best_conf < conf: + best_conf = conf + self._best_guess_prober = prober + if not self._best_guess_prober: + return 0.0 + return best_conf diff --git a/env/lib/python3.6/site-packages/pip/_vendor/chardet/charsetprober.py b/env/lib/python3.6/site-packages/pip/_vendor/chardet/charsetprober.py new file mode 100644 index 0000000..eac4e59 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/chardet/charsetprober.py @@ -0,0 +1,145 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. 
+# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +import logging +import re + +from .enums import ProbingState + + +class CharSetProber(object): + + SHORTCUT_THRESHOLD = 0.95 + + def __init__(self, lang_filter=None): + self._state = None + self.lang_filter = lang_filter + self.logger = logging.getLogger(__name__) + + def reset(self): + self._state = ProbingState.DETECTING + + @property + def charset_name(self): + return None + + def feed(self, buf): + pass + + @property + def state(self): + return self._state + + def get_confidence(self): + return 0.0 + + @staticmethod + def filter_high_byte_only(buf): + buf = re.sub(b'([\x00-\x7F])+', b' ', buf) + return buf + + @staticmethod + def filter_international_words(buf): + """ + We define three types of bytes: + alphabet: english alphabets [a-zA-Z] + international: international characters [\x80-\xFF] + marker: everything else [^a-zA-Z\x80-\xFF] + + The input buffer can be thought to contain a series of words delimited + by markers. This function works to filter all words that contain at + least one international character. All contiguous sequences of markers + are replaced by a single space ascii character. + + This filter applies to all scripts which do not use English characters. + """ + filtered = bytearray() + + # This regex expression filters out only words that have at-least one + # international character. The word may include one marker character at + # the end. + words = re.findall(b'[a-zA-Z]*[\x80-\xFF]+[a-zA-Z]*[^a-zA-Z\x80-\xFF]?', + buf) + + for word in words: + filtered.extend(word[:-1]) + + # If the last character in the word is a marker, replace it with a + # space as markers shouldn't affect our analysis (they are used + # similarly across all languages and may thus have similar + # frequencies). + last_char = word[-1:] + if not last_char.isalpha() and last_char < b'\x80': + last_char = b' ' + filtered.extend(last_char) + + return filtered + + @staticmethod + def filter_with_english_letters(buf): + """ + Returns a copy of ``buf`` that retains only the sequences of English + alphabet and high byte characters that are not between <> characters. + Also retains English alphabet and high byte characters immediately + before occurrences of >. + + This filter can be applied to all scripts which contain both English + characters and extended ASCII characters, but is currently only used by + ``Latin1Prober``. 
+ """ + filtered = bytearray() + in_tag = False + prev = 0 + + for curr in range(len(buf)): + # Slice here to get bytes instead of an int with Python 3 + buf_char = buf[curr:curr + 1] + # Check if we're coming out of or entering an HTML tag + if buf_char == b'>': + in_tag = False + elif buf_char == b'<': + in_tag = True + + # If current character is not extended-ASCII and not alphabetic... + if buf_char < b'\x80' and not buf_char.isalpha(): + # ...and we're not in a tag + if curr > prev and not in_tag: + # Keep everything after last non-extended-ASCII, + # non-alphabetic character + filtered.extend(buf[prev:curr]) + # Output a space to delimit stretch we kept + filtered.extend(b' ') + prev = curr + 1 + + # If we're not in a tag... + if not in_tag: + # Keep everything after last non-extended-ASCII, non-alphabetic + # character + filtered.extend(buf[prev:]) + + return filtered diff --git a/env/lib/python3.6/site-packages/pip/_vendor/chardet/cli/chardetect.py b/env/lib/python3.6/site-packages/pip/_vendor/chardet/cli/chardetect.py new file mode 100644 index 0000000..c61136b --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/chardet/cli/chardetect.py @@ -0,0 +1,85 @@ +#!/usr/bin/env python +""" +Script which takes one or more file paths and reports on their detected +encodings + +Example:: + + % chardetect somefile someotherfile + somefile: windows-1252 with confidence 0.5 + someotherfile: ascii with confidence 1.0 + +If no paths are provided, it takes its input from stdin. + +""" + +from __future__ import absolute_import, print_function, unicode_literals + +import argparse +import sys + +from pip._vendor.chardet import __version__ +from pip._vendor.chardet.compat import PY2 +from pip._vendor.chardet.universaldetector import UniversalDetector + + +def description_of(lines, name='stdin'): + """ + Return a string describing the probable encoding of a file or + list of strings. + + :param lines: The lines to get the encoding of. + :type lines: Iterable of bytes + :param name: Name of file or collection of lines + :type name: str + """ + u = UniversalDetector() + for line in lines: + line = bytearray(line) + u.feed(line) + # shortcut out of the loop to save reading further - particularly useful if we read a BOM. + if u.done: + break + u.close() + result = u.result + if PY2: + name = name.decode(sys.getfilesystemencoding(), 'ignore') + if result['encoding']: + return '{0}: {1} with confidence {2}'.format(name, result['encoding'], + result['confidence']) + else: + return '{0}: no result'.format(name) + + +def main(argv=None): + """ + Handles command line arguments and gets things started. + + :param argv: List of arguments, as if specified on the command-line. + If None, ``sys.argv[1:]`` is used instead. + :type argv: list of str + """ + # Get command line arguments + parser = argparse.ArgumentParser( + description="Takes one or more file paths and reports their detected \ + encodings") + parser.add_argument('input', + help='File whose encoding we would like to determine. \ + (default: stdin)', + type=argparse.FileType('rb'), nargs='*', + default=[sys.stdin if PY2 else sys.stdin.buffer]) + parser.add_argument('--version', action='version', + version='%(prog)s {0}'.format(__version__)) + args = parser.parse_args(argv) + + for f in args.input: + if f.isatty(): + print("You are running chardetect interactively. Press " + + "CTRL-D twice at the start of a blank line to signal the " + + "end of your input. 
If you want help, run chardetect " + + "--help\n", file=sys.stderr) + print(description_of(f, f.name)) + + +if __name__ == '__main__': + main() diff --git a/env/lib/python3.6/site-packages/pip/_vendor/chardet/codingstatemachine.py b/env/lib/python3.6/site-packages/pip/_vendor/chardet/codingstatemachine.py new file mode 100644 index 0000000..68fba44 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/chardet/codingstatemachine.py @@ -0,0 +1,88 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +import logging + +from .enums import MachineState + + +class CodingStateMachine(object): + """ + A state machine to verify a byte sequence for a particular encoding. For + each byte the detector receives, it will feed that byte to every active + state machine available, one byte at a time. The state machine changes its + state based on its previous state and the byte it receives. There are 3 + states in a state machine that are of interest to an auto-detector: + + START state: This is the state to start with, or a legal byte sequence + (i.e. a valid code point) for character has been identified. + + ME state: This indicates that the state machine identified a byte sequence + that is specific to the charset it is designed for and that + there is no other possible encoding which can contain this byte + sequence. This will to lead to an immediate positive answer for + the detector. + + ERROR state: This indicates the state machine identified an illegal byte + sequence for that encoding. This will lead to an immediate + negative answer for this encoding. Detector will exclude this + encoding from consideration from here on. 
+ """ + def __init__(self, sm): + self._model = sm + self._curr_byte_pos = 0 + self._curr_char_len = 0 + self._curr_state = None + self.logger = logging.getLogger(__name__) + self.reset() + + def reset(self): + self._curr_state = MachineState.START + + def next_state(self, c): + # for each byte we get its class + # if it is first byte, we also get byte length + byte_class = self._model['class_table'][c] + if self._curr_state == MachineState.START: + self._curr_byte_pos = 0 + self._curr_char_len = self._model['char_len_table'][byte_class] + # from byte's class and state_table, we get its next state + curr_state = (self._curr_state * self._model['class_factor'] + + byte_class) + self._curr_state = self._model['state_table'][curr_state] + self._curr_byte_pos += 1 + return self._curr_state + + def get_current_charlen(self): + return self._curr_char_len + + def get_coding_state_machine(self): + return self._model['name'] + + @property + def language(self): + return self._model['language'] diff --git a/env/lib/python3.6/site-packages/pip/_vendor/chardet/compat.py b/env/lib/python3.6/site-packages/pip/_vendor/chardet/compat.py new file mode 100644 index 0000000..ddd7468 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/chardet/compat.py @@ -0,0 +1,34 @@ +######################## BEGIN LICENSE BLOCK ######################## +# Contributor(s): +# Dan Blanchard +# Ian Cordasco +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +import sys + + +if sys.version_info < (3, 0): + PY2 = True + PY3 = False + base_str = (str, unicode) + text_type = unicode +else: + PY2 = False + PY3 = True + base_str = (bytes, str) + text_type = str diff --git a/env/lib/python3.6/site-packages/pip/_vendor/chardet/cp949prober.py b/env/lib/python3.6/site-packages/pip/_vendor/chardet/cp949prober.py new file mode 100644 index 0000000..efd793a --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/chardet/cp949prober.py @@ -0,0 +1,49 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .chardistribution import EUCKRDistributionAnalysis +from .codingstatemachine import CodingStateMachine +from .mbcharsetprober import MultiByteCharSetProber +from .mbcssm import CP949_SM_MODEL + + +class CP949Prober(MultiByteCharSetProber): + def __init__(self): + super(CP949Prober, self).__init__() + self.coding_sm = CodingStateMachine(CP949_SM_MODEL) + # NOTE: CP949 is a superset of EUC-KR, so the distribution should be + # not different. + self.distribution_analyzer = EUCKRDistributionAnalysis() + self.reset() + + @property + def charset_name(self): + return "CP949" + + @property + def language(self): + return "Korean" diff --git a/env/lib/python3.6/site-packages/pip/_vendor/chardet/enums.py b/env/lib/python3.6/site-packages/pip/_vendor/chardet/enums.py new file mode 100644 index 0000000..0451207 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/chardet/enums.py @@ -0,0 +1,76 @@ +""" +All of the Enums that are used throughout the chardet package. + +:author: Dan Blanchard (dan.blanchard@gmail.com) +""" + + +class InputState(object): + """ + This enum represents the different states a universal detector can be in. + """ + PURE_ASCII = 0 + ESC_ASCII = 1 + HIGH_BYTE = 2 + + +class LanguageFilter(object): + """ + This enum represents the different language filters we can apply to a + ``UniversalDetector``. + """ + CHINESE_SIMPLIFIED = 0x01 + CHINESE_TRADITIONAL = 0x02 + JAPANESE = 0x04 + KOREAN = 0x08 + NON_CJK = 0x10 + ALL = 0x1F + CHINESE = CHINESE_SIMPLIFIED | CHINESE_TRADITIONAL + CJK = CHINESE | JAPANESE | KOREAN + + +class ProbingState(object): + """ + This enum represents the different states a prober can be in. + """ + DETECTING = 0 + FOUND_IT = 1 + NOT_ME = 2 + + +class MachineState(object): + """ + This enum represents the different states a state machine can be in. + """ + START = 0 + ERROR = 1 + ITS_ME = 2 + + +class SequenceLikelihood(object): + """ + This enum represents the likelihood of a character following the previous one. + """ + NEGATIVE = 0 + UNLIKELY = 1 + LIKELY = 2 + POSITIVE = 3 + + @classmethod + def get_num_categories(cls): + """:returns: The number of likelihood categories in the enum.""" + return 4 + + +class CharacterCategory(object): + """ + This enum represents the different categories language models for + ``SingleByteCharsetProber`` put characters into. + + Anything less than CONTROL is considered a letter. + """ + UNDEFINED = 255 + LINE_BREAK = 254 + SYMBOL = 253 + DIGIT = 252 + CONTROL = 251 diff --git a/env/lib/python3.6/site-packages/pip/_vendor/chardet/escprober.py b/env/lib/python3.6/site-packages/pip/_vendor/chardet/escprober.py new file mode 100644 index 0000000..c70493f --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/chardet/escprober.py @@ -0,0 +1,101 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. 
+# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetprober import CharSetProber +from .codingstatemachine import CodingStateMachine +from .enums import LanguageFilter, ProbingState, MachineState +from .escsm import (HZ_SM_MODEL, ISO2022CN_SM_MODEL, ISO2022JP_SM_MODEL, + ISO2022KR_SM_MODEL) + + +class EscCharSetProber(CharSetProber): + """ + This CharSetProber uses a "code scheme" approach for detecting encodings, + whereby easily recognizable escape or shift sequences are relied on to + identify these encodings. + """ + + def __init__(self, lang_filter=None): + super(EscCharSetProber, self).__init__(lang_filter=lang_filter) + self.coding_sm = [] + if self.lang_filter & LanguageFilter.CHINESE_SIMPLIFIED: + self.coding_sm.append(CodingStateMachine(HZ_SM_MODEL)) + self.coding_sm.append(CodingStateMachine(ISO2022CN_SM_MODEL)) + if self.lang_filter & LanguageFilter.JAPANESE: + self.coding_sm.append(CodingStateMachine(ISO2022JP_SM_MODEL)) + if self.lang_filter & LanguageFilter.KOREAN: + self.coding_sm.append(CodingStateMachine(ISO2022KR_SM_MODEL)) + self.active_sm_count = None + self._detected_charset = None + self._detected_language = None + self._state = None + self.reset() + + def reset(self): + super(EscCharSetProber, self).reset() + for coding_sm in self.coding_sm: + if not coding_sm: + continue + coding_sm.active = True + coding_sm.reset() + self.active_sm_count = len(self.coding_sm) + self._detected_charset = None + self._detected_language = None + + @property + def charset_name(self): + return self._detected_charset + + @property + def language(self): + return self._detected_language + + def get_confidence(self): + if self._detected_charset: + return 0.99 + else: + return 0.00 + + def feed(self, byte_str): + for c in byte_str: + for coding_sm in self.coding_sm: + if not coding_sm or not coding_sm.active: + continue + coding_state = coding_sm.next_state(c) + if coding_state == MachineState.ERROR: + coding_sm.active = False + self.active_sm_count -= 1 + if self.active_sm_count <= 0: + self._state = ProbingState.NOT_ME + return self.state + elif coding_state == MachineState.ITS_ME: + self._state = ProbingState.FOUND_IT + self._detected_charset = coding_sm.get_coding_state_machine() + self._detected_language = coding_sm.language + return self.state + + return self.state diff --git a/env/lib/python3.6/site-packages/pip/_vendor/chardet/escsm.py b/env/lib/python3.6/site-packages/pip/_vendor/chardet/escsm.py new file mode 100644 index 0000000..0069523 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/chardet/escsm.py @@ -0,0 +1,246 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. 
+# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .enums import MachineState + +HZ_CLS = ( +1,0,0,0,0,0,0,0, # 00 - 07 +0,0,0,0,0,0,0,0, # 08 - 0f +0,0,0,0,0,0,0,0, # 10 - 17 +0,0,0,1,0,0,0,0, # 18 - 1f +0,0,0,0,0,0,0,0, # 20 - 27 +0,0,0,0,0,0,0,0, # 28 - 2f +0,0,0,0,0,0,0,0, # 30 - 37 +0,0,0,0,0,0,0,0, # 38 - 3f +0,0,0,0,0,0,0,0, # 40 - 47 +0,0,0,0,0,0,0,0, # 48 - 4f +0,0,0,0,0,0,0,0, # 50 - 57 +0,0,0,0,0,0,0,0, # 58 - 5f +0,0,0,0,0,0,0,0, # 60 - 67 +0,0,0,0,0,0,0,0, # 68 - 6f +0,0,0,0,0,0,0,0, # 70 - 77 +0,0,0,4,0,5,2,0, # 78 - 7f +1,1,1,1,1,1,1,1, # 80 - 87 +1,1,1,1,1,1,1,1, # 88 - 8f +1,1,1,1,1,1,1,1, # 90 - 97 +1,1,1,1,1,1,1,1, # 98 - 9f +1,1,1,1,1,1,1,1, # a0 - a7 +1,1,1,1,1,1,1,1, # a8 - af +1,1,1,1,1,1,1,1, # b0 - b7 +1,1,1,1,1,1,1,1, # b8 - bf +1,1,1,1,1,1,1,1, # c0 - c7 +1,1,1,1,1,1,1,1, # c8 - cf +1,1,1,1,1,1,1,1, # d0 - d7 +1,1,1,1,1,1,1,1, # d8 - df +1,1,1,1,1,1,1,1, # e0 - e7 +1,1,1,1,1,1,1,1, # e8 - ef +1,1,1,1,1,1,1,1, # f0 - f7 +1,1,1,1,1,1,1,1, # f8 - ff +) + +HZ_ST = ( +MachineState.START,MachineState.ERROR, 3,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,# 00-07 +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 08-0f +MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START, 4,MachineState.ERROR,# 10-17 + 5,MachineState.ERROR, 6,MachineState.ERROR, 5, 5, 4,MachineState.ERROR,# 18-1f + 4,MachineState.ERROR, 4, 4, 4,MachineState.ERROR, 4,MachineState.ERROR,# 20-27 + 4,MachineState.ITS_ME,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 28-2f +) + +HZ_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0) + +HZ_SM_MODEL = {'class_table': HZ_CLS, + 'class_factor': 6, + 'state_table': HZ_ST, + 'char_len_table': HZ_CHAR_LEN_TABLE, + 'name': "HZ-GB-2312", + 'language': 'Chinese'} + +ISO2022CN_CLS = ( +2,0,0,0,0,0,0,0, # 00 - 07 +0,0,0,0,0,0,0,0, # 08 - 0f +0,0,0,0,0,0,0,0, # 10 - 17 +0,0,0,1,0,0,0,0, # 18 - 1f +0,0,0,0,0,0,0,0, # 20 - 27 +0,3,0,0,0,0,0,0, # 28 - 2f +0,0,0,0,0,0,0,0, # 30 - 37 +0,0,0,0,0,0,0,0, # 38 - 3f +0,0,0,4,0,0,0,0, # 40 - 47 +0,0,0,0,0,0,0,0, # 48 - 4f +0,0,0,0,0,0,0,0, # 50 - 57 +0,0,0,0,0,0,0,0, # 58 - 5f +0,0,0,0,0,0,0,0, # 60 - 67 +0,0,0,0,0,0,0,0, # 68 - 6f +0,0,0,0,0,0,0,0, # 70 - 77 +0,0,0,0,0,0,0,0, # 78 - 7f +2,2,2,2,2,2,2,2, # 80 - 87 +2,2,2,2,2,2,2,2, # 88 - 8f 
+2,2,2,2,2,2,2,2, # 90 - 97 +2,2,2,2,2,2,2,2, # 98 - 9f +2,2,2,2,2,2,2,2, # a0 - a7 +2,2,2,2,2,2,2,2, # a8 - af +2,2,2,2,2,2,2,2, # b0 - b7 +2,2,2,2,2,2,2,2, # b8 - bf +2,2,2,2,2,2,2,2, # c0 - c7 +2,2,2,2,2,2,2,2, # c8 - cf +2,2,2,2,2,2,2,2, # d0 - d7 +2,2,2,2,2,2,2,2, # d8 - df +2,2,2,2,2,2,2,2, # e0 - e7 +2,2,2,2,2,2,2,2, # e8 - ef +2,2,2,2,2,2,2,2, # f0 - f7 +2,2,2,2,2,2,2,2, # f8 - ff +) + +ISO2022CN_ST = ( +MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 00-07 +MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 08-0f +MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 10-17 +MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 4,MachineState.ERROR,# 18-1f +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 20-27 + 5, 6,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 28-2f +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 30-37 +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.START,# 38-3f +) + +ISO2022CN_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0, 0, 0, 0) + +ISO2022CN_SM_MODEL = {'class_table': ISO2022CN_CLS, + 'class_factor': 9, + 'state_table': ISO2022CN_ST, + 'char_len_table': ISO2022CN_CHAR_LEN_TABLE, + 'name': "ISO-2022-CN", + 'language': 'Chinese'} + +ISO2022JP_CLS = ( +2,0,0,0,0,0,0,0, # 00 - 07 +0,0,0,0,0,0,2,2, # 08 - 0f +0,0,0,0,0,0,0,0, # 10 - 17 +0,0,0,1,0,0,0,0, # 18 - 1f +0,0,0,0,7,0,0,0, # 20 - 27 +3,0,0,0,0,0,0,0, # 28 - 2f +0,0,0,0,0,0,0,0, # 30 - 37 +0,0,0,0,0,0,0,0, # 38 - 3f +6,0,4,0,8,0,0,0, # 40 - 47 +0,9,5,0,0,0,0,0, # 48 - 4f +0,0,0,0,0,0,0,0, # 50 - 57 +0,0,0,0,0,0,0,0, # 58 - 5f +0,0,0,0,0,0,0,0, # 60 - 67 +0,0,0,0,0,0,0,0, # 68 - 6f +0,0,0,0,0,0,0,0, # 70 - 77 +0,0,0,0,0,0,0,0, # 78 - 7f +2,2,2,2,2,2,2,2, # 80 - 87 +2,2,2,2,2,2,2,2, # 88 - 8f +2,2,2,2,2,2,2,2, # 90 - 97 +2,2,2,2,2,2,2,2, # 98 - 9f +2,2,2,2,2,2,2,2, # a0 - a7 +2,2,2,2,2,2,2,2, # a8 - af +2,2,2,2,2,2,2,2, # b0 - b7 +2,2,2,2,2,2,2,2, # b8 - bf +2,2,2,2,2,2,2,2, # c0 - c7 +2,2,2,2,2,2,2,2, # c8 - cf +2,2,2,2,2,2,2,2, # d0 - d7 +2,2,2,2,2,2,2,2, # d8 - df +2,2,2,2,2,2,2,2, # e0 - e7 +2,2,2,2,2,2,2,2, # e8 - ef +2,2,2,2,2,2,2,2, # f0 - f7 +2,2,2,2,2,2,2,2, # f8 - ff +) + +ISO2022JP_ST = ( +MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 00-07 +MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 08-0f +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 10-17 +MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,# 18-1f +MachineState.ERROR, 
5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 4,MachineState.ERROR,MachineState.ERROR,# 20-27 +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 6,MachineState.ITS_ME,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,# 28-2f +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,# 30-37 +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 38-3f +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.START,MachineState.START,# 40-47 +) + +ISO2022JP_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0, 0, 0, 0, 0) + +ISO2022JP_SM_MODEL = {'class_table': ISO2022JP_CLS, + 'class_factor': 10, + 'state_table': ISO2022JP_ST, + 'char_len_table': ISO2022JP_CHAR_LEN_TABLE, + 'name': "ISO-2022-JP", + 'language': 'Japanese'} + +ISO2022KR_CLS = ( +2,0,0,0,0,0,0,0, # 00 - 07 +0,0,0,0,0,0,0,0, # 08 - 0f +0,0,0,0,0,0,0,0, # 10 - 17 +0,0,0,1,0,0,0,0, # 18 - 1f +0,0,0,0,3,0,0,0, # 20 - 27 +0,4,0,0,0,0,0,0, # 28 - 2f +0,0,0,0,0,0,0,0, # 30 - 37 +0,0,0,0,0,0,0,0, # 38 - 3f +0,0,0,5,0,0,0,0, # 40 - 47 +0,0,0,0,0,0,0,0, # 48 - 4f +0,0,0,0,0,0,0,0, # 50 - 57 +0,0,0,0,0,0,0,0, # 58 - 5f +0,0,0,0,0,0,0,0, # 60 - 67 +0,0,0,0,0,0,0,0, # 68 - 6f +0,0,0,0,0,0,0,0, # 70 - 77 +0,0,0,0,0,0,0,0, # 78 - 7f +2,2,2,2,2,2,2,2, # 80 - 87 +2,2,2,2,2,2,2,2, # 88 - 8f +2,2,2,2,2,2,2,2, # 90 - 97 +2,2,2,2,2,2,2,2, # 98 - 9f +2,2,2,2,2,2,2,2, # a0 - a7 +2,2,2,2,2,2,2,2, # a8 - af +2,2,2,2,2,2,2,2, # b0 - b7 +2,2,2,2,2,2,2,2, # b8 - bf +2,2,2,2,2,2,2,2, # c0 - c7 +2,2,2,2,2,2,2,2, # c8 - cf +2,2,2,2,2,2,2,2, # d0 - d7 +2,2,2,2,2,2,2,2, # d8 - df +2,2,2,2,2,2,2,2, # e0 - e7 +2,2,2,2,2,2,2,2, # e8 - ef +2,2,2,2,2,2,2,2, # f0 - f7 +2,2,2,2,2,2,2,2, # f8 - ff +) + +ISO2022KR_ST = ( +MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,# 00-07 +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 08-0f +MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 4,MachineState.ERROR,MachineState.ERROR,# 10-17 +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 18-1f +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 20-27 +) + +ISO2022KR_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0) + +ISO2022KR_SM_MODEL = {'class_table': ISO2022KR_CLS, + 'class_factor': 6, + 'state_table': ISO2022KR_ST, + 'char_len_table': ISO2022KR_CHAR_LEN_TABLE, + 'name': "ISO-2022-KR", + 'language': 'Korean'} + + diff --git a/env/lib/python3.6/site-packages/pip/_vendor/chardet/eucjpprober.py b/env/lib/python3.6/site-packages/pip/_vendor/chardet/eucjpprober.py new file mode 100644 index 0000000..20ce8f7 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/chardet/eucjpprober.py @@ -0,0 +1,92 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. 
+# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .enums import ProbingState, MachineState +from .mbcharsetprober import MultiByteCharSetProber +from .codingstatemachine import CodingStateMachine +from .chardistribution import EUCJPDistributionAnalysis +from .jpcntx import EUCJPContextAnalysis +from .mbcssm import EUCJP_SM_MODEL + + +class EUCJPProber(MultiByteCharSetProber): + def __init__(self): + super(EUCJPProber, self).__init__() + self.coding_sm = CodingStateMachine(EUCJP_SM_MODEL) + self.distribution_analyzer = EUCJPDistributionAnalysis() + self.context_analyzer = EUCJPContextAnalysis() + self.reset() + + def reset(self): + super(EUCJPProber, self).reset() + self.context_analyzer.reset() + + @property + def charset_name(self): + return "EUC-JP" + + @property + def language(self): + return "Japanese" + + def feed(self, byte_str): + for i in range(len(byte_str)): + # PY3K: byte_str is a byte array, so byte_str[i] is an int, not a byte + coding_state = self.coding_sm.next_state(byte_str[i]) + if coding_state == MachineState.ERROR: + self.logger.debug('%s %s prober hit error at byte %s', + self.charset_name, self.language, i) + self._state = ProbingState.NOT_ME + break + elif coding_state == MachineState.ITS_ME: + self._state = ProbingState.FOUND_IT + break + elif coding_state == MachineState.START: + char_len = self.coding_sm.get_current_charlen() + if i == 0: + self._last_char[1] = byte_str[0] + self.context_analyzer.feed(self._last_char, char_len) + self.distribution_analyzer.feed(self._last_char, char_len) + else: + self.context_analyzer.feed(byte_str[i - 1:i + 1], + char_len) + self.distribution_analyzer.feed(byte_str[i - 1:i + 1], + char_len) + + self._last_char[0] = byte_str[-1] + + if self.state == ProbingState.DETECTING: + if (self.context_analyzer.got_enough_data() and + (self.get_confidence() > self.SHORTCUT_THRESHOLD)): + self._state = ProbingState.FOUND_IT + + return self.state + + def get_confidence(self): + context_conf = self.context_analyzer.get_confidence() + distrib_conf = self.distribution_analyzer.get_confidence() + return max(context_conf, distrib_conf) diff --git a/env/lib/python3.6/site-packages/pip/_vendor/chardet/euckrfreq.py b/env/lib/python3.6/site-packages/pip/_vendor/chardet/euckrfreq.py new file mode 100644 index 0000000..b68078c --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/chardet/euckrfreq.py @@ -0,0 +1,195 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. 
+# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# Sampling from about 20M text materials include literature and computer technology + +# 128 --> 0.79 +# 256 --> 0.92 +# 512 --> 0.986 +# 1024 --> 0.99944 +# 2048 --> 0.99999 +# +# Idea Distribution Ratio = 0.98653 / (1-0.98653) = 73.24 +# Random Distribution Ration = 512 / (2350-512) = 0.279. +# +# Typical Distribution Ratio + +EUCKR_TYPICAL_DISTRIBUTION_RATIO = 6.0 + +EUCKR_TABLE_SIZE = 2352 + +# Char to FreqOrder table , +EUCKR_CHAR_TO_FREQ_ORDER = ( + 13, 130, 120,1396, 481,1719,1720, 328, 609, 212,1721, 707, 400, 299,1722, 87, +1397,1723, 104, 536,1117,1203,1724,1267, 685,1268, 508,1725,1726,1727,1728,1398, +1399,1729,1730,1731, 141, 621, 326,1057, 368,1732, 267, 488, 20,1733,1269,1734, + 945,1400,1735, 47, 904,1270,1736,1737, 773, 248,1738, 409, 313, 786, 429,1739, + 116, 987, 813,1401, 683, 75,1204, 145,1740,1741,1742,1743, 16, 847, 667, 622, + 708,1744,1745,1746, 966, 787, 304, 129,1747, 60, 820, 123, 676,1748,1749,1750, +1751, 617,1752, 626,1753,1754,1755,1756, 653,1757,1758,1759,1760,1761,1762, 856, + 344,1763,1764,1765,1766, 89, 401, 418, 806, 905, 848,1767,1768,1769, 946,1205, + 709,1770,1118,1771, 241,1772,1773,1774,1271,1775, 569,1776, 999,1777,1778,1779, +1780, 337, 751,1058, 28, 628, 254,1781, 177, 906, 270, 349, 891,1079,1782, 19, +1783, 379,1784, 315,1785, 629, 754,1402, 559,1786, 636, 203,1206,1787, 710, 567, +1788, 935, 814,1789,1790,1207, 766, 528,1791,1792,1208,1793,1794,1795,1796,1797, +1403,1798,1799, 533,1059,1404,1405,1156,1406, 936, 884,1080,1800, 351,1801,1802, +1803,1804,1805, 801,1806,1807,1808,1119,1809,1157, 714, 474,1407,1810, 298, 899, + 885,1811,1120, 802,1158,1812, 892,1813,1814,1408, 659,1815,1816,1121,1817,1818, +1819,1820,1821,1822, 319,1823, 594, 545,1824, 815, 937,1209,1825,1826, 573,1409, +1022,1827,1210,1828,1829,1830,1831,1832,1833, 556, 722, 807,1122,1060,1834, 697, +1835, 900, 557, 715,1836,1410, 540,1411, 752,1159, 294, 597,1211, 976, 803, 770, +1412,1837,1838, 39, 794,1413, 358,1839, 371, 925,1840, 453, 661, 788, 531, 723, + 544,1023,1081, 869, 91,1841, 392, 430, 790, 602,1414, 677,1082, 457,1415,1416, +1842,1843, 475, 327,1024,1417, 795, 121,1844, 733, 403,1418,1845,1846,1847, 300, + 119, 711,1212, 627,1848,1272, 207,1849,1850, 796,1213, 382,1851, 519,1852,1083, + 893,1853,1854,1855, 367, 809, 487, 671,1856, 663,1857,1858, 956, 471, 306, 857, +1859,1860,1160,1084,1861,1862,1863,1864,1865,1061,1866,1867,1868,1869,1870,1871, + 282, 96, 574,1872, 502,1085,1873,1214,1874, 907,1875,1876, 
827, 977,1419,1420, +1421, 268,1877,1422,1878,1879,1880, 308,1881, 2, 537,1882,1883,1215,1884,1885, + 127, 791,1886,1273,1423,1887, 34, 336, 404, 643,1888, 571, 654, 894, 840,1889, + 0, 886,1274, 122, 575, 260, 908, 938,1890,1275, 410, 316,1891,1892, 100,1893, +1894,1123, 48,1161,1124,1025,1895, 633, 901,1276,1896,1897, 115, 816,1898, 317, +1899, 694,1900, 909, 734,1424, 572, 866,1425, 691, 85, 524,1010, 543, 394, 841, +1901,1902,1903,1026,1904,1905,1906,1907,1908,1909, 30, 451, 651, 988, 310,1910, +1911,1426, 810,1216, 93,1912,1913,1277,1217,1914, 858, 759, 45, 58, 181, 610, + 269,1915,1916, 131,1062, 551, 443,1000, 821,1427, 957, 895,1086,1917,1918, 375, +1919, 359,1920, 687,1921, 822,1922, 293,1923,1924, 40, 662, 118, 692, 29, 939, + 887, 640, 482, 174,1925, 69,1162, 728,1428, 910,1926,1278,1218,1279, 386, 870, + 217, 854,1163, 823,1927,1928,1929,1930, 834,1931, 78,1932, 859,1933,1063,1934, +1935,1936,1937, 438,1164, 208, 595,1938,1939,1940,1941,1219,1125,1942, 280, 888, +1429,1430,1220,1431,1943,1944,1945,1946,1947,1280, 150, 510,1432,1948,1949,1950, +1951,1952,1953,1954,1011,1087,1955,1433,1043,1956, 881,1957, 614, 958,1064,1065, +1221,1958, 638,1001, 860, 967, 896,1434, 989, 492, 553,1281,1165,1959,1282,1002, +1283,1222,1960,1961,1962,1963, 36, 383, 228, 753, 247, 454,1964, 876, 678,1965, +1966,1284, 126, 464, 490, 835, 136, 672, 529, 940,1088,1435, 473,1967,1968, 467, + 50, 390, 227, 587, 279, 378, 598, 792, 968, 240, 151, 160, 849, 882,1126,1285, + 639,1044, 133, 140, 288, 360, 811, 563,1027, 561, 142, 523,1969,1970,1971, 7, + 103, 296, 439, 407, 506, 634, 990,1972,1973,1974,1975, 645,1976,1977,1978,1979, +1980,1981, 236,1982,1436,1983,1984,1089, 192, 828, 618, 518,1166, 333,1127,1985, + 818,1223,1986,1987,1988,1989,1990,1991,1992,1993, 342,1128,1286, 746, 842,1994, +1995, 560, 223,1287, 98, 8, 189, 650, 978,1288,1996,1437,1997, 17, 345, 250, + 423, 277, 234, 512, 226, 97, 289, 42, 167,1998, 201,1999,2000, 843, 836, 824, + 532, 338, 783,1090, 182, 576, 436,1438,1439, 527, 500,2001, 947, 889,2002,2003, +2004,2005, 262, 600, 314, 447,2006, 547,2007, 693, 738,1129,2008, 71,1440, 745, + 619, 688,2009, 829,2010,2011, 147,2012, 33, 948,2013,2014, 74, 224,2015, 61, + 191, 918, 399, 637,2016,1028,1130, 257, 902,2017,2018,2019,2020,2021,2022,2023, +2024,2025,2026, 837,2027,2028,2029,2030, 179, 874, 591, 52, 724, 246,2031,2032, +2033,2034,1167, 969,2035,1289, 630, 605, 911,1091,1168,2036,2037,2038,1441, 912, +2039, 623,2040,2041, 253,1169,1290,2042,1442, 146, 620, 611, 577, 433,2043,1224, + 719,1170, 959, 440, 437, 534, 84, 388, 480,1131, 159, 220, 198, 679,2044,1012, + 819,1066,1443, 113,1225, 194, 318,1003,1029,2045,2046,2047,2048,1067,2049,2050, +2051,2052,2053, 59, 913, 112,2054, 632,2055, 455, 144, 739,1291,2056, 273, 681, + 499,2057, 448,2058,2059, 760,2060,2061, 970, 384, 169, 245,1132,2062,2063, 414, +1444,2064,2065, 41, 235,2066, 157, 252, 877, 568, 919, 789, 580,2067, 725,2068, +2069,1292,2070,2071,1445,2072,1446,2073,2074, 55, 588, 66,1447, 271,1092,2075, +1226,2076, 960,1013, 372,2077,2078,2079,2080,2081,1293,2082,2083,2084,2085, 850, +2086,2087,2088,2089,2090, 186,2091,1068, 180,2092,2093,2094, 109,1227, 522, 606, +2095, 867,1448,1093, 991,1171, 926, 353,1133,2096, 581,2097,2098,2099,1294,1449, +1450,2100, 596,1172,1014,1228,2101,1451,1295,1173,1229,2102,2103,1296,1134,1452, + 949,1135,2104,2105,1094,1453,1454,1455,2106,1095,2107,2108,2109,2110,2111,2112, +2113,2114,2115,2116,2117, 804,2118,2119,1230,1231, 805,1456, 405,1136,2120,2121, +2122,2123,2124, 720, 701,1297, 992,1457, 
927,1004,2125,2126,2127,2128,2129,2130, + 22, 417,2131, 303,2132, 385,2133, 971, 520, 513,2134,1174, 73,1096, 231, 274, + 962,1458, 673,2135,1459,2136, 152,1137,2137,2138,2139,2140,1005,1138,1460,1139, +2141,2142,2143,2144, 11, 374, 844,2145, 154,1232, 46,1461,2146, 838, 830, 721, +1233, 106,2147, 90, 428, 462, 578, 566,1175, 352,2148,2149, 538,1234, 124,1298, +2150,1462, 761, 565,2151, 686,2152, 649,2153, 72, 173,2154, 460, 415,2155,1463, +2156,1235, 305,2157,2158,2159,2160,2161,2162, 579,2163,2164,2165,2166,2167, 747, +2168,2169,2170,2171,1464, 669,2172,2173,2174,2175,2176,1465,2177, 23, 530, 285, +2178, 335, 729,2179, 397,2180,2181,2182,1030,2183,2184, 698,2185,2186, 325,2187, +2188, 369,2189, 799,1097,1015, 348,2190,1069, 680,2191, 851,1466,2192,2193, 10, +2194, 613, 424,2195, 979, 108, 449, 589, 27, 172, 81,1031, 80, 774, 281, 350, +1032, 525, 301, 582,1176,2196, 674,1045,2197,2198,1467, 730, 762,2199,2200,2201, +2202,1468,2203, 993,2204,2205, 266,1070, 963,1140,2206,2207,2208, 664,1098, 972, +2209,2210,2211,1177,1469,1470, 871,2212,2213,2214,2215,2216,1471,2217,2218,2219, +2220,2221,2222,2223,2224,2225,2226,2227,1472,1236,2228,2229,2230,2231,2232,2233, +2234,2235,1299,2236,2237, 200,2238, 477, 373,2239,2240, 731, 825, 777,2241,2242, +2243, 521, 486, 548,2244,2245,2246,1473,1300, 53, 549, 137, 875, 76, 158,2247, +1301,1474, 469, 396,1016, 278, 712,2248, 321, 442, 503, 767, 744, 941,1237,1178, +1475,2249, 82, 178,1141,1179, 973,2250,1302,2251, 297,2252,2253, 570,2254,2255, +2256, 18, 450, 206,2257, 290, 292,1142,2258, 511, 162, 99, 346, 164, 735,2259, +1476,1477, 4, 554, 343, 798,1099,2260,1100,2261, 43, 171,1303, 139, 215,2262, +2263, 717, 775,2264,1033, 322, 216,2265, 831,2266, 149,2267,1304,2268,2269, 702, +1238, 135, 845, 347, 309,2270, 484,2271, 878, 655, 238,1006,1478,2272, 67,2273, + 295,2274,2275, 461,2276, 478, 942, 412,2277,1034,2278,2279,2280, 265,2281, 541, +2282,2283,2284,2285,2286, 70, 852,1071,2287,2288,2289,2290, 21, 56, 509, 117, + 432,2291,2292, 331, 980, 552,1101, 148, 284, 105, 393,1180,1239, 755,2293, 187, +2294,1046,1479,2295, 340,2296, 63,1047, 230,2297,2298,1305, 763,1306, 101, 800, + 808, 494,2299,2300,2301, 903,2302, 37,1072, 14, 5,2303, 79, 675,2304, 312, +2305,2306,2307,2308,2309,1480, 6,1307,2310,2311,2312, 1, 470, 35, 24, 229, +2313, 695, 210, 86, 778, 15, 784, 592, 779, 32, 77, 855, 964,2314, 259,2315, + 501, 380,2316,2317, 83, 981, 153, 689,1308,1481,1482,1483,2318,2319, 716,1484, +2320,2321,2322,2323,2324,2325,1485,2326,2327, 128, 57, 68, 261,1048, 211, 170, +1240, 31,2328, 51, 435, 742,2329,2330,2331, 635,2332, 264, 456,2333,2334,2335, + 425,2336,1486, 143, 507, 263, 943,2337, 363, 920,1487, 256,1488,1102, 243, 601, +1489,2338,2339,2340,2341,2342,2343,2344, 861,2345,2346,2347,2348,2349,2350, 395, +2351,1490,1491, 62, 535, 166, 225,2352,2353, 668, 419,1241, 138, 604, 928,2354, +1181,2355,1492,1493,2356,2357,2358,1143,2359, 696,2360, 387, 307,1309, 682, 476, +2361,2362, 332, 12, 222, 156,2363, 232,2364, 641, 276, 656, 517,1494,1495,1035, + 416, 736,1496,2365,1017, 586,2366,2367,2368,1497,2369, 242,2370,2371,2372,1498, +2373, 965, 713,2374,2375,2376,2377, 740, 982,1499, 944,1500,1007,2378,2379,1310, +1501,2380,2381,2382, 785, 329,2383,2384,1502,2385,2386,2387, 932,2388,1503,2389, +2390,2391,2392,1242,2393,2394,2395,2396,2397, 994, 950,2398,2399,2400,2401,1504, +1311,2402,2403,2404,2405,1049, 749,2406,2407, 853, 718,1144,1312,2408,1182,1505, +2409,2410, 255, 516, 479, 564, 550, 214,1506,1507,1313, 413, 239, 444, 339,1145, 
+1036,1508,1509,1314,1037,1510,1315,2411,1511,2412,2413,2414, 176, 703, 497, 624, + 593, 921, 302,2415, 341, 165,1103,1512,2416,1513,2417,2418,2419, 376,2420, 700, +2421,2422,2423, 258, 768,1316,2424,1183,2425, 995, 608,2426,2427,2428,2429, 221, +2430,2431,2432,2433,2434,2435,2436,2437, 195, 323, 726, 188, 897, 983,1317, 377, + 644,1050, 879,2438, 452,2439,2440,2441,2442,2443,2444, 914,2445,2446,2447,2448, + 915, 489,2449,1514,1184,2450,2451, 515, 64, 427, 495,2452, 583,2453, 483, 485, +1038, 562, 213,1515, 748, 666,2454,2455,2456,2457, 334,2458, 780, 996,1008, 705, +1243,2459,2460,2461,2462,2463, 114,2464, 493,1146, 366, 163,1516, 961,1104,2465, + 291,2466,1318,1105,2467,1517, 365,2468, 355, 951,1244,2469,1319,2470, 631,2471, +2472, 218,1320, 364, 320, 756,1518,1519,1321,1520,1322,2473,2474,2475,2476, 997, +2477,2478,2479,2480, 665,1185,2481, 916,1521,2482,2483,2484, 584, 684,2485,2486, + 797,2487,1051,1186,2488,2489,2490,1522,2491,2492, 370,2493,1039,1187, 65,2494, + 434, 205, 463,1188,2495, 125, 812, 391, 402, 826, 699, 286, 398, 155, 781, 771, + 585,2496, 590, 505,1073,2497, 599, 244, 219, 917,1018, 952, 646,1523,2498,1323, +2499,2500, 49, 984, 354, 741,2501, 625,2502,1324,2503,1019, 190, 357, 757, 491, + 95, 782, 868,2504,2505,2506,2507,2508,2509, 134,1524,1074, 422,1525, 898,2510, + 161,2511,2512,2513,2514, 769,2515,1526,2516,2517, 411,1325,2518, 472,1527,2519, +2520,2521,2522,2523,2524, 985,2525,2526,2527,2528,2529,2530, 764,2531,1245,2532, +2533, 25, 204, 311,2534, 496,2535,1052,2536,2537,2538,2539,2540,2541,2542, 199, + 704, 504, 468, 758, 657,1528, 196, 44, 839,1246, 272, 750,2543, 765, 862,2544, +2545,1326,2546, 132, 615, 933,2547, 732,2548,2549,2550,1189,1529,2551, 283,1247, +1053, 607, 929,2552,2553,2554, 930, 183, 872, 616,1040,1147,2555,1148,1020, 441, + 249,1075,2556,2557,2558, 466, 743,2559,2560,2561, 92, 514, 426, 420, 526,2562, +2563,2564,2565,2566,2567,2568, 185,2569,2570,2571,2572, 776,1530, 658,2573, 362, +2574, 361, 922,1076, 793,2575,2576,2577,2578,2579,2580,1531, 251,2581,2582,2583, +2584,1532, 54, 612, 237,1327,2585,2586, 275, 408, 647, 111,2587,1533,1106, 465, + 3, 458, 9, 38,2588, 107, 110, 890, 209, 26, 737, 498,2589,1534,2590, 431, + 202, 88,1535, 356, 287,1107, 660,1149,2591, 381,1536, 986,1150, 445,1248,1151, + 974,2592,2593, 846,2594, 446, 953, 184,1249,1250, 727,2595, 923, 193, 883,2596, +2597,2598, 102, 324, 539, 817,2599, 421,1041,2600, 832,2601, 94, 175, 197, 406, +2602, 459,2603,2604,2605,2606,2607, 330, 555,2608,2609,2610, 706,1108, 389,2611, +2612,2613,2614, 233,2615, 833, 558, 931, 954,1251,2616,2617,1537, 546,2618,2619, +1009,2620,2621,2622,1538, 690,1328,2623, 955,2624,1539,2625,2626, 772,2627,2628, +2629,2630,2631, 924, 648, 863, 603,2632,2633, 934,1540, 864, 865,2634, 642,1042, + 670,1190,2635,2636,2637,2638, 168,2639, 652, 873, 542,1054,1541,2640,2641,2642, # 512, 256 +) + diff --git a/env/lib/python3.6/site-packages/pip/_vendor/chardet/euckrprober.py b/env/lib/python3.6/site-packages/pip/_vendor/chardet/euckrprober.py new file mode 100644 index 0000000..345a060 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/chardet/euckrprober.py @@ -0,0 +1,47 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. 
+# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .mbcharsetprober import MultiByteCharSetProber +from .codingstatemachine import CodingStateMachine +from .chardistribution import EUCKRDistributionAnalysis +from .mbcssm import EUCKR_SM_MODEL + + +class EUCKRProber(MultiByteCharSetProber): + def __init__(self): + super(EUCKRProber, self).__init__() + self.coding_sm = CodingStateMachine(EUCKR_SM_MODEL) + self.distribution_analyzer = EUCKRDistributionAnalysis() + self.reset() + + @property + def charset_name(self): + return "EUC-KR" + + @property + def language(self): + return "Korean" diff --git a/env/lib/python3.6/site-packages/pip/_vendor/chardet/euctwfreq.py b/env/lib/python3.6/site-packages/pip/_vendor/chardet/euctwfreq.py new file mode 100644 index 0000000..ed7a995 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/chardet/euctwfreq.py @@ -0,0 +1,387 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# EUCTW frequency table +# Converted from big5 work +# by Taiwan's Mandarin Promotion Council +# <http:#www.edu.tw:81/mandr/> + +# 128 --> 0.42261 +# 256 --> 0.57851 +# 512 --> 0.74851 +# 1024 --> 0.89384 +# 2048 --> 0.97583 +# +# Idea Distribution Ratio = 0.74851/(1-0.74851) =2.98 +# Random Distribution Ration = 512/(5401-512)=0.105 +# +# Typical Distribution Ratio about 25% of Ideal one, still much higher than RDR + +EUCTW_TYPICAL_DISTRIBUTION_RATIO = 0.75 + +# Char to FreqOrder table , +EUCTW_TABLE_SIZE = 5376 + +EUCTW_CHAR_TO_FREQ_ORDER = ( + 1,1800,1506, 255,1431, 198, 9, 82, 6,7310, 177, 202,3615,1256,2808, 110, # 2742 +3735, 33,3241, 261, 76, 44,2113, 16,2931,2184,1176, 659,3868, 26,3404,2643, # 2758 +1198,3869,3313,4060, 410,2211, 302, 590, 361,1963, 8, 204, 58,4296,7311,1931, # 2774 + 63,7312,7313, 317,1614, 75, 222, 159,4061,2412,1480,7314,3500,3068, 224,2809, # 2790 +3616, 3, 10,3870,1471, 29,2774,1135,2852,1939, 873, 130,3242,1123, 312,7315, # 2806 +4297,2051, 507, 252, 682,7316, 142,1914, 124, 206,2932, 34,3501,3173, 64, 604, # 2822 +7317,2494,1976,1977, 155,1990, 645, 641,1606,7318,3405, 337, 72, 406,7319, 80, # 2838 + 630, 238,3174,1509, 263, 939,1092,2644, 756,1440,1094,3406, 449, 69,2969, 591, # 2854 + 179,2095, 471, 115,2034,1843, 60, 50,2970, 134, 806,1868, 734,2035,3407, 180, # 2870 + 995,1607, 156, 537,2893, 688,7320, 319,1305, 779,2144, 514,2374, 298,4298, 359, # 2886 +2495, 90,2707,1338, 663, 11, 906,1099,2545, 20,2436, 182, 532,1716,7321, 732, # 2902 +1376,4062,1311,1420,3175, 25,2312,1056, 113, 399, 382,1949, 242,3408,2467, 529, # 2918 +3243, 475,1447,3617,7322, 117, 21, 656, 810,1297,2295,2329,3502,7323, 126,4063, # 2934 + 706, 456, 150, 613,4299, 71,1118,2036,4064, 145,3069, 85, 835, 486,2114,1246, # 2950 +1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,7324,2127,2354, 347,3736, 221, # 2966 +3503,3110,7325,1955,1153,4065, 83, 296,1199,3070, 192, 624, 93,7326, 822,1897, # 2982 +2810,3111, 795,2064, 991,1554,1542,1592, 27, 43,2853, 859, 139,1456, 860,4300, # 2998 + 437, 712,3871, 164,2392,3112, 695, 211,3017,2096, 195,3872,1608,3504,3505,3618, # 3014 +3873, 234, 811,2971,2097,3874,2229,1441,3506,1615,2375, 668,2076,1638, 305, 228, # 3030 +1664,4301, 467, 415,7327, 262,2098,1593, 239, 108, 300, 200,1033, 512,1247,2077, # 3046 +7328,7329,2173,3176,3619,2673, 593, 845,1062,3244, 88,1723,2037,3875,1950, 212, # 3062 + 266, 152, 149, 468,1898,4066,4302, 77, 187,7330,3018, 37, 5,2972,7331,3876, # 3078 +7332,7333, 39,2517,4303,2894,3177,2078, 55, 148, 74,4304, 545, 483,1474,1029, # 3094 +1665, 217,1869,1531,3113,1104,2645,4067, 24, 172,3507, 900,3877,3508,3509,4305, # 3110 + 32,1408,2811,1312, 329, 487,2355,2247,2708, 784,2674, 4,3019,3314,1427,1788, # 3126 + 188, 109, 499,7334,3620,1717,1789, 888,1217,3020,4306,7335,3510,7336,3315,1520, # 3142 +3621,3878, 196,1034, 775,7337,7338, 929,1815, 249, 439, 38,7339,1063,7340, 794, # 3158 +3879,1435,2296, 46, 178,3245,2065,7341,2376,7342, 214,1709,4307, 804, 35, 707, # 3174 + 324,3622,1601,2546, 140, 459,4068,7343,7344,1365, 839, 272, 978,2257,2572,3409, # 3190 +2128,1363,3623,1423, 697, 100,3071, 48, 70,1231, 495,3114,2193,7345,1294,7346, # 3206 +2079, 462, 586,1042,3246, 853, 256, 988, 185,2377,3410,1698, 434,1084,7347,3411, # 3222 + 
314,2615,2775,4308,2330,2331, 569,2280, 637,1816,2518, 757,1162,1878,1616,3412, # 3238 + 287,1577,2115, 768,4309,1671,2854,3511,2519,1321,3737, 909,2413,7348,4069, 933, # 3254 +3738,7349,2052,2356,1222,4310, 765,2414,1322, 786,4311,7350,1919,1462,1677,2895, # 3270 +1699,7351,4312,1424,2437,3115,3624,2590,3316,1774,1940,3413,3880,4070, 309,1369, # 3286 +1130,2812, 364,2230,1653,1299,3881,3512,3882,3883,2646, 525,1085,3021, 902,2000, # 3302 +1475, 964,4313, 421,1844,1415,1057,2281, 940,1364,3116, 376,4314,4315,1381, 7, # 3318 +2520, 983,2378, 336,1710,2675,1845, 321,3414, 559,1131,3022,2742,1808,1132,1313, # 3334 + 265,1481,1857,7352, 352,1203,2813,3247, 167,1089, 420,2814, 776, 792,1724,3513, # 3350 +4071,2438,3248,7353,4072,7354, 446, 229, 333,2743, 901,3739,1200,1557,4316,2647, # 3366 +1920, 395,2744,2676,3740,4073,1835, 125, 916,3178,2616,4317,7355,7356,3741,7357, # 3382 +7358,7359,4318,3117,3625,1133,2547,1757,3415,1510,2313,1409,3514,7360,2145, 438, # 3398 +2591,2896,2379,3317,1068, 958,3023, 461, 311,2855,2677,4074,1915,3179,4075,1978, # 3414 + 383, 750,2745,2617,4076, 274, 539, 385,1278,1442,7361,1154,1964, 384, 561, 210, # 3430 + 98,1295,2548,3515,7362,1711,2415,1482,3416,3884,2897,1257, 129,7363,3742, 642, # 3446 + 523,2776,2777,2648,7364, 141,2231,1333, 68, 176, 441, 876, 907,4077, 603,2592, # 3462 + 710, 171,3417, 404, 549, 18,3118,2393,1410,3626,1666,7365,3516,4319,2898,4320, # 3478 +7366,2973, 368,7367, 146, 366, 99, 871,3627,1543, 748, 807,1586,1185, 22,2258, # 3494 + 379,3743,3180,7368,3181, 505,1941,2618,1991,1382,2314,7369, 380,2357, 218, 702, # 3510 +1817,1248,3418,3024,3517,3318,3249,7370,2974,3628, 930,3250,3744,7371, 59,7372, # 3526 + 585, 601,4078, 497,3419,1112,1314,4321,1801,7373,1223,1472,2174,7374, 749,1836, # 3542 + 690,1899,3745,1772,3885,1476, 429,1043,1790,2232,2116, 917,4079, 447,1086,1629, # 3558 +7375, 556,7376,7377,2020,1654, 844,1090, 105, 550, 966,1758,2815,1008,1782, 686, # 3574 +1095,7378,2282, 793,1602,7379,3518,2593,4322,4080,2933,2297,4323,3746, 980,2496, # 3590 + 544, 353, 527,4324, 908,2678,2899,7380, 381,2619,1942,1348,7381,1341,1252, 560, # 3606 +3072,7382,3420,2856,7383,2053, 973, 886,2080, 143,4325,7384,7385, 157,3886, 496, # 3622 +4081, 57, 840, 540,2038,4326,4327,3421,2117,1445, 970,2259,1748,1965,2081,4082, # 3638 +3119,1234,1775,3251,2816,3629, 773,1206,2129,1066,2039,1326,3887,1738,1725,4083, # 3654 + 279,3120, 51,1544,2594, 423,1578,2130,2066, 173,4328,1879,7386,7387,1583, 264, # 3670 + 610,3630,4329,2439, 280, 154,7388,7389,7390,1739, 338,1282,3073, 693,2857,1411, # 3686 +1074,3747,2440,7391,4330,7392,7393,1240, 952,2394,7394,2900,1538,2679, 685,1483, # 3702 +4084,2468,1436, 953,4085,2054,4331, 671,2395, 79,4086,2441,3252, 608, 567,2680, # 3718 +3422,4087,4088,1691, 393,1261,1791,2396,7395,4332,7396,7397,7398,7399,1383,1672, # 3734 +3748,3182,1464, 522,1119, 661,1150, 216, 675,4333,3888,1432,3519, 609,4334,2681, # 3750 +2397,7400,7401,7402,4089,3025, 0,7403,2469, 315, 231,2442, 301,3319,4335,2380, # 3766 +7404, 233,4090,3631,1818,4336,4337,7405, 96,1776,1315,2082,7406, 257,7407,1809, # 3782 +3632,2709,1139,1819,4091,2021,1124,2163,2778,1777,2649,7408,3074, 363,1655,3183, # 3798 +7409,2975,7410,7411,7412,3889,1567,3890, 718, 103,3184, 849,1443, 341,3320,2934, # 3814 +1484,7413,1712, 127, 67, 339,4092,2398, 679,1412, 821,7414,7415, 834, 738, 351, # 3830 +2976,2146, 846, 235,1497,1880, 418,1992,3749,2710, 186,1100,2147,2746,3520,1545, # 3846 +1355,2935,2858,1377, 583,3891,4093,2573,2977,7416,1298,3633,1078,2549,3634,2358, # 3862 + 
78,3750,3751, 267,1289,2099,2001,1594,4094, 348, 369,1274,2194,2175,1837,4338, # 3878 +1820,2817,3635,2747,2283,2002,4339,2936,2748, 144,3321, 882,4340,3892,2749,3423, # 3894 +4341,2901,7417,4095,1726, 320,7418,3893,3026, 788,2978,7419,2818,1773,1327,2859, # 3910 +3894,2819,7420,1306,4342,2003,1700,3752,3521,2359,2650, 787,2022, 506, 824,3636, # 3926 + 534, 323,4343,1044,3322,2023,1900, 946,3424,7421,1778,1500,1678,7422,1881,4344, # 3942 + 165, 243,4345,3637,2521, 123, 683,4096, 764,4346, 36,3895,1792, 589,2902, 816, # 3958 + 626,1667,3027,2233,1639,1555,1622,3753,3896,7423,3897,2860,1370,1228,1932, 891, # 3974 +2083,2903, 304,4097,7424, 292,2979,2711,3522, 691,2100,4098,1115,4347, 118, 662, # 3990 +7425, 611,1156, 854,2381,1316,2861, 2, 386, 515,2904,7426,7427,3253, 868,2234, # 4006 +1486, 855,2651, 785,2212,3028,7428,1040,3185,3523,7429,3121, 448,7430,1525,7431, # 4022 +2164,4348,7432,3754,7433,4099,2820,3524,3122, 503, 818,3898,3123,1568, 814, 676, # 4038 +1444, 306,1749,7434,3755,1416,1030, 197,1428, 805,2821,1501,4349,7435,7436,7437, # 4054 +1993,7438,4350,7439,7440,2195, 13,2779,3638,2980,3124,1229,1916,7441,3756,2131, # 4070 +7442,4100,4351,2399,3525,7443,2213,1511,1727,1120,7444,7445, 646,3757,2443, 307, # 4086 +7446,7447,1595,3186,7448,7449,7450,3639,1113,1356,3899,1465,2522,2523,7451, 519, # 4102 +7452, 128,2132, 92,2284,1979,7453,3900,1512, 342,3125,2196,7454,2780,2214,1980, # 4118 +3323,7455, 290,1656,1317, 789, 827,2360,7456,3758,4352, 562, 581,3901,7457, 401, # 4134 +4353,2248, 94,4354,1399,2781,7458,1463,2024,4355,3187,1943,7459, 828,1105,4101, # 4150 +1262,1394,7460,4102, 605,4356,7461,1783,2862,7462,2822, 819,2101, 578,2197,2937, # 4166 +7463,1502, 436,3254,4103,3255,2823,3902,2905,3425,3426,7464,2712,2315,7465,7466, # 4182 +2332,2067, 23,4357, 193, 826,3759,2102, 699,1630,4104,3075, 390,1793,1064,3526, # 4198 +7467,1579,3076,3077,1400,7468,4105,1838,1640,2863,7469,4358,4359, 137,4106, 598, # 4214 +3078,1966, 780, 104, 974,2938,7470, 278, 899, 253, 402, 572, 504, 493,1339,7471, # 4230 +3903,1275,4360,2574,2550,7472,3640,3029,3079,2249, 565,1334,2713, 863, 41,7473, # 4246 +7474,4361,7475,1657,2333, 19, 463,2750,4107, 606,7476,2981,3256,1087,2084,1323, # 4262 +2652,2982,7477,1631,1623,1750,4108,2682,7478,2864, 791,2714,2653,2334, 232,2416, # 4278 +7479,2983,1498,7480,2654,2620, 755,1366,3641,3257,3126,2025,1609, 119,1917,3427, # 4294 + 862,1026,4109,7481,3904,3760,4362,3905,4363,2260,1951,2470,7482,1125, 817,4110, # 4310 +4111,3906,1513,1766,2040,1487,4112,3030,3258,2824,3761,3127,7483,7484,1507,7485, # 4326 +2683, 733, 40,1632,1106,2865, 345,4113, 841,2524, 230,4364,2984,1846,3259,3428, # 4342 +7486,1263, 986,3429,7487, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562,3907, # 4358 +3908,2939, 967,2751,2655,1349, 592,2133,1692,3324,2985,1994,4114,1679,3909,1901, # 4374 +2185,7488, 739,3642,2715,1296,1290,7489,4115,2198,2199,1921,1563,2595,2551,1870, # 4390 +2752,2986,7490, 435,7491, 343,1108, 596, 17,1751,4365,2235,3430,3643,7492,4366, # 4406 + 294,3527,2940,1693, 477, 979, 281,2041,3528, 643,2042,3644,2621,2782,2261,1031, # 4422 +2335,2134,2298,3529,4367, 367,1249,2552,7493,3530,7494,4368,1283,3325,2004, 240, # 4438 +1762,3326,4369,4370, 836,1069,3128, 474,7495,2148,2525, 268,3531,7496,3188,1521, # 4454 +1284,7497,1658,1546,4116,7498,3532,3533,7499,4117,3327,2684,1685,4118, 961,1673, # 4470 +2622, 190,2005,2200,3762,4371,4372,7500, 570,2497,3645,1490,7501,4373,2623,3260, # 4486 +1956,4374, 584,1514, 396,1045,1944,7502,4375,1967,2444,7503,7504,4376,3910, 619, # 4502 
+7505,3129,3261, 215,2006,2783,2553,3189,4377,3190,4378, 763,4119,3763,4379,7506, # 4518 +7507,1957,1767,2941,3328,3646,1174, 452,1477,4380,3329,3130,7508,2825,1253,2382, # 4534 +2186,1091,2285,4120, 492,7509, 638,1169,1824,2135,1752,3911, 648, 926,1021,1324, # 4550 +4381, 520,4382, 997, 847,1007, 892,4383,3764,2262,1871,3647,7510,2400,1784,4384, # 4566 +1952,2942,3080,3191,1728,4121,2043,3648,4385,2007,1701,3131,1551, 30,2263,4122, # 4582 +7511,2026,4386,3534,7512, 501,7513,4123, 594,3431,2165,1821,3535,3432,3536,3192, # 4598 + 829,2826,4124,7514,1680,3132,1225,4125,7515,3262,4387,4126,3133,2336,7516,4388, # 4614 +4127,7517,3912,3913,7518,1847,2383,2596,3330,7519,4389, 374,3914, 652,4128,4129, # 4630 + 375,1140, 798,7520,7521,7522,2361,4390,2264, 546,1659, 138,3031,2445,4391,7523, # 4646 +2250, 612,1848, 910, 796,3765,1740,1371, 825,3766,3767,7524,2906,2554,7525, 692, # 4662 + 444,3032,2624, 801,4392,4130,7526,1491, 244,1053,3033,4131,4132, 340,7527,3915, # 4678 +1041,2987, 293,1168, 87,1357,7528,1539, 959,7529,2236, 721, 694,4133,3768, 219, # 4694 +1478, 644,1417,3331,2656,1413,1401,1335,1389,3916,7530,7531,2988,2362,3134,1825, # 4710 + 730,1515, 184,2827, 66,4393,7532,1660,2943, 246,3332, 378,1457, 226,3433, 975, # 4726 +3917,2944,1264,3537, 674, 696,7533, 163,7534,1141,2417,2166, 713,3538,3333,4394, # 4742 +3918,7535,7536,1186, 15,7537,1079,1070,7538,1522,3193,3539, 276,1050,2716, 758, # 4758 +1126, 653,2945,3263,7539,2337, 889,3540,3919,3081,2989, 903,1250,4395,3920,3434, # 4774 +3541,1342,1681,1718, 766,3264, 286, 89,2946,3649,7540,1713,7541,2597,3334,2990, # 4790 +7542,2947,2215,3194,2866,7543,4396,2498,2526, 181, 387,1075,3921, 731,2187,3335, # 4806 +7544,3265, 310, 313,3435,2299, 770,4134, 54,3034, 189,4397,3082,3769,3922,7545, # 4822 +1230,1617,1849, 355,3542,4135,4398,3336, 111,4136,3650,1350,3135,3436,3035,4137, # 4838 +2149,3266,3543,7546,2784,3923,3924,2991, 722,2008,7547,1071, 247,1207,2338,2471, # 4854 +1378,4399,2009, 864,1437,1214,4400, 373,3770,1142,2216, 667,4401, 442,2753,2555, # 4870 +3771,3925,1968,4138,3267,1839, 837, 170,1107, 934,1336,1882,7548,7549,2118,4139, # 4886 +2828, 743,1569,7550,4402,4140, 582,2384,1418,3437,7551,1802,7552, 357,1395,1729, # 4902 +3651,3268,2418,1564,2237,7553,3083,3772,1633,4403,1114,2085,4141,1532,7554, 482, # 4918 +2446,4404,7555,7556,1492, 833,1466,7557,2717,3544,1641,2829,7558,1526,1272,3652, # 4934 +4142,1686,1794, 416,2556,1902,1953,1803,7559,3773,2785,3774,1159,2316,7560,2867, # 4950 +4405,1610,1584,3036,2419,2754, 443,3269,1163,3136,7561,7562,3926,7563,4143,2499, # 4966 +3037,4406,3927,3137,2103,1647,3545,2010,1872,4144,7564,4145, 431,3438,7565, 250, # 4982 + 97, 81,4146,7566,1648,1850,1558, 160, 848,7567, 866, 740,1694,7568,2201,2830, # 4998 +3195,4147,4407,3653,1687, 950,2472, 426, 469,3196,3654,3655,3928,7569,7570,1188, # 5014 + 424,1995, 861,3546,4148,3775,2202,2685, 168,1235,3547,4149,7571,2086,1674,4408, # 5030 +3337,3270, 220,2557,1009,7572,3776, 670,2992, 332,1208, 717,7573,7574,3548,2447, # 5046 +3929,3338,7575, 513,7576,1209,2868,3339,3138,4409,1080,7577,7578,7579,7580,2527, # 5062 +3656,3549, 815,1587,3930,3931,7581,3550,3439,3777,1254,4410,1328,3038,1390,3932, # 5078 +1741,3933,3778,3934,7582, 236,3779,2448,3271,7583,7584,3657,3780,1273,3781,4411, # 5094 +7585, 308,7586,4412, 245,4413,1851,2473,1307,2575, 430, 715,2136,2449,7587, 270, # 5110 + 199,2869,3935,7588,3551,2718,1753, 761,1754, 725,1661,1840,4414,3440,3658,7589, # 5126 +7590, 587, 14,3272, 227,2598, 326, 480,2265, 943,2755,3552, 291, 650,1883,7591, # 5142 
+1702,1226, 102,1547, 62,3441, 904,4415,3442,1164,4150,7592,7593,1224,1548,2756, # 5158 + 391, 498,1493,7594,1386,1419,7595,2055,1177,4416, 813, 880,1081,2363, 566,1145, # 5174 +4417,2286,1001,1035,2558,2599,2238, 394,1286,7596,7597,2068,7598, 86,1494,1730, # 5190 +3936, 491,1588, 745, 897,2948, 843,3340,3937,2757,2870,3273,1768, 998,2217,2069, # 5206 + 397,1826,1195,1969,3659,2993,3341, 284,7599,3782,2500,2137,2119,1903,7600,3938, # 5222 +2150,3939,4151,1036,3443,1904, 114,2559,4152, 209,1527,7601,7602,2949,2831,2625, # 5238 +2385,2719,3139, 812,2560,7603,3274,7604,1559, 737,1884,3660,1210, 885, 28,2686, # 5254 +3553,3783,7605,4153,1004,1779,4418,7606, 346,1981,2218,2687,4419,3784,1742, 797, # 5270 +1642,3940,1933,1072,1384,2151, 896,3941,3275,3661,3197,2871,3554,7607,2561,1958, # 5286 +4420,2450,1785,7608,7609,7610,3942,4154,1005,1308,3662,4155,2720,4421,4422,1528, # 5302 +2600, 161,1178,4156,1982, 987,4423,1101,4157, 631,3943,1157,3198,2420,1343,1241, # 5318 +1016,2239,2562, 372, 877,2339,2501,1160, 555,1934, 911,3944,7611, 466,1170, 169, # 5334 +1051,2907,2688,3663,2474,2994,1182,2011,2563,1251,2626,7612, 992,2340,3444,1540, # 5350 +2721,1201,2070,2401,1996,2475,7613,4424, 528,1922,2188,1503,1873,1570,2364,3342, # 5366 +3276,7614, 557,1073,7615,1827,3445,2087,2266,3140,3039,3084, 767,3085,2786,4425, # 5382 +1006,4158,4426,2341,1267,2176,3664,3199, 778,3945,3200,2722,1597,2657,7616,4427, # 5398 +7617,3446,7618,7619,7620,3277,2689,1433,3278, 131, 95,1504,3946, 723,4159,3141, # 5414 +1841,3555,2758,2189,3947,2027,2104,3665,7621,2995,3948,1218,7622,3343,3201,3949, # 5430 +4160,2576, 248,1634,3785, 912,7623,2832,3666,3040,3786, 654, 53,7624,2996,7625, # 5446 +1688,4428, 777,3447,1032,3950,1425,7626, 191, 820,2120,2833, 971,4429, 931,3202, # 5462 + 135, 664, 783,3787,1997, 772,2908,1935,3951,3788,4430,2909,3203, 282,2723, 640, # 5478 +1372,3448,1127, 922, 325,3344,7627,7628, 711,2044,7629,7630,3952,2219,2787,1936, # 5494 +3953,3345,2220,2251,3789,2300,7631,4431,3790,1258,3279,3954,3204,2138,2950,3955, # 5510 +3956,7632,2221, 258,3205,4432, 101,1227,7633,3280,1755,7634,1391,3281,7635,2910, # 5526 +2056, 893,7636,7637,7638,1402,4161,2342,7639,7640,3206,3556,7641,7642, 878,1325, # 5542 +1780,2788,4433, 259,1385,2577, 744,1183,2267,4434,7643,3957,2502,7644, 684,1024, # 5558 +4162,7645, 472,3557,3449,1165,3282,3958,3959, 322,2152, 881, 455,1695,1152,1340, # 5574 + 660, 554,2153,4435,1058,4436,4163, 830,1065,3346,3960,4437,1923,7646,1703,1918, # 5590 +7647, 932,2268, 122,7648,4438, 947, 677,7649,3791,2627, 297,1905,1924,2269,4439, # 5606 +2317,3283,7650,7651,4164,7652,4165, 84,4166, 112, 989,7653, 547,1059,3961, 701, # 5622 +3558,1019,7654,4167,7655,3450, 942, 639, 457,2301,2451, 993,2951, 407, 851, 494, # 5638 +4440,3347, 927,7656,1237,7657,2421,3348, 573,4168, 680, 921,2911,1279,1874, 285, # 5654 + 790,1448,1983, 719,2167,7658,7659,4441,3962,3963,1649,7660,1541, 563,7661,1077, # 5670 +7662,3349,3041,3451, 511,2997,3964,3965,3667,3966,1268,2564,3350,3207,4442,4443, # 5686 +7663, 535,1048,1276,1189,2912,2028,3142,1438,1373,2834,2952,1134,2012,7664,4169, # 5702 +1238,2578,3086,1259,7665, 700,7666,2953,3143,3668,4170,7667,4171,1146,1875,1906, # 5718 +4444,2601,3967, 781,2422, 132,1589, 203, 147, 273,2789,2402, 898,1786,2154,3968, # 5734 +3969,7668,3792,2790,7669,7670,4445,4446,7671,3208,7672,1635,3793, 965,7673,1804, # 5750 +2690,1516,3559,1121,1082,1329,3284,3970,1449,3794, 65,1128,2835,2913,2759,1590, # 5766 +3795,7674,7675, 12,2658, 45, 976,2579,3144,4447, 517,2528,1013,1037,3209,7676, # 5782 
+3796,2836,7677,3797,7678,3452,7679,2602, 614,1998,2318,3798,3087,2724,2628,7680, # 5798 +2580,4172, 599,1269,7681,1810,3669,7682,2691,3088, 759,1060, 489,1805,3351,3285, # 5814 +1358,7683,7684,2386,1387,1215,2629,2252, 490,7685,7686,4173,1759,2387,2343,7687, # 5830 +4448,3799,1907,3971,2630,1806,3210,4449,3453,3286,2760,2344, 874,7688,7689,3454, # 5846 +3670,1858, 91,2914,3671,3042,3800,4450,7690,3145,3972,2659,7691,3455,1202,1403, # 5862 +3801,2954,2529,1517,2503,4451,3456,2504,7692,4452,7693,2692,1885,1495,1731,3973, # 5878 +2365,4453,7694,2029,7695,7696,3974,2693,1216, 237,2581,4174,2319,3975,3802,4454, # 5894 +4455,2694,3560,3457, 445,4456,7697,7698,7699,7700,2761, 61,3976,3672,1822,3977, # 5910 +7701, 687,2045, 935, 925, 405,2660, 703,1096,1859,2725,4457,3978,1876,1367,2695, # 5926 +3352, 918,2105,1781,2476, 334,3287,1611,1093,4458, 564,3146,3458,3673,3353, 945, # 5942 +2631,2057,4459,7702,1925, 872,4175,7703,3459,2696,3089, 349,4176,3674,3979,4460, # 5958 +3803,4177,3675,2155,3980,4461,4462,4178,4463,2403,2046, 782,3981, 400, 251,4179, # 5974 +1624,7704,7705, 277,3676, 299,1265, 476,1191,3804,2121,4180,4181,1109, 205,7706, # 5990 +2582,1000,2156,3561,1860,7707,7708,7709,4464,7710,4465,2565, 107,2477,2157,3982, # 6006 +3460,3147,7711,1533, 541,1301, 158, 753,4182,2872,3562,7712,1696, 370,1088,4183, # 6022 +4466,3563, 579, 327, 440, 162,2240, 269,1937,1374,3461, 968,3043, 56,1396,3090, # 6038 +2106,3288,3354,7713,1926,2158,4467,2998,7714,3564,7715,7716,3677,4468,2478,7717, # 6054 +2791,7718,1650,4469,7719,2603,7720,7721,3983,2661,3355,1149,3356,3984,3805,3985, # 6070 +7722,1076, 49,7723, 951,3211,3289,3290, 450,2837, 920,7724,1811,2792,2366,4184, # 6086 +1908,1138,2367,3806,3462,7725,3212,4470,1909,1147,1518,2423,4471,3807,7726,4472, # 6102 +2388,2604, 260,1795,3213,7727,7728,3808,3291, 708,7729,3565,1704,7730,3566,1351, # 6118 +1618,3357,2999,1886, 944,4185,3358,4186,3044,3359,4187,7731,3678, 422, 413,1714, # 6134 +3292, 500,2058,2345,4188,2479,7732,1344,1910, 954,7733,1668,7734,7735,3986,2404, # 6150 +4189,3567,3809,4190,7736,2302,1318,2505,3091, 133,3092,2873,4473, 629, 31,2838, # 6166 +2697,3810,4474, 850, 949,4475,3987,2955,1732,2088,4191,1496,1852,7737,3988, 620, # 6182 +3214, 981,1242,3679,3360,1619,3680,1643,3293,2139,2452,1970,1719,3463,2168,7738, # 6198 +3215,7739,7740,3361,1828,7741,1277,4476,1565,2047,7742,1636,3568,3093,7743, 869, # 6214 +2839, 655,3811,3812,3094,3989,3000,3813,1310,3569,4477,7744,7745,7746,1733, 558, # 6230 +4478,3681, 335,1549,3045,1756,4192,3682,1945,3464,1829,1291,1192, 470,2726,2107, # 6246 +2793, 913,1054,3990,7747,1027,7748,3046,3991,4479, 982,2662,3362,3148,3465,3216, # 6262 +3217,1946,2794,7749, 571,4480,7750,1830,7751,3570,2583,1523,2424,7752,2089, 984, # 6278 +4481,3683,1959,7753,3684, 852, 923,2795,3466,3685, 969,1519, 999,2048,2320,1705, # 6294 +7754,3095, 615,1662, 151, 597,3992,2405,2321,1049, 275,4482,3686,4193, 568,3687, # 6310 +3571,2480,4194,3688,7755,2425,2270, 409,3218,7756,1566,2874,3467,1002, 769,2840, # 6326 + 194,2090,3149,3689,2222,3294,4195, 628,1505,7757,7758,1763,2177,3001,3993, 521, # 6342 +1161,2584,1787,2203,2406,4483,3994,1625,4196,4197, 412, 42,3096, 464,7759,2632, # 6358 +4484,3363,1760,1571,2875,3468,2530,1219,2204,3814,2633,2140,2368,4485,4486,3295, # 6374 +1651,3364,3572,7760,7761,3573,2481,3469,7762,3690,7763,7764,2271,2091, 460,7765, # 6390 +4487,7766,3002, 962, 588,3574, 289,3219,2634,1116, 52,7767,3047,1796,7768,7769, # 6406 +7770,1467,7771,1598,1143,3691,4198,1984,1734,1067,4488,1280,3365, 465,4489,1572, # 6422 + 
510,7772,1927,2241,1812,1644,3575,7773,4490,3692,7774,7775,2663,1573,1534,7776, # 6438 +7777,4199, 536,1807,1761,3470,3815,3150,2635,7778,7779,7780,4491,3471,2915,1911, # 6454 +2796,7781,3296,1122, 377,3220,7782, 360,7783,7784,4200,1529, 551,7785,2059,3693, # 6470 +1769,2426,7786,2916,4201,3297,3097,2322,2108,2030,4492,1404, 136,1468,1479, 672, # 6486 +1171,3221,2303, 271,3151,7787,2762,7788,2049, 678,2727, 865,1947,4493,7789,2013, # 6502 +3995,2956,7790,2728,2223,1397,3048,3694,4494,4495,1735,2917,3366,3576,7791,3816, # 6518 + 509,2841,2453,2876,3817,7792,7793,3152,3153,4496,4202,2531,4497,2304,1166,1010, # 6534 + 552, 681,1887,7794,7795,2957,2958,3996,1287,1596,1861,3154, 358, 453, 736, 175, # 6550 + 478,1117, 905,1167,1097,7796,1853,1530,7797,1706,7798,2178,3472,2287,3695,3473, # 6566 +3577,4203,2092,4204,7799,3367,1193,2482,4205,1458,2190,2205,1862,1888,1421,3298, # 6582 +2918,3049,2179,3474, 595,2122,7800,3997,7801,7802,4206,1707,2636, 223,3696,1359, # 6598 + 751,3098, 183,3475,7803,2797,3003, 419,2369, 633, 704,3818,2389, 241,7804,7805, # 6614 +7806, 838,3004,3697,2272,2763,2454,3819,1938,2050,3998,1309,3099,2242,1181,7807, # 6630 +1136,2206,3820,2370,1446,4207,2305,4498,7808,7809,4208,1055,2605, 484,3698,7810, # 6646 +3999, 625,4209,2273,3368,1499,4210,4000,7811,4001,4211,3222,2274,2275,3476,7812, # 6662 +7813,2764, 808,2606,3699,3369,4002,4212,3100,2532, 526,3370,3821,4213, 955,7814, # 6678 +1620,4214,2637,2427,7815,1429,3700,1669,1831, 994, 928,7816,3578,1260,7817,7818, # 6694 +7819,1948,2288, 741,2919,1626,4215,2729,2455, 867,1184, 362,3371,1392,7820,7821, # 6710 +4003,4216,1770,1736,3223,2920,4499,4500,1928,2698,1459,1158,7822,3050,3372,2877, # 6726 +1292,1929,2506,2842,3701,1985,1187,2071,2014,2607,4217,7823,2566,2507,2169,3702, # 6742 +2483,3299,7824,3703,4501,7825,7826, 666,1003,3005,1022,3579,4218,7827,4502,1813, # 6758 +2253, 574,3822,1603, 295,1535, 705,3823,4219, 283, 858, 417,7828,7829,3224,4503, # 6774 +4504,3051,1220,1889,1046,2276,2456,4004,1393,1599, 689,2567, 388,4220,7830,2484, # 6790 + 802,7831,2798,3824,2060,1405,2254,7832,4505,3825,2109,1052,1345,3225,1585,7833, # 6806 + 809,7834,7835,7836, 575,2730,3477, 956,1552,1469,1144,2323,7837,2324,1560,2457, # 6822 +3580,3226,4005, 616,2207,3155,2180,2289,7838,1832,7839,3478,4506,7840,1319,3704, # 6838 +3705,1211,3581,1023,3227,1293,2799,7841,7842,7843,3826, 607,2306,3827, 762,2878, # 6854 +1439,4221,1360,7844,1485,3052,7845,4507,1038,4222,1450,2061,2638,4223,1379,4508, # 6870 +2585,7846,7847,4224,1352,1414,2325,2921,1172,7848,7849,3828,3829,7850,1797,1451, # 6886 +7851,7852,7853,7854,2922,4006,4007,2485,2346, 411,4008,4009,3582,3300,3101,4509, # 6902 +1561,2664,1452,4010,1375,7855,7856, 47,2959, 316,7857,1406,1591,2923,3156,7858, # 6918 +1025,2141,3102,3157, 354,2731, 884,2224,4225,2407, 508,3706, 726,3583, 996,2428, # 6934 +3584, 729,7859, 392,2191,1453,4011,4510,3707,7860,7861,2458,3585,2608,1675,2800, # 6950 + 919,2347,2960,2348,1270,4511,4012, 73,7862,7863, 647,7864,3228,2843,2255,1550, # 6966 +1346,3006,7865,1332, 883,3479,7866,7867,7868,7869,3301,2765,7870,1212, 831,1347, # 6982 +4226,4512,2326,3830,1863,3053, 720,3831,4513,4514,3832,7871,4227,7872,7873,4515, # 6998 +7874,7875,1798,4516,3708,2609,4517,3586,1645,2371,7876,7877,2924, 669,2208,2665, # 7014 +2429,7878,2879,7879,7880,1028,3229,7881,4228,2408,7882,2256,1353,7883,7884,4518, # 7030 +3158, 518,7885,4013,7886,4229,1960,7887,2142,4230,7888,7889,3007,2349,2350,3833, # 7046 + 516,1833,1454,4014,2699,4231,4519,2225,2610,1971,1129,3587,7890,2766,7891,2961, # 
7062 +1422, 577,1470,3008,1524,3373,7892,7893, 432,4232,3054,3480,7894,2586,1455,2508, # 7078 +2226,1972,1175,7895,1020,2732,4015,3481,4520,7896,2733,7897,1743,1361,3055,3482, # 7094 +2639,4016,4233,4521,2290, 895, 924,4234,2170, 331,2243,3056, 166,1627,3057,1098, # 7110 +7898,1232,2880,2227,3374,4522, 657, 403,1196,2372, 542,3709,3375,1600,4235,3483, # 7126 +7899,4523,2767,3230, 576, 530,1362,7900,4524,2533,2666,3710,4017,7901, 842,3834, # 7142 +7902,2801,2031,1014,4018, 213,2700,3376, 665, 621,4236,7903,3711,2925,2430,7904, # 7158 +2431,3302,3588,3377,7905,4237,2534,4238,4525,3589,1682,4239,3484,1380,7906, 724, # 7174 +2277, 600,1670,7907,1337,1233,4526,3103,2244,7908,1621,4527,7909, 651,4240,7910, # 7190 +1612,4241,2611,7911,2844,7912,2734,2307,3058,7913, 716,2459,3059, 174,1255,2701, # 7206 +4019,3590, 548,1320,1398, 728,4020,1574,7914,1890,1197,3060,4021,7915,3061,3062, # 7222 +3712,3591,3713, 747,7916, 635,4242,4528,7917,7918,7919,4243,7920,7921,4529,7922, # 7238 +3378,4530,2432, 451,7923,3714,2535,2072,4244,2735,4245,4022,7924,1764,4531,7925, # 7254 +4246, 350,7926,2278,2390,2486,7927,4247,4023,2245,1434,4024, 488,4532, 458,4248, # 7270 +4025,3715, 771,1330,2391,3835,2568,3159,2159,2409,1553,2667,3160,4249,7928,2487, # 7286 +2881,2612,1720,2702,4250,3379,4533,7929,2536,4251,7930,3231,4252,2768,7931,2015, # 7302 +2736,7932,1155,1017,3716,3836,7933,3303,2308, 201,1864,4253,1430,7934,4026,7935, # 7318 +7936,7937,7938,7939,4254,1604,7940, 414,1865, 371,2587,4534,4535,3485,2016,3104, # 7334 +4536,1708, 960,4255, 887, 389,2171,1536,1663,1721,7941,2228,4027,2351,2926,1580, # 7350 +7942,7943,7944,1744,7945,2537,4537,4538,7946,4539,7947,2073,7948,7949,3592,3380, # 7366 +2882,4256,7950,4257,2640,3381,2802, 673,2703,2460, 709,3486,4028,3593,4258,7951, # 7382 +1148, 502, 634,7952,7953,1204,4540,3594,1575,4541,2613,3717,7954,3718,3105, 948, # 7398 +3232, 121,1745,3837,1110,7955,4259,3063,2509,3009,4029,3719,1151,1771,3838,1488, # 7414 +4030,1986,7956,2433,3487,7957,7958,2093,7959,4260,3839,1213,1407,2803, 531,2737, # 7430 +2538,3233,1011,1537,7960,2769,4261,3106,1061,7961,3720,3721,1866,2883,7962,2017, # 7446 + 120,4262,4263,2062,3595,3234,2309,3840,2668,3382,1954,4542,7963,7964,3488,1047, # 7462 +2704,1266,7965,1368,4543,2845, 649,3383,3841,2539,2738,1102,2846,2669,7966,7967, # 7478 +1999,7968,1111,3596,2962,7969,2488,3842,3597,2804,1854,3384,3722,7970,7971,3385, # 7494 +2410,2884,3304,3235,3598,7972,2569,7973,3599,2805,4031,1460, 856,7974,3600,7975, # 7510 +2885,2963,7976,2886,3843,7977,4264, 632,2510, 875,3844,1697,3845,2291,7978,7979, # 7526 +4544,3010,1239, 580,4545,4265,7980, 914, 936,2074,1190,4032,1039,2123,7981,7982, # 7542 +7983,3386,1473,7984,1354,4266,3846,7985,2172,3064,4033, 915,3305,4267,4268,3306, # 7558 +1605,1834,7986,2739, 398,3601,4269,3847,4034, 328,1912,2847,4035,3848,1331,4270, # 7574 +3011, 937,4271,7987,3602,4036,4037,3387,2160,4546,3388, 524, 742, 538,3065,1012, # 7590 +7988,7989,3849,2461,7990, 658,1103, 225,3850,7991,7992,4547,7993,4548,7994,3236, # 7606 +1243,7995,4038, 963,2246,4549,7996,2705,3603,3161,7997,7998,2588,2327,7999,4550, # 7622 +8000,8001,8002,3489,3307, 957,3389,2540,2032,1930,2927,2462, 870,2018,3604,1746, # 7638 +2770,2771,2434,2463,8003,3851,8004,3723,3107,3724,3490,3390,3725,8005,1179,3066, # 7654 +8006,3162,2373,4272,3726,2541,3163,3108,2740,4039,8007,3391,1556,2542,2292, 977, # 7670 +2887,2033,4040,1205,3392,8008,1765,3393,3164,2124,1271,1689, 714,4551,3491,8009, # 7686 +2328,3852, 533,4273,3605,2181, 
617,8010,2464,3308,3492,2310,8011,8012,3165,8013, # 7702 +8014,3853,1987, 618, 427,2641,3493,3394,8015,8016,1244,1690,8017,2806,4274,4552, # 7718 +8018,3494,8019,8020,2279,1576, 473,3606,4275,3395, 972,8021,3607,8022,3067,8023, # 7734 +8024,4553,4554,8025,3727,4041,4042,8026, 153,4555, 356,8027,1891,2888,4276,2143, # 7750 + 408, 803,2352,8028,3854,8029,4277,1646,2570,2511,4556,4557,3855,8030,3856,4278, # 7766 +8031,2411,3396, 752,8032,8033,1961,2964,8034, 746,3012,2465,8035,4279,3728, 698, # 7782 +4558,1892,4280,3608,2543,4559,3609,3857,8036,3166,3397,8037,1823,1302,4043,2706, # 7798 +3858,1973,4281,8038,4282,3167, 823,1303,1288,1236,2848,3495,4044,3398, 774,3859, # 7814 +8039,1581,4560,1304,2849,3860,4561,8040,2435,2161,1083,3237,4283,4045,4284, 344, # 7830 +1173, 288,2311, 454,1683,8041,8042,1461,4562,4046,2589,8043,8044,4563, 985, 894, # 7846 +8045,3399,3168,8046,1913,2928,3729,1988,8047,2110,1974,8048,4047,8049,2571,1194, # 7862 + 425,8050,4564,3169,1245,3730,4285,8051,8052,2850,8053, 636,4565,1855,3861, 760, # 7878 +1799,8054,4286,2209,1508,4566,4048,1893,1684,2293,8055,8056,8057,4287,4288,2210, # 7894 + 479,8058,8059, 832,8060,4049,2489,8061,2965,2490,3731, 990,3109, 627,1814,2642, # 7910 +4289,1582,4290,2125,2111,3496,4567,8062, 799,4291,3170,8063,4568,2112,1737,3013, # 7926 +1018, 543, 754,4292,3309,1676,4569,4570,4050,8064,1489,8065,3497,8066,2614,2889, # 7942 +4051,8067,8068,2966,8069,8070,8071,8072,3171,4571,4572,2182,1722,8073,3238,3239, # 7958 +1842,3610,1715, 481, 365,1975,1856,8074,8075,1962,2491,4573,8076,2126,3611,3240, # 7974 + 433,1894,2063,2075,8077, 602,2741,8078,8079,8080,8081,8082,3014,1628,3400,8083, # 7990 +3172,4574,4052,2890,4575,2512,8084,2544,2772,8085,8086,8087,3310,4576,2891,8088, # 8006 +4577,8089,2851,4578,4579,1221,2967,4053,2513,8090,8091,8092,1867,1989,8093,8094, # 8022 +8095,1895,8096,8097,4580,1896,4054, 318,8098,2094,4055,4293,8099,8100, 485,8101, # 8038 + 938,3862, 553,2670, 116,8102,3863,3612,8103,3498,2671,2773,3401,3311,2807,8104, # 8054 +3613,2929,4056,1747,2930,2968,8105,8106, 207,8107,8108,2672,4581,2514,8109,3015, # 8070 + 890,3614,3864,8110,1877,3732,3402,8111,2183,2353,3403,1652,8112,8113,8114, 941, # 8086 +2294, 208,3499,4057,2019, 330,4294,3865,2892,2492,3733,4295,8115,8116,8117,8118, # 8102 +) + diff --git a/env/lib/python3.6/site-packages/pip/_vendor/chardet/euctwprober.py b/env/lib/python3.6/site-packages/pip/_vendor/chardet/euctwprober.py new file mode 100644 index 0000000..35669cc --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/chardet/euctwprober.py @@ -0,0 +1,46 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .mbcharsetprober import MultiByteCharSetProber +from .codingstatemachine import CodingStateMachine +from .chardistribution import EUCTWDistributionAnalysis +from .mbcssm import EUCTW_SM_MODEL + +class EUCTWProber(MultiByteCharSetProber): + def __init__(self): + super(EUCTWProber, self).__init__() + self.coding_sm = CodingStateMachine(EUCTW_SM_MODEL) + self.distribution_analyzer = EUCTWDistributionAnalysis() + self.reset() + + @property + def charset_name(self): + return "EUC-TW" + + @property + def language(self): + return "Taiwan" diff --git a/env/lib/python3.6/site-packages/pip/_vendor/chardet/gb2312freq.py b/env/lib/python3.6/site-packages/pip/_vendor/chardet/gb2312freq.py new file mode 100644 index 0000000..697837b --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/chardet/gb2312freq.py @@ -0,0 +1,283 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# GB2312 most frequently used character table +# +# Char to FreqOrder table , from hz6763 + +# 512 --> 0.79 -- 0.79 +# 1024 --> 0.92 -- 0.13 +# 2048 --> 0.98 -- 0.06 +# 6768 --> 1.00 -- 0.02 +# +# Ideal Distribution Ratio = 0.79135/(1-0.79135) = 3.79 +# Random Distribution Ration = 512 / (3755 - 512) = 0.157 +# +# Typical Distribution Ratio about 25% of Ideal one, still much higher that RDR + +GB2312_TYPICAL_DISTRIBUTION_RATIO = 0.9 + +GB2312_TABLE_SIZE = 3760 + +GB2312_CHAR_TO_FREQ_ORDER = ( +1671, 749,1443,2364,3924,3807,2330,3921,1704,3463,2691,1511,1515, 572,3191,2205, +2361, 224,2558, 479,1711, 963,3162, 440,4060,1905,2966,2947,3580,2647,3961,3842, +2204, 869,4207, 970,2678,5626,2944,2956,1479,4048, 514,3595, 588,1346,2820,3409, + 249,4088,1746,1873,2047,1774, 581,1813, 358,1174,3590,1014,1561,4844,2245, 670, +1636,3112, 889,1286, 953, 556,2327,3060,1290,3141, 613, 185,3477,1367, 850,3820, +1715,2428,2642,2303,2732,3041,2562,2648,3566,3946,1349, 388,3098,2091,1360,3585, + 152,1687,1539, 738,1559, 59,1232,2925,2267,1388,1249,1741,1679,2960, 151,1566, +1125,1352,4271, 924,4296, 385,3166,4459, 310,1245,2850, 70,3285,2729,3534,3575, +2398,3298,3466,1960,2265, 217,3647, 864,1909,2084,4401,2773,1010,3269,5152, 853, +3051,3121,1244,4251,1895, 364,1499,1540,2313,1180,3655,2268, 562, 715,2417,3061, + 544, 336,3768,2380,1752,4075, 950, 280,2425,4382, 183,2759,3272, 333,4297,2155, +1688,2356,1444,1039,4540, 736,1177,3349,2443,2368,2144,2225, 565, 196,1482,3406, + 927,1335,4147, 692, 878,1311,1653,3911,3622,1378,4200,1840,2969,3149,2126,1816, +2534,1546,2393,2760, 737,2494, 13, 447, 245,2747, 38,2765,2129,2589,1079, 606, + 360, 471,3755,2890, 404, 848, 699,1785,1236, 370,2221,1023,3746,2074,2026,2023, +2388,1581,2119, 812,1141,3091,2536,1519, 804,2053, 406,1596,1090, 784, 548,4414, +1806,2264,2936,1100, 343,4114,5096, 622,3358, 743,3668,1510,1626,5020,3567,2513, +3195,4115,5627,2489,2991, 24,2065,2697,1087,2719, 48,1634, 315, 68, 985,2052, + 198,2239,1347,1107,1439, 597,2366,2172, 871,3307, 919,2487,2790,1867, 236,2570, +1413,3794, 906,3365,3381,1701,1982,1818,1524,2924,1205, 616,2586,2072,2004, 575, + 253,3099, 32,1365,1182, 197,1714,2454,1201, 554,3388,3224,2748, 756,2587, 250, +2567,1507,1517,3529,1922,2761,2337,3416,1961,1677,2452,2238,3153, 615, 911,1506, +1474,2495,1265,1906,2749,3756,3280,2161, 898,2714,1759,3450,2243,2444, 563, 26, +3286,2266,3769,3344,2707,3677, 611,1402, 531,1028,2871,4548,1375, 261,2948, 835, +1190,4134, 353, 840,2684,1900,3082,1435,2109,1207,1674, 329,1872,2781,4055,2686, +2104, 608,3318,2423,2957,2768,1108,3739,3512,3271,3985,2203,1771,3520,1418,2054, +1681,1153, 225,1627,2929, 162,2050,2511,3687,1954, 124,1859,2431,1684,3032,2894, + 585,4805,3969,2869,2704,2088,2032,2095,3656,2635,4362,2209, 256, 518,2042,2105, +3777,3657, 643,2298,1148,1779, 190, 989,3544, 414, 11,2135,2063,2979,1471, 403, +3678, 126, 770,1563, 671,2499,3216,2877, 600,1179, 307,2805,4937,1268,1297,2694, + 252,4032,1448,1494,1331,1394, 127,2256, 222,1647,1035,1481,3056,1915,1048, 873, +3651, 210, 33,1608,2516, 200,1520, 415, 102, 0,3389,1287, 817, 91,3299,2940, + 836,1814, 549,2197,1396,1669,2987,3582,2297,2848,4528,1070, 687, 20,1819, 121, +1552,1364,1461,1968,2617,3540,2824,2083, 177, 948,4938,2291, 
110,4549,2066, 648, +3359,1755,2110,2114,4642,4845,1693,3937,3308,1257,1869,2123, 208,1804,3159,2992, +2531,2549,3361,2418,1350,2347,2800,2568,1291,2036,2680, 72, 842,1990, 212,1233, +1154,1586, 75,2027,3410,4900,1823,1337,2710,2676, 728,2810,1522,3026,4995, 157, + 755,1050,4022, 710, 785,1936,2194,2085,1406,2777,2400, 150,1250,4049,1206, 807, +1910, 534, 529,3309,1721,1660, 274, 39,2827, 661,2670,1578, 925,3248,3815,1094, +4278,4901,4252, 41,1150,3747,2572,2227,4501,3658,4902,3813,3357,3617,2884,2258, + 887, 538,4187,3199,1294,2439,3042,2329,2343,2497,1255, 107, 543,1527, 521,3478, +3568, 194,5062, 15, 961,3870,1241,1192,2664, 66,5215,3260,2111,1295,1127,2152, +3805,4135, 901,1164,1976, 398,1278, 530,1460, 748, 904,1054,1966,1426, 53,2909, + 509, 523,2279,1534, 536,1019, 239,1685, 460,2353, 673,1065,2401,3600,4298,2272, +1272,2363, 284,1753,3679,4064,1695, 81, 815,2677,2757,2731,1386, 859, 500,4221, +2190,2566, 757,1006,2519,2068,1166,1455, 337,2654,3203,1863,1682,1914,3025,1252, +1409,1366, 847, 714,2834,2038,3209, 964,2970,1901, 885,2553,1078,1756,3049, 301, +1572,3326, 688,2130,1996,2429,1805,1648,2930,3421,2750,3652,3088, 262,1158,1254, + 389,1641,1812, 526,1719, 923,2073,1073,1902, 468, 489,4625,1140, 857,2375,3070, +3319,2863, 380, 116,1328,2693,1161,2244, 273,1212,1884,2769,3011,1775,1142, 461, +3066,1200,2147,2212, 790, 702,2695,4222,1601,1058, 434,2338,5153,3640, 67,2360, +4099,2502, 618,3472,1329, 416,1132, 830,2782,1807,2653,3211,3510,1662, 192,2124, + 296,3979,1739,1611,3684, 23, 118, 324, 446,1239,1225, 293,2520,3814,3795,2535, +3116, 17,1074, 467,2692,2201, 387,2922, 45,1326,3055,1645,3659,2817, 958, 243, +1903,2320,1339,2825,1784,3289, 356, 576, 865,2315,2381,3377,3916,1088,3122,1713, +1655, 935, 628,4689,1034,1327, 441, 800, 720, 894,1979,2183,1528,5289,2702,1071, +4046,3572,2399,1571,3281, 79, 761,1103, 327, 134, 758,1899,1371,1615, 879, 442, + 215,2605,2579, 173,2048,2485,1057,2975,3317,1097,2253,3801,4263,1403,1650,2946, + 814,4968,3487,1548,2644,1567,1285, 2, 295,2636, 97, 946,3576, 832, 141,4257, +3273, 760,3821,3521,3156,2607, 949,1024,1733,1516,1803,1920,2125,2283,2665,3180, +1501,2064,3560,2171,1592, 803,3518,1416, 732,3897,4258,1363,1362,2458, 119,1427, + 602,1525,2608,1605,1639,3175, 694,3064, 10, 465, 76,2000,4846,4208, 444,3781, +1619,3353,2206,1273,3796, 740,2483, 320,1723,2377,3660,2619,1359,1137,1762,1724, +2345,2842,1850,1862, 912, 821,1866, 612,2625,1735,2573,3369,1093, 844, 89, 937, + 930,1424,3564,2413,2972,1004,3046,3019,2011, 711,3171,1452,4178, 428, 801,1943, + 432, 445,2811, 206,4136,1472, 730, 349, 73, 397,2802,2547, 998,1637,1167, 789, + 396,3217, 154,1218, 716,1120,1780,2819,4826,1931,3334,3762,2139,1215,2627, 552, +3664,3628,3232,1405,2383,3111,1356,2652,3577,3320,3101,1703, 640,1045,1370,1246, +4996, 371,1575,2436,1621,2210, 984,4033,1734,2638, 16,4529, 663,2755,3255,1451, +3917,2257,1253,1955,2234,1263,2951, 214,1229, 617, 485, 359,1831,1969, 473,2310, + 750,2058, 165, 80,2864,2419, 361,4344,2416,2479,1134, 796,3726,1266,2943, 860, +2715, 938, 390,2734,1313,1384, 248, 202, 877,1064,2854, 522,3907, 279,1602, 297, +2357, 395,3740, 137,2075, 944,4089,2584,1267,3802, 62,1533,2285, 178, 176, 780, +2440, 201,3707, 590, 478,1560,4354,2117,1075, 30, 74,4643,4004,1635,1441,2745, + 776,2596, 238,1077,1692,1912,2844, 605, 499,1742,3947, 241,3053, 980,1749, 936, +2640,4511,2582, 515,1543,2162,5322,2892,2993, 890,2148,1924, 665,1827,3581,1032, + 968,3163, 339,1044,1896, 270, 583,1791,1720,4367,1194,3488,3669, 43,2523,1657, + 163,2167, 290,1209,1622,3378, 
550, 634,2508,2510, 695,2634,2384,2512,1476,1414, + 220,1469,2341,2138,2852,3183,2900,4939,2865,3502,1211,3680, 854,3227,1299,2976, +3172, 186,2998,1459, 443,1067,3251,1495, 321,1932,3054, 909, 753,1410,1828, 436, +2441,1119,1587,3164,2186,1258, 227, 231,1425,1890,3200,3942, 247, 959, 725,5254, +2741, 577,2158,2079, 929, 120, 174, 838,2813, 591,1115, 417,2024, 40,3240,1536, +1037, 291,4151,2354, 632,1298,2406,2500,3535,1825,1846,3451, 205,1171, 345,4238, + 18,1163, 811, 685,2208,1217, 425,1312,1508,1175,4308,2552,1033, 587,1381,3059, +2984,3482, 340,1316,4023,3972, 792,3176, 519, 777,4690, 918, 933,4130,2981,3741, + 90,3360,2911,2200,5184,4550, 609,3079,2030, 272,3379,2736, 363,3881,1130,1447, + 286, 779, 357,1169,3350,3137,1630,1220,2687,2391, 747,1277,3688,2618,2682,2601, +1156,3196,5290,4034,3102,1689,3596,3128, 874, 219,2783, 798, 508,1843,2461, 269, +1658,1776,1392,1913,2983,3287,2866,2159,2372, 829,4076, 46,4253,2873,1889,1894, + 915,1834,1631,2181,2318, 298, 664,2818,3555,2735, 954,3228,3117, 527,3511,2173, + 681,2712,3033,2247,2346,3467,1652, 155,2164,3382, 113,1994, 450, 899, 494, 994, +1237,2958,1875,2336,1926,3727, 545,1577,1550, 633,3473, 204,1305,3072,2410,1956, +2471, 707,2134, 841,2195,2196,2663,3843,1026,4940, 990,3252,4997, 368,1092, 437, +3212,3258,1933,1829, 675,2977,2893, 412, 943,3723,4644,3294,3283,2230,2373,5154, +2389,2241,2661,2323,1404,2524, 593, 787, 677,3008,1275,2059, 438,2709,2609,2240, +2269,2246,1446, 36,1568,1373,3892,1574,2301,1456,3962, 693,2276,5216,2035,1143, +2720,1919,1797,1811,2763,4137,2597,1830,1699,1488,1198,2090, 424,1694, 312,3634, +3390,4179,3335,2252,1214, 561,1059,3243,2295,2561, 975,5155,2321,2751,3772, 472, +1537,3282,3398,1047,2077,2348,2878,1323,3340,3076, 690,2906, 51, 369, 170,3541, +1060,2187,2688,3670,2541,1083,1683, 928,3918, 459, 109,4427, 599,3744,4286, 143, +2101,2730,2490, 82,1588,3036,2121, 281,1860, 477,4035,1238,2812,3020,2716,3312, +1530,2188,2055,1317, 843, 636,1808,1173,3495, 649, 181,1002, 147,3641,1159,2414, +3750,2289,2795, 813,3123,2610,1136,4368, 5,3391,4541,2174, 420, 429,1728, 754, +1228,2115,2219, 347,2223,2733, 735,1518,3003,2355,3134,1764,3948,3329,1888,2424, +1001,1234,1972,3321,3363,1672,1021,1450,1584, 226, 765, 655,2526,3404,3244,2302, +3665, 731, 594,2184, 319,1576, 621, 658,2656,4299,2099,3864,1279,2071,2598,2739, + 795,3086,3699,3908,1707,2352,2402,1382,3136,2475,1465,4847,3496,3865,1085,3004, +2591,1084, 213,2287,1963,3565,2250, 822, 793,4574,3187,1772,1789,3050, 595,1484, +1959,2770,1080,2650, 456, 422,2996, 940,3322,4328,4345,3092,2742, 965,2784, 739, +4124, 952,1358,2498,2949,2565, 332,2698,2378, 660,2260,2473,4194,3856,2919, 535, +1260,2651,1208,1428,1300,1949,1303,2942, 433,2455,2450,1251,1946, 614,1269, 641, +1306,1810,2737,3078,2912, 564,2365,1419,1415,1497,4460,2367,2185,1379,3005,1307, +3218,2175,1897,3063, 682,1157,4040,4005,1712,1160,1941,1399, 394, 402,2952,1573, +1151,2986,2404, 862, 299,2033,1489,3006, 346, 171,2886,3401,1726,2932, 168,2533, + 47,2507,1030,3735,1145,3370,1395,1318,1579,3609,4560,2857,4116,1457,2529,1965, + 504,1036,2690,2988,2405, 745,5871, 849,2397,2056,3081, 863,2359,3857,2096, 99, +1397,1769,2300,4428,1643,3455,1978,1757,3718,1440, 35,4879,3742,1296,4228,2280, + 160,5063,1599,2013, 166, 520,3479,1646,3345,3012, 490,1937,1545,1264,2182,2505, +1096,1188,1369,1436,2421,1667,2792,2460,1270,2122, 727,3167,2143, 806,1706,1012, +1800,3037, 960,2218,1882, 805, 139,2456,1139,1521, 851,1052,3093,3089, 342,2039, + 744,5097,1468,1502,1585,2087, 223, 939, 326,2140,2577, 
892,2481,1623,4077, 982, +3708, 135,2131, 87,2503,3114,2326,1106, 876,1616, 547,2997,2831,2093,3441,4530, +4314, 9,3256,4229,4148, 659,1462,1986,1710,2046,2913,2231,4090,4880,5255,3392, +3274,1368,3689,4645,1477, 705,3384,3635,1068,1529,2941,1458,3782,1509, 100,1656, +2548, 718,2339, 408,1590,2780,3548,1838,4117,3719,1345,3530, 717,3442,2778,3220, +2898,1892,4590,3614,3371,2043,1998,1224,3483, 891, 635, 584,2559,3355, 733,1766, +1729,1172,3789,1891,2307, 781,2982,2271,1957,1580,5773,2633,2005,4195,3097,1535, +3213,1189,1934,5693,3262, 586,3118,1324,1598, 517,1564,2217,1868,1893,4445,3728, +2703,3139,1526,1787,1992,3882,2875,1549,1199,1056,2224,1904,2711,5098,4287, 338, +1993,3129,3489,2689,1809,2815,1997, 957,1855,3898,2550,3275,3057,1105,1319, 627, +1505,1911,1883,3526, 698,3629,3456,1833,1431, 746, 77,1261,2017,2296,1977,1885, + 125,1334,1600, 525,1798,1109,2222,1470,1945, 559,2236,1186,3443,2476,1929,1411, +2411,3135,1777,3372,2621,1841,1613,3229, 668,1430,1839,2643,2916, 195,1989,2671, +2358,1387, 629,3205,2293,5256,4439, 123,1310, 888,1879,4300,3021,3605,1003,1162, +3192,2910,2010, 140,2395,2859, 55,1082,2012,2901, 662, 419,2081,1438, 680,2774, +4654,3912,1620,1731,1625,5035,4065,2328, 512,1344, 802,5443,2163,2311,2537, 524, +3399, 98,1155,2103,1918,2606,3925,2816,1393,2465,1504,3773,2177,3963,1478,4346, + 180,1113,4655,3461,2028,1698, 833,2696,1235,1322,1594,4408,3623,3013,3225,2040, +3022, 541,2881, 607,3632,2029,1665,1219, 639,1385,1686,1099,2803,3231,1938,3188, +2858, 427, 676,2772,1168,2025, 454,3253,2486,3556, 230,1950, 580, 791,1991,1280, +1086,1974,2034, 630, 257,3338,2788,4903,1017, 86,4790, 966,2789,1995,1696,1131, + 259,3095,4188,1308, 179,1463,5257, 289,4107,1248, 42,3413,1725,2288, 896,1947, + 774,4474,4254, 604,3430,4264, 392,2514,2588, 452, 237,1408,3018, 988,4531,1970, +3034,3310, 540,2370,1562,1288,2990, 502,4765,1147, 4,1853,2708, 207, 294,2814, +4078,2902,2509, 684, 34,3105,3532,2551, 644, 709,2801,2344, 573,1727,3573,3557, +2021,1081,3100,4315,2100,3681, 199,2263,1837,2385, 146,3484,1195,2776,3949, 997, +1939,3973,1008,1091,1202,1962,1847,1149,4209,5444,1076, 493, 117,5400,2521, 972, +1490,2934,1796,4542,2374,1512,2933,2657, 413,2888,1135,2762,2314,2156,1355,2369, + 766,2007,2527,2170,3124,2491,2593,2632,4757,2437, 234,3125,3591,1898,1750,1376, +1942,3468,3138, 570,2127,2145,3276,4131, 962, 132,1445,4196, 19, 941,3624,3480, +3366,1973,1374,4461,3431,2629, 283,2415,2275, 808,2887,3620,2112,2563,1353,3610, + 955,1089,3103,1053, 96, 88,4097, 823,3808,1583, 399, 292,4091,3313, 421,1128, + 642,4006, 903,2539,1877,2082, 596, 29,4066,1790, 722,2157, 130, 995,1569, 769, +1485, 464, 513,2213, 288,1923,1101,2453,4316, 133, 486,2445, 50, 625, 487,2207, + 57, 423, 481,2962, 159,3729,1558, 491, 303, 482, 501, 240,2837, 112,3648,2392, +1783, 362, 8,3433,3422, 610,2793,3277,1390,1284,1654, 21,3823, 734, 367, 623, + 193, 287, 374,1009,1483, 816, 476, 313,2255,2340,1262,2150,2899,1146,2581, 782, +2116,1659,2018,1880, 255,3586,3314,1110,2867,2137,2564, 986,2767,5185,2006, 650, + 158, 926, 762, 881,3157,2717,2362,3587, 306,3690,3245,1542,3077,2427,1691,2478, +2118,2985,3490,2438, 539,2305, 983, 129,1754, 355,4201,2386, 827,2923, 104,1773, +2838,2771, 411,2905,3919, 376, 767, 122,1114, 828,2422,1817,3506, 266,3460,1007, +1609,4998, 945,2612,4429,2274, 726,1247,1964,2914,2199,2070,4002,4108, 657,3323, +1422, 579, 455,2764,4737,1222,2895,1670, 824,1223,1487,2525, 558, 861,3080, 598, +2659,2515,1967, 752,2583,2376,2214,4180, 977, 704,2464,4999,2622,4109,1210,2961, + 819,1541, 142,2284, 
44, 418, 457,1126,3730,4347,4626,1644,1876,3671,1864, 302, +1063,5694, 624, 723,1984,3745,1314,1676,2488,1610,1449,3558,3569,2166,2098, 409, +1011,2325,3704,2306, 818,1732,1383,1824,1844,3757, 999,2705,3497,1216,1423,2683, +2426,2954,2501,2726,2229,1475,2554,5064,1971,1794,1666,2014,1343, 783, 724, 191, +2434,1354,2220,5065,1763,2752,2472,4152, 131, 175,2885,3434, 92,1466,4920,2616, +3871,3872,3866, 128,1551,1632, 669,1854,3682,4691,4125,1230, 188,2973,3290,1302, +1213, 560,3266, 917, 763,3909,3249,1760, 868,1958, 764,1782,2097, 145,2277,3774, +4462, 64,1491,3062, 971,2132,3606,2442, 221,1226,1617, 218, 323,1185,3207,3147, + 571, 619,1473,1005,1744,2281, 449,1887,2396,3685, 275, 375,3816,1743,3844,3731, + 845,1983,2350,4210,1377, 773, 967,3499,3052,3743,2725,4007,1697,1022,3943,1464, +3264,2855,2722,1952,1029,2839,2467, 84,4383,2215, 820,1391,2015,2448,3672, 377, +1948,2168, 797,2545,3536,2578,2645, 94,2874,1678, 405,1259,3071, 771, 546,1315, + 470,1243,3083, 895,2468, 981, 969,2037, 846,4181, 653,1276,2928, 14,2594, 557, +3007,2474, 156, 902,1338,1740,2574, 537,2518, 973,2282,2216,2433,1928, 138,2903, +1293,2631,1612, 646,3457, 839,2935, 111, 496,2191,2847, 589,3186, 149,3994,2060, +4031,2641,4067,3145,1870, 37,3597,2136,1025,2051,3009,3383,3549,1121,1016,3261, +1301, 251,2446,2599,2153, 872,3246, 637, 334,3705, 831, 884, 921,3065,3140,4092, +2198,1944, 246,2964, 108,2045,1152,1921,2308,1031, 203,3173,4170,1907,3890, 810, +1401,2003,1690, 506, 647,1242,2828,1761,1649,3208,2249,1589,3709,2931,5156,1708, + 498, 666,2613, 834,3817,1231, 184,2851,1124, 883,3197,2261,3710,1765,1553,2658, +1178,2639,2351, 93,1193, 942,2538,2141,4402, 235,1821, 870,1591,2192,1709,1871, +3341,1618,4126,2595,2334, 603, 651, 69, 701, 268,2662,3411,2555,1380,1606, 503, + 448, 254,2371,2646, 574,1187,2309,1770, 322,2235,1292,1801, 305, 566,1133, 229, +2067,2057, 706, 167, 483,2002,2672,3295,1820,3561,3067, 316, 378,2746,3452,1112, + 136,1981, 507,1651,2917,1117, 285,4591, 182,2580,3522,1304, 335,3303,1835,2504, +1795,1792,2248, 674,1018,2106,2449,1857,2292,2845, 976,3047,1781,2600,2727,1389, +1281, 52,3152, 153, 265,3950, 672,3485,3951,4463, 430,1183, 365, 278,2169, 27, +1407,1336,2304, 209,1340,1730,2202,1852,2403,2883, 979,1737,1062, 631,2829,2542, +3876,2592, 825,2086,2226,3048,3625, 352,1417,3724, 542, 991, 431,1351,3938,1861, +2294, 826,1361,2927,3142,3503,1738, 463,2462,2723, 582,1916,1595,2808, 400,3845, +3891,2868,3621,2254, 58,2492,1123, 910,2160,2614,1372,1603,1196,1072,3385,1700, +3267,1980, 696, 480,2430, 920, 799,1570,2920,1951,2041,4047,2540,1321,4223,2469, +3562,2228,1271,2602, 401,2833,3351,2575,5157, 907,2312,1256, 410, 263,3507,1582, + 996, 678,1849,2316,1480, 908,3545,2237, 703,2322, 667,1826,2849,1531,2604,2999, +2407,3146,2151,2630,1786,3711, 469,3542, 497,3899,2409, 858, 837,4446,3393,1274, + 786, 620,1845,2001,3311, 484, 308,3367,1204,1815,3691,2332,1532,2557,1842,2020, +2724,1927,2333,4440, 567, 22,1673,2728,4475,1987,1858,1144,1597, 101,1832,3601, + 12, 974,3783,4391, 951,1412, 1,3720, 453,4608,4041, 528,1041,1027,3230,2628, +1129, 875,1051,3291,1203,2262,1069,2860,2799,2149,2615,3278, 144,1758,3040, 31, + 475,1680, 366,2685,3184, 311,1642,4008,2466,5036,1593,1493,2809, 216,1420,1668, + 233, 304,2128,3284, 232,1429,1768,1040,2008,3407,2740,2967,2543, 242,2133, 778, +1565,2022,2620, 505,2189,2756,1098,2273, 372,1614, 708, 553,2846,2094,2278, 169, +3626,2835,4161, 228,2674,3165, 809,1454,1309, 466,1705,1095, 900,3423, 880,2667, +3751,5258,2317,3109,2571,4317,2766,1503,1342, 866,4447,1118, 
63,2076, 314,1881, +1348,1061, 172, 978,3515,1747, 532, 511,3970, 6, 601, 905,2699,3300,1751, 276, +1467,3725,2668, 65,4239,2544,2779,2556,1604, 578,2451,1802, 992,2331,2624,1320, +3446, 713,1513,1013, 103,2786,2447,1661, 886,1702, 916, 654,3574,2031,1556, 751, +2178,2821,2179,1498,1538,2176, 271, 914,2251,2080,1325, 638,1953,2937,3877,2432, +2754, 95,3265,1716, 260,1227,4083, 775, 106,1357,3254, 426,1607, 555,2480, 772, +1985, 244,2546, 474, 495,1046,2611,1851,2061, 71,2089,1675,2590, 742,3758,2843, +3222,1433, 267,2180,2576,2826,2233,2092,3913,2435, 956,1745,3075, 856,2113,1116, + 451, 3,1988,2896,1398, 993,2463,1878,2049,1341,2718,2721,2870,2108, 712,2904, +4363,2753,2324, 277,2872,2349,2649, 384, 987, 435, 691,3000, 922, 164,3939, 652, +1500,1184,4153,2482,3373,2165,4848,2335,3775,3508,3154,2806,2830,1554,2102,1664, +2530,1434,2408, 893,1547,2623,3447,2832,2242,2532,3169,2856,3223,2078, 49,3770, +3469, 462, 318, 656,2259,3250,3069, 679,1629,2758, 344,1138,1104,3120,1836,1283, +3115,2154,1437,4448, 934, 759,1999, 794,2862,1038, 533,2560,1722,2342, 855,2626, +1197,1663,4476,3127, 85,4240,2528, 25,1111,1181,3673, 407,3470,4561,2679,2713, + 768,1925,2841,3986,1544,1165, 932, 373,1240,2146,1930,2673, 721,4766, 354,4333, + 391,2963, 187, 61,3364,1442,1102, 330,1940,1767, 341,3809,4118, 393,2496,2062, +2211, 105, 331, 300, 439, 913,1332, 626, 379,3304,1557, 328, 689,3952, 309,1555, + 931, 317,2517,3027, 325, 569, 686,2107,3084, 60,1042,1333,2794, 264,3177,4014, +1628, 258,3712, 7,4464,1176,1043,1778, 683, 114,1975, 78,1492, 383,1886, 510, + 386, 645,5291,2891,2069,3305,4138,3867,2939,2603,2493,1935,1066,1848,3588,1015, +1282,1289,4609, 697,1453,3044,2666,3611,1856,2412, 54, 719,1330, 568,3778,2459, +1748, 788, 492, 551,1191,1000, 488,3394,3763, 282,1799, 348,2016,1523,3155,2390, +1049, 382,2019,1788,1170, 729,2968,3523, 897,3926,2785,2938,3292, 350,2319,3238, +1718,1717,2655,3453,3143,4465, 161,2889,2980,2009,1421, 56,1908,1640,2387,2232, +1917,1874,2477,4921, 148, 83,3438, 592,4245,2882,1822,1055, 741, 115,1496,1624, + 381,1638,4592,1020, 516,3214, 458, 947,4575,1432, 211,1514,2926,1865,2142, 189, + 852,1221,1400,1486, 882,2299,4036, 351, 28,1122, 700,6479,6480,6481,6482,6483, #last 512 +) + diff --git a/env/lib/python3.6/site-packages/pip/_vendor/chardet/gb2312prober.py b/env/lib/python3.6/site-packages/pip/_vendor/chardet/gb2312prober.py new file mode 100644 index 0000000..8446d2d --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/chardet/gb2312prober.py @@ -0,0 +1,46 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .mbcharsetprober import MultiByteCharSetProber +from .codingstatemachine import CodingStateMachine +from .chardistribution import GB2312DistributionAnalysis +from .mbcssm import GB2312_SM_MODEL + +class GB2312Prober(MultiByteCharSetProber): + def __init__(self): + super(GB2312Prober, self).__init__() + self.coding_sm = CodingStateMachine(GB2312_SM_MODEL) + self.distribution_analyzer = GB2312DistributionAnalysis() + self.reset() + + @property + def charset_name(self): + return "GB2312" + + @property + def language(self): + return "Chinese" diff --git a/env/lib/python3.6/site-packages/pip/_vendor/chardet/hebrewprober.py b/env/lib/python3.6/site-packages/pip/_vendor/chardet/hebrewprober.py new file mode 100644 index 0000000..b0e1bf4 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/chardet/hebrewprober.py @@ -0,0 +1,292 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Shy Shalom +# Portions created by the Initial Developer are Copyright (C) 2005 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetprober import CharSetProber +from .enums import ProbingState + +# This prober doesn't actually recognize a language or a charset. +# It is a helper prober for the use of the Hebrew model probers + +### General ideas of the Hebrew charset recognition ### +# +# Four main charsets exist in Hebrew: +# "ISO-8859-8" - Visual Hebrew +# "windows-1255" - Logical Hebrew +# "ISO-8859-8-I" - Logical Hebrew +# "x-mac-hebrew" - ?? Logical Hebrew ?? +# +# Both "ISO" charsets use a completely identical set of code points, whereas +# "windows-1255" and "x-mac-hebrew" are two different proper supersets of +# these code points. windows-1255 defines additional characters in the range +# 0x80-0x9F as some misc punctuation marks as well as some Hebrew-specific +# diacritics and additional 'Yiddish' ligature letters in the range 0xc0-0xd6. +# x-mac-hebrew defines similar additional code points but with a different +# mapping. +# +# As far as an average Hebrew text with no diacritics is concerned, all four +# charsets are identical with respect to code points. Meaning that for the +# main Hebrew alphabet, all four map the same values to all 27 Hebrew letters +# (including final letters). 
+# +# The dominant difference between these charsets is their directionality. +# "Visual" directionality means that the text is ordered as if the renderer is +# not aware of a BIDI rendering algorithm. The renderer sees the text and +# draws it from left to right. The text itself when ordered naturally is read +# backwards. A buffer of Visual Hebrew generally looks like so: +# "[last word of first line spelled backwards] [whole line ordered backwards +# and spelled backwards] [first word of first line spelled backwards] +# [end of line] [last word of second line] ... etc' " +# adding punctuation marks, numbers and English text to visual text is +# naturally also "visual" and from left to right. +# +# "Logical" directionality means the text is ordered "naturally" according to +# the order it is read. It is the responsibility of the renderer to display +# the text from right to left. A BIDI algorithm is used to place general +# punctuation marks, numbers and English text in the text. +# +# Texts in x-mac-hebrew are almost impossible to find on the Internet. From +# what little evidence I could find, it seems that its general directionality +# is Logical. +# +# To sum up all of the above, the Hebrew probing mechanism knows about two +# charsets: +# Visual Hebrew - "ISO-8859-8" - backwards text - Words and sentences are +# backwards while line order is natural. For charset recognition purposes +# the line order is unimportant (In fact, for this implementation, even +# word order is unimportant). +# Logical Hebrew - "windows-1255" - normal, naturally ordered text. +# +# "ISO-8859-8-I" is a subset of windows-1255 and doesn't need to be +# specifically identified. +# "x-mac-hebrew" is also identified as windows-1255. A text in x-mac-hebrew +# that contain special punctuation marks or diacritics is displayed with +# some unconverted characters showing as question marks. This problem might +# be corrected using another model prober for x-mac-hebrew. Due to the fact +# that x-mac-hebrew texts are so rare, writing another model prober isn't +# worth the effort and performance hit. +# +#### The Prober #### +# +# The prober is divided between two SBCharSetProbers and a HebrewProber, +# all of which are managed, created, fed data, inquired and deleted by the +# SBCSGroupProber. The two SBCharSetProbers identify that the text is in +# fact some kind of Hebrew, Logical or Visual. The final decision about which +# one is it is made by the HebrewProber by combining final-letter scores +# with the scores of the two SBCharSetProbers to produce a final answer. +# +# The SBCSGroupProber is responsible for stripping the original text of HTML +# tags, English characters, numbers, low-ASCII punctuation characters, spaces +# and new lines. It reduces any sequence of such characters to a single space. +# The buffer fed to each prober in the SBCS group prober is pure text in +# high-ASCII. +# The two SBCharSetProbers (model probers) share the same language model: +# Win1255Model. +# The first SBCharSetProber uses the model normally as any other +# SBCharSetProber does, to recognize windows-1255, upon which this model was +# built. The second SBCharSetProber is told to make the pair-of-letter +# lookup in the language model backwards. This in practice exactly simulates +# a visual Hebrew model using the windows-1255 logical Hebrew model. +# +# The HebrewProber is not using any language model. All it does is look for +# final-letter evidence suggesting the text is either logical Hebrew or visual +# Hebrew. 
Disjointed from the model probers, the results of the HebrewProber +# alone are meaningless. HebrewProber always returns 0.00 as confidence +# since it never identifies a charset by itself. Instead, the pointer to the +# HebrewProber is passed to the model probers as a helper "Name Prober". +# When the Group prober receives a positive identification from any prober, +# it asks for the name of the charset identified. If the prober queried is a +# Hebrew model prober, the model prober forwards the call to the +# HebrewProber to make the final decision. In the HebrewProber, the +# decision is made according to the final-letters scores maintained and Both +# model probers scores. The answer is returned in the form of the name of the +# charset identified, either "windows-1255" or "ISO-8859-8". + +class HebrewProber(CharSetProber): + # windows-1255 / ISO-8859-8 code points of interest + FINAL_KAF = 0xea + NORMAL_KAF = 0xeb + FINAL_MEM = 0xed + NORMAL_MEM = 0xee + FINAL_NUN = 0xef + NORMAL_NUN = 0xf0 + FINAL_PE = 0xf3 + NORMAL_PE = 0xf4 + FINAL_TSADI = 0xf5 + NORMAL_TSADI = 0xf6 + + # Minimum Visual vs Logical final letter score difference. + # If the difference is below this, don't rely solely on the final letter score + # distance. + MIN_FINAL_CHAR_DISTANCE = 5 + + # Minimum Visual vs Logical model score difference. + # If the difference is below this, don't rely at all on the model score + # distance. + MIN_MODEL_DISTANCE = 0.01 + + VISUAL_HEBREW_NAME = "ISO-8859-8" + LOGICAL_HEBREW_NAME = "windows-1255" + + def __init__(self): + super(HebrewProber, self).__init__() + self._final_char_logical_score = None + self._final_char_visual_score = None + self._prev = None + self._before_prev = None + self._logical_prober = None + self._visual_prober = None + self.reset() + + def reset(self): + self._final_char_logical_score = 0 + self._final_char_visual_score = 0 + # The two last characters seen in the previous buffer, + # mPrev and mBeforePrev are initialized to space in order to simulate + # a word delimiter at the beginning of the data + self._prev = ' ' + self._before_prev = ' ' + # These probers are owned by the group prober. + + def set_model_probers(self, logicalProber, visualProber): + self._logical_prober = logicalProber + self._visual_prober = visualProber + + def is_final(self, c): + return c in [self.FINAL_KAF, self.FINAL_MEM, self.FINAL_NUN, + self.FINAL_PE, self.FINAL_TSADI] + + def is_non_final(self, c): + # The normal Tsadi is not a good Non-Final letter due to words like + # 'lechotet' (to chat) containing an apostrophe after the tsadi. This + # apostrophe is converted to a space in FilterWithoutEnglishLetters + # causing the Non-Final tsadi to appear at an end of a word even + # though this is not the case in the original text. + # The letters Pe and Kaf rarely display a related behavior of not being + # a good Non-Final letter. Words like 'Pop', 'Winamp' and 'Mubarak' + # for example legally end with a Non-Final Pe or Kaf. However, the + # benefit of these letters as Non-Final letters outweighs the damage + # since these words are quite rare. + return c in [self.NORMAL_KAF, self.NORMAL_MEM, + self.NORMAL_NUN, self.NORMAL_PE] + + def feed(self, byte_str): + # Final letter analysis for logical-visual decision. + # Look for evidence that the received buffer is either logical Hebrew + # or visual Hebrew. + # The following cases are checked: + # 1) A word longer than 1 letter, ending with a final letter. 
This is + # an indication that the text is laid out "naturally" since the + # final letter really appears at the end. +1 for logical score. + # 2) A word longer than 1 letter, ending with a Non-Final letter. In + # normal Hebrew, words ending with Kaf, Mem, Nun, Pe or Tsadi, + # should not end with the Non-Final form of that letter. Exceptions + # to this rule are mentioned above in isNonFinal(). This is an + # indication that the text is laid out backwards. +1 for visual + # score + # 3) A word longer than 1 letter, starting with a final letter. Final + # letters should not appear at the beginning of a word. This is an + # indication that the text is laid out backwards. +1 for visual + # score. + # + # The visual score and logical score are accumulated throughout the + # text and are finally checked against each other in GetCharSetName(). + # No checking for final letters in the middle of words is done since + # that case is not an indication for either Logical or Visual text. + # + # We automatically filter out all 7-bit characters (replace them with + # spaces) so the word boundary detection works properly. [MAP] + + if self.state == ProbingState.NOT_ME: + # Both model probers say it's not them. No reason to continue. + return ProbingState.NOT_ME + + byte_str = self.filter_high_byte_only(byte_str) + + for cur in byte_str: + if cur == ' ': + # We stand on a space - a word just ended + if self._before_prev != ' ': + # next-to-last char was not a space so self._prev is not a + # 1 letter word + if self.is_final(self._prev): + # case (1) [-2:not space][-1:final letter][cur:space] + self._final_char_logical_score += 1 + elif self.is_non_final(self._prev): + # case (2) [-2:not space][-1:Non-Final letter][ + # cur:space] + self._final_char_visual_score += 1 + else: + # Not standing on a space + if ((self._before_prev == ' ') and + (self.is_final(self._prev)) and (cur != ' ')): + # case (3) [-2:space][-1:final letter][cur:not space] + self._final_char_visual_score += 1 + self._before_prev = self._prev + self._prev = cur + + # Forever detecting, till the end or until both model probers return + # ProbingState.NOT_ME (handled above) + return ProbingState.DETECTING + + @property + def charset_name(self): + # Make the decision: is it Logical or Visual? + # If the final letter score distance is dominant enough, rely on it. + finalsub = self._final_char_logical_score - self._final_char_visual_score + if finalsub >= self.MIN_FINAL_CHAR_DISTANCE: + return self.LOGICAL_HEBREW_NAME + if finalsub <= -self.MIN_FINAL_CHAR_DISTANCE: + return self.VISUAL_HEBREW_NAME + + # It's not dominant enough, try to rely on the model scores instead. + modelsub = (self._logical_prober.get_confidence() + - self._visual_prober.get_confidence()) + if modelsub > self.MIN_MODEL_DISTANCE: + return self.LOGICAL_HEBREW_NAME + if modelsub < -self.MIN_MODEL_DISTANCE: + return self.VISUAL_HEBREW_NAME + + # Still no good, back to final letter distance, maybe it'll save the + # day. + if finalsub < 0.0: + return self.VISUAL_HEBREW_NAME + + # (finalsub > 0 - Logical) or (don't know what to do) default to + # Logical. + return self.LOGICAL_HEBREW_NAME + + @property + def language(self): + return 'Hebrew' + + @property + def state(self): + # Remain active as long as any of the model probers are active. 
+ if (self._logical_prober.state == ProbingState.NOT_ME) and \ + (self._visual_prober.state == ProbingState.NOT_ME): + return ProbingState.NOT_ME + return ProbingState.DETECTING diff --git a/env/lib/python3.6/site-packages/pip/_vendor/chardet/jisfreq.py b/env/lib/python3.6/site-packages/pip/_vendor/chardet/jisfreq.py new file mode 100644 index 0000000..83fc082 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/chardet/jisfreq.py @@ -0,0 +1,325 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# Sampling from about 20M text materials include literature and computer technology +# +# Japanese frequency table, applied to both S-JIS and EUC-JP +# They are sorted in order. 
+ +# 128 --> 0.77094 +# 256 --> 0.85710 +# 512 --> 0.92635 +# 1024 --> 0.97130 +# 2048 --> 0.99431 +# +# Ideal Distribution Ratio = 0.92635 / (1-0.92635) = 12.58 +# Random Distribution Ration = 512 / (2965+62+83+86-512) = 0.191 +# +# Typical Distribution Ratio, 25% of IDR + +JIS_TYPICAL_DISTRIBUTION_RATIO = 3.0 + +# Char to FreqOrder table , +JIS_TABLE_SIZE = 4368 + +JIS_CHAR_TO_FREQ_ORDER = ( + 40, 1, 6, 182, 152, 180, 295,2127, 285, 381,3295,4304,3068,4606,3165,3510, # 16 +3511,1822,2785,4607,1193,2226,5070,4608, 171,2996,1247, 18, 179,5071, 856,1661, # 32 +1262,5072, 619, 127,3431,3512,3230,1899,1700, 232, 228,1294,1298, 284, 283,2041, # 48 +2042,1061,1062, 48, 49, 44, 45, 433, 434,1040,1041, 996, 787,2997,1255,4305, # 64 +2108,4609,1684,1648,5073,5074,5075,5076,5077,5078,3687,5079,4610,5080,3927,3928, # 80 +5081,3296,3432, 290,2285,1471,2187,5082,2580,2825,1303,2140,1739,1445,2691,3375, # 96 +1691,3297,4306,4307,4611, 452,3376,1182,2713,3688,3069,4308,5083,5084,5085,5086, # 112 +5087,5088,5089,5090,5091,5092,5093,5094,5095,5096,5097,5098,5099,5100,5101,5102, # 128 +5103,5104,5105,5106,5107,5108,5109,5110,5111,5112,4097,5113,5114,5115,5116,5117, # 144 +5118,5119,5120,5121,5122,5123,5124,5125,5126,5127,5128,5129,5130,5131,5132,5133, # 160 +5134,5135,5136,5137,5138,5139,5140,5141,5142,5143,5144,5145,5146,5147,5148,5149, # 176 +5150,5151,5152,4612,5153,5154,5155,5156,5157,5158,5159,5160,5161,5162,5163,5164, # 192 +5165,5166,5167,5168,5169,5170,5171,5172,5173,5174,5175,1472, 598, 618, 820,1205, # 208 +1309,1412,1858,1307,1692,5176,5177,5178,5179,5180,5181,5182,1142,1452,1234,1172, # 224 +1875,2043,2149,1793,1382,2973, 925,2404,1067,1241, 960,1377,2935,1491, 919,1217, # 240 +1865,2030,1406,1499,2749,4098,5183,5184,5185,5186,5187,5188,2561,4099,3117,1804, # 256 +2049,3689,4309,3513,1663,5189,3166,3118,3298,1587,1561,3433,5190,3119,1625,2998, # 272 +3299,4613,1766,3690,2786,4614,5191,5192,5193,5194,2161, 26,3377, 2,3929, 20, # 288 +3691, 47,4100, 50, 17, 16, 35, 268, 27, 243, 42, 155, 24, 154, 29, 184, # 304 + 4, 91, 14, 92, 53, 396, 33, 289, 9, 37, 64, 620, 21, 39, 321, 5, # 320 + 12, 11, 52, 13, 3, 208, 138, 0, 7, 60, 526, 141, 151,1069, 181, 275, # 336 +1591, 83, 132,1475, 126, 331, 829, 15, 69, 160, 59, 22, 157, 55,1079, 312, # 352 + 109, 38, 23, 25, 10, 19, 79,5195, 61, 382,1124, 8, 30,5196,5197,5198, # 368 +5199,5200,5201,5202,5203,5204,5205,5206, 89, 62, 74, 34,2416, 112, 139, 196, # 384 + 271, 149, 84, 607, 131, 765, 46, 88, 153, 683, 76, 874, 101, 258, 57, 80, # 400 + 32, 364, 121,1508, 169,1547, 68, 235, 145,2999, 41, 360,3027, 70, 63, 31, # 416 + 43, 259, 262,1383, 99, 533, 194, 66, 93, 846, 217, 192, 56, 106, 58, 565, # 432 + 280, 272, 311, 256, 146, 82, 308, 71, 100, 128, 214, 655, 110, 261, 104,1140, # 448 + 54, 51, 36, 87, 67,3070, 185,2618,2936,2020, 28,1066,2390,2059,5207,5208, # 464 +5209,5210,5211,5212,5213,5214,5215,5216,4615,5217,5218,5219,5220,5221,5222,5223, # 480 +5224,5225,5226,5227,5228,5229,5230,5231,5232,5233,5234,5235,5236,3514,5237,5238, # 496 +5239,5240,5241,5242,5243,5244,2297,2031,4616,4310,3692,5245,3071,5246,3598,5247, # 512 +4617,3231,3515,5248,4101,4311,4618,3808,4312,4102,5249,4103,4104,3599,5250,5251, # 528 +5252,5253,5254,5255,5256,5257,5258,5259,5260,5261,5262,5263,5264,5265,5266,5267, # 544 +5268,5269,5270,5271,5272,5273,5274,5275,5276,5277,5278,5279,5280,5281,5282,5283, # 560 +5284,5285,5286,5287,5288,5289,5290,5291,5292,5293,5294,5295,5296,5297,5298,5299, # 576 +5300,5301,5302,5303,5304,5305,5306,5307,5308,5309,5310,5311,5312,5313,5314,5315, # 592 
+5316,5317,5318,5319,5320,5321,5322,5323,5324,5325,5326,5327,5328,5329,5330,5331, # 608 +5332,5333,5334,5335,5336,5337,5338,5339,5340,5341,5342,5343,5344,5345,5346,5347, # 624 +5348,5349,5350,5351,5352,5353,5354,5355,5356,5357,5358,5359,5360,5361,5362,5363, # 640 +5364,5365,5366,5367,5368,5369,5370,5371,5372,5373,5374,5375,5376,5377,5378,5379, # 656 +5380,5381, 363, 642,2787,2878,2788,2789,2316,3232,2317,3434,2011, 165,1942,3930, # 672 +3931,3932,3933,5382,4619,5383,4620,5384,5385,5386,5387,5388,5389,5390,5391,5392, # 688 +5393,5394,5395,5396,5397,5398,5399,5400,5401,5402,5403,5404,5405,5406,5407,5408, # 704 +5409,5410,5411,5412,5413,5414,5415,5416,5417,5418,5419,5420,5421,5422,5423,5424, # 720 +5425,5426,5427,5428,5429,5430,5431,5432,5433,5434,5435,5436,5437,5438,5439,5440, # 736 +5441,5442,5443,5444,5445,5446,5447,5448,5449,5450,5451,5452,5453,5454,5455,5456, # 752 +5457,5458,5459,5460,5461,5462,5463,5464,5465,5466,5467,5468,5469,5470,5471,5472, # 768 +5473,5474,5475,5476,5477,5478,5479,5480,5481,5482,5483,5484,5485,5486,5487,5488, # 784 +5489,5490,5491,5492,5493,5494,5495,5496,5497,5498,5499,5500,5501,5502,5503,5504, # 800 +5505,5506,5507,5508,5509,5510,5511,5512,5513,5514,5515,5516,5517,5518,5519,5520, # 816 +5521,5522,5523,5524,5525,5526,5527,5528,5529,5530,5531,5532,5533,5534,5535,5536, # 832 +5537,5538,5539,5540,5541,5542,5543,5544,5545,5546,5547,5548,5549,5550,5551,5552, # 848 +5553,5554,5555,5556,5557,5558,5559,5560,5561,5562,5563,5564,5565,5566,5567,5568, # 864 +5569,5570,5571,5572,5573,5574,5575,5576,5577,5578,5579,5580,5581,5582,5583,5584, # 880 +5585,5586,5587,5588,5589,5590,5591,5592,5593,5594,5595,5596,5597,5598,5599,5600, # 896 +5601,5602,5603,5604,5605,5606,5607,5608,5609,5610,5611,5612,5613,5614,5615,5616, # 912 +5617,5618,5619,5620,5621,5622,5623,5624,5625,5626,5627,5628,5629,5630,5631,5632, # 928 +5633,5634,5635,5636,5637,5638,5639,5640,5641,5642,5643,5644,5645,5646,5647,5648, # 944 +5649,5650,5651,5652,5653,5654,5655,5656,5657,5658,5659,5660,5661,5662,5663,5664, # 960 +5665,5666,5667,5668,5669,5670,5671,5672,5673,5674,5675,5676,5677,5678,5679,5680, # 976 +5681,5682,5683,5684,5685,5686,5687,5688,5689,5690,5691,5692,5693,5694,5695,5696, # 992 +5697,5698,5699,5700,5701,5702,5703,5704,5705,5706,5707,5708,5709,5710,5711,5712, # 1008 +5713,5714,5715,5716,5717,5718,5719,5720,5721,5722,5723,5724,5725,5726,5727,5728, # 1024 +5729,5730,5731,5732,5733,5734,5735,5736,5737,5738,5739,5740,5741,5742,5743,5744, # 1040 +5745,5746,5747,5748,5749,5750,5751,5752,5753,5754,5755,5756,5757,5758,5759,5760, # 1056 +5761,5762,5763,5764,5765,5766,5767,5768,5769,5770,5771,5772,5773,5774,5775,5776, # 1072 +5777,5778,5779,5780,5781,5782,5783,5784,5785,5786,5787,5788,5789,5790,5791,5792, # 1088 +5793,5794,5795,5796,5797,5798,5799,5800,5801,5802,5803,5804,5805,5806,5807,5808, # 1104 +5809,5810,5811,5812,5813,5814,5815,5816,5817,5818,5819,5820,5821,5822,5823,5824, # 1120 +5825,5826,5827,5828,5829,5830,5831,5832,5833,5834,5835,5836,5837,5838,5839,5840, # 1136 +5841,5842,5843,5844,5845,5846,5847,5848,5849,5850,5851,5852,5853,5854,5855,5856, # 1152 +5857,5858,5859,5860,5861,5862,5863,5864,5865,5866,5867,5868,5869,5870,5871,5872, # 1168 +5873,5874,5875,5876,5877,5878,5879,5880,5881,5882,5883,5884,5885,5886,5887,5888, # 1184 +5889,5890,5891,5892,5893,5894,5895,5896,5897,5898,5899,5900,5901,5902,5903,5904, # 1200 +5905,5906,5907,5908,5909,5910,5911,5912,5913,5914,5915,5916,5917,5918,5919,5920, # 1216 +5921,5922,5923,5924,5925,5926,5927,5928,5929,5930,5931,5932,5933,5934,5935,5936, # 1232 
+5937,5938,5939,5940,5941,5942,5943,5944,5945,5946,5947,5948,5949,5950,5951,5952, # 1248 +5953,5954,5955,5956,5957,5958,5959,5960,5961,5962,5963,5964,5965,5966,5967,5968, # 1264 +5969,5970,5971,5972,5973,5974,5975,5976,5977,5978,5979,5980,5981,5982,5983,5984, # 1280 +5985,5986,5987,5988,5989,5990,5991,5992,5993,5994,5995,5996,5997,5998,5999,6000, # 1296 +6001,6002,6003,6004,6005,6006,6007,6008,6009,6010,6011,6012,6013,6014,6015,6016, # 1312 +6017,6018,6019,6020,6021,6022,6023,6024,6025,6026,6027,6028,6029,6030,6031,6032, # 1328 +6033,6034,6035,6036,6037,6038,6039,6040,6041,6042,6043,6044,6045,6046,6047,6048, # 1344 +6049,6050,6051,6052,6053,6054,6055,6056,6057,6058,6059,6060,6061,6062,6063,6064, # 1360 +6065,6066,6067,6068,6069,6070,6071,6072,6073,6074,6075,6076,6077,6078,6079,6080, # 1376 +6081,6082,6083,6084,6085,6086,6087,6088,6089,6090,6091,6092,6093,6094,6095,6096, # 1392 +6097,6098,6099,6100,6101,6102,6103,6104,6105,6106,6107,6108,6109,6110,6111,6112, # 1408 +6113,6114,2044,2060,4621, 997,1235, 473,1186,4622, 920,3378,6115,6116, 379,1108, # 1424 +4313,2657,2735,3934,6117,3809, 636,3233, 573,1026,3693,3435,2974,3300,2298,4105, # 1440 + 854,2937,2463, 393,2581,2417, 539, 752,1280,2750,2480, 140,1161, 440, 708,1569, # 1456 + 665,2497,1746,1291,1523,3000, 164,1603, 847,1331, 537,1997, 486, 508,1693,2418, # 1472 +1970,2227, 878,1220, 299,1030, 969, 652,2751, 624,1137,3301,2619, 65,3302,2045, # 1488 +1761,1859,3120,1930,3694,3516, 663,1767, 852, 835,3695, 269, 767,2826,2339,1305, # 1504 + 896,1150, 770,1616,6118, 506,1502,2075,1012,2519, 775,2520,2975,2340,2938,4314, # 1520 +3028,2086,1224,1943,2286,6119,3072,4315,2240,1273,1987,3935,1557, 175, 597, 985, # 1536 +3517,2419,2521,1416,3029, 585, 938,1931,1007,1052,1932,1685,6120,3379,4316,4623, # 1552 + 804, 599,3121,1333,2128,2539,1159,1554,2032,3810, 687,2033,2904, 952, 675,1467, # 1568 +3436,6121,2241,1096,1786,2440,1543,1924, 980,1813,2228, 781,2692,1879, 728,1918, # 1584 +3696,4624, 548,1950,4625,1809,1088,1356,3303,2522,1944, 502, 972, 373, 513,2827, # 1600 + 586,2377,2391,1003,1976,1631,6122,2464,1084, 648,1776,4626,2141, 324, 962,2012, # 1616 +2177,2076,1384, 742,2178,1448,1173,1810, 222, 102, 301, 445, 125,2420, 662,2498, # 1632 + 277, 200,1476,1165,1068, 224,2562,1378,1446, 450,1880, 659, 791, 582,4627,2939, # 1648 +3936,1516,1274, 555,2099,3697,1020,1389,1526,3380,1762,1723,1787,2229, 412,2114, # 1664 +1900,2392,3518, 512,2597, 427,1925,2341,3122,1653,1686,2465,2499, 697, 330, 273, # 1680 + 380,2162, 951, 832, 780, 991,1301,3073, 965,2270,3519, 668,2523,2636,1286, 535, # 1696 +1407, 518, 671, 957,2658,2378, 267, 611,2197,3030,6123, 248,2299, 967,1799,2356, # 1712 + 850,1418,3437,1876,1256,1480,2828,1718,6124,6125,1755,1664,2405,6126,4628,2879, # 1728 +2829, 499,2179, 676,4629, 557,2329,2214,2090, 325,3234, 464, 811,3001, 992,2342, # 1744 +2481,1232,1469, 303,2242, 466,1070,2163, 603,1777,2091,4630,2752,4631,2714, 322, # 1760 +2659,1964,1768, 481,2188,1463,2330,2857,3600,2092,3031,2421,4632,2318,2070,1849, # 1776 +2598,4633,1302,2254,1668,1701,2422,3811,2905,3032,3123,2046,4106,1763,1694,4634, # 1792 +1604, 943,1724,1454, 917, 868,2215,1169,2940, 552,1145,1800,1228,1823,1955, 316, # 1808 +1080,2510, 361,1807,2830,4107,2660,3381,1346,1423,1134,4108,6127, 541,1263,1229, # 1824 +1148,2540, 545, 465,1833,2880,3438,1901,3074,2482, 816,3937, 713,1788,2500, 122, # 1840 +1575, 195,1451,2501,1111,6128, 859, 374,1225,2243,2483,4317, 390,1033,3439,3075, # 1856 +2524,1687, 266, 793,1440,2599, 946, 779, 802, 507, 897,1081, 528,2189,1292, 711, # 
1872 +1866,1725,1167,1640, 753, 398,2661,1053, 246, 348,4318, 137,1024,3440,1600,2077, # 1888 +2129, 825,4319, 698, 238, 521, 187,2300,1157,2423,1641,1605,1464,1610,1097,2541, # 1904 +1260,1436, 759,2255,1814,2150, 705,3235, 409,2563,3304, 561,3033,2005,2564, 726, # 1920 +1956,2343,3698,4109, 949,3812,3813,3520,1669, 653,1379,2525, 881,2198, 632,2256, # 1936 +1027, 778,1074, 733,1957, 514,1481,2466, 554,2180, 702,3938,1606,1017,1398,6129, # 1952 +1380,3521, 921, 993,1313, 594, 449,1489,1617,1166, 768,1426,1360, 495,1794,3601, # 1968 +1177,3602,1170,4320,2344, 476, 425,3167,4635,3168,1424, 401,2662,1171,3382,1998, # 1984 +1089,4110, 477,3169, 474,6130,1909, 596,2831,1842, 494, 693,1051,1028,1207,3076, # 2000 + 606,2115, 727,2790,1473,1115, 743,3522, 630, 805,1532,4321,2021, 366,1057, 838, # 2016 + 684,1114,2142,4322,2050,1492,1892,1808,2271,3814,2424,1971,1447,1373,3305,1090, # 2032 +1536,3939,3523,3306,1455,2199, 336, 369,2331,1035, 584,2393, 902, 718,2600,6131, # 2048 +2753, 463,2151,1149,1611,2467, 715,1308,3124,1268, 343,1413,3236,1517,1347,2663, # 2064 +2093,3940,2022,1131,1553,2100,2941,1427,3441,2942,1323,2484,6132,1980, 872,2368, # 2080 +2441,2943, 320,2369,2116,1082, 679,1933,3941,2791,3815, 625,1143,2023, 422,2200, # 2096 +3816,6133, 730,1695, 356,2257,1626,2301,2858,2637,1627,1778, 937, 883,2906,2693, # 2112 +3002,1769,1086, 400,1063,1325,3307,2792,4111,3077, 456,2345,1046, 747,6134,1524, # 2128 + 884,1094,3383,1474,2164,1059, 974,1688,2181,2258,1047, 345,1665,1187, 358, 875, # 2144 +3170, 305, 660,3524,2190,1334,1135,3171,1540,1649,2542,1527, 927, 968,2793, 885, # 2160 +1972,1850, 482, 500,2638,1218,1109,1085,2543,1654,2034, 876, 78,2287,1482,1277, # 2176 + 861,1675,1083,1779, 724,2754, 454, 397,1132,1612,2332, 893, 672,1237, 257,2259, # 2192 +2370, 135,3384, 337,2244, 547, 352, 340, 709,2485,1400, 788,1138,2511, 540, 772, # 2208 +1682,2260,2272,2544,2013,1843,1902,4636,1999,1562,2288,4637,2201,1403,1533, 407, # 2224 + 576,3308,1254,2071, 978,3385, 170, 136,1201,3125,2664,3172,2394, 213, 912, 873, # 2240 +3603,1713,2202, 699,3604,3699, 813,3442, 493, 531,1054, 468,2907,1483, 304, 281, # 2256 +4112,1726,1252,2094, 339,2319,2130,2639, 756,1563,2944, 748, 571,2976,1588,2425, # 2272 +2715,1851,1460,2426,1528,1392,1973,3237, 288,3309, 685,3386, 296, 892,2716,2216, # 2288 +1570,2245, 722,1747,2217, 905,3238,1103,6135,1893,1441,1965, 251,1805,2371,3700, # 2304 +2601,1919,1078, 75,2182,1509,1592,1270,2640,4638,2152,6136,3310,3817, 524, 706, # 2320 +1075, 292,3818,1756,2602, 317, 98,3173,3605,3525,1844,2218,3819,2502, 814, 567, # 2336 + 385,2908,1534,6137, 534,1642,3239, 797,6138,1670,1529, 953,4323, 188,1071, 538, # 2352 + 178, 729,3240,2109,1226,1374,2000,2357,2977, 731,2468,1116,2014,2051,6139,1261, # 2368 +1593, 803,2859,2736,3443, 556, 682, 823,1541,6140,1369,2289,1706,2794, 845, 462, # 2384 +2603,2665,1361, 387, 162,2358,1740, 739,1770,1720,1304,1401,3241,1049, 627,1571, # 2400 +2427,3526,1877,3942,1852,1500, 431,1910,1503, 677, 297,2795, 286,1433,1038,1198, # 2416 +2290,1133,1596,4113,4639,2469,1510,1484,3943,6141,2442, 108, 712,4640,2372, 866, # 2432 +3701,2755,3242,1348, 834,1945,1408,3527,2395,3243,1811, 824, 994,1179,2110,1548, # 2448 +1453, 790,3003, 690,4324,4325,2832,2909,3820,1860,3821, 225,1748, 310, 346,1780, # 2464 +2470, 821,1993,2717,2796, 828, 877,3528,2860,2471,1702,2165,2910,2486,1789, 453, # 2480 + 359,2291,1676, 73,1164,1461,1127,3311, 421, 604, 314,1037, 589, 116,2487, 737, # 2496 + 837,1180, 111, 244, 735,6142,2261,1861,1362, 986, 523, 418, 581,2666,3822, 103, 
# 2512 + 855, 503,1414,1867,2488,1091, 657,1597, 979, 605,1316,4641,1021,2443,2078,2001, # 2528 +1209, 96, 587,2166,1032, 260,1072,2153, 173, 94, 226,3244, 819,2006,4642,4114, # 2544 +2203, 231,1744, 782, 97,2667, 786,3387, 887, 391, 442,2219,4326,1425,6143,2694, # 2560 + 633,1544,1202, 483,2015, 592,2052,1958,2472,1655, 419, 129,4327,3444,3312,1714, # 2576 +1257,3078,4328,1518,1098, 865,1310,1019,1885,1512,1734, 469,2444, 148, 773, 436, # 2592 +1815,1868,1128,1055,4329,1245,2756,3445,2154,1934,1039,4643, 579,1238, 932,2320, # 2608 + 353, 205, 801, 115,2428, 944,2321,1881, 399,2565,1211, 678, 766,3944, 335,2101, # 2624 +1459,1781,1402,3945,2737,2131,1010, 844, 981,1326,1013, 550,1816,1545,2620,1335, # 2640 +1008, 371,2881, 936,1419,1613,3529,1456,1395,2273,1834,2604,1317,2738,2503, 416, # 2656 +1643,4330, 806,1126, 229, 591,3946,1314,1981,1576,1837,1666, 347,1790, 977,3313, # 2672 + 764,2861,1853, 688,2429,1920,1462, 77, 595, 415,2002,3034, 798,1192,4115,6144, # 2688 +2978,4331,3035,2695,2582,2072,2566, 430,2430,1727, 842,1396,3947,3702, 613, 377, # 2704 + 278, 236,1417,3388,3314,3174, 757,1869, 107,3530,6145,1194, 623,2262, 207,1253, # 2720 +2167,3446,3948, 492,1117,1935, 536,1838,2757,1246,4332, 696,2095,2406,1393,1572, # 2736 +3175,1782, 583, 190, 253,1390,2230, 830,3126,3389, 934,3245,1703,1749,2979,1870, # 2752 +2545,1656,2204, 869,2346,4116,3176,1817, 496,1764,4644, 942,1504, 404,1903,1122, # 2768 +1580,3606,2945,1022, 515, 372,1735, 955,2431,3036,6146,2797,1110,2302,2798, 617, # 2784 +6147, 441, 762,1771,3447,3607,3608,1904, 840,3037, 86, 939,1385, 572,1370,2445, # 2800 +1336, 114,3703, 898, 294, 203,3315, 703,1583,2274, 429, 961,4333,1854,1951,3390, # 2816 +2373,3704,4334,1318,1381, 966,1911,2322,1006,1155, 309, 989, 458,2718,1795,1372, # 2832 +1203, 252,1689,1363,3177, 517,1936, 168,1490, 562, 193,3823,1042,4117,1835, 551, # 2848 + 470,4645, 395, 489,3448,1871,1465,2583,2641, 417,1493, 279,1295, 511,1236,1119, # 2864 + 72,1231,1982,1812,3004, 871,1564, 984,3449,1667,2696,2096,4646,2347,2833,1673, # 2880 +3609, 695,3246,2668, 807,1183,4647, 890, 388,2333,1801,1457,2911,1765,1477,1031, # 2896 +3316,3317,1278,3391,2799,2292,2526, 163,3450,4335,2669,1404,1802,6148,2323,2407, # 2912 +1584,1728,1494,1824,1269, 298, 909,3318,1034,1632, 375, 776,1683,2061, 291, 210, # 2928 +1123, 809,1249,1002,2642,3038, 206,1011,2132, 144, 975, 882,1565, 342, 667, 754, # 2944 +1442,2143,1299,2303,2062, 447, 626,2205,1221,2739,2912,1144,1214,2206,2584, 760, # 2960 +1715, 614, 950,1281,2670,2621, 810, 577,1287,2546,4648, 242,2168, 250,2643, 691, # 2976 + 123,2644, 647, 313,1029, 689,1357,2946,1650, 216, 771,1339,1306, 808,2063, 549, # 2992 + 913,1371,2913,2914,6149,1466,1092,1174,1196,1311,2605,2396,1783,1796,3079, 406, # 3008 +2671,2117,3949,4649, 487,1825,2220,6150,2915, 448,2348,1073,6151,2397,1707, 130, # 3024 + 900,1598, 329, 176,1959,2527,1620,6152,2275,4336,3319,1983,2191,3705,3610,2155, # 3040 +3706,1912,1513,1614,6153,1988, 646, 392,2304,1589,3320,3039,1826,1239,1352,1340, # 3056 +2916, 505,2567,1709,1437,2408,2547, 906,6154,2672, 384,1458,1594,1100,1329, 710, # 3072 + 423,3531,2064,2231,2622,1989,2673,1087,1882, 333, 841,3005,1296,2882,2379, 580, # 3088 +1937,1827,1293,2585, 601, 574, 249,1772,4118,2079,1120, 645, 901,1176,1690, 795, # 3104 +2207, 478,1434, 516,1190,1530, 761,2080, 930,1264, 355, 435,1552, 644,1791, 987, # 3120 + 220,1364,1163,1121,1538, 306,2169,1327,1222, 546,2645, 218, 241, 610,1704,3321, # 3136 +1984,1839,1966,2528, 451,6155,2586,3707,2568, 907,3178, 254,2947, 186,1845,4650, 
# 3152 + 745, 432,1757, 428,1633, 888,2246,2221,2489,3611,2118,1258,1265, 956,3127,1784, # 3168 +4337,2490, 319, 510, 119, 457,3612, 274,2035,2007,4651,1409,3128, 970,2758, 590, # 3184 +2800, 661,2247,4652,2008,3950,1420,1549,3080,3322,3951,1651,1375,2111, 485,2491, # 3200 +1429,1156,6156,2548,2183,1495, 831,1840,2529,2446, 501,1657, 307,1894,3247,1341, # 3216 + 666, 899,2156,1539,2549,1559, 886, 349,2208,3081,2305,1736,3824,2170,2759,1014, # 3232 +1913,1386, 542,1397,2948, 490, 368, 716, 362, 159, 282,2569,1129,1658,1288,1750, # 3248 +2674, 276, 649,2016, 751,1496, 658,1818,1284,1862,2209,2087,2512,3451, 622,2834, # 3264 + 376, 117,1060,2053,1208,1721,1101,1443, 247,1250,3179,1792,3952,2760,2398,3953, # 3280 +6157,2144,3708, 446,2432,1151,2570,3452,2447,2761,2835,1210,2448,3082, 424,2222, # 3296 +1251,2449,2119,2836, 504,1581,4338, 602, 817, 857,3825,2349,2306, 357,3826,1470, # 3312 +1883,2883, 255, 958, 929,2917,3248, 302,4653,1050,1271,1751,2307,1952,1430,2697, # 3328 +2719,2359, 354,3180, 777, 158,2036,4339,1659,4340,4654,2308,2949,2248,1146,2232, # 3344 +3532,2720,1696,2623,3827,6158,3129,1550,2698,1485,1297,1428, 637, 931,2721,2145, # 3360 + 914,2550,2587, 81,2450, 612, 827,2646,1242,4655,1118,2884, 472,1855,3181,3533, # 3376 +3534, 569,1353,2699,1244,1758,2588,4119,2009,2762,2171,3709,1312,1531,6159,1152, # 3392 +1938, 134,1830, 471,3710,2276,1112,1535,3323,3453,3535, 982,1337,2950, 488, 826, # 3408 + 674,1058,1628,4120,2017, 522,2399, 211, 568,1367,3454, 350, 293,1872,1139,3249, # 3424 +1399,1946,3006,1300,2360,3324, 588, 736,6160,2606, 744, 669,3536,3828,6161,1358, # 3440 + 199, 723, 848, 933, 851,1939,1505,1514,1338,1618,1831,4656,1634,3613, 443,2740, # 3456 +3829, 717,1947, 491,1914,6162,2551,1542,4121,1025,6163,1099,1223, 198,3040,2722, # 3472 + 370, 410,1905,2589, 998,1248,3182,2380, 519,1449,4122,1710, 947, 928,1153,4341, # 3488 +2277, 344,2624,1511, 615, 105, 161,1212,1076,1960,3130,2054,1926,1175,1906,2473, # 3504 + 414,1873,2801,6164,2309, 315,1319,3325, 318,2018,2146,2157, 963, 631, 223,4342, # 3520 +4343,2675, 479,3711,1197,2625,3712,2676,2361,6165,4344,4123,6166,2451,3183,1886, # 3536 +2184,1674,1330,1711,1635,1506, 799, 219,3250,3083,3954,1677,3713,3326,2081,3614, # 3552 +1652,2073,4657,1147,3041,1752, 643,1961, 147,1974,3955,6167,1716,2037, 918,3007, # 3568 +1994, 120,1537, 118, 609,3184,4345, 740,3455,1219, 332,1615,3830,6168,1621,2980, # 3584 +1582, 783, 212, 553,2350,3714,1349,2433,2082,4124, 889,6169,2310,1275,1410, 973, # 3600 + 166,1320,3456,1797,1215,3185,2885,1846,2590,2763,4658, 629, 822,3008, 763, 940, # 3616 +1990,2862, 439,2409,1566,1240,1622, 926,1282,1907,2764, 654,2210,1607, 327,1130, # 3632 +3956,1678,1623,6170,2434,2192, 686, 608,3831,3715, 903,3957,3042,6171,2741,1522, # 3648 +1915,1105,1555,2552,1359, 323,3251,4346,3457, 738,1354,2553,2311,2334,1828,2003, # 3664 +3832,1753,2351,1227,6172,1887,4125,1478,6173,2410,1874,1712,1847, 520,1204,2607, # 3680 + 264,4659, 836,2677,2102, 600,4660,3833,2278,3084,6174,4347,3615,1342, 640, 532, # 3696 + 543,2608,1888,2400,2591,1009,4348,1497, 341,1737,3616,2723,1394, 529,3252,1321, # 3712 + 983,4661,1515,2120, 971,2592, 924, 287,1662,3186,4349,2700,4350,1519, 908,1948, # 3728 +2452, 156, 796,1629,1486,2223,2055, 694,4126,1259,1036,3392,1213,2249,2742,1889, # 3744 +1230,3958,1015, 910, 408, 559,3617,4662, 746, 725, 935,4663,3959,3009,1289, 563, # 3760 + 867,4664,3960,1567,2981,2038,2626, 988,2263,2381,4351, 143,2374, 704,1895,6175, # 3776 +1188,3716,2088, 673,3085,2362,4352, 484,1608,1921,2765,2918, 215, 
904,3618,3537, # 3792 + 894, 509, 976,3043,2701,3961,4353,2837,2982, 498,6176,6177,1102,3538,1332,3393, # 3808 +1487,1636,1637, 233, 245,3962, 383, 650, 995,3044, 460,1520,1206,2352, 749,3327, # 3824 + 530, 700, 389,1438,1560,1773,3963,2264, 719,2951,2724,3834, 870,1832,1644,1000, # 3840 + 839,2474,3717, 197,1630,3394, 365,2886,3964,1285,2133, 734, 922, 818,1106, 732, # 3856 + 480,2083,1774,3458, 923,2279,1350, 221,3086, 85,2233,2234,3835,1585,3010,2147, # 3872 +1387,1705,2382,1619,2475, 133, 239,2802,1991,1016,2084,2383, 411,2838,1113, 651, # 3888 +1985,1160,3328, 990,1863,3087,1048,1276,2647, 265,2627,1599,3253,2056, 150, 638, # 3904 +2019, 656, 853, 326,1479, 680,1439,4354,1001,1759, 413,3459,3395,2492,1431, 459, # 3920 +4355,1125,3329,2265,1953,1450,2065,2863, 849, 351,2678,3131,3254,3255,1104,1577, # 3936 + 227,1351,1645,2453,2193,1421,2887, 812,2121, 634, 95,2435, 201,2312,4665,1646, # 3952 +1671,2743,1601,2554,2702,2648,2280,1315,1366,2089,3132,1573,3718,3965,1729,1189, # 3968 + 328,2679,1077,1940,1136, 558,1283, 964,1195, 621,2074,1199,1743,3460,3619,1896, # 3984 +1916,1890,3836,2952,1154,2112,1064, 862, 378,3011,2066,2113,2803,1568,2839,6178, # 4000 +3088,2919,1941,1660,2004,1992,2194, 142, 707,1590,1708,1624,1922,1023,1836,1233, # 4016 +1004,2313, 789, 741,3620,6179,1609,2411,1200,4127,3719,3720,4666,2057,3721, 593, # 4032 +2840, 367,2920,1878,6180,3461,1521, 628,1168, 692,2211,2649, 300, 720,2067,2571, # 4048 +2953,3396, 959,2504,3966,3539,3462,1977, 701,6181, 954,1043, 800, 681, 183,3722, # 4064 +1803,1730,3540,4128,2103, 815,2314, 174, 467, 230,2454,1093,2134, 755,3541,3397, # 4080 +1141,1162,6182,1738,2039, 270,3256,2513,1005,1647,2185,3837, 858,1679,1897,1719, # 4096 +2954,2324,1806, 402, 670, 167,4129,1498,2158,2104, 750,6183, 915, 189,1680,1551, # 4112 + 455,4356,1501,2455, 405,1095,2955, 338,1586,1266,1819, 570, 641,1324, 237,1556, # 4128 +2650,1388,3723,6184,1368,2384,1343,1978,3089,2436, 879,3724, 792,1191, 758,3012, # 4144 +1411,2135,1322,4357, 240,4667,1848,3725,1574,6185, 420,3045,1546,1391, 714,4358, # 4160 +1967, 941,1864, 863, 664, 426, 560,1731,2680,1785,2864,1949,2363, 403,3330,1415, # 4176 +1279,2136,1697,2335, 204, 721,2097,3838, 90,6186,2085,2505, 191,3967, 124,2148, # 4192 +1376,1798,1178,1107,1898,1405, 860,4359,1243,1272,2375,2983,1558,2456,1638, 113, # 4208 +3621, 578,1923,2609, 880, 386,4130, 784,2186,2266,1422,2956,2172,1722, 497, 263, # 4224 +2514,1267,2412,2610, 177,2703,3542, 774,1927,1344, 616,1432,1595,1018, 172,4360, # 4240 +2325, 911,4361, 438,1468,3622, 794,3968,2024,2173,1681,1829,2957, 945, 895,3090, # 4256 + 575,2212,2476, 475,2401,2681, 785,2744,1745,2293,2555,1975,3133,2865, 394,4668, # 4272 +3839, 635,4131, 639, 202,1507,2195,2766,1345,1435,2572,3726,1908,1184,1181,2457, # 4288 +3727,3134,4362, 843,2611, 437, 916,4669, 234, 769,1884,3046,3047,3623, 833,6187, # 4304 +1639,2250,2402,1355,1185,2010,2047, 999, 525,1732,1290,1488,2612, 948,1578,3728, # 4320 +2413,2477,1216,2725,2159, 334,3840,1328,3624,2921,1525,4132, 564,1056, 891,4363, # 4336 +1444,1698,2385,2251,3729,1365,2281,2235,1717,6188, 864,3841,2515, 444, 527,2767, # 4352 +2922,3625, 544, 461,6189, 566, 209,2437,3398,2098,1065,2068,3331,3626,3257,2137, # 4368 #last 512 +) + + diff --git a/env/lib/python3.6/site-packages/pip/_vendor/chardet/jpcntx.py b/env/lib/python3.6/site-packages/pip/_vendor/chardet/jpcntx.py new file mode 100644 index 0000000..20044e4 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/chardet/jpcntx.py @@ -0,0 +1,233 @@ +######################## BEGIN 
LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + + +# This is hiragana 2-char sequence table, the number in each cell represents its frequency category +jp2CharContext = ( +(0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1), +(2,4,0,4,0,3,0,4,0,3,4,4,4,2,4,3,3,4,3,2,3,3,4,2,3,3,3,2,4,1,4,3,3,1,5,4,3,4,3,4,3,5,3,0,3,5,4,2,0,3,1,0,3,3,0,3,3,0,1,1,0,4,3,0,3,3,0,4,0,2,0,3,5,5,5,5,4,0,4,1,0,3,4), +(0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2), +(0,4,0,5,0,5,0,4,0,4,5,4,4,3,5,3,5,1,5,3,4,3,4,4,3,4,3,3,4,3,5,4,4,3,5,5,3,5,5,5,3,5,5,3,4,5,5,3,1,3,2,0,3,4,0,4,2,0,4,2,1,5,3,2,3,5,0,4,0,2,0,5,4,4,5,4,5,0,4,0,0,4,4), +(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0), +(0,3,0,4,0,3,0,3,0,4,5,4,3,3,3,3,4,3,5,4,4,3,5,4,4,3,4,3,4,4,4,4,5,3,4,4,3,4,5,5,4,5,5,1,4,5,4,3,0,3,3,1,3,3,0,4,4,0,3,3,1,5,3,3,3,5,0,4,0,3,0,4,4,3,4,3,3,0,4,1,1,3,4), +(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0), +(0,4,0,3,0,3,0,4,0,3,4,4,3,2,2,1,2,1,3,1,3,3,3,3,3,4,3,1,3,3,5,3,3,0,4,3,0,5,4,3,3,5,4,4,3,4,4,5,0,1,2,0,1,2,0,2,2,0,1,0,0,5,2,2,1,4,0,3,0,1,0,4,4,3,5,4,3,0,2,1,0,4,3), +(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0), +(0,3,0,5,0,4,0,2,1,4,4,2,4,1,4,2,4,2,4,3,3,3,4,3,3,3,3,1,4,2,3,3,3,1,4,4,1,1,1,4,3,3,2,0,2,4,3,2,0,3,3,0,3,1,1,0,0,0,3,3,0,4,2,2,3,4,0,4,0,3,0,4,4,5,3,4,4,0,3,0,0,1,4), +(1,4,0,4,0,4,0,4,0,3,5,4,4,3,4,3,5,4,3,3,4,3,5,4,4,4,4,3,4,2,4,3,3,1,5,4,3,2,4,5,4,5,5,4,4,5,4,4,0,3,2,2,3,3,0,4,3,1,3,2,1,4,3,3,4,5,0,3,0,2,0,4,5,5,4,5,4,0,4,0,0,5,4), +(0,5,0,5,0,4,0,3,0,4,4,3,4,3,3,3,4,0,4,4,4,3,4,3,4,3,3,1,4,2,4,3,4,0,5,4,1,4,5,4,4,5,3,2,4,3,4,3,2,4,1,3,3,3,2,3,2,0,4,3,3,4,3,3,3,4,0,4,0,3,0,4,5,4,4,4,3,0,4,1,0,1,3), +(0,3,1,4,0,3,0,2,0,3,4,4,3,1,4,2,3,3,4,3,4,3,4,3,4,4,3,2,3,1,5,4,4,1,4,4,3,5,4,4,3,5,5,4,3,4,4,3,1,2,3,1,2,2,0,3,2,0,3,1,0,5,3,3,3,4,3,3,3,3,4,4,4,4,5,4,2,0,3,3,2,4,3), 
+(0,2,0,3,0,1,0,1,0,0,3,2,0,0,2,0,1,0,2,1,3,3,3,1,2,3,1,0,1,0,4,2,1,1,3,3,0,4,3,3,1,4,3,3,0,3,3,2,0,0,0,0,1,0,0,2,0,0,0,0,0,4,1,0,2,3,2,2,2,1,3,3,3,4,4,3,2,0,3,1,0,3,3), +(0,4,0,4,0,3,0,3,0,4,4,4,3,3,3,3,3,3,4,3,4,2,4,3,4,3,3,2,4,3,4,5,4,1,4,5,3,5,4,5,3,5,4,0,3,5,5,3,1,3,3,2,2,3,0,3,4,1,3,3,2,4,3,3,3,4,0,4,0,3,0,4,5,4,4,5,3,0,4,1,0,3,4), +(0,2,0,3,0,3,0,0,0,2,2,2,1,0,1,0,0,0,3,0,3,0,3,0,1,3,1,0,3,1,3,3,3,1,3,3,3,0,1,3,1,3,4,0,0,3,1,1,0,3,2,0,0,0,0,1,3,0,1,0,0,3,3,2,0,3,0,0,0,0,0,3,4,3,4,3,3,0,3,0,0,2,3), +(2,3,0,3,0,2,0,1,0,3,3,4,3,1,3,1,1,1,3,1,4,3,4,3,3,3,0,0,3,1,5,4,3,1,4,3,2,5,5,4,4,4,4,3,3,4,4,4,0,2,1,1,3,2,0,1,2,0,0,1,0,4,1,3,3,3,0,3,0,1,0,4,4,4,5,5,3,0,2,0,0,4,4), +(0,2,0,1,0,3,1,3,0,2,3,3,3,0,3,1,0,0,3,0,3,2,3,1,3,2,1,1,0,0,4,2,1,0,2,3,1,4,3,2,0,4,4,3,1,3,1,3,0,1,0,0,1,0,0,0,1,0,0,0,0,4,1,1,1,2,0,3,0,0,0,3,4,2,4,3,2,0,1,0,0,3,3), +(0,1,0,4,0,5,0,4,0,2,4,4,2,3,3,2,3,3,5,3,3,3,4,3,4,2,3,0,4,3,3,3,4,1,4,3,2,1,5,5,3,4,5,1,3,5,4,2,0,3,3,0,1,3,0,4,2,0,1,3,1,4,3,3,3,3,0,3,0,1,0,3,4,4,4,5,5,0,3,0,1,4,5), +(0,2,0,3,0,3,0,0,0,2,3,1,3,0,4,0,1,1,3,0,3,4,3,2,3,1,0,3,3,2,3,1,3,0,2,3,0,2,1,4,1,2,2,0,0,3,3,0,0,2,0,0,0,1,0,0,0,0,2,2,0,3,2,1,3,3,0,2,0,2,0,0,3,3,1,2,4,0,3,0,2,2,3), +(2,4,0,5,0,4,0,4,0,2,4,4,4,3,4,3,3,3,1,2,4,3,4,3,4,4,5,0,3,3,3,3,2,0,4,3,1,4,3,4,1,4,4,3,3,4,4,3,1,2,3,0,4,2,0,4,1,0,3,3,0,4,3,3,3,4,0,4,0,2,0,3,5,3,4,5,2,0,3,0,0,4,5), +(0,3,0,4,0,1,0,1,0,1,3,2,2,1,3,0,3,0,2,0,2,0,3,0,2,0,0,0,1,0,1,1,0,0,3,1,0,0,0,4,0,3,1,0,2,1,3,0,0,0,0,0,0,3,0,0,0,0,0,0,0,4,2,2,3,1,0,3,0,0,0,1,4,4,4,3,0,0,4,0,0,1,4), +(1,4,1,5,0,3,0,3,0,4,5,4,4,3,5,3,3,4,4,3,4,1,3,3,3,3,2,1,4,1,5,4,3,1,4,4,3,5,4,4,3,5,4,3,3,4,4,4,0,3,3,1,2,3,0,3,1,0,3,3,0,5,4,4,4,4,4,4,3,3,5,4,4,3,3,5,4,0,3,2,0,4,4), +(0,2,0,3,0,1,0,0,0,1,3,3,3,2,4,1,3,0,3,1,3,0,2,2,1,1,0,0,2,0,4,3,1,0,4,3,0,4,4,4,1,4,3,1,1,3,3,1,0,2,0,0,1,3,0,0,0,0,2,0,0,4,3,2,4,3,5,4,3,3,3,4,3,3,4,3,3,0,2,1,0,3,3), +(0,2,0,4,0,3,0,2,0,2,5,5,3,4,4,4,4,1,4,3,3,0,4,3,4,3,1,3,3,2,4,3,0,3,4,3,0,3,4,4,2,4,4,0,4,5,3,3,2,2,1,1,1,2,0,1,5,0,3,3,2,4,3,3,3,4,0,3,0,2,0,4,4,3,5,5,0,0,3,0,2,3,3), +(0,3,0,4,0,3,0,1,0,3,4,3,3,1,3,3,3,0,3,1,3,0,4,3,3,1,1,0,3,0,3,3,0,0,4,4,0,1,5,4,3,3,5,0,3,3,4,3,0,2,0,1,1,1,0,1,3,0,1,2,1,3,3,2,3,3,0,3,0,1,0,1,3,3,4,4,1,0,1,2,2,1,3), +(0,1,0,4,0,4,0,3,0,1,3,3,3,2,3,1,1,0,3,0,3,3,4,3,2,4,2,0,1,0,4,3,2,0,4,3,0,5,3,3,2,4,4,4,3,3,3,4,0,1,3,0,0,1,0,0,1,0,0,0,0,4,2,3,3,3,0,3,0,0,0,4,4,4,5,3,2,0,3,3,0,3,5), +(0,2,0,3,0,0,0,3,0,1,3,0,2,0,0,0,1,0,3,1,1,3,3,0,0,3,0,0,3,0,2,3,1,0,3,1,0,3,3,2,0,4,2,2,0,2,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,2,1,2,0,1,0,1,0,0,0,1,3,1,2,0,0,0,1,0,0,1,4), +(0,3,0,3,0,5,0,1,0,2,4,3,1,3,3,2,1,1,5,2,1,0,5,1,2,0,0,0,3,3,2,2,3,2,4,3,0,0,3,3,1,3,3,0,2,5,3,4,0,3,3,0,1,2,0,2,2,0,3,2,0,2,2,3,3,3,0,2,0,1,0,3,4,4,2,5,4,0,3,0,0,3,5), +(0,3,0,3,0,3,0,1,0,3,3,3,3,0,3,0,2,0,2,1,1,0,2,0,1,0,0,0,2,1,0,0,1,0,3,2,0,0,3,3,1,2,3,1,0,3,3,0,0,1,0,0,0,0,0,2,0,0,0,0,0,2,3,1,2,3,0,3,0,1,0,3,2,1,0,4,3,0,1,1,0,3,3), +(0,4,0,5,0,3,0,3,0,4,5,5,4,3,5,3,4,3,5,3,3,2,5,3,4,4,4,3,4,3,4,5,5,3,4,4,3,4,4,5,4,4,4,3,4,5,5,4,2,3,4,2,3,4,0,3,3,1,4,3,2,4,3,3,5,5,0,3,0,3,0,5,5,5,5,4,4,0,4,0,1,4,4), +(0,4,0,4,0,3,0,3,0,3,5,4,4,2,3,2,5,1,3,2,5,1,4,2,3,2,3,3,4,3,3,3,3,2,5,4,1,3,3,5,3,4,4,0,4,4,3,1,1,3,1,0,2,3,0,2,3,0,3,0,0,4,3,1,3,4,0,3,0,2,0,4,4,4,3,4,5,0,4,0,0,3,4), +(0,3,0,3,0,3,1,2,0,3,4,4,3,3,3,0,2,2,4,3,3,1,3,3,3,1,1,0,3,1,4,3,2,3,4,4,2,4,4,4,3,4,4,3,2,4,4,3,1,3,3,1,3,3,0,4,1,0,2,2,1,4,3,2,3,3,5,4,3,3,5,4,4,3,3,0,4,0,3,2,2,4,4), 
+(0,2,0,1,0,0,0,0,0,1,2,1,3,0,0,0,0,0,2,0,1,2,1,0,0,1,0,0,0,0,3,0,0,1,0,1,1,3,1,0,0,0,1,1,0,1,1,0,0,0,0,0,2,0,0,0,0,0,0,0,0,1,1,2,2,0,3,4,0,0,0,1,1,0,0,1,0,0,0,0,0,1,1), +(0,1,0,0,0,1,0,0,0,0,4,0,4,1,4,0,3,0,4,0,3,0,4,0,3,0,3,0,4,1,5,1,4,0,0,3,0,5,0,5,2,0,1,0,0,0,2,1,4,0,1,3,0,0,3,0,0,3,1,1,4,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0), +(1,4,0,5,0,3,0,2,0,3,5,4,4,3,4,3,5,3,4,3,3,0,4,3,3,3,3,3,3,2,4,4,3,1,3,4,4,5,4,4,3,4,4,1,3,5,4,3,3,3,1,2,2,3,3,1,3,1,3,3,3,5,3,3,4,5,0,3,0,3,0,3,4,3,4,4,3,0,3,0,2,4,3), +(0,1,0,4,0,0,0,0,0,1,4,0,4,1,4,2,4,0,3,0,1,0,1,0,0,0,0,0,2,0,3,1,1,1,0,3,0,0,0,1,2,1,0,0,1,1,1,1,0,1,0,0,0,1,0,0,3,0,0,0,0,3,2,0,2,2,0,1,0,0,0,2,3,2,3,3,0,0,0,0,2,1,0), +(0,5,1,5,0,3,0,3,0,5,4,4,5,1,5,3,3,0,4,3,4,3,5,3,4,3,3,2,4,3,4,3,3,0,3,3,1,4,4,3,4,4,4,3,4,5,5,3,2,3,1,1,3,3,1,3,1,1,3,3,2,4,5,3,3,5,0,4,0,3,0,4,4,3,5,3,3,0,3,4,0,4,3), +(0,5,0,5,0,3,0,2,0,4,4,3,5,2,4,3,3,3,4,4,4,3,5,3,5,3,3,1,4,0,4,3,3,0,3,3,0,4,4,4,4,5,4,3,3,5,5,3,2,3,1,2,3,2,0,1,0,0,3,2,2,4,4,3,1,5,0,4,0,3,0,4,3,1,3,2,1,0,3,3,0,3,3), +(0,4,0,5,0,5,0,4,0,4,5,5,5,3,4,3,3,2,5,4,4,3,5,3,5,3,4,0,4,3,4,4,3,2,4,4,3,4,5,4,4,5,5,0,3,5,5,4,1,3,3,2,3,3,1,3,1,0,4,3,1,4,4,3,4,5,0,4,0,2,0,4,3,4,4,3,3,0,4,0,0,5,5), +(0,4,0,4,0,5,0,1,1,3,3,4,4,3,4,1,3,0,5,1,3,0,3,1,3,1,1,0,3,0,3,3,4,0,4,3,0,4,4,4,3,4,4,0,3,5,4,1,0,3,0,0,2,3,0,3,1,0,3,1,0,3,2,1,3,5,0,3,0,1,0,3,2,3,3,4,4,0,2,2,0,4,4), +(2,4,0,5,0,4,0,3,0,4,5,5,4,3,5,3,5,3,5,3,5,2,5,3,4,3,3,4,3,4,5,3,2,1,5,4,3,2,3,4,5,3,4,1,2,5,4,3,0,3,3,0,3,2,0,2,3,0,4,1,0,3,4,3,3,5,0,3,0,1,0,4,5,5,5,4,3,0,4,2,0,3,5), +(0,5,0,4,0,4,0,2,0,5,4,3,4,3,4,3,3,3,4,3,4,2,5,3,5,3,4,1,4,3,4,4,4,0,3,5,0,4,4,4,4,5,3,1,3,4,5,3,3,3,3,3,3,3,0,2,2,0,3,3,2,4,3,3,3,5,3,4,1,3,3,5,3,2,0,0,0,0,4,3,1,3,3), +(0,1,0,3,0,3,0,1,0,1,3,3,3,2,3,3,3,0,3,0,0,0,3,1,3,0,0,0,2,2,2,3,0,0,3,2,0,1,2,4,1,3,3,0,0,3,3,3,0,1,0,0,2,1,0,0,3,0,3,1,0,3,0,0,1,3,0,2,0,1,0,3,3,1,3,3,0,0,1,1,0,3,3), +(0,2,0,3,0,2,1,4,0,2,2,3,1,1,3,1,1,0,2,0,3,1,2,3,1,3,0,0,1,0,4,3,2,3,3,3,1,4,2,3,3,3,3,1,0,3,1,4,0,1,1,0,1,2,0,1,1,0,1,1,0,3,1,3,2,2,0,1,0,0,0,2,3,3,3,1,0,0,0,0,0,2,3), +(0,5,0,4,0,5,0,2,0,4,5,5,3,3,4,3,3,1,5,4,4,2,4,4,4,3,4,2,4,3,5,5,4,3,3,4,3,3,5,5,4,5,5,1,3,4,5,3,1,4,3,1,3,3,0,3,3,1,4,3,1,4,5,3,3,5,0,4,0,3,0,5,3,3,1,4,3,0,4,0,1,5,3), +(0,5,0,5,0,4,0,2,0,4,4,3,4,3,3,3,3,3,5,4,4,4,4,4,4,5,3,3,5,2,4,4,4,3,4,4,3,3,4,4,5,5,3,3,4,3,4,3,3,4,3,3,3,3,1,2,2,1,4,3,3,5,4,4,3,4,0,4,0,3,0,4,4,4,4,4,1,0,4,2,0,2,4), +(0,4,0,4,0,3,0,1,0,3,5,2,3,0,3,0,2,1,4,2,3,3,4,1,4,3,3,2,4,1,3,3,3,0,3,3,0,0,3,3,3,5,3,3,3,3,3,2,0,2,0,0,2,0,0,2,0,0,1,0,0,3,1,2,2,3,0,3,0,2,0,4,4,3,3,4,1,0,3,0,0,2,4), +(0,0,0,4,0,0,0,0,0,0,1,0,1,0,2,0,0,0,0,0,1,0,2,0,1,0,0,0,0,0,3,1,3,0,3,2,0,0,0,1,0,3,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,4,0,2,0,0,0,0,0,0,2), +(0,2,1,3,0,2,0,2,0,3,3,3,3,1,3,1,3,3,3,3,3,3,4,2,2,1,2,1,4,0,4,3,1,3,3,3,2,4,3,5,4,3,3,3,3,3,3,3,0,1,3,0,2,0,0,1,0,0,1,0,0,4,2,0,2,3,0,3,3,0,3,3,4,2,3,1,4,0,1,2,0,2,3), +(0,3,0,3,0,1,0,3,0,2,3,3,3,0,3,1,2,0,3,3,2,3,3,2,3,2,3,1,3,0,4,3,2,0,3,3,1,4,3,3,2,3,4,3,1,3,3,1,1,0,1,1,0,1,0,1,0,1,0,0,0,4,1,1,0,3,0,3,1,0,2,3,3,3,3,3,1,0,0,2,0,3,3), +(0,0,0,0,0,0,0,0,0,0,3,0,2,0,3,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,3,0,3,0,3,1,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,2,0,2,3,0,0,0,0,0,0,0,0,3), +(0,2,0,3,1,3,0,3,0,2,3,3,3,1,3,1,3,1,3,1,3,3,3,1,3,0,2,3,1,1,4,3,3,2,3,3,1,2,2,4,1,3,3,0,1,4,2,3,0,1,3,0,3,0,0,1,3,0,2,0,0,3,3,2,1,3,0,3,0,2,0,3,4,4,4,3,1,0,3,0,0,3,3), 
+(0,2,0,1,0,2,0,0,0,1,3,2,2,1,3,0,1,1,3,0,3,2,3,1,2,0,2,0,1,1,3,3,3,0,3,3,1,1,2,3,2,3,3,1,2,3,2,0,0,1,0,0,0,0,0,0,3,0,1,0,0,2,1,2,1,3,0,3,0,0,0,3,4,4,4,3,2,0,2,0,0,2,4), +(0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,2,2,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,3,1,0,0,0,0,0,0,0,3), +(0,3,0,3,0,2,0,3,0,3,3,3,2,3,2,2,2,0,3,1,3,3,3,2,3,3,0,0,3,0,3,2,2,0,2,3,1,4,3,4,3,3,2,3,1,5,4,4,0,3,1,2,1,3,0,3,1,1,2,0,2,3,1,3,1,3,0,3,0,1,0,3,3,4,4,2,1,0,2,1,0,2,4), +(0,1,0,3,0,1,0,2,0,1,4,2,5,1,4,0,2,0,2,1,3,1,4,0,2,1,0,0,2,1,4,1,1,0,3,3,0,5,1,3,2,3,3,1,0,3,2,3,0,1,0,0,0,0,0,0,1,0,0,0,0,4,0,1,0,3,0,2,0,1,0,3,3,3,4,3,3,0,0,0,0,2,3), +(0,0,0,1,0,0,0,0,0,0,2,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,1,0,0,1,0,0,0,0,0,3), +(0,1,0,3,0,4,0,3,0,2,4,3,1,0,3,2,2,1,3,1,2,2,3,1,1,1,2,1,3,0,1,2,0,1,3,2,1,3,0,5,5,1,0,0,1,3,2,1,0,3,0,0,1,0,0,0,0,0,3,4,0,1,1,1,3,2,0,2,0,1,0,2,3,3,1,2,3,0,1,0,1,0,4), +(0,0,0,1,0,3,0,3,0,2,2,1,0,0,4,0,3,0,3,1,3,0,3,0,3,0,1,0,3,0,3,1,3,0,3,3,0,0,1,2,1,1,1,0,1,2,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,2,2,1,2,0,0,2,0,0,0,0,2,3,3,3,3,0,0,0,0,1,4), +(0,0,0,3,0,3,0,0,0,0,3,1,1,0,3,0,1,0,2,0,1,0,0,0,0,0,0,0,1,0,3,0,2,0,2,3,0,0,2,2,3,1,2,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,2,0,0,0,0,2,3), +(2,4,0,5,0,5,0,4,0,3,4,3,3,3,4,3,3,3,4,3,4,4,5,4,5,5,5,2,3,0,5,5,4,1,5,4,3,1,5,4,3,4,4,3,3,4,3,3,0,3,2,0,2,3,0,3,0,0,3,3,0,5,3,2,3,3,0,3,0,3,0,3,4,5,4,5,3,0,4,3,0,3,4), +(0,3,0,3,0,3,0,3,0,3,3,4,3,2,3,2,3,0,4,3,3,3,3,3,3,3,3,0,3,2,4,3,3,1,3,4,3,4,4,4,3,4,4,3,2,4,4,1,0,2,0,0,1,1,0,2,0,0,3,1,0,5,3,2,1,3,0,3,0,1,2,4,3,2,4,3,3,0,3,2,0,4,4), +(0,3,0,3,0,1,0,0,0,1,4,3,3,2,3,1,3,1,4,2,3,2,4,2,3,4,3,0,2,2,3,3,3,0,3,3,3,0,3,4,1,3,3,0,3,4,3,3,0,1,1,0,1,0,0,0,4,0,3,0,0,3,1,2,1,3,0,4,0,1,0,4,3,3,4,3,3,0,2,0,0,3,3), +(0,3,0,4,0,1,0,3,0,3,4,3,3,0,3,3,3,1,3,1,3,3,4,3,3,3,0,0,3,1,5,3,3,1,3,3,2,5,4,3,3,4,5,3,2,5,3,4,0,1,0,0,0,0,0,2,0,0,1,1,0,4,2,2,1,3,0,3,0,2,0,4,4,3,5,3,2,0,1,1,0,3,4), +(0,5,0,4,0,5,0,2,0,4,4,3,3,2,3,3,3,1,4,3,4,1,5,3,4,3,4,0,4,2,4,3,4,1,5,4,0,4,4,4,4,5,4,1,3,5,4,2,1,4,1,1,3,2,0,3,1,0,3,2,1,4,3,3,3,4,0,4,0,3,0,4,4,4,3,3,3,0,4,2,0,3,4), +(1,4,0,4,0,3,0,1,0,3,3,3,1,1,3,3,2,2,3,3,1,0,3,2,2,1,2,0,3,1,2,1,2,0,3,2,0,2,2,3,3,4,3,0,3,3,1,2,0,1,1,3,1,2,0,0,3,0,1,1,0,3,2,2,3,3,0,3,0,0,0,2,3,3,4,3,3,0,1,0,0,1,4), +(0,4,0,4,0,4,0,0,0,3,4,4,3,1,4,2,3,2,3,3,3,1,4,3,4,0,3,0,4,2,3,3,2,2,5,4,2,1,3,4,3,4,3,1,3,3,4,2,0,2,1,0,3,3,0,0,2,0,3,1,0,4,4,3,4,3,0,4,0,1,0,2,4,4,4,4,4,0,3,2,0,3,3), +(0,0,0,1,0,4,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,3,2,0,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,2), +(0,2,0,3,0,4,0,4,0,1,3,3,3,0,4,0,2,1,2,1,1,1,2,0,3,1,1,0,1,0,3,1,0,0,3,3,2,0,1,1,0,0,0,0,0,1,0,2,0,2,2,0,3,1,0,0,1,0,1,1,0,1,2,0,3,0,0,0,0,1,0,0,3,3,4,3,1,0,1,0,3,0,2), +(0,0,0,3,0,5,0,0,0,0,1,0,2,0,3,1,0,1,3,0,0,0,2,0,0,0,1,0,0,0,1,1,0,0,4,0,0,0,2,3,0,1,4,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,1,0,0,0,0,0,0,0,2,0,0,3,0,0,0,0,0,3), +(0,2,0,5,0,5,0,1,0,2,4,3,3,2,5,1,3,2,3,3,3,0,4,1,2,0,3,0,4,0,2,2,1,1,5,3,0,0,1,4,2,3,2,0,3,3,3,2,0,2,4,1,1,2,0,1,1,0,3,1,0,1,3,1,2,3,0,2,0,0,0,1,3,5,4,4,4,0,3,0,0,1,3), +(0,4,0,5,0,4,0,4,0,4,5,4,3,3,4,3,3,3,4,3,4,4,5,3,4,5,4,2,4,2,3,4,3,1,4,4,1,3,5,4,4,5,5,4,4,5,5,5,2,3,3,1,4,3,1,3,3,0,3,3,1,4,3,4,4,4,0,3,0,4,0,3,3,4,4,5,0,0,4,3,0,4,5), 
+(0,4,0,4,0,3,0,3,0,3,4,4,4,3,3,2,4,3,4,3,4,3,5,3,4,3,2,1,4,2,4,4,3,1,3,4,2,4,5,5,3,4,5,4,1,5,4,3,0,3,2,2,3,2,1,3,1,0,3,3,3,5,3,3,3,5,4,4,2,3,3,4,3,3,3,2,1,0,3,2,1,4,3), +(0,4,0,5,0,4,0,3,0,3,5,5,3,2,4,3,4,0,5,4,4,1,4,4,4,3,3,3,4,3,5,5,2,3,3,4,1,2,5,5,3,5,5,2,3,5,5,4,0,3,2,0,3,3,1,1,5,1,4,1,0,4,3,2,3,5,0,4,0,3,0,5,4,3,4,3,0,0,4,1,0,4,4), +(1,3,0,4,0,2,0,2,0,2,5,5,3,3,3,3,3,0,4,2,3,4,4,4,3,4,0,0,3,4,5,4,3,3,3,3,2,5,5,4,5,5,5,4,3,5,5,5,1,3,1,0,1,0,0,3,2,0,4,2,0,5,2,3,2,4,1,3,0,3,0,4,5,4,5,4,3,0,4,2,0,5,4), +(0,3,0,4,0,5,0,3,0,3,4,4,3,2,3,2,3,3,3,3,3,2,4,3,3,2,2,0,3,3,3,3,3,1,3,3,3,0,4,4,3,4,4,1,1,4,4,2,0,3,1,0,1,1,0,4,1,0,2,3,1,3,3,1,3,4,0,3,0,1,0,3,1,3,0,0,1,0,2,0,0,4,4), +(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0), +(0,3,0,3,0,2,0,3,0,1,5,4,3,3,3,1,4,2,1,2,3,4,4,2,4,4,5,0,3,1,4,3,4,0,4,3,3,3,2,3,2,5,3,4,3,2,2,3,0,0,3,0,2,1,0,1,2,0,0,0,0,2,1,1,3,1,0,2,0,4,0,3,4,4,4,5,2,0,2,0,0,1,3), +(0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,1,1,0,0,1,1,0,0,0,4,2,1,1,0,1,0,3,2,0,0,3,1,1,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,1,0,0,0,2,0,0,0,1,4,0,4,2,1,0,0,0,0,0,1), +(0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,1,0,1,0,0,0,0,3,1,0,0,0,2,0,2,1,0,0,1,2,1,0,1,1,0,0,3,0,0,0,0,0,0,0,0,0,0,0,1,3,1,0,0,0,0,0,1,0,0,2,1,0,0,0,0,0,0,0,0,2), +(0,4,0,4,0,4,0,3,0,4,4,3,4,2,4,3,2,0,4,4,4,3,5,3,5,3,3,2,4,2,4,3,4,3,1,4,0,2,3,4,4,4,3,3,3,4,4,4,3,4,1,3,4,3,2,1,2,1,3,3,3,4,4,3,3,5,0,4,0,3,0,4,3,3,3,2,1,0,3,0,0,3,3), +(0,4,0,3,0,3,0,3,0,3,5,5,3,3,3,3,4,3,4,3,3,3,4,4,4,3,3,3,3,4,3,5,3,3,1,3,2,4,5,5,5,5,4,3,4,5,5,3,2,2,3,3,3,3,2,3,3,1,2,3,2,4,3,3,3,4,0,4,0,2,0,4,3,2,2,1,2,0,3,0,0,4,1), +) + +class JapaneseContextAnalysis(object): + NUM_OF_CATEGORY = 6 + DONT_KNOW = -1 + ENOUGH_REL_THRESHOLD = 100 + MAX_REL_THRESHOLD = 1000 + MINIMUM_DATA_THRESHOLD = 4 + + def __init__(self): + self._total_rel = None + self._rel_sample = None + self._need_to_skip_char_num = None + self._last_char_order = None + self._done = None + self.reset() + + def reset(self): + self._total_rel = 0 # total sequence received + # category counters, each integer counts sequence in its category + self._rel_sample = [0] * self.NUM_OF_CATEGORY + # if last byte in current buffer is not the last byte of a character, + # we need to know how many bytes to skip in next buffer + self._need_to_skip_char_num = 0 + self._last_char_order = -1 # The order of previous char + # If this flag is set to True, detection is done and conclusion has + # been made + self._done = False + + def feed(self, byte_str, num_bytes): + if self._done: + return + + # The buffer we got is byte oriented, and a character may span in more than one + # buffers. In case the last one or two byte in last buffer is not + # complete, we record how many byte needed to complete that character + # and skip these bytes here. We can choose to record those bytes as + # well and analyse the character once it is complete, but since a + # character will not make much difference, by simply skipping + # this character will simply our logic and improve performance. 
+ i = self._need_to_skip_char_num + while i < num_bytes: + order, char_len = self.get_order(byte_str[i:i + 2]) + i += char_len + if i > num_bytes: + self._need_to_skip_char_num = i - num_bytes + self._last_char_order = -1 + else: + if (order != -1) and (self._last_char_order != -1): + self._total_rel += 1 + if self._total_rel > self.MAX_REL_THRESHOLD: + self._done = True + break + self._rel_sample[jp2CharContext[self._last_char_order][order]] += 1 + self._last_char_order = order + + def got_enough_data(self): + return self._total_rel > self.ENOUGH_REL_THRESHOLD + + def get_confidence(self): + # This is just one way to calculate confidence. It works well for me. + if self._total_rel > self.MINIMUM_DATA_THRESHOLD: + return (self._total_rel - self._rel_sample[0]) / self._total_rel + else: + return self.DONT_KNOW + + def get_order(self, byte_str): + return -1, 1 + +class SJISContextAnalysis(JapaneseContextAnalysis): + def __init__(self): + super(SJISContextAnalysis, self).__init__() + self._charset_name = "SHIFT_JIS" + + @property + def charset_name(self): + return self._charset_name + + def get_order(self, byte_str): + if not byte_str: + return -1, 1 + # find out current char's byte length + first_char = byte_str[0] + if (0x81 <= first_char <= 0x9F) or (0xE0 <= first_char <= 0xFC): + char_len = 2 + if (first_char == 0x87) or (0xFA <= first_char <= 0xFC): + self._charset_name = "CP932" + else: + char_len = 1 + + # return its order if it is hiragana + if len(byte_str) > 1: + second_char = byte_str[1] + if (first_char == 202) and (0x9F <= second_char <= 0xF1): + return second_char - 0x9F, char_len + + return -1, char_len + +class EUCJPContextAnalysis(JapaneseContextAnalysis): + def get_order(self, byte_str): + if not byte_str: + return -1, 1 + # find out current char's byte length + first_char = byte_str[0] + if (first_char == 0x8E) or (0xA1 <= first_char <= 0xFE): + char_len = 2 + elif first_char == 0x8F: + char_len = 3 + else: + char_len = 1 + + # return its order if it is hiragana + if len(byte_str) > 1: + second_char = byte_str[1] + if (first_char == 0xA4) and (0xA1 <= second_char <= 0xF3): + return second_char - 0xA1, char_len + + return -1, char_len + + diff --git a/env/lib/python3.6/site-packages/pip/_vendor/chardet/langbulgarianmodel.py b/env/lib/python3.6/site-packages/pip/_vendor/chardet/langbulgarianmodel.py new file mode 100644 index 0000000..2aa4fb2 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/chardet/langbulgarianmodel.py @@ -0,0 +1,228 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
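The two context analysers defined in jpcntx.py above are normally driven by the Shift_JIS and EUC-JP probers elsewhere in this vendored chardet copy rather than used directly. As a rough standalone sketch of the flow (assuming the vendored package is importable as plain chardet, which this patch also ships under pip._vendor.chardet, and using an arbitrary EUC-JP sample string; both are illustrative assumptions, not part of the vendored files):

from chardet.jpcntx import EUCJPContextAnalysis

# Arbitrary run of hiragana; in EUC-JP each hiragana character is the
# two-byte pair 0xA4 0xA1..0xF3, which is exactly what get_order() matches.
sample = "これはひらがなのれんぞくです".encode("euc_jp")

analyser = EUCJPContextAnalysis()
analyser.feed(sample, len(sample))   # tallies adjacent-hiragana pairs by category

# got_enough_data() needs more than ENOUGH_REL_THRESHOLD (100) pairs, so it
# stays False for this short sample; get_confidence() already returns a ratio
# once MINIMUM_DATA_THRESHOLD (4) pairs have been exceeded.
print(analyser.got_enough_data())
print(analyser.get_confidence())     # share of pairs that fall outside category 0
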
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# 255: Control characters that usually does not exist in any text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 + +# Character Mapping Table: +# this table is modified base on win1251BulgarianCharToOrderMap, so +# only number <64 is sure valid + +Latin5_BulgarianCharToOrderMap = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253, 77, 90, 99,100, 72,109,107,101, 79,185, 81,102, 76, 94, 82, # 40 +110,186,108, 91, 74,119, 84, 96,111,187,115,253,253,253,253,253, # 50 +253, 65, 69, 70, 66, 63, 68,112,103, 92,194,104, 95, 86, 87, 71, # 60 +116,195, 85, 93, 97,113,196,197,198,199,200,253,253,253,253,253, # 70 +194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209, # 80 +210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225, # 90 + 81,226,227,228,229,230,105,231,232,233,234,235,236, 45,237,238, # a0 + 31, 32, 35, 43, 37, 44, 55, 47, 40, 59, 33, 46, 38, 36, 41, 30, # b0 + 39, 28, 34, 51, 48, 49, 53, 50, 54, 57, 61,239, 67,240, 60, 56, # c0 + 1, 18, 9, 20, 11, 3, 23, 15, 2, 26, 12, 10, 14, 6, 4, 13, # d0 + 7, 8, 5, 19, 29, 25, 22, 21, 27, 24, 17, 75, 52,241, 42, 16, # e0 + 62,242,243,244, 58,245, 98,246,247,248,249,250,251, 91,252,253, # f0 +) + +win1251BulgarianCharToOrderMap = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253, 77, 90, 99,100, 72,109,107,101, 79,185, 81,102, 76, 94, 82, # 40 +110,186,108, 91, 74,119, 84, 96,111,187,115,253,253,253,253,253, # 50 +253, 65, 69, 70, 66, 63, 68,112,103, 92,194,104, 95, 86, 87, 71, # 60 +116,195, 85, 93, 97,113,196,197,198,199,200,253,253,253,253,253, # 70 +206,207,208,209,210,211,212,213,120,214,215,216,217,218,219,220, # 80 +221, 78, 64, 83,121, 98,117,105,222,223,224,225,226,227,228,229, # 90 + 88,230,231,232,233,122, 89,106,234,235,236,237,238, 45,239,240, # a0 + 73, 80,118,114,241,242,243,244,245, 62, 58,246,247,248,249,250, # b0 + 31, 32, 35, 43, 37, 44, 55, 47, 40, 59, 33, 46, 38, 36, 41, 30, # c0 + 39, 28, 34, 51, 48, 49, 53, 50, 54, 57, 61,251, 67,252, 60, 56, # d0 + 1, 18, 9, 20, 11, 3, 23, 15, 2, 26, 12, 10, 14, 6, 4, 13, # e0 + 7, 8, 5, 19, 29, 25, 22, 21, 27, 24, 17, 75, 52,253, 42, 16, # f0 +) + +# Model Table: +# total sequences: 100% +# first 512 sequences: 96.9392% +# first 1024 sequences:3.0618% +# rest sequences: 0.2992% +# negative sequences: 0.0020% +BulgarianLangModel = ( +0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,2,3,3,3,3,3, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,2,2,3,2,2,1,2,2, +3,1,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,3,3,3,3,3,3,3,0,3,0,1, +0,0,0,0,0,0,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,2,3,3,3,3,3,3,3,3,0,3,1,0, +0,1,0,0,0,0,0,0,0,0,1,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, 
+3,2,2,2,3,3,3,3,3,3,3,3,3,3,3,3,3,1,3,2,3,3,3,3,3,3,3,3,0,3,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,2,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,1,3,2,3,3,3,3,3,3,3,3,0,3,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,3,2,3,2,2,1,3,3,3,3,2,2,2,1,1,2,0,1,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,3,3,2,3,2,2,3,3,1,1,2,3,3,2,3,3,3,3,2,1,2,0,2,0,3,0,0, +0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,3,3,1,3,3,3,3,3,2,3,2,3,3,3,3,3,2,3,3,1,3,0,3,0,2,0,0, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,3,3,3,1,3,3,2,3,3,3,1,3,3,2,3,2,2,2,0,0,2,0,2,0,2,0,0, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,3,3,3,3,0,3,3,3,2,2,3,3,3,1,2,2,3,2,1,1,2,0,2,0,0,0,0, +1,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,3,3,2,3,3,1,2,3,2,2,2,3,3,3,3,3,2,2,3,1,2,0,2,1,2,0,0, +0,0,0,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,1,3,3,3,3,3,2,3,3,3,2,3,3,2,3,2,2,2,3,1,2,0,1,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,3,3,3,3,3,3,1,1,1,2,2,1,3,1,3,2,2,3,0,0,1,0,1,0,1,0,0, +0,0,0,1,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,2,2,3,2,2,3,1,2,1,1,1,2,3,1,3,1,2,2,0,1,1,1,1,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,1,3,2,2,3,3,1,2,3,1,1,3,3,3,3,1,2,2,1,1,1,0,2,0,2,0,1, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,2,2,3,3,3,2,2,1,1,2,0,2,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,0,1,2,1,3,3,2,3,3,3,3,3,2,3,2,1,0,3,1,2,1,2,1,2,3,2,1,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,1,1,2,3,3,3,3,3,3,3,3,3,3,3,3,0,0,3,1,3,3,2,3,3,2,2,2,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,3,3,3,3,0,3,3,3,3,3,2,1,1,2,1,3,3,0,3,1,1,1,1,3,2,0,1,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,3,2,2,2,3,3,3,3,3,3,3,3,3,3,3,1,1,3,1,3,3,2,3,2,2,2,3,0,2,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,2,3,3,2,2,3,2,1,1,1,1,1,3,1,3,1,1,0,0,0,1,0,0,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,2,3,2,0,3,2,0,3,0,2,0,0,2,1,3,1,0,0,1,0,0,0,1,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,2,1,1,1,1,2,1,1,2,1,1,1,2,2,1,2,1,1,1,0,1,1,0,1,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,2,1,3,1,1,2,1,3,2,1,1,0,1,2,3,2,1,1,1,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,3,3,3,3,2,2,1,0,1,0,0,1,0,0,0,2,1,0,3,0,0,1,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,2,3,2,3,3,1,3,2,1,1,1,2,1,1,2,1,3,0,1,0,0,0,1,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,1,1,2,2,3,3,2,3,2,2,2,3,1,2,2,1,1,2,1,1,2,2,0,1,1,0,1,0,2,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,2,1,3,1,0,2,2,1,3,2,1,0,0,2,0,2,0,1,0,0,0,0,0,0,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,3,1,2,0,2,3,1,2,3,2,0,1,3,1,2,1,1,1,0,0,1,0,0,2,2,2,3, +2,2,2,2,1,2,1,1,2,2,1,1,2,0,1,1,1,0,0,1,1,0,0,1,1,0,0,0,1,1,0,1, +3,3,3,3,3,2,1,2,2,1,2,0,2,0,1,0,1,2,1,2,1,1,0,0,0,1,0,1,0,0,0,0, 
+0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, +3,3,2,3,3,1,1,3,1,0,3,2,1,0,0,0,1,2,0,2,0,1,0,0,0,1,0,1,2,1,2,2, +1,1,1,1,1,1,1,2,2,2,1,1,1,1,1,1,1,0,1,2,1,1,1,0,0,0,0,0,1,1,0,0, +3,1,0,1,0,2,3,2,2,2,3,2,2,2,2,2,1,0,2,1,2,1,1,1,0,1,2,1,2,2,2,1, +1,1,2,2,2,2,1,2,1,1,0,1,2,1,2,2,2,1,1,1,0,1,1,1,1,2,0,1,0,0,0,0, +2,3,2,3,3,0,0,2,1,0,2,1,0,0,0,0,2,3,0,2,0,0,0,0,0,1,0,0,2,0,1,2, +2,1,2,1,2,2,1,1,1,2,1,1,1,0,1,2,2,1,1,1,1,1,0,1,1,1,0,0,1,2,0,0, +3,3,2,2,3,0,2,3,1,1,2,0,0,0,1,0,0,2,0,2,0,0,0,1,0,1,0,1,2,0,2,2, +1,1,1,1,2,1,0,1,2,2,2,1,1,1,1,1,1,1,0,1,1,1,0,0,0,0,0,0,1,1,0,0, +2,3,2,3,3,0,0,3,0,1,1,0,1,0,0,0,2,2,1,2,0,0,0,0,0,0,0,0,2,0,1,2, +2,2,1,1,1,1,1,2,2,2,1,0,2,0,1,0,1,0,0,1,0,1,0,0,1,0,0,0,0,1,0,0, +3,3,3,3,2,2,2,2,2,0,2,1,1,1,1,2,1,2,1,1,0,2,0,1,0,1,0,0,2,0,1,2, +1,1,1,1,1,1,1,2,2,1,1,0,2,0,1,0,2,0,0,1,1,1,0,0,2,0,0,0,1,1,0,0, +2,3,3,3,3,1,0,0,0,0,0,0,0,0,0,0,2,0,0,1,1,0,0,0,0,0,0,1,2,0,1,2, +2,2,2,1,1,2,1,1,2,2,2,1,2,0,1,1,1,1,1,1,0,1,1,1,1,0,0,1,1,1,0,0, +2,3,3,3,3,0,2,2,0,2,1,0,0,0,1,1,1,2,0,2,0,0,0,3,0,0,0,0,2,0,2,2, +1,1,1,2,1,2,1,1,2,2,2,1,2,0,1,1,1,0,1,1,1,1,0,2,1,0,0,0,1,1,0,0, +2,3,3,3,3,0,2,1,0,0,2,0,0,0,0,0,1,2,0,2,0,0,0,0,0,0,0,0,2,0,1,2, +1,1,1,2,1,1,1,1,2,2,2,0,1,0,1,1,1,0,0,1,1,1,0,0,1,0,0,0,0,1,0,0, +3,3,2,2,3,0,1,0,1,0,0,0,0,0,0,0,1,1,0,3,0,0,0,0,0,0,0,0,1,0,2,2, +1,1,1,1,1,2,1,1,2,2,1,2,2,1,0,1,1,1,1,1,0,1,0,0,1,0,0,0,1,1,0,0, +3,1,0,1,0,2,2,2,2,3,2,1,1,1,2,3,0,0,1,0,2,1,1,0,1,1,1,1,2,1,1,1, +1,2,2,1,2,1,2,2,1,1,0,1,2,1,2,2,1,1,1,0,0,1,1,1,2,1,0,1,0,0,0,0, +2,1,0,1,0,3,1,2,2,2,2,1,2,2,1,1,1,0,2,1,2,2,1,1,2,1,1,0,2,1,1,1, +1,2,2,2,2,2,2,2,1,2,0,1,1,0,2,1,1,1,1,1,0,0,1,1,1,1,0,1,0,0,0,0, +2,1,1,1,1,2,2,2,2,1,2,2,2,1,2,2,1,1,2,1,2,3,2,2,1,1,1,1,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,2,2,3,2,0,1,2,0,1,2,1,1,0,1,0,1,2,1,2,0,0,0,1,1,0,0,0,1,0,0,2, +1,1,0,0,1,1,0,1,1,1,1,0,2,0,1,1,1,0,0,1,1,0,0,0,0,1,0,0,0,1,0,0, +2,0,0,0,0,1,2,2,2,2,2,2,2,1,2,1,1,1,1,1,1,1,0,1,1,1,1,1,2,1,1,1, +1,2,2,2,2,1,1,2,1,2,1,1,1,0,2,1,2,1,1,1,0,2,1,1,1,1,0,1,0,0,0,0, +3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0, +1,1,0,1,0,1,1,1,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,2,2,3,2,0,0,0,0,1,0,0,0,0,0,0,1,1,0,2,0,0,0,0,0,0,0,0,1,0,1,2, +1,1,1,1,1,1,0,0,2,2,2,2,2,0,1,1,0,1,1,1,1,1,0,0,1,0,0,0,1,1,0,1, +2,3,1,2,1,0,1,1,0,2,2,2,0,0,1,0,0,1,1,1,1,0,0,0,0,0,0,0,1,0,1,2, +1,1,1,1,2,1,1,1,1,1,1,1,1,0,1,1,0,1,0,1,0,1,0,0,1,0,0,0,0,1,0,0, +2,2,2,2,2,0,0,2,0,0,2,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,2,0,2,2, +1,1,1,1,1,0,0,1,2,1,1,0,1,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, +1,2,2,2,2,0,0,2,0,1,1,0,0,0,1,0,0,2,0,2,0,0,0,0,0,0,0,0,0,0,1,1, +0,0,0,1,1,1,1,1,1,1,1,1,1,0,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0, +1,2,2,3,2,0,0,1,0,0,1,0,0,0,0,0,0,1,0,2,0,0,0,1,0,0,0,0,0,0,0,2, +1,1,0,0,1,0,0,0,1,1,0,0,1,0,1,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, +2,1,2,2,2,1,2,1,2,2,1,1,2,1,1,1,0,1,1,1,1,2,0,1,0,1,1,1,1,0,1,1, +1,1,2,1,1,1,1,1,1,0,0,1,2,1,1,1,1,1,1,0,0,1,1,1,0,0,0,0,0,0,0,0, +1,0,0,1,3,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,2,2,2,1,0,0,1,0,2,0,0,0,0,0,1,1,1,0,1,0,0,0,0,0,0,0,0,2,0,0,1, +0,2,0,1,0,0,1,1,2,0,1,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0, +1,2,2,2,2,0,1,1,0,2,1,0,1,1,1,0,0,1,0,2,0,1,0,0,0,0,0,0,0,0,0,1, +0,1,0,0,1,0,0,0,1,1,0,0,1,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, +2,2,2,2,2,0,0,1,0,0,0,1,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1, +0,1,0,1,1,1,0,0,1,1,1,0,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, 
+2,0,1,0,0,1,2,1,1,1,1,1,1,2,2,1,0,0,1,0,1,0,0,0,0,1,1,1,1,0,0,0, +1,1,2,1,1,1,1,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,2,1,2,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1, +0,0,0,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,0,1,2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0, +0,1,1,0,1,1,1,0,0,1,0,0,1,0,1,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0, +1,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,2,0,0,2,0,1,0,0,1,0,0,1, +1,1,0,0,1,1,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0, +1,1,1,1,1,1,1,2,0,0,0,0,0,0,2,1,0,1,1,0,0,1,1,1,0,1,0,0,0,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,0,1,1,1,1,1,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +) + +Latin5BulgarianModel = { + 'char_to_order_map': Latin5_BulgarianCharToOrderMap, + 'precedence_matrix': BulgarianLangModel, + 'typical_positive_ratio': 0.969392, + 'keep_english_letter': False, + 'charset_name': "ISO-8859-5", + 'language': 'Bulgairan', +} + +Win1251BulgarianModel = { + 'char_to_order_map': win1251BulgarianCharToOrderMap, + 'precedence_matrix': BulgarianLangModel, + 'typical_positive_ratio': 0.969392, + 'keep_english_letter': False, + 'charset_name': "windows-1251", + 'language': 'Bulgarian', +} diff --git a/env/lib/python3.6/site-packages/pip/_vendor/chardet/langcyrillicmodel.py b/env/lib/python3.6/site-packages/pip/_vendor/chardet/langcyrillicmodel.py new file mode 100644 index 0000000..e5f9a1f --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/chardet/langcyrillicmodel.py @@ -0,0 +1,333 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
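Latin5BulgarianModel and Win1251BulgarianModel above are meant to be handed to the single-byte charset prober vendored in the same package (sbcharsetprober.py): the prober maps each byte through char_to_order_map and scores consecutive orders below 64 against the flattened 64x64 precedence_matrix. A minimal sketch of that hand-off, assuming the package imports as plain chardet and using an arbitrary windows-1251 sample (both assumptions for illustration only):

from chardet.sbcharsetprober import SingleByteCharSetProber
from chardet.langbulgarianmodel import Win1251BulgarianModel

# Arbitrary Bulgarian sample, encoded with the code page this model targets.
sample = "Това е кратък текст на български език за проба".encode("windows-1251")

prober = SingleByteCharSetProber(Win1251BulgarianModel)
prober.feed(sample)
print(prober.charset_name)      # "windows-1251", taken from the model dict
print(prober.language)          # "Bulgarian"
print(prober.get_confidence())  # only a rough value on such a short sample
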
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# KOI8-R language model +# Character Mapping Table: +KOI8R_char_to_order_map = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 +155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 +253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 + 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 +191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, # 80 +207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, # 90 +223,224,225, 68,226,227,228,229,230,231,232,233,234,235,236,237, # a0 +238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253, # b0 + 27, 3, 21, 28, 13, 2, 39, 19, 26, 4, 23, 11, 8, 12, 5, 1, # c0 + 15, 16, 9, 7, 6, 14, 24, 10, 17, 18, 20, 25, 30, 29, 22, 54, # d0 + 59, 37, 44, 58, 41, 48, 53, 46, 55, 42, 60, 36, 49, 38, 31, 34, # e0 + 35, 43, 45, 32, 40, 52, 56, 33, 61, 62, 51, 57, 47, 63, 50, 70, # f0 +) + +win1251_char_to_order_map = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 +155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 +253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 + 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 +191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, +207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, +223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, +239,240,241,242,243,244,245,246, 68,247,248,249,250,251,252,253, + 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, + 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43, + 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, + 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16, +) + +latin5_char_to_order_map = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 +155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 +253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 + 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 +191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, +207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, +223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, + 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, + 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 
43, + 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, + 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16, +239, 68,240,241,242,243,244,245,246,247,248,249,250,251,252,255, +) + +macCyrillic_char_to_order_map = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 +155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 +253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 + 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 + 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, + 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43, +191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, +207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, +223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, +239,240,241,242,243,244,245,246,247,248,249,250,251,252, 68, 16, + 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, + 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27,255, +) + +IBM855_char_to_order_map = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 +155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 +253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 + 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 +191,192,193,194, 68,195,196,197,198,199,200,201,202,203,204,205, +206,207,208,209,210,211,212,213,214,215,216,217, 27, 59, 54, 70, + 3, 37, 21, 44, 28, 58, 13, 41, 2, 48, 39, 53, 19, 46,218,219, +220,221,222,223,224, 26, 55, 4, 42,225,226,227,228, 23, 60,229, +230,231,232,233,234,235, 11, 36,236,237,238,239,240,241,242,243, + 8, 49, 12, 38, 5, 31, 1, 34, 15,244,245,246,247, 35, 16,248, + 43, 9, 45, 7, 32, 6, 40, 14, 52, 24, 56, 10, 33, 17, 61,249, +250, 18, 62, 20, 51, 25, 57, 30, 47, 29, 63, 22, 50,251,252,255, +) + +IBM866_char_to_order_map = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 +155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 +253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 + 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 + 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, + 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43, + 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, +191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, +207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, +223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, + 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16, +239, 
68,240,241,242,243,244,245,246,247,248,249,250,251,252,255, +) + +# Model Table: +# total sequences: 100% +# first 512 sequences: 97.6601% +# first 1024 sequences: 2.3389% +# rest sequences: 0.1237% +# negative sequences: 0.0009% +RussianLangModel = ( +0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,1,3,3,3,3,1,3,3,3,2,3,2,3,3, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,2,2,2,2,2,0,0,2, +3,3,3,2,3,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,3,2,3,2,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,2,2,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,2,3,3,1,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,2,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,0,0,3,3,3,3,3,3,3,3,3,3,3,2,1, +0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,3,3,3,2,1, +0,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,2,2,2,3,1,3,3,1,3,3,3,3,2,2,3,0,2,2,2,3,3,2,1,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,2,3,3,3,3,3,2,2,3,2,3,3,3,2,1,2,2,0,1,2,2,2,2,2,2,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,3,0,2,2,3,3,2,1,2,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,2,3,3,1,2,3,2,2,3,2,3,3,3,3,2,2,3,0,3,2,2,3,1,1,1,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,2,3,3,3,3,2,2,2,0,3,3,3,2,2,2,2,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,2,3,2,3,3,3,3,3,3,2,3,2,2,0,1,3,2,1,2,2,1,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,3,2,1,1,3,0,1,1,1,1,2,1,1,0,2,2,2,1,2,0,1,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,2,3,3,2,2,2,2,1,3,2,3,2,3,2,1,2,2,0,1,1,2,1,2,1,2,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,3,3,2,2,3,2,3,3,3,2,2,2,2,0,2,2,2,2,3,1,1,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, +3,2,3,2,2,3,3,3,3,3,3,3,3,3,1,3,2,0,0,3,3,3,3,2,3,3,3,3,2,3,2,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,3,3,3,3,3,2,2,3,3,0,2,1,0,3,2,3,2,3,0,0,1,2,0,0,1,0,1,2,1,1,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,3,0,2,3,3,3,3,2,3,3,3,3,1,2,2,0,0,2,3,2,2,2,3,2,3,2,2,3,0,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,2,3,0,2,3,2,3,0,1,2,3,3,2,0,2,3,0,0,2,3,2,2,0,1,3,1,3,2,2,1,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,1,3,0,2,3,3,3,3,3,3,3,3,2,1,3,2,0,0,2,2,3,3,3,2,3,3,0,2,2,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,2,2,3,3,2,2,2,3,3,0,0,1,1,1,1,1,2,0,0,1,1,1,1,0,1,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,2,2,3,3,3,3,3,3,3,0,3,2,3,3,2,3,2,0,2,1,0,1,1,0,1,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,2,3,3,3,2,2,2,2,3,1,3,2,3,1,1,2,1,0,2,2,2,2,1,3,1,0, +0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, +2,2,3,3,3,3,3,1,2,2,1,3,1,0,3,0,0,3,0,0,0,1,1,0,1,2,1,0,0,0,0,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,2,2,1,1,3,3,3,2,2,1,2,2,3,1,1,2,0,0,2,2,1,3,0,0,2,1,1,2,1,1,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,2,3,3,3,3,1,2,2,2,1,2,1,3,3,1,1,2,1,2,1,2,2,0,2,0,0,1,1,0,1,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
+2,3,3,3,3,3,2,1,3,2,2,3,2,0,3,2,0,3,0,1,0,1,1,0,0,1,1,1,1,0,1,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,2,3,3,3,2,2,2,3,3,1,2,1,2,1,0,1,0,1,1,0,1,0,0,2,1,1,1,0,1,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, +3,1,1,2,1,2,3,3,2,2,1,2,2,3,0,2,1,0,0,2,2,3,2,1,2,2,2,2,2,3,1,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,1,1,0,1,1,2,2,1,1,3,0,0,1,3,1,1,1,0,0,0,1,0,1,1,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,1,3,3,3,2,0,0,0,2,1,0,1,0,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,0,1,0,0,2,3,2,2,2,1,2,2,2,1,2,1,0,0,1,1,1,0,2,0,1,1,1,0,0,1,1, +1,0,0,0,0,0,1,2,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0, +2,3,3,3,3,0,0,0,0,1,0,0,0,0,3,0,1,2,1,0,0,0,0,0,0,0,1,1,0,0,1,1, +1,0,1,0,1,2,0,0,1,1,2,1,0,1,1,1,1,0,1,1,1,1,0,1,0,0,1,0,0,1,1,0, +2,2,3,2,2,2,3,1,2,2,2,2,2,2,2,2,1,1,1,1,1,1,1,0,1,0,1,1,1,0,2,1, +1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,0,1,1,1,0,1,1,0, +3,3,3,2,2,2,2,3,2,2,1,1,2,2,2,2,1,1,3,1,2,1,2,0,0,1,1,0,1,0,2,1, +1,1,1,1,1,2,1,0,1,1,1,1,0,1,0,0,1,1,0,0,1,0,1,0,0,1,0,0,0,1,1,0, +2,0,0,1,0,3,2,2,2,2,1,2,1,2,1,2,0,0,0,2,1,2,2,1,1,2,2,0,1,1,0,2, +1,1,1,1,1,0,1,1,1,2,1,1,1,2,1,0,1,2,1,1,1,1,0,1,1,1,0,0,1,0,0,1, +1,3,2,2,2,1,1,1,2,3,0,0,0,0,2,0,2,2,1,0,0,0,0,0,0,1,0,0,0,0,1,1, +1,0,1,1,0,1,0,1,1,0,1,1,0,2,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0, +2,3,2,3,2,1,2,2,2,2,1,0,0,0,2,0,0,1,1,0,0,0,0,0,0,0,1,1,0,0,2,1, +1,1,2,1,0,2,0,0,1,0,1,0,0,1,0,0,1,1,0,1,1,0,0,0,0,0,1,0,0,0,0,0, +3,0,0,1,0,2,2,2,3,2,2,2,2,2,2,2,0,0,0,2,1,2,1,1,1,2,2,0,0,0,1,2, +1,1,1,1,1,0,1,2,1,1,1,1,1,1,1,0,1,1,1,1,1,1,0,1,1,1,1,1,1,0,0,1, +2,3,2,3,3,2,0,1,1,1,0,0,1,0,2,0,1,1,3,1,0,0,0,0,0,0,0,1,0,0,2,1, +1,1,1,1,1,1,1,0,1,0,1,1,1,1,0,1,1,1,0,0,1,1,0,1,0,0,0,0,0,0,1,0, +2,3,3,3,3,1,2,2,2,2,0,1,1,0,2,1,1,1,2,1,0,1,1,0,0,1,0,1,0,0,2,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,3,3,3,2,0,0,1,1,2,2,1,0,0,2,0,1,1,3,0,0,1,0,0,0,0,0,1,0,1,2,1, +1,1,2,0,1,1,1,0,1,0,1,1,0,1,0,1,1,1,1,0,1,0,0,0,0,0,0,1,0,1,1,0, +1,3,2,3,2,1,0,0,2,2,2,0,1,0,2,0,1,1,1,0,1,0,0,0,3,0,1,1,0,0,2,1, +1,1,1,0,1,1,0,0,0,0,1,1,0,1,0,0,2,1,1,0,1,0,0,0,1,0,1,0,0,1,1,0, +3,1,2,1,1,2,2,2,2,2,2,1,2,2,1,1,0,0,0,2,2,2,0,0,0,1,2,1,0,1,0,1, +2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,2,1,1,1,0,1,0,1,1,0,1,1,1,0,0,1, +3,0,0,0,0,2,0,1,1,1,1,1,1,1,0,1,0,0,0,1,1,1,0,1,0,1,1,0,0,1,0,1, +1,1,0,0,1,0,0,0,1,0,1,1,0,0,1,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0,0,1, +1,3,3,2,2,0,0,0,2,2,0,0,0,1,2,0,1,1,2,0,0,0,0,0,0,0,0,1,0,0,2,1, +0,1,1,0,0,1,1,0,0,0,1,1,0,1,1,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0, +2,3,2,3,2,0,0,0,0,1,1,0,0,0,2,0,2,0,2,0,0,0,0,0,1,0,0,1,0,0,1,1, +1,1,2,0,1,2,1,0,1,1,2,1,1,1,1,1,2,1,1,0,1,0,0,1,1,1,1,1,0,1,1,0, +1,3,2,2,2,1,0,0,2,2,1,0,1,2,2,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,1,1, +0,0,1,1,0,1,1,0,0,1,1,0,1,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0, +1,0,0,1,0,2,3,1,2,2,2,2,2,2,1,1,0,0,0,1,0,1,0,2,1,1,1,0,0,0,0,1, +1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0, +2,0,2,0,0,1,0,3,2,1,2,1,2,2,0,1,0,0,0,2,1,0,0,2,1,1,1,1,0,2,0,2, +2,1,1,1,1,1,1,1,1,1,1,1,1,2,1,0,1,1,1,1,0,0,0,1,1,1,1,0,1,0,0,1, +1,2,2,2,2,1,0,0,1,0,0,0,0,0,2,0,1,1,1,1,0,0,0,0,1,0,1,2,0,0,2,0, +1,0,1,1,1,2,1,0,1,0,1,1,0,0,1,0,1,1,1,0,1,0,0,0,1,0,0,1,0,1,1,0, +2,1,2,2,2,0,3,0,1,1,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +0,0,0,1,1,1,0,0,1,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0, +1,2,2,3,2,2,0,0,1,1,2,0,1,2,1,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1, 
+0,1,1,0,0,1,1,0,0,1,1,0,0,1,1,0,1,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0, +2,2,1,1,2,1,2,2,2,2,2,1,2,2,0,1,0,0,0,1,2,2,2,1,2,1,1,1,1,1,2,1, +1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,0,1,1,1,0,0,0,0,1,1,1,0,1,1,0,0,1, +1,2,2,2,2,0,1,0,2,2,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0, +0,0,1,0,0,1,0,0,0,0,1,0,1,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0, +0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,2,2,2,2,0,0,0,2,2,2,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,1, +0,1,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,2,2,2,2,0,0,0,0,1,0,0,1,1,2,0,0,0,0,1,0,1,0,0,1,0,0,2,0,0,0,1, +0,0,1,0,0,1,0,0,0,1,1,0,0,0,0,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, +1,2,2,2,1,1,2,0,2,1,1,1,1,0,2,2,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1,1, +0,0,1,0,1,1,0,0,0,0,1,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0, +1,0,2,1,2,0,0,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0, +0,0,1,0,1,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0, +1,0,0,0,0,2,0,1,2,1,0,1,1,1,0,1,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,1, +0,0,0,0,0,1,0,0,1,1,0,0,1,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1, +2,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +1,0,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +1,1,1,0,1,0,1,0,0,1,1,1,1,0,0,0,1,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0, +1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +1,1,0,1,1,0,1,0,1,0,0,0,0,1,1,0,1,1,0,0,0,0,0,1,0,1,1,0,1,0,0,0, +0,1,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0, +) + +Koi8rModel = { + 'char_to_order_map': KOI8R_char_to_order_map, + 'precedence_matrix': RussianLangModel, + 'typical_positive_ratio': 0.976601, + 'keep_english_letter': False, + 'charset_name': "KOI8-R", + 'language': 'Russian', +} + +Win1251CyrillicModel = { + 'char_to_order_map': win1251_char_to_order_map, + 'precedence_matrix': RussianLangModel, + 'typical_positive_ratio': 0.976601, + 'keep_english_letter': False, + 'charset_name': "windows-1251", + 'language': 'Russian', +} + +Latin5CyrillicModel = { + 'char_to_order_map': latin5_char_to_order_map, + 'precedence_matrix': RussianLangModel, + 'typical_positive_ratio': 0.976601, + 'keep_english_letter': False, + 'charset_name': "ISO-8859-5", + 'language': 'Russian', +} + +MacCyrillicModel = { + 'char_to_order_map': macCyrillic_char_to_order_map, + 'precedence_matrix': RussianLangModel, + 'typical_positive_ratio': 0.976601, + 'keep_english_letter': False, + 'charset_name': "MacCyrillic", + 'language': 'Russian', +} + +Ibm866Model = { + 'char_to_order_map': IBM866_char_to_order_map, + 'precedence_matrix': RussianLangModel, + 'typical_positive_ratio': 0.976601, + 'keep_english_letter': False, + 'charset_name': "IBM866", + 'language': 'Russian', +} + +Ibm855Model = { + 'char_to_order_map': IBM855_char_to_order_map, + 'precedence_matrix': RussianLangModel, + 'typical_positive_ratio': 0.976601, + 'keep_english_letter': False, + 'charset_name': "IBM855", + 'language': 'Russian', +} diff --git a/env/lib/python3.6/site-packages/pip/_vendor/chardet/langgreekmodel.py b/env/lib/python3.6/site-packages/pip/_vendor/chardet/langgreekmodel.py new file mode 100644 index 0000000..5332221 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/chardet/langgreekmodel.py @@ -0,0 +1,225 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. 
+# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# 255: Control characters that usually does not exist in any text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 + +# Character Mapping Table: +Latin7_char_to_order_map = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253, 82,100,104, 94, 98,101,116,102,111,187,117, 92, 88,113, 85, # 40 + 79,118,105, 83, 67,114,119, 95, 99,109,188,253,253,253,253,253, # 50 +253, 72, 70, 80, 81, 60, 96, 93, 89, 68,120, 97, 77, 86, 69, 55, # 60 + 78,115, 65, 66, 58, 76,106,103, 87,107,112,253,253,253,253,253, # 70 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 80 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 90 +253,233, 90,253,253,253,253,253,253,253,253,253,253, 74,253,253, # a0 +253,253,253,253,247,248, 61, 36, 46, 71, 73,253, 54,253,108,123, # b0 +110, 31, 51, 43, 41, 34, 91, 40, 52, 47, 44, 53, 38, 49, 59, 39, # c0 + 35, 48,250, 37, 33, 45, 56, 50, 84, 57,120,121, 17, 18, 22, 15, # d0 +124, 1, 29, 20, 21, 3, 32, 13, 25, 5, 11, 16, 10, 6, 30, 4, # e0 + 9, 8, 14, 7, 2, 12, 28, 23, 42, 24, 64, 75, 19, 26, 27,253, # f0 +) + +win1253_char_to_order_map = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253, 82,100,104, 94, 98,101,116,102,111,187,117, 92, 88,113, 85, # 40 + 79,118,105, 83, 67,114,119, 95, 99,109,188,253,253,253,253,253, # 50 +253, 72, 70, 80, 81, 60, 96, 93, 89, 68,120, 97, 77, 86, 69, 55, # 60 + 78,115, 65, 66, 58, 76,106,103, 87,107,112,253,253,253,253,253, # 70 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 80 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 90 +253,233, 61,253,253,253,253,253,253,253,253,253,253, 74,253,253, # a0 +253,253,253,253,247,253,253, 36, 46, 71, 73,253, 54,253,108,123, # b0 +110, 31, 51, 43, 41, 34, 91, 40, 52, 47, 44, 53, 38, 49, 59, 39, # c0 + 35, 48,250, 37, 33, 45, 56, 50, 84, 57,120,121, 17, 18, 22, 15, # d0 +124, 1, 29, 20, 21, 3, 32, 13, 25, 5, 11, 16, 10, 6, 30, 4, # e0 + 9, 8, 14, 7, 2, 12, 28, 23, 42, 
24, 64, 75, 19, 26, 27,253, # f0 +) + +# Model Table: +# total sequences: 100% +# first 512 sequences: 98.2851% +# first 1024 sequences:1.7001% +# rest sequences: 0.0359% +# negative sequences: 0.0148% +GreekLangModel = ( +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,3,2,2,3,3,3,3,3,3,3,3,1,3,3,3,0,2,2,3,3,0,3,0,3,2,0,3,3,3,0, +3,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,3,0,3,3,0,3,2,3,3,0,3,2,3,3,3,0,0,3,0,3,0,3,3,2,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0, +0,2,3,2,2,3,3,3,3,3,3,3,3,0,3,3,3,3,0,2,3,3,0,3,3,3,3,2,3,3,3,0, +2,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,2,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,0,2,1,3,3,3,3,2,3,3,2,3,3,2,0, +0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,0,3,3,3,3,3,3,0,3,3,0,3,3,3,3,3,3,3,3,3,3,0,3,2,3,3,0, +2,0,1,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, +0,3,3,3,3,3,2,3,0,0,0,0,3,3,0,3,1,3,3,3,0,3,3,0,3,3,3,3,0,0,0,0, +2,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,3,0,3,0,3,3,3,3,3,0,3,2,2,2,3,0,2,3,3,3,3,3,2,3,3,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,3,3,2,2,2,3,3,3,3,0,3,1,3,3,3,3,2,3,3,3,3,3,3,3,2,2,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,3,2,0,3,0,0,0,3,3,2,3,3,3,3,3,0,0,3,2,3,0,2,3,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,0,3,3,3,3,0,0,3,3,0,2,3,0,3,0,3,3,3,0,0,3,0,3,0,2,2,3,3,0,0, +0,0,1,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,3,2,0,3,2,3,3,3,3,0,3,3,3,3,3,0,3,3,2,3,2,3,3,2,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,2,3,2,3,3,3,3,3,3,0,2,3,2,3,2,2,2,3,2,3,3,2,3,0,2,2,2,3,0, +2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,3,0,0,0,3,3,3,2,3,3,0,0,3,0,3,0,0,0,3,2,0,3,0,3,0,0,2,0,2,0, +0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,0,3,3,3,3,3,3,0,3,3,0,3,0,0,0,3,3,0,3,3,3,0,0,1,2,3,0, +3,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,3,2,0,0,3,2,2,3,3,0,3,3,3,3,3,2,1,3,0,3,2,3,3,2,1,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,3,3,0,2,3,3,3,3,3,3,0,0,3,0,3,0,0,0,3,3,0,3,2,3,0,0,3,3,3,0, +3,0,0,0,2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,0,3,3,3,3,3,3,0,0,3,0,3,0,0,0,3,2,0,3,2,3,0,0,3,2,3,0, +2,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,3,1,2,2,3,3,3,3,3,3,0,2,3,0,3,0,0,0,3,3,0,3,0,2,0,0,2,3,1,0, +2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,0,3,3,3,3,0,3,0,3,3,2,3,0,3,3,3,3,3,3,0,3,3,3,0,2,3,0,0,3,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,0,3,3,3,0,0,3,0,0,0,3,3,0,3,0,2,3,3,0,0,3,0,3,0,3,3,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,3,0,0,0,3,3,3,3,3,3,0,0,3,0,2,0,0,0,3,3,0,3,0,3,0,0,2,0,2,0, +0,0,0,0,1,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,3,3,0,3,0,2,0,3,2,0,3,2,3,2,3,0,0,3,2,3,2,3,3,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,3,0,0,2,3,3,3,3,3,0,0,0,3,0,2,1,0,0,3,2,2,2,0,3,0,0,2,2,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
+0,3,0,3,3,3,2,0,3,0,3,0,3,3,0,2,1,2,3,3,0,0,3,0,3,0,3,3,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,2,3,3,3,0,3,3,3,3,3,3,0,2,3,0,3,0,0,0,2,1,0,2,2,3,0,0,2,2,2,0, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,3,0,0,2,3,3,3,2,3,0,0,1,3,0,2,0,0,0,0,3,0,1,0,2,0,0,1,1,1,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,3,1,0,3,0,0,0,3,2,0,3,2,3,3,3,0,0,3,0,3,2,2,2,1,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,0,3,3,3,0,0,3,0,0,0,0,2,0,2,3,3,2,2,2,2,3,0,2,0,2,2,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,2,0,0,0,0,0,0,2,3,0,2,0,2,3,2,0,0,3,0,3,0,3,1,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,3,2,3,3,2,2,3,0,2,0,3,0,0,0,2,0,0,0,0,1,2,0,2,0,2,0, +0,2,0,2,0,2,2,0,0,1,0,2,2,2,0,2,2,2,0,2,2,2,0,0,2,0,0,1,0,0,0,0, +0,2,0,3,3,2,0,0,0,0,0,0,1,3,0,2,0,2,2,2,0,0,2,0,3,0,0,2,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,0,2,3,2,0,2,2,0,2,0,2,2,0,2,0,2,2,2,0,0,0,0,0,0,2,3,0,0,0,2, +0,1,2,0,0,0,0,2,2,0,0,0,2,1,0,2,2,0,0,0,0,0,0,1,0,2,0,0,0,0,0,0, +0,0,2,1,0,2,3,2,2,3,2,3,2,0,0,3,3,3,0,0,3,2,0,0,0,1,1,0,2,0,2,2, +0,2,0,2,0,2,2,0,0,2,0,2,2,2,0,2,2,2,2,0,0,2,0,0,0,2,0,1,0,0,0,0, +0,3,0,3,3,2,2,0,3,0,0,0,2,2,0,2,2,2,1,2,0,0,1,2,2,0,0,3,0,0,0,2, +0,1,2,0,0,0,1,2,0,0,0,0,0,0,0,2,2,0,1,0,0,2,0,0,0,2,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,2,3,3,2,2,0,0,0,2,0,2,3,3,0,2,0,0,0,0,0,0,2,2,2,0,2,2,0,2,0,2, +0,2,2,0,0,2,2,2,2,1,0,0,2,2,0,2,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0, +0,2,0,3,2,3,0,0,0,3,0,0,2,2,0,2,0,2,2,2,0,0,2,0,0,0,0,0,0,0,0,2, +0,0,2,2,0,0,2,2,2,0,0,0,0,0,0,2,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,2,0,0,3,2,0,2,2,2,2,2,0,0,0,2,0,0,0,0,2,0,1,0,0,2,0,1,0,0,0, +0,2,2,2,0,2,2,0,1,2,0,2,2,2,0,2,2,2,2,1,2,2,0,0,2,0,0,0,0,0,0,0, +0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, +0,2,0,2,0,2,2,0,0,0,0,1,2,1,0,0,2,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,3,2,3,0,0,2,0,0,0,2,2,0,2,0,0,0,1,0,0,2,0,2,0,2,2,0,0,0,0, +0,0,2,0,0,0,0,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0, +0,2,2,3,2,2,0,0,0,0,0,0,1,3,0,2,0,2,2,0,0,0,1,0,2,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,2,0,2,0,3,2,0,2,0,0,0,0,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +0,0,2,0,0,0,0,1,1,0,0,2,1,2,0,2,2,0,1,0,0,1,0,0,0,2,0,0,0,0,0,0, +0,3,0,2,2,2,0,0,2,0,0,0,2,0,0,0,2,3,0,2,0,0,0,0,0,0,2,2,0,0,0,2, +0,1,2,0,0,0,1,2,2,1,0,0,0,2,0,0,2,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,2,1,2,0,2,2,0,2,0,0,2,0,0,0,0,1,2,1,0,2,1,0,0,0,0,0,0,0,0,0,0, +0,0,2,0,0,0,3,1,2,2,0,2,0,0,0,0,2,0,0,0,2,0,0,3,0,0,0,0,2,2,2,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,2,1,0,2,0,1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,2, +0,2,2,0,0,2,2,2,2,2,0,1,2,0,0,0,2,2,0,1,0,2,0,0,2,2,0,0,0,0,0,0, +0,0,0,0,1,0,0,0,0,0,0,0,3,0,0,2,0,0,0,0,0,0,0,0,2,0,2,0,0,0,0,2, +0,1,2,0,0,0,0,2,2,1,0,1,0,1,0,2,2,2,1,0,0,0,0,0,0,1,0,0,0,0,0,0, +0,2,0,1,2,0,0,0,0,0,0,0,0,0,0,2,0,0,2,2,0,0,0,0,1,0,0,0,0,0,0,2, +0,2,2,0,0,0,0,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,2,0,0,0, +0,2,2,2,2,0,0,0,3,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0,1, +0,0,2,0,0,0,0,1,2,0,0,0,0,0,0,2,2,1,1,0,0,0,0,0,0,1,0,0,0,0,0,0, +0,2,0,2,2,2,0,0,2,0,0,0,0,0,0,0,2,2,2,0,0,0,2,0,0,0,0,0,0,0,0,2, 
+0,0,1,0,0,0,0,2,1,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0, +0,3,0,2,0,0,0,0,0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,2, +0,0,2,0,0,0,0,2,2,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,2,0,2,2,1,0,0,0,0,0,0,2,0,0,2,0,2,2,2,0,0,0,0,0,0,2,0,0,0,0,2, +0,0,2,0,0,2,0,2,2,0,0,0,0,2,0,2,0,0,0,0,0,2,0,0,0,2,0,0,0,0,0,0, +0,0,3,0,0,0,2,2,0,2,2,0,0,0,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,2,0,0,0,0,0, +0,2,2,2,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1, +0,0,0,0,0,0,0,2,1,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,2,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, +0,2,0,0,0,2,0,0,0,0,0,1,0,0,0,0,2,2,0,0,0,1,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,2,0,0,0, +0,2,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,2,0,2,0,0,0, +0,0,0,0,0,0,0,0,2,1,0,0,0,0,0,0,2,0,0,0,1,2,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +) + +Latin7GreekModel = { + 'char_to_order_map': Latin7_char_to_order_map, + 'precedence_matrix': GreekLangModel, + 'typical_positive_ratio': 0.982851, + 'keep_english_letter': False, + 'charset_name': "ISO-8859-7", + 'language': 'Greek', +} + +Win1253GreekModel = { + 'char_to_order_map': win1253_char_to_order_map, + 'precedence_matrix': GreekLangModel, + 'typical_positive_ratio': 0.982851, + 'keep_english_letter': False, + 'charset_name': "windows-1253", + 'language': 'Greek', +} diff --git a/env/lib/python3.6/site-packages/pip/_vendor/chardet/langhebrewmodel.py b/env/lib/python3.6/site-packages/pip/_vendor/chardet/langhebrewmodel.py new file mode 100644 index 0000000..58f4c87 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/chardet/langhebrewmodel.py @@ -0,0 +1,200 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Simon Montagu +# Portions created by the Initial Developer are Copyright (C) 2005 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# Shoshannah Forbes - original C code (?) +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# 255: Control characters that usually does not exist in any text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 + +# Windows-1255 language model +# Character Mapping Table: +WIN1255_CHAR_TO_ORDER_MAP = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253, 69, 91, 79, 80, 92, 89, 97, 90, 68,111,112, 82, 73, 95, 85, # 40 + 78,121, 86, 71, 67,102,107, 84,114,103,115,253,253,253,253,253, # 50 +253, 50, 74, 60, 61, 42, 76, 70, 64, 53,105, 93, 56, 65, 54, 49, # 60 + 66,110, 51, 43, 44, 63, 81, 77, 98, 75,108,253,253,253,253,253, # 70 +124,202,203,204,205, 40, 58,206,207,208,209,210,211,212,213,214, +215, 83, 52, 47, 46, 72, 32, 94,216,113,217,109,218,219,220,221, + 34,116,222,118,100,223,224,117,119,104,125,225,226, 87, 99,227, +106,122,123,228, 55,229,230,101,231,232,120,233, 48, 39, 57,234, + 30, 59, 41, 88, 33, 37, 36, 31, 29, 35,235, 62, 28,236,126,237, +238, 38, 45,239,240,241,242,243,127,244,245,246,247,248,249,250, + 9, 8, 20, 16, 3, 2, 24, 14, 22, 1, 25, 15, 4, 11, 6, 23, + 12, 19, 13, 26, 18, 27, 21, 17, 7, 10, 5,251,252,128, 96,253, +) + +# Model Table: +# total sequences: 100% +# first 512 sequences: 98.4004% +# first 1024 sequences: 1.5981% +# rest sequences: 0.087% +# negative sequences: 0.0015% +HEBREW_LANG_MODEL = ( +0,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,2,3,2,1,2,0,1,0,0, +3,0,3,1,0,0,1,3,2,0,1,1,2,0,2,2,2,1,1,1,1,2,1,1,1,2,0,0,2,2,0,1, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2, +1,2,1,2,1,2,0,0,2,0,0,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2, +1,2,1,3,1,1,0,0,2,0,0,0,1,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,0,1,2,2,1,3, +1,2,1,1,2,2,0,0,2,2,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,1,0,1,1,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,2,2,2,2,3,2, +1,2,1,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,2,3,2,2,3,2,2,2,1,2,2,2,2, +1,2,1,1,2,2,0,1,2,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,0,2,2,2,2,2, +0,2,0,2,2,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,0,2,2,2, +0,2,1,2,2,2,0,0,2,1,0,0,0,0,1,0,1,0,0,0,0,0,0,2,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,2,1,2,3,2,2,2, +1,2,1,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,1,0, +3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,1,0,2,0,2, +0,2,1,2,2,2,0,0,1,2,0,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,2,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,2,3,2,2,3,2,1,2,1,1,1, +0,1,1,1,1,1,3,0,1,0,0,0,0,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, +3,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,1,1,0,0,1,0,0,1,0,0,0,0, +0,0,1,0,0,0,0,0,2,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,2,2,2, +0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, 
+3,3,3,3,3,3,3,3,3,2,3,3,3,2,1,2,3,3,2,3,3,3,3,2,3,2,1,2,0,2,1,2, +0,2,0,2,2,2,0,0,1,2,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0, +3,3,3,3,3,3,3,3,3,2,3,3,3,1,2,2,3,3,2,3,2,3,2,2,3,1,2,2,0,2,2,2, +0,2,1,2,2,2,0,0,1,2,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,1,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,2,2,2,3,3,3,3,1,3,2,2,2, +0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,3,3,3,2,3,2,2,2,1,2,2,0,2,2,2,2, +0,2,0,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,1,3,2,3,3,2,3,3,2,2,1,2,2,2,2,2,2, +0,2,1,2,1,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,2,3,2,3,3,2,3,3,3,3,2,3,2,3,3,3,3,3,2,2,2,2,2,2,2,1, +0,2,0,1,2,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,2,1,2,3,3,3,3,3,3,3,2,3,2,3,2,1,2,3,0,2,1,2,2, +0,2,1,1,2,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,2,0, +3,3,3,3,3,3,3,3,3,2,3,3,3,3,2,1,3,1,2,2,2,1,2,3,3,1,2,1,2,2,2,2, +0,1,1,1,1,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,0,2,3,3,3,1,3,3,3,1,2,2,2,2,1,1,2,2,2,2,2,2, +0,2,0,1,1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,2,3,3,3,2,2,3,3,3,2,1,2,3,2,3,2,2,2,2,1,2,1,1,1,2,2, +0,2,1,1,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, +3,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,1,0,0,0,0,0, +1,0,1,0,0,0,0,0,2,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,2,3,3,2,3,1,2,2,2,2,3,2,3,1,1,2,2,1,2,2,1,1,0,2,2,2,2, +0,1,0,1,2,2,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, +3,0,0,1,1,0,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,2,2,0, +0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,1,0,1,0,1,1,0,1,1,0,0,0,1,1,0,1,1,1,0,0,0,0,0,0,1,0,0,0,0,0, +0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,0,0,1,1,0,1,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, +3,2,2,1,2,2,2,2,2,2,2,1,2,2,1,2,2,1,1,1,1,1,1,1,1,2,1,1,0,3,3,3, +0,3,0,2,2,2,2,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, +2,2,2,3,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,1,2,2,1,2,2,2,1,1,1,2,0,1, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,2,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,2,2,2,2,2,2,0,2,2,0,0,0,0,0,0, +0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,3,1,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,1,2,1,0,2,1,0, +0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, +0,3,1,1,2,2,2,2,2,1,2,2,2,1,1,2,2,2,2,2,2,2,1,2,2,1,0,1,1,1,1,0, +0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,2,1,1,1,1,2,1,1,2,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,0,0,0,0, +0,0,2,0,0,0,0,0,0,0,0,1,1,0,0,0,0,1,1,0,0,1,1,0,0,0,0,0,0,1,0,0, +2,1,1,2,2,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,1,2,1,2,1,1,1,1,0,0,0,0, +0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,2,1,2,2,2,2,2,2,2,2,2,2,1,2,1,2,1,1,2,1,1,1,2,1,2,1,2,0,1,0,1, +0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,1,2,2,2,1,2,2,2,2,2,2,2,2,1,2,1,1,1,1,1,1,2,1,2,1,1,0,1,0,1, +0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,1,2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2, +0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, +3,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0, 
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,1,1,1,1,1,1,1,0,1,1,0,1,0,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,2,0,1,1,1,0,1,0,0,0,1,1,0,1,1,0,0,0,0,0,1,1,0,0, +0,1,1,1,2,1,2,2,2,0,2,0,2,0,1,1,2,1,1,1,1,2,1,0,1,1,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,1,0,0,0,0,0,1,0,1,2,2,0,1,0,0,1,1,2,2,1,2,0,2,0,0,0,1,2,0,1, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,2,0,2,1,2,0,2,0,0,1,1,1,1,1,1,0,1,0,0,0,1,0,0,1, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,1,0,0,0,0,0,1,0,2,1,1,0,1,0,0,1,1,1,2,2,0,0,1,0,0,0,1,0,0,1, +1,1,2,1,0,1,1,1,0,1,0,1,1,1,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,2,2,1, +0,2,0,1,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,1,0,0,1,0,1,1,1,1,0,0,0,0,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,1,1,1,1,1,1,1,1,2,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,1,1,1,0,0,0,0,1,1,1,0,1,1,0,1,0,0,0,1,1,0,1, +2,0,1,0,1,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,1,0,1,1,1,0,1,0,0,1,1,2,1,1,2,0,1,0,0,0,1,1,0,1, +1,0,0,1,0,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,0,0,2,1,1,2,0,2,0,0,0,1,1,0,1, +1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,1,0,2,1,1,0,1,0,0,2,2,1,2,1,1,0,1,0,0,0,1,1,0,1, +2,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,1,2,2,0,0,0,0,0,1,1,0,1,0,0,1,0,0,0,0,1,0,1, +1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,1,2,2,0,0,0,0,2,1,1,1,0,2,1,1,0,0,0,2,1,0,1, +1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,1,1,0,2,1,1,0,1,0,0,0,1,1,0,1, +2,2,1,1,1,0,1,1,0,1,1,0,1,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,1,0,2,1,1,0,1,0,0,1,1,0,1,2,1,0,2,0,0,0,1,1,0,1, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0, +0,1,0,0,2,0,2,1,1,0,1,0,1,0,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,1,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,1,1,1,0,1,0,0,1,0,0,0,1,0,0,1, +1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,0,0,0,0,0,0,1,0,1,1,0,0,1,0,0,2,1,1,1,1,1,0,1,0,0,0,0,1,0,1, +0,1,1,1,2,1,1,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,1,2,1,0,0,0,0,0,1,1,1,1,1,0,1,0,0,0,1,1,0,0, +) + +Win1255HebrewModel = { + 'char_to_order_map': WIN1255_CHAR_TO_ORDER_MAP, + 'precedence_matrix': HEBREW_LANG_MODEL, + 'typical_positive_ratio': 0.984004, + 'keep_english_letter': False, + 'charset_name': "windows-1255", + 'language': 'Hebrew', +} diff --git 
a/env/lib/python3.6/site-packages/pip/_vendor/chardet/langhungarianmodel.py b/env/lib/python3.6/site-packages/pip/_vendor/chardet/langhungarianmodel.py new file mode 100644 index 0000000..bb7c095 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/chardet/langhungarianmodel.py @@ -0,0 +1,225 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# 255: Control characters that usually does not exist in any text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 + +# Character Mapping Table: +Latin2_HungarianCharToOrderMap = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253, 28, 40, 54, 45, 32, 50, 49, 38, 39, 53, 36, 41, 34, 35, 47, + 46, 71, 43, 33, 37, 57, 48, 64, 68, 55, 52,253,253,253,253,253, +253, 2, 18, 26, 17, 1, 27, 12, 20, 9, 22, 7, 6, 13, 4, 8, + 23, 67, 10, 5, 3, 21, 19, 65, 62, 16, 11,253,253,253,253,253, +159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174, +175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190, +191,192,193,194,195,196,197, 75,198,199,200,201,202,203,204,205, + 79,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220, +221, 51, 81,222, 78,223,224,225,226, 44,227,228,229, 61,230,231, +232,233,234, 58,235, 66, 59,236,237,238, 60, 69, 63,239,240,241, + 82, 14, 74,242, 70, 80,243, 72,244, 15, 83, 77, 84, 30, 76, 85, +245,246,247, 25, 73, 42, 24,248,249,250, 31, 56, 29,251,252,253, +) + +win1250HungarianCharToOrderMap = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253, 28, 40, 54, 45, 32, 50, 49, 38, 39, 53, 36, 41, 34, 35, 47, + 46, 72, 43, 33, 37, 57, 48, 64, 68, 55, 52,253,253,253,253,253, +253, 2, 18, 26, 17, 1, 27, 12, 20, 9, 22, 7, 6, 13, 4, 8, + 23, 67, 10, 5, 3, 21, 19, 65, 62, 16, 11,253,253,253,253,253, +161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176, +177,178,179,180, 78,181, 69,182,183,184,185,186,187,188,189,190, +191,192,193,194,195,196,197, 
76,198,199,200,201,202,203,204,205, + 81,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220, +221, 51, 83,222, 80,223,224,225,226, 44,227,228,229, 61,230,231, +232,233,234, 58,235, 66, 59,236,237,238, 60, 70, 63,239,240,241, + 84, 14, 75,242, 71, 82,243, 73,244, 15, 85, 79, 86, 30, 77, 87, +245,246,247, 25, 74, 42, 24,248,249,250, 31, 56, 29,251,252,253, +) + +# Model Table: +# total sequences: 100% +# first 512 sequences: 94.7368% +# first 1024 sequences:5.2623% +# rest sequences: 0.8894% +# negative sequences: 0.0009% +HungarianLangModel = ( +0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3, +3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,2,2,3,3,1,1,2,2,2,2,2,1,2, +3,2,2,3,3,3,3,3,2,3,3,3,3,3,3,1,2,3,3,3,3,2,3,3,1,1,3,3,0,1,1,1, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0, +3,2,1,3,3,3,3,3,2,3,3,3,3,3,1,1,2,3,3,3,3,3,3,3,1,1,3,2,0,1,1,1, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,1,1,2,3,3,3,1,3,3,3,3,3,1,3,3,2,2,0,3,2,3, +0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0, +3,3,3,3,3,3,2,3,3,3,2,3,3,2,3,3,3,3,3,2,3,3,2,2,3,2,3,2,0,3,2,2, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0, +3,3,3,3,3,3,2,3,3,3,3,3,2,3,3,3,1,2,3,2,2,3,1,2,3,3,2,2,0,3,3,3, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,3,2,3,3,3,3,2,3,3,3,3,0,2,3,2, +0,0,0,1,1,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,3,1,1,1,3,3,2,1,3,2,2,3,2,1,3,2,2,1,0,3,3,1, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,2,2,3,3,3,3,3,1,2,3,3,3,3,1,2,1,3,3,3,3,2,2,3,1,1,3,2,0,1,1,1, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,2,1,3,3,3,3,3,2,2,1,3,3,3,0,1,1,2, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,2,3,3,3,2,0,3,2,3, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,1,0, +3,3,3,3,3,3,2,3,3,3,2,3,2,3,3,3,1,3,2,2,2,3,1,1,3,3,1,1,0,3,3,2, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,2,3,3,3,2,3,2,3,3,3,2,3,3,3,3,3,1,2,3,2,2,0,2,2,2, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,3,3,2,2,2,3,1,3,3,2,2,1,3,3,3,1,1,3,1,2,3,2,3,2,2,2,1,0,2,2,2, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0, +3,1,1,3,3,3,3,3,1,2,3,3,3,3,1,2,1,3,3,3,2,2,3,2,1,0,3,2,0,1,1,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,1,1,3,3,3,3,3,1,2,3,3,3,3,1,1,0,3,3,3,3,0,2,3,0,0,2,1,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,2,2,3,3,2,2,2,2,3,3,0,1,2,3,2,3,2,2,3,2,1,2,0,2,2,2, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0, +3,3,3,3,3,3,1,2,3,3,3,2,1,2,3,3,2,2,2,3,2,3,3,1,3,3,1,1,0,2,3,2, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,3,3,1,2,2,2,2,3,3,3,1,1,1,3,3,1,1,3,1,1,3,2,1,2,3,1,1,0,2,2,2, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,3,3,2,1,2,1,1,3,3,1,1,1,1,3,3,1,1,2,2,1,2,1,1,2,2,1,1,0,2,2,1, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,3,3,1,1,2,1,1,3,3,1,0,1,1,3,3,2,0,1,1,2,3,1,0,2,2,1,0,0,1,3,2, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,2,1,3,3,3,3,3,1,2,3,2,3,3,2,1,1,3,2,3,2,1,2,2,0,1,2,1,0,0,1,1, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, +3,3,3,3,2,2,2,2,3,1,2,2,1,1,3,3,0,3,2,1,2,3,2,1,3,3,1,1,0,2,1,3, 
+0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,3,3,2,2,2,3,2,3,3,3,2,1,1,3,3,1,1,1,2,2,3,2,3,2,2,2,1,0,2,2,1, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +1,0,0,3,3,3,3,3,0,0,3,3,2,3,0,0,0,2,3,3,1,0,1,2,0,0,1,1,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,1,2,3,3,3,3,3,1,2,3,3,2,2,1,1,0,3,3,2,2,1,2,2,1,0,2,2,0,1,1,1, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,2,2,1,3,1,2,3,3,2,2,1,1,2,2,1,1,1,1,3,2,1,1,1,1,2,1,0,1,2,1, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0, +2,3,3,1,1,1,1,1,3,3,3,0,1,1,3,3,1,1,1,1,1,2,2,0,3,1,1,2,0,2,1,1, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,1,0,1,2,1,2,2,0,1,2,3,1,2,0,0,0,2,1,1,1,1,1,2,0,0,1,1,0,0,0,0, +1,2,1,2,2,2,1,2,1,2,0,2,0,2,2,1,1,2,1,1,2,1,1,1,0,1,0,0,0,1,1,0, +1,1,1,2,3,2,3,3,0,1,2,2,3,1,0,1,0,2,1,2,2,0,1,1,0,0,1,1,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,0,3,3,2,2,1,0,0,3,2,3,2,0,0,0,1,1,3,0,0,1,1,0,0,2,1,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,1,1,2,2,3,3,1,0,1,3,2,3,1,1,1,0,1,1,1,1,1,3,1,0,0,2,2,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,1,1,1,2,2,2,1,0,1,2,3,3,2,0,0,0,2,1,1,1,2,1,1,1,0,1,1,1,0,0,0, +1,2,2,2,2,2,1,1,1,2,0,2,1,1,1,1,1,2,1,1,1,1,1,1,0,1,1,1,0,0,1,1, +3,2,2,1,0,0,1,1,2,2,0,3,0,1,2,1,1,0,0,1,1,1,0,1,1,1,1,0,2,1,1,1, +2,2,1,1,1,2,1,2,1,1,1,1,1,1,1,2,1,1,1,2,3,1,1,1,1,1,1,1,1,1,0,1, +2,3,3,0,1,0,0,0,3,3,1,0,0,1,2,2,1,0,0,0,0,2,0,0,1,1,1,0,2,1,1,1, +2,1,1,1,1,1,1,2,1,1,0,1,1,0,1,1,1,0,1,2,1,1,0,1,1,1,1,1,1,1,0,1, +2,3,3,0,1,0,0,0,2,2,0,0,0,0,1,2,2,0,0,0,0,1,0,0,1,1,0,0,2,0,1,0, +2,1,1,1,1,2,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,2,0,1,1,1,1,1,0,1, +3,2,2,0,1,0,1,0,2,3,2,0,0,1,2,2,1,0,0,1,1,1,0,0,2,1,0,1,2,2,1,1, +2,1,1,1,1,1,1,2,1,1,1,1,1,1,0,2,1,0,1,1,0,1,1,1,0,1,1,2,1,1,0,1, +2,2,2,0,0,1,0,0,2,2,1,1,0,0,2,1,1,0,0,0,1,2,0,0,2,1,0,0,2,1,1,1, +2,1,1,1,1,2,1,2,1,1,1,2,2,1,1,2,1,1,1,2,1,1,1,1,1,1,1,1,1,1,0,1, +1,2,3,0,0,0,1,0,3,2,1,0,0,1,2,1,1,0,0,0,0,2,1,0,1,1,0,0,2,1,2,1, +1,1,0,0,0,1,0,1,1,1,1,1,2,0,0,1,0,0,0,2,0,0,1,1,1,1,1,1,1,1,0,1, +3,0,0,2,1,2,2,1,0,0,2,1,2,2,0,0,0,2,1,1,1,0,1,1,0,0,1,1,2,0,0,0, +1,2,1,2,2,1,1,2,1,2,0,1,1,1,1,1,1,1,1,1,2,1,1,0,0,1,1,1,1,0,0,1, +1,3,2,0,0,0,1,0,2,2,2,0,0,0,2,2,1,0,0,0,0,3,1,1,1,1,0,0,2,1,1,1, +2,1,0,1,1,1,0,1,1,1,1,1,1,1,0,2,1,0,0,1,0,1,1,0,1,1,1,1,1,1,0,1, +2,3,2,0,0,0,1,0,2,2,0,0,0,0,2,1,1,0,0,0,0,2,1,0,1,1,0,0,2,1,1,0, +2,1,1,1,1,2,1,2,1,2,0,1,1,1,0,2,1,1,1,2,1,1,1,1,0,1,1,1,1,1,0,1, +3,1,1,2,2,2,3,2,1,1,2,2,1,1,0,1,0,2,2,1,1,1,1,1,0,0,1,1,0,1,1,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,2,2,0,0,0,0,0,2,2,0,0,0,0,2,2,1,0,0,0,1,1,0,0,1,2,0,0,2,1,1,1, +2,2,1,1,1,2,1,2,1,1,0,1,1,1,1,2,1,1,1,2,1,1,1,1,0,1,2,1,1,1,0,1, +1,0,0,1,2,3,2,1,0,0,2,0,1,1,0,0,0,1,1,1,1,0,1,1,0,0,1,0,0,0,0,0, +1,2,1,2,1,2,1,1,1,2,0,2,1,1,1,0,1,2,0,0,1,1,1,0,0,0,0,0,0,0,0,0, +2,3,2,0,0,0,0,0,1,1,2,1,0,0,1,1,1,0,0,0,0,2,0,0,1,1,0,0,2,1,1,1, +2,1,1,1,1,1,1,2,1,0,1,1,1,1,0,2,1,1,1,1,1,1,0,1,0,1,1,1,1,1,0,1, +1,2,2,0,1,1,1,0,2,2,2,0,0,0,3,2,1,0,0,0,1,1,0,0,1,1,0,1,1,1,0,0, +1,1,0,1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,2,1,1,1,0,0,1,1,1,0,1,0,1, +2,1,0,2,1,1,2,2,1,1,2,1,1,1,0,0,0,1,1,0,1,1,1,1,0,0,1,1,1,0,0,0, +1,2,2,2,2,2,1,1,1,2,0,2,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,0,1,0, +1,2,3,0,0,0,1,0,2,2,0,0,0,0,2,2,0,0,0,0,0,1,0,0,1,0,0,0,2,0,1,0, +2,1,1,1,1,1,0,2,0,0,0,1,2,1,1,1,1,0,1,2,0,1,0,1,0,1,1,1,0,1,0,1, 
+2,2,2,0,0,0,1,0,2,1,2,0,0,0,1,1,2,0,0,0,0,1,0,0,1,1,0,0,2,1,0,1, +2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,0,1,1,1,1,1,0,1, +1,2,2,0,0,0,1,0,2,2,2,0,0,0,1,1,0,0,0,0,0,1,1,0,2,0,0,1,1,1,0,1, +1,0,1,1,1,1,1,1,0,1,1,1,1,0,0,1,0,0,1,1,0,1,0,1,1,1,1,1,0,0,0,1, +1,0,0,1,0,1,2,1,0,0,1,1,1,2,0,0,0,1,1,0,1,0,1,1,0,0,1,0,0,0,0,0, +0,2,1,2,1,1,1,1,1,2,0,2,0,1,1,0,1,2,1,0,1,1,1,0,0,0,0,0,0,1,0,0, +2,1,1,0,1,2,0,0,1,1,1,0,0,0,1,1,0,0,0,0,0,1,0,0,1,0,0,0,2,1,0,1, +2,2,1,1,1,1,1,2,1,1,0,1,1,1,1,2,1,1,1,2,1,1,0,1,0,1,1,1,1,1,0,1, +1,2,2,0,0,0,0,0,1,1,0,0,0,0,2,1,0,0,0,0,0,2,0,0,2,2,0,0,2,0,0,1, +2,1,1,1,1,1,1,1,0,1,1,0,1,1,0,1,0,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1, +1,1,2,0,0,3,1,0,2,1,1,1,0,0,1,1,1,0,0,0,1,1,0,0,0,1,0,0,1,0,1,0, +1,2,1,0,1,1,1,2,1,1,0,1,1,1,1,1,0,0,0,1,1,1,1,1,0,1,0,0,0,1,0,0, +2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,2,0,0,0, +2,1,1,1,1,1,1,1,1,1,0,1,1,1,1,1,1,1,1,1,2,1,1,0,0,1,1,1,1,1,0,1, +2,1,1,1,2,1,1,1,0,1,1,2,1,0,0,0,0,1,1,1,1,0,1,0,0,0,0,1,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,1,0,1,1,1,1,1,0,0,1,1,2,1,0,0,0,1,1,0,0,0,1,1,0,0,1,0,1,0,0,0, +1,2,1,1,1,1,1,1,1,1,0,1,0,1,1,1,1,1,1,0,1,1,1,0,0,0,0,0,0,1,0,0, +2,0,0,0,1,1,1,1,0,0,1,1,0,0,0,0,0,1,1,1,2,0,0,1,0,0,1,0,1,0,0,0, +0,1,1,1,1,1,1,1,1,2,0,1,1,1,1,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0, +1,0,0,1,1,1,1,1,0,0,2,1,0,1,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0, +0,1,1,1,1,1,1,0,1,1,0,1,0,1,1,0,1,1,0,0,1,1,1,0,0,0,0,0,0,0,0,0, +1,0,0,1,1,1,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, +0,1,1,1,1,1,0,0,1,1,0,1,0,1,0,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0, +0,0,0,1,0,0,0,0,0,0,1,1,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,1,1,1,0,1,0,0,1,1,0,1,0,1,1,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0, +2,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,0,0,1,0,0,1,0,1,0,1,1,1,0,0,1,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,0,1,1,1,1,0,0,0,1,1,1,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0, +0,1,1,1,1,1,1,0,1,1,0,1,0,1,0,0,1,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0, +) + +Latin2HungarianModel = { + 'char_to_order_map': Latin2_HungarianCharToOrderMap, + 'precedence_matrix': HungarianLangModel, + 'typical_positive_ratio': 0.947368, + 'keep_english_letter': True, + 'charset_name': "ISO-8859-2", + 'language': 'Hungarian', +} + +Win1250HungarianModel = { + 'char_to_order_map': win1250HungarianCharToOrderMap, + 'precedence_matrix': HungarianLangModel, + 'typical_positive_ratio': 0.947368, + 'keep_english_letter': True, + 'charset_name': "windows-1250", + 'language': 'Hungarian', +} diff --git a/env/lib/python3.6/site-packages/pip/_vendor/chardet/langthaimodel.py b/env/lib/python3.6/site-packages/pip/_vendor/chardet/langthaimodel.py new file mode 100644 index 0000000..15f94c2 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/chardet/langthaimodel.py @@ -0,0 +1,199 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. 
+# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# 255: Control characters that usually does not exist in any text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 + +# The following result for thai was collected from a limited sample (1M). + +# Character Mapping Table: +TIS620CharToOrderMap = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253,182,106,107,100,183,184,185,101, 94,186,187,108,109,110,111, # 40 +188,189,190, 89, 95,112,113,191,192,193,194,253,253,253,253,253, # 50 +253, 64, 72, 73,114, 74,115,116,102, 81,201,117, 90,103, 78, 82, # 60 + 96,202, 91, 79, 84,104,105, 97, 98, 92,203,253,253,253,253,253, # 70 +209,210,211,212,213, 88,214,215,216,217,218,219,220,118,221,222, +223,224, 99, 85, 83,225,226,227,228,229,230,231,232,233,234,235, +236, 5, 30,237, 24,238, 75, 8, 26, 52, 34, 51,119, 47, 58, 57, + 49, 53, 55, 43, 20, 19, 44, 14, 48, 3, 17, 25, 39, 62, 31, 54, + 45, 9, 16, 2, 61, 15,239, 12, 42, 46, 18, 21, 76, 4, 66, 63, + 22, 10, 1, 36, 23, 13, 40, 27, 32, 35, 86,240,241,242,243,244, + 11, 28, 41, 29, 33,245, 50, 37, 6, 7, 67, 77, 38, 93,246,247, + 68, 56, 59, 65, 69, 60, 70, 80, 71, 87,248,249,250,251,252,253, +) + +# Model Table: +# total sequences: 100% +# first 512 sequences: 92.6386% +# first 1024 sequences:7.3177% +# rest sequences: 1.0230% +# negative sequences: 0.0436% +ThaiLangModel = ( +0,1,3,3,3,3,0,0,3,3,0,3,3,0,3,3,3,3,3,3,3,3,0,0,3,3,3,0,3,3,3,3, +0,3,3,0,0,0,1,3,0,3,3,2,3,3,0,1,2,3,3,3,3,0,2,0,2,0,0,3,2,1,2,2, +3,0,3,3,2,3,0,0,3,3,0,3,3,0,3,3,3,3,3,3,3,3,3,0,3,2,3,0,2,2,2,3, +0,2,3,0,0,0,0,1,0,1,2,3,1,1,3,2,2,0,1,1,0,0,1,0,0,0,0,0,0,0,1,1, +3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,2,2,2,3,3,2,3,2,3,3,2,2,2, +3,1,2,3,0,3,3,2,2,1,2,3,3,1,2,0,1,3,0,1,0,0,1,0,0,0,0,0,0,0,1,1, +3,3,2,2,3,3,3,3,1,2,3,3,3,3,3,2,2,2,2,3,3,2,2,3,3,2,2,3,2,3,2,2, +3,3,1,2,3,1,2,2,3,3,1,0,2,1,0,0,3,1,2,1,0,0,1,0,0,0,0,0,0,1,0,1, +3,3,3,3,3,3,2,2,3,3,3,3,2,3,2,2,3,3,2,2,3,2,2,2,2,1,1,3,1,2,1,1, +3,2,1,0,2,1,0,1,0,1,1,0,1,1,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0, +3,3,3,2,3,2,3,3,2,2,3,2,3,3,2,3,1,1,2,3,2,2,2,3,2,2,2,2,2,1,2,1, +2,2,1,1,3,3,2,1,0,1,2,2,0,1,3,0,0,0,1,1,0,0,0,0,0,2,3,0,0,2,1,1, +3,3,2,3,3,2,0,0,3,3,0,3,3,0,2,2,3,1,2,2,1,1,1,0,2,2,2,0,2,2,1,1, +0,2,1,0,2,0,0,2,0,1,0,0,1,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,1,0, +3,3,2,3,3,2,0,0,3,3,0,2,3,0,2,1,2,2,2,2,1,2,0,0,2,2,2,0,2,2,1,1, +0,2,1,0,2,0,0,2,0,1,1,0,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0, +3,3,2,3,2,3,2,0,2,2,1,3,2,1,3,2,1,2,3,2,2,3,0,2,3,2,2,1,2,2,2,2, +1,2,2,0,0,0,0,2,0,1,2,0,1,1,1,0,1,0,3,1,1,0,0,0,0,0,0,0,0,0,1,0, +3,3,2,3,3,2,3,2,2,2,3,2,2,3,2,2,1,2,3,2,2,3,1,3,2,2,2,3,2,2,2,3, +3,2,1,3,0,1,1,1,0,2,1,1,1,1,1,0,1,0,1,1,0,0,0,0,0,0,0,0,0,2,0,0, +1,0,0,3,0,3,3,3,3,3,0,0,3,0,2,2,3,3,3,3,3,0,0,0,1,1,3,0,0,0,0,2, 
+0,0,1,0,0,0,0,0,0,0,2,3,0,0,0,3,0,2,0,0,0,0,0,3,0,0,0,0,0,0,0,0, +2,0,3,3,3,3,0,0,2,3,0,0,3,0,3,3,2,3,3,3,3,3,0,0,3,3,3,0,0,0,3,3, +0,0,3,0,0,0,0,2,0,0,2,1,1,3,0,0,1,0,0,2,3,0,1,0,0,0,0,0,0,0,1,0, +3,3,3,3,2,3,3,3,3,3,3,3,1,2,1,3,3,2,2,1,2,2,2,3,1,1,2,0,2,1,2,1, +2,2,1,0,0,0,1,1,0,1,0,1,1,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0, +3,0,2,1,2,3,3,3,0,2,0,2,2,0,2,1,3,2,2,1,2,1,0,0,2,2,1,0,2,1,2,2, +0,1,1,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,2,1,3,3,1,1,3,0,2,3,1,1,3,2,1,1,2,0,2,2,3,2,1,1,1,1,1,2, +3,0,0,1,3,1,2,1,2,0,3,0,0,0,1,0,3,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0, +3,3,1,1,3,2,3,3,3,1,3,2,1,3,2,1,3,2,2,2,2,1,3,3,1,2,1,3,1,2,3,0, +2,1,1,3,2,2,2,1,2,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2, +3,3,2,3,2,3,3,2,3,2,3,2,3,3,2,1,0,3,2,2,2,1,2,2,2,1,2,2,1,2,1,1, +2,2,2,3,0,1,3,1,1,1,1,0,1,1,0,2,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,2,3,2,2,1,1,3,2,3,2,3,2,0,3,2,2,1,2,0,2,2,2,1,2,2,2,2,1, +3,2,1,2,2,1,0,2,0,1,0,0,1,1,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,2,3,1,2,3,3,2,2,3,0,1,1,2,0,3,3,2,2,3,0,1,1,3,0,0,0,0, +3,1,0,3,3,0,2,0,2,1,0,0,3,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,2,3,2,3,3,0,1,3,1,1,2,1,2,1,1,3,1,1,0,2,3,1,1,1,1,1,1,1,1, +3,1,1,2,2,2,2,1,1,1,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +3,2,2,1,1,2,1,3,3,2,3,2,2,3,2,2,3,1,2,2,1,2,0,3,2,1,2,2,2,2,2,1, +3,2,1,2,2,2,1,1,1,1,0,0,1,1,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,1,3,3,0,2,1,0,3,2,0,0,3,1,0,1,1,0,1,0,0,0,0,0,1, +1,0,0,1,0,3,2,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,2,2,2,3,0,0,1,3,0,3,2,0,3,2,2,3,3,3,3,3,1,0,2,2,2,0,2,2,1,2, +0,2,3,0,0,0,0,1,0,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +3,0,2,3,1,3,3,2,3,3,0,3,3,0,3,2,2,3,2,3,3,3,0,0,2,2,3,0,1,1,1,3, +0,0,3,0,0,0,2,2,0,1,3,0,1,2,2,2,3,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1, +3,2,3,3,2,0,3,3,2,2,3,1,3,2,1,3,2,0,1,2,2,0,2,3,2,1,0,3,0,0,0,0, +3,0,0,2,3,1,3,0,0,3,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,1,3,2,2,2,1,2,0,1,3,1,1,3,1,3,0,0,2,1,1,1,1,2,1,1,1,0,2,1,0,1, +1,2,0,0,0,3,1,1,0,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,0,3,1,0,0,0,1,0, +3,3,3,3,2,2,2,2,2,1,3,1,1,1,2,0,1,1,2,1,2,1,3,2,0,0,3,1,1,1,1,1, +3,1,0,2,3,0,0,0,3,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,2,3,0,3,3,0,2,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,0,0,0,0,0, +0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,2,3,1,3,0,0,1,2,0,0,2,0,3,3,2,3,3,3,2,3,0,0,2,2,2,0,0,0,2,2, +0,0,1,0,0,0,0,3,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, +0,0,0,3,0,2,0,0,0,0,0,0,0,0,0,0,1,2,3,1,3,3,0,0,1,0,3,0,0,0,0,0, +0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,1,2,3,1,2,3,1,0,3,0,2,2,1,0,2,1,1,2,0,1,0,0,1,1,1,1,0,1,0,0, +1,0,0,0,0,1,1,0,3,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,2,1,0,1,1,1,3,1,2,2,2,2,2,2,1,1,1,1,0,3,1,0,1,3,1,1,1,1, +1,1,0,2,0,1,3,1,1,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,1, +3,0,2,2,1,3,3,2,3,3,0,1,1,0,2,2,1,2,1,3,3,1,0,0,3,2,0,0,0,0,2,1, +0,1,0,0,0,0,1,2,0,1,1,3,1,1,2,2,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, +0,0,3,0,0,1,0,0,0,3,0,0,3,0,3,1,0,1,1,1,3,2,0,0,0,3,0,0,0,0,2,0, +0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0, +3,3,1,3,2,1,3,3,1,2,2,0,1,2,1,0,1,2,0,0,0,0,0,3,0,0,0,3,0,0,0,0, +3,0,0,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,1,2,0,3,3,3,2,2,0,1,1,0,1,3,0,0,0,2,2,0,0,0,0,3,1,0,1,0,0,0, +0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,2,3,1,2,0,0,2,1,0,3,1,0,1,2,0,1,1,1,1,3,0,0,3,1,1,0,2,2,1,1, +0,2,0,0,0,0,0,1,0,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
+3,0,0,3,1,2,0,0,2,2,0,1,2,0,1,0,1,3,1,2,1,0,0,0,2,0,3,0,0,0,1,0, +0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,1,1,2,2,0,0,0,2,0,2,1,0,1,1,0,1,1,1,2,1,0,0,1,1,1,0,2,1,1,1, +0,1,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,1, +0,0,0,2,0,1,3,1,1,1,1,0,0,0,0,3,2,0,1,0,0,0,1,2,0,0,0,1,0,0,0,0, +0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,3,3,3,3,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,2,3,2,2,0,0,0,1,0,0,0,0,2,3,2,1,2,2,3,0,0,0,2,3,1,0,0,0,1,1, +0,0,1,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,0, +3,3,2,2,0,1,0,0,0,0,2,0,2,0,1,0,0,0,1,1,0,0,0,2,1,0,1,0,1,1,0,0, +0,1,0,2,0,0,1,0,3,0,1,0,0,0,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,1,0,0,1,0,0,0,0,0,1,1,2,0,0,0,0,1,0,0,1,3,1,0,0,0,0,1,1,0,0, +0,1,0,0,0,0,3,0,0,0,0,0,0,3,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0, +3,3,1,1,1,1,2,3,0,0,2,1,1,1,1,1,0,2,1,1,0,0,0,2,1,0,1,2,1,1,0,1, +2,1,0,3,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,3,1,0,0,0,0,0,0,0,3,0,0,0,3,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,1, +0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,2,0,0,0,0,0,0,1,2,1,0,1,1,0,2,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,2,0,0,0,1,3,0,1,0,0,0,2,0,0,0,0,0,0,0,1,2,0,0,0,0,0, +3,3,0,0,1,1,2,0,0,1,2,1,0,1,1,1,0,1,1,0,0,2,1,1,0,1,0,0,1,1,1,0, +0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,2,2,1,0,0,0,0,1,0,0,0,0,3,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0, +2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,3,0,0,1,1,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,1,0,1,2,0,1,2,0,0,1,1,0,2,0,1,0,0,1,0,0,0,0,1,0,0,0,2,0,0,0,0, +1,0,0,1,0,1,1,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,1,0,0,0,0,0,0,0,1,1,0,1,1,0,2,1,3,0,0,0,0,1,1,0,0,0,0,0,0,0,3, +1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,0,1,0,1,0,0,2,0,0,2,0,0,1,1,2,0,0,1,1,0,0,0,1,0,0,0,1,1,0,0,0, +1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, +1,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,1,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,0,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,3,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,0,0, +1,0,0,0,0,0,0,0,0,1,0,0,0,0,2,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,1,1,0,0,2,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +) + +TIS620ThaiModel = { + 'char_to_order_map': TIS620CharToOrderMap, + 'precedence_matrix': ThaiLangModel, + 'typical_positive_ratio': 0.926386, + 'keep_english_letter': False, + 'charset_name': "TIS-620", + 'language': 'Thai', +} diff --git a/env/lib/python3.6/site-packages/pip/_vendor/chardet/langturkishmodel.py b/env/lib/python3.6/site-packages/pip/_vendor/chardet/langturkishmodel.py new file mode 100644 index 0000000..a427a45 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/chardet/langturkishmodel.py @@ -0,0 +1,193 @@ +# -*- coding: utf-8 -*- +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Özgür Baskın - Turkish Language Model +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# 255: Control characters that usually does not exist in any text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 + +# Character Mapping Table: +Latin5_TurkishCharToOrderMap = ( +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, +255, 23, 37, 47, 39, 29, 52, 36, 45, 53, 60, 16, 49, 20, 46, 42, + 48, 69, 44, 35, 31, 51, 38, 62, 65, 43, 56,255,255,255,255,255, +255, 1, 21, 28, 12, 2, 18, 27, 25, 3, 24, 10, 5, 13, 4, 15, + 26, 64, 7, 8, 9, 14, 32, 57, 58, 11, 22,255,255,255,255,255, +180,179,178,177,176,175,174,173,172,171,170,169,168,167,166,165, +164,163,162,161,160,159,101,158,157,156,155,154,153,152,151,106, +150,149,148,147,146,145,144,100,143,142,141,140,139,138,137,136, + 94, 80, 93,135,105,134,133, 63,132,131,130,129,128,127,126,125, +124,104, 73, 99, 79, 85,123, 54,122, 98, 92,121,120, 91,103,119, + 68,118,117, 97,116,115, 50, 90,114,113,112,111, 55, 41, 40, 86, + 89, 70, 59, 78, 71, 82, 88, 33, 77, 66, 84, 83,110, 75, 61, 96, + 30, 67,109, 74, 87,102, 34, 95, 81,108, 76, 72, 17, 6, 19,107, +) + +TurkishLangModel = ( +3,2,3,3,3,1,3,3,3,3,3,3,3,3,2,1,1,3,3,1,3,3,0,3,3,3,3,3,0,3,1,3, +3,2,1,0,0,1,1,0,0,0,1,0,0,1,1,1,1,0,0,0,0,0,0,0,2,2,0,0,1,0,0,1, +3,2,2,3,3,0,3,3,3,3,3,3,3,2,3,1,0,3,3,1,3,3,0,3,3,3,3,3,0,3,0,3, +3,1,1,0,1,0,1,0,0,0,0,0,0,1,1,1,1,0,0,0,0,0,0,0,2,2,0,0,0,1,0,1, +3,3,2,3,3,0,3,3,3,3,3,3,3,2,3,1,1,3,3,0,3,3,1,2,3,3,3,3,0,3,0,3, 
+3,1,1,0,0,0,1,0,0,0,0,1,1,0,1,2,1,0,0,0,1,0,0,0,0,2,0,0,0,0,0,1, +3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,1,3,3,2,0,3,2,1,2,2,1,3,3,0,0,0,2, +2,2,0,1,0,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,1,0,1,0,0,1, +3,3,3,2,3,3,1,2,3,3,3,3,3,3,3,1,3,2,1,0,3,2,0,1,2,3,3,2,1,0,0,2, +2,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,2,0,0,0, +1,0,1,3,3,1,3,3,3,3,3,3,3,1,2,0,0,2,3,0,2,3,0,0,2,2,2,3,0,3,0,1, +2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,0,3,2,0,2,3,2,3,3,1,0,0,2, +3,2,0,0,1,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,1,1,1,0,2,0,0,1, +3,3,3,2,3,3,2,3,3,3,3,2,3,3,3,0,3,3,0,0,2,1,0,0,2,3,2,2,0,0,0,2, +2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1,0,1,0,2,0,0,1, +3,3,3,2,3,3,3,3,3,3,3,2,3,3,3,0,3,2,0,1,3,2,1,1,3,2,3,2,1,0,0,2, +2,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0, +3,3,3,2,3,3,3,3,3,3,3,2,3,3,3,0,3,2,2,0,2,3,0,0,2,2,2,2,0,0,0,2, +3,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,2,0,1,0,0,0, +3,3,3,3,3,3,3,2,2,2,2,3,2,3,3,0,3,3,1,1,2,2,0,0,2,2,3,2,0,0,1,3, +0,3,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,1, +3,3,3,2,3,3,3,2,1,2,2,3,2,3,3,0,3,2,0,0,1,1,0,1,1,2,1,2,0,0,0,1, +0,3,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,1,0,0,0, +3,3,3,2,3,3,2,3,2,2,2,3,3,3,3,1,3,1,1,0,3,2,1,1,3,3,2,3,1,0,0,1, +1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,2,0,0,1, +3,2,2,3,3,0,3,3,3,3,3,3,3,2,2,1,0,3,3,1,3,3,0,1,3,3,2,3,0,3,0,3, +2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0, +2,2,2,3,3,0,3,3,3,3,3,3,3,3,3,0,0,3,2,0,3,3,0,3,2,3,3,3,0,3,1,3, +2,0,0,0,0,0,0,0,0,0,0,1,0,1,2,0,1,0,0,0,0,0,0,0,2,2,0,0,1,0,0,1, +3,3,3,1,2,3,3,1,0,0,1,0,0,3,3,2,3,0,0,2,0,0,2,0,2,0,0,0,2,0,2,0, +0,3,1,0,1,0,0,0,2,2,1,0,1,1,2,1,2,2,2,0,2,1,1,0,0,0,2,0,0,0,0,0, +1,2,1,3,3,0,3,3,3,3,3,2,3,0,0,0,0,2,3,0,2,3,1,0,2,3,1,3,0,3,0,2, +3,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,1,3,3,2,2,3,2,2,0,1,2,3,0,1,2,1,0,1,0,0,0,1,0,2,2,0,0,0,1, +1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,1,0,0,1,0,0,0, +3,3,3,1,3,3,1,1,3,3,1,1,3,3,1,0,2,1,2,0,2,1,0,0,1,1,2,1,0,0,0,2, +2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,1,0,2,1,3,0,0,2,0,0,3,3,0,3,0,0,1,0,1,2,0,0,1,1,2,2,0,1,0, +0,1,2,1,1,0,1,0,1,1,1,1,1,0,1,1,1,2,2,1,2,0,1,0,0,0,0,0,0,1,0,0, +3,3,3,2,3,2,3,3,0,2,2,2,3,3,3,0,3,0,0,0,2,2,0,1,2,1,1,1,0,0,0,1, +0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0, +3,3,3,3,3,3,2,1,2,2,3,3,3,3,2,0,2,0,0,0,2,2,0,0,2,1,3,3,0,0,1,1, +1,1,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0, +1,1,2,3,3,0,3,3,3,3,3,3,2,2,0,2,0,2,3,2,3,2,2,2,2,2,2,2,1,3,2,3, +2,0,2,1,2,2,2,2,1,1,2,2,1,2,2,1,2,0,0,2,1,1,0,2,1,0,0,1,0,0,0,1, +2,3,3,1,1,1,0,1,1,1,2,3,2,1,1,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0, +0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,2,2,2,3,2,3,2,2,1,3,3,3,0,2,1,2,0,2,1,0,0,1,1,1,1,1,0,0,1, +2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,2,0,1,0,0,0, +3,3,3,2,3,3,3,3,3,2,3,1,2,3,3,1,2,0,0,0,0,0,0,0,3,2,1,1,0,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0, +3,3,3,2,2,3,3,2,1,1,1,1,1,3,3,0,3,1,0,0,1,1,0,0,3,1,2,1,0,0,0,0, +0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0, +3,3,3,2,2,3,2,2,2,3,2,1,1,3,3,0,3,0,0,0,0,1,0,0,3,1,1,2,0,0,0,1, +1,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +1,1,1,3,3,0,3,3,3,3,3,2,2,2,1,2,0,2,1,2,2,1,1,0,1,2,2,2,2,2,2,2, +0,0,2,1,2,1,2,1,0,1,1,3,1,2,1,1,2,0,0,2,0,1,0,1,0,1,0,0,0,1,0,1, 
+3,3,3,1,3,3,3,0,1,1,0,2,2,3,1,0,3,0,0,0,1,0,0,0,1,0,0,1,0,1,0,0, +1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,2,0,0,2,2,1,0,0,1,0,0,3,3,1,3,0,0,1,1,0,2,0,3,0,0,0,2,0,1,1, +0,1,2,0,1,2,2,0,2,2,2,2,1,0,2,1,1,0,2,0,2,1,2,0,0,0,0,0,0,0,0,0, +3,3,3,1,3,2,3,2,0,2,2,2,1,3,2,0,2,1,2,0,1,2,0,0,1,0,2,2,0,0,0,2, +1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,1,0,0,0, +3,3,3,0,3,3,1,1,2,3,1,0,3,2,3,0,3,0,0,0,1,0,0,0,1,0,1,0,0,0,0,0, +1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,3,3,0,3,3,2,3,3,2,2,0,0,0,0,1,2,0,1,3,0,0,0,3,1,1,0,3,0,2, +2,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,1,2,2,1,0,3,1,1,1,1,3,3,2,3,0,0,1,0,1,2,0,2,2,0,2,2,0,2,1, +0,2,2,1,1,1,1,0,2,1,1,0,1,1,1,1,2,1,2,1,2,0,1,0,1,0,0,0,0,0,0,0, +3,3,3,0,1,1,3,0,0,1,1,0,0,2,2,0,3,0,0,1,1,0,1,0,0,0,0,0,2,0,0,0, +0,3,1,0,1,0,1,0,2,0,0,1,0,1,0,1,1,1,2,1,1,0,2,0,0,0,0,0,0,0,0,0, +3,3,3,0,2,0,2,0,1,1,1,0,0,3,3,0,2,0,0,1,0,0,2,1,1,0,1,0,1,0,1,0, +0,2,0,1,2,0,2,0,2,1,1,0,1,0,2,1,1,0,2,1,1,0,1,0,0,0,1,1,0,0,0,0, +3,2,3,0,1,0,0,0,0,0,0,0,0,1,2,0,1,0,0,1,0,0,1,0,0,0,0,0,2,0,0,0, +0,0,1,1,0,0,1,0,1,0,0,1,0,0,0,2,1,0,1,0,2,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,0,0,2,3,0,0,1,0,1,0,2,3,2,3,0,0,1,3,0,2,1,0,0,0,0,2,0,1,0, +0,2,1,0,0,1,1,0,2,1,0,0,1,0,0,1,1,0,1,1,2,0,1,0,0,0,0,1,0,0,0,0, +3,2,2,0,0,1,1,0,0,0,0,0,0,3,1,1,1,0,0,0,0,0,1,0,0,0,0,0,2,0,1,0, +0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0, +0,0,0,3,3,0,2,3,2,2,1,2,2,1,1,2,0,1,3,2,2,2,0,0,2,2,0,0,0,1,2,1, +3,0,2,1,1,0,1,1,1,0,1,2,2,2,1,1,2,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0, +0,1,1,2,3,0,3,3,3,2,2,2,2,1,0,1,0,1,0,1,2,2,0,0,2,2,1,3,1,1,2,1, +0,0,1,1,2,0,1,1,0,0,1,2,0,2,1,1,2,0,0,1,0,0,0,1,0,1,0,1,0,0,0,0, +3,3,2,0,0,3,1,0,0,0,0,0,0,3,2,1,2,0,0,1,0,0,2,0,0,0,0,0,2,0,1,0, +0,2,1,1,0,0,1,0,1,2,0,0,1,1,0,0,2,1,1,1,1,0,2,0,0,0,0,0,0,0,0,0, +3,3,2,0,0,1,0,0,0,0,1,0,0,3,3,2,2,0,0,1,0,0,2,0,1,0,0,0,2,0,1,0, +0,0,1,1,0,0,2,0,2,1,0,0,1,1,2,1,2,0,2,1,2,1,1,1,0,0,1,1,0,0,0,0, +3,3,2,0,0,2,2,0,0,0,1,1,0,2,2,1,3,1,0,1,0,1,2,0,0,0,0,0,1,0,1,0, +0,1,1,0,0,0,0,0,1,0,0,1,0,0,0,1,1,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,2,0,0,0,1,0,0,1,0,0,2,3,1,2,0,0,1,0,0,2,0,0,0,1,0,2,0,2,0, +0,1,1,2,2,1,2,0,2,1,1,0,0,1,1,0,1,1,1,1,2,1,1,0,0,0,0,0,0,0,0,0, +3,3,3,0,2,1,2,1,0,0,1,1,0,3,3,1,2,0,0,1,0,0,2,0,2,0,1,1,2,0,0,0, +0,0,1,1,1,1,2,0,1,1,0,1,1,1,1,0,0,0,1,1,1,0,1,0,0,0,1,0,0,0,0,0, +3,3,3,0,2,2,3,2,0,0,1,0,0,2,3,1,0,0,0,0,0,0,2,0,2,0,0,0,2,0,0,0, +0,1,1,0,0,0,1,0,0,1,0,1,1,0,1,0,1,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0, +3,2,3,0,0,0,0,0,0,0,1,0,0,2,2,2,2,0,0,1,0,0,2,0,0,0,0,0,2,0,1,0, +0,0,2,1,1,0,1,0,2,1,1,0,0,1,1,2,1,0,2,0,2,0,1,0,0,0,2,0,0,0,0,0, +0,0,0,2,2,0,2,1,1,1,1,2,2,0,0,1,0,1,0,0,1,3,0,0,0,0,1,0,0,2,1,0, +0,0,1,0,1,0,0,0,0,0,2,1,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0, +2,0,0,2,3,0,2,3,1,2,2,0,2,0,0,2,0,2,1,1,1,2,1,0,0,1,2,1,1,2,1,0, +1,0,2,0,1,0,1,1,0,0,2,2,1,2,1,1,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,0,2,1,2,0,0,0,1,0,0,3,2,0,1,0,0,1,0,0,2,0,0,0,1,2,1,0,1,0, +0,0,0,0,1,0,1,0,0,1,0,0,0,0,1,0,1,0,1,1,1,0,1,0,0,0,0,0,0,0,0,0, +0,0,0,2,2,0,2,2,1,1,0,1,1,1,1,1,0,0,1,2,1,1,1,0,1,0,0,0,1,1,1,1, +0,0,2,1,0,1,1,1,0,1,1,2,1,2,1,1,2,0,1,1,2,1,0,2,0,0,0,0,0,0,0,0, +3,2,2,0,0,2,0,0,0,0,0,0,0,2,2,0,2,0,0,1,0,0,2,0,0,0,0,0,2,0,0,0, +0,2,1,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0, +0,0,0,3,2,0,2,2,0,1,1,0,1,0,0,1,0,0,0,1,0,1,0,0,0,0,0,1,0,0,0,0, +2,0,1,0,1,0,1,1,0,0,1,2,0,1,0,1,1,0,0,1,0,1,0,2,0,0,0,0,0,0,0,0, +2,2,2,0,1,1,0,0,0,1,0,0,0,1,2,0,1,0,0,1,0,0,1,0,0,0,0,1,2,0,1,0, 
+0,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,1,0,1,0,2,0,0,0,0,0,0,0,0,0,0,0, +2,2,2,2,1,0,1,1,1,0,0,0,0,1,2,0,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0, +1,1,2,0,1,0,0,0,1,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,2,0,0,0,0,0,1, +0,0,1,2,2,0,2,1,2,1,1,2,2,0,0,0,0,1,0,0,1,1,0,0,2,0,0,0,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0, +2,2,2,0,0,0,1,0,0,0,0,0,0,2,2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, +0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,1,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,2,2,0,1,0,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,1,0,0,0,0,0,0,0,0,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +) + +Latin5TurkishModel = { + 'char_to_order_map': Latin5_TurkishCharToOrderMap, + 'precedence_matrix': TurkishLangModel, + 'typical_positive_ratio': 0.970290, + 'keep_english_letter': True, + 'charset_name': "ISO-8859-9", + 'language': 'Turkish', +} diff --git a/env/lib/python3.6/site-packages/pip/_vendor/chardet/latin1prober.py b/env/lib/python3.6/site-packages/pip/_vendor/chardet/latin1prober.py new file mode 100644 index 0000000..7d1e8c2 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/chardet/latin1prober.py @@ -0,0 +1,145 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetprober import CharSetProber +from .enums import ProbingState + +FREQ_CAT_NUM = 4 + +UDF = 0 # undefined +OTH = 1 # other +ASC = 2 # ascii capital letter +ASS = 3 # ascii small letter +ACV = 4 # accent capital vowel +ACO = 5 # accent capital other +ASV = 6 # accent small vowel +ASO = 7 # accent small other +CLASS_NUM = 8 # total classes + +Latin1_CharToClass = ( + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 00 - 07 + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 08 - 0F + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 10 - 17 + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 18 - 1F + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 20 - 27 + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 28 - 2F + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 30 - 37 + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 38 - 3F + OTH, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 40 - 47 + ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 48 - 4F + ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 50 - 57 + ASC, ASC, ASC, OTH, OTH, OTH, OTH, OTH, # 58 - 5F + OTH, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 60 - 67 + ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 68 - 6F + ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 70 - 77 + ASS, ASS, ASS, OTH, OTH, OTH, OTH, OTH, # 78 - 7F + OTH, UDF, OTH, ASO, OTH, OTH, OTH, OTH, # 80 - 87 + OTH, OTH, ACO, OTH, ACO, UDF, ACO, UDF, # 88 - 8F + UDF, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 90 - 97 + OTH, OTH, ASO, OTH, ASO, UDF, ASO, ACO, # 98 - 9F + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # A0 - A7 + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # A8 - AF + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # B0 - B7 + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # B8 - BF + ACV, ACV, ACV, ACV, ACV, ACV, ACO, ACO, # C0 - C7 + ACV, ACV, ACV, ACV, ACV, ACV, ACV, ACV, # C8 - CF + ACO, ACO, ACV, ACV, ACV, ACV, ACV, OTH, # D0 - D7 + ACV, ACV, ACV, ACV, ACV, ACO, ACO, ACO, # D8 - DF + ASV, ASV, ASV, ASV, ASV, ASV, ASO, ASO, # E0 - E7 + ASV, ASV, ASV, ASV, ASV, ASV, ASV, ASV, # E8 - EF + ASO, ASO, ASV, ASV, ASV, ASV, ASV, OTH, # F0 - F7 + ASV, ASV, ASV, ASV, ASV, ASO, ASO, ASO, # F8 - FF +) + +# 0 : illegal +# 1 : very unlikely +# 2 : normal +# 3 : very likely +Latin1ClassModel = ( +# UDF OTH ASC ASS ACV ACO ASV ASO + 0, 0, 0, 0, 0, 0, 0, 0, # UDF + 0, 3, 3, 3, 3, 3, 3, 3, # OTH + 0, 3, 3, 3, 3, 3, 3, 3, # ASC + 0, 3, 3, 3, 1, 1, 3, 3, # ASS + 0, 3, 3, 3, 1, 2, 1, 2, # ACV + 0, 3, 3, 3, 3, 3, 3, 3, # ACO + 0, 3, 1, 3, 1, 1, 1, 3, # ASV + 0, 3, 1, 3, 1, 1, 3, 3, # ASO +) + + +class Latin1Prober(CharSetProber): + def __init__(self): + super(Latin1Prober, self).__init__() + self._last_char_class = None + self._freq_counter = None + self.reset() + + def reset(self): + self._last_char_class = OTH + self._freq_counter = [0] * FREQ_CAT_NUM + CharSetProber.reset(self) + + @property + def charset_name(self): + return "ISO-8859-1" + + @property + def language(self): + return "" + + def feed(self, byte_str): + byte_str = self.filter_with_english_letters(byte_str) + for c in byte_str: + char_class = Latin1_CharToClass[c] + freq = Latin1ClassModel[(self._last_char_class * CLASS_NUM) + + char_class] + if freq == 0: + self._state = ProbingState.NOT_ME + break + self._freq_counter[freq] += 1 + self._last_char_class = char_class + + return self.state + + def get_confidence(self): + if self.state == 
ProbingState.NOT_ME: + return 0.01 + + total = sum(self._freq_counter) + if total < 0.01: + confidence = 0.0 + else: + confidence = ((self._freq_counter[3] - self._freq_counter[1] * 20.0) + / total) + if confidence < 0.0: + confidence = 0.0 + # lower the confidence of latin1 so that other more accurate + # detector can take priority. + confidence = confidence * 0.73 + return confidence diff --git a/env/lib/python3.6/site-packages/pip/_vendor/chardet/mbcharsetprober.py b/env/lib/python3.6/site-packages/pip/_vendor/chardet/mbcharsetprober.py new file mode 100644 index 0000000..6256ecf --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/chardet/mbcharsetprober.py @@ -0,0 +1,91 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# Proofpoint, Inc. +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetprober import CharSetProber +from .enums import ProbingState, MachineState + + +class MultiByteCharSetProber(CharSetProber): + """ + MultiByteCharSetProber + """ + + def __init__(self, lang_filter=None): + super(MultiByteCharSetProber, self).__init__(lang_filter=lang_filter) + self.distribution_analyzer = None + self.coding_sm = None + self._last_char = [0, 0] + + def reset(self): + super(MultiByteCharSetProber, self).reset() + if self.coding_sm: + self.coding_sm.reset() + if self.distribution_analyzer: + self.distribution_analyzer.reset() + self._last_char = [0, 0] + + @property + def charset_name(self): + raise NotImplementedError + + @property + def language(self): + raise NotImplementedError + + def feed(self, byte_str): + for i in range(len(byte_str)): + coding_state = self.coding_sm.next_state(byte_str[i]) + if coding_state == MachineState.ERROR: + self.logger.debug('%s %s prober hit error at byte %s', + self.charset_name, self.language, i) + self._state = ProbingState.NOT_ME + break + elif coding_state == MachineState.ITS_ME: + self._state = ProbingState.FOUND_IT + break + elif coding_state == MachineState.START: + char_len = self.coding_sm.get_current_charlen() + if i == 0: + self._last_char[1] = byte_str[0] + self.distribution_analyzer.feed(self._last_char, char_len) + else: + self.distribution_analyzer.feed(byte_str[i - 1:i + 1], + char_len) + + self._last_char[0] = byte_str[-1] + + if self.state == ProbingState.DETECTING: + if (self.distribution_analyzer.got_enough_data() and + (self.get_confidence() > self.SHORTCUT_THRESHOLD)): + self._state 
= ProbingState.FOUND_IT + + return self.state + + def get_confidence(self): + return self.distribution_analyzer.get_confidence() diff --git a/env/lib/python3.6/site-packages/pip/_vendor/chardet/mbcsgroupprober.py b/env/lib/python3.6/site-packages/pip/_vendor/chardet/mbcsgroupprober.py new file mode 100644 index 0000000..530abe7 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/chardet/mbcsgroupprober.py @@ -0,0 +1,54 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# Proofpoint, Inc. +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetgroupprober import CharSetGroupProber +from .utf8prober import UTF8Prober +from .sjisprober import SJISProber +from .eucjpprober import EUCJPProber +from .gb2312prober import GB2312Prober +from .euckrprober import EUCKRProber +from .cp949prober import CP949Prober +from .big5prober import Big5Prober +from .euctwprober import EUCTWProber + + +class MBCSGroupProber(CharSetGroupProber): + def __init__(self, lang_filter=None): + super(MBCSGroupProber, self).__init__(lang_filter=lang_filter) + self.probers = [ + UTF8Prober(), + SJISProber(), + EUCJPProber(), + GB2312Prober(), + EUCKRProber(), + CP949Prober(), + Big5Prober(), + EUCTWProber() + ] + self.reset() diff --git a/env/lib/python3.6/site-packages/pip/_vendor/chardet/mbcssm.py b/env/lib/python3.6/site-packages/pip/_vendor/chardet/mbcssm.py new file mode 100644 index 0000000..8360d0f --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/chardet/mbcssm.py @@ -0,0 +1,572 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .enums import MachineState + +# BIG5 + +BIG5_CLS = ( + 1,1,1,1,1,1,1,1, # 00 - 07 #allow 0x00 as legal value + 1,1,1,1,1,1,0,0, # 08 - 0f + 1,1,1,1,1,1,1,1, # 10 - 17 + 1,1,1,0,1,1,1,1, # 18 - 1f + 1,1,1,1,1,1,1,1, # 20 - 27 + 1,1,1,1,1,1,1,1, # 28 - 2f + 1,1,1,1,1,1,1,1, # 30 - 37 + 1,1,1,1,1,1,1,1, # 38 - 3f + 2,2,2,2,2,2,2,2, # 40 - 47 + 2,2,2,2,2,2,2,2, # 48 - 4f + 2,2,2,2,2,2,2,2, # 50 - 57 + 2,2,2,2,2,2,2,2, # 58 - 5f + 2,2,2,2,2,2,2,2, # 60 - 67 + 2,2,2,2,2,2,2,2, # 68 - 6f + 2,2,2,2,2,2,2,2, # 70 - 77 + 2,2,2,2,2,2,2,1, # 78 - 7f + 4,4,4,4,4,4,4,4, # 80 - 87 + 4,4,4,4,4,4,4,4, # 88 - 8f + 4,4,4,4,4,4,4,4, # 90 - 97 + 4,4,4,4,4,4,4,4, # 98 - 9f + 4,3,3,3,3,3,3,3, # a0 - a7 + 3,3,3,3,3,3,3,3, # a8 - af + 3,3,3,3,3,3,3,3, # b0 - b7 + 3,3,3,3,3,3,3,3, # b8 - bf + 3,3,3,3,3,3,3,3, # c0 - c7 + 3,3,3,3,3,3,3,3, # c8 - cf + 3,3,3,3,3,3,3,3, # d0 - d7 + 3,3,3,3,3,3,3,3, # d8 - df + 3,3,3,3,3,3,3,3, # e0 - e7 + 3,3,3,3,3,3,3,3, # e8 - ef + 3,3,3,3,3,3,3,3, # f0 - f7 + 3,3,3,3,3,3,3,0 # f8 - ff +) + +BIG5_ST = ( + MachineState.ERROR,MachineState.START,MachineState.START, 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 + MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,#08-0f + MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START#10-17 +) + +BIG5_CHAR_LEN_TABLE = (0, 1, 1, 2, 0) + +BIG5_SM_MODEL = {'class_table': BIG5_CLS, + 'class_factor': 5, + 'state_table': BIG5_ST, + 'char_len_table': BIG5_CHAR_LEN_TABLE, + 'name': 'Big5'} + +# CP949 + +CP949_CLS = ( + 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,0,0, # 00 - 0f + 1,1,1,1,1,1,1,1, 1,1,1,0,1,1,1,1, # 10 - 1f + 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, # 20 - 2f + 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, # 30 - 3f + 1,4,4,4,4,4,4,4, 4,4,4,4,4,4,4,4, # 40 - 4f + 4,4,5,5,5,5,5,5, 5,5,5,1,1,1,1,1, # 50 - 5f + 1,5,5,5,5,5,5,5, 5,5,5,5,5,5,5,5, # 60 - 6f + 5,5,5,5,5,5,5,5, 5,5,5,1,1,1,1,1, # 70 - 7f + 0,6,6,6,6,6,6,6, 6,6,6,6,6,6,6,6, # 80 - 8f + 6,6,6,6,6,6,6,6, 6,6,6,6,6,6,6,6, # 90 - 9f + 6,7,7,7,7,7,7,7, 7,7,7,7,7,8,8,8, # a0 - af + 7,7,7,7,7,7,7,7, 7,7,7,7,7,7,7,7, # b0 - bf + 7,7,7,7,7,7,9,2, 2,3,2,2,2,2,2,2, # c0 - cf + 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2, # d0 - df + 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2, # e0 - ef + 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,0, # f0 - ff +) + +CP949_ST = ( +#cls= 0 1 2 3 4 5 6 7 8 9 # previous state = + MachineState.ERROR,MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START, 4, 5,MachineState.ERROR, 6, # MachineState.START + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, # MachineState.ERROR + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME, # MachineState.ITS_ME + 
MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START, # 3 + MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START, # 4 + MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START, # 5 + MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START, # 6 +) + +CP949_CHAR_LEN_TABLE = (0, 1, 2, 0, 1, 1, 2, 2, 0, 2) + +CP949_SM_MODEL = {'class_table': CP949_CLS, + 'class_factor': 10, + 'state_table': CP949_ST, + 'char_len_table': CP949_CHAR_LEN_TABLE, + 'name': 'CP949'} + +# EUC-JP + +EUCJP_CLS = ( + 4,4,4,4,4,4,4,4, # 00 - 07 + 4,4,4,4,4,4,5,5, # 08 - 0f + 4,4,4,4,4,4,4,4, # 10 - 17 + 4,4,4,5,4,4,4,4, # 18 - 1f + 4,4,4,4,4,4,4,4, # 20 - 27 + 4,4,4,4,4,4,4,4, # 28 - 2f + 4,4,4,4,4,4,4,4, # 30 - 37 + 4,4,4,4,4,4,4,4, # 38 - 3f + 4,4,4,4,4,4,4,4, # 40 - 47 + 4,4,4,4,4,4,4,4, # 48 - 4f + 4,4,4,4,4,4,4,4, # 50 - 57 + 4,4,4,4,4,4,4,4, # 58 - 5f + 4,4,4,4,4,4,4,4, # 60 - 67 + 4,4,4,4,4,4,4,4, # 68 - 6f + 4,4,4,4,4,4,4,4, # 70 - 77 + 4,4,4,4,4,4,4,4, # 78 - 7f + 5,5,5,5,5,5,5,5, # 80 - 87 + 5,5,5,5,5,5,1,3, # 88 - 8f + 5,5,5,5,5,5,5,5, # 90 - 97 + 5,5,5,5,5,5,5,5, # 98 - 9f + 5,2,2,2,2,2,2,2, # a0 - a7 + 2,2,2,2,2,2,2,2, # a8 - af + 2,2,2,2,2,2,2,2, # b0 - b7 + 2,2,2,2,2,2,2,2, # b8 - bf + 2,2,2,2,2,2,2,2, # c0 - c7 + 2,2,2,2,2,2,2,2, # c8 - cf + 2,2,2,2,2,2,2,2, # d0 - d7 + 2,2,2,2,2,2,2,2, # d8 - df + 0,0,0,0,0,0,0,0, # e0 - e7 + 0,0,0,0,0,0,0,0, # e8 - ef + 0,0,0,0,0,0,0,0, # f0 - f7 + 0,0,0,0,0,0,0,5 # f8 - ff +) + +EUCJP_ST = ( + 3, 4, 3, 5,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.START,MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#10-17 + MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 3,MachineState.ERROR,#18-1f + 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START#20-27 +) + +EUCJP_CHAR_LEN_TABLE = (2, 2, 2, 3, 1, 0) + +EUCJP_SM_MODEL = {'class_table': EUCJP_CLS, + 'class_factor': 6, + 'state_table': EUCJP_ST, + 'char_len_table': EUCJP_CHAR_LEN_TABLE, + 'name': 'EUC-JP'} + +# EUC-KR + +EUCKR_CLS = ( + 1,1,1,1,1,1,1,1, # 00 - 07 + 1,1,1,1,1,1,0,0, # 08 - 0f + 1,1,1,1,1,1,1,1, # 10 - 17 + 1,1,1,0,1,1,1,1, # 18 - 1f + 1,1,1,1,1,1,1,1, # 20 - 27 + 1,1,1,1,1,1,1,1, # 28 - 2f + 1,1,1,1,1,1,1,1, # 30 - 37 + 1,1,1,1,1,1,1,1, # 38 - 3f + 1,1,1,1,1,1,1,1, # 40 - 47 + 1,1,1,1,1,1,1,1, # 48 - 4f + 1,1,1,1,1,1,1,1, # 50 - 57 + 1,1,1,1,1,1,1,1, # 58 - 5f + 1,1,1,1,1,1,1,1, # 60 - 67 + 1,1,1,1,1,1,1,1, # 68 - 6f + 1,1,1,1,1,1,1,1, # 70 - 77 + 1,1,1,1,1,1,1,1, # 78 - 7f + 0,0,0,0,0,0,0,0, # 80 - 87 + 0,0,0,0,0,0,0,0, # 88 - 8f + 0,0,0,0,0,0,0,0, # 90 - 97 + 0,0,0,0,0,0,0,0, # 98 - 9f + 0,2,2,2,2,2,2,2, # a0 - a7 + 2,2,2,2,2,3,3,3, # 
a8 - af + 2,2,2,2,2,2,2,2, # b0 - b7 + 2,2,2,2,2,2,2,2, # b8 - bf + 2,2,2,2,2,2,2,2, # c0 - c7 + 2,3,2,2,2,2,2,2, # c8 - cf + 2,2,2,2,2,2,2,2, # d0 - d7 + 2,2,2,2,2,2,2,2, # d8 - df + 2,2,2,2,2,2,2,2, # e0 - e7 + 2,2,2,2,2,2,2,2, # e8 - ef + 2,2,2,2,2,2,2,2, # f0 - f7 + 2,2,2,2,2,2,2,0 # f8 - ff +) + +EUCKR_ST = ( + MachineState.ERROR,MachineState.START, 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START #08-0f +) + +EUCKR_CHAR_LEN_TABLE = (0, 1, 2, 0) + +EUCKR_SM_MODEL = {'class_table': EUCKR_CLS, + 'class_factor': 4, + 'state_table': EUCKR_ST, + 'char_len_table': EUCKR_CHAR_LEN_TABLE, + 'name': 'EUC-KR'} + +# EUC-TW + +EUCTW_CLS = ( + 2,2,2,2,2,2,2,2, # 00 - 07 + 2,2,2,2,2,2,0,0, # 08 - 0f + 2,2,2,2,2,2,2,2, # 10 - 17 + 2,2,2,0,2,2,2,2, # 18 - 1f + 2,2,2,2,2,2,2,2, # 20 - 27 + 2,2,2,2,2,2,2,2, # 28 - 2f + 2,2,2,2,2,2,2,2, # 30 - 37 + 2,2,2,2,2,2,2,2, # 38 - 3f + 2,2,2,2,2,2,2,2, # 40 - 47 + 2,2,2,2,2,2,2,2, # 48 - 4f + 2,2,2,2,2,2,2,2, # 50 - 57 + 2,2,2,2,2,2,2,2, # 58 - 5f + 2,2,2,2,2,2,2,2, # 60 - 67 + 2,2,2,2,2,2,2,2, # 68 - 6f + 2,2,2,2,2,2,2,2, # 70 - 77 + 2,2,2,2,2,2,2,2, # 78 - 7f + 0,0,0,0,0,0,0,0, # 80 - 87 + 0,0,0,0,0,0,6,0, # 88 - 8f + 0,0,0,0,0,0,0,0, # 90 - 97 + 0,0,0,0,0,0,0,0, # 98 - 9f + 0,3,4,4,4,4,4,4, # a0 - a7 + 5,5,1,1,1,1,1,1, # a8 - af + 1,1,1,1,1,1,1,1, # b0 - b7 + 1,1,1,1,1,1,1,1, # b8 - bf + 1,1,3,1,3,3,3,3, # c0 - c7 + 3,3,3,3,3,3,3,3, # c8 - cf + 3,3,3,3,3,3,3,3, # d0 - d7 + 3,3,3,3,3,3,3,3, # d8 - df + 3,3,3,3,3,3,3,3, # e0 - e7 + 3,3,3,3,3,3,3,3, # e8 - ef + 3,3,3,3,3,3,3,3, # f0 - f7 + 3,3,3,3,3,3,3,0 # f8 - ff +) + +EUCTW_ST = ( + MachineState.ERROR,MachineState.ERROR,MachineState.START, 3, 3, 3, 4,MachineState.ERROR,#00-07 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.START,MachineState.ERROR,#10-17 + MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#18-1f + 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.START,MachineState.START,#20-27 + MachineState.START,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START #28-2f +) + +EUCTW_CHAR_LEN_TABLE = (0, 0, 1, 2, 2, 2, 3) + +EUCTW_SM_MODEL = {'class_table': EUCTW_CLS, + 'class_factor': 7, + 'state_table': EUCTW_ST, + 'char_len_table': EUCTW_CHAR_LEN_TABLE, + 'name': 'x-euc-tw'} + +# GB2312 + +GB2312_CLS = ( + 1,1,1,1,1,1,1,1, # 00 - 07 + 1,1,1,1,1,1,0,0, # 08 - 0f + 1,1,1,1,1,1,1,1, # 10 - 17 + 1,1,1,0,1,1,1,1, # 18 - 1f + 1,1,1,1,1,1,1,1, # 20 - 27 + 1,1,1,1,1,1,1,1, # 28 - 2f + 3,3,3,3,3,3,3,3, # 30 - 37 + 3,3,1,1,1,1,1,1, # 38 - 3f + 2,2,2,2,2,2,2,2, # 40 - 47 + 2,2,2,2,2,2,2,2, # 48 - 4f + 2,2,2,2,2,2,2,2, # 50 - 57 + 2,2,2,2,2,2,2,2, # 58 - 5f + 2,2,2,2,2,2,2,2, # 60 - 67 + 2,2,2,2,2,2,2,2, # 68 - 6f + 2,2,2,2,2,2,2,2, # 70 - 77 + 2,2,2,2,2,2,2,4, # 78 - 7f + 5,6,6,6,6,6,6,6, # 80 - 87 + 6,6,6,6,6,6,6,6, # 88 - 8f + 6,6,6,6,6,6,6,6, # 90 - 97 + 6,6,6,6,6,6,6,6, # 98 - 9f + 6,6,6,6,6,6,6,6, # a0 - a7 + 
6,6,6,6,6,6,6,6, # a8 - af + 6,6,6,6,6,6,6,6, # b0 - b7 + 6,6,6,6,6,6,6,6, # b8 - bf + 6,6,6,6,6,6,6,6, # c0 - c7 + 6,6,6,6,6,6,6,6, # c8 - cf + 6,6,6,6,6,6,6,6, # d0 - d7 + 6,6,6,6,6,6,6,6, # d8 - df + 6,6,6,6,6,6,6,6, # e0 - e7 + 6,6,6,6,6,6,6,6, # e8 - ef + 6,6,6,6,6,6,6,6, # f0 - f7 + 6,6,6,6,6,6,6,0 # f8 - ff +) + +GB2312_ST = ( + MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START, 3,MachineState.ERROR,#00-07 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,#10-17 + 4,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#18-1f + MachineState.ERROR,MachineState.ERROR, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,#20-27 + MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START #28-2f +) + +# To be accurate, the length of class 6 can be either 2 or 4. +# But it is not necessary to discriminate between the two since +# it is used for frequency analysis only, and we are validating +# each code range there as well. So it is safe to set it to be +# 2 here. +GB2312_CHAR_LEN_TABLE = (0, 1, 1, 1, 1, 1, 2) + +GB2312_SM_MODEL = {'class_table': GB2312_CLS, + 'class_factor': 7, + 'state_table': GB2312_ST, + 'char_len_table': GB2312_CHAR_LEN_TABLE, + 'name': 'GB2312'} + +# Shift_JIS + +SJIS_CLS = ( + 1,1,1,1,1,1,1,1, # 00 - 07 + 1,1,1,1,1,1,0,0, # 08 - 0f + 1,1,1,1,1,1,1,1, # 10 - 17 + 1,1,1,0,1,1,1,1, # 18 - 1f + 1,1,1,1,1,1,1,1, # 20 - 27 + 1,1,1,1,1,1,1,1, # 28 - 2f + 1,1,1,1,1,1,1,1, # 30 - 37 + 1,1,1,1,1,1,1,1, # 38 - 3f + 2,2,2,2,2,2,2,2, # 40 - 47 + 2,2,2,2,2,2,2,2, # 48 - 4f + 2,2,2,2,2,2,2,2, # 50 - 57 + 2,2,2,2,2,2,2,2, # 58 - 5f + 2,2,2,2,2,2,2,2, # 60 - 67 + 2,2,2,2,2,2,2,2, # 68 - 6f + 2,2,2,2,2,2,2,2, # 70 - 77 + 2,2,2,2,2,2,2,1, # 78 - 7f + 3,3,3,3,3,2,2,3, # 80 - 87 + 3,3,3,3,3,3,3,3, # 88 - 8f + 3,3,3,3,3,3,3,3, # 90 - 97 + 3,3,3,3,3,3,3,3, # 98 - 9f + #0xa0 is illegal in sjis encoding, but some pages does + #contain such byte. We need to be more error forgiven. 
+ 2,2,2,2,2,2,2,2, # a0 - a7 + 2,2,2,2,2,2,2,2, # a8 - af + 2,2,2,2,2,2,2,2, # b0 - b7 + 2,2,2,2,2,2,2,2, # b8 - bf + 2,2,2,2,2,2,2,2, # c0 - c7 + 2,2,2,2,2,2,2,2, # c8 - cf + 2,2,2,2,2,2,2,2, # d0 - d7 + 2,2,2,2,2,2,2,2, # d8 - df + 3,3,3,3,3,3,3,3, # e0 - e7 + 3,3,3,3,3,4,4,4, # e8 - ef + 3,3,3,3,3,3,3,3, # f0 - f7 + 3,3,3,3,3,0,0,0) # f8 - ff + + +SJIS_ST = ( + MachineState.ERROR,MachineState.START,MachineState.START, 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START #10-17 +) + +SJIS_CHAR_LEN_TABLE = (0, 1, 1, 2, 0, 0) + +SJIS_SM_MODEL = {'class_table': SJIS_CLS, + 'class_factor': 6, + 'state_table': SJIS_ST, + 'char_len_table': SJIS_CHAR_LEN_TABLE, + 'name': 'Shift_JIS'} + +# UCS2-BE + +UCS2BE_CLS = ( + 0,0,0,0,0,0,0,0, # 00 - 07 + 0,0,1,0,0,2,0,0, # 08 - 0f + 0,0,0,0,0,0,0,0, # 10 - 17 + 0,0,0,3,0,0,0,0, # 18 - 1f + 0,0,0,0,0,0,0,0, # 20 - 27 + 0,3,3,3,3,3,0,0, # 28 - 2f + 0,0,0,0,0,0,0,0, # 30 - 37 + 0,0,0,0,0,0,0,0, # 38 - 3f + 0,0,0,0,0,0,0,0, # 40 - 47 + 0,0,0,0,0,0,0,0, # 48 - 4f + 0,0,0,0,0,0,0,0, # 50 - 57 + 0,0,0,0,0,0,0,0, # 58 - 5f + 0,0,0,0,0,0,0,0, # 60 - 67 + 0,0,0,0,0,0,0,0, # 68 - 6f + 0,0,0,0,0,0,0,0, # 70 - 77 + 0,0,0,0,0,0,0,0, # 78 - 7f + 0,0,0,0,0,0,0,0, # 80 - 87 + 0,0,0,0,0,0,0,0, # 88 - 8f + 0,0,0,0,0,0,0,0, # 90 - 97 + 0,0,0,0,0,0,0,0, # 98 - 9f + 0,0,0,0,0,0,0,0, # a0 - a7 + 0,0,0,0,0,0,0,0, # a8 - af + 0,0,0,0,0,0,0,0, # b0 - b7 + 0,0,0,0,0,0,0,0, # b8 - bf + 0,0,0,0,0,0,0,0, # c0 - c7 + 0,0,0,0,0,0,0,0, # c8 - cf + 0,0,0,0,0,0,0,0, # d0 - d7 + 0,0,0,0,0,0,0,0, # d8 - df + 0,0,0,0,0,0,0,0, # e0 - e7 + 0,0,0,0,0,0,0,0, # e8 - ef + 0,0,0,0,0,0,0,0, # f0 - f7 + 0,0,0,0,0,0,4,5 # f8 - ff +) + +UCS2BE_ST = ( + 5, 7, 7,MachineState.ERROR, 4, 3,MachineState.ERROR,MachineState.ERROR,#00-07 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f + MachineState.ITS_ME,MachineState.ITS_ME, 6, 6, 6, 6,MachineState.ERROR,MachineState.ERROR,#10-17 + 6, 6, 6, 6, 6,MachineState.ITS_ME, 6, 6,#18-1f + 6, 6, 6, 6, 5, 7, 7,MachineState.ERROR,#20-27 + 5, 8, 6, 6,MachineState.ERROR, 6, 6, 6,#28-2f + 6, 6, 6, 6,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START #30-37 +) + +UCS2BE_CHAR_LEN_TABLE = (2, 2, 2, 0, 2, 2) + +UCS2BE_SM_MODEL = {'class_table': UCS2BE_CLS, + 'class_factor': 6, + 'state_table': UCS2BE_ST, + 'char_len_table': UCS2BE_CHAR_LEN_TABLE, + 'name': 'UTF-16BE'} + +# UCS2-LE + +UCS2LE_CLS = ( + 0,0,0,0,0,0,0,0, # 00 - 07 + 0,0,1,0,0,2,0,0, # 08 - 0f + 0,0,0,0,0,0,0,0, # 10 - 17 + 0,0,0,3,0,0,0,0, # 18 - 1f + 0,0,0,0,0,0,0,0, # 20 - 27 + 0,3,3,3,3,3,0,0, # 28 - 2f + 0,0,0,0,0,0,0,0, # 30 - 37 + 0,0,0,0,0,0,0,0, # 38 - 3f + 0,0,0,0,0,0,0,0, # 40 - 47 + 0,0,0,0,0,0,0,0, # 48 - 4f + 0,0,0,0,0,0,0,0, # 50 - 57 + 0,0,0,0,0,0,0,0, # 58 - 5f + 0,0,0,0,0,0,0,0, # 60 - 67 + 0,0,0,0,0,0,0,0, # 68 - 6f + 0,0,0,0,0,0,0,0, # 70 - 77 + 0,0,0,0,0,0,0,0, # 78 - 7f + 0,0,0,0,0,0,0,0, # 80 - 87 + 0,0,0,0,0,0,0,0, # 88 - 8f + 0,0,0,0,0,0,0,0, # 90 - 97 + 0,0,0,0,0,0,0,0, # 98 - 9f + 0,0,0,0,0,0,0,0, # a0 - a7 + 0,0,0,0,0,0,0,0, # a8 - af + 0,0,0,0,0,0,0,0, # b0 - b7 + 0,0,0,0,0,0,0,0, # b8 - bf + 
0,0,0,0,0,0,0,0, # c0 - c7 + 0,0,0,0,0,0,0,0, # c8 - cf + 0,0,0,0,0,0,0,0, # d0 - d7 + 0,0,0,0,0,0,0,0, # d8 - df + 0,0,0,0,0,0,0,0, # e0 - e7 + 0,0,0,0,0,0,0,0, # e8 - ef + 0,0,0,0,0,0,0,0, # f0 - f7 + 0,0,0,0,0,0,4,5 # f8 - ff +) + +UCS2LE_ST = ( + 6, 6, 7, 6, 4, 3,MachineState.ERROR,MachineState.ERROR,#00-07 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f + MachineState.ITS_ME,MachineState.ITS_ME, 5, 5, 5,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,#10-17 + 5, 5, 5,MachineState.ERROR, 5,MachineState.ERROR, 6, 6,#18-1f + 7, 6, 8, 8, 5, 5, 5,MachineState.ERROR,#20-27 + 5, 5, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 5, 5,#28-2f + 5, 5, 5,MachineState.ERROR, 5,MachineState.ERROR,MachineState.START,MachineState.START #30-37 +) + +UCS2LE_CHAR_LEN_TABLE = (2, 2, 2, 2, 2, 2) + +UCS2LE_SM_MODEL = {'class_table': UCS2LE_CLS, + 'class_factor': 6, + 'state_table': UCS2LE_ST, + 'char_len_table': UCS2LE_CHAR_LEN_TABLE, + 'name': 'UTF-16LE'} + +# UTF-8 + +UTF8_CLS = ( + 1,1,1,1,1,1,1,1, # 00 - 07 #allow 0x00 as a legal value + 1,1,1,1,1,1,0,0, # 08 - 0f + 1,1,1,1,1,1,1,1, # 10 - 17 + 1,1,1,0,1,1,1,1, # 18 - 1f + 1,1,1,1,1,1,1,1, # 20 - 27 + 1,1,1,1,1,1,1,1, # 28 - 2f + 1,1,1,1,1,1,1,1, # 30 - 37 + 1,1,1,1,1,1,1,1, # 38 - 3f + 1,1,1,1,1,1,1,1, # 40 - 47 + 1,1,1,1,1,1,1,1, # 48 - 4f + 1,1,1,1,1,1,1,1, # 50 - 57 + 1,1,1,1,1,1,1,1, # 58 - 5f + 1,1,1,1,1,1,1,1, # 60 - 67 + 1,1,1,1,1,1,1,1, # 68 - 6f + 1,1,1,1,1,1,1,1, # 70 - 77 + 1,1,1,1,1,1,1,1, # 78 - 7f + 2,2,2,2,3,3,3,3, # 80 - 87 + 4,4,4,4,4,4,4,4, # 88 - 8f + 4,4,4,4,4,4,4,4, # 90 - 97 + 4,4,4,4,4,4,4,4, # 98 - 9f + 5,5,5,5,5,5,5,5, # a0 - a7 + 5,5,5,5,5,5,5,5, # a8 - af + 5,5,5,5,5,5,5,5, # b0 - b7 + 5,5,5,5,5,5,5,5, # b8 - bf + 0,0,6,6,6,6,6,6, # c0 - c7 + 6,6,6,6,6,6,6,6, # c8 - cf + 6,6,6,6,6,6,6,6, # d0 - d7 + 6,6,6,6,6,6,6,6, # d8 - df + 7,8,8,8,8,8,8,8, # e0 - e7 + 8,8,8,8,8,9,8,8, # e8 - ef + 10,11,11,11,11,11,11,11, # f0 - f7 + 12,13,13,13,14,15,0,0 # f8 - ff +) + +UTF8_ST = ( + MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 12, 10,#00-07 + 9, 11, 8, 7, 6, 5, 4, 3,#08-0f + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#10-17 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#18-1f + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#20-27 + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#28-2f + MachineState.ERROR,MachineState.ERROR, 5, 5, 5, 5,MachineState.ERROR,MachineState.ERROR,#30-37 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#38-3f + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 5, 5, 5,MachineState.ERROR,MachineState.ERROR,#40-47 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#48-4f + MachineState.ERROR,MachineState.ERROR, 7, 7, 7, 
7,MachineState.ERROR,MachineState.ERROR,#50-57 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#58-5f + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 7, 7,MachineState.ERROR,MachineState.ERROR,#60-67 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#68-6f + MachineState.ERROR,MachineState.ERROR, 9, 9, 9, 9,MachineState.ERROR,MachineState.ERROR,#70-77 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#78-7f + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 9,MachineState.ERROR,MachineState.ERROR,#80-87 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#88-8f + MachineState.ERROR,MachineState.ERROR, 12, 12, 12, 12,MachineState.ERROR,MachineState.ERROR,#90-97 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#98-9f + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 12,MachineState.ERROR,MachineState.ERROR,#a0-a7 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#a8-af + MachineState.ERROR,MachineState.ERROR, 12, 12, 12,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#b0-b7 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#b8-bf + MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,#c0-c7 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR #c8-cf +) + +UTF8_CHAR_LEN_TABLE = (0, 1, 0, 0, 0, 0, 2, 3, 3, 3, 4, 4, 5, 5, 6, 6) + +UTF8_SM_MODEL = {'class_table': UTF8_CLS, + 'class_factor': 16, + 'state_table': UTF8_ST, + 'char_len_table': UTF8_CHAR_LEN_TABLE, + 'name': 'UTF-8'} diff --git a/env/lib/python3.6/site-packages/pip/_vendor/chardet/sbcharsetprober.py b/env/lib/python3.6/site-packages/pip/_vendor/chardet/sbcharsetprober.py new file mode 100644 index 0000000..0adb51d --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/chardet/sbcharsetprober.py @@ -0,0 +1,132 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. 
+# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetprober import CharSetProber +from .enums import CharacterCategory, ProbingState, SequenceLikelihood + + +class SingleByteCharSetProber(CharSetProber): + SAMPLE_SIZE = 64 + SB_ENOUGH_REL_THRESHOLD = 1024 # 0.25 * SAMPLE_SIZE^2 + POSITIVE_SHORTCUT_THRESHOLD = 0.95 + NEGATIVE_SHORTCUT_THRESHOLD = 0.05 + + def __init__(self, model, reversed=False, name_prober=None): + super(SingleByteCharSetProber, self).__init__() + self._model = model + # TRUE if we need to reverse every pair in the model lookup + self._reversed = reversed + # Optional auxiliary prober for name decision + self._name_prober = name_prober + self._last_order = None + self._seq_counters = None + self._total_seqs = None + self._total_char = None + self._freq_char = None + self.reset() + + def reset(self): + super(SingleByteCharSetProber, self).reset() + # char order of last character + self._last_order = 255 + self._seq_counters = [0] * SequenceLikelihood.get_num_categories() + self._total_seqs = 0 + self._total_char = 0 + # characters that fall in our sampling range + self._freq_char = 0 + + @property + def charset_name(self): + if self._name_prober: + return self._name_prober.charset_name + else: + return self._model['charset_name'] + + @property + def language(self): + if self._name_prober: + return self._name_prober.language + else: + return self._model.get('language') + + def feed(self, byte_str): + if not self._model['keep_english_letter']: + byte_str = self.filter_international_words(byte_str) + if not byte_str: + return self.state + char_to_order_map = self._model['char_to_order_map'] + for i, c in enumerate(byte_str): + # XXX: Order is in range 1-64, so one would think we want 0-63 here, + # but that leads to 27 more test failures than before. + order = char_to_order_map[c] + # XXX: This was SYMBOL_CAT_ORDER before, with a value of 250, but + # CharacterCategory.SYMBOL is actually 253, so we use CONTROL + # to make it closer to the original intent. The only difference + # is whether or not we count digits and control characters for + # _total_char purposes. 
+ if order < CharacterCategory.CONTROL: + self._total_char += 1 + if order < self.SAMPLE_SIZE: + self._freq_char += 1 + if self._last_order < self.SAMPLE_SIZE: + self._total_seqs += 1 + if not self._reversed: + i = (self._last_order * self.SAMPLE_SIZE) + order + model = self._model['precedence_matrix'][i] + else: # reverse the order of the letters in the lookup + i = (order * self.SAMPLE_SIZE) + self._last_order + model = self._model['precedence_matrix'][i] + self._seq_counters[model] += 1 + self._last_order = order + + charset_name = self._model['charset_name'] + if self.state == ProbingState.DETECTING: + if self._total_seqs > self.SB_ENOUGH_REL_THRESHOLD: + confidence = self.get_confidence() + if confidence > self.POSITIVE_SHORTCUT_THRESHOLD: + self.logger.debug('%s confidence = %s, we have a winner', + charset_name, confidence) + self._state = ProbingState.FOUND_IT + elif confidence < self.NEGATIVE_SHORTCUT_THRESHOLD: + self.logger.debug('%s confidence = %s, below negative ' + 'shortcut threshhold %s', charset_name, + confidence, + self.NEGATIVE_SHORTCUT_THRESHOLD) + self._state = ProbingState.NOT_ME + + return self.state + + def get_confidence(self): + r = 0.01 + if self._total_seqs > 0: + r = ((1.0 * self._seq_counters[SequenceLikelihood.POSITIVE]) / + self._total_seqs / self._model['typical_positive_ratio']) + r = r * self._freq_char / self._total_char + if r >= 1.0: + r = 0.99 + return r diff --git a/env/lib/python3.6/site-packages/pip/_vendor/chardet/sbcsgroupprober.py b/env/lib/python3.6/site-packages/pip/_vendor/chardet/sbcsgroupprober.py new file mode 100644 index 0000000..98e95dc --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/chardet/sbcsgroupprober.py @@ -0,0 +1,73 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetgroupprober import CharSetGroupProber +from .sbcharsetprober import SingleByteCharSetProber +from .langcyrillicmodel import (Win1251CyrillicModel, Koi8rModel, + Latin5CyrillicModel, MacCyrillicModel, + Ibm866Model, Ibm855Model) +from .langgreekmodel import Latin7GreekModel, Win1253GreekModel +from .langbulgarianmodel import Latin5BulgarianModel, Win1251BulgarianModel +# from .langhungarianmodel import Latin2HungarianModel, Win1250HungarianModel +from .langthaimodel import TIS620ThaiModel +from .langhebrewmodel import Win1255HebrewModel +from .hebrewprober import HebrewProber +from .langturkishmodel import Latin5TurkishModel + + +class SBCSGroupProber(CharSetGroupProber): + def __init__(self): + super(SBCSGroupProber, self).__init__() + self.probers = [ + SingleByteCharSetProber(Win1251CyrillicModel), + SingleByteCharSetProber(Koi8rModel), + SingleByteCharSetProber(Latin5CyrillicModel), + SingleByteCharSetProber(MacCyrillicModel), + SingleByteCharSetProber(Ibm866Model), + SingleByteCharSetProber(Ibm855Model), + SingleByteCharSetProber(Latin7GreekModel), + SingleByteCharSetProber(Win1253GreekModel), + SingleByteCharSetProber(Latin5BulgarianModel), + SingleByteCharSetProber(Win1251BulgarianModel), + # TODO: Restore Hungarian encodings (iso-8859-2 and windows-1250) + # after we retrain model. + # SingleByteCharSetProber(Latin2HungarianModel), + # SingleByteCharSetProber(Win1250HungarianModel), + SingleByteCharSetProber(TIS620ThaiModel), + SingleByteCharSetProber(Latin5TurkishModel), + ] + hebrew_prober = HebrewProber() + logical_hebrew_prober = SingleByteCharSetProber(Win1255HebrewModel, + False, hebrew_prober) + visual_hebrew_prober = SingleByteCharSetProber(Win1255HebrewModel, True, + hebrew_prober) + hebrew_prober.set_model_probers(logical_hebrew_prober, visual_hebrew_prober) + self.probers.extend([hebrew_prober, logical_hebrew_prober, + visual_hebrew_prober]) + + self.reset() diff --git a/env/lib/python3.6/site-packages/pip/_vendor/chardet/sjisprober.py b/env/lib/python3.6/site-packages/pip/_vendor/chardet/sjisprober.py new file mode 100644 index 0000000..9e29623 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/chardet/sjisprober.py @@ -0,0 +1,92 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .mbcharsetprober import MultiByteCharSetProber +from .codingstatemachine import CodingStateMachine +from .chardistribution import SJISDistributionAnalysis +from .jpcntx import SJISContextAnalysis +from .mbcssm import SJIS_SM_MODEL +from .enums import ProbingState, MachineState + + +class SJISProber(MultiByteCharSetProber): + def __init__(self): + super(SJISProber, self).__init__() + self.coding_sm = CodingStateMachine(SJIS_SM_MODEL) + self.distribution_analyzer = SJISDistributionAnalysis() + self.context_analyzer = SJISContextAnalysis() + self.reset() + + def reset(self): + super(SJISProber, self).reset() + self.context_analyzer.reset() + + @property + def charset_name(self): + return self.context_analyzer.charset_name + + @property + def language(self): + return "Japanese" + + def feed(self, byte_str): + for i in range(len(byte_str)): + coding_state = self.coding_sm.next_state(byte_str[i]) + if coding_state == MachineState.ERROR: + self.logger.debug('%s %s prober hit error at byte %s', + self.charset_name, self.language, i) + self._state = ProbingState.NOT_ME + break + elif coding_state == MachineState.ITS_ME: + self._state = ProbingState.FOUND_IT + break + elif coding_state == MachineState.START: + char_len = self.coding_sm.get_current_charlen() + if i == 0: + self._last_char[1] = byte_str[0] + self.context_analyzer.feed(self._last_char[2 - char_len:], + char_len) + self.distribution_analyzer.feed(self._last_char, char_len) + else: + self.context_analyzer.feed(byte_str[i + 1 - char_len:i + 3 + - char_len], char_len) + self.distribution_analyzer.feed(byte_str[i - 1:i + 1], + char_len) + + self._last_char[0] = byte_str[-1] + + if self.state == ProbingState.DETECTING: + if (self.context_analyzer.got_enough_data() and + (self.get_confidence() > self.SHORTCUT_THRESHOLD)): + self._state = ProbingState.FOUND_IT + + return self.state + + def get_confidence(self): + context_conf = self.context_analyzer.get_confidence() + distrib_conf = self.distribution_analyzer.get_confidence() + return max(context_conf, distrib_conf) diff --git a/env/lib/python3.6/site-packages/pip/_vendor/chardet/universaldetector.py b/env/lib/python3.6/site-packages/pip/_vendor/chardet/universaldetector.py new file mode 100644 index 0000000..7b4e92d --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/chardet/universaldetector.py @@ -0,0 +1,286 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### +""" +Module containing the UniversalDetector detector class, which is the primary +class a user of ``chardet`` should use. + +:author: Mark Pilgrim (initial port to Python) +:author: Shy Shalom (original C code) +:author: Dan Blanchard (major refactoring for 3.0) +:author: Ian Cordasco +""" + + +import codecs +import logging +import re + +from .charsetgroupprober import CharSetGroupProber +from .enums import InputState, LanguageFilter, ProbingState +from .escprober import EscCharSetProber +from .latin1prober import Latin1Prober +from .mbcsgroupprober import MBCSGroupProber +from .sbcsgroupprober import SBCSGroupProber + + +class UniversalDetector(object): + """ + The ``UniversalDetector`` class underlies the ``chardet.detect`` function + and coordinates all of the different charset probers. + + To get a ``dict`` containing an encoding and its confidence, you can simply + run: + + .. code:: + + u = UniversalDetector() + u.feed(some_bytes) + u.close() + detected = u.result + + """ + + MINIMUM_THRESHOLD = 0.20 + HIGH_BYTE_DETECTOR = re.compile(b'[\x80-\xFF]') + ESC_DETECTOR = re.compile(b'(\033|~{)') + WIN_BYTE_DETECTOR = re.compile(b'[\x80-\x9F]') + ISO_WIN_MAP = {'iso-8859-1': 'Windows-1252', + 'iso-8859-2': 'Windows-1250', + 'iso-8859-5': 'Windows-1251', + 'iso-8859-6': 'Windows-1256', + 'iso-8859-7': 'Windows-1253', + 'iso-8859-8': 'Windows-1255', + 'iso-8859-9': 'Windows-1254', + 'iso-8859-13': 'Windows-1257'} + + def __init__(self, lang_filter=LanguageFilter.ALL): + self._esc_charset_prober = None + self._charset_probers = [] + self.result = None + self.done = None + self._got_data = None + self._input_state = None + self._last_char = None + self.lang_filter = lang_filter + self.logger = logging.getLogger(__name__) + self._has_win_bytes = None + self.reset() + + def reset(self): + """ + Reset the UniversalDetector and all of its probers back to their + initial states. This is called by ``__init__``, so you only need to + call this directly in between analyses of different documents. + """ + self.result = {'encoding': None, 'confidence': 0.0, 'language': None} + self.done = False + self._got_data = False + self._has_win_bytes = False + self._input_state = InputState.PURE_ASCII + self._last_char = b'' + if self._esc_charset_prober: + self._esc_charset_prober.reset() + for prober in self._charset_probers: + prober.reset() + + def feed(self, byte_str): + """ + Takes a chunk of a document and feeds it through all of the relevant + charset probers. + + After calling ``feed``, you can check the value of the ``done`` + attribute to see if you need to continue feeding the + ``UniversalDetector`` more data, or if it has made a prediction + (in the ``result`` attribute). + + .. note:: + You should always call ``close`` when you're done feeding in your + document if ``done`` is not already ``True``. 
+ """ + if self.done: + return + + if not len(byte_str): + return + + if not isinstance(byte_str, bytearray): + byte_str = bytearray(byte_str) + + # First check for known BOMs, since these are guaranteed to be correct + if not self._got_data: + # If the data starts with BOM, we know it is UTF + if byte_str.startswith(codecs.BOM_UTF8): + # EF BB BF UTF-8 with BOM + self.result = {'encoding': "UTF-8-SIG", + 'confidence': 1.0, + 'language': ''} + elif byte_str.startswith((codecs.BOM_UTF32_LE, + codecs.BOM_UTF32_BE)): + # FF FE 00 00 UTF-32, little-endian BOM + # 00 00 FE FF UTF-32, big-endian BOM + self.result = {'encoding': "UTF-32", + 'confidence': 1.0, + 'language': ''} + elif byte_str.startswith(b'\xFE\xFF\x00\x00'): + # FE FF 00 00 UCS-4, unusual octet order BOM (3412) + self.result = {'encoding': "X-ISO-10646-UCS-4-3412", + 'confidence': 1.0, + 'language': ''} + elif byte_str.startswith(b'\x00\x00\xFF\xFE'): + # 00 00 FF FE UCS-4, unusual octet order BOM (2143) + self.result = {'encoding': "X-ISO-10646-UCS-4-2143", + 'confidence': 1.0, + 'language': ''} + elif byte_str.startswith((codecs.BOM_LE, codecs.BOM_BE)): + # FF FE UTF-16, little endian BOM + # FE FF UTF-16, big endian BOM + self.result = {'encoding': "UTF-16", + 'confidence': 1.0, + 'language': ''} + + self._got_data = True + if self.result['encoding'] is not None: + self.done = True + return + + # If none of those matched and we've only see ASCII so far, check + # for high bytes and escape sequences + if self._input_state == InputState.PURE_ASCII: + if self.HIGH_BYTE_DETECTOR.search(byte_str): + self._input_state = InputState.HIGH_BYTE + elif self._input_state == InputState.PURE_ASCII and \ + self.ESC_DETECTOR.search(self._last_char + byte_str): + self._input_state = InputState.ESC_ASCII + + self._last_char = byte_str[-1:] + + # If we've seen escape sequences, use the EscCharSetProber, which + # uses a simple state machine to check for known escape sequences in + # HZ and ISO-2022 encodings, since those are the only encodings that + # use such sequences. + if self._input_state == InputState.ESC_ASCII: + if not self._esc_charset_prober: + self._esc_charset_prober = EscCharSetProber(self.lang_filter) + if self._esc_charset_prober.feed(byte_str) == ProbingState.FOUND_IT: + self.result = {'encoding': + self._esc_charset_prober.charset_name, + 'confidence': + self._esc_charset_prober.get_confidence(), + 'language': + self._esc_charset_prober.language} + self.done = True + # If we've seen high bytes (i.e., those with values greater than 127), + # we need to do more complicated checks using all our multi-byte and + # single-byte probers that are left. The single-byte probers + # use character bigram distributions to determine the encoding, whereas + # the multi-byte probers use a combination of character unigram and + # bigram distributions. 
+ elif self._input_state == InputState.HIGH_BYTE: + if not self._charset_probers: + self._charset_probers = [MBCSGroupProber(self.lang_filter)] + # If we're checking non-CJK encodings, use single-byte prober + if self.lang_filter & LanguageFilter.NON_CJK: + self._charset_probers.append(SBCSGroupProber()) + self._charset_probers.append(Latin1Prober()) + for prober in self._charset_probers: + if prober.feed(byte_str) == ProbingState.FOUND_IT: + self.result = {'encoding': prober.charset_name, + 'confidence': prober.get_confidence(), + 'language': prober.language} + self.done = True + break + if self.WIN_BYTE_DETECTOR.search(byte_str): + self._has_win_bytes = True + + def close(self): + """ + Stop analyzing the current document and come up with a final + prediction. + + :returns: The ``result`` attribute, a ``dict`` with the keys + `encoding`, `confidence`, and `language`. + """ + # Don't bother with checks if we're already done + if self.done: + return self.result + self.done = True + + if not self._got_data: + self.logger.debug('no data received!') + + # Default to ASCII if it is all we've seen so far + elif self._input_state == InputState.PURE_ASCII: + self.result = {'encoding': 'ascii', + 'confidence': 1.0, + 'language': ''} + + # If we have seen non-ASCII, return the best that met MINIMUM_THRESHOLD + elif self._input_state == InputState.HIGH_BYTE: + prober_confidence = None + max_prober_confidence = 0.0 + max_prober = None + for prober in self._charset_probers: + if not prober: + continue + prober_confidence = prober.get_confidence() + if prober_confidence > max_prober_confidence: + max_prober_confidence = prober_confidence + max_prober = prober + if max_prober and (max_prober_confidence > self.MINIMUM_THRESHOLD): + charset_name = max_prober.charset_name + lower_charset_name = max_prober.charset_name.lower() + confidence = max_prober.get_confidence() + # Use Windows encoding name instead of ISO-8859 if we saw any + # extra Windows-specific bytes + if lower_charset_name.startswith('iso-8859'): + if self._has_win_bytes: + charset_name = self.ISO_WIN_MAP.get(lower_charset_name, + charset_name) + self.result = {'encoding': charset_name, + 'confidence': confidence, + 'language': max_prober.language} + + # Log all prober confidences if none met MINIMUM_THRESHOLD + if self.logger.getEffectiveLevel() == logging.DEBUG: + if self.result['encoding'] is None: + self.logger.debug('no probers hit minimum threshold') + for group_prober in self._charset_probers: + if not group_prober: + continue + if isinstance(group_prober, CharSetGroupProber): + for prober in group_prober.probers: + self.logger.debug('%s %s confidence = %s', + prober.charset_name, + prober.language, + prober.get_confidence()) + else: + self.logger.debug('%s %s confidence = %s', + prober.charset_name, + prober.language, + prober.get_confidence()) + return self.result diff --git a/env/lib/python3.6/site-packages/pip/_vendor/chardet/utf8prober.py b/env/lib/python3.6/site-packages/pip/_vendor/chardet/utf8prober.py new file mode 100644 index 0000000..6c3196c --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/chardet/utf8prober.py @@ -0,0 +1,82 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. 
+# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetprober import CharSetProber +from .enums import ProbingState, MachineState +from .codingstatemachine import CodingStateMachine +from .mbcssm import UTF8_SM_MODEL + + + +class UTF8Prober(CharSetProber): + ONE_CHAR_PROB = 0.5 + + def __init__(self): + super(UTF8Prober, self).__init__() + self.coding_sm = CodingStateMachine(UTF8_SM_MODEL) + self._num_mb_chars = None + self.reset() + + def reset(self): + super(UTF8Prober, self).reset() + self.coding_sm.reset() + self._num_mb_chars = 0 + + @property + def charset_name(self): + return "utf-8" + + @property + def language(self): + return "" + + def feed(self, byte_str): + for c in byte_str: + coding_state = self.coding_sm.next_state(c) + if coding_state == MachineState.ERROR: + self._state = ProbingState.NOT_ME + break + elif coding_state == MachineState.ITS_ME: + self._state = ProbingState.FOUND_IT + break + elif coding_state == MachineState.START: + if self.coding_sm.get_current_charlen() >= 2: + self._num_mb_chars += 1 + + if self.state == ProbingState.DETECTING: + if self.get_confidence() > self.SHORTCUT_THRESHOLD: + self._state = ProbingState.FOUND_IT + + return self.state + + def get_confidence(self): + unlike = 0.99 + if self._num_mb_chars < 6: + unlike *= self.ONE_CHAR_PROB ** self._num_mb_chars + return 1.0 - unlike + else: + return unlike diff --git a/env/lib/python3.6/site-packages/pip/_vendor/chardet/version.py b/env/lib/python3.6/site-packages/pip/_vendor/chardet/version.py new file mode 100644 index 0000000..bb2a34a --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/chardet/version.py @@ -0,0 +1,9 @@ +""" +This module exists only to simplify retrieving the version number of chardet +from within setup.py and from chardet subpackages. + +:author: Dan Blanchard (dan.blanchard@gmail.com) +""" + +__version__ = "3.0.4" +VERSION = __version__.split('.') diff --git a/env/lib/python3.6/site-packages/pip/_vendor/colorama/ansi.py b/env/lib/python3.6/site-packages/pip/_vendor/colorama/ansi.py new file mode 100644 index 0000000..7877658 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/colorama/ansi.py @@ -0,0 +1,102 @@ +# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. +''' +This module generates ANSI character codes to printing colors to terminals. 
+See: http://en.wikipedia.org/wiki/ANSI_escape_code +''' + +CSI = '\033[' +OSC = '\033]' +BEL = '\007' + + +def code_to_chars(code): + return CSI + str(code) + 'm' + +def set_title(title): + return OSC + '2;' + title + BEL + +def clear_screen(mode=2): + return CSI + str(mode) + 'J' + +def clear_line(mode=2): + return CSI + str(mode) + 'K' + + +class AnsiCodes(object): + def __init__(self): + # the subclasses declare class attributes which are numbers. + # Upon instantiation we define instance attributes, which are the same + # as the class attributes but wrapped with the ANSI escape sequence + for name in dir(self): + if not name.startswith('_'): + value = getattr(self, name) + setattr(self, name, code_to_chars(value)) + + +class AnsiCursor(object): + def UP(self, n=1): + return CSI + str(n) + 'A' + def DOWN(self, n=1): + return CSI + str(n) + 'B' + def FORWARD(self, n=1): + return CSI + str(n) + 'C' + def BACK(self, n=1): + return CSI + str(n) + 'D' + def POS(self, x=1, y=1): + return CSI + str(y) + ';' + str(x) + 'H' + + +class AnsiFore(AnsiCodes): + BLACK = 30 + RED = 31 + GREEN = 32 + YELLOW = 33 + BLUE = 34 + MAGENTA = 35 + CYAN = 36 + WHITE = 37 + RESET = 39 + + # These are fairly well supported, but not part of the standard. + LIGHTBLACK_EX = 90 + LIGHTRED_EX = 91 + LIGHTGREEN_EX = 92 + LIGHTYELLOW_EX = 93 + LIGHTBLUE_EX = 94 + LIGHTMAGENTA_EX = 95 + LIGHTCYAN_EX = 96 + LIGHTWHITE_EX = 97 + + +class AnsiBack(AnsiCodes): + BLACK = 40 + RED = 41 + GREEN = 42 + YELLOW = 43 + BLUE = 44 + MAGENTA = 45 + CYAN = 46 + WHITE = 47 + RESET = 49 + + # These are fairly well supported, but not part of the standard. + LIGHTBLACK_EX = 100 + LIGHTRED_EX = 101 + LIGHTGREEN_EX = 102 + LIGHTYELLOW_EX = 103 + LIGHTBLUE_EX = 104 + LIGHTMAGENTA_EX = 105 + LIGHTCYAN_EX = 106 + LIGHTWHITE_EX = 107 + + +class AnsiStyle(AnsiCodes): + BRIGHT = 1 + DIM = 2 + NORMAL = 22 + RESET_ALL = 0 + +Fore = AnsiFore() +Back = AnsiBack() +Style = AnsiStyle() +Cursor = AnsiCursor() diff --git a/env/lib/python3.6/site-packages/pip/_vendor/colorama/ansitowin32.py b/env/lib/python3.6/site-packages/pip/_vendor/colorama/ansitowin32.py new file mode 100644 index 0000000..1d6e605 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/colorama/ansitowin32.py @@ -0,0 +1,236 @@ +# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. +import re +import sys +import os + +from .ansi import AnsiFore, AnsiBack, AnsiStyle, Style +from .winterm import WinTerm, WinColor, WinStyle +from .win32 import windll, winapi_test + + +winterm = None +if windll is not None: + winterm = WinTerm() + + +def is_stream_closed(stream): + return not hasattr(stream, 'closed') or stream.closed + + +def is_a_tty(stream): + return hasattr(stream, 'isatty') and stream.isatty() + + +class StreamWrapper(object): + ''' + Wraps a stream (such as stdout), acting as a transparent proxy for all + attribute access apart from method 'write()', which is delegated to our + Converter instance. + ''' + def __init__(self, wrapped, converter): + # double-underscore everything to prevent clashes with names of + # attributes on the wrapped stream object. + self.__wrapped = wrapped + self.__convertor = converter + + def __getattr__(self, name): + return getattr(self.__wrapped, name) + + def write(self, text): + self.__convertor.write(text) + + +class AnsiToWin32(object): + ''' + Implements a 'write()' method which, on Windows, will strip ANSI character + sequences from the text, and if outputting to a tty, will convert them into + win32 function calls. 
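As a brief aside on the ansi.py definitions above: each attribute of Fore, Back, Style and Cursor is just the numeric code wrapped into an escape sequence by code_to_chars, so the strings can be checked directly. A small sketch, assuming the usual top-level colorama package exports these objects:

    from colorama import Cursor, Fore, Style

    assert Fore.RED == '\033[31m'          # code_to_chars(31)
    assert Style.RESET_ALL == '\033[0m'    # code_to_chars(0)
    assert Cursor.UP(2) == '\033[2A'       # CSI + '2' + 'A'
    print(Fore.RED + 'error' + Style.RESET_ALL)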
+ ''' + ANSI_CSI_RE = re.compile('\001?\033\\[((?:\\d|;)*)([a-zA-Z])\002?') # Control Sequence Introducer + ANSI_OSC_RE = re.compile('\001?\033\\]((?:.|;)*?)(\x07)\002?') # Operating System Command + + def __init__(self, wrapped, convert=None, strip=None, autoreset=False): + # The wrapped stream (normally sys.stdout or sys.stderr) + self.wrapped = wrapped + + # should we reset colors to defaults after every .write() + self.autoreset = autoreset + + # create the proxy wrapping our output stream + self.stream = StreamWrapper(wrapped, self) + + on_windows = os.name == 'nt' + # We test if the WinAPI works, because even if we are on Windows + # we may be using a terminal that doesn't support the WinAPI + # (e.g. Cygwin Terminal). In this case it's up to the terminal + # to support the ANSI codes. + conversion_supported = on_windows and winapi_test() + + # should we strip ANSI sequences from our output? + if strip is None: + strip = conversion_supported or (not is_stream_closed(wrapped) and not is_a_tty(wrapped)) + self.strip = strip + + # should we should convert ANSI sequences into win32 calls? + if convert is None: + convert = conversion_supported and not is_stream_closed(wrapped) and is_a_tty(wrapped) + self.convert = convert + + # dict of ansi codes to win32 functions and parameters + self.win32_calls = self.get_win32_calls() + + # are we wrapping stderr? + self.on_stderr = self.wrapped is sys.stderr + + def should_wrap(self): + ''' + True if this class is actually needed. If false, then the output + stream will not be affected, nor will win32 calls be issued, so + wrapping stdout is not actually required. This will generally be + False on non-Windows platforms, unless optional functionality like + autoreset has been requested using kwargs to init() + ''' + return self.convert or self.strip or self.autoreset + + def get_win32_calls(self): + if self.convert and winterm: + return { + AnsiStyle.RESET_ALL: (winterm.reset_all, ), + AnsiStyle.BRIGHT: (winterm.style, WinStyle.BRIGHT), + AnsiStyle.DIM: (winterm.style, WinStyle.NORMAL), + AnsiStyle.NORMAL: (winterm.style, WinStyle.NORMAL), + AnsiFore.BLACK: (winterm.fore, WinColor.BLACK), + AnsiFore.RED: (winterm.fore, WinColor.RED), + AnsiFore.GREEN: (winterm.fore, WinColor.GREEN), + AnsiFore.YELLOW: (winterm.fore, WinColor.YELLOW), + AnsiFore.BLUE: (winterm.fore, WinColor.BLUE), + AnsiFore.MAGENTA: (winterm.fore, WinColor.MAGENTA), + AnsiFore.CYAN: (winterm.fore, WinColor.CYAN), + AnsiFore.WHITE: (winterm.fore, WinColor.GREY), + AnsiFore.RESET: (winterm.fore, ), + AnsiFore.LIGHTBLACK_EX: (winterm.fore, WinColor.BLACK, True), + AnsiFore.LIGHTRED_EX: (winterm.fore, WinColor.RED, True), + AnsiFore.LIGHTGREEN_EX: (winterm.fore, WinColor.GREEN, True), + AnsiFore.LIGHTYELLOW_EX: (winterm.fore, WinColor.YELLOW, True), + AnsiFore.LIGHTBLUE_EX: (winterm.fore, WinColor.BLUE, True), + AnsiFore.LIGHTMAGENTA_EX: (winterm.fore, WinColor.MAGENTA, True), + AnsiFore.LIGHTCYAN_EX: (winterm.fore, WinColor.CYAN, True), + AnsiFore.LIGHTWHITE_EX: (winterm.fore, WinColor.GREY, True), + AnsiBack.BLACK: (winterm.back, WinColor.BLACK), + AnsiBack.RED: (winterm.back, WinColor.RED), + AnsiBack.GREEN: (winterm.back, WinColor.GREEN), + AnsiBack.YELLOW: (winterm.back, WinColor.YELLOW), + AnsiBack.BLUE: (winterm.back, WinColor.BLUE), + AnsiBack.MAGENTA: (winterm.back, WinColor.MAGENTA), + AnsiBack.CYAN: (winterm.back, WinColor.CYAN), + AnsiBack.WHITE: (winterm.back, WinColor.GREY), + AnsiBack.RESET: (winterm.back, ), + AnsiBack.LIGHTBLACK_EX: (winterm.back, WinColor.BLACK, 
True), + AnsiBack.LIGHTRED_EX: (winterm.back, WinColor.RED, True), + AnsiBack.LIGHTGREEN_EX: (winterm.back, WinColor.GREEN, True), + AnsiBack.LIGHTYELLOW_EX: (winterm.back, WinColor.YELLOW, True), + AnsiBack.LIGHTBLUE_EX: (winterm.back, WinColor.BLUE, True), + AnsiBack.LIGHTMAGENTA_EX: (winterm.back, WinColor.MAGENTA, True), + AnsiBack.LIGHTCYAN_EX: (winterm.back, WinColor.CYAN, True), + AnsiBack.LIGHTWHITE_EX: (winterm.back, WinColor.GREY, True), + } + return dict() + + def write(self, text): + if self.strip or self.convert: + self.write_and_convert(text) + else: + self.wrapped.write(text) + self.wrapped.flush() + if self.autoreset: + self.reset_all() + + + def reset_all(self): + if self.convert: + self.call_win32('m', (0,)) + elif not self.strip and not is_stream_closed(self.wrapped): + self.wrapped.write(Style.RESET_ALL) + + + def write_and_convert(self, text): + ''' + Write the given text to our wrapped stream, stripping any ANSI + sequences from the text, and optionally converting them into win32 + calls. + ''' + cursor = 0 + text = self.convert_osc(text) + for match in self.ANSI_CSI_RE.finditer(text): + start, end = match.span() + self.write_plain_text(text, cursor, start) + self.convert_ansi(*match.groups()) + cursor = end + self.write_plain_text(text, cursor, len(text)) + + + def write_plain_text(self, text, start, end): + if start < end: + self.wrapped.write(text[start:end]) + self.wrapped.flush() + + + def convert_ansi(self, paramstring, command): + if self.convert: + params = self.extract_params(command, paramstring) + self.call_win32(command, params) + + + def extract_params(self, command, paramstring): + if command in 'Hf': + params = tuple(int(p) if len(p) != 0 else 1 for p in paramstring.split(';')) + while len(params) < 2: + # defaults: + params = params + (1,) + else: + params = tuple(int(p) for p in paramstring.split(';') if len(p) != 0) + if len(params) == 0: + # defaults: + if command in 'JKm': + params = (0,) + elif command in 'ABCD': + params = (1,) + + return params + + + def call_win32(self, command, params): + if command == 'm': + for param in params: + if param in self.win32_calls: + func_args = self.win32_calls[param] + func = func_args[0] + args = func_args[1:] + kwargs = dict(on_stderr=self.on_stderr) + func(*args, **kwargs) + elif command in 'J': + winterm.erase_screen(params[0], on_stderr=self.on_stderr) + elif command in 'K': + winterm.erase_line(params[0], on_stderr=self.on_stderr) + elif command in 'Hf': # cursor position - absolute + winterm.set_cursor_position(params, on_stderr=self.on_stderr) + elif command in 'ABCD': # cursor position - relative + n = params[0] + # A - up, B - down, C - forward, D - back + x, y = {'A': (0, -n), 'B': (0, n), 'C': (n, 0), 'D': (-n, 0)}[command] + winterm.cursor_adjust(x, y, on_stderr=self.on_stderr) + + + def convert_osc(self, text): + for match in self.ANSI_OSC_RE.finditer(text): + start, end = match.span() + text = text[:start] + text[end:] + paramstring, command = match.groups() + if command in '\x07': # \x07 = BEL + params = paramstring.split(";") + # 0 - change title and icon (we will only change title) + # 1 - change icon (we don't support this) + # 2 - change title + if params[0] in '02': + winterm.set_title(params[1]) + return text diff --git a/env/lib/python3.6/site-packages/pip/_vendor/colorama/initialise.py b/env/lib/python3.6/site-packages/pip/_vendor/colorama/initialise.py new file mode 100644 index 0000000..834962a --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/colorama/initialise.py @@ 
-0,0 +1,82 @@ +# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. +import atexit +import contextlib +import sys + +from .ansitowin32 import AnsiToWin32 + + +orig_stdout = None +orig_stderr = None + +wrapped_stdout = None +wrapped_stderr = None + +atexit_done = False + + +def reset_all(): + if AnsiToWin32 is not None: # Issue #74: objects might become None at exit + AnsiToWin32(orig_stdout).reset_all() + + +def init(autoreset=False, convert=None, strip=None, wrap=True): + + if not wrap and any([autoreset, convert, strip]): + raise ValueError('wrap=False conflicts with any other arg=True') + + global wrapped_stdout, wrapped_stderr + global orig_stdout, orig_stderr + + orig_stdout = sys.stdout + orig_stderr = sys.stderr + + if sys.stdout is None: + wrapped_stdout = None + else: + sys.stdout = wrapped_stdout = \ + wrap_stream(orig_stdout, convert, strip, autoreset, wrap) + if sys.stderr is None: + wrapped_stderr = None + else: + sys.stderr = wrapped_stderr = \ + wrap_stream(orig_stderr, convert, strip, autoreset, wrap) + + global atexit_done + if not atexit_done: + atexit.register(reset_all) + atexit_done = True + + +def deinit(): + if orig_stdout is not None: + sys.stdout = orig_stdout + if orig_stderr is not None: + sys.stderr = orig_stderr + + +@contextlib.contextmanager +def colorama_text(*args, **kwargs): + init(*args, **kwargs) + try: + yield + finally: + deinit() + + +def reinit(): + if wrapped_stdout is not None: + sys.stdout = wrapped_stdout + if wrapped_stderr is not None: + sys.stderr = wrapped_stderr + + +def wrap_stream(stream, convert, strip, autoreset, wrap): + if wrap: + wrapper = AnsiToWin32(stream, + convert=convert, strip=strip, autoreset=autoreset) + if wrapper.should_wrap(): + stream = wrapper.stream + return stream + + diff --git a/env/lib/python3.6/site-packages/pip/_vendor/colorama/win32.py b/env/lib/python3.6/site-packages/pip/_vendor/colorama/win32.py new file mode 100644 index 0000000..8262e35 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/colorama/win32.py @@ -0,0 +1,156 @@ +# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. 
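A usage sketch for the initialise helpers above; wrapping is effectively a no-op on most non-Windows terminals, and the printed text is only illustrative:

    import colorama
    from colorama import Fore, Style

    colorama.init(autoreset=True)     # wraps sys.stdout/sys.stderr with AnsiToWin32 proxies
    print(Fore.GREEN + 'ok')          # autoreset re-emits Style.RESET_ALL after each write
    colorama.deinit()                 # restore the original streams

    with colorama.colorama_text():    # scoped init()/deinit() via the context manager above
        print(Fore.CYAN + 'inside the context' + Style.RESET_ALL)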
+ +# from winbase.h +STDOUT = -11 +STDERR = -12 + +try: + import ctypes + from ctypes import LibraryLoader + windll = LibraryLoader(ctypes.WinDLL) + from ctypes import wintypes +except (AttributeError, ImportError): + windll = None + SetConsoleTextAttribute = lambda *_: None + winapi_test = lambda *_: None +else: + from ctypes import byref, Structure, c_char, POINTER + + COORD = wintypes._COORD + + class CONSOLE_SCREEN_BUFFER_INFO(Structure): + """struct in wincon.h.""" + _fields_ = [ + ("dwSize", COORD), + ("dwCursorPosition", COORD), + ("wAttributes", wintypes.WORD), + ("srWindow", wintypes.SMALL_RECT), + ("dwMaximumWindowSize", COORD), + ] + def __str__(self): + return '(%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d)' % ( + self.dwSize.Y, self.dwSize.X + , self.dwCursorPosition.Y, self.dwCursorPosition.X + , self.wAttributes + , self.srWindow.Top, self.srWindow.Left, self.srWindow.Bottom, self.srWindow.Right + , self.dwMaximumWindowSize.Y, self.dwMaximumWindowSize.X + ) + + _GetStdHandle = windll.kernel32.GetStdHandle + _GetStdHandle.argtypes = [ + wintypes.DWORD, + ] + _GetStdHandle.restype = wintypes.HANDLE + + _GetConsoleScreenBufferInfo = windll.kernel32.GetConsoleScreenBufferInfo + _GetConsoleScreenBufferInfo.argtypes = [ + wintypes.HANDLE, + POINTER(CONSOLE_SCREEN_BUFFER_INFO), + ] + _GetConsoleScreenBufferInfo.restype = wintypes.BOOL + + _SetConsoleTextAttribute = windll.kernel32.SetConsoleTextAttribute + _SetConsoleTextAttribute.argtypes = [ + wintypes.HANDLE, + wintypes.WORD, + ] + _SetConsoleTextAttribute.restype = wintypes.BOOL + + _SetConsoleCursorPosition = windll.kernel32.SetConsoleCursorPosition + _SetConsoleCursorPosition.argtypes = [ + wintypes.HANDLE, + COORD, + ] + _SetConsoleCursorPosition.restype = wintypes.BOOL + + _FillConsoleOutputCharacterA = windll.kernel32.FillConsoleOutputCharacterA + _FillConsoleOutputCharacterA.argtypes = [ + wintypes.HANDLE, + c_char, + wintypes.DWORD, + COORD, + POINTER(wintypes.DWORD), + ] + _FillConsoleOutputCharacterA.restype = wintypes.BOOL + + _FillConsoleOutputAttribute = windll.kernel32.FillConsoleOutputAttribute + _FillConsoleOutputAttribute.argtypes = [ + wintypes.HANDLE, + wintypes.WORD, + wintypes.DWORD, + COORD, + POINTER(wintypes.DWORD), + ] + _FillConsoleOutputAttribute.restype = wintypes.BOOL + + _SetConsoleTitleW = windll.kernel32.SetConsoleTitleW + _SetConsoleTitleW.argtypes = [ + wintypes.LPCWSTR + ] + _SetConsoleTitleW.restype = wintypes.BOOL + + handles = { + STDOUT: _GetStdHandle(STDOUT), + STDERR: _GetStdHandle(STDERR), + } + + def _winapi_test(handle): + csbi = CONSOLE_SCREEN_BUFFER_INFO() + success = _GetConsoleScreenBufferInfo( + handle, byref(csbi)) + return bool(success) + + def winapi_test(): + return any(_winapi_test(h) for h in handles.values()) + + def GetConsoleScreenBufferInfo(stream_id=STDOUT): + handle = handles[stream_id] + csbi = CONSOLE_SCREEN_BUFFER_INFO() + success = _GetConsoleScreenBufferInfo( + handle, byref(csbi)) + return csbi + + def SetConsoleTextAttribute(stream_id, attrs): + handle = handles[stream_id] + return _SetConsoleTextAttribute(handle, attrs) + + def SetConsoleCursorPosition(stream_id, position, adjust=True): + position = COORD(*position) + # If the position is out of range, do nothing. + if position.Y <= 0 or position.X <= 0: + return + # Adjust for Windows' SetConsoleCursorPosition: + # 1. being 0-based, while ANSI is 1-based. + # 2. expecting (x,y), while ANSI uses (y,x). 
+ adjusted_position = COORD(position.Y - 1, position.X - 1) + if adjust: + # Adjust for viewport's scroll position + sr = GetConsoleScreenBufferInfo(STDOUT).srWindow + adjusted_position.Y += sr.Top + adjusted_position.X += sr.Left + # Resume normal processing + handle = handles[stream_id] + return _SetConsoleCursorPosition(handle, adjusted_position) + + def FillConsoleOutputCharacter(stream_id, char, length, start): + handle = handles[stream_id] + char = c_char(char.encode()) + length = wintypes.DWORD(length) + num_written = wintypes.DWORD(0) + # Note that this is hard-coded for ANSI (vs wide) bytes. + success = _FillConsoleOutputCharacterA( + handle, char, length, start, byref(num_written)) + return num_written.value + + def FillConsoleOutputAttribute(stream_id, attr, length, start): + ''' FillConsoleOutputAttribute( hConsole, csbi.wAttributes, dwConSize, coordScreen, &cCharsWritten )''' + handle = handles[stream_id] + attribute = wintypes.WORD(attr) + length = wintypes.DWORD(length) + num_written = wintypes.DWORD(0) + # Note that this is hard-coded for ANSI (vs wide) bytes. + return _FillConsoleOutputAttribute( + handle, attribute, length, start, byref(num_written)) + + def SetConsoleTitle(title): + return _SetConsoleTitleW(title) diff --git a/env/lib/python3.6/site-packages/pip/_vendor/colorama/winterm.py b/env/lib/python3.6/site-packages/pip/_vendor/colorama/winterm.py new file mode 100644 index 0000000..60309d3 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/colorama/winterm.py @@ -0,0 +1,162 @@ +# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. +from . import win32 + + +# from wincon.h +class WinColor(object): + BLACK = 0 + BLUE = 1 + GREEN = 2 + CYAN = 3 + RED = 4 + MAGENTA = 5 + YELLOW = 6 + GREY = 7 + +# from wincon.h +class WinStyle(object): + NORMAL = 0x00 # dim text, dim background + BRIGHT = 0x08 # bright text, dim background + BRIGHT_BACKGROUND = 0x80 # dim text, bright background + +class WinTerm(object): + + def __init__(self): + self._default = win32.GetConsoleScreenBufferInfo(win32.STDOUT).wAttributes + self.set_attrs(self._default) + self._default_fore = self._fore + self._default_back = self._back + self._default_style = self._style + # In order to emulate LIGHT_EX in windows, we borrow the BRIGHT style. + # So that LIGHT_EX colors and BRIGHT style do not clobber each other, + # we track them separately, since LIGHT_EX is overwritten by Fore/Back + # and BRIGHT is overwritten by Style codes. 
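A worked example of the console attribute word this class manipulates, matching the fore + back * 16 + style packing used by get_attrs()/set_attrs() just below; it is pure arithmetic and needs no console handle:

    from colorama.winterm import WinColor, WinStyle

    attrs = WinColor.RED + WinColor.BLUE * 16 + WinStyle.BRIGHT   # "bright red on blue"
    assert attrs == 0x1C                       # 4 + 1*16 + 8
    assert attrs & 7 == WinColor.RED           # what set_attrs() recovers as _fore
    assert (attrs >> 4) & 7 == WinColor.BLUE   # ...and as _back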
+ self._light = 0 + + def get_attrs(self): + return self._fore + self._back * 16 + (self._style | self._light) + + def set_attrs(self, value): + self._fore = value & 7 + self._back = (value >> 4) & 7 + self._style = value & (WinStyle.BRIGHT | WinStyle.BRIGHT_BACKGROUND) + + def reset_all(self, on_stderr=None): + self.set_attrs(self._default) + self.set_console(attrs=self._default) + + def fore(self, fore=None, light=False, on_stderr=False): + if fore is None: + fore = self._default_fore + self._fore = fore + # Emulate LIGHT_EX with BRIGHT Style + if light: + self._light |= WinStyle.BRIGHT + else: + self._light &= ~WinStyle.BRIGHT + self.set_console(on_stderr=on_stderr) + + def back(self, back=None, light=False, on_stderr=False): + if back is None: + back = self._default_back + self._back = back + # Emulate LIGHT_EX with BRIGHT_BACKGROUND Style + if light: + self._light |= WinStyle.BRIGHT_BACKGROUND + else: + self._light &= ~WinStyle.BRIGHT_BACKGROUND + self.set_console(on_stderr=on_stderr) + + def style(self, style=None, on_stderr=False): + if style is None: + style = self._default_style + self._style = style + self.set_console(on_stderr=on_stderr) + + def set_console(self, attrs=None, on_stderr=False): + if attrs is None: + attrs = self.get_attrs() + handle = win32.STDOUT + if on_stderr: + handle = win32.STDERR + win32.SetConsoleTextAttribute(handle, attrs) + + def get_position(self, handle): + position = win32.GetConsoleScreenBufferInfo(handle).dwCursorPosition + # Because Windows coordinates are 0-based, + # and win32.SetConsoleCursorPosition expects 1-based. + position.X += 1 + position.Y += 1 + return position + + def set_cursor_position(self, position=None, on_stderr=False): + if position is None: + # I'm not currently tracking the position, so there is no default. + # position = self.get_position() + return + handle = win32.STDOUT + if on_stderr: + handle = win32.STDERR + win32.SetConsoleCursorPosition(handle, position) + + def cursor_adjust(self, x, y, on_stderr=False): + handle = win32.STDOUT + if on_stderr: + handle = win32.STDERR + position = self.get_position(handle) + adjusted_position = (position.Y + y, position.X + x) + win32.SetConsoleCursorPosition(handle, adjusted_position, adjust=False) + + def erase_screen(self, mode=0, on_stderr=False): + # 0 should clear from the cursor to the end of the screen. + # 1 should clear from the cursor to the beginning of the screen. 
+ # 2 should clear the entire screen, and move cursor to (1,1) + handle = win32.STDOUT + if on_stderr: + handle = win32.STDERR + csbi = win32.GetConsoleScreenBufferInfo(handle) + # get the number of character cells in the current buffer + cells_in_screen = csbi.dwSize.X * csbi.dwSize.Y + # get number of character cells before current cursor position + cells_before_cursor = csbi.dwSize.X * csbi.dwCursorPosition.Y + csbi.dwCursorPosition.X + if mode == 0: + from_coord = csbi.dwCursorPosition + cells_to_erase = cells_in_screen - cells_before_cursor + if mode == 1: + from_coord = win32.COORD(0, 0) + cells_to_erase = cells_before_cursor + elif mode == 2: + from_coord = win32.COORD(0, 0) + cells_to_erase = cells_in_screen + # fill the entire screen with blanks + win32.FillConsoleOutputCharacter(handle, ' ', cells_to_erase, from_coord) + # now set the buffer's attributes accordingly + win32.FillConsoleOutputAttribute(handle, self.get_attrs(), cells_to_erase, from_coord) + if mode == 2: + # put the cursor where needed + win32.SetConsoleCursorPosition(handle, (1, 1)) + + def erase_line(self, mode=0, on_stderr=False): + # 0 should clear from the cursor to the end of the line. + # 1 should clear from the cursor to the beginning of the line. + # 2 should clear the entire line. + handle = win32.STDOUT + if on_stderr: + handle = win32.STDERR + csbi = win32.GetConsoleScreenBufferInfo(handle) + if mode == 0: + from_coord = csbi.dwCursorPosition + cells_to_erase = csbi.dwSize.X - csbi.dwCursorPosition.X + if mode == 1: + from_coord = win32.COORD(0, csbi.dwCursorPosition.Y) + cells_to_erase = csbi.dwCursorPosition.X + elif mode == 2: + from_coord = win32.COORD(0, csbi.dwCursorPosition.Y) + cells_to_erase = csbi.dwSize.X + # fill the entire screen with blanks + win32.FillConsoleOutputCharacter(handle, ' ', cells_to_erase, from_coord) + # now set the buffer's attributes accordingly + win32.FillConsoleOutputAttribute(handle, self.get_attrs(), cells_to_erase, from_coord) + + def set_title(self, title): + win32.SetConsoleTitle(title) diff --git a/env/lib/python3.6/site-packages/pip/_vendor/distlib/_backport/misc.py b/env/lib/python3.6/site-packages/pip/_vendor/distlib/_backport/misc.py new file mode 100644 index 0000000..cfb318d --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/distlib/_backport/misc.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2012 The Python Software Foundation. +# See LICENSE.txt and CONTRIBUTORS.txt. 
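Before the distlib backports start, a short worked example of the cell arithmetic used by erase_screen() and erase_line() above, assuming an 80x25 buffer with the cursor at column 30 of row 10 (0-based, purely illustrative):

    width, height = 80, 25
    cursor_x, cursor_y = 30, 10

    cells_in_screen = width * height                    # 2000
    cells_before_cursor = width * cursor_y + cursor_x   # 830
    print(cells_in_screen - cells_before_cursor)        # 1170 cells: erase_screen(mode=0)
    print(cells_before_cursor)                          # 830 cells:  erase_screen(mode=1)
    print(width - cursor_x)                             # 50 cells:   erase_line(mode=0)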
+# +"""Backports for individual classes and functions.""" + +import os +import sys + +__all__ = ['cache_from_source', 'callable', 'fsencode'] + + +try: + from imp import cache_from_source +except ImportError: + def cache_from_source(py_file, debug=__debug__): + ext = debug and 'c' or 'o' + return py_file + ext + + +try: + callable = callable +except NameError: + from collections import Callable + + def callable(obj): + return isinstance(obj, Callable) + + +try: + fsencode = os.fsencode +except AttributeError: + def fsencode(filename): + if isinstance(filename, bytes): + return filename + elif isinstance(filename, str): + return filename.encode(sys.getfilesystemencoding()) + else: + raise TypeError("expect bytes or str, not %s" % + type(filename).__name__) diff --git a/env/lib/python3.6/site-packages/pip/_vendor/distlib/_backport/shutil.py b/env/lib/python3.6/site-packages/pip/_vendor/distlib/_backport/shutil.py new file mode 100644 index 0000000..159e49e --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/distlib/_backport/shutil.py @@ -0,0 +1,761 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2012 The Python Software Foundation. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +"""Utility functions for copying and archiving files and directory trees. + +XXX The functions here don't copy the resource fork or other metadata on Mac. + +""" + +import os +import sys +import stat +from os.path import abspath +import fnmatch +import collections +import errno +from . import tarfile + +try: + import bz2 + _BZ2_SUPPORTED = True +except ImportError: + _BZ2_SUPPORTED = False + +try: + from pwd import getpwnam +except ImportError: + getpwnam = None + +try: + from grp import getgrnam +except ImportError: + getgrnam = None + +__all__ = ["copyfileobj", "copyfile", "copymode", "copystat", "copy", "copy2", + "copytree", "move", "rmtree", "Error", "SpecialFileError", + "ExecError", "make_archive", "get_archive_formats", + "register_archive_format", "unregister_archive_format", + "get_unpack_formats", "register_unpack_format", + "unregister_unpack_format", "unpack_archive", "ignore_patterns"] + +class Error(EnvironmentError): + pass + +class SpecialFileError(EnvironmentError): + """Raised when trying to do a kind of operation (e.g. copying) which is + not supported on a special file (e.g. a named pipe)""" + +class ExecError(EnvironmentError): + """Raised when a command could not be executed""" + +class ReadError(EnvironmentError): + """Raised when an archive cannot be read""" + +class RegistryError(Exception): + """Raised when a registry operation with the archiving + and unpacking registries fails""" + + +try: + WindowsError +except NameError: + WindowsError = None + +def copyfileobj(fsrc, fdst, length=16*1024): + """copy data from file-like object fsrc to file-like object fdst""" + while 1: + buf = fsrc.read(length) + if not buf: + break + fdst.write(buf) + +def _samefile(src, dst): + # Macintosh, Unix. + if hasattr(os.path, 'samefile'): + try: + return os.path.samefile(src, dst) + except OSError: + return False + + # All other platforms: check for same pathname. + return (os.path.normcase(os.path.abspath(src)) == + os.path.normcase(os.path.abspath(dst))) + +def copyfile(src, dst): + """Copy data from src to dst""" + if _samefile(src, dst): + raise Error("`%s` and `%s` are the same file" % (src, dst)) + + for fn in [src, dst]: + try: + st = os.stat(fn) + except OSError: + # File most likely does not exist + pass + else: + # XXX What about other special files? (sockets, devices...) 
+ if stat.S_ISFIFO(st.st_mode): + raise SpecialFileError("`%s` is a named pipe" % fn) + + with open(src, 'rb') as fsrc: + with open(dst, 'wb') as fdst: + copyfileobj(fsrc, fdst) + +def copymode(src, dst): + """Copy mode bits from src to dst""" + if hasattr(os, 'chmod'): + st = os.stat(src) + mode = stat.S_IMODE(st.st_mode) + os.chmod(dst, mode) + +def copystat(src, dst): + """Copy all stat info (mode bits, atime, mtime, flags) from src to dst""" + st = os.stat(src) + mode = stat.S_IMODE(st.st_mode) + if hasattr(os, 'utime'): + os.utime(dst, (st.st_atime, st.st_mtime)) + if hasattr(os, 'chmod'): + os.chmod(dst, mode) + if hasattr(os, 'chflags') and hasattr(st, 'st_flags'): + try: + os.chflags(dst, st.st_flags) + except OSError as why: + if (not hasattr(errno, 'EOPNOTSUPP') or + why.errno != errno.EOPNOTSUPP): + raise + +def copy(src, dst): + """Copy data and mode bits ("cp src dst"). + + The destination may be a directory. + + """ + if os.path.isdir(dst): + dst = os.path.join(dst, os.path.basename(src)) + copyfile(src, dst) + copymode(src, dst) + +def copy2(src, dst): + """Copy data and all stat info ("cp -p src dst"). + + The destination may be a directory. + + """ + if os.path.isdir(dst): + dst = os.path.join(dst, os.path.basename(src)) + copyfile(src, dst) + copystat(src, dst) + +def ignore_patterns(*patterns): + """Function that can be used as copytree() ignore parameter. + + Patterns is a sequence of glob-style patterns + that are used to exclude files""" + def _ignore_patterns(path, names): + ignored_names = [] + for pattern in patterns: + ignored_names.extend(fnmatch.filter(names, pattern)) + return set(ignored_names) + return _ignore_patterns + +def copytree(src, dst, symlinks=False, ignore=None, copy_function=copy2, + ignore_dangling_symlinks=False): + """Recursively copy a directory tree. + + The destination directory must not already exist. + If exception(s) occur, an Error is raised with a list of reasons. + + If the optional symlinks flag is true, symbolic links in the + source tree result in symbolic links in the destination tree; if + it is false, the contents of the files pointed to by symbolic + links are copied. If the file pointed by the symlink doesn't + exist, an exception will be added in the list of errors raised in + an Error exception at the end of the copy process. + + You can set the optional ignore_dangling_symlinks flag to true if you + want to silence this exception. Notice that this has no effect on + platforms that don't support os.symlink. + + The optional ignore argument is a callable. If given, it + is called with the `src` parameter, which is the directory + being visited by copytree(), and `names` which is the list of + `src` contents, as returned by os.listdir(): + + callable(src, names) -> ignored_names + + Since copytree() is called recursively, the callable will be + called once for each directory that is copied. It returns a + list of names relative to the `src` directory that should + not be copied. + + The optional copy_function argument is a callable that will be used + to copy each file. It will be called with the source path and the + destination path as arguments. By default, copy2() is used, but any + function that supports the same signature (like copy()) can be used. 
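A usage sketch combining copytree() with the ignore_patterns() helper above; the paths are placeholders and, as documented, the destination must not exist yet. The stdlib shutil module exposes the same API:

    from shutil import copytree, ignore_patterns

    copytree('project', 'backup/project',               # placeholder source and destination
             ignore=ignore_patterns('*.pyc', 'tmp*'))   # skip compiled files and tmp* entries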
+ + """ + names = os.listdir(src) + if ignore is not None: + ignored_names = ignore(src, names) + else: + ignored_names = set() + + os.makedirs(dst) + errors = [] + for name in names: + if name in ignored_names: + continue + srcname = os.path.join(src, name) + dstname = os.path.join(dst, name) + try: + if os.path.islink(srcname): + linkto = os.readlink(srcname) + if symlinks: + os.symlink(linkto, dstname) + else: + # ignore dangling symlink if the flag is on + if not os.path.exists(linkto) and ignore_dangling_symlinks: + continue + # otherwise let the copy occurs. copy2 will raise an error + copy_function(srcname, dstname) + elif os.path.isdir(srcname): + copytree(srcname, dstname, symlinks, ignore, copy_function) + else: + # Will raise a SpecialFileError for unsupported file types + copy_function(srcname, dstname) + # catch the Error from the recursive copytree so that we can + # continue with other files + except Error as err: + errors.extend(err.args[0]) + except EnvironmentError as why: + errors.append((srcname, dstname, str(why))) + try: + copystat(src, dst) + except OSError as why: + if WindowsError is not None and isinstance(why, WindowsError): + # Copying file access times may fail on Windows + pass + else: + errors.extend((src, dst, str(why))) + if errors: + raise Error(errors) + +def rmtree(path, ignore_errors=False, onerror=None): + """Recursively delete a directory tree. + + If ignore_errors is set, errors are ignored; otherwise, if onerror + is set, it is called to handle the error with arguments (func, + path, exc_info) where func is os.listdir, os.remove, or os.rmdir; + path is the argument to that function that caused it to fail; and + exc_info is a tuple returned by sys.exc_info(). If ignore_errors + is false and onerror is None, an exception is raised. + + """ + if ignore_errors: + def onerror(*args): + pass + elif onerror is None: + def onerror(*args): + raise + try: + if os.path.islink(path): + # symlinks to directories are forbidden, see bug #1669 + raise OSError("Cannot call rmtree on a symbolic link") + except OSError: + onerror(os.path.islink, path, sys.exc_info()) + # can't continue even if onerror hook returns + return + names = [] + try: + names = os.listdir(path) + except os.error: + onerror(os.listdir, path, sys.exc_info()) + for name in names: + fullname = os.path.join(path, name) + try: + mode = os.lstat(fullname).st_mode + except os.error: + mode = 0 + if stat.S_ISDIR(mode): + rmtree(fullname, ignore_errors, onerror) + else: + try: + os.remove(fullname) + except os.error: + onerror(os.remove, fullname, sys.exc_info()) + try: + os.rmdir(path) + except os.error: + onerror(os.rmdir, path, sys.exc_info()) + + +def _basename(path): + # A basename() variant which first strips the trailing slash, if present. + # Thus we always get the last component of the path, even for directories. + return os.path.basename(path.rstrip(os.path.sep)) + +def move(src, dst): + """Recursively move a file or directory to another location. This is + similar to the Unix "mv" command. + + If the destination is a directory or a symlink to a directory, the source + is moved inside the directory. The destination path must not already + exist. + + If the destination already exists but is not a directory, it may be + overwritten depending on os.rename() semantics. + + If the destination is on our current filesystem, then rename() is used. + Otherwise, src is copied to the destination and then removed. + A lot more could be done here... 
A look at a mv.c shows a lot of + the issues this implementation glosses over. + + """ + real_dst = dst + if os.path.isdir(dst): + if _samefile(src, dst): + # We might be on a case insensitive filesystem, + # perform the rename anyway. + os.rename(src, dst) + return + + real_dst = os.path.join(dst, _basename(src)) + if os.path.exists(real_dst): + raise Error("Destination path '%s' already exists" % real_dst) + try: + os.rename(src, real_dst) + except OSError: + if os.path.isdir(src): + if _destinsrc(src, dst): + raise Error("Cannot move a directory '%s' into itself '%s'." % (src, dst)) + copytree(src, real_dst, symlinks=True) + rmtree(src) + else: + copy2(src, real_dst) + os.unlink(src) + +def _destinsrc(src, dst): + src = abspath(src) + dst = abspath(dst) + if not src.endswith(os.path.sep): + src += os.path.sep + if not dst.endswith(os.path.sep): + dst += os.path.sep + return dst.startswith(src) + +def _get_gid(name): + """Returns a gid, given a group name.""" + if getgrnam is None or name is None: + return None + try: + result = getgrnam(name) + except KeyError: + result = None + if result is not None: + return result[2] + return None + +def _get_uid(name): + """Returns an uid, given a user name.""" + if getpwnam is None or name is None: + return None + try: + result = getpwnam(name) + except KeyError: + result = None + if result is not None: + return result[2] + return None + +def _make_tarball(base_name, base_dir, compress="gzip", verbose=0, dry_run=0, + owner=None, group=None, logger=None): + """Create a (possibly compressed) tar file from all the files under + 'base_dir'. + + 'compress' must be "gzip" (the default), "bzip2", or None. + + 'owner' and 'group' can be used to define an owner and a group for the + archive that is being built. If not provided, the current owner and group + will be used. + + The output tar file will be named 'base_name' + ".tar", possibly plus + the appropriate compression extension (".gz", or ".bz2"). + + Returns the output filename. 
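A sketch of the naming rule just described, assuming the module-private _make_tarball is in scope; the paths are placeholders and dry_run=1 means nothing is actually written. The bzip2 variant additionally requires the bz2 module:

    print(_make_tarball('backup/site', 'site', dry_run=1))                    # backup/site.tar.gz
    print(_make_tarball('backup/site', 'site', compress='bzip2', dry_run=1))  # backup/site.tar.bz2
    print(_make_tarball('backup/site', 'site', compress=None, dry_run=1))     # backup/site.tar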
+ """ + tar_compression = {'gzip': 'gz', None: ''} + compress_ext = {'gzip': '.gz'} + + if _BZ2_SUPPORTED: + tar_compression['bzip2'] = 'bz2' + compress_ext['bzip2'] = '.bz2' + + # flags for compression program, each element of list will be an argument + if compress is not None and compress not in compress_ext: + raise ValueError("bad value for 'compress', or compression format not " + "supported : {0}".format(compress)) + + archive_name = base_name + '.tar' + compress_ext.get(compress, '') + archive_dir = os.path.dirname(archive_name) + + if not os.path.exists(archive_dir): + if logger is not None: + logger.info("creating %s", archive_dir) + if not dry_run: + os.makedirs(archive_dir) + + # creating the tarball + if logger is not None: + logger.info('Creating tar archive') + + uid = _get_uid(owner) + gid = _get_gid(group) + + def _set_uid_gid(tarinfo): + if gid is not None: + tarinfo.gid = gid + tarinfo.gname = group + if uid is not None: + tarinfo.uid = uid + tarinfo.uname = owner + return tarinfo + + if not dry_run: + tar = tarfile.open(archive_name, 'w|%s' % tar_compression[compress]) + try: + tar.add(base_dir, filter=_set_uid_gid) + finally: + tar.close() + + return archive_name + +def _call_external_zip(base_dir, zip_filename, verbose=False, dry_run=False): + # XXX see if we want to keep an external call here + if verbose: + zipoptions = "-r" + else: + zipoptions = "-rq" + from distutils.errors import DistutilsExecError + from distutils.spawn import spawn + try: + spawn(["zip", zipoptions, zip_filename, base_dir], dry_run=dry_run) + except DistutilsExecError: + # XXX really should distinguish between "couldn't find + # external 'zip' command" and "zip failed". + raise ExecError("unable to create zip file '%s': " + "could neither import the 'zipfile' module nor " + "find a standalone zip utility") % zip_filename + +def _make_zipfile(base_name, base_dir, verbose=0, dry_run=0, logger=None): + """Create a zip file from all the files under 'base_dir'. + + The output zip file will be named 'base_name' + ".zip". Uses either the + "zipfile" Python module (if available) or the InfoZIP "zip" utility + (if installed and found on the default search path). If neither tool is + available, raises ExecError. Returns the name of the output zip + file. + """ + zip_filename = base_name + ".zip" + archive_dir = os.path.dirname(base_name) + + if not os.path.exists(archive_dir): + if logger is not None: + logger.info("creating %s", archive_dir) + if not dry_run: + os.makedirs(archive_dir) + + # If zipfile module is not available, try spawning an external 'zip' + # command. 
+ try: + import zipfile + except ImportError: + zipfile = None + + if zipfile is None: + _call_external_zip(base_dir, zip_filename, verbose, dry_run) + else: + if logger is not None: + logger.info("creating '%s' and adding '%s' to it", + zip_filename, base_dir) + + if not dry_run: + zip = zipfile.ZipFile(zip_filename, "w", + compression=zipfile.ZIP_DEFLATED) + + for dirpath, dirnames, filenames in os.walk(base_dir): + for name in filenames: + path = os.path.normpath(os.path.join(dirpath, name)) + if os.path.isfile(path): + zip.write(path, path) + if logger is not None: + logger.info("adding '%s'", path) + zip.close() + + return zip_filename + +_ARCHIVE_FORMATS = { + 'gztar': (_make_tarball, [('compress', 'gzip')], "gzip'ed tar-file"), + 'bztar': (_make_tarball, [('compress', 'bzip2')], "bzip2'ed tar-file"), + 'tar': (_make_tarball, [('compress', None)], "uncompressed tar file"), + 'zip': (_make_zipfile, [], "ZIP file"), + } + +if _BZ2_SUPPORTED: + _ARCHIVE_FORMATS['bztar'] = (_make_tarball, [('compress', 'bzip2')], + "bzip2'ed tar-file") + +def get_archive_formats(): + """Returns a list of supported formats for archiving and unarchiving. + + Each element of the returned sequence is a tuple (name, description) + """ + formats = [(name, registry[2]) for name, registry in + _ARCHIVE_FORMATS.items()] + formats.sort() + return formats + +def register_archive_format(name, function, extra_args=None, description=''): + """Registers an archive format. + + name is the name of the format. function is the callable that will be + used to create archives. If provided, extra_args is a sequence of + (name, value) tuples that will be passed as arguments to the callable. + description can be provided to describe the format, and will be returned + by the get_archive_formats() function. + """ + if extra_args is None: + extra_args = [] + if not isinstance(function, collections.Callable): + raise TypeError('The %s object is not callable' % function) + if not isinstance(extra_args, (tuple, list)): + raise TypeError('extra_args needs to be a sequence') + for element in extra_args: + if not isinstance(element, (tuple, list)) or len(element) !=2: + raise TypeError('extra_args elements are : (arg_name, value)') + + _ARCHIVE_FORMATS[name] = (function, extra_args, description) + +def unregister_archive_format(name): + del _ARCHIVE_FORMATS[name] + +def make_archive(base_name, format, root_dir=None, base_dir=None, verbose=0, + dry_run=0, owner=None, group=None, logger=None): + """Create an archive file (eg. zip or tar). + + 'base_name' is the name of the file to create, minus any format-specific + extension; 'format' is the archive format: one of "zip", "tar", "bztar" + or "gztar". + + 'root_dir' is a directory that will be the root directory of the + archive; ie. we typically chdir into 'root_dir' before creating the + archive. 'base_dir' is the directory where we start archiving from; + ie. 'base_dir' will be the common prefix of all files and + directories in the archive. 'root_dir' and 'base_dir' both default + to the current directory. Returns the name of the archive file. + + 'owner' and 'group' are used when creating a tar archive. By default, + uses the current owner and group. 
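A usage sketch for make_archive() as documented above; the directory names are placeholders, and the standard library's shutil offers the same signature if this backport is not on the import path:

    from shutil import make_archive

    archive = make_archive('backup/site', 'gztar',
                           root_dir='/var/www',   # placeholder: chdir here before archiving
                           base_dir='site')       # archive everything under ./site
    print(archive)                                # e.g. .../backup/site.tar.gz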
+ """ + save_cwd = os.getcwd() + if root_dir is not None: + if logger is not None: + logger.debug("changing into '%s'", root_dir) + base_name = os.path.abspath(base_name) + if not dry_run: + os.chdir(root_dir) + + if base_dir is None: + base_dir = os.curdir + + kwargs = {'dry_run': dry_run, 'logger': logger} + + try: + format_info = _ARCHIVE_FORMATS[format] + except KeyError: + raise ValueError("unknown archive format '%s'" % format) + + func = format_info[0] + for arg, val in format_info[1]: + kwargs[arg] = val + + if format != 'zip': + kwargs['owner'] = owner + kwargs['group'] = group + + try: + filename = func(base_name, base_dir, **kwargs) + finally: + if root_dir is not None: + if logger is not None: + logger.debug("changing back to '%s'", save_cwd) + os.chdir(save_cwd) + + return filename + + +def get_unpack_formats(): + """Returns a list of supported formats for unpacking. + + Each element of the returned sequence is a tuple + (name, extensions, description) + """ + formats = [(name, info[0], info[3]) for name, info in + _UNPACK_FORMATS.items()] + formats.sort() + return formats + +def _check_unpack_options(extensions, function, extra_args): + """Checks what gets registered as an unpacker.""" + # first make sure no other unpacker is registered for this extension + existing_extensions = {} + for name, info in _UNPACK_FORMATS.items(): + for ext in info[0]: + existing_extensions[ext] = name + + for extension in extensions: + if extension in existing_extensions: + msg = '%s is already registered for "%s"' + raise RegistryError(msg % (extension, + existing_extensions[extension])) + + if not isinstance(function, collections.Callable): + raise TypeError('The registered function must be a callable') + + +def register_unpack_format(name, extensions, function, extra_args=None, + description=''): + """Registers an unpack format. + + `name` is the name of the format. `extensions` is a list of extensions + corresponding to the format. + + `function` is the callable that will be + used to unpack archives. The callable will receive archives to unpack. + If it's unable to handle an archive, it needs to raise a ReadError + exception. + + If provided, `extra_args` is a sequence of + (name, value) tuples that will be passed as arguments to the callable. + description can be provided to describe the format, and will be returned + by the get_unpack_formats() function. + """ + if extra_args is None: + extra_args = [] + _check_unpack_options(extensions, function, extra_args) + _UNPACK_FORMATS[name] = extensions, function, extra_args, description + +def unregister_unpack_format(name): + """Removes the pack format from the registry.""" + del _UNPACK_FORMATS[name] + +def _ensure_directory(path): + """Ensure that the parent directory of `path` exists""" + dirname = os.path.dirname(path) + if not os.path.isdir(dirname): + os.makedirs(dirname) + +def _unpack_zipfile(filename, extract_dir): + """Unpack zip `filename` to `extract_dir` + """ + try: + import zipfile + except ImportError: + raise ReadError('zlib not supported, cannot unpack this archive.') + + if not zipfile.is_zipfile(filename): + raise ReadError("%s is not a zip file" % filename) + + zip = zipfile.ZipFile(filename) + try: + for info in zip.infolist(): + name = info.filename + + # don't extract absolute paths or ones with .. in them + if name.startswith('/') or '..' 
in name: + continue + + target = os.path.join(extract_dir, *name.split('/')) + if not target: + continue + + _ensure_directory(target) + if not name.endswith('/'): + # file + data = zip.read(info.filename) + f = open(target, 'wb') + try: + f.write(data) + finally: + f.close() + del data + finally: + zip.close() + +def _unpack_tarfile(filename, extract_dir): + """Unpack tar/tar.gz/tar.bz2 `filename` to `extract_dir` + """ + try: + tarobj = tarfile.open(filename) + except tarfile.TarError: + raise ReadError( + "%s is not a compressed or uncompressed tar file" % filename) + try: + tarobj.extractall(extract_dir) + finally: + tarobj.close() + +_UNPACK_FORMATS = { + 'gztar': (['.tar.gz', '.tgz'], _unpack_tarfile, [], "gzip'ed tar-file"), + 'tar': (['.tar'], _unpack_tarfile, [], "uncompressed tar file"), + 'zip': (['.zip'], _unpack_zipfile, [], "ZIP file") + } + +if _BZ2_SUPPORTED: + _UNPACK_FORMATS['bztar'] = (['.bz2'], _unpack_tarfile, [], + "bzip2'ed tar-file") + +def _find_unpack_format(filename): + for name, info in _UNPACK_FORMATS.items(): + for extension in info[0]: + if filename.endswith(extension): + return name + return None + +def unpack_archive(filename, extract_dir=None, format=None): + """Unpack an archive. + + `filename` is the name of the archive. + + `extract_dir` is the name of the target directory, where the archive + is unpacked. If not provided, the current working directory is used. + + `format` is the archive format: one of "zip", "tar", or "gztar". Or any + other registered format. If not provided, unpack_archive will use the + filename extension and see if an unpacker was registered for that + extension. + + In case none is found, a ValueError is raised. + """ + if extract_dir is None: + extract_dir = os.getcwd() + + if format is not None: + try: + format_info = _UNPACK_FORMATS[format] + except KeyError: + raise ValueError("Unknown unpack format '{0}'".format(format)) + + func = format_info[1] + func(filename, extract_dir, **dict(format_info[2])) + else: + # we need to look at the registered unpackers supported extensions + format = _find_unpack_format(filename) + if format is None: + raise ReadError("Unknown archive format '{0}'".format(filename)) + + func = _UNPACK_FORMATS[format][1] + kwargs = dict(_UNPACK_FORMATS[format][2]) + func(filename, extract_dir, **kwargs) diff --git a/env/lib/python3.6/site-packages/pip/_vendor/distlib/_backport/sysconfig.cfg b/env/lib/python3.6/site-packages/pip/_vendor/distlib/_backport/sysconfig.cfg new file mode 100644 index 0000000..1746bd0 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/distlib/_backport/sysconfig.cfg @@ -0,0 +1,84 @@ +[posix_prefix] +# Configuration directories. Some of these come straight out of the +# configure script. They are for implementing the other variables, not to +# be used directly in [resource_locations]. 
+confdir = /etc +datadir = /usr/share +libdir = /usr/lib +statedir = /var +# User resource directory +local = ~/.local/{distribution.name} + +stdlib = {base}/lib/python{py_version_short} +platstdlib = {platbase}/lib/python{py_version_short} +purelib = {base}/lib/python{py_version_short}/site-packages +platlib = {platbase}/lib/python{py_version_short}/site-packages +include = {base}/include/python{py_version_short}{abiflags} +platinclude = {platbase}/include/python{py_version_short}{abiflags} +data = {base} + +[posix_home] +stdlib = {base}/lib/python +platstdlib = {base}/lib/python +purelib = {base}/lib/python +platlib = {base}/lib/python +include = {base}/include/python +platinclude = {base}/include/python +scripts = {base}/bin +data = {base} + +[nt] +stdlib = {base}/Lib +platstdlib = {base}/Lib +purelib = {base}/Lib/site-packages +platlib = {base}/Lib/site-packages +include = {base}/Include +platinclude = {base}/Include +scripts = {base}/Scripts +data = {base} + +[os2] +stdlib = {base}/Lib +platstdlib = {base}/Lib +purelib = {base}/Lib/site-packages +platlib = {base}/Lib/site-packages +include = {base}/Include +platinclude = {base}/Include +scripts = {base}/Scripts +data = {base} + +[os2_home] +stdlib = {userbase}/lib/python{py_version_short} +platstdlib = {userbase}/lib/python{py_version_short} +purelib = {userbase}/lib/python{py_version_short}/site-packages +platlib = {userbase}/lib/python{py_version_short}/site-packages +include = {userbase}/include/python{py_version_short} +scripts = {userbase}/bin +data = {userbase} + +[nt_user] +stdlib = {userbase}/Python{py_version_nodot} +platstdlib = {userbase}/Python{py_version_nodot} +purelib = {userbase}/Python{py_version_nodot}/site-packages +platlib = {userbase}/Python{py_version_nodot}/site-packages +include = {userbase}/Python{py_version_nodot}/Include +scripts = {userbase}/Scripts +data = {userbase} + +[posix_user] +stdlib = {userbase}/lib/python{py_version_short} +platstdlib = {userbase}/lib/python{py_version_short} +purelib = {userbase}/lib/python{py_version_short}/site-packages +platlib = {userbase}/lib/python{py_version_short}/site-packages +include = {userbase}/include/python{py_version_short} +scripts = {userbase}/bin +data = {userbase} + +[osx_framework_user] +stdlib = {userbase}/lib/python +platstdlib = {userbase}/lib/python +purelib = {userbase}/lib/python/site-packages +platlib = {userbase}/lib/python/site-packages +include = {userbase}/include +scripts = {userbase}/bin +data = {userbase} diff --git a/env/lib/python3.6/site-packages/pip/_vendor/distlib/_backport/sysconfig.py b/env/lib/python3.6/site-packages/pip/_vendor/distlib/_backport/sysconfig.py new file mode 100644 index 0000000..1df3aba --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/distlib/_backport/sysconfig.py @@ -0,0 +1,788 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2012 The Python Software Foundation. +# See LICENSE.txt and CONTRIBUTORS.txt. 
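+#
+# Illustrative usage (editor's sketch, assuming the vendored import path
+# pip._vendor.distlib._backport; not part of the upstream file):
+#
+#     from pip._vendor.distlib._backport import sysconfig
+#     sysconfig.get_platform()              # e.g. 'linux-x86_64'
+#     sysconfig.get_paths()['purelib']      # site-packages dir of the default scheme
+#     sysconfig.get_config_var('prefix')    # same as get_config_vars().get('prefix')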
+# +"""Access to Python's configuration information.""" + +import codecs +import os +import re +import sys +from os.path import pardir, realpath +try: + import configparser +except ImportError: + import ConfigParser as configparser + + +__all__ = [ + 'get_config_h_filename', + 'get_config_var', + 'get_config_vars', + 'get_makefile_filename', + 'get_path', + 'get_path_names', + 'get_paths', + 'get_platform', + 'get_python_version', + 'get_scheme_names', + 'parse_config_h', +] + + +def _safe_realpath(path): + try: + return realpath(path) + except OSError: + return path + + +if sys.executable: + _PROJECT_BASE = os.path.dirname(_safe_realpath(sys.executable)) +else: + # sys.executable can be empty if argv[0] has been changed and Python is + # unable to retrieve the real program name + _PROJECT_BASE = _safe_realpath(os.getcwd()) + +if os.name == "nt" and "pcbuild" in _PROJECT_BASE[-8:].lower(): + _PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir)) +# PC/VS7.1 +if os.name == "nt" and "\\pc\\v" in _PROJECT_BASE[-10:].lower(): + _PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir, pardir)) +# PC/AMD64 +if os.name == "nt" and "\\pcbuild\\amd64" in _PROJECT_BASE[-14:].lower(): + _PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir, pardir)) + + +def is_python_build(): + for fn in ("Setup.dist", "Setup.local"): + if os.path.isfile(os.path.join(_PROJECT_BASE, "Modules", fn)): + return True + return False + +_PYTHON_BUILD = is_python_build() + +_cfg_read = False + +def _ensure_cfg_read(): + global _cfg_read + if not _cfg_read: + from ..resources import finder + backport_package = __name__.rsplit('.', 1)[0] + _finder = finder(backport_package) + _cfgfile = _finder.find('sysconfig.cfg') + assert _cfgfile, 'sysconfig.cfg exists' + with _cfgfile.as_stream() as s: + _SCHEMES.readfp(s) + if _PYTHON_BUILD: + for scheme in ('posix_prefix', 'posix_home'): + _SCHEMES.set(scheme, 'include', '{srcdir}/Include') + _SCHEMES.set(scheme, 'platinclude', '{projectbase}/.') + + _cfg_read = True + + +_SCHEMES = configparser.RawConfigParser() +_VAR_REPL = re.compile(r'\{([^{]*?)\}') + +def _expand_globals(config): + _ensure_cfg_read() + if config.has_section('globals'): + globals = config.items('globals') + else: + globals = tuple() + + sections = config.sections() + for section in sections: + if section == 'globals': + continue + for option, value in globals: + if config.has_option(section, option): + continue + config.set(section, option, value) + config.remove_section('globals') + + # now expanding local variables defined in the cfg file + # + for section in config.sections(): + variables = dict(config.items(section)) + + def _replacer(matchobj): + name = matchobj.group(1) + if name in variables: + return variables[name] + return matchobj.group(0) + + for option, value in config.items(section): + config.set(section, option, _VAR_REPL.sub(_replacer, value)) + +#_expand_globals(_SCHEMES) + + # FIXME don't rely on sys.version here, its format is an implementation detail + # of CPython, use sys.version_info or sys.hexversion +_PY_VERSION = sys.version.split()[0] +_PY_VERSION_SHORT = sys.version[:3] +_PY_VERSION_SHORT_NO_DOT = _PY_VERSION[0] + _PY_VERSION[2] +_PREFIX = os.path.normpath(sys.prefix) +_EXEC_PREFIX = os.path.normpath(sys.exec_prefix) +_CONFIG_VARS = None +_USER_BASE = None + + +def _subst_vars(path, local_vars): + """In the string `path`, replace tokens like {some.thing} with the + corresponding value from the map `local_vars`. 
+ + If there is no corresponding value, leave the token unchanged. + """ + def _replacer(matchobj): + name = matchobj.group(1) + if name in local_vars: + return local_vars[name] + elif name in os.environ: + return os.environ[name] + return matchobj.group(0) + return _VAR_REPL.sub(_replacer, path) + + +def _extend_dict(target_dict, other_dict): + target_keys = target_dict.keys() + for key, value in other_dict.items(): + if key in target_keys: + continue + target_dict[key] = value + + +def _expand_vars(scheme, vars): + res = {} + if vars is None: + vars = {} + _extend_dict(vars, get_config_vars()) + + for key, value in _SCHEMES.items(scheme): + if os.name in ('posix', 'nt'): + value = os.path.expanduser(value) + res[key] = os.path.normpath(_subst_vars(value, vars)) + return res + + +def format_value(value, vars): + def _replacer(matchobj): + name = matchobj.group(1) + if name in vars: + return vars[name] + return matchobj.group(0) + return _VAR_REPL.sub(_replacer, value) + + +def _get_default_scheme(): + if os.name == 'posix': + # the default scheme for posix is posix_prefix + return 'posix_prefix' + return os.name + + +def _getuserbase(): + env_base = os.environ.get("PYTHONUSERBASE", None) + + def joinuser(*args): + return os.path.expanduser(os.path.join(*args)) + + # what about 'os2emx', 'riscos' ? + if os.name == "nt": + base = os.environ.get("APPDATA") or "~" + if env_base: + return env_base + else: + return joinuser(base, "Python") + + if sys.platform == "darwin": + framework = get_config_var("PYTHONFRAMEWORK") + if framework: + if env_base: + return env_base + else: + return joinuser("~", "Library", framework, "%d.%d" % + sys.version_info[:2]) + + if env_base: + return env_base + else: + return joinuser("~", ".local") + + +def _parse_makefile(filename, vars=None): + """Parse a Makefile-style file. + + A dictionary containing name/value pairs is returned. If an + optional dictionary is passed in as the second argument, it is + used instead of a new dictionary. + """ + # Regexes needed for parsing Makefile (and similar syntaxes, + # like old-style Setup files). + _variable_rx = re.compile(r"([a-zA-Z][a-zA-Z0-9_]+)\s*=\s*(.*)") + _findvar1_rx = re.compile(r"\$\(([A-Za-z][A-Za-z0-9_]*)\)") + _findvar2_rx = re.compile(r"\${([A-Za-z][A-Za-z0-9_]*)}") + + if vars is None: + vars = {} + done = {} + notdone = {} + + with codecs.open(filename, encoding='utf-8', errors="surrogateescape") as f: + lines = f.readlines() + + for line in lines: + if line.startswith('#') or line.strip() == '': + continue + m = _variable_rx.match(line) + if m: + n, v = m.group(1, 2) + v = v.strip() + # `$$' is a literal `$' in make + tmpv = v.replace('$$', '') + + if "$" in tmpv: + notdone[n] = v + else: + try: + v = int(v) + except ValueError: + # insert literal `$' + done[n] = v.replace('$$', '$') + else: + done[n] = v + + # do variable interpolation here + variables = list(notdone.keys()) + + # Variables with a 'PY_' prefix in the makefile. These need to + # be made available without that prefix through sysconfig. + # Special care is needed to ensure that variable expansion works, even + # if the expansion uses the name without a prefix. 
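+    # Editor's note (illustrative, not part of the upstream file): the
+    # interpolation pass below resolves $(VAR) and ${VAR} references, e.g.
+    # given Makefile lines 'CC = gcc' and 'LDSHARED = $(CC) -shared' the
+    # returned dictionary ends up with done['LDSHARED'] == 'gcc -shared'.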
+ renamed_variables = ('CFLAGS', 'LDFLAGS', 'CPPFLAGS') + + while len(variables) > 0: + for name in tuple(variables): + value = notdone[name] + m = _findvar1_rx.search(value) or _findvar2_rx.search(value) + if m is not None: + n = m.group(1) + found = True + if n in done: + item = str(done[n]) + elif n in notdone: + # get it on a subsequent round + found = False + elif n in os.environ: + # do it like make: fall back to environment + item = os.environ[n] + + elif n in renamed_variables: + if (name.startswith('PY_') and + name[3:] in renamed_variables): + item = "" + + elif 'PY_' + n in notdone: + found = False + + else: + item = str(done['PY_' + n]) + + else: + done[n] = item = "" + + if found: + after = value[m.end():] + value = value[:m.start()] + item + after + if "$" in after: + notdone[name] = value + else: + try: + value = int(value) + except ValueError: + done[name] = value.strip() + else: + done[name] = value + variables.remove(name) + + if (name.startswith('PY_') and + name[3:] in renamed_variables): + + name = name[3:] + if name not in done: + done[name] = value + + else: + # bogus variable reference (e.g. "prefix=$/opt/python"); + # just drop it since we can't deal + done[name] = value + variables.remove(name) + + # strip spurious spaces + for k, v in done.items(): + if isinstance(v, str): + done[k] = v.strip() + + # save the results in the global dictionary + vars.update(done) + return vars + + +def get_makefile_filename(): + """Return the path of the Makefile.""" + if _PYTHON_BUILD: + return os.path.join(_PROJECT_BASE, "Makefile") + if hasattr(sys, 'abiflags'): + config_dir_name = 'config-%s%s' % (_PY_VERSION_SHORT, sys.abiflags) + else: + config_dir_name = 'config' + return os.path.join(get_path('stdlib'), config_dir_name, 'Makefile') + + +def _init_posix(vars): + """Initialize the module as appropriate for POSIX systems.""" + # load the installed Makefile: + makefile = get_makefile_filename() + try: + _parse_makefile(makefile, vars) + except IOError as e: + msg = "invalid Python installation: unable to open %s" % makefile + if hasattr(e, "strerror"): + msg = msg + " (%s)" % e.strerror + raise IOError(msg) + # load the installed pyconfig.h: + config_h = get_config_h_filename() + try: + with open(config_h) as f: + parse_config_h(f, vars) + except IOError as e: + msg = "invalid Python installation: unable to open %s" % config_h + if hasattr(e, "strerror"): + msg = msg + " (%s)" % e.strerror + raise IOError(msg) + # On AIX, there are wrong paths to the linker scripts in the Makefile + # -- these paths are relative to the Python source, but when installed + # the scripts are in another directory. + if _PYTHON_BUILD: + vars['LDSHARED'] = vars['BLDSHARED'] + + +def _init_non_posix(vars): + """Initialize the module as appropriate for NT""" + # set basic install directories + vars['LIBDEST'] = get_path('stdlib') + vars['BINLIBDEST'] = get_path('platstdlib') + vars['INCLUDEPY'] = get_path('include') + vars['SO'] = '.pyd' + vars['EXE'] = '.exe' + vars['VERSION'] = _PY_VERSION_SHORT_NO_DOT + vars['BINDIR'] = os.path.dirname(_safe_realpath(sys.executable)) + +# +# public APIs +# + + +def parse_config_h(fp, vars=None): + """Parse a config.h-style file. + + A dictionary containing name/value pairs is returned. If an + optional dictionary is passed in as the second argument, it is + used instead of a new dictionary. 
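+
+    Editor's note (illustrative, not part of the upstream file): a line such
+    as '#define HAVE_UNISTD_H 1' becomes vars['HAVE_UNISTD_H'] = 1, while
+    '/* #undef WITH_PYMALLOC */' becomes vars['WITH_PYMALLOC'] = 0.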
+ """ + if vars is None: + vars = {} + define_rx = re.compile("#define ([A-Z][A-Za-z0-9_]+) (.*)\n") + undef_rx = re.compile("/[*] #undef ([A-Z][A-Za-z0-9_]+) [*]/\n") + + while True: + line = fp.readline() + if not line: + break + m = define_rx.match(line) + if m: + n, v = m.group(1, 2) + try: + v = int(v) + except ValueError: + pass + vars[n] = v + else: + m = undef_rx.match(line) + if m: + vars[m.group(1)] = 0 + return vars + + +def get_config_h_filename(): + """Return the path of pyconfig.h.""" + if _PYTHON_BUILD: + if os.name == "nt": + inc_dir = os.path.join(_PROJECT_BASE, "PC") + else: + inc_dir = _PROJECT_BASE + else: + inc_dir = get_path('platinclude') + return os.path.join(inc_dir, 'pyconfig.h') + + +def get_scheme_names(): + """Return a tuple containing the schemes names.""" + return tuple(sorted(_SCHEMES.sections())) + + +def get_path_names(): + """Return a tuple containing the paths names.""" + # xxx see if we want a static list + return _SCHEMES.options('posix_prefix') + + +def get_paths(scheme=_get_default_scheme(), vars=None, expand=True): + """Return a mapping containing an install scheme. + + ``scheme`` is the install scheme name. If not provided, it will + return the default scheme for the current platform. + """ + _ensure_cfg_read() + if expand: + return _expand_vars(scheme, vars) + else: + return dict(_SCHEMES.items(scheme)) + + +def get_path(name, scheme=_get_default_scheme(), vars=None, expand=True): + """Return a path corresponding to the scheme. + + ``scheme`` is the install scheme name. + """ + return get_paths(scheme, vars, expand)[name] + + +def get_config_vars(*args): + """With no arguments, return a dictionary of all configuration + variables relevant for the current platform. + + On Unix, this means every variable defined in Python's installed Makefile; + On Windows and Mac OS it's a much smaller set. + + With arguments, return a list of values that result from looking up + each argument in the configuration variable dictionary. + """ + global _CONFIG_VARS + if _CONFIG_VARS is None: + _CONFIG_VARS = {} + # Normalized versions of prefix and exec_prefix are handy to have; + # in fact, these are the standard versions used most places in the + # distutils2 module. + _CONFIG_VARS['prefix'] = _PREFIX + _CONFIG_VARS['exec_prefix'] = _EXEC_PREFIX + _CONFIG_VARS['py_version'] = _PY_VERSION + _CONFIG_VARS['py_version_short'] = _PY_VERSION_SHORT + _CONFIG_VARS['py_version_nodot'] = _PY_VERSION[0] + _PY_VERSION[2] + _CONFIG_VARS['base'] = _PREFIX + _CONFIG_VARS['platbase'] = _EXEC_PREFIX + _CONFIG_VARS['projectbase'] = _PROJECT_BASE + try: + _CONFIG_VARS['abiflags'] = sys.abiflags + except AttributeError: + # sys.abiflags may not be defined on all platforms. + _CONFIG_VARS['abiflags'] = '' + + if os.name in ('nt', 'os2'): + _init_non_posix(_CONFIG_VARS) + if os.name == 'posix': + _init_posix(_CONFIG_VARS) + # Setting 'userbase' is done below the call to the + # init function to enable using 'get_config_var' in + # the init-function. + if sys.version >= '2.6': + _CONFIG_VARS['userbase'] = _getuserbase() + + if 'srcdir' not in _CONFIG_VARS: + _CONFIG_VARS['srcdir'] = _PROJECT_BASE + else: + _CONFIG_VARS['srcdir'] = _safe_realpath(_CONFIG_VARS['srcdir']) + + # Convert srcdir into an absolute path if it appears necessary. + # Normally it is relative to the build directory. However, during + # testing, for example, we might be running a non-installed python + # from a different directory. 
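+        # Editor's note (illustrative, not part of the upstream file): e.g. a
+        # Makefile value srcdir='.' combined with a build tree rooted at
+        # /home/user/cpython is rewritten below to '/home/user/cpython'
+        # whenever the current directory differs from the build directory.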
+ if _PYTHON_BUILD and os.name == "posix": + base = _PROJECT_BASE + try: + cwd = os.getcwd() + except OSError: + cwd = None + if (not os.path.isabs(_CONFIG_VARS['srcdir']) and + base != cwd): + # srcdir is relative and we are not in the same directory + # as the executable. Assume executable is in the build + # directory and make srcdir absolute. + srcdir = os.path.join(base, _CONFIG_VARS['srcdir']) + _CONFIG_VARS['srcdir'] = os.path.normpath(srcdir) + + if sys.platform == 'darwin': + kernel_version = os.uname()[2] # Kernel version (8.4.3) + major_version = int(kernel_version.split('.')[0]) + + if major_version < 8: + # On Mac OS X before 10.4, check if -arch and -isysroot + # are in CFLAGS or LDFLAGS and remove them if they are. + # This is needed when building extensions on a 10.3 system + # using a universal build of python. + for key in ('LDFLAGS', 'BASECFLAGS', + # a number of derived variables. These need to be + # patched up as well. + 'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'): + flags = _CONFIG_VARS[key] + flags = re.sub(r'-arch\s+\w+\s', ' ', flags) + flags = re.sub('-isysroot [^ \t]*', ' ', flags) + _CONFIG_VARS[key] = flags + else: + # Allow the user to override the architecture flags using + # an environment variable. + # NOTE: This name was introduced by Apple in OSX 10.5 and + # is used by several scripting languages distributed with + # that OS release. + if 'ARCHFLAGS' in os.environ: + arch = os.environ['ARCHFLAGS'] + for key in ('LDFLAGS', 'BASECFLAGS', + # a number of derived variables. These need to be + # patched up as well. + 'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'): + + flags = _CONFIG_VARS[key] + flags = re.sub(r'-arch\s+\w+\s', ' ', flags) + flags = flags + ' ' + arch + _CONFIG_VARS[key] = flags + + # If we're on OSX 10.5 or later and the user tries to + # compiles an extension using an SDK that is not present + # on the current machine it is better to not use an SDK + # than to fail. + # + # The major usecase for this is users using a Python.org + # binary installer on OSX 10.6: that installer uses + # the 10.4u SDK, but that SDK is not installed by default + # when you install Xcode. + # + CFLAGS = _CONFIG_VARS.get('CFLAGS', '') + m = re.search(r'-isysroot\s+(\S+)', CFLAGS) + if m is not None: + sdk = m.group(1) + if not os.path.exists(sdk): + for key in ('LDFLAGS', 'BASECFLAGS', + # a number of derived variables. These need to be + # patched up as well. + 'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'): + + flags = _CONFIG_VARS[key] + flags = re.sub(r'-isysroot\s+\S+(\s|$)', ' ', flags) + _CONFIG_VARS[key] = flags + + if args: + vals = [] + for name in args: + vals.append(_CONFIG_VARS.get(name)) + return vals + else: + return _CONFIG_VARS + + +def get_config_var(name): + """Return the value of a single variable using the dictionary returned by + 'get_config_vars()'. + + Equivalent to get_config_vars().get(name) + """ + return get_config_vars().get(name) + + +def get_platform(): + """Return a string that identifies the current platform. + + This is used mainly to distinguish platform-specific build directories and + platform-specific built distributions. Typically includes the OS name + and version and the architecture (as supplied by 'os.uname()'), + although the exact information included depends on the OS; eg. for IRIX + the architecture isn't particularly important (IRIX only runs on SGI + hardware), but for Linux the kernel version isn't particularly + important. + + Examples of returned values: + linux-i586 + linux-alpha (?) 
+ solaris-2.6-sun4u + irix-5.3 + irix64-6.2 + + Windows will return one of: + win-amd64 (64bit Windows on AMD64 (aka x86_64, Intel64, EM64T, etc) + win-ia64 (64bit Windows on Itanium) + win32 (all others - specifically, sys.platform is returned) + + For other non-POSIX platforms, currently just returns 'sys.platform'. + """ + if os.name == 'nt': + # sniff sys.version for architecture. + prefix = " bit (" + i = sys.version.find(prefix) + if i == -1: + return sys.platform + j = sys.version.find(")", i) + look = sys.version[i+len(prefix):j].lower() + if look == 'amd64': + return 'win-amd64' + if look == 'itanium': + return 'win-ia64' + return sys.platform + + if os.name != "posix" or not hasattr(os, 'uname'): + # XXX what about the architecture? NT is Intel or Alpha, + # Mac OS is M68k or PPC, etc. + return sys.platform + + # Try to distinguish various flavours of Unix + osname, host, release, version, machine = os.uname() + + # Convert the OS name to lowercase, remove '/' characters + # (to accommodate BSD/OS), and translate spaces (for "Power Macintosh") + osname = osname.lower().replace('/', '') + machine = machine.replace(' ', '_') + machine = machine.replace('/', '-') + + if osname[:5] == "linux": + # At least on Linux/Intel, 'machine' is the processor -- + # i386, etc. + # XXX what about Alpha, SPARC, etc? + return "%s-%s" % (osname, machine) + elif osname[:5] == "sunos": + if release[0] >= "5": # SunOS 5 == Solaris 2 + osname = "solaris" + release = "%d.%s" % (int(release[0]) - 3, release[2:]) + # fall through to standard osname-release-machine representation + elif osname[:4] == "irix": # could be "irix64"! + return "%s-%s" % (osname, release) + elif osname[:3] == "aix": + return "%s-%s.%s" % (osname, version, release) + elif osname[:6] == "cygwin": + osname = "cygwin" + rel_re = re.compile(r'[\d.]+') + m = rel_re.match(release) + if m: + release = m.group() + elif osname[:6] == "darwin": + # + # For our purposes, we'll assume that the system version from + # distutils' perspective is what MACOSX_DEPLOYMENT_TARGET is set + # to. This makes the compatibility story a bit more sane because the + # machine is going to compile and link as if it were + # MACOSX_DEPLOYMENT_TARGET. + cfgvars = get_config_vars() + macver = cfgvars.get('MACOSX_DEPLOYMENT_TARGET') + + if True: + # Always calculate the release of the running machine, + # needed to determine if we can build fat binaries or not. + + macrelease = macver + # Get the system version. Reading this plist is a documented + # way to get the system version (see the documentation for + # the Gestalt Manager) + try: + f = open('/System/Library/CoreServices/SystemVersion.plist') + except IOError: + # We're on a plain darwin box, fall back to the default + # behaviour. + pass + else: + try: + m = re.search(r'<key>ProductUserVisibleVersion</key>\s*' + r'<string>(.*?)</string>', f.read()) + finally: + f.close() + if m is not None: + macrelease = '.'.join(m.group(1).split('.')[:2]) + # else: fall back to the default behaviour + + if not macver: + macver = macrelease + + if macver: + release = macver + osname = "macosx" + + if ((macrelease + '.') >= '10.4.' and + '-arch' in get_config_vars().get('CFLAGS', '').strip()): + # The universal build will build fat binaries, but not on + # systems before 10.4 + # + # Try to detect 4-way universal builds, those have machine-type + # 'universal' instead of 'fat'. 
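+                # Editor's note (illustrative, not part of the upstream file):
+                # e.g. CFLAGS='-arch i386 -arch x86_64 ...' yields
+                # archs == ('i386', 'x86_64') and therefore machine = 'intel'.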
+ + machine = 'fat' + cflags = get_config_vars().get('CFLAGS') + + archs = re.findall(r'-arch\s+(\S+)', cflags) + archs = tuple(sorted(set(archs))) + + if len(archs) == 1: + machine = archs[0] + elif archs == ('i386', 'ppc'): + machine = 'fat' + elif archs == ('i386', 'x86_64'): + machine = 'intel' + elif archs == ('i386', 'ppc', 'x86_64'): + machine = 'fat3' + elif archs == ('ppc64', 'x86_64'): + machine = 'fat64' + elif archs == ('i386', 'ppc', 'ppc64', 'x86_64'): + machine = 'universal' + else: + raise ValueError( + "Don't know machine value for archs=%r" % (archs,)) + + elif machine == 'i386': + # On OSX the machine type returned by uname is always the + # 32-bit variant, even if the executable architecture is + # the 64-bit variant + if sys.maxsize >= 2**32: + machine = 'x86_64' + + elif machine in ('PowerPC', 'Power_Macintosh'): + # Pick a sane name for the PPC architecture. + # See 'i386' case + if sys.maxsize >= 2**32: + machine = 'ppc64' + else: + machine = 'ppc' + + return "%s-%s-%s" % (osname, release, machine) + + +def get_python_version(): + return _PY_VERSION_SHORT + + +def _print_dict(title, data): + for index, (key, value) in enumerate(sorted(data.items())): + if index == 0: + print('%s: ' % (title)) + print('\t%s = "%s"' % (key, value)) + + +def _main(): + """Display all information sysconfig detains.""" + print('Platform: "%s"' % get_platform()) + print('Python version: "%s"' % get_python_version()) + print('Current installation scheme: "%s"' % _get_default_scheme()) + print() + _print_dict('Paths', get_paths()) + print() + _print_dict('Variables', get_config_vars()) + + +if __name__ == '__main__': + _main() diff --git a/env/lib/python3.6/site-packages/pip/_vendor/distlib/_backport/tarfile.py b/env/lib/python3.6/site-packages/pip/_vendor/distlib/_backport/tarfile.py new file mode 100644 index 0000000..d66d856 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/distlib/_backport/tarfile.py @@ -0,0 +1,2607 @@ +#------------------------------------------------------------------- +# tarfile.py +#------------------------------------------------------------------- +# Copyright (C) 2002 Lars Gustaebel <lars@gustaebel.de> +# All rights reserved. +# +# Permission is hereby granted, free of charge, to any person +# obtaining a copy of this software and associated documentation +# files (the "Software"), to deal in the Software without +# restriction, including without limitation the rights to use, +# copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the +# Software is furnished to do so, subject to the following +# conditions: +# +# The above copyright notice and this permission notice shall be +# included in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +# OTHER DEALINGS IN THE SOFTWARE. +# +from __future__ import print_function + +"""Read from and write to tar format archives. 
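+
+Illustrative usage (editor's sketch, not part of the upstream module; the
+archive name and target directory are made up):
+
+    tf = TarFile.open("example.tar.gz", "r:gz")
+    try:
+        for member in tf.getmembers():
+            print(member.name)
+        tf.extractall("/tmp/out")
+    finally:
+        tf.close()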
+""" + +__version__ = "$Revision$" + +version = "0.9.0" +__author__ = "Lars Gust\u00e4bel (lars@gustaebel.de)" +__date__ = "$Date: 2011-02-25 17:42:01 +0200 (Fri, 25 Feb 2011) $" +__cvsid__ = "$Id: tarfile.py 88586 2011-02-25 15:42:01Z marc-andre.lemburg $" +__credits__ = "Gustavo Niemeyer, Niels Gust\u00e4bel, Richard Townsend." + +#--------- +# Imports +#--------- +import sys +import os +import stat +import errno +import time +import struct +import copy +import re + +try: + import grp, pwd +except ImportError: + grp = pwd = None + +# os.symlink on Windows prior to 6.0 raises NotImplementedError +symlink_exception = (AttributeError, NotImplementedError) +try: + # WindowsError (1314) will be raised if the caller does not hold the + # SeCreateSymbolicLinkPrivilege privilege + symlink_exception += (WindowsError,) +except NameError: + pass + +# from tarfile import * +__all__ = ["TarFile", "TarInfo", "is_tarfile", "TarError"] + +if sys.version_info[0] < 3: + import __builtin__ as builtins +else: + import builtins + +_open = builtins.open # Since 'open' is TarFile.open + +#--------------------------------------------------------- +# tar constants +#--------------------------------------------------------- +NUL = b"\0" # the null character +BLOCKSIZE = 512 # length of processing blocks +RECORDSIZE = BLOCKSIZE * 20 # length of records +GNU_MAGIC = b"ustar \0" # magic gnu tar string +POSIX_MAGIC = b"ustar\x0000" # magic posix tar string + +LENGTH_NAME = 100 # maximum length of a filename +LENGTH_LINK = 100 # maximum length of a linkname +LENGTH_PREFIX = 155 # maximum length of the prefix field + +REGTYPE = b"0" # regular file +AREGTYPE = b"\0" # regular file +LNKTYPE = b"1" # link (inside tarfile) +SYMTYPE = b"2" # symbolic link +CHRTYPE = b"3" # character special device +BLKTYPE = b"4" # block special device +DIRTYPE = b"5" # directory +FIFOTYPE = b"6" # fifo special device +CONTTYPE = b"7" # contiguous file + +GNUTYPE_LONGNAME = b"L" # GNU tar longname +GNUTYPE_LONGLINK = b"K" # GNU tar longlink +GNUTYPE_SPARSE = b"S" # GNU tar sparse file + +XHDTYPE = b"x" # POSIX.1-2001 extended header +XGLTYPE = b"g" # POSIX.1-2001 global header +SOLARIS_XHDTYPE = b"X" # Solaris extended header + +USTAR_FORMAT = 0 # POSIX.1-1988 (ustar) format +GNU_FORMAT = 1 # GNU tar format +PAX_FORMAT = 2 # POSIX.1-2001 (pax) format +DEFAULT_FORMAT = GNU_FORMAT + +#--------------------------------------------------------- +# tarfile constants +#--------------------------------------------------------- +# File types that tarfile supports: +SUPPORTED_TYPES = (REGTYPE, AREGTYPE, LNKTYPE, + SYMTYPE, DIRTYPE, FIFOTYPE, + CONTTYPE, CHRTYPE, BLKTYPE, + GNUTYPE_LONGNAME, GNUTYPE_LONGLINK, + GNUTYPE_SPARSE) + +# File types that will be treated as a regular file. +REGULAR_TYPES = (REGTYPE, AREGTYPE, + CONTTYPE, GNUTYPE_SPARSE) + +# File types that are part of the GNU tar format. +GNU_TYPES = (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK, + GNUTYPE_SPARSE) + +# Fields from a pax header that override a TarInfo attribute. +PAX_FIELDS = ("path", "linkpath", "size", "mtime", + "uid", "gid", "uname", "gname") + +# Fields from a pax header that are affected by hdrcharset. +PAX_NAME_FIELDS = set(("path", "linkpath", "uname", "gname")) + +# Fields in a pax header that are numbers, all other fields +# are treated as strings. 
+PAX_NUMBER_FIELDS = { + "atime": float, + "ctime": float, + "mtime": float, + "uid": int, + "gid": int, + "size": int +} + +#--------------------------------------------------------- +# Bits used in the mode field, values in octal. +#--------------------------------------------------------- +S_IFLNK = 0o120000 # symbolic link +S_IFREG = 0o100000 # regular file +S_IFBLK = 0o060000 # block device +S_IFDIR = 0o040000 # directory +S_IFCHR = 0o020000 # character device +S_IFIFO = 0o010000 # fifo + +TSUID = 0o4000 # set UID on execution +TSGID = 0o2000 # set GID on execution +TSVTX = 0o1000 # reserved + +TUREAD = 0o400 # read by owner +TUWRITE = 0o200 # write by owner +TUEXEC = 0o100 # execute/search by owner +TGREAD = 0o040 # read by group +TGWRITE = 0o020 # write by group +TGEXEC = 0o010 # execute/search by group +TOREAD = 0o004 # read by other +TOWRITE = 0o002 # write by other +TOEXEC = 0o001 # execute/search by other + +#--------------------------------------------------------- +# initialization +#--------------------------------------------------------- +if os.name in ("nt", "ce"): + ENCODING = "utf-8" +else: + ENCODING = sys.getfilesystemencoding() + +#--------------------------------------------------------- +# Some useful functions +#--------------------------------------------------------- + +def stn(s, length, encoding, errors): + """Convert a string to a null-terminated bytes object. + """ + s = s.encode(encoding, errors) + return s[:length] + (length - len(s)) * NUL + +def nts(s, encoding, errors): + """Convert a null-terminated bytes object to a string. + """ + p = s.find(b"\0") + if p != -1: + s = s[:p] + return s.decode(encoding, errors) + +def nti(s): + """Convert a number field to a python number. + """ + # There are two possible encodings for a number field, see + # itn() below. + if s[0] != chr(0o200): + try: + n = int(nts(s, "ascii", "strict") or "0", 8) + except ValueError: + raise InvalidHeaderError("invalid header") + else: + n = 0 + for i in range(len(s) - 1): + n <<= 8 + n += ord(s[i + 1]) + return n + +def itn(n, digits=8, format=DEFAULT_FORMAT): + """Convert a python number to a number field. + """ + # POSIX 1003.1-1988 requires numbers to be encoded as a string of + # octal digits followed by a null-byte, this allows values up to + # (8**(digits-1))-1. GNU tar allows storing numbers greater than + # that if necessary. A leading 0o200 byte indicates this particular + # encoding, the following digits-1 bytes are a big-endian + # representation. This allows values up to (256**(digits-1))-1. + if 0 <= n < 8 ** (digits - 1): + s = ("%0*o" % (digits - 1, n)).encode("ascii") + NUL + else: + if format != GNU_FORMAT or n >= 256 ** (digits - 1): + raise ValueError("overflow in number field") + + if n < 0: + # XXX We mimic GNU tar's behaviour with negative numbers, + # this could raise OverflowError. + n = struct.unpack("L", struct.pack("l", n))[0] + + s = bytearray() + for i in range(digits - 1): + s.insert(0, n & 0o377) + n >>= 8 + s.insert(0, 0o200) + return s + +def calc_chksums(buf): + """Calculate the checksum for a member's header by summing up all + characters except for the chksum field which is treated as if + it was filled with spaces. According to the GNU tar sources, + some tars (Sun and NeXT) calculate chksum with signed char, + which will be different if there are chars in the buffer with + the high bit set. So we calculate two checksums, unsigned and + signed. 
+ """ + unsigned_chksum = 256 + sum(struct.unpack("148B", buf[:148]) + struct.unpack("356B", buf[156:512])) + signed_chksum = 256 + sum(struct.unpack("148b", buf[:148]) + struct.unpack("356b", buf[156:512])) + return unsigned_chksum, signed_chksum + +def copyfileobj(src, dst, length=None): + """Copy length bytes from fileobj src to fileobj dst. + If length is None, copy the entire content. + """ + if length == 0: + return + if length is None: + while True: + buf = src.read(16*1024) + if not buf: + break + dst.write(buf) + return + + BUFSIZE = 16 * 1024 + blocks, remainder = divmod(length, BUFSIZE) + for b in range(blocks): + buf = src.read(BUFSIZE) + if len(buf) < BUFSIZE: + raise IOError("end of file reached") + dst.write(buf) + + if remainder != 0: + buf = src.read(remainder) + if len(buf) < remainder: + raise IOError("end of file reached") + dst.write(buf) + return + +filemode_table = ( + ((S_IFLNK, "l"), + (S_IFREG, "-"), + (S_IFBLK, "b"), + (S_IFDIR, "d"), + (S_IFCHR, "c"), + (S_IFIFO, "p")), + + ((TUREAD, "r"),), + ((TUWRITE, "w"),), + ((TUEXEC|TSUID, "s"), + (TSUID, "S"), + (TUEXEC, "x")), + + ((TGREAD, "r"),), + ((TGWRITE, "w"),), + ((TGEXEC|TSGID, "s"), + (TSGID, "S"), + (TGEXEC, "x")), + + ((TOREAD, "r"),), + ((TOWRITE, "w"),), + ((TOEXEC|TSVTX, "t"), + (TSVTX, "T"), + (TOEXEC, "x")) +) + +def filemode(mode): + """Convert a file's mode to a string of the form + -rwxrwxrwx. + Used by TarFile.list() + """ + perm = [] + for table in filemode_table: + for bit, char in table: + if mode & bit == bit: + perm.append(char) + break + else: + perm.append("-") + return "".join(perm) + +class TarError(Exception): + """Base exception.""" + pass +class ExtractError(TarError): + """General exception for extract errors.""" + pass +class ReadError(TarError): + """Exception for unreadable tar archives.""" + pass +class CompressionError(TarError): + """Exception for unavailable compression methods.""" + pass +class StreamError(TarError): + """Exception for unsupported operations on stream-like TarFiles.""" + pass +class HeaderError(TarError): + """Base exception for header errors.""" + pass +class EmptyHeaderError(HeaderError): + """Exception for empty headers.""" + pass +class TruncatedHeaderError(HeaderError): + """Exception for truncated headers.""" + pass +class EOFHeaderError(HeaderError): + """Exception for end of file headers.""" + pass +class InvalidHeaderError(HeaderError): + """Exception for invalid headers.""" + pass +class SubsequentHeaderError(HeaderError): + """Exception for missing and invalid extended headers.""" + pass + +#--------------------------- +# internal stream interface +#--------------------------- +class _LowLevelFile(object): + """Low-level file object. Supports reading and writing. + It is used instead of a regular file object for streaming + access. + """ + + def __init__(self, name, mode): + mode = { + "r": os.O_RDONLY, + "w": os.O_WRONLY | os.O_CREAT | os.O_TRUNC, + }[mode] + if hasattr(os, "O_BINARY"): + mode |= os.O_BINARY + self.fd = os.open(name, mode, 0o666) + + def close(self): + os.close(self.fd) + + def read(self, size): + return os.read(self.fd, size) + + def write(self, s): + os.write(self.fd, s) + +class _Stream(object): + """Class that serves as an adapter between TarFile and + a stream-like object. The stream-like object only + needs to have a read() or write() method and is accessed + blockwise. Use of gzip or bzip2 compression is possible. + A stream-like object could be for example: sys.stdin, + sys.stdout, a socket, a tape device etc. 
+ + _Stream is intended to be used only internally. + """ + + def __init__(self, name, mode, comptype, fileobj, bufsize): + """Construct a _Stream object. + """ + self._extfileobj = True + if fileobj is None: + fileobj = _LowLevelFile(name, mode) + self._extfileobj = False + + if comptype == '*': + # Enable transparent compression detection for the + # stream interface + fileobj = _StreamProxy(fileobj) + comptype = fileobj.getcomptype() + + self.name = name or "" + self.mode = mode + self.comptype = comptype + self.fileobj = fileobj + self.bufsize = bufsize + self.buf = b"" + self.pos = 0 + self.closed = False + + try: + if comptype == "gz": + try: + import zlib + except ImportError: + raise CompressionError("zlib module is not available") + self.zlib = zlib + self.crc = zlib.crc32(b"") + if mode == "r": + self._init_read_gz() + else: + self._init_write_gz() + + if comptype == "bz2": + try: + import bz2 + except ImportError: + raise CompressionError("bz2 module is not available") + if mode == "r": + self.dbuf = b"" + self.cmp = bz2.BZ2Decompressor() + else: + self.cmp = bz2.BZ2Compressor() + except: + if not self._extfileobj: + self.fileobj.close() + self.closed = True + raise + + def __del__(self): + if hasattr(self, "closed") and not self.closed: + self.close() + + def _init_write_gz(self): + """Initialize for writing with gzip compression. + """ + self.cmp = self.zlib.compressobj(9, self.zlib.DEFLATED, + -self.zlib.MAX_WBITS, + self.zlib.DEF_MEM_LEVEL, + 0) + timestamp = struct.pack("<L", int(time.time())) + self.__write(b"\037\213\010\010" + timestamp + b"\002\377") + if self.name.endswith(".gz"): + self.name = self.name[:-3] + # RFC1952 says we must use ISO-8859-1 for the FNAME field. + self.__write(self.name.encode("iso-8859-1", "replace") + NUL) + + def write(self, s): + """Write string s to the stream. + """ + if self.comptype == "gz": + self.crc = self.zlib.crc32(s, self.crc) + self.pos += len(s) + if self.comptype != "tar": + s = self.cmp.compress(s) + self.__write(s) + + def __write(self, s): + """Write string s to the stream if a whole new block + is ready to be written. + """ + self.buf += s + while len(self.buf) > self.bufsize: + self.fileobj.write(self.buf[:self.bufsize]) + self.buf = self.buf[self.bufsize:] + + def close(self): + """Close the _Stream object. No operation should be + done on it afterwards. + """ + if self.closed: + return + + if self.mode == "w" and self.comptype != "tar": + self.buf += self.cmp.flush() + + if self.mode == "w" and self.buf: + self.fileobj.write(self.buf) + self.buf = b"" + if self.comptype == "gz": + # The native zlib crc is an unsigned 32-bit integer, but + # the Python wrapper implicitly casts that to a signed C + # long. So, on a 32-bit box self.crc may "look negative", + # while the same crc on a 64-bit box may "look positive". + # To avoid irksome warnings from the `struct` module, force + # it to look positive on all boxes. + self.fileobj.write(struct.pack("<L", self.crc & 0xffffffff)) + self.fileobj.write(struct.pack("<L", self.pos & 0xffffFFFF)) + + if not self._extfileobj: + self.fileobj.close() + + self.closed = True + + def _init_read_gz(self): + """Initialize for reading a gzip compressed fileobj. 
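+
+        Editor's note (not part of the upstream module): the parsing below
+        follows the RFC 1952 gzip header layout; the flag bits tested are
+        FHCRC=2, FEXTRA=4, FNAME=8 and FCOMMENT=16, each marking an optional
+        field that is skipped before the compressed data begins.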
+ """ + self.cmp = self.zlib.decompressobj(-self.zlib.MAX_WBITS) + self.dbuf = b"" + + # taken from gzip.GzipFile with some alterations + if self.__read(2) != b"\037\213": + raise ReadError("not a gzip file") + if self.__read(1) != b"\010": + raise CompressionError("unsupported compression method") + + flag = ord(self.__read(1)) + self.__read(6) + + if flag & 4: + xlen = ord(self.__read(1)) + 256 * ord(self.__read(1)) + self.read(xlen) + if flag & 8: + while True: + s = self.__read(1) + if not s or s == NUL: + break + if flag & 16: + while True: + s = self.__read(1) + if not s or s == NUL: + break + if flag & 2: + self.__read(2) + + def tell(self): + """Return the stream's file pointer position. + """ + return self.pos + + def seek(self, pos=0): + """Set the stream's file pointer to pos. Negative seeking + is forbidden. + """ + if pos - self.pos >= 0: + blocks, remainder = divmod(pos - self.pos, self.bufsize) + for i in range(blocks): + self.read(self.bufsize) + self.read(remainder) + else: + raise StreamError("seeking backwards is not allowed") + return self.pos + + def read(self, size=None): + """Return the next size number of bytes from the stream. + If size is not defined, return all bytes of the stream + up to EOF. + """ + if size is None: + t = [] + while True: + buf = self._read(self.bufsize) + if not buf: + break + t.append(buf) + buf = "".join(t) + else: + buf = self._read(size) + self.pos += len(buf) + return buf + + def _read(self, size): + """Return size bytes from the stream. + """ + if self.comptype == "tar": + return self.__read(size) + + c = len(self.dbuf) + while c < size: + buf = self.__read(self.bufsize) + if not buf: + break + try: + buf = self.cmp.decompress(buf) + except IOError: + raise ReadError("invalid compressed data") + self.dbuf += buf + c += len(buf) + buf = self.dbuf[:size] + self.dbuf = self.dbuf[size:] + return buf + + def __read(self, size): + """Return size bytes from stream. If internal buffer is empty, + read another block from the stream. + """ + c = len(self.buf) + while c < size: + buf = self.fileobj.read(self.bufsize) + if not buf: + break + self.buf += buf + c += len(buf) + buf = self.buf[:size] + self.buf = self.buf[size:] + return buf +# class _Stream + +class _StreamProxy(object): + """Small proxy class that enables transparent compression + detection for the Stream interface (mode 'r|*'). + """ + + def __init__(self, fileobj): + self.fileobj = fileobj + self.buf = self.fileobj.read(BLOCKSIZE) + + def read(self, size): + self.read = self.fileobj.read + return self.buf + + def getcomptype(self): + if self.buf.startswith(b"\037\213\010"): + return "gz" + if self.buf.startswith(b"BZh91"): + return "bz2" + return "tar" + + def close(self): + self.fileobj.close() +# class StreamProxy + +class _BZ2Proxy(object): + """Small proxy class that enables external file object + support for "r:bz2" and "w:bz2" modes. This is actually + a workaround for a limitation in bz2 module's BZ2File + class which (unlike gzip.GzipFile) has no support for + a file object argument. 
+ """ + + blocksize = 16 * 1024 + + def __init__(self, fileobj, mode): + self.fileobj = fileobj + self.mode = mode + self.name = getattr(self.fileobj, "name", None) + self.init() + + def init(self): + import bz2 + self.pos = 0 + if self.mode == "r": + self.bz2obj = bz2.BZ2Decompressor() + self.fileobj.seek(0) + self.buf = b"" + else: + self.bz2obj = bz2.BZ2Compressor() + + def read(self, size): + x = len(self.buf) + while x < size: + raw = self.fileobj.read(self.blocksize) + if not raw: + break + data = self.bz2obj.decompress(raw) + self.buf += data + x += len(data) + + buf = self.buf[:size] + self.buf = self.buf[size:] + self.pos += len(buf) + return buf + + def seek(self, pos): + if pos < self.pos: + self.init() + self.read(pos - self.pos) + + def tell(self): + return self.pos + + def write(self, data): + self.pos += len(data) + raw = self.bz2obj.compress(data) + self.fileobj.write(raw) + + def close(self): + if self.mode == "w": + raw = self.bz2obj.flush() + self.fileobj.write(raw) +# class _BZ2Proxy + +#------------------------ +# Extraction file object +#------------------------ +class _FileInFile(object): + """A thin wrapper around an existing file object that + provides a part of its data as an individual file + object. + """ + + def __init__(self, fileobj, offset, size, blockinfo=None): + self.fileobj = fileobj + self.offset = offset + self.size = size + self.position = 0 + + if blockinfo is None: + blockinfo = [(0, size)] + + # Construct a map with data and zero blocks. + self.map_index = 0 + self.map = [] + lastpos = 0 + realpos = self.offset + for offset, size in blockinfo: + if offset > lastpos: + self.map.append((False, lastpos, offset, None)) + self.map.append((True, offset, offset + size, realpos)) + realpos += size + lastpos = offset + size + if lastpos < self.size: + self.map.append((False, lastpos, self.size, None)) + + def seekable(self): + if not hasattr(self.fileobj, "seekable"): + # XXX gzip.GzipFile and bz2.BZ2File + return True + return self.fileobj.seekable() + + def tell(self): + """Return the current file position. + """ + return self.position + + def seek(self, position): + """Seek to a position in the file. + """ + self.position = position + + def read(self, size=None): + """Read data from the file. + """ + if size is None: + size = self.size - self.position + else: + size = min(size, self.size - self.position) + + buf = b"" + while size > 0: + while True: + data, start, stop, offset = self.map[self.map_index] + if start <= self.position < stop: + break + else: + self.map_index += 1 + if self.map_index == len(self.map): + self.map_index = 0 + length = min(size, stop - self.position) + if data: + self.fileobj.seek(offset + (self.position - start)) + buf += self.fileobj.read(length) + else: + buf += NUL * length + size -= length + self.position += length + return buf +#class _FileInFile + + +class ExFileObject(object): + """File-like object for reading an archive member. + Is returned by TarFile.extractfile(). + """ + blocksize = 1024 + + def __init__(self, tarfile, tarinfo): + self.fileobj = _FileInFile(tarfile.fileobj, + tarinfo.offset_data, + tarinfo.size, + tarinfo.sparse) + self.name = tarinfo.name + self.mode = "r" + self.closed = False + self.size = tarinfo.size + + self.position = 0 + self.buffer = b"" + + def readable(self): + return True + + def writable(self): + return False + + def seekable(self): + return self.fileobj.seekable() + + def read(self, size=None): + """Read at most size bytes from the file. 
If size is not + present or None, read all data until EOF is reached. + """ + if self.closed: + raise ValueError("I/O operation on closed file") + + buf = b"" + if self.buffer: + if size is None: + buf = self.buffer + self.buffer = b"" + else: + buf = self.buffer[:size] + self.buffer = self.buffer[size:] + + if size is None: + buf += self.fileobj.read() + else: + buf += self.fileobj.read(size - len(buf)) + + self.position += len(buf) + return buf + + # XXX TextIOWrapper uses the read1() method. + read1 = read + + def readline(self, size=-1): + """Read one entire line from the file. If size is present + and non-negative, return a string with at most that + size, which may be an incomplete line. + """ + if self.closed: + raise ValueError("I/O operation on closed file") + + pos = self.buffer.find(b"\n") + 1 + if pos == 0: + # no newline found. + while True: + buf = self.fileobj.read(self.blocksize) + self.buffer += buf + if not buf or b"\n" in buf: + pos = self.buffer.find(b"\n") + 1 + if pos == 0: + # no newline found. + pos = len(self.buffer) + break + + if size != -1: + pos = min(size, pos) + + buf = self.buffer[:pos] + self.buffer = self.buffer[pos:] + self.position += len(buf) + return buf + + def readlines(self): + """Return a list with all remaining lines. + """ + result = [] + while True: + line = self.readline() + if not line: break + result.append(line) + return result + + def tell(self): + """Return the current file position. + """ + if self.closed: + raise ValueError("I/O operation on closed file") + + return self.position + + def seek(self, pos, whence=os.SEEK_SET): + """Seek to a position in the file. + """ + if self.closed: + raise ValueError("I/O operation on closed file") + + if whence == os.SEEK_SET: + self.position = min(max(pos, 0), self.size) + elif whence == os.SEEK_CUR: + if pos < 0: + self.position = max(self.position + pos, 0) + else: + self.position = min(self.position + pos, self.size) + elif whence == os.SEEK_END: + self.position = max(min(self.size + pos, self.size), 0) + else: + raise ValueError("Invalid argument") + + self.buffer = b"" + self.fileobj.seek(self.position) + + def close(self): + """Close the file object. + """ + self.closed = True + + def __iter__(self): + """Get an iterator over the file's lines. + """ + while True: + line = self.readline() + if not line: + break + yield line +#class ExFileObject + +#------------------ +# Exported Classes +#------------------ +class TarInfo(object): + """Informational class which holds the details about an + archive member given by a tar header block. + TarInfo objects are returned by TarFile.getmember(), + TarFile.getmembers() and TarFile.gettarinfo() and are + usually created internally. + """ + + __slots__ = ("name", "mode", "uid", "gid", "size", "mtime", + "chksum", "type", "linkname", "uname", "gname", + "devmajor", "devminor", + "offset", "offset_data", "pax_headers", "sparse", + "tarfile", "_sparse_structs", "_link_target") + + def __init__(self, name=""): + """Construct a TarInfo object. name is the optional name + of the member. 
+ """ + self.name = name # member name + self.mode = 0o644 # file permissions + self.uid = 0 # user id + self.gid = 0 # group id + self.size = 0 # file size + self.mtime = 0 # modification time + self.chksum = 0 # header checksum + self.type = REGTYPE # member type + self.linkname = "" # link name + self.uname = "" # user name + self.gname = "" # group name + self.devmajor = 0 # device major number + self.devminor = 0 # device minor number + + self.offset = 0 # the tar header starts here + self.offset_data = 0 # the file's data starts here + + self.sparse = None # sparse member information + self.pax_headers = {} # pax header information + + # In pax headers the "name" and "linkname" field are called + # "path" and "linkpath". + def _getpath(self): + return self.name + def _setpath(self, name): + self.name = name + path = property(_getpath, _setpath) + + def _getlinkpath(self): + return self.linkname + def _setlinkpath(self, linkname): + self.linkname = linkname + linkpath = property(_getlinkpath, _setlinkpath) + + def __repr__(self): + return "<%s %r at %#x>" % (self.__class__.__name__,self.name,id(self)) + + def get_info(self): + """Return the TarInfo's attributes as a dictionary. + """ + info = { + "name": self.name, + "mode": self.mode & 0o7777, + "uid": self.uid, + "gid": self.gid, + "size": self.size, + "mtime": self.mtime, + "chksum": self.chksum, + "type": self.type, + "linkname": self.linkname, + "uname": self.uname, + "gname": self.gname, + "devmajor": self.devmajor, + "devminor": self.devminor + } + + if info["type"] == DIRTYPE and not info["name"].endswith("/"): + info["name"] += "/" + + return info + + def tobuf(self, format=DEFAULT_FORMAT, encoding=ENCODING, errors="surrogateescape"): + """Return a tar header as a string of 512 byte blocks. + """ + info = self.get_info() + + if format == USTAR_FORMAT: + return self.create_ustar_header(info, encoding, errors) + elif format == GNU_FORMAT: + return self.create_gnu_header(info, encoding, errors) + elif format == PAX_FORMAT: + return self.create_pax_header(info, encoding) + else: + raise ValueError("invalid format") + + def create_ustar_header(self, info, encoding, errors): + """Return the object as a ustar header block. + """ + info["magic"] = POSIX_MAGIC + + if len(info["linkname"]) > LENGTH_LINK: + raise ValueError("linkname is too long") + + if len(info["name"]) > LENGTH_NAME: + info["prefix"], info["name"] = self._posix_split_name(info["name"]) + + return self._create_header(info, USTAR_FORMAT, encoding, errors) + + def create_gnu_header(self, info, encoding, errors): + """Return the object as a GNU header block sequence. + """ + info["magic"] = GNU_MAGIC + + buf = b"" + if len(info["linkname"]) > LENGTH_LINK: + buf += self._create_gnu_long_header(info["linkname"], GNUTYPE_LONGLINK, encoding, errors) + + if len(info["name"]) > LENGTH_NAME: + buf += self._create_gnu_long_header(info["name"], GNUTYPE_LONGNAME, encoding, errors) + + return buf + self._create_header(info, GNU_FORMAT, encoding, errors) + + def create_pax_header(self, info, encoding): + """Return the object as a ustar header block. If it cannot be + represented this way, prepend a pax extended header sequence + with supplement information. + """ + info["magic"] = POSIX_MAGIC + pax_headers = self.pax_headers.copy() + + # Test string fields for values that exceed the field length or cannot + # be represented in ASCII encoding. 
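+        # Editor's note (illustrative, not part of the upstream module): e.g. a
+        # member name longer than 100 characters, or one containing non-ASCII
+        # text, is stored in pax_headers['path'] and written out as a pax
+        # extended header record instead of being silently truncated.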
+ for name, hname, length in ( + ("name", "path", LENGTH_NAME), ("linkname", "linkpath", LENGTH_LINK), + ("uname", "uname", 32), ("gname", "gname", 32)): + + if hname in pax_headers: + # The pax header has priority. + continue + + # Try to encode the string as ASCII. + try: + info[name].encode("ascii", "strict") + except UnicodeEncodeError: + pax_headers[hname] = info[name] + continue + + if len(info[name]) > length: + pax_headers[hname] = info[name] + + # Test number fields for values that exceed the field limit or values + # that like to be stored as float. + for name, digits in (("uid", 8), ("gid", 8), ("size", 12), ("mtime", 12)): + if name in pax_headers: + # The pax header has priority. Avoid overflow. + info[name] = 0 + continue + + val = info[name] + if not 0 <= val < 8 ** (digits - 1) or isinstance(val, float): + pax_headers[name] = str(val) + info[name] = 0 + + # Create a pax extended header if necessary. + if pax_headers: + buf = self._create_pax_generic_header(pax_headers, XHDTYPE, encoding) + else: + buf = b"" + + return buf + self._create_header(info, USTAR_FORMAT, "ascii", "replace") + + @classmethod + def create_pax_global_header(cls, pax_headers): + """Return the object as a pax global header block sequence. + """ + return cls._create_pax_generic_header(pax_headers, XGLTYPE, "utf8") + + def _posix_split_name(self, name): + """Split a name longer than 100 chars into a prefix + and a name part. + """ + prefix = name[:LENGTH_PREFIX + 1] + while prefix and prefix[-1] != "/": + prefix = prefix[:-1] + + name = name[len(prefix):] + prefix = prefix[:-1] + + if not prefix or len(name) > LENGTH_NAME: + raise ValueError("name is too long") + return prefix, name + + @staticmethod + def _create_header(info, format, encoding, errors): + """Return a header block. info is a dictionary with file + information, format must be one of the *_FORMAT constants. + """ + parts = [ + stn(info.get("name", ""), 100, encoding, errors), + itn(info.get("mode", 0) & 0o7777, 8, format), + itn(info.get("uid", 0), 8, format), + itn(info.get("gid", 0), 8, format), + itn(info.get("size", 0), 12, format), + itn(info.get("mtime", 0), 12, format), + b" ", # checksum field + info.get("type", REGTYPE), + stn(info.get("linkname", ""), 100, encoding, errors), + info.get("magic", POSIX_MAGIC), + stn(info.get("uname", ""), 32, encoding, errors), + stn(info.get("gname", ""), 32, encoding, errors), + itn(info.get("devmajor", 0), 8, format), + itn(info.get("devminor", 0), 8, format), + stn(info.get("prefix", ""), 155, encoding, errors) + ] + + buf = struct.pack("%ds" % BLOCKSIZE, b"".join(parts)) + chksum = calc_chksums(buf[-BLOCKSIZE:])[0] + buf = buf[:-364] + ("%06o\0" % chksum).encode("ascii") + buf[-357:] + return buf + + @staticmethod + def _create_payload(payload): + """Return the string payload filled with zero bytes + up to the next 512 byte border. + """ + blocks, remainder = divmod(len(payload), BLOCKSIZE) + if remainder > 0: + payload += (BLOCKSIZE - remainder) * NUL + return payload + + @classmethod + def _create_gnu_long_header(cls, name, type, encoding, errors): + """Return a GNUTYPE_LONGNAME or GNUTYPE_LONGLINK sequence + for name. + """ + name = name.encode(encoding, errors) + NUL + + info = {} + info["name"] = "././@LongLink" + info["type"] = type + info["size"] = len(name) + info["magic"] = GNU_MAGIC + + # create extended header + name blocks. 
+ return cls._create_header(info, USTAR_FORMAT, encoding, errors) + \ + cls._create_payload(name) + + @classmethod + def _create_pax_generic_header(cls, pax_headers, type, encoding): + """Return a POSIX.1-2008 extended or global header sequence + that contains a list of keyword, value pairs. The values + must be strings. + """ + # Check if one of the fields contains surrogate characters and thereby + # forces hdrcharset=BINARY, see _proc_pax() for more information. + binary = False + for keyword, value in pax_headers.items(): + try: + value.encode("utf8", "strict") + except UnicodeEncodeError: + binary = True + break + + records = b"" + if binary: + # Put the hdrcharset field at the beginning of the header. + records += b"21 hdrcharset=BINARY\n" + + for keyword, value in pax_headers.items(): + keyword = keyword.encode("utf8") + if binary: + # Try to restore the original byte representation of `value'. + # Needless to say, that the encoding must match the string. + value = value.encode(encoding, "surrogateescape") + else: + value = value.encode("utf8") + + l = len(keyword) + len(value) + 3 # ' ' + '=' + '\n' + n = p = 0 + while True: + n = l + len(str(p)) + if n == p: + break + p = n + records += bytes(str(p), "ascii") + b" " + keyword + b"=" + value + b"\n" + + # We use a hardcoded "././@PaxHeader" name like star does + # instead of the one that POSIX recommends. + info = {} + info["name"] = "././@PaxHeader" + info["type"] = type + info["size"] = len(records) + info["magic"] = POSIX_MAGIC + + # Create pax header + record blocks. + return cls._create_header(info, USTAR_FORMAT, "ascii", "replace") + \ + cls._create_payload(records) + + @classmethod + def frombuf(cls, buf, encoding, errors): + """Construct a TarInfo object from a 512 byte bytes object. + """ + if len(buf) == 0: + raise EmptyHeaderError("empty header") + if len(buf) != BLOCKSIZE: + raise TruncatedHeaderError("truncated header") + if buf.count(NUL) == BLOCKSIZE: + raise EOFHeaderError("end of file header") + + chksum = nti(buf[148:156]) + if chksum not in calc_chksums(buf): + raise InvalidHeaderError("bad checksum") + + obj = cls() + obj.name = nts(buf[0:100], encoding, errors) + obj.mode = nti(buf[100:108]) + obj.uid = nti(buf[108:116]) + obj.gid = nti(buf[116:124]) + obj.size = nti(buf[124:136]) + obj.mtime = nti(buf[136:148]) + obj.chksum = chksum + obj.type = buf[156:157] + obj.linkname = nts(buf[157:257], encoding, errors) + obj.uname = nts(buf[265:297], encoding, errors) + obj.gname = nts(buf[297:329], encoding, errors) + obj.devmajor = nti(buf[329:337]) + obj.devminor = nti(buf[337:345]) + prefix = nts(buf[345:500], encoding, errors) + + # Old V7 tar format represents a directory as a regular + # file with a trailing slash. + if obj.type == AREGTYPE and obj.name.endswith("/"): + obj.type = DIRTYPE + + # The old GNU sparse format occupies some of the unused + # space in the buffer for up to 4 sparse structures. + # Save the them for later processing in _proc_sparse(). + if obj.type == GNUTYPE_SPARSE: + pos = 386 + structs = [] + for i in range(4): + try: + offset = nti(buf[pos:pos + 12]) + numbytes = nti(buf[pos + 12:pos + 24]) + except ValueError: + break + structs.append((offset, numbytes)) + pos += 24 + isextended = bool(buf[482]) + origsize = nti(buf[483:495]) + obj._sparse_structs = (structs, isextended, origsize) + + # Remove redundant slashes from directories. + if obj.isdir(): + obj.name = obj.name.rstrip("/") + + # Reconstruct a ustar longname. 
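+        # Editor's note (illustrative, not part of the upstream module): e.g.
+        # prefix='usr/share/doc' with name='README' is rejoined below into
+        # 'usr/share/doc/README', undoing the ustar prefix/name split.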
+ if prefix and obj.type not in GNU_TYPES: + obj.name = prefix + "/" + obj.name + return obj + + @classmethod + def fromtarfile(cls, tarfile): + """Return the next TarInfo object from TarFile object + tarfile. + """ + buf = tarfile.fileobj.read(BLOCKSIZE) + obj = cls.frombuf(buf, tarfile.encoding, tarfile.errors) + obj.offset = tarfile.fileobj.tell() - BLOCKSIZE + return obj._proc_member(tarfile) + + #-------------------------------------------------------------------------- + # The following are methods that are called depending on the type of a + # member. The entry point is _proc_member() which can be overridden in a + # subclass to add custom _proc_*() methods. A _proc_*() method MUST + # implement the following + # operations: + # 1. Set self.offset_data to the position where the data blocks begin, + # if there is data that follows. + # 2. Set tarfile.offset to the position where the next member's header will + # begin. + # 3. Return self or another valid TarInfo object. + def _proc_member(self, tarfile): + """Choose the right processing method depending on + the type and call it. + """ + if self.type in (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK): + return self._proc_gnulong(tarfile) + elif self.type == GNUTYPE_SPARSE: + return self._proc_sparse(tarfile) + elif self.type in (XHDTYPE, XGLTYPE, SOLARIS_XHDTYPE): + return self._proc_pax(tarfile) + else: + return self._proc_builtin(tarfile) + + def _proc_builtin(self, tarfile): + """Process a builtin type or an unknown type which + will be treated as a regular file. + """ + self.offset_data = tarfile.fileobj.tell() + offset = self.offset_data + if self.isreg() or self.type not in SUPPORTED_TYPES: + # Skip the following data blocks. + offset += self._block(self.size) + tarfile.offset = offset + + # Patch the TarInfo object with saved global + # header information. + self._apply_pax_info(tarfile.pax_headers, tarfile.encoding, tarfile.errors) + + return self + + def _proc_gnulong(self, tarfile): + """Process the blocks that hold a GNU longname + or longlink member. + """ + buf = tarfile.fileobj.read(self._block(self.size)) + + # Fetch the next header and process it. + try: + next = self.fromtarfile(tarfile) + except HeaderError: + raise SubsequentHeaderError("missing or bad subsequent header") + + # Patch the TarInfo object from the next header with + # the longname information. + next.offset = self.offset + if self.type == GNUTYPE_LONGNAME: + next.name = nts(buf, tarfile.encoding, tarfile.errors) + elif self.type == GNUTYPE_LONGLINK: + next.linkname = nts(buf, tarfile.encoding, tarfile.errors) + + return next + + def _proc_sparse(self, tarfile): + """Process a GNU sparse header plus extra headers. + """ + # We already collected some sparse structures in frombuf(). + structs, isextended, origsize = self._sparse_structs + del self._sparse_structs + + # Collect sparse structures from extended header blocks. + while isextended: + buf = tarfile.fileobj.read(BLOCKSIZE) + pos = 0 + for i in range(21): + try: + offset = nti(buf[pos:pos + 12]) + numbytes = nti(buf[pos + 12:pos + 24]) + except ValueError: + break + if offset and numbytes: + structs.append((offset, numbytes)) + pos += 24 + isextended = bool(buf[504]) + self.sparse = structs + + self.offset_data = tarfile.fileobj.tell() + tarfile.offset = self.offset_data + self._block(self.size) + self.size = origsize + return self + + def _proc_pax(self, tarfile): + """Process an extended or global header as described in + POSIX.1-2008. + """ + # Read the header information. 
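+ # The extended header's payload (self.size bytes, rounded up to a full
+ # 512-byte block) holds the "<length> <keyword>=<value>\n" records; read
+ # it completely before parsing.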
+ buf = tarfile.fileobj.read(self._block(self.size)) + + # A pax header stores supplemental information for either + # the following file (extended) or all following files + # (global). + if self.type == XGLTYPE: + pax_headers = tarfile.pax_headers + else: + pax_headers = tarfile.pax_headers.copy() + + # Check if the pax header contains a hdrcharset field. This tells us + # the encoding of the path, linkpath, uname and gname fields. Normally, + # these fields are UTF-8 encoded but since POSIX.1-2008 tar + # implementations are allowed to store them as raw binary strings if + # the translation to UTF-8 fails. + match = re.search(br"\d+ hdrcharset=([^\n]+)\n", buf) + if match is not None: + pax_headers["hdrcharset"] = match.group(1).decode("utf8") + + # For the time being, we don't care about anything other than "BINARY". + # The only other value that is currently allowed by the standard is + # "ISO-IR 10646 2000 UTF-8" in other words UTF-8. + hdrcharset = pax_headers.get("hdrcharset") + if hdrcharset == "BINARY": + encoding = tarfile.encoding + else: + encoding = "utf8" + + # Parse pax header information. A record looks like that: + # "%d %s=%s\n" % (length, keyword, value). length is the size + # of the complete record including the length field itself and + # the newline. keyword and value are both UTF-8 encoded strings. + regex = re.compile(br"(\d+) ([^=]+)=") + pos = 0 + while True: + match = regex.match(buf, pos) + if not match: + break + + length, keyword = match.groups() + length = int(length) + value = buf[match.end(2) + 1:match.start(1) + length - 1] + + # Normally, we could just use "utf8" as the encoding and "strict" + # as the error handler, but we better not take the risk. For + # example, GNU tar <= 1.23 is known to store filenames it cannot + # translate to UTF-8 as raw strings (unfortunately without a + # hdrcharset=BINARY header). + # We first try the strict standard encoding, and if that fails we + # fall back on the user's encoding and error handler. + keyword = self._decode_pax_field(keyword, "utf8", "utf8", + tarfile.errors) + if keyword in PAX_NAME_FIELDS: + value = self._decode_pax_field(value, encoding, tarfile.encoding, + tarfile.errors) + else: + value = self._decode_pax_field(value, "utf8", "utf8", + tarfile.errors) + + pax_headers[keyword] = value + pos += length + + # Fetch the next header. + try: + next = self.fromtarfile(tarfile) + except HeaderError: + raise SubsequentHeaderError("missing or bad subsequent header") + + # Process GNU sparse information. + if "GNU.sparse.map" in pax_headers: + # GNU extended sparse format version 0.1. + self._proc_gnusparse_01(next, pax_headers) + + elif "GNU.sparse.size" in pax_headers: + # GNU extended sparse format version 0.0. + self._proc_gnusparse_00(next, pax_headers, buf) + + elif pax_headers.get("GNU.sparse.major") == "1" and pax_headers.get("GNU.sparse.minor") == "0": + # GNU extended sparse format version 1.0. + self._proc_gnusparse_10(next, pax_headers, tarfile) + + if self.type in (XHDTYPE, SOLARIS_XHDTYPE): + # Patch the TarInfo object with the extended header info. + next._apply_pax_info(pax_headers, tarfile.encoding, tarfile.errors) + next.offset = self.offset + + if "size" in pax_headers: + # If the extended header replaces the size field, + # we need to recalculate the offset where the next + # header starts. 
+ offset = next.offset_data + if next.isreg() or next.type not in SUPPORTED_TYPES: + offset += next._block(next.size) + tarfile.offset = offset + + return next + + def _proc_gnusparse_00(self, next, pax_headers, buf): + """Process a GNU tar extended sparse header, version 0.0. + """ + offsets = [] + for match in re.finditer(br"\d+ GNU.sparse.offset=(\d+)\n", buf): + offsets.append(int(match.group(1))) + numbytes = [] + for match in re.finditer(br"\d+ GNU.sparse.numbytes=(\d+)\n", buf): + numbytes.append(int(match.group(1))) + next.sparse = list(zip(offsets, numbytes)) + + def _proc_gnusparse_01(self, next, pax_headers): + """Process a GNU tar extended sparse header, version 0.1. + """ + sparse = [int(x) for x in pax_headers["GNU.sparse.map"].split(",")] + next.sparse = list(zip(sparse[::2], sparse[1::2])) + + def _proc_gnusparse_10(self, next, pax_headers, tarfile): + """Process a GNU tar extended sparse header, version 1.0. + """ + fields = None + sparse = [] + buf = tarfile.fileobj.read(BLOCKSIZE) + fields, buf = buf.split(b"\n", 1) + fields = int(fields) + while len(sparse) < fields * 2: + if b"\n" not in buf: + buf += tarfile.fileobj.read(BLOCKSIZE) + number, buf = buf.split(b"\n", 1) + sparse.append(int(number)) + next.offset_data = tarfile.fileobj.tell() + next.sparse = list(zip(sparse[::2], sparse[1::2])) + + def _apply_pax_info(self, pax_headers, encoding, errors): + """Replace fields with supplemental information from a previous + pax extended or global header. + """ + for keyword, value in pax_headers.items(): + if keyword == "GNU.sparse.name": + setattr(self, "path", value) + elif keyword == "GNU.sparse.size": + setattr(self, "size", int(value)) + elif keyword == "GNU.sparse.realsize": + setattr(self, "size", int(value)) + elif keyword in PAX_FIELDS: + if keyword in PAX_NUMBER_FIELDS: + try: + value = PAX_NUMBER_FIELDS[keyword](value) + except ValueError: + value = 0 + if keyword == "path": + value = value.rstrip("/") + setattr(self, keyword, value) + + self.pax_headers = pax_headers.copy() + + def _decode_pax_field(self, value, encoding, fallback_encoding, fallback_errors): + """Decode a single field from a pax record. + """ + try: + return value.decode(encoding, "strict") + except UnicodeDecodeError: + return value.decode(fallback_encoding, fallback_errors) + + def _block(self, count): + """Round up a byte count by BLOCKSIZE and return it, + e.g. _block(834) => 1024. + """ + blocks, remainder = divmod(count, BLOCKSIZE) + if remainder: + blocks += 1 + return blocks * BLOCKSIZE + + def isreg(self): + return self.type in REGULAR_TYPES + def isfile(self): + return self.isreg() + def isdir(self): + return self.type == DIRTYPE + def issym(self): + return self.type == SYMTYPE + def islnk(self): + return self.type == LNKTYPE + def ischr(self): + return self.type == CHRTYPE + def isblk(self): + return self.type == BLKTYPE + def isfifo(self): + return self.type == FIFOTYPE + def issparse(self): + return self.sparse is not None + def isdev(self): + return self.type in (CHRTYPE, BLKTYPE, FIFOTYPE) +# class TarInfo + +class TarFile(object): + """The TarFile Class provides an interface to tar archives. + """ + + debug = 0 # May be set from 0 (no msgs) to 3 (all msgs) + + dereference = False # If true, add content of linked file to the + # tar file, else the link. + + ignore_zeros = False # If true, skips empty or invalid blocks and + # continues processing. + + errorlevel = 1 # If 0, fatal errors only appear in debug + # messages (if debug >= 0). 
If > 0, errors + # are passed to the caller as exceptions. + + format = DEFAULT_FORMAT # The format to use when creating an archive. + + encoding = ENCODING # Encoding for 8-bit character strings. + + errors = None # Error handler for unicode conversion. + + tarinfo = TarInfo # The default TarInfo class to use. + + fileobject = ExFileObject # The default ExFileObject class to use. + + def __init__(self, name=None, mode="r", fileobj=None, format=None, + tarinfo=None, dereference=None, ignore_zeros=None, encoding=None, + errors="surrogateescape", pax_headers=None, debug=None, errorlevel=None): + """Open an (uncompressed) tar archive `name'. `mode' is either 'r' to + read from an existing archive, 'a' to append data to an existing + file or 'w' to create a new file overwriting an existing one. `mode' + defaults to 'r'. + If `fileobj' is given, it is used for reading or writing data. If it + can be determined, `mode' is overridden by `fileobj's mode. + `fileobj' is not closed, when TarFile is closed. + """ + if len(mode) > 1 or mode not in "raw": + raise ValueError("mode must be 'r', 'a' or 'w'") + self.mode = mode + self._mode = {"r": "rb", "a": "r+b", "w": "wb"}[mode] + + if not fileobj: + if self.mode == "a" and not os.path.exists(name): + # Create nonexistent files in append mode. + self.mode = "w" + self._mode = "wb" + fileobj = bltn_open(name, self._mode) + self._extfileobj = False + else: + if name is None and hasattr(fileobj, "name"): + name = fileobj.name + if hasattr(fileobj, "mode"): + self._mode = fileobj.mode + self._extfileobj = True + self.name = os.path.abspath(name) if name else None + self.fileobj = fileobj + + # Init attributes. + if format is not None: + self.format = format + if tarinfo is not None: + self.tarinfo = tarinfo + if dereference is not None: + self.dereference = dereference + if ignore_zeros is not None: + self.ignore_zeros = ignore_zeros + if encoding is not None: + self.encoding = encoding + self.errors = errors + + if pax_headers is not None and self.format == PAX_FORMAT: + self.pax_headers = pax_headers + else: + self.pax_headers = {} + + if debug is not None: + self.debug = debug + if errorlevel is not None: + self.errorlevel = errorlevel + + # Init datastructures. + self.closed = False + self.members = [] # list of members as TarInfo objects + self._loaded = False # flag if all members have been read + self.offset = self.fileobj.tell() + # current position in the archive file + self.inodes = {} # dictionary caching the inodes of + # archive members already added + + try: + if self.mode == "r": + self.firstmember = None + self.firstmember = self.next() + + if self.mode == "a": + # Move to the end of the archive, + # before the first empty block. + while True: + self.fileobj.seek(self.offset) + try: + tarinfo = self.tarinfo.fromtarfile(self) + self.members.append(tarinfo) + except EOFHeaderError: + self.fileobj.seek(self.offset) + break + except HeaderError as e: + raise ReadError(str(e)) + + if self.mode in "aw": + self._loaded = True + + if self.pax_headers: + buf = self.tarinfo.create_pax_global_header(self.pax_headers.copy()) + self.fileobj.write(buf) + self.offset += len(buf) + except: + if not self._extfileobj: + self.fileobj.close() + self.closed = True + raise + + #-------------------------------------------------------------------------- + # Below are the classmethods which act as alternate constructors to the + # TarFile class. 
The open() method is the only one that is needed for + # public use; it is the "super"-constructor and is able to select an + # adequate "sub"-constructor for a particular compression using the mapping + # from OPEN_METH. + # + # This concept allows one to subclass TarFile without losing the comfort of + # the super-constructor. A sub-constructor is registered and made available + # by adding it to the mapping in OPEN_METH. + + @classmethod + def open(cls, name=None, mode="r", fileobj=None, bufsize=RECORDSIZE, **kwargs): + """Open a tar archive for reading, writing or appending. Return + an appropriate TarFile class. + + mode: + 'r' or 'r:*' open for reading with transparent compression + 'r:' open for reading exclusively uncompressed + 'r:gz' open for reading with gzip compression + 'r:bz2' open for reading with bzip2 compression + 'a' or 'a:' open for appending, creating the file if necessary + 'w' or 'w:' open for writing without compression + 'w:gz' open for writing with gzip compression + 'w:bz2' open for writing with bzip2 compression + + 'r|*' open a stream of tar blocks with transparent compression + 'r|' open an uncompressed stream of tar blocks for reading + 'r|gz' open a gzip compressed stream of tar blocks + 'r|bz2' open a bzip2 compressed stream of tar blocks + 'w|' open an uncompressed stream for writing + 'w|gz' open a gzip compressed stream for writing + 'w|bz2' open a bzip2 compressed stream for writing + """ + + if not name and not fileobj: + raise ValueError("nothing to open") + + if mode in ("r", "r:*"): + # Find out which *open() is appropriate for opening the file. + for comptype in cls.OPEN_METH: + func = getattr(cls, cls.OPEN_METH[comptype]) + if fileobj is not None: + saved_pos = fileobj.tell() + try: + return func(name, "r", fileobj, **kwargs) + except (ReadError, CompressionError) as e: + if fileobj is not None: + fileobj.seek(saved_pos) + continue + raise ReadError("file could not be opened successfully") + + elif ":" in mode: + filemode, comptype = mode.split(":", 1) + filemode = filemode or "r" + comptype = comptype or "tar" + + # Select the *open() function according to + # given compression. + if comptype in cls.OPEN_METH: + func = getattr(cls, cls.OPEN_METH[comptype]) + else: + raise CompressionError("unknown compression type %r" % comptype) + return func(name, filemode, fileobj, **kwargs) + + elif "|" in mode: + filemode, comptype = mode.split("|", 1) + filemode = filemode or "r" + comptype = comptype or "tar" + + if filemode not in "rw": + raise ValueError("mode must be 'r' or 'w'") + + stream = _Stream(name, filemode, comptype, fileobj, bufsize) + try: + t = cls(name, filemode, stream, **kwargs) + except: + stream.close() + raise + t._extfileobj = False + return t + + elif mode in "aw": + return cls.taropen(name, mode, fileobj, **kwargs) + + raise ValueError("undiscernible mode") + + @classmethod + def taropen(cls, name, mode="r", fileobj=None, **kwargs): + """Open uncompressed tar archive name for reading or writing. + """ + if len(mode) > 1 or mode not in "raw": + raise ValueError("mode must be 'r', 'a' or 'w'") + return cls(name, mode, fileobj, **kwargs) + + @classmethod + def gzopen(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs): + """Open gzip compressed tar archive name for reading or writing. + Appending is not allowed. 
+ """ + if len(mode) > 1 or mode not in "rw": + raise ValueError("mode must be 'r' or 'w'") + + try: + import gzip + gzip.GzipFile + except (ImportError, AttributeError): + raise CompressionError("gzip module is not available") + + extfileobj = fileobj is not None + try: + fileobj = gzip.GzipFile(name, mode + "b", compresslevel, fileobj) + t = cls.taropen(name, mode, fileobj, **kwargs) + except IOError: + if not extfileobj and fileobj is not None: + fileobj.close() + if fileobj is None: + raise + raise ReadError("not a gzip file") + except: + if not extfileobj and fileobj is not None: + fileobj.close() + raise + t._extfileobj = extfileobj + return t + + @classmethod + def bz2open(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs): + """Open bzip2 compressed tar archive name for reading or writing. + Appending is not allowed. + """ + if len(mode) > 1 or mode not in "rw": + raise ValueError("mode must be 'r' or 'w'.") + + try: + import bz2 + except ImportError: + raise CompressionError("bz2 module is not available") + + if fileobj is not None: + fileobj = _BZ2Proxy(fileobj, mode) + else: + fileobj = bz2.BZ2File(name, mode, compresslevel=compresslevel) + + try: + t = cls.taropen(name, mode, fileobj, **kwargs) + except (IOError, EOFError): + fileobj.close() + raise ReadError("not a bzip2 file") + t._extfileobj = False + return t + + # All *open() methods are registered here. + OPEN_METH = { + "tar": "taropen", # uncompressed tar + "gz": "gzopen", # gzip compressed tar + "bz2": "bz2open" # bzip2 compressed tar + } + + #-------------------------------------------------------------------------- + # The public methods which TarFile provides: + + def close(self): + """Close the TarFile. In write-mode, two finishing zero blocks are + appended to the archive. + """ + if self.closed: + return + + if self.mode in "aw": + self.fileobj.write(NUL * (BLOCKSIZE * 2)) + self.offset += (BLOCKSIZE * 2) + # fill up the end with zero-blocks + # (like option -b20 for tar does) + blocks, remainder = divmod(self.offset, RECORDSIZE) + if remainder > 0: + self.fileobj.write(NUL * (RECORDSIZE - remainder)) + + if not self._extfileobj: + self.fileobj.close() + self.closed = True + + def getmember(self, name): + """Return a TarInfo object for member `name'. If `name' can not be + found in the archive, KeyError is raised. If a member occurs more + than once in the archive, its last occurrence is assumed to be the + most up-to-date version. + """ + tarinfo = self._getmember(name) + if tarinfo is None: + raise KeyError("filename %r not found" % name) + return tarinfo + + def getmembers(self): + """Return the members of the archive as a list of TarInfo objects. The + list has the same order as the members in the archive. + """ + self._check() + if not self._loaded: # if we want to obtain a list of + self._load() # all members, we first have to + # scan the whole archive. + return self.members + + def getnames(self): + """Return the members of the archive as a list of their names. It has + the same order as the list returned by getmembers(). + """ + return [tarinfo.name for tarinfo in self.getmembers()] + + def gettarinfo(self, name=None, arcname=None, fileobj=None): + """Create a TarInfo object for either the file `name' or the file + object `fileobj' (using os.fstat on its file descriptor). You can + modify some of the TarInfo's attributes before you add it using + addfile(). If given, `arcname' specifies an alternative name for the + file in the archive. 
+ """ + self._check("aw") + + # When fileobj is given, replace name by + # fileobj's real name. + if fileobj is not None: + name = fileobj.name + + # Building the name of the member in the archive. + # Backward slashes are converted to forward slashes, + # Absolute paths are turned to relative paths. + if arcname is None: + arcname = name + drv, arcname = os.path.splitdrive(arcname) + arcname = arcname.replace(os.sep, "/") + arcname = arcname.lstrip("/") + + # Now, fill the TarInfo object with + # information specific for the file. + tarinfo = self.tarinfo() + tarinfo.tarfile = self + + # Use os.stat or os.lstat, depending on platform + # and if symlinks shall be resolved. + if fileobj is None: + if hasattr(os, "lstat") and not self.dereference: + statres = os.lstat(name) + else: + statres = os.stat(name) + else: + statres = os.fstat(fileobj.fileno()) + linkname = "" + + stmd = statres.st_mode + if stat.S_ISREG(stmd): + inode = (statres.st_ino, statres.st_dev) + if not self.dereference and statres.st_nlink > 1 and \ + inode in self.inodes and arcname != self.inodes[inode]: + # Is it a hardlink to an already + # archived file? + type = LNKTYPE + linkname = self.inodes[inode] + else: + # The inode is added only if its valid. + # For win32 it is always 0. + type = REGTYPE + if inode[0]: + self.inodes[inode] = arcname + elif stat.S_ISDIR(stmd): + type = DIRTYPE + elif stat.S_ISFIFO(stmd): + type = FIFOTYPE + elif stat.S_ISLNK(stmd): + type = SYMTYPE + linkname = os.readlink(name) + elif stat.S_ISCHR(stmd): + type = CHRTYPE + elif stat.S_ISBLK(stmd): + type = BLKTYPE + else: + return None + + # Fill the TarInfo object with all + # information we can get. + tarinfo.name = arcname + tarinfo.mode = stmd + tarinfo.uid = statres.st_uid + tarinfo.gid = statres.st_gid + if type == REGTYPE: + tarinfo.size = statres.st_size + else: + tarinfo.size = 0 + tarinfo.mtime = statres.st_mtime + tarinfo.type = type + tarinfo.linkname = linkname + if pwd: + try: + tarinfo.uname = pwd.getpwuid(tarinfo.uid)[0] + except KeyError: + pass + if grp: + try: + tarinfo.gname = grp.getgrgid(tarinfo.gid)[0] + except KeyError: + pass + + if type in (CHRTYPE, BLKTYPE): + if hasattr(os, "major") and hasattr(os, "minor"): + tarinfo.devmajor = os.major(statres.st_rdev) + tarinfo.devminor = os.minor(statres.st_rdev) + return tarinfo + + def list(self, verbose=True): + """Print a table of contents to sys.stdout. If `verbose' is False, only + the names of the members are printed. If it is True, an `ls -l'-like + output is produced. + """ + self._check() + + for tarinfo in self: + if verbose: + print(filemode(tarinfo.mode), end=' ') + print("%s/%s" % (tarinfo.uname or tarinfo.uid, + tarinfo.gname or tarinfo.gid), end=' ') + if tarinfo.ischr() or tarinfo.isblk(): + print("%10s" % ("%d,%d" \ + % (tarinfo.devmajor, tarinfo.devminor)), end=' ') + else: + print("%10d" % tarinfo.size, end=' ') + print("%d-%02d-%02d %02d:%02d:%02d" \ + % time.localtime(tarinfo.mtime)[:6], end=' ') + + print(tarinfo.name + ("/" if tarinfo.isdir() else ""), end=' ') + + if verbose: + if tarinfo.issym(): + print("->", tarinfo.linkname, end=' ') + if tarinfo.islnk(): + print("link to", tarinfo.linkname, end=' ') + print() + + def add(self, name, arcname=None, recursive=True, exclude=None, filter=None): + """Add the file `name' to the archive. `name' may be any type of file + (directory, fifo, symbolic link, etc.). If given, `arcname' + specifies an alternative name for the file in the archive. + Directories are added recursively by default. 
This can be avoided by + setting `recursive' to False. `exclude' is a function that should + return True for each filename to be excluded. `filter' is a function + that expects a TarInfo object argument and returns the changed + TarInfo object, if it returns None the TarInfo object will be + excluded from the archive. + """ + self._check("aw") + + if arcname is None: + arcname = name + + # Exclude pathnames. + if exclude is not None: + import warnings + warnings.warn("use the filter argument instead", + DeprecationWarning, 2) + if exclude(name): + self._dbg(2, "tarfile: Excluded %r" % name) + return + + # Skip if somebody tries to archive the archive... + if self.name is not None and os.path.abspath(name) == self.name: + self._dbg(2, "tarfile: Skipped %r" % name) + return + + self._dbg(1, name) + + # Create a TarInfo object from the file. + tarinfo = self.gettarinfo(name, arcname) + + if tarinfo is None: + self._dbg(1, "tarfile: Unsupported type %r" % name) + return + + # Change or exclude the TarInfo object. + if filter is not None: + tarinfo = filter(tarinfo) + if tarinfo is None: + self._dbg(2, "tarfile: Excluded %r" % name) + return + + # Append the tar header and data to the archive. + if tarinfo.isreg(): + f = bltn_open(name, "rb") + self.addfile(tarinfo, f) + f.close() + + elif tarinfo.isdir(): + self.addfile(tarinfo) + if recursive: + for f in os.listdir(name): + self.add(os.path.join(name, f), os.path.join(arcname, f), + recursive, exclude, filter=filter) + + else: + self.addfile(tarinfo) + + def addfile(self, tarinfo, fileobj=None): + """Add the TarInfo object `tarinfo' to the archive. If `fileobj' is + given, tarinfo.size bytes are read from it and added to the archive. + You can create TarInfo objects using gettarinfo(). + On Windows platforms, `fileobj' should always be opened with mode + 'rb' to avoid irritation about the file size. + """ + self._check("aw") + + tarinfo = copy.copy(tarinfo) + + buf = tarinfo.tobuf(self.format, self.encoding, self.errors) + self.fileobj.write(buf) + self.offset += len(buf) + + # If there's data to follow, append it. + if fileobj is not None: + copyfileobj(fileobj, self.fileobj, tarinfo.size) + blocks, remainder = divmod(tarinfo.size, BLOCKSIZE) + if remainder > 0: + self.fileobj.write(NUL * (BLOCKSIZE - remainder)) + blocks += 1 + self.offset += blocks * BLOCKSIZE + + self.members.append(tarinfo) + + def extractall(self, path=".", members=None): + """Extract all members from the archive to the current working + directory and set owner, modification time and permissions on + directories afterwards. `path' specifies a different directory + to extract to. `members' is optional and must be a subset of the + list returned by getmembers(). + """ + directories = [] + + if members is None: + members = self + + for tarinfo in members: + if tarinfo.isdir(): + # Extract directories with a safe mode. + directories.append(tarinfo) + tarinfo = copy.copy(tarinfo) + tarinfo.mode = 0o700 + # Do not set_attrs directories, as we will do that further down + self.extract(tarinfo, path, set_attrs=not tarinfo.isdir()) + + # Reverse sort directories. + directories.sort(key=lambda a: a.name) + directories.reverse() + + # Set correct owner, mtime and filemode on directories. 
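+ # Directories were extracted above with a permissive 0o700 mode; their
+ # real owner, mtime and mode are applied deepest-first so that a parent's
+ # final (possibly read-only) permissions never get in the way of fixing
+ # up its children.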
+ for tarinfo in directories: + dirpath = os.path.join(path, tarinfo.name) + try: + self.chown(tarinfo, dirpath) + self.utime(tarinfo, dirpath) + self.chmod(tarinfo, dirpath) + except ExtractError as e: + if self.errorlevel > 1: + raise + else: + self._dbg(1, "tarfile: %s" % e) + + def extract(self, member, path="", set_attrs=True): + """Extract a member from the archive to the current working directory, + using its full name. Its file information is extracted as accurately + as possible. `member' may be a filename or a TarInfo object. You can + specify a different directory using `path'. File attributes (owner, + mtime, mode) are set unless `set_attrs' is False. + """ + self._check("r") + + if isinstance(member, str): + tarinfo = self.getmember(member) + else: + tarinfo = member + + # Prepare the link target for makelink(). + if tarinfo.islnk(): + tarinfo._link_target = os.path.join(path, tarinfo.linkname) + + try: + self._extract_member(tarinfo, os.path.join(path, tarinfo.name), + set_attrs=set_attrs) + except EnvironmentError as e: + if self.errorlevel > 0: + raise + else: + if e.filename is None: + self._dbg(1, "tarfile: %s" % e.strerror) + else: + self._dbg(1, "tarfile: %s %r" % (e.strerror, e.filename)) + except ExtractError as e: + if self.errorlevel > 1: + raise + else: + self._dbg(1, "tarfile: %s" % e) + + def extractfile(self, member): + """Extract a member from the archive as a file object. `member' may be + a filename or a TarInfo object. If `member' is a regular file, a + file-like object is returned. If `member' is a link, a file-like + object is constructed from the link's target. If `member' is none of + the above, None is returned. + The file-like object is read-only and provides the following + methods: read(), readline(), readlines(), seek() and tell() + """ + self._check("r") + + if isinstance(member, str): + tarinfo = self.getmember(member) + else: + tarinfo = member + + if tarinfo.isreg(): + return self.fileobject(self, tarinfo) + + elif tarinfo.type not in SUPPORTED_TYPES: + # If a member's type is unknown, it is treated as a + # regular file. + return self.fileobject(self, tarinfo) + + elif tarinfo.islnk() or tarinfo.issym(): + if isinstance(self.fileobj, _Stream): + # A small but ugly workaround for the case that someone tries + # to extract a (sym)link as a file-object from a non-seekable + # stream of tar blocks. + raise StreamError("cannot extract (sym)link as file object") + else: + # A (sym)link's file object is its target's file object. + return self.extractfile(self._find_link_target(tarinfo)) + else: + # If there's no data associated with the member (directory, chrdev, + # blkdev, etc.), return None instead of a file object. + return None + + def _extract_member(self, tarinfo, targetpath, set_attrs=True): + """Extract the TarInfo object tarinfo to a physical + file called targetpath. + """ + # Fetch the TarInfo object for the given name + # and build the destination pathname, replacing + # forward slashes to platform specific separators. + targetpath = targetpath.rstrip("/") + targetpath = targetpath.replace("/", os.sep) + + # Create all upper directories. + upperdirs = os.path.dirname(targetpath) + if upperdirs and not os.path.exists(upperdirs): + # Create directories that are not part of the archive with + # default permissions. 
+ os.makedirs(upperdirs) + + if tarinfo.islnk() or tarinfo.issym(): + self._dbg(1, "%s -> %s" % (tarinfo.name, tarinfo.linkname)) + else: + self._dbg(1, tarinfo.name) + + if tarinfo.isreg(): + self.makefile(tarinfo, targetpath) + elif tarinfo.isdir(): + self.makedir(tarinfo, targetpath) + elif tarinfo.isfifo(): + self.makefifo(tarinfo, targetpath) + elif tarinfo.ischr() or tarinfo.isblk(): + self.makedev(tarinfo, targetpath) + elif tarinfo.islnk() or tarinfo.issym(): + self.makelink(tarinfo, targetpath) + elif tarinfo.type not in SUPPORTED_TYPES: + self.makeunknown(tarinfo, targetpath) + else: + self.makefile(tarinfo, targetpath) + + if set_attrs: + self.chown(tarinfo, targetpath) + if not tarinfo.issym(): + self.chmod(tarinfo, targetpath) + self.utime(tarinfo, targetpath) + + #-------------------------------------------------------------------------- + # Below are the different file methods. They are called via + # _extract_member() when extract() is called. They can be replaced in a + # subclass to implement other functionality. + + def makedir(self, tarinfo, targetpath): + """Make a directory called targetpath. + """ + try: + # Use a safe mode for the directory, the real mode is set + # later in _extract_member(). + os.mkdir(targetpath, 0o700) + except EnvironmentError as e: + if e.errno != errno.EEXIST: + raise + + def makefile(self, tarinfo, targetpath): + """Make a file called targetpath. + """ + source = self.fileobj + source.seek(tarinfo.offset_data) + target = bltn_open(targetpath, "wb") + if tarinfo.sparse is not None: + for offset, size in tarinfo.sparse: + target.seek(offset) + copyfileobj(source, target, size) + else: + copyfileobj(source, target, tarinfo.size) + target.seek(tarinfo.size) + target.truncate() + target.close() + + def makeunknown(self, tarinfo, targetpath): + """Make a file from a TarInfo object with an unknown type + at targetpath. + """ + self.makefile(tarinfo, targetpath) + self._dbg(1, "tarfile: Unknown file type %r, " \ + "extracted as regular file." % tarinfo.type) + + def makefifo(self, tarinfo, targetpath): + """Make a fifo called targetpath. + """ + if hasattr(os, "mkfifo"): + os.mkfifo(targetpath) + else: + raise ExtractError("fifo not supported by system") + + def makedev(self, tarinfo, targetpath): + """Make a character or block device called targetpath. + """ + if not hasattr(os, "mknod") or not hasattr(os, "makedev"): + raise ExtractError("special devices not supported by system") + + mode = tarinfo.mode + if tarinfo.isblk(): + mode |= stat.S_IFBLK + else: + mode |= stat.S_IFCHR + + os.mknod(targetpath, mode, + os.makedev(tarinfo.devmajor, tarinfo.devminor)) + + def makelink(self, tarinfo, targetpath): + """Make a (symbolic) link called targetpath. If it cannot be created + (platform limitation), we try to make a copy of the referenced file + instead of a link. + """ + try: + # For systems that support symbolic and hard links. + if tarinfo.issym(): + os.symlink(tarinfo.linkname, targetpath) + else: + # See extract(). 
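+ # extract() stored the already-extracted target path in
+ # tarinfo._link_target; hard-link to it if it exists, otherwise fall
+ # back to extracting the target member's data directly to targetpath.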
+ if os.path.exists(tarinfo._link_target): + os.link(tarinfo._link_target, targetpath) + else: + self._extract_member(self._find_link_target(tarinfo), + targetpath) + except symlink_exception: + if tarinfo.issym(): + linkpath = os.path.join(os.path.dirname(tarinfo.name), + tarinfo.linkname) + else: + linkpath = tarinfo.linkname + else: + try: + self._extract_member(self._find_link_target(tarinfo), + targetpath) + except KeyError: + raise ExtractError("unable to resolve link inside archive") + + def chown(self, tarinfo, targetpath): + """Set owner of targetpath according to tarinfo. + """ + if pwd and hasattr(os, "geteuid") and os.geteuid() == 0: + # We have to be root to do so. + try: + g = grp.getgrnam(tarinfo.gname)[2] + except KeyError: + g = tarinfo.gid + try: + u = pwd.getpwnam(tarinfo.uname)[2] + except KeyError: + u = tarinfo.uid + try: + if tarinfo.issym() and hasattr(os, "lchown"): + os.lchown(targetpath, u, g) + else: + if sys.platform != "os2emx": + os.chown(targetpath, u, g) + except EnvironmentError as e: + raise ExtractError("could not change owner") + + def chmod(self, tarinfo, targetpath): + """Set file permissions of targetpath according to tarinfo. + """ + if hasattr(os, 'chmod'): + try: + os.chmod(targetpath, tarinfo.mode) + except EnvironmentError as e: + raise ExtractError("could not change mode") + + def utime(self, tarinfo, targetpath): + """Set modification time of targetpath according to tarinfo. + """ + if not hasattr(os, 'utime'): + return + try: + os.utime(targetpath, (tarinfo.mtime, tarinfo.mtime)) + except EnvironmentError as e: + raise ExtractError("could not change modification time") + + #-------------------------------------------------------------------------- + def next(self): + """Return the next member of the archive as a TarInfo object, when + TarFile is opened for reading. Return None if there is no more + available. + """ + self._check("ra") + if self.firstmember is not None: + m = self.firstmember + self.firstmember = None + return m + + # Read the next block. + self.fileobj.seek(self.offset) + tarinfo = None + while True: + try: + tarinfo = self.tarinfo.fromtarfile(self) + except EOFHeaderError as e: + if self.ignore_zeros: + self._dbg(2, "0x%X: %s" % (self.offset, e)) + self.offset += BLOCKSIZE + continue + except InvalidHeaderError as e: + if self.ignore_zeros: + self._dbg(2, "0x%X: %s" % (self.offset, e)) + self.offset += BLOCKSIZE + continue + elif self.offset == 0: + raise ReadError(str(e)) + except EmptyHeaderError: + if self.offset == 0: + raise ReadError("empty file") + except TruncatedHeaderError as e: + if self.offset == 0: + raise ReadError(str(e)) + except SubsequentHeaderError as e: + raise ReadError(str(e)) + break + + if tarinfo is not None: + self.members.append(tarinfo) + else: + self._loaded = True + + return tarinfo + + #-------------------------------------------------------------------------- + # Little helper methods: + + def _getmember(self, name, tarinfo=None, normalize=False): + """Find an archive member by name from bottom to top. + If tarinfo is given, it is used as the starting point. + """ + # Ensure that all members have been loaded. + members = self.getmembers() + + # Limit the member search list up to tarinfo. 
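+ # For hard links only members that were archived before the link itself
+ # are valid targets, so the search list is cut off at `tarinfo'.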
+ if tarinfo is not None: + members = members[:members.index(tarinfo)] + + if normalize: + name = os.path.normpath(name) + + for member in reversed(members): + if normalize: + member_name = os.path.normpath(member.name) + else: + member_name = member.name + + if name == member_name: + return member + + def _load(self): + """Read through the entire archive file and look for readable + members. + """ + while True: + tarinfo = self.next() + if tarinfo is None: + break + self._loaded = True + + def _check(self, mode=None): + """Check if TarFile is still open, and if the operation's mode + corresponds to TarFile's mode. + """ + if self.closed: + raise IOError("%s is closed" % self.__class__.__name__) + if mode is not None and self.mode not in mode: + raise IOError("bad operation for mode %r" % self.mode) + + def _find_link_target(self, tarinfo): + """Find the target member of a symlink or hardlink member in the + archive. + """ + if tarinfo.issym(): + # Always search the entire archive. + linkname = os.path.dirname(tarinfo.name) + "/" + tarinfo.linkname + limit = None + else: + # Search the archive before the link, because a hard link is + # just a reference to an already archived file. + linkname = tarinfo.linkname + limit = tarinfo + + member = self._getmember(linkname, tarinfo=limit, normalize=True) + if member is None: + raise KeyError("linkname %r not found" % linkname) + return member + + def __iter__(self): + """Provide an iterator object. + """ + if self._loaded: + return iter(self.members) + else: + return TarIter(self) + + def _dbg(self, level, msg): + """Write debugging output to sys.stderr. + """ + if level <= self.debug: + print(msg, file=sys.stderr) + + def __enter__(self): + self._check() + return self + + def __exit__(self, type, value, traceback): + if type is None: + self.close() + else: + # An exception occurred. We must not call close() because + # it would try to write end-of-archive blocks and padding. + if not self._extfileobj: + self.fileobj.close() + self.closed = True +# class TarFile + +class TarIter(object): + """Iterator Class. + + for tarinfo in TarFile(...): + suite... + """ + + def __init__(self, tarfile): + """Construct a TarIter object. + """ + self.tarfile = tarfile + self.index = 0 + def __iter__(self): + """Return iterator object. + """ + return self + + def __next__(self): + """Return the next item using TarFile's next() method. + When all members have been read, set TarFile as _loaded. + """ + # Fix for SF #1100429: Under rare circumstances it can + # happen that getmembers() is called during iteration, + # which will cause TarIter to stop prematurely. + if not self.tarfile._loaded: + tarinfo = self.tarfile.next() + if not tarinfo: + self.tarfile._loaded = True + raise StopIteration + else: + try: + tarinfo = self.tarfile.members[self.index] + except IndexError: + raise StopIteration + self.index += 1 + return tarinfo + + next = __next__ # for Python 2.x + +#-------------------- +# exported functions +#-------------------- +def is_tarfile(name): + """Return True if name points to a tar archive that we + are able to handle, else return False. 
+ """ + try: + t = open(name) + t.close() + return True + except TarError: + return False + +bltn_open = open +open = TarFile.open diff --git a/env/lib/python3.6/site-packages/pip/_vendor/distlib/compat.py b/env/lib/python3.6/site-packages/pip/_vendor/distlib/compat.py new file mode 100644 index 0000000..ff328c8 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/distlib/compat.py @@ -0,0 +1,1120 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2013-2017 Vinay Sajip. +# Licensed to the Python Software Foundation under a contributor agreement. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +from __future__ import absolute_import + +import os +import re +import sys + +try: + import ssl +except ImportError: # pragma: no cover + ssl = None + +if sys.version_info[0] < 3: # pragma: no cover + from StringIO import StringIO + string_types = basestring, + text_type = unicode + from types import FileType as file_type + import __builtin__ as builtins + import ConfigParser as configparser + from ._backport import shutil + from urlparse import urlparse, urlunparse, urljoin, urlsplit, urlunsplit + from urllib import (urlretrieve, quote as _quote, unquote, url2pathname, + pathname2url, ContentTooShortError, splittype) + + def quote(s): + if isinstance(s, unicode): + s = s.encode('utf-8') + return _quote(s) + + import urllib2 + from urllib2 import (Request, urlopen, URLError, HTTPError, + HTTPBasicAuthHandler, HTTPPasswordMgr, + HTTPHandler, HTTPRedirectHandler, + build_opener) + if ssl: + from urllib2 import HTTPSHandler + import httplib + import xmlrpclib + import Queue as queue + from HTMLParser import HTMLParser + import htmlentitydefs + raw_input = raw_input + from itertools import ifilter as filter + from itertools import ifilterfalse as filterfalse + + _userprog = None + def splituser(host): + """splituser('user[:passwd]@host[:port]') --> 'user[:passwd]', 'host[:port]'.""" + global _userprog + if _userprog is None: + import re + _userprog = re.compile('^(.*)@(.*)$') + + match = _userprog.match(host) + if match: return match.group(1, 2) + return None, host + +else: # pragma: no cover + from io import StringIO + string_types = str, + text_type = str + from io import TextIOWrapper as file_type + import builtins + import configparser + import shutil + from urllib.parse import (urlparse, urlunparse, urljoin, splituser, quote, + unquote, urlsplit, urlunsplit, splittype) + from urllib.request import (urlopen, urlretrieve, Request, url2pathname, + pathname2url, + HTTPBasicAuthHandler, HTTPPasswordMgr, + HTTPHandler, HTTPRedirectHandler, + build_opener) + if ssl: + from urllib.request import HTTPSHandler + from urllib.error import HTTPError, URLError, ContentTooShortError + import http.client as httplib + import urllib.request as urllib2 + import xmlrpc.client as xmlrpclib + import queue + from html.parser import HTMLParser + import html.entities as htmlentitydefs + raw_input = input + from itertools import filterfalse + filter = filter + +try: + from ssl import match_hostname, CertificateError +except ImportError: # pragma: no cover + class CertificateError(ValueError): + pass + + + def _dnsname_match(dn, hostname, max_wildcards=1): + """Matching according to RFC 6125, section 6.4.3 + + http://tools.ietf.org/html/rfc6125#section-6.4.3 + """ + pats = [] + if not dn: + return False + + parts = dn.split('.') + leftmost, remainder = parts[0], parts[1:] + + wildcards = leftmost.count('*') + if wildcards > max_wildcards: + # Issue #17980: avoid denials of service by refusing more + # than one wildcard 
per fragment. A survey of established + # policy among SSL implementations showed it to be a + # reasonable choice. + raise CertificateError( + "too many wildcards in certificate DNS name: " + repr(dn)) + + # speed up common case w/o wildcards + if not wildcards: + return dn.lower() == hostname.lower() + + # RFC 6125, section 6.4.3, subitem 1. + # The client SHOULD NOT attempt to match a presented identifier in which + # the wildcard character comprises a label other than the left-most label. + if leftmost == '*': + # When '*' is a fragment by itself, it matches a non-empty dotless + # fragment. + pats.append('[^.]+') + elif leftmost.startswith('xn--') or hostname.startswith('xn--'): + # RFC 6125, section 6.4.3, subitem 3. + # The client SHOULD NOT attempt to match a presented identifier + # where the wildcard character is embedded within an A-label or + # U-label of an internationalized domain name. + pats.append(re.escape(leftmost)) + else: + # Otherwise, '*' matches any dotless string, e.g. www* + pats.append(re.escape(leftmost).replace(r'\*', '[^.]*')) + + # add the remaining fragments, ignore any wildcards + for frag in remainder: + pats.append(re.escape(frag)) + + pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE) + return pat.match(hostname) + + + def match_hostname(cert, hostname): + """Verify that *cert* (in decoded format as returned by + SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125 + rules are followed, but IP addresses are not accepted for *hostname*. + + CertificateError is raised on failure. On success, the function + returns nothing. + """ + if not cert: + raise ValueError("empty or no certificate, match_hostname needs a " + "SSL socket or SSL context with either " + "CERT_OPTIONAL or CERT_REQUIRED") + dnsnames = [] + san = cert.get('subjectAltName', ()) + for key, value in san: + if key == 'DNS': + if _dnsname_match(value, hostname): + return + dnsnames.append(value) + if not dnsnames: + # The subject is only checked when there is no dNSName entry + # in subjectAltName + for sub in cert.get('subject', ()): + for key, value in sub: + # XXX according to RFC 2818, the most specific Common Name + # must be used. + if key == 'commonName': + if _dnsname_match(value, hostname): + return + dnsnames.append(value) + if len(dnsnames) > 1: + raise CertificateError("hostname %r " + "doesn't match either of %s" + % (hostname, ', '.join(map(repr, dnsnames)))) + elif len(dnsnames) == 1: + raise CertificateError("hostname %r " + "doesn't match %r" + % (hostname, dnsnames[0])) + else: + raise CertificateError("no appropriate commonName or " + "subjectAltName fields were found") + + +try: + from types import SimpleNamespace as Container +except ImportError: # pragma: no cover + class Container(object): + """ + A generic container for when multiple values need to be returned + """ + def __init__(self, **kwargs): + self.__dict__.update(kwargs) + + +try: + from shutil import which +except ImportError: # pragma: no cover + # Implementation from Python 3.3 + def which(cmd, mode=os.F_OK | os.X_OK, path=None): + """Given a command, mode, and a PATH string, return the path which + conforms to the given mode on the PATH, or None if there is no such + file. + + `mode` defaults to os.F_OK | os.X_OK. `path` defaults to the result + of os.environ.get("PATH"), or can be overridden with a custom search + path. + + """ + # Check that a given file can be accessed with the correct mode. 
+ # Additionally check that `file` is not a directory, as on Windows + # directories pass the os.access check. + def _access_check(fn, mode): + return (os.path.exists(fn) and os.access(fn, mode) + and not os.path.isdir(fn)) + + # If we're given a path with a directory part, look it up directly rather + # than referring to PATH directories. This includes checking relative to the + # current directory, e.g. ./script + if os.path.dirname(cmd): + if _access_check(cmd, mode): + return cmd + return None + + if path is None: + path = os.environ.get("PATH", os.defpath) + if not path: + return None + path = path.split(os.pathsep) + + if sys.platform == "win32": + # The current directory takes precedence on Windows. + if not os.curdir in path: + path.insert(0, os.curdir) + + # PATHEXT is necessary to check on Windows. + pathext = os.environ.get("PATHEXT", "").split(os.pathsep) + # See if the given file matches any of the expected path extensions. + # This will allow us to short circuit when given "python.exe". + # If it does match, only test that one, otherwise we have to try + # others. + if any(cmd.lower().endswith(ext.lower()) for ext in pathext): + files = [cmd] + else: + files = [cmd + ext for ext in pathext] + else: + # On other platforms you don't have things like PATHEXT to tell you + # what file suffixes are executable, so just pass on cmd as-is. + files = [cmd] + + seen = set() + for dir in path: + normdir = os.path.normcase(dir) + if not normdir in seen: + seen.add(normdir) + for thefile in files: + name = os.path.join(dir, thefile) + if _access_check(name, mode): + return name + return None + + +# ZipFile is a context manager in 2.7, but not in 2.6 + +from zipfile import ZipFile as BaseZipFile + +if hasattr(BaseZipFile, '__enter__'): # pragma: no cover + ZipFile = BaseZipFile +else: # pragma: no cover + from zipfile import ZipExtFile as BaseZipExtFile + + class ZipExtFile(BaseZipExtFile): + def __init__(self, base): + self.__dict__.update(base.__dict__) + + def __enter__(self): + return self + + def __exit__(self, *exc_info): + self.close() + # return None, so if an exception occurred, it will propagate + + class ZipFile(BaseZipFile): + def __enter__(self): + return self + + def __exit__(self, *exc_info): + self.close() + # return None, so if an exception occurred, it will propagate + + def open(self, *args, **kwargs): + base = BaseZipFile.open(self, *args, **kwargs) + return ZipExtFile(base) + +try: + from platform import python_implementation +except ImportError: # pragma: no cover + def python_implementation(): + """Return a string identifying the Python implementation.""" + if 'PyPy' in sys.version: + return 'PyPy' + if os.name == 'java': + return 'Jython' + if sys.version.startswith('IronPython'): + return 'IronPython' + return 'CPython' + +try: + import sysconfig +except ImportError: # pragma: no cover + from ._backport import sysconfig + +try: + callable = callable +except NameError: # pragma: no cover + from collections import Callable + + def callable(obj): + return isinstance(obj, Callable) + + +try: + fsencode = os.fsencode + fsdecode = os.fsdecode +except AttributeError: # pragma: no cover + # Issue #99: on some systems (e.g. containerised), + # sys.getfilesystemencoding() returns None, and we need a real value, + # so fall back to utf-8. From the CPython 2.7 docs relating to Unix and + # sys.getfilesystemencoding(): the return value is "the user’s preference + # according to the result of nl_langinfo(CODESET), or None if the + # nl_langinfo(CODESET) failed." 
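+ # Note: 'strict' is chosen for 'mbcs' below presumably because the
+ # Windows mbcs codec did not reliably support the surrogateescape error
+ # handler on the Python versions this backport targets.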
+ _fsencoding = sys.getfilesystemencoding() or 'utf-8' + if _fsencoding == 'mbcs': + _fserrors = 'strict' + else: + _fserrors = 'surrogateescape' + + def fsencode(filename): + if isinstance(filename, bytes): + return filename + elif isinstance(filename, text_type): + return filename.encode(_fsencoding, _fserrors) + else: + raise TypeError("expect bytes or str, not %s" % + type(filename).__name__) + + def fsdecode(filename): + if isinstance(filename, text_type): + return filename + elif isinstance(filename, bytes): + return filename.decode(_fsencoding, _fserrors) + else: + raise TypeError("expect bytes or str, not %s" % + type(filename).__name__) + +try: + from tokenize import detect_encoding +except ImportError: # pragma: no cover + from codecs import BOM_UTF8, lookup + import re + + cookie_re = re.compile(r"coding[:=]\s*([-\w.]+)") + + def _get_normal_name(orig_enc): + """Imitates get_normal_name in tokenizer.c.""" + # Only care about the first 12 characters. + enc = orig_enc[:12].lower().replace("_", "-") + if enc == "utf-8" or enc.startswith("utf-8-"): + return "utf-8" + if enc in ("latin-1", "iso-8859-1", "iso-latin-1") or \ + enc.startswith(("latin-1-", "iso-8859-1-", "iso-latin-1-")): + return "iso-8859-1" + return orig_enc + + def detect_encoding(readline): + """ + The detect_encoding() function is used to detect the encoding that should + be used to decode a Python source file. It requires one argument, readline, + in the same way as the tokenize() generator. + + It will call readline a maximum of twice, and return the encoding used + (as a string) and a list of any lines (left as bytes) it has read in. + + It detects the encoding from the presence of a utf-8 bom or an encoding + cookie as specified in pep-0263. If both a bom and a cookie are present, + but disagree, a SyntaxError will be raised. If the encoding cookie is an + invalid charset, raise a SyntaxError. Note that if a utf-8 bom is found, + 'utf-8-sig' is returned. + + If no encoding is specified, then the default of 'utf-8' will be returned. + """ + try: + filename = readline.__self__.name + except AttributeError: + filename = None + bom_found = False + encoding = None + default = 'utf-8' + def read_or_stop(): + try: + return readline() + except StopIteration: + return b'' + + def find_cookie(line): + try: + # Decode as UTF-8. Either the line is an encoding declaration, + # in which case it should be pure ASCII, or it must be UTF-8 + # per default encoding. 
+ line_string = line.decode('utf-8') + except UnicodeDecodeError: + msg = "invalid or missing encoding declaration" + if filename is not None: + msg = '{} for {!r}'.format(msg, filename) + raise SyntaxError(msg) + + matches = cookie_re.findall(line_string) + if not matches: + return None + encoding = _get_normal_name(matches[0]) + try: + codec = lookup(encoding) + except LookupError: + # This behaviour mimics the Python interpreter + if filename is None: + msg = "unknown encoding: " + encoding + else: + msg = "unknown encoding for {!r}: {}".format(filename, + encoding) + raise SyntaxError(msg) + + if bom_found: + if codec.name != 'utf-8': + # This behaviour mimics the Python interpreter + if filename is None: + msg = 'encoding problem: utf-8' + else: + msg = 'encoding problem for {!r}: utf-8'.format(filename) + raise SyntaxError(msg) + encoding += '-sig' + return encoding + + first = read_or_stop() + if first.startswith(BOM_UTF8): + bom_found = True + first = first[3:] + default = 'utf-8-sig' + if not first: + return default, [] + + encoding = find_cookie(first) + if encoding: + return encoding, [first] + + second = read_or_stop() + if not second: + return default, [first] + + encoding = find_cookie(second) + if encoding: + return encoding, [first, second] + + return default, [first, second] + +# For converting & <-> & etc. +try: + from html import escape +except ImportError: + from cgi import escape +if sys.version_info[:2] < (3, 4): + unescape = HTMLParser().unescape +else: + from html import unescape + +try: + from collections import ChainMap +except ImportError: # pragma: no cover + from collections import MutableMapping + + try: + from reprlib import recursive_repr as _recursive_repr + except ImportError: + def _recursive_repr(fillvalue='...'): + ''' + Decorator to make a repr function return fillvalue for a recursive + call + ''' + + def decorating_function(user_function): + repr_running = set() + + def wrapper(self): + key = id(self), get_ident() + if key in repr_running: + return fillvalue + repr_running.add(key) + try: + result = user_function(self) + finally: + repr_running.discard(key) + return result + + # Can't use functools.wraps() here because of bootstrap issues + wrapper.__module__ = getattr(user_function, '__module__') + wrapper.__doc__ = getattr(user_function, '__doc__') + wrapper.__name__ = getattr(user_function, '__name__') + wrapper.__annotations__ = getattr(user_function, '__annotations__', {}) + return wrapper + + return decorating_function + + class ChainMap(MutableMapping): + ''' A ChainMap groups multiple dicts (or other mappings) together + to create a single, updateable view. + + The underlying mappings are stored in a list. That list is public and can + accessed or updated using the *maps* attribute. There is no other state. + + Lookups search the underlying mappings successively until a key is found. + In contrast, writes, updates, and deletions only operate on the first + mapping. + + ''' + + def __init__(self, *maps): + '''Initialize a ChainMap by setting *maps* to the given mappings. + If no mappings are provided, a single empty dictionary is used. 
+ + ''' + self.maps = list(maps) or [{}] # always at least one map + + def __missing__(self, key): + raise KeyError(key) + + def __getitem__(self, key): + for mapping in self.maps: + try: + return mapping[key] # can't use 'key in mapping' with defaultdict + except KeyError: + pass + return self.__missing__(key) # support subclasses that define __missing__ + + def get(self, key, default=None): + return self[key] if key in self else default + + def __len__(self): + return len(set().union(*self.maps)) # reuses stored hash values if possible + + def __iter__(self): + return iter(set().union(*self.maps)) + + def __contains__(self, key): + return any(key in m for m in self.maps) + + def __bool__(self): + return any(self.maps) + + @_recursive_repr() + def __repr__(self): + return '{0.__class__.__name__}({1})'.format( + self, ', '.join(map(repr, self.maps))) + + @classmethod + def fromkeys(cls, iterable, *args): + 'Create a ChainMap with a single dict created from the iterable.' + return cls(dict.fromkeys(iterable, *args)) + + def copy(self): + 'New ChainMap or subclass with a new copy of maps[0] and refs to maps[1:]' + return self.__class__(self.maps[0].copy(), *self.maps[1:]) + + __copy__ = copy + + def new_child(self): # like Django's Context.push() + 'New ChainMap with a new dict followed by all previous maps.' + return self.__class__({}, *self.maps) + + @property + def parents(self): # like Django's Context.pop() + 'New ChainMap from maps[1:].' + return self.__class__(*self.maps[1:]) + + def __setitem__(self, key, value): + self.maps[0][key] = value + + def __delitem__(self, key): + try: + del self.maps[0][key] + except KeyError: + raise KeyError('Key not found in the first mapping: {!r}'.format(key)) + + def popitem(self): + 'Remove and return an item pair from maps[0]. Raise KeyError is maps[0] is empty.' + try: + return self.maps[0].popitem() + except KeyError: + raise KeyError('No keys found in the first mapping.') + + def pop(self, key, *args): + 'Remove *key* from maps[0] and return its value. Raise KeyError if *key* not in maps[0].' + try: + return self.maps[0].pop(key, *args) + except KeyError: + raise KeyError('Key not found in the first mapping: {!r}'.format(key)) + + def clear(self): + 'Clear maps[0], leaving maps[1:] intact.' + self.maps[0].clear() + +try: + from importlib.util import cache_from_source # Python >= 3.4 +except ImportError: # pragma: no cover + try: + from imp import cache_from_source + except ImportError: # pragma: no cover + def cache_from_source(path, debug_override=None): + assert path.endswith('.py') + if debug_override is None: + debug_override = __debug__ + if debug_override: + suffix = 'c' + else: + suffix = 'o' + return path + suffix + +try: + from collections import OrderedDict +except ImportError: # pragma: no cover +## {{{ http://code.activestate.com/recipes/576693/ (r9) +# Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy. +# Passes Python2.7's test suite and incorporates all the latest updates. + try: + from thread import get_ident as _get_ident + except ImportError: + from dummy_thread import get_ident as _get_ident + + try: + from _abcoll import KeysView, ValuesView, ItemsView + except ImportError: + pass + + + class OrderedDict(dict): + 'Dictionary that remembers insertion order' + # An inherited dict maps keys to values. + # The inherited dict provides __getitem__, __len__, __contains__, and get. + # The remaining methods are order-aware. + # Big-O running times for all methods are the same as for regular dictionaries. 
+ + # The internal self.__map dictionary maps keys to links in a doubly linked list. + # The circular doubly linked list starts and ends with a sentinel element. + # The sentinel element never gets deleted (this simplifies the algorithm). + # Each link is stored as a list of length three: [PREV, NEXT, KEY]. + + def __init__(self, *args, **kwds): + '''Initialize an ordered dictionary. Signature is the same as for + regular dictionaries, but keyword arguments are not recommended + because their insertion order is arbitrary. + + ''' + if len(args) > 1: + raise TypeError('expected at most 1 arguments, got %d' % len(args)) + try: + self.__root + except AttributeError: + self.__root = root = [] # sentinel node + root[:] = [root, root, None] + self.__map = {} + self.__update(*args, **kwds) + + def __setitem__(self, key, value, dict_setitem=dict.__setitem__): + 'od.__setitem__(i, y) <==> od[i]=y' + # Setting a new item creates a new link which goes at the end of the linked + # list, and the inherited dictionary is updated with the new key/value pair. + if key not in self: + root = self.__root + last = root[0] + last[1] = root[0] = self.__map[key] = [last, root, key] + dict_setitem(self, key, value) + + def __delitem__(self, key, dict_delitem=dict.__delitem__): + 'od.__delitem__(y) <==> del od[y]' + # Deleting an existing item uses self.__map to find the link which is + # then removed by updating the links in the predecessor and successor nodes. + dict_delitem(self, key) + link_prev, link_next, key = self.__map.pop(key) + link_prev[1] = link_next + link_next[0] = link_prev + + def __iter__(self): + 'od.__iter__() <==> iter(od)' + root = self.__root + curr = root[1] + while curr is not root: + yield curr[2] + curr = curr[1] + + def __reversed__(self): + 'od.__reversed__() <==> reversed(od)' + root = self.__root + curr = root[0] + while curr is not root: + yield curr[2] + curr = curr[0] + + def clear(self): + 'od.clear() -> None. Remove all items from od.' + try: + for node in self.__map.itervalues(): + del node[:] + root = self.__root + root[:] = [root, root, None] + self.__map.clear() + except AttributeError: + pass + dict.clear(self) + + def popitem(self, last=True): + '''od.popitem() -> (k, v), return and remove a (key, value) pair. + Pairs are returned in LIFO order if last is true or FIFO order if false. + + ''' + if not self: + raise KeyError('dictionary is empty') + root = self.__root + if last: + link = root[0] + link_prev = link[0] + link_prev[1] = root + root[0] = link_prev + else: + link = root[1] + link_next = link[1] + root[1] = link_next + link_next[0] = root + key = link[2] + del self.__map[key] + value = dict.pop(self, key) + return key, value + + # -- the following methods do not depend on the internal structure -- + + def keys(self): + 'od.keys() -> list of keys in od' + return list(self) + + def values(self): + 'od.values() -> list of values in od' + return [self[key] for key in self] + + def items(self): + 'od.items() -> list of (key, value) pairs in od' + return [(key, self[key]) for key in self] + + def iterkeys(self): + 'od.iterkeys() -> an iterator over the keys in od' + return iter(self) + + def itervalues(self): + 'od.itervalues -> an iterator over the values in od' + for k in self: + yield self[k] + + def iteritems(self): + 'od.iteritems -> an iterator over the (key, value) items in od' + for k in self: + yield (k, self[k]) + + def update(*args, **kwds): + '''od.update(E, **F) -> None. Update od from dict/iterable E and F. 
+ + If E is a dict instance, does: for k in E: od[k] = E[k] + If E has a .keys() method, does: for k in E.keys(): od[k] = E[k] + Or if E is an iterable of items, does: for k, v in E: od[k] = v + In either case, this is followed by: for k, v in F.items(): od[k] = v + + ''' + if len(args) > 2: + raise TypeError('update() takes at most 2 positional ' + 'arguments (%d given)' % (len(args),)) + elif not args: + raise TypeError('update() takes at least 1 argument (0 given)') + self = args[0] + # Make progressively weaker assumptions about "other" + other = () + if len(args) == 2: + other = args[1] + if isinstance(other, dict): + for key in other: + self[key] = other[key] + elif hasattr(other, 'keys'): + for key in other.keys(): + self[key] = other[key] + else: + for key, value in other: + self[key] = value + for key, value in kwds.items(): + self[key] = value + + __update = update # let subclasses override update without breaking __init__ + + __marker = object() + + def pop(self, key, default=__marker): + '''od.pop(k[,d]) -> v, remove specified key and return the corresponding value. + If key is not found, d is returned if given, otherwise KeyError is raised. + + ''' + if key in self: + result = self[key] + del self[key] + return result + if default is self.__marker: + raise KeyError(key) + return default + + def setdefault(self, key, default=None): + 'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od' + if key in self: + return self[key] + self[key] = default + return default + + def __repr__(self, _repr_running=None): + 'od.__repr__() <==> repr(od)' + if not _repr_running: _repr_running = {} + call_key = id(self), _get_ident() + if call_key in _repr_running: + return '...' + _repr_running[call_key] = 1 + try: + if not self: + return '%s()' % (self.__class__.__name__,) + return '%s(%r)' % (self.__class__.__name__, self.items()) + finally: + del _repr_running[call_key] + + def __reduce__(self): + 'Return state information for pickling' + items = [[k, self[k]] for k in self] + inst_dict = vars(self).copy() + for k in vars(OrderedDict()): + inst_dict.pop(k, None) + if inst_dict: + return (self.__class__, (items,), inst_dict) + return self.__class__, (items,) + + def copy(self): + 'od.copy() -> a shallow copy of od' + return self.__class__(self) + + @classmethod + def fromkeys(cls, iterable, value=None): + '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S + and values equal to v (which defaults to None). + + ''' + d = cls() + for key in iterable: + d[key] = value + return d + + def __eq__(self, other): + '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive + while comparison to a regular mapping is order-insensitive. 
+ + ''' + if isinstance(other, OrderedDict): + return len(self)==len(other) and self.items() == other.items() + return dict.__eq__(self, other) + + def __ne__(self, other): + return not self == other + + # -- the following methods are only used in Python 2.7 -- + + def viewkeys(self): + "od.viewkeys() -> a set-like object providing a view on od's keys" + return KeysView(self) + + def viewvalues(self): + "od.viewvalues() -> an object providing a view on od's values" + return ValuesView(self) + + def viewitems(self): + "od.viewitems() -> a set-like object providing a view on od's items" + return ItemsView(self) + +try: + from logging.config import BaseConfigurator, valid_ident +except ImportError: # pragma: no cover + IDENTIFIER = re.compile('^[a-z_][a-z0-9_]*$', re.I) + + + def valid_ident(s): + m = IDENTIFIER.match(s) + if not m: + raise ValueError('Not a valid Python identifier: %r' % s) + return True + + + # The ConvertingXXX classes are wrappers around standard Python containers, + # and they serve to convert any suitable values in the container. The + # conversion converts base dicts, lists and tuples to their wrapped + # equivalents, whereas strings which match a conversion format are converted + # appropriately. + # + # Each wrapper should have a configurator attribute holding the actual + # configurator to use for conversion. + + class ConvertingDict(dict): + """A converting dictionary wrapper.""" + + def __getitem__(self, key): + value = dict.__getitem__(self, key) + result = self.configurator.convert(value) + #If the converted value is different, save for next time + if value is not result: + self[key] = result + if type(result) in (ConvertingDict, ConvertingList, + ConvertingTuple): + result.parent = self + result.key = key + return result + + def get(self, key, default=None): + value = dict.get(self, key, default) + result = self.configurator.convert(value) + #If the converted value is different, save for next time + if value is not result: + self[key] = result + if type(result) in (ConvertingDict, ConvertingList, + ConvertingTuple): + result.parent = self + result.key = key + return result + + def pop(self, key, default=None): + value = dict.pop(self, key, default) + result = self.configurator.convert(value) + if value is not result: + if type(result) in (ConvertingDict, ConvertingList, + ConvertingTuple): + result.parent = self + result.key = key + return result + + class ConvertingList(list): + """A converting list wrapper.""" + def __getitem__(self, key): + value = list.__getitem__(self, key) + result = self.configurator.convert(value) + #If the converted value is different, save for next time + if value is not result: + self[key] = result + if type(result) in (ConvertingDict, ConvertingList, + ConvertingTuple): + result.parent = self + result.key = key + return result + + def pop(self, idx=-1): + value = list.pop(self, idx) + result = self.configurator.convert(value) + if value is not result: + if type(result) in (ConvertingDict, ConvertingList, + ConvertingTuple): + result.parent = self + return result + + class ConvertingTuple(tuple): + """A converting tuple wrapper.""" + def __getitem__(self, key): + value = tuple.__getitem__(self, key) + result = self.configurator.convert(value) + if value is not result: + if type(result) in (ConvertingDict, ConvertingList, + ConvertingTuple): + result.parent = self + result.key = key + return result + + class BaseConfigurator(object): + """ + The configurator base class which defines some useful defaults. 
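+        For example (an illustration added here, not from the upstream
+        docstring, assuming the configuration holds a ``handlers`` mapping
+        with a ``console`` entry): a string value of
+        ``'ext://logging.StreamHandler'`` is resolved to that object via
+        :meth:`ext_convert`/:meth:`resolve`, while ``'cfg://handlers.console'``
+        is looked up inside the configuration itself via :meth:`cfg_convert`;
+        both prefixes come from ``value_converters`` below.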
+ """ + + CONVERT_PATTERN = re.compile(r'^(?P<prefix>[a-z]+)://(?P<suffix>.*)$') + + WORD_PATTERN = re.compile(r'^\s*(\w+)\s*') + DOT_PATTERN = re.compile(r'^\.\s*(\w+)\s*') + INDEX_PATTERN = re.compile(r'^\[\s*(\w+)\s*\]\s*') + DIGIT_PATTERN = re.compile(r'^\d+$') + + value_converters = { + 'ext' : 'ext_convert', + 'cfg' : 'cfg_convert', + } + + # We might want to use a different one, e.g. importlib + importer = staticmethod(__import__) + + def __init__(self, config): + self.config = ConvertingDict(config) + self.config.configurator = self + + def resolve(self, s): + """ + Resolve strings to objects using standard import and attribute + syntax. + """ + name = s.split('.') + used = name.pop(0) + try: + found = self.importer(used) + for frag in name: + used += '.' + frag + try: + found = getattr(found, frag) + except AttributeError: + self.importer(used) + found = getattr(found, frag) + return found + except ImportError: + e, tb = sys.exc_info()[1:] + v = ValueError('Cannot resolve %r: %s' % (s, e)) + v.__cause__, v.__traceback__ = e, tb + raise v + + def ext_convert(self, value): + """Default converter for the ext:// protocol.""" + return self.resolve(value) + + def cfg_convert(self, value): + """Default converter for the cfg:// protocol.""" + rest = value + m = self.WORD_PATTERN.match(rest) + if m is None: + raise ValueError("Unable to convert %r" % value) + else: + rest = rest[m.end():] + d = self.config[m.groups()[0]] + #print d, rest + while rest: + m = self.DOT_PATTERN.match(rest) + if m: + d = d[m.groups()[0]] + else: + m = self.INDEX_PATTERN.match(rest) + if m: + idx = m.groups()[0] + if not self.DIGIT_PATTERN.match(idx): + d = d[idx] + else: + try: + n = int(idx) # try as number first (most likely) + d = d[n] + except TypeError: + d = d[idx] + if m: + rest = rest[m.end():] + else: + raise ValueError('Unable to convert ' + '%r at %r' % (value, rest)) + #rest should be empty + return d + + def convert(self, value): + """ + Convert values to an appropriate type. dicts, lists and tuples are + replaced by their converting alternatives. Strings are checked to + see if they have a conversion format and are converted if they do. 
+ """ + if not isinstance(value, ConvertingDict) and isinstance(value, dict): + value = ConvertingDict(value) + value.configurator = self + elif not isinstance(value, ConvertingList) and isinstance(value, list): + value = ConvertingList(value) + value.configurator = self + elif not isinstance(value, ConvertingTuple) and\ + isinstance(value, tuple): + value = ConvertingTuple(value) + value.configurator = self + elif isinstance(value, string_types): + m = self.CONVERT_PATTERN.match(value) + if m: + d = m.groupdict() + prefix = d['prefix'] + converter = self.value_converters.get(prefix, None) + if converter: + suffix = d['suffix'] + converter = getattr(self, converter) + value = converter(suffix) + return value + + def configure_custom(self, config): + """Configure an object with a user-supplied factory.""" + c = config.pop('()') + if not callable(c): + c = self.resolve(c) + props = config.pop('.', None) + # Check for valid identifiers + kwargs = dict([(k, config[k]) for k in config if valid_ident(k)]) + result = c(**kwargs) + if props: + for name, value in props.items(): + setattr(result, name, value) + return result + + def as_tuple(self, value): + """Utility function which converts lists to tuples.""" + if isinstance(value, list): + value = tuple(value) + return value diff --git a/env/lib/python3.6/site-packages/pip/_vendor/distlib/database.py b/env/lib/python3.6/site-packages/pip/_vendor/distlib/database.py new file mode 100644 index 0000000..a19905e --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/distlib/database.py @@ -0,0 +1,1336 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2012-2017 The Python Software Foundation. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +"""PEP 376 implementation.""" + +from __future__ import unicode_literals + +import base64 +import codecs +import contextlib +import hashlib +import logging +import os +import posixpath +import sys +import zipimport + +from . import DistlibException, resources +from .compat import StringIO +from .version import get_scheme, UnsupportedVersionError +from .metadata import Metadata, METADATA_FILENAME, WHEEL_METADATA_FILENAME +from .util import (parse_requirement, cached_property, parse_name_and_version, + read_exports, write_exports, CSVReader, CSVWriter) + + +__all__ = ['Distribution', 'BaseInstalledDistribution', + 'InstalledDistribution', 'EggInfoDistribution', + 'DistributionPath'] + + +logger = logging.getLogger(__name__) + +EXPORTS_FILENAME = 'pydist-exports.json' +COMMANDS_FILENAME = 'pydist-commands.json' + +DIST_FILES = ('INSTALLER', METADATA_FILENAME, 'RECORD', 'REQUESTED', + 'RESOURCES', EXPORTS_FILENAME, 'SHARED') + +DISTINFO_EXT = '.dist-info' + + +class _Cache(object): + """ + A simple cache mapping names and .dist-info paths to distributions + """ + def __init__(self): + """ + Initialise an instance. There is normally one for each DistributionPath. + """ + self.name = {} + self.path = {} + self.generated = False + + def clear(self): + """ + Clear the cache, setting it to its initial state. + """ + self.name.clear() + self.path.clear() + self.generated = False + + def add(self, dist): + """ + Add a distribution to the cache. + :param dist: The distribution to add. + """ + if dist.path not in self.path: + self.path[dist.path] = dist + self.name.setdefault(dist.key, []).append(dist) + + +class DistributionPath(object): + """ + Represents a set of distributions installed on a path (typically sys.path). 
+ """ + def __init__(self, path=None, include_egg=False): + """ + Create an instance from a path, optionally including legacy (distutils/ + setuptools/distribute) distributions. + :param path: The path to use, as a list of directories. If not specified, + sys.path is used. + :param include_egg: If True, this instance will look for and return legacy + distributions as well as those based on PEP 376. + """ + if path is None: + path = sys.path + self.path = path + self._include_dist = True + self._include_egg = include_egg + + self._cache = _Cache() + self._cache_egg = _Cache() + self._cache_enabled = True + self._scheme = get_scheme('default') + + def _get_cache_enabled(self): + return self._cache_enabled + + def _set_cache_enabled(self, value): + self._cache_enabled = value + + cache_enabled = property(_get_cache_enabled, _set_cache_enabled) + + def clear_cache(self): + """ + Clears the internal cache. + """ + self._cache.clear() + self._cache_egg.clear() + + + def _yield_distributions(self): + """ + Yield .dist-info and/or .egg(-info) distributions. + """ + # We need to check if we've seen some resources already, because on + # some Linux systems (e.g. some Debian/Ubuntu variants) there are + # symlinks which alias other files in the environment. + seen = set() + for path in self.path: + finder = resources.finder_for_path(path) + if finder is None: + continue + r = finder.find('') + if not r or not r.is_container: + continue + rset = sorted(r.resources) + for entry in rset: + r = finder.find(entry) + if not r or r.path in seen: + continue + if self._include_dist and entry.endswith(DISTINFO_EXT): + possible_filenames = [METADATA_FILENAME, WHEEL_METADATA_FILENAME] + for metadata_filename in possible_filenames: + metadata_path = posixpath.join(entry, metadata_filename) + pydist = finder.find(metadata_path) + if pydist: + break + else: + continue + + with contextlib.closing(pydist.as_stream()) as stream: + metadata = Metadata(fileobj=stream, scheme='legacy') + logger.debug('Found %s', r.path) + seen.add(r.path) + yield new_dist_class(r.path, metadata=metadata, + env=self) + elif self._include_egg and entry.endswith(('.egg-info', + '.egg')): + logger.debug('Found %s', r.path) + seen.add(r.path) + yield old_dist_class(r.path, self) + + def _generate_cache(self): + """ + Scan the path for distributions and populate the cache with + those that are found. + """ + gen_dist = not self._cache.generated + gen_egg = self._include_egg and not self._cache_egg.generated + if gen_dist or gen_egg: + for dist in self._yield_distributions(): + if isinstance(dist, InstalledDistribution): + self._cache.add(dist) + else: + self._cache_egg.add(dist) + + if gen_dist: + self._cache.generated = True + if gen_egg: + self._cache_egg.generated = True + + @classmethod + def distinfo_dirname(cls, name, version): + """ + The *name* and *version* parameters are converted into their + filename-escaped form, i.e. any ``'-'`` characters are replaced + with ``'_'`` other than the one in ``'dist-info'`` and the one + separating the name from the version number. + + :parameter name: is converted to a standard distribution name by replacing + any runs of non- alphanumeric characters with a single + ``'-'``. + :type name: string + :parameter version: is converted to a standard version string. Spaces + become dots, and all other non-alphanumeric characters + (except dots) become dashes, with runs of multiple + dashes condensed to a single dash. 
+ :type version: string + :returns: directory name + :rtype: string""" + name = name.replace('-', '_') + return '-'.join([name, version]) + DISTINFO_EXT + + def get_distributions(self): + """ + Provides an iterator that looks for distributions and returns + :class:`InstalledDistribution` or + :class:`EggInfoDistribution` instances for each one of them. + + :rtype: iterator of :class:`InstalledDistribution` and + :class:`EggInfoDistribution` instances + """ + if not self._cache_enabled: + for dist in self._yield_distributions(): + yield dist + else: + self._generate_cache() + + for dist in self._cache.path.values(): + yield dist + + if self._include_egg: + for dist in self._cache_egg.path.values(): + yield dist + + def get_distribution(self, name): + """ + Looks for a named distribution on the path. + + This function only returns the first result found, as no more than one + value is expected. If nothing is found, ``None`` is returned. + + :rtype: :class:`InstalledDistribution`, :class:`EggInfoDistribution` + or ``None`` + """ + result = None + name = name.lower() + if not self._cache_enabled: + for dist in self._yield_distributions(): + if dist.key == name: + result = dist + break + else: + self._generate_cache() + + if name in self._cache.name: + result = self._cache.name[name][0] + elif self._include_egg and name in self._cache_egg.name: + result = self._cache_egg.name[name][0] + return result + + def provides_distribution(self, name, version=None): + """ + Iterates over all distributions to find which distributions provide *name*. + If a *version* is provided, it will be used to filter the results. + + This function only returns the first result found, since no more than + one values are expected. If the directory is not found, returns ``None``. + + :parameter version: a version specifier that indicates the version + required, conforming to the format in ``PEP-345`` + + :type name: string + :type version: string + """ + matcher = None + if version is not None: + try: + matcher = self._scheme.matcher('%s (%s)' % (name, version)) + except ValueError: + raise DistlibException('invalid name or version: %r, %r' % + (name, version)) + + for dist in self.get_distributions(): + # We hit a problem on Travis where enum34 was installed and doesn't + # have a provides attribute ... + if not hasattr(dist, 'provides'): + logger.debug('No "provides": %s', dist) + else: + provided = dist.provides + + for p in provided: + p_name, p_ver = parse_name_and_version(p) + if matcher is None: + if p_name == name: + yield dist + break + else: + if p_name == name and matcher.match(p_ver): + yield dist + break + + def get_file_path(self, name, relative_path): + """ + Return the path to a resource file. + """ + dist = self.get_distribution(name) + if dist is None: + raise LookupError('no distribution named %r found' % name) + return dist.get_resource_path(relative_path) + + def get_exported_entries(self, category, name=None): + """ + Return all of the exported entries in a particular category. + + :param category: The category to search for entries. + :param name: If specified, only entries with that name are returned. + """ + for dist in self.get_distributions(): + r = dist.exports + if category in r: + d = r[category] + if name is not None: + if name in d: + yield d[name] + else: + for v in d.values(): + yield v + + +class Distribution(object): + """ + A base class for distributions, whether installed or from indexes. + Either way, it must have some metadata, so that's all that's needed + for construction. 
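+    For example (illustrative, with a hypothetical name, not from the
+    upstream docstring): a distribution whose metadata carries name ``foo``
+    and version ``1.0`` exposes ``'foo (1.0)'`` in :attr:`provides` and
+    satisfies ``matches_requirement('foo (>= 0.9)')``.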
+ """ + + build_time_dependency = False + """ + Set to True if it's known to be only a build-time dependency (i.e. + not needed after installation). + """ + + requested = False + """A boolean that indicates whether the ``REQUESTED`` metadata file is + present (in other words, whether the package was installed by user + request or it was installed as a dependency).""" + + def __init__(self, metadata): + """ + Initialise an instance. + :param metadata: The instance of :class:`Metadata` describing this + distribution. + """ + self.metadata = metadata + self.name = metadata.name + self.key = self.name.lower() # for case-insensitive comparisons + self.version = metadata.version + self.locator = None + self.digest = None + self.extras = None # additional features requested + self.context = None # environment marker overrides + self.download_urls = set() + self.digests = {} + + @property + def source_url(self): + """ + The source archive download URL for this distribution. + """ + return self.metadata.source_url + + download_url = source_url # Backward compatibility + + @property + def name_and_version(self): + """ + A utility property which displays the name and version in parentheses. + """ + return '%s (%s)' % (self.name, self.version) + + @property + def provides(self): + """ + A set of distribution names and versions provided by this distribution. + :return: A set of "name (version)" strings. + """ + plist = self.metadata.provides + s = '%s (%s)' % (self.name, self.version) + if s not in plist: + plist.append(s) + return plist + + def _get_requirements(self, req_attr): + md = self.metadata + logger.debug('Getting requirements from metadata %r', md.todict()) + reqts = getattr(md, req_attr) + return set(md.get_requirements(reqts, extras=self.extras, + env=self.context)) + + @property + def run_requires(self): + return self._get_requirements('run_requires') + + @property + def meta_requires(self): + return self._get_requirements('meta_requires') + + @property + def build_requires(self): + return self._get_requirements('build_requires') + + @property + def test_requires(self): + return self._get_requirements('test_requires') + + @property + def dev_requires(self): + return self._get_requirements('dev_requires') + + def matches_requirement(self, req): + """ + Say if this instance matches (fulfills) a requirement. + :param req: The requirement to match. + :rtype req: str + :return: True if it matches, else False. + """ + # Requirement may contain extras - parse to lose those + # from what's passed to the matcher + r = parse_requirement(req) + scheme = get_scheme(self.metadata.scheme) + try: + matcher = scheme.matcher(r.requirement) + except UnsupportedVersionError: + # XXX compat-mode if cannot read the version + logger.warning('could not read version %r - using name only', + req) + name = req.split()[0] + matcher = scheme.matcher(name) + + name = matcher.key # case-insensitive + + result = False + for p in self.provides: + p_name, p_ver = parse_name_and_version(p) + if p_name != name: + continue + try: + result = matcher.match(p_ver) + break + except UnsupportedVersionError: + pass + return result + + def __repr__(self): + """ + Return a textual representation of this instance, + """ + if self.source_url: + suffix = ' [%s]' % self.source_url + else: + suffix = '' + return '<Distribution %s (%s)%s>' % (self.name, self.version, suffix) + + def __eq__(self, other): + """ + See if this distribution is the same as another. + :param other: The distribution to compare with. To be equal to one + another. 
distributions must have the same type, name, + version and source_url. + :return: True if it is the same, else False. + """ + if type(other) is not type(self): + result = False + else: + result = (self.name == other.name and + self.version == other.version and + self.source_url == other.source_url) + return result + + def __hash__(self): + """ + Compute hash in a way which matches the equality test. + """ + return hash(self.name) + hash(self.version) + hash(self.source_url) + + +class BaseInstalledDistribution(Distribution): + """ + This is the base class for installed distributions (whether PEP 376 or + legacy). + """ + + hasher = None + + def __init__(self, metadata, path, env=None): + """ + Initialise an instance. + :param metadata: An instance of :class:`Metadata` which describes the + distribution. This will normally have been initialised + from a metadata file in the ``path``. + :param path: The path of the ``.dist-info`` or ``.egg-info`` + directory for the distribution. + :param env: This is normally the :class:`DistributionPath` + instance where this distribution was found. + """ + super(BaseInstalledDistribution, self).__init__(metadata) + self.path = path + self.dist_path = env + + def get_hash(self, data, hasher=None): + """ + Get the hash of some data, using a particular hash algorithm, if + specified. + + :param data: The data to be hashed. + :type data: bytes + :param hasher: The name of a hash implementation, supported by hashlib, + or ``None``. Examples of valid values are ``'sha1'``, + ``'sha224'``, ``'sha384'``, '``sha256'``, ``'md5'`` and + ``'sha512'``. If no hasher is specified, the ``hasher`` + attribute of the :class:`InstalledDistribution` instance + is used. If the hasher is determined to be ``None``, MD5 + is used as the hashing algorithm. + :returns: The hash of the data. If a hasher was explicitly specified, + the returned hash will be prefixed with the specified hasher + followed by '='. + :rtype: str + """ + if hasher is None: + hasher = self.hasher + if hasher is None: + hasher = hashlib.md5 + prefix = '' + else: + hasher = getattr(hashlib, hasher) + prefix = '%s=' % self.hasher + digest = hasher(data).digest() + digest = base64.urlsafe_b64encode(digest).rstrip(b'=').decode('ascii') + return '%s%s' % (prefix, digest) + + +class InstalledDistribution(BaseInstalledDistribution): + """ + Created with the *path* of the ``.dist-info`` directory provided to the + constructor. It reads the metadata contained in ``pydist.json`` when it is + instantiated., or uses a passed in Metadata instance (useful for when + dry-run mode is being used). 
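+    Illustrative sketch (hypothetical package name and path, not from the
+    upstream docstring)::
+
+        dist = InstalledDistribution('/path/to/site-packages/foo-1.0.dist-info')
+        dist.name, dist.version            # ('foo', '1.0')
+        list(dist.list_installed_files())  # (path, hash, size) tuples from RECORD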
+ """ + + hasher = 'sha256' + + def __init__(self, path, metadata=None, env=None): + self.modules = [] + self.finder = finder = resources.finder_for_path(path) + if finder is None: + raise ValueError('finder unavailable for %s' % path) + if env and env._cache_enabled and path in env._cache.path: + metadata = env._cache.path[path].metadata + elif metadata is None: + r = finder.find(METADATA_FILENAME) + # Temporary - for Wheel 0.23 support + if r is None: + r = finder.find(WHEEL_METADATA_FILENAME) + # Temporary - for legacy support + if r is None: + r = finder.find('METADATA') + if r is None: + raise ValueError('no %s found in %s' % (METADATA_FILENAME, + path)) + with contextlib.closing(r.as_stream()) as stream: + metadata = Metadata(fileobj=stream, scheme='legacy') + + super(InstalledDistribution, self).__init__(metadata, path, env) + + if env and env._cache_enabled: + env._cache.add(self) + + r = finder.find('REQUESTED') + self.requested = r is not None + p = os.path.join(path, 'top_level.txt') + if os.path.exists(p): + with open(p, 'rb') as f: + data = f.read() + self.modules = data.splitlines() + + def __repr__(self): + return '<InstalledDistribution %r %s at %r>' % ( + self.name, self.version, self.path) + + def __str__(self): + return "%s %s" % (self.name, self.version) + + def _get_records(self): + """ + Get the list of installed files for the distribution + :return: A list of tuples of path, hash and size. Note that hash and + size might be ``None`` for some entries. The path is exactly + as stored in the file (which is as in PEP 376). + """ + results = [] + r = self.get_distinfo_resource('RECORD') + with contextlib.closing(r.as_stream()) as stream: + with CSVReader(stream=stream) as record_reader: + # Base location is parent dir of .dist-info dir + #base_location = os.path.dirname(self.path) + #base_location = os.path.abspath(base_location) + for row in record_reader: + missing = [None for i in range(len(row), 3)] + path, checksum, size = row + missing + #if not os.path.isabs(path): + # path = path.replace('/', os.sep) + # path = os.path.join(base_location, path) + results.append((path, checksum, size)) + return results + + @cached_property + def exports(self): + """ + Return the information exported by this distribution. + :return: A dictionary of exports, mapping an export category to a dict + of :class:`ExportEntry` instances describing the individual + export entries, and keyed by name. + """ + result = {} + r = self.get_distinfo_resource(EXPORTS_FILENAME) + if r: + result = self.read_exports() + return result + + def read_exports(self): + """ + Read exports data from a file in .ini format. + + :return: A dictionary of exports, mapping an export category to a list + of :class:`ExportEntry` instances describing the individual + export entries. + """ + result = {} + r = self.get_distinfo_resource(EXPORTS_FILENAME) + if r: + with contextlib.closing(r.as_stream()) as stream: + result = read_exports(stream) + return result + + def write_exports(self, exports): + """ + Write a dictionary of exports to a file in .ini format. + :param exports: A dictionary of exports, mapping an export category to + a list of :class:`ExportEntry` instances describing the + individual export entries. + """ + rf = self.get_distinfo_file(EXPORTS_FILENAME) + with open(rf, 'w') as f: + write_exports(exports, f) + + def get_resource_path(self, relative_path): + """ + NOTE: This API may change in the future. + + Return the absolute path to a resource file with the given relative + path. 
+ + :param relative_path: The path, relative to .dist-info, of the resource + of interest. + :return: The absolute path where the resource is to be found. + """ + r = self.get_distinfo_resource('RESOURCES') + with contextlib.closing(r.as_stream()) as stream: + with CSVReader(stream=stream) as resources_reader: + for relative, destination in resources_reader: + if relative == relative_path: + return destination + raise KeyError('no resource file with relative path %r ' + 'is installed' % relative_path) + + def list_installed_files(self): + """ + Iterates over the ``RECORD`` entries and returns a tuple + ``(path, hash, size)`` for each line. + + :returns: iterator of (path, hash, size) + """ + for result in self._get_records(): + yield result + + def write_installed_files(self, paths, prefix, dry_run=False): + """ + Writes the ``RECORD`` file, using the ``paths`` iterable passed in. Any + existing ``RECORD`` file is silently overwritten. + + prefix is used to determine when to write absolute paths. + """ + prefix = os.path.join(prefix, '') + base = os.path.dirname(self.path) + base_under_prefix = base.startswith(prefix) + base = os.path.join(base, '') + record_path = self.get_distinfo_file('RECORD') + logger.info('creating %s', record_path) + if dry_run: + return None + with CSVWriter(record_path) as writer: + for path in paths: + if os.path.isdir(path) or path.endswith(('.pyc', '.pyo')): + # do not put size and hash, as in PEP-376 + hash_value = size = '' + else: + size = '%d' % os.path.getsize(path) + with open(path, 'rb') as fp: + hash_value = self.get_hash(fp.read()) + if path.startswith(base) or (base_under_prefix and + path.startswith(prefix)): + path = os.path.relpath(path, base) + writer.writerow((path, hash_value, size)) + + # add the RECORD file itself + if record_path.startswith(base): + record_path = os.path.relpath(record_path, base) + writer.writerow((record_path, '', '')) + return record_path + + def check_installed_files(self): + """ + Checks that the hashes and sizes of the files in ``RECORD`` are + matched by the files themselves. Returns a (possibly empty) list of + mismatches. Each entry in the mismatch list will be a tuple consisting + of the path, 'exists', 'size' or 'hash' according to what didn't match + (existence is checked first, then size, then hash), the expected + value and the actual value. + """ + mismatches = [] + base = os.path.dirname(self.path) + record_path = self.get_distinfo_file('RECORD') + for path, hash_value, size in self.list_installed_files(): + if not os.path.isabs(path): + path = os.path.join(base, path) + if path == record_path: + continue + if not os.path.exists(path): + mismatches.append((path, 'exists', True, False)) + elif os.path.isfile(path): + actual_size = str(os.path.getsize(path)) + if size and actual_size != size: + mismatches.append((path, 'size', size, actual_size)) + elif hash_value: + if '=' in hash_value: + hasher = hash_value.split('=', 1)[0] + else: + hasher = None + + with open(path, 'rb') as f: + actual_hash = self.get_hash(f.read(), hasher) + if actual_hash != hash_value: + mismatches.append((path, 'hash', hash_value, actual_hash)) + return mismatches + + @cached_property + def shared_locations(self): + """ + A dictionary of shared locations whose keys are in the set 'prefix', + 'purelib', 'platlib', 'scripts', 'headers', 'data' and 'namespace'. + The corresponding value is the absolute path of that category for + this distribution, and takes into account any paths selected by the + user at installation time (e.g. 
via command-line arguments). In the + case of the 'namespace' key, this would be a list of absolute paths + for the roots of namespace packages in this distribution. + + The first time this property is accessed, the relevant information is + read from the SHARED file in the .dist-info directory. + """ + result = {} + shared_path = os.path.join(self.path, 'SHARED') + if os.path.isfile(shared_path): + with codecs.open(shared_path, 'r', encoding='utf-8') as f: + lines = f.read().splitlines() + for line in lines: + key, value = line.split('=', 1) + if key == 'namespace': + result.setdefault(key, []).append(value) + else: + result[key] = value + return result + + def write_shared_locations(self, paths, dry_run=False): + """ + Write shared location information to the SHARED file in .dist-info. + :param paths: A dictionary as described in the documentation for + :meth:`shared_locations`. + :param dry_run: If True, the action is logged but no file is actually + written. + :return: The path of the file written to. + """ + shared_path = os.path.join(self.path, 'SHARED') + logger.info('creating %s', shared_path) + if dry_run: + return None + lines = [] + for key in ('prefix', 'lib', 'headers', 'scripts', 'data'): + path = paths[key] + if os.path.isdir(paths[key]): + lines.append('%s=%s' % (key, path)) + for ns in paths.get('namespace', ()): + lines.append('namespace=%s' % ns) + + with codecs.open(shared_path, 'w', encoding='utf-8') as f: + f.write('\n'.join(lines)) + return shared_path + + def get_distinfo_resource(self, path): + if path not in DIST_FILES: + raise DistlibException('invalid path for a dist-info file: ' + '%r at %r' % (path, self.path)) + finder = resources.finder_for_path(self.path) + if finder is None: + raise DistlibException('Unable to get a finder for %s' % self.path) + return finder.find(path) + + def get_distinfo_file(self, path): + """ + Returns a path located under the ``.dist-info`` directory. Returns a + string representing the path. + + :parameter path: a ``'/'``-separated path relative to the + ``.dist-info`` directory or an absolute path; + If *path* is an absolute path and doesn't start + with the ``.dist-info`` directory path, + a :class:`DistlibException` is raised + :type path: str + :rtype: str + """ + # Check if it is an absolute path # XXX use relpath, add tests + if path.find(os.sep) >= 0: + # it's an absolute path? + distinfo_dirname, path = path.split(os.sep)[-2:] + if distinfo_dirname != self.path.split(os.sep)[-1]: + raise DistlibException( + 'dist-info file %r does not belong to the %r %s ' + 'distribution' % (path, self.name, self.version)) + + # The file must be relative + if path not in DIST_FILES: + raise DistlibException('invalid path for a dist-info file: ' + '%r at %r' % (path, self.path)) + + return os.path.join(self.path, path) + + def list_distinfo_files(self): + """ + Iterates over the ``RECORD`` entries and returns paths for each line if + the path is pointing to a file located in the ``.dist-info`` directory + or one of its subdirectories. 
+ + :returns: iterator of paths + """ + base = os.path.dirname(self.path) + for path, checksum, size in self._get_records(): + # XXX add separator or use real relpath algo + if not os.path.isabs(path): + path = os.path.join(base, path) + if path.startswith(self.path): + yield path + + def __eq__(self, other): + return (isinstance(other, InstalledDistribution) and + self.path == other.path) + + # See http://docs.python.org/reference/datamodel#object.__hash__ + __hash__ = object.__hash__ + + +class EggInfoDistribution(BaseInstalledDistribution): + """Created with the *path* of the ``.egg-info`` directory or file provided + to the constructor. It reads the metadata contained in the file itself, or + if the given path happens to be a directory, the metadata is read from the + file ``PKG-INFO`` under that directory.""" + + requested = True # as we have no way of knowing, assume it was + shared_locations = {} + + def __init__(self, path, env=None): + def set_name_and_version(s, n, v): + s.name = n + s.key = n.lower() # for case-insensitive comparisons + s.version = v + + self.path = path + self.dist_path = env + if env and env._cache_enabled and path in env._cache_egg.path: + metadata = env._cache_egg.path[path].metadata + set_name_and_version(self, metadata.name, metadata.version) + else: + metadata = self._get_metadata(path) + + # Need to be set before caching + set_name_and_version(self, metadata.name, metadata.version) + + if env and env._cache_enabled: + env._cache_egg.add(self) + super(EggInfoDistribution, self).__init__(metadata, path, env) + + def _get_metadata(self, path): + requires = None + + def parse_requires_data(data): + """Create a list of dependencies from a requires.txt file. + + *data*: the contents of a setuptools-produced requires.txt file. + """ + reqs = [] + lines = data.splitlines() + for line in lines: + line = line.strip() + if line.startswith('['): + logger.warning('Unexpected line: quitting requirement scan: %r', + line) + break + r = parse_requirement(line) + if not r: + logger.warning('Not recognised as a requirement: %r', line) + continue + if r.extras: + logger.warning('extra requirements in requires.txt are ' + 'not supported') + if not r.constraints: + reqs.append(r.name) + else: + cons = ', '.join('%s%s' % c for c in r.constraints) + reqs.append('%s (%s)' % (r.name, cons)) + return reqs + + def parse_requires_path(req_path): + """Create a list of dependencies from a requires.txt file. + + *req_path*: the path to a setuptools-produced requires.txt file. 
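+            For illustration (note added here, not from the upstream
+            docstring): a line such as ``foo>=1.0`` is turned into a
+            constrained requirement string (roughly ``foo (>=1.0)``), an
+            unconstrained line like ``bar`` is kept as ``bar``, and scanning
+            stops at the first ``[section]`` header.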
+ """ + + reqs = [] + try: + with codecs.open(req_path, 'r', 'utf-8') as fp: + reqs = parse_requires_data(fp.read()) + except IOError: + pass + return reqs + + tl_path = tl_data = None + if path.endswith('.egg'): + if os.path.isdir(path): + p = os.path.join(path, 'EGG-INFO') + meta_path = os.path.join(p, 'PKG-INFO') + metadata = Metadata(path=meta_path, scheme='legacy') + req_path = os.path.join(p, 'requires.txt') + tl_path = os.path.join(p, 'top_level.txt') + requires = parse_requires_path(req_path) + else: + # FIXME handle the case where zipfile is not available + zipf = zipimport.zipimporter(path) + fileobj = StringIO( + zipf.get_data('EGG-INFO/PKG-INFO').decode('utf8')) + metadata = Metadata(fileobj=fileobj, scheme='legacy') + try: + data = zipf.get_data('EGG-INFO/requires.txt') + tl_data = zipf.get_data('EGG-INFO/top_level.txt').decode('utf-8') + requires = parse_requires_data(data.decode('utf-8')) + except IOError: + requires = None + elif path.endswith('.egg-info'): + if os.path.isdir(path): + req_path = os.path.join(path, 'requires.txt') + requires = parse_requires_path(req_path) + path = os.path.join(path, 'PKG-INFO') + tl_path = os.path.join(path, 'top_level.txt') + metadata = Metadata(path=path, scheme='legacy') + else: + raise DistlibException('path must end with .egg-info or .egg, ' + 'got %r' % path) + + if requires: + metadata.add_requirements(requires) + # look for top-level modules in top_level.txt, if present + if tl_data is None: + if tl_path is not None and os.path.exists(tl_path): + with open(tl_path, 'rb') as f: + tl_data = f.read().decode('utf-8') + if not tl_data: + tl_data = [] + else: + tl_data = tl_data.splitlines() + self.modules = tl_data + return metadata + + def __repr__(self): + return '<EggInfoDistribution %r %s at %r>' % ( + self.name, self.version, self.path) + + def __str__(self): + return "%s %s" % (self.name, self.version) + + def check_installed_files(self): + """ + Checks that the hashes and sizes of the files in ``RECORD`` are + matched by the files themselves. Returns a (possibly empty) list of + mismatches. Each entry in the mismatch list will be a tuple consisting + of the path, 'exists', 'size' or 'hash' according to what didn't match + (existence is checked first, then size, then hash), the expected + value and the actual value. + """ + mismatches = [] + record_path = os.path.join(self.path, 'installed-files.txt') + if os.path.exists(record_path): + for path, _, _ in self.list_installed_files(): + if path == record_path: + continue + if not os.path.exists(path): + mismatches.append((path, 'exists', True, False)) + return mismatches + + def list_installed_files(self): + """ + Iterates over the ``installed-files.txt`` entries and returns a tuple + ``(path, hash, size)`` for each line. 
+ + :returns: a list of (path, hash, size) + """ + + def _md5(path): + f = open(path, 'rb') + try: + content = f.read() + finally: + f.close() + return hashlib.md5(content).hexdigest() + + def _size(path): + return os.stat(path).st_size + + record_path = os.path.join(self.path, 'installed-files.txt') + result = [] + if os.path.exists(record_path): + with codecs.open(record_path, 'r', encoding='utf-8') as f: + for line in f: + line = line.strip() + p = os.path.normpath(os.path.join(self.path, line)) + # "./" is present as a marker between installed files + # and installation metadata files + if not os.path.exists(p): + logger.warning('Non-existent file: %s', p) + if p.endswith(('.pyc', '.pyo')): + continue + #otherwise fall through and fail + if not os.path.isdir(p): + result.append((p, _md5(p), _size(p))) + result.append((record_path, None, None)) + return result + + def list_distinfo_files(self, absolute=False): + """ + Iterates over the ``installed-files.txt`` entries and returns paths for + each line if the path is pointing to a file located in the + ``.egg-info`` directory or one of its subdirectories. + + :parameter absolute: If *absolute* is ``True``, each returned path is + transformed into a local absolute path. Otherwise the + raw value from ``installed-files.txt`` is returned. + :type absolute: boolean + :returns: iterator of paths + """ + record_path = os.path.join(self.path, 'installed-files.txt') + if os.path.exists(record_path): + skip = True + with codecs.open(record_path, 'r', encoding='utf-8') as f: + for line in f: + line = line.strip() + if line == './': + skip = False + continue + if not skip: + p = os.path.normpath(os.path.join(self.path, line)) + if p.startswith(self.path): + if absolute: + yield p + else: + yield line + + def __eq__(self, other): + return (isinstance(other, EggInfoDistribution) and + self.path == other.path) + + # See http://docs.python.org/reference/datamodel#object.__hash__ + __hash__ = object.__hash__ + +new_dist_class = InstalledDistribution +old_dist_class = EggInfoDistribution + + +class DependencyGraph(object): + """ + Represents a dependency graph between distributions. + + The dependency relationships are stored in an ``adjacency_list`` that maps + distributions to a list of ``(other, label)`` tuples where ``other`` + is a distribution and the edge is labeled with ``label`` (i.e. the version + specifier, if such was provided). Also, for more efficient traversal, for + every distribution ``x``, a list of predecessors is kept in + ``reverse_list[x]``. An edge from distribution ``a`` to + distribution ``b`` means that ``a`` depends on ``b``. If any missing + dependencies are found, they are stored in ``missing``, which is a + dictionary that maps distributions to a list of requirements that were not + provided by any other distributions. + """ + + def __init__(self): + self.adjacency_list = {} + self.reverse_list = {} + self.missing = {} + + def add_distribution(self, distribution): + """Add the *distribution* to the graph. + + :type distribution: :class:`distutils2.database.InstalledDistribution` + or :class:`distutils2.database.EggInfoDistribution` + """ + self.adjacency_list[distribution] = [] + self.reverse_list[distribution] = [] + #self.missing[distribution] = [] + + def add_edge(self, x, y, label=None): + """Add an edge from distribution *x* to distribution *y* with the given + *label*. 
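+        For example (illustrative): after ``add_distribution(a)`` and
+        ``add_distribution(b)``, calling ``add_edge(a, b, 'b (>= 1.0)')``
+        records that *a* depends on *b*, with the requirement text kept as
+        the edge label.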
+ + :type x: :class:`distutils2.database.InstalledDistribution` or + :class:`distutils2.database.EggInfoDistribution` + :type y: :class:`distutils2.database.InstalledDistribution` or + :class:`distutils2.database.EggInfoDistribution` + :type label: ``str`` or ``None`` + """ + self.adjacency_list[x].append((y, label)) + # multiple edges are allowed, so be careful + if x not in self.reverse_list[y]: + self.reverse_list[y].append(x) + + def add_missing(self, distribution, requirement): + """ + Add a missing *requirement* for the given *distribution*. + + :type distribution: :class:`distutils2.database.InstalledDistribution` + or :class:`distutils2.database.EggInfoDistribution` + :type requirement: ``str`` + """ + logger.debug('%s missing %r', distribution, requirement) + self.missing.setdefault(distribution, []).append(requirement) + + def _repr_dist(self, dist): + return '%s %s' % (dist.name, dist.version) + + def repr_node(self, dist, level=1): + """Prints only a subgraph""" + output = [self._repr_dist(dist)] + for other, label in self.adjacency_list[dist]: + dist = self._repr_dist(other) + if label is not None: + dist = '%s [%s]' % (dist, label) + output.append(' ' * level + str(dist)) + suboutput = self.repr_node(other, level + 1) + subs = suboutput.split('\n') + output.extend(subs[1:]) + return '\n'.join(output) + + def to_dot(self, f, skip_disconnected=True): + """Writes a DOT output for the graph to the provided file *f*. + + If *skip_disconnected* is set to ``True``, then all distributions + that are not dependent on any other distribution are skipped. + + :type f: has to support ``file``-like operations + :type skip_disconnected: ``bool`` + """ + disconnected = [] + + f.write("digraph dependencies {\n") + for dist, adjs in self.adjacency_list.items(): + if len(adjs) == 0 and not skip_disconnected: + disconnected.append(dist) + for other, label in adjs: + if not label is None: + f.write('"%s" -> "%s" [label="%s"]\n' % + (dist.name, other.name, label)) + else: + f.write('"%s" -> "%s"\n' % (dist.name, other.name)) + if not skip_disconnected and len(disconnected) > 0: + f.write('subgraph disconnected {\n') + f.write('label = "Disconnected"\n') + f.write('bgcolor = red\n') + + for dist in disconnected: + f.write('"%s"' % dist.name) + f.write('\n') + f.write('}\n') + f.write('}\n') + + def topological_sort(self): + """ + Perform a topological sort of the graph. + :return: A tuple, the first element of which is a topologically sorted + list of distributions, and the second element of which is a + list of distributions that cannot be sorted because they have + circular dependencies and so form a cycle. + """ + result = [] + # Make a shallow copy of the adjacency list + alist = {} + for k, v in self.adjacency_list.items(): + alist[k] = v[:] + while True: + # See what we can remove in this run + to_remove = [] + for k, v in list(alist.items())[:]: + if not v: + to_remove.append(k) + del alist[k] + if not to_remove: + # What's left in alist (if anything) is a cycle. 
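+                # Illustrative walk-through (comment added for exposition):
+                # with edges A -> B and B -> C (A depends on B, B on C),
+                # successive passes of this loop remove C, then B, then A,
+                # giving result [C, B, A] and an empty cycle list.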
+ break + # Remove from the adjacency list of others + for k, v in alist.items(): + alist[k] = [(d, r) for d, r in v if d not in to_remove] + logger.debug('Moving to result: %s', + ['%s (%s)' % (d.name, d.version) for d in to_remove]) + result.extend(to_remove) + return result, list(alist.keys()) + + def __repr__(self): + """Representation of the graph""" + output = [] + for dist, adjs in self.adjacency_list.items(): + output.append(self.repr_node(dist)) + return '\n'.join(output) + + +def make_graph(dists, scheme='default'): + """Makes a dependency graph from the given distributions. + + :parameter dists: a list of distributions + :type dists: list of :class:`distutils2.database.InstalledDistribution` and + :class:`distutils2.database.EggInfoDistribution` instances + :rtype: a :class:`DependencyGraph` instance + """ + scheme = get_scheme(scheme) + graph = DependencyGraph() + provided = {} # maps names to lists of (version, dist) tuples + + # first, build the graph and find out what's provided + for dist in dists: + graph.add_distribution(dist) + + for p in dist.provides: + name, version = parse_name_and_version(p) + logger.debug('Add to provided: %s, %s, %s', name, version, dist) + provided.setdefault(name, []).append((version, dist)) + + # now make the edges + for dist in dists: + requires = (dist.run_requires | dist.meta_requires | + dist.build_requires | dist.dev_requires) + for req in requires: + try: + matcher = scheme.matcher(req) + except UnsupportedVersionError: + # XXX compat-mode if cannot read the version + logger.warning('could not read version %r - using name only', + req) + name = req.split()[0] + matcher = scheme.matcher(name) + + name = matcher.key # case-insensitive + + matched = False + if name in provided: + for version, provider in provided[name]: + try: + match = matcher.match(version) + except UnsupportedVersionError: + match = False + + if match: + graph.add_edge(dist, provider, req) + matched = True + break + if not matched: + graph.add_missing(dist, req) + return graph + + +def get_dependent_dists(dists, dist): + """Recursively generate a list of distributions from *dists* that are + dependent on *dist*. + + :param dists: a list of distributions + :param dist: a distribution, member of *dists* for which we are interested + """ + if dist not in dists: + raise DistlibException('given distribution %r is not a member ' + 'of the list' % dist.name) + graph = make_graph(dists) + + dep = [dist] # dependent distributions + todo = graph.reverse_list[dist] # list of nodes we should inspect + + while todo: + d = todo.pop() + dep.append(d) + for succ in graph.reverse_list[d]: + if succ not in dep: + todo.append(succ) + + dep.pop(0) # remove dist from dep, was there to prevent infinite loops + return dep + + +def get_required_dists(dists, dist): + """Recursively generate a list of distributions from *dists* that are + required by *dist*. + + :param dists: a list of distributions + :param dist: a distribution, member of *dists* for which we are interested + """ + if dist not in dists: + raise DistlibException('given distribution %r is not a member ' + 'of the list' % dist.name) + graph = make_graph(dists) + + req = [] # required distributions + todo = graph.adjacency_list[dist] # list of nodes we should inspect + + while todo: + d = todo.pop()[0] + req.append(d) + for pred in graph.adjacency_list[d]: + if pred not in req: + todo.append(pred) + + return req + + +def make_dist(name, version, **kwargs): + """ + A convenience method for making a dist given just a name and version. 
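+    For example (illustrative, with a hypothetical name)::
+
+        dist = make_dist('foo', '1.0', summary='An example distribution')
+        dist.name_and_version   # 'foo (1.0)'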
+ """ + summary = kwargs.pop('summary', 'Placeholder for summary') + md = Metadata(**kwargs) + md.name = name + md.version = version + md.summary = summary or 'Placeholder for summary' + return Distribution(md) diff --git a/env/lib/python3.6/site-packages/pip/_vendor/distlib/index.py b/env/lib/python3.6/site-packages/pip/_vendor/distlib/index.py new file mode 100644 index 0000000..2406be2 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/distlib/index.py @@ -0,0 +1,516 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2013 Vinay Sajip. +# Licensed to the Python Software Foundation under a contributor agreement. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +import hashlib +import logging +import os +import shutil +import subprocess +import tempfile +try: + from threading import Thread +except ImportError: + from dummy_threading import Thread + +from . import DistlibException +from .compat import (HTTPBasicAuthHandler, Request, HTTPPasswordMgr, + urlparse, build_opener, string_types) +from .util import cached_property, zip_dir, ServerProxy + +logger = logging.getLogger(__name__) + +DEFAULT_INDEX = 'https://pypi.python.org/pypi' +DEFAULT_REALM = 'pypi' + +class PackageIndex(object): + """ + This class represents a package index compatible with PyPI, the Python + Package Index. + """ + + boundary = b'----------ThIs_Is_tHe_distlib_index_bouNdaRY_$' + + def __init__(self, url=None): + """ + Initialise an instance. + + :param url: The URL of the index. If not specified, the URL for PyPI is + used. + """ + self.url = url or DEFAULT_INDEX + self.read_configuration() + scheme, netloc, path, params, query, frag = urlparse(self.url) + if params or query or frag or scheme not in ('http', 'https'): + raise DistlibException('invalid repository: %s' % self.url) + self.password_handler = None + self.ssl_verifier = None + self.gpg = None + self.gpg_home = None + with open(os.devnull, 'w') as sink: + # Use gpg by default rather than gpg2, as gpg2 insists on + # prompting for passwords + for s in ('gpg', 'gpg2'): + try: + rc = subprocess.check_call([s, '--version'], stdout=sink, + stderr=sink) + if rc == 0: + self.gpg = s + break + except OSError: + pass + + def _get_pypirc_command(self): + """ + Get the distutils command for interacting with PyPI configurations. + :return: the command. + """ + from distutils.core import Distribution + from distutils.config import PyPIRCCommand + d = Distribution() + return PyPIRCCommand(d) + + def read_configuration(self): + """ + Read the PyPI access configuration as supported by distutils, getting + PyPI to do the actual work. This populates ``username``, ``password``, + ``realm`` and ``url`` attributes from the configuration. + """ + # get distutils to do the work + c = self._get_pypirc_command() + c.repository = self.url + cfg = c._read_pypirc() + self.username = cfg.get('username') + self.password = cfg.get('password') + self.realm = cfg.get('realm', 'pypi') + self.url = cfg.get('repository', self.url) + + def save_configuration(self): + """ + Save the PyPI access configuration. You must have set ``username`` and + ``password`` attributes before calling this method. + + Again, distutils is used to do the actual work. + """ + self.check_credentials() + # get distutils to do the work + c = self._get_pypirc_command() + c._store_pypirc(self.username, self.password) + + def check_credentials(self): + """ + Check that ``username`` and ``password`` have been set, and raise an + exception if not. 
+ """ + if self.username is None or self.password is None: + raise DistlibException('username and password must be set') + pm = HTTPPasswordMgr() + _, netloc, _, _, _, _ = urlparse(self.url) + pm.add_password(self.realm, netloc, self.username, self.password) + self.password_handler = HTTPBasicAuthHandler(pm) + + def register(self, metadata): + """ + Register a distribution on PyPI, using the provided metadata. + + :param metadata: A :class:`Metadata` instance defining at least a name + and version number for the distribution to be + registered. + :return: The HTTP response received from PyPI upon submission of the + request. + """ + self.check_credentials() + metadata.validate() + d = metadata.todict() + d[':action'] = 'verify' + request = self.encode_request(d.items(), []) + response = self.send_request(request) + d[':action'] = 'submit' + request = self.encode_request(d.items(), []) + return self.send_request(request) + + def _reader(self, name, stream, outbuf): + """ + Thread runner for reading lines of from a subprocess into a buffer. + + :param name: The logical name of the stream (used for logging only). + :param stream: The stream to read from. This will typically a pipe + connected to the output stream of a subprocess. + :param outbuf: The list to append the read lines to. + """ + while True: + s = stream.readline() + if not s: + break + s = s.decode('utf-8').rstrip() + outbuf.append(s) + logger.debug('%s: %s' % (name, s)) + stream.close() + + def get_sign_command(self, filename, signer, sign_password, + keystore=None): + """ + Return a suitable command for signing a file. + + :param filename: The pathname to the file to be signed. + :param signer: The identifier of the signer of the file. + :param sign_password: The passphrase for the signer's + private key used for signing. + :param keystore: The path to a directory which contains the keys + used in verification. If not specified, the + instance's ``gpg_home`` attribute is used instead. + :return: The signing command as a list suitable to be + passed to :class:`subprocess.Popen`. + """ + cmd = [self.gpg, '--status-fd', '2', '--no-tty'] + if keystore is None: + keystore = self.gpg_home + if keystore: + cmd.extend(['--homedir', keystore]) + if sign_password is not None: + cmd.extend(['--batch', '--passphrase-fd', '0']) + td = tempfile.mkdtemp() + sf = os.path.join(td, os.path.basename(filename) + '.asc') + cmd.extend(['--detach-sign', '--armor', '--local-user', + signer, '--output', sf, filename]) + logger.debug('invoking: %s', ' '.join(cmd)) + return cmd, sf + + def run_command(self, cmd, input_data=None): + """ + Run a command in a child process , passing it any input data specified. + + :param cmd: The command to run. + :param input_data: If specified, this must be a byte string containing + data to be sent to the child process. + :return: A tuple consisting of the subprocess' exit code, a list of + lines read from the subprocess' ``stdout``, and a list of + lines read from the subprocess' ``stderr``. 
+ """ + kwargs = { + 'stdout': subprocess.PIPE, + 'stderr': subprocess.PIPE, + } + if input_data is not None: + kwargs['stdin'] = subprocess.PIPE + stdout = [] + stderr = [] + p = subprocess.Popen(cmd, **kwargs) + # We don't use communicate() here because we may need to + # get clever with interacting with the command + t1 = Thread(target=self._reader, args=('stdout', p.stdout, stdout)) + t1.start() + t2 = Thread(target=self._reader, args=('stderr', p.stderr, stderr)) + t2.start() + if input_data is not None: + p.stdin.write(input_data) + p.stdin.close() + + p.wait() + t1.join() + t2.join() + return p.returncode, stdout, stderr + + def sign_file(self, filename, signer, sign_password, keystore=None): + """ + Sign a file. + + :param filename: The pathname to the file to be signed. + :param signer: The identifier of the signer of the file. + :param sign_password: The passphrase for the signer's + private key used for signing. + :param keystore: The path to a directory which contains the keys + used in signing. If not specified, the instance's + ``gpg_home`` attribute is used instead. + :return: The absolute pathname of the file where the signature is + stored. + """ + cmd, sig_file = self.get_sign_command(filename, signer, sign_password, + keystore) + rc, stdout, stderr = self.run_command(cmd, + sign_password.encode('utf-8')) + if rc != 0: + raise DistlibException('sign command failed with error ' + 'code %s' % rc) + return sig_file + + def upload_file(self, metadata, filename, signer=None, sign_password=None, + filetype='sdist', pyversion='source', keystore=None): + """ + Upload a release file to the index. + + :param metadata: A :class:`Metadata` instance defining at least a name + and version number for the file to be uploaded. + :param filename: The pathname of the file to be uploaded. + :param signer: The identifier of the signer of the file. + :param sign_password: The passphrase for the signer's + private key used for signing. + :param filetype: The type of the file being uploaded. This is the + distutils command which produced that file, e.g. + ``sdist`` or ``bdist_wheel``. + :param pyversion: The version of Python which the release relates + to. For code compatible with any Python, this would + be ``source``, otherwise it would be e.g. ``3.2``. + :param keystore: The path to a directory which contains the keys + used in signing. If not specified, the instance's + ``gpg_home`` attribute is used instead. + :return: The HTTP response received from PyPI upon submission of the + request. 
+ """ + self.check_credentials() + if not os.path.exists(filename): + raise DistlibException('not found: %s' % filename) + metadata.validate() + d = metadata.todict() + sig_file = None + if signer: + if not self.gpg: + logger.warning('no signing program available - not signed') + else: + sig_file = self.sign_file(filename, signer, sign_password, + keystore) + with open(filename, 'rb') as f: + file_data = f.read() + md5_digest = hashlib.md5(file_data).hexdigest() + sha256_digest = hashlib.sha256(file_data).hexdigest() + d.update({ + ':action': 'file_upload', + 'protocol_version': '1', + 'filetype': filetype, + 'pyversion': pyversion, + 'md5_digest': md5_digest, + 'sha256_digest': sha256_digest, + }) + files = [('content', os.path.basename(filename), file_data)] + if sig_file: + with open(sig_file, 'rb') as f: + sig_data = f.read() + files.append(('gpg_signature', os.path.basename(sig_file), + sig_data)) + shutil.rmtree(os.path.dirname(sig_file)) + request = self.encode_request(d.items(), files) + return self.send_request(request) + + def upload_documentation(self, metadata, doc_dir): + """ + Upload documentation to the index. + + :param metadata: A :class:`Metadata` instance defining at least a name + and version number for the documentation to be + uploaded. + :param doc_dir: The pathname of the directory which contains the + documentation. This should be the directory that + contains the ``index.html`` for the documentation. + :return: The HTTP response received from PyPI upon submission of the + request. + """ + self.check_credentials() + if not os.path.isdir(doc_dir): + raise DistlibException('not a directory: %r' % doc_dir) + fn = os.path.join(doc_dir, 'index.html') + if not os.path.exists(fn): + raise DistlibException('not found: %r' % fn) + metadata.validate() + name, version = metadata.name, metadata.version + zip_data = zip_dir(doc_dir).getvalue() + fields = [(':action', 'doc_upload'), + ('name', name), ('version', version)] + files = [('content', name, zip_data)] + request = self.encode_request(fields, files) + return self.send_request(request) + + def get_verify_command(self, signature_filename, data_filename, + keystore=None): + """ + Return a suitable command for verifying a file. + + :param signature_filename: The pathname to the file containing the + signature. + :param data_filename: The pathname to the file containing the + signed data. + :param keystore: The path to a directory which contains the keys + used in verification. If not specified, the + instance's ``gpg_home`` attribute is used instead. + :return: The verifying command as a list suitable to be + passed to :class:`subprocess.Popen`. + """ + cmd = [self.gpg, '--status-fd', '2', '--no-tty'] + if keystore is None: + keystore = self.gpg_home + if keystore: + cmd.extend(['--homedir', keystore]) + cmd.extend(['--verify', signature_filename, data_filename]) + logger.debug('invoking: %s', ' '.join(cmd)) + return cmd + + def verify_signature(self, signature_filename, data_filename, + keystore=None): + """ + Verify a signature for a file. + + :param signature_filename: The pathname to the file containing the + signature. + :param data_filename: The pathname to the file containing the + signed data. + :param keystore: The path to a directory which contains the keys + used in verification. If not specified, the + instance's ``gpg_home`` attribute is used instead. + :return: True if the signature was verified, else False. 
+ """ + if not self.gpg: + raise DistlibException('verification unavailable because gpg ' + 'unavailable') + cmd = self.get_verify_command(signature_filename, data_filename, + keystore) + rc, stdout, stderr = self.run_command(cmd) + if rc not in (0, 1): + raise DistlibException('verify command failed with error ' + 'code %s' % rc) + return rc == 0 + + def download_file(self, url, destfile, digest=None, reporthook=None): + """ + This is a convenience method for downloading a file from an URL. + Normally, this will be a file from the index, though currently + no check is made for this (i.e. a file can be downloaded from + anywhere). + + The method is just like the :func:`urlretrieve` function in the + standard library, except that it allows digest computation to be + done during download and checking that the downloaded data + matched any expected value. + + :param url: The URL of the file to be downloaded (assumed to be + available via an HTTP GET request). + :param destfile: The pathname where the downloaded file is to be + saved. + :param digest: If specified, this must be a (hasher, value) + tuple, where hasher is the algorithm used (e.g. + ``'md5'``) and ``value`` is the expected value. + :param reporthook: The same as for :func:`urlretrieve` in the + standard library. + """ + if digest is None: + digester = None + logger.debug('No digest specified') + else: + if isinstance(digest, (list, tuple)): + hasher, digest = digest + else: + hasher = 'md5' + digester = getattr(hashlib, hasher)() + logger.debug('Digest specified: %s' % digest) + # The following code is equivalent to urlretrieve. + # We need to do it this way so that we can compute the + # digest of the file as we go. + with open(destfile, 'wb') as dfp: + # addinfourl is not a context manager on 2.x + # so we have to use try/finally + sfp = self.send_request(Request(url)) + try: + headers = sfp.info() + blocksize = 8192 + size = -1 + read = 0 + blocknum = 0 + if "content-length" in headers: + size = int(headers["Content-Length"]) + if reporthook: + reporthook(blocknum, blocksize, size) + while True: + block = sfp.read(blocksize) + if not block: + break + read += len(block) + dfp.write(block) + if digester: + digester.update(block) + blocknum += 1 + if reporthook: + reporthook(blocknum, blocksize, size) + finally: + sfp.close() + + # check that we got the whole file, if we can + if size >= 0 and read < size: + raise DistlibException( + 'retrieval incomplete: got only %d out of %d bytes' + % (read, size)) + # if we have a digest, it must match. + if digester: + actual = digester.hexdigest() + if digest != actual: + raise DistlibException('%s digest mismatch for %s: expected ' + '%s, got %s' % (hasher, destfile, + digest, actual)) + logger.debug('Digest verified: %s', digest) + + def send_request(self, req): + """ + Send a standard library :class:`Request` to PyPI and return its + response. + + :param req: The request to send. + :return: The HTTP response from PyPI (a standard library HTTPResponse). + """ + handlers = [] + if self.password_handler: + handlers.append(self.password_handler) + if self.ssl_verifier: + handlers.append(self.ssl_verifier) + opener = build_opener(*handlers) + return opener.open(req) + + def encode_request(self, fields, files): + """ + Encode fields and files for posting to an HTTP server. + + :param fields: The fields to send as a list of (fieldname, value) + tuples. + :param files: The files to send as a list of (fieldname, filename, + file_bytes) tuple. 
+ """ + # Adapted from packaging, which in turn was adapted from + # http://code.activestate.com/recipes/146306 + + parts = [] + boundary = self.boundary + for k, values in fields: + if not isinstance(values, (list, tuple)): + values = [values] + + for v in values: + parts.extend(( + b'--' + boundary, + ('Content-Disposition: form-data; name="%s"' % + k).encode('utf-8'), + b'', + v.encode('utf-8'))) + for key, filename, value in files: + parts.extend(( + b'--' + boundary, + ('Content-Disposition: form-data; name="%s"; filename="%s"' % + (key, filename)).encode('utf-8'), + b'', + value)) + + parts.extend((b'--' + boundary + b'--', b'')) + + body = b'\r\n'.join(parts) + ct = b'multipart/form-data; boundary=' + boundary + headers = { + 'Content-type': ct, + 'Content-length': str(len(body)) + } + return Request(self.url, body, headers) + + def search(self, terms, operator=None): + if isinstance(terms, string_types): + terms = {'name': terms} + rpc_proxy = ServerProxy(self.url, timeout=3.0) + try: + return rpc_proxy.search(terms, operator or 'and') + finally: + rpc_proxy('close')() diff --git a/env/lib/python3.6/site-packages/pip/_vendor/distlib/locators.py b/env/lib/python3.6/site-packages/pip/_vendor/distlib/locators.py new file mode 100644 index 0000000..11d2636 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/distlib/locators.py @@ -0,0 +1,1292 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2012-2015 Vinay Sajip. +# Licensed to the Python Software Foundation under a contributor agreement. +# See LICENSE.txt and CONTRIBUTORS.txt. +# + +import gzip +from io import BytesIO +import json +import logging +import os +import posixpath +import re +try: + import threading +except ImportError: # pragma: no cover + import dummy_threading as threading +import zlib + +from . import DistlibException +from .compat import (urljoin, urlparse, urlunparse, url2pathname, pathname2url, + queue, quote, unescape, string_types, build_opener, + HTTPRedirectHandler as BaseRedirectHandler, text_type, + Request, HTTPError, URLError) +from .database import Distribution, DistributionPath, make_dist +from .metadata import Metadata, MetadataInvalidError +from .util import (cached_property, parse_credentials, ensure_slash, + split_filename, get_project_data, parse_requirement, + parse_name_and_version, ServerProxy, normalize_name) +from .version import get_scheme, UnsupportedVersionError +from .wheel import Wheel, is_compatible + +logger = logging.getLogger(__name__) + +HASHER_HASH = re.compile(r'^(\w+)=([a-f0-9]+)') +CHARSET = re.compile(r';\s*charset\s*=\s*(.*)\s*$', re.I) +HTML_CONTENT_TYPE = re.compile('text/html|application/x(ht)?ml') +DEFAULT_INDEX = 'https://pypi.python.org/pypi' + +def get_all_distribution_names(url=None): + """ + Return all distribution names known by an index. + :param url: The URL of the index. + :return: A list of all known distribution names. + """ + if url is None: + url = DEFAULT_INDEX + client = ServerProxy(url, timeout=3.0) + try: + return client.list_packages() + finally: + client('close')() + +class RedirectHandler(BaseRedirectHandler): + """ + A class to work around a bug in some Python 3.2.x releases. + """ + # There's a bug in the base version for some 3.2.x + # (e.g. 3.2.2 on Ubuntu Oneiric). If a Location header + # returns e.g. /abc, it bails because it says the scheme '' + # is bogus, when actually it should use the request's + # URL for the scheme. See Python issue #13696. 
+ def http_error_302(self, req, fp, code, msg, headers): + # Some servers (incorrectly) return multiple Location headers + # (so probably same goes for URI). Use first header. + newurl = None + for key in ('location', 'uri'): + if key in headers: + newurl = headers[key] + break + if newurl is None: # pragma: no cover + return + urlparts = urlparse(newurl) + if urlparts.scheme == '': + newurl = urljoin(req.get_full_url(), newurl) + if hasattr(headers, 'replace_header'): + headers.replace_header(key, newurl) + else: + headers[key] = newurl + return BaseRedirectHandler.http_error_302(self, req, fp, code, msg, + headers) + + http_error_301 = http_error_303 = http_error_307 = http_error_302 + +class Locator(object): + """ + A base class for locators - things that locate distributions. + """ + source_extensions = ('.tar.gz', '.tar.bz2', '.tar', '.zip', '.tgz', '.tbz') + binary_extensions = ('.egg', '.exe', '.whl') + excluded_extensions = ('.pdf',) + + # A list of tags indicating which wheels you want to match. The default + # value of None matches against the tags compatible with the running + # Python. If you want to match other values, set wheel_tags on a locator + # instance to a list of tuples (pyver, abi, arch) which you want to match. + wheel_tags = None + + downloadable_extensions = source_extensions + ('.whl',) + + def __init__(self, scheme='default'): + """ + Initialise an instance. + :param scheme: Because locators look for most recent versions, they + need to know the version scheme to use. This specifies + the current PEP-recommended scheme - use ``'legacy'`` + if you need to support existing distributions on PyPI. + """ + self._cache = {} + self.scheme = scheme + # Because of bugs in some of the handlers on some of the platforms, + # we use our own opener rather than just using urlopen. + self.opener = build_opener(RedirectHandler()) + # If get_project() is called from locate(), the matcher instance + # is set from the requirement passed to locate(). See issue #18 for + # why this can be useful to know. + self.matcher = None + self.errors = queue.Queue() + + def get_errors(self): + """ + Return any errors which have occurred. + """ + result = [] + while not self.errors.empty(): # pragma: no cover + try: + e = self.errors.get(False) + result.append(e) + except self.errors.Empty: + continue + self.errors.task_done() + return result + + def clear_errors(self): + """ + Clear any errors which may have been logged. + """ + # Just get the errors and throw them away + self.get_errors() + + def clear_cache(self): + self._cache.clear() + + def _get_scheme(self): + return self._scheme + + def _set_scheme(self, value): + self._scheme = value + + scheme = property(_get_scheme, _set_scheme) + + def _get_project(self, name): + """ + For a given project, get a dictionary mapping available versions to Distribution + instances. + + This should be implemented in subclasses. + + If called from a locate() request, self.matcher will be set to a + matcher for the requirement to satisfy, otherwise it will be None. + """ + raise NotImplementedError('Please implement in the subclass') + + def get_distribution_names(self): + """ + Return all the distribution names known to this locator. + """ + raise NotImplementedError('Please implement in the subclass') + + def get_project(self, name): + """ + For a given project, get a dictionary mapping available versions to Distribution + instances. + + This calls _get_project to do all the work, and just implements a caching layer on top. 
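+
+ A hedged sketch of the returned mapping, assuming ``locator`` is a
+ concrete Locator subclass; the project name and version key are
+ illustrative:
+
+     versions = locator.get_project('example-pkg')
+     # keys are version strings mapping to Distribution instances,
+     # plus 'urls' and 'digests' bookkeeping entries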
+ """ + if self._cache is None: # pragma: no cover + result = self._get_project(name) + elif name in self._cache: + result = self._cache[name] + else: + self.clear_errors() + result = self._get_project(name) + self._cache[name] = result + return result + + def score_url(self, url): + """ + Give an url a score which can be used to choose preferred URLs + for a given project release. + """ + t = urlparse(url) + basename = posixpath.basename(t.path) + compatible = True + is_wheel = basename.endswith('.whl') + is_downloadable = basename.endswith(self.downloadable_extensions) + if is_wheel: + compatible = is_compatible(Wheel(basename), self.wheel_tags) + return (t.scheme == 'https', 'pypi.python.org' in t.netloc, + is_downloadable, is_wheel, compatible, basename) + + def prefer_url(self, url1, url2): + """ + Choose one of two URLs where both are candidates for distribution + archives for the same version of a distribution (for example, + .tar.gz vs. zip). + + The current implementation favours https:// URLs over http://, archives + from PyPI over those from other locations, wheel compatibility (if a + wheel) and then the archive name. + """ + result = url2 + if url1: + s1 = self.score_url(url1) + s2 = self.score_url(url2) + if s1 > s2: + result = url1 + if result != url2: + logger.debug('Not replacing %r with %r', url1, url2) + else: + logger.debug('Replacing %r with %r', url1, url2) + return result + + def split_filename(self, filename, project_name): + """ + Attempt to split a filename in project name, version and Python version. + """ + return split_filename(filename, project_name) + + def convert_url_to_download_info(self, url, project_name): + """ + See if a URL is a candidate for a download URL for a project (the URL + has typically been scraped from an HTML page). + + If it is, a dictionary is returned with keys "name", "version", + "filename" and "url"; otherwise, None is returned. 
+ """ + def same_project(name1, name2): + return normalize_name(name1) == normalize_name(name2) + + result = None + scheme, netloc, path, params, query, frag = urlparse(url) + if frag.lower().startswith('egg='): # pragma: no cover + logger.debug('%s: version hint in fragment: %r', + project_name, frag) + m = HASHER_HASH.match(frag) + if m: + algo, digest = m.groups() + else: + algo, digest = None, None + origpath = path + if path and path[-1] == '/': # pragma: no cover + path = path[:-1] + if path.endswith('.whl'): + try: + wheel = Wheel(path) + if is_compatible(wheel, self.wheel_tags): + if project_name is None: + include = True + else: + include = same_project(wheel.name, project_name) + if include: + result = { + 'name': wheel.name, + 'version': wheel.version, + 'filename': wheel.filename, + 'url': urlunparse((scheme, netloc, origpath, + params, query, '')), + 'python-version': ', '.join( + ['.'.join(list(v[2:])) for v in wheel.pyver]), + } + except Exception as e: # pragma: no cover + logger.warning('invalid path for wheel: %s', path) + elif not path.endswith(self.downloadable_extensions): # pragma: no cover + logger.debug('Not downloadable: %s', path) + else: # downloadable extension + path = filename = posixpath.basename(path) + for ext in self.downloadable_extensions: + if path.endswith(ext): + path = path[:-len(ext)] + t = self.split_filename(path, project_name) + if not t: # pragma: no cover + logger.debug('No match for project/version: %s', path) + else: + name, version, pyver = t + if not project_name or same_project(project_name, name): + result = { + 'name': name, + 'version': version, + 'filename': filename, + 'url': urlunparse((scheme, netloc, origpath, + params, query, '')), + #'packagetype': 'sdist', + } + if pyver: # pragma: no cover + result['python-version'] = pyver + break + if result and algo: + result['%s_digest' % algo] = digest + return result + + def _get_digest(self, info): + """ + Get a digest from a dictionary by looking at keys of the form + 'algo_digest'. + + Returns a 2-tuple (algo, digest) if found, else None. Currently + looks only for SHA256, then MD5. + """ + result = None + for algo in ('sha256', 'md5'): + key = '%s_digest' % algo + if key in info: + result = (algo, info[key]) + break + return result + + def _update_version_data(self, result, info): + """ + Update a result dictionary (the final result from _get_project) with a + dictionary for a specific version, which typically holds information + gleaned from a filename or URL for an archive for the distribution. + """ + name = info.pop('name') + version = info.pop('version') + if version in result: + dist = result[version] + md = dist.metadata + else: + dist = make_dist(name, version, scheme=self.scheme) + md = dist.metadata + dist.digest = digest = self._get_digest(info) + url = info['url'] + result['digests'][url] = digest + if md.source_url != info['url']: + md.source_url = self.prefer_url(md.source_url, url) + result['urls'].setdefault(version, set()).add(url) + dist.locator = self + result[version] = dist + + def locate(self, requirement, prereleases=False): + """ + Find the most recent distribution which matches the given + requirement. + + :param requirement: A requirement of the form 'foo (1.0)' or perhaps + 'foo (>= 1.0, < 2.0, != 1.3)' + :param prereleases: If ``True``, allow pre-release versions + to be located. Otherwise, pre-release versions + are not returned. + :return: A :class:`Distribution` instance, or ``None`` if no such + distribution could be located. 
+ """ + result = None + r = parse_requirement(requirement) + if r is None: # pragma: no cover + raise DistlibException('Not a valid requirement: %r' % requirement) + scheme = get_scheme(self.scheme) + self.matcher = matcher = scheme.matcher(r.requirement) + logger.debug('matcher: %s (%s)', matcher, type(matcher).__name__) + versions = self.get_project(r.name) + if len(versions) > 2: # urls and digests keys are present + # sometimes, versions are invalid + slist = [] + vcls = matcher.version_class + for k in versions: + if k in ('urls', 'digests'): + continue + try: + if not matcher.match(k): + logger.debug('%s did not match %r', matcher, k) + else: + if prereleases or not vcls(k).is_prerelease: + slist.append(k) + else: + logger.debug('skipping pre-release ' + 'version %s of %s', k, matcher.name) + except Exception: # pragma: no cover + logger.warning('error matching %s with %r', matcher, k) + pass # slist.append(k) + if len(slist) > 1: + slist = sorted(slist, key=scheme.key) + if slist: + logger.debug('sorted list: %s', slist) + version = slist[-1] + result = versions[version] + if result: + if r.extras: + result.extras = r.extras + result.download_urls = versions.get('urls', {}).get(version, set()) + d = {} + sd = versions.get('digests', {}) + for url in result.download_urls: + if url in sd: # pragma: no cover + d[url] = sd[url] + result.digests = d + self.matcher = None + return result + + +class PyPIRPCLocator(Locator): + """ + This locator uses XML-RPC to locate distributions. It therefore + cannot be used with simple mirrors (that only mirror file content). + """ + def __init__(self, url, **kwargs): + """ + Initialise an instance. + + :param url: The URL to use for XML-RPC. + :param kwargs: Passed to the superclass constructor. + """ + super(PyPIRPCLocator, self).__init__(**kwargs) + self.base_url = url + self.client = ServerProxy(url, timeout=3.0) + + def get_distribution_names(self): + """ + Return all the distribution names known to this locator. + """ + return set(self.client.list_packages()) + + def _get_project(self, name): + result = {'urls': {}, 'digests': {}} + versions = self.client.package_releases(name, True) + for v in versions: + urls = self.client.release_urls(name, v) + data = self.client.release_data(name, v) + metadata = Metadata(scheme=self.scheme) + metadata.name = data['name'] + metadata.version = data['version'] + metadata.license = data.get('license') + metadata.keywords = data.get('keywords', []) + metadata.summary = data.get('summary') + dist = Distribution(metadata) + if urls: + info = urls[0] + metadata.source_url = info['url'] + dist.digest = self._get_digest(info) + dist.locator = self + result[v] = dist + for info in urls: + url = info['url'] + digest = self._get_digest(info) + result['urls'].setdefault(v, set()).add(url) + result['digests'][url] = digest + return result + +class PyPIJSONLocator(Locator): + """ + This locator uses PyPI's JSON interface. It's very limited in functionality + and probably not worth using. + """ + def __init__(self, url, **kwargs): + super(PyPIJSONLocator, self).__init__(**kwargs) + self.base_url = ensure_slash(url) + + def get_distribution_names(self): + """ + Return all the distribution names known to this locator. 
+ """ + raise NotImplementedError('Not available from this locator') + + def _get_project(self, name): + result = {'urls': {}, 'digests': {}} + url = urljoin(self.base_url, '%s/json' % quote(name)) + try: + resp = self.opener.open(url) + data = resp.read().decode() # for now + d = json.loads(data) + md = Metadata(scheme=self.scheme) + data = d['info'] + md.name = data['name'] + md.version = data['version'] + md.license = data.get('license') + md.keywords = data.get('keywords', []) + md.summary = data.get('summary') + dist = Distribution(md) + dist.locator = self + urls = d['urls'] + result[md.version] = dist + for info in d['urls']: + url = info['url'] + dist.download_urls.add(url) + dist.digests[url] = self._get_digest(info) + result['urls'].setdefault(md.version, set()).add(url) + result['digests'][url] = self._get_digest(info) + # Now get other releases + for version, infos in d['releases'].items(): + if version == md.version: + continue # already done + omd = Metadata(scheme=self.scheme) + omd.name = md.name + omd.version = version + odist = Distribution(omd) + odist.locator = self + result[version] = odist + for info in infos: + url = info['url'] + odist.download_urls.add(url) + odist.digests[url] = self._get_digest(info) + result['urls'].setdefault(version, set()).add(url) + result['digests'][url] = self._get_digest(info) +# for info in urls: +# md.source_url = info['url'] +# dist.digest = self._get_digest(info) +# dist.locator = self +# for info in urls: +# url = info['url'] +# result['urls'].setdefault(md.version, set()).add(url) +# result['digests'][url] = self._get_digest(info) + except Exception as e: + self.errors.put(text_type(e)) + logger.exception('JSON fetch failed: %s', e) + return result + + +class Page(object): + """ + This class represents a scraped HTML page. + """ + # The following slightly hairy-looking regex just looks for the contents of + # an anchor link, which has an attribute "href" either immediately preceded + # or immediately followed by a "rel" attribute. The attribute values can be + # declared with double quotes, single quotes or no quotes - which leads to + # the length of the expression. + _href = re.compile(""" +(rel\\s*=\\s*(?:"(?P<rel1>[^"]*)"|'(?P<rel2>[^']*)'|(?P<rel3>[^>\\s\n]*))\\s+)? +href\\s*=\\s*(?:"(?P<url1>[^"]*)"|'(?P<url2>[^']*)'|(?P<url3>[^>\\s\n]*)) +(\\s+rel\\s*=\\s*(?:"(?P<rel4>[^"]*)"|'(?P<rel5>[^']*)'|(?P<rel6>[^>\\s\n]*)))? +""", re.I | re.S | re.X) + _base = re.compile(r"""<base\s+href\s*=\s*['"]?([^'">]+)""", re.I | re.S) + + def __init__(self, data, url): + """ + Initialise an instance with the Unicode page contents and the URL they + came from. + """ + self.data = data + self.base_url = self.url = url + m = self._base.search(self.data) + if m: + self.base_url = m.group(1) + + _clean_re = re.compile(r'[^a-z0-9$&+,/:;=?@.#%_\\|-]', re.I) + + @cached_property + def links(self): + """ + Return the URLs of all the links on a page together with information + about their "rel" attribute, for determining which ones to treat as + downloads and which ones to queue for further scraping. + """ + def clean(url): + "Tidy up an URL." 
+ scheme, netloc, path, params, query, frag = urlparse(url) + return urlunparse((scheme, netloc, quote(path), + params, query, frag)) + + result = set() + for match in self._href.finditer(self.data): + d = match.groupdict('') + rel = (d['rel1'] or d['rel2'] or d['rel3'] or + d['rel4'] or d['rel5'] or d['rel6']) + url = d['url1'] or d['url2'] or d['url3'] + url = urljoin(self.base_url, url) + url = unescape(url) + url = self._clean_re.sub(lambda m: '%%%2x' % ord(m.group(0)), url) + result.add((url, rel)) + # We sort the result, hoping to bring the most recent versions + # to the front + result = sorted(result, key=lambda t: t[0], reverse=True) + return result + + +class SimpleScrapingLocator(Locator): + """ + A locator which scrapes HTML pages to locate downloads for a distribution. + This runs multiple threads to do the I/O; performance is at least as good + as pip's PackageFinder, which works in an analogous fashion. + """ + + # These are used to deal with various Content-Encoding schemes. + decoders = { + 'deflate': zlib.decompress, + 'gzip': lambda b: gzip.GzipFile(fileobj=BytesIO(d)).read(), + 'none': lambda b: b, + } + + def __init__(self, url, timeout=None, num_workers=10, **kwargs): + """ + Initialise an instance. + :param url: The root URL to use for scraping. + :param timeout: The timeout, in seconds, to be applied to requests. + This defaults to ``None`` (no timeout specified). + :param num_workers: The number of worker threads you want to do I/O, + This defaults to 10. + :param kwargs: Passed to the superclass. + """ + super(SimpleScrapingLocator, self).__init__(**kwargs) + self.base_url = ensure_slash(url) + self.timeout = timeout + self._page_cache = {} + self._seen = set() + self._to_fetch = queue.Queue() + self._bad_hosts = set() + self.skip_externals = False + self.num_workers = num_workers + self._lock = threading.RLock() + # See issue #45: we need to be resilient when the locator is used + # in a thread, e.g. with concurrent.futures. We can't use self._lock + # as it is for coordinating our internal threads - the ones created + # in _prepare_threads. + self._gplock = threading.RLock() + + def _prepare_threads(self): + """ + Threads are created only when get_project is called, and terminate + before it returns. They are there primarily to parallelise I/O (i.e. + fetching web pages). + """ + self._threads = [] + for i in range(self.num_workers): + t = threading.Thread(target=self._fetch) + t.setDaemon(True) + t.start() + self._threads.append(t) + + def _wait_threads(self): + """ + Tell all the threads to terminate (by sending a sentinel value) and + wait for them to do so. + """ + # Note that you need two loops, since you can't say which + # thread will get each sentinel + for t in self._threads: + self._to_fetch.put(None) # sentinel + for t in self._threads: + t.join() + self._threads = [] + + def _get_project(self, name): + result = {'urls': {}, 'digests': {}} + with self._gplock: + self.result = result + self.project_name = name + url = urljoin(self.base_url, '%s/' % quote(name)) + self._seen.clear() + self._page_cache.clear() + self._prepare_threads() + try: + logger.debug('Queueing %s', url) + self._to_fetch.put(url) + self._to_fetch.join() + finally: + self._wait_threads() + del self.result + return result + + platform_dependent = re.compile(r'\b(linux-(i\d86|x86_64|arm\w+)|' + r'win(32|-amd64)|macosx-?\d+)\b', re.I) + + def _is_platform_dependent(self, url): + """ + Does an URL refer to a platform-specific download? 
+ """ + return self.platform_dependent.search(url) + + def _process_download(self, url): + """ + See if an URL is a suitable download for a project. + + If it is, register information in the result dictionary (for + _get_project) about the specific version it's for. + + Note that the return value isn't actually used other than as a boolean + value. + """ + if self._is_platform_dependent(url): + info = None + else: + info = self.convert_url_to_download_info(url, self.project_name) + logger.debug('process_download: %s -> %s', url, info) + if info: + with self._lock: # needed because self.result is shared + self._update_version_data(self.result, info) + return info + + def _should_queue(self, link, referrer, rel): + """ + Determine whether a link URL from a referring page and with a + particular "rel" attribute should be queued for scraping. + """ + scheme, netloc, path, _, _, _ = urlparse(link) + if path.endswith(self.source_extensions + self.binary_extensions + + self.excluded_extensions): + result = False + elif self.skip_externals and not link.startswith(self.base_url): + result = False + elif not referrer.startswith(self.base_url): + result = False + elif rel not in ('homepage', 'download'): + result = False + elif scheme not in ('http', 'https', 'ftp'): + result = False + elif self._is_platform_dependent(link): + result = False + else: + host = netloc.split(':', 1)[0] + if host.lower() == 'localhost': + result = False + else: + result = True + logger.debug('should_queue: %s (%s) from %s -> %s', link, rel, + referrer, result) + return result + + def _fetch(self): + """ + Get a URL to fetch from the work queue, get the HTML page, examine its + links for download candidates and candidates for further scraping. + + This is a handy method to run in a thread. + """ + while True: + url = self._to_fetch.get() + try: + if url: + page = self.get_page(url) + if page is None: # e.g. after an error + continue + for link, rel in page.links: + if link not in self._seen: + try: + self._seen.add(link) + if (not self._process_download(link) and + self._should_queue(link, url, rel)): + logger.debug('Queueing %s from %s', link, url) + self._to_fetch.put(link) + except MetadataInvalidError: # e.g. invalid versions + pass + except Exception as e: # pragma: no cover + self.errors.put(text_type(e)) + finally: + # always do this, to avoid hangs :-) + self._to_fetch.task_done() + if not url: + #logger.debug('Sentinel seen, quitting.') + break + + def get_page(self, url): + """ + Get the HTML for an URL, possibly from an in-memory cache. + + XXX TODO Note: this cache is never actually cleared. It's assumed that + the data won't get stale over the lifetime of a locator instance (not + necessarily true for the default_locator). 
+ """ + # http://peak.telecommunity.com/DevCenter/EasyInstall#package-index-api + scheme, netloc, path, _, _, _ = urlparse(url) + if scheme == 'file' and os.path.isdir(url2pathname(path)): + url = urljoin(ensure_slash(url), 'index.html') + + if url in self._page_cache: + result = self._page_cache[url] + logger.debug('Returning %s from cache: %s', url, result) + else: + host = netloc.split(':', 1)[0] + result = None + if host in self._bad_hosts: + logger.debug('Skipping %s due to bad host %s', url, host) + else: + req = Request(url, headers={'Accept-encoding': 'identity'}) + try: + logger.debug('Fetching %s', url) + resp = self.opener.open(req, timeout=self.timeout) + logger.debug('Fetched %s', url) + headers = resp.info() + content_type = headers.get('Content-Type', '') + if HTML_CONTENT_TYPE.match(content_type): + final_url = resp.geturl() + data = resp.read() + encoding = headers.get('Content-Encoding') + if encoding: + decoder = self.decoders[encoding] # fail if not found + data = decoder(data) + encoding = 'utf-8' + m = CHARSET.search(content_type) + if m: + encoding = m.group(1) + try: + data = data.decode(encoding) + except UnicodeError: # pragma: no cover + data = data.decode('latin-1') # fallback + result = Page(data, final_url) + self._page_cache[final_url] = result + except HTTPError as e: + if e.code != 404: + logger.exception('Fetch failed: %s: %s', url, e) + except URLError as e: # pragma: no cover + logger.exception('Fetch failed: %s: %s', url, e) + with self._lock: + self._bad_hosts.add(host) + except Exception as e: # pragma: no cover + logger.exception('Fetch failed: %s: %s', url, e) + finally: + self._page_cache[url] = result # even if None (failure) + return result + + _distname_re = re.compile('<a href=[^>]*>([^<]+)<') + + def get_distribution_names(self): + """ + Return all the distribution names known to this locator. + """ + result = set() + page = self.get_page(self.base_url) + if not page: + raise DistlibException('Unable to get %s' % self.base_url) + for match in self._distname_re.finditer(page.data): + result.add(match.group(1)) + return result + +class DirectoryLocator(Locator): + """ + This class locates distributions in a directory tree. + """ + + def __init__(self, path, **kwargs): + """ + Initialise an instance. + :param path: The root of the directory tree to search. + :param kwargs: Passed to the superclass constructor, + except for: + * recursive - if True (the default), subdirectories are + recursed into. If False, only the top-level directory + is searched, + """ + self.recursive = kwargs.pop('recursive', True) + super(DirectoryLocator, self).__init__(**kwargs) + path = os.path.abspath(path) + if not os.path.isdir(path): # pragma: no cover + raise DistlibException('Not a directory: %r' % path) + self.base_dir = path + + def should_include(self, filename, parent): + """ + Should a filename be considered as a candidate for a distribution + archive? As well as the filename, the directory which contains it + is provided, though not used by the current implementation. 
+ """ + return filename.endswith(self.downloadable_extensions) + + def _get_project(self, name): + result = {'urls': {}, 'digests': {}} + for root, dirs, files in os.walk(self.base_dir): + for fn in files: + if self.should_include(fn, root): + fn = os.path.join(root, fn) + url = urlunparse(('file', '', + pathname2url(os.path.abspath(fn)), + '', '', '')) + info = self.convert_url_to_download_info(url, name) + if info: + self._update_version_data(result, info) + if not self.recursive: + break + return result + + def get_distribution_names(self): + """ + Return all the distribution names known to this locator. + """ + result = set() + for root, dirs, files in os.walk(self.base_dir): + for fn in files: + if self.should_include(fn, root): + fn = os.path.join(root, fn) + url = urlunparse(('file', '', + pathname2url(os.path.abspath(fn)), + '', '', '')) + info = self.convert_url_to_download_info(url, None) + if info: + result.add(info['name']) + if not self.recursive: + break + return result + +class JSONLocator(Locator): + """ + This locator uses special extended metadata (not available on PyPI) and is + the basis of performant dependency resolution in distlib. Other locators + require archive downloads before dependencies can be determined! As you + might imagine, that can be slow. + """ + def get_distribution_names(self): + """ + Return all the distribution names known to this locator. + """ + raise NotImplementedError('Not available from this locator') + + def _get_project(self, name): + result = {'urls': {}, 'digests': {}} + data = get_project_data(name) + if data: + for info in data.get('files', []): + if info['ptype'] != 'sdist' or info['pyversion'] != 'source': + continue + # We don't store summary in project metadata as it makes + # the data bigger for no benefit during dependency + # resolution + dist = make_dist(data['name'], info['version'], + summary=data.get('summary', + 'Placeholder for summary'), + scheme=self.scheme) + md = dist.metadata + md.source_url = info['url'] + # TODO SHA256 digest + if 'digest' in info and info['digest']: + dist.digest = ('md5', info['digest']) + md.dependencies = info.get('requirements', {}) + dist.exports = info.get('exports', {}) + result[dist.version] = dist + result['urls'].setdefault(dist.version, set()).add(info['url']) + return result + +class DistPathLocator(Locator): + """ + This locator finds installed distributions in a path. It can be useful for + adding to an :class:`AggregatingLocator`. + """ + def __init__(self, distpath, **kwargs): + """ + Initialise an instance. + + :param distpath: A :class:`DistributionPath` instance to search. + """ + super(DistPathLocator, self).__init__(**kwargs) + assert isinstance(distpath, DistributionPath) + self.distpath = distpath + + def _get_project(self, name): + dist = self.distpath.get_distribution(name) + if dist is None: + result = {'urls': {}, 'digests': {}} + else: + result = { + dist.version: dist, + 'urls': {dist.version: set([dist.source_url])}, + 'digests': {dist.version: set([None])} + } + return result + + +class AggregatingLocator(Locator): + """ + This class allows you to chain and/or merge a list of locators. + """ + def __init__(self, *locators, **kwargs): + """ + Initialise an instance. + + :param locators: The list of locators to search. + :param kwargs: Passed to the superclass constructor, + except for: + * merge - if False (the default), the first successful + search from any of the locators is returned. If True, + the results from all locators are merged (this can be + slow). 
+ """ + self.merge = kwargs.pop('merge', False) + self.locators = locators + super(AggregatingLocator, self).__init__(**kwargs) + + def clear_cache(self): + super(AggregatingLocator, self).clear_cache() + for locator in self.locators: + locator.clear_cache() + + def _set_scheme(self, value): + self._scheme = value + for locator in self.locators: + locator.scheme = value + + scheme = property(Locator.scheme.fget, _set_scheme) + + def _get_project(self, name): + result = {} + for locator in self.locators: + d = locator.get_project(name) + if d: + if self.merge: + files = result.get('urls', {}) + digests = result.get('digests', {}) + # next line could overwrite result['urls'], result['digests'] + result.update(d) + df = result.get('urls') + if files and df: + for k, v in files.items(): + if k in df: + df[k] |= v + else: + df[k] = v + dd = result.get('digests') + if digests and dd: + dd.update(digests) + else: + # See issue #18. If any dists are found and we're looking + # for specific constraints, we only return something if + # a match is found. For example, if a DirectoryLocator + # returns just foo (1.0) while we're looking for + # foo (>= 2.0), we'll pretend there was nothing there so + # that subsequent locators can be queried. Otherwise we + # would just return foo (1.0) which would then lead to a + # failure to find foo (>= 2.0), because other locators + # weren't searched. Note that this only matters when + # merge=False. + if self.matcher is None: + found = True + else: + found = False + for k in d: + if self.matcher.match(k): + found = True + break + if found: + result = d + break + return result + + def get_distribution_names(self): + """ + Return all the distribution names known to this locator. + """ + result = set() + for locator in self.locators: + try: + result |= locator.get_distribution_names() + except NotImplementedError: + pass + return result + + +# We use a legacy scheme simply because most of the dists on PyPI use legacy +# versions which don't conform to PEP 426 / PEP 440. +default_locator = AggregatingLocator( + JSONLocator(), + SimpleScrapingLocator('https://pypi.python.org/simple/', + timeout=3.0), + scheme='legacy') + +locate = default_locator.locate + +NAME_VERSION_RE = re.compile(r'(?P<name>[\w-]+)\s*' + r'\(\s*(==\s*)?(?P<ver>[^)]+)\)$') + +class DependencyFinder(object): + """ + Locate dependencies for distributions. + """ + + def __init__(self, locator=None): + """ + Initialise an instance, using the specified locator + to locate distributions. + """ + self.locator = locator or default_locator + self.scheme = get_scheme(self.locator.scheme) + + def add_distribution(self, dist): + """ + Add a distribution to the finder. This will update internal information + about who provides what. + :param dist: The distribution to add. + """ + logger.debug('adding distribution %s', dist) + name = dist.key + self.dists_by_name[name] = dist + self.dists[(name, dist.version)] = dist + for p in dist.provides: + name, version = parse_name_and_version(p) + logger.debug('Add to provided: %s, %s, %s', name, version, dist) + self.provided.setdefault(name, set()).add((version, dist)) + + def remove_distribution(self, dist): + """ + Remove a distribution from the finder. This will update internal + information about who provides what. + :param dist: The distribution to remove. 
+ """ + logger.debug('removing distribution %s', dist) + name = dist.key + del self.dists_by_name[name] + del self.dists[(name, dist.version)] + for p in dist.provides: + name, version = parse_name_and_version(p) + logger.debug('Remove from provided: %s, %s, %s', name, version, dist) + s = self.provided[name] + s.remove((version, dist)) + if not s: + del self.provided[name] + + def get_matcher(self, reqt): + """ + Get a version matcher for a requirement. + :param reqt: The requirement + :type reqt: str + :return: A version matcher (an instance of + :class:`distlib.version.Matcher`). + """ + try: + matcher = self.scheme.matcher(reqt) + except UnsupportedVersionError: # pragma: no cover + # XXX compat-mode if cannot read the version + name = reqt.split()[0] + matcher = self.scheme.matcher(name) + return matcher + + def find_providers(self, reqt): + """ + Find the distributions which can fulfill a requirement. + + :param reqt: The requirement. + :type reqt: str + :return: A set of distribution which can fulfill the requirement. + """ + matcher = self.get_matcher(reqt) + name = matcher.key # case-insensitive + result = set() + provided = self.provided + if name in provided: + for version, provider in provided[name]: + try: + match = matcher.match(version) + except UnsupportedVersionError: + match = False + + if match: + result.add(provider) + break + return result + + def try_to_replace(self, provider, other, problems): + """ + Attempt to replace one provider with another. This is typically used + when resolving dependencies from multiple sources, e.g. A requires + (B >= 1.0) while C requires (B >= 1.1). + + For successful replacement, ``provider`` must meet all the requirements + which ``other`` fulfills. + + :param provider: The provider we are trying to replace with. + :param other: The provider we're trying to replace. + :param problems: If False is returned, this will contain what + problems prevented replacement. This is currently + a tuple of the literal string 'cantreplace', + ``provider``, ``other`` and the set of requirements + that ``provider`` couldn't fulfill. + :return: True if we can replace ``other`` with ``provider``, else + False. + """ + rlist = self.reqts[other] + unmatched = set() + for s in rlist: + matcher = self.get_matcher(s) + if not matcher.match(provider.version): + unmatched.add(s) + if unmatched: + # can't replace other with provider + problems.add(('cantreplace', provider, other, + frozenset(unmatched))) + result = False + else: + # can replace other with provider + self.remove_distribution(other) + del self.reqts[other] + for s in rlist: + self.reqts.setdefault(provider, set()).add(s) + self.add_distribution(provider) + result = True + return result + + def find(self, requirement, meta_extras=None, prereleases=False): + """ + Find a distribution and all distributions it depends on. + + :param requirement: The requirement specifying the distribution to + find, or a Distribution instance. + :param meta_extras: A list of meta extras such as :test:, :build: and + so on. + :param prereleases: If ``True``, allow pre-release versions to be + returned - otherwise, don't return prereleases + unless they're all that's available. + + Return a set of :class:`Distribution` instances and a set of + problems. 
+ + The distributions returned should be such that they have the + :attr:`required` attribute set to ``True`` if they were + from the ``requirement`` passed to ``find()``, and they have the + :attr:`build_time_dependency` attribute set to ``True`` unless they + are post-installation dependencies of the ``requirement``. + + The problems should be a tuple consisting of the string + ``'unsatisfied'`` and the requirement which couldn't be satisfied + by any distribution known to the locator. + """ + + self.provided = {} + self.dists = {} + self.dists_by_name = {} + self.reqts = {} + + meta_extras = set(meta_extras or []) + if ':*:' in meta_extras: + meta_extras.remove(':*:') + # :meta: and :run: are implicitly included + meta_extras |= set([':test:', ':build:', ':dev:']) + + if isinstance(requirement, Distribution): + dist = odist = requirement + logger.debug('passed %s as requirement', odist) + else: + dist = odist = self.locator.locate(requirement, + prereleases=prereleases) + if dist is None: + raise DistlibException('Unable to locate %r' % requirement) + logger.debug('located %s', odist) + dist.requested = True + problems = set() + todo = set([dist]) + install_dists = set([odist]) + while todo: + dist = todo.pop() + name = dist.key # case-insensitive + if name not in self.dists_by_name: + self.add_distribution(dist) + else: + #import pdb; pdb.set_trace() + other = self.dists_by_name[name] + if other != dist: + self.try_to_replace(dist, other, problems) + + ireqts = dist.run_requires | dist.meta_requires + sreqts = dist.build_requires + ereqts = set() + if meta_extras and dist in install_dists: + for key in ('test', 'build', 'dev'): + e = ':%s:' % key + if e in meta_extras: + ereqts |= getattr(dist, '%s_requires' % key) + all_reqts = ireqts | sreqts | ereqts + for r in all_reqts: + providers = self.find_providers(r) + if not providers: + logger.debug('No providers found for %r', r) + provider = self.locator.locate(r, prereleases=prereleases) + # If no provider is found and we didn't consider + # prereleases, consider them now. + if provider is None and not prereleases: + provider = self.locator.locate(r, prereleases=True) + if provider is None: + logger.debug('Cannot satisfy %r', r) + problems.add(('unsatisfied', r)) + else: + n, v = provider.key, provider.version + if (n, v) not in self.dists: + todo.add(provider) + providers.add(provider) + if r in ireqts and dist in install_dists: + install_dists.add(provider) + logger.debug('Adding %s to install_dists', + provider.name_and_version) + for p in providers: + name = p.key + if name not in self.dists_by_name: + self.reqts.setdefault(p, set()).add(r) + else: + other = self.dists_by_name[name] + if other != p: + # see if other can be replaced by p + self.try_to_replace(p, other, problems) + + dists = set(self.dists.values()) + for dist in dists: + dist.build_time_dependency = dist not in install_dists + if dist.build_time_dependency: + logger.debug('%s is a build-time dependency only.', + dist.name_and_version) + logger.debug('find done for %s', odist) + return dists, problems diff --git a/env/lib/python3.6/site-packages/pip/_vendor/distlib/manifest.py b/env/lib/python3.6/site-packages/pip/_vendor/distlib/manifest.py new file mode 100644 index 0000000..ca0fe44 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/distlib/manifest.py @@ -0,0 +1,393 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2012-2013 Python Software Foundation. +# See LICENSE.txt and CONTRIBUTORS.txt. 
+# +""" +Class representing the list of files in a distribution. + +Equivalent to distutils.filelist, but fixes some problems. +""" +import fnmatch +import logging +import os +import re +import sys + +from . import DistlibException +from .compat import fsdecode +from .util import convert_path + + +__all__ = ['Manifest'] + +logger = logging.getLogger(__name__) + +# a \ followed by some spaces + EOL +_COLLAPSE_PATTERN = re.compile('\\\\w*\n', re.M) +_COMMENTED_LINE = re.compile('#.*?(?=\n)|\n(?=$)', re.M | re.S) + +# +# Due to the different results returned by fnmatch.translate, we need +# to do slightly different processing for Python 2.7 and 3.2 ... this needed +# to be brought in for Python 3.6 onwards. +# +_PYTHON_VERSION = sys.version_info[:2] + +class Manifest(object): + """A list of files built by on exploring the filesystem and filtered by + applying various patterns to what we find there. + """ + + def __init__(self, base=None): + """ + Initialise an instance. + + :param base: The base directory to explore under. + """ + self.base = os.path.abspath(os.path.normpath(base or os.getcwd())) + self.prefix = self.base + os.sep + self.allfiles = None + self.files = set() + + # + # Public API + # + + def findall(self): + """Find all files under the base and set ``allfiles`` to the absolute + pathnames of files found. + """ + from stat import S_ISREG, S_ISDIR, S_ISLNK + + self.allfiles = allfiles = [] + root = self.base + stack = [root] + pop = stack.pop + push = stack.append + + while stack: + root = pop() + names = os.listdir(root) + + for name in names: + fullname = os.path.join(root, name) + + # Avoid excess stat calls -- just one will do, thank you! + stat = os.stat(fullname) + mode = stat.st_mode + if S_ISREG(mode): + allfiles.append(fsdecode(fullname)) + elif S_ISDIR(mode) and not S_ISLNK(mode): + push(fullname) + + def add(self, item): + """ + Add a file to the manifest. + + :param item: The pathname to add. This can be relative to the base. + """ + if not item.startswith(self.prefix): + item = os.path.join(self.base, item) + self.files.add(os.path.normpath(item)) + + def add_many(self, items): + """ + Add a list of files to the manifest. + + :param items: The pathnames to add. These can be relative to the base. + """ + for item in items: + self.add(item) + + def sorted(self, wantdirs=False): + """ + Return sorted files in directory order + """ + + def add_dir(dirs, d): + dirs.add(d) + logger.debug('add_dir added %s', d) + if d != self.base: + parent, _ = os.path.split(d) + assert parent not in ('', '/') + add_dir(dirs, parent) + + result = set(self.files) # make a copy! + if wantdirs: + dirs = set() + for f in result: + add_dir(dirs, os.path.dirname(f)) + result |= dirs + return [os.path.join(*path_tuple) for path_tuple in + sorted(os.path.split(path) for path in result)] + + def clear(self): + """Clear all collected files.""" + self.files = set() + self.allfiles = [] + + def process_directive(self, directive): + """ + Process a directive which either adds some files from ``allfiles`` to + ``files``, or removes some files from ``files``. + + :param directive: The directive to process. This should be in a format + compatible with distutils ``MANIFEST.in`` files: + + http://docs.python.org/distutils/sourcedist.html#commands + """ + # Parse the line: split it up, make sure the right number of words + # is there, and return the relevant words. 'action' is always + # defined: it's the first word of the line. 
Which of the other + # three are defined depends on the action; it'll be either + # patterns, (dir and patterns), or (dirpattern). + action, patterns, thedir, dirpattern = self._parse_directive(directive) + + # OK, now we know that the action is valid and we have the + # right number of words on the line for that action -- so we + # can proceed with minimal error-checking. + if action == 'include': + for pattern in patterns: + if not self._include_pattern(pattern, anchor=True): + logger.warning('no files found matching %r', pattern) + + elif action == 'exclude': + for pattern in patterns: + found = self._exclude_pattern(pattern, anchor=True) + #if not found: + # logger.warning('no previously-included files ' + # 'found matching %r', pattern) + + elif action == 'global-include': + for pattern in patterns: + if not self._include_pattern(pattern, anchor=False): + logger.warning('no files found matching %r ' + 'anywhere in distribution', pattern) + + elif action == 'global-exclude': + for pattern in patterns: + found = self._exclude_pattern(pattern, anchor=False) + #if not found: + # logger.warning('no previously-included files ' + # 'matching %r found anywhere in ' + # 'distribution', pattern) + + elif action == 'recursive-include': + for pattern in patterns: + if not self._include_pattern(pattern, prefix=thedir): + logger.warning('no files found matching %r ' + 'under directory %r', pattern, thedir) + + elif action == 'recursive-exclude': + for pattern in patterns: + found = self._exclude_pattern(pattern, prefix=thedir) + #if not found: + # logger.warning('no previously-included files ' + # 'matching %r found under directory %r', + # pattern, thedir) + + elif action == 'graft': + if not self._include_pattern(None, prefix=dirpattern): + logger.warning('no directories found matching %r', + dirpattern) + + elif action == 'prune': + if not self._exclude_pattern(None, prefix=dirpattern): + logger.warning('no previously-included directories found ' + 'matching %r', dirpattern) + else: # pragma: no cover + # This should never happen, as it should be caught in + # _parse_template_line + raise DistlibException( + 'invalid action %r' % action) + + # + # Private API + # + + def _parse_directive(self, directive): + """ + Validate a directive. + :param directive: The directive to validate. + :return: A tuple of action, patterns, thedir, dir_patterns + """ + words = directive.split() + if len(words) == 1 and words[0] not in ('include', 'exclude', + 'global-include', + 'global-exclude', + 'recursive-include', + 'recursive-exclude', + 'graft', 'prune'): + # no action given, let's use the default 'include' + words.insert(0, 'include') + + action = words[0] + patterns = thedir = dir_pattern = None + + if action in ('include', 'exclude', + 'global-include', 'global-exclude'): + if len(words) < 2: + raise DistlibException( + '%r expects <pattern1> <pattern2> ...' % action) + + patterns = [convert_path(word) for word in words[1:]] + + elif action in ('recursive-include', 'recursive-exclude'): + if len(words) < 3: + raise DistlibException( + '%r expects <dir> <pattern1> <pattern2> ...' 
% action) + + thedir = convert_path(words[1]) + patterns = [convert_path(word) for word in words[2:]] + + elif action in ('graft', 'prune'): + if len(words) != 2: + raise DistlibException( + '%r expects a single <dir_pattern>' % action) + + dir_pattern = convert_path(words[1]) + + else: + raise DistlibException('unknown action %r' % action) + + return action, patterns, thedir, dir_pattern + + def _include_pattern(self, pattern, anchor=True, prefix=None, + is_regex=False): + """Select strings (presumably filenames) from 'self.files' that + match 'pattern', a Unix-style wildcard (glob) pattern. + + Patterns are not quite the same as implemented by the 'fnmatch' + module: '*' and '?' match non-special characters, where "special" + is platform-dependent: slash on Unix; colon, slash, and backslash on + DOS/Windows; and colon on Mac OS. + + If 'anchor' is true (the default), then the pattern match is more + stringent: "*.py" will match "foo.py" but not "foo/bar.py". If + 'anchor' is false, both of these will match. + + If 'prefix' is supplied, then only filenames starting with 'prefix' + (itself a pattern) and ending with 'pattern', with anything in between + them, will match. 'anchor' is ignored in this case. + + If 'is_regex' is true, 'anchor' and 'prefix' are ignored, and + 'pattern' is assumed to be either a string containing a regex or a + regex object -- no translation is done, the regex is just compiled + and used as-is. + + Selected strings will be added to self.files. + + Return True if files are found. + """ + # XXX docstring lying about what the special chars are? + found = False + pattern_re = self._translate_pattern(pattern, anchor, prefix, is_regex) + + # delayed loading of allfiles list + if self.allfiles is None: + self.findall() + + for name in self.allfiles: + if pattern_re.search(name): + self.files.add(name) + found = True + return found + + def _exclude_pattern(self, pattern, anchor=True, prefix=None, + is_regex=False): + """Remove strings (presumably filenames) from 'files' that match + 'pattern'. + + Other parameters are the same as for 'include_pattern()', above. + The list 'self.files' is modified in place. Return True if files are + found. + + This API is public to allow e.g. exclusion of SCM subdirs, e.g. when + packaging source distributions + """ + found = False + pattern_re = self._translate_pattern(pattern, anchor, prefix, is_regex) + for f in list(self.files): + if pattern_re.search(f): + self.files.remove(f) + found = True + return found + + def _translate_pattern(self, pattern, anchor=True, prefix=None, + is_regex=False): + """Translate a shell-like wildcard pattern to a compiled regular + expression. + + Return the compiled regex. If 'is_regex' true, + then 'pattern' is directly compiled to a regex (if it's a string) + or just returned as-is (assumes it's a regex object). 
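+
+        A brief sketch of how the translated patterns are driven from the
+        public ``Manifest`` API (the base directory and patterns below are
+        purely illustrative)::
+
+            from pip._vendor.distlib.manifest import Manifest
+
+            m = Manifest('/path/to/project')          # findall() runs lazily
+            m.process_directive('include *.txt')
+            m.process_directive('recursive-include docs *.rst')
+            m.process_directive('global-exclude *.py[co]')
+            for path in m.sorted():
+                print(path)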
+ """ + if is_regex: + if isinstance(pattern, str): + return re.compile(pattern) + else: + return pattern + + if _PYTHON_VERSION > (3, 2): + # ditch start and end characters + start, _, end = self._glob_to_re('_').partition('_') + + if pattern: + pattern_re = self._glob_to_re(pattern) + if _PYTHON_VERSION > (3, 2): + assert pattern_re.startswith(start) and pattern_re.endswith(end) + else: + pattern_re = '' + + base = re.escape(os.path.join(self.base, '')) + if prefix is not None: + # ditch end of pattern character + if _PYTHON_VERSION <= (3, 2): + empty_pattern = self._glob_to_re('') + prefix_re = self._glob_to_re(prefix)[:-len(empty_pattern)] + else: + prefix_re = self._glob_to_re(prefix) + assert prefix_re.startswith(start) and prefix_re.endswith(end) + prefix_re = prefix_re[len(start): len(prefix_re) - len(end)] + sep = os.sep + if os.sep == '\\': + sep = r'\\' + if _PYTHON_VERSION <= (3, 2): + pattern_re = '^' + base + sep.join((prefix_re, + '.*' + pattern_re)) + else: + pattern_re = pattern_re[len(start): len(pattern_re) - len(end)] + pattern_re = r'%s%s%s%s.*%s%s' % (start, base, prefix_re, sep, + pattern_re, end) + else: # no prefix -- respect anchor flag + if anchor: + if _PYTHON_VERSION <= (3, 2): + pattern_re = '^' + base + pattern_re + else: + pattern_re = r'%s%s%s' % (start, base, pattern_re[len(start):]) + + return re.compile(pattern_re) + + def _glob_to_re(self, pattern): + """Translate a shell-like glob pattern to a regular expression. + + Return a string containing the regex. Differs from + 'fnmatch.translate()' in that '*' does not match "special characters" + (which are platform-specific). + """ + pattern_re = fnmatch.translate(pattern) + + # '?' and '*' in the glob pattern become '.' and '.*' in the RE, which + # IMHO is wrong -- '?' and '*' aren't supposed to match slash in Unix, + # and by extension they shouldn't match such "special characters" under + # any OS. So change all non-escaped dots in the RE to match any + # character except the special characters (currently: just os.sep). + sep = os.sep + if os.sep == '\\': + # we're using a regex to manipulate a regex, so we need + # to escape the backslash twice + sep = r'\\\\' + escaped = r'\1[^%s]' % sep + pattern_re = re.sub(r'((?<!\\)(\\\\)*)\.', escaped, pattern_re) + return pattern_re diff --git a/env/lib/python3.6/site-packages/pip/_vendor/distlib/markers.py b/env/lib/python3.6/site-packages/pip/_vendor/distlib/markers.py new file mode 100644 index 0000000..ee1f3e2 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/distlib/markers.py @@ -0,0 +1,131 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2012-2017 Vinay Sajip. +# Licensed to the Python Software Foundation under a contributor agreement. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +""" +Parser for the environment markers micro-language defined in PEP 508. +""" + +# Note: In PEP 345, the micro-language was Python compatible, so the ast +# module could be used to parse it. However, PEP 508 introduced operators such +# as ~= and === which aren't in Python, necessitating a different approach. + +import os +import sys +import platform +import re + +from .compat import python_implementation, urlparse, string_types +from .util import in_venv, parse_marker + +__all__ = ['interpret'] + +def _is_literal(o): + if not isinstance(o, string_types) or not o: + return False + return o[0] in '\'"' + +class Evaluator(object): + """ + This class is used to evaluate marker expessions. 
+ """ + + operations = { + '==': lambda x, y: x == y, + '===': lambda x, y: x == y, + '~=': lambda x, y: x == y or x > y, + '!=': lambda x, y: x != y, + '<': lambda x, y: x < y, + '<=': lambda x, y: x == y or x < y, + '>': lambda x, y: x > y, + '>=': lambda x, y: x == y or x > y, + 'and': lambda x, y: x and y, + 'or': lambda x, y: x or y, + 'in': lambda x, y: x in y, + 'not in': lambda x, y: x not in y, + } + + def evaluate(self, expr, context): + """ + Evaluate a marker expression returned by the :func:`parse_requirement` + function in the specified context. + """ + if isinstance(expr, string_types): + if expr[0] in '\'"': + result = expr[1:-1] + else: + if expr not in context: + raise SyntaxError('unknown variable: %s' % expr) + result = context[expr] + else: + assert isinstance(expr, dict) + op = expr['op'] + if op not in self.operations: + raise NotImplementedError('op not implemented: %s' % op) + elhs = expr['lhs'] + erhs = expr['rhs'] + if _is_literal(expr['lhs']) and _is_literal(expr['rhs']): + raise SyntaxError('invalid comparison: %s %s %s' % (elhs, op, erhs)) + + lhs = self.evaluate(elhs, context) + rhs = self.evaluate(erhs, context) + result = self.operations[op](lhs, rhs) + return result + +def default_context(): + def format_full_version(info): + version = '%s.%s.%s' % (info.major, info.minor, info.micro) + kind = info.releaselevel + if kind != 'final': + version += kind[0] + str(info.serial) + return version + + if hasattr(sys, 'implementation'): + implementation_version = format_full_version(sys.implementation.version) + implementation_name = sys.implementation.name + else: + implementation_version = '0' + implementation_name = '' + + result = { + 'implementation_name': implementation_name, + 'implementation_version': implementation_version, + 'os_name': os.name, + 'platform_machine': platform.machine(), + 'platform_python_implementation': platform.python_implementation(), + 'platform_release': platform.release(), + 'platform_system': platform.system(), + 'platform_version': platform.version(), + 'platform_in_venv': str(in_venv()), + 'python_full_version': platform.python_version(), + 'python_version': platform.python_version()[:3], + 'sys_platform': sys.platform, + } + return result + +DEFAULT_CONTEXT = default_context() +del default_context + +evaluator = Evaluator() + +def interpret(marker, execution_context=None): + """ + Interpret a marker and return a result depending on environment. + + :param marker: The marker to interpret. + :type marker: str + :param execution_context: The context used for name lookup. + :type execution_context: mapping + """ + try: + expr, rest = parse_marker(marker) + except Exception as e: + raise SyntaxError('Unable to interpret marker syntax: %s: %s' % (marker, e)) + if rest and rest[0] != '#': + raise SyntaxError('unexpected trailing data in marker: %s: %s' % (marker, rest)) + context = dict(DEFAULT_CONTEXT) + if execution_context: + context.update(execution_context) + return evaluator.evaluate(expr, context) diff --git a/env/lib/python3.6/site-packages/pip/_vendor/distlib/metadata.py b/env/lib/python3.6/site-packages/pip/_vendor/distlib/metadata.py new file mode 100644 index 0000000..6d6470f --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/distlib/metadata.py @@ -0,0 +1,1091 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2012 The Python Software Foundation. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +"""Implementation of the Metadata for Python packages PEPs. 
+ +Supports all metadata formats (1.0, 1.1, 1.2, and 2.0 experimental). +""" +from __future__ import unicode_literals + +import codecs +from email import message_from_file +import json +import logging +import re + + +from . import DistlibException, __version__ +from .compat import StringIO, string_types, text_type +from .markers import interpret +from .util import extract_by_key, get_extras +from .version import get_scheme, PEP440_VERSION_RE + +logger = logging.getLogger(__name__) + + +class MetadataMissingError(DistlibException): + """A required metadata is missing""" + + +class MetadataConflictError(DistlibException): + """Attempt to read or write metadata fields that are conflictual.""" + + +class MetadataUnrecognizedVersionError(DistlibException): + """Unknown metadata version number.""" + + +class MetadataInvalidError(DistlibException): + """A metadata value is invalid""" + +# public API of this module +__all__ = ['Metadata', 'PKG_INFO_ENCODING', 'PKG_INFO_PREFERRED_VERSION'] + +# Encoding used for the PKG-INFO files +PKG_INFO_ENCODING = 'utf-8' + +# preferred version. Hopefully will be changed +# to 1.2 once PEP 345 is supported everywhere +PKG_INFO_PREFERRED_VERSION = '1.1' + +_LINE_PREFIX_1_2 = re.compile('\n \\|') +_LINE_PREFIX_PRE_1_2 = re.compile('\n ') +_241_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', + 'Summary', 'Description', + 'Keywords', 'Home-page', 'Author', 'Author-email', + 'License') + +_314_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', + 'Supported-Platform', 'Summary', 'Description', + 'Keywords', 'Home-page', 'Author', 'Author-email', + 'License', 'Classifier', 'Download-URL', 'Obsoletes', + 'Provides', 'Requires') + +_314_MARKERS = ('Obsoletes', 'Provides', 'Requires', 'Classifier', + 'Download-URL') + +_345_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', + 'Supported-Platform', 'Summary', 'Description', + 'Keywords', 'Home-page', 'Author', 'Author-email', + 'Maintainer', 'Maintainer-email', 'License', + 'Classifier', 'Download-URL', 'Obsoletes-Dist', + 'Project-URL', 'Provides-Dist', 'Requires-Dist', + 'Requires-Python', 'Requires-External') + +_345_MARKERS = ('Provides-Dist', 'Requires-Dist', 'Requires-Python', + 'Obsoletes-Dist', 'Requires-External', 'Maintainer', + 'Maintainer-email', 'Project-URL') + +_426_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', + 'Supported-Platform', 'Summary', 'Description', + 'Keywords', 'Home-page', 'Author', 'Author-email', + 'Maintainer', 'Maintainer-email', 'License', + 'Classifier', 'Download-URL', 'Obsoletes-Dist', + 'Project-URL', 'Provides-Dist', 'Requires-Dist', + 'Requires-Python', 'Requires-External', 'Private-Version', + 'Obsoleted-By', 'Setup-Requires-Dist', 'Extension', + 'Provides-Extra') + +_426_MARKERS = ('Private-Version', 'Provides-Extra', 'Obsoleted-By', + 'Setup-Requires-Dist', 'Extension') + +_566_FIELDS = _426_FIELDS + ('Description-Content-Type',) + +_566_MARKERS = ('Description-Content-Type',) + +_ALL_FIELDS = set() +_ALL_FIELDS.update(_241_FIELDS) +_ALL_FIELDS.update(_314_FIELDS) +_ALL_FIELDS.update(_345_FIELDS) +_ALL_FIELDS.update(_426_FIELDS) +_ALL_FIELDS.update(_566_FIELDS) + +EXTRA_RE = re.compile(r'''extra\s*==\s*("([^"]+)"|'([^']+)')''') + + +def _version2fieldlist(version): + if version == '1.0': + return _241_FIELDS + elif version == '1.1': + return _314_FIELDS + elif version == '1.2': + return _345_FIELDS + elif version in ('1.3', '2.1'): + return _345_FIELDS + _566_FIELDS + elif version == '2.0': + return _426_FIELDS + raise 
MetadataUnrecognizedVersionError(version) + + +def _best_version(fields): + """Detect the best version depending on the fields used.""" + def _has_marker(keys, markers): + for marker in markers: + if marker in keys: + return True + return False + + keys = [] + for key, value in fields.items(): + if value in ([], 'UNKNOWN', None): + continue + keys.append(key) + + possible_versions = ['1.0', '1.1', '1.2', '1.3', '2.0', '2.1'] + + # first let's try to see if a field is not part of one of the version + for key in keys: + if key not in _241_FIELDS and '1.0' in possible_versions: + possible_versions.remove('1.0') + logger.debug('Removed 1.0 due to %s', key) + if key not in _314_FIELDS and '1.1' in possible_versions: + possible_versions.remove('1.1') + logger.debug('Removed 1.1 due to %s', key) + if key not in _345_FIELDS and '1.2' in possible_versions: + possible_versions.remove('1.2') + logger.debug('Removed 1.2 due to %s', key) + if key not in _566_FIELDS and '1.3' in possible_versions: + possible_versions.remove('1.3') + logger.debug('Removed 1.3 due to %s', key) + if key not in _566_FIELDS and '2.1' in possible_versions: + if key != 'Description': # In 2.1, description allowed after headers + possible_versions.remove('2.1') + logger.debug('Removed 2.1 due to %s', key) + if key not in _426_FIELDS and '2.0' in possible_versions: + possible_versions.remove('2.0') + logger.debug('Removed 2.0 due to %s', key) + + # possible_version contains qualified versions + if len(possible_versions) == 1: + return possible_versions[0] # found ! + elif len(possible_versions) == 0: + logger.debug('Out of options - unknown metadata set: %s', fields) + raise MetadataConflictError('Unknown metadata set') + + # let's see if one unique marker is found + is_1_1 = '1.1' in possible_versions and _has_marker(keys, _314_MARKERS) + is_1_2 = '1.2' in possible_versions and _has_marker(keys, _345_MARKERS) + is_2_1 = '2.1' in possible_versions and _has_marker(keys, _566_MARKERS) + is_2_0 = '2.0' in possible_versions and _has_marker(keys, _426_MARKERS) + if int(is_1_1) + int(is_1_2) + int(is_2_1) + int(is_2_0) > 1: + raise MetadataConflictError('You used incompatible 1.1/1.2/2.0/2.1 fields') + + # we have the choice, 1.0, or 1.2, or 2.0 + # - 1.0 has a broken Summary field but works with all tools + # - 1.1 is to avoid + # - 1.2 fixes Summary but has little adoption + # - 2.0 adds more features and is very new + if not is_1_1 and not is_1_2 and not is_2_1 and not is_2_0: + # we couldn't find any specific marker + if PKG_INFO_PREFERRED_VERSION in possible_versions: + return PKG_INFO_PREFERRED_VERSION + if is_1_1: + return '1.1' + if is_1_2: + return '1.2' + if is_2_1: + return '2.1' + + return '2.0' + +_ATTR2FIELD = { + 'metadata_version': 'Metadata-Version', + 'name': 'Name', + 'version': 'Version', + 'platform': 'Platform', + 'supported_platform': 'Supported-Platform', + 'summary': 'Summary', + 'description': 'Description', + 'keywords': 'Keywords', + 'home_page': 'Home-page', + 'author': 'Author', + 'author_email': 'Author-email', + 'maintainer': 'Maintainer', + 'maintainer_email': 'Maintainer-email', + 'license': 'License', + 'classifier': 'Classifier', + 'download_url': 'Download-URL', + 'obsoletes_dist': 'Obsoletes-Dist', + 'provides_dist': 'Provides-Dist', + 'requires_dist': 'Requires-Dist', + 'setup_requires_dist': 'Setup-Requires-Dist', + 'requires_python': 'Requires-Python', + 'requires_external': 'Requires-External', + 'requires': 'Requires', + 'provides': 'Provides', + 'obsoletes': 'Obsoletes', + 'project_url': 
'Project-URL', + 'private_version': 'Private-Version', + 'obsoleted_by': 'Obsoleted-By', + 'extension': 'Extension', + 'provides_extra': 'Provides-Extra', +} + +_PREDICATE_FIELDS = ('Requires-Dist', 'Obsoletes-Dist', 'Provides-Dist') +_VERSIONS_FIELDS = ('Requires-Python',) +_VERSION_FIELDS = ('Version',) +_LISTFIELDS = ('Platform', 'Classifier', 'Obsoletes', + 'Requires', 'Provides', 'Obsoletes-Dist', + 'Provides-Dist', 'Requires-Dist', 'Requires-External', + 'Project-URL', 'Supported-Platform', 'Setup-Requires-Dist', + 'Provides-Extra', 'Extension') +_LISTTUPLEFIELDS = ('Project-URL',) + +_ELEMENTSFIELD = ('Keywords',) + +_UNICODEFIELDS = ('Author', 'Maintainer', 'Summary', 'Description') + +_MISSING = object() + +_FILESAFE = re.compile('[^A-Za-z0-9.]+') + + +def _get_name_and_version(name, version, for_filename=False): + """Return the distribution name with version. + + If for_filename is true, return a filename-escaped form.""" + if for_filename: + # For both name and version any runs of non-alphanumeric or '.' + # characters are replaced with a single '-'. Additionally any + # spaces in the version string become '.' + name = _FILESAFE.sub('-', name) + version = _FILESAFE.sub('-', version.replace(' ', '.')) + return '%s-%s' % (name, version) + + +class LegacyMetadata(object): + """The legacy metadata of a release. + + Supports versions 1.0, 1.1 and 1.2 (auto-detected). You can + instantiate the class with one of these arguments (or none): + - *path*, the path to a metadata file + - *fileobj* give a file-like object with metadata as content + - *mapping* is a dict-like object + - *scheme* is a version scheme name + """ + # TODO document the mapping API and UNKNOWN default key + + def __init__(self, path=None, fileobj=None, mapping=None, + scheme='default'): + if [path, fileobj, mapping].count(None) < 2: + raise TypeError('path, fileobj and mapping are exclusive') + self._fields = {} + self.requires_files = [] + self._dependencies = None + self.scheme = scheme + if path is not None: + self.read(path) + elif fileobj is not None: + self.read_file(fileobj) + elif mapping is not None: + self.update(mapping) + self.set_metadata_version() + + def set_metadata_version(self): + self._fields['Metadata-Version'] = _best_version(self._fields) + + def _write_field(self, fileobj, name, value): + fileobj.write('%s: %s\n' % (name, value)) + + def __getitem__(self, name): + return self.get(name) + + def __setitem__(self, name, value): + return self.set(name, value) + + def __delitem__(self, name): + field_name = self._convert_name(name) + try: + del self._fields[field_name] + except KeyError: + raise KeyError(name) + + def __contains__(self, name): + return (name in self._fields or + self._convert_name(name) in self._fields) + + def _convert_name(self, name): + if name in _ALL_FIELDS: + return name + name = name.replace('-', '_').lower() + return _ATTR2FIELD.get(name, name) + + def _default_value(self, name): + if name in _LISTFIELDS or name in _ELEMENTSFIELD: + return [] + return 'UNKNOWN' + + def _remove_line_prefix(self, value): + if self.metadata_version in ('1.0', '1.1'): + return _LINE_PREFIX_PRE_1_2.sub('\n', value) + else: + return _LINE_PREFIX_1_2.sub('\n', value) + + def __getattr__(self, name): + if name in _ATTR2FIELD: + return self[name] + raise AttributeError(name) + + # + # Public API + # + +# dependencies = property(_get_dependencies, _set_dependencies) + + def get_fullname(self, filesafe=False): + """Return the distribution name with version. 
+ + If filesafe is true, return a filename-escaped form.""" + return _get_name_and_version(self['Name'], self['Version'], filesafe) + + def is_field(self, name): + """return True if name is a valid metadata key""" + name = self._convert_name(name) + return name in _ALL_FIELDS + + def is_multi_field(self, name): + name = self._convert_name(name) + return name in _LISTFIELDS + + def read(self, filepath): + """Read the metadata values from a file path.""" + fp = codecs.open(filepath, 'r', encoding='utf-8') + try: + self.read_file(fp) + finally: + fp.close() + + def read_file(self, fileob): + """Read the metadata values from a file object.""" + msg = message_from_file(fileob) + self._fields['Metadata-Version'] = msg['metadata-version'] + + # When reading, get all the fields we can + for field in _ALL_FIELDS: + if field not in msg: + continue + if field in _LISTFIELDS: + # we can have multiple lines + values = msg.get_all(field) + if field in _LISTTUPLEFIELDS and values is not None: + values = [tuple(value.split(',')) for value in values] + self.set(field, values) + else: + # single line + value = msg[field] + if value is not None and value != 'UNKNOWN': + self.set(field, value) + logger.debug('Attempting to set metadata for %s', self) + self.set_metadata_version() + + def write(self, filepath, skip_unknown=False): + """Write the metadata fields to filepath.""" + fp = codecs.open(filepath, 'w', encoding='utf-8') + try: + self.write_file(fp, skip_unknown) + finally: + fp.close() + + def write_file(self, fileobject, skip_unknown=False): + """Write the PKG-INFO format data to a file object.""" + self.set_metadata_version() + + for field in _version2fieldlist(self['Metadata-Version']): + values = self.get(field) + if skip_unknown and values in ('UNKNOWN', [], ['UNKNOWN']): + continue + if field in _ELEMENTSFIELD: + self._write_field(fileobject, field, ','.join(values)) + continue + if field not in _LISTFIELDS: + if field == 'Description': + if self.metadata_version in ('1.0', '1.1'): + values = values.replace('\n', '\n ') + else: + values = values.replace('\n', '\n |') + values = [values] + + if field in _LISTTUPLEFIELDS: + values = [','.join(value) for value in values] + + for value in values: + self._write_field(fileobject, field, value) + + def update(self, other=None, **kwargs): + """Set metadata values from the given iterable `other` and kwargs. + + Behavior is like `dict.update`: If `other` has a ``keys`` method, + they are looped over and ``self[key]`` is assigned ``other[key]``. + Else, ``other`` is an iterable of ``(key, value)`` iterables. + + Keys that don't match a metadata field or that have an empty value are + dropped. 
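+
+        A small illustrative example (the values are placeholders; note that
+        the keys use the attribute spelling and are stored under the
+        corresponding field names)::
+
+            md = LegacyMetadata()
+            md.update({'name': 'example-dist', 'version': '0.1'},
+                      summary='An example distribution')
+            assert md['Name'] == 'example-dist'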
+ """ + def _set(key, value): + if key in _ATTR2FIELD and value: + self.set(self._convert_name(key), value) + + if not other: + # other is None or empty container + pass + elif hasattr(other, 'keys'): + for k in other.keys(): + _set(k, other[k]) + else: + for k, v in other: + _set(k, v) + + if kwargs: + for k, v in kwargs.items(): + _set(k, v) + + def set(self, name, value): + """Control then set a metadata field.""" + name = self._convert_name(name) + + if ((name in _ELEMENTSFIELD or name == 'Platform') and + not isinstance(value, (list, tuple))): + if isinstance(value, string_types): + value = [v.strip() for v in value.split(',')] + else: + value = [] + elif (name in _LISTFIELDS and + not isinstance(value, (list, tuple))): + if isinstance(value, string_types): + value = [value] + else: + value = [] + + if logger.isEnabledFor(logging.WARNING): + project_name = self['Name'] + + scheme = get_scheme(self.scheme) + if name in _PREDICATE_FIELDS and value is not None: + for v in value: + # check that the values are valid + if not scheme.is_valid_matcher(v.split(';')[0]): + logger.warning( + "'%s': '%s' is not valid (field '%s')", + project_name, v, name) + # FIXME this rejects UNKNOWN, is that right? + elif name in _VERSIONS_FIELDS and value is not None: + if not scheme.is_valid_constraint_list(value): + logger.warning("'%s': '%s' is not a valid version (field '%s')", + project_name, value, name) + elif name in _VERSION_FIELDS and value is not None: + if not scheme.is_valid_version(value): + logger.warning("'%s': '%s' is not a valid version (field '%s')", + project_name, value, name) + + if name in _UNICODEFIELDS: + if name == 'Description': + value = self._remove_line_prefix(value) + + self._fields[name] = value + + def get(self, name, default=_MISSING): + """Get a metadata field.""" + name = self._convert_name(name) + if name not in self._fields: + if default is _MISSING: + default = self._default_value(name) + return default + if name in _UNICODEFIELDS: + value = self._fields[name] + return value + elif name in _LISTFIELDS: + value = self._fields[name] + if value is None: + return [] + res = [] + for val in value: + if name not in _LISTTUPLEFIELDS: + res.append(val) + else: + # That's for Project-URL + res.append((val[0], val[1])) + return res + + elif name in _ELEMENTSFIELD: + value = self._fields[name] + if isinstance(value, string_types): + return value.split(',') + return self._fields[name] + + def check(self, strict=False): + """Check if the metadata is compliant. 
If strict is True then raise if + no Name or Version are provided""" + self.set_metadata_version() + + # XXX should check the versions (if the file was loaded) + missing, warnings = [], [] + + for attr in ('Name', 'Version'): # required by PEP 345 + if attr not in self: + missing.append(attr) + + if strict and missing != []: + msg = 'missing required metadata: %s' % ', '.join(missing) + raise MetadataMissingError(msg) + + for attr in ('Home-page', 'Author'): + if attr not in self: + missing.append(attr) + + # checking metadata 1.2 (XXX needs to check 1.1, 1.0) + if self['Metadata-Version'] != '1.2': + return missing, warnings + + scheme = get_scheme(self.scheme) + + def are_valid_constraints(value): + for v in value: + if not scheme.is_valid_matcher(v.split(';')[0]): + return False + return True + + for fields, controller in ((_PREDICATE_FIELDS, are_valid_constraints), + (_VERSIONS_FIELDS, + scheme.is_valid_constraint_list), + (_VERSION_FIELDS, + scheme.is_valid_version)): + for field in fields: + value = self.get(field, None) + if value is not None and not controller(value): + warnings.append("Wrong value for '%s': %s" % (field, value)) + + return missing, warnings + + def todict(self, skip_missing=False): + """Return fields as a dict. + + Field names will be converted to use the underscore-lowercase style + instead of hyphen-mixed case (i.e. home_page instead of Home-page). + """ + self.set_metadata_version() + + mapping_1_0 = ( + ('metadata_version', 'Metadata-Version'), + ('name', 'Name'), + ('version', 'Version'), + ('summary', 'Summary'), + ('home_page', 'Home-page'), + ('author', 'Author'), + ('author_email', 'Author-email'), + ('license', 'License'), + ('description', 'Description'), + ('keywords', 'Keywords'), + ('platform', 'Platform'), + ('classifiers', 'Classifier'), + ('download_url', 'Download-URL'), + ) + + data = {} + for key, field_name in mapping_1_0: + if not skip_missing or field_name in self._fields: + data[key] = self[field_name] + + if self['Metadata-Version'] == '1.2': + mapping_1_2 = ( + ('requires_dist', 'Requires-Dist'), + ('requires_python', 'Requires-Python'), + ('requires_external', 'Requires-External'), + ('provides_dist', 'Provides-Dist'), + ('obsoletes_dist', 'Obsoletes-Dist'), + ('project_url', 'Project-URL'), + ('maintainer', 'Maintainer'), + ('maintainer_email', 'Maintainer-email'), + ) + for key, field_name in mapping_1_2: + if not skip_missing or field_name in self._fields: + if key != 'project_url': + data[key] = self[field_name] + else: + data[key] = [','.join(u) for u in self[field_name]] + + elif self['Metadata-Version'] == '1.1': + mapping_1_1 = ( + ('provides', 'Provides'), + ('requires', 'Requires'), + ('obsoletes', 'Obsoletes'), + ) + for key, field_name in mapping_1_1: + if not skip_missing or field_name in self._fields: + data[key] = self[field_name] + + return data + + def add_requirements(self, requirements): + if self['Metadata-Version'] == '1.1': + # we can't have 1.1 metadata *and* Setuptools requires + for field in ('Obsoletes', 'Requires', 'Provides'): + if field in self: + del self[field] + self['Requires-Dist'] += requirements + + # Mapping API + # TODO could add iter* variants + + def keys(self): + return list(_version2fieldlist(self['Metadata-Version'])) + + def __iter__(self): + for key in self.keys(): + yield key + + def values(self): + return [self[key] for key in self.keys()] + + def items(self): + return [(key, self[key]) for key in self.keys()] + + def __repr__(self): + return '<%s %s %s>' % (self.__class__.__name__, 
self.name, + self.version) + + +METADATA_FILENAME = 'pydist.json' +WHEEL_METADATA_FILENAME = 'metadata.json' + + +class Metadata(object): + """ + The metadata of a release. This implementation uses 2.0 (JSON) + metadata where possible. If not possible, it wraps a LegacyMetadata + instance which handles the key-value metadata format. + """ + + METADATA_VERSION_MATCHER = re.compile(r'^\d+(\.\d+)*$') + + NAME_MATCHER = re.compile('^[0-9A-Z]([0-9A-Z_.-]*[0-9A-Z])?$', re.I) + + VERSION_MATCHER = PEP440_VERSION_RE + + SUMMARY_MATCHER = re.compile('.{1,2047}') + + METADATA_VERSION = '2.0' + + GENERATOR = 'distlib (%s)' % __version__ + + MANDATORY_KEYS = { + 'name': (), + 'version': (), + 'summary': ('legacy',), + } + + INDEX_KEYS = ('name version license summary description author ' + 'author_email keywords platform home_page classifiers ' + 'download_url') + + DEPENDENCY_KEYS = ('extras run_requires test_requires build_requires ' + 'dev_requires provides meta_requires obsoleted_by ' + 'supports_environments') + + SYNTAX_VALIDATORS = { + 'metadata_version': (METADATA_VERSION_MATCHER, ()), + 'name': (NAME_MATCHER, ('legacy',)), + 'version': (VERSION_MATCHER, ('legacy',)), + 'summary': (SUMMARY_MATCHER, ('legacy',)), + } + + __slots__ = ('_legacy', '_data', 'scheme') + + def __init__(self, path=None, fileobj=None, mapping=None, + scheme='default'): + if [path, fileobj, mapping].count(None) < 2: + raise TypeError('path, fileobj and mapping are exclusive') + self._legacy = None + self._data = None + self.scheme = scheme + #import pdb; pdb.set_trace() + if mapping is not None: + try: + self._validate_mapping(mapping, scheme) + self._data = mapping + except MetadataUnrecognizedVersionError: + self._legacy = LegacyMetadata(mapping=mapping, scheme=scheme) + self.validate() + else: + data = None + if path: + with open(path, 'rb') as f: + data = f.read() + elif fileobj: + data = fileobj.read() + if data is None: + # Initialised with no args - to be added + self._data = { + 'metadata_version': self.METADATA_VERSION, + 'generator': self.GENERATOR, + } + else: + if not isinstance(data, text_type): + data = data.decode('utf-8') + try: + self._data = json.loads(data) + self._validate_mapping(self._data, scheme) + except ValueError: + # Note: MetadataUnrecognizedVersionError does not + # inherit from ValueError (it's a DistlibException, + # which should not inherit from ValueError). 
+ # The ValueError comes from the json.load - if that + # succeeds and we get a validation error, we want + # that to propagate + self._legacy = LegacyMetadata(fileobj=StringIO(data), + scheme=scheme) + self.validate() + + common_keys = set(('name', 'version', 'license', 'keywords', 'summary')) + + none_list = (None, list) + none_dict = (None, dict) + + mapped_keys = { + 'run_requires': ('Requires-Dist', list), + 'build_requires': ('Setup-Requires-Dist', list), + 'dev_requires': none_list, + 'test_requires': none_list, + 'meta_requires': none_list, + 'extras': ('Provides-Extra', list), + 'modules': none_list, + 'namespaces': none_list, + 'exports': none_dict, + 'commands': none_dict, + 'classifiers': ('Classifier', list), + 'source_url': ('Download-URL', None), + 'metadata_version': ('Metadata-Version', None), + } + + del none_list, none_dict + + def __getattribute__(self, key): + common = object.__getattribute__(self, 'common_keys') + mapped = object.__getattribute__(self, 'mapped_keys') + if key in mapped: + lk, maker = mapped[key] + if self._legacy: + if lk is None: + result = None if maker is None else maker() + else: + result = self._legacy.get(lk) + else: + value = None if maker is None else maker() + if key not in ('commands', 'exports', 'modules', 'namespaces', + 'classifiers'): + result = self._data.get(key, value) + else: + # special cases for PEP 459 + sentinel = object() + result = sentinel + d = self._data.get('extensions') + if d: + if key == 'commands': + result = d.get('python.commands', value) + elif key == 'classifiers': + d = d.get('python.details') + if d: + result = d.get(key, value) + else: + d = d.get('python.exports') + if not d: + d = self._data.get('python.exports') + if d: + result = d.get(key, value) + if result is sentinel: + result = value + elif key not in common: + result = object.__getattribute__(self, key) + elif self._legacy: + result = self._legacy.get(key) + else: + result = self._data.get(key) + return result + + def _validate_value(self, key, value, scheme=None): + if key in self.SYNTAX_VALIDATORS: + pattern, exclusions = self.SYNTAX_VALIDATORS[key] + if (scheme or self.scheme) not in exclusions: + m = pattern.match(value) + if not m: + raise MetadataInvalidError("'%s' is an invalid value for " + "the '%s' property" % (value, + key)) + + def __setattr__(self, key, value): + self._validate_value(key, value) + common = object.__getattribute__(self, 'common_keys') + mapped = object.__getattribute__(self, 'mapped_keys') + if key in mapped: + lk, _ = mapped[key] + if self._legacy: + if lk is None: + raise NotImplementedError + self._legacy[lk] = value + elif key not in ('commands', 'exports', 'modules', 'namespaces', + 'classifiers'): + self._data[key] = value + else: + # special cases for PEP 459 + d = self._data.setdefault('extensions', {}) + if key == 'commands': + d['python.commands'] = value + elif key == 'classifiers': + d = d.setdefault('python.details', {}) + d[key] = value + else: + d = d.setdefault('python.exports', {}) + d[key] = value + elif key not in common: + object.__setattr__(self, key, value) + else: + if key == 'keywords': + if isinstance(value, string_types): + value = value.strip() + if value: + value = value.split() + else: + value = [] + if self._legacy: + self._legacy[key] = value + else: + self._data[key] = value + + @property + def name_and_version(self): + return _get_name_and_version(self.name, self.version, True) + + @property + def provides(self): + if self._legacy: + result = self._legacy['Provides-Dist'] + else: + result = 
self._data.setdefault('provides', []) + s = '%s (%s)' % (self.name, self.version) + if s not in result: + result.append(s) + return result + + @provides.setter + def provides(self, value): + if self._legacy: + self._legacy['Provides-Dist'] = value + else: + self._data['provides'] = value + + def get_requirements(self, reqts, extras=None, env=None): + """ + Base method to get dependencies, given a set of extras + to satisfy and an optional environment context. + :param reqts: A list of sometimes-wanted dependencies, + perhaps dependent on extras and environment. + :param extras: A list of optional components being requested. + :param env: An optional environment for marker evaluation. + """ + if self._legacy: + result = reqts + else: + result = [] + extras = get_extras(extras or [], self.extras) + for d in reqts: + if 'extra' not in d and 'environment' not in d: + # unconditional + include = True + else: + if 'extra' not in d: + # Not extra-dependent - only environment-dependent + include = True + else: + include = d.get('extra') in extras + if include: + # Not excluded because of extras, check environment + marker = d.get('environment') + if marker: + include = interpret(marker, env) + if include: + result.extend(d['requires']) + for key in ('build', 'dev', 'test'): + e = ':%s:' % key + if e in extras: + extras.remove(e) + # A recursive call, but it should terminate since 'test' + # has been removed from the extras + reqts = self._data.get('%s_requires' % key, []) + result.extend(self.get_requirements(reqts, extras=extras, + env=env)) + return result + + @property + def dictionary(self): + if self._legacy: + return self._from_legacy() + return self._data + + @property + def dependencies(self): + if self._legacy: + raise NotImplementedError + else: + return extract_by_key(self._data, self.DEPENDENCY_KEYS) + + @dependencies.setter + def dependencies(self, value): + if self._legacy: + raise NotImplementedError + else: + self._data.update(value) + + def _validate_mapping(self, mapping, scheme): + if mapping.get('metadata_version') != self.METADATA_VERSION: + raise MetadataUnrecognizedVersionError() + missing = [] + for key, exclusions in self.MANDATORY_KEYS.items(): + if key not in mapping: + if scheme not in exclusions: + missing.append(key) + if missing: + msg = 'Missing metadata items: %s' % ', '.join(missing) + raise MetadataMissingError(msg) + for k, v in mapping.items(): + self._validate_value(k, v, scheme) + + def validate(self): + if self._legacy: + missing, warnings = self._legacy.check(True) + if missing or warnings: + logger.warning('Metadata: missing: %s, warnings: %s', + missing, warnings) + else: + self._validate_mapping(self._data, self.scheme) + + def todict(self): + if self._legacy: + return self._legacy.todict(True) + else: + result = extract_by_key(self._data, self.INDEX_KEYS) + return result + + def _from_legacy(self): + assert self._legacy and not self._data + result = { + 'metadata_version': self.METADATA_VERSION, + 'generator': self.GENERATOR, + } + lmd = self._legacy.todict(True) # skip missing ones + for k in ('name', 'version', 'license', 'summary', 'description', + 'classifier'): + if k in lmd: + if k == 'classifier': + nk = 'classifiers' + else: + nk = k + result[nk] = lmd[k] + kw = lmd.get('Keywords', []) + if kw == ['']: + kw = [] + result['keywords'] = kw + keys = (('requires_dist', 'run_requires'), + ('setup_requires_dist', 'build_requires')) + for ok, nk in keys: + if ok in lmd and lmd[ok]: + result[nk] = [{'requires': lmd[ok]}] + result['provides'] = 
self.provides + author = {} + maintainer = {} + return result + + LEGACY_MAPPING = { + 'name': 'Name', + 'version': 'Version', + 'license': 'License', + 'summary': 'Summary', + 'description': 'Description', + 'classifiers': 'Classifier', + } + + def _to_legacy(self): + def process_entries(entries): + reqts = set() + for e in entries: + extra = e.get('extra') + env = e.get('environment') + rlist = e['requires'] + for r in rlist: + if not env and not extra: + reqts.add(r) + else: + marker = '' + if extra: + marker = 'extra == "%s"' % extra + if env: + if marker: + marker = '(%s) and %s' % (env, marker) + else: + marker = env + reqts.add(';'.join((r, marker))) + return reqts + + assert self._data and not self._legacy + result = LegacyMetadata() + nmd = self._data + for nk, ok in self.LEGACY_MAPPING.items(): + if nk in nmd: + result[ok] = nmd[nk] + r1 = process_entries(self.run_requires + self.meta_requires) + r2 = process_entries(self.build_requires + self.dev_requires) + if self.extras: + result['Provides-Extra'] = sorted(self.extras) + result['Requires-Dist'] = sorted(r1) + result['Setup-Requires-Dist'] = sorted(r2) + # TODO: other fields such as contacts + return result + + def write(self, path=None, fileobj=None, legacy=False, skip_unknown=True): + if [path, fileobj].count(None) != 1: + raise ValueError('Exactly one of path and fileobj is needed') + self.validate() + if legacy: + if self._legacy: + legacy_md = self._legacy + else: + legacy_md = self._to_legacy() + if path: + legacy_md.write(path, skip_unknown=skip_unknown) + else: + legacy_md.write_file(fileobj, skip_unknown=skip_unknown) + else: + if self._legacy: + d = self._from_legacy() + else: + d = self._data + if fileobj: + json.dump(d, fileobj, ensure_ascii=True, indent=2, + sort_keys=True) + else: + with codecs.open(path, 'w', 'utf-8') as f: + json.dump(d, f, ensure_ascii=True, indent=2, + sort_keys=True) + + def add_requirements(self, requirements): + if self._legacy: + self._legacy.add_requirements(requirements) + else: + run_requires = self._data.setdefault('run_requires', []) + always = None + for entry in run_requires: + if 'environment' not in entry and 'extra' not in entry: + always = entry + break + if always is None: + always = { 'requires': requirements } + run_requires.insert(0, always) + else: + rset = set(always['requires']) | set(requirements) + always['requires'] = sorted(rset) + + def __repr__(self): + name = self.name or '(no name)' + version = self.version or 'no version' + return '<%s %s %s (%s)>' % (self.__class__.__name__, + self.metadata_version, name, version) diff --git a/env/lib/python3.6/site-packages/pip/_vendor/distlib/resources.py b/env/lib/python3.6/site-packages/pip/_vendor/distlib/resources.py new file mode 100644 index 0000000..1884016 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/distlib/resources.py @@ -0,0 +1,355 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2013-2017 Vinay Sajip. +# Licensed to the Python Software Foundation under a contributor agreement. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +from __future__ import unicode_literals + +import bisect +import io +import logging +import os +import pkgutil +import shutil +import sys +import types +import zipimport + +from . 
import DistlibException +from .util import cached_property, get_cache_base, path_to_cache_dir, Cache + +logger = logging.getLogger(__name__) + + +cache = None # created when needed + + +class ResourceCache(Cache): + def __init__(self, base=None): + if base is None: + # Use native string to avoid issues on 2.x: see Python #20140. + base = os.path.join(get_cache_base(), str('resource-cache')) + super(ResourceCache, self).__init__(base) + + def is_stale(self, resource, path): + """ + Is the cache stale for the given resource? + + :param resource: The :class:`Resource` being cached. + :param path: The path of the resource in the cache. + :return: True if the cache is stale. + """ + # Cache invalidation is a hard problem :-) + return True + + def get(self, resource): + """ + Get a resource into the cache, + + :param resource: A :class:`Resource` instance. + :return: The pathname of the resource in the cache. + """ + prefix, path = resource.finder.get_cache_info(resource) + if prefix is None: + result = path + else: + result = os.path.join(self.base, self.prefix_to_dir(prefix), path) + dirname = os.path.dirname(result) + if not os.path.isdir(dirname): + os.makedirs(dirname) + if not os.path.exists(result): + stale = True + else: + stale = self.is_stale(resource, path) + if stale: + # write the bytes of the resource to the cache location + with open(result, 'wb') as f: + f.write(resource.bytes) + return result + + +class ResourceBase(object): + def __init__(self, finder, name): + self.finder = finder + self.name = name + + +class Resource(ResourceBase): + """ + A class representing an in-package resource, such as a data file. This is + not normally instantiated by user code, but rather by a + :class:`ResourceFinder` which manages the resource. + """ + is_container = False # Backwards compatibility + + def as_stream(self): + """ + Get the resource as a stream. + + This is not a property to make it obvious that it returns a new stream + each time. + """ + return self.finder.get_stream(self) + + @cached_property + def file_path(self): + global cache + if cache is None: + cache = ResourceCache() + return cache.get(self) + + @cached_property + def bytes(self): + return self.finder.get_bytes(self) + + @cached_property + def size(self): + return self.finder.get_size(self) + + +class ResourceContainer(ResourceBase): + is_container = True # Backwards compatibility + + @cached_property + def resources(self): + return self.finder.get_resources(self) + + +class ResourceFinder(object): + """ + Resource finder for file system resources. 
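+
+    Instances are normally obtained through the module-level :func:`finder`
+    function rather than constructed directly. A minimal sketch (the package
+    and resource names are placeholders)::
+
+        from pip._vendor.distlib.resources import finder
+
+        f = finder('mypackage')               # the package must be importable
+        res = f.find('data/config.json')      # None if nothing matches
+        if res is not None and not res.is_container:
+            print(res.size)
+            with res.as_stream() as stream:
+                payload = stream.read()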
+ """ + + if sys.platform.startswith('java'): + skipped_extensions = ('.pyc', '.pyo', '.class') + else: + skipped_extensions = ('.pyc', '.pyo') + + def __init__(self, module): + self.module = module + self.loader = getattr(module, '__loader__', None) + self.base = os.path.dirname(getattr(module, '__file__', '')) + + def _adjust_path(self, path): + return os.path.realpath(path) + + def _make_path(self, resource_name): + # Issue #50: need to preserve type of path on Python 2.x + # like os.path._get_sep + if isinstance(resource_name, bytes): # should only happen on 2.x + sep = b'/' + else: + sep = '/' + parts = resource_name.split(sep) + parts.insert(0, self.base) + result = os.path.join(*parts) + return self._adjust_path(result) + + def _find(self, path): + return os.path.exists(path) + + def get_cache_info(self, resource): + return None, resource.path + + def find(self, resource_name): + path = self._make_path(resource_name) + if not self._find(path): + result = None + else: + if self._is_directory(path): + result = ResourceContainer(self, resource_name) + else: + result = Resource(self, resource_name) + result.path = path + return result + + def get_stream(self, resource): + return open(resource.path, 'rb') + + def get_bytes(self, resource): + with open(resource.path, 'rb') as f: + return f.read() + + def get_size(self, resource): + return os.path.getsize(resource.path) + + def get_resources(self, resource): + def allowed(f): + return (f != '__pycache__' and not + f.endswith(self.skipped_extensions)) + return set([f for f in os.listdir(resource.path) if allowed(f)]) + + def is_container(self, resource): + return self._is_directory(resource.path) + + _is_directory = staticmethod(os.path.isdir) + + def iterator(self, resource_name): + resource = self.find(resource_name) + if resource is not None: + todo = [resource] + while todo: + resource = todo.pop(0) + yield resource + if resource.is_container: + rname = resource.name + for name in resource.resources: + if not rname: + new_name = name + else: + new_name = '/'.join([rname, name]) + child = self.find(new_name) + if child.is_container: + todo.append(child) + else: + yield child + + +class ZipResourceFinder(ResourceFinder): + """ + Resource finder for resources in .zip files. 
+ """ + def __init__(self, module): + super(ZipResourceFinder, self).__init__(module) + archive = self.loader.archive + self.prefix_len = 1 + len(archive) + # PyPy doesn't have a _files attr on zipimporter, and you can't set one + if hasattr(self.loader, '_files'): + self._files = self.loader._files + else: + self._files = zipimport._zip_directory_cache[archive] + self.index = sorted(self._files) + + def _adjust_path(self, path): + return path + + def _find(self, path): + path = path[self.prefix_len:] + if path in self._files: + result = True + else: + if path and path[-1] != os.sep: + path = path + os.sep + i = bisect.bisect(self.index, path) + try: + result = self.index[i].startswith(path) + except IndexError: + result = False + if not result: + logger.debug('_find failed: %r %r', path, self.loader.prefix) + else: + logger.debug('_find worked: %r %r', path, self.loader.prefix) + return result + + def get_cache_info(self, resource): + prefix = self.loader.archive + path = resource.path[1 + len(prefix):] + return prefix, path + + def get_bytes(self, resource): + return self.loader.get_data(resource.path) + + def get_stream(self, resource): + return io.BytesIO(self.get_bytes(resource)) + + def get_size(self, resource): + path = resource.path[self.prefix_len:] + return self._files[path][3] + + def get_resources(self, resource): + path = resource.path[self.prefix_len:] + if path and path[-1] != os.sep: + path += os.sep + plen = len(path) + result = set() + i = bisect.bisect(self.index, path) + while i < len(self.index): + if not self.index[i].startswith(path): + break + s = self.index[i][plen:] + result.add(s.split(os.sep, 1)[0]) # only immediate children + i += 1 + return result + + def _is_directory(self, path): + path = path[self.prefix_len:] + if path and path[-1] != os.sep: + path += os.sep + i = bisect.bisect(self.index, path) + try: + result = self.index[i].startswith(path) + except IndexError: + result = False + return result + +_finder_registry = { + type(None): ResourceFinder, + zipimport.zipimporter: ZipResourceFinder +} + +try: + # In Python 3.6, _frozen_importlib -> _frozen_importlib_external + try: + import _frozen_importlib_external as _fi + except ImportError: + import _frozen_importlib as _fi + _finder_registry[_fi.SourceFileLoader] = ResourceFinder + _finder_registry[_fi.FileFinder] = ResourceFinder + del _fi +except (ImportError, AttributeError): + pass + + +def register_finder(loader, finder_maker): + _finder_registry[type(loader)] = finder_maker + +_finder_cache = {} + + +def finder(package): + """ + Return a resource finder for a package. + :param package: The name of the package. + :return: A :class:`ResourceFinder` instance for the package. + """ + if package in _finder_cache: + result = _finder_cache[package] + else: + if package not in sys.modules: + __import__(package) + module = sys.modules[package] + path = getattr(module, '__path__', None) + if path is None: + raise DistlibException('You cannot get a finder for a module, ' + 'only for a package') + loader = getattr(module, '__loader__', None) + finder_maker = _finder_registry.get(type(loader)) + if finder_maker is None: + raise DistlibException('Unable to locate finder for %r' % package) + result = finder_maker(module) + _finder_cache[package] = result + return result + + +_dummy_module = types.ModuleType(str('__dummy__')) + + +def finder_for_path(path): + """ + Return a resource finder for a path, which should represent a container. + + :param path: The path. 
+ :return: A :class:`ResourceFinder` instance for the path. + """ + result = None + # calls any path hooks, gets importer into cache + pkgutil.get_importer(path) + loader = sys.path_importer_cache.get(path) + finder = _finder_registry.get(type(loader)) + if finder: + module = _dummy_module + module.__file__ = os.path.join(path, '') + module.__loader__ = loader + result = finder(module) + return result diff --git a/env/lib/python3.6/site-packages/pip/_vendor/distlib/scripts.py b/env/lib/python3.6/site-packages/pip/_vendor/distlib/scripts.py new file mode 100644 index 0000000..0b7c3d0 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/distlib/scripts.py @@ -0,0 +1,415 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2013-2015 Vinay Sajip. +# Licensed to the Python Software Foundation under a contributor agreement. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +from io import BytesIO +import logging +import os +import re +import struct +import sys + +from .compat import sysconfig, detect_encoding, ZipFile +from .resources import finder +from .util import (FileOperator, get_export_entry, convert_path, + get_executable, in_venv) + +logger = logging.getLogger(__name__) + +_DEFAULT_MANIFEST = ''' +<?xml version="1.0" encoding="UTF-8" standalone="yes"?> +<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0"> + <assemblyIdentity version="1.0.0.0" + processorArchitecture="X86" + name="%s" + type="win32"/> + + <!-- Identify the application security requirements. --> + <trustInfo xmlns="urn:schemas-microsoft-com:asm.v3"> + <security> + <requestedPrivileges> + <requestedExecutionLevel level="asInvoker" uiAccess="false"/> + </requestedPrivileges> + </security> + </trustInfo> +</assembly>'''.strip() + +# check if Python is called on the first line with this expression +FIRST_LINE_RE = re.compile(b'^#!.*pythonw?[0-9.]*([ \t].*)?$') +SCRIPT_TEMPLATE = r'''# -*- coding: utf-8 -*- +if __name__ == '__main__': + import sys, re + + def _resolve(module, func): + __import__(module) + mod = sys.modules[module] + parts = func.split('.') + result = getattr(mod, parts.pop(0)) + for p in parts: + result = getattr(result, p) + return result + + try: + sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) + + func = _resolve('%(module)s', '%(func)s') + rc = func() # None interpreted as 0 + except Exception as e: # only supporting Python >= 2.6 + sys.stderr.write('%%s\n' %% e) + rc = 1 + sys.exit(rc) +''' + + +def _enquote_executable(executable): + if ' ' in executable: + # make sure we quote only the executable in case of env + # for example /usr/bin/env "/dir with spaces/bin/jython" + # instead of "/usr/bin/env /dir with spaces/bin/jython" + # otherwise whole + if executable.startswith('/usr/bin/env '): + env, _executable = executable.split(' ', 1) + if ' ' in _executable and not _executable.startswith('"'): + executable = '%s "%s"' % (env, _executable) + else: + if not executable.startswith('"'): + executable = '"%s"' % executable + return executable + + +class ScriptMaker(object): + """ + A class to copy or create scripts from source scripts or callable + specifications. + """ + script_template = SCRIPT_TEMPLATE + + executable = None # for shebangs + + def __init__(self, source_dir, target_dir, add_launchers=True, + dry_run=False, fileop=None): + self.source_dir = source_dir + self.target_dir = target_dir + self.add_launchers = add_launchers + self.force = False + self.clobber = False + # It only makes sense to set mode bits on POSIX. 
+ self.set_mode = (os.name == 'posix') or (os.name == 'java' and + os._name == 'posix') + self.variants = set(('', 'X.Y')) + self._fileop = fileop or FileOperator(dry_run) + + self._is_nt = os.name == 'nt' or ( + os.name == 'java' and os._name == 'nt') + + def _get_alternate_executable(self, executable, options): + if options.get('gui', False) and self._is_nt: # pragma: no cover + dn, fn = os.path.split(executable) + fn = fn.replace('python', 'pythonw') + executable = os.path.join(dn, fn) + return executable + + if sys.platform.startswith('java'): # pragma: no cover + def _is_shell(self, executable): + """ + Determine if the specified executable is a script + (contains a #! line) + """ + try: + with open(executable) as fp: + return fp.read(2) == '#!' + except (OSError, IOError): + logger.warning('Failed to open %s', executable) + return False + + def _fix_jython_executable(self, executable): + if self._is_shell(executable): + # Workaround for Jython is not needed on Linux systems. + import java + + if java.lang.System.getProperty('os.name') == 'Linux': + return executable + elif executable.lower().endswith('jython.exe'): + # Use wrapper exe for Jython on Windows + return executable + return '/usr/bin/env %s' % executable + + def _build_shebang(self, executable, post_interp): + """ + Build a shebang line. In the simple case (on Windows, or a shebang line + which is not too long or contains spaces) use a simple formulation for + the shebang. Otherwise, use /bin/sh as the executable, with a contrived + shebang which allows the script to run either under Python or sh, using + suitable quoting. Thanks to Harald Nordgren for his input. + + See also: http://www.in-ulm.de/~mascheck/various/shebang/#length + https://hg.mozilla.org/mozilla-central/file/tip/mach + """ + if os.name != 'posix': + simple_shebang = True + else: + # Add 3 for '#!' prefix and newline suffix. + shebang_length = len(executable) + len(post_interp) + 3 + if sys.platform == 'darwin': + max_shebang_length = 512 + else: + max_shebang_length = 127 + simple_shebang = ((b' ' not in executable) and + (shebang_length <= max_shebang_length)) + + if simple_shebang: + result = b'#!' + executable + post_interp + b'\n' + else: + result = b'#!/bin/sh\n' + result += b"'''exec' " + executable + post_interp + b' "$0" "$@"\n' + result += b"' '''" + return result + + def _get_shebang(self, encoding, post_interp=b'', options=None): + enquote = True + if self.executable: + executable = self.executable + enquote = False # assume this will be taken care of + elif not sysconfig.is_python_build(): + executable = get_executable() + elif in_venv(): # pragma: no cover + executable = os.path.join(sysconfig.get_path('scripts'), + 'python%s' % sysconfig.get_config_var('EXE')) + else: # pragma: no cover + executable = os.path.join( + sysconfig.get_config_var('BINDIR'), + 'python%s%s' % (sysconfig.get_config_var('VERSION'), + sysconfig.get_config_var('EXE'))) + if options: + executable = self._get_alternate_executable(executable, options) + + if sys.platform.startswith('java'): # pragma: no cover + executable = self._fix_jython_executable(executable) + # Normalise case for Windows + executable = os.path.normcase(executable) + # If the user didn't specify an executable, it may be necessary to + # cater for executable paths with spaces (not uncommon on Windows) + if enquote: + executable = _enquote_executable(executable) + # Issue #51: don't use fsencode, since we later try to + # check that the shebang is decodable using utf-8. 
+ executable = executable.encode('utf-8') + # in case of IronPython, play safe and enable frames support + if (sys.platform == 'cli' and '-X:Frames' not in post_interp + and '-X:FullFrames' not in post_interp): # pragma: no cover + post_interp += b' -X:Frames' + shebang = self._build_shebang(executable, post_interp) + # Python parser starts to read a script using UTF-8 until + # it gets a #coding:xxx cookie. The shebang has to be the + # first line of a file, the #coding:xxx cookie cannot be + # written before. So the shebang has to be decodable from + # UTF-8. + try: + shebang.decode('utf-8') + except UnicodeDecodeError: # pragma: no cover + raise ValueError( + 'The shebang (%r) is not decodable from utf-8' % shebang) + # If the script is encoded to a custom encoding (use a + # #coding:xxx cookie), the shebang has to be decodable from + # the script encoding too. + if encoding != 'utf-8': + try: + shebang.decode(encoding) + except UnicodeDecodeError: # pragma: no cover + raise ValueError( + 'The shebang (%r) is not decodable ' + 'from the script encoding (%r)' % (shebang, encoding)) + return shebang + + def _get_script_text(self, entry): + return self.script_template % dict(module=entry.prefix, + func=entry.suffix) + + manifest = _DEFAULT_MANIFEST + + def get_manifest(self, exename): + base = os.path.basename(exename) + return self.manifest % base + + def _write_script(self, names, shebang, script_bytes, filenames, ext): + use_launcher = self.add_launchers and self._is_nt + linesep = os.linesep.encode('utf-8') + if not use_launcher: + script_bytes = shebang + linesep + script_bytes + else: # pragma: no cover + if ext == 'py': + launcher = self._get_launcher('t') + else: + launcher = self._get_launcher('w') + stream = BytesIO() + with ZipFile(stream, 'w') as zf: + zf.writestr('__main__.py', script_bytes) + zip_data = stream.getvalue() + script_bytes = launcher + shebang + linesep + zip_data + for name in names: + outname = os.path.join(self.target_dir, name) + if use_launcher: # pragma: no cover + n, e = os.path.splitext(outname) + if e.startswith('.py'): + outname = n + outname = '%s.exe' % outname + try: + self._fileop.write_binary_file(outname, script_bytes) + except Exception: + # Failed writing an executable - it might be in use. + logger.warning('Failed to write executable - trying to ' + 'use .deleteme logic') + dfname = '%s.deleteme' % outname + if os.path.exists(dfname): + os.remove(dfname) # Not allowed to fail here + os.rename(outname, dfname) # nor here + self._fileop.write_binary_file(outname, script_bytes) + logger.debug('Able to replace executable using ' + '.deleteme logic') + try: + os.remove(dfname) + except Exception: + pass # still in use - ignore error + else: + if self._is_nt and not outname.endswith('.' 
+ ext): # pragma: no cover + outname = '%s.%s' % (outname, ext) + if os.path.exists(outname) and not self.clobber: + logger.warning('Skipping existing file %s', outname) + continue + self._fileop.write_binary_file(outname, script_bytes) + if self.set_mode: + self._fileop.set_executable_mode([outname]) + filenames.append(outname) + + def _make_script(self, entry, filenames, options=None): + post_interp = b'' + if options: + args = options.get('interpreter_args', []) + if args: + args = ' %s' % ' '.join(args) + post_interp = args.encode('utf-8') + shebang = self._get_shebang('utf-8', post_interp, options=options) + script = self._get_script_text(entry).encode('utf-8') + name = entry.name + scriptnames = set() + if '' in self.variants: + scriptnames.add(name) + if 'X' in self.variants: + scriptnames.add('%s%s' % (name, sys.version[0])) + if 'X.Y' in self.variants: + scriptnames.add('%s-%s' % (name, sys.version[:3])) + if options and options.get('gui', False): + ext = 'pyw' + else: + ext = 'py' + self._write_script(scriptnames, shebang, script, filenames, ext) + + def _copy_script(self, script, filenames): + adjust = False + script = os.path.join(self.source_dir, convert_path(script)) + outname = os.path.join(self.target_dir, os.path.basename(script)) + if not self.force and not self._fileop.newer(script, outname): + logger.debug('not copying %s (up-to-date)', script) + return + + # Always open the file, but ignore failures in dry-run mode -- + # that way, we'll get accurate feedback if we can read the + # script. + try: + f = open(script, 'rb') + except IOError: # pragma: no cover + if not self.dry_run: + raise + f = None + else: + first_line = f.readline() + if not first_line: # pragma: no cover + logger.warning('%s: %s is an empty file (skipping)', + self.get_command_name(), script) + return + + match = FIRST_LINE_RE.match(first_line.replace(b'\r\n', b'\n')) + if match: + adjust = True + post_interp = match.group(1) or b'' + + if not adjust: + if f: + f.close() + self._fileop.copy_file(script, outname) + if self.set_mode: + self._fileop.set_executable_mode([outname]) + filenames.append(outname) + else: + logger.info('copying and adjusting %s -> %s', script, + self.target_dir) + if not self._fileop.dry_run: + encoding, lines = detect_encoding(f.readline) + f.seek(0) + shebang = self._get_shebang(encoding, post_interp) + if b'pythonw' in first_line: # pragma: no cover + ext = 'pyw' + else: + ext = 'py' + n = os.path.basename(outname) + self._write_script([n], shebang, f.read(), filenames, ext) + if f: + f.close() + + @property + def dry_run(self): + return self._fileop.dry_run + + @dry_run.setter + def dry_run(self, value): + self._fileop.dry_run = value + + if os.name == 'nt' or (os.name == 'java' and os._name == 'nt'): # pragma: no cover + # Executable launcher support. + # Launchers are from https://bitbucket.org/vinay.sajip/simple_launcher/ + + def _get_launcher(self, kind): + if struct.calcsize('P') == 8: # 64-bit + bits = '64' + else: + bits = '32' + name = '%s%s.exe' % (kind, bits) + # Issue 31: don't hardcode an absolute package name, but + # determine it relative to the current package + distlib_package = __name__.rsplit('.', 1)[0] + result = finder(distlib_package).find(name).bytes + return result + + # Public API follows + + def make(self, specification, options=None): + """ + Make a script. 
+ + :param specification: The specification, which is either a valid export + entry specification (to make a script from a + callable) or a filename (to make a script by + copying from a source location). + :param options: A dictionary of options controlling script generation. + :return: A list of all absolute pathnames written to. + """ + filenames = [] + entry = get_export_entry(specification) + if entry is None: + self._copy_script(specification, filenames) + else: + self._make_script(entry, filenames, options=options) + return filenames + + def make_multiple(self, specifications, options=None): + """ + Take a list of specifications and make scripts from them, + :param specifications: A list of specifications. + :return: A list of all absolute pathnames written to, + """ + filenames = [] + for specification in specifications: + filenames.extend(self.make(specification, options)) + return filenames diff --git a/env/lib/python3.6/site-packages/pip/_vendor/distlib/t32.exe b/env/lib/python3.6/site-packages/pip/_vendor/distlib/t32.exe new file mode 100644 index 0000000000000000000000000000000000000000..a09d926872d84ae22a617dfe9ebb560d420b37de GIT binary patch literal 92672 zcmeFae|!{0wm01KBgrHTnE?_A5MachXi%deNF0I#WI|jC4hCk362KMWILj(RH{ePj zu``%XGb_8R_v$|4mCL$UukKy$uKZHLgkS~~70^XiSdF_`t+BHjmuwgyrl0Sro=Jjw z?{oin-_P^UgJ!zA>QvRKQ>RXyI(4eL;;wCiMGyol{&Zas_TfqYJpA{+|A`|xbHb~c z!Yk?TT(QqI@0}|a2Jc_%TD|7M`_|m^W7oa+Jn+DSqU(n%U2CKVT=zfVD!rr9_2UOu zth|2c(2Tr9(NA5t<NC8tT~V9-yW`aU+CSkvn$lGJ4S&8-`#yiFwJ+iMhxXsq{t?f! zPq}Iz<MEFt;9pBTU+2#|@4q)lW&T$!@OcGco+(9m^+zAvm4s;*%%&lx3_&=8m}iaH zUtWi&6MyaW?lHn<K}Zoy6w&__n(+=I7JY33Jw5dtkn&Mx{_KBHq_Emz5@t}qXA*wp zqrkWR?J^0TbV1nmsUYNjD{1iSzP@kuRXeq7FvR8I>&2BDL`2=vh9AO<+De^2=$}gv zmS4YS#XaIZf{>Aqgm(N*!QV0b4f^Ln)z=$f!r^I1aH3)=lNe*rKaU_ZU%zJUntKt) z+ln>|cjCo%Iii5`T)$@Jss{o1@0myk4S0EXeFttfQvct-{|_jzNbRiew1NS4Gz_05 z6uzl=d*xc2AbBHRr%#vck#O%NT@UJz5kcY;ANvDFj(j-FNbm)xT=WR+p`nOt_W0P8 zEK0P8OnSD^?h(|A-okg706sq2ikj34TcA*nl=b=?2UD8I&k}qKn1+r<j&QR$c0Wa_ z>28~3R^yR!lj^nQw?s+{dbRh|=(1`mLGGLq2+l*55pQpy9$cP}GL+h0rM8RRhgu4c zx}%OKT7nA!v4FXBT@RT9y41`3IS_AnE*m8XPb*%Q(%Yx&^5HyXQK#aKyQ8%hr8Zva z2W*_ct~S75vx4y|(HP0bibhZgHnoctqFDK`%N-TRsa>Izsz~hz=bl$<ZTV4)H~zHR zg)(FH=$eCIUaOzA3=ssy+pVHfLFl?vHBeu&w*5c~wfd=|Zgy-qy>+9aw}7MCRoLu4 z?|8B~xEgIzq)s2ZjiSAs`QGkO3TmtZ@Y4nkR5g3YCJ4YrK0GB~>d2Sc^UpnOF6;>j zerni!qbjs1!0tswy!f`U&F4=CpFsIO*7*&mOQdwBzVvP_vqp99--U!4_b@T7+#Ox} zrDjpQT~yT4(a7%Ys#?aoR_?U>L)U{qg*}QCXIB7;sw#BqIDasB-7JH5fPu}gXWPIS zND<4lhXTP@P<X`K?L&Y1Sd?Set@1vY?cjXo?vrkdc;mh|4g-?<QgaO|5-d7Uq?AQ~ z0Y6JaUxBCGZPEvtrLd=r(A|>;jFzcwOF6oJwM);=0wVHNLdYC4fjm@{PtPtTw(Sb{ zNOnDY1_8uVB~uyl8T?0MWB86>(JX30dPqQyTtF2zdyMpsczx$tbiOg14l50Lr|||( z26Gkafq+t)m#b$_rAkgmO7on)&}uw3_(JKGdiE4VqgcDVG0(YLN<pETxv)8S3@!Ju zJ9~A#ersMM4f+D2F3%|%Iqk?9?BsCQ0xnd#)Q@7P27K(yd`?D1%$uwhO$S)0M?d95 z;tJLcMv7YV?3bwca~S3*^B+cHkbP(*PUeZHjKppuaTR;jNG#=v`;A0XaLNde5G~DH zLQ|uj?Ll3rCWq>p;tK=<;JJV<0x3P)i8KVWg3Eac>rsLVDD)X(b9NGWK@OJz1$vbe z-a66{&N0e`bmFghcnvo4VhT7Sh;|y%=NJUW0?=J8DgD$Vy!JAHD$&XMht$8~%t)CH z($2A0r~%C<$nlBdn2^oKB+OvMx{@8hy#}!KJ~9kdt8H?dO}!L*hq|=d7P1HTQJKsG z-YPsAZieWo44y{R0`{wmx*mBX$FVm}KAb}pjG(edC(0I+eOnpK?Ir3<07vWPs2Mp3 zJd?n`z!2c5d|o5pDyZkh(T=^TlyD-M0EEmn#i`QgiG+QL1kqO5T%)8SHNcjFAu2Jz z7ow)IdPrDY|2Yjw$P^#@<^t90tdZRlrK^xdo;k77@kDd5kz@4<QjKzeTANvJH3PvU z6hzW-4z(Xps2=DO;#U!VHzv`@;n_9bn%rdM5R`=sfR;X2y>_Jl(tYXOd|cLd=3%B8 zn2SgxXIs(5HS+X{qBZ2wQbH5uW^2^~A3Fd@qobnXcC_&b*k8+wtTt=I2#4QbV&Nia zaCORVf;8m%L7F}MA+YLXUO@@HPZVv+ZUz`_Xf#aEA0kp_X7x#WDLh)E*k?z=T?qTy zj46z*MElivVRKjqNim*W-%yY4jAJ}S9-|qgu%}9W&mCWz-88K3;!x3EcQHduo8>;T 
z<}1ytevOPhB;Tj=Y^x|+Rb?dH4MFT{OBM3Z`vW0cF!l|NsRAHMBD?U6`yAz2!ShT< z9-?!DM476pBD?8XQ@ouX{XDZBb2O)i!87Bf&v{Q?8Qg|K(C0qZb)Jg=^D?8qRwXlJ zSk6;-xmzX1vs@8uPG&j4vl#F*z6U-M?j%zAmF@IoKf;d^?!a$hbMbb12D_;!V#PHm zied>c=;}+vE<voyb6^}r%FURNEYTYG`%+JS%Za$!rSb~Clc0ppq8OF;;CB+$BPwT@ zh!4f(pt$fE6nE%E+;YScp?raec%#kF4xsP)J2tokDEZj29?brniFD2;`fkEk-_6^y z4IqAhfIW-ZPd;1_U|)bWj>YoO4ep_&UrFY3t+DH%BSCbm)}c6+j0Jn>N^M7BGX#qJ z6Hvk(m9p4}V+0{8jD(zFKS8jtS$hN!lAWsp&^$gyM-<QG(Bet<OU#>!*M^)!*>;{Y z2RXH)(2Qz|-I9wn_7@lGi+H<yK|+S@$|W@I+73*8PJbo)C0E{@ink-`CH+WeP^mC? zb+9wY-wM&mPC^B&YE^YeR=+CQFinnN`A7_nT&fhX_eKM}P0I_`As@<w{>X-NZON{r zLN-{@jx=_OpajgPyckT4HR>X}W~*_(B@UOHAsK8n;iFPlO|esiut|WCQYu~t6fj<k zawg8gU|5L301=YoXD?ETn9ymy_OU9wRVk^-3KqyKdj&t~7eI&FaLqV^M#F)9PO-OF z9KnLf0{k-AGAgN}SFv$LA&H=0{kpBpPL<uuZn*}uF0-lStCUQ&JgCgKs+sPg!LhRh zakx6vH5!UR`D!VR#jXNes#<1sr%cX4;z$*l`qOQ!d;*nYMQo2}wOPuN%U7FGiAl>) zZ7A7er9@~QhpYleL+*4IHdh9Uy-r61t;4`BVB0b5H|XjFr}z-u2Xb$Yy+i=D_OLE~ z0;MY}Qqjc<kN|Z}-jF3ov+_T2?6tb(_^dTU<@jCeZE~~Av9}A-sEZ~nL=U0pR36<7 znXgwk#nKwgfw$JUyTn#)Ix&%Buf@l{x>gX7)p$?yu}|=h3B{Nykj=3dWTl)bl=FyV zFaB@KZ>g*86_$!=YDHYWXZ1JBApDI+mXxDw1;6w#BmuRwo*KgWY!qt+mnT|UgCK9I zcCT7t4<8l(oc}dil=-a|9Y>3fJNBBs)1nsMBH(qB@H#HGa=Z@Zw`e24Uz~A?Q)CPR zG$zSOm81Y%YG41LKOmP74+>Han|}kie>{8YIxLWMV9Q<r1t4e7h*q@~+9y^;11!6k z<aa!*OIL;LON&!po(#qqTFLH28KiN%h|%#U40;TuQ~W^_qn1_4ZX^J92ys!tj!Fuf z@2+m$Cpc#btvi~_Xco&_iu`H&1T)5cs=KW=O>NsrDIu$mJ%1x%wDVWfNNJVEhpc|3 zh|<{B%MwyTV-_!MEj+oO%GFYK5WHeH%PlVXkhT6o9Yn^)FG77w0pSEhKt0qFPf@Mm zI%sR^MfvjyEuW{VR<MsQ+T3lT6?K`F8<Bl>{e{)Yu<_kxh0RM_+2pB$P*)-n{lpa3 z4IK0$s*8<)BpoDNc>CO4YbMtBEl1t!$Efe-A8EOeBDXjfu$m%4sGn~a>d-VTLvC|n zVX*|%P4*SUiX6|X9Vs_EeXJP3P&Dex4S0wYuN}M%-JP-w2qNBccgvayCA`9%`sH?g zv##g2prO2=Q9!+_y4A?Ld{EvB8x?sWt9C>p4@Z&}eiytn&t3^pbEmp6&sKP*X-S^_ z{2?eZ5D-ln@*&erZ;NYWW)g2QVx=!+W?eHppk8YEi_P*0J)D+Lw6V*e1Bsc*93JG5 z{(g5W!TwdvD17@3y{~VR<%0aRUicn$-lu}eR4=xxKj=mISKg$Fqg!H51nmf#wIj<S zv-P`MBeVOK(JzK0etYqolz+f?xXf(z)Bp4*@H|HO{ZLmy2cEuQ!C-X_`plVt`y8gQ zESl!{w6G7$vDg$7O$nG)=T0MTbbD=U(nx7Z)&2m|se<asf`W04+E!CMUL1=_K)yg? z=mLqM7FUe|83j!@NBV1FbL`KcS7l{L_rD>aR4j51QwJY`hM-i$-ET{y*gvDnsDP0O zCPz>eV*i0~afNN|FkUHJhuF}>ST&@g`|VA0LhXeo7oY!Hj+@uq94Sq=m5{At{Rnn| z3O?*^6?3D)F^FAl7}O+MW*{m(DiA&7W*fwqdK%JrD4W3Rr6H<q;muk=Xa@AvS<Ho^ zfFWo(j8-9j_A;0Wvyj@Q+1ck<i-)eQ!o2f!B@09BRH<!|m7P$F4HF9KSxFh$iFwsY zBE6av&k7sKUYcniKsJ)ARaO0hHIap68lU=JLvvAOqUR#s9Fk2^)_}yTyqP1J0KlAs z@*(!@SVYx2L0qM}7n8~uxi(7>voK4KV%Gulgj7C0j3g6R<y9#MGT$yA(F;$WKVR(4 zT6cwfNf+&vA*_wcJ-p!nXc+)lzuWQK+N|?sc00Nh_8j#S(WaK=z;dFcMZMi*2ZVy% z@DWIx01`_vyMml0j>f+uR=wmty#|IOcWtlZvDXk0(5KM?4%Ubt-YN*!Y_ghWnrh?u zpFpBtQ`@W7cE!Sga#we+St8eV3*v<Rpw8yPlkPvROIKUY!vxc!rKznHXw5&Q4dD}x z`}BIV+UoZ9uD=^ZkNa8sOt7<${iVccQ?vL83BVO5Z#@6>HQrt=&(FRjj;Gi=Wps}? 
z5$vLS<BcXX?{*!^hPOL>#u2^>wX5E&*y}Xu)M6owZnjhR*w`rGk8WcvAVO4_2&`j| z6V!aWOO573WS^Iuu?8c?sdYlR+@?dhYzH`*V>*f@r+7oLlqFtUEagbo@zNbAoeVPU zRWyJKU%?B<6eF-S%Gk{QiU+j59AmgEM9ZAZxaC7AwlD<_QW#T^9SWnyvpr8z!VnVu z*|3U7op*6Q%&Kk$s=El)BC7F>QcZert<8OjG}~6x{2tbf3GP~hAlN1LCaQpTP;KWh z;#sBE7GO~fg(@&-&s@7ldN9C#fbQTVA1lZEpnDx}xtIb0@#%z?Pg5=SCuz#kQuc3v z*48sCZ?kj__0DJl%~JUk(>|f4J=J237=ZgYpeL_R%wi=27`2n>vZ6yTuI`Yo3@{CK zs?da-K8$aBfPD<Yf;6y4{g{(D_uE=^7)5cddLv<<kfz`=L8vMA+9YVpM={A`IMC}_ zs8U{Nke%bObl+>8rHvz%He`x;ZTQu*S70{6jBB}qOd9l8VZX8^G5!~*UMJGBSRF7< zkn>6esRF3+P=sOJsIXx?k5lP)6blRhUc|BvGWVw-yJPRL0O?HEJNC{*wi<|n;VM>R zhr~f^>@FA)1VpqzlOG0X=?^t>v7l7+iZdV)9ebxk+ozn_j=eWh<~G0{0<4+r0myud zAW>$@1oIuYW0>%cCO|rRd-Ge)pB~$MrMGt(EO`md*j@?ogxS=62`uvr@J+PwRs@M< zR)U6DmKC|FgQ{SkEM8`X#dn!CWUBPD-`~au0Bk|-R>#&$#K8ef%CtEl+4ARFW0Me4 z)6_d`>goJHD%IURhb(BzDPpNC&PwuU6Iwn??J2#<S_fV`;Xc0Bsdm-fk|CMq%yyqz z^AF^qkuQx^TVtnDe#6NPU$Jh?5(b{J#}Eh3H8~ny;k8>qHQN=7x?|7NYjs?e;`uF> zLoJt5P*Ws#J8>n}d#Z)kT7X&~h7l8@BF;W5=Z%4Yl3eOs%uF`R5iPxLdWK}ty*3Y& zn{(&q+65OTC=cb}^6@{7OyTB-Q$Q|lI#(mXbL*Yz9rm6Un`k@VLKC8BQRhM;qvD>@ z0;^S|BB5wO%&FdPi???vDe@T7$7x9a5bYx^-iC3Cp3P>K{syyO!zNBOO(tP51WW2F zTBOm-wUA;kk$-0eT7}GftoR7p=y+Ozs%7>UWXZ`(G^k1C-Y2(zCD%GlN|{~C^s_%e zPMM&et#k@iel~tGh+1Z^YG{7gCb#zjMjQEpNgV!yP0W0enkl74%W_DQHs(b?>z&SJ zeA8UC=qO|*q=n<jmdGp}+9sOYMa^A{CSBItEJP&uaBqgu+*?)2iLsU;_nE{Lxz8+p z#M}RmMEfC*`7AwwOGo?nP@xiKaw`0Q@+8>5qz=ln;8%-QK&2+Bp{);KX?uNf(Go<6 z_p!bo2*OT=y%m;&5PCVCHG=2SDYqM$fYU6#z;+Wp3y@Z&#<j^lRz^X0bln&=wML$? zp+p)63%t$8#3aLr4!O;$Vr?&-q?sRjLu#aSgIVhaS)2lDT!N;D(%9Z>P!P>Uy@r7A zBjMc!iS%W9QcL_fLYS*GQMnm%0%F0e6o8<TlY@$XKxeQapiGr|+WoQkhf4M$kcg}{ zh0K07qKoS_N?M@~BgiQB6v{GIN-Tn)N^)2mTj}?)oAZtF5tXi>TB1}7%r8mN4E2p0 zJib7#R@kfq0rrB8w;&f>Gl=g3@_RanoW-u=Rq<)_I3R~awbGt4yDU!kv)z-ZTjFfm z?Rc`i&;op{20Z`;gb%g%bZxj=mJ1bTh>wl@3QefV#jI6h7iitbS*w6(n1d>4o*@em zOfJds^m|m7U@$*|#P>r{wMQJvi-6fCk6Php|Ni$RgRvPzz(I^f^R@N?iuJSe1eIi| zPH>AEtFzS*6vPwz$0wJ!M`5w5g6<#63i=4SM^JTPPjS(6U_xn#ADdWMiLJt9w6EeW znz>Me2kSiQ*=ajwAY8wXVrc(e`eOeOh}N3o#vH^*XXSk&o|)_3FFabjiy??Xrc`vW zyTJ9}Fk2{>k-lEVbQn5#gp<wV5%=9eywl5W1iB!tEi{(3jsu>0cCg(e?0kk+moLx9 zDCnS3@Oec7%Eq=66kCoC;@Q&KR*DFj*uB(DFd-H@4^z|*8cREu<Hx5LEyP1F^5K_F z=rlOb+g>bnNU1(%0yLY9AMJW<(y2BzU8y*Wea_$AhEhP^l}z=XRlMzTZHGYcpTh{p z(g2@eLDk#NR$)J(m3<6^V^2aJ@>#CFb265RJL3}|`iFMYZ*~{`j_ah~B1XR@9r&%; zn(cJaW2lus#<lavl(YOX=`?>__W>TyJf30$i0Tz~_Tp9bT6YR~heol}PVwAG8ciuj znhF2ypv0ZMpkOqm3%}`Bp*fn;jSxD~u-Pl&(^$jrXvA{eu)yls8>s_4C;~+NH?*h< zvrhH~L<V2})Ptaipj<)#m~8<g6HJiGHa6(6NM8+*{<+?{BL^1w!jqMxxM0p!7IiC& z;>w~f%|d%2@=TXV)@nI^k60kb*N9ij@%7>;wgr5c7%bNy2!-Yzvmm@?0!_7{g=gf7 zUXzyoS~^;SpxM}<C_FkV0OiKfa0=0phc~|}c)%w|9Sym7hha;OS2`a51==odmYK`Z z(1W1NhKP5Ti*sa_BVH%74Dkvq${pby$WiQ#JHp2R6ZOXND#&j;W36}&`6Tu_9zCrd zNBB29-op)eQEwN4#h&JgW=D7%0?>fuzw}|+lHWEDiK6|nI>gGgaX}LM%XMiF$ZVl_ zm&`InZ#n1yq_Sm}>IjcUiRW8|W)Ryu<Rfh^Eqo+*{mNeb4eSMayQxC$MjksUeNk^R zW<ny*u==;j;-WcVn*k|K!=igsGY>i4zoFv@pQU9;ZI|F^cn)QST+57pDV{0DLl%GV z6?8glUI>(F&)*Sl1d!a8Isk+oERiJYN}eSp_&Rd<*`G8%&M@ksYGwcpOw`&eY>XV? 
z$p;4~J1N;LXcI$e!LvO1U;2~B%59mHY!U|XOCdH(W{ShvJ(hkZu_CDD2J1i&T5Wr2 zGY}KsXO)C`7DP79vo5UH^ptjt0J0gE+hL1THdvME$_AUVAy+AP^0jct8C)$uR4hP| zg=e_6AAJ7&MDRIQEHo*$ySY8i5qS&L;C8o&bysnYcsH3vNWUq6k;pF1ij;jL$DQkk zN6KK;+HnO+01X?SNaoU~?((y5Ad#x7cqyuNSC0pCk=^HK3;#yZW!lfwIOaR;-q3Vb zPJ&Gx%I$pC|Aa+je(*UgNs?J*ZXv6~;0rhNIB5hbU_WLkh`%ejyR@;W!vG{xnvr$J zF4Ukbv%4>eBkS+uHaF<n$}*cWL0Oh7-{AzO8T$)EfVmoF8_ke+YHbI|vfBlmj9Cbp z<<6{$vy%2XLjVr4HNhGiAfrNBC7X{~wMu@T_V$F(ya?Yf!rnal_y!DIF2)SW6bTpb zC9B<#PD;2PuS(=B{XTh`ez$)>zq^mq?}20Zt=alyoIfJu8d0-#`w{*KALfteoB886 zujBE|<KZqmAVwn<RwY84Z&6+!2~Q==DDAdhCDK6wa7u*GRV$o`K|tXfS%$m}!ANWf z$p{yykbxv7!Te6xj_rv?SJ8|D##>hS&fV;pzZwQ2%)bXmL3sK@X7(lx#lu+Tb5Dna zAYEz@S1%&c>e-FFT+vdkw|{$e|65G0#|oQ$^p8dH0><y}8F<=Q-`NH^FOHZcU$}0~ z*OBtS$rpyL&kPM+3@y<5&J#$hZcQmgzEEbB`v}%-Eijc;x3bOPF*GH0Uwj1Y*NAIn ztCCT@MwH#C$It$Z>{!DrP;Bf`1gqc`^E#eN0o0>o^e^Zt@(3$**w(;FrFl+eRh~0~ zzx;M=9dl;65uQSC`jnLn%Ogn71na>I2X?a+J1JkQTG6#a!CDdYTt+6hzg90WN<Vfi zvBJ#ZMlf})t+0r;&H`#`n^%V*=K?eGh?7hQL)H0K%X@|P>CDjqtmoUYw`08Pf5E#K z8$H$<Lj<GOBa4_)*{j}-IgBY4o${qVaarUxA!5B-owp?`Qo05Ea9yOh#<9JTrGCh$ zDpYC;H*fH4o~wFcazw4tyLGj?Am*u<@dl%?m8t{^evZN|Y$HdZ+h|=Y8PxDkI||y? z7vH<~$L%nIlspABNf2E@da`qOkfbB~nnPWLiTO@Fo8sleSX0^&!=3;>P@#(#+r{C0 zKQW-buO4ClWJJTpMFR0#SoNSk2V?aay`!1sHZ<^B<Rr%uy|~iuXt)D`M6qwPSxAbF zM$9pC=UABML|132^YU^Q-RWDfAn3Wdp9c*2a2RejwiU`GY9v4l)WtSHPbnO&uC~j4 zeWDv>OqDP8iB|XD*Igf(x-PQh_fB;PFqR*&3evHliCQto#t!)eVL!tB<paEEyH-37 z{eftc17fzKSnK&&)>OpoBRH`T^<j6=R(OQj(7HuxFh^f)*H=5q20Rl@z=*8oFldHi z-iJv+fM?r0WV%LwC|7?dM}KHC%T54d_ivFuP^o@Fd;Wzd3wz*vcH(Zn(E39CT5W;E zoB*tN>QSWY`e)dh1(8C+ox#sQmIZA7vw{Fj$vtURp6$*B@Q=x2yA9D$eaI$+;GBiY zoYb;y5C+_j<;j+vw7;dcB*r`0hQzT6Be~maU+Z8+kXgyisOnb7Z!7HBCB=%!R94t5 z_qDGd;Sbr8JGHd!g%N*~TtYiuf|%=P%d#-o5O<QBro_}_Q5p<UPE?i}HDSe1+d0?$ z3M3LILX8qf$qeoj<sx>~TKAFDV(Y%){MU*_Nb9~~6jotwSG#xzlB;1Zb_Y&hLlnXm zpW32qvMQTw$|ifur_LcQkxkB*UV3T2kVSlL2XOwoZ&1%SWtkeCo;#%TkuBr!dJys( zaW=%wm(DLsNYMJuTrk3*`6v(xGgv%*`Z}wg{REoKcPD6q?nO%qn;RRr*P+K9UDMqZ z{t}>VVVVYA4b5UfWcyc$aO^qa*kf@YSwAwr#p8=SF_h9nt~*&angA4==9sXv+R!YW zLU*kr=S*ZmeLmDpps)mn1U6>@sykDOc*J6|3G^oikg1aO@S$Cr06;$u00g<&gMdzO zpgf}6Rxef4(_#`c>*l47b2e>Fp<=aRJuPN2o1$D4g@PKlrV_!lw8m$6fZF<ocBetc zXt)E#{0k5+JbDcet4~r)q#=_sS&m2Ua><uQug|EPmpRTES>V!!$`?nkx6`XDvY@@u zsafE)Jj?ywnzrP$_x#5+?ZMcvjWn#UU`J(7r(?9nckrF~xvRx-^5#{7I7(d~1asO# zF81%3Yp}b*(ol74Xei4icL6d#0R*d5cM;#Np9Y)A7|fi{7_954?;|b|(_qZ~g!CT* zQsxF#4vlO8eF~sS#fC(L_ES~rKm~usW_5C5-RZ1E&(P-0b0|g`my1ybfh3KOrce-M zz%cw33YuQsD|!>#<Jt_l?;C0OV36kkqMecZdZpncKRwogMC~x;O~V8sFJJwQ+Sb3f z-su{|thA?tWq*LJK!3o=r3YqoxLRhat?X5FB-Tf?WI@AVg4tJq#yT2)M#y<P<mQ5s zE(F(nUazxnun=kx0a>q;hmxZqh_GXC6w1a6oN|r^KVl+Y=7S>_4GJ0$HzSIV(8!!z z*kq=|Rig0ZZ1A`8h*eo@FJ8nPTWHMG)qaU0-$y7SebtoNfTb50Kyd6S!$>(AdlBJ5 z#e5BMuU2%Rm>(T2fKna#PY-nx3=jEDWhM-=YaDxKI`%Zf=;Cc}s+)pDTd8{-N;A!M z$Jc#9PP1+1x|xD>937`)iQZ<DYul|TVNFbp0=MWK?y=79#|~g9RheUt%yCAPsVL~K z8ui8+r2uwnY*YR~`dU55J_Jzg6%5L{d6scjSYFrlQ1P2|!4W2BjL4kv`}?SoHk;=* z>4G}P%7!5eN>wUt@Un%jVaO~)R6RnXO8d9sBH|NAcp(ag#fQehQm+4<;R7KnxQhnD zXE2h=7416PiiwF7{<Dl0=IXK_`kXz4!AtH!bF7Yr0Ck1S3>(BP*u8^o4O>wSWr*BQ zD>DoU_0qZL<tw@4BzpxJt6)BAr<EIZkSd+k*9H4W$uPAnSYnJ5AM>6Cu(C8*sg}^l z&_C=cTa88R7s%F=LZj2<2>%H$7$Hw*Cx_r1>&_`?AEw@&1^j8>ITg>sX4tIccuK9a zMx8gu2`4<S3(+184rxd!A)#G6v}s;WZeycsBqhX*1c4GDuyRPkG&W8iMQNYueAM=% zJ%W$se#EzelvT<&8sU}thshBQ5(!!XkR3rYSF1J&MqtTRf5~WWCG%4*HUV~7!_1&r z<(2JFklNX^h-;NgwnBS??{MfF=11REMN=pOSfO#oEDMW95mAcvG6MQ3^|4(@g#Kmm z(F?3*123-(erX<fi7fL)y*Bi@Q2$6g4>T6jRZF4>`4Q|rW`NC-@2yU~!X}~U4*;J+ zMWQ0EDR8Bi(4ZYx83}|MNy7hYXhA8b6961Bvi#W8Ew2MF@-=7`A1tw92`&cJEkrRy zEQO!IUFsGh8Qw<WZG?~Q{v!t69?HdLlZ~lL-9l|10C-{mU>_`mRaN>PDvxa(h<^w{ 
z%GhjVEJev4b<1JAT}MON$9w=#w~&$NjXM0~M}4e>M;%YR-M|ZL#v98+5T;;t3(>!1 zGWFKj;-?5FLigZpkhXg$iCsEPwMI7e_w8n*Z-=RAz<vmjfR*wT0TnOn#g5!u>p=7y z6fH-2S4aJ97rkEA$K)jD#^MBAG1adYxX+7|1Ilz3qM?pCa4fd35yX~Wm4r!f+ZbaK zTuUshMwgO*I{F0@@Ntqm55R`ZaxhfXE@J{NTMf-^6DHtXW}@iTs}i$t9yB(Zh3k<6 z+1Wpl^x>O8MdV8-x2^KCDs&i$n||v&N)WVzfPUObxuuR)(pnq9n5}yD%Xn~SIlo@C z8b#>YyAZ=&`N!%-GaxRE)vnsr5AX^Bv@LDjv5Kn17Vt<IcT4*r_2cqTO3`;vd6b@s zd2Jsu$wPS!v0cz5V1w$Swy*gb3zivwg`~@VoywJL(Xu7a#Q|JngOBH2WmA^2X?5F{ zBWT2&wk@|~=+B9k1xbEDs{9kRh_|2Q>0ni2Cg9Oz?v@URPAs{UvQ^NWZ99li2<z)s zvDYwjR3$|fq$y0$K&KVe0uL0wl$0K#^CBJ~CE0M7)QhNv*rYg&9@UR?a?KBBnNg>S zt%7|98>Ykuw}5Dz7Db*x^a0c4;OGR46Fb1#ewb)8->So_C*9BHoI-424{B;gJe|ED z?VN2!MZ6wc$jNdctiT6LTS3Mg6Udm4tsLNtZH|UG+M$-^p%U<S&mT~jS~kUaW5(N5 z<Lx8kZHDo7%y{z{ZwHOHQsZrx@m6lU{j2e|q=dSOD)|{jfLu1B64wbg1<Bt9P3Tty zbwlDqb0Xj*%>za+y_boMh$FeKZd!%Ba18hjG|eh^3HK4rs@M4#vcsWYN(-=S2Y1|f z<nl8+mCJ(I4<dHv-S;mrPC$i3*v@`og!RB+W+R`%bT$<u72^?m`b9@T@!$q<BSdy^ z6+L%Or;a-nT+UzkcsLbY%wKqyo{~!lLQsonSnQ->AdZwv2oO$+Fwye>W)CTE2aT+q zl(K_HLo|gl9+~aIJ_JGWyvBgsnHV{ah8DEV7>1Z-ND1V!^?49VFQV*f5shR0lmU}K zRyWEskTr(pP6Jt92m1^Rimtp@Eg?HrP$@+Tyfpno{rJx0s4h+N^D_`S34SiPoSy-X za>f!bPl2LzIWN;WoHVY_!GCd?F$wJ>Hx0Qni(E4t4UeI5m9%{uspw>F?-K`is`Inp zk?^*Z4dEIof1^geFnYbU2DVb{9B8+5zmAZJdv=Vc9k#wdp<2)dP99a_6!oVxhdB0F zO`0pRsP|6zc`UNQ*1<jkgK;l10u-&}>M^}KP7Yt)GCXPN7zLjsgE^mp7F-gcVc9_& zULm}QE%2U#8ujCe`IKruLZX%;`LVrYAsb7<@*5Jv#;yd7Y5C%3kAsgPJ=qgjXZzXW zFLcCxbO(js<iD?C*7UQT_yvZERWi-hu#`K%HcmAY3wyJE0$avz$-btOwu{M=TrSy0 zx{)|KNKf`~2`U7V85|#qs$#GEpr)?+6n(r9KWqn~OXh=x{y;FW5itz_*f$Sp2YvX# z_O-ihtwT*iF=mMIsMX!K=4-j+394t=QgLjMLd=n<32s*0e<GV=$>luc3VKKwJ&Sz< zkl;cFFd}gPPAE><2yS&WoJRlb+<;({*ZHp^p75%IUj7`S^`b_UqZScQLUlW>R3C>s za8NI5Kr|wtkAI+4!*S`f{FN19_oX$rvzso!@RcV14KFkGn<*QcfG8zRf8QvNqLM`v zSD%$qioK`BOe&}PxZ*v{OI53nYcEB;9jifu`r3|-c&r@;e=L<coe1IWuxg)0z3p`z zpuHgh&^`dr&H)VbybFzi8-*ZU6XmVOV8wLDhGB(G%)$<kW`K0jhS*CqqqnkMU<;#L zK~%nX{98;8Sd=9?8?pR6<<rSnGFiZAp&0M2cqJRgPZF=3L0F8$1S-4<2viwv*4#SH zQ?V^xVRPHx-1Q}dc!o!gk6iO5KQ~}~^A$uT>aFi2p*&~>%$L7@wx4FBc;T5U<$x7+ z!u70S6#zpPHX3FW_>jRXC(VekQ3RL{!jPPyk?<w(sqdqekfUK5fP$T0fkm?{r2c^= z0_+Gl2W_YI5^1ABIu3O3cS!PA*6e&Wk93mB;F8xanMsgI6N0a!0Qe+rOXd^pNejFS z`!0U=%GHA40ai2CUF&E6hL?!dOX5*IlK*bVa^gbp6%>&F$4VcIU`+C@D(OJ*Wken% zwBQ9L@OYpkJ+JSkCL^vB3Nc4h`dQHFG6})u$Pi%nSMX?UX(j!OJq%KXy7lboz*y~a zpA*aAATQ1;Y;Lm8ZQPn-Ls>P&xpPIEr=%P0T*GjTi7N0#!j$G~tiHrHmV<`L2pCO{ zQCZ1F?1#trBG$s51&%~|F&q8xGkPK7B*-p}3=+lJB$R3J!dQf8Z=Hk*r0vcZU}a1S zw<3D!-{*kWBLp8w7dnAg-8yi-q;nq5h`a(3c^VjnJR#RoKU;-fsj9+OM~h^`Vms!* zdt{pcM&HR@u!=-DV!02kohCP@$mN&xny5z?GL&))0uzLcHqRA!DQqmiK`kP9oRE(A zF4ebD0dNa@r!r7eT=AKsArr*H@nCn0qXD-92x<<TyRoxtX+21gbYA%5jb`=Z;&D`6 z?T_AQz=JSk#{kWbbS;omD9sgV<T=vZEo*N~;3O}%2zARR)XB>W1p`0)x-x*=4T9<b zN|twll>5Y*laP`|6&wFmOI3Mgg?jkRrZu$Jz}4R+w8s!YcQvJxHLwD%VbTzg>;sSt zBrQ?T!#_=p!do7WX_l$R$pFfXgD~FSCZVy+%6AweWp?B;b`~8Cv?SBZY_d0QovXtM z@6yJf7M@YhQ4ySMw27d@Nf33X*3GxpX%DrPS?l3$of7I<tYt*z=;RS7H~#}=a@LH? 
zIQBLhy4OtTZ3)~8Ct<!8l$r4GmZ%humM+IFk`+PQcW@G?03R)bz@n+(Eq#uB$>P`= zL`dg-u4f-dlc8$e4JSl$yy@Y*ha<i{B&Obdhh$0>bh4|9Q+9#>)=dDbw<Akr3&SXM z8<7?=;B=84;Vr}Ar@s&qoZJ<x7K2`m)6o1Mm(}{MvJxdV%>!q}!7aKprPym1|A&~h ze5W*WOQuGC#tSr1Ly6A+X^97n60s}3oTgYe_R6^DFV-7B18rzeJY-p>)V8}z=#Wb7 zLiIe~RxZxn1&e56N85qD-H$Nni8J7Z*dgm#8z&pP&&mDhvmiH*p-t<3M*+;=uxUM4 z+mTe;F_U5Fb+C)r9>dhbrkR0(AxI1}Lz!JYQunE)@J!tWv*dY^?0;f0HueJQ%zP-_ zo2CS?w|<ruZ$5S_cMgD4ndE?fA>0cca{D*rUYJIn+Vb1_GGvr%tQZbU)mH4t82!yx zI}+AQML?!XyTQ*kg3q{&BG#G!cXz>qYP0-oEh_S{mrzgD`O{Tnn`!w?j$&DGQ~)i% z!iE#~FMz=hjhRi2!IJSZ7XulUa6*ua!E|w{DsUG8Kbp}B@e6Txa<;OlH%Uvi91fr| zyvG;WB%FQt0bxc&9}l8yql;^8QWot3pg(R%BuSQZI5^ezGRQ8WOlv5FGTff*2tPZ< zE5Qz=p<>|l08|Vc?t18ecd7R*Ta7kQPrQr-=%3i%qH;kh8eDJe!(ftU{Nr`3SxwTo zi1i=)Xbn7_k6^t(j^-rAifG5=l(+GHNO^47$ax$PBUbxb)hpF;#2o&Elo=ffNijmk z@c?mXKz~2Lwqmav*8)_*{9E65Iu{3*&T`0Q<mV`+6Ql&2-1`IRpV3BOV)D_azDdRE z*~?J{w~V|%U9<30>YBN9((_F5xE##ba8(`-1rKM(=!~l|k*(^c9sol`rgDUF6vnDX zwI7Fa*#Dx1BGlSTl7sDUAJ}`-e4z}sn23deQ#@YE=d^&}GsLSjD!^WALsr(%p9yaE z+7M-?hUMpTl$7j?<Y4$4AX`!DH3`Zav#LL0v<#*ovQJ$}iI|mbp<ygQKDjt;aoGth zxzkk{C_EFwDIZ*s(V<kgpL?meIt$Id_({@8%C;j&GwU`q04GeKlabfRXdEEQX73Mx ztuw&1A7R<0Z-zz49bb<dJ34eJH{vD7g{Zf4Hj2P814Uv!82|M}xB&xO=vh!xirlRm zC+Za)8?Y(T-k75eLmpox8%o22Gjj_3cr*ugI;uMwm(0{1+naIXn>#b}UZvA6z-P_? zKA(Ne(XMWVTL2+#3t&2eYp>)imh94S?4JBPuz}emji17V=W1$yX726HdQbweH+(MK zm)2dYPM=fh4?g>AtYr>h%E1bXcK7G9cc`lA6QwHFijXp0^Qk$31mF_}U>h#$!2H}N zjfOI=!~ON?M4n0PamtgU!N>IBu{calKu-1(L>k9P*f@ebq7PUEfe=kTgN_7U=;PQ7 zl2-68PBtu?U565kV_qk)f>qo2-ZVdMkV1#MK2cBQ;|Qh=CVSc%!O33Ha)$){9P`iz z0APPZuFyn&@=1F=F^J$_wF!C!P#r^zjkN|5iXx1;N6+rygNuWc)3trwaI697$bgvc z!6pp0sMmbWJwz5nu(O_zlOGOC%h;nsTB>4S+${+Gv1!TJ4-m_XTR=SMXX#k=Dma%0 zKk*kH1xd?*W|S_nfqe_I94vbSrh*sXY|HX_(nKU_f5Gk^T**f&ORX>9^eUMJ)cJ5S z?^7}{51=seOFv>p7!Vk*FVbNrX$rd$!w{AMoRGD%Nj&UvcS%FhS~k8K6u>yc&f{B4 z5X5XilTg6XP)DWXQ1MJ$m4g$*^K<g!x8XRl`_iUy0np0Mev26z^D|UQtwKKHLaj8P zJPiL0`GPKvl`qiAm=?Kxf_egH8Tf&h#L1Y%ffuVw%nF$+D;KbpAkUSDFrrBIPeQFt z6}Cp3HWDH&KqpYBI!}Lf#kIYVlLnnMIw8Q7FRm;Z1M0sN4WFFp7Y&ahNOUIka6mNV zLNw&CeFI>3C%~QnSV9Uw1V94RV}R+mu1m*q7=g`NYQ%agBuBr<0F(O$O9?-u#B7oh z8C*(W|1T*h$YIM66yGC7qWy_nir|noq)3fYx~cEK5F@?NTN0kA|AHWz_}_?;|3Iq- zMw^qp(Vsb{B8mML@82UvezYHA<Y&gfr7?dS+d@@Aj8wCY2tkZ2<YI&a1_4Ot8ggos zd7JtM3ld)<*VU|ya^+~_AxOs2Ef_dzO`_xmL?=Ya$v^VO42Tkvix7#~EQ14a7x~`+ zD0Y#0l+JB98oomC1&<^AIX%r#@;RIGLo)IaI=*3y5GY6QRDt=m6tJF>s;|q@*TH3d zMH=FK>^|6#iO=aYpre840xoqlJc<DP;UAS2_}MK4NxWO&XV)9yJ~0nRv#!7k)+_$V z48B@n!|;v~QAML6t!kN;!iPeW$C~%(j7Oz3I&$p7ntu~N9|GGRnsNED5ol;?ras^5 z*khWdWNKM_ZPM<<@!@ogKPZ3b@P5NrXRf-4&mW<_#frC6S=51HKbCc3mqvC8>;#?( zp@V@?3#S6e7x%f1HaA~|teL<L0Yb@PFZ2Vl+bJ)g=L1@8L(>9uX2@urnubMH)4T#J zR&O}E5H>RZs6Vq7tiMQOW&M1dSaQGbXh=mNQ12Y!Z(#Dnkvp-dsk9)^+<ZLV=<RbH zY%UL3tHjaea2q&u{x}If`OkgIA}5>+l<F?+Cq}F^nvFGTGVz)?BmC+^IFL+J51oMX zn-iy!aH|xAyOX_w{UG%;beS&9sN>mt081R?_>c!lsifvT0E7(75v@gL`O#R1QkprL zCjEt(Q&flL-JV(2a<x_bNz-j9br&*ltePxUt8gblU2UJxI7D?s=9m&5d~KzfDH)<q zbu`V(oJ7E04t#5)O?7yT90Y1c<p7<OAx+|-R}m-<!=l`*Bq+eJiXpJ8GD1S6f-OL^ zd}^9LHC4}M?X*yKG;9EfTEXB;-uPn#-MA;=u@w}TW~%6pl%`sHggQq<2jo0(H9Hz; zKL#^rMx8rDN~yD1HA|iAl3LwG$F5qHYUnxL?$ZwW1S*F6RFi4O7)Qfz@iGJMQjL~5 zvq0n6&nVH`UG6@zHYYO6L`TBtoE?(dEE$>v`fESdy-wf^XAL@6s9%n?lws@`VJ-r7 zm>}M&ru6{Taxn`oh#BJkHp@^ot*Jt9oR^xSO>$RvVWCY4&!L}m<J{-d3u&aH0}yQm z{2U-e_dGmW2Da0()ik5+9%`gnOKCCzc^tm=c7Y5gG|~}1j#dx_kKlQG(~yRv8&c=Q zw%`SdK72wnha9(V9)Zf&WZv%BGsIK3za1L9AhM<rjy-QV4l4ADBaTBEP85N)u0>Yu zC%BA9vRY1S9@WuPdLx=NX-?z98&hB`*qGilLUlAQ%$zib>;=iUtLEgN)`p)y{WKgS zG5Oip8+`5O#4;woy6Xg^2@xLSU2v`&xVeW8`Zh~bllPR2rhOi{qLVxzp|H^Y)3DbN zg<~TSu8y#Z?gxEhvhh?$!4TDoBQX}ZJajAbMiyvo;E5r)yXn7W3i6GBlO1$0`2yJD 
zk7%%bVW>E)Mj1l4bTpgM^ReBCr7eV(KA4Wi(~UWDaRv;XWQcNxGWh9FVxk7h?RDa? zA?Fe^UAT4`Zx7;<yE&IEN^;5M8k|zd5Pt^;;Tpw4oDwHap}++MCaGy{rKwkCXx9?w zq#3|r&N_WW;H7tR)-mGKjY5Ebl7Yq$1C7R*7Bj6qsl-5;W-Yx&6;Kzz&?yjUv7ck6 zGsquGS&H*#qu2x3tT99^TZf=h5DU??8UL{(d=~{)b_%g2G(Q@)9#}1o&~h$JdpvX- zNFT&?30_ECPwX#?B-9>|Dtu;x&CM-oYsRpV39w5i`>T8wLG7g43Nf7&(dQtpA*Izc z$3dL2l-o^W+dh)XZm)A}vj?;3d&onzy~2wjVXEz|Wbdt@368wjFenSKmQ85zmF(wO zWO6OALmS0557hmbQ4Sp}OD+KI#09X1bRwx0&8uXiR-)McwJo?eo6YF2mwj>qMU(!b zdYl96gDgz?bUNZ5I#P)HfrcQ1u|oJQ;Bh}tIhU9tu~b?!44Y<<`!?2nJ$0{Li(=py z+XfSf)o|95r0Z*dU7N{TkUzOr_+4n^Vwy)6=Gn;y7pIc%hanoixA2Y}S%0w(xz}XM zC97Z-#qqOPW({;^^@4oSy5`37f0RG9i1z#wjcIb!B*#or4^Dlz+bk{gaN_Zn{AWu` z%q*s!dkF<+7;s+@94f#LU}>Ipz<2}u4;Tc8B58Yo%r+a@J+Fc=q|b9gIM@RIPCET^ z$SIv48A;q?AkD7~pzm$h!mx3x@EW<|O0G)wGIpM-6zpF~BO+x`!g1x0lDb&Ig$QL< z_{iQ$UaT{fr8!tfKqoN|BLTR~b9cfZWN6uRWzyBOoFNMm$`waL-@!4E`Wn0bB@nF1 zq3aLHJ)sJe?3sn5gQ@bv$dsqwX5BDE9oA^pP2@0V$5f9C*UtVup$EgnliI4M8YHOi zti$XyXk#VeT3FZ&4<h2iNaR=0k&|aCIw%|_Pcnrcmr%lVpu#vFp@iwgg%YOI6be6K z!5-cNkCLPB(fbpK1#9KASMi$ApsNwAJFp8W<l7W}83FQor15t%R&aD2Qi37hjrgip z=@dWdfQdT+=sEzktEDf6-wCjrAN4n@Z}AHO{ujZGh8U&`0iX}!+L=KY0+`i9J)XQe zNBAL(Oi1NFIvVansA)vvC`p7LC5h}qt&LB9h2Msgj)tFNOJ@#Daog$0Nb&Bo_;qZ3 z7?F|L?K2jycQ_6navZG7>GDATbWlG!4mPw*$7?99C2p-!!dsC8djyZUkVnr8Pg)Jg z2%RbcZ5#1Wc5}Mz=JednDY=^tq$s-&<2M$=;uUq^q?-5xnOVeXxY0$NR9;Re!z_;Q zTS%581aFHS><?RGzv~a1V!uYXp2N`aiv4qck~yX#TzBzWX$p1`lmpbs>gHbM0O8{9 zb3|74gIdq?6Ev~A5To+G|50;><KSD7QrmHZ7h<;}377B@(o++~UUhk~lt#s7^J3{u zkEQbhDLlA9Udory8tX3JCN8SG7!*tEF0K-D>MpK#gij&fXb)|h#G(Y|UL}p3lZeEa zF}f@EGLj7HIAhQChh4EJ5N@)}m?n*{d&D$V%E45V$O{T3@~#HVj6x1^lL7HOky+o2 zuHnoOn@<oc;CD&S`yCB4>G>eG6zM5B8m_1321mnH^jz#{7>}p2oA}`h-nWr3jWC~M z&mpJ~K1iW(b5of3t_qipM2;g6;rzyO;M>q-nPXJj05xhCA})jIxdc)k#3G1TCBDM( z_#UVaj)uh;;{3SdtLS)fp3G*6POwfM{%qytj_^xZDAXNtMZ=A#3^@dY?_+-CJI}{? 
z0dRJNpGDFjia(Cmfn+ITAW7w%4LgODvY%*${x<-f)b;@eqXS%yhCZwYU{D&eqXV~N z7^k{aezq&hr3fJuI|dk;fqE06Xan!f`Pgrx))D?15>;O6_f#YnIQGu%^>N?$h;cC^ z&Sjxuc-`HDLg_fSI3dc#7FDH<XqwyG$N{4qjv|eW25zy9R2?Rt#85$Yw_0w6HaFF1 zB(bC84FN~QP>Y!LG+j<Os3|uiyV3KpDG2Up?{Bq_jm<~@$FdPE$5%TZFF^-58Yc1X zTj|(p;qmu5e!3SZ$?^NejdJ_}@p?J_AlBfZOAqg>I)fAj@<0X4rbN%69BsKArtxjX zwTyVEt9w}hmLF2ee~8tiQG!df*QjBVabyIv89^m=fJU*Iv_3T`&LxV+s134BP<aHd zoTww*+d)0tz7ep>QCrLo1TM=J;g?+U3oDfEL@g!!9Da+r_^7qx4o|$nJ|Jiz3Ab<F zC*5mA@qP*v^W;sb#`IHvfPi-bcvFeW3#f0a1|Y7CfC;IIOLE9z66@$OXX5nWZmLf` ztz{SmQ+A-soj-uF60W1<xxGrb0fEFw)w#gN5W^*sh&A}xr}LsBJVzxw5gXyv3WuoU z>H(4$^5NY2&p{CZM;bVy0xtG527aYp^h5%-s;ce)jr{v?0TV1-0|46w0NmF}!xH_8 z)<GH&-6~@(_%+%<U9LoEj@GV~*;+@#0}vA!CJl>8C8pWpHR=@Jdr>}@UyU3I-ZA<S zq7!|06X2UTfOSDz_yZJJ&={uMIHG)}M`sGLOu(S8k--tpqVl6KPq@S!gD5>MP)Zzc z%<a|S>om9bX>9~(Ns*SPF-M*p02&iMxq0M9Sb)|#&z~M~>ikCoEliB5Z9w^=dRj6U zev3UgFN~47R6cLqeR3IJsI5byQtB0aN{vY8aH}X<pmPBgZr+?q$>Mb?AL&ou=?he{ z&wqfy)l#5rH&_Fg<6S7;lxpD=ZOojn9f)|(<+qh3@B$TZIu%9Ya$5X~KLm57sqfYm z7l;9!O8}MswwVe%+O4<MAU+MtHY{S#<#Qo-0(W(A={Fz;4C$w(-Bvdp+OG$&|1e;U zn&bndDuCd0X3ZFGMAIVl10uw9qpz;h#?Ur@;w@jpPM}#FW~4#XlZHX0GiLF8-h}*w z21gC=X|cmj64%BJo?v#l?qEOv2YUGc2?rgw1nQeV(K%_=1Ek@p+xdLOnFW3#1jT-F zbCSDkxZLb|gVC%g`~cOXjW%XC_3d2+cd(*w75*3bz+nIZOCqr-VQb+bl@nSCKZO|F z6`)5b;0vYli^#*<=mkeL*aaB9xp0@J74ul}dVM#gUWO@MUT&b-ISud!s4T1lq+e@S z%KT)pu8lD=V1QExC!h}k8dhaa2Vvt)iAIUnBpUS{sx86Z;AK>k5A36=#1Z;#3a}6U z9RSbsxGI$^7EP8$t_I-j%Lp|>`hqcLn~ulUfK1<`I2(ex-yx^$MRLg5_Qrj1A6n@V zzQo_W8jtW4{&wOohQHB4kFjw==3YPhcoA9!<r${D5r>oOT&Uw(1#XUkaS6*ixM_5@ zBNMr4kjLQ+ypX;NwzvD31-Ysy!&q*;Ox!PNEQ;|h0BfD=n|=oZMoaOFt!P$qDgHaW z$XFczGoAyMQ`#H2Y$>iLz*hHzu@MOVpO@m5tcEx6`xe?gB)n+5g%;W)2TC4qRQ7!f zZ5c_%Li<0cSYtsY<B%A(6=DCx)@dviLyRw^$FM_(s8O`yXDbopW`Wpec%?NSRz_pk za{~}_`XO2Y5qN`?DEBApvf0J~m<b5RNC%^tqN0o0(cSzw85A1n2RP)Le+pNP-Sn+n zRgd6SRovnVubf$z-xJ$rzMbxRJxX_~9uePk?8U}k3vSN4xzbO!Cj?E9@jlj!&1&w! zD&?}S7URl7qg9Z4i9>5q4F>Z*y37!9i92HZU0dbEC9#e$nKTo$`87&P(B?J-4casy z9lKq?=#zugeq1KBE{i=f06HE)7$lZ~b^m|4Kz0geiT(>@u@hFK@{26FK=#^B#LE+Q zlLfe_UgZ}ykuyxMno0*-d}>Jn1_xbr>8r$9Byt676=#LaxB(v9UUW917ZC+G+3tgZ zbsE876kUs(;ot!HAP7zNhz;5Njwalvw+A)?A|nm2o?@I5gtt;Jd*;_DO4HzBp%&3C zQTR>)F%zw!w}XH+a=b(|&GoZlkgzHumL>0Q|Ew}(of}|tfe9@3I59={Pl0Rs9bzku zva}*UGa(<{>QNQhU=k<dgB&c&K%Pz}&GH9)>|a0SBL_@(o7`%ROx;9R$VqSN939sC zJW?kSW&#ePMN{ayE1GxUSAdhytvbK=ik;$6gaW?_3Fj7#iwk1td7R>h|5Y~$oh~fb zzb329($<>dOc88`i$-ixJn`(R%x{Y<He(LY{|L?EK3qeQw~O*dv4h!)v(;>FF0rs( z`;6OJNbq4Nsl#VTKGC;>JNxySr1YLTVnGuO?YQhKx5rb8EfQSJupgiy6AoSMqCB`@ zi%vw-mvO2f8_Q7@D3P$XWB!D`;%5R<zbg={+8`0J@)2>};9F=Y7o2n?2lgD8Ds5)S z$Bz)-FCTx77a8(#J)Q&dk&wJhKK>{H=IaMz=MMbO<YO5%W3V9-XNmvN2h>O|I#?fy zNmTqjhR3z2&ya`DQZWNIHojdbj>lfx80`G9*iLT6I*-LFxIjrI>sXnU%z+6n995{F z&aXANR^H&WNO`zjw#1e4i_v0s$rbd-ESX4;v=YJdv`I=~yK(dazMwd85qxi*2i`jy z&<n|fd4|&x9a(`!3(iyLFM(`STLQSD942ymWdAl05J#QAs&C<;mbF&n@^UbEn(DLR zIzJNS{{WPHF$EWREXRqUW>2hxN5GHxGy)J*mFm*v%KYV63d$F3j_@ADhVrV^O-tkz z#WrY^_WBD{{>H!IUYJcQN`8v(DoN?lvK2BSwM`{RGv4dz{ecpQN8_FPS6f>0i{yKl z-shJ@lJAew`^*x|1O`0qr)bxg{5<*IMDOEEcAFFF$S7!;C9lvs?#f#ML~tB^1rGe5 ztWq|ufWI3WxPV@kF25UcgxE2805XMr4F?B^8oG+h5H&d@YDkvPFa*tF3@-?pR8vzb zjJaQMDf21L5|R6&QnG}kj4r-ylu)S^`q|aUP)7o0F$ow`CHp;{JmTh4@m4=X;WIdb zjRA{cH5bbZ%Q-sadqn3bu<biYybv~meD(K<7pjo0=TH>9T)Z^FvTIxtvH&}8m4(fI zB~AT1uDFcSz6<Vrvf&6Ov=gt*s*HfRuA4bgA|C;7@9!t#qYGu^oH0XBgO%CVl-g*9 z>z%!6ykk$RuZ%rPDgiiXgq}uc3t-=@us5aZUV9_HN3#f*4LKXmh&S<zC10$&<PuZr zE~QKVf|9Ilv*8Z}6$Q<7G{k^LQ|b(tXq}NRrIu;u=4*f93CEE@vnLS5W!Z$FQ#Tc! 
znL}4PmCdS~xkS7`*j`1O#S{3=wYVYy`-T%GEAA{FN_S468E6FBa3Y3DcKB_)a`Tee zXwXsVYibL6P+Y`uv;l?NXQYdBaTcNk24x?BuVmY?BS?)L+LVgs8I991=O<gL4P`$` zfLO}(G$bvum&N>;Qjk5Z%`6bbD1$SWiAc0$>D?&K0wJfH`Y#Q$W8d5#C>}>gZZX;) zgpO&r;yYn>_g6NK%gQI0y*LK_4!SH(DO!b|#?+dIwoT8GEVx`wUDQjvU6qxQ+HRHs ziAKuGVS5Q`y>;ymX!GoXzIL`6Z~5FDu{yA&Jq_1I(Kb<66@1XHNo2S51^iUNQBuZv z0p&aCA~}U$Du-PYath{?biz}{j&nuE)OEVB$NjN!zhg~tVPfhkNK9P?QWw5+(~Ac9 z{r>z`|B1NASLyd-r_fLv+QjKT763Y2XJ`|z^<(EHj%~_rK#|r!PQATs+p`2A_2TP0 ze98lN(uavCoX{OGmF`=vV?97Wf$u$M!*9s&?+X$X{ropjbo!^$$u|$=m2u9rm4P?r zf984ZHHZ{k<|qyg<EHKN$9K}5a@tDx=mY6&`=^+WahD{%)|G8TxUkDOdq__!f9IEC zXA1=9?Jo3o6?VDLOKAu1K*^djd`_~fZ9|96h3`kZb4ZuMFZDTpN-3gRxZ|HZX*KN} zB{lM?V4xnavku>l!ik&4>OQ499`zoh4Kp0S5!03G58AxC6GkBK2Q=;*tM!QYtdGq# zc-ImB7&fSVLLKH=uTvU+-s=?b(I7g*b5^w0Rp@otp_SV$`K|krxtWZtb>f_IadNrn zVjp7*M9Gmeb=HEAv6HqEA+;^`F#wf{Zfz`ZgP@^e1r*z9-0$PTEdq=1;jyfcvnszu zycvJj;%^-OoHFxB&lfN1=EJvB8xPkh3kuV+5inE0jsUd;WmMx(h4WPu3>UEdf|XVi z0+QS<n+wIs7$kY<rcosVvWW{z1Qa7(7xgk;%0dK?LC|hTfLAcPM1bW_oLVA)BFK73 zyoUAePPXt9gp3x-2$44-)Kz3f7ThX=0HFkIa5r8ZLg6Sp*oMx-_&I;#%8DF#0|2Ir zVBncIyuP9fA!~g_H{JJ!op$Ssd>hP?UfcD8OH4P?ZQ76*oMM{sf(s?fAr;@o30COK zSFj%f3)v+o<CzzssE~sK*)4>c5L<4@8@0p<E~AxgSCq(t0E>8!VQ6(?bYZ<q1F#*X zt%i))hxFzvkHFm^A6;e=C)KaSvR>cJvm+PsemCRI>a_2we#Tn3FX>Eh>=g`L_8fls zol!A38Uc~^<oO4w^#51}o$T8}rSNQA3+<79!zvIJ6@~(D?K$J{M1|gec%nkL5%e_H zUW#r>RgcqFS^u@j<U~~khmg9Xrp9?@Toe1PbR<Vg&3SdMy2grc>Q;VJ-dLean|oU7 z91Smkdq5zwxElV4DF2sVp<yI$;r~3E9s51hzv(h?5`9Qq*NtVY4v8$UJPo}%;yq2V zzk~vB%=u&BG;n&1G(wHSJcpE7^U=j9s#QG1&!|mfZWM3C?CSCAsDCo*e}jhTe!&Aa zt98Pq-+T7TsFadkfoo{ez3}vKUKw?_h@~aOT;es*B=MMtH?#4E2fbObghd)|l^WmX z?K5dPn5y>CwUe9+G7x9htoRiYgV)jUGMK1P2Ob`HI6K1I@d_En1;dpsC{gejhi55R zCq9HN!SKTzhT-FfTOL3V{j?4ade(LMxHH2Mz8g`FgWkSE9VXoIc)^CpTs+7#vJWbz zIW`<`SeW6)eAZJy#BmNeBp$=<w}|*FBDm`(oKG5l3Mz*z5pM_4aXOs&IMo~t>xlYs zvlxPtj3fLqFvIb~uU>mYkQP&`xkDcvaRP$xAQ7OBE%$@*fu!TH00N2HHzaF!G|*84 z1A}{w$SV&4gD~luu{2Z%M}<i+e+eah_>sl{AG&>@iaqn62@!&OzGKVKuo7ydG&T@2 z17-pCzY{ng!W7KOKa;ofW+O%WCCEaUhb(u)^(czZ*Ol<r-g5=#8rZhr*o&-|xcigM ze}bq0U(=oOs-52!Pa}Z%+LYI1yQ!kD?$gZ$w*LwOtkC4dmpGa~O{@F!=8U)MYQGU0 zZPFE7nvbPi#@2J9Xro+foy~QbB-z9z$%g)6o0KIX98$nBWN$afq;EzTUo<391yR)R zgY@Js5c0pO$JGadJvIvpT5JbaT96>`4r(WNQ&Fs$&|+eXu<^ss2(q927Wy#Gqf9nK zX<mlXlV7)zauVOJf=9>&02xw#J3=tPRAF|5Qd~=Sg<~@LxVSbK*UovfCT&JXlLw_o zd<#cP2K%KG590oaC2{Ice1f1o>BN!^27w1Jim}j~=>iV82LT_XD6Z`gCl}YYi=47( ziP2RF;-bf_b-cw_&PI!kiJu=;HGK5BpNgGbK}>r%C$Z8b=M>V&@Jb4~jlPqVjSmjh zkVaeMHsjbJZUj1H);>d|V{b-&OXAu>es>}L7z@@4TjI846WuF{(q_%DwA4@Mmn46M z@9h}ZB$wwno;ai)x~z!)1#kHb3ygBJvMT+Ky$_`po(y0^oxZ^_7AFvJh{t_lO*(GD zv-}a~i!)}+&69Be5trw1Z{2=mlK6!Bg5~Hx<8H+rpr_!IJLwCSTv5Bx8^?u;{kJFL zW<`*mfPxTB0=t$|2pcitLTKaHQ5?2TDaFTA=%$fdR8L+Dn{XcU1^g;|(aE^UXy6V; zegz{w(u3=h3s2V571H>$B3e$jCnvz^(C@c1P&=Sd0?$Px*Mn?}2Xml}&AUSos?k#1 z>-gRK`fh?VPnKHVTX=*m{yD#|&#C$*->LfY?qpeLlziCso$LBg19CYR`9P>HRFb%V z((r*fOdq_o8aGP<YBJqDNVg8^;w|{D=M-H`b&GjZ)?J5N2UYv;m3et~x^{5m?=eG+ zGVUEL{k@IdhN@KxEJHxsOD;}{D=NW#XbVoRu25-K7V00i5)L?Czre2EX)j)2lTv6~ zM`*2F@LCskhP5Gy01B}yx7(CCR^><bMGJh3tE#K+hRH)eo>X%UO`LxPSY4FE7ftT> zH%-7uRNuO7dJazZ;zENS`KYeqTUq7qL$xN4;?03BTwI+e4MBI)g|$}2o2M3$;gWpe zC&MTy<zQTsjoJDpAqG*DXB>m?!gNlSkvkEc{0Pr^Ob+xBo?H7r!ZZC{u*bJP!t<ji zAnP%M4}63NOC8cxyNj#4#h0<!0M#o8b<z+<ZL~ezj=Etr0AiJu27r@<;wf%cHEyWj z>TMXK_!`ygq6v?tGP=0=@tp?Zxq~xuw@9@Xhq5-!HZDix$WJ5W-7V`!vQ2alv==9u zg3&bkd=NH-wJ|>SAHVoE@`jlYfVW~*hAO%^{swv&FB2;(i>qCdwX#x6#jR7^<3An% zVe|BCTJxa=0XF}ixboJ`ya+%lS4CEK5ZCi>FmHUEc5)JHN|b9Odw=fFFz}?w7|K*q zqFf@HA?$qYubAiL!+Dn(;uED@_Sq*|U2`tT9n1x}16<%DF393s;2hwBT;c+-0A!xF zdDDz~y$ci7`l*Baeg=*Ue!K4<#5ldY@9Eky@l_n~@P+U>Rt8UT%<)7YY6)=wY62OD 
z(J3OtVj^5&P_2^XJeefcz}J@U`04i$>nl(YWa7k1oZCv0Nh9s&aPIe!iHyT!H@p`b zA1-8MH&7|CU|!9ib~b@Ooop0;W-$kU=CCw+PGbUpb+I@w(%0p&F8-X%7=KP-?fhB5 zPV?tfcAP(R*%AJn&YJmi2HS_HeAuI}^RVCWs8aSkf0ncD{5g+3$)C74fIk<qFn=y) zwfwn+N&LB-{g^*ju$BB7WYzq+iY?;L)vSU)Mdszt4XlJeH?kr;357j%7)k7Eirv#d z!CW3}q~I_f+)BYz9^6L3OA&&7f`VN<_!I^I%7f2P@FO04j)L#;;IAlnm<L~=;C>!_ zor3?tgUuA&$%BU}_!JKwp<sjuF<1rmD1sd2<Mbx-1X{td`+4v*1()*RSqfJ2U^@lN zd9Z_mB|OL|coPqHQt)aX{D6YFJlI9SVLXWCD%#J3aSC4AO6{j9mUZ!<0CCCw%7b*F z1p9~w=~x(h4?&JHoh)N5Ji$r9Jv^92!IyY2hl0=XU@irp<Utn&n|Lsff}448G6h8* zoI=6-d9Z+jOL=fA1uJ=QIt9yla0UfSc+f+^n|QF4f>-lkIR$eO<S5Uhw@jYkqo9Qc z7g8{;5(ySl@NYc0go1zO!Q~YE5JAk0$t?h5*ojqYsyl^W4hQG@R{(+=r0_vbJB+;| zV*b^LvAI*6iI{ChOo2OPdLm{Mk6Aa>T{MHo;8qBVxx6Ar!x!isY*M&WvJ&~qjFO!0 zl$=D&R3j$Kosye~nP|l1xKmt-7^e}F>rTl_#Pl_BtX=qwXd<T5h{<!OOi9FiWW-E& zr+5-EM~s*m?v&C*%pN1g<4!40#Qe&LDRrmJOT_%#h$(lc_!2R7JZ9ZIchN!~<7W?0 z3|gO18li9b6I*TAZ-W+$JFJ_`8O=EVcgW;;$(n})*U*BG>WG(HVA1DEZ6?P~Yu?%~ zar*GEEBPHK?5X$zWYsm!%#L6uvCCsD6V@SwWkMkq-LO<z8_n9E)xYO=HQ5^Nsh$RY zr1Ts-V1~gS%$}iKi36o=##UGYS9-u-+)9@%CqAz@Lp9%GlCB3*SKV@tNt%?=A&zTd z&Rb@grO}8ScFR2$$tky3<wMqt4qR4@RZ8o&vCSv`H+x?KS5>wBzZpbS^kQnFX<ikF z!~t_iMdc!cf}$WQnggMLf(QurI+O}}p~NeuuX@>FX=>T{tQ?xmsnp6+v%$<9%IXr9 zl%|;E{(rywoC6m`vwH9M`~3g^cVOLp&K}oVd+mAewNKi2xb42U3z8?SeoN5BcSAJa zgFpm2c5#<G?boF^*!PFSN3h+)_}@kR+b|?3S!|#L{>4LBIhzlCi;kU+LmqpAuFUcd zDl;uwjp%XjCgRF&VeDjY6hFrPy~+NaDd@_i1Y51*Mi%U#+>6EqyTPzy9sAa?bd-JD zx%JZjq0)a?uxR-P9qq-Q**JXa;js@phdp60{foo{7O@;=K0cQ>#*YP%1ZaB*OA)o9 zGj;J`w<Qtoh<5Q{T#4af->V|uUlBR-w8F3Q<%VrDxGt6`JYC^yx#q{d$BhVL!#!LV zSGXdM?~&#wfc=1X0B->{0bT&C131E#oh}T!|1?Y|Oef4UFwej&g;@&oJk0Yj%V3tl zEQeWM<XHsLg-5AJnZXT7qP+o)0UZHcFi5}_7gFr{u2HYsP^Miu0(KaFaZ_}8(Y(Ip zdLH;!=0W}6&#f;<x=SBKD)QnN;B<eyA}%9OE@^oZz&u$FT;PMAm#@bAJAgBQB@rHN z4=o<-VgE^S@2uk9D=twJH{DNVUj5{5KdW+Kv5U{;F8)9PDAe=pClC8s=B#Pa7}T;Z zArQ9(2n_+m0LB9D0!#yB0qg+qx&?UM0;V5KKbVbSHiqd76N=iG`M~sn=?&8xrYB6# zs(GXF=yAli4zLNZk8vA$6X5|4xa5WU2DL8v0NUV3v#XMKMnTg}4x}#bWRbA?FTuTX zZdjihu36a5a+X;Xt@C#=9Byx@yHpR_OJ$E;s0p4`SE)K3A>{~pd;V#w|Fh`XVHXw* zA#t1PhqxDvsRZoYT@-Sq;_df}w{rbWVRU2lr$efW(+6cpRh&N;MWD4~%?Y)M)7&xD za{dYI0DIykRFjrD=;_|f<v)3_1cNJ!%c$A;eSfr-^`FF)$g~{~LE@D1%(ebl{nEw; zVDj3I_*&bUKY{$|i64Es1Fnwx{V!pSsc(!YCTM=1e!<5BwfhcS*Oh%{`g=Ye(cY7A zfUFjsu?=A&HfJynP5lzJsx2n2Lx8KUrsRm)nNTlxsI`e>cbYqwDcS(M0eH8CI!C?; zlAti{2zRq`otWK$w~68!{*;WCvnMzXYxhDGWnreRB-Vj@a7|bkb$VG_55cW2j#Zq& zz8Tr$?26Zt*WV^iYxq-g^V=kJ4S!1NzD-is@CQ?XtlF{Cv{;Q3PC}>s{F7Ly{|vT$ z!%y03LoZbq%tH5t+7fgmj=Y6Nks61~?U%iAzuV<{xZmxvr|lNUh`S1-KPeo17wl~V z9V3zoqYv&KoWve3Z8|&Z2ZEirA<9v|Ctf_%XW!^!^P4%MkAb0%_z8t!4ZUUfv68Qx zrsuIt;^jKe#W-5Y*-3G7^vQ8J{x;Fu0i|-dSqd82&`Wz0SnXDBRndY<I0GjrW;$3n zI0?6XUVNN;FANo0{lSIGTwiOc{8Ss2$d-7i^xRQpBNf|G&s{kNbWjXtTC@-ZI<5p< zE*k8KDc)>boO5+Q*c`$4xS%6BLtf(!cf8;(Rgc|4yR%I(Tzwp}6$oQB*mg4%Yr}S+ zvb|lmwRYPn-D8S+zNSkpmF!_4>lmOEM}A)Dg>6n)%3Q0E3HRofLJWU7Tpg3<32j+V zV9gB5RiOS=lX`|%p0V4hR+=B~zQ$=NZVXEEnYMv)y81Dcsh?4%RAItI5+|x$_0iTL zl{hc=7Ci2D9)wSgft+*#(rV@sdV16zFQ~7Pa%&cPQCjka_wgOO5$v*K_IJjm0`@ch zl_#lC+~P2?35~B9T_YJ2w&(FcqJ2OZvIB#Dr)~bUbr2g|@Nx>(rPAHa&c0*7KIG4| zm2gr!!c6(<$bBy|3fecPEvCa-Mj}7ww^e-)srVkNzK0p#Ye(S?m5T2)ixwlotc`)) z8vfuMv$oqEiy?#i)~8=<Fnr*eG`f~iZz1+;bjAq1quQR<tSI_eY#LN$md2*JL5~h% z_PT&8v20k7^A*A@N_wmzE<xc=>urb#?rkJg9G<~Tvo*wuE|3_yVEyTga)fqJxF|bJ zZ{Q!A9!@Gp3PQz>R_lU_p*_b4RaBWwe#Gc+df`o1Wy0GiI7h{E3|~1u<Nc&KCAZ6c zgzY@2`aa+gr+W)M>!Mf3S>FofCcCKI#FsJZebMK%vNf9bDK|z(mkMJ(hQgT9N?{Bn zb>eQ<&hMuy4P@rx4V~Ywv<;yth3+K>(OWdIa>w<3yKp0r%?~}|pEYC}=*V<{rj?R5 zj-La5F>Uqn((lm5Mh&kKR*#{!67JQbE(falE|?2>MJ<PjaObm6S`1WJL|qwMoCIqm z>5L#c8YRVPu+xa)y&!XLwO?{y0F@#hw#I9CZ{Wn;$|$U_eK_kOs9yiR^e`k?9T;Uj 
zqqc6=!*q;uRUQh~MEx#W>OJvxdLg4wrDET3NgxWSTLktipi(og6!D|LLjjj<Qr}v< zRK#i-<E)3Ne(oh{iTg)peK5v(`Cs^UE=8Kg?IPTW<h%zK4r~<Y&(h!wz!!Fqm3-}- zQpLWJW)JO4@9VU36G_kqvnsDa@x?VLUE$4$y(9$Jp!i~L_~*V8y{#b3+xc8CtR*;( z5O=3H*`_qGSsMo(&+!d7HzrMZoQQMwd6#2XA8u<ll!Co>x;dJwV60`hRtMUZ4QM(G zdVY(hU|S#c8;IY&SfS)Z>PuKuhyJlv&Sx<P2sPgK!_awuJ6_p<I^acHPQDUX)I!tI z=VAZ8)z0ss8lsQC`+Em36|V9}oQsQs@e93YR_IS~vvq*bT|C6iKrNj^8JAf&11qCH zjCr);mWca8SRd$(F;Sr^)#*NsNp!3yj&Y7g3yj<`<v-#M1aO0FZO=SY{!)B6zgrK^ zSkiIr;}D!!F(XyegF9m!9<pa`$Ir5f8F@`5jHdj%;5+DNt4|+=nkhd9-?B*y%EBte z5)~K?aY1K9Ld^pAwne9|u)u=PB?Y7hr``&tqK;fr&#{?Q_SgX>4%`J%&;nl$FOR+U zIXE-XWJyfV#iP$Jj{entS0Aj6@@PQGP}AExabu&OA_R*VMNBi`1CMCz=&}UuGu^u$ z5yNjm80@j_Y&v`*W7U%3KRj{NMk+)~ZowWk%@cNrxcH$`3l65!Y86GFN99;l#E4>X zZh$<|Lu)g>+HS-F2!NybirN_LjX59VC?HV|0oG~CHOcY1@a9lSJBlbR9y<#QC_8;O zlTD_j7d(LHHqtLl`COl^h?A@7m67fVKVQE}#4oFWjKs~fbR#}w0pph{_F_9?>W>wz z{_eKcrma1oV&)1sy^~r86f*9Gn@L|`5mVMZj+DyI`Qq(ha!Qcmq^Tg1>8MEEbv&)N zK?Oiep>lWTRq@<H;X(Q|Y%poiSEXlKbP4m>#olmtG+5F|!*cN`Q%^^O!Z1^x;<J#Z z9`8{!`%pC3;4^O<Wd?_#h^VQ6lZl$7^@Ylgdw+)y#|J$w1Sml$Di{J!(B+ZSen}(f z+*rj-%li##HZ(l;i29ZY+#wXP@QQ4NG5x2wEL;T%fSQP+f{yTwJXAI{XJaUnQ~ul( zFM{@%mIl#ocYvx8pd!GuC>>-M^SqyiI&`-%LtT&_0yq1576{<3VNQ`H?vsdosA+2> zkK-O6Y53cLe{;9Z%+<8|<5LR#9EvQDJ#L#Bh4!0L=<Bg(;Wk=aA!V=qS;|t`X{kn8 zBJEr$8%)ZmHs7IDe_9!5KG<kkL^0F}b0O=JPF9fPAtmfvZ*o&o@9_~y!*z8e>YC(i zK!ujQqsN6YW2TM9YFklJX$cBsQPB`Y8?aNI%ZzdCj2WYA`6xeWK{qVuxGDc(y%ecj z1sQu{it>9ga7|fj_3_wDk3q+CKPbWCM1Mr1i8gE|I255;7Hj2JWpq8Tqa+x(FeH`C z$jz*dWY0cE!N-_N@zlPa(u){bCaT77S8a%}rQ5eDKh`c#jL}yWK`01{UC!2ny<F!w zycPzQ1nb3fB0k5JbT?`nR^}EA2vx@9^=YnFbo`wSRrnSR-wdyIv)ViB<4}kMsH%d? zQ@FrzlJiR|J7(0c!LD~ZcvnM1>eu)Riy#Q=+y%38(>m7!s%%={qI-L+!kcp-UT@@3 z&x+QlZCp34>nmV!&WtjoZ5-+esf;;NORT0tJuksY+r<6_qa{sF(i97Oou)?43(H(- zSyPpko1C9lI6LpgYst}T>Im`jq>hk};+!9vU1;!v29WM?&KTNZ6zhM=!ZQW+bkV|2 zeB4fR8oPfnQf#JHcyMtN?pVC5BH5Y<`xLGkVL}n6`bDu9LVYaQ7U`&s(J!{c<34B` zX3~7zyh;XQKQ(tQF9^g)W{HrvH}C`JL)##u*l#>g+8Wq{J7Hhd2OEQ(xv-_z+)tqd z!v;-i<%PA4dEpySF!2KF^{NUcHqb^LX0A!W#5(25bAh;~7eCXm*iu;VIKI)<3~-La zr`~HS#~MVQe$WmICU_>+P%x3`qF~}Ewt@f06ii^-Z-s&hb&kJq^AQrD>wDlC$VxR6 zuhdmXdUwFmP%=>nD;FgbTk=+87^f?la1^}-pVN2LF>T5B-U0hG@10K1NtzB0G%)#R zG3HIHJ<dh(#4E3GW#6u=o=|Ej3e`DegVQ`1YVe*sF8&@>h^~5K2vtw?4A`So2Q*e^ ziQj{39i^$_->i57!<xcBt$4z|o~L_7aSvccg%&kvo?yI<;jFWu*c<QKq2Q}DPyC2! 
zj+!)2d<y$YWe3H3=&feW6VJoR&^+;E#k;xq0lfc_=7~)BxxVI!X!?NWiEx_GJTZVK zG*9%R3C$B-XwHEG0h(h?`7L4E*HdI*sB^VNO6iKGd*UH9k?7*rtb5||*Q@ECc&NJW ziM!#W_)TmxHgr#Hb;Eo9Xm_N^tG2l<x(3}78_>g7x+i$R6(J1W6LAQq9kKq8>Ylia z&b2yyeI4Bs@4=7KJ;A=Ip?l(0;7Z*S+#s#%G`L#H#dUN~+}R3|8oDP~qmlMM);%$o z$yL!k(O=U&(d&kEPxK@yTGkhL#CsLx6Hh>0`M6@<!>N={P@6XNZK(W%@(Bsz?PX9t z@hT9d@`*WAKG8`jpZErDx&i@>7g`<n2Z|?-qvUab6NUYUTIg#ko-i16<BBJ~0zW;j zI0lzF;>(NcfCxR4G<6la4u%@^Ppm{%{M$57ti!pZ3e6L&=`p`ip?QKS-MHonHj)@h zvXoq{d4f?D{VB~8D!S`wo-jNt=bR_hSU@$!H8fAKBGDB76c(}J*0oMpb*&TQ(FCcM z;%(%JmI-?c=&u9hNEaGctrNZAe~I#NZLJdx;m6QA(UkH3HLVl3K<h+PrFEj=#Uu8Q z#r4%r=rUsnhbpgstan1GRJb9%6Rhu*-U&@GD)df}SAVQ`VhTh{*E=!xD!mhy$P_!K zMRdgzzXbec#S<)t|3SqQr2LwSCz@f!riuy$L-7QAel;ncX#T5FuT)n&!E~xBo_On( zs*zt$@dTAfD8&;>*My;XVlix$;)%Rw$Vb-fR6IdjDxRR}*ye(1rQ(Sk9DuNIV_a7& zo?w8giYIU+4C^2@DV|V7U8Q*98*Her!Zo{6yP*_Mutsu@$Hf@-^?b!#XLZFBCau8s zxB#USNnoe0dITc{rGuolsh|k>)X>GQri$Xt6pjzEBHiyfi@0NhMWh1W1vGrtB3c5b z03L!{)dgQ_`t}UK?eiB8w%zA=r=2LpFneEiUB}LG58|YZr~mFQ0*ej>qNG?G&ct%L z1uFyCQi+M9c$}asch<qAhW!Bc9PYI>bYh#LJ_>d0b$nhDg>}iI=yD9ec`%KNEx4U@ zudR_b)<T)86XWcPFyl%NT<a9i@7S%0^MMIm&uu)-+XI6|e}v#MBwp`?6(Db_TW;Yz zjCpc9M#8Vb)JDRN-HyY>Yfum3oImz4@fH}UntWdOx4goivj<*F4ylt0Mg7%D1zbI% zshWi9xnbQs?Wdq>GRArDO)kSoDw4!rM}0KRN$k&AS5mS5vBJ?OOPV>mR;JKfOH@PI zSf%s<YB)LL7=6<DPq^=99J`o=zEY-CA*u_=ov%L%CSenOVF<T~*SAOdc<&AIWA2nR z#D`~5NMks`3Qe(agm~K%ag&By<sv0nWOA;`HCV&-XBV#A<XlwY<ZOr6lH*sOuYl4` zH&6RXiyo_SHc{<}=7k_W)F>ElD&S>LIP(7jFn-feE7*06^Dr%_HL%SX=U%+KYL?!L zZ=5*LHA_Q>#_lB+fB)S6Q19ymL1Uc%)B>Zhk8v(>iD*H!h%&Ab5tgT)R1rnHL=@r@ zQLkzdwYw^!3l`5j>qO)cW_{CY#qbcN^PDz;&&J_3lyFfp5&Dznmo5l|lIuA)Ik0Fj z;5?KcH_#PcHvkI<oX4%sFRcbIl+NvagM;Rm&O4X_F)lINBRsFnsqetC5!?yjX7_S0 zsn4tI5TG0rMOdFTE`xf1G7G#~{(vfQtPRu}iv>Q+9~-yQQ%?%BgetMEP5MsswfgqC zmG@zLV_&$ou!YrJEC8z#TI%eIwJc~i={vTu?N-f`muX7_EPuJ)myL=1k`G9?X^U5k z^BwS0sq~yrwJ3{Uz^DC^+k$qO{hep-@iCTpOb_iE34X<nNvk8XaPK>}y%+3&Z!V+x z2B{#~=020$a1bMp;gOgrA9WcHJe1iJvwknW6YtLN=TT}qY3^u+H9aU?t_gxO_tEoc z43@*8O}{kFt!iqff`0H+@`kFwc=`vcpX!Pp>Rmu#trTY1bKkfB6f{3uu$d#e)KRz( zi9*XuNIQ{-ag?jd6@8~SWAs+{q>aNGUDfJ!{}>*hsJFw`5t~}D*~j0f$Hy0cb{xT* zH_TGU?u$vV-{;sv)8kOdV7yO&4b`^7&!OT&Ump75(2;uY+0I`)=O~3QDBOgL@5S#t z4rMn8g1_0`*`^@)omFRe032=^<&TRM@#c*;pNmJ)?>Z_R?>i1VzF<0&cKK@hh;Xe9 zREOE;;DCE`GS1lv-N|v|Fvf&V6Wr)k3#WsyLB&hw&UNOoLXCN>UJx78R!(Ha;GT4> zeMuafcgIu~?#AU@mTy`x>=(d(oSMu!Skq+I91fcDZ^A``@1ku{i@|7ape>avuk(G1 ziZ)$lZ}=1bt~$-%f)~_pnfg7Ve$T7lW9oOK`aOtW=g>s_Ja#w3JdSTQnY9$3`ear& zyyk7&0T-n$^)0*@lUYC3#oEV(pexn`rmaoU7l%{f<}>Q|9re3`zYm?nZ%WW-ru=pA zkNr9xmkPJ7h8^_n;n%cu4y-ZN1f4O|Xu5Tmsp@3YX2zvWHU+v)Hqn}sO(V$Cvf8Hm z>LVWPimUgoHq}IOLDNbYg#{YD8Xq(cXq+Jjicexhh;*stv~sEmyNR@^rY&%-vzgwD zx8l`a#8=Pa=PTabil4;$LS>KQAc~hWg!(Klz-x*fQ$hg_sFe0JGKYv@3|g2{5eZbB z(z19IY@l`wubda!s;f9vPJQWlJ;@TqU5t3!Rf(65jJJV`S8<@&UB$?E*BJR-{JpnE zcv+-1)?PNvYO$9=&8fW%YEJjVNh687Zi=_zC&eC|ZfodqNw-EDTl_SvHHP>WKU(o_ zE?$Or)7IMdvfj34DfV3Vp0=AXSkeQ6N5wPfxvYogdb{Sjz6?0YT;MfAx$4SIG3eLk zm^kLo@2Q+H%M_qqFwN9Py<ncH8DG{@EWp7}V2mtM61KO1xy*r+vnh*naVe*Zkl$2Q z+8rGOQ~q}Rs_CK@@Mg_bs!AaMcWT?pOa-SfU1X=K(v^Blnp8WA$VQC;mZELt_|UXU zZY#xWVFAkm^z|1mL-czK=od>vqWCyIFBXtmZIbCdSZa}&i?`vu(#=*|w|8t)Dd8|l zt?gtIWa)y6!K{gtV|;nxDkf^mzl6F1yEN+QlPt8fuO}wLv6&y3iCoqY^ia(PuBpVE zR((KeGxRlk{l*Fp4YylFgj59d-NwN44i+Cn#A-t71n{RK)Q5<-v$iS!JlYIc6ubc+ zrmYn89v31E{5Bs%a6|Cd;oUlDalt;AMFpGii?uBpP)m<rAvdzUD^l(;MFr$&jB}7$ zPr=Y;uBmYIMp%{9PAODwnh(qy!&0kyihBbGmofoL`e{>DJv6pboRykXhOyp+<+w`u zDE^tVP3wuUDE=PrE<B8J{`x6}=b)O9f|k^8Au3q;#;?5$6IE|3drVY)k1-7=sxmlH z<*z2Ho`Rdkjy&jVWV(~}vH(t&jH##?kc-aXi>e6c&p}4$EL3_?Syw_YJ@umUwa{a) zs?;df#TS_~s=|RrRK|~*P?sW+M=T$KH;?0v&@x9{dGV+Cu-$}OX{s$=lS)QXGBju( 
z^n)uYb?jSsX)Wv)+)?zhrp#2WL#dh^%1k#P1@IM9N|k)aVKgW+rI0e9!$VhQx*IVr zhovJF%1j@`i=OFnGfR@1QeqfQJTT;>s1>OY@vh2DSFx~AndvtmM=3L9D5cDF6JBDl zt?<E$8KV^YHu8YlOuxi9OOrDAaG6sIR@zJ%sQ~SR3srfIFKz}oF5Jwh_p0_2^@J$# zSK3VPLCry#f1KSTYBT)^0X1J8;7iY4jr*t>!Si|WnHGq93kvolLg*RCuYE@>zCXen zw0`5aI3AvKxkM;a0lzEDwzY*8uSMezm70bsrKX|fkCZgk-N0Hyv8ihMb!%%)(@X}% zdXmeLQ@VCjyQ*LWr<q8<k_b#QF@T}ol=f76OH)^GT0kO-HeZIwJCwatHKMDAQ)Y#x z;k4ET&_)fXOBunDikT)dMw@9WU_?sEsX`QmL#smzRmEkU#PNh<PhOuuYn&{i>^YPK zYW36}5m?e+Reai{dZl}10WYaDLQP3|dF;gW`?&xW{7{*eihbKgM2Sq;0O}p8c7;Ze z0Bqid$a$u9DQSS)YCO{dO1yCEP~$Z7xRk;oX6;_Z1#-->?FhaDRD~I^jl3yTqPW4w z=3jEF)+nW!wN`0_bBUVSU}1*NZR#{VE;lm_CT#e->J$7HDd9m)NN>*j)YKAr!>Ofi zT26b~+B;M#CC$?UwYVL-M>soIkNs==wu1;MY||a9&fo>Nv?fAJFy5+E#6}IwnmRsa zsPo-lkZTyc7ckeL2-RP1rjtgDmYj13W@9|I(ZjfcFLO7Rbj2zcK4eKdtwd`SNtKHR zU5cPB`m_>1#JnClLDo(>L07RX9{w>Q%D8ow*|%+ASSmE-i_>Eae5_Y?<DeB4Rt{Av z&>MjseN{Q81nq$s9W0&+4)s;NOHM4Y-++lFH(1ut-PJ1HigD)TQToKvQ*T+sQ*YoX z3ZUDY7I6>YKEQ{7ci^UN1H@1@9<vJLw7Hg?SWWi>r&5e*6%(%Su=j5uZN2mhi_ypT zvE6ES3g}FSx^!EkxU};n-f?NamUzUaUBC^{rx1DV!WLdVc8o8%+4*G#JM8G`3FkL> zwVSzXf;$&A1fspQbJ-uv8y{4k^F29nj-8ljaQv)r&^Gk(qNfY$9+2Ml{(;gOsH0+Q z8SsJCH`3}Ic?~S=K3*7ZmNapWuEb&@UZH?U>7_ET&}O9koFN*9&h{1F;jhZPOLJ#S z-H&^PALsfRkf=|u)|+u5%o|fqA38j})zz6DITh9n!FV=`_X?{UhC!Qtxv;)ZABxB( zdE0v7%E}Q~xmOoq;=9>Z_xeJQ*TmDf+Sizz3IvaFTbs3|id)+QsVkf<3hP5fwG&Pv zYq0hDDDd5lTZ!j;Bawznk%*of7(~~kq=RAg3qbv*4IveAh=H3bc<|v^T0Q4C4wf+7 zpUFXfB5EAitzg8^bHSV8rNvYf#LBDZHmZ~48RFN0E-toncq*G(Y72d-$^K7RUx>h^ zq~q-iu=%17Fy!&eaZu%k9r?=cmaAD&3-fd(9=vxMCq<kc5r=*LF{mIYnuLps6y1!| zdJ8^Ch<%Tx#E!!SxXTssn~3~w72rEu#_WcnbbyBE&MRJE=E+(frG>WB*k2-Ta|ai9 zMj2NZR^M_T!eIyfN!0#{MLvoSOaf__S34Rm+@)yRmD6;O1sA1x%RQD_b*W1b*Hj}= z$yYnSuLYernj{>+^&PmmL(i{06dc^Qjz))E^>p38!lJ}XY?6*l1e;@dgmHI@>FkbJ z6di1YK!99qqW(H}r?a;84*dX7iYeC(5aP=pGk*g4W8qH>f9~Q>R#9Odq90;Ah|Sw~ zICf$4gw<5yfq81Ux)nwG4uQUeuT9n#j$J*z-1&pM)w{4+QKV-S)V7`UuzD?S7Ba;4 z+xW4&9Y-#HY2WP|fD3C!Iu7F)AKctRqHMqIEMXYL<T=z<c4zTuvJ$#MJEP86%gb#H zC6$%4VYqh17q=uf#I2(BwRtZ0LO+!0d$bP^@D-EG7<kNT<jllgZtaL=BfMdkId&@h zaf-+-7N2Ue%v6A`g}~%p<JU2B!l{#4y)oftLiF|GaaH}@*xrpDQcizFpiN;pn=vlV zbfIo`(cX(t?Sn4QHajmt^-o%xNri#VRd}Pn0)57-crFlIj6*4$!}HSgX{i~r{;)Uv z1me9Y+9x(Hehl`fMmLU)E1c+~X5Y#osR-B@SJjycfCMJlyn{ZlZYy*vd0m^2x0l^* zDu{s#PO0SQ(7bHAcREax@-J-W1}Vkk8In8HIrZf-`TYQUbni6Q>p;vs;;N$sP!9`b z*E3lnaJa+~j=NUX<)wbkiOLQ-SeirJZ^j&yAH8aGbC@Ya4wl^P_$Xi>PM^4sEvW|$ z*zcJh*-;cG+>FW|YBH(Ow!|MjXv|>!{<Ojm;_B=0!kit}&j(m<<*|ciO2sc6K6C5| zsKqcl%iJ#>VLX-JC8dg}Sm@)!iHHL@zA&tBZ5-6y>1na|6}F3GENPxG&e?VlUy4#{ zE64nicUm3ioCToGQ5(rL3AhsD+=o$@I&9<cyn|)!M;x2MhAkeWRPjR+k$+>*MBC2e zjx9fDU91o3Gf*$$o*Y(qEHiPqff5x|&~a;W+JHFcPtiyh+v70@H9F{oH5NxM`p$M& z`svEnkfNYk)9`Dn>+Fr}S*vXJ*ygOEPEK48W$l5kKsV=28{kG=!OqUlu#Yo0Ug<Xm z?!%pnkhq2i+cI9=-q%)!!jD=Oc;1rc>Fm7-l&)ori0o)#U|+?4TO&B#qMWo;t=kI& z9ZKCXkbgCRiiye(p<XX_MnFP91n#C;`a4MM+ryOqE6k#vZ$g<v4^RkowNxjfRAiwG zf_q!B;NjNe0x6iC<~|<UDaxG()&mWX-7(G*6jYrjcfx^guj+2`&h*8)G?)s$MH(or zJ>Dzw9E=HV6grRH7r(gWJ!r+-7mK@~dqUQbQzm=#dFi|dv(H*V#r@C2kP^6HMR%p# z`44;{>&AgP+&g!av<&wgT-X5U_w}-!Q?*90$vzzXPxHhmjNEXZf;9>aw_)@$GNw2H zZ-~|gPRw_|c%o>qJ5+xyEkKL|;DR{r#%oNPryj>DEe=irCNfp1+Vpv?uwmg$PqL@G z%IxAV-~#2AW5zg}BqI{w`}I%*UmSf1U_f=O<P6G~(r?lq^kAMFhpW#o8QnO4lv_)5 z!+4(<ZVPsq`EHA=4{=5aGU9>h{~D*jJ=G*Q&eT1Ml+lIOs{s2MKj;F&CD(4$Z{m$x zE1`hK`RX_5FNHgm(zL?SxXe#l$MG6n7U75C=GfQveZ;{_ctd#fd%kZ#=`FvR7VkkW z=6a)Iy7w)-sjI-^pi{R=3~Dv>C&t3Sj4|@DsdFpVGW2^fU*NKaP$%7{afX1YG=WI7 zoy7r}d3AF=gU)4pI(B2pX%DIqND<KZP-PlX>-`8*pW~H#7{&d7gQ{oB=;aV_;ML3J zAl*P=6j12#rMhp?IT-2M`_!`4b9Pe5VDFc(e<V@pOST1F&Yd|A$>vN4(Z~(88u9qo zQW|#%oASfJNG9_lI_cb^+6N*^O<xy}40)t5ytM5usICNhw%eQ^V6{TiK<GS-SL5hT 
zp%-v%Yda6kN~V13-bYf<xaef0-K!);!GVC#Py)jKIG1?Ua%@p!t;bwfTMYI1Xh{ez zIE^=Lnd=E9wc3p<hsqXS78Z;gV_<^C)<G}@)cv)m2}OUm(u4x10eO+0d5*e8!@Bz~ zX_)u*!o2t07B?*EP}O!(-uvz)&b&m=+>-j0E_to<3aI$iR$HkFow%FKXeV|EsLMps zmHlqye-r1{$wpP?yc4gu3lARZPrw3MA(j#*?v8itQT-ZI!A^my;gJ1Q?#>@-Ta$4M z@?)?-=Ooh$FdUtm%rR#COk(GzHedv-a^qo@n*giK6bpVbV(>HTF8nOWg2PnU<z~Vz zcQ)*DbF+%J<RQ+Y?fi|ht;GqmNL(rXgD1K~O<mK=tz9(Bw<y;)%61kPa$Ef|Zowsc z^&K}CHZ7XvS(NJ;iQ83hEt`k64$s?1434y296Kpt;_f#vp&|kf2D~5Z*kyRQd2v(a zVW+c76hmz1#ue9tY&r9GvjM<K*qfb;@H*~7t<`83aDz#j+cX@kvfv2s+5}Y$@OIa1 zLyxmMm4@+8Vg-lG?t(9lY9LxD488nN?a3y?P!=#qad(bGP<=QMYag%?X<UJh;UsrV zIr4)-tgW14bsrbPmh)gwv^P%mH0iIZW$V{m8Pyw4{rd4G%UFdN*N-=I?ga|^)^}X1 zt=3_S2cVFv3&@{Sj%~oAl2e%0Xv$lLdHr}1Y^q&9&ijYa-;Yak$4%tp>+P<%VY##O z#Yj-OL%V}~je4)RgZ$Bxpb&D0JIEvWT6qV#ok?hSkh|-5kOzE#OUMhPaS3^+gNntd zxJriWw>z^5z!}3Ezl6L=9M6))I!_$0tU++&4$_^7MP$E{mOP(Tj=Igqfm?B5HL=|J z$^j$YzPOFN9&aPpmal6&cDKVUgQ&cY9OG%Muc|W(xQ>AJ$M7f6!_0C^b06b;EgZ;d znn$gz;0E>o=kiq4V2CG<2l{A=4;M~iC8JL8xh|0^{T^{x3a<B_HJWwKe4ni$uim-E zOuY^5>z-ax+u8xzLE7SEKU8D%`##&N-#4?}-M{O%7jL`qwx{1oTpxftDi8H|uir^) z9jsqUneBe@3&+m!>~g8|VjeMR9@CH&mT4`1vp_bf=5Z~BZ?_?WR-8h+f}`r%{Q{M% zxLkzg(rvwc`1P^X!MEqdQ&>ZdyLd`p#>JAXhqj=5%H!~OILUTPA^ZP*{$Jog85Br) z)p8Slfc5|jU?d;~Fb}X2unF)!;3S|Na1-vNX%FZPhyY9iWC4Dv>n4r?*5Q34;4Q!> zfHQzA0N>gO2j~YF1F!-X12zJ701g6<0e%2n05pI`tM-6EK!3n+z@30;fLVY%z=MEw zfHwg90Y?Bo0LlP$>$r(FfKGsZfC#`?KsI10;3>dsfR6!R1Ihq50e>?f5HJuh9B>!F z3djen2D}2;5BLqhXDMi_{_Jdt1Ngxf@y$x;GkFiY)Mi^Myqx^hBC>C-{H}1&U*4Gh z$(?*f3nHTV!f|(r5Tz*4Lt2H1Dfr8Q)o3wFM2Ie;kIQ>^(OV1?;jp3ma1kj&#Rw6m zY=(#-qMw+7zkUeM7=%dD|2hjZ($fCS%8oX3^*`bfExIZDZpw~fV_?T8L^s1kGB8U< z{FCvUt=xu-OfjpP-3a)y!rt%|2lp)4xQ4_)PfP{mz@ASO-qVq?@ty(Sd_oX1TcpB` zI40tK3iXhJFUg2M8=+`tgi90|E;bsz0$d`F0(>G~7?>)27&mb+($>rjd@~)!sHJVB zYotkkOo#C#B0d|^Ptrrs53#NM9tCXaBge%q9_c3`hGZApQSjyZ9Sxi_T*Ab`z3Mm9 zHqsN26s7~!?J915Gd|+Zc!(>*^FTts88iCjDB(!L)7c!2$IO?xctmt`x1^+Qc)=5c z><<BiB~MA7F*#Xf`0&hG74IXaSTkuImz-raEJJKlZ8<<J%9gI;h_Yp<j10-jPE~oB zm_0@1U-IN^TVl56Cox04A{~MF1>$9#0&y`OK!%7;oGTCq%xn>nJXu5~W{9{%t1UYT z4tOH6Q`Ot3X}0Vf-7Y>kDI;0`7-iGmqBAp;Yn)9t6Riv@5Kh3qfIk600`6icO4Ue6 zPdG|k4{^KbigGp#e=5E7oQUk?WD${`6PIiqlbDWhcpvQY9+IA(IYoKKkDI%PXDzSV z-gWBM^Qqs!<lFG3Mva@?+|;jG^IKZ9ytS3Nb(^;S?b>(fcw47{&Rx283+#S-kDk4H z-_fUUzo7mD1_oO~28D)&M+_bk88viR^zaceu_NO~jUE#}cHEugCrq4_a985wDM`sG zQ>Ue-O;4YZk(o6!JI899HG9t7yYHDde?hJY&CCv;lWL90&YY6W+@As2n*!O$hLj|O zvLuu+<_}9$1|%yLK9W&Gu$*Tre`ZBWeZlo=%GWTIr#Sq%`q5nDP%8}=gKKbsEFn}h zN)~-w9a4bby+t6n-9s?0F7OiqY_z(Ab%+^|iC@+n#4j2cL;@GHq9#e%r6`PND8JJ{ zNe<o;@yigbyI9Y#4rIAZ1+`Q0m7&UVs;bLe<Dz>i(oBVWI)3lg{jpTlRi#dgpZ=2I zK1I2+Br{DjQez!shD!#1=K^=8O1CWhF-9#!DqJ#<4`xt9Dz#W=z?L<nS^1m}{59OI zDD9-4xtD_&)0Ll0kper$$GkKsV_j9rr!I<5GmtjxRMtag(GfNO6ntfi+whfw_%iTK znu!x_C;{XrDY}|d845>Aj#lrJK1!Br$S{QyYgXdbRpl<_$jI;8EAl%7VM%c^{E=Hz zL8}=lWFahDAI7T1o(@x^mbQ#nbD0632KI)$8tHVeNT+7GVk}kjn{gZb4h6oW@XdT7 z?==^V!{in5>-ry&i|TX)R?uPKWbmyf3X-bv`*!pxjPk|YPE@5rqlcxdrZ~(><|wxY zE|vLrySSqwJ_C;%%fH!3tL7B1&O_JqdjEy=Sdv&q|4MqjD$>h>Olo;Q3vp#5PWD04 z!L_SPj!_mXIi|_s?V@Kzd^gUo1Ypiy!yKe*MVTdsj4w)}k&Bh78Re_H=v$FqP5GUP zTxEV~H6P1!rm7uSOD3aEWG$7fVqhNd(dg)2O^%2SV`4p^)h(>2C^I$H^{(+$$`A3o zI-VKeGHW?fK27mIQPo{q9Web5<Nqu2QZ*&^>BwV^y9WK0<&fNGtzboc%6fDf{IV5b zFWBI%Rx^_`MjmPL1iIwUjmraL)nt%z!S<Rhw<~^uF8Oog@v=wFzPS-&P6f6`z6YW= z#B|s`ryyT46>nH;u&v9&H{V%{vvp!ir*Vd@hgQ35VJKadyr4XAOce7Iba=un`_ZDd zNvwv+UdLFNoG2798^Tz9#v*XkM2v;mi1sl3U@R}ewY4xUFrj8i9Q?r|Zh?6hOe(AJ zg?TIOi!GuROmCQGn5&%@(HiE)?<|mG!~>I^ODoK~VUC4a4l@QOhiri`qgB~p`^Ykr zqG%oiJJPMy3ZWtZe`b^zN;V}}>sbxM8%Hpe<CnUMN`V%He>jj0zA@&h$`{*T*3?>P z#x-4Wb2fel!Z-7#Y6{^9r}f=hBj&mo&$-6dPtn{Fp;@xhA+vlsX4ulx@ruo_UYG#~ 
[remaining base85 binary payload omitted]
literal 0
HcmV?d00001

diff --git a/env/lib/python3.6/site-packages/pip/_vendor/distlib/t64.exe b/env/lib/python3.6/site-packages/pip/_vendor/distlib/t64.exe
new file mode 100644
index 0000000000000000000000000000000000000000..9da9b40de922fb203df6b9a1d0ad4139af536850
GIT binary patch
literal 102400
[base85 binary payload omitted]
zpTVo(jll4q8y-jnesu@=5I?|Me?dKh-v<2WFX?l@KL=KBPz*}&0C2~zX@lSb|9m&? z3;bcA^B&W03q0puV?P{N&nGn52+X^We1hi#WA5iXIJgt|555d=4ydR{_&nfSfUooA zfWHC!l27P4{I~S;zgMaZd-NHqhxjD^hk;K%K%WBM0DP5C{2u`J{DYxe0x<bc&<;NW zf5oTs1}xuf@Cx8&J_#$Z=0U?hwZPav8$1qJwiUi19)XuWPX9@tnFBoYFO&;>Fwn=B z555$5$xg$Q9^jjNQjbl*?Yr1-$IlL6-`yq;$-pIil82>&KT93qXFV`#uToEdCje*i zNnbr1_-h;A2z-@K%5?yE_VXqW!+|^br2p9sJmWR`Si%kl?&lNUei^vpAazY&Q4Rc@ zPx2t}j@PMo>~{e7zDa$73taye`v~9y@8OeiP2l7v({^%z(TAv7{KNoj_+(ymJ23Tq z`Yh}P9^-ohT>V3-QGAkaDzL)F1^(8?w*V6@`Vjp00p8urSO$I%u-^xSB@Y9Dv-r6D zpwMfnZV-ELxrZWmR^0dzDEB?Q@VP*_$04{txvwF91j@Yz!3D~_1HlE#{Q$uQF0gUA zBOvGd#a^JCsTW+}E*ls4iH*ydc{%Scegw*yR>1{k+jt&O&N_>|Ksoy>`4lMUdc|I# zoTn9BpqxDwT%epQ6<pxOHZEsNud#8c^q=4Npg^Sd6QLr2DX@J{U<FY22-gAu`e*z_ z>vhjY>}7qj1t{Sff#MF^R!0HxRNW0g^Q(>qrUC)_XW|LdH#j|A*wb26ei!x<c&Rwr zzhFFKTzeAx+fQP@=_K}BPGZ0NB=!eRVsCX}FLKy6>|<_a&IR!fnF6TR+Zgw;51NyI z#{4Fx-P~{|V<tomx~JTO`;c~X;~vZj?dGKi@lV`A|4o0#42^<j&s$-f0c$s>;O6AZ z;A6JH-;@7FjvT2jx#SWxYt}4PT3V`nKA)-<@72}(uU5C;e!IH={`=L|ty@(?LxXzt z)mPQAW5<*%U&WX9jG<Lbaq7o=tGUy+Ysr#*+PE+9$<gY#qf3@7TP7Yp3G6+(FR=Fr z&bu(jVEBZvn3pUOv$YQ;+}ez7K*jLqlfcmu?Y?ALYs`lP4(zneOZLUIN%)cy;+Nk_ z@xOOpNy$Fa*Jd4mNeNF_2k^5};y;SJ{P3gwOL+N}9l?B*^!M%5W)hM3-;(t8?+EFm z;C~<aBes$f$=}g^U%rLuuz1k<Uww4l-lKaDOB`Z8Hf>He9*&j~Kjx?>_VhV>!`^)q zp+x8tbKnoFdJ6&gwTbzgHDeNU_U^;S&3GsN-~M8Bn(?5`ZO`w!=ZpvTYQN%6xDQ<y z1=<K6r~U8S`@-Eb_O>0}SKT0H=o9#6=2LA)wX^t#zRerj0@_*AU!t&#v-sCa&<_}A z9ly?-L@x2IwSNuTsE!V7SlJX&J)xSVN{253Jd97X#z~;Ki@%QHB%ZFmmDmTWRTy6c zUo@YCZ(u<9Jb};2_g#EGZrnK5WoRUwK3&~#!woubK^|Yfe!cqL?|!Eqe)wU{<FCB( zihA$8_d<MLA;$wY(}!+;ELu4I#)=yQkCr^8RzCH~Qzh4ytCdty$y2<y_EgJUIN{or zAAd3TQIM5)zQ_AuUzaT3^WMyvGoLES8TWhO+-oOaHzg2wWNu(Z-O#75nmJdk4BY+H zBQqtwtv{0ZR|Wzf9XYaDX)eBzxz50WfrRLR0Tp3?lpWG_{RRDqfB77SPC}y$O(!~{ zZ}|-wGDHm<HVpj>scQWA@yg{wA8dBErrp%3Q`O~{U#_mW;tF;3)mN((MU&M0`SaBe ze((deaN$B#S&5!e`j6$ym#g3SCaB-vFkU^hB3;$Y&r&yEo2hOqcd1`iW~jfcx={V0 zI$Ql>)jajs?G<Y0k1th?cVDYgo(!nrPY2ZKodGprM?jtXY(QP~LO{)VKA<vRMqG9< zpr*bRP*r?0-wCLz{t-}xD36=>VL+`~wMy01)u~%<xkdf_=Ra3>+;NB6uwjGRxN)Pp z_uhMT{cdKW{KzAZ=)UZUC!SC*ZQra`e;QDav<B2O&pe~{?AfE9fBt#3fB$~<^2;x) zg9i_)H{X0yee&J`_0m5B>g~7R)_l?2+^jzT<OB8YmjTt<+NzEmIih(*$QnM1^>ZF) zaA2C4^~}XL!#p(ho~`g{pqCmS7_F`fOjXwhu2Z)MZc&c}9#k&}_6Pe@)ratB<T1Wh zC^ddP>x#|DE(kxrm9a9AsMZmF1L6Nj_y)qiNcck?!k<O>j69{TtYGf79vRVQ=A(pv zx|R7e;SUi0?}UGs@Xdt(gz$$ugdcSl>mL`qeiid#FY|ELXZu*ov~H&nzL=;22S%&F zn^RTb&~+;C!7VD#`k)FN-XF^Us6K>0gYc<@znJj3geR`nQo>)Kr~>OotH7_Os=%MF zQ-N)_5a)v`@ZSFR@Jv4PA_?D(@ZAX?OL+R9Rwgk4XD=1Fc(e-4nW_ReT&Ds*zeNQe zeNY8n-rpWx^;CJPFY}#2YCXE{HluxXADrKc?%qJ+l`1g0LItMYqypD%P=Q<itO5`2 zR)PI*bqMby{CR}WA^beTFD3ks2!9XZA0qsdgx^JY(!Avj!oNrO)=>E2XsfuC7)puZ zhs1C%G3+3Qw~3)OF`)iAI-owE8c>I?3#cQv1k{%g2GrO41EKK!2|t4H69_+z@P&k5 zO!&2gzaufA?i(FY4^IuKr>_gBeYXVE8xICl%l`K8F@zTiBoV$p;Rg}^EW!^b{Kbg@ zHG6bGT{ktLZoZDVZV9NT9t^01``g1G-!;rNzmArfnG;<TU2biab56>T!DpOt)+Hei zu8EVgv)%5=nG+_s+;c~y3>`XT@Fka=IoI~!&c>Pl*&uHB++k;nhf6MT+U}EcGqWa7 zAo{FK*My0#xx>Z7kRfLbvfZ<DGhCBhneI%&OU_0PA2#e_JCEXJa_;2dZ&Lr{zH>+7 zL1Gwu#vtdJWHMXgpFBBx!sNN9_3oF9J04PO4`<CK`~;UokU4p7|K7d(8F!t>!Dl$h zWOgY2xk-I`_r7$zj$oKB<sjV2zgxn)Cga|x_i2~fdCNIRvOxH`6I{8MO`e=JdG4ta zLBHg_eNTytiyM$5c@%e1>Yhs~b5A|a!(1VQxMybNW>21+o0U88I1jiFJx9ksG1omg zE7zSn>GV^R>?BBG?%63LawxyVpQ)1wQf=-<$z(xH&`-xdIz1N>=VndL)rHV`4AP(c z&vlQ^kSt8j&7FJdq)EMd_ofgb_qpeFo0~BzizF_?{q#wbbSVdf+%p|fj<lYsS(Dwl zP<hfoUCPrY+3YdlLYHeygd-|-!ra`sx!GB{CYN*}=9is0Zer%B$gtSUYzLuLwwvzC zcAtB}sD$p_!om_g<WRDhJ9z?()b8#&|J>BE-6LXpW#gDR5ndiQE;F;;eeT?et|{p~ zqod9vh0M7Ud$zkh{kfUKcT>{i!=p3AJ#zx=Iyo2|`u%_Hoe6Z6)wRbjRi1!VV&7{G z6q^tf0(lygA^`#k5-K=BwMB}y8ZinAVTepmAYrJWAVaklp-2V_n0phM1O#Q0D#cb2 
zL~$rVM2#SVGBhf(-~V@VPrQTx0eo-0x0YwEopZl?zwewqpMCZ|_Xhq*!BWq^=)$gd zbi6U8t#qy8V}0&h=ctQX-`GKX>=N-Y{7-Xt=>1kLI<}RmM1JhXmwc~FlOlHM)Ur*b zk0ZvHpu2QvbL`Wyk7L`7#|$q2YHPB~>gJ^EP;jwEkW&t46VGL9jLKkD#d0};luGg3 z$>S6s&)t0U%`P4pm2quF>@jax`@n=godVMbbqp*S(<!iE#;8E64T>q`=Q_V-f$oYO zh#7{df8vQJ0yAgM49uD}E0C9$7nn0=j*Ar*FJA28!4)f31c(*3u9+8@sd!_7VukhV z*9TsG^;H)e?Alop*tv6OVArl)fjxWn1U~)r)4&&>?{l%ip+koP-+c2;VA{Vduu!qW z>t9Fs*~Erg=vrx^lU+{jrG;*|7P=`~=;j74vL%5lZB^ixwjnUkwgjfxdx3@aq1Nwj z*O@?~7Q}ARdR~j|)c@4;SL^vyJ%62^zfI2%)br!@{471cNYDRG&zDrJGvCK4V&C_1 z%KytarGB8i)vsS)Mx<P7J^t2@zAP~@u^F#o>(`I2->_Ls)P)yPQ>_;lf7xYCE=#;9 zs&3N@bX~t;qb5ys??q8Di3y1bfd+BS;u9~alX#JCxbT7te%7GjW$}sU*NOjG_>I`u z_-2W<YW*-?&ouqfg%{M1zPMT9`L$}*sa><?51ZDlTmRgDtKaOB^J~@Njl`&!AD>s_ zyg1#Pn0QI;nvDOG=pX#MuFh{%r*`cG?E@NK_S173=u<s%iLNz&LWBQk(kLM@F`<d@ z*Zj>A5-v?hXqe#WYoHdZ*07<w3}n|;?c_Y&s94!S4fRzu-uL%#zn%-kxSU`>;d$Ur zL5)DH{+?C)fu2^x9#F(y0X6lSng2K1iKsu>+~9k@;KwmBF%8uNmkJu!u3cL@ApP8y z<CH>Zm5T%y2t3F2@;hnphjF#LsaAu(Q3BLIcJ}Pqw`$#4xo_XTk3ReCvyZj*?A*I| z?}zG34jw$XPy5nc@4ffl!k1rud7{qwBqS#%OY@Pw`>F`zXm{o0b~^k+)-23FYTUSS zBlYl#^m27t^li7@mU6Y4ZvLtAh*PwWw$-aw+s7Y&Y+6rEc^48?R#H-8+J76Liw4sk z{GX+zrJsn__cw3e{M6dDYqPJp=9(572RStmTr9Xq04~V2P8fI%kNNB;f=dJ!D_7B? ztgP%Sz1G56ZT#zr->=CAf6f@KTD58ne!|P)zj5P6Q;yVxpDka$+~KVpl6~~iN49Cx zCR0wq-N!SE5qN)>o|(OH;lh#2mMt5pu&>kDv17Xq7%(7h^5n_g6jy^=Q$fPT7hl{$ zxs9GzU3C?_i%v^Ri)UQDJ}vwY95`T#Wu`T1)TpI+IC=N(-KLyw1)pDg?KOM*?YAAC zl8?h3+3eV{!`^-OU3U#l9u)Jq*V4IzpMU;&ne^^!jamBDS6|r|Uwm<3>(;H$eel5t zC7SC?o)Mjo$=+HDhsiv9y7()eGR;*1;O_yxdO#)~z#m@G``26po(_8O@gSVdgJf<V z{CmDX6Z~I%@x@7!W1Mti9JH)kx6aXkekk8$$Uu1}r!U}$O~{5_zh%o71F#L`L63c4 zJ3RBsE3Y^jkfH3~kb`pdq;M;muD{m%NBCSee3fXZuKm_KeE6_w8FKocl$6vMJqD10 z@@BSj<w}#xO);~xVPt?zlvi^A?i)62Fl=Spwr%b;G~ff!H|zu%f!k93KhKPJ%u-%8 z`}yC@68~zJp#RN$#Tv7lH<}fGXm;rE;VrTi?7uqA;lFCts%e*AdTC3=@RQ{)P4aLw z;B$}xe1#rl0q(p;{@b^2cff0S4!^+{U4RzwhX3ej{-<VXZ<?iUHfyUCd@I!qT5J#v z>&=>phQ=?L#jP-#{GMt9M~-l6p*qdsFJEsSz~6(Hh40VAPOt;?<<{+H*KX5yIkt-b z&>$IH{fgODq9OAw)jN(T-~GB-^Hol*_E!ga-BV9_QgnGzixw^7qzmJqLvnERcsl6e zC3*z^tD*zG!*gVW-k|^ZUG3Q%LS2Rb_3xI`aE-n{*<QG|5e>>G+W^r8{(ZKXUHW{v zSyUH)tr_P1Kl-or$N(=7dhqvpj{YML^a|bZv|!`t8G4Ux_#V5U-SWQKO`_pC>Hjap zZ)j+@$*lE1%4ujWTTq_d;#a%d@uSsg^}t`gc$#wjEhUG^*cra1Dmu{jNSS!OqKE%{ z&bq*}_?gJoeV1$$8vbe4MKp904GZ>0-f_a$pX-wuCm^QYmtTHqT4#;{Xz<|as7fZD zmPk6j{`zZs{kd88ml4<5jDackME_KWSkI5le*J;jFL#*T2n}zC?O8gL?Clw!Bzwju zHI^M<&zDP|u<4_!vsFQZ!(Z~Aq})ZE{Q5X(!5`9l8tBn`^tv(~Ucb<FbgAU;OKjfI zWGfil&YsF@?`VL4-+p5Dn=lRCO3ZE$PMt+Vg*`(<*q*PzC&{N?CZE($G{h_~_pKUc zbR`-b{&8_}75-mqJnQ^%^1yys54=3E2d=%HfWQ2nt@+d4795^zbBDIGe9<sVG|<xJ zKW`TeJ>=tmwbKk5ygfsMw`Y9Pl`o6$FPb%l2Jt`kIkT?vQ);hPg}?m0(|_68Wa1%g z&eP#(K?bY`kuveL(1SmI^4ZZHtoXsURwx?&A{z1@D91l*pV^&zL_?|Bt)c;Y#wT4T zpXBWs8oWJ2gSTgVQv6DX|Gz5ZkDal;D9)+?Xz);(4sR=wbnqG)Xv?x&+oBO|toXm$ z+S9{C!;p4%X#Z||OXds>cZden*zDE_d&VbW&mCk7_@pHHB-Ur|lbWnHYgCSZWxrGp z{1x9%YT2@7oMM!5(1Bm1M`xgen1>!(c#pN#$7OuRdP~eeTanY+mX2y|3q?bbXqYb= zo`15x9o#47sYd%mboP@>l31TbgO4$qtSW!FGXL=xUjN&)Y10^4c)(u48{EL%(}M5E z-?Oj5Z?aagw$WCNNwVie!?UB>*n$ylEhHK$+w&jglcx3SV3Y2@#wPUXXk+i~XxVqC z*~Gy;ZTnlB3>qS13}?^sNwli)m%n%VFMKA0J9yE96L@0}@Y%}*A0HVX6AwWn`v2mD z_V$8kSS}itj1&#B1?(9buxEVIlk%(3Fcli^yT-=%77h2L*{HiaIU2x|7=!iM#~3ZO zzNsCj|M(2nFyaVc{`~nC3<iz3)Pt7?{mJY^d0E@^RJyGl+sd9FEgEDCi$%jTVSC0W z<qd9cvj(-d=>t;i@yt}4(zk<66b%oFhB2aHq-da3jxl~G+fX~s|A4=A!Rf#BdK|Po z^w2{V3WXd!o(B38(E&~97SEQxzS!1{Yh`Oh!%ETM>{&G6le|6W$|wDKP>TI&K&m|^ z8YYW|M?}N#rAMPP(j21rqz(}=#$wsM+Hw9L{Js9SZQHgnIDn^4g;dahTq5Z>87<#@ zbHv_XJH!4usiUnPn`A4)_KZ*R_B=;E2^waGY50TmX!89X?BPD;G=Qj!F%)m#Aebe8 zb=2&YO1jTwJ!G!+cbd*IT$h=dxe;8kM{Ho!s8NoFNd2lzOJ$jGjZeT9PSxI8UtAGm 
zV9$?+?YYu(Cyjr=AK7>A+!;C8(@#I`VlQGE<p*rYkRcT`oQ#f>$pn1xNlWlar?O|! zK&yh+RlQXYKIyE@TKP_E(xi#qamO8|vl&KQmz9-e4?OUIO`0^x*|kzW&hHQlRYeQD zXKlb&5epHIvLB#f&#%oLY)8J*exqQBTc0Cij3>kPJXN+ps|s2Fo^lm+73=>>*AD;7 zFTeb19qq}~zu?}ZINHe}KR@5PcI|39TV#(u`lzX<WRE}oxWkA2J^Y3S)&kZE^a@}P z0}c37^pLd>9<864W}D}M|6$uWXUMlPhHQZt1AE3N%@}qg=$~@`rQF}cDus?lf28Zv zwC``Jb1)&<u<0|?|HL!dgmHcS_1C+&N9SRj{{(k%MMluzf%o7yy2n0;*bZ4>57@M` zXUT!}+1oQdDbk+*zHBye;7rf!ll84zw|-n_dR!iaxx=<>I-_VhlVr+iS<|LXO|iAZ z1w4p1A^{pacsl4A4?w5<*!b3e%(}~-|9z^~=kgeX^*K1Qqb+;vUVHbIrFQtxp)Hbk zwet@;OKIcAjXR(-N;YQ9m_t2#_O!lz`#QdBU+HuS8n6rUKHvr&m1!Xd!8(lI6C+_O z<ReyVKgoN<NURgkQbiLBzTA$yPB;I<ShC$*<tR%YeDJ}e;Hh(?4zOuYL$hYhOlQ}e z99T2Zd14<=2l#?NIuD)f4d}54Yy^AoGNDHYiB0_4!`_?yv-IV(@}Jli`Y&C|(|NHs zuv1mUZN!KXP7ct3UsZ0|di3bw-bV(|-~k%^zLz~EF*bS+p7iJtw0ZsFePSwNZuycw zDED-=&KXW&4{;{=qxS&1M7|lCz>S=<&fYou?bxxSrKYBuPNz6I@Hx+T8tA=Vd6{@S z!9Kv<_vjIO2K0(J5ZU?shVZA{_CrB+n)9Ecdy+~3apK`P?D>TkUT`$PAM%E>4@X0K zdb-_k!wpWiRFil3!)tgBE#QqE;D30ZJp%eeti4)xfDK?1jEfHz?ngDwxajEURLMPG zd8bk#P0fe8PIa1#AG{6{|9HNFPf1CM!$-0;#TJ|@cJ?57oAk};0BaO{=6--{Xuyxc zQ~dv;MT@MsxY(_M&ph*tdyX7|_*5$2_FJuyBZT`py*FM+Q}?KD{iYuH693@;Wsj4& z#@^_`1-auFq^B;HP@Tm2R@u5+-|#i)H#!IY*a3ElOwbc#Kt7Aled9IbA|vF%dM<kZ zBE0dN)CsHG9R6NU=_7R*8a#j-bfH7o!uavyO+L$5rz0D<dEgoR48Dkl{;($kz#Y0~ z&z?<=F;`!UKV96R(c$mofAM=9G<d)0Y4J4B!*6_()^4{?K&M!1Bl!!iT!TNd@i7E- z9rPYug6G(fe8zO)aVEG|;2){~_=re(R3#JWBFBFF?YEoG<vRWLJdf<T4_!jnd7m`^ z+<kpSd!$)sfxE+>IsE=tYtLl#k~IiAutE5Q|G?L=9!1Io-Y#9b)Mm_>;q)1Pdw_0q z2iqZT;u@V{Jk|pF%z66hgfrzkxYO!^Pvp7?{vM#g!^vdA7&;T}#zDt;2HL<E-C8Z* z2`wIX#iw)6P~Xe3{`RlS|H!`F{#WbwIB0?Y^qvlSPX|3b#lK(+0J7md<bdzQPLUbD zh*|<QC}@H2n&V9T%(oaH`^w?(^<S|<WB3G|OeVzl#6QqQT@FCE=wEv2CFghH0rrZ% zgFkj4eVn7|&sJw5DX0fOeg0E^q9XqT4XmBSFZe(7fc1#A6CShYg!jmSSisAHYkWSw z7C)vvg{y0b=I<JJ=y3WUsT-B`++TaUjqK5NuDJ(Up!4t<8qsy>(NV<+rIHovDrYQC zXKez%Q?(!I2F@V-PXcpr_JMO6XWcAEi_7YvLyrW`v!f|Phwjk#bM}|_dgZRayZ)T; zcs0VkzWt*4)XuW^<N5I;@Xld)U9SoEJI<!h4&!O%eT{H`PPjiW+}92FG2uQX+-HY- zEAMO2#|xiWL8{;k&uN{jM~QC%^!bFbq9i?9pe(o@7DP6#si;)W2Wq$~8*@c#FW6jb z{&g7{8NXD#pQfCD2fg21K+OGPL4}?&_CL$ahN#xl`OWeg8@XU=3RkJNbGvfGZ+_uW zIvEbo+Ms65nys|A8z`GD!=EYUJS2bniPnNo<(J-3e4ej7*?o$E({%rpf;hz+`|eeZ zgE|?x&{XAbsd-Z`e&c}IYs$*$`$)9UbpMIkCGz0~nzIc5iH~Hz1P*|&VT5D;qw=c< z<a4*kZ?0fqYNpg1sdG_ZAtz3KXzM|<O`^RTwcZ%~wDJn+$~|=l2Vy7oo~)1fb=D$$ zHL)yl3ZL<scAIKd)I6!RQRCtK0yQe?JJkKDvC&RMyxxyt51>70ruGZ>vHr5IvCm~* z;%Imhrr*U*<s+8=rC!!kxm)Ue)XJ9ZkNmXC*A%fL^})^0evlXe00#j74!{%Eue~3> zWAny#cXbK!;k3K<s@}D`yl#4naG>tX83=Ow)O@`@QtzeCn5=&5u{s&Unxg${y5ijK z;NfvW{=~rG!uqrF$$l=UP0g8{yq_N&ekUBLds07l`Y0TzwNc|iAE{AM>!eQj8`+53 zhOkUxSjUxj$<+7n?qh8RJPyPh@Pyd9cvMGM>!UtH9+Ae}0JT@@$JG2H^^vnI)M%*n zQDZ_M6JBtS9`^GS2l@IU>nd@YkKa5lJV9QWTwsCnnbdssT{;^r8;isNee7OIA9a4; z*#u{4(8o%3x)|nHJtbXvh3=7fFgJQmPov&Ojf8q3b*iO*8gR;*;qI@@29>DhDI7S@ zgH2GgrFr^^D~1mro|&DUeIGF>;PD`D0xxLf*Qqs8<DgbXor(Gebuwy9)HbM*QC}L- zs~nyf)5pNU>7(kdzV^*{k@MpN0Ad>;5)W`e{%gml81=42!hxJ7xp8VN=p%J5>MPV5 zU5zU||H!+4!BoyBygs%QAJi)3Pi&?-M7q9r*&ScNexLXpy1@k=&~Gm2<LZXgtEk;l zGo<!BJ6tcsCa8~4E6h=i%JYE9B7HQqTeS-P$F}s{-c0%b`>;j)4*LT3cJKt>13wOy zzHJ*H>*neN<fqX`YWU=)y-iSOqSi;9EIW(?lXWX8ujAjYx!?O2?^O7p7~)HG5IZF1 zrw0!JegNPBpOC}J+Lm?CxSA=ox^cpRdTFJ0od09~i4!NLKlRj8-Kh-`kAnw1XJ5^p zn!G7^zzh5lpV6q_at^@TL{<98ePLazs*54UR=y%r`Idh0Vak*#&hO!OS^x0==mPve z7nvJeXxPMOJKwOqJKvN|RMJP;1o}v;YGSYd?8nJ-kkf_7?5|{tZoh?Xf|Hj|q->DI zU#E4m{kz_<eILB}tv<>os9&RxGe`Y~>8o7-<y(ojW0+sHi1$-ci0X#A+O&RC!5(?! 
z5x2hsC;TFI03PrF`M{e4rSI6QdH36MV>`IB0A3&ShyTJB{c(Wp*`?s*TU|Sb4ei^v zZ=rneaOhASv;SRp-Sx7*zis#5f4|EE>x`t!UHH8ZJcl3fpZy)W06)OLa$`*0w^_Os zxz-*tKNqTt{aSYK_JOSR)McnGDb6yT`*w8u{TFxu@B=(5%Le>dx9}b8OEvt}0^+-q zwHWrGia&N!OQbHL@0l9;S8y0OaG>Km{(yCmvAqs37k<Ips@91ks>|FfUCvUBbX1da z=IK;>su*}B-cRk_yZ1}fc*xP~3;%{JsIO^VcJmMqz<1^ch{=f2e6O|YEAjI|jWt{K zioKeft-z_8(?4vGv54*9H@RGNkX$--4eSFveeMbw;1_rfT;K(M8=ncDt5>g{qKPj& z)#Uoz*}v+y>G~dBcjjhH<O~k_odDDC3fN6I-Q;is2Y`F90q#R@h?DRaoJ&+LJWu1Y zSEx>N{ulqFbM*HyUWsz@toisHY76?!0#{?kF2IilAAF9Nc$u0Cal2^vvnH+X+~DWt z_xcQ90q_8rkM&5uk>Gp-XH}scokL#m8oKex%Dv1z&AHwD%p+gp{C`z=_!wB<IWT?S z(dCMX3&}a5k7qHrn;&}p`im|>FJl7uJZuizhE8;h^E;fq(DwqIe>g+BnVS{^{gwG1 z$$w-3;Cu0X$XUM$f&T4PKI~zQLH^`S8{ajFIzzy|PUYWDh}V%AeP_2u#rJh%a>NTk z4MCt9z<vIi7(D`{=sgeFdJR}k6~DofbKLiKV*@8@I;+LlRW|8k$Jw)IFHw!=uzb{K zIv4VXxpU_p<9Ad8*rzf27Zw(}_jS*?1qB6Nl+RvOR8-{T>DMsiET8^n<Yd&-bpDw< z>(?4%hxG3tcF5iseJ$1=<QBzsoNG+c-fE`yfojYA7~G3a6T|s<#@iIW><;al*fWts z*sQST=Q`J^=Huf4^#kU@*6>RJ`pUDkbZt-A11~EN$=rM4KZn#W#W!I`;7aU(Zf|;i zj!o-d9vk)w@88%zvVUc-z+R=f_DW9*FKVsq$-y1{XWpkrb#XDy0qwzN3TMV(FV5bJ zTm<_*_MJ)EcaozfrUGyL7L7d&d#J^K9B7OY+4jiAlG7oNz@82MAt%-#;vm|}hdQ}< zi~R?C5B5I9y6iVXe}32)gMD~s;eq`0+cU1tjSqtlJJ#pf(wuf~KS}IP+|Is%z0=5y zYuy~Tl$g!ezE<D!w(RU|cSeM|8nQ%RJpbLz`m<^60vGp>)ZQoDAof}Kr**;kEoyR{ zAK+Q9a|=hcx7|CoIDWE+`h6ca|3R-m=pMcvzk@9G-F3%Pd^NJ%zAoPujk(sH=bkm} zdfT{S9@9VTpVxO+T&#xwLT~Wf#9YV&e;SDkuUX6ev-I5W<qz+C+*P7=lKEMW*e5VH zepzdOcI5N;Y4Bzoc%jdoJ+Wq!PvTkj*{t2F#T@f2<BU2l)FDa`I42AN^(O*<5BCFg zxwnQAnX$1^>d$q*xTfEij4Hq4_tt)(&$y?S`?a-m^jY~8zqNL*e$j}lSoce7HN$u6 zO85L*_|91OD{JQklGC%YGWy-scX;fuetk2u+QkhXl-VY$SMQ8|=~<We>(gsc|E&J^ zJaBoh{{7mdXZ5>c$dz%i{n9i0+>?>z|Ep)axGS${9@qYYI<c|I4-6Wd<^IoYHAibv z{+yo7%IGy%{}yw&8z)vRc~Hi{!I~xG?rwwn4AFmLyf-7O;={;~I}OvL{KwN9GKOUI zjqS_dc5&%hT{4ICzdvJ8T<qXJ9eVZBzw~SucTalXtc<w!$%z$XANz2{_z`y;H+yp8 zF}NltRuHYnlM_8Z+jr~GtwUN`mz%Eb(Ifog`|IzHANq+O_vPN@`D^kw<Zsgd_}QP| zydbFny=Mb6KX-ZVhTLtrrMYFfweq6#;`5U7I_LGs>zy|&Z$jSmy!m;{^ETvd%PY+* z%i{+g-@s^dqjGbeo?1A)Ft>1i;ex{Dg=-2o6mBZqR=A_Ev~Yi6Sz#bpD;O1w4#o!K zgUy3U!PH>qV7Fk8;GMzV!2!Wx!JOcP;MCysU~X`Ja6xc+a7}PSa8qzwa7VB#7zou0 zMTMe6v7z`-^H5SKHPkuOEz~1)XQ+2*KxkMfCo~~6H8eex8=4<l5LzBu6WS2k6xtTr z5h@Ms50!-iMYW2eilU2Ri{gu#7bO*?7IiLKUbL;KtSGuTsd!lN)Z*#IxyAE~7Zk54 z7A>jz?OSyN@`vT;<WI<-o}ZgPKYu~K=hwFU9pYJ8exRUMK~zC>L2N;MLGyxc1w9Jx eEa+V@pkP=*PQiqNsRh%;(eK;$9QeP+f&T_bxyt<j literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pip/_vendor/distlib/util.py b/env/lib/python3.6/site-packages/pip/_vendor/distlib/util.py new file mode 100644 index 0000000..0b14a93 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/distlib/util.py @@ -0,0 +1,1755 @@ +# +# Copyright (C) 2012-2017 The Python Software Foundation. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +import codecs +from collections import deque +import contextlib +import csv +from glob import iglob as std_iglob +import io +import json +import logging +import os +import py_compile +import re +import socket +try: + import ssl +except ImportError: # pragma: no cover + ssl = None +import subprocess +import sys +import tarfile +import tempfile +import textwrap + +try: + import threading +except ImportError: # pragma: no cover + import dummy_threading as threading +import time + +from . 
import DistlibException +from .compat import (string_types, text_type, shutil, raw_input, StringIO, + cache_from_source, urlopen, urljoin, httplib, xmlrpclib, + splittype, HTTPHandler, BaseConfigurator, valid_ident, + Container, configparser, URLError, ZipFile, fsdecode, + unquote, urlparse) + +logger = logging.getLogger(__name__) + +# +# Requirement parsing code as per PEP 508 +# + +IDENTIFIER = re.compile(r'^([\w\.-]+)\s*') +VERSION_IDENTIFIER = re.compile(r'^([\w\.*+-]+)\s*') +COMPARE_OP = re.compile(r'^(<=?|>=?|={2,3}|[~!]=)\s*') +MARKER_OP = re.compile(r'^((<=?)|(>=?)|={2,3}|[~!]=|in|not\s+in)\s*') +OR = re.compile(r'^or\b\s*') +AND = re.compile(r'^and\b\s*') +NON_SPACE = re.compile(r'(\S+)\s*') +STRING_CHUNK = re.compile(r'([\s\w\.{}()*+#:;,/?!~`@$%^&=|<>\[\]-]+)') + + +def parse_marker(marker_string): + """ + Parse a marker string and return a dictionary containing a marker expression. + + The dictionary will contain keys "op", "lhs" and "rhs" for non-terminals in + the expression grammar, or strings. A string contained in quotes is to be + interpreted as a literal string, and a string not contained in quotes is a + variable (such as os_name). + """ + def marker_var(remaining): + # either identifier, or literal string + m = IDENTIFIER.match(remaining) + if m: + result = m.groups()[0] + remaining = remaining[m.end():] + elif not remaining: + raise SyntaxError('unexpected end of input') + else: + q = remaining[0] + if q not in '\'"': + raise SyntaxError('invalid expression: %s' % remaining) + oq = '\'"'.replace(q, '') + remaining = remaining[1:] + parts = [q] + while remaining: + # either a string chunk, or oq, or q to terminate + if remaining[0] == q: + break + elif remaining[0] == oq: + parts.append(oq) + remaining = remaining[1:] + else: + m = STRING_CHUNK.match(remaining) + if not m: + raise SyntaxError('error in string literal: %s' % remaining) + parts.append(m.groups()[0]) + remaining = remaining[m.end():] + else: + s = ''.join(parts) + raise SyntaxError('unterminated string: %s' % s) + parts.append(q) + result = ''.join(parts) + remaining = remaining[1:].lstrip() # skip past closing quote + return result, remaining + + def marker_expr(remaining): + if remaining and remaining[0] == '(': + result, remaining = marker(remaining[1:].lstrip()) + if remaining[0] != ')': + raise SyntaxError('unterminated parenthesis: %s' % remaining) + remaining = remaining[1:].lstrip() + else: + lhs, remaining = marker_var(remaining) + while remaining: + m = MARKER_OP.match(remaining) + if not m: + break + op = m.groups()[0] + remaining = remaining[m.end():] + rhs, remaining = marker_var(remaining) + lhs = {'op': op, 'lhs': lhs, 'rhs': rhs} + result = lhs + return result, remaining + + def marker_and(remaining): + lhs, remaining = marker_expr(remaining) + while remaining: + m = AND.match(remaining) + if not m: + break + remaining = remaining[m.end():] + rhs, remaining = marker_expr(remaining) + lhs = {'op': 'and', 'lhs': lhs, 'rhs': rhs} + return lhs, remaining + + def marker(remaining): + lhs, remaining = marker_and(remaining) + while remaining: + m = OR.match(remaining) + if not m: + break + remaining = remaining[m.end():] + rhs, remaining = marker_and(remaining) + lhs = {'op': 'or', 'lhs': lhs, 'rhs': rhs} + return lhs, remaining + + return marker(marker_string) + + +def parse_requirement(req): + """ + Parse a requirement passed in as a string. Return a Container + whose attributes contain the various parts of the requirement. 
+ """ + remaining = req.strip() + if not remaining or remaining.startswith('#'): + return None + m = IDENTIFIER.match(remaining) + if not m: + raise SyntaxError('name expected: %s' % remaining) + distname = m.groups()[0] + remaining = remaining[m.end():] + extras = mark_expr = versions = uri = None + if remaining and remaining[0] == '[': + i = remaining.find(']', 1) + if i < 0: + raise SyntaxError('unterminated extra: %s' % remaining) + s = remaining[1:i] + remaining = remaining[i + 1:].lstrip() + extras = [] + while s: + m = IDENTIFIER.match(s) + if not m: + raise SyntaxError('malformed extra: %s' % s) + extras.append(m.groups()[0]) + s = s[m.end():] + if not s: + break + if s[0] != ',': + raise SyntaxError('comma expected in extras: %s' % s) + s = s[1:].lstrip() + if not extras: + extras = None + if remaining: + if remaining[0] == '@': + # it's a URI + remaining = remaining[1:].lstrip() + m = NON_SPACE.match(remaining) + if not m: + raise SyntaxError('invalid URI: %s' % remaining) + uri = m.groups()[0] + t = urlparse(uri) + # there are issues with Python and URL parsing, so this test + # is a bit crude. See bpo-20271, bpo-23505. Python doesn't + # always parse invalid URLs correctly - it should raise + # exceptions for malformed URLs + if not (t.scheme and t.netloc): + raise SyntaxError('Invalid URL: %s' % uri) + remaining = remaining[m.end():].lstrip() + else: + + def get_versions(ver_remaining): + """ + Return a list of operator, version tuples if any are + specified, else None. + """ + m = COMPARE_OP.match(ver_remaining) + versions = None + if m: + versions = [] + while True: + op = m.groups()[0] + ver_remaining = ver_remaining[m.end():] + m = VERSION_IDENTIFIER.match(ver_remaining) + if not m: + raise SyntaxError('invalid version: %s' % ver_remaining) + v = m.groups()[0] + versions.append((op, v)) + ver_remaining = ver_remaining[m.end():] + if not ver_remaining or ver_remaining[0] != ',': + break + ver_remaining = ver_remaining[1:].lstrip() + m = COMPARE_OP.match(ver_remaining) + if not m: + raise SyntaxError('invalid constraint: %s' % ver_remaining) + if not versions: + versions = None + return versions, ver_remaining + + if remaining[0] != '(': + versions, remaining = get_versions(remaining) + else: + i = remaining.find(')', 1) + if i < 0: + raise SyntaxError('unterminated parenthesis: %s' % remaining) + s = remaining[1:i] + remaining = remaining[i + 1:].lstrip() + # As a special diversion from PEP 508, allow a version number + # a.b.c in parentheses as a synonym for ~= a.b.c (because this + # is allowed in earlier PEPs) + if COMPARE_OP.match(s): + versions, _ = get_versions(s) + else: + m = VERSION_IDENTIFIER.match(s) + if not m: + raise SyntaxError('invalid constraint: %s' % s) + v = m.groups()[0] + s = s[m.end():].lstrip() + if s: + raise SyntaxError('invalid constraint: %s' % s) + versions = [('~=', v)] + + if remaining: + if remaining[0] != ';': + raise SyntaxError('invalid requirement: %s' % remaining) + remaining = remaining[1:].lstrip() + + mark_expr, remaining = parse_marker(remaining) + + if remaining and remaining[0] != '#': + raise SyntaxError('unexpected trailing data: %s' % remaining) + + if not versions: + rs = distname + else: + rs = '%s %s' % (distname, ', '.join(['%s %s' % con for con in versions])) + return Container(name=distname, extras=extras, constraints=versions, + marker=mark_expr, url=uri, requirement=rs) + + +def get_resources_dests(resources_root, rules): + """Find destinations for resources files""" + + def get_rel_path(root, path): + # normalizes and 
returns a lstripped-/-separated path + root = root.replace(os.path.sep, '/') + path = path.replace(os.path.sep, '/') + assert path.startswith(root) + return path[len(root):].lstrip('/') + + destinations = {} + for base, suffix, dest in rules: + prefix = os.path.join(resources_root, base) + for abs_base in iglob(prefix): + abs_glob = os.path.join(abs_base, suffix) + for abs_path in iglob(abs_glob): + resource_file = get_rel_path(resources_root, abs_path) + if dest is None: # remove the entry if it was here + destinations.pop(resource_file, None) + else: + rel_path = get_rel_path(abs_base, abs_path) + rel_dest = dest.replace(os.path.sep, '/').rstrip('/') + destinations[resource_file] = rel_dest + '/' + rel_path + return destinations + + +def in_venv(): + if hasattr(sys, 'real_prefix'): + # virtualenv venvs + result = True + else: + # PEP 405 venvs + result = sys.prefix != getattr(sys, 'base_prefix', sys.prefix) + return result + + +def get_executable(): +# The __PYVENV_LAUNCHER__ dance is apparently no longer needed, as +# changes to the stub launcher mean that sys.executable always points +# to the stub on OS X +# if sys.platform == 'darwin' and ('__PYVENV_LAUNCHER__' +# in os.environ): +# result = os.environ['__PYVENV_LAUNCHER__'] +# else: +# result = sys.executable +# return result + result = os.path.normcase(sys.executable) + if not isinstance(result, text_type): + result = fsdecode(result) + return result + + +def proceed(prompt, allowed_chars, error_prompt=None, default=None): + p = prompt + while True: + s = raw_input(p) + p = prompt + if not s and default: + s = default + if s: + c = s[0].lower() + if c in allowed_chars: + break + if error_prompt: + p = '%c: %s\n%s' % (c, error_prompt, prompt) + return c + + +def extract_by_key(d, keys): + if isinstance(keys, string_types): + keys = keys.split() + result = {} + for key in keys: + if key in d: + result[key] = d[key] + return result + +def read_exports(stream): + if sys.version_info[0] >= 3: + # needs to be a text stream + stream = codecs.getreader('utf-8')(stream) + # Try to load as JSON, falling back on legacy format + data = stream.read() + stream = StringIO(data) + try: + jdata = json.load(stream) + result = jdata['extensions']['python.exports']['exports'] + for group, entries in result.items(): + for k, v in entries.items(): + s = '%s = %s' % (k, v) + entry = get_export_entry(s) + assert entry is not None + entries[k] = entry + return result + except Exception: + stream.seek(0, 0) + + def read_stream(cp, stream): + if hasattr(cp, 'read_file'): + cp.read_file(stream) + else: + cp.readfp(stream) + + cp = configparser.ConfigParser() + try: + read_stream(cp, stream) + except configparser.MissingSectionHeaderError: + stream.close() + data = textwrap.dedent(data) + stream = StringIO(data) + read_stream(cp, stream) + + result = {} + for key in cp.sections(): + result[key] = entries = {} + for name, value in cp.items(key): + s = '%s = %s' % (name, value) + entry = get_export_entry(s) + assert entry is not None + #entry.dist = self + entries[name] = entry + return result + + +def write_exports(exports, stream): + if sys.version_info[0] >= 3: + # needs to be a text stream + stream = codecs.getwriter('utf-8')(stream) + cp = configparser.ConfigParser() + for k, v in exports.items(): + # TODO check k, v for valid values + cp.add_section(k) + for entry in v.values(): + if entry.suffix is None: + s = entry.prefix + else: + s = '%s:%s' % (entry.prefix, entry.suffix) + if entry.flags: + s = '%s [%s]' % (s, ', '.join(entry.flags)) + cp.set(k, 
entry.name, s) + cp.write(stream) + + +@contextlib.contextmanager +def tempdir(): + td = tempfile.mkdtemp() + try: + yield td + finally: + shutil.rmtree(td) + +@contextlib.contextmanager +def chdir(d): + cwd = os.getcwd() + try: + os.chdir(d) + yield + finally: + os.chdir(cwd) + + +@contextlib.contextmanager +def socket_timeout(seconds=15): + cto = socket.getdefaulttimeout() + try: + socket.setdefaulttimeout(seconds) + yield + finally: + socket.setdefaulttimeout(cto) + + +class cached_property(object): + def __init__(self, func): + self.func = func + #for attr in ('__name__', '__module__', '__doc__'): + # setattr(self, attr, getattr(func, attr, None)) + + def __get__(self, obj, cls=None): + if obj is None: + return self + value = self.func(obj) + object.__setattr__(obj, self.func.__name__, value) + #obj.__dict__[self.func.__name__] = value = self.func(obj) + return value + +def convert_path(pathname): + """Return 'pathname' as a name that will work on the native filesystem. + + The path is split on '/' and put back together again using the current + directory separator. Needed because filenames in the setup script are + always supplied in Unix style, and have to be converted to the local + convention before we can actually use them in the filesystem. Raises + ValueError on non-Unix-ish systems if 'pathname' either starts or + ends with a slash. + """ + if os.sep == '/': + return pathname + if not pathname: + return pathname + if pathname[0] == '/': + raise ValueError("path '%s' cannot be absolute" % pathname) + if pathname[-1] == '/': + raise ValueError("path '%s' cannot end with '/'" % pathname) + + paths = pathname.split('/') + while os.curdir in paths: + paths.remove(os.curdir) + if not paths: + return os.curdir + return os.path.join(*paths) + + +class FileOperator(object): + def __init__(self, dry_run=False): + self.dry_run = dry_run + self.ensured = set() + self._init_record() + + def _init_record(self): + self.record = False + self.files_written = set() + self.dirs_created = set() + + def record_as_written(self, path): + if self.record: + self.files_written.add(path) + + def newer(self, source, target): + """Tell if the target is newer than the source. + + Returns true if 'source' exists and is more recently modified than + 'target', or if 'source' exists and 'target' doesn't. + + Returns false if both exist and 'target' is the same age or younger + than 'source'. Raise PackagingFileError if 'source' does not exist. + + Note that this test is not very accurate: files created in the same + second will have the same "age". + """ + if not os.path.exists(source): + raise DistlibException("file '%r' does not exist" % + os.path.abspath(source)) + if not os.path.exists(target): + return True + + return os.stat(source).st_mtime > os.stat(target).st_mtime + + def copy_file(self, infile, outfile, check=True): + """Copy a file respecting dry-run and force flags. 
+ """ + self.ensure_dir(os.path.dirname(outfile)) + logger.info('Copying %s to %s', infile, outfile) + if not self.dry_run: + msg = None + if check: + if os.path.islink(outfile): + msg = '%s is a symlink' % outfile + elif os.path.exists(outfile) and not os.path.isfile(outfile): + msg = '%s is a non-regular file' % outfile + if msg: + raise ValueError(msg + ' which would be overwritten') + shutil.copyfile(infile, outfile) + self.record_as_written(outfile) + + def copy_stream(self, instream, outfile, encoding=None): + assert not os.path.isdir(outfile) + self.ensure_dir(os.path.dirname(outfile)) + logger.info('Copying stream %s to %s', instream, outfile) + if not self.dry_run: + if encoding is None: + outstream = open(outfile, 'wb') + else: + outstream = codecs.open(outfile, 'w', encoding=encoding) + try: + shutil.copyfileobj(instream, outstream) + finally: + outstream.close() + self.record_as_written(outfile) + + def write_binary_file(self, path, data): + self.ensure_dir(os.path.dirname(path)) + if not self.dry_run: + with open(path, 'wb') as f: + f.write(data) + self.record_as_written(path) + + def write_text_file(self, path, data, encoding): + self.ensure_dir(os.path.dirname(path)) + if not self.dry_run: + with open(path, 'wb') as f: + f.write(data.encode(encoding)) + self.record_as_written(path) + + def set_mode(self, bits, mask, files): + if os.name == 'posix' or (os.name == 'java' and os._name == 'posix'): + # Set the executable bits (owner, group, and world) on + # all the files specified. + for f in files: + if self.dry_run: + logger.info("changing mode of %s", f) + else: + mode = (os.stat(f).st_mode | bits) & mask + logger.info("changing mode of %s to %o", f, mode) + os.chmod(f, mode) + + set_executable_mode = lambda s, f: s.set_mode(0o555, 0o7777, f) + + def ensure_dir(self, path): + path = os.path.abspath(path) + if path not in self.ensured and not os.path.exists(path): + self.ensured.add(path) + d, f = os.path.split(path) + self.ensure_dir(d) + logger.info('Creating %s' % path) + if not self.dry_run: + os.mkdir(path) + if self.record: + self.dirs_created.add(path) + + def byte_compile(self, path, optimize=False, force=False, prefix=None): + dpath = cache_from_source(path, not optimize) + logger.info('Byte-compiling %s to %s', path, dpath) + if not self.dry_run: + if force or self.newer(path, dpath): + if not prefix: + diagpath = None + else: + assert path.startswith(prefix) + diagpath = path[len(prefix):] + py_compile.compile(path, dpath, diagpath, True) # raise error + self.record_as_written(dpath) + return dpath + + def ensure_removed(self, path): + if os.path.exists(path): + if os.path.isdir(path) and not os.path.islink(path): + logger.debug('Removing directory tree at %s', path) + if not self.dry_run: + shutil.rmtree(path) + if self.record: + if path in self.dirs_created: + self.dirs_created.remove(path) + else: + if os.path.islink(path): + s = 'link' + else: + s = 'file' + logger.debug('Removing %s %s', s, path) + if not self.dry_run: + os.remove(path) + if self.record: + if path in self.files_written: + self.files_written.remove(path) + + def is_writable(self, path): + result = False + while not result: + if os.path.exists(path): + result = os.access(path, os.W_OK) + break + parent = os.path.dirname(path) + if parent == path: + break + path = parent + return result + + def commit(self): + """ + Commit recorded changes, turn off recording, return + changes. 
+ """ + assert self.record + result = self.files_written, self.dirs_created + self._init_record() + return result + + def rollback(self): + if not self.dry_run: + for f in list(self.files_written): + if os.path.exists(f): + os.remove(f) + # dirs should all be empty now, except perhaps for + # __pycache__ subdirs + # reverse so that subdirs appear before their parents + dirs = sorted(self.dirs_created, reverse=True) + for d in dirs: + flist = os.listdir(d) + if flist: + assert flist == ['__pycache__'] + sd = os.path.join(d, flist[0]) + os.rmdir(sd) + os.rmdir(d) # should fail if non-empty + self._init_record() + +def resolve(module_name, dotted_path): + if module_name in sys.modules: + mod = sys.modules[module_name] + else: + mod = __import__(module_name) + if dotted_path is None: + result = mod + else: + parts = dotted_path.split('.') + result = getattr(mod, parts.pop(0)) + for p in parts: + result = getattr(result, p) + return result + + +class ExportEntry(object): + def __init__(self, name, prefix, suffix, flags): + self.name = name + self.prefix = prefix + self.suffix = suffix + self.flags = flags + + @cached_property + def value(self): + return resolve(self.prefix, self.suffix) + + def __repr__(self): # pragma: no cover + return '<ExportEntry %s = %s:%s %s>' % (self.name, self.prefix, + self.suffix, self.flags) + + def __eq__(self, other): + if not isinstance(other, ExportEntry): + result = False + else: + result = (self.name == other.name and + self.prefix == other.prefix and + self.suffix == other.suffix and + self.flags == other.flags) + return result + + __hash__ = object.__hash__ + + +ENTRY_RE = re.compile(r'''(?P<name>(\w|[-.+])+) + \s*=\s*(?P<callable>(\w+)([:\.]\w+)*) + \s*(\[\s*(?P<flags>\w+(=\w+)?(,\s*\w+(=\w+)?)*)\s*\])? + ''', re.VERBOSE) + +def get_export_entry(specification): + m = ENTRY_RE.search(specification) + if not m: + result = None + if '[' in specification or ']' in specification: + raise DistlibException("Invalid specification " + "'%s'" % specification) + else: + d = m.groupdict() + name = d['name'] + path = d['callable'] + colons = path.count(':') + if colons == 0: + prefix, suffix = path, None + else: + if colons != 1: + raise DistlibException("Invalid specification " + "'%s'" % specification) + prefix, suffix = path.split(':') + flags = d['flags'] + if flags is None: + if '[' in specification or ']' in specification: + raise DistlibException("Invalid specification " + "'%s'" % specification) + flags = [] + else: + flags = [f.strip() for f in flags.split(',')] + result = ExportEntry(name, prefix, suffix, flags) + return result + + +def get_cache_base(suffix=None): + """ + Return the default base location for distlib caches. If the directory does + not exist, it is created. Use the suffix provided for the base directory, + and default to '.distlib' if it isn't provided. + + On Windows, if LOCALAPPDATA is defined in the environment, then it is + assumed to be a directory, and will be the parent directory of the result. + On POSIX, and on Windows if LOCALAPPDATA is not defined, the user's home + directory - using os.expanduser('~') - will be the parent directory of + the result. + + The result is just the directory '.distlib' in the parent directory as + determined above, or with the name specified with ``suffix``. 
+ """ + if suffix is None: + suffix = '.distlib' + if os.name == 'nt' and 'LOCALAPPDATA' in os.environ: + result = os.path.expandvars('$localappdata') + else: + # Assume posix, or old Windows + result = os.path.expanduser('~') + # we use 'isdir' instead of 'exists', because we want to + # fail if there's a file with that name + if os.path.isdir(result): + usable = os.access(result, os.W_OK) + if not usable: + logger.warning('Directory exists but is not writable: %s', result) + else: + try: + os.makedirs(result) + usable = True + except OSError: + logger.warning('Unable to create %s', result, exc_info=True) + usable = False + if not usable: + result = tempfile.mkdtemp() + logger.warning('Default location unusable, using %s', result) + return os.path.join(result, suffix) + + +def path_to_cache_dir(path): + """ + Convert an absolute path to a directory name for use in a cache. + + The algorithm used is: + + #. On Windows, any ``':'`` in the drive is replaced with ``'---'``. + #. Any occurrence of ``os.sep`` is replaced with ``'--'``. + #. ``'.cache'`` is appended. + """ + d, p = os.path.splitdrive(os.path.abspath(path)) + if d: + d = d.replace(':', '---') + p = p.replace(os.sep, '--') + return d + p + '.cache' + + +def ensure_slash(s): + if not s.endswith('/'): + return s + '/' + return s + + +def parse_credentials(netloc): + username = password = None + if '@' in netloc: + prefix, netloc = netloc.split('@', 1) + if ':' not in prefix: + username = prefix + else: + username, password = prefix.split(':', 1) + return username, password, netloc + + +def get_process_umask(): + result = os.umask(0o22) + os.umask(result) + return result + +def is_string_sequence(seq): + result = True + i = None + for i, s in enumerate(seq): + if not isinstance(s, string_types): + result = False + break + assert i is not None + return result + +PROJECT_NAME_AND_VERSION = re.compile('([a-z0-9_]+([.-][a-z_][a-z0-9_]*)*)-' + '([a-z0-9_.+-]+)', re.I) +PYTHON_VERSION = re.compile(r'-py(\d\.?\d?)') + + +def split_filename(filename, project_name=None): + """ + Extract name, version, python version from a filename (no extension) + + Return name, version, pyver or None + """ + result = None + pyver = None + filename = unquote(filename).replace(' ', '-') + m = PYTHON_VERSION.search(filename) + if m: + pyver = m.group(1) + filename = filename[:m.start()] + if project_name and len(filename) > len(project_name) + 1: + m = re.match(re.escape(project_name) + r'\b', filename) + if m: + n = m.end() + result = filename[:n], filename[n + 1:], pyver + if result is None: + m = PROJECT_NAME_AND_VERSION.match(filename) + if m: + result = m.group(1), m.group(3), pyver + return result + +# Allow spaces in name because of legacy dists like "Twisted Core" +NAME_VERSION_RE = re.compile(r'(?P<name>[\w .-]+)\s*' + r'\(\s*(?P<ver>[^\s)]+)\)$') + +def parse_name_and_version(p): + """ + A utility method used to get name and version from a string. + + From e.g. a Provides-Dist value. + + :param p: A value in a form 'foo (1.0)' + :return: The name and version as a tuple. 
+ """ + m = NAME_VERSION_RE.match(p) + if not m: + raise DistlibException('Ill-formed name/version string: \'%s\'' % p) + d = m.groupdict() + return d['name'].strip().lower(), d['ver'] + +def get_extras(requested, available): + result = set() + requested = set(requested or []) + available = set(available or []) + if '*' in requested: + requested.remove('*') + result |= available + for r in requested: + if r == '-': + result.add(r) + elif r.startswith('-'): + unwanted = r[1:] + if unwanted not in available: + logger.warning('undeclared extra: %s' % unwanted) + if unwanted in result: + result.remove(unwanted) + else: + if r not in available: + logger.warning('undeclared extra: %s' % r) + result.add(r) + return result +# +# Extended metadata functionality +# + +def _get_external_data(url): + result = {} + try: + # urlopen might fail if it runs into redirections, + # because of Python issue #13696. Fixed in locators + # using a custom redirect handler. + resp = urlopen(url) + headers = resp.info() + ct = headers.get('Content-Type') + if not ct.startswith('application/json'): + logger.debug('Unexpected response for JSON request: %s', ct) + else: + reader = codecs.getreader('utf-8')(resp) + #data = reader.read().decode('utf-8') + #result = json.loads(data) + result = json.load(reader) + except Exception as e: + logger.exception('Failed to get external data for %s: %s', url, e) + return result + +_external_data_base_url = 'https://www.red-dove.com/pypi/projects/' + +def get_project_data(name): + url = '%s/%s/project.json' % (name[0].upper(), name) + url = urljoin(_external_data_base_url, url) + result = _get_external_data(url) + return result + +def get_package_data(name, version): + url = '%s/%s/package-%s.json' % (name[0].upper(), name, version) + url = urljoin(_external_data_base_url, url) + return _get_external_data(url) + + +class Cache(object): + """ + A class implementing a cache for resources that need to live in the file system + e.g. shared libraries. This class was moved from resources to here because it + could be used by other modules, e.g. the wheel module. + """ + + def __init__(self, base): + """ + Initialise an instance. + + :param base: The base directory where the cache should be located. + """ + # we use 'isdir' instead of 'exists', because we want to + # fail if there's a file with that name + if not os.path.isdir(base): # pragma: no cover + os.makedirs(base) + if (os.stat(base).st_mode & 0o77) != 0: + logger.warning('Directory \'%s\' is not private', base) + self.base = os.path.abspath(os.path.normpath(base)) + + def prefix_to_dir(self, prefix): + """ + Converts a resource prefix to a directory name in the cache. + """ + return path_to_cache_dir(prefix) + + def clear(self): + """ + Clear the cache. + """ + not_removed = [] + for fn in os.listdir(self.base): + fn = os.path.join(self.base, fn) + try: + if os.path.islink(fn) or os.path.isfile(fn): + os.remove(fn) + elif os.path.isdir(fn): + shutil.rmtree(fn) + except Exception: + not_removed.append(fn) + return not_removed + + +class EventMixin(object): + """ + A very simple publish/subscribe system. + """ + def __init__(self): + self._subscribers = {} + + def add(self, event, subscriber, append=True): + """ + Add a subscriber for an event. + + :param event: The name of an event. + :param subscriber: The subscriber to be added (and called when the + event is published). + :param append: Whether to append or prepend the subscriber to an + existing subscriber list for the event. 
+ """ + subs = self._subscribers + if event not in subs: + subs[event] = deque([subscriber]) + else: + sq = subs[event] + if append: + sq.append(subscriber) + else: + sq.appendleft(subscriber) + + def remove(self, event, subscriber): + """ + Remove a subscriber for an event. + + :param event: The name of an event. + :param subscriber: The subscriber to be removed. + """ + subs = self._subscribers + if event not in subs: + raise ValueError('No subscribers: %r' % event) + subs[event].remove(subscriber) + + def get_subscribers(self, event): + """ + Return an iterator for the subscribers for an event. + :param event: The event to return subscribers for. + """ + return iter(self._subscribers.get(event, ())) + + def publish(self, event, *args, **kwargs): + """ + Publish a event and return a list of values returned by its + subscribers. + + :param event: The event to publish. + :param args: The positional arguments to pass to the event's + subscribers. + :param kwargs: The keyword arguments to pass to the event's + subscribers. + """ + result = [] + for subscriber in self.get_subscribers(event): + try: + value = subscriber(event, *args, **kwargs) + except Exception: + logger.exception('Exception during event publication') + value = None + result.append(value) + logger.debug('publish %s: args = %s, kwargs = %s, result = %s', + event, args, kwargs, result) + return result + +# +# Simple sequencing +# +class Sequencer(object): + def __init__(self): + self._preds = {} + self._succs = {} + self._nodes = set() # nodes with no preds/succs + + def add_node(self, node): + self._nodes.add(node) + + def remove_node(self, node, edges=False): + if node in self._nodes: + self._nodes.remove(node) + if edges: + for p in set(self._preds.get(node, ())): + self.remove(p, node) + for s in set(self._succs.get(node, ())): + self.remove(node, s) + # Remove empties + for k, v in list(self._preds.items()): + if not v: + del self._preds[k] + for k, v in list(self._succs.items()): + if not v: + del self._succs[k] + + def add(self, pred, succ): + assert pred != succ + self._preds.setdefault(succ, set()).add(pred) + self._succs.setdefault(pred, set()).add(succ) + + def remove(self, pred, succ): + assert pred != succ + try: + preds = self._preds[succ] + succs = self._succs[pred] + except KeyError: # pragma: no cover + raise ValueError('%r not a successor of anything' % succ) + try: + preds.remove(pred) + succs.remove(succ) + except KeyError: # pragma: no cover + raise ValueError('%r not a successor of %r' % (succ, pred)) + + def is_step(self, step): + return (step in self._preds or step in self._succs or + step in self._nodes) + + def get_steps(self, final): + if not self.is_step(final): + raise ValueError('Unknown: %r' % final) + result = [] + todo = [] + seen = set() + todo.append(final) + while todo: + step = todo.pop(0) + if step in seen: + # if a step was already seen, + # move it to the end (so it will appear earlier + # when reversed on return) ... 
but not for the + # final step, as that would be confusing for + # users + if step != final: + result.remove(step) + result.append(step) + else: + seen.add(step) + result.append(step) + preds = self._preds.get(step, ()) + todo.extend(preds) + return reversed(result) + + @property + def strong_connections(self): + #http://en.wikipedia.org/wiki/Tarjan%27s_strongly_connected_components_algorithm + index_counter = [0] + stack = [] + lowlinks = {} + index = {} + result = [] + + graph = self._succs + + def strongconnect(node): + # set the depth index for this node to the smallest unused index + index[node] = index_counter[0] + lowlinks[node] = index_counter[0] + index_counter[0] += 1 + stack.append(node) + + # Consider successors + try: + successors = graph[node] + except Exception: + successors = [] + for successor in successors: + if successor not in lowlinks: + # Successor has not yet been visited + strongconnect(successor) + lowlinks[node] = min(lowlinks[node],lowlinks[successor]) + elif successor in stack: + # the successor is in the stack and hence in the current + # strongly connected component (SCC) + lowlinks[node] = min(lowlinks[node],index[successor]) + + # If `node` is a root node, pop the stack and generate an SCC + if lowlinks[node] == index[node]: + connected_component = [] + + while True: + successor = stack.pop() + connected_component.append(successor) + if successor == node: break + component = tuple(connected_component) + # storing the result + result.append(component) + + for node in graph: + if node not in lowlinks: + strongconnect(node) + + return result + + @property + def dot(self): + result = ['digraph G {'] + for succ in self._preds: + preds = self._preds[succ] + for pred in preds: + result.append(' %s -> %s;' % (pred, succ)) + for node in self._nodes: + result.append(' %s;' % node) + result.append('}') + return '\n'.join(result) + +# +# Unarchiving functionality for zip, tar, tgz, tbz, whl +# + +ARCHIVE_EXTENSIONS = ('.tar.gz', '.tar.bz2', '.tar', '.zip', + '.tgz', '.tbz', '.whl') + +def unarchive(archive_filename, dest_dir, format=None, check=True): + + def check_path(path): + if not isinstance(path, text_type): + path = path.decode('utf-8') + p = os.path.abspath(os.path.join(dest_dir, path)) + if not p.startswith(dest_dir) or p[plen] != os.sep: + raise ValueError('path outside destination: %r' % p) + + dest_dir = os.path.abspath(dest_dir) + plen = len(dest_dir) + archive = None + if format is None: + if archive_filename.endswith(('.zip', '.whl')): + format = 'zip' + elif archive_filename.endswith(('.tar.gz', '.tgz')): + format = 'tgz' + mode = 'r:gz' + elif archive_filename.endswith(('.tar.bz2', '.tbz')): + format = 'tbz' + mode = 'r:bz2' + elif archive_filename.endswith('.tar'): + format = 'tar' + mode = 'r' + else: # pragma: no cover + raise ValueError('Unknown format for %r' % archive_filename) + try: + if format == 'zip': + archive = ZipFile(archive_filename, 'r') + if check: + names = archive.namelist() + for name in names: + check_path(name) + else: + archive = tarfile.open(archive_filename, mode) + if check: + names = archive.getnames() + for name in names: + check_path(name) + if format != 'zip' and sys.version_info[0] < 3: + # See Python issue 17153. If the dest path contains Unicode, + # tarfile extraction fails on Python 2.x if a member path name + # contains non-ASCII characters - it leads to an implicit + # bytes -> unicode conversion using ASCII to decode. 
+ for tarinfo in archive.getmembers(): + if not isinstance(tarinfo.name, text_type): + tarinfo.name = tarinfo.name.decode('utf-8') + archive.extractall(dest_dir) + + finally: + if archive: + archive.close() + + +def zip_dir(directory): + """zip a directory tree into a BytesIO object""" + result = io.BytesIO() + dlen = len(directory) + with ZipFile(result, "w") as zf: + for root, dirs, files in os.walk(directory): + for name in files: + full = os.path.join(root, name) + rel = root[dlen:] + dest = os.path.join(rel, name) + zf.write(full, dest) + return result + +# +# Simple progress bar +# + +UNITS = ('', 'K', 'M', 'G','T','P') + + +class Progress(object): + unknown = 'UNKNOWN' + + def __init__(self, minval=0, maxval=100): + assert maxval is None or maxval >= minval + self.min = self.cur = minval + self.max = maxval + self.started = None + self.elapsed = 0 + self.done = False + + def update(self, curval): + assert self.min <= curval + assert self.max is None or curval <= self.max + self.cur = curval + now = time.time() + if self.started is None: + self.started = now + else: + self.elapsed = now - self.started + + def increment(self, incr): + assert incr >= 0 + self.update(self.cur + incr) + + def start(self): + self.update(self.min) + return self + + def stop(self): + if self.max is not None: + self.update(self.max) + self.done = True + + @property + def maximum(self): + return self.unknown if self.max is None else self.max + + @property + def percentage(self): + if self.done: + result = '100 %' + elif self.max is None: + result = ' ?? %' + else: + v = 100.0 * (self.cur - self.min) / (self.max - self.min) + result = '%3d %%' % v + return result + + def format_duration(self, duration): + if (duration <= 0) and self.max is None or self.cur == self.min: + result = '??:??:??' 
+ #elif duration < 1: + # result = '--:--:--' + else: + result = time.strftime('%H:%M:%S', time.gmtime(duration)) + return result + + @property + def ETA(self): + if self.done: + prefix = 'Done' + t = self.elapsed + #import pdb; pdb.set_trace() + else: + prefix = 'ETA ' + if self.max is None: + t = -1 + elif self.elapsed == 0 or (self.cur == self.min): + t = 0 + else: + #import pdb; pdb.set_trace() + t = float(self.max - self.min) + t /= self.cur - self.min + t = (t - 1) * self.elapsed + return '%s: %s' % (prefix, self.format_duration(t)) + + @property + def speed(self): + if self.elapsed == 0: + result = 0.0 + else: + result = (self.cur - self.min) / self.elapsed + for unit in UNITS: + if result < 1000: + break + result /= 1000.0 + return '%d %sB/s' % (result, unit) + +# +# Glob functionality +# + +RICH_GLOB = re.compile(r'\{([^}]*)\}') +_CHECK_RECURSIVE_GLOB = re.compile(r'[^/\\,{]\*\*|\*\*[^/\\,}]') +_CHECK_MISMATCH_SET = re.compile(r'^[^{]*\}|\{[^}]*$') + + +def iglob(path_glob): + """Extended globbing function that supports ** and {opt1,opt2,opt3}.""" + if _CHECK_RECURSIVE_GLOB.search(path_glob): + msg = """invalid glob %r: recursive glob "**" must be used alone""" + raise ValueError(msg % path_glob) + if _CHECK_MISMATCH_SET.search(path_glob): + msg = """invalid glob %r: mismatching set marker '{' or '}'""" + raise ValueError(msg % path_glob) + return _iglob(path_glob) + + +def _iglob(path_glob): + rich_path_glob = RICH_GLOB.split(path_glob, 1) + if len(rich_path_glob) > 1: + assert len(rich_path_glob) == 3, rich_path_glob + prefix, set, suffix = rich_path_glob + for item in set.split(','): + for path in _iglob(''.join((prefix, item, suffix))): + yield path + else: + if '**' not in path_glob: + for item in std_iglob(path_glob): + yield item + else: + prefix, radical = path_glob.split('**', 1) + if prefix == '': + prefix = '.' 
+ if radical == '': + radical = '*' + else: + # we support both + radical = radical.lstrip('/') + radical = radical.lstrip('\\') + for path, dir, files in os.walk(prefix): + path = os.path.normpath(path) + for fn in _iglob(os.path.join(path, radical)): + yield fn + +if ssl: + from .compat import (HTTPSHandler as BaseHTTPSHandler, match_hostname, + CertificateError) + + +# +# HTTPSConnection which verifies certificates/matches domains +# + + class HTTPSConnection(httplib.HTTPSConnection): + ca_certs = None # set this to the path to the certs file (.pem) + check_domain = True # only used if ca_certs is not None + + # noinspection PyPropertyAccess + def connect(self): + sock = socket.create_connection((self.host, self.port), self.timeout) + if getattr(self, '_tunnel_host', False): + self.sock = sock + self._tunnel() + + if not hasattr(ssl, 'SSLContext'): + # For 2.x + if self.ca_certs: + cert_reqs = ssl.CERT_REQUIRED + else: + cert_reqs = ssl.CERT_NONE + self.sock = ssl.wrap_socket(sock, self.key_file, self.cert_file, + cert_reqs=cert_reqs, + ssl_version=ssl.PROTOCOL_SSLv23, + ca_certs=self.ca_certs) + else: # pragma: no cover + context = ssl.SSLContext(ssl.PROTOCOL_SSLv23) + context.options |= ssl.OP_NO_SSLv2 + if self.cert_file: + context.load_cert_chain(self.cert_file, self.key_file) + kwargs = {} + if self.ca_certs: + context.verify_mode = ssl.CERT_REQUIRED + context.load_verify_locations(cafile=self.ca_certs) + if getattr(ssl, 'HAS_SNI', False): + kwargs['server_hostname'] = self.host + self.sock = context.wrap_socket(sock, **kwargs) + if self.ca_certs and self.check_domain: + try: + match_hostname(self.sock.getpeercert(), self.host) + logger.debug('Host verified: %s', self.host) + except CertificateError: # pragma: no cover + self.sock.shutdown(socket.SHUT_RDWR) + self.sock.close() + raise + + class HTTPSHandler(BaseHTTPSHandler): + def __init__(self, ca_certs, check_domain=True): + BaseHTTPSHandler.__init__(self) + self.ca_certs = ca_certs + self.check_domain = check_domain + + def _conn_maker(self, *args, **kwargs): + """ + This is called to create a connection instance. Normally you'd + pass a connection class to do_open, but it doesn't actually check for + a class, and just expects a callable. As long as we behave just as a + constructor would have, we should be OK. If it ever changes so that + we *must* pass a class, we'll create an UnsafeHTTPSConnection class + which just sets check_domain to False in the class definition, and + choose which one to pass to do_open. + """ + result = HTTPSConnection(*args, **kwargs) + if self.ca_certs: + result.ca_certs = self.ca_certs + result.check_domain = self.check_domain + return result + + def https_open(self, req): + try: + return self.do_open(self._conn_maker, req) + except URLError as e: + if 'certificate verify failed' in str(e.reason): + raise CertificateError('Unable to verify server certificate ' + 'for %s' % req.host) + else: + raise + + # + # To prevent against mixing HTTP traffic with HTTPS (examples: A Man-In-The- + # Middle proxy using HTTP listens on port 443, or an index mistakenly serves + # HTML containing a http://xyz link when it should be https://xyz), + # you can use the following handler class, which does not allow HTTP traffic. + # + # It works by inheriting from HTTPHandler - so build_opener won't add a + # handler for HTTP itself. 
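
#
# Illustrative usage sketch (editor's annotation, not part of the vendored
# file): plugging the certificate-verifying HTTPSHandler defined above (only
# available under the `if ssl:` guard) into a standard urllib opener. The CA
# bundle path and URL are hypothetical; build_opener comes from the standard
# library, and the handler raises CertificateError when the server
# certificate cannot be verified.
#
from urllib.request import build_opener

opener = build_opener(HTTPSHandler(ca_certs='/path/to/cacert.pem'))
resp = opener.open('https://example.com/')
body = resp.read()
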
+ # + class HTTPSOnlyHandler(HTTPSHandler, HTTPHandler): + def http_open(self, req): + raise URLError('Unexpected HTTP request on what should be a secure ' + 'connection: %s' % req) + +# +# XML-RPC with timeouts +# + +_ver_info = sys.version_info[:2] + +if _ver_info == (2, 6): + class HTTP(httplib.HTTP): + def __init__(self, host='', port=None, **kwargs): + if port == 0: # 0 means use port 0, not the default port + port = None + self._setup(self._connection_class(host, port, **kwargs)) + + + if ssl: + class HTTPS(httplib.HTTPS): + def __init__(self, host='', port=None, **kwargs): + if port == 0: # 0 means use port 0, not the default port + port = None + self._setup(self._connection_class(host, port, **kwargs)) + + +class Transport(xmlrpclib.Transport): + def __init__(self, timeout, use_datetime=0): + self.timeout = timeout + xmlrpclib.Transport.__init__(self, use_datetime) + + def make_connection(self, host): + h, eh, x509 = self.get_host_info(host) + if _ver_info == (2, 6): + result = HTTP(h, timeout=self.timeout) + else: + if not self._connection or host != self._connection[0]: + self._extra_headers = eh + self._connection = host, httplib.HTTPConnection(h) + result = self._connection[1] + return result + +if ssl: + class SafeTransport(xmlrpclib.SafeTransport): + def __init__(self, timeout, use_datetime=0): + self.timeout = timeout + xmlrpclib.SafeTransport.__init__(self, use_datetime) + + def make_connection(self, host): + h, eh, kwargs = self.get_host_info(host) + if not kwargs: + kwargs = {} + kwargs['timeout'] = self.timeout + if _ver_info == (2, 6): + result = HTTPS(host, None, **kwargs) + else: + if not self._connection or host != self._connection[0]: + self._extra_headers = eh + self._connection = host, httplib.HTTPSConnection(h, None, + **kwargs) + result = self._connection[1] + return result + + +class ServerProxy(xmlrpclib.ServerProxy): + def __init__(self, uri, **kwargs): + self.timeout = timeout = kwargs.pop('timeout', None) + # The above classes only come into play if a timeout + # is specified + if timeout is not None: + scheme, _ = splittype(uri) + use_datetime = kwargs.get('use_datetime', 0) + if scheme == 'https': + tcls = SafeTransport + else: + tcls = Transport + kwargs['transport'] = t = tcls(timeout, use_datetime=use_datetime) + self.transport = t + xmlrpclib.ServerProxy.__init__(self, uri, **kwargs) + +# +# CSV functionality. This is provided because on 2.x, the csv module can't +# handle Unicode. However, we need to deal with Unicode in e.g. RECORD files. +# + +def _csv_open(fn, mode, **kwargs): + if sys.version_info[0] < 3: + mode += 'b' + else: + kwargs['newline'] = '' + # Python 3 determines encoding from locale. 
Force 'utf-8' + # file encoding to match other forced utf-8 encoding + kwargs['encoding'] = 'utf-8' + return open(fn, mode, **kwargs) + + +class CSVBase(object): + defaults = { + 'delimiter': str(','), # The strs are used because we need native + 'quotechar': str('"'), # str in the csv API (2.x won't take + 'lineterminator': str('\n') # Unicode) + } + + def __enter__(self): + return self + + def __exit__(self, *exc_info): + self.stream.close() + + +class CSVReader(CSVBase): + def __init__(self, **kwargs): + if 'stream' in kwargs: + stream = kwargs['stream'] + if sys.version_info[0] >= 3: + # needs to be a text stream + stream = codecs.getreader('utf-8')(stream) + self.stream = stream + else: + self.stream = _csv_open(kwargs['path'], 'r') + self.reader = csv.reader(self.stream, **self.defaults) + + def __iter__(self): + return self + + def next(self): + result = next(self.reader) + if sys.version_info[0] < 3: + for i, item in enumerate(result): + if not isinstance(item, text_type): + result[i] = item.decode('utf-8') + return result + + __next__ = next + +class CSVWriter(CSVBase): + def __init__(self, fn, **kwargs): + self.stream = _csv_open(fn, 'w') + self.writer = csv.writer(self.stream, **self.defaults) + + def writerow(self, row): + if sys.version_info[0] < 3: + r = [] + for item in row: + if isinstance(item, text_type): + item = item.encode('utf-8') + r.append(item) + row = r + self.writer.writerow(row) + +# +# Configurator functionality +# + +class Configurator(BaseConfigurator): + + value_converters = dict(BaseConfigurator.value_converters) + value_converters['inc'] = 'inc_convert' + + def __init__(self, config, base=None): + super(Configurator, self).__init__(config) + self.base = base or os.getcwd() + + def configure_custom(self, config): + def convert(o): + if isinstance(o, (list, tuple)): + result = type(o)([convert(i) for i in o]) + elif isinstance(o, dict): + if '()' in o: + result = self.configure_custom(o) + else: + result = {} + for k in o: + result[k] = convert(o[k]) + else: + result = self.convert(o) + return result + + c = config.pop('()') + if not callable(c): + c = self.resolve(c) + props = config.pop('.', None) + # Check for valid identifiers + args = config.pop('[]', ()) + if args: + args = tuple([convert(o) for o in args]) + items = [(k, convert(config[k])) for k in config if valid_ident(k)] + kwargs = dict(items) + result = c(*args, **kwargs) + if props: + for n, v in props.items(): + setattr(result, n, convert(v)) + return result + + def __getitem__(self, key): + result = self.config[key] + if isinstance(result, dict) and '()' in result: + self.config[key] = result = self.configure_custom(result) + return result + + def inc_convert(self, value): + """Default converter for the inc:// protocol.""" + if not os.path.isabs(value): + value = os.path.join(self.base, value) + with codecs.open(value, 'r', encoding='utf-8') as f: + result = json.load(f) + return result + + +class SubprocessMixin(object): + """ + Mixin for running subprocesses and capturing their output + """ + def __init__(self, verbose=False, progress=None): + self.verbose = verbose + self.progress = progress + + def reader(self, stream, context): + """ + Read lines from a subprocess' output stream and either pass to a progress + callable (if specified) or write progress information to sys.stderr. 
+ """ + progress = self.progress + verbose = self.verbose + while True: + s = stream.readline() + if not s: + break + if progress is not None: + progress(s, context) + else: + if not verbose: + sys.stderr.write('.') + else: + sys.stderr.write(s.decode('utf-8')) + sys.stderr.flush() + stream.close() + + def run_command(self, cmd, **kwargs): + p = subprocess.Popen(cmd, stdout=subprocess.PIPE, + stderr=subprocess.PIPE, **kwargs) + t1 = threading.Thread(target=self.reader, args=(p.stdout, 'stdout')) + t1.start() + t2 = threading.Thread(target=self.reader, args=(p.stderr, 'stderr')) + t2.start() + p.wait() + t1.join() + t2.join() + if self.progress is not None: + self.progress('done.', 'main') + elif self.verbose: + sys.stderr.write('done.\n') + return p + + +def normalize_name(name): + """Normalize a python package name a la PEP 503""" + # https://www.python.org/dev/peps/pep-0503/#normalized-names + return re.sub('[-_.]+', '-', name).lower() diff --git a/env/lib/python3.6/site-packages/pip/_vendor/distlib/version.py b/env/lib/python3.6/site-packages/pip/_vendor/distlib/version.py new file mode 100644 index 0000000..3eebe18 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/distlib/version.py @@ -0,0 +1,736 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2012-2017 The Python Software Foundation. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +""" +Implementation of a flexible versioning scheme providing support for PEP-440, +setuptools-compatible and semantic versioning. +""" + +import logging +import re + +from .compat import string_types +from .util import parse_requirement + +__all__ = ['NormalizedVersion', 'NormalizedMatcher', + 'LegacyVersion', 'LegacyMatcher', + 'SemanticVersion', 'SemanticMatcher', + 'UnsupportedVersionError', 'get_scheme'] + +logger = logging.getLogger(__name__) + + +class UnsupportedVersionError(ValueError): + """This is an unsupported version.""" + pass + + +class Version(object): + def __init__(self, s): + self._string = s = s.strip() + self._parts = parts = self.parse(s) + assert isinstance(parts, tuple) + assert len(parts) > 0 + + def parse(self, s): + raise NotImplementedError('please implement in a subclass') + + def _check_compatible(self, other): + if type(self) != type(other): + raise TypeError('cannot compare %r and %r' % (self, other)) + + def __eq__(self, other): + self._check_compatible(other) + return self._parts == other._parts + + def __ne__(self, other): + return not self.__eq__(other) + + def __lt__(self, other): + self._check_compatible(other) + return self._parts < other._parts + + def __gt__(self, other): + return not (self.__lt__(other) or self.__eq__(other)) + + def __le__(self, other): + return self.__lt__(other) or self.__eq__(other) + + def __ge__(self, other): + return self.__gt__(other) or self.__eq__(other) + + # See http://docs.python.org/reference/datamodel#object.__hash__ + def __hash__(self): + return hash(self._parts) + + def __repr__(self): + return "%s('%s')" % (self.__class__.__name__, self._string) + + def __str__(self): + return self._string + + @property + def is_prerelease(self): + raise NotImplementedError('Please implement in subclasses.') + + +class Matcher(object): + version_class = None + + # value is either a callable or the name of a method + _operators = { + '<': lambda v, c, p: v < c, + '>': lambda v, c, p: v > c, + '<=': lambda v, c, p: v == c or v < c, + '>=': lambda v, c, p: v == c or v > c, + '==': lambda v, c, p: v == c, + '===': lambda v, c, p: v == c, + # by default, compatible => >=. 
+ '~=': lambda v, c, p: v == c or v > c, + '!=': lambda v, c, p: v != c, + } + + # this is a method only to support alternative implementations + # via overriding + def parse_requirement(self, s): + return parse_requirement(s) + + def __init__(self, s): + if self.version_class is None: + raise ValueError('Please specify a version class') + self._string = s = s.strip() + r = self.parse_requirement(s) + if not r: + raise ValueError('Not valid: %r' % s) + self.name = r.name + self.key = self.name.lower() # for case-insensitive comparisons + clist = [] + if r.constraints: + # import pdb; pdb.set_trace() + for op, s in r.constraints: + if s.endswith('.*'): + if op not in ('==', '!='): + raise ValueError('\'.*\' not allowed for ' + '%r constraints' % op) + # Could be a partial version (e.g. for '2.*') which + # won't parse as a version, so keep it as a string + vn, prefix = s[:-2], True + # Just to check that vn is a valid version + self.version_class(vn) + else: + # Should parse as a version, so we can create an + # instance for the comparison + vn, prefix = self.version_class(s), False + clist.append((op, vn, prefix)) + self._parts = tuple(clist) + + def match(self, version): + """ + Check if the provided version matches the constraints. + + :param version: The version to match against this instance. + :type version: String or :class:`Version` instance. + """ + if isinstance(version, string_types): + version = self.version_class(version) + for operator, constraint, prefix in self._parts: + f = self._operators.get(operator) + if isinstance(f, string_types): + f = getattr(self, f) + if not f: + msg = ('%r not implemented ' + 'for %s' % (operator, self.__class__.__name__)) + raise NotImplementedError(msg) + if not f(version, constraint, prefix): + return False + return True + + @property + def exact_version(self): + result = None + if len(self._parts) == 1 and self._parts[0][0] in ('==', '==='): + result = self._parts[0][1] + return result + + def _check_compatible(self, other): + if type(self) != type(other) or self.name != other.name: + raise TypeError('cannot compare %s and %s' % (self, other)) + + def __eq__(self, other): + self._check_compatible(other) + return self.key == other.key and self._parts == other._parts + + def __ne__(self, other): + return not self.__eq__(other) + + # See http://docs.python.org/reference/datamodel#object.__hash__ + def __hash__(self): + return hash(self.key) + hash(self._parts) + + def __repr__(self): + return "%s(%r)" % (self.__class__.__name__, self._string) + + def __str__(self): + return self._string + + +PEP440_VERSION_RE = re.compile(r'^v?(\d+!)?(\d+(\.\d+)*)((a|b|c|rc)(\d+))?' + r'(\.(post)(\d+))?(\.(dev)(\d+))?' 
+ r'(\+([a-zA-Z\d]+(\.[a-zA-Z\d]+)?))?$') + + +def _pep_440_key(s): + s = s.strip() + m = PEP440_VERSION_RE.match(s) + if not m: + raise UnsupportedVersionError('Not a valid version: %s' % s) + groups = m.groups() + nums = tuple(int(v) for v in groups[1].split('.')) + while len(nums) > 1 and nums[-1] == 0: + nums = nums[:-1] + + if not groups[0]: + epoch = 0 + else: + epoch = int(groups[0]) + pre = groups[4:6] + post = groups[7:9] + dev = groups[10:12] + local = groups[13] + if pre == (None, None): + pre = () + else: + pre = pre[0], int(pre[1]) + if post == (None, None): + post = () + else: + post = post[0], int(post[1]) + if dev == (None, None): + dev = () + else: + dev = dev[0], int(dev[1]) + if local is None: + local = () + else: + parts = [] + for part in local.split('.'): + # to ensure that numeric compares as > lexicographic, avoid + # comparing them directly, but encode a tuple which ensures + # correct sorting + if part.isdigit(): + part = (1, int(part)) + else: + part = (0, part) + parts.append(part) + local = tuple(parts) + if not pre: + # either before pre-release, or final release and after + if not post and dev: + # before pre-release + pre = ('a', -1) # to sort before a0 + else: + pre = ('z',) # to sort after all pre-releases + # now look at the state of post and dev. + if not post: + post = ('_',) # sort before 'a' + if not dev: + dev = ('final',) + + #print('%s -> %s' % (s, m.groups())) + return epoch, nums, pre, post, dev, local + + +_normalized_key = _pep_440_key + + +class NormalizedVersion(Version): + """A rational version. + + Good: + 1.2 # equivalent to "1.2.0" + 1.2.0 + 1.2a1 + 1.2.3a2 + 1.2.3b1 + 1.2.3c1 + 1.2.3.4 + TODO: fill this out + + Bad: + 1 # minimum two numbers + 1.2a # release level must have a release serial + 1.2.3b + """ + def parse(self, s): + result = _normalized_key(s) + # _normalized_key loses trailing zeroes in the release + # clause, since that's needed to ensure that X.Y == X.Y.0 == X.Y.0.0 + # However, PEP 440 prefix matching needs it: for example, + # (~= 1.4.5.0) matches differently to (~= 1.4.5.0.0). + m = PEP440_VERSION_RE.match(s) # must succeed + groups = m.groups() + self._release_clause = tuple(int(v) for v in groups[1].split('.')) + return result + + PREREL_TAGS = set(['a', 'b', 'c', 'rc', 'dev']) + + @property + def is_prerelease(self): + return any(t[0] in self.PREREL_TAGS for t in self._parts if t) + + +def _match_prefix(x, y): + x = str(x) + y = str(y) + if x == y: + return True + if not x.startswith(y): + return False + n = len(y) + return x[n] == '.' + + +class NormalizedMatcher(Matcher): + version_class = NormalizedVersion + + # value is either a callable or the name of a method + _operators = { + '~=': '_match_compatible', + '<': '_match_lt', + '>': '_match_gt', + '<=': '_match_le', + '>=': '_match_ge', + '==': '_match_eq', + '===': '_match_arbitrary', + '!=': '_match_ne', + } + + def _adjust_local(self, version, constraint, prefix): + if prefix: + strip_local = '+' not in constraint and version._parts[-1] + else: + # both constraint and version are + # NormalizedVersion instances. + # If constraint does not have a local component, + # ensure the version doesn't, either. 
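
#
# Illustrative sketch (editor's annotation, not part of the vendored file):
# how the NormalizedVersion class defined above compares PEP 440 versions.
# The version strings are arbitrary examples.
#
assert NormalizedVersion('1.2') == NormalizedVersion('1.2.0')   # trailing zeros compare equal
assert NormalizedVersion('1.0a1') < NormalizedVersion('1.0')    # pre-releases sort first
assert NormalizedVersion('1.0') < NormalizedVersion('1.0.post1')
assert NormalizedVersion('1.0a1').is_prerelease
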
+ strip_local = not constraint._parts[-1] and version._parts[-1] + if strip_local: + s = version._string.split('+', 1)[0] + version = self.version_class(s) + return version, constraint + + def _match_lt(self, version, constraint, prefix): + version, constraint = self._adjust_local(version, constraint, prefix) + if version >= constraint: + return False + release_clause = constraint._release_clause + pfx = '.'.join([str(i) for i in release_clause]) + return not _match_prefix(version, pfx) + + def _match_gt(self, version, constraint, prefix): + version, constraint = self._adjust_local(version, constraint, prefix) + if version <= constraint: + return False + release_clause = constraint._release_clause + pfx = '.'.join([str(i) for i in release_clause]) + return not _match_prefix(version, pfx) + + def _match_le(self, version, constraint, prefix): + version, constraint = self._adjust_local(version, constraint, prefix) + return version <= constraint + + def _match_ge(self, version, constraint, prefix): + version, constraint = self._adjust_local(version, constraint, prefix) + return version >= constraint + + def _match_eq(self, version, constraint, prefix): + version, constraint = self._adjust_local(version, constraint, prefix) + if not prefix: + result = (version == constraint) + else: + result = _match_prefix(version, constraint) + return result + + def _match_arbitrary(self, version, constraint, prefix): + return str(version) == str(constraint) + + def _match_ne(self, version, constraint, prefix): + version, constraint = self._adjust_local(version, constraint, prefix) + if not prefix: + result = (version != constraint) + else: + result = not _match_prefix(version, constraint) + return result + + def _match_compatible(self, version, constraint, prefix): + version, constraint = self._adjust_local(version, constraint, prefix) + if version == constraint: + return True + if version < constraint: + return False +# if not prefix: +# return True + release_clause = constraint._release_clause + if len(release_clause) > 1: + release_clause = release_clause[:-1] + pfx = '.'.join([str(i) for i in release_clause]) + return _match_prefix(version, pfx) + +_REPLACEMENTS = ( + (re.compile('[.+-]$'), ''), # remove trailing puncts + (re.compile(r'^[.](\d)'), r'0.\1'), # .N -> 0.N at start + (re.compile('^[.-]'), ''), # remove leading puncts + (re.compile(r'^\((.*)\)$'), r'\1'), # remove parentheses + (re.compile(r'^v(ersion)?\s*(\d+)'), r'\2'), # remove leading v(ersion) + (re.compile(r'^r(ev)?\s*(\d+)'), r'\2'), # remove leading v(ersion) + (re.compile('[.]{2,}'), '.'), # multiple runs of '.' + (re.compile(r'\b(alfa|apha)\b'), 'alpha'), # misspelt alpha + (re.compile(r'\b(pre-alpha|prealpha)\b'), + 'pre.alpha'), # standardise + (re.compile(r'\(beta\)$'), 'beta'), # remove parentheses +) + +_SUFFIX_REPLACEMENTS = ( + (re.compile('^[:~._+-]+'), ''), # remove leading puncts + (re.compile('[,*")([\\]]'), ''), # remove unwanted chars + (re.compile('[~:+_ -]'), '.'), # replace illegal chars + (re.compile('[.]{2,}'), '.'), # multiple runs of '.' + (re.compile(r'\.$'), ''), # trailing '.' +) + +_NUMERIC_PREFIX = re.compile(r'(\d+(\.\d+)*)') + + +def _suggest_semantic_version(s): + """ + Try to suggest a semantic form for a version for which + _suggest_normalized_version couldn't come up with anything. + """ + result = s.strip().lower() + for pat, repl in _REPLACEMENTS: + result = pat.sub(repl, result) + if not result: + result = '0.0.0' + + # Now look for numeric prefix, and separate it out from + # the rest. 
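
#
# Illustrative sketch (editor's annotation, not part of the vendored file):
# constraint matching with the NormalizedMatcher completed above. The
# project name 'foo' is hypothetical; the "name (constraints)" form is the
# metadata-style requirement syntax the parser expects.
#
matcher = NormalizedMatcher('foo (>= 1.2, < 2.0)')
assert matcher.match('1.4.1')
assert not matcher.match('2.0')
assert NormalizedMatcher('foo (~= 1.4.5)').match('1.4.9')      # compatible release
assert not NormalizedMatcher('foo (~= 1.4.5)').match('1.5.0')
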
+ #import pdb; pdb.set_trace() + m = _NUMERIC_PREFIX.match(result) + if not m: + prefix = '0.0.0' + suffix = result + else: + prefix = m.groups()[0].split('.') + prefix = [int(i) for i in prefix] + while len(prefix) < 3: + prefix.append(0) + if len(prefix) == 3: + suffix = result[m.end():] + else: + suffix = '.'.join([str(i) for i in prefix[3:]]) + result[m.end():] + prefix = prefix[:3] + prefix = '.'.join([str(i) for i in prefix]) + suffix = suffix.strip() + if suffix: + #import pdb; pdb.set_trace() + # massage the suffix. + for pat, repl in _SUFFIX_REPLACEMENTS: + suffix = pat.sub(repl, suffix) + + if not suffix: + result = prefix + else: + sep = '-' if 'dev' in suffix else '+' + result = prefix + sep + suffix + if not is_semver(result): + result = None + return result + + +def _suggest_normalized_version(s): + """Suggest a normalized version close to the given version string. + + If you have a version string that isn't rational (i.e. NormalizedVersion + doesn't like it) then you might be able to get an equivalent (or close) + rational version from this function. + + This does a number of simple normalizations to the given string, based + on observation of versions currently in use on PyPI. Given a dump of + those version during PyCon 2009, 4287 of them: + - 2312 (53.93%) match NormalizedVersion without change + with the automatic suggestion + - 3474 (81.04%) match when using this suggestion method + + @param s {str} An irrational version string. + @returns A rational version string, or None, if couldn't determine one. + """ + try: + _normalized_key(s) + return s # already rational + except UnsupportedVersionError: + pass + + rs = s.lower() + + # part of this could use maketrans + for orig, repl in (('-alpha', 'a'), ('-beta', 'b'), ('alpha', 'a'), + ('beta', 'b'), ('rc', 'c'), ('-final', ''), + ('-pre', 'c'), + ('-release', ''), ('.release', ''), ('-stable', ''), + ('+', '.'), ('_', '.'), (' ', ''), ('.final', ''), + ('final', '')): + rs = rs.replace(orig, repl) + + # if something ends with dev or pre, we add a 0 + rs = re.sub(r"pre$", r"pre0", rs) + rs = re.sub(r"dev$", r"dev0", rs) + + # if we have something like "b-2" or "a.2" at the end of the + # version, that is probably beta, alpha, etc + # let's remove the dash or dot + rs = re.sub(r"([abc]|rc)[\-\.](\d+)$", r"\1\2", rs) + + # 1.0-dev-r371 -> 1.0.dev371 + # 0.1-dev-r79 -> 0.1.dev79 + rs = re.sub(r"[\-\.](dev)[\-\.]?r?(\d+)$", r".\1\2", rs) + + # Clean: 2.0.a.3, 2.0.b1, 0.9.0~c1 + rs = re.sub(r"[.~]?([abc])\.?", r"\1", rs) + + # Clean: v0.3, v1.0 + if rs.startswith('v'): + rs = rs[1:] + + # Clean leading '0's on numbers. + #TODO: unintended side-effect on, e.g., "2003.05.09" + # PyPI stats: 77 (~2%) better + rs = re.sub(r"\b0+(\d+)(?!\d)", r"\1", rs) + + # Clean a/b/c with no version. E.g. "1.0a" -> "1.0a0". Setuptools infers + # zero. 
+ # PyPI stats: 245 (7.56%) better + rs = re.sub(r"(\d+[abc])$", r"\g<1>0", rs) + + # the 'dev-rNNN' tag is a dev tag + rs = re.sub(r"\.?(dev-r|dev\.r)\.?(\d+)$", r".dev\2", rs) + + # clean the - when used as a pre delimiter + rs = re.sub(r"-(a|b|c)(\d+)$", r"\1\2", rs) + + # a terminal "dev" or "devel" can be changed into ".dev0" + rs = re.sub(r"[\.\-](dev|devel)$", r".dev0", rs) + + # a terminal "dev" can be changed into ".dev0" + rs = re.sub(r"(?![\.\-])dev$", r".dev0", rs) + + # a terminal "final" or "stable" can be removed + rs = re.sub(r"(final|stable)$", "", rs) + + # The 'r' and the '-' tags are post release tags + # 0.4a1.r10 -> 0.4a1.post10 + # 0.9.33-17222 -> 0.9.33.post17222 + # 0.9.33-r17222 -> 0.9.33.post17222 + rs = re.sub(r"\.?(r|-|-r)\.?(\d+)$", r".post\2", rs) + + # Clean 'r' instead of 'dev' usage: + # 0.9.33+r17222 -> 0.9.33.dev17222 + # 1.0dev123 -> 1.0.dev123 + # 1.0.git123 -> 1.0.dev123 + # 1.0.bzr123 -> 1.0.dev123 + # 0.1a0dev.123 -> 0.1a0.dev123 + # PyPI stats: ~150 (~4%) better + rs = re.sub(r"\.?(dev|git|bzr)\.?(\d+)$", r".dev\2", rs) + + # Clean '.pre' (normalized from '-pre' above) instead of 'c' usage: + # 0.2.pre1 -> 0.2c1 + # 0.2-c1 -> 0.2c1 + # 1.0preview123 -> 1.0c123 + # PyPI stats: ~21 (0.62%) better + rs = re.sub(r"\.?(pre|preview|-c)(\d+)$", r"c\g<2>", rs) + + # Tcl/Tk uses "px" for their post release markers + rs = re.sub(r"p(\d+)$", r".post\1", rs) + + try: + _normalized_key(rs) + except UnsupportedVersionError: + rs = None + return rs + +# +# Legacy version processing (distribute-compatible) +# + +_VERSION_PART = re.compile(r'([a-z]+|\d+|[\.-])', re.I) +_VERSION_REPLACE = { + 'pre': 'c', + 'preview': 'c', + '-': 'final-', + 'rc': 'c', + 'dev': '@', + '': None, + '.': None, +} + + +def _legacy_key(s): + def get_parts(s): + result = [] + for p in _VERSION_PART.split(s.lower()): + p = _VERSION_REPLACE.get(p, p) + if p: + if '0' <= p[:1] <= '9': + p = p.zfill(8) + else: + p = '*' + p + result.append(p) + result.append('*final') + return result + + result = [] + for p in get_parts(s): + if p.startswith('*'): + if p < '*final': + while result and result[-1] == '*final-': + result.pop() + while result and result[-1] == '00000000': + result.pop() + result.append(p) + return tuple(result) + + +class LegacyVersion(Version): + def parse(self, s): + return _legacy_key(s) + + @property + def is_prerelease(self): + result = False + for x in self._parts: + if (isinstance(x, string_types) and x.startswith('*') and + x < '*final'): + result = True + break + return result + + +class LegacyMatcher(Matcher): + version_class = LegacyVersion + + _operators = dict(Matcher._operators) + _operators['~='] = '_match_compatible' + + numeric_re = re.compile(r'^(\d+(\.\d+)*)') + + def _match_compatible(self, version, constraint, prefix): + if version < constraint: + return False + m = self.numeric_re.match(str(constraint)) + if not m: + logger.warning('Cannot compute compatible match for version %s ' + ' and constraint %s', version, constraint) + return True + s = m.groups()[0] + if '.' in s: + s = s.rsplit('.', 1)[0] + return _match_prefix(version, s) + +# +# Semantic versioning +# + +_SEMVER_RE = re.compile(r'^(\d+)\.(\d+)\.(\d+)' + r'(-[a-z0-9]+(\.[a-z0-9-]+)*)?' 
+                        r'(\+[a-z0-9]+(\.[a-z0-9-]+)*)?$', re.I)
+
+
+def is_semver(s):
+    return _SEMVER_RE.match(s)
+
+
+def _semantic_key(s):
+    def make_tuple(s, absent):
+        if s is None:
+            result = (absent,)
+        else:
+            parts = s[1:].split('.')
+            # We can't compare ints and strings on Python 3, so fudge it
+            # by zero-filling numeric values so simulate a numeric comparison
+            result = tuple([p.zfill(8) if p.isdigit() else p for p in parts])
+        return result
+
+    m = is_semver(s)
+    if not m:
+        raise UnsupportedVersionError(s)
+    groups = m.groups()
+    major, minor, patch = [int(i) for i in groups[:3]]
+    # choose the '|' and '*' so that versions sort correctly
+    pre, build = make_tuple(groups[3], '|'), make_tuple(groups[5], '*')
+    return (major, minor, patch), pre, build
+
+
+class SemanticVersion(Version):
+    def parse(self, s):
+        return _semantic_key(s)
+
+    @property
+    def is_prerelease(self):
+        return self._parts[1][0] != '|'
+
+
+class SemanticMatcher(Matcher):
+    version_class = SemanticVersion
+
+
+class VersionScheme(object):
+    def __init__(self, key, matcher, suggester=None):
+        self.key = key
+        self.matcher = matcher
+        self.suggester = suggester
+
+    def is_valid_version(self, s):
+        try:
+            self.matcher.version_class(s)
+            result = True
+        except UnsupportedVersionError:
+            result = False
+        return result
+
+    def is_valid_matcher(self, s):
+        try:
+            self.matcher(s)
+            result = True
+        except UnsupportedVersionError:
+            result = False
+        return result
+
+    def is_valid_constraint_list(self, s):
+        """
+        Used for processing some metadata fields
+        """
+        return self.is_valid_matcher('dummy_name (%s)' % s)
+
+    def suggest(self, s):
+        if self.suggester is None:
+            result = None
+        else:
+            result = self.suggester(s)
+        return result
+
+_SCHEMES = {
+    'normalized': VersionScheme(_normalized_key, NormalizedMatcher,
+                                _suggest_normalized_version),
+    'legacy': VersionScheme(_legacy_key, LegacyMatcher, lambda self, s: s),
+    'semantic': VersionScheme(_semantic_key, SemanticMatcher,
+                              _suggest_semantic_version),
+}
+
+_SCHEMES['default'] = _SCHEMES['normalized']
+
+
+def get_scheme(name):
+    if name not in _SCHEMES:
+        raise ValueError('unknown scheme name: %r' % name)
+    return _SCHEMES[name]
diff --git a/env/lib/python3.6/site-packages/pip/_vendor/distlib/w32.exe b/env/lib/python3.6/site-packages/pip/_vendor/distlib/w32.exe
new file mode 100644
index 0000000000000000000000000000000000000000..732215a9d34ccb7b417d637a7646d9b843ecafa8
GIT binary patch
literal 89088
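
#
# Illustrative sketch (editor's annotation, not part of the vendored file):
# the public entry point of distlib/version.py added above. Validation and
# suggestion go through the VersionScheme returned by get_scheme(); the
# import path reflects the file's upstream location (here it is vendored
# under pip._vendor).
#
from distlib.version import get_scheme

scheme = get_scheme('normalized')                # PEP 440 rules; also the 'default' scheme
assert scheme.is_valid_version('1.0.post2')
assert not scheme.is_valid_version('1.0-alpha')  # not a valid PEP 440 version ...
assert scheme.suggest('1.0-alpha') == '1.0a0'    # ... but a close form can be suggested

assert get_scheme('semantic').is_valid_version('1.2.3-alpha.1')
assert get_scheme('legacy').is_valid_version('1.0-alpha')   # setuptools-style parsing accepts it
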
zi|{zG&i!IR@zZ$cuVR>1%*SikjIa#@twvjFTaAjCmS#2bcC{MuHLXTt$kK|2ru&*9 z9oUS#He-YXn-R$cO^?DU$k*uA_@Hq>;|w`adJ@A1<Xh8bgm26=o6%<YHY#H9(k&cc z6!O-i^u?-sx6-F@zeK%9Z4jla7(+dl=HoWm4x5pe16z)H7==SbFqkMvu1W+ZBfc!< zFbC5(xTS$-i*7PfUj7Wco@9yAK1RCTYotq0rpuFIbq<u)Qz_ENHAOy`e#iND5MB|5 zZJ#P;T0`v>uo~f<Zyc`;FO4K+H7dqqvOOft*3$i={u)he>2r9kF{DN5<5l<O(v`S3 zZNWM0gKX`S?K$;)Z8u=nQ;!H9mCk113LnjPfabft0yxPW;5A11!d5g)-RQ<5PTuGF zsy8qfd8r%Ni@K^C7(CkZW<A9M96StXT%nS%-T5qKw7tUFmeskmoOU0Q4ZBZ&^b1UR z+wiOGK3r77L&#zGfdJeAz`2U1{4X)^wbSnNsGgE0)ea?O8UsQLi!0%&P0tsX;}!=I z;}`om7OX__yYtC%bb&?baXengC$*ZjXfIxBiI_`^abN9cYpB0>6f-t9qq5cCMkYa) z&Z`v6nn*b&R~D>crl!1$&=pve*+jRN@TGk{F%ga;KER~7vL)!@r{y@92zQ$LfcYJk z_lB3K@X&Z+69TCS@GoZMg$GKHAz@`%IsyD?ReFecI~$tv&Y{U5o8CeQ(-sQ4&-q~n zenkfm-4Og|ac>>EEPoH&!u;3qi#|LZY+Hz5AvdzQD^&iNrX)vkRHW)RVvcfwsi0^h zUX?XWE>M12h=P35rKz6MLBPSKPN=yBWpnb;*ji<CVjSxY7<N?4N9j)-8?PtAqVzl5 zyYMj9+2#<1=KzDy;`9wf1tWa?+ACmjdPlv7#pydv(!k=BPK#4FWpR22ei}P+3yP=? zt<&_n08W-otWpdpMQ7**RfY5Bqabt^DmxRkEA}m<ok=NL_uWdVQLIZZuw<0NfrW|b zLlf#!Byx%6V+iJc+!0>kh^s997!S4^pp|CW;x(ycB4r~3_uw=jC8dr%OD?X%yTyYG zH)*z^Qhd1UP?{XJA*yxo6}F*jzGDs?wjl~BBRbr5+t5y=xC$>F;jj%oh#S4oVFkj2 zvJEkgCLWlZp{NzA81b&Qp*5^5v<-a?a+Ga|vG6&Io*WLVtF{d#+l$Eq8izi24dG%O z3Q@)(CQ4ht@B&p|<4fB^jth#I^lsIDLQjaYZpuJ(4>Sj5{z>+ZXdv2$h+3$g2&I>< z#(ii;@O&Xcrg~#v%lr?KphG2SFByyHeR#%M>G&<>tfVfNh-J#>b9LRecGS`h)%Zg@ zQMj@bg;zdS)>iieYYkhGYWnrp*1GIP2E}@ot58x^_9Dem+KYIjBm(v#MlkF}d`~MP zUBg~PA_|Lsg~&V9d#N;{tI3E(p_z>8p9gI;8LeOfN2*LlFX2%$8Rat}Wi?WWqH<_D zv_L5?>zxp~hK^mie%{tNSxDJ)C<)Z`)Gpy&7NiuPW*x8gudd)_jm9Y3QUmwxFeaV# zQqd1qXt~&nHzG+C!Uj<9P}&tAVSC}@42YaZYM-*^*v;4@)vK%lcBO8eYMzu_P{^X) zolh>1V~*k@xbsp9)2o<o3zR5sXlm<LcC*$frJA)?X^eBZGEuNHV*vwgM8{<j<G_%@ z-!+HvTd0HwnvlVq_bD3@c|#jfKdq!aY&{$*m$DA2-dfs@onxH0)lU5@d+`Cp6t-E8 zHK%cd?HChbbj4i_MQmtbYU=pdk<MK|v*w|$v(#W%>L!cmr4gfL=PZF)#l4sTCgp#g zv$3o%R_x$GmUY)^v__FsrD)Kl2->XMhxi~C`sf5%d+P*U!LoRShh>v#>nyVuZ(v*+ zh<M{{I};(+t9O$es+;LhwF12n?4PHUV{+gwh|z)4IpC1L>Rn>qpqTvzScu&4vX1Vq zrgW=@nU3kYg=ojiIGp#zP;m}0=A}^a1mJnV4}eqf)4p)PNHrdl3X?S3N%mOnV_KR< z3^R!dTU6SEnoHY2{fIiQJhR<w$Ck=1xc0=>DlYY0nP*(;FJ<m=spoO#A7{rJ7IbXj zQ!=^iMrU~rJM8G`31<av?PksnX}c0(wI1e2w9g_wSU5Sb8SOXOagObnXK?(aaL^1F z+z9-<+9!69J;+;Hy^YiII(dCyZ4qVljJgUI_NEOm+AM41Os&Qu3%A%2!FBI<41{Q5 zYUiA&VdxC33XX6!rd>KX2k2qiR)ulCwHOlh@s%GUpO5gw-nRd8czCMAS3J*R&g)i0 z<F}IBKvZodTD^-XzK&*H#pXMAdfEo!An`ZRty0I6<GKA68+c;xnyII-ho@m|@<v?j z=5Ot!($;J}`nH;d5q{i`a}`GO+=t_P;JXvsk1p~=CilQ_sZU>l4_D41;p<(-eGqX$ zMZ1~3NX2xdz*#YVf_yBsi8F?Yz*MVtkmZ~%-u=pV__P@3ecNTIsW1?)9lpg}0mvo% z{OTSDA2>YJoBHhi1eTlZ599UuVK^#&vVAdDQThSHSD6sDFT$QB>fKFOvZ6lhZe{M= zU=^DqIeQ@u`=Vp`q_*%*Y;3LQW}4QU@IXTByF$zCI9(eXM<V?NOUMBg;alvw>V~1T zmxkx|%MHNHll9%pal9Cg;+D1r!%J_IXg;|!?Q0l)V=pQmv}H{!Y^vzM9FVq4t)u0V z9g3zM*==`)Vk3P|xhu4OV{Xrd{b&)X?2GN+)yFmjnk4MUGD)nE)XQrZ1t;FQ2aOK7 z>ft&8$*ROc?7j$v2sUrcCHBdtGbLkDb+Dol5pwX&)K@v%oeiyn^$+}#O*w9WuonC= z^T(S%q5O&E&t3e<EG)=H@*`{quyS~?V+U42T1{o{FgU~4tuU&^2z;*emC4%CvBT%{ z?cdk4dKR=VjPi)aRz)Y>tqqp74;x{-H2L<Ojw2U7x9@D>^^s4>ILAl$tQUT6VbQir z+shaSDS3>vRoqcJfvm)Kd*09)>$v$b76fB*IF=$N7NVkPo9DH1zHpDBML3DH7C5?0 z3g6&#RCzo5;<oJQRX3?_fay>->%KT=8V6j)WN-t>Mt^Ke4Nc2^0DXZDxY$IWqsQ)f zI40~AL<(oIw|DkO4miQCz|+~fDqGhA(u}^RoO$roW{96+UfK*XCS9@P^kQsmh@Ep! 
zKZf@>8Q2loQ9nVZo%4u#hwt_}K&o&$x~(09@*^AWq3Z|C%5HS*^V)-rf$QB~)&$$> z?lnDDQWzoJurymKb?m@af#qmr-Oy3RL{a!lsH$qSjuwsvm>I)*vmw@-`7vL6wK*jS z{lu}$t3Kmwuc&w72-B&4E-Tko2H;p=d}p{H9|ex`Z;18i+dV>Ye2wkG#L!faSm%b# z;0ewx#|BfFrtwT|<ZPS>+8o@ct&P{^Sm(Fs_^XqW9Ug-z(m&wLV=Qpy*UKt>%zi@q zOyo|o=djgzJLek<SmHOLq@9y-%VMW!#^E3k=z+lFR!i!TIOoQx$QS!9VX9|B;0fzk ze8}iOub$d@r(!KB3anp=Ut446Ac)#J^=6lC{#tJLhF!X?K4@|KId6X%e%$5KnFVHH zG#Bsu>P6mIM?1Z%@L-d{Lt*@F6{urSX2A2NR1fEjB2c=HDI&3>1>XLO{kB%{8l}9n zT&>#*yd7$zP9Xo97q?ufJP7q!?i_}$HHZkjY+CUfNc8tH<^2M;(0407Svr}(3DZgZ zXijbZftDV}=fJGs=5wAD%uB5|8Q)-4I@<u_N$xviogI;>&XYUfLBESHXnyg|?;h5r z!Ib|E^1$?~&)!0h;g~kY1g`te3!mMz7yJF}8H-J&gS1*;vd5g0&RgfRIC>*WyfX$U zO#&CJ=oM5tL-2&6n*{2x{r^$YGTLK!L35z*-@tYjx)s}~ne3zCq<UaDL~b~50FFv1 zu}}>js$fdJaED}V;KZVO7fuuouZK-dR^%haAzB)D<2E+cLr-Jh>naV-v?VfE?Bdq+ zje?d1b3DkN{#u%!!3X=MO@wu0E)oI0_upyYi<J~!jL3tQtH0tag+FGKv8Tbg<r7-x zjd$yW-VV2Q$B|7g^UJs31PXk43mL&Vx1n`TG<&G2W*qhUIC4f$)tI>E=iE(Dqn*pJ z&k^RJX^26s!2wF1&YvLAlh9>}i7)fUJGWt|{Vtz)hoVdzJbM04|N7lJ;G>eziO!*m z5e0a3!hybXKbYtoz-Q-tkZT;*%kGwQ23|PIjZKZ(;t9KVmVM<MQ{%m~srQcEW$!D# z;A2^I&;mc0TYau@3!E$5!6o&wFq|vg0-Iv|glT;%2=jDCVo;7#Dv7((6U1v<qFGx9 z9?x#!b#`K^a6bWU`?M-QZd7J$Rc$}r4!djM5H>e{;OL2SRqzF>Gx2lu*dm>3r<{Tb zIBr5(C-I;sl@t6Ndv19j?By|0Vcd*jQ46o~n=YA_?Si!Cc;Z-ibqe8Dn%?5DQD&Rl z(F1^%PlJQ?O;AQ{ozWm$<CZf7w+;cE_U@d?ISS^|2DP0nJvhIxagIme*(?voPT0J5 zLBpbgc{=+XQ?wAZL60%_`3-VDLiz1?@Y_>-E#m1s2yA#XSS6&wKw&0O@cCSw2;w|y zMG}7XZB3lTxYPE9vhS17hh@cqzvj$Yh-|ZGfoQ!ka7G`M(m3VHlotUuGaIj8z<N+O z1xah?z>j#WAhP_H9oVl7=-F;{-dUBrGa0vrOkn$ZTlp=vbJ(lIl*c{JkrlStax&2q zpSfnmI)%DJN6CosEs9!iRM=YUvp3e3SGn(79)=@$;!T?uc;E{fZ3*qciY32_P>fw; z;ey51p>Q#nwf3M}PgrlJi!D)x<VJbqIvN*_>fdd7%iOP)<i78=tYulxns1lpzF)7* znVaa5Q`W57C$~#(KF^7>HB@`UP2EXmbQdK!M3Y(TbrFgS<=@5`4O4^O+We3OMRnut z2+-^<Sm+91%$T|}-my#dtW5da1+?Zt`<FS}zi()@;GC`}@B-bVj0S+w=w4+^G~?$^ zcCQ)&#^6;>JYMA_;8jL)D1Opo@iX@>{4CCd#|*s7$-ujud3cwTlUcYq7iku5<8QQW z9hRj>;n1`1J<us@>zZFq!@{{O3UgXS+s@#~IBPeIoNx?8GsleuZ3}U%bdE<97R=%U z9CvFY?f^1uc$DqJ?RO?j-T4-(g77-v_5N7L4kN&g%i5{{+nqI%7-2g*sk8~}u&yR` zBPBjNYJt<uTH-9gX5b=hl=TYQL2=!qJ-DO4t5ko^+*Z!huzhMD`>(<sK0SkCXh+N4 zqDgpEHcV6gDxd_LYU?*)(V|If?jYDfO-Q~;JPwWD@uaF?&b3HDYloZ<Gc9!lbB&G4 z5vxbwuk*~#xy-feodw>`4!ENsG9N1qX(B2`n=Y5(xCWx#K-*eQ88U-Nryj-|_k}S{ z$Hv7^#QLb1)??!erZ!bZQLg_$#MU@s%9O8m3yARy{J>gV;8ymgreEz9cboEKkpPrW z9z;t9{%X2+7F?2Y;R!`b>0~@Brm~rES!DPC>J|1~(j|1PeaJ?%n3u7+X#%F5*7AY% zShHSZEd@?(s^luN7*8xAYZIXwZR=OGPU~GO{fxkG&<a(b5!l`tpGi(~G{M6Ilr6k; z2=tsfB``pFc7<ozmzY~>%R>ctMqnY{)H)-OhlF9!6_;8M#|2Jqva{+KwA-BXIOS2I z9TYfA-B%qHc;gAEDO5ci6u9F!ZmSOpv@;9spul?iL4jT$;G{sDg`l4lSp0=zAar-x zX#EH_b`a21UH?geZQ{6aajbLs$JLq*>&TJsR2cHxm387`z)Gn1rVbokg;!H6osm`f z`s2vJCRJ=JYv|b79aB9nX9l9pL}REH$Y7gws?fP>Ax@cYsr4p0hff5u+-?*b6m*6k zwl$B1!CdR4d!XrUs%`kyGM#!>wT`-(8XcARiE*&RH`!~Aft?i=)-7SL-Lcus??ihv zKDE4|wI{xgyaiu2VymKn@3I;Md_AvTy>A0{<8CMJb_JZoj_5c;=Ah5sfRolnZ-Q?! ztKNmr9NQ<eFRs=ly2qxv&qOJN&g}SoC>ks7Y_^Vax6_b5D^@#R#wWkpmg&_-bQ-~F zI&ZQEzfEn8@a%EnOW3gEE_|t*(P0vI*9_`kx_>koUb6esXsZ5uT@ME;@hy8}j14*h zdIKT=;{Zv3`GA#x7XZ5e#{fSAnx7Y<8^8}R0&qJZ6>vA;Z-8e3jnUj*x+KIA+@A(i z0z5AW(HhVL;0G8DxC>wftOC3O*a0{S_!-avAJ_K=3<8V>+zChp%m>^HSOwSscmuEx z@GanHfVe0`Q^1XYt^i*^1RxeL4Uh>~3V0H*8So+CD?lZnDRAxz2m(X^#sTgGqyusR zs{k(nN&trdrvV*7E3XpJSo`ws7b;#3$9_>`u!q%Kq2hR2XjPEhJ)z=D_}9L+&N_St z-$6Nx7(lx-fG@BNI3p`^eu_0VD`UDPJ6nj^B3n!s86rzq@Yh!)i8TBPVaB*~K43cj z5KK%E<Ha!1NBFti_ZNf2C=rP_^dBvvM7S6uBGj9a_#KCLkqF@<1|a1K+>KIUhoxl? 
z%d%Jm+&GJMVq%(2-4A!U4@*nSn9jgi!Y0y%Ripq@i!cMS@JD>jDy?RMwmSjrMj`Jw z{JDnFeB*%GXfeDFACwPb#b85chPX1ON*Z(w65gP~M=-rU2!|mK{w%;L6RsJMfnOUq zMI2**MHKKcE6ygVcgYIt*(&TzP?L?jMEo+wY%vWyUw}7BNFxir>0$<+(h-7`&s8a~ zl(e+ypQ3yc5idpDt%u9N`^Mmekv_vE!GF5)x9EAAk(*7WPk%G=%}}`#pG^EQMJ+}O ze9}>JW>7;aHLPdjZMJ%5$XUQm!`+H}Nj)Vg8!4u#Fc~5l@0cFt%AhP!<!tb&kKz+~ znSwAD;A|GXL_fSuLu$9;DH-Xd<C)wbPJBZtAU_iUS8^&{;YjIWEKOdt-|CnV3a1#P zK)m{@(q)Mo<vpOPywi~aIbcCJa%%=?H6w>i#GS6FPS*2CgR4<XUNBwFXEs6_*c&oy z<kP<@pStCWsnjfQrfu*$9QcmMGs}g%*L*Y%lUKyA?|(!tYSZCeMTdct!MD07NUFu} z0njZ*{bFe+s#>AZ!`f9>8fGPPlv@Lr>hc>P23MtL;BkHBHxGH$o#T~d=$hZ4Kan46 z66@%n$!}m)ewmO-t?cI_tt_Nz1OL*%wJU3mQ5UN@rpe&|;aip7_46wMn5RR+<|AJ# z>MS{AJW&#jQmoF+s9*hse^q{Um3Ib8mF1;rQP$@Sr5&gxEzpV7QWlj0%Yc_gNB>%K zL~NK6^?;^ZsHZ41*Vgy_UF3)OU75~oaG6?;l+RTBPgJ@~(*d)7H~l{&FEw*YQv6QT zxnR&=T@T59Y6UCwDfQ?~glEe$4b&LC)ig6XW0aAWj<-wsT)#XpRZRvd7i_cHdTS{g z=~mbC>rXckRLlUsjTE!1ur<ov8@&wX!}d8D<&vRzL#@HKG!xIfrxs`H#FkFu3|kLP zyK8YMUDdpxT(J+8do+}GGtmn!N2doq3q@Yo&;Tx{Exh5j<ohF`;s(Bl+lueuwu4*p zX{cxqHxq6LxB~7?a8`YSPbJ}o!o|{TA>ej~<M(kWFhzQYLq!+3Cl6tdP`GcyT>$rK zxOc<NgUd~$7Q*Ee#T2+Ps&24-WSvP?G*84k(yeKQaN&hNGinSan-Z+`u14W*8aPdE zGZ*y16_o-l2>;Nh1>czR3>Av)FEzCmn`zBb+?<EsU=_c14v~nb_20Uvv}0?e7NBSz zAx4M*q&XYE>>=?=(*JHlh#38knjlH=%>w<8ecc#nA-?aY-Z5@0-th|4-3XL0bqD)h z>`(F<4Vu``a(y>O>5Po3P;u~Qf6p}(tT;+8smFOm<C*;-<DPnhSD4}(`!~dq{GE>4 zh?4^0PmZ%>44y~hKHcSwhOdF6K#!HW4AaI6AEr$ljJ!2BUC)P@Xn7m?8hJ7t^I<yF z+H66nPc+<z;VH%Cc|5RXh-{a4@yI_3DX_n1#2k-MuFo;xD|?|vx^eKY`OLhD6Z2y~ zp83F@qtfOx(<AOohqy96cjpM0Jv)FS7{~^$+srYz%)D8EOT@(VOG@g88(gs{{xCp< zWx8edG>a<m(HV2A?#E2GX7F{KCDUrW471I!WdpHjTbfEE+Lo>gGAuJIMU`P<)^t^V zmT6hG#4IgLV!FCTJ_6|rq{}n~(qcM*bP<!dKp+jcSz?%FnutzJ7jcPJTUHW0a1F~! zQFr5K+Oo3sckz~_bc_Bl-lpBfWTY$KIGgS!S{YX$o<MkkZ~|cj{DoURcaQoF8hSQr z+@xu<=B5@cZ)nxJ&5do_weN6K$4)nQHh1ZIOSkSldiJ`tx7Tfb`u6kgKfuR#;Gn^N z{sDnOLxMv>!-j@OL=GE1A}V_1sF=}XV#kh)8$V%U{OyzOm^@|boe6g(PMe-&NuDt? zC3RNX?DUMxIa%3O+uV8c7u<c%!bOX7NY?+;ml+zW=3g(g$J$@b_jNywKnruNzdUi} zU(N7KMD?GWxV}Hv6qbtWzj|m7ujkK*Z~lL}>^0)AEZ3R;(-Pper0UP@IxnLH=`MAv z{;TJ9eSb#$E2;mZZ+@=^FsIh|Gjilp^?eI$7yd`C+TpK{+Uig(MD@LP=jKK^$X&A3 zo|k{`eai}#J605~ynof|zdi8aLk~al=wpvR@#LDnKlSw5XVyKt{<-Hj{NsfeUwZkK ze{S6L>T9nTZQk<6n{RD>yZD`LCEIuGeD}RwyZ7vU|AP<zwQv7{gNHsk{P8EB9{KF( z=f}P{e&XbpUwwV*n{U5!mVRG$`iCFSl>hYe*>k^~zff`U(yx`j;nbAB+7Mh_(O*|X z@V`y}|91X=ng74n5MSM2HN^kh^w-BQ2pco1s}t_=6^6PVcDXOaJ;zzRR=V6{OSr1{ zt6c6^yWIcH<^BPedmc+_q*>&0?~Xh;mP2}?DF74D`n-62N_tYpyf|y3H3fYryq}P5 z$r>{))iT{0m7biDHCuVaYjH*;W?RFiTT|v*Vly&rnJOLaSxZL5V5U8BhGl5R{CLch z$Elg|+H>N)(5vOhAr0enUcJ(7X=&Q6z(-IG&}c)Wxi=6E>b3BQjEt<$YoIsAHVka` zi@m*}rx@(*9UqI2FwAB}uIF7d@4^LRN9KH7o(O?6-8?<vc;b(b>S~aidvETNTx8&W zZ|=QIb6eN1j~w#z^HE^l-np^4%MiL(PrumsT*OBvv9Ze#(cCL{SuSD;WHWQ-Oi#~- z^D{H&WoFh$pB9190NM|_A*>CI^cm1MDGi<IYQBt5nQh6iS;Nr(O|&ManIp5ZGP2Am z>E=<1w)E*U(KpY+D2dmw#FR8klG&PJ&d9W+n=SJ#({0woX=&`7Pe|tt{gP3B=EQV! zVkQD5Av}BQD4@PBVX`cVN#^9NjM)gSqKjL)aI6>xOh=%MZikE2bj%c2bjCE`Q&sfl zIW|kyf|T?b=46G?75>>4tJTF@WLuL!PLeG%EoHhA9wQ>sh98Kp+Y^44tgO2Is_C5$ zVyqT(re@VK+p?K;&+cj2{MN>WXEQ8Tuq?%Do}Q6pF=r$jF*v^G{1&+=Fmk3PBMa^{ zjLFjBYO^}Z6F-sjGf@$=QqVY!^k&aa1kb^3W(`t%_C?wB<X!em%d|wiOiM|(1ek#o z)43vzc{5Y2mh8;L=@xThvelAh?%B_loz-tzN_sy_`rMvI#A=#V&ei45oDeW2D<gfj zCEaR-ubG!6Yj#RH*bi*LatKSGIkpTi4&~}1N>@XAyi0T~-n<m+Ocq_XH8D%2`bUy; zDHlbH&Zjzf19m99wL0O-&l-G8vZ!Kp_18sV!uVl*{Az?%IrhAzr#Vr_p3>kNv#v7g z*`+5~1=HL-C0~eLt9a|cy=Dp3$l(eZNX(jHW8r0+ds*h9cBdqpEweMN3vRu3KGPsF zdc{wq-H=tm5J(E0dA4PC28QTeMR(}eY`A9q&RoYk%q>u8{^aGs*_Jb*f^EyVxQMu* z^t;xFmHCa%9em5Dr#JoFgCDep2TWmml09W2gmJ+Xv^i{8vZrLH%+5@+B&6v&Z%R^1 zwsp!pXv@qbP5J3K#SX?iqCe&g7uAg<R_bsnP#o4><JsCFR7EhJOKSPw(W$0??f6H! 
zU)#USt=IPN+56i5k9%F)f5B}v{g=7mf2>caC<pK|=sV!z&-dGWL+Moc*QRe>U`_uc zF7YM&+=FWR*XG~pAvOJ7<0D(~Oh{-|-kV(RYllk;s}7gxa$g(Y))80FZ);?z*azTc z@S}G82_tLzf9OIl<MkX<)4z877su7~ug&iciP!cow$${moqyB}&0pYZgL{LH+WFj- zRx_R2{_k3A`q$3qfrZ!hFIiL5zjpeo*4Ol}&8Jas)b!s|i_b0J()<Okm^ZIZ=c`*o z#fGyCrCqO{hkHPXbLWgVSciglqqwNIj_becbpHM8blSVQE~mNA4P%9HtuHhREl<4Y z<Pm3K=QB3ZIx}9(!ISF<0=-IYR3WChaY@XmlxbO<Ox_^ERgcaXj$kmOQnqDfp?TEb zii;lci7D1$8Ch{?j?*l9N;42L-I9eyFgznYJ0r~!Zp})IM01MS>>EW`c6Q2)bUlUf z8Py1g6U>3cgrje0WFcDD!`qmQBwLzg7`n-$6K8`cvEF?w`@b$@q3VQYxX0MitSLhm zSS{l-;!~0=;WIJU{g6&kRXFtZlbCj#HE9HTj_iTg<1<H7s{4<=1fPjXSW;4!TCEi1 zJ}M(IN#_z$=tF+Gkg27Vh0nAi^q#gwPkfvRPs_+Qo<>HF8yz{y7>7i<#aXlP9|g+d zMvRINi;YqPly=p$YpKBheOeL7pp2Asbj!s4dLt}}nQA$c_(0ujOP5kK+OkkG;}>L- zXZO~UjWF?%3SUzqgqW_yM2FIrNe0JbpltL!tIOphj|u5B72lE~=TC>>0s~bP5K$8A z$(5^nyGLb5Sf<%#%&=s|f~^#cA4Mdo3C~KgVtA4khwcamzQW&qlqGSl<%;)T*NaL= z(8RQqyK8%fr6<K@rlf~w*pTk`NY|BH?L5X=(<Y>&`<NsSf|F?$NNKH1LtM)ke0$Gh zd|I}?9&9)7Ar#u0u|0eOsoK*nKB<B@1MENzdV;eoNm1z-9I;c0aTfXFB46*h)T!|^ zIUbBk!qQ^b*eGlxcK7=6z*N-&@h??!S{=n2HI+^*3ro!8eONe#<%3y{V<KaC-xxdx zmMKZ9ZecD!^l={*9+Q{}se^=o$`bc+$e&eAhz#PXNu0(;ah9plld>d|>CY!a2t}gO z9+B0<A1&SNcyW|5>+%q-s->H0v1X%2wXAPc+3^`w^;kTw)%RFSmKp)4Pls+n`9jzp zU@GHNrq9wb-L801Lr;OJ3zSSR$cA~1L;bL9MGNl4s~&M_7TCz{*YyKE;>BUDW<a`x zcwD8Zt1|HNC6{L?CWN?;{BY4D9^kgFz%sIiVPtH}LdCHtNso6MmS)SI$!vz&l9SO3 zr)?Q3J2NrMQdJu9OwomF6BDyiIK+q-hZJVxESf;ZY2lK_Br`$G5|B<kYw888an+<o z*HI|w{~umlA2Isd(5;)=Z;K##B|RWWx~t86t4B^?U2!q4+ZAuCU(lcM-|N2|xLyuW zo^Avfvcs`B?>zuqi&G!a48XB?BS1p{=YKgLN|*2Ga(xf|IA?A6F<r)`YXUG`iZ$PH z{m%_L@O>8m-wy=vnYDt?;rcU_nTj(`e;=p2)Dld85`cMQs#L|B4c82ST2X6;(9|nj zNe9E{0*Lbx-CYWowAcak&jT=j%$ciM=D+LDYvB^VX93Lbc>u$00g$GB0Q!Flpnp3s zh5R9qFTC6|r^?kItDX)2tG&OP{~rx+q+_^7{QpfC!##C%TN)~^2mZ6Twz%r0<!ip` z^`FJ-e=7B>8!<Huu5Lv8pTh7zpU+iAKyRV1Dgx&JJZ}GkIs6|Nh1mLGFe!U@K8ptK zuB8i>cF|e)+zIVw?Rw2!^3+K#5f!0}bCtUR;kU0ly3VY5U-7%-vi2%ecZ;qwe3%}7 zGs5qG=F~GC#5d14pP791^cSyHF~*`n9((l8qh^tCH1+6!E5m<u<oL;pLiAs(Va9pD zaD;mDM5uTa@E~9nU<F_qAP=w*U<ITB?gESli~@uLf&hL1Uw}8@Hb5^xH-H(?5zr3M z8qfmZ38*}dO{)N>0Stc%?lHh&z#c#e;B~+Tz_Wm-0S^Ll0aidGbUutWDGkp_fGL3S zfKh-5KoH<IKu17RK*blx2XGir0(b+k5wHR9G~iLdD!?*8E?_<&6EG8S7a$f81n>rQ z12B9$KvO{FF>Gc7I0ASVuo18U@HAirAPEo);AP;g9{!C#*2ZBqQv0jV#IrW8e$A`% zFn<Xezd>1l2VNoXlTE@zx35EM;P8L(;rG@y)vI;6>z6?*`O+19U?(M4`<%}3`ctmC z{sZk@BeL)4diBKv!5d{W3Y!`H9^~kkFT%$n{MCSE&GKj3bd76#De4!%xC}dj@hnEx zXBYav*I(oSR2(jM{9#E7h9k}4Vyu$OG1oG<aMY!RYpBZTX}qU=yaAW-4#1_q6E4ST z2$ajhX$O~MF&syo3rAgAI&Ny!!iYNFQwHv;<2@tKcM%XH9ll?7mG_Tc<^6`Myno{= z@Aq8g{gJD@ch>QqZAk^*dk%FIPB8D3Mt~@fa1*`o-gs^|%uQ^-0L^$V8SN%!f~m%H zukrBjUj2Mv0iMwVH2mi;M|h-dJfB>NXYj~)9&x{$xWgsCUhp#mLIG|<M+*0KU48rZ z6~l)Q7xD4&A|)k7*laeD%YeDL_|FxOKKiJ5{`u#{n{U1;N=iz^r=NZ*E?>S(9i>94 zz<4O<Q&VyOhq+CKY4^N&`_<!qd8RVAY300m3l}oL8Tn!5e)-`Ac<;uuCmzlqE}rMj z<Fj)=b}~@Ucq@e`e$L3sWaU3^VY%mNM3%es=Xv`*D;PgH8R_%NVfYXCCnxVm{_0t! 
zpPZbA_Xv~2^ef@d3t?1v#^;rG0ne4lfB!D^j7*sR3Ffb^3&<ZEGwi3)lJAIL<^GE& z@N}L56#h#qmw#CK;d!RP=gZ@!L?J+B8q&ve{kQkFzV*ou_k)CF^2hVY%S&330k|so z{9FEjmZLu0j}R*m9O?h#@&0~V0QFq)ul>(<ed$9Lmi~x$IxI+5AiA0Z0RH>djsLKs za(`|K{lOpk+k|ZumCBpp!Qav+E2Q#P{AUtQ&70xXP7+5nZ<W5njfpe8W%^a39z|v6 zlAM$7xgohibanxE+`9pkXk5jmKf}4kV>)#~4&KXL;RbLAGz6Fcoh9YG1;7mWJvsOF z^~JX+_|iEpPTYO>-3m8D##gOcC0=;p1@ZdpuPYh<<daXtH{X0yCFhx(ZrKPOx^YWG zO8PyS_sGr3+eFT`Gux79Wr!RUQSvtIZoaK70g^B)=f__XHsdDe@84h-mtT_??fquL zgbCY{NBO=8H(^%voH0@sCCJ4EJ-1DskRWp8x@|=hnBJQ!nSPFxKU}!5Q79?C2isY9 z?krn0>@0=54xk;Zs~e0k3{Nm=d?JrR6`!Dpu5oqm-d%Wkd11Yww-_>HhzJYAH~CRf zig#njjuqG|4f|A05>uy66^oOGi5W9yh}6_nF?;rGk(Gt-r=UL;Em|bj+Cs#OcMlP- zEe;g+8IfYetZ?yQMwoaaD^zS<5+GjAjS`P9nI^V8nkjbOKT;f6H%s(-M~d5aNYQ_n z6d~_QG4OpUg7!%<<zG^SAC_X|Q7OirkRlr};VUU7e<#KClTu7OEya>0OGH6IfmpqI zwRq%_N5o@~Jtm%f@=5XZ(@%@_>({ID-MDe1C@Lyay6o+@-xeS3+$fg*Bt=oV6z{$F zp4hv0ulU!${v{3`JSYwyJ}izNJu1HV;tO%+n<L_*pQZTn%P*B&l$Mr?b7#I6Utg4> zyu4goxNt$q3fWq(ALch>F{cTk@mz&@z>N?dRQBfOkW+bs7$W<N$#Se%Am@lj<!VtR zH(`CyK|`O4)`;IV7VT@M5JOgB4Z%jtKO+8&a<rAQrC5&mPa^&+h+l&EhY<giOZ*;) z9~vvf9hpK{S7Gh$Mj`%z_&dslIEeU15dS#he~tL1h<^t0&%4C$*8^)W!yxOEu^tz; zx{WI_hWv*R<vURc549BXNPi)}7%SwdIYNHFTFCNELY_ZZg@3=+h~E|Qy%9ec@naAl zX_coS{(_c5F7Geonz2IubB>VPS0l|$LVk0wIzBodu^x!u0P!(tx)J^0{m?(<=)_3# z4MGO@7jnv2A@80e<Rhzv+`LK1!w0M5i{@B6+7|ttE?7_Og=#xU?1$u+WA6mn@(v;U zX9_v?ULof^Ddg%`h1|49$b%<b;+qkF5aN$Q{Aq|kAMsZr{<Dbx8sfi$_`4Awd9FT& z_}?IYc~$(|uu5|zQb<7x_aTM#Na0<i@Fh|xZz;vk{iQfJR*Lg;q`0tJii?}1`1K$T z5Rj@p6*nP%AH)wq{Bejs9r5QP{xZaWtfdss^_SxHu~O`qBgOvJQXJbPMcKjX_@0PQ z4zxl1n-ISX;`c!O+Ympvr4+aKmtxLXDOSuuTC1hlwn>Vk2dm@PR1FE*<@z!_d}!Fv zun6^PRPSEhyLIi_V|Z17u%XdWQ4tZ*;UOVm5ea>I_3YWb+wkGHB<KMmqTr4HDBMIu zBzWD*0K<oyb^qv?@W|*8Bp(?b7BVy};Wh^7-o0xV-9IWOG%PwSJR%(NiCN#<yu5;S z92q1!Cfc}$-PEpaLSF=63f;PPG2a3tqnLhlbW})mLZ?>k+rb|Jdg%drBp`lB7!wGO zPPnO6tM;0|!m(RdGmwm`N<X1Z>sGBs4p9ksDJr|bANWTwepod8Tes>oQpasnZ=!(s z2_a!IqoboEqZ2wXf%fg%w!N`w)21Cq5l8y-<H;E0lF*?>fCRFE{^5}^QPI&ckujZX z1b~0f-YWf}F%i*`F%dDtZtl=V&jL9l+}f+pDA3RJ!&Mdr*Ajx-0R=vRe=7a{fiYll zOk{M7B0}M4@E`amMD!0O3ehn!2_1$FYt^b1h^X>U7}Ovkv|l8$7!CiMhYeFycC7Lb zH`O=!HTRB;j)(!5hjms|b{eLOM@T?e*Z_A^eeaNjn1q<9$QTVvs1WUn3illv-p|9W z@o3$Hyo!o|y3+j<{QI?N)WFTH<p^L%6l0=8AV}5zVS@*H`!sU*ydesn;X@(IzP{n% z)&2<yL&L@dHg8ye5ON4lVCqp3)%hocQ|`tDHmlb#l>Xr%5Z7oUI`~KaK)^zqv}`^m zAv`7|5VBU~4S(Yr(W0e)Oc?kW<>F2LN2m~ZKiDTc5LLz1TcwZkjgEjIMnw(`sO9aF zJ_<T2ETCp=)Q_51OpU~$Uu8*7g()}WBsv!*fLsm!N`Ec~Y19T;S6?nygw*QKUAv!R z^ykKn8>jjsi!H;e`X0~E4;IT(BE_Rw;o|kBkz(WHi^bqQ=u<$?jrdfGiRe49&oCeE z!w)|!o_OL3v1ZL0v3BiR@$9qDs=mT&uf3-FgKxd{mSA7u<C5pa6X<VjL|@_Eci$EJ z_U%)BgHvCf5MO=ul{j_klqf4J6Q@s~7C--VM)eJT`Q;b!+i$;#)fc4LgucSTUtRQU zzb<I#2BV=vy<9aN4c!7XboZm7drq{JuZ#Zj9Whq!5p(26VzoRbHpy?$epk0Mq8}Q> z*mj7I#&*?hh(8GNLlHk3@h2nx9K^p5@z)^!tBAiB@lRB>Gym<UfPMe%r~IGnr>K29 z1oo8c&;gZ5i1uohvqQV?{rdIm&G+UG9olv1(z|nu=FM+Lf4F_uUfsL*?B1_s3)8L5 z@Z6!xEj@c8Y|9p%`+4>A5*@qt?$xhT<9;m>pn0=qH+SsPy;r{`jeFg!zc8D7_3r2C z*|Znp+<HUvW*yqK?cJ}5r)T3v_3Jmi)nw}6(V#=`PE9-;^F_ZFo!c~YYuFWG`}OP8 zs6Nxbv0c4ZcxrM><3^3VFb?R_{U-O0@I{nPct-wS9b5Oj#j9UGub#jk`S<qn>gMIu z#Y@rG5f?3iUAm}8kz>OC9}N)@ePso1aO=AG-U!3+h$lL$IYEK$xws49Cd~N#OIJO_ zMvq-B<3O)nx%J^P=l_Z8YSdpVZ{pho(57?e&RyUVx&eALYSaiLAbgIZIE5}WwmAee z0~j2~?Z4$Bf0!;tH`jHMw=Dn^fAdpMJr$33XX}|WXTJUM#~;5#+w;{AKm70w+!N={ zojZeZ>8WGKj&0h#d$$9239q1_AV@w-UoE|1I$V-MKKv2>v8>rJ-_WB+k6Yl9SDaqH zo~zyD$&*6{;X>e>FpqdY#?f-ywr%pe@4l00PbKDEI6);(oH!vd{+9Fy4H9GUFPu(i zDQNxj(4j+r-@bi&&d{MleUJ{zsUx5*fJ<bE3(K{!4!kFi>Bprvod9hySK(7pQE>_P zXgIGM|MuYbe$+u<rZIT%;2y*ecq#nffB$`nIZ_Gy<Qs3iq435WlKl4DZ{>jl2PEbc zR5->#AA#>rA<k2qHf>t8Wy_XD==Me0?e;O5nVAtQSFVgjznZw+3h-*%wyh85HtrZO 
zU;ug7E+QhL7t_W4AHx6a*|QRTnbo)4a?3#QaOL;ke=jkoTgB)7`}fPwKKo4JiSkjn zvusYDJSji_{B!k8nhZdnN8Ll_&i(Y$PZf~6Uy-Ks(xprC=bwK*`|-ygzxnmoU!Oo; z7a0e1K7jf*5IC%4+&_vx`co2lRRQrgfP6K8WnuvFC$H!l_k1Q8bkHR}1^{Pi0Lok% zV7xQjKZF0yojX^c9J@j$@<_{$9Xk{aln=}|NtOZTnUuT`N7f0{VdZ}G(MOWNy20|G z%lg5(!#E#&@PVR%Wr+GOSq_-1=M-+$h38-KzLNf^!!+_=Z~bk5{``4~W=P4upPye3 z$}xdufO#{yb?a7%GMDHxt2)dwV3}ZEO#yM=vuBTFT{&{(h`J{Y)B%(?))STyaohZ( zloo7ZAF>apneUad-+MgAPRhO|I3M$UDPO=b=D(aj{}Jj6>;Ls!3jcTBd1rOEZrui= z55E%nQldN*4b(X-1M-#huq=o>?^*uGj~`dSd-9z8Cccyf(n7q+f6C}Hr=^VeM9R=Z z*xDOY@PlD9@Yw?z-j%X9Xy{QaW!JZ)T!~`=Dl03WydLBcM&S=#FAX661{h^wxPPXe zupUre;*U!?93R7lWB2P2(tt7;^nsKEKtuYcuy<5qzWX3f0ewe_Rr&Sek9ktc@(Ldx zpRSOFJko)3Q1lpd&?PS^N92E9bdc}lIm?K0L;0uP#h6VYGzR#O{9L19C_VxU2Cf01 z0rSZ+6Lb;(l#itB_O@0nt`~o_8Pd@Ilz+5Gl3<hvUE*)ZIpv?_LAj!A7__jCQ_d*& ztQ&^QdciC1ODRW#1|H!s4E!byL-4Vi|GzXEyipf0PcD0HQ^Bc6ujgt{{Gp3iV~&3y z%3&qz8FfotbWq-1%fyf?y5v9o*)A|H^^EH^@f7MPY4}3QD9{iI8aDpm8se(=Kf&km zs}R%v;>C*+?Mw|I4F(u=)Kw-1Ev|I@`s=Uq;G1jY`i0T*@i`&#;fzp)*d5<WIpJ$5 z$DWijhBV-O3ss*XGeJgurcOeArcUaCdcgYJ2lB)^eHnJPI%rV%qr6vO?xHL7dLC(^ zKBQ~VK$mh)xvouzAzzer%F>AsUzg9%3zF+>L*(DHgB1-VU}`D0%-3m%Jt5^N;4}g> zRMltFpx5WV)Jf2(-Jz4ZfQHU*Xu1{2EU86<!oO?Ru2uRUZ9LogE6RiQm+gU39;^?1 zHtGrShu)JVk0i+#7X-=Y<_(e0fQB`ofmb5*=M>OzJ9PZ`uW+c(iR$`H8jSi(oz#Cf z_`XxhTS)`>Z^oYWW1y#S?W+ra=zAsqsB0_PA7Y&|=rCwu8L&NYEfa$ly2PJ)vS>+| zeC3`%`2uKI4;t25HT<*BNO@NoXmH|42hhO!Oq~=Bon+K!(qPnQ(qPnQ>ZD#<75*1$ z<Ij4=_67Z{Dj*F8)TYCzE3S0#o;2{<lH)I5T^Jx=xjRt)V?Jn@J4F6c{=NJZ)tNLT zfCkvuWV}m#rcPpg4ntj_PV$3JV*709q@LSwVuFT$ZN1c<_@jTnV&K4mUC~F$BOTO> zbSX2WgMA*lq=oO;)*AgX`mw!bpMlp~x&CtVVt=^_G`tKNHh_k=AIp&E&Omss<0=K6 zvr#5~Y@b1c(Z}fdjuyE#|EVtw`40#P=)tlufc2Gl6F1^+&_dl$ea~?X^(NaYwr#xL zS?VX>1Pw)t1LVeqf$}BLP`f@q1f8^cc9>i-Yp7h75-#mC!evfUgml>MkjFngAW4I3 zA4An==p<fs;SYVU<RAE~B<{qEE^#8>tPkX~Q6|*!uKi>7he#vkf9JAbSqvK901dA% z0u87Otk0x@^_e>9G3Zs&u!=OK4wd)K1P#d%a&c0mqJem_kHPlY=wl2-`-bZZ`KQic z8^(SFVZ(+E^2HZll<b!pV3Y^lYt<9X%gO_PPn6s3gXP;xKm+Q+YoOtudVQu&T5AiI zYqEmnqnRP{!Sqmhe_EJyfQEZP!&1<&2sH4j-N(2SbpzKG`iJ;K7L@!$uJcICz4zWL zUwY{!MUO!P-K)_-nkZY0>pb|H+>tj}mVkz>ph4AV&_JDJ)aR$6lb*~9k&k4C$_GHh zO3<(zG~`2$mRKSbqSQ%YE`5yGQ15YFq5p}$A^(Aafjx)=@x-c-DjHZWu5?_BmfwD> zlwWRtT)wv=TyC@b$*p>QrcN^I^Rv)Nq~QsjhQC3MR?Z5O1t}U0L{#-L(BB>fSOa}^ zS;`M;$vz+3v&_-{uEsis==AjT_lYa(BkRE8#fudUuJTozmfB^)XX*sjh2OhAefO2B zJ_hS^p<bVB#k*$zhxoJXM~oQ3a*+S{$3Ilxi+vi*56HQ5=T^~hEjq4MCd7w2>2>O) z->c7{fma>8uIp8M@(F8gwnKNyo;`cYgoFf%wHcEAy6o(1X|-DAiWMtVy~dP}(mU)6 z)kO<=&$fZOihUvWM>!tg#rnMeIh(A!gz?6@xvG73?PENq*XLEJ3%u$=)_8}x3KRPJ z<MFKU@6)HxAS~@k$5yvx=trw^c;=aB<d`vIB-R$m!oorco05F+!3PyS9N&}Qq=9V# z+X>1QfnylbKz&L%WLrocz3YgOhn^??=jHp)&aLiapf0eF!TL;{^!WT3qW`<&FU<WF zz!bWK@&j34jdA}#tb=(8by&hrVox%OIwAQya^y(W@4<Q)r9X)~ab+2i1_Stx{HE-2 zoWs5y%YyZRbz0SDlmpvmqdrq7xz^_ow>-su;1dR~ujTst`#*>^J!&3=dFypqVvQnC zaFm$SlDFP^t3=;g;X*vv-*5%eV1PjfU8YB%Oc`lYxBhF*H2HRZDB5SOkHPl&#YN$A z%L6my=O1jA=YRR-Bb4`b&p%)-rOeCAJBu|+a_Q2gzua-h9WpH~P4OM$N+nCAf%Ss( zKE#c5)TV`V5NwAj_v|CFu5dnL8^)7-$37C<3DQ!>1r}U9&hq-B^FK@rb@yq^QJ%Qx zo_j76PplhNz&dTv(7ShUiM4C09N1=1=Gpf#=peqtpE6H6IX0ln`oKEE`e2j^UCJQ) zCPv%Cu{X!hke5F+|H-;V`G+j6#d@*Btf#Pu%Y_RUs&XI=)T@|Vmbc%2yL!(uAPokP z24mdIF(v!hlzZYymoh}!4Ef@F_NmzChA#OV=AH&&o#Fi)L;M;1Dfa}*66c#q6LI65 zGuGaz`Wqe|E<-~@C03`Xa-cus7&Oo|<jN=$qn@yS5O>3+9C6G*xne(%WoOK90Dqd> zngXupQu-5guYd`lD|nd4`dnOGtY{#AIB$shp=d}<Oq8QWjZ(4&o4mrGye7{{3-M+> zp#I@|ju9w7>}zjBJzyPRonX4u!NC18(#dPru3ad~{Ta+VIe|27KD6sx&!zegh77X* zWAK&uoH%hp;e)c3=v#1Av8oR!ZwYx*GQc*9d}eq8pGgDtD0xc#|LUu+%2!@_MYVzd z{O3PaJkAk-PfqBzsc0h?0{0zwcOQ_Z-Q#-gH|>cp`#;qGsE;f8%=$)`xUk%*7a*sq zF9AD=(ygfLs(qubp?p*3h(GHA>m|#Ca>6p;d=~wUi}y^IWyJDedk%Wn18?e0+6k}s 
zQurHkO4n6}NrM5zjdW3lSQqZQ?>-5gCD~59Uc}7+#-W~}F5*S`;h2a(+)39{Pd&vs z#;5Ta&mYB|G%Ea!{y+GgM;Z*hXwYKNK$rZcZbI9w#tD=uwzaPOC9ZrX{wy1#4?(*Q z<({%cp0f@?XFLi#{tWI__`AwKb%bkq)Kw;=i*xK#rc9Aom#gI4;JK^IaFiv=I^VMm zAnt~J1Y@K%e*t%eKl3ogzi4|_QZCsBkq*{D@`?I`x{mFUYnhO@n>TNkk3asnl4tVU z0Mbp_VclWBiO-Z7rpLAbI`esa>h)*&PTYC5CqAz2BJnqXG#GHLGGQ866Rpyrj4=*r zBfgZaZP1;h#lRi?)93!AylYrj|El>P)GsyuMf;scTF8I81|4(_I_Q$8)Gw?H1eOio zu^gy7Sx;GJ)J3!<&;~_X$am!NXY@=p#x?I0{)YUcuh4^hB3!FX*uQ80hjh^{N1$xc z-L-3%(!1mV>nr7*__H2B9-l?_e^q8&DQHiA8uOpf6IJs+q=9WG`!CdglmoU$Y&*$g zjycJDmIM0&Mmg}AI-k0hdJJO<W!C`B|8Cq#hmwC+*{Ch&#<NjxU0upLpBaW_L769? zNh4()a&#Ge1SiUh?JCz;{E@ba`2F5>0|Iaj!haH&2iHDuoyK3jWG)()ZS&?;iiYxX z$h>(c@qJFY=HgC0|NC5bJv}$w#n&&$57%FGjr5H4UGUCj@^~)M-IIUC&C&6cn(L;! z?z-DhcTKw6S$9KpH%E7+=DN}C1$@kaP{5yDr^WR>?B5b7&sXg$qNJAqFbgiv16(gY z)1p#)J`l;(c`>hc7z=u%&5yQNEMw8%kHDON7~XpW*ynBosFG8r{Y8b8b7AWl@rh<* z<6JOp3Ikx<nS#0DPkvS?T?+@&+7Y|4560MT4(fCT^%?q{zd+xXqAfTLy>t})^Jg$m zmWn=b1j6?RbVYyT%naB#XeZ-bXej1yY4fJN`0!aN_hVKL-y`w*5#g_Pb%G9Ghde8& zKdB=*ULp<z!-k>v`7cASo`uf+2zv7^CQO?t?Txfs)4swvaoUGIJ}2b?(0(1Z-p<t1 zm{&-|+|xwjz`hg5o@^hf*Vz_PSF<n6ehU3~&ucPlRkV51)<zo-?VYqy(Y`~wKW%Kh zu0}lGcjg!XW6*Sr7gE{&vR&gim-(_EO`hoVtG<(#qV`LB*&Ud>rM-`~ve(O9ee1lZ z5zAE`RQ?<fvX4L@4g~6V0(rvr>xXZS%IED9m0g1K;k>5(0DIT>n%y)GIMD9PH4vQJ zr_I-pN7{R7XAFY-aZQ=*%r*t%*F^MlClU_>7nVQ!z{G{^&(_Cgt2u4joH-|N<WC&# z1P-)&(tfVw5jfD+MjH?1kv1yYI%y}IggSz2k6tF7*^Xo0B^}@2rLt`%7&x%cL7uR0 z{mSBSW$UAThVzKLm^Xp8SK5zh^K+F)u4SQ(hPFQ1m?)24#VYD~BY*aT4ErM6Rrb@2 z{+oddV{l%XbAjtHpGli9zNM2(P{&+xpgc~jC68FYuj&NX(oi02+37knf7nwJF|RPu z6%Xc3Ij75u_BPr`XfLFlYV#wRziTu6{t`|?KLML3aNv3#)(P5dc^UMxUx72v(sOch zQrQP37<h2rguLLz`E}ZwXyc%*jCLm4H)toLjfu7m+Q?{MS~y*UhnJ>E;-KUac2~pp z&GcOJrw$;nZ$og!gSfE#x8D~cY43U!IB-srbK|tJP#$ULqJ4$7MrGsD^Iw!Sj9IBV zVaVeU@Bvp<`Lk~ZJ47PByPQZ}!0|r&=cJpskOy=RtxHjML)xooyQR&Lw&$mGdm-xt z?IW}m=E6p0@PL`Q>S)?-aaGAb>lVK4O^5zZWnHA+;kbZfJMx6Ohx|C_{7k<8K&-M8 zaDJNdNE<%qrj0s5I}>evw3FrNI54x=6Pg`=3i3X7fp4mGP-pfpDTAzs?DNwl9t84( zKs?ANmczBKk9Pc1*-UAx%L5Lym)5$j(0|O|;cz7W{qKLDNZSzm<HUnJ=eU|<YR;Py z5AuTgh<?0izvVgrqfXQ%j|`{FT3t?O_OUTvk&gM6+2q6h_usGd9`!EUKk9$V0{KB% zWZuMu7wg22Umcb|eDw+HL@jwlouEAOsw-nd{y84!JO}4=$zzVMP#4vBi*=JY8Rg?z zHY|(xR>#WnQ-|f5uRp1lN7M=0uPKjDES|*d>ump_TiM_4%=}@C_%bAfy}EhIHjO<M z<nraq)%cD$Q7^I{5D)Tz<wM?_bsm-b*3Ob|+QZaZ07D+1Sujk#`cS4UJB7|kwXD@( zLvV1g59WIpkPg@}Gp0?Owj1AX%UQE#sd-?mkyLXR#@L5ECqKx4j_)W7<OlJueKAdh zI|SKsZEI`f&j;AWCZOJ{aUk1z+GS{4LO)Al-M6CK7=IBD0{KBaYL^Z1W4lG&!Eq^) z9|U0k?pjx8jzQ7?_@1^z+9mLvspR}CahNk_j^aD@0oy^QZO8!gqFylSD%y!k*kxuw zmb1}Ex{OS@=IQswRGrBy@O~8z;n_tS59jFd!M|i#(7uLtS>?n20Qt`R3G9=xk7l@N zt1f|`_aLpOV6XTAd2=Z6dzX?w)<LGlz8(3^xm?O1=hA7{VErJT#@rRlfO>)Phzof^ zy-l4-JhyGzc0V$1{(G6jU)6uuZxity-9+ZiG+EBX0s90<>?<Hgj~=aXBMt<HVI5#N z$_@KT)E8Wrh`I2!NS9-U>$#NvrT)P>`c$TO0(0_g^Qm)aTfm+L%ErukLHu};55^oX z`^&Vcu-^_Eo<yeCn>X=O`5W>~z7mKBf%&mL!X61qH*l>gX{XGwyvS?PO`VLnm*@Uq z-YS3Q16`x^e_ePOePDcZAo0DUnk#0%kaJFy$G?!b%AfQa?Js4C^fFBXbsp;+>o)16 zjB))A*IwYe0Hq)PB;CxLS7*{+oA0jtXBiNvd#U?a&e#)z^7jMg!wQfF=TH9Z#cxeo z{7GQ_`aS)2RlF)Y<C|T#s_%6>=YkgiH-NYfU^pXAXG9@{;GF?ExF^WmI`&}6z2duF zv$)!YwOUNO&I|Gw{?t=Xy$&1AdFZGgu`c9q&pr2C4ZQ;!z!{{G@xlu)sP_otv2NYE zD9mTS^YY6ttMWA3FqSiP`X{cL(Vm9&&zxud6=|G={GDSx<k*<<`U=J%ap>1^U1JEw zR!?9Yh--_H2E$UO*@rXwGe(`FE=$0;iDM?tAsj-t#uw{4aT)z%LHhyoVqK$NB2Zo# zmscW&CmaKB(Yj>no&NI++|ATYtVhI^eFw_+fw!NPt24B|;dFic#_=P^uN*6Itm2Ka z(qq7jwpNbGi96+=`Tk>Zl<MQ0#Te`f;LJ2Q7U$TDa}gZ(aop*LaVO`f*{34j)LXnb zhT$0MwTI?Nrr~;(LB(=Thw}&=vyp!+C$>TC2l3i^Z=~vPas0ut2gg3_>vFvD(vt;} zX>c4q0(h|eu=k9zb5n<r4=3MUD>vs3QR7MW-Pv#FxPfD*MV8?zkGK<3Zos$}-+9ZN zoE)`AgmyKSCFRB7zY2_>1KT&Me*Yqjee{dO7Lk8w7nI(jO^)ja7}t=wO^bu&_a{G6 
z{NxxaKSkw#&X6C<9(6tS4$BhXt}C8WSF<dS?|4SOx^%c&&%I{;NcsL-&olcXBR;-e zQGGS)FUk$|Hv3#G59(7_TzJp6+=xq;;f?Yq?~H!e3AB^UpY0LH2~3-M8Et=#YkcZy z;>~o(3;3)0#5SArNsP;JHrsC4Vrs-?I*S{=6xISD+;tFeuLedKh9{U9)(sPxW;4!( zg|g(1Yi<HN=&C2~w^k2+eCMflTXTn>_Qbu`Jg}n?pUi5fwfcHUJRwX2J)~Lfver-p zC1z*i{PDB}=J~VJ(zAzjwPmFTWKW-InVp#3XLic;tc>i8WNRNBZ5EK2J-hGR{$0&D zbT1{@l5L%cgA8zD$B?f5`+9c`Zr0dr4zgz90CS#6ah<98Xz>s;8)quwsEY+E9W$<= zEDO#%#OV!5u~{i|anivIOLmo~t9PV2juD8A!uf4!<~05e>6(}wl|DCPmL;pJ*_IME zeL9YE8PYX5F)iEDH8`kWRoXQpSEcU~;)>jZ`qjWSs9zP)h#u6>;Ae1bSZr8CMAYcv zx8JVc{P+3a#}D>CsBG$cH}Sr~yR&x>Z!hmq?^y5Y-p_dN@c!7l(z`+boBRLVf82ms z16~}kb-<nhKM%M(pt(<k&m<p<&te~k&sv{%d_ML$?PKz7?K{YKr0-1MeBTFspYeUi zcen2szNdVD^u6TUXkhb!=7D_&`V72p;Nt`L4)hrmJSbsM{-EQ7P7i7|IAU<j;PHb~ z2ImicV(_8C=La|N8|k;$Z?E48zbyZU{9p6m=^q;Kb3jFaTcBrP@4%-5pAUR1@V&tI z1CIrM8(0?DFsNlv&!7Q8qk`gt5`r><Y(e)1Z425PbSCJ0P>UgLhYTH3I^@g{x8Qq% z?ZJ-*uM2)R1Qi;mGzl@=`$<r_$@{(j@dLXL8ZxM8@U6)84S&CY(15W4Qv+-P`vY19 zIf6DKz1=~FgH8sW4*EIBV@Ok^dgqYoLuL<oXNY(3px~h3p}`}9M+Z*`o)SDgI5l`q z@ciJM;QNB_4}Jvsy%fADcuVm1;P-<+4*oLu+u-xTzXrR7Gz{q!(k;X*q<_fBkUK(V zgk*=<Lzag;7P3C%)sRm@oFV5!fFQIF)Q@Q5-Nrl6JKcMc_r2aPcz@>Y^j<&U(*frP zwDFnm^Mp^a&mN!4KFxjoe4~6P`cCzo?VID9=X==qTi;y+4-EWb;7x<N4(c~(;GnEQ zD+g^F^zNXa1~nf%ZE%oZgx?gud4Bu+e(`hjZyMkanxg}z1Y`#+3Aiudg@8=~?*|+Y zI2G_ifC&8m+PnUrs>(PF6Acv$4Ga99VxgkGU(Px2Iqz4Th-fs?X}XE&4E1Ut*wom> zb!CL8=){<!q23r0GL~p&WXkRc4V^A6GjsSQ!?dzAXKKW<&&59@?k{({+d1$1JfG)z zK0ABQJ~v)uiUx5>+z@ecsEh}$89b&>kkh3fxZWbalb2<Tic`bXNR_CtGAf|<s{@MZ z6yRi`_5l}L^e!C$E)MB0I=Q&0TXmcM71-#|J<anb&WtdVfC;||nv-y1_|SBi&>>bl z;O~b<gCb9(-_av99y9F6MYsZgi1*_I_y|6Rzr?5Td3*_9!8h?AxE(*i5u_J+mc)=a zGL*y<N~Vx$WH!klnPe$hK~|Hsq=b}`9V9@4<RrOD!f7OprhfV!-9mTMAU#1((;umA zaci!%0Q9`cDz#i&+0*Q7dmU)_Z#&8v;w*C3I_sTEr`D-=8lC&j6DNjE0&YKMt*pD- z*PZRobC<XsuIHtDL4Kb17O`TuK*AFf#XF)vl!$%efM^0050*3JBH;196slI8Qd&>d z^K_2hsJ8)c2SGbO>%00d{jct3`WeqG1ih>`--a}j9u^illp2nfp+>Y07<q+EAT!7& zQfeKwKDVRUi|nHN#O=+8^9KG4UoPX-ICVq)seF2so@!>A2PQPsVJqB&piESPuHz{r zhpZxdNgMf%gwe@#I?bnL^boyBdswkninY|rvFfek;O<A(D0{lS#@XZScg{EiS%Ev( z%l2BmIG(`Y;;Xrzm+}f&_W>U+=8L6bou~!ZM#yXOrqudXJrf*Kt*`3adX%w@Hq$@> zu#w!(nX+L6@EA<7hc!;d^-$?=ptdQ1Z86zEwv$RyOAe7na*Nz0?PLHQLC4d@G>5LC zhrtu4V6V?v1L5_zS=Jj+!EM$KYoE0O)Stm}*cKLGpRf~9rIBtsA0%dpd{H5)#c|Mh zcX>!As#3LE_1ELI3w}PT@8}5A8&n!=2AN@IipepXO_?b-6((S+OpU2C^`_l~9-KnC z5{4pBB#J_BgJa9lUQ~r@P#ro8>K%m<wy=vOP6AA+I1O(HB?oX7uEDWn2x$g1_sL@t zN&C<t^ktez9V%%GO{1^V`E(he%cBK!GdTVR?Vx^8;%57x-EP0{yveSxFn6b0D7VWR zc~qY1OsW@Dg5oL-(kfRKs4`Wp8q{eODs~!dGaPo-1Zk3nGx0`TY3;N}vuw7L9c8<{ zp0W?*YJV9g2g{e_NV!+m$rJJ?IaEznmz4+Z?a+5ZwF*4dUw_D_R(ymM(a-2vNTadV zX{*DEw=3-)&M0T1lj#&WUpuj^iJfIDAlY~D0<lXpimzbaAB>yp)L(Cuf;Zq#@%K1L z$3SLLhdIJAaP?$ouCv^E*Qs)@fzNue(QGZ-#~RsX_Ka(}#?5xuxP@-Bd(rLZF)!K6 z^7eSuUYmD`U*WfSUxCFEu|kB)1eqbT<Tz!iBsE24s1MXf>WFGm*}7T(pd-y#^G^s< z5N4&oS2*f}l2Hci>NQB1b9fAyL#jy*EwZcZhjur}Rn8``Tz56BzRukStHVX-p8f~z z0^rn#vv4lXhrGG~>D0#WfDbx&goqL`Vi2TiqOgP%$s$e66d91QSz;yVun=+q?sNU= z5RoVo4f<+<jsU;ids??hlz>u^4;7+fP*w|SgBnKS1W;Bw&cJ!N442~uSn)Av%7r~= z5`SmXT?aQ0qe-A9AI!lB4~v0pjYhF3!A`W1ZP~6Z?Ib(dPPNnQJTH&ugMSNoF)!g; zc^NMUCkJ>Hui<sPo*xERALBvZ%v<;wevV(@mv}3`4jtn*;JgP&9|P7f5dl3rN<;(V zSin3C($5kupiTnZsen8ku+IVX3jn_l7{~<<@_+?D@K6j)Y!zjq9K0D2RiZ}JiF$Du zdeJcv6wRVVoDt{51?Wt!OqFw_Pv*%AP=6hC?8CA_9+N@UtX$~NNje!ibec|w#F+zK p+NZO0u3ia!I$!&Bp)Q8rR25QU*QbktE(W?7=whIYf&T>q{{c%Cm0JJ+ literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pip/_vendor/distlib/w64.exe b/env/lib/python3.6/site-packages/pip/_vendor/distlib/w64.exe new file mode 100644 index 0000000000000000000000000000000000000000..c41bd0a011fd760ce20ba795d9e535e0d2c39876 GIT binary 
patch literal 99328 zcmeFadwf*I`9FR(yGxc_IE%0lE|C=$MI#uSs)<W<5A4cW*(hGnR8g_PNChhmX9X)r z;z`;p<67FPA3xSsTm7`u+D~r^0TB}d*>Det5JUwPb(RAdM3ZnmzxOj|HwkF_`h5TY z@zR_*bLR5QGtWG?d1kiku4R&4k|YQIH%&=uz?1$3#NYq?rvsk{j9NWFdZYi=iyCZ^ ztry)s`$zM=^Qs<su<HJYy%qQW_{WcE-XA{Tt&0BG`=cLwgE!yiefW`C4@}6-&GMz1 zZhKID=9zvMC({3Cs%sO^;{6xDi6`C^&!-Y!h-X9Mw|M^ci8m9!#PgYcmn2$6etoL^ zn$+_x@x1j%6|<?$^G7f(BuTS=)=&D!oLzUNzja8XrR<C>N!p2=|Fv=#eGgB!NC#~6 zpmc^LIq47nrJo`b$aAl-;Y*+<T`5%;C9Ou%52~BWp``SDzD=4)iDfqF)oFNE+oUxB zrRQIVO_~J&yvHSJWKZV*A<-d8f44yW&cYM42Nr7hQo93x2p}3e5ka4SUP+ocp=#Fs z+WnHW_G)}Un^H0U-;MwK{0o3wCRoL!db~)50C+H-1MuwgFCa;c6Xsb3#TYSGDF+2c zf2&+zLe>1L3Vaec0dAyQ@iO7N$~`dm5fls%5d&9Z4AgF)e*sCF)aUj8Pxiq;-A1`? z9o{4CgK+FNcUf$5URi9a_qIFLn!_sSL1oU5N7*E`XuTS%^%Wu~!ZxiYEQjNh^VE36 zR~U>>GK)+#7W8@fm1U?B&)t0*+{9COfa<rSiQko>iMqz<<!aqN?M5~3*?<Bn+iZFq zR3_$JoGOqlGJn2bl8iBtxN`*+i{I`mR93kqn^d$h5%i5~$d;ta*dm|TY+FSWZF;Uj z`7O;`)YuH4OO0th_noSK*vp)W@1a|EQf0@A_C?snNPN<1d2L&mZR9@~L<4CBOj<s3 zz9h<RQ~b8D9NZf=o5BSs94t9)w5d$<6|1aSWixz*s=nTPpVg0>`pHuXjOohl%5>!p z<xZ6yM!$gwO9J#k0(8$)lK?`ztT0q`FcN#9kdFlL3fofG2qbi$k|g<=Ccf~jX{rom z;z3_V(M~>YBq67)kPjMRB_b6aN__2U6st28Sv?&pYGix34aFj&+9ID#VSAJY2hb5_ zTlv1B;;FJW&PChpG|*9i;{f$Bm_CeIjJ7MxaKRbXek%DS@c0%OfrD-4bpx$l(IuAD zsXD;c(c3EnOw?<THHX<Am2Kk>qc=RM(VwC>j1FU)h_RtlOuzhW6MyauMu^^3_O8-E zoPoc(NOVv23eExoe$4<$Dp=e><1ScyxaLb5OK-29RIlkV?xA6RJw)IVy>*`K+uJzw zc2j&tfm!DNuxhxx(s>-8E0q$vmQ_};ADQ#NGVEpSQ-S08`4~@phA$9i>%;8s;xL<! z<l}0XeX882+^gK9%($m{i$z=0?;ye|1hKPY@jEBb?9b%C>yN(U<Nrky4J?mWR?+If z6sc@?(MBpWPL0h4wR&FuK$7Yj<=R>x;2ok|V=)NjE`l92KARR(Ij_J-RYtE2udJPC zfK==DMQ;`FhR<p~!oJ<SBASm5FCyCY%>-IxG|le-mH=3^#c+yFMLWC|e3xi?TGxJc zM58)p18BSOzI$n?=dGiF%HCJm3DaXk`>H-h!Wt|j$+DJ)AOLBNu+1vlgB3AOpXKvn zMTSt8wWHS@(=!Zdy}O?r{D>A)xwV$2p}zpFCH?TYx{c8bSZ-C=Ce>}!Ttz!g&mZ?e z6QSl&YFsm|Ypl0LzP#ybe6Ft=tcf^0_{t3<fOKL_p?LtYXlKzz)AIuAM&h9Z%Bp45 z0Qe8EF>N(w2``%kn=(BQpb#c&V9g@mG%6O&6s*L^z>LK``@4a+PfnU<O<9Lj*tfZK zGp1+X`)Duj*@dF4mA{F?MLTH*=Ybv0<{PF;(yC6=G=Z#nv(bU+$wHTEe0rmb;xEvu zjC)pEl^3G&s&7!(^n4~-o!&}?qE?}QT7_<-LSKtQjaDJ>rdB@9D)d*Y&;%3$`~4F# zR}=aYq1VAS(fMCdg{Ztd0$i$uk$E8^&Y&-#V#<nv=vlr(34g{}q`OD(S}!$Xdd$6I zth3QcgeA~x%L&sAf_gxtxBJS0)?h6EpHplo`ZW~=<<2xcVFHtgSl@(^ZBBhCCt9Co zdfv6#oM-_aMT01c{5i<)rNOy0@nE!f5|1UOuChZw+yvCZ8Z<p00;yoF%G)jgN^^}# z4azI0x++0R1(o`V(b`yry~mRm(0oUSq3%~*C>fvX!g{0`5p|+lNHArG?H{V_Y;rrb z75D5#L8XE86XJ4vEXIxeB=YMTBdmpZ_nL0P_!D{ZN}0MGNS!T0XM&v2qupYMXHW+< zc$|vsiHub794cOB!Nyg#zcN^Ii8f4LnN9guS@~J2-kgCCW1?TAK8zF}G*@LXHikZ` zY&)NED$RK}(f9;>D(fcZI}CkR>er2qURtU9M_cf`he80KKswol&*_%*mJ9-~x8P|i z_^c?n0|fyA4O8IsS;z^Xdl*$VWI!yhY~|HfW)8t6ue4VJ1)tHfpQgs{0Up6;RW=tC z$Py$>iM!qh*@_ih4#_<mlRXbYb)nZ1skG3c*I}XGGc>i(Ykl*OV|pTcz(Yr4ZYoQP zHD~<hvqq-8a=WTGI^>l*XklauCVX(MAPhj!KN25r{{}pFbR<4I!?=L$CUAGS#TW*J z^$i({yfP#azy~Aot9D&$lins7RJ1Y7b_dx8({m}htSl*AjW4V%nvFvLKvLBYWvKC| zd^5okD?>5g9WBnAFSAM0_->%fLbeMy#Ehh5;Zm>#HLhE-`ZTd5fBXEQ)g`%_TwRi- zWvOP9>Tj(&YS_Wjr~wGzxaWgTF%LX~+QIru1WzkC6=BV6_p0O>NeZ&<5HgT7(85g5 z)6mdSRcIMIQB$oDlpv!rW^2}>X=)sd4@5P~H%%kjWxIKcEKmV*!~BB|qr6hTevCDO zwbVPPeH*xxF~CCIb5?oGG?8;AA?|b3xX7xpCRO9bJ~d{2M-AczAc^gf*eqXw_dA`& z*&75k;fxar-+@+$iUI;3oxsO>Y9C)F;!9CuSl?(utqZd=@o5>AU;PcMSnb*oi7S*V z-p}A&H8)1=O;%1=l<1p-0^#@!S0-kJA9AAixaXxb5Z#LW?0WRgw}Jylfau}~xKd#x zo=b3I7NFQ*5X7o1Vb5;|yP$O$0Am9?6LO9Zf?`51-}+ZsDUTwp0CoJiKN)P3q65{Y zCU<35lW9gXjyJ1K8{Nr|hk!GtWMLncSf~PZ-Vi%rX}Y1JBMRG~La%?e5mPdh{{gEc zA)&6suRyY>F<FDOD6><P(LTTnX8bbiZ5wa6kYH~oTAGGAi01NpMH_jjXS{=w6QZN} z)ShO6D2qu(U;u1E_OfL~jm9Xb*C8xyOGm65K}~iOZU=~Df_MQWhXzFjnuo;*fS15+ z#O1>b{B@Jc_F^1S58r<gv{M>EERWAYnl(cYYN$pAlulCTKPM>|vjbSofxa{OU#S$< 
z=6dhl&0CEAmHu_wKvi!7q1uR0zeGmVp|YmtqQ>S_pXe-}Te_Q=%b(sw%f#9+v=PVw zKQTRbr81!+n{BC<)9|vrd?iFP88m;{Ddg8G;ycL+K6<gzqsLI%8XqsaM6?Pl^L`?^ znZF>gzL_7rm_BXhAElmEXoI)}jU^eP237y2`QwGLBkUY8?|W4yhMz&OHM>;*{;DJA z+bk|ooD@@-M0=@~p}@r;m1P6XT86nJT{GFYW=bEly3$K0-D(4B_iC{Ha<4&}KXkd^ z8!j6B#gy@E0RWDBmhPfQ?%jCsCx9Gubr;^R$5(#nGWwQ(g(y;5h=(!Yy9S__?z;uu zk?E4V6DfWMwJ|X=)a#;+3KqNRS&e)e)4BE;qzUZ6AV}EK9fW-b;|%MBy&q&J#GLS_ ziCXp2eq(KtO|G?}tAz6_A6_td!TPaZAX&Fht^<6Sg4|;CWO{xDj|nUyS{;QT61nyj z>+1#Lt0um73qBlSoq{7o7^nAQRsRxT!D3YXXRGG%@K`idk{Qh~KBtWPR3@9A?~D5F z{Brsj%QV4OavP^nzr(FBcwa8wv+y4AACEdCey**Fu;WA#<5cTe*wZH#9s2efFJMZB z5(&q1yFsFSr7+1ngRLwl9{g2gEeq8h)MN`g^Sn^5>JCOPPB=dvV<E-*a3?+}U~bKV zU2iu%E1)uMp(%hGuK5;%Rs(6{2|7Ma*3M)l*60>WW7=%Va=ekg4sF?%oGLzT5h^RP z*t%p1<ydP4Yikck3@c(Ds0Q^{!A&VqBYcomkEr(=!8anT3HYhL5)=U|q61KY>W1={ zVB=w_K`tT(y1^8@gIieKA-yJ{`v#yRLnG$)@uEEOz7ZwJ&^&wwH37=Y=C>jRRW%TE zkz3&0Hn6N(lspvN8C}Bn(!z~RcB^bzBOq2|SZVbQA~h8Y)UyMA6>SKsZbep|^VcwC zqAotKm`XzQJD(g5<)UP=OB<oG$!=CJisjCwV*UAJSBSO&!en=+XdxpySQlk7S|;b& zpbfb*j>}7IePR4gJqzqM01Y|}8Wo-rg{?0ms@_C^g8L5V_mAumAV<Nw8?cs_B>y5l z@6<jfn+N=Q2S16)h=2sVp^}Qh(d*TME_vn1>ninAN>vvA-nGC{sW*E`Q_#CbR=3D% z^pXOhnp?F%C3J9qrN3QkF}^Ra96+jpm+!d-f_|keUgVYSJb`yLrKVD?EUM+CT3evd z$t#-+nu5i!XtW%%Jqq>T6W-1cZB8T2GbM7^BG@iOr8CI3hYeZ;fwMEh_Levwx&jzp z032vPO^qLeP6^PX!&Bo&UD%~{7=NVT{mJsmoI$WP#Hdb)Q8js?O<qu`pA>l~Mcz7Z zSYMYBHd_hijRf;1ZN0p5oxE~mK`RKCnuJltrvs5z`{(+zh-d334lGz?nelZSRXKD5 z<o-*zMF~<b|J$W7JRn9S9)eI2>xAD81q6lWq7ZC1S{5H~0S3XqK;4&@IG5mq2IdRD zk5};4T;nhN#~5cqxMq1pPf}$q#s&O7l;St_WTrVUmOc2JaF1;v+~pJx)Lc+yG2H0a z;jVV!WN3n{old?oB04hVp}X9J|D(lfY;geoF%@)Qm5t#PZGcUW#ok#)boo_E6BxqZ z8`i4{+>dnf5wL1ra4kmUZ>j1B<+jLpKg>cBQwIabw<}N#p`NWKjvgfKjPy1yf1w1t zv*G|Sa6NyLngnMd<>FDKnUoxf(qz04q2}W6T?;_8jowP)8NwDTiXMO-1#3bIvn^r& z*YgKa&-@x{3L-^H-XjFw6AzM4VA27#>zJx{XH=C>#bR-*H7CeJwBSlL4hNUNX+f6S z(1G&!C#(8_4*oo#Qwt7|jt&W9YL_N5w_whULHrOgZE$oFfeGou4{0eR+=iXB**Y`o z=5InrvnDQi1==G_q)-5aq_6-RUq~VT40sJ%x&U+mUlozS5ah6SBZohXc2Y=D<2QHI zlu%z$C;ufJ45aOrVT!i$n}u4A8DyA2h8bj-!T1Aia+|@E)T5#VV9Agd$zm04d^=hp zPV2SQX8r|+RlNvai0@joSf!Q>NxqH|!B0*8X%J8`9MI#!vB@1_y@E^p?umqq^~EV) zofp2k9=|L!N^;7wV`?lyK8sCd>_izU%wMUo+kNwhUWb=~Ts&oUozYA9yrDe{{>%{4 zn2CFkQ1kW(H8{#tw#HYDxuzK!fO*zi&2ZP(5r6BWm#X?%Q**<(F4{?c$_AQGwv7tD z(x+hef@j0<y32pt-!&yK2PCI5L+cl0H~Wl!@bbYj4FcSm@Jz_H<@lT`rK|=TmT31z zDi*jxb)DKYxt`oKVgJ#fJn5Sd+ZOR}lz)B*kVP1b85qMDGgogwbC)h~DXb-ewK`iZ z;HnLu$zg>*Gb8p6wV*Zmdr=MFL#!$6=T);qK#Lc{@E>{<PKk9P_Lb6g%;>MO4MFxz zxT_;r@RgB8N&lgcJmo7iPZa$KnLd9FL0qT0gAEP2yCg^iXE{LiZd02BFJ=KxK8gWR z1@0$Dp{apK11ys2mniiDAz<`jvjx_gzzD73*))|Xb0oQ6uuC93w+G=<K*szv56z-j zjaQ+Fesi8zE%*@lhvT~I>*P2q&uttEvqW+@-k`vw{1Efp@ImB7Vl%zM=~NhI#{?fb z8DSq2vCJf5xtpkhW+ysY)lZsQR)npC^L{hr6Q3aZ2JJ}vt-)BiSJ2{GoC{@>)&z)g zTN!LL1~2v&ep6~qqr`EPfM*0=NI`M|Ql`KXVX8;iEQl^)C<&h_>`#c@G6v7<Iu-sk z9v~cA<iDj7-&{f;f~+P^_I{<mHA;q2sDT(E-sVFqan8=O;p0Ymr9(9?77nWjRwx)V zHe7<N$@IL_LjDg7*eS2X2YF?aymIpxl2l|?3Aq_6+E>QfV5|Wz(V_e*j5Z}J*9{WJ z<}bw(*{K~Q5p`9Vx#&6Gcn>N=WvFDBmKP$MEa-295Q2Dw^Dd(@gtiXD&KwSY&}+19 zg}r7JoL|rOUG<`(9$FX{-ENSdY#6lz$_&S{!g7$*wsF8?dcMEqcM^_9VSMKCA7UJ- zA$<wQ8<u(UE+CO_n}nCgT_i^qJM;<{R8DvZNT21~vDjEgUE{)??6$nFbuJW-W);kL zv6dusK8g9^_J0L?PVCEqIquGNc5Nbw5kS+_?bJUW6=se`b<W1}&Y;Uxt$9etGqS3S zv!WU12Gwj-{r0M3U{oX6hGZMCTf1}NF}?<%Sx_eL0x5K^gFW*%d?W~?nMFsEf51z@ z<L)wcxQu;mf7}&rI_p&Q&qhsUj*hZ9N6MAXpcB0<N{=Fg_s1ywEzvPT{oJnFThl-! 
z1Q1PC$GRrxNnVimWN77VH~B~2!<xqmuiuUJ)PN0f$=|(TVCUpKmzGV1A@*3eW@nH4 z)VTBaP}6Tj@1kc1?9W6&S3GzXRzR<pkQ|4ge<FRawHcG*?^vNA@|69%7waYM@bK_| z3{0bfUxy?n%oxt4ELwb6W0`==azxZ2izMHSwB{n@746laadNOg$atBOr?76Jfh^Y{ zU~lhfoZ5XzKk#MIkr>M8-8QJ@uVF2-Ghn6Y=ry1s$nMSy)$V&NOVK|)9gN+bXAD<5 zn{C=){B>yq6nXWhSyh>d$v#3AReCHyl@fwm#=tGX4g>PD8{#1_NHTDt!9vo)5k?4s zSnT5u>P3jc+6mwj=V0YGkS8fJ9)~1BKOUNMmVU~nIrUQ+(GU3t@L4Rwz8>iU+xbyK zR6C_+VDE&4J|XJ-zG2X&_gi5{V-F0&$p3{0DjYi|*XkfT;*dpZN&Z8~)S|rLMr6gD zui}k=p%S$`)}Id%i70kZ^KYZN0BouX*>(eY-ani|jdrKp6h(Y1z55f74nt0*KJl^A zsBCF4d=$QbTlFJ9hOwO-3i05=j5Iq1_Ij1np0$5IuNQg&Z5gl11n_(=*4472eHeaS zr{z%_#HY!<O6UjJwlp49V;7Qix2v&Hthuc2pyf&1P3(OS&`8EO;JTUo(lTI-#An)8 zW5!8)<WC#}=Z^Iwb$l@Y2F;}u??Xs35DX+mnC+sf3n$R<qC-Sik=P{`gqtv%aEN(? zWP$x4tZkSPzZ2rdT1($4Mtb!#uf84hCHUQ7UL`;fvdcsh*$Lx@P9;y*uI25MXd*VV zZvJDMM3C#IQuN)*e-jq-=tMIHjNGZ?NOouOS<q@#y<(iCSLCHuT6zmTO}Bm4Fe>*U z*une|I!_Vo0wf_8F9sX|t)G5>o@8|J3H?$l`YP}{YeoMHSq6x=XGN#h2L&^{Or!|$ zR%m*<-+}+&t+Kl)qx94^@`;z^AIB5U!+pk;YK3$3c0g`V)D%;=Q1sBMY)n>ViJBMP zU$jqes6|r)9_?T9d^ZyEv(2#=eSb%aYcP~CKcn^1trx&u5_R$mk+TZ_OZ*L(29`m# z$uLds0e-Ebe@GQQ4l5Hu4k#MyDf$u>-3@JY8FvHISSneoPz-yMM=@s4IE({JDRv!} z(i=C6bO^2Szu%N9i}ft=6)4MpJ2jPsr7ZDRkR{|jzvwVI=Cn*q;?q>_1SYmK=$mVk zS3)sXR)IxIK{>Pu+q|SXZRg)l60*sogk??DMp3oz;g2z#b?a*cCg8}xmx0LK!Y{$! zWyEG*^fjO{wPyXxco6Fn`UC%x0EakE-in1n^97Q?bk$SYc<y+N$Mh^&ix=k18wA#X zuFFB<q@>IwJ+Ykcg)f4#v84=6NzxP*o379RsevH$CwwZTRDe?BAb8pbTJ1m&!<J)& zp+(xqoQXyAbF^iQR#~l2002G(0MvNuRc{2lb6!bV%(;{vG3O~az18}bQGYS#X|$bi z?nGvPht!yGZb1r6kt+Q6X$wvmAkJq~>3ODS>Dw?pofA>dnv!1uA+(SE5b}Y6W=yi_ znT{8|bO;JTld#G?gmR{5?ixv2O<f5e^~F@DW<9|U1dEuSyItYfoV;*szL(;A`r1T+ zGWM9Oj9tkTN0>cUTvpIk#y*Z<f0ZSdSITuq&`7xACu%r0wH0AOZ7~1+*T{Mev3NW1 zKO>RDCU-~t9rBaQQIt(SO=<l_r1i%gl3e>MO8ITtFyg~lcwzQl*q$)kNI!_-+Y?&N zVyR;O!v~_{RK0Yy9}R04V#NPIiVqeGL>nM{f-2jJeSKuJsSZWN1&Fq`^QS?dVa#8E z-R6i?z$m2|ri^i6`<@6f&aIT?H9bM#iT?t{VMgf9ZW_r-z>qA#LV_tz>$i+4-l8Me zKPf9nEca^uqMao}AH53ZuZt-dTVquwv*Gb*jtP~!$?YFHKhO9K>bJd-MG|gkwf)%K zM2=w(vLxY6Wgq#knLk}Mv3v8WL$&W`lVKfpRFzm*n}21f?uh0L`U|uxDdC@US{tYP zCG1gy78k~Eu>HfP0CqJ9%$C^`irJP1z3NSYgH+VY-9YS|0K!)KhOAiYEAA{k2}(%$ zQJvGSwMk@-5a-Dh+L__>H_MS`wW4*k3;8kUcogB!ml2^lZP>;nspN}KbAj2%^4;|D zaqHv3ORbNY4R8fP0*bha?I)<>U>g*9F#0C=e>Q6{6`d<l*!WzmT2BIHLU9!?i`Gco z^V>^on83zf#djSkz7aOq7ABi28-LGg&NBE@`z12KAj~lIU$h=JH%8wE7l0OYH?;Y~ z<M@}Ql2l##{am6VC;t&xAiQ4J5tPJS_JVDt{K+Kf;<-Ko)xWpuI`D(#v>5&E3%*D0 zh=#wsCjMQf7VJ>F4yjEi?Qj^VP08!yk%4YL10*D`o|6Ypjp#Z)Rfx2^RdZ}|6`bG} zuHo#^PYhl>2xP)9JHN(_JvCNR4e-3U=UIpnY{*oB+?>M%IIRmVl~?}+{S<8K15;P> zJb;p$F>%0kApn?%=BV3Td39BujJ}iqOCRIt&>VYPsxQl@IG1AA;0f!?4+_~;OW|FH z(=E)Bq4*Qlp5puPoWQ9NV!K1^BM8cCqv?PIzt#ySnYAAE?)$Yuy}L>qqjxXRhVt={ z#3|vu+9~Y7$q=FM*synR<SOi}lHFmr-sz9Y(zhYvpgBWfZC1E|T-%J?x}9?E*B~so zboCRrEqP3PNGaGZi(pkfnXOGRH)_}E$vo{6J(;Bq(@!{}PlZsY?oX%_Vr}|%ck+Lz z`syz;lf;n%or!>4M_fnW9!M?~rP_@}$j*p9s(*1-|NOMRJwsl7G}D-ehN3@2BTBYu zROrduwVU<i0Bs_=yo~xDs-Ng5*9`|Pb|s`n@Z+n|%K)PUo|0%mh_%YAI|mpA^p-E) zjt(&LQzdgyQZ{6H^%ptwoeJA+IFJETH61D;*E!H^h#WqUc#`3uPrx}a4U*l68B`j8 zK~VO){L|ZM9m{pU#Hv8Cetioge4|vZTVQ?ZX~45gG=S+q1Dkn2ED5Y`n$0YLae;9Y z=y0(pLcD;Ae9C)@xKU)FQGg;<q6jI+RHRW9k?XRo&IX{E`y5b$B}H=GVe)Z62EeVf z1?CO7H$=3z8WYXGxXl{JUKBB%xiXl1&^J34&5MOb24hiIBo;+S8}M-B|8b<;%_K8u z9%=LBlq>9U$_bs~9G`h%(Z2Xp8~+ojibW=Ez4=RTyb4~A_+f&-q6^GvI5z)OePV~C zRjRN?I|F%obb41HPpZb;M&eV>=-uuG?)c4az7VJ*_Z0~Xm3cnK?x4yBe+8|G;uaz- ze>-wuydS4F$d%cKPjm3eaANn#SY#%Wk71Cj%2A*H3w8ixHbO7~!*kSBEC-I=jT*w7 znhICHER%Hq<D1lD^g^(*`J%U+6MVpDqOS_-OZT0U3?CYpZhh^hx~AtoR0B8xD#Avd zBK4b0s{9%xFe-9ZKreUW&lpNTL7zG@r4TN15Lm+43lN4?!Z4(X8q0kQyfqz*2lIG3 zc!wH)-RDN+5fru#AP0cFaU72#_|fVPpe12Cv%UHW6Fhm<4d{B7Zyq>O#HYc}(C`c) 
z;sqZWM6`7n54jN}ifN`0HHbb$gVnu3Bl#clppeD~F-JRy{PW-A9eV3Eso*t8%mB4I zlhj8jdr%80x)(3d_sZb(06_4ULBabYY8oR|R_7vkV|7-`>9^%#X2l!0<2(?Irp7|m zh|BvIDP^o1acf9X2de35L=v;(hUYy;KV?FT-pqG}@R`MF@M_T(KK>(2R{TXDVF%Hr z&`st;Tz`tFC-RR&ZdvSe^-ySVN?f(^7qN~^&vXBTR!4b6A&_6--&4^U$%-`p?)6c? zuEmCZ?)%s`QP|!vnL=Ng9s^AT+2*uwe=DKuq6n$E5%|0jJ54JIQ#_YEjO1n!^Q>a= z0f|_g{9XVebo63EvKMlp$Fy94Pbg2mc7(wE_cz4g;g<`>11^7-R$w-U?QMGZ_^Pks z$`QSL;DUY)FyN%-nx-HsHjrtlpMdk@hQ?;~d4&a=92PG47;qKRIQl~h{-x7mfQ@cK zfF<MYeZK!9{e7_b-m_P{A8S3`#+;rq*xPBr2WT9(A&+q3h{UJ4F=2jyq{JYDET1M& zjDLONacT~?V=l{z8q1*KP^3)+*crzkdI`<ctfz0pcBqa#Du_H`zW~E|L6mcc6HQ*{ zuZJ0~RGDSHg=>KZt8!{i;YSfcu2@#IVG$qucS3Au(Y`P{tuJxeq8F<bEf!;FQR6w) zK+;i2P$WU=2pxFrA4HDY#ZWl0DWsqBE!rsbM4D*Oh=^lfgek{JvF)i=G(&R*2U(BF zGxmK4U3uT}0YHRli~}4Dtg-;^c5MVaiZ*geLM0!H9+BXWA0T=lUefGV&27<LsB{YA z8?FGs(z0O&MEjYZFTgpiGTWn{rocMSp`}(J0N1&KD%&@z3_~-=K2Ozp__~E7D;Tt5 zuLweh`M`u}G?c%dBT4a_@_7(imt}9?^#Oomr&K!@;`n>ea9Ce-21=p>+nJB88iyhB zON3*Cf=8n=uYw@5Trj{(xv+qFB}y%WxU;H8$EVnHCICa7p(F?w?1vqEb#L8HBR3qI zaYi!w@frAp+PT%}-1Kbv8wgwfu1V>MGq>ED70^>lUq4Qm*arc%A3<UK8?XoT=!&ZY z`G?>wz|elHDa`79z|}0YF7)BML79j*Xrn2FRB@O)42i!Vp|8UC4c-HtVh5<-^h}{j zF=rc5llV$}CU+yf#&S1VkImL&3m!twvfNdaV~&0De~B^C{G?b?pKZ57wpWPVnMIv! z2LdJK+6{mQ3GIjUr;PrT>xb;xZTcaHHbFnsPa9%x(yro1AXHNN<4=(^0$2_T^~Z9r z#UuF?nnBieR@OCm7>n?JvGqQR-skdNP&rc@-7fe$lT{=|*s_-iC2Uiq!ueLKB$fQY zp$&rAbPF@&01UNt6n@Fid7ba1Vh9e8#P5g6e7Vn{^|>6v0|5OV53wZ=aWT{MRQ;7i z77y!ZY;e3cCZWwsWpziQSxUUj&QF6Hx4>2Cf-G`lGMDM6T-reWRJH~Y;?Oc9@OF<g zE|0LaB#R<w62|c#DJ`L7bWo730MSW_pV42!g5eBeY0spOB`o*D1eWR!l6B1a9@6$! z|Bl6%sTG^VYWmsBr~q!>>XOWLBn4RR>nLQr%i%YW!ZC*rkYxh0N1?R5<3gT)e>9gp zM}4vFius=5WSjBu@7D$p$o&DbT$cwDio^uG{{3=Yf4l}`0?OnWjF8B4HnE?1psjP1 zH4s-kQ|J`uKGQbTULbc73YeY?UkBXaZEmbIav*4#g0l(FuEn^)oXxRIPz;Qg=%3~O z^D7cF*cLQBzd*j8`!X^}Y>i9GASo(GJjGgTRzzIf`61|2jn%x20qgg>rG!GE08*j` zgd*HHEj1c4Cb!gd(F2_7bu@|!<l0(ALXP`^IxKdbayraZ=Edl6RSta~DL%*EYQzit zxKTu2I9pFq8@kA1j2`>}Y%?sgY8=95dG8VzYp(}x)3fF^OigafG5m#c@*KIstcZ3M z+QjNstN^|dma&zo6|vmkpeQs`(5Wc98OR)hZOabKl<9et@^r+;$Lt8AeD!_!M)onp zcikb%3awBWulJ>V3j8OimFf^Y$<4#4(i3t-dos0`(xM~gkU}&jHC#*_$;pGF@Kn`$ zyV`myx8BBCZ?mnp;nthj6L#_(>usU+?d)Fal2rq3R>l!4<7LW-JG0m0;crEE?;-yk znj>iC&z=;s#AudCWFyXEqc*a`><?xhckbyxE_{Q9RIwj_5fzuA(##X-S-iulo}NXm zfLx=cZtAJ@<Ov9u;b`TT-Spm#rE)z&wy4BAD3O}g!Dx%bMgVQo>_W*B8#)&4^Y`9^ zG&zU}a!k)m@acbu>Dm8h^3m-562ysZa#c*P<qZ7HPY{y?)2?K>D^Wts@zbb)tP9C^ zyP4;ZiRJkf@=|kbEuu`m8AO?OreIxIFbI!O{Kh;U6K%JmdDD|mm1E8c<480(N6`a* zU?+|O8rXFiL$MOpdOTlViz9MEuVKgd{#^q5xnIEhV|o;Dw+KaJ<Zc)5SK=L=h5hU0 zCx1ip!@9!-rz7!4T;QoL`N^Lnk2X%84b!)duz$n+>8mMRLiMjx<w<R-y-oGERSl#v zw*dB{eI(e2gIVlT<9B#f|L&?VeY=Xr1?)^zn@o5pM?!B#n6ICDk2xa%CCs*&o&xcX z?{s41LhIut;^RluA|HjZoNF-Aem!BKcfXSSQwFiZuJ#d)UG1jO>B*+*;Xm<wqZR$e zOAq4l-LbGjixWh0PQ7NU-kSf{daoRTKL$z?P?(MLT7RNZ+o7DnX}3LUoXc=5<yHK} z9{eiWIjJmhX?6rps}<@*Yz-GR)|807lAP7nUdb(a@-vOco3}(Nl!>q$D7envgqXO% zVC?so_q#D3k@Dc^&@N=R<V5^CtFoBlvagF^-ed&_AoQkjA^+y8PpAh(Y4-v^yd6=V zFgqr_N%2h^`9mOT%;M;>0)+OrQBFNEC1lAg;A`Tey4v&uwv?#Au0Po<&N^tWWV5uv zUKt=<if+AoNYydw?|IUTS+LQS=C#mk&heMQ+MOm;3untgppC5ET3-M;>;!)qeHV3F zOHr;p3g#ET$<A+@N=p`7^3haADKbEmxq9hO(F8usz=s->bOpJVmL*OU{t^;LSu+?8 z9{{`_<{VUQh3(=$xd5;uDnqWrK>>Ul{{j^hhd(Q0S3R3Sg!`W9`RS{$zY#+A1^T%R zE!>J0DB@%*FGK@CkEhq76TUz`7dw#%L)gmaT!zjFE8P<xokAIRAp=chq6uY{@Ib{Q zp*5J9aKQ$3N4HvlwX=smj|KT%?GC1PQGE@NF19ka^3PN;eg(04shoGM957CgigHSW z+e_NjR{l@84TLlAd~n~!_9DCiX{*#4!}7kFR@SqWl{z|C_X?r0o2Ua81eoZ}Y`2K{ zU7D@byDCeHw1MV2;bU&js5$vH@LTm6S^I%N{Tsd_lBSP`2o=K1Ku!m?olZcKE7kO< zz_`ku5~M)Jf8jRE_$OkqjQ=?>{^#R}>8CyC9f+O<M)_sH9%w`xYQ2sS$c*OerRAT1 zLZX9L5pnRj;Ng~C^FyRao3U(Xctr#c6vy_&@l4axMuUwxf8ro6-KbrPIBn!i>^FZn 
zc<tTDrIF)kwF3=PVCXV@wJb)<2<YWAVG0B|Nis4Yq9hg3=uga-c@W6$v6k*p<-eV< zTEL+tIoM6&z&x-Z5B(W-n}m%7_#@z-`U3z1S&r9BO=L?rXaVWy#OWktZYEdNKNEIR zmyFlwrJwb34o^zSmvdq@)c6nJ*-5UYIDW{xAjRKtKZqScN@{$uTZvEY2Nu*EjNOwH ziQVI<JpLDO+(R6t3(hQcjH<WOW)1Au<7zyPdl@7VFuBxGQg5R}S>`5uux#mbjaDm` z09?X4yM{xm%NIi71DmLJ5+u%-4&Wi2V)KA^oygWOqgaD5-Bc#Bj^ax6~76DR|9 z&8xRz{h3Yv=BjU&263n?=$q%IfY3XPOXU@J+Jf?m#-M-Qf+0#lo5C8wP91dc)b>v= zzi}YCKuGn{%-Y82yX=M>cyEfuRL_G)qLAI-fLn-s7;3z39w<}nu#s;g2&d1ViBZk+ z1tb(>cjO?i7;d?F5LAU!U>s3C0tTClHX_T*pCp<+zj)m|4CCAyir8&RRVqUz9E6=0 zLH{VJ7GMSbmLO4KxJL?Fluo-%>tEg9y)>BhsWAYr8JfGgzgJlrbbVT1L&!2RTf`Gw zswoT~IX0T2t;Y34n>XbH4=*Y4fLHGxw!npE;*-2a4gz$BEf|1SK)mO*f$cmX`rRt* zQOCvnVChD2WR?7OG=W&Ud04hyoC=5k?o&j5VsMr^%fG>SFz}OfbpAF2#0+b)p%KI@ z(cahdq+uF!E!$ue77J{Hmn^N5aO%(h%^?(=L}Wgq!^}o-N8DNdIdG9yKb<_Ef)NlG zje=_I=b|oi&Ju5!PuLiOD6Jme#%?=lWUlyBFJA-xlTuC6>ZPUI1!xP5{GmH}5RL0& zrxQ~3Q)i=N6V4lu#v2r~<?wM?VY#;na8j}KMo2$(CVD}_IkKnZqbO<Y(oda^;%))Z zQ37bQT44c)@m5c4FW}W*hNdE&fs0vRAHd!naG95i)depMV4y$w5Cf&vLd3_u;$af0 z`&c*0ONs}wL0e4@0lh%<J)DG4!`X8q%r+=%2<(z#x%Oft6#JI2{V)!M7V0OQRmmEY zpCQ>w?54(~6O(dn7mDN1sOd@k&RT!KJBX0%Vr0IM%G@9_ZyHGsYvL?C2^%8zgd=uu z4xOL$%0GKblnDARE}r4LSQ}qlZqvreD~jb6s?C35fu~?Q+Po$o099u>!uDO^f-TW+ z09ZzGUi4ixe#3>U&Z2>8mn&>vFTSyx95u;T?Vt;6Z#m#`OX6%Owx1jb^BLaY01Gh& zHC#ZCoCb#r4`;(byv1(F3C1d2hCOz#3rCXO;B>3{p=GeS<bFI$@*hD*a$N&7QF4d8 zC&2DAFgFGkiQoP({^aL?D~6jsalDO&rJLiy%iterRf_YpD~o?%(=L}+43t+WHh<fK zVPFa0%U71#75gs5Wux3O`xbxOqriPrMlk{~!w8!wvwsjSI2?Toc&qu6hDMNLv+zp? z_S3%1)v}><4u8uU+FRDp-m-@F77Z;@up8*c;&xAsFS_jO)if-Gt;&@#>@_!KgbQ{> zJBT!@V3&b`V(k=@0RCGGEdZ=y6en^MQa5$S0c{D;nd9^0zi{SO-)i!a&{<$ya5soe zWxu0U2NAUvZ$WmzKiOfaa;v-`L(a#edDf0=A&80p5HzB)=d5+wsx*iqZ<b(w{Mfgn zAW0V_caU+?;Ic&Fr>QU;O80$2K7;-^{K-UC*uMpKYyM_{5ECGDnD)+}(fY}d>#(yg zbvp0<g49P5tg#l;Cjkko5H+-nO?JebzlO@dCc9$Jr$r(Mu|kyC16|DVnrV^;NI&UZ z?16tsUKyO~u2f>R>g-!#l&v#9!F>++UUQOYe(@j_SsENRa#DS*jluv>;A7fOt1*dJ zEFPj(Y+qVCjASO2@FG;hOc=QD!MWx$8j_2Kl>2w2ts&hA_NvD6UlJeffZ2HnPQC&t zWYLR(hdV!}f*;@j39Qx&QI>!BMcN-|IE#S1FX{X$WHI=KMjIIChe!(_kX{-e28f_K zk>ShxgYFZ~1;|3CWvPy9?l5sgss=$iLPfF*Zd&6#_cia_*TTN9aiv^oAaaD>x(mZm zOQv;b*(!6>;UF}JjfI8X;#>oQD0;FQy>u=H0mwDwXspAmo{0xS4B;>NFSJERHadbf zh>sq~9C!yhdO%CRr(sLKr-8HH`$Bhh8aixD3h1Z^c7F*x@Kp=ERGtMN4&Ykw1#R^N zAX#E5y(=B_i4yPu;0uBW8@0#asbD0S{7u@!@PVb#dcR&QMQ6ZCV<C2<HbgHWZ}#L? zT3n1;;MpoQ!>sZ2l(?u*iN#Yy32K!0?~{KfL9OWNkx!w{CZVZf0<0!U%$4sIWtfVr zcdBW&3bykNR^Bu!hSwF=D>cJkv(hv<kjJirc)$}hTwg_KAjoZ#k;NDl;ESnn{PFU& z78y9Veu{4;Q#!N@aH5lR<}4bmlTW8|`VycUF_Jff8hXC?#g~MabkDV*`IMHKj#f>M zz<1cKX}{?EL`VwNpBP~SVLfYCs4*<CO?(0f9l<ghLdZAqzv4yFuLZamgX42>7=Ihs z$+hH&fFz+q25=`JkU!;fx)ia1U}I@rCIiF3Dc`g%Gl2IO-=A*;28;sBH+j<+q|n3} z<zYwML?`?Xvc6fnRbc$*cuDIcmkV}Gf?pgTu=WhZVTI||)k)LDc?BF#kZWI~7UN|H z_$#8)JqPs12TRdQ<dxH*&%J{)S;Oc&;W}^iV&D_&t*j%Zo!~&7F;8Au<`&93&e+W# zD>PjGIkX|D5dCn=gWD*j5+E8P2ziCWWn9jt4u+=dRfIo{j+sE$4MC@eHpAyYV1Zks zqUONgZ+A))HXGxbcHaFtVy{Zv50x4qot8L&h#ES26fbKJ;s_I+5juf$aBX~<6Jv%? 
z*u#&0l4f`OaU>~N`%#1yYfcJ^MvPvW8eyTpDqwm^#Q!mlY{LAOr!=9-wnT_eBHYEI z2k?JeEYuE%Jz2b+S`khpTx6EDF9ahAwbkbt*lwTR26z7gjE{_A2ZC$|l-$!kseZ=A zru^IH_~f&+Gd~R)Ftp$K(0(6>0V|AGu~(Irp?%L!K#4<{WEckw#l1l|DkIk(21Il~ z!~>#5fEz{X>_rxKN5GUYW)#^d5(xWgL?R6Rp^Wp7NeREf2Cia*&A?zr27R+5R!rYG zYpWX93y3MIC#9zQY;-RFCu-@%qvbmC6Y^K_7Rxi9;J-lv4K;wxpoJjwrWylBBU=b9 z&!oEwL}Pw5))E|L0x4&s!|60vG>_k9wHqzs(~t_ryvC&n$Q{>`DN5z@YmsZ6H!`yM zc=0lXAXtsfQA}V;TFp&ww?kx=gJKG4D;9^@EC2*3Vz-^YM9b9LW4G34h{lg-tc^HJ z(5-YzjtJYK4N_RZRaP<0(bXQY??4=1WXz6mMe~hpFsK`+hE<nppX%yV?6Q^v)y$6L zh3fMWduya<6Fg;NB_?BUgS(I)0s)YK10Qzq7)YyucpZuepIHiqBAk)kNStQ?@u2=R zr};QuGmuZjDAQLowC+cYaB7){3tC}vOO=yi4uteIX5jV(P%%vDBpQgiabwCrE>Ydl z=*(D$E!TwQ7bTyHFtQ566>#jevy<!?f~uatT5lt@;2M;tlWVg@=q~(7V$ITdJ}=u7 zWUb`8?fMl4(5oc?+afL!z7p8XG=K{bTxCO~4*9z1e9*e8qwraN`(K}U0R1omR0jJf zNX1o#Yvo`r<V6bF%H&D!J`iCe$HEUSmHFSDKcrHr`9$g{i~k1l-BeoalpF0pXrqr# zNxOhpH@fQa9n3~R-aKziO>am+_`c5{LM%R;nx|HDbT&O19m;oqL^RePUFcKuyrjf` zACuJC6wq?{YVl2=@slF6vk8H>vBzDVO>Vh1h6+Ru`p0MC<c&?uW0Ujn4To3s)(qVE z_h&kN7`=jT#;yZdObCY52JpA7x0!g0W<y822X0@?H<-s}e&z=_kBDPE#6D}|Xdb}< z>^3)!Gr>D#Lnmyq&uooNcGVquVi<1ELG?rBDAjG;sc-ZGg5>p}FghkVMn8KNVZUq} z;)|g}Jq61QN1!Mmlp;Y@d(Ju|6YF;U#~7#L@@hkicCu|S@iX}JX+)mgrd<0atra#E z4g+6dz0DZ%Hoy{wTp^0G4g3+&wf~G<w*e*P6(~kz0b-w<)*vC11Z`Mz-)Fwz<J7xd zRkz72c7|8kQvYyha|f~mRTrvVd-abqwSJL;CN<`UCmd(;!uo;h!%fM4(Gu$=6EAjc zU}a_HvSlQw5gAMIt6$Sm!mRp%<imhA*x4+P3P@B4|Alczc%&rFhu3AG8oLE{$}Nb4 zX$s)kj{gs!J{}zZci{i-XuI)DD*i-n9;nLAsDVFL6(ri}O;o~@(@O{a>5jH_ROk4l z<n5S@)PAT~cmHp4tDVpfxs}JS{-8&0HSZH_FNuu>u>O?j`sh6A6t00mWnTYi|3s)A zi6utn<ha8JnUxlCB+<Z@dqmg_h_En;FsM)@!ssup-dL#3fczHH?9XVEq}fIEk;*#y zh`OgzA4$~75OrWoGb+#iA7mY^_daLCD9C|C9m2)GCGC!hA)h1dK0+3x9du($+HDgF zNV^S4khGh|PhwXyCGB3Bk(PD~`%1fcJpzi(N|0=t$Je8lkapyR;4APJ%S%>M`Zc7n zaPCA~y#f#I^$NV+_9(O(Y<%v0XZuRL*;d=p@AG?+B8hhyB;KVccm);9=Tnf~Bl4~l zFGA!cu&J36dDCc(m6Me=6CAJio}X{o5hqAB@bIruH5K?#gJcp%J|vvMTdvEs#B7!a z<T{C-8Ctg9>cz>sBw<@`wIOM-u}nU5Iz$Q1HY-ryVBKx!W56+kYzhJ;?{u?P-kk|! ze2R;V^;f7ev5*&#=$w-GWb{6&@RdtsV`~sEpNW^VP_psTju$-D_*I{DBURC^Hl1?v zb;u2}c1Y$?WpnnGN9-d%GBV*s8So*Dxuug`z9%O>8Fk95AIZzDSt1VmMMt1Fm*xRf zIM*$~$01I9%fY4?!6}<JyW|zZ?HrSh!L)CvA8_l9?*3Eq^aovZe2+L-p%vDZVqPTt z+!TK{{kA@b-^Hb*<!VI}js+${2QbrS?4E-(fy4M8vFZ?@Q_L45VF{Mx2ADBLCRDQe zSCR5NkHSqR>(3(%e@KLNN8-7O|Hjr^lj+ny$MMmpPI&Ld2E&}r@}<EmUCXhTH5X-T z-~cz5JUWz5M$716r09UCBY7V_2iX@C<$N8I%i+tonp%fE-Vt0n_sU}IWuaUs<A~CB zMv>QU4c0$A6Yj6U;F;O1xvOotO<S&idN#toGG?aUpWk69{&q#4)DGcP4$c{|F1Q%M zU)e(=qs;`~Ad;9WDR}^@HnixXo#MRid}x;#R>$*LfvkNhs})0nh%XGJUvPk3X~;s= z&R`*UYb|X|FO3xTSFVMz@-$v(I6uQvsUMK8qu^k+Vt+pzzrz-a&+H1;2Mha$YL93? zOcy`{C1^mq1>WDX`mA&|>Zs;AROzNFB}z3Si6*2!qRzEcCm(f&_13W}oIq8ob8UKB zBJr{h;I2oQ*p<Z7u}Nh>86e892aeT>=^sEBucx426!N#pKmQ4ma1VX<Us&+=Pta*M z%`OePTCTkc9{~-*xSq;DD?7DZrTV(8Xl8Y!(B&|k)w2p+r@|GuM)jjIb~J2n0bAzc zP!Nt4U@MdSQ_9kiG;z3GOPo--cD<G>HeJHzhU6CHfMavjY{iX6^M{7xAsZcf?nig? 
z8BZXjDoLvqBNHIe$iVQyT5_(mf@sl5YK)m;jL|I9^II4J&{--=7Js1Kk8$N<Tzv<n zYQw7+muG3Cg7st4EiVm~*9WicZ}`LhkD}T1IqGb&XBh<`7^^37;{LiRaN6P{T3z`Z z&JN17;+?M}iMFtj6!AaI<tKBn3=FK2UOWaYd}SE@2^V7cLH22=;PdLmGvUeG0nFmk zCApSjcd)O~X|IOLWi}_VDQfN27_xrSv1BNqtGWOM%yse#`=sJE(o6*hY!AvSvM0%F zYi;y~kq_d3K!8F*x(v1T&1N!+R?SA-e&$juaCE8NxTh~{s^&*m^`LA0WX7U_OYLc# zP;T(b68)q-`mNk+silH;V-WDu^?nLN6+3zU4WO#f!3cYJno{u54vuZ7GS)&SPSH+r zXCYmr2a5c-j`+a=0P8V5hw%ayJsv<4xSK#Y{$_U10k|-3dDb*<Od%47q7xze!Pufg zHTg%BsoS}l7N?erYkr6PM}nLw2=akmf)uwt>fNA8+=PQ`g8l0?cj~4WqP!&O#-K}x zug0@Pn8QJs8I>jOXn%ZmB>#$Ti8jIM&!^3Hi#CCA#GG!(ND=2l;1U??0MjF3tmFiK z4h2A<mLd%rn*GfAMCuo#t1Ow`j$4v&-R9B|xRkXHh*Xz`_V#%#NzZ8g?S<DG5u##u zPyMK~GB(J#22-Ub){!g#1GMw1AZUvah~5wV8oB`WG{neB=k2CE^o#hA4AAvV`AJfw zjnV0zh(Gkubq)+ZT%>7TmSd4NCXmRW%>Rw%LR1aE@eNGxsk4e4`~aS)ADEtRbP~AE z=m>s%M_Lkb*G@`;Q%gw&zAU}_;i!Y{H{Jw&<l<BEa%93_9M1Qn4rEM2Z`BZzBqR0p z<vqtk?!B90?rHCiblf}=lzopsc@+|Ght!G_x=FHeDViwShrsdsdzwwI1uRATNH+c! zDSzqC4w8+(B*d{VBCLxpRfxn(6UbKdgU|&Img9bfE-c81*QI}K(|*L7m6~o6jq-Di zpn}1C3|dDgr`u?(Bp;1Cp=NBo50PS`YG=@kc<X`K%tllPz6?ce947JbO`--g8;S2n zGM0z+g75yB#LIyc`i_QQ=`B`>WJ+=9(a3tQw6Wp{EG_KEqLR?%PSFM-{7-hKj7&KL z&ePq(bm*W1@d0r4h+9VPuo}4(jig-yZ=w}IHIYo73sh{j;<6S<rQQ5RFGh@03UqbK zvaiUF=+ri-{!{aFcoKCix4>F)X5d>2PYMnZ1Axn+t+-?Cx-SDx#Qvt~$$b)p4eL*e zKh%m?lC^^=7y#ipZE#mdMNse@VzV*lU<#RYO4&f()>!Vzqj>YT%@5(MVSiP>vCFJX zOfsdcV64iH)D==z%cLvmZA!4dYBIJt-lGcoI$UmiW0%zo1rcw+o(Dd_$)NduDA$s$ zF1$|Y+}fm!RCwQ#ppf87T1T1vvywbHlIPzn$04;ag3_O(vVdL%>=}R^6}8#Sb2s%C zuP116N1*+^m?um*{-*hg8lPqXT>Zl<s?PzsFsl#1ncoI>BlFm)6+Pkdzq_d3=*RMp z9|0Y+vvvEPSQOSL`b=#$J<e+PE1e<9rrkvkS9Cy_<@ij`A8Lr3X(6@ZOS0EdRYb|F zYy)l?vF!OHu;(v_GsoeBW*}4n*mv&w5F+c=R102M`y=8>p&T;-X&%<syZomX)rQ#T zP$^Va`Uo0DB)s3G%RDFg&uUrK5x=WC2|kKYr=Sl`JHM-@tGxk{B<6^Xdeb)~NrQ;$ zi!VV<+`>S1VHsgj6~TT-MX#D4(G+UzazF0qBH7`01&l1c+a7gN8hfb`zdIH2?@58V z4y?xv`g4q2QCv+p$Y%(C(_F5Y>(!c5!ZY%eiaf+v!~KbPEL6oUbMY6&QE|(qI;Y2_ zDlVEQe~@Ko_Ux}z?6uBNZRR)JO@qe{nVWCllID@LGH=D*zi??WTzr6$jzKp#h>vo> z>hGSw>Vl&m1MUy5xd-(OxSX)T&<tOT7jUIc-m1(YPTp3}a=FehaVg6AwgvbmvCOn( z0ppiAW{`c7;+T279L?)qi5K>Om;8l<Y2CPRzQ6QO2#uIO2X|aY*q7m^GhmZfy^Jha zOe!oUu)3pQl6itjhQP+Qz}Z9$@MvccUM9i+5c`G?`+#Tgzae}RwnZ(Ih3`ia9a{|W zTHpz+W(418<?`S67Mws?mgz*!*Hnn+efOqfF?)QDbnOeViM>Ai#Ej_mU5fB@X;-t7 zg2!;&>xuEa?IdO!9xt3`1Y!q3)$u2yGW=?_jgQItfiv)`8JWUKw@tXF=*%4)T(E<2 z*ByMAWV@xzww`?*aRCQ*<gh)G{5}xJ9|ZYe>YOyoUb;Ug02K#EZ#MH=&^`Kn0>(Yr zhP|6Pz4Rk|ID#iq<aHrVrQ<A-8NbQKpWljIo1%RRZrlL1dHwm<oQ4>SgK%(}(Wl^M znIMcC^@#^0Z8ki~2RbL=IODzObyTy6UvYkciMx^I91!<{6e)Dl;R1d&fK~hnZH(Ch z@#-^1;Cy>@2#O$jhYzzpLg3RWOt{-UuZT7Ve}ZOJGz$fH9{yU<%w3o!YE=%NwP+Du z1pIOUZS&3qdgu@VE)H)Ng_BvR*0XN<#)C~G*9_#+`?v~+9MyCfgJX;&orZI^F)eU; z9+C}cWl3_Q_>5Z}!<LX<&`xp$`x!g<h}X$D#Iz}JG@^wmYQZUlAdJFR)H%0V(hce7 zT?I*66Snr~+DA-gB{oM}#I;+E1x?13ATDfUiPbhYd=ldJXo{<P0yGSWFX5kIpVLwc z0>#)mkZW%ToFqd6sW_VCWMn`y6`$$MDPk4-+<7^Z+%IsETc<Ck;jVS~`9(bLj$ldd zk`Lhar7(Kn<ME!JxC=>v58yg=%ViHfA_Y64-Z^l!<@v+LC7<WsfC&+kzU?wK><#H* zTMeW2WDv_4`^xf4_$q04U}yAe6|Pjol$x#Ni6?r7zJ~Z)LB(_wF#t}UKXNN35*JGV zqfx|bAbT9R<1C*Sro>ipTM6Bv@w3NJssfQi+Cn&ORzuWdKPCrxutEY<_x=dOeGOab zCR#z4$cII8Gpga^wKVwwC?@|UJl_j16yedL)9BL_W<uvwf!}FFlRz!u9>~;k=)5n| zyyDMTr5lXhG>(6_SB9}XzlF4tg9<xMTc!C}?a9@E6eupGTd`=p?<F+FXG~7tssCww zicJLQW6Ov0Y{Er1w3s1iiH3#H-26#ML-QD;A$V&ue;y6cvGDjjn>InMI4W>KYQ<lu zax9Z-3Fjbt4`PJpV&4`7cnfI*ICY5V_{r2(^bIte+)Ul!zR=}Z5wWrF;}w??C7ez; z#Gv6&WCbu2Aj|+geQxlgy{_JPUApnz%e&Xp<`c`EfmV2t)#5aylbrxKHUEA6t|WF$ z&rO&h#B^GLyphV1QBW$0d`j}7UFpQpf&{YE3QnU8!ML%Gf@uRI6gskpMmY)^F0SbS zX_%hl3n`6B6kfG+13c4k5D9uA?jVL5ZbNa>CVl{Zx#u9<cSKyeiCe$|wG9nON3;AV zqWPVHnF#6SF%%c<3OnO`5!LlKBldkepGRqu+anf;B@drP@=*w|cCd6Dz2UFkO18y* 
zayd4gz=drPmvPU7=nuB#Ch|2-Y6%W=!TY4;U`jfHO%fKgw=V_)Spveu8WQFX$!=>1 zdnwp2kN=U%Lt;OHXUhEtEsiAi1OEJPK83`-hmKC-PHAD-j6I4h{R>EJvm9F7*O1u7 zLSkQozK3HZ%P6-qSPY4sjjROb5L-S2_=P>~L+0Sab0`Z3{0Kfa1;81IfRr#_ieySg z)55}8Rg%xv{v!z-NM&s_5-ttFzF{{%`#<=dmWsSCI2<u*Se>!SZfrbJRm3(Bzf+m5 zrdV7wQ=l)*b416ofVb3(W4W5A^dfo;C>>3Y%u*@v-#Dns-$Km67BoiekLI4*z!@BH zTAcrm3JMpT(bCoW${a5?_Hhc}F2pQ>(lOasSMZhz#Xo)Uu%ra%*y2w%0@2vgag(8N zIQ@>B86EhJ+fitjRjx%W7N%cuL4L`#A-rJM$r;U!#KJCwxSkf<f5@^$g)KvIk1G4i za%wsDFM8;YIb3pGCsL~mDVOOl(?ef}P0UC}w_0<u5$}t>s400GKhDP9`a9mDazdvk zOmZvmzNTauKBlv4C>y1vni3ek_=O*<bS-j>pVpN8x=$YI<wj*qNxV<qlazOBO$ju^ zp1SaeNXB(FCD^X*$y-W!m(`TKnbDhvb|hm+P01U`gUlo#<lwrPfR_E5U>5h2<EosP z7>SWZh(fS6a@3f>OL7-zGQB#$p^Aw^1&COzbz*J57MUfbw;l&7ocv$dg#gp@;SD~h z*D)k8X;lukSNpvS`d9lUgjL{*-fI8oXn!c$CXT(}XIEBa^B2m65V2y)n~k=KC=NH; zQai%3kN0pI(Q%r;^x0nm%@yJvv9B;D05QP=;=vZso`|OzK|<mUe4N-LB0eGeFh0X> z6Fq#<S3poWt|cHo+d%L)YK<7uFtkSSL%G__Uq$WeKH>qCgeqJ4Q&iOw5A%h|F2ut_ zV+6CBnidasU_*+oM5F7e1GG2$4?vqf_6@v(+%CbLSf3yJcjCd4o<ex{iU<DDT1zr~ z4^8l4R&!&ixuhg?pj7;3-`JDX)zaPwCU#~SJBGUu6#W5OL7#wYC<zK>#PE0Gm~AYB ztjnS!G;aejZ$|<vMq}zQL5v$EiWx#VoP}L#ug_G?guJ4NzHH+g?hunY===is8mR5m zMnL8LYoPk9SL=tr{iDC9M*6o*F(m_Zu(_xcXp(EeM1gCs=B)Pjk0KbN**GThA@;;p zDQ@hV=p*nHvtKGY9~y<(Y&KQ<XJ}|+boA=lP0@U~N^nW~WO&I(qZx>dY`SP4_Y{Hz zJ9Z?Ef!HKQ|NUArXJ8BjGBiDd$V0ms_lLSBdbKGN`)iYdkZa_%&?B&KMioYYKn5%& z!WDz3iYYu8+4uy6G*h0y0rd7C&~#Y+z+o-=Ne)Jy*!9H)13r}O_it2!o<nj%Mv?vH z6?&dtx)Tl=BK@M>r1_K{+yN6`oN*$yx~Ijtdndl*cYuCE^p;;o?weTY#mzJV+y_tO zXos;WZh#ZF+QLE>ry#8epSNMZs};2JR{>J`H%j#%n?FOXh(Kw<vEli*sPVzaRR4}D z=rEhO6Vg4UZWKr2j*i}Tg(_kFtQda~t<uk81pvyNbpRMs-iV*Zx*KW3-Xqk6EhC8j z-|K||)AOTwLM^nqm23Zu@nVvphIuvId6D`}sLr0$VN$4y;fSYap}GS<4ula0asLf| zQUO`G`g|5Cx1-T4v?5I8EwsQ3gv{t5rl5!xM0KhLvqb2{b78Mpdhr*Jh-DA+bsPWr z@96m5*Ftw%iF~>p9v`4d{TXi-hXrTp+5Cm=HXCe~Zrm4hZRt|Xss9yqvGxlk^_}ni zVgLC5F2Fm?$Q-o`Jli=JpK-q?Sz)H<=b|4dXr@O)5<9NpqJ6lB<;T70+o=dj7pFh^ zkwJD_aRdq@SvB~FoH6ZcLs6=`h<|?s3_TLd?|dB+w(DJ_zDCNTS&KvKV?TZ$69E12 zJNORiRID`IhH)Rk=^jPj0+F$ePkb3fCRBjvIIIsJU74{3u(Hw%Q8XC;Ew|(0->)!3 zk*6G1{D-5hgt3%N0>(glDUB3R0CWGIUc8VCbm?o7ItQ(b3*EQ!5-1`m5H2fm{%N}; zJvy4^C`2fWzhnWB{|n?R>=b}Ft@uwzQTaat@o#ZJE)!gxDqs-nVk=*M3*nK}py(CE zBml)l78Hojdgoz+;&&-1`U@zI1B$N||JM`<`yZhIF3!`xQZX4`y1=X(KNLrS{J61O zT!Dl8rnm4fzYWYY+XPAg%=gX#W)bogM!<Zd_`iw1MZjP-gf%0kMCc6GF}@TW>3B6b zS#GxRqBZc9#P72MMtE4>JA^3t2h<7(e&?S-NY;f@SRXkZ!inPM#%oY}VNdik6R<>m z`l-Q-u0-KQR2Ze8DT3<%IEUVHAG9)$@r>V)QzHLPyo1pmmmo@hVYyJ=SPj)%`0)Y+ zHF%m8{6?@s_ruTPsN~J-drLk71#n^wL|$YnZ&oX~EuX!Qc%3}t1mh3dVvE}Y^^42# zOAQ^N+O3)^wzyka={7Qhu?M<?OTnYd+W8aE>uE>p(oCErl+adPN7UWbA_X$R%1c7+ znmv?thHbOXkdl$qsEpZ&!(otz+TGaVSMYx^cVn5xiuG)hf4;JJ3!w^WS`9DyT2UT% z?^NT_ySZ~ubC7iy!vdWxF3p85C3Ja2p$(gWVGpqmRyGsYi8U&3gOf{o(yfiBKV#_6 zF#03wCj!yhkiP?ww9l+F@I!t(aB7UbXJo?fXk@TGtYsD2lw#A@k7)wPV4K<v3a%|L zDV5Nqc3G)})vR4uDmn1Dyp;B>v~i_UKRgD&<&&TEf%AmeXT~694ZcZ6wz39AAs9mJ z<5ks2#mcrQbsgG}rOu1jAkd&~bZGYS>bOz^4>mqwWC3l6ygE4?*G3<8>zf<^1^F~8 zdG+Tx^ZO~;sd7+1P;P{gg*t966X}sCD1HMVW+&J1Mf~*@X&gU^R6?Rz<_nO-&tH(Q z06U(6APXU|xNKTNnDs1#1-Hmo2--ntbQ#C}btG0GQKW}3AF<E@5sX*I(9$aW--zD= z7}Ml$icS`Rq=DKFtpshP8^s_xM}235G$8zr){lA{jbe+LesAxQ_lW$P`ossq-fSVC z^;y&N&j-O-TlsMu3+MlfKdd<U8{k(M*Zt5KIM7t9nQ{NAnK_~vSn02!4UGPP>8VB* z95a9BcOokx<zjHgi;90kw4YMFJz(^wtpW#<E|pC`z_NK@61o)tuicdfIJGT~u*OhU zH$hF&jludw*j}iB*!+$$6;~@u6F#**)F#*I=rUAm%CY|esVTMJXi=6O!o~_N{@;uc z{8*@!W_1|fjeXUcVk)hb0gx$QVwaSHBUv^=C3aabRK^zX;yVzX2ZRi*)EDm>9(5xS zq!s1wEW^?O4_c4bNnPSUM&p$6FRQ8mVKhKkO)Gkp6aZen!UZv*_A&T5wd`X?Ms^1# zeI3Ysaw%#1+NCh5@feM_46z&ncwB@xha}8xhZM~Z)`!ZCp{wX_$P~q7A>yapa1gt( zR<d0CI@X(lOB{o2abs+8t6cXIWw3;xv@uO+Q3}Ylbo(bItF?LlCb{l$yoy)~y7?#M 
z|Ljq>qMvp=Iv#(Wa@{@1l-HJ(BNoYosf~NbB|PhHz%vnQHP6D*7=X0EG;nd#Sj;7~ zq%CRWlu`z;BiGU$i~_|t?uAiEyb7^!;tSou2ui^jKG4YhHH%xneCr)4nv0v#&eE{O zxjgKax0arTQW71GIr#Mv(C^WI_V7J+D!B_#loCAAQKE19anN131yJ<hK@Ws3VUnjo zTVZ`QN-=tEAx19$>;iBf8om7UvjSR>4+iy^PYg<~J#J-+S)WCjXgqpdl71x&$RMbd zw`piBJ6y3Ib#bK8*%~TX&j*bcGy^9GDZlmuA^+he?;AK{ytpw%^A*{_r4N)_G_F)1 zT#}CMO|9MH>Y!_ut5~f-Nas5Jlhjt>PB@ryS{N4hz~UuM+}(H}(-j<894KDg7WE1y zKm5BFPQ>N%rr1w{*pFQIEWlQ3I{+aP){6e$3XG(e7xl0qFr}_h#N6n5N*XSHR@?M= zA4uWjD%cxTvz5o;42o2Y2xB{+Tyj{}<cp&bO{$)i=Q@jtJaiohs0v;wF|ti!WG4{R zEJn6XjXARLyCdUI2uG1vZ7^)WU%mXZzmeffx((^b0ihd{LbnJvD8auVe%3LMZ~l7^ ziSRp-X3b)=O_#~L0oM@~)Ar&<?1eaQq;xec!32|_#qW61mv=<Ce310|AnwA#&riW6 zN?JC5zYC4E<HPq+xe%fZr7uz|WK5u#xwwZzQ6nBq@!8ln6#SRTyy<#klX7hpC=FmF zAHtJVX!7C(D7YG<DOOmM_`Mn2uNr}R0a*g#hV+OK91s~e46~V=fEAVKenHx}$-}K2 z)(<x#VBepCQ@|9gZ|yEbKgSn&WmW+$2|@egc*Va^7a(V?f^%2mv+&yK{P17kKls%J zA&FLdz$<XsVg|qBuh?;-Y32|7Qh1x;h!OvK<G=BfV7Lfae)4w!6OJ}l)%~4KncCG9 zUZ1_xk%ho*aXCO|Q-&s2GGypHBXugZv&o_5T3HTpPSru-;7v(q^cE{&haHFS6r`F8 z3CC|49mu31BTA-O#Dk*`(E&w>AE!(Nh||CH?}Z|nju-dBU5j&Xjx5~rm<9i5lUI!? z2O<<E&VpHpZMhwihvLSS4rF#mccHUU$)8}B`nI<WK0&%n2bWBvK}pVYWXXBKtbkh6 zRE>9l0W~p6B^m;FkFXr8zlhx2AOGKi8y+HAZt5%#uVFXk<xw8IGPh@q16TpdKS24M z@-4s+^oQII%I%K67Ovsc19qM0A?pOB6{)@or19<X0|(2(b_Cv^J3;utp!6Jx2~zcB zMxRL%6NEfR&m^TMXj<<C=}CxnnrI--7VVj!OlyMR@aXB{|2RLvtZu>tH2Ne>5PoI| zar&4bAa1bLje<*W2S(o$(=*?ip5FNu^V4CP3#|FUT+p=iOp`S~O?~DklIoA}+fDcd z!XsHg{q0#a31Loo2Pm(d^4b8Gm?4_JTQHFk3o>hhT*w^wpXO(^Si10+!__p5UyM!D zR5bU${z7Y7gk+6twtlCv{T<jE_~ESe<8XId^L^s(HnEM8%y{~o<=FQ=PH>^Fz3QK# ziAz{<%a-E8$;R59+I`D2f&T{ctoZjt7<X!je>FJlipj*LkSk)}q;~BL7wrg-c~`@l znAVMYx4>&RMQu9dM!E#)D^lr;kREDfXVrE_SHa<9GZ8cqw4D`y&xpUL#otrn?@96Z zg!tQy`_Xz#E3hRPdd?SK1fR36HBc1>{C|aWfIJ960>?y2{{P3`o4`j|U5)>DvXBrG zCIKR0lVOuc6vCoLgOVW=oM0kh6I3jOEF>C|n9N8}gkVAoj8U|{Rx7q>t=iVMxPw{= ziY$r})GBqUsdaf`P@`3d7M=h1-1|%t0<`vR-(Ual=k1f+Ip=QY-h1x8o!hq&YgUfe zFgRreBkqSavb^=X%fwVxXPosvqzPj4=W;Ccpmxs!5V5#gEMyO9mAGz5Ow%stVj`c0 z!@P4Y4#j?<u~!}T_iFpvko^>6-^f;!T*%?xxVB#uvLA{)2c)rBgN3xR!wGJ=aUM)e z3%NTi$hye+IBKPAN6rwWH^?*)=Bww6Z`ODgv-09&hl!|7)_&to7n?v^F!40>A{2}@ z&dCzcS;5x?jM06RTVE~}={0G(=WsX7{1Phe;Hx-^;{A#Yc5$3fe4N;t!EAXulR$6! zKHsEZ;%jn-1~;dQ_*+?&@D0Vm{kDU?!Nf;x2h@3a<rg=BxaC*}6YsEXIPvi!$5=Ux zr=6IsHd!=8-oTXa>wYWGNd8**y~{4O<>Y0Z@ZMYmTg0J5avRwGWQNO9&!x|@%$I=t z5b5Z+EccmQmg^yx<yv3-E;s6a10{2G?tTtP{+d3`?4Rk!y5yK1*Z1_#%YGLQSee=5 z>#r}wi6uuw2eew!4Gjjf@`admrkp1H_`c34(lv5!FPp_u|KQw|H#im$R-CymqOZJi zDJLDqNuSlynw*oepBFMC1|nS`JX_W+v6GI|7b&-(-qWWBu^S|NTx-f{?=;C?#p@Q# z{FY6wyw(j8i#G{bC#l7c6W!OdP3fxpDr{w3-B;oMlLC?Re@!tP7KqFPu8W&4y1v%W z@Mkwcsi2`2h<7NpNIJKSvGU|C$_($i%TRtVD;!c^Ke~;wGEv^4j1FA3Xkw84ge+b@ znadbw<rq)kmF&QFckua)G<j(>*eF8d+vLDMBf^wtg1gh7ZJ6&^5f(V$2^7@?4l@q- zM|lGdPhe5R;X7+#ra28gNZt&SlLvM0vdhirFZc{{o90vYWrbEBSve#lkB$c-*EFvD zo`|`b+MInDAtF2~IpczcoYOlVm8620M<v-pY}BtwPTxhLG_Of!$O_y89j{4l3}9pS z&|^AZlN2{=P3-jMgFI@+`3u&7C3=YyiJUY#IYi47@T*~RXoVk_TOJAE=*2EQW?peI z+$(4km&0+%f(di+tTpXEjviOa&ZZozPXEk#BVwqi@mVE|0FyiXL(3~eQ{*E-r==Nw zk?@Y!G+FeP*EDGh&Wz7a-@S}i6HKCGy1k@%p<Eu9w9A^jj7@Ka(ynm(HO-CdCH=Y? z=p9P`v+>ku1O~q8EAJYcj-xr8bA$o<QM}24tV*(ih_l$tAs*JX89$S2y(BHVVQP(& z)e=dbP7bUz{%O%0F6ew!GyMldr0X-AV=$1@V!6@s%%-(k;z{8QP&fk=PI+Q;p=o3~ zWCnMfU8|I3;Qi80=5aEY(}UI`p;oV--|?_~)(8)@wTC%I1`$ldax;dYsa|l|IL_ga zpu`>RCbV+Q?KSi+{pK_Bd`j%MIv3f8jkE-J4mHMePdte`;xgVps*YuZ-h1s@$G*?p z%zo=N;4hiT@>ol*oGJX8?J2g59D&M+!<SVCUXmIz2v$=9sVGuM9+)e2q}i?`&lAO! 
zOFA;2OYzOizT;!7DV#vCi%6<u`-2_D<Jz?y#lzX9c!bNg5)Rkq26pHjrQGmF>-W#e z+MaX_y3TOzRCZis5=R+U#FaKLkxPw>nB-_FFs#AnV@&PqS`a3zA*Wci&4nv3T5YTT zk$hj&k?$Es47^nWHOX+vPx6uWOy_%s<hx$Rlg!?rb64cVbAVLma27*2a(KR-!+Bkf zlU2%Eat%GyB0Zmzdi);qr`fcjE|CEe$d0zk>m}9~#9KuMNwO9g#AVGkNU}BEAgPvH zBfYG%jX{Q$Y7nnA+8}w>$p)EkB^qRr6>E?ZE8HMy*4K49%u4GsgVb1-Mgp~@&U(w> zYXxsIxTM<JVQ}G#wZ-6@1b@Kb!ZvG@!KHdww-|hj;A;)uDEJD4?-sn$;7x)T8vKCZ z^9|lCc#gp>!7~irCio<Sw+o(Za9Me^1{++C6kG8I*FsPt44x$B?bithw15@spuuHk zZ8aNQOEr1b;AvvM+u$tDDeEbNdj;QYa1n}P-DB`d!EZBojo{ZCyjJj9gRd3bXYdVz zml*su!50|3MDRR=-!Hhw;F|?cGx!$4PcwL<;4Xvj7QCOqn*@(G_$I-R==L#iK=3w$ zHw*rr!7afL7`#pJy#{X=ywTvSPV$DD!J`Gg-{A3rZ!~z4;2R9?61?8v$%5AyTqG}A zWd=_Ze38L31fOGYui##T=LtU9;PVAfHTWXIM;g3D@Fatam^>@i;5C9PgVzfFMXfFa zYX!FqzCrM}41Sy7O$Og2_zr{LFZdRNZx;LkgKrUhli+$p%K8nX#Ny7n-qV{Ncjw(q z!y>J#wT}yU)jsZr#-zlUaJngOoiVw{n9Rpyn=$bilSP<3YD^}Gi4Kjhob`$v>I^SC z35Oozu&43r!sHxda#%N(JUy5mmugJjHzr6kh&#oYykbn!Fd1x2wi}ZSO!^p;-x?Dy zCeg;^PGeGn$w|f}U`#48i8Ut6jY$nAVa8;MHqi|bhF{*L`83NIN=t;HWem?XhSDTq z_@*&T7DJowX6!E9kCQRF90v0b_N;YSyEb-?%x8m#CJ)Hy$VKkPdX>2rsT<6XIe#nD zh>v9&(R*!zb%S^uPJpe?G5mj?`N7d&=lT+hUp($BYhHuaJEpGh*Ylib{1+jy0Fv{( zhquc#-2cIb&gu5_hJHPZSHBx{7O(!g-MQ=qI&qGWQ&gVuZ_9onG(FD=XWHLJ0+EC; z@tN#3&-%EHFHgkR9SA<lYM-oXF(Dt#Bz%NDAs-|YavOJo_cD|72qxzNk=ko}Ht9Gj z{(4ntxFn$;>jjQ;B4Yff8JP>1`9HCjdi+CStIuid8PRK*44-)+&DCpSwSkqpFyC3E z$D0m?OTC=R^QN<G<Ie%5oO@#8{pW&GN<A@2{;{$EClj0(WgRE9o^(gZ{~(=qtUtjz ziuWL3MTZK+BEw;~*SUu)Jtp*{v!}~?h(y|NGO05YB&|bDc99TrY@STFE+o&Uhw|)C z-7;!_myCK&$F0ta?#!^OxNy@JB-`xl+TI!~tO;@byXVzf@kVM)ndW9e2C`k4LDgLy z&la(pgHoe{)1zw;NEWu1Yp-F>$F{ddJD=WTy~2jJ>`6rQ3~voXUbkI0*1Wtj$KHQG z>KrCAl{ow#cq3yy@4hpldM#u9i$TsT2ca?rNk?nmT=k(jBHMY&0Fqh;NOyXna|LeR zbz%_C15{A=1V&^BEWL>1i_Na9(b0D#w4PTtg4?JoC%M<>GKePR-v?i^dJ`-cnh4q7 zr!IYrpEJl}^WYTg3kF+j4z1cRFaL$h4NJ%qM#o_T;?yZCm~mT3R;K$RX08nnj7*jF zw$%rl!e~4Kk!Nyh(g>QB4pTM_{vh*^N|6ju!VY~c60dUuUop%pL;xXRzJANo114tz z7NF)><F8={s_Zt|m|)1AwTZ#5|1|4T4D5Nd^&_!+)01}DZ6bOrYlD3kfw^_do3se- zx|f2y`JBO(`a3U1-pZPvOVh@>{v9$EZAfgt12^eAeaQ$R@QuhR;8Ika4~N+-;ZRrP zpmXjOjq`KE>-Wgzb53yvp25jXoR9@+tNf>m?~}#%0B-@SG)HY4h2OGWUs<1;R_i;> zx`(*h(H?(-XtPh<GLmRZas$FA+Spy5^~2qP&pq&s;qsi3-YlZ9Ztmu+dTwUF$DTg( zu`TW=m9QQ8vBF-?-K~8R8@Y(qJbC`IKH1KvcT9CYc760F=DKx%a<+!M1J5Iefd_jU zvkMxL6*JBGSbfh%Y~9<f2+4#+h^dWs)=#6jxC6VhWcFM@%w&7=`e*|u+3+v6`5$e# z(T4RzLAEhRN>qqJU3;PP#%fA~v%Z|za&5y|pN6)sJ=0nLdorf3J<D81tZP5tSuY|S z>)L1ge0A;fo%OG2-6hEAY|l5lt99)KY{ufPC4~unb|5OdZWp5LSncr!5U0t!=oBY= zh0Z75?^WIBcJ_DXKtiQFv%kIN5Hkhg<(!s-U>w?uryNMJNdAaz>fdALKKxuN$)g=$ z@vc{PV5mEt3qV`p#wrA3NJzMd$Az<=7hYJ&dovV(*XbFu=f|2{%ew^FjZ&vi-)Q8| z&tTRyw^8Q6-rT^m)>9L7=97p4&=X1Qs@3UUe||eUujtlh;rOz~mNepsS&f*vYpI!a zFEXvYjBTsGuraspWcZ7<2hOXO1)AAK&g;YRRM);_<;!*LrAF>SU3<BCM6RyA!U%k< zYp?Vb)U{VT>!YNBYWLd@<+Rs0*E|cMEu$q)dBNR!Sx`EPFvb9F-jo--`@d>+!3beM z<L#OOtuevfp0orxr@0+hpQpS)6X0#R20HQpg=Egh&Mg!dneZ37^`paxrC7coVkTmy z^HNz&{d}!w<jmF?Zhf3u>@u=?wIUE9Fhw%Kdbp3xFyvqykxKa-hgpl)oz)t)?yL|8 zTPh_(VUT%5n2MF7PXu|^SpFy}AF<Uvb4~GS9jmiesPHiQa~Zy<T<5m)+gtmSRe*SH z)+4t7JR&P_KEls%6GzXtJUaU@LO-7PI9jO2XsT5lr{Ng(@+kcz4~J~(-aqV4=WN@I zzcP0%lvS~YJH<{OW7sp|d3ck<iMV}h_N?NaQ5J+n9v}5{kN5CroS<x&np)R>_R6sE zog%5>h1ORgLY}V?3M@x>Ql8~BEPV84t!Z`E_s0|(91Bl%)@#|W$Y;QEXLxGMUtzdE z-d%fSi9b_Hi4-DE8k?vIsnx-sVYKp;)w|?m0uoiN8(*irT$i<3>Q@viRe{5;(c)Fy zQT14xj$=<-Gtc?-5pTgE9B&CGwt4VQW6!-%y(Gn5B(C=CB&Erq9@ZZ~UAWj1M0of& zt3RxT9*=}Ho(*jzPzT~(;t&t=pEaGiwR)jjM|>bsp2YrMZNIjweQo;b{%D;SFhpwg ztPdEF!YrCq;g9F+IU^T1*IYu<bpDN(+={M_k|K9H*>vbRhc8|<DLUg|NDhav4m}s< z(@tEi78~F>o<O0ICRFK3DTHXshd7D#_t8guNh4>q9&}kB9)@p$tWDZfFLhxR67F|$ zyDqWOz9hUx9PKsLdH}C-@KQ5lj&BK<Jqx)1;&DFCQS1MbDJgur--vt-?(^2|4708s 
zYu5(<xWL@|={#h?Z<9PLjfiu(64ub5{lNv7wF&xg1U^2{n(Pi7^02a)Aumqbu{9Aj zi*GKg%D5Ibt~pdQOd{+X)JfT-_Nzl3jE0OZENs^Aw%%tWo}i@LB1L37os>u99GZTM z9yK_V1)}xCrBhpT##gweoW;-uMbYZOB~RL1-NEN>u$O>jNwPsk7P8qfYpF1Kqdq6W zX*?N$gcc>)`FNJ{lI=T}-xA+ge#=Bw%#mgO4N^pUwXUl@r;L64;4`9)ZCNHAUfqlF zEwcO%;%X@ZSsy00&dF!``_*3Un8c<9I}6&VMS94|6Set-4)ZUaEn~&Jkp4^{&F#qo zX=G0lNUJIXvB@>g`W^!5;N&cf^ht~85Nh4d$5`s7gR(AnGI}}$H&TRaWFuQj?)7`E zX<tg3g^%lphx&azB#?@Kw9`R2wTpktYC<9PWBe?V>lRe1yYv`xD1!_3QV2XPRcIPw zw@Ihnu)^CUZd$&l9egdZ^0dqhks0JR4JRdPK4XM+X<||LB0?Zron-KQ9tYG^JWxT~ zl4Lx(d~RYf!Z)7Z6yI=uPv>Yz?U7V&2yu7^3ehbe8ziFTJ%cz}-V}s<h-%qyjKW%W zgGiKW<Y$5W%$J{>EY958sS*90mY?F#8g@g+5bQS%!C*$3|F)KED6^esTwGp#&0U63 zX|x;*2E$Al>nB+6<H6hulf7d^W8gw^W3seuvTr!at-G-_anAnTucw#>nBxBtUR${< zz!a1{*~E!#2OaOSyIiJCGc$p%<NxE`b(d=-Mmk86RR;As7i(A@5p21cR+KHNtJ2SX zk^xe6`B8V>AHnE&cilNLZ#54(>aKeqE$bt6*PUj+wZHZD^&vi3pMw&z*W7l`5ngfK zeX#Y0F}EJ-bUFo8@3}LnIL>`{SH~8$nM*Xi&k><iB4eHOYdo@V5{P{7TIkEJ^%~be z>I94293S{-n6xJaf1th4TU75e;*q6=q1IeN9F}H{)p5`VaGqq7NhSu{ag!6^Qo^6P zMs@G6=(e^W;XdPsL**j5+&Re0<yN74z0a8@uih=n2s@O~^ewk&!t`Al?o~2Q*RsEU z>^|giA_+1MNsupSNstdy%qTZcF{Y6r^Yb)C1mY%;>4Au5oJWcDs)sSQJU`U??MUrh z!0U*OWEn2|<MQl_`$<eVb>?*5@@z0MQ<o%X{k8aFRLiZ-L(ck2G@J*+3J!4G>usvn zf-FY<^|{!HlQs3U1naqoQ~Om6C#|s0A%MqzDN*Uy?5<nUKTr8wJY~^8XKl{Ne77vJ zDMnk2{0p&?*9R80GIQ8tjqRn2@}z89-IQmu!<m)yg3t}WmX=1k72{Q&f_@}MMwpyK zd`Rp#13uV3dB=0+;?nwI$iYqGfCFV|`ZrZE99Y1a2Hu3#Eu20&KJjIS7|D^s89H(j zzz)VI=gJmPvoQBUA%Q<mbrp_u3HSc+3o(h^Eu9^EawI^Qm=)L&Ogs~R;z_muxnGd- zO*jQdUQL+qygm{$PMGCTD#&~@;O(Hf2AT{87RBk(aTH;bwqW8LlMU^i#1uO79nR|? z0%f@6LH7LBlpKdHIbr^jd0CwM)x~w~F~y`O!|l9fhg_SHJ(?VOKM~Ny`8m$Vg=tTm z!u{!VocovXnu;8x9A=9!3+zP}QXx)$)Zv66C19Nt*#Ulz8EW+@Egw-Zxk%NJ_~`w_ zY!``n*S*%`%%NDm>)z#w%7X`r%9+#hl`dJIoP+j2%Rw~Sv=synFT=MmLKl2W^0yev zR{s0C#JvCwT?U6R;%=B7=h3}=!=o;0+{8Tyhqj|L+PywvWOi`J<QRXiwI{i`SRUNT z6oLP>Jp^6-Rj@DTzFzVUW7o>tutxmT6nRn_rHy3QG49j*bWU&gNZ%wcRqC9UeD^Xq z68F@bbdxRP=wZoiEHoi+N}IQUVW2mCr}O$);!nJuh$H4Ea?b0dM{oHH{E#Z|TqBz% zI#IG(t{t8zSrbGafn1jhlMMFrCd5f@GE9bMGEWj!ypG4|(60XKTS?)wCr)A9cSvDA znIwfemtOTmDI7yWx~DK*QYg>V>U33#3;iy#)KNB)J2PtB(J6|hwCQIqWcVY>K3~W| zz58K#)y}RpT55A;3Uv`6^>{8k=E*jB@Z|Osge>iEM>cXOPfX=bAUz&=Ud2c?luJC4 z-KKgMyyR`TB~G$7izTauxPz?aP}w;ys#^`o`9N&6CuL_aG5c)ci?3HW*G)X>->%xy z8XZg=oF*k&w28<!FbSp|U}EpQE*ysFQp8Yb%^hSo?d*cOzef5mus&YX(cguAU*uS| zY+-yFOnmVyA=kP`S{N^FuuB)dEPWn_cU%ZelJPK;Qay-&v-KYOg=UDg)Zjl<jz{Z6 zL~PeRLx!)HyY^az{I>}>v8r{*^s4Off!4WH%ek1RP~v6Hu0gs%8UD^*Ec7;xTuXn+ z@K?IcuESqhL;QPVh-5?GQaZ?<1ldJi&a!)sk(X16(ecI*A}?nn1-qNP93ED~qAvq^ zIChftV>V0eEB3zyC1GX+5yQ=2EidPGV`^R5>F`^280oxac+_qDVAgMgkqd5=^6&>O zywrMUH$36`z06nR5PlQAD|4irL~QMgh8M3gT^LZGIUI%~5lAgB(TkwCj=TDGUw!M3 zvpykQ6W7+M45`MZ$CWVWt>yu2UWZN;{OBtaUXCF<pY}u}(mJEHkM-LQ@782c)<Ns^ zJ880hDtD}1wjhKI1~}n<h?!$~sj?i+4L0ZFpE=j8p?=EE&ks3@&trU^A257r81?bp zWC0f<&X952UaVvhlKCEQdx|>-r3FSJ>}IrGB*Nx~4}PmPJSXrr(qPj@u9_TXR@21e z_caxp<npd0%Z?^_+21CK;MFIDwUC@>ljH)~KI;^lBvNJfSp$wH8G|-piF~drKAxW) ze%f1yvSg&RPUz5)kCdj##oYHL_ZArtST_%qjz?yRTjUmHi#R-+-eTo>a=K$ALThVS z9&oPDQXE<iL^&U03BQr+<E_29XhGCow_<-rh`Z%o-rq602tJpN?-je8kMHEDUcn4m z+4^?fjA-)w6zCMM8{f*~H0NX7_^p=MTf@vYs(4+hw=N1%DEh|f>qO5gC1NrvNtLR( z<$ID93t0AM9bHZ<aOqUbcnZWNR+9jV!ybqC?6@XQ12INpwZgvB+4&aGT>HHSh7Crx zovfrYweKrSM7=#+xE>|@aS=4l!(^GAJpnlyQKC=j%P2sC<uwQv3m8)Fx85$0rtY%l ztg{RH`A5hyX%FXPBqZFER@AN+(cA1r^evP-)%t7aV@{@|%~mryY0`cKl)DR_z&bfO z<*xh6u`<=2{@IEY5pO^OINkJLjchACA?*kad+6{<9gCadk)e7YZ^3Hu>`ia0I>nQ= z#Nl%Vw1GszS$`%CEG|P2X|%{3+A3tD)kqILCZ;l~55tXgTt>Eujswew9A)06qY&Uy z7?sWy@J0j^zZ$QTq92HEy_Y>8F8jCC(uwM}B=jbk^^k5@8Fuo<-J6WjGl=n2y7-Gn zsakIKtYGlUev#z0y(%FYjHC~HPbox)%Pl4UO-d);n>xwb!iA2lQuYz!<d5WzVwcUR 
zzqc3VeB9oPay~A5QU3++>i$RfqB6~1)Y*(Dx@<-bAzYD1O4$BH*^TPAbqZz1Y)R?6 z7i>uF{SF&alC|CUqg-^o>_;_6NU4jww*sJNk4=nCJNBb$yz5w1Y{*4i4BJtkA&O*V z9_6KyDF(poNy*GZ_N2;C9_!9Ul*I$r>IE%X<fOQ>C$$R$dvj-wwv#=n`*n5NWLGCC zqzjn-@Lo!-)ZG_YQ)b=m<*dNhS=@VJZHz^SjACS!IqQevj(wd*wyzp4yx7z5y<yo% z5XxDXRVzZ0+o@PE^0{Jn%ClttvP4EhD=#o+&c|}1n~&Mz+0GUZqxXw(L|Kk?xVN{k z%fpiu)LwRZJ{5w2Jv?gRte?)NhV1g>VkWTN*8kjw8*G?m!_iE2WtS(<j?~sda*)eT zH};`>MNl)`);m3WsXTh8XQoEj>5=nkdZ*_>jm&n|Z^x_N?YRcOb?uk<X6W6XjoO~w z9+|v(Xh}WWPRFIJ<CbxGjuD~z(2fje{Wy9C)`#AUcGevt*Nd4OEa+ImZArv7JAe%5 zom^^=+tcL1*`ckXR83Jl_o_aLZ6i-Z#jCf=VN;K67%?Voy~*R;md3J@EGK!`B@YaF zvsN<c1?w;S%<5*Ky=9>^LWg(gt)uX)UVlc~rZ+oqGF_DH8?|?YomKy5XZ>taL+K&^ zWy_>NQ<L4B8wTLqlXjA`J|4vR7|XqLH(2>pcUi7&^s)NC)6AJX>A@ARwR|LNYFz@Q zXy{bBe@6gtf#I=EiB2^k`zA5VFJyssgJ<oWkwrNLwbB%G*KC9Iw1w1$Caao(rVed+ zFY#N}8d1!{2JA33Wd*)zxt0)jWv$iZv%*`qE+HKKt|nLhGsID#P1Y1;<H@~^>|wKw z`B%%GI#=0<l2t<ODSi9OU0X|WK!MS_6naBy2L@ZIuiEr(plnLz>rJU&iBHB!+Ujj_ zS7S75zJz##`?sxLSZY!(T%EE?0w)_~Whyqfk6o*H9&@8IvF^>T)w9TVBX?VV^$jnw z+gnlx&6bqej)HrYPs`?NRzs<a1=i&&it8F@(%F`cTp-(1!rt$-UJ=@#dT;qm?b_4p z1LPeC&g;l=t0$vh%~5B4Ei7Tul9APL2_*k48&*+lSVb{QDifZzY@pHzE6s+LCg}cM zDmE-ShW4(SdGp4UX0~o9x3N{+2-+Df^akjhmO7#Luco!P4yLUEBx*DEufW;An%3Uw zu<It)K{m0P^(GcnkKV-Ui7?^A%$R0;>tT>lY4))85$O}`VY!aGhs6tx_9oUVIgnvO zzeZ?CA<?^7<cMd@2b6!Ba0+}+Pg)A^WlDP<e1VehS)0=GCDp;7Q0z%>@lSE<O5XA# z@-V&Ky^S%sa?WTMYVP#l%F&&>SbLs=gxSS%)|cR1?_|kz-rI`sJ+}Qh>Km7dAeWAO zWA*wQvz^r#OdK|bgi1Z3=}ZmeayES*STaV8ci545MasELwz7`cTUm&nH2YLNAR0_; zK2-?J-JtiebPdoc2yJHlp;8<@+0~J5mon1Q^^Vp-5x*`?PVEc2FiE=<Y1%?*6<a<+ z7AINk@Hw}nzyv9q3tGN5hGD+GTU;T-h8lS@F{iaBNzc*7lAYY4<V<Z=%3)8+VaS9w zvjV&IcGhdwz3c(eY;3Xjl320pvYGWZmUxQx4dOS#pQs&el|pO#E|DE1q!Q7?1y|-H z%*`D*ERB$9cyd_zGYt`AbIz%J{)JAVzI?E9lx%3!$YPjT9<<(L(LOu;8>vr+Rz0Dk z-FmtP+<TC2v~@&BM0M~8*|R0%oFUO*y%tAnUanavk@a0{A}j2%Bl{!)=%R^J_mg5F z9-t|kS~T4>2(qyiJKs$2aU~r%-L*I~9$L=8k)~nvt@7ynb9L?6zP!5j5?|)lGN^Jt zJ!$KFkh=C|&NT`6pe`Tm)e;4U0QqQbhg@SCEU}1fk$2o_8sb~t2cd^)IUojDIg8zO zX=nO*?5VCMj2$YxL}>Gt$8aH*6KzYjscg$HoGrIwxkb8cLZiE0FIs7kjaII$Z;_VR zXr*=9BCT(O_s_;;AtnuOm(hrbI4}kja#le)N@gKsoLSOks3|+f*l?bdEh%``@)FHC zQTt|$pOKJmEOml)vn2IbdXXb|2z*PA?bQ#g=zXcjpR@O+ye$W?b_evnlvm_I5(A-k zn%gvvUFb2MBk$I+{UID=i;C(ozvWIL52AOfpc~q$sugEsm>cnk=hd~3@J-k%b&3od zu~kMNApLalPTy`aX&ywQTW1RC)@l4YS~G<IQ8dqWd&62Mde+TdD?BNapU^JVyC!93 zP!TT;5oJg}CMLr~O<0DBdQ`ZkUAV?vuLq;HDF3-^is}(&?k0Qs5qQQMXprHpOg+!d zlgherWGcfRR)u63+;vsRO5{GLJz~APN^;}a5i4?(esipB8*{?`4JO7*(&H{=tEqY+ zdr>S*2<{CW7O9NLXcWneta5$H`u^mopg+Ct+<_D`hkq0aI0@k@y*P0=U@Nse*c!W) zgaun8w^Hd?S-bQUp(8%m4MtKf3r3|x&|WZF30mjs^6E^7WFA&-x}BNRf}M!xmFaO0 z-$Xo1*(f5Og46uy+wwu;`KHA48(tO>@y^IupPUpV$Irc@K%zUkOLPYvC%W=l{Vzx9 z2!BITW5ZeFA;yn$PuPy}UeIG=Y)2MeCd)cgqIIuvWv%S+*vfUZn(^sz?Hh>tsonxE z(rEnXLuRZuZ9n9RC+2ir5%KK4EwCv~>Y!w8ucR!<>X|G)9cPWfeUZ-80&da$SvJEE zaB>5ocC7GC`at)vbt@*uD*vR089l9cC_NjDejrOT6GKwTSB=nOpkE&Gb<i_0b}Q2u zWj#!mag)n=K_K=6+zyBjCVFuuYna^rkXD;r&lMvxWU<0a6p_Rxb7+&|9>n9=6hkJS z>$an!p<|zsMCOs)$_dHdmj}FcRr-K;u^PT7B1tnP9&8hG$WEF>Oa@3Q5LeX@`O_Qq z@MC0MFvHmpz&5Ag@btjoHd)N5|BO^fD5-3gw1s5qA8^*~!TeArFHNuc6<FOhFlVw! 
zaw9=)&<4Y}RTE;);^DIF;P2&WC~?RDGPyN8m}xWV269V!M~2@Qz@X7M71FFPX#G(1 z3~fn~IGVB(EAP6Aypi$24LtUdLPWRxEHL&7>+Zl<@8MRqhsnl>Y!+j-TT^nvn*+PH zH%B58LyO7bc`}5<3oR0UFaBGGQtdqHA2`=gGuRwzX{A{<Zxu}6ZS_Nvi%fx2Wx`bL zEs!^xEAf@RE~2R}@?tO-Yv4`X%Ga*|Ao_pc&J0;hP2HK%Gp24)EY+fMzNXOn+V&44 zWr)O)B3p=qExiGG6^%PtHFdBucqs!j8fp(i%n!x!)?UQQT1pv~)up2sxMan}Tt73i z+!&5&x$veu>-RI_tuf`H%BWj=X1QzEKr;Q@I~%B4*UBv_h9X9;?5!Bt<(@c+Tv;Ot zUY^2uh7l+W1tM2gnu-*R(q&L8yZ^r#rt=n|xsME`Y-ypntgR(lFD{i$1M6OZX%Ng> z_sJxZDM??ep~J!&2x_l!v*|%Su-6KYFXxTdDCM;aW~6~X$#ayw8GmLoCb_>sqI5v^ zxb!06BJL}4HnvF?U9ZQESh>i_!DMMNmn&rm_j|4b>ML#RuevB`{ZkHT=`ju7Z=K%l z`XW5_z{6I{(MN)*wXzs5(v(d<kwn)P-*qn^*`X6Z4t$*x_$=@?%ak;9ca{kw^oINx z(3Y>siJZVNL^K+4luxix%RGIr)ihm(V3r<lJui;Zo6QPA@!2I|-oP1;T*(+Cz_VPp z?DD3c@f&phiiw=E_=J1;B2%*U5_a06S@b=-xBtsvV~31tx#uo-P4!)Ne{_Slr#0k4 zG0I{#-sl_TP5D%YcH=2^B5ssZhBU0)OK$XA$kbamjzqI+bjBG3NI0P*#gU2VY2~to zhR53Xb11Hh^kQfQ?ve7me7W@mE6&(fTDM;j;(^ZYjU#JGpJt?emr{-`F--PF(3x<> zuhBoE!5eMGY}Q17RVg7i`Y7?q@ZkZiJ>+-kBa&KX3nAZXJNk)n^aIfbK9Ni8*Lwp! za#Id^1K(!v|3)4@s?06mItK4$JT0ux6`9mck^z_A&4$Gu0v{kN-BZt=RN}0^3m2fV zpmKrLa7RRT`)SS37$%{leCypF3_tu1?JmQ&wb$b;=j^47Ew2*~1&sM~zEsxBInv$* zeZ@Ivmwl;xLuk>zgwYQ6g600fI_ds{JGG_XqR|NnOcd#uP0l)o8l)#~Gh<Wt7FME^ zwOp7(Dz0g*A|^k=!j4G`jXcu93%^vk>&CWnMuIKAzv6?FnPG?DX*fSDeV6lkA1xlo zzdJ+8NpE!4$(bipZUQe^NhSaWx0up^<*XaI8}{vvhAa#pMTnlWS>^Zng?SI%6%0Nd zCb4XY?ZN9ZFZ~8j=7#fJvA-GnTxrgDz^`8qYxFuP8b~>3D9UDm7@p4}!x(S)YeYEP zne}_dMMHEG)0+mwnpY|%i0LR?*k`S|z%Y}8%6qLEG(-t0Ja^N^bc(T+tae#_N;vK$ z&Sn~CEkDMQIsepgZ#P38D$w_KUGBhFJgs5fk7J2ivaE!=>sN#i@eidaC5oA=>&p0u zog#*SY>3pm2gh3@rwz%IPG4FM5ylhP@7~Jbltb^0tiscAcD|v|@%D&*IYHWw7}*Fb z&t<fDxg7I}vewf|v@Y=0ecik)lLFyf!3>4&TNc(>|3Qujl0&&c984U=Myg%sE6$GW zi#sE3cmSOP@lEm8{k8e3-^t`U3KQ$l5@B#Glw_^AW1OPLZPp*Kq=^kC?jAr}zuQ`k zeqDI$@2$p=#cz#8iM9}dq)&F^u}h?6!e)`D;2Xku|AzCMy@&sjo!(gOw3g#76nN{o zJn_oS@IAh0>+~m0A>uR*1@d(%hq@K>oh0uRC3sWbp|nLwf}gcsq(sR>w%y`RJ58xu z#{v0u-Q*SP(K50sCk91U>-mnrXV&`%OcN~ojCq)a$&0tvK1}6MoR2$3ZzJVZ#z;48 zhf~2s5z*-0CL65{^Qg_c*{KgE{uM)c@rLp}-ug*#C~4LYK?B$vw@+~~S&^4Do=~;0 z_$BL)&}UJe7X-7@-|+^1pXJQmh4HpO&}9;pSnq8nCMyTEzG+<y<+`*;e=lpC4lWMQ zUgW*YyDcwSKh#k@msn}}`@3ZbKLcIs+~t>12qBK6k6t&4H1AwsFq5f@?&)K3dia2M zP|$n$9j_d`Wg%*mwX;alB>`D)g6h)o%l-^1*-~Z^S0DF;xV)V8lGV^b{4jaQV;}<b zI5tNvWeiDRc)~p`xq^~yUqRu}%?nf4dmL%p)L7t%4r_F;{&Ov%d7Mu-x%YpKq@)Z= z^n4(uK9QWHkam|W5IJ`)fk?RXPOZ&xMAtHoeF5oUDs44-mF8$o?Ph+RHV<n}!j&U> zRcn06o=9MlR-47HN0dHA&;5kMZwzW2r5Y82CW(Y(Xtaz6nA=E|?Qi|EP<W%e{Ifvy z6I!h?fymF7!ygA6muZQqAGCQnBjq%p+4Wqc#it@gK(;*nX3JBmLf9}QWw)$XgoZA> zu4mAG7UZf#YcfHEgoNy7vy)L4i`{csm(Fdr-kByP`m$zs-8KD`bIn6gK<HiC)*uL5 zDOXLG9<mSX4!$_ubVxm``#$Z#2T(E%uJtBp?X~SHpk#liW0z7U+qIA(HH)PG7D|kn zgZ@*kU*Mx*;c$k5XMrI(jy+Ba38kdNcgtM#oPLu1*j?e$f=Lj_0@Tj3LVL0;k?8A+ zl3DV&fMX0<?5)b7Q7xG*ry(oNY=3gj9We!3hBfb5!FU?DzIE3olj4a&k3jCCy}C1) z7?sFyxBY78nnBw9n6&?34Qc;~-AjE(lLTA^Zen(iZhksrc8loi4(iCbly$n+A~~&C z#^l-}5VwV$0<TPXW36mDM>)T5OoEgq*~@GlWlsC(!!)uTkr6s%x6~x#56;I~Ifut0 zk}cGxmW@mbJt)Com@a~*)!K3sc|TjfSpLj)NPrqi%<U04FP7`jTGC+jp=Y?lVfE0q z!{K{tGjEnZW4~Gc3~!d}VkSpK-qmlG&py$c<^3-f287-$?`d?4LT{G;;%(i-(<;7T zWrKITSuO*lu5Xs}I-|T<j`X&p9xXrPEs`UTmOt-WH#@0ivXp~kA1!~qmi#=<qvg&b z66!wc(eh2NNOa|}=0Tm4nekLUSL@*4kui)~=8UB5djij4$h6#g>`e(?9xi_x*DQ?b znOKILYv?*xVU|qK5|Tfx>+K$2j&;v%45SQQFUK+3tiG89ZiYi+y}U}R1^Vx{T<r1F zO&~&kS8I|f37rEN>!O8>4NP}Tz$^$-40!EE%0b;hG=&HW<cG?kXD_Um>>exQiak$| zq6`l;#T86^o8w6g*T%vSyn93Ma>NbM2Al@r2IyDcBK$b9Zy)U>`Pty88qnaVtMzX^ zzXXX)5khs*VN#3ptY_vkH-w-IF(@52b-yD!XpVcCq4_WKySZWULwql?>TTbtU43Jd zYF*x)uK3v+1y$=(n_M?f@OOV8+G@F@TVN7ZBawM{_js68#9e-o^x=;raDMBUlw7+= z`JyN8u|8n?`xx7;y5D=Di<K~_lMh-*mi4PH_8ly;O3&>isC|WgI-D>on7C(kTBK5q 
z&Rezz`0E}!6aEFqqi=#vJ)V_h+mAl(481eJf?PPlY&UT&+lSsjzvr!S^E%xf6G6k( zy_C+QI3qb#cP(-&C(`{?)?GUZd9&UEMzC6XC95c7t~Hev>6NnnVWd~aSmAb@j>=cF zu2j-J44tVlQW|o*Sm_vcB*(0uoZiL0!zz%QVcl}DGu~2K|6Si&m-Ko{-D<=8ZTOfC zn{4<;8@AanVwmwa(1vH)aFz{AY`D^fKepj!8}6{-t2TVkhKFp}Lrxv=oovHY8*Y?0 zZuqXZVWDkaW5cyJyv2r_Z1|83ci8YX8-8TN9wQ9hkv2@TVU7)pZFsc}*V=HC4Y$~^ z*@kU4j2>y|CfV>58&0y}c{aSzhJ`k)wBZUH-fY87HvEkZpRnN$8#dYST^oLG!*6XE zKg#B-4O4A6-G+rWyxN8vZ1_tXK4HTq8-8TN&uysee2KN;U>lCL;dwTkWy3-nuC(EA zY^XDtpBy`XLi!WI_s$SO(@OR88hGF$14mwCV0V8te>CPh&OE+-SN9KI@#-a_m##Dy z<?7>5u68j|Dzl<0tESYKS5;9^RL$YSrK(yLs0vl3iufC=3RM|@N~L-nI(|(yEWj1c z5e+p*O;e|<G$F<p(vMeXsvMPN47K@%DqCf$8OozgtCU}rE1xPQ#3JQFRq-d`y0o7- zCGC@fdJcYO^Vii)$QJP`P`yx{e*#-(I{402Ztd!L6lM|6R5g)5p?NxV+{DU9e3q(8 zpbJ&Oub<RPJacu3Ibg!2LTy&8W4=_oEg?nKVEO!tkELoc{9BGmAvCJ6Emup>mE%HE zf0d@tNlWZYwM{<$O4Zf2y9&%>;6{kPxGTiIK-(ADVY&#-ujz}ui*PG+s1lz_{)D3O zQHo7D`Rjr+l1dZnrI=Q0y|}!BxS6>72v<@srJ@>&W!g=JD#lFcNf`;HEMJEl;?wDx zPr}Pm+!PUKmr7RSFfD`HspyKKSB_q|A#oBDDG9>QeAKa=D%Ww85+uG%nK8eeK0P{4 zxloXJjn%m;<tdck6FTy{916mLBHRhLmXKB#AyneGK&QIc4x<ccQY*X=y2fTHE<>^J zESVBoXL#c~!aIKX5^6{1yMz(qdnWOogI@AS^1#>_P7BW@o@4(<%10<YA-<({q$3ok z5dV&sg2E2I8*YU7F@g3XRNo}`^K~sT>5|%ZJX$VY2BjQ_;?&9C3F@p4dZGAyxBOg2 zSjP|X_<3shFo9Mh6#wrYp42R<v;Up&&gclQk}_(_z*W$yf~Ft-mBF=RYf`9gc5*Bf z|4AM6zI*%^67zCOS`Fd)sKdgckWNZuC>J|J3+4BCb#_O1$II^ua#ixnw5n3SD|Bli zt!5Ezq_kQ^nnI`?g`i2#_-^G%VkH!%eK2i~v|mzoj@Ji-XmysS@m<Q3#P?WwOX0e- zg2IUsogewSZ8L4e6(<ZQ6yN_&`8hhILe2mDZd?h`@2sbiE7EHCXm_Q(Ey17kP>V@b zi07sS7fyA}yH0+jcFDi*US5Q{DWg(uq@R}_|L71*d{evR-*=~<PpX!X4<X9c-Dr1> ze`kANOukiU-bpJXeQ_mv!KD|F-c5Q+!!hZ7Oz&X)NNGHpM>3|8&vnF%!(>-UzQ(^A zeNUrRi)_6a)hHEj>sj>H-)5F#9binO^_OSsSvD}At!EuVf1AEC?papWU#hKV2SI<$ z?Z$qft*^B8T=O!Yt!IBjf13{*_b1!>5?jx<wEi|7GWIT8pK9xe+4_txjD3x*ciDOt zYt3itueP<#e=+Wl*!lyuUY1nl+hXf;b={ZofV2t4I*%^GO!CRJdYLMPKNmfu)KsaF zW<;3-wwMwkrNqP{#vW4&{jvIw_b16~p}s`Blre|&MuvjWyF!<MW&EBe-i`?+3x8(Z zIt=PDD(@(2=RnVVUgcHiKzAv>GBON(g_I}xI}Mk)_MfzR#$-7$mp>V|=hEVe`8eA| z5tfYR**meJ;2uh-w3RaMlreSayO1=>ShKtN4BbXnbhw_48yWX@cj<<^QrDyplW#h> zj15Dd=%t+ueQwQH8S|M>Mq*WjRbuO>;Z|}-axs)YVl^Foc^6ZY9>o}6;;$IZOPem^ zeu<}~UE&prkAdarB|M3piHFe(H%z#paD<NZV$!2Zd)k?vX_%CDF`Glo#Z`3|GsvlV z#G_EB{AimwxR$YUh-xnJml1m?jM>;9t(SO6{3KkN_egjl`l1(lk`AFG@fQEAGkMQh zy7U~GxtXt!?=)8~pG%PCeA*&6cU)oNIHSp<*tkv6QJ$iLqNR(AItt2#6;~alpHbke z5bN1RmA;UL+rPwLO=PG0%QW%n{&Jo1?#ilC$?x3!s)CMuE?QjW&#&rW?DYI{ttDJV zSPJ?=TR~Ikp~~^UTtQ3dsA)xu6@Jw0e4oFn5DUI(Ri!otUrPK{)g3zcRa97B)Wu+q z-{^8H%C)Q6ejDcdgpT+ZKMHpWHwt^ruh5SpEIcBzM^tprm|n4Qy`Aw1efsuGJgI-u zfPsSs4>@_LYuNA+BS)PwI{DNwDW{!2c3kTC2@@xsapqZRXP=Wk`P}oSWVokhda|aS z@6DcmK~C<4GxBE6nmvb&s`(dRa_NH0zPE5u{^EkdqT(ebrI%k(wzRyW^2(}epZ}_5 zHOsHQX2rEDSxEdp{<&xukN!($9Ao~DHb1`3%eM00`m0yR{vGXpwd(wHsPFdImBMwZ z^Dm4wnD6oz^6&aTp8JygWAnA-|8)s)tnT<Tud@98-%t(;yw1`27v}hGe<A<Jrv4u_ z`Fk}$Lh8p~C`8e9;_>!7G=D$E#zlAc-z~MBuHscEZZL<VYbB^%eO+Dsn(zN$?Yi}W z>l<#kal=hF|L~Uoy7fo5-Tvb{Hs1M@pKiMA?w{TB^Ly{R{};df)dRo&&2Kk9_|U_T zJo?z<Ted#&<Ws+UdRyc6XP({h+|FIQ_dNf?-WOlm*R=oTS6==7Yp);p!y9kD_4Xg% zY5vo@e}3=%4?eV7K59Mq@h6|QefIfZzWDOc;r6e-{_Bx%c;)V&?GR4f;D4tM;oqJ9 ze|Px*H2?ozhkRmt-68+G(;vyyQcgC9K2G;F$~m~ucWXC&1NzXMRNUOqO@CuI{f2J( zo4V<5?xz1?H~k}FM~C-lH+?v9kU4Q9Di(EqQE2aBUTJw@#j@GHd|xT!FwEyx7gfzz ze0foUFT1?BqH3wOn1>(vu+Pb__PGmurB@Z@RaE+AGGNS&nIWfg+11EjQZ%)q#`v7e z$T^Ey+(b2j8K8&Vg&gfKnbEY&Xv$b4kuSO2UseXnrOO_Q7ARhY_fS2;;v=%MvLfX> zBbE7=ICQ1u&5w#!XQigj%VUS#<<jhqUR1J(PpO*98op>;@cM3aG$5Kk(dw^9ZSD7K zSJx6i`1fnSe_d_A$Vft1vt|t$otj#kSGyM1$)nTq=GEe#K=Sg|;?b2{yS5fzieO4g zN}{8qYAP$2RaPEDUv^wBM9IF)NZk8j>ck0S3(M?@StozylrAl*@cZ02;+vN5%P(_f zRaI3~xk}4jIr;wbf)eHdRm|FqeQ8-yq03j{s;Dd~cNNtX75IJmi_7#x00(o+MI-jb 
z<f|*c+?8L6!$SPX?0`%gtLb#Qt18MbbQM=sEX9W*Gh&#`aoxn+OP<XKa{ONVV(w9~ zn0R#*3)hwYqN?Sk<x5<}`4X{X?5m4>zAo0P+E+-j3jLL3r3JcRg*@8yv4<Hac7T0R zRn>{@JJVG_!hA)pO0&b}@>feVMvp419xXvl)6o?^Eh+NB#!{cFprWwIRZ$%BVInk^ z*<X#G0UG@{N^3Q_F^>F@wX1R1$!c5oFRP}K3S@H1<OKAWF3l%T$PI}KY#2S3{2MJ; zbxG0Ud<@G<%Ztu&5y=p}W9Td^DfJaqSLPQKx$=vBMOCiR<NVcC;}(~ek1Hy_YIMkR zCm(d^o#jMA!(nMvMfuXAa$m@OXE;SwOH0ei8e&6MP?%1?(q921$@4D7?Qce!$j|9Q z#`s@W>MN0~t@h<tY3lzF+0OjZ>9P66d7<unh9x@wrv7y0t7d6e_k~3|%e%U<i^<$M z(@r-#t6kmekVlUg?aH@PA*H3OZ$dcM{jkxnuB@zLnJ$0$?v~vqr;qV>bdGfk<v8Kw zS1s{NhE}_hi>{(&C@ppsEv@t|KlSg2x0s@2HyZgwlzLgzQCOuwx|S9#t+1C8hO1Gu z)zxaaa@m**yt^6eb+bLQe^LGAyferCF#Dm!mp^prsT-<kT2<yb3#5OkUT_hGZ21EA zzonz8UQk`Sw6d&dVVT|DEhsFl_AOX;=A^Neg{I*%Y&zjDU4=^M8?E{l#;U%Pda1s% zoK10!y(-_DX!hSHu;4luwONiP^IZa7-bL@|5vF=f2upQ1Bb0Me59OTDt1@A5j2c|% zRD&n=R)edO+D>Zj*VLymUelED!dawEa4Ge?P16w*reY@aNF8u`+!;f6IMpCuZ`Hpr zj`+kX=fs$*9?cO=VRT;6-=mMgzau6>#Y|%LvZ_a=qnAVVBBhRgF{<Arr|LH$zO8q2 zY*S2QbY;|{9(f4^V$^^gz0?37u&}2ZfIRmBvl5#meG->S`pk8xPeO4S603&zdZ{6W zF>1)1o@xm0hfEmI+`lQYF+ry>N~ba8|Krm*Cyk1J2V#7Ve$lF5A!<$(@#!J)sZ8hx z6thW~p%3{@pi6O~I{Z4}Um^5uK5E|13j3RBkoEjg<CJ;|HQbKF^Wg7v(H}3~+hF7# zR4DA8agi!+Qhyca=%;#ADiw<3ed9S3GJ*H7p)JsXEWubuf^xWGm8&p8x#q+x7y0a} z8r(LpIjQNS#(tHK!LcU)=aB#8;ow=FWgtPPu_tMaR?b<boJl@>bdFLnla;#EPQ!q) z@V;;d{C`#raXhVhH^x@>$}?r4nF?X}OrDcR{h-xvR(z!+!l5GUQt#kyd6eT0oyS67 ztHfxP=tIHdM0lJyA)zthShJ3@k>XaW6m^vyr=CK8YA0`6u=^P0fWE+l{xOtW%I>5% z)qhsswuI)+x+HE^XL1hPqtq=ny#!s3dq~;WWkT}uqAbQpsE2Gfe(;T`mqUK~Mo=%J zRNo2C4nIexDRt`koqp26t56+&qz-f~{}U3M`!*#shVuAoF9%1nNy7=!`S^752NmCJ z;DFP6sX>J^)u1_fYS5$^YLH{PGBw=fPX-AYFs`RM+4rbAx$qHn@}!5=$&Sq`-p;?u zNO+~w9@UX{;ZM~}$|Pz6;m9{&Xtau-#N58Bvp!23AU(|AtJeqSDpk)(&k!xUoytqF z+pAF8GdXE=F)FuNLL@KyM5sRGU!R+M<v9k$&=$p$R@x-W_@JA0D)StgpR_5o5d$cH zcAL_u{Yx3?1D*cSl!czE{~RFs)qg@?YL#6MlVVg7?QPPWUMh)pENPbEo?~!NHTdqt zw!X~?P0q%+%GgD{Qe)s*g2V&&gJ_=zP3Yfda?j+y@NMH#rCvgnG)oACF}Rl+>`PLE z=k%xkoumdk`Y7WET2kJjL)o`WXUw7yFYc^TY9A^@NAuVhOFBs_@ub~s3zZuws}opL zzY4XwSwdCnD@I>8qJOYiYwbGPn>rdt9gS7}Zl;b({ht#<{ih9}{wE~#PzlgZm>|7Q zXIy5m@m7tx;!id$-EQ>4AAa#?@*s}55kL5-<5!i~X!?d|U9OB9@`tu}0PXPr>imES z(vC|zdu$u@y*1PsRHzMdjEqquCk;>|KOfkZ)O=D?zs5e535(+MOxZR4Pl$J4F>xKt zWOZ*S?Izl~Z5VJeZQaeintL`yHAYrOEDFyHOO<j=wNvzYSwnq&$lnCYNxbTt+56_0 zw&>;_O%aWuK2O|fzTw7+8|TZtDs^9Jm(Bi^&At<RS9SN7+TqWMzuwCE5Pa($rg~3^ zNtJRVUsvczil$tm;8ha5N}ABGazJV?HFQ#;8tS-29o@5<y1tu&V0doYvA<Qm);*}y z?@*!gHQ>x%YSg4F!$&!a!;eSX#pdtdqW9ufPI99<=nacjedai+zr9tTD&1o&>KHRf z9kRz3gBV*3qC8Tc^K`pV6_#q!c`xbQc-(Y0v1nn+ZfE~&+91joeaZm(6uW#ihswl5 zFDms4D$y>(ratw?E#;Q*2UK;o!&0C2zob+<D%Eyh13sXMIc3QI3F^vyxM@Xo)D>x8 zC?~WlZGD>Kn+zvn@=O`B>rfSC;rL^nHW}+UBEwYVBz6xH=_jF;_)zc4xJ9vfj-C$H zb1}T=BfO4**T4xT1Vg{GUg`e$I^4$gP_fk8*jZ*wHef=8ayizi0hLL*9;6Cyb(=`M zX-|`&N!in5#sQ;fJAIR=8xvK3N2&^q0V*lDj-jz?s4qbcEsUp(I@Qq5v1Z<Y331Bh zTgDjWD#j>&#weVYa8xQYk~3vjUQE$(oT}6Td5FjU&KnNi#!`r@(`-Iwn?4V@I2Qir zT({xXf6jFqKK%dvT=!5%*JIQT4%R0#%{ogCGuL@4C1Q4wEPUnV`%31imFT*zYO<PT zRytLQV_H>FQBLXND%mu@UuEjmBl~zZOPD(~Wq(x_Q|!>8?dZsP`K7*T6;-pD+LsmC zl$PSMyr_yvbY?|)bwycGrmw0jizzuHij$PPy1H~pxlLhCMP~%m2NHrLCX;ojP!MWh z7^b-uh5oXlX)Lf^n7@=nsr$lb%DP*Z_1R8BrQx~$GGFP`<-Vdh74u39i!w{t&;Ktw zMIG+kw6j8cwy)63s+6q!MT=fmclGik^CZzrOx%TqRr-|rdEq$~`Gq!@ppY*7?2=4F zsY)GG9(#qa$R<8ZWtLS`hjbTY&AKovC$t`#<(TcO;y;^|&GzQZbLVC2h0>Ed({89> z|AL&H5C&D0mb28OUJvsY<yT7H(MM_AElU`x(xk15oSCz{Qh0WKx3b|@mm4=MNBJv- zmGjg~5utUf&K!6*Vs3ef=1^f)O##O-SSBYcByjbxr2mB0Vd2@;o}$J6B}<B`@?fA8 zfm0%8`N}RTCCL1;x#g@06)H7U%yKF!uJBh*W4TuX$SU{Af=iOvWad|L2181duc*eS zMv0m6npLAdS2!)I%B(8&v7lNuo5d7a##ia#IYs$b6&+*V8kSv-)BLj1tGipe%L`{$ zmX>E$_=)Ey(Cy0Sm*IO^5oNktK<=(30xn<Tm{wLzvAHO}jFTOgiVhYT-NmBQSyd~1 
z>r&EQ=u*4pl*o#7Hhh^c?oD>8Ns;piS=Ryerp{+5bSeKz{ZexCB9@kP`hTz8m|9E^ zsOjg4dsQyDJ2OwI^TfxDtX#ok$tz-6TBvIuI~QtPcur<+ekJ9SGDOOr4WC84q!KE% zLgH`Aq+yr(TBwSpls4rPd(nl8sam4jm#of(S3|dYp8AvcJf4LbAn`l8?o;Pnk)cXG zr=p{#t6!KFA+M-Pug#YiP+rJK3h9r9%AC@IE9{u=(!4mTC4<?u;%fHtY7QI8hJ?FX zrv;2{uGwWpoD{mzZb8Iio_fpV0i{T(O`4+J*ul$(y6A~ME^ZNibZKcWBWL~9B|>Vd zzqpu8Q=HMF;jYZDD(c{Pk@)JunTztPN@byPo_a%vKC8$Sj(p>;a7M8ZRBEQA#cn~R zbwDbFDLr;eMP~lz`zJXd<9zuh-2Kk!x2Gue;$^Ovi(i#X;pWR&WbQ3>*B^DNAU<S! zVkqI5k;sYNbhQ!pp}2K5{}28j4*Z7$-<<<e=T1U}>XV!w5S%HllC$rTs5lh+^!i*u z6fhPQZNr{6lymoDCub)@c0yPD3Us1`t~3Q=mVoMGD={C267w@qqNfAYdX}KIp6*8b znPr>LwxP7~LjPivge}|dLT4$;g`!hXZ2z#6YvM2I5cjnxiSue3UI&!4)S<+_9wl^c zLb2VVeq!tI0!sYuK}mS`p~T(eC`l7bAlm+Qpx7tD6yaBhXQ29Xgpd>HyICL8Hg@;_ zkGlUy{r^wX?;fA-`tITXU)2Zsx~6@@&E}rRzY2Dx{jbO4PxzUzTqpeg^>O)kP(1PB zL=&46ACCKX#P)wC%o7#?dz5&>BH;R;iO;{8>i_a0(RP*@|2x7Wq}x~bFo0Ne-SX>( z*=)Vn-lM(i=f+_5Pn(Yjma(!{!~1c+{bz6d%w?>P`Ca|3G0L#vqu<fJ+jjpL?)TmO z-rWP$`*&M+UwUHtEAP@iW-{QJ`NsF&aH)lFT>i#{W8J@U;E&B;DK&nji5X&-f|*@h z<%Jo3<k-+_LyrwJY&hA5X*Nu?VX_TfHjK9+$F?)1PyE7Rpk>4NY}jnWw`_R8hOgSN z$%cDvxZ8$1Y}jbSr);>zhMR4;$%ePs@Om4D=-1o&6*lzQu*8OoY?x=m3>&7|aFPv^ zZ5VGuWy78fNV|>++Lhr$n++`+Hrw!k4V!GZ+lE_gc)tyAvtg|bOKdpbh8Z?Yvtg<Y zlWpj-VUi6wpRe;N+J?%8?LpH|x7pCLVY3aJY`DdS_uKF`8;0nwwe>YNEU{sp4Kr+* zY{PgPM%eJkw<aEz4I6EEzYV+cxJ$AAm!l)kHGDb|;eWTi@ZoRqqIv4^<KG-^%qyb} zZ0l)Ys-51pDMtTZq=6?&|Nj>AzjtpXtlJlEf1&h+`1P*M4;kOeS1XgVf8i%{Hu=Wd zDwKasc0TT&ZSv!vg{$oERh#Z!o9;gfrR+&uyO+WFu_m5g8%qBppSmDYrndk3k5SnR zs0j9(_W~y}$sOF!p(X)O>C4^`=E=Z>e)LU)9jYI26>0@|E$|-H2HXg|m5FsK_-(*? z78jDh1r~8`q6EAIScMYz8-TwR99c@6felRJZvo!`{4=TvJc7#^=b{A90A6I{mB5E> z{1M=Xr~|mMfWuB!>eYb`H4^xWjlT*ka4Gd3ZZ-m+K?%(rK*um-M)f5uU=~Wuy}%Y5 zKL`vP!P%GrxB+fNN&E%Q8fkb{36$qE21927@II6Wd^7N6R37-Nz+Nmg%m<GJK8&ga ze+0N6wIC5Uz;ni!_)G@=6(wOE0X}pZ>+|9s7<)SBD}@g5X;d?KBk&vw)ED5Bfrn9& zkL|#($0;=l{0Q)>RKuSd;2%*w%$tGB84BG7z5;jyO6L*qjfqM%VlHP${=nkRZg6C6 zDdc7u9s*lY;{G6T##zQZ54hUK+kh{g1OM^c1ROnCaR|ksQi0c;%M}pt6~JGkgwAH* z(DUFq<}ToQDB;gq;Qc6ZBXGBk3+z3`gez~edr&2~&l8BM1g`{MiSmKh0#~DI!0Un0 z8I)&mfj6RrpZ5cAa1$2h8-TBhqFf0)jCu<5c3|RE6CW?|Y7b{IF~0@43ngXgRp3`B ziSrTQgIVO?N#r9iZJMDeZ~;o(3%v1s#?QFf08HSpm6#*fOwF_L`M|~5qy;yHz-v%K ze+BRnl*CiuK^qr1e7Z5;4Ezjr4}M)2D0OWvc?n(%d=@2ib^r%nXz;<n$529Z3(zye z@JHaJnMgpyufR`G(fu8&4Olx1zk{F&{Bn*`r(xa>eC;Cg6dc)Xaxq_rl{#OkU$D3* zW%U7I_$4O25y0iBTHLPy&b*ZJ4?YKY&H|+#0G|x3zl`{U3%na8<>Ve<)b|V>Id3%z zCFx2A{?InR1^5~2E!wfvg-WeNHAANs7?)4Fz+J$Y#l$s<x&}O@z=WF&^cT`*VqOD$ zzR1LXFYwbcaLn6)y~^Q{_ytZtNgO5tpF~M|p90=+B|B!g*#JDH%9tkuqpC?e=Fz~_ zC~;p8Y!XFz5crgj_~2$Yu+2|?gA1&`%7nEEShS4z({_~r@2oMr+5{Z6+~i|2@I92| zufY4RW(^nj_XFQSNm$LmVb>U%$kS7kZCqgNwT6zsH7KF87Pw`lslNh$P;1(*wZN6D zDfiH?1^TWt?W(}hbxJM5JQesGR2leY;KX`^PXcbW@uz^EHKu<N_z9{W_ie!28sG`I zz{_tSz2IfQpP-~{>;=Z&Xwu>W&bgVq#*M%Rl;qK7;QSxLQ_KY}`Y+m5aDnfmgeMm8 z@mmeP1=x-fzXG4R9Uj7m9l%R|OnSi=0H3&n^nyPHJZB^A68L1`m`&t0_-Vk~P|_a> zy!$R=eh)D6ZWESEAZjh{n}PTJjBvplf&P2&4_*U&8YS^;1Sb5Pehc${z+a%GUw#01 z&1UXVV7>yF@Sw@Ne!#=1X3X1x^S8n;>axJIo}hmRPXh)}LjQVTD@xLSQ1GYVGj1Y& zNBw=8HWs`dxCbR|!d_tCZ3gcLyZ|M1a)5WDB%Ygq=QNslP6pok0_no<ZNRL(ro4H9 zpQE0_{0m^oizW^N<6h$2E9Pmyj3%XyfD7!upR!ARPXa!Tl6IpJc+KyrgP5-X9!AO7 zCiXSTKT7IdGqCt|gO>o+A2@}A8-XvO?gwuI22sKX^#-)xgtwRrJb;q2Cvf~*^rx6l z0zUgTeVDibp8ZGD7EJ~&Lmk8%8I|hCC|xFjazCYpI17~fA%Y8(I}@i7Pl0mhLd*rq z9SJcPC})VpT%g<^5L}>~{TE!IobeZ2;EOgcXZGbBzL*OPZ#H;5aDt5s%(ZcWB{nW_ zwT%n>g^fQ5+-u_k6aQr5DKHf!VF|pz#`A!3mQ>6I*4w!H&-X9mK!m0guEK%IG&{Y4 zl|Y8?YAq0;KjSV+&s#QO9){@_p!lPT)r!3Zj0EDSwg-T2sg`Xr#Ubxnfc^{}hrW@? 
zyM?v3u?m?xFj#aPb8%NAmi8BNFWR*yFu&~t=J%h#e9H;Ucb~xgzzNK)<Cx1jS3Bm> zH`9l}y!T83RNF0#PcRSZlWwICj_%Yq{)9dpriSd3@4<dlr@m<q`uI+L%^SFfcF2A6 zAJN03kly=_gEL;8`ef`}s0<XXH@_$T#*Q7U&OiTrHE-TLRa#oA{C>Zx73a0J{MV}6 zZo5t0ci(+#>(;HRv9VFT{`%|c+i$;BGL02i+A*3o(Z#A8_ttW+Zr8G9`?Pjn@Y5r; zu}79ITd_hMd>Y()WM6RaVXSwdk4Ez;e$g*mCVFcha<{b})1Zpx=hNVkVr{={MO*Yi zJO_8$`epl~+r@uzG4$nICGPj`D=ywg_*$>&7Z>wLRuDI<g#Hoi<-?73FaG5#JB<Da z;qTk2^#mgH-;wb3cbM=I;J=UfL9LiqQ6&Cfy@T$MIMDIG?#TMRNA?~P8lwMp)`Dyt z94Ui7`pBpD^gDIq-hCvYSojlt;Mc4B3Iq7sMgPs32?;rS_u=AtoJ0Q?KOUcE9B6&} zi~H^w{=i=CR_yV2&^<ZWj`vvYe&61gel}-s`;mRMjiQG?!EfenYd@l`#XbCO+1MV` z)|&r9!ZOz4Uduf{V5~KL9XBB^^w!#6qt>b;LmO8$hu2Q2Ws28Dg@Zd#x@epLioLk& z>QCq#Pp!f{SgRbUa8wk^i5eP|avqOzq5fVupEPNbLJ*jmJ$tsg`s%B7+(Ko1!-fs& zm%sd_dgPHubQyo`wb#`9@4w$s&MW13z-HRe&5uV(Nx!D@n&4x_+tjLUpKdF@qC%}A zi;B1LK>4=Tg_MLVR(<m2!pA^X{p9^igTcQRU%Ti1xpU`kE6$npE8xN_re8TD7<_bL zaOJwu+b*5EP^}97Y}=!Ah2GX1g#M~v@Z-aWH!EF=uVI`$bZ9U>YG{zltwxb{Sa*CC zawG0VISrqLM;V$=G(>;n8#QW_N=ZpU-a@K6_uO-p+l{>F>}<`unKNgqi!QoIU3~Gy z>axo&Q!5LnsU=I6sLL<ETrFL?R8>_W!;bdj+H0>>cloENUtN8!dU$2Js#}t!uD>Ev z-BRIJcT{DlCs&`NeqEcbe!P0Idi=IZwe!XcRMXF{P{~gR)oITJ)%cx3HDyOoo$-87 zO@1k;7Q7f#nXj^J_C`?6d?%=?QFGr7s!Kl%ssa|}7at6&)vH&lb?ertn{K*E{pd$O zQn%lJyV|&MquR7-lluA3f3EX)^XAR!(MKQEZP`;#J*8fGcC)(fv!HskEvTM*?m4w* z&mQ&Si!ZAE`}eC?Uwu`*@x~kK?YG}npT2)Uz4Ccb{pnAC(&eJ1rA2-5=|}3%Uj<cL zTbnw3_^>W3!q%{H%q8<UgF~T--oTu5GlhrB-n)%*8tkLa4USir24||}!7J5m!JE{h z!3Wi=!Tq83RQ1FE@I3n0O2&K}ShL&Ac^UjKX``<UCaCrJ--!R;;J*?7FXR8cF8)u! ze?}g2l}hGh8<>-BW~_$)XWCes!v6vM{}KOx#(xX`KgIu{F8;@z!Wx&GvVJLJRpz3b zZ(vXE7tEiYr4qiJpn?a+tKi!+Rq(wlRq&&mRIu$q6+E=RBmU$1;eR;(Q}KTu{&VpU zt+rD9FHcax_2X6W&Y3Fs+bdOY`%Tb%PzB%L-|3&hM_vT}d*Ht({$ub@`_slCCg|#; zg6EA_!38r_@aij7@JBbP;A0P};H&#P{j1){$>`5`XPDZ6EX>VFC*DWNZ$q|sFyRsv z9ABw|GrzBbS8i0nn;ufZ2Y0LB{&%|gcj5m`{O90*G5%}te*^yS!T-bfe;WV0@K2aG zy@mhx@!!_r|1_jeTmXeqDEt5lKZn8&DEtWuZ3#j3`S_svVrEbsx-zH^-xO3|Js4Df z-5>1mKM4P)<9`bNXW_p9|5xFEE&gv$2&#L>2h}4pgX)<pgKFPRLG{*yLDjmy(|<Jn zg#(HBAB6v5_&){zr{Vv+grJ%~KB%so8C2I_39XxgYTJWB^~U~A|3^0s3(eQnFf((i zd#c-`jdI2$j~Y3A_$jPy*`Isr^z3YpXL{z8DQ?fg)00P!9yRj(^G7VStv%UT^Pdgk z@hnU^RUDjuzRR|so|~C9eG25WGTl?Ax)+`%4n~a{KFqey&dqR7cV~Js@h>qOds<4$ zd3GGd$@JXmp>Nutr2Y%X;y@^j96ro7f=Ffy{pr)Qr%Ycsr0;+v>~WB6J2+(_{-?Nw zK<4y?gZlOzVC;1qM-F!p$?OjL3lsbG?R&wwnm~$9<uL4tzeoJLr(@r*?~n`ZxaEwI zDByqL6nE~0)2C-mUpP<*3`pwV|D@R1*ugmxN3j=SmAQnnaNto677820J~Jyfd;0X; ztlXhTIlzAO7)^g_u4j5yt|xce$paJZAP8aMsmZ73kba?`se=e{ZQ<l3q98i>r|FMR z&xOUgS<`cMB6J)>{3rekJ>xSZ3e$6Q7Y>{@t#9AHB%;H9;h8-aW{k@shzqeldD=9c z%E2A>na)UOTJO}X>7HD;JZ-2><&bH1@tAUs+dU!N8JRj|VeZ1*?5tc9OPUb#WoJ&B znmI1Q5p$t!A-u}=&|KN}3(p=G-?N9qk>DkU62;u<Qz%HC_U^OJNS)X-Ji1RdmYGv2 z%abN$W_H>yTsYM|BfWQ2<e7w!xlpKQdpg5km?`CMMtWRWREF4RPNBH60&9PgzY8e= z*%`eOde2yxnL8z&vKF!x{)Rqpe8SneZupqp#aj69(JnAQYhq?PRi&#n_LLv!dnkz6 zS!bNn&AJPHGCJFR&e6W9A4h+=M-iue4K}W7ch*j6W^mDnsx|7L8PBbPMt2oF-dL_y zP3bnC``hD`(0FdvtXX<Iva;y>j<LsmHRq|drCI8>s!a9Bby;fjk5{U*b~C1+o%6mP zR2MOJkTF9I@YY*zRd?KRhr095JJnrx-KFli=N>&)c=+Lm^?2}!C!SC;R``A6ed-Rz z8=Dy`?AWnGz4X#cdTj9CyYHxX-+foT_uhM|wY60pJa|xj{@JH`Z1ClmU#f4u`9|Gz zIH(?Etg!#DUE0}k!|3SFqNAf;ZYZFmTTVxJBOTqnDk1oY8XtUG%?$2VR|a2EHwE8P z4+h_-|L*K()HpiCyd?b7*={%u|7YSq1OL<Ue<}X2#QzWQe<%JQ!v7xpzthpr{AZj3 z`~EXd`CmCs8K}%RaNs~H5nCby<ac1wsBz=QjS<W>aA4BFVPl5I_wIc%<KY3rlShpj zJ!)J+yz|sJ^aF>D7(E)d3GqY6rHo5agNKhv9ycUrTmlYy$HkpIc-W}qalK-aPqqzQ zuH-S}qN8Jz@pEdQ-f;ty`i~jcD>^!+XJlmTsZQs>h#mvS4CxgeBL?H*hbBfjqK4yk z+_)h<BZdA+Nnw4_^%@b=vu6tPfMKHsg%8FSFGJ81e#+o}qerBS8<#Sg_!It^l$4Pv zDZ^59`Udm4_=XMBTD8i_+IJKV87ph$03PoX^N<^Hk3Th3uL&xK=V}qkp<Mj_Q(qXq z8L{hY2aMV^j08%^|1Z81rT%Y)E%EJzN*p?L=rEwDk*HI8_Uy?F2=Q5(Qw*Wm!a>EM 
zLL3MEmwdt>q08*%JAK02c$5@>*H3@?(|PnePk#F8ryqRs$tNGu_q_Y@#~;5BeCLZV zzW9`R>3eUz_11&CcI^sq?k8pP<jItL$$foQMCdTP()sp}@K5quI_5s7oN~$tpzunT zm%q!Gbm^s+PC1hg$Aef$ypefyaND+R!4E(DFi3wIWZgv;sDkgj^G=ZYcTns}Ly$T6 z+m>aukk&uF{PN2`*}i@Is;N__PK1u+(_mD8R02xk^8dAW?!i%3X&w%%yINDbtF=|T zLe)+s8(l;#G=fNgkc-4IDkGV71|!Ri1dMV^2m}JGkOYXLf&sjYh$2Z^32M@fs1QZ2 zUM34O5=AiT4vCksBCd*ZnFtK;{+^RQv1vjG!KvCmJXI&Bzs~oa^WM*UIo%Cke-iGz zhQ|EwU(|o9et~ioeOp>u4(qix&Q8PML-gJu9lVM;`uFeO6Ziy|gMZt$ZKfQl2|lZ> zt#xoKhh(39_L<e!*PC(*?mnK8kHGt%>X{YKKmYvfb?es6mfLsjym|A6<>%+8mz9-e z%C82tOVlsB;DQVKDz`DBPoF-}u6ufVdN1bH>(hdN|Ni|ZUuMaL7hc#;G%VY@cdsd@ z+eYUdJ9gOn@4xTh6n`A-@TRG$$v*t>Lw5~MzRTxvuO)K_zWL^x7RlWa&DnhT@L~J* z+i&;p+O_NDPd@piQEPq2Gs5#B>03X+P{y;T3%~p+(^_pE_<e`2zQYsWfgf5i`qy03 zJsym}<GWxs-^FwD-M{C@uE77=Yp*R7ALAtxrQou4>sE&Y@}Yc_!2{))oV)-dIw2i) z<IbHsjgD@>4@UF@-Qk(-+qXL$;Gy*2;Dd7Yq;T7su8-*bqx@ew+($TcR)2dOI&{di z4LSKwOib*F9Mj=}@@BSv{dyD6O+K^JVR!&flvi^d*z4-*3|(nxXmGE=0ULn4p(pSN z*w%h!mh*vG%3Eev{kPc_Z<t-S#q9FUX8*Cx?Abq=9Xxbsr*sAV?@V*>Z``<X$we1k z)K5Nqne3&B9}Wj>4m^Oa-~%s!o!9Vx&z?Q5^BS5%Z{S51zy-LWKQdaW-vdqGt)HoW z$83;N@cmUY=vyZoUN^f`IP~0L7XOM_nSPM?=+UE3bOw0c<KUOAH{XHZch3tyc12In z1LWnFJ!aQ6=&bxM(H|VdgR8fj^$`wv`r-9MN0sk>+bm(D6RUllg<pA6WO-rVzJ22* z3#H&8J~(_l9*ocuIfDM};Q`&DIXpsckbms1Vm1fRFu{MLer*{XuF>b8lLhNQ;h=o7 z<?B7*AE)1LzUb9fwdgGT+B3}Cf8<~Lk<mRr7=ho*Ir0xbkSk=v<ARPOXUILe;Yaj> zcJoJOHwlLuB>z7Xy}@B{z1e_2wBnE;T~MCfdTnyIV@EsFdH}y{@e<|u`-u-_=oz-9 zJv@;2NS=7PVub$upM8O6u``iv_@~lQaQLIy^}^v=;jmgi$=iwcjqX`?5@dRO_uY4< zedf3h4!(Ol+T)4GB@&M#M~>LrFE6*JXWd}SCa2hMC#5<NM|@`Xt53{+(PZ{>aCkqg z&ytxWug};d=`%K|r}O}Q?k#yjr;n-5)(#F1e(`&uau@Nk>!sj=J!JGaFe3NJbu=Dc zzL0ffsqyzOTGjL<3rrhqPZlIQ96;dLpPStp#v!xOY^Y$$5Dsnn3=Uy^z8sq*oBHpv zN&hAsVryG%tEQO~g@c1XK0dz9{%enCpFe>g=r8+$=Lh=0wbv8im)*0?kKS(2%}lbD z(+69na9A!JXxXx#BZWhjZ2T`jHUkH*&*0$o8Jl$F??m_4%q{^3(Le5GvthDRYHzg% zzwEt}f9YBo{t!Cn@$k671NMVRo_Jgsfgd~h(wsC4-ZjXc6%J1ehl;7K@E3e-HtGxE z&}?>#a6q52NjJzQd3^>4ug~D%^%<MgYrTX2&(ZLsXY4QXv)Vd1_#Tai*Of>-cnuD; zb;Sd0&8&eI{I5aw%nadBIM@#E+iUMhox$NY;h-9u-4dbC*d+8hO}c<hN|a4vfA%)% z;!S23w!$B6mwEud{QHId`t^&Kk5URA*hNNU20ZY27{P`2*lWFC#{bxF@fm2Zlnk)7 zMFZ@4;ZP$SRtbk!ADd(czLxNGqJ1trbH$TH_GjVXeT<7Ywmuw9f9!>q|A7Ms_JkL{ zqp!dXY{2eu!S-YCiEFT%>{aY-w2gBU?PcNcQqe$LJ!_Cv3x{ZZend8DNp6}goN$fZ zJ8p>0yK{&Y-=1y@rj4*Y@6{VPMEV#`pJkJ1?ZGd5@8n<blmR<%F#;2CqYu#8^8_0o z=^x_{fg|$&+P%rPK{(V3hZkoH2k8R(3=ZfsHt8|hRd84g4&$$}`D2B{9qCqd`?U@S z;KavZfA&5`KkaX7C&)iGgFOsCg1&0iDtqp^=M2Bpch3*Tj`T!%S*w3C+qTT>Z?Db~ z4$_4eghME-&)B4jY00*HO0q4@PqBydQf<-1G+Q7X?hy`ig~M#&K#TS<ek0vbJHh?{ zzhuG5zvQ|UT<*E&9;>ddcKCQ47*B==I3Zg++x+$mwzagsZ59seg@e;);ebu@`dluX z^u&}Ddo(}Q9uf{^!r^}5aJS@WPENW56q}S5;bXiYy;nQI{sX_4|3QNW^#lgs)Txj* z9N<eN9v$KGm%kjfkG3qcHx>@DP4g0MeORBdNnW3yl1&1K$HO>0C^;&dkY@LdYsCRT zosS`Zd#L(y*{fq_+oNQk%O3Dt`|lE+W4IwNFK-*LqL1i6QBje@AyU4gaf#*$*VqJf z;Z*It*Mn_72KxL!Sf8Vw>(KuJet4gekpUm<nP;AHz85}?@&i^_SlEU`M|gC^6X3xn zy@*XZl|BmxT069E@2wutNoQ@g$adPr7hi0*-FBPlY=+_26%-WM)TvW#;lhPZua)v~ zb_ZXmJzStYdjqx#UkHDccz}jJ?^rp_jviLL5h!%~bEJ>)SXiGIOBZPE0qfsWuHroT z`oGk*gTHt0-dF2rPo92S@C*6Tjt`ZUl{ReHFw@y0d*FcwOf@BY_~C~gJjD0V8ywgR z*e8%HIx!45U{8@l_Cjd%`hs+OrwaHF*|w(&zxOeu3-}o5Gd5}2jGqJkDdR8Y{_ayL zbPn<(Szn^Kzn{**R7;0V|1<q2i%BPp>l<&p(fK_(59917umdYR0terD4|*ee#5wqO z@B)26r=32F5A4rgpRq}i`uzKKEARs!_q6V4957(O!#dOB@*u1o)@9QfMbnuiQ%=h+ zx#SX)Z|z_K4*ZSC4i3J1JQ$gej!gNvv8{htKH6TrJ5~F0tB=9{{M_szw(g-j?ZfSB z?a;x4JH_u#=O1*I(n?E9_v?(3&7C{<;D`|;Y~sX;j_!&poh*R^dO_X?*uWzi7jh8n z!^k~861qY@Vw2)Y-or;?p8%J3nn>{79{6>-`5)$z?v^V@*?8AocO3&xof~x>o%T3f zdg-O6vulnI>>0>BzK_QPc!3|82Tx)HM)Uz4K_5I%7?DAI6TkNmdlNrPUQR3jiEbhP zlBEir7kd{yRYlxp&6?%-01nty<(4fgE6cqP55U29aPV<2F(p1Wau1x0$Pl=B`Qm+i 
zDtvC)k_VN0x?1N97ZF2r1%Bk7jx3RH1}9)6=d81LPJf3C8DgoasixB@jt~5wXFLv! zUamY(yq=&R!0t!nh?oJn!ViRZKEEOODYyN-dS{xmpTc{gN&xYqVJZ5&VZ#Q81N0$p zDE)9aWM^mF(4j+}Y^f&i;D^@G99)1KJ;46(J~0CFgRi|wdVmg~6U>VZ7VO6~Pign= z-BZQ)O68rJ1vE7u>N=fi&VTSSi2vj13OtRCjSe31*5q4os@Um+_-&FmCj;zJ(3$(` zT!RC46q;iH*Q{A%!C=tsfuT^yJx7i}bZVAu`?dDSS%Q75-kUF=se5#`e$xYZ@qe)Y z(#JBc(Kkk5f$!J_$*J=tR3~w^Rl4r>H*5{^jm!Z*dVpTS6XXOQkk8`(zVVuQ;Su~` zKNr4F3vTQtb;8az2fvq7#z+|k2j77WypSPuVgCI2CYxpK(~%9>eCHYL47P}d{16k- zfgQY7tXM&gv0NXMKV8_t(ZTQif6==X9K2ohxOf~Gp*OZkd$)@dkSX@sNcsXR*T4^N zybnQL2f0U<pgB4uo3T`IbOrV{_#@>X8xhHm_ILtb<k&}!9BDe2>*U+hJaXhdWC>a4 zef9ug_w^CQNXyRvb_YLe`1n_QPZ@H_9t0lfAaufhVC&eAB6$LB*REY_%a$#3@(jIw z2XABt-NA3-8ku1}_5#_=Dt(ngSLqJyv>w0{xi130@8IBjM?7H;or!kyAY(iOZorFd zZIbN-7Z1Dq)0JJ6_g1LC|LgKU(k~bPYX2?;7wFIE@nH0LFhW!83%WpuH@pWQu$|~B zJi``IOP~e?F3??Tbj8km5ApG@9Q<DX<ty}rPV|m=f`5<y176hS=*Sl1n{U49>@GAw zUy*m<M-L>APigrxm6=EkdO%N~|CF6*%m07_dnf)2_76E=KVt8M#>AY^9zNg;cs_8A z&BxYa#}rezx`uH6p<xFPC;ySM5iRHb+UsrPh^%wXJ@5jVhtA-LtV@oL$wz1wuh>^P zV{tlr6Y!m?{gZCs48mWtvj%4$IHz&O%}TVnY??m(Xv{fwEM@xiCVf6<U+bt>?)rxt z&kE1iEj;SeFIrFS3`akopFaZb5~kPn=J42bCS!3JPHP>zg~zkP<2m8+yzm$s9#g_& zad@=Wu^VGA!4s#Rs@}zOT4(D~{98Kmd{SRgoSvhuEVvy~k8E61QHjn6YPxnCYjsyF zn4mrXhMb(7U&!B2SI$38?<c6^bN{n?o18NDA6v`{RqM&v-CAQK7fel|k7_$3l^fpu ztplke7{K*k^=q{K728dgPPbss<Z~XBz5QH!!T-uGeIWn5QhBoR@`2NJ|CQ?T@;AP| zQ#B6iWaL6qmA|FtO}+Tt{boCqmDA^uXkY36leM49h6l7}3-%KmNxTFMbYH^=`~1gb zSNF^2?v&kpg^8(|Qg5WrMSX>wIQ5}j2h8e)dnanWvDj(l6|$9k8V(HjPQ;$<kJxqg zB5XCjEPe|A<2CI+RjZ=rNv(|<59b%CQBmKa?oW-4b~5Dkek?J7V$eLr3**^;+1H43 zSr<PVnuPIpzEkUrt$(SPjZp5EdLOm27xzW}x82tiu_EQctxr6Nk3a_oI`*9oP1wJ_ z{L=?kHE+19OOOwzjs8;guDz{w)0+hYbzgsefSRwDN9w)Q8Iv^bI$kDY*;5q1X3NhV z4jdj9_>T_^EbKq)AIo()ZEDWs<o)`<@EgHE-IMyclSiFbpw>o>2YIAMMXi%M;jPjU zwYo4*V%f))cgfS|@5Zw?(>)CM9MA;cI#@Ks)%vK<kVmAkHl5ll^<!#&k@Cn{7HTxq z`lvA>kC$z5j~?>t;|KZrBKs<Sn)ly4EIdJ8nOtB%`AllQ`YfHzk&Z>efIJS5l1H82 zcRImY8sssmP8ZAis;6WtuP{6k4%SA_8EMqpsF6@Fq)xT=(fm`^4EG*3o6@M7r(obb z4?023mge!tub4S=W?pe|@pybtx`%_j3ACV*U#HeYje}YlbtdW?)XAtZQQM$KMty14 znAZN=x#J8Ns5K?2?&@pb%on*nHh_+ALyv?5Sm6Jb`6))dYmHzar%7&{8VmABos0Sk zwMJLt3a>x=_Mfqo(+Mw+gGC3mHvZ$AsSc5?&s`4377*{_pMy8BKm*2i0^?lWka`uh zTWW^Xo>zqHh3EwJ5o(1cs!@3wuvnyxrgp2=CjaP`KHHlo`#&CC#O@Fm5Zgf$Y!CD} z(EPq_dnnV@3CK?)kJRwVO?#c7&P1(`I$3cT1{TX~Y^~#u)Y|{^XWnVEL9zIk$RK)% z&(8=Pbm&0`4(J3QI%+$&hFr~*T3xANpk5l)POyKhzhJ?F>?famayYdi{Bhub=ET*+ z)Z|Tp16p8@_#ch>E$0BdPP8YF+!vO$_J&w|Y~?HRlyAv}4vQ8oa&`~9%l?P`M;4$5 zvdG%NLPICM`uJV@^5fmoi70uLP9Tr8_7?W?PdrYZgPbljCccs`x_ArS1SZd)NZ!DU zH<o1DzE9t^uRq!Sy*x@Ms9z(Gj~Cs_^6l>bvaR^rv8=CJ#78M9cy-fVZCc-{VE5mD zzl-mH3A>0M00%UHKhS1>^9S};#RPkKUYa`#;N`J$=Fe=+Bl-5lr*cldm$l=_ker;{ zSNYzV;GsI^q|u{C|4yIZwh0p^xID1VNV?pGkA0vy^nm`vcgO<t0RHI4oVxEF$yVfE zd))e5s4n&^>Ai~s+3TsxP+O9pWjgom@b>W+aL}O#a76P4_}I6w9mJ)Y{%UpnyN+5c zF{u2Hz0?w^OXzc^M*bBTCQqL1=#D*LA7pMX1FVHz@Vcsf;;8B}cS@EE<Rcx^qMUg; zRZJBNtwj69W5<qtlNt{>dVS#E;05(H?aOW*`~m3B`gD9Ud^A65uR1Jx-le%#s9y1< z)+Q7<Rde!(4l)<M9rPxbiwu%Wr>=p10H@Dg!2|39&jAaxz;0tRfpgQQO^dYfKb~rF z{om=o>bKeY9Nlo%W={AF4Emk`)AtJ4O*h@-U;_p^_n-sZhuq*NVJ|qBs9bo3<|S6> zOmp@Z`=fL8<C(8fIeGScY!0;reP@BIF{2m2M}rPN$BVyAO$EPQI6R?6JG(aUx%Is~ zLsvR*&{>cDNZ*m*Yy)Ri!5x`{U(g!7vB}E4tUS%N-TJH}TjT71dvJIkSf4pCecsXK zit!7{IU$c{u(n$teEt55EP*d`(y@8y9J&pj$Qb8$ID4Vb1vvZAMZ8&?77PB-bdRJz zJfLHHv3>Aa-wA>IeW`rdeVT*(Nmm=6H95bF9sN3$eLE>!M`QJw-EM84*NrU^E!4ZI z$8@4|pMNG+kI+-}p6|taO}CPEzJsOYgwO59#hk3^tQK>(+a!-eR;*a@qG~jUWTU>) zxsV4}u3UMX-BAtTYt1p~*=L`1@9Unk1A)Nx%4ct^si|@N^m`b5mQCLsxft~{oqs0J zdPH+HN&XI?hs4InYfv%B&GPFw*O;Q%>T$(^YU}(Q+>1=(!+C$k>lC)^HpNZEOym&W zky~?>&ULDJ|2T&F0c)Xa*d;ph%Cod=g(t+o>snnh_g?tVL5*v%P3RG@;yWPQ^{+l< 
zOD47YhGW9<8}TFYE3pExN`hjg#{?I(R$_8sNB&v&nWF2RkF#Gf*yDnkIf%uHy~srn z_Yrp{D()mljZX#K*ex0{3^CLTk4!e^h-_cT#FEn?k3h@@{ooUO5PlGC{XN$@e~b8o z*n`*yUzd2J`ic9DIf%nE1PA=nch9&wH#Q79G`(J7YfA>ZcoN?ozn!>&*lBjob#9HD z8_iZJuGQzft+=?@oe`m~1}~8pPk(o_{?u<-?fm}PihaTjWM6`Q+83PNq9(`r0iN|T z_k2;Z?QPoW=t&H9_c*uy0WUwu9=0C4126U2bw^WdHN4!jwbItiz0RHIUOwYS+xAKo z%fIBG*JoFpuZI0XZm`?<T<`;X8VL)p*~|U2jNI?}5AD3))u?@v_1TYz6PO#jti8WD z@_Fnua5E3I(Epu2v1gM{;#uNs_HNZ;j(e7Qiq5G{J6}EKtZ<Lfc(U&A;ePsg+}llw z%(ysxZ@-Ks-?-K-M!!(vuK2#SztR7A?_Bq7YiH?ytyg^4+S&R>Bd+4yH?93`_)cBv zp1%v<8Rx!b?VOmT?1F-v+|d(f#?8o`m{%}3e%h40fdym6=HzA<^v)eOX3C_3Nq0=` zJ!Vqw!0dwD%L}iJkIT)@8+S)ef&Z<`!SPpKo)Dk>4?l^EOPV@mT7mmj%udeMxAi$a zS&%bkntot&rkf{DEon;5<Y`(Z=l0Ag;|le=ig)G|wEZ*k-`CF2qx|UT(44}YiE$J8 zJ2*bO;QGA6NfUCW#K%n=mo{dMem-_^{2kd73v%L<ldfo+`}l|3=8w4Jgw>O-I1bjN zE839O<4IR|dM0P4Wu~R4Uw_keSy|y1KVJWE^w3xQ#E{gktE{hVsBEfisf-DX4de&z ztzKHas=BtiuDYSRxw@tL+?wt+y=oF`GHSAF#@5WJxwmF%&8nK(n!1{X8ot=EUUS5i zCzO|0t*TmGRa>>Ws;;WOs-dc>s<|qzIzdmS>e);^Jxb5#Ylafdu~@T|Yo67bX|v|4 z*KAES%{BXKT54i~=LXLYb`Qn{dj%7MiNVxhMlds&6&w{D8_W;R2$lqw2Fru1f~$kI z!Og+CV12M5*c5CI?hCdAV?yVK&JT4D#f5r>5<-cg)KEq!Gn5q?6&f4L56uXbgzgP3 z4lNCphgOAFhiXHcLv^A0P(!FG)Ep8bsrv444O!(g%9obcmN%4luSlp!tVpfMsK~6y zsu)!{*3qiDa-XBuxq<To-2-ufUV+3wY9J$!8OREZa#UO#SQ^koKeit!@c)zo{|Cco B=L-M; literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pip/_vendor/distlib/wheel.py b/env/lib/python3.6/site-packages/pip/_vendor/distlib/wheel.py new file mode 100644 index 0000000..7737223 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/distlib/wheel.py @@ -0,0 +1,984 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2013-2017 Vinay Sajip. +# Licensed to the Python Software Foundation under a contributor agreement. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +from __future__ import unicode_literals + +import base64 +import codecs +import datetime +import distutils.util +from email import message_from_file +import hashlib +import imp +import json +import logging +import os +import posixpath +import re +import shutil +import sys +import tempfile +import zipfile + +from . 
import __version__, DistlibException +from .compat import sysconfig, ZipFile, fsdecode, text_type, filter +from .database import InstalledDistribution +from .metadata import Metadata, METADATA_FILENAME, WHEEL_METADATA_FILENAME +from .util import (FileOperator, convert_path, CSVReader, CSVWriter, Cache, + cached_property, get_cache_base, read_exports, tempdir) +from .version import NormalizedVersion, UnsupportedVersionError + +logger = logging.getLogger(__name__) + +cache = None # created when needed + +if hasattr(sys, 'pypy_version_info'): # pragma: no cover + IMP_PREFIX = 'pp' +elif sys.platform.startswith('java'): # pragma: no cover + IMP_PREFIX = 'jy' +elif sys.platform == 'cli': # pragma: no cover + IMP_PREFIX = 'ip' +else: + IMP_PREFIX = 'cp' + +VER_SUFFIX = sysconfig.get_config_var('py_version_nodot') +if not VER_SUFFIX: # pragma: no cover + VER_SUFFIX = '%s%s' % sys.version_info[:2] +PYVER = 'py' + VER_SUFFIX +IMPVER = IMP_PREFIX + VER_SUFFIX + +ARCH = distutils.util.get_platform().replace('-', '_').replace('.', '_') + +ABI = sysconfig.get_config_var('SOABI') +if ABI and ABI.startswith('cpython-'): + ABI = ABI.replace('cpython-', 'cp') +else: + def _derive_abi(): + parts = ['cp', VER_SUFFIX] + if sysconfig.get_config_var('Py_DEBUG'): + parts.append('d') + if sysconfig.get_config_var('WITH_PYMALLOC'): + parts.append('m') + if sysconfig.get_config_var('Py_UNICODE_SIZE') == 4: + parts.append('u') + return ''.join(parts) + ABI = _derive_abi() + del _derive_abi + +FILENAME_RE = re.compile(r''' +(?P<nm>[^-]+) +-(?P<vn>\d+[^-]*) +(-(?P<bn>\d+[^-]*))? +-(?P<py>\w+\d+(\.\w+\d+)*) +-(?P<bi>\w+) +-(?P<ar>\w+(\.\w+)*) +\.whl$ +''', re.IGNORECASE | re.VERBOSE) + +NAME_VERSION_RE = re.compile(r''' +(?P<nm>[^-]+) +-(?P<vn>\d+[^-]*) +(-(?P<bn>\d+[^-]*))?$ +''', re.IGNORECASE | re.VERBOSE) + +SHEBANG_RE = re.compile(br'\s*#![^\r\n]*') +SHEBANG_DETAIL_RE = re.compile(br'^(\s*#!("[^"]+"|\S+))\s+(.*)$') +SHEBANG_PYTHON = b'#!python' +SHEBANG_PYTHONW = b'#!pythonw' + +if os.sep == '/': + to_posix = lambda o: o +else: + to_posix = lambda o: o.replace(os.sep, '/') + + +class Mounter(object): + def __init__(self): + self.impure_wheels = {} + self.libs = {} + + def add(self, pathname, extensions): + self.impure_wheels[pathname] = extensions + self.libs.update(extensions) + + def remove(self, pathname): + extensions = self.impure_wheels.pop(pathname) + for k, v in extensions: + if k in self.libs: + del self.libs[k] + + def find_module(self, fullname, path=None): + if fullname in self.libs: + result = self + else: + result = None + return result + + def load_module(self, fullname): + if fullname in sys.modules: + result = sys.modules[fullname] + else: + if fullname not in self.libs: + raise ImportError('unable to find extension for %s' % fullname) + result = imp.load_dynamic(fullname, self.libs[fullname]) + result.__loader__ = self + parts = fullname.rsplit('.', 1) + if len(parts) > 1: + result.__package__ = parts[0] + return result + +_hook = Mounter() + + +class Wheel(object): + """ + Class to build and install from Wheel files (PEP 427). + """ + + wheel_version = (1, 1) + hash_kind = 'sha256' + + def __init__(self, filename=None, sign=False, verify=False): + """ + Initialise an instance using a (valid) filename. 
+ """ + self.sign = sign + self.should_verify = verify + self.buildver = '' + self.pyver = [PYVER] + self.abi = ['none'] + self.arch = ['any'] + self.dirname = os.getcwd() + if filename is None: + self.name = 'dummy' + self.version = '0.1' + self._filename = self.filename + else: + m = NAME_VERSION_RE.match(filename) + if m: + info = m.groupdict('') + self.name = info['nm'] + # Reinstate the local version separator + self.version = info['vn'].replace('_', '-') + self.buildver = info['bn'] + self._filename = self.filename + else: + dirname, filename = os.path.split(filename) + m = FILENAME_RE.match(filename) + if not m: + raise DistlibException('Invalid name or ' + 'filename: %r' % filename) + if dirname: + self.dirname = os.path.abspath(dirname) + self._filename = filename + info = m.groupdict('') + self.name = info['nm'] + self.version = info['vn'] + self.buildver = info['bn'] + self.pyver = info['py'].split('.') + self.abi = info['bi'].split('.') + self.arch = info['ar'].split('.') + + @property + def filename(self): + """ + Build and return a filename from the various components. + """ + if self.buildver: + buildver = '-' + self.buildver + else: + buildver = '' + pyver = '.'.join(self.pyver) + abi = '.'.join(self.abi) + arch = '.'.join(self.arch) + # replace - with _ as a local version separator + version = self.version.replace('-', '_') + return '%s-%s%s-%s-%s-%s.whl' % (self.name, version, buildver, + pyver, abi, arch) + + @property + def exists(self): + path = os.path.join(self.dirname, self.filename) + return os.path.isfile(path) + + @property + def tags(self): + for pyver in self.pyver: + for abi in self.abi: + for arch in self.arch: + yield pyver, abi, arch + + @cached_property + def metadata(self): + pathname = os.path.join(self.dirname, self.filename) + name_ver = '%s-%s' % (self.name, self.version) + info_dir = '%s.dist-info' % name_ver + wrapper = codecs.getreader('utf-8') + with ZipFile(pathname, 'r') as zf: + wheel_metadata = self.get_wheel_metadata(zf) + wv = wheel_metadata['Wheel-Version'].split('.', 1) + file_version = tuple([int(i) for i in wv]) + if file_version < (1, 1): + fns = [WHEEL_METADATA_FILENAME, METADATA_FILENAME, 'METADATA'] + else: + fns = [WHEEL_METADATA_FILENAME, METADATA_FILENAME] + result = None + for fn in fns: + try: + metadata_filename = posixpath.join(info_dir, fn) + with zf.open(metadata_filename) as bf: + wf = wrapper(bf) + result = Metadata(fileobj=wf) + if result: + break + except KeyError: + pass + if not result: + raise ValueError('Invalid wheel, because metadata is ' + 'missing: looked in %s' % ', '.join(fns)) + return result + + def get_wheel_metadata(self, zf): + name_ver = '%s-%s' % (self.name, self.version) + info_dir = '%s.dist-info' % name_ver + metadata_filename = posixpath.join(info_dir, 'WHEEL') + with zf.open(metadata_filename) as bf: + wf = codecs.getreader('utf-8')(bf) + message = message_from_file(wf) + return dict(message) + + @cached_property + def info(self): + pathname = os.path.join(self.dirname, self.filename) + with ZipFile(pathname, 'r') as zf: + result = self.get_wheel_metadata(zf) + return result + + def process_shebang(self, data): + m = SHEBANG_RE.match(data) + if m: + end = m.end() + shebang, data_after_shebang = data[:end], data[end:] + # Preserve any arguments after the interpreter + if b'pythonw' in shebang.lower(): + shebang_python = SHEBANG_PYTHONW + else: + shebang_python = SHEBANG_PYTHON + m = SHEBANG_DETAIL_RE.match(shebang) + if m: + args = b' ' + m.groups()[-1] + else: + args = b'' + shebang = shebang_python 
+ args + data = shebang + data_after_shebang + else: + cr = data.find(b'\r') + lf = data.find(b'\n') + if cr < 0 or cr > lf: + term = b'\n' + else: + if data[cr:cr + 2] == b'\r\n': + term = b'\r\n' + else: + term = b'\r' + data = SHEBANG_PYTHON + term + data + return data + + def get_hash(self, data, hash_kind=None): + if hash_kind is None: + hash_kind = self.hash_kind + try: + hasher = getattr(hashlib, hash_kind) + except AttributeError: + raise DistlibException('Unsupported hash algorithm: %r' % hash_kind) + result = hasher(data).digest() + result = base64.urlsafe_b64encode(result).rstrip(b'=').decode('ascii') + return hash_kind, result + + def write_record(self, records, record_path, base): + records = list(records) # make a copy for sorting + p = to_posix(os.path.relpath(record_path, base)) + records.append((p, '', '')) + records.sort() + with CSVWriter(record_path) as writer: + for row in records: + writer.writerow(row) + + def write_records(self, info, libdir, archive_paths): + records = [] + distinfo, info_dir = info + hasher = getattr(hashlib, self.hash_kind) + for ap, p in archive_paths: + with open(p, 'rb') as f: + data = f.read() + digest = '%s=%s' % self.get_hash(data) + size = os.path.getsize(p) + records.append((ap, digest, size)) + + p = os.path.join(distinfo, 'RECORD') + self.write_record(records, p, libdir) + ap = to_posix(os.path.join(info_dir, 'RECORD')) + archive_paths.append((ap, p)) + + def build_zip(self, pathname, archive_paths): + with ZipFile(pathname, 'w', zipfile.ZIP_DEFLATED) as zf: + for ap, p in archive_paths: + logger.debug('Wrote %s to %s in wheel', p, ap) + zf.write(p, ap) + + def build(self, paths, tags=None, wheel_version=None): + """ + Build a wheel from files in specified paths, and use any specified tags + when determining the name of the wheel. + """ + if tags is None: + tags = {} + + libkey = list(filter(lambda o: o in paths, ('purelib', 'platlib')))[0] + if libkey == 'platlib': + is_pure = 'false' + default_pyver = [IMPVER] + default_abi = [ABI] + default_arch = [ARCH] + else: + is_pure = 'true' + default_pyver = [PYVER] + default_abi = ['none'] + default_arch = ['any'] + + self.pyver = tags.get('pyver', default_pyver) + self.abi = tags.get('abi', default_abi) + self.arch = tags.get('arch', default_arch) + + libdir = paths[libkey] + + name_ver = '%s-%s' % (self.name, self.version) + data_dir = '%s.data' % name_ver + info_dir = '%s.dist-info' % name_ver + + archive_paths = [] + + # First, stuff which is not in site-packages + for key in ('data', 'headers', 'scripts'): + if key not in paths: + continue + path = paths[key] + if os.path.isdir(path): + for root, dirs, files in os.walk(path): + for fn in files: + p = fsdecode(os.path.join(root, fn)) + rp = os.path.relpath(p, path) + ap = to_posix(os.path.join(data_dir, key, rp)) + archive_paths.append((ap, p)) + if key == 'scripts' and not p.endswith('.exe'): + with open(p, 'rb') as f: + data = f.read() + data = self.process_shebang(data) + with open(p, 'wb') as f: + f.write(data) + + # Now, stuff which is in site-packages, other than the + # distinfo stuff. 
+ path = libdir + distinfo = None + for root, dirs, files in os.walk(path): + if root == path: + # At the top level only, save distinfo for later + # and skip it for now + for i, dn in enumerate(dirs): + dn = fsdecode(dn) + if dn.endswith('.dist-info'): + distinfo = os.path.join(root, dn) + del dirs[i] + break + assert distinfo, '.dist-info directory expected, not found' + + for fn in files: + # comment out next suite to leave .pyc files in + if fsdecode(fn).endswith(('.pyc', '.pyo')): + continue + p = os.path.join(root, fn) + rp = to_posix(os.path.relpath(p, path)) + archive_paths.append((rp, p)) + + # Now distinfo. Assumed to be flat, i.e. os.listdir is enough. + files = os.listdir(distinfo) + for fn in files: + if fn not in ('RECORD', 'INSTALLER', 'SHARED', 'WHEEL'): + p = fsdecode(os.path.join(distinfo, fn)) + ap = to_posix(os.path.join(info_dir, fn)) + archive_paths.append((ap, p)) + + wheel_metadata = [ + 'Wheel-Version: %d.%d' % (wheel_version or self.wheel_version), + 'Generator: distlib %s' % __version__, + 'Root-Is-Purelib: %s' % is_pure, + ] + for pyver, abi, arch in self.tags: + wheel_metadata.append('Tag: %s-%s-%s' % (pyver, abi, arch)) + p = os.path.join(distinfo, 'WHEEL') + with open(p, 'w') as f: + f.write('\n'.join(wheel_metadata)) + ap = to_posix(os.path.join(info_dir, 'WHEEL')) + archive_paths.append((ap, p)) + + # Now, at last, RECORD. + # Paths in here are archive paths - nothing else makes sense. + self.write_records((distinfo, info_dir), libdir, archive_paths) + # Now, ready to build the zip file + pathname = os.path.join(self.dirname, self.filename) + self.build_zip(pathname, archive_paths) + return pathname + + def install(self, paths, maker, **kwargs): + """ + Install a wheel to the specified paths. If kwarg ``warner`` is + specified, it should be a callable, which will be called with two + tuples indicating the wheel version of this software and the wheel + version in the file, if there is a discrepancy in the versions. + This can be used to issue any warnings to raise any exceptions. + If kwarg ``lib_only`` is True, only the purelib/platlib files are + installed, and the headers, scripts, data and dist-info metadata are + not written. + + The return value is a :class:`InstalledDistribution` instance unless + ``options.lib_only`` is True, in which case the return value is ``None``. 
+ """ + + dry_run = maker.dry_run + warner = kwargs.get('warner') + lib_only = kwargs.get('lib_only', False) + + pathname = os.path.join(self.dirname, self.filename) + name_ver = '%s-%s' % (self.name, self.version) + data_dir = '%s.data' % name_ver + info_dir = '%s.dist-info' % name_ver + + metadata_name = posixpath.join(info_dir, METADATA_FILENAME) + wheel_metadata_name = posixpath.join(info_dir, 'WHEEL') + record_name = posixpath.join(info_dir, 'RECORD') + + wrapper = codecs.getreader('utf-8') + + with ZipFile(pathname, 'r') as zf: + with zf.open(wheel_metadata_name) as bwf: + wf = wrapper(bwf) + message = message_from_file(wf) + wv = message['Wheel-Version'].split('.', 1) + file_version = tuple([int(i) for i in wv]) + if (file_version != self.wheel_version) and warner: + warner(self.wheel_version, file_version) + + if message['Root-Is-Purelib'] == 'true': + libdir = paths['purelib'] + else: + libdir = paths['platlib'] + + records = {} + with zf.open(record_name) as bf: + with CSVReader(stream=bf) as reader: + for row in reader: + p = row[0] + records[p] = row + + data_pfx = posixpath.join(data_dir, '') + info_pfx = posixpath.join(info_dir, '') + script_pfx = posixpath.join(data_dir, 'scripts', '') + + # make a new instance rather than a copy of maker's, + # as we mutate it + fileop = FileOperator(dry_run=dry_run) + fileop.record = True # so we can rollback if needed + + bc = not sys.dont_write_bytecode # Double negatives. Lovely! + + outfiles = [] # for RECORD writing + + # for script copying/shebang processing + workdir = tempfile.mkdtemp() + # set target dir later + # we default add_launchers to False, as the + # Python Launcher should be used instead + maker.source_dir = workdir + maker.target_dir = None + try: + for zinfo in zf.infolist(): + arcname = zinfo.filename + if isinstance(arcname, text_type): + u_arcname = arcname + else: + u_arcname = arcname.decode('utf-8') + # The signature file won't be in RECORD, + # and we don't currently don't do anything with it + if u_arcname.endswith('/RECORD.jws'): + continue + row = records[u_arcname] + if row[2] and str(zinfo.file_size) != row[2]: + raise DistlibException('size mismatch for ' + '%s' % u_arcname) + if row[1]: + kind, value = row[1].split('=', 1) + with zf.open(arcname) as bf: + data = bf.read() + _, digest = self.get_hash(data, kind) + if digest != value: + raise DistlibException('digest mismatch for ' + '%s' % arcname) + + if lib_only and u_arcname.startswith((info_pfx, data_pfx)): + logger.debug('lib_only: skipping %s', u_arcname) + continue + is_script = (u_arcname.startswith(script_pfx) + and not u_arcname.endswith('.exe')) + + if u_arcname.startswith(data_pfx): + _, where, rp = u_arcname.split('/', 2) + outfile = os.path.join(paths[where], convert_path(rp)) + else: + # meant for site-packages. 
+ if u_arcname in (wheel_metadata_name, record_name): + continue + outfile = os.path.join(libdir, convert_path(u_arcname)) + if not is_script: + with zf.open(arcname) as bf: + fileop.copy_stream(bf, outfile) + outfiles.append(outfile) + # Double check the digest of the written file + if not dry_run and row[1]: + with open(outfile, 'rb') as bf: + data = bf.read() + _, newdigest = self.get_hash(data, kind) + if newdigest != digest: + raise DistlibException('digest mismatch ' + 'on write for ' + '%s' % outfile) + if bc and outfile.endswith('.py'): + try: + pyc = fileop.byte_compile(outfile) + outfiles.append(pyc) + except Exception: + # Don't give up if byte-compilation fails, + # but log it and perhaps warn the user + logger.warning('Byte-compilation failed', + exc_info=True) + else: + fn = os.path.basename(convert_path(arcname)) + workname = os.path.join(workdir, fn) + with zf.open(arcname) as bf: + fileop.copy_stream(bf, workname) + + dn, fn = os.path.split(outfile) + maker.target_dir = dn + filenames = maker.make(fn) + fileop.set_executable_mode(filenames) + outfiles.extend(filenames) + + if lib_only: + logger.debug('lib_only: returning None') + dist = None + else: + # Generate scripts + + # Try to get pydist.json so we can see if there are + # any commands to generate. If this fails (e.g. because + # of a legacy wheel), log a warning but don't give up. + commands = None + file_version = self.info['Wheel-Version'] + if file_version == '1.0': + # Use legacy info + ep = posixpath.join(info_dir, 'entry_points.txt') + try: + with zf.open(ep) as bwf: + epdata = read_exports(bwf) + commands = {} + for key in ('console', 'gui'): + k = '%s_scripts' % key + if k in epdata: + commands['wrap_%s' % key] = d = {} + for v in epdata[k].values(): + s = '%s:%s' % (v.prefix, v.suffix) + if v.flags: + s += ' %s' % v.flags + d[v.name] = s + except Exception: + logger.warning('Unable to read legacy script ' + 'metadata, so cannot generate ' + 'scripts') + else: + try: + with zf.open(metadata_name) as bwf: + wf = wrapper(bwf) + commands = json.load(wf).get('extensions') + if commands: + commands = commands.get('python.commands') + except Exception: + logger.warning('Unable to read JSON metadata, so ' + 'cannot generate scripts') + if commands: + console_scripts = commands.get('wrap_console', {}) + gui_scripts = commands.get('wrap_gui', {}) + if console_scripts or gui_scripts: + script_dir = paths.get('scripts', '') + if not os.path.isdir(script_dir): + raise ValueError('Valid script path not ' + 'specified') + maker.target_dir = script_dir + for k, v in console_scripts.items(): + script = '%s = %s' % (k, v) + filenames = maker.make(script) + fileop.set_executable_mode(filenames) + + if gui_scripts: + options = {'gui': True } + for k, v in gui_scripts.items(): + script = '%s = %s' % (k, v) + filenames = maker.make(script, options) + fileop.set_executable_mode(filenames) + + p = os.path.join(libdir, info_dir) + dist = InstalledDistribution(p) + + # Write SHARED + paths = dict(paths) # don't change passed in dict + del paths['purelib'] + del paths['platlib'] + paths['lib'] = libdir + p = dist.write_shared_locations(paths, dry_run) + if p: + outfiles.append(p) + + # Write RECORD + dist.write_installed_files(outfiles, paths['prefix'], + dry_run) + return dist + except Exception: # pragma: no cover + logger.exception('installation failed.') + fileop.rollback() + raise + finally: + shutil.rmtree(workdir) + + def _get_dylib_cache(self): + global cache + if cache is None: + # Use native string to avoid issues on 2.x: 
see Python #20140. + base = os.path.join(get_cache_base(), str('dylib-cache'), + sys.version[:3]) + cache = Cache(base) + return cache + + def _get_extensions(self): + pathname = os.path.join(self.dirname, self.filename) + name_ver = '%s-%s' % (self.name, self.version) + info_dir = '%s.dist-info' % name_ver + arcname = posixpath.join(info_dir, 'EXTENSIONS') + wrapper = codecs.getreader('utf-8') + result = [] + with ZipFile(pathname, 'r') as zf: + try: + with zf.open(arcname) as bf: + wf = wrapper(bf) + extensions = json.load(wf) + cache = self._get_dylib_cache() + prefix = cache.prefix_to_dir(pathname) + cache_base = os.path.join(cache.base, prefix) + if not os.path.isdir(cache_base): + os.makedirs(cache_base) + for name, relpath in extensions.items(): + dest = os.path.join(cache_base, convert_path(relpath)) + if not os.path.exists(dest): + extract = True + else: + file_time = os.stat(dest).st_mtime + file_time = datetime.datetime.fromtimestamp(file_time) + info = zf.getinfo(relpath) + wheel_time = datetime.datetime(*info.date_time) + extract = wheel_time > file_time + if extract: + zf.extract(relpath, cache_base) + result.append((name, dest)) + except KeyError: + pass + return result + + def is_compatible(self): + """ + Determine if a wheel is compatible with the running system. + """ + return is_compatible(self) + + def is_mountable(self): + """ + Determine if a wheel is asserted as mountable by its metadata. + """ + return True # for now - metadata details TBD + + def mount(self, append=False): + pathname = os.path.abspath(os.path.join(self.dirname, self.filename)) + if not self.is_compatible(): + msg = 'Wheel %s not compatible with this Python.' % pathname + raise DistlibException(msg) + if not self.is_mountable(): + msg = 'Wheel %s is marked as not mountable.' % pathname + raise DistlibException(msg) + if pathname in sys.path: + logger.debug('%s already in path', pathname) + else: + if append: + sys.path.append(pathname) + else: + sys.path.insert(0, pathname) + extensions = self._get_extensions() + if extensions: + if _hook not in sys.meta_path: + sys.meta_path.append(_hook) + _hook.add(pathname, extensions) + + def unmount(self): + pathname = os.path.abspath(os.path.join(self.dirname, self.filename)) + if pathname not in sys.path: + logger.debug('%s not in path', pathname) + else: + sys.path.remove(pathname) + if pathname in _hook.impure_wheels: + _hook.remove(pathname) + if not _hook.impure_wheels: + if _hook in sys.meta_path: + sys.meta_path.remove(_hook) + + def verify(self): + pathname = os.path.join(self.dirname, self.filename) + name_ver = '%s-%s' % (self.name, self.version) + data_dir = '%s.data' % name_ver + info_dir = '%s.dist-info' % name_ver + + metadata_name = posixpath.join(info_dir, METADATA_FILENAME) + wheel_metadata_name = posixpath.join(info_dir, 'WHEEL') + record_name = posixpath.join(info_dir, 'RECORD') + + wrapper = codecs.getreader('utf-8') + + with ZipFile(pathname, 'r') as zf: + with zf.open(wheel_metadata_name) as bwf: + wf = wrapper(bwf) + message = message_from_file(wf) + wv = message['Wheel-Version'].split('.', 1) + file_version = tuple([int(i) for i in wv]) + # TODO version verification + + records = {} + with zf.open(record_name) as bf: + with CSVReader(stream=bf) as reader: + for row in reader: + p = row[0] + records[p] = row + + for zinfo in zf.infolist(): + arcname = zinfo.filename + if isinstance(arcname, text_type): + u_arcname = arcname + else: + u_arcname = arcname.decode('utf-8') + if '..' 
in u_arcname: + raise DistlibException('invalid entry in ' + 'wheel: %r' % u_arcname) + + # The signature file won't be in RECORD, + # and we don't currently don't do anything with it + if u_arcname.endswith('/RECORD.jws'): + continue + row = records[u_arcname] + if row[2] and str(zinfo.file_size) != row[2]: + raise DistlibException('size mismatch for ' + '%s' % u_arcname) + if row[1]: + kind, value = row[1].split('=', 1) + with zf.open(arcname) as bf: + data = bf.read() + _, digest = self.get_hash(data, kind) + if digest != value: + raise DistlibException('digest mismatch for ' + '%s' % arcname) + + def update(self, modifier, dest_dir=None, **kwargs): + """ + Update the contents of a wheel in a generic way. The modifier should + be a callable which expects a dictionary argument: its keys are + archive-entry paths, and its values are absolute filesystem paths + where the contents the corresponding archive entries can be found. The + modifier is free to change the contents of the files pointed to, add + new entries and remove entries, before returning. This method will + extract the entire contents of the wheel to a temporary location, call + the modifier, and then use the passed (and possibly updated) + dictionary to write a new wheel. If ``dest_dir`` is specified, the new + wheel is written there -- otherwise, the original wheel is overwritten. + + The modifier should return True if it updated the wheel, else False. + This method returns the same value the modifier returns. + """ + + def get_version(path_map, info_dir): + version = path = None + key = '%s/%s' % (info_dir, METADATA_FILENAME) + if key not in path_map: + key = '%s/PKG-INFO' % info_dir + if key in path_map: + path = path_map[key] + version = Metadata(path=path).version + return version, path + + def update_version(version, path): + updated = None + try: + v = NormalizedVersion(version) + i = version.find('-') + if i < 0: + updated = '%s+1' % version + else: + parts = [int(s) for s in version[i + 1:].split('.')] + parts[-1] += 1 + updated = '%s+%s' % (version[:i], + '.'.join(str(i) for i in parts)) + except UnsupportedVersionError: + logger.debug('Cannot update non-compliant (PEP-440) ' + 'version %r', version) + if updated: + md = Metadata(path=path) + md.version = updated + legacy = not path.endswith(METADATA_FILENAME) + md.write(path=path, legacy=legacy) + logger.debug('Version updated from %r to %r', version, + updated) + + pathname = os.path.join(self.dirname, self.filename) + name_ver = '%s-%s' % (self.name, self.version) + info_dir = '%s.dist-info' % name_ver + record_name = posixpath.join(info_dir, 'RECORD') + with tempdir() as workdir: + with ZipFile(pathname, 'r') as zf: + path_map = {} + for zinfo in zf.infolist(): + arcname = zinfo.filename + if isinstance(arcname, text_type): + u_arcname = arcname + else: + u_arcname = arcname.decode('utf-8') + if u_arcname == record_name: + continue + if '..' in u_arcname: + raise DistlibException('invalid entry in ' + 'wheel: %r' % u_arcname) + zf.extract(zinfo, workdir) + path = os.path.join(workdir, convert_path(u_arcname)) + path_map[u_arcname] = path + + # Remember the version. + original_version, _ = get_version(path_map, info_dir) + # Files extracted. Call the modifier. + modified = modifier(path_map, **kwargs) + if modified: + # Something changed - need to build a new wheel. + current_version, path = get_version(path_map, info_dir) + if current_version and (current_version == original_version): + # Add or update local version to signify changes. 
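+                    # update_version() (defined above) appends a PEP 440 local
+                    # version segment (e.g. '1.0' -> '1.0+1'), or bumps an existing
+                    # one, so the rebuilt wheel is distinguishable from the original.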
+ update_version(current_version, path) + # Decide where the new wheel goes. + if dest_dir is None: + fd, newpath = tempfile.mkstemp(suffix='.whl', + prefix='wheel-update-', + dir=workdir) + os.close(fd) + else: + if not os.path.isdir(dest_dir): + raise DistlibException('Not a directory: %r' % dest_dir) + newpath = os.path.join(dest_dir, self.filename) + archive_paths = list(path_map.items()) + distinfo = os.path.join(workdir, info_dir) + info = distinfo, info_dir + self.write_records(info, workdir, archive_paths) + self.build_zip(newpath, archive_paths) + if dest_dir is None: + shutil.copyfile(newpath, pathname) + return modified + +def compatible_tags(): + """ + Return (pyver, abi, arch) tuples compatible with this Python. + """ + versions = [VER_SUFFIX] + major = VER_SUFFIX[0] + for minor in range(sys.version_info[1] - 1, - 1, -1): + versions.append(''.join([major, str(minor)])) + + abis = [] + for suffix, _, _ in imp.get_suffixes(): + if suffix.startswith('.abi'): + abis.append(suffix.split('.', 2)[1]) + abis.sort() + if ABI != 'none': + abis.insert(0, ABI) + abis.append('none') + result = [] + + arches = [ARCH] + if sys.platform == 'darwin': + m = re.match(r'(\w+)_(\d+)_(\d+)_(\w+)$', ARCH) + if m: + name, major, minor, arch = m.groups() + minor = int(minor) + matches = [arch] + if arch in ('i386', 'ppc'): + matches.append('fat') + if arch in ('i386', 'ppc', 'x86_64'): + matches.append('fat3') + if arch in ('ppc64', 'x86_64'): + matches.append('fat64') + if arch in ('i386', 'x86_64'): + matches.append('intel') + if arch in ('i386', 'x86_64', 'intel', 'ppc', 'ppc64'): + matches.append('universal') + while minor >= 0: + for match in matches: + s = '%s_%s_%s_%s' % (name, major, minor, match) + if s != ARCH: # already there + arches.append(s) + minor -= 1 + + # Most specific - our Python version, ABI and arch + for abi in abis: + for arch in arches: + result.append((''.join((IMP_PREFIX, versions[0])), abi, arch)) + + # where no ABI / arch dependency, but IMP_PREFIX dependency + for i, version in enumerate(versions): + result.append((''.join((IMP_PREFIX, version)), 'none', 'any')) + if i == 0: + result.append((''.join((IMP_PREFIX, version[0])), 'none', 'any')) + + # no IMP_PREFIX, ABI or arch dependency + for i, version in enumerate(versions): + result.append((''.join(('py', version)), 'none', 'any')) + if i == 0: + result.append((''.join(('py', version[0])), 'none', 'any')) + return set(result) + + +COMPATIBLE_TAGS = compatible_tags() + +del compatible_tags + + +def is_compatible(wheel, tags=None): + if not isinstance(wheel, Wheel): + wheel = Wheel(wheel) # assume it's a filename + result = False + if tags is None: + tags = COMPATIBLE_TAGS + for ver, abi, arch in tags: + if ver in wheel.pyver and abi in wheel.abi and arch in wheel.arch: + result = True + break + return result diff --git a/env/lib/python3.6/site-packages/pip/_vendor/distro.py b/env/lib/python3.6/site-packages/pip/_vendor/distro.py new file mode 100644 index 0000000..aa4defc --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/distro.py @@ -0,0 +1,1197 @@ +# Copyright 2015,2016,2017 Nir Cohen +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +The ``distro`` package (``distro`` stands for Linux Distribution) provides +information about the Linux distribution it runs on, such as a reliable +machine-readable distro ID, or version information. + +It is a renewed alternative implementation for Python's original +:py:func:`platform.linux_distribution` function, but it provides much more +functionality. An alternative implementation became necessary because Python +3.5 deprecated this function, and Python 3.7 is expected to remove it +altogether. Its predecessor function :py:func:`platform.dist` was already +deprecated since Python 2.6 and is also expected to be removed in Python 3.7. +Still, there are many cases in which access to OS distribution information +is needed. See `Python issue 1322 <https://bugs.python.org/issue1322>`_ for +more information. +""" + +import os +import re +import sys +import json +import shlex +import logging +import argparse +import subprocess + + +_UNIXCONFDIR = os.environ.get('UNIXCONFDIR', '/etc') +_OS_RELEASE_BASENAME = 'os-release' + +#: Translation table for normalizing the "ID" attribute defined in os-release +#: files, for use by the :func:`distro.id` method. +#: +#: * Key: Value as defined in the os-release file, translated to lower case, +#: with blanks translated to underscores. +#: +#: * Value: Normalized value. +NORMALIZED_OS_ID = {} + +#: Translation table for normalizing the "Distributor ID" attribute returned by +#: the lsb_release command, for use by the :func:`distro.id` method. +#: +#: * Key: Value as returned by the lsb_release command, translated to lower +#: case, with blanks translated to underscores. +#: +#: * Value: Normalized value. +NORMALIZED_LSB_ID = { + 'enterpriseenterprise': 'oracle', # Oracle Enterprise Linux + 'redhatenterpriseworkstation': 'rhel', # RHEL 6, 7 Workstation + 'redhatenterpriseserver': 'rhel', # RHEL 6, 7 Server +} + +#: Translation table for normalizing the distro ID derived from the file name +#: of distro release files, for use by the :func:`distro.id` method. +#: +#: * Key: Value as derived from the file name of a distro release file, +#: translated to lower case, with blanks translated to underscores. +#: +#: * Value: Normalized value. +NORMALIZED_DISTRO_ID = { + 'redhat': 'rhel', # RHEL 6.x, 7.x +} + +# Pattern for content of distro release file (reversed) +_DISTRO_RELEASE_CONTENT_REVERSED_PATTERN = re.compile( + r'(?:[^)]*\)(.*)\()? *(?:STL )?([\d.+\-a-z]*\d) *(?:esaeler *)?(.+)') + +# Pattern for base file name of distro release file +_DISTRO_RELEASE_BASENAME_PATTERN = re.compile( + r'(\w+)[-_](release|version)$') + +# Base file names to be ignored when searching for distro release file +_DISTRO_RELEASE_IGNORE_BASENAMES = ( + 'debian_version', + 'lsb-release', + 'oem-release', + _OS_RELEASE_BASENAME, + 'system-release' +) + + +def linux_distribution(full_distribution_name=True): + """ + Return information about the current OS distribution as a tuple + ``(id_name, version, codename)`` with items as follows: + + * ``id_name``: If *full_distribution_name* is false, the result of + :func:`distro.id`. 
Otherwise, the result of :func:`distro.name`. + + * ``version``: The result of :func:`distro.version`. + + * ``codename``: The result of :func:`distro.codename`. + + The interface of this function is compatible with the original + :py:func:`platform.linux_distribution` function, supporting a subset of + its parameters. + + The data it returns may not exactly be the same, because it uses more data + sources than the original function, and that may lead to different data if + the OS distribution is not consistent across multiple data sources it + provides (there are indeed such distributions ...). + + Another reason for differences is the fact that the :func:`distro.id` + method normalizes the distro ID string to a reliable machine-readable value + for a number of popular OS distributions. + """ + return _distro.linux_distribution(full_distribution_name) + + +def id(): + """ + Return the distro ID of the current distribution, as a + machine-readable string. + + For a number of OS distributions, the returned distro ID value is + *reliable*, in the sense that it is documented and that it does not change + across releases of the distribution. + + This package maintains the following reliable distro ID values: + + ============== ========================================= + Distro ID Distribution + ============== ========================================= + "ubuntu" Ubuntu + "debian" Debian + "rhel" RedHat Enterprise Linux + "centos" CentOS + "fedora" Fedora + "sles" SUSE Linux Enterprise Server + "opensuse" openSUSE + "amazon" Amazon Linux + "arch" Arch Linux + "cloudlinux" CloudLinux OS + "exherbo" Exherbo Linux + "gentoo" GenToo Linux + "ibm_powerkvm" IBM PowerKVM + "kvmibm" KVM for IBM z Systems + "linuxmint" Linux Mint + "mageia" Mageia + "mandriva" Mandriva Linux + "parallels" Parallels + "pidora" Pidora + "raspbian" Raspbian + "oracle" Oracle Linux (and Oracle Enterprise Linux) + "scientific" Scientific Linux + "slackware" Slackware + "xenserver" XenServer + "openbsd" OpenBSD + "netbsd" NetBSD + "freebsd" FreeBSD + ============== ========================================= + + If you have a need to get distros for reliable IDs added into this set, + or if you find that the :func:`distro.id` function returns a different + distro ID for one of the listed distros, please create an issue in the + `distro issue tracker`_. + + **Lookup hierarchy and transformations:** + + First, the ID is obtained from the following sources, in the specified + order. The first available and non-empty value is used: + + * the value of the "ID" attribute of the os-release file, + + * the value of the "Distributor ID" attribute returned by the lsb_release + command, + + * the first part of the file name of the distro release file, + + The so determined ID value then passes the following transformations, + before it is returned by this method: + + * it is translated to lower case, + + * blanks (which should not be there anyway) are translated to underscores, + + * a normalization of the ID is performed, based upon + `normalization tables`_. The purpose of this normalization is to ensure + that the ID is as reliable as possible, even across incompatible changes + in the OS distributions. A common reason for an incompatible change is + the addition of an os-release file, or the addition of the lsb_release + command, with ID values that differ from what was previously determined + from the distro release file name. 
+ """ + return _distro.id() + + +def name(pretty=False): + """ + Return the name of the current OS distribution, as a human-readable + string. + + If *pretty* is false, the name is returned without version or codename. + (e.g. "CentOS Linux") + + If *pretty* is true, the version and codename are appended. + (e.g. "CentOS Linux 7.1.1503 (Core)") + + **Lookup hierarchy:** + + The name is obtained from the following sources, in the specified order. + The first available and non-empty value is used: + + * If *pretty* is false: + + - the value of the "NAME" attribute of the os-release file, + + - the value of the "Distributor ID" attribute returned by the lsb_release + command, + + - the value of the "<name>" field of the distro release file. + + * If *pretty* is true: + + - the value of the "PRETTY_NAME" attribute of the os-release file, + + - the value of the "Description" attribute returned by the lsb_release + command, + + - the value of the "<name>" field of the distro release file, appended + with the value of the pretty version ("<version_id>" and "<codename>" + fields) of the distro release file, if available. + """ + return _distro.name(pretty) + + +def version(pretty=False, best=False): + """ + Return the version of the current OS distribution, as a human-readable + string. + + If *pretty* is false, the version is returned without codename (e.g. + "7.0"). + + If *pretty* is true, the codename in parenthesis is appended, if the + codename is non-empty (e.g. "7.0 (Maipo)"). + + Some distributions provide version numbers with different precisions in + the different sources of distribution information. Examining the different + sources in a fixed priority order does not always yield the most precise + version (e.g. for Debian 8.2, or CentOS 7.1). + + The *best* parameter can be used to control the approach for the returned + version: + + If *best* is false, the first non-empty version number in priority order of + the examined sources is returned. + + If *best* is true, the most precise version number out of all examined + sources is returned. + + **Lookup hierarchy:** + + In all cases, the version number is obtained from the following sources. + If *best* is false, this order represents the priority order: + + * the value of the "VERSION_ID" attribute of the os-release file, + * the value of the "Release" attribute returned by the lsb_release + command, + * the version number parsed from the "<version_id>" field of the first line + of the distro release file, + * the version number parsed from the "PRETTY_NAME" attribute of the + os-release file, if it follows the format of the distro release files. + * the version number parsed from the "Description" attribute returned by + the lsb_release command, if it follows the format of the distro release + files. + """ + return _distro.version(pretty, best) + + +def version_parts(best=False): + """ + Return the version of the current OS distribution as a tuple + ``(major, minor, build_number)`` with items as follows: + + * ``major``: The result of :func:`distro.major_version`. + + * ``minor``: The result of :func:`distro.minor_version`. + + * ``build_number``: The result of :func:`distro.build_number`. + + For a description of the *best* parameter, see the :func:`distro.version` + method. + """ + return _distro.version_parts(best) + + +def major_version(best=False): + """ + Return the major version of the current OS distribution, as a string, + if provided. + Otherwise, the empty string is returned. 
The major version is the first + part of the dot-separated version string. + + For a description of the *best* parameter, see the :func:`distro.version` + method. + """ + return _distro.major_version(best) + + +def minor_version(best=False): + """ + Return the minor version of the current OS distribution, as a string, + if provided. + Otherwise, the empty string is returned. The minor version is the second + part of the dot-separated version string. + + For a description of the *best* parameter, see the :func:`distro.version` + method. + """ + return _distro.minor_version(best) + + +def build_number(best=False): + """ + Return the build number of the current OS distribution, as a string, + if provided. + Otherwise, the empty string is returned. The build number is the third part + of the dot-separated version string. + + For a description of the *best* parameter, see the :func:`distro.version` + method. + """ + return _distro.build_number(best) + + +def like(): + """ + Return a space-separated list of distro IDs of distributions that are + closely related to the current OS distribution in regards to packaging + and programming interfaces, for example distributions the current + distribution is a derivative from. + + **Lookup hierarchy:** + + This information item is only provided by the os-release file. + For details, see the description of the "ID_LIKE" attribute in the + `os-release man page + <http://www.freedesktop.org/software/systemd/man/os-release.html>`_. + """ + return _distro.like() + + +def codename(): + """ + Return the codename for the release of the current OS distribution, + as a string. + + If the distribution does not have a codename, an empty string is returned. + + Note that the returned codename is not always really a codename. For + example, openSUSE returns "x86_64". This function does not handle such + cases in any special way and just returns the string it finds, if any. + + **Lookup hierarchy:** + + * the codename within the "VERSION" attribute of the os-release file, if + provided, + + * the value of the "Codename" attribute returned by the lsb_release + command, + + * the value of the "<codename>" field of the distro release file. + """ + return _distro.codename() + + +def info(pretty=False, best=False): + """ + Return certain machine-readable information items about the current OS + distribution in a dictionary, as shown in the following example: + + .. sourcecode:: python + + { + 'id': 'rhel', + 'version': '7.0', + 'version_parts': { + 'major': '7', + 'minor': '0', + 'build_number': '' + }, + 'like': 'fedora', + 'codename': 'Maipo' + } + + The dictionary structure and keys are always the same, regardless of which + information items are available in the underlying data sources. The values + for the various keys are as follows: + + * ``id``: The result of :func:`distro.id`. + + * ``version``: The result of :func:`distro.version`. + + * ``version_parts -> major``: The result of :func:`distro.major_version`. + + * ``version_parts -> minor``: The result of :func:`distro.minor_version`. + + * ``version_parts -> build_number``: The result of + :func:`distro.build_number`. + + * ``like``: The result of :func:`distro.like`. + + * ``codename``: The result of :func:`distro.codename`. + + For a description of the *pretty* and *best* parameters, see the + :func:`distro.version` method. 
+ """ + return _distro.info(pretty, best) + + +def os_release_info(): + """ + Return a dictionary containing key-value pairs for the information items + from the os-release file data source of the current OS distribution. + + See `os-release file`_ for details about these information items. + """ + return _distro.os_release_info() + + +def lsb_release_info(): + """ + Return a dictionary containing key-value pairs for the information items + from the lsb_release command data source of the current OS distribution. + + See `lsb_release command output`_ for details about these information + items. + """ + return _distro.lsb_release_info() + + +def distro_release_info(): + """ + Return a dictionary containing key-value pairs for the information items + from the distro release file data source of the current OS distribution. + + See `distro release file`_ for details about these information items. + """ + return _distro.distro_release_info() + + +def uname_info(): + """ + Return a dictionary containing key-value pairs for the information items + from the distro release file data source of the current OS distribution. + """ + return _distro.uname_info() + + +def os_release_attr(attribute): + """ + Return a single named information item from the os-release file data source + of the current OS distribution. + + Parameters: + + * ``attribute`` (string): Key of the information item. + + Returns: + + * (string): Value of the information item, if the item exists. + The empty string, if the item does not exist. + + See `os-release file`_ for details about these information items. + """ + return _distro.os_release_attr(attribute) + + +def lsb_release_attr(attribute): + """ + Return a single named information item from the lsb_release command output + data source of the current OS distribution. + + Parameters: + + * ``attribute`` (string): Key of the information item. + + Returns: + + * (string): Value of the information item, if the item exists. + The empty string, if the item does not exist. + + See `lsb_release command output`_ for details about these information + items. + """ + return _distro.lsb_release_attr(attribute) + + +def distro_release_attr(attribute): + """ + Return a single named information item from the distro release file + data source of the current OS distribution. + + Parameters: + + * ``attribute`` (string): Key of the information item. + + Returns: + + * (string): Value of the information item, if the item exists. + The empty string, if the item does not exist. + + See `distro release file`_ for details about these information items. + """ + return _distro.distro_release_attr(attribute) + + +def uname_attr(attribute): + """ + Return a single named information item from the distro release file + data source of the current OS distribution. + + Parameters: + + * ``attribute`` (string): Key of the information item. + + Returns: + + * (string): Value of the information item, if the item exists. + The empty string, if the item does not exist. + """ + return _distro.uname_attr(attribute) + + +class cached_property(object): + """A version of @property which caches the value. On access, it calls the + underlying function and sets the value in `__dict__` so future accesses + will not re-call the property. 
+ """ + def __init__(self, f): + self._fname = f.__name__ + self._f = f + + def __get__(self, obj, owner): + assert obj is not None, 'call {} on an instance'.format(self._fname) + ret = obj.__dict__[self._fname] = self._f(obj) + return ret + + +class LinuxDistribution(object): + """ + Provides information about a OS distribution. + + This package creates a private module-global instance of this class with + default initialization arguments, that is used by the + `consolidated accessor functions`_ and `single source accessor functions`_. + By using default initialization arguments, that module-global instance + returns data about the current OS distribution (i.e. the distro this + package runs on). + + Normally, it is not necessary to create additional instances of this class. + However, in situations where control is needed over the exact data sources + that are used, instances of this class can be created with a specific + distro release file, or a specific os-release file, or without invoking the + lsb_release command. + """ + + def __init__(self, + include_lsb=True, + os_release_file='', + distro_release_file='', + include_uname=True): + """ + The initialization method of this class gathers information from the + available data sources, and stores that in private instance attributes. + Subsequent access to the information items uses these private instance + attributes, so that the data sources are read only once. + + Parameters: + + * ``include_lsb`` (bool): Controls whether the + `lsb_release command output`_ is included as a data source. + + If the lsb_release command is not available in the program execution + path, the data source for the lsb_release command will be empty. + + * ``os_release_file`` (string): The path name of the + `os-release file`_ that is to be used as a data source. + + An empty string (the default) will cause the default path name to + be used (see `os-release file`_ for details). + + If the specified or defaulted os-release file does not exist, the + data source for the os-release file will be empty. + + * ``distro_release_file`` (string): The path name of the + `distro release file`_ that is to be used as a data source. + + An empty string (the default) will cause a default search algorithm + to be used (see `distro release file`_ for details). + + If the specified distro release file does not exist, or if no default + distro release file can be found, the data source for the distro + release file will be empty. + + * ``include_name`` (bool): Controls whether uname command output is + included as a data source. If the uname command is not available in + the program execution path the data source for the uname command will + be empty. + + Public instance attributes: + + * ``os_release_file`` (string): The path name of the + `os-release file`_ that is actually used as a data source. The + empty string if no distro release file is used as a data source. + + * ``distro_release_file`` (string): The path name of the + `distro release file`_ that is actually used as a data source. The + empty string if no distro release file is used as a data source. + + * ``include_lsb`` (bool): The result of the ``include_lsb`` parameter. + This controls whether the lsb information will be loaded. + + * ``include_uname`` (bool): The result of the ``include_uname`` + parameter. This controls whether the uname information will + be loaded. + + Raises: + + * :py:exc:`IOError`: Some I/O issue with an os-release file or distro + release file. 
+ + * :py:exc:`subprocess.CalledProcessError`: The lsb_release command had + some issue (other than not being available in the program execution + path). + + * :py:exc:`UnicodeError`: A data source has unexpected characters or + uses an unexpected encoding. + """ + self.os_release_file = os_release_file or \ + os.path.join(_UNIXCONFDIR, _OS_RELEASE_BASENAME) + self.distro_release_file = distro_release_file or '' # updated later + self.include_lsb = include_lsb + self.include_uname = include_uname + + def __repr__(self): + """Return repr of all info + """ + return \ + "LinuxDistribution(" \ + "os_release_file={self.os_release_file!r}, " \ + "distro_release_file={self.distro_release_file!r}, " \ + "include_lsb={self.include_lsb!r}, " \ + "include_uname={self.include_uname!r}, " \ + "_os_release_info={self._os_release_info!r}, " \ + "_lsb_release_info={self._lsb_release_info!r}, " \ + "_distro_release_info={self._distro_release_info!r}, " \ + "_uname_info={self._uname_info!r})".format( + self=self) + + def linux_distribution(self, full_distribution_name=True): + """ + Return information about the OS distribution that is compatible + with Python's :func:`platform.linux_distribution`, supporting a subset + of its parameters. + + For details, see :func:`distro.linux_distribution`. + """ + return ( + self.name() if full_distribution_name else self.id(), + self.version(), + self.codename() + ) + + def id(self): + """Return the distro ID of the OS distribution, as a string. + + For details, see :func:`distro.id`. + """ + def normalize(distro_id, table): + distro_id = distro_id.lower().replace(' ', '_') + return table.get(distro_id, distro_id) + + distro_id = self.os_release_attr('id') + if distro_id: + return normalize(distro_id, NORMALIZED_OS_ID) + + distro_id = self.lsb_release_attr('distributor_id') + if distro_id: + return normalize(distro_id, NORMALIZED_LSB_ID) + + distro_id = self.distro_release_attr('id') + if distro_id: + return normalize(distro_id, NORMALIZED_DISTRO_ID) + + distro_id = self.uname_attr('id') + if distro_id: + return normalize(distro_id, NORMALIZED_DISTRO_ID) + + return '' + + def name(self, pretty=False): + """ + Return the name of the OS distribution, as a string. + + For details, see :func:`distro.name`. + """ + name = self.os_release_attr('name') \ + or self.lsb_release_attr('distributor_id') \ + or self.distro_release_attr('name') \ + or self.uname_attr('name') + if pretty: + name = self.os_release_attr('pretty_name') \ + or self.lsb_release_attr('description') + if not name: + name = self.distro_release_attr('name') \ + or self.uname_attr('name') + version = self.version(pretty=True) + if version: + name = name + ' ' + version + return name or '' + + def version(self, pretty=False, best=False): + """ + Return the version of the OS distribution, as a string. + + For details, see :func:`distro.version`. + """ + versions = [ + self.os_release_attr('version_id'), + self.lsb_release_attr('release'), + self.distro_release_attr('version_id'), + self._parse_distro_release_content( + self.os_release_attr('pretty_name')).get('version_id', ''), + self._parse_distro_release_content( + self.lsb_release_attr('description')).get('version_id', ''), + self.uname_attr('release') + ] + version = '' + if best: + # This algorithm uses the last version in priority order that has + # the best precision. If the versions are not in conflict, that + # does not matter; otherwise, using the last one instead of the + # first one might be considered a surprise. 
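+            # "Best" precision is judged by the number of dots in the version
+            # string, so e.g. '7.1.1503' is preferred over '7.1' or '7'.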
+ for v in versions: + if v.count(".") > version.count(".") or version == '': + version = v + else: + for v in versions: + if v != '': + version = v + break + if pretty and version and self.codename(): + version = u'{0} ({1})'.format(version, self.codename()) + return version + + def version_parts(self, best=False): + """ + Return the version of the OS distribution, as a tuple of version + numbers. + + For details, see :func:`distro.version_parts`. + """ + version_str = self.version(best=best) + if version_str: + version_regex = re.compile(r'(\d+)\.?(\d+)?\.?(\d+)?') + matches = version_regex.match(version_str) + if matches: + major, minor, build_number = matches.groups() + return major, minor or '', build_number or '' + return '', '', '' + + def major_version(self, best=False): + """ + Return the major version number of the current distribution. + + For details, see :func:`distro.major_version`. + """ + return self.version_parts(best)[0] + + def minor_version(self, best=False): + """ + Return the minor version number of the current distribution. + + For details, see :func:`distro.minor_version`. + """ + return self.version_parts(best)[1] + + def build_number(self, best=False): + """ + Return the build number of the current distribution. + + For details, see :func:`distro.build_number`. + """ + return self.version_parts(best)[2] + + def like(self): + """ + Return the IDs of distributions that are like the OS distribution. + + For details, see :func:`distro.like`. + """ + return self.os_release_attr('id_like') or '' + + def codename(self): + """ + Return the codename of the OS distribution. + + For details, see :func:`distro.codename`. + """ + return self.os_release_attr('codename') \ + or self.lsb_release_attr('codename') \ + or self.distro_release_attr('codename') \ + or '' + + def info(self, pretty=False, best=False): + """ + Return certain machine-readable information about the OS + distribution. + + For details, see :func:`distro.info`. + """ + return dict( + id=self.id(), + version=self.version(pretty, best), + version_parts=dict( + major=self.major_version(best), + minor=self.minor_version(best), + build_number=self.build_number(best) + ), + like=self.like(), + codename=self.codename(), + ) + + def os_release_info(self): + """ + Return a dictionary containing key-value pairs for the information + items from the os-release file data source of the OS distribution. + + For details, see :func:`distro.os_release_info`. + """ + return self._os_release_info + + def lsb_release_info(self): + """ + Return a dictionary containing key-value pairs for the information + items from the lsb_release command data source of the OS + distribution. + + For details, see :func:`distro.lsb_release_info`. + """ + return self._lsb_release_info + + def distro_release_info(self): + """ + Return a dictionary containing key-value pairs for the information + items from the distro release file data source of the OS + distribution. + + For details, see :func:`distro.distro_release_info`. + """ + return self._distro_release_info + + def uname_info(self): + """ + Return a dictionary containing key-value pairs for the information + items from the uname command data source of the OS distribution. + + For details, see :func:`distro.uname_info`. + """ + + def os_release_attr(self, attribute): + """ + Return a single named information item from the os-release file data + source of the OS distribution. + + For details, see :func:`distro.os_release_attr`. 
+ """ + return self._os_release_info.get(attribute, '') + + def lsb_release_attr(self, attribute): + """ + Return a single named information item from the lsb_release command + output data source of the OS distribution. + + For details, see :func:`distro.lsb_release_attr`. + """ + return self._lsb_release_info.get(attribute, '') + + def distro_release_attr(self, attribute): + """ + Return a single named information item from the distro release file + data source of the OS distribution. + + For details, see :func:`distro.distro_release_attr`. + """ + return self._distro_release_info.get(attribute, '') + + def uname_attr(self, attribute): + """ + Return a single named information item from the uname command + output data source of the OS distribution. + + For details, see :func:`distro.uname_release_attr`. + """ + return self._uname_info.get(attribute, '') + + @cached_property + def _os_release_info(self): + """ + Get the information items from the specified os-release file. + + Returns: + A dictionary containing all information items. + """ + if os.path.isfile(self.os_release_file): + with open(self.os_release_file) as release_file: + return self._parse_os_release_content(release_file) + return {} + + @staticmethod + def _parse_os_release_content(lines): + """ + Parse the lines of an os-release file. + + Parameters: + + * lines: Iterable through the lines in the os-release file. + Each line must be a unicode string or a UTF-8 encoded byte + string. + + Returns: + A dictionary containing all information items. + """ + props = {} + lexer = shlex.shlex(lines, posix=True) + lexer.whitespace_split = True + + # The shlex module defines its `wordchars` variable using literals, + # making it dependent on the encoding of the Python source file. + # In Python 2.6 and 2.7, the shlex source file is encoded in + # 'iso-8859-1', and the `wordchars` variable is defined as a byte + # string. This causes a UnicodeDecodeError to be raised when the + # parsed content is a unicode object. The following fix resolves that + # (... but it should be fixed in shlex...): + if sys.version_info[0] == 2 and isinstance(lexer.wordchars, bytes): + lexer.wordchars = lexer.wordchars.decode('iso-8859-1') + + tokens = list(lexer) + for token in tokens: + # At this point, all shell-like parsing has been done (i.e. + # comments processed, quotes and backslash escape sequences + # processed, multi-line values assembled, trailing newlines + # stripped, etc.), so the tokens are now either: + # * variable assignments: var=value + # * commands or their arguments (not allowed in os-release) + if '=' in token: + k, v = token.split('=', 1) + if isinstance(v, bytes): + v = v.decode('utf-8') + props[k.lower()] = v + if k == 'VERSION': + # this handles cases in which the codename is in + # the `(CODENAME)` (rhel, centos, fedora) format + # or in the `, CODENAME` format (Ubuntu). + codename = re.search(r'(\(\D+\))|,(\s+)?\D+', v) + if codename: + codename = codename.group() + codename = codename.strip('()') + codename = codename.strip(',') + codename = codename.strip() + # codename appears within paranthese. + props['codename'] = codename + else: + props['codename'] = '' + else: + # Ignore any tokens that are not variable assignments + pass + return props + + @cached_property + def _lsb_release_info(self): + """ + Get the information items from the lsb_release command output. + + Returns: + A dictionary containing all information items. 
+ """ + if not self.include_lsb: + return {} + with open(os.devnull, 'w') as devnull: + try: + cmd = ('lsb_release', '-a') + stdout = subprocess.check_output(cmd, stderr=devnull) + except OSError: # Command not found + return {} + content = stdout.decode(sys.getfilesystemencoding()).splitlines() + return self._parse_lsb_release_content(content) + + @staticmethod + def _parse_lsb_release_content(lines): + """ + Parse the output of the lsb_release command. + + Parameters: + + * lines: Iterable through the lines of the lsb_release output. + Each line must be a unicode string or a UTF-8 encoded byte + string. + + Returns: + A dictionary containing all information items. + """ + props = {} + for line in lines: + kv = line.strip('\n').split(':', 1) + if len(kv) != 2: + # Ignore lines without colon. + continue + k, v = kv + props.update({k.replace(' ', '_').lower(): v.strip()}) + return props + + @cached_property + def _uname_info(self): + with open(os.devnull, 'w') as devnull: + try: + cmd = ('uname', '-rs') + stdout = subprocess.check_output(cmd, stderr=devnull) + except OSError: + return {} + content = stdout.decode(sys.getfilesystemencoding()).splitlines() + return self._parse_uname_content(content) + + @staticmethod + def _parse_uname_content(lines): + props = {} + match = re.search(r'^([^\s]+)\s+([\d\.]+)', lines[0].strip()) + if match: + name, version = match.groups() + + # This is to prevent the Linux kernel version from + # appearing as the 'best' version on otherwise + # identifiable distributions. + if name == 'Linux': + return {} + props['id'] = name.lower() + props['name'] = name + props['release'] = version + return props + + @cached_property + def _distro_release_info(self): + """ + Get the information items from the specified distro release file. + + Returns: + A dictionary containing all information items. + """ + if self.distro_release_file: + # If it was specified, we use it and parse what we can, even if + # its file name or content does not match the expected pattern. + distro_info = self._parse_distro_release_file( + self.distro_release_file) + basename = os.path.basename(self.distro_release_file) + # The file name pattern for user-specified distro release files + # is somewhat more tolerant (compared to when searching for the + # file), because we want to use what was specified as best as + # possible. + match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename) + if match: + distro_info['id'] = match.group(1) + return distro_info + else: + try: + basenames = os.listdir(_UNIXCONFDIR) + # We sort for repeatability in cases where there are multiple + # distro specific files; e.g. CentOS, Oracle, Enterprise all + # containing `redhat-release` on top of their own. + basenames.sort() + except OSError: + # This may occur when /etc is not readable but we can't be + # sure about the *-release files. Check common entries of + # /etc for information. If they turn out to not be there the + # error is handled in `_parse_distro_release_file()`. 
+ basenames = ['SuSE-release', + 'arch-release', + 'base-release', + 'centos-release', + 'fedora-release', + 'gentoo-release', + 'mageia-release', + 'mandrake-release', + 'mandriva-release', + 'mandrivalinux-release', + 'manjaro-release', + 'oracle-release', + 'redhat-release', + 'sl-release', + 'slackware-version'] + for basename in basenames: + if basename in _DISTRO_RELEASE_IGNORE_BASENAMES: + continue + match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename) + if match: + filepath = os.path.join(_UNIXCONFDIR, basename) + distro_info = self._parse_distro_release_file(filepath) + if 'name' in distro_info: + # The name is always present if the pattern matches + self.distro_release_file = filepath + distro_info['id'] = match.group(1) + return distro_info + return {} + + def _parse_distro_release_file(self, filepath): + """ + Parse a distro release file. + + Parameters: + + * filepath: Path name of the distro release file. + + Returns: + A dictionary containing all information items. + """ + try: + with open(filepath) as fp: + # Only parse the first line. For instance, on SLES there + # are multiple lines. We don't want them... + return self._parse_distro_release_content(fp.readline()) + except (OSError, IOError): + # Ignore not being able to read a specific, seemingly version + # related file. + # See https://github.com/nir0s/distro/issues/162 + return {} + + @staticmethod + def _parse_distro_release_content(line): + """ + Parse a line from a distro release file. + + Parameters: + * line: Line from the distro release file. Must be a unicode string + or a UTF-8 encoded byte string. + + Returns: + A dictionary containing all information items. + """ + if isinstance(line, bytes): + line = line.decode('utf-8') + matches = _DISTRO_RELEASE_CONTENT_REVERSED_PATTERN.match( + line.strip()[::-1]) + distro_info = {} + if matches: + # regexp ensures non-None + distro_info['name'] = matches.group(3)[::-1] + if matches.group(2): + distro_info['version_id'] = matches.group(2)[::-1] + if matches.group(1): + distro_info['codename'] = matches.group(1)[::-1] + elif line: + distro_info['name'] = line.strip() + return distro_info + + +_distro = LinuxDistribution() + + +def main(): + logger = logging.getLogger(__name__) + logger.setLevel(logging.DEBUG) + logger.addHandler(logging.StreamHandler(sys.stdout)) + + parser = argparse.ArgumentParser(description="OS distro info tool") + parser.add_argument( + '--json', + '-j', + help="Output in machine readable format", + action="store_true") + args = parser.parse_args() + + if args.json: + logger.info(json.dumps(info(), indent=4, sort_keys=True)) + else: + logger.info('Name: %s', name(pretty=True)) + distribution_version = version(pretty=True) + logger.info('Version: %s', distribution_version) + distribution_codename = codename() + logger.info('Codename: %s', distribution_codename) + + +if __name__ == '__main__': + main() diff --git a/env/lib/python3.6/site-packages/pip/_vendor/html5lib/_ihatexml.py b/env/lib/python3.6/site-packages/pip/_vendor/html5lib/_ihatexml.py new file mode 100644 index 0000000..4c77717 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/html5lib/_ihatexml.py @@ -0,0 +1,288 @@ +from __future__ import absolute_import, division, unicode_literals + +import re +import warnings + +from .constants import DataLossWarning + +baseChar = """ +[#x0041-#x005A] | [#x0061-#x007A] | [#x00C0-#x00D6] | [#x00D8-#x00F6] | +[#x00F8-#x00FF] | [#x0100-#x0131] | [#x0134-#x013E] | [#x0141-#x0148] | +[#x014A-#x017E] | [#x0180-#x01C3] | [#x01CD-#x01F0] | 
[#x01F4-#x01F5] | +[#x01FA-#x0217] | [#x0250-#x02A8] | [#x02BB-#x02C1] | #x0386 | +[#x0388-#x038A] | #x038C | [#x038E-#x03A1] | [#x03A3-#x03CE] | +[#x03D0-#x03D6] | #x03DA | #x03DC | #x03DE | #x03E0 | [#x03E2-#x03F3] | +[#x0401-#x040C] | [#x040E-#x044F] | [#x0451-#x045C] | [#x045E-#x0481] | +[#x0490-#x04C4] | [#x04C7-#x04C8] | [#x04CB-#x04CC] | [#x04D0-#x04EB] | +[#x04EE-#x04F5] | [#x04F8-#x04F9] | [#x0531-#x0556] | #x0559 | +[#x0561-#x0586] | [#x05D0-#x05EA] | [#x05F0-#x05F2] | [#x0621-#x063A] | +[#x0641-#x064A] | [#x0671-#x06B7] | [#x06BA-#x06BE] | [#x06C0-#x06CE] | +[#x06D0-#x06D3] | #x06D5 | [#x06E5-#x06E6] | [#x0905-#x0939] | #x093D | +[#x0958-#x0961] | [#x0985-#x098C] | [#x098F-#x0990] | [#x0993-#x09A8] | +[#x09AA-#x09B0] | #x09B2 | [#x09B6-#x09B9] | [#x09DC-#x09DD] | +[#x09DF-#x09E1] | [#x09F0-#x09F1] | [#x0A05-#x0A0A] | [#x0A0F-#x0A10] | +[#x0A13-#x0A28] | [#x0A2A-#x0A30] | [#x0A32-#x0A33] | [#x0A35-#x0A36] | +[#x0A38-#x0A39] | [#x0A59-#x0A5C] | #x0A5E | [#x0A72-#x0A74] | +[#x0A85-#x0A8B] | #x0A8D | [#x0A8F-#x0A91] | [#x0A93-#x0AA8] | +[#x0AAA-#x0AB0] | [#x0AB2-#x0AB3] | [#x0AB5-#x0AB9] | #x0ABD | #x0AE0 | +[#x0B05-#x0B0C] | [#x0B0F-#x0B10] | [#x0B13-#x0B28] | [#x0B2A-#x0B30] | +[#x0B32-#x0B33] | [#x0B36-#x0B39] | #x0B3D | [#x0B5C-#x0B5D] | +[#x0B5F-#x0B61] | [#x0B85-#x0B8A] | [#x0B8E-#x0B90] | [#x0B92-#x0B95] | +[#x0B99-#x0B9A] | #x0B9C | [#x0B9E-#x0B9F] | [#x0BA3-#x0BA4] | +[#x0BA8-#x0BAA] | [#x0BAE-#x0BB5] | [#x0BB7-#x0BB9] | [#x0C05-#x0C0C] | +[#x0C0E-#x0C10] | [#x0C12-#x0C28] | [#x0C2A-#x0C33] | [#x0C35-#x0C39] | +[#x0C60-#x0C61] | [#x0C85-#x0C8C] | [#x0C8E-#x0C90] | [#x0C92-#x0CA8] | +[#x0CAA-#x0CB3] | [#x0CB5-#x0CB9] | #x0CDE | [#x0CE0-#x0CE1] | +[#x0D05-#x0D0C] | [#x0D0E-#x0D10] | [#x0D12-#x0D28] | [#x0D2A-#x0D39] | +[#x0D60-#x0D61] | [#x0E01-#x0E2E] | #x0E30 | [#x0E32-#x0E33] | +[#x0E40-#x0E45] | [#x0E81-#x0E82] | #x0E84 | [#x0E87-#x0E88] | #x0E8A | +#x0E8D | [#x0E94-#x0E97] | [#x0E99-#x0E9F] | [#x0EA1-#x0EA3] | #x0EA5 | +#x0EA7 | [#x0EAA-#x0EAB] | [#x0EAD-#x0EAE] | #x0EB0 | [#x0EB2-#x0EB3] | +#x0EBD | [#x0EC0-#x0EC4] | [#x0F40-#x0F47] | [#x0F49-#x0F69] | +[#x10A0-#x10C5] | [#x10D0-#x10F6] | #x1100 | [#x1102-#x1103] | +[#x1105-#x1107] | #x1109 | [#x110B-#x110C] | [#x110E-#x1112] | #x113C | +#x113E | #x1140 | #x114C | #x114E | #x1150 | [#x1154-#x1155] | #x1159 | +[#x115F-#x1161] | #x1163 | #x1165 | #x1167 | #x1169 | [#x116D-#x116E] | +[#x1172-#x1173] | #x1175 | #x119E | #x11A8 | #x11AB | [#x11AE-#x11AF] | +[#x11B7-#x11B8] | #x11BA | [#x11BC-#x11C2] | #x11EB | #x11F0 | #x11F9 | +[#x1E00-#x1E9B] | [#x1EA0-#x1EF9] | [#x1F00-#x1F15] | [#x1F18-#x1F1D] | +[#x1F20-#x1F45] | [#x1F48-#x1F4D] | [#x1F50-#x1F57] | #x1F59 | #x1F5B | +#x1F5D | [#x1F5F-#x1F7D] | [#x1F80-#x1FB4] | [#x1FB6-#x1FBC] | #x1FBE | +[#x1FC2-#x1FC4] | [#x1FC6-#x1FCC] | [#x1FD0-#x1FD3] | [#x1FD6-#x1FDB] | +[#x1FE0-#x1FEC] | [#x1FF2-#x1FF4] | [#x1FF6-#x1FFC] | #x2126 | +[#x212A-#x212B] | #x212E | [#x2180-#x2182] | [#x3041-#x3094] | +[#x30A1-#x30FA] | [#x3105-#x312C] | [#xAC00-#xD7A3]""" + +ideographic = """[#x4E00-#x9FA5] | #x3007 | [#x3021-#x3029]""" + +combiningCharacter = """ +[#x0300-#x0345] | [#x0360-#x0361] | [#x0483-#x0486] | [#x0591-#x05A1] | +[#x05A3-#x05B9] | [#x05BB-#x05BD] | #x05BF | [#x05C1-#x05C2] | #x05C4 | +[#x064B-#x0652] | #x0670 | [#x06D6-#x06DC] | [#x06DD-#x06DF] | +[#x06E0-#x06E4] | [#x06E7-#x06E8] | [#x06EA-#x06ED] | [#x0901-#x0903] | +#x093C | [#x093E-#x094C] | #x094D | [#x0951-#x0954] | [#x0962-#x0963] | +[#x0981-#x0983] | #x09BC | #x09BE | #x09BF | [#x09C0-#x09C4] | +[#x09C7-#x09C8] | 
[#x09CB-#x09CD] | #x09D7 | [#x09E2-#x09E3] | #x0A02 | +#x0A3C | #x0A3E | #x0A3F | [#x0A40-#x0A42] | [#x0A47-#x0A48] | +[#x0A4B-#x0A4D] | [#x0A70-#x0A71] | [#x0A81-#x0A83] | #x0ABC | +[#x0ABE-#x0AC5] | [#x0AC7-#x0AC9] | [#x0ACB-#x0ACD] | [#x0B01-#x0B03] | +#x0B3C | [#x0B3E-#x0B43] | [#x0B47-#x0B48] | [#x0B4B-#x0B4D] | +[#x0B56-#x0B57] | [#x0B82-#x0B83] | [#x0BBE-#x0BC2] | [#x0BC6-#x0BC8] | +[#x0BCA-#x0BCD] | #x0BD7 | [#x0C01-#x0C03] | [#x0C3E-#x0C44] | +[#x0C46-#x0C48] | [#x0C4A-#x0C4D] | [#x0C55-#x0C56] | [#x0C82-#x0C83] | +[#x0CBE-#x0CC4] | [#x0CC6-#x0CC8] | [#x0CCA-#x0CCD] | [#x0CD5-#x0CD6] | +[#x0D02-#x0D03] | [#x0D3E-#x0D43] | [#x0D46-#x0D48] | [#x0D4A-#x0D4D] | +#x0D57 | #x0E31 | [#x0E34-#x0E3A] | [#x0E47-#x0E4E] | #x0EB1 | +[#x0EB4-#x0EB9] | [#x0EBB-#x0EBC] | [#x0EC8-#x0ECD] | [#x0F18-#x0F19] | +#x0F35 | #x0F37 | #x0F39 | #x0F3E | #x0F3F | [#x0F71-#x0F84] | +[#x0F86-#x0F8B] | [#x0F90-#x0F95] | #x0F97 | [#x0F99-#x0FAD] | +[#x0FB1-#x0FB7] | #x0FB9 | [#x20D0-#x20DC] | #x20E1 | [#x302A-#x302F] | +#x3099 | #x309A""" + +digit = """ +[#x0030-#x0039] | [#x0660-#x0669] | [#x06F0-#x06F9] | [#x0966-#x096F] | +[#x09E6-#x09EF] | [#x0A66-#x0A6F] | [#x0AE6-#x0AEF] | [#x0B66-#x0B6F] | +[#x0BE7-#x0BEF] | [#x0C66-#x0C6F] | [#x0CE6-#x0CEF] | [#x0D66-#x0D6F] | +[#x0E50-#x0E59] | [#x0ED0-#x0ED9] | [#x0F20-#x0F29]""" + +extender = """ +#x00B7 | #x02D0 | #x02D1 | #x0387 | #x0640 | #x0E46 | #x0EC6 | #x3005 | +#[#x3031-#x3035] | [#x309D-#x309E] | [#x30FC-#x30FE]""" + +letter = " | ".join([baseChar, ideographic]) + +# Without the +name = " | ".join([letter, digit, ".", "-", "_", combiningCharacter, + extender]) +nameFirst = " | ".join([letter, "_"]) + +reChar = re.compile(r"#x([\d|A-F]{4,4})") +reCharRange = re.compile(r"\[#x([\d|A-F]{4,4})-#x([\d|A-F]{4,4})\]") + + +def charStringToList(chars): + charRanges = [item.strip() for item in chars.split(" | ")] + rv = [] + for item in charRanges: + foundMatch = False + for regexp in (reChar, reCharRange): + match = regexp.match(item) + if match is not None: + rv.append([hexToInt(item) for item in match.groups()]) + if len(rv[-1]) == 1: + rv[-1] = rv[-1] * 2 + foundMatch = True + break + if not foundMatch: + assert len(item) == 1 + + rv.append([ord(item)] * 2) + rv = normaliseCharList(rv) + return rv + + +def normaliseCharList(charList): + charList = sorted(charList) + for item in charList: + assert item[1] >= item[0] + rv = [] + i = 0 + while i < len(charList): + j = 1 + rv.append(charList[i]) + while i + j < len(charList) and charList[i + j][0] <= rv[-1][1] + 1: + rv[-1][1] = charList[i + j][1] + j += 1 + i += j + return rv + +# We don't really support characters above the BMP :( +max_unicode = int("FFFF", 16) + + +def missingRanges(charList): + rv = [] + if charList[0] != 0: + rv.append([0, charList[0][0] - 1]) + for i, item in enumerate(charList[:-1]): + rv.append([item[1] + 1, charList[i + 1][0] - 1]) + if charList[-1][1] != max_unicode: + rv.append([charList[-1][1] + 1, max_unicode]) + return rv + + +def listToRegexpStr(charList): + rv = [] + for item in charList: + if item[0] == item[1]: + rv.append(escapeRegexp(chr(item[0]))) + else: + rv.append(escapeRegexp(chr(item[0])) + "-" + + escapeRegexp(chr(item[1]))) + return "[%s]" % "".join(rv) + + +def hexToInt(hex_str): + return int(hex_str, 16) + + +def escapeRegexp(string): + specialCharacters = (".", "^", "$", "*", "+", "?", "{", "}", + "[", "]", "|", "(", ")", "-") + for char in specialCharacters: + string = string.replace(char, "\\" + char) + + return string + +# output from the above +nonXmlNameBMPRegexp = 
re.compile('[\x00-,/:-@\\[-\\^`\\{-\xb6\xb8-\xbf\xd7\xf7\u0132-\u0133\u013f-\u0140\u0149\u017f\u01c4-\u01cc\u01f1-\u01f3\u01f6-\u01f9\u0218-\u024f\u02a9-\u02ba\u02c2-\u02cf\u02d2-\u02ff\u0346-\u035f\u0362-\u0385\u038b\u038d\u03a2\u03cf\u03d7-\u03d9\u03db\u03dd\u03df\u03e1\u03f4-\u0400\u040d\u0450\u045d\u0482\u0487-\u048f\u04c5-\u04c6\u04c9-\u04ca\u04cd-\u04cf\u04ec-\u04ed\u04f6-\u04f7\u04fa-\u0530\u0557-\u0558\u055a-\u0560\u0587-\u0590\u05a2\u05ba\u05be\u05c0\u05c3\u05c5-\u05cf\u05eb-\u05ef\u05f3-\u0620\u063b-\u063f\u0653-\u065f\u066a-\u066f\u06b8-\u06b9\u06bf\u06cf\u06d4\u06e9\u06ee-\u06ef\u06fa-\u0900\u0904\u093a-\u093b\u094e-\u0950\u0955-\u0957\u0964-\u0965\u0970-\u0980\u0984\u098d-\u098e\u0991-\u0992\u09a9\u09b1\u09b3-\u09b5\u09ba-\u09bb\u09bd\u09c5-\u09c6\u09c9-\u09ca\u09ce-\u09d6\u09d8-\u09db\u09de\u09e4-\u09e5\u09f2-\u0a01\u0a03-\u0a04\u0a0b-\u0a0e\u0a11-\u0a12\u0a29\u0a31\u0a34\u0a37\u0a3a-\u0a3b\u0a3d\u0a43-\u0a46\u0a49-\u0a4a\u0a4e-\u0a58\u0a5d\u0a5f-\u0a65\u0a75-\u0a80\u0a84\u0a8c\u0a8e\u0a92\u0aa9\u0ab1\u0ab4\u0aba-\u0abb\u0ac6\u0aca\u0ace-\u0adf\u0ae1-\u0ae5\u0af0-\u0b00\u0b04\u0b0d-\u0b0e\u0b11-\u0b12\u0b29\u0b31\u0b34-\u0b35\u0b3a-\u0b3b\u0b44-\u0b46\u0b49-\u0b4a\u0b4e-\u0b55\u0b58-\u0b5b\u0b5e\u0b62-\u0b65\u0b70-\u0b81\u0b84\u0b8b-\u0b8d\u0b91\u0b96-\u0b98\u0b9b\u0b9d\u0ba0-\u0ba2\u0ba5-\u0ba7\u0bab-\u0bad\u0bb6\u0bba-\u0bbd\u0bc3-\u0bc5\u0bc9\u0bce-\u0bd6\u0bd8-\u0be6\u0bf0-\u0c00\u0c04\u0c0d\u0c11\u0c29\u0c34\u0c3a-\u0c3d\u0c45\u0c49\u0c4e-\u0c54\u0c57-\u0c5f\u0c62-\u0c65\u0c70-\u0c81\u0c84\u0c8d\u0c91\u0ca9\u0cb4\u0cba-\u0cbd\u0cc5\u0cc9\u0cce-\u0cd4\u0cd7-\u0cdd\u0cdf\u0ce2-\u0ce5\u0cf0-\u0d01\u0d04\u0d0d\u0d11\u0d29\u0d3a-\u0d3d\u0d44-\u0d45\u0d49\u0d4e-\u0d56\u0d58-\u0d5f\u0d62-\u0d65\u0d70-\u0e00\u0e2f\u0e3b-\u0e3f\u0e4f\u0e5a-\u0e80\u0e83\u0e85-\u0e86\u0e89\u0e8b-\u0e8c\u0e8e-\u0e93\u0e98\u0ea0\u0ea4\u0ea6\u0ea8-\u0ea9\u0eac\u0eaf\u0eba\u0ebe-\u0ebf\u0ec5\u0ec7\u0ece-\u0ecf\u0eda-\u0f17\u0f1a-\u0f1f\u0f2a-\u0f34\u0f36\u0f38\u0f3a-\u0f3d\u0f48\u0f6a-\u0f70\u0f85\u0f8c-\u0f8f\u0f96\u0f98\u0fae-\u0fb0\u0fb8\u0fba-\u109f\u10c6-\u10cf\u10f7-\u10ff\u1101\u1104\u1108\u110a\u110d\u1113-\u113b\u113d\u113f\u1141-\u114b\u114d\u114f\u1151-\u1153\u1156-\u1158\u115a-\u115e\u1162\u1164\u1166\u1168\u116a-\u116c\u116f-\u1171\u1174\u1176-\u119d\u119f-\u11a7\u11a9-\u11aa\u11ac-\u11ad\u11b0-\u11b6\u11b9\u11bb\u11c3-\u11ea\u11ec-\u11ef\u11f1-\u11f8\u11fa-\u1dff\u1e9c-\u1e9f\u1efa-\u1eff\u1f16-\u1f17\u1f1e-\u1f1f\u1f46-\u1f47\u1f4e-\u1f4f\u1f58\u1f5a\u1f5c\u1f5e\u1f7e-\u1f7f\u1fb5\u1fbd\u1fbf-\u1fc1\u1fc5\u1fcd-\u1fcf\u1fd4-\u1fd5\u1fdc-\u1fdf\u1fed-\u1ff1\u1ff5\u1ffd-\u20cf\u20dd-\u20e0\u20e2-\u2125\u2127-\u2129\u212c-\u212d\u212f-\u217f\u2183-\u3004\u3006\u3008-\u3020\u3030\u3036-\u3040\u3095-\u3098\u309b-\u309c\u309f-\u30a0\u30fb\u30ff-\u3104\u312d-\u4dff\u9fa6-\uabff\ud7a4-\uffff]') # noqa + +nonXmlNameFirstBMPRegexp = 
re.compile('[\x00-@\\[-\\^`\\{-\xbf\xd7\xf7\u0132-\u0133\u013f-\u0140\u0149\u017f\u01c4-\u01cc\u01f1-\u01f3\u01f6-\u01f9\u0218-\u024f\u02a9-\u02ba\u02c2-\u0385\u0387\u038b\u038d\u03a2\u03cf\u03d7-\u03d9\u03db\u03dd\u03df\u03e1\u03f4-\u0400\u040d\u0450\u045d\u0482-\u048f\u04c5-\u04c6\u04c9-\u04ca\u04cd-\u04cf\u04ec-\u04ed\u04f6-\u04f7\u04fa-\u0530\u0557-\u0558\u055a-\u0560\u0587-\u05cf\u05eb-\u05ef\u05f3-\u0620\u063b-\u0640\u064b-\u0670\u06b8-\u06b9\u06bf\u06cf\u06d4\u06d6-\u06e4\u06e7-\u0904\u093a-\u093c\u093e-\u0957\u0962-\u0984\u098d-\u098e\u0991-\u0992\u09a9\u09b1\u09b3-\u09b5\u09ba-\u09db\u09de\u09e2-\u09ef\u09f2-\u0a04\u0a0b-\u0a0e\u0a11-\u0a12\u0a29\u0a31\u0a34\u0a37\u0a3a-\u0a58\u0a5d\u0a5f-\u0a71\u0a75-\u0a84\u0a8c\u0a8e\u0a92\u0aa9\u0ab1\u0ab4\u0aba-\u0abc\u0abe-\u0adf\u0ae1-\u0b04\u0b0d-\u0b0e\u0b11-\u0b12\u0b29\u0b31\u0b34-\u0b35\u0b3a-\u0b3c\u0b3e-\u0b5b\u0b5e\u0b62-\u0b84\u0b8b-\u0b8d\u0b91\u0b96-\u0b98\u0b9b\u0b9d\u0ba0-\u0ba2\u0ba5-\u0ba7\u0bab-\u0bad\u0bb6\u0bba-\u0c04\u0c0d\u0c11\u0c29\u0c34\u0c3a-\u0c5f\u0c62-\u0c84\u0c8d\u0c91\u0ca9\u0cb4\u0cba-\u0cdd\u0cdf\u0ce2-\u0d04\u0d0d\u0d11\u0d29\u0d3a-\u0d5f\u0d62-\u0e00\u0e2f\u0e31\u0e34-\u0e3f\u0e46-\u0e80\u0e83\u0e85-\u0e86\u0e89\u0e8b-\u0e8c\u0e8e-\u0e93\u0e98\u0ea0\u0ea4\u0ea6\u0ea8-\u0ea9\u0eac\u0eaf\u0eb1\u0eb4-\u0ebc\u0ebe-\u0ebf\u0ec5-\u0f3f\u0f48\u0f6a-\u109f\u10c6-\u10cf\u10f7-\u10ff\u1101\u1104\u1108\u110a\u110d\u1113-\u113b\u113d\u113f\u1141-\u114b\u114d\u114f\u1151-\u1153\u1156-\u1158\u115a-\u115e\u1162\u1164\u1166\u1168\u116a-\u116c\u116f-\u1171\u1174\u1176-\u119d\u119f-\u11a7\u11a9-\u11aa\u11ac-\u11ad\u11b0-\u11b6\u11b9\u11bb\u11c3-\u11ea\u11ec-\u11ef\u11f1-\u11f8\u11fa-\u1dff\u1e9c-\u1e9f\u1efa-\u1eff\u1f16-\u1f17\u1f1e-\u1f1f\u1f46-\u1f47\u1f4e-\u1f4f\u1f58\u1f5a\u1f5c\u1f5e\u1f7e-\u1f7f\u1fb5\u1fbd\u1fbf-\u1fc1\u1fc5\u1fcd-\u1fcf\u1fd4-\u1fd5\u1fdc-\u1fdf\u1fed-\u1ff1\u1ff5\u1ffd-\u2125\u2127-\u2129\u212c-\u212d\u212f-\u217f\u2183-\u3006\u3008-\u3020\u302a-\u3040\u3095-\u30a0\u30fb-\u3104\u312d-\u4dff\u9fa6-\uabff\ud7a4-\uffff]') # noqa + +# Simpler things +nonPubidCharRegexp = re.compile("[^\x20\x0D\x0Aa-zA-Z0-9\\-'()+,./:=?;!*#@$_%]") + + +class InfosetFilter(object): + replacementRegexp = re.compile(r"U[\dA-F]{5,5}") + + def __init__(self, + dropXmlnsLocalName=False, + dropXmlnsAttrNs=False, + preventDoubleDashComments=False, + preventDashAtCommentEnd=False, + replaceFormFeedCharacters=True, + preventSingleQuotePubid=False): + + self.dropXmlnsLocalName = dropXmlnsLocalName + self.dropXmlnsAttrNs = dropXmlnsAttrNs + + self.preventDoubleDashComments = preventDoubleDashComments + self.preventDashAtCommentEnd = preventDashAtCommentEnd + + self.replaceFormFeedCharacters = replaceFormFeedCharacters + + self.preventSingleQuotePubid = preventSingleQuotePubid + + self.replaceCache = {} + + def coerceAttribute(self, name, namespace=None): + if self.dropXmlnsLocalName and name.startswith("xmlns:"): + warnings.warn("Attributes cannot begin with xmlns", DataLossWarning) + return None + elif (self.dropXmlnsAttrNs and + namespace == "http://www.w3.org/2000/xmlns/"): + warnings.warn("Attributes cannot be in the xml namespace", DataLossWarning) + return None + else: + return self.toXmlName(name) + + def coerceElement(self, name): + return self.toXmlName(name) + + def coerceComment(self, data): + if self.preventDoubleDashComments: + while "--" in data: + warnings.warn("Comments cannot contain adjacent dashes", DataLossWarning) + data = data.replace("--", "- -") + if data.endswith("-"): + warnings.warn("Comments cannot end in 
a dash", DataLossWarning) + data += " " + return data + + def coerceCharacters(self, data): + if self.replaceFormFeedCharacters: + for _ in range(data.count("\x0C")): + warnings.warn("Text cannot contain U+000C", DataLossWarning) + data = data.replace("\x0C", " ") + # Other non-xml characters + return data + + def coercePubid(self, data): + dataOutput = data + for char in nonPubidCharRegexp.findall(data): + warnings.warn("Coercing non-XML pubid", DataLossWarning) + replacement = self.getReplacementCharacter(char) + dataOutput = dataOutput.replace(char, replacement) + if self.preventSingleQuotePubid and dataOutput.find("'") >= 0: + warnings.warn("Pubid cannot contain single quote", DataLossWarning) + dataOutput = dataOutput.replace("'", self.getReplacementCharacter("'")) + return dataOutput + + def toXmlName(self, name): + nameFirst = name[0] + nameRest = name[1:] + m = nonXmlNameFirstBMPRegexp.match(nameFirst) + if m: + warnings.warn("Coercing non-XML name", DataLossWarning) + nameFirstOutput = self.getReplacementCharacter(nameFirst) + else: + nameFirstOutput = nameFirst + + nameRestOutput = nameRest + replaceChars = set(nonXmlNameBMPRegexp.findall(nameRest)) + for char in replaceChars: + warnings.warn("Coercing non-XML name", DataLossWarning) + replacement = self.getReplacementCharacter(char) + nameRestOutput = nameRestOutput.replace(char, replacement) + return nameFirstOutput + nameRestOutput + + def getReplacementCharacter(self, char): + if char in self.replaceCache: + replacement = self.replaceCache[char] + else: + replacement = self.escapeChar(char) + return replacement + + def fromXmlName(self, name): + for item in set(self.replacementRegexp.findall(name)): + name = name.replace(item, self.unescapeChar(item)) + return name + + def escapeChar(self, char): + replacement = "U%05X" % ord(char) + self.replaceCache[char] = replacement + return replacement + + def unescapeChar(self, charcode): + return chr(int(charcode[1:], 16)) diff --git a/env/lib/python3.6/site-packages/pip/_vendor/html5lib/_inputstream.py b/env/lib/python3.6/site-packages/pip/_vendor/html5lib/_inputstream.py new file mode 100644 index 0000000..a65e55f --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/html5lib/_inputstream.py @@ -0,0 +1,923 @@ +from __future__ import absolute_import, division, unicode_literals + +from pip._vendor.six import text_type, binary_type +from pip._vendor.six.moves import http_client, urllib + +import codecs +import re + +from pip._vendor import webencodings + +from .constants import EOF, spaceCharacters, asciiLetters, asciiUppercase +from .constants import _ReparseException +from . 
import _utils + +from io import StringIO + +try: + from io import BytesIO +except ImportError: + BytesIO = StringIO + +# Non-unicode versions of constants for use in the pre-parser +spaceCharactersBytes = frozenset([item.encode("ascii") for item in spaceCharacters]) +asciiLettersBytes = frozenset([item.encode("ascii") for item in asciiLetters]) +asciiUppercaseBytes = frozenset([item.encode("ascii") for item in asciiUppercase]) +spacesAngleBrackets = spaceCharactersBytes | frozenset([b">", b"<"]) + + +invalid_unicode_no_surrogate = "[\u0001-\u0008\u000B\u000E-\u001F\u007F-\u009F\uFDD0-\uFDEF\uFFFE\uFFFF\U0001FFFE\U0001FFFF\U0002FFFE\U0002FFFF\U0003FFFE\U0003FFFF\U0004FFFE\U0004FFFF\U0005FFFE\U0005FFFF\U0006FFFE\U0006FFFF\U0007FFFE\U0007FFFF\U0008FFFE\U0008FFFF\U0009FFFE\U0009FFFF\U000AFFFE\U000AFFFF\U000BFFFE\U000BFFFF\U000CFFFE\U000CFFFF\U000DFFFE\U000DFFFF\U000EFFFE\U000EFFFF\U000FFFFE\U000FFFFF\U0010FFFE\U0010FFFF]" # noqa + +if _utils.supports_lone_surrogates: + # Use one extra step of indirection and create surrogates with + # eval. Not using this indirection would introduce an illegal + # unicode literal on platforms not supporting such lone + # surrogates. + assert invalid_unicode_no_surrogate[-1] == "]" and invalid_unicode_no_surrogate.count("]") == 1 + invalid_unicode_re = re.compile(invalid_unicode_no_surrogate[:-1] + + eval('"\\uD800-\\uDFFF"') + # pylint:disable=eval-used + "]") +else: + invalid_unicode_re = re.compile(invalid_unicode_no_surrogate) + +non_bmp_invalid_codepoints = set([0x1FFFE, 0x1FFFF, 0x2FFFE, 0x2FFFF, 0x3FFFE, + 0x3FFFF, 0x4FFFE, 0x4FFFF, 0x5FFFE, 0x5FFFF, + 0x6FFFE, 0x6FFFF, 0x7FFFE, 0x7FFFF, 0x8FFFE, + 0x8FFFF, 0x9FFFE, 0x9FFFF, 0xAFFFE, 0xAFFFF, + 0xBFFFE, 0xBFFFF, 0xCFFFE, 0xCFFFF, 0xDFFFE, + 0xDFFFF, 0xEFFFE, 0xEFFFF, 0xFFFFE, 0xFFFFF, + 0x10FFFE, 0x10FFFF]) + +ascii_punctuation_re = re.compile("[\u0009-\u000D\u0020-\u002F\u003A-\u0040\u005C\u005B-\u0060\u007B-\u007E]") + +# Cache for charsUntil() +charsUntilRegEx = {} + + +class BufferedStream(object): + """Buffering for streams that do not have buffering of their own + + The buffer is implemented as a list of chunks on the assumption that + joining many strings will be slow since it is O(n**2) + """ + + def __init__(self, stream): + self.stream = stream + self.buffer = [] + self.position = [-1, 0] # chunk number, offset + + def tell(self): + pos = 0 + for chunk in self.buffer[:self.position[0]]: + pos += len(chunk) + pos += self.position[1] + return pos + + def seek(self, pos): + assert pos <= self._bufferedBytes() + offset = pos + i = 0 + while len(self.buffer[i]) < offset: + offset -= len(self.buffer[i]) + i += 1 + self.position = [i, offset] + + def read(self, bytes): + if not self.buffer: + return self._readStream(bytes) + elif (self.position[0] == len(self.buffer) and + self.position[1] == len(self.buffer[-1])): + return self._readStream(bytes) + else: + return self._readFromBuffer(bytes) + + def _bufferedBytes(self): + return sum([len(item) for item in self.buffer]) + + def _readStream(self, bytes): + data = self.stream.read(bytes) + self.buffer.append(data) + self.position[0] += 1 + self.position[1] = len(data) + return data + + def _readFromBuffer(self, bytes): + remainingBytes = bytes + rv = [] + bufferIndex = self.position[0] + bufferOffset = self.position[1] + while bufferIndex < len(self.buffer) and remainingBytes != 0: + assert remainingBytes > 0 + bufferedData = self.buffer[bufferIndex] + + if remainingBytes <= len(bufferedData) - bufferOffset: + bytesToRead = remainingBytes + 
self.position = [bufferIndex, bufferOffset + bytesToRead] + else: + bytesToRead = len(bufferedData) - bufferOffset + self.position = [bufferIndex, len(bufferedData)] + bufferIndex += 1 + rv.append(bufferedData[bufferOffset:bufferOffset + bytesToRead]) + remainingBytes -= bytesToRead + + bufferOffset = 0 + + if remainingBytes: + rv.append(self._readStream(remainingBytes)) + + return b"".join(rv) + + +def HTMLInputStream(source, **kwargs): + # Work around Python bug #20007: read(0) closes the connection. + # http://bugs.python.org/issue20007 + if (isinstance(source, http_client.HTTPResponse) or + # Also check for addinfourl wrapping HTTPResponse + (isinstance(source, urllib.response.addbase) and + isinstance(source.fp, http_client.HTTPResponse))): + isUnicode = False + elif hasattr(source, "read"): + isUnicode = isinstance(source.read(0), text_type) + else: + isUnicode = isinstance(source, text_type) + + if isUnicode: + encodings = [x for x in kwargs if x.endswith("_encoding")] + if encodings: + raise TypeError("Cannot set an encoding with a unicode input, set %r" % encodings) + + return HTMLUnicodeInputStream(source, **kwargs) + else: + return HTMLBinaryInputStream(source, **kwargs) + + +class HTMLUnicodeInputStream(object): + """Provides a unicode stream of characters to the HTMLTokenizer. + + This class takes care of character encoding and removing or replacing + incorrect byte-sequences and also provides column and line tracking. + + """ + + _defaultChunkSize = 10240 + + def __init__(self, source): + """Initialises the HTMLInputStream. + + HTMLInputStream(source, [encoding]) -> Normalized stream from source + for use by html5lib. + + source can be either a file-object, local filename or a string. + + The optional encoding parameter must be a string that indicates + the encoding. If specified, that encoding will be used, + regardless of any BOM or later declaration (such as in a meta + element) + + """ + + if not _utils.supports_lone_surrogates: + # Such platforms will have already checked for such + # surrogate errors, so no need to do this checking. + self.reportCharacterErrors = None + elif len("\U0010FFFF") == 1: + self.reportCharacterErrors = self.characterErrorsUCS4 + else: + self.reportCharacterErrors = self.characterErrorsUCS2 + + # List of where new lines occur + self.newLines = [0] + + self.charEncoding = (lookupEncoding("utf-8"), "certain") + self.dataStream = self.openStream(source) + + self.reset() + + def reset(self): + self.chunk = "" + self.chunkSize = 0 + self.chunkOffset = 0 + self.errors = [] + + # number of (complete) lines in previous chunks + self.prevNumLines = 0 + # number of columns in the last line of the previous chunk + self.prevNumCols = 0 + + # Deal with CR LF and surrogates split over chunk boundaries + self._bufferedCharacter = None + + def openStream(self, source): + """Produces a file object from source. + + source can be either a file object, local filename or a string. 
+ + """ + # Already a file object + if hasattr(source, 'read'): + stream = source + else: + stream = StringIO(source) + + return stream + + def _position(self, offset): + chunk = self.chunk + nLines = chunk.count('\n', 0, offset) + positionLine = self.prevNumLines + nLines + lastLinePos = chunk.rfind('\n', 0, offset) + if lastLinePos == -1: + positionColumn = self.prevNumCols + offset + else: + positionColumn = offset - (lastLinePos + 1) + return (positionLine, positionColumn) + + def position(self): + """Returns (line, col) of the current position in the stream.""" + line, col = self._position(self.chunkOffset) + return (line + 1, col) + + def char(self): + """ Read one character from the stream or queue if available. Return + EOF when EOF is reached. + """ + # Read a new chunk from the input stream if necessary + if self.chunkOffset >= self.chunkSize: + if not self.readChunk(): + return EOF + + chunkOffset = self.chunkOffset + char = self.chunk[chunkOffset] + self.chunkOffset = chunkOffset + 1 + + return char + + def readChunk(self, chunkSize=None): + if chunkSize is None: + chunkSize = self._defaultChunkSize + + self.prevNumLines, self.prevNumCols = self._position(self.chunkSize) + + self.chunk = "" + self.chunkSize = 0 + self.chunkOffset = 0 + + data = self.dataStream.read(chunkSize) + + # Deal with CR LF and surrogates broken across chunks + if self._bufferedCharacter: + data = self._bufferedCharacter + data + self._bufferedCharacter = None + elif not data: + # We have no more data, bye-bye stream + return False + + if len(data) > 1: + lastv = ord(data[-1]) + if lastv == 0x0D or 0xD800 <= lastv <= 0xDBFF: + self._bufferedCharacter = data[-1] + data = data[:-1] + + if self.reportCharacterErrors: + self.reportCharacterErrors(data) + + # Replace invalid characters + data = data.replace("\r\n", "\n") + data = data.replace("\r", "\n") + + self.chunk = data + self.chunkSize = len(data) + + return True + + def characterErrorsUCS4(self, data): + for _ in range(len(invalid_unicode_re.findall(data))): + self.errors.append("invalid-codepoint") + + def characterErrorsUCS2(self, data): + # Someone picked the wrong compile option + # You lose + skip = False + for match in invalid_unicode_re.finditer(data): + if skip: + continue + codepoint = ord(match.group()) + pos = match.start() + # Pretty sure there should be endianness issues here + if _utils.isSurrogatePair(data[pos:pos + 2]): + # We have a surrogate pair! + char_val = _utils.surrogatePairToCodepoint(data[pos:pos + 2]) + if char_val in non_bmp_invalid_codepoints: + self.errors.append("invalid-codepoint") + skip = True + elif (codepoint >= 0xD800 and codepoint <= 0xDFFF and + pos == len(data) - 1): + self.errors.append("invalid-codepoint") + else: + skip = False + self.errors.append("invalid-codepoint") + + def charsUntil(self, characters, opposite=False): + """ Returns a string of characters from the stream up to but not + including any character in 'characters' or EOF. 'characters' must be + a container that supports the 'in' method and iteration over its + characters. 
+ """ + + # Use a cache of regexps to find the required characters + try: + chars = charsUntilRegEx[(characters, opposite)] + except KeyError: + if __debug__: + for c in characters: + assert(ord(c) < 128) + regex = "".join(["\\x%02x" % ord(c) for c in characters]) + if not opposite: + regex = "^%s" % regex + chars = charsUntilRegEx[(characters, opposite)] = re.compile("[%s]+" % regex) + + rv = [] + + while True: + # Find the longest matching prefix + m = chars.match(self.chunk, self.chunkOffset) + if m is None: + # If nothing matched, and it wasn't because we ran out of chunk, + # then stop + if self.chunkOffset != self.chunkSize: + break + else: + end = m.end() + # If not the whole chunk matched, return everything + # up to the part that didn't match + if end != self.chunkSize: + rv.append(self.chunk[self.chunkOffset:end]) + self.chunkOffset = end + break + # If the whole remainder of the chunk matched, + # use it all and read the next chunk + rv.append(self.chunk[self.chunkOffset:]) + if not self.readChunk(): + # Reached EOF + break + + r = "".join(rv) + return r + + def unget(self, char): + # Only one character is allowed to be ungotten at once - it must + # be consumed again before any further call to unget + if char is not None: + if self.chunkOffset == 0: + # unget is called quite rarely, so it's a good idea to do + # more work here if it saves a bit of work in the frequently + # called char and charsUntil. + # So, just prepend the ungotten character onto the current + # chunk: + self.chunk = char + self.chunk + self.chunkSize += 1 + else: + self.chunkOffset -= 1 + assert self.chunk[self.chunkOffset] == char + + +class HTMLBinaryInputStream(HTMLUnicodeInputStream): + """Provides a unicode stream of characters to the HTMLTokenizer. + + This class takes care of character encoding and removing or replacing + incorrect byte-sequences and also provides column and line tracking. + + """ + + def __init__(self, source, override_encoding=None, transport_encoding=None, + same_origin_parent_encoding=None, likely_encoding=None, + default_encoding="windows-1252", useChardet=True): + """Initialises the HTMLInputStream. + + HTMLInputStream(source, [encoding]) -> Normalized stream from source + for use by html5lib. + + source can be either a file-object, local filename or a string. + + The optional encoding parameter must be a string that indicates + the encoding. 
If specified, that encoding will be used, + regardless of any BOM or later declaration (such as in a meta + element) + + """ + # Raw Stream - for unicode objects this will encode to utf-8 and set + # self.charEncoding as appropriate + self.rawStream = self.openStream(source) + + HTMLUnicodeInputStream.__init__(self, self.rawStream) + + # Encoding Information + # Number of bytes to use when looking for a meta element with + # encoding information + self.numBytesMeta = 1024 + # Number of bytes to use when using detecting encoding using chardet + self.numBytesChardet = 100 + # Things from args + self.override_encoding = override_encoding + self.transport_encoding = transport_encoding + self.same_origin_parent_encoding = same_origin_parent_encoding + self.likely_encoding = likely_encoding + self.default_encoding = default_encoding + + # Determine encoding + self.charEncoding = self.determineEncoding(useChardet) + assert self.charEncoding[0] is not None + + # Call superclass + self.reset() + + def reset(self): + self.dataStream = self.charEncoding[0].codec_info.streamreader(self.rawStream, 'replace') + HTMLUnicodeInputStream.reset(self) + + def openStream(self, source): + """Produces a file object from source. + + source can be either a file object, local filename or a string. + + """ + # Already a file object + if hasattr(source, 'read'): + stream = source + else: + stream = BytesIO(source) + + try: + stream.seek(stream.tell()) + except: # pylint:disable=bare-except + stream = BufferedStream(stream) + + return stream + + def determineEncoding(self, chardet=True): + # BOMs take precedence over everything + # This will also read past the BOM if present + charEncoding = self.detectBOM(), "certain" + if charEncoding[0] is not None: + return charEncoding + + # If we've been overriden, we've been overriden + charEncoding = lookupEncoding(self.override_encoding), "certain" + if charEncoding[0] is not None: + return charEncoding + + # Now check the transport layer + charEncoding = lookupEncoding(self.transport_encoding), "certain" + if charEncoding[0] is not None: + return charEncoding + + # Look for meta elements with encoding information + charEncoding = self.detectEncodingMeta(), "tentative" + if charEncoding[0] is not None: + return charEncoding + + # Parent document encoding + charEncoding = lookupEncoding(self.same_origin_parent_encoding), "tentative" + if charEncoding[0] is not None and not charEncoding[0].name.startswith("utf-16"): + return charEncoding + + # "likely" encoding + charEncoding = lookupEncoding(self.likely_encoding), "tentative" + if charEncoding[0] is not None: + return charEncoding + + # Guess with chardet, if available + if chardet: + try: + from pip._vendor.chardet.universaldetector import UniversalDetector + except ImportError: + pass + else: + buffers = [] + detector = UniversalDetector() + while not detector.done: + buffer = self.rawStream.read(self.numBytesChardet) + assert isinstance(buffer, bytes) + if not buffer: + break + buffers.append(buffer) + detector.feed(buffer) + detector.close() + encoding = lookupEncoding(detector.result['encoding']) + self.rawStream.seek(0) + if encoding is not None: + return encoding, "tentative" + + # Try the default encoding + charEncoding = lookupEncoding(self.default_encoding), "tentative" + if charEncoding[0] is not None: + return charEncoding + + # Fallback to html5lib's default if even that hasn't worked + return lookupEncoding("windows-1252"), "tentative" + + def changeEncoding(self, newEncoding): + assert self.charEncoding[1] != 
"certain" + newEncoding = lookupEncoding(newEncoding) + if newEncoding is None: + return + if newEncoding.name in ("utf-16be", "utf-16le"): + newEncoding = lookupEncoding("utf-8") + assert newEncoding is not None + elif newEncoding == self.charEncoding[0]: + self.charEncoding = (self.charEncoding[0], "certain") + else: + self.rawStream.seek(0) + self.charEncoding = (newEncoding, "certain") + self.reset() + raise _ReparseException("Encoding changed from %s to %s" % (self.charEncoding[0], newEncoding)) + + def detectBOM(self): + """Attempts to detect at BOM at the start of the stream. If + an encoding can be determined from the BOM return the name of the + encoding otherwise return None""" + bomDict = { + codecs.BOM_UTF8: 'utf-8', + codecs.BOM_UTF16_LE: 'utf-16le', codecs.BOM_UTF16_BE: 'utf-16be', + codecs.BOM_UTF32_LE: 'utf-32le', codecs.BOM_UTF32_BE: 'utf-32be' + } + + # Go to beginning of file and read in 4 bytes + string = self.rawStream.read(4) + assert isinstance(string, bytes) + + # Try detecting the BOM using bytes from the string + encoding = bomDict.get(string[:3]) # UTF-8 + seek = 3 + if not encoding: + # Need to detect UTF-32 before UTF-16 + encoding = bomDict.get(string) # UTF-32 + seek = 4 + if not encoding: + encoding = bomDict.get(string[:2]) # UTF-16 + seek = 2 + + # Set the read position past the BOM if one was found, otherwise + # set it to the start of the stream + if encoding: + self.rawStream.seek(seek) + return lookupEncoding(encoding) + else: + self.rawStream.seek(0) + return None + + def detectEncodingMeta(self): + """Report the encoding declared by the meta element + """ + buffer = self.rawStream.read(self.numBytesMeta) + assert isinstance(buffer, bytes) + parser = EncodingParser(buffer) + self.rawStream.seek(0) + encoding = parser.getEncoding() + + if encoding is not None and encoding.name in ("utf-16be", "utf-16le"): + encoding = lookupEncoding("utf-8") + + return encoding + + +class EncodingBytes(bytes): + """String-like object with an associated position and various extra methods + If the position is ever greater than the string length then an exception is + raised""" + def __new__(self, value): + assert isinstance(value, bytes) + return bytes.__new__(self, value.lower()) + + def __init__(self, value): + # pylint:disable=unused-argument + self._position = -1 + + def __iter__(self): + return self + + def __next__(self): + p = self._position = self._position + 1 + if p >= len(self): + raise StopIteration + elif p < 0: + raise TypeError + return self[p:p + 1] + + def next(self): + # Py2 compat + return self.__next__() + + def previous(self): + p = self._position + if p >= len(self): + raise StopIteration + elif p < 0: + raise TypeError + self._position = p = p - 1 + return self[p:p + 1] + + def setPosition(self, position): + if self._position >= len(self): + raise StopIteration + self._position = position + + def getPosition(self): + if self._position >= len(self): + raise StopIteration + if self._position >= 0: + return self._position + else: + return None + + position = property(getPosition, setPosition) + + def getCurrentByte(self): + return self[self.position:self.position + 1] + + currentByte = property(getCurrentByte) + + def skip(self, chars=spaceCharactersBytes): + """Skip past a list of characters""" + p = self.position # use property for the error-checking + while p < len(self): + c = self[p:p + 1] + if c not in chars: + self._position = p + return c + p += 1 + self._position = p + return None + + def skipUntil(self, chars): + p = self.position + while p 
< len(self): + c = self[p:p + 1] + if c in chars: + self._position = p + return c + p += 1 + self._position = p + return None + + def matchBytes(self, bytes): + """Look for a sequence of bytes at the start of a string. If the bytes + are found return True and advance the position to the byte after the + match. Otherwise return False and leave the position alone""" + p = self.position + data = self[p:p + len(bytes)] + rv = data.startswith(bytes) + if rv: + self.position += len(bytes) + return rv + + def jumpTo(self, bytes): + """Look for the next sequence of bytes matching a given sequence. If + a match is found advance the position to the last byte of the match""" + newPosition = self[self.position:].find(bytes) + if newPosition > -1: + # XXX: This is ugly, but I can't see a nicer way to fix this. + if self._position == -1: + self._position = 0 + self._position += (newPosition + len(bytes) - 1) + return True + else: + raise StopIteration + + +class EncodingParser(object): + """Mini parser for detecting character encoding from meta elements""" + + def __init__(self, data): + """string - the data to work on for encoding detection""" + self.data = EncodingBytes(data) + self.encoding = None + + def getEncoding(self): + methodDispatch = ( + (b"<!--", self.handleComment), + (b"<meta", self.handleMeta), + (b"</", self.handlePossibleEndTag), + (b"<!", self.handleOther), + (b"<?", self.handleOther), + (b"<", self.handlePossibleStartTag)) + for _ in self.data: + keepParsing = True + for key, method in methodDispatch: + if self.data.matchBytes(key): + try: + keepParsing = method() + break + except StopIteration: + keepParsing = False + break + if not keepParsing: + break + + return self.encoding + + def handleComment(self): + """Skip over comments""" + return self.data.jumpTo(b"-->") + + def handleMeta(self): + if self.data.currentByte not in spaceCharactersBytes: + # if we have <meta not followed by a space so just keep going + return True + # We have a valid meta element we want to search for attributes + hasPragma = False + pendingEncoding = None + while True: + # Try to find the next attribute after the current position + attr = self.getAttribute() + if attr is None: + return True + else: + if attr[0] == b"http-equiv": + hasPragma = attr[1] == b"content-type" + if hasPragma and pendingEncoding is not None: + self.encoding = pendingEncoding + return False + elif attr[0] == b"charset": + tentativeEncoding = attr[1] + codec = lookupEncoding(tentativeEncoding) + if codec is not None: + self.encoding = codec + return False + elif attr[0] == b"content": + contentParser = ContentAttrParser(EncodingBytes(attr[1])) + tentativeEncoding = contentParser.parse() + if tentativeEncoding is not None: + codec = lookupEncoding(tentativeEncoding) + if codec is not None: + if hasPragma: + self.encoding = codec + return False + else: + pendingEncoding = codec + + def handlePossibleStartTag(self): + return self.handlePossibleTag(False) + + def handlePossibleEndTag(self): + next(self.data) + return self.handlePossibleTag(True) + + def handlePossibleTag(self, endTag): + data = self.data + if data.currentByte not in asciiLettersBytes: + # If the next byte is not an ascii letter either ignore this + # fragment (possible start tag case) or treat it according to + # handleOther + if endTag: + data.previous() + self.handleOther() + return True + + c = data.skipUntil(spacesAngleBrackets) + if c == b"<": + # return to the first step in the overall "two step" algorithm + # reprocessing the < byte + data.previous() + else: + # 
Read all attributes + attr = self.getAttribute() + while attr is not None: + attr = self.getAttribute() + return True + + def handleOther(self): + return self.data.jumpTo(b">") + + def getAttribute(self): + """Return a name,value pair for the next attribute in the stream, + if one is found, or None""" + data = self.data + # Step 1 (skip chars) + c = data.skip(spaceCharactersBytes | frozenset([b"/"])) + assert c is None or len(c) == 1 + # Step 2 + if c in (b">", None): + return None + # Step 3 + attrName = [] + attrValue = [] + # Step 4 attribute name + while True: + if c == b"=" and attrName: + break + elif c in spaceCharactersBytes: + # Step 6! + c = data.skip() + break + elif c in (b"/", b">"): + return b"".join(attrName), b"" + elif c in asciiUppercaseBytes: + attrName.append(c.lower()) + elif c is None: + return None + else: + attrName.append(c) + # Step 5 + c = next(data) + # Step 7 + if c != b"=": + data.previous() + return b"".join(attrName), b"" + # Step 8 + next(data) + # Step 9 + c = data.skip() + # Step 10 + if c in (b"'", b'"'): + # 10.1 + quoteChar = c + while True: + # 10.2 + c = next(data) + # 10.3 + if c == quoteChar: + next(data) + return b"".join(attrName), b"".join(attrValue) + # 10.4 + elif c in asciiUppercaseBytes: + attrValue.append(c.lower()) + # 10.5 + else: + attrValue.append(c) + elif c == b">": + return b"".join(attrName), b"" + elif c in asciiUppercaseBytes: + attrValue.append(c.lower()) + elif c is None: + return None + else: + attrValue.append(c) + # Step 11 + while True: + c = next(data) + if c in spacesAngleBrackets: + return b"".join(attrName), b"".join(attrValue) + elif c in asciiUppercaseBytes: + attrValue.append(c.lower()) + elif c is None: + return None + else: + attrValue.append(c) + + +class ContentAttrParser(object): + def __init__(self, data): + assert isinstance(data, bytes) + self.data = data + + def parse(self): + try: + # Check if the attr name is charset + # otherwise return + self.data.jumpTo(b"charset") + self.data.position += 1 + self.data.skip() + if not self.data.currentByte == b"=": + # If there is no = sign keep looking for attrs + return None + self.data.position += 1 + self.data.skip() + # Look for an encoding between matching quote marks + if self.data.currentByte in (b'"', b"'"): + quoteMark = self.data.currentByte + self.data.position += 1 + oldPosition = self.data.position + if self.data.jumpTo(quoteMark): + return self.data[oldPosition:self.data.position] + else: + return None + else: + # Unquoted value + oldPosition = self.data.position + try: + self.data.skipUntil(spaceCharactersBytes) + return self.data[oldPosition:self.data.position] + except StopIteration: + # Return the whole remaining value + return self.data[oldPosition:] + except StopIteration: + return None + + +def lookupEncoding(encoding): + """Return the python codec name corresponding to an encoding or None if the + string doesn't correspond to a valid encoding.""" + if isinstance(encoding, binary_type): + try: + encoding = encoding.decode("ascii") + except UnicodeDecodeError: + return None + + if encoding is not None: + try: + return webencodings.lookup(encoding) + except AttributeError: + return None + else: + return None diff --git a/env/lib/python3.6/site-packages/pip/_vendor/html5lib/_tokenizer.py b/env/lib/python3.6/site-packages/pip/_vendor/html5lib/_tokenizer.py new file mode 100644 index 0000000..178f6e7 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/html5lib/_tokenizer.py @@ -0,0 +1,1721 @@ +from __future__ import absolute_import, division, 
unicode_literals + +from pip._vendor.six import unichr as chr + +from collections import deque + +from .constants import spaceCharacters +from .constants import entities +from .constants import asciiLetters, asciiUpper2Lower +from .constants import digits, hexDigits, EOF +from .constants import tokenTypes, tagTokenTypes +from .constants import replacementCharacters + +from ._inputstream import HTMLInputStream + +from ._trie import Trie + +entitiesTrie = Trie(entities) + + +class HTMLTokenizer(object): + """ This class takes care of tokenizing HTML. + + * self.currentToken + Holds the token that is currently being processed. + + * self.state + Holds a reference to the method to be invoked... XXX + + * self.stream + Points to HTMLInputStream object. + """ + + def __init__(self, stream, parser=None, **kwargs): + + self.stream = HTMLInputStream(stream, **kwargs) + self.parser = parser + + # Setup the initial tokenizer state + self.escapeFlag = False + self.lastFourChars = [] + self.state = self.dataState + self.escape = False + + # The current token being created + self.currentToken = None + super(HTMLTokenizer, self).__init__() + + def __iter__(self): + """ This is where the magic happens. + + We do our usually processing through the states and when we have a token + to return we yield the token which pauses processing until the next token + is requested. + """ + self.tokenQueue = deque([]) + # Start processing. When EOF is reached self.state will return False + # instead of True and the loop will terminate. + while self.state(): + while self.stream.errors: + yield {"type": tokenTypes["ParseError"], "data": self.stream.errors.pop(0)} + while self.tokenQueue: + yield self.tokenQueue.popleft() + + def consumeNumberEntity(self, isHex): + """This function returns either U+FFFD or the character based on the + decimal or hexadecimal representation. It also discards ";" if present. + If not present self.tokenQueue.append({"type": tokenTypes["ParseError"]}) is invoked. + """ + + allowed = digits + radix = 10 + if isHex: + allowed = hexDigits + radix = 16 + + charStack = [] + + # Consume all the characters that are in range while making sure we + # don't hit an EOF. + c = self.stream.char() + while c in allowed and c is not EOF: + charStack.append(c) + c = self.stream.char() + + # Convert the set of characters consumed to an int. + charAsInt = int("".join(charStack), radix) + + # Certain characters get replaced with others + if charAsInt in replacementCharacters: + char = replacementCharacters[charAsInt] + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "illegal-codepoint-for-numeric-entity", + "datavars": {"charAsInt": charAsInt}}) + elif ((0xD800 <= charAsInt <= 0xDFFF) or + (charAsInt > 0x10FFFF)): + char = "\uFFFD" + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "illegal-codepoint-for-numeric-entity", + "datavars": {"charAsInt": charAsInt}}) + else: + # Should speed up this check somehow (e.g. 
move the set to a constant) + if ((0x0001 <= charAsInt <= 0x0008) or + (0x000E <= charAsInt <= 0x001F) or + (0x007F <= charAsInt <= 0x009F) or + (0xFDD0 <= charAsInt <= 0xFDEF) or + charAsInt in frozenset([0x000B, 0xFFFE, 0xFFFF, 0x1FFFE, + 0x1FFFF, 0x2FFFE, 0x2FFFF, 0x3FFFE, + 0x3FFFF, 0x4FFFE, 0x4FFFF, 0x5FFFE, + 0x5FFFF, 0x6FFFE, 0x6FFFF, 0x7FFFE, + 0x7FFFF, 0x8FFFE, 0x8FFFF, 0x9FFFE, + 0x9FFFF, 0xAFFFE, 0xAFFFF, 0xBFFFE, + 0xBFFFF, 0xCFFFE, 0xCFFFF, 0xDFFFE, + 0xDFFFF, 0xEFFFE, 0xEFFFF, 0xFFFFE, + 0xFFFFF, 0x10FFFE, 0x10FFFF])): + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": + "illegal-codepoint-for-numeric-entity", + "datavars": {"charAsInt": charAsInt}}) + try: + # Try/except needed as UCS-2 Python builds' unichar only works + # within the BMP. + char = chr(charAsInt) + except ValueError: + v = charAsInt - 0x10000 + char = chr(0xD800 | (v >> 10)) + chr(0xDC00 | (v & 0x3FF)) + + # Discard the ; if present. Otherwise, put it back on the queue and + # invoke parseError on parser. + if c != ";": + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "numeric-entity-without-semicolon"}) + self.stream.unget(c) + + return char + + def consumeEntity(self, allowedChar=None, fromAttribute=False): + # Initialise to the default output for when no entity is matched + output = "&" + + charStack = [self.stream.char()] + if (charStack[0] in spaceCharacters or charStack[0] in (EOF, "<", "&") or + (allowedChar is not None and allowedChar == charStack[0])): + self.stream.unget(charStack[0]) + + elif charStack[0] == "#": + # Read the next character to see if it's hex or decimal + hex = False + charStack.append(self.stream.char()) + if charStack[-1] in ("x", "X"): + hex = True + charStack.append(self.stream.char()) + + # charStack[-1] should be the first digit + if (hex and charStack[-1] in hexDigits) \ + or (not hex and charStack[-1] in digits): + # At least one digit found, so consume the whole number + self.stream.unget(charStack[-1]) + output = self.consumeNumberEntity(hex) + else: + # No digits found + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "expected-numeric-entity"}) + self.stream.unget(charStack.pop()) + output = "&" + "".join(charStack) + + else: + # At this point in the process might have named entity. Entities + # are stored in the global variable "entities". + # + # Consume characters and compare to these to a substring of the + # entity names in the list until the substring no longer matches. + while (charStack[-1] is not EOF): + if not entitiesTrie.has_keys_with_prefix("".join(charStack)): + break + charStack.append(self.stream.char()) + + # At this point we have a string that starts with some characters + # that may match an entity + # Try to find the longest entity the string will match to take care + # of ¬i for instance. 
+ try: + entityName = entitiesTrie.longest_prefix("".join(charStack[:-1])) + entityLength = len(entityName) + except KeyError: + entityName = None + + if entityName is not None: + if entityName[-1] != ";": + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "named-entity-without-semicolon"}) + if (entityName[-1] != ";" and fromAttribute and + (charStack[entityLength] in asciiLetters or + charStack[entityLength] in digits or + charStack[entityLength] == "=")): + self.stream.unget(charStack.pop()) + output = "&" + "".join(charStack) + else: + output = entities[entityName] + self.stream.unget(charStack.pop()) + output += "".join(charStack[entityLength:]) + else: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "expected-named-entity"}) + self.stream.unget(charStack.pop()) + output = "&" + "".join(charStack) + + if fromAttribute: + self.currentToken["data"][-1][1] += output + else: + if output in spaceCharacters: + tokenType = "SpaceCharacters" + else: + tokenType = "Characters" + self.tokenQueue.append({"type": tokenTypes[tokenType], "data": output}) + + def processEntityInAttribute(self, allowedChar): + """This method replaces the need for "entityInAttributeValueState". + """ + self.consumeEntity(allowedChar=allowedChar, fromAttribute=True) + + def emitCurrentToken(self): + """This method is a generic handler for emitting the tags. It also sets + the state to "data" because that's what's needed after a token has been + emitted. + """ + token = self.currentToken + # Add token to the queue to be yielded + if (token["type"] in tagTokenTypes): + token["name"] = token["name"].translate(asciiUpper2Lower) + if token["type"] == tokenTypes["EndTag"]: + if token["data"]: + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "attributes-in-end-tag"}) + if token["selfClosing"]: + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "self-closing-flag-on-end-tag"}) + self.tokenQueue.append(token) + self.state = self.dataState + + # Below are the various tokenizer states worked out. + def dataState(self): + data = self.stream.char() + if data == "&": + self.state = self.entityDataState + elif data == "<": + self.state = self.tagOpenState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.tokenQueue.append({"type": tokenTypes["Characters"], + "data": "\u0000"}) + elif data is EOF: + # Tokenization ends. + return False + elif data in spaceCharacters: + # Directly after emitting a token you switch back to the "data + # state". At that point spaceCharacters are important so they are + # emitted separately. + self.tokenQueue.append({"type": tokenTypes["SpaceCharacters"], "data": + data + self.stream.charsUntil(spaceCharacters, True)}) + # No need to update lastFourChars here, since the first space will + # have already been appended to lastFourChars and will have broken + # any <!-- or --> sequences + else: + chars = self.stream.charsUntil(("&", "<", "\u0000")) + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": + data + chars}) + return True + + def entityDataState(self): + self.consumeEntity() + self.state = self.dataState + return True + + def rcdataState(self): + data = self.stream.char() + if data == "&": + self.state = self.characterReferenceInRcdata + elif data == "<": + self.state = self.rcdataLessThanSignState + elif data == EOF: + # Tokenization ends. 
+ return False + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.tokenQueue.append({"type": tokenTypes["Characters"], + "data": "\uFFFD"}) + elif data in spaceCharacters: + # Directly after emitting a token you switch back to the "data + # state". At that point spaceCharacters are important so they are + # emitted separately. + self.tokenQueue.append({"type": tokenTypes["SpaceCharacters"], "data": + data + self.stream.charsUntil(spaceCharacters, True)}) + # No need to update lastFourChars here, since the first space will + # have already been appended to lastFourChars and will have broken + # any <!-- or --> sequences + else: + chars = self.stream.charsUntil(("&", "<", "\u0000")) + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": + data + chars}) + return True + + def characterReferenceInRcdata(self): + self.consumeEntity() + self.state = self.rcdataState + return True + + def rawtextState(self): + data = self.stream.char() + if data == "<": + self.state = self.rawtextLessThanSignState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.tokenQueue.append({"type": tokenTypes["Characters"], + "data": "\uFFFD"}) + elif data == EOF: + # Tokenization ends. + return False + else: + chars = self.stream.charsUntil(("<", "\u0000")) + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": + data + chars}) + return True + + def scriptDataState(self): + data = self.stream.char() + if data == "<": + self.state = self.scriptDataLessThanSignState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.tokenQueue.append({"type": tokenTypes["Characters"], + "data": "\uFFFD"}) + elif data == EOF: + # Tokenization ends. + return False + else: + chars = self.stream.charsUntil(("<", "\u0000")) + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": + data + chars}) + return True + + def plaintextState(self): + data = self.stream.char() + if data == EOF: + # Tokenization ends. + return False + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.tokenQueue.append({"type": tokenTypes["Characters"], + "data": "\uFFFD"}) + else: + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": + data + self.stream.charsUntil("\u0000")}) + return True + + def tagOpenState(self): + data = self.stream.char() + if data == "!": + self.state = self.markupDeclarationOpenState + elif data == "/": + self.state = self.closeTagOpenState + elif data in asciiLetters: + self.currentToken = {"type": tokenTypes["StartTag"], + "name": data, "data": [], + "selfClosing": False, + "selfClosingAcknowledged": False} + self.state = self.tagNameState + elif data == ">": + # XXX In theory it could be something besides a tag name. But + # do we really care? + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "expected-tag-name-but-got-right-bracket"}) + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<>"}) + self.state = self.dataState + elif data == "?": + # XXX In theory it could be something besides a tag name. But + # do we really care? 
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "expected-tag-name-but-got-question-mark"}) + self.stream.unget(data) + self.state = self.bogusCommentState + else: + # XXX + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "expected-tag-name"}) + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"}) + self.stream.unget(data) + self.state = self.dataState + return True + + def closeTagOpenState(self): + data = self.stream.char() + if data in asciiLetters: + self.currentToken = {"type": tokenTypes["EndTag"], "name": data, + "data": [], "selfClosing": False} + self.state = self.tagNameState + elif data == ">": + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "expected-closing-tag-but-got-right-bracket"}) + self.state = self.dataState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "expected-closing-tag-but-got-eof"}) + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "</"}) + self.state = self.dataState + else: + # XXX data can be _'_... + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "expected-closing-tag-but-got-char", + "datavars": {"data": data}}) + self.stream.unget(data) + self.state = self.bogusCommentState + return True + + def tagNameState(self): + data = self.stream.char() + if data in spaceCharacters: + self.state = self.beforeAttributeNameState + elif data == ">": + self.emitCurrentToken() + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-tag-name"}) + self.state = self.dataState + elif data == "/": + self.state = self.selfClosingStartTagState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["name"] += "\uFFFD" + else: + self.currentToken["name"] += data + # (Don't use charsUntil here, because tag names are + # very short and it's faster to not do anything fancy) + return True + + def rcdataLessThanSignState(self): + data = self.stream.char() + if data == "/": + self.temporaryBuffer = "" + self.state = self.rcdataEndTagOpenState + else: + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"}) + self.stream.unget(data) + self.state = self.rcdataState + return True + + def rcdataEndTagOpenState(self): + data = self.stream.char() + if data in asciiLetters: + self.temporaryBuffer += data + self.state = self.rcdataEndTagNameState + else: + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "</"}) + self.stream.unget(data) + self.state = self.rcdataState + return True + + def rcdataEndTagNameState(self): + appropriate = self.currentToken and self.currentToken["name"].lower() == self.temporaryBuffer.lower() + data = self.stream.char() + if data in spaceCharacters and appropriate: + self.currentToken = {"type": tokenTypes["EndTag"], + "name": self.temporaryBuffer, + "data": [], "selfClosing": False} + self.state = self.beforeAttributeNameState + elif data == "/" and appropriate: + self.currentToken = {"type": tokenTypes["EndTag"], + "name": self.temporaryBuffer, + "data": [], "selfClosing": False} + self.state = self.selfClosingStartTagState + elif data == ">" and appropriate: + self.currentToken = {"type": tokenTypes["EndTag"], + "name": self.temporaryBuffer, + "data": [], "selfClosing": False} + self.emitCurrentToken() + self.state = self.dataState + elif data in asciiLetters: + self.temporaryBuffer += data + else: + self.tokenQueue.append({"type": 
tokenTypes["Characters"], + "data": "</" + self.temporaryBuffer}) + self.stream.unget(data) + self.state = self.rcdataState + return True + + def rawtextLessThanSignState(self): + data = self.stream.char() + if data == "/": + self.temporaryBuffer = "" + self.state = self.rawtextEndTagOpenState + else: + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"}) + self.stream.unget(data) + self.state = self.rawtextState + return True + + def rawtextEndTagOpenState(self): + data = self.stream.char() + if data in asciiLetters: + self.temporaryBuffer += data + self.state = self.rawtextEndTagNameState + else: + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "</"}) + self.stream.unget(data) + self.state = self.rawtextState + return True + + def rawtextEndTagNameState(self): + appropriate = self.currentToken and self.currentToken["name"].lower() == self.temporaryBuffer.lower() + data = self.stream.char() + if data in spaceCharacters and appropriate: + self.currentToken = {"type": tokenTypes["EndTag"], + "name": self.temporaryBuffer, + "data": [], "selfClosing": False} + self.state = self.beforeAttributeNameState + elif data == "/" and appropriate: + self.currentToken = {"type": tokenTypes["EndTag"], + "name": self.temporaryBuffer, + "data": [], "selfClosing": False} + self.state = self.selfClosingStartTagState + elif data == ">" and appropriate: + self.currentToken = {"type": tokenTypes["EndTag"], + "name": self.temporaryBuffer, + "data": [], "selfClosing": False} + self.emitCurrentToken() + self.state = self.dataState + elif data in asciiLetters: + self.temporaryBuffer += data + else: + self.tokenQueue.append({"type": tokenTypes["Characters"], + "data": "</" + self.temporaryBuffer}) + self.stream.unget(data) + self.state = self.rawtextState + return True + + def scriptDataLessThanSignState(self): + data = self.stream.char() + if data == "/": + self.temporaryBuffer = "" + self.state = self.scriptDataEndTagOpenState + elif data == "!": + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<!"}) + self.state = self.scriptDataEscapeStartState + else: + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"}) + self.stream.unget(data) + self.state = self.scriptDataState + return True + + def scriptDataEndTagOpenState(self): + data = self.stream.char() + if data in asciiLetters: + self.temporaryBuffer += data + self.state = self.scriptDataEndTagNameState + else: + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "</"}) + self.stream.unget(data) + self.state = self.scriptDataState + return True + + def scriptDataEndTagNameState(self): + appropriate = self.currentToken and self.currentToken["name"].lower() == self.temporaryBuffer.lower() + data = self.stream.char() + if data in spaceCharacters and appropriate: + self.currentToken = {"type": tokenTypes["EndTag"], + "name": self.temporaryBuffer, + "data": [], "selfClosing": False} + self.state = self.beforeAttributeNameState + elif data == "/" and appropriate: + self.currentToken = {"type": tokenTypes["EndTag"], + "name": self.temporaryBuffer, + "data": [], "selfClosing": False} + self.state = self.selfClosingStartTagState + elif data == ">" and appropriate: + self.currentToken = {"type": tokenTypes["EndTag"], + "name": self.temporaryBuffer, + "data": [], "selfClosing": False} + self.emitCurrentToken() + self.state = self.dataState + elif data in asciiLetters: + self.temporaryBuffer += data + else: + self.tokenQueue.append({"type": tokenTypes["Characters"], + "data": "</" + 
self.temporaryBuffer}) + self.stream.unget(data) + self.state = self.scriptDataState + return True + + def scriptDataEscapeStartState(self): + data = self.stream.char() + if data == "-": + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"}) + self.state = self.scriptDataEscapeStartDashState + else: + self.stream.unget(data) + self.state = self.scriptDataState + return True + + def scriptDataEscapeStartDashState(self): + data = self.stream.char() + if data == "-": + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"}) + self.state = self.scriptDataEscapedDashDashState + else: + self.stream.unget(data) + self.state = self.scriptDataState + return True + + def scriptDataEscapedState(self): + data = self.stream.char() + if data == "-": + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"}) + self.state = self.scriptDataEscapedDashState + elif data == "<": + self.state = self.scriptDataEscapedLessThanSignState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.tokenQueue.append({"type": tokenTypes["Characters"], + "data": "\uFFFD"}) + elif data == EOF: + self.state = self.dataState + else: + chars = self.stream.charsUntil(("<", "-", "\u0000")) + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": + data + chars}) + return True + + def scriptDataEscapedDashState(self): + data = self.stream.char() + if data == "-": + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"}) + self.state = self.scriptDataEscapedDashDashState + elif data == "<": + self.state = self.scriptDataEscapedLessThanSignState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.tokenQueue.append({"type": tokenTypes["Characters"], + "data": "\uFFFD"}) + self.state = self.scriptDataEscapedState + elif data == EOF: + self.state = self.dataState + else: + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data}) + self.state = self.scriptDataEscapedState + return True + + def scriptDataEscapedDashDashState(self): + data = self.stream.char() + if data == "-": + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"}) + elif data == "<": + self.state = self.scriptDataEscapedLessThanSignState + elif data == ">": + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": ">"}) + self.state = self.scriptDataState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.tokenQueue.append({"type": tokenTypes["Characters"], + "data": "\uFFFD"}) + self.state = self.scriptDataEscapedState + elif data == EOF: + self.state = self.dataState + else: + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data}) + self.state = self.scriptDataEscapedState + return True + + def scriptDataEscapedLessThanSignState(self): + data = self.stream.char() + if data == "/": + self.temporaryBuffer = "" + self.state = self.scriptDataEscapedEndTagOpenState + elif data in asciiLetters: + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<" + data}) + self.temporaryBuffer = data + self.state = self.scriptDataDoubleEscapeStartState + else: + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"}) + self.stream.unget(data) + self.state = self.scriptDataEscapedState + return True + + def scriptDataEscapedEndTagOpenState(self): + data = self.stream.char() + if data in asciiLetters: + 
self.temporaryBuffer = data + self.state = self.scriptDataEscapedEndTagNameState + else: + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "</"}) + self.stream.unget(data) + self.state = self.scriptDataEscapedState + return True + + def scriptDataEscapedEndTagNameState(self): + appropriate = self.currentToken and self.currentToken["name"].lower() == self.temporaryBuffer.lower() + data = self.stream.char() + if data in spaceCharacters and appropriate: + self.currentToken = {"type": tokenTypes["EndTag"], + "name": self.temporaryBuffer, + "data": [], "selfClosing": False} + self.state = self.beforeAttributeNameState + elif data == "/" and appropriate: + self.currentToken = {"type": tokenTypes["EndTag"], + "name": self.temporaryBuffer, + "data": [], "selfClosing": False} + self.state = self.selfClosingStartTagState + elif data == ">" and appropriate: + self.currentToken = {"type": tokenTypes["EndTag"], + "name": self.temporaryBuffer, + "data": [], "selfClosing": False} + self.emitCurrentToken() + self.state = self.dataState + elif data in asciiLetters: + self.temporaryBuffer += data + else: + self.tokenQueue.append({"type": tokenTypes["Characters"], + "data": "</" + self.temporaryBuffer}) + self.stream.unget(data) + self.state = self.scriptDataEscapedState + return True + + def scriptDataDoubleEscapeStartState(self): + data = self.stream.char() + if data in (spaceCharacters | frozenset(("/", ">"))): + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data}) + if self.temporaryBuffer.lower() == "script": + self.state = self.scriptDataDoubleEscapedState + else: + self.state = self.scriptDataEscapedState + elif data in asciiLetters: + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data}) + self.temporaryBuffer += data + else: + self.stream.unget(data) + self.state = self.scriptDataEscapedState + return True + + def scriptDataDoubleEscapedState(self): + data = self.stream.char() + if data == "-": + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"}) + self.state = self.scriptDataDoubleEscapedDashState + elif data == "<": + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"}) + self.state = self.scriptDataDoubleEscapedLessThanSignState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.tokenQueue.append({"type": tokenTypes["Characters"], + "data": "\uFFFD"}) + elif data == EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-script-in-script"}) + self.state = self.dataState + else: + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data}) + return True + + def scriptDataDoubleEscapedDashState(self): + data = self.stream.char() + if data == "-": + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"}) + self.state = self.scriptDataDoubleEscapedDashDashState + elif data == "<": + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"}) + self.state = self.scriptDataDoubleEscapedLessThanSignState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.tokenQueue.append({"type": tokenTypes["Characters"], + "data": "\uFFFD"}) + self.state = self.scriptDataDoubleEscapedState + elif data == EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-script-in-script"}) + self.state = self.dataState + else: + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": 
data}) + self.state = self.scriptDataDoubleEscapedState + return True + + def scriptDataDoubleEscapedDashDashState(self): + data = self.stream.char() + if data == "-": + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"}) + elif data == "<": + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"}) + self.state = self.scriptDataDoubleEscapedLessThanSignState + elif data == ">": + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": ">"}) + self.state = self.scriptDataState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.tokenQueue.append({"type": tokenTypes["Characters"], + "data": "\uFFFD"}) + self.state = self.scriptDataDoubleEscapedState + elif data == EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-script-in-script"}) + self.state = self.dataState + else: + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data}) + self.state = self.scriptDataDoubleEscapedState + return True + + def scriptDataDoubleEscapedLessThanSignState(self): + data = self.stream.char() + if data == "/": + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "/"}) + self.temporaryBuffer = "" + self.state = self.scriptDataDoubleEscapeEndState + else: + self.stream.unget(data) + self.state = self.scriptDataDoubleEscapedState + return True + + def scriptDataDoubleEscapeEndState(self): + data = self.stream.char() + if data in (spaceCharacters | frozenset(("/", ">"))): + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data}) + if self.temporaryBuffer.lower() == "script": + self.state = self.scriptDataEscapedState + else: + self.state = self.scriptDataDoubleEscapedState + elif data in asciiLetters: + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data}) + self.temporaryBuffer += data + else: + self.stream.unget(data) + self.state = self.scriptDataDoubleEscapedState + return True + + def beforeAttributeNameState(self): + data = self.stream.char() + if data in spaceCharacters: + self.stream.charsUntil(spaceCharacters, True) + elif data in asciiLetters: + self.currentToken["data"].append([data, ""]) + self.state = self.attributeNameState + elif data == ">": + self.emitCurrentToken() + elif data == "/": + self.state = self.selfClosingStartTagState + elif data in ("'", '"', "=", "<"): + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "invalid-character-in-attribute-name"}) + self.currentToken["data"].append([data, ""]) + self.state = self.attributeNameState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["data"].append(["\uFFFD", ""]) + self.state = self.attributeNameState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "expected-attribute-name-but-got-eof"}) + self.state = self.dataState + else: + self.currentToken["data"].append([data, ""]) + self.state = self.attributeNameState + return True + + def attributeNameState(self): + data = self.stream.char() + leavingThisState = True + emitToken = False + if data == "=": + self.state = self.beforeAttributeValueState + elif data in asciiLetters: + self.currentToken["data"][-1][0] += data +\ + self.stream.charsUntil(asciiLetters, True) + leavingThisState = False + elif data == ">": + # XXX If we emit here the attributes are converted to a dict + # without being checked and when the code below runs we error + # 
because data is a dict not a list + emitToken = True + elif data in spaceCharacters: + self.state = self.afterAttributeNameState + elif data == "/": + self.state = self.selfClosingStartTagState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["data"][-1][0] += "\uFFFD" + leavingThisState = False + elif data in ("'", '"', "<"): + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": + "invalid-character-in-attribute-name"}) + self.currentToken["data"][-1][0] += data + leavingThisState = False + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "eof-in-attribute-name"}) + self.state = self.dataState + else: + self.currentToken["data"][-1][0] += data + leavingThisState = False + + if leavingThisState: + # Attributes are not dropped at this stage. That happens when the + # start tag token is emitted so values can still be safely appended + # to attributes, but we do want to report the parse error in time. + self.currentToken["data"][-1][0] = ( + self.currentToken["data"][-1][0].translate(asciiUpper2Lower)) + for name, _ in self.currentToken["data"][:-1]: + if self.currentToken["data"][-1][0] == name: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "duplicate-attribute"}) + break + # XXX Fix for above XXX + if emitToken: + self.emitCurrentToken() + return True + + def afterAttributeNameState(self): + data = self.stream.char() + if data in spaceCharacters: + self.stream.charsUntil(spaceCharacters, True) + elif data == "=": + self.state = self.beforeAttributeValueState + elif data == ">": + self.emitCurrentToken() + elif data in asciiLetters: + self.currentToken["data"].append([data, ""]) + self.state = self.attributeNameState + elif data == "/": + self.state = self.selfClosingStartTagState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["data"].append(["\uFFFD", ""]) + self.state = self.attributeNameState + elif data in ("'", '"', "<"): + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "invalid-character-after-attribute-name"}) + self.currentToken["data"].append([data, ""]) + self.state = self.attributeNameState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "expected-end-of-tag-but-got-eof"}) + self.state = self.dataState + else: + self.currentToken["data"].append([data, ""]) + self.state = self.attributeNameState + return True + + def beforeAttributeValueState(self): + data = self.stream.char() + if data in spaceCharacters: + self.stream.charsUntil(spaceCharacters, True) + elif data == "\"": + self.state = self.attributeValueDoubleQuotedState + elif data == "&": + self.state = self.attributeValueUnQuotedState + self.stream.unget(data) + elif data == "'": + self.state = self.attributeValueSingleQuotedState + elif data == ">": + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "expected-attribute-value-but-got-right-bracket"}) + self.emitCurrentToken() + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["data"][-1][1] += "\uFFFD" + self.state = self.attributeValueUnQuotedState + elif data in ("=", "<", "`"): + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "equals-in-unquoted-attribute-value"}) + self.currentToken["data"][-1][1] += data + self.state = 
self.attributeValueUnQuotedState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "expected-attribute-value-but-got-eof"}) + self.state = self.dataState + else: + self.currentToken["data"][-1][1] += data + self.state = self.attributeValueUnQuotedState + return True + + def attributeValueDoubleQuotedState(self): + data = self.stream.char() + if data == "\"": + self.state = self.afterAttributeValueState + elif data == "&": + self.processEntityInAttribute('"') + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["data"][-1][1] += "\uFFFD" + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-attribute-value-double-quote"}) + self.state = self.dataState + else: + self.currentToken["data"][-1][1] += data +\ + self.stream.charsUntil(("\"", "&", "\u0000")) + return True + + def attributeValueSingleQuotedState(self): + data = self.stream.char() + if data == "'": + self.state = self.afterAttributeValueState + elif data == "&": + self.processEntityInAttribute("'") + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["data"][-1][1] += "\uFFFD" + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-attribute-value-single-quote"}) + self.state = self.dataState + else: + self.currentToken["data"][-1][1] += data +\ + self.stream.charsUntil(("'", "&", "\u0000")) + return True + + def attributeValueUnQuotedState(self): + data = self.stream.char() + if data in spaceCharacters: + self.state = self.beforeAttributeNameState + elif data == "&": + self.processEntityInAttribute(">") + elif data == ">": + self.emitCurrentToken() + elif data in ('"', "'", "=", "<", "`"): + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-character-in-unquoted-attribute-value"}) + self.currentToken["data"][-1][1] += data + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["data"][-1][1] += "\uFFFD" + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-attribute-value-no-quotes"}) + self.state = self.dataState + else: + self.currentToken["data"][-1][1] += data + self.stream.charsUntil( + frozenset(("&", ">", '"', "'", "=", "<", "`", "\u0000")) | spaceCharacters) + return True + + def afterAttributeValueState(self): + data = self.stream.char() + if data in spaceCharacters: + self.state = self.beforeAttributeNameState + elif data == ">": + self.emitCurrentToken() + elif data == "/": + self.state = self.selfClosingStartTagState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-EOF-after-attribute-value"}) + self.stream.unget(data) + self.state = self.dataState + else: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-character-after-attribute-value"}) + self.stream.unget(data) + self.state = self.beforeAttributeNameState + return True + + def selfClosingStartTagState(self): + data = self.stream.char() + if data == ">": + self.currentToken["selfClosing"] = True + self.emitCurrentToken() + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": + "unexpected-EOF-after-solidus-in-tag"}) + self.stream.unget(data) + self.state = self.dataState + else: + 
self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-character-after-solidus-in-tag"}) + self.stream.unget(data) + self.state = self.beforeAttributeNameState + return True + + def bogusCommentState(self): + # Make a new comment token and give it as value all the characters + # until the first > or EOF (charsUntil checks for EOF automatically) + # and emit it. + data = self.stream.charsUntil(">") + data = data.replace("\u0000", "\uFFFD") + self.tokenQueue.append( + {"type": tokenTypes["Comment"], "data": data}) + + # Eat the character directly after the bogus comment which is either a + # ">" or an EOF. + self.stream.char() + self.state = self.dataState + return True + + def markupDeclarationOpenState(self): + charStack = [self.stream.char()] + if charStack[-1] == "-": + charStack.append(self.stream.char()) + if charStack[-1] == "-": + self.currentToken = {"type": tokenTypes["Comment"], "data": ""} + self.state = self.commentStartState + return True + elif charStack[-1] in ('d', 'D'): + matched = True + for expected in (('o', 'O'), ('c', 'C'), ('t', 'T'), + ('y', 'Y'), ('p', 'P'), ('e', 'E')): + charStack.append(self.stream.char()) + if charStack[-1] not in expected: + matched = False + break + if matched: + self.currentToken = {"type": tokenTypes["Doctype"], + "name": "", + "publicId": None, "systemId": None, + "correct": True} + self.state = self.doctypeState + return True + elif (charStack[-1] == "[" and + self.parser is not None and + self.parser.tree.openElements and + self.parser.tree.openElements[-1].namespace != self.parser.tree.defaultNamespace): + matched = True + for expected in ["C", "D", "A", "T", "A", "["]: + charStack.append(self.stream.char()) + if charStack[-1] != expected: + matched = False + break + if matched: + self.state = self.cdataSectionState + return True + + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "expected-dashes-or-doctype"}) + + while charStack: + self.stream.unget(charStack.pop()) + self.state = self.bogusCommentState + return True + + def commentStartState(self): + data = self.stream.char() + if data == "-": + self.state = self.commentStartDashState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["data"] += "\uFFFD" + elif data == ">": + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "incorrect-comment"}) + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-comment"}) + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.currentToken["data"] += data + self.state = self.commentState + return True + + def commentStartDashState(self): + data = self.stream.char() + if data == "-": + self.state = self.commentEndState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["data"] += "-\uFFFD" + elif data == ">": + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "incorrect-comment"}) + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-comment"}) + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.currentToken["data"] += "-" + data + self.state = self.commentState + return True + + def 
commentState(self): + data = self.stream.char() + if data == "-": + self.state = self.commentEndDashState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["data"] += "\uFFFD" + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "eof-in-comment"}) + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.currentToken["data"] += data + \ + self.stream.charsUntil(("-", "\u0000")) + return True + + def commentEndDashState(self): + data = self.stream.char() + if data == "-": + self.state = self.commentEndState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["data"] += "-\uFFFD" + self.state = self.commentState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-comment-end-dash"}) + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.currentToken["data"] += "-" + data + self.state = self.commentState + return True + + def commentEndState(self): + data = self.stream.char() + if data == ">": + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["data"] += "--\uFFFD" + self.state = self.commentState + elif data == "!": + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-bang-after-double-dash-in-comment"}) + self.state = self.commentEndBangState + elif data == "-": + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-dash-after-double-dash-in-comment"}) + self.currentToken["data"] += data + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-comment-double-dash"}) + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + # XXX + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-char-in-comment"}) + self.currentToken["data"] += "--" + data + self.state = self.commentState + return True + + def commentEndBangState(self): + data = self.stream.char() + if data == ">": + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + elif data == "-": + self.currentToken["data"] += "--!" + self.state = self.commentEndDashState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["data"] += "--!\uFFFD" + self.state = self.commentState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-comment-end-bang-state"}) + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.currentToken["data"] += "--!" 
+ data + self.state = self.commentState + return True + + def doctypeState(self): + data = self.stream.char() + if data in spaceCharacters: + self.state = self.beforeDoctypeNameState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "expected-doctype-name-but-got-eof"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "need-space-after-doctype"}) + self.stream.unget(data) + self.state = self.beforeDoctypeNameState + return True + + def beforeDoctypeNameState(self): + data = self.stream.char() + if data in spaceCharacters: + pass + elif data == ">": + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "expected-doctype-name-but-got-right-bracket"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["name"] = "\uFFFD" + self.state = self.doctypeNameState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "expected-doctype-name-but-got-eof"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.currentToken["name"] = data + self.state = self.doctypeNameState + return True + + def doctypeNameState(self): + data = self.stream.char() + if data in spaceCharacters: + self.currentToken["name"] = self.currentToken["name"].translate(asciiUpper2Lower) + self.state = self.afterDoctypeNameState + elif data == ">": + self.currentToken["name"] = self.currentToken["name"].translate(asciiUpper2Lower) + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["name"] += "\uFFFD" + self.state = self.doctypeNameState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-doctype-name"}) + self.currentToken["correct"] = False + self.currentToken["name"] = self.currentToken["name"].translate(asciiUpper2Lower) + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.currentToken["name"] += data + return True + + def afterDoctypeNameState(self): + data = self.stream.char() + if data in spaceCharacters: + pass + elif data == ">": + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + elif data is EOF: + self.currentToken["correct"] = False + self.stream.unget(data) + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-doctype"}) + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + if data in ("p", "P"): + matched = True + for expected in (("u", "U"), ("b", "B"), ("l", "L"), + ("i", "I"), ("c", "C")): + data = self.stream.char() + if data not in expected: + matched = False + break + if matched: + self.state = self.afterDoctypePublicKeywordState + return True + elif data in ("s", "S"): + matched = True + for expected in (("y", "Y"), ("s", "S"), ("t", "T"), + ("e", "E"), ("m", "M")): + data = self.stream.char() + if data not in expected: + matched = False + break + if matched: + self.state = self.afterDoctypeSystemKeywordState + return True + + # All the characters read before the current 'data' will be + # [a-zA-Z], 
so they're garbage in the bogus doctype and can be + # discarded; only the latest character might be '>' or EOF + # and needs to be ungetted + self.stream.unget(data) + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "expected-space-or-right-bracket-in-doctype", "datavars": + {"data": data}}) + self.currentToken["correct"] = False + self.state = self.bogusDoctypeState + + return True + + def afterDoctypePublicKeywordState(self): + data = self.stream.char() + if data in spaceCharacters: + self.state = self.beforeDoctypePublicIdentifierState + elif data in ("'", '"'): + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-char-in-doctype"}) + self.stream.unget(data) + self.state = self.beforeDoctypePublicIdentifierState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-doctype"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.stream.unget(data) + self.state = self.beforeDoctypePublicIdentifierState + return True + + def beforeDoctypePublicIdentifierState(self): + data = self.stream.char() + if data in spaceCharacters: + pass + elif data == "\"": + self.currentToken["publicId"] = "" + self.state = self.doctypePublicIdentifierDoubleQuotedState + elif data == "'": + self.currentToken["publicId"] = "" + self.state = self.doctypePublicIdentifierSingleQuotedState + elif data == ">": + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-end-of-doctype"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-doctype"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-char-in-doctype"}) + self.currentToken["correct"] = False + self.state = self.bogusDoctypeState + return True + + def doctypePublicIdentifierDoubleQuotedState(self): + data = self.stream.char() + if data == "\"": + self.state = self.afterDoctypePublicIdentifierState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["publicId"] += "\uFFFD" + elif data == ">": + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-end-of-doctype"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-doctype"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.currentToken["publicId"] += data + return True + + def doctypePublicIdentifierSingleQuotedState(self): + data = self.stream.char() + if data == "'": + self.state = self.afterDoctypePublicIdentifierState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["publicId"] += "\uFFFD" + elif data == ">": + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-end-of-doctype"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + elif data is EOF: + self.tokenQueue.append({"type": 
tokenTypes["ParseError"], "data": + "eof-in-doctype"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.currentToken["publicId"] += data + return True + + def afterDoctypePublicIdentifierState(self): + data = self.stream.char() + if data in spaceCharacters: + self.state = self.betweenDoctypePublicAndSystemIdentifiersState + elif data == ">": + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + elif data == '"': + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-char-in-doctype"}) + self.currentToken["systemId"] = "" + self.state = self.doctypeSystemIdentifierDoubleQuotedState + elif data == "'": + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-char-in-doctype"}) + self.currentToken["systemId"] = "" + self.state = self.doctypeSystemIdentifierSingleQuotedState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-doctype"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-char-in-doctype"}) + self.currentToken["correct"] = False + self.state = self.bogusDoctypeState + return True + + def betweenDoctypePublicAndSystemIdentifiersState(self): + data = self.stream.char() + if data in spaceCharacters: + pass + elif data == ">": + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + elif data == '"': + self.currentToken["systemId"] = "" + self.state = self.doctypeSystemIdentifierDoubleQuotedState + elif data == "'": + self.currentToken["systemId"] = "" + self.state = self.doctypeSystemIdentifierSingleQuotedState + elif data == EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-doctype"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-char-in-doctype"}) + self.currentToken["correct"] = False + self.state = self.bogusDoctypeState + return True + + def afterDoctypeSystemKeywordState(self): + data = self.stream.char() + if data in spaceCharacters: + self.state = self.beforeDoctypeSystemIdentifierState + elif data in ("'", '"'): + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-char-in-doctype"}) + self.stream.unget(data) + self.state = self.beforeDoctypeSystemIdentifierState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-doctype"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.stream.unget(data) + self.state = self.beforeDoctypeSystemIdentifierState + return True + + def beforeDoctypeSystemIdentifierState(self): + data = self.stream.char() + if data in spaceCharacters: + pass + elif data == "\"": + self.currentToken["systemId"] = "" + self.state = self.doctypeSystemIdentifierDoubleQuotedState + elif data == "'": + self.currentToken["systemId"] = "" + self.state = self.doctypeSystemIdentifierSingleQuotedState + elif data == ">": + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-char-in-doctype"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + elif data is EOF: + 
self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-doctype"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-char-in-doctype"}) + self.currentToken["correct"] = False + self.state = self.bogusDoctypeState + return True + + def doctypeSystemIdentifierDoubleQuotedState(self): + data = self.stream.char() + if data == "\"": + self.state = self.afterDoctypeSystemIdentifierState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["systemId"] += "\uFFFD" + elif data == ">": + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-end-of-doctype"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-doctype"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.currentToken["systemId"] += data + return True + + def doctypeSystemIdentifierSingleQuotedState(self): + data = self.stream.char() + if data == "'": + self.state = self.afterDoctypeSystemIdentifierState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["systemId"] += "\uFFFD" + elif data == ">": + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-end-of-doctype"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-doctype"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.currentToken["systemId"] += data + return True + + def afterDoctypeSystemIdentifierState(self): + data = self.stream.char() + if data in spaceCharacters: + pass + elif data == ">": + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-doctype"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-char-in-doctype"}) + self.state = self.bogusDoctypeState + return True + + def bogusDoctypeState(self): + data = self.stream.char() + if data == ">": + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + elif data is EOF: + # XXX EMIT + self.stream.unget(data) + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + pass + return True + + def cdataSectionState(self): + data = [] + while True: + data.append(self.stream.charsUntil("]")) + data.append(self.stream.charsUntil(">")) + char = self.stream.char() + if char == EOF: + break + else: + assert char == ">" + if data[-1][-2:] == "]]": + data[-1] = data[-1][:-2] + break + else: + data.append(char) + + data = "".join(data) # pylint:disable=redefined-variable-type + # Deal with null here rather than in the parser + nullCount = data.count("\u0000") + if nullCount > 0: + for _ in range(nullCount): + self.tokenQueue.append({"type": tokenTypes["ParseError"], 
+ "data": "invalid-codepoint"}) + data = data.replace("\u0000", "\uFFFD") + if data: + self.tokenQueue.append({"type": tokenTypes["Characters"], + "data": data}) + self.state = self.dataState + return True diff --git a/env/lib/python3.6/site-packages/pip/_vendor/html5lib/_trie/_base.py b/env/lib/python3.6/site-packages/pip/_vendor/html5lib/_trie/_base.py new file mode 100644 index 0000000..a1158bb --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/html5lib/_trie/_base.py @@ -0,0 +1,37 @@ +from __future__ import absolute_import, division, unicode_literals + +from collections import Mapping + + +class Trie(Mapping): + """Abstract base class for tries""" + + def keys(self, prefix=None): + # pylint:disable=arguments-differ + keys = super(Trie, self).keys() + + if prefix is None: + return set(keys) + + return {x for x in keys if x.startswith(prefix)} + + def has_keys_with_prefix(self, prefix): + for key in self.keys(): + if key.startswith(prefix): + return True + + return False + + def longest_prefix(self, prefix): + if prefix in self: + return prefix + + for i in range(1, len(prefix) + 1): + if prefix[:-i] in self: + return prefix[:-i] + + raise KeyError(prefix) + + def longest_prefix_item(self, prefix): + lprefix = self.longest_prefix(prefix) + return (lprefix, self[lprefix]) diff --git a/env/lib/python3.6/site-packages/pip/_vendor/html5lib/_trie/datrie.py b/env/lib/python3.6/site-packages/pip/_vendor/html5lib/_trie/datrie.py new file mode 100644 index 0000000..e2e5f86 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/html5lib/_trie/datrie.py @@ -0,0 +1,44 @@ +from __future__ import absolute_import, division, unicode_literals + +from datrie import Trie as DATrie +from pip._vendor.six import text_type + +from ._base import Trie as ABCTrie + + +class Trie(ABCTrie): + def __init__(self, data): + chars = set() + for key in data.keys(): + if not isinstance(key, text_type): + raise TypeError("All keys must be strings") + for char in key: + chars.add(char) + + self._data = DATrie("".join(chars)) + for key, value in data.items(): + self._data[key] = value + + def __contains__(self, key): + return key in self._data + + def __len__(self): + return len(self._data) + + def __iter__(self): + raise NotImplementedError() + + def __getitem__(self, key): + return self._data[key] + + def keys(self, prefix=None): + return self._data.keys(prefix) + + def has_keys_with_prefix(self, prefix): + return self._data.has_keys_with_prefix(prefix) + + def longest_prefix(self, prefix): + return self._data.longest_prefix(prefix) + + def longest_prefix_item(self, prefix): + return self._data.longest_prefix_item(prefix) diff --git a/env/lib/python3.6/site-packages/pip/_vendor/html5lib/_trie/py.py b/env/lib/python3.6/site-packages/pip/_vendor/html5lib/_trie/py.py new file mode 100644 index 0000000..c178b21 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/html5lib/_trie/py.py @@ -0,0 +1,67 @@ +from __future__ import absolute_import, division, unicode_literals +from pip._vendor.six import text_type + +from bisect import bisect_left + +from ._base import Trie as ABCTrie + + +class Trie(ABCTrie): + def __init__(self, data): + if not all(isinstance(x, text_type) for x in data.keys()): + raise TypeError("All keys must be strings") + + self._data = data + self._keys = sorted(data.keys()) + self._cachestr = "" + self._cachepoints = (0, len(data)) + + def __contains__(self, key): + return key in self._data + + def __len__(self): + return len(self._data) + + def __iter__(self): + return 
iter(self._data) + + def __getitem__(self, key): + return self._data[key] + + def keys(self, prefix=None): + if prefix is None or prefix == "" or not self._keys: + return set(self._keys) + + if prefix.startswith(self._cachestr): + lo, hi = self._cachepoints + start = i = bisect_left(self._keys, prefix, lo, hi) + else: + start = i = bisect_left(self._keys, prefix) + + keys = set() + if start == len(self._keys): + return keys + + while self._keys[i].startswith(prefix): + keys.add(self._keys[i]) + i += 1 + + self._cachestr = prefix + self._cachepoints = (start, i) + + return keys + + def has_keys_with_prefix(self, prefix): + if prefix in self._data: + return True + + if prefix.startswith(self._cachestr): + lo, hi = self._cachepoints + i = bisect_left(self._keys, prefix, lo, hi) + else: + i = bisect_left(self._keys, prefix) + + if i == len(self._keys): + return False + + return self._keys[i].startswith(prefix) diff --git a/env/lib/python3.6/site-packages/pip/_vendor/html5lib/_utils.py b/env/lib/python3.6/site-packages/pip/_vendor/html5lib/_utils.py new file mode 100644 index 0000000..0703afb --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/html5lib/_utils.py @@ -0,0 +1,124 @@ +from __future__ import absolute_import, division, unicode_literals + +from types import ModuleType + +from pip._vendor.six import text_type + +try: + import xml.etree.cElementTree as default_etree +except ImportError: + import xml.etree.ElementTree as default_etree + + +__all__ = ["default_etree", "MethodDispatcher", "isSurrogatePair", + "surrogatePairToCodepoint", "moduleFactoryFactory", + "supports_lone_surrogates"] + + +# Platforms not supporting lone surrogates (\uD800-\uDFFF) should be +# caught by the below test. In general this would be any platform +# using UTF-16 as its encoding of unicode strings, such as +# Jython. This is because UTF-16 itself is based on the use of such +# surrogates, and there is no mechanism to further escape such +# escapes. +try: + _x = eval('"\\uD800"') # pylint:disable=eval-used + if not isinstance(_x, text_type): + # We need this with u"" because of http://bugs.jython.org/issue2039 + _x = eval('u"\\uD800"') # pylint:disable=eval-used + assert isinstance(_x, text_type) +except: # pylint:disable=bare-except + supports_lone_surrogates = False +else: + supports_lone_surrogates = True + + +class MethodDispatcher(dict): + """Dict with 2 special properties: + + On initiation, keys that are lists, sets or tuples are converted to + multiple keys so accessing any one of the items in the original + list-like object returns the matching value + + md = MethodDispatcher({("foo", "bar"):"baz"}) + md["foo"] == "baz" + + A default value which can be set through the default attribute. + """ + + def __init__(self, items=()): + # Using _dictEntries instead of directly assigning to self is about + # twice as fast. Please do careful performance testing before changing + # anything here. 
+ _dictEntries = [] + for name, value in items: + if isinstance(name, (list, tuple, frozenset, set)): + for item in name: + _dictEntries.append((item, value)) + else: + _dictEntries.append((name, value)) + dict.__init__(self, _dictEntries) + assert len(self) == len(_dictEntries) + self.default = None + + def __getitem__(self, key): + return dict.get(self, key, self.default) + + +# Some utility functions to deal with weirdness around UCS2 vs UCS4 +# python builds + +def isSurrogatePair(data): + return (len(data) == 2 and + ord(data[0]) >= 0xD800 and ord(data[0]) <= 0xDBFF and + ord(data[1]) >= 0xDC00 and ord(data[1]) <= 0xDFFF) + + +def surrogatePairToCodepoint(data): + char_val = (0x10000 + (ord(data[0]) - 0xD800) * 0x400 + + (ord(data[1]) - 0xDC00)) + return char_val + +# Module Factory Factory (no, this isn't Java, I know) +# Here to stop this being duplicated all over the place. + + +def moduleFactoryFactory(factory): + moduleCache = {} + + def moduleFactory(baseModule, *args, **kwargs): + if isinstance(ModuleType.__name__, type("")): + name = "_%s_factory" % baseModule.__name__ + else: + name = b"_%s_factory" % baseModule.__name__ + + kwargs_tuple = tuple(kwargs.items()) + + try: + return moduleCache[name][args][kwargs_tuple] + except KeyError: + mod = ModuleType(name) + objs = factory(baseModule, *args, **kwargs) + mod.__dict__.update(objs) + if "name" not in moduleCache: + moduleCache[name] = {} + if "args" not in moduleCache[name]: + moduleCache[name][args] = {} + if "kwargs" not in moduleCache[name][args]: + moduleCache[name][args][kwargs_tuple] = {} + moduleCache[name][args][kwargs_tuple] = mod + return mod + + return moduleFactory + + +def memoize(func): + cache = {} + + def wrapped(*args, **kwargs): + key = (tuple(args), tuple(kwargs.items())) + if key not in cache: + cache[key] = func(*args, **kwargs) + return cache[key] + + return wrapped diff --git a/env/lib/python3.6/site-packages/pip/_vendor/html5lib/constants.py b/env/lib/python3.6/site-packages/pip/_vendor/html5lib/constants.py new file mode 100644 index 0000000..1ff8041 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/html5lib/constants.py @@ -0,0 +1,2947 @@ +from __future__ import absolute_import, division, unicode_literals + +import string + +EOF = None + +E = { + "null-character": + "Null character in input stream, replaced with U+FFFD.", + "invalid-codepoint": + "Invalid codepoint in stream.", + "incorrectly-placed-solidus": + "Solidus (/) incorrectly placed in tag.", + "incorrect-cr-newline-entity": + "Incorrect CR newline entity, replaced with LF.", + "illegal-windows-1252-entity": + "Entity used with illegal number (windows-1252 reference).", + "cant-convert-numeric-entity": + "Numeric entity couldn't be converted to character " + "(codepoint U+%(charAsInt)08x).", + "illegal-codepoint-for-numeric-entity": + "Numeric entity represents an illegal codepoint: " + "U+%(charAsInt)08x.", + "numeric-entity-without-semicolon": + "Numeric entity didn't end with ';'.", + "expected-numeric-entity-but-got-eof": + "Numeric entity expected. Got end of file instead.", + "expected-numeric-entity": + "Numeric entity expected but none found.", + "named-entity-without-semicolon": + "Named entity didn't end with ';'.", + "expected-named-entity": + "Named entity expected. Got none.", + "attributes-in-end-tag": + "End tag contains unexpected attributes.", + 'self-closing-flag-on-end-tag': + "End tag contains unexpected self-closing flag.", + "expected-tag-name-but-got-right-bracket": + "Expected tag name. 
Got '>' instead.", + "expected-tag-name-but-got-question-mark": + "Expected tag name. Got '?' instead. (HTML doesn't " + "support processing instructions.)", + "expected-tag-name": + "Expected tag name. Got something else instead", + "expected-closing-tag-but-got-right-bracket": + "Expected closing tag. Got '>' instead. Ignoring '</>'.", + "expected-closing-tag-but-got-eof": + "Expected closing tag. Unexpected end of file.", + "expected-closing-tag-but-got-char": + "Expected closing tag. Unexpected character '%(data)s' found.", + "eof-in-tag-name": + "Unexpected end of file in the tag name.", + "expected-attribute-name-but-got-eof": + "Unexpected end of file. Expected attribute name instead.", + "eof-in-attribute-name": + "Unexpected end of file in attribute name.", + "invalid-character-in-attribute-name": + "Invalid character in attribute name", + "duplicate-attribute": + "Dropped duplicate attribute on tag.", + "expected-end-of-tag-name-but-got-eof": + "Unexpected end of file. Expected = or end of tag.", + "expected-attribute-value-but-got-eof": + "Unexpected end of file. Expected attribute value.", + "expected-attribute-value-but-got-right-bracket": + "Expected attribute value. Got '>' instead.", + 'equals-in-unquoted-attribute-value': + "Unexpected = in unquoted attribute", + 'unexpected-character-in-unquoted-attribute-value': + "Unexpected character in unquoted attribute", + "invalid-character-after-attribute-name": + "Unexpected character after attribute name.", + "unexpected-character-after-attribute-value": + "Unexpected character after attribute value.", + "eof-in-attribute-value-double-quote": + "Unexpected end of file in attribute value (\").", + "eof-in-attribute-value-single-quote": + "Unexpected end of file in attribute value (').", + "eof-in-attribute-value-no-quotes": + "Unexpected end of file in attribute value.", + "unexpected-EOF-after-solidus-in-tag": + "Unexpected end of file in tag. Expected >", + "unexpected-character-after-solidus-in-tag": + "Unexpected character after / in tag. Expected >", + "expected-dashes-or-doctype": + "Expected '--' or 'DOCTYPE'. Not found.", + "unexpected-bang-after-double-dash-in-comment": + "Unexpected ! after -- in comment", + "unexpected-space-after-double-dash-in-comment": + "Unexpected space after -- in comment", + "incorrect-comment": + "Incorrect comment.", + "eof-in-comment": + "Unexpected end of file in comment.", + "eof-in-comment-end-dash": + "Unexpected end of file in comment (-)", + "unexpected-dash-after-double-dash-in-comment": + "Unexpected '-' after '--' found in comment.", + "eof-in-comment-double-dash": + "Unexpected end of file in comment (--).", + "eof-in-comment-end-space-state": + "Unexpected end of file in comment.", + "eof-in-comment-end-bang-state": + "Unexpected end of file in comment.", + "unexpected-char-in-comment": + "Unexpected character in comment found.", + "need-space-after-doctype": + "No space after literal string 'DOCTYPE'.", + "expected-doctype-name-but-got-right-bracket": + "Unexpected > character. Expected DOCTYPE name.", + "expected-doctype-name-but-got-eof": + "Unexpected end of file. Expected DOCTYPE name.", + "eof-in-doctype-name": + "Unexpected end of file in DOCTYPE name.", + "eof-in-doctype": + "Unexpected end of file in DOCTYPE.", + "expected-space-or-right-bracket-in-doctype": + "Expected space or '>'. 
Got '%(data)s'", + "unexpected-end-of-doctype": + "Unexpected end of DOCTYPE.", + "unexpected-char-in-doctype": + "Unexpected character in DOCTYPE.", + "eof-in-innerhtml": + "XXX innerHTML EOF", + "unexpected-doctype": + "Unexpected DOCTYPE. Ignored.", + "non-html-root": + "html needs to be the first start tag.", + "expected-doctype-but-got-eof": + "Unexpected End of file. Expected DOCTYPE.", + "unknown-doctype": + "Erroneous DOCTYPE.", + "expected-doctype-but-got-chars": + "Unexpected non-space characters. Expected DOCTYPE.", + "expected-doctype-but-got-start-tag": + "Unexpected start tag (%(name)s). Expected DOCTYPE.", + "expected-doctype-but-got-end-tag": + "Unexpected end tag (%(name)s). Expected DOCTYPE.", + "end-tag-after-implied-root": + "Unexpected end tag (%(name)s) after the (implied) root element.", + "expected-named-closing-tag-but-got-eof": + "Unexpected end of file. Expected end tag (%(name)s).", + "two-heads-are-not-better-than-one": + "Unexpected start tag head in existing head. Ignored.", + "unexpected-end-tag": + "Unexpected end tag (%(name)s). Ignored.", + "unexpected-start-tag-out-of-my-head": + "Unexpected start tag (%(name)s) that can be in head. Moved.", + "unexpected-start-tag": + "Unexpected start tag (%(name)s).", + "missing-end-tag": + "Missing end tag (%(name)s).", + "missing-end-tags": + "Missing end tags (%(name)s).", + "unexpected-start-tag-implies-end-tag": + "Unexpected start tag (%(startName)s) " + "implies end tag (%(endName)s).", + "unexpected-start-tag-treated-as": + "Unexpected start tag (%(originalName)s). Treated as %(newName)s.", + "deprecated-tag": + "Unexpected start tag %(name)s. Don't use it!", + "unexpected-start-tag-ignored": + "Unexpected start tag %(name)s. Ignored.", + "expected-one-end-tag-but-got-another": + "Unexpected end tag (%(gotName)s). " + "Missing end tag (%(expectedName)s).", + "end-tag-too-early": + "End tag (%(name)s) seen too early. Expected other end tag.", + "end-tag-too-early-named": + "Unexpected end tag (%(gotName)s). Expected end tag (%(expectedName)s).", + "end-tag-too-early-ignored": + "End tag (%(name)s) seen too early. Ignored.", + "adoption-agency-1.1": + "End tag (%(name)s) violates step 1, " + "paragraph 1 of the adoption agency algorithm.", + "adoption-agency-1.2": + "End tag (%(name)s) violates step 1, " + "paragraph 2 of the adoption agency algorithm.", + "adoption-agency-1.3": + "End tag (%(name)s) violates step 1, " + "paragraph 3 of the adoption agency algorithm.", + "adoption-agency-4.4": + "End tag (%(name)s) violates step 4, " + "paragraph 4 of the adoption agency algorithm.", + "unexpected-end-tag-treated-as": + "Unexpected end tag (%(originalName)s). 
Treated as %(newName)s.", + "no-end-tag": + "This element (%(name)s) has no end tag.", + "unexpected-implied-end-tag-in-table": + "Unexpected implied end tag (%(name)s) in the table phase.", + "unexpected-implied-end-tag-in-table-body": + "Unexpected implied end tag (%(name)s) in the table body phase.", + "unexpected-char-implies-table-voodoo": + "Unexpected non-space characters in " + "table context caused voodoo mode.", + "unexpected-hidden-input-in-table": + "Unexpected input with type hidden in table context.", + "unexpected-form-in-table": + "Unexpected form in table context.", + "unexpected-start-tag-implies-table-voodoo": + "Unexpected start tag (%(name)s) in " + "table context caused voodoo mode.", + "unexpected-end-tag-implies-table-voodoo": + "Unexpected end tag (%(name)s) in " + "table context caused voodoo mode.", + "unexpected-cell-in-table-body": + "Unexpected table cell start tag (%(name)s) " + "in the table body phase.", + "unexpected-cell-end-tag": + "Got table cell end tag (%(name)s) " + "while required end tags are missing.", + "unexpected-end-tag-in-table-body": + "Unexpected end tag (%(name)s) in the table body phase. Ignored.", + "unexpected-implied-end-tag-in-table-row": + "Unexpected implied end tag (%(name)s) in the table row phase.", + "unexpected-end-tag-in-table-row": + "Unexpected end tag (%(name)s) in the table row phase. Ignored.", + "unexpected-select-in-select": + "Unexpected select start tag in the select phase " + "treated as select end tag.", + "unexpected-input-in-select": + "Unexpected input start tag in the select phase.", + "unexpected-start-tag-in-select": + "Unexpected start tag token (%(name)s in the select phase. " + "Ignored.", + "unexpected-end-tag-in-select": + "Unexpected end tag (%(name)s) in the select phase. Ignored.", + "unexpected-table-element-start-tag-in-select-in-table": + "Unexpected table element start tag (%(name)s) in the select in table phase.", + "unexpected-table-element-end-tag-in-select-in-table": + "Unexpected table element end tag (%(name)s) in the select in table phase.", + "unexpected-char-after-body": + "Unexpected non-space characters in the after body phase.", + "unexpected-start-tag-after-body": + "Unexpected start tag token (%(name)s)" + " in the after body phase.", + "unexpected-end-tag-after-body": + "Unexpected end tag token (%(name)s)" + " in the after body phase.", + "unexpected-char-in-frameset": + "Unexpected characters in the frameset phase. Characters ignored.", + "unexpected-start-tag-in-frameset": + "Unexpected start tag token (%(name)s)" + " in the frameset phase. Ignored.", + "unexpected-frameset-in-frameset-innerhtml": + "Unexpected end tag token (frameset) " + "in the frameset phase (innerHTML).", + "unexpected-end-tag-in-frameset": + "Unexpected end tag token (%(name)s)" + " in the frameset phase. Ignored.", + "unexpected-char-after-frameset": + "Unexpected non-space characters in the " + "after frameset phase. Ignored.", + "unexpected-start-tag-after-frameset": + "Unexpected start tag (%(name)s)" + " in the after frameset phase. Ignored.", + "unexpected-end-tag-after-frameset": + "Unexpected end tag (%(name)s)" + " in the after frameset phase. Ignored.", + "unexpected-end-tag-after-body-innerhtml": + "Unexpected end tag after body(innerHtml)", + "expected-eof-but-got-char": + "Unexpected non-space characters. Expected end of file.", + "expected-eof-but-got-start-tag": + "Unexpected start tag (%(name)s)" + ". 
Expected end of file.", + "expected-eof-but-got-end-tag": + "Unexpected end tag (%(name)s)" + ". Expected end of file.", + "eof-in-table": + "Unexpected end of file. Expected table content.", + "eof-in-select": + "Unexpected end of file. Expected select content.", + "eof-in-frameset": + "Unexpected end of file. Expected frameset content.", + "eof-in-script-in-script": + "Unexpected end of file. Expected script content.", + "eof-in-foreign-lands": + "Unexpected end of file. Expected foreign content", + "non-void-element-with-trailing-solidus": + "Trailing solidus not allowed on element %(name)s", + "unexpected-html-element-in-foreign-content": + "Element %(name)s not allowed in a non-html context", + "unexpected-end-tag-before-html": + "Unexpected end tag (%(name)s) before html.", + "unexpected-inhead-noscript-tag": + "Element %(name)s not allowed in a inhead-noscript context", + "eof-in-head-noscript": + "Unexpected end of file. Expected inhead-noscript content", + "char-in-head-noscript": + "Unexpected non-space character. Expected inhead-noscript content", + "XXX-undefined-error": + "Undefined error (this sucks and should be fixed)", +} + +namespaces = { + "html": "http://www.w3.org/1999/xhtml", + "mathml": "http://www.w3.org/1998/Math/MathML", + "svg": "http://www.w3.org/2000/svg", + "xlink": "http://www.w3.org/1999/xlink", + "xml": "http://www.w3.org/XML/1998/namespace", + "xmlns": "http://www.w3.org/2000/xmlns/" +} + +scopingElements = frozenset([ + (namespaces["html"], "applet"), + (namespaces["html"], "caption"), + (namespaces["html"], "html"), + (namespaces["html"], "marquee"), + (namespaces["html"], "object"), + (namespaces["html"], "table"), + (namespaces["html"], "td"), + (namespaces["html"], "th"), + (namespaces["mathml"], "mi"), + (namespaces["mathml"], "mo"), + (namespaces["mathml"], "mn"), + (namespaces["mathml"], "ms"), + (namespaces["mathml"], "mtext"), + (namespaces["mathml"], "annotation-xml"), + (namespaces["svg"], "foreignObject"), + (namespaces["svg"], "desc"), + (namespaces["svg"], "title"), +]) + +formattingElements = frozenset([ + (namespaces["html"], "a"), + (namespaces["html"], "b"), + (namespaces["html"], "big"), + (namespaces["html"], "code"), + (namespaces["html"], "em"), + (namespaces["html"], "font"), + (namespaces["html"], "i"), + (namespaces["html"], "nobr"), + (namespaces["html"], "s"), + (namespaces["html"], "small"), + (namespaces["html"], "strike"), + (namespaces["html"], "strong"), + (namespaces["html"], "tt"), + (namespaces["html"], "u") +]) + +specialElements = frozenset([ + (namespaces["html"], "address"), + (namespaces["html"], "applet"), + (namespaces["html"], "area"), + (namespaces["html"], "article"), + (namespaces["html"], "aside"), + (namespaces["html"], "base"), + (namespaces["html"], "basefont"), + (namespaces["html"], "bgsound"), + (namespaces["html"], "blockquote"), + (namespaces["html"], "body"), + (namespaces["html"], "br"), + (namespaces["html"], "button"), + (namespaces["html"], "caption"), + (namespaces["html"], "center"), + (namespaces["html"], "col"), + (namespaces["html"], "colgroup"), + (namespaces["html"], "command"), + (namespaces["html"], "dd"), + (namespaces["html"], "details"), + (namespaces["html"], "dir"), + (namespaces["html"], "div"), + (namespaces["html"], "dl"), + (namespaces["html"], "dt"), + (namespaces["html"], "embed"), + (namespaces["html"], "fieldset"), + (namespaces["html"], "figure"), + (namespaces["html"], "footer"), + (namespaces["html"], "form"), + (namespaces["html"], "frame"), + (namespaces["html"], 
"frameset"), + (namespaces["html"], "h1"), + (namespaces["html"], "h2"), + (namespaces["html"], "h3"), + (namespaces["html"], "h4"), + (namespaces["html"], "h5"), + (namespaces["html"], "h6"), + (namespaces["html"], "head"), + (namespaces["html"], "header"), + (namespaces["html"], "hr"), + (namespaces["html"], "html"), + (namespaces["html"], "iframe"), + # Note that image is commented out in the spec as "this isn't an + # element that can end up on the stack, so it doesn't matter," + (namespaces["html"], "image"), + (namespaces["html"], "img"), + (namespaces["html"], "input"), + (namespaces["html"], "isindex"), + (namespaces["html"], "li"), + (namespaces["html"], "link"), + (namespaces["html"], "listing"), + (namespaces["html"], "marquee"), + (namespaces["html"], "menu"), + (namespaces["html"], "meta"), + (namespaces["html"], "nav"), + (namespaces["html"], "noembed"), + (namespaces["html"], "noframes"), + (namespaces["html"], "noscript"), + (namespaces["html"], "object"), + (namespaces["html"], "ol"), + (namespaces["html"], "p"), + (namespaces["html"], "param"), + (namespaces["html"], "plaintext"), + (namespaces["html"], "pre"), + (namespaces["html"], "script"), + (namespaces["html"], "section"), + (namespaces["html"], "select"), + (namespaces["html"], "style"), + (namespaces["html"], "table"), + (namespaces["html"], "tbody"), + (namespaces["html"], "td"), + (namespaces["html"], "textarea"), + (namespaces["html"], "tfoot"), + (namespaces["html"], "th"), + (namespaces["html"], "thead"), + (namespaces["html"], "title"), + (namespaces["html"], "tr"), + (namespaces["html"], "ul"), + (namespaces["html"], "wbr"), + (namespaces["html"], "xmp"), + (namespaces["svg"], "foreignObject") +]) + +htmlIntegrationPointElements = frozenset([ + (namespaces["mathml"], "annotation-xml"), + (namespaces["svg"], "foreignObject"), + (namespaces["svg"], "desc"), + (namespaces["svg"], "title") +]) + +mathmlTextIntegrationPointElements = frozenset([ + (namespaces["mathml"], "mi"), + (namespaces["mathml"], "mo"), + (namespaces["mathml"], "mn"), + (namespaces["mathml"], "ms"), + (namespaces["mathml"], "mtext") +]) + +adjustSVGAttributes = { + "attributename": "attributeName", + "attributetype": "attributeType", + "basefrequency": "baseFrequency", + "baseprofile": "baseProfile", + "calcmode": "calcMode", + "clippathunits": "clipPathUnits", + "contentscripttype": "contentScriptType", + "contentstyletype": "contentStyleType", + "diffuseconstant": "diffuseConstant", + "edgemode": "edgeMode", + "externalresourcesrequired": "externalResourcesRequired", + "filterres": "filterRes", + "filterunits": "filterUnits", + "glyphref": "glyphRef", + "gradienttransform": "gradientTransform", + "gradientunits": "gradientUnits", + "kernelmatrix": "kernelMatrix", + "kernelunitlength": "kernelUnitLength", + "keypoints": "keyPoints", + "keysplines": "keySplines", + "keytimes": "keyTimes", + "lengthadjust": "lengthAdjust", + "limitingconeangle": "limitingConeAngle", + "markerheight": "markerHeight", + "markerunits": "markerUnits", + "markerwidth": "markerWidth", + "maskcontentunits": "maskContentUnits", + "maskunits": "maskUnits", + "numoctaves": "numOctaves", + "pathlength": "pathLength", + "patterncontentunits": "patternContentUnits", + "patterntransform": "patternTransform", + "patternunits": "patternUnits", + "pointsatx": "pointsAtX", + "pointsaty": "pointsAtY", + "pointsatz": "pointsAtZ", + "preservealpha": "preserveAlpha", + "preserveaspectratio": "preserveAspectRatio", + "primitiveunits": "primitiveUnits", + "refx": "refX", + "refy": 
"refY", + "repeatcount": "repeatCount", + "repeatdur": "repeatDur", + "requiredextensions": "requiredExtensions", + "requiredfeatures": "requiredFeatures", + "specularconstant": "specularConstant", + "specularexponent": "specularExponent", + "spreadmethod": "spreadMethod", + "startoffset": "startOffset", + "stddeviation": "stdDeviation", + "stitchtiles": "stitchTiles", + "surfacescale": "surfaceScale", + "systemlanguage": "systemLanguage", + "tablevalues": "tableValues", + "targetx": "targetX", + "targety": "targetY", + "textlength": "textLength", + "viewbox": "viewBox", + "viewtarget": "viewTarget", + "xchannelselector": "xChannelSelector", + "ychannelselector": "yChannelSelector", + "zoomandpan": "zoomAndPan" +} + +adjustMathMLAttributes = {"definitionurl": "definitionURL"} + +adjustForeignAttributes = { + "xlink:actuate": ("xlink", "actuate", namespaces["xlink"]), + "xlink:arcrole": ("xlink", "arcrole", namespaces["xlink"]), + "xlink:href": ("xlink", "href", namespaces["xlink"]), + "xlink:role": ("xlink", "role", namespaces["xlink"]), + "xlink:show": ("xlink", "show", namespaces["xlink"]), + "xlink:title": ("xlink", "title", namespaces["xlink"]), + "xlink:type": ("xlink", "type", namespaces["xlink"]), + "xml:base": ("xml", "base", namespaces["xml"]), + "xml:lang": ("xml", "lang", namespaces["xml"]), + "xml:space": ("xml", "space", namespaces["xml"]), + "xmlns": (None, "xmlns", namespaces["xmlns"]), + "xmlns:xlink": ("xmlns", "xlink", namespaces["xmlns"]) +} + +unadjustForeignAttributes = dict([((ns, local), qname) for qname, (prefix, local, ns) in + adjustForeignAttributes.items()]) + +spaceCharacters = frozenset([ + "\t", + "\n", + "\u000C", + " ", + "\r" +]) + +tableInsertModeElements = frozenset([ + "table", + "tbody", + "tfoot", + "thead", + "tr" +]) + +asciiLowercase = frozenset(string.ascii_lowercase) +asciiUppercase = frozenset(string.ascii_uppercase) +asciiLetters = frozenset(string.ascii_letters) +digits = frozenset(string.digits) +hexDigits = frozenset(string.hexdigits) + +asciiUpper2Lower = dict([(ord(c), ord(c.lower())) + for c in string.ascii_uppercase]) + +# Heading elements need to be ordered +headingElements = ( + "h1", + "h2", + "h3", + "h4", + "h5", + "h6" +) + +voidElements = frozenset([ + "base", + "command", + "event-source", + "link", + "meta", + "hr", + "br", + "img", + "embed", + "param", + "area", + "col", + "input", + "source", + "track" +]) + +cdataElements = frozenset(['title', 'textarea']) + +rcdataElements = frozenset([ + 'style', + 'script', + 'xmp', + 'iframe', + 'noembed', + 'noframes', + 'noscript' +]) + +booleanAttributes = { + "": frozenset(["irrelevant", "itemscope"]), + "style": frozenset(["scoped"]), + "img": frozenset(["ismap"]), + "audio": frozenset(["autoplay", "controls"]), + "video": frozenset(["autoplay", "controls"]), + "script": frozenset(["defer", "async"]), + "details": frozenset(["open"]), + "datagrid": frozenset(["multiple", "disabled"]), + "command": frozenset(["hidden", "disabled", "checked", "default"]), + "hr": frozenset(["noshade"]), + "menu": frozenset(["autosubmit"]), + "fieldset": frozenset(["disabled", "readonly"]), + "option": frozenset(["disabled", "readonly", "selected"]), + "optgroup": frozenset(["disabled", "readonly"]), + "button": frozenset(["disabled", "autofocus"]), + "input": frozenset(["disabled", "readonly", "required", "autofocus", "checked", "ismap"]), + "select": frozenset(["disabled", "readonly", "autofocus", "multiple"]), + "output": frozenset(["disabled", "readonly"]), + "iframe": frozenset(["seamless"]), +} + 
+# entitiesWindows1252 has to be _ordered_ and needs to have an index. It +# therefore can't be a frozenset. +entitiesWindows1252 = ( + 8364, # 0x80 0x20AC EURO SIGN + 65533, # 0x81 UNDEFINED + 8218, # 0x82 0x201A SINGLE LOW-9 QUOTATION MARK + 402, # 0x83 0x0192 LATIN SMALL LETTER F WITH HOOK + 8222, # 0x84 0x201E DOUBLE LOW-9 QUOTATION MARK + 8230, # 0x85 0x2026 HORIZONTAL ELLIPSIS + 8224, # 0x86 0x2020 DAGGER + 8225, # 0x87 0x2021 DOUBLE DAGGER + 710, # 0x88 0x02C6 MODIFIER LETTER CIRCUMFLEX ACCENT + 8240, # 0x89 0x2030 PER MILLE SIGN + 352, # 0x8A 0x0160 LATIN CAPITAL LETTER S WITH CARON + 8249, # 0x8B 0x2039 SINGLE LEFT-POINTING ANGLE QUOTATION MARK + 338, # 0x8C 0x0152 LATIN CAPITAL LIGATURE OE + 65533, # 0x8D UNDEFINED + 381, # 0x8E 0x017D LATIN CAPITAL LETTER Z WITH CARON + 65533, # 0x8F UNDEFINED + 65533, # 0x90 UNDEFINED + 8216, # 0x91 0x2018 LEFT SINGLE QUOTATION MARK + 8217, # 0x92 0x2019 RIGHT SINGLE QUOTATION MARK + 8220, # 0x93 0x201C LEFT DOUBLE QUOTATION MARK + 8221, # 0x94 0x201D RIGHT DOUBLE QUOTATION MARK + 8226, # 0x95 0x2022 BULLET + 8211, # 0x96 0x2013 EN DASH + 8212, # 0x97 0x2014 EM DASH + 732, # 0x98 0x02DC SMALL TILDE + 8482, # 0x99 0x2122 TRADE MARK SIGN + 353, # 0x9A 0x0161 LATIN SMALL LETTER S WITH CARON + 8250, # 0x9B 0x203A SINGLE RIGHT-POINTING ANGLE QUOTATION MARK + 339, # 0x9C 0x0153 LATIN SMALL LIGATURE OE + 65533, # 0x9D UNDEFINED + 382, # 0x9E 0x017E LATIN SMALL LETTER Z WITH CARON + 376 # 0x9F 0x0178 LATIN CAPITAL LETTER Y WITH DIAERESIS +) + +xmlEntities = frozenset(['lt;', 'gt;', 'amp;', 'apos;', 'quot;']) + +entities = { + "AElig": "\xc6", + "AElig;": "\xc6", + "AMP": "&", + "AMP;": "&", + "Aacute": "\xc1", + "Aacute;": "\xc1", + "Abreve;": "\u0102", + "Acirc": "\xc2", + "Acirc;": "\xc2", + "Acy;": "\u0410", + "Afr;": "\U0001d504", + "Agrave": "\xc0", + "Agrave;": "\xc0", + "Alpha;": "\u0391", + "Amacr;": "\u0100", + "And;": "\u2a53", + "Aogon;": "\u0104", + "Aopf;": "\U0001d538", + "ApplyFunction;": "\u2061", + "Aring": "\xc5", + "Aring;": "\xc5", + "Ascr;": "\U0001d49c", + "Assign;": "\u2254", + "Atilde": "\xc3", + "Atilde;": "\xc3", + "Auml": "\xc4", + "Auml;": "\xc4", + "Backslash;": "\u2216", + "Barv;": "\u2ae7", + "Barwed;": "\u2306", + "Bcy;": "\u0411", + "Because;": "\u2235", + "Bernoullis;": "\u212c", + "Beta;": "\u0392", + "Bfr;": "\U0001d505", + "Bopf;": "\U0001d539", + "Breve;": "\u02d8", + "Bscr;": "\u212c", + "Bumpeq;": "\u224e", + "CHcy;": "\u0427", + "COPY": "\xa9", + "COPY;": "\xa9", + "Cacute;": "\u0106", + "Cap;": "\u22d2", + "CapitalDifferentialD;": "\u2145", + "Cayleys;": "\u212d", + "Ccaron;": "\u010c", + "Ccedil": "\xc7", + "Ccedil;": "\xc7", + "Ccirc;": "\u0108", + "Cconint;": "\u2230", + "Cdot;": "\u010a", + "Cedilla;": "\xb8", + "CenterDot;": "\xb7", + "Cfr;": "\u212d", + "Chi;": "\u03a7", + "CircleDot;": "\u2299", + "CircleMinus;": "\u2296", + "CirclePlus;": "\u2295", + "CircleTimes;": "\u2297", + "ClockwiseContourIntegral;": "\u2232", + "CloseCurlyDoubleQuote;": "\u201d", + "CloseCurlyQuote;": "\u2019", + "Colon;": "\u2237", + "Colone;": "\u2a74", + "Congruent;": "\u2261", + "Conint;": "\u222f", + "ContourIntegral;": "\u222e", + "Copf;": "\u2102", + "Coproduct;": "\u2210", + "CounterClockwiseContourIntegral;": "\u2233", + "Cross;": "\u2a2f", + "Cscr;": "\U0001d49e", + "Cup;": "\u22d3", + "CupCap;": "\u224d", + "DD;": "\u2145", + "DDotrahd;": "\u2911", + "DJcy;": "\u0402", + "DScy;": "\u0405", + "DZcy;": "\u040f", + "Dagger;": "\u2021", + "Darr;": "\u21a1", + "Dashv;": "\u2ae4", + "Dcaron;": "\u010e", + "Dcy;": "\u0414", + 
"Del;": "\u2207", + "Delta;": "\u0394", + "Dfr;": "\U0001d507", + "DiacriticalAcute;": "\xb4", + "DiacriticalDot;": "\u02d9", + "DiacriticalDoubleAcute;": "\u02dd", + "DiacriticalGrave;": "`", + "DiacriticalTilde;": "\u02dc", + "Diamond;": "\u22c4", + "DifferentialD;": "\u2146", + "Dopf;": "\U0001d53b", + "Dot;": "\xa8", + "DotDot;": "\u20dc", + "DotEqual;": "\u2250", + "DoubleContourIntegral;": "\u222f", + "DoubleDot;": "\xa8", + "DoubleDownArrow;": "\u21d3", + "DoubleLeftArrow;": "\u21d0", + "DoubleLeftRightArrow;": "\u21d4", + "DoubleLeftTee;": "\u2ae4", + "DoubleLongLeftArrow;": "\u27f8", + "DoubleLongLeftRightArrow;": "\u27fa", + "DoubleLongRightArrow;": "\u27f9", + "DoubleRightArrow;": "\u21d2", + "DoubleRightTee;": "\u22a8", + "DoubleUpArrow;": "\u21d1", + "DoubleUpDownArrow;": "\u21d5", + "DoubleVerticalBar;": "\u2225", + "DownArrow;": "\u2193", + "DownArrowBar;": "\u2913", + "DownArrowUpArrow;": "\u21f5", + "DownBreve;": "\u0311", + "DownLeftRightVector;": "\u2950", + "DownLeftTeeVector;": "\u295e", + "DownLeftVector;": "\u21bd", + "DownLeftVectorBar;": "\u2956", + "DownRightTeeVector;": "\u295f", + "DownRightVector;": "\u21c1", + "DownRightVectorBar;": "\u2957", + "DownTee;": "\u22a4", + "DownTeeArrow;": "\u21a7", + "Downarrow;": "\u21d3", + "Dscr;": "\U0001d49f", + "Dstrok;": "\u0110", + "ENG;": "\u014a", + "ETH": "\xd0", + "ETH;": "\xd0", + "Eacute": "\xc9", + "Eacute;": "\xc9", + "Ecaron;": "\u011a", + "Ecirc": "\xca", + "Ecirc;": "\xca", + "Ecy;": "\u042d", + "Edot;": "\u0116", + "Efr;": "\U0001d508", + "Egrave": "\xc8", + "Egrave;": "\xc8", + "Element;": "\u2208", + "Emacr;": "\u0112", + "EmptySmallSquare;": "\u25fb", + "EmptyVerySmallSquare;": "\u25ab", + "Eogon;": "\u0118", + "Eopf;": "\U0001d53c", + "Epsilon;": "\u0395", + "Equal;": "\u2a75", + "EqualTilde;": "\u2242", + "Equilibrium;": "\u21cc", + "Escr;": "\u2130", + "Esim;": "\u2a73", + "Eta;": "\u0397", + "Euml": "\xcb", + "Euml;": "\xcb", + "Exists;": "\u2203", + "ExponentialE;": "\u2147", + "Fcy;": "\u0424", + "Ffr;": "\U0001d509", + "FilledSmallSquare;": "\u25fc", + "FilledVerySmallSquare;": "\u25aa", + "Fopf;": "\U0001d53d", + "ForAll;": "\u2200", + "Fouriertrf;": "\u2131", + "Fscr;": "\u2131", + "GJcy;": "\u0403", + "GT": ">", + "GT;": ">", + "Gamma;": "\u0393", + "Gammad;": "\u03dc", + "Gbreve;": "\u011e", + "Gcedil;": "\u0122", + "Gcirc;": "\u011c", + "Gcy;": "\u0413", + "Gdot;": "\u0120", + "Gfr;": "\U0001d50a", + "Gg;": "\u22d9", + "Gopf;": "\U0001d53e", + "GreaterEqual;": "\u2265", + "GreaterEqualLess;": "\u22db", + "GreaterFullEqual;": "\u2267", + "GreaterGreater;": "\u2aa2", + "GreaterLess;": "\u2277", + "GreaterSlantEqual;": "\u2a7e", + "GreaterTilde;": "\u2273", + "Gscr;": "\U0001d4a2", + "Gt;": "\u226b", + "HARDcy;": "\u042a", + "Hacek;": "\u02c7", + "Hat;": "^", + "Hcirc;": "\u0124", + "Hfr;": "\u210c", + "HilbertSpace;": "\u210b", + "Hopf;": "\u210d", + "HorizontalLine;": "\u2500", + "Hscr;": "\u210b", + "Hstrok;": "\u0126", + "HumpDownHump;": "\u224e", + "HumpEqual;": "\u224f", + "IEcy;": "\u0415", + "IJlig;": "\u0132", + "IOcy;": "\u0401", + "Iacute": "\xcd", + "Iacute;": "\xcd", + "Icirc": "\xce", + "Icirc;": "\xce", + "Icy;": "\u0418", + "Idot;": "\u0130", + "Ifr;": "\u2111", + "Igrave": "\xcc", + "Igrave;": "\xcc", + "Im;": "\u2111", + "Imacr;": "\u012a", + "ImaginaryI;": "\u2148", + "Implies;": "\u21d2", + "Int;": "\u222c", + "Integral;": "\u222b", + "Intersection;": "\u22c2", + "InvisibleComma;": "\u2063", + "InvisibleTimes;": "\u2062", + "Iogon;": "\u012e", + "Iopf;": "\U0001d540", + "Iota;": 
"\u0399", + "Iscr;": "\u2110", + "Itilde;": "\u0128", + "Iukcy;": "\u0406", + "Iuml": "\xcf", + "Iuml;": "\xcf", + "Jcirc;": "\u0134", + "Jcy;": "\u0419", + "Jfr;": "\U0001d50d", + "Jopf;": "\U0001d541", + "Jscr;": "\U0001d4a5", + "Jsercy;": "\u0408", + "Jukcy;": "\u0404", + "KHcy;": "\u0425", + "KJcy;": "\u040c", + "Kappa;": "\u039a", + "Kcedil;": "\u0136", + "Kcy;": "\u041a", + "Kfr;": "\U0001d50e", + "Kopf;": "\U0001d542", + "Kscr;": "\U0001d4a6", + "LJcy;": "\u0409", + "LT": "<", + "LT;": "<", + "Lacute;": "\u0139", + "Lambda;": "\u039b", + "Lang;": "\u27ea", + "Laplacetrf;": "\u2112", + "Larr;": "\u219e", + "Lcaron;": "\u013d", + "Lcedil;": "\u013b", + "Lcy;": "\u041b", + "LeftAngleBracket;": "\u27e8", + "LeftArrow;": "\u2190", + "LeftArrowBar;": "\u21e4", + "LeftArrowRightArrow;": "\u21c6", + "LeftCeiling;": "\u2308", + "LeftDoubleBracket;": "\u27e6", + "LeftDownTeeVector;": "\u2961", + "LeftDownVector;": "\u21c3", + "LeftDownVectorBar;": "\u2959", + "LeftFloor;": "\u230a", + "LeftRightArrow;": "\u2194", + "LeftRightVector;": "\u294e", + "LeftTee;": "\u22a3", + "LeftTeeArrow;": "\u21a4", + "LeftTeeVector;": "\u295a", + "LeftTriangle;": "\u22b2", + "LeftTriangleBar;": "\u29cf", + "LeftTriangleEqual;": "\u22b4", + "LeftUpDownVector;": "\u2951", + "LeftUpTeeVector;": "\u2960", + "LeftUpVector;": "\u21bf", + "LeftUpVectorBar;": "\u2958", + "LeftVector;": "\u21bc", + "LeftVectorBar;": "\u2952", + "Leftarrow;": "\u21d0", + "Leftrightarrow;": "\u21d4", + "LessEqualGreater;": "\u22da", + "LessFullEqual;": "\u2266", + "LessGreater;": "\u2276", + "LessLess;": "\u2aa1", + "LessSlantEqual;": "\u2a7d", + "LessTilde;": "\u2272", + "Lfr;": "\U0001d50f", + "Ll;": "\u22d8", + "Lleftarrow;": "\u21da", + "Lmidot;": "\u013f", + "LongLeftArrow;": "\u27f5", + "LongLeftRightArrow;": "\u27f7", + "LongRightArrow;": "\u27f6", + "Longleftarrow;": "\u27f8", + "Longleftrightarrow;": "\u27fa", + "Longrightarrow;": "\u27f9", + "Lopf;": "\U0001d543", + "LowerLeftArrow;": "\u2199", + "LowerRightArrow;": "\u2198", + "Lscr;": "\u2112", + "Lsh;": "\u21b0", + "Lstrok;": "\u0141", + "Lt;": "\u226a", + "Map;": "\u2905", + "Mcy;": "\u041c", + "MediumSpace;": "\u205f", + "Mellintrf;": "\u2133", + "Mfr;": "\U0001d510", + "MinusPlus;": "\u2213", + "Mopf;": "\U0001d544", + "Mscr;": "\u2133", + "Mu;": "\u039c", + "NJcy;": "\u040a", + "Nacute;": "\u0143", + "Ncaron;": "\u0147", + "Ncedil;": "\u0145", + "Ncy;": "\u041d", + "NegativeMediumSpace;": "\u200b", + "NegativeThickSpace;": "\u200b", + "NegativeThinSpace;": "\u200b", + "NegativeVeryThinSpace;": "\u200b", + "NestedGreaterGreater;": "\u226b", + "NestedLessLess;": "\u226a", + "NewLine;": "\n", + "Nfr;": "\U0001d511", + "NoBreak;": "\u2060", + "NonBreakingSpace;": "\xa0", + "Nopf;": "\u2115", + "Not;": "\u2aec", + "NotCongruent;": "\u2262", + "NotCupCap;": "\u226d", + "NotDoubleVerticalBar;": "\u2226", + "NotElement;": "\u2209", + "NotEqual;": "\u2260", + "NotEqualTilde;": "\u2242\u0338", + "NotExists;": "\u2204", + "NotGreater;": "\u226f", + "NotGreaterEqual;": "\u2271", + "NotGreaterFullEqual;": "\u2267\u0338", + "NotGreaterGreater;": "\u226b\u0338", + "NotGreaterLess;": "\u2279", + "NotGreaterSlantEqual;": "\u2a7e\u0338", + "NotGreaterTilde;": "\u2275", + "NotHumpDownHump;": "\u224e\u0338", + "NotHumpEqual;": "\u224f\u0338", + "NotLeftTriangle;": "\u22ea", + "NotLeftTriangleBar;": "\u29cf\u0338", + "NotLeftTriangleEqual;": "\u22ec", + "NotLess;": "\u226e", + "NotLessEqual;": "\u2270", + "NotLessGreater;": "\u2278", + "NotLessLess;": "\u226a\u0338", + "NotLessSlantEqual;": 
"\u2a7d\u0338", + "NotLessTilde;": "\u2274", + "NotNestedGreaterGreater;": "\u2aa2\u0338", + "NotNestedLessLess;": "\u2aa1\u0338", + "NotPrecedes;": "\u2280", + "NotPrecedesEqual;": "\u2aaf\u0338", + "NotPrecedesSlantEqual;": "\u22e0", + "NotReverseElement;": "\u220c", + "NotRightTriangle;": "\u22eb", + "NotRightTriangleBar;": "\u29d0\u0338", + "NotRightTriangleEqual;": "\u22ed", + "NotSquareSubset;": "\u228f\u0338", + "NotSquareSubsetEqual;": "\u22e2", + "NotSquareSuperset;": "\u2290\u0338", + "NotSquareSupersetEqual;": "\u22e3", + "NotSubset;": "\u2282\u20d2", + "NotSubsetEqual;": "\u2288", + "NotSucceeds;": "\u2281", + "NotSucceedsEqual;": "\u2ab0\u0338", + "NotSucceedsSlantEqual;": "\u22e1", + "NotSucceedsTilde;": "\u227f\u0338", + "NotSuperset;": "\u2283\u20d2", + "NotSupersetEqual;": "\u2289", + "NotTilde;": "\u2241", + "NotTildeEqual;": "\u2244", + "NotTildeFullEqual;": "\u2247", + "NotTildeTilde;": "\u2249", + "NotVerticalBar;": "\u2224", + "Nscr;": "\U0001d4a9", + "Ntilde": "\xd1", + "Ntilde;": "\xd1", + "Nu;": "\u039d", + "OElig;": "\u0152", + "Oacute": "\xd3", + "Oacute;": "\xd3", + "Ocirc": "\xd4", + "Ocirc;": "\xd4", + "Ocy;": "\u041e", + "Odblac;": "\u0150", + "Ofr;": "\U0001d512", + "Ograve": "\xd2", + "Ograve;": "\xd2", + "Omacr;": "\u014c", + "Omega;": "\u03a9", + "Omicron;": "\u039f", + "Oopf;": "\U0001d546", + "OpenCurlyDoubleQuote;": "\u201c", + "OpenCurlyQuote;": "\u2018", + "Or;": "\u2a54", + "Oscr;": "\U0001d4aa", + "Oslash": "\xd8", + "Oslash;": "\xd8", + "Otilde": "\xd5", + "Otilde;": "\xd5", + "Otimes;": "\u2a37", + "Ouml": "\xd6", + "Ouml;": "\xd6", + "OverBar;": "\u203e", + "OverBrace;": "\u23de", + "OverBracket;": "\u23b4", + "OverParenthesis;": "\u23dc", + "PartialD;": "\u2202", + "Pcy;": "\u041f", + "Pfr;": "\U0001d513", + "Phi;": "\u03a6", + "Pi;": "\u03a0", + "PlusMinus;": "\xb1", + "Poincareplane;": "\u210c", + "Popf;": "\u2119", + "Pr;": "\u2abb", + "Precedes;": "\u227a", + "PrecedesEqual;": "\u2aaf", + "PrecedesSlantEqual;": "\u227c", + "PrecedesTilde;": "\u227e", + "Prime;": "\u2033", + "Product;": "\u220f", + "Proportion;": "\u2237", + "Proportional;": "\u221d", + "Pscr;": "\U0001d4ab", + "Psi;": "\u03a8", + "QUOT": "\"", + "QUOT;": "\"", + "Qfr;": "\U0001d514", + "Qopf;": "\u211a", + "Qscr;": "\U0001d4ac", + "RBarr;": "\u2910", + "REG": "\xae", + "REG;": "\xae", + "Racute;": "\u0154", + "Rang;": "\u27eb", + "Rarr;": "\u21a0", + "Rarrtl;": "\u2916", + "Rcaron;": "\u0158", + "Rcedil;": "\u0156", + "Rcy;": "\u0420", + "Re;": "\u211c", + "ReverseElement;": "\u220b", + "ReverseEquilibrium;": "\u21cb", + "ReverseUpEquilibrium;": "\u296f", + "Rfr;": "\u211c", + "Rho;": "\u03a1", + "RightAngleBracket;": "\u27e9", + "RightArrow;": "\u2192", + "RightArrowBar;": "\u21e5", + "RightArrowLeftArrow;": "\u21c4", + "RightCeiling;": "\u2309", + "RightDoubleBracket;": "\u27e7", + "RightDownTeeVector;": "\u295d", + "RightDownVector;": "\u21c2", + "RightDownVectorBar;": "\u2955", + "RightFloor;": "\u230b", + "RightTee;": "\u22a2", + "RightTeeArrow;": "\u21a6", + "RightTeeVector;": "\u295b", + "RightTriangle;": "\u22b3", + "RightTriangleBar;": "\u29d0", + "RightTriangleEqual;": "\u22b5", + "RightUpDownVector;": "\u294f", + "RightUpTeeVector;": "\u295c", + "RightUpVector;": "\u21be", + "RightUpVectorBar;": "\u2954", + "RightVector;": "\u21c0", + "RightVectorBar;": "\u2953", + "Rightarrow;": "\u21d2", + "Ropf;": "\u211d", + "RoundImplies;": "\u2970", + "Rrightarrow;": "\u21db", + "Rscr;": "\u211b", + "Rsh;": "\u21b1", + "RuleDelayed;": "\u29f4", + "SHCHcy;": "\u0429", + 
"SHcy;": "\u0428", + "SOFTcy;": "\u042c", + "Sacute;": "\u015a", + "Sc;": "\u2abc", + "Scaron;": "\u0160", + "Scedil;": "\u015e", + "Scirc;": "\u015c", + "Scy;": "\u0421", + "Sfr;": "\U0001d516", + "ShortDownArrow;": "\u2193", + "ShortLeftArrow;": "\u2190", + "ShortRightArrow;": "\u2192", + "ShortUpArrow;": "\u2191", + "Sigma;": "\u03a3", + "SmallCircle;": "\u2218", + "Sopf;": "\U0001d54a", + "Sqrt;": "\u221a", + "Square;": "\u25a1", + "SquareIntersection;": "\u2293", + "SquareSubset;": "\u228f", + "SquareSubsetEqual;": "\u2291", + "SquareSuperset;": "\u2290", + "SquareSupersetEqual;": "\u2292", + "SquareUnion;": "\u2294", + "Sscr;": "\U0001d4ae", + "Star;": "\u22c6", + "Sub;": "\u22d0", + "Subset;": "\u22d0", + "SubsetEqual;": "\u2286", + "Succeeds;": "\u227b", + "SucceedsEqual;": "\u2ab0", + "SucceedsSlantEqual;": "\u227d", + "SucceedsTilde;": "\u227f", + "SuchThat;": "\u220b", + "Sum;": "\u2211", + "Sup;": "\u22d1", + "Superset;": "\u2283", + "SupersetEqual;": "\u2287", + "Supset;": "\u22d1", + "THORN": "\xde", + "THORN;": "\xde", + "TRADE;": "\u2122", + "TSHcy;": "\u040b", + "TScy;": "\u0426", + "Tab;": "\t", + "Tau;": "\u03a4", + "Tcaron;": "\u0164", + "Tcedil;": "\u0162", + "Tcy;": "\u0422", + "Tfr;": "\U0001d517", + "Therefore;": "\u2234", + "Theta;": "\u0398", + "ThickSpace;": "\u205f\u200a", + "ThinSpace;": "\u2009", + "Tilde;": "\u223c", + "TildeEqual;": "\u2243", + "TildeFullEqual;": "\u2245", + "TildeTilde;": "\u2248", + "Topf;": "\U0001d54b", + "TripleDot;": "\u20db", + "Tscr;": "\U0001d4af", + "Tstrok;": "\u0166", + "Uacute": "\xda", + "Uacute;": "\xda", + "Uarr;": "\u219f", + "Uarrocir;": "\u2949", + "Ubrcy;": "\u040e", + "Ubreve;": "\u016c", + "Ucirc": "\xdb", + "Ucirc;": "\xdb", + "Ucy;": "\u0423", + "Udblac;": "\u0170", + "Ufr;": "\U0001d518", + "Ugrave": "\xd9", + "Ugrave;": "\xd9", + "Umacr;": "\u016a", + "UnderBar;": "_", + "UnderBrace;": "\u23df", + "UnderBracket;": "\u23b5", + "UnderParenthesis;": "\u23dd", + "Union;": "\u22c3", + "UnionPlus;": "\u228e", + "Uogon;": "\u0172", + "Uopf;": "\U0001d54c", + "UpArrow;": "\u2191", + "UpArrowBar;": "\u2912", + "UpArrowDownArrow;": "\u21c5", + "UpDownArrow;": "\u2195", + "UpEquilibrium;": "\u296e", + "UpTee;": "\u22a5", + "UpTeeArrow;": "\u21a5", + "Uparrow;": "\u21d1", + "Updownarrow;": "\u21d5", + "UpperLeftArrow;": "\u2196", + "UpperRightArrow;": "\u2197", + "Upsi;": "\u03d2", + "Upsilon;": "\u03a5", + "Uring;": "\u016e", + "Uscr;": "\U0001d4b0", + "Utilde;": "\u0168", + "Uuml": "\xdc", + "Uuml;": "\xdc", + "VDash;": "\u22ab", + "Vbar;": "\u2aeb", + "Vcy;": "\u0412", + "Vdash;": "\u22a9", + "Vdashl;": "\u2ae6", + "Vee;": "\u22c1", + "Verbar;": "\u2016", + "Vert;": "\u2016", + "VerticalBar;": "\u2223", + "VerticalLine;": "|", + "VerticalSeparator;": "\u2758", + "VerticalTilde;": "\u2240", + "VeryThinSpace;": "\u200a", + "Vfr;": "\U0001d519", + "Vopf;": "\U0001d54d", + "Vscr;": "\U0001d4b1", + "Vvdash;": "\u22aa", + "Wcirc;": "\u0174", + "Wedge;": "\u22c0", + "Wfr;": "\U0001d51a", + "Wopf;": "\U0001d54e", + "Wscr;": "\U0001d4b2", + "Xfr;": "\U0001d51b", + "Xi;": "\u039e", + "Xopf;": "\U0001d54f", + "Xscr;": "\U0001d4b3", + "YAcy;": "\u042f", + "YIcy;": "\u0407", + "YUcy;": "\u042e", + "Yacute": "\xdd", + "Yacute;": "\xdd", + "Ycirc;": "\u0176", + "Ycy;": "\u042b", + "Yfr;": "\U0001d51c", + "Yopf;": "\U0001d550", + "Yscr;": "\U0001d4b4", + "Yuml;": "\u0178", + "ZHcy;": "\u0416", + "Zacute;": "\u0179", + "Zcaron;": "\u017d", + "Zcy;": "\u0417", + "Zdot;": "\u017b", + "ZeroWidthSpace;": "\u200b", + "Zeta;": "\u0396", + "Zfr;": 
"\u2128", + "Zopf;": "\u2124", + "Zscr;": "\U0001d4b5", + "aacute": "\xe1", + "aacute;": "\xe1", + "abreve;": "\u0103", + "ac;": "\u223e", + "acE;": "\u223e\u0333", + "acd;": "\u223f", + "acirc": "\xe2", + "acirc;": "\xe2", + "acute": "\xb4", + "acute;": "\xb4", + "acy;": "\u0430", + "aelig": "\xe6", + "aelig;": "\xe6", + "af;": "\u2061", + "afr;": "\U0001d51e", + "agrave": "\xe0", + "agrave;": "\xe0", + "alefsym;": "\u2135", + "aleph;": "\u2135", + "alpha;": "\u03b1", + "amacr;": "\u0101", + "amalg;": "\u2a3f", + "amp": "&", + "amp;": "&", + "and;": "\u2227", + "andand;": "\u2a55", + "andd;": "\u2a5c", + "andslope;": "\u2a58", + "andv;": "\u2a5a", + "ang;": "\u2220", + "ange;": "\u29a4", + "angle;": "\u2220", + "angmsd;": "\u2221", + "angmsdaa;": "\u29a8", + "angmsdab;": "\u29a9", + "angmsdac;": "\u29aa", + "angmsdad;": "\u29ab", + "angmsdae;": "\u29ac", + "angmsdaf;": "\u29ad", + "angmsdag;": "\u29ae", + "angmsdah;": "\u29af", + "angrt;": "\u221f", + "angrtvb;": "\u22be", + "angrtvbd;": "\u299d", + "angsph;": "\u2222", + "angst;": "\xc5", + "angzarr;": "\u237c", + "aogon;": "\u0105", + "aopf;": "\U0001d552", + "ap;": "\u2248", + "apE;": "\u2a70", + "apacir;": "\u2a6f", + "ape;": "\u224a", + "apid;": "\u224b", + "apos;": "'", + "approx;": "\u2248", + "approxeq;": "\u224a", + "aring": "\xe5", + "aring;": "\xe5", + "ascr;": "\U0001d4b6", + "ast;": "*", + "asymp;": "\u2248", + "asympeq;": "\u224d", + "atilde": "\xe3", + "atilde;": "\xe3", + "auml": "\xe4", + "auml;": "\xe4", + "awconint;": "\u2233", + "awint;": "\u2a11", + "bNot;": "\u2aed", + "backcong;": "\u224c", + "backepsilon;": "\u03f6", + "backprime;": "\u2035", + "backsim;": "\u223d", + "backsimeq;": "\u22cd", + "barvee;": "\u22bd", + "barwed;": "\u2305", + "barwedge;": "\u2305", + "bbrk;": "\u23b5", + "bbrktbrk;": "\u23b6", + "bcong;": "\u224c", + "bcy;": "\u0431", + "bdquo;": "\u201e", + "becaus;": "\u2235", + "because;": "\u2235", + "bemptyv;": "\u29b0", + "bepsi;": "\u03f6", + "bernou;": "\u212c", + "beta;": "\u03b2", + "beth;": "\u2136", + "between;": "\u226c", + "bfr;": "\U0001d51f", + "bigcap;": "\u22c2", + "bigcirc;": "\u25ef", + "bigcup;": "\u22c3", + "bigodot;": "\u2a00", + "bigoplus;": "\u2a01", + "bigotimes;": "\u2a02", + "bigsqcup;": "\u2a06", + "bigstar;": "\u2605", + "bigtriangledown;": "\u25bd", + "bigtriangleup;": "\u25b3", + "biguplus;": "\u2a04", + "bigvee;": "\u22c1", + "bigwedge;": "\u22c0", + "bkarow;": "\u290d", + "blacklozenge;": "\u29eb", + "blacksquare;": "\u25aa", + "blacktriangle;": "\u25b4", + "blacktriangledown;": "\u25be", + "blacktriangleleft;": "\u25c2", + "blacktriangleright;": "\u25b8", + "blank;": "\u2423", + "blk12;": "\u2592", + "blk14;": "\u2591", + "blk34;": "\u2593", + "block;": "\u2588", + "bne;": "=\u20e5", + "bnequiv;": "\u2261\u20e5", + "bnot;": "\u2310", + "bopf;": "\U0001d553", + "bot;": "\u22a5", + "bottom;": "\u22a5", + "bowtie;": "\u22c8", + "boxDL;": "\u2557", + "boxDR;": "\u2554", + "boxDl;": "\u2556", + "boxDr;": "\u2553", + "boxH;": "\u2550", + "boxHD;": "\u2566", + "boxHU;": "\u2569", + "boxHd;": "\u2564", + "boxHu;": "\u2567", + "boxUL;": "\u255d", + "boxUR;": "\u255a", + "boxUl;": "\u255c", + "boxUr;": "\u2559", + "boxV;": "\u2551", + "boxVH;": "\u256c", + "boxVL;": "\u2563", + "boxVR;": "\u2560", + "boxVh;": "\u256b", + "boxVl;": "\u2562", + "boxVr;": "\u255f", + "boxbox;": "\u29c9", + "boxdL;": "\u2555", + "boxdR;": "\u2552", + "boxdl;": "\u2510", + "boxdr;": "\u250c", + "boxh;": "\u2500", + "boxhD;": "\u2565", + "boxhU;": "\u2568", + "boxhd;": "\u252c", + "boxhu;": "\u2534", 
+ "boxminus;": "\u229f", + "boxplus;": "\u229e", + "boxtimes;": "\u22a0", + "boxuL;": "\u255b", + "boxuR;": "\u2558", + "boxul;": "\u2518", + "boxur;": "\u2514", + "boxv;": "\u2502", + "boxvH;": "\u256a", + "boxvL;": "\u2561", + "boxvR;": "\u255e", + "boxvh;": "\u253c", + "boxvl;": "\u2524", + "boxvr;": "\u251c", + "bprime;": "\u2035", + "breve;": "\u02d8", + "brvbar": "\xa6", + "brvbar;": "\xa6", + "bscr;": "\U0001d4b7", + "bsemi;": "\u204f", + "bsim;": "\u223d", + "bsime;": "\u22cd", + "bsol;": "\\", + "bsolb;": "\u29c5", + "bsolhsub;": "\u27c8", + "bull;": "\u2022", + "bullet;": "\u2022", + "bump;": "\u224e", + "bumpE;": "\u2aae", + "bumpe;": "\u224f", + "bumpeq;": "\u224f", + "cacute;": "\u0107", + "cap;": "\u2229", + "capand;": "\u2a44", + "capbrcup;": "\u2a49", + "capcap;": "\u2a4b", + "capcup;": "\u2a47", + "capdot;": "\u2a40", + "caps;": "\u2229\ufe00", + "caret;": "\u2041", + "caron;": "\u02c7", + "ccaps;": "\u2a4d", + "ccaron;": "\u010d", + "ccedil": "\xe7", + "ccedil;": "\xe7", + "ccirc;": "\u0109", + "ccups;": "\u2a4c", + "ccupssm;": "\u2a50", + "cdot;": "\u010b", + "cedil": "\xb8", + "cedil;": "\xb8", + "cemptyv;": "\u29b2", + "cent": "\xa2", + "cent;": "\xa2", + "centerdot;": "\xb7", + "cfr;": "\U0001d520", + "chcy;": "\u0447", + "check;": "\u2713", + "checkmark;": "\u2713", + "chi;": "\u03c7", + "cir;": "\u25cb", + "cirE;": "\u29c3", + "circ;": "\u02c6", + "circeq;": "\u2257", + "circlearrowleft;": "\u21ba", + "circlearrowright;": "\u21bb", + "circledR;": "\xae", + "circledS;": "\u24c8", + "circledast;": "\u229b", + "circledcirc;": "\u229a", + "circleddash;": "\u229d", + "cire;": "\u2257", + "cirfnint;": "\u2a10", + "cirmid;": "\u2aef", + "cirscir;": "\u29c2", + "clubs;": "\u2663", + "clubsuit;": "\u2663", + "colon;": ":", + "colone;": "\u2254", + "coloneq;": "\u2254", + "comma;": ",", + "commat;": "@", + "comp;": "\u2201", + "compfn;": "\u2218", + "complement;": "\u2201", + "complexes;": "\u2102", + "cong;": "\u2245", + "congdot;": "\u2a6d", + "conint;": "\u222e", + "copf;": "\U0001d554", + "coprod;": "\u2210", + "copy": "\xa9", + "copy;": "\xa9", + "copysr;": "\u2117", + "crarr;": "\u21b5", + "cross;": "\u2717", + "cscr;": "\U0001d4b8", + "csub;": "\u2acf", + "csube;": "\u2ad1", + "csup;": "\u2ad0", + "csupe;": "\u2ad2", + "ctdot;": "\u22ef", + "cudarrl;": "\u2938", + "cudarrr;": "\u2935", + "cuepr;": "\u22de", + "cuesc;": "\u22df", + "cularr;": "\u21b6", + "cularrp;": "\u293d", + "cup;": "\u222a", + "cupbrcap;": "\u2a48", + "cupcap;": "\u2a46", + "cupcup;": "\u2a4a", + "cupdot;": "\u228d", + "cupor;": "\u2a45", + "cups;": "\u222a\ufe00", + "curarr;": "\u21b7", + "curarrm;": "\u293c", + "curlyeqprec;": "\u22de", + "curlyeqsucc;": "\u22df", + "curlyvee;": "\u22ce", + "curlywedge;": "\u22cf", + "curren": "\xa4", + "curren;": "\xa4", + "curvearrowleft;": "\u21b6", + "curvearrowright;": "\u21b7", + "cuvee;": "\u22ce", + "cuwed;": "\u22cf", + "cwconint;": "\u2232", + "cwint;": "\u2231", + "cylcty;": "\u232d", + "dArr;": "\u21d3", + "dHar;": "\u2965", + "dagger;": "\u2020", + "daleth;": "\u2138", + "darr;": "\u2193", + "dash;": "\u2010", + "dashv;": "\u22a3", + "dbkarow;": "\u290f", + "dblac;": "\u02dd", + "dcaron;": "\u010f", + "dcy;": "\u0434", + "dd;": "\u2146", + "ddagger;": "\u2021", + "ddarr;": "\u21ca", + "ddotseq;": "\u2a77", + "deg": "\xb0", + "deg;": "\xb0", + "delta;": "\u03b4", + "demptyv;": "\u29b1", + "dfisht;": "\u297f", + "dfr;": "\U0001d521", + "dharl;": "\u21c3", + "dharr;": "\u21c2", + "diam;": "\u22c4", + "diamond;": "\u22c4", + "diamondsuit;": "\u2666", + 
"diams;": "\u2666", + "die;": "\xa8", + "digamma;": "\u03dd", + "disin;": "\u22f2", + "div;": "\xf7", + "divide": "\xf7", + "divide;": "\xf7", + "divideontimes;": "\u22c7", + "divonx;": "\u22c7", + "djcy;": "\u0452", + "dlcorn;": "\u231e", + "dlcrop;": "\u230d", + "dollar;": "$", + "dopf;": "\U0001d555", + "dot;": "\u02d9", + "doteq;": "\u2250", + "doteqdot;": "\u2251", + "dotminus;": "\u2238", + "dotplus;": "\u2214", + "dotsquare;": "\u22a1", + "doublebarwedge;": "\u2306", + "downarrow;": "\u2193", + "downdownarrows;": "\u21ca", + "downharpoonleft;": "\u21c3", + "downharpoonright;": "\u21c2", + "drbkarow;": "\u2910", + "drcorn;": "\u231f", + "drcrop;": "\u230c", + "dscr;": "\U0001d4b9", + "dscy;": "\u0455", + "dsol;": "\u29f6", + "dstrok;": "\u0111", + "dtdot;": "\u22f1", + "dtri;": "\u25bf", + "dtrif;": "\u25be", + "duarr;": "\u21f5", + "duhar;": "\u296f", + "dwangle;": "\u29a6", + "dzcy;": "\u045f", + "dzigrarr;": "\u27ff", + "eDDot;": "\u2a77", + "eDot;": "\u2251", + "eacute": "\xe9", + "eacute;": "\xe9", + "easter;": "\u2a6e", + "ecaron;": "\u011b", + "ecir;": "\u2256", + "ecirc": "\xea", + "ecirc;": "\xea", + "ecolon;": "\u2255", + "ecy;": "\u044d", + "edot;": "\u0117", + "ee;": "\u2147", + "efDot;": "\u2252", + "efr;": "\U0001d522", + "eg;": "\u2a9a", + "egrave": "\xe8", + "egrave;": "\xe8", + "egs;": "\u2a96", + "egsdot;": "\u2a98", + "el;": "\u2a99", + "elinters;": "\u23e7", + "ell;": "\u2113", + "els;": "\u2a95", + "elsdot;": "\u2a97", + "emacr;": "\u0113", + "empty;": "\u2205", + "emptyset;": "\u2205", + "emptyv;": "\u2205", + "emsp13;": "\u2004", + "emsp14;": "\u2005", + "emsp;": "\u2003", + "eng;": "\u014b", + "ensp;": "\u2002", + "eogon;": "\u0119", + "eopf;": "\U0001d556", + "epar;": "\u22d5", + "eparsl;": "\u29e3", + "eplus;": "\u2a71", + "epsi;": "\u03b5", + "epsilon;": "\u03b5", + "epsiv;": "\u03f5", + "eqcirc;": "\u2256", + "eqcolon;": "\u2255", + "eqsim;": "\u2242", + "eqslantgtr;": "\u2a96", + "eqslantless;": "\u2a95", + "equals;": "=", + "equest;": "\u225f", + "equiv;": "\u2261", + "equivDD;": "\u2a78", + "eqvparsl;": "\u29e5", + "erDot;": "\u2253", + "erarr;": "\u2971", + "escr;": "\u212f", + "esdot;": "\u2250", + "esim;": "\u2242", + "eta;": "\u03b7", + "eth": "\xf0", + "eth;": "\xf0", + "euml": "\xeb", + "euml;": "\xeb", + "euro;": "\u20ac", + "excl;": "!", + "exist;": "\u2203", + "expectation;": "\u2130", + "exponentiale;": "\u2147", + "fallingdotseq;": "\u2252", + "fcy;": "\u0444", + "female;": "\u2640", + "ffilig;": "\ufb03", + "fflig;": "\ufb00", + "ffllig;": "\ufb04", + "ffr;": "\U0001d523", + "filig;": "\ufb01", + "fjlig;": "fj", + "flat;": "\u266d", + "fllig;": "\ufb02", + "fltns;": "\u25b1", + "fnof;": "\u0192", + "fopf;": "\U0001d557", + "forall;": "\u2200", + "fork;": "\u22d4", + "forkv;": "\u2ad9", + "fpartint;": "\u2a0d", + "frac12": "\xbd", + "frac12;": "\xbd", + "frac13;": "\u2153", + "frac14": "\xbc", + "frac14;": "\xbc", + "frac15;": "\u2155", + "frac16;": "\u2159", + "frac18;": "\u215b", + "frac23;": "\u2154", + "frac25;": "\u2156", + "frac34": "\xbe", + "frac34;": "\xbe", + "frac35;": "\u2157", + "frac38;": "\u215c", + "frac45;": "\u2158", + "frac56;": "\u215a", + "frac58;": "\u215d", + "frac78;": "\u215e", + "frasl;": "\u2044", + "frown;": "\u2322", + "fscr;": "\U0001d4bb", + "gE;": "\u2267", + "gEl;": "\u2a8c", + "gacute;": "\u01f5", + "gamma;": "\u03b3", + "gammad;": "\u03dd", + "gap;": "\u2a86", + "gbreve;": "\u011f", + "gcirc;": "\u011d", + "gcy;": "\u0433", + "gdot;": "\u0121", + "ge;": "\u2265", + "gel;": "\u22db", + "geq;": "\u2265", + 
"geqq;": "\u2267", + "geqslant;": "\u2a7e", + "ges;": "\u2a7e", + "gescc;": "\u2aa9", + "gesdot;": "\u2a80", + "gesdoto;": "\u2a82", + "gesdotol;": "\u2a84", + "gesl;": "\u22db\ufe00", + "gesles;": "\u2a94", + "gfr;": "\U0001d524", + "gg;": "\u226b", + "ggg;": "\u22d9", + "gimel;": "\u2137", + "gjcy;": "\u0453", + "gl;": "\u2277", + "glE;": "\u2a92", + "gla;": "\u2aa5", + "glj;": "\u2aa4", + "gnE;": "\u2269", + "gnap;": "\u2a8a", + "gnapprox;": "\u2a8a", + "gne;": "\u2a88", + "gneq;": "\u2a88", + "gneqq;": "\u2269", + "gnsim;": "\u22e7", + "gopf;": "\U0001d558", + "grave;": "`", + "gscr;": "\u210a", + "gsim;": "\u2273", + "gsime;": "\u2a8e", + "gsiml;": "\u2a90", + "gt": ">", + "gt;": ">", + "gtcc;": "\u2aa7", + "gtcir;": "\u2a7a", + "gtdot;": "\u22d7", + "gtlPar;": "\u2995", + "gtquest;": "\u2a7c", + "gtrapprox;": "\u2a86", + "gtrarr;": "\u2978", + "gtrdot;": "\u22d7", + "gtreqless;": "\u22db", + "gtreqqless;": "\u2a8c", + "gtrless;": "\u2277", + "gtrsim;": "\u2273", + "gvertneqq;": "\u2269\ufe00", + "gvnE;": "\u2269\ufe00", + "hArr;": "\u21d4", + "hairsp;": "\u200a", + "half;": "\xbd", + "hamilt;": "\u210b", + "hardcy;": "\u044a", + "harr;": "\u2194", + "harrcir;": "\u2948", + "harrw;": "\u21ad", + "hbar;": "\u210f", + "hcirc;": "\u0125", + "hearts;": "\u2665", + "heartsuit;": "\u2665", + "hellip;": "\u2026", + "hercon;": "\u22b9", + "hfr;": "\U0001d525", + "hksearow;": "\u2925", + "hkswarow;": "\u2926", + "hoarr;": "\u21ff", + "homtht;": "\u223b", + "hookleftarrow;": "\u21a9", + "hookrightarrow;": "\u21aa", + "hopf;": "\U0001d559", + "horbar;": "\u2015", + "hscr;": "\U0001d4bd", + "hslash;": "\u210f", + "hstrok;": "\u0127", + "hybull;": "\u2043", + "hyphen;": "\u2010", + "iacute": "\xed", + "iacute;": "\xed", + "ic;": "\u2063", + "icirc": "\xee", + "icirc;": "\xee", + "icy;": "\u0438", + "iecy;": "\u0435", + "iexcl": "\xa1", + "iexcl;": "\xa1", + "iff;": "\u21d4", + "ifr;": "\U0001d526", + "igrave": "\xec", + "igrave;": "\xec", + "ii;": "\u2148", + "iiiint;": "\u2a0c", + "iiint;": "\u222d", + "iinfin;": "\u29dc", + "iiota;": "\u2129", + "ijlig;": "\u0133", + "imacr;": "\u012b", + "image;": "\u2111", + "imagline;": "\u2110", + "imagpart;": "\u2111", + "imath;": "\u0131", + "imof;": "\u22b7", + "imped;": "\u01b5", + "in;": "\u2208", + "incare;": "\u2105", + "infin;": "\u221e", + "infintie;": "\u29dd", + "inodot;": "\u0131", + "int;": "\u222b", + "intcal;": "\u22ba", + "integers;": "\u2124", + "intercal;": "\u22ba", + "intlarhk;": "\u2a17", + "intprod;": "\u2a3c", + "iocy;": "\u0451", + "iogon;": "\u012f", + "iopf;": "\U0001d55a", + "iota;": "\u03b9", + "iprod;": "\u2a3c", + "iquest": "\xbf", + "iquest;": "\xbf", + "iscr;": "\U0001d4be", + "isin;": "\u2208", + "isinE;": "\u22f9", + "isindot;": "\u22f5", + "isins;": "\u22f4", + "isinsv;": "\u22f3", + "isinv;": "\u2208", + "it;": "\u2062", + "itilde;": "\u0129", + "iukcy;": "\u0456", + "iuml": "\xef", + "iuml;": "\xef", + "jcirc;": "\u0135", + "jcy;": "\u0439", + "jfr;": "\U0001d527", + "jmath;": "\u0237", + "jopf;": "\U0001d55b", + "jscr;": "\U0001d4bf", + "jsercy;": "\u0458", + "jukcy;": "\u0454", + "kappa;": "\u03ba", + "kappav;": "\u03f0", + "kcedil;": "\u0137", + "kcy;": "\u043a", + "kfr;": "\U0001d528", + "kgreen;": "\u0138", + "khcy;": "\u0445", + "kjcy;": "\u045c", + "kopf;": "\U0001d55c", + "kscr;": "\U0001d4c0", + "lAarr;": "\u21da", + "lArr;": "\u21d0", + "lAtail;": "\u291b", + "lBarr;": "\u290e", + "lE;": "\u2266", + "lEg;": "\u2a8b", + "lHar;": "\u2962", + "lacute;": "\u013a", + "laemptyv;": "\u29b4", + "lagran;": "\u2112", + 
"lambda;": "\u03bb", + "lang;": "\u27e8", + "langd;": "\u2991", + "langle;": "\u27e8", + "lap;": "\u2a85", + "laquo": "\xab", + "laquo;": "\xab", + "larr;": "\u2190", + "larrb;": "\u21e4", + "larrbfs;": "\u291f", + "larrfs;": "\u291d", + "larrhk;": "\u21a9", + "larrlp;": "\u21ab", + "larrpl;": "\u2939", + "larrsim;": "\u2973", + "larrtl;": "\u21a2", + "lat;": "\u2aab", + "latail;": "\u2919", + "late;": "\u2aad", + "lates;": "\u2aad\ufe00", + "lbarr;": "\u290c", + "lbbrk;": "\u2772", + "lbrace;": "{", + "lbrack;": "[", + "lbrke;": "\u298b", + "lbrksld;": "\u298f", + "lbrkslu;": "\u298d", + "lcaron;": "\u013e", + "lcedil;": "\u013c", + "lceil;": "\u2308", + "lcub;": "{", + "lcy;": "\u043b", + "ldca;": "\u2936", + "ldquo;": "\u201c", + "ldquor;": "\u201e", + "ldrdhar;": "\u2967", + "ldrushar;": "\u294b", + "ldsh;": "\u21b2", + "le;": "\u2264", + "leftarrow;": "\u2190", + "leftarrowtail;": "\u21a2", + "leftharpoondown;": "\u21bd", + "leftharpoonup;": "\u21bc", + "leftleftarrows;": "\u21c7", + "leftrightarrow;": "\u2194", + "leftrightarrows;": "\u21c6", + "leftrightharpoons;": "\u21cb", + "leftrightsquigarrow;": "\u21ad", + "leftthreetimes;": "\u22cb", + "leg;": "\u22da", + "leq;": "\u2264", + "leqq;": "\u2266", + "leqslant;": "\u2a7d", + "les;": "\u2a7d", + "lescc;": "\u2aa8", + "lesdot;": "\u2a7f", + "lesdoto;": "\u2a81", + "lesdotor;": "\u2a83", + "lesg;": "\u22da\ufe00", + "lesges;": "\u2a93", + "lessapprox;": "\u2a85", + "lessdot;": "\u22d6", + "lesseqgtr;": "\u22da", + "lesseqqgtr;": "\u2a8b", + "lessgtr;": "\u2276", + "lesssim;": "\u2272", + "lfisht;": "\u297c", + "lfloor;": "\u230a", + "lfr;": "\U0001d529", + "lg;": "\u2276", + "lgE;": "\u2a91", + "lhard;": "\u21bd", + "lharu;": "\u21bc", + "lharul;": "\u296a", + "lhblk;": "\u2584", + "ljcy;": "\u0459", + "ll;": "\u226a", + "llarr;": "\u21c7", + "llcorner;": "\u231e", + "llhard;": "\u296b", + "lltri;": "\u25fa", + "lmidot;": "\u0140", + "lmoust;": "\u23b0", + "lmoustache;": "\u23b0", + "lnE;": "\u2268", + "lnap;": "\u2a89", + "lnapprox;": "\u2a89", + "lne;": "\u2a87", + "lneq;": "\u2a87", + "lneqq;": "\u2268", + "lnsim;": "\u22e6", + "loang;": "\u27ec", + "loarr;": "\u21fd", + "lobrk;": "\u27e6", + "longleftarrow;": "\u27f5", + "longleftrightarrow;": "\u27f7", + "longmapsto;": "\u27fc", + "longrightarrow;": "\u27f6", + "looparrowleft;": "\u21ab", + "looparrowright;": "\u21ac", + "lopar;": "\u2985", + "lopf;": "\U0001d55d", + "loplus;": "\u2a2d", + "lotimes;": "\u2a34", + "lowast;": "\u2217", + "lowbar;": "_", + "loz;": "\u25ca", + "lozenge;": "\u25ca", + "lozf;": "\u29eb", + "lpar;": "(", + "lparlt;": "\u2993", + "lrarr;": "\u21c6", + "lrcorner;": "\u231f", + "lrhar;": "\u21cb", + "lrhard;": "\u296d", + "lrm;": "\u200e", + "lrtri;": "\u22bf", + "lsaquo;": "\u2039", + "lscr;": "\U0001d4c1", + "lsh;": "\u21b0", + "lsim;": "\u2272", + "lsime;": "\u2a8d", + "lsimg;": "\u2a8f", + "lsqb;": "[", + "lsquo;": "\u2018", + "lsquor;": "\u201a", + "lstrok;": "\u0142", + "lt": "<", + "lt;": "<", + "ltcc;": "\u2aa6", + "ltcir;": "\u2a79", + "ltdot;": "\u22d6", + "lthree;": "\u22cb", + "ltimes;": "\u22c9", + "ltlarr;": "\u2976", + "ltquest;": "\u2a7b", + "ltrPar;": "\u2996", + "ltri;": "\u25c3", + "ltrie;": "\u22b4", + "ltrif;": "\u25c2", + "lurdshar;": "\u294a", + "luruhar;": "\u2966", + "lvertneqq;": "\u2268\ufe00", + "lvnE;": "\u2268\ufe00", + "mDDot;": "\u223a", + "macr": "\xaf", + "macr;": "\xaf", + "male;": "\u2642", + "malt;": "\u2720", + "maltese;": "\u2720", + "map;": "\u21a6", + "mapsto;": "\u21a6", + "mapstodown;": "\u21a7", + "mapstoleft;": 
"\u21a4", + "mapstoup;": "\u21a5", + "marker;": "\u25ae", + "mcomma;": "\u2a29", + "mcy;": "\u043c", + "mdash;": "\u2014", + "measuredangle;": "\u2221", + "mfr;": "\U0001d52a", + "mho;": "\u2127", + "micro": "\xb5", + "micro;": "\xb5", + "mid;": "\u2223", + "midast;": "*", + "midcir;": "\u2af0", + "middot": "\xb7", + "middot;": "\xb7", + "minus;": "\u2212", + "minusb;": "\u229f", + "minusd;": "\u2238", + "minusdu;": "\u2a2a", + "mlcp;": "\u2adb", + "mldr;": "\u2026", + "mnplus;": "\u2213", + "models;": "\u22a7", + "mopf;": "\U0001d55e", + "mp;": "\u2213", + "mscr;": "\U0001d4c2", + "mstpos;": "\u223e", + "mu;": "\u03bc", + "multimap;": "\u22b8", + "mumap;": "\u22b8", + "nGg;": "\u22d9\u0338", + "nGt;": "\u226b\u20d2", + "nGtv;": "\u226b\u0338", + "nLeftarrow;": "\u21cd", + "nLeftrightarrow;": "\u21ce", + "nLl;": "\u22d8\u0338", + "nLt;": "\u226a\u20d2", + "nLtv;": "\u226a\u0338", + "nRightarrow;": "\u21cf", + "nVDash;": "\u22af", + "nVdash;": "\u22ae", + "nabla;": "\u2207", + "nacute;": "\u0144", + "nang;": "\u2220\u20d2", + "nap;": "\u2249", + "napE;": "\u2a70\u0338", + "napid;": "\u224b\u0338", + "napos;": "\u0149", + "napprox;": "\u2249", + "natur;": "\u266e", + "natural;": "\u266e", + "naturals;": "\u2115", + "nbsp": "\xa0", + "nbsp;": "\xa0", + "nbump;": "\u224e\u0338", + "nbumpe;": "\u224f\u0338", + "ncap;": "\u2a43", + "ncaron;": "\u0148", + "ncedil;": "\u0146", + "ncong;": "\u2247", + "ncongdot;": "\u2a6d\u0338", + "ncup;": "\u2a42", + "ncy;": "\u043d", + "ndash;": "\u2013", + "ne;": "\u2260", + "neArr;": "\u21d7", + "nearhk;": "\u2924", + "nearr;": "\u2197", + "nearrow;": "\u2197", + "nedot;": "\u2250\u0338", + "nequiv;": "\u2262", + "nesear;": "\u2928", + "nesim;": "\u2242\u0338", + "nexist;": "\u2204", + "nexists;": "\u2204", + "nfr;": "\U0001d52b", + "ngE;": "\u2267\u0338", + "nge;": "\u2271", + "ngeq;": "\u2271", + "ngeqq;": "\u2267\u0338", + "ngeqslant;": "\u2a7e\u0338", + "nges;": "\u2a7e\u0338", + "ngsim;": "\u2275", + "ngt;": "\u226f", + "ngtr;": "\u226f", + "nhArr;": "\u21ce", + "nharr;": "\u21ae", + "nhpar;": "\u2af2", + "ni;": "\u220b", + "nis;": "\u22fc", + "nisd;": "\u22fa", + "niv;": "\u220b", + "njcy;": "\u045a", + "nlArr;": "\u21cd", + "nlE;": "\u2266\u0338", + "nlarr;": "\u219a", + "nldr;": "\u2025", + "nle;": "\u2270", + "nleftarrow;": "\u219a", + "nleftrightarrow;": "\u21ae", + "nleq;": "\u2270", + "nleqq;": "\u2266\u0338", + "nleqslant;": "\u2a7d\u0338", + "nles;": "\u2a7d\u0338", + "nless;": "\u226e", + "nlsim;": "\u2274", + "nlt;": "\u226e", + "nltri;": "\u22ea", + "nltrie;": "\u22ec", + "nmid;": "\u2224", + "nopf;": "\U0001d55f", + "not": "\xac", + "not;": "\xac", + "notin;": "\u2209", + "notinE;": "\u22f9\u0338", + "notindot;": "\u22f5\u0338", + "notinva;": "\u2209", + "notinvb;": "\u22f7", + "notinvc;": "\u22f6", + "notni;": "\u220c", + "notniva;": "\u220c", + "notnivb;": "\u22fe", + "notnivc;": "\u22fd", + "npar;": "\u2226", + "nparallel;": "\u2226", + "nparsl;": "\u2afd\u20e5", + "npart;": "\u2202\u0338", + "npolint;": "\u2a14", + "npr;": "\u2280", + "nprcue;": "\u22e0", + "npre;": "\u2aaf\u0338", + "nprec;": "\u2280", + "npreceq;": "\u2aaf\u0338", + "nrArr;": "\u21cf", + "nrarr;": "\u219b", + "nrarrc;": "\u2933\u0338", + "nrarrw;": "\u219d\u0338", + "nrightarrow;": "\u219b", + "nrtri;": "\u22eb", + "nrtrie;": "\u22ed", + "nsc;": "\u2281", + "nsccue;": "\u22e1", + "nsce;": "\u2ab0\u0338", + "nscr;": "\U0001d4c3", + "nshortmid;": "\u2224", + "nshortparallel;": "\u2226", + "nsim;": "\u2241", + "nsime;": "\u2244", + "nsimeq;": "\u2244", + "nsmid;": 
"\u2224", + "nspar;": "\u2226", + "nsqsube;": "\u22e2", + "nsqsupe;": "\u22e3", + "nsub;": "\u2284", + "nsubE;": "\u2ac5\u0338", + "nsube;": "\u2288", + "nsubset;": "\u2282\u20d2", + "nsubseteq;": "\u2288", + "nsubseteqq;": "\u2ac5\u0338", + "nsucc;": "\u2281", + "nsucceq;": "\u2ab0\u0338", + "nsup;": "\u2285", + "nsupE;": "\u2ac6\u0338", + "nsupe;": "\u2289", + "nsupset;": "\u2283\u20d2", + "nsupseteq;": "\u2289", + "nsupseteqq;": "\u2ac6\u0338", + "ntgl;": "\u2279", + "ntilde": "\xf1", + "ntilde;": "\xf1", + "ntlg;": "\u2278", + "ntriangleleft;": "\u22ea", + "ntrianglelefteq;": "\u22ec", + "ntriangleright;": "\u22eb", + "ntrianglerighteq;": "\u22ed", + "nu;": "\u03bd", + "num;": "#", + "numero;": "\u2116", + "numsp;": "\u2007", + "nvDash;": "\u22ad", + "nvHarr;": "\u2904", + "nvap;": "\u224d\u20d2", + "nvdash;": "\u22ac", + "nvge;": "\u2265\u20d2", + "nvgt;": ">\u20d2", + "nvinfin;": "\u29de", + "nvlArr;": "\u2902", + "nvle;": "\u2264\u20d2", + "nvlt;": "<\u20d2", + "nvltrie;": "\u22b4\u20d2", + "nvrArr;": "\u2903", + "nvrtrie;": "\u22b5\u20d2", + "nvsim;": "\u223c\u20d2", + "nwArr;": "\u21d6", + "nwarhk;": "\u2923", + "nwarr;": "\u2196", + "nwarrow;": "\u2196", + "nwnear;": "\u2927", + "oS;": "\u24c8", + "oacute": "\xf3", + "oacute;": "\xf3", + "oast;": "\u229b", + "ocir;": "\u229a", + "ocirc": "\xf4", + "ocirc;": "\xf4", + "ocy;": "\u043e", + "odash;": "\u229d", + "odblac;": "\u0151", + "odiv;": "\u2a38", + "odot;": "\u2299", + "odsold;": "\u29bc", + "oelig;": "\u0153", + "ofcir;": "\u29bf", + "ofr;": "\U0001d52c", + "ogon;": "\u02db", + "ograve": "\xf2", + "ograve;": "\xf2", + "ogt;": "\u29c1", + "ohbar;": "\u29b5", + "ohm;": "\u03a9", + "oint;": "\u222e", + "olarr;": "\u21ba", + "olcir;": "\u29be", + "olcross;": "\u29bb", + "oline;": "\u203e", + "olt;": "\u29c0", + "omacr;": "\u014d", + "omega;": "\u03c9", + "omicron;": "\u03bf", + "omid;": "\u29b6", + "ominus;": "\u2296", + "oopf;": "\U0001d560", + "opar;": "\u29b7", + "operp;": "\u29b9", + "oplus;": "\u2295", + "or;": "\u2228", + "orarr;": "\u21bb", + "ord;": "\u2a5d", + "order;": "\u2134", + "orderof;": "\u2134", + "ordf": "\xaa", + "ordf;": "\xaa", + "ordm": "\xba", + "ordm;": "\xba", + "origof;": "\u22b6", + "oror;": "\u2a56", + "orslope;": "\u2a57", + "orv;": "\u2a5b", + "oscr;": "\u2134", + "oslash": "\xf8", + "oslash;": "\xf8", + "osol;": "\u2298", + "otilde": "\xf5", + "otilde;": "\xf5", + "otimes;": "\u2297", + "otimesas;": "\u2a36", + "ouml": "\xf6", + "ouml;": "\xf6", + "ovbar;": "\u233d", + "par;": "\u2225", + "para": "\xb6", + "para;": "\xb6", + "parallel;": "\u2225", + "parsim;": "\u2af3", + "parsl;": "\u2afd", + "part;": "\u2202", + "pcy;": "\u043f", + "percnt;": "%", + "period;": ".", + "permil;": "\u2030", + "perp;": "\u22a5", + "pertenk;": "\u2031", + "pfr;": "\U0001d52d", + "phi;": "\u03c6", + "phiv;": "\u03d5", + "phmmat;": "\u2133", + "phone;": "\u260e", + "pi;": "\u03c0", + "pitchfork;": "\u22d4", + "piv;": "\u03d6", + "planck;": "\u210f", + "planckh;": "\u210e", + "plankv;": "\u210f", + "plus;": "+", + "plusacir;": "\u2a23", + "plusb;": "\u229e", + "pluscir;": "\u2a22", + "plusdo;": "\u2214", + "plusdu;": "\u2a25", + "pluse;": "\u2a72", + "plusmn": "\xb1", + "plusmn;": "\xb1", + "plussim;": "\u2a26", + "plustwo;": "\u2a27", + "pm;": "\xb1", + "pointint;": "\u2a15", + "popf;": "\U0001d561", + "pound": "\xa3", + "pound;": "\xa3", + "pr;": "\u227a", + "prE;": "\u2ab3", + "prap;": "\u2ab7", + "prcue;": "\u227c", + "pre;": "\u2aaf", + "prec;": "\u227a", + "precapprox;": "\u2ab7", + "preccurlyeq;": "\u227c", + 
"preceq;": "\u2aaf", + "precnapprox;": "\u2ab9", + "precneqq;": "\u2ab5", + "precnsim;": "\u22e8", + "precsim;": "\u227e", + "prime;": "\u2032", + "primes;": "\u2119", + "prnE;": "\u2ab5", + "prnap;": "\u2ab9", + "prnsim;": "\u22e8", + "prod;": "\u220f", + "profalar;": "\u232e", + "profline;": "\u2312", + "profsurf;": "\u2313", + "prop;": "\u221d", + "propto;": "\u221d", + "prsim;": "\u227e", + "prurel;": "\u22b0", + "pscr;": "\U0001d4c5", + "psi;": "\u03c8", + "puncsp;": "\u2008", + "qfr;": "\U0001d52e", + "qint;": "\u2a0c", + "qopf;": "\U0001d562", + "qprime;": "\u2057", + "qscr;": "\U0001d4c6", + "quaternions;": "\u210d", + "quatint;": "\u2a16", + "quest;": "?", + "questeq;": "\u225f", + "quot": "\"", + "quot;": "\"", + "rAarr;": "\u21db", + "rArr;": "\u21d2", + "rAtail;": "\u291c", + "rBarr;": "\u290f", + "rHar;": "\u2964", + "race;": "\u223d\u0331", + "racute;": "\u0155", + "radic;": "\u221a", + "raemptyv;": "\u29b3", + "rang;": "\u27e9", + "rangd;": "\u2992", + "range;": "\u29a5", + "rangle;": "\u27e9", + "raquo": "\xbb", + "raquo;": "\xbb", + "rarr;": "\u2192", + "rarrap;": "\u2975", + "rarrb;": "\u21e5", + "rarrbfs;": "\u2920", + "rarrc;": "\u2933", + "rarrfs;": "\u291e", + "rarrhk;": "\u21aa", + "rarrlp;": "\u21ac", + "rarrpl;": "\u2945", + "rarrsim;": "\u2974", + "rarrtl;": "\u21a3", + "rarrw;": "\u219d", + "ratail;": "\u291a", + "ratio;": "\u2236", + "rationals;": "\u211a", + "rbarr;": "\u290d", + "rbbrk;": "\u2773", + "rbrace;": "}", + "rbrack;": "]", + "rbrke;": "\u298c", + "rbrksld;": "\u298e", + "rbrkslu;": "\u2990", + "rcaron;": "\u0159", + "rcedil;": "\u0157", + "rceil;": "\u2309", + "rcub;": "}", + "rcy;": "\u0440", + "rdca;": "\u2937", + "rdldhar;": "\u2969", + "rdquo;": "\u201d", + "rdquor;": "\u201d", + "rdsh;": "\u21b3", + "real;": "\u211c", + "realine;": "\u211b", + "realpart;": "\u211c", + "reals;": "\u211d", + "rect;": "\u25ad", + "reg": "\xae", + "reg;": "\xae", + "rfisht;": "\u297d", + "rfloor;": "\u230b", + "rfr;": "\U0001d52f", + "rhard;": "\u21c1", + "rharu;": "\u21c0", + "rharul;": "\u296c", + "rho;": "\u03c1", + "rhov;": "\u03f1", + "rightarrow;": "\u2192", + "rightarrowtail;": "\u21a3", + "rightharpoondown;": "\u21c1", + "rightharpoonup;": "\u21c0", + "rightleftarrows;": "\u21c4", + "rightleftharpoons;": "\u21cc", + "rightrightarrows;": "\u21c9", + "rightsquigarrow;": "\u219d", + "rightthreetimes;": "\u22cc", + "ring;": "\u02da", + "risingdotseq;": "\u2253", + "rlarr;": "\u21c4", + "rlhar;": "\u21cc", + "rlm;": "\u200f", + "rmoust;": "\u23b1", + "rmoustache;": "\u23b1", + "rnmid;": "\u2aee", + "roang;": "\u27ed", + "roarr;": "\u21fe", + "robrk;": "\u27e7", + "ropar;": "\u2986", + "ropf;": "\U0001d563", + "roplus;": "\u2a2e", + "rotimes;": "\u2a35", + "rpar;": ")", + "rpargt;": "\u2994", + "rppolint;": "\u2a12", + "rrarr;": "\u21c9", + "rsaquo;": "\u203a", + "rscr;": "\U0001d4c7", + "rsh;": "\u21b1", + "rsqb;": "]", + "rsquo;": "\u2019", + "rsquor;": "\u2019", + "rthree;": "\u22cc", + "rtimes;": "\u22ca", + "rtri;": "\u25b9", + "rtrie;": "\u22b5", + "rtrif;": "\u25b8", + "rtriltri;": "\u29ce", + "ruluhar;": "\u2968", + "rx;": "\u211e", + "sacute;": "\u015b", + "sbquo;": "\u201a", + "sc;": "\u227b", + "scE;": "\u2ab4", + "scap;": "\u2ab8", + "scaron;": "\u0161", + "sccue;": "\u227d", + "sce;": "\u2ab0", + "scedil;": "\u015f", + "scirc;": "\u015d", + "scnE;": "\u2ab6", + "scnap;": "\u2aba", + "scnsim;": "\u22e9", + "scpolint;": "\u2a13", + "scsim;": "\u227f", + "scy;": "\u0441", + "sdot;": "\u22c5", + "sdotb;": "\u22a1", + "sdote;": "\u2a66", + "seArr;": 
"\u21d8", + "searhk;": "\u2925", + "searr;": "\u2198", + "searrow;": "\u2198", + "sect": "\xa7", + "sect;": "\xa7", + "semi;": ";", + "seswar;": "\u2929", + "setminus;": "\u2216", + "setmn;": "\u2216", + "sext;": "\u2736", + "sfr;": "\U0001d530", + "sfrown;": "\u2322", + "sharp;": "\u266f", + "shchcy;": "\u0449", + "shcy;": "\u0448", + "shortmid;": "\u2223", + "shortparallel;": "\u2225", + "shy": "\xad", + "shy;": "\xad", + "sigma;": "\u03c3", + "sigmaf;": "\u03c2", + "sigmav;": "\u03c2", + "sim;": "\u223c", + "simdot;": "\u2a6a", + "sime;": "\u2243", + "simeq;": "\u2243", + "simg;": "\u2a9e", + "simgE;": "\u2aa0", + "siml;": "\u2a9d", + "simlE;": "\u2a9f", + "simne;": "\u2246", + "simplus;": "\u2a24", + "simrarr;": "\u2972", + "slarr;": "\u2190", + "smallsetminus;": "\u2216", + "smashp;": "\u2a33", + "smeparsl;": "\u29e4", + "smid;": "\u2223", + "smile;": "\u2323", + "smt;": "\u2aaa", + "smte;": "\u2aac", + "smtes;": "\u2aac\ufe00", + "softcy;": "\u044c", + "sol;": "/", + "solb;": "\u29c4", + "solbar;": "\u233f", + "sopf;": "\U0001d564", + "spades;": "\u2660", + "spadesuit;": "\u2660", + "spar;": "\u2225", + "sqcap;": "\u2293", + "sqcaps;": "\u2293\ufe00", + "sqcup;": "\u2294", + "sqcups;": "\u2294\ufe00", + "sqsub;": "\u228f", + "sqsube;": "\u2291", + "sqsubset;": "\u228f", + "sqsubseteq;": "\u2291", + "sqsup;": "\u2290", + "sqsupe;": "\u2292", + "sqsupset;": "\u2290", + "sqsupseteq;": "\u2292", + "squ;": "\u25a1", + "square;": "\u25a1", + "squarf;": "\u25aa", + "squf;": "\u25aa", + "srarr;": "\u2192", + "sscr;": "\U0001d4c8", + "ssetmn;": "\u2216", + "ssmile;": "\u2323", + "sstarf;": "\u22c6", + "star;": "\u2606", + "starf;": "\u2605", + "straightepsilon;": "\u03f5", + "straightphi;": "\u03d5", + "strns;": "\xaf", + "sub;": "\u2282", + "subE;": "\u2ac5", + "subdot;": "\u2abd", + "sube;": "\u2286", + "subedot;": "\u2ac3", + "submult;": "\u2ac1", + "subnE;": "\u2acb", + "subne;": "\u228a", + "subplus;": "\u2abf", + "subrarr;": "\u2979", + "subset;": "\u2282", + "subseteq;": "\u2286", + "subseteqq;": "\u2ac5", + "subsetneq;": "\u228a", + "subsetneqq;": "\u2acb", + "subsim;": "\u2ac7", + "subsub;": "\u2ad5", + "subsup;": "\u2ad3", + "succ;": "\u227b", + "succapprox;": "\u2ab8", + "succcurlyeq;": "\u227d", + "succeq;": "\u2ab0", + "succnapprox;": "\u2aba", + "succneqq;": "\u2ab6", + "succnsim;": "\u22e9", + "succsim;": "\u227f", + "sum;": "\u2211", + "sung;": "\u266a", + "sup1": "\xb9", + "sup1;": "\xb9", + "sup2": "\xb2", + "sup2;": "\xb2", + "sup3": "\xb3", + "sup3;": "\xb3", + "sup;": "\u2283", + "supE;": "\u2ac6", + "supdot;": "\u2abe", + "supdsub;": "\u2ad8", + "supe;": "\u2287", + "supedot;": "\u2ac4", + "suphsol;": "\u27c9", + "suphsub;": "\u2ad7", + "suplarr;": "\u297b", + "supmult;": "\u2ac2", + "supnE;": "\u2acc", + "supne;": "\u228b", + "supplus;": "\u2ac0", + "supset;": "\u2283", + "supseteq;": "\u2287", + "supseteqq;": "\u2ac6", + "supsetneq;": "\u228b", + "supsetneqq;": "\u2acc", + "supsim;": "\u2ac8", + "supsub;": "\u2ad4", + "supsup;": "\u2ad6", + "swArr;": "\u21d9", + "swarhk;": "\u2926", + "swarr;": "\u2199", + "swarrow;": "\u2199", + "swnwar;": "\u292a", + "szlig": "\xdf", + "szlig;": "\xdf", + "target;": "\u2316", + "tau;": "\u03c4", + "tbrk;": "\u23b4", + "tcaron;": "\u0165", + "tcedil;": "\u0163", + "tcy;": "\u0442", + "tdot;": "\u20db", + "telrec;": "\u2315", + "tfr;": "\U0001d531", + "there4;": "\u2234", + "therefore;": "\u2234", + "theta;": "\u03b8", + "thetasym;": "\u03d1", + "thetav;": "\u03d1", + "thickapprox;": "\u2248", + "thicksim;": "\u223c", + "thinsp;": 
"\u2009", + "thkap;": "\u2248", + "thksim;": "\u223c", + "thorn": "\xfe", + "thorn;": "\xfe", + "tilde;": "\u02dc", + "times": "\xd7", + "times;": "\xd7", + "timesb;": "\u22a0", + "timesbar;": "\u2a31", + "timesd;": "\u2a30", + "tint;": "\u222d", + "toea;": "\u2928", + "top;": "\u22a4", + "topbot;": "\u2336", + "topcir;": "\u2af1", + "topf;": "\U0001d565", + "topfork;": "\u2ada", + "tosa;": "\u2929", + "tprime;": "\u2034", + "trade;": "\u2122", + "triangle;": "\u25b5", + "triangledown;": "\u25bf", + "triangleleft;": "\u25c3", + "trianglelefteq;": "\u22b4", + "triangleq;": "\u225c", + "triangleright;": "\u25b9", + "trianglerighteq;": "\u22b5", + "tridot;": "\u25ec", + "trie;": "\u225c", + "triminus;": "\u2a3a", + "triplus;": "\u2a39", + "trisb;": "\u29cd", + "tritime;": "\u2a3b", + "trpezium;": "\u23e2", + "tscr;": "\U0001d4c9", + "tscy;": "\u0446", + "tshcy;": "\u045b", + "tstrok;": "\u0167", + "twixt;": "\u226c", + "twoheadleftarrow;": "\u219e", + "twoheadrightarrow;": "\u21a0", + "uArr;": "\u21d1", + "uHar;": "\u2963", + "uacute": "\xfa", + "uacute;": "\xfa", + "uarr;": "\u2191", + "ubrcy;": "\u045e", + "ubreve;": "\u016d", + "ucirc": "\xfb", + "ucirc;": "\xfb", + "ucy;": "\u0443", + "udarr;": "\u21c5", + "udblac;": "\u0171", + "udhar;": "\u296e", + "ufisht;": "\u297e", + "ufr;": "\U0001d532", + "ugrave": "\xf9", + "ugrave;": "\xf9", + "uharl;": "\u21bf", + "uharr;": "\u21be", + "uhblk;": "\u2580", + "ulcorn;": "\u231c", + "ulcorner;": "\u231c", + "ulcrop;": "\u230f", + "ultri;": "\u25f8", + "umacr;": "\u016b", + "uml": "\xa8", + "uml;": "\xa8", + "uogon;": "\u0173", + "uopf;": "\U0001d566", + "uparrow;": "\u2191", + "updownarrow;": "\u2195", + "upharpoonleft;": "\u21bf", + "upharpoonright;": "\u21be", + "uplus;": "\u228e", + "upsi;": "\u03c5", + "upsih;": "\u03d2", + "upsilon;": "\u03c5", + "upuparrows;": "\u21c8", + "urcorn;": "\u231d", + "urcorner;": "\u231d", + "urcrop;": "\u230e", + "uring;": "\u016f", + "urtri;": "\u25f9", + "uscr;": "\U0001d4ca", + "utdot;": "\u22f0", + "utilde;": "\u0169", + "utri;": "\u25b5", + "utrif;": "\u25b4", + "uuarr;": "\u21c8", + "uuml": "\xfc", + "uuml;": "\xfc", + "uwangle;": "\u29a7", + "vArr;": "\u21d5", + "vBar;": "\u2ae8", + "vBarv;": "\u2ae9", + "vDash;": "\u22a8", + "vangrt;": "\u299c", + "varepsilon;": "\u03f5", + "varkappa;": "\u03f0", + "varnothing;": "\u2205", + "varphi;": "\u03d5", + "varpi;": "\u03d6", + "varpropto;": "\u221d", + "varr;": "\u2195", + "varrho;": "\u03f1", + "varsigma;": "\u03c2", + "varsubsetneq;": "\u228a\ufe00", + "varsubsetneqq;": "\u2acb\ufe00", + "varsupsetneq;": "\u228b\ufe00", + "varsupsetneqq;": "\u2acc\ufe00", + "vartheta;": "\u03d1", + "vartriangleleft;": "\u22b2", + "vartriangleright;": "\u22b3", + "vcy;": "\u0432", + "vdash;": "\u22a2", + "vee;": "\u2228", + "veebar;": "\u22bb", + "veeeq;": "\u225a", + "vellip;": "\u22ee", + "verbar;": "|", + "vert;": "|", + "vfr;": "\U0001d533", + "vltri;": "\u22b2", + "vnsub;": "\u2282\u20d2", + "vnsup;": "\u2283\u20d2", + "vopf;": "\U0001d567", + "vprop;": "\u221d", + "vrtri;": "\u22b3", + "vscr;": "\U0001d4cb", + "vsubnE;": "\u2acb\ufe00", + "vsubne;": "\u228a\ufe00", + "vsupnE;": "\u2acc\ufe00", + "vsupne;": "\u228b\ufe00", + "vzigzag;": "\u299a", + "wcirc;": "\u0175", + "wedbar;": "\u2a5f", + "wedge;": "\u2227", + "wedgeq;": "\u2259", + "weierp;": "\u2118", + "wfr;": "\U0001d534", + "wopf;": "\U0001d568", + "wp;": "\u2118", + "wr;": "\u2240", + "wreath;": "\u2240", + "wscr;": "\U0001d4cc", + "xcap;": "\u22c2", + "xcirc;": "\u25ef", + "xcup;": "\u22c3", + "xdtri;": "\u25bd", 
+ "xfr;": "\U0001d535", + "xhArr;": "\u27fa", + "xharr;": "\u27f7", + "xi;": "\u03be", + "xlArr;": "\u27f8", + "xlarr;": "\u27f5", + "xmap;": "\u27fc", + "xnis;": "\u22fb", + "xodot;": "\u2a00", + "xopf;": "\U0001d569", + "xoplus;": "\u2a01", + "xotime;": "\u2a02", + "xrArr;": "\u27f9", + "xrarr;": "\u27f6", + "xscr;": "\U0001d4cd", + "xsqcup;": "\u2a06", + "xuplus;": "\u2a04", + "xutri;": "\u25b3", + "xvee;": "\u22c1", + "xwedge;": "\u22c0", + "yacute": "\xfd", + "yacute;": "\xfd", + "yacy;": "\u044f", + "ycirc;": "\u0177", + "ycy;": "\u044b", + "yen": "\xa5", + "yen;": "\xa5", + "yfr;": "\U0001d536", + "yicy;": "\u0457", + "yopf;": "\U0001d56a", + "yscr;": "\U0001d4ce", + "yucy;": "\u044e", + "yuml": "\xff", + "yuml;": "\xff", + "zacute;": "\u017a", + "zcaron;": "\u017e", + "zcy;": "\u0437", + "zdot;": "\u017c", + "zeetrf;": "\u2128", + "zeta;": "\u03b6", + "zfr;": "\U0001d537", + "zhcy;": "\u0436", + "zigrarr;": "\u21dd", + "zopf;": "\U0001d56b", + "zscr;": "\U0001d4cf", + "zwj;": "\u200d", + "zwnj;": "\u200c", +} + +replacementCharacters = { + 0x0: "\uFFFD", + 0x0d: "\u000D", + 0x80: "\u20AC", + 0x81: "\u0081", + 0x82: "\u201A", + 0x83: "\u0192", + 0x84: "\u201E", + 0x85: "\u2026", + 0x86: "\u2020", + 0x87: "\u2021", + 0x88: "\u02C6", + 0x89: "\u2030", + 0x8A: "\u0160", + 0x8B: "\u2039", + 0x8C: "\u0152", + 0x8D: "\u008D", + 0x8E: "\u017D", + 0x8F: "\u008F", + 0x90: "\u0090", + 0x91: "\u2018", + 0x92: "\u2019", + 0x93: "\u201C", + 0x94: "\u201D", + 0x95: "\u2022", + 0x96: "\u2013", + 0x97: "\u2014", + 0x98: "\u02DC", + 0x99: "\u2122", + 0x9A: "\u0161", + 0x9B: "\u203A", + 0x9C: "\u0153", + 0x9D: "\u009D", + 0x9E: "\u017E", + 0x9F: "\u0178", +} + +tokenTypes = { + "Doctype": 0, + "Characters": 1, + "SpaceCharacters": 2, + "StartTag": 3, + "EndTag": 4, + "EmptyTag": 5, + "Comment": 6, + "ParseError": 7 +} + +tagTokenTypes = frozenset([tokenTypes["StartTag"], tokenTypes["EndTag"], + tokenTypes["EmptyTag"]]) + + +prefixes = dict([(v, k) for k, v in namespaces.items()]) +prefixes["http://www.w3.org/1998/Math/MathML"] = "math" + + +class DataLossWarning(UserWarning): + """Raised when the current tree is unable to represent the input data""" + pass + + +class _ReparseException(Exception): + pass diff --git a/env/lib/python3.6/site-packages/pip/_vendor/html5lib/filters/alphabeticalattributes.py b/env/lib/python3.6/site-packages/pip/_vendor/html5lib/filters/alphabeticalattributes.py new file mode 100644 index 0000000..5ba926e --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/html5lib/filters/alphabeticalattributes.py @@ -0,0 +1,29 @@ +from __future__ import absolute_import, division, unicode_literals + +from . import base + +from collections import OrderedDict + + +def _attr_key(attr): + """Return an appropriate key for an attribute for sorting + + Attributes have a namespace that can be either ``None`` or a string. We + can't compare the two because they're different types, so we convert + ``None`` to an empty string first. 
+ + """ + return (attr[0][0] or ''), attr[0][1] + + +class Filter(base.Filter): + """Alphabetizes attributes for elements""" + def __iter__(self): + for token in base.Filter.__iter__(self): + if token["type"] in ("StartTag", "EmptyTag"): + attrs = OrderedDict() + for name, value in sorted(token["data"].items(), + key=_attr_key): + attrs[name] = value + token["data"] = attrs + yield token diff --git a/env/lib/python3.6/site-packages/pip/_vendor/html5lib/filters/base.py b/env/lib/python3.6/site-packages/pip/_vendor/html5lib/filters/base.py new file mode 100644 index 0000000..c7dbaed --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/html5lib/filters/base.py @@ -0,0 +1,12 @@ +from __future__ import absolute_import, division, unicode_literals + + +class Filter(object): + def __init__(self, source): + self.source = source + + def __iter__(self): + return iter(self.source) + + def __getattr__(self, name): + return getattr(self.source, name) diff --git a/env/lib/python3.6/site-packages/pip/_vendor/html5lib/filters/inject_meta_charset.py b/env/lib/python3.6/site-packages/pip/_vendor/html5lib/filters/inject_meta_charset.py new file mode 100644 index 0000000..aefb5c8 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/html5lib/filters/inject_meta_charset.py @@ -0,0 +1,73 @@ +from __future__ import absolute_import, division, unicode_literals + +from . import base + + +class Filter(base.Filter): + """Injects ``<meta charset=ENCODING>`` tag into head of document""" + def __init__(self, source, encoding): + """Creates a Filter + + :arg source: the source token stream + + :arg encoding: the encoding to set + + """ + base.Filter.__init__(self, source) + self.encoding = encoding + + def __iter__(self): + state = "pre_head" + meta_found = (self.encoding is None) + pending = [] + + for token in base.Filter.__iter__(self): + type = token["type"] + if type == "StartTag": + if token["name"].lower() == "head": + state = "in_head" + + elif type == "EmptyTag": + if token["name"].lower() == "meta": + # replace charset with actual encoding + has_http_equiv_content_type = False + for (namespace, name), value in token["data"].items(): + if namespace is not None: + continue + elif name.lower() == 'charset': + token["data"][(namespace, name)] = self.encoding + meta_found = True + break + elif name == 'http-equiv' and value.lower() == 'content-type': + has_http_equiv_content_type = True + else: + if has_http_equiv_content_type and (None, "content") in token["data"]: + token["data"][(None, "content")] = 'text/html; charset=%s' % self.encoding + meta_found = True + + elif token["name"].lower() == "head" and not meta_found: + # insert meta into empty head + yield {"type": "StartTag", "name": "head", + "data": token["data"]} + yield {"type": "EmptyTag", "name": "meta", + "data": {(None, "charset"): self.encoding}} + yield {"type": "EndTag", "name": "head"} + meta_found = True + continue + + elif type == "EndTag": + if token["name"].lower() == "head" and pending: + # insert meta into head (if necessary) and flush pending queue + yield pending.pop(0) + if not meta_found: + yield {"type": "EmptyTag", "name": "meta", + "data": {(None, "charset"): self.encoding}} + while pending: + yield pending.pop(0) + meta_found = True + state = "post_head" + + if state == "in_head": + pending.append(token) + else: + yield token diff --git a/env/lib/python3.6/site-packages/pip/_vendor/html5lib/filters/lint.py b/env/lib/python3.6/site-packages/pip/_vendor/html5lib/filters/lint.py new file mode 100644 index 
0000000..fcc07ee --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/html5lib/filters/lint.py @@ -0,0 +1,93 @@ +from __future__ import absolute_import, division, unicode_literals + +from pip._vendor.six import text_type + +from . import base +from ..constants import namespaces, voidElements + +from ..constants import spaceCharacters +spaceCharacters = "".join(spaceCharacters) + + +class Filter(base.Filter): + """Lints the token stream for errors + + If it finds any errors, it'll raise an ``AssertionError``. + + """ + def __init__(self, source, require_matching_tags=True): + """Creates a Filter + + :arg source: the source token stream + + :arg require_matching_tags: whether or not to require matching tags + + """ + super(Filter, self).__init__(source) + self.require_matching_tags = require_matching_tags + + def __iter__(self): + open_elements = [] + for token in base.Filter.__iter__(self): + type = token["type"] + if type in ("StartTag", "EmptyTag"): + namespace = token["namespace"] + name = token["name"] + assert namespace is None or isinstance(namespace, text_type) + assert namespace != "" + assert isinstance(name, text_type) + assert name != "" + assert isinstance(token["data"], dict) + if (not namespace or namespace == namespaces["html"]) and name in voidElements: + assert type == "EmptyTag" + else: + assert type == "StartTag" + if type == "StartTag" and self.require_matching_tags: + open_elements.append((namespace, name)) + for (namespace, name), value in token["data"].items(): + assert namespace is None or isinstance(namespace, text_type) + assert namespace != "" + assert isinstance(name, text_type) + assert name != "" + assert isinstance(value, text_type) + + elif type == "EndTag": + namespace = token["namespace"] + name = token["name"] + assert namespace is None or isinstance(namespace, text_type) + assert namespace != "" + assert isinstance(name, text_type) + assert name != "" + if (not namespace or namespace == namespaces["html"]) and name in voidElements: + assert False, "Void element reported as EndTag token: %(tag)s" % {"tag": name} + elif self.require_matching_tags: + start = open_elements.pop() + assert start == (namespace, name) + + elif type == "Comment": + data = token["data"] + assert isinstance(data, text_type) + + elif type in ("Characters", "SpaceCharacters"): + data = token["data"] + assert isinstance(data, text_type) + assert data != "" + if type == "SpaceCharacters": + assert data.strip(spaceCharacters) == "" + + elif type == "Doctype": + name = token["name"] + assert name is None or isinstance(name, text_type) + assert token["publicId"] is None or isinstance(name, text_type) + assert token["systemId"] is None or isinstance(name, text_type) + + elif type == "Entity": + assert isinstance(token["name"], text_type) + + elif type == "SerializerError": + assert isinstance(token["data"], text_type) + + else: + assert False, "Unknown token type: %(type)s" % {"type": type} + + yield token diff --git a/env/lib/python3.6/site-packages/pip/_vendor/html5lib/filters/optionaltags.py b/env/lib/python3.6/site-packages/pip/_vendor/html5lib/filters/optionaltags.py new file mode 100644 index 0000000..4a86501 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/html5lib/filters/optionaltags.py @@ -0,0 +1,207 @@ +from __future__ import absolute_import, division, unicode_literals + +from . 
import base + + +class Filter(base.Filter): + """Removes optional tags from the token stream""" + def slider(self): + previous1 = previous2 = None + for token in self.source: + if previous1 is not None: + yield previous2, previous1, token + previous2 = previous1 + previous1 = token + if previous1 is not None: + yield previous2, previous1, None + + def __iter__(self): + for previous, token, next in self.slider(): + type = token["type"] + if type == "StartTag": + if (token["data"] or + not self.is_optional_start(token["name"], previous, next)): + yield token + elif type == "EndTag": + if not self.is_optional_end(token["name"], next): + yield token + else: + yield token + + def is_optional_start(self, tagname, previous, next): + type = next and next["type"] or None + if tagname in 'html': + # An html element's start tag may be omitted if the first thing + # inside the html element is not a space character or a comment. + return type not in ("Comment", "SpaceCharacters") + elif tagname == 'head': + # A head element's start tag may be omitted if the first thing + # inside the head element is an element. + # XXX: we also omit the start tag if the head element is empty + if type in ("StartTag", "EmptyTag"): + return True + elif type == "EndTag": + return next["name"] == "head" + elif tagname == 'body': + # A body element's start tag may be omitted if the first thing + # inside the body element is not a space character or a comment, + # except if the first thing inside the body element is a script + # or style element and the node immediately preceding the body + # element is a head element whose end tag has been omitted. + if type in ("Comment", "SpaceCharacters"): + return False + elif type == "StartTag": + # XXX: we do not look at the preceding event, so we never omit + # the body element's start tag if it's followed by a script or + # a style element. + return next["name"] not in ('script', 'style') + else: + return True + elif tagname == 'colgroup': + # A colgroup element's start tag may be omitted if the first thing + # inside the colgroup element is a col element, and if the element + # is not immediately preceded by another colgroup element whose + # end tag has been omitted. + if type in ("StartTag", "EmptyTag"): + # XXX: we do not look at the preceding event, so instead we never + # omit the colgroup element's end tag when it is immediately + # followed by another colgroup element. See is_optional_end. + return next["name"] == "col" + else: + return False + elif tagname == 'tbody': + # A tbody element's start tag may be omitted if the first thing + # inside the tbody element is a tr element, and if the element is + # not immediately preceded by a tbody, thead, or tfoot element + # whose end tag has been omitted. + if type == "StartTag": + # omit the thead and tfoot elements' end tag when they are + # immediately followed by a tbody element. See is_optional_end. + if previous and previous['type'] == 'EndTag' and \ + previous['name'] in ('tbody', 'thead', 'tfoot'): + return False + return next["name"] == 'tr' + else: + return False + return False + + def is_optional_end(self, tagname, next): + type = next and next["type"] or None + if tagname in ('html', 'head', 'body'): + # An html element's end tag may be omitted if the html element + # is not immediately followed by a space character or a comment. 
+ return type not in ("Comment", "SpaceCharacters") + elif tagname in ('li', 'optgroup', 'tr'): + # A li element's end tag may be omitted if the li element is + # immediately followed by another li element or if there is + # no more content in the parent element. + # An optgroup element's end tag may be omitted if the optgroup + # element is immediately followed by another optgroup element, + # or if there is no more content in the parent element. + # A tr element's end tag may be omitted if the tr element is + # immediately followed by another tr element, or if there is + # no more content in the parent element. + if type == "StartTag": + return next["name"] == tagname + else: + return type == "EndTag" or type is None + elif tagname in ('dt', 'dd'): + # A dt element's end tag may be omitted if the dt element is + # immediately followed by another dt element or a dd element. + # A dd element's end tag may be omitted if the dd element is + # immediately followed by another dd element or a dt element, + # or if there is no more content in the parent element. + if type == "StartTag": + return next["name"] in ('dt', 'dd') + elif tagname == 'dd': + return type == "EndTag" or type is None + else: + return False + elif tagname == 'p': + # A p element's end tag may be omitted if the p element is + # immediately followed by an address, article, aside, + # blockquote, datagrid, dialog, dir, div, dl, fieldset, + # footer, form, h1, h2, h3, h4, h5, h6, header, hr, menu, + # nav, ol, p, pre, section, table, or ul, element, or if + # there is no more content in the parent element. + if type in ("StartTag", "EmptyTag"): + return next["name"] in ('address', 'article', 'aside', + 'blockquote', 'datagrid', 'dialog', + 'dir', 'div', 'dl', 'fieldset', 'footer', + 'form', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6', + 'header', 'hr', 'menu', 'nav', 'ol', + 'p', 'pre', 'section', 'table', 'ul') + else: + return type == "EndTag" or type is None + elif tagname == 'option': + # An option element's end tag may be omitted if the option + # element is immediately followed by another option element, + # or if it is immediately followed by an <code>optgroup</code> + # element, or if there is no more content in the parent + # element. + if type == "StartTag": + return next["name"] in ('option', 'optgroup') + else: + return type == "EndTag" or type is None + elif tagname in ('rt', 'rp'): + # An rt element's end tag may be omitted if the rt element is + # immediately followed by an rt or rp element, or if there is + # no more content in the parent element. + # An rp element's end tag may be omitted if the rp element is + # immediately followed by an rt or rp element, or if there is + # no more content in the parent element. + if type == "StartTag": + return next["name"] in ('rt', 'rp') + else: + return type == "EndTag" or type is None + elif tagname == 'colgroup': + # A colgroup element's end tag may be omitted if the colgroup + # element is not immediately followed by a space character or + # a comment. + if type in ("Comment", "SpaceCharacters"): + return False + elif type == "StartTag": + # XXX: we also look for an immediately following colgroup + # element. See is_optional_start. + return next["name"] != 'colgroup' + else: + return True + elif tagname in ('thead', 'tbody'): + # A thead element's end tag may be omitted if the thead element + # is immediately followed by a tbody or tfoot element. 
+ # A tbody element's end tag may be omitted if the tbody element + # is immediately followed by a tbody or tfoot element, or if + # there is no more content in the parent element. + # A tfoot element's end tag may be omitted if the tfoot element + # is immediately followed by a tbody element, or if there is no + # more content in the parent element. + # XXX: we never omit the end tag when the following element is + # a tbody. See is_optional_start. + if type == "StartTag": + return next["name"] in ['tbody', 'tfoot'] + elif tagname == 'tbody': + return type == "EndTag" or type is None + else: + return False + elif tagname == 'tfoot': + # A tfoot element's end tag may be omitted if the tfoot element + # is immediately followed by a tbody element, or if there is no + # more content in the parent element. + # XXX: we never omit the end tag when the following element is + # a tbody. See is_optional_start. + if type == "StartTag": + return next["name"] == 'tbody' + else: + return type == "EndTag" or type is None + elif tagname in ('td', 'th'): + # A td element's end tag may be omitted if the td element is + # immediately followed by a td or th element, or if there is + # no more content in the parent element. + # A th element's end tag may be omitted if the th element is + # immediately followed by a td or th element, or if there is + # no more content in the parent element. + if type == "StartTag": + return next["name"] in ('td', 'th') + else: + return type == "EndTag" or type is None + return False diff --git a/env/lib/python3.6/site-packages/pip/_vendor/html5lib/filters/sanitizer.py b/env/lib/python3.6/site-packages/pip/_vendor/html5lib/filters/sanitizer.py new file mode 100644 index 0000000..af8e77b --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/html5lib/filters/sanitizer.py @@ -0,0 +1,896 @@ +from __future__ import absolute_import, division, unicode_literals + +import re +from xml.sax.saxutils import escape, unescape + +from pip._vendor.six.moves import urllib_parse as urlparse + +from . 
import base +from ..constants import namespaces, prefixes + +__all__ = ["Filter"] + + +allowed_elements = frozenset(( + (namespaces['html'], 'a'), + (namespaces['html'], 'abbr'), + (namespaces['html'], 'acronym'), + (namespaces['html'], 'address'), + (namespaces['html'], 'area'), + (namespaces['html'], 'article'), + (namespaces['html'], 'aside'), + (namespaces['html'], 'audio'), + (namespaces['html'], 'b'), + (namespaces['html'], 'big'), + (namespaces['html'], 'blockquote'), + (namespaces['html'], 'br'), + (namespaces['html'], 'button'), + (namespaces['html'], 'canvas'), + (namespaces['html'], 'caption'), + (namespaces['html'], 'center'), + (namespaces['html'], 'cite'), + (namespaces['html'], 'code'), + (namespaces['html'], 'col'), + (namespaces['html'], 'colgroup'), + (namespaces['html'], 'command'), + (namespaces['html'], 'datagrid'), + (namespaces['html'], 'datalist'), + (namespaces['html'], 'dd'), + (namespaces['html'], 'del'), + (namespaces['html'], 'details'), + (namespaces['html'], 'dfn'), + (namespaces['html'], 'dialog'), + (namespaces['html'], 'dir'), + (namespaces['html'], 'div'), + (namespaces['html'], 'dl'), + (namespaces['html'], 'dt'), + (namespaces['html'], 'em'), + (namespaces['html'], 'event-source'), + (namespaces['html'], 'fieldset'), + (namespaces['html'], 'figcaption'), + (namespaces['html'], 'figure'), + (namespaces['html'], 'footer'), + (namespaces['html'], 'font'), + (namespaces['html'], 'form'), + (namespaces['html'], 'header'), + (namespaces['html'], 'h1'), + (namespaces['html'], 'h2'), + (namespaces['html'], 'h3'), + (namespaces['html'], 'h4'), + (namespaces['html'], 'h5'), + (namespaces['html'], 'h6'), + (namespaces['html'], 'hr'), + (namespaces['html'], 'i'), + (namespaces['html'], 'img'), + (namespaces['html'], 'input'), + (namespaces['html'], 'ins'), + (namespaces['html'], 'keygen'), + (namespaces['html'], 'kbd'), + (namespaces['html'], 'label'), + (namespaces['html'], 'legend'), + (namespaces['html'], 'li'), + (namespaces['html'], 'm'), + (namespaces['html'], 'map'), + (namespaces['html'], 'menu'), + (namespaces['html'], 'meter'), + (namespaces['html'], 'multicol'), + (namespaces['html'], 'nav'), + (namespaces['html'], 'nextid'), + (namespaces['html'], 'ol'), + (namespaces['html'], 'output'), + (namespaces['html'], 'optgroup'), + (namespaces['html'], 'option'), + (namespaces['html'], 'p'), + (namespaces['html'], 'pre'), + (namespaces['html'], 'progress'), + (namespaces['html'], 'q'), + (namespaces['html'], 's'), + (namespaces['html'], 'samp'), + (namespaces['html'], 'section'), + (namespaces['html'], 'select'), + (namespaces['html'], 'small'), + (namespaces['html'], 'sound'), + (namespaces['html'], 'source'), + (namespaces['html'], 'spacer'), + (namespaces['html'], 'span'), + (namespaces['html'], 'strike'), + (namespaces['html'], 'strong'), + (namespaces['html'], 'sub'), + (namespaces['html'], 'sup'), + (namespaces['html'], 'table'), + (namespaces['html'], 'tbody'), + (namespaces['html'], 'td'), + (namespaces['html'], 'textarea'), + (namespaces['html'], 'time'), + (namespaces['html'], 'tfoot'), + (namespaces['html'], 'th'), + (namespaces['html'], 'thead'), + (namespaces['html'], 'tr'), + (namespaces['html'], 'tt'), + (namespaces['html'], 'u'), + (namespaces['html'], 'ul'), + (namespaces['html'], 'var'), + (namespaces['html'], 'video'), + (namespaces['mathml'], 'maction'), + (namespaces['mathml'], 'math'), + (namespaces['mathml'], 'merror'), + (namespaces['mathml'], 'mfrac'), + (namespaces['mathml'], 'mi'), + (namespaces['mathml'], 'mmultiscripts'), + 
(namespaces['mathml'], 'mn'), + (namespaces['mathml'], 'mo'), + (namespaces['mathml'], 'mover'), + (namespaces['mathml'], 'mpadded'), + (namespaces['mathml'], 'mphantom'), + (namespaces['mathml'], 'mprescripts'), + (namespaces['mathml'], 'mroot'), + (namespaces['mathml'], 'mrow'), + (namespaces['mathml'], 'mspace'), + (namespaces['mathml'], 'msqrt'), + (namespaces['mathml'], 'mstyle'), + (namespaces['mathml'], 'msub'), + (namespaces['mathml'], 'msubsup'), + (namespaces['mathml'], 'msup'), + (namespaces['mathml'], 'mtable'), + (namespaces['mathml'], 'mtd'), + (namespaces['mathml'], 'mtext'), + (namespaces['mathml'], 'mtr'), + (namespaces['mathml'], 'munder'), + (namespaces['mathml'], 'munderover'), + (namespaces['mathml'], 'none'), + (namespaces['svg'], 'a'), + (namespaces['svg'], 'animate'), + (namespaces['svg'], 'animateColor'), + (namespaces['svg'], 'animateMotion'), + (namespaces['svg'], 'animateTransform'), + (namespaces['svg'], 'clipPath'), + (namespaces['svg'], 'circle'), + (namespaces['svg'], 'defs'), + (namespaces['svg'], 'desc'), + (namespaces['svg'], 'ellipse'), + (namespaces['svg'], 'font-face'), + (namespaces['svg'], 'font-face-name'), + (namespaces['svg'], 'font-face-src'), + (namespaces['svg'], 'g'), + (namespaces['svg'], 'glyph'), + (namespaces['svg'], 'hkern'), + (namespaces['svg'], 'linearGradient'), + (namespaces['svg'], 'line'), + (namespaces['svg'], 'marker'), + (namespaces['svg'], 'metadata'), + (namespaces['svg'], 'missing-glyph'), + (namespaces['svg'], 'mpath'), + (namespaces['svg'], 'path'), + (namespaces['svg'], 'polygon'), + (namespaces['svg'], 'polyline'), + (namespaces['svg'], 'radialGradient'), + (namespaces['svg'], 'rect'), + (namespaces['svg'], 'set'), + (namespaces['svg'], 'stop'), + (namespaces['svg'], 'svg'), + (namespaces['svg'], 'switch'), + (namespaces['svg'], 'text'), + (namespaces['svg'], 'title'), + (namespaces['svg'], 'tspan'), + (namespaces['svg'], 'use'), +)) + +allowed_attributes = frozenset(( + # HTML attributes + (None, 'abbr'), + (None, 'accept'), + (None, 'accept-charset'), + (None, 'accesskey'), + (None, 'action'), + (None, 'align'), + (None, 'alt'), + (None, 'autocomplete'), + (None, 'autofocus'), + (None, 'axis'), + (None, 'background'), + (None, 'balance'), + (None, 'bgcolor'), + (None, 'bgproperties'), + (None, 'border'), + (None, 'bordercolor'), + (None, 'bordercolordark'), + (None, 'bordercolorlight'), + (None, 'bottompadding'), + (None, 'cellpadding'), + (None, 'cellspacing'), + (None, 'ch'), + (None, 'challenge'), + (None, 'char'), + (None, 'charoff'), + (None, 'choff'), + (None, 'charset'), + (None, 'checked'), + (None, 'cite'), + (None, 'class'), + (None, 'clear'), + (None, 'color'), + (None, 'cols'), + (None, 'colspan'), + (None, 'compact'), + (None, 'contenteditable'), + (None, 'controls'), + (None, 'coords'), + (None, 'data'), + (None, 'datafld'), + (None, 'datapagesize'), + (None, 'datasrc'), + (None, 'datetime'), + (None, 'default'), + (None, 'delay'), + (None, 'dir'), + (None, 'disabled'), + (None, 'draggable'), + (None, 'dynsrc'), + (None, 'enctype'), + (None, 'end'), + (None, 'face'), + (None, 'for'), + (None, 'form'), + (None, 'frame'), + (None, 'galleryimg'), + (None, 'gutter'), + (None, 'headers'), + (None, 'height'), + (None, 'hidefocus'), + (None, 'hidden'), + (None, 'high'), + (None, 'href'), + (None, 'hreflang'), + (None, 'hspace'), + (None, 'icon'), + (None, 'id'), + (None, 'inputmode'), + (None, 'ismap'), + (None, 'keytype'), + (None, 'label'), + (None, 'leftspacing'), + (None, 'lang'), + (None, 'list'), + (None, 
'longdesc'), + (None, 'loop'), + (None, 'loopcount'), + (None, 'loopend'), + (None, 'loopstart'), + (None, 'low'), + (None, 'lowsrc'), + (None, 'max'), + (None, 'maxlength'), + (None, 'media'), + (None, 'method'), + (None, 'min'), + (None, 'multiple'), + (None, 'name'), + (None, 'nohref'), + (None, 'noshade'), + (None, 'nowrap'), + (None, 'open'), + (None, 'optimum'), + (None, 'pattern'), + (None, 'ping'), + (None, 'point-size'), + (None, 'poster'), + (None, 'pqg'), + (None, 'preload'), + (None, 'prompt'), + (None, 'radiogroup'), + (None, 'readonly'), + (None, 'rel'), + (None, 'repeat-max'), + (None, 'repeat-min'), + (None, 'replace'), + (None, 'required'), + (None, 'rev'), + (None, 'rightspacing'), + (None, 'rows'), + (None, 'rowspan'), + (None, 'rules'), + (None, 'scope'), + (None, 'selected'), + (None, 'shape'), + (None, 'size'), + (None, 'span'), + (None, 'src'), + (None, 'start'), + (None, 'step'), + (None, 'style'), + (None, 'summary'), + (None, 'suppress'), + (None, 'tabindex'), + (None, 'target'), + (None, 'template'), + (None, 'title'), + (None, 'toppadding'), + (None, 'type'), + (None, 'unselectable'), + (None, 'usemap'), + (None, 'urn'), + (None, 'valign'), + (None, 'value'), + (None, 'variable'), + (None, 'volume'), + (None, 'vspace'), + (None, 'vrml'), + (None, 'width'), + (None, 'wrap'), + (namespaces['xml'], 'lang'), + # MathML attributes + (None, 'actiontype'), + (None, 'align'), + (None, 'columnalign'), + (None, 'columnalign'), + (None, 'columnalign'), + (None, 'columnlines'), + (None, 'columnspacing'), + (None, 'columnspan'), + (None, 'depth'), + (None, 'display'), + (None, 'displaystyle'), + (None, 'equalcolumns'), + (None, 'equalrows'), + (None, 'fence'), + (None, 'fontstyle'), + (None, 'fontweight'), + (None, 'frame'), + (None, 'height'), + (None, 'linethickness'), + (None, 'lspace'), + (None, 'mathbackground'), + (None, 'mathcolor'), + (None, 'mathvariant'), + (None, 'mathvariant'), + (None, 'maxsize'), + (None, 'minsize'), + (None, 'other'), + (None, 'rowalign'), + (None, 'rowalign'), + (None, 'rowalign'), + (None, 'rowlines'), + (None, 'rowspacing'), + (None, 'rowspan'), + (None, 'rspace'), + (None, 'scriptlevel'), + (None, 'selection'), + (None, 'separator'), + (None, 'stretchy'), + (None, 'width'), + (None, 'width'), + (namespaces['xlink'], 'href'), + (namespaces['xlink'], 'show'), + (namespaces['xlink'], 'type'), + # SVG attributes + (None, 'accent-height'), + (None, 'accumulate'), + (None, 'additive'), + (None, 'alphabetic'), + (None, 'arabic-form'), + (None, 'ascent'), + (None, 'attributeName'), + (None, 'attributeType'), + (None, 'baseProfile'), + (None, 'bbox'), + (None, 'begin'), + (None, 'by'), + (None, 'calcMode'), + (None, 'cap-height'), + (None, 'class'), + (None, 'clip-path'), + (None, 'color'), + (None, 'color-rendering'), + (None, 'content'), + (None, 'cx'), + (None, 'cy'), + (None, 'd'), + (None, 'dx'), + (None, 'dy'), + (None, 'descent'), + (None, 'display'), + (None, 'dur'), + (None, 'end'), + (None, 'fill'), + (None, 'fill-opacity'), + (None, 'fill-rule'), + (None, 'font-family'), + (None, 'font-size'), + (None, 'font-stretch'), + (None, 'font-style'), + (None, 'font-variant'), + (None, 'font-weight'), + (None, 'from'), + (None, 'fx'), + (None, 'fy'), + (None, 'g1'), + (None, 'g2'), + (None, 'glyph-name'), + (None, 'gradientUnits'), + (None, 'hanging'), + (None, 'height'), + (None, 'horiz-adv-x'), + (None, 'horiz-origin-x'), + (None, 'id'), + (None, 'ideographic'), + (None, 'k'), + (None, 'keyPoints'), + (None, 'keySplines'), + (None, 'keyTimes'), 
+ (None, 'lang'), + (None, 'marker-end'), + (None, 'marker-mid'), + (None, 'marker-start'), + (None, 'markerHeight'), + (None, 'markerUnits'), + (None, 'markerWidth'), + (None, 'mathematical'), + (None, 'max'), + (None, 'min'), + (None, 'name'), + (None, 'offset'), + (None, 'opacity'), + (None, 'orient'), + (None, 'origin'), + (None, 'overline-position'), + (None, 'overline-thickness'), + (None, 'panose-1'), + (None, 'path'), + (None, 'pathLength'), + (None, 'points'), + (None, 'preserveAspectRatio'), + (None, 'r'), + (None, 'refX'), + (None, 'refY'), + (None, 'repeatCount'), + (None, 'repeatDur'), + (None, 'requiredExtensions'), + (None, 'requiredFeatures'), + (None, 'restart'), + (None, 'rotate'), + (None, 'rx'), + (None, 'ry'), + (None, 'slope'), + (None, 'stemh'), + (None, 'stemv'), + (None, 'stop-color'), + (None, 'stop-opacity'), + (None, 'strikethrough-position'), + (None, 'strikethrough-thickness'), + (None, 'stroke'), + (None, 'stroke-dasharray'), + (None, 'stroke-dashoffset'), + (None, 'stroke-linecap'), + (None, 'stroke-linejoin'), + (None, 'stroke-miterlimit'), + (None, 'stroke-opacity'), + (None, 'stroke-width'), + (None, 'systemLanguage'), + (None, 'target'), + (None, 'text-anchor'), + (None, 'to'), + (None, 'transform'), + (None, 'type'), + (None, 'u1'), + (None, 'u2'), + (None, 'underline-position'), + (None, 'underline-thickness'), + (None, 'unicode'), + (None, 'unicode-range'), + (None, 'units-per-em'), + (None, 'values'), + (None, 'version'), + (None, 'viewBox'), + (None, 'visibility'), + (None, 'width'), + (None, 'widths'), + (None, 'x'), + (None, 'x-height'), + (None, 'x1'), + (None, 'x2'), + (namespaces['xlink'], 'actuate'), + (namespaces['xlink'], 'arcrole'), + (namespaces['xlink'], 'href'), + (namespaces['xlink'], 'role'), + (namespaces['xlink'], 'show'), + (namespaces['xlink'], 'title'), + (namespaces['xlink'], 'type'), + (namespaces['xml'], 'base'), + (namespaces['xml'], 'lang'), + (namespaces['xml'], 'space'), + (None, 'y'), + (None, 'y1'), + (None, 'y2'), + (None, 'zoomAndPan'), +)) + +attr_val_is_uri = frozenset(( + (None, 'href'), + (None, 'src'), + (None, 'cite'), + (None, 'action'), + (None, 'longdesc'), + (None, 'poster'), + (None, 'background'), + (None, 'datasrc'), + (None, 'dynsrc'), + (None, 'lowsrc'), + (None, 'ping'), + (namespaces['xlink'], 'href'), + (namespaces['xml'], 'base'), +)) + +svg_attr_val_allows_ref = frozenset(( + (None, 'clip-path'), + (None, 'color-profile'), + (None, 'cursor'), + (None, 'fill'), + (None, 'filter'), + (None, 'marker'), + (None, 'marker-start'), + (None, 'marker-mid'), + (None, 'marker-end'), + (None, 'mask'), + (None, 'stroke'), +)) + +svg_allow_local_href = frozenset(( + (None, 'altGlyph'), + (None, 'animate'), + (None, 'animateColor'), + (None, 'animateMotion'), + (None, 'animateTransform'), + (None, 'cursor'), + (None, 'feImage'), + (None, 'filter'), + (None, 'linearGradient'), + (None, 'pattern'), + (None, 'radialGradient'), + (None, 'textpath'), + (None, 'tref'), + (None, 'set'), + (None, 'use') +)) + +allowed_css_properties = frozenset(( + 'azimuth', + 'background-color', + 'border-bottom-color', + 'border-collapse', + 'border-color', + 'border-left-color', + 'border-right-color', + 'border-top-color', + 'clear', + 'color', + 'cursor', + 'direction', + 'display', + 'elevation', + 'float', + 'font', + 'font-family', + 'font-size', + 'font-style', + 'font-variant', + 'font-weight', + 'height', + 'letter-spacing', + 'line-height', + 'overflow', + 'pause', + 'pause-after', + 'pause-before', + 'pitch', + 'pitch-range', 
+ 'richness', + 'speak', + 'speak-header', + 'speak-numeral', + 'speak-punctuation', + 'speech-rate', + 'stress', + 'text-align', + 'text-decoration', + 'text-indent', + 'unicode-bidi', + 'vertical-align', + 'voice-family', + 'volume', + 'white-space', + 'width', +)) + +allowed_css_keywords = frozenset(( + 'auto', + 'aqua', + 'black', + 'block', + 'blue', + 'bold', + 'both', + 'bottom', + 'brown', + 'center', + 'collapse', + 'dashed', + 'dotted', + 'fuchsia', + 'gray', + 'green', + '!important', + 'italic', + 'left', + 'lime', + 'maroon', + 'medium', + 'none', + 'navy', + 'normal', + 'nowrap', + 'olive', + 'pointer', + 'purple', + 'red', + 'right', + 'solid', + 'silver', + 'teal', + 'top', + 'transparent', + 'underline', + 'white', + 'yellow', +)) + +allowed_svg_properties = frozenset(( + 'fill', + 'fill-opacity', + 'fill-rule', + 'stroke', + 'stroke-width', + 'stroke-linecap', + 'stroke-linejoin', + 'stroke-opacity', +)) + +allowed_protocols = frozenset(( + 'ed2k', + 'ftp', + 'http', + 'https', + 'irc', + 'mailto', + 'news', + 'gopher', + 'nntp', + 'telnet', + 'webcal', + 'xmpp', + 'callto', + 'feed', + 'urn', + 'aim', + 'rsync', + 'tag', + 'ssh', + 'sftp', + 'rtsp', + 'afs', + 'data', +)) + +allowed_content_types = frozenset(( + 'image/png', + 'image/jpeg', + 'image/gif', + 'image/webp', + 'image/bmp', + 'text/plain', +)) + + +data_content_type = re.compile(r''' + ^ + # Match a content type <application>/<type> + (?P<content_type>[-a-zA-Z0-9.]+/[-a-zA-Z0-9.]+) + # Match any character set and encoding + (?:(?:;charset=(?:[-a-zA-Z0-9]+)(?:;(?:base64))?) + |(?:;(?:base64))?(?:;charset=(?:[-a-zA-Z0-9]+))?) + # Assume the rest is data + ,.* + $ + ''', + re.VERBOSE) + + +class Filter(base.Filter): + """Sanitizes token stream of XHTML+MathML+SVG and of inline style attributes""" + def __init__(self, + source, + allowed_elements=allowed_elements, + allowed_attributes=allowed_attributes, + allowed_css_properties=allowed_css_properties, + allowed_css_keywords=allowed_css_keywords, + allowed_svg_properties=allowed_svg_properties, + allowed_protocols=allowed_protocols, + allowed_content_types=allowed_content_types, + attr_val_is_uri=attr_val_is_uri, + svg_attr_val_allows_ref=svg_attr_val_allows_ref, + svg_allow_local_href=svg_allow_local_href): + """Creates a Filter + + :arg allowed_elements: set of elements to allow--everything else will + be escaped + + :arg allowed_attributes: set of attributes to allow in + elements--everything else will be stripped + + :arg allowed_css_properties: set of CSS properties to allow--everything + else will be stripped + + :arg allowed_css_keywords: set of CSS keywords to allow--everything + else will be stripped + + :arg allowed_svg_properties: set of SVG properties to allow--everything + else will be removed + + :arg allowed_protocols: set of allowed protocols for URIs + + :arg allowed_content_types: set of allowed content types for ``data`` URIs. 
+ + :arg attr_val_is_uri: set of attributes that have URI values--values + that have a scheme not listed in ``allowed_protocols`` are removed + + :arg svg_attr_val_allows_ref: set of SVG attributes that can have + references + + :arg svg_allow_local_href: set of SVG elements that can have local + hrefs--these are removed + + """ + super(Filter, self).__init__(source) + self.allowed_elements = allowed_elements + self.allowed_attributes = allowed_attributes + self.allowed_css_properties = allowed_css_properties + self.allowed_css_keywords = allowed_css_keywords + self.allowed_svg_properties = allowed_svg_properties + self.allowed_protocols = allowed_protocols + self.allowed_content_types = allowed_content_types + self.attr_val_is_uri = attr_val_is_uri + self.svg_attr_val_allows_ref = svg_attr_val_allows_ref + self.svg_allow_local_href = svg_allow_local_href + + def __iter__(self): + for token in base.Filter.__iter__(self): + token = self.sanitize_token(token) + if token: + yield token + + # Sanitize the +html+, escaping all elements not in ALLOWED_ELEMENTS, and + # stripping out all attributes not in ALLOWED_ATTRIBUTES. Style attributes + # are parsed, and a restricted set, specified by ALLOWED_CSS_PROPERTIES and + # ALLOWED_CSS_KEYWORDS, are allowed through. attributes in ATTR_VAL_IS_URI + # are scanned, and only URI schemes specified in ALLOWED_PROTOCOLS are + # allowed. + # + # sanitize_html('<script> do_nasty_stuff() </script>') + # => <script> do_nasty_stuff() </script> + # sanitize_html('<a href="javascript: sucker();">Click here for $100</a>') + # => <a>Click here for $100</a> + def sanitize_token(self, token): + + # accommodate filters which use token_type differently + token_type = token["type"] + if token_type in ("StartTag", "EndTag", "EmptyTag"): + name = token["name"] + namespace = token["namespace"] + if ((namespace, name) in self.allowed_elements or + (namespace is None and + (namespaces["html"], name) in self.allowed_elements)): + return self.allowed_token(token) + else: + return self.disallowed_token(token) + elif token_type == "Comment": + pass + else: + return token + + def allowed_token(self, token): + if "data" in token: + attrs = token["data"] + attr_names = set(attrs.keys()) + + # Remove forbidden attributes + for to_remove in (attr_names - self.allowed_attributes): + del token["data"][to_remove] + attr_names.remove(to_remove) + + # Remove attributes with disallowed URL values + for attr in (attr_names & self.attr_val_is_uri): + assert attr in attrs + # I don't have a clue where this regexp comes from or why it matches those + # characters, nor why we call unescape. I just know it's always been here. + # Should you be worried by this comment in a sanitizer? Yes. On the other hand, all + # this will do is remove *more* than it otherwise would. 
+ val_unescaped = re.sub("[`\x00-\x20\x7f-\xa0\\s]+", '', + unescape(attrs[attr])).lower() + # remove replacement characters from unescaped characters + val_unescaped = val_unescaped.replace("\ufffd", "") + try: + uri = urlparse.urlparse(val_unescaped) + except ValueError: + uri = None + del attrs[attr] + if uri and uri.scheme: + if uri.scheme not in self.allowed_protocols: + del attrs[attr] + if uri.scheme == 'data': + m = data_content_type.match(uri.path) + if not m: + del attrs[attr] + elif m.group('content_type') not in self.allowed_content_types: + del attrs[attr] + + for attr in self.svg_attr_val_allows_ref: + if attr in attrs: + attrs[attr] = re.sub(r'url\s*\(\s*[^#\s][^)]+?\)', + ' ', + unescape(attrs[attr])) + if (token["name"] in self.svg_allow_local_href and + (namespaces['xlink'], 'href') in attrs and re.search(r'^\s*[^#\s].*', + attrs[(namespaces['xlink'], 'href')])): + del attrs[(namespaces['xlink'], 'href')] + if (None, 'style') in attrs: + attrs[(None, 'style')] = self.sanitize_css(attrs[(None, 'style')]) + token["data"] = attrs + return token + + def disallowed_token(self, token): + token_type = token["type"] + if token_type == "EndTag": + token["data"] = "</%s>" % token["name"] + elif token["data"]: + assert token_type in ("StartTag", "EmptyTag") + attrs = [] + for (ns, name), v in token["data"].items(): + attrs.append(' %s="%s"' % (name if ns is None else "%s:%s" % (prefixes[ns], name), escape(v))) + token["data"] = "<%s%s>" % (token["name"], ''.join(attrs)) + else: + token["data"] = "<%s>" % token["name"] + if token.get("selfClosing"): + token["data"] = token["data"][:-1] + "/>" + + token["type"] = "Characters" + + del token["name"] + return token + + def sanitize_css(self, style): + # disallow urls + style = re.compile(r'url\s*\(\s*[^\s)]+?\s*\)\s*').sub(' ', style) + + # gauntlet + if not re.match(r"""^([:,;#%.\sa-zA-Z0-9!]|\w-\w|'[\s\w]+'|"[\s\w]+"|\([\d,\s]+\))*$""", style): + return '' + if not re.match(r"^\s*([-\w]+\s*:[^:;]*(;\s*|$))*$", style): + return '' + + clean = [] + for prop, value in re.findall(r"([-\w]+)\s*:\s*([^:;]*)", style): + if not value: + continue + if prop.lower() in self.allowed_css_properties: + clean.append(prop + ': ' + value + ';') + elif prop.split('-')[0].lower() in ['background', 'border', 'margin', + 'padding']: + for keyword in value.split(): + if keyword not in self.allowed_css_keywords and \ + not re.match(r"^(#[0-9a-fA-F]+|rgb\(\d+%?,\d*%?,?\d*%?\)?|\d{0,2}\.?\d{0,2}(cm|em|ex|in|mm|pc|pt|px|%|,|\))?)$", keyword): # noqa + break + else: + clean.append(prop + ': ' + value + ';') + elif prop.lower() in self.allowed_svg_properties: + clean.append(prop + ': ' + value + ';') + + return ' '.join(clean) diff --git a/env/lib/python3.6/site-packages/pip/_vendor/html5lib/filters/whitespace.py b/env/lib/python3.6/site-packages/pip/_vendor/html5lib/filters/whitespace.py new file mode 100644 index 0000000..0d12584 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/html5lib/filters/whitespace.py @@ -0,0 +1,38 @@ +from __future__ import absolute_import, division, unicode_literals + +import re + +from . 
import base +from ..constants import rcdataElements, spaceCharacters +spaceCharacters = "".join(spaceCharacters) + +SPACES_REGEX = re.compile("[%s]+" % spaceCharacters) + + +class Filter(base.Filter): + """Collapses whitespace except in pre, textarea, and script elements""" + spacePreserveElements = frozenset(["pre", "textarea"] + list(rcdataElements)) + + def __iter__(self): + preserve = 0 + for token in base.Filter.__iter__(self): + type = token["type"] + if type == "StartTag" \ + and (preserve or token["name"] in self.spacePreserveElements): + preserve += 1 + + elif type == "EndTag" and preserve: + preserve -= 1 + + elif not preserve and type == "SpaceCharacters" and token["data"]: + # Test on token["data"] above to not introduce spaces where there were not + token["data"] = " " + + elif not preserve and type == "Characters": + token["data"] = collapse_spaces(token["data"]) + + yield token + + +def collapse_spaces(text): + return SPACES_REGEX.sub(' ', text) diff --git a/env/lib/python3.6/site-packages/pip/_vendor/html5lib/html5parser.py b/env/lib/python3.6/site-packages/pip/_vendor/html5lib/html5parser.py new file mode 100644 index 0000000..ae41a13 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/html5lib/html5parser.py @@ -0,0 +1,2791 @@ +from __future__ import absolute_import, division, unicode_literals +from pip._vendor.six import with_metaclass, viewkeys + +import types +from collections import OrderedDict + +from . import _inputstream +from . import _tokenizer + +from . import treebuilders +from .treebuilders.base import Marker + +from . import _utils +from .constants import ( + spaceCharacters, asciiUpper2Lower, + specialElements, headingElements, cdataElements, rcdataElements, + tokenTypes, tagTokenTypes, + namespaces, + htmlIntegrationPointElements, mathmlTextIntegrationPointElements, + adjustForeignAttributes as adjustForeignAttributesMap, + adjustMathMLAttributes, adjustSVGAttributes, + E, + _ReparseException +) + + +def parse(doc, treebuilder="etree", namespaceHTMLElements=True, **kwargs): + """Parse an HTML document as a string or file-like object into a tree + + :arg doc: the document to parse as a string or file-like object + + :arg treebuilder: the treebuilder to use when parsing + + :arg namespaceHTMLElements: whether or not to namespace HTML elements + + :returns: parsed tree + + Example: + + >>> from html5lib.html5parser import parse + >>> parse('<html><body><p>This is a doc</p></body></html>') + <Element u'{http://www.w3.org/1999/xhtml}html' at 0x7feac4909db0> + + """ + tb = treebuilders.getTreeBuilder(treebuilder) + p = HTMLParser(tb, namespaceHTMLElements=namespaceHTMLElements) + return p.parse(doc, **kwargs) + + +def parseFragment(doc, container="div", treebuilder="etree", namespaceHTMLElements=True, **kwargs): + """Parse an HTML fragment as a string or file-like object into a tree + + :arg doc: the fragment to parse as a string or file-like object + + :arg container: the container context to parse the fragment in + + :arg treebuilder: the treebuilder to use when parsing + + :arg namespaceHTMLElements: whether or not to namespace HTML elements + + :returns: parsed tree + + Example: + + >>> from html5lib.html5libparser import parseFragment + >>> parseFragment('<b>this is a fragment</b>') + <Element u'DOCUMENT_FRAGMENT' at 0x7feac484b090> + + """ + tb = treebuilders.getTreeBuilder(treebuilder) + p = HTMLParser(tb, namespaceHTMLElements=namespaceHTMLElements) + return p.parseFragment(doc, container=container, **kwargs) + + +def 
method_decorator_metaclass(function): + class Decorated(type): + def __new__(meta, classname, bases, classDict): + for attributeName, attribute in classDict.items(): + if isinstance(attribute, types.FunctionType): + attribute = function(attribute) + + classDict[attributeName] = attribute + return type.__new__(meta, classname, bases, classDict) + return Decorated + + +class HTMLParser(object): + """HTML parser + + Generates a tree structure from a stream of (possibly malformed) HTML. + + """ + + def __init__(self, tree=None, strict=False, namespaceHTMLElements=True, debug=False): + """ + :arg tree: a treebuilder class controlling the type of tree that will be + returned. Built in treebuilders can be accessed through + html5lib.treebuilders.getTreeBuilder(treeType) + + :arg strict: raise an exception when a parse error is encountered + + :arg namespaceHTMLElements: whether or not to namespace HTML elements + + :arg debug: whether or not to enable debug mode which logs things + + Example: + + >>> from html5lib.html5parser import HTMLParser + >>> parser = HTMLParser() # generates parser with etree builder + >>> parser = HTMLParser('lxml', strict=True) # generates parser with lxml builder which is strict + + """ + + # Raise an exception on the first error encountered + self.strict = strict + + if tree is None: + tree = treebuilders.getTreeBuilder("etree") + self.tree = tree(namespaceHTMLElements) + self.errors = [] + + self.phases = dict([(name, cls(self, self.tree)) for name, cls in + getPhases(debug).items()]) + + def _parse(self, stream, innerHTML=False, container="div", scripting=False, **kwargs): + + self.innerHTMLMode = innerHTML + self.container = container + self.scripting = scripting + self.tokenizer = _tokenizer.HTMLTokenizer(stream, parser=self, **kwargs) + self.reset() + + try: + self.mainLoop() + except _ReparseException: + self.reset() + self.mainLoop() + + def reset(self): + self.tree.reset() + self.firstStartTag = False + self.errors = [] + self.log = [] # only used with debug mode + # "quirks" / "limited quirks" / "no quirks" + self.compatMode = "no quirks" + + if self.innerHTMLMode: + self.innerHTML = self.container.lower() + + if self.innerHTML in cdataElements: + self.tokenizer.state = self.tokenizer.rcdataState + elif self.innerHTML in rcdataElements: + self.tokenizer.state = self.tokenizer.rawtextState + elif self.innerHTML == 'plaintext': + self.tokenizer.state = self.tokenizer.plaintextState + else: + # state already is data state + # self.tokenizer.state = self.tokenizer.dataState + pass + self.phase = self.phases["beforeHtml"] + self.phase.insertHtmlElement() + self.resetInsertionMode() + else: + self.innerHTML = False # pylint:disable=redefined-variable-type + self.phase = self.phases["initial"] + + self.lastPhase = None + + self.beforeRCDataPhase = None + + self.framesetOK = True + + @property + def documentEncoding(self): + """Name of the character encoding that was used to decode the input stream, or + :obj:`None` if that is not determined yet + + """ + if not hasattr(self, 'tokenizer'): + return None + return self.tokenizer.stream.charEncoding[0].name + + def isHTMLIntegrationPoint(self, element): + if (element.name == "annotation-xml" and + element.namespace == namespaces["mathml"]): + return ("encoding" in element.attributes and + element.attributes["encoding"].translate( + asciiUpper2Lower) in + ("text/html", "application/xhtml+xml")) + else: + return (element.namespace, element.name) in htmlIntegrationPointElements + + def isMathMLTextIntegrationPoint(self, 
element): + return (element.namespace, element.name) in mathmlTextIntegrationPointElements + + def mainLoop(self): + CharactersToken = tokenTypes["Characters"] + SpaceCharactersToken = tokenTypes["SpaceCharacters"] + StartTagToken = tokenTypes["StartTag"] + EndTagToken = tokenTypes["EndTag"] + CommentToken = tokenTypes["Comment"] + DoctypeToken = tokenTypes["Doctype"] + ParseErrorToken = tokenTypes["ParseError"] + + for token in self.normalizedTokens(): + prev_token = None + new_token = token + while new_token is not None: + prev_token = new_token + currentNode = self.tree.openElements[-1] if self.tree.openElements else None + currentNodeNamespace = currentNode.namespace if currentNode else None + currentNodeName = currentNode.name if currentNode else None + + type = new_token["type"] + + if type == ParseErrorToken: + self.parseError(new_token["data"], new_token.get("datavars", {})) + new_token = None + else: + if (len(self.tree.openElements) == 0 or + currentNodeNamespace == self.tree.defaultNamespace or + (self.isMathMLTextIntegrationPoint(currentNode) and + ((type == StartTagToken and + token["name"] not in frozenset(["mglyph", "malignmark"])) or + type in (CharactersToken, SpaceCharactersToken))) or + (currentNodeNamespace == namespaces["mathml"] and + currentNodeName == "annotation-xml" and + type == StartTagToken and + token["name"] == "svg") or + (self.isHTMLIntegrationPoint(currentNode) and + type in (StartTagToken, CharactersToken, SpaceCharactersToken))): + phase = self.phase + else: + phase = self.phases["inForeignContent"] + + if type == CharactersToken: + new_token = phase.processCharacters(new_token) + elif type == SpaceCharactersToken: + new_token = phase.processSpaceCharacters(new_token) + elif type == StartTagToken: + new_token = phase.processStartTag(new_token) + elif type == EndTagToken: + new_token = phase.processEndTag(new_token) + elif type == CommentToken: + new_token = phase.processComment(new_token) + elif type == DoctypeToken: + new_token = phase.processDoctype(new_token) + + if (type == StartTagToken and prev_token["selfClosing"] and + not prev_token["selfClosingAcknowledged"]): + self.parseError("non-void-element-with-trailing-solidus", + {"name": prev_token["name"]}) + + # When the loop finishes it's EOF + reprocess = True + phases = [] + while reprocess: + phases.append(self.phase) + reprocess = self.phase.processEOF() + if reprocess: + assert self.phase not in phases + + def normalizedTokens(self): + for token in self.tokenizer: + yield self.normalizeToken(token) + + def parse(self, stream, *args, **kwargs): + """Parse a HTML document into a well-formed tree + + :arg stream: a file-like object or string containing the HTML to be parsed + + The optional encoding parameter must be a string that indicates + the encoding. If specified, that encoding will be used, + regardless of any BOM or later declaration (such as in a meta + element). 
+ + :arg scripting: treat noscript elements as if JavaScript was turned on + + :returns: parsed tree + + Example: + + >>> from html5lib.html5parser import HTMLParser + >>> parser = HTMLParser() + >>> parser.parse('<html><body><p>This is a doc</p></body></html>') + <Element u'{http://www.w3.org/1999/xhtml}html' at 0x7feac4909db0> + + """ + self._parse(stream, False, None, *args, **kwargs) + return self.tree.getDocument() + + def parseFragment(self, stream, *args, **kwargs): + """Parse a HTML fragment into a well-formed tree fragment + + :arg container: name of the element we're setting the innerHTML + property if set to None, default to 'div' + + :arg stream: a file-like object or string containing the HTML to be parsed + + The optional encoding parameter must be a string that indicates + the encoding. If specified, that encoding will be used, + regardless of any BOM or later declaration (such as in a meta + element) + + :arg scripting: treat noscript elements as if JavaScript was turned on + + :returns: parsed tree + + Example: + + >>> from html5lib.html5libparser import HTMLParser + >>> parser = HTMLParser() + >>> parser.parseFragment('<b>this is a fragment</b>') + <Element u'DOCUMENT_FRAGMENT' at 0x7feac484b090> + + """ + self._parse(stream, True, *args, **kwargs) + return self.tree.getFragment() + + def parseError(self, errorcode="XXX-undefined-error", datavars=None): + # XXX The idea is to make errorcode mandatory. + if datavars is None: + datavars = {} + self.errors.append((self.tokenizer.stream.position(), errorcode, datavars)) + if self.strict: + raise ParseError(E[errorcode] % datavars) + + def normalizeToken(self, token): + # HTML5 specific normalizations to the token stream + if token["type"] == tokenTypes["StartTag"]: + raw = token["data"] + token["data"] = OrderedDict(raw) + if len(raw) > len(token["data"]): + # we had some duplicated attribute, fix so first wins + token["data"].update(raw[::-1]) + + return token + + def adjustMathMLAttributes(self, token): + adjust_attributes(token, adjustMathMLAttributes) + + def adjustSVGAttributes(self, token): + adjust_attributes(token, adjustSVGAttributes) + + def adjustForeignAttributes(self, token): + adjust_attributes(token, adjustForeignAttributesMap) + + def reparseTokenNormal(self, token): + # pylint:disable=unused-argument + self.parser.phase() + + def resetInsertionMode(self): + # The name of this method is mostly historical. (It's also used in the + # specification.) 
+ last = False + newModes = { + "select": "inSelect", + "td": "inCell", + "th": "inCell", + "tr": "inRow", + "tbody": "inTableBody", + "thead": "inTableBody", + "tfoot": "inTableBody", + "caption": "inCaption", + "colgroup": "inColumnGroup", + "table": "inTable", + "head": "inBody", + "body": "inBody", + "frameset": "inFrameset", + "html": "beforeHead" + } + for node in self.tree.openElements[::-1]: + nodeName = node.name + new_phase = None + if node == self.tree.openElements[0]: + assert self.innerHTML + last = True + nodeName = self.innerHTML + # Check for conditions that should only happen in the innerHTML + # case + if nodeName in ("select", "colgroup", "head", "html"): + assert self.innerHTML + + if not last and node.namespace != self.tree.defaultNamespace: + continue + + if nodeName in newModes: + new_phase = self.phases[newModes[nodeName]] + break + elif last: + new_phase = self.phases["inBody"] + break + + self.phase = new_phase + + def parseRCDataRawtext(self, token, contentType): + # Generic RCDATA/RAWTEXT Parsing algorithm + assert contentType in ("RAWTEXT", "RCDATA") + + self.tree.insertElement(token) + + if contentType == "RAWTEXT": + self.tokenizer.state = self.tokenizer.rawtextState + else: + self.tokenizer.state = self.tokenizer.rcdataState + + self.originalPhase = self.phase + + self.phase = self.phases["text"] + + +@_utils.memoize +def getPhases(debug): + def log(function): + """Logger that records which phase processes each token""" + type_names = dict((value, key) for key, value in + tokenTypes.items()) + + def wrapped(self, *args, **kwargs): + if function.__name__.startswith("process") and len(args) > 0: + token = args[0] + try: + info = {"type": type_names[token['type']]} + except: + raise + if token['type'] in tagTokenTypes: + info["name"] = token['name'] + + self.parser.log.append((self.parser.tokenizer.state.__name__, + self.parser.phase.__class__.__name__, + self.__class__.__name__, + function.__name__, + info)) + return function(self, *args, **kwargs) + else: + return function(self, *args, **kwargs) + return wrapped + + def getMetaclass(use_metaclass, metaclass_func): + if use_metaclass: + return method_decorator_metaclass(metaclass_func) + else: + return type + + # pylint:disable=unused-argument + class Phase(with_metaclass(getMetaclass(debug, log))): + """Base class for helper object that implements each phase of processing + """ + + def __init__(self, parser, tree): + self.parser = parser + self.tree = tree + + def processEOF(self): + raise NotImplementedError + + def processComment(self, token): + # For most phases the following is correct. Where it's not it will be + # overridden. + self.tree.insertComment(token, self.tree.openElements[-1]) + + def processDoctype(self, token): + self.parser.parseError("unexpected-doctype") + + def processCharacters(self, token): + self.tree.insertText(token["data"]) + + def processSpaceCharacters(self, token): + self.tree.insertText(token["data"]) + + def processStartTag(self, token): + return self.startTagHandler[token["name"]](token) + + def startTagHtml(self, token): + if not self.parser.firstStartTag and token["name"] == "html": + self.parser.parseError("non-html-root") + # XXX Need a check here to see if the first start tag token emitted is + # this token... If it's not, invoke self.parser.parseError(). 
+ for attr, value in token["data"].items(): + if attr not in self.tree.openElements[0].attributes: + self.tree.openElements[0].attributes[attr] = value + self.parser.firstStartTag = False + + def processEndTag(self, token): + return self.endTagHandler[token["name"]](token) + + class InitialPhase(Phase): + def processSpaceCharacters(self, token): + pass + + def processComment(self, token): + self.tree.insertComment(token, self.tree.document) + + def processDoctype(self, token): + name = token["name"] + publicId = token["publicId"] + systemId = token["systemId"] + correct = token["correct"] + + if (name != "html" or publicId is not None or + systemId is not None and systemId != "about:legacy-compat"): + self.parser.parseError("unknown-doctype") + + if publicId is None: + publicId = "" + + self.tree.insertDoctype(token) + + if publicId != "": + publicId = publicId.translate(asciiUpper2Lower) + + if (not correct or token["name"] != "html" or + publicId.startswith( + ("+//silmaril//dtd html pro v0r11 19970101//", + "-//advasoft ltd//dtd html 3.0 aswedit + extensions//", + "-//as//dtd html 3.0 aswedit + extensions//", + "-//ietf//dtd html 2.0 level 1//", + "-//ietf//dtd html 2.0 level 2//", + "-//ietf//dtd html 2.0 strict level 1//", + "-//ietf//dtd html 2.0 strict level 2//", + "-//ietf//dtd html 2.0 strict//", + "-//ietf//dtd html 2.0//", + "-//ietf//dtd html 2.1e//", + "-//ietf//dtd html 3.0//", + "-//ietf//dtd html 3.2 final//", + "-//ietf//dtd html 3.2//", + "-//ietf//dtd html 3//", + "-//ietf//dtd html level 0//", + "-//ietf//dtd html level 1//", + "-//ietf//dtd html level 2//", + "-//ietf//dtd html level 3//", + "-//ietf//dtd html strict level 0//", + "-//ietf//dtd html strict level 1//", + "-//ietf//dtd html strict level 2//", + "-//ietf//dtd html strict level 3//", + "-//ietf//dtd html strict//", + "-//ietf//dtd html//", + "-//metrius//dtd metrius presentational//", + "-//microsoft//dtd internet explorer 2.0 html strict//", + "-//microsoft//dtd internet explorer 2.0 html//", + "-//microsoft//dtd internet explorer 2.0 tables//", + "-//microsoft//dtd internet explorer 3.0 html strict//", + "-//microsoft//dtd internet explorer 3.0 html//", + "-//microsoft//dtd internet explorer 3.0 tables//", + "-//netscape comm. corp.//dtd html//", + "-//netscape comm. 
corp.//dtd strict html//", + "-//o'reilly and associates//dtd html 2.0//", + "-//o'reilly and associates//dtd html extended 1.0//", + "-//o'reilly and associates//dtd html extended relaxed 1.0//", + "-//softquad software//dtd hotmetal pro 6.0::19990601::extensions to html 4.0//", + "-//softquad//dtd hotmetal pro 4.0::19971010::extensions to html 4.0//", + "-//spyglass//dtd html 2.0 extended//", + "-//sq//dtd html 2.0 hotmetal + extensions//", + "-//sun microsystems corp.//dtd hotjava html//", + "-//sun microsystems corp.//dtd hotjava strict html//", + "-//w3c//dtd html 3 1995-03-24//", + "-//w3c//dtd html 3.2 draft//", + "-//w3c//dtd html 3.2 final//", + "-//w3c//dtd html 3.2//", + "-//w3c//dtd html 3.2s draft//", + "-//w3c//dtd html 4.0 frameset//", + "-//w3c//dtd html 4.0 transitional//", + "-//w3c//dtd html experimental 19960712//", + "-//w3c//dtd html experimental 970421//", + "-//w3c//dtd w3 html//", + "-//w3o//dtd w3 html 3.0//", + "-//webtechs//dtd mozilla html 2.0//", + "-//webtechs//dtd mozilla html//")) or + publicId in ("-//w3o//dtd w3 html strict 3.0//en//", + "-/w3c/dtd html 4.0 transitional/en", + "html") or + publicId.startswith( + ("-//w3c//dtd html 4.01 frameset//", + "-//w3c//dtd html 4.01 transitional//")) and + systemId is None or + systemId and systemId.lower() == "http://www.ibm.com/data/dtd/v11/ibmxhtml1-transitional.dtd"): + self.parser.compatMode = "quirks" + elif (publicId.startswith( + ("-//w3c//dtd xhtml 1.0 frameset//", + "-//w3c//dtd xhtml 1.0 transitional//")) or + publicId.startswith( + ("-//w3c//dtd html 4.01 frameset//", + "-//w3c//dtd html 4.01 transitional//")) and + systemId is not None): + self.parser.compatMode = "limited quirks" + + self.parser.phase = self.parser.phases["beforeHtml"] + + def anythingElse(self): + self.parser.compatMode = "quirks" + self.parser.phase = self.parser.phases["beforeHtml"] + + def processCharacters(self, token): + self.parser.parseError("expected-doctype-but-got-chars") + self.anythingElse() + return token + + def processStartTag(self, token): + self.parser.parseError("expected-doctype-but-got-start-tag", + {"name": token["name"]}) + self.anythingElse() + return token + + def processEndTag(self, token): + self.parser.parseError("expected-doctype-but-got-end-tag", + {"name": token["name"]}) + self.anythingElse() + return token + + def processEOF(self): + self.parser.parseError("expected-doctype-but-got-eof") + self.anythingElse() + return True + + class BeforeHtmlPhase(Phase): + # helper methods + def insertHtmlElement(self): + self.tree.insertRoot(impliedTagToken("html", "StartTag")) + self.parser.phase = self.parser.phases["beforeHead"] + + # other + def processEOF(self): + self.insertHtmlElement() + return True + + def processComment(self, token): + self.tree.insertComment(token, self.tree.document) + + def processSpaceCharacters(self, token): + pass + + def processCharacters(self, token): + self.insertHtmlElement() + return token + + def processStartTag(self, token): + if token["name"] == "html": + self.parser.firstStartTag = True + self.insertHtmlElement() + return token + + def processEndTag(self, token): + if token["name"] not in ("head", "body", "html", "br"): + self.parser.parseError("unexpected-end-tag-before-html", + {"name": token["name"]}) + else: + self.insertHtmlElement() + return token + + class BeforeHeadPhase(Phase): + def __init__(self, parser, tree): + Phase.__init__(self, parser, tree) + + self.startTagHandler = _utils.MethodDispatcher([ + ("html", self.startTagHtml), + ("head", self.startTagHead) + ]) 
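# A compact, self-contained sketch of the quirks-mode decision InitialPhase makes
# above: the doctype name must be "html" and the lower-cased public identifier is
# tested against a long prefix list. Only a few representative prefixes are
# repeated here, and the exact-match public identifiers plus the system-identifier
# special cases (about:legacy-compat, the IBM XHTML DTD) are omitted, so this is
# illustrative rather than exhaustive.

QUIRKS_PUBLIC_ID_PREFIXES = (
    "-//w3c//dtd html 4.0 transitional//",
    "-//ietf//dtd html 2.0//",
    "-//netscape comm. corp.//dtd html//",
)
LIMITED_QUIRKS_PUBLIC_ID_PREFIXES = (
    "-//w3c//dtd xhtml 1.0 frameset//",
    "-//w3c//dtd xhtml 1.0 transitional//",
)

def compat_mode(name, public_id):
    public_id = (public_id or "").lower()
    if name != "html" or public_id.startswith(QUIRKS_PUBLIC_ID_PREFIXES):
        return "quirks"
    if public_id.startswith(LIMITED_QUIRKS_PUBLIC_ID_PREFIXES):
        return "limited quirks"
    return "no quirks"

assert compat_mode("html", None) == "no quirks"
assert compat_mode("html", "-//IETF//DTD HTML 2.0//EN") == "quirks"
assert compat_mode("html", "-//W3C//DTD XHTML 1.0 Transitional//EN") == "limited quirks"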
+ self.startTagHandler.default = self.startTagOther + + self.endTagHandler = _utils.MethodDispatcher([ + (("head", "body", "html", "br"), self.endTagImplyHead) + ]) + self.endTagHandler.default = self.endTagOther + + def processEOF(self): + self.startTagHead(impliedTagToken("head", "StartTag")) + return True + + def processSpaceCharacters(self, token): + pass + + def processCharacters(self, token): + self.startTagHead(impliedTagToken("head", "StartTag")) + return token + + def startTagHtml(self, token): + return self.parser.phases["inBody"].processStartTag(token) + + def startTagHead(self, token): + self.tree.insertElement(token) + self.tree.headPointer = self.tree.openElements[-1] + self.parser.phase = self.parser.phases["inHead"] + + def startTagOther(self, token): + self.startTagHead(impliedTagToken("head", "StartTag")) + return token + + def endTagImplyHead(self, token): + self.startTagHead(impliedTagToken("head", "StartTag")) + return token + + def endTagOther(self, token): + self.parser.parseError("end-tag-after-implied-root", + {"name": token["name"]}) + + class InHeadPhase(Phase): + def __init__(self, parser, tree): + Phase.__init__(self, parser, tree) + + self.startTagHandler = _utils.MethodDispatcher([ + ("html", self.startTagHtml), + ("title", self.startTagTitle), + (("noframes", "style"), self.startTagNoFramesStyle), + ("noscript", self.startTagNoscript), + ("script", self.startTagScript), + (("base", "basefont", "bgsound", "command", "link"), + self.startTagBaseLinkCommand), + ("meta", self.startTagMeta), + ("head", self.startTagHead) + ]) + self.startTagHandler.default = self.startTagOther + + self.endTagHandler = _utils.MethodDispatcher([ + ("head", self.endTagHead), + (("br", "html", "body"), self.endTagHtmlBodyBr) + ]) + self.endTagHandler.default = self.endTagOther + + # the real thing + def processEOF(self): + self.anythingElse() + return True + + def processCharacters(self, token): + self.anythingElse() + return token + + def startTagHtml(self, token): + return self.parser.phases["inBody"].processStartTag(token) + + def startTagHead(self, token): + self.parser.parseError("two-heads-are-not-better-than-one") + + def startTagBaseLinkCommand(self, token): + self.tree.insertElement(token) + self.tree.openElements.pop() + token["selfClosingAcknowledged"] = True + + def startTagMeta(self, token): + self.tree.insertElement(token) + self.tree.openElements.pop() + token["selfClosingAcknowledged"] = True + + attributes = token["data"] + if self.parser.tokenizer.stream.charEncoding[1] == "tentative": + if "charset" in attributes: + self.parser.tokenizer.stream.changeEncoding(attributes["charset"]) + elif ("content" in attributes and + "http-equiv" in attributes and + attributes["http-equiv"].lower() == "content-type"): + # Encoding it as UTF-8 here is a hack, as really we should pass + # the abstract Unicode string, and just use the + # ContentAttrParser on that, but using UTF-8 allows all chars + # to be encoded and as a ASCII-superset works. 
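# startTagMeta above re-decodes the stream when a tentative encoding meets
# <meta charset=...> or an http-equiv Content-Type declaration. A rough
# standalone model of pulling the charset out of the content attribute; the
# real ContentAttrParser used below is stricter about quoting and whitespace.

def charset_from_content(value):
    for part in value.split(";"):
        key, _, val = part.partition("=")
        if key.strip().lower() == "charset":
            return val.strip().strip("'\"").lower() or None
    return None

assert charset_from_content("text/html; charset=UTF-8") == "utf-8"
assert charset_from_content('text/html; charset="ISO-8859-1"') == "iso-8859-1"
assert charset_from_content("text/html") is None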
+ data = _inputstream.EncodingBytes(attributes["content"].encode("utf-8")) + parser = _inputstream.ContentAttrParser(data) + codec = parser.parse() + self.parser.tokenizer.stream.changeEncoding(codec) + + def startTagTitle(self, token): + self.parser.parseRCDataRawtext(token, "RCDATA") + + def startTagNoFramesStyle(self, token): + # Need to decide whether to implement the scripting-disabled case + self.parser.parseRCDataRawtext(token, "RAWTEXT") + + def startTagNoscript(self, token): + if self.parser.scripting: + self.parser.parseRCDataRawtext(token, "RAWTEXT") + else: + self.tree.insertElement(token) + self.parser.phase = self.parser.phases["inHeadNoscript"] + + def startTagScript(self, token): + self.tree.insertElement(token) + self.parser.tokenizer.state = self.parser.tokenizer.scriptDataState + self.parser.originalPhase = self.parser.phase + self.parser.phase = self.parser.phases["text"] + + def startTagOther(self, token): + self.anythingElse() + return token + + def endTagHead(self, token): + node = self.parser.tree.openElements.pop() + assert node.name == "head", "Expected head got %s" % node.name + self.parser.phase = self.parser.phases["afterHead"] + + def endTagHtmlBodyBr(self, token): + self.anythingElse() + return token + + def endTagOther(self, token): + self.parser.parseError("unexpected-end-tag", {"name": token["name"]}) + + def anythingElse(self): + self.endTagHead(impliedTagToken("head")) + + class InHeadNoscriptPhase(Phase): + def __init__(self, parser, tree): + Phase.__init__(self, parser, tree) + + self.startTagHandler = _utils.MethodDispatcher([ + ("html", self.startTagHtml), + (("basefont", "bgsound", "link", "meta", "noframes", "style"), self.startTagBaseLinkCommand), + (("head", "noscript"), self.startTagHeadNoscript), + ]) + self.startTagHandler.default = self.startTagOther + + self.endTagHandler = _utils.MethodDispatcher([ + ("noscript", self.endTagNoscript), + ("br", self.endTagBr), + ]) + self.endTagHandler.default = self.endTagOther + + def processEOF(self): + self.parser.parseError("eof-in-head-noscript") + self.anythingElse() + return True + + def processComment(self, token): + return self.parser.phases["inHead"].processComment(token) + + def processCharacters(self, token): + self.parser.parseError("char-in-head-noscript") + self.anythingElse() + return token + + def processSpaceCharacters(self, token): + return self.parser.phases["inHead"].processSpaceCharacters(token) + + def startTagHtml(self, token): + return self.parser.phases["inBody"].processStartTag(token) + + def startTagBaseLinkCommand(self, token): + return self.parser.phases["inHead"].processStartTag(token) + + def startTagHeadNoscript(self, token): + self.parser.parseError("unexpected-start-tag", {"name": token["name"]}) + + def startTagOther(self, token): + self.parser.parseError("unexpected-inhead-noscript-tag", {"name": token["name"]}) + self.anythingElse() + return token + + def endTagNoscript(self, token): + node = self.parser.tree.openElements.pop() + assert node.name == "noscript", "Expected noscript got %s" % node.name + self.parser.phase = self.parser.phases["inHead"] + + def endTagBr(self, token): + self.parser.parseError("unexpected-inhead-noscript-tag", {"name": token["name"]}) + self.anythingElse() + return token + + def endTagOther(self, token): + self.parser.parseError("unexpected-end-tag", {"name": token["name"]}) + + def anythingElse(self): + # Caller must raise parse error first! 
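# InHeadPhase, InHeadNoscriptPhase and the other phases above all build a
# _utils.MethodDispatcher that maps a tag name (or tuple of names) to a handler
# with a default fallback. A minimal dict-based stand-in, assuming only the
# lookup behaviour visible in this file:

class SimpleDispatcher(dict):
    """Map tag names (or tuples of names) to handlers, with a default."""

    def __init__(self, items):
        super().__init__()
        self.default = None
        for names, handler in items:
            if isinstance(names, str):
                names = (names,)
            for name in names:
                self[name] = handler

    def __missing__(self, key):
        return self.default

dispatch = SimpleDispatcher([
    ("title", lambda token: "rcdata"),
    (("noframes", "style"), lambda token: "rawtext"),
])
dispatch.default = lambda token: "other"
assert dispatch["style"]({}) == "rawtext"
assert dispatch["p"]({}) == "other"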
+ self.endTagNoscript(impliedTagToken("noscript")) + + class AfterHeadPhase(Phase): + def __init__(self, parser, tree): + Phase.__init__(self, parser, tree) + + self.startTagHandler = _utils.MethodDispatcher([ + ("html", self.startTagHtml), + ("body", self.startTagBody), + ("frameset", self.startTagFrameset), + (("base", "basefont", "bgsound", "link", "meta", "noframes", "script", + "style", "title"), + self.startTagFromHead), + ("head", self.startTagHead) + ]) + self.startTagHandler.default = self.startTagOther + self.endTagHandler = _utils.MethodDispatcher([(("body", "html", "br"), + self.endTagHtmlBodyBr)]) + self.endTagHandler.default = self.endTagOther + + def processEOF(self): + self.anythingElse() + return True + + def processCharacters(self, token): + self.anythingElse() + return token + + def startTagHtml(self, token): + return self.parser.phases["inBody"].processStartTag(token) + + def startTagBody(self, token): + self.parser.framesetOK = False + self.tree.insertElement(token) + self.parser.phase = self.parser.phases["inBody"] + + def startTagFrameset(self, token): + self.tree.insertElement(token) + self.parser.phase = self.parser.phases["inFrameset"] + + def startTagFromHead(self, token): + self.parser.parseError("unexpected-start-tag-out-of-my-head", + {"name": token["name"]}) + self.tree.openElements.append(self.tree.headPointer) + self.parser.phases["inHead"].processStartTag(token) + for node in self.tree.openElements[::-1]: + if node.name == "head": + self.tree.openElements.remove(node) + break + + def startTagHead(self, token): + self.parser.parseError("unexpected-start-tag", {"name": token["name"]}) + + def startTagOther(self, token): + self.anythingElse() + return token + + def endTagHtmlBodyBr(self, token): + self.anythingElse() + return token + + def endTagOther(self, token): + self.parser.parseError("unexpected-end-tag", {"name": token["name"]}) + + def anythingElse(self): + self.tree.insertElement(impliedTagToken("body", "StartTag")) + self.parser.phase = self.parser.phases["inBody"] + self.parser.framesetOK = True + + class InBodyPhase(Phase): + # http://www.whatwg.org/specs/web-apps/current-work/#parsing-main-inbody + # the really-really-really-very crazy mode + def __init__(self, parser, tree): + Phase.__init__(self, parser, tree) + + # Set this to the default handler + self.processSpaceCharacters = self.processSpaceCharactersNonPre + + self.startTagHandler = _utils.MethodDispatcher([ + ("html", self.startTagHtml), + (("base", "basefont", "bgsound", "command", "link", "meta", + "script", "style", "title"), + self.startTagProcessInHead), + ("body", self.startTagBody), + ("frameset", self.startTagFrameset), + (("address", "article", "aside", "blockquote", "center", "details", + "dir", "div", "dl", "fieldset", "figcaption", "figure", + "footer", "header", "hgroup", "main", "menu", "nav", "ol", "p", + "section", "summary", "ul"), + self.startTagCloseP), + (headingElements, self.startTagHeading), + (("pre", "listing"), self.startTagPreListing), + ("form", self.startTagForm), + (("li", "dd", "dt"), self.startTagListItem), + ("plaintext", self.startTagPlaintext), + ("a", self.startTagA), + (("b", "big", "code", "em", "font", "i", "s", "small", "strike", + "strong", "tt", "u"), self.startTagFormatting), + ("nobr", self.startTagNobr), + ("button", self.startTagButton), + (("applet", "marquee", "object"), self.startTagAppletMarqueeObject), + ("xmp", self.startTagXmp), + ("table", self.startTagTable), + (("area", "br", "embed", "img", "keygen", "wbr"), + 
self.startTagVoidFormatting), + (("param", "source", "track"), self.startTagParamSource), + ("input", self.startTagInput), + ("hr", self.startTagHr), + ("image", self.startTagImage), + ("isindex", self.startTagIsIndex), + ("textarea", self.startTagTextarea), + ("iframe", self.startTagIFrame), + ("noscript", self.startTagNoscript), + (("noembed", "noframes"), self.startTagRawtext), + ("select", self.startTagSelect), + (("rp", "rt"), self.startTagRpRt), + (("option", "optgroup"), self.startTagOpt), + (("math"), self.startTagMath), + (("svg"), self.startTagSvg), + (("caption", "col", "colgroup", "frame", "head", + "tbody", "td", "tfoot", "th", "thead", + "tr"), self.startTagMisplaced) + ]) + self.startTagHandler.default = self.startTagOther + + self.endTagHandler = _utils.MethodDispatcher([ + ("body", self.endTagBody), + ("html", self.endTagHtml), + (("address", "article", "aside", "blockquote", "button", "center", + "details", "dialog", "dir", "div", "dl", "fieldset", "figcaption", "figure", + "footer", "header", "hgroup", "listing", "main", "menu", "nav", "ol", "pre", + "section", "summary", "ul"), self.endTagBlock), + ("form", self.endTagForm), + ("p", self.endTagP), + (("dd", "dt", "li"), self.endTagListItem), + (headingElements, self.endTagHeading), + (("a", "b", "big", "code", "em", "font", "i", "nobr", "s", "small", + "strike", "strong", "tt", "u"), self.endTagFormatting), + (("applet", "marquee", "object"), self.endTagAppletMarqueeObject), + ("br", self.endTagBr), + ]) + self.endTagHandler.default = self.endTagOther + + def isMatchingFormattingElement(self, node1, node2): + return (node1.name == node2.name and + node1.namespace == node2.namespace and + node1.attributes == node2.attributes) + + # helper + def addFormattingElement(self, token): + self.tree.insertElement(token) + element = self.tree.openElements[-1] + + matchingElements = [] + for node in self.tree.activeFormattingElements[::-1]: + if node is Marker: + break + elif self.isMatchingFormattingElement(node, element): + matchingElements.append(node) + + assert len(matchingElements) <= 3 + if len(matchingElements) == 3: + self.tree.activeFormattingElements.remove(matchingElements[-1]) + self.tree.activeFormattingElements.append(element) + + # the real deal + def processEOF(self): + allowed_elements = frozenset(("dd", "dt", "li", "p", "tbody", "td", + "tfoot", "th", "thead", "tr", "body", + "html")) + for node in self.tree.openElements[::-1]: + if node.name not in allowed_elements: + self.parser.parseError("expected-closing-tag-but-got-eof") + break + # Stop parsing + + def processSpaceCharactersDropNewline(self, token): + # Sometimes (start of <pre>, <listing>, and <textarea> blocks) we + # want to drop leading newlines + data = token["data"] + self.processSpaceCharacters = self.processSpaceCharactersNonPre + if (data.startswith("\n") and + self.tree.openElements[-1].name in ("pre", "listing", "textarea") and + not self.tree.openElements[-1].hasContent()): + data = data[1:] + if data: + self.tree.reconstructActiveFormattingElements() + self.tree.insertText(data) + + def processCharacters(self, token): + if token["data"] == "\u0000": + # The tokenizer should always emit null on its own + return + self.tree.reconstructActiveFormattingElements() + self.tree.insertText(token["data"]) + # This must be bad for performance + if (self.parser.framesetOK and + any([char not in spaceCharacters + for char in token["data"]])): + self.parser.framesetOK = False + + def processSpaceCharactersNonPre(self, token): + 
self.tree.reconstructActiveFormattingElements() + self.tree.insertText(token["data"]) + + def startTagProcessInHead(self, token): + return self.parser.phases["inHead"].processStartTag(token) + + def startTagBody(self, token): + self.parser.parseError("unexpected-start-tag", {"name": "body"}) + if (len(self.tree.openElements) == 1 or + self.tree.openElements[1].name != "body"): + assert self.parser.innerHTML + else: + self.parser.framesetOK = False + for attr, value in token["data"].items(): + if attr not in self.tree.openElements[1].attributes: + self.tree.openElements[1].attributes[attr] = value + + def startTagFrameset(self, token): + self.parser.parseError("unexpected-start-tag", {"name": "frameset"}) + if (len(self.tree.openElements) == 1 or self.tree.openElements[1].name != "body"): + assert self.parser.innerHTML + elif not self.parser.framesetOK: + pass + else: + if self.tree.openElements[1].parent: + self.tree.openElements[1].parent.removeChild(self.tree.openElements[1]) + while self.tree.openElements[-1].name != "html": + self.tree.openElements.pop() + self.tree.insertElement(token) + self.parser.phase = self.parser.phases["inFrameset"] + + def startTagCloseP(self, token): + if self.tree.elementInScope("p", variant="button"): + self.endTagP(impliedTagToken("p")) + self.tree.insertElement(token) + + def startTagPreListing(self, token): + if self.tree.elementInScope("p", variant="button"): + self.endTagP(impliedTagToken("p")) + self.tree.insertElement(token) + self.parser.framesetOK = False + self.processSpaceCharacters = self.processSpaceCharactersDropNewline + + def startTagForm(self, token): + if self.tree.formPointer: + self.parser.parseError("unexpected-start-tag", {"name": "form"}) + else: + if self.tree.elementInScope("p", variant="button"): + self.endTagP(impliedTagToken("p")) + self.tree.insertElement(token) + self.tree.formPointer = self.tree.openElements[-1] + + def startTagListItem(self, token): + self.parser.framesetOK = False + + stopNamesMap = {"li": ["li"], + "dt": ["dt", "dd"], + "dd": ["dt", "dd"]} + stopNames = stopNamesMap[token["name"]] + for node in reversed(self.tree.openElements): + if node.name in stopNames: + self.parser.phase.processEndTag( + impliedTagToken(node.name, "EndTag")) + break + if (node.nameTuple in specialElements and + node.name not in ("address", "div", "p")): + break + + if self.tree.elementInScope("p", variant="button"): + self.parser.phase.processEndTag( + impliedTagToken("p", "EndTag")) + + self.tree.insertElement(token) + + def startTagPlaintext(self, token): + if self.tree.elementInScope("p", variant="button"): + self.endTagP(impliedTagToken("p")) + self.tree.insertElement(token) + self.parser.tokenizer.state = self.parser.tokenizer.plaintextState + + def startTagHeading(self, token): + if self.tree.elementInScope("p", variant="button"): + self.endTagP(impliedTagToken("p")) + if self.tree.openElements[-1].name in headingElements: + self.parser.parseError("unexpected-start-tag", {"name": token["name"]}) + self.tree.openElements.pop() + self.tree.insertElement(token) + + def startTagA(self, token): + afeAElement = self.tree.elementInActiveFormattingElements("a") + if afeAElement: + self.parser.parseError("unexpected-start-tag-implies-end-tag", + {"startName": "a", "endName": "a"}) + self.endTagFormatting(impliedTagToken("a")) + if afeAElement in self.tree.openElements: + self.tree.openElements.remove(afeAElement) + if afeAElement in self.tree.activeFormattingElements: + self.tree.activeFormattingElements.remove(afeAElement) + 
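# processSpaceCharactersDropNewline above discards a single newline that
# immediately follows an opening <pre>, <listing> or <textarea>, provided the
# element has no content yet, which is why "<pre>\nfoo" renders as "foo".
# A self-contained illustration of that rule:

def drop_leading_newline(data, container, container_has_content):
    if (data.startswith("\n") and
            container in ("pre", "listing", "textarea") and
            not container_has_content):
        data = data[1:]
    return data

assert drop_leading_newline("\nfoo", "pre", False) == "foo"
assert drop_leading_newline("\nfoo", "div", False) == "\nfoo"
assert drop_leading_newline("\nfoo", "pre", True) == "\nfoo"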
self.tree.reconstructActiveFormattingElements() + self.addFormattingElement(token) + + def startTagFormatting(self, token): + self.tree.reconstructActiveFormattingElements() + self.addFormattingElement(token) + + def startTagNobr(self, token): + self.tree.reconstructActiveFormattingElements() + if self.tree.elementInScope("nobr"): + self.parser.parseError("unexpected-start-tag-implies-end-tag", + {"startName": "nobr", "endName": "nobr"}) + self.processEndTag(impliedTagToken("nobr")) + # XXX Need tests that trigger the following + self.tree.reconstructActiveFormattingElements() + self.addFormattingElement(token) + + def startTagButton(self, token): + if self.tree.elementInScope("button"): + self.parser.parseError("unexpected-start-tag-implies-end-tag", + {"startName": "button", "endName": "button"}) + self.processEndTag(impliedTagToken("button")) + return token + else: + self.tree.reconstructActiveFormattingElements() + self.tree.insertElement(token) + self.parser.framesetOK = False + + def startTagAppletMarqueeObject(self, token): + self.tree.reconstructActiveFormattingElements() + self.tree.insertElement(token) + self.tree.activeFormattingElements.append(Marker) + self.parser.framesetOK = False + + def startTagXmp(self, token): + if self.tree.elementInScope("p", variant="button"): + self.endTagP(impliedTagToken("p")) + self.tree.reconstructActiveFormattingElements() + self.parser.framesetOK = False + self.parser.parseRCDataRawtext(token, "RAWTEXT") + + def startTagTable(self, token): + if self.parser.compatMode != "quirks": + if self.tree.elementInScope("p", variant="button"): + self.processEndTag(impliedTagToken("p")) + self.tree.insertElement(token) + self.parser.framesetOK = False + self.parser.phase = self.parser.phases["inTable"] + + def startTagVoidFormatting(self, token): + self.tree.reconstructActiveFormattingElements() + self.tree.insertElement(token) + self.tree.openElements.pop() + token["selfClosingAcknowledged"] = True + self.parser.framesetOK = False + + def startTagInput(self, token): + framesetOK = self.parser.framesetOK + self.startTagVoidFormatting(token) + if ("type" in token["data"] and + token["data"]["type"].translate(asciiUpper2Lower) == "hidden"): + # input type=hidden doesn't change framesetOK + self.parser.framesetOK = framesetOK + + def startTagParamSource(self, token): + self.tree.insertElement(token) + self.tree.openElements.pop() + token["selfClosingAcknowledged"] = True + + def startTagHr(self, token): + if self.tree.elementInScope("p", variant="button"): + self.endTagP(impliedTagToken("p")) + self.tree.insertElement(token) + self.tree.openElements.pop() + token["selfClosingAcknowledged"] = True + self.parser.framesetOK = False + + def startTagImage(self, token): + # No really... + self.parser.parseError("unexpected-start-tag-treated-as", + {"originalName": "image", "newName": "img"}) + self.processStartTag(impliedTagToken("img", "StartTag", + attributes=token["data"], + selfClosing=token["selfClosing"])) + + def startTagIsIndex(self, token): + self.parser.parseError("deprecated-tag", {"name": "isindex"}) + if self.tree.formPointer: + return + form_attrs = {} + if "action" in token["data"]: + form_attrs["action"] = token["data"]["action"] + self.processStartTag(impliedTagToken("form", "StartTag", + attributes=form_attrs)) + self.processStartTag(impliedTagToken("hr", "StartTag")) + self.processStartTag(impliedTagToken("label", "StartTag")) + # XXX Localization ... 
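# startTagInput above restores framesetOK when the new input is type=hidden, so
# a hidden field does not forbid a later <frameset>. The check as a tiny
# predicate over the token's attribute dict; the original lower-cases via
# asciiUpper2Lower (ASCII-only), str.lower() is used here for brevity.

def keeps_frameset_ok(token):
    return token.get("data", {}).get("type", "").lower() == "hidden"

assert keeps_frameset_ok({"data": {"type": "HIDDEN"}})
assert not keeps_frameset_ok({"data": {"type": "text"}})
assert not keeps_frameset_ok({"data": {}})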
+ if "prompt" in token["data"]: + prompt = token["data"]["prompt"] + else: + prompt = "This is a searchable index. Enter search keywords: " + self.processCharacters( + {"type": tokenTypes["Characters"], "data": prompt}) + attributes = token["data"].copy() + if "action" in attributes: + del attributes["action"] + if "prompt" in attributes: + del attributes["prompt"] + attributes["name"] = "isindex" + self.processStartTag(impliedTagToken("input", "StartTag", + attributes=attributes, + selfClosing=token["selfClosing"])) + self.processEndTag(impliedTagToken("label")) + self.processStartTag(impliedTagToken("hr", "StartTag")) + self.processEndTag(impliedTagToken("form")) + + def startTagTextarea(self, token): + self.tree.insertElement(token) + self.parser.tokenizer.state = self.parser.tokenizer.rcdataState + self.processSpaceCharacters = self.processSpaceCharactersDropNewline + self.parser.framesetOK = False + + def startTagIFrame(self, token): + self.parser.framesetOK = False + self.startTagRawtext(token) + + def startTagNoscript(self, token): + if self.parser.scripting: + self.startTagRawtext(token) + else: + self.startTagOther(token) + + def startTagRawtext(self, token): + """iframe, noembed noframes, noscript(if scripting enabled)""" + self.parser.parseRCDataRawtext(token, "RAWTEXT") + + def startTagOpt(self, token): + if self.tree.openElements[-1].name == "option": + self.parser.phase.processEndTag(impliedTagToken("option")) + self.tree.reconstructActiveFormattingElements() + self.parser.tree.insertElement(token) + + def startTagSelect(self, token): + self.tree.reconstructActiveFormattingElements() + self.tree.insertElement(token) + self.parser.framesetOK = False + if self.parser.phase in (self.parser.phases["inTable"], + self.parser.phases["inCaption"], + self.parser.phases["inColumnGroup"], + self.parser.phases["inTableBody"], + self.parser.phases["inRow"], + self.parser.phases["inCell"]): + self.parser.phase = self.parser.phases["inSelectInTable"] + else: + self.parser.phase = self.parser.phases["inSelect"] + + def startTagRpRt(self, token): + if self.tree.elementInScope("ruby"): + self.tree.generateImpliedEndTags() + if self.tree.openElements[-1].name != "ruby": + self.parser.parseError() + self.tree.insertElement(token) + + def startTagMath(self, token): + self.tree.reconstructActiveFormattingElements() + self.parser.adjustMathMLAttributes(token) + self.parser.adjustForeignAttributes(token) + token["namespace"] = namespaces["mathml"] + self.tree.insertElement(token) + # Need to get the parse error right for the case where the token + # has a namespace not equal to the xmlns attribute + if token["selfClosing"]: + self.tree.openElements.pop() + token["selfClosingAcknowledged"] = True + + def startTagSvg(self, token): + self.tree.reconstructActiveFormattingElements() + self.parser.adjustSVGAttributes(token) + self.parser.adjustForeignAttributes(token) + token["namespace"] = namespaces["svg"] + self.tree.insertElement(token) + # Need to get the parse error right for the case where the token + # has a namespace not equal to the xmlns attribute + if token["selfClosing"]: + self.tree.openElements.pop() + token["selfClosingAcknowledged"] = True + + def startTagMisplaced(self, token): + """ Elements that should be children of other elements that have a + different insertion mode; here they are ignored + "caption", "col", "colgroup", "frame", "frameset", "head", + "option", "optgroup", "tbody", "td", "tfoot", "th", "thead", + "tr", "noscript" + """ + 
self.parser.parseError("unexpected-start-tag-ignored", {"name": token["name"]}) + + def startTagOther(self, token): + self.tree.reconstructActiveFormattingElements() + self.tree.insertElement(token) + + def endTagP(self, token): + if not self.tree.elementInScope("p", variant="button"): + self.startTagCloseP(impliedTagToken("p", "StartTag")) + self.parser.parseError("unexpected-end-tag", {"name": "p"}) + self.endTagP(impliedTagToken("p", "EndTag")) + else: + self.tree.generateImpliedEndTags("p") + if self.tree.openElements[-1].name != "p": + self.parser.parseError("unexpected-end-tag", {"name": "p"}) + node = self.tree.openElements.pop() + while node.name != "p": + node = self.tree.openElements.pop() + + def endTagBody(self, token): + if not self.tree.elementInScope("body"): + self.parser.parseError() + return + elif self.tree.openElements[-1].name != "body": + for node in self.tree.openElements[2:]: + if node.name not in frozenset(("dd", "dt", "li", "optgroup", + "option", "p", "rp", "rt", + "tbody", "td", "tfoot", + "th", "thead", "tr", "body", + "html")): + # Not sure this is the correct name for the parse error + self.parser.parseError( + "expected-one-end-tag-but-got-another", + {"gotName": "body", "expectedName": node.name}) + break + self.parser.phase = self.parser.phases["afterBody"] + + def endTagHtml(self, token): + # We repeat the test for the body end tag token being ignored here + if self.tree.elementInScope("body"): + self.endTagBody(impliedTagToken("body")) + return token + + def endTagBlock(self, token): + # Put us back in the right whitespace handling mode + if token["name"] == "pre": + self.processSpaceCharacters = self.processSpaceCharactersNonPre + inScope = self.tree.elementInScope(token["name"]) + if inScope: + self.tree.generateImpliedEndTags() + if self.tree.openElements[-1].name != token["name"]: + self.parser.parseError("end-tag-too-early", {"name": token["name"]}) + if inScope: + node = self.tree.openElements.pop() + while node.name != token["name"]: + node = self.tree.openElements.pop() + + def endTagForm(self, token): + node = self.tree.formPointer + self.tree.formPointer = None + if node is None or not self.tree.elementInScope(node): + self.parser.parseError("unexpected-end-tag", + {"name": "form"}) + else: + self.tree.generateImpliedEndTags() + if self.tree.openElements[-1] != node: + self.parser.parseError("end-tag-too-early-ignored", + {"name": "form"}) + self.tree.openElements.remove(node) + + def endTagListItem(self, token): + if token["name"] == "li": + variant = "list" + else: + variant = None + if not self.tree.elementInScope(token["name"], variant=variant): + self.parser.parseError("unexpected-end-tag", {"name": token["name"]}) + else: + self.tree.generateImpliedEndTags(exclude=token["name"]) + if self.tree.openElements[-1].name != token["name"]: + self.parser.parseError( + "end-tag-too-early", + {"name": token["name"]}) + node = self.tree.openElements.pop() + while node.name != token["name"]: + node = self.tree.openElements.pop() + + def endTagHeading(self, token): + for item in headingElements: + if self.tree.elementInScope(item): + self.tree.generateImpliedEndTags() + break + if self.tree.openElements[-1].name != token["name"]: + self.parser.parseError("end-tag-too-early", {"name": token["name"]}) + + for item in headingElements: + if self.tree.elementInScope(item): + item = self.tree.openElements.pop() + while item.name not in headingElements: + item = self.tree.openElements.pop() + break + + def endTagFormatting(self, token): + """The much-feared 
adoption agency algorithm""" + # http://svn.whatwg.org/webapps/complete.html#adoptionAgency revision 7867 + # XXX Better parseError messages appreciated. + + # Step 1 + outerLoopCounter = 0 + + # Step 2 + while outerLoopCounter < 8: + + # Step 3 + outerLoopCounter += 1 + + # Step 4: + + # Let the formatting element be the last element in + # the list of active formatting elements that: + # - is between the end of the list and the last scope + # marker in the list, if any, or the start of the list + # otherwise, and + # - has the same tag name as the token. + formattingElement = self.tree.elementInActiveFormattingElements( + token["name"]) + if (not formattingElement or + (formattingElement in self.tree.openElements and + not self.tree.elementInScope(formattingElement.name))): + # If there is no such node, then abort these steps + # and instead act as described in the "any other + # end tag" entry below. + self.endTagOther(token) + return + + # Otherwise, if there is such a node, but that node is + # not in the stack of open elements, then this is a + # parse error; remove the element from the list, and + # abort these steps. + elif formattingElement not in self.tree.openElements: + self.parser.parseError("adoption-agency-1.2", {"name": token["name"]}) + self.tree.activeFormattingElements.remove(formattingElement) + return + + # Otherwise, if there is such a node, and that node is + # also in the stack of open elements, but the element + # is not in scope, then this is a parse error; ignore + # the token, and abort these steps. + elif not self.tree.elementInScope(formattingElement.name): + self.parser.parseError("adoption-agency-4.4", {"name": token["name"]}) + return + + # Otherwise, there is a formatting element and that + # element is in the stack and is in scope. If the + # element is not the current node, this is a parse + # error. In any case, proceed with the algorithm as + # written in the following steps. + else: + if formattingElement != self.tree.openElements[-1]: + self.parser.parseError("adoption-agency-1.3", {"name": token["name"]}) + + # Step 5: + + # Let the furthest block be the topmost node in the + # stack of open elements that is lower in the stack + # than the formatting element, and is an element in + # the special category. There might not be one. + afeIndex = self.tree.openElements.index(formattingElement) + furthestBlock = None + for element in self.tree.openElements[afeIndex:]: + if element.nameTuple in specialElements: + furthestBlock = element + break + + # Step 6: + + # If there is no furthest block, then the UA must + # first pop all the nodes from the bottom of the stack + # of open elements, from the current node up to and + # including the formatting element, then remove the + # formatting element from the list of active + # formatting elements, and finally abort these steps. + if furthestBlock is None: + element = self.tree.openElements.pop() + while element != formattingElement: + element = self.tree.openElements.pop() + self.tree.activeFormattingElements.remove(element) + return + + # Step 7 + commonAncestor = self.tree.openElements[afeIndex - 1] + + # Step 8: + # The bookmark is supposed to help us identify where to reinsert + # nodes in step 15. We have to ensure that we reinsert nodes after + # the node before the active formatting element. 
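# Step 5 of the adoption agency algorithm above scans the stack of open
# elements from the formatting element toward the current node and takes the
# first element in the special category as the furthest block; its absence
# triggers the early exit in step 6. A stripped-down version over
# (name, is_special) pairs:

def furthest_block(open_elements, fmt_index):
    for name, is_special in open_elements[fmt_index:]:
        if is_special:
            return name
    return None  # step 6: pop up to the formatting element and stop

stack = [("html", True), ("body", True), ("b", False), ("div", True)]
assert furthest_block(stack, 2) == "div"
assert furthest_block(stack[:3], 2) is None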
Note the bookmark + # can move in step 9.7 + bookmark = self.tree.activeFormattingElements.index(formattingElement) + + # Step 9 + lastNode = node = furthestBlock + innerLoopCounter = 0 + + index = self.tree.openElements.index(node) + while innerLoopCounter < 3: + innerLoopCounter += 1 + # Node is element before node in open elements + index -= 1 + node = self.tree.openElements[index] + if node not in self.tree.activeFormattingElements: + self.tree.openElements.remove(node) + continue + # Step 9.6 + if node == formattingElement: + break + # Step 9.7 + if lastNode == furthestBlock: + bookmark = self.tree.activeFormattingElements.index(node) + 1 + # Step 9.8 + clone = node.cloneNode() + # Replace node with clone + self.tree.activeFormattingElements[ + self.tree.activeFormattingElements.index(node)] = clone + self.tree.openElements[ + self.tree.openElements.index(node)] = clone + node = clone + # Step 9.9 + # Remove lastNode from its parents, if any + if lastNode.parent: + lastNode.parent.removeChild(lastNode) + node.appendChild(lastNode) + # Step 9.10 + lastNode = node + + # Step 10 + # Foster parent lastNode if commonAncestor is a + # table, tbody, tfoot, thead, or tr we need to foster + # parent the lastNode + if lastNode.parent: + lastNode.parent.removeChild(lastNode) + + if commonAncestor.name in frozenset(("table", "tbody", "tfoot", "thead", "tr")): + parent, insertBefore = self.tree.getTableMisnestedNodePosition() + parent.insertBefore(lastNode, insertBefore) + else: + commonAncestor.appendChild(lastNode) + + # Step 11 + clone = formattingElement.cloneNode() + + # Step 12 + furthestBlock.reparentChildren(clone) + + # Step 13 + furthestBlock.appendChild(clone) + + # Step 14 + self.tree.activeFormattingElements.remove(formattingElement) + self.tree.activeFormattingElements.insert(bookmark, clone) + + # Step 15 + self.tree.openElements.remove(formattingElement) + self.tree.openElements.insert( + self.tree.openElements.index(furthestBlock) + 1, clone) + + def endTagAppletMarqueeObject(self, token): + if self.tree.elementInScope(token["name"]): + self.tree.generateImpliedEndTags() + if self.tree.openElements[-1].name != token["name"]: + self.parser.parseError("end-tag-too-early", {"name": token["name"]}) + + if self.tree.elementInScope(token["name"]): + element = self.tree.openElements.pop() + while element.name != token["name"]: + element = self.tree.openElements.pop() + self.tree.clearActiveFormattingElements() + + def endTagBr(self, token): + self.parser.parseError("unexpected-end-tag-treated-as", + {"originalName": "br", "newName": "br element"}) + self.tree.reconstructActiveFormattingElements() + self.tree.insertElement(impliedTagToken("br", "StartTag")) + self.tree.openElements.pop() + + def endTagOther(self, token): + for node in self.tree.openElements[::-1]: + if node.name == token["name"]: + self.tree.generateImpliedEndTags(exclude=token["name"]) + if self.tree.openElements[-1].name != token["name"]: + self.parser.parseError("unexpected-end-tag", {"name": token["name"]}) + while self.tree.openElements.pop() != node: + pass + break + else: + if node.nameTuple in specialElements: + self.parser.parseError("unexpected-end-tag", {"name": token["name"]}) + break + + class TextPhase(Phase): + def __init__(self, parser, tree): + Phase.__init__(self, parser, tree) + self.startTagHandler = _utils.MethodDispatcher([]) + self.startTagHandler.default = self.startTagOther + self.endTagHandler = _utils.MethodDispatcher([ + ("script", self.endTagScript)]) + self.endTagHandler.default = 
self.endTagOther + + def processCharacters(self, token): + self.tree.insertText(token["data"]) + + def processEOF(self): + self.parser.parseError("expected-named-closing-tag-but-got-eof", + {"name": self.tree.openElements[-1].name}) + self.tree.openElements.pop() + self.parser.phase = self.parser.originalPhase + return True + + def startTagOther(self, token): + assert False, "Tried to process start tag %s in RCDATA/RAWTEXT mode" % token['name'] + + def endTagScript(self, token): + node = self.tree.openElements.pop() + assert node.name == "script" + self.parser.phase = self.parser.originalPhase + # The rest of this method is all stuff that only happens if + # document.write works + + def endTagOther(self, token): + self.tree.openElements.pop() + self.parser.phase = self.parser.originalPhase + + class InTablePhase(Phase): + # http://www.whatwg.org/specs/web-apps/current-work/#in-table + def __init__(self, parser, tree): + Phase.__init__(self, parser, tree) + self.startTagHandler = _utils.MethodDispatcher([ + ("html", self.startTagHtml), + ("caption", self.startTagCaption), + ("colgroup", self.startTagColgroup), + ("col", self.startTagCol), + (("tbody", "tfoot", "thead"), self.startTagRowGroup), + (("td", "th", "tr"), self.startTagImplyTbody), + ("table", self.startTagTable), + (("style", "script"), self.startTagStyleScript), + ("input", self.startTagInput), + ("form", self.startTagForm) + ]) + self.startTagHandler.default = self.startTagOther + + self.endTagHandler = _utils.MethodDispatcher([ + ("table", self.endTagTable), + (("body", "caption", "col", "colgroup", "html", "tbody", "td", + "tfoot", "th", "thead", "tr"), self.endTagIgnore) + ]) + self.endTagHandler.default = self.endTagOther + + # helper methods + def clearStackToTableContext(self): + # "clear the stack back to a table context" + while self.tree.openElements[-1].name not in ("table", "html"): + # self.parser.parseError("unexpected-implied-end-tag-in-table", + # {"name": self.tree.openElements[-1].name}) + self.tree.openElements.pop() + # When the current node is <html> it's an innerHTML case + + # processing methods + def processEOF(self): + if self.tree.openElements[-1].name != "html": + self.parser.parseError("eof-in-table") + else: + assert self.parser.innerHTML + # Stop parsing + + def processSpaceCharacters(self, token): + originalPhase = self.parser.phase + self.parser.phase = self.parser.phases["inTableText"] + self.parser.phase.originalPhase = originalPhase + self.parser.phase.processSpaceCharacters(token) + + def processCharacters(self, token): + originalPhase = self.parser.phase + self.parser.phase = self.parser.phases["inTableText"] + self.parser.phase.originalPhase = originalPhase + self.parser.phase.processCharacters(token) + + def insertText(self, token): + # If we get here there must be at least one non-whitespace character + # Do the table magic! 
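# The "table magic" referred to above is foster parenting: character and
# element tokens that are illegal directly inside <table> are re-inserted in
# front of the table instead. Assuming the module-level parse() helper this
# package exports, the effect is easy to observe:

import html5lib
from xml.etree import ElementTree

doc = html5lib.parse("<table>foo<tr><td>bar</td></tr></table>",
                     namespaceHTMLElements=False)
body = doc.find("body")
# The stray "foo" is hoisted out of the table and precedes it inside <body>.
print(ElementTree.tostring(body, encoding="unicode"))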
+ self.tree.insertFromTable = True + self.parser.phases["inBody"].processCharacters(token) + self.tree.insertFromTable = False + + def startTagCaption(self, token): + self.clearStackToTableContext() + self.tree.activeFormattingElements.append(Marker) + self.tree.insertElement(token) + self.parser.phase = self.parser.phases["inCaption"] + + def startTagColgroup(self, token): + self.clearStackToTableContext() + self.tree.insertElement(token) + self.parser.phase = self.parser.phases["inColumnGroup"] + + def startTagCol(self, token): + self.startTagColgroup(impliedTagToken("colgroup", "StartTag")) + return token + + def startTagRowGroup(self, token): + self.clearStackToTableContext() + self.tree.insertElement(token) + self.parser.phase = self.parser.phases["inTableBody"] + + def startTagImplyTbody(self, token): + self.startTagRowGroup(impliedTagToken("tbody", "StartTag")) + return token + + def startTagTable(self, token): + self.parser.parseError("unexpected-start-tag-implies-end-tag", + {"startName": "table", "endName": "table"}) + self.parser.phase.processEndTag(impliedTagToken("table")) + if not self.parser.innerHTML: + return token + + def startTagStyleScript(self, token): + return self.parser.phases["inHead"].processStartTag(token) + + def startTagInput(self, token): + if ("type" in token["data"] and + token["data"]["type"].translate(asciiUpper2Lower) == "hidden"): + self.parser.parseError("unexpected-hidden-input-in-table") + self.tree.insertElement(token) + # XXX associate with form + self.tree.openElements.pop() + else: + self.startTagOther(token) + + def startTagForm(self, token): + self.parser.parseError("unexpected-form-in-table") + if self.tree.formPointer is None: + self.tree.insertElement(token) + self.tree.formPointer = self.tree.openElements[-1] + self.tree.openElements.pop() + + def startTagOther(self, token): + self.parser.parseError("unexpected-start-tag-implies-table-voodoo", {"name": token["name"]}) + # Do the table magic! + self.tree.insertFromTable = True + self.parser.phases["inBody"].processStartTag(token) + self.tree.insertFromTable = False + + def endTagTable(self, token): + if self.tree.elementInScope("table", variant="table"): + self.tree.generateImpliedEndTags() + if self.tree.openElements[-1].name != "table": + self.parser.parseError("end-tag-too-early-named", + {"gotName": "table", + "expectedName": self.tree.openElements[-1].name}) + while self.tree.openElements[-1].name != "table": + self.tree.openElements.pop() + self.tree.openElements.pop() + self.parser.resetInsertionMode() + else: + # innerHTML case + assert self.parser.innerHTML + self.parser.parseError() + + def endTagIgnore(self, token): + self.parser.parseError("unexpected-end-tag", {"name": token["name"]}) + + def endTagOther(self, token): + self.parser.parseError("unexpected-end-tag-implies-table-voodoo", {"name": token["name"]}) + # Do the table magic! 
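# clearStackToTableContext above pops open elements until the current node is
# <table> (or <html> in the innerHTML case); clearStackToTableBodyContext and
# clearStackToTableRowContext below apply the same idea with different stop
# sets. A generic standalone version over plain tag names:

def clear_stack_to_context(open_elements, stop_names):
    while open_elements[-1] not in stop_names:
        open_elements.pop()
    return open_elements

stack = ["html", "body", "table", "tbody", "tr", "td", "b"]
assert clear_stack_to_context(stack, ("table", "html")) == ["html", "body", "table"]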
+ self.tree.insertFromTable = True + self.parser.phases["inBody"].processEndTag(token) + self.tree.insertFromTable = False + + class InTableTextPhase(Phase): + def __init__(self, parser, tree): + Phase.__init__(self, parser, tree) + self.originalPhase = None + self.characterTokens = [] + + def flushCharacters(self): + data = "".join([item["data"] for item in self.characterTokens]) + if any([item not in spaceCharacters for item in data]): + token = {"type": tokenTypes["Characters"], "data": data} + self.parser.phases["inTable"].insertText(token) + elif data: + self.tree.insertText(data) + self.characterTokens = [] + + def processComment(self, token): + self.flushCharacters() + self.parser.phase = self.originalPhase + return token + + def processEOF(self): + self.flushCharacters() + self.parser.phase = self.originalPhase + return True + + def processCharacters(self, token): + if token["data"] == "\u0000": + return + self.characterTokens.append(token) + + def processSpaceCharacters(self, token): + # pretty sure we should never reach here + self.characterTokens.append(token) + # assert False + + def processStartTag(self, token): + self.flushCharacters() + self.parser.phase = self.originalPhase + return token + + def processEndTag(self, token): + self.flushCharacters() + self.parser.phase = self.originalPhase + return token + + class InCaptionPhase(Phase): + # http://www.whatwg.org/specs/web-apps/current-work/#in-caption + def __init__(self, parser, tree): + Phase.__init__(self, parser, tree) + + self.startTagHandler = _utils.MethodDispatcher([ + ("html", self.startTagHtml), + (("caption", "col", "colgroup", "tbody", "td", "tfoot", "th", + "thead", "tr"), self.startTagTableElement) + ]) + self.startTagHandler.default = self.startTagOther + + self.endTagHandler = _utils.MethodDispatcher([ + ("caption", self.endTagCaption), + ("table", self.endTagTable), + (("body", "col", "colgroup", "html", "tbody", "td", "tfoot", "th", + "thead", "tr"), self.endTagIgnore) + ]) + self.endTagHandler.default = self.endTagOther + + def ignoreEndTagCaption(self): + return not self.tree.elementInScope("caption", variant="table") + + def processEOF(self): + self.parser.phases["inBody"].processEOF() + + def processCharacters(self, token): + return self.parser.phases["inBody"].processCharacters(token) + + def startTagTableElement(self, token): + self.parser.parseError() + # XXX Have to duplicate logic here to find out if the tag is ignored + ignoreEndTag = self.ignoreEndTagCaption() + self.parser.phase.processEndTag(impliedTagToken("caption")) + if not ignoreEndTag: + return token + + def startTagOther(self, token): + return self.parser.phases["inBody"].processStartTag(token) + + def endTagCaption(self, token): + if not self.ignoreEndTagCaption(): + # AT this code is quite similar to endTagTable in "InTable" + self.tree.generateImpliedEndTags() + if self.tree.openElements[-1].name != "caption": + self.parser.parseError("expected-one-end-tag-but-got-another", + {"gotName": "caption", + "expectedName": self.tree.openElements[-1].name}) + while self.tree.openElements[-1].name != "caption": + self.tree.openElements.pop() + self.tree.openElements.pop() + self.tree.clearActiveFormattingElements() + self.parser.phase = self.parser.phases["inTable"] + else: + # innerHTML case + assert self.parser.innerHTML + self.parser.parseError() + + def endTagTable(self, token): + self.parser.parseError() + ignoreEndTag = self.ignoreEndTagCaption() + self.parser.phase.processEndTag(impliedTagToken("caption")) + if not ignoreEndTag: + 
return token + + def endTagIgnore(self, token): + self.parser.parseError("unexpected-end-tag", {"name": token["name"]}) + + def endTagOther(self, token): + return self.parser.phases["inBody"].processEndTag(token) + + class InColumnGroupPhase(Phase): + # http://www.whatwg.org/specs/web-apps/current-work/#in-column + + def __init__(self, parser, tree): + Phase.__init__(self, parser, tree) + + self.startTagHandler = _utils.MethodDispatcher([ + ("html", self.startTagHtml), + ("col", self.startTagCol) + ]) + self.startTagHandler.default = self.startTagOther + + self.endTagHandler = _utils.MethodDispatcher([ + ("colgroup", self.endTagColgroup), + ("col", self.endTagCol) + ]) + self.endTagHandler.default = self.endTagOther + + def ignoreEndTagColgroup(self): + return self.tree.openElements[-1].name == "html" + + def processEOF(self): + if self.tree.openElements[-1].name == "html": + assert self.parser.innerHTML + return + else: + ignoreEndTag = self.ignoreEndTagColgroup() + self.endTagColgroup(impliedTagToken("colgroup")) + if not ignoreEndTag: + return True + + def processCharacters(self, token): + ignoreEndTag = self.ignoreEndTagColgroup() + self.endTagColgroup(impliedTagToken("colgroup")) + if not ignoreEndTag: + return token + + def startTagCol(self, token): + self.tree.insertElement(token) + self.tree.openElements.pop() + token["selfClosingAcknowledged"] = True + + def startTagOther(self, token): + ignoreEndTag = self.ignoreEndTagColgroup() + self.endTagColgroup(impliedTagToken("colgroup")) + if not ignoreEndTag: + return token + + def endTagColgroup(self, token): + if self.ignoreEndTagColgroup(): + # innerHTML case + assert self.parser.innerHTML + self.parser.parseError() + else: + self.tree.openElements.pop() + self.parser.phase = self.parser.phases["inTable"] + + def endTagCol(self, token): + self.parser.parseError("no-end-tag", {"name": "col"}) + + def endTagOther(self, token): + ignoreEndTag = self.ignoreEndTagColgroup() + self.endTagColgroup(impliedTagToken("colgroup")) + if not ignoreEndTag: + return token + + class InTableBodyPhase(Phase): + # http://www.whatwg.org/specs/web-apps/current-work/#in-table0 + def __init__(self, parser, tree): + Phase.__init__(self, parser, tree) + self.startTagHandler = _utils.MethodDispatcher([ + ("html", self.startTagHtml), + ("tr", self.startTagTr), + (("td", "th"), self.startTagTableCell), + (("caption", "col", "colgroup", "tbody", "tfoot", "thead"), + self.startTagTableOther) + ]) + self.startTagHandler.default = self.startTagOther + + self.endTagHandler = _utils.MethodDispatcher([ + (("tbody", "tfoot", "thead"), self.endTagTableRowGroup), + ("table", self.endTagTable), + (("body", "caption", "col", "colgroup", "html", "td", "th", + "tr"), self.endTagIgnore) + ]) + self.endTagHandler.default = self.endTagOther + + # helper methods + def clearStackToTableBodyContext(self): + while self.tree.openElements[-1].name not in ("tbody", "tfoot", + "thead", "html"): + # self.parser.parseError("unexpected-implied-end-tag-in-table", + # {"name": self.tree.openElements[-1].name}) + self.tree.openElements.pop() + if self.tree.openElements[-1].name == "html": + assert self.parser.innerHTML + + # the rest + def processEOF(self): + self.parser.phases["inTable"].processEOF() + + def processSpaceCharacters(self, token): + return self.parser.phases["inTable"].processSpaceCharacters(token) + + def processCharacters(self, token): + return self.parser.phases["inTable"].processCharacters(token) + + def startTagTr(self, token): + self.clearStackToTableBodyContext() + 
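# InTableTextPhase above buffers character tokens and only decides where they
# go when flushed: an all-whitespace run stays inside the table, anything else
# is foster parented through the inBody phase. A reduced model of that
# decision, assuming the HTML space characters (tab, LF, FF, CR, space):

SPACE_CHARACTERS = frozenset("\t\n\x0c\r ")

def flush_decision(character_tokens):
    data = "".join(tok["data"] for tok in character_tokens)
    if not data:
        return None
    if all(char in SPACE_CHARACTERS for char in data):
        return "insert-in-table"
    return "foster-parent"

assert flush_decision([{"data": "  \n"}]) == "insert-in-table"
assert flush_decision([{"data": "  "}, {"data": "x"}]) == "foster-parent"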
self.tree.insertElement(token) + self.parser.phase = self.parser.phases["inRow"] + + def startTagTableCell(self, token): + self.parser.parseError("unexpected-cell-in-table-body", + {"name": token["name"]}) + self.startTagTr(impliedTagToken("tr", "StartTag")) + return token + + def startTagTableOther(self, token): + # XXX AT Any ideas on how to share this with endTagTable? + if (self.tree.elementInScope("tbody", variant="table") or + self.tree.elementInScope("thead", variant="table") or + self.tree.elementInScope("tfoot", variant="table")): + self.clearStackToTableBodyContext() + self.endTagTableRowGroup( + impliedTagToken(self.tree.openElements[-1].name)) + return token + else: + # innerHTML case + assert self.parser.innerHTML + self.parser.parseError() + + def startTagOther(self, token): + return self.parser.phases["inTable"].processStartTag(token) + + def endTagTableRowGroup(self, token): + if self.tree.elementInScope(token["name"], variant="table"): + self.clearStackToTableBodyContext() + self.tree.openElements.pop() + self.parser.phase = self.parser.phases["inTable"] + else: + self.parser.parseError("unexpected-end-tag-in-table-body", + {"name": token["name"]}) + + def endTagTable(self, token): + if (self.tree.elementInScope("tbody", variant="table") or + self.tree.elementInScope("thead", variant="table") or + self.tree.elementInScope("tfoot", variant="table")): + self.clearStackToTableBodyContext() + self.endTagTableRowGroup( + impliedTagToken(self.tree.openElements[-1].name)) + return token + else: + # innerHTML case + assert self.parser.innerHTML + self.parser.parseError() + + def endTagIgnore(self, token): + self.parser.parseError("unexpected-end-tag-in-table-body", + {"name": token["name"]}) + + def endTagOther(self, token): + return self.parser.phases["inTable"].processEndTag(token) + + class InRowPhase(Phase): + # http://www.whatwg.org/specs/web-apps/current-work/#in-row + def __init__(self, parser, tree): + Phase.__init__(self, parser, tree) + self.startTagHandler = _utils.MethodDispatcher([ + ("html", self.startTagHtml), + (("td", "th"), self.startTagTableCell), + (("caption", "col", "colgroup", "tbody", "tfoot", "thead", + "tr"), self.startTagTableOther) + ]) + self.startTagHandler.default = self.startTagOther + + self.endTagHandler = _utils.MethodDispatcher([ + ("tr", self.endTagTr), + ("table", self.endTagTable), + (("tbody", "tfoot", "thead"), self.endTagTableRowGroup), + (("body", "caption", "col", "colgroup", "html", "td", "th"), + self.endTagIgnore) + ]) + self.endTagHandler.default = self.endTagOther + + # helper methods (XXX unify this with other table helper methods) + def clearStackToTableRowContext(self): + while self.tree.openElements[-1].name not in ("tr", "html"): + self.parser.parseError("unexpected-implied-end-tag-in-table-row", + {"name": self.tree.openElements[-1].name}) + self.tree.openElements.pop() + + def ignoreEndTagTr(self): + return not self.tree.elementInScope("tr", variant="table") + + # the rest + def processEOF(self): + self.parser.phases["inTable"].processEOF() + + def processSpaceCharacters(self, token): + return self.parser.phases["inTable"].processSpaceCharacters(token) + + def processCharacters(self, token): + return self.parser.phases["inTable"].processCharacters(token) + + def startTagTableCell(self, token): + self.clearStackToTableRowContext() + self.tree.insertElement(token) + self.parser.phase = self.parser.phases["inCell"] + self.tree.activeFormattingElements.append(Marker) + + def startTagTableOther(self, token): + ignoreEndTag = 
self.ignoreEndTagTr() + self.endTagTr(impliedTagToken("tr")) + # XXX how are we sure it's always ignored in the innerHTML case? + if not ignoreEndTag: + return token + + def startTagOther(self, token): + return self.parser.phases["inTable"].processStartTag(token) + + def endTagTr(self, token): + if not self.ignoreEndTagTr(): + self.clearStackToTableRowContext() + self.tree.openElements.pop() + self.parser.phase = self.parser.phases["inTableBody"] + else: + # innerHTML case + assert self.parser.innerHTML + self.parser.parseError() + + def endTagTable(self, token): + ignoreEndTag = self.ignoreEndTagTr() + self.endTagTr(impliedTagToken("tr")) + # Reprocess the current tag if the tr end tag was not ignored + # XXX how are we sure it's always ignored in the innerHTML case? + if not ignoreEndTag: + return token + + def endTagTableRowGroup(self, token): + if self.tree.elementInScope(token["name"], variant="table"): + self.endTagTr(impliedTagToken("tr")) + return token + else: + self.parser.parseError() + + def endTagIgnore(self, token): + self.parser.parseError("unexpected-end-tag-in-table-row", + {"name": token["name"]}) + + def endTagOther(self, token): + return self.parser.phases["inTable"].processEndTag(token) + + class InCellPhase(Phase): + # http://www.whatwg.org/specs/web-apps/current-work/#in-cell + def __init__(self, parser, tree): + Phase.__init__(self, parser, tree) + self.startTagHandler = _utils.MethodDispatcher([ + ("html", self.startTagHtml), + (("caption", "col", "colgroup", "tbody", "td", "tfoot", "th", + "thead", "tr"), self.startTagTableOther) + ]) + self.startTagHandler.default = self.startTagOther + + self.endTagHandler = _utils.MethodDispatcher([ + (("td", "th"), self.endTagTableCell), + (("body", "caption", "col", "colgroup", "html"), self.endTagIgnore), + (("table", "tbody", "tfoot", "thead", "tr"), self.endTagImply) + ]) + self.endTagHandler.default = self.endTagOther + + # helper + def closeCell(self): + if self.tree.elementInScope("td", variant="table"): + self.endTagTableCell(impliedTagToken("td")) + elif self.tree.elementInScope("th", variant="table"): + self.endTagTableCell(impliedTagToken("th")) + + # the rest + def processEOF(self): + self.parser.phases["inBody"].processEOF() + + def processCharacters(self, token): + return self.parser.phases["inBody"].processCharacters(token) + + def startTagTableOther(self, token): + if (self.tree.elementInScope("td", variant="table") or + self.tree.elementInScope("th", variant="table")): + self.closeCell() + return token + else: + # innerHTML case + assert self.parser.innerHTML + self.parser.parseError() + + def startTagOther(self, token): + return self.parser.phases["inBody"].processStartTag(token) + + def endTagTableCell(self, token): + if self.tree.elementInScope(token["name"], variant="table"): + self.tree.generateImpliedEndTags(token["name"]) + if self.tree.openElements[-1].name != token["name"]: + self.parser.parseError("unexpected-cell-end-tag", + {"name": token["name"]}) + while True: + node = self.tree.openElements.pop() + if node.name == token["name"]: + break + else: + self.tree.openElements.pop() + self.tree.clearActiveFormattingElements() + self.parser.phase = self.parser.phases["inRow"] + else: + self.parser.parseError("unexpected-end-tag", {"name": token["name"]}) + + def endTagIgnore(self, token): + self.parser.parseError("unexpected-end-tag", {"name": token["name"]}) + + def endTagImply(self, token): + if self.tree.elementInScope(token["name"], variant="table"): + self.closeCell() + return token + else: + # 
sometimes innerHTML case + self.parser.parseError() + + def endTagOther(self, token): + return self.parser.phases["inBody"].processEndTag(token) + + class InSelectPhase(Phase): + def __init__(self, parser, tree): + Phase.__init__(self, parser, tree) + + self.startTagHandler = _utils.MethodDispatcher([ + ("html", self.startTagHtml), + ("option", self.startTagOption), + ("optgroup", self.startTagOptgroup), + ("select", self.startTagSelect), + (("input", "keygen", "textarea"), self.startTagInput), + ("script", self.startTagScript) + ]) + self.startTagHandler.default = self.startTagOther + + self.endTagHandler = _utils.MethodDispatcher([ + ("option", self.endTagOption), + ("optgroup", self.endTagOptgroup), + ("select", self.endTagSelect) + ]) + self.endTagHandler.default = self.endTagOther + + # http://www.whatwg.org/specs/web-apps/current-work/#in-select + def processEOF(self): + if self.tree.openElements[-1].name != "html": + self.parser.parseError("eof-in-select") + else: + assert self.parser.innerHTML + + def processCharacters(self, token): + if token["data"] == "\u0000": + return + self.tree.insertText(token["data"]) + + def startTagOption(self, token): + # We need to imply </option> if <option> is the current node. + if self.tree.openElements[-1].name == "option": + self.tree.openElements.pop() + self.tree.insertElement(token) + + def startTagOptgroup(self, token): + if self.tree.openElements[-1].name == "option": + self.tree.openElements.pop() + if self.tree.openElements[-1].name == "optgroup": + self.tree.openElements.pop() + self.tree.insertElement(token) + + def startTagSelect(self, token): + self.parser.parseError("unexpected-select-in-select") + self.endTagSelect(impliedTagToken("select")) + + def startTagInput(self, token): + self.parser.parseError("unexpected-input-in-select") + if self.tree.elementInScope("select", variant="select"): + self.endTagSelect(impliedTagToken("select")) + return token + else: + assert self.parser.innerHTML + + def startTagScript(self, token): + return self.parser.phases["inHead"].processStartTag(token) + + def startTagOther(self, token): + self.parser.parseError("unexpected-start-tag-in-select", + {"name": token["name"]}) + + def endTagOption(self, token): + if self.tree.openElements[-1].name == "option": + self.tree.openElements.pop() + else: + self.parser.parseError("unexpected-end-tag-in-select", + {"name": "option"}) + + def endTagOptgroup(self, token): + # </optgroup> implicitly closes <option> + if (self.tree.openElements[-1].name == "option" and + self.tree.openElements[-2].name == "optgroup"): + self.tree.openElements.pop() + # It also closes </optgroup> + if self.tree.openElements[-1].name == "optgroup": + self.tree.openElements.pop() + # But nothing else + else: + self.parser.parseError("unexpected-end-tag-in-select", + {"name": "optgroup"}) + + def endTagSelect(self, token): + if self.tree.elementInScope("select", variant="select"): + node = self.tree.openElements.pop() + while node.name != "select": + node = self.tree.openElements.pop() + self.parser.resetInsertionMode() + else: + # innerHTML case + assert self.parser.innerHTML + self.parser.parseError() + + def endTagOther(self, token): + self.parser.parseError("unexpected-end-tag-in-select", + {"name": token["name"]}) + + class InSelectInTablePhase(Phase): + def __init__(self, parser, tree): + Phase.__init__(self, parser, tree) + + self.startTagHandler = _utils.MethodDispatcher([ + (("caption", "table", "tbody", "tfoot", "thead", "tr", "td", "th"), + self.startTagTable) + ]) + 
self.startTagHandler.default = self.startTagOther + + self.endTagHandler = _utils.MethodDispatcher([ + (("caption", "table", "tbody", "tfoot", "thead", "tr", "td", "th"), + self.endTagTable) + ]) + self.endTagHandler.default = self.endTagOther + + def processEOF(self): + self.parser.phases["inSelect"].processEOF() + + def processCharacters(self, token): + return self.parser.phases["inSelect"].processCharacters(token) + + def startTagTable(self, token): + self.parser.parseError("unexpected-table-element-start-tag-in-select-in-table", {"name": token["name"]}) + self.endTagOther(impliedTagToken("select")) + return token + + def startTagOther(self, token): + return self.parser.phases["inSelect"].processStartTag(token) + + def endTagTable(self, token): + self.parser.parseError("unexpected-table-element-end-tag-in-select-in-table", {"name": token["name"]}) + if self.tree.elementInScope(token["name"], variant="table"): + self.endTagOther(impliedTagToken("select")) + return token + + def endTagOther(self, token): + return self.parser.phases["inSelect"].processEndTag(token) + + class InForeignContentPhase(Phase): + breakoutElements = frozenset(["b", "big", "blockquote", "body", "br", + "center", "code", "dd", "div", "dl", "dt", + "em", "embed", "h1", "h2", "h3", + "h4", "h5", "h6", "head", "hr", "i", "img", + "li", "listing", "menu", "meta", "nobr", + "ol", "p", "pre", "ruby", "s", "small", + "span", "strong", "strike", "sub", "sup", + "table", "tt", "u", "ul", "var"]) + + def __init__(self, parser, tree): + Phase.__init__(self, parser, tree) + + def adjustSVGTagNames(self, token): + replacements = {"altglyph": "altGlyph", + "altglyphdef": "altGlyphDef", + "altglyphitem": "altGlyphItem", + "animatecolor": "animateColor", + "animatemotion": "animateMotion", + "animatetransform": "animateTransform", + "clippath": "clipPath", + "feblend": "feBlend", + "fecolormatrix": "feColorMatrix", + "fecomponenttransfer": "feComponentTransfer", + "fecomposite": "feComposite", + "feconvolvematrix": "feConvolveMatrix", + "fediffuselighting": "feDiffuseLighting", + "fedisplacementmap": "feDisplacementMap", + "fedistantlight": "feDistantLight", + "feflood": "feFlood", + "fefunca": "feFuncA", + "fefuncb": "feFuncB", + "fefuncg": "feFuncG", + "fefuncr": "feFuncR", + "fegaussianblur": "feGaussianBlur", + "feimage": "feImage", + "femerge": "feMerge", + "femergenode": "feMergeNode", + "femorphology": "feMorphology", + "feoffset": "feOffset", + "fepointlight": "fePointLight", + "fespecularlighting": "feSpecularLighting", + "fespotlight": "feSpotLight", + "fetile": "feTile", + "feturbulence": "feTurbulence", + "foreignobject": "foreignObject", + "glyphref": "glyphRef", + "lineargradient": "linearGradient", + "radialgradient": "radialGradient", + "textpath": "textPath"} + + if token["name"] in replacements: + token["name"] = replacements[token["name"]] + + def processCharacters(self, token): + if token["data"] == "\u0000": + token["data"] = "\uFFFD" + elif (self.parser.framesetOK and + any(char not in spaceCharacters for char in token["data"])): + self.parser.framesetOK = False + Phase.processCharacters(self, token) + + def processStartTag(self, token): + currentNode = self.tree.openElements[-1] + if (token["name"] in self.breakoutElements or + (token["name"] == "font" and + set(token["data"].keys()) & set(["color", "face", "size"]))): + self.parser.parseError("unexpected-html-element-in-foreign-content", + {"name": token["name"]}) + while (self.tree.openElements[-1].namespace != + self.tree.defaultNamespace and + not 
self.parser.isHTMLIntegrationPoint(self.tree.openElements[-1]) and + not self.parser.isMathMLTextIntegrationPoint(self.tree.openElements[-1])): + self.tree.openElements.pop() + return token + + else: + if currentNode.namespace == namespaces["mathml"]: + self.parser.adjustMathMLAttributes(token) + elif currentNode.namespace == namespaces["svg"]: + self.adjustSVGTagNames(token) + self.parser.adjustSVGAttributes(token) + self.parser.adjustForeignAttributes(token) + token["namespace"] = currentNode.namespace + self.tree.insertElement(token) + if token["selfClosing"]: + self.tree.openElements.pop() + token["selfClosingAcknowledged"] = True + + def processEndTag(self, token): + nodeIndex = len(self.tree.openElements) - 1 + node = self.tree.openElements[-1] + if node.name.translate(asciiUpper2Lower) != token["name"]: + self.parser.parseError("unexpected-end-tag", {"name": token["name"]}) + + while True: + if node.name.translate(asciiUpper2Lower) == token["name"]: + # XXX this isn't in the spec but it seems necessary + if self.parser.phase == self.parser.phases["inTableText"]: + self.parser.phase.flushCharacters() + self.parser.phase = self.parser.phase.originalPhase + while self.tree.openElements.pop() != node: + assert self.tree.openElements + new_token = None + break + nodeIndex -= 1 + + node = self.tree.openElements[nodeIndex] + if node.namespace != self.tree.defaultNamespace: + continue + else: + new_token = self.parser.phase.processEndTag(token) + break + return new_token + + class AfterBodyPhase(Phase): + def __init__(self, parser, tree): + Phase.__init__(self, parser, tree) + + self.startTagHandler = _utils.MethodDispatcher([ + ("html", self.startTagHtml) + ]) + self.startTagHandler.default = self.startTagOther + + self.endTagHandler = _utils.MethodDispatcher([("html", self.endTagHtml)]) + self.endTagHandler.default = self.endTagOther + + def processEOF(self): + # Stop parsing + pass + + def processComment(self, token): + # This is needed because data is to be appended to the <html> element + # here and not to whatever is currently open. 
+ self.tree.insertComment(token, self.tree.openElements[0]) + + def processCharacters(self, token): + self.parser.parseError("unexpected-char-after-body") + self.parser.phase = self.parser.phases["inBody"] + return token + + def startTagHtml(self, token): + return self.parser.phases["inBody"].processStartTag(token) + + def startTagOther(self, token): + self.parser.parseError("unexpected-start-tag-after-body", + {"name": token["name"]}) + self.parser.phase = self.parser.phases["inBody"] + return token + + def endTagHtml(self, name): + if self.parser.innerHTML: + self.parser.parseError("unexpected-end-tag-after-body-innerhtml") + else: + self.parser.phase = self.parser.phases["afterAfterBody"] + + def endTagOther(self, token): + self.parser.parseError("unexpected-end-tag-after-body", + {"name": token["name"]}) + self.parser.phase = self.parser.phases["inBody"] + return token + + class InFramesetPhase(Phase): + # http://www.whatwg.org/specs/web-apps/current-work/#in-frameset + def __init__(self, parser, tree): + Phase.__init__(self, parser, tree) + + self.startTagHandler = _utils.MethodDispatcher([ + ("html", self.startTagHtml), + ("frameset", self.startTagFrameset), + ("frame", self.startTagFrame), + ("noframes", self.startTagNoframes) + ]) + self.startTagHandler.default = self.startTagOther + + self.endTagHandler = _utils.MethodDispatcher([ + ("frameset", self.endTagFrameset) + ]) + self.endTagHandler.default = self.endTagOther + + def processEOF(self): + if self.tree.openElements[-1].name != "html": + self.parser.parseError("eof-in-frameset") + else: + assert self.parser.innerHTML + + def processCharacters(self, token): + self.parser.parseError("unexpected-char-in-frameset") + + def startTagFrameset(self, token): + self.tree.insertElement(token) + + def startTagFrame(self, token): + self.tree.insertElement(token) + self.tree.openElements.pop() + + def startTagNoframes(self, token): + return self.parser.phases["inBody"].processStartTag(token) + + def startTagOther(self, token): + self.parser.parseError("unexpected-start-tag-in-frameset", + {"name": token["name"]}) + + def endTagFrameset(self, token): + if self.tree.openElements[-1].name == "html": + # innerHTML case + self.parser.parseError("unexpected-frameset-in-frameset-innerhtml") + else: + self.tree.openElements.pop() + if (not self.parser.innerHTML and + self.tree.openElements[-1].name != "frameset"): + # If we're not in innerHTML mode and the current node is not a + # "frameset" element (anymore) then switch. 
+ self.parser.phase = self.parser.phases["afterFrameset"] + + def endTagOther(self, token): + self.parser.parseError("unexpected-end-tag-in-frameset", + {"name": token["name"]}) + + class AfterFramesetPhase(Phase): + # http://www.whatwg.org/specs/web-apps/current-work/#after3 + def __init__(self, parser, tree): + Phase.__init__(self, parser, tree) + + self.startTagHandler = _utils.MethodDispatcher([ + ("html", self.startTagHtml), + ("noframes", self.startTagNoframes) + ]) + self.startTagHandler.default = self.startTagOther + + self.endTagHandler = _utils.MethodDispatcher([ + ("html", self.endTagHtml) + ]) + self.endTagHandler.default = self.endTagOther + + def processEOF(self): + # Stop parsing + pass + + def processCharacters(self, token): + self.parser.parseError("unexpected-char-after-frameset") + + def startTagNoframes(self, token): + return self.parser.phases["inHead"].processStartTag(token) + + def startTagOther(self, token): + self.parser.parseError("unexpected-start-tag-after-frameset", + {"name": token["name"]}) + + def endTagHtml(self, token): + self.parser.phase = self.parser.phases["afterAfterFrameset"] + + def endTagOther(self, token): + self.parser.parseError("unexpected-end-tag-after-frameset", + {"name": token["name"]}) + + class AfterAfterBodyPhase(Phase): + def __init__(self, parser, tree): + Phase.__init__(self, parser, tree) + + self.startTagHandler = _utils.MethodDispatcher([ + ("html", self.startTagHtml) + ]) + self.startTagHandler.default = self.startTagOther + + def processEOF(self): + pass + + def processComment(self, token): + self.tree.insertComment(token, self.tree.document) + + def processSpaceCharacters(self, token): + return self.parser.phases["inBody"].processSpaceCharacters(token) + + def processCharacters(self, token): + self.parser.parseError("expected-eof-but-got-char") + self.parser.phase = self.parser.phases["inBody"] + return token + + def startTagHtml(self, token): + return self.parser.phases["inBody"].processStartTag(token) + + def startTagOther(self, token): + self.parser.parseError("expected-eof-but-got-start-tag", + {"name": token["name"]}) + self.parser.phase = self.parser.phases["inBody"] + return token + + def processEndTag(self, token): + self.parser.parseError("expected-eof-but-got-end-tag", + {"name": token["name"]}) + self.parser.phase = self.parser.phases["inBody"] + return token + + class AfterAfterFramesetPhase(Phase): + def __init__(self, parser, tree): + Phase.__init__(self, parser, tree) + + self.startTagHandler = _utils.MethodDispatcher([ + ("html", self.startTagHtml), + ("noframes", self.startTagNoFrames) + ]) + self.startTagHandler.default = self.startTagOther + + def processEOF(self): + pass + + def processComment(self, token): + self.tree.insertComment(token, self.tree.document) + + def processSpaceCharacters(self, token): + return self.parser.phases["inBody"].processSpaceCharacters(token) + + def processCharacters(self, token): + self.parser.parseError("expected-eof-but-got-char") + + def startTagHtml(self, token): + return self.parser.phases["inBody"].processStartTag(token) + + def startTagNoFrames(self, token): + return self.parser.phases["inHead"].processStartTag(token) + + def startTagOther(self, token): + self.parser.parseError("expected-eof-but-got-start-tag", + {"name": token["name"]}) + + def processEndTag(self, token): + self.parser.parseError("expected-eof-but-got-end-tag", + {"name": token["name"]}) + # pylint:enable=unused-argument + + return { + "initial": InitialPhase, + "beforeHtml": BeforeHtmlPhase, + 
"beforeHead": BeforeHeadPhase, + "inHead": InHeadPhase, + "inHeadNoscript": InHeadNoscriptPhase, + "afterHead": AfterHeadPhase, + "inBody": InBodyPhase, + "text": TextPhase, + "inTable": InTablePhase, + "inTableText": InTableTextPhase, + "inCaption": InCaptionPhase, + "inColumnGroup": InColumnGroupPhase, + "inTableBody": InTableBodyPhase, + "inRow": InRowPhase, + "inCell": InCellPhase, + "inSelect": InSelectPhase, + "inSelectInTable": InSelectInTablePhase, + "inForeignContent": InForeignContentPhase, + "afterBody": AfterBodyPhase, + "inFrameset": InFramesetPhase, + "afterFrameset": AfterFramesetPhase, + "afterAfterBody": AfterAfterBodyPhase, + "afterAfterFrameset": AfterAfterFramesetPhase, + # XXX after after frameset + } + + +def adjust_attributes(token, replacements): + needs_adjustment = viewkeys(token['data']) & viewkeys(replacements) + if needs_adjustment: + token['data'] = OrderedDict((replacements.get(k, k), v) + for k, v in token['data'].items()) + + +def impliedTagToken(name, type="EndTag", attributes=None, + selfClosing=False): + if attributes is None: + attributes = {} + return {"type": tokenTypes[type], "name": name, "data": attributes, + "selfClosing": selfClosing} + + +class ParseError(Exception): + """Error in parsed document""" + pass diff --git a/env/lib/python3.6/site-packages/pip/_vendor/html5lib/serializer.py b/env/lib/python3.6/site-packages/pip/_vendor/html5lib/serializer.py new file mode 100644 index 0000000..53f4d44 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/html5lib/serializer.py @@ -0,0 +1,409 @@ +from __future__ import absolute_import, division, unicode_literals +from pip._vendor.six import text_type + +import re + +from codecs import register_error, xmlcharrefreplace_errors + +from .constants import voidElements, booleanAttributes, spaceCharacters +from .constants import rcdataElements, entities, xmlEntities +from . import treewalkers, _utils +from xml.sax.saxutils import escape + +_quoteAttributeSpecChars = "".join(spaceCharacters) + "\"'=<>`" +_quoteAttributeSpec = re.compile("[" + _quoteAttributeSpecChars + "]") +_quoteAttributeLegacy = re.compile("[" + _quoteAttributeSpecChars + + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\t\n" + "\x0b\x0c\r\x0e\x0f\x10\x11\x12\x13\x14\x15" + "\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f" + "\x20\x2f\x60\xa0\u1680\u180e\u180f\u2000" + "\u2001\u2002\u2003\u2004\u2005\u2006\u2007" + "\u2008\u2009\u200a\u2028\u2029\u202f\u205f" + "\u3000]") + + +_encode_entity_map = {} +_is_ucs4 = len("\U0010FFFF") == 1 +for k, v in list(entities.items()): + # skip multi-character entities + if ((_is_ucs4 and len(v) > 1) or + (not _is_ucs4 and len(v) > 2)): + continue + if v != "&": + if len(v) == 2: + v = _utils.surrogatePairToCodepoint(v) + else: + v = ord(v) + if v not in _encode_entity_map or k.islower(): + # prefer < over < and similarly for &, >, etc. 
+ _encode_entity_map[v] = k + + +def htmlentityreplace_errors(exc): + if isinstance(exc, (UnicodeEncodeError, UnicodeTranslateError)): + res = [] + codepoints = [] + skip = False + for i, c in enumerate(exc.object[exc.start:exc.end]): + if skip: + skip = False + continue + index = i + exc.start + if _utils.isSurrogatePair(exc.object[index:min([exc.end, index + 2])]): + codepoint = _utils.surrogatePairToCodepoint(exc.object[index:index + 2]) + skip = True + else: + codepoint = ord(c) + codepoints.append(codepoint) + for cp in codepoints: + e = _encode_entity_map.get(cp) + if e: + res.append("&") + res.append(e) + if not e.endswith(";"): + res.append(";") + else: + res.append("&#x%s;" % (hex(cp)[2:])) + return ("".join(res), exc.end) + else: + return xmlcharrefreplace_errors(exc) + + +register_error("htmlentityreplace", htmlentityreplace_errors) + + +def serialize(input, tree="etree", encoding=None, **serializer_opts): + """Serializes the input token stream using the specified treewalker + + :arg input: the token stream to serialize + + :arg tree: the treewalker to use + + :arg encoding: the encoding to use + + :arg serializer_opts: any options to pass to the + :py:class:`html5lib.serializer.HTMLSerializer` that gets created + + :returns: the tree serialized as a string + + Example: + + >>> from html5lib.html5parser import parse + >>> from html5lib.serializer import serialize + >>> token_stream = parse('<html><body><p>Hi!</p></body></html>') + >>> serialize(token_stream, omit_optional_tags=False) + '<html><head></head><body><p>Hi!</p></body></html>' + + """ + # XXX: Should we cache this? + walker = treewalkers.getTreeWalker(tree) + s = HTMLSerializer(**serializer_opts) + return s.render(walker(input), encoding) + + +class HTMLSerializer(object): + + # attribute quoting options + quote_attr_values = "legacy" # be secure by default + quote_char = '"' + use_best_quote_char = True + + # tag syntax options + omit_optional_tags = True + minimize_boolean_attributes = True + use_trailing_solidus = False + space_before_trailing_solidus = True + + # escaping options + escape_lt_in_attrs = False + escape_rcdata = False + resolve_entities = True + + # miscellaneous options + alphabetical_attributes = False + inject_meta_charset = True + strip_whitespace = False + sanitize = False + + options = ("quote_attr_values", "quote_char", "use_best_quote_char", + "omit_optional_tags", "minimize_boolean_attributes", + "use_trailing_solidus", "space_before_trailing_solidus", + "escape_lt_in_attrs", "escape_rcdata", "resolve_entities", + "alphabetical_attributes", "inject_meta_charset", + "strip_whitespace", "sanitize") + + def __init__(self, **kwargs): + """Initialize HTMLSerializer + + :arg inject_meta_charset: Whether or not to inject the meta charset. + + Defaults to ``True``. + + :arg quote_attr_values: Whether to quote attribute values that don't + require quoting per legacy browser behavior (``"legacy"``), when + required by the standard (``"spec"``), or always (``"always"``). + + Defaults to ``"legacy"``. + + :arg quote_char: Use given quote character for attribute quoting. + + Defaults to ``"`` which will use double quotes unless attribute + value contains a double quote, in which case single quotes are + used. + + :arg escape_lt_in_attrs: Whether or not to escape ``<`` in attribute + values. + + Defaults to ``False``. + + :arg escape_rcdata: Whether to escape characters that need to be + escaped within normal elements within rcdata elements such as + style. + + Defaults to ``False``. 
+ + :arg resolve_entities: Whether to resolve named character entities that + appear in the source tree. The XML predefined entities < > + & " ' are unaffected by this setting. + + Defaults to ``True``. + + :arg strip_whitespace: Whether to remove semantically meaningless + whitespace. (This compresses all whitespace to a single space + except within ``pre``.) + + Defaults to ``False``. + + :arg minimize_boolean_attributes: Shortens boolean attributes to give + just the attribute value, for example:: + + <input disabled="disabled"> + + becomes:: + + <input disabled> + + Defaults to ``True``. + + :arg use_trailing_solidus: Includes a close-tag slash at the end of the + start tag of void elements (empty elements whose end tag is + forbidden). E.g. ``<hr/>``. + + Defaults to ``False``. + + :arg space_before_trailing_solidus: Places a space immediately before + the closing slash in a tag using a trailing solidus. E.g. + ``<hr />``. Requires ``use_trailing_solidus=True``. + + Defaults to ``True``. + + :arg sanitize: Strip all unsafe or unknown constructs from output. + See :py:class:`html5lib.filters.sanitizer.Filter`. + + Defaults to ``False``. + + :arg omit_optional_tags: Omit start/end tags that are optional. + + Defaults to ``True``. + + :arg alphabetical_attributes: Reorder attributes to be in alphabetical order. + + Defaults to ``False``. + + """ + unexpected_args = frozenset(kwargs) - frozenset(self.options) + if len(unexpected_args) > 0: + raise TypeError("__init__() got an unexpected keyword argument '%s'" % next(iter(unexpected_args))) + if 'quote_char' in kwargs: + self.use_best_quote_char = False + for attr in self.options: + setattr(self, attr, kwargs.get(attr, getattr(self, attr))) + self.errors = [] + self.strict = False + + def encode(self, string): + assert(isinstance(string, text_type)) + if self.encoding: + return string.encode(self.encoding, "htmlentityreplace") + else: + return string + + def encodeStrict(self, string): + assert(isinstance(string, text_type)) + if self.encoding: + return string.encode(self.encoding, "strict") + else: + return string + + def serialize(self, treewalker, encoding=None): + # pylint:disable=too-many-nested-blocks + self.encoding = encoding + in_cdata = False + self.errors = [] + + if encoding and self.inject_meta_charset: + from .filters.inject_meta_charset import Filter + treewalker = Filter(treewalker, encoding) + # Alphabetical attributes is here under the assumption that none of + # the later filters add or change order of attributes; it needs to be + # before the sanitizer so escaped elements come out correctly + if self.alphabetical_attributes: + from .filters.alphabeticalattributes import Filter + treewalker = Filter(treewalker) + # WhitespaceFilter should be used before OptionalTagFilter + # for maximum efficiently of this latter filter + if self.strip_whitespace: + from .filters.whitespace import Filter + treewalker = Filter(treewalker) + if self.sanitize: + from .filters.sanitizer import Filter + treewalker = Filter(treewalker) + if self.omit_optional_tags: + from .filters.optionaltags import Filter + treewalker = Filter(treewalker) + + for token in treewalker: + type = token["type"] + if type == "Doctype": + doctype = "<!DOCTYPE %s" % token["name"] + + if token["publicId"]: + doctype += ' PUBLIC "%s"' % token["publicId"] + elif token["systemId"]: + doctype += " SYSTEM" + if token["systemId"]: + if token["systemId"].find('"') >= 0: + if token["systemId"].find("'") >= 0: + self.serializeError("System identifer contains both single and 
double quote characters")
+                        quote_char = "'"
+                    else:
+                        quote_char = '"'
+                    doctype += " %s%s%s" % (quote_char, token["systemId"], quote_char)
+
+                doctype += ">"
+                yield self.encodeStrict(doctype)
+
+            elif type in ("Characters", "SpaceCharacters"):
+                if type == "SpaceCharacters" or in_cdata:
+                    if in_cdata and token["data"].find("</") >= 0:
+                        self.serializeError("Unexpected </ in CDATA")
+                    yield self.encode(token["data"])
+                else:
+                    yield self.encode(escape(token["data"]))
+
+            elif type in ("StartTag", "EmptyTag"):
+                name = token["name"]
+                yield self.encodeStrict("<%s" % name)
+                if name in rcdataElements and not self.escape_rcdata:
+                    in_cdata = True
+                elif in_cdata:
+                    self.serializeError("Unexpected child element of a CDATA element")
+                for (_, attr_name), attr_value in token["data"].items():
+                    # TODO: Add namespace support here
+                    k = attr_name
+                    v = attr_value
+                    yield self.encodeStrict(' ')
+
+                    yield self.encodeStrict(k)
+                    if not self.minimize_boolean_attributes or \
+                        (k not in booleanAttributes.get(name, tuple()) and
+                         k not in booleanAttributes.get("", tuple())):
+                        yield self.encodeStrict("=")
+                        if self.quote_attr_values == "always" or len(v) == 0:
+                            quote_attr = True
+                        elif self.quote_attr_values == "spec":
+                            quote_attr = _quoteAttributeSpec.search(v) is not None
+                        elif self.quote_attr_values == "legacy":
+                            quote_attr = _quoteAttributeLegacy.search(v) is not None
+                        else:
+                            raise ValueError("quote_attr_values must be one of: "
+                                             "'always', 'spec', or 'legacy'")
+                        v = v.replace("&", "&amp;")
+                        if self.escape_lt_in_attrs:
+                            v = v.replace("<", "&lt;")
+                        if quote_attr:
+                            quote_char = self.quote_char
+                            if self.use_best_quote_char:
+                                if "'" in v and '"' not in v:
+                                    quote_char = '"'
+                                elif '"' in v and "'" not in v:
+                                    quote_char = "'"
+                            if quote_char == "'":
+                                v = v.replace("'", "&#39;")
+                            else:
+                                v = v.replace('"', "&quot;")
+                            yield self.encodeStrict(quote_char)
+                            yield self.encode(v)
+                            yield self.encodeStrict(quote_char)
+                        else:
+                            yield self.encode(v)
+                if name in voidElements and self.use_trailing_solidus:
+                    if self.space_before_trailing_solidus:
+                        yield self.encodeStrict(" /")
+                    else:
+                        yield self.encodeStrict("/")
+                yield self.encode(">")
+
+            elif type == "EndTag":
+                name = token["name"]
+                if name in rcdataElements:
+                    in_cdata = False
+                elif in_cdata:
+                    self.serializeError("Unexpected child element of a CDATA element")
+                yield self.encodeStrict("</%s>" % name)
+
+            elif type == "Comment":
+                data = token["data"]
+                if data.find("--") >= 0:
+                    self.serializeError("Comment contains --")
+                yield self.encodeStrict("<!--%s-->" % token["data"])
+
+            elif type == "Entity":
+                name = token["name"]
+                key = name + ";"
+                if key not in entities:
+                    self.serializeError("Entity %s not recognized" % name)
+                if self.resolve_entities and key not in xmlEntities:
+                    data = entities[key]
+                else:
+                    data = "&%s;" % name
+                yield self.encodeStrict(data)
+
+            else:
+                self.serializeError(token["data"])
+
+    def render(self, treewalker, encoding=None):
+        """Serializes the stream from the treewalker into a string
+
+        :arg treewalker: the treewalker to serialize
+
+        :arg encoding: the string encoding to use
+
+        :returns: the serialized tree
+
+        Example:
+
+        >>> from html5lib import parse, getTreeWalker
+        >>> from html5lib.serializer import HTMLSerializer
+        >>> token_stream = parse('<html><body>Hi!</body></html>')
+        >>> walker = getTreeWalker('etree')
+        >>> serializer = HTMLSerializer(omit_optional_tags=False)
+        >>> serializer.render(walker(token_stream))
+        '<html><head></head><body>Hi!</body></html>'
+
+        """
+        if encoding:
+            return
b"".join(list(self.serialize(treewalker, encoding))) + else: + return "".join(list(self.serialize(treewalker))) + + def serializeError(self, data="XXX ERROR MESSAGE NEEDED"): + # XXX The idea is to make data mandatory. + self.errors.append(data) + if self.strict: + raise SerializeError + + +class SerializeError(Exception): + """Error in serialized tree""" + pass diff --git a/env/lib/python3.6/site-packages/pip/_vendor/html5lib/treeadapters/genshi.py b/env/lib/python3.6/site-packages/pip/_vendor/html5lib/treeadapters/genshi.py new file mode 100644 index 0000000..61d5fb6 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/html5lib/treeadapters/genshi.py @@ -0,0 +1,54 @@ +from __future__ import absolute_import, division, unicode_literals + +from genshi.core import QName, Attrs +from genshi.core import START, END, TEXT, COMMENT, DOCTYPE + + +def to_genshi(walker): + """Convert a tree to a genshi tree + + :arg walker: the treewalker to use to walk the tree to convert it + + :returns: generator of genshi nodes + + """ + text = [] + for token in walker: + type = token["type"] + if type in ("Characters", "SpaceCharacters"): + text.append(token["data"]) + elif text: + yield TEXT, "".join(text), (None, -1, -1) + text = [] + + if type in ("StartTag", "EmptyTag"): + if token["namespace"]: + name = "{%s}%s" % (token["namespace"], token["name"]) + else: + name = token["name"] + attrs = Attrs([(QName("{%s}%s" % attr if attr[0] is not None else attr[1]), value) + for attr, value in token["data"].items()]) + yield (START, (QName(name), attrs), (None, -1, -1)) + if type == "EmptyTag": + type = "EndTag" + + if type == "EndTag": + if token["namespace"]: + name = "{%s}%s" % (token["namespace"], token["name"]) + else: + name = token["name"] + + yield END, QName(name), (None, -1, -1) + + elif type == "Comment": + yield COMMENT, token["data"], (None, -1, -1) + + elif type == "Doctype": + yield DOCTYPE, (token["name"], token["publicId"], + token["systemId"]), (None, -1, -1) + + else: + pass # FIXME: What to do? 
+ + if text: + yield TEXT, "".join(text), (None, -1, -1) diff --git a/env/lib/python3.6/site-packages/pip/_vendor/html5lib/treeadapters/sax.py b/env/lib/python3.6/site-packages/pip/_vendor/html5lib/treeadapters/sax.py new file mode 100644 index 0000000..f4ccea5 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/html5lib/treeadapters/sax.py @@ -0,0 +1,50 @@ +from __future__ import absolute_import, division, unicode_literals + +from xml.sax.xmlreader import AttributesNSImpl + +from ..constants import adjustForeignAttributes, unadjustForeignAttributes + +prefix_mapping = {} +for prefix, localName, namespace in adjustForeignAttributes.values(): + if prefix is not None: + prefix_mapping[prefix] = namespace + + +def to_sax(walker, handler): + """Call SAX-like content handler based on treewalker walker + + :arg walker: the treewalker to use to walk the tree to convert it + + :arg handler: SAX handler to use + + """ + handler.startDocument() + for prefix, namespace in prefix_mapping.items(): + handler.startPrefixMapping(prefix, namespace) + + for token in walker: + type = token["type"] + if type == "Doctype": + continue + elif type in ("StartTag", "EmptyTag"): + attrs = AttributesNSImpl(token["data"], + unadjustForeignAttributes) + handler.startElementNS((token["namespace"], token["name"]), + token["name"], + attrs) + if type == "EmptyTag": + handler.endElementNS((token["namespace"], token["name"]), + token["name"]) + elif type == "EndTag": + handler.endElementNS((token["namespace"], token["name"]), + token["name"]) + elif type in ("Characters", "SpaceCharacters"): + handler.characters(token["data"]) + elif type == "Comment": + pass + else: + assert False, "Unknown token type" + + for prefix, namespace in prefix_mapping.items(): + handler.endPrefixMapping(prefix) + handler.endDocument() diff --git a/env/lib/python3.6/site-packages/pip/_vendor/html5lib/treebuilders/base.py b/env/lib/python3.6/site-packages/pip/_vendor/html5lib/treebuilders/base.py new file mode 100644 index 0000000..73973db --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/html5lib/treebuilders/base.py @@ -0,0 +1,417 @@ +from __future__ import absolute_import, division, unicode_literals +from pip._vendor.six import text_type + +from ..constants import scopingElements, tableInsertModeElements, namespaces + +# The scope markers are inserted when entering object elements, +# marquees, table cells, and table captions, and are used to prevent formatting +# from "leaking" into tables, object elements, and marquees. 
+Marker = None + +listElementsMap = { + None: (frozenset(scopingElements), False), + "button": (frozenset(scopingElements | set([(namespaces["html"], "button")])), False), + "list": (frozenset(scopingElements | set([(namespaces["html"], "ol"), + (namespaces["html"], "ul")])), False), + "table": (frozenset([(namespaces["html"], "html"), + (namespaces["html"], "table")]), False), + "select": (frozenset([(namespaces["html"], "optgroup"), + (namespaces["html"], "option")]), True) +} + + +class Node(object): + """Represents an item in the tree""" + def __init__(self, name): + """Creates a Node + + :arg name: The tag name associated with the node + + """ + # The tag name assocaited with the node + self.name = name + # The parent of the current node (or None for the document node) + self.parent = None + # The value of the current node (applies to text nodes and comments) + self.value = None + # A dict holding name -> value pairs for attributes of the node + self.attributes = {} + # A list of child nodes of the current node. This must include all + # elements but not necessarily other node types. + self.childNodes = [] + # A list of miscellaneous flags that can be set on the node. + self._flags = [] + + def __str__(self): + attributesStr = " ".join(["%s=\"%s\"" % (name, value) + for name, value in + self.attributes.items()]) + if attributesStr: + return "<%s %s>" % (self.name, attributesStr) + else: + return "<%s>" % (self.name) + + def __repr__(self): + return "<%s>" % (self.name) + + def appendChild(self, node): + """Insert node as a child of the current node + + :arg node: the node to insert + + """ + raise NotImplementedError + + def insertText(self, data, insertBefore=None): + """Insert data as text in the current node, positioned before the + start of node insertBefore or to the end of the node's text. + + :arg data: the data to insert + + :arg insertBefore: True if you want to insert the text before the node + and False if you want to insert it after the node + + """ + raise NotImplementedError + + def insertBefore(self, node, refNode): + """Insert node as a child of the current node, before refNode in the + list of child nodes. Raises ValueError if refNode is not a child of + the current node + + :arg node: the node to insert + + :arg refNode: the child node to insert the node before + + """ + raise NotImplementedError + + def removeChild(self, node): + """Remove node from the children of the current node + + :arg node: the child node to remove + + """ + raise NotImplementedError + + def reparentChildren(self, newParent): + """Move all the children of the current node to newParent. + This is needed so that trees that don't store text as nodes move the + text in the correct way + + :arg newParent: the node to move all this node's children to + + """ + # XXX - should this method be made more general? + for child in self.childNodes: + newParent.appendChild(child) + self.childNodes = [] + + def cloneNode(self): + """Return a shallow copy of the current node i.e. 
a node with the same + name and attributes but with no parent or child nodes + """ + raise NotImplementedError + + def hasContent(self): + """Return true if the node has children or text, false otherwise + """ + raise NotImplementedError + + +class ActiveFormattingElements(list): + def append(self, node): + equalCount = 0 + if node != Marker: + for element in self[::-1]: + if element == Marker: + break + if self.nodesEqual(element, node): + equalCount += 1 + if equalCount == 3: + self.remove(element) + break + list.append(self, node) + + def nodesEqual(self, node1, node2): + if not node1.nameTuple == node2.nameTuple: + return False + + if not node1.attributes == node2.attributes: + return False + + return True + + +class TreeBuilder(object): + """Base treebuilder implementation + + * documentClass - the class to use for the bottommost node of a document + * elementClass - the class to use for HTML Elements + * commentClass - the class to use for comments + * doctypeClass - the class to use for doctypes + + """ + # pylint:disable=not-callable + + # Document class + documentClass = None + + # The class to use for creating a node + elementClass = None + + # The class to use for creating comments + commentClass = None + + # The class to use for creating doctypes + doctypeClass = None + + # Fragment class + fragmentClass = None + + def __init__(self, namespaceHTMLElements): + """Create a TreeBuilder + + :arg namespaceHTMLElements: whether or not to namespace HTML elements + + """ + if namespaceHTMLElements: + self.defaultNamespace = "http://www.w3.org/1999/xhtml" + else: + self.defaultNamespace = None + self.reset() + + def reset(self): + self.openElements = [] + self.activeFormattingElements = ActiveFormattingElements() + + # XXX - rename these to headElement, formElement + self.headPointer = None + self.formPointer = None + + self.insertFromTable = False + + self.document = self.documentClass() + + def elementInScope(self, target, variant=None): + + # If we pass a node in we match that. if we pass a string + # match any node with that name + exactNode = hasattr(target, "nameTuple") + if not exactNode: + if isinstance(target, text_type): + target = (namespaces["html"], target) + assert isinstance(target, tuple) + + listElements, invert = listElementsMap[variant] + + for node in reversed(self.openElements): + if exactNode and node == target: + return True + elif not exactNode and node.nameTuple == target: + return True + elif (invert ^ (node.nameTuple in listElements)): + return False + + assert False # We should never reach this point + + def reconstructActiveFormattingElements(self): + # Within this algorithm the order of steps described in the + # specification is not quite the same as the order of steps in the + # code. It should still do the same though. + + # Step 1: stop the algorithm when there's nothing to do. + if not self.activeFormattingElements: + return + + # Step 2 and step 3: we start with the last element. So i is -1. + i = len(self.activeFormattingElements) - 1 + entry = self.activeFormattingElements[i] + if entry == Marker or entry in self.openElements: + return + + # Step 6 + while entry != Marker and entry not in self.openElements: + if i == 0: + # This will be reset to 0 below + i = -1 + break + i -= 1 + # Step 5: let entry be one earlier in the list. 
+ entry = self.activeFormattingElements[i] + + while True: + # Step 7 + i += 1 + + # Step 8 + entry = self.activeFormattingElements[i] + clone = entry.cloneNode() # Mainly to get a new copy of the attributes + + # Step 9 + element = self.insertElement({"type": "StartTag", + "name": clone.name, + "namespace": clone.namespace, + "data": clone.attributes}) + + # Step 10 + self.activeFormattingElements[i] = element + + # Step 11 + if element == self.activeFormattingElements[-1]: + break + + def clearActiveFormattingElements(self): + entry = self.activeFormattingElements.pop() + while self.activeFormattingElements and entry != Marker: + entry = self.activeFormattingElements.pop() + + def elementInActiveFormattingElements(self, name): + """Check if an element exists between the end of the active + formatting elements and the last marker. If it does, return it, else + return false""" + + for item in self.activeFormattingElements[::-1]: + # Check for Marker first because if it's a Marker it doesn't have a + # name attribute. + if item == Marker: + break + elif item.name == name: + return item + return False + + def insertRoot(self, token): + element = self.createElement(token) + self.openElements.append(element) + self.document.appendChild(element) + + def insertDoctype(self, token): + name = token["name"] + publicId = token["publicId"] + systemId = token["systemId"] + + doctype = self.doctypeClass(name, publicId, systemId) + self.document.appendChild(doctype) + + def insertComment(self, token, parent=None): + if parent is None: + parent = self.openElements[-1] + parent.appendChild(self.commentClass(token["data"])) + + def createElement(self, token): + """Create an element but don't insert it anywhere""" + name = token["name"] + namespace = token.get("namespace", self.defaultNamespace) + element = self.elementClass(name, namespace) + element.attributes = token["data"] + return element + + def _getInsertFromTable(self): + return self._insertFromTable + + def _setInsertFromTable(self, value): + """Switch the function used to insert an element from the + normal one to the misnested table one and back again""" + self._insertFromTable = value + if value: + self.insertElement = self.insertElementTable + else: + self.insertElement = self.insertElementNormal + + insertFromTable = property(_getInsertFromTable, _setInsertFromTable) + + def insertElementNormal(self, token): + name = token["name"] + assert isinstance(name, text_type), "Element %s not unicode" % name + namespace = token.get("namespace", self.defaultNamespace) + element = self.elementClass(name, namespace) + element.attributes = token["data"] + self.openElements[-1].appendChild(element) + self.openElements.append(element) + return element + + def insertElementTable(self, token): + """Create an element and insert it into the tree""" + element = self.createElement(token) + if self.openElements[-1].name not in tableInsertModeElements: + return self.insertElementNormal(token) + else: + # We should be in the InTable mode. 
This means we want to do + # special magic element rearranging + parent, insertBefore = self.getTableMisnestedNodePosition() + if insertBefore is None: + parent.appendChild(element) + else: + parent.insertBefore(element, insertBefore) + self.openElements.append(element) + return element + + def insertText(self, data, parent=None): + """Insert text data.""" + if parent is None: + parent = self.openElements[-1] + + if (not self.insertFromTable or (self.insertFromTable and + self.openElements[-1].name + not in tableInsertModeElements)): + parent.insertText(data) + else: + # We should be in the InTable mode. This means we want to do + # special magic element rearranging + parent, insertBefore = self.getTableMisnestedNodePosition() + parent.insertText(data, insertBefore) + + def getTableMisnestedNodePosition(self): + """Get the foster parent element, and sibling to insert before + (or None) when inserting a misnested table node""" + # The foster parent element is the one which comes before the most + # recently opened table element + # XXX - this is really inelegant + lastTable = None + fosterParent = None + insertBefore = None + for elm in self.openElements[::-1]: + if elm.name == "table": + lastTable = elm + break + if lastTable: + # XXX - we should really check that this parent is actually a + # node here + if lastTable.parent: + fosterParent = lastTable.parent + insertBefore = lastTable + else: + fosterParent = self.openElements[ + self.openElements.index(lastTable) - 1] + else: + fosterParent = self.openElements[0] + return fosterParent, insertBefore + + def generateImpliedEndTags(self, exclude=None): + name = self.openElements[-1].name + # XXX td, th and tr are not actually needed + if (name in frozenset(("dd", "dt", "li", "option", "optgroup", "p", "rp", "rt")) and + name != exclude): + self.openElements.pop() + # XXX This is not entirely what the specification says. We should + # investigate it more closely. + self.generateImpliedEndTags(exclude) + + def getDocument(self): + """Return the final tree""" + return self.document + + def getFragment(self): + """Return the final fragment""" + # assert self.innerHTML + fragment = self.fragmentClass() + self.openElements[0].reparentChildren(fragment) + return fragment + + def testSerializer(self, node): + """Serialize the subtree of node in the format required by unit tests + + :arg node: the node from which to start serializing + + """ + raise NotImplementedError diff --git a/env/lib/python3.6/site-packages/pip/_vendor/html5lib/treebuilders/dom.py b/env/lib/python3.6/site-packages/pip/_vendor/html5lib/treebuilders/dom.py new file mode 100644 index 0000000..dcfac22 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/html5lib/treebuilders/dom.py @@ -0,0 +1,236 @@ +from __future__ import absolute_import, division, unicode_literals + + +from collections import MutableMapping +from xml.dom import minidom, Node +import weakref + +from . import base +from .. 
import constants +from ..constants import namespaces +from .._utils import moduleFactoryFactory + + +def getDomBuilder(DomImplementation): + Dom = DomImplementation + + class AttrList(MutableMapping): + def __init__(self, element): + self.element = element + + def __iter__(self): + return iter(self.element.attributes.keys()) + + def __setitem__(self, name, value): + if isinstance(name, tuple): + raise NotImplementedError + else: + attr = self.element.ownerDocument.createAttribute(name) + attr.value = value + self.element.attributes[name] = attr + + def __len__(self): + return len(self.element.attributes) + + def items(self): + return list(self.element.attributes.items()) + + def values(self): + return list(self.element.attributes.values()) + + def __getitem__(self, name): + if isinstance(name, tuple): + raise NotImplementedError + else: + return self.element.attributes[name].value + + def __delitem__(self, name): + if isinstance(name, tuple): + raise NotImplementedError + else: + del self.element.attributes[name] + + class NodeBuilder(base.Node): + def __init__(self, element): + base.Node.__init__(self, element.nodeName) + self.element = element + + namespace = property(lambda self: hasattr(self.element, "namespaceURI") and + self.element.namespaceURI or None) + + def appendChild(self, node): + node.parent = self + self.element.appendChild(node.element) + + def insertText(self, data, insertBefore=None): + text = self.element.ownerDocument.createTextNode(data) + if insertBefore: + self.element.insertBefore(text, insertBefore.element) + else: + self.element.appendChild(text) + + def insertBefore(self, node, refNode): + self.element.insertBefore(node.element, refNode.element) + node.parent = self + + def removeChild(self, node): + if node.element.parentNode == self.element: + self.element.removeChild(node.element) + node.parent = None + + def reparentChildren(self, newParent): + while self.element.hasChildNodes(): + child = self.element.firstChild + self.element.removeChild(child) + newParent.element.appendChild(child) + self.childNodes = [] + + def getAttributes(self): + return AttrList(self.element) + + def setAttributes(self, attributes): + if attributes: + for name, value in list(attributes.items()): + if isinstance(name, tuple): + if name[0] is not None: + qualifiedName = (name[0] + ":" + name[1]) + else: + qualifiedName = name[1] + self.element.setAttributeNS(name[2], qualifiedName, + value) + else: + self.element.setAttribute( + name, value) + attributes = property(getAttributes, setAttributes) + + def cloneNode(self): + return NodeBuilder(self.element.cloneNode(False)) + + def hasContent(self): + return self.element.hasChildNodes() + + def getNameTuple(self): + if self.namespace is None: + return namespaces["html"], self.name + else: + return self.namespace, self.name + + nameTuple = property(getNameTuple) + + class TreeBuilder(base.TreeBuilder): # pylint:disable=unused-variable + def documentClass(self): + self.dom = Dom.getDOMImplementation().createDocument(None, None, None) + return weakref.proxy(self) + + def insertDoctype(self, token): + name = token["name"] + publicId = token["publicId"] + systemId = token["systemId"] + + domimpl = Dom.getDOMImplementation() + doctype = domimpl.createDocumentType(name, publicId, systemId) + self.document.appendChild(NodeBuilder(doctype)) + if Dom == minidom: + doctype.ownerDocument = self.dom + + def elementClass(self, name, namespace=None): + if namespace is None and self.defaultNamespace is None: + node = self.dom.createElement(name) + else: + 
node = self.dom.createElementNS(namespace, name) + + return NodeBuilder(node) + + def commentClass(self, data): + return NodeBuilder(self.dom.createComment(data)) + + def fragmentClass(self): + return NodeBuilder(self.dom.createDocumentFragment()) + + def appendChild(self, node): + self.dom.appendChild(node.element) + + def testSerializer(self, element): + return testSerializer(element) + + def getDocument(self): + return self.dom + + def getFragment(self): + return base.TreeBuilder.getFragment(self).element + + def insertText(self, data, parent=None): + data = data + if parent != self: + base.TreeBuilder.insertText(self, data, parent) + else: + # HACK: allow text nodes as children of the document node + if hasattr(self.dom, '_child_node_types'): + # pylint:disable=protected-access + if Node.TEXT_NODE not in self.dom._child_node_types: + self.dom._child_node_types = list(self.dom._child_node_types) + self.dom._child_node_types.append(Node.TEXT_NODE) + self.dom.appendChild(self.dom.createTextNode(data)) + + implementation = DomImplementation + name = None + + def testSerializer(element): + element.normalize() + rv = [] + + def serializeElement(element, indent=0): + if element.nodeType == Node.DOCUMENT_TYPE_NODE: + if element.name: + if element.publicId or element.systemId: + publicId = element.publicId or "" + systemId = element.systemId or "" + rv.append("""|%s<!DOCTYPE %s "%s" "%s">""" % + (' ' * indent, element.name, publicId, systemId)) + else: + rv.append("|%s<!DOCTYPE %s>" % (' ' * indent, element.name)) + else: + rv.append("|%s<!DOCTYPE >" % (' ' * indent,)) + elif element.nodeType == Node.DOCUMENT_NODE: + rv.append("#document") + elif element.nodeType == Node.DOCUMENT_FRAGMENT_NODE: + rv.append("#document-fragment") + elif element.nodeType == Node.COMMENT_NODE: + rv.append("|%s<!-- %s -->" % (' ' * indent, element.nodeValue)) + elif element.nodeType == Node.TEXT_NODE: + rv.append("|%s\"%s\"" % (' ' * indent, element.nodeValue)) + else: + if (hasattr(element, "namespaceURI") and + element.namespaceURI is not None): + name = "%s %s" % (constants.prefixes[element.namespaceURI], + element.nodeName) + else: + name = element.nodeName + rv.append("|%s<%s>" % (' ' * indent, name)) + if element.hasAttributes(): + attributes = [] + for i in range(len(element.attributes)): + attr = element.attributes.item(i) + name = attr.nodeName + value = attr.value + ns = attr.namespaceURI + if ns: + name = "%s %s" % (constants.prefixes[ns], attr.localName) + else: + name = attr.nodeName + attributes.append((name, value)) + + for name, value in sorted(attributes): + rv.append('|%s%s="%s"' % (' ' * (indent + 2), name, value)) + indent += 2 + for child in element.childNodes: + serializeElement(child, indent) + serializeElement(element, 0) + + return "\n".join(rv) + + return locals() + + +# The actual means to get a module! +getDomModule = moduleFactoryFactory(getDomBuilder) diff --git a/env/lib/python3.6/site-packages/pip/_vendor/html5lib/treebuilders/etree.py b/env/lib/python3.6/site-packages/pip/_vendor/html5lib/treebuilders/etree.py new file mode 100644 index 0000000..0dedf44 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/html5lib/treebuilders/etree.py @@ -0,0 +1,340 @@ +from __future__ import absolute_import, division, unicode_literals +# pylint:disable=protected-access + +from pip._vendor.six import text_type + +import re + +from . import base +from .. import _ihatexml +from .. 
import constants +from ..constants import namespaces +from .._utils import moduleFactoryFactory + +tag_regexp = re.compile("{([^}]*)}(.*)") + + +def getETreeBuilder(ElementTreeImplementation, fullTree=False): + ElementTree = ElementTreeImplementation + ElementTreeCommentType = ElementTree.Comment("asd").tag + + class Element(base.Node): + def __init__(self, name, namespace=None): + self._name = name + self._namespace = namespace + self._element = ElementTree.Element(self._getETreeTag(name, + namespace)) + if namespace is None: + self.nameTuple = namespaces["html"], self._name + else: + self.nameTuple = self._namespace, self._name + self.parent = None + self._childNodes = [] + self._flags = [] + + def _getETreeTag(self, name, namespace): + if namespace is None: + etree_tag = name + else: + etree_tag = "{%s}%s" % (namespace, name) + return etree_tag + + def _setName(self, name): + self._name = name + self._element.tag = self._getETreeTag(self._name, self._namespace) + + def _getName(self): + return self._name + + name = property(_getName, _setName) + + def _setNamespace(self, namespace): + self._namespace = namespace + self._element.tag = self._getETreeTag(self._name, self._namespace) + + def _getNamespace(self): + return self._namespace + + namespace = property(_getNamespace, _setNamespace) + + def _getAttributes(self): + return self._element.attrib + + def _setAttributes(self, attributes): + # Delete existing attributes first + # XXX - there may be a better way to do this... + for key in list(self._element.attrib.keys()): + del self._element.attrib[key] + for key, value in attributes.items(): + if isinstance(key, tuple): + name = "{%s}%s" % (key[2], key[1]) + else: + name = key + self._element.set(name, value) + + attributes = property(_getAttributes, _setAttributes) + + def _getChildNodes(self): + return self._childNodes + + def _setChildNodes(self, value): + del self._element[:] + self._childNodes = [] + for element in value: + self.insertChild(element) + + childNodes = property(_getChildNodes, _setChildNodes) + + def hasContent(self): + """Return true if the node has children or text""" + return bool(self._element.text or len(self._element)) + + def appendChild(self, node): + self._childNodes.append(node) + self._element.append(node._element) + node.parent = self + + def insertBefore(self, node, refNode): + index = list(self._element).index(refNode._element) + self._element.insert(index, node._element) + node.parent = self + + def removeChild(self, node): + self._childNodes.remove(node) + self._element.remove(node._element) + node.parent = None + + def insertText(self, data, insertBefore=None): + if not(len(self._element)): + if not self._element.text: + self._element.text = "" + self._element.text += data + elif insertBefore is None: + # Insert the text as the tail of the last child element + if not self._element[-1].tail: + self._element[-1].tail = "" + self._element[-1].tail += data + else: + # Insert the text before the specified node + children = list(self._element) + index = children.index(insertBefore._element) + if index > 0: + if not self._element[index - 1].tail: + self._element[index - 1].tail = "" + self._element[index - 1].tail += data + else: + if not self._element.text: + self._element.text = "" + self._element.text += data + + def cloneNode(self): + element = type(self)(self.name, self.namespace) + for name, value in self.attributes.items(): + element.attributes[name] = value + return element + + def reparentChildren(self, newParent): + if newParent.childNodes: + 
newParent.childNodes[-1]._element.tail += self._element.text + else: + if not newParent._element.text: + newParent._element.text = "" + if self._element.text is not None: + newParent._element.text += self._element.text + self._element.text = "" + base.Node.reparentChildren(self, newParent) + + class Comment(Element): + def __init__(self, data): + # Use the superclass constructor to set all properties on the + # wrapper element + self._element = ElementTree.Comment(data) + self.parent = None + self._childNodes = [] + self._flags = [] + + def _getData(self): + return self._element.text + + def _setData(self, value): + self._element.text = value + + data = property(_getData, _setData) + + class DocumentType(Element): + def __init__(self, name, publicId, systemId): + Element.__init__(self, "<!DOCTYPE>") + self._element.text = name + self.publicId = publicId + self.systemId = systemId + + def _getPublicId(self): + return self._element.get("publicId", "") + + def _setPublicId(self, value): + if value is not None: + self._element.set("publicId", value) + + publicId = property(_getPublicId, _setPublicId) + + def _getSystemId(self): + return self._element.get("systemId", "") + + def _setSystemId(self, value): + if value is not None: + self._element.set("systemId", value) + + systemId = property(_getSystemId, _setSystemId) + + class Document(Element): + def __init__(self): + Element.__init__(self, "DOCUMENT_ROOT") + + class DocumentFragment(Element): + def __init__(self): + Element.__init__(self, "DOCUMENT_FRAGMENT") + + def testSerializer(element): + rv = [] + + def serializeElement(element, indent=0): + if not(hasattr(element, "tag")): + element = element.getroot() + if element.tag == "<!DOCTYPE>": + if element.get("publicId") or element.get("systemId"): + publicId = element.get("publicId") or "" + systemId = element.get("systemId") or "" + rv.append("""<!DOCTYPE %s "%s" "%s">""" % + (element.text, publicId, systemId)) + else: + rv.append("<!DOCTYPE %s>" % (element.text,)) + elif element.tag == "DOCUMENT_ROOT": + rv.append("#document") + if element.text is not None: + rv.append("|%s\"%s\"" % (' ' * (indent + 2), element.text)) + if element.tail is not None: + raise TypeError("Document node cannot have tail") + if hasattr(element, "attrib") and len(element.attrib): + raise TypeError("Document node cannot have attributes") + elif element.tag == ElementTreeCommentType: + rv.append("|%s<!-- %s -->" % (' ' * indent, element.text)) + else: + assert isinstance(element.tag, text_type), \ + "Expected unicode, got %s, %s" % (type(element.tag), element.tag) + nsmatch = tag_regexp.match(element.tag) + + if nsmatch is None: + name = element.tag + else: + ns, name = nsmatch.groups() + prefix = constants.prefixes[ns] + name = "%s %s" % (prefix, name) + rv.append("|%s<%s>" % (' ' * indent, name)) + + if hasattr(element, "attrib"): + attributes = [] + for name, value in element.attrib.items(): + nsmatch = tag_regexp.match(name) + if nsmatch is not None: + ns, name = nsmatch.groups() + prefix = constants.prefixes[ns] + attr_string = "%s %s" % (prefix, name) + else: + attr_string = name + attributes.append((attr_string, value)) + + for name, value in sorted(attributes): + rv.append('|%s%s="%s"' % (' ' * (indent + 2), name, value)) + if element.text: + rv.append("|%s\"%s\"" % (' ' * (indent + 2), element.text)) + indent += 2 + for child in element: + serializeElement(child, indent) + if element.tail: + rv.append("|%s\"%s\"" % (' ' * (indent - 2), element.tail)) + serializeElement(element, 0) + + return "\n".join(rv) 
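The etree-backed Element, Comment and DocumentType classes defined in this builder are normally reached through html5lib's public helpers rather than instantiated directly. A minimal round-trip sketch, assuming a standalone html5lib installation importable as "html5lib" (not the pip._vendor copy vendored by this patch); the markup string is only an example:

    import html5lib

    # Parse into the etree-backed tree produced by the builder in this module.
    doc = html5lib.parse("<p>Hello <b>world</b></p>", treebuilder="etree")

    # Serialize it back out with the serializer added earlier in this patch.
    html = html5lib.serialize(doc, tree="etree", omit_optional_tags=False)
    print(html)

html5lib.parse returns the root html element of the etree tree, so with omit_optional_tags=False the serialized output also contains the implied head and body elements.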
+ + def tostring(element): # pylint:disable=unused-variable + """Serialize an element and its child nodes to a string""" + rv = [] + filter = _ihatexml.InfosetFilter() + + def serializeElement(element): + if isinstance(element, ElementTree.ElementTree): + element = element.getroot() + + if element.tag == "<!DOCTYPE>": + if element.get("publicId") or element.get("systemId"): + publicId = element.get("publicId") or "" + systemId = element.get("systemId") or "" + rv.append("""<!DOCTYPE %s PUBLIC "%s" "%s">""" % + (element.text, publicId, systemId)) + else: + rv.append("<!DOCTYPE %s>" % (element.text,)) + elif element.tag == "DOCUMENT_ROOT": + if element.text is not None: + rv.append(element.text) + if element.tail is not None: + raise TypeError("Document node cannot have tail") + if hasattr(element, "attrib") and len(element.attrib): + raise TypeError("Document node cannot have attributes") + + for child in element: + serializeElement(child) + + elif element.tag == ElementTreeCommentType: + rv.append("<!--%s-->" % (element.text,)) + else: + # This is assumed to be an ordinary element + if not element.attrib: + rv.append("<%s>" % (filter.fromXmlName(element.tag),)) + else: + attr = " ".join(["%s=\"%s\"" % ( + filter.fromXmlName(name), value) + for name, value in element.attrib.items()]) + rv.append("<%s %s>" % (element.tag, attr)) + if element.text: + rv.append(element.text) + + for child in element: + serializeElement(child) + + rv.append("</%s>" % (element.tag,)) + + if element.tail: + rv.append(element.tail) + + serializeElement(element) + + return "".join(rv) + + class TreeBuilder(base.TreeBuilder): # pylint:disable=unused-variable + documentClass = Document + doctypeClass = DocumentType + elementClass = Element + commentClass = Comment + fragmentClass = DocumentFragment + implementation = ElementTreeImplementation + + def testSerializer(self, element): + return testSerializer(element) + + def getDocument(self): + if fullTree: + return self.document._element + else: + if self.defaultNamespace is not None: + return self.document._element.find( + "{%s}html" % self.defaultNamespace) + else: + return self.document._element.find("html") + + def getFragment(self): + return base.TreeBuilder.getFragment(self)._element + + return locals() + + +getETreeModule = moduleFactoryFactory(getETreeBuilder) diff --git a/env/lib/python3.6/site-packages/pip/_vendor/html5lib/treebuilders/etree_lxml.py b/env/lib/python3.6/site-packages/pip/_vendor/html5lib/treebuilders/etree_lxml.py new file mode 100644 index 0000000..ca12a99 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/html5lib/treebuilders/etree_lxml.py @@ -0,0 +1,366 @@ +"""Module for supporting the lxml.etree library. The idea here is to use as much +of the native library as possible, without using fragile hacks like custom element +names that break between releases. The downside of this is that we cannot represent +all possible trees; specifically the following are known to cause problems: + +Text or comments as siblings of the root element +Docypes with no name + +When any of these things occur, we emit a DataLossWarning +""" + +from __future__ import absolute_import, division, unicode_literals +# pylint:disable=protected-access + +import warnings +import re +import sys + +from . import base +from ..constants import DataLossWarning +from .. import constants +from . import etree as etree_builders +from .. 
import _ihatexml + +import lxml.etree as etree + + +fullTree = True +tag_regexp = re.compile("{([^}]*)}(.*)") + +comment_type = etree.Comment("asd").tag + + +class DocumentType(object): + def __init__(self, name, publicId, systemId): + self.name = name + self.publicId = publicId + self.systemId = systemId + + +class Document(object): + def __init__(self): + self._elementTree = None + self._childNodes = [] + + def appendChild(self, element): + self._elementTree.getroot().addnext(element._element) + + def _getChildNodes(self): + return self._childNodes + + childNodes = property(_getChildNodes) + + +def testSerializer(element): + rv = [] + infosetFilter = _ihatexml.InfosetFilter(preventDoubleDashComments=True) + + def serializeElement(element, indent=0): + if not hasattr(element, "tag"): + if hasattr(element, "getroot"): + # Full tree case + rv.append("#document") + if element.docinfo.internalDTD: + if not (element.docinfo.public_id or + element.docinfo.system_url): + dtd_str = "<!DOCTYPE %s>" % element.docinfo.root_name + else: + dtd_str = """<!DOCTYPE %s "%s" "%s">""" % ( + element.docinfo.root_name, + element.docinfo.public_id, + element.docinfo.system_url) + rv.append("|%s%s" % (' ' * (indent + 2), dtd_str)) + next_element = element.getroot() + while next_element.getprevious() is not None: + next_element = next_element.getprevious() + while next_element is not None: + serializeElement(next_element, indent + 2) + next_element = next_element.getnext() + elif isinstance(element, str) or isinstance(element, bytes): + # Text in a fragment + assert isinstance(element, str) or sys.version_info[0] == 2 + rv.append("|%s\"%s\"" % (' ' * indent, element)) + else: + # Fragment case + rv.append("#document-fragment") + for next_element in element: + serializeElement(next_element, indent + 2) + elif element.tag == comment_type: + rv.append("|%s<!-- %s -->" % (' ' * indent, element.text)) + if hasattr(element, "tail") and element.tail: + rv.append("|%s\"%s\"" % (' ' * indent, element.tail)) + else: + assert isinstance(element, etree._Element) + nsmatch = etree_builders.tag_regexp.match(element.tag) + if nsmatch is not None: + ns = nsmatch.group(1) + tag = nsmatch.group(2) + prefix = constants.prefixes[ns] + rv.append("|%s<%s %s>" % (' ' * indent, prefix, + infosetFilter.fromXmlName(tag))) + else: + rv.append("|%s<%s>" % (' ' * indent, + infosetFilter.fromXmlName(element.tag))) + + if hasattr(element, "attrib"): + attributes = [] + for name, value in element.attrib.items(): + nsmatch = tag_regexp.match(name) + if nsmatch is not None: + ns, name = nsmatch.groups() + name = infosetFilter.fromXmlName(name) + prefix = constants.prefixes[ns] + attr_string = "%s %s" % (prefix, name) + else: + attr_string = infosetFilter.fromXmlName(name) + attributes.append((attr_string, value)) + + for name, value in sorted(attributes): + rv.append('|%s%s="%s"' % (' ' * (indent + 2), name, value)) + + if element.text: + rv.append("|%s\"%s\"" % (' ' * (indent + 2), element.text)) + indent += 2 + for child in element: + serializeElement(child, indent) + if hasattr(element, "tail") and element.tail: + rv.append("|%s\"%s\"" % (' ' * (indent - 2), element.tail)) + serializeElement(element, 0) + + return "\n".join(rv) + + +def tostring(element): + """Serialize an element and its child nodes to a string""" + rv = [] + + def serializeElement(element): + if not hasattr(element, "tag"): + if element.docinfo.internalDTD: + if element.docinfo.doctype: + dtd_str = element.docinfo.doctype + else: + dtd_str = "<!DOCTYPE %s>" % 
element.docinfo.root_name + rv.append(dtd_str) + serializeElement(element.getroot()) + + elif element.tag == comment_type: + rv.append("<!--%s-->" % (element.text,)) + + else: + # This is assumed to be an ordinary element + if not element.attrib: + rv.append("<%s>" % (element.tag,)) + else: + attr = " ".join(["%s=\"%s\"" % (name, value) + for name, value in element.attrib.items()]) + rv.append("<%s %s>" % (element.tag, attr)) + if element.text: + rv.append(element.text) + + for child in element: + serializeElement(child) + + rv.append("</%s>" % (element.tag,)) + + if hasattr(element, "tail") and element.tail: + rv.append(element.tail) + + serializeElement(element) + + return "".join(rv) + + +class TreeBuilder(base.TreeBuilder): + documentClass = Document + doctypeClass = DocumentType + elementClass = None + commentClass = None + fragmentClass = Document + implementation = etree + + def __init__(self, namespaceHTMLElements, fullTree=False): + builder = etree_builders.getETreeModule(etree, fullTree=fullTree) + infosetFilter = self.infosetFilter = _ihatexml.InfosetFilter(preventDoubleDashComments=True) + self.namespaceHTMLElements = namespaceHTMLElements + + class Attributes(dict): + def __init__(self, element, value=None): + if value is None: + value = {} + self._element = element + dict.__init__(self, value) # pylint:disable=non-parent-init-called + for key, value in self.items(): + if isinstance(key, tuple): + name = "{%s}%s" % (key[2], infosetFilter.coerceAttribute(key[1])) + else: + name = infosetFilter.coerceAttribute(key) + self._element._element.attrib[name] = value + + def __setitem__(self, key, value): + dict.__setitem__(self, key, value) + if isinstance(key, tuple): + name = "{%s}%s" % (key[2], infosetFilter.coerceAttribute(key[1])) + else: + name = infosetFilter.coerceAttribute(key) + self._element._element.attrib[name] = value + + class Element(builder.Element): + def __init__(self, name, namespace): + name = infosetFilter.coerceElement(name) + builder.Element.__init__(self, name, namespace=namespace) + self._attributes = Attributes(self) + + def _setName(self, name): + self._name = infosetFilter.coerceElement(name) + self._element.tag = self._getETreeTag( + self._name, self._namespace) + + def _getName(self): + return infosetFilter.fromXmlName(self._name) + + name = property(_getName, _setName) + + def _getAttributes(self): + return self._attributes + + def _setAttributes(self, attributes): + self._attributes = Attributes(self, attributes) + + attributes = property(_getAttributes, _setAttributes) + + def insertText(self, data, insertBefore=None): + data = infosetFilter.coerceCharacters(data) + builder.Element.insertText(self, data, insertBefore) + + def appendChild(self, child): + builder.Element.appendChild(self, child) + + class Comment(builder.Comment): + def __init__(self, data): + data = infosetFilter.coerceComment(data) + builder.Comment.__init__(self, data) + + def _setData(self, data): + data = infosetFilter.coerceComment(data) + self._element.text = data + + def _getData(self): + return self._element.text + + data = property(_getData, _setData) + + self.elementClass = Element + self.commentClass = Comment + # self.fragmentClass = builder.DocumentFragment + base.TreeBuilder.__init__(self, namespaceHTMLElements) + + def reset(self): + base.TreeBuilder.reset(self) + self.insertComment = self.insertCommentInitial + self.initial_comments = [] + self.doctype = None + + def testSerializer(self, element): + return testSerializer(element) + + def getDocument(self): + if 
fullTree: + return self.document._elementTree + else: + return self.document._elementTree.getroot() + + def getFragment(self): + fragment = [] + element = self.openElements[0]._element + if element.text: + fragment.append(element.text) + fragment.extend(list(element)) + if element.tail: + fragment.append(element.tail) + return fragment + + def insertDoctype(self, token): + name = token["name"] + publicId = token["publicId"] + systemId = token["systemId"] + + if not name: + warnings.warn("lxml cannot represent empty doctype", DataLossWarning) + self.doctype = None + else: + coercedName = self.infosetFilter.coerceElement(name) + if coercedName != name: + warnings.warn("lxml cannot represent non-xml doctype", DataLossWarning) + + doctype = self.doctypeClass(coercedName, publicId, systemId) + self.doctype = doctype + + def insertCommentInitial(self, data, parent=None): + assert parent is None or parent is self.document + assert self.document._elementTree is None + self.initial_comments.append(data) + + def insertCommentMain(self, data, parent=None): + if (parent == self.document and + self.document._elementTree.getroot()[-1].tag == comment_type): + warnings.warn("lxml cannot represent adjacent comments beyond the root elements", DataLossWarning) + super(TreeBuilder, self).insertComment(data, parent) + + def insertRoot(self, token): + # Because of the way libxml2 works, it doesn't seem to be possible to + # alter information like the doctype after the tree has been parsed. + # Therefore we need to use the built-in parser to create our initial + # tree, after which we can add elements like normal + docStr = "" + if self.doctype: + assert self.doctype.name + docStr += "<!DOCTYPE %s" % self.doctype.name + if (self.doctype.publicId is not None or + self.doctype.systemId is not None): + docStr += (' PUBLIC "%s" ' % + (self.infosetFilter.coercePubid(self.doctype.publicId or ""))) + if self.doctype.systemId: + sysid = self.doctype.systemId + if sysid.find("'") >= 0 and sysid.find('"') >= 0: + warnings.warn("DOCTYPE system cannot contain single and double quotes", DataLossWarning) + sysid = sysid.replace("'", 'U00027') + if sysid.find("'") >= 0: + docStr += '"%s"' % sysid + else: + docStr += "'%s'" % sysid + else: + docStr += "''" + docStr += ">" + if self.doctype.name != token["name"]: + warnings.warn("lxml cannot represent doctype with a different name to the root element", DataLossWarning) + docStr += "<THIS_SHOULD_NEVER_APPEAR_PUBLICLY/>" + root = etree.fromstring(docStr) + + # Append the initial comments: + for comment_token in self.initial_comments: + comment = self.commentClass(comment_token["data"]) + root.addprevious(comment._element) + + # Create the root document and add the ElementTree to it + self.document = self.documentClass() + self.document._elementTree = root.getroottree() + + # Give the root element the right name + name = token["name"] + namespace = token.get("namespace", self.defaultNamespace) + if namespace is None: + etree_tag = name + else: + etree_tag = "{%s}%s" % (namespace, name) + root.tag = etree_tag + + # Add the root element to the internal child/open data structures + root_element = self.elementClass(name, namespace) + root_element._element = root + self.document._childNodes.append(root_element) + self.openElements.append(root_element) + + # Reset to the default insert comment function + self.insertComment = self.insertCommentMain diff --git a/env/lib/python3.6/site-packages/pip/_vendor/html5lib/treewalkers/base.py 
b/env/lib/python3.6/site-packages/pip/_vendor/html5lib/treewalkers/base.py new file mode 100644 index 0000000..80c474c --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/html5lib/treewalkers/base.py @@ -0,0 +1,252 @@ +from __future__ import absolute_import, division, unicode_literals + +from xml.dom import Node +from ..constants import namespaces, voidElements, spaceCharacters + +__all__ = ["DOCUMENT", "DOCTYPE", "TEXT", "ELEMENT", "COMMENT", "ENTITY", "UNKNOWN", + "TreeWalker", "NonRecursiveTreeWalker"] + +DOCUMENT = Node.DOCUMENT_NODE +DOCTYPE = Node.DOCUMENT_TYPE_NODE +TEXT = Node.TEXT_NODE +ELEMENT = Node.ELEMENT_NODE +COMMENT = Node.COMMENT_NODE +ENTITY = Node.ENTITY_NODE +UNKNOWN = "<#UNKNOWN#>" + +spaceCharacters = "".join(spaceCharacters) + + +class TreeWalker(object): + """Walks a tree yielding tokens + + Tokens are dicts that all have a ``type`` field specifying the type of the + token. + + """ + def __init__(self, tree): + """Creates a TreeWalker + + :arg tree: the tree to walk + + """ + self.tree = tree + + def __iter__(self): + raise NotImplementedError + + def error(self, msg): + """Generates an error token with the given message + + :arg msg: the error message + + :returns: SerializeError token + + """ + return {"type": "SerializeError", "data": msg} + + def emptyTag(self, namespace, name, attrs, hasChildren=False): + """Generates an EmptyTag token + + :arg namespace: the namespace of the token--can be ``None`` + + :arg name: the name of the element + + :arg attrs: the attributes of the element as a dict + + :arg hasChildren: whether or not to yield a SerializationError because + this tag shouldn't have children + + :returns: EmptyTag token + + """ + yield {"type": "EmptyTag", "name": name, + "namespace": namespace, + "data": attrs} + if hasChildren: + yield self.error("Void element has children") + + def startTag(self, namespace, name, attrs): + """Generates a StartTag token + + :arg namespace: the namespace of the token--can be ``None`` + + :arg name: the name of the element + + :arg attrs: the attributes of the element as a dict + + :returns: StartTag token + + """ + return {"type": "StartTag", + "name": name, + "namespace": namespace, + "data": attrs} + + def endTag(self, namespace, name): + """Generates an EndTag token + + :arg namespace: the namespace of the token--can be ``None`` + + :arg name: the name of the element + + :returns: EndTag token + + """ + return {"type": "EndTag", + "name": name, + "namespace": namespace} + + def text(self, data): + """Generates SpaceCharacters and Characters tokens + + Depending on what's in the data, this generates one or more + ``SpaceCharacters`` and ``Characters`` tokens. 
+ + For example: + + >>> from html5lib.treewalkers.base import TreeWalker + >>> # Give it an empty tree just so it instantiates + >>> walker = TreeWalker([]) + >>> list(walker.text('')) + [] + >>> list(walker.text(' ')) + [{u'data': ' ', u'type': u'SpaceCharacters'}] + >>> list(walker.text(' abc ')) # doctest: +NORMALIZE_WHITESPACE + [{u'data': ' ', u'type': u'SpaceCharacters'}, + {u'data': u'abc', u'type': u'Characters'}, + {u'data': u' ', u'type': u'SpaceCharacters'}] + + :arg data: the text data + + :returns: one or more ``SpaceCharacters`` and ``Characters`` tokens + + """ + data = data + middle = data.lstrip(spaceCharacters) + left = data[:len(data) - len(middle)] + if left: + yield {"type": "SpaceCharacters", "data": left} + data = middle + middle = data.rstrip(spaceCharacters) + right = data[len(middle):] + if middle: + yield {"type": "Characters", "data": middle} + if right: + yield {"type": "SpaceCharacters", "data": right} + + def comment(self, data): + """Generates a Comment token + + :arg data: the comment + + :returns: Comment token + + """ + return {"type": "Comment", "data": data} + + def doctype(self, name, publicId=None, systemId=None): + """Generates a Doctype token + + :arg name: + + :arg publicId: + + :arg systemId: + + :returns: the Doctype token + + """ + return {"type": "Doctype", + "name": name, + "publicId": publicId, + "systemId": systemId} + + def entity(self, name): + """Generates an Entity token + + :arg name: the entity name + + :returns: an Entity token + + """ + return {"type": "Entity", "name": name} + + def unknown(self, nodeType): + """Handles unknown node types""" + return self.error("Unknown node type: " + nodeType) + + +class NonRecursiveTreeWalker(TreeWalker): + def getNodeDetails(self, node): + raise NotImplementedError + + def getFirstChild(self, node): + raise NotImplementedError + + def getNextSibling(self, node): + raise NotImplementedError + + def getParentNode(self, node): + raise NotImplementedError + + def __iter__(self): + currentNode = self.tree + while currentNode is not None: + details = self.getNodeDetails(currentNode) + type, details = details[0], details[1:] + hasChildren = False + + if type == DOCTYPE: + yield self.doctype(*details) + + elif type == TEXT: + for token in self.text(*details): + yield token + + elif type == ELEMENT: + namespace, name, attributes, hasChildren = details + if (not namespace or namespace == namespaces["html"]) and name in voidElements: + for token in self.emptyTag(namespace, name, attributes, + hasChildren): + yield token + hasChildren = False + else: + yield self.startTag(namespace, name, attributes) + + elif type == COMMENT: + yield self.comment(details[0]) + + elif type == ENTITY: + yield self.entity(details[0]) + + elif type == DOCUMENT: + hasChildren = True + + else: + yield self.unknown(details[0]) + + if hasChildren: + firstChild = self.getFirstChild(currentNode) + else: + firstChild = None + + if firstChild is not None: + currentNode = firstChild + else: + while currentNode is not None: + details = self.getNodeDetails(currentNode) + type, details = details[0], details[1:] + if type == ELEMENT: + namespace, name, attributes, hasChildren = details + if (namespace and namespace != namespaces["html"]) or name not in voidElements: + yield self.endTag(namespace, name) + if self.tree is currentNode: + currentNode = None + break + nextSibling = self.getNextSibling(currentNode) + if nextSibling is not None: + currentNode = nextSibling + break + else: + currentNode = self.getParentNode(currentNode) diff --git 
a/env/lib/python3.6/site-packages/pip/_vendor/html5lib/treewalkers/dom.py b/env/lib/python3.6/site-packages/pip/_vendor/html5lib/treewalkers/dom.py new file mode 100644 index 0000000..b0c89b0 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/html5lib/treewalkers/dom.py @@ -0,0 +1,43 @@ +from __future__ import absolute_import, division, unicode_literals + +from xml.dom import Node + +from . import base + + +class TreeWalker(base.NonRecursiveTreeWalker): + def getNodeDetails(self, node): + if node.nodeType == Node.DOCUMENT_TYPE_NODE: + return base.DOCTYPE, node.name, node.publicId, node.systemId + + elif node.nodeType in (Node.TEXT_NODE, Node.CDATA_SECTION_NODE): + return base.TEXT, node.nodeValue + + elif node.nodeType == Node.ELEMENT_NODE: + attrs = {} + for attr in list(node.attributes.keys()): + attr = node.getAttributeNode(attr) + if attr.namespaceURI: + attrs[(attr.namespaceURI, attr.localName)] = attr.value + else: + attrs[(None, attr.name)] = attr.value + return (base.ELEMENT, node.namespaceURI, node.nodeName, + attrs, node.hasChildNodes()) + + elif node.nodeType == Node.COMMENT_NODE: + return base.COMMENT, node.nodeValue + + elif node.nodeType in (Node.DOCUMENT_NODE, Node.DOCUMENT_FRAGMENT_NODE): + return (base.DOCUMENT,) + + else: + return base.UNKNOWN, node.nodeType + + def getFirstChild(self, node): + return node.firstChild + + def getNextSibling(self, node): + return node.nextSibling + + def getParentNode(self, node): + return node.parentNode diff --git a/env/lib/python3.6/site-packages/pip/_vendor/html5lib/treewalkers/etree.py b/env/lib/python3.6/site-packages/pip/_vendor/html5lib/treewalkers/etree.py new file mode 100644 index 0000000..95fc0c1 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/html5lib/treewalkers/etree.py @@ -0,0 +1,130 @@ +from __future__ import absolute_import, division, unicode_literals + +from collections import OrderedDict +import re + +from pip._vendor.six import string_types + +from . import base +from .._utils import moduleFactoryFactory + +tag_regexp = re.compile("{([^}]*)}(.*)") + + +def getETreeBuilder(ElementTreeImplementation): + ElementTree = ElementTreeImplementation + ElementTreeCommentType = ElementTree.Comment("asd").tag + + class TreeWalker(base.NonRecursiveTreeWalker): # pylint:disable=unused-variable + """Given the particular ElementTree representation, this implementation, + to avoid using recursion, returns "nodes" as tuples with the following + content: + + 1. The current element + + 2. The index of the element relative to its parent + + 3. A stack of ancestor elements + + 4. 
A flag "text", "tail" or None to indicate if the current node is a + text node; either the text or tail of the current element (1) + """ + def getNodeDetails(self, node): + if isinstance(node, tuple): # It might be the root Element + elt, _, _, flag = node + if flag in ("text", "tail"): + return base.TEXT, getattr(elt, flag) + else: + node = elt + + if not(hasattr(node, "tag")): + node = node.getroot() + + if node.tag in ("DOCUMENT_ROOT", "DOCUMENT_FRAGMENT"): + return (base.DOCUMENT,) + + elif node.tag == "<!DOCTYPE>": + return (base.DOCTYPE, node.text, + node.get("publicId"), node.get("systemId")) + + elif node.tag == ElementTreeCommentType: + return base.COMMENT, node.text + + else: + assert isinstance(node.tag, string_types), type(node.tag) + # This is assumed to be an ordinary element + match = tag_regexp.match(node.tag) + if match: + namespace, tag = match.groups() + else: + namespace = None + tag = node.tag + attrs = OrderedDict() + for name, value in list(node.attrib.items()): + match = tag_regexp.match(name) + if match: + attrs[(match.group(1), match.group(2))] = value + else: + attrs[(None, name)] = value + return (base.ELEMENT, namespace, tag, + attrs, len(node) or node.text) + + def getFirstChild(self, node): + if isinstance(node, tuple): + element, key, parents, flag = node + else: + element, key, parents, flag = node, None, [], None + + if flag in ("text", "tail"): + return None + else: + if element.text: + return element, key, parents, "text" + elif len(element): + parents.append(element) + return element[0], 0, parents, None + else: + return None + + def getNextSibling(self, node): + if isinstance(node, tuple): + element, key, parents, flag = node + else: + return None + + if flag == "text": + if len(element): + parents.append(element) + return element[0], 0, parents, None + else: + return None + else: + if element.tail and flag != "tail": + return element, key, parents, "tail" + elif key < len(parents[-1]) - 1: + return parents[-1][key + 1], key + 1, parents, None + else: + return None + + def getParentNode(self, node): + if isinstance(node, tuple): + element, key, parents, flag = node + else: + return None + + if flag == "text": + if not parents: + return element + else: + return element, key, parents, None + else: + parent = parents.pop() + if not parents: + return parent + else: + assert list(parents[-1]).count(parent) == 1 + return parent, list(parents[-1]).index(parent), parents, None + + return locals() + +getETreeModule = moduleFactoryFactory(getETreeBuilder) diff --git a/env/lib/python3.6/site-packages/pip/_vendor/html5lib/treewalkers/etree_lxml.py b/env/lib/python3.6/site-packages/pip/_vendor/html5lib/treewalkers/etree_lxml.py new file mode 100644 index 0000000..e81ddf3 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/html5lib/treewalkers/etree_lxml.py @@ -0,0 +1,213 @@ +from __future__ import absolute_import, division, unicode_literals +from pip._vendor.six import text_type + +from lxml import etree +from ..treebuilders.etree import tag_regexp + +from . import base + +from .. 
import _ihatexml + + +def ensure_str(s): + if s is None: + return None + elif isinstance(s, text_type): + return s + else: + return s.decode("ascii", "strict") + + +class Root(object): + def __init__(self, et): + self.elementtree = et + self.children = [] + + try: + if et.docinfo.internalDTD: + self.children.append(Doctype(self, + ensure_str(et.docinfo.root_name), + ensure_str(et.docinfo.public_id), + ensure_str(et.docinfo.system_url))) + except AttributeError: + pass + + try: + node = et.getroot() + except AttributeError: + node = et + + while node.getprevious() is not None: + node = node.getprevious() + while node is not None: + self.children.append(node) + node = node.getnext() + + self.text = None + self.tail = None + + def __getitem__(self, key): + return self.children[key] + + def getnext(self): + return None + + def __len__(self): + return 1 + + +class Doctype(object): + def __init__(self, root_node, name, public_id, system_id): + self.root_node = root_node + self.name = name + self.public_id = public_id + self.system_id = system_id + + self.text = None + self.tail = None + + def getnext(self): + return self.root_node.children[1] + + +class FragmentRoot(Root): + def __init__(self, children): + self.children = [FragmentWrapper(self, child) for child in children] + self.text = self.tail = None + + def getnext(self): + return None + + +class FragmentWrapper(object): + def __init__(self, fragment_root, obj): + self.root_node = fragment_root + self.obj = obj + if hasattr(self.obj, 'text'): + self.text = ensure_str(self.obj.text) + else: + self.text = None + if hasattr(self.obj, 'tail'): + self.tail = ensure_str(self.obj.tail) + else: + self.tail = None + + def __getattr__(self, name): + return getattr(self.obj, name) + + def getnext(self): + siblings = self.root_node.children + idx = siblings.index(self) + if idx < len(siblings) - 1: + return siblings[idx + 1] + else: + return None + + def __getitem__(self, key): + return self.obj[key] + + def __bool__(self): + return bool(self.obj) + + def getparent(self): + return None + + def __str__(self): + return str(self.obj) + + def __unicode__(self): + return str(self.obj) + + def __len__(self): + return len(self.obj) + + +class TreeWalker(base.NonRecursiveTreeWalker): + def __init__(self, tree): + # pylint:disable=redefined-variable-type + if isinstance(tree, list): + self.fragmentChildren = set(tree) + tree = FragmentRoot(tree) + else: + self.fragmentChildren = set() + tree = Root(tree) + base.NonRecursiveTreeWalker.__init__(self, tree) + self.filter = _ihatexml.InfosetFilter() + + def getNodeDetails(self, node): + if isinstance(node, tuple): # Text node + node, key = node + assert key in ("text", "tail"), "Text nodes are text or tail, found %s" % key + return base.TEXT, ensure_str(getattr(node, key)) + + elif isinstance(node, Root): + return (base.DOCUMENT,) + + elif isinstance(node, Doctype): + return base.DOCTYPE, node.name, node.public_id, node.system_id + + elif isinstance(node, FragmentWrapper) and not hasattr(node, "tag"): + return base.TEXT, ensure_str(node.obj) + + elif node.tag == etree.Comment: + return base.COMMENT, ensure_str(node.text) + + elif node.tag == etree.Entity: + return base.ENTITY, ensure_str(node.text)[1:-1] # strip &; + + else: + # This is assumed to be an ordinary element + match = tag_regexp.match(ensure_str(node.tag)) + if match: + namespace, tag = match.groups() + else: + namespace = None + tag = ensure_str(node.tag) + attrs = {} + for name, value in list(node.attrib.items()): + name = ensure_str(name) + value = 
ensure_str(value) + match = tag_regexp.match(name) + if match: + attrs[(match.group(1), match.group(2))] = value + else: + attrs[(None, name)] = value + return (base.ELEMENT, namespace, self.filter.fromXmlName(tag), + attrs, len(node) > 0 or node.text) + + def getFirstChild(self, node): + assert not isinstance(node, tuple), "Text nodes have no children" + + assert len(node) or node.text, "Node has no children" + if node.text: + return (node, "text") + else: + return node[0] + + def getNextSibling(self, node): + if isinstance(node, tuple): # Text node + node, key = node + assert key in ("text", "tail"), "Text nodes are text or tail, found %s" % key + if key == "text": + # XXX: we cannot use a "bool(node) and node[0] or None" construct here + # because node[0] might evaluate to False if it has no child element + if len(node): + return node[0] + else: + return None + else: # tail + return node.getnext() + + return (node, "tail") if node.tail else node.getnext() + + def getParentNode(self, node): + if isinstance(node, tuple): # Text node + node, key = node + assert key in ("text", "tail"), "Text nodes are text or tail, found %s" % key + if key == "text": + return node + # else: fallback to "normal" processing + elif node in self.fragmentChildren: + return None + + return node.getparent() diff --git a/env/lib/python3.6/site-packages/pip/_vendor/html5lib/treewalkers/genshi.py b/env/lib/python3.6/site-packages/pip/_vendor/html5lib/treewalkers/genshi.py new file mode 100644 index 0000000..7483be2 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/html5lib/treewalkers/genshi.py @@ -0,0 +1,69 @@ +from __future__ import absolute_import, division, unicode_literals + +from genshi.core import QName +from genshi.core import START, END, XML_NAMESPACE, DOCTYPE, TEXT +from genshi.core import START_NS, END_NS, START_CDATA, END_CDATA, PI, COMMENT + +from . import base + +from ..constants import voidElements, namespaces + + +class TreeWalker(base.TreeWalker): + def __iter__(self): + # Buffer the events so we can pass in the following one + previous = None + for event in self.tree: + if previous is not None: + for token in self.tokens(previous, event): + yield token + previous = event + + # Don't forget the final event! 
+ if previous is not None: + for token in self.tokens(previous, None): + yield token + + def tokens(self, event, next): + kind, data, _ = event + if kind == START: + tag, attribs = data + name = tag.localname + namespace = tag.namespace + converted_attribs = {} + for k, v in attribs: + if isinstance(k, QName): + converted_attribs[(k.namespace, k.localname)] = v + else: + converted_attribs[(None, k)] = v + + if namespace == namespaces["html"] and name in voidElements: + for token in self.emptyTag(namespace, name, converted_attribs, + not next or next[0] != END or + next[1] != tag): + yield token + else: + yield self.startTag(namespace, name, converted_attribs) + + elif kind == END: + name = data.localname + namespace = data.namespace + if namespace != namespaces["html"] or name not in voidElements: + yield self.endTag(namespace, name) + + elif kind == COMMENT: + yield self.comment(data) + + elif kind == TEXT: + for token in self.text(data): + yield token + + elif kind == DOCTYPE: + yield self.doctype(*data) + + elif kind in (XML_NAMESPACE, DOCTYPE, START_NS, END_NS, + START_CDATA, END_CDATA, PI): + pass + + else: + yield self.unknown(kind) diff --git a/env/lib/python3.6/site-packages/pip/_vendor/idna/codec.py b/env/lib/python3.6/site-packages/pip/_vendor/idna/codec.py new file mode 100644 index 0000000..98c65ea --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/idna/codec.py @@ -0,0 +1,118 @@ +from .core import encode, decode, alabel, ulabel, IDNAError +import codecs +import re + +_unicode_dots_re = re.compile(u'[\u002e\u3002\uff0e\uff61]') + +class Codec(codecs.Codec): + + def encode(self, data, errors='strict'): + + if errors != 'strict': + raise IDNAError("Unsupported error handling \"{0}\"".format(errors)) + + if not data: + return "", 0 + + return encode(data), len(data) + + def decode(self, data, errors='strict'): + + if errors != 'strict': + raise IDNAError("Unsupported error handling \"{0}\"".format(errors)) + + if not data: + return u"", 0 + + return decode(data), len(data) + +class IncrementalEncoder(codecs.BufferedIncrementalEncoder): + def _buffer_encode(self, data, errors, final): + if errors != 'strict': + raise IDNAError("Unsupported error handling \"{0}\"".format(errors)) + + if not data: + return ("", 0) + + labels = _unicode_dots_re.split(data) + trailing_dot = u'' + if labels: + if not labels[-1]: + trailing_dot = '.' + del labels[-1] + elif not final: + # Keep potentially unfinished label until the next call + del labels[-1] + if labels: + trailing_dot = '.' + + result = [] + size = 0 + for label in labels: + result.append(alabel(label)) + if size: + size += 1 + size += len(label) + + # Join with U+002E + result = ".".join(result) + trailing_dot + size += len(trailing_dot) + return (result, size) + +class IncrementalDecoder(codecs.BufferedIncrementalDecoder): + def _buffer_decode(self, data, errors, final): + if errors != 'strict': + raise IDNAError("Unsupported error handling \"{0}\"".format(errors)) + + if not data: + return (u"", 0) + + # IDNA allows decoding to operate on Unicode strings, too. + if isinstance(data, unicode): + labels = _unicode_dots_re.split(data) + else: + # Must be ASCII string + data = str(data) + unicode(data, "ascii") + labels = data.split(".") + + trailing_dot = u'' + if labels: + if not labels[-1]: + trailing_dot = u'.' + del labels[-1] + elif not final: + # Keep potentially unfinished label until the next call + del labels[-1] + if labels: + trailing_dot = u'.' 
+ + result = [] + size = 0 + for label in labels: + result.append(ulabel(label)) + if size: + size += 1 + size += len(label) + + result = u".".join(result) + trailing_dot + size += len(trailing_dot) + return (result, size) + + +class StreamWriter(Codec, codecs.StreamWriter): + pass + +class StreamReader(Codec, codecs.StreamReader): + pass + +def getregentry(): + return codecs.CodecInfo( + name='idna', + encode=Codec().encode, + decode=Codec().decode, + incrementalencoder=IncrementalEncoder, + incrementaldecoder=IncrementalDecoder, + streamwriter=StreamWriter, + streamreader=StreamReader, + ) diff --git a/env/lib/python3.6/site-packages/pip/_vendor/idna/compat.py b/env/lib/python3.6/site-packages/pip/_vendor/idna/compat.py new file mode 100644 index 0000000..4d47f33 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/idna/compat.py @@ -0,0 +1,12 @@ +from .core import * +from .codec import * + +def ToASCII(label): + return encode(label) + +def ToUnicode(label): + return decode(label) + +def nameprep(s): + raise NotImplementedError("IDNA 2008 does not utilise nameprep protocol") + diff --git a/env/lib/python3.6/site-packages/pip/_vendor/idna/core.py b/env/lib/python3.6/site-packages/pip/_vendor/idna/core.py new file mode 100644 index 0000000..090c2c1 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/idna/core.py @@ -0,0 +1,399 @@ +from . import idnadata +import bisect +import unicodedata +import re +import sys +from .intranges import intranges_contain + +_virama_combining_class = 9 +_alabel_prefix = b'xn--' +_unicode_dots_re = re.compile(u'[\u002e\u3002\uff0e\uff61]') + +if sys.version_info[0] == 3: + unicode = str + unichr = chr + +class IDNAError(UnicodeError): + """ Base exception for all IDNA-encoding related problems """ + pass + + +class IDNABidiError(IDNAError): + """ Exception when bidirectional requirements are not satisfied """ + pass + + +class InvalidCodepoint(IDNAError): + """ Exception when a disallowed or unallocated codepoint is used """ + pass + + +class InvalidCodepointContext(IDNAError): + """ Exception when the codepoint is not valid in the context it is used """ + pass + + +def _combining_class(cp): + v = unicodedata.combining(unichr(cp)) + if v == 0: + if not unicodedata.name(unichr(cp)): + raise ValueError("Unknown character in unicodedata") + return v + +def _is_script(cp, script): + return intranges_contain(ord(cp), idnadata.scripts[script]) + +def _punycode(s): + return s.encode('punycode') + +def _unot(s): + return 'U+{0:04X}'.format(s) + + +def valid_label_length(label): + + if len(label) > 63: + return False + return True + + +def valid_string_length(label, trailing_dot): + + if len(label) > (254 if trailing_dot else 253): + return False + return True + + +def check_bidi(label, check_ltr=False): + + # Bidi rules should only be applied if string contains RTL characters + bidi_label = False + for (idx, cp) in enumerate(label, 1): + direction = unicodedata.bidirectional(cp) + if direction == '': + # String likely comes from a newer version of Unicode + raise IDNABidiError('Unknown directionality in label {0} at position {1}'.format(repr(label), idx)) + if direction in ['R', 'AL', 'AN']: + bidi_label = True + if not bidi_label and not check_ltr: + return True + + # Bidi rule 1 + direction = unicodedata.bidirectional(label[0]) + if direction in ['R', 'AL']: + rtl = True + elif direction == 'L': + rtl = False + else: + raise IDNABidiError('First codepoint in label {0} must be directionality L, R or AL'.format(repr(label))) + + valid_ending = 
False + number_type = False + for (idx, cp) in enumerate(label, 1): + direction = unicodedata.bidirectional(cp) + + if rtl: + # Bidi rule 2 + if not direction in ['R', 'AL', 'AN', 'EN', 'ES', 'CS', 'ET', 'ON', 'BN', 'NSM']: + raise IDNABidiError('Invalid direction for codepoint at position {0} in a right-to-left label'.format(idx)) + # Bidi rule 3 + if direction in ['R', 'AL', 'EN', 'AN']: + valid_ending = True + elif direction != 'NSM': + valid_ending = False + # Bidi rule 4 + if direction in ['AN', 'EN']: + if not number_type: + number_type = direction + else: + if number_type != direction: + raise IDNABidiError('Can not mix numeral types in a right-to-left label') + else: + # Bidi rule 5 + if not direction in ['L', 'EN', 'ES', 'CS', 'ET', 'ON', 'BN', 'NSM']: + raise IDNABidiError('Invalid direction for codepoint at position {0} in a left-to-right label'.format(idx)) + # Bidi rule 6 + if direction in ['L', 'EN']: + valid_ending = True + elif direction != 'NSM': + valid_ending = False + + if not valid_ending: + raise IDNABidiError('Label ends with illegal codepoint directionality') + + return True + + +def check_initial_combiner(label): + + if unicodedata.category(label[0])[0] == 'M': + raise IDNAError('Label begins with an illegal combining character') + return True + + +def check_hyphen_ok(label): + + if label[2:4] == '--': + raise IDNAError('Label has disallowed hyphens in 3rd and 4th position') + if label[0] == '-' or label[-1] == '-': + raise IDNAError('Label must not start or end with a hyphen') + return True + + +def check_nfc(label): + + if unicodedata.normalize('NFC', label) != label: + raise IDNAError('Label must be in Normalization Form C') + + +def valid_contextj(label, pos): + + cp_value = ord(label[pos]) + + if cp_value == 0x200c: + + if pos > 0: + if _combining_class(ord(label[pos - 1])) == _virama_combining_class: + return True + + ok = False + for i in range(pos-1, -1, -1): + joining_type = idnadata.joining_types.get(ord(label[i])) + if joining_type == ord('T'): + continue + if joining_type in [ord('L'), ord('D')]: + ok = True + break + + if not ok: + return False + + ok = False + for i in range(pos+1, len(label)): + joining_type = idnadata.joining_types.get(ord(label[i])) + if joining_type == ord('T'): + continue + if joining_type in [ord('R'), ord('D')]: + ok = True + break + return ok + + if cp_value == 0x200d: + + if pos > 0: + if _combining_class(ord(label[pos - 1])) == _virama_combining_class: + return True + return False + + else: + + return False + + +def valid_contexto(label, pos, exception=False): + + cp_value = ord(label[pos]) + + if cp_value == 0x00b7: + if 0 < pos < len(label)-1: + if ord(label[pos - 1]) == 0x006c and ord(label[pos + 1]) == 0x006c: + return True + return False + + elif cp_value == 0x0375: + if pos < len(label)-1 and len(label) > 1: + return _is_script(label[pos + 1], 'Greek') + return False + + elif cp_value == 0x05f3 or cp_value == 0x05f4: + if pos > 0: + return _is_script(label[pos - 1], 'Hebrew') + return False + + elif cp_value == 0x30fb: + for cp in label: + if cp == u'\u30fb': + continue + if _is_script(cp, 'Hiragana') or _is_script(cp, 'Katakana') or _is_script(cp, 'Han'): + return True + return False + + elif 0x660 <= cp_value <= 0x669: + for cp in label: + if 0x6f0 <= ord(cp) <= 0x06f9: + return False + return True + + elif 0x6f0 <= cp_value <= 0x6f9: + for cp in label: + if 0x660 <= ord(cp) <= 0x0669: + return False + return True + + +def check_label(label): + + if isinstance(label, (bytes, bytearray)): + label = 
label.decode('utf-8') + if len(label) == 0: + raise IDNAError('Empty Label') + + check_nfc(label) + check_hyphen_ok(label) + check_initial_combiner(label) + + for (pos, cp) in enumerate(label): + cp_value = ord(cp) + if intranges_contain(cp_value, idnadata.codepoint_classes['PVALID']): + continue + elif intranges_contain(cp_value, idnadata.codepoint_classes['CONTEXTJ']): + try: + if not valid_contextj(label, pos): + raise InvalidCodepointContext('Joiner {0} not allowed at position {1} in {2}'.format( + _unot(cp_value), pos+1, repr(label))) + except ValueError: + raise IDNAError('Unknown codepoint adjacent to joiner {0} at position {1} in {2}'.format( + _unot(cp_value), pos+1, repr(label))) + elif intranges_contain(cp_value, idnadata.codepoint_classes['CONTEXTO']): + if not valid_contexto(label, pos): + raise InvalidCodepointContext('Codepoint {0} not allowed at position {1} in {2}'.format(_unot(cp_value), pos+1, repr(label))) + else: + raise InvalidCodepoint('Codepoint {0} at position {1} of {2} not allowed'.format(_unot(cp_value), pos+1, repr(label))) + + check_bidi(label) + + +def alabel(label): + + try: + label = label.encode('ascii') + try: + ulabel(label) + except IDNAError: + raise IDNAError('The label {0} is not a valid A-label'.format(label)) + if not valid_label_length(label): + raise IDNAError('Label too long') + return label + except UnicodeEncodeError: + pass + + if not label: + raise IDNAError('No Input') + + label = unicode(label) + check_label(label) + label = _punycode(label) + label = _alabel_prefix + label + + if not valid_label_length(label): + raise IDNAError('Label too long') + + return label + + +def ulabel(label): + + if not isinstance(label, (bytes, bytearray)): + try: + label = label.encode('ascii') + except UnicodeEncodeError: + check_label(label) + return label + + label = label.lower() + if label.startswith(_alabel_prefix): + label = label[len(_alabel_prefix):] + else: + check_label(label) + return label.decode('ascii') + + label = label.decode('punycode') + check_label(label) + return label + + +def uts46_remap(domain, std3_rules=True, transitional=False): + """Re-map the characters in the string according to UTS46 processing.""" + from .uts46data import uts46data + output = u"" + try: + for pos, char in enumerate(domain): + code_point = ord(char) + uts46row = uts46data[code_point if code_point < 256 else + bisect.bisect_left(uts46data, (code_point, "Z")) - 1] + status = uts46row[1] + replacement = uts46row[2] if len(uts46row) == 3 else None + if (status == "V" or + (status == "D" and not transitional) or + (status == "3" and not std3_rules and replacement is None)): + output += char + elif replacement is not None and (status == "M" or + (status == "3" and not std3_rules) or + (status == "D" and transitional)): + output += replacement + elif status != "I": + raise IndexError() + return unicodedata.normalize("NFC", output) + except IndexError: + raise InvalidCodepoint( + "Codepoint {0} not allowed at position {1} in {2}".format( + _unot(code_point), pos + 1, repr(domain))) + + +def encode(s, strict=False, uts46=False, std3_rules=False, transitional=False): + + if isinstance(s, (bytes, bytearray)): + s = s.decode("ascii") + if uts46: + s = uts46_remap(s, std3_rules, transitional) + trailing_dot = False + result = [] + if strict: + labels = s.split('.') + else: + labels = _unicode_dots_re.split(s) + if not labels or labels == ['']: + raise IDNAError('Empty domain') + if labels[-1] == '': + del labels[-1] + trailing_dot = True + for label in labels: + s = 
alabel(label) + if s: + result.append(s) + else: + raise IDNAError('Empty label') + if trailing_dot: + result.append(b'') + s = b'.'.join(result) + if not valid_string_length(s, trailing_dot): + raise IDNAError('Domain too long') + return s + + +def decode(s, strict=False, uts46=False, std3_rules=False): + + if isinstance(s, (bytes, bytearray)): + s = s.decode("ascii") + if uts46: + s = uts46_remap(s, std3_rules, False) + trailing_dot = False + result = [] + if not strict: + labels = _unicode_dots_re.split(s) + else: + labels = s.split(u'.') + if not labels or labels == ['']: + raise IDNAError('Empty domain') + if not labels[-1]: + del labels[-1] + trailing_dot = True + for label in labels: + s = ulabel(label) + if s: + result.append(s) + else: + raise IDNAError('Empty label') + if trailing_dot: + result.append(u'') + return u'.'.join(result) diff --git a/env/lib/python3.6/site-packages/pip/_vendor/idna/idnadata.py b/env/lib/python3.6/site-packages/pip/_vendor/idna/idnadata.py new file mode 100644 index 0000000..17974e2 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/idna/idnadata.py @@ -0,0 +1,1893 @@ +# This file is automatically generated by tools/idna-data + +__version__ = "10.0.0" +scripts = { + 'Greek': ( + 0x37000000374, + 0x37500000378, + 0x37a0000037e, + 0x37f00000380, + 0x38400000385, + 0x38600000387, + 0x3880000038b, + 0x38c0000038d, + 0x38e000003a2, + 0x3a3000003e2, + 0x3f000000400, + 0x1d2600001d2b, + 0x1d5d00001d62, + 0x1d6600001d6b, + 0x1dbf00001dc0, + 0x1f0000001f16, + 0x1f1800001f1e, + 0x1f2000001f46, + 0x1f4800001f4e, + 0x1f5000001f58, + 0x1f5900001f5a, + 0x1f5b00001f5c, + 0x1f5d00001f5e, + 0x1f5f00001f7e, + 0x1f8000001fb5, + 0x1fb600001fc5, + 0x1fc600001fd4, + 0x1fd600001fdc, + 0x1fdd00001ff0, + 0x1ff200001ff5, + 0x1ff600001fff, + 0x212600002127, + 0xab650000ab66, + 0x101400001018f, + 0x101a0000101a1, + 0x1d2000001d246, + ), + 'Han': ( + 0x2e8000002e9a, + 0x2e9b00002ef4, + 0x2f0000002fd6, + 0x300500003006, + 0x300700003008, + 0x30210000302a, + 0x30380000303c, + 0x340000004db6, + 0x4e0000009feb, + 0xf9000000fa6e, + 0xfa700000fada, + 0x200000002a6d7, + 0x2a7000002b735, + 0x2b7400002b81e, + 0x2b8200002cea2, + 0x2ceb00002ebe1, + 0x2f8000002fa1e, + ), + 'Hebrew': ( + 0x591000005c8, + 0x5d0000005eb, + 0x5f0000005f5, + 0xfb1d0000fb37, + 0xfb380000fb3d, + 0xfb3e0000fb3f, + 0xfb400000fb42, + 0xfb430000fb45, + 0xfb460000fb50, + ), + 'Hiragana': ( + 0x304100003097, + 0x309d000030a0, + 0x1b0010001b11f, + 0x1f2000001f201, + ), + 'Katakana': ( + 0x30a1000030fb, + 0x30fd00003100, + 0x31f000003200, + 0x32d0000032ff, + 0x330000003358, + 0xff660000ff70, + 0xff710000ff9e, + 0x1b0000001b001, + ), +} +joining_types = { + 0x600: 85, + 0x601: 85, + 0x602: 85, + 0x603: 85, + 0x604: 85, + 0x605: 85, + 0x608: 85, + 0x60b: 85, + 0x620: 68, + 0x621: 85, + 0x622: 82, + 0x623: 82, + 0x624: 82, + 0x625: 82, + 0x626: 68, + 0x627: 82, + 0x628: 68, + 0x629: 82, + 0x62a: 68, + 0x62b: 68, + 0x62c: 68, + 0x62d: 68, + 0x62e: 68, + 0x62f: 82, + 0x630: 82, + 0x631: 82, + 0x632: 82, + 0x633: 68, + 0x634: 68, + 0x635: 68, + 0x636: 68, + 0x637: 68, + 0x638: 68, + 0x639: 68, + 0x63a: 68, + 0x63b: 68, + 0x63c: 68, + 0x63d: 68, + 0x63e: 68, + 0x63f: 68, + 0x640: 67, + 0x641: 68, + 0x642: 68, + 0x643: 68, + 0x644: 68, + 0x645: 68, + 0x646: 68, + 0x647: 68, + 0x648: 82, + 0x649: 68, + 0x64a: 68, + 0x66e: 68, + 0x66f: 68, + 0x671: 82, + 0x672: 82, + 0x673: 82, + 0x674: 85, + 0x675: 82, + 0x676: 82, + 0x677: 82, + 0x678: 68, + 0x679: 68, + 0x67a: 68, + 0x67b: 68, + 0x67c: 68, + 0x67d: 68, + 0x67e: 68, + 
0x67f: 68, + 0x680: 68, + 0x681: 68, + 0x682: 68, + 0x683: 68, + 0x684: 68, + 0x685: 68, + 0x686: 68, + 0x687: 68, + 0x688: 82, + 0x689: 82, + 0x68a: 82, + 0x68b: 82, + 0x68c: 82, + 0x68d: 82, + 0x68e: 82, + 0x68f: 82, + 0x690: 82, + 0x691: 82, + 0x692: 82, + 0x693: 82, + 0x694: 82, + 0x695: 82, + 0x696: 82, + 0x697: 82, + 0x698: 82, + 0x699: 82, + 0x69a: 68, + 0x69b: 68, + 0x69c: 68, + 0x69d: 68, + 0x69e: 68, + 0x69f: 68, + 0x6a0: 68, + 0x6a1: 68, + 0x6a2: 68, + 0x6a3: 68, + 0x6a4: 68, + 0x6a5: 68, + 0x6a6: 68, + 0x6a7: 68, + 0x6a8: 68, + 0x6a9: 68, + 0x6aa: 68, + 0x6ab: 68, + 0x6ac: 68, + 0x6ad: 68, + 0x6ae: 68, + 0x6af: 68, + 0x6b0: 68, + 0x6b1: 68, + 0x6b2: 68, + 0x6b3: 68, + 0x6b4: 68, + 0x6b5: 68, + 0x6b6: 68, + 0x6b7: 68, + 0x6b8: 68, + 0x6b9: 68, + 0x6ba: 68, + 0x6bb: 68, + 0x6bc: 68, + 0x6bd: 68, + 0x6be: 68, + 0x6bf: 68, + 0x6c0: 82, + 0x6c1: 68, + 0x6c2: 68, + 0x6c3: 82, + 0x6c4: 82, + 0x6c5: 82, + 0x6c6: 82, + 0x6c7: 82, + 0x6c8: 82, + 0x6c9: 82, + 0x6ca: 82, + 0x6cb: 82, + 0x6cc: 68, + 0x6cd: 82, + 0x6ce: 68, + 0x6cf: 82, + 0x6d0: 68, + 0x6d1: 68, + 0x6d2: 82, + 0x6d3: 82, + 0x6d5: 82, + 0x6dd: 85, + 0x6ee: 82, + 0x6ef: 82, + 0x6fa: 68, + 0x6fb: 68, + 0x6fc: 68, + 0x6ff: 68, + 0x710: 82, + 0x712: 68, + 0x713: 68, + 0x714: 68, + 0x715: 82, + 0x716: 82, + 0x717: 82, + 0x718: 82, + 0x719: 82, + 0x71a: 68, + 0x71b: 68, + 0x71c: 68, + 0x71d: 68, + 0x71e: 82, + 0x71f: 68, + 0x720: 68, + 0x721: 68, + 0x722: 68, + 0x723: 68, + 0x724: 68, + 0x725: 68, + 0x726: 68, + 0x727: 68, + 0x728: 82, + 0x729: 68, + 0x72a: 82, + 0x72b: 68, + 0x72c: 82, + 0x72d: 68, + 0x72e: 68, + 0x72f: 82, + 0x74d: 82, + 0x74e: 68, + 0x74f: 68, + 0x750: 68, + 0x751: 68, + 0x752: 68, + 0x753: 68, + 0x754: 68, + 0x755: 68, + 0x756: 68, + 0x757: 68, + 0x758: 68, + 0x759: 82, + 0x75a: 82, + 0x75b: 82, + 0x75c: 68, + 0x75d: 68, + 0x75e: 68, + 0x75f: 68, + 0x760: 68, + 0x761: 68, + 0x762: 68, + 0x763: 68, + 0x764: 68, + 0x765: 68, + 0x766: 68, + 0x767: 68, + 0x768: 68, + 0x769: 68, + 0x76a: 68, + 0x76b: 82, + 0x76c: 82, + 0x76d: 68, + 0x76e: 68, + 0x76f: 68, + 0x770: 68, + 0x771: 82, + 0x772: 68, + 0x773: 82, + 0x774: 82, + 0x775: 68, + 0x776: 68, + 0x777: 68, + 0x778: 82, + 0x779: 82, + 0x77a: 68, + 0x77b: 68, + 0x77c: 68, + 0x77d: 68, + 0x77e: 68, + 0x77f: 68, + 0x7ca: 68, + 0x7cb: 68, + 0x7cc: 68, + 0x7cd: 68, + 0x7ce: 68, + 0x7cf: 68, + 0x7d0: 68, + 0x7d1: 68, + 0x7d2: 68, + 0x7d3: 68, + 0x7d4: 68, + 0x7d5: 68, + 0x7d6: 68, + 0x7d7: 68, + 0x7d8: 68, + 0x7d9: 68, + 0x7da: 68, + 0x7db: 68, + 0x7dc: 68, + 0x7dd: 68, + 0x7de: 68, + 0x7df: 68, + 0x7e0: 68, + 0x7e1: 68, + 0x7e2: 68, + 0x7e3: 68, + 0x7e4: 68, + 0x7e5: 68, + 0x7e6: 68, + 0x7e7: 68, + 0x7e8: 68, + 0x7e9: 68, + 0x7ea: 68, + 0x7fa: 67, + 0x840: 82, + 0x841: 68, + 0x842: 68, + 0x843: 68, + 0x844: 68, + 0x845: 68, + 0x846: 82, + 0x847: 82, + 0x848: 68, + 0x849: 82, + 0x84a: 68, + 0x84b: 68, + 0x84c: 68, + 0x84d: 68, + 0x84e: 68, + 0x84f: 68, + 0x850: 68, + 0x851: 68, + 0x852: 68, + 0x853: 68, + 0x854: 82, + 0x855: 68, + 0x856: 85, + 0x857: 85, + 0x858: 85, + 0x860: 68, + 0x861: 85, + 0x862: 68, + 0x863: 68, + 0x864: 68, + 0x865: 68, + 0x866: 85, + 0x867: 82, + 0x868: 68, + 0x869: 82, + 0x86a: 82, + 0x8a0: 68, + 0x8a1: 68, + 0x8a2: 68, + 0x8a3: 68, + 0x8a4: 68, + 0x8a5: 68, + 0x8a6: 68, + 0x8a7: 68, + 0x8a8: 68, + 0x8a9: 68, + 0x8aa: 82, + 0x8ab: 82, + 0x8ac: 82, + 0x8ad: 85, + 0x8ae: 82, + 0x8af: 68, + 0x8b0: 68, + 0x8b1: 82, + 0x8b2: 82, + 0x8b3: 68, + 0x8b4: 68, + 0x8b6: 68, + 0x8b7: 68, + 0x8b8: 68, + 0x8b9: 82, + 0x8ba: 68, + 0x8bb: 68, + 0x8bc: 68, + 
0x8bd: 68, + 0x8e2: 85, + 0x1806: 85, + 0x1807: 68, + 0x180a: 67, + 0x180e: 85, + 0x1820: 68, + 0x1821: 68, + 0x1822: 68, + 0x1823: 68, + 0x1824: 68, + 0x1825: 68, + 0x1826: 68, + 0x1827: 68, + 0x1828: 68, + 0x1829: 68, + 0x182a: 68, + 0x182b: 68, + 0x182c: 68, + 0x182d: 68, + 0x182e: 68, + 0x182f: 68, + 0x1830: 68, + 0x1831: 68, + 0x1832: 68, + 0x1833: 68, + 0x1834: 68, + 0x1835: 68, + 0x1836: 68, + 0x1837: 68, + 0x1838: 68, + 0x1839: 68, + 0x183a: 68, + 0x183b: 68, + 0x183c: 68, + 0x183d: 68, + 0x183e: 68, + 0x183f: 68, + 0x1840: 68, + 0x1841: 68, + 0x1842: 68, + 0x1843: 68, + 0x1844: 68, + 0x1845: 68, + 0x1846: 68, + 0x1847: 68, + 0x1848: 68, + 0x1849: 68, + 0x184a: 68, + 0x184b: 68, + 0x184c: 68, + 0x184d: 68, + 0x184e: 68, + 0x184f: 68, + 0x1850: 68, + 0x1851: 68, + 0x1852: 68, + 0x1853: 68, + 0x1854: 68, + 0x1855: 68, + 0x1856: 68, + 0x1857: 68, + 0x1858: 68, + 0x1859: 68, + 0x185a: 68, + 0x185b: 68, + 0x185c: 68, + 0x185d: 68, + 0x185e: 68, + 0x185f: 68, + 0x1860: 68, + 0x1861: 68, + 0x1862: 68, + 0x1863: 68, + 0x1864: 68, + 0x1865: 68, + 0x1866: 68, + 0x1867: 68, + 0x1868: 68, + 0x1869: 68, + 0x186a: 68, + 0x186b: 68, + 0x186c: 68, + 0x186d: 68, + 0x186e: 68, + 0x186f: 68, + 0x1870: 68, + 0x1871: 68, + 0x1872: 68, + 0x1873: 68, + 0x1874: 68, + 0x1875: 68, + 0x1876: 68, + 0x1877: 68, + 0x1880: 85, + 0x1881: 85, + 0x1882: 85, + 0x1883: 85, + 0x1884: 85, + 0x1885: 84, + 0x1886: 84, + 0x1887: 68, + 0x1888: 68, + 0x1889: 68, + 0x188a: 68, + 0x188b: 68, + 0x188c: 68, + 0x188d: 68, + 0x188e: 68, + 0x188f: 68, + 0x1890: 68, + 0x1891: 68, + 0x1892: 68, + 0x1893: 68, + 0x1894: 68, + 0x1895: 68, + 0x1896: 68, + 0x1897: 68, + 0x1898: 68, + 0x1899: 68, + 0x189a: 68, + 0x189b: 68, + 0x189c: 68, + 0x189d: 68, + 0x189e: 68, + 0x189f: 68, + 0x18a0: 68, + 0x18a1: 68, + 0x18a2: 68, + 0x18a3: 68, + 0x18a4: 68, + 0x18a5: 68, + 0x18a6: 68, + 0x18a7: 68, + 0x18a8: 68, + 0x18aa: 68, + 0x200c: 85, + 0x200d: 67, + 0x202f: 85, + 0x2066: 85, + 0x2067: 85, + 0x2068: 85, + 0x2069: 85, + 0xa840: 68, + 0xa841: 68, + 0xa842: 68, + 0xa843: 68, + 0xa844: 68, + 0xa845: 68, + 0xa846: 68, + 0xa847: 68, + 0xa848: 68, + 0xa849: 68, + 0xa84a: 68, + 0xa84b: 68, + 0xa84c: 68, + 0xa84d: 68, + 0xa84e: 68, + 0xa84f: 68, + 0xa850: 68, + 0xa851: 68, + 0xa852: 68, + 0xa853: 68, + 0xa854: 68, + 0xa855: 68, + 0xa856: 68, + 0xa857: 68, + 0xa858: 68, + 0xa859: 68, + 0xa85a: 68, + 0xa85b: 68, + 0xa85c: 68, + 0xa85d: 68, + 0xa85e: 68, + 0xa85f: 68, + 0xa860: 68, + 0xa861: 68, + 0xa862: 68, + 0xa863: 68, + 0xa864: 68, + 0xa865: 68, + 0xa866: 68, + 0xa867: 68, + 0xa868: 68, + 0xa869: 68, + 0xa86a: 68, + 0xa86b: 68, + 0xa86c: 68, + 0xa86d: 68, + 0xa86e: 68, + 0xa86f: 68, + 0xa870: 68, + 0xa871: 68, + 0xa872: 76, + 0xa873: 85, + 0x10ac0: 68, + 0x10ac1: 68, + 0x10ac2: 68, + 0x10ac3: 68, + 0x10ac4: 68, + 0x10ac5: 82, + 0x10ac6: 85, + 0x10ac7: 82, + 0x10ac8: 85, + 0x10ac9: 82, + 0x10aca: 82, + 0x10acb: 85, + 0x10acc: 85, + 0x10acd: 76, + 0x10ace: 82, + 0x10acf: 82, + 0x10ad0: 82, + 0x10ad1: 82, + 0x10ad2: 82, + 0x10ad3: 68, + 0x10ad4: 68, + 0x10ad5: 68, + 0x10ad6: 68, + 0x10ad7: 76, + 0x10ad8: 68, + 0x10ad9: 68, + 0x10ada: 68, + 0x10adb: 68, + 0x10adc: 68, + 0x10add: 82, + 0x10ade: 68, + 0x10adf: 68, + 0x10ae0: 68, + 0x10ae1: 82, + 0x10ae2: 85, + 0x10ae3: 85, + 0x10ae4: 82, + 0x10aeb: 68, + 0x10aec: 68, + 0x10aed: 68, + 0x10aee: 68, + 0x10aef: 82, + 0x10b80: 68, + 0x10b81: 82, + 0x10b82: 68, + 0x10b83: 82, + 0x10b84: 82, + 0x10b85: 82, + 0x10b86: 68, + 0x10b87: 68, + 0x10b88: 68, + 0x10b89: 82, + 0x10b8a: 68, + 0x10b8b: 68, + 0x10b8c: 82, + 
0x10b8d: 68, + 0x10b8e: 82, + 0x10b8f: 82, + 0x10b90: 68, + 0x10b91: 82, + 0x10ba9: 82, + 0x10baa: 82, + 0x10bab: 82, + 0x10bac: 82, + 0x10bad: 68, + 0x10bae: 68, + 0x10baf: 85, + 0x1e900: 68, + 0x1e901: 68, + 0x1e902: 68, + 0x1e903: 68, + 0x1e904: 68, + 0x1e905: 68, + 0x1e906: 68, + 0x1e907: 68, + 0x1e908: 68, + 0x1e909: 68, + 0x1e90a: 68, + 0x1e90b: 68, + 0x1e90c: 68, + 0x1e90d: 68, + 0x1e90e: 68, + 0x1e90f: 68, + 0x1e910: 68, + 0x1e911: 68, + 0x1e912: 68, + 0x1e913: 68, + 0x1e914: 68, + 0x1e915: 68, + 0x1e916: 68, + 0x1e917: 68, + 0x1e918: 68, + 0x1e919: 68, + 0x1e91a: 68, + 0x1e91b: 68, + 0x1e91c: 68, + 0x1e91d: 68, + 0x1e91e: 68, + 0x1e91f: 68, + 0x1e920: 68, + 0x1e921: 68, + 0x1e922: 68, + 0x1e923: 68, + 0x1e924: 68, + 0x1e925: 68, + 0x1e926: 68, + 0x1e927: 68, + 0x1e928: 68, + 0x1e929: 68, + 0x1e92a: 68, + 0x1e92b: 68, + 0x1e92c: 68, + 0x1e92d: 68, + 0x1e92e: 68, + 0x1e92f: 68, + 0x1e930: 68, + 0x1e931: 68, + 0x1e932: 68, + 0x1e933: 68, + 0x1e934: 68, + 0x1e935: 68, + 0x1e936: 68, + 0x1e937: 68, + 0x1e938: 68, + 0x1e939: 68, + 0x1e93a: 68, + 0x1e93b: 68, + 0x1e93c: 68, + 0x1e93d: 68, + 0x1e93e: 68, + 0x1e93f: 68, + 0x1e940: 68, + 0x1e941: 68, + 0x1e942: 68, + 0x1e943: 68, +} +codepoint_classes = { + 'PVALID': ( + 0x2d0000002e, + 0x300000003a, + 0x610000007b, + 0xdf000000f7, + 0xf800000100, + 0x10100000102, + 0x10300000104, + 0x10500000106, + 0x10700000108, + 0x1090000010a, + 0x10b0000010c, + 0x10d0000010e, + 0x10f00000110, + 0x11100000112, + 0x11300000114, + 0x11500000116, + 0x11700000118, + 0x1190000011a, + 0x11b0000011c, + 0x11d0000011e, + 0x11f00000120, + 0x12100000122, + 0x12300000124, + 0x12500000126, + 0x12700000128, + 0x1290000012a, + 0x12b0000012c, + 0x12d0000012e, + 0x12f00000130, + 0x13100000132, + 0x13500000136, + 0x13700000139, + 0x13a0000013b, + 0x13c0000013d, + 0x13e0000013f, + 0x14200000143, + 0x14400000145, + 0x14600000147, + 0x14800000149, + 0x14b0000014c, + 0x14d0000014e, + 0x14f00000150, + 0x15100000152, + 0x15300000154, + 0x15500000156, + 0x15700000158, + 0x1590000015a, + 0x15b0000015c, + 0x15d0000015e, + 0x15f00000160, + 0x16100000162, + 0x16300000164, + 0x16500000166, + 0x16700000168, + 0x1690000016a, + 0x16b0000016c, + 0x16d0000016e, + 0x16f00000170, + 0x17100000172, + 0x17300000174, + 0x17500000176, + 0x17700000178, + 0x17a0000017b, + 0x17c0000017d, + 0x17e0000017f, + 0x18000000181, + 0x18300000184, + 0x18500000186, + 0x18800000189, + 0x18c0000018e, + 0x19200000193, + 0x19500000196, + 0x1990000019c, + 0x19e0000019f, + 0x1a1000001a2, + 0x1a3000001a4, + 0x1a5000001a6, + 0x1a8000001a9, + 0x1aa000001ac, + 0x1ad000001ae, + 0x1b0000001b1, + 0x1b4000001b5, + 0x1b6000001b7, + 0x1b9000001bc, + 0x1bd000001c4, + 0x1ce000001cf, + 0x1d0000001d1, + 0x1d2000001d3, + 0x1d4000001d5, + 0x1d6000001d7, + 0x1d8000001d9, + 0x1da000001db, + 0x1dc000001de, + 0x1df000001e0, + 0x1e1000001e2, + 0x1e3000001e4, + 0x1e5000001e6, + 0x1e7000001e8, + 0x1e9000001ea, + 0x1eb000001ec, + 0x1ed000001ee, + 0x1ef000001f1, + 0x1f5000001f6, + 0x1f9000001fa, + 0x1fb000001fc, + 0x1fd000001fe, + 0x1ff00000200, + 0x20100000202, + 0x20300000204, + 0x20500000206, + 0x20700000208, + 0x2090000020a, + 0x20b0000020c, + 0x20d0000020e, + 0x20f00000210, + 0x21100000212, + 0x21300000214, + 0x21500000216, + 0x21700000218, + 0x2190000021a, + 0x21b0000021c, + 0x21d0000021e, + 0x21f00000220, + 0x22100000222, + 0x22300000224, + 0x22500000226, + 0x22700000228, + 0x2290000022a, + 0x22b0000022c, + 0x22d0000022e, + 0x22f00000230, + 0x23100000232, + 0x2330000023a, + 0x23c0000023d, + 0x23f00000241, + 0x24200000243, + 
0x24700000248, + 0x2490000024a, + 0x24b0000024c, + 0x24d0000024e, + 0x24f000002b0, + 0x2b9000002c2, + 0x2c6000002d2, + 0x2ec000002ed, + 0x2ee000002ef, + 0x30000000340, + 0x34200000343, + 0x3460000034f, + 0x35000000370, + 0x37100000372, + 0x37300000374, + 0x37700000378, + 0x37b0000037e, + 0x39000000391, + 0x3ac000003cf, + 0x3d7000003d8, + 0x3d9000003da, + 0x3db000003dc, + 0x3dd000003de, + 0x3df000003e0, + 0x3e1000003e2, + 0x3e3000003e4, + 0x3e5000003e6, + 0x3e7000003e8, + 0x3e9000003ea, + 0x3eb000003ec, + 0x3ed000003ee, + 0x3ef000003f0, + 0x3f3000003f4, + 0x3f8000003f9, + 0x3fb000003fd, + 0x43000000460, + 0x46100000462, + 0x46300000464, + 0x46500000466, + 0x46700000468, + 0x4690000046a, + 0x46b0000046c, + 0x46d0000046e, + 0x46f00000470, + 0x47100000472, + 0x47300000474, + 0x47500000476, + 0x47700000478, + 0x4790000047a, + 0x47b0000047c, + 0x47d0000047e, + 0x47f00000480, + 0x48100000482, + 0x48300000488, + 0x48b0000048c, + 0x48d0000048e, + 0x48f00000490, + 0x49100000492, + 0x49300000494, + 0x49500000496, + 0x49700000498, + 0x4990000049a, + 0x49b0000049c, + 0x49d0000049e, + 0x49f000004a0, + 0x4a1000004a2, + 0x4a3000004a4, + 0x4a5000004a6, + 0x4a7000004a8, + 0x4a9000004aa, + 0x4ab000004ac, + 0x4ad000004ae, + 0x4af000004b0, + 0x4b1000004b2, + 0x4b3000004b4, + 0x4b5000004b6, + 0x4b7000004b8, + 0x4b9000004ba, + 0x4bb000004bc, + 0x4bd000004be, + 0x4bf000004c0, + 0x4c2000004c3, + 0x4c4000004c5, + 0x4c6000004c7, + 0x4c8000004c9, + 0x4ca000004cb, + 0x4cc000004cd, + 0x4ce000004d0, + 0x4d1000004d2, + 0x4d3000004d4, + 0x4d5000004d6, + 0x4d7000004d8, + 0x4d9000004da, + 0x4db000004dc, + 0x4dd000004de, + 0x4df000004e0, + 0x4e1000004e2, + 0x4e3000004e4, + 0x4e5000004e6, + 0x4e7000004e8, + 0x4e9000004ea, + 0x4eb000004ec, + 0x4ed000004ee, + 0x4ef000004f0, + 0x4f1000004f2, + 0x4f3000004f4, + 0x4f5000004f6, + 0x4f7000004f8, + 0x4f9000004fa, + 0x4fb000004fc, + 0x4fd000004fe, + 0x4ff00000500, + 0x50100000502, + 0x50300000504, + 0x50500000506, + 0x50700000508, + 0x5090000050a, + 0x50b0000050c, + 0x50d0000050e, + 0x50f00000510, + 0x51100000512, + 0x51300000514, + 0x51500000516, + 0x51700000518, + 0x5190000051a, + 0x51b0000051c, + 0x51d0000051e, + 0x51f00000520, + 0x52100000522, + 0x52300000524, + 0x52500000526, + 0x52700000528, + 0x5290000052a, + 0x52b0000052c, + 0x52d0000052e, + 0x52f00000530, + 0x5590000055a, + 0x56100000587, + 0x591000005be, + 0x5bf000005c0, + 0x5c1000005c3, + 0x5c4000005c6, + 0x5c7000005c8, + 0x5d0000005eb, + 0x5f0000005f3, + 0x6100000061b, + 0x62000000640, + 0x64100000660, + 0x66e00000675, + 0x679000006d4, + 0x6d5000006dd, + 0x6df000006e9, + 0x6ea000006f0, + 0x6fa00000700, + 0x7100000074b, + 0x74d000007b2, + 0x7c0000007f6, + 0x8000000082e, + 0x8400000085c, + 0x8600000086b, + 0x8a0000008b5, + 0x8b6000008be, + 0x8d4000008e2, + 0x8e300000958, + 0x96000000964, + 0x96600000970, + 0x97100000984, + 0x9850000098d, + 0x98f00000991, + 0x993000009a9, + 0x9aa000009b1, + 0x9b2000009b3, + 0x9b6000009ba, + 0x9bc000009c5, + 0x9c7000009c9, + 0x9cb000009cf, + 0x9d7000009d8, + 0x9e0000009e4, + 0x9e6000009f2, + 0x9fc000009fd, + 0xa0100000a04, + 0xa0500000a0b, + 0xa0f00000a11, + 0xa1300000a29, + 0xa2a00000a31, + 0xa3200000a33, + 0xa3500000a36, + 0xa3800000a3a, + 0xa3c00000a3d, + 0xa3e00000a43, + 0xa4700000a49, + 0xa4b00000a4e, + 0xa5100000a52, + 0xa5c00000a5d, + 0xa6600000a76, + 0xa8100000a84, + 0xa8500000a8e, + 0xa8f00000a92, + 0xa9300000aa9, + 0xaaa00000ab1, + 0xab200000ab4, + 0xab500000aba, + 0xabc00000ac6, + 0xac700000aca, + 0xacb00000ace, + 0xad000000ad1, + 0xae000000ae4, + 0xae600000af0, + 0xaf900000b00, + 
0xb0100000b04, + 0xb0500000b0d, + 0xb0f00000b11, + 0xb1300000b29, + 0xb2a00000b31, + 0xb3200000b34, + 0xb3500000b3a, + 0xb3c00000b45, + 0xb4700000b49, + 0xb4b00000b4e, + 0xb5600000b58, + 0xb5f00000b64, + 0xb6600000b70, + 0xb7100000b72, + 0xb8200000b84, + 0xb8500000b8b, + 0xb8e00000b91, + 0xb9200000b96, + 0xb9900000b9b, + 0xb9c00000b9d, + 0xb9e00000ba0, + 0xba300000ba5, + 0xba800000bab, + 0xbae00000bba, + 0xbbe00000bc3, + 0xbc600000bc9, + 0xbca00000bce, + 0xbd000000bd1, + 0xbd700000bd8, + 0xbe600000bf0, + 0xc0000000c04, + 0xc0500000c0d, + 0xc0e00000c11, + 0xc1200000c29, + 0xc2a00000c3a, + 0xc3d00000c45, + 0xc4600000c49, + 0xc4a00000c4e, + 0xc5500000c57, + 0xc5800000c5b, + 0xc6000000c64, + 0xc6600000c70, + 0xc8000000c84, + 0xc8500000c8d, + 0xc8e00000c91, + 0xc9200000ca9, + 0xcaa00000cb4, + 0xcb500000cba, + 0xcbc00000cc5, + 0xcc600000cc9, + 0xcca00000cce, + 0xcd500000cd7, + 0xcde00000cdf, + 0xce000000ce4, + 0xce600000cf0, + 0xcf100000cf3, + 0xd0000000d04, + 0xd0500000d0d, + 0xd0e00000d11, + 0xd1200000d45, + 0xd4600000d49, + 0xd4a00000d4f, + 0xd5400000d58, + 0xd5f00000d64, + 0xd6600000d70, + 0xd7a00000d80, + 0xd8200000d84, + 0xd8500000d97, + 0xd9a00000db2, + 0xdb300000dbc, + 0xdbd00000dbe, + 0xdc000000dc7, + 0xdca00000dcb, + 0xdcf00000dd5, + 0xdd600000dd7, + 0xdd800000de0, + 0xde600000df0, + 0xdf200000df4, + 0xe0100000e33, + 0xe3400000e3b, + 0xe4000000e4f, + 0xe5000000e5a, + 0xe8100000e83, + 0xe8400000e85, + 0xe8700000e89, + 0xe8a00000e8b, + 0xe8d00000e8e, + 0xe9400000e98, + 0xe9900000ea0, + 0xea100000ea4, + 0xea500000ea6, + 0xea700000ea8, + 0xeaa00000eac, + 0xead00000eb3, + 0xeb400000eba, + 0xebb00000ebe, + 0xec000000ec5, + 0xec600000ec7, + 0xec800000ece, + 0xed000000eda, + 0xede00000ee0, + 0xf0000000f01, + 0xf0b00000f0c, + 0xf1800000f1a, + 0xf2000000f2a, + 0xf3500000f36, + 0xf3700000f38, + 0xf3900000f3a, + 0xf3e00000f43, + 0xf4400000f48, + 0xf4900000f4d, + 0xf4e00000f52, + 0xf5300000f57, + 0xf5800000f5c, + 0xf5d00000f69, + 0xf6a00000f6d, + 0xf7100000f73, + 0xf7400000f75, + 0xf7a00000f81, + 0xf8200000f85, + 0xf8600000f93, + 0xf9400000f98, + 0xf9900000f9d, + 0xf9e00000fa2, + 0xfa300000fa7, + 0xfa800000fac, + 0xfad00000fb9, + 0xfba00000fbd, + 0xfc600000fc7, + 0x10000000104a, + 0x10500000109e, + 0x10d0000010fb, + 0x10fd00001100, + 0x120000001249, + 0x124a0000124e, + 0x125000001257, + 0x125800001259, + 0x125a0000125e, + 0x126000001289, + 0x128a0000128e, + 0x1290000012b1, + 0x12b2000012b6, + 0x12b8000012bf, + 0x12c0000012c1, + 0x12c2000012c6, + 0x12c8000012d7, + 0x12d800001311, + 0x131200001316, + 0x13180000135b, + 0x135d00001360, + 0x138000001390, + 0x13a0000013f6, + 0x14010000166d, + 0x166f00001680, + 0x16810000169b, + 0x16a0000016eb, + 0x16f1000016f9, + 0x17000000170d, + 0x170e00001715, + 0x172000001735, + 0x174000001754, + 0x17600000176d, + 0x176e00001771, + 0x177200001774, + 0x1780000017b4, + 0x17b6000017d4, + 0x17d7000017d8, + 0x17dc000017de, + 0x17e0000017ea, + 0x18100000181a, + 0x182000001878, + 0x1880000018ab, + 0x18b0000018f6, + 0x19000000191f, + 0x19200000192c, + 0x19300000193c, + 0x19460000196e, + 0x197000001975, + 0x1980000019ac, + 0x19b0000019ca, + 0x19d0000019da, + 0x1a0000001a1c, + 0x1a2000001a5f, + 0x1a6000001a7d, + 0x1a7f00001a8a, + 0x1a9000001a9a, + 0x1aa700001aa8, + 0x1ab000001abe, + 0x1b0000001b4c, + 0x1b5000001b5a, + 0x1b6b00001b74, + 0x1b8000001bf4, + 0x1c0000001c38, + 0x1c4000001c4a, + 0x1c4d00001c7e, + 0x1cd000001cd3, + 0x1cd400001cfa, + 0x1d0000001d2c, + 0x1d2f00001d30, + 0x1d3b00001d3c, + 0x1d4e00001d4f, + 0x1d6b00001d78, + 0x1d7900001d9b, + 0x1dc000001dfa, + 
0x1dfb00001e00, + 0x1e0100001e02, + 0x1e0300001e04, + 0x1e0500001e06, + 0x1e0700001e08, + 0x1e0900001e0a, + 0x1e0b00001e0c, + 0x1e0d00001e0e, + 0x1e0f00001e10, + 0x1e1100001e12, + 0x1e1300001e14, + 0x1e1500001e16, + 0x1e1700001e18, + 0x1e1900001e1a, + 0x1e1b00001e1c, + 0x1e1d00001e1e, + 0x1e1f00001e20, + 0x1e2100001e22, + 0x1e2300001e24, + 0x1e2500001e26, + 0x1e2700001e28, + 0x1e2900001e2a, + 0x1e2b00001e2c, + 0x1e2d00001e2e, + 0x1e2f00001e30, + 0x1e3100001e32, + 0x1e3300001e34, + 0x1e3500001e36, + 0x1e3700001e38, + 0x1e3900001e3a, + 0x1e3b00001e3c, + 0x1e3d00001e3e, + 0x1e3f00001e40, + 0x1e4100001e42, + 0x1e4300001e44, + 0x1e4500001e46, + 0x1e4700001e48, + 0x1e4900001e4a, + 0x1e4b00001e4c, + 0x1e4d00001e4e, + 0x1e4f00001e50, + 0x1e5100001e52, + 0x1e5300001e54, + 0x1e5500001e56, + 0x1e5700001e58, + 0x1e5900001e5a, + 0x1e5b00001e5c, + 0x1e5d00001e5e, + 0x1e5f00001e60, + 0x1e6100001e62, + 0x1e6300001e64, + 0x1e6500001e66, + 0x1e6700001e68, + 0x1e6900001e6a, + 0x1e6b00001e6c, + 0x1e6d00001e6e, + 0x1e6f00001e70, + 0x1e7100001e72, + 0x1e7300001e74, + 0x1e7500001e76, + 0x1e7700001e78, + 0x1e7900001e7a, + 0x1e7b00001e7c, + 0x1e7d00001e7e, + 0x1e7f00001e80, + 0x1e8100001e82, + 0x1e8300001e84, + 0x1e8500001e86, + 0x1e8700001e88, + 0x1e8900001e8a, + 0x1e8b00001e8c, + 0x1e8d00001e8e, + 0x1e8f00001e90, + 0x1e9100001e92, + 0x1e9300001e94, + 0x1e9500001e9a, + 0x1e9c00001e9e, + 0x1e9f00001ea0, + 0x1ea100001ea2, + 0x1ea300001ea4, + 0x1ea500001ea6, + 0x1ea700001ea8, + 0x1ea900001eaa, + 0x1eab00001eac, + 0x1ead00001eae, + 0x1eaf00001eb0, + 0x1eb100001eb2, + 0x1eb300001eb4, + 0x1eb500001eb6, + 0x1eb700001eb8, + 0x1eb900001eba, + 0x1ebb00001ebc, + 0x1ebd00001ebe, + 0x1ebf00001ec0, + 0x1ec100001ec2, + 0x1ec300001ec4, + 0x1ec500001ec6, + 0x1ec700001ec8, + 0x1ec900001eca, + 0x1ecb00001ecc, + 0x1ecd00001ece, + 0x1ecf00001ed0, + 0x1ed100001ed2, + 0x1ed300001ed4, + 0x1ed500001ed6, + 0x1ed700001ed8, + 0x1ed900001eda, + 0x1edb00001edc, + 0x1edd00001ede, + 0x1edf00001ee0, + 0x1ee100001ee2, + 0x1ee300001ee4, + 0x1ee500001ee6, + 0x1ee700001ee8, + 0x1ee900001eea, + 0x1eeb00001eec, + 0x1eed00001eee, + 0x1eef00001ef0, + 0x1ef100001ef2, + 0x1ef300001ef4, + 0x1ef500001ef6, + 0x1ef700001ef8, + 0x1ef900001efa, + 0x1efb00001efc, + 0x1efd00001efe, + 0x1eff00001f08, + 0x1f1000001f16, + 0x1f2000001f28, + 0x1f3000001f38, + 0x1f4000001f46, + 0x1f5000001f58, + 0x1f6000001f68, + 0x1f7000001f71, + 0x1f7200001f73, + 0x1f7400001f75, + 0x1f7600001f77, + 0x1f7800001f79, + 0x1f7a00001f7b, + 0x1f7c00001f7d, + 0x1fb000001fb2, + 0x1fb600001fb7, + 0x1fc600001fc7, + 0x1fd000001fd3, + 0x1fd600001fd8, + 0x1fe000001fe3, + 0x1fe400001fe8, + 0x1ff600001ff7, + 0x214e0000214f, + 0x218400002185, + 0x2c3000002c5f, + 0x2c6100002c62, + 0x2c6500002c67, + 0x2c6800002c69, + 0x2c6a00002c6b, + 0x2c6c00002c6d, + 0x2c7100002c72, + 0x2c7300002c75, + 0x2c7600002c7c, + 0x2c8100002c82, + 0x2c8300002c84, + 0x2c8500002c86, + 0x2c8700002c88, + 0x2c8900002c8a, + 0x2c8b00002c8c, + 0x2c8d00002c8e, + 0x2c8f00002c90, + 0x2c9100002c92, + 0x2c9300002c94, + 0x2c9500002c96, + 0x2c9700002c98, + 0x2c9900002c9a, + 0x2c9b00002c9c, + 0x2c9d00002c9e, + 0x2c9f00002ca0, + 0x2ca100002ca2, + 0x2ca300002ca4, + 0x2ca500002ca6, + 0x2ca700002ca8, + 0x2ca900002caa, + 0x2cab00002cac, + 0x2cad00002cae, + 0x2caf00002cb0, + 0x2cb100002cb2, + 0x2cb300002cb4, + 0x2cb500002cb6, + 0x2cb700002cb8, + 0x2cb900002cba, + 0x2cbb00002cbc, + 0x2cbd00002cbe, + 0x2cbf00002cc0, + 0x2cc100002cc2, + 0x2cc300002cc4, + 0x2cc500002cc6, + 0x2cc700002cc8, + 0x2cc900002cca, + 0x2ccb00002ccc, + 0x2ccd00002cce, + 
0x2ccf00002cd0, + 0x2cd100002cd2, + 0x2cd300002cd4, + 0x2cd500002cd6, + 0x2cd700002cd8, + 0x2cd900002cda, + 0x2cdb00002cdc, + 0x2cdd00002cde, + 0x2cdf00002ce0, + 0x2ce100002ce2, + 0x2ce300002ce5, + 0x2cec00002ced, + 0x2cee00002cf2, + 0x2cf300002cf4, + 0x2d0000002d26, + 0x2d2700002d28, + 0x2d2d00002d2e, + 0x2d3000002d68, + 0x2d7f00002d97, + 0x2da000002da7, + 0x2da800002daf, + 0x2db000002db7, + 0x2db800002dbf, + 0x2dc000002dc7, + 0x2dc800002dcf, + 0x2dd000002dd7, + 0x2dd800002ddf, + 0x2de000002e00, + 0x2e2f00002e30, + 0x300500003008, + 0x302a0000302e, + 0x303c0000303d, + 0x304100003097, + 0x30990000309b, + 0x309d0000309f, + 0x30a1000030fb, + 0x30fc000030ff, + 0x31050000312f, + 0x31a0000031bb, + 0x31f000003200, + 0x340000004db6, + 0x4e0000009feb, + 0xa0000000a48d, + 0xa4d00000a4fe, + 0xa5000000a60d, + 0xa6100000a62c, + 0xa6410000a642, + 0xa6430000a644, + 0xa6450000a646, + 0xa6470000a648, + 0xa6490000a64a, + 0xa64b0000a64c, + 0xa64d0000a64e, + 0xa64f0000a650, + 0xa6510000a652, + 0xa6530000a654, + 0xa6550000a656, + 0xa6570000a658, + 0xa6590000a65a, + 0xa65b0000a65c, + 0xa65d0000a65e, + 0xa65f0000a660, + 0xa6610000a662, + 0xa6630000a664, + 0xa6650000a666, + 0xa6670000a668, + 0xa6690000a66a, + 0xa66b0000a66c, + 0xa66d0000a670, + 0xa6740000a67e, + 0xa67f0000a680, + 0xa6810000a682, + 0xa6830000a684, + 0xa6850000a686, + 0xa6870000a688, + 0xa6890000a68a, + 0xa68b0000a68c, + 0xa68d0000a68e, + 0xa68f0000a690, + 0xa6910000a692, + 0xa6930000a694, + 0xa6950000a696, + 0xa6970000a698, + 0xa6990000a69a, + 0xa69b0000a69c, + 0xa69e0000a6e6, + 0xa6f00000a6f2, + 0xa7170000a720, + 0xa7230000a724, + 0xa7250000a726, + 0xa7270000a728, + 0xa7290000a72a, + 0xa72b0000a72c, + 0xa72d0000a72e, + 0xa72f0000a732, + 0xa7330000a734, + 0xa7350000a736, + 0xa7370000a738, + 0xa7390000a73a, + 0xa73b0000a73c, + 0xa73d0000a73e, + 0xa73f0000a740, + 0xa7410000a742, + 0xa7430000a744, + 0xa7450000a746, + 0xa7470000a748, + 0xa7490000a74a, + 0xa74b0000a74c, + 0xa74d0000a74e, + 0xa74f0000a750, + 0xa7510000a752, + 0xa7530000a754, + 0xa7550000a756, + 0xa7570000a758, + 0xa7590000a75a, + 0xa75b0000a75c, + 0xa75d0000a75e, + 0xa75f0000a760, + 0xa7610000a762, + 0xa7630000a764, + 0xa7650000a766, + 0xa7670000a768, + 0xa7690000a76a, + 0xa76b0000a76c, + 0xa76d0000a76e, + 0xa76f0000a770, + 0xa7710000a779, + 0xa77a0000a77b, + 0xa77c0000a77d, + 0xa77f0000a780, + 0xa7810000a782, + 0xa7830000a784, + 0xa7850000a786, + 0xa7870000a789, + 0xa78c0000a78d, + 0xa78e0000a790, + 0xa7910000a792, + 0xa7930000a796, + 0xa7970000a798, + 0xa7990000a79a, + 0xa79b0000a79c, + 0xa79d0000a79e, + 0xa79f0000a7a0, + 0xa7a10000a7a2, + 0xa7a30000a7a4, + 0xa7a50000a7a6, + 0xa7a70000a7a8, + 0xa7a90000a7aa, + 0xa7b50000a7b6, + 0xa7b70000a7b8, + 0xa7f70000a7f8, + 0xa7fa0000a828, + 0xa8400000a874, + 0xa8800000a8c6, + 0xa8d00000a8da, + 0xa8e00000a8f8, + 0xa8fb0000a8fc, + 0xa8fd0000a8fe, + 0xa9000000a92e, + 0xa9300000a954, + 0xa9800000a9c1, + 0xa9cf0000a9da, + 0xa9e00000a9ff, + 0xaa000000aa37, + 0xaa400000aa4e, + 0xaa500000aa5a, + 0xaa600000aa77, + 0xaa7a0000aac3, + 0xaadb0000aade, + 0xaae00000aaf0, + 0xaaf20000aaf7, + 0xab010000ab07, + 0xab090000ab0f, + 0xab110000ab17, + 0xab200000ab27, + 0xab280000ab2f, + 0xab300000ab5b, + 0xab600000ab66, + 0xabc00000abeb, + 0xabec0000abee, + 0xabf00000abfa, + 0xac000000d7a4, + 0xfa0e0000fa10, + 0xfa110000fa12, + 0xfa130000fa15, + 0xfa1f0000fa20, + 0xfa210000fa22, + 0xfa230000fa25, + 0xfa270000fa2a, + 0xfb1e0000fb1f, + 0xfe200000fe30, + 0xfe730000fe74, + 0x100000001000c, + 0x1000d00010027, + 0x100280001003b, + 0x1003c0001003e, + 0x1003f0001004e, + 
0x100500001005e, + 0x10080000100fb, + 0x101fd000101fe, + 0x102800001029d, + 0x102a0000102d1, + 0x102e0000102e1, + 0x1030000010320, + 0x1032d00010341, + 0x103420001034a, + 0x103500001037b, + 0x103800001039e, + 0x103a0000103c4, + 0x103c8000103d0, + 0x104280001049e, + 0x104a0000104aa, + 0x104d8000104fc, + 0x1050000010528, + 0x1053000010564, + 0x1060000010737, + 0x1074000010756, + 0x1076000010768, + 0x1080000010806, + 0x1080800010809, + 0x1080a00010836, + 0x1083700010839, + 0x1083c0001083d, + 0x1083f00010856, + 0x1086000010877, + 0x108800001089f, + 0x108e0000108f3, + 0x108f4000108f6, + 0x1090000010916, + 0x109200001093a, + 0x10980000109b8, + 0x109be000109c0, + 0x10a0000010a04, + 0x10a0500010a07, + 0x10a0c00010a14, + 0x10a1500010a18, + 0x10a1900010a34, + 0x10a3800010a3b, + 0x10a3f00010a40, + 0x10a6000010a7d, + 0x10a8000010a9d, + 0x10ac000010ac8, + 0x10ac900010ae7, + 0x10b0000010b36, + 0x10b4000010b56, + 0x10b6000010b73, + 0x10b8000010b92, + 0x10c0000010c49, + 0x10cc000010cf3, + 0x1100000011047, + 0x1106600011070, + 0x1107f000110bb, + 0x110d0000110e9, + 0x110f0000110fa, + 0x1110000011135, + 0x1113600011140, + 0x1115000011174, + 0x1117600011177, + 0x11180000111c5, + 0x111ca000111cd, + 0x111d0000111db, + 0x111dc000111dd, + 0x1120000011212, + 0x1121300011238, + 0x1123e0001123f, + 0x1128000011287, + 0x1128800011289, + 0x1128a0001128e, + 0x1128f0001129e, + 0x1129f000112a9, + 0x112b0000112eb, + 0x112f0000112fa, + 0x1130000011304, + 0x113050001130d, + 0x1130f00011311, + 0x1131300011329, + 0x1132a00011331, + 0x1133200011334, + 0x113350001133a, + 0x1133c00011345, + 0x1134700011349, + 0x1134b0001134e, + 0x1135000011351, + 0x1135700011358, + 0x1135d00011364, + 0x113660001136d, + 0x1137000011375, + 0x114000001144b, + 0x114500001145a, + 0x11480000114c6, + 0x114c7000114c8, + 0x114d0000114da, + 0x11580000115b6, + 0x115b8000115c1, + 0x115d8000115de, + 0x1160000011641, + 0x1164400011645, + 0x116500001165a, + 0x11680000116b8, + 0x116c0000116ca, + 0x117000001171a, + 0x1171d0001172c, + 0x117300001173a, + 0x118c0000118ea, + 0x118ff00011900, + 0x11a0000011a3f, + 0x11a4700011a48, + 0x11a5000011a84, + 0x11a8600011a9a, + 0x11ac000011af9, + 0x11c0000011c09, + 0x11c0a00011c37, + 0x11c3800011c41, + 0x11c5000011c5a, + 0x11c7200011c90, + 0x11c9200011ca8, + 0x11ca900011cb7, + 0x11d0000011d07, + 0x11d0800011d0a, + 0x11d0b00011d37, + 0x11d3a00011d3b, + 0x11d3c00011d3e, + 0x11d3f00011d48, + 0x11d5000011d5a, + 0x120000001239a, + 0x1248000012544, + 0x130000001342f, + 0x1440000014647, + 0x1680000016a39, + 0x16a4000016a5f, + 0x16a6000016a6a, + 0x16ad000016aee, + 0x16af000016af5, + 0x16b0000016b37, + 0x16b4000016b44, + 0x16b5000016b5a, + 0x16b6300016b78, + 0x16b7d00016b90, + 0x16f0000016f45, + 0x16f5000016f7f, + 0x16f8f00016fa0, + 0x16fe000016fe2, + 0x17000000187ed, + 0x1880000018af3, + 0x1b0000001b11f, + 0x1b1700001b2fc, + 0x1bc000001bc6b, + 0x1bc700001bc7d, + 0x1bc800001bc89, + 0x1bc900001bc9a, + 0x1bc9d0001bc9f, + 0x1da000001da37, + 0x1da3b0001da6d, + 0x1da750001da76, + 0x1da840001da85, + 0x1da9b0001daa0, + 0x1daa10001dab0, + 0x1e0000001e007, + 0x1e0080001e019, + 0x1e01b0001e022, + 0x1e0230001e025, + 0x1e0260001e02b, + 0x1e8000001e8c5, + 0x1e8d00001e8d7, + 0x1e9220001e94b, + 0x1e9500001e95a, + 0x200000002a6d7, + 0x2a7000002b735, + 0x2b7400002b81e, + 0x2b8200002cea2, + 0x2ceb00002ebe1, + ), + 'CONTEXTJ': ( + 0x200c0000200e, + ), + 'CONTEXTO': ( + 0xb7000000b8, + 0x37500000376, + 0x5f3000005f5, + 0x6600000066a, + 0x6f0000006fa, + 0x30fb000030fc, + ), +} diff --git a/env/lib/python3.6/site-packages/pip/_vendor/idna/intranges.py 
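The PVALID/CONTEXTJ/CONTEXTO entries above pack each half-open codepoint range into a single integer, (start << 32) | end (for example 0x2d0000002e covers only U+002D, and 0x300000003a covers U+0030..U+0039); the vendored idna.intranges module added next decodes these values and answers membership queries with a binary search. The following is a minimal, self-contained sketch of that decoding and lookup, not part of the patch itself; the names SAMPLE_PVALID, decode_range, and in_class are illustrative only, chosen to mirror the _decode_range/intranges_contain helpers that appear below.

    # Sketch only: mirrors the packed-range scheme used by idna.idnadata/intranges.
    import bisect

    # Each entry packs a half-open range as (start << 32) | end.
    SAMPLE_PVALID = (
        0x2d0000002e,   # U+002D ('-')
        0x300000003a,   # U+0030..U+0039 ('0'-'9')
        0x610000007b,   # U+0061..U+007A ('a'-'z')
    )

    def decode_range(r):
        # Recover (start, end) from a packed 64-bit range value.
        return (r >> 32), (r & ((1 << 32) - 1))

    def in_class(codepoint, ranges):
        # Pack the codepoint as a probe range starting at `codepoint`, then bisect:
        # the only candidates are the range just before the insertion point
        # (its start <= codepoint) and the one at it (its start == codepoint).
        probe = codepoint << 32
        pos = bisect.bisect_left(ranges, probe)
        if pos > 0:
            start, end = decode_range(ranges[pos - 1])
            if start <= codepoint < end:
                return True
        if pos < len(ranges):
            start, _ = decode_range(ranges[pos])
            if start == codepoint:
                return True
        return False

    if __name__ == '__main__':
        assert in_class(ord('-'), SAMPLE_PVALID)      # U+002D is in the sample set
        assert in_class(ord('7'), SAMPLE_PVALID)      # digits 0-9 are in the sample set
        assert not in_class(ord('/'), SAMPLE_PVALID)  # U+002F is not

This is the same trick the full tables rely on: because the packed values sort by their high 32 bits (the range start), a single bisect over thousands of ranges resolves a lookup in O(log n) without ever materialising the individual codepoints.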
b/env/lib/python3.6/site-packages/pip/_vendor/idna/intranges.py new file mode 100644 index 0000000..fa8a735 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/idna/intranges.py @@ -0,0 +1,53 @@ +""" +Given a list of integers, made up of (hopefully) a small number of long runs +of consecutive integers, compute a representation of the form +((start1, end1), (start2, end2) ...). Then answer the question "was x present +in the original list?" in time O(log(# runs)). +""" + +import bisect + +def intranges_from_list(list_): + """Represent a list of integers as a sequence of ranges: + ((start_0, end_0), (start_1, end_1), ...), such that the original + integers are exactly those x such that start_i <= x < end_i for some i. + + Ranges are encoded as single integers (start << 32 | end), not as tuples. + """ + + sorted_list = sorted(list_) + ranges = [] + last_write = -1 + for i in range(len(sorted_list)): + if i+1 < len(sorted_list): + if sorted_list[i] == sorted_list[i+1]-1: + continue + current_range = sorted_list[last_write+1:i+1] + ranges.append(_encode_range(current_range[0], current_range[-1] + 1)) + last_write = i + + return tuple(ranges) + +def _encode_range(start, end): + return (start << 32) | end + +def _decode_range(r): + return (r >> 32), (r & ((1 << 32) - 1)) + + +def intranges_contain(int_, ranges): + """Determine if `int_` falls into one of the ranges in `ranges`.""" + tuple_ = _encode_range(int_, 0) + pos = bisect.bisect_left(ranges, tuple_) + # we could be immediately ahead of a tuple (start, end) + # with start < int_ <= end + if pos > 0: + left, right = _decode_range(ranges[pos-1]) + if left <= int_ < right: + return True + # or we could be immediately behind a tuple (int_, end) + if pos < len(ranges): + left, _ = _decode_range(ranges[pos]) + if left == int_: + return True + return False diff --git a/env/lib/python3.6/site-packages/pip/_vendor/idna/package_data.py b/env/lib/python3.6/site-packages/pip/_vendor/idna/package_data.py new file mode 100644 index 0000000..39c192b --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/idna/package_data.py @@ -0,0 +1,2 @@ +__version__ = '2.7' + diff --git a/env/lib/python3.6/site-packages/pip/_vendor/idna/uts46data.py b/env/lib/python3.6/site-packages/pip/_vendor/idna/uts46data.py new file mode 100644 index 0000000..79731cb --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/idna/uts46data.py @@ -0,0 +1,8179 @@ +# This file is automatically generated by tools/idna-data +# vim: set fileencoding=utf-8 : + +"""IDNA Mapping Table from UTS46.""" + + +__version__ = "10.0.0" +def _seg_0(): + return [ + (0x0, '3'), + (0x1, '3'), + (0x2, '3'), + (0x3, '3'), + (0x4, '3'), + (0x5, '3'), + (0x6, '3'), + (0x7, '3'), + (0x8, '3'), + (0x9, '3'), + (0xA, '3'), + (0xB, '3'), + (0xC, '3'), + (0xD, '3'), + (0xE, '3'), + (0xF, '3'), + (0x10, '3'), + (0x11, '3'), + (0x12, '3'), + (0x13, '3'), + (0x14, '3'), + (0x15, '3'), + (0x16, '3'), + (0x17, '3'), + (0x18, '3'), + (0x19, '3'), + (0x1A, '3'), + (0x1B, '3'), + (0x1C, '3'), + (0x1D, '3'), + (0x1E, '3'), + (0x1F, '3'), + (0x20, '3'), + (0x21, '3'), + (0x22, '3'), + (0x23, '3'), + (0x24, '3'), + (0x25, '3'), + (0x26, '3'), + (0x27, '3'), + (0x28, '3'), + (0x29, '3'), + (0x2A, '3'), + (0x2B, '3'), + (0x2C, '3'), + (0x2D, 'V'), + (0x2E, 'V'), + (0x2F, '3'), + (0x30, 'V'), + (0x31, 'V'), + (0x32, 'V'), + (0x33, 'V'), + (0x34, 'V'), + (0x35, 'V'), + (0x36, 'V'), + (0x37, 'V'), + (0x38, 'V'), + (0x39, 'V'), + (0x3A, '3'), + (0x3B, '3'), + (0x3C, '3'), + (0x3D, '3'), + (0x3E, '3'), 
+ (0x3F, '3'), + (0x40, '3'), + (0x41, 'M', u'a'), + (0x42, 'M', u'b'), + (0x43, 'M', u'c'), + (0x44, 'M', u'd'), + (0x45, 'M', u'e'), + (0x46, 'M', u'f'), + (0x47, 'M', u'g'), + (0x48, 'M', u'h'), + (0x49, 'M', u'i'), + (0x4A, 'M', u'j'), + (0x4B, 'M', u'k'), + (0x4C, 'M', u'l'), + (0x4D, 'M', u'm'), + (0x4E, 'M', u'n'), + (0x4F, 'M', u'o'), + (0x50, 'M', u'p'), + (0x51, 'M', u'q'), + (0x52, 'M', u'r'), + (0x53, 'M', u's'), + (0x54, 'M', u't'), + (0x55, 'M', u'u'), + (0x56, 'M', u'v'), + (0x57, 'M', u'w'), + (0x58, 'M', u'x'), + (0x59, 'M', u'y'), + (0x5A, 'M', u'z'), + (0x5B, '3'), + (0x5C, '3'), + (0x5D, '3'), + (0x5E, '3'), + (0x5F, '3'), + (0x60, '3'), + (0x61, 'V'), + (0x62, 'V'), + (0x63, 'V'), + ] + +def _seg_1(): + return [ + (0x64, 'V'), + (0x65, 'V'), + (0x66, 'V'), + (0x67, 'V'), + (0x68, 'V'), + (0x69, 'V'), + (0x6A, 'V'), + (0x6B, 'V'), + (0x6C, 'V'), + (0x6D, 'V'), + (0x6E, 'V'), + (0x6F, 'V'), + (0x70, 'V'), + (0x71, 'V'), + (0x72, 'V'), + (0x73, 'V'), + (0x74, 'V'), + (0x75, 'V'), + (0x76, 'V'), + (0x77, 'V'), + (0x78, 'V'), + (0x79, 'V'), + (0x7A, 'V'), + (0x7B, '3'), + (0x7C, '3'), + (0x7D, '3'), + (0x7E, '3'), + (0x7F, '3'), + (0x80, 'X'), + (0x81, 'X'), + (0x82, 'X'), + (0x83, 'X'), + (0x84, 'X'), + (0x85, 'X'), + (0x86, 'X'), + (0x87, 'X'), + (0x88, 'X'), + (0x89, 'X'), + (0x8A, 'X'), + (0x8B, 'X'), + (0x8C, 'X'), + (0x8D, 'X'), + (0x8E, 'X'), + (0x8F, 'X'), + (0x90, 'X'), + (0x91, 'X'), + (0x92, 'X'), + (0x93, 'X'), + (0x94, 'X'), + (0x95, 'X'), + (0x96, 'X'), + (0x97, 'X'), + (0x98, 'X'), + (0x99, 'X'), + (0x9A, 'X'), + (0x9B, 'X'), + (0x9C, 'X'), + (0x9D, 'X'), + (0x9E, 'X'), + (0x9F, 'X'), + (0xA0, '3', u' '), + (0xA1, 'V'), + (0xA2, 'V'), + (0xA3, 'V'), + (0xA4, 'V'), + (0xA5, 'V'), + (0xA6, 'V'), + (0xA7, 'V'), + (0xA8, '3', u' ̈'), + (0xA9, 'V'), + (0xAA, 'M', u'a'), + (0xAB, 'V'), + (0xAC, 'V'), + (0xAD, 'I'), + (0xAE, 'V'), + (0xAF, '3', u' ̄'), + (0xB0, 'V'), + (0xB1, 'V'), + (0xB2, 'M', u'2'), + (0xB3, 'M', u'3'), + (0xB4, '3', u' ́'), + (0xB5, 'M', u'μ'), + (0xB6, 'V'), + (0xB7, 'V'), + (0xB8, '3', u' ̧'), + (0xB9, 'M', u'1'), + (0xBA, 'M', u'o'), + (0xBB, 'V'), + (0xBC, 'M', u'1⁄4'), + (0xBD, 'M', u'1⁄2'), + (0xBE, 'M', u'3⁄4'), + (0xBF, 'V'), + (0xC0, 'M', u'à'), + (0xC1, 'M', u'á'), + (0xC2, 'M', u'â'), + (0xC3, 'M', u'ã'), + (0xC4, 'M', u'ä'), + (0xC5, 'M', u'å'), + (0xC6, 'M', u'æ'), + (0xC7, 'M', u'ç'), + ] + +def _seg_2(): + return [ + (0xC8, 'M', u'è'), + (0xC9, 'M', u'é'), + (0xCA, 'M', u'ê'), + (0xCB, 'M', u'ë'), + (0xCC, 'M', u'ì'), + (0xCD, 'M', u'í'), + (0xCE, 'M', u'î'), + (0xCF, 'M', u'ï'), + (0xD0, 'M', u'ð'), + (0xD1, 'M', u'ñ'), + (0xD2, 'M', u'ò'), + (0xD3, 'M', u'ó'), + (0xD4, 'M', u'ô'), + (0xD5, 'M', u'õ'), + (0xD6, 'M', u'ö'), + (0xD7, 'V'), + (0xD8, 'M', u'ø'), + (0xD9, 'M', u'ù'), + (0xDA, 'M', u'ú'), + (0xDB, 'M', u'û'), + (0xDC, 'M', u'ü'), + (0xDD, 'M', u'ý'), + (0xDE, 'M', u'þ'), + (0xDF, 'D', u'ss'), + (0xE0, 'V'), + (0xE1, 'V'), + (0xE2, 'V'), + (0xE3, 'V'), + (0xE4, 'V'), + (0xE5, 'V'), + (0xE6, 'V'), + (0xE7, 'V'), + (0xE8, 'V'), + (0xE9, 'V'), + (0xEA, 'V'), + (0xEB, 'V'), + (0xEC, 'V'), + (0xED, 'V'), + (0xEE, 'V'), + (0xEF, 'V'), + (0xF0, 'V'), + (0xF1, 'V'), + (0xF2, 'V'), + (0xF3, 'V'), + (0xF4, 'V'), + (0xF5, 'V'), + (0xF6, 'V'), + (0xF7, 'V'), + (0xF8, 'V'), + (0xF9, 'V'), + (0xFA, 'V'), + (0xFB, 'V'), + (0xFC, 'V'), + (0xFD, 'V'), + (0xFE, 'V'), + (0xFF, 'V'), + (0x100, 'M', u'ā'), + (0x101, 'V'), + (0x102, 'M', u'ă'), + (0x103, 'V'), + (0x104, 'M', u'ą'), + (0x105, 'V'), + (0x106, 'M', u'ć'), + (0x107, 'V'), + 
(0x108, 'M', u'ĉ'), + (0x109, 'V'), + (0x10A, 'M', u'ċ'), + (0x10B, 'V'), + (0x10C, 'M', u'č'), + (0x10D, 'V'), + (0x10E, 'M', u'ď'), + (0x10F, 'V'), + (0x110, 'M', u'đ'), + (0x111, 'V'), + (0x112, 'M', u'ē'), + (0x113, 'V'), + (0x114, 'M', u'ĕ'), + (0x115, 'V'), + (0x116, 'M', u'ė'), + (0x117, 'V'), + (0x118, 'M', u'ę'), + (0x119, 'V'), + (0x11A, 'M', u'ě'), + (0x11B, 'V'), + (0x11C, 'M', u'ĝ'), + (0x11D, 'V'), + (0x11E, 'M', u'ğ'), + (0x11F, 'V'), + (0x120, 'M', u'ġ'), + (0x121, 'V'), + (0x122, 'M', u'ģ'), + (0x123, 'V'), + (0x124, 'M', u'ĥ'), + (0x125, 'V'), + (0x126, 'M', u'ħ'), + (0x127, 'V'), + (0x128, 'M', u'ĩ'), + (0x129, 'V'), + (0x12A, 'M', u'ī'), + (0x12B, 'V'), + ] + +def _seg_3(): + return [ + (0x12C, 'M', u'ĭ'), + (0x12D, 'V'), + (0x12E, 'M', u'į'), + (0x12F, 'V'), + (0x130, 'M', u'i̇'), + (0x131, 'V'), + (0x132, 'M', u'ij'), + (0x134, 'M', u'ĵ'), + (0x135, 'V'), + (0x136, 'M', u'ķ'), + (0x137, 'V'), + (0x139, 'M', u'ĺ'), + (0x13A, 'V'), + (0x13B, 'M', u'ļ'), + (0x13C, 'V'), + (0x13D, 'M', u'ľ'), + (0x13E, 'V'), + (0x13F, 'M', u'l·'), + (0x141, 'M', u'ł'), + (0x142, 'V'), + (0x143, 'M', u'ń'), + (0x144, 'V'), + (0x145, 'M', u'ņ'), + (0x146, 'V'), + (0x147, 'M', u'ň'), + (0x148, 'V'), + (0x149, 'M', u'ʼn'), + (0x14A, 'M', u'ŋ'), + (0x14B, 'V'), + (0x14C, 'M', u'ō'), + (0x14D, 'V'), + (0x14E, 'M', u'ŏ'), + (0x14F, 'V'), + (0x150, 'M', u'ő'), + (0x151, 'V'), + (0x152, 'M', u'œ'), + (0x153, 'V'), + (0x154, 'M', u'ŕ'), + (0x155, 'V'), + (0x156, 'M', u'ŗ'), + (0x157, 'V'), + (0x158, 'M', u'ř'), + (0x159, 'V'), + (0x15A, 'M', u'ś'), + (0x15B, 'V'), + (0x15C, 'M', u'ŝ'), + (0x15D, 'V'), + (0x15E, 'M', u'ş'), + (0x15F, 'V'), + (0x160, 'M', u'š'), + (0x161, 'V'), + (0x162, 'M', u'ţ'), + (0x163, 'V'), + (0x164, 'M', u'ť'), + (0x165, 'V'), + (0x166, 'M', u'ŧ'), + (0x167, 'V'), + (0x168, 'M', u'ũ'), + (0x169, 'V'), + (0x16A, 'M', u'ū'), + (0x16B, 'V'), + (0x16C, 'M', u'ŭ'), + (0x16D, 'V'), + (0x16E, 'M', u'ů'), + (0x16F, 'V'), + (0x170, 'M', u'ű'), + (0x171, 'V'), + (0x172, 'M', u'ų'), + (0x173, 'V'), + (0x174, 'M', u'ŵ'), + (0x175, 'V'), + (0x176, 'M', u'ŷ'), + (0x177, 'V'), + (0x178, 'M', u'ÿ'), + (0x179, 'M', u'ź'), + (0x17A, 'V'), + (0x17B, 'M', u'ż'), + (0x17C, 'V'), + (0x17D, 'M', u'ž'), + (0x17E, 'V'), + (0x17F, 'M', u's'), + (0x180, 'V'), + (0x181, 'M', u'ɓ'), + (0x182, 'M', u'ƃ'), + (0x183, 'V'), + (0x184, 'M', u'ƅ'), + (0x185, 'V'), + (0x186, 'M', u'ɔ'), + (0x187, 'M', u'ƈ'), + (0x188, 'V'), + (0x189, 'M', u'ɖ'), + (0x18A, 'M', u'ɗ'), + (0x18B, 'M', u'ƌ'), + (0x18C, 'V'), + (0x18E, 'M', u'ǝ'), + (0x18F, 'M', u'ə'), + (0x190, 'M', u'ɛ'), + (0x191, 'M', u'ƒ'), + (0x192, 'V'), + (0x193, 'M', u'ɠ'), + ] + +def _seg_4(): + return [ + (0x194, 'M', u'ɣ'), + (0x195, 'V'), + (0x196, 'M', u'ɩ'), + (0x197, 'M', u'ɨ'), + (0x198, 'M', u'ƙ'), + (0x199, 'V'), + (0x19C, 'M', u'ɯ'), + (0x19D, 'M', u'ɲ'), + (0x19E, 'V'), + (0x19F, 'M', u'ɵ'), + (0x1A0, 'M', u'ơ'), + (0x1A1, 'V'), + (0x1A2, 'M', u'ƣ'), + (0x1A3, 'V'), + (0x1A4, 'M', u'ƥ'), + (0x1A5, 'V'), + (0x1A6, 'M', u'ʀ'), + (0x1A7, 'M', u'ƨ'), + (0x1A8, 'V'), + (0x1A9, 'M', u'ʃ'), + (0x1AA, 'V'), + (0x1AC, 'M', u'ƭ'), + (0x1AD, 'V'), + (0x1AE, 'M', u'ʈ'), + (0x1AF, 'M', u'ư'), + (0x1B0, 'V'), + (0x1B1, 'M', u'ʊ'), + (0x1B2, 'M', u'ʋ'), + (0x1B3, 'M', u'ƴ'), + (0x1B4, 'V'), + (0x1B5, 'M', u'ƶ'), + (0x1B6, 'V'), + (0x1B7, 'M', u'ʒ'), + (0x1B8, 'M', u'ƹ'), + (0x1B9, 'V'), + (0x1BC, 'M', u'ƽ'), + (0x1BD, 'V'), + (0x1C4, 'M', u'dž'), + (0x1C7, 'M', u'lj'), + (0x1CA, 'M', u'nj'), + (0x1CD, 'M', u'ǎ'), + (0x1CE, 'V'), + (0x1CF, 'M', u'ǐ'), + (0x1D0, 
'V'), + (0x1D1, 'M', u'ǒ'), + (0x1D2, 'V'), + (0x1D3, 'M', u'ǔ'), + (0x1D4, 'V'), + (0x1D5, 'M', u'ǖ'), + (0x1D6, 'V'), + (0x1D7, 'M', u'ǘ'), + (0x1D8, 'V'), + (0x1D9, 'M', u'ǚ'), + (0x1DA, 'V'), + (0x1DB, 'M', u'ǜ'), + (0x1DC, 'V'), + (0x1DE, 'M', u'ǟ'), + (0x1DF, 'V'), + (0x1E0, 'M', u'ǡ'), + (0x1E1, 'V'), + (0x1E2, 'M', u'ǣ'), + (0x1E3, 'V'), + (0x1E4, 'M', u'ǥ'), + (0x1E5, 'V'), + (0x1E6, 'M', u'ǧ'), + (0x1E7, 'V'), + (0x1E8, 'M', u'ǩ'), + (0x1E9, 'V'), + (0x1EA, 'M', u'ǫ'), + (0x1EB, 'V'), + (0x1EC, 'M', u'ǭ'), + (0x1ED, 'V'), + (0x1EE, 'M', u'ǯ'), + (0x1EF, 'V'), + (0x1F1, 'M', u'dz'), + (0x1F4, 'M', u'ǵ'), + (0x1F5, 'V'), + (0x1F6, 'M', u'ƕ'), + (0x1F7, 'M', u'ƿ'), + (0x1F8, 'M', u'ǹ'), + (0x1F9, 'V'), + (0x1FA, 'M', u'ǻ'), + (0x1FB, 'V'), + (0x1FC, 'M', u'ǽ'), + (0x1FD, 'V'), + (0x1FE, 'M', u'ǿ'), + (0x1FF, 'V'), + (0x200, 'M', u'ȁ'), + (0x201, 'V'), + (0x202, 'M', u'ȃ'), + (0x203, 'V'), + (0x204, 'M', u'ȅ'), + (0x205, 'V'), + (0x206, 'M', u'ȇ'), + (0x207, 'V'), + (0x208, 'M', u'ȉ'), + (0x209, 'V'), + (0x20A, 'M', u'ȋ'), + (0x20B, 'V'), + (0x20C, 'M', u'ȍ'), + ] + +def _seg_5(): + return [ + (0x20D, 'V'), + (0x20E, 'M', u'ȏ'), + (0x20F, 'V'), + (0x210, 'M', u'ȑ'), + (0x211, 'V'), + (0x212, 'M', u'ȓ'), + (0x213, 'V'), + (0x214, 'M', u'ȕ'), + (0x215, 'V'), + (0x216, 'M', u'ȗ'), + (0x217, 'V'), + (0x218, 'M', u'ș'), + (0x219, 'V'), + (0x21A, 'M', u'ț'), + (0x21B, 'V'), + (0x21C, 'M', u'ȝ'), + (0x21D, 'V'), + (0x21E, 'M', u'ȟ'), + (0x21F, 'V'), + (0x220, 'M', u'ƞ'), + (0x221, 'V'), + (0x222, 'M', u'ȣ'), + (0x223, 'V'), + (0x224, 'M', u'ȥ'), + (0x225, 'V'), + (0x226, 'M', u'ȧ'), + (0x227, 'V'), + (0x228, 'M', u'ȩ'), + (0x229, 'V'), + (0x22A, 'M', u'ȫ'), + (0x22B, 'V'), + (0x22C, 'M', u'ȭ'), + (0x22D, 'V'), + (0x22E, 'M', u'ȯ'), + (0x22F, 'V'), + (0x230, 'M', u'ȱ'), + (0x231, 'V'), + (0x232, 'M', u'ȳ'), + (0x233, 'V'), + (0x23A, 'M', u'ⱥ'), + (0x23B, 'M', u'ȼ'), + (0x23C, 'V'), + (0x23D, 'M', u'ƚ'), + (0x23E, 'M', u'ⱦ'), + (0x23F, 'V'), + (0x241, 'M', u'ɂ'), + (0x242, 'V'), + (0x243, 'M', u'ƀ'), + (0x244, 'M', u'ʉ'), + (0x245, 'M', u'ʌ'), + (0x246, 'M', u'ɇ'), + (0x247, 'V'), + (0x248, 'M', u'ɉ'), + (0x249, 'V'), + (0x24A, 'M', u'ɋ'), + (0x24B, 'V'), + (0x24C, 'M', u'ɍ'), + (0x24D, 'V'), + (0x24E, 'M', u'ɏ'), + (0x24F, 'V'), + (0x2B0, 'M', u'h'), + (0x2B1, 'M', u'ɦ'), + (0x2B2, 'M', u'j'), + (0x2B3, 'M', u'r'), + (0x2B4, 'M', u'ɹ'), + (0x2B5, 'M', u'ɻ'), + (0x2B6, 'M', u'ʁ'), + (0x2B7, 'M', u'w'), + (0x2B8, 'M', u'y'), + (0x2B9, 'V'), + (0x2D8, '3', u' ̆'), + (0x2D9, '3', u' ̇'), + (0x2DA, '3', u' ̊'), + (0x2DB, '3', u' ̨'), + (0x2DC, '3', u' ̃'), + (0x2DD, '3', u' ̋'), + (0x2DE, 'V'), + (0x2E0, 'M', u'ɣ'), + (0x2E1, 'M', u'l'), + (0x2E2, 'M', u's'), + (0x2E3, 'M', u'x'), + (0x2E4, 'M', u'ʕ'), + (0x2E5, 'V'), + (0x340, 'M', u'̀'), + (0x341, 'M', u'́'), + (0x342, 'V'), + (0x343, 'M', u'̓'), + (0x344, 'M', u'̈́'), + (0x345, 'M', u'ι'), + (0x346, 'V'), + (0x34F, 'I'), + (0x350, 'V'), + (0x370, 'M', u'ͱ'), + (0x371, 'V'), + (0x372, 'M', u'ͳ'), + (0x373, 'V'), + (0x374, 'M', u'ʹ'), + (0x375, 'V'), + (0x376, 'M', u'ͷ'), + (0x377, 'V'), + ] + +def _seg_6(): + return [ + (0x378, 'X'), + (0x37A, '3', u' ι'), + (0x37B, 'V'), + (0x37E, '3', u';'), + (0x37F, 'M', u'ϳ'), + (0x380, 'X'), + (0x384, '3', u' ́'), + (0x385, '3', u' ̈́'), + (0x386, 'M', u'ά'), + (0x387, 'M', u'·'), + (0x388, 'M', u'έ'), + (0x389, 'M', u'ή'), + (0x38A, 'M', u'ί'), + (0x38B, 'X'), + (0x38C, 'M', u'ό'), + (0x38D, 'X'), + (0x38E, 'M', u'ύ'), + (0x38F, 'M', u'ώ'), + (0x390, 'V'), + (0x391, 'M', u'α'), + (0x392, 'M', u'β'), + 
(0x393, 'M', u'γ'), + (0x394, 'M', u'δ'), + (0x395, 'M', u'ε'), + (0x396, 'M', u'ζ'), + (0x397, 'M', u'η'), + (0x398, 'M', u'θ'), + (0x399, 'M', u'ι'), + (0x39A, 'M', u'κ'), + (0x39B, 'M', u'λ'), + (0x39C, 'M', u'μ'), + (0x39D, 'M', u'ν'), + (0x39E, 'M', u'ξ'), + (0x39F, 'M', u'ο'), + (0x3A0, 'M', u'π'), + (0x3A1, 'M', u'ρ'), + (0x3A2, 'X'), + (0x3A3, 'M', u'σ'), + (0x3A4, 'M', u'τ'), + (0x3A5, 'M', u'υ'), + (0x3A6, 'M', u'φ'), + (0x3A7, 'M', u'χ'), + (0x3A8, 'M', u'ψ'), + (0x3A9, 'M', u'ω'), + (0x3AA, 'M', u'ϊ'), + (0x3AB, 'M', u'ϋ'), + (0x3AC, 'V'), + (0x3C2, 'D', u'σ'), + (0x3C3, 'V'), + (0x3CF, 'M', u'ϗ'), + (0x3D0, 'M', u'β'), + (0x3D1, 'M', u'θ'), + (0x3D2, 'M', u'υ'), + (0x3D3, 'M', u'ύ'), + (0x3D4, 'M', u'ϋ'), + (0x3D5, 'M', u'φ'), + (0x3D6, 'M', u'π'), + (0x3D7, 'V'), + (0x3D8, 'M', u'ϙ'), + (0x3D9, 'V'), + (0x3DA, 'M', u'ϛ'), + (0x3DB, 'V'), + (0x3DC, 'M', u'ϝ'), + (0x3DD, 'V'), + (0x3DE, 'M', u'ϟ'), + (0x3DF, 'V'), + (0x3E0, 'M', u'ϡ'), + (0x3E1, 'V'), + (0x3E2, 'M', u'ϣ'), + (0x3E3, 'V'), + (0x3E4, 'M', u'ϥ'), + (0x3E5, 'V'), + (0x3E6, 'M', u'ϧ'), + (0x3E7, 'V'), + (0x3E8, 'M', u'ϩ'), + (0x3E9, 'V'), + (0x3EA, 'M', u'ϫ'), + (0x3EB, 'V'), + (0x3EC, 'M', u'ϭ'), + (0x3ED, 'V'), + (0x3EE, 'M', u'ϯ'), + (0x3EF, 'V'), + (0x3F0, 'M', u'κ'), + (0x3F1, 'M', u'ρ'), + (0x3F2, 'M', u'σ'), + (0x3F3, 'V'), + (0x3F4, 'M', u'θ'), + (0x3F5, 'M', u'ε'), + (0x3F6, 'V'), + (0x3F7, 'M', u'ϸ'), + (0x3F8, 'V'), + (0x3F9, 'M', u'σ'), + (0x3FA, 'M', u'ϻ'), + (0x3FB, 'V'), + (0x3FD, 'M', u'ͻ'), + (0x3FE, 'M', u'ͼ'), + (0x3FF, 'M', u'ͽ'), + (0x400, 'M', u'ѐ'), + (0x401, 'M', u'ё'), + (0x402, 'M', u'ђ'), + ] + +def _seg_7(): + return [ + (0x403, 'M', u'ѓ'), + (0x404, 'M', u'є'), + (0x405, 'M', u'ѕ'), + (0x406, 'M', u'і'), + (0x407, 'M', u'ї'), + (0x408, 'M', u'ј'), + (0x409, 'M', u'љ'), + (0x40A, 'M', u'њ'), + (0x40B, 'M', u'ћ'), + (0x40C, 'M', u'ќ'), + (0x40D, 'M', u'ѝ'), + (0x40E, 'M', u'ў'), + (0x40F, 'M', u'џ'), + (0x410, 'M', u'а'), + (0x411, 'M', u'б'), + (0x412, 'M', u'в'), + (0x413, 'M', u'г'), + (0x414, 'M', u'д'), + (0x415, 'M', u'е'), + (0x416, 'M', u'ж'), + (0x417, 'M', u'з'), + (0x418, 'M', u'и'), + (0x419, 'M', u'й'), + (0x41A, 'M', u'к'), + (0x41B, 'M', u'л'), + (0x41C, 'M', u'м'), + (0x41D, 'M', u'н'), + (0x41E, 'M', u'о'), + (0x41F, 'M', u'п'), + (0x420, 'M', u'р'), + (0x421, 'M', u'с'), + (0x422, 'M', u'т'), + (0x423, 'M', u'у'), + (0x424, 'M', u'ф'), + (0x425, 'M', u'х'), + (0x426, 'M', u'ц'), + (0x427, 'M', u'ч'), + (0x428, 'M', u'ш'), + (0x429, 'M', u'щ'), + (0x42A, 'M', u'ъ'), + (0x42B, 'M', u'ы'), + (0x42C, 'M', u'ь'), + (0x42D, 'M', u'э'), + (0x42E, 'M', u'ю'), + (0x42F, 'M', u'я'), + (0x430, 'V'), + (0x460, 'M', u'ѡ'), + (0x461, 'V'), + (0x462, 'M', u'ѣ'), + (0x463, 'V'), + (0x464, 'M', u'ѥ'), + (0x465, 'V'), + (0x466, 'M', u'ѧ'), + (0x467, 'V'), + (0x468, 'M', u'ѩ'), + (0x469, 'V'), + (0x46A, 'M', u'ѫ'), + (0x46B, 'V'), + (0x46C, 'M', u'ѭ'), + (0x46D, 'V'), + (0x46E, 'M', u'ѯ'), + (0x46F, 'V'), + (0x470, 'M', u'ѱ'), + (0x471, 'V'), + (0x472, 'M', u'ѳ'), + (0x473, 'V'), + (0x474, 'M', u'ѵ'), + (0x475, 'V'), + (0x476, 'M', u'ѷ'), + (0x477, 'V'), + (0x478, 'M', u'ѹ'), + (0x479, 'V'), + (0x47A, 'M', u'ѻ'), + (0x47B, 'V'), + (0x47C, 'M', u'ѽ'), + (0x47D, 'V'), + (0x47E, 'M', u'ѿ'), + (0x47F, 'V'), + (0x480, 'M', u'ҁ'), + (0x481, 'V'), + (0x48A, 'M', u'ҋ'), + (0x48B, 'V'), + (0x48C, 'M', u'ҍ'), + (0x48D, 'V'), + (0x48E, 'M', u'ҏ'), + (0x48F, 'V'), + (0x490, 'M', u'ґ'), + (0x491, 'V'), + (0x492, 'M', u'ғ'), + (0x493, 'V'), + (0x494, 'M', u'ҕ'), + (0x495, 'V'), + (0x496, 'M', u'җ'), + 
(0x497, 'V'), + (0x498, 'M', u'ҙ'), + (0x499, 'V'), + (0x49A, 'M', u'қ'), + (0x49B, 'V'), + (0x49C, 'M', u'ҝ'), + (0x49D, 'V'), + ] + +def _seg_8(): + return [ + (0x49E, 'M', u'ҟ'), + (0x49F, 'V'), + (0x4A0, 'M', u'ҡ'), + (0x4A1, 'V'), + (0x4A2, 'M', u'ң'), + (0x4A3, 'V'), + (0x4A4, 'M', u'ҥ'), + (0x4A5, 'V'), + (0x4A6, 'M', u'ҧ'), + (0x4A7, 'V'), + (0x4A8, 'M', u'ҩ'), + (0x4A9, 'V'), + (0x4AA, 'M', u'ҫ'), + (0x4AB, 'V'), + (0x4AC, 'M', u'ҭ'), + (0x4AD, 'V'), + (0x4AE, 'M', u'ү'), + (0x4AF, 'V'), + (0x4B0, 'M', u'ұ'), + (0x4B1, 'V'), + (0x4B2, 'M', u'ҳ'), + (0x4B3, 'V'), + (0x4B4, 'M', u'ҵ'), + (0x4B5, 'V'), + (0x4B6, 'M', u'ҷ'), + (0x4B7, 'V'), + (0x4B8, 'M', u'ҹ'), + (0x4B9, 'V'), + (0x4BA, 'M', u'һ'), + (0x4BB, 'V'), + (0x4BC, 'M', u'ҽ'), + (0x4BD, 'V'), + (0x4BE, 'M', u'ҿ'), + (0x4BF, 'V'), + (0x4C0, 'X'), + (0x4C1, 'M', u'ӂ'), + (0x4C2, 'V'), + (0x4C3, 'M', u'ӄ'), + (0x4C4, 'V'), + (0x4C5, 'M', u'ӆ'), + (0x4C6, 'V'), + (0x4C7, 'M', u'ӈ'), + (0x4C8, 'V'), + (0x4C9, 'M', u'ӊ'), + (0x4CA, 'V'), + (0x4CB, 'M', u'ӌ'), + (0x4CC, 'V'), + (0x4CD, 'M', u'ӎ'), + (0x4CE, 'V'), + (0x4D0, 'M', u'ӑ'), + (0x4D1, 'V'), + (0x4D2, 'M', u'ӓ'), + (0x4D3, 'V'), + (0x4D4, 'M', u'ӕ'), + (0x4D5, 'V'), + (0x4D6, 'M', u'ӗ'), + (0x4D7, 'V'), + (0x4D8, 'M', u'ә'), + (0x4D9, 'V'), + (0x4DA, 'M', u'ӛ'), + (0x4DB, 'V'), + (0x4DC, 'M', u'ӝ'), + (0x4DD, 'V'), + (0x4DE, 'M', u'ӟ'), + (0x4DF, 'V'), + (0x4E0, 'M', u'ӡ'), + (0x4E1, 'V'), + (0x4E2, 'M', u'ӣ'), + (0x4E3, 'V'), + (0x4E4, 'M', u'ӥ'), + (0x4E5, 'V'), + (0x4E6, 'M', u'ӧ'), + (0x4E7, 'V'), + (0x4E8, 'M', u'ө'), + (0x4E9, 'V'), + (0x4EA, 'M', u'ӫ'), + (0x4EB, 'V'), + (0x4EC, 'M', u'ӭ'), + (0x4ED, 'V'), + (0x4EE, 'M', u'ӯ'), + (0x4EF, 'V'), + (0x4F0, 'M', u'ӱ'), + (0x4F1, 'V'), + (0x4F2, 'M', u'ӳ'), + (0x4F3, 'V'), + (0x4F4, 'M', u'ӵ'), + (0x4F5, 'V'), + (0x4F6, 'M', u'ӷ'), + (0x4F7, 'V'), + (0x4F8, 'M', u'ӹ'), + (0x4F9, 'V'), + (0x4FA, 'M', u'ӻ'), + (0x4FB, 'V'), + (0x4FC, 'M', u'ӽ'), + (0x4FD, 'V'), + (0x4FE, 'M', u'ӿ'), + (0x4FF, 'V'), + (0x500, 'M', u'ԁ'), + (0x501, 'V'), + (0x502, 'M', u'ԃ'), + ] + +def _seg_9(): + return [ + (0x503, 'V'), + (0x504, 'M', u'ԅ'), + (0x505, 'V'), + (0x506, 'M', u'ԇ'), + (0x507, 'V'), + (0x508, 'M', u'ԉ'), + (0x509, 'V'), + (0x50A, 'M', u'ԋ'), + (0x50B, 'V'), + (0x50C, 'M', u'ԍ'), + (0x50D, 'V'), + (0x50E, 'M', u'ԏ'), + (0x50F, 'V'), + (0x510, 'M', u'ԑ'), + (0x511, 'V'), + (0x512, 'M', u'ԓ'), + (0x513, 'V'), + (0x514, 'M', u'ԕ'), + (0x515, 'V'), + (0x516, 'M', u'ԗ'), + (0x517, 'V'), + (0x518, 'M', u'ԙ'), + (0x519, 'V'), + (0x51A, 'M', u'ԛ'), + (0x51B, 'V'), + (0x51C, 'M', u'ԝ'), + (0x51D, 'V'), + (0x51E, 'M', u'ԟ'), + (0x51F, 'V'), + (0x520, 'M', u'ԡ'), + (0x521, 'V'), + (0x522, 'M', u'ԣ'), + (0x523, 'V'), + (0x524, 'M', u'ԥ'), + (0x525, 'V'), + (0x526, 'M', u'ԧ'), + (0x527, 'V'), + (0x528, 'M', u'ԩ'), + (0x529, 'V'), + (0x52A, 'M', u'ԫ'), + (0x52B, 'V'), + (0x52C, 'M', u'ԭ'), + (0x52D, 'V'), + (0x52E, 'M', u'ԯ'), + (0x52F, 'V'), + (0x530, 'X'), + (0x531, 'M', u'ա'), + (0x532, 'M', u'բ'), + (0x533, 'M', u'գ'), + (0x534, 'M', u'դ'), + (0x535, 'M', u'ե'), + (0x536, 'M', u'զ'), + (0x537, 'M', u'է'), + (0x538, 'M', u'ը'), + (0x539, 'M', u'թ'), + (0x53A, 'M', u'ժ'), + (0x53B, 'M', u'ի'), + (0x53C, 'M', u'լ'), + (0x53D, 'M', u'խ'), + (0x53E, 'M', u'ծ'), + (0x53F, 'M', u'կ'), + (0x540, 'M', u'հ'), + (0x541, 'M', u'ձ'), + (0x542, 'M', u'ղ'), + (0x543, 'M', u'ճ'), + (0x544, 'M', u'մ'), + (0x545, 'M', u'յ'), + (0x546, 'M', u'ն'), + (0x547, 'M', u'շ'), + (0x548, 'M', u'ո'), + (0x549, 'M', u'չ'), + (0x54A, 'M', u'պ'), + (0x54B, 'M', u'ջ'), 
+ (0x54C, 'M', u'ռ'), + (0x54D, 'M', u'ս'), + (0x54E, 'M', u'վ'), + (0x54F, 'M', u'տ'), + (0x550, 'M', u'ր'), + (0x551, 'M', u'ց'), + (0x552, 'M', u'ւ'), + (0x553, 'M', u'փ'), + (0x554, 'M', u'ք'), + (0x555, 'M', u'օ'), + (0x556, 'M', u'ֆ'), + (0x557, 'X'), + (0x559, 'V'), + (0x560, 'X'), + (0x561, 'V'), + (0x587, 'M', u'եւ'), + (0x588, 'X'), + (0x589, 'V'), + (0x58B, 'X'), + (0x58D, 'V'), + (0x590, 'X'), + (0x591, 'V'), + (0x5C8, 'X'), + (0x5D0, 'V'), + (0x5EB, 'X'), + (0x5F0, 'V'), + (0x5F5, 'X'), + ] + +def _seg_10(): + return [ + (0x606, 'V'), + (0x61C, 'X'), + (0x61E, 'V'), + (0x675, 'M', u'اٴ'), + (0x676, 'M', u'وٴ'), + (0x677, 'M', u'ۇٴ'), + (0x678, 'M', u'يٴ'), + (0x679, 'V'), + (0x6DD, 'X'), + (0x6DE, 'V'), + (0x70E, 'X'), + (0x710, 'V'), + (0x74B, 'X'), + (0x74D, 'V'), + (0x7B2, 'X'), + (0x7C0, 'V'), + (0x7FB, 'X'), + (0x800, 'V'), + (0x82E, 'X'), + (0x830, 'V'), + (0x83F, 'X'), + (0x840, 'V'), + (0x85C, 'X'), + (0x85E, 'V'), + (0x85F, 'X'), + (0x860, 'V'), + (0x86B, 'X'), + (0x8A0, 'V'), + (0x8B5, 'X'), + (0x8B6, 'V'), + (0x8BE, 'X'), + (0x8D4, 'V'), + (0x8E2, 'X'), + (0x8E3, 'V'), + (0x958, 'M', u'क़'), + (0x959, 'M', u'ख़'), + (0x95A, 'M', u'ग़'), + (0x95B, 'M', u'ज़'), + (0x95C, 'M', u'ड़'), + (0x95D, 'M', u'ढ़'), + (0x95E, 'M', u'फ़'), + (0x95F, 'M', u'य़'), + (0x960, 'V'), + (0x984, 'X'), + (0x985, 'V'), + (0x98D, 'X'), + (0x98F, 'V'), + (0x991, 'X'), + (0x993, 'V'), + (0x9A9, 'X'), + (0x9AA, 'V'), + (0x9B1, 'X'), + (0x9B2, 'V'), + (0x9B3, 'X'), + (0x9B6, 'V'), + (0x9BA, 'X'), + (0x9BC, 'V'), + (0x9C5, 'X'), + (0x9C7, 'V'), + (0x9C9, 'X'), + (0x9CB, 'V'), + (0x9CF, 'X'), + (0x9D7, 'V'), + (0x9D8, 'X'), + (0x9DC, 'M', u'ড়'), + (0x9DD, 'M', u'ঢ়'), + (0x9DE, 'X'), + (0x9DF, 'M', u'য়'), + (0x9E0, 'V'), + (0x9E4, 'X'), + (0x9E6, 'V'), + (0x9FE, 'X'), + (0xA01, 'V'), + (0xA04, 'X'), + (0xA05, 'V'), + (0xA0B, 'X'), + (0xA0F, 'V'), + (0xA11, 'X'), + (0xA13, 'V'), + (0xA29, 'X'), + (0xA2A, 'V'), + (0xA31, 'X'), + (0xA32, 'V'), + (0xA33, 'M', u'ਲ਼'), + (0xA34, 'X'), + (0xA35, 'V'), + (0xA36, 'M', u'ਸ਼'), + (0xA37, 'X'), + (0xA38, 'V'), + (0xA3A, 'X'), + (0xA3C, 'V'), + (0xA3D, 'X'), + (0xA3E, 'V'), + (0xA43, 'X'), + (0xA47, 'V'), + (0xA49, 'X'), + (0xA4B, 'V'), + (0xA4E, 'X'), + (0xA51, 'V'), + (0xA52, 'X'), + ] + +def _seg_11(): + return [ + (0xA59, 'M', u'ਖ਼'), + (0xA5A, 'M', u'ਗ਼'), + (0xA5B, 'M', u'ਜ਼'), + (0xA5C, 'V'), + (0xA5D, 'X'), + (0xA5E, 'M', u'ਫ਼'), + (0xA5F, 'X'), + (0xA66, 'V'), + (0xA76, 'X'), + (0xA81, 'V'), + (0xA84, 'X'), + (0xA85, 'V'), + (0xA8E, 'X'), + (0xA8F, 'V'), + (0xA92, 'X'), + (0xA93, 'V'), + (0xAA9, 'X'), + (0xAAA, 'V'), + (0xAB1, 'X'), + (0xAB2, 'V'), + (0xAB4, 'X'), + (0xAB5, 'V'), + (0xABA, 'X'), + (0xABC, 'V'), + (0xAC6, 'X'), + (0xAC7, 'V'), + (0xACA, 'X'), + (0xACB, 'V'), + (0xACE, 'X'), + (0xAD0, 'V'), + (0xAD1, 'X'), + (0xAE0, 'V'), + (0xAE4, 'X'), + (0xAE6, 'V'), + (0xAF2, 'X'), + (0xAF9, 'V'), + (0xB00, 'X'), + (0xB01, 'V'), + (0xB04, 'X'), + (0xB05, 'V'), + (0xB0D, 'X'), + (0xB0F, 'V'), + (0xB11, 'X'), + (0xB13, 'V'), + (0xB29, 'X'), + (0xB2A, 'V'), + (0xB31, 'X'), + (0xB32, 'V'), + (0xB34, 'X'), + (0xB35, 'V'), + (0xB3A, 'X'), + (0xB3C, 'V'), + (0xB45, 'X'), + (0xB47, 'V'), + (0xB49, 'X'), + (0xB4B, 'V'), + (0xB4E, 'X'), + (0xB56, 'V'), + (0xB58, 'X'), + (0xB5C, 'M', u'ଡ଼'), + (0xB5D, 'M', u'ଢ଼'), + (0xB5E, 'X'), + (0xB5F, 'V'), + (0xB64, 'X'), + (0xB66, 'V'), + (0xB78, 'X'), + (0xB82, 'V'), + (0xB84, 'X'), + (0xB85, 'V'), + (0xB8B, 'X'), + (0xB8E, 'V'), + (0xB91, 'X'), + (0xB92, 'V'), + (0xB96, 'X'), + (0xB99, 'V'), + (0xB9B, 'X'), + 
(0xB9C, 'V'), + (0xB9D, 'X'), + (0xB9E, 'V'), + (0xBA0, 'X'), + (0xBA3, 'V'), + (0xBA5, 'X'), + (0xBA8, 'V'), + (0xBAB, 'X'), + (0xBAE, 'V'), + (0xBBA, 'X'), + (0xBBE, 'V'), + (0xBC3, 'X'), + (0xBC6, 'V'), + (0xBC9, 'X'), + (0xBCA, 'V'), + (0xBCE, 'X'), + (0xBD0, 'V'), + (0xBD1, 'X'), + (0xBD7, 'V'), + (0xBD8, 'X'), + (0xBE6, 'V'), + (0xBFB, 'X'), + (0xC00, 'V'), + (0xC04, 'X'), + ] + +def _seg_12(): + return [ + (0xC05, 'V'), + (0xC0D, 'X'), + (0xC0E, 'V'), + (0xC11, 'X'), + (0xC12, 'V'), + (0xC29, 'X'), + (0xC2A, 'V'), + (0xC3A, 'X'), + (0xC3D, 'V'), + (0xC45, 'X'), + (0xC46, 'V'), + (0xC49, 'X'), + (0xC4A, 'V'), + (0xC4E, 'X'), + (0xC55, 'V'), + (0xC57, 'X'), + (0xC58, 'V'), + (0xC5B, 'X'), + (0xC60, 'V'), + (0xC64, 'X'), + (0xC66, 'V'), + (0xC70, 'X'), + (0xC78, 'V'), + (0xC84, 'X'), + (0xC85, 'V'), + (0xC8D, 'X'), + (0xC8E, 'V'), + (0xC91, 'X'), + (0xC92, 'V'), + (0xCA9, 'X'), + (0xCAA, 'V'), + (0xCB4, 'X'), + (0xCB5, 'V'), + (0xCBA, 'X'), + (0xCBC, 'V'), + (0xCC5, 'X'), + (0xCC6, 'V'), + (0xCC9, 'X'), + (0xCCA, 'V'), + (0xCCE, 'X'), + (0xCD5, 'V'), + (0xCD7, 'X'), + (0xCDE, 'V'), + (0xCDF, 'X'), + (0xCE0, 'V'), + (0xCE4, 'X'), + (0xCE6, 'V'), + (0xCF0, 'X'), + (0xCF1, 'V'), + (0xCF3, 'X'), + (0xD00, 'V'), + (0xD04, 'X'), + (0xD05, 'V'), + (0xD0D, 'X'), + (0xD0E, 'V'), + (0xD11, 'X'), + (0xD12, 'V'), + (0xD45, 'X'), + (0xD46, 'V'), + (0xD49, 'X'), + (0xD4A, 'V'), + (0xD50, 'X'), + (0xD54, 'V'), + (0xD64, 'X'), + (0xD66, 'V'), + (0xD80, 'X'), + (0xD82, 'V'), + (0xD84, 'X'), + (0xD85, 'V'), + (0xD97, 'X'), + (0xD9A, 'V'), + (0xDB2, 'X'), + (0xDB3, 'V'), + (0xDBC, 'X'), + (0xDBD, 'V'), + (0xDBE, 'X'), + (0xDC0, 'V'), + (0xDC7, 'X'), + (0xDCA, 'V'), + (0xDCB, 'X'), + (0xDCF, 'V'), + (0xDD5, 'X'), + (0xDD6, 'V'), + (0xDD7, 'X'), + (0xDD8, 'V'), + (0xDE0, 'X'), + (0xDE6, 'V'), + (0xDF0, 'X'), + (0xDF2, 'V'), + (0xDF5, 'X'), + (0xE01, 'V'), + (0xE33, 'M', u'ํา'), + (0xE34, 'V'), + (0xE3B, 'X'), + (0xE3F, 'V'), + (0xE5C, 'X'), + (0xE81, 'V'), + (0xE83, 'X'), + (0xE84, 'V'), + (0xE85, 'X'), + ] + +def _seg_13(): + return [ + (0xE87, 'V'), + (0xE89, 'X'), + (0xE8A, 'V'), + (0xE8B, 'X'), + (0xE8D, 'V'), + (0xE8E, 'X'), + (0xE94, 'V'), + (0xE98, 'X'), + (0xE99, 'V'), + (0xEA0, 'X'), + (0xEA1, 'V'), + (0xEA4, 'X'), + (0xEA5, 'V'), + (0xEA6, 'X'), + (0xEA7, 'V'), + (0xEA8, 'X'), + (0xEAA, 'V'), + (0xEAC, 'X'), + (0xEAD, 'V'), + (0xEB3, 'M', u'ໍາ'), + (0xEB4, 'V'), + (0xEBA, 'X'), + (0xEBB, 'V'), + (0xEBE, 'X'), + (0xEC0, 'V'), + (0xEC5, 'X'), + (0xEC6, 'V'), + (0xEC7, 'X'), + (0xEC8, 'V'), + (0xECE, 'X'), + (0xED0, 'V'), + (0xEDA, 'X'), + (0xEDC, 'M', u'ຫນ'), + (0xEDD, 'M', u'ຫມ'), + (0xEDE, 'V'), + (0xEE0, 'X'), + (0xF00, 'V'), + (0xF0C, 'M', u'་'), + (0xF0D, 'V'), + (0xF43, 'M', u'གྷ'), + (0xF44, 'V'), + (0xF48, 'X'), + (0xF49, 'V'), + (0xF4D, 'M', u'ཌྷ'), + (0xF4E, 'V'), + (0xF52, 'M', u'དྷ'), + (0xF53, 'V'), + (0xF57, 'M', u'བྷ'), + (0xF58, 'V'), + (0xF5C, 'M', u'ཛྷ'), + (0xF5D, 'V'), + (0xF69, 'M', u'ཀྵ'), + (0xF6A, 'V'), + (0xF6D, 'X'), + (0xF71, 'V'), + (0xF73, 'M', u'ཱི'), + (0xF74, 'V'), + (0xF75, 'M', u'ཱུ'), + (0xF76, 'M', u'ྲྀ'), + (0xF77, 'M', u'ྲཱྀ'), + (0xF78, 'M', u'ླྀ'), + (0xF79, 'M', u'ླཱྀ'), + (0xF7A, 'V'), + (0xF81, 'M', u'ཱྀ'), + (0xF82, 'V'), + (0xF93, 'M', u'ྒྷ'), + (0xF94, 'V'), + (0xF98, 'X'), + (0xF99, 'V'), + (0xF9D, 'M', u'ྜྷ'), + (0xF9E, 'V'), + (0xFA2, 'M', u'ྡྷ'), + (0xFA3, 'V'), + (0xFA7, 'M', u'ྦྷ'), + (0xFA8, 'V'), + (0xFAC, 'M', u'ྫྷ'), + (0xFAD, 'V'), + (0xFB9, 'M', u'ྐྵ'), + (0xFBA, 'V'), + (0xFBD, 'X'), + (0xFBE, 'V'), + (0xFCD, 'X'), + (0xFCE, 'V'), + (0xFDB, 
'X'), + (0x1000, 'V'), + (0x10A0, 'X'), + (0x10C7, 'M', u'ⴧ'), + (0x10C8, 'X'), + (0x10CD, 'M', u'ⴭ'), + (0x10CE, 'X'), + (0x10D0, 'V'), + (0x10FC, 'M', u'ნ'), + (0x10FD, 'V'), + (0x115F, 'X'), + (0x1161, 'V'), + (0x1249, 'X'), + (0x124A, 'V'), + (0x124E, 'X'), + (0x1250, 'V'), + (0x1257, 'X'), + ] + +def _seg_14(): + return [ + (0x1258, 'V'), + (0x1259, 'X'), + (0x125A, 'V'), + (0x125E, 'X'), + (0x1260, 'V'), + (0x1289, 'X'), + (0x128A, 'V'), + (0x128E, 'X'), + (0x1290, 'V'), + (0x12B1, 'X'), + (0x12B2, 'V'), + (0x12B6, 'X'), + (0x12B8, 'V'), + (0x12BF, 'X'), + (0x12C0, 'V'), + (0x12C1, 'X'), + (0x12C2, 'V'), + (0x12C6, 'X'), + (0x12C8, 'V'), + (0x12D7, 'X'), + (0x12D8, 'V'), + (0x1311, 'X'), + (0x1312, 'V'), + (0x1316, 'X'), + (0x1318, 'V'), + (0x135B, 'X'), + (0x135D, 'V'), + (0x137D, 'X'), + (0x1380, 'V'), + (0x139A, 'X'), + (0x13A0, 'V'), + (0x13F6, 'X'), + (0x13F8, 'M', u'Ᏸ'), + (0x13F9, 'M', u'Ᏹ'), + (0x13FA, 'M', u'Ᏺ'), + (0x13FB, 'M', u'Ᏻ'), + (0x13FC, 'M', u'Ᏼ'), + (0x13FD, 'M', u'Ᏽ'), + (0x13FE, 'X'), + (0x1400, 'V'), + (0x1680, 'X'), + (0x1681, 'V'), + (0x169D, 'X'), + (0x16A0, 'V'), + (0x16F9, 'X'), + (0x1700, 'V'), + (0x170D, 'X'), + (0x170E, 'V'), + (0x1715, 'X'), + (0x1720, 'V'), + (0x1737, 'X'), + (0x1740, 'V'), + (0x1754, 'X'), + (0x1760, 'V'), + (0x176D, 'X'), + (0x176E, 'V'), + (0x1771, 'X'), + (0x1772, 'V'), + (0x1774, 'X'), + (0x1780, 'V'), + (0x17B4, 'X'), + (0x17B6, 'V'), + (0x17DE, 'X'), + (0x17E0, 'V'), + (0x17EA, 'X'), + (0x17F0, 'V'), + (0x17FA, 'X'), + (0x1800, 'V'), + (0x1806, 'X'), + (0x1807, 'V'), + (0x180B, 'I'), + (0x180E, 'X'), + (0x1810, 'V'), + (0x181A, 'X'), + (0x1820, 'V'), + (0x1878, 'X'), + (0x1880, 'V'), + (0x18AB, 'X'), + (0x18B0, 'V'), + (0x18F6, 'X'), + (0x1900, 'V'), + (0x191F, 'X'), + (0x1920, 'V'), + (0x192C, 'X'), + (0x1930, 'V'), + (0x193C, 'X'), + (0x1940, 'V'), + (0x1941, 'X'), + (0x1944, 'V'), + (0x196E, 'X'), + (0x1970, 'V'), + (0x1975, 'X'), + (0x1980, 'V'), + (0x19AC, 'X'), + (0x19B0, 'V'), + (0x19CA, 'X'), + (0x19D0, 'V'), + (0x19DB, 'X'), + (0x19DE, 'V'), + (0x1A1C, 'X'), + ] + +def _seg_15(): + return [ + (0x1A1E, 'V'), + (0x1A5F, 'X'), + (0x1A60, 'V'), + (0x1A7D, 'X'), + (0x1A7F, 'V'), + (0x1A8A, 'X'), + (0x1A90, 'V'), + (0x1A9A, 'X'), + (0x1AA0, 'V'), + (0x1AAE, 'X'), + (0x1AB0, 'V'), + (0x1ABF, 'X'), + (0x1B00, 'V'), + (0x1B4C, 'X'), + (0x1B50, 'V'), + (0x1B7D, 'X'), + (0x1B80, 'V'), + (0x1BF4, 'X'), + (0x1BFC, 'V'), + (0x1C38, 'X'), + (0x1C3B, 'V'), + (0x1C4A, 'X'), + (0x1C4D, 'V'), + (0x1C80, 'M', u'в'), + (0x1C81, 'M', u'д'), + (0x1C82, 'M', u'о'), + (0x1C83, 'M', u'с'), + (0x1C84, 'M', u'т'), + (0x1C86, 'M', u'ъ'), + (0x1C87, 'M', u'ѣ'), + (0x1C88, 'M', u'ꙋ'), + (0x1C89, 'X'), + (0x1CC0, 'V'), + (0x1CC8, 'X'), + (0x1CD0, 'V'), + (0x1CFA, 'X'), + (0x1D00, 'V'), + (0x1D2C, 'M', u'a'), + (0x1D2D, 'M', u'æ'), + (0x1D2E, 'M', u'b'), + (0x1D2F, 'V'), + (0x1D30, 'M', u'd'), + (0x1D31, 'M', u'e'), + (0x1D32, 'M', u'ǝ'), + (0x1D33, 'M', u'g'), + (0x1D34, 'M', u'h'), + (0x1D35, 'M', u'i'), + (0x1D36, 'M', u'j'), + (0x1D37, 'M', u'k'), + (0x1D38, 'M', u'l'), + (0x1D39, 'M', u'm'), + (0x1D3A, 'M', u'n'), + (0x1D3B, 'V'), + (0x1D3C, 'M', u'o'), + (0x1D3D, 'M', u'ȣ'), + (0x1D3E, 'M', u'p'), + (0x1D3F, 'M', u'r'), + (0x1D40, 'M', u't'), + (0x1D41, 'M', u'u'), + (0x1D42, 'M', u'w'), + (0x1D43, 'M', u'a'), + (0x1D44, 'M', u'ɐ'), + (0x1D45, 'M', u'ɑ'), + (0x1D46, 'M', u'ᴂ'), + (0x1D47, 'M', u'b'), + (0x1D48, 'M', u'd'), + (0x1D49, 'M', u'e'), + (0x1D4A, 'M', u'ə'), + (0x1D4B, 'M', u'ɛ'), + (0x1D4C, 'M', u'ɜ'), + (0x1D4D, 'M', u'g'), + 
(0x1D4E, 'V'), + (0x1D4F, 'M', u'k'), + (0x1D50, 'M', u'm'), + (0x1D51, 'M', u'ŋ'), + (0x1D52, 'M', u'o'), + (0x1D53, 'M', u'ɔ'), + (0x1D54, 'M', u'ᴖ'), + (0x1D55, 'M', u'ᴗ'), + (0x1D56, 'M', u'p'), + (0x1D57, 'M', u't'), + (0x1D58, 'M', u'u'), + (0x1D59, 'M', u'ᴝ'), + (0x1D5A, 'M', u'ɯ'), + (0x1D5B, 'M', u'v'), + (0x1D5C, 'M', u'ᴥ'), + (0x1D5D, 'M', u'β'), + (0x1D5E, 'M', u'γ'), + (0x1D5F, 'M', u'δ'), + (0x1D60, 'M', u'φ'), + (0x1D61, 'M', u'χ'), + (0x1D62, 'M', u'i'), + (0x1D63, 'M', u'r'), + (0x1D64, 'M', u'u'), + (0x1D65, 'M', u'v'), + (0x1D66, 'M', u'β'), + (0x1D67, 'M', u'γ'), + (0x1D68, 'M', u'ρ'), + (0x1D69, 'M', u'φ'), + (0x1D6A, 'M', u'χ'), + ] + +def _seg_16(): + return [ + (0x1D6B, 'V'), + (0x1D78, 'M', u'н'), + (0x1D79, 'V'), + (0x1D9B, 'M', u'ɒ'), + (0x1D9C, 'M', u'c'), + (0x1D9D, 'M', u'ɕ'), + (0x1D9E, 'M', u'ð'), + (0x1D9F, 'M', u'ɜ'), + (0x1DA0, 'M', u'f'), + (0x1DA1, 'M', u'ɟ'), + (0x1DA2, 'M', u'ɡ'), + (0x1DA3, 'M', u'ɥ'), + (0x1DA4, 'M', u'ɨ'), + (0x1DA5, 'M', u'ɩ'), + (0x1DA6, 'M', u'ɪ'), + (0x1DA7, 'M', u'ᵻ'), + (0x1DA8, 'M', u'ʝ'), + (0x1DA9, 'M', u'ɭ'), + (0x1DAA, 'M', u'ᶅ'), + (0x1DAB, 'M', u'ʟ'), + (0x1DAC, 'M', u'ɱ'), + (0x1DAD, 'M', u'ɰ'), + (0x1DAE, 'M', u'ɲ'), + (0x1DAF, 'M', u'ɳ'), + (0x1DB0, 'M', u'ɴ'), + (0x1DB1, 'M', u'ɵ'), + (0x1DB2, 'M', u'ɸ'), + (0x1DB3, 'M', u'ʂ'), + (0x1DB4, 'M', u'ʃ'), + (0x1DB5, 'M', u'ƫ'), + (0x1DB6, 'M', u'ʉ'), + (0x1DB7, 'M', u'ʊ'), + (0x1DB8, 'M', u'ᴜ'), + (0x1DB9, 'M', u'ʋ'), + (0x1DBA, 'M', u'ʌ'), + (0x1DBB, 'M', u'z'), + (0x1DBC, 'M', u'ʐ'), + (0x1DBD, 'M', u'ʑ'), + (0x1DBE, 'M', u'ʒ'), + (0x1DBF, 'M', u'θ'), + (0x1DC0, 'V'), + (0x1DFA, 'X'), + (0x1DFB, 'V'), + (0x1E00, 'M', u'ḁ'), + (0x1E01, 'V'), + (0x1E02, 'M', u'ḃ'), + (0x1E03, 'V'), + (0x1E04, 'M', u'ḅ'), + (0x1E05, 'V'), + (0x1E06, 'M', u'ḇ'), + (0x1E07, 'V'), + (0x1E08, 'M', u'ḉ'), + (0x1E09, 'V'), + (0x1E0A, 'M', u'ḋ'), + (0x1E0B, 'V'), + (0x1E0C, 'M', u'ḍ'), + (0x1E0D, 'V'), + (0x1E0E, 'M', u'ḏ'), + (0x1E0F, 'V'), + (0x1E10, 'M', u'ḑ'), + (0x1E11, 'V'), + (0x1E12, 'M', u'ḓ'), + (0x1E13, 'V'), + (0x1E14, 'M', u'ḕ'), + (0x1E15, 'V'), + (0x1E16, 'M', u'ḗ'), + (0x1E17, 'V'), + (0x1E18, 'M', u'ḙ'), + (0x1E19, 'V'), + (0x1E1A, 'M', u'ḛ'), + (0x1E1B, 'V'), + (0x1E1C, 'M', u'ḝ'), + (0x1E1D, 'V'), + (0x1E1E, 'M', u'ḟ'), + (0x1E1F, 'V'), + (0x1E20, 'M', u'ḡ'), + (0x1E21, 'V'), + (0x1E22, 'M', u'ḣ'), + (0x1E23, 'V'), + (0x1E24, 'M', u'ḥ'), + (0x1E25, 'V'), + (0x1E26, 'M', u'ḧ'), + (0x1E27, 'V'), + (0x1E28, 'M', u'ḩ'), + (0x1E29, 'V'), + (0x1E2A, 'M', u'ḫ'), + (0x1E2B, 'V'), + (0x1E2C, 'M', u'ḭ'), + (0x1E2D, 'V'), + (0x1E2E, 'M', u'ḯ'), + (0x1E2F, 'V'), + (0x1E30, 'M', u'ḱ'), + (0x1E31, 'V'), + (0x1E32, 'M', u'ḳ'), + (0x1E33, 'V'), + (0x1E34, 'M', u'ḵ'), + (0x1E35, 'V'), + (0x1E36, 'M', u'ḷ'), + (0x1E37, 'V'), + (0x1E38, 'M', u'ḹ'), + ] + +def _seg_17(): + return [ + (0x1E39, 'V'), + (0x1E3A, 'M', u'ḻ'), + (0x1E3B, 'V'), + (0x1E3C, 'M', u'ḽ'), + (0x1E3D, 'V'), + (0x1E3E, 'M', u'ḿ'), + (0x1E3F, 'V'), + (0x1E40, 'M', u'ṁ'), + (0x1E41, 'V'), + (0x1E42, 'M', u'ṃ'), + (0x1E43, 'V'), + (0x1E44, 'M', u'ṅ'), + (0x1E45, 'V'), + (0x1E46, 'M', u'ṇ'), + (0x1E47, 'V'), + (0x1E48, 'M', u'ṉ'), + (0x1E49, 'V'), + (0x1E4A, 'M', u'ṋ'), + (0x1E4B, 'V'), + (0x1E4C, 'M', u'ṍ'), + (0x1E4D, 'V'), + (0x1E4E, 'M', u'ṏ'), + (0x1E4F, 'V'), + (0x1E50, 'M', u'ṑ'), + (0x1E51, 'V'), + (0x1E52, 'M', u'ṓ'), + (0x1E53, 'V'), + (0x1E54, 'M', u'ṕ'), + (0x1E55, 'V'), + (0x1E56, 'M', u'ṗ'), + (0x1E57, 'V'), + (0x1E58, 'M', u'ṙ'), + (0x1E59, 'V'), + (0x1E5A, 'M', u'ṛ'), + (0x1E5B, 'V'), + (0x1E5C, 'M', u'ṝ'), + 
(0x1E5D, 'V'), + (0x1E5E, 'M', u'ṟ'), + (0x1E5F, 'V'), + (0x1E60, 'M', u'ṡ'), + (0x1E61, 'V'), + (0x1E62, 'M', u'ṣ'), + (0x1E63, 'V'), + (0x1E64, 'M', u'ṥ'), + (0x1E65, 'V'), + (0x1E66, 'M', u'ṧ'), + (0x1E67, 'V'), + (0x1E68, 'M', u'ṩ'), + (0x1E69, 'V'), + (0x1E6A, 'M', u'ṫ'), + (0x1E6B, 'V'), + (0x1E6C, 'M', u'ṭ'), + (0x1E6D, 'V'), + (0x1E6E, 'M', u'ṯ'), + (0x1E6F, 'V'), + (0x1E70, 'M', u'ṱ'), + (0x1E71, 'V'), + (0x1E72, 'M', u'ṳ'), + (0x1E73, 'V'), + (0x1E74, 'M', u'ṵ'), + (0x1E75, 'V'), + (0x1E76, 'M', u'ṷ'), + (0x1E77, 'V'), + (0x1E78, 'M', u'ṹ'), + (0x1E79, 'V'), + (0x1E7A, 'M', u'ṻ'), + (0x1E7B, 'V'), + (0x1E7C, 'M', u'ṽ'), + (0x1E7D, 'V'), + (0x1E7E, 'M', u'ṿ'), + (0x1E7F, 'V'), + (0x1E80, 'M', u'ẁ'), + (0x1E81, 'V'), + (0x1E82, 'M', u'ẃ'), + (0x1E83, 'V'), + (0x1E84, 'M', u'ẅ'), + (0x1E85, 'V'), + (0x1E86, 'M', u'ẇ'), + (0x1E87, 'V'), + (0x1E88, 'M', u'ẉ'), + (0x1E89, 'V'), + (0x1E8A, 'M', u'ẋ'), + (0x1E8B, 'V'), + (0x1E8C, 'M', u'ẍ'), + (0x1E8D, 'V'), + (0x1E8E, 'M', u'ẏ'), + (0x1E8F, 'V'), + (0x1E90, 'M', u'ẑ'), + (0x1E91, 'V'), + (0x1E92, 'M', u'ẓ'), + (0x1E93, 'V'), + (0x1E94, 'M', u'ẕ'), + (0x1E95, 'V'), + (0x1E9A, 'M', u'aʾ'), + (0x1E9B, 'M', u'ṡ'), + (0x1E9C, 'V'), + (0x1E9E, 'M', u'ss'), + (0x1E9F, 'V'), + (0x1EA0, 'M', u'ạ'), + (0x1EA1, 'V'), + ] + +def _seg_18(): + return [ + (0x1EA2, 'M', u'ả'), + (0x1EA3, 'V'), + (0x1EA4, 'M', u'ấ'), + (0x1EA5, 'V'), + (0x1EA6, 'M', u'ầ'), + (0x1EA7, 'V'), + (0x1EA8, 'M', u'ẩ'), + (0x1EA9, 'V'), + (0x1EAA, 'M', u'ẫ'), + (0x1EAB, 'V'), + (0x1EAC, 'M', u'ậ'), + (0x1EAD, 'V'), + (0x1EAE, 'M', u'ắ'), + (0x1EAF, 'V'), + (0x1EB0, 'M', u'ằ'), + (0x1EB1, 'V'), + (0x1EB2, 'M', u'ẳ'), + (0x1EB3, 'V'), + (0x1EB4, 'M', u'ẵ'), + (0x1EB5, 'V'), + (0x1EB6, 'M', u'ặ'), + (0x1EB7, 'V'), + (0x1EB8, 'M', u'ẹ'), + (0x1EB9, 'V'), + (0x1EBA, 'M', u'ẻ'), + (0x1EBB, 'V'), + (0x1EBC, 'M', u'ẽ'), + (0x1EBD, 'V'), + (0x1EBE, 'M', u'ế'), + (0x1EBF, 'V'), + (0x1EC0, 'M', u'ề'), + (0x1EC1, 'V'), + (0x1EC2, 'M', u'ể'), + (0x1EC3, 'V'), + (0x1EC4, 'M', u'ễ'), + (0x1EC5, 'V'), + (0x1EC6, 'M', u'ệ'), + (0x1EC7, 'V'), + (0x1EC8, 'M', u'ỉ'), + (0x1EC9, 'V'), + (0x1ECA, 'M', u'ị'), + (0x1ECB, 'V'), + (0x1ECC, 'M', u'ọ'), + (0x1ECD, 'V'), + (0x1ECE, 'M', u'ỏ'), + (0x1ECF, 'V'), + (0x1ED0, 'M', u'ố'), + (0x1ED1, 'V'), + (0x1ED2, 'M', u'ồ'), + (0x1ED3, 'V'), + (0x1ED4, 'M', u'ổ'), + (0x1ED5, 'V'), + (0x1ED6, 'M', u'ỗ'), + (0x1ED7, 'V'), + (0x1ED8, 'M', u'ộ'), + (0x1ED9, 'V'), + (0x1EDA, 'M', u'ớ'), + (0x1EDB, 'V'), + (0x1EDC, 'M', u'ờ'), + (0x1EDD, 'V'), + (0x1EDE, 'M', u'ở'), + (0x1EDF, 'V'), + (0x1EE0, 'M', u'ỡ'), + (0x1EE1, 'V'), + (0x1EE2, 'M', u'ợ'), + (0x1EE3, 'V'), + (0x1EE4, 'M', u'ụ'), + (0x1EE5, 'V'), + (0x1EE6, 'M', u'ủ'), + (0x1EE7, 'V'), + (0x1EE8, 'M', u'ứ'), + (0x1EE9, 'V'), + (0x1EEA, 'M', u'ừ'), + (0x1EEB, 'V'), + (0x1EEC, 'M', u'ử'), + (0x1EED, 'V'), + (0x1EEE, 'M', u'ữ'), + (0x1EEF, 'V'), + (0x1EF0, 'M', u'ự'), + (0x1EF1, 'V'), + (0x1EF2, 'M', u'ỳ'), + (0x1EF3, 'V'), + (0x1EF4, 'M', u'ỵ'), + (0x1EF5, 'V'), + (0x1EF6, 'M', u'ỷ'), + (0x1EF7, 'V'), + (0x1EF8, 'M', u'ỹ'), + (0x1EF9, 'V'), + (0x1EFA, 'M', u'ỻ'), + (0x1EFB, 'V'), + (0x1EFC, 'M', u'ỽ'), + (0x1EFD, 'V'), + (0x1EFE, 'M', u'ỿ'), + (0x1EFF, 'V'), + (0x1F08, 'M', u'ἀ'), + (0x1F09, 'M', u'ἁ'), + (0x1F0A, 'M', u'ἂ'), + (0x1F0B, 'M', u'ἃ'), + (0x1F0C, 'M', u'ἄ'), + (0x1F0D, 'M', u'ἅ'), + ] + +def _seg_19(): + return [ + (0x1F0E, 'M', u'ἆ'), + (0x1F0F, 'M', u'ἇ'), + (0x1F10, 'V'), + (0x1F16, 'X'), + (0x1F18, 'M', u'ἐ'), + (0x1F19, 'M', u'ἑ'), + (0x1F1A, 'M', u'ἒ'), + (0x1F1B, 'M', u'ἓ'), + (0x1F1C, 'M', 
u'ἔ'), + (0x1F1D, 'M', u'ἕ'), + (0x1F1E, 'X'), + (0x1F20, 'V'), + (0x1F28, 'M', u'ἠ'), + (0x1F29, 'M', u'ἡ'), + (0x1F2A, 'M', u'ἢ'), + (0x1F2B, 'M', u'ἣ'), + (0x1F2C, 'M', u'ἤ'), + (0x1F2D, 'M', u'ἥ'), + (0x1F2E, 'M', u'ἦ'), + (0x1F2F, 'M', u'ἧ'), + (0x1F30, 'V'), + (0x1F38, 'M', u'ἰ'), + (0x1F39, 'M', u'ἱ'), + (0x1F3A, 'M', u'ἲ'), + (0x1F3B, 'M', u'ἳ'), + (0x1F3C, 'M', u'ἴ'), + (0x1F3D, 'M', u'ἵ'), + (0x1F3E, 'M', u'ἶ'), + (0x1F3F, 'M', u'ἷ'), + (0x1F40, 'V'), + (0x1F46, 'X'), + (0x1F48, 'M', u'ὀ'), + (0x1F49, 'M', u'ὁ'), + (0x1F4A, 'M', u'ὂ'), + (0x1F4B, 'M', u'ὃ'), + (0x1F4C, 'M', u'ὄ'), + (0x1F4D, 'M', u'ὅ'), + (0x1F4E, 'X'), + (0x1F50, 'V'), + (0x1F58, 'X'), + (0x1F59, 'M', u'ὑ'), + (0x1F5A, 'X'), + (0x1F5B, 'M', u'ὓ'), + (0x1F5C, 'X'), + (0x1F5D, 'M', u'ὕ'), + (0x1F5E, 'X'), + (0x1F5F, 'M', u'ὗ'), + (0x1F60, 'V'), + (0x1F68, 'M', u'ὠ'), + (0x1F69, 'M', u'ὡ'), + (0x1F6A, 'M', u'ὢ'), + (0x1F6B, 'M', u'ὣ'), + (0x1F6C, 'M', u'ὤ'), + (0x1F6D, 'M', u'ὥ'), + (0x1F6E, 'M', u'ὦ'), + (0x1F6F, 'M', u'ὧ'), + (0x1F70, 'V'), + (0x1F71, 'M', u'ά'), + (0x1F72, 'V'), + (0x1F73, 'M', u'έ'), + (0x1F74, 'V'), + (0x1F75, 'M', u'ή'), + (0x1F76, 'V'), + (0x1F77, 'M', u'ί'), + (0x1F78, 'V'), + (0x1F79, 'M', u'ό'), + (0x1F7A, 'V'), + (0x1F7B, 'M', u'ύ'), + (0x1F7C, 'V'), + (0x1F7D, 'M', u'ώ'), + (0x1F7E, 'X'), + (0x1F80, 'M', u'ἀι'), + (0x1F81, 'M', u'ἁι'), + (0x1F82, 'M', u'ἂι'), + (0x1F83, 'M', u'ἃι'), + (0x1F84, 'M', u'ἄι'), + (0x1F85, 'M', u'ἅι'), + (0x1F86, 'M', u'ἆι'), + (0x1F87, 'M', u'ἇι'), + (0x1F88, 'M', u'ἀι'), + (0x1F89, 'M', u'ἁι'), + (0x1F8A, 'M', u'ἂι'), + (0x1F8B, 'M', u'ἃι'), + (0x1F8C, 'M', u'ἄι'), + (0x1F8D, 'M', u'ἅι'), + (0x1F8E, 'M', u'ἆι'), + (0x1F8F, 'M', u'ἇι'), + (0x1F90, 'M', u'ἠι'), + (0x1F91, 'M', u'ἡι'), + (0x1F92, 'M', u'ἢι'), + (0x1F93, 'M', u'ἣι'), + (0x1F94, 'M', u'ἤι'), + (0x1F95, 'M', u'ἥι'), + (0x1F96, 'M', u'ἦι'), + (0x1F97, 'M', u'ἧι'), + (0x1F98, 'M', u'ἠι'), + (0x1F99, 'M', u'ἡι'), + (0x1F9A, 'M', u'ἢι'), + (0x1F9B, 'M', u'ἣι'), + (0x1F9C, 'M', u'ἤι'), + ] + +def _seg_20(): + return [ + (0x1F9D, 'M', u'ἥι'), + (0x1F9E, 'M', u'ἦι'), + (0x1F9F, 'M', u'ἧι'), + (0x1FA0, 'M', u'ὠι'), + (0x1FA1, 'M', u'ὡι'), + (0x1FA2, 'M', u'ὢι'), + (0x1FA3, 'M', u'ὣι'), + (0x1FA4, 'M', u'ὤι'), + (0x1FA5, 'M', u'ὥι'), + (0x1FA6, 'M', u'ὦι'), + (0x1FA7, 'M', u'ὧι'), + (0x1FA8, 'M', u'ὠι'), + (0x1FA9, 'M', u'ὡι'), + (0x1FAA, 'M', u'ὢι'), + (0x1FAB, 'M', u'ὣι'), + (0x1FAC, 'M', u'ὤι'), + (0x1FAD, 'M', u'ὥι'), + (0x1FAE, 'M', u'ὦι'), + (0x1FAF, 'M', u'ὧι'), + (0x1FB0, 'V'), + (0x1FB2, 'M', u'ὰι'), + (0x1FB3, 'M', u'αι'), + (0x1FB4, 'M', u'άι'), + (0x1FB5, 'X'), + (0x1FB6, 'V'), + (0x1FB7, 'M', u'ᾶι'), + (0x1FB8, 'M', u'ᾰ'), + (0x1FB9, 'M', u'ᾱ'), + (0x1FBA, 'M', u'ὰ'), + (0x1FBB, 'M', u'ά'), + (0x1FBC, 'M', u'αι'), + (0x1FBD, '3', u' ̓'), + (0x1FBE, 'M', u'ι'), + (0x1FBF, '3', u' ̓'), + (0x1FC0, '3', u' ͂'), + (0x1FC1, '3', u' ̈͂'), + (0x1FC2, 'M', u'ὴι'), + (0x1FC3, 'M', u'ηι'), + (0x1FC4, 'M', u'ήι'), + (0x1FC5, 'X'), + (0x1FC6, 'V'), + (0x1FC7, 'M', u'ῆι'), + (0x1FC8, 'M', u'ὲ'), + (0x1FC9, 'M', u'έ'), + (0x1FCA, 'M', u'ὴ'), + (0x1FCB, 'M', u'ή'), + (0x1FCC, 'M', u'ηι'), + (0x1FCD, '3', u' ̓̀'), + (0x1FCE, '3', u' ̓́'), + (0x1FCF, '3', u' ̓͂'), + (0x1FD0, 'V'), + (0x1FD3, 'M', u'ΐ'), + (0x1FD4, 'X'), + (0x1FD6, 'V'), + (0x1FD8, 'M', u'ῐ'), + (0x1FD9, 'M', u'ῑ'), + (0x1FDA, 'M', u'ὶ'), + (0x1FDB, 'M', u'ί'), + (0x1FDC, 'X'), + (0x1FDD, '3', u' ̔̀'), + (0x1FDE, '3', u' ̔́'), + (0x1FDF, '3', u' ̔͂'), + (0x1FE0, 'V'), + (0x1FE3, 'M', u'ΰ'), + (0x1FE4, 'V'), + (0x1FE8, 'M', u'ῠ'), + (0x1FE9, 
'M', u'ῡ'), + (0x1FEA, 'M', u'ὺ'), + (0x1FEB, 'M', u'ύ'), + (0x1FEC, 'M', u'ῥ'), + (0x1FED, '3', u' ̈̀'), + (0x1FEE, '3', u' ̈́'), + (0x1FEF, '3', u'`'), + (0x1FF0, 'X'), + (0x1FF2, 'M', u'ὼι'), + (0x1FF3, 'M', u'ωι'), + (0x1FF4, 'M', u'ώι'), + (0x1FF5, 'X'), + (0x1FF6, 'V'), + (0x1FF7, 'M', u'ῶι'), + (0x1FF8, 'M', u'ὸ'), + (0x1FF9, 'M', u'ό'), + (0x1FFA, 'M', u'ὼ'), + (0x1FFB, 'M', u'ώ'), + (0x1FFC, 'M', u'ωι'), + (0x1FFD, '3', u' ́'), + (0x1FFE, '3', u' ̔'), + (0x1FFF, 'X'), + (0x2000, '3', u' '), + (0x200B, 'I'), + (0x200C, 'D', u''), + (0x200E, 'X'), + (0x2010, 'V'), + (0x2011, 'M', u'‐'), + (0x2012, 'V'), + (0x2017, '3', u' ̳'), + (0x2018, 'V'), + (0x2024, 'X'), + (0x2027, 'V'), + (0x2028, 'X'), + ] + +def _seg_21(): + return [ + (0x202F, '3', u' '), + (0x2030, 'V'), + (0x2033, 'M', u'′′'), + (0x2034, 'M', u'′′′'), + (0x2035, 'V'), + (0x2036, 'M', u'‵‵'), + (0x2037, 'M', u'‵‵‵'), + (0x2038, 'V'), + (0x203C, '3', u'!!'), + (0x203D, 'V'), + (0x203E, '3', u' ̅'), + (0x203F, 'V'), + (0x2047, '3', u'??'), + (0x2048, '3', u'?!'), + (0x2049, '3', u'!?'), + (0x204A, 'V'), + (0x2057, 'M', u'′′′′'), + (0x2058, 'V'), + (0x205F, '3', u' '), + (0x2060, 'I'), + (0x2061, 'X'), + (0x2064, 'I'), + (0x2065, 'X'), + (0x2070, 'M', u'0'), + (0x2071, 'M', u'i'), + (0x2072, 'X'), + (0x2074, 'M', u'4'), + (0x2075, 'M', u'5'), + (0x2076, 'M', u'6'), + (0x2077, 'M', u'7'), + (0x2078, 'M', u'8'), + (0x2079, 'M', u'9'), + (0x207A, '3', u'+'), + (0x207B, 'M', u'−'), + (0x207C, '3', u'='), + (0x207D, '3', u'('), + (0x207E, '3', u')'), + (0x207F, 'M', u'n'), + (0x2080, 'M', u'0'), + (0x2081, 'M', u'1'), + (0x2082, 'M', u'2'), + (0x2083, 'M', u'3'), + (0x2084, 'M', u'4'), + (0x2085, 'M', u'5'), + (0x2086, 'M', u'6'), + (0x2087, 'M', u'7'), + (0x2088, 'M', u'8'), + (0x2089, 'M', u'9'), + (0x208A, '3', u'+'), + (0x208B, 'M', u'−'), + (0x208C, '3', u'='), + (0x208D, '3', u'('), + (0x208E, '3', u')'), + (0x208F, 'X'), + (0x2090, 'M', u'a'), + (0x2091, 'M', u'e'), + (0x2092, 'M', u'o'), + (0x2093, 'M', u'x'), + (0x2094, 'M', u'ə'), + (0x2095, 'M', u'h'), + (0x2096, 'M', u'k'), + (0x2097, 'M', u'l'), + (0x2098, 'M', u'm'), + (0x2099, 'M', u'n'), + (0x209A, 'M', u'p'), + (0x209B, 'M', u's'), + (0x209C, 'M', u't'), + (0x209D, 'X'), + (0x20A0, 'V'), + (0x20A8, 'M', u'rs'), + (0x20A9, 'V'), + (0x20C0, 'X'), + (0x20D0, 'V'), + (0x20F1, 'X'), + (0x2100, '3', u'a/c'), + (0x2101, '3', u'a/s'), + (0x2102, 'M', u'c'), + (0x2103, 'M', u'°c'), + (0x2104, 'V'), + (0x2105, '3', u'c/o'), + (0x2106, '3', u'c/u'), + (0x2107, 'M', u'ɛ'), + (0x2108, 'V'), + (0x2109, 'M', u'°f'), + (0x210A, 'M', u'g'), + (0x210B, 'M', u'h'), + (0x210F, 'M', u'ħ'), + (0x2110, 'M', u'i'), + (0x2112, 'M', u'l'), + (0x2114, 'V'), + (0x2115, 'M', u'n'), + (0x2116, 'M', u'no'), + (0x2117, 'V'), + (0x2119, 'M', u'p'), + (0x211A, 'M', u'q'), + (0x211B, 'M', u'r'), + (0x211E, 'V'), + (0x2120, 'M', u'sm'), + (0x2121, 'M', u'tel'), + (0x2122, 'M', u'tm'), + ] + +def _seg_22(): + return [ + (0x2123, 'V'), + (0x2124, 'M', u'z'), + (0x2125, 'V'), + (0x2126, 'M', u'ω'), + (0x2127, 'V'), + (0x2128, 'M', u'z'), + (0x2129, 'V'), + (0x212A, 'M', u'k'), + (0x212B, 'M', u'å'), + (0x212C, 'M', u'b'), + (0x212D, 'M', u'c'), + (0x212E, 'V'), + (0x212F, 'M', u'e'), + (0x2131, 'M', u'f'), + (0x2132, 'X'), + (0x2133, 'M', u'm'), + (0x2134, 'M', u'o'), + (0x2135, 'M', u'א'), + (0x2136, 'M', u'ב'), + (0x2137, 'M', u'ג'), + (0x2138, 'M', u'ד'), + (0x2139, 'M', u'i'), + (0x213A, 'V'), + (0x213B, 'M', u'fax'), + (0x213C, 'M', u'π'), + (0x213D, 'M', u'γ'), + (0x213F, 'M', u'π'), + (0x2140, 
'M', u'∑'), + (0x2141, 'V'), + (0x2145, 'M', u'd'), + (0x2147, 'M', u'e'), + (0x2148, 'M', u'i'), + (0x2149, 'M', u'j'), + (0x214A, 'V'), + (0x2150, 'M', u'1⁄7'), + (0x2151, 'M', u'1⁄9'), + (0x2152, 'M', u'1⁄10'), + (0x2153, 'M', u'1⁄3'), + (0x2154, 'M', u'2⁄3'), + (0x2155, 'M', u'1⁄5'), + (0x2156, 'M', u'2⁄5'), + (0x2157, 'M', u'3⁄5'), + (0x2158, 'M', u'4⁄5'), + (0x2159, 'M', u'1⁄6'), + (0x215A, 'M', u'5⁄6'), + (0x215B, 'M', u'1⁄8'), + (0x215C, 'M', u'3⁄8'), + (0x215D, 'M', u'5⁄8'), + (0x215E, 'M', u'7⁄8'), + (0x215F, 'M', u'1⁄'), + (0x2160, 'M', u'i'), + (0x2161, 'M', u'ii'), + (0x2162, 'M', u'iii'), + (0x2163, 'M', u'iv'), + (0x2164, 'M', u'v'), + (0x2165, 'M', u'vi'), + (0x2166, 'M', u'vii'), + (0x2167, 'M', u'viii'), + (0x2168, 'M', u'ix'), + (0x2169, 'M', u'x'), + (0x216A, 'M', u'xi'), + (0x216B, 'M', u'xii'), + (0x216C, 'M', u'l'), + (0x216D, 'M', u'c'), + (0x216E, 'M', u'd'), + (0x216F, 'M', u'm'), + (0x2170, 'M', u'i'), + (0x2171, 'M', u'ii'), + (0x2172, 'M', u'iii'), + (0x2173, 'M', u'iv'), + (0x2174, 'M', u'v'), + (0x2175, 'M', u'vi'), + (0x2176, 'M', u'vii'), + (0x2177, 'M', u'viii'), + (0x2178, 'M', u'ix'), + (0x2179, 'M', u'x'), + (0x217A, 'M', u'xi'), + (0x217B, 'M', u'xii'), + (0x217C, 'M', u'l'), + (0x217D, 'M', u'c'), + (0x217E, 'M', u'd'), + (0x217F, 'M', u'm'), + (0x2180, 'V'), + (0x2183, 'X'), + (0x2184, 'V'), + (0x2189, 'M', u'0⁄3'), + (0x218A, 'V'), + (0x218C, 'X'), + (0x2190, 'V'), + (0x222C, 'M', u'∫∫'), + (0x222D, 'M', u'∫∫∫'), + (0x222E, 'V'), + (0x222F, 'M', u'∮∮'), + (0x2230, 'M', u'∮∮∮'), + (0x2231, 'V'), + (0x2260, '3'), + (0x2261, 'V'), + (0x226E, '3'), + (0x2270, 'V'), + (0x2329, 'M', u'〈'), + ] + +def _seg_23(): + return [ + (0x232A, 'M', u'〉'), + (0x232B, 'V'), + (0x2427, 'X'), + (0x2440, 'V'), + (0x244B, 'X'), + (0x2460, 'M', u'1'), + (0x2461, 'M', u'2'), + (0x2462, 'M', u'3'), + (0x2463, 'M', u'4'), + (0x2464, 'M', u'5'), + (0x2465, 'M', u'6'), + (0x2466, 'M', u'7'), + (0x2467, 'M', u'8'), + (0x2468, 'M', u'9'), + (0x2469, 'M', u'10'), + (0x246A, 'M', u'11'), + (0x246B, 'M', u'12'), + (0x246C, 'M', u'13'), + (0x246D, 'M', u'14'), + (0x246E, 'M', u'15'), + (0x246F, 'M', u'16'), + (0x2470, 'M', u'17'), + (0x2471, 'M', u'18'), + (0x2472, 'M', u'19'), + (0x2473, 'M', u'20'), + (0x2474, '3', u'(1)'), + (0x2475, '3', u'(2)'), + (0x2476, '3', u'(3)'), + (0x2477, '3', u'(4)'), + (0x2478, '3', u'(5)'), + (0x2479, '3', u'(6)'), + (0x247A, '3', u'(7)'), + (0x247B, '3', u'(8)'), + (0x247C, '3', u'(9)'), + (0x247D, '3', u'(10)'), + (0x247E, '3', u'(11)'), + (0x247F, '3', u'(12)'), + (0x2480, '3', u'(13)'), + (0x2481, '3', u'(14)'), + (0x2482, '3', u'(15)'), + (0x2483, '3', u'(16)'), + (0x2484, '3', u'(17)'), + (0x2485, '3', u'(18)'), + (0x2486, '3', u'(19)'), + (0x2487, '3', u'(20)'), + (0x2488, 'X'), + (0x249C, '3', u'(a)'), + (0x249D, '3', u'(b)'), + (0x249E, '3', u'(c)'), + (0x249F, '3', u'(d)'), + (0x24A0, '3', u'(e)'), + (0x24A1, '3', u'(f)'), + (0x24A2, '3', u'(g)'), + (0x24A3, '3', u'(h)'), + (0x24A4, '3', u'(i)'), + (0x24A5, '3', u'(j)'), + (0x24A6, '3', u'(k)'), + (0x24A7, '3', u'(l)'), + (0x24A8, '3', u'(m)'), + (0x24A9, '3', u'(n)'), + (0x24AA, '3', u'(o)'), + (0x24AB, '3', u'(p)'), + (0x24AC, '3', u'(q)'), + (0x24AD, '3', u'(r)'), + (0x24AE, '3', u'(s)'), + (0x24AF, '3', u'(t)'), + (0x24B0, '3', u'(u)'), + (0x24B1, '3', u'(v)'), + (0x24B2, '3', u'(w)'), + (0x24B3, '3', u'(x)'), + (0x24B4, '3', u'(y)'), + (0x24B5, '3', u'(z)'), + (0x24B6, 'M', u'a'), + (0x24B7, 'M', u'b'), + (0x24B8, 'M', u'c'), + (0x24B9, 'M', u'd'), + (0x24BA, 'M', u'e'), + (0x24BB, 
'M', u'f'), + (0x24BC, 'M', u'g'), + (0x24BD, 'M', u'h'), + (0x24BE, 'M', u'i'), + (0x24BF, 'M', u'j'), + (0x24C0, 'M', u'k'), + (0x24C1, 'M', u'l'), + (0x24C2, 'M', u'm'), + (0x24C3, 'M', u'n'), + (0x24C4, 'M', u'o'), + (0x24C5, 'M', u'p'), + (0x24C6, 'M', u'q'), + (0x24C7, 'M', u'r'), + (0x24C8, 'M', u's'), + (0x24C9, 'M', u't'), + (0x24CA, 'M', u'u'), + (0x24CB, 'M', u'v'), + (0x24CC, 'M', u'w'), + (0x24CD, 'M', u'x'), + (0x24CE, 'M', u'y'), + (0x24CF, 'M', u'z'), + (0x24D0, 'M', u'a'), + (0x24D1, 'M', u'b'), + ] + +def _seg_24(): + return [ + (0x24D2, 'M', u'c'), + (0x24D3, 'M', u'd'), + (0x24D4, 'M', u'e'), + (0x24D5, 'M', u'f'), + (0x24D6, 'M', u'g'), + (0x24D7, 'M', u'h'), + (0x24D8, 'M', u'i'), + (0x24D9, 'M', u'j'), + (0x24DA, 'M', u'k'), + (0x24DB, 'M', u'l'), + (0x24DC, 'M', u'm'), + (0x24DD, 'M', u'n'), + (0x24DE, 'M', u'o'), + (0x24DF, 'M', u'p'), + (0x24E0, 'M', u'q'), + (0x24E1, 'M', u'r'), + (0x24E2, 'M', u's'), + (0x24E3, 'M', u't'), + (0x24E4, 'M', u'u'), + (0x24E5, 'M', u'v'), + (0x24E6, 'M', u'w'), + (0x24E7, 'M', u'x'), + (0x24E8, 'M', u'y'), + (0x24E9, 'M', u'z'), + (0x24EA, 'M', u'0'), + (0x24EB, 'V'), + (0x2A0C, 'M', u'∫∫∫∫'), + (0x2A0D, 'V'), + (0x2A74, '3', u'::='), + (0x2A75, '3', u'=='), + (0x2A76, '3', u'==='), + (0x2A77, 'V'), + (0x2ADC, 'M', u'⫝̸'), + (0x2ADD, 'V'), + (0x2B74, 'X'), + (0x2B76, 'V'), + (0x2B96, 'X'), + (0x2B98, 'V'), + (0x2BBA, 'X'), + (0x2BBD, 'V'), + (0x2BC9, 'X'), + (0x2BCA, 'V'), + (0x2BD3, 'X'), + (0x2BEC, 'V'), + (0x2BF0, 'X'), + (0x2C00, 'M', u'ⰰ'), + (0x2C01, 'M', u'ⰱ'), + (0x2C02, 'M', u'ⰲ'), + (0x2C03, 'M', u'ⰳ'), + (0x2C04, 'M', u'ⰴ'), + (0x2C05, 'M', u'ⰵ'), + (0x2C06, 'M', u'ⰶ'), + (0x2C07, 'M', u'ⰷ'), + (0x2C08, 'M', u'ⰸ'), + (0x2C09, 'M', u'ⰹ'), + (0x2C0A, 'M', u'ⰺ'), + (0x2C0B, 'M', u'ⰻ'), + (0x2C0C, 'M', u'ⰼ'), + (0x2C0D, 'M', u'ⰽ'), + (0x2C0E, 'M', u'ⰾ'), + (0x2C0F, 'M', u'ⰿ'), + (0x2C10, 'M', u'ⱀ'), + (0x2C11, 'M', u'ⱁ'), + (0x2C12, 'M', u'ⱂ'), + (0x2C13, 'M', u'ⱃ'), + (0x2C14, 'M', u'ⱄ'), + (0x2C15, 'M', u'ⱅ'), + (0x2C16, 'M', u'ⱆ'), + (0x2C17, 'M', u'ⱇ'), + (0x2C18, 'M', u'ⱈ'), + (0x2C19, 'M', u'ⱉ'), + (0x2C1A, 'M', u'ⱊ'), + (0x2C1B, 'M', u'ⱋ'), + (0x2C1C, 'M', u'ⱌ'), + (0x2C1D, 'M', u'ⱍ'), + (0x2C1E, 'M', u'ⱎ'), + (0x2C1F, 'M', u'ⱏ'), + (0x2C20, 'M', u'ⱐ'), + (0x2C21, 'M', u'ⱑ'), + (0x2C22, 'M', u'ⱒ'), + (0x2C23, 'M', u'ⱓ'), + (0x2C24, 'M', u'ⱔ'), + (0x2C25, 'M', u'ⱕ'), + (0x2C26, 'M', u'ⱖ'), + (0x2C27, 'M', u'ⱗ'), + (0x2C28, 'M', u'ⱘ'), + (0x2C29, 'M', u'ⱙ'), + (0x2C2A, 'M', u'ⱚ'), + (0x2C2B, 'M', u'ⱛ'), + (0x2C2C, 'M', u'ⱜ'), + (0x2C2D, 'M', u'ⱝ'), + (0x2C2E, 'M', u'ⱞ'), + (0x2C2F, 'X'), + (0x2C30, 'V'), + (0x2C5F, 'X'), + (0x2C60, 'M', u'ⱡ'), + (0x2C61, 'V'), + (0x2C62, 'M', u'ɫ'), + (0x2C63, 'M', u'ᵽ'), + (0x2C64, 'M', u'ɽ'), + ] + +def _seg_25(): + return [ + (0x2C65, 'V'), + (0x2C67, 'M', u'ⱨ'), + (0x2C68, 'V'), + (0x2C69, 'M', u'ⱪ'), + (0x2C6A, 'V'), + (0x2C6B, 'M', u'ⱬ'), + (0x2C6C, 'V'), + (0x2C6D, 'M', u'ɑ'), + (0x2C6E, 'M', u'ɱ'), + (0x2C6F, 'M', u'ɐ'), + (0x2C70, 'M', u'ɒ'), + (0x2C71, 'V'), + (0x2C72, 'M', u'ⱳ'), + (0x2C73, 'V'), + (0x2C75, 'M', u'ⱶ'), + (0x2C76, 'V'), + (0x2C7C, 'M', u'j'), + (0x2C7D, 'M', u'v'), + (0x2C7E, 'M', u'ȿ'), + (0x2C7F, 'M', u'ɀ'), + (0x2C80, 'M', u'ⲁ'), + (0x2C81, 'V'), + (0x2C82, 'M', u'ⲃ'), + (0x2C83, 'V'), + (0x2C84, 'M', u'ⲅ'), + (0x2C85, 'V'), + (0x2C86, 'M', u'ⲇ'), + (0x2C87, 'V'), + (0x2C88, 'M', u'ⲉ'), + (0x2C89, 'V'), + (0x2C8A, 'M', u'ⲋ'), + (0x2C8B, 'V'), + (0x2C8C, 'M', u'ⲍ'), + (0x2C8D, 'V'), + (0x2C8E, 'M', u'ⲏ'), + (0x2C8F, 'V'), + (0x2C90, 'M', u'ⲑ'), + (0x2C91, 
'V'), + (0x2C92, 'M', u'ⲓ'), + (0x2C93, 'V'), + (0x2C94, 'M', u'ⲕ'), + (0x2C95, 'V'), + (0x2C96, 'M', u'ⲗ'), + (0x2C97, 'V'), + (0x2C98, 'M', u'ⲙ'), + (0x2C99, 'V'), + (0x2C9A, 'M', u'ⲛ'), + (0x2C9B, 'V'), + (0x2C9C, 'M', u'ⲝ'), + (0x2C9D, 'V'), + (0x2C9E, 'M', u'ⲟ'), + (0x2C9F, 'V'), + (0x2CA0, 'M', u'ⲡ'), + (0x2CA1, 'V'), + (0x2CA2, 'M', u'ⲣ'), + (0x2CA3, 'V'), + (0x2CA4, 'M', u'ⲥ'), + (0x2CA5, 'V'), + (0x2CA6, 'M', u'ⲧ'), + (0x2CA7, 'V'), + (0x2CA8, 'M', u'ⲩ'), + (0x2CA9, 'V'), + (0x2CAA, 'M', u'ⲫ'), + (0x2CAB, 'V'), + (0x2CAC, 'M', u'ⲭ'), + (0x2CAD, 'V'), + (0x2CAE, 'M', u'ⲯ'), + (0x2CAF, 'V'), + (0x2CB0, 'M', u'ⲱ'), + (0x2CB1, 'V'), + (0x2CB2, 'M', u'ⲳ'), + (0x2CB3, 'V'), + (0x2CB4, 'M', u'ⲵ'), + (0x2CB5, 'V'), + (0x2CB6, 'M', u'ⲷ'), + (0x2CB7, 'V'), + (0x2CB8, 'M', u'ⲹ'), + (0x2CB9, 'V'), + (0x2CBA, 'M', u'ⲻ'), + (0x2CBB, 'V'), + (0x2CBC, 'M', u'ⲽ'), + (0x2CBD, 'V'), + (0x2CBE, 'M', u'ⲿ'), + (0x2CBF, 'V'), + (0x2CC0, 'M', u'ⳁ'), + (0x2CC1, 'V'), + (0x2CC2, 'M', u'ⳃ'), + (0x2CC3, 'V'), + (0x2CC4, 'M', u'ⳅ'), + (0x2CC5, 'V'), + (0x2CC6, 'M', u'ⳇ'), + (0x2CC7, 'V'), + (0x2CC8, 'M', u'ⳉ'), + (0x2CC9, 'V'), + (0x2CCA, 'M', u'ⳋ'), + (0x2CCB, 'V'), + (0x2CCC, 'M', u'ⳍ'), + (0x2CCD, 'V'), + (0x2CCE, 'M', u'ⳏ'), + (0x2CCF, 'V'), + ] + +def _seg_26(): + return [ + (0x2CD0, 'M', u'ⳑ'), + (0x2CD1, 'V'), + (0x2CD2, 'M', u'ⳓ'), + (0x2CD3, 'V'), + (0x2CD4, 'M', u'ⳕ'), + (0x2CD5, 'V'), + (0x2CD6, 'M', u'ⳗ'), + (0x2CD7, 'V'), + (0x2CD8, 'M', u'ⳙ'), + (0x2CD9, 'V'), + (0x2CDA, 'M', u'ⳛ'), + (0x2CDB, 'V'), + (0x2CDC, 'M', u'ⳝ'), + (0x2CDD, 'V'), + (0x2CDE, 'M', u'ⳟ'), + (0x2CDF, 'V'), + (0x2CE0, 'M', u'ⳡ'), + (0x2CE1, 'V'), + (0x2CE2, 'M', u'ⳣ'), + (0x2CE3, 'V'), + (0x2CEB, 'M', u'ⳬ'), + (0x2CEC, 'V'), + (0x2CED, 'M', u'ⳮ'), + (0x2CEE, 'V'), + (0x2CF2, 'M', u'ⳳ'), + (0x2CF3, 'V'), + (0x2CF4, 'X'), + (0x2CF9, 'V'), + (0x2D26, 'X'), + (0x2D27, 'V'), + (0x2D28, 'X'), + (0x2D2D, 'V'), + (0x2D2E, 'X'), + (0x2D30, 'V'), + (0x2D68, 'X'), + (0x2D6F, 'M', u'ⵡ'), + (0x2D70, 'V'), + (0x2D71, 'X'), + (0x2D7F, 'V'), + (0x2D97, 'X'), + (0x2DA0, 'V'), + (0x2DA7, 'X'), + (0x2DA8, 'V'), + (0x2DAF, 'X'), + (0x2DB0, 'V'), + (0x2DB7, 'X'), + (0x2DB8, 'V'), + (0x2DBF, 'X'), + (0x2DC0, 'V'), + (0x2DC7, 'X'), + (0x2DC8, 'V'), + (0x2DCF, 'X'), + (0x2DD0, 'V'), + (0x2DD7, 'X'), + (0x2DD8, 'V'), + (0x2DDF, 'X'), + (0x2DE0, 'V'), + (0x2E4A, 'X'), + (0x2E80, 'V'), + (0x2E9A, 'X'), + (0x2E9B, 'V'), + (0x2E9F, 'M', u'母'), + (0x2EA0, 'V'), + (0x2EF3, 'M', u'龟'), + (0x2EF4, 'X'), + (0x2F00, 'M', u'一'), + (0x2F01, 'M', u'丨'), + (0x2F02, 'M', u'丶'), + (0x2F03, 'M', u'丿'), + (0x2F04, 'M', u'乙'), + (0x2F05, 'M', u'亅'), + (0x2F06, 'M', u'二'), + (0x2F07, 'M', u'亠'), + (0x2F08, 'M', u'人'), + (0x2F09, 'M', u'儿'), + (0x2F0A, 'M', u'入'), + (0x2F0B, 'M', u'八'), + (0x2F0C, 'M', u'冂'), + (0x2F0D, 'M', u'冖'), + (0x2F0E, 'M', u'冫'), + (0x2F0F, 'M', u'几'), + (0x2F10, 'M', u'凵'), + (0x2F11, 'M', u'刀'), + (0x2F12, 'M', u'力'), + (0x2F13, 'M', u'勹'), + (0x2F14, 'M', u'匕'), + (0x2F15, 'M', u'匚'), + (0x2F16, 'M', u'匸'), + (0x2F17, 'M', u'十'), + (0x2F18, 'M', u'卜'), + (0x2F19, 'M', u'卩'), + (0x2F1A, 'M', u'厂'), + (0x2F1B, 'M', u'厶'), + (0x2F1C, 'M', u'又'), + (0x2F1D, 'M', u'口'), + (0x2F1E, 'M', u'囗'), + (0x2F1F, 'M', u'土'), + (0x2F20, 'M', u'士'), + (0x2F21, 'M', u'夂'), + (0x2F22, 'M', u'夊'), + ] + +def _seg_27(): + return [ + (0x2F23, 'M', u'夕'), + (0x2F24, 'M', u'大'), + (0x2F25, 'M', u'女'), + (0x2F26, 'M', u'子'), + (0x2F27, 'M', u'宀'), + (0x2F28, 'M', u'寸'), + (0x2F29, 'M', u'小'), + (0x2F2A, 'M', u'尢'), + (0x2F2B, 'M', u'尸'), + (0x2F2C, 'M', u'屮'), + 
(0x2F2D, 'M', u'山'), + (0x2F2E, 'M', u'巛'), + (0x2F2F, 'M', u'工'), + (0x2F30, 'M', u'己'), + (0x2F31, 'M', u'巾'), + (0x2F32, 'M', u'干'), + (0x2F33, 'M', u'幺'), + (0x2F34, 'M', u'广'), + (0x2F35, 'M', u'廴'), + (0x2F36, 'M', u'廾'), + (0x2F37, 'M', u'弋'), + (0x2F38, 'M', u'弓'), + (0x2F39, 'M', u'彐'), + (0x2F3A, 'M', u'彡'), + (0x2F3B, 'M', u'彳'), + (0x2F3C, 'M', u'心'), + (0x2F3D, 'M', u'戈'), + (0x2F3E, 'M', u'戶'), + (0x2F3F, 'M', u'手'), + (0x2F40, 'M', u'支'), + (0x2F41, 'M', u'攴'), + (0x2F42, 'M', u'文'), + (0x2F43, 'M', u'斗'), + (0x2F44, 'M', u'斤'), + (0x2F45, 'M', u'方'), + (0x2F46, 'M', u'无'), + (0x2F47, 'M', u'日'), + (0x2F48, 'M', u'曰'), + (0x2F49, 'M', u'月'), + (0x2F4A, 'M', u'木'), + (0x2F4B, 'M', u'欠'), + (0x2F4C, 'M', u'止'), + (0x2F4D, 'M', u'歹'), + (0x2F4E, 'M', u'殳'), + (0x2F4F, 'M', u'毋'), + (0x2F50, 'M', u'比'), + (0x2F51, 'M', u'毛'), + (0x2F52, 'M', u'氏'), + (0x2F53, 'M', u'气'), + (0x2F54, 'M', u'水'), + (0x2F55, 'M', u'火'), + (0x2F56, 'M', u'爪'), + (0x2F57, 'M', u'父'), + (0x2F58, 'M', u'爻'), + (0x2F59, 'M', u'爿'), + (0x2F5A, 'M', u'片'), + (0x2F5B, 'M', u'牙'), + (0x2F5C, 'M', u'牛'), + (0x2F5D, 'M', u'犬'), + (0x2F5E, 'M', u'玄'), + (0x2F5F, 'M', u'玉'), + (0x2F60, 'M', u'瓜'), + (0x2F61, 'M', u'瓦'), + (0x2F62, 'M', u'甘'), + (0x2F63, 'M', u'生'), + (0x2F64, 'M', u'用'), + (0x2F65, 'M', u'田'), + (0x2F66, 'M', u'疋'), + (0x2F67, 'M', u'疒'), + (0x2F68, 'M', u'癶'), + (0x2F69, 'M', u'白'), + (0x2F6A, 'M', u'皮'), + (0x2F6B, 'M', u'皿'), + (0x2F6C, 'M', u'目'), + (0x2F6D, 'M', u'矛'), + (0x2F6E, 'M', u'矢'), + (0x2F6F, 'M', u'石'), + (0x2F70, 'M', u'示'), + (0x2F71, 'M', u'禸'), + (0x2F72, 'M', u'禾'), + (0x2F73, 'M', u'穴'), + (0x2F74, 'M', u'立'), + (0x2F75, 'M', u'竹'), + (0x2F76, 'M', u'米'), + (0x2F77, 'M', u'糸'), + (0x2F78, 'M', u'缶'), + (0x2F79, 'M', u'网'), + (0x2F7A, 'M', u'羊'), + (0x2F7B, 'M', u'羽'), + (0x2F7C, 'M', u'老'), + (0x2F7D, 'M', u'而'), + (0x2F7E, 'M', u'耒'), + (0x2F7F, 'M', u'耳'), + (0x2F80, 'M', u'聿'), + (0x2F81, 'M', u'肉'), + (0x2F82, 'M', u'臣'), + (0x2F83, 'M', u'自'), + (0x2F84, 'M', u'至'), + (0x2F85, 'M', u'臼'), + (0x2F86, 'M', u'舌'), + ] + +def _seg_28(): + return [ + (0x2F87, 'M', u'舛'), + (0x2F88, 'M', u'舟'), + (0x2F89, 'M', u'艮'), + (0x2F8A, 'M', u'色'), + (0x2F8B, 'M', u'艸'), + (0x2F8C, 'M', u'虍'), + (0x2F8D, 'M', u'虫'), + (0x2F8E, 'M', u'血'), + (0x2F8F, 'M', u'行'), + (0x2F90, 'M', u'衣'), + (0x2F91, 'M', u'襾'), + (0x2F92, 'M', u'見'), + (0x2F93, 'M', u'角'), + (0x2F94, 'M', u'言'), + (0x2F95, 'M', u'谷'), + (0x2F96, 'M', u'豆'), + (0x2F97, 'M', u'豕'), + (0x2F98, 'M', u'豸'), + (0x2F99, 'M', u'貝'), + (0x2F9A, 'M', u'赤'), + (0x2F9B, 'M', u'走'), + (0x2F9C, 'M', u'足'), + (0x2F9D, 'M', u'身'), + (0x2F9E, 'M', u'車'), + (0x2F9F, 'M', u'辛'), + (0x2FA0, 'M', u'辰'), + (0x2FA1, 'M', u'辵'), + (0x2FA2, 'M', u'邑'), + (0x2FA3, 'M', u'酉'), + (0x2FA4, 'M', u'釆'), + (0x2FA5, 'M', u'里'), + (0x2FA6, 'M', u'金'), + (0x2FA7, 'M', u'長'), + (0x2FA8, 'M', u'門'), + (0x2FA9, 'M', u'阜'), + (0x2FAA, 'M', u'隶'), + (0x2FAB, 'M', u'隹'), + (0x2FAC, 'M', u'雨'), + (0x2FAD, 'M', u'靑'), + (0x2FAE, 'M', u'非'), + (0x2FAF, 'M', u'面'), + (0x2FB0, 'M', u'革'), + (0x2FB1, 'M', u'韋'), + (0x2FB2, 'M', u'韭'), + (0x2FB3, 'M', u'音'), + (0x2FB4, 'M', u'頁'), + (0x2FB5, 'M', u'風'), + (0x2FB6, 'M', u'飛'), + (0x2FB7, 'M', u'食'), + (0x2FB8, 'M', u'首'), + (0x2FB9, 'M', u'香'), + (0x2FBA, 'M', u'馬'), + (0x2FBB, 'M', u'骨'), + (0x2FBC, 'M', u'高'), + (0x2FBD, 'M', u'髟'), + (0x2FBE, 'M', u'鬥'), + (0x2FBF, 'M', u'鬯'), + (0x2FC0, 'M', u'鬲'), + (0x2FC1, 'M', u'鬼'), + (0x2FC2, 'M', u'魚'), + (0x2FC3, 'M', u'鳥'), + (0x2FC4, 'M', u'鹵'), + (0x2FC5, 'M', u'鹿'), + 
(0x2FC6, 'M', u'麥'), + (0x2FC7, 'M', u'麻'), + (0x2FC8, 'M', u'黃'), + (0x2FC9, 'M', u'黍'), + (0x2FCA, 'M', u'黑'), + (0x2FCB, 'M', u'黹'), + (0x2FCC, 'M', u'黽'), + (0x2FCD, 'M', u'鼎'), + (0x2FCE, 'M', u'鼓'), + (0x2FCF, 'M', u'鼠'), + (0x2FD0, 'M', u'鼻'), + (0x2FD1, 'M', u'齊'), + (0x2FD2, 'M', u'齒'), + (0x2FD3, 'M', u'龍'), + (0x2FD4, 'M', u'龜'), + (0x2FD5, 'M', u'龠'), + (0x2FD6, 'X'), + (0x3000, '3', u' '), + (0x3001, 'V'), + (0x3002, 'M', u'.'), + (0x3003, 'V'), + (0x3036, 'M', u'〒'), + (0x3037, 'V'), + (0x3038, 'M', u'十'), + (0x3039, 'M', u'卄'), + (0x303A, 'M', u'卅'), + (0x303B, 'V'), + (0x3040, 'X'), + (0x3041, 'V'), + (0x3097, 'X'), + (0x3099, 'V'), + (0x309B, '3', u' ゙'), + (0x309C, '3', u' ゚'), + (0x309D, 'V'), + (0x309F, 'M', u'より'), + (0x30A0, 'V'), + (0x30FF, 'M', u'コト'), + ] + +def _seg_29(): + return [ + (0x3100, 'X'), + (0x3105, 'V'), + (0x312F, 'X'), + (0x3131, 'M', u'ᄀ'), + (0x3132, 'M', u'ᄁ'), + (0x3133, 'M', u'ᆪ'), + (0x3134, 'M', u'ᄂ'), + (0x3135, 'M', u'ᆬ'), + (0x3136, 'M', u'ᆭ'), + (0x3137, 'M', u'ᄃ'), + (0x3138, 'M', u'ᄄ'), + (0x3139, 'M', u'ᄅ'), + (0x313A, 'M', u'ᆰ'), + (0x313B, 'M', u'ᆱ'), + (0x313C, 'M', u'ᆲ'), + (0x313D, 'M', u'ᆳ'), + (0x313E, 'M', u'ᆴ'), + (0x313F, 'M', u'ᆵ'), + (0x3140, 'M', u'ᄚ'), + (0x3141, 'M', u'ᄆ'), + (0x3142, 'M', u'ᄇ'), + (0x3143, 'M', u'ᄈ'), + (0x3144, 'M', u'ᄡ'), + (0x3145, 'M', u'ᄉ'), + (0x3146, 'M', u'ᄊ'), + (0x3147, 'M', u'ᄋ'), + (0x3148, 'M', u'ᄌ'), + (0x3149, 'M', u'ᄍ'), + (0x314A, 'M', u'ᄎ'), + (0x314B, 'M', u'ᄏ'), + (0x314C, 'M', u'ᄐ'), + (0x314D, 'M', u'ᄑ'), + (0x314E, 'M', u'ᄒ'), + (0x314F, 'M', u'ᅡ'), + (0x3150, 'M', u'ᅢ'), + (0x3151, 'M', u'ᅣ'), + (0x3152, 'M', u'ᅤ'), + (0x3153, 'M', u'ᅥ'), + (0x3154, 'M', u'ᅦ'), + (0x3155, 'M', u'ᅧ'), + (0x3156, 'M', u'ᅨ'), + (0x3157, 'M', u'ᅩ'), + (0x3158, 'M', u'ᅪ'), + (0x3159, 'M', u'ᅫ'), + (0x315A, 'M', u'ᅬ'), + (0x315B, 'M', u'ᅭ'), + (0x315C, 'M', u'ᅮ'), + (0x315D, 'M', u'ᅯ'), + (0x315E, 'M', u'ᅰ'), + (0x315F, 'M', u'ᅱ'), + (0x3160, 'M', u'ᅲ'), + (0x3161, 'M', u'ᅳ'), + (0x3162, 'M', u'ᅴ'), + (0x3163, 'M', u'ᅵ'), + (0x3164, 'X'), + (0x3165, 'M', u'ᄔ'), + (0x3166, 'M', u'ᄕ'), + (0x3167, 'M', u'ᇇ'), + (0x3168, 'M', u'ᇈ'), + (0x3169, 'M', u'ᇌ'), + (0x316A, 'M', u'ᇎ'), + (0x316B, 'M', u'ᇓ'), + (0x316C, 'M', u'ᇗ'), + (0x316D, 'M', u'ᇙ'), + (0x316E, 'M', u'ᄜ'), + (0x316F, 'M', u'ᇝ'), + (0x3170, 'M', u'ᇟ'), + (0x3171, 'M', u'ᄝ'), + (0x3172, 'M', u'ᄞ'), + (0x3173, 'M', u'ᄠ'), + (0x3174, 'M', u'ᄢ'), + (0x3175, 'M', u'ᄣ'), + (0x3176, 'M', u'ᄧ'), + (0x3177, 'M', u'ᄩ'), + (0x3178, 'M', u'ᄫ'), + (0x3179, 'M', u'ᄬ'), + (0x317A, 'M', u'ᄭ'), + (0x317B, 'M', u'ᄮ'), + (0x317C, 'M', u'ᄯ'), + (0x317D, 'M', u'ᄲ'), + (0x317E, 'M', u'ᄶ'), + (0x317F, 'M', u'ᅀ'), + (0x3180, 'M', u'ᅇ'), + (0x3181, 'M', u'ᅌ'), + (0x3182, 'M', u'ᇱ'), + (0x3183, 'M', u'ᇲ'), + (0x3184, 'M', u'ᅗ'), + (0x3185, 'M', u'ᅘ'), + (0x3186, 'M', u'ᅙ'), + (0x3187, 'M', u'ᆄ'), + (0x3188, 'M', u'ᆅ'), + (0x3189, 'M', u'ᆈ'), + (0x318A, 'M', u'ᆑ'), + (0x318B, 'M', u'ᆒ'), + (0x318C, 'M', u'ᆔ'), + (0x318D, 'M', u'ᆞ'), + (0x318E, 'M', u'ᆡ'), + (0x318F, 'X'), + (0x3190, 'V'), + (0x3192, 'M', u'一'), + ] + +def _seg_30(): + return [ + (0x3193, 'M', u'二'), + (0x3194, 'M', u'三'), + (0x3195, 'M', u'四'), + (0x3196, 'M', u'上'), + (0x3197, 'M', u'中'), + (0x3198, 'M', u'下'), + (0x3199, 'M', u'甲'), + (0x319A, 'M', u'乙'), + (0x319B, 'M', u'丙'), + (0x319C, 'M', u'丁'), + (0x319D, 'M', u'天'), + (0x319E, 'M', u'地'), + (0x319F, 'M', u'人'), + (0x31A0, 'V'), + (0x31BB, 'X'), + (0x31C0, 'V'), + (0x31E4, 'X'), + (0x31F0, 'V'), + (0x3200, '3', u'(ᄀ)'), + (0x3201, '3', u'(ᄂ)'), + 
(0x3202, '3', u'(ᄃ)'), + (0x3203, '3', u'(ᄅ)'), + (0x3204, '3', u'(ᄆ)'), + (0x3205, '3', u'(ᄇ)'), + (0x3206, '3', u'(ᄉ)'), + (0x3207, '3', u'(ᄋ)'), + (0x3208, '3', u'(ᄌ)'), + (0x3209, '3', u'(ᄎ)'), + (0x320A, '3', u'(ᄏ)'), + (0x320B, '3', u'(ᄐ)'), + (0x320C, '3', u'(ᄑ)'), + (0x320D, '3', u'(ᄒ)'), + (0x320E, '3', u'(가)'), + (0x320F, '3', u'(나)'), + (0x3210, '3', u'(다)'), + (0x3211, '3', u'(라)'), + (0x3212, '3', u'(마)'), + (0x3213, '3', u'(바)'), + (0x3214, '3', u'(사)'), + (0x3215, '3', u'(아)'), + (0x3216, '3', u'(자)'), + (0x3217, '3', u'(차)'), + (0x3218, '3', u'(카)'), + (0x3219, '3', u'(타)'), + (0x321A, '3', u'(파)'), + (0x321B, '3', u'(하)'), + (0x321C, '3', u'(주)'), + (0x321D, '3', u'(오전)'), + (0x321E, '3', u'(오후)'), + (0x321F, 'X'), + (0x3220, '3', u'(一)'), + (0x3221, '3', u'(二)'), + (0x3222, '3', u'(三)'), + (0x3223, '3', u'(四)'), + (0x3224, '3', u'(五)'), + (0x3225, '3', u'(六)'), + (0x3226, '3', u'(七)'), + (0x3227, '3', u'(八)'), + (0x3228, '3', u'(九)'), + (0x3229, '3', u'(十)'), + (0x322A, '3', u'(月)'), + (0x322B, '3', u'(火)'), + (0x322C, '3', u'(水)'), + (0x322D, '3', u'(木)'), + (0x322E, '3', u'(金)'), + (0x322F, '3', u'(土)'), + (0x3230, '3', u'(日)'), + (0x3231, '3', u'(株)'), + (0x3232, '3', u'(有)'), + (0x3233, '3', u'(社)'), + (0x3234, '3', u'(名)'), + (0x3235, '3', u'(特)'), + (0x3236, '3', u'(財)'), + (0x3237, '3', u'(祝)'), + (0x3238, '3', u'(労)'), + (0x3239, '3', u'(代)'), + (0x323A, '3', u'(呼)'), + (0x323B, '3', u'(学)'), + (0x323C, '3', u'(監)'), + (0x323D, '3', u'(企)'), + (0x323E, '3', u'(資)'), + (0x323F, '3', u'(協)'), + (0x3240, '3', u'(祭)'), + (0x3241, '3', u'(休)'), + (0x3242, '3', u'(自)'), + (0x3243, '3', u'(至)'), + (0x3244, 'M', u'問'), + (0x3245, 'M', u'幼'), + (0x3246, 'M', u'文'), + (0x3247, 'M', u'箏'), + (0x3248, 'V'), + (0x3250, 'M', u'pte'), + (0x3251, 'M', u'21'), + (0x3252, 'M', u'22'), + (0x3253, 'M', u'23'), + (0x3254, 'M', u'24'), + (0x3255, 'M', u'25'), + (0x3256, 'M', u'26'), + (0x3257, 'M', u'27'), + (0x3258, 'M', u'28'), + ] + +def _seg_31(): + return [ + (0x3259, 'M', u'29'), + (0x325A, 'M', u'30'), + (0x325B, 'M', u'31'), + (0x325C, 'M', u'32'), + (0x325D, 'M', u'33'), + (0x325E, 'M', u'34'), + (0x325F, 'M', u'35'), + (0x3260, 'M', u'ᄀ'), + (0x3261, 'M', u'ᄂ'), + (0x3262, 'M', u'ᄃ'), + (0x3263, 'M', u'ᄅ'), + (0x3264, 'M', u'ᄆ'), + (0x3265, 'M', u'ᄇ'), + (0x3266, 'M', u'ᄉ'), + (0x3267, 'M', u'ᄋ'), + (0x3268, 'M', u'ᄌ'), + (0x3269, 'M', u'ᄎ'), + (0x326A, 'M', u'ᄏ'), + (0x326B, 'M', u'ᄐ'), + (0x326C, 'M', u'ᄑ'), + (0x326D, 'M', u'ᄒ'), + (0x326E, 'M', u'가'), + (0x326F, 'M', u'나'), + (0x3270, 'M', u'다'), + (0x3271, 'M', u'라'), + (0x3272, 'M', u'마'), + (0x3273, 'M', u'바'), + (0x3274, 'M', u'사'), + (0x3275, 'M', u'아'), + (0x3276, 'M', u'자'), + (0x3277, 'M', u'차'), + (0x3278, 'M', u'카'), + (0x3279, 'M', u'타'), + (0x327A, 'M', u'파'), + (0x327B, 'M', u'하'), + (0x327C, 'M', u'참고'), + (0x327D, 'M', u'주의'), + (0x327E, 'M', u'우'), + (0x327F, 'V'), + (0x3280, 'M', u'一'), + (0x3281, 'M', u'二'), + (0x3282, 'M', u'三'), + (0x3283, 'M', u'四'), + (0x3284, 'M', u'五'), + (0x3285, 'M', u'六'), + (0x3286, 'M', u'七'), + (0x3287, 'M', u'八'), + (0x3288, 'M', u'九'), + (0x3289, 'M', u'十'), + (0x328A, 'M', u'月'), + (0x328B, 'M', u'火'), + (0x328C, 'M', u'水'), + (0x328D, 'M', u'木'), + (0x328E, 'M', u'金'), + (0x328F, 'M', u'土'), + (0x3290, 'M', u'日'), + (0x3291, 'M', u'株'), + (0x3292, 'M', u'有'), + (0x3293, 'M', u'社'), + (0x3294, 'M', u'名'), + (0x3295, 'M', u'特'), + (0x3296, 'M', u'財'), + (0x3297, 'M', u'祝'), + (0x3298, 'M', u'労'), + (0x3299, 'M', u'秘'), + (0x329A, 'M', u'男'), + (0x329B, 'M', u'女'), + 
(0x329C, 'M', u'適'), + (0x329D, 'M', u'優'), + (0x329E, 'M', u'印'), + (0x329F, 'M', u'注'), + (0x32A0, 'M', u'項'), + (0x32A1, 'M', u'休'), + (0x32A2, 'M', u'写'), + (0x32A3, 'M', u'正'), + (0x32A4, 'M', u'上'), + (0x32A5, 'M', u'中'), + (0x32A6, 'M', u'下'), + (0x32A7, 'M', u'左'), + (0x32A8, 'M', u'右'), + (0x32A9, 'M', u'医'), + (0x32AA, 'M', u'宗'), + (0x32AB, 'M', u'学'), + (0x32AC, 'M', u'監'), + (0x32AD, 'M', u'企'), + (0x32AE, 'M', u'資'), + (0x32AF, 'M', u'協'), + (0x32B0, 'M', u'夜'), + (0x32B1, 'M', u'36'), + (0x32B2, 'M', u'37'), + (0x32B3, 'M', u'38'), + (0x32B4, 'M', u'39'), + (0x32B5, 'M', u'40'), + (0x32B6, 'M', u'41'), + (0x32B7, 'M', u'42'), + (0x32B8, 'M', u'43'), + (0x32B9, 'M', u'44'), + (0x32BA, 'M', u'45'), + (0x32BB, 'M', u'46'), + (0x32BC, 'M', u'47'), + ] + +def _seg_32(): + return [ + (0x32BD, 'M', u'48'), + (0x32BE, 'M', u'49'), + (0x32BF, 'M', u'50'), + (0x32C0, 'M', u'1月'), + (0x32C1, 'M', u'2月'), + (0x32C2, 'M', u'3月'), + (0x32C3, 'M', u'4月'), + (0x32C4, 'M', u'5月'), + (0x32C5, 'M', u'6月'), + (0x32C6, 'M', u'7月'), + (0x32C7, 'M', u'8月'), + (0x32C8, 'M', u'9月'), + (0x32C9, 'M', u'10月'), + (0x32CA, 'M', u'11月'), + (0x32CB, 'M', u'12月'), + (0x32CC, 'M', u'hg'), + (0x32CD, 'M', u'erg'), + (0x32CE, 'M', u'ev'), + (0x32CF, 'M', u'ltd'), + (0x32D0, 'M', u'ア'), + (0x32D1, 'M', u'イ'), + (0x32D2, 'M', u'ウ'), + (0x32D3, 'M', u'エ'), + (0x32D4, 'M', u'オ'), + (0x32D5, 'M', u'カ'), + (0x32D6, 'M', u'キ'), + (0x32D7, 'M', u'ク'), + (0x32D8, 'M', u'ケ'), + (0x32D9, 'M', u'コ'), + (0x32DA, 'M', u'サ'), + (0x32DB, 'M', u'シ'), + (0x32DC, 'M', u'ス'), + (0x32DD, 'M', u'セ'), + (0x32DE, 'M', u'ソ'), + (0x32DF, 'M', u'タ'), + (0x32E0, 'M', u'チ'), + (0x32E1, 'M', u'ツ'), + (0x32E2, 'M', u'テ'), + (0x32E3, 'M', u'ト'), + (0x32E4, 'M', u'ナ'), + (0x32E5, 'M', u'ニ'), + (0x32E6, 'M', u'ヌ'), + (0x32E7, 'M', u'ネ'), + (0x32E8, 'M', u'ノ'), + (0x32E9, 'M', u'ハ'), + (0x32EA, 'M', u'ヒ'), + (0x32EB, 'M', u'フ'), + (0x32EC, 'M', u'ヘ'), + (0x32ED, 'M', u'ホ'), + (0x32EE, 'M', u'マ'), + (0x32EF, 'M', u'ミ'), + (0x32F0, 'M', u'ム'), + (0x32F1, 'M', u'メ'), + (0x32F2, 'M', u'モ'), + (0x32F3, 'M', u'ヤ'), + (0x32F4, 'M', u'ユ'), + (0x32F5, 'M', u'ヨ'), + (0x32F6, 'M', u'ラ'), + (0x32F7, 'M', u'リ'), + (0x32F8, 'M', u'ル'), + (0x32F9, 'M', u'レ'), + (0x32FA, 'M', u'ロ'), + (0x32FB, 'M', u'ワ'), + (0x32FC, 'M', u'ヰ'), + (0x32FD, 'M', u'ヱ'), + (0x32FE, 'M', u'ヲ'), + (0x32FF, 'X'), + (0x3300, 'M', u'アパート'), + (0x3301, 'M', u'アルファ'), + (0x3302, 'M', u'アンペア'), + (0x3303, 'M', u'アール'), + (0x3304, 'M', u'イニング'), + (0x3305, 'M', u'インチ'), + (0x3306, 'M', u'ウォン'), + (0x3307, 'M', u'エスクード'), + (0x3308, 'M', u'エーカー'), + (0x3309, 'M', u'オンス'), + (0x330A, 'M', u'オーム'), + (0x330B, 'M', u'カイリ'), + (0x330C, 'M', u'カラット'), + (0x330D, 'M', u'カロリー'), + (0x330E, 'M', u'ガロン'), + (0x330F, 'M', u'ガンマ'), + (0x3310, 'M', u'ギガ'), + (0x3311, 'M', u'ギニー'), + (0x3312, 'M', u'キュリー'), + (0x3313, 'M', u'ギルダー'), + (0x3314, 'M', u'キロ'), + (0x3315, 'M', u'キログラム'), + (0x3316, 'M', u'キロメートル'), + (0x3317, 'M', u'キロワット'), + (0x3318, 'M', u'グラム'), + (0x3319, 'M', u'グラムトン'), + (0x331A, 'M', u'クルゼイロ'), + (0x331B, 'M', u'クローネ'), + (0x331C, 'M', u'ケース'), + (0x331D, 'M', u'コルナ'), + (0x331E, 'M', u'コーポ'), + (0x331F, 'M', u'サイクル'), + (0x3320, 'M', u'サンチーム'), + ] + +def _seg_33(): + return [ + (0x3321, 'M', u'シリング'), + (0x3322, 'M', u'センチ'), + (0x3323, 'M', u'セント'), + (0x3324, 'M', u'ダース'), + (0x3325, 'M', u'デシ'), + (0x3326, 'M', u'ドル'), + (0x3327, 'M', u'トン'), + (0x3328, 'M', u'ナノ'), + (0x3329, 'M', u'ノット'), + (0x332A, 'M', u'ハイツ'), + (0x332B, 'M', u'パーセント'), + (0x332C, 'M', u'パーツ'), + (0x332D, 
'M', u'バーレル'), + (0x332E, 'M', u'ピアストル'), + (0x332F, 'M', u'ピクル'), + (0x3330, 'M', u'ピコ'), + (0x3331, 'M', u'ビル'), + (0x3332, 'M', u'ファラッド'), + (0x3333, 'M', u'フィート'), + (0x3334, 'M', u'ブッシェル'), + (0x3335, 'M', u'フラン'), + (0x3336, 'M', u'ヘクタール'), + (0x3337, 'M', u'ペソ'), + (0x3338, 'M', u'ペニヒ'), + (0x3339, 'M', u'ヘルツ'), + (0x333A, 'M', u'ペンス'), + (0x333B, 'M', u'ページ'), + (0x333C, 'M', u'ベータ'), + (0x333D, 'M', u'ポイント'), + (0x333E, 'M', u'ボルト'), + (0x333F, 'M', u'ホン'), + (0x3340, 'M', u'ポンド'), + (0x3341, 'M', u'ホール'), + (0x3342, 'M', u'ホーン'), + (0x3343, 'M', u'マイクロ'), + (0x3344, 'M', u'マイル'), + (0x3345, 'M', u'マッハ'), + (0x3346, 'M', u'マルク'), + (0x3347, 'M', u'マンション'), + (0x3348, 'M', u'ミクロン'), + (0x3349, 'M', u'ミリ'), + (0x334A, 'M', u'ミリバール'), + (0x334B, 'M', u'メガ'), + (0x334C, 'M', u'メガトン'), + (0x334D, 'M', u'メートル'), + (0x334E, 'M', u'ヤード'), + (0x334F, 'M', u'ヤール'), + (0x3350, 'M', u'ユアン'), + (0x3351, 'M', u'リットル'), + (0x3352, 'M', u'リラ'), + (0x3353, 'M', u'ルピー'), + (0x3354, 'M', u'ルーブル'), + (0x3355, 'M', u'レム'), + (0x3356, 'M', u'レントゲン'), + (0x3357, 'M', u'ワット'), + (0x3358, 'M', u'0点'), + (0x3359, 'M', u'1点'), + (0x335A, 'M', u'2点'), + (0x335B, 'M', u'3点'), + (0x335C, 'M', u'4点'), + (0x335D, 'M', u'5点'), + (0x335E, 'M', u'6点'), + (0x335F, 'M', u'7点'), + (0x3360, 'M', u'8点'), + (0x3361, 'M', u'9点'), + (0x3362, 'M', u'10点'), + (0x3363, 'M', u'11点'), + (0x3364, 'M', u'12点'), + (0x3365, 'M', u'13点'), + (0x3366, 'M', u'14点'), + (0x3367, 'M', u'15点'), + (0x3368, 'M', u'16点'), + (0x3369, 'M', u'17点'), + (0x336A, 'M', u'18点'), + (0x336B, 'M', u'19点'), + (0x336C, 'M', u'20点'), + (0x336D, 'M', u'21点'), + (0x336E, 'M', u'22点'), + (0x336F, 'M', u'23点'), + (0x3370, 'M', u'24点'), + (0x3371, 'M', u'hpa'), + (0x3372, 'M', u'da'), + (0x3373, 'M', u'au'), + (0x3374, 'M', u'bar'), + (0x3375, 'M', u'ov'), + (0x3376, 'M', u'pc'), + (0x3377, 'M', u'dm'), + (0x3378, 'M', u'dm2'), + (0x3379, 'M', u'dm3'), + (0x337A, 'M', u'iu'), + (0x337B, 'M', u'平成'), + (0x337C, 'M', u'昭和'), + (0x337D, 'M', u'大正'), + (0x337E, 'M', u'明治'), + (0x337F, 'M', u'株式会社'), + (0x3380, 'M', u'pa'), + (0x3381, 'M', u'na'), + (0x3382, 'M', u'μa'), + (0x3383, 'M', u'ma'), + (0x3384, 'M', u'ka'), + ] + +def _seg_34(): + return [ + (0x3385, 'M', u'kb'), + (0x3386, 'M', u'mb'), + (0x3387, 'M', u'gb'), + (0x3388, 'M', u'cal'), + (0x3389, 'M', u'kcal'), + (0x338A, 'M', u'pf'), + (0x338B, 'M', u'nf'), + (0x338C, 'M', u'μf'), + (0x338D, 'M', u'μg'), + (0x338E, 'M', u'mg'), + (0x338F, 'M', u'kg'), + (0x3390, 'M', u'hz'), + (0x3391, 'M', u'khz'), + (0x3392, 'M', u'mhz'), + (0x3393, 'M', u'ghz'), + (0x3394, 'M', u'thz'), + (0x3395, 'M', u'μl'), + (0x3396, 'M', u'ml'), + (0x3397, 'M', u'dl'), + (0x3398, 'M', u'kl'), + (0x3399, 'M', u'fm'), + (0x339A, 'M', u'nm'), + (0x339B, 'M', u'μm'), + (0x339C, 'M', u'mm'), + (0x339D, 'M', u'cm'), + (0x339E, 'M', u'km'), + (0x339F, 'M', u'mm2'), + (0x33A0, 'M', u'cm2'), + (0x33A1, 'M', u'm2'), + (0x33A2, 'M', u'km2'), + (0x33A3, 'M', u'mm3'), + (0x33A4, 'M', u'cm3'), + (0x33A5, 'M', u'm3'), + (0x33A6, 'M', u'km3'), + (0x33A7, 'M', u'm∕s'), + (0x33A8, 'M', u'm∕s2'), + (0x33A9, 'M', u'pa'), + (0x33AA, 'M', u'kpa'), + (0x33AB, 'M', u'mpa'), + (0x33AC, 'M', u'gpa'), + (0x33AD, 'M', u'rad'), + (0x33AE, 'M', u'rad∕s'), + (0x33AF, 'M', u'rad∕s2'), + (0x33B0, 'M', u'ps'), + (0x33B1, 'M', u'ns'), + (0x33B2, 'M', u'μs'), + (0x33B3, 'M', u'ms'), + (0x33B4, 'M', u'pv'), + (0x33B5, 'M', u'nv'), + (0x33B6, 'M', u'μv'), + (0x33B7, 'M', u'mv'), + (0x33B8, 'M', u'kv'), + (0x33B9, 'M', u'mv'), + (0x33BA, 'M', u'pw'), + (0x33BB, 'M', 
u'nw'), + (0x33BC, 'M', u'μw'), + (0x33BD, 'M', u'mw'), + (0x33BE, 'M', u'kw'), + (0x33BF, 'M', u'mw'), + (0x33C0, 'M', u'kω'), + (0x33C1, 'M', u'mω'), + (0x33C2, 'X'), + (0x33C3, 'M', u'bq'), + (0x33C4, 'M', u'cc'), + (0x33C5, 'M', u'cd'), + (0x33C6, 'M', u'c∕kg'), + (0x33C7, 'X'), + (0x33C8, 'M', u'db'), + (0x33C9, 'M', u'gy'), + (0x33CA, 'M', u'ha'), + (0x33CB, 'M', u'hp'), + (0x33CC, 'M', u'in'), + (0x33CD, 'M', u'kk'), + (0x33CE, 'M', u'km'), + (0x33CF, 'M', u'kt'), + (0x33D0, 'M', u'lm'), + (0x33D1, 'M', u'ln'), + (0x33D2, 'M', u'log'), + (0x33D3, 'M', u'lx'), + (0x33D4, 'M', u'mb'), + (0x33D5, 'M', u'mil'), + (0x33D6, 'M', u'mol'), + (0x33D7, 'M', u'ph'), + (0x33D8, 'X'), + (0x33D9, 'M', u'ppm'), + (0x33DA, 'M', u'pr'), + (0x33DB, 'M', u'sr'), + (0x33DC, 'M', u'sv'), + (0x33DD, 'M', u'wb'), + (0x33DE, 'M', u'v∕m'), + (0x33DF, 'M', u'a∕m'), + (0x33E0, 'M', u'1日'), + (0x33E1, 'M', u'2日'), + (0x33E2, 'M', u'3日'), + (0x33E3, 'M', u'4日'), + (0x33E4, 'M', u'5日'), + (0x33E5, 'M', u'6日'), + (0x33E6, 'M', u'7日'), + (0x33E7, 'M', u'8日'), + (0x33E8, 'M', u'9日'), + ] + +def _seg_35(): + return [ + (0x33E9, 'M', u'10日'), + (0x33EA, 'M', u'11日'), + (0x33EB, 'M', u'12日'), + (0x33EC, 'M', u'13日'), + (0x33ED, 'M', u'14日'), + (0x33EE, 'M', u'15日'), + (0x33EF, 'M', u'16日'), + (0x33F0, 'M', u'17日'), + (0x33F1, 'M', u'18日'), + (0x33F2, 'M', u'19日'), + (0x33F3, 'M', u'20日'), + (0x33F4, 'M', u'21日'), + (0x33F5, 'M', u'22日'), + (0x33F6, 'M', u'23日'), + (0x33F7, 'M', u'24日'), + (0x33F8, 'M', u'25日'), + (0x33F9, 'M', u'26日'), + (0x33FA, 'M', u'27日'), + (0x33FB, 'M', u'28日'), + (0x33FC, 'M', u'29日'), + (0x33FD, 'M', u'30日'), + (0x33FE, 'M', u'31日'), + (0x33FF, 'M', u'gal'), + (0x3400, 'V'), + (0x4DB6, 'X'), + (0x4DC0, 'V'), + (0x9FEB, 'X'), + (0xA000, 'V'), + (0xA48D, 'X'), + (0xA490, 'V'), + (0xA4C7, 'X'), + (0xA4D0, 'V'), + (0xA62C, 'X'), + (0xA640, 'M', u'ꙁ'), + (0xA641, 'V'), + (0xA642, 'M', u'ꙃ'), + (0xA643, 'V'), + (0xA644, 'M', u'ꙅ'), + (0xA645, 'V'), + (0xA646, 'M', u'ꙇ'), + (0xA647, 'V'), + (0xA648, 'M', u'ꙉ'), + (0xA649, 'V'), + (0xA64A, 'M', u'ꙋ'), + (0xA64B, 'V'), + (0xA64C, 'M', u'ꙍ'), + (0xA64D, 'V'), + (0xA64E, 'M', u'ꙏ'), + (0xA64F, 'V'), + (0xA650, 'M', u'ꙑ'), + (0xA651, 'V'), + (0xA652, 'M', u'ꙓ'), + (0xA653, 'V'), + (0xA654, 'M', u'ꙕ'), + (0xA655, 'V'), + (0xA656, 'M', u'ꙗ'), + (0xA657, 'V'), + (0xA658, 'M', u'ꙙ'), + (0xA659, 'V'), + (0xA65A, 'M', u'ꙛ'), + (0xA65B, 'V'), + (0xA65C, 'M', u'ꙝ'), + (0xA65D, 'V'), + (0xA65E, 'M', u'ꙟ'), + (0xA65F, 'V'), + (0xA660, 'M', u'ꙡ'), + (0xA661, 'V'), + (0xA662, 'M', u'ꙣ'), + (0xA663, 'V'), + (0xA664, 'M', u'ꙥ'), + (0xA665, 'V'), + (0xA666, 'M', u'ꙧ'), + (0xA667, 'V'), + (0xA668, 'M', u'ꙩ'), + (0xA669, 'V'), + (0xA66A, 'M', u'ꙫ'), + (0xA66B, 'V'), + (0xA66C, 'M', u'ꙭ'), + (0xA66D, 'V'), + (0xA680, 'M', u'ꚁ'), + (0xA681, 'V'), + (0xA682, 'M', u'ꚃ'), + (0xA683, 'V'), + (0xA684, 'M', u'ꚅ'), + (0xA685, 'V'), + (0xA686, 'M', u'ꚇ'), + (0xA687, 'V'), + (0xA688, 'M', u'ꚉ'), + (0xA689, 'V'), + (0xA68A, 'M', u'ꚋ'), + (0xA68B, 'V'), + (0xA68C, 'M', u'ꚍ'), + (0xA68D, 'V'), + (0xA68E, 'M', u'ꚏ'), + (0xA68F, 'V'), + (0xA690, 'M', u'ꚑ'), + (0xA691, 'V'), + (0xA692, 'M', u'ꚓ'), + (0xA693, 'V'), + (0xA694, 'M', u'ꚕ'), + ] + +def _seg_36(): + return [ + (0xA695, 'V'), + (0xA696, 'M', u'ꚗ'), + (0xA697, 'V'), + (0xA698, 'M', u'ꚙ'), + (0xA699, 'V'), + (0xA69A, 'M', u'ꚛ'), + (0xA69B, 'V'), + (0xA69C, 'M', u'ъ'), + (0xA69D, 'M', u'ь'), + (0xA69E, 'V'), + (0xA6F8, 'X'), + (0xA700, 'V'), + (0xA722, 'M', u'ꜣ'), + (0xA723, 'V'), + (0xA724, 'M', u'ꜥ'), + (0xA725, 'V'), + (0xA726, 
'M', u'ꜧ'), + (0xA727, 'V'), + (0xA728, 'M', u'ꜩ'), + (0xA729, 'V'), + (0xA72A, 'M', u'ꜫ'), + (0xA72B, 'V'), + (0xA72C, 'M', u'ꜭ'), + (0xA72D, 'V'), + (0xA72E, 'M', u'ꜯ'), + (0xA72F, 'V'), + (0xA732, 'M', u'ꜳ'), + (0xA733, 'V'), + (0xA734, 'M', u'ꜵ'), + (0xA735, 'V'), + (0xA736, 'M', u'ꜷ'), + (0xA737, 'V'), + (0xA738, 'M', u'ꜹ'), + (0xA739, 'V'), + (0xA73A, 'M', u'ꜻ'), + (0xA73B, 'V'), + (0xA73C, 'M', u'ꜽ'), + (0xA73D, 'V'), + (0xA73E, 'M', u'ꜿ'), + (0xA73F, 'V'), + (0xA740, 'M', u'ꝁ'), + (0xA741, 'V'), + (0xA742, 'M', u'ꝃ'), + (0xA743, 'V'), + (0xA744, 'M', u'ꝅ'), + (0xA745, 'V'), + (0xA746, 'M', u'ꝇ'), + (0xA747, 'V'), + (0xA748, 'M', u'ꝉ'), + (0xA749, 'V'), + (0xA74A, 'M', u'ꝋ'), + (0xA74B, 'V'), + (0xA74C, 'M', u'ꝍ'), + (0xA74D, 'V'), + (0xA74E, 'M', u'ꝏ'), + (0xA74F, 'V'), + (0xA750, 'M', u'ꝑ'), + (0xA751, 'V'), + (0xA752, 'M', u'ꝓ'), + (0xA753, 'V'), + (0xA754, 'M', u'ꝕ'), + (0xA755, 'V'), + (0xA756, 'M', u'ꝗ'), + (0xA757, 'V'), + (0xA758, 'M', u'ꝙ'), + (0xA759, 'V'), + (0xA75A, 'M', u'ꝛ'), + (0xA75B, 'V'), + (0xA75C, 'M', u'ꝝ'), + (0xA75D, 'V'), + (0xA75E, 'M', u'ꝟ'), + (0xA75F, 'V'), + (0xA760, 'M', u'ꝡ'), + (0xA761, 'V'), + (0xA762, 'M', u'ꝣ'), + (0xA763, 'V'), + (0xA764, 'M', u'ꝥ'), + (0xA765, 'V'), + (0xA766, 'M', u'ꝧ'), + (0xA767, 'V'), + (0xA768, 'M', u'ꝩ'), + (0xA769, 'V'), + (0xA76A, 'M', u'ꝫ'), + (0xA76B, 'V'), + (0xA76C, 'M', u'ꝭ'), + (0xA76D, 'V'), + (0xA76E, 'M', u'ꝯ'), + (0xA76F, 'V'), + (0xA770, 'M', u'ꝯ'), + (0xA771, 'V'), + (0xA779, 'M', u'ꝺ'), + (0xA77A, 'V'), + (0xA77B, 'M', u'ꝼ'), + (0xA77C, 'V'), + (0xA77D, 'M', u'ᵹ'), + (0xA77E, 'M', u'ꝿ'), + (0xA77F, 'V'), + (0xA780, 'M', u'ꞁ'), + (0xA781, 'V'), + (0xA782, 'M', u'ꞃ'), + ] + +def _seg_37(): + return [ + (0xA783, 'V'), + (0xA784, 'M', u'ꞅ'), + (0xA785, 'V'), + (0xA786, 'M', u'ꞇ'), + (0xA787, 'V'), + (0xA78B, 'M', u'ꞌ'), + (0xA78C, 'V'), + (0xA78D, 'M', u'ɥ'), + (0xA78E, 'V'), + (0xA790, 'M', u'ꞑ'), + (0xA791, 'V'), + (0xA792, 'M', u'ꞓ'), + (0xA793, 'V'), + (0xA796, 'M', u'ꞗ'), + (0xA797, 'V'), + (0xA798, 'M', u'ꞙ'), + (0xA799, 'V'), + (0xA79A, 'M', u'ꞛ'), + (0xA79B, 'V'), + (0xA79C, 'M', u'ꞝ'), + (0xA79D, 'V'), + (0xA79E, 'M', u'ꞟ'), + (0xA79F, 'V'), + (0xA7A0, 'M', u'ꞡ'), + (0xA7A1, 'V'), + (0xA7A2, 'M', u'ꞣ'), + (0xA7A3, 'V'), + (0xA7A4, 'M', u'ꞥ'), + (0xA7A5, 'V'), + (0xA7A6, 'M', u'ꞧ'), + (0xA7A7, 'V'), + (0xA7A8, 'M', u'ꞩ'), + (0xA7A9, 'V'), + (0xA7AA, 'M', u'ɦ'), + (0xA7AB, 'M', u'ɜ'), + (0xA7AC, 'M', u'ɡ'), + (0xA7AD, 'M', u'ɬ'), + (0xA7AE, 'M', u'ɪ'), + (0xA7AF, 'X'), + (0xA7B0, 'M', u'ʞ'), + (0xA7B1, 'M', u'ʇ'), + (0xA7B2, 'M', u'ʝ'), + (0xA7B3, 'M', u'ꭓ'), + (0xA7B4, 'M', u'ꞵ'), + (0xA7B5, 'V'), + (0xA7B6, 'M', u'ꞷ'), + (0xA7B7, 'V'), + (0xA7B8, 'X'), + (0xA7F7, 'V'), + (0xA7F8, 'M', u'ħ'), + (0xA7F9, 'M', u'œ'), + (0xA7FA, 'V'), + (0xA82C, 'X'), + (0xA830, 'V'), + (0xA83A, 'X'), + (0xA840, 'V'), + (0xA878, 'X'), + (0xA880, 'V'), + (0xA8C6, 'X'), + (0xA8CE, 'V'), + (0xA8DA, 'X'), + (0xA8E0, 'V'), + (0xA8FE, 'X'), + (0xA900, 'V'), + (0xA954, 'X'), + (0xA95F, 'V'), + (0xA97D, 'X'), + (0xA980, 'V'), + (0xA9CE, 'X'), + (0xA9CF, 'V'), + (0xA9DA, 'X'), + (0xA9DE, 'V'), + (0xA9FF, 'X'), + (0xAA00, 'V'), + (0xAA37, 'X'), + (0xAA40, 'V'), + (0xAA4E, 'X'), + (0xAA50, 'V'), + (0xAA5A, 'X'), + (0xAA5C, 'V'), + (0xAAC3, 'X'), + (0xAADB, 'V'), + (0xAAF7, 'X'), + (0xAB01, 'V'), + (0xAB07, 'X'), + (0xAB09, 'V'), + (0xAB0F, 'X'), + (0xAB11, 'V'), + (0xAB17, 'X'), + (0xAB20, 'V'), + (0xAB27, 'X'), + (0xAB28, 'V'), + (0xAB2F, 'X'), + (0xAB30, 'V'), + (0xAB5C, 'M', u'ꜧ'), + (0xAB5D, 'M', u'ꬷ'), + (0xAB5E, 'M', u'ɫ'), + 
(0xAB5F, 'M', u'ꭒ'), + (0xAB60, 'V'), + (0xAB66, 'X'), + ] + +def _seg_38(): + return [ + (0xAB70, 'M', u'Ꭰ'), + (0xAB71, 'M', u'Ꭱ'), + (0xAB72, 'M', u'Ꭲ'), + (0xAB73, 'M', u'Ꭳ'), + (0xAB74, 'M', u'Ꭴ'), + (0xAB75, 'M', u'Ꭵ'), + (0xAB76, 'M', u'Ꭶ'), + (0xAB77, 'M', u'Ꭷ'), + (0xAB78, 'M', u'Ꭸ'), + (0xAB79, 'M', u'Ꭹ'), + (0xAB7A, 'M', u'Ꭺ'), + (0xAB7B, 'M', u'Ꭻ'), + (0xAB7C, 'M', u'Ꭼ'), + (0xAB7D, 'M', u'Ꭽ'), + (0xAB7E, 'M', u'Ꭾ'), + (0xAB7F, 'M', u'Ꭿ'), + (0xAB80, 'M', u'Ꮀ'), + (0xAB81, 'M', u'Ꮁ'), + (0xAB82, 'M', u'Ꮂ'), + (0xAB83, 'M', u'Ꮃ'), + (0xAB84, 'M', u'Ꮄ'), + (0xAB85, 'M', u'Ꮅ'), + (0xAB86, 'M', u'Ꮆ'), + (0xAB87, 'M', u'Ꮇ'), + (0xAB88, 'M', u'Ꮈ'), + (0xAB89, 'M', u'Ꮉ'), + (0xAB8A, 'M', u'Ꮊ'), + (0xAB8B, 'M', u'Ꮋ'), + (0xAB8C, 'M', u'Ꮌ'), + (0xAB8D, 'M', u'Ꮍ'), + (0xAB8E, 'M', u'Ꮎ'), + (0xAB8F, 'M', u'Ꮏ'), + (0xAB90, 'M', u'Ꮐ'), + (0xAB91, 'M', u'Ꮑ'), + (0xAB92, 'M', u'Ꮒ'), + (0xAB93, 'M', u'Ꮓ'), + (0xAB94, 'M', u'Ꮔ'), + (0xAB95, 'M', u'Ꮕ'), + (0xAB96, 'M', u'Ꮖ'), + (0xAB97, 'M', u'Ꮗ'), + (0xAB98, 'M', u'Ꮘ'), + (0xAB99, 'M', u'Ꮙ'), + (0xAB9A, 'M', u'Ꮚ'), + (0xAB9B, 'M', u'Ꮛ'), + (0xAB9C, 'M', u'Ꮜ'), + (0xAB9D, 'M', u'Ꮝ'), + (0xAB9E, 'M', u'Ꮞ'), + (0xAB9F, 'M', u'Ꮟ'), + (0xABA0, 'M', u'Ꮠ'), + (0xABA1, 'M', u'Ꮡ'), + (0xABA2, 'M', u'Ꮢ'), + (0xABA3, 'M', u'Ꮣ'), + (0xABA4, 'M', u'Ꮤ'), + (0xABA5, 'M', u'Ꮥ'), + (0xABA6, 'M', u'Ꮦ'), + (0xABA7, 'M', u'Ꮧ'), + (0xABA8, 'M', u'Ꮨ'), + (0xABA9, 'M', u'Ꮩ'), + (0xABAA, 'M', u'Ꮪ'), + (0xABAB, 'M', u'Ꮫ'), + (0xABAC, 'M', u'Ꮬ'), + (0xABAD, 'M', u'Ꮭ'), + (0xABAE, 'M', u'Ꮮ'), + (0xABAF, 'M', u'Ꮯ'), + (0xABB0, 'M', u'Ꮰ'), + (0xABB1, 'M', u'Ꮱ'), + (0xABB2, 'M', u'Ꮲ'), + (0xABB3, 'M', u'Ꮳ'), + (0xABB4, 'M', u'Ꮴ'), + (0xABB5, 'M', u'Ꮵ'), + (0xABB6, 'M', u'Ꮶ'), + (0xABB7, 'M', u'Ꮷ'), + (0xABB8, 'M', u'Ꮸ'), + (0xABB9, 'M', u'Ꮹ'), + (0xABBA, 'M', u'Ꮺ'), + (0xABBB, 'M', u'Ꮻ'), + (0xABBC, 'M', u'Ꮼ'), + (0xABBD, 'M', u'Ꮽ'), + (0xABBE, 'M', u'Ꮾ'), + (0xABBF, 'M', u'Ꮿ'), + (0xABC0, 'V'), + (0xABEE, 'X'), + (0xABF0, 'V'), + (0xABFA, 'X'), + (0xAC00, 'V'), + (0xD7A4, 'X'), + (0xD7B0, 'V'), + (0xD7C7, 'X'), + (0xD7CB, 'V'), + (0xD7FC, 'X'), + (0xF900, 'M', u'豈'), + (0xF901, 'M', u'更'), + (0xF902, 'M', u'車'), + (0xF903, 'M', u'賈'), + (0xF904, 'M', u'滑'), + (0xF905, 'M', u'串'), + (0xF906, 'M', u'句'), + (0xF907, 'M', u'龜'), + (0xF909, 'M', u'契'), + (0xF90A, 'M', u'金'), + ] + +def _seg_39(): + return [ + (0xF90B, 'M', u'喇'), + (0xF90C, 'M', u'奈'), + (0xF90D, 'M', u'懶'), + (0xF90E, 'M', u'癩'), + (0xF90F, 'M', u'羅'), + (0xF910, 'M', u'蘿'), + (0xF911, 'M', u'螺'), + (0xF912, 'M', u'裸'), + (0xF913, 'M', u'邏'), + (0xF914, 'M', u'樂'), + (0xF915, 'M', u'洛'), + (0xF916, 'M', u'烙'), + (0xF917, 'M', u'珞'), + (0xF918, 'M', u'落'), + (0xF919, 'M', u'酪'), + (0xF91A, 'M', u'駱'), + (0xF91B, 'M', u'亂'), + (0xF91C, 'M', u'卵'), + (0xF91D, 'M', u'欄'), + (0xF91E, 'M', u'爛'), + (0xF91F, 'M', u'蘭'), + (0xF920, 'M', u'鸞'), + (0xF921, 'M', u'嵐'), + (0xF922, 'M', u'濫'), + (0xF923, 'M', u'藍'), + (0xF924, 'M', u'襤'), + (0xF925, 'M', u'拉'), + (0xF926, 'M', u'臘'), + (0xF927, 'M', u'蠟'), + (0xF928, 'M', u'廊'), + (0xF929, 'M', u'朗'), + (0xF92A, 'M', u'浪'), + (0xF92B, 'M', u'狼'), + (0xF92C, 'M', u'郎'), + (0xF92D, 'M', u'來'), + (0xF92E, 'M', u'冷'), + (0xF92F, 'M', u'勞'), + (0xF930, 'M', u'擄'), + (0xF931, 'M', u'櫓'), + (0xF932, 'M', u'爐'), + (0xF933, 'M', u'盧'), + (0xF934, 'M', u'老'), + (0xF935, 'M', u'蘆'), + (0xF936, 'M', u'虜'), + (0xF937, 'M', u'路'), + (0xF938, 'M', u'露'), + (0xF939, 'M', u'魯'), + (0xF93A, 'M', u'鷺'), + (0xF93B, 'M', u'碌'), + (0xF93C, 'M', u'祿'), + (0xF93D, 'M', u'綠'), + (0xF93E, 'M', 
u'菉'), + (0xF93F, 'M', u'錄'), + (0xF940, 'M', u'鹿'), + (0xF941, 'M', u'論'), + (0xF942, 'M', u'壟'), + (0xF943, 'M', u'弄'), + (0xF944, 'M', u'籠'), + (0xF945, 'M', u'聾'), + (0xF946, 'M', u'牢'), + (0xF947, 'M', u'磊'), + (0xF948, 'M', u'賂'), + (0xF949, 'M', u'雷'), + (0xF94A, 'M', u'壘'), + (0xF94B, 'M', u'屢'), + (0xF94C, 'M', u'樓'), + (0xF94D, 'M', u'淚'), + (0xF94E, 'M', u'漏'), + (0xF94F, 'M', u'累'), + (0xF950, 'M', u'縷'), + (0xF951, 'M', u'陋'), + (0xF952, 'M', u'勒'), + (0xF953, 'M', u'肋'), + (0xF954, 'M', u'凜'), + (0xF955, 'M', u'凌'), + (0xF956, 'M', u'稜'), + (0xF957, 'M', u'綾'), + (0xF958, 'M', u'菱'), + (0xF959, 'M', u'陵'), + (0xF95A, 'M', u'讀'), + (0xF95B, 'M', u'拏'), + (0xF95C, 'M', u'樂'), + (0xF95D, 'M', u'諾'), + (0xF95E, 'M', u'丹'), + (0xF95F, 'M', u'寧'), + (0xF960, 'M', u'怒'), + (0xF961, 'M', u'率'), + (0xF962, 'M', u'異'), + (0xF963, 'M', u'北'), + (0xF964, 'M', u'磻'), + (0xF965, 'M', u'便'), + (0xF966, 'M', u'復'), + (0xF967, 'M', u'不'), + (0xF968, 'M', u'泌'), + (0xF969, 'M', u'數'), + (0xF96A, 'M', u'索'), + (0xF96B, 'M', u'參'), + (0xF96C, 'M', u'塞'), + (0xF96D, 'M', u'省'), + (0xF96E, 'M', u'葉'), + ] + +def _seg_40(): + return [ + (0xF96F, 'M', u'說'), + (0xF970, 'M', u'殺'), + (0xF971, 'M', u'辰'), + (0xF972, 'M', u'沈'), + (0xF973, 'M', u'拾'), + (0xF974, 'M', u'若'), + (0xF975, 'M', u'掠'), + (0xF976, 'M', u'略'), + (0xF977, 'M', u'亮'), + (0xF978, 'M', u'兩'), + (0xF979, 'M', u'凉'), + (0xF97A, 'M', u'梁'), + (0xF97B, 'M', u'糧'), + (0xF97C, 'M', u'良'), + (0xF97D, 'M', u'諒'), + (0xF97E, 'M', u'量'), + (0xF97F, 'M', u'勵'), + (0xF980, 'M', u'呂'), + (0xF981, 'M', u'女'), + (0xF982, 'M', u'廬'), + (0xF983, 'M', u'旅'), + (0xF984, 'M', u'濾'), + (0xF985, 'M', u'礪'), + (0xF986, 'M', u'閭'), + (0xF987, 'M', u'驪'), + (0xF988, 'M', u'麗'), + (0xF989, 'M', u'黎'), + (0xF98A, 'M', u'力'), + (0xF98B, 'M', u'曆'), + (0xF98C, 'M', u'歷'), + (0xF98D, 'M', u'轢'), + (0xF98E, 'M', u'年'), + (0xF98F, 'M', u'憐'), + (0xF990, 'M', u'戀'), + (0xF991, 'M', u'撚'), + (0xF992, 'M', u'漣'), + (0xF993, 'M', u'煉'), + (0xF994, 'M', u'璉'), + (0xF995, 'M', u'秊'), + (0xF996, 'M', u'練'), + (0xF997, 'M', u'聯'), + (0xF998, 'M', u'輦'), + (0xF999, 'M', u'蓮'), + (0xF99A, 'M', u'連'), + (0xF99B, 'M', u'鍊'), + (0xF99C, 'M', u'列'), + (0xF99D, 'M', u'劣'), + (0xF99E, 'M', u'咽'), + (0xF99F, 'M', u'烈'), + (0xF9A0, 'M', u'裂'), + (0xF9A1, 'M', u'說'), + (0xF9A2, 'M', u'廉'), + (0xF9A3, 'M', u'念'), + (0xF9A4, 'M', u'捻'), + (0xF9A5, 'M', u'殮'), + (0xF9A6, 'M', u'簾'), + (0xF9A7, 'M', u'獵'), + (0xF9A8, 'M', u'令'), + (0xF9A9, 'M', u'囹'), + (0xF9AA, 'M', u'寧'), + (0xF9AB, 'M', u'嶺'), + (0xF9AC, 'M', u'怜'), + (0xF9AD, 'M', u'玲'), + (0xF9AE, 'M', u'瑩'), + (0xF9AF, 'M', u'羚'), + (0xF9B0, 'M', u'聆'), + (0xF9B1, 'M', u'鈴'), + (0xF9B2, 'M', u'零'), + (0xF9B3, 'M', u'靈'), + (0xF9B4, 'M', u'領'), + (0xF9B5, 'M', u'例'), + (0xF9B6, 'M', u'禮'), + (0xF9B7, 'M', u'醴'), + (0xF9B8, 'M', u'隸'), + (0xF9B9, 'M', u'惡'), + (0xF9BA, 'M', u'了'), + (0xF9BB, 'M', u'僚'), + (0xF9BC, 'M', u'寮'), + (0xF9BD, 'M', u'尿'), + (0xF9BE, 'M', u'料'), + (0xF9BF, 'M', u'樂'), + (0xF9C0, 'M', u'燎'), + (0xF9C1, 'M', u'療'), + (0xF9C2, 'M', u'蓼'), + (0xF9C3, 'M', u'遼'), + (0xF9C4, 'M', u'龍'), + (0xF9C5, 'M', u'暈'), + (0xF9C6, 'M', u'阮'), + (0xF9C7, 'M', u'劉'), + (0xF9C8, 'M', u'杻'), + (0xF9C9, 'M', u'柳'), + (0xF9CA, 'M', u'流'), + (0xF9CB, 'M', u'溜'), + (0xF9CC, 'M', u'琉'), + (0xF9CD, 'M', u'留'), + (0xF9CE, 'M', u'硫'), + (0xF9CF, 'M', u'紐'), + (0xF9D0, 'M', u'類'), + (0xF9D1, 'M', u'六'), + (0xF9D2, 'M', u'戮'), + ] + +def _seg_41(): + return [ + (0xF9D3, 'M', u'陸'), + (0xF9D4, 'M', u'倫'), + (0xF9D5, 'M', u'崙'), + 
(0xF9D6, 'M', u'淪'), + (0xF9D7, 'M', u'輪'), + (0xF9D8, 'M', u'律'), + (0xF9D9, 'M', u'慄'), + (0xF9DA, 'M', u'栗'), + (0xF9DB, 'M', u'率'), + (0xF9DC, 'M', u'隆'), + (0xF9DD, 'M', u'利'), + (0xF9DE, 'M', u'吏'), + (0xF9DF, 'M', u'履'), + (0xF9E0, 'M', u'易'), + (0xF9E1, 'M', u'李'), + (0xF9E2, 'M', u'梨'), + (0xF9E3, 'M', u'泥'), + (0xF9E4, 'M', u'理'), + (0xF9E5, 'M', u'痢'), + (0xF9E6, 'M', u'罹'), + (0xF9E7, 'M', u'裏'), + (0xF9E8, 'M', u'裡'), + (0xF9E9, 'M', u'里'), + (0xF9EA, 'M', u'離'), + (0xF9EB, 'M', u'匿'), + (0xF9EC, 'M', u'溺'), + (0xF9ED, 'M', u'吝'), + (0xF9EE, 'M', u'燐'), + (0xF9EF, 'M', u'璘'), + (0xF9F0, 'M', u'藺'), + (0xF9F1, 'M', u'隣'), + (0xF9F2, 'M', u'鱗'), + (0xF9F3, 'M', u'麟'), + (0xF9F4, 'M', u'林'), + (0xF9F5, 'M', u'淋'), + (0xF9F6, 'M', u'臨'), + (0xF9F7, 'M', u'立'), + (0xF9F8, 'M', u'笠'), + (0xF9F9, 'M', u'粒'), + (0xF9FA, 'M', u'狀'), + (0xF9FB, 'M', u'炙'), + (0xF9FC, 'M', u'識'), + (0xF9FD, 'M', u'什'), + (0xF9FE, 'M', u'茶'), + (0xF9FF, 'M', u'刺'), + (0xFA00, 'M', u'切'), + (0xFA01, 'M', u'度'), + (0xFA02, 'M', u'拓'), + (0xFA03, 'M', u'糖'), + (0xFA04, 'M', u'宅'), + (0xFA05, 'M', u'洞'), + (0xFA06, 'M', u'暴'), + (0xFA07, 'M', u'輻'), + (0xFA08, 'M', u'行'), + (0xFA09, 'M', u'降'), + (0xFA0A, 'M', u'見'), + (0xFA0B, 'M', u'廓'), + (0xFA0C, 'M', u'兀'), + (0xFA0D, 'M', u'嗀'), + (0xFA0E, 'V'), + (0xFA10, 'M', u'塚'), + (0xFA11, 'V'), + (0xFA12, 'M', u'晴'), + (0xFA13, 'V'), + (0xFA15, 'M', u'凞'), + (0xFA16, 'M', u'猪'), + (0xFA17, 'M', u'益'), + (0xFA18, 'M', u'礼'), + (0xFA19, 'M', u'神'), + (0xFA1A, 'M', u'祥'), + (0xFA1B, 'M', u'福'), + (0xFA1C, 'M', u'靖'), + (0xFA1D, 'M', u'精'), + (0xFA1E, 'M', u'羽'), + (0xFA1F, 'V'), + (0xFA20, 'M', u'蘒'), + (0xFA21, 'V'), + (0xFA22, 'M', u'諸'), + (0xFA23, 'V'), + (0xFA25, 'M', u'逸'), + (0xFA26, 'M', u'都'), + (0xFA27, 'V'), + (0xFA2A, 'M', u'飯'), + (0xFA2B, 'M', u'飼'), + (0xFA2C, 'M', u'館'), + (0xFA2D, 'M', u'鶴'), + (0xFA2E, 'M', u'郞'), + (0xFA2F, 'M', u'隷'), + (0xFA30, 'M', u'侮'), + (0xFA31, 'M', u'僧'), + (0xFA32, 'M', u'免'), + (0xFA33, 'M', u'勉'), + (0xFA34, 'M', u'勤'), + (0xFA35, 'M', u'卑'), + (0xFA36, 'M', u'喝'), + (0xFA37, 'M', u'嘆'), + (0xFA38, 'M', u'器'), + (0xFA39, 'M', u'塀'), + (0xFA3A, 'M', u'墨'), + (0xFA3B, 'M', u'層'), + ] + +def _seg_42(): + return [ + (0xFA3C, 'M', u'屮'), + (0xFA3D, 'M', u'悔'), + (0xFA3E, 'M', u'慨'), + (0xFA3F, 'M', u'憎'), + (0xFA40, 'M', u'懲'), + (0xFA41, 'M', u'敏'), + (0xFA42, 'M', u'既'), + (0xFA43, 'M', u'暑'), + (0xFA44, 'M', u'梅'), + (0xFA45, 'M', u'海'), + (0xFA46, 'M', u'渚'), + (0xFA47, 'M', u'漢'), + (0xFA48, 'M', u'煮'), + (0xFA49, 'M', u'爫'), + (0xFA4A, 'M', u'琢'), + (0xFA4B, 'M', u'碑'), + (0xFA4C, 'M', u'社'), + (0xFA4D, 'M', u'祉'), + (0xFA4E, 'M', u'祈'), + (0xFA4F, 'M', u'祐'), + (0xFA50, 'M', u'祖'), + (0xFA51, 'M', u'祝'), + (0xFA52, 'M', u'禍'), + (0xFA53, 'M', u'禎'), + (0xFA54, 'M', u'穀'), + (0xFA55, 'M', u'突'), + (0xFA56, 'M', u'節'), + (0xFA57, 'M', u'練'), + (0xFA58, 'M', u'縉'), + (0xFA59, 'M', u'繁'), + (0xFA5A, 'M', u'署'), + (0xFA5B, 'M', u'者'), + (0xFA5C, 'M', u'臭'), + (0xFA5D, 'M', u'艹'), + (0xFA5F, 'M', u'著'), + (0xFA60, 'M', u'褐'), + (0xFA61, 'M', u'視'), + (0xFA62, 'M', u'謁'), + (0xFA63, 'M', u'謹'), + (0xFA64, 'M', u'賓'), + (0xFA65, 'M', u'贈'), + (0xFA66, 'M', u'辶'), + (0xFA67, 'M', u'逸'), + (0xFA68, 'M', u'難'), + (0xFA69, 'M', u'響'), + (0xFA6A, 'M', u'頻'), + (0xFA6B, 'M', u'恵'), + (0xFA6C, 'M', u'𤋮'), + (0xFA6D, 'M', u'舘'), + (0xFA6E, 'X'), + (0xFA70, 'M', u'並'), + (0xFA71, 'M', u'况'), + (0xFA72, 'M', u'全'), + (0xFA73, 'M', u'侀'), + (0xFA74, 'M', u'充'), + (0xFA75, 'M', u'冀'), + (0xFA76, 'M', u'勇'), + (0xFA77, 'M', u'勺'), + 
(0xFA78, 'M', u'喝'), + (0xFA79, 'M', u'啕'), + (0xFA7A, 'M', u'喙'), + (0xFA7B, 'M', u'嗢'), + (0xFA7C, 'M', u'塚'), + (0xFA7D, 'M', u'墳'), + (0xFA7E, 'M', u'奄'), + (0xFA7F, 'M', u'奔'), + (0xFA80, 'M', u'婢'), + (0xFA81, 'M', u'嬨'), + (0xFA82, 'M', u'廒'), + (0xFA83, 'M', u'廙'), + (0xFA84, 'M', u'彩'), + (0xFA85, 'M', u'徭'), + (0xFA86, 'M', u'惘'), + (0xFA87, 'M', u'慎'), + (0xFA88, 'M', u'愈'), + (0xFA89, 'M', u'憎'), + (0xFA8A, 'M', u'慠'), + (0xFA8B, 'M', u'懲'), + (0xFA8C, 'M', u'戴'), + (0xFA8D, 'M', u'揄'), + (0xFA8E, 'M', u'搜'), + (0xFA8F, 'M', u'摒'), + (0xFA90, 'M', u'敖'), + (0xFA91, 'M', u'晴'), + (0xFA92, 'M', u'朗'), + (0xFA93, 'M', u'望'), + (0xFA94, 'M', u'杖'), + (0xFA95, 'M', u'歹'), + (0xFA96, 'M', u'殺'), + (0xFA97, 'M', u'流'), + (0xFA98, 'M', u'滛'), + (0xFA99, 'M', u'滋'), + (0xFA9A, 'M', u'漢'), + (0xFA9B, 'M', u'瀞'), + (0xFA9C, 'M', u'煮'), + (0xFA9D, 'M', u'瞧'), + (0xFA9E, 'M', u'爵'), + (0xFA9F, 'M', u'犯'), + (0xFAA0, 'M', u'猪'), + (0xFAA1, 'M', u'瑱'), + ] + +def _seg_43(): + return [ + (0xFAA2, 'M', u'甆'), + (0xFAA3, 'M', u'画'), + (0xFAA4, 'M', u'瘝'), + (0xFAA5, 'M', u'瘟'), + (0xFAA6, 'M', u'益'), + (0xFAA7, 'M', u'盛'), + (0xFAA8, 'M', u'直'), + (0xFAA9, 'M', u'睊'), + (0xFAAA, 'M', u'着'), + (0xFAAB, 'M', u'磌'), + (0xFAAC, 'M', u'窱'), + (0xFAAD, 'M', u'節'), + (0xFAAE, 'M', u'类'), + (0xFAAF, 'M', u'絛'), + (0xFAB0, 'M', u'練'), + (0xFAB1, 'M', u'缾'), + (0xFAB2, 'M', u'者'), + (0xFAB3, 'M', u'荒'), + (0xFAB4, 'M', u'華'), + (0xFAB5, 'M', u'蝹'), + (0xFAB6, 'M', u'襁'), + (0xFAB7, 'M', u'覆'), + (0xFAB8, 'M', u'視'), + (0xFAB9, 'M', u'調'), + (0xFABA, 'M', u'諸'), + (0xFABB, 'M', u'請'), + (0xFABC, 'M', u'謁'), + (0xFABD, 'M', u'諾'), + (0xFABE, 'M', u'諭'), + (0xFABF, 'M', u'謹'), + (0xFAC0, 'M', u'變'), + (0xFAC1, 'M', u'贈'), + (0xFAC2, 'M', u'輸'), + (0xFAC3, 'M', u'遲'), + (0xFAC4, 'M', u'醙'), + (0xFAC5, 'M', u'鉶'), + (0xFAC6, 'M', u'陼'), + (0xFAC7, 'M', u'難'), + (0xFAC8, 'M', u'靖'), + (0xFAC9, 'M', u'韛'), + (0xFACA, 'M', u'響'), + (0xFACB, 'M', u'頋'), + (0xFACC, 'M', u'頻'), + (0xFACD, 'M', u'鬒'), + (0xFACE, 'M', u'龜'), + (0xFACF, 'M', u'𢡊'), + (0xFAD0, 'M', u'𢡄'), + (0xFAD1, 'M', u'𣏕'), + (0xFAD2, 'M', u'㮝'), + (0xFAD3, 'M', u'䀘'), + (0xFAD4, 'M', u'䀹'), + (0xFAD5, 'M', u'𥉉'), + (0xFAD6, 'M', u'𥳐'), + (0xFAD7, 'M', u'𧻓'), + (0xFAD8, 'M', u'齃'), + (0xFAD9, 'M', u'龎'), + (0xFADA, 'X'), + (0xFB00, 'M', u'ff'), + (0xFB01, 'M', u'fi'), + (0xFB02, 'M', u'fl'), + (0xFB03, 'M', u'ffi'), + (0xFB04, 'M', u'ffl'), + (0xFB05, 'M', u'st'), + (0xFB07, 'X'), + (0xFB13, 'M', u'մն'), + (0xFB14, 'M', u'մե'), + (0xFB15, 'M', u'մի'), + (0xFB16, 'M', u'վն'), + (0xFB17, 'M', u'մխ'), + (0xFB18, 'X'), + (0xFB1D, 'M', u'יִ'), + (0xFB1E, 'V'), + (0xFB1F, 'M', u'ײַ'), + (0xFB20, 'M', u'ע'), + (0xFB21, 'M', u'א'), + (0xFB22, 'M', u'ד'), + (0xFB23, 'M', u'ה'), + (0xFB24, 'M', u'כ'), + (0xFB25, 'M', u'ל'), + (0xFB26, 'M', u'ם'), + (0xFB27, 'M', u'ר'), + (0xFB28, 'M', u'ת'), + (0xFB29, '3', u'+'), + (0xFB2A, 'M', u'שׁ'), + (0xFB2B, 'M', u'שׂ'), + (0xFB2C, 'M', u'שּׁ'), + (0xFB2D, 'M', u'שּׂ'), + (0xFB2E, 'M', u'אַ'), + (0xFB2F, 'M', u'אָ'), + (0xFB30, 'M', u'אּ'), + (0xFB31, 'M', u'בּ'), + (0xFB32, 'M', u'גּ'), + (0xFB33, 'M', u'דּ'), + (0xFB34, 'M', u'הּ'), + (0xFB35, 'M', u'וּ'), + (0xFB36, 'M', u'זּ'), + (0xFB37, 'X'), + (0xFB38, 'M', u'טּ'), + (0xFB39, 'M', u'יּ'), + (0xFB3A, 'M', u'ךּ'), + ] + +def _seg_44(): + return [ + (0xFB3B, 'M', u'כּ'), + (0xFB3C, 'M', u'לּ'), + (0xFB3D, 'X'), + (0xFB3E, 'M', u'מּ'), + (0xFB3F, 'X'), + (0xFB40, 'M', u'נּ'), + (0xFB41, 'M', u'סּ'), + (0xFB42, 'X'), + (0xFB43, 'M', u'ףּ'), + (0xFB44, 'M', u'פּ'), + 
(0xFB45, 'X'), + (0xFB46, 'M', u'צּ'), + (0xFB47, 'M', u'קּ'), + (0xFB48, 'M', u'רּ'), + (0xFB49, 'M', u'שּ'), + (0xFB4A, 'M', u'תּ'), + (0xFB4B, 'M', u'וֹ'), + (0xFB4C, 'M', u'בֿ'), + (0xFB4D, 'M', u'כֿ'), + (0xFB4E, 'M', u'פֿ'), + (0xFB4F, 'M', u'אל'), + (0xFB50, 'M', u'ٱ'), + (0xFB52, 'M', u'ٻ'), + (0xFB56, 'M', u'پ'), + (0xFB5A, 'M', u'ڀ'), + (0xFB5E, 'M', u'ٺ'), + (0xFB62, 'M', u'ٿ'), + (0xFB66, 'M', u'ٹ'), + (0xFB6A, 'M', u'ڤ'), + (0xFB6E, 'M', u'ڦ'), + (0xFB72, 'M', u'ڄ'), + (0xFB76, 'M', u'ڃ'), + (0xFB7A, 'M', u'چ'), + (0xFB7E, 'M', u'ڇ'), + (0xFB82, 'M', u'ڍ'), + (0xFB84, 'M', u'ڌ'), + (0xFB86, 'M', u'ڎ'), + (0xFB88, 'M', u'ڈ'), + (0xFB8A, 'M', u'ژ'), + (0xFB8C, 'M', u'ڑ'), + (0xFB8E, 'M', u'ک'), + (0xFB92, 'M', u'گ'), + (0xFB96, 'M', u'ڳ'), + (0xFB9A, 'M', u'ڱ'), + (0xFB9E, 'M', u'ں'), + (0xFBA0, 'M', u'ڻ'), + (0xFBA4, 'M', u'ۀ'), + (0xFBA6, 'M', u'ہ'), + (0xFBAA, 'M', u'ھ'), + (0xFBAE, 'M', u'ے'), + (0xFBB0, 'M', u'ۓ'), + (0xFBB2, 'V'), + (0xFBC2, 'X'), + (0xFBD3, 'M', u'ڭ'), + (0xFBD7, 'M', u'ۇ'), + (0xFBD9, 'M', u'ۆ'), + (0xFBDB, 'M', u'ۈ'), + (0xFBDD, 'M', u'ۇٴ'), + (0xFBDE, 'M', u'ۋ'), + (0xFBE0, 'M', u'ۅ'), + (0xFBE2, 'M', u'ۉ'), + (0xFBE4, 'M', u'ې'), + (0xFBE8, 'M', u'ى'), + (0xFBEA, 'M', u'ئا'), + (0xFBEC, 'M', u'ئە'), + (0xFBEE, 'M', u'ئو'), + (0xFBF0, 'M', u'ئۇ'), + (0xFBF2, 'M', u'ئۆ'), + (0xFBF4, 'M', u'ئۈ'), + (0xFBF6, 'M', u'ئې'), + (0xFBF9, 'M', u'ئى'), + (0xFBFC, 'M', u'ی'), + (0xFC00, 'M', u'ئج'), + (0xFC01, 'M', u'ئح'), + (0xFC02, 'M', u'ئم'), + (0xFC03, 'M', u'ئى'), + (0xFC04, 'M', u'ئي'), + (0xFC05, 'M', u'بج'), + (0xFC06, 'M', u'بح'), + (0xFC07, 'M', u'بخ'), + (0xFC08, 'M', u'بم'), + (0xFC09, 'M', u'بى'), + (0xFC0A, 'M', u'بي'), + (0xFC0B, 'M', u'تج'), + (0xFC0C, 'M', u'تح'), + (0xFC0D, 'M', u'تخ'), + (0xFC0E, 'M', u'تم'), + (0xFC0F, 'M', u'تى'), + (0xFC10, 'M', u'تي'), + (0xFC11, 'M', u'ثج'), + (0xFC12, 'M', u'ثم'), + (0xFC13, 'M', u'ثى'), + (0xFC14, 'M', u'ثي'), + (0xFC15, 'M', u'جح'), + (0xFC16, 'M', u'جم'), + (0xFC17, 'M', u'حج'), + (0xFC18, 'M', u'حم'), + (0xFC19, 'M', u'خج'), + (0xFC1A, 'M', u'خح'), + (0xFC1B, 'M', u'خم'), + ] + +def _seg_45(): + return [ + (0xFC1C, 'M', u'سج'), + (0xFC1D, 'M', u'سح'), + (0xFC1E, 'M', u'سخ'), + (0xFC1F, 'M', u'سم'), + (0xFC20, 'M', u'صح'), + (0xFC21, 'M', u'صم'), + (0xFC22, 'M', u'ضج'), + (0xFC23, 'M', u'ضح'), + (0xFC24, 'M', u'ضخ'), + (0xFC25, 'M', u'ضم'), + (0xFC26, 'M', u'طح'), + (0xFC27, 'M', u'طم'), + (0xFC28, 'M', u'ظم'), + (0xFC29, 'M', u'عج'), + (0xFC2A, 'M', u'عم'), + (0xFC2B, 'M', u'غج'), + (0xFC2C, 'M', u'غم'), + (0xFC2D, 'M', u'فج'), + (0xFC2E, 'M', u'فح'), + (0xFC2F, 'M', u'فخ'), + (0xFC30, 'M', u'فم'), + (0xFC31, 'M', u'فى'), + (0xFC32, 'M', u'في'), + (0xFC33, 'M', u'قح'), + (0xFC34, 'M', u'قم'), + (0xFC35, 'M', u'قى'), + (0xFC36, 'M', u'قي'), + (0xFC37, 'M', u'كا'), + (0xFC38, 'M', u'كج'), + (0xFC39, 'M', u'كح'), + (0xFC3A, 'M', u'كخ'), + (0xFC3B, 'M', u'كل'), + (0xFC3C, 'M', u'كم'), + (0xFC3D, 'M', u'كى'), + (0xFC3E, 'M', u'كي'), + (0xFC3F, 'M', u'لج'), + (0xFC40, 'M', u'لح'), + (0xFC41, 'M', u'لخ'), + (0xFC42, 'M', u'لم'), + (0xFC43, 'M', u'لى'), + (0xFC44, 'M', u'لي'), + (0xFC45, 'M', u'مج'), + (0xFC46, 'M', u'مح'), + (0xFC47, 'M', u'مخ'), + (0xFC48, 'M', u'مم'), + (0xFC49, 'M', u'مى'), + (0xFC4A, 'M', u'مي'), + (0xFC4B, 'M', u'نج'), + (0xFC4C, 'M', u'نح'), + (0xFC4D, 'M', u'نخ'), + (0xFC4E, 'M', u'نم'), + (0xFC4F, 'M', u'نى'), + (0xFC50, 'M', u'ني'), + (0xFC51, 'M', u'هج'), + (0xFC52, 'M', u'هم'), + (0xFC53, 'M', u'هى'), + (0xFC54, 'M', u'هي'), + (0xFC55, 'M', u'يج'), + (0xFC56, 'M', u'يح'), + 
(0xFC57, 'M', u'يخ'), + (0xFC58, 'M', u'يم'), + (0xFC59, 'M', u'يى'), + (0xFC5A, 'M', u'يي'), + (0xFC5B, 'M', u'ذٰ'), + (0xFC5C, 'M', u'رٰ'), + (0xFC5D, 'M', u'ىٰ'), + (0xFC5E, '3', u' ٌّ'), + (0xFC5F, '3', u' ٍّ'), + (0xFC60, '3', u' َّ'), + (0xFC61, '3', u' ُّ'), + (0xFC62, '3', u' ِّ'), + (0xFC63, '3', u' ّٰ'), + (0xFC64, 'M', u'ئر'), + (0xFC65, 'M', u'ئز'), + (0xFC66, 'M', u'ئم'), + (0xFC67, 'M', u'ئن'), + (0xFC68, 'M', u'ئى'), + (0xFC69, 'M', u'ئي'), + (0xFC6A, 'M', u'بر'), + (0xFC6B, 'M', u'بز'), + (0xFC6C, 'M', u'بم'), + (0xFC6D, 'M', u'بن'), + (0xFC6E, 'M', u'بى'), + (0xFC6F, 'M', u'بي'), + (0xFC70, 'M', u'تر'), + (0xFC71, 'M', u'تز'), + (0xFC72, 'M', u'تم'), + (0xFC73, 'M', u'تن'), + (0xFC74, 'M', u'تى'), + (0xFC75, 'M', u'تي'), + (0xFC76, 'M', u'ثر'), + (0xFC77, 'M', u'ثز'), + (0xFC78, 'M', u'ثم'), + (0xFC79, 'M', u'ثن'), + (0xFC7A, 'M', u'ثى'), + (0xFC7B, 'M', u'ثي'), + (0xFC7C, 'M', u'فى'), + (0xFC7D, 'M', u'في'), + (0xFC7E, 'M', u'قى'), + (0xFC7F, 'M', u'قي'), + ] + +def _seg_46(): + return [ + (0xFC80, 'M', u'كا'), + (0xFC81, 'M', u'كل'), + (0xFC82, 'M', u'كم'), + (0xFC83, 'M', u'كى'), + (0xFC84, 'M', u'كي'), + (0xFC85, 'M', u'لم'), + (0xFC86, 'M', u'لى'), + (0xFC87, 'M', u'لي'), + (0xFC88, 'M', u'ما'), + (0xFC89, 'M', u'مم'), + (0xFC8A, 'M', u'نر'), + (0xFC8B, 'M', u'نز'), + (0xFC8C, 'M', u'نم'), + (0xFC8D, 'M', u'نن'), + (0xFC8E, 'M', u'نى'), + (0xFC8F, 'M', u'ني'), + (0xFC90, 'M', u'ىٰ'), + (0xFC91, 'M', u'ير'), + (0xFC92, 'M', u'يز'), + (0xFC93, 'M', u'يم'), + (0xFC94, 'M', u'ين'), + (0xFC95, 'M', u'يى'), + (0xFC96, 'M', u'يي'), + (0xFC97, 'M', u'ئج'), + (0xFC98, 'M', u'ئح'), + (0xFC99, 'M', u'ئخ'), + (0xFC9A, 'M', u'ئم'), + (0xFC9B, 'M', u'ئه'), + (0xFC9C, 'M', u'بج'), + (0xFC9D, 'M', u'بح'), + (0xFC9E, 'M', u'بخ'), + (0xFC9F, 'M', u'بم'), + (0xFCA0, 'M', u'به'), + (0xFCA1, 'M', u'تج'), + (0xFCA2, 'M', u'تح'), + (0xFCA3, 'M', u'تخ'), + (0xFCA4, 'M', u'تم'), + (0xFCA5, 'M', u'ته'), + (0xFCA6, 'M', u'ثم'), + (0xFCA7, 'M', u'جح'), + (0xFCA8, 'M', u'جم'), + (0xFCA9, 'M', u'حج'), + (0xFCAA, 'M', u'حم'), + (0xFCAB, 'M', u'خج'), + (0xFCAC, 'M', u'خم'), + (0xFCAD, 'M', u'سج'), + (0xFCAE, 'M', u'سح'), + (0xFCAF, 'M', u'سخ'), + (0xFCB0, 'M', u'سم'), + (0xFCB1, 'M', u'صح'), + (0xFCB2, 'M', u'صخ'), + (0xFCB3, 'M', u'صم'), + (0xFCB4, 'M', u'ضج'), + (0xFCB5, 'M', u'ضح'), + (0xFCB6, 'M', u'ضخ'), + (0xFCB7, 'M', u'ضم'), + (0xFCB8, 'M', u'طح'), + (0xFCB9, 'M', u'ظم'), + (0xFCBA, 'M', u'عج'), + (0xFCBB, 'M', u'عم'), + (0xFCBC, 'M', u'غج'), + (0xFCBD, 'M', u'غم'), + (0xFCBE, 'M', u'فج'), + (0xFCBF, 'M', u'فح'), + (0xFCC0, 'M', u'فخ'), + (0xFCC1, 'M', u'فم'), + (0xFCC2, 'M', u'قح'), + (0xFCC3, 'M', u'قم'), + (0xFCC4, 'M', u'كج'), + (0xFCC5, 'M', u'كح'), + (0xFCC6, 'M', u'كخ'), + (0xFCC7, 'M', u'كل'), + (0xFCC8, 'M', u'كم'), + (0xFCC9, 'M', u'لج'), + (0xFCCA, 'M', u'لح'), + (0xFCCB, 'M', u'لخ'), + (0xFCCC, 'M', u'لم'), + (0xFCCD, 'M', u'له'), + (0xFCCE, 'M', u'مج'), + (0xFCCF, 'M', u'مح'), + (0xFCD0, 'M', u'مخ'), + (0xFCD1, 'M', u'مم'), + (0xFCD2, 'M', u'نج'), + (0xFCD3, 'M', u'نح'), + (0xFCD4, 'M', u'نخ'), + (0xFCD5, 'M', u'نم'), + (0xFCD6, 'M', u'نه'), + (0xFCD7, 'M', u'هج'), + (0xFCD8, 'M', u'هم'), + (0xFCD9, 'M', u'هٰ'), + (0xFCDA, 'M', u'يج'), + (0xFCDB, 'M', u'يح'), + (0xFCDC, 'M', u'يخ'), + (0xFCDD, 'M', u'يم'), + (0xFCDE, 'M', u'يه'), + (0xFCDF, 'M', u'ئم'), + (0xFCE0, 'M', u'ئه'), + (0xFCE1, 'M', u'بم'), + (0xFCE2, 'M', u'به'), + (0xFCE3, 'M', u'تم'), + ] + +def _seg_47(): + return [ + (0xFCE4, 'M', u'ته'), + (0xFCE5, 'M', u'ثم'), + (0xFCE6, 'M', u'ثه'), + (0xFCE7, 'M', u'سم'), + 
(0xFCE8, 'M', u'سه'), + (0xFCE9, 'M', u'شم'), + (0xFCEA, 'M', u'شه'), + (0xFCEB, 'M', u'كل'), + (0xFCEC, 'M', u'كم'), + (0xFCED, 'M', u'لم'), + (0xFCEE, 'M', u'نم'), + (0xFCEF, 'M', u'نه'), + (0xFCF0, 'M', u'يم'), + (0xFCF1, 'M', u'يه'), + (0xFCF2, 'M', u'ـَّ'), + (0xFCF3, 'M', u'ـُّ'), + (0xFCF4, 'M', u'ـِّ'), + (0xFCF5, 'M', u'طى'), + (0xFCF6, 'M', u'طي'), + (0xFCF7, 'M', u'عى'), + (0xFCF8, 'M', u'عي'), + (0xFCF9, 'M', u'غى'), + (0xFCFA, 'M', u'غي'), + (0xFCFB, 'M', u'سى'), + (0xFCFC, 'M', u'سي'), + (0xFCFD, 'M', u'شى'), + (0xFCFE, 'M', u'شي'), + (0xFCFF, 'M', u'حى'), + (0xFD00, 'M', u'حي'), + (0xFD01, 'M', u'جى'), + (0xFD02, 'M', u'جي'), + (0xFD03, 'M', u'خى'), + (0xFD04, 'M', u'خي'), + (0xFD05, 'M', u'صى'), + (0xFD06, 'M', u'صي'), + (0xFD07, 'M', u'ضى'), + (0xFD08, 'M', u'ضي'), + (0xFD09, 'M', u'شج'), + (0xFD0A, 'M', u'شح'), + (0xFD0B, 'M', u'شخ'), + (0xFD0C, 'M', u'شم'), + (0xFD0D, 'M', u'شر'), + (0xFD0E, 'M', u'سر'), + (0xFD0F, 'M', u'صر'), + (0xFD10, 'M', u'ضر'), + (0xFD11, 'M', u'طى'), + (0xFD12, 'M', u'طي'), + (0xFD13, 'M', u'عى'), + (0xFD14, 'M', u'عي'), + (0xFD15, 'M', u'غى'), + (0xFD16, 'M', u'غي'), + (0xFD17, 'M', u'سى'), + (0xFD18, 'M', u'سي'), + (0xFD19, 'M', u'شى'), + (0xFD1A, 'M', u'شي'), + (0xFD1B, 'M', u'حى'), + (0xFD1C, 'M', u'حي'), + (0xFD1D, 'M', u'جى'), + (0xFD1E, 'M', u'جي'), + (0xFD1F, 'M', u'خى'), + (0xFD20, 'M', u'خي'), + (0xFD21, 'M', u'صى'), + (0xFD22, 'M', u'صي'), + (0xFD23, 'M', u'ضى'), + (0xFD24, 'M', u'ضي'), + (0xFD25, 'M', u'شج'), + (0xFD26, 'M', u'شح'), + (0xFD27, 'M', u'شخ'), + (0xFD28, 'M', u'شم'), + (0xFD29, 'M', u'شر'), + (0xFD2A, 'M', u'سر'), + (0xFD2B, 'M', u'صر'), + (0xFD2C, 'M', u'ضر'), + (0xFD2D, 'M', u'شج'), + (0xFD2E, 'M', u'شح'), + (0xFD2F, 'M', u'شخ'), + (0xFD30, 'M', u'شم'), + (0xFD31, 'M', u'سه'), + (0xFD32, 'M', u'شه'), + (0xFD33, 'M', u'طم'), + (0xFD34, 'M', u'سج'), + (0xFD35, 'M', u'سح'), + (0xFD36, 'M', u'سخ'), + (0xFD37, 'M', u'شج'), + (0xFD38, 'M', u'شح'), + (0xFD39, 'M', u'شخ'), + (0xFD3A, 'M', u'طم'), + (0xFD3B, 'M', u'ظم'), + (0xFD3C, 'M', u'اً'), + (0xFD3E, 'V'), + (0xFD40, 'X'), + (0xFD50, 'M', u'تجم'), + (0xFD51, 'M', u'تحج'), + (0xFD53, 'M', u'تحم'), + (0xFD54, 'M', u'تخم'), + (0xFD55, 'M', u'تمج'), + (0xFD56, 'M', u'تمح'), + (0xFD57, 'M', u'تمخ'), + (0xFD58, 'M', u'جمح'), + (0xFD5A, 'M', u'حمي'), + ] + +def _seg_48(): + return [ + (0xFD5B, 'M', u'حمى'), + (0xFD5C, 'M', u'سحج'), + (0xFD5D, 'M', u'سجح'), + (0xFD5E, 'M', u'سجى'), + (0xFD5F, 'M', u'سمح'), + (0xFD61, 'M', u'سمج'), + (0xFD62, 'M', u'سمم'), + (0xFD64, 'M', u'صحح'), + (0xFD66, 'M', u'صمم'), + (0xFD67, 'M', u'شحم'), + (0xFD69, 'M', u'شجي'), + (0xFD6A, 'M', u'شمخ'), + (0xFD6C, 'M', u'شمم'), + (0xFD6E, 'M', u'ضحى'), + (0xFD6F, 'M', u'ضخم'), + (0xFD71, 'M', u'طمح'), + (0xFD73, 'M', u'طمم'), + (0xFD74, 'M', u'طمي'), + (0xFD75, 'M', u'عجم'), + (0xFD76, 'M', u'عمم'), + (0xFD78, 'M', u'عمى'), + (0xFD79, 'M', u'غمم'), + (0xFD7A, 'M', u'غمي'), + (0xFD7B, 'M', u'غمى'), + (0xFD7C, 'M', u'فخم'), + (0xFD7E, 'M', u'قمح'), + (0xFD7F, 'M', u'قمم'), + (0xFD80, 'M', u'لحم'), + (0xFD81, 'M', u'لحي'), + (0xFD82, 'M', u'لحى'), + (0xFD83, 'M', u'لجج'), + (0xFD85, 'M', u'لخم'), + (0xFD87, 'M', u'لمح'), + (0xFD89, 'M', u'محج'), + (0xFD8A, 'M', u'محم'), + (0xFD8B, 'M', u'محي'), + (0xFD8C, 'M', u'مجح'), + (0xFD8D, 'M', u'مجم'), + (0xFD8E, 'M', u'مخج'), + (0xFD8F, 'M', u'مخم'), + (0xFD90, 'X'), + (0xFD92, 'M', u'مجخ'), + (0xFD93, 'M', u'همج'), + (0xFD94, 'M', u'همم'), + (0xFD95, 'M', u'نحم'), + (0xFD96, 'M', u'نحى'), + (0xFD97, 'M', u'نجم'), + (0xFD99, 'M', u'نجى'), + (0xFD9A, 'M', u'نمي'), + 
(0xFD9B, 'M', u'نمى'), + (0xFD9C, 'M', u'يمم'), + (0xFD9E, 'M', u'بخي'), + (0xFD9F, 'M', u'تجي'), + (0xFDA0, 'M', u'تجى'), + (0xFDA1, 'M', u'تخي'), + (0xFDA2, 'M', u'تخى'), + (0xFDA3, 'M', u'تمي'), + (0xFDA4, 'M', u'تمى'), + (0xFDA5, 'M', u'جمي'), + (0xFDA6, 'M', u'جحى'), + (0xFDA7, 'M', u'جمى'), + (0xFDA8, 'M', u'سخى'), + (0xFDA9, 'M', u'صحي'), + (0xFDAA, 'M', u'شحي'), + (0xFDAB, 'M', u'ضحي'), + (0xFDAC, 'M', u'لجي'), + (0xFDAD, 'M', u'لمي'), + (0xFDAE, 'M', u'يحي'), + (0xFDAF, 'M', u'يجي'), + (0xFDB0, 'M', u'يمي'), + (0xFDB1, 'M', u'ممي'), + (0xFDB2, 'M', u'قمي'), + (0xFDB3, 'M', u'نحي'), + (0xFDB4, 'M', u'قمح'), + (0xFDB5, 'M', u'لحم'), + (0xFDB6, 'M', u'عمي'), + (0xFDB7, 'M', u'كمي'), + (0xFDB8, 'M', u'نجح'), + (0xFDB9, 'M', u'مخي'), + (0xFDBA, 'M', u'لجم'), + (0xFDBB, 'M', u'كمم'), + (0xFDBC, 'M', u'لجم'), + (0xFDBD, 'M', u'نجح'), + (0xFDBE, 'M', u'جحي'), + (0xFDBF, 'M', u'حجي'), + (0xFDC0, 'M', u'مجي'), + (0xFDC1, 'M', u'فمي'), + (0xFDC2, 'M', u'بحي'), + (0xFDC3, 'M', u'كمم'), + (0xFDC4, 'M', u'عجم'), + (0xFDC5, 'M', u'صمم'), + (0xFDC6, 'M', u'سخي'), + (0xFDC7, 'M', u'نجي'), + (0xFDC8, 'X'), + (0xFDF0, 'M', u'صلے'), + (0xFDF1, 'M', u'قلے'), + (0xFDF2, 'M', u'الله'), + (0xFDF3, 'M', u'اكبر'), + (0xFDF4, 'M', u'محمد'), + (0xFDF5, 'M', u'صلعم'), + ] + +def _seg_49(): + return [ + (0xFDF6, 'M', u'رسول'), + (0xFDF7, 'M', u'عليه'), + (0xFDF8, 'M', u'وسلم'), + (0xFDF9, 'M', u'صلى'), + (0xFDFA, '3', u'صلى الله عليه وسلم'), + (0xFDFB, '3', u'جل جلاله'), + (0xFDFC, 'M', u'ریال'), + (0xFDFD, 'V'), + (0xFDFE, 'X'), + (0xFE00, 'I'), + (0xFE10, '3', u','), + (0xFE11, 'M', u'、'), + (0xFE12, 'X'), + (0xFE13, '3', u':'), + (0xFE14, '3', u';'), + (0xFE15, '3', u'!'), + (0xFE16, '3', u'?'), + (0xFE17, 'M', u'〖'), + (0xFE18, 'M', u'〗'), + (0xFE19, 'X'), + (0xFE20, 'V'), + (0xFE30, 'X'), + (0xFE31, 'M', u'—'), + (0xFE32, 'M', u'–'), + (0xFE33, '3', u'_'), + (0xFE35, '3', u'('), + (0xFE36, '3', u')'), + (0xFE37, '3', u'{'), + (0xFE38, '3', u'}'), + (0xFE39, 'M', u'〔'), + (0xFE3A, 'M', u'〕'), + (0xFE3B, 'M', u'【'), + (0xFE3C, 'M', u'】'), + (0xFE3D, 'M', u'《'), + (0xFE3E, 'M', u'》'), + (0xFE3F, 'M', u'〈'), + (0xFE40, 'M', u'〉'), + (0xFE41, 'M', u'「'), + (0xFE42, 'M', u'」'), + (0xFE43, 'M', u'『'), + (0xFE44, 'M', u'』'), + (0xFE45, 'V'), + (0xFE47, '3', u'['), + (0xFE48, '3', u']'), + (0xFE49, '3', u' ̅'), + (0xFE4D, '3', u'_'), + (0xFE50, '3', u','), + (0xFE51, 'M', u'、'), + (0xFE52, 'X'), + (0xFE54, '3', u';'), + (0xFE55, '3', u':'), + (0xFE56, '3', u'?'), + (0xFE57, '3', u'!'), + (0xFE58, 'M', u'—'), + (0xFE59, '3', u'('), + (0xFE5A, '3', u')'), + (0xFE5B, '3', u'{'), + (0xFE5C, '3', u'}'), + (0xFE5D, 'M', u'〔'), + (0xFE5E, 'M', u'〕'), + (0xFE5F, '3', u'#'), + (0xFE60, '3', u'&'), + (0xFE61, '3', u'*'), + (0xFE62, '3', u'+'), + (0xFE63, 'M', u'-'), + (0xFE64, '3', u'<'), + (0xFE65, '3', u'>'), + (0xFE66, '3', u'='), + (0xFE67, 'X'), + (0xFE68, '3', u'\\'), + (0xFE69, '3', u'$'), + (0xFE6A, '3', u'%'), + (0xFE6B, '3', u'@'), + (0xFE6C, 'X'), + (0xFE70, '3', u' ً'), + (0xFE71, 'M', u'ـً'), + (0xFE72, '3', u' ٌ'), + (0xFE73, 'V'), + (0xFE74, '3', u' ٍ'), + (0xFE75, 'X'), + (0xFE76, '3', u' َ'), + (0xFE77, 'M', u'ـَ'), + (0xFE78, '3', u' ُ'), + (0xFE79, 'M', u'ـُ'), + (0xFE7A, '3', u' ِ'), + (0xFE7B, 'M', u'ـِ'), + (0xFE7C, '3', u' ّ'), + (0xFE7D, 'M', u'ـّ'), + (0xFE7E, '3', u' ْ'), + (0xFE7F, 'M', u'ـْ'), + (0xFE80, 'M', u'ء'), + (0xFE81, 'M', u'آ'), + (0xFE83, 'M', u'أ'), + (0xFE85, 'M', u'ؤ'), + (0xFE87, 'M', u'إ'), + (0xFE89, 'M', u'ئ'), + (0xFE8D, 'M', u'ا'), + (0xFE8F, 'M', u'ب'), + (0xFE93, 'M', u'ة'), 
+ (0xFE95, 'M', u'ت'), + ] + +def _seg_50(): + return [ + (0xFE99, 'M', u'ث'), + (0xFE9D, 'M', u'ج'), + (0xFEA1, 'M', u'ح'), + (0xFEA5, 'M', u'خ'), + (0xFEA9, 'M', u'د'), + (0xFEAB, 'M', u'ذ'), + (0xFEAD, 'M', u'ر'), + (0xFEAF, 'M', u'ز'), + (0xFEB1, 'M', u'س'), + (0xFEB5, 'M', u'ش'), + (0xFEB9, 'M', u'ص'), + (0xFEBD, 'M', u'ض'), + (0xFEC1, 'M', u'ط'), + (0xFEC5, 'M', u'ظ'), + (0xFEC9, 'M', u'ع'), + (0xFECD, 'M', u'غ'), + (0xFED1, 'M', u'ف'), + (0xFED5, 'M', u'ق'), + (0xFED9, 'M', u'ك'), + (0xFEDD, 'M', u'ل'), + (0xFEE1, 'M', u'م'), + (0xFEE5, 'M', u'ن'), + (0xFEE9, 'M', u'ه'), + (0xFEED, 'M', u'و'), + (0xFEEF, 'M', u'ى'), + (0xFEF1, 'M', u'ي'), + (0xFEF5, 'M', u'لآ'), + (0xFEF7, 'M', u'لأ'), + (0xFEF9, 'M', u'لإ'), + (0xFEFB, 'M', u'لا'), + (0xFEFD, 'X'), + (0xFEFF, 'I'), + (0xFF00, 'X'), + (0xFF01, '3', u'!'), + (0xFF02, '3', u'"'), + (0xFF03, '3', u'#'), + (0xFF04, '3', u'$'), + (0xFF05, '3', u'%'), + (0xFF06, '3', u'&'), + (0xFF07, '3', u'\''), + (0xFF08, '3', u'('), + (0xFF09, '3', u')'), + (0xFF0A, '3', u'*'), + (0xFF0B, '3', u'+'), + (0xFF0C, '3', u','), + (0xFF0D, 'M', u'-'), + (0xFF0E, 'M', u'.'), + (0xFF0F, '3', u'/'), + (0xFF10, 'M', u'0'), + (0xFF11, 'M', u'1'), + (0xFF12, 'M', u'2'), + (0xFF13, 'M', u'3'), + (0xFF14, 'M', u'4'), + (0xFF15, 'M', u'5'), + (0xFF16, 'M', u'6'), + (0xFF17, 'M', u'7'), + (0xFF18, 'M', u'8'), + (0xFF19, 'M', u'9'), + (0xFF1A, '3', u':'), + (0xFF1B, '3', u';'), + (0xFF1C, '3', u'<'), + (0xFF1D, '3', u'='), + (0xFF1E, '3', u'>'), + (0xFF1F, '3', u'?'), + (0xFF20, '3', u'@'), + (0xFF21, 'M', u'a'), + (0xFF22, 'M', u'b'), + (0xFF23, 'M', u'c'), + (0xFF24, 'M', u'd'), + (0xFF25, 'M', u'e'), + (0xFF26, 'M', u'f'), + (0xFF27, 'M', u'g'), + (0xFF28, 'M', u'h'), + (0xFF29, 'M', u'i'), + (0xFF2A, 'M', u'j'), + (0xFF2B, 'M', u'k'), + (0xFF2C, 'M', u'l'), + (0xFF2D, 'M', u'm'), + (0xFF2E, 'M', u'n'), + (0xFF2F, 'M', u'o'), + (0xFF30, 'M', u'p'), + (0xFF31, 'M', u'q'), + (0xFF32, 'M', u'r'), + (0xFF33, 'M', u's'), + (0xFF34, 'M', u't'), + (0xFF35, 'M', u'u'), + (0xFF36, 'M', u'v'), + (0xFF37, 'M', u'w'), + (0xFF38, 'M', u'x'), + (0xFF39, 'M', u'y'), + (0xFF3A, 'M', u'z'), + (0xFF3B, '3', u'['), + (0xFF3C, '3', u'\\'), + (0xFF3D, '3', u']'), + (0xFF3E, '3', u'^'), + (0xFF3F, '3', u'_'), + (0xFF40, '3', u'`'), + (0xFF41, 'M', u'a'), + (0xFF42, 'M', u'b'), + (0xFF43, 'M', u'c'), + ] + +def _seg_51(): + return [ + (0xFF44, 'M', u'd'), + (0xFF45, 'M', u'e'), + (0xFF46, 'M', u'f'), + (0xFF47, 'M', u'g'), + (0xFF48, 'M', u'h'), + (0xFF49, 'M', u'i'), + (0xFF4A, 'M', u'j'), + (0xFF4B, 'M', u'k'), + (0xFF4C, 'M', u'l'), + (0xFF4D, 'M', u'm'), + (0xFF4E, 'M', u'n'), + (0xFF4F, 'M', u'o'), + (0xFF50, 'M', u'p'), + (0xFF51, 'M', u'q'), + (0xFF52, 'M', u'r'), + (0xFF53, 'M', u's'), + (0xFF54, 'M', u't'), + (0xFF55, 'M', u'u'), + (0xFF56, 'M', u'v'), + (0xFF57, 'M', u'w'), + (0xFF58, 'M', u'x'), + (0xFF59, 'M', u'y'), + (0xFF5A, 'M', u'z'), + (0xFF5B, '3', u'{'), + (0xFF5C, '3', u'|'), + (0xFF5D, '3', u'}'), + (0xFF5E, '3', u'~'), + (0xFF5F, 'M', u'⦅'), + (0xFF60, 'M', u'⦆'), + (0xFF61, 'M', u'.'), + (0xFF62, 'M', u'「'), + (0xFF63, 'M', u'」'), + (0xFF64, 'M', u'、'), + (0xFF65, 'M', u'・'), + (0xFF66, 'M', u'ヲ'), + (0xFF67, 'M', u'ァ'), + (0xFF68, 'M', u'ィ'), + (0xFF69, 'M', u'ゥ'), + (0xFF6A, 'M', u'ェ'), + (0xFF6B, 'M', u'ォ'), + (0xFF6C, 'M', u'ャ'), + (0xFF6D, 'M', u'ュ'), + (0xFF6E, 'M', u'ョ'), + (0xFF6F, 'M', u'ッ'), + (0xFF70, 'M', u'ー'), + (0xFF71, 'M', u'ア'), + (0xFF72, 'M', u'イ'), + (0xFF73, 'M', u'ウ'), + (0xFF74, 'M', u'エ'), + (0xFF75, 'M', u'オ'), + (0xFF76, 'M', u'カ'), + 
(0xFF77, 'M', u'キ'), + (0xFF78, 'M', u'ク'), + (0xFF79, 'M', u'ケ'), + (0xFF7A, 'M', u'コ'), + (0xFF7B, 'M', u'サ'), + (0xFF7C, 'M', u'シ'), + (0xFF7D, 'M', u'ス'), + (0xFF7E, 'M', u'セ'), + (0xFF7F, 'M', u'ソ'), + (0xFF80, 'M', u'タ'), + (0xFF81, 'M', u'チ'), + (0xFF82, 'M', u'ツ'), + (0xFF83, 'M', u'テ'), + (0xFF84, 'M', u'ト'), + (0xFF85, 'M', u'ナ'), + (0xFF86, 'M', u'ニ'), + (0xFF87, 'M', u'ヌ'), + (0xFF88, 'M', u'ネ'), + (0xFF89, 'M', u'ノ'), + (0xFF8A, 'M', u'ハ'), + (0xFF8B, 'M', u'ヒ'), + (0xFF8C, 'M', u'フ'), + (0xFF8D, 'M', u'ヘ'), + (0xFF8E, 'M', u'ホ'), + (0xFF8F, 'M', u'マ'), + (0xFF90, 'M', u'ミ'), + (0xFF91, 'M', u'ム'), + (0xFF92, 'M', u'メ'), + (0xFF93, 'M', u'モ'), + (0xFF94, 'M', u'ヤ'), + (0xFF95, 'M', u'ユ'), + (0xFF96, 'M', u'ヨ'), + (0xFF97, 'M', u'ラ'), + (0xFF98, 'M', u'リ'), + (0xFF99, 'M', u'ル'), + (0xFF9A, 'M', u'レ'), + (0xFF9B, 'M', u'ロ'), + (0xFF9C, 'M', u'ワ'), + (0xFF9D, 'M', u'ン'), + (0xFF9E, 'M', u'゙'), + (0xFF9F, 'M', u'゚'), + (0xFFA0, 'X'), + (0xFFA1, 'M', u'ᄀ'), + (0xFFA2, 'M', u'ᄁ'), + (0xFFA3, 'M', u'ᆪ'), + (0xFFA4, 'M', u'ᄂ'), + (0xFFA5, 'M', u'ᆬ'), + (0xFFA6, 'M', u'ᆭ'), + (0xFFA7, 'M', u'ᄃ'), + ] + +def _seg_52(): + return [ + (0xFFA8, 'M', u'ᄄ'), + (0xFFA9, 'M', u'ᄅ'), + (0xFFAA, 'M', u'ᆰ'), + (0xFFAB, 'M', u'ᆱ'), + (0xFFAC, 'M', u'ᆲ'), + (0xFFAD, 'M', u'ᆳ'), + (0xFFAE, 'M', u'ᆴ'), + (0xFFAF, 'M', u'ᆵ'), + (0xFFB0, 'M', u'ᄚ'), + (0xFFB1, 'M', u'ᄆ'), + (0xFFB2, 'M', u'ᄇ'), + (0xFFB3, 'M', u'ᄈ'), + (0xFFB4, 'M', u'ᄡ'), + (0xFFB5, 'M', u'ᄉ'), + (0xFFB6, 'M', u'ᄊ'), + (0xFFB7, 'M', u'ᄋ'), + (0xFFB8, 'M', u'ᄌ'), + (0xFFB9, 'M', u'ᄍ'), + (0xFFBA, 'M', u'ᄎ'), + (0xFFBB, 'M', u'ᄏ'), + (0xFFBC, 'M', u'ᄐ'), + (0xFFBD, 'M', u'ᄑ'), + (0xFFBE, 'M', u'ᄒ'), + (0xFFBF, 'X'), + (0xFFC2, 'M', u'ᅡ'), + (0xFFC3, 'M', u'ᅢ'), + (0xFFC4, 'M', u'ᅣ'), + (0xFFC5, 'M', u'ᅤ'), + (0xFFC6, 'M', u'ᅥ'), + (0xFFC7, 'M', u'ᅦ'), + (0xFFC8, 'X'), + (0xFFCA, 'M', u'ᅧ'), + (0xFFCB, 'M', u'ᅨ'), + (0xFFCC, 'M', u'ᅩ'), + (0xFFCD, 'M', u'ᅪ'), + (0xFFCE, 'M', u'ᅫ'), + (0xFFCF, 'M', u'ᅬ'), + (0xFFD0, 'X'), + (0xFFD2, 'M', u'ᅭ'), + (0xFFD3, 'M', u'ᅮ'), + (0xFFD4, 'M', u'ᅯ'), + (0xFFD5, 'M', u'ᅰ'), + (0xFFD6, 'M', u'ᅱ'), + (0xFFD7, 'M', u'ᅲ'), + (0xFFD8, 'X'), + (0xFFDA, 'M', u'ᅳ'), + (0xFFDB, 'M', u'ᅴ'), + (0xFFDC, 'M', u'ᅵ'), + (0xFFDD, 'X'), + (0xFFE0, 'M', u'¢'), + (0xFFE1, 'M', u'£'), + (0xFFE2, 'M', u'¬'), + (0xFFE3, '3', u' ̄'), + (0xFFE4, 'M', u'¦'), + (0xFFE5, 'M', u'¥'), + (0xFFE6, 'M', u'₩'), + (0xFFE7, 'X'), + (0xFFE8, 'M', u'│'), + (0xFFE9, 'M', u'←'), + (0xFFEA, 'M', u'↑'), + (0xFFEB, 'M', u'→'), + (0xFFEC, 'M', u'↓'), + (0xFFED, 'M', u'■'), + (0xFFEE, 'M', u'○'), + (0xFFEF, 'X'), + (0x10000, 'V'), + (0x1000C, 'X'), + (0x1000D, 'V'), + (0x10027, 'X'), + (0x10028, 'V'), + (0x1003B, 'X'), + (0x1003C, 'V'), + (0x1003E, 'X'), + (0x1003F, 'V'), + (0x1004E, 'X'), + (0x10050, 'V'), + (0x1005E, 'X'), + (0x10080, 'V'), + (0x100FB, 'X'), + (0x10100, 'V'), + (0x10103, 'X'), + (0x10107, 'V'), + (0x10134, 'X'), + (0x10137, 'V'), + (0x1018F, 'X'), + (0x10190, 'V'), + (0x1019C, 'X'), + (0x101A0, 'V'), + (0x101A1, 'X'), + (0x101D0, 'V'), + (0x101FE, 'X'), + (0x10280, 'V'), + (0x1029D, 'X'), + (0x102A0, 'V'), + (0x102D1, 'X'), + (0x102E0, 'V'), + (0x102FC, 'X'), + (0x10300, 'V'), + (0x10324, 'X'), + (0x1032D, 'V'), + ] + +def _seg_53(): + return [ + (0x1034B, 'X'), + (0x10350, 'V'), + (0x1037B, 'X'), + (0x10380, 'V'), + (0x1039E, 'X'), + (0x1039F, 'V'), + (0x103C4, 'X'), + (0x103C8, 'V'), + (0x103D6, 'X'), + (0x10400, 'M', u'𐐨'), + (0x10401, 'M', u'𐐩'), + (0x10402, 'M', u'𐐪'), + (0x10403, 'M', u'𐐫'), + (0x10404, 'M', u'𐐬'), + 
(0x10405, 'M', u'𐐭'), + (0x10406, 'M', u'𐐮'), + (0x10407, 'M', u'𐐯'), + (0x10408, 'M', u'𐐰'), + (0x10409, 'M', u'𐐱'), + (0x1040A, 'M', u'𐐲'), + (0x1040B, 'M', u'𐐳'), + (0x1040C, 'M', u'𐐴'), + (0x1040D, 'M', u'𐐵'), + (0x1040E, 'M', u'𐐶'), + (0x1040F, 'M', u'𐐷'), + (0x10410, 'M', u'𐐸'), + (0x10411, 'M', u'𐐹'), + (0x10412, 'M', u'𐐺'), + (0x10413, 'M', u'𐐻'), + (0x10414, 'M', u'𐐼'), + (0x10415, 'M', u'𐐽'), + (0x10416, 'M', u'𐐾'), + (0x10417, 'M', u'𐐿'), + (0x10418, 'M', u'𐑀'), + (0x10419, 'M', u'𐑁'), + (0x1041A, 'M', u'𐑂'), + (0x1041B, 'M', u'𐑃'), + (0x1041C, 'M', u'𐑄'), + (0x1041D, 'M', u'𐑅'), + (0x1041E, 'M', u'𐑆'), + (0x1041F, 'M', u'𐑇'), + (0x10420, 'M', u'𐑈'), + (0x10421, 'M', u'𐑉'), + (0x10422, 'M', u'𐑊'), + (0x10423, 'M', u'𐑋'), + (0x10424, 'M', u'𐑌'), + (0x10425, 'M', u'𐑍'), + (0x10426, 'M', u'𐑎'), + (0x10427, 'M', u'𐑏'), + (0x10428, 'V'), + (0x1049E, 'X'), + (0x104A0, 'V'), + (0x104AA, 'X'), + (0x104B0, 'M', u'𐓘'), + (0x104B1, 'M', u'𐓙'), + (0x104B2, 'M', u'𐓚'), + (0x104B3, 'M', u'𐓛'), + (0x104B4, 'M', u'𐓜'), + (0x104B5, 'M', u'𐓝'), + (0x104B6, 'M', u'𐓞'), + (0x104B7, 'M', u'𐓟'), + (0x104B8, 'M', u'𐓠'), + (0x104B9, 'M', u'𐓡'), + (0x104BA, 'M', u'𐓢'), + (0x104BB, 'M', u'𐓣'), + (0x104BC, 'M', u'𐓤'), + (0x104BD, 'M', u'𐓥'), + (0x104BE, 'M', u'𐓦'), + (0x104BF, 'M', u'𐓧'), + (0x104C0, 'M', u'𐓨'), + (0x104C1, 'M', u'𐓩'), + (0x104C2, 'M', u'𐓪'), + (0x104C3, 'M', u'𐓫'), + (0x104C4, 'M', u'𐓬'), + (0x104C5, 'M', u'𐓭'), + (0x104C6, 'M', u'𐓮'), + (0x104C7, 'M', u'𐓯'), + (0x104C8, 'M', u'𐓰'), + (0x104C9, 'M', u'𐓱'), + (0x104CA, 'M', u'𐓲'), + (0x104CB, 'M', u'𐓳'), + (0x104CC, 'M', u'𐓴'), + (0x104CD, 'M', u'𐓵'), + (0x104CE, 'M', u'𐓶'), + (0x104CF, 'M', u'𐓷'), + (0x104D0, 'M', u'𐓸'), + (0x104D1, 'M', u'𐓹'), + (0x104D2, 'M', u'𐓺'), + (0x104D3, 'M', u'𐓻'), + (0x104D4, 'X'), + (0x104D8, 'V'), + (0x104FC, 'X'), + (0x10500, 'V'), + (0x10528, 'X'), + (0x10530, 'V'), + (0x10564, 'X'), + (0x1056F, 'V'), + (0x10570, 'X'), + (0x10600, 'V'), + (0x10737, 'X'), + ] + +def _seg_54(): + return [ + (0x10740, 'V'), + (0x10756, 'X'), + (0x10760, 'V'), + (0x10768, 'X'), + (0x10800, 'V'), + (0x10806, 'X'), + (0x10808, 'V'), + (0x10809, 'X'), + (0x1080A, 'V'), + (0x10836, 'X'), + (0x10837, 'V'), + (0x10839, 'X'), + (0x1083C, 'V'), + (0x1083D, 'X'), + (0x1083F, 'V'), + (0x10856, 'X'), + (0x10857, 'V'), + (0x1089F, 'X'), + (0x108A7, 'V'), + (0x108B0, 'X'), + (0x108E0, 'V'), + (0x108F3, 'X'), + (0x108F4, 'V'), + (0x108F6, 'X'), + (0x108FB, 'V'), + (0x1091C, 'X'), + (0x1091F, 'V'), + (0x1093A, 'X'), + (0x1093F, 'V'), + (0x10940, 'X'), + (0x10980, 'V'), + (0x109B8, 'X'), + (0x109BC, 'V'), + (0x109D0, 'X'), + (0x109D2, 'V'), + (0x10A04, 'X'), + (0x10A05, 'V'), + (0x10A07, 'X'), + (0x10A0C, 'V'), + (0x10A14, 'X'), + (0x10A15, 'V'), + (0x10A18, 'X'), + (0x10A19, 'V'), + (0x10A34, 'X'), + (0x10A38, 'V'), + (0x10A3B, 'X'), + (0x10A3F, 'V'), + (0x10A48, 'X'), + (0x10A50, 'V'), + (0x10A59, 'X'), + (0x10A60, 'V'), + (0x10AA0, 'X'), + (0x10AC0, 'V'), + (0x10AE7, 'X'), + (0x10AEB, 'V'), + (0x10AF7, 'X'), + (0x10B00, 'V'), + (0x10B36, 'X'), + (0x10B39, 'V'), + (0x10B56, 'X'), + (0x10B58, 'V'), + (0x10B73, 'X'), + (0x10B78, 'V'), + (0x10B92, 'X'), + (0x10B99, 'V'), + (0x10B9D, 'X'), + (0x10BA9, 'V'), + (0x10BB0, 'X'), + (0x10C00, 'V'), + (0x10C49, 'X'), + (0x10C80, 'M', u'𐳀'), + (0x10C81, 'M', u'𐳁'), + (0x10C82, 'M', u'𐳂'), + (0x10C83, 'M', u'𐳃'), + (0x10C84, 'M', u'𐳄'), + (0x10C85, 'M', u'𐳅'), + (0x10C86, 'M', u'𐳆'), + (0x10C87, 'M', u'𐳇'), + (0x10C88, 'M', u'𐳈'), + (0x10C89, 'M', u'𐳉'), + (0x10C8A, 'M', u'𐳊'), + (0x10C8B, 'M', u'𐳋'), + 
(0x10C8C, 'M', u'𐳌'), + (0x10C8D, 'M', u'𐳍'), + (0x10C8E, 'M', u'𐳎'), + (0x10C8F, 'M', u'𐳏'), + (0x10C90, 'M', u'𐳐'), + (0x10C91, 'M', u'𐳑'), + (0x10C92, 'M', u'𐳒'), + (0x10C93, 'M', u'𐳓'), + (0x10C94, 'M', u'𐳔'), + (0x10C95, 'M', u'𐳕'), + (0x10C96, 'M', u'𐳖'), + (0x10C97, 'M', u'𐳗'), + (0x10C98, 'M', u'𐳘'), + (0x10C99, 'M', u'𐳙'), + (0x10C9A, 'M', u'𐳚'), + (0x10C9B, 'M', u'𐳛'), + (0x10C9C, 'M', u'𐳜'), + (0x10C9D, 'M', u'𐳝'), + ] + +def _seg_55(): + return [ + (0x10C9E, 'M', u'𐳞'), + (0x10C9F, 'M', u'𐳟'), + (0x10CA0, 'M', u'𐳠'), + (0x10CA1, 'M', u'𐳡'), + (0x10CA2, 'M', u'𐳢'), + (0x10CA3, 'M', u'𐳣'), + (0x10CA4, 'M', u'𐳤'), + (0x10CA5, 'M', u'𐳥'), + (0x10CA6, 'M', u'𐳦'), + (0x10CA7, 'M', u'𐳧'), + (0x10CA8, 'M', u'𐳨'), + (0x10CA9, 'M', u'𐳩'), + (0x10CAA, 'M', u'𐳪'), + (0x10CAB, 'M', u'𐳫'), + (0x10CAC, 'M', u'𐳬'), + (0x10CAD, 'M', u'𐳭'), + (0x10CAE, 'M', u'𐳮'), + (0x10CAF, 'M', u'𐳯'), + (0x10CB0, 'M', u'𐳰'), + (0x10CB1, 'M', u'𐳱'), + (0x10CB2, 'M', u'𐳲'), + (0x10CB3, 'X'), + (0x10CC0, 'V'), + (0x10CF3, 'X'), + (0x10CFA, 'V'), + (0x10D00, 'X'), + (0x10E60, 'V'), + (0x10E7F, 'X'), + (0x11000, 'V'), + (0x1104E, 'X'), + (0x11052, 'V'), + (0x11070, 'X'), + (0x1107F, 'V'), + (0x110BD, 'X'), + (0x110BE, 'V'), + (0x110C2, 'X'), + (0x110D0, 'V'), + (0x110E9, 'X'), + (0x110F0, 'V'), + (0x110FA, 'X'), + (0x11100, 'V'), + (0x11135, 'X'), + (0x11136, 'V'), + (0x11144, 'X'), + (0x11150, 'V'), + (0x11177, 'X'), + (0x11180, 'V'), + (0x111CE, 'X'), + (0x111D0, 'V'), + (0x111E0, 'X'), + (0x111E1, 'V'), + (0x111F5, 'X'), + (0x11200, 'V'), + (0x11212, 'X'), + (0x11213, 'V'), + (0x1123F, 'X'), + (0x11280, 'V'), + (0x11287, 'X'), + (0x11288, 'V'), + (0x11289, 'X'), + (0x1128A, 'V'), + (0x1128E, 'X'), + (0x1128F, 'V'), + (0x1129E, 'X'), + (0x1129F, 'V'), + (0x112AA, 'X'), + (0x112B0, 'V'), + (0x112EB, 'X'), + (0x112F0, 'V'), + (0x112FA, 'X'), + (0x11300, 'V'), + (0x11304, 'X'), + (0x11305, 'V'), + (0x1130D, 'X'), + (0x1130F, 'V'), + (0x11311, 'X'), + (0x11313, 'V'), + (0x11329, 'X'), + (0x1132A, 'V'), + (0x11331, 'X'), + (0x11332, 'V'), + (0x11334, 'X'), + (0x11335, 'V'), + (0x1133A, 'X'), + (0x1133C, 'V'), + (0x11345, 'X'), + (0x11347, 'V'), + (0x11349, 'X'), + (0x1134B, 'V'), + (0x1134E, 'X'), + (0x11350, 'V'), + (0x11351, 'X'), + (0x11357, 'V'), + (0x11358, 'X'), + (0x1135D, 'V'), + (0x11364, 'X'), + (0x11366, 'V'), + (0x1136D, 'X'), + (0x11370, 'V'), + (0x11375, 'X'), + ] + +def _seg_56(): + return [ + (0x11400, 'V'), + (0x1145A, 'X'), + (0x1145B, 'V'), + (0x1145C, 'X'), + (0x1145D, 'V'), + (0x1145E, 'X'), + (0x11480, 'V'), + (0x114C8, 'X'), + (0x114D0, 'V'), + (0x114DA, 'X'), + (0x11580, 'V'), + (0x115B6, 'X'), + (0x115B8, 'V'), + (0x115DE, 'X'), + (0x11600, 'V'), + (0x11645, 'X'), + (0x11650, 'V'), + (0x1165A, 'X'), + (0x11660, 'V'), + (0x1166D, 'X'), + (0x11680, 'V'), + (0x116B8, 'X'), + (0x116C0, 'V'), + (0x116CA, 'X'), + (0x11700, 'V'), + (0x1171A, 'X'), + (0x1171D, 'V'), + (0x1172C, 'X'), + (0x11730, 'V'), + (0x11740, 'X'), + (0x118A0, 'M', u'𑣀'), + (0x118A1, 'M', u'𑣁'), + (0x118A2, 'M', u'𑣂'), + (0x118A3, 'M', u'𑣃'), + (0x118A4, 'M', u'𑣄'), + (0x118A5, 'M', u'𑣅'), + (0x118A6, 'M', u'𑣆'), + (0x118A7, 'M', u'𑣇'), + (0x118A8, 'M', u'𑣈'), + (0x118A9, 'M', u'𑣉'), + (0x118AA, 'M', u'𑣊'), + (0x118AB, 'M', u'𑣋'), + (0x118AC, 'M', u'𑣌'), + (0x118AD, 'M', u'𑣍'), + (0x118AE, 'M', u'𑣎'), + (0x118AF, 'M', u'𑣏'), + (0x118B0, 'M', u'𑣐'), + (0x118B1, 'M', u'𑣑'), + (0x118B2, 'M', u'𑣒'), + (0x118B3, 'M', u'𑣓'), + (0x118B4, 'M', u'𑣔'), + (0x118B5, 'M', u'𑣕'), + (0x118B6, 'M', u'𑣖'), + (0x118B7, 'M', u'𑣗'), + (0x118B8, 'M', 
u'𑣘'), + (0x118B9, 'M', u'𑣙'), + (0x118BA, 'M', u'𑣚'), + (0x118BB, 'M', u'𑣛'), + (0x118BC, 'M', u'𑣜'), + (0x118BD, 'M', u'𑣝'), + (0x118BE, 'M', u'𑣞'), + (0x118BF, 'M', u'𑣟'), + (0x118C0, 'V'), + (0x118F3, 'X'), + (0x118FF, 'V'), + (0x11900, 'X'), + (0x11A00, 'V'), + (0x11A48, 'X'), + (0x11A50, 'V'), + (0x11A84, 'X'), + (0x11A86, 'V'), + (0x11A9D, 'X'), + (0x11A9E, 'V'), + (0x11AA3, 'X'), + (0x11AC0, 'V'), + (0x11AF9, 'X'), + (0x11C00, 'V'), + (0x11C09, 'X'), + (0x11C0A, 'V'), + (0x11C37, 'X'), + (0x11C38, 'V'), + (0x11C46, 'X'), + (0x11C50, 'V'), + (0x11C6D, 'X'), + (0x11C70, 'V'), + (0x11C90, 'X'), + (0x11C92, 'V'), + (0x11CA8, 'X'), + (0x11CA9, 'V'), + (0x11CB7, 'X'), + (0x11D00, 'V'), + (0x11D07, 'X'), + (0x11D08, 'V'), + (0x11D0A, 'X'), + (0x11D0B, 'V'), + (0x11D37, 'X'), + (0x11D3A, 'V'), + (0x11D3B, 'X'), + (0x11D3C, 'V'), + (0x11D3E, 'X'), + ] + +def _seg_57(): + return [ + (0x11D3F, 'V'), + (0x11D48, 'X'), + (0x11D50, 'V'), + (0x11D5A, 'X'), + (0x12000, 'V'), + (0x1239A, 'X'), + (0x12400, 'V'), + (0x1246F, 'X'), + (0x12470, 'V'), + (0x12475, 'X'), + (0x12480, 'V'), + (0x12544, 'X'), + (0x13000, 'V'), + (0x1342F, 'X'), + (0x14400, 'V'), + (0x14647, 'X'), + (0x16800, 'V'), + (0x16A39, 'X'), + (0x16A40, 'V'), + (0x16A5F, 'X'), + (0x16A60, 'V'), + (0x16A6A, 'X'), + (0x16A6E, 'V'), + (0x16A70, 'X'), + (0x16AD0, 'V'), + (0x16AEE, 'X'), + (0x16AF0, 'V'), + (0x16AF6, 'X'), + (0x16B00, 'V'), + (0x16B46, 'X'), + (0x16B50, 'V'), + (0x16B5A, 'X'), + (0x16B5B, 'V'), + (0x16B62, 'X'), + (0x16B63, 'V'), + (0x16B78, 'X'), + (0x16B7D, 'V'), + (0x16B90, 'X'), + (0x16F00, 'V'), + (0x16F45, 'X'), + (0x16F50, 'V'), + (0x16F7F, 'X'), + (0x16F8F, 'V'), + (0x16FA0, 'X'), + (0x16FE0, 'V'), + (0x16FE2, 'X'), + (0x17000, 'V'), + (0x187ED, 'X'), + (0x18800, 'V'), + (0x18AF3, 'X'), + (0x1B000, 'V'), + (0x1B11F, 'X'), + (0x1B170, 'V'), + (0x1B2FC, 'X'), + (0x1BC00, 'V'), + (0x1BC6B, 'X'), + (0x1BC70, 'V'), + (0x1BC7D, 'X'), + (0x1BC80, 'V'), + (0x1BC89, 'X'), + (0x1BC90, 'V'), + (0x1BC9A, 'X'), + (0x1BC9C, 'V'), + (0x1BCA0, 'I'), + (0x1BCA4, 'X'), + (0x1D000, 'V'), + (0x1D0F6, 'X'), + (0x1D100, 'V'), + (0x1D127, 'X'), + (0x1D129, 'V'), + (0x1D15E, 'M', u'𝅗𝅥'), + (0x1D15F, 'M', u'𝅘𝅥'), + (0x1D160, 'M', u'𝅘𝅥𝅮'), + (0x1D161, 'M', u'𝅘𝅥𝅯'), + (0x1D162, 'M', u'𝅘𝅥𝅰'), + (0x1D163, 'M', u'𝅘𝅥𝅱'), + (0x1D164, 'M', u'𝅘𝅥𝅲'), + (0x1D165, 'V'), + (0x1D173, 'X'), + (0x1D17B, 'V'), + (0x1D1BB, 'M', u'𝆹𝅥'), + (0x1D1BC, 'M', u'𝆺𝅥'), + (0x1D1BD, 'M', u'𝆹𝅥𝅮'), + (0x1D1BE, 'M', u'𝆺𝅥𝅮'), + (0x1D1BF, 'M', u'𝆹𝅥𝅯'), + (0x1D1C0, 'M', u'𝆺𝅥𝅯'), + (0x1D1C1, 'V'), + (0x1D1E9, 'X'), + (0x1D200, 'V'), + (0x1D246, 'X'), + (0x1D300, 'V'), + (0x1D357, 'X'), + (0x1D360, 'V'), + (0x1D372, 'X'), + (0x1D400, 'M', u'a'), + (0x1D401, 'M', u'b'), + (0x1D402, 'M', u'c'), + (0x1D403, 'M', u'd'), + (0x1D404, 'M', u'e'), + (0x1D405, 'M', u'f'), + ] + +def _seg_58(): + return [ + (0x1D406, 'M', u'g'), + (0x1D407, 'M', u'h'), + (0x1D408, 'M', u'i'), + (0x1D409, 'M', u'j'), + (0x1D40A, 'M', u'k'), + (0x1D40B, 'M', u'l'), + (0x1D40C, 'M', u'm'), + (0x1D40D, 'M', u'n'), + (0x1D40E, 'M', u'o'), + (0x1D40F, 'M', u'p'), + (0x1D410, 'M', u'q'), + (0x1D411, 'M', u'r'), + (0x1D412, 'M', u's'), + (0x1D413, 'M', u't'), + (0x1D414, 'M', u'u'), + (0x1D415, 'M', u'v'), + (0x1D416, 'M', u'w'), + (0x1D417, 'M', u'x'), + (0x1D418, 'M', u'y'), + (0x1D419, 'M', u'z'), + (0x1D41A, 'M', u'a'), + (0x1D41B, 'M', u'b'), + (0x1D41C, 'M', u'c'), + (0x1D41D, 'M', u'd'), + (0x1D41E, 'M', u'e'), + (0x1D41F, 'M', u'f'), + (0x1D420, 'M', u'g'), + (0x1D421, 'M', u'h'), + (0x1D422, 'M', 
u'i'), + (0x1D423, 'M', u'j'), + (0x1D424, 'M', u'k'), + (0x1D425, 'M', u'l'), + (0x1D426, 'M', u'm'), + (0x1D427, 'M', u'n'), + (0x1D428, 'M', u'o'), + (0x1D429, 'M', u'p'), + (0x1D42A, 'M', u'q'), + (0x1D42B, 'M', u'r'), + (0x1D42C, 'M', u's'), + (0x1D42D, 'M', u't'), + (0x1D42E, 'M', u'u'), + (0x1D42F, 'M', u'v'), + (0x1D430, 'M', u'w'), + (0x1D431, 'M', u'x'), + (0x1D432, 'M', u'y'), + (0x1D433, 'M', u'z'), + (0x1D434, 'M', u'a'), + (0x1D435, 'M', u'b'), + (0x1D436, 'M', u'c'), + (0x1D437, 'M', u'd'), + (0x1D438, 'M', u'e'), + (0x1D439, 'M', u'f'), + (0x1D43A, 'M', u'g'), + (0x1D43B, 'M', u'h'), + (0x1D43C, 'M', u'i'), + (0x1D43D, 'M', u'j'), + (0x1D43E, 'M', u'k'), + (0x1D43F, 'M', u'l'), + (0x1D440, 'M', u'm'), + (0x1D441, 'M', u'n'), + (0x1D442, 'M', u'o'), + (0x1D443, 'M', u'p'), + (0x1D444, 'M', u'q'), + (0x1D445, 'M', u'r'), + (0x1D446, 'M', u's'), + (0x1D447, 'M', u't'), + (0x1D448, 'M', u'u'), + (0x1D449, 'M', u'v'), + (0x1D44A, 'M', u'w'), + (0x1D44B, 'M', u'x'), + (0x1D44C, 'M', u'y'), + (0x1D44D, 'M', u'z'), + (0x1D44E, 'M', u'a'), + (0x1D44F, 'M', u'b'), + (0x1D450, 'M', u'c'), + (0x1D451, 'M', u'd'), + (0x1D452, 'M', u'e'), + (0x1D453, 'M', u'f'), + (0x1D454, 'M', u'g'), + (0x1D455, 'X'), + (0x1D456, 'M', u'i'), + (0x1D457, 'M', u'j'), + (0x1D458, 'M', u'k'), + (0x1D459, 'M', u'l'), + (0x1D45A, 'M', u'm'), + (0x1D45B, 'M', u'n'), + (0x1D45C, 'M', u'o'), + (0x1D45D, 'M', u'p'), + (0x1D45E, 'M', u'q'), + (0x1D45F, 'M', u'r'), + (0x1D460, 'M', u's'), + (0x1D461, 'M', u't'), + (0x1D462, 'M', u'u'), + (0x1D463, 'M', u'v'), + (0x1D464, 'M', u'w'), + (0x1D465, 'M', u'x'), + (0x1D466, 'M', u'y'), + (0x1D467, 'M', u'z'), + (0x1D468, 'M', u'a'), + (0x1D469, 'M', u'b'), + ] + +def _seg_59(): + return [ + (0x1D46A, 'M', u'c'), + (0x1D46B, 'M', u'd'), + (0x1D46C, 'M', u'e'), + (0x1D46D, 'M', u'f'), + (0x1D46E, 'M', u'g'), + (0x1D46F, 'M', u'h'), + (0x1D470, 'M', u'i'), + (0x1D471, 'M', u'j'), + (0x1D472, 'M', u'k'), + (0x1D473, 'M', u'l'), + (0x1D474, 'M', u'm'), + (0x1D475, 'M', u'n'), + (0x1D476, 'M', u'o'), + (0x1D477, 'M', u'p'), + (0x1D478, 'M', u'q'), + (0x1D479, 'M', u'r'), + (0x1D47A, 'M', u's'), + (0x1D47B, 'M', u't'), + (0x1D47C, 'M', u'u'), + (0x1D47D, 'M', u'v'), + (0x1D47E, 'M', u'w'), + (0x1D47F, 'M', u'x'), + (0x1D480, 'M', u'y'), + (0x1D481, 'M', u'z'), + (0x1D482, 'M', u'a'), + (0x1D483, 'M', u'b'), + (0x1D484, 'M', u'c'), + (0x1D485, 'M', u'd'), + (0x1D486, 'M', u'e'), + (0x1D487, 'M', u'f'), + (0x1D488, 'M', u'g'), + (0x1D489, 'M', u'h'), + (0x1D48A, 'M', u'i'), + (0x1D48B, 'M', u'j'), + (0x1D48C, 'M', u'k'), + (0x1D48D, 'M', u'l'), + (0x1D48E, 'M', u'm'), + (0x1D48F, 'M', u'n'), + (0x1D490, 'M', u'o'), + (0x1D491, 'M', u'p'), + (0x1D492, 'M', u'q'), + (0x1D493, 'M', u'r'), + (0x1D494, 'M', u's'), + (0x1D495, 'M', u't'), + (0x1D496, 'M', u'u'), + (0x1D497, 'M', u'v'), + (0x1D498, 'M', u'w'), + (0x1D499, 'M', u'x'), + (0x1D49A, 'M', u'y'), + (0x1D49B, 'M', u'z'), + (0x1D49C, 'M', u'a'), + (0x1D49D, 'X'), + (0x1D49E, 'M', u'c'), + (0x1D49F, 'M', u'd'), + (0x1D4A0, 'X'), + (0x1D4A2, 'M', u'g'), + (0x1D4A3, 'X'), + (0x1D4A5, 'M', u'j'), + (0x1D4A6, 'M', u'k'), + (0x1D4A7, 'X'), + (0x1D4A9, 'M', u'n'), + (0x1D4AA, 'M', u'o'), + (0x1D4AB, 'M', u'p'), + (0x1D4AC, 'M', u'q'), + (0x1D4AD, 'X'), + (0x1D4AE, 'M', u's'), + (0x1D4AF, 'M', u't'), + (0x1D4B0, 'M', u'u'), + (0x1D4B1, 'M', u'v'), + (0x1D4B2, 'M', u'w'), + (0x1D4B3, 'M', u'x'), + (0x1D4B4, 'M', u'y'), + (0x1D4B5, 'M', u'z'), + (0x1D4B6, 'M', u'a'), + (0x1D4B7, 'M', u'b'), + (0x1D4B8, 'M', u'c'), + (0x1D4B9, 'M', 
u'd'), + (0x1D4BA, 'X'), + (0x1D4BB, 'M', u'f'), + (0x1D4BC, 'X'), + (0x1D4BD, 'M', u'h'), + (0x1D4BE, 'M', u'i'), + (0x1D4BF, 'M', u'j'), + (0x1D4C0, 'M', u'k'), + (0x1D4C1, 'M', u'l'), + (0x1D4C2, 'M', u'm'), + (0x1D4C3, 'M', u'n'), + (0x1D4C4, 'X'), + (0x1D4C5, 'M', u'p'), + (0x1D4C6, 'M', u'q'), + (0x1D4C7, 'M', u'r'), + (0x1D4C8, 'M', u's'), + (0x1D4C9, 'M', u't'), + (0x1D4CA, 'M', u'u'), + (0x1D4CB, 'M', u'v'), + (0x1D4CC, 'M', u'w'), + (0x1D4CD, 'M', u'x'), + (0x1D4CE, 'M', u'y'), + (0x1D4CF, 'M', u'z'), + (0x1D4D0, 'M', u'a'), + ] + +def _seg_60(): + return [ + (0x1D4D1, 'M', u'b'), + (0x1D4D2, 'M', u'c'), + (0x1D4D3, 'M', u'd'), + (0x1D4D4, 'M', u'e'), + (0x1D4D5, 'M', u'f'), + (0x1D4D6, 'M', u'g'), + (0x1D4D7, 'M', u'h'), + (0x1D4D8, 'M', u'i'), + (0x1D4D9, 'M', u'j'), + (0x1D4DA, 'M', u'k'), + (0x1D4DB, 'M', u'l'), + (0x1D4DC, 'M', u'm'), + (0x1D4DD, 'M', u'n'), + (0x1D4DE, 'M', u'o'), + (0x1D4DF, 'M', u'p'), + (0x1D4E0, 'M', u'q'), + (0x1D4E1, 'M', u'r'), + (0x1D4E2, 'M', u's'), + (0x1D4E3, 'M', u't'), + (0x1D4E4, 'M', u'u'), + (0x1D4E5, 'M', u'v'), + (0x1D4E6, 'M', u'w'), + (0x1D4E7, 'M', u'x'), + (0x1D4E8, 'M', u'y'), + (0x1D4E9, 'M', u'z'), + (0x1D4EA, 'M', u'a'), + (0x1D4EB, 'M', u'b'), + (0x1D4EC, 'M', u'c'), + (0x1D4ED, 'M', u'd'), + (0x1D4EE, 'M', u'e'), + (0x1D4EF, 'M', u'f'), + (0x1D4F0, 'M', u'g'), + (0x1D4F1, 'M', u'h'), + (0x1D4F2, 'M', u'i'), + (0x1D4F3, 'M', u'j'), + (0x1D4F4, 'M', u'k'), + (0x1D4F5, 'M', u'l'), + (0x1D4F6, 'M', u'm'), + (0x1D4F7, 'M', u'n'), + (0x1D4F8, 'M', u'o'), + (0x1D4F9, 'M', u'p'), + (0x1D4FA, 'M', u'q'), + (0x1D4FB, 'M', u'r'), + (0x1D4FC, 'M', u's'), + (0x1D4FD, 'M', u't'), + (0x1D4FE, 'M', u'u'), + (0x1D4FF, 'M', u'v'), + (0x1D500, 'M', u'w'), + (0x1D501, 'M', u'x'), + (0x1D502, 'M', u'y'), + (0x1D503, 'M', u'z'), + (0x1D504, 'M', u'a'), + (0x1D505, 'M', u'b'), + (0x1D506, 'X'), + (0x1D507, 'M', u'd'), + (0x1D508, 'M', u'e'), + (0x1D509, 'M', u'f'), + (0x1D50A, 'M', u'g'), + (0x1D50B, 'X'), + (0x1D50D, 'M', u'j'), + (0x1D50E, 'M', u'k'), + (0x1D50F, 'M', u'l'), + (0x1D510, 'M', u'm'), + (0x1D511, 'M', u'n'), + (0x1D512, 'M', u'o'), + (0x1D513, 'M', u'p'), + (0x1D514, 'M', u'q'), + (0x1D515, 'X'), + (0x1D516, 'M', u's'), + (0x1D517, 'M', u't'), + (0x1D518, 'M', u'u'), + (0x1D519, 'M', u'v'), + (0x1D51A, 'M', u'w'), + (0x1D51B, 'M', u'x'), + (0x1D51C, 'M', u'y'), + (0x1D51D, 'X'), + (0x1D51E, 'M', u'a'), + (0x1D51F, 'M', u'b'), + (0x1D520, 'M', u'c'), + (0x1D521, 'M', u'd'), + (0x1D522, 'M', u'e'), + (0x1D523, 'M', u'f'), + (0x1D524, 'M', u'g'), + (0x1D525, 'M', u'h'), + (0x1D526, 'M', u'i'), + (0x1D527, 'M', u'j'), + (0x1D528, 'M', u'k'), + (0x1D529, 'M', u'l'), + (0x1D52A, 'M', u'm'), + (0x1D52B, 'M', u'n'), + (0x1D52C, 'M', u'o'), + (0x1D52D, 'M', u'p'), + (0x1D52E, 'M', u'q'), + (0x1D52F, 'M', u'r'), + (0x1D530, 'M', u's'), + (0x1D531, 'M', u't'), + (0x1D532, 'M', u'u'), + (0x1D533, 'M', u'v'), + (0x1D534, 'M', u'w'), + (0x1D535, 'M', u'x'), + ] + +def _seg_61(): + return [ + (0x1D536, 'M', u'y'), + (0x1D537, 'M', u'z'), + (0x1D538, 'M', u'a'), + (0x1D539, 'M', u'b'), + (0x1D53A, 'X'), + (0x1D53B, 'M', u'd'), + (0x1D53C, 'M', u'e'), + (0x1D53D, 'M', u'f'), + (0x1D53E, 'M', u'g'), + (0x1D53F, 'X'), + (0x1D540, 'M', u'i'), + (0x1D541, 'M', u'j'), + (0x1D542, 'M', u'k'), + (0x1D543, 'M', u'l'), + (0x1D544, 'M', u'm'), + (0x1D545, 'X'), + (0x1D546, 'M', u'o'), + (0x1D547, 'X'), + (0x1D54A, 'M', u's'), + (0x1D54B, 'M', u't'), + (0x1D54C, 'M', u'u'), + (0x1D54D, 'M', u'v'), + (0x1D54E, 'M', u'w'), + (0x1D54F, 'M', u'x'), + (0x1D550, 'M', 
u'y'), + (0x1D551, 'X'), + (0x1D552, 'M', u'a'), + (0x1D553, 'M', u'b'), + (0x1D554, 'M', u'c'), + (0x1D555, 'M', u'd'), + (0x1D556, 'M', u'e'), + (0x1D557, 'M', u'f'), + (0x1D558, 'M', u'g'), + (0x1D559, 'M', u'h'), + (0x1D55A, 'M', u'i'), + (0x1D55B, 'M', u'j'), + (0x1D55C, 'M', u'k'), + (0x1D55D, 'M', u'l'), + (0x1D55E, 'M', u'm'), + (0x1D55F, 'M', u'n'), + (0x1D560, 'M', u'o'), + (0x1D561, 'M', u'p'), + (0x1D562, 'M', u'q'), + (0x1D563, 'M', u'r'), + (0x1D564, 'M', u's'), + (0x1D565, 'M', u't'), + (0x1D566, 'M', u'u'), + (0x1D567, 'M', u'v'), + (0x1D568, 'M', u'w'), + (0x1D569, 'M', u'x'), + (0x1D56A, 'M', u'y'), + (0x1D56B, 'M', u'z'), + (0x1D56C, 'M', u'a'), + (0x1D56D, 'M', u'b'), + (0x1D56E, 'M', u'c'), + (0x1D56F, 'M', u'd'), + (0x1D570, 'M', u'e'), + (0x1D571, 'M', u'f'), + (0x1D572, 'M', u'g'), + (0x1D573, 'M', u'h'), + (0x1D574, 'M', u'i'), + (0x1D575, 'M', u'j'), + (0x1D576, 'M', u'k'), + (0x1D577, 'M', u'l'), + (0x1D578, 'M', u'm'), + (0x1D579, 'M', u'n'), + (0x1D57A, 'M', u'o'), + (0x1D57B, 'M', u'p'), + (0x1D57C, 'M', u'q'), + (0x1D57D, 'M', u'r'), + (0x1D57E, 'M', u's'), + (0x1D57F, 'M', u't'), + (0x1D580, 'M', u'u'), + (0x1D581, 'M', u'v'), + (0x1D582, 'M', u'w'), + (0x1D583, 'M', u'x'), + (0x1D584, 'M', u'y'), + (0x1D585, 'M', u'z'), + (0x1D586, 'M', u'a'), + (0x1D587, 'M', u'b'), + (0x1D588, 'M', u'c'), + (0x1D589, 'M', u'd'), + (0x1D58A, 'M', u'e'), + (0x1D58B, 'M', u'f'), + (0x1D58C, 'M', u'g'), + (0x1D58D, 'M', u'h'), + (0x1D58E, 'M', u'i'), + (0x1D58F, 'M', u'j'), + (0x1D590, 'M', u'k'), + (0x1D591, 'M', u'l'), + (0x1D592, 'M', u'm'), + (0x1D593, 'M', u'n'), + (0x1D594, 'M', u'o'), + (0x1D595, 'M', u'p'), + (0x1D596, 'M', u'q'), + (0x1D597, 'M', u'r'), + (0x1D598, 'M', u's'), + (0x1D599, 'M', u't'), + (0x1D59A, 'M', u'u'), + (0x1D59B, 'M', u'v'), + ] + +def _seg_62(): + return [ + (0x1D59C, 'M', u'w'), + (0x1D59D, 'M', u'x'), + (0x1D59E, 'M', u'y'), + (0x1D59F, 'M', u'z'), + (0x1D5A0, 'M', u'a'), + (0x1D5A1, 'M', u'b'), + (0x1D5A2, 'M', u'c'), + (0x1D5A3, 'M', u'd'), + (0x1D5A4, 'M', u'e'), + (0x1D5A5, 'M', u'f'), + (0x1D5A6, 'M', u'g'), + (0x1D5A7, 'M', u'h'), + (0x1D5A8, 'M', u'i'), + (0x1D5A9, 'M', u'j'), + (0x1D5AA, 'M', u'k'), + (0x1D5AB, 'M', u'l'), + (0x1D5AC, 'M', u'm'), + (0x1D5AD, 'M', u'n'), + (0x1D5AE, 'M', u'o'), + (0x1D5AF, 'M', u'p'), + (0x1D5B0, 'M', u'q'), + (0x1D5B1, 'M', u'r'), + (0x1D5B2, 'M', u's'), + (0x1D5B3, 'M', u't'), + (0x1D5B4, 'M', u'u'), + (0x1D5B5, 'M', u'v'), + (0x1D5B6, 'M', u'w'), + (0x1D5B7, 'M', u'x'), + (0x1D5B8, 'M', u'y'), + (0x1D5B9, 'M', u'z'), + (0x1D5BA, 'M', u'a'), + (0x1D5BB, 'M', u'b'), + (0x1D5BC, 'M', u'c'), + (0x1D5BD, 'M', u'd'), + (0x1D5BE, 'M', u'e'), + (0x1D5BF, 'M', u'f'), + (0x1D5C0, 'M', u'g'), + (0x1D5C1, 'M', u'h'), + (0x1D5C2, 'M', u'i'), + (0x1D5C3, 'M', u'j'), + (0x1D5C4, 'M', u'k'), + (0x1D5C5, 'M', u'l'), + (0x1D5C6, 'M', u'm'), + (0x1D5C7, 'M', u'n'), + (0x1D5C8, 'M', u'o'), + (0x1D5C9, 'M', u'p'), + (0x1D5CA, 'M', u'q'), + (0x1D5CB, 'M', u'r'), + (0x1D5CC, 'M', u's'), + (0x1D5CD, 'M', u't'), + (0x1D5CE, 'M', u'u'), + (0x1D5CF, 'M', u'v'), + (0x1D5D0, 'M', u'w'), + (0x1D5D1, 'M', u'x'), + (0x1D5D2, 'M', u'y'), + (0x1D5D3, 'M', u'z'), + (0x1D5D4, 'M', u'a'), + (0x1D5D5, 'M', u'b'), + (0x1D5D6, 'M', u'c'), + (0x1D5D7, 'M', u'd'), + (0x1D5D8, 'M', u'e'), + (0x1D5D9, 'M', u'f'), + (0x1D5DA, 'M', u'g'), + (0x1D5DB, 'M', u'h'), + (0x1D5DC, 'M', u'i'), + (0x1D5DD, 'M', u'j'), + (0x1D5DE, 'M', u'k'), + (0x1D5DF, 'M', u'l'), + (0x1D5E0, 'M', u'm'), + (0x1D5E1, 'M', u'n'), + (0x1D5E2, 'M', u'o'), + (0x1D5E3, 'M', 
u'p'), + (0x1D5E4, 'M', u'q'), + (0x1D5E5, 'M', u'r'), + (0x1D5E6, 'M', u's'), + (0x1D5E7, 'M', u't'), + (0x1D5E8, 'M', u'u'), + (0x1D5E9, 'M', u'v'), + (0x1D5EA, 'M', u'w'), + (0x1D5EB, 'M', u'x'), + (0x1D5EC, 'M', u'y'), + (0x1D5ED, 'M', u'z'), + (0x1D5EE, 'M', u'a'), + (0x1D5EF, 'M', u'b'), + (0x1D5F0, 'M', u'c'), + (0x1D5F1, 'M', u'd'), + (0x1D5F2, 'M', u'e'), + (0x1D5F3, 'M', u'f'), + (0x1D5F4, 'M', u'g'), + (0x1D5F5, 'M', u'h'), + (0x1D5F6, 'M', u'i'), + (0x1D5F7, 'M', u'j'), + (0x1D5F8, 'M', u'k'), + (0x1D5F9, 'M', u'l'), + (0x1D5FA, 'M', u'm'), + (0x1D5FB, 'M', u'n'), + (0x1D5FC, 'M', u'o'), + (0x1D5FD, 'M', u'p'), + (0x1D5FE, 'M', u'q'), + (0x1D5FF, 'M', u'r'), + ] + +def _seg_63(): + return [ + (0x1D600, 'M', u's'), + (0x1D601, 'M', u't'), + (0x1D602, 'M', u'u'), + (0x1D603, 'M', u'v'), + (0x1D604, 'M', u'w'), + (0x1D605, 'M', u'x'), + (0x1D606, 'M', u'y'), + (0x1D607, 'M', u'z'), + (0x1D608, 'M', u'a'), + (0x1D609, 'M', u'b'), + (0x1D60A, 'M', u'c'), + (0x1D60B, 'M', u'd'), + (0x1D60C, 'M', u'e'), + (0x1D60D, 'M', u'f'), + (0x1D60E, 'M', u'g'), + (0x1D60F, 'M', u'h'), + (0x1D610, 'M', u'i'), + (0x1D611, 'M', u'j'), + (0x1D612, 'M', u'k'), + (0x1D613, 'M', u'l'), + (0x1D614, 'M', u'm'), + (0x1D615, 'M', u'n'), + (0x1D616, 'M', u'o'), + (0x1D617, 'M', u'p'), + (0x1D618, 'M', u'q'), + (0x1D619, 'M', u'r'), + (0x1D61A, 'M', u's'), + (0x1D61B, 'M', u't'), + (0x1D61C, 'M', u'u'), + (0x1D61D, 'M', u'v'), + (0x1D61E, 'M', u'w'), + (0x1D61F, 'M', u'x'), + (0x1D620, 'M', u'y'), + (0x1D621, 'M', u'z'), + (0x1D622, 'M', u'a'), + (0x1D623, 'M', u'b'), + (0x1D624, 'M', u'c'), + (0x1D625, 'M', u'd'), + (0x1D626, 'M', u'e'), + (0x1D627, 'M', u'f'), + (0x1D628, 'M', u'g'), + (0x1D629, 'M', u'h'), + (0x1D62A, 'M', u'i'), + (0x1D62B, 'M', u'j'), + (0x1D62C, 'M', u'k'), + (0x1D62D, 'M', u'l'), + (0x1D62E, 'M', u'm'), + (0x1D62F, 'M', u'n'), + (0x1D630, 'M', u'o'), + (0x1D631, 'M', u'p'), + (0x1D632, 'M', u'q'), + (0x1D633, 'M', u'r'), + (0x1D634, 'M', u's'), + (0x1D635, 'M', u't'), + (0x1D636, 'M', u'u'), + (0x1D637, 'M', u'v'), + (0x1D638, 'M', u'w'), + (0x1D639, 'M', u'x'), + (0x1D63A, 'M', u'y'), + (0x1D63B, 'M', u'z'), + (0x1D63C, 'M', u'a'), + (0x1D63D, 'M', u'b'), + (0x1D63E, 'M', u'c'), + (0x1D63F, 'M', u'd'), + (0x1D640, 'M', u'e'), + (0x1D641, 'M', u'f'), + (0x1D642, 'M', u'g'), + (0x1D643, 'M', u'h'), + (0x1D644, 'M', u'i'), + (0x1D645, 'M', u'j'), + (0x1D646, 'M', u'k'), + (0x1D647, 'M', u'l'), + (0x1D648, 'M', u'm'), + (0x1D649, 'M', u'n'), + (0x1D64A, 'M', u'o'), + (0x1D64B, 'M', u'p'), + (0x1D64C, 'M', u'q'), + (0x1D64D, 'M', u'r'), + (0x1D64E, 'M', u's'), + (0x1D64F, 'M', u't'), + (0x1D650, 'M', u'u'), + (0x1D651, 'M', u'v'), + (0x1D652, 'M', u'w'), + (0x1D653, 'M', u'x'), + (0x1D654, 'M', u'y'), + (0x1D655, 'M', u'z'), + (0x1D656, 'M', u'a'), + (0x1D657, 'M', u'b'), + (0x1D658, 'M', u'c'), + (0x1D659, 'M', u'd'), + (0x1D65A, 'M', u'e'), + (0x1D65B, 'M', u'f'), + (0x1D65C, 'M', u'g'), + (0x1D65D, 'M', u'h'), + (0x1D65E, 'M', u'i'), + (0x1D65F, 'M', u'j'), + (0x1D660, 'M', u'k'), + (0x1D661, 'M', u'l'), + (0x1D662, 'M', u'm'), + (0x1D663, 'M', u'n'), + ] + +def _seg_64(): + return [ + (0x1D664, 'M', u'o'), + (0x1D665, 'M', u'p'), + (0x1D666, 'M', u'q'), + (0x1D667, 'M', u'r'), + (0x1D668, 'M', u's'), + (0x1D669, 'M', u't'), + (0x1D66A, 'M', u'u'), + (0x1D66B, 'M', u'v'), + (0x1D66C, 'M', u'w'), + (0x1D66D, 'M', u'x'), + (0x1D66E, 'M', u'y'), + (0x1D66F, 'M', u'z'), + (0x1D670, 'M', u'a'), + (0x1D671, 'M', u'b'), + (0x1D672, 'M', u'c'), + (0x1D673, 'M', u'd'), + (0x1D674, 'M', u'e'), + 
(0x1D675, 'M', u'f'), + (0x1D676, 'M', u'g'), + (0x1D677, 'M', u'h'), + (0x1D678, 'M', u'i'), + (0x1D679, 'M', u'j'), + (0x1D67A, 'M', u'k'), + (0x1D67B, 'M', u'l'), + (0x1D67C, 'M', u'm'), + (0x1D67D, 'M', u'n'), + (0x1D67E, 'M', u'o'), + (0x1D67F, 'M', u'p'), + (0x1D680, 'M', u'q'), + (0x1D681, 'M', u'r'), + (0x1D682, 'M', u's'), + (0x1D683, 'M', u't'), + (0x1D684, 'M', u'u'), + (0x1D685, 'M', u'v'), + (0x1D686, 'M', u'w'), + (0x1D687, 'M', u'x'), + (0x1D688, 'M', u'y'), + (0x1D689, 'M', u'z'), + (0x1D68A, 'M', u'a'), + (0x1D68B, 'M', u'b'), + (0x1D68C, 'M', u'c'), + (0x1D68D, 'M', u'd'), + (0x1D68E, 'M', u'e'), + (0x1D68F, 'M', u'f'), + (0x1D690, 'M', u'g'), + (0x1D691, 'M', u'h'), + (0x1D692, 'M', u'i'), + (0x1D693, 'M', u'j'), + (0x1D694, 'M', u'k'), + (0x1D695, 'M', u'l'), + (0x1D696, 'M', u'm'), + (0x1D697, 'M', u'n'), + (0x1D698, 'M', u'o'), + (0x1D699, 'M', u'p'), + (0x1D69A, 'M', u'q'), + (0x1D69B, 'M', u'r'), + (0x1D69C, 'M', u's'), + (0x1D69D, 'M', u't'), + (0x1D69E, 'M', u'u'), + (0x1D69F, 'M', u'v'), + (0x1D6A0, 'M', u'w'), + (0x1D6A1, 'M', u'x'), + (0x1D6A2, 'M', u'y'), + (0x1D6A3, 'M', u'z'), + (0x1D6A4, 'M', u'ı'), + (0x1D6A5, 'M', u'ȷ'), + (0x1D6A6, 'X'), + (0x1D6A8, 'M', u'α'), + (0x1D6A9, 'M', u'β'), + (0x1D6AA, 'M', u'γ'), + (0x1D6AB, 'M', u'δ'), + (0x1D6AC, 'M', u'ε'), + (0x1D6AD, 'M', u'ζ'), + (0x1D6AE, 'M', u'η'), + (0x1D6AF, 'M', u'θ'), + (0x1D6B0, 'M', u'ι'), + (0x1D6B1, 'M', u'κ'), + (0x1D6B2, 'M', u'λ'), + (0x1D6B3, 'M', u'μ'), + (0x1D6B4, 'M', u'ν'), + (0x1D6B5, 'M', u'ξ'), + (0x1D6B6, 'M', u'ο'), + (0x1D6B7, 'M', u'π'), + (0x1D6B8, 'M', u'ρ'), + (0x1D6B9, 'M', u'θ'), + (0x1D6BA, 'M', u'σ'), + (0x1D6BB, 'M', u'τ'), + (0x1D6BC, 'M', u'υ'), + (0x1D6BD, 'M', u'φ'), + (0x1D6BE, 'M', u'χ'), + (0x1D6BF, 'M', u'ψ'), + (0x1D6C0, 'M', u'ω'), + (0x1D6C1, 'M', u'∇'), + (0x1D6C2, 'M', u'α'), + (0x1D6C3, 'M', u'β'), + (0x1D6C4, 'M', u'γ'), + (0x1D6C5, 'M', u'δ'), + (0x1D6C6, 'M', u'ε'), + (0x1D6C7, 'M', u'ζ'), + (0x1D6C8, 'M', u'η'), + ] + +def _seg_65(): + return [ + (0x1D6C9, 'M', u'θ'), + (0x1D6CA, 'M', u'ι'), + (0x1D6CB, 'M', u'κ'), + (0x1D6CC, 'M', u'λ'), + (0x1D6CD, 'M', u'μ'), + (0x1D6CE, 'M', u'ν'), + (0x1D6CF, 'M', u'ξ'), + (0x1D6D0, 'M', u'ο'), + (0x1D6D1, 'M', u'π'), + (0x1D6D2, 'M', u'ρ'), + (0x1D6D3, 'M', u'σ'), + (0x1D6D5, 'M', u'τ'), + (0x1D6D6, 'M', u'υ'), + (0x1D6D7, 'M', u'φ'), + (0x1D6D8, 'M', u'χ'), + (0x1D6D9, 'M', u'ψ'), + (0x1D6DA, 'M', u'ω'), + (0x1D6DB, 'M', u'∂'), + (0x1D6DC, 'M', u'ε'), + (0x1D6DD, 'M', u'θ'), + (0x1D6DE, 'M', u'κ'), + (0x1D6DF, 'M', u'φ'), + (0x1D6E0, 'M', u'ρ'), + (0x1D6E1, 'M', u'π'), + (0x1D6E2, 'M', u'α'), + (0x1D6E3, 'M', u'β'), + (0x1D6E4, 'M', u'γ'), + (0x1D6E5, 'M', u'δ'), + (0x1D6E6, 'M', u'ε'), + (0x1D6E7, 'M', u'ζ'), + (0x1D6E8, 'M', u'η'), + (0x1D6E9, 'M', u'θ'), + (0x1D6EA, 'M', u'ι'), + (0x1D6EB, 'M', u'κ'), + (0x1D6EC, 'M', u'λ'), + (0x1D6ED, 'M', u'μ'), + (0x1D6EE, 'M', u'ν'), + (0x1D6EF, 'M', u'ξ'), + (0x1D6F0, 'M', u'ο'), + (0x1D6F1, 'M', u'π'), + (0x1D6F2, 'M', u'ρ'), + (0x1D6F3, 'M', u'θ'), + (0x1D6F4, 'M', u'σ'), + (0x1D6F5, 'M', u'τ'), + (0x1D6F6, 'M', u'υ'), + (0x1D6F7, 'M', u'φ'), + (0x1D6F8, 'M', u'χ'), + (0x1D6F9, 'M', u'ψ'), + (0x1D6FA, 'M', u'ω'), + (0x1D6FB, 'M', u'∇'), + (0x1D6FC, 'M', u'α'), + (0x1D6FD, 'M', u'β'), + (0x1D6FE, 'M', u'γ'), + (0x1D6FF, 'M', u'δ'), + (0x1D700, 'M', u'ε'), + (0x1D701, 'M', u'ζ'), + (0x1D702, 'M', u'η'), + (0x1D703, 'M', u'θ'), + (0x1D704, 'M', u'ι'), + (0x1D705, 'M', u'κ'), + (0x1D706, 'M', u'λ'), + (0x1D707, 'M', u'μ'), + (0x1D708, 'M', u'ν'), + (0x1D709, 'M', u'ξ'), + 
(0x1D70A, 'M', u'ο'), + (0x1D70B, 'M', u'π'), + (0x1D70C, 'M', u'ρ'), + (0x1D70D, 'M', u'σ'), + (0x1D70F, 'M', u'τ'), + (0x1D710, 'M', u'υ'), + (0x1D711, 'M', u'φ'), + (0x1D712, 'M', u'χ'), + (0x1D713, 'M', u'ψ'), + (0x1D714, 'M', u'ω'), + (0x1D715, 'M', u'∂'), + (0x1D716, 'M', u'ε'), + (0x1D717, 'M', u'θ'), + (0x1D718, 'M', u'κ'), + (0x1D719, 'M', u'φ'), + (0x1D71A, 'M', u'ρ'), + (0x1D71B, 'M', u'π'), + (0x1D71C, 'M', u'α'), + (0x1D71D, 'M', u'β'), + (0x1D71E, 'M', u'γ'), + (0x1D71F, 'M', u'δ'), + (0x1D720, 'M', u'ε'), + (0x1D721, 'M', u'ζ'), + (0x1D722, 'M', u'η'), + (0x1D723, 'M', u'θ'), + (0x1D724, 'M', u'ι'), + (0x1D725, 'M', u'κ'), + (0x1D726, 'M', u'λ'), + (0x1D727, 'M', u'μ'), + (0x1D728, 'M', u'ν'), + (0x1D729, 'M', u'ξ'), + (0x1D72A, 'M', u'ο'), + (0x1D72B, 'M', u'π'), + (0x1D72C, 'M', u'ρ'), + (0x1D72D, 'M', u'θ'), + (0x1D72E, 'M', u'σ'), + ] + +def _seg_66(): + return [ + (0x1D72F, 'M', u'τ'), + (0x1D730, 'M', u'υ'), + (0x1D731, 'M', u'φ'), + (0x1D732, 'M', u'χ'), + (0x1D733, 'M', u'ψ'), + (0x1D734, 'M', u'ω'), + (0x1D735, 'M', u'∇'), + (0x1D736, 'M', u'α'), + (0x1D737, 'M', u'β'), + (0x1D738, 'M', u'γ'), + (0x1D739, 'M', u'δ'), + (0x1D73A, 'M', u'ε'), + (0x1D73B, 'M', u'ζ'), + (0x1D73C, 'M', u'η'), + (0x1D73D, 'M', u'θ'), + (0x1D73E, 'M', u'ι'), + (0x1D73F, 'M', u'κ'), + (0x1D740, 'M', u'λ'), + (0x1D741, 'M', u'μ'), + (0x1D742, 'M', u'ν'), + (0x1D743, 'M', u'ξ'), + (0x1D744, 'M', u'ο'), + (0x1D745, 'M', u'π'), + (0x1D746, 'M', u'ρ'), + (0x1D747, 'M', u'σ'), + (0x1D749, 'M', u'τ'), + (0x1D74A, 'M', u'υ'), + (0x1D74B, 'M', u'φ'), + (0x1D74C, 'M', u'χ'), + (0x1D74D, 'M', u'ψ'), + (0x1D74E, 'M', u'ω'), + (0x1D74F, 'M', u'∂'), + (0x1D750, 'M', u'ε'), + (0x1D751, 'M', u'θ'), + (0x1D752, 'M', u'κ'), + (0x1D753, 'M', u'φ'), + (0x1D754, 'M', u'ρ'), + (0x1D755, 'M', u'π'), + (0x1D756, 'M', u'α'), + (0x1D757, 'M', u'β'), + (0x1D758, 'M', u'γ'), + (0x1D759, 'M', u'δ'), + (0x1D75A, 'M', u'ε'), + (0x1D75B, 'M', u'ζ'), + (0x1D75C, 'M', u'η'), + (0x1D75D, 'M', u'θ'), + (0x1D75E, 'M', u'ι'), + (0x1D75F, 'M', u'κ'), + (0x1D760, 'M', u'λ'), + (0x1D761, 'M', u'μ'), + (0x1D762, 'M', u'ν'), + (0x1D763, 'M', u'ξ'), + (0x1D764, 'M', u'ο'), + (0x1D765, 'M', u'π'), + (0x1D766, 'M', u'ρ'), + (0x1D767, 'M', u'θ'), + (0x1D768, 'M', u'σ'), + (0x1D769, 'M', u'τ'), + (0x1D76A, 'M', u'υ'), + (0x1D76B, 'M', u'φ'), + (0x1D76C, 'M', u'χ'), + (0x1D76D, 'M', u'ψ'), + (0x1D76E, 'M', u'ω'), + (0x1D76F, 'M', u'∇'), + (0x1D770, 'M', u'α'), + (0x1D771, 'M', u'β'), + (0x1D772, 'M', u'γ'), + (0x1D773, 'M', u'δ'), + (0x1D774, 'M', u'ε'), + (0x1D775, 'M', u'ζ'), + (0x1D776, 'M', u'η'), + (0x1D777, 'M', u'θ'), + (0x1D778, 'M', u'ι'), + (0x1D779, 'M', u'κ'), + (0x1D77A, 'M', u'λ'), + (0x1D77B, 'M', u'μ'), + (0x1D77C, 'M', u'ν'), + (0x1D77D, 'M', u'ξ'), + (0x1D77E, 'M', u'ο'), + (0x1D77F, 'M', u'π'), + (0x1D780, 'M', u'ρ'), + (0x1D781, 'M', u'σ'), + (0x1D783, 'M', u'τ'), + (0x1D784, 'M', u'υ'), + (0x1D785, 'M', u'φ'), + (0x1D786, 'M', u'χ'), + (0x1D787, 'M', u'ψ'), + (0x1D788, 'M', u'ω'), + (0x1D789, 'M', u'∂'), + (0x1D78A, 'M', u'ε'), + (0x1D78B, 'M', u'θ'), + (0x1D78C, 'M', u'κ'), + (0x1D78D, 'M', u'φ'), + (0x1D78E, 'M', u'ρ'), + (0x1D78F, 'M', u'π'), + (0x1D790, 'M', u'α'), + (0x1D791, 'M', u'β'), + (0x1D792, 'M', u'γ'), + (0x1D793, 'M', u'δ'), + (0x1D794, 'M', u'ε'), + ] + +def _seg_67(): + return [ + (0x1D795, 'M', u'ζ'), + (0x1D796, 'M', u'η'), + (0x1D797, 'M', u'θ'), + (0x1D798, 'M', u'ι'), + (0x1D799, 'M', u'κ'), + (0x1D79A, 'M', u'λ'), + (0x1D79B, 'M', u'μ'), + (0x1D79C, 'M', u'ν'), + (0x1D79D, 'M', u'ξ'), + 
(0x1D79E, 'M', u'ο'), + (0x1D79F, 'M', u'π'), + (0x1D7A0, 'M', u'ρ'), + (0x1D7A1, 'M', u'θ'), + (0x1D7A2, 'M', u'σ'), + (0x1D7A3, 'M', u'τ'), + (0x1D7A4, 'M', u'υ'), + (0x1D7A5, 'M', u'φ'), + (0x1D7A6, 'M', u'χ'), + (0x1D7A7, 'M', u'ψ'), + (0x1D7A8, 'M', u'ω'), + (0x1D7A9, 'M', u'∇'), + (0x1D7AA, 'M', u'α'), + (0x1D7AB, 'M', u'β'), + (0x1D7AC, 'M', u'γ'), + (0x1D7AD, 'M', u'δ'), + (0x1D7AE, 'M', u'ε'), + (0x1D7AF, 'M', u'ζ'), + (0x1D7B0, 'M', u'η'), + (0x1D7B1, 'M', u'θ'), + (0x1D7B2, 'M', u'ι'), + (0x1D7B3, 'M', u'κ'), + (0x1D7B4, 'M', u'λ'), + (0x1D7B5, 'M', u'μ'), + (0x1D7B6, 'M', u'ν'), + (0x1D7B7, 'M', u'ξ'), + (0x1D7B8, 'M', u'ο'), + (0x1D7B9, 'M', u'π'), + (0x1D7BA, 'M', u'ρ'), + (0x1D7BB, 'M', u'σ'), + (0x1D7BD, 'M', u'τ'), + (0x1D7BE, 'M', u'υ'), + (0x1D7BF, 'M', u'φ'), + (0x1D7C0, 'M', u'χ'), + (0x1D7C1, 'M', u'ψ'), + (0x1D7C2, 'M', u'ω'), + (0x1D7C3, 'M', u'∂'), + (0x1D7C4, 'M', u'ε'), + (0x1D7C5, 'M', u'θ'), + (0x1D7C6, 'M', u'κ'), + (0x1D7C7, 'M', u'φ'), + (0x1D7C8, 'M', u'ρ'), + (0x1D7C9, 'M', u'π'), + (0x1D7CA, 'M', u'ϝ'), + (0x1D7CC, 'X'), + (0x1D7CE, 'M', u'0'), + (0x1D7CF, 'M', u'1'), + (0x1D7D0, 'M', u'2'), + (0x1D7D1, 'M', u'3'), + (0x1D7D2, 'M', u'4'), + (0x1D7D3, 'M', u'5'), + (0x1D7D4, 'M', u'6'), + (0x1D7D5, 'M', u'7'), + (0x1D7D6, 'M', u'8'), + (0x1D7D7, 'M', u'9'), + (0x1D7D8, 'M', u'0'), + (0x1D7D9, 'M', u'1'), + (0x1D7DA, 'M', u'2'), + (0x1D7DB, 'M', u'3'), + (0x1D7DC, 'M', u'4'), + (0x1D7DD, 'M', u'5'), + (0x1D7DE, 'M', u'6'), + (0x1D7DF, 'M', u'7'), + (0x1D7E0, 'M', u'8'), + (0x1D7E1, 'M', u'9'), + (0x1D7E2, 'M', u'0'), + (0x1D7E3, 'M', u'1'), + (0x1D7E4, 'M', u'2'), + (0x1D7E5, 'M', u'3'), + (0x1D7E6, 'M', u'4'), + (0x1D7E7, 'M', u'5'), + (0x1D7E8, 'M', u'6'), + (0x1D7E9, 'M', u'7'), + (0x1D7EA, 'M', u'8'), + (0x1D7EB, 'M', u'9'), + (0x1D7EC, 'M', u'0'), + (0x1D7ED, 'M', u'1'), + (0x1D7EE, 'M', u'2'), + (0x1D7EF, 'M', u'3'), + (0x1D7F0, 'M', u'4'), + (0x1D7F1, 'M', u'5'), + (0x1D7F2, 'M', u'6'), + (0x1D7F3, 'M', u'7'), + (0x1D7F4, 'M', u'8'), + (0x1D7F5, 'M', u'9'), + (0x1D7F6, 'M', u'0'), + (0x1D7F7, 'M', u'1'), + (0x1D7F8, 'M', u'2'), + (0x1D7F9, 'M', u'3'), + (0x1D7FA, 'M', u'4'), + (0x1D7FB, 'M', u'5'), + ] + +def _seg_68(): + return [ + (0x1D7FC, 'M', u'6'), + (0x1D7FD, 'M', u'7'), + (0x1D7FE, 'M', u'8'), + (0x1D7FF, 'M', u'9'), + (0x1D800, 'V'), + (0x1DA8C, 'X'), + (0x1DA9B, 'V'), + (0x1DAA0, 'X'), + (0x1DAA1, 'V'), + (0x1DAB0, 'X'), + (0x1E000, 'V'), + (0x1E007, 'X'), + (0x1E008, 'V'), + (0x1E019, 'X'), + (0x1E01B, 'V'), + (0x1E022, 'X'), + (0x1E023, 'V'), + (0x1E025, 'X'), + (0x1E026, 'V'), + (0x1E02B, 'X'), + (0x1E800, 'V'), + (0x1E8C5, 'X'), + (0x1E8C7, 'V'), + (0x1E8D7, 'X'), + (0x1E900, 'M', u'𞤢'), + (0x1E901, 'M', u'𞤣'), + (0x1E902, 'M', u'𞤤'), + (0x1E903, 'M', u'𞤥'), + (0x1E904, 'M', u'𞤦'), + (0x1E905, 'M', u'𞤧'), + (0x1E906, 'M', u'𞤨'), + (0x1E907, 'M', u'𞤩'), + (0x1E908, 'M', u'𞤪'), + (0x1E909, 'M', u'𞤫'), + (0x1E90A, 'M', u'𞤬'), + (0x1E90B, 'M', u'𞤭'), + (0x1E90C, 'M', u'𞤮'), + (0x1E90D, 'M', u'𞤯'), + (0x1E90E, 'M', u'𞤰'), + (0x1E90F, 'M', u'𞤱'), + (0x1E910, 'M', u'𞤲'), + (0x1E911, 'M', u'𞤳'), + (0x1E912, 'M', u'𞤴'), + (0x1E913, 'M', u'𞤵'), + (0x1E914, 'M', u'𞤶'), + (0x1E915, 'M', u'𞤷'), + (0x1E916, 'M', u'𞤸'), + (0x1E917, 'M', u'𞤹'), + (0x1E918, 'M', u'𞤺'), + (0x1E919, 'M', u'𞤻'), + (0x1E91A, 'M', u'𞤼'), + (0x1E91B, 'M', u'𞤽'), + (0x1E91C, 'M', u'𞤾'), + (0x1E91D, 'M', u'𞤿'), + (0x1E91E, 'M', u'𞥀'), + (0x1E91F, 'M', u'𞥁'), + (0x1E920, 'M', u'𞥂'), + (0x1E921, 'M', u'𞥃'), + (0x1E922, 'V'), + (0x1E94B, 'X'), + (0x1E950, 'V'), + (0x1E95A, 'X'), + 
(0x1E95E, 'V'), + (0x1E960, 'X'), + (0x1EE00, 'M', u'ا'), + (0x1EE01, 'M', u'ب'), + (0x1EE02, 'M', u'ج'), + (0x1EE03, 'M', u'د'), + (0x1EE04, 'X'), + (0x1EE05, 'M', u'و'), + (0x1EE06, 'M', u'ز'), + (0x1EE07, 'M', u'ح'), + (0x1EE08, 'M', u'ط'), + (0x1EE09, 'M', u'ي'), + (0x1EE0A, 'M', u'ك'), + (0x1EE0B, 'M', u'ل'), + (0x1EE0C, 'M', u'م'), + (0x1EE0D, 'M', u'ن'), + (0x1EE0E, 'M', u'س'), + (0x1EE0F, 'M', u'ع'), + (0x1EE10, 'M', u'ف'), + (0x1EE11, 'M', u'ص'), + (0x1EE12, 'M', u'ق'), + (0x1EE13, 'M', u'ر'), + (0x1EE14, 'M', u'ش'), + (0x1EE15, 'M', u'ت'), + (0x1EE16, 'M', u'ث'), + (0x1EE17, 'M', u'خ'), + (0x1EE18, 'M', u'ذ'), + (0x1EE19, 'M', u'ض'), + (0x1EE1A, 'M', u'ظ'), + (0x1EE1B, 'M', u'غ'), + (0x1EE1C, 'M', u'ٮ'), + (0x1EE1D, 'M', u'ں'), + (0x1EE1E, 'M', u'ڡ'), + (0x1EE1F, 'M', u'ٯ'), + (0x1EE20, 'X'), + (0x1EE21, 'M', u'ب'), + (0x1EE22, 'M', u'ج'), + (0x1EE23, 'X'), + ] + +def _seg_69(): + return [ + (0x1EE24, 'M', u'ه'), + (0x1EE25, 'X'), + (0x1EE27, 'M', u'ح'), + (0x1EE28, 'X'), + (0x1EE29, 'M', u'ي'), + (0x1EE2A, 'M', u'ك'), + (0x1EE2B, 'M', u'ل'), + (0x1EE2C, 'M', u'م'), + (0x1EE2D, 'M', u'ن'), + (0x1EE2E, 'M', u'س'), + (0x1EE2F, 'M', u'ع'), + (0x1EE30, 'M', u'ف'), + (0x1EE31, 'M', u'ص'), + (0x1EE32, 'M', u'ق'), + (0x1EE33, 'X'), + (0x1EE34, 'M', u'ش'), + (0x1EE35, 'M', u'ت'), + (0x1EE36, 'M', u'ث'), + (0x1EE37, 'M', u'خ'), + (0x1EE38, 'X'), + (0x1EE39, 'M', u'ض'), + (0x1EE3A, 'X'), + (0x1EE3B, 'M', u'غ'), + (0x1EE3C, 'X'), + (0x1EE42, 'M', u'ج'), + (0x1EE43, 'X'), + (0x1EE47, 'M', u'ح'), + (0x1EE48, 'X'), + (0x1EE49, 'M', u'ي'), + (0x1EE4A, 'X'), + (0x1EE4B, 'M', u'ل'), + (0x1EE4C, 'X'), + (0x1EE4D, 'M', u'ن'), + (0x1EE4E, 'M', u'س'), + (0x1EE4F, 'M', u'ع'), + (0x1EE50, 'X'), + (0x1EE51, 'M', u'ص'), + (0x1EE52, 'M', u'ق'), + (0x1EE53, 'X'), + (0x1EE54, 'M', u'ش'), + (0x1EE55, 'X'), + (0x1EE57, 'M', u'خ'), + (0x1EE58, 'X'), + (0x1EE59, 'M', u'ض'), + (0x1EE5A, 'X'), + (0x1EE5B, 'M', u'غ'), + (0x1EE5C, 'X'), + (0x1EE5D, 'M', u'ں'), + (0x1EE5E, 'X'), + (0x1EE5F, 'M', u'ٯ'), + (0x1EE60, 'X'), + (0x1EE61, 'M', u'ب'), + (0x1EE62, 'M', u'ج'), + (0x1EE63, 'X'), + (0x1EE64, 'M', u'ه'), + (0x1EE65, 'X'), + (0x1EE67, 'M', u'ح'), + (0x1EE68, 'M', u'ط'), + (0x1EE69, 'M', u'ي'), + (0x1EE6A, 'M', u'ك'), + (0x1EE6B, 'X'), + (0x1EE6C, 'M', u'م'), + (0x1EE6D, 'M', u'ن'), + (0x1EE6E, 'M', u'س'), + (0x1EE6F, 'M', u'ع'), + (0x1EE70, 'M', u'ف'), + (0x1EE71, 'M', u'ص'), + (0x1EE72, 'M', u'ق'), + (0x1EE73, 'X'), + (0x1EE74, 'M', u'ش'), + (0x1EE75, 'M', u'ت'), + (0x1EE76, 'M', u'ث'), + (0x1EE77, 'M', u'خ'), + (0x1EE78, 'X'), + (0x1EE79, 'M', u'ض'), + (0x1EE7A, 'M', u'ظ'), + (0x1EE7B, 'M', u'غ'), + (0x1EE7C, 'M', u'ٮ'), + (0x1EE7D, 'X'), + (0x1EE7E, 'M', u'ڡ'), + (0x1EE7F, 'X'), + (0x1EE80, 'M', u'ا'), + (0x1EE81, 'M', u'ب'), + (0x1EE82, 'M', u'ج'), + (0x1EE83, 'M', u'د'), + (0x1EE84, 'M', u'ه'), + (0x1EE85, 'M', u'و'), + (0x1EE86, 'M', u'ز'), + (0x1EE87, 'M', u'ح'), + (0x1EE88, 'M', u'ط'), + (0x1EE89, 'M', u'ي'), + (0x1EE8A, 'X'), + (0x1EE8B, 'M', u'ل'), + (0x1EE8C, 'M', u'م'), + (0x1EE8D, 'M', u'ن'), + (0x1EE8E, 'M', u'س'), + (0x1EE8F, 'M', u'ع'), + (0x1EE90, 'M', u'ف'), + (0x1EE91, 'M', u'ص'), + (0x1EE92, 'M', u'ق'), + ] + +def _seg_70(): + return [ + (0x1EE93, 'M', u'ر'), + (0x1EE94, 'M', u'ش'), + (0x1EE95, 'M', u'ت'), + (0x1EE96, 'M', u'ث'), + (0x1EE97, 'M', u'خ'), + (0x1EE98, 'M', u'ذ'), + (0x1EE99, 'M', u'ض'), + (0x1EE9A, 'M', u'ظ'), + (0x1EE9B, 'M', u'غ'), + (0x1EE9C, 'X'), + (0x1EEA1, 'M', u'ب'), + (0x1EEA2, 'M', u'ج'), + (0x1EEA3, 'M', u'د'), + (0x1EEA4, 'X'), + (0x1EEA5, 'M', u'و'), + (0x1EEA6, 'M', 
u'ز'), + (0x1EEA7, 'M', u'ح'), + (0x1EEA8, 'M', u'ط'), + (0x1EEA9, 'M', u'ي'), + (0x1EEAA, 'X'), + (0x1EEAB, 'M', u'ل'), + (0x1EEAC, 'M', u'م'), + (0x1EEAD, 'M', u'ن'), + (0x1EEAE, 'M', u'س'), + (0x1EEAF, 'M', u'ع'), + (0x1EEB0, 'M', u'ف'), + (0x1EEB1, 'M', u'ص'), + (0x1EEB2, 'M', u'ق'), + (0x1EEB3, 'M', u'ر'), + (0x1EEB4, 'M', u'ش'), + (0x1EEB5, 'M', u'ت'), + (0x1EEB6, 'M', u'ث'), + (0x1EEB7, 'M', u'خ'), + (0x1EEB8, 'M', u'ذ'), + (0x1EEB9, 'M', u'ض'), + (0x1EEBA, 'M', u'ظ'), + (0x1EEBB, 'M', u'غ'), + (0x1EEBC, 'X'), + (0x1EEF0, 'V'), + (0x1EEF2, 'X'), + (0x1F000, 'V'), + (0x1F02C, 'X'), + (0x1F030, 'V'), + (0x1F094, 'X'), + (0x1F0A0, 'V'), + (0x1F0AF, 'X'), + (0x1F0B1, 'V'), + (0x1F0C0, 'X'), + (0x1F0C1, 'V'), + (0x1F0D0, 'X'), + (0x1F0D1, 'V'), + (0x1F0F6, 'X'), + (0x1F101, '3', u'0,'), + (0x1F102, '3', u'1,'), + (0x1F103, '3', u'2,'), + (0x1F104, '3', u'3,'), + (0x1F105, '3', u'4,'), + (0x1F106, '3', u'5,'), + (0x1F107, '3', u'6,'), + (0x1F108, '3', u'7,'), + (0x1F109, '3', u'8,'), + (0x1F10A, '3', u'9,'), + (0x1F10B, 'V'), + (0x1F10D, 'X'), + (0x1F110, '3', u'(a)'), + (0x1F111, '3', u'(b)'), + (0x1F112, '3', u'(c)'), + (0x1F113, '3', u'(d)'), + (0x1F114, '3', u'(e)'), + (0x1F115, '3', u'(f)'), + (0x1F116, '3', u'(g)'), + (0x1F117, '3', u'(h)'), + (0x1F118, '3', u'(i)'), + (0x1F119, '3', u'(j)'), + (0x1F11A, '3', u'(k)'), + (0x1F11B, '3', u'(l)'), + (0x1F11C, '3', u'(m)'), + (0x1F11D, '3', u'(n)'), + (0x1F11E, '3', u'(o)'), + (0x1F11F, '3', u'(p)'), + (0x1F120, '3', u'(q)'), + (0x1F121, '3', u'(r)'), + (0x1F122, '3', u'(s)'), + (0x1F123, '3', u'(t)'), + (0x1F124, '3', u'(u)'), + (0x1F125, '3', u'(v)'), + (0x1F126, '3', u'(w)'), + (0x1F127, '3', u'(x)'), + (0x1F128, '3', u'(y)'), + (0x1F129, '3', u'(z)'), + (0x1F12A, 'M', u'〔s〕'), + (0x1F12B, 'M', u'c'), + (0x1F12C, 'M', u'r'), + (0x1F12D, 'M', u'cd'), + (0x1F12E, 'M', u'wz'), + (0x1F12F, 'X'), + (0x1F130, 'M', u'a'), + (0x1F131, 'M', u'b'), + (0x1F132, 'M', u'c'), + (0x1F133, 'M', u'd'), + ] + +def _seg_71(): + return [ + (0x1F134, 'M', u'e'), + (0x1F135, 'M', u'f'), + (0x1F136, 'M', u'g'), + (0x1F137, 'M', u'h'), + (0x1F138, 'M', u'i'), + (0x1F139, 'M', u'j'), + (0x1F13A, 'M', u'k'), + (0x1F13B, 'M', u'l'), + (0x1F13C, 'M', u'm'), + (0x1F13D, 'M', u'n'), + (0x1F13E, 'M', u'o'), + (0x1F13F, 'M', u'p'), + (0x1F140, 'M', u'q'), + (0x1F141, 'M', u'r'), + (0x1F142, 'M', u's'), + (0x1F143, 'M', u't'), + (0x1F144, 'M', u'u'), + (0x1F145, 'M', u'v'), + (0x1F146, 'M', u'w'), + (0x1F147, 'M', u'x'), + (0x1F148, 'M', u'y'), + (0x1F149, 'M', u'z'), + (0x1F14A, 'M', u'hv'), + (0x1F14B, 'M', u'mv'), + (0x1F14C, 'M', u'sd'), + (0x1F14D, 'M', u'ss'), + (0x1F14E, 'M', u'ppv'), + (0x1F14F, 'M', u'wc'), + (0x1F150, 'V'), + (0x1F16A, 'M', u'mc'), + (0x1F16B, 'M', u'md'), + (0x1F16C, 'X'), + (0x1F170, 'V'), + (0x1F190, 'M', u'dj'), + (0x1F191, 'V'), + (0x1F1AD, 'X'), + (0x1F1E6, 'V'), + (0x1F200, 'M', u'ほか'), + (0x1F201, 'M', u'ココ'), + (0x1F202, 'M', u'サ'), + (0x1F203, 'X'), + (0x1F210, 'M', u'手'), + (0x1F211, 'M', u'字'), + (0x1F212, 'M', u'双'), + (0x1F213, 'M', u'デ'), + (0x1F214, 'M', u'二'), + (0x1F215, 'M', u'多'), + (0x1F216, 'M', u'解'), + (0x1F217, 'M', u'天'), + (0x1F218, 'M', u'交'), + (0x1F219, 'M', u'映'), + (0x1F21A, 'M', u'無'), + (0x1F21B, 'M', u'料'), + (0x1F21C, 'M', u'前'), + (0x1F21D, 'M', u'後'), + (0x1F21E, 'M', u'再'), + (0x1F21F, 'M', u'新'), + (0x1F220, 'M', u'初'), + (0x1F221, 'M', u'終'), + (0x1F222, 'M', u'生'), + (0x1F223, 'M', u'販'), + (0x1F224, 'M', u'声'), + (0x1F225, 'M', u'吹'), + (0x1F226, 'M', u'演'), + (0x1F227, 'M', u'投'), + (0x1F228, 'M', 
u'捕'), + (0x1F229, 'M', u'一'), + (0x1F22A, 'M', u'三'), + (0x1F22B, 'M', u'遊'), + (0x1F22C, 'M', u'左'), + (0x1F22D, 'M', u'中'), + (0x1F22E, 'M', u'右'), + (0x1F22F, 'M', u'指'), + (0x1F230, 'M', u'走'), + (0x1F231, 'M', u'打'), + (0x1F232, 'M', u'禁'), + (0x1F233, 'M', u'空'), + (0x1F234, 'M', u'合'), + (0x1F235, 'M', u'満'), + (0x1F236, 'M', u'有'), + (0x1F237, 'M', u'月'), + (0x1F238, 'M', u'申'), + (0x1F239, 'M', u'割'), + (0x1F23A, 'M', u'営'), + (0x1F23B, 'M', u'配'), + (0x1F23C, 'X'), + (0x1F240, 'M', u'〔本〕'), + (0x1F241, 'M', u'〔三〕'), + (0x1F242, 'M', u'〔二〕'), + (0x1F243, 'M', u'〔安〕'), + (0x1F244, 'M', u'〔点〕'), + (0x1F245, 'M', u'〔打〕'), + (0x1F246, 'M', u'〔盗〕'), + (0x1F247, 'M', u'〔勝〕'), + (0x1F248, 'M', u'〔敗〕'), + (0x1F249, 'X'), + (0x1F250, 'M', u'得'), + (0x1F251, 'M', u'可'), + (0x1F252, 'X'), + (0x1F260, 'V'), + ] + +def _seg_72(): + return [ + (0x1F266, 'X'), + (0x1F300, 'V'), + (0x1F6D5, 'X'), + (0x1F6E0, 'V'), + (0x1F6ED, 'X'), + (0x1F6F0, 'V'), + (0x1F6F9, 'X'), + (0x1F700, 'V'), + (0x1F774, 'X'), + (0x1F780, 'V'), + (0x1F7D5, 'X'), + (0x1F800, 'V'), + (0x1F80C, 'X'), + (0x1F810, 'V'), + (0x1F848, 'X'), + (0x1F850, 'V'), + (0x1F85A, 'X'), + (0x1F860, 'V'), + (0x1F888, 'X'), + (0x1F890, 'V'), + (0x1F8AE, 'X'), + (0x1F900, 'V'), + (0x1F90C, 'X'), + (0x1F910, 'V'), + (0x1F93F, 'X'), + (0x1F940, 'V'), + (0x1F94D, 'X'), + (0x1F950, 'V'), + (0x1F96C, 'X'), + (0x1F980, 'V'), + (0x1F998, 'X'), + (0x1F9C0, 'V'), + (0x1F9C1, 'X'), + (0x1F9D0, 'V'), + (0x1F9E7, 'X'), + (0x20000, 'V'), + (0x2A6D7, 'X'), + (0x2A700, 'V'), + (0x2B735, 'X'), + (0x2B740, 'V'), + (0x2B81E, 'X'), + (0x2B820, 'V'), + (0x2CEA2, 'X'), + (0x2CEB0, 'V'), + (0x2EBE1, 'X'), + (0x2F800, 'M', u'丽'), + (0x2F801, 'M', u'丸'), + (0x2F802, 'M', u'乁'), + (0x2F803, 'M', u'𠄢'), + (0x2F804, 'M', u'你'), + (0x2F805, 'M', u'侮'), + (0x2F806, 'M', u'侻'), + (0x2F807, 'M', u'倂'), + (0x2F808, 'M', u'偺'), + (0x2F809, 'M', u'備'), + (0x2F80A, 'M', u'僧'), + (0x2F80B, 'M', u'像'), + (0x2F80C, 'M', u'㒞'), + (0x2F80D, 'M', u'𠘺'), + (0x2F80E, 'M', u'免'), + (0x2F80F, 'M', u'兔'), + (0x2F810, 'M', u'兤'), + (0x2F811, 'M', u'具'), + (0x2F812, 'M', u'𠔜'), + (0x2F813, 'M', u'㒹'), + (0x2F814, 'M', u'內'), + (0x2F815, 'M', u'再'), + (0x2F816, 'M', u'𠕋'), + (0x2F817, 'M', u'冗'), + (0x2F818, 'M', u'冤'), + (0x2F819, 'M', u'仌'), + (0x2F81A, 'M', u'冬'), + (0x2F81B, 'M', u'况'), + (0x2F81C, 'M', u'𩇟'), + (0x2F81D, 'M', u'凵'), + (0x2F81E, 'M', u'刃'), + (0x2F81F, 'M', u'㓟'), + (0x2F820, 'M', u'刻'), + (0x2F821, 'M', u'剆'), + (0x2F822, 'M', u'割'), + (0x2F823, 'M', u'剷'), + (0x2F824, 'M', u'㔕'), + (0x2F825, 'M', u'勇'), + (0x2F826, 'M', u'勉'), + (0x2F827, 'M', u'勤'), + (0x2F828, 'M', u'勺'), + (0x2F829, 'M', u'包'), + (0x2F82A, 'M', u'匆'), + (0x2F82B, 'M', u'北'), + (0x2F82C, 'M', u'卉'), + (0x2F82D, 'M', u'卑'), + (0x2F82E, 'M', u'博'), + (0x2F82F, 'M', u'即'), + (0x2F830, 'M', u'卽'), + (0x2F831, 'M', u'卿'), + (0x2F834, 'M', u'𠨬'), + (0x2F835, 'M', u'灰'), + (0x2F836, 'M', u'及'), + (0x2F837, 'M', u'叟'), + (0x2F838, 'M', u'𠭣'), + ] + +def _seg_73(): + return [ + (0x2F839, 'M', u'叫'), + (0x2F83A, 'M', u'叱'), + (0x2F83B, 'M', u'吆'), + (0x2F83C, 'M', u'咞'), + (0x2F83D, 'M', u'吸'), + (0x2F83E, 'M', u'呈'), + (0x2F83F, 'M', u'周'), + (0x2F840, 'M', u'咢'), + (0x2F841, 'M', u'哶'), + (0x2F842, 'M', u'唐'), + (0x2F843, 'M', u'啓'), + (0x2F844, 'M', u'啣'), + (0x2F845, 'M', u'善'), + (0x2F847, 'M', u'喙'), + (0x2F848, 'M', u'喫'), + (0x2F849, 'M', u'喳'), + (0x2F84A, 'M', u'嗂'), + (0x2F84B, 'M', u'圖'), + (0x2F84C, 'M', u'嘆'), + (0x2F84D, 'M', u'圗'), + (0x2F84E, 'M', u'噑'), + (0x2F84F, 'M', u'噴'), + (0x2F850, 
'M', u'切'), + (0x2F851, 'M', u'壮'), + (0x2F852, 'M', u'城'), + (0x2F853, 'M', u'埴'), + (0x2F854, 'M', u'堍'), + (0x2F855, 'M', u'型'), + (0x2F856, 'M', u'堲'), + (0x2F857, 'M', u'報'), + (0x2F858, 'M', u'墬'), + (0x2F859, 'M', u'𡓤'), + (0x2F85A, 'M', u'売'), + (0x2F85B, 'M', u'壷'), + (0x2F85C, 'M', u'夆'), + (0x2F85D, 'M', u'多'), + (0x2F85E, 'M', u'夢'), + (0x2F85F, 'M', u'奢'), + (0x2F860, 'M', u'𡚨'), + (0x2F861, 'M', u'𡛪'), + (0x2F862, 'M', u'姬'), + (0x2F863, 'M', u'娛'), + (0x2F864, 'M', u'娧'), + (0x2F865, 'M', u'姘'), + (0x2F866, 'M', u'婦'), + (0x2F867, 'M', u'㛮'), + (0x2F868, 'X'), + (0x2F869, 'M', u'嬈'), + (0x2F86A, 'M', u'嬾'), + (0x2F86C, 'M', u'𡧈'), + (0x2F86D, 'M', u'寃'), + (0x2F86E, 'M', u'寘'), + (0x2F86F, 'M', u'寧'), + (0x2F870, 'M', u'寳'), + (0x2F871, 'M', u'𡬘'), + (0x2F872, 'M', u'寿'), + (0x2F873, 'M', u'将'), + (0x2F874, 'X'), + (0x2F875, 'M', u'尢'), + (0x2F876, 'M', u'㞁'), + (0x2F877, 'M', u'屠'), + (0x2F878, 'M', u'屮'), + (0x2F879, 'M', u'峀'), + (0x2F87A, 'M', u'岍'), + (0x2F87B, 'M', u'𡷤'), + (0x2F87C, 'M', u'嵃'), + (0x2F87D, 'M', u'𡷦'), + (0x2F87E, 'M', u'嵮'), + (0x2F87F, 'M', u'嵫'), + (0x2F880, 'M', u'嵼'), + (0x2F881, 'M', u'巡'), + (0x2F882, 'M', u'巢'), + (0x2F883, 'M', u'㠯'), + (0x2F884, 'M', u'巽'), + (0x2F885, 'M', u'帨'), + (0x2F886, 'M', u'帽'), + (0x2F887, 'M', u'幩'), + (0x2F888, 'M', u'㡢'), + (0x2F889, 'M', u'𢆃'), + (0x2F88A, 'M', u'㡼'), + (0x2F88B, 'M', u'庰'), + (0x2F88C, 'M', u'庳'), + (0x2F88D, 'M', u'庶'), + (0x2F88E, 'M', u'廊'), + (0x2F88F, 'M', u'𪎒'), + (0x2F890, 'M', u'廾'), + (0x2F891, 'M', u'𢌱'), + (0x2F893, 'M', u'舁'), + (0x2F894, 'M', u'弢'), + (0x2F896, 'M', u'㣇'), + (0x2F897, 'M', u'𣊸'), + (0x2F898, 'M', u'𦇚'), + (0x2F899, 'M', u'形'), + (0x2F89A, 'M', u'彫'), + (0x2F89B, 'M', u'㣣'), + (0x2F89C, 'M', u'徚'), + (0x2F89D, 'M', u'忍'), + (0x2F89E, 'M', u'志'), + (0x2F89F, 'M', u'忹'), + (0x2F8A0, 'M', u'悁'), + ] + +def _seg_74(): + return [ + (0x2F8A1, 'M', u'㤺'), + (0x2F8A2, 'M', u'㤜'), + (0x2F8A3, 'M', u'悔'), + (0x2F8A4, 'M', u'𢛔'), + (0x2F8A5, 'M', u'惇'), + (0x2F8A6, 'M', u'慈'), + (0x2F8A7, 'M', u'慌'), + (0x2F8A8, 'M', u'慎'), + (0x2F8A9, 'M', u'慌'), + (0x2F8AA, 'M', u'慺'), + (0x2F8AB, 'M', u'憎'), + (0x2F8AC, 'M', u'憲'), + (0x2F8AD, 'M', u'憤'), + (0x2F8AE, 'M', u'憯'), + (0x2F8AF, 'M', u'懞'), + (0x2F8B0, 'M', u'懲'), + (0x2F8B1, 'M', u'懶'), + (0x2F8B2, 'M', u'成'), + (0x2F8B3, 'M', u'戛'), + (0x2F8B4, 'M', u'扝'), + (0x2F8B5, 'M', u'抱'), + (0x2F8B6, 'M', u'拔'), + (0x2F8B7, 'M', u'捐'), + (0x2F8B8, 'M', u'𢬌'), + (0x2F8B9, 'M', u'挽'), + (0x2F8BA, 'M', u'拼'), + (0x2F8BB, 'M', u'捨'), + (0x2F8BC, 'M', u'掃'), + (0x2F8BD, 'M', u'揤'), + (0x2F8BE, 'M', u'𢯱'), + (0x2F8BF, 'M', u'搢'), + (0x2F8C0, 'M', u'揅'), + (0x2F8C1, 'M', u'掩'), + (0x2F8C2, 'M', u'㨮'), + (0x2F8C3, 'M', u'摩'), + (0x2F8C4, 'M', u'摾'), + (0x2F8C5, 'M', u'撝'), + (0x2F8C6, 'M', u'摷'), + (0x2F8C7, 'M', u'㩬'), + (0x2F8C8, 'M', u'敏'), + (0x2F8C9, 'M', u'敬'), + (0x2F8CA, 'M', u'𣀊'), + (0x2F8CB, 'M', u'旣'), + (0x2F8CC, 'M', u'書'), + (0x2F8CD, 'M', u'晉'), + (0x2F8CE, 'M', u'㬙'), + (0x2F8CF, 'M', u'暑'), + (0x2F8D0, 'M', u'㬈'), + (0x2F8D1, 'M', u'㫤'), + (0x2F8D2, 'M', u'冒'), + (0x2F8D3, 'M', u'冕'), + (0x2F8D4, 'M', u'最'), + (0x2F8D5, 'M', u'暜'), + (0x2F8D6, 'M', u'肭'), + (0x2F8D7, 'M', u'䏙'), + (0x2F8D8, 'M', u'朗'), + (0x2F8D9, 'M', u'望'), + (0x2F8DA, 'M', u'朡'), + (0x2F8DB, 'M', u'杞'), + (0x2F8DC, 'M', u'杓'), + (0x2F8DD, 'M', u'𣏃'), + (0x2F8DE, 'M', u'㭉'), + (0x2F8DF, 'M', u'柺'), + (0x2F8E0, 'M', u'枅'), + (0x2F8E1, 'M', u'桒'), + (0x2F8E2, 'M', u'梅'), + (0x2F8E3, 'M', u'𣑭'), + (0x2F8E4, 'M', u'梎'), + (0x2F8E5, 'M', u'栟'), + (0x2F8E6, 'M', 
u'椔'), + (0x2F8E7, 'M', u'㮝'), + (0x2F8E8, 'M', u'楂'), + (0x2F8E9, 'M', u'榣'), + (0x2F8EA, 'M', u'槪'), + (0x2F8EB, 'M', u'檨'), + (0x2F8EC, 'M', u'𣚣'), + (0x2F8ED, 'M', u'櫛'), + (0x2F8EE, 'M', u'㰘'), + (0x2F8EF, 'M', u'次'), + (0x2F8F0, 'M', u'𣢧'), + (0x2F8F1, 'M', u'歔'), + (0x2F8F2, 'M', u'㱎'), + (0x2F8F3, 'M', u'歲'), + (0x2F8F4, 'M', u'殟'), + (0x2F8F5, 'M', u'殺'), + (0x2F8F6, 'M', u'殻'), + (0x2F8F7, 'M', u'𣪍'), + (0x2F8F8, 'M', u'𡴋'), + (0x2F8F9, 'M', u'𣫺'), + (0x2F8FA, 'M', u'汎'), + (0x2F8FB, 'M', u'𣲼'), + (0x2F8FC, 'M', u'沿'), + (0x2F8FD, 'M', u'泍'), + (0x2F8FE, 'M', u'汧'), + (0x2F8FF, 'M', u'洖'), + (0x2F900, 'M', u'派'), + (0x2F901, 'M', u'海'), + (0x2F902, 'M', u'流'), + (0x2F903, 'M', u'浩'), + (0x2F904, 'M', u'浸'), + ] + +def _seg_75(): + return [ + (0x2F905, 'M', u'涅'), + (0x2F906, 'M', u'𣴞'), + (0x2F907, 'M', u'洴'), + (0x2F908, 'M', u'港'), + (0x2F909, 'M', u'湮'), + (0x2F90A, 'M', u'㴳'), + (0x2F90B, 'M', u'滋'), + (0x2F90C, 'M', u'滇'), + (0x2F90D, 'M', u'𣻑'), + (0x2F90E, 'M', u'淹'), + (0x2F90F, 'M', u'潮'), + (0x2F910, 'M', u'𣽞'), + (0x2F911, 'M', u'𣾎'), + (0x2F912, 'M', u'濆'), + (0x2F913, 'M', u'瀹'), + (0x2F914, 'M', u'瀞'), + (0x2F915, 'M', u'瀛'), + (0x2F916, 'M', u'㶖'), + (0x2F917, 'M', u'灊'), + (0x2F918, 'M', u'災'), + (0x2F919, 'M', u'灷'), + (0x2F91A, 'M', u'炭'), + (0x2F91B, 'M', u'𠔥'), + (0x2F91C, 'M', u'煅'), + (0x2F91D, 'M', u'𤉣'), + (0x2F91E, 'M', u'熜'), + (0x2F91F, 'X'), + (0x2F920, 'M', u'爨'), + (0x2F921, 'M', u'爵'), + (0x2F922, 'M', u'牐'), + (0x2F923, 'M', u'𤘈'), + (0x2F924, 'M', u'犀'), + (0x2F925, 'M', u'犕'), + (0x2F926, 'M', u'𤜵'), + (0x2F927, 'M', u'𤠔'), + (0x2F928, 'M', u'獺'), + (0x2F929, 'M', u'王'), + (0x2F92A, 'M', u'㺬'), + (0x2F92B, 'M', u'玥'), + (0x2F92C, 'M', u'㺸'), + (0x2F92E, 'M', u'瑇'), + (0x2F92F, 'M', u'瑜'), + (0x2F930, 'M', u'瑱'), + (0x2F931, 'M', u'璅'), + (0x2F932, 'M', u'瓊'), + (0x2F933, 'M', u'㼛'), + (0x2F934, 'M', u'甤'), + (0x2F935, 'M', u'𤰶'), + (0x2F936, 'M', u'甾'), + (0x2F937, 'M', u'𤲒'), + (0x2F938, 'M', u'異'), + (0x2F939, 'M', u'𢆟'), + (0x2F93A, 'M', u'瘐'), + (0x2F93B, 'M', u'𤾡'), + (0x2F93C, 'M', u'𤾸'), + (0x2F93D, 'M', u'𥁄'), + (0x2F93E, 'M', u'㿼'), + (0x2F93F, 'M', u'䀈'), + (0x2F940, 'M', u'直'), + (0x2F941, 'M', u'𥃳'), + (0x2F942, 'M', u'𥃲'), + (0x2F943, 'M', u'𥄙'), + (0x2F944, 'M', u'𥄳'), + (0x2F945, 'M', u'眞'), + (0x2F946, 'M', u'真'), + (0x2F948, 'M', u'睊'), + (0x2F949, 'M', u'䀹'), + (0x2F94A, 'M', u'瞋'), + (0x2F94B, 'M', u'䁆'), + (0x2F94C, 'M', u'䂖'), + (0x2F94D, 'M', u'𥐝'), + (0x2F94E, 'M', u'硎'), + (0x2F94F, 'M', u'碌'), + (0x2F950, 'M', u'磌'), + (0x2F951, 'M', u'䃣'), + (0x2F952, 'M', u'𥘦'), + (0x2F953, 'M', u'祖'), + (0x2F954, 'M', u'𥚚'), + (0x2F955, 'M', u'𥛅'), + (0x2F956, 'M', u'福'), + (0x2F957, 'M', u'秫'), + (0x2F958, 'M', u'䄯'), + (0x2F959, 'M', u'穀'), + (0x2F95A, 'M', u'穊'), + (0x2F95B, 'M', u'穏'), + (0x2F95C, 'M', u'𥥼'), + (0x2F95D, 'M', u'𥪧'), + (0x2F95F, 'X'), + (0x2F960, 'M', u'䈂'), + (0x2F961, 'M', u'𥮫'), + (0x2F962, 'M', u'篆'), + (0x2F963, 'M', u'築'), + (0x2F964, 'M', u'䈧'), + (0x2F965, 'M', u'𥲀'), + (0x2F966, 'M', u'糒'), + (0x2F967, 'M', u'䊠'), + (0x2F968, 'M', u'糨'), + (0x2F969, 'M', u'糣'), + (0x2F96A, 'M', u'紀'), + (0x2F96B, 'M', u'𥾆'), + ] + +def _seg_76(): + return [ + (0x2F96C, 'M', u'絣'), + (0x2F96D, 'M', u'䌁'), + (0x2F96E, 'M', u'緇'), + (0x2F96F, 'M', u'縂'), + (0x2F970, 'M', u'繅'), + (0x2F971, 'M', u'䌴'), + (0x2F972, 'M', u'𦈨'), + (0x2F973, 'M', u'𦉇'), + (0x2F974, 'M', u'䍙'), + (0x2F975, 'M', u'𦋙'), + (0x2F976, 'M', u'罺'), + (0x2F977, 'M', u'𦌾'), + (0x2F978, 'M', u'羕'), + (0x2F979, 'M', u'翺'), + (0x2F97A, 'M', u'者'), + (0x2F97B, 
'M', u'𦓚'), + (0x2F97C, 'M', u'𦔣'), + (0x2F97D, 'M', u'聠'), + (0x2F97E, 'M', u'𦖨'), + (0x2F97F, 'M', u'聰'), + (0x2F980, 'M', u'𣍟'), + (0x2F981, 'M', u'䏕'), + (0x2F982, 'M', u'育'), + (0x2F983, 'M', u'脃'), + (0x2F984, 'M', u'䐋'), + (0x2F985, 'M', u'脾'), + (0x2F986, 'M', u'媵'), + (0x2F987, 'M', u'𦞧'), + (0x2F988, 'M', u'𦞵'), + (0x2F989, 'M', u'𣎓'), + (0x2F98A, 'M', u'𣎜'), + (0x2F98B, 'M', u'舁'), + (0x2F98C, 'M', u'舄'), + (0x2F98D, 'M', u'辞'), + (0x2F98E, 'M', u'䑫'), + (0x2F98F, 'M', u'芑'), + (0x2F990, 'M', u'芋'), + (0x2F991, 'M', u'芝'), + (0x2F992, 'M', u'劳'), + (0x2F993, 'M', u'花'), + (0x2F994, 'M', u'芳'), + (0x2F995, 'M', u'芽'), + (0x2F996, 'M', u'苦'), + (0x2F997, 'M', u'𦬼'), + (0x2F998, 'M', u'若'), + (0x2F999, 'M', u'茝'), + (0x2F99A, 'M', u'荣'), + (0x2F99B, 'M', u'莭'), + (0x2F99C, 'M', u'茣'), + (0x2F99D, 'M', u'莽'), + (0x2F99E, 'M', u'菧'), + (0x2F99F, 'M', u'著'), + (0x2F9A0, 'M', u'荓'), + (0x2F9A1, 'M', u'菊'), + (0x2F9A2, 'M', u'菌'), + (0x2F9A3, 'M', u'菜'), + (0x2F9A4, 'M', u'𦰶'), + (0x2F9A5, 'M', u'𦵫'), + (0x2F9A6, 'M', u'𦳕'), + (0x2F9A7, 'M', u'䔫'), + (0x2F9A8, 'M', u'蓱'), + (0x2F9A9, 'M', u'蓳'), + (0x2F9AA, 'M', u'蔖'), + (0x2F9AB, 'M', u'𧏊'), + (0x2F9AC, 'M', u'蕤'), + (0x2F9AD, 'M', u'𦼬'), + (0x2F9AE, 'M', u'䕝'), + (0x2F9AF, 'M', u'䕡'), + (0x2F9B0, 'M', u'𦾱'), + (0x2F9B1, 'M', u'𧃒'), + (0x2F9B2, 'M', u'䕫'), + (0x2F9B3, 'M', u'虐'), + (0x2F9B4, 'M', u'虜'), + (0x2F9B5, 'M', u'虧'), + (0x2F9B6, 'M', u'虩'), + (0x2F9B7, 'M', u'蚩'), + (0x2F9B8, 'M', u'蚈'), + (0x2F9B9, 'M', u'蜎'), + (0x2F9BA, 'M', u'蛢'), + (0x2F9BB, 'M', u'蝹'), + (0x2F9BC, 'M', u'蜨'), + (0x2F9BD, 'M', u'蝫'), + (0x2F9BE, 'M', u'螆'), + (0x2F9BF, 'X'), + (0x2F9C0, 'M', u'蟡'), + (0x2F9C1, 'M', u'蠁'), + (0x2F9C2, 'M', u'䗹'), + (0x2F9C3, 'M', u'衠'), + (0x2F9C4, 'M', u'衣'), + (0x2F9C5, 'M', u'𧙧'), + (0x2F9C6, 'M', u'裗'), + (0x2F9C7, 'M', u'裞'), + (0x2F9C8, 'M', u'䘵'), + (0x2F9C9, 'M', u'裺'), + (0x2F9CA, 'M', u'㒻'), + (0x2F9CB, 'M', u'𧢮'), + (0x2F9CC, 'M', u'𧥦'), + (0x2F9CD, 'M', u'䚾'), + (0x2F9CE, 'M', u'䛇'), + (0x2F9CF, 'M', u'誠'), + ] + +def _seg_77(): + return [ + (0x2F9D0, 'M', u'諭'), + (0x2F9D1, 'M', u'變'), + (0x2F9D2, 'M', u'豕'), + (0x2F9D3, 'M', u'𧲨'), + (0x2F9D4, 'M', u'貫'), + (0x2F9D5, 'M', u'賁'), + (0x2F9D6, 'M', u'贛'), + (0x2F9D7, 'M', u'起'), + (0x2F9D8, 'M', u'𧼯'), + (0x2F9D9, 'M', u'𠠄'), + (0x2F9DA, 'M', u'跋'), + (0x2F9DB, 'M', u'趼'), + (0x2F9DC, 'M', u'跰'), + (0x2F9DD, 'M', u'𠣞'), + (0x2F9DE, 'M', u'軔'), + (0x2F9DF, 'M', u'輸'), + (0x2F9E0, 'M', u'𨗒'), + (0x2F9E1, 'M', u'𨗭'), + (0x2F9E2, 'M', u'邔'), + (0x2F9E3, 'M', u'郱'), + (0x2F9E4, 'M', u'鄑'), + (0x2F9E5, 'M', u'𨜮'), + (0x2F9E6, 'M', u'鄛'), + (0x2F9E7, 'M', u'鈸'), + (0x2F9E8, 'M', u'鋗'), + (0x2F9E9, 'M', u'鋘'), + (0x2F9EA, 'M', u'鉼'), + (0x2F9EB, 'M', u'鏹'), + (0x2F9EC, 'M', u'鐕'), + (0x2F9ED, 'M', u'𨯺'), + (0x2F9EE, 'M', u'開'), + (0x2F9EF, 'M', u'䦕'), + (0x2F9F0, 'M', u'閷'), + (0x2F9F1, 'M', u'𨵷'), + (0x2F9F2, 'M', u'䧦'), + (0x2F9F3, 'M', u'雃'), + (0x2F9F4, 'M', u'嶲'), + (0x2F9F5, 'M', u'霣'), + (0x2F9F6, 'M', u'𩅅'), + (0x2F9F7, 'M', u'𩈚'), + (0x2F9F8, 'M', u'䩮'), + (0x2F9F9, 'M', u'䩶'), + (0x2F9FA, 'M', u'韠'), + (0x2F9FB, 'M', u'𩐊'), + (0x2F9FC, 'M', u'䪲'), + (0x2F9FD, 'M', u'𩒖'), + (0x2F9FE, 'M', u'頋'), + (0x2FA00, 'M', u'頩'), + (0x2FA01, 'M', u'𩖶'), + (0x2FA02, 'M', u'飢'), + (0x2FA03, 'M', u'䬳'), + (0x2FA04, 'M', u'餩'), + (0x2FA05, 'M', u'馧'), + (0x2FA06, 'M', u'駂'), + (0x2FA07, 'M', u'駾'), + (0x2FA08, 'M', u'䯎'), + (0x2FA09, 'M', u'𩬰'), + (0x2FA0A, 'M', u'鬒'), + (0x2FA0B, 'M', u'鱀'), + (0x2FA0C, 'M', u'鳽'), + (0x2FA0D, 'M', u'䳎'), + (0x2FA0E, 'M', u'䳭'), + (0x2FA0F, 
'M', u'鵧'), + (0x2FA10, 'M', u'𪃎'), + (0x2FA11, 'M', u'䳸'), + (0x2FA12, 'M', u'𪄅'), + (0x2FA13, 'M', u'𪈎'), + (0x2FA14, 'M', u'𪊑'), + (0x2FA15, 'M', u'麻'), + (0x2FA16, 'M', u'䵖'), + (0x2FA17, 'M', u'黹'), + (0x2FA18, 'M', u'黾'), + (0x2FA19, 'M', u'鼅'), + (0x2FA1A, 'M', u'鼏'), + (0x2FA1B, 'M', u'鼖'), + (0x2FA1C, 'M', u'鼻'), + (0x2FA1D, 'M', u'𪘀'), + (0x2FA1E, 'X'), + (0xE0100, 'I'), + (0xE01F0, 'X'), + ] + +uts46data = tuple( + _seg_0() + + _seg_1() + + _seg_2() + + _seg_3() + + _seg_4() + + _seg_5() + + _seg_6() + + _seg_7() + + _seg_8() + + _seg_9() + + _seg_10() + + _seg_11() + + _seg_12() + + _seg_13() + + _seg_14() + + _seg_15() + + _seg_16() + + _seg_17() + + _seg_18() + + _seg_19() + + _seg_20() + + _seg_21() + + _seg_22() + + _seg_23() + + _seg_24() + + _seg_25() + + _seg_26() + + _seg_27() + + _seg_28() + + _seg_29() + + _seg_30() + + _seg_31() + + _seg_32() + + _seg_33() + + _seg_34() + + _seg_35() + + _seg_36() + + _seg_37() + + _seg_38() + + _seg_39() + + _seg_40() + + _seg_41() + + _seg_42() + + _seg_43() + + _seg_44() + + _seg_45() + + _seg_46() + + _seg_47() + + _seg_48() + + _seg_49() + + _seg_50() + + _seg_51() + + _seg_52() + + _seg_53() + + _seg_54() + + _seg_55() + + _seg_56() + + _seg_57() + + _seg_58() + + _seg_59() + + _seg_60() + + _seg_61() + + _seg_62() + + _seg_63() + + _seg_64() + + _seg_65() + + _seg_66() + + _seg_67() + + _seg_68() + + _seg_69() + + _seg_70() + + _seg_71() + + _seg_72() + + _seg_73() + + _seg_74() + + _seg_75() + + _seg_76() + + _seg_77() +) diff --git a/env/lib/python3.6/site-packages/pip/_vendor/ipaddress.py b/env/lib/python3.6/site-packages/pip/_vendor/ipaddress.py new file mode 100644 index 0000000..f2d0766 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/ipaddress.py @@ -0,0 +1,2419 @@ +# Copyright 2007 Google Inc. +# Licensed to PSF under a Contributor Agreement. + +"""A fast, lightweight IPv4/IPv6 manipulation library in Python. + +This library is used to create/poke/manipulate IPv4 and IPv6 addresses +and networks. 
+ +""" + +from __future__ import unicode_literals + + +import itertools +import struct + +__version__ = '1.0.22' + +# Compatibility functions +_compat_int_types = (int,) +try: + _compat_int_types = (int, long) +except NameError: + pass +try: + _compat_str = unicode +except NameError: + _compat_str = str + assert bytes != str +if b'\0'[0] == 0: # Python 3 semantics + def _compat_bytes_to_byte_vals(byt): + return byt +else: + def _compat_bytes_to_byte_vals(byt): + return [struct.unpack(b'!B', b)[0] for b in byt] +try: + _compat_int_from_byte_vals = int.from_bytes +except AttributeError: + def _compat_int_from_byte_vals(bytvals, endianess): + assert endianess == 'big' + res = 0 + for bv in bytvals: + assert isinstance(bv, _compat_int_types) + res = (res << 8) + bv + return res + + +def _compat_to_bytes(intval, length, endianess): + assert isinstance(intval, _compat_int_types) + assert endianess == 'big' + if length == 4: + if intval < 0 or intval >= 2 ** 32: + raise struct.error("integer out of range for 'I' format code") + return struct.pack(b'!I', intval) + elif length == 16: + if intval < 0 or intval >= 2 ** 128: + raise struct.error("integer out of range for 'QQ' format code") + return struct.pack(b'!QQ', intval >> 64, intval & 0xffffffffffffffff) + else: + raise NotImplementedError() + + +if hasattr(int, 'bit_length'): + # Not int.bit_length , since that won't work in 2.7 where long exists + def _compat_bit_length(i): + return i.bit_length() +else: + def _compat_bit_length(i): + for res in itertools.count(): + if i >> res == 0: + return res + + +def _compat_range(start, end, step=1): + assert step > 0 + i = start + while i < end: + yield i + i += step + + +class _TotalOrderingMixin(object): + __slots__ = () + + # Helper that derives the other comparison operations from + # __lt__ and __eq__ + # We avoid functools.total_ordering because it doesn't handle + # NotImplemented correctly yet (http://bugs.python.org/issue10042) + def __eq__(self, other): + raise NotImplementedError + + def __ne__(self, other): + equal = self.__eq__(other) + if equal is NotImplemented: + return NotImplemented + return not equal + + def __lt__(self, other): + raise NotImplementedError + + def __le__(self, other): + less = self.__lt__(other) + if less is NotImplemented or not less: + return self.__eq__(other) + return less + + def __gt__(self, other): + less = self.__lt__(other) + if less is NotImplemented: + return NotImplemented + equal = self.__eq__(other) + if equal is NotImplemented: + return NotImplemented + return not (less or equal) + + def __ge__(self, other): + less = self.__lt__(other) + if less is NotImplemented: + return NotImplemented + return not less + + +IPV4LENGTH = 32 +IPV6LENGTH = 128 + + +class AddressValueError(ValueError): + """A Value Error related to the address.""" + + +class NetmaskValueError(ValueError): + """A Value Error related to the netmask.""" + + +def ip_address(address): + """Take an IP string/int and return an object of the correct type. + + Args: + address: A string or integer, the IP address. Either IPv4 or + IPv6 addresses may be supplied; integers less than 2**32 will + be considered to be IPv4 by default. + + Returns: + An IPv4Address or IPv6Address object. 
+ + Raises: + ValueError: if the *address* passed isn't either a v4 or a v6 + address + + """ + try: + return IPv4Address(address) + except (AddressValueError, NetmaskValueError): + pass + + try: + return IPv6Address(address) + except (AddressValueError, NetmaskValueError): + pass + + if isinstance(address, bytes): + raise AddressValueError( + '%r does not appear to be an IPv4 or IPv6 address. ' + 'Did you pass in a bytes (str in Python 2) instead of' + ' a unicode object?' % address) + + raise ValueError('%r does not appear to be an IPv4 or IPv6 address' % + address) + + +def ip_network(address, strict=True): + """Take an IP string/int and return an object of the correct type. + + Args: + address: A string or integer, the IP network. Either IPv4 or + IPv6 networks may be supplied; integers less than 2**32 will + be considered to be IPv4 by default. + + Returns: + An IPv4Network or IPv6Network object. + + Raises: + ValueError: if the string passed isn't either a v4 or a v6 + address. Or if the network has host bits set. + + """ + try: + return IPv4Network(address, strict) + except (AddressValueError, NetmaskValueError): + pass + + try: + return IPv6Network(address, strict) + except (AddressValueError, NetmaskValueError): + pass + + if isinstance(address, bytes): + raise AddressValueError( + '%r does not appear to be an IPv4 or IPv6 network. ' + 'Did you pass in a bytes (str in Python 2) instead of' + ' a unicode object?' % address) + + raise ValueError('%r does not appear to be an IPv4 or IPv6 network' % + address) + + +def ip_interface(address): + """Take an IP string/int and return an object of the correct type. + + Args: + address: A string or integer, the IP address. Either IPv4 or + IPv6 addresses may be supplied; integers less than 2**32 will + be considered to be IPv4 by default. + + Returns: + An IPv4Interface or IPv6Interface object. + + Raises: + ValueError: if the string passed isn't either a v4 or a v6 + address. + + Notes: + The IPv?Interface classes describe an Address on a particular + Network, so they're basically a combination of both the Address + and Network classes. + + """ + try: + return IPv4Interface(address) + except (AddressValueError, NetmaskValueError): + pass + + try: + return IPv6Interface(address) + except (AddressValueError, NetmaskValueError): + pass + + raise ValueError('%r does not appear to be an IPv4 or IPv6 interface' % + address) + + +def v4_int_to_packed(address): + """Represent an address as 4 packed bytes in network (big-endian) order. + + Args: + address: An integer representation of an IPv4 IP address. + + Returns: + The integer address packed as 4 bytes in network (big-endian) order. + + Raises: + ValueError: If the integer is negative or too large to be an + IPv4 IP address. + + """ + try: + return _compat_to_bytes(address, 4, 'big') + except (struct.error, OverflowError): + raise ValueError("Address negative or too large for IPv4") + + +def v6_int_to_packed(address): + """Represent an address as 16 packed bytes in network (big-endian) order. + + Args: + address: An integer representation of an IPv6 IP address. + + Returns: + The integer address packed as 16 bytes in network (big-endian) order. 
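# Illustrative sketch (not part of the vendored file): typical use of the three
# factory helpers defined above (ip_address, ip_network, ip_interface). It runs
# against the standard library's ipaddress module, whose public API this
# vendored backport tracks.
import ipaddress

addr = ipaddress.ip_address('192.0.2.1')            # -> IPv4Address('192.0.2.1')
net = ipaddress.ip_network('2001:db8::/32')          # -> IPv6Network('2001:db8::/32')
iface = ipaddress.ip_interface('192.0.2.1/24')       # address plus its network

print(type(addr).__name__, int(addr))                # IPv4Address 3221225985
print(iface.ip, iface.network)                       # 192.0.2.1 192.0.2.0/24

# strict=False tolerates host bits set in the network part of the string.
print(ipaddress.ip_network('192.0.2.1/24', strict=False))   # 192.0.2.0/24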
+ + """ + try: + return _compat_to_bytes(address, 16, 'big') + except (struct.error, OverflowError): + raise ValueError("Address negative or too large for IPv6") + + +def _split_optional_netmask(address): + """Helper to split the netmask and raise AddressValueError if needed""" + addr = _compat_str(address).split('/') + if len(addr) > 2: + raise AddressValueError("Only one '/' permitted in %r" % address) + return addr + + +def _find_address_range(addresses): + """Find a sequence of sorted deduplicated IPv#Address. + + Args: + addresses: a list of IPv#Address objects. + + Yields: + A tuple containing the first and last IP addresses in the sequence. + + """ + it = iter(addresses) + first = last = next(it) + for ip in it: + if ip._ip != last._ip + 1: + yield first, last + first = ip + last = ip + yield first, last + + +def _count_righthand_zero_bits(number, bits): + """Count the number of zero bits on the right hand side. + + Args: + number: an integer. + bits: maximum number of bits to count. + + Returns: + The number of zero bits on the right hand side of the number. + + """ + if number == 0: + return bits + return min(bits, _compat_bit_length(~number & (number - 1))) + + +def summarize_address_range(first, last): + """Summarize a network range given the first and last IP addresses. + + Example: + >>> list(summarize_address_range(IPv4Address('192.0.2.0'), + ... IPv4Address('192.0.2.130'))) + ... #doctest: +NORMALIZE_WHITESPACE + [IPv4Network('192.0.2.0/25'), IPv4Network('192.0.2.128/31'), + IPv4Network('192.0.2.130/32')] + + Args: + first: the first IPv4Address or IPv6Address in the range. + last: the last IPv4Address or IPv6Address in the range. + + Returns: + An iterator of the summarized IPv(4|6) network objects. + + Raise: + TypeError: + If the first and last objects are not IP addresses. + If the first and last objects are not the same version. + ValueError: + If the last object is not greater than the first. + If the version of the first address is not 4 or 6. + + """ + if (not (isinstance(first, _BaseAddress) and + isinstance(last, _BaseAddress))): + raise TypeError('first and last must be IP addresses, not networks') + if first.version != last.version: + raise TypeError("%s and %s are not of the same version" % ( + first, last)) + if first > last: + raise ValueError('last IP address must be greater than first') + + if first.version == 4: + ip = IPv4Network + elif first.version == 6: + ip = IPv6Network + else: + raise ValueError('unknown IP version') + + ip_bits = first._max_prefixlen + first_int = first._ip + last_int = last._ip + while first_int <= last_int: + nbits = min(_count_righthand_zero_bits(first_int, ip_bits), + _compat_bit_length(last_int - first_int + 1) - 1) + net = ip((first_int, ip_bits - nbits)) + yield net + first_int += 1 << nbits + if first_int - 1 == ip._ALL_ONES: + break + + +def _collapse_addresses_internal(addresses): + """Loops through the addresses, collapsing concurrent netblocks. + + Example: + + ip1 = IPv4Network('192.0.2.0/26') + ip2 = IPv4Network('192.0.2.64/26') + ip3 = IPv4Network('192.0.2.128/26') + ip4 = IPv4Network('192.0.2.192/26') + + _collapse_addresses_internal([ip1, ip2, ip3, ip4]) -> + [IPv4Network('192.0.2.0/24')] + + This shouldn't be called directly; it is called via + collapse_addresses([]). + + Args: + addresses: A list of IPv4Network's or IPv6Network's + + Returns: + A list of IPv4Network's or IPv6Network's depending on what we were + passed. 
+ + """ + # First merge + to_merge = list(addresses) + subnets = {} + while to_merge: + net = to_merge.pop() + supernet = net.supernet() + existing = subnets.get(supernet) + if existing is None: + subnets[supernet] = net + elif existing != net: + # Merge consecutive subnets + del subnets[supernet] + to_merge.append(supernet) + # Then iterate over resulting networks, skipping subsumed subnets + last = None + for net in sorted(subnets.values()): + if last is not None: + # Since they are sorted, + # last.network_address <= net.network_address is a given. + if last.broadcast_address >= net.broadcast_address: + continue + yield net + last = net + + +def collapse_addresses(addresses): + """Collapse a list of IP objects. + + Example: + collapse_addresses([IPv4Network('192.0.2.0/25'), + IPv4Network('192.0.2.128/25')]) -> + [IPv4Network('192.0.2.0/24')] + + Args: + addresses: An iterator of IPv4Network or IPv6Network objects. + + Returns: + An iterator of the collapsed IPv(4|6)Network objects. + + Raises: + TypeError: If passed a list of mixed version objects. + + """ + addrs = [] + ips = [] + nets = [] + + # split IP addresses and networks + for ip in addresses: + if isinstance(ip, _BaseAddress): + if ips and ips[-1]._version != ip._version: + raise TypeError("%s and %s are not of the same version" % ( + ip, ips[-1])) + ips.append(ip) + elif ip._prefixlen == ip._max_prefixlen: + if ips and ips[-1]._version != ip._version: + raise TypeError("%s and %s are not of the same version" % ( + ip, ips[-1])) + try: + ips.append(ip.ip) + except AttributeError: + ips.append(ip.network_address) + else: + if nets and nets[-1]._version != ip._version: + raise TypeError("%s and %s are not of the same version" % ( + ip, nets[-1])) + nets.append(ip) + + # sort and dedup + ips = sorted(set(ips)) + + # find consecutive address ranges in the sorted sequence and summarize them + if ips: + for first, last in _find_address_range(ips): + addrs.extend(summarize_address_range(first, last)) + + return _collapse_addresses_internal(addrs + nets) + + +def get_mixed_type_key(obj): + """Return a key suitable for sorting between networks and addresses. + + Address and Network objects are not sortable by default; they're + fundamentally different so the expression + + IPv4Address('192.0.2.0') <= IPv4Network('192.0.2.0/24') + + doesn't make any sense. There are some times however, where you may wish + to have ipaddress sort these for you anyway. If you need to do this, you + can use this function as the key= argument to sorted(). + + Args: + obj: either a Network or Address object. + Returns: + appropriate key. 
+ + """ + if isinstance(obj, _BaseNetwork): + return obj._get_networks_key() + elif isinstance(obj, _BaseAddress): + return obj._get_address_key() + return NotImplemented + + +class _IPAddressBase(_TotalOrderingMixin): + + """The mother class.""" + + __slots__ = () + + @property + def exploded(self): + """Return the longhand version of the IP address as a string.""" + return self._explode_shorthand_ip_string() + + @property + def compressed(self): + """Return the shorthand version of the IP address as a string.""" + return _compat_str(self) + + @property + def reverse_pointer(self): + """The name of the reverse DNS pointer for the IP address, e.g.: + >>> ipaddress.ip_address("127.0.0.1").reverse_pointer + '1.0.0.127.in-addr.arpa' + >>> ipaddress.ip_address("2001:db8::1").reverse_pointer + '1.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.8.b.d.0.1.0.0.2.ip6.arpa' + + """ + return self._reverse_pointer() + + @property + def version(self): + msg = '%200s has no version specified' % (type(self),) + raise NotImplementedError(msg) + + def _check_int_address(self, address): + if address < 0: + msg = "%d (< 0) is not permitted as an IPv%d address" + raise AddressValueError(msg % (address, self._version)) + if address > self._ALL_ONES: + msg = "%d (>= 2**%d) is not permitted as an IPv%d address" + raise AddressValueError(msg % (address, self._max_prefixlen, + self._version)) + + def _check_packed_address(self, address, expected_len): + address_len = len(address) + if address_len != expected_len: + msg = ( + '%r (len %d != %d) is not permitted as an IPv%d address. ' + 'Did you pass in a bytes (str in Python 2) instead of' + ' a unicode object?') + raise AddressValueError(msg % (address, address_len, + expected_len, self._version)) + + @classmethod + def _ip_int_from_prefix(cls, prefixlen): + """Turn the prefix length into a bitwise netmask + + Args: + prefixlen: An integer, the prefix length. + + Returns: + An integer. + + """ + return cls._ALL_ONES ^ (cls._ALL_ONES >> prefixlen) + + @classmethod + def _prefix_from_ip_int(cls, ip_int): + """Return prefix length from the bitwise netmask. + + Args: + ip_int: An integer, the netmask in expanded bitwise format + + Returns: + An integer, the prefix length. + + Raises: + ValueError: If the input intermingles zeroes & ones + """ + trailing_zeroes = _count_righthand_zero_bits(ip_int, + cls._max_prefixlen) + prefixlen = cls._max_prefixlen - trailing_zeroes + leading_ones = ip_int >> trailing_zeroes + all_ones = (1 << prefixlen) - 1 + if leading_ones != all_ones: + byteslen = cls._max_prefixlen // 8 + details = _compat_to_bytes(ip_int, byteslen, 'big') + msg = 'Netmask pattern %r mixes zeroes & ones' + raise ValueError(msg % details) + return prefixlen + + @classmethod + def _report_invalid_netmask(cls, netmask_str): + msg = '%r is not a valid netmask' % netmask_str + raise NetmaskValueError(msg) + + @classmethod + def _prefix_from_prefix_string(cls, prefixlen_str): + """Return prefix length from a numeric string + + Args: + prefixlen_str: The string to be converted + + Returns: + An integer, the prefix length. 
+ + Raises: + NetmaskValueError: If the input is not a valid netmask + """ + # int allows a leading +/- as well as surrounding whitespace, + # so we ensure that isn't the case + if not _BaseV4._DECIMAL_DIGITS.issuperset(prefixlen_str): + cls._report_invalid_netmask(prefixlen_str) + try: + prefixlen = int(prefixlen_str) + except ValueError: + cls._report_invalid_netmask(prefixlen_str) + if not (0 <= prefixlen <= cls._max_prefixlen): + cls._report_invalid_netmask(prefixlen_str) + return prefixlen + + @classmethod + def _prefix_from_ip_string(cls, ip_str): + """Turn a netmask/hostmask string into a prefix length + + Args: + ip_str: The netmask/hostmask to be converted + + Returns: + An integer, the prefix length. + + Raises: + NetmaskValueError: If the input is not a valid netmask/hostmask + """ + # Parse the netmask/hostmask like an IP address. + try: + ip_int = cls._ip_int_from_string(ip_str) + except AddressValueError: + cls._report_invalid_netmask(ip_str) + + # Try matching a netmask (this would be /1*0*/ as a bitwise regexp). + # Note that the two ambiguous cases (all-ones and all-zeroes) are + # treated as netmasks. + try: + return cls._prefix_from_ip_int(ip_int) + except ValueError: + pass + + # Invert the bits, and try matching a /0+1+/ hostmask instead. + ip_int ^= cls._ALL_ONES + try: + return cls._prefix_from_ip_int(ip_int) + except ValueError: + cls._report_invalid_netmask(ip_str) + + def __reduce__(self): + return self.__class__, (_compat_str(self),) + + +class _BaseAddress(_IPAddressBase): + + """A generic IP object. + + This IP class contains the version independent methods which are + used by single IP addresses. + """ + + __slots__ = () + + def __int__(self): + return self._ip + + def __eq__(self, other): + try: + return (self._ip == other._ip and + self._version == other._version) + except AttributeError: + return NotImplemented + + def __lt__(self, other): + if not isinstance(other, _IPAddressBase): + return NotImplemented + if not isinstance(other, _BaseAddress): + raise TypeError('%s and %s are not of the same type' % ( + self, other)) + if self._version != other._version: + raise TypeError('%s and %s are not of the same version' % ( + self, other)) + if self._ip != other._ip: + return self._ip < other._ip + return False + + # Shorthand for Integer addition and subtraction. This is not + # meant to ever support addition/subtraction of addresses. + def __add__(self, other): + if not isinstance(other, _compat_int_types): + return NotImplemented + return self.__class__(int(self) + other) + + def __sub__(self, other): + if not isinstance(other, _compat_int_types): + return NotImplemented + return self.__class__(int(self) - other) + + def __repr__(self): + return '%s(%r)' % (self.__class__.__name__, _compat_str(self)) + + def __str__(self): + return _compat_str(self._string_from_ip_int(self._ip)) + + def __hash__(self): + return hash(hex(int(self._ip))) + + def _get_address_key(self): + return (self._version, self) + + def __reduce__(self): + return self.__class__, (self._ip,) + + +class _BaseNetwork(_IPAddressBase): + + """A generic IP network object. + + This IP class contains the version independent methods which are + used by networks. + + """ + def __init__(self, address): + self._cache = {} + + def __repr__(self): + return '%s(%r)' % (self.__class__.__name__, _compat_str(self)) + + def __str__(self): + return '%s/%d' % (self.network_address, self.prefixlen) + + def hosts(self): + """Generate Iterator over usable hosts in a network. 
+ + This is like __iter__ except it doesn't return the network + or broadcast addresses. + + """ + network = int(self.network_address) + broadcast = int(self.broadcast_address) + for x in _compat_range(network + 1, broadcast): + yield self._address_class(x) + + def __iter__(self): + network = int(self.network_address) + broadcast = int(self.broadcast_address) + for x in _compat_range(network, broadcast + 1): + yield self._address_class(x) + + def __getitem__(self, n): + network = int(self.network_address) + broadcast = int(self.broadcast_address) + if n >= 0: + if network + n > broadcast: + raise IndexError('address out of range') + return self._address_class(network + n) + else: + n += 1 + if broadcast + n < network: + raise IndexError('address out of range') + return self._address_class(broadcast + n) + + def __lt__(self, other): + if not isinstance(other, _IPAddressBase): + return NotImplemented + if not isinstance(other, _BaseNetwork): + raise TypeError('%s and %s are not of the same type' % ( + self, other)) + if self._version != other._version: + raise TypeError('%s and %s are not of the same version' % ( + self, other)) + if self.network_address != other.network_address: + return self.network_address < other.network_address + if self.netmask != other.netmask: + return self.netmask < other.netmask + return False + + def __eq__(self, other): + try: + return (self._version == other._version and + self.network_address == other.network_address and + int(self.netmask) == int(other.netmask)) + except AttributeError: + return NotImplemented + + def __hash__(self): + return hash(int(self.network_address) ^ int(self.netmask)) + + def __contains__(self, other): + # always false if one is v4 and the other is v6. + if self._version != other._version: + return False + # dealing with another network. + if isinstance(other, _BaseNetwork): + return False + # dealing with another address + else: + # address + return (int(self.network_address) <= int(other._ip) <= + int(self.broadcast_address)) + + def overlaps(self, other): + """Tell if self is partly contained in other.""" + return self.network_address in other or ( + self.broadcast_address in other or ( + other.network_address in self or ( + other.broadcast_address in self))) + + @property + def broadcast_address(self): + x = self._cache.get('broadcast_address') + if x is None: + x = self._address_class(int(self.network_address) | + int(self.hostmask)) + self._cache['broadcast_address'] = x + return x + + @property + def hostmask(self): + x = self._cache.get('hostmask') + if x is None: + x = self._address_class(int(self.netmask) ^ self._ALL_ONES) + self._cache['hostmask'] = x + return x + + @property + def with_prefixlen(self): + return '%s/%d' % (self.network_address, self._prefixlen) + + @property + def with_netmask(self): + return '%s/%s' % (self.network_address, self.netmask) + + @property + def with_hostmask(self): + return '%s/%s' % (self.network_address, self.hostmask) + + @property + def num_addresses(self): + """Number of hosts in the current subnet.""" + return int(self.broadcast_address) - int(self.network_address) + 1 + + @property + def _address_class(self): + # Returning bare address objects (rather than interfaces) allows for + # more consistent behaviour across the network address, broadcast + # address and individual host addresses. 
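# Illustrative sketch (not part of the vendored file) of the _BaseNetwork
# behaviour shown above: iteration, hosts(), indexing, containment, and the
# derived broadcast/hostmask properties. Uses the stdlib ipaddress module.
import ipaddress

net = ipaddress.ip_network('192.0.2.0/29')
print(list(net.hosts()))                            # .1 through .6, network/broadcast excluded
print(net[0], net[-1])                              # 192.0.2.0 192.0.2.7
print(ipaddress.ip_address('192.0.2.5') in net)     # True
print(net.broadcast_address, net.hostmask, net.num_addresses)   # 192.0.2.7 0.0.0.7 8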
+ msg = '%200s has no associated address class' % (type(self),) + raise NotImplementedError(msg) + + @property + def prefixlen(self): + return self._prefixlen + + def address_exclude(self, other): + """Remove an address from a larger block. + + For example: + + addr1 = ip_network('192.0.2.0/28') + addr2 = ip_network('192.0.2.1/32') + list(addr1.address_exclude(addr2)) = + [IPv4Network('192.0.2.0/32'), IPv4Network('192.0.2.2/31'), + IPv4Network('192.0.2.4/30'), IPv4Network('192.0.2.8/29')] + + or IPv6: + + addr1 = ip_network('2001:db8::1/32') + addr2 = ip_network('2001:db8::1/128') + list(addr1.address_exclude(addr2)) = + [ip_network('2001:db8::1/128'), + ip_network('2001:db8::2/127'), + ip_network('2001:db8::4/126'), + ip_network('2001:db8::8/125'), + ... + ip_network('2001:db8:8000::/33')] + + Args: + other: An IPv4Network or IPv6Network object of the same type. + + Returns: + An iterator of the IPv(4|6)Network objects which is self + minus other. + + Raises: + TypeError: If self and other are of differing address + versions, or if other is not a network object. + ValueError: If other is not completely contained by self. + + """ + if not self._version == other._version: + raise TypeError("%s and %s are not of the same version" % ( + self, other)) + + if not isinstance(other, _BaseNetwork): + raise TypeError("%s is not a network object" % other) + + if not other.subnet_of(self): + raise ValueError('%s not contained in %s' % (other, self)) + if other == self: + return + + # Make sure we're comparing the network of other. + other = other.__class__('%s/%s' % (other.network_address, + other.prefixlen)) + + s1, s2 = self.subnets() + while s1 != other and s2 != other: + if other.subnet_of(s1): + yield s2 + s1, s2 = s1.subnets() + elif other.subnet_of(s2): + yield s1 + s1, s2 = s2.subnets() + else: + # If we got here, there's a bug somewhere. + raise AssertionError('Error performing exclusion: ' + 's1: %s s2: %s other: %s' % + (s1, s2, other)) + if s1 == other: + yield s2 + elif s2 == other: + yield s1 + else: + # If we got here, there's a bug somewhere. + raise AssertionError('Error performing exclusion: ' + 's1: %s s2: %s other: %s' % + (s1, s2, other)) + + def compare_networks(self, other): + """Compare two IP objects. + + This is only concerned about the comparison of the integer + representation of the network addresses. This means that the + host bits aren't considered at all in this method. If you want + to compare host bits, you can easily enough do a + 'HostA._ip < HostB._ip' + + Args: + other: An IP object. + + Returns: + If the IP versions of self and other are the same, returns: + + -1 if self < other: + eg: IPv4Network('192.0.2.0/25') < IPv4Network('192.0.2.128/25') + IPv6Network('2001:db8::1000/124') < + IPv6Network('2001:db8::2000/124') + 0 if self == other + eg: IPv4Network('192.0.2.0/24') == IPv4Network('192.0.2.0/24') + IPv6Network('2001:db8::1000/124') == + IPv6Network('2001:db8::1000/124') + 1 if self > other + eg: IPv4Network('192.0.2.128/25') > IPv4Network('192.0.2.0/25') + IPv6Network('2001:db8::2000/124') > + IPv6Network('2001:db8::1000/124') + + Raises: + TypeError if the IP versions are different. + + """ + # does this need to raise a ValueError? 
+ if self._version != other._version: + raise TypeError('%s and %s are not of the same type' % ( + self, other)) + # self._version == other._version below here: + if self.network_address < other.network_address: + return -1 + if self.network_address > other.network_address: + return 1 + # self.network_address == other.network_address below here: + if self.netmask < other.netmask: + return -1 + if self.netmask > other.netmask: + return 1 + return 0 + + def _get_networks_key(self): + """Network-only key function. + + Returns an object that identifies this address' network and + netmask. This function is a suitable "key" argument for sorted() + and list.sort(). + + """ + return (self._version, self.network_address, self.netmask) + + def subnets(self, prefixlen_diff=1, new_prefix=None): + """The subnets which join to make the current subnet. + + In the case that self contains only one IP + (self._prefixlen == 32 for IPv4 or self._prefixlen == 128 + for IPv6), yield an iterator with just ourself. + + Args: + prefixlen_diff: An integer, the amount the prefix length + should be increased by. This should not be set if + new_prefix is also set. + new_prefix: The desired new prefix length. This must be a + larger number (smaller prefix) than the existing prefix. + This should not be set if prefixlen_diff is also set. + + Returns: + An iterator of IPv(4|6) objects. + + Raises: + ValueError: The prefixlen_diff is too small or too large. + OR + prefixlen_diff and new_prefix are both set or new_prefix + is a smaller number than the current prefix (smaller + number means a larger network) + + """ + if self._prefixlen == self._max_prefixlen: + yield self + return + + if new_prefix is not None: + if new_prefix < self._prefixlen: + raise ValueError('new prefix must be longer') + if prefixlen_diff != 1: + raise ValueError('cannot set prefixlen_diff and new_prefix') + prefixlen_diff = new_prefix - self._prefixlen + + if prefixlen_diff < 0: + raise ValueError('prefix length diff must be > 0') + new_prefixlen = self._prefixlen + prefixlen_diff + + if new_prefixlen > self._max_prefixlen: + raise ValueError( + 'prefix length diff %d is invalid for netblock %s' % ( + new_prefixlen, self)) + + start = int(self.network_address) + end = int(self.broadcast_address) + 1 + step = (int(self.hostmask) + 1) >> prefixlen_diff + for new_addr in _compat_range(start, end, step): + current = self.__class__((new_addr, new_prefixlen)) + yield current + + def supernet(self, prefixlen_diff=1, new_prefix=None): + """The supernet containing the current network. + + Args: + prefixlen_diff: An integer, the amount the prefix length of + the network should be decreased by. For example, given a + /24 network and a prefixlen_diff of 3, a supernet with a + /21 netmask is returned. + + Returns: + An IPv4 network object. + + Raises: + ValueError: If self.prefixlen - prefixlen_diff < 0. I.e., you have + a negative prefix length. 
+ OR + If prefixlen_diff and new_prefix are both set or new_prefix is a + larger number than the current prefix (larger number means a + smaller network) + + """ + if self._prefixlen == 0: + return self + + if new_prefix is not None: + if new_prefix > self._prefixlen: + raise ValueError('new prefix must be shorter') + if prefixlen_diff != 1: + raise ValueError('cannot set prefixlen_diff and new_prefix') + prefixlen_diff = self._prefixlen - new_prefix + + new_prefixlen = self.prefixlen - prefixlen_diff + if new_prefixlen < 0: + raise ValueError( + 'current prefixlen is %d, cannot have a prefixlen_diff of %d' % + (self.prefixlen, prefixlen_diff)) + return self.__class__(( + int(self.network_address) & (int(self.netmask) << prefixlen_diff), + new_prefixlen)) + + @property + def is_multicast(self): + """Test if the address is reserved for multicast use. + + Returns: + A boolean, True if the address is a multicast address. + See RFC 2373 2.7 for details. + + """ + return (self.network_address.is_multicast and + self.broadcast_address.is_multicast) + + @staticmethod + def _is_subnet_of(a, b): + try: + # Always false if one is v4 and the other is v6. + if a._version != b._version: + raise TypeError("%s and %s are not of the same version" (a, b)) + return (b.network_address <= a.network_address and + b.broadcast_address >= a.broadcast_address) + except AttributeError: + raise TypeError("Unable to test subnet containment " + "between %s and %s" % (a, b)) + + def subnet_of(self, other): + """Return True if this network is a subnet of other.""" + return self._is_subnet_of(self, other) + + def supernet_of(self, other): + """Return True if this network is a supernet of other.""" + return self._is_subnet_of(other, self) + + @property + def is_reserved(self): + """Test if the address is otherwise IETF reserved. + + Returns: + A boolean, True if the address is within one of the + reserved IPv6 Network ranges. + + """ + return (self.network_address.is_reserved and + self.broadcast_address.is_reserved) + + @property + def is_link_local(self): + """Test if the address is reserved for link-local. + + Returns: + A boolean, True if the address is reserved per RFC 4291. + + """ + return (self.network_address.is_link_local and + self.broadcast_address.is_link_local) + + @property + def is_private(self): + """Test if this address is allocated for private networks. + + Returns: + A boolean, True if the address is reserved per + iana-ipv4-special-registry or iana-ipv6-special-registry. + + """ + return (self.network_address.is_private and + self.broadcast_address.is_private) + + @property + def is_global(self): + """Test if this address is allocated for public networks. + + Returns: + A boolean, True if the address is not reserved per + iana-ipv4-special-registry or iana-ipv6-special-registry. + + """ + return not self.is_private + + @property + def is_unspecified(self): + """Test if the address is unspecified. + + Returns: + A boolean, True if this is the unspecified address as defined in + RFC 2373 2.5.2. + + """ + return (self.network_address.is_unspecified and + self.broadcast_address.is_unspecified) + + @property + def is_loopback(self): + """Test if the address is a loopback address. + + Returns: + A boolean, True if the address is a loopback address as defined in + RFC 2373 2.5.3. + + """ + return (self.network_address.is_loopback and + self.broadcast_address.is_loopback) + + +class _BaseV4(object): + + """Base IPv4 object. 
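# Illustrative sketch (not from the vendored file) of the subnetting helpers
# defined just above (subnets, supernet, subnet_of/supernet_of), run against
# the stdlib ipaddress module; subnet_of/supernet_of need Python 3.7+ there.
import ipaddress

net = ipaddress.ip_network('192.0.2.0/24')
print(list(net.subnets()))                   # the two /25 halves
print(list(net.subnets(new_prefix=26)))      # four /26 blocks
print(net.supernet(prefixlen_diff=3))        # 192.0.0.0/21

quarter = ipaddress.ip_network('192.0.2.64/26')
print(quarter.subnet_of(net), net.supernet_of(quarter))    # True True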
+ + The following methods are used by IPv4 objects in both single IP + addresses and networks. + + """ + + __slots__ = () + _version = 4 + # Equivalent to 255.255.255.255 or 32 bits of 1's. + _ALL_ONES = (2 ** IPV4LENGTH) - 1 + _DECIMAL_DIGITS = frozenset('0123456789') + + # the valid octets for host and netmasks. only useful for IPv4. + _valid_mask_octets = frozenset([255, 254, 252, 248, 240, 224, 192, 128, 0]) + + _max_prefixlen = IPV4LENGTH + # There are only a handful of valid v4 netmasks, so we cache them all + # when constructed (see _make_netmask()). + _netmask_cache = {} + + def _explode_shorthand_ip_string(self): + return _compat_str(self) + + @classmethod + def _make_netmask(cls, arg): + """Make a (netmask, prefix_len) tuple from the given argument. + + Argument can be: + - an integer (the prefix length) + - a string representing the prefix length (e.g. "24") + - a string representing the prefix netmask (e.g. "255.255.255.0") + """ + if arg not in cls._netmask_cache: + if isinstance(arg, _compat_int_types): + prefixlen = arg + else: + try: + # Check for a netmask in prefix length form + prefixlen = cls._prefix_from_prefix_string(arg) + except NetmaskValueError: + # Check for a netmask or hostmask in dotted-quad form. + # This may raise NetmaskValueError. + prefixlen = cls._prefix_from_ip_string(arg) + netmask = IPv4Address(cls._ip_int_from_prefix(prefixlen)) + cls._netmask_cache[arg] = netmask, prefixlen + return cls._netmask_cache[arg] + + @classmethod + def _ip_int_from_string(cls, ip_str): + """Turn the given IP string into an integer for comparison. + + Args: + ip_str: A string, the IP ip_str. + + Returns: + The IP ip_str as an integer. + + Raises: + AddressValueError: if ip_str isn't a valid IPv4 Address. + + """ + if not ip_str: + raise AddressValueError('Address cannot be empty') + + octets = ip_str.split('.') + if len(octets) != 4: + raise AddressValueError("Expected 4 octets in %r" % ip_str) + + try: + return _compat_int_from_byte_vals( + map(cls._parse_octet, octets), 'big') + except ValueError as exc: + raise AddressValueError("%s in %r" % (exc, ip_str)) + + @classmethod + def _parse_octet(cls, octet_str): + """Convert a decimal octet into an integer. + + Args: + octet_str: A string, the number to parse. + + Returns: + The octet as an integer. + + Raises: + ValueError: if the octet isn't strictly a decimal from [0..255]. + + """ + if not octet_str: + raise ValueError("Empty octet not permitted") + # Whitelist the characters, since int() allows a lot of bizarre stuff. + if not cls._DECIMAL_DIGITS.issuperset(octet_str): + msg = "Only decimal digits permitted in %r" + raise ValueError(msg % octet_str) + # We do the length check second, since the invalid character error + # is likely to be more informative for the user + if len(octet_str) > 3: + msg = "At most 3 characters permitted in %r" + raise ValueError(msg % octet_str) + # Convert to integer (we know digits are legal) + octet_int = int(octet_str, 10) + # Any octets that look like they *might* be written in octal, + # and which don't look exactly the same in both octal and + # decimal are rejected as ambiguous + if octet_int > 7 and octet_str[0] == '0': + msg = "Ambiguous (octal/decimal) value in %r not permitted" + raise ValueError(msg % octet_str) + if octet_int > 255: + raise ValueError("Octet %d (> 255) not permitted" % octet_int) + return octet_int + + @classmethod + def _string_from_ip_int(cls, ip_int): + """Turns a 32-bit integer into dotted decimal notation. + + Args: + ip_int: An integer, the IP address. 
+ + Returns: + The IP address as a string in dotted decimal notation. + + """ + return '.'.join(_compat_str(struct.unpack(b'!B', b)[0] + if isinstance(b, bytes) + else b) + for b in _compat_to_bytes(ip_int, 4, 'big')) + + def _is_hostmask(self, ip_str): + """Test if the IP string is a hostmask (rather than a netmask). + + Args: + ip_str: A string, the potential hostmask. + + Returns: + A boolean, True if the IP string is a hostmask. + + """ + bits = ip_str.split('.') + try: + parts = [x for x in map(int, bits) if x in self._valid_mask_octets] + except ValueError: + return False + if len(parts) != len(bits): + return False + if parts[0] < parts[-1]: + return True + return False + + def _reverse_pointer(self): + """Return the reverse DNS pointer name for the IPv4 address. + + This implements the method described in RFC1035 3.5. + + """ + reverse_octets = _compat_str(self).split('.')[::-1] + return '.'.join(reverse_octets) + '.in-addr.arpa' + + @property + def max_prefixlen(self): + return self._max_prefixlen + + @property + def version(self): + return self._version + + +class IPv4Address(_BaseV4, _BaseAddress): + + """Represent and manipulate single IPv4 Addresses.""" + + __slots__ = ('_ip', '__weakref__') + + def __init__(self, address): + + """ + Args: + address: A string or integer representing the IP + + Additionally, an integer can be passed, so + IPv4Address('192.0.2.1') == IPv4Address(3221225985). + or, more generally + IPv4Address(int(IPv4Address('192.0.2.1'))) == + IPv4Address('192.0.2.1') + + Raises: + AddressValueError: If ipaddress isn't a valid IPv4 address. + + """ + # Efficient constructor from integer. + if isinstance(address, _compat_int_types): + self._check_int_address(address) + self._ip = address + return + + # Constructing from a packed address + if isinstance(address, bytes): + self._check_packed_address(address, 4) + bvs = _compat_bytes_to_byte_vals(address) + self._ip = _compat_int_from_byte_vals(bvs, 'big') + return + + # Assume input argument to be string or any object representation + # which converts into a formatted IP string. + addr_str = _compat_str(address) + if '/' in addr_str: + raise AddressValueError("Unexpected '/' in %r" % address) + self._ip = self._ip_int_from_string(addr_str) + + @property + def packed(self): + """The binary representation of this address.""" + return v4_int_to_packed(self._ip) + + @property + def is_reserved(self): + """Test if the address is otherwise IETF reserved. + + Returns: + A boolean, True if the address is within the + reserved IPv4 Network range. + + """ + return self in self._constants._reserved_network + + @property + def is_private(self): + """Test if this address is allocated for private networks. + + Returns: + A boolean, True if the address is reserved per + iana-ipv4-special-registry. + + """ + return any(self in net for net in self._constants._private_networks) + + @property + def is_global(self): + return ( + self not in self._constants._public_network and + not self.is_private) + + @property + def is_multicast(self): + """Test if the address is reserved for multicast use. + + Returns: + A boolean, True if the address is multicast. + See RFC 3171 for details. + + """ + return self in self._constants._multicast_network + + @property + def is_unspecified(self): + """Test if the address is unspecified. + + Returns: + A boolean, True if this is the unspecified address as defined in + RFC 5735 3. 
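# Illustrative sketch (not part of the vendored file) of the classification
# properties on IPv4Address documented above, via the stdlib ipaddress module.
import ipaddress

for text in ('10.1.2.3', '8.8.8.8', '224.0.0.251', '127.0.0.1', '169.254.0.10'):
    a = ipaddress.ip_address(text)
    print(text, a.is_private, a.is_global, a.is_multicast,
          a.is_loopback, a.is_link_local)
# 10.1.2.3 is private, 8.8.8.8 is global, 224.0.0.251 is multicast,
# 127.0.0.1 is loopback, 169.254.0.10 is link-local.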
+ + """ + return self == self._constants._unspecified_address + + @property + def is_loopback(self): + """Test if the address is a loopback address. + + Returns: + A boolean, True if the address is a loopback per RFC 3330. + + """ + return self in self._constants._loopback_network + + @property + def is_link_local(self): + """Test if the address is reserved for link-local. + + Returns: + A boolean, True if the address is link-local per RFC 3927. + + """ + return self in self._constants._linklocal_network + + +class IPv4Interface(IPv4Address): + + def __init__(self, address): + if isinstance(address, (bytes, _compat_int_types)): + IPv4Address.__init__(self, address) + self.network = IPv4Network(self._ip) + self._prefixlen = self._max_prefixlen + return + + if isinstance(address, tuple): + IPv4Address.__init__(self, address[0]) + if len(address) > 1: + self._prefixlen = int(address[1]) + else: + self._prefixlen = self._max_prefixlen + + self.network = IPv4Network(address, strict=False) + self.netmask = self.network.netmask + self.hostmask = self.network.hostmask + return + + addr = _split_optional_netmask(address) + IPv4Address.__init__(self, addr[0]) + + self.network = IPv4Network(address, strict=False) + self._prefixlen = self.network._prefixlen + + self.netmask = self.network.netmask + self.hostmask = self.network.hostmask + + def __str__(self): + return '%s/%d' % (self._string_from_ip_int(self._ip), + self.network.prefixlen) + + def __eq__(self, other): + address_equal = IPv4Address.__eq__(self, other) + if not address_equal or address_equal is NotImplemented: + return address_equal + try: + return self.network == other.network + except AttributeError: + # An interface with an associated network is NOT the + # same as an unassociated address. That's why the hash + # takes the extra info into account. + return False + + def __lt__(self, other): + address_less = IPv4Address.__lt__(self, other) + if address_less is NotImplemented: + return NotImplemented + try: + return (self.network < other.network or + self.network == other.network and address_less) + except AttributeError: + # We *do* allow addresses and interfaces to be sorted. The + # unassociated address is considered less than all interfaces. + return False + + def __hash__(self): + return self._ip ^ self._prefixlen ^ int(self.network.network_address) + + __reduce__ = _IPAddressBase.__reduce__ + + @property + def ip(self): + return IPv4Address(self._ip) + + @property + def with_prefixlen(self): + return '%s/%s' % (self._string_from_ip_int(self._ip), + self._prefixlen) + + @property + def with_netmask(self): + return '%s/%s' % (self._string_from_ip_int(self._ip), + self.netmask) + + @property + def with_hostmask(self): + return '%s/%s' % (self._string_from_ip_int(self._ip), + self.hostmask) + + +class IPv4Network(_BaseV4, _BaseNetwork): + + """This class represents and manipulates 32-bit IPv4 network + addresses.. + + Attributes: [examples for IPv4Network('192.0.2.0/27')] + .network_address: IPv4Address('192.0.2.0') + .hostmask: IPv4Address('0.0.0.31') + .broadcast_address: IPv4Address('192.0.2.32') + .netmask: IPv4Address('255.255.255.224') + .prefixlen: 27 + + """ + # Class to use when creating address objects + _address_class = IPv4Address + + def __init__(self, address, strict=True): + + """Instantiate a new IPv4 network object. + + Args: + address: A string or integer representing the IP [& network]. + '192.0.2.0/24' + '192.0.2.0/255.255.255.0' + '192.0.0.2/0.0.0.255' + are all functionally the same in IPv4. 
Similarly, + '192.0.2.1' + '192.0.2.1/255.255.255.255' + '192.0.2.1/32' + are also functionally equivalent. That is to say, failing to + provide a subnetmask will create an object with a mask of /32. + + If the mask (portion after the / in the argument) is given in + dotted quad form, it is treated as a netmask if it starts with a + non-zero field (e.g. /255.0.0.0 == /8) and as a hostmask if it + starts with a zero field (e.g. 0.255.255.255 == /8), with the + single exception of an all-zero mask which is treated as a + netmask == /0. If no mask is given, a default of /32 is used. + + Additionally, an integer can be passed, so + IPv4Network('192.0.2.1') == IPv4Network(3221225985) + or, more generally + IPv4Interface(int(IPv4Interface('192.0.2.1'))) == + IPv4Interface('192.0.2.1') + + Raises: + AddressValueError: If ipaddress isn't a valid IPv4 address. + NetmaskValueError: If the netmask isn't valid for + an IPv4 address. + ValueError: If strict is True and a network address is not + supplied. + + """ + _BaseNetwork.__init__(self, address) + + # Constructing from a packed address or integer + if isinstance(address, (_compat_int_types, bytes)): + self.network_address = IPv4Address(address) + self.netmask, self._prefixlen = self._make_netmask( + self._max_prefixlen) + # fixme: address/network test here. + return + + if isinstance(address, tuple): + if len(address) > 1: + arg = address[1] + else: + # We weren't given an address[1] + arg = self._max_prefixlen + self.network_address = IPv4Address(address[0]) + self.netmask, self._prefixlen = self._make_netmask(arg) + packed = int(self.network_address) + if packed & int(self.netmask) != packed: + if strict: + raise ValueError('%s has host bits set' % self) + else: + self.network_address = IPv4Address(packed & + int(self.netmask)) + return + + # Assume input argument to be string or any object representation + # which converts into a formatted IP prefix string. + addr = _split_optional_netmask(address) + self.network_address = IPv4Address(self._ip_int_from_string(addr[0])) + + if len(addr) == 2: + arg = addr[1] + else: + arg = self._max_prefixlen + self.netmask, self._prefixlen = self._make_netmask(arg) + + if strict: + if (IPv4Address(int(self.network_address) & int(self.netmask)) != + self.network_address): + raise ValueError('%s has host bits set' % self) + self.network_address = IPv4Address(int(self.network_address) & + int(self.netmask)) + + if self._prefixlen == (self._max_prefixlen - 1): + self.hosts = self.__iter__ + + @property + def is_global(self): + """Test if this address is allocated for public networks. + + Returns: + A boolean, True if the address is not reserved per + iana-ipv4-special-registry. 
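# Illustrative sketch (not from the vendored file) of IPv4Network construction
# and the strict flag described above, using the stdlib ipaddress module.
import ipaddress

print(ipaddress.IPv4Network('192.0.2.0/24'))                 # prefix-length form
print(ipaddress.IPv4Network('192.0.2.0/255.255.255.0'))      # netmask form
print(ipaddress.IPv4Network('192.0.2.0/0.0.0.255'))          # hostmask form, also /24

try:
    ipaddress.IPv4Network('192.0.2.1/24')                    # host bits set, strict
except ValueError as exc:
    print(exc)                                               # ... has host bits set

print(ipaddress.IPv4Network('192.0.2.1/24', strict=False))   # 192.0.2.0/24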
+ + """ + return (not (self.network_address in IPv4Network('100.64.0.0/10') and + self.broadcast_address in IPv4Network('100.64.0.0/10')) and + not self.is_private) + + +class _IPv4Constants(object): + + _linklocal_network = IPv4Network('169.254.0.0/16') + + _loopback_network = IPv4Network('127.0.0.0/8') + + _multicast_network = IPv4Network('224.0.0.0/4') + + _public_network = IPv4Network('100.64.0.0/10') + + _private_networks = [ + IPv4Network('0.0.0.0/8'), + IPv4Network('10.0.0.0/8'), + IPv4Network('127.0.0.0/8'), + IPv4Network('169.254.0.0/16'), + IPv4Network('172.16.0.0/12'), + IPv4Network('192.0.0.0/29'), + IPv4Network('192.0.0.170/31'), + IPv4Network('192.0.2.0/24'), + IPv4Network('192.168.0.0/16'), + IPv4Network('198.18.0.0/15'), + IPv4Network('198.51.100.0/24'), + IPv4Network('203.0.113.0/24'), + IPv4Network('240.0.0.0/4'), + IPv4Network('255.255.255.255/32'), + ] + + _reserved_network = IPv4Network('240.0.0.0/4') + + _unspecified_address = IPv4Address('0.0.0.0') + + +IPv4Address._constants = _IPv4Constants + + +class _BaseV6(object): + + """Base IPv6 object. + + The following methods are used by IPv6 objects in both single IP + addresses and networks. + + """ + + __slots__ = () + _version = 6 + _ALL_ONES = (2 ** IPV6LENGTH) - 1 + _HEXTET_COUNT = 8 + _HEX_DIGITS = frozenset('0123456789ABCDEFabcdef') + _max_prefixlen = IPV6LENGTH + + # There are only a bunch of valid v6 netmasks, so we cache them all + # when constructed (see _make_netmask()). + _netmask_cache = {} + + @classmethod + def _make_netmask(cls, arg): + """Make a (netmask, prefix_len) tuple from the given argument. + + Argument can be: + - an integer (the prefix length) + - a string representing the prefix length (e.g. "24") + - a string representing the prefix netmask (e.g. "255.255.255.0") + """ + if arg not in cls._netmask_cache: + if isinstance(arg, _compat_int_types): + prefixlen = arg + else: + prefixlen = cls._prefix_from_prefix_string(arg) + netmask = IPv6Address(cls._ip_int_from_prefix(prefixlen)) + cls._netmask_cache[arg] = netmask, prefixlen + return cls._netmask_cache[arg] + + @classmethod + def _ip_int_from_string(cls, ip_str): + """Turn an IPv6 ip_str into an integer. + + Args: + ip_str: A string, the IPv6 ip_str. + + Returns: + An int, the IPv6 address + + Raises: + AddressValueError: if ip_str isn't a valid IPv6 Address. + + """ + if not ip_str: + raise AddressValueError('Address cannot be empty') + + parts = ip_str.split(':') + + # An IPv6 address needs at least 2 colons (3 parts). + _min_parts = 3 + if len(parts) < _min_parts: + msg = "At least %d parts expected in %r" % (_min_parts, ip_str) + raise AddressValueError(msg) + + # If the address has an IPv4-style suffix, convert it to hexadecimal. + if '.' in parts[-1]: + try: + ipv4_int = IPv4Address(parts.pop())._ip + except AddressValueError as exc: + raise AddressValueError("%s in %r" % (exc, ip_str)) + parts.append('%x' % ((ipv4_int >> 16) & 0xFFFF)) + parts.append('%x' % (ipv4_int & 0xFFFF)) + + # An IPv6 address can't have more than 8 colons (9 parts). + # The extra colon comes from using the "::" notation for a single + # leading or trailing zero part. + _max_parts = cls._HEXTET_COUNT + 1 + if len(parts) > _max_parts: + msg = "At most %d colons permitted in %r" % ( + _max_parts - 1, ip_str) + raise AddressValueError(msg) + + # Disregarding the endpoints, find '::' with nothing in between. + # This indicates that a run of zeroes has been skipped. 
+ skip_index = None + for i in _compat_range(1, len(parts) - 1): + if not parts[i]: + if skip_index is not None: + # Can't have more than one '::' + msg = "At most one '::' permitted in %r" % ip_str + raise AddressValueError(msg) + skip_index = i + + # parts_hi is the number of parts to copy from above/before the '::' + # parts_lo is the number of parts to copy from below/after the '::' + if skip_index is not None: + # If we found a '::', then check if it also covers the endpoints. + parts_hi = skip_index + parts_lo = len(parts) - skip_index - 1 + if not parts[0]: + parts_hi -= 1 + if parts_hi: + msg = "Leading ':' only permitted as part of '::' in %r" + raise AddressValueError(msg % ip_str) # ^: requires ^:: + if not parts[-1]: + parts_lo -= 1 + if parts_lo: + msg = "Trailing ':' only permitted as part of '::' in %r" + raise AddressValueError(msg % ip_str) # :$ requires ::$ + parts_skipped = cls._HEXTET_COUNT - (parts_hi + parts_lo) + if parts_skipped < 1: + msg = "Expected at most %d other parts with '::' in %r" + raise AddressValueError(msg % (cls._HEXTET_COUNT - 1, ip_str)) + else: + # Otherwise, allocate the entire address to parts_hi. The + # endpoints could still be empty, but _parse_hextet() will check + # for that. + if len(parts) != cls._HEXTET_COUNT: + msg = "Exactly %d parts expected without '::' in %r" + raise AddressValueError(msg % (cls._HEXTET_COUNT, ip_str)) + if not parts[0]: + msg = "Leading ':' only permitted as part of '::' in %r" + raise AddressValueError(msg % ip_str) # ^: requires ^:: + if not parts[-1]: + msg = "Trailing ':' only permitted as part of '::' in %r" + raise AddressValueError(msg % ip_str) # :$ requires ::$ + parts_hi = len(parts) + parts_lo = 0 + parts_skipped = 0 + + try: + # Now, parse the hextets into a 128-bit integer. + ip_int = 0 + for i in range(parts_hi): + ip_int <<= 16 + ip_int |= cls._parse_hextet(parts[i]) + ip_int <<= 16 * parts_skipped + for i in range(-parts_lo, 0): + ip_int <<= 16 + ip_int |= cls._parse_hextet(parts[i]) + return ip_int + except ValueError as exc: + raise AddressValueError("%s in %r" % (exc, ip_str)) + + @classmethod + def _parse_hextet(cls, hextet_str): + """Convert an IPv6 hextet string into an integer. + + Args: + hextet_str: A string, the number to parse. + + Returns: + The hextet as an integer. + + Raises: + ValueError: if the input isn't strictly a hex number from + [0..FFFF]. + + """ + # Whitelist the characters, since int() allows a lot of bizarre stuff. + if not cls._HEX_DIGITS.issuperset(hextet_str): + raise ValueError("Only hex digits permitted in %r" % hextet_str) + # We do the length check second, since the invalid character error + # is likely to be more informative for the user + if len(hextet_str) > 4: + msg = "At most 4 characters permitted in %r" + raise ValueError(msg % hextet_str) + # Length check means we can skip checking the integer value + return int(hextet_str, 16) + + @classmethod + def _compress_hextets(cls, hextets): + """Compresses a list of hextets. + + Compresses a list of strings, replacing the longest continuous + sequence of "0" in the list with "" and adding empty strings at + the beginning or at the end of the string such that subsequently + calling ":".join(hextets) will produce the compressed version of + the IPv6 address. + + Args: + hextets: A list of strings, the hextets to compress. + + Returns: + A list of strings. 
+ + """ + best_doublecolon_start = -1 + best_doublecolon_len = 0 + doublecolon_start = -1 + doublecolon_len = 0 + for index, hextet in enumerate(hextets): + if hextet == '0': + doublecolon_len += 1 + if doublecolon_start == -1: + # Start of a sequence of zeros. + doublecolon_start = index + if doublecolon_len > best_doublecolon_len: + # This is the longest sequence of zeros so far. + best_doublecolon_len = doublecolon_len + best_doublecolon_start = doublecolon_start + else: + doublecolon_len = 0 + doublecolon_start = -1 + + if best_doublecolon_len > 1: + best_doublecolon_end = (best_doublecolon_start + + best_doublecolon_len) + # For zeros at the end of the address. + if best_doublecolon_end == len(hextets): + hextets += [''] + hextets[best_doublecolon_start:best_doublecolon_end] = [''] + # For zeros at the beginning of the address. + if best_doublecolon_start == 0: + hextets = [''] + hextets + + return hextets + + @classmethod + def _string_from_ip_int(cls, ip_int=None): + """Turns a 128-bit integer into hexadecimal notation. + + Args: + ip_int: An integer, the IP address. + + Returns: + A string, the hexadecimal representation of the address. + + Raises: + ValueError: The address is bigger than 128 bits of all ones. + + """ + if ip_int is None: + ip_int = int(cls._ip) + + if ip_int > cls._ALL_ONES: + raise ValueError('IPv6 address is too large') + + hex_str = '%032x' % ip_int + hextets = ['%x' % int(hex_str[x:x + 4], 16) for x in range(0, 32, 4)] + + hextets = cls._compress_hextets(hextets) + return ':'.join(hextets) + + def _explode_shorthand_ip_string(self): + """Expand a shortened IPv6 address. + + Args: + ip_str: A string, the IPv6 address. + + Returns: + A string, the expanded IPv6 address. + + """ + if isinstance(self, IPv6Network): + ip_str = _compat_str(self.network_address) + elif isinstance(self, IPv6Interface): + ip_str = _compat_str(self.ip) + else: + ip_str = _compat_str(self) + + ip_int = self._ip_int_from_string(ip_str) + hex_str = '%032x' % ip_int + parts = [hex_str[x:x + 4] for x in range(0, 32, 4)] + if isinstance(self, (_BaseNetwork, IPv6Interface)): + return '%s/%d' % (':'.join(parts), self._prefixlen) + return ':'.join(parts) + + def _reverse_pointer(self): + """Return the reverse DNS pointer name for the IPv6 address. + + This implements the method described in RFC3596 2.5. + + """ + reverse_chars = self.exploded[::-1].replace(':', '') + return '.'.join(reverse_chars) + '.ip6.arpa' + + @property + def max_prefixlen(self): + return self._max_prefixlen + + @property + def version(self): + return self._version + + +class IPv6Address(_BaseV6, _BaseAddress): + + """Represent and manipulate single IPv6 Addresses.""" + + __slots__ = ('_ip', '__weakref__') + + def __init__(self, address): + """Instantiate a new IPv6 address object. + + Args: + address: A string or integer representing the IP + + Additionally, an integer can be passed, so + IPv6Address('2001:db8::') == + IPv6Address(42540766411282592856903984951653826560) + or, more generally + IPv6Address(int(IPv6Address('2001:db8::'))) == + IPv6Address('2001:db8::') + + Raises: + AddressValueError: If address isn't a valid IPv6 address. + + """ + # Efficient constructor from integer. 
+ if isinstance(address, _compat_int_types): + self._check_int_address(address) + self._ip = address + return + + # Constructing from a packed address + if isinstance(address, bytes): + self._check_packed_address(address, 16) + bvs = _compat_bytes_to_byte_vals(address) + self._ip = _compat_int_from_byte_vals(bvs, 'big') + return + + # Assume input argument to be string or any object representation + # which converts into a formatted IP string. + addr_str = _compat_str(address) + if '/' in addr_str: + raise AddressValueError("Unexpected '/' in %r" % address) + self._ip = self._ip_int_from_string(addr_str) + + @property + def packed(self): + """The binary representation of this address.""" + return v6_int_to_packed(self._ip) + + @property + def is_multicast(self): + """Test if the address is reserved for multicast use. + + Returns: + A boolean, True if the address is a multicast address. + See RFC 2373 2.7 for details. + + """ + return self in self._constants._multicast_network + + @property + def is_reserved(self): + """Test if the address is otherwise IETF reserved. + + Returns: + A boolean, True if the address is within one of the + reserved IPv6 Network ranges. + + """ + return any(self in x for x in self._constants._reserved_networks) + + @property + def is_link_local(self): + """Test if the address is reserved for link-local. + + Returns: + A boolean, True if the address is reserved per RFC 4291. + + """ + return self in self._constants._linklocal_network + + @property + def is_site_local(self): + """Test if the address is reserved for site-local. + + Note that the site-local address space has been deprecated by RFC 3879. + Use is_private to test if this address is in the space of unique local + addresses as defined by RFC 4193. + + Returns: + A boolean, True if the address is reserved per RFC 3513 2.5.6. + + """ + return self in self._constants._sitelocal_network + + @property + def is_private(self): + """Test if this address is allocated for private networks. + + Returns: + A boolean, True if the address is reserved per + iana-ipv6-special-registry. + + """ + return any(self in net for net in self._constants._private_networks) + + @property + def is_global(self): + """Test if this address is allocated for public networks. + + Returns: + A boolean, true if the address is not reserved per + iana-ipv6-special-registry. + + """ + return not self.is_private + + @property + def is_unspecified(self): + """Test if the address is unspecified. + + Returns: + A boolean, True if this is the unspecified address as defined in + RFC 2373 2.5.2. + + """ + return self._ip == 0 + + @property + def is_loopback(self): + """Test if the address is a loopback address. + + Returns: + A boolean, True if the address is a loopback address as defined in + RFC 2373 2.5.3. + + """ + return self._ip == 1 + + @property + def ipv4_mapped(self): + """Return the IPv4 mapped address. + + Returns: + If the IPv6 address is a v4 mapped address, return the + IPv4 mapped address. Return None otherwise. + + """ + if (self._ip >> 32) != 0xFFFF: + return None + return IPv4Address(self._ip & 0xFFFFFFFF) + + @property + def teredo(self): + """Tuple of embedded teredo IPs. 
+ + Returns: + Tuple of the (server, client) IPs or None if the address + doesn't appear to be a teredo address (doesn't start with + 2001::/32) + + """ + if (self._ip >> 96) != 0x20010000: + return None + return (IPv4Address((self._ip >> 64) & 0xFFFFFFFF), + IPv4Address(~self._ip & 0xFFFFFFFF)) + + @property + def sixtofour(self): + """Return the IPv4 6to4 embedded address. + + Returns: + The IPv4 6to4-embedded address if present or None if the + address doesn't appear to contain a 6to4 embedded address. + + """ + if (self._ip >> 112) != 0x2002: + return None + return IPv4Address((self._ip >> 80) & 0xFFFFFFFF) + + +class IPv6Interface(IPv6Address): + + def __init__(self, address): + if isinstance(address, (bytes, _compat_int_types)): + IPv6Address.__init__(self, address) + self.network = IPv6Network(self._ip) + self._prefixlen = self._max_prefixlen + return + if isinstance(address, tuple): + IPv6Address.__init__(self, address[0]) + if len(address) > 1: + self._prefixlen = int(address[1]) + else: + self._prefixlen = self._max_prefixlen + self.network = IPv6Network(address, strict=False) + self.netmask = self.network.netmask + self.hostmask = self.network.hostmask + return + + addr = _split_optional_netmask(address) + IPv6Address.__init__(self, addr[0]) + self.network = IPv6Network(address, strict=False) + self.netmask = self.network.netmask + self._prefixlen = self.network._prefixlen + self.hostmask = self.network.hostmask + + def __str__(self): + return '%s/%d' % (self._string_from_ip_int(self._ip), + self.network.prefixlen) + + def __eq__(self, other): + address_equal = IPv6Address.__eq__(self, other) + if not address_equal or address_equal is NotImplemented: + return address_equal + try: + return self.network == other.network + except AttributeError: + # An interface with an associated network is NOT the + # same as an unassociated address. That's why the hash + # takes the extra info into account. + return False + + def __lt__(self, other): + address_less = IPv6Address.__lt__(self, other) + if address_less is NotImplemented: + return NotImplemented + try: + return (self.network < other.network or + self.network == other.network and address_less) + except AttributeError: + # We *do* allow addresses and interfaces to be sorted. The + # unassociated address is considered less than all interfaces. + return False + + def __hash__(self): + return self._ip ^ self._prefixlen ^ int(self.network.network_address) + + __reduce__ = _IPAddressBase.__reduce__ + + @property + def ip(self): + return IPv6Address(self._ip) + + @property + def with_prefixlen(self): + return '%s/%s' % (self._string_from_ip_int(self._ip), + self._prefixlen) + + @property + def with_netmask(self): + return '%s/%s' % (self._string_from_ip_int(self._ip), + self.netmask) + + @property + def with_hostmask(self): + return '%s/%s' % (self._string_from_ip_int(self._ip), + self.hostmask) + + @property + def is_unspecified(self): + return self._ip == 0 and self.network.is_unspecified + + @property + def is_loopback(self): + return self._ip == 1 and self.network.is_loopback + + +class IPv6Network(_BaseV6, _BaseNetwork): + + """This class represents and manipulates 128-bit IPv6 networks. 
+ + Attributes: [examples for IPv6('2001:db8::1000/124')] + .network_address: IPv6Address('2001:db8::1000') + .hostmask: IPv6Address('::f') + .broadcast_address: IPv6Address('2001:db8::100f') + .netmask: IPv6Address('ffff:ffff:ffff:ffff:ffff:ffff:ffff:fff0') + .prefixlen: 124 + + """ + + # Class to use when creating address objects + _address_class = IPv6Address + + def __init__(self, address, strict=True): + """Instantiate a new IPv6 Network object. + + Args: + address: A string or integer representing the IPv6 network or the + IP and prefix/netmask. + '2001:db8::/128' + '2001:db8:0000:0000:0000:0000:0000:0000/128' + '2001:db8::' + are all functionally the same in IPv6. That is to say, + failing to provide a subnetmask will create an object with + a mask of /128. + + Additionally, an integer can be passed, so + IPv6Network('2001:db8::') == + IPv6Network(42540766411282592856903984951653826560) + or, more generally + IPv6Network(int(IPv6Network('2001:db8::'))) == + IPv6Network('2001:db8::') + + strict: A boolean. If true, ensure that we have been passed + A true network address, eg, 2001:db8::1000/124 and not an + IP address on a network, eg, 2001:db8::1/124. + + Raises: + AddressValueError: If address isn't a valid IPv6 address. + NetmaskValueError: If the netmask isn't valid for + an IPv6 address. + ValueError: If strict was True and a network address was not + supplied. + + """ + _BaseNetwork.__init__(self, address) + + # Efficient constructor from integer or packed address + if isinstance(address, (bytes, _compat_int_types)): + self.network_address = IPv6Address(address) + self.netmask, self._prefixlen = self._make_netmask( + self._max_prefixlen) + return + + if isinstance(address, tuple): + if len(address) > 1: + arg = address[1] + else: + arg = self._max_prefixlen + self.netmask, self._prefixlen = self._make_netmask(arg) + self.network_address = IPv6Address(address[0]) + packed = int(self.network_address) + if packed & int(self.netmask) != packed: + if strict: + raise ValueError('%s has host bits set' % self) + else: + self.network_address = IPv6Address(packed & + int(self.netmask)) + return + + # Assume input argument to be string or any object representation + # which converts into a formatted IP prefix string. + addr = _split_optional_netmask(address) + + self.network_address = IPv6Address(self._ip_int_from_string(addr[0])) + + if len(addr) == 2: + arg = addr[1] + else: + arg = self._max_prefixlen + self.netmask, self._prefixlen = self._make_netmask(arg) + + if strict: + if (IPv6Address(int(self.network_address) & int(self.netmask)) != + self.network_address): + raise ValueError('%s has host bits set' % self) + self.network_address = IPv6Address(int(self.network_address) & + int(self.netmask)) + + if self._prefixlen == (self._max_prefixlen - 1): + self.hosts = self.__iter__ + + def hosts(self): + """Generate Iterator over usable hosts in a network. + + This is like __iter__ except it doesn't return the + Subnet-Router anycast address. + + """ + network = int(self.network_address) + broadcast = int(self.broadcast_address) + for x in _compat_range(network + 1, broadcast + 1): + yield self._address_class(x) + + @property + def is_site_local(self): + """Test if the address is reserved for site-local. + + Note that the site-local address space has been deprecated by RFC 3879. + Use is_private to test if this address is in the space of unique local + addresses as defined by RFC 4193. + + Returns: + A boolean, True if the address is reserved per RFC 3513 2.5.6. 
+ + """ + return (self.network_address.is_site_local and + self.broadcast_address.is_site_local) + + +class _IPv6Constants(object): + + _linklocal_network = IPv6Network('fe80::/10') + + _multicast_network = IPv6Network('ff00::/8') + + _private_networks = [ + IPv6Network('::1/128'), + IPv6Network('::/128'), + IPv6Network('::ffff:0:0/96'), + IPv6Network('100::/64'), + IPv6Network('2001::/23'), + IPv6Network('2001:2::/48'), + IPv6Network('2001:db8::/32'), + IPv6Network('2001:10::/28'), + IPv6Network('fc00::/7'), + IPv6Network('fe80::/10'), + ] + + _reserved_networks = [ + IPv6Network('::/8'), IPv6Network('100::/8'), + IPv6Network('200::/7'), IPv6Network('400::/6'), + IPv6Network('800::/5'), IPv6Network('1000::/4'), + IPv6Network('4000::/3'), IPv6Network('6000::/3'), + IPv6Network('8000::/3'), IPv6Network('A000::/3'), + IPv6Network('C000::/3'), IPv6Network('E000::/4'), + IPv6Network('F000::/5'), IPv6Network('F800::/6'), + IPv6Network('FE00::/9'), + ] + + _sitelocal_network = IPv6Network('fec0::/10') + + +IPv6Address._constants = _IPv6Constants diff --git a/env/lib/python3.6/site-packages/pip/_vendor/lockfile/linklockfile.py b/env/lib/python3.6/site-packages/pip/_vendor/lockfile/linklockfile.py new file mode 100644 index 0000000..2ca9be0 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/lockfile/linklockfile.py @@ -0,0 +1,73 @@ +from __future__ import absolute_import + +import time +import os + +from . import (LockBase, LockFailed, NotLocked, NotMyLock, LockTimeout, + AlreadyLocked) + + +class LinkLockFile(LockBase): + """Lock access to a file using atomic property of link(2). + + >>> lock = LinkLockFile('somefile') + >>> lock = LinkLockFile('somefile', threaded=False) + """ + + def acquire(self, timeout=None): + try: + open(self.unique_name, "wb").close() + except IOError: + raise LockFailed("failed to create %s" % self.unique_name) + + timeout = timeout if timeout is not None else self.timeout + end_time = time.time() + if timeout is not None and timeout > 0: + end_time += timeout + + while True: + # Try and create a hard link to it. + try: + os.link(self.unique_name, self.lock_file) + except OSError: + # Link creation failed. Maybe we've double-locked? + nlinks = os.stat(self.unique_name).st_nlink + if nlinks == 2: + # The original link plus the one I created == 2. We're + # good to go. + return + else: + # Otherwise the lock creation failed. + if timeout is not None and time.time() > end_time: + os.unlink(self.unique_name) + if timeout > 0: + raise LockTimeout("Timeout waiting to acquire" + " lock for %s" % + self.path) + else: + raise AlreadyLocked("%s is already locked" % + self.path) + time.sleep(timeout is not None and timeout / 10 or 0.1) + else: + # Link creation succeeded. We're good to go. 
+ return + + def release(self): + if not self.is_locked(): + raise NotLocked("%s is not locked" % self.path) + elif not os.path.exists(self.unique_name): + raise NotMyLock("%s is locked, but not by me" % self.path) + os.unlink(self.unique_name) + os.unlink(self.lock_file) + + def is_locked(self): + return os.path.exists(self.lock_file) + + def i_am_locking(self): + return (self.is_locked() and + os.path.exists(self.unique_name) and + os.stat(self.unique_name).st_nlink == 2) + + def break_lock(self): + if os.path.exists(self.lock_file): + os.unlink(self.lock_file) diff --git a/env/lib/python3.6/site-packages/pip/_vendor/lockfile/mkdirlockfile.py b/env/lib/python3.6/site-packages/pip/_vendor/lockfile/mkdirlockfile.py new file mode 100644 index 0000000..05a8c96 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/lockfile/mkdirlockfile.py @@ -0,0 +1,84 @@ +from __future__ import absolute_import, division + +import time +import os +import sys +import errno + +from . import (LockBase, LockFailed, NotLocked, NotMyLock, LockTimeout, + AlreadyLocked) + + +class MkdirLockFile(LockBase): + """Lock file by creating a directory.""" + def __init__(self, path, threaded=True, timeout=None): + """ + >>> lock = MkdirLockFile('somefile') + >>> lock = MkdirLockFile('somefile', threaded=False) + """ + LockBase.__init__(self, path, threaded, timeout) + # Lock file itself is a directory. Place the unique file name into + # it. + self.unique_name = os.path.join(self.lock_file, + "%s.%s%s" % (self.hostname, + self.tname, + self.pid)) + + def acquire(self, timeout=None): + timeout = timeout if timeout is not None else self.timeout + end_time = time.time() + if timeout is not None and timeout > 0: + end_time += timeout + + if timeout is None: + wait = 0.1 + else: + wait = max(0, timeout / 10) + + while True: + try: + os.mkdir(self.lock_file) + except OSError: + err = sys.exc_info()[1] + if err.errno == errno.EEXIST: + # Already locked. + if os.path.exists(self.unique_name): + # Already locked by me. + return + if timeout is not None and time.time() > end_time: + if timeout > 0: + raise LockTimeout("Timeout waiting to acquire" + " lock for %s" % + self.path) + else: + # Someone else has the lock. 
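+ # (mkdir(2) is atomic, so EEXIST means some other locker created
+ # lock_file; our own unique_name marker was checked above, so this
+ # really is someone else's lock.)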
+ raise AlreadyLocked("%s is already locked" % + self.path) + time.sleep(wait) + else: + # Couldn't create the lock for some other reason + raise LockFailed("failed to create %s" % self.lock_file) + else: + open(self.unique_name, "wb").close() + return + + def release(self): + if not self.is_locked(): + raise NotLocked("%s is not locked" % self.path) + elif not os.path.exists(self.unique_name): + raise NotMyLock("%s is locked, but not by me" % self.path) + os.unlink(self.unique_name) + os.rmdir(self.lock_file) + + def is_locked(self): + return os.path.exists(self.lock_file) + + def i_am_locking(self): + return (self.is_locked() and + os.path.exists(self.unique_name)) + + def break_lock(self): + if os.path.exists(self.lock_file): + for name in os.listdir(self.lock_file): + os.unlink(os.path.join(self.lock_file, name)) + os.rmdir(self.lock_file) diff --git a/env/lib/python3.6/site-packages/pip/_vendor/lockfile/pidlockfile.py b/env/lib/python3.6/site-packages/pip/_vendor/lockfile/pidlockfile.py new file mode 100644 index 0000000..069e85b --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/lockfile/pidlockfile.py @@ -0,0 +1,190 @@ +# -*- coding: utf-8 -*- + +# pidlockfile.py +# +# Copyright © 2008–2009 Ben Finney <ben+python@benfinney.id.au> +# +# This is free software: you may copy, modify, and/or distribute this work +# under the terms of the Python Software Foundation License, version 2 or +# later as published by the Python Software Foundation. +# No warranty expressed or implied. See the file LICENSE.PSF-2 for details. + +""" Lockfile behaviour implemented via Unix PID files. + """ + +from __future__ import absolute_import + +import errno +import os +import time + +from . import (LockBase, AlreadyLocked, LockFailed, NotLocked, NotMyLock, + LockTimeout) + + +class PIDLockFile(LockBase): + """ Lockfile implemented as a Unix PID file. + + The lock file is a normal file named by the attribute `path`. + A lock's PID file contains a single line of text, containing + the process ID (PID) of the process that acquired the lock. + + >>> lock = PIDLockFile('somefile') + >>> lock = PIDLockFile('somefile') + """ + + def __init__(self, path, threaded=False, timeout=None): + # pid lockfiles don't support threaded operation, so always force + # False as the threaded arg. + LockBase.__init__(self, path, False, timeout) + self.unique_name = self.path + + def read_pid(self): + """ Get the PID from the lock file. + """ + return read_pid_from_pidfile(self.path) + + def is_locked(self): + """ Test if the lock is currently held. + + The lock is held if the PID file for this lock exists. + + """ + return os.path.exists(self.path) + + def i_am_locking(self): + """ Test if the lock is held by the current process. + + Returns ``True`` if the current process ID matches the + number stored in the PID file. + """ + return self.is_locked() and os.getpid() == self.read_pid() + + def acquire(self, timeout=None): + """ Acquire the lock. + + Creates the PID file for this lock, or raises an error if + the lock could not be acquired. + """ + + timeout = timeout if timeout is not None else self.timeout + end_time = time.time() + if timeout is not None and timeout > 0: + end_time += timeout + + while True: + try: + write_pid_to_pidfile(self.path) + except OSError as exc: + if exc.errno == errno.EEXIST: + # The lock creation failed. Maybe sleep a bit. 
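+ # (EEXIST comes from the O_CREAT | O_EXCL open in
+ # write_pid_to_pidfile() below: another process already wrote its
+ # PID, so keep retrying until end_time and then give up.)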
+ if time.time() > end_time: + if timeout is not None and timeout > 0: + raise LockTimeout("Timeout waiting to acquire" + " lock for %s" % + self.path) + else: + raise AlreadyLocked("%s is already locked" % + self.path) + time.sleep(timeout is not None and timeout / 10 or 0.1) + else: + raise LockFailed("failed to create %s" % self.path) + else: + return + + def release(self): + """ Release the lock. + + Removes the PID file to release the lock, or raises an + error if the current process does not hold the lock. + + """ + if not self.is_locked(): + raise NotLocked("%s is not locked" % self.path) + if not self.i_am_locking(): + raise NotMyLock("%s is locked, but not by me" % self.path) + remove_existing_pidfile(self.path) + + def break_lock(self): + """ Break an existing lock. + + Removes the PID file if it already exists, otherwise does + nothing. + + """ + remove_existing_pidfile(self.path) + + +def read_pid_from_pidfile(pidfile_path): + """ Read the PID recorded in the named PID file. + + Read and return the numeric PID recorded as text in the named + PID file. If the PID file cannot be read, or if the content is + not a valid PID, return ``None``. + + """ + pid = None + try: + pidfile = open(pidfile_path, 'r') + except IOError: + pass + else: + # According to the FHS 2.3 section on PID files in /var/run: + # + # The file must consist of the process identifier in + # ASCII-encoded decimal, followed by a newline character. + # + # Programs that read PID files should be somewhat flexible + # in what they accept; i.e., they should ignore extra + # whitespace, leading zeroes, absence of the trailing + # newline, or additional lines in the PID file. + + line = pidfile.readline().strip() + try: + pid = int(line) + except ValueError: + pass + pidfile.close() + + return pid + + +def write_pid_to_pidfile(pidfile_path): + """ Write the PID in the named PID file. + + Get the numeric process ID (“PID”) of the current process + and write it to the named file as a line of text. + + """ + open_flags = (os.O_CREAT | os.O_EXCL | os.O_WRONLY) + open_mode = 0o644 + pidfile_fd = os.open(pidfile_path, open_flags, open_mode) + pidfile = os.fdopen(pidfile_fd, 'w') + + # According to the FHS 2.3 section on PID files in /var/run: + # + # The file must consist of the process identifier in + # ASCII-encoded decimal, followed by a newline character. For + # example, if crond was process number 25, /var/run/crond.pid + # would contain three characters: two, five, and newline. + + pid = os.getpid() + pidfile.write("%s\n" % pid) + pidfile.close() + + +def remove_existing_pidfile(pidfile_path): + """ Remove the named PID file if it exists. + + Removing a PID file that doesn't already exist puts us in the + desired state, so we ignore the condition if the file does not + exist. + + """ + try: + os.remove(pidfile_path) + except OSError as exc: + if exc.errno == errno.ENOENT: + pass + else: + raise diff --git a/env/lib/python3.6/site-packages/pip/_vendor/lockfile/sqlitelockfile.py b/env/lib/python3.6/site-packages/pip/_vendor/lockfile/sqlitelockfile.py new file mode 100644 index 0000000..f997e24 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/lockfile/sqlitelockfile.py @@ -0,0 +1,156 @@ +from __future__ import absolute_import, division + +import time +import os + +try: + unicode +except NameError: + unicode = str + +from . import LockBase, NotLocked, NotMyLock, LockTimeout, AlreadyLocked + + +class SQLiteLockFile(LockBase): + "Demonstrate SQL-based locking." 
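+ # testdb is a class-level path to a throw-away SQLite database shared
+ # by every SQLiteLockFile in this process; __init__ below creates it
+ # lazily and registers an atexit hook to remove it.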
+ + testdb = None + + def __init__(self, path, threaded=True, timeout=None): + """ + >>> lock = SQLiteLockFile('somefile') + >>> lock = SQLiteLockFile('somefile', threaded=False) + """ + LockBase.__init__(self, path, threaded, timeout) + self.lock_file = unicode(self.lock_file) + self.unique_name = unicode(self.unique_name) + + if SQLiteLockFile.testdb is None: + import tempfile + _fd, testdb = tempfile.mkstemp() + os.close(_fd) + os.unlink(testdb) + del _fd, tempfile + SQLiteLockFile.testdb = testdb + + import sqlite3 + self.connection = sqlite3.connect(SQLiteLockFile.testdb) + + c = self.connection.cursor() + try: + c.execute("create table locks" + "(" + " lock_file varchar(32)," + " unique_name varchar(32)" + ")") + except sqlite3.OperationalError: + pass + else: + self.connection.commit() + import atexit + atexit.register(os.unlink, SQLiteLockFile.testdb) + + def acquire(self, timeout=None): + timeout = timeout if timeout is not None else self.timeout + end_time = time.time() + if timeout is not None and timeout > 0: + end_time += timeout + + if timeout is None: + wait = 0.1 + elif timeout <= 0: + wait = 0 + else: + wait = timeout / 10 + + cursor = self.connection.cursor() + + while True: + if not self.is_locked(): + # Not locked. Try to lock it. + cursor.execute("insert into locks" + " (lock_file, unique_name)" + " values" + " (?, ?)", + (self.lock_file, self.unique_name)) + self.connection.commit() + + # Check to see if we are the only lock holder. + cursor.execute("select * from locks" + " where unique_name = ?", + (self.unique_name,)) + rows = cursor.fetchall() + if len(rows) > 1: + # Nope. Someone else got there. Remove our lock. + cursor.execute("delete from locks" + " where unique_name = ?", + (self.unique_name,)) + self.connection.commit() + else: + # Yup. We're done, so go home. + return + else: + # Check to see if we are the only lock holder. + cursor.execute("select * from locks" + " where unique_name = ?", + (self.unique_name,)) + rows = cursor.fetchall() + if len(rows) == 1: + # We're the locker, so go home. + return + + # Maybe we should wait a bit longer. + if timeout is not None and time.time() > end_time: + if timeout > 0: + # No more waiting. + raise LockTimeout("Timeout waiting to acquire" + " lock for %s" % + self.path) + else: + # Someone else has the lock and we are impatient.. + raise AlreadyLocked("%s is already locked" % self.path) + + # Well, okay. We'll give it a bit longer. + time.sleep(wait) + + def release(self): + if not self.is_locked(): + raise NotLocked("%s is not locked" % self.path) + if not self.i_am_locking(): + raise NotMyLock("%s is locked, but not by me (by %s)" % + (self.unique_name, self._who_is_locking())) + cursor = self.connection.cursor() + cursor.execute("delete from locks" + " where unique_name = ?", + (self.unique_name,)) + self.connection.commit() + + def _who_is_locking(self): + cursor = self.connection.cursor() + cursor.execute("select unique_name from locks" + " where lock_file = ?", + (self.lock_file,)) + return cursor.fetchone()[0] + + def is_locked(self): + cursor = self.connection.cursor() + cursor.execute("select * from locks" + " where lock_file = ?", + (self.lock_file,)) + rows = cursor.fetchall() + return not not rows + + def i_am_locking(self): + cursor = self.connection.cursor() + cursor.execute("select * from locks" + " where lock_file = ?" 
+ " and unique_name = ?", + (self.lock_file, self.unique_name)) + return not not cursor.fetchall() + + def break_lock(self): + cursor = self.connection.cursor() + cursor.execute("delete from locks" + " where lock_file = ?", + (self.lock_file,)) + self.connection.commit() diff --git a/env/lib/python3.6/site-packages/pip/_vendor/lockfile/symlinklockfile.py b/env/lib/python3.6/site-packages/pip/_vendor/lockfile/symlinklockfile.py new file mode 100644 index 0000000..23b41f5 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/lockfile/symlinklockfile.py @@ -0,0 +1,70 @@ +from __future__ import absolute_import + +import os +import time + +from . import (LockBase, NotLocked, NotMyLock, LockTimeout, + AlreadyLocked) + + +class SymlinkLockFile(LockBase): + """Lock access to a file using symlink(2).""" + + def __init__(self, path, threaded=True, timeout=None): + # super(SymlinkLockFile).__init(...) + LockBase.__init__(self, path, threaded, timeout) + # split it back! + self.unique_name = os.path.split(self.unique_name)[1] + + def acquire(self, timeout=None): + # Hopefully unnecessary for symlink. + # try: + # open(self.unique_name, "wb").close() + # except IOError: + # raise LockFailed("failed to create %s" % self.unique_name) + timeout = timeout if timeout is not None else self.timeout + end_time = time.time() + if timeout is not None and timeout > 0: + end_time += timeout + + while True: + # Try and create a symbolic link to it. + try: + os.symlink(self.unique_name, self.lock_file) + except OSError: + # Link creation failed. Maybe we've double-locked? + if self.i_am_locking(): + # Linked to out unique name. Proceed. + return + else: + # Otherwise the lock creation failed. + if timeout is not None and time.time() > end_time: + if timeout > 0: + raise LockTimeout("Timeout waiting to acquire" + " lock for %s" % + self.path) + else: + raise AlreadyLocked("%s is already locked" % + self.path) + time.sleep(timeout / 10 if timeout is not None else 0.1) + else: + # Link creation succeeded. We're good to go. + return + + def release(self): + if not self.is_locked(): + raise NotLocked("%s is not locked" % self.path) + elif not self.i_am_locking(): + raise NotMyLock("%s is locked, but not by me" % self.path) + os.unlink(self.lock_file) + + def is_locked(self): + return os.path.islink(self.lock_file) + + def i_am_locking(self): + return (os.path.islink(self.lock_file) + and os.readlink(self.lock_file) == self.unique_name) + + def break_lock(self): + if os.path.islink(self.lock_file): # exists && link + os.unlink(self.lock_file) diff --git a/env/lib/python3.6/site-packages/pip/_vendor/msgpack/_version.py b/env/lib/python3.6/site-packages/pip/_vendor/msgpack/_version.py new file mode 100644 index 0000000..d28f0de --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/msgpack/_version.py @@ -0,0 +1 @@ +version = (0, 5, 6) diff --git a/env/lib/python3.6/site-packages/pip/_vendor/msgpack/exceptions.py b/env/lib/python3.6/site-packages/pip/_vendor/msgpack/exceptions.py new file mode 100644 index 0000000..9766881 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/msgpack/exceptions.py @@ -0,0 +1,41 @@ +class UnpackException(Exception): + """Deprecated. Use Exception instead to catch all exception during unpacking.""" + + +class BufferFull(UnpackException): + pass + + +class OutOfData(UnpackException): + pass + + +class UnpackValueError(UnpackException, ValueError): + """Deprecated. 
Use ValueError instead.""" + + +class ExtraData(UnpackValueError): + def __init__(self, unpacked, extra): + self.unpacked = unpacked + self.extra = extra + + def __str__(self): + return "unpack(b) received extra data." + + +class PackException(Exception): + """Deprecated. Use Exception instead to catch all exception during packing.""" + + +class PackValueError(PackException, ValueError): + """PackValueError is raised when type of input data is supported but it's value is unsupported. + + Deprecated. Use ValueError instead. + """ + + +class PackOverflowError(PackValueError, OverflowError): + """PackOverflowError is raised when integer value is out of range of msgpack support [-2**31, 2**32). + + Deprecated. Use ValueError instead. + """ diff --git a/env/lib/python3.6/site-packages/pip/_vendor/msgpack/fallback.py b/env/lib/python3.6/site-packages/pip/_vendor/msgpack/fallback.py new file mode 100644 index 0000000..9418421 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/msgpack/fallback.py @@ -0,0 +1,977 @@ +"""Fallback pure Python implementation of msgpack""" + +import sys +import struct +import warnings + +if sys.version_info[0] == 3: + PY3 = True + int_types = int + Unicode = str + xrange = range + def dict_iteritems(d): + return d.items() +else: + PY3 = False + int_types = (int, long) + Unicode = unicode + def dict_iteritems(d): + return d.iteritems() + + +if hasattr(sys, 'pypy_version_info'): + # cStringIO is slow on PyPy, StringIO is faster. However: PyPy's own + # StringBuilder is fastest. + from __pypy__ import newlist_hint + try: + from __pypy__.builders import BytesBuilder as StringBuilder + except ImportError: + from __pypy__.builders import StringBuilder + USING_STRINGBUILDER = True + class StringIO(object): + def __init__(self, s=b''): + if s: + self.builder = StringBuilder(len(s)) + self.builder.append(s) + else: + self.builder = StringBuilder() + def write(self, s): + if isinstance(s, memoryview): + s = s.tobytes() + elif isinstance(s, bytearray): + s = bytes(s) + self.builder.append(s) + def getvalue(self): + return self.builder.build() +else: + USING_STRINGBUILDER = False + from io import BytesIO as StringIO + newlist_hint = lambda size: [] + + +from pip._vendor.msgpack.exceptions import ( + BufferFull, + OutOfData, + UnpackValueError, + PackValueError, + PackOverflowError, + ExtraData) + +from pip._vendor.msgpack import ExtType + + +EX_SKIP = 0 +EX_CONSTRUCT = 1 +EX_READ_ARRAY_HEADER = 2 +EX_READ_MAP_HEADER = 3 + +TYPE_IMMEDIATE = 0 +TYPE_ARRAY = 1 +TYPE_MAP = 2 +TYPE_RAW = 3 +TYPE_BIN = 4 +TYPE_EXT = 5 + +DEFAULT_RECURSE_LIMIT = 511 + + +def _check_type_strict(obj, t, type=type, tuple=tuple): + if type(t) is tuple: + return type(obj) in t + else: + return type(obj) is t + + +def _get_data_from_buffer(obj): + try: + view = memoryview(obj) + except TypeError: + # try to use legacy buffer protocol if 2.7, otherwise re-raise + if not PY3: + view = memoryview(buffer(obj)) + warnings.warn("using old buffer interface to unpack %s; " + "this leads to unpacking errors if slicing is used and " + "will be removed in a future version" % type(obj), + RuntimeWarning) + else: + raise + if view.itemsize != 1: + raise ValueError("cannot unpack from multi-byte object") + return view + + +def unpack(stream, **kwargs): + warnings.warn( + "Direct calling implementation's unpack() is deprecated, Use msgpack.unpack() or unpackb() instead.", + PendingDeprecationWarning) + data = stream.read() + return unpackb(data, **kwargs) + + +def unpackb(packed, **kwargs): + """ + Unpack an 
object from `packed`. + + Raises `ExtraData` when `packed` contains extra bytes. + See :class:`Unpacker` for options. + """ + unpacker = Unpacker(None, **kwargs) + unpacker.feed(packed) + try: + ret = unpacker._unpack() + except OutOfData: + raise UnpackValueError("Data is not enough.") + if unpacker._got_extradata(): + raise ExtraData(ret, unpacker._get_extradata()) + return ret + + +class Unpacker(object): + """Streaming unpacker. + + arguments: + + :param file_like: + File-like object having `.read(n)` method. + If specified, unpacker reads serialized data from it and :meth:`feed()` is not usable. + + :param int read_size: + Used as `file_like.read(read_size)`. (default: `min(16*1024, max_buffer_size)`) + + :param bool use_list: + If true, unpack msgpack array to Python list. + Otherwise, unpack to Python tuple. (default: True) + + :param bool raw: + If true, unpack msgpack raw to Python bytes (default). + Otherwise, unpack to Python str (or unicode on Python 2) by decoding + with UTF-8 encoding (recommended). + Currently, the default is true, but it will be changed to false in + near future. So you must specify it explicitly for keeping backward + compatibility. + + *encoding* option which is deprecated overrides this option. + + :param callable object_hook: + When specified, it should be callable. + Unpacker calls it with a dict argument after unpacking msgpack map. + (See also simplejson) + + :param callable object_pairs_hook: + When specified, it should be callable. + Unpacker calls it with a list of key-value pairs after unpacking msgpack map. + (See also simplejson) + + :param str encoding: + Encoding used for decoding msgpack raw. + If it is None (default), msgpack raw is deserialized to Python bytes. + + :param str unicode_errors: + (deprecated) Used for decoding msgpack raw with *encoding*. + (default: `'strict'`) + + :param int max_buffer_size: + Limits size of data waiting unpacked. 0 means system's INT_MAX (default). + Raises `BufferFull` exception when it is insufficient. + You should set this parameter when unpacking data from untrusted source. + + :param int max_str_len: + Limits max length of str. (default: 2**31-1) + + :param int max_bin_len: + Limits max length of bin. (default: 2**31-1) + + :param int max_array_len: + Limits max length of array. (default: 2**31-1) + + :param int max_map_len: + Limits max length of map. (default: 2**31-1) + + + example of streaming deserialize from file-like object:: + + unpacker = Unpacker(file_like, raw=False) + for o in unpacker: + process(o) + + example of streaming deserialize from socket:: + + unpacker = Unpacker(raw=False) + while True: + buf = sock.recv(1024**2) + if not buf: + break + unpacker.feed(buf) + for o in unpacker: + process(o) + """ + + def __init__(self, file_like=None, read_size=0, use_list=True, raw=True, + object_hook=None, object_pairs_hook=None, list_hook=None, + encoding=None, unicode_errors=None, max_buffer_size=0, + ext_hook=ExtType, + max_str_len=2147483647, # 2**32-1 + max_bin_len=2147483647, + max_array_len=2147483647, + max_map_len=2147483647, + max_ext_len=2147483647): + + if encoding is not None: + warnings.warn( + "encoding is deprecated, Use raw=False instead.", + PendingDeprecationWarning) + + if unicode_errors is None: + unicode_errors = 'strict' + + if file_like is None: + self._feeding = True + else: + if not callable(file_like.read): + raise TypeError("`file_like.read` must be callable") + self.file_like = file_like + self._feeding = False + + #: array of bytes fed. 
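+ # feed() appends raw input here; _read()/_reserve() advance _buff_i
+ # through it, and _consume() moves _buf_checkpoint so already-unpacked
+ # bytes can be trimmed away.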
+ self._buffer = bytearray() + # Some very old pythons don't support `struct.unpack_from()` with a + # `bytearray`. So we wrap it in a `buffer()` there. + if sys.version_info < (2, 7, 6): + self._buffer_view = buffer(self._buffer) + else: + self._buffer_view = self._buffer + #: Which position we currently reads + self._buff_i = 0 + + # When Unpacker is used as an iterable, between the calls to next(), + # the buffer is not "consumed" completely, for efficiency sake. + # Instead, it is done sloppily. To make sure we raise BufferFull at + # the correct moments, we have to keep track of how sloppy we were. + # Furthermore, when the buffer is incomplete (that is: in the case + # we raise an OutOfData) we need to rollback the buffer to the correct + # state, which _buf_checkpoint records. + self._buf_checkpoint = 0 + + self._max_buffer_size = max_buffer_size or 2**31-1 + if read_size > self._max_buffer_size: + raise ValueError("read_size must be smaller than max_buffer_size") + self._read_size = read_size or min(self._max_buffer_size, 16*1024) + self._raw = bool(raw) + self._encoding = encoding + self._unicode_errors = unicode_errors + self._use_list = use_list + self._list_hook = list_hook + self._object_hook = object_hook + self._object_pairs_hook = object_pairs_hook + self._ext_hook = ext_hook + self._max_str_len = max_str_len + self._max_bin_len = max_bin_len + self._max_array_len = max_array_len + self._max_map_len = max_map_len + self._max_ext_len = max_ext_len + self._stream_offset = 0 + + if list_hook is not None and not callable(list_hook): + raise TypeError('`list_hook` is not callable') + if object_hook is not None and not callable(object_hook): + raise TypeError('`object_hook` is not callable') + if object_pairs_hook is not None and not callable(object_pairs_hook): + raise TypeError('`object_pairs_hook` is not callable') + if object_hook is not None and object_pairs_hook is not None: + raise TypeError("object_pairs_hook and object_hook are mutually " + "exclusive") + if not callable(ext_hook): + raise TypeError("`ext_hook` is not callable") + + def feed(self, next_bytes): + assert self._feeding + view = _get_data_from_buffer(next_bytes) + if (len(self._buffer) - self._buff_i + len(view) > self._max_buffer_size): + raise BufferFull + + # Strip buffer before checkpoint before reading file. + if self._buf_checkpoint > 0: + del self._buffer[:self._buf_checkpoint] + self._buff_i -= self._buf_checkpoint + self._buf_checkpoint = 0 + + self._buffer += view + + def _consume(self): + """ Gets rid of the used parts of the buffer. """ + self._stream_offset += self._buff_i - self._buf_checkpoint + self._buf_checkpoint = self._buff_i + + def _got_extradata(self): + return self._buff_i < len(self._buffer) + + def _get_extradata(self): + return self._buffer[self._buff_i:] + + def read_bytes(self, n): + return self._read(n) + + def _read(self, n): + # (int) -> bytearray + self._reserve(n) + i = self._buff_i + self._buff_i = i+n + return self._buffer[i:i+n] + + def _reserve(self, n): + remain_bytes = len(self._buffer) - self._buff_i - n + + # Fast path: buffer has n bytes already + if remain_bytes >= 0: + return + + if self._feeding: + self._buff_i = self._buf_checkpoint + raise OutOfData + + # Strip buffer before checkpoint before reading file. 
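+ # (Same trimming as in feed(): bytes before the checkpoint have already
+ # been returned to the caller, so dropping them keeps the buffer bounded
+ # while streaming from file_like.)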
+ if self._buf_checkpoint > 0: + del self._buffer[:self._buf_checkpoint] + self._buff_i -= self._buf_checkpoint + self._buf_checkpoint = 0 + + # Read from file + remain_bytes = -remain_bytes + while remain_bytes > 0: + to_read_bytes = max(self._read_size, remain_bytes) + read_data = self.file_like.read(to_read_bytes) + if not read_data: + break + assert isinstance(read_data, bytes) + self._buffer += read_data + remain_bytes -= len(read_data) + + if len(self._buffer) < n + self._buff_i: + self._buff_i = 0 # rollback + raise OutOfData + + def _read_header(self, execute=EX_CONSTRUCT): + typ = TYPE_IMMEDIATE + n = 0 + obj = None + self._reserve(1) + b = self._buffer[self._buff_i] + self._buff_i += 1 + if b & 0b10000000 == 0: + obj = b + elif b & 0b11100000 == 0b11100000: + obj = -1 - (b ^ 0xff) + elif b & 0b11100000 == 0b10100000: + n = b & 0b00011111 + typ = TYPE_RAW + if n > self._max_str_len: + raise UnpackValueError("%s exceeds max_str_len(%s)", n, self._max_str_len) + obj = self._read(n) + elif b & 0b11110000 == 0b10010000: + n = b & 0b00001111 + typ = TYPE_ARRAY + if n > self._max_array_len: + raise UnpackValueError("%s exceeds max_array_len(%s)", n, self._max_array_len) + elif b & 0b11110000 == 0b10000000: + n = b & 0b00001111 + typ = TYPE_MAP + if n > self._max_map_len: + raise UnpackValueError("%s exceeds max_map_len(%s)", n, self._max_map_len) + elif b == 0xc0: + obj = None + elif b == 0xc2: + obj = False + elif b == 0xc3: + obj = True + elif b == 0xc4: + typ = TYPE_BIN + self._reserve(1) + n = self._buffer[self._buff_i] + self._buff_i += 1 + if n > self._max_bin_len: + raise UnpackValueError("%s exceeds max_bin_len(%s)" % (n, self._max_bin_len)) + obj = self._read(n) + elif b == 0xc5: + typ = TYPE_BIN + self._reserve(2) + n = struct.unpack_from(">H", self._buffer_view, self._buff_i)[0] + self._buff_i += 2 + if n > self._max_bin_len: + raise UnpackValueError("%s exceeds max_bin_len(%s)" % (n, self._max_bin_len)) + obj = self._read(n) + elif b == 0xc6: + typ = TYPE_BIN + self._reserve(4) + n = struct.unpack_from(">I", self._buffer_view, self._buff_i)[0] + self._buff_i += 4 + if n > self._max_bin_len: + raise UnpackValueError("%s exceeds max_bin_len(%s)" % (n, self._max_bin_len)) + obj = self._read(n) + elif b == 0xc7: # ext 8 + typ = TYPE_EXT + self._reserve(2) + L, n = struct.unpack_from('Bb', self._buffer_view, self._buff_i) + self._buff_i += 2 + if L > self._max_ext_len: + raise UnpackValueError("%s exceeds max_ext_len(%s)" % (L, self._max_ext_len)) + obj = self._read(L) + elif b == 0xc8: # ext 16 + typ = TYPE_EXT + self._reserve(3) + L, n = struct.unpack_from('>Hb', self._buffer_view, self._buff_i) + self._buff_i += 3 + if L > self._max_ext_len: + raise UnpackValueError("%s exceeds max_ext_len(%s)" % (L, self._max_ext_len)) + obj = self._read(L) + elif b == 0xc9: # ext 32 + typ = TYPE_EXT + self._reserve(5) + L, n = struct.unpack_from('>Ib', self._buffer_view, self._buff_i) + self._buff_i += 5 + if L > self._max_ext_len: + raise UnpackValueError("%s exceeds max_ext_len(%s)" % (L, self._max_ext_len)) + obj = self._read(L) + elif b == 0xca: + self._reserve(4) + obj = struct.unpack_from(">f", self._buffer_view, self._buff_i)[0] + self._buff_i += 4 + elif b == 0xcb: + self._reserve(8) + obj = struct.unpack_from(">d", self._buffer_view, self._buff_i)[0] + self._buff_i += 8 + elif b == 0xcc: + self._reserve(1) + obj = self._buffer[self._buff_i] + self._buff_i += 1 + elif b == 0xcd: + self._reserve(2) + obj = struct.unpack_from(">H", self._buffer_view, self._buff_i)[0] + self._buff_i += 
2 + elif b == 0xce: + self._reserve(4) + obj = struct.unpack_from(">I", self._buffer_view, self._buff_i)[0] + self._buff_i += 4 + elif b == 0xcf: + self._reserve(8) + obj = struct.unpack_from(">Q", self._buffer_view, self._buff_i)[0] + self._buff_i += 8 + elif b == 0xd0: + self._reserve(1) + obj = struct.unpack_from("b", self._buffer_view, self._buff_i)[0] + self._buff_i += 1 + elif b == 0xd1: + self._reserve(2) + obj = struct.unpack_from(">h", self._buffer_view, self._buff_i)[0] + self._buff_i += 2 + elif b == 0xd2: + self._reserve(4) + obj = struct.unpack_from(">i", self._buffer_view, self._buff_i)[0] + self._buff_i += 4 + elif b == 0xd3: + self._reserve(8) + obj = struct.unpack_from(">q", self._buffer_view, self._buff_i)[0] + self._buff_i += 8 + elif b == 0xd4: # fixext 1 + typ = TYPE_EXT + if self._max_ext_len < 1: + raise UnpackValueError("%s exceeds max_ext_len(%s)" % (1, self._max_ext_len)) + self._reserve(2) + n, obj = struct.unpack_from("b1s", self._buffer_view, self._buff_i) + self._buff_i += 2 + elif b == 0xd5: # fixext 2 + typ = TYPE_EXT + if self._max_ext_len < 2: + raise UnpackValueError("%s exceeds max_ext_len(%s)" % (2, self._max_ext_len)) + self._reserve(3) + n, obj = struct.unpack_from("b2s", self._buffer_view, self._buff_i) + self._buff_i += 3 + elif b == 0xd6: # fixext 4 + typ = TYPE_EXT + if self._max_ext_len < 4: + raise UnpackValueError("%s exceeds max_ext_len(%s)" % (4, self._max_ext_len)) + self._reserve(5) + n, obj = struct.unpack_from("b4s", self._buffer_view, self._buff_i) + self._buff_i += 5 + elif b == 0xd7: # fixext 8 + typ = TYPE_EXT + if self._max_ext_len < 8: + raise UnpackValueError("%s exceeds max_ext_len(%s)" % (8, self._max_ext_len)) + self._reserve(9) + n, obj = struct.unpack_from("b8s", self._buffer_view, self._buff_i) + self._buff_i += 9 + elif b == 0xd8: # fixext 16 + typ = TYPE_EXT + if self._max_ext_len < 16: + raise UnpackValueError("%s exceeds max_ext_len(%s)" % (16, self._max_ext_len)) + self._reserve(17) + n, obj = struct.unpack_from("b16s", self._buffer_view, self._buff_i) + self._buff_i += 17 + elif b == 0xd9: + typ = TYPE_RAW + self._reserve(1) + n = self._buffer[self._buff_i] + self._buff_i += 1 + if n > self._max_str_len: + raise UnpackValueError("%s exceeds max_str_len(%s)", n, self._max_str_len) + obj = self._read(n) + elif b == 0xda: + typ = TYPE_RAW + self._reserve(2) + n, = struct.unpack_from(">H", self._buffer_view, self._buff_i) + self._buff_i += 2 + if n > self._max_str_len: + raise UnpackValueError("%s exceeds max_str_len(%s)", n, self._max_str_len) + obj = self._read(n) + elif b == 0xdb: + typ = TYPE_RAW + self._reserve(4) + n, = struct.unpack_from(">I", self._buffer_view, self._buff_i) + self._buff_i += 4 + if n > self._max_str_len: + raise UnpackValueError("%s exceeds max_str_len(%s)", n, self._max_str_len) + obj = self._read(n) + elif b == 0xdc: + typ = TYPE_ARRAY + self._reserve(2) + n, = struct.unpack_from(">H", self._buffer_view, self._buff_i) + self._buff_i += 2 + if n > self._max_array_len: + raise UnpackValueError("%s exceeds max_array_len(%s)", n, self._max_array_len) + elif b == 0xdd: + typ = TYPE_ARRAY + self._reserve(4) + n, = struct.unpack_from(">I", self._buffer_view, self._buff_i) + self._buff_i += 4 + if n > self._max_array_len: + raise UnpackValueError("%s exceeds max_array_len(%s)", n, self._max_array_len) + elif b == 0xde: + self._reserve(2) + n, = struct.unpack_from(">H", self._buffer_view, self._buff_i) + self._buff_i += 2 + if n > self._max_map_len: + raise UnpackValueError("%s exceeds max_map_len(%s)", 
n, self._max_map_len) + typ = TYPE_MAP + elif b == 0xdf: + self._reserve(4) + n, = struct.unpack_from(">I", self._buffer_view, self._buff_i) + self._buff_i += 4 + if n > self._max_map_len: + raise UnpackValueError("%s exceeds max_map_len(%s)", n, self._max_map_len) + typ = TYPE_MAP + else: + raise UnpackValueError("Unknown header: 0x%x" % b) + return typ, n, obj + + def _unpack(self, execute=EX_CONSTRUCT): + typ, n, obj = self._read_header(execute) + + if execute == EX_READ_ARRAY_HEADER: + if typ != TYPE_ARRAY: + raise UnpackValueError("Expected array") + return n + if execute == EX_READ_MAP_HEADER: + if typ != TYPE_MAP: + raise UnpackValueError("Expected map") + return n + # TODO should we eliminate the recursion? + if typ == TYPE_ARRAY: + if execute == EX_SKIP: + for i in xrange(n): + # TODO check whether we need to call `list_hook` + self._unpack(EX_SKIP) + return + ret = newlist_hint(n) + for i in xrange(n): + ret.append(self._unpack(EX_CONSTRUCT)) + if self._list_hook is not None: + ret = self._list_hook(ret) + # TODO is the interaction between `list_hook` and `use_list` ok? + return ret if self._use_list else tuple(ret) + if typ == TYPE_MAP: + if execute == EX_SKIP: + for i in xrange(n): + # TODO check whether we need to call hooks + self._unpack(EX_SKIP) + self._unpack(EX_SKIP) + return + if self._object_pairs_hook is not None: + ret = self._object_pairs_hook( + (self._unpack(EX_CONSTRUCT), + self._unpack(EX_CONSTRUCT)) + for _ in xrange(n)) + else: + ret = {} + for _ in xrange(n): + key = self._unpack(EX_CONSTRUCT) + ret[key] = self._unpack(EX_CONSTRUCT) + if self._object_hook is not None: + ret = self._object_hook(ret) + return ret + if execute == EX_SKIP: + return + if typ == TYPE_RAW: + if self._encoding is not None: + obj = obj.decode(self._encoding, self._unicode_errors) + elif self._raw: + obj = bytes(obj) + else: + obj = obj.decode('utf_8') + return obj + if typ == TYPE_EXT: + return self._ext_hook(n, bytes(obj)) + if typ == TYPE_BIN: + return bytes(obj) + assert typ == TYPE_IMMEDIATE + return obj + + def __iter__(self): + return self + + def __next__(self): + try: + ret = self._unpack(EX_CONSTRUCT) + self._consume() + return ret + except OutOfData: + self._consume() + raise StopIteration + + next = __next__ + + def skip(self, write_bytes=None): + self._unpack(EX_SKIP) + if write_bytes is not None: + warnings.warn("`write_bytes` option is deprecated. Use `.tell()` instead.", DeprecationWarning) + write_bytes(self._buffer[self._buf_checkpoint:self._buff_i]) + self._consume() + + def unpack(self, write_bytes=None): + ret = self._unpack(EX_CONSTRUCT) + if write_bytes is not None: + warnings.warn("`write_bytes` option is deprecated. Use `.tell()` instead.", DeprecationWarning) + write_bytes(self._buffer[self._buf_checkpoint:self._buff_i]) + self._consume() + return ret + + def read_array_header(self, write_bytes=None): + ret = self._unpack(EX_READ_ARRAY_HEADER) + if write_bytes is not None: + warnings.warn("`write_bytes` option is deprecated. Use `.tell()` instead.", DeprecationWarning) + write_bytes(self._buffer[self._buf_checkpoint:self._buff_i]) + self._consume() + return ret + + def read_map_header(self, write_bytes=None): + ret = self._unpack(EX_READ_MAP_HEADER) + if write_bytes is not None: + warnings.warn("`write_bytes` option is deprecated. 
Use `.tell()` instead.", DeprecationWarning) + write_bytes(self._buffer[self._buf_checkpoint:self._buff_i]) + self._consume() + return ret + + def tell(self): + return self._stream_offset + + +class Packer(object): + """ + MessagePack Packer + + usage: + + packer = Packer() + astream.write(packer.pack(a)) + astream.write(packer.pack(b)) + + Packer's constructor has some keyword arguments: + + :param callable default: + Convert user type to builtin type that Packer supports. + See also simplejson's document. + + :param bool use_single_float: + Use single precision float type for float. (default: False) + + :param bool autoreset: + Reset buffer after each pack and return its content as `bytes`. (default: True). + If set this to false, use `bytes()` to get content and `.reset()` to clear buffer. + + :param bool use_bin_type: + Use bin type introduced in msgpack spec 2.0 for bytes. + It also enables str8 type for unicode. + + :param bool strict_types: + If set to true, types will be checked to be exact. Derived classes + from serializeable types will not be serialized and will be + treated as unsupported type and forwarded to default. + Additionally tuples will not be serialized as lists. + This is useful when trying to implement accurate serialization + for python types. + + :param str encoding: + (deprecated) Convert unicode to bytes with this encoding. (default: 'utf-8') + + :param str unicode_errors: + Error handler for encoding unicode. (default: 'strict') + """ + def __init__(self, default=None, encoding=None, unicode_errors=None, + use_single_float=False, autoreset=True, use_bin_type=False, + strict_types=False): + if encoding is None: + encoding = 'utf_8' + else: + warnings.warn( + "encoding is deprecated, Use raw=False instead.", + PendingDeprecationWarning) + + if unicode_errors is None: + unicode_errors = 'strict' + + self._strict_types = strict_types + self._use_float = use_single_float + self._autoreset = autoreset + self._use_bin_type = use_bin_type + self._encoding = encoding + self._unicode_errors = unicode_errors + self._buffer = StringIO() + if default is not None: + if not callable(default): + raise TypeError("default must be callable") + self._default = default + + def _pack(self, obj, nest_limit=DEFAULT_RECURSE_LIMIT, + check=isinstance, check_type_strict=_check_type_strict): + default_used = False + if self._strict_types: + check = check_type_strict + list_types = list + else: + list_types = (list, tuple) + while True: + if nest_limit < 0: + raise PackValueError("recursion limit exceeded") + if obj is None: + return self._buffer.write(b"\xc0") + if check(obj, bool): + if obj: + return self._buffer.write(b"\xc3") + return self._buffer.write(b"\xc2") + if check(obj, int_types): + if 0 <= obj < 0x80: + return self._buffer.write(struct.pack("B", obj)) + if -0x20 <= obj < 0: + return self._buffer.write(struct.pack("b", obj)) + if 0x80 <= obj <= 0xff: + return self._buffer.write(struct.pack("BB", 0xcc, obj)) + if -0x80 <= obj < 0: + return self._buffer.write(struct.pack(">Bb", 0xd0, obj)) + if 0xff < obj <= 0xffff: + return self._buffer.write(struct.pack(">BH", 0xcd, obj)) + if -0x8000 <= obj < -0x80: + return self._buffer.write(struct.pack(">Bh", 0xd1, obj)) + if 0xffff < obj <= 0xffffffff: + return self._buffer.write(struct.pack(">BI", 0xce, obj)) + if -0x80000000 <= obj < -0x8000: + return self._buffer.write(struct.pack(">Bi", 0xd2, obj)) + if 0xffffffff < obj <= 0xffffffffffffffff: + return self._buffer.write(struct.pack(">BQ", 0xcf, obj)) + if -0x8000000000000000 <= obj < 
-0x80000000: + return self._buffer.write(struct.pack(">Bq", 0xd3, obj)) + if not default_used and self._default is not None: + obj = self._default(obj) + default_used = True + continue + raise PackOverflowError("Integer value out of range") + if check(obj, (bytes, bytearray)): + n = len(obj) + if n >= 2**32: + raise PackValueError("%s is too large" % type(obj).__name__) + self._pack_bin_header(n) + return self._buffer.write(obj) + if check(obj, Unicode): + if self._encoding is None: + raise TypeError( + "Can't encode unicode string: " + "no encoding is specified") + obj = obj.encode(self._encoding, self._unicode_errors) + n = len(obj) + if n >= 2**32: + raise PackValueError("String is too large") + self._pack_raw_header(n) + return self._buffer.write(obj) + if check(obj, memoryview): + n = len(obj) * obj.itemsize + if n >= 2**32: + raise PackValueError("Memoryview is too large") + self._pack_bin_header(n) + return self._buffer.write(obj) + if check(obj, float): + if self._use_float: + return self._buffer.write(struct.pack(">Bf", 0xca, obj)) + return self._buffer.write(struct.pack(">Bd", 0xcb, obj)) + if check(obj, ExtType): + code = obj.code + data = obj.data + assert isinstance(code, int) + assert isinstance(data, bytes) + L = len(data) + if L == 1: + self._buffer.write(b'\xd4') + elif L == 2: + self._buffer.write(b'\xd5') + elif L == 4: + self._buffer.write(b'\xd6') + elif L == 8: + self._buffer.write(b'\xd7') + elif L == 16: + self._buffer.write(b'\xd8') + elif L <= 0xff: + self._buffer.write(struct.pack(">BB", 0xc7, L)) + elif L <= 0xffff: + self._buffer.write(struct.pack(">BH", 0xc8, L)) + else: + self._buffer.write(struct.pack(">BI", 0xc9, L)) + self._buffer.write(struct.pack("b", code)) + self._buffer.write(data) + return + if check(obj, list_types): + n = len(obj) + self._pack_array_header(n) + for i in xrange(n): + self._pack(obj[i], nest_limit - 1) + return + if check(obj, dict): + return self._pack_map_pairs(len(obj), dict_iteritems(obj), + nest_limit - 1) + if not default_used and self._default is not None: + obj = self._default(obj) + default_used = 1 + continue + raise TypeError("Cannot serialize %r" % (obj, )) + + def pack(self, obj): + try: + self._pack(obj) + except: + self._buffer = StringIO() # force reset + raise + ret = self._buffer.getvalue() + if self._autoreset: + self._buffer = StringIO() + elif USING_STRINGBUILDER: + self._buffer = StringIO(ret) + return ret + + def pack_map_pairs(self, pairs): + self._pack_map_pairs(len(pairs), pairs) + ret = self._buffer.getvalue() + if self._autoreset: + self._buffer = StringIO() + elif USING_STRINGBUILDER: + self._buffer = StringIO(ret) + return ret + + def pack_array_header(self, n): + if n >= 2**32: + raise PackValueError + self._pack_array_header(n) + ret = self._buffer.getvalue() + if self._autoreset: + self._buffer = StringIO() + elif USING_STRINGBUILDER: + self._buffer = StringIO(ret) + return ret + + def pack_map_header(self, n): + if n >= 2**32: + raise PackValueError + self._pack_map_header(n) + ret = self._buffer.getvalue() + if self._autoreset: + self._buffer = StringIO() + elif USING_STRINGBUILDER: + self._buffer = StringIO(ret) + return ret + + def pack_ext_type(self, typecode, data): + if not isinstance(typecode, int): + raise TypeError("typecode must have int type.") + if not 0 <= typecode <= 127: + raise ValueError("typecode should be 0-127") + if not isinstance(data, bytes): + raise TypeError("data must have bytes type") + L = len(data) + if L > 0xffffffff: + raise PackValueError("Too large data") + if L == 1: 
+ self._buffer.write(b'\xd4') + elif L == 2: + self._buffer.write(b'\xd5') + elif L == 4: + self._buffer.write(b'\xd6') + elif L == 8: + self._buffer.write(b'\xd7') + elif L == 16: + self._buffer.write(b'\xd8') + elif L <= 0xff: + self._buffer.write(b'\xc7' + struct.pack('B', L)) + elif L <= 0xffff: + self._buffer.write(b'\xc8' + struct.pack('>H', L)) + else: + self._buffer.write(b'\xc9' + struct.pack('>I', L)) + self._buffer.write(struct.pack('B', typecode)) + self._buffer.write(data) + + def _pack_array_header(self, n): + if n <= 0x0f: + return self._buffer.write(struct.pack('B', 0x90 + n)) + if n <= 0xffff: + return self._buffer.write(struct.pack(">BH", 0xdc, n)) + if n <= 0xffffffff: + return self._buffer.write(struct.pack(">BI", 0xdd, n)) + raise PackValueError("Array is too large") + + def _pack_map_header(self, n): + if n <= 0x0f: + return self._buffer.write(struct.pack('B', 0x80 + n)) + if n <= 0xffff: + return self._buffer.write(struct.pack(">BH", 0xde, n)) + if n <= 0xffffffff: + return self._buffer.write(struct.pack(">BI", 0xdf, n)) + raise PackValueError("Dict is too large") + + def _pack_map_pairs(self, n, pairs, nest_limit=DEFAULT_RECURSE_LIMIT): + self._pack_map_header(n) + for (k, v) in pairs: + self._pack(k, nest_limit - 1) + self._pack(v, nest_limit - 1) + + def _pack_raw_header(self, n): + if n <= 0x1f: + self._buffer.write(struct.pack('B', 0xa0 + n)) + elif self._use_bin_type and n <= 0xff: + self._buffer.write(struct.pack('>BB', 0xd9, n)) + elif n <= 0xffff: + self._buffer.write(struct.pack(">BH", 0xda, n)) + elif n <= 0xffffffff: + self._buffer.write(struct.pack(">BI", 0xdb, n)) + else: + raise PackValueError('Raw is too large') + + def _pack_bin_header(self, n): + if not self._use_bin_type: + return self._pack_raw_header(n) + elif n <= 0xff: + return self._buffer.write(struct.pack('>BB', 0xc4, n)) + elif n <= 0xffff: + return self._buffer.write(struct.pack(">BH", 0xc5, n)) + elif n <= 0xffffffff: + return self._buffer.write(struct.pack(">BI", 0xc6, n)) + else: + raise PackValueError('Bin is too large') + + def bytes(self): + return self._buffer.getvalue() + + def reset(self): + self._buffer = StringIO() diff --git a/env/lib/python3.6/site-packages/pip/_vendor/packaging/_compat.py b/env/lib/python3.6/site-packages/pip/_vendor/packaging/_compat.py new file mode 100644 index 0000000..210bb80 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/packaging/_compat.py @@ -0,0 +1,30 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +from __future__ import absolute_import, division, print_function + +import sys + + +PY2 = sys.version_info[0] == 2 +PY3 = sys.version_info[0] == 3 + +# flake8: noqa + +if PY3: + string_types = str, +else: + string_types = basestring, + + +def with_metaclass(meta, *bases): + """ + Create a base class with a metaclass. + """ + # This requires a bit of explanation: the basic idea is to make a dummy + # metaclass for one level of class instantiation that replaces itself with + # the actual metaclass. 
+ class metaclass(meta): + def __new__(cls, name, this_bases, d): + return meta(name, bases, d) + return type.__new__(metaclass, 'temporary_class', (), {}) diff --git a/env/lib/python3.6/site-packages/pip/_vendor/packaging/_structures.py b/env/lib/python3.6/site-packages/pip/_vendor/packaging/_structures.py new file mode 100644 index 0000000..e9fc4a0 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/packaging/_structures.py @@ -0,0 +1,70 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +from __future__ import absolute_import, division, print_function + + +class Infinity(object): + + def __repr__(self): + return "Infinity" + + def __hash__(self): + return hash(repr(self)) + + def __lt__(self, other): + return False + + def __le__(self, other): + return False + + def __eq__(self, other): + return isinstance(other, self.__class__) + + def __ne__(self, other): + return not isinstance(other, self.__class__) + + def __gt__(self, other): + return True + + def __ge__(self, other): + return True + + def __neg__(self): + return NegativeInfinity + + +Infinity = Infinity() + + +class NegativeInfinity(object): + + def __repr__(self): + return "-Infinity" + + def __hash__(self): + return hash(repr(self)) + + def __lt__(self, other): + return True + + def __le__(self, other): + return True + + def __eq__(self, other): + return isinstance(other, self.__class__) + + def __ne__(self, other): + return not isinstance(other, self.__class__) + + def __gt__(self, other): + return False + + def __ge__(self, other): + return False + + def __neg__(self): + return Infinity + + +NegativeInfinity = NegativeInfinity() diff --git a/env/lib/python3.6/site-packages/pip/_vendor/packaging/markers.py b/env/lib/python3.6/site-packages/pip/_vendor/packaging/markers.py new file mode 100644 index 0000000..e5834ce --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/packaging/markers.py @@ -0,0 +1,301 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +from __future__ import absolute_import, division, print_function + +import operator +import os +import platform +import sys + +from pip._vendor.pyparsing import ParseException, ParseResults, stringStart, stringEnd +from pip._vendor.pyparsing import ZeroOrMore, Group, Forward, QuotedString +from pip._vendor.pyparsing import Literal as L # noqa + +from ._compat import string_types +from .specifiers import Specifier, InvalidSpecifier + + +__all__ = [ + "InvalidMarker", "UndefinedComparison", "UndefinedEnvironmentName", + "Marker", "default_environment", +] + + +class InvalidMarker(ValueError): + """ + An invalid marker was found, users should refer to PEP 508. + """ + + +class UndefinedComparison(ValueError): + """ + An invalid operation was attempted on a value that doesn't support it. + """ + + +class UndefinedEnvironmentName(ValueError): + """ + A name was attempted to be used that does not exist inside of the + environment. 
+ """ + + +class Node(object): + + def __init__(self, value): + self.value = value + + def __str__(self): + return str(self.value) + + def __repr__(self): + return "<{0}({1!r})>".format(self.__class__.__name__, str(self)) + + def serialize(self): + raise NotImplementedError + + +class Variable(Node): + + def serialize(self): + return str(self) + + +class Value(Node): + + def serialize(self): + return '"{0}"'.format(self) + + +class Op(Node): + + def serialize(self): + return str(self) + + +VARIABLE = ( + L("implementation_version") | + L("platform_python_implementation") | + L("implementation_name") | + L("python_full_version") | + L("platform_release") | + L("platform_version") | + L("platform_machine") | + L("platform_system") | + L("python_version") | + L("sys_platform") | + L("os_name") | + L("os.name") | # PEP-345 + L("sys.platform") | # PEP-345 + L("platform.version") | # PEP-345 + L("platform.machine") | # PEP-345 + L("platform.python_implementation") | # PEP-345 + L("python_implementation") | # undocumented setuptools legacy + L("extra") +) +ALIASES = { + 'os.name': 'os_name', + 'sys.platform': 'sys_platform', + 'platform.version': 'platform_version', + 'platform.machine': 'platform_machine', + 'platform.python_implementation': 'platform_python_implementation', + 'python_implementation': 'platform_python_implementation' +} +VARIABLE.setParseAction(lambda s, l, t: Variable(ALIASES.get(t[0], t[0]))) + +VERSION_CMP = ( + L("===") | + L("==") | + L(">=") | + L("<=") | + L("!=") | + L("~=") | + L(">") | + L("<") +) + +MARKER_OP = VERSION_CMP | L("not in") | L("in") +MARKER_OP.setParseAction(lambda s, l, t: Op(t[0])) + +MARKER_VALUE = QuotedString("'") | QuotedString('"') +MARKER_VALUE.setParseAction(lambda s, l, t: Value(t[0])) + +BOOLOP = L("and") | L("or") + +MARKER_VAR = VARIABLE | MARKER_VALUE + +MARKER_ITEM = Group(MARKER_VAR + MARKER_OP + MARKER_VAR) +MARKER_ITEM.setParseAction(lambda s, l, t: tuple(t[0])) + +LPAREN = L("(").suppress() +RPAREN = L(")").suppress() + +MARKER_EXPR = Forward() +MARKER_ATOM = MARKER_ITEM | Group(LPAREN + MARKER_EXPR + RPAREN) +MARKER_EXPR << MARKER_ATOM + ZeroOrMore(BOOLOP + MARKER_EXPR) + +MARKER = stringStart + MARKER_EXPR + stringEnd + + +def _coerce_parse_result(results): + if isinstance(results, ParseResults): + return [_coerce_parse_result(i) for i in results] + else: + return results + + +def _format_marker(marker, first=True): + assert isinstance(marker, (list, tuple, string_types)) + + # Sometimes we have a structure like [[...]] which is a single item list + # where the single item is itself it's own list. In that case we want skip + # the rest of this function so that we don't get extraneous () on the + # outside. 
+ if (isinstance(marker, list) and len(marker) == 1 and + isinstance(marker[0], (list, tuple))): + return _format_marker(marker[0]) + + if isinstance(marker, list): + inner = (_format_marker(m, first=False) for m in marker) + if first: + return " ".join(inner) + else: + return "(" + " ".join(inner) + ")" + elif isinstance(marker, tuple): + return " ".join([m.serialize() for m in marker]) + else: + return marker + + +_operators = { + "in": lambda lhs, rhs: lhs in rhs, + "not in": lambda lhs, rhs: lhs not in rhs, + "<": operator.lt, + "<=": operator.le, + "==": operator.eq, + "!=": operator.ne, + ">=": operator.ge, + ">": operator.gt, +} + + +def _eval_op(lhs, op, rhs): + try: + spec = Specifier("".join([op.serialize(), rhs])) + except InvalidSpecifier: + pass + else: + return spec.contains(lhs) + + oper = _operators.get(op.serialize()) + if oper is None: + raise UndefinedComparison( + "Undefined {0!r} on {1!r} and {2!r}.".format(op, lhs, rhs) + ) + + return oper(lhs, rhs) + + +_undefined = object() + + +def _get_env(environment, name): + value = environment.get(name, _undefined) + + if value is _undefined: + raise UndefinedEnvironmentName( + "{0!r} does not exist in evaluation environment.".format(name) + ) + + return value + + +def _evaluate_markers(markers, environment): + groups = [[]] + + for marker in markers: + assert isinstance(marker, (list, tuple, string_types)) + + if isinstance(marker, list): + groups[-1].append(_evaluate_markers(marker, environment)) + elif isinstance(marker, tuple): + lhs, op, rhs = marker + + if isinstance(lhs, Variable): + lhs_value = _get_env(environment, lhs.value) + rhs_value = rhs.value + else: + lhs_value = lhs.value + rhs_value = _get_env(environment, rhs.value) + + groups[-1].append(_eval_op(lhs_value, op, rhs_value)) + else: + assert marker in ["and", "or"] + if marker == "or": + groups.append([]) + + return any(all(item) for item in groups) + + +def format_full_version(info): + version = '{0.major}.{0.minor}.{0.micro}'.format(info) + kind = info.releaselevel + if kind != 'final': + version += kind[0] + str(info.serial) + return version + + +def default_environment(): + if hasattr(sys, 'implementation'): + iver = format_full_version(sys.implementation.version) + implementation_name = sys.implementation.name + else: + iver = '0' + implementation_name = '' + + return { + "implementation_name": implementation_name, + "implementation_version": iver, + "os_name": os.name, + "platform_machine": platform.machine(), + "platform_release": platform.release(), + "platform_system": platform.system(), + "platform_version": platform.version(), + "python_full_version": platform.python_version(), + "platform_python_implementation": platform.python_implementation(), + "python_version": platform.python_version()[:3], + "sys_platform": sys.platform, + } + + +class Marker(object): + + def __init__(self, marker): + try: + self._markers = _coerce_parse_result(MARKER.parseString(marker)) + except ParseException as e: + err_str = "Invalid marker: {0!r}, parse error at {1!r}".format( + marker, marker[e.loc:e.loc + 8]) + raise InvalidMarker(err_str) + + def __str__(self): + return _format_marker(self._markers) + + def __repr__(self): + return "<Marker({0!r})>".format(str(self)) + + def evaluate(self, environment=None): + """Evaluate a marker. + + Return the boolean from evaluating the given marker against the + environment. environment is an optional argument to override all or + part of the determined environment. 
+ + The environment is determined from the current Python process. + """ + current_environment = default_environment() + if environment is not None: + current_environment.update(environment) + + return _evaluate_markers(self._markers, current_environment) diff --git a/env/lib/python3.6/site-packages/pip/_vendor/packaging/requirements.py b/env/lib/python3.6/site-packages/pip/_vendor/packaging/requirements.py new file mode 100644 index 0000000..2760483 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/packaging/requirements.py @@ -0,0 +1,130 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +from __future__ import absolute_import, division, print_function + +import string +import re + +from pip._vendor.pyparsing import stringStart, stringEnd, originalTextFor, ParseException +from pip._vendor.pyparsing import ZeroOrMore, Word, Optional, Regex, Combine +from pip._vendor.pyparsing import Literal as L # noqa +from pip._vendor.six.moves.urllib import parse as urlparse + +from .markers import MARKER_EXPR, Marker +from .specifiers import LegacySpecifier, Specifier, SpecifierSet + + +class InvalidRequirement(ValueError): + """ + An invalid requirement was found, users should refer to PEP 508. + """ + + +ALPHANUM = Word(string.ascii_letters + string.digits) + +LBRACKET = L("[").suppress() +RBRACKET = L("]").suppress() +LPAREN = L("(").suppress() +RPAREN = L(")").suppress() +COMMA = L(",").suppress() +SEMICOLON = L(";").suppress() +AT = L("@").suppress() + +PUNCTUATION = Word("-_.") +IDENTIFIER_END = ALPHANUM | (ZeroOrMore(PUNCTUATION) + ALPHANUM) +IDENTIFIER = Combine(ALPHANUM + ZeroOrMore(IDENTIFIER_END)) + +NAME = IDENTIFIER("name") +EXTRA = IDENTIFIER + +URI = Regex(r'[^ ]+')("url") +URL = (AT + URI) + +EXTRAS_LIST = EXTRA + ZeroOrMore(COMMA + EXTRA) +EXTRAS = (LBRACKET + Optional(EXTRAS_LIST) + RBRACKET)("extras") + +VERSION_PEP440 = Regex(Specifier._regex_str, re.VERBOSE | re.IGNORECASE) +VERSION_LEGACY = Regex(LegacySpecifier._regex_str, re.VERBOSE | re.IGNORECASE) + +VERSION_ONE = VERSION_PEP440 ^ VERSION_LEGACY +VERSION_MANY = Combine(VERSION_ONE + ZeroOrMore(COMMA + VERSION_ONE), + joinString=",", adjacent=False)("_raw_spec") +_VERSION_SPEC = Optional(((LPAREN + VERSION_MANY + RPAREN) | VERSION_MANY)) +_VERSION_SPEC.setParseAction(lambda s, l, t: t._raw_spec or '') + +VERSION_SPEC = originalTextFor(_VERSION_SPEC)("specifier") +VERSION_SPEC.setParseAction(lambda s, l, t: t[1]) + +MARKER_EXPR = originalTextFor(MARKER_EXPR())("marker") +MARKER_EXPR.setParseAction( + lambda s, l, t: Marker(s[t._original_start:t._original_end]) +) +MARKER_SEPARATOR = SEMICOLON +MARKER = MARKER_SEPARATOR + MARKER_EXPR + +VERSION_AND_MARKER = VERSION_SPEC + Optional(MARKER) +URL_AND_MARKER = URL + Optional(MARKER) + +NAMED_REQUIREMENT = \ + NAME + Optional(EXTRAS) + (URL_AND_MARKER | VERSION_AND_MARKER) + +REQUIREMENT = stringStart + NAMED_REQUIREMENT + stringEnd +# pyparsing isn't thread safe during initialization, so we do it eagerly, see +# issue #104 +REQUIREMENT.parseString("x[]") + + +class Requirement(object): + """Parse a requirement. + + Parse a given requirement string into its parts, such as name, specifier, + URL, and extras. Raises InvalidRequirement on a badly-formed requirement + string. + """ + + # TODO: Can we test whether something is contained within a requirement? + # If so how do we do that? 
Do we need to test against the _name_ of + # the thing as well as the version? What about the markers? + # TODO: Can we normalize the name and extra name? + + def __init__(self, requirement_string): + try: + req = REQUIREMENT.parseString(requirement_string) + except ParseException as e: + raise InvalidRequirement( + "Invalid requirement, parse error at \"{0!r}\"".format( + requirement_string[e.loc:e.loc + 8])) + + self.name = req.name + if req.url: + parsed_url = urlparse.urlparse(req.url) + if not (parsed_url.scheme and parsed_url.netloc) or ( + not parsed_url.scheme and not parsed_url.netloc): + raise InvalidRequirement("Invalid URL given") + self.url = req.url + else: + self.url = None + self.extras = set(req.extras.asList() if req.extras else []) + self.specifier = SpecifierSet(req.specifier) + self.marker = req.marker if req.marker else None + + def __str__(self): + parts = [self.name] + + if self.extras: + parts.append("[{0}]".format(",".join(sorted(self.extras)))) + + if self.specifier: + parts.append(str(self.specifier)) + + if self.url: + parts.append("@ {0}".format(self.url)) + + if self.marker: + parts.append("; {0}".format(self.marker)) + + return "".join(parts) + + def __repr__(self): + return "<Requirement({0!r})>".format(str(self)) diff --git a/env/lib/python3.6/site-packages/pip/_vendor/packaging/specifiers.py b/env/lib/python3.6/site-packages/pip/_vendor/packaging/specifiers.py new file mode 100644 index 0000000..9b6353f --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/packaging/specifiers.py @@ -0,0 +1,774 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +from __future__ import absolute_import, division, print_function + +import abc +import functools +import itertools +import re + +from ._compat import string_types, with_metaclass +from .version import Version, LegacyVersion, parse + + +class InvalidSpecifier(ValueError): + """ + An invalid specifier was found, users should refer to PEP 440. + """ + + +class BaseSpecifier(with_metaclass(abc.ABCMeta, object)): + + @abc.abstractmethod + def __str__(self): + """ + Returns the str representation of this Specifier like object. This + should be representative of the Specifier itself. + """ + + @abc.abstractmethod + def __hash__(self): + """ + Returns a hash value for this Specifier like object. + """ + + @abc.abstractmethod + def __eq__(self, other): + """ + Returns a boolean representing whether or not the two Specifier like + objects are equal. + """ + + @abc.abstractmethod + def __ne__(self, other): + """ + Returns a boolean representing whether or not the two Specifier like + objects are not equal. + """ + + @abc.abstractproperty + def prereleases(self): + """ + Returns whether or not pre-releases as a whole are allowed by this + specifier. + """ + + @prereleases.setter + def prereleases(self, value): + """ + Sets whether or not pre-releases as a whole are allowed by this + specifier. + """ + + @abc.abstractmethod + def contains(self, item, prereleases=None): + """ + Determines if the given item is contained within this specifier. + """ + + @abc.abstractmethod + def filter(self, iterable, prereleases=None): + """ + Takes an iterable of items and filters them so that only items which + are contained within this specifier are allowed in it. 
+ """ + + +class _IndividualSpecifier(BaseSpecifier): + + _operators = {} + + def __init__(self, spec="", prereleases=None): + match = self._regex.search(spec) + if not match: + raise InvalidSpecifier("Invalid specifier: '{0}'".format(spec)) + + self._spec = ( + match.group("operator").strip(), + match.group("version").strip(), + ) + + # Store whether or not this Specifier should accept prereleases + self._prereleases = prereleases + + def __repr__(self): + pre = ( + ", prereleases={0!r}".format(self.prereleases) + if self._prereleases is not None + else "" + ) + + return "<{0}({1!r}{2})>".format( + self.__class__.__name__, + str(self), + pre, + ) + + def __str__(self): + return "{0}{1}".format(*self._spec) + + def __hash__(self): + return hash(self._spec) + + def __eq__(self, other): + if isinstance(other, string_types): + try: + other = self.__class__(other) + except InvalidSpecifier: + return NotImplemented + elif not isinstance(other, self.__class__): + return NotImplemented + + return self._spec == other._spec + + def __ne__(self, other): + if isinstance(other, string_types): + try: + other = self.__class__(other) + except InvalidSpecifier: + return NotImplemented + elif not isinstance(other, self.__class__): + return NotImplemented + + return self._spec != other._spec + + def _get_operator(self, op): + return getattr(self, "_compare_{0}".format(self._operators[op])) + + def _coerce_version(self, version): + if not isinstance(version, (LegacyVersion, Version)): + version = parse(version) + return version + + @property + def operator(self): + return self._spec[0] + + @property + def version(self): + return self._spec[1] + + @property + def prereleases(self): + return self._prereleases + + @prereleases.setter + def prereleases(self, value): + self._prereleases = value + + def __contains__(self, item): + return self.contains(item) + + def contains(self, item, prereleases=None): + # Determine if prereleases are to be allowed or not. + if prereleases is None: + prereleases = self.prereleases + + # Normalize item to a Version or LegacyVersion, this allows us to have + # a shortcut for ``"2.0" in Specifier(">=2") + item = self._coerce_version(item) + + # Determine if we should be supporting prereleases in this specifier + # or not, if we do not support prereleases than we can short circuit + # logic if this version is a prereleases. + if item.is_prerelease and not prereleases: + return False + + # Actually do the comparison to determine if this item is contained + # within this Specifier or not. + return self._get_operator(self.operator)(item, self.version) + + def filter(self, iterable, prereleases=None): + yielded = False + found_prereleases = [] + + kw = {"prereleases": prereleases if prereleases is not None else True} + + # Attempt to iterate over all the values in the iterable and if any of + # them match, yield them. + for version in iterable: + parsed_version = self._coerce_version(version) + + if self.contains(parsed_version, **kw): + # If our version is a prerelease, and we were not set to allow + # prereleases, then we'll store it for later incase nothing + # else matches this specifier. + if (parsed_version.is_prerelease and not + (prereleases or self.prereleases)): + found_prereleases.append(version) + # Either this is not a prerelease, or we should have been + # accepting prereleases from the beginning. 
+ else: + yielded = True + yield version + + # Now that we've iterated over everything, determine if we've yielded + # any values, and if we have not and we have any prereleases stored up + # then we will go ahead and yield the prereleases. + if not yielded and found_prereleases: + for version in found_prereleases: + yield version + + +class LegacySpecifier(_IndividualSpecifier): + + _regex_str = ( + r""" + (?P<operator>(==|!=|<=|>=|<|>)) + \s* + (?P<version> + [^,;\s)]* # Since this is a "legacy" specifier, and the version + # string can be just about anything, we match everything + # except for whitespace, a semi-colon for marker support, + # a closing paren since versions can be enclosed in + # them, and a comma since it's a version separator. + ) + """ + ) + + _regex = re.compile( + r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE) + + _operators = { + "==": "equal", + "!=": "not_equal", + "<=": "less_than_equal", + ">=": "greater_than_equal", + "<": "less_than", + ">": "greater_than", + } + + def _coerce_version(self, version): + if not isinstance(version, LegacyVersion): + version = LegacyVersion(str(version)) + return version + + def _compare_equal(self, prospective, spec): + return prospective == self._coerce_version(spec) + + def _compare_not_equal(self, prospective, spec): + return prospective != self._coerce_version(spec) + + def _compare_less_than_equal(self, prospective, spec): + return prospective <= self._coerce_version(spec) + + def _compare_greater_than_equal(self, prospective, spec): + return prospective >= self._coerce_version(spec) + + def _compare_less_than(self, prospective, spec): + return prospective < self._coerce_version(spec) + + def _compare_greater_than(self, prospective, spec): + return prospective > self._coerce_version(spec) + + +def _require_version_compare(fn): + @functools.wraps(fn) + def wrapped(self, prospective, spec): + if not isinstance(prospective, Version): + return False + return fn(self, prospective, spec) + return wrapped + + +class Specifier(_IndividualSpecifier): + + _regex_str = ( + r""" + (?P<operator>(~=|==|!=|<=|>=|<|>|===)) + (?P<version> + (?: + # The identity operators allow for an escape hatch that will + # do an exact string match of the version you wish to install. + # This will not be parsed by PEP 440 and we cannot determine + # any semantic meaning from it. This operator is discouraged + # but included entirely as an escape hatch. + (?<====) # Only match for the identity operator + \s* + [^\s]* # We just match everything, except for whitespace + # since we are only testing for strict identity. + ) + | + (?: + # The (non)equality operators allow for wild card and local + # versions to be specified so we have to define these two + # operators separately to enable that. + (?<===|!=) # Only match for equals and not equals + + \s* + v? + (?:[0-9]+!)? # epoch + [0-9]+(?:\.[0-9]+)* # release + (?: # pre release + [-_\.]? + (a|b|c|rc|alpha|beta|pre|preview) + [-_\.]? + [0-9]* + )? + (?: # post release + (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) + )? + + # You cannot use a wild card and a dev or local version + # together so group them with a | and make them optional. + (?: + (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release + (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local + | + \.\* # Wild card syntax of .* + )? + ) + | + (?: + # The compatible operator requires at least two digits in the + # release segment. + (?<=~=) # Only match for the compatible operator + + \s* + v? + (?:[0-9]+!)? 
# epoch + [0-9]+(?:\.[0-9]+)+ # release (We have a + instead of a *) + (?: # pre release + [-_\.]? + (a|b|c|rc|alpha|beta|pre|preview) + [-_\.]? + [0-9]* + )? + (?: # post release + (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) + )? + (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release + ) + | + (?: + # All other operators only allow a sub set of what the + # (non)equality operators do. Specifically they do not allow + # local versions to be specified nor do they allow the prefix + # matching wild cards. + (?<!==|!=|~=) # We have special cases for these + # operators so we want to make sure they + # don't match here. + + \s* + v? + (?:[0-9]+!)? # epoch + [0-9]+(?:\.[0-9]+)* # release + (?: # pre release + [-_\.]? + (a|b|c|rc|alpha|beta|pre|preview) + [-_\.]? + [0-9]* + )? + (?: # post release + (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) + )? + (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release + ) + ) + """ + ) + + _regex = re.compile( + r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE) + + _operators = { + "~=": "compatible", + "==": "equal", + "!=": "not_equal", + "<=": "less_than_equal", + ">=": "greater_than_equal", + "<": "less_than", + ">": "greater_than", + "===": "arbitrary", + } + + @_require_version_compare + def _compare_compatible(self, prospective, spec): + # Compatible releases have an equivalent combination of >= and ==. That + # is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to + # implement this in terms of the other specifiers instead of + # implementing it ourselves. The only thing we need to do is construct + # the other specifiers. + + # We want everything but the last item in the version, but we want to + # ignore post and dev releases and we want to treat the pre-release as + # it's own separate segment. + prefix = ".".join( + list( + itertools.takewhile( + lambda x: (not x.startswith("post") and not + x.startswith("dev")), + _version_split(spec), + ) + )[:-1] + ) + + # Add the prefix notation to the end of our string + prefix += ".*" + + return (self._get_operator(">=")(prospective, spec) and + self._get_operator("==")(prospective, prefix)) + + @_require_version_compare + def _compare_equal(self, prospective, spec): + # We need special logic to handle prefix matching + if spec.endswith(".*"): + # In the case of prefix matching we want to ignore local segment. + prospective = Version(prospective.public) + # Split the spec out by dots, and pretend that there is an implicit + # dot in between a release segment and a pre-release segment. + spec = _version_split(spec[:-2]) # Remove the trailing .* + + # Split the prospective version out by dots, and pretend that there + # is an implicit dot in between a release segment and a pre-release + # segment. + prospective = _version_split(str(prospective)) + + # Shorten the prospective version to be the same length as the spec + # so that we can determine if the specifier is a prefix of the + # prospective version or not. + prospective = prospective[:len(spec)] + + # Pad out our two sides with zeros so that they both equal the same + # length. + spec, prospective = _pad_version(spec, prospective) + else: + # Convert our spec string into a Version + spec = Version(spec) + + # If the specifier does not have a local segment, then we want to + # act as if the prospective version also does not have a local + # segment. 
+ if not spec.local: + prospective = Version(prospective.public) + + return prospective == spec + + @_require_version_compare + def _compare_not_equal(self, prospective, spec): + return not self._compare_equal(prospective, spec) + + @_require_version_compare + def _compare_less_than_equal(self, prospective, spec): + return prospective <= Version(spec) + + @_require_version_compare + def _compare_greater_than_equal(self, prospective, spec): + return prospective >= Version(spec) + + @_require_version_compare + def _compare_less_than(self, prospective, spec): + # Convert our spec to a Version instance, since we'll want to work with + # it as a version. + spec = Version(spec) + + # Check to see if the prospective version is less than the spec + # version. If it's not we can short circuit and just return False now + # instead of doing extra unneeded work. + if not prospective < spec: + return False + + # This special case is here so that, unless the specifier itself + # includes is a pre-release version, that we do not accept pre-release + # versions for the version mentioned in the specifier (e.g. <3.1 should + # not match 3.1.dev0, but should match 3.0.dev0). + if not spec.is_prerelease and prospective.is_prerelease: + if Version(prospective.base_version) == Version(spec.base_version): + return False + + # If we've gotten to here, it means that prospective version is both + # less than the spec version *and* it's not a pre-release of the same + # version in the spec. + return True + + @_require_version_compare + def _compare_greater_than(self, prospective, spec): + # Convert our spec to a Version instance, since we'll want to work with + # it as a version. + spec = Version(spec) + + # Check to see if the prospective version is greater than the spec + # version. If it's not we can short circuit and just return False now + # instead of doing extra unneeded work. + if not prospective > spec: + return False + + # This special case is here so that, unless the specifier itself + # includes is a post-release version, that we do not accept + # post-release versions for the version mentioned in the specifier + # (e.g. >3.1 should not match 3.0.post0, but should match 3.2.post0). + if not spec.is_postrelease and prospective.is_postrelease: + if Version(prospective.base_version) == Version(spec.base_version): + return False + + # Ensure that we do not allow a local version of the version mentioned + # in the specifier, which is techincally greater than, to match. + if prospective.local is not None: + if Version(prospective.base_version) == Version(spec.base_version): + return False + + # If we've gotten to here, it means that prospective version is both + # greater than the spec version *and* it's not a pre-release of the + # same version in the spec. + return True + + def _compare_arbitrary(self, prospective, spec): + return str(prospective).lower() == str(spec).lower() + + @property + def prereleases(self): + # If there is an explicit prereleases set for this, then we'll just + # blindly use that. + if self._prereleases is not None: + return self._prereleases + + # Look at all of our specifiers and determine if they are inclusive + # operators, and if they are if they are including an explicit + # prerelease. + operator, version = self._spec + if operator in ["==", ">=", "<=", "~=", "==="]: + # The == specifier can include a trailing .*, if it does we + # want to remove before parsing. 
+ if operator == "==" and version.endswith(".*"): + version = version[:-2] + + # Parse the version, and if it is a pre-release than this + # specifier allows pre-releases. + if parse(version).is_prerelease: + return True + + return False + + @prereleases.setter + def prereleases(self, value): + self._prereleases = value + + +_prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$") + + +def _version_split(version): + result = [] + for item in version.split("."): + match = _prefix_regex.search(item) + if match: + result.extend(match.groups()) + else: + result.append(item) + return result + + +def _pad_version(left, right): + left_split, right_split = [], [] + + # Get the release segment of our versions + left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left))) + right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right))) + + # Get the rest of our versions + left_split.append(left[len(left_split[0]):]) + right_split.append(right[len(right_split[0]):]) + + # Insert our padding + left_split.insert( + 1, + ["0"] * max(0, len(right_split[0]) - len(left_split[0])), + ) + right_split.insert( + 1, + ["0"] * max(0, len(left_split[0]) - len(right_split[0])), + ) + + return ( + list(itertools.chain(*left_split)), + list(itertools.chain(*right_split)), + ) + + +class SpecifierSet(BaseSpecifier): + + def __init__(self, specifiers="", prereleases=None): + # Split on , to break each indidivual specifier into it's own item, and + # strip each item to remove leading/trailing whitespace. + specifiers = [s.strip() for s in specifiers.split(",") if s.strip()] + + # Parsed each individual specifier, attempting first to make it a + # Specifier and falling back to a LegacySpecifier. + parsed = set() + for specifier in specifiers: + try: + parsed.add(Specifier(specifier)) + except InvalidSpecifier: + parsed.add(LegacySpecifier(specifier)) + + # Turn our parsed specifiers into a frozen set and save them for later. + self._specs = frozenset(parsed) + + # Store our prereleases value so we can use it later to determine if + # we accept prereleases or not. + self._prereleases = prereleases + + def __repr__(self): + pre = ( + ", prereleases={0!r}".format(self.prereleases) + if self._prereleases is not None + else "" + ) + + return "<SpecifierSet({0!r}{1})>".format(str(self), pre) + + def __str__(self): + return ",".join(sorted(str(s) for s in self._specs)) + + def __hash__(self): + return hash(self._specs) + + def __and__(self, other): + if isinstance(other, string_types): + other = SpecifierSet(other) + elif not isinstance(other, SpecifierSet): + return NotImplemented + + specifier = SpecifierSet() + specifier._specs = frozenset(self._specs | other._specs) + + if self._prereleases is None and other._prereleases is not None: + specifier._prereleases = other._prereleases + elif self._prereleases is not None and other._prereleases is None: + specifier._prereleases = self._prereleases + elif self._prereleases == other._prereleases: + specifier._prereleases = self._prereleases + else: + raise ValueError( + "Cannot combine SpecifierSets with True and False prerelease " + "overrides." 
+ ) + + return specifier + + def __eq__(self, other): + if isinstance(other, string_types): + other = SpecifierSet(other) + elif isinstance(other, _IndividualSpecifier): + other = SpecifierSet(str(other)) + elif not isinstance(other, SpecifierSet): + return NotImplemented + + return self._specs == other._specs + + def __ne__(self, other): + if isinstance(other, string_types): + other = SpecifierSet(other) + elif isinstance(other, _IndividualSpecifier): + other = SpecifierSet(str(other)) + elif not isinstance(other, SpecifierSet): + return NotImplemented + + return self._specs != other._specs + + def __len__(self): + return len(self._specs) + + def __iter__(self): + return iter(self._specs) + + @property + def prereleases(self): + # If we have been given an explicit prerelease modifier, then we'll + # pass that through here. + if self._prereleases is not None: + return self._prereleases + + # If we don't have any specifiers, and we don't have a forced value, + # then we'll just return None since we don't know if this should have + # pre-releases or not. + if not self._specs: + return None + + # Otherwise we'll see if any of the given specifiers accept + # prereleases, if any of them do we'll return True, otherwise False. + return any(s.prereleases for s in self._specs) + + @prereleases.setter + def prereleases(self, value): + self._prereleases = value + + def __contains__(self, item): + return self.contains(item) + + def contains(self, item, prereleases=None): + # Ensure that our item is a Version or LegacyVersion instance. + if not isinstance(item, (LegacyVersion, Version)): + item = parse(item) + + # Determine if we're forcing a prerelease or not, if we're not forcing + # one for this particular filter call, then we'll use whatever the + # SpecifierSet thinks for whether or not we should support prereleases. + if prereleases is None: + prereleases = self.prereleases + + # We can determine if we're going to allow pre-releases by looking to + # see if any of the underlying items supports them. If none of them do + # and this item is a pre-release then we do not allow it and we can + # short circuit that here. + # Note: This means that 1.0.dev1 would not be contained in something + # like >=1.0.devabc however it would be in >=1.0.debabc,>0.0.dev0 + if not prereleases and item.is_prerelease: + return False + + # We simply dispatch to the underlying specs here to make sure that the + # given version is contained within all of them. + # Note: This use of all() here means that an empty set of specifiers + # will always return True, this is an explicit design decision. + return all( + s.contains(item, prereleases=prereleases) + for s in self._specs + ) + + def filter(self, iterable, prereleases=None): + # Determine if we're forcing a prerelease or not, if we're not forcing + # one for this particular filter call, then we'll use whatever the + # SpecifierSet thinks for whether or not we should support prereleases. + if prereleases is None: + prereleases = self.prereleases + + # If we have any specifiers, then we want to wrap our iterable in the + # filter method for each one, this will act as a logical AND amongst + # each specifier. + if self._specs: + for spec in self._specs: + iterable = spec.filter(iterable, prereleases=bool(prereleases)) + return iterable + # If we do not have any specifiers, then we need to have a rough filter + # which will filter out any pre-releases, unless there are no final + # releases, and which will filter out LegacyVersion in general. 
+ else: + filtered = [] + found_prereleases = [] + + for item in iterable: + # Ensure that we some kind of Version class for this item. + if not isinstance(item, (LegacyVersion, Version)): + parsed_version = parse(item) + else: + parsed_version = item + + # Filter out any item which is parsed as a LegacyVersion + if isinstance(parsed_version, LegacyVersion): + continue + + # Store any item which is a pre-release for later unless we've + # already found a final version or we are accepting prereleases + if parsed_version.is_prerelease and not prereleases: + if not filtered: + found_prereleases.append(item) + else: + filtered.append(item) + + # If we've found no items except for pre-releases, then we'll go + # ahead and use the pre-releases + if not filtered and found_prereleases and prereleases is None: + return found_prereleases + + return filtered diff --git a/env/lib/python3.6/site-packages/pip/_vendor/packaging/utils.py b/env/lib/python3.6/site-packages/pip/_vendor/packaging/utils.py new file mode 100644 index 0000000..4b94a82 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/packaging/utils.py @@ -0,0 +1,63 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +from __future__ import absolute_import, division, print_function + +import re + +from .version import InvalidVersion, Version + + +_canonicalize_regex = re.compile(r"[-_.]+") + + +def canonicalize_name(name): + # This is taken from PEP 503. + return _canonicalize_regex.sub("-", name).lower() + + +def canonicalize_version(version): + """ + This is very similar to Version.__str__, but has one subtle differences + with the way it handles the release segment. + """ + + try: + version = Version(version) + except InvalidVersion: + # Legacy versions cannot be normalized + return version + + parts = [] + + # Epoch + if version.epoch != 0: + parts.append("{0}!".format(version.epoch)) + + # Release segment + # NB: This strips trailing '.0's to normalize + parts.append( + re.sub( + r'(\.0)+$', + '', + ".".join(str(x) for x in version.release) + ) + ) + + # Pre-release + if version.pre is not None: + parts.append("".join(str(x) for x in version.pre)) + + # Post-release + if version.post is not None: + parts.append(".post{0}".format(version.post)) + + # Development release + if version.dev is not None: + parts.append(".dev{0}".format(version.dev)) + + # Local version segment + if version.local is not None: + parts.append("+{0}".format(version.local)) + + return "".join(parts) diff --git a/env/lib/python3.6/site-packages/pip/_vendor/packaging/version.py b/env/lib/python3.6/site-packages/pip/_vendor/packaging/version.py new file mode 100644 index 0000000..6ed5cbb --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/packaging/version.py @@ -0,0 +1,441 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+from __future__ import absolute_import, division, print_function + +import collections +import itertools +import re + +from ._structures import Infinity + + +__all__ = [ + "parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN" +] + + +_Version = collections.namedtuple( + "_Version", + ["epoch", "release", "dev", "pre", "post", "local"], +) + + +def parse(version): + """ + Parse the given version string and return either a :class:`Version` object + or a :class:`LegacyVersion` object depending on if the given version is + a valid PEP 440 version or a legacy version. + """ + try: + return Version(version) + except InvalidVersion: + return LegacyVersion(version) + + +class InvalidVersion(ValueError): + """ + An invalid version was found, users should refer to PEP 440. + """ + + +class _BaseVersion(object): + + def __hash__(self): + return hash(self._key) + + def __lt__(self, other): + return self._compare(other, lambda s, o: s < o) + + def __le__(self, other): + return self._compare(other, lambda s, o: s <= o) + + def __eq__(self, other): + return self._compare(other, lambda s, o: s == o) + + def __ge__(self, other): + return self._compare(other, lambda s, o: s >= o) + + def __gt__(self, other): + return self._compare(other, lambda s, o: s > o) + + def __ne__(self, other): + return self._compare(other, lambda s, o: s != o) + + def _compare(self, other, method): + if not isinstance(other, _BaseVersion): + return NotImplemented + + return method(self._key, other._key) + + +class LegacyVersion(_BaseVersion): + + def __init__(self, version): + self._version = str(version) + self._key = _legacy_cmpkey(self._version) + + def __str__(self): + return self._version + + def __repr__(self): + return "<LegacyVersion({0})>".format(repr(str(self))) + + @property + def public(self): + return self._version + + @property + def base_version(self): + return self._version + + @property + def epoch(self): + return -1 + + @property + def release(self): + return None + + @property + def pre(self): + return None + + @property + def post(self): + return None + + @property + def dev(self): + return None + + @property + def local(self): + return None + + @property + def is_prerelease(self): + return False + + @property + def is_postrelease(self): + return False + + @property + def is_devrelease(self): + return False + + +_legacy_version_component_re = re.compile( + r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE, +) + +_legacy_version_replacement_map = { + "pre": "c", "preview": "c", "-": "final-", "rc": "c", "dev": "@", +} + + +def _parse_version_parts(s): + for part in _legacy_version_component_re.split(s): + part = _legacy_version_replacement_map.get(part, part) + + if not part or part == ".": + continue + + if part[:1] in "0123456789": + # pad for numeric comparison + yield part.zfill(8) + else: + yield "*" + part + + # ensure that alpha/beta/candidate are before final + yield "*final" + + +def _legacy_cmpkey(version): + # We hardcode an epoch of -1 here. A PEP 440 version can only have a epoch + # greater than or equal to 0. This will effectively put the LegacyVersion, + # which uses the defacto standard originally implemented by setuptools, + # as before all PEP 440 versions. + epoch = -1 + + # This scheme is taken from pkg_resources.parse_version setuptools prior to + # it's adoption of the packaging library. 
+    parts = []
+    for part in _parse_version_parts(version.lower()):
+        if part.startswith("*"):
+            # remove "-" before a prerelease tag
+            if part < "*final":
+                while parts and parts[-1] == "*final-":
+                    parts.pop()
+
+            # remove trailing zeros from each series of numeric parts
+            while parts and parts[-1] == "00000000":
+                parts.pop()
+
+        parts.append(part)
+    parts = tuple(parts)
+
+    return epoch, parts
+
+
+# Deliberately not anchored to the start and end of the string, to make it
+# easier for 3rd party code to reuse
+VERSION_PATTERN = r"""
+    v?
+    (?:
+        (?:(?P<epoch>[0-9]+)!)?                           # epoch
+        (?P<release>[0-9]+(?:\.[0-9]+)*)                  # release segment
+        (?P<pre>                                          # pre-release
+            [-_\.]?
+            (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))
+            [-_\.]?
+            (?P<pre_n>[0-9]+)?
+        )?
+        (?P<post>                                         # post release
+            (?:-(?P<post_n1>[0-9]+))
+            |
+            (?:
+                [-_\.]?
+                (?P<post_l>post|rev|r)
+                [-_\.]?
+                (?P<post_n2>[0-9]+)?
+            )
+        )?
+        (?P<dev>                                          # dev release
+            [-_\.]?
+            (?P<dev_l>dev)
+            [-_\.]?
+            (?P<dev_n>[0-9]+)?
+        )?
+    )
+    (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
+"""
+
+
+class Version(_BaseVersion):
+
+    _regex = re.compile(
+        r"^\s*" + VERSION_PATTERN + r"\s*$",
+        re.VERBOSE | re.IGNORECASE,
+    )
+
+    def __init__(self, version):
+        # Validate the version and parse it into pieces
+        match = self._regex.search(version)
+        if not match:
+            raise InvalidVersion("Invalid version: '{0}'".format(version))
+
+        # Store the parsed out pieces of the version
+        self._version = _Version(
+            epoch=int(match.group("epoch")) if match.group("epoch") else 0,
+            release=tuple(int(i) for i in match.group("release").split(".")),
+            pre=_parse_letter_version(
+                match.group("pre_l"),
+                match.group("pre_n"),
+            ),
+            post=_parse_letter_version(
+                match.group("post_l"),
+                match.group("post_n1") or match.group("post_n2"),
+            ),
+            dev=_parse_letter_version(
+                match.group("dev_l"),
+                match.group("dev_n"),
+            ),
+            local=_parse_local_version(match.group("local")),
+        )
+
+        # Generate a key which will be used for sorting
+        self._key = _cmpkey(
+            self._version.epoch,
+            self._version.release,
+            self._version.pre,
+            self._version.post,
+            self._version.dev,
+            self._version.local,
+        )
+
+    def __repr__(self):
+        return "<Version({0})>".format(repr(str(self)))
+
+    def __str__(self):
+        parts = []
+
+        # Epoch
+        if self.epoch != 0:
+            parts.append("{0}!".format(self.epoch))
+
+        # Release segment
+        parts.append(".".join(str(x) for x in self.release))
+
+        # Pre-release
+        if self.pre is not None:
+            parts.append("".join(str(x) for x in self.pre))
+
+        # Post-release
+        if self.post is not None:
+            parts.append(".post{0}".format(self.post))
+
+        # Development release
+        if self.dev is not None:
+            parts.append(".dev{0}".format(self.dev))
+
+        # Local version segment
+        if self.local is not None:
+            parts.append("+{0}".format(self.local))
+
+        return "".join(parts)
+
+    @property
+    def epoch(self):
+        return self._version.epoch
+
+    @property
+    def release(self):
+        return self._version.release
+
+    @property
+    def pre(self):
+        return self._version.pre
+
+    @property
+    def post(self):
+        return self._version.post[1] if self._version.post else None
+
+    @property
+    def dev(self):
+        return self._version.dev[1] if self._version.dev else None
+
+    @property
+    def local(self):
+        if self._version.local:
+            return ".".join(str(x) for x in self._version.local)
+        else:
+            return None
+
+    @property
+    def public(self):
+        return str(self).split("+", 1)[0]
+
+    @property
+    def base_version(self):
+        parts = []
+
+        # Epoch
+        if self.epoch != 0:
+            parts.append("{0}!".format(self.epoch))
+
+        # Release segment
+        parts.append(".".join(str(x) for x in self.release))
+
+        return "".join(parts)
+
+    @property
+    def is_prerelease(self):
+        return self.dev is not None or self.pre is not None
+
+    @property
+    def is_postrelease(self):
+        return self.post is not None
+
+    @property
+    def is_devrelease(self):
+        return self.dev is not None
+
+
+def _parse_letter_version(letter, number):
+    if letter:
+        # We consider there to be an implicit 0 in a pre-release if there is
+        # not a numeral associated with it.
+        if number is None:
+            number = 0
+
+        # We normalize any letters to their lower case form
+        letter = letter.lower()
+
+        # We consider some words to be alternate spellings of other words and
+        # in those cases we want to normalize the spellings to our preferred
+        # spelling.
+        if letter == "alpha":
+            letter = "a"
+        elif letter == "beta":
+            letter = "b"
+        elif letter in ["c", "pre", "preview"]:
+            letter = "rc"
+        elif letter in ["rev", "r"]:
+            letter = "post"
+
+        return letter, int(number)
+    if not letter and number:
+        # We assume if we are given a number, but we are not given a letter
+        # then this is using the implicit post release syntax (e.g. 1.0-1)
+        letter = "post"
+
+        return letter, int(number)
+
+
+_local_version_separators = re.compile(r"[\._-]")
+
+
+def _parse_local_version(local):
+    """
+    Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
+    """
+    if local is not None:
+        return tuple(
+            part.lower() if not part.isdigit() else int(part)
+            for part in _local_version_separators.split(local)
+        )
+
+
+def _cmpkey(epoch, release, pre, post, dev, local):
+    # When we compare a release version, we want to compare it with all of the
+    # trailing zeros removed. So we'll use a reverse the list, drop all the now
+    # leading zeros until we come to something non zero, then take the rest
+    # re-reverse it back into the correct order and make it a tuple and use
+    # that for our sorting key.
+    release = tuple(
+        reversed(list(
+            itertools.dropwhile(
+                lambda x: x == 0,
+                reversed(release),
+            )
+        ))
+    )
+
+    # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
+    # We'll do this by abusing the pre segment, but we _only_ want to do this
+    # if there is not a pre or a post segment. If we have one of those then
+    # the normal sorting rules will handle this case correctly.
+    if pre is None and post is None and dev is not None:
+        pre = -Infinity
+    # Versions without a pre-release (except as noted above) should sort after
+    # those with one.
+    elif pre is None:
+        pre = Infinity
+
+    # Versions without a post segment should sort before those with one.
+    if post is None:
+        post = -Infinity
+
+    # Versions without a development segment should sort after those with one.
+    if dev is None:
+        dev = Infinity
+
+    if local is None:
+        # Versions without a local segment should sort before those with one.
+        local = -Infinity
+    else:
+        # Versions with a local segment need that segment parsed to implement
+        # the sorting rules in PEP440.
+        # - Alpha numeric segments sort before numeric segments
+        # - Alpha numeric segments sort lexicographically
+        # - Numeric segments sort numerically
+        # - Shorter versions sort before longer versions when the prefixes
+        #   match exactly
+        local = tuple(
+            (i, "") if isinstance(i, int) else (-Infinity, i)
+            for i in local
+        )
+
+    return epoch, release, pre, post, dev, local
diff --git a/env/lib/python3.6/site-packages/pip/_vendor/pkg_resources/py31compat.py b/env/lib/python3.6/site-packages/pip/_vendor/pkg_resources/py31compat.py
new file mode 100644
index 0000000..331a51b
--- /dev/null
+++ b/env/lib/python3.6/site-packages/pip/_vendor/pkg_resources/py31compat.py
@@ -0,0 +1,22 @@
+import os
+import errno
+import sys
+
+
+def _makedirs_31(path, exist_ok=False):
+    try:
+        os.makedirs(path)
+    except OSError as exc:
+        if not exist_ok or exc.errno != errno.EEXIST:
+            raise
+
+
+# rely on compatibility behavior until mode considerations
+# and exists_ok considerations are disentangled.
+# See https://github.com/pypa/setuptools/pull/1083#issuecomment-315168663
+needs_makedirs = (
+    sys.version_info < (3, 2, 5) or
+    (3, 3) <= sys.version_info < (3, 3, 6) or
+    (3, 4) <= sys.version_info < (3, 4, 1)
+)
+makedirs = _makedirs_31 if needs_makedirs else os.makedirs
diff --git a/env/lib/python3.6/site-packages/pip/_vendor/progress/bar.py b/env/lib/python3.6/site-packages/pip/_vendor/progress/bar.py
new file mode 100644
index 0000000..025e61c
--- /dev/null
+++ b/env/lib/python3.6/site-packages/pip/_vendor/progress/bar.py
@@ -0,0 +1,94 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2012 Giorgos Verigakis <verigak@gmail.com>
+#
+# Permission to use, copy, modify, and distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+from __future__ import unicode_literals
+
+import sys
+
+from . import Progress
+from .helpers import WritelnMixin
+
+
+class Bar(WritelnMixin, Progress):
+    width = 32
+    message = ''
+    suffix = '%(index)d/%(max)d'
+    bar_prefix = ' |'
+    bar_suffix = '| '
+    empty_fill = ' '
+    fill = '#'
+    hide_cursor = True
+
+    def update(self):
+        filled_length = int(self.width * self.progress)
+        empty_length = self.width - filled_length
+
+        message = self.message % self
+        bar = self.fill * filled_length
+        empty = self.empty_fill * empty_length
+        suffix = self.suffix % self
+        line = ''.join([message, self.bar_prefix, bar, empty, self.bar_suffix,
+                        suffix])
+        self.writeln(line)
+
+
+class ChargingBar(Bar):
+    suffix = '%(percent)d%%'
+    bar_prefix = ' '
+    bar_suffix = ' '
+    empty_fill = '∙'
+    fill = '█'
+
+
+class FillingSquaresBar(ChargingBar):
+    empty_fill = '▢'
+    fill = '▣'
+
+
+class FillingCirclesBar(ChargingBar):
+    empty_fill = '◯'
+    fill = '◉'
+
+
+class IncrementalBar(Bar):
+    if sys.platform.startswith('win'):
+        phases = (u' ', u'▌', u'█')
+    else:
+        phases = (' ', '▏', '▎', '▍', '▌', '▋', '▊', '▉', '█')
+
+    def update(self):
+        nphases = len(self.phases)
+        filled_len = self.width * self.progress
+        nfull = int(filled_len)                      # Number of full chars
+        phase = int((filled_len - nfull) * nphases)  # Phase of last char
+        nempty = self.width - nfull                  # Number of empty chars
+
+        message = self.message % self
+        bar = self.phases[-1] * nfull
+        current = self.phases[phase] if phase > 0 else ''
+        empty = self.empty_fill * max(0, nempty - len(current))
+        suffix = self.suffix % self
+        line = ''.join([message, self.bar_prefix, bar, current, empty,
+                        self.bar_suffix, suffix])
+        self.writeln(line)
+
+
+class PixelBar(IncrementalBar):
+    phases = ('⡀', '⡄', '⡆', '⡇', '⣇', '⣧', '⣷', '⣿')
+
+
+class ShadyBar(IncrementalBar):
+    phases = (' ', '░', '▒', '▓', '█')
diff --git a/env/lib/python3.6/site-packages/pip/_vendor/progress/counter.py b/env/lib/python3.6/site-packages/pip/_vendor/progress/counter.py
new file mode 100644
index 0000000..6b45a1e
--- /dev/null
+++ b/env/lib/python3.6/site-packages/pip/_vendor/progress/counter.py
@@ -0,0 +1,48 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2012 Giorgos Verigakis <verigak@gmail.com>
+#
+# Permission to use, copy, modify, and distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+from __future__ import unicode_literals
+from . import Infinite, Progress
+from .helpers import WriteMixin
+
+
+class Counter(WriteMixin, Infinite):
+    message = ''
+    hide_cursor = True
+
+    def update(self):
+        self.write(str(self.index))
+
+
+class Countdown(WriteMixin, Progress):
+    hide_cursor = True
+
+    def update(self):
+        self.write(str(self.remaining))
+
+
+class Stack(WriteMixin, Progress):
+    phases = (' ', '▁', '▂', '▃', '▄', '▅', '▆', '▇', '█')
+    hide_cursor = True
+
+    def update(self):
+        nphases = len(self.phases)
+        i = min(nphases - 1, int(self.progress * nphases))
+        self.write(self.phases[i])
+
+
+class Pie(Stack):
+    phases = ('○', '◔', '◑', '◕', '●')
diff --git a/env/lib/python3.6/site-packages/pip/_vendor/progress/helpers.py b/env/lib/python3.6/site-packages/pip/_vendor/progress/helpers.py
new file mode 100644
index 0000000..0cde44e
--- /dev/null
+++ b/env/lib/python3.6/site-packages/pip/_vendor/progress/helpers.py
@@ -0,0 +1,91 @@
+# Copyright (c) 2012 Giorgos Verigakis <verigak@gmail.com>
+#
+# Permission to use, copy, modify, and distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+from __future__ import print_function
+
+
+HIDE_CURSOR = '\x1b[?25l'
+SHOW_CURSOR = '\x1b[?25h'
+
+
+class WriteMixin(object):
+    hide_cursor = False
+
+    def __init__(self, message=None, **kwargs):
+        super(WriteMixin, self).__init__(**kwargs)
+        self._width = 0
+        if message:
+            self.message = message
+
+        if self.file and self.file.isatty():
+            if self.hide_cursor:
+                print(HIDE_CURSOR, end='', file=self.file)
+            print(self.message, end='', file=self.file)
+            self.file.flush()
+
+    def write(self, s):
+        if self.file and self.file.isatty():
+            b = '\b' * self._width
+            c = s.ljust(self._width)
+            print(b + c, end='', file=self.file)
+            self._width = max(self._width, len(s))
+            self.file.flush()
+
+    def finish(self):
+        if self.file and self.file.isatty() and self.hide_cursor:
+            print(SHOW_CURSOR, end='', file=self.file)
+
+
+class WritelnMixin(object):
+    hide_cursor = False
+
+    def __init__(self, message=None, **kwargs):
+        super(WritelnMixin, self).__init__(**kwargs)
+        if message:
+            self.message = message
+
+        if self.file and self.file.isatty() and self.hide_cursor:
+            print(HIDE_CURSOR, end='', file=self.file)
+
+    def clearln(self):
+        if self.file and self.file.isatty():
+            print('\r\x1b[K', end='', file=self.file)
+
+    def writeln(self, line):
+        if self.file and self.file.isatty():
+            self.clearln()
+            print(line, end='', file=self.file)
+            self.file.flush()
+
+    def finish(self):
+        if self.file and self.file.isatty():
+            print(file=self.file)
+            if self.hide_cursor:
+                print(SHOW_CURSOR, end='', file=self.file)
+
+
+from signal import signal, SIGINT
+from sys import exit
+
+
+class SigIntMixin(object):
+    """Registers a signal handler that calls finish on SIGINT"""
+
+    def __init__(self, *args, **kwargs):
+        super(SigIntMixin, self).__init__(*args, **kwargs)
+        signal(SIGINT, self._sigint_handler)
+
+    def _sigint_handler(self, signum, frame):
self.finish() + exit(0) diff --git a/env/lib/python3.6/site-packages/pip/_vendor/progress/spinner.py b/env/lib/python3.6/site-packages/pip/_vendor/progress/spinner.py new file mode 100644 index 0000000..464c7b2 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/progress/spinner.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- + +# Copyright (c) 2012 Giorgos Verigakis <verigak@gmail.com> +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +from __future__ import unicode_literals +from . import Infinite +from .helpers import WriteMixin + + +class Spinner(WriteMixin, Infinite): + message = '' + phases = ('-', '\\', '|', '/') + hide_cursor = True + + def update(self): + i = self.index % len(self.phases) + self.write(self.phases[i]) + + +class PieSpinner(Spinner): + phases = ['◷', '◶', '◵', '◴'] + + +class MoonSpinner(Spinner): + phases = ['◑', '◒', '◐', '◓'] + + +class LineSpinner(Spinner): + phases = ['⎺', '⎻', '⎼', '⎽', '⎼', '⎻'] + +class PixelSpinner(Spinner): + phases = ['⣾','⣷', '⣯', '⣟', '⡿', '⢿', '⣻', '⣽'] diff --git a/env/lib/python3.6/site-packages/pip/_vendor/pyparsing.py b/env/lib/python3.6/site-packages/pip/_vendor/pyparsing.py new file mode 100644 index 0000000..ba2619c --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/pyparsing.py @@ -0,0 +1,5720 @@ +# module pyparsing.py +# +# Copyright (c) 2003-2016 Paul T. McGuire +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be +# included in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__doc__ = \ +""" +pyparsing module - Classes and methods to define and execute parsing grammars + +The pyparsing module is an alternative approach to creating and executing simple grammars, +vs. the traditional lex/yacc approach, or the use of regular expressions. 
With pyparsing, you +don't need to learn a new syntax for defining grammars or matching expressions - the parsing module +provides a library of classes that you use to construct the grammar directly in Python. + +Here is a program to parse "Hello, World!" (or any greeting of the form +C{"<salutation>, <addressee>!"}), built up using L{Word}, L{Literal}, and L{And} elements +(L{'+'<ParserElement.__add__>} operator gives L{And} expressions, strings are auto-converted to +L{Literal} expressions):: + + from pip._vendor.pyparsing import Word, alphas + + # define grammar of a greeting + greet = Word(alphas) + "," + Word(alphas) + "!" + + hello = "Hello, World!" + print (hello, "->", greet.parseString(hello)) + +The program outputs the following:: + + Hello, World! -> ['Hello', ',', 'World', '!'] + +The Python representation of the grammar is quite readable, owing to the self-explanatory +class names, and the use of '+', '|' and '^' operators. + +The L{ParseResults} object returned from L{ParserElement.parseString<ParserElement.parseString>} can be accessed as a nested list, a dictionary, or an +object with named attributes. + +The pyparsing module handles some of the problems that are typically vexing when writing text parsers: + - extra or missing whitespace (the above program will also handle "Hello,World!", "Hello , World !", etc.) + - quoted strings + - embedded comments +""" + +__version__ = "2.2.0" +__versionTime__ = "06 Mar 2017 02:06 UTC" +__author__ = "Paul McGuire <ptmcg@users.sourceforge.net>" + +import string +from weakref import ref as wkref +import copy +import sys +import warnings +import re +import sre_constants +import collections +import pprint +import traceback +import types +from datetime import datetime + +try: + from _thread import RLock +except ImportError: + from threading import RLock + +try: + from collections import OrderedDict as _OrderedDict +except ImportError: + try: + from ordereddict import OrderedDict as _OrderedDict + except ImportError: + _OrderedDict = None + +#~ sys.stderr.write( "testing pyparsing module, version %s, %s\n" % (__version__,__versionTime__ ) ) + +__all__ = [ +'And', 'CaselessKeyword', 'CaselessLiteral', 'CharsNotIn', 'Combine', 'Dict', 'Each', 'Empty', +'FollowedBy', 'Forward', 'GoToColumn', 'Group', 'Keyword', 'LineEnd', 'LineStart', 'Literal', +'MatchFirst', 'NoMatch', 'NotAny', 'OneOrMore', 'OnlyOnce', 'Optional', 'Or', +'ParseBaseException', 'ParseElementEnhance', 'ParseException', 'ParseExpression', 'ParseFatalException', +'ParseResults', 'ParseSyntaxException', 'ParserElement', 'QuotedString', 'RecursiveGrammarException', +'Regex', 'SkipTo', 'StringEnd', 'StringStart', 'Suppress', 'Token', 'TokenConverter', +'White', 'Word', 'WordEnd', 'WordStart', 'ZeroOrMore', +'alphanums', 'alphas', 'alphas8bit', 'anyCloseTag', 'anyOpenTag', 'cStyleComment', 'col', +'commaSeparatedList', 'commonHTMLEntity', 'countedArray', 'cppStyleComment', 'dblQuotedString', +'dblSlashComment', 'delimitedList', 'dictOf', 'downcaseTokens', 'empty', 'hexnums', +'htmlComment', 'javaStyleComment', 'line', 'lineEnd', 'lineStart', 'lineno', +'makeHTMLTags', 'makeXMLTags', 'matchOnlyAtCol', 'matchPreviousExpr', 'matchPreviousLiteral', +'nestedExpr', 'nullDebugAction', 'nums', 'oneOf', 'opAssoc', 'operatorPrecedence', 'printables', +'punc8bit', 'pythonStyleComment', 'quotedString', 'removeQuotes', 'replaceHTMLEntity', +'replaceWith', 'restOfLine', 'sglQuotedString', 'srange', 'stringEnd', +'stringStart', 'traceParseAction', 'unicodeString', 'upcaseTokens', 'withAttribute', 
+'indentedBlock', 'originalTextFor', 'ungroup', 'infixNotation','locatedExpr', 'withClass', +'CloseMatch', 'tokenMap', 'pyparsing_common', +] + +system_version = tuple(sys.version_info)[:3] +PY_3 = system_version[0] == 3 +if PY_3: + _MAX_INT = sys.maxsize + basestring = str + unichr = chr + _ustr = str + + # build list of single arg builtins, that can be used as parse actions + singleArgBuiltins = [sum, len, sorted, reversed, list, tuple, set, any, all, min, max] + +else: + _MAX_INT = sys.maxint + range = xrange + + def _ustr(obj): + """Drop-in replacement for str(obj) that tries to be Unicode friendly. It first tries + str(obj). If that fails with a UnicodeEncodeError, then it tries unicode(obj). It + then < returns the unicode object | encodes it with the default encoding | ... >. + """ + if isinstance(obj,unicode): + return obj + + try: + # If this works, then _ustr(obj) has the same behaviour as str(obj), so + # it won't break any existing code. + return str(obj) + + except UnicodeEncodeError: + # Else encode it + ret = unicode(obj).encode(sys.getdefaultencoding(), 'xmlcharrefreplace') + xmlcharref = Regex(r'&#\d+;') + xmlcharref.setParseAction(lambda t: '\\u' + hex(int(t[0][2:-1]))[2:]) + return xmlcharref.transformString(ret) + + # build list of single arg builtins, tolerant of Python version, that can be used as parse actions + singleArgBuiltins = [] + import __builtin__ + for fname in "sum len sorted reversed list tuple set any all min max".split(): + try: + singleArgBuiltins.append(getattr(__builtin__,fname)) + except AttributeError: + continue + +_generatorType = type((y for y in range(1))) + +def _xml_escape(data): + """Escape &, <, >, ", ', etc. in a string of data.""" + + # ampersand must be replaced first + from_symbols = '&><"\'' + to_symbols = ('&'+s+';' for s in "amp gt lt quot apos".split()) + for from_,to_ in zip(from_symbols, to_symbols): + data = data.replace(from_, to_) + return data + +class _Constants(object): + pass + +alphas = string.ascii_uppercase + string.ascii_lowercase +nums = "0123456789" +hexnums = nums + "ABCDEFabcdef" +alphanums = alphas + nums +_bslash = chr(92) +printables = "".join(c for c in string.printable if c not in string.whitespace) + +class ParseBaseException(Exception): + """base exception class for all parsing runtime exceptions""" + # Performance tuning: we construct a *lot* of these, so keep this + # constructor as small and fast as possible + def __init__( self, pstr, loc=0, msg=None, elem=None ): + self.loc = loc + if msg is None: + self.msg = pstr + self.pstr = "" + else: + self.msg = msg + self.pstr = pstr + self.parserElement = elem + self.args = (pstr, loc, msg) + + @classmethod + def _from_exception(cls, pe): + """ + internal factory method to simplify creating one type of ParseException + from another - avoids having __init__ signature conflicts among subclasses + """ + return cls(pe.pstr, pe.loc, pe.msg, pe.parserElement) + + def __getattr__( self, aname ): + """supported attributes by name are: + - lineno - returns the line number of the exception text + - col - returns the column number of the exception text + - line - returns the line containing the exception text + """ + if( aname == "lineno" ): + return lineno( self.loc, self.pstr ) + elif( aname in ("col", "column") ): + return col( self.loc, self.pstr ) + elif( aname == "line" ): + return line( self.loc, self.pstr ) + else: + raise AttributeError(aname) + + def __str__( self ): + return "%s (at char %d), (line:%d, col:%d)" % \ + ( self.msg, self.loc, self.lineno, self.column 
) + def __repr__( self ): + return _ustr(self) + def markInputline( self, markerString = ">!<" ): + """Extracts the exception line from the input string, and marks + the location of the exception with a special symbol. + """ + line_str = self.line + line_column = self.column - 1 + if markerString: + line_str = "".join((line_str[:line_column], + markerString, line_str[line_column:])) + return line_str.strip() + def __dir__(self): + return "lineno col line".split() + dir(type(self)) + +class ParseException(ParseBaseException): + """ + Exception thrown when parse expressions don't match class; + supported attributes by name are: + - lineno - returns the line number of the exception text + - col - returns the column number of the exception text + - line - returns the line containing the exception text + + Example:: + try: + Word(nums).setName("integer").parseString("ABC") + except ParseException as pe: + print(pe) + print("column: {}".format(pe.col)) + + prints:: + Expected integer (at char 0), (line:1, col:1) + column: 1 + """ + pass + +class ParseFatalException(ParseBaseException): + """user-throwable exception thrown when inconsistent parse content + is found; stops all parsing immediately""" + pass + +class ParseSyntaxException(ParseFatalException): + """just like L{ParseFatalException}, but thrown internally when an + L{ErrorStop<And._ErrorStop>} ('-' operator) indicates that parsing is to stop + immediately because an unbacktrackable syntax error has been found""" + pass + +#~ class ReparseException(ParseBaseException): + #~ """Experimental class - parse actions can raise this exception to cause + #~ pyparsing to reparse the input string: + #~ - with a modified input string, and/or + #~ - with a modified start location + #~ Set the values of the ReparseException in the constructor, and raise the + #~ exception in a parse action to cause pyparsing to use the new string/location. + #~ Setting the values as None causes no change to be made. + #~ """ + #~ def __init_( self, newstring, restartLoc ): + #~ self.newParseText = newstring + #~ self.reparseLoc = restartLoc + +class RecursiveGrammarException(Exception): + """exception thrown by L{ParserElement.validate} if the grammar could be improperly recursive""" + def __init__( self, parseElementList ): + self.parseElementTrace = parseElementList + + def __str__( self ): + return "RecursiveGrammarException: %s" % self.parseElementTrace + +class _ParseResultsWithOffset(object): + def __init__(self,p1,p2): + self.tup = (p1,p2) + def __getitem__(self,i): + return self.tup[i] + def __repr__(self): + return repr(self.tup[0]) + def setOffset(self,i): + self.tup = (self.tup[0],i) + +class ParseResults(object): + """ + Structured parse results, to provide multiple means of access to the parsed data: + - as a list (C{len(results)}) + - by list index (C{results[0], results[1]}, etc.) 
+ - by attribute (C{results.<resultsName>} - see L{ParserElement.setResultsName}) + + Example:: + integer = Word(nums) + date_str = (integer.setResultsName("year") + '/' + + integer.setResultsName("month") + '/' + + integer.setResultsName("day")) + # equivalent form: + # date_str = integer("year") + '/' + integer("month") + '/' + integer("day") + + # parseString returns a ParseResults object + result = date_str.parseString("1999/12/31") + + def test(s, fn=repr): + print("%s -> %s" % (s, fn(eval(s)))) + test("list(result)") + test("result[0]") + test("result['month']") + test("result.day") + test("'month' in result") + test("'minutes' in result") + test("result.dump()", str) + prints:: + list(result) -> ['1999', '/', '12', '/', '31'] + result[0] -> '1999' + result['month'] -> '12' + result.day -> '31' + 'month' in result -> True + 'minutes' in result -> False + result.dump() -> ['1999', '/', '12', '/', '31'] + - day: 31 + - month: 12 + - year: 1999 + """ + def __new__(cls, toklist=None, name=None, asList=True, modal=True ): + if isinstance(toklist, cls): + return toklist + retobj = object.__new__(cls) + retobj.__doinit = True + return retobj + + # Performance tuning: we construct a *lot* of these, so keep this + # constructor as small and fast as possible + def __init__( self, toklist=None, name=None, asList=True, modal=True, isinstance=isinstance ): + if self.__doinit: + self.__doinit = False + self.__name = None + self.__parent = None + self.__accumNames = {} + self.__asList = asList + self.__modal = modal + if toklist is None: + toklist = [] + if isinstance(toklist, list): + self.__toklist = toklist[:] + elif isinstance(toklist, _generatorType): + self.__toklist = list(toklist) + else: + self.__toklist = [toklist] + self.__tokdict = dict() + + if name is not None and name: + if not modal: + self.__accumNames[name] = 0 + if isinstance(name,int): + name = _ustr(name) # will always return a str, but use _ustr for consistency + self.__name = name + if not (isinstance(toklist, (type(None), basestring, list)) and toklist in (None,'',[])): + if isinstance(toklist,basestring): + toklist = [ toklist ] + if asList: + if isinstance(toklist,ParseResults): + self[name] = _ParseResultsWithOffset(toklist.copy(),0) + else: + self[name] = _ParseResultsWithOffset(ParseResults(toklist[0]),0) + self[name].__name = name + else: + try: + self[name] = toklist[0] + except (KeyError,TypeError,IndexError): + self[name] = toklist + + def __getitem__( self, i ): + if isinstance( i, (int,slice) ): + return self.__toklist[i] + else: + if i not in self.__accumNames: + return self.__tokdict[i][-1][0] + else: + return ParseResults([ v[0] for v in self.__tokdict[i] ]) + + def __setitem__( self, k, v, isinstance=isinstance ): + if isinstance(v,_ParseResultsWithOffset): + self.__tokdict[k] = self.__tokdict.get(k,list()) + [v] + sub = v[0] + elif isinstance(k,(int,slice)): + self.__toklist[k] = v + sub = v + else: + self.__tokdict[k] = self.__tokdict.get(k,list()) + [_ParseResultsWithOffset(v,0)] + sub = v + if isinstance(sub,ParseResults): + sub.__parent = wkref(self) + + def __delitem__( self, i ): + if isinstance(i,(int,slice)): + mylen = len( self.__toklist ) + del self.__toklist[i] + + # convert int to slice + if isinstance(i, int): + if i < 0: + i += mylen + i = slice(i, i+1) + # get removed indices + removed = list(range(*i.indices(mylen))) + removed.reverse() + # fixup indices in token dictionary + for name,occurrences in self.__tokdict.items(): + for j in removed: + for k, (value, position) in 
enumerate(occurrences): + occurrences[k] = _ParseResultsWithOffset(value, position - (position > j)) + else: + del self.__tokdict[i] + + def __contains__( self, k ): + return k in self.__tokdict + + def __len__( self ): return len( self.__toklist ) + def __bool__(self): return ( not not self.__toklist ) + __nonzero__ = __bool__ + def __iter__( self ): return iter( self.__toklist ) + def __reversed__( self ): return iter( self.__toklist[::-1] ) + def _iterkeys( self ): + if hasattr(self.__tokdict, "iterkeys"): + return self.__tokdict.iterkeys() + else: + return iter(self.__tokdict) + + def _itervalues( self ): + return (self[k] for k in self._iterkeys()) + + def _iteritems( self ): + return ((k, self[k]) for k in self._iterkeys()) + + if PY_3: + keys = _iterkeys + """Returns an iterator of all named result keys (Python 3.x only).""" + + values = _itervalues + """Returns an iterator of all named result values (Python 3.x only).""" + + items = _iteritems + """Returns an iterator of all named result key-value tuples (Python 3.x only).""" + + else: + iterkeys = _iterkeys + """Returns an iterator of all named result keys (Python 2.x only).""" + + itervalues = _itervalues + """Returns an iterator of all named result values (Python 2.x only).""" + + iteritems = _iteritems + """Returns an iterator of all named result key-value tuples (Python 2.x only).""" + + def keys( self ): + """Returns all named result keys (as a list in Python 2.x, as an iterator in Python 3.x).""" + return list(self.iterkeys()) + + def values( self ): + """Returns all named result values (as a list in Python 2.x, as an iterator in Python 3.x).""" + return list(self.itervalues()) + + def items( self ): + """Returns all named result key-values (as a list of tuples in Python 2.x, as an iterator in Python 3.x).""" + return list(self.iteritems()) + + def haskeys( self ): + """Since keys() returns an iterator, this method is helpful in bypassing + code that looks for the existence of any defined results names.""" + return bool(self.__tokdict) + + def pop( self, *args, **kwargs): + """ + Removes and returns item at specified index (default=C{last}). + Supports both C{list} and C{dict} semantics for C{pop()}. If passed no + argument or an integer argument, it will use C{list} semantics + and pop tokens from the list of parsed tokens. If passed a + non-integer argument (most likely a string), it will use C{dict} + semantics and pop the corresponding value from any defined + results names. A second default return value argument is + supported, just as in C{dict.pop()}. 
+ + Example:: + def remove_first(tokens): + tokens.pop(0) + print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321'] + print(OneOrMore(Word(nums)).addParseAction(remove_first).parseString("0 123 321")) # -> ['123', '321'] + + label = Word(alphas) + patt = label("LABEL") + OneOrMore(Word(nums)) + print(patt.parseString("AAB 123 321").dump()) + + # Use pop() in a parse action to remove named result (note that corresponding value is not + # removed from list form of results) + def remove_LABEL(tokens): + tokens.pop("LABEL") + return tokens + patt.addParseAction(remove_LABEL) + print(patt.parseString("AAB 123 321").dump()) + prints:: + ['AAB', '123', '321'] + - LABEL: AAB + + ['AAB', '123', '321'] + """ + if not args: + args = [-1] + for k,v in kwargs.items(): + if k == 'default': + args = (args[0], v) + else: + raise TypeError("pop() got an unexpected keyword argument '%s'" % k) + if (isinstance(args[0], int) or + len(args) == 1 or + args[0] in self): + index = args[0] + ret = self[index] + del self[index] + return ret + else: + defaultvalue = args[1] + return defaultvalue + + def get(self, key, defaultValue=None): + """ + Returns named result matching the given key, or if there is no + such name, then returns the given C{defaultValue} or C{None} if no + C{defaultValue} is specified. + + Similar to C{dict.get()}. + + Example:: + integer = Word(nums) + date_str = integer("year") + '/' + integer("month") + '/' + integer("day") + + result = date_str.parseString("1999/12/31") + print(result.get("year")) # -> '1999' + print(result.get("hour", "not specified")) # -> 'not specified' + print(result.get("hour")) # -> None + """ + if key in self: + return self[key] + else: + return defaultValue + + def insert( self, index, insStr ): + """ + Inserts new element at location index in the list of parsed tokens. + + Similar to C{list.insert()}. + + Example:: + print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321'] + + # use a parse action to insert the parse location in the front of the parsed results + def insert_locn(locn, tokens): + tokens.insert(0, locn) + print(OneOrMore(Word(nums)).addParseAction(insert_locn).parseString("0 123 321")) # -> [0, '0', '123', '321'] + """ + self.__toklist.insert(index, insStr) + # fixup indices in token dictionary + for name,occurrences in self.__tokdict.items(): + for k, (value, position) in enumerate(occurrences): + occurrences[k] = _ParseResultsWithOffset(value, position + (position > index)) + + def append( self, item ): + """ + Add single element to end of ParseResults list of elements. + + Example:: + print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321'] + + # use a parse action to compute the sum of the parsed integers, and add it to the end + def append_sum(tokens): + tokens.append(sum(map(int, tokens))) + print(OneOrMore(Word(nums)).addParseAction(append_sum).parseString("0 123 321")) # -> ['0', '123', '321', 444] + """ + self.__toklist.append(item) + + def extend( self, itemseq ): + """ + Add sequence of elements to end of ParseResults list of elements. 
+ + Example:: + patt = OneOrMore(Word(alphas)) + + # use a parse action to append the reverse of the matched strings, to make a palindrome + def make_palindrome(tokens): + tokens.extend(reversed([t[::-1] for t in tokens])) + return ''.join(tokens) + print(patt.addParseAction(make_palindrome).parseString("lskdj sdlkjf lksd")) # -> 'lskdjsdlkjflksddsklfjkldsjdksl' + """ + if isinstance(itemseq, ParseResults): + self += itemseq + else: + self.__toklist.extend(itemseq) + + def clear( self ): + """ + Clear all elements and results names. + """ + del self.__toklist[:] + self.__tokdict.clear() + + def __getattr__( self, name ): + try: + return self[name] + except KeyError: + return "" + + if name in self.__tokdict: + if name not in self.__accumNames: + return self.__tokdict[name][-1][0] + else: + return ParseResults([ v[0] for v in self.__tokdict[name] ]) + else: + return "" + + def __add__( self, other ): + ret = self.copy() + ret += other + return ret + + def __iadd__( self, other ): + if other.__tokdict: + offset = len(self.__toklist) + addoffset = lambda a: offset if a<0 else a+offset + otheritems = other.__tokdict.items() + otherdictitems = [(k, _ParseResultsWithOffset(v[0],addoffset(v[1])) ) + for (k,vlist) in otheritems for v in vlist] + for k,v in otherdictitems: + self[k] = v + if isinstance(v[0],ParseResults): + v[0].__parent = wkref(self) + + self.__toklist += other.__toklist + self.__accumNames.update( other.__accumNames ) + return self + + def __radd__(self, other): + if isinstance(other,int) and other == 0: + # useful for merging many ParseResults using sum() builtin + return self.copy() + else: + # this may raise a TypeError - so be it + return other + self + + def __repr__( self ): + return "(%s, %s)" % ( repr( self.__toklist ), repr( self.__tokdict ) ) + + def __str__( self ): + return '[' + ', '.join(_ustr(i) if isinstance(i, ParseResults) else repr(i) for i in self.__toklist) + ']' + + def _asStringList( self, sep='' ): + out = [] + for item in self.__toklist: + if out and sep: + out.append(sep) + if isinstance( item, ParseResults ): + out += item._asStringList() + else: + out.append( _ustr(item) ) + return out + + def asList( self ): + """ + Returns the parse results as a nested list of matching tokens, all converted to strings. + + Example:: + patt = OneOrMore(Word(alphas)) + result = patt.parseString("sldkj lsdkj sldkj") + # even though the result prints in string-like form, it is actually a pyparsing ParseResults + print(type(result), result) # -> <class 'pyparsing.ParseResults'> ['sldkj', 'lsdkj', 'sldkj'] + + # Use asList() to create an actual list + result_list = result.asList() + print(type(result_list), result_list) # -> <class 'list'> ['sldkj', 'lsdkj', 'sldkj'] + """ + return [res.asList() if isinstance(res,ParseResults) else res for res in self.__toklist] + + def asDict( self ): + """ + Returns the named parse results as a nested dictionary. 
+ + Example:: + integer = Word(nums) + date_str = integer("year") + '/' + integer("month") + '/' + integer("day") + + result = date_str.parseString('12/31/1999') + print(type(result), repr(result)) # -> <class 'pyparsing.ParseResults'> (['12', '/', '31', '/', '1999'], {'day': [('1999', 4)], 'year': [('12', 0)], 'month': [('31', 2)]}) + + result_dict = result.asDict() + print(type(result_dict), repr(result_dict)) # -> <class 'dict'> {'day': '1999', 'year': '12', 'month': '31'} + + # even though a ParseResults supports dict-like access, sometime you just need to have a dict + import json + print(json.dumps(result)) # -> Exception: TypeError: ... is not JSON serializable + print(json.dumps(result.asDict())) # -> {"month": "31", "day": "1999", "year": "12"} + """ + if PY_3: + item_fn = self.items + else: + item_fn = self.iteritems + + def toItem(obj): + if isinstance(obj, ParseResults): + if obj.haskeys(): + return obj.asDict() + else: + return [toItem(v) for v in obj] + else: + return obj + + return dict((k,toItem(v)) for k,v in item_fn()) + + def copy( self ): + """ + Returns a new copy of a C{ParseResults} object. + """ + ret = ParseResults( self.__toklist ) + ret.__tokdict = self.__tokdict.copy() + ret.__parent = self.__parent + ret.__accumNames.update( self.__accumNames ) + ret.__name = self.__name + return ret + + def asXML( self, doctag=None, namedItemsOnly=False, indent="", formatted=True ): + """ + (Deprecated) Returns the parse results as XML. Tags are created for tokens and lists that have defined results names. + """ + nl = "\n" + out = [] + namedItems = dict((v[1],k) for (k,vlist) in self.__tokdict.items() + for v in vlist) + nextLevelIndent = indent + " " + + # collapse out indents if formatting is not desired + if not formatted: + indent = "" + nextLevelIndent = "" + nl = "" + + selfTag = None + if doctag is not None: + selfTag = doctag + else: + if self.__name: + selfTag = self.__name + + if not selfTag: + if namedItemsOnly: + return "" + else: + selfTag = "ITEM" + + out += [ nl, indent, "<", selfTag, ">" ] + + for i,res in enumerate(self.__toklist): + if isinstance(res,ParseResults): + if i in namedItems: + out += [ res.asXML(namedItems[i], + namedItemsOnly and doctag is None, + nextLevelIndent, + formatted)] + else: + out += [ res.asXML(None, + namedItemsOnly and doctag is None, + nextLevelIndent, + formatted)] + else: + # individual token, see if there is a name for it + resTag = None + if i in namedItems: + resTag = namedItems[i] + if not resTag: + if namedItemsOnly: + continue + else: + resTag = "ITEM" + xmlBodyText = _xml_escape(_ustr(res)) + out += [ nl, nextLevelIndent, "<", resTag, ">", + xmlBodyText, + "</", resTag, ">" ] + + out += [ nl, indent, "</", selfTag, ">" ] + return "".join(out) + + def __lookup(self,sub): + for k,vlist in self.__tokdict.items(): + for v,loc in vlist: + if sub is v: + return k + return None + + def getName(self): + r""" + Returns the results name for this token expression. Useful when several + different expressions might match at a particular location. 
+ + Example:: + integer = Word(nums) + ssn_expr = Regex(r"\d\d\d-\d\d-\d\d\d\d") + house_number_expr = Suppress('#') + Word(nums, alphanums) + user_data = (Group(house_number_expr)("house_number") + | Group(ssn_expr)("ssn") + | Group(integer)("age")) + user_info = OneOrMore(user_data) + + result = user_info.parseString("22 111-22-3333 #221B") + for item in result: + print(item.getName(), ':', item[0]) + prints:: + age : 22 + ssn : 111-22-3333 + house_number : 221B + """ + if self.__name: + return self.__name + elif self.__parent: + par = self.__parent() + if par: + return par.__lookup(self) + else: + return None + elif (len(self) == 1 and + len(self.__tokdict) == 1 and + next(iter(self.__tokdict.values()))[0][1] in (0,-1)): + return next(iter(self.__tokdict.keys())) + else: + return None + + def dump(self, indent='', depth=0, full=True): + """ + Diagnostic method for listing out the contents of a C{ParseResults}. + Accepts an optional C{indent} argument so that this string can be embedded + in a nested display of other data. + + Example:: + integer = Word(nums) + date_str = integer("year") + '/' + integer("month") + '/' + integer("day") + + result = date_str.parseString('12/31/1999') + print(result.dump()) + prints:: + ['12', '/', '31', '/', '1999'] + - day: 1999 + - month: 31 + - year: 12 + """ + out = [] + NL = '\n' + out.append( indent+_ustr(self.asList()) ) + if full: + if self.haskeys(): + items = sorted((str(k), v) for k,v in self.items()) + for k,v in items: + if out: + out.append(NL) + out.append( "%s%s- %s: " % (indent,(' '*depth), k) ) + if isinstance(v,ParseResults): + if v: + out.append( v.dump(indent,depth+1) ) + else: + out.append(_ustr(v)) + else: + out.append(repr(v)) + elif any(isinstance(vv,ParseResults) for vv in self): + v = self + for i,vv in enumerate(v): + if isinstance(vv,ParseResults): + out.append("\n%s%s[%d]:\n%s%s%s" % (indent,(' '*(depth)),i,indent,(' '*(depth+1)),vv.dump(indent,depth+1) )) + else: + out.append("\n%s%s[%d]:\n%s%s%s" % (indent,(' '*(depth)),i,indent,(' '*(depth+1)),_ustr(vv))) + + return "".join(out) + + def pprint(self, *args, **kwargs): + """ + Pretty-printer for parsed results as a list, using the C{pprint} module. + Accepts additional positional or keyword args as defined for the + C{pprint.pprint} method. 
(U{http://docs.python.org/3/library/pprint.html#pprint.pprint}) + + Example:: + ident = Word(alphas, alphanums) + num = Word(nums) + func = Forward() + term = ident | num | Group('(' + func + ')') + func <<= ident + Group(Optional(delimitedList(term))) + result = func.parseString("fna a,b,(fnb c,d,200),100") + result.pprint(width=40) + prints:: + ['fna', + ['a', + 'b', + ['(', 'fnb', ['c', 'd', '200'], ')'], + '100']] + """ + pprint.pprint(self.asList(), *args, **kwargs) + + # add support for pickle protocol + def __getstate__(self): + return ( self.__toklist, + ( self.__tokdict.copy(), + self.__parent is not None and self.__parent() or None, + self.__accumNames, + self.__name ) ) + + def __setstate__(self,state): + self.__toklist = state[0] + (self.__tokdict, + par, + inAccumNames, + self.__name) = state[1] + self.__accumNames = {} + self.__accumNames.update(inAccumNames) + if par is not None: + self.__parent = wkref(par) + else: + self.__parent = None + + def __getnewargs__(self): + return self.__toklist, self.__name, self.__asList, self.__modal + + def __dir__(self): + return (dir(type(self)) + list(self.keys())) + +collections.MutableMapping.register(ParseResults) + +def col (loc,strg): + """Returns current column within a string, counting newlines as line separators. + The first column is number 1. + + Note: the default parsing behavior is to expand tabs in the input string + before starting the parsing process. See L{I{ParserElement.parseString}<ParserElement.parseString>} for more information + on parsing strings containing C{<TAB>}s, and suggested methods to maintain a + consistent view of the parsed string, the parse location, and line and column + positions within the parsed string. + """ + s = strg + return 1 if 0<loc<len(s) and s[loc-1] == '\n' else loc - s.rfind("\n", 0, loc) + +def lineno(loc,strg): + """Returns current line number within a string, counting newlines as line separators. + The first line is number 1. + + Note: the default parsing behavior is to expand tabs in the input string + before starting the parsing process. See L{I{ParserElement.parseString}<ParserElement.parseString>} for more information + on parsing strings containing C{<TAB>}s, and suggested methods to maintain a + consistent view of the parsed string, the parse location, and line and column + positions within the parsed string. + """ + return strg.count("\n",0,loc) + 1 + +def line( loc, strg ): + """Returns the line of text containing loc within a string, counting newlines as line separators. 
+ """ + lastCR = strg.rfind("\n", 0, loc) + nextCR = strg.find("\n", loc) + if nextCR >= 0: + return strg[lastCR+1:nextCR] + else: + return strg[lastCR+1:] + +def _defaultStartDebugAction( instring, loc, expr ): + print (("Match " + _ustr(expr) + " at loc " + _ustr(loc) + "(%d,%d)" % ( lineno(loc,instring), col(loc,instring) ))) + +def _defaultSuccessDebugAction( instring, startloc, endloc, expr, toks ): + print ("Matched " + _ustr(expr) + " -> " + str(toks.asList())) + +def _defaultExceptionDebugAction( instring, loc, expr, exc ): + print ("Exception raised:" + _ustr(exc)) + +def nullDebugAction(*args): + """'Do-nothing' debug action, to suppress debugging output during parsing.""" + pass + +# Only works on Python 3.x - nonlocal is toxic to Python 2 installs +#~ 'decorator to trim function calls to match the arity of the target' +#~ def _trim_arity(func, maxargs=3): + #~ if func in singleArgBuiltins: + #~ return lambda s,l,t: func(t) + #~ limit = 0 + #~ foundArity = False + #~ def wrapper(*args): + #~ nonlocal limit,foundArity + #~ while 1: + #~ try: + #~ ret = func(*args[limit:]) + #~ foundArity = True + #~ return ret + #~ except TypeError: + #~ if limit == maxargs or foundArity: + #~ raise + #~ limit += 1 + #~ continue + #~ return wrapper + +# this version is Python 2.x-3.x cross-compatible +'decorator to trim function calls to match the arity of the target' +def _trim_arity(func, maxargs=2): + if func in singleArgBuiltins: + return lambda s,l,t: func(t) + limit = [0] + foundArity = [False] + + # traceback return data structure changed in Py3.5 - normalize back to plain tuples + if system_version[:2] >= (3,5): + def extract_stack(limit=0): + # special handling for Python 3.5.0 - extra deep call stack by 1 + offset = -3 if system_version == (3,5,0) else -2 + frame_summary = traceback.extract_stack(limit=-offset+limit-1)[offset] + return [(frame_summary.filename, frame_summary.lineno)] + def extract_tb(tb, limit=0): + frames = traceback.extract_tb(tb, limit=limit) + frame_summary = frames[-1] + return [(frame_summary.filename, frame_summary.lineno)] + else: + extract_stack = traceback.extract_stack + extract_tb = traceback.extract_tb + + # synthesize what would be returned by traceback.extract_stack at the call to + # user's parse action 'func', so that we don't incur call penalty at parse time + + LINE_DIFF = 6 + # IF ANY CODE CHANGES, EVEN JUST COMMENTS OR BLANK LINES, BETWEEN THE NEXT LINE AND + # THE CALL TO FUNC INSIDE WRAPPER, LINE_DIFF MUST BE MODIFIED!!!! 
+ this_line = extract_stack(limit=2)[-1] + pa_call_line_synth = (this_line[0], this_line[1]+LINE_DIFF) + + def wrapper(*args): + while 1: + try: + ret = func(*args[limit[0]:]) + foundArity[0] = True + return ret + except TypeError: + # re-raise TypeErrors if they did not come from our arity testing + if foundArity[0]: + raise + else: + try: + tb = sys.exc_info()[-1] + if not extract_tb(tb, limit=2)[-1][:2] == pa_call_line_synth: + raise + finally: + del tb + + if limit[0] <= maxargs: + limit[0] += 1 + continue + raise + + # copy func name to wrapper for sensible debug output + func_name = "<parse action>" + try: + func_name = getattr(func, '__name__', + getattr(func, '__class__').__name__) + except Exception: + func_name = str(func) + wrapper.__name__ = func_name + + return wrapper + +class ParserElement(object): + """Abstract base level parser element class.""" + DEFAULT_WHITE_CHARS = " \n\t\r" + verbose_stacktrace = False + + @staticmethod + def setDefaultWhitespaceChars( chars ): + r""" + Overrides the default whitespace chars + + Example:: + # default whitespace chars are space, <TAB> and newline + OneOrMore(Word(alphas)).parseString("abc def\nghi jkl") # -> ['abc', 'def', 'ghi', 'jkl'] + + # change to just treat newline as significant + ParserElement.setDefaultWhitespaceChars(" \t") + OneOrMore(Word(alphas)).parseString("abc def\nghi jkl") # -> ['abc', 'def'] + """ + ParserElement.DEFAULT_WHITE_CHARS = chars + + @staticmethod + def inlineLiteralsUsing(cls): + """ + Set class to be used for inclusion of string literals into a parser. + + Example:: + # default literal class used is Literal + integer = Word(nums) + date_str = integer("year") + '/' + integer("month") + '/' + integer("day") + + date_str.parseString("1999/12/31") # -> ['1999', '/', '12', '/', '31'] + + + # change to Suppress + ParserElement.inlineLiteralsUsing(Suppress) + date_str = integer("year") + '/' + integer("month") + '/' + integer("day") + + date_str.parseString("1999/12/31") # -> ['1999', '12', '31'] + """ + ParserElement._literalStringClass = cls + + def __init__( self, savelist=False ): + self.parseAction = list() + self.failAction = None + #~ self.name = "<unknown>" # don't define self.name, let subclasses try/except upcall + self.strRepr = None + self.resultsName = None + self.saveAsList = savelist + self.skipWhitespace = True + self.whiteChars = ParserElement.DEFAULT_WHITE_CHARS + self.copyDefaultWhiteChars = True + self.mayReturnEmpty = False # used when checking for left-recursion + self.keepTabs = False + self.ignoreExprs = list() + self.debug = False + self.streamlined = False + self.mayIndexError = True # used to optimize exception handling for subclasses that don't advance parse index + self.errmsg = "" + self.modalResults = True # used to mark results names as modal (report only last) or cumulative (list all) + self.debugActions = ( None, None, None ) #custom debug actions + self.re = None + self.callPreparse = True # used to avoid redundant calls to preParse + self.callDuringTry = False + + def copy( self ): + """ + Make a copy of this C{ParserElement}. Useful for defining different parse actions + for the same parsing pattern, using copies of the original parse element. 
+ + Example:: + integer = Word(nums).setParseAction(lambda toks: int(toks[0])) + integerK = integer.copy().addParseAction(lambda toks: toks[0]*1024) + Suppress("K") + integerM = integer.copy().addParseAction(lambda toks: toks[0]*1024*1024) + Suppress("M") + + print(OneOrMore(integerK | integerM | integer).parseString("5K 100 640K 256M")) + prints:: + [5120, 100, 655360, 268435456] + Equivalent form of C{expr.copy()} is just C{expr()}:: + integerM = integer().addParseAction(lambda toks: toks[0]*1024*1024) + Suppress("M") + """ + cpy = copy.copy( self ) + cpy.parseAction = self.parseAction[:] + cpy.ignoreExprs = self.ignoreExprs[:] + if self.copyDefaultWhiteChars: + cpy.whiteChars = ParserElement.DEFAULT_WHITE_CHARS + return cpy + + def setName( self, name ): + """ + Define name for this expression, makes debugging and exception messages clearer. + + Example:: + Word(nums).parseString("ABC") # -> Exception: Expected W:(0123...) (at char 0), (line:1, col:1) + Word(nums).setName("integer").parseString("ABC") # -> Exception: Expected integer (at char 0), (line:1, col:1) + """ + self.name = name + self.errmsg = "Expected " + self.name + if hasattr(self,"exception"): + self.exception.msg = self.errmsg + return self + + def setResultsName( self, name, listAllMatches=False ): + """ + Define name for referencing matching tokens as a nested attribute + of the returned parse results. + NOTE: this returns a *copy* of the original C{ParserElement} object; + this is so that the client can define a basic element, such as an + integer, and reference it in multiple places with different names. + + You can also set results names using the abbreviated syntax, + C{expr("name")} in place of C{expr.setResultsName("name")} - + see L{I{__call__}<__call__>}. + + Example:: + date_str = (integer.setResultsName("year") + '/' + + integer.setResultsName("month") + '/' + + integer.setResultsName("day")) + + # equivalent form: + date_str = integer("year") + '/' + integer("month") + '/' + integer("day") + """ + newself = self.copy() + if name.endswith("*"): + name = name[:-1] + listAllMatches=True + newself.resultsName = name + newself.modalResults = not listAllMatches + return newself + + def setBreak(self,breakFlag = True): + """Method to invoke the Python pdb debugger when this element is + about to be parsed. Set C{breakFlag} to True to enable, False to + disable. + """ + if breakFlag: + _parseMethod = self._parse + def breaker(instring, loc, doActions=True, callPreParse=True): + import pdb + pdb.set_trace() + return _parseMethod( instring, loc, doActions, callPreParse ) + breaker._originalParseMethod = _parseMethod + self._parse = breaker + else: + if hasattr(self._parse,"_originalParseMethod"): + self._parse = self._parse._originalParseMethod + return self + + def setParseAction( self, *fns, **kwargs ): + """ + Define one or more actions to perform when successfully matching parse element definition. + Parse action fn is a callable method with 0-3 arguments, called as C{fn(s,loc,toks)}, + C{fn(loc,toks)}, C{fn(toks)}, or just C{fn()}, where: + - s = the original string being parsed (see note below) + - loc = the location of the matching substring + - toks = a list of the matched tokens, packaged as a C{L{ParseResults}} object + If the functions in fns modify the tokens, they can return them as the return + value from fn, and the modified list of tokens will replace the original. + Otherwise, fn does not need to return any value. 
+ + Optional keyword arguments: + - callDuringTry = (default=C{False}) indicate if parse action should be run during lookaheads and alternate testing + + Note: the default parsing behavior is to expand tabs in the input string + before starting the parsing process. See L{I{parseString}<parseString>} for more information + on parsing strings containing C{<TAB>}s, and suggested methods to maintain a + consistent view of the parsed string, the parse location, and line and column + positions within the parsed string. + + Example:: + integer = Word(nums) + date_str = integer + '/' + integer + '/' + integer + + date_str.parseString("1999/12/31") # -> ['1999', '/', '12', '/', '31'] + + # use parse action to convert to ints at parse time + integer = Word(nums).setParseAction(lambda toks: int(toks[0])) + date_str = integer + '/' + integer + '/' + integer + + # note that integer fields are now ints, not strings + date_str.parseString("1999/12/31") # -> [1999, '/', 12, '/', 31] + """ + self.parseAction = list(map(_trim_arity, list(fns))) + self.callDuringTry = kwargs.get("callDuringTry", False) + return self + + def addParseAction( self, *fns, **kwargs ): + """ + Add one or more parse actions to expression's list of parse actions. See L{I{setParseAction}<setParseAction>}. + + See examples in L{I{copy}<copy>}. + """ + self.parseAction += list(map(_trim_arity, list(fns))) + self.callDuringTry = self.callDuringTry or kwargs.get("callDuringTry", False) + return self + + def addCondition(self, *fns, **kwargs): + """Add a boolean predicate function to expression's list of parse actions. See + L{I{setParseAction}<setParseAction>} for function call signatures. Unlike C{setParseAction}, + functions passed to C{addCondition} need to return boolean success/fail of the condition. + + Optional keyword arguments: + - message = define a custom message to be used in the raised exception + - fatal = if True, will raise ParseFatalException to stop parsing immediately; otherwise will raise ParseException + + Example:: + integer = Word(nums).setParseAction(lambda toks: int(toks[0])) + year_int = integer.copy() + year_int.addCondition(lambda toks: toks[0] >= 2000, message="Only support years 2000 and later") + date_str = year_int + '/' + integer + '/' + integer + + result = date_str.parseString("1999/12/31") # -> Exception: Only support years 2000 and later (at char 0), (line:1, col:1) + """ + msg = kwargs.get("message", "failed user-defined condition") + exc_type = ParseFatalException if kwargs.get("fatal", False) else ParseException + for fn in fns: + def pa(s,l,t): + if not bool(_trim_arity(fn)(s,l,t)): + raise exc_type(s,l,msg) + self.parseAction.append(pa) + self.callDuringTry = self.callDuringTry or kwargs.get("callDuringTry", False) + return self + + def setFailAction( self, fn ): + """Define action to perform if parsing fails at this expression. + Fail acton fn is a callable function that takes the arguments + C{fn(s,loc,expr,err)} where: + - s = string being parsed + - loc = location where expression match was attempted and failed + - expr = the parse expression that failed + - err = the exception thrown + The function returns no value. 
It may throw C{L{ParseFatalException}} + if it is desired to stop parsing immediately.""" + self.failAction = fn + return self + + def _skipIgnorables( self, instring, loc ): + exprsFound = True + while exprsFound: + exprsFound = False + for e in self.ignoreExprs: + try: + while 1: + loc,dummy = e._parse( instring, loc ) + exprsFound = True + except ParseException: + pass + return loc + + def preParse( self, instring, loc ): + if self.ignoreExprs: + loc = self._skipIgnorables( instring, loc ) + + if self.skipWhitespace: + wt = self.whiteChars + instrlen = len(instring) + while loc < instrlen and instring[loc] in wt: + loc += 1 + + return loc + + def parseImpl( self, instring, loc, doActions=True ): + return loc, [] + + def postParse( self, instring, loc, tokenlist ): + return tokenlist + + #~ @profile + def _parseNoCache( self, instring, loc, doActions=True, callPreParse=True ): + debugging = ( self.debug ) #and doActions ) + + if debugging or self.failAction: + #~ print ("Match",self,"at loc",loc,"(%d,%d)" % ( lineno(loc,instring), col(loc,instring) )) + if (self.debugActions[0] ): + self.debugActions[0]( instring, loc, self ) + if callPreParse and self.callPreparse: + preloc = self.preParse( instring, loc ) + else: + preloc = loc + tokensStart = preloc + try: + try: + loc,tokens = self.parseImpl( instring, preloc, doActions ) + except IndexError: + raise ParseException( instring, len(instring), self.errmsg, self ) + except ParseBaseException as err: + #~ print ("Exception raised:", err) + if self.debugActions[2]: + self.debugActions[2]( instring, tokensStart, self, err ) + if self.failAction: + self.failAction( instring, tokensStart, self, err ) + raise + else: + if callPreParse and self.callPreparse: + preloc = self.preParse( instring, loc ) + else: + preloc = loc + tokensStart = preloc + if self.mayIndexError or loc >= len(instring): + try: + loc,tokens = self.parseImpl( instring, preloc, doActions ) + except IndexError: + raise ParseException( instring, len(instring), self.errmsg, self ) + else: + loc,tokens = self.parseImpl( instring, preloc, doActions ) + + tokens = self.postParse( instring, loc, tokens ) + + retTokens = ParseResults( tokens, self.resultsName, asList=self.saveAsList, modal=self.modalResults ) + if self.parseAction and (doActions or self.callDuringTry): + if debugging: + try: + for fn in self.parseAction: + tokens = fn( instring, tokensStart, retTokens ) + if tokens is not None: + retTokens = ParseResults( tokens, + self.resultsName, + asList=self.saveAsList and isinstance(tokens,(ParseResults,list)), + modal=self.modalResults ) + except ParseBaseException as err: + #~ print "Exception raised in user parse action:", err + if (self.debugActions[2] ): + self.debugActions[2]( instring, tokensStart, self, err ) + raise + else: + for fn in self.parseAction: + tokens = fn( instring, tokensStart, retTokens ) + if tokens is not None: + retTokens = ParseResults( tokens, + self.resultsName, + asList=self.saveAsList and isinstance(tokens,(ParseResults,list)), + modal=self.modalResults ) + + if debugging: + #~ print ("Matched",self,"->",retTokens.asList()) + if (self.debugActions[1] ): + self.debugActions[1]( instring, tokensStart, loc, self, retTokens ) + + return loc, retTokens + + def tryParse( self, instring, loc ): + try: + return self._parse( instring, loc, doActions=False )[0] + except ParseFatalException: + raise ParseException( instring, loc, self.errmsg, self) + + def canParseNext(self, instring, loc): + try: + self.tryParse(instring, loc) + except (ParseException, 
IndexError): + return False + else: + return True + + class _UnboundedCache(object): + def __init__(self): + cache = {} + self.not_in_cache = not_in_cache = object() + + def get(self, key): + return cache.get(key, not_in_cache) + + def set(self, key, value): + cache[key] = value + + def clear(self): + cache.clear() + + def cache_len(self): + return len(cache) + + self.get = types.MethodType(get, self) + self.set = types.MethodType(set, self) + self.clear = types.MethodType(clear, self) + self.__len__ = types.MethodType(cache_len, self) + + if _OrderedDict is not None: + class _FifoCache(object): + def __init__(self, size): + self.not_in_cache = not_in_cache = object() + + cache = _OrderedDict() + + def get(self, key): + return cache.get(key, not_in_cache) + + def set(self, key, value): + cache[key] = value + while len(cache) > size: + try: + cache.popitem(False) + except KeyError: + pass + + def clear(self): + cache.clear() + + def cache_len(self): + return len(cache) + + self.get = types.MethodType(get, self) + self.set = types.MethodType(set, self) + self.clear = types.MethodType(clear, self) + self.__len__ = types.MethodType(cache_len, self) + + else: + class _FifoCache(object): + def __init__(self, size): + self.not_in_cache = not_in_cache = object() + + cache = {} + key_fifo = collections.deque([], size) + + def get(self, key): + return cache.get(key, not_in_cache) + + def set(self, key, value): + cache[key] = value + while len(key_fifo) > size: + cache.pop(key_fifo.popleft(), None) + key_fifo.append(key) + + def clear(self): + cache.clear() + key_fifo.clear() + + def cache_len(self): + return len(cache) + + self.get = types.MethodType(get, self) + self.set = types.MethodType(set, self) + self.clear = types.MethodType(clear, self) + self.__len__ = types.MethodType(cache_len, self) + + # argument cache for optimizing repeated calls when backtracking through recursive expressions + packrat_cache = {} # this is set later by enabledPackrat(); this is here so that resetCache() doesn't fail + packrat_cache_lock = RLock() + packrat_cache_stats = [0, 0] + + # this method gets repeatedly called during backtracking with the same arguments - + # we can cache these arguments and save ourselves the trouble of re-parsing the contained expression + def _parseCache( self, instring, loc, doActions=True, callPreParse=True ): + HIT, MISS = 0, 1 + lookup = (self, instring, loc, callPreParse, doActions) + with ParserElement.packrat_cache_lock: + cache = ParserElement.packrat_cache + value = cache.get(lookup) + if value is cache.not_in_cache: + ParserElement.packrat_cache_stats[MISS] += 1 + try: + value = self._parseNoCache(instring, loc, doActions, callPreParse) + except ParseBaseException as pe: + # cache a copy of the exception, without the traceback + cache.set(lookup, pe.__class__(*pe.args)) + raise + else: + cache.set(lookup, (value[0], value[1].copy())) + return value + else: + ParserElement.packrat_cache_stats[HIT] += 1 + if isinstance(value, Exception): + raise value + return (value[0], value[1].copy()) + + _parse = _parseNoCache + + @staticmethod + def resetCache(): + ParserElement.packrat_cache.clear() + ParserElement.packrat_cache_stats[:] = [0] * len(ParserElement.packrat_cache_stats) + + _packratEnabled = False + @staticmethod + def enablePackrat(cache_size_limit=128): + """Enables "packrat" parsing, which adds memoizing to the parsing logic. 
+        Repeated parse attempts at the same string location (which happens
+        often in many complex grammars) can immediately return a cached value,
+        instead of re-executing parsing/validating code. Memoizing is done for
+        both valid results and parsing exceptions.
+
+        Parameters:
+         - cache_size_limit - (default=C{128}) - if an integer value is provided
+           will limit the size of the packrat cache; if None is passed, then
+           the cache size will be unbounded; if 0 is passed, the cache will
+           be effectively disabled.
+
+        This speedup may break existing programs that use parse actions that
+        have side-effects. For this reason, packrat parsing is disabled when
+        you first import pyparsing. To activate the packrat feature, your
+        program must call the class method C{ParserElement.enablePackrat()}. If
+        your program uses C{psyco} to "compile as you go", you must call
+        C{enablePackrat} before calling C{psyco.full()}. If you do not do this,
+        Python will crash. For best results, call C{enablePackrat()} immediately
+        after importing pyparsing.
+
+        Example::
+            from pip._vendor import pyparsing
+            pyparsing.ParserElement.enablePackrat()
+        """
+        if not ParserElement._packratEnabled:
+            ParserElement._packratEnabled = True
+            if cache_size_limit is None:
+                ParserElement.packrat_cache = ParserElement._UnboundedCache()
+            else:
+                ParserElement.packrat_cache = ParserElement._FifoCache(cache_size_limit)
+            ParserElement._parse = ParserElement._parseCache
+
+    def parseString( self, instring, parseAll=False ):
+        """
+        Execute the parse expression with the given string.
+        This is the main interface to the client code, once the complete
+        expression has been built.
+
+        If you want the grammar to require that the entire input string be
+        successfully parsed, then set C{parseAll} to True (equivalent to ending
+        the grammar with C{L{StringEnd()}}).
+
+        Note: C{parseString} implicitly calls C{expandtabs()} on the input string,
+        in order to report proper column numbers in parse actions.
+        If the input string contains tabs and
+        the grammar uses parse actions that use the C{loc} argument to index into the
+        string being parsed, you can ensure you have a consistent view of the input
+        string by:
+         - calling C{parseWithTabs} on your grammar before calling C{parseString}
+           (see L{I{parseWithTabs}<parseWithTabs>})
+         - defining your parse action using the full C{(s,loc,toks)} signature, and
+           referencing the input string using the parse action's C{s} argument
+         - explicitly expanding the tabs in your input string before calling
+           C{parseString}
+
+        Example::
+            Word('a').parseString('aaaaabaaa') # -> ['aaaaa']
+            Word('a').parseString('aaaaabaaa', parseAll=True) # -> Exception: Expected end of text
+        """
+        ParserElement.resetCache()
+        if not self.streamlined:
+            self.streamline()
+            #~ self.saveAsList = True
+        for e in self.ignoreExprs:
+            e.streamline()
+        if not self.keepTabs:
+            instring = instring.expandtabs()
+        try:
+            loc, tokens = self._parse( instring, 0 )
+            if parseAll:
+                loc = self.preParse( instring, loc )
+                se = Empty() + StringEnd()
+                se._parse( instring, loc )
+        except ParseBaseException as exc:
+            if ParserElement.verbose_stacktrace:
+                raise
+            else:
+                # catch and re-raise exception from here, clears out pyparsing internal stack trace
+                raise exc
+        else:
+            return tokens
+
+    def scanString( self, instring, maxMatches=_MAX_INT, overlap=False ):
+        """
+        Scan the input string for expression matches. Each match will return the
+        matching tokens, start location, and end location.
May be called with optional + C{maxMatches} argument, to clip scanning after 'n' matches are found. If + C{overlap} is specified, then overlapping matches will be reported. + + Note that the start and end locations are reported relative to the string + being parsed. See L{I{parseString}<parseString>} for more information on parsing + strings with embedded tabs. + + Example:: + source = "sldjf123lsdjjkf345sldkjf879lkjsfd987" + print(source) + for tokens,start,end in Word(alphas).scanString(source): + print(' '*start + '^'*(end-start)) + print(' '*start + tokens[0]) + + prints:: + + sldjf123lsdjjkf345sldkjf879lkjsfd987 + ^^^^^ + sldjf + ^^^^^^^ + lsdjjkf + ^^^^^^ + sldkjf + ^^^^^^ + lkjsfd + """ + if not self.streamlined: + self.streamline() + for e in self.ignoreExprs: + e.streamline() + + if not self.keepTabs: + instring = _ustr(instring).expandtabs() + instrlen = len(instring) + loc = 0 + preparseFn = self.preParse + parseFn = self._parse + ParserElement.resetCache() + matches = 0 + try: + while loc <= instrlen and matches < maxMatches: + try: + preloc = preparseFn( instring, loc ) + nextLoc,tokens = parseFn( instring, preloc, callPreParse=False ) + except ParseException: + loc = preloc+1 + else: + if nextLoc > loc: + matches += 1 + yield tokens, preloc, nextLoc + if overlap: + nextloc = preparseFn( instring, loc ) + if nextloc > loc: + loc = nextLoc + else: + loc += 1 + else: + loc = nextLoc + else: + loc = preloc+1 + except ParseBaseException as exc: + if ParserElement.verbose_stacktrace: + raise + else: + # catch and re-raise exception from here, clears out pyparsing internal stack trace + raise exc + + def transformString( self, instring ): + """ + Extension to C{L{scanString}}, to modify matching text with modified tokens that may + be returned from a parse action. To use C{transformString}, define a grammar and + attach a parse action to it that modifies the returned token list. + Invoking C{transformString()} on a target string will then scan for matches, + and replace the matched text patterns according to the logic in the parse + action. C{transformString()} returns the resulting transformed string. + + Example:: + wd = Word(alphas) + wd.setParseAction(lambda toks: toks[0].title()) + + print(wd.transformString("now is the winter of our discontent made glorious summer by this sun of york.")) + Prints:: + Now Is The Winter Of Our Discontent Made Glorious Summer By This Sun Of York. + """ + out = [] + lastE = 0 + # force preservation of <TAB>s, to minimize unwanted transformation of string, and to + # keep string locs straight between transformString and scanString + self.keepTabs = True + try: + for t,s,e in self.scanString( instring ): + out.append( instring[lastE:s] ) + if t: + if isinstance(t,ParseResults): + out += t.asList() + elif isinstance(t,list): + out += t + else: + out.append(t) + lastE = e + out.append(instring[lastE:]) + out = [o for o in out if o] + return "".join(map(_ustr,_flatten(out))) + except ParseBaseException as exc: + if ParserElement.verbose_stacktrace: + raise + else: + # catch and re-raise exception from here, clears out pyparsing internal stack trace + raise exc + + def searchString( self, instring, maxMatches=_MAX_INT ): + """ + Another extension to C{L{scanString}}, simplifying the access to the tokens found + to match the given parse expression. May be called with optional + C{maxMatches} argument, to clip searching after 'n' matches are found. 
+ + Example:: + # a capitalized word starts with an uppercase letter, followed by zero or more lowercase letters + cap_word = Word(alphas.upper(), alphas.lower()) + + print(cap_word.searchString("More than Iron, more than Lead, more than Gold I need Electricity")) + + # the sum() builtin can be used to merge results into a single ParseResults object + print(sum(cap_word.searchString("More than Iron, more than Lead, more than Gold I need Electricity"))) + prints:: + [['More'], ['Iron'], ['Lead'], ['Gold'], ['I'], ['Electricity']] + ['More', 'Iron', 'Lead', 'Gold', 'I', 'Electricity'] + """ + try: + return ParseResults([ t for t,s,e in self.scanString( instring, maxMatches ) ]) + except ParseBaseException as exc: + if ParserElement.verbose_stacktrace: + raise + else: + # catch and re-raise exception from here, clears out pyparsing internal stack trace + raise exc + + def split(self, instring, maxsplit=_MAX_INT, includeSeparators=False): + """ + Generator method to split a string using the given expression as a separator. + May be called with optional C{maxsplit} argument, to limit the number of splits; + and the optional C{includeSeparators} argument (default=C{False}), if the separating + matching text should be included in the split results. + + Example:: + punc = oneOf(list(".,;:/-!?")) + print(list(punc.split("This, this?, this sentence, is badly punctuated!"))) + prints:: + ['This', ' this', '', ' this sentence', ' is badly punctuated', ''] + """ + splits = 0 + last = 0 + for t,s,e in self.scanString(instring, maxMatches=maxsplit): + yield instring[last:s] + if includeSeparators: + yield t[0] + last = e + yield instring[last:] + + def __add__(self, other ): + """ + Implementation of + operator - returns C{L{And}}. Adding strings to a ParserElement + converts them to L{Literal}s by default. + + Example:: + greet = Word(alphas) + "," + Word(alphas) + "!" + hello = "Hello, World!" + print (hello, "->", greet.parseString(hello)) + Prints:: + Hello, World! 
-> ['Hello', ',', 'World', '!']
+        """
+        if isinstance( other, basestring ):
+            other = ParserElement._literalStringClass( other )
+        if not isinstance( other, ParserElement ):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                    SyntaxWarning, stacklevel=2)
+            return None
+        return And( [ self, other ] )
+
+    def __radd__(self, other ):
+        """
+        Implementation of + operator when left operand is not a C{L{ParserElement}}
+        """
+        if isinstance( other, basestring ):
+            other = ParserElement._literalStringClass( other )
+        if not isinstance( other, ParserElement ):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                    SyntaxWarning, stacklevel=2)
+            return None
+        return other + self
+
+    def __sub__(self, other):
+        """
+        Implementation of - operator, returns C{L{And}} with error stop
+        """
+        if isinstance( other, basestring ):
+            other = ParserElement._literalStringClass( other )
+        if not isinstance( other, ParserElement ):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                    SyntaxWarning, stacklevel=2)
+            return None
+        return self + And._ErrorStop() + other
+
+    def __rsub__(self, other ):
+        """
+        Implementation of - operator when left operand is not a C{L{ParserElement}}
+        """
+        if isinstance( other, basestring ):
+            other = ParserElement._literalStringClass( other )
+        if not isinstance( other, ParserElement ):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                    SyntaxWarning, stacklevel=2)
+            return None
+        return other - self
+
+    def __mul__(self,other):
+        """
+        Implementation of * operator, allows use of C{expr * 3} in place of
+        C{expr + expr + expr}. Expressions may also be multiplied by a 2-integer
+        tuple, similar to C{{min,max}} multipliers in regular expressions. Tuples
+        may also include C{None} as in:
+         - C{expr*(n,None)} or C{expr*(n,)} is equivalent
+              to C{expr*n + L{ZeroOrMore}(expr)}
+              (read as "at least n instances of C{expr}")
+         - C{expr*(None,n)} is equivalent to C{expr*(0,n)}
+              (read as "0 to n instances of C{expr}")
+         - C{expr*(None,None)} is equivalent to C{L{ZeroOrMore}(expr)}
+         - C{expr*(1,None)} is equivalent to C{L{OneOrMore}(expr)}
+
+        Note that C{expr*(None,n)} does not raise an exception if
+        more than n exprs exist in the input stream; that is,
+        C{expr*(None,n)} does not enforce a maximum number of expr
+        occurrences. If this behavior is desired, then write
+        C{expr*(None,n) + ~expr}
+        """
+        if isinstance(other,int):
+            minElements, optElements = other,0
+        elif isinstance(other,tuple):
+            other = (other + (None, None))[:2]
+            if other[0] is None:
+                other = (0, other[1])
+            if isinstance(other[0],int) and other[1] is None:
+                if other[0] == 0:
+                    return ZeroOrMore(self)
+                if other[0] == 1:
+                    return OneOrMore(self)
+                else:
+                    return self*other[0] + ZeroOrMore(self)
+            elif isinstance(other[0],int) and isinstance(other[1],int):
+                minElements, optElements = other
+                optElements -= minElements
+            else:
+                raise TypeError("cannot multiply 'ParserElement' and ('%s','%s') objects" % (type(other[0]), type(other[1])))
+        else:
+            raise TypeError("cannot multiply 'ParserElement' and '%s' objects" % type(other))
+
+        if minElements < 0:
+            raise ValueError("cannot multiply ParserElement by negative value")
+        if optElements < 0:
+            raise ValueError("second tuple value must be greater than or equal to first tuple value")
+        if minElements == optElements == 0:
+            raise ValueError("cannot multiply ParserElement by 0 or (0,0)")
+
+        if (optElements):
+            def makeOptionalList(n):
+                if n>1:
+                    return Optional(self + makeOptionalList(n-1))
+                else:
+                    return Optional(self)
+            if minElements:
+                if minElements == 1:
+                    ret = self + makeOptionalList(optElements)
+                else:
+                    ret = And([self]*minElements) + makeOptionalList(optElements)
+            else:
+                ret = makeOptionalList(optElements)
+        else:
+            if minElements == 1:
+                ret = self
+            else:
+                ret = And([self]*minElements)
+        return ret
+
+    def __rmul__(self, other):
+        return self.__mul__(other)
+
+    def __or__(self, other ):
+        """
+        Implementation of | operator - returns C{L{MatchFirst}}
+        """
+        if isinstance( other, basestring ):
+            other = ParserElement._literalStringClass( other )
+        if not isinstance( other, ParserElement ):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                    SyntaxWarning, stacklevel=2)
+            return None
+        return MatchFirst( [ self, other ] )
+
+    def __ror__(self, other ):
+        """
+        Implementation of | operator when left operand is not a C{L{ParserElement}}
+        """
+        if isinstance( other, basestring ):
+            other = ParserElement._literalStringClass( other )
+        if not isinstance( other, ParserElement ):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                    SyntaxWarning, stacklevel=2)
+            return None
+        return other | self
+
+    def __xor__(self, other ):
+        """
+        Implementation of ^ operator - returns C{L{Or}}
+        """
+        if isinstance( other, basestring ):
+            other = ParserElement._literalStringClass( other )
+        if not isinstance( other, ParserElement ):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                    SyntaxWarning, stacklevel=2)
+            return None
+        return Or( [ self, other ] )
+
+    def __rxor__(self, other ):
+        """
+        Implementation of ^ operator when left operand is not a C{L{ParserElement}}
+        """
+        if isinstance( other, basestring ):
+            other = ParserElement._literalStringClass( other )
+        if not isinstance( other, ParserElement ):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                    SyntaxWarning, stacklevel=2)
+            return None
+        return other ^ self
+
+    def __and__(self, other ):
+        """
+        Implementation of & operator - returns C{L{Each}}
+        """
+        if isinstance( other, basestring ):
+            other = ParserElement._literalStringClass( other )
+        if not isinstance( other, ParserElement ):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                    SyntaxWarning, stacklevel=2)
+
return None + return Each( [ self, other ] ) + + def __rand__(self, other ): + """ + Implementation of & operator when left operand is not a C{L{ParserElement}} + """ + if isinstance( other, basestring ): + other = ParserElement._literalStringClass( other ) + if not isinstance( other, ParserElement ): + warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), + SyntaxWarning, stacklevel=2) + return None + return other & self + + def __invert__( self ): + """ + Implementation of ~ operator - returns C{L{NotAny}} + """ + return NotAny( self ) + + def __call__(self, name=None): + """ + Shortcut for C{L{setResultsName}}, with C{listAllMatches=False}. + + If C{name} is given with a trailing C{'*'} character, then C{listAllMatches} will be + passed as C{True}. + + If C{name} is omitted, same as calling C{L{copy}}. + + Example:: + # these are equivalent + userdata = Word(alphas).setResultsName("name") + Word(nums+"-").setResultsName("socsecno") + userdata = Word(alphas)("name") + Word(nums+"-")("socsecno") + """ + if name is not None: + return self.setResultsName(name) + else: + return self.copy() + + def suppress( self ): + """ + Suppresses the output of this C{ParserElement}; useful to keep punctuation from + cluttering up returned output. + """ + return Suppress( self ) + + def leaveWhitespace( self ): + """ + Disables the skipping of whitespace before matching the characters in the + C{ParserElement}'s defined pattern. This is normally only used internally by + the pyparsing module, but may be needed in some whitespace-sensitive grammars. + """ + self.skipWhitespace = False + return self + + def setWhitespaceChars( self, chars ): + """ + Overrides the default whitespace chars + """ + self.skipWhitespace = True + self.whiteChars = chars + self.copyDefaultWhiteChars = False + return self + + def parseWithTabs( self ): + """ + Overrides default behavior to expand C{<TAB>}s to spaces before parsing the input string. + Must be called before C{parseString} when the input grammar contains elements that + match C{<TAB>} characters. + """ + self.keepTabs = True + return self + + def ignore( self, other ): + """ + Define expression to be ignored (e.g., comments) while doing pattern + matching; may be called repeatedly, to define multiple comment or other + ignorable patterns. + + Example:: + patt = OneOrMore(Word(alphas)) + patt.parseString('ablaj /* comment */ lskjd') # -> ['ablaj'] + + patt.ignore(cStyleComment) + patt.parseString('ablaj /* comment */ lskjd') # -> ['ablaj', 'lskjd'] + """ + if isinstance(other, basestring): + other = Suppress(other) + + if isinstance( other, Suppress ): + if other not in self.ignoreExprs: + self.ignoreExprs.append(other) + else: + self.ignoreExprs.append( Suppress( other.copy() ) ) + return self + + def setDebugActions( self, startAction, successAction, exceptionAction ): + """ + Enable display of debugging messages while doing pattern matching. + """ + self.debugActions = (startAction or _defaultStartDebugAction, + successAction or _defaultSuccessDebugAction, + exceptionAction or _defaultExceptionDebugAction) + self.debug = True + return self + + def setDebug( self, flag=True ): + """ + Enable display of debugging messages while doing pattern matching. + Set C{flag} to True to enable, False to disable. 
+ + Example:: + wd = Word(alphas).setName("alphaword") + integer = Word(nums).setName("numword") + term = wd | integer + + # turn on debugging for wd + wd.setDebug() + + OneOrMore(term).parseString("abc 123 xyz 890") + + prints:: + Match alphaword at loc 0(1,1) + Matched alphaword -> ['abc'] + Match alphaword at loc 3(1,4) + Exception raised:Expected alphaword (at char 4), (line:1, col:5) + Match alphaword at loc 7(1,8) + Matched alphaword -> ['xyz'] + Match alphaword at loc 11(1,12) + Exception raised:Expected alphaword (at char 12), (line:1, col:13) + Match alphaword at loc 15(1,16) + Exception raised:Expected alphaword (at char 15), (line:1, col:16) + + The output shown is that produced by the default debug actions - custom debug actions can be + specified using L{setDebugActions}. Prior to attempting + to match the C{wd} expression, the debugging message C{"Match <exprname> at loc <n>(<line>,<col>)"} + is shown. Then if the parse succeeds, a C{"Matched"} message is shown, or an C{"Exception raised"} + message is shown. Also note the use of L{setName} to assign a human-readable name to the expression, + which makes debugging and exception messages easier to understand - for instance, the default + name created for the C{Word} expression without calling C{setName} is C{"W:(ABCD...)"}. + """ + if flag: + self.setDebugActions( _defaultStartDebugAction, _defaultSuccessDebugAction, _defaultExceptionDebugAction ) + else: + self.debug = False + return self + + def __str__( self ): + return self.name + + def __repr__( self ): + return _ustr(self) + + def streamline( self ): + self.streamlined = True + self.strRepr = None + return self + + def checkRecursion( self, parseElementList ): + pass + + def validate( self, validateTrace=[] ): + """ + Check defined expressions for valid structure, check for infinite recursive definitions. + """ + self.checkRecursion( [] ) + + def parseFile( self, file_or_filename, parseAll=False ): + """ + Execute the parse expression on the given file or filename. + If a filename is specified (instead of a file object), + the entire file is opened, read, and closed before parsing. + """ + try: + file_contents = file_or_filename.read() + except AttributeError: + with open(file_or_filename, "r") as f: + file_contents = f.read() + try: + return self.parseString(file_contents, parseAll) + except ParseBaseException as exc: + if ParserElement.verbose_stacktrace: + raise + else: + # catch and re-raise exception from here, clears out pyparsing internal stack trace + raise exc + + def __eq__(self,other): + if isinstance(other, ParserElement): + return self is other or vars(self) == vars(other) + elif isinstance(other, basestring): + return self.matches(other) + else: + return super(ParserElement,self)==other + + def __ne__(self,other): + return not (self == other) + + def __hash__(self): + return hash(id(self)) + + def __req__(self,other): + return self == other + + def __rne__(self,other): + return not (self == other) + + def matches(self, testString, parseAll=True): + """ + Method for quick testing of a parser against a test string. Good for simple + inline microtests of sub expressions while building up larger parser. 
+
+        Parameters:
+         - testString - to test against this expression for a match
+         - parseAll - (default=C{True}) - flag to pass to C{L{parseString}} when running tests
+
+        Example::
+            expr = Word(nums)
+            assert expr.matches("100")
+        """
+        try:
+            self.parseString(_ustr(testString), parseAll=parseAll)
+            return True
+        except ParseBaseException:
+            return False
+
+    def runTests(self, tests, parseAll=True, comment='#', fullDump=True, printResults=True, failureTests=False):
+        """
+        Execute the parse expression on a series of test strings, showing each
+        test, the parsed results or where the parse failed. Quick and easy way to
+        run a parse expression against a list of sample strings.
+
+        Parameters:
+         - tests - a list of separate test strings, or a multiline string of test strings
+         - parseAll - (default=C{True}) - flag to pass to C{L{parseString}} when running tests
+         - comment - (default=C{'#'}) - expression for indicating embedded comments in the test
+              string; pass None to disable comment filtering
+         - fullDump - (default=C{True}) - dump results as list followed by results names in nested outline;
+              if False, only dump nested list
+         - printResults - (default=C{True}) prints test output to stdout
+         - failureTests - (default=C{False}) indicates if these tests are expected to fail parsing
+
+        Returns: a (success, results) tuple, where success indicates that all tests succeeded
+        (or failed if C{failureTests} is True), and the results contain a list of lines of each
+        test's output
+
+        Example::
+            number_expr = pyparsing_common.number.copy()
+
+            result = number_expr.runTests('''
+                # unsigned integer
+                100
+                # negative integer
+                -100
+                # float with scientific notation
+                6.02e23
+                # integer with scientific notation
+                1e-12
+                ''')
+            print("Success" if result[0] else "Failed!")
+
+            result = number_expr.runTests('''
+                # stray character
+                100Z
+                # missing leading digit before '.'
+                -.100
+                # too many '.'
+                3.14.159
+                ''', failureTests=True)
+            print("Success" if result[0] else "Failed!")
+        prints::
+            # unsigned integer
+            100
+            [100]
+
+            # negative integer
+            -100
+            [-100]
+
+            # float with scientific notation
+            6.02e23
+            [6.02e+23]
+
+            # integer with scientific notation
+            1e-12
+            [1e-12]
+
+            Success
+
+            # stray character
+            100Z
+               ^
+            FAIL: Expected end of text (at char 3), (line:1, col:4)
+
+            # missing leading digit before '.'
+            -.100
+            ^
+            FAIL: Expected {real number with scientific notation | real number | signed integer} (at char 0), (line:1, col:1)
+
+            # too many '.'
+            3.14.159
+                ^
+            FAIL: Expected end of text (at char 4), (line:1, col:5)
+
+            Success
+
+        Each test string must be on a single line. If you want to test a string that spans multiple
+        lines, create a test like this::
+
+            expr.runTests(r"this is a test\\n of strings that spans \\n 3 lines")
+
+        (Note that this is a raw string literal, you must include the leading 'r'.)
+ """ + if isinstance(tests, basestring): + tests = list(map(str.strip, tests.rstrip().splitlines())) + if isinstance(comment, basestring): + comment = Literal(comment) + allResults = [] + comments = [] + success = True + for t in tests: + if comment is not None and comment.matches(t, False) or comments and not t: + comments.append(t) + continue + if not t: + continue + out = ['\n'.join(comments), t] + comments = [] + try: + t = t.replace(r'\n','\n') + result = self.parseString(t, parseAll=parseAll) + out.append(result.dump(full=fullDump)) + success = success and not failureTests + except ParseBaseException as pe: + fatal = "(FATAL)" if isinstance(pe, ParseFatalException) else "" + if '\n' in t: + out.append(line(pe.loc, t)) + out.append(' '*(col(pe.loc,t)-1) + '^' + fatal) + else: + out.append(' '*pe.loc + '^' + fatal) + out.append("FAIL: " + str(pe)) + success = success and failureTests + result = pe + except Exception as exc: + out.append("FAIL-EXCEPTION: " + str(exc)) + success = success and failureTests + result = exc + + if printResults: + if fullDump: + out.append('') + print('\n'.join(out)) + + allResults.append((t, result)) + + return success, allResults + + +class Token(ParserElement): + """ + Abstract C{ParserElement} subclass, for defining atomic matching patterns. + """ + def __init__( self ): + super(Token,self).__init__( savelist=False ) + + +class Empty(Token): + """ + An empty token, will always match. + """ + def __init__( self ): + super(Empty,self).__init__() + self.name = "Empty" + self.mayReturnEmpty = True + self.mayIndexError = False + + +class NoMatch(Token): + """ + A token that will never match. + """ + def __init__( self ): + super(NoMatch,self).__init__() + self.name = "NoMatch" + self.mayReturnEmpty = True + self.mayIndexError = False + self.errmsg = "Unmatchable token" + + def parseImpl( self, instring, loc, doActions=True ): + raise ParseException(instring, loc, self.errmsg, self) + + +class Literal(Token): + """ + Token to exactly match a specified string. + + Example:: + Literal('blah').parseString('blah') # -> ['blah'] + Literal('blah').parseString('blahfooblah') # -> ['blah'] + Literal('blah').parseString('bla') # -> Exception: Expected "blah" + + For case-insensitive matching, use L{CaselessLiteral}. + + For keyword matching (force word break before and after the matched string), + use L{Keyword} or L{CaselessKeyword}. 
+ """ + def __init__( self, matchString ): + super(Literal,self).__init__() + self.match = matchString + self.matchLen = len(matchString) + try: + self.firstMatchChar = matchString[0] + except IndexError: + warnings.warn("null string passed to Literal; use Empty() instead", + SyntaxWarning, stacklevel=2) + self.__class__ = Empty + self.name = '"%s"' % _ustr(self.match) + self.errmsg = "Expected " + self.name + self.mayReturnEmpty = False + self.mayIndexError = False + + # Performance tuning: this routine gets called a *lot* + # if this is a single character match string and the first character matches, + # short-circuit as quickly as possible, and avoid calling startswith + #~ @profile + def parseImpl( self, instring, loc, doActions=True ): + if (instring[loc] == self.firstMatchChar and + (self.matchLen==1 or instring.startswith(self.match,loc)) ): + return loc+self.matchLen, self.match + raise ParseException(instring, loc, self.errmsg, self) +_L = Literal +ParserElement._literalStringClass = Literal + +class Keyword(Token): + """ + Token to exactly match a specified string as a keyword, that is, it must be + immediately followed by a non-keyword character. Compare with C{L{Literal}}: + - C{Literal("if")} will match the leading C{'if'} in C{'ifAndOnlyIf'}. + - C{Keyword("if")} will not; it will only match the leading C{'if'} in C{'if x=1'}, or C{'if(y==2)'} + Accepts two optional constructor arguments in addition to the keyword string: + - C{identChars} is a string of characters that would be valid identifier characters, + defaulting to all alphanumerics + "_" and "$" + - C{caseless} allows case-insensitive matching, default is C{False}. + + Example:: + Keyword("start").parseString("start") # -> ['start'] + Keyword("start").parseString("starting") # -> Exception + + For case-insensitive matching, use L{CaselessKeyword}. 
+ """ + DEFAULT_KEYWORD_CHARS = alphanums+"_$" + + def __init__( self, matchString, identChars=None, caseless=False ): + super(Keyword,self).__init__() + if identChars is None: + identChars = Keyword.DEFAULT_KEYWORD_CHARS + self.match = matchString + self.matchLen = len(matchString) + try: + self.firstMatchChar = matchString[0] + except IndexError: + warnings.warn("null string passed to Keyword; use Empty() instead", + SyntaxWarning, stacklevel=2) + self.name = '"%s"' % self.match + self.errmsg = "Expected " + self.name + self.mayReturnEmpty = False + self.mayIndexError = False + self.caseless = caseless + if caseless: + self.caselessmatch = matchString.upper() + identChars = identChars.upper() + self.identChars = set(identChars) + + def parseImpl( self, instring, loc, doActions=True ): + if self.caseless: + if ( (instring[ loc:loc+self.matchLen ].upper() == self.caselessmatch) and + (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen].upper() not in self.identChars) and + (loc == 0 or instring[loc-1].upper() not in self.identChars) ): + return loc+self.matchLen, self.match + else: + if (instring[loc] == self.firstMatchChar and + (self.matchLen==1 or instring.startswith(self.match,loc)) and + (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen] not in self.identChars) and + (loc == 0 or instring[loc-1] not in self.identChars) ): + return loc+self.matchLen, self.match + raise ParseException(instring, loc, self.errmsg, self) + + def copy(self): + c = super(Keyword,self).copy() + c.identChars = Keyword.DEFAULT_KEYWORD_CHARS + return c + + @staticmethod + def setDefaultKeywordChars( chars ): + """Overrides the default Keyword chars + """ + Keyword.DEFAULT_KEYWORD_CHARS = chars + +class CaselessLiteral(Literal): + """ + Token to match a specified string, ignoring case of letters. + Note: the matched results will always be in the case of the given + match string, NOT the case of the input text. + + Example:: + OneOrMore(CaselessLiteral("CMD")).parseString("cmd CMD Cmd10") # -> ['CMD', 'CMD', 'CMD'] + + (Contrast with example for L{CaselessKeyword}.) + """ + def __init__( self, matchString ): + super(CaselessLiteral,self).__init__( matchString.upper() ) + # Preserve the defining literal. + self.returnString = matchString + self.name = "'%s'" % self.returnString + self.errmsg = "Expected " + self.name + + def parseImpl( self, instring, loc, doActions=True ): + if instring[ loc:loc+self.matchLen ].upper() == self.match: + return loc+self.matchLen, self.returnString + raise ParseException(instring, loc, self.errmsg, self) + +class CaselessKeyword(Keyword): + """ + Caseless version of L{Keyword}. + + Example:: + OneOrMore(CaselessKeyword("CMD")).parseString("cmd CMD Cmd10") # -> ['CMD', 'CMD'] + + (Contrast with example for L{CaselessLiteral}.) + """ + def __init__( self, matchString, identChars=None ): + super(CaselessKeyword,self).__init__( matchString, identChars, caseless=True ) + + def parseImpl( self, instring, loc, doActions=True ): + if ( (instring[ loc:loc+self.matchLen ].upper() == self.caselessmatch) and + (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen].upper() not in self.identChars) ): + return loc+self.matchLen, self.match + raise ParseException(instring, loc, self.errmsg, self) + +class CloseMatch(Token): + """ + A variation on L{Literal} which matches "close" matches, that is, + strings with at most 'n' mismatching characters. 
C{CloseMatch} takes parameters: + - C{match_string} - string to be matched + - C{maxMismatches} - (C{default=1}) maximum number of mismatches allowed to count as a match + + The results from a successful parse will contain the matched text from the input string and the following named results: + - C{mismatches} - a list of the positions within the match_string where mismatches were found + - C{original} - the original match_string used to compare against the input string + + If C{mismatches} is an empty list, then the match was an exact match. + + Example:: + patt = CloseMatch("ATCATCGAATGGA") + patt.parseString("ATCATCGAAXGGA") # -> (['ATCATCGAAXGGA'], {'mismatches': [[9]], 'original': ['ATCATCGAATGGA']}) + patt.parseString("ATCAXCGAAXGGA") # -> Exception: Expected 'ATCATCGAATGGA' (with up to 1 mismatches) (at char 0), (line:1, col:1) + + # exact match + patt.parseString("ATCATCGAATGGA") # -> (['ATCATCGAATGGA'], {'mismatches': [[]], 'original': ['ATCATCGAATGGA']}) + + # close match allowing up to 2 mismatches + patt = CloseMatch("ATCATCGAATGGA", maxMismatches=2) + patt.parseString("ATCAXCGAAXGGA") # -> (['ATCAXCGAAXGGA'], {'mismatches': [[4, 9]], 'original': ['ATCATCGAATGGA']}) + """ + def __init__(self, match_string, maxMismatches=1): + super(CloseMatch,self).__init__() + self.name = match_string + self.match_string = match_string + self.maxMismatches = maxMismatches + self.errmsg = "Expected %r (with up to %d mismatches)" % (self.match_string, self.maxMismatches) + self.mayIndexError = False + self.mayReturnEmpty = False + + def parseImpl( self, instring, loc, doActions=True ): + start = loc + instrlen = len(instring) + maxloc = start + len(self.match_string) + + if maxloc <= instrlen: + match_string = self.match_string + match_stringloc = 0 + mismatches = [] + maxMismatches = self.maxMismatches + + for match_stringloc,s_m in enumerate(zip(instring[loc:maxloc], self.match_string)): + src,mat = s_m + if src != mat: + mismatches.append(match_stringloc) + if len(mismatches) > maxMismatches: + break + else: + loc = match_stringloc + 1 + results = ParseResults([instring[start:loc]]) + results['original'] = self.match_string + results['mismatches'] = mismatches + return loc, results + + raise ParseException(instring, loc, self.errmsg, self) + + +class Word(Token): + """ + Token for matching words composed of allowed character sets. + Defined with string containing all allowed initial characters, + an optional string containing allowed body characters (if omitted, + defaults to the initial character set), and an optional minimum, + maximum, and/or exact length. The default value for C{min} is 1 (a + minimum value < 1 is not valid); the default values for C{max} and C{exact} + are 0, meaning no maximum or exact length restriction. An optional + C{excludeChars} parameter can list characters that might be found in + the input C{bodyChars} string; useful to define a word of all printables + except for one or two characters, for instance. + + L{srange} is useful for defining custom character set strings for defining + C{Word} expressions, using range notation from regular expression character sets. + + A common mistake is to use C{Word} to match a specific literal string, as in + C{Word("Address")}. Remember that C{Word} uses the string argument to define + I{sets} of matchable characters. This expression would match "Add", "AAA", + "dAred", or any other word made up of the characters 'A', 'd', 'r', 'e', and 's'. + To match an exact literal string, use L{Literal} or L{Keyword}. 
+ + pyparsing includes helper strings for building Words: + - L{alphas} + - L{nums} + - L{alphanums} + - L{hexnums} + - L{alphas8bit} (alphabetic characters in ASCII range 128-255 - accented, tilded, umlauted, etc.) + - L{punc8bit} (non-alphabetic characters in ASCII range 128-255 - currency, symbols, superscripts, diacriticals, etc.) + - L{printables} (any non-whitespace character) + + Example:: + # a word composed of digits + integer = Word(nums) # equivalent to Word("0123456789") or Word(srange("0-9")) + + # a word with a leading capital, and zero or more lowercase + capital_word = Word(alphas.upper(), alphas.lower()) + + # hostnames are alphanumeric, with leading alpha, and '-' + hostname = Word(alphas, alphanums+'-') + + # roman numeral (not a strict parser, accepts invalid mix of characters) + roman = Word("IVXLCDM") + + # any string of non-whitespace characters, except for ',' + csv_value = Word(printables, excludeChars=",") + """ + def __init__( self, initChars, bodyChars=None, min=1, max=0, exact=0, asKeyword=False, excludeChars=None ): + super(Word,self).__init__() + if excludeChars: + initChars = ''.join(c for c in initChars if c not in excludeChars) + if bodyChars: + bodyChars = ''.join(c for c in bodyChars if c not in excludeChars) + self.initCharsOrig = initChars + self.initChars = set(initChars) + if bodyChars : + self.bodyCharsOrig = bodyChars + self.bodyChars = set(bodyChars) + else: + self.bodyCharsOrig = initChars + self.bodyChars = set(initChars) + + self.maxSpecified = max > 0 + + if min < 1: + raise ValueError("cannot specify a minimum length < 1; use Optional(Word()) if zero-length word is permitted") + + self.minLen = min + + if max > 0: + self.maxLen = max + else: + self.maxLen = _MAX_INT + + if exact > 0: + self.maxLen = exact + self.minLen = exact + + self.name = _ustr(self) + self.errmsg = "Expected " + self.name + self.mayIndexError = False + self.asKeyword = asKeyword + + if ' ' not in self.initCharsOrig+self.bodyCharsOrig and (min==1 and max==0 and exact==0): + if self.bodyCharsOrig == self.initCharsOrig: + self.reString = "[%s]+" % _escapeRegexRangeChars(self.initCharsOrig) + elif len(self.initCharsOrig) == 1: + self.reString = "%s[%s]*" % \ + (re.escape(self.initCharsOrig), + _escapeRegexRangeChars(self.bodyCharsOrig),) + else: + self.reString = "[%s][%s]*" % \ + (_escapeRegexRangeChars(self.initCharsOrig), + _escapeRegexRangeChars(self.bodyCharsOrig),) + if self.asKeyword: + self.reString = r"\b"+self.reString+r"\b" + try: + self.re = re.compile( self.reString ) + except Exception: + self.re = None + + def parseImpl( self, instring, loc, doActions=True ): + if self.re: + result = self.re.match(instring,loc) + if not result: + raise ParseException(instring, loc, self.errmsg, self) + + loc = result.end() + return loc, result.group() + + if not(instring[ loc ] in self.initChars): + raise ParseException(instring, loc, self.errmsg, self) + + start = loc + loc += 1 + instrlen = len(instring) + bodychars = self.bodyChars + maxloc = start + self.maxLen + maxloc = min( maxloc, instrlen ) + while loc < maxloc and instring[loc] in bodychars: + loc += 1 + + throwException = False + if loc - start < self.minLen: + throwException = True + if self.maxSpecified and loc < instrlen and instring[loc] in bodychars: + throwException = True + if self.asKeyword: + if (start>0 and instring[start-1] in bodychars) or (loc<instrlen and instring[loc] in bodychars): + throwException = True + + if throwException: + raise ParseException(instring, loc, self.errmsg, self) + + return loc, 
instring[start:loc] + + def __str__( self ): + try: + return super(Word,self).__str__() + except Exception: + pass + + + if self.strRepr is None: + + def charsAsStr(s): + if len(s)>4: + return s[:4]+"..." + else: + return s + + if ( self.initCharsOrig != self.bodyCharsOrig ): + self.strRepr = "W:(%s,%s)" % ( charsAsStr(self.initCharsOrig), charsAsStr(self.bodyCharsOrig) ) + else: + self.strRepr = "W:(%s)" % charsAsStr(self.initCharsOrig) + + return self.strRepr + + +class Regex(Token): + r""" + Token for matching strings that match a given regular expression. + Defined with string specifying the regular expression in a form recognized by the inbuilt Python re module. + If the given regex contains named groups (defined using C{(?P<name>...)}), these will be preserved as + named parse results. + + Example:: + realnum = Regex(r"[+-]?\d+\.\d*") + date = Regex(r'(?P<year>\d{4})-(?P<month>\d\d?)-(?P<day>\d\d?)') + # ref: http://stackoverflow.com/questions/267399/how-do-you-match-only-valid-roman-numerals-with-a-regular-expression + roman = Regex(r"M{0,4}(CM|CD|D?C{0,3})(XC|XL|L?X{0,3})(IX|IV|V?I{0,3})") + """ + compiledREtype = type(re.compile("[A-Z]")) + def __init__( self, pattern, flags=0): + """The parameters C{pattern} and C{flags} are passed to the C{re.compile()} function as-is. See the Python C{re} module for an explanation of the acceptable patterns and flags.""" + super(Regex,self).__init__() + + if isinstance(pattern, basestring): + if not pattern: + warnings.warn("null string passed to Regex; use Empty() instead", + SyntaxWarning, stacklevel=2) + + self.pattern = pattern + self.flags = flags + + try: + self.re = re.compile(self.pattern, self.flags) + self.reString = self.pattern + except sre_constants.error: + warnings.warn("invalid pattern (%s) passed to Regex" % pattern, + SyntaxWarning, stacklevel=2) + raise + + elif isinstance(pattern, Regex.compiledREtype): + self.re = pattern + self.pattern = \ + self.reString = str(pattern) + self.flags = flags + + else: + raise ValueError("Regex may only be constructed with a string or a compiled RE object") + + self.name = _ustr(self) + self.errmsg = "Expected " + self.name + self.mayIndexError = False + self.mayReturnEmpty = True + + def parseImpl( self, instring, loc, doActions=True ): + result = self.re.match(instring,loc) + if not result: + raise ParseException(instring, loc, self.errmsg, self) + + loc = result.end() + d = result.groupdict() + ret = ParseResults(result.group()) + if d: + for k in d: + ret[k] = d[k] + return loc,ret + + def __str__( self ): + try: + return super(Regex,self).__str__() + except Exception: + pass + + if self.strRepr is None: + self.strRepr = "Re:(%s)" % repr(self.pattern) + + return self.strRepr + + +class QuotedString(Token): + r""" + Token for matching strings that are delimited by quoting characters. 
+ + Defined with the following parameters: + - quoteChar - string of one or more characters defining the quote delimiting string + - escChar - character to escape quotes, typically backslash (default=C{None}) + - escQuote - special quote sequence to escape an embedded quote string (such as SQL's "" to escape an embedded ") (default=C{None}) + - multiline - boolean indicating whether quotes can span multiple lines (default=C{False}) + - unquoteResults - boolean indicating whether the matched text should be unquoted (default=C{True}) + - endQuoteChar - string of one or more characters defining the end of the quote delimited string (default=C{None} => same as quoteChar) + - convertWhitespaceEscapes - convert escaped whitespace (C{'\t'}, C{'\n'}, etc.) to actual whitespace (default=C{True}) + + Example:: + qs = QuotedString('"') + print(qs.searchString('lsjdf "This is the quote" sldjf')) + complex_qs = QuotedString('{{', endQuoteChar='}}') + print(complex_qs.searchString('lsjdf {{This is the "quote"}} sldjf')) + sql_qs = QuotedString('"', escQuote='""') + print(sql_qs.searchString('lsjdf "This is the quote with ""embedded"" quotes" sldjf')) + prints:: + [['This is the quote']] + [['This is the "quote"']] + [['This is the quote with "embedded" quotes']] + """ + def __init__( self, quoteChar, escChar=None, escQuote=None, multiline=False, unquoteResults=True, endQuoteChar=None, convertWhitespaceEscapes=True): + super(QuotedString,self).__init__() + + # remove white space from quote chars - wont work anyway + quoteChar = quoteChar.strip() + if not quoteChar: + warnings.warn("quoteChar cannot be the empty string",SyntaxWarning,stacklevel=2) + raise SyntaxError() + + if endQuoteChar is None: + endQuoteChar = quoteChar + else: + endQuoteChar = endQuoteChar.strip() + if not endQuoteChar: + warnings.warn("endQuoteChar cannot be the empty string",SyntaxWarning,stacklevel=2) + raise SyntaxError() + + self.quoteChar = quoteChar + self.quoteCharLen = len(quoteChar) + self.firstQuoteChar = quoteChar[0] + self.endQuoteChar = endQuoteChar + self.endQuoteCharLen = len(endQuoteChar) + self.escChar = escChar + self.escQuote = escQuote + self.unquoteResults = unquoteResults + self.convertWhitespaceEscapes = convertWhitespaceEscapes + + if multiline: + self.flags = re.MULTILINE | re.DOTALL + self.pattern = r'%s(?:[^%s%s]' % \ + ( re.escape(self.quoteChar), + _escapeRegexRangeChars(self.endQuoteChar[0]), + (escChar is not None and _escapeRegexRangeChars(escChar) or '') ) + else: + self.flags = 0 + self.pattern = r'%s(?:[^%s\n\r%s]' % \ + ( re.escape(self.quoteChar), + _escapeRegexRangeChars(self.endQuoteChar[0]), + (escChar is not None and _escapeRegexRangeChars(escChar) or '') ) + if len(self.endQuoteChar) > 1: + self.pattern += ( + '|(?:' + ')|(?:'.join("%s[^%s]" % (re.escape(self.endQuoteChar[:i]), + _escapeRegexRangeChars(self.endQuoteChar[i])) + for i in range(len(self.endQuoteChar)-1,0,-1)) + ')' + ) + if escQuote: + self.pattern += (r'|(?:%s)' % re.escape(escQuote)) + if escChar: + self.pattern += (r'|(?:%s.)' % re.escape(escChar)) + self.escCharReplacePattern = re.escape(self.escChar)+"(.)" + self.pattern += (r')*%s' % re.escape(self.endQuoteChar)) + + try: + self.re = re.compile(self.pattern, self.flags) + self.reString = self.pattern + except sre_constants.error: + warnings.warn("invalid pattern (%s) passed to Regex" % self.pattern, + SyntaxWarning, stacklevel=2) + raise + + self.name = _ustr(self) + self.errmsg = "Expected " + self.name + self.mayIndexError = False + self.mayReturnEmpty = True + + def 
parseImpl( self, instring, loc, doActions=True ): + result = instring[loc] == self.firstQuoteChar and self.re.match(instring,loc) or None + if not result: + raise ParseException(instring, loc, self.errmsg, self) + + loc = result.end() + ret = result.group() + + if self.unquoteResults: + + # strip off quotes + ret = ret[self.quoteCharLen:-self.endQuoteCharLen] + + if isinstance(ret,basestring): + # replace escaped whitespace + if '\\' in ret and self.convertWhitespaceEscapes: + ws_map = { + r'\t' : '\t', + r'\n' : '\n', + r'\f' : '\f', + r'\r' : '\r', + } + for wslit,wschar in ws_map.items(): + ret = ret.replace(wslit, wschar) + + # replace escaped characters + if self.escChar: + ret = re.sub(self.escCharReplacePattern, r"\g<1>", ret) + + # replace escaped quotes + if self.escQuote: + ret = ret.replace(self.escQuote, self.endQuoteChar) + + return loc, ret + + def __str__( self ): + try: + return super(QuotedString,self).__str__() + except Exception: + pass + + if self.strRepr is None: + self.strRepr = "quoted string, starting with %s ending with %s" % (self.quoteChar, self.endQuoteChar) + + return self.strRepr + + +class CharsNotIn(Token): + """ + Token for matching words composed of characters I{not} in a given set (will + include whitespace in matched characters if not listed in the provided exclusion set - see example). + Defined with string containing all disallowed characters, and an optional + minimum, maximum, and/or exact length. The default value for C{min} is 1 (a + minimum value < 1 is not valid); the default values for C{max} and C{exact} + are 0, meaning no maximum or exact length restriction. + + Example:: + # define a comma-separated-value as anything that is not a ',' + csv_value = CharsNotIn(',') + print(delimitedList(csv_value).parseString("dkls,lsdkjf,s12 34,@!#,213")) + prints:: + ['dkls', 'lsdkjf', 's12 34', '@!#', '213'] + """ + def __init__( self, notChars, min=1, max=0, exact=0 ): + super(CharsNotIn,self).__init__() + self.skipWhitespace = False + self.notChars = notChars + + if min < 1: + raise ValueError("cannot specify a minimum length < 1; use Optional(CharsNotIn()) if zero-length char group is permitted") + + self.minLen = min + + if max > 0: + self.maxLen = max + else: + self.maxLen = _MAX_INT + + if exact > 0: + self.maxLen = exact + self.minLen = exact + + self.name = _ustr(self) + self.errmsg = "Expected " + self.name + self.mayReturnEmpty = ( self.minLen == 0 ) + self.mayIndexError = False + + def parseImpl( self, instring, loc, doActions=True ): + if instring[loc] in self.notChars: + raise ParseException(instring, loc, self.errmsg, self) + + start = loc + loc += 1 + notchars = self.notChars + maxlen = min( start+self.maxLen, len(instring) ) + while loc < maxlen and \ + (instring[loc] not in notchars): + loc += 1 + + if loc - start < self.minLen: + raise ParseException(instring, loc, self.errmsg, self) + + return loc, instring[start:loc] + + def __str__( self ): + try: + return super(CharsNotIn, self).__str__() + except Exception: + pass + + if self.strRepr is None: + if len(self.notChars) > 4: + self.strRepr = "!W:(%s...)" % self.notChars[:4] + else: + self.strRepr = "!W:(%s)" % self.notChars + + return self.strRepr + +class White(Token): + """ + Special matching class for matching whitespace. Normally, whitespace is ignored + by pyparsing grammars. This class is included when some whitespace structures + are significant. Define with a string containing the whitespace characters to be + matched; default is C{" \\t\\r\\n"}. 
Also takes optional C{min}, C{max}, and C{exact} arguments, + as defined for the C{L{Word}} class. + """ + whiteStrs = { + " " : "<SPC>", + "\t": "<TAB>", + "\n": "<LF>", + "\r": "<CR>", + "\f": "<FF>", + } + def __init__(self, ws=" \t\r\n", min=1, max=0, exact=0): + super(White,self).__init__() + self.matchWhite = ws + self.setWhitespaceChars( "".join(c for c in self.whiteChars if c not in self.matchWhite) ) + #~ self.leaveWhitespace() + self.name = ("".join(White.whiteStrs[c] for c in self.matchWhite)) + self.mayReturnEmpty = True + self.errmsg = "Expected " + self.name + + self.minLen = min + + if max > 0: + self.maxLen = max + else: + self.maxLen = _MAX_INT + + if exact > 0: + self.maxLen = exact + self.minLen = exact + + def parseImpl( self, instring, loc, doActions=True ): + if not(instring[ loc ] in self.matchWhite): + raise ParseException(instring, loc, self.errmsg, self) + start = loc + loc += 1 + maxloc = start + self.maxLen + maxloc = min( maxloc, len(instring) ) + while loc < maxloc and instring[loc] in self.matchWhite: + loc += 1 + + if loc - start < self.minLen: + raise ParseException(instring, loc, self.errmsg, self) + + return loc, instring[start:loc] + + +class _PositionToken(Token): + def __init__( self ): + super(_PositionToken,self).__init__() + self.name=self.__class__.__name__ + self.mayReturnEmpty = True + self.mayIndexError = False + +class GoToColumn(_PositionToken): + """ + Token to advance to a specific column of input text; useful for tabular report scraping. + """ + def __init__( self, colno ): + super(GoToColumn,self).__init__() + self.col = colno + + def preParse( self, instring, loc ): + if col(loc,instring) != self.col: + instrlen = len(instring) + if self.ignoreExprs: + loc = self._skipIgnorables( instring, loc ) + while loc < instrlen and instring[loc].isspace() and col( loc, instring ) != self.col : + loc += 1 + return loc + + def parseImpl( self, instring, loc, doActions=True ): + thiscol = col( loc, instring ) + if thiscol > self.col: + raise ParseException( instring, loc, "Text not in expected column", self ) + newloc = loc + self.col - thiscol + ret = instring[ loc: newloc ] + return newloc, ret + + +class LineStart(_PositionToken): + """ + Matches if current position is at the beginning of a line within the parse string + + Example:: + + test = '''\ + AAA this line + AAA and this line + AAA but not this one + B AAA and definitely not this one + ''' + + for t in (LineStart() + 'AAA' + restOfLine).searchString(test): + print(t) + + Prints:: + ['AAA', ' this line'] + ['AAA', ' and this line'] + + """ + def __init__( self ): + super(LineStart,self).__init__() + self.errmsg = "Expected start of line" + + def parseImpl( self, instring, loc, doActions=True ): + if col(loc, instring) == 1: + return loc, [] + raise ParseException(instring, loc, self.errmsg, self) + +class LineEnd(_PositionToken): + """ + Matches if current position is at the end of a line within the parse string + """ + def __init__( self ): + super(LineEnd,self).__init__() + self.setWhitespaceChars( ParserElement.DEFAULT_WHITE_CHARS.replace("\n","") ) + self.errmsg = "Expected end of line" + + def parseImpl( self, instring, loc, doActions=True ): + if loc<len(instring): + if instring[loc] == "\n": + return loc+1, "\n" + else: + raise ParseException(instring, loc, self.errmsg, self) + elif loc == len(instring): + return loc+1, [] + else: + raise ParseException(instring, loc, self.errmsg, self) + +class StringStart(_PositionToken): + """ + Matches if current position is at the beginning of 
the parse string + """ + def __init__( self ): + super(StringStart,self).__init__() + self.errmsg = "Expected start of text" + + def parseImpl( self, instring, loc, doActions=True ): + if loc != 0: + # see if entire string up to here is just whitespace and ignoreables + if loc != self.preParse( instring, 0 ): + raise ParseException(instring, loc, self.errmsg, self) + return loc, [] + +class StringEnd(_PositionToken): + """ + Matches if current position is at the end of the parse string + """ + def __init__( self ): + super(StringEnd,self).__init__() + self.errmsg = "Expected end of text" + + def parseImpl( self, instring, loc, doActions=True ): + if loc < len(instring): + raise ParseException(instring, loc, self.errmsg, self) + elif loc == len(instring): + return loc+1, [] + elif loc > len(instring): + return loc, [] + else: + raise ParseException(instring, loc, self.errmsg, self) + +class WordStart(_PositionToken): + """ + Matches if the current position is at the beginning of a Word, and + is not preceded by any character in a given set of C{wordChars} + (default=C{printables}). To emulate the C{\b} behavior of regular expressions, + use C{WordStart(alphanums)}. C{WordStart} will also match at the beginning of + the string being parsed, or at the beginning of a line. + """ + def __init__(self, wordChars = printables): + super(WordStart,self).__init__() + self.wordChars = set(wordChars) + self.errmsg = "Not at the start of a word" + + def parseImpl(self, instring, loc, doActions=True ): + if loc != 0: + if (instring[loc-1] in self.wordChars or + instring[loc] not in self.wordChars): + raise ParseException(instring, loc, self.errmsg, self) + return loc, [] + +class WordEnd(_PositionToken): + """ + Matches if the current position is at the end of a Word, and + is not followed by any character in a given set of C{wordChars} + (default=C{printables}). To emulate the C{\b} behavior of regular expressions, + use C{WordEnd(alphanums)}. C{WordEnd} will also match at the end of + the string being parsed, or at the end of a line. + """ + def __init__(self, wordChars = printables): + super(WordEnd,self).__init__() + self.wordChars = set(wordChars) + self.skipWhitespace = False + self.errmsg = "Not at the end of a word" + + def parseImpl(self, instring, loc, doActions=True ): + instrlen = len(instring) + if instrlen>0 and loc<instrlen: + if (instring[loc] in self.wordChars or + instring[loc-1] not in self.wordChars): + raise ParseException(instring, loc, self.errmsg, self) + return loc, [] + + +class ParseExpression(ParserElement): + """ + Abstract subclass of ParserElement, for combining and post-processing parsed tokens. 
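+    A rough illustrative sketch (not part of the upstream docstring): concrete
+    subclasses such as C{L{And}}, C{L{MatchFirst}}, C{L{Or}}, and C{L{Each}} are
+    normally built with the overloaded operators rather than instantiated directly::
+        greet = Word(alphas) + ',' + Word(alphas) + '!'    # And, via '+'
+        number = Word(nums) | Word(alphas)                 # MatchFirst, via '|'
+        longest = Word(nums) ^ Word(alphanums)             # Or, via '^'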
+ """ + def __init__( self, exprs, savelist = False ): + super(ParseExpression,self).__init__(savelist) + if isinstance( exprs, _generatorType ): + exprs = list(exprs) + + if isinstance( exprs, basestring ): + self.exprs = [ ParserElement._literalStringClass( exprs ) ] + elif isinstance( exprs, collections.Iterable ): + exprs = list(exprs) + # if sequence of strings provided, wrap with Literal + if all(isinstance(expr, basestring) for expr in exprs): + exprs = map(ParserElement._literalStringClass, exprs) + self.exprs = list(exprs) + else: + try: + self.exprs = list( exprs ) + except TypeError: + self.exprs = [ exprs ] + self.callPreparse = False + + def __getitem__( self, i ): + return self.exprs[i] + + def append( self, other ): + self.exprs.append( other ) + self.strRepr = None + return self + + def leaveWhitespace( self ): + """Extends C{leaveWhitespace} defined in base class, and also invokes C{leaveWhitespace} on + all contained expressions.""" + self.skipWhitespace = False + self.exprs = [ e.copy() for e in self.exprs ] + for e in self.exprs: + e.leaveWhitespace() + return self + + def ignore( self, other ): + if isinstance( other, Suppress ): + if other not in self.ignoreExprs: + super( ParseExpression, self).ignore( other ) + for e in self.exprs: + e.ignore( self.ignoreExprs[-1] ) + else: + super( ParseExpression, self).ignore( other ) + for e in self.exprs: + e.ignore( self.ignoreExprs[-1] ) + return self + + def __str__( self ): + try: + return super(ParseExpression,self).__str__() + except Exception: + pass + + if self.strRepr is None: + self.strRepr = "%s:(%s)" % ( self.__class__.__name__, _ustr(self.exprs) ) + return self.strRepr + + def streamline( self ): + super(ParseExpression,self).streamline() + + for e in self.exprs: + e.streamline() + + # collapse nested And's of the form And( And( And( a,b), c), d) to And( a,b,c,d ) + # but only if there are no parse actions or resultsNames on the nested And's + # (likewise for Or's and MatchFirst's) + if ( len(self.exprs) == 2 ): + other = self.exprs[0] + if ( isinstance( other, self.__class__ ) and + not(other.parseAction) and + other.resultsName is None and + not other.debug ): + self.exprs = other.exprs[:] + [ self.exprs[1] ] + self.strRepr = None + self.mayReturnEmpty |= other.mayReturnEmpty + self.mayIndexError |= other.mayIndexError + + other = self.exprs[-1] + if ( isinstance( other, self.__class__ ) and + not(other.parseAction) and + other.resultsName is None and + not other.debug ): + self.exprs = self.exprs[:-1] + other.exprs[:] + self.strRepr = None + self.mayReturnEmpty |= other.mayReturnEmpty + self.mayIndexError |= other.mayIndexError + + self.errmsg = "Expected " + _ustr(self) + + return self + + def setResultsName( self, name, listAllMatches=False ): + ret = super(ParseExpression,self).setResultsName(name,listAllMatches) + return ret + + def validate( self, validateTrace=[] ): + tmp = validateTrace[:]+[self] + for e in self.exprs: + e.validate(tmp) + self.checkRecursion( [] ) + + def copy(self): + ret = super(ParseExpression,self).copy() + ret.exprs = [e.copy() for e in self.exprs] + return ret + +class And(ParseExpression): + """ + Requires all given C{ParseExpression}s to be found in the given order. + Expressions may be separated by whitespace. + May be constructed using the C{'+'} operator. + May also be constructed using the C{'-'} operator, which will suppress backtracking. 
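+    A brief illustrative sketch of the C{'-'} form (an illustrative example, not
+    from the upstream docstring): once the expression to the left of C{'-'} has
+    matched, a failure further to the right raises C{ParseSyntaxException} rather
+    than allowing backtracking::
+        if_stmt = Keyword('if') - '(' + Word(alphas) + ')'
+        # if_stmt.parseString('if x)')  # would raise ParseSyntaxException (no '(')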
+ + Example:: + integer = Word(nums) + name_expr = OneOrMore(Word(alphas)) + + expr = And([integer("id"),name_expr("name"),integer("age")]) + # more easily written as: + expr = integer("id") + name_expr("name") + integer("age") + """ + + class _ErrorStop(Empty): + def __init__(self, *args, **kwargs): + super(And._ErrorStop,self).__init__(*args, **kwargs) + self.name = '-' + self.leaveWhitespace() + + def __init__( self, exprs, savelist = True ): + super(And,self).__init__(exprs, savelist) + self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs) + self.setWhitespaceChars( self.exprs[0].whiteChars ) + self.skipWhitespace = self.exprs[0].skipWhitespace + self.callPreparse = True + + def parseImpl( self, instring, loc, doActions=True ): + # pass False as last arg to _parse for first element, since we already + # pre-parsed the string as part of our And pre-parsing + loc, resultlist = self.exprs[0]._parse( instring, loc, doActions, callPreParse=False ) + errorStop = False + for e in self.exprs[1:]: + if isinstance(e, And._ErrorStop): + errorStop = True + continue + if errorStop: + try: + loc, exprtokens = e._parse( instring, loc, doActions ) + except ParseSyntaxException: + raise + except ParseBaseException as pe: + pe.__traceback__ = None + raise ParseSyntaxException._from_exception(pe) + except IndexError: + raise ParseSyntaxException(instring, len(instring), self.errmsg, self) + else: + loc, exprtokens = e._parse( instring, loc, doActions ) + if exprtokens or exprtokens.haskeys(): + resultlist += exprtokens + return loc, resultlist + + def __iadd__(self, other ): + if isinstance( other, basestring ): + other = ParserElement._literalStringClass( other ) + return self.append( other ) #And( [ self, other ] ) + + def checkRecursion( self, parseElementList ): + subRecCheckList = parseElementList[:] + [ self ] + for e in self.exprs: + e.checkRecursion( subRecCheckList ) + if not e.mayReturnEmpty: + break + + def __str__( self ): + if hasattr(self,"name"): + return self.name + + if self.strRepr is None: + self.strRepr = "{" + " ".join(_ustr(e) for e in self.exprs) + "}" + + return self.strRepr + + +class Or(ParseExpression): + """ + Requires that at least one C{ParseExpression} is found. + If two expressions match, the expression that matches the longest string will be used. + May be constructed using the C{'^'} operator. + + Example:: + # construct Or using '^' operator + + number = Word(nums) ^ Combine(Word(nums) + '.' 
+ Word(nums)) + print(number.searchString("123 3.1416 789")) + prints:: + [['123'], ['3.1416'], ['789']] + """ + def __init__( self, exprs, savelist = False ): + super(Or,self).__init__(exprs, savelist) + if self.exprs: + self.mayReturnEmpty = any(e.mayReturnEmpty for e in self.exprs) + else: + self.mayReturnEmpty = True + + def parseImpl( self, instring, loc, doActions=True ): + maxExcLoc = -1 + maxException = None + matches = [] + for e in self.exprs: + try: + loc2 = e.tryParse( instring, loc ) + except ParseException as err: + err.__traceback__ = None + if err.loc > maxExcLoc: + maxException = err + maxExcLoc = err.loc + except IndexError: + if len(instring) > maxExcLoc: + maxException = ParseException(instring,len(instring),e.errmsg,self) + maxExcLoc = len(instring) + else: + # save match among all matches, to retry longest to shortest + matches.append((loc2, e)) + + if matches: + matches.sort(key=lambda x: -x[0]) + for _,e in matches: + try: + return e._parse( instring, loc, doActions ) + except ParseException as err: + err.__traceback__ = None + if err.loc > maxExcLoc: + maxException = err + maxExcLoc = err.loc + + if maxException is not None: + maxException.msg = self.errmsg + raise maxException + else: + raise ParseException(instring, loc, "no defined alternatives to match", self) + + + def __ixor__(self, other ): + if isinstance( other, basestring ): + other = ParserElement._literalStringClass( other ) + return self.append( other ) #Or( [ self, other ] ) + + def __str__( self ): + if hasattr(self,"name"): + return self.name + + if self.strRepr is None: + self.strRepr = "{" + " ^ ".join(_ustr(e) for e in self.exprs) + "}" + + return self.strRepr + + def checkRecursion( self, parseElementList ): + subRecCheckList = parseElementList[:] + [ self ] + for e in self.exprs: + e.checkRecursion( subRecCheckList ) + + +class MatchFirst(ParseExpression): + """ + Requires that at least one C{ParseExpression} is found. + If two expressions match, the first one listed is the one that will match. + May be constructed using the C{'|'} operator. + + Example:: + # construct MatchFirst using '|' operator + + # watch the order of expressions to match + number = Word(nums) | Combine(Word(nums) + '.' + Word(nums)) + print(number.searchString("123 3.1416 789")) # Fail! -> [['123'], ['3'], ['1416'], ['789']] + + # put more selective expression first + number = Combine(Word(nums) + '.' 
+ Word(nums)) | Word(nums) + print(number.searchString("123 3.1416 789")) # Better -> [['123'], ['3.1416'], ['789']] + """ + def __init__( self, exprs, savelist = False ): + super(MatchFirst,self).__init__(exprs, savelist) + if self.exprs: + self.mayReturnEmpty = any(e.mayReturnEmpty for e in self.exprs) + else: + self.mayReturnEmpty = True + + def parseImpl( self, instring, loc, doActions=True ): + maxExcLoc = -1 + maxException = None + for e in self.exprs: + try: + ret = e._parse( instring, loc, doActions ) + return ret + except ParseException as err: + if err.loc > maxExcLoc: + maxException = err + maxExcLoc = err.loc + except IndexError: + if len(instring) > maxExcLoc: + maxException = ParseException(instring,len(instring),e.errmsg,self) + maxExcLoc = len(instring) + + # only got here if no expression matched, raise exception for match that made it the furthest + else: + if maxException is not None: + maxException.msg = self.errmsg + raise maxException + else: + raise ParseException(instring, loc, "no defined alternatives to match", self) + + def __ior__(self, other ): + if isinstance( other, basestring ): + other = ParserElement._literalStringClass( other ) + return self.append( other ) #MatchFirst( [ self, other ] ) + + def __str__( self ): + if hasattr(self,"name"): + return self.name + + if self.strRepr is None: + self.strRepr = "{" + " | ".join(_ustr(e) for e in self.exprs) + "}" + + return self.strRepr + + def checkRecursion( self, parseElementList ): + subRecCheckList = parseElementList[:] + [ self ] + for e in self.exprs: + e.checkRecursion( subRecCheckList ) + + +class Each(ParseExpression): + """ + Requires all given C{ParseExpression}s to be found, but in any order. + Expressions may be separated by whitespace. + May be constructed using the C{'&'} operator. 
+ + Example:: + color = oneOf("RED ORANGE YELLOW GREEN BLUE PURPLE BLACK WHITE BROWN") + shape_type = oneOf("SQUARE CIRCLE TRIANGLE STAR HEXAGON OCTAGON") + integer = Word(nums) + shape_attr = "shape:" + shape_type("shape") + posn_attr = "posn:" + Group(integer("x") + ',' + integer("y"))("posn") + color_attr = "color:" + color("color") + size_attr = "size:" + integer("size") + + # use Each (using operator '&') to accept attributes in any order + # (shape and posn are required, color and size are optional) + shape_spec = shape_attr & posn_attr & Optional(color_attr) & Optional(size_attr) + + shape_spec.runTests(''' + shape: SQUARE color: BLACK posn: 100, 120 + shape: CIRCLE size: 50 color: BLUE posn: 50,80 + color:GREEN size:20 shape:TRIANGLE posn:20,40 + ''' + ) + prints:: + shape: SQUARE color: BLACK posn: 100, 120 + ['shape:', 'SQUARE', 'color:', 'BLACK', 'posn:', ['100', ',', '120']] + - color: BLACK + - posn: ['100', ',', '120'] + - x: 100 + - y: 120 + - shape: SQUARE + + + shape: CIRCLE size: 50 color: BLUE posn: 50,80 + ['shape:', 'CIRCLE', 'size:', '50', 'color:', 'BLUE', 'posn:', ['50', ',', '80']] + - color: BLUE + - posn: ['50', ',', '80'] + - x: 50 + - y: 80 + - shape: CIRCLE + - size: 50 + + + color: GREEN size: 20 shape: TRIANGLE posn: 20,40 + ['color:', 'GREEN', 'size:', '20', 'shape:', 'TRIANGLE', 'posn:', ['20', ',', '40']] + - color: GREEN + - posn: ['20', ',', '40'] + - x: 20 + - y: 40 + - shape: TRIANGLE + - size: 20 + """ + def __init__( self, exprs, savelist = True ): + super(Each,self).__init__(exprs, savelist) + self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs) + self.skipWhitespace = True + self.initExprGroups = True + + def parseImpl( self, instring, loc, doActions=True ): + if self.initExprGroups: + self.opt1map = dict((id(e.expr),e) for e in self.exprs if isinstance(e,Optional)) + opt1 = [ e.expr for e in self.exprs if isinstance(e,Optional) ] + opt2 = [ e for e in self.exprs if e.mayReturnEmpty and not isinstance(e,Optional)] + self.optionals = opt1 + opt2 + self.multioptionals = [ e.expr for e in self.exprs if isinstance(e,ZeroOrMore) ] + self.multirequired = [ e.expr for e in self.exprs if isinstance(e,OneOrMore) ] + self.required = [ e for e in self.exprs if not isinstance(e,(Optional,ZeroOrMore,OneOrMore)) ] + self.required += self.multirequired + self.initExprGroups = False + tmpLoc = loc + tmpReqd = self.required[:] + tmpOpt = self.optionals[:] + matchOrder = [] + + keepMatching = True + while keepMatching: + tmpExprs = tmpReqd + tmpOpt + self.multioptionals + self.multirequired + failed = [] + for e in tmpExprs: + try: + tmpLoc = e.tryParse( instring, tmpLoc ) + except ParseException: + failed.append(e) + else: + matchOrder.append(self.opt1map.get(id(e),e)) + if e in tmpReqd: + tmpReqd.remove(e) + elif e in tmpOpt: + tmpOpt.remove(e) + if len(failed) == len(tmpExprs): + keepMatching = False + + if tmpReqd: + missing = ", ".join(_ustr(e) for e in tmpReqd) + raise ParseException(instring,loc,"Missing one or more required elements (%s)" % missing ) + + # add any unmatched Optionals, in case they have default values defined + matchOrder += [e for e in self.exprs if isinstance(e,Optional) and e.expr in tmpOpt] + + resultlist = [] + for e in matchOrder: + loc,results = e._parse(instring,loc,doActions) + resultlist.append(results) + + finalResults = sum(resultlist, ParseResults([])) + return loc, finalResults + + def __str__( self ): + if hasattr(self,"name"): + return self.name + + if self.strRepr is None: + self.strRepr = "{" + " & 
".join(_ustr(e) for e in self.exprs) + "}" + + return self.strRepr + + def checkRecursion( self, parseElementList ): + subRecCheckList = parseElementList[:] + [ self ] + for e in self.exprs: + e.checkRecursion( subRecCheckList ) + + +class ParseElementEnhance(ParserElement): + """ + Abstract subclass of C{ParserElement}, for combining and post-processing parsed tokens. + """ + def __init__( self, expr, savelist=False ): + super(ParseElementEnhance,self).__init__(savelist) + if isinstance( expr, basestring ): + if issubclass(ParserElement._literalStringClass, Token): + expr = ParserElement._literalStringClass(expr) + else: + expr = ParserElement._literalStringClass(Literal(expr)) + self.expr = expr + self.strRepr = None + if expr is not None: + self.mayIndexError = expr.mayIndexError + self.mayReturnEmpty = expr.mayReturnEmpty + self.setWhitespaceChars( expr.whiteChars ) + self.skipWhitespace = expr.skipWhitespace + self.saveAsList = expr.saveAsList + self.callPreparse = expr.callPreparse + self.ignoreExprs.extend(expr.ignoreExprs) + + def parseImpl( self, instring, loc, doActions=True ): + if self.expr is not None: + return self.expr._parse( instring, loc, doActions, callPreParse=False ) + else: + raise ParseException("",loc,self.errmsg,self) + + def leaveWhitespace( self ): + self.skipWhitespace = False + self.expr = self.expr.copy() + if self.expr is not None: + self.expr.leaveWhitespace() + return self + + def ignore( self, other ): + if isinstance( other, Suppress ): + if other not in self.ignoreExprs: + super( ParseElementEnhance, self).ignore( other ) + if self.expr is not None: + self.expr.ignore( self.ignoreExprs[-1] ) + else: + super( ParseElementEnhance, self).ignore( other ) + if self.expr is not None: + self.expr.ignore( self.ignoreExprs[-1] ) + return self + + def streamline( self ): + super(ParseElementEnhance,self).streamline() + if self.expr is not None: + self.expr.streamline() + return self + + def checkRecursion( self, parseElementList ): + if self in parseElementList: + raise RecursiveGrammarException( parseElementList+[self] ) + subRecCheckList = parseElementList[:] + [ self ] + if self.expr is not None: + self.expr.checkRecursion( subRecCheckList ) + + def validate( self, validateTrace=[] ): + tmp = validateTrace[:]+[self] + if self.expr is not None: + self.expr.validate(tmp) + self.checkRecursion( [] ) + + def __str__( self ): + try: + return super(ParseElementEnhance,self).__str__() + except Exception: + pass + + if self.strRepr is None and self.expr is not None: + self.strRepr = "%s:(%s)" % ( self.__class__.__name__, _ustr(self.expr) ) + return self.strRepr + + +class FollowedBy(ParseElementEnhance): + """ + Lookahead matching of the given parse expression. C{FollowedBy} + does I{not} advance the parsing position within the input string, it only + verifies that the specified parse expression matches at the current + position. C{FollowedBy} always returns a null token list. 
+ + Example:: + # use FollowedBy to match a label only if it is followed by a ':' + data_word = Word(alphas) + label = data_word + FollowedBy(':') + attr_expr = Group(label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)) + + OneOrMore(attr_expr).parseString("shape: SQUARE color: BLACK posn: upper left").pprint() + prints:: + [['shape', 'SQUARE'], ['color', 'BLACK'], ['posn', 'upper left']] + """ + def __init__( self, expr ): + super(FollowedBy,self).__init__(expr) + self.mayReturnEmpty = True + + def parseImpl( self, instring, loc, doActions=True ): + self.expr.tryParse( instring, loc ) + return loc, [] + + +class NotAny(ParseElementEnhance): + """ + Lookahead to disallow matching with the given parse expression. C{NotAny} + does I{not} advance the parsing position within the input string, it only + verifies that the specified parse expression does I{not} match at the current + position. Also, C{NotAny} does I{not} skip over leading whitespace. C{NotAny} + always returns a null token list. May be constructed using the '~' operator. + + Example:: + + """ + def __init__( self, expr ): + super(NotAny,self).__init__(expr) + #~ self.leaveWhitespace() + self.skipWhitespace = False # do NOT use self.leaveWhitespace(), don't want to propagate to exprs + self.mayReturnEmpty = True + self.errmsg = "Found unwanted token, "+_ustr(self.expr) + + def parseImpl( self, instring, loc, doActions=True ): + if self.expr.canParseNext(instring, loc): + raise ParseException(instring, loc, self.errmsg, self) + return loc, [] + + def __str__( self ): + if hasattr(self,"name"): + return self.name + + if self.strRepr is None: + self.strRepr = "~{" + _ustr(self.expr) + "}" + + return self.strRepr + +class _MultipleMatch(ParseElementEnhance): + def __init__( self, expr, stopOn=None): + super(_MultipleMatch, self).__init__(expr) + self.saveAsList = True + ender = stopOn + if isinstance(ender, basestring): + ender = ParserElement._literalStringClass(ender) + self.not_ender = ~ender if ender is not None else None + + def parseImpl( self, instring, loc, doActions=True ): + self_expr_parse = self.expr._parse + self_skip_ignorables = self._skipIgnorables + check_ender = self.not_ender is not None + if check_ender: + try_not_ender = self.not_ender.tryParse + + # must be at least one (but first see if we are the stopOn sentinel; + # if so, fail) + if check_ender: + try_not_ender(instring, loc) + loc, tokens = self_expr_parse( instring, loc, doActions, callPreParse=False ) + try: + hasIgnoreExprs = (not not self.ignoreExprs) + while 1: + if check_ender: + try_not_ender(instring, loc) + if hasIgnoreExprs: + preloc = self_skip_ignorables( instring, loc ) + else: + preloc = loc + loc, tmptokens = self_expr_parse( instring, preloc, doActions ) + if tmptokens or tmptokens.haskeys(): + tokens += tmptokens + except (ParseException,IndexError): + pass + + return loc, tokens + +class OneOrMore(_MultipleMatch): + """ + Repetition of one or more of the given expression. + + Parameters: + - expr - expression that must match one or more times + - stopOn - (default=C{None}) - expression for a terminating sentinel + (only required if the sentinel would ordinarily match the repetition + expression) + + Example:: + data_word = Word(alphas) + label = data_word + FollowedBy(':') + attr_expr = Group(label + Suppress(':') + OneOrMore(data_word).setParseAction(' '.join)) + + text = "shape: SQUARE posn: upper left color: BLACK" + OneOrMore(attr_expr).parseString(text).pprint() # Fail! 
read 'color' as data instead of next label -> [['shape', 'SQUARE color']] + + # use stopOn attribute for OneOrMore to avoid reading label string as part of the data + attr_expr = Group(label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)) + OneOrMore(attr_expr).parseString(text).pprint() # Better -> [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'BLACK']] + + # could also be written as + (attr_expr * (1,)).parseString(text).pprint() + """ + + def __str__( self ): + if hasattr(self,"name"): + return self.name + + if self.strRepr is None: + self.strRepr = "{" + _ustr(self.expr) + "}..." + + return self.strRepr + +class ZeroOrMore(_MultipleMatch): + """ + Optional repetition of zero or more of the given expression. + + Parameters: + - expr - expression that must match zero or more times + - stopOn - (default=C{None}) - expression for a terminating sentinel + (only required if the sentinel would ordinarily match the repetition + expression) + + Example: similar to L{OneOrMore} + """ + def __init__( self, expr, stopOn=None): + super(ZeroOrMore,self).__init__(expr, stopOn=stopOn) + self.mayReturnEmpty = True + + def parseImpl( self, instring, loc, doActions=True ): + try: + return super(ZeroOrMore, self).parseImpl(instring, loc, doActions) + except (ParseException,IndexError): + return loc, [] + + def __str__( self ): + if hasattr(self,"name"): + return self.name + + if self.strRepr is None: + self.strRepr = "[" + _ustr(self.expr) + "]..." + + return self.strRepr + +class _NullToken(object): + def __bool__(self): + return False + __nonzero__ = __bool__ + def __str__(self): + return "" + +_optionalNotMatched = _NullToken() +class Optional(ParseElementEnhance): + """ + Optional matching of the given expression. + + Parameters: + - expr - expression that must match zero or more times + - default (optional) - value to be returned if the optional expression is not found. + + Example:: + # US postal code can be a 5-digit zip, plus optional 4-digit qualifier + zip = Combine(Word(nums, exact=5) + Optional('-' + Word(nums, exact=4))) + zip.runTests(''' + # traditional ZIP code + 12345 + + # ZIP+4 form + 12101-0001 + + # invalid ZIP + 98765- + ''') + prints:: + # traditional ZIP code + 12345 + ['12345'] + + # ZIP+4 form + 12101-0001 + ['12101-0001'] + + # invalid ZIP + 98765- + ^ + FAIL: Expected end of text (at char 5), (line:1, col:6) + """ + def __init__( self, expr, default=_optionalNotMatched ): + super(Optional,self).__init__( expr, savelist=False ) + self.saveAsList = self.expr.saveAsList + self.defaultValue = default + self.mayReturnEmpty = True + + def parseImpl( self, instring, loc, doActions=True ): + try: + loc, tokens = self.expr._parse( instring, loc, doActions, callPreParse=False ) + except (ParseException,IndexError): + if self.defaultValue is not _optionalNotMatched: + if self.expr.resultsName: + tokens = ParseResults([ self.defaultValue ]) + tokens[self.expr.resultsName] = self.defaultValue + else: + tokens = [ self.defaultValue ] + else: + tokens = [] + return loc, tokens + + def __str__( self ): + if hasattr(self,"name"): + return self.name + + if self.strRepr is None: + self.strRepr = "[" + _ustr(self.expr) + "]" + + return self.strRepr + +class SkipTo(ParseElementEnhance): + """ + Token for skipping over all undefined text until the matched expression is found. 
+ + Parameters: + - expr - target expression marking the end of the data to be skipped + - include - (default=C{False}) if True, the target expression is also parsed + (the skipped text and target expression are returned as a 2-element list). + - ignore - (default=C{None}) used to define grammars (typically quoted strings and + comments) that might contain false matches to the target expression + - failOn - (default=C{None}) define expressions that are not allowed to be + included in the skipped text; if found before the target expression is found, + the SkipTo is not a match + + Example:: + report = ''' + Outstanding Issues Report - 1 Jan 2000 + + # | Severity | Description | Days Open + -----+----------+-------------------------------------------+----------- + 101 | Critical | Intermittent system crash | 6 + 94 | Cosmetic | Spelling error on Login ('log|n') | 14 + 79 | Minor | System slow when running too many reports | 47 + ''' + integer = Word(nums) + SEP = Suppress('|') + # use SkipTo to simply match everything up until the next SEP + # - ignore quoted strings, so that a '|' character inside a quoted string does not match + # - parse action will call token.strip() for each matched token, i.e., the description body + string_data = SkipTo(SEP, ignore=quotedString) + string_data.setParseAction(tokenMap(str.strip)) + ticket_expr = (integer("issue_num") + SEP + + string_data("sev") + SEP + + string_data("desc") + SEP + + integer("days_open")) + + for tkt in ticket_expr.searchString(report): + print(tkt.dump()) + prints:: + ['101', 'Critical', 'Intermittent system crash', '6'] + - days_open: 6 + - desc: Intermittent system crash + - issue_num: 101 + - sev: Critical + ['94', 'Cosmetic', "Spelling error on Login ('log|n')", '14'] + - days_open: 14 + - desc: Spelling error on Login ('log|n') + - issue_num: 94 + - sev: Cosmetic + ['79', 'Minor', 'System slow when running too many reports', '47'] + - days_open: 47 + - desc: System slow when running too many reports + - issue_num: 79 + - sev: Minor + """ + def __init__( self, other, include=False, ignore=None, failOn=None ): + super( SkipTo, self ).__init__( other ) + self.ignoreExpr = ignore + self.mayReturnEmpty = True + self.mayIndexError = False + self.includeMatch = include + self.asList = False + if isinstance(failOn, basestring): + self.failOn = ParserElement._literalStringClass(failOn) + else: + self.failOn = failOn + self.errmsg = "No match found for "+_ustr(self.expr) + + def parseImpl( self, instring, loc, doActions=True ): + startloc = loc + instrlen = len(instring) + expr = self.expr + expr_parse = self.expr._parse + self_failOn_canParseNext = self.failOn.canParseNext if self.failOn is not None else None + self_ignoreExpr_tryParse = self.ignoreExpr.tryParse if self.ignoreExpr is not None else None + + tmploc = loc + while tmploc <= instrlen: + if self_failOn_canParseNext is not None: + # break if failOn expression matches + if self_failOn_canParseNext(instring, tmploc): + break + + if self_ignoreExpr_tryParse is not None: + # advance past ignore expressions + while 1: + try: + tmploc = self_ignoreExpr_tryParse(instring, tmploc) + except ParseBaseException: + break + + try: + expr_parse(instring, tmploc, doActions=False, callPreParse=False) + except (ParseException, IndexError): + # no match, advance loc in string + tmploc += 1 + else: + # matched skipto expr, done + break + + else: + # ran off the end of the input string without matching skipto expr, fail + raise ParseException(instring, loc, self.errmsg, self) + + # build up return
values + loc = tmploc + skiptext = instring[startloc:loc] + skipresult = ParseResults(skiptext) + + if self.includeMatch: + loc, mat = expr_parse(instring,loc,doActions,callPreParse=False) + skipresult += mat + + return loc, skipresult + +class Forward(ParseElementEnhance): + """ + Forward declaration of an expression to be defined later - + used for recursive grammars, such as algebraic infix notation. + When the expression is known, it is assigned to the C{Forward} variable using the '<<' operator. + + Note: take care when assigning to C{Forward} not to overlook precedence of operators. + Specifically, '|' has a lower precedence than '<<', so that:: + fwdExpr << a | b | c + will actually be evaluated as:: + (fwdExpr << a) | b | c + thereby leaving b and c out as parseable alternatives. It is recommended that you + explicitly group the values inserted into the C{Forward}:: + fwdExpr << (a | b | c) + Converting to use the '<<=' operator instead will avoid this problem. + + See L{ParseResults.pprint} for an example of a recursive parser created using + C{Forward}. + """ + def __init__( self, other=None ): + super(Forward,self).__init__( other, savelist=False ) + + def __lshift__( self, other ): + if isinstance( other, basestring ): + other = ParserElement._literalStringClass(other) + self.expr = other + self.strRepr = None + self.mayIndexError = self.expr.mayIndexError + self.mayReturnEmpty = self.expr.mayReturnEmpty + self.setWhitespaceChars( self.expr.whiteChars ) + self.skipWhitespace = self.expr.skipWhitespace + self.saveAsList = self.expr.saveAsList + self.ignoreExprs.extend(self.expr.ignoreExprs) + return self + + def __ilshift__(self, other): + return self << other + + def leaveWhitespace( self ): + self.skipWhitespace = False + return self + + def streamline( self ): + if not self.streamlined: + self.streamlined = True + if self.expr is not None: + self.expr.streamline() + return self + + def validate( self, validateTrace=[] ): + if self not in validateTrace: + tmp = validateTrace[:]+[self] + if self.expr is not None: + self.expr.validate(tmp) + self.checkRecursion([]) + + def __str__( self ): + if hasattr(self,"name"): + return self.name + return self.__class__.__name__ + ": ..." + + # stubbed out for now - creates awful memory and perf issues + self._revertClass = self.__class__ + self.__class__ = _ForwardNoRecurse + try: + if self.expr is not None: + retString = _ustr(self.expr) + else: + retString = "None" + finally: + self.__class__ = self._revertClass + return self.__class__.__name__ + ": " + retString + + def copy(self): + if self.expr is not None: + return super(Forward,self).copy() + else: + ret = Forward() + ret <<= self + return ret + +class _ForwardNoRecurse(Forward): + def __str__( self ): + return "..." + +class TokenConverter(ParseElementEnhance): + """ + Abstract subclass of C{ParseExpression}, for converting parsed results. + """ + def __init__( self, expr, savelist=False ): + super(TokenConverter,self).__init__( expr )#, savelist ) + self.saveAsList = False + +class Combine(TokenConverter): + """ + Converter to concatenate all matching tokens to a single string. + By default, the matching patterns must also be contiguous in the input string; + this can be disabled by specifying C{'adjacent=False'} in the constructor. + + Example:: + real = Word(nums) + '.' + Word(nums) + print(real.parseString('3.1416')) # -> ['3', '.', '1416'] + # will also erroneously match the following + print(real.parseString('3. 
1416')) # -> ['3', '.', '1416'] + + real = Combine(Word(nums) + '.' + Word(nums)) + print(real.parseString('3.1416')) # -> ['3.1416'] + # no match when there are internal spaces + print(real.parseString('3. 1416')) # -> Exception: Expected W:(0123...) + """ + def __init__( self, expr, joinString="", adjacent=True ): + super(Combine,self).__init__( expr ) + # suppress whitespace-stripping in contained parse expressions, but re-enable it on the Combine itself + if adjacent: + self.leaveWhitespace() + self.adjacent = adjacent + self.skipWhitespace = True + self.joinString = joinString + self.callPreparse = True + + def ignore( self, other ): + if self.adjacent: + ParserElement.ignore(self, other) + else: + super( Combine, self).ignore( other ) + return self + + def postParse( self, instring, loc, tokenlist ): + retToks = tokenlist.copy() + del retToks[:] + retToks += ParseResults([ "".join(tokenlist._asStringList(self.joinString)) ], modal=self.modalResults) + + if self.resultsName and retToks.haskeys(): + return [ retToks ] + else: + return retToks + +class Group(TokenConverter): + """ + Converter to return the matched tokens as a list - useful for returning tokens of C{L{ZeroOrMore}} and C{L{OneOrMore}} expressions. + + Example:: + ident = Word(alphas) + num = Word(nums) + term = ident | num + func = ident + Optional(delimitedList(term)) + print(func.parseString("fn a,b,100")) # -> ['fn', 'a', 'b', '100'] + + func = ident + Group(Optional(delimitedList(term))) + print(func.parseString("fn a,b,100")) # -> ['fn', ['a', 'b', '100']] + """ + def __init__( self, expr ): + super(Group,self).__init__( expr ) + self.saveAsList = True + + def postParse( self, instring, loc, tokenlist ): + return [ tokenlist ] + +class Dict(TokenConverter): + """ + Converter to return a repetitive expression as a list, but also as a dictionary. + Each element can also be referenced using the first token in the expression as its key. + Useful for tabular report scraping when the first column can be used as a item key. + + Example:: + data_word = Word(alphas) + label = data_word + FollowedBy(':') + attr_expr = Group(label + Suppress(':') + OneOrMore(data_word).setParseAction(' '.join)) + + text = "shape: SQUARE posn: upper left color: light blue texture: burlap" + attr_expr = (label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)) + + # print attributes as plain groups + print(OneOrMore(attr_expr).parseString(text).dump()) + + # instead of OneOrMore(expr), parse using Dict(OneOrMore(Group(expr))) - Dict will auto-assign names + result = Dict(OneOrMore(Group(attr_expr))).parseString(text) + print(result.dump()) + + # access named fields as dict entries, or output as dict + print(result['shape']) + print(result.asDict()) + prints:: + ['shape', 'SQUARE', 'posn', 'upper left', 'color', 'light blue', 'texture', 'burlap'] + + [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'light blue'], ['texture', 'burlap']] + - color: light blue + - posn: upper left + - shape: SQUARE + - texture: burlap + SQUARE + {'color': 'light blue', 'posn': 'upper left', 'texture': 'burlap', 'shape': 'SQUARE'} + See more examples at L{ParseResults} of accessing fields by results name. 
+ """ + def __init__( self, expr ): + super(Dict,self).__init__( expr ) + self.saveAsList = True + + def postParse( self, instring, loc, tokenlist ): + for i,tok in enumerate(tokenlist): + if len(tok) == 0: + continue + ikey = tok[0] + if isinstance(ikey,int): + ikey = _ustr(tok[0]).strip() + if len(tok)==1: + tokenlist[ikey] = _ParseResultsWithOffset("",i) + elif len(tok)==2 and not isinstance(tok[1],ParseResults): + tokenlist[ikey] = _ParseResultsWithOffset(tok[1],i) + else: + dictvalue = tok.copy() #ParseResults(i) + del dictvalue[0] + if len(dictvalue)!= 1 or (isinstance(dictvalue,ParseResults) and dictvalue.haskeys()): + tokenlist[ikey] = _ParseResultsWithOffset(dictvalue,i) + else: + tokenlist[ikey] = _ParseResultsWithOffset(dictvalue[0],i) + + if self.resultsName: + return [ tokenlist ] + else: + return tokenlist + + +class Suppress(TokenConverter): + """ + Converter for ignoring the results of a parsed expression. + + Example:: + source = "a, b, c,d" + wd = Word(alphas) + wd_list1 = wd + ZeroOrMore(',' + wd) + print(wd_list1.parseString(source)) + + # often, delimiters that are useful during parsing are just in the + # way afterward - use Suppress to keep them out of the parsed output + wd_list2 = wd + ZeroOrMore(Suppress(',') + wd) + print(wd_list2.parseString(source)) + prints:: + ['a', ',', 'b', ',', 'c', ',', 'd'] + ['a', 'b', 'c', 'd'] + (See also L{delimitedList}.) + """ + def postParse( self, instring, loc, tokenlist ): + return [] + + def suppress( self ): + return self + + +class OnlyOnce(object): + """ + Wrapper for parse actions, to ensure they are only called once. + """ + def __init__(self, methodCall): + self.callable = _trim_arity(methodCall) + self.called = False + def __call__(self,s,l,t): + if not self.called: + results = self.callable(s,l,t) + self.called = True + return results + raise ParseException(s,l,"") + def reset(self): + self.called = False + +def traceParseAction(f): + """ + Decorator for debugging parse actions. + + When the parse action is called, this decorator will print C{">> entering I{method-name}(line:I{current_source_line}, I{parse_location}, I{matched_tokens})".} + When the parse action completes, the decorator will print C{"<<"} followed by the returned value, or any exception that the parse action raised. + + Example:: + wd = Word(alphas) + + @traceParseAction + def remove_duplicate_chars(tokens): + return ''.join(sorted(set(''.join(tokens)))) + + wds = OneOrMore(wd).setParseAction(remove_duplicate_chars) + print(wds.parseString("slkdjs sld sldd sdlf sdljf")) + prints:: + >>entering remove_duplicate_chars(line: 'slkdjs sld sldd sdlf sdljf', 0, (['slkdjs', 'sld', 'sldd', 'sdlf', 'sdljf'], {})) + <<leaving remove_duplicate_chars (ret: 'dfjkls') + ['dfjkls'] + """ + f = _trim_arity(f) + def z(*paArgs): + thisFunc = f.__name__ + s,l,t = paArgs[-3:] + if len(paArgs)>3: + thisFunc = paArgs[0].__class__.__name__ + '.' + thisFunc + sys.stderr.write( ">>entering %s(line: '%s', %d, %r)\n" % (thisFunc,line(l,s),l,t) ) + try: + ret = f(*paArgs) + except Exception as exc: + sys.stderr.write( "<<leaving %s (exception: %s)\n" % (thisFunc,exc) ) + raise + sys.stderr.write( "<<leaving %s (ret: %r)\n" % (thisFunc,ret) ) + return ret + try: + z.__name__ = f.__name__ + except AttributeError: + pass + return z + +# +# global helpers +# +def delimitedList( expr, delim=",", combine=False ): + """ + Helper to define a delimited list of expressions - the delimiter defaults to ','.
+ By default, the list elements and delimiters can have intervening whitespace, and + comments, but this can be overridden by passing C{combine=True} in the constructor. + If C{combine} is set to C{True}, the matching tokens are returned as a single token + string, with the delimiters included; otherwise, the matching tokens are returned + as a list of tokens, with the delimiters suppressed. + + Example:: + delimitedList(Word(alphas)).parseString("aa,bb,cc") # -> ['aa', 'bb', 'cc'] + delimitedList(Word(hexnums), delim=':', combine=True).parseString("AA:BB:CC:DD:EE") # -> ['AA:BB:CC:DD:EE'] + """ + dlName = _ustr(expr)+" ["+_ustr(delim)+" "+_ustr(expr)+"]..." + if combine: + return Combine( expr + ZeroOrMore( delim + expr ) ).setName(dlName) + else: + return ( expr + ZeroOrMore( Suppress( delim ) + expr ) ).setName(dlName) + +def countedArray( expr, intExpr=None ): + """ + Helper to define a counted list of expressions. + This helper defines a pattern of the form:: + integer expr expr expr... + where the leading integer tells how many expr expressions follow. + The matched tokens returns the array of expr tokens as a list - the leading count token is suppressed. + + If C{intExpr} is specified, it should be a pyparsing expression that produces an integer value. + + Example:: + countedArray(Word(alphas)).parseString('2 ab cd ef') # -> ['ab', 'cd'] + + # in this parser, the leading integer value is given in binary, + # '10' indicating that 2 values are in the array + binaryConstant = Word('01').setParseAction(lambda t: int(t[0], 2)) + countedArray(Word(alphas), intExpr=binaryConstant).parseString('10 ab cd ef') # -> ['ab', 'cd'] + """ + arrayExpr = Forward() + def countFieldParseAction(s,l,t): + n = t[0] + arrayExpr << (n and Group(And([expr]*n)) or Group(empty)) + return [] + if intExpr is None: + intExpr = Word(nums).setParseAction(lambda t:int(t[0])) + else: + intExpr = intExpr.copy() + intExpr.setName("arrayLen") + intExpr.addParseAction(countFieldParseAction, callDuringTry=True) + return ( intExpr + arrayExpr ).setName('(len) ' + _ustr(expr) + '...') + +def _flatten(L): + ret = [] + for i in L: + if isinstance(i,list): + ret.extend(_flatten(i)) + else: + ret.append(i) + return ret + +def matchPreviousLiteral(expr): + """ + Helper to define an expression that is indirectly defined from + the tokens matched in a previous expression, that is, it looks + for a 'repeat' of a previous expression. For example:: + first = Word(nums) + second = matchPreviousLiteral(first) + matchExpr = first + ":" + second + will match C{"1:1"}, but not C{"1:2"}. Because this matches a + previous literal, will also match the leading C{"1:1"} in C{"1:10"}. + If this is not desired, use C{matchPreviousExpr}. + Do I{not} use with packrat parsing enabled. + """ + rep = Forward() + def copyTokenToRepeater(s,l,t): + if t: + if len(t) == 1: + rep << t[0] + else: + # flatten t tokens + tflat = _flatten(t.asList()) + rep << And(Literal(tt) for tt in tflat) + else: + rep << Empty() + expr.addParseAction(copyTokenToRepeater, callDuringTry=True) + rep.setName('(prev) ' + _ustr(expr)) + return rep + +def matchPreviousExpr(expr): + """ + Helper to define an expression that is indirectly defined from + the tokens matched in a previous expression, that is, it looks + for a 'repeat' of a previous expression. For example:: + first = Word(nums) + second = matchPreviousExpr(first) + matchExpr = first + ":" + second + will match C{"1:1"}, but not C{"1:2"}. 
Because this matches by + expressions, will I{not} match the leading C{"1:1"} in C{"1:10"}; + the expressions are evaluated first, and then compared, so + C{"1"} is compared with C{"10"}. + Do I{not} use with packrat parsing enabled. + """ + rep = Forward() + e2 = expr.copy() + rep <<= e2 + def copyTokenToRepeater(s,l,t): + matchTokens = _flatten(t.asList()) + def mustMatchTheseTokens(s,l,t): + theseTokens = _flatten(t.asList()) + if theseTokens != matchTokens: + raise ParseException("",0,"") + rep.setParseAction( mustMatchTheseTokens, callDuringTry=True ) + expr.addParseAction(copyTokenToRepeater, callDuringTry=True) + rep.setName('(prev) ' + _ustr(expr)) + return rep + +def _escapeRegexRangeChars(s): + #~ escape these chars: ^-] + for c in r"\^-]": + s = s.replace(c,_bslash+c) + s = s.replace("\n",r"\n") + s = s.replace("\t",r"\t") + return _ustr(s) + +def oneOf( strs, caseless=False, useRegex=True ): + """ + Helper to quickly define a set of alternative Literals, and makes sure to do + longest-first testing when there is a conflict, regardless of the input order, + but returns a C{L{MatchFirst}} for best performance. + + Parameters: + - strs - a string of space-delimited literals, or a collection of string literals + - caseless - (default=C{False}) - treat all literals as caseless + - useRegex - (default=C{True}) - as an optimization, will generate a Regex + object; otherwise, will generate a C{MatchFirst} object (if C{caseless=True}, or + if creating a C{Regex} raises an exception) + + Example:: + comp_oper = oneOf("< = > <= >= !=") + var = Word(alphas) + number = Word(nums) + term = var | number + comparison_expr = term + comp_oper + term + print(comparison_expr.searchString("B = 12 AA=23 B<=AA AA>12")) + prints:: + [['B', '=', '12'], ['AA', '=', '23'], ['B', '<=', 'AA'], ['AA', '>', '12']] + """ + if caseless: + isequal = ( lambda a,b: a.upper() == b.upper() ) + masks = ( lambda a,b: b.upper().startswith(a.upper()) ) + parseElementClass = CaselessLiteral + else: + isequal = ( lambda a,b: a == b ) + masks = ( lambda a,b: b.startswith(a) ) + parseElementClass = Literal + + symbols = [] + if isinstance(strs,basestring): + symbols = strs.split() + elif isinstance(strs, collections.Iterable): + symbols = list(strs) + else: + warnings.warn("Invalid argument to oneOf, expected string or iterable", + SyntaxWarning, stacklevel=2) + if not symbols: + return NoMatch() + + i = 0 + while i < len(symbols)-1: + cur = symbols[i] + for j,other in enumerate(symbols[i+1:]): + if ( isequal(other, cur) ): + del symbols[i+j+1] + break + elif ( masks(cur, other) ): + del symbols[i+j+1] + symbols.insert(i,other) + cur = other + break + else: + i += 1 + + if not caseless and useRegex: + #~ print (strs,"->", "|".join( [ _escapeRegexChars(sym) for sym in symbols] )) + try: + if len(symbols)==len("".join(symbols)): + return Regex( "[%s]" % "".join(_escapeRegexRangeChars(sym) for sym in symbols) ).setName(' | '.join(symbols)) + else: + return Regex( "|".join(re.escape(sym) for sym in symbols) ).setName(' | '.join(symbols)) + except Exception: + warnings.warn("Exception creating Regex for oneOf, building MatchFirst", + SyntaxWarning, stacklevel=2) + + + # last resort, just use MatchFirst + return MatchFirst(parseElementClass(sym) for sym in symbols).setName(' | '.join(symbols)) + +def dictOf( key, value ): + """ + Helper to easily and clearly define a dictionary by specifying the respective patterns + for the key and value. 
Takes care of defining the C{L{Dict}}, C{L{ZeroOrMore}}, and C{L{Group}} tokens + in the proper order. The key pattern can include delimiting markers or punctuation, + as long as they are suppressed, thereby leaving the significant key text. The value + pattern can include named results, so that the C{Dict} results can include named token + fields. + + Example:: + text = "shape: SQUARE posn: upper left color: light blue texture: burlap" + attr_expr = (label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)) + print(OneOrMore(attr_expr).parseString(text).dump()) + + attr_label = label + attr_value = Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join) + + # similar to Dict, but simpler call format + result = dictOf(attr_label, attr_value).parseString(text) + print(result.dump()) + print(result['shape']) + print(result.shape) # object attribute access works too + print(result.asDict()) + prints:: + [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'light blue'], ['texture', 'burlap']] + - color: light blue + - posn: upper left + - shape: SQUARE + - texture: burlap + SQUARE + SQUARE + {'color': 'light blue', 'shape': 'SQUARE', 'posn': 'upper left', 'texture': 'burlap'} + """ + return Dict( ZeroOrMore( Group ( key + value ) ) ) + +def originalTextFor(expr, asString=True): + """ + Helper to return the original, untokenized text for a given expression. Useful to + restore the parsed fields of an HTML start tag into the raw tag text itself, or to + revert separate tokens with intervening whitespace back to the original matching + input text. By default, returns astring containing the original parsed text. + + If the optional C{asString} argument is passed as C{False}, then the return value is a + C{L{ParseResults}} containing any results names that were originally matched, and a + single token containing the original matched text from the input string. So if + the expression passed to C{L{originalTextFor}} contains expressions with defined + results names, you must set C{asString} to C{False} if you want to preserve those + results name values. + + Example:: + src = "this is test <b> bold <i>text</i> </b> normal text " + for tag in ("b","i"): + opener,closer = makeHTMLTags(tag) + patt = originalTextFor(opener + SkipTo(closer) + closer) + print(patt.searchString(src)[0]) + prints:: + ['<b> bold <i>text</i> </b>'] + ['<i>text</i>'] + """ + locMarker = Empty().setParseAction(lambda s,loc,t: loc) + endlocMarker = locMarker.copy() + endlocMarker.callPreparse = False + matchExpr = locMarker("_original_start") + expr + endlocMarker("_original_end") + if asString: + extractText = lambda s,l,t: s[t._original_start:t._original_end] + else: + def extractText(s,l,t): + t[:] = [s[t.pop('_original_start'):t.pop('_original_end')]] + matchExpr.setParseAction(extractText) + matchExpr.ignoreExprs = expr.ignoreExprs + return matchExpr + +def ungroup(expr): + """ + Helper to undo pyparsing's default grouping of And expressions, even + if all but one are non-empty. + """ + return TokenConverter(expr).setParseAction(lambda t:t[0]) + +def locatedExpr(expr): + """ + Helper to decorate a returned token with its starting and ending locations in the input string. 
+ This helper adds the following results names: + - locn_start = location where matched expression begins + - locn_end = location where matched expression ends + - value = the actual parsed results + + Be careful if the input text contains C{<TAB>} characters, you may want to call + C{L{ParserElement.parseWithTabs}} + + Example:: + wd = Word(alphas) + for match in locatedExpr(wd).searchString("ljsdf123lksdjjf123lkkjj1222"): + print(match) + prints:: + [[0, 'ljsdf', 5]] + [[8, 'lksdjjf', 15]] + [[18, 'lkkjj', 23]] + """ + locator = Empty().setParseAction(lambda s,l,t: l) + return Group(locator("locn_start") + expr("value") + locator.copy().leaveWhitespace()("locn_end")) + + +# convenience constants for positional expressions +empty = Empty().setName("empty") +lineStart = LineStart().setName("lineStart") +lineEnd = LineEnd().setName("lineEnd") +stringStart = StringStart().setName("stringStart") +stringEnd = StringEnd().setName("stringEnd") + +_escapedPunc = Word( _bslash, r"\[]-*.$+^?()~ ", exact=2 ).setParseAction(lambda s,l,t:t[0][1]) +_escapedHexChar = Regex(r"\\0?[xX][0-9a-fA-F]+").setParseAction(lambda s,l,t:unichr(int(t[0].lstrip(r'\0x'),16))) +_escapedOctChar = Regex(r"\\0[0-7]+").setParseAction(lambda s,l,t:unichr(int(t[0][1:],8))) +_singleChar = _escapedPunc | _escapedHexChar | _escapedOctChar | Word(printables, excludeChars=r'\]', exact=1) | Regex(r"\w", re.UNICODE) +_charRange = Group(_singleChar + Suppress("-") + _singleChar) +_reBracketExpr = Literal("[") + Optional("^").setResultsName("negate") + Group( OneOrMore( _charRange | _singleChar ) ).setResultsName("body") + "]" + +def srange(s): + r""" + Helper to easily define string ranges for use in Word construction. Borrows + syntax from regexp '[]' string range definitions:: + srange("[0-9]") -> "0123456789" + srange("[a-z]") -> "abcdefghijklmnopqrstuvwxyz" + srange("[a-z$_]") -> "abcdefghijklmnopqrstuvwxyz$_" + The input string must be enclosed in []'s, and the returned string is the expanded + character set joined into a single string. + The values enclosed in the []'s may be: + - a single character + - an escaped character with a leading backslash (such as C{\-} or C{\]}) + - an escaped hex character with a leading C{'\x'} (C{\x21}, which is a C{'!'} character) + (C{\0x##} is also supported for backwards compatibility) + - an escaped octal character with a leading C{'\0'} (C{\041}, which is a C{'!'} character) + - a range of any of the above, separated by a dash (C{'a-z'}, etc.) + - any combination of the above (C{'aeiouy'}, C{'a-zA-Z0-9_$'}, etc.) + """ + _expanded = lambda p: p if not isinstance(p,ParseResults) else ''.join(unichr(c) for c in range(ord(p[0]),ord(p[1])+1)) + try: + return "".join(_expanded(part) for part in _reBracketExpr.parseString(s).body) + except Exception: + return "" + +def matchOnlyAtCol(n): + """ + Helper method for defining parse actions that require matching at a specific + column in the input text. + """ + def verifyCol(strg,locn,toks): + if col(locn,strg) != n: + raise ParseException(strg,locn,"matched token not at column %d" % n) + return verifyCol + +def replaceWith(replStr): + """ + Helper method for common parse actions that simply return a literal value. Especially + useful when used with C{L{transformString<ParserElement.transformString>}()}. 
+ + Example:: + num = Word(nums).setParseAction(lambda toks: int(toks[0])) + na = oneOf("N/A NA").setParseAction(replaceWith(math.nan)) + term = na | num + + OneOrMore(term).parseString("324 234 N/A 234") # -> [324, 234, nan, 234] + """ + return lambda s,l,t: [replStr] + +def removeQuotes(s,l,t): + """ + Helper parse action for removing quotation marks from parsed quoted strings. + + Example:: + # by default, quotation marks are included in parsed results + quotedString.parseString("'Now is the Winter of our Discontent'") # -> ["'Now is the Winter of our Discontent'"] + + # use removeQuotes to strip quotation marks from parsed results + quotedString.setParseAction(removeQuotes) + quotedString.parseString("'Now is the Winter of our Discontent'") # -> ["Now is the Winter of our Discontent"] + """ + return t[0][1:-1] + +def tokenMap(func, *args): + """ + Helper to define a parse action by mapping a function to all elements of a ParseResults list. If any additional + args are passed, they are forwarded to the given function as additional arguments after + the token, as in C{hex_integer = Word(hexnums).setParseAction(tokenMap(int, 16))}, which will convert the + parsed data to an integer using base 16. + + Example (compare the last example to the one in L{ParserElement.transformString}):: + hex_ints = OneOrMore(Word(hexnums)).setParseAction(tokenMap(int, 16)) + hex_ints.runTests(''' + 00 11 22 aa FF 0a 0d 1a + ''') + + upperword = Word(alphas).setParseAction(tokenMap(str.upper)) + OneOrMore(upperword).runTests(''' + my kingdom for a horse + ''') + + wd = Word(alphas).setParseAction(tokenMap(str.title)) + OneOrMore(wd).setParseAction(' '.join).runTests(''' + now is the winter of our discontent made glorious summer by this sun of york + ''') + prints:: + 00 11 22 aa FF 0a 0d 1a + [0, 17, 34, 170, 255, 10, 13, 26] + + my kingdom for a horse + ['MY', 'KINGDOM', 'FOR', 'A', 'HORSE'] + + now is the winter of our discontent made glorious summer by this sun of york + ['Now Is The Winter Of Our Discontent Made Glorious Summer By This Sun Of York'] + """ + def pa(s,l,t): + return [func(tokn, *args) for tokn in t] + + try: + func_name = getattr(func, '__name__', + getattr(func, '__class__').__name__) + except Exception: + func_name = str(func) + pa.__name__ = func_name + + return pa + +upcaseTokens = tokenMap(lambda t: _ustr(t).upper()) +"""(Deprecated) Helper parse action to convert tokens to upper case. Deprecated in favor of L{pyparsing_common.upcaseTokens}""" + +downcaseTokens = tokenMap(lambda t: _ustr(t).lower()) +"""(Deprecated) Helper parse action to convert tokens to lower case.
Deprecated in favor of L{pyparsing_common.downcaseTokens}""" + +def _makeTags(tagStr, xml): + """Internal helper to construct opening and closing tag expressions, given a tag name""" + if isinstance(tagStr,basestring): + resname = tagStr + tagStr = Keyword(tagStr, caseless=not xml) + else: + resname = tagStr.name + + tagAttrName = Word(alphas,alphanums+"_-:") + if (xml): + tagAttrValue = dblQuotedString.copy().setParseAction( removeQuotes ) + openTag = Suppress("<") + tagStr("tag") + \ + Dict(ZeroOrMore(Group( tagAttrName + Suppress("=") + tagAttrValue ))) + \ + Optional("/",default=[False]).setResultsName("empty").setParseAction(lambda s,l,t:t[0]=='/') + Suppress(">") + else: + printablesLessRAbrack = "".join(c for c in printables if c not in ">") + tagAttrValue = quotedString.copy().setParseAction( removeQuotes ) | Word(printablesLessRAbrack) + openTag = Suppress("<") + tagStr("tag") + \ + Dict(ZeroOrMore(Group( tagAttrName.setParseAction(downcaseTokens) + \ + Optional( Suppress("=") + tagAttrValue ) ))) + \ + Optional("/",default=[False]).setResultsName("empty").setParseAction(lambda s,l,t:t[0]=='/') + Suppress(">") + closeTag = Combine(_L("</") + tagStr + ">") + + openTag = openTag.setResultsName("start"+"".join(resname.replace(":"," ").title().split())).setName("<%s>" % resname) + closeTag = closeTag.setResultsName("end"+"".join(resname.replace(":"," ").title().split())).setName("</%s>" % resname) + openTag.tag = resname + closeTag.tag = resname + return openTag, closeTag + +def makeHTMLTags(tagStr): + """ + Helper to construct opening and closing tag expressions for HTML, given a tag name. Matches + tags in either upper or lower case, attributes with namespaces and with quoted or unquoted values. + + Example:: + text = '<td>More info at the <a href="http://pyparsing.wikispaces.com">pyparsing</a> wiki page</td>' + # makeHTMLTags returns pyparsing expressions for the opening and closing tags as a 2-tuple + a,a_end = makeHTMLTags("A") + link_expr = a + SkipTo(a_end)("link_text") + a_end + + for link in link_expr.searchString(text): + # attributes in the <A> tag (like "href" shown here) are also accessible as named results + print(link.link_text, '->', link.href) + prints:: + pyparsing -> http://pyparsing.wikispaces.com + """ + return _makeTags( tagStr, False ) + +def makeXMLTags(tagStr): + """ + Helper to construct opening and closing tag expressions for XML, given a tag name. Matches + tags only in the given upper/lower case. + + Example: similar to L{makeHTMLTags} + """ + return _makeTags( tagStr, True ) + +def withAttribute(*args,**attrDict): + """ + Helper to create a validating parse action to be used with start tags created + with C{L{makeXMLTags}} or C{L{makeHTMLTags}}. Use C{withAttribute} to qualify a starting tag + with a required attribute value, to avoid false matches on common tags such as + C{<TD>} or C{<DIV>}. + + Call C{withAttribute} with a series of attribute names and values. Specify the list + of filter attributes names and values as: + - keyword arguments, as in C{(align="right")}, or + - as an explicit dict with C{**} operator, when an attribute name is also a Python + reserved word, as in C{**{"class":"Customer", "align":"right"}} + - a list of name-value tuples, as in ( ("ns1:class", "Customer"), ("ns2:align","right") ) + For attribute names with a namespace prefix, you must use the second form. Attribute + names are matched insensitive to upper/lower case. + + If just testing for C{class} (with or without a namespace), use C{L{withClass}}. 
+ + To verify that the attribute exists, but without specifying a value, pass + C{withAttribute.ANY_VALUE} as the value. + + Example:: + html = ''' + <div> + Some text + <div type="grid">1 4 0 1 0</div> + <div type="graph">1,3 2,3 1,1</div> + <div>this has no type</div> + </div> + + ''' + div,div_end = makeHTMLTags("div") + + # only match div tag having a type attribute with value "grid" + div_grid = div().setParseAction(withAttribute(type="grid")) + grid_expr = div_grid + SkipTo(div | div_end)("body") + for grid_header in grid_expr.searchString(html): + print(grid_header.body) + + # construct a match with any div tag having a type attribute, regardless of the value + div_any_type = div().setParseAction(withAttribute(type=withAttribute.ANY_VALUE)) + div_expr = div_any_type + SkipTo(div | div_end)("body") + for div_header in div_expr.searchString(html): + print(div_header.body) + prints:: + 1 4 0 1 0 + + 1 4 0 1 0 + 1,3 2,3 1,1 + """ + if args: + attrs = args[:] + else: + attrs = attrDict.items() + attrs = [(k,v) for k,v in attrs] + def pa(s,l,tokens): + for attrName,attrValue in attrs: + if attrName not in tokens: + raise ParseException(s,l,"no matching attribute " + attrName) + if attrValue != withAttribute.ANY_VALUE and tokens[attrName] != attrValue: + raise ParseException(s,l,"attribute '%s' has value '%s', must be '%s'" % + (attrName, tokens[attrName], attrValue)) + return pa +withAttribute.ANY_VALUE = object() + +def withClass(classname, namespace=''): + """ + Simplified version of C{L{withAttribute}} when matching on a div class - made + difficult because C{class} is a reserved word in Python. + + Example:: + html = ''' + <div> + Some text + <div class="grid">1 4 0 1 0</div> + <div class="graph">1,3 2,3 1,1</div> + <div>this <div> has no class</div> + </div> + + ''' + div,div_end = makeHTMLTags("div") + div_grid = div().setParseAction(withClass("grid")) + + grid_expr = div_grid + SkipTo(div | div_end)("body") + for grid_header in grid_expr.searchString(html): + print(grid_header.body) + + div_any_type = div().setParseAction(withClass(withAttribute.ANY_VALUE)) + div_expr = div_any_type + SkipTo(div | div_end)("body") + for div_header in div_expr.searchString(html): + print(div_header.body) + prints:: + 1 4 0 1 0 + + 1 4 0 1 0 + 1,3 2,3 1,1 + """ + classattr = "%s:class" % namespace if namespace else "class" + return withAttribute(**{classattr : classname}) + +opAssoc = _Constants() +opAssoc.LEFT = object() +opAssoc.RIGHT = object() + +def infixNotation( baseExpr, opList, lpar=Suppress('('), rpar=Suppress(')') ): + """ + Helper method for constructing grammars of expressions made up of + operators working in a precedence hierarchy. Operators may be unary or + binary, left- or right-associative. Parse actions can also be attached + to operator expressions. The generated parser will also recognize the use + of parentheses to override operator precedences (see example below). + + Note: if you define a deep operator list, you may see performance issues + when using infixNotation. See L{ParserElement.enablePackrat} for a + mechanism to potentially improve your parser performance. 
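+
+    For instance, a one-line sketch of turning packrat parsing on before
+    building a deep grammar (shown for illustration only)::
+        ParserElement.enablePackrat()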
+ + Parameters: + - baseExpr - expression representing the most basic element for the nested + - opList - list of tuples, one for each operator precedence level in the + expression grammar; each tuple is of the form + (opExpr, numTerms, rightLeftAssoc, parseAction), where: + - opExpr is the pyparsing expression for the operator; + may also be a string, which will be converted to a Literal; + if numTerms is 3, opExpr is a tuple of two expressions, for the + two operators separating the 3 terms + - numTerms is the number of terms for this operator (must + be 1, 2, or 3) + - rightLeftAssoc is the indicator whether the operator is + right or left associative, using the pyparsing-defined + constants C{opAssoc.RIGHT} and C{opAssoc.LEFT}. + - parseAction is the parse action to be associated with + expressions matching this operator expression (the + parse action tuple member may be omitted); if the parse action + is passed a tuple or list of functions, this is equivalent to + calling C{setParseAction(*fn)} (L{ParserElement.setParseAction}) + - lpar - expression for matching left-parentheses (default=C{Suppress('(')}) + - rpar - expression for matching right-parentheses (default=C{Suppress(')')}) + + Example:: + # simple example of four-function arithmetic with ints and variable names + integer = pyparsing_common.signed_integer + varname = pyparsing_common.identifier + + arith_expr = infixNotation(integer | varname, + [ + ('-', 1, opAssoc.RIGHT), + (oneOf('* /'), 2, opAssoc.LEFT), + (oneOf('+ -'), 2, opAssoc.LEFT), + ]) + + arith_expr.runTests(''' + 5+3*6 + (5+3)*6 + -2--11 + ''', fullDump=False) + prints:: + 5+3*6 + [[5, '+', [3, '*', 6]]] + + (5+3)*6 + [[[5, '+', 3], '*', 6]] + + -2--11 + [[['-', 2], '-', ['-', 11]]] + """ + ret = Forward() + lastExpr = baseExpr | ( lpar + ret + rpar ) + for i,operDef in enumerate(opList): + opExpr,arity,rightLeftAssoc,pa = (operDef + (None,))[:4] + termName = "%s term" % opExpr if arity < 3 else "%s%s term" % opExpr + if arity == 3: + if opExpr is None or len(opExpr) != 2: + raise ValueError("if numterms=3, opExpr must be a tuple or list of two expressions") + opExpr1, opExpr2 = opExpr + thisExpr = Forward().setName(termName) + if rightLeftAssoc == opAssoc.LEFT: + if arity == 1: + matchExpr = FollowedBy(lastExpr + opExpr) + Group( lastExpr + OneOrMore( opExpr ) ) + elif arity == 2: + if opExpr is not None: + matchExpr = FollowedBy(lastExpr + opExpr + lastExpr) + Group( lastExpr + OneOrMore( opExpr + lastExpr ) ) + else: + matchExpr = FollowedBy(lastExpr+lastExpr) + Group( lastExpr + OneOrMore(lastExpr) ) + elif arity == 3: + matchExpr = FollowedBy(lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr) + \ + Group( lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr ) + else: + raise ValueError("operator must be unary (1), binary (2), or ternary (3)") + elif rightLeftAssoc == opAssoc.RIGHT: + if arity == 1: + # try to avoid LR with this extra test + if not isinstance(opExpr, Optional): + opExpr = Optional(opExpr) + matchExpr = FollowedBy(opExpr.expr + thisExpr) + Group( opExpr + thisExpr ) + elif arity == 2: + if opExpr is not None: + matchExpr = FollowedBy(lastExpr + opExpr + thisExpr) + Group( lastExpr + OneOrMore( opExpr + thisExpr ) ) + else: + matchExpr = FollowedBy(lastExpr + thisExpr) + Group( lastExpr + OneOrMore( thisExpr ) ) + elif arity == 3: + matchExpr = FollowedBy(lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr) + \ + Group( lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr ) + else: + raise ValueError("operator must be unary (1), binary (2), or 
ternary (3)") + else: + raise ValueError("operator must indicate right or left associativity") + if pa: + if isinstance(pa, (tuple, list)): + matchExpr.setParseAction(*pa) + else: + matchExpr.setParseAction(pa) + thisExpr <<= ( matchExpr.setName(termName) | lastExpr ) + lastExpr = thisExpr + ret <<= lastExpr + return ret + +operatorPrecedence = infixNotation +"""(Deprecated) Former name of C{L{infixNotation}}, will be dropped in a future release.""" + +dblQuotedString = Combine(Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*')+'"').setName("string enclosed in double quotes") +sglQuotedString = Combine(Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*")+"'").setName("string enclosed in single quotes") +quotedString = Combine(Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*')+'"'| + Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*")+"'").setName("quotedString using single or double quotes") +unicodeString = Combine(_L('u') + quotedString.copy()).setName("unicode string literal") + +def nestedExpr(opener="(", closer=")", content=None, ignoreExpr=quotedString.copy()): + """ + Helper method for defining nested lists enclosed in opening and closing + delimiters ("(" and ")" are the default). + + Parameters: + - opener - opening character for a nested list (default=C{"("}); can also be a pyparsing expression + - closer - closing character for a nested list (default=C{")"}); can also be a pyparsing expression + - content - expression for items within the nested lists (default=C{None}) + - ignoreExpr - expression for ignoring opening and closing delimiters (default=C{quotedString}) + + If an expression is not provided for the content argument, the nested + expression will capture all whitespace-delimited content between delimiters + as a list of separate values. + + Use the C{ignoreExpr} argument to define expressions that may contain + opening or closing characters that should not be treated as opening + or closing characters for nesting, such as quotedString or a comment + expression. Specify multiple expressions using an C{L{Or}} or C{L{MatchFirst}}. + The default is L{quotedString}, but if no expressions are to be ignored, + then pass C{None} for this argument. 
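+
+    As a minimal sketch (assuming the default C{"("} and C{")"} delimiters and
+    default content), a bare call already parses whitespace-delimited nested
+    lists::
+        nestedExpr().parseString("(a (b c) d)") # -> [['a', ['b', 'c'], 'd']]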
+ + Example:: + data_type = oneOf("void int short long char float double") + decl_data_type = Combine(data_type + Optional(Word('*'))) + ident = Word(alphas+'_', alphanums+'_') + number = pyparsing_common.number + arg = Group(decl_data_type + ident) + LPAR,RPAR = map(Suppress, "()") + + code_body = nestedExpr('{', '}', ignoreExpr=(quotedString | cStyleComment)) + + c_function = (decl_data_type("type") + + ident("name") + + LPAR + Optional(delimitedList(arg), [])("args") + RPAR + + code_body("body")) + c_function.ignore(cStyleComment) + + source_code = ''' + int is_odd(int x) { + return (x%2); + } + + int dec_to_hex(char hchar) { + if (hchar >= '0' && hchar <= '9') { + return (ord(hchar)-ord('0')); + } else { + return (10+ord(hchar)-ord('A')); + } + } + ''' + for func in c_function.searchString(source_code): + print("%(name)s (%(type)s) args: %(args)s" % func) + + prints:: + is_odd (int) args: [['int', 'x']] + dec_to_hex (int) args: [['char', 'hchar']] + """ + if opener == closer: + raise ValueError("opening and closing strings cannot be the same") + if content is None: + if isinstance(opener,basestring) and isinstance(closer,basestring): + if len(opener) == 1 and len(closer)==1: + if ignoreExpr is not None: + content = (Combine(OneOrMore(~ignoreExpr + + CharsNotIn(opener+closer+ParserElement.DEFAULT_WHITE_CHARS,exact=1)) + ).setParseAction(lambda t:t[0].strip())) + else: + content = (empty.copy()+CharsNotIn(opener+closer+ParserElement.DEFAULT_WHITE_CHARS + ).setParseAction(lambda t:t[0].strip())) + else: + if ignoreExpr is not None: + content = (Combine(OneOrMore(~ignoreExpr + + ~Literal(opener) + ~Literal(closer) + + CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS,exact=1)) + ).setParseAction(lambda t:t[0].strip())) + else: + content = (Combine(OneOrMore(~Literal(opener) + ~Literal(closer) + + CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS,exact=1)) + ).setParseAction(lambda t:t[0].strip())) + else: + raise ValueError("opening and closing arguments must be strings if no content expression is given") + ret = Forward() + if ignoreExpr is not None: + ret <<= Group( Suppress(opener) + ZeroOrMore( ignoreExpr | ret | content ) + Suppress(closer) ) + else: + ret <<= Group( Suppress(opener) + ZeroOrMore( ret | content ) + Suppress(closer) ) + ret.setName('nested %s%s expression' % (opener,closer)) + return ret + +def indentedBlock(blockStatementExpr, indentStack, indent=True): + """ + Helper method for defining space-delimited indentation blocks, such as + those used to define block statements in Python source code. + + Parameters: + - blockStatementExpr - expression defining syntax of statement that + is repeated within the indented block + - indentStack - list created by caller to manage indentation stack + (multiple statementWithIndentedBlock expressions within a single grammar + should share a common indentStack) + - indent - boolean indicating whether block must be indented beyond the + the current level; set to False for block of left-most statements + (default=C{True}) + + A valid block must contain at least one C{blockStatement}. 
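+
+    A minimal sketch of the calling pattern (the C{stmt} grammar is
+    illustrative; see the full example below)::
+        indentStack = [1]
+        suite = indentedBlock(stmt, indentStack)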
+ + Example:: + data = ''' + def A(z): + A1 + B = 100 + G = A2 + A2 + A3 + B + def BB(a,b,c): + BB1 + def BBA(): + bba1 + bba2 + bba3 + C + D + def spam(x,y): + def eggs(z): + pass + ''' + + + indentStack = [1] + stmt = Forward() + + identifier = Word(alphas, alphanums) + funcDecl = ("def" + identifier + Group( "(" + Optional( delimitedList(identifier) ) + ")" ) + ":") + func_body = indentedBlock(stmt, indentStack) + funcDef = Group( funcDecl + func_body ) + + rvalue = Forward() + funcCall = Group(identifier + "(" + Optional(delimitedList(rvalue)) + ")") + rvalue << (funcCall | identifier | Word(nums)) + assignment = Group(identifier + "=" + rvalue) + stmt << ( funcDef | assignment | identifier ) + + module_body = OneOrMore(stmt) + + parseTree = module_body.parseString(data) + parseTree.pprint() + prints:: + [['def', + 'A', + ['(', 'z', ')'], + ':', + [['A1'], [['B', '=', '100']], [['G', '=', 'A2']], ['A2'], ['A3']]], + 'B', + ['def', + 'BB', + ['(', 'a', 'b', 'c', ')'], + ':', + [['BB1'], [['def', 'BBA', ['(', ')'], ':', [['bba1'], ['bba2'], ['bba3']]]]]], + 'C', + 'D', + ['def', + 'spam', + ['(', 'x', 'y', ')'], + ':', + [[['def', 'eggs', ['(', 'z', ')'], ':', [['pass']]]]]]] + """ + def checkPeerIndent(s,l,t): + if l >= len(s): return + curCol = col(l,s) + if curCol != indentStack[-1]: + if curCol > indentStack[-1]: + raise ParseFatalException(s,l,"illegal nesting") + raise ParseException(s,l,"not a peer entry") + + def checkSubIndent(s,l,t): + curCol = col(l,s) + if curCol > indentStack[-1]: + indentStack.append( curCol ) + else: + raise ParseException(s,l,"not a subentry") + + def checkUnindent(s,l,t): + if l >= len(s): return + curCol = col(l,s) + if not(indentStack and curCol < indentStack[-1] and curCol <= indentStack[-2]): + raise ParseException(s,l,"not an unindent") + indentStack.pop() + + NL = OneOrMore(LineEnd().setWhitespaceChars("\t ").suppress()) + INDENT = (Empty() + Empty().setParseAction(checkSubIndent)).setName('INDENT') + PEER = Empty().setParseAction(checkPeerIndent).setName('') + UNDENT = Empty().setParseAction(checkUnindent).setName('UNINDENT') + if indent: + smExpr = Group( Optional(NL) + + #~ FollowedBy(blockStatementExpr) + + INDENT + (OneOrMore( PEER + Group(blockStatementExpr) + Optional(NL) )) + UNDENT) + else: + smExpr = Group( Optional(NL) + + (OneOrMore( PEER + Group(blockStatementExpr) + Optional(NL) )) ) + blockStatementExpr.ignore(_bslash + LineEnd()) + return smExpr.setName('indented block') + +alphas8bit = srange(r"[\0xc0-\0xd6\0xd8-\0xf6\0xf8-\0xff]") +punc8bit = srange(r"[\0xa1-\0xbf\0xd7\0xf7]") + +anyOpenTag,anyCloseTag = makeHTMLTags(Word(alphas,alphanums+"_:").setName('any tag')) +_htmlEntityMap = dict(zip("gt lt amp nbsp quot apos".split(),'><& "\'')) +commonHTMLEntity = Regex('&(?P<entity>' + '|'.join(_htmlEntityMap.keys()) +");").setName("common HTML entity") +def replaceHTMLEntity(t): + """Helper parser action to replace common HTML entities with their special characters""" + return _htmlEntityMap.get(t.entity) + +# it's easy to get these comment structures wrong - they're very common, so may as well make them available +cStyleComment = Combine(Regex(r"/\*(?:[^*]|\*(?!/))*") + '*/').setName("C style comment") +"Comment of the form C{/* ... */}" + +htmlComment = Regex(r"<!--[\s\S]*?-->").setName("HTML comment") +"Comment of the form C{<!-- ... -->}" + +restOfLine = Regex(r".*").leaveWhitespace().setName("rest of line") +dblSlashComment = Regex(r"//(?:\\\n|[^\n])*").setName("// comment") +"Comment of the form C{// ... 
(to end of line)}" + +cppStyleComment = Combine(Regex(r"/\*(?:[^*]|\*(?!/))*") + '*/'| dblSlashComment).setName("C++ style comment") +"Comment of either form C{L{cStyleComment}} or C{L{dblSlashComment}}" + +javaStyleComment = cppStyleComment +"Same as C{L{cppStyleComment}}" + +pythonStyleComment = Regex(r"#.*").setName("Python style comment") +"Comment of the form C{# ... (to end of line)}" + +_commasepitem = Combine(OneOrMore(Word(printables, excludeChars=',') + + Optional( Word(" \t") + + ~Literal(",") + ~LineEnd() ) ) ).streamline().setName("commaItem") +commaSeparatedList = delimitedList( Optional( quotedString.copy() | _commasepitem, default="") ).setName("commaSeparatedList") +"""(Deprecated) Predefined expression of 1 or more printable words or quoted strings, separated by commas. + This expression is deprecated in favor of L{pyparsing_common.comma_separated_list}.""" + +# some other useful expressions - using lower-case class name since we are really using this as a namespace +class pyparsing_common: + """ + Here are some common low-level expressions that may be useful in jump-starting parser development: + - numeric forms (L{integers<integer>}, L{reals<real>}, L{scientific notation<sci_real>}) + - common L{programming identifiers<identifier>} + - network addresses (L{MAC<mac_address>}, L{IPv4<ipv4_address>}, L{IPv6<ipv6_address>}) + - ISO8601 L{dates<iso8601_date>} and L{datetime<iso8601_datetime>} + - L{UUID<uuid>} + - L{comma-separated list<comma_separated_list>} + Parse actions: + - C{L{convertToInteger}} + - C{L{convertToFloat}} + - C{L{convertToDate}} + - C{L{convertToDatetime}} + - C{L{stripHTMLTags}} + - C{L{upcaseTokens}} + - C{L{downcaseTokens}} + + Example:: + pyparsing_common.number.runTests(''' + # any int or real number, returned as the appropriate type + 100 + -100 + +100 + 3.14159 + 6.02e23 + 1e-12 + ''') + + pyparsing_common.fnumber.runTests(''' + # any int or real number, returned as float + 100 + -100 + +100 + 3.14159 + 6.02e23 + 1e-12 + ''') + + pyparsing_common.hex_integer.runTests(''' + # hex numbers + 100 + FF + ''') + + pyparsing_common.fraction.runTests(''' + # fractions + 1/2 + -3/4 + ''') + + pyparsing_common.mixed_integer.runTests(''' + # mixed fractions + 1 + 1/2 + -3/4 + 1-3/4 + ''') + + import uuid + pyparsing_common.uuid.setParseAction(tokenMap(uuid.UUID)) + pyparsing_common.uuid.runTests(''' + # uuid + 12345678-1234-5678-1234-567812345678 + ''') + prints:: + # any int or real number, returned as the appropriate type + 100 + [100] + + -100 + [-100] + + +100 + [100] + + 3.14159 + [3.14159] + + 6.02e23 + [6.02e+23] + + 1e-12 + [1e-12] + + # any int or real number, returned as float + 100 + [100.0] + + -100 + [-100.0] + + +100 + [100.0] + + 3.14159 + [3.14159] + + 6.02e23 + [6.02e+23] + + 1e-12 + [1e-12] + + # hex numbers + 100 + [256] + + FF + [255] + + # fractions + 1/2 + [0.5] + + -3/4 + [-0.75] + + # mixed fractions + 1 + [1] + + 1/2 + [0.5] + + -3/4 + [-0.75] + + 1-3/4 + [1.75] + + # uuid + 12345678-1234-5678-1234-567812345678 + [UUID('12345678-1234-5678-1234-567812345678')] + """ + + convertToInteger = tokenMap(int) + """ + Parse action for converting parsed integers to Python int + """ + + convertToFloat = tokenMap(float) + """ + Parse action for converting parsed numbers to Python float + """ + + integer = Word(nums).setName("integer").setParseAction(convertToInteger) + """expression that parses an unsigned integer, returns an int""" + + hex_integer = Word(hexnums).setName("hex integer").setParseAction(tokenMap(int,16)) + """expression that 
parses a hexadecimal integer, returns an int""" + + signed_integer = Regex(r'[+-]?\d+').setName("signed integer").setParseAction(convertToInteger) + """expression that parses an integer with optional leading sign, returns an int""" + + fraction = (signed_integer().setParseAction(convertToFloat) + '/' + signed_integer().setParseAction(convertToFloat)).setName("fraction") + """fractional expression of an integer divided by an integer, returns a float""" + fraction.addParseAction(lambda t: t[0]/t[-1]) + + mixed_integer = (fraction | signed_integer + Optional(Optional('-').suppress() + fraction)).setName("fraction or mixed integer-fraction") + """mixed integer of the form 'integer - fraction', with optional leading integer, returns float""" + mixed_integer.addParseAction(sum) + + real = Regex(r'[+-]?\d+\.\d*').setName("real number").setParseAction(convertToFloat) + """expression that parses a floating point number and returns a float""" + + sci_real = Regex(r'[+-]?\d+([eE][+-]?\d+|\.\d*([eE][+-]?\d+)?)').setName("real number with scientific notation").setParseAction(convertToFloat) + """expression that parses a floating point number with optional scientific notation and returns a float""" + + # streamlining this expression makes the docs nicer-looking + number = (sci_real | real | signed_integer).streamline() + """any numeric expression, returns the corresponding Python type""" + + fnumber = Regex(r'[+-]?\d+\.?\d*([eE][+-]?\d+)?').setName("fnumber").setParseAction(convertToFloat) + """any int or real number, returned as float""" + + identifier = Word(alphas+'_', alphanums+'_').setName("identifier") + """typical code identifier (leading alpha or '_', followed by 0 or more alphas, nums, or '_')""" + + ipv4_address = Regex(r'(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})(\.(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})){3}').setName("IPv4 address") + "IPv4 address (C{0.0.0.0 - 255.255.255.255})" + + _ipv6_part = Regex(r'[0-9a-fA-F]{1,4}').setName("hex_integer") + _full_ipv6_address = (_ipv6_part + (':' + _ipv6_part)*7).setName("full IPv6 address") + _short_ipv6_address = (Optional(_ipv6_part + (':' + _ipv6_part)*(0,6)) + "::" + Optional(_ipv6_part + (':' + _ipv6_part)*(0,6))).setName("short IPv6 address") + _short_ipv6_address.addCondition(lambda t: sum(1 for tt in t if pyparsing_common._ipv6_part.matches(tt)) < 8) + _mixed_ipv6_address = ("::ffff:" + ipv4_address).setName("mixed IPv6 address") + ipv6_address = Combine((_full_ipv6_address | _mixed_ipv6_address | _short_ipv6_address).setName("IPv6 address")).setName("IPv6 address") + "IPv6 address (long, short, or mixed form)" + + mac_address = Regex(r'[0-9a-fA-F]{2}([:.-])[0-9a-fA-F]{2}(?:\1[0-9a-fA-F]{2}){4}').setName("MAC address") + "MAC address xx:xx:xx:xx:xx (may also have '-' or '.' 
delimiters)" + + @staticmethod + def convertToDate(fmt="%Y-%m-%d"): + """ + Helper to create a parse action for converting parsed date string to Python datetime.date + + Params - + - fmt - format to be passed to datetime.strptime (default=C{"%Y-%m-%d"}) + + Example:: + date_expr = pyparsing_common.iso8601_date.copy() + date_expr.setParseAction(pyparsing_common.convertToDate()) + print(date_expr.parseString("1999-12-31")) + prints:: + [datetime.date(1999, 12, 31)] + """ + def cvt_fn(s,l,t): + try: + return datetime.strptime(t[0], fmt).date() + except ValueError as ve: + raise ParseException(s, l, str(ve)) + return cvt_fn + + @staticmethod + def convertToDatetime(fmt="%Y-%m-%dT%H:%M:%S.%f"): + """ + Helper to create a parse action for converting parsed datetime string to Python datetime.datetime + + Params - + - fmt - format to be passed to datetime.strptime (default=C{"%Y-%m-%dT%H:%M:%S.%f"}) + + Example:: + dt_expr = pyparsing_common.iso8601_datetime.copy() + dt_expr.setParseAction(pyparsing_common.convertToDatetime()) + print(dt_expr.parseString("1999-12-31T23:59:59.999")) + prints:: + [datetime.datetime(1999, 12, 31, 23, 59, 59, 999000)] + """ + def cvt_fn(s,l,t): + try: + return datetime.strptime(t[0], fmt) + except ValueError as ve: + raise ParseException(s, l, str(ve)) + return cvt_fn + + iso8601_date = Regex(r'(?P<year>\d{4})(?:-(?P<month>\d\d)(?:-(?P<day>\d\d))?)?').setName("ISO8601 date") + "ISO8601 date (C{yyyy-mm-dd})" + + iso8601_datetime = Regex(r'(?P<year>\d{4})-(?P<month>\d\d)-(?P<day>\d\d)[T ](?P<hour>\d\d):(?P<minute>\d\d)(:(?P<second>\d\d(\.\d*)?)?)?(?P<tz>Z|[+-]\d\d:?\d\d)?').setName("ISO8601 datetime") + "ISO8601 datetime (C{yyyy-mm-ddThh:mm:ss.s(Z|+-00:00)}) - trailing seconds, milliseconds, and timezone optional; accepts separating C{'T'} or C{' '}" + + uuid = Regex(r'[0-9a-fA-F]{8}(-[0-9a-fA-F]{4}){3}-[0-9a-fA-F]{12}').setName("UUID") + "UUID (C{xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx})" + + _html_stripper = anyOpenTag.suppress() | anyCloseTag.suppress() + @staticmethod + def stripHTMLTags(s, l, tokens): + """ + Parse action to remove HTML tags from web page HTML source + + Example:: + # strip HTML links from normal text + text = '<td>More info at the <a href="http://pyparsing.wikispaces.com">pyparsing</a> wiki page</td>' + td,td_end = makeHTMLTags("TD") + table_text = td + SkipTo(td_end).setParseAction(pyparsing_common.stripHTMLTags)("body") + td_end + + print(table_text.parseString(text).body) # -> 'More info at the pyparsing wiki page' + """ + return pyparsing_common._html_stripper.transformString(tokens[0]) + + _commasepitem = Combine(OneOrMore(~Literal(",") + ~LineEnd() + Word(printables, excludeChars=',') + + Optional( White(" \t") ) ) ).streamline().setName("commaItem") + comma_separated_list = delimitedList( Optional( quotedString.copy() | _commasepitem, default="") ).setName("comma separated list") + """Predefined expression of 1 or more printable words or quoted strings, separated by commas.""" + + upcaseTokens = staticmethod(tokenMap(lambda t: _ustr(t).upper())) + """Parse action to convert tokens to upper case.""" + + downcaseTokens = staticmethod(tokenMap(lambda t: _ustr(t).lower())) + """Parse action to convert tokens to lower case.""" + + +if __name__ == "__main__": + + selectToken = CaselessLiteral("select") + fromToken = CaselessLiteral("from") + + ident = Word(alphas, alphanums + "_$") + + columnName = delimitedList(ident, ".", combine=True).setParseAction(upcaseTokens) + columnNameList = Group(delimitedList(columnName)).setName("columns") + columnSpec 
= ('*' | columnNameList) + + tableName = delimitedList(ident, ".", combine=True).setParseAction(upcaseTokens) + tableNameList = Group(delimitedList(tableName)).setName("tables") + + simpleSQL = selectToken("command") + columnSpec("columns") + fromToken + tableNameList("tables") + + # demo runTests method, including embedded comments in test string + simpleSQL.runTests(""" + # '*' as column list and dotted table name + select * from SYS.XYZZY + + # caseless match on "SELECT", and casts back to "select" + SELECT * from XYZZY, ABC + + # list of column names, and mixed case SELECT keyword + Select AA,BB,CC from Sys.dual + + # multiple tables + Select A, B, C from Sys.dual, Table2 + + # invalid SELECT keyword - should fail + Xelect A, B, C from Sys.dual + + # incomplete command - should fail + Select + + # invalid column name - should fail + Select ^^^ frox Sys.dual + + """) + + pyparsing_common.number.runTests(""" + 100 + -100 + +100 + 3.14159 + 6.02e23 + 1e-12 + """) + + # any int or real number, returned as float + pyparsing_common.fnumber.runTests(""" + 100 + -100 + +100 + 3.14159 + 6.02e23 + 1e-12 + """) + + pyparsing_common.hex_integer.runTests(""" + 100 + FF + """) + + import uuid + pyparsing_common.uuid.setParseAction(tokenMap(uuid.UUID)) + pyparsing_common.uuid.runTests(""" + 12345678-1234-5678-1234-567812345678 + """) diff --git a/env/lib/python3.6/site-packages/pip/_vendor/pytoml/core.py b/env/lib/python3.6/site-packages/pip/_vendor/pytoml/core.py new file mode 100644 index 0000000..c182734 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/pytoml/core.py @@ -0,0 +1,13 @@ +class TomlError(RuntimeError): + def __init__(self, message, line, col, filename): + RuntimeError.__init__(self, message, line, col, filename) + self.message = message + self.line = line + self.col = col + self.filename = filename + + def __str__(self): + return '{}({}, {}): {}'.format(self.filename, self.line, self.col, self.message) + + def __repr__(self): + return 'TomlError({!r}, {!r}, {!r}, {!r})'.format(self.message, self.line, self.col, self.filename) diff --git a/env/lib/python3.6/site-packages/pip/_vendor/pytoml/parser.py b/env/lib/python3.6/site-packages/pip/_vendor/pytoml/parser.py new file mode 100644 index 0000000..e03a03f --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/pytoml/parser.py @@ -0,0 +1,374 @@ +import string, re, sys, datetime +from .core import TomlError + +if sys.version_info[0] == 2: + _chr = unichr +else: + _chr = chr + +def load(fin, translate=lambda t, x, v: v, object_pairs_hook=dict): + return loads(fin.read(), translate=translate, object_pairs_hook=object_pairs_hook, filename=getattr(fin, 'name', repr(fin))) + +def loads(s, filename='<string>', translate=lambda t, x, v: v, object_pairs_hook=dict): + if isinstance(s, bytes): + s = s.decode('utf-8') + + s = s.replace('\r\n', '\n') + + root = object_pairs_hook() + tables = object_pairs_hook() + scope = root + + src = _Source(s, filename=filename) + ast = _p_toml(src, object_pairs_hook=object_pairs_hook) + + def error(msg): + raise TomlError(msg, pos[0], pos[1], filename) + + def process_value(v, object_pairs_hook): + kind, text, value, pos = v + if kind == 'str' and value.startswith('\n'): + value = value[1:] + if kind == 'array': + if value and any(k != value[0][0] for k, t, v, p in value[1:]): + error('array-type-mismatch') + value = [process_value(item, object_pairs_hook=object_pairs_hook) for item in value] + elif kind == 'table': + value = object_pairs_hook([(k, process_value(value[k], 
object_pairs_hook=object_pairs_hook)) for k in value]) + return translate(kind, text, value) + + for kind, value, pos in ast: + if kind == 'kv': + k, v = value + if k in scope: + error('duplicate_keys. Key "{0}" was used more than once.'.format(k)) + scope[k] = process_value(v, object_pairs_hook=object_pairs_hook) + else: + is_table_array = (kind == 'table_array') + cur = tables + for name in value[:-1]: + if isinstance(cur.get(name), list): + d, cur = cur[name][-1] + else: + d, cur = cur.setdefault(name, (None, object_pairs_hook())) + + scope = object_pairs_hook() + name = value[-1] + if name not in cur: + if is_table_array: + cur[name] = [(scope, object_pairs_hook())] + else: + cur[name] = (scope, object_pairs_hook()) + elif isinstance(cur[name], list): + if not is_table_array: + error('table_type_mismatch') + cur[name].append((scope, object_pairs_hook())) + else: + if is_table_array: + error('table_type_mismatch') + old_scope, next_table = cur[name] + if old_scope is not None: + error('duplicate_tables') + cur[name] = (scope, next_table) + + def merge_tables(scope, tables): + if scope is None: + scope = object_pairs_hook() + for k in tables: + if k in scope: + error('key_table_conflict') + v = tables[k] + if isinstance(v, list): + scope[k] = [merge_tables(sc, tbl) for sc, tbl in v] + else: + scope[k] = merge_tables(v[0], v[1]) + return scope + + return merge_tables(root, tables) + +class _Source: + def __init__(self, s, filename=None): + self.s = s + self._pos = (1, 1) + self._last = None + self._filename = filename + self.backtrack_stack = [] + + def last(self): + return self._last + + def pos(self): + return self._pos + + def fail(self): + return self._expect(None) + + def consume_dot(self): + if self.s: + self._last = self.s[0] + self.s = self[1:] + self._advance(self._last) + return self._last + return None + + def expect_dot(self): + return self._expect(self.consume_dot()) + + def consume_eof(self): + if not self.s: + self._last = '' + return True + return False + + def expect_eof(self): + return self._expect(self.consume_eof()) + + def consume(self, s): + if self.s.startswith(s): + self.s = self.s[len(s):] + self._last = s + self._advance(s) + return True + return False + + def expect(self, s): + return self._expect(self.consume(s)) + + def consume_re(self, re): + m = re.match(self.s) + if m: + self.s = self.s[len(m.group(0)):] + self._last = m + self._advance(m.group(0)) + return m + return None + + def expect_re(self, re): + return self._expect(self.consume_re(re)) + + def __enter__(self): + self.backtrack_stack.append((self.s, self._pos)) + + def __exit__(self, type, value, traceback): + if type is None: + self.backtrack_stack.pop() + else: + self.s, self._pos = self.backtrack_stack.pop() + return type == TomlError + + def commit(self): + self.backtrack_stack[-1] = (self.s, self._pos) + + def _expect(self, r): + if not r: + raise TomlError('msg', self._pos[0], self._pos[1], self._filename) + return r + + def _advance(self, s): + suffix_pos = s.rfind('\n') + if suffix_pos == -1: + self._pos = (self._pos[0], self._pos[1] + len(s)) + else: + self._pos = (self._pos[0] + s.count('\n'), len(s) - suffix_pos) + +_ews_re = re.compile(r'(?:[ \t]|#[^\n]*\n|#[^\n]*\Z|\n)*') +def _p_ews(s): + s.expect_re(_ews_re) + +_ws_re = re.compile(r'[ \t]*') +def _p_ws(s): + s.expect_re(_ws_re) + +_escapes = { 'b': '\b', 'n': '\n', 'r': '\r', 't': '\t', '"': '"', '\'': '\'', + '\\': '\\', '/': '/', 'f': '\f' } + +_basicstr_re = re.compile(r'[^"\\\000-\037]*') +_short_uni_re = 
re.compile(r'u([0-9a-fA-F]{4})') +_long_uni_re = re.compile(r'U([0-9a-fA-F]{8})') +_escapes_re = re.compile('[bnrt"\'\\\\/f]') +_newline_esc_re = re.compile('\n[ \t\n]*') +def _p_basicstr_content(s, content=_basicstr_re): + res = [] + while True: + res.append(s.expect_re(content).group(0)) + if not s.consume('\\'): + break + if s.consume_re(_newline_esc_re): + pass + elif s.consume_re(_short_uni_re) or s.consume_re(_long_uni_re): + res.append(_chr(int(s.last().group(1), 16))) + else: + s.expect_re(_escapes_re) + res.append(_escapes[s.last().group(0)]) + return ''.join(res) + +_key_re = re.compile(r'[0-9a-zA-Z-_]+') +def _p_key(s): + with s: + s.expect('"') + r = _p_basicstr_content(s, _basicstr_re) + s.expect('"') + return r + if s.consume('\''): + if s.consume('\'\''): + r = s.expect_re(_litstr_ml_re).group(0) + s.expect('\'\'\'') + else: + r = s.expect_re(_litstr_re).group(0) + s.expect('\'') + return r + return s.expect_re(_key_re).group(0) + +_float_re = re.compile(r'[+-]?(?:0|[1-9](?:_?\d)*)(?:\.\d(?:_?\d)*)?(?:[eE][+-]?(?:\d(?:_?\d)*))?') +_datetime_re = re.compile(r'(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2})(\.\d+)?(?:Z|([+-]\d{2}):(\d{2}))') + +_basicstr_ml_re = re.compile(r'(?:(?:|"|"")[^"\\\000-\011\013-\037])*') +_litstr_re = re.compile(r"[^'\000-\037]*") +_litstr_ml_re = re.compile(r"(?:(?:|'|'')(?:[^'\000-\011\013-\037]))*") +def _p_value(s, object_pairs_hook): + pos = s.pos() + + if s.consume('true'): + return 'bool', s.last(), True, pos + if s.consume('false'): + return 'bool', s.last(), False, pos + + if s.consume('"'): + if s.consume('""'): + r = _p_basicstr_content(s, _basicstr_ml_re) + s.expect('"""') + else: + r = _p_basicstr_content(s, _basicstr_re) + s.expect('"') + return 'str', r, r, pos + + if s.consume('\''): + if s.consume('\'\''): + r = s.expect_re(_litstr_ml_re).group(0) + s.expect('\'\'\'') + else: + r = s.expect_re(_litstr_re).group(0) + s.expect('\'') + return 'str', r, r, pos + + if s.consume_re(_datetime_re): + m = s.last() + s0 = m.group(0) + r = map(int, m.groups()[:6]) + if m.group(7): + micro = float(m.group(7)) + else: + micro = 0 + + if m.group(8): + g = int(m.group(8), 10) * 60 + int(m.group(9), 10) + tz = _TimeZone(datetime.timedelta(0, g * 60)) + else: + tz = _TimeZone(datetime.timedelta(0, 0)) + + y, m, d, H, M, S = r + dt = datetime.datetime(y, m, d, H, M, S, int(micro * 1000000), tz) + return 'datetime', s0, dt, pos + + if s.consume_re(_float_re): + m = s.last().group(0) + r = m.replace('_','') + if '.' 
in m or 'e' in m or 'E' in m: + return 'float', m, float(r), pos + else: + return 'int', m, int(r, 10), pos + + if s.consume('['): + items = [] + with s: + while True: + _p_ews(s) + items.append(_p_value(s, object_pairs_hook=object_pairs_hook)) + s.commit() + _p_ews(s) + s.expect(',') + s.commit() + _p_ews(s) + s.expect(']') + return 'array', None, items, pos + + if s.consume('{'): + _p_ws(s) + items = object_pairs_hook() + if not s.consume('}'): + k = _p_key(s) + _p_ws(s) + s.expect('=') + _p_ws(s) + items[k] = _p_value(s, object_pairs_hook=object_pairs_hook) + _p_ws(s) + while s.consume(','): + _p_ws(s) + k = _p_key(s) + _p_ws(s) + s.expect('=') + _p_ws(s) + items[k] = _p_value(s, object_pairs_hook=object_pairs_hook) + _p_ws(s) + s.expect('}') + return 'table', None, items, pos + + s.fail() + +def _p_stmt(s, object_pairs_hook): + pos = s.pos() + if s.consume( '['): + is_array = s.consume('[') + _p_ws(s) + keys = [_p_key(s)] + _p_ws(s) + while s.consume('.'): + _p_ws(s) + keys.append(_p_key(s)) + _p_ws(s) + s.expect(']') + if is_array: + s.expect(']') + return 'table_array' if is_array else 'table', keys, pos + + key = _p_key(s) + _p_ws(s) + s.expect('=') + _p_ws(s) + value = _p_value(s, object_pairs_hook=object_pairs_hook) + return 'kv', (key, value), pos + +_stmtsep_re = re.compile(r'(?:[ \t]*(?:#[^\n]*)?\n)+[ \t]*') +def _p_toml(s, object_pairs_hook): + stmts = [] + _p_ews(s) + with s: + stmts.append(_p_stmt(s, object_pairs_hook=object_pairs_hook)) + while True: + s.commit() + s.expect_re(_stmtsep_re) + stmts.append(_p_stmt(s, object_pairs_hook=object_pairs_hook)) + _p_ews(s) + s.expect_eof() + return stmts + +class _TimeZone(datetime.tzinfo): + def __init__(self, offset): + self._offset = offset + + def utcoffset(self, dt): + return self._offset + + def dst(self, dt): + return None + + def tzname(self, dt): + m = self._offset.total_seconds() // 60 + if m < 0: + res = '-' + m = -m + else: + res = '+' + h = m // 60 + m = m - h * 60 + return '{}{:.02}{:.02}'.format(res, h, m) diff --git a/env/lib/python3.6/site-packages/pip/_vendor/pytoml/writer.py b/env/lib/python3.6/site-packages/pip/_vendor/pytoml/writer.py new file mode 100644 index 0000000..6eaf5d7 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/pytoml/writer.py @@ -0,0 +1,127 @@ +from __future__ import unicode_literals +import io, datetime, math, sys + +if sys.version_info[0] == 3: + long = int + unicode = str + + +def dumps(obj, sort_keys=False): + fout = io.StringIO() + dump(obj, fout, sort_keys=sort_keys) + return fout.getvalue() + + +_escapes = {'\n': 'n', '\r': 'r', '\\': '\\', '\t': 't', '\b': 'b', '\f': 'f', '"': '"'} + + +def _escape_string(s): + res = [] + start = 0 + + def flush(): + if start != i: + res.append(s[start:i]) + return i + 1 + + i = 0 + while i < len(s): + c = s[i] + if c in '"\\\n\r\t\b\f': + start = flush() + res.append('\\' + _escapes[c]) + elif ord(c) < 0x20: + start = flush() + res.append('\\u%04x' % ord(c)) + i += 1 + + flush() + return '"' + ''.join(res) + '"' + + +def _escape_id(s): + if any(not c.isalnum() and c not in '-_' for c in s): + return _escape_string(s) + return s + + +def _format_list(v): + return '[{0}]'.format(', '.join(_format_value(obj) for obj in v)) + +# Formula from: +# https://docs.python.org/2/library/datetime.html#datetime.timedelta.total_seconds +# Once support for py26 is dropped, this can be replaced by td.total_seconds() +def _total_seconds(td): + return ((td.microseconds + + (td.seconds + td.days * 24 * 3600) * 10**6) / 10.0**6) + +def _format_value(v): + if 
isinstance(v, bool): + return 'true' if v else 'false' + if isinstance(v, int) or isinstance(v, long): + return unicode(v) + if isinstance(v, float): + if math.isnan(v) or math.isinf(v): + raise ValueError("{0} is not a valid TOML value".format(v)) + else: + return repr(v) + elif isinstance(v, unicode) or isinstance(v, bytes): + return _escape_string(v) + elif isinstance(v, datetime.datetime): + offs = v.utcoffset() + offs = _total_seconds(offs) // 60 if offs is not None else 0 + + if offs == 0: + suffix = 'Z' + else: + if offs > 0: + suffix = '+' + else: + suffix = '-' + offs = -offs + suffix = '{0}{1:.02}{2:.02}'.format(suffix, offs // 60, offs % 60) + + if v.microsecond: + return v.strftime('%Y-%m-%dT%H:%M:%S.%f') + suffix + else: + return v.strftime('%Y-%m-%dT%H:%M:%S') + suffix + elif isinstance(v, list): + return _format_list(v) + else: + raise RuntimeError(v) + + +def dump(obj, fout, sort_keys=False): + tables = [((), obj, False)] + + while tables: + name, table, is_array = tables.pop() + if name: + section_name = '.'.join(_escape_id(c) for c in name) + if is_array: + fout.write('[[{0}]]\n'.format(section_name)) + else: + fout.write('[{0}]\n'.format(section_name)) + + table_keys = sorted(table.keys()) if sort_keys else table.keys() + new_tables = [] + has_kv = False + for k in table_keys: + v = table[k] + if isinstance(v, dict): + new_tables.append((name + (k,), v, False)) + elif isinstance(v, list) and v and all(isinstance(o, dict) for o in v): + new_tables.extend((name + (k,), d, True) for d in v) + elif v is None: + # based on mojombo's comment: https://github.com/toml-lang/toml/issues/146#issuecomment-25019344 + fout.write( + '#{} = null # To use: uncomment and replace null with value\n'.format(_escape_id(k))) + has_kv = True + else: + fout.write('{0} = {1}\n'.format(_escape_id(k), _format_value(v))) + has_kv = True + + tables.extend(reversed(new_tables)) + + if (name or has_kv) and tables: + fout.write('\n') diff --git a/env/lib/python3.6/site-packages/pip/_vendor/requests/_internal_utils.py b/env/lib/python3.6/site-packages/pip/_vendor/requests/_internal_utils.py new file mode 100644 index 0000000..759d9a5 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/requests/_internal_utils.py @@ -0,0 +1,42 @@ +# -*- coding: utf-8 -*- + +""" +requests._internal_utils +~~~~~~~~~~~~~~ + +Provides utility functions that are consumed internally by Requests +which depend on extremely few external helpers (such as compat) +""" + +from .compat import is_py2, builtin_str, str + + +def to_native_string(string, encoding='ascii'): + """Given a string object, regardless of type, returns a representation of + that string in the native string type, encoding and decoding where + necessary. This assumes ASCII unless told otherwise. + """ + if isinstance(string, builtin_str): + out = string + else: + if is_py2: + out = string.encode(encoding) + else: + out = string.decode(encoding) + + return out + + +def unicode_is_ascii(u_string): + """Determine if unicode string only contains ASCII characters. + + :param str u_string: unicode string to check. Must be unicode + and not Python 2 `str`. 
+ :rtype: bool + """ + assert isinstance(u_string, str) + try: + u_string.encode('ascii') + return True + except UnicodeEncodeError: + return False diff --git a/env/lib/python3.6/site-packages/pip/_vendor/requests/adapters.py b/env/lib/python3.6/site-packages/pip/_vendor/requests/adapters.py new file mode 100644 index 0000000..f6f3f99 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/requests/adapters.py @@ -0,0 +1,530 @@ +# -*- coding: utf-8 -*- + +""" +requests.adapters +~~~~~~~~~~~~~~~~~ + +This module contains the transport adapters that Requests uses to define +and maintain connections. +""" + +import os.path +import socket + +from pip._vendor.urllib3.poolmanager import PoolManager, proxy_from_url +from pip._vendor.urllib3.response import HTTPResponse +from pip._vendor.urllib3.util import parse_url +from pip._vendor.urllib3.util import Timeout as TimeoutSauce +from pip._vendor.urllib3.util.retry import Retry +from pip._vendor.urllib3.exceptions import ClosedPoolError +from pip._vendor.urllib3.exceptions import ConnectTimeoutError +from pip._vendor.urllib3.exceptions import HTTPError as _HTTPError +from pip._vendor.urllib3.exceptions import MaxRetryError +from pip._vendor.urllib3.exceptions import NewConnectionError +from pip._vendor.urllib3.exceptions import ProxyError as _ProxyError +from pip._vendor.urllib3.exceptions import ProtocolError +from pip._vendor.urllib3.exceptions import ReadTimeoutError +from pip._vendor.urllib3.exceptions import SSLError as _SSLError +from pip._vendor.urllib3.exceptions import ResponseError + +from .models import Response +from .compat import urlparse, basestring +from .utils import (DEFAULT_CA_BUNDLE_PATH, extract_zipped_paths, + get_encoding_from_headers, prepend_scheme_if_needed, + get_auth_from_url, urldefragauth, select_proxy) +from .structures import CaseInsensitiveDict +from .cookies import extract_cookies_to_jar +from .exceptions import (ConnectionError, ConnectTimeout, ReadTimeout, SSLError, + ProxyError, RetryError, InvalidSchema, InvalidProxyURL) +from .auth import _basic_auth_str + +try: + from pip._vendor.urllib3.contrib.socks import SOCKSProxyManager +except ImportError: + def SOCKSProxyManager(*args, **kwargs): + raise InvalidSchema("Missing dependencies for SOCKS support.") + +DEFAULT_POOLBLOCK = False +DEFAULT_POOLSIZE = 10 +DEFAULT_RETRIES = 0 +DEFAULT_POOL_TIMEOUT = None + + +class BaseAdapter(object): + """The Base Transport Adapter""" + + def __init__(self): + super(BaseAdapter, self).__init__() + + def send(self, request, stream=False, timeout=None, verify=True, + cert=None, proxies=None): + """Sends PreparedRequest object. Returns Response object. + + :param request: The :class:`PreparedRequest <PreparedRequest>` being sent. + :param stream: (optional) Whether to stream the request content. + :param timeout: (optional) How long to wait for the server to send + data before giving up, as a float, or a :ref:`(connect timeout, + read timeout) <timeouts>` tuple. + :type timeout: float or tuple + :param verify: (optional) Either a boolean, in which case it controls whether we verify + the server's TLS certificate, or a string, in which case it must be a path + to a CA bundle to use + :param cert: (optional) Any user-provided SSL certificate to be trusted. + :param proxies: (optional) The proxies dictionary to apply to the request. 
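+
+        A hedged sketch of a minimal subclass (``NullAdapter`` and its canned
+        204 response are illustrative only, not part of Requests)::
+
+            class NullAdapter(BaseAdapter):
+                def send(self, request, stream=False, timeout=None,
+                         verify=True, cert=None, proxies=None):
+                    # short-circuit the network with an empty response
+                    response = Response()
+                    response.status_code = 204
+                    response.request = request
+                    response.url = request.url
+                    return response
+
+                def close(self):
+                    pass
+
+            # mount on a session, e.g. session.mount('mock://', NullAdapter())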
+ """ + raise NotImplementedError + + def close(self): + """Cleans up adapter specific items.""" + raise NotImplementedError + + +class HTTPAdapter(BaseAdapter): + """The built-in HTTP Adapter for urllib3. + + Provides a general-case interface for Requests sessions to contact HTTP and + HTTPS urls by implementing the Transport Adapter interface. This class will + usually be created by the :class:`Session <Session>` class under the + covers. + + :param pool_connections: The number of urllib3 connection pools to cache. + :param pool_maxsize: The maximum number of connections to save in the pool. + :param max_retries: The maximum number of retries each connection + should attempt. Note, this applies only to failed DNS lookups, socket + connections and connection timeouts, never to requests where data has + made it to the server. By default, Requests does not retry failed + connections. If you need granular control over the conditions under + which we retry a request, import urllib3's ``Retry`` class and pass + that instead. + :param pool_block: Whether the connection pool should block for connections. + + Usage:: + + >>> import requests + >>> s = requests.Session() + >>> a = requests.adapters.HTTPAdapter(max_retries=3) + >>> s.mount('http://', a) + """ + __attrs__ = ['max_retries', 'config', '_pool_connections', '_pool_maxsize', + '_pool_block'] + + def __init__(self, pool_connections=DEFAULT_POOLSIZE, + pool_maxsize=DEFAULT_POOLSIZE, max_retries=DEFAULT_RETRIES, + pool_block=DEFAULT_POOLBLOCK): + if max_retries == DEFAULT_RETRIES: + self.max_retries = Retry(0, read=False) + else: + self.max_retries = Retry.from_int(max_retries) + self.config = {} + self.proxy_manager = {} + + super(HTTPAdapter, self).__init__() + + self._pool_connections = pool_connections + self._pool_maxsize = pool_maxsize + self._pool_block = pool_block + + self.init_poolmanager(pool_connections, pool_maxsize, block=pool_block) + + def __getstate__(self): + return dict((attr, getattr(self, attr, None)) for attr in + self.__attrs__) + + def __setstate__(self, state): + # Can't handle by adding 'proxy_manager' to self.__attrs__ because + # self.poolmanager uses a lambda function, which isn't pickleable. + self.proxy_manager = {} + self.config = {} + + for attr, value in state.items(): + setattr(self, attr, value) + + self.init_poolmanager(self._pool_connections, self._pool_maxsize, + block=self._pool_block) + + def init_poolmanager(self, connections, maxsize, block=DEFAULT_POOLBLOCK, **pool_kwargs): + """Initializes a urllib3 PoolManager. + + This method should not be called from user code, and is only + exposed for use when subclassing the + :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`. + + :param connections: The number of urllib3 connection pools to cache. + :param maxsize: The maximum number of connections to save in the pool. + :param block: Block when no free connections are available. + :param pool_kwargs: Extra keyword arguments used to initialize the Pool Manager. + """ + # save these values for pickling + self._pool_connections = connections + self._pool_maxsize = maxsize + self._pool_block = block + + self.poolmanager = PoolManager(num_pools=connections, maxsize=maxsize, + block=block, strict=True, **pool_kwargs) + + def proxy_manager_for(self, proxy, **proxy_kwargs): + """Return urllib3 ProxyManager for the given proxy. + + This method should not be called from user code, and is only + exposed for use when subclassing the + :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`. 
+ + :param proxy: The proxy to return a urllib3 ProxyManager for. + :param proxy_kwargs: Extra keyword arguments used to configure the Proxy Manager. + :returns: ProxyManager + :rtype: urllib3.ProxyManager + """ + if proxy in self.proxy_manager: + manager = self.proxy_manager[proxy] + elif proxy.lower().startswith('socks'): + username, password = get_auth_from_url(proxy) + manager = self.proxy_manager[proxy] = SOCKSProxyManager( + proxy, + username=username, + password=password, + num_pools=self._pool_connections, + maxsize=self._pool_maxsize, + block=self._pool_block, + **proxy_kwargs + ) + else: + proxy_headers = self.proxy_headers(proxy) + manager = self.proxy_manager[proxy] = proxy_from_url( + proxy, + proxy_headers=proxy_headers, + num_pools=self._pool_connections, + maxsize=self._pool_maxsize, + block=self._pool_block, + **proxy_kwargs) + + return manager + + def cert_verify(self, conn, url, verify, cert): + """Verify a SSL certificate. This method should not be called from user + code, and is only exposed for use when subclassing the + :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`. + + :param conn: The urllib3 connection object associated with the cert. + :param url: The requested URL. + :param verify: Either a boolean, in which case it controls whether we verify + the server's TLS certificate, or a string, in which case it must be a path + to a CA bundle to use + :param cert: The SSL certificate to verify. + """ + if url.lower().startswith('https') and verify: + + cert_loc = None + + # Allow self-specified cert location. + if verify is not True: + cert_loc = verify + + if not cert_loc: + cert_loc = extract_zipped_paths(DEFAULT_CA_BUNDLE_PATH) + + if not cert_loc or not os.path.exists(cert_loc): + raise IOError("Could not find a suitable TLS CA certificate bundle, " + "invalid path: {0}".format(cert_loc)) + + conn.cert_reqs = 'CERT_REQUIRED' + + if not os.path.isdir(cert_loc): + conn.ca_certs = cert_loc + else: + conn.ca_cert_dir = cert_loc + else: + conn.cert_reqs = 'CERT_NONE' + conn.ca_certs = None + conn.ca_cert_dir = None + + if cert: + if not isinstance(cert, basestring): + conn.cert_file = cert[0] + conn.key_file = cert[1] + else: + conn.cert_file = cert + conn.key_file = None + if conn.cert_file and not os.path.exists(conn.cert_file): + raise IOError("Could not find the TLS certificate file, " + "invalid path: {0}".format(conn.cert_file)) + if conn.key_file and not os.path.exists(conn.key_file): + raise IOError("Could not find the TLS key file, " + "invalid path: {0}".format(conn.key_file)) + + def build_response(self, req, resp): + """Builds a :class:`Response <requests.Response>` object from a urllib3 + response. This should not be called from user code, and is only exposed + for use when subclassing the + :class:`HTTPAdapter <requests.adapters.HTTPAdapter>` + + :param req: The :class:`PreparedRequest <PreparedRequest>` used to generate the response. + :param resp: The urllib3 response object. + :rtype: requests.Response + """ + response = Response() + + # Fallback to None if there's no status_code, for whatever reason. + response.status_code = getattr(resp, 'status', None) + + # Make headers case-insensitive. + response.headers = CaseInsensitiveDict(getattr(resp, 'headers', {})) + + # Set encoding. 
+ response.encoding = get_encoding_from_headers(response.headers) + response.raw = resp + response.reason = response.raw.reason + + if isinstance(req.url, bytes): + response.url = req.url.decode('utf-8') + else: + response.url = req.url + + # Add new cookies from the server. + extract_cookies_to_jar(response.cookies, req, resp) + + # Give the Response some context. + response.request = req + response.connection = self + + return response + + def get_connection(self, url, proxies=None): + """Returns a urllib3 connection for the given URL. This should not be + called from user code, and is only exposed for use when subclassing the + :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`. + + :param url: The URL to connect to. + :param proxies: (optional) A Requests-style dictionary of proxies used on this request. + :rtype: urllib3.ConnectionPool + """ + proxy = select_proxy(url, proxies) + + if proxy: + proxy = prepend_scheme_if_needed(proxy, 'http') + proxy_url = parse_url(proxy) + if not proxy_url.host: + raise InvalidProxyURL("Please check proxy URL. It is malformed" + " and could be missing the host.") + proxy_manager = self.proxy_manager_for(proxy) + conn = proxy_manager.connection_from_url(url) + else: + # Only scheme should be lower case + parsed = urlparse(url) + url = parsed.geturl() + conn = self.poolmanager.connection_from_url(url) + + return conn + + def close(self): + """Disposes of any internal state. + + Currently, this closes the PoolManager and any active ProxyManager, + which closes any pooled connections. + """ + self.poolmanager.clear() + for proxy in self.proxy_manager.values(): + proxy.clear() + + def request_url(self, request, proxies): + """Obtain the url to use when making the final request. + + If the message is being sent through a HTTP proxy, the full URL has to + be used. Otherwise, we should only use the path portion of the URL. + + This should not be called from user code, and is only exposed for use + when subclassing the + :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`. + + :param request: The :class:`PreparedRequest <PreparedRequest>` being sent. + :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs. + :rtype: str + """ + proxy = select_proxy(request.url, proxies) + scheme = urlparse(request.url).scheme + + is_proxied_http_request = (proxy and scheme != 'https') + using_socks_proxy = False + if proxy: + proxy_scheme = urlparse(proxy).scheme.lower() + using_socks_proxy = proxy_scheme.startswith('socks') + + url = request.path_url + if is_proxied_http_request and not using_socks_proxy: + url = urldefragauth(request.url) + + return url + + def add_headers(self, request, **kwargs): + """Add any headers needed by the connection. As of v2.0 this does + nothing by default, but is left for overriding by users that subclass + the :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`. + + This should not be called from user code, and is only exposed for use + when subclassing the + :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`. + + :param request: The :class:`PreparedRequest <PreparedRequest>` to add headers to. + :param kwargs: The keyword arguments from the call to send(). + """ + pass + + def proxy_headers(self, proxy): + """Returns a dictionary of the headers to add to any request sent + through a proxy. This works with urllib3 magic to ensure that they are + correctly sent to the proxy, rather than in a tunnelled request if + CONNECT is being used. 
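+
+        Illustrative result for a proxy URL carrying credentials (the URL and
+        credentials below are made-up values)::
+
+            adapter = HTTPAdapter()
+            adapter.proxy_headers('http://user:pass@10.10.1.10:3128')
+            # -> {'Proxy-Authorization': 'Basic dXNlcjpwYXNz'}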
+ + This should not be called from user code, and is only exposed for use + when subclassing the + :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`. + + :param proxies: The url of the proxy being used for this request. + :rtype: dict + """ + headers = {} + username, password = get_auth_from_url(proxy) + + if username: + headers['Proxy-Authorization'] = _basic_auth_str(username, + password) + + return headers + + def send(self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None): + """Sends PreparedRequest object. Returns Response object. + + :param request: The :class:`PreparedRequest <PreparedRequest>` being sent. + :param stream: (optional) Whether to stream the request content. + :param timeout: (optional) How long to wait for the server to send + data before giving up, as a float, or a :ref:`(connect timeout, + read timeout) <timeouts>` tuple. + :type timeout: float or tuple or urllib3 Timeout object + :param verify: (optional) Either a boolean, in which case it controls whether + we verify the server's TLS certificate, or a string, in which case it + must be a path to a CA bundle to use + :param cert: (optional) Any user-provided SSL certificate to be trusted. + :param proxies: (optional) The proxies dictionary to apply to the request. + :rtype: requests.Response + """ + + conn = self.get_connection(request.url, proxies) + + self.cert_verify(conn, request.url, verify, cert) + url = self.request_url(request, proxies) + self.add_headers(request, stream=stream, timeout=timeout, verify=verify, cert=cert, proxies=proxies) + + chunked = not (request.body is None or 'Content-Length' in request.headers) + + if isinstance(timeout, tuple): + try: + connect, read = timeout + timeout = TimeoutSauce(connect=connect, read=read) + except ValueError as e: + # this may raise a string formatting error. + err = ("Invalid timeout {0}. Pass a (connect, read) " + "timeout tuple, or a single float to set " + "both timeouts to the same value".format(timeout)) + raise ValueError(err) + elif isinstance(timeout, TimeoutSauce): + pass + else: + timeout = TimeoutSauce(connect=timeout, read=timeout) + + try: + if not chunked: + resp = conn.urlopen( + method=request.method, + url=url, + body=request.body, + headers=request.headers, + redirect=False, + assert_same_host=False, + preload_content=False, + decode_content=False, + retries=self.max_retries, + timeout=timeout + ) + + # Send the request. + else: + if hasattr(conn, 'proxy_pool'): + conn = conn.proxy_pool + + low_conn = conn._get_conn(timeout=DEFAULT_POOL_TIMEOUT) + + try: + low_conn.putrequest(request.method, + url, + skip_accept_encoding=True) + + for header, value in request.headers.items(): + low_conn.putheader(header, value) + + low_conn.endheaders() + + for i in request.body: + low_conn.send(hex(len(i))[2:].encode('utf-8')) + low_conn.send(b'\r\n') + low_conn.send(i) + low_conn.send(b'\r\n') + low_conn.send(b'0\r\n\r\n') + + # Receive the response from the server + try: + # For Python 2.7+ versions, use buffering of HTTP + # responses + r = low_conn.getresponse(buffering=True) + except TypeError: + # For compatibility with Python 2.6 versions and back + r = low_conn.getresponse() + + resp = HTTPResponse.from_httplib( + r, + pool=conn, + connection=low_conn, + preload_content=False, + decode_content=False + ) + except: + # If we hit any problems here, clean up the connection. + # Then, reraise so that we can handle the actual exception. 
+ low_conn.close() + raise + + except (ProtocolError, socket.error) as err: + raise ConnectionError(err, request=request) + + except MaxRetryError as e: + if isinstance(e.reason, ConnectTimeoutError): + # TODO: Remove this in 3.0.0: see #2811 + if not isinstance(e.reason, NewConnectionError): + raise ConnectTimeout(e, request=request) + + if isinstance(e.reason, ResponseError): + raise RetryError(e, request=request) + + if isinstance(e.reason, _ProxyError): + raise ProxyError(e, request=request) + + if isinstance(e.reason, _SSLError): + # This branch is for urllib3 v1.22 and later. + raise SSLError(e, request=request) + + raise ConnectionError(e, request=request) + + except ClosedPoolError as e: + raise ConnectionError(e, request=request) + + except _ProxyError as e: + raise ProxyError(e) + + except (_SSLError, _HTTPError) as e: + if isinstance(e, _SSLError): + # This branch is for urllib3 versions earlier than v1.22 + raise SSLError(e, request=request) + elif isinstance(e, ReadTimeoutError): + raise ReadTimeout(e, request=request) + else: + raise + + return self.build_response(request, resp) diff --git a/env/lib/python3.6/site-packages/pip/_vendor/requests/api.py b/env/lib/python3.6/site-packages/pip/_vendor/requests/api.py new file mode 100644 index 0000000..a2cc84d --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/requests/api.py @@ -0,0 +1,152 @@ +# -*- coding: utf-8 -*- + +""" +requests.api +~~~~~~~~~~~~ + +This module implements the Requests API. + +:copyright: (c) 2012 by Kenneth Reitz. +:license: Apache2, see LICENSE for more details. +""" + +from . import sessions + + +def request(method, url, **kwargs): + """Constructs and sends a :class:`Request <Request>`. + + :param method: method for the new :class:`Request` object. + :param url: URL for the new :class:`Request` object. + :param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`. + :param data: (optional) Dictionary or list of tuples ``[(key, value)]`` (will be form-encoded), bytes, or file-like object to send in the body of the :class:`Request`. + :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`. + :param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`. + :param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`. + :param files: (optional) Dictionary of ``'name': file-like-objects`` (or ``{'name': file-tuple}``) for multipart encoding upload. + ``file-tuple`` can be a 2-tuple ``('filename', fileobj)``, 3-tuple ``('filename', fileobj, 'content_type')`` + or a 4-tuple ``('filename', fileobj, 'content_type', custom_headers)``, where ``'content-type'`` is a string + defining the content type of the given file and ``custom_headers`` a dict-like object containing additional headers + to add for the file. + :param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth. + :param timeout: (optional) How many seconds to wait for the server to send data + before giving up, as a float, or a :ref:`(connect timeout, read + timeout) <timeouts>` tuple. + :type timeout: float or tuple + :param allow_redirects: (optional) Boolean. Enable/disable GET/OPTIONS/POST/PUT/PATCH/DELETE/HEAD redirection. Defaults to ``True``. + :type allow_redirects: bool + :param proxies: (optional) Dictionary mapping protocol to the URL of the proxy. 
+ :param verify: (optional) Either a boolean, in which case it controls whether we verify + the server's TLS certificate, or a string, in which case it must be a path + to a CA bundle to use. Defaults to ``True``. + :param stream: (optional) if ``False``, the response content will be immediately downloaded. + :param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair. + :return: :class:`Response <Response>` object + :rtype: requests.Response + + Usage:: + + >>> import requests + >>> req = requests.request('GET', 'http://httpbin.org/get') + <Response [200]> + """ + + # By using the 'with' statement we are sure the session is closed, thus we + # avoid leaving sockets open which can trigger a ResourceWarning in some + # cases, and look like a memory leak in others. + with sessions.Session() as session: + return session.request(method=method, url=url, **kwargs) + + +def get(url, params=None, **kwargs): + r"""Sends a GET request. + + :param url: URL for the new :class:`Request` object. + :param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :return: :class:`Response <Response>` object + :rtype: requests.Response + """ + + kwargs.setdefault('allow_redirects', True) + return request('get', url, params=params, **kwargs) + + +def options(url, **kwargs): + r"""Sends an OPTIONS request. + + :param url: URL for the new :class:`Request` object. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :return: :class:`Response <Response>` object + :rtype: requests.Response + """ + + kwargs.setdefault('allow_redirects', True) + return request('options', url, **kwargs) + + +def head(url, **kwargs): + r"""Sends a HEAD request. + + :param url: URL for the new :class:`Request` object. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :return: :class:`Response <Response>` object + :rtype: requests.Response + """ + + kwargs.setdefault('allow_redirects', False) + return request('head', url, **kwargs) + + +def post(url, data=None, json=None, **kwargs): + r"""Sends a POST request. + + :param url: URL for the new :class:`Request` object. + :param data: (optional) Dictionary (will be form-encoded), bytes, or file-like object to send in the body of the :class:`Request`. + :param json: (optional) json data to send in the body of the :class:`Request`. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :return: :class:`Response <Response>` object + :rtype: requests.Response + """ + + return request('post', url, data=data, json=json, **kwargs) + + +def put(url, data=None, **kwargs): + r"""Sends a PUT request. + + :param url: URL for the new :class:`Request` object. + :param data: (optional) Dictionary (will be form-encoded), bytes, or file-like object to send in the body of the :class:`Request`. + :param json: (optional) json data to send in the body of the :class:`Request`. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :return: :class:`Response <Response>` object + :rtype: requests.Response + """ + + return request('put', url, data=data, **kwargs) + + +def patch(url, data=None, **kwargs): + r"""Sends a PATCH request. + + :param url: URL for the new :class:`Request` object. + :param data: (optional) Dictionary (will be form-encoded), bytes, or file-like object to send in the body of the :class:`Request`. + :param json: (optional) json data to send in the body of the :class:`Request`. 
+ :param \*\*kwargs: Optional arguments that ``request`` takes. + :return: :class:`Response <Response>` object + :rtype: requests.Response + """ + + return request('patch', url, data=data, **kwargs) + + +def delete(url, **kwargs): + r"""Sends a DELETE request. + + :param url: URL for the new :class:`Request` object. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :return: :class:`Response <Response>` object + :rtype: requests.Response + """ + + return request('delete', url, **kwargs) diff --git a/env/lib/python3.6/site-packages/pip/_vendor/requests/auth.py b/env/lib/python3.6/site-packages/pip/_vendor/requests/auth.py new file mode 100644 index 0000000..4ae4594 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/requests/auth.py @@ -0,0 +1,305 @@ +# -*- coding: utf-8 -*- + +""" +requests.auth +~~~~~~~~~~~~~ + +This module contains the authentication handlers for Requests. +""" + +import os +import re +import time +import hashlib +import threading +import warnings + +from base64 import b64encode + +from .compat import urlparse, str, basestring +from .cookies import extract_cookies_to_jar +from ._internal_utils import to_native_string +from .utils import parse_dict_header + +CONTENT_TYPE_FORM_URLENCODED = 'application/x-www-form-urlencoded' +CONTENT_TYPE_MULTI_PART = 'multipart/form-data' + + +def _basic_auth_str(username, password): + """Returns a Basic Auth string.""" + + # "I want us to put a big-ol' comment on top of it that + # says that this behaviour is dumb but we need to preserve + # it because people are relying on it." + # - Lukasa + # + # These are here solely to maintain backwards compatibility + # for things like ints. This will be removed in 3.0.0. + if not isinstance(username, basestring): + warnings.warn( + "Non-string usernames will no longer be supported in Requests " + "3.0.0. Please convert the object you've passed in ({0!r}) to " + "a string or bytes object in the near future to avoid " + "problems.".format(username), + category=DeprecationWarning, + ) + username = str(username) + + if not isinstance(password, basestring): + warnings.warn( + "Non-string passwords will no longer be supported in Requests " + "3.0.0. 
Please convert the object you've passed in ({0!r}) to " + "a string or bytes object in the near future to avoid " + "problems.".format(password), + category=DeprecationWarning, + ) + password = str(password) + # -- End Removal -- + + if isinstance(username, str): + username = username.encode('latin1') + + if isinstance(password, str): + password = password.encode('latin1') + + authstr = 'Basic ' + to_native_string( + b64encode(b':'.join((username, password))).strip() + ) + + return authstr + + +class AuthBase(object): + """Base class that all auth implementations derive from""" + + def __call__(self, r): + raise NotImplementedError('Auth hooks must be callable.') + + +class HTTPBasicAuth(AuthBase): + """Attaches HTTP Basic Authentication to the given Request object.""" + + def __init__(self, username, password): + self.username = username + self.password = password + + def __eq__(self, other): + return all([ + self.username == getattr(other, 'username', None), + self.password == getattr(other, 'password', None) + ]) + + def __ne__(self, other): + return not self == other + + def __call__(self, r): + r.headers['Authorization'] = _basic_auth_str(self.username, self.password) + return r + + +class HTTPProxyAuth(HTTPBasicAuth): + """Attaches HTTP Proxy Authentication to a given Request object.""" + + def __call__(self, r): + r.headers['Proxy-Authorization'] = _basic_auth_str(self.username, self.password) + return r + + +class HTTPDigestAuth(AuthBase): + """Attaches HTTP Digest Authentication to the given Request object.""" + + def __init__(self, username, password): + self.username = username + self.password = password + # Keep state in per-thread local storage + self._thread_local = threading.local() + + def init_per_thread_state(self): + # Ensure state is initialized just once per-thread + if not hasattr(self._thread_local, 'init'): + self._thread_local.init = True + self._thread_local.last_nonce = '' + self._thread_local.nonce_count = 0 + self._thread_local.chal = {} + self._thread_local.pos = None + self._thread_local.num_401_calls = None + + def build_digest_header(self, method, url): + """ + :rtype: str + """ + + realm = self._thread_local.chal['realm'] + nonce = self._thread_local.chal['nonce'] + qop = self._thread_local.chal.get('qop') + algorithm = self._thread_local.chal.get('algorithm') + opaque = self._thread_local.chal.get('opaque') + hash_utf8 = None + + if algorithm is None: + _algorithm = 'MD5' + else: + _algorithm = algorithm.upper() + # lambdas assume digest modules are imported at the top level + if _algorithm == 'MD5' or _algorithm == 'MD5-SESS': + def md5_utf8(x): + if isinstance(x, str): + x = x.encode('utf-8') + return hashlib.md5(x).hexdigest() + hash_utf8 = md5_utf8 + elif _algorithm == 'SHA': + def sha_utf8(x): + if isinstance(x, str): + x = x.encode('utf-8') + return hashlib.sha1(x).hexdigest() + hash_utf8 = sha_utf8 + elif _algorithm == 'SHA-256': + def sha256_utf8(x): + if isinstance(x, str): + x = x.encode('utf-8') + return hashlib.sha256(x).hexdigest() + hash_utf8 = sha256_utf8 + elif _algorithm == 'SHA-512': + def sha512_utf8(x): + if isinstance(x, str): + x = x.encode('utf-8') + return hashlib.sha512(x).hexdigest() + hash_utf8 = sha512_utf8 + + KD = lambda s, d: hash_utf8("%s:%s" % (s, d)) + + if hash_utf8 is None: + return None + + # XXX not implemented yet + entdig = None + p_parsed = urlparse(url) + #: path is request-uri defined in RFC 2616 which should not be empty + path = p_parsed.path or "/" + if p_parsed.query: + path += '?' 
+ p_parsed.query + + A1 = '%s:%s:%s' % (self.username, realm, self.password) + A2 = '%s:%s' % (method, path) + + HA1 = hash_utf8(A1) + HA2 = hash_utf8(A2) + + if nonce == self._thread_local.last_nonce: + self._thread_local.nonce_count += 1 + else: + self._thread_local.nonce_count = 1 + ncvalue = '%08x' % self._thread_local.nonce_count + s = str(self._thread_local.nonce_count).encode('utf-8') + s += nonce.encode('utf-8') + s += time.ctime().encode('utf-8') + s += os.urandom(8) + + cnonce = (hashlib.sha1(s).hexdigest()[:16]) + if _algorithm == 'MD5-SESS': + HA1 = hash_utf8('%s:%s:%s' % (HA1, nonce, cnonce)) + + if not qop: + respdig = KD(HA1, "%s:%s" % (nonce, HA2)) + elif qop == 'auth' or 'auth' in qop.split(','): + noncebit = "%s:%s:%s:%s:%s" % ( + nonce, ncvalue, cnonce, 'auth', HA2 + ) + respdig = KD(HA1, noncebit) + else: + # XXX handle auth-int. + return None + + self._thread_local.last_nonce = nonce + + # XXX should the partial digests be encoded too? + base = 'username="%s", realm="%s", nonce="%s", uri="%s", ' \ + 'response="%s"' % (self.username, realm, nonce, path, respdig) + if opaque: + base += ', opaque="%s"' % opaque + if algorithm: + base += ', algorithm="%s"' % algorithm + if entdig: + base += ', digest="%s"' % entdig + if qop: + base += ', qop="auth", nc=%s, cnonce="%s"' % (ncvalue, cnonce) + + return 'Digest %s' % (base) + + def handle_redirect(self, r, **kwargs): + """Reset num_401_calls counter on redirects.""" + if r.is_redirect: + self._thread_local.num_401_calls = 1 + + def handle_401(self, r, **kwargs): + """ + Takes the given response and tries digest-auth, if needed. + + :rtype: requests.Response + """ + + # If response is not 4xx, do not auth + # See https://github.com/requests/requests/issues/3772 + if not 400 <= r.status_code < 500: + self._thread_local.num_401_calls = 1 + return r + + if self._thread_local.pos is not None: + # Rewind the file position indicator of the body to where + # it was to resend the request. + r.request.body.seek(self._thread_local.pos) + s_auth = r.headers.get('www-authenticate', '') + + if 'digest' in s_auth.lower() and self._thread_local.num_401_calls < 2: + + self._thread_local.num_401_calls += 1 + pat = re.compile(r'digest ', flags=re.IGNORECASE) + self._thread_local.chal = parse_dict_header(pat.sub('', s_auth, count=1)) + + # Consume content and release the original connection + # to allow our new request to reuse the same one. + r.content + r.close() + prep = r.request.copy() + extract_cookies_to_jar(prep._cookies, r.request, r.raw) + prep.prepare_cookies(prep._cookies) + + prep.headers['Authorization'] = self.build_digest_header( + prep.method, prep.url) + _r = r.connection.send(prep, **kwargs) + _r.history.append(r) + _r.request = prep + + return _r + + self._thread_local.num_401_calls = 1 + return r + + def __call__(self, r): + # Initialize per-thread state, if needed + self.init_per_thread_state() + # If we have a saved nonce, skip the 401 + if self._thread_local.last_nonce: + r.headers['Authorization'] = self.build_digest_header(r.method, r.url) + try: + self._thread_local.pos = r.body.tell() + except AttributeError: + # In the case of HTTPDigestAuth being reused and the body of + # the previous request was a file-like object, pos has the + # file position of the previous body. Ensure it's set to + # None. 
+ self._thread_local.pos = None + r.register_hook('response', self.handle_401) + r.register_hook('response', self.handle_redirect) + self._thread_local.num_401_calls = 1 + + return r + + def __eq__(self, other): + return all([ + self.username == getattr(other, 'username', None), + self.password == getattr(other, 'password', None) + ]) + + def __ne__(self, other): + return not self == other diff --git a/env/lib/python3.6/site-packages/pip/_vendor/requests/certs.py b/env/lib/python3.6/site-packages/pip/_vendor/requests/certs.py new file mode 100644 index 0000000..06a594e --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/requests/certs.py @@ -0,0 +1,18 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +""" +requests.certs +~~~~~~~~~~~~~~ + +This module returns the preferred default CA certificate bundle. There is +only one — the one from the certifi package. + +If you are packaging Requests, e.g., for a Linux distribution or a managed +environment, you can change the definition of where() to return a separately +packaged CA bundle. +""" +from pip._vendor.certifi import where + +if __name__ == '__main__': + print(where()) diff --git a/env/lib/python3.6/site-packages/pip/_vendor/requests/compat.py b/env/lib/python3.6/site-packages/pip/_vendor/requests/compat.py new file mode 100644 index 0000000..ec5d305 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/requests/compat.py @@ -0,0 +1,75 @@ +# -*- coding: utf-8 -*- + +""" +requests.compat +~~~~~~~~~~~~~~~ + +This module handles import compatibility issues between Python 2 and +Python 3. +""" + +from pip._vendor import chardet + +import sys + +# ------- +# Pythons +# ------- + +# Syntax sugar. +_ver = sys.version_info + +#: Python 2.x? +is_py2 = (_ver[0] == 2) + +#: Python 3.x? +is_py3 = (_ver[0] == 3) + +# Note: We've patched out simplejson support in pip because it prevents +# upgrading simplejson on Windows. +# try: +# import simplejson as json +# except (ImportError, SyntaxError): +# # simplejson does not support Python 3.2, it throws a SyntaxError +# # because of u'...' Unicode literals. 
+import json + +# --------- +# Specifics +# --------- + +if is_py2: + from urllib import ( + quote, unquote, quote_plus, unquote_plus, urlencode, getproxies, + proxy_bypass, proxy_bypass_environment, getproxies_environment) + from urlparse import urlparse, urlunparse, urljoin, urlsplit, urldefrag + from urllib2 import parse_http_list + import cookielib + from Cookie import Morsel + from StringIO import StringIO + from collections import Callable, Mapping, MutableMapping + + from pip._vendor.urllib3.packages.ordered_dict import OrderedDict + + builtin_str = str + bytes = str + str = unicode + basestring = basestring + numeric_types = (int, long, float) + integer_types = (int, long) + +elif is_py3: + from urllib.parse import urlparse, urlunparse, urljoin, urlsplit, urlencode, quote, unquote, quote_plus, unquote_plus, urldefrag + from urllib.request import parse_http_list, getproxies, proxy_bypass, proxy_bypass_environment, getproxies_environment + from http import cookiejar as cookielib + from http.cookies import Morsel + from io import StringIO + from collections import OrderedDict + from collections.abc import Callable, Mapping, MutableMapping + + builtin_str = str + str = str + bytes = bytes + basestring = (str, bytes) + numeric_types = (int, float) + integer_types = (int,) diff --git a/env/lib/python3.6/site-packages/pip/_vendor/requests/cookies.py b/env/lib/python3.6/site-packages/pip/_vendor/requests/cookies.py new file mode 100644 index 0000000..50883a8 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/requests/cookies.py @@ -0,0 +1,546 @@ +# -*- coding: utf-8 -*- + +""" +requests.cookies +~~~~~~~~~~~~~~~~ + +Compatibility code to be able to use `cookielib.CookieJar` with requests. + +requests.utils imports from here, so be careful with imports. +""" + +import copy +import time +import calendar + +from ._internal_utils import to_native_string +from .compat import cookielib, urlparse, urlunparse, Morsel, MutableMapping + +try: + import threading +except ImportError: + import dummy_threading as threading + + +class MockRequest(object): + """Wraps a `requests.Request` to mimic a `urllib2.Request`. + + The code in `cookielib.CookieJar` expects this interface in order to correctly + manage cookie policies, i.e., determine whether a cookie can be set, given the + domains of the request and the cookie. + + The original request object is read-only. The client is responsible for collecting + the new headers via `get_new_headers()` and interpreting them appropriately. You + probably want `get_cookie_header`, defined below. 
+ """ + + def __init__(self, request): + self._r = request + self._new_headers = {} + self.type = urlparse(self._r.url).scheme + + def get_type(self): + return self.type + + def get_host(self): + return urlparse(self._r.url).netloc + + def get_origin_req_host(self): + return self.get_host() + + def get_full_url(self): + # Only return the response's URL if the user hadn't set the Host + # header + if not self._r.headers.get('Host'): + return self._r.url + # If they did set it, retrieve it and reconstruct the expected domain + host = to_native_string(self._r.headers['Host'], encoding='utf-8') + parsed = urlparse(self._r.url) + # Reconstruct the URL as we expect it + return urlunparse([ + parsed.scheme, host, parsed.path, parsed.params, parsed.query, + parsed.fragment + ]) + + def is_unverifiable(self): + return True + + def has_header(self, name): + return name in self._r.headers or name in self._new_headers + + def get_header(self, name, default=None): + return self._r.headers.get(name, self._new_headers.get(name, default)) + + def add_header(self, key, val): + """cookielib has no legitimate use for this method; add it back if you find one.""" + raise NotImplementedError("Cookie headers should be added with add_unredirected_header()") + + def add_unredirected_header(self, name, value): + self._new_headers[name] = value + + def get_new_headers(self): + return self._new_headers + + @property + def unverifiable(self): + return self.is_unverifiable() + + @property + def origin_req_host(self): + return self.get_origin_req_host() + + @property + def host(self): + return self.get_host() + + +class MockResponse(object): + """Wraps a `httplib.HTTPMessage` to mimic a `urllib.addinfourl`. + + ...what? Basically, expose the parsed HTTP headers from the server response + the way `cookielib` expects to see them. + """ + + def __init__(self, headers): + """Make a MockResponse for `cookielib` to read. + + :param headers: a httplib.HTTPMessage or analogous carrying the headers + """ + self._headers = headers + + def info(self): + return self._headers + + def getheaders(self, name): + self._headers.getheaders(name) + + +def extract_cookies_to_jar(jar, request, response): + """Extract the cookies from the response into a CookieJar. + + :param jar: cookielib.CookieJar (not necessarily a RequestsCookieJar) + :param request: our own requests.Request object + :param response: urllib3.HTTPResponse object + """ + if not (hasattr(response, '_original_response') and + response._original_response): + return + # the _original_response field is the wrapped httplib.HTTPResponse object, + req = MockRequest(request) + # pull out the HTTPMessage with the headers and put it in the mock: + res = MockResponse(response._original_response.msg) + jar.extract_cookies(res, req) + + +def get_cookie_header(jar, request): + """ + Produce an appropriate Cookie header string to be sent with `request`, or None. + + :rtype: str + """ + r = MockRequest(request) + jar.add_cookie_header(r) + return r.get_new_headers().get('Cookie') + + +def remove_cookie_by_name(cookiejar, name, domain=None, path=None): + """Unsets a cookie by name, by default over all domains and paths. + + Wraps CookieJar.clear(), is O(n). 
+ """ + clearables = [] + for cookie in cookiejar: + if cookie.name != name: + continue + if domain is not None and domain != cookie.domain: + continue + if path is not None and path != cookie.path: + continue + clearables.append((cookie.domain, cookie.path, cookie.name)) + + for domain, path, name in clearables: + cookiejar.clear(domain, path, name) + + +class CookieConflictError(RuntimeError): + """There are two cookies that meet the criteria specified in the cookie jar. + Use .get and .set and include domain and path args in order to be more specific. + """ + + +class RequestsCookieJar(cookielib.CookieJar, MutableMapping): + """Compatibility class; is a cookielib.CookieJar, but exposes a dict + interface. + + This is the CookieJar we create by default for requests and sessions that + don't specify one, since some clients may expect response.cookies and + session.cookies to support dict operations. + + Requests does not use the dict interface internally; it's just for + compatibility with external client code. All requests code should work + out of the box with externally provided instances of ``CookieJar``, e.g. + ``LWPCookieJar`` and ``FileCookieJar``. + + Unlike a regular CookieJar, this class is pickleable. + + .. warning:: dictionary operations that are normally O(1) may be O(n). + """ + + def get(self, name, default=None, domain=None, path=None): + """Dict-like get() that also supports optional domain and path args in + order to resolve naming collisions from using one cookie jar over + multiple domains. + + .. warning:: operation is O(n), not O(1). + """ + try: + return self._find_no_duplicates(name, domain, path) + except KeyError: + return default + + def set(self, name, value, **kwargs): + """Dict-like set() that also supports optional domain and path args in + order to resolve naming collisions from using one cookie jar over + multiple domains. + """ + # support client code that unsets cookies by assignment of a None value: + if value is None: + remove_cookie_by_name(self, name, domain=kwargs.get('domain'), path=kwargs.get('path')) + return + + if isinstance(value, Morsel): + c = morsel_to_cookie(value) + else: + c = create_cookie(name, value, **kwargs) + self.set_cookie(c) + return c + + def iterkeys(self): + """Dict-like iterkeys() that returns an iterator of names of cookies + from the jar. + + .. seealso:: itervalues() and iteritems(). + """ + for cookie in iter(self): + yield cookie.name + + def keys(self): + """Dict-like keys() that returns a list of names of cookies from the + jar. + + .. seealso:: values() and items(). + """ + return list(self.iterkeys()) + + def itervalues(self): + """Dict-like itervalues() that returns an iterator of values of cookies + from the jar. + + .. seealso:: iterkeys() and iteritems(). + """ + for cookie in iter(self): + yield cookie.value + + def values(self): + """Dict-like values() that returns a list of values of cookies from the + jar. + + .. seealso:: keys() and items(). + """ + return list(self.itervalues()) + + def iteritems(self): + """Dict-like iteritems() that returns an iterator of name-value tuples + from the jar. + + .. seealso:: iterkeys() and itervalues(). + """ + for cookie in iter(self): + yield cookie.name, cookie.value + + def items(self): + """Dict-like items() that returns a list of name-value tuples from the + jar. Allows client-code to call ``dict(RequestsCookieJar)`` and get a + vanilla python dict of key value pairs. + + .. seealso:: keys() and values(). 
+ """ + return list(self.iteritems()) + + def list_domains(self): + """Utility method to list all the domains in the jar.""" + domains = [] + for cookie in iter(self): + if cookie.domain not in domains: + domains.append(cookie.domain) + return domains + + def list_paths(self): + """Utility method to list all the paths in the jar.""" + paths = [] + for cookie in iter(self): + if cookie.path not in paths: + paths.append(cookie.path) + return paths + + def multiple_domains(self): + """Returns True if there are multiple domains in the jar. + Returns False otherwise. + + :rtype: bool + """ + domains = [] + for cookie in iter(self): + if cookie.domain is not None and cookie.domain in domains: + return True + domains.append(cookie.domain) + return False # there is only one domain in jar + + def get_dict(self, domain=None, path=None): + """Takes as an argument an optional domain and path and returns a plain + old Python dict of name-value pairs of cookies that meet the + requirements. + + :rtype: dict + """ + dictionary = {} + for cookie in iter(self): + if ( + (domain is None or cookie.domain == domain) and + (path is None or cookie.path == path) + ): + dictionary[cookie.name] = cookie.value + return dictionary + + def __contains__(self, name): + try: + return super(RequestsCookieJar, self).__contains__(name) + except CookieConflictError: + return True + + def __getitem__(self, name): + """Dict-like __getitem__() for compatibility with client code. Throws + exception if there are more than one cookie with name. In that case, + use the more explicit get() method instead. + + .. warning:: operation is O(n), not O(1). + """ + return self._find_no_duplicates(name) + + def __setitem__(self, name, value): + """Dict-like __setitem__ for compatibility with client code. Throws + exception if there is already a cookie of that name in the jar. In that + case, use the more explicit set() method instead. + """ + self.set(name, value) + + def __delitem__(self, name): + """Deletes a cookie given a name. Wraps ``cookielib.CookieJar``'s + ``remove_cookie_by_name()``. + """ + remove_cookie_by_name(self, name) + + def set_cookie(self, cookie, *args, **kwargs): + if hasattr(cookie.value, 'startswith') and cookie.value.startswith('"') and cookie.value.endswith('"'): + cookie.value = cookie.value.replace('\\"', '') + return super(RequestsCookieJar, self).set_cookie(cookie, *args, **kwargs) + + def update(self, other): + """Updates this jar with cookies from another CookieJar or dict-like""" + if isinstance(other, cookielib.CookieJar): + for cookie in other: + self.set_cookie(copy.copy(cookie)) + else: + super(RequestsCookieJar, self).update(other) + + def _find(self, name, domain=None, path=None): + """Requests uses this method internally to get cookie values. + + If there are conflicting cookies, _find arbitrarily chooses one. + See _find_no_duplicates if you want an exception thrown if there are + conflicting cookies. + + :param name: a string containing name of cookie + :param domain: (optional) string containing domain of cookie + :param path: (optional) string containing path of cookie + :return: cookie.value + """ + for cookie in iter(self): + if cookie.name == name: + if domain is None or cookie.domain == domain: + if path is None or cookie.path == path: + return cookie.value + + raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path)) + + def _find_no_duplicates(self, name, domain=None, path=None): + """Both ``__get_item__`` and ``get`` call this function: it's never + used elsewhere in Requests. 
+ + :param name: a string containing name of cookie + :param domain: (optional) string containing domain of cookie + :param path: (optional) string containing path of cookie + :raises KeyError: if cookie is not found + :raises CookieConflictError: if there are multiple cookies + that match name and optionally domain and path + :return: cookie.value + """ + toReturn = None + for cookie in iter(self): + if cookie.name == name: + if domain is None or cookie.domain == domain: + if path is None or cookie.path == path: + if toReturn is not None: # if there are multiple cookies that meet passed in criteria + raise CookieConflictError('There are multiple cookies with name, %r' % (name)) + toReturn = cookie.value # we will eventually return this as long as no cookie conflict + + if toReturn: + return toReturn + raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path)) + + def __getstate__(self): + """Unlike a normal CookieJar, this class is pickleable.""" + state = self.__dict__.copy() + # remove the unpickleable RLock object + state.pop('_cookies_lock') + return state + + def __setstate__(self, state): + """Unlike a normal CookieJar, this class is pickleable.""" + self.__dict__.update(state) + if '_cookies_lock' not in self.__dict__: + self._cookies_lock = threading.RLock() + + def copy(self): + """Return a copy of this RequestsCookieJar.""" + new_cj = RequestsCookieJar() + new_cj.set_policy(self.get_policy()) + new_cj.update(self) + return new_cj + + def get_policy(self): + """Return the CookiePolicy instance used.""" + return self._policy + + +def _copy_cookie_jar(jar): + if jar is None: + return None + + if hasattr(jar, 'copy'): + # We're dealing with an instance of RequestsCookieJar + return jar.copy() + # We're dealing with a generic CookieJar instance + new_jar = copy.copy(jar) + new_jar.clear() + for cookie in jar: + new_jar.set_cookie(copy.copy(cookie)) + return new_jar + + +def create_cookie(name, value, **kwargs): + """Make a cookie from underspecified parameters. + + By default, the pair of `name` and `value` will be set for the domain '' + and sent on every request (this is sometimes called a "supercookie"). 
+ """ + result = dict( + version=0, + name=name, + value=value, + port=None, + domain='', + path='/', + secure=False, + expires=None, + discard=True, + comment=None, + comment_url=None, + rest={'HttpOnly': None}, + rfc2109=False,) + + badargs = set(kwargs) - set(result) + if badargs: + err = 'create_cookie() got unexpected keyword arguments: %s' + raise TypeError(err % list(badargs)) + + result.update(kwargs) + result['port_specified'] = bool(result['port']) + result['domain_specified'] = bool(result['domain']) + result['domain_initial_dot'] = result['domain'].startswith('.') + result['path_specified'] = bool(result['path']) + + return cookielib.Cookie(**result) + + +def morsel_to_cookie(morsel): + """Convert a Morsel object into a Cookie containing the one k/v pair.""" + + expires = None + if morsel['max-age']: + try: + expires = int(time.time() + int(morsel['max-age'])) + except ValueError: + raise TypeError('max-age: %s must be integer' % morsel['max-age']) + elif morsel['expires']: + time_template = '%a, %d-%b-%Y %H:%M:%S GMT' + expires = calendar.timegm( + time.strptime(morsel['expires'], time_template) + ) + return create_cookie( + comment=morsel['comment'], + comment_url=bool(morsel['comment']), + discard=False, + domain=morsel['domain'], + expires=expires, + name=morsel.key, + path=morsel['path'], + port=None, + rest={'HttpOnly': morsel['httponly']}, + rfc2109=False, + secure=bool(morsel['secure']), + value=morsel.value, + version=morsel['version'] or 0, + ) + + +def cookiejar_from_dict(cookie_dict, cookiejar=None, overwrite=True): + """Returns a CookieJar from a key/value dictionary. + + :param cookie_dict: Dict of key/values to insert into CookieJar. + :param cookiejar: (optional) A cookiejar to add the cookies to. + :param overwrite: (optional) If False, will not replace cookies + already in the jar with new ones. + """ + if cookiejar is None: + cookiejar = RequestsCookieJar() + + if cookie_dict is not None: + names_from_jar = [cookie.name for cookie in cookiejar] + for name in cookie_dict: + if overwrite or (name not in names_from_jar): + cookiejar.set_cookie(create_cookie(name, cookie_dict[name])) + + return cookiejar + + +def merge_cookies(cookiejar, cookies): + """Add cookies to cookiejar and returns a merged CookieJar. + + :param cookiejar: CookieJar object to add the cookies to. + :param cookies: Dictionary or CookieJar object to be added. + """ + if not isinstance(cookiejar, cookielib.CookieJar): + raise ValueError('You can only merge into CookieJar') + + if isinstance(cookies, dict): + cookiejar = cookiejar_from_dict( + cookies, cookiejar=cookiejar, overwrite=False) + elif isinstance(cookies, cookielib.CookieJar): + try: + cookiejar.update(cookies) + except AttributeError: + for cookie_in_jar in cookies: + cookiejar.set_cookie(cookie_in_jar) + + return cookiejar diff --git a/env/lib/python3.6/site-packages/pip/_vendor/requests/exceptions.py b/env/lib/python3.6/site-packages/pip/_vendor/requests/exceptions.py new file mode 100644 index 0000000..a91e1fd --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/requests/exceptions.py @@ -0,0 +1,126 @@ +# -*- coding: utf-8 -*- + +""" +requests.exceptions +~~~~~~~~~~~~~~~~~~~ + +This module contains the set of Requests' exceptions. +""" +from pip._vendor.urllib3.exceptions import HTTPError as BaseHTTPError + + +class RequestException(IOError): + """There was an ambiguous exception that occurred while handling your + request. 
+ """ + + def __init__(self, *args, **kwargs): + """Initialize RequestException with `request` and `response` objects.""" + response = kwargs.pop('response', None) + self.response = response + self.request = kwargs.pop('request', None) + if (response is not None and not self.request and + hasattr(response, 'request')): + self.request = self.response.request + super(RequestException, self).__init__(*args, **kwargs) + + +class HTTPError(RequestException): + """An HTTP error occurred.""" + + +class ConnectionError(RequestException): + """A Connection error occurred.""" + + +class ProxyError(ConnectionError): + """A proxy error occurred.""" + + +class SSLError(ConnectionError): + """An SSL error occurred.""" + + +class Timeout(RequestException): + """The request timed out. + + Catching this error will catch both + :exc:`~requests.exceptions.ConnectTimeout` and + :exc:`~requests.exceptions.ReadTimeout` errors. + """ + + +class ConnectTimeout(ConnectionError, Timeout): + """The request timed out while trying to connect to the remote server. + + Requests that produced this error are safe to retry. + """ + + +class ReadTimeout(Timeout): + """The server did not send any data in the allotted amount of time.""" + + +class URLRequired(RequestException): + """A valid URL is required to make a request.""" + + +class TooManyRedirects(RequestException): + """Too many redirects.""" + + +class MissingSchema(RequestException, ValueError): + """The URL schema (e.g. http or https) is missing.""" + + +class InvalidSchema(RequestException, ValueError): + """See defaults.py for valid schemas.""" + + +class InvalidURL(RequestException, ValueError): + """The URL provided was somehow invalid.""" + + +class InvalidHeader(RequestException, ValueError): + """The header value provided was somehow invalid.""" + + +class InvalidProxyURL(InvalidURL): + """The proxy URL provided is invalid.""" + + +class ChunkedEncodingError(RequestException): + """The server declared chunked encoding but sent an invalid chunk.""" + + +class ContentDecodingError(RequestException, BaseHTTPError): + """Failed to decode response content""" + + +class StreamConsumedError(RequestException, TypeError): + """The content for this response was already consumed""" + + +class RetryError(RequestException): + """Custom retries logic failed""" + + +class UnrewindableBodyError(RequestException): + """Requests encountered an error when trying to rewind a body""" + +# Warnings + + +class RequestsWarning(Warning): + """Base warning for Requests.""" + pass + + +class FileModeWarning(RequestsWarning, DeprecationWarning): + """A file was opened in text mode, but Requests determined its binary length.""" + pass + + +class RequestsDependencyWarning(RequestsWarning): + """An imported dependency doesn't match the expected version range.""" + pass diff --git a/env/lib/python3.6/site-packages/pip/_vendor/requests/help.py b/env/lib/python3.6/site-packages/pip/_vendor/requests/help.py new file mode 100644 index 0000000..df1b4eb --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/requests/help.py @@ -0,0 +1,120 @@ +"""Module containing bug report helper(s).""" +from __future__ import print_function + +import json +import platform +import sys +import ssl + +from pip._vendor import idna +from pip._vendor import urllib3 +from pip._vendor import chardet + +from . 
import __version__ as requests_version + +try: + from pip._vendor.urllib3.contrib import pyopenssl +except ImportError: + pyopenssl = None + OpenSSL = None + cryptography = None +else: + import OpenSSL + import cryptography + + +def _implementation(): + """Return a dict with the Python implementation and version. + + Provide both the name and the version of the Python implementation + currently running. For example, on CPython 2.7.5 it will return + {'name': 'CPython', 'version': '2.7.5'}. + + This function works best on CPython and PyPy: in particular, it probably + doesn't work for Jython or IronPython. Future investigation should be done + to work out the correct shape of the code for those platforms. + """ + implementation = platform.python_implementation() + + if implementation == 'CPython': + implementation_version = platform.python_version() + elif implementation == 'PyPy': + implementation_version = '%s.%s.%s' % (sys.pypy_version_info.major, + sys.pypy_version_info.minor, + sys.pypy_version_info.micro) + if sys.pypy_version_info.releaselevel != 'final': + implementation_version = ''.join([ + implementation_version, sys.pypy_version_info.releaselevel + ]) + elif implementation == 'Jython': + implementation_version = platform.python_version() # Complete Guess + elif implementation == 'IronPython': + implementation_version = platform.python_version() # Complete Guess + else: + implementation_version = 'Unknown' + + return {'name': implementation, 'version': implementation_version} + + +def info(): + """Generate information for a bug report.""" + try: + platform_info = { + 'system': platform.system(), + 'release': platform.release(), + } + except IOError: + platform_info = { + 'system': 'Unknown', + 'release': 'Unknown', + } + + implementation_info = _implementation() + urllib3_info = {'version': urllib3.__version__} + chardet_info = {'version': chardet.__version__} + + pyopenssl_info = { + 'version': None, + 'openssl_version': '', + } + if OpenSSL: + pyopenssl_info = { + 'version': OpenSSL.__version__, + 'openssl_version': '%x' % OpenSSL.SSL.OPENSSL_VERSION_NUMBER, + } + cryptography_info = { + 'version': getattr(cryptography, '__version__', ''), + } + idna_info = { + 'version': getattr(idna, '__version__', ''), + } + + # OPENSSL_VERSION_NUMBER doesn't exist in the Python 2.6 ssl module. + system_ssl = getattr(ssl, 'OPENSSL_VERSION_NUMBER', None) + system_ssl_info = { + 'version': '%x' % system_ssl if system_ssl is not None else '' + } + + return { + 'platform': platform_info, + 'implementation': implementation_info, + 'system_ssl': system_ssl_info, + 'using_pyopenssl': pyopenssl is not None, + 'pyOpenSSL': pyopenssl_info, + 'urllib3': urllib3_info, + 'chardet': chardet_info, + 'cryptography': cryptography_info, + 'idna': idna_info, + 'requests': { + 'version': requests_version, + }, + } + + +def main(): + """Pretty-print the bug information as JSON.""" + print(json.dumps(info(), sort_keys=True, indent=2)) + + +if __name__ == '__main__': + main() diff --git a/env/lib/python3.6/site-packages/pip/_vendor/requests/hooks.py b/env/lib/python3.6/site-packages/pip/_vendor/requests/hooks.py new file mode 100644 index 0000000..32b32de --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/requests/hooks.py @@ -0,0 +1,34 @@ +# -*- coding: utf-8 -*- + +""" +requests.hooks +~~~~~~~~~~~~~~ + +This module provides the capabilities for the Requests hooks system. + +Available hooks: + +``response``: + The response generated from a Request. 
+""" +HOOKS = ['response'] + + +def default_hooks(): + return dict((event, []) for event in HOOKS) + +# TODO: response is the only one + + +def dispatch_hook(key, hooks, hook_data, **kwargs): + """Dispatches a hook dictionary on a given piece of data.""" + hooks = hooks or dict() + hooks = hooks.get(key) + if hooks: + if hasattr(hooks, '__call__'): + hooks = [hooks] + for hook in hooks: + _hook_data = hook(hook_data, **kwargs) + if _hook_data is not None: + hook_data = _hook_data + return hook_data diff --git a/env/lib/python3.6/site-packages/pip/_vendor/requests/models.py b/env/lib/python3.6/site-packages/pip/_vendor/requests/models.py new file mode 100644 index 0000000..4230535 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/requests/models.py @@ -0,0 +1,952 @@ +# -*- coding: utf-8 -*- + +""" +requests.models +~~~~~~~~~~~~~~~ + +This module contains the primary objects that power Requests. +""" + +import datetime +import sys + +# Import encoding now, to avoid implicit import later. +# Implicit import within threads may cause LookupError when standard library is in a ZIP, +# such as in Embedded Python. See https://github.com/requests/requests/issues/3578. +import encodings.idna + +from pip._vendor.urllib3.fields import RequestField +from pip._vendor.urllib3.filepost import encode_multipart_formdata +from pip._vendor.urllib3.util import parse_url +from pip._vendor.urllib3.exceptions import ( + DecodeError, ReadTimeoutError, ProtocolError, LocationParseError) + +from io import UnsupportedOperation +from .hooks import default_hooks +from .structures import CaseInsensitiveDict + +from .auth import HTTPBasicAuth +from .cookies import cookiejar_from_dict, get_cookie_header, _copy_cookie_jar +from .exceptions import ( + HTTPError, MissingSchema, InvalidURL, ChunkedEncodingError, + ContentDecodingError, ConnectionError, StreamConsumedError) +from ._internal_utils import to_native_string, unicode_is_ascii +from .utils import ( + guess_filename, get_auth_from_url, requote_uri, + stream_decode_response_unicode, to_key_val_list, parse_header_links, + iter_slices, guess_json_utf, super_len, check_header_validity) +from .compat import ( + Callable, Mapping, + cookielib, urlunparse, urlsplit, urlencode, str, bytes, + is_py2, chardet, builtin_str, basestring) +from .compat import json as complexjson +from .status_codes import codes + +#: The set of HTTP status codes that indicate an automatically +#: processable redirect. +REDIRECT_STATI = ( + codes.moved, # 301 + codes.found, # 302 + codes.other, # 303 + codes.temporary_redirect, # 307 + codes.permanent_redirect, # 308 +) + +DEFAULT_REDIRECT_LIMIT = 30 +CONTENT_CHUNK_SIZE = 10 * 1024 +ITER_CHUNK_SIZE = 512 + + +class RequestEncodingMixin(object): + @property + def path_url(self): + """Build the path URL to use.""" + + url = [] + + p = urlsplit(self.url) + + path = p.path + if not path: + path = '/' + + url.append(path) + + query = p.query + if query: + url.append('?') + url.append(query) + + return ''.join(url) + + @staticmethod + def _encode_params(data): + """Encode parameters in a piece of data. + + Will successfully encode parameters when passed as a dict or a list of + 2-tuples. Order is retained if data is a list of 2-tuples but arbitrary + if parameters are supplied as a dict. 
+ """ + + if isinstance(data, (str, bytes)): + return data + elif hasattr(data, 'read'): + return data + elif hasattr(data, '__iter__'): + result = [] + for k, vs in to_key_val_list(data): + if isinstance(vs, basestring) or not hasattr(vs, '__iter__'): + vs = [vs] + for v in vs: + if v is not None: + result.append( + (k.encode('utf-8') if isinstance(k, str) else k, + v.encode('utf-8') if isinstance(v, str) else v)) + return urlencode(result, doseq=True) + else: + return data + + @staticmethod + def _encode_files(files, data): + """Build the body for a multipart/form-data request. + + Will successfully encode files when passed as a dict or a list of + tuples. Order is retained if data is a list of tuples but arbitrary + if parameters are supplied as a dict. + The tuples may be 2-tuples (filename, fileobj), 3-tuples (filename, fileobj, contentype) + or 4-tuples (filename, fileobj, contentype, custom_headers). + """ + if (not files): + raise ValueError("Files must be provided.") + elif isinstance(data, basestring): + raise ValueError("Data must not be a string.") + + new_fields = [] + fields = to_key_val_list(data or {}) + files = to_key_val_list(files or {}) + + for field, val in fields: + if isinstance(val, basestring) or not hasattr(val, '__iter__'): + val = [val] + for v in val: + if v is not None: + # Don't call str() on bytestrings: in Py3 it all goes wrong. + if not isinstance(v, bytes): + v = str(v) + + new_fields.append( + (field.decode('utf-8') if isinstance(field, bytes) else field, + v.encode('utf-8') if isinstance(v, str) else v)) + + for (k, v) in files: + # support for explicit filename + ft = None + fh = None + if isinstance(v, (tuple, list)): + if len(v) == 2: + fn, fp = v + elif len(v) == 3: + fn, fp, ft = v + else: + fn, fp, ft, fh = v + else: + fn = guess_filename(v) or k + fp = v + + if isinstance(fp, (str, bytes, bytearray)): + fdata = fp + elif hasattr(fp, 'read'): + fdata = fp.read() + elif fp is None: + continue + else: + fdata = fp + + rf = RequestField(name=k, data=fdata, filename=fn, headers=fh) + rf.make_multipart(content_type=ft) + new_fields.append(rf) + + body, content_type = encode_multipart_formdata(new_fields) + + return body, content_type + + +class RequestHooksMixin(object): + def register_hook(self, event, hook): + """Properly register a hook.""" + + if event not in self.hooks: + raise ValueError('Unsupported event specified, with event name "%s"' % (event)) + + if isinstance(hook, Callable): + self.hooks[event].append(hook) + elif hasattr(hook, '__iter__'): + self.hooks[event].extend(h for h in hook if isinstance(h, Callable)) + + def deregister_hook(self, event, hook): + """Deregister a previously registered hook. + Returns True if the hook existed, False if not. + """ + + try: + self.hooks[event].remove(hook) + return True + except ValueError: + return False + + +class Request(RequestHooksMixin): + """A user-created :class:`Request <Request>` object. + + Used to prepare a :class:`PreparedRequest <PreparedRequest>`, which is sent to the server. + + :param method: HTTP method to use. + :param url: URL to send. + :param headers: dictionary of headers to send. + :param files: dictionary of {filename: fileobject} files to multipart upload. + :param data: the body to attach to the request. If a dictionary is provided, form-encoding will take place. + :param json: json for the body to attach to the request (if files or data is not specified). + :param params: dictionary of URL parameters to append to the URL. + :param auth: Auth handler or (user, pass) tuple. 
+ :param cookies: dictionary or CookieJar of cookies to attach to this request. + :param hooks: dictionary of callback hooks, for internal usage. + + Usage:: + + >>> import requests + >>> req = requests.Request('GET', 'http://httpbin.org/get') + >>> req.prepare() + <PreparedRequest [GET]> + """ + + def __init__(self, + method=None, url=None, headers=None, files=None, data=None, + params=None, auth=None, cookies=None, hooks=None, json=None): + + # Default empty dicts for dict params. + data = [] if data is None else data + files = [] if files is None else files + headers = {} if headers is None else headers + params = {} if params is None else params + hooks = {} if hooks is None else hooks + + self.hooks = default_hooks() + for (k, v) in list(hooks.items()): + self.register_hook(event=k, hook=v) + + self.method = method + self.url = url + self.headers = headers + self.files = files + self.data = data + self.json = json + self.params = params + self.auth = auth + self.cookies = cookies + + def __repr__(self): + return '<Request [%s]>' % (self.method) + + def prepare(self): + """Constructs a :class:`PreparedRequest <PreparedRequest>` for transmission and returns it.""" + p = PreparedRequest() + p.prepare( + method=self.method, + url=self.url, + headers=self.headers, + files=self.files, + data=self.data, + json=self.json, + params=self.params, + auth=self.auth, + cookies=self.cookies, + hooks=self.hooks, + ) + return p + + +class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): + """The fully mutable :class:`PreparedRequest <PreparedRequest>` object, + containing the exact bytes that will be sent to the server. + + Generated from either a :class:`Request <Request>` object or manually. + + Usage:: + + >>> import requests + >>> req = requests.Request('GET', 'http://httpbin.org/get') + >>> r = req.prepare() + <PreparedRequest [GET]> + + >>> s = requests.Session() + >>> s.send(r) + <Response [200]> + """ + + def __init__(self): + #: HTTP verb to send to the server. + self.method = None + #: HTTP URL to send the request to. + self.url = None + #: dictionary of HTTP headers. + self.headers = None + # The `CookieJar` used to create the Cookie header will be stored here + # after prepare_cookies is called + self._cookies = None + #: request body to send to the server. + self.body = None + #: dictionary of callback hooks, for internal usage. + self.hooks = default_hooks() + #: integer denoting starting position of a readable file-like body. + self._body_position = None + + def prepare(self, + method=None, url=None, headers=None, files=None, data=None, + params=None, auth=None, cookies=None, hooks=None, json=None): + """Prepares the entire request with the given parameters.""" + + self.prepare_method(method) + self.prepare_url(url, params) + self.prepare_headers(headers) + self.prepare_cookies(cookies) + self.prepare_body(data, files, json) + self.prepare_auth(auth, url) + + # Note that prepare_auth must be last to enable authentication schemes + # such as OAuth to work on a fully prepared request. + + # This MUST go after prepare_auth. 
Authenticators could add a hook + self.prepare_hooks(hooks) + + def __repr__(self): + return '<PreparedRequest [%s]>' % (self.method) + + def copy(self): + p = PreparedRequest() + p.method = self.method + p.url = self.url + p.headers = self.headers.copy() if self.headers is not None else None + p._cookies = _copy_cookie_jar(self._cookies) + p.body = self.body + p.hooks = self.hooks + p._body_position = self._body_position + return p + + def prepare_method(self, method): + """Prepares the given HTTP method.""" + self.method = method + if self.method is not None: + self.method = to_native_string(self.method.upper()) + + @staticmethod + def _get_idna_encoded_host(host): + from pip._vendor import idna + + try: + host = idna.encode(host, uts46=True).decode('utf-8') + except idna.IDNAError: + raise UnicodeError + return host + + def prepare_url(self, url, params): + """Prepares the given HTTP URL.""" + #: Accept objects that have string representations. + #: We're unable to blindly call unicode/str functions + #: as this will include the bytestring indicator (b'') + #: on python 3.x. + #: https://github.com/requests/requests/pull/2238 + if isinstance(url, bytes): + url = url.decode('utf8') + else: + url = unicode(url) if is_py2 else str(url) + + # Remove leading whitespaces from url + url = url.lstrip() + + # Don't do any URL preparation for non-HTTP schemes like `mailto`, + # `data` etc to work around exceptions from `url_parse`, which + # handles RFC 3986 only. + if ':' in url and not url.lower().startswith('http'): + self.url = url + return + + # Support for unicode domain names and paths. + try: + scheme, auth, host, port, path, query, fragment = parse_url(url) + except LocationParseError as e: + raise InvalidURL(*e.args) + + if not scheme: + error = ("Invalid URL {0!r}: No schema supplied. Perhaps you meant http://{0}?") + error = error.format(to_native_string(url, 'utf8')) + + raise MissingSchema(error) + + if not host: + raise InvalidURL("Invalid URL %r: No host supplied" % url) + + # In general, we want to try IDNA encoding the hostname if the string contains + # non-ASCII characters. This allows users to automatically get the correct IDNA + # behaviour. For strings containing only ASCII characters, we need to also verify + # it doesn't start with a wildcard (*), before allowing the unencoded hostname. + if not unicode_is_ascii(host): + try: + host = self._get_idna_encoded_host(host) + except UnicodeError: + raise InvalidURL('URL has an invalid label.') + elif host.startswith(u'*'): + raise InvalidURL('URL has an invalid label.') + + # Carefully reconstruct the network location + netloc = auth or '' + if netloc: + netloc += '@' + netloc += host + if port: + netloc += ':' + str(port) + + # Bare domains aren't valid URLs. 
+ if not path: + path = '/' + + if is_py2: + if isinstance(scheme, str): + scheme = scheme.encode('utf-8') + if isinstance(netloc, str): + netloc = netloc.encode('utf-8') + if isinstance(path, str): + path = path.encode('utf-8') + if isinstance(query, str): + query = query.encode('utf-8') + if isinstance(fragment, str): + fragment = fragment.encode('utf-8') + + if isinstance(params, (str, bytes)): + params = to_native_string(params) + + enc_params = self._encode_params(params) + if enc_params: + if query: + query = '%s&%s' % (query, enc_params) + else: + query = enc_params + + url = requote_uri(urlunparse([scheme, netloc, path, None, query, fragment])) + self.url = url + + def prepare_headers(self, headers): + """Prepares the given HTTP headers.""" + + self.headers = CaseInsensitiveDict() + if headers: + for header in headers.items(): + # Raise exception on invalid header value. + check_header_validity(header) + name, value = header + self.headers[to_native_string(name)] = value + + def prepare_body(self, data, files, json=None): + """Prepares the given HTTP body data.""" + + # Check if file, fo, generator, iterator. + # If not, run through normal process. + + # Nottin' on you. + body = None + content_type = None + + if not data and json is not None: + # urllib3 requires a bytes-like body. Python 2's json.dumps + # provides this natively, but Python 3 gives a Unicode string. + content_type = 'application/json' + body = complexjson.dumps(json) + if not isinstance(body, bytes): + body = body.encode('utf-8') + + is_stream = all([ + hasattr(data, '__iter__'), + not isinstance(data, (basestring, list, tuple, Mapping)) + ]) + + try: + length = super_len(data) + except (TypeError, AttributeError, UnsupportedOperation): + length = None + + if is_stream: + body = data + + if getattr(body, 'tell', None) is not None: + # Record the current file position before reading. + # This will allow us to rewind a file in the event + # of a redirect. + try: + self._body_position = body.tell() + except (IOError, OSError): + # This differentiates from None, allowing us to catch + # a failed `tell()` later when trying to rewind the body + self._body_position = object() + + if files: + raise NotImplementedError('Streamed bodies and files are mutually exclusive.') + + if length: + self.headers['Content-Length'] = builtin_str(length) + else: + self.headers['Transfer-Encoding'] = 'chunked' + else: + # Multi-part file uploads. + if files: + (body, content_type) = self._encode_files(files, data) + else: + if data: + body = self._encode_params(data) + if isinstance(data, basestring) or hasattr(data, 'read'): + content_type = None + else: + content_type = 'application/x-www-form-urlencoded' + + self.prepare_content_length(body) + + # Add content-type if it wasn't explicitly provided. + if content_type and ('content-type' not in self.headers): + self.headers['Content-Type'] = content_type + + self.body = body + + def prepare_content_length(self, body): + """Prepare Content-Length header based on request method and body""" + if body is not None: + length = super_len(body) + if length: + # If length exists, set it. Otherwise, we fallback + # to Transfer-Encoding: chunked. + self.headers['Content-Length'] = builtin_str(length) + elif self.method not in ('GET', 'HEAD') and self.headers.get('Content-Length') is None: + # Set Content-Length to 0 for methods that can have a body + # but don't provide one. (i.e. 
not GET or HEAD) + self.headers['Content-Length'] = '0' + + def prepare_auth(self, auth, url=''): + """Prepares the given HTTP auth data.""" + + # If no Auth is explicitly provided, extract it from the URL first. + if auth is None: + url_auth = get_auth_from_url(self.url) + auth = url_auth if any(url_auth) else None + + if auth: + if isinstance(auth, tuple) and len(auth) == 2: + # special-case basic HTTP auth + auth = HTTPBasicAuth(*auth) + + # Allow auth to make its changes. + r = auth(self) + + # Update self to reflect the auth changes. + self.__dict__.update(r.__dict__) + + # Recompute Content-Length + self.prepare_content_length(self.body) + + def prepare_cookies(self, cookies): + """Prepares the given HTTP cookie data. + + This function eventually generates a ``Cookie`` header from the + given cookies using cookielib. Due to cookielib's design, the header + will not be regenerated if it already exists, meaning this function + can only be called once for the life of the + :class:`PreparedRequest <PreparedRequest>` object. Any subsequent calls + to ``prepare_cookies`` will have no actual effect, unless the "Cookie" + header is removed beforehand. + """ + if isinstance(cookies, cookielib.CookieJar): + self._cookies = cookies + else: + self._cookies = cookiejar_from_dict(cookies) + + cookie_header = get_cookie_header(self._cookies, self) + if cookie_header is not None: + self.headers['Cookie'] = cookie_header + + def prepare_hooks(self, hooks): + """Prepares the given hooks.""" + # hooks can be passed as None to the prepare method and to this + # method. To prevent iterating over None, simply use an empty list + # if hooks is False-y + hooks = hooks or [] + for event in hooks: + self.register_hook(event, hooks[event]) + + +class Response(object): + """The :class:`Response <Response>` object, which contains a + server's response to an HTTP request. + """ + + __attrs__ = [ + '_content', 'status_code', 'headers', 'url', 'history', + 'encoding', 'reason', 'cookies', 'elapsed', 'request' + ] + + def __init__(self): + self._content = False + self._content_consumed = False + self._next = None + + #: Integer Code of responded HTTP Status, e.g. 404 or 200. + self.status_code = None + + #: Case-insensitive Dictionary of Response Headers. + #: For example, ``headers['content-encoding']`` will return the + #: value of a ``'Content-Encoding'`` response header. + self.headers = CaseInsensitiveDict() + + #: File-like object representation of response (for advanced usage). + #: Use of ``raw`` requires that ``stream=True`` be set on the request. + # This requirement does not apply for use internally to Requests. + self.raw = None + + #: Final URL location of Response. + self.url = None + + #: Encoding to decode with when accessing r.text. + self.encoding = None + + #: A list of :class:`Response <Response>` objects from + #: the history of the Request. Any redirect responses will end + #: up here. The list is sorted from the oldest to the most recent request. + self.history = [] + + #: Textual reason of responded HTTP Status, e.g. "Not Found" or "OK". + self.reason = None + + #: A CookieJar of Cookies the server sent back. + self.cookies = cookiejar_from_dict({}) + + #: The amount of time elapsed between sending the request + #: and the arrival of the response (as a timedelta). + #: This property specifically measures the time taken between sending + #: the first byte of the request and finishing parsing the headers. 
It + #: is therefore unaffected by consuming the response content or the + #: value of the ``stream`` keyword argument. + self.elapsed = datetime.timedelta(0) + + #: The :class:`PreparedRequest <PreparedRequest>` object to which this + #: is a response. + self.request = None + + def __enter__(self): + return self + + def __exit__(self, *args): + self.close() + + def __getstate__(self): + # Consume everything; accessing the content attribute makes + # sure the content has been fully read. + if not self._content_consumed: + self.content + + return dict( + (attr, getattr(self, attr, None)) + for attr in self.__attrs__ + ) + + def __setstate__(self, state): + for name, value in state.items(): + setattr(self, name, value) + + # pickled objects do not have .raw + setattr(self, '_content_consumed', True) + setattr(self, 'raw', None) + + def __repr__(self): + return '<Response [%s]>' % (self.status_code) + + def __bool__(self): + """Returns True if :attr:`status_code` is less than 400. + + This attribute checks if the status code of the response is between + 400 and 600 to see if there was a client error or a server error. If + the status code, is between 200 and 400, this will return True. This + is **not** a check to see if the response code is ``200 OK``. + """ + return self.ok + + def __nonzero__(self): + """Returns True if :attr:`status_code` is less than 400. + + This attribute checks if the status code of the response is between + 400 and 600 to see if there was a client error or a server error. If + the status code, is between 200 and 400, this will return True. This + is **not** a check to see if the response code is ``200 OK``. + """ + return self.ok + + def __iter__(self): + """Allows you to use a response as an iterator.""" + return self.iter_content(128) + + @property + def ok(self): + """Returns True if :attr:`status_code` is less than 400, False if not. + + This attribute checks if the status code of the response is between + 400 and 600 to see if there was a client error or a server error. If + the status code is between 200 and 400, this will return True. This + is **not** a check to see if the response code is ``200 OK``. + """ + try: + self.raise_for_status() + except HTTPError: + return False + return True + + @property + def is_redirect(self): + """True if this Response is a well-formed HTTP redirect that could have + been processed automatically (by :meth:`Session.resolve_redirects`). + """ + return ('location' in self.headers and self.status_code in REDIRECT_STATI) + + @property + def is_permanent_redirect(self): + """True if this Response one of the permanent versions of redirect.""" + return ('location' in self.headers and self.status_code in (codes.moved_permanently, codes.permanent_redirect)) + + @property + def next(self): + """Returns a PreparedRequest for the next request in a redirect chain, if there is one.""" + return self._next + + @property + def apparent_encoding(self): + """The apparent encoding, provided by the chardet library.""" + return chardet.detect(self.content)['encoding'] + + def iter_content(self, chunk_size=1, decode_unicode=False): + """Iterates over the response data. When stream=True is set on the + request, this avoids reading the content at once into memory for + large responses. The chunk size is the number of bytes it should + read into memory. This is not necessarily the length of each item + returned as decoding can take place. + + chunk_size must be of type int or None. 
A value of None will + function differently depending on the value of `stream`. + stream=True will read data as it arrives in whatever size the + chunks are received. If stream=False, data is returned as + a single chunk. + + If decode_unicode is True, content will be decoded using the best + available encoding based on the response. + """ + + def generate(): + # Special case for urllib3. + if hasattr(self.raw, 'stream'): + try: + for chunk in self.raw.stream(chunk_size, decode_content=True): + yield chunk + except ProtocolError as e: + raise ChunkedEncodingError(e) + except DecodeError as e: + raise ContentDecodingError(e) + except ReadTimeoutError as e: + raise ConnectionError(e) + else: + # Standard file-like object. + while True: + chunk = self.raw.read(chunk_size) + if not chunk: + break + yield chunk + + self._content_consumed = True + + if self._content_consumed and isinstance(self._content, bool): + raise StreamConsumedError() + elif chunk_size is not None and not isinstance(chunk_size, int): + raise TypeError("chunk_size must be an int, it is instead a %s." % type(chunk_size)) + # simulate reading small chunks of the content + reused_chunks = iter_slices(self._content, chunk_size) + + stream_chunks = generate() + + chunks = reused_chunks if self._content_consumed else stream_chunks + + if decode_unicode: + chunks = stream_decode_response_unicode(chunks, self) + + return chunks + + def iter_lines(self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=None, delimiter=None): + """Iterates over the response data, one line at a time. When + stream=True is set on the request, this avoids reading the + content at once into memory for large responses. + + .. note:: This method is not reentrant safe. + """ + + pending = None + + for chunk in self.iter_content(chunk_size=chunk_size, decode_unicode=decode_unicode): + + if pending is not None: + chunk = pending + chunk + + if delimiter: + lines = chunk.split(delimiter) + else: + lines = chunk.splitlines() + + if lines and lines[-1] and chunk and lines[-1][-1] == chunk[-1]: + pending = lines.pop() + else: + pending = None + + for line in lines: + yield line + + if pending is not None: + yield pending + + @property + def content(self): + """Content of the response, in bytes.""" + + if self._content is False: + # Read the contents. + if self._content_consumed: + raise RuntimeError( + 'The content for this response was already consumed') + + if self.status_code == 0 or self.raw is None: + self._content = None + else: + self._content = b''.join(self.iter_content(CONTENT_CHUNK_SIZE)) or b'' + + self._content_consumed = True + # don't need to release the connection; that's been handled by urllib3 + # since we exhausted the data. + return self._content + + @property + def text(self): + """Content of the response, in unicode. + + If Response.encoding is None, encoding will be guessed using + ``chardet``. + + The encoding of the response content is determined based solely on HTTP + headers, following RFC 2616 to the letter. If you can take advantage of + non-HTTP knowledge to make a better guess at the encoding, you should + set ``r.encoding`` appropriately before accessing this property. + """ + + # Try charset from content-type + content = None + encoding = self.encoding + + if not self.content: + return str('') + + # Fallback to auto-detected encoding. + if self.encoding is None: + encoding = self.apparent_encoding + + # Decode unicode from given encoding. 
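For context, the streaming helpers defined above (`iter_content`, `iter_lines`, `content`, `text`) are typically consumed as in this minimal sketch; the URL and filename are placeholders and the public `requests` package is assumed rather than this vendored copy:

    import requests

    resp = requests.get('https://example.org/large-file', stream=True)
    with open('large-file.bin', 'wb') as fh:
        for chunk in resp.iter_content(chunk_size=8192):
            fh.write(chunk)          # raw byte chunks, no text decoding
    resp.close()                     # release the connection back to the pool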
+ try: + content = str(self.content, encoding, errors='replace') + except (LookupError, TypeError): + # A LookupError is raised if the encoding was not found which could + # indicate a misspelling or similar mistake. + # + # A TypeError can be raised if encoding is None + # + # So we try blindly encoding. + content = str(self.content, errors='replace') + + return content + + def json(self, **kwargs): + r"""Returns the json-encoded content of a response, if any. + + :param \*\*kwargs: Optional arguments that ``json.loads`` takes. + :raises ValueError: If the response body does not contain valid json. + """ + + if not self.encoding and self.content and len(self.content) > 3: + # No encoding set. JSON RFC 4627 section 3 states we should expect + # UTF-8, -16 or -32. Detect which one to use; If the detection or + # decoding fails, fall back to `self.text` (using chardet to make + # a best guess). + encoding = guess_json_utf(self.content) + if encoding is not None: + try: + return complexjson.loads( + self.content.decode(encoding), **kwargs + ) + except UnicodeDecodeError: + # Wrong UTF codec detected; usually because it's not UTF-8 + # but some other 8-bit codec. This is an RFC violation, + # and the server didn't bother to tell us what codec *was* + # used. + pass + return complexjson.loads(self.text, **kwargs) + + @property + def links(self): + """Returns the parsed header links of the response, if any.""" + + header = self.headers.get('link') + + # l = MultiDict() + l = {} + + if header: + links = parse_header_links(header) + + for link in links: + key = link.get('rel') or link.get('url') + l[key] = link + + return l + + def raise_for_status(self): + """Raises stored :class:`HTTPError`, if one occurred.""" + + http_error_msg = '' + if isinstance(self.reason, bytes): + # We attempt to decode utf-8 first because some servers + # choose to localize their reason strings. If the string + # isn't utf-8, we fall back to iso-8859-1 for all other + # encodings. (See PR #3538) + try: + reason = self.reason.decode('utf-8') + except UnicodeDecodeError: + reason = self.reason.decode('iso-8859-1') + else: + reason = self.reason + + if 400 <= self.status_code < 500: + http_error_msg = u'%s Client Error: %s for url: %s' % (self.status_code, reason, self.url) + + elif 500 <= self.status_code < 600: + http_error_msg = u'%s Server Error: %s for url: %s' % (self.status_code, reason, self.url) + + if http_error_msg: + raise HTTPError(http_error_msg, response=self) + + def close(self): + """Releases the connection back to the pool. Once this method has been + called the underlying ``raw`` object must not be accessed again. + + *Note: Should not normally need to be called explicitly.* + """ + if not self._content_consumed: + self.raw.close() + + release_conn = getattr(self.raw, 'release_conn', None) + if release_conn is not None: + release_conn() diff --git a/env/lib/python3.6/site-packages/pip/_vendor/requests/packages.py b/env/lib/python3.6/site-packages/pip/_vendor/requests/packages.py new file mode 100644 index 0000000..9582fa7 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/requests/packages.py @@ -0,0 +1,16 @@ +import sys + +# This code exists for backwards compatibility reasons. +# I don't like it either. Just look the other way. :) + +for package in ('urllib3', 'idna', 'chardet'): + vendored_package = "pip._vendor." 
+ package + locals()[package] = __import__(vendored_package) + # This traversal is apparently necessary such that the identities are + # preserved (requests.packages.urllib3.* is urllib3.*) + for mod in list(sys.modules): + if mod == vendored_package or mod.startswith(vendored_package + '.'): + unprefixed_mod = mod[len("pip._vendor."):] + sys.modules['pip._vendor.requests.packages.' + unprefixed_mod] = sys.modules[mod] + +# Kinda cool, though, right? diff --git a/env/lib/python3.6/site-packages/pip/_vendor/requests/sessions.py b/env/lib/python3.6/site-packages/pip/_vendor/requests/sessions.py new file mode 100644 index 0000000..ba13526 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/requests/sessions.py @@ -0,0 +1,741 @@ +# -*- coding: utf-8 -*- + +""" +requests.session +~~~~~~~~~~~~~~~~ + +This module provides a Session object to manage and persist settings across +requests (cookies, auth, proxies). +""" +import os +import sys +import time +from datetime import timedelta + +from .auth import _basic_auth_str +from .compat import cookielib, is_py3, OrderedDict, urljoin, urlparse, Mapping +from .cookies import ( + cookiejar_from_dict, extract_cookies_to_jar, RequestsCookieJar, merge_cookies) +from .models import Request, PreparedRequest, DEFAULT_REDIRECT_LIMIT +from .hooks import default_hooks, dispatch_hook +from ._internal_utils import to_native_string +from .utils import to_key_val_list, default_headers +from .exceptions import ( + TooManyRedirects, InvalidSchema, ChunkedEncodingError, ContentDecodingError) + +from .structures import CaseInsensitiveDict +from .adapters import HTTPAdapter + +from .utils import ( + requote_uri, get_environ_proxies, get_netrc_auth, should_bypass_proxies, + get_auth_from_url, rewind_body +) + +from .status_codes import codes + +# formerly defined here, reexposed here for backward compatibility +from .models import REDIRECT_STATI + +# Preferred clock, based on which one is more accurate on a given system. +if sys.platform == 'win32': + try: # Python 3.4+ + preferred_clock = time.perf_counter + except AttributeError: # Earlier than Python 3. + preferred_clock = time.clock +else: + preferred_clock = time.time + + +def merge_setting(request_setting, session_setting, dict_class=OrderedDict): + """Determines appropriate setting for a given request, taking into account + the explicit setting on that request, and the setting in the session. If a + setting is a dictionary, they will be merged together using `dict_class` + """ + + if session_setting is None: + return request_setting + + if request_setting is None: + return session_setting + + # Bypass if not a dictionary (e.g. verify) + if not ( + isinstance(session_setting, Mapping) and + isinstance(request_setting, Mapping) + ): + return request_setting + + merged_setting = dict_class(to_key_val_list(session_setting)) + merged_setting.update(to_key_val_list(request_setting)) + + # Remove keys that are set to None. Extract keys first to avoid altering + # the dictionary during iteration. + none_keys = [k for (k, v) in merged_setting.items() if v is None] + for key in none_keys: + del merged_setting[key] + + return merged_setting + + +def merge_hooks(request_hooks, session_hooks, dict_class=OrderedDict): + """Properly merges both requests and session hooks. + + This is necessary because when request_hooks == {'response': []}, the + merge breaks Session hooks entirely. 
+ """ + if session_hooks is None or session_hooks.get('response') == []: + return request_hooks + + if request_hooks is None or request_hooks.get('response') == []: + return session_hooks + + return merge_setting(request_hooks, session_hooks, dict_class) + + +class SessionRedirectMixin(object): + + def get_redirect_target(self, resp): + """Receives a Response. Returns a redirect URI or ``None``""" + # Due to the nature of how requests processes redirects this method will + # be called at least once upon the original response and at least twice + # on each subsequent redirect response (if any). + # If a custom mixin is used to handle this logic, it may be advantageous + # to cache the redirect location onto the response object as a private + # attribute. + if resp.is_redirect: + location = resp.headers['location'] + # Currently the underlying http module on py3 decode headers + # in latin1, but empirical evidence suggests that latin1 is very + # rarely used with non-ASCII characters in HTTP headers. + # It is more likely to get UTF8 header rather than latin1. + # This causes incorrect handling of UTF8 encoded location headers. + # To solve this, we re-encode the location in latin1. + if is_py3: + location = location.encode('latin1') + return to_native_string(location, 'utf8') + return None + + def resolve_redirects(self, resp, req, stream=False, timeout=None, + verify=True, cert=None, proxies=None, yield_requests=False, **adapter_kwargs): + """Receives a Response. Returns a generator of Responses or Requests.""" + + hist = [] # keep track of history + + url = self.get_redirect_target(resp) + previous_fragment = urlparse(req.url).fragment + while url: + prepared_request = req.copy() + + # Update history and keep track of redirects. + # resp.history must ignore the original request in this loop + hist.append(resp) + resp.history = hist[1:] + + try: + resp.content # Consume socket so it can be released + except (ChunkedEncodingError, ContentDecodingError, RuntimeError): + resp.raw.read(decode_content=False) + + if len(resp.history) >= self.max_redirects: + raise TooManyRedirects('Exceeded %s redirects.' % self.max_redirects, response=resp) + + # Release the connection back into the pool. + resp.close() + + # Handle redirection without scheme (see: RFC 1808 Section 4) + if url.startswith('//'): + parsed_rurl = urlparse(resp.url) + url = '%s:%s' % (to_native_string(parsed_rurl.scheme), url) + + # Normalize url case and attach previous fragment if needed (RFC 7231 7.1.2) + parsed = urlparse(url) + if parsed.fragment == '' and previous_fragment: + parsed = parsed._replace(fragment=previous_fragment) + elif parsed.fragment: + previous_fragment = parsed.fragment + url = parsed.geturl() + + # Facilitate relative 'location' headers, as allowed by RFC 7231. + # (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource') + # Compliant with RFC3986, we percent encode the url. 
+ if not parsed.netloc: + url = urljoin(resp.url, requote_uri(url)) + else: + url = requote_uri(url) + + prepared_request.url = to_native_string(url) + + self.rebuild_method(prepared_request, resp) + + # https://github.com/requests/requests/issues/1084 + if resp.status_code not in (codes.temporary_redirect, codes.permanent_redirect): + # https://github.com/requests/requests/issues/3490 + purged_headers = ('Content-Length', 'Content-Type', 'Transfer-Encoding') + for header in purged_headers: + prepared_request.headers.pop(header, None) + prepared_request.body = None + + headers = prepared_request.headers + try: + del headers['Cookie'] + except KeyError: + pass + + # Extract any cookies sent on the response to the cookiejar + # in the new request. Because we've mutated our copied prepared + # request, use the old one that we haven't yet touched. + extract_cookies_to_jar(prepared_request._cookies, req, resp.raw) + merge_cookies(prepared_request._cookies, self.cookies) + prepared_request.prepare_cookies(prepared_request._cookies) + + # Rebuild auth and proxy information. + proxies = self.rebuild_proxies(prepared_request, proxies) + self.rebuild_auth(prepared_request, resp) + + # A failed tell() sets `_body_position` to `object()`. This non-None + # value ensures `rewindable` will be True, allowing us to raise an + # UnrewindableBodyError, instead of hanging the connection. + rewindable = ( + prepared_request._body_position is not None and + ('Content-Length' in headers or 'Transfer-Encoding' in headers) + ) + + # Attempt to rewind consumed file-like object. + if rewindable: + rewind_body(prepared_request) + + # Override the original request. + req = prepared_request + + if yield_requests: + yield req + else: + + resp = self.send( + req, + stream=stream, + timeout=timeout, + verify=verify, + cert=cert, + proxies=proxies, + allow_redirects=False, + **adapter_kwargs + ) + + extract_cookies_to_jar(self.cookies, prepared_request, resp.raw) + + # extract redirect url, if any, for the next loop + url = self.get_redirect_target(resp) + yield resp + + def rebuild_auth(self, prepared_request, response): + """When being redirected we may want to strip authentication from the + request to avoid leaking credentials. This method intelligently removes + and reapplies authentication where possible to avoid credential loss. + """ + headers = prepared_request.headers + url = prepared_request.url + + if 'Authorization' in headers: + # If we get redirected to a new host, we should strip out any + # authentication headers. + original_parsed = urlparse(response.request.url) + redirect_parsed = urlparse(url) + + if (original_parsed.hostname != redirect_parsed.hostname): + del headers['Authorization'] + + # .netrc might have more auth for us on our new host. + new_auth = get_netrc_auth(url) if self.trust_env else None + if new_auth is not None: + prepared_request.prepare_auth(new_auth) + + return + + def rebuild_proxies(self, prepared_request, proxies): + """This method re-evaluates the proxy configuration by considering the + environment variables. If we are redirected to a URL covered by + NO_PROXY, we strip the proxy configuration. Otherwise, we set missing + proxy keys for this URL (in case they were stripped by a previous + redirect). + + This method also replaces the Proxy-Authorization header where + necessary. 
+ + :rtype: dict + """ + proxies = proxies if proxies is not None else {} + headers = prepared_request.headers + url = prepared_request.url + scheme = urlparse(url).scheme + new_proxies = proxies.copy() + no_proxy = proxies.get('no_proxy') + + bypass_proxy = should_bypass_proxies(url, no_proxy=no_proxy) + if self.trust_env and not bypass_proxy: + environ_proxies = get_environ_proxies(url, no_proxy=no_proxy) + + proxy = environ_proxies.get(scheme, environ_proxies.get('all')) + + if proxy: + new_proxies.setdefault(scheme, proxy) + + if 'Proxy-Authorization' in headers: + del headers['Proxy-Authorization'] + + try: + username, password = get_auth_from_url(new_proxies[scheme]) + except KeyError: + username, password = None, None + + if username and password: + headers['Proxy-Authorization'] = _basic_auth_str(username, password) + + return new_proxies + + def rebuild_method(self, prepared_request, response): + """When being redirected we may want to change the method of the request + based on certain specs or browser behavior. + """ + method = prepared_request.method + + # http://tools.ietf.org/html/rfc7231#section-6.4.4 + if response.status_code == codes.see_other and method != 'HEAD': + method = 'GET' + + # Do what the browsers do, despite standards... + # First, turn 302s into GETs. + if response.status_code == codes.found and method != 'HEAD': + method = 'GET' + + # Second, if a POST is responded to with a 301, turn it into a GET. + # This bizarre behaviour is explained in Issue 1704. + if response.status_code == codes.moved and method == 'POST': + method = 'GET' + + prepared_request.method = method + + +class Session(SessionRedirectMixin): + """A Requests session. + + Provides cookie persistence, connection-pooling, and configuration. + + Basic Usage:: + + >>> import requests + >>> s = requests.Session() + >>> s.get('http://httpbin.org/get') + <Response [200]> + + Or as a context manager:: + + >>> with requests.Session() as s: + >>> s.get('http://httpbin.org/get') + <Response [200]> + """ + + __attrs__ = [ + 'headers', 'cookies', 'auth', 'proxies', 'hooks', 'params', 'verify', + 'cert', 'prefetch', 'adapters', 'stream', 'trust_env', + 'max_redirects', + ] + + def __init__(self): + + #: A case-insensitive dictionary of headers to be sent on each + #: :class:`Request <Request>` sent from this + #: :class:`Session <Session>`. + self.headers = default_headers() + + #: Default Authentication tuple or object to attach to + #: :class:`Request <Request>`. + self.auth = None + + #: Dictionary mapping protocol or protocol and host to the URL of the proxy + #: (e.g. {'http': 'foo.bar:3128', 'http://host.name': 'foo.bar:4012'}) to + #: be used on each :class:`Request <Request>`. + self.proxies = {} + + #: Event-handling hooks. + self.hooks = default_hooks() + + #: Dictionary of querystring data to attach to each + #: :class:`Request <Request>`. The dictionary values may be lists for + #: representing multivalued query parameters. + self.params = {} + + #: Stream response content default. + self.stream = False + + #: SSL Verification default. + self.verify = True + + #: SSL client certificate default, if String, path to ssl client + #: cert file (.pem). If Tuple, ('cert', 'key') pair. + self.cert = None + + #: Maximum number of redirects allowed. If the request exceeds this + #: limit, a :class:`TooManyRedirects` exception is raised. + #: This defaults to requests.models.DEFAULT_REDIRECT_LIMIT, which is + #: 30. 
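The per-session defaults initialised here translate into configuration like the following minimal sketch (public `requests` package assumed; header values and the CA bundle path are placeholders):

    import requests

    with requests.Session() as s:
        s.headers.update({'User-Agent': 'example-client/1.0'})  # merged into every request
        s.params = {'api_key': 'EXAMPLE'}                       # appended to every query string
        s.max_redirects = 5                                     # tighter than the default of 30
        s.verify = '/path/to/ca-bundle.pem'                     # or True / False
        r = s.get('https://example.org/resource')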
+ self.max_redirects = DEFAULT_REDIRECT_LIMIT + + #: Trust environment settings for proxy configuration, default + #: authentication and similar. + self.trust_env = True + + #: A CookieJar containing all currently outstanding cookies set on this + #: session. By default it is a + #: :class:`RequestsCookieJar <requests.cookies.RequestsCookieJar>`, but + #: may be any other ``cookielib.CookieJar`` compatible object. + self.cookies = cookiejar_from_dict({}) + + # Default connection adapters. + self.adapters = OrderedDict() + self.mount('https://', HTTPAdapter()) + self.mount('http://', HTTPAdapter()) + + def __enter__(self): + return self + + def __exit__(self, *args): + self.close() + + def prepare_request(self, request): + """Constructs a :class:`PreparedRequest <PreparedRequest>` for + transmission and returns it. The :class:`PreparedRequest` has settings + merged from the :class:`Request <Request>` instance and those of the + :class:`Session`. + + :param request: :class:`Request` instance to prepare with this + session's settings. + :rtype: requests.PreparedRequest + """ + cookies = request.cookies or {} + + # Bootstrap CookieJar. + if not isinstance(cookies, cookielib.CookieJar): + cookies = cookiejar_from_dict(cookies) + + # Merge with session cookies + merged_cookies = merge_cookies( + merge_cookies(RequestsCookieJar(), self.cookies), cookies) + + # Set environment's basic authentication if not explicitly set. + auth = request.auth + if self.trust_env and not auth and not self.auth: + auth = get_netrc_auth(request.url) + + p = PreparedRequest() + p.prepare( + method=request.method.upper(), + url=request.url, + files=request.files, + data=request.data, + json=request.json, + headers=merge_setting(request.headers, self.headers, dict_class=CaseInsensitiveDict), + params=merge_setting(request.params, self.params), + auth=merge_setting(auth, self.auth), + cookies=merged_cookies, + hooks=merge_hooks(request.hooks, self.hooks), + ) + return p + + def request(self, method, url, + params=None, data=None, headers=None, cookies=None, files=None, + auth=None, timeout=None, allow_redirects=True, proxies=None, + hooks=None, stream=None, verify=None, cert=None, json=None): + """Constructs a :class:`Request <Request>`, prepares it and sends it. + Returns :class:`Response <Response>` object. + + :param method: method for the new :class:`Request` object. + :param url: URL for the new :class:`Request` object. + :param params: (optional) Dictionary or bytes to be sent in the query + string for the :class:`Request`. + :param data: (optional) Dictionary, bytes, or file-like object to send + in the body of the :class:`Request`. + :param json: (optional) json to send in the body of the + :class:`Request`. + :param headers: (optional) Dictionary of HTTP Headers to send with the + :class:`Request`. + :param cookies: (optional) Dict or CookieJar object to send with the + :class:`Request`. + :param files: (optional) Dictionary of ``'filename': file-like-objects`` + for multipart encoding upload. + :param auth: (optional) Auth tuple or callable to enable + Basic/Digest/Custom HTTP Auth. + :param timeout: (optional) How long to wait for the server to send + data before giving up, as a float, or a :ref:`(connect timeout, + read timeout) <timeouts>` tuple. + :type timeout: float or tuple + :param allow_redirects: (optional) Set to True by default. + :type allow_redirects: bool + :param proxies: (optional) Dictionary mapping protocol or protocol and + hostname to the URL of the proxy. 
+ :param stream: (optional) whether to immediately download the response + content. Defaults to ``False``. + :param verify: (optional) Either a boolean, in which case it controls whether we verify + the server's TLS certificate, or a string, in which case it must be a path + to a CA bundle to use. Defaults to ``True``. + :param cert: (optional) if String, path to ssl client cert file (.pem). + If Tuple, ('cert', 'key') pair. + :rtype: requests.Response + """ + # Create the Request. + req = Request( + method=method.upper(), + url=url, + headers=headers, + files=files, + data=data or {}, + json=json, + params=params or {}, + auth=auth, + cookies=cookies, + hooks=hooks, + ) + prep = self.prepare_request(req) + + proxies = proxies or {} + + settings = self.merge_environment_settings( + prep.url, proxies, stream, verify, cert + ) + + # Send the request. + send_kwargs = { + 'timeout': timeout, + 'allow_redirects': allow_redirects, + } + send_kwargs.update(settings) + resp = self.send(prep, **send_kwargs) + + return resp + + def get(self, url, **kwargs): + r"""Sends a GET request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :rtype: requests.Response + """ + + kwargs.setdefault('allow_redirects', True) + return self.request('GET', url, **kwargs) + + def options(self, url, **kwargs): + r"""Sends a OPTIONS request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :rtype: requests.Response + """ + + kwargs.setdefault('allow_redirects', True) + return self.request('OPTIONS', url, **kwargs) + + def head(self, url, **kwargs): + r"""Sends a HEAD request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :rtype: requests.Response + """ + + kwargs.setdefault('allow_redirects', False) + return self.request('HEAD', url, **kwargs) + + def post(self, url, data=None, json=None, **kwargs): + r"""Sends a POST request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`. + :param json: (optional) json to send in the body of the :class:`Request`. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :rtype: requests.Response + """ + + return self.request('POST', url, data=data, json=json, **kwargs) + + def put(self, url, data=None, **kwargs): + r"""Sends a PUT request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :rtype: requests.Response + """ + + return self.request('PUT', url, data=data, **kwargs) + + def patch(self, url, data=None, **kwargs): + r"""Sends a PATCH request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :rtype: requests.Response + """ + + return self.request('PATCH', url, data=data, **kwargs) + + def delete(self, url, **kwargs): + r"""Sends a DELETE request. 
Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :rtype: requests.Response + """ + + return self.request('DELETE', url, **kwargs) + + def send(self, request, **kwargs): + """Send a given PreparedRequest. + + :rtype: requests.Response + """ + # Set defaults that the hooks can utilize to ensure they always have + # the correct parameters to reproduce the previous request. + kwargs.setdefault('stream', self.stream) + kwargs.setdefault('verify', self.verify) + kwargs.setdefault('cert', self.cert) + kwargs.setdefault('proxies', self.proxies) + + # It's possible that users might accidentally send a Request object. + # Guard against that specific failure case. + if isinstance(request, Request): + raise ValueError('You can only send PreparedRequests.') + + # Set up variables needed for resolve_redirects and dispatching of hooks + allow_redirects = kwargs.pop('allow_redirects', True) + stream = kwargs.get('stream') + hooks = request.hooks + + # Get the appropriate adapter to use + adapter = self.get_adapter(url=request.url) + + # Start time (approximately) of the request + start = preferred_clock() + + # Send the request + r = adapter.send(request, **kwargs) + + # Total elapsed time of the request (approximately) + elapsed = preferred_clock() - start + r.elapsed = timedelta(seconds=elapsed) + + # Response manipulation hooks + r = dispatch_hook('response', hooks, r, **kwargs) + + # Persist cookies + if r.history: + + # If the hooks create history then we want those cookies too + for resp in r.history: + extract_cookies_to_jar(self.cookies, resp.request, resp.raw) + + extract_cookies_to_jar(self.cookies, request, r.raw) + + # Redirect resolving generator. + gen = self.resolve_redirects(r, request, **kwargs) + + # Resolve redirects if allowed. + history = [resp for resp in gen] if allow_redirects else [] + + # Shuffle things around if there's history. + if history: + # Insert the first (original) request at the start + history.insert(0, r) + # Get the last request made + r = history.pop() + r.history = history + + # If redirects aren't being followed, store the response on the Request for Response.next(). + if not allow_redirects: + try: + r._next = next(self.resolve_redirects(r, request, yield_requests=True, **kwargs)) + except StopIteration: + pass + + if not stream: + r.content + + return r + + def merge_environment_settings(self, url, proxies, stream, verify, cert): + """ + Check the environment and merge it with some settings. + + :rtype: dict + """ + # Gather clues from the surrounding environment. + if self.trust_env: + # Set environment's proxies. + no_proxy = proxies.get('no_proxy') if proxies is not None else None + env_proxies = get_environ_proxies(url, no_proxy=no_proxy) + for (k, v) in env_proxies.items(): + proxies.setdefault(k, v) + + # Look for requests environment configuration and be compatible + # with cURL. + if verify is True or verify is None: + verify = (os.environ.get('REQUESTS_CA_BUNDLE') or + os.environ.get('CURL_CA_BUNDLE')) + + # Merge all the kwargs. + proxies = merge_setting(proxies, self.proxies) + stream = merge_setting(stream, self.stream) + verify = merge_setting(verify, self.verify) + cert = merge_setting(cert, self.cert) + + return {'verify': verify, 'proxies': proxies, 'stream': stream, + 'cert': cert} + + def get_adapter(self, url): + """ + Returns the appropriate connection adapter for the given URL. 
+ + :rtype: requests.adapters.BaseAdapter + """ + for (prefix, adapter) in self.adapters.items(): + + if url.lower().startswith(prefix.lower()): + return adapter + + # Nothing matches :-/ + raise InvalidSchema("No connection adapters were found for '%s'" % url) + + def close(self): + """Closes all adapters and as such the session""" + for v in self.adapters.values(): + v.close() + + def mount(self, prefix, adapter): + """Registers a connection adapter to a prefix. + + Adapters are sorted in descending order by prefix length. + """ + self.adapters[prefix] = adapter + keys_to_move = [k for k in self.adapters if len(k) < len(prefix)] + + for key in keys_to_move: + self.adapters[key] = self.adapters.pop(key) + + def __getstate__(self): + state = dict((attr, getattr(self, attr, None)) for attr in self.__attrs__) + return state + + def __setstate__(self, state): + for attr, value in state.items(): + setattr(self, attr, value) + + +def session(): + """ + Returns a :class:`Session` for context-management. + + :rtype: Session + """ + + return Session() diff --git a/env/lib/python3.6/site-packages/pip/_vendor/requests/status_codes.py b/env/lib/python3.6/site-packages/pip/_vendor/requests/status_codes.py new file mode 100644 index 0000000..ff462c6 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/requests/status_codes.py @@ -0,0 +1,120 @@ +# -*- coding: utf-8 -*- + +""" +The ``codes`` object defines a mapping from common names for HTTP statuses +to their numerical codes, accessible either as attributes or as dictionary +items. + +>>> requests.codes['temporary_redirect'] +307 +>>> requests.codes.teapot +418 +>>> requests.codes['\o/'] +200 + +Some codes have multiple names, and both upper- and lower-case versions of +the names are allowed. For example, ``codes.ok``, ``codes.OK``, and +``codes.okay`` all correspond to the HTTP status code 200. +""" + +from .structures import LookupDict + +_codes = { + + # Informational. + 100: ('continue',), + 101: ('switching_protocols',), + 102: ('processing',), + 103: ('checkpoint',), + 122: ('uri_too_long', 'request_uri_too_long'), + 200: ('ok', 'okay', 'all_ok', 'all_okay', 'all_good', '\\o/', '✓'), + 201: ('created',), + 202: ('accepted',), + 203: ('non_authoritative_info', 'non_authoritative_information'), + 204: ('no_content',), + 205: ('reset_content', 'reset'), + 206: ('partial_content', 'partial'), + 207: ('multi_status', 'multiple_status', 'multi_stati', 'multiple_stati'), + 208: ('already_reported',), + 226: ('im_used',), + + # Redirection. + 300: ('multiple_choices',), + 301: ('moved_permanently', 'moved', '\\o-'), + 302: ('found',), + 303: ('see_other', 'other'), + 304: ('not_modified',), + 305: ('use_proxy',), + 306: ('switch_proxy',), + 307: ('temporary_redirect', 'temporary_moved', 'temporary'), + 308: ('permanent_redirect', + 'resume_incomplete', 'resume',), # These 2 to be removed in 3.0 + + # Client Error. 
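For reference, the lookup table being assembled below is what backs `requests.codes`; a small usage sketch, assuming the public `requests` package:

    import requests

    requests.codes.ok                     # 200
    requests.codes['temporary_redirect']  # 307
    requests.codes.OK                     # upper-case aliases are registered by _init()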
+ 400: ('bad_request', 'bad'), + 401: ('unauthorized',), + 402: ('payment_required', 'payment'), + 403: ('forbidden',), + 404: ('not_found', '-o-'), + 405: ('method_not_allowed', 'not_allowed'), + 406: ('not_acceptable',), + 407: ('proxy_authentication_required', 'proxy_auth', 'proxy_authentication'), + 408: ('request_timeout', 'timeout'), + 409: ('conflict',), + 410: ('gone',), + 411: ('length_required',), + 412: ('precondition_failed', 'precondition'), + 413: ('request_entity_too_large',), + 414: ('request_uri_too_large',), + 415: ('unsupported_media_type', 'unsupported_media', 'media_type'), + 416: ('requested_range_not_satisfiable', 'requested_range', 'range_not_satisfiable'), + 417: ('expectation_failed',), + 418: ('im_a_teapot', 'teapot', 'i_am_a_teapot'), + 421: ('misdirected_request',), + 422: ('unprocessable_entity', 'unprocessable'), + 423: ('locked',), + 424: ('failed_dependency', 'dependency'), + 425: ('unordered_collection', 'unordered'), + 426: ('upgrade_required', 'upgrade'), + 428: ('precondition_required', 'precondition'), + 429: ('too_many_requests', 'too_many'), + 431: ('header_fields_too_large', 'fields_too_large'), + 444: ('no_response', 'none'), + 449: ('retry_with', 'retry'), + 450: ('blocked_by_windows_parental_controls', 'parental_controls'), + 451: ('unavailable_for_legal_reasons', 'legal_reasons'), + 499: ('client_closed_request',), + + # Server Error. + 500: ('internal_server_error', 'server_error', '/o\\', '✗'), + 501: ('not_implemented',), + 502: ('bad_gateway',), + 503: ('service_unavailable', 'unavailable'), + 504: ('gateway_timeout',), + 505: ('http_version_not_supported', 'http_version'), + 506: ('variant_also_negotiates',), + 507: ('insufficient_storage',), + 509: ('bandwidth_limit_exceeded', 'bandwidth'), + 510: ('not_extended',), + 511: ('network_authentication_required', 'network_auth', 'network_authentication'), +} + +codes = LookupDict(name='status_codes') + +def _init(): + for code, titles in _codes.items(): + for title in titles: + setattr(codes, title, code) + if not title.startswith(('\\', '/')): + setattr(codes, title.upper(), code) + + def doc(code): + names = ', '.join('``%s``' % n for n in _codes[code]) + return '* %d: %s' % (code, names) + + global __doc__ + __doc__ = (__doc__ + '\n' + + '\n'.join(doc(code) for code in sorted(_codes)) + if __doc__ is not None else None) + +_init() diff --git a/env/lib/python3.6/site-packages/pip/_vendor/requests/structures.py b/env/lib/python3.6/site-packages/pip/_vendor/requests/structures.py new file mode 100644 index 0000000..da930e2 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/requests/structures.py @@ -0,0 +1,103 @@ +# -*- coding: utf-8 -*- + +""" +requests.structures +~~~~~~~~~~~~~~~~~~~ + +Data structures that power Requests. +""" + +from .compat import OrderedDict, Mapping, MutableMapping + + +class CaseInsensitiveDict(MutableMapping): + """A case-insensitive ``dict``-like object. + + Implements all methods and operations of + ``MutableMapping`` as well as dict's ``copy``. Also + provides ``lower_items``. + + All keys are expected to be strings. The structure remembers the + case of the last key to be set, and ``iter(instance)``, + ``keys()``, ``items()``, ``iterkeys()``, and ``iteritems()`` + will contain case-sensitive keys. 
However, querying and contains + testing is case insensitive:: + + cid = CaseInsensitiveDict() + cid['Accept'] = 'application/json' + cid['aCCEPT'] == 'application/json' # True + list(cid) == ['Accept'] # True + + For example, ``headers['content-encoding']`` will return the + value of a ``'Content-Encoding'`` response header, regardless + of how the header name was originally stored. + + If the constructor, ``.update``, or equality comparison + operations are given keys that have equal ``.lower()``s, the + behavior is undefined. + """ + + def __init__(self, data=None, **kwargs): + self._store = OrderedDict() + if data is None: + data = {} + self.update(data, **kwargs) + + def __setitem__(self, key, value): + # Use the lowercased key for lookups, but store the actual + # key alongside the value. + self._store[key.lower()] = (key, value) + + def __getitem__(self, key): + return self._store[key.lower()][1] + + def __delitem__(self, key): + del self._store[key.lower()] + + def __iter__(self): + return (casedkey for casedkey, mappedvalue in self._store.values()) + + def __len__(self): + return len(self._store) + + def lower_items(self): + """Like iteritems(), but with all lowercase keys.""" + return ( + (lowerkey, keyval[1]) + for (lowerkey, keyval) + in self._store.items() + ) + + def __eq__(self, other): + if isinstance(other, Mapping): + other = CaseInsensitiveDict(other) + else: + return NotImplemented + # Compare insensitively + return dict(self.lower_items()) == dict(other.lower_items()) + + # Copy is required + def copy(self): + return CaseInsensitiveDict(self._store.values()) + + def __repr__(self): + return str(dict(self.items())) + + +class LookupDict(dict): + """Dictionary lookup object.""" + + def __init__(self, name=None): + self.name = name + super(LookupDict, self).__init__() + + def __repr__(self): + return '<lookup \'%s\'>' % (self.name) + + def __getitem__(self, key): + # We allow fall-through here, so values default to None + + return self.__dict__.get(key, None) + + def get(self, key, default=None): + return self.__dict__.get(key, default) diff --git a/env/lib/python3.6/site-packages/pip/_vendor/requests/utils.py b/env/lib/python3.6/site-packages/pip/_vendor/requests/utils.py new file mode 100644 index 0000000..431f6be --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/requests/utils.py @@ -0,0 +1,976 @@ +# -*- coding: utf-8 -*- + +""" +requests.utils +~~~~~~~~~~~~~~ + +This module provides utility functions that are used within Requests +that are also useful for external consumption. +""" + +import codecs +import contextlib +import io +import os +import re +import socket +import struct +import sys +import tempfile +import warnings +import zipfile + +from .__version__ import __version__ +from . 
import certs +# to_native_string is unused here, but imported here for backwards compatibility +from ._internal_utils import to_native_string +from .compat import parse_http_list as _parse_list_header +from .compat import ( + quote, urlparse, bytes, str, OrderedDict, unquote, getproxies, + proxy_bypass, urlunparse, basestring, integer_types, is_py3, + proxy_bypass_environment, getproxies_environment, Mapping) +from .cookies import cookiejar_from_dict +from .structures import CaseInsensitiveDict +from .exceptions import ( + InvalidURL, InvalidHeader, FileModeWarning, UnrewindableBodyError) + +NETRC_FILES = ('.netrc', '_netrc') + +DEFAULT_CA_BUNDLE_PATH = certs.where() + + +if sys.platform == 'win32': + # provide a proxy_bypass version on Windows without DNS lookups + + def proxy_bypass_registry(host): + try: + if is_py3: + import winreg + else: + import _winreg as winreg + except ImportError: + return False + + try: + internetSettings = winreg.OpenKey(winreg.HKEY_CURRENT_USER, + r'Software\Microsoft\Windows\CurrentVersion\Internet Settings') + # ProxyEnable could be REG_SZ or REG_DWORD, normalizing it + proxyEnable = int(winreg.QueryValueEx(internetSettings, + 'ProxyEnable')[0]) + # ProxyOverride is almost always a string + proxyOverride = winreg.QueryValueEx(internetSettings, + 'ProxyOverride')[0] + except OSError: + return False + if not proxyEnable or not proxyOverride: + return False + + # make a check value list from the registry entry: replace the + # '<local>' string by the localhost entry and the corresponding + # canonical entry. + proxyOverride = proxyOverride.split(';') + # now check if we match one of the registry values. + for test in proxyOverride: + if test == '<local>': + if '.' not in host: + return True + test = test.replace(".", r"\.") # mask dots + test = test.replace("*", r".*") # change glob sequence + test = test.replace("?", r".") # change glob char + if re.match(test, host, re.I): + return True + return False + + def proxy_bypass(host): # noqa + """Return True, if the host should be bypassed. + + Checks proxy settings gathered from the environment, if specified, + or the registry. + """ + if getproxies_environment(): + return proxy_bypass_environment(host) + else: + return proxy_bypass_registry(host) + + +def dict_to_sequence(d): + """Returns an internal sequence dictionary update.""" + + if hasattr(d, 'items'): + d = d.items() + + return d + + +def super_len(o): + total_length = None + current_position = 0 + + if hasattr(o, '__len__'): + total_length = len(o) + + elif hasattr(o, 'len'): + total_length = o.len + + elif hasattr(o, 'fileno'): + try: + fileno = o.fileno() + except io.UnsupportedOperation: + pass + else: + total_length = os.fstat(fileno).st_size + + # Having used fstat to determine the file length, we need to + # confirm that this file was opened up in binary mode. + if 'b' not in o.mode: + warnings.warn(( + "Requests has determined the content-length for this " + "request using the binary size of the file: however, the " + "file has been opened in text mode (i.e. without the 'b' " + "flag in the mode). This may lead to an incorrect " + "content-length. In Requests 3.0, support will be removed " + "for files in text mode."), + FileModeWarning + ) + + if hasattr(o, 'tell'): + try: + current_position = o.tell() + except (OSError, IOError): + # This can happen in some weird situations, such as when the file + # is actually a special file descriptor like stdin. 
In this + # instance, we don't know what the length is, so set it to zero and + # let requests chunk it instead. + if total_length is not None: + current_position = total_length + else: + if hasattr(o, 'seek') and total_length is None: + # StringIO and BytesIO have seek but no useable fileno + try: + # seek to end of file + o.seek(0, 2) + total_length = o.tell() + + # seek back to current position to support + # partially read file-like objects + o.seek(current_position or 0) + except (OSError, IOError): + total_length = 0 + + if total_length is None: + total_length = 0 + + return max(0, total_length - current_position) + + +def get_netrc_auth(url, raise_errors=False): + """Returns the Requests tuple auth for a given url from netrc.""" + + try: + from netrc import netrc, NetrcParseError + + netrc_path = None + + for f in NETRC_FILES: + try: + loc = os.path.expanduser('~/{0}'.format(f)) + except KeyError: + # os.path.expanduser can fail when $HOME is undefined and + # getpwuid fails. See http://bugs.python.org/issue20164 & + # https://github.com/requests/requests/issues/1846 + return + + if os.path.exists(loc): + netrc_path = loc + break + + # Abort early if there isn't one. + if netrc_path is None: + return + + ri = urlparse(url) + + # Strip port numbers from netloc. This weird `if...encode`` dance is + # used for Python 3.2, which doesn't support unicode literals. + splitstr = b':' + if isinstance(url, str): + splitstr = splitstr.decode('ascii') + host = ri.netloc.split(splitstr)[0] + + try: + _netrc = netrc(netrc_path).authenticators(host) + if _netrc: + # Return with login / password + login_i = (0 if _netrc[0] else 1) + return (_netrc[login_i], _netrc[2]) + except (NetrcParseError, IOError): + # If there was a parsing error or a permissions issue reading the file, + # we'll just skip netrc auth unless explicitly asked to raise errors. + if raise_errors: + raise + + # AppEngine hackiness. + except (ImportError, AttributeError): + pass + + +def guess_filename(obj): + """Tries to guess the filename of the given object.""" + name = getattr(obj, 'name', None) + if (name and isinstance(name, basestring) and name[0] != '<' and + name[-1] != '>'): + return os.path.basename(name) + + +def extract_zipped_paths(path): + """Replace nonexistent paths that look like they refer to a member of a zip + archive with the location of an extracted copy of the target, or else + just return the provided path unchanged. + """ + if os.path.exists(path): + # this is already a valid path, no need to do anything further + return path + + # find the first valid part of the provided path and treat that as a zip archive + # assume the rest of the path is the name of a member in the archive + archive, member = os.path.split(path) + while archive and not os.path.exists(archive): + archive, prefix = os.path.split(archive) + member = '/'.join([prefix, member]) + + if not zipfile.is_zipfile(archive): + return path + + zip_file = zipfile.ZipFile(archive) + if member not in zip_file.namelist(): + return path + + # we have a valid zip archive and a valid member of that archive + tmp = tempfile.gettempdir() + extracted_path = os.path.join(tmp, *member.split('/')) + if not os.path.exists(extracted_path): + extracted_path = zip_file.extract(member, path=tmp) + + return extracted_path + + +def from_key_val_list(value): + """Take an object and test to see if it can be represented as a + dictionary. 
Unless it can not be represented as such, return an + OrderedDict, e.g., + + :: + + >>> from_key_val_list([('key', 'val')]) + OrderedDict([('key', 'val')]) + >>> from_key_val_list('string') + ValueError: need more than 1 value to unpack + >>> from_key_val_list({'key': 'val'}) + OrderedDict([('key', 'val')]) + + :rtype: OrderedDict + """ + if value is None: + return None + + if isinstance(value, (str, bytes, bool, int)): + raise ValueError('cannot encode objects that are not 2-tuples') + + return OrderedDict(value) + + +def to_key_val_list(value): + """Take an object and test to see if it can be represented as a + dictionary. If it can be, return a list of tuples, e.g., + + :: + + >>> to_key_val_list([('key', 'val')]) + [('key', 'val')] + >>> to_key_val_list({'key': 'val'}) + [('key', 'val')] + >>> to_key_val_list('string') + ValueError: cannot encode objects that are not 2-tuples. + + :rtype: list + """ + if value is None: + return None + + if isinstance(value, (str, bytes, bool, int)): + raise ValueError('cannot encode objects that are not 2-tuples') + + if isinstance(value, Mapping): + value = value.items() + + return list(value) + + +# From mitsuhiko/werkzeug (used with permission). +def parse_list_header(value): + """Parse lists as described by RFC 2068 Section 2. + + In particular, parse comma-separated lists where the elements of + the list may include quoted-strings. A quoted-string could + contain a comma. A non-quoted string could have quotes in the + middle. Quotes are removed automatically after parsing. + + It basically works like :func:`parse_set_header` just that items + may appear multiple times and case sensitivity is preserved. + + The return value is a standard :class:`list`: + + >>> parse_list_header('token, "quoted value"') + ['token', 'quoted value'] + + To create a header from the :class:`list` again, use the + :func:`dump_header` function. + + :param value: a string with a list header. + :return: :class:`list` + :rtype: list + """ + result = [] + for item in _parse_list_header(value): + if item[:1] == item[-1:] == '"': + item = unquote_header_value(item[1:-1]) + result.append(item) + return result + + +# From mitsuhiko/werkzeug (used with permission). +def parse_dict_header(value): + """Parse lists of key, value pairs as described by RFC 2068 Section 2 and + convert them into a python dict: + + >>> d = parse_dict_header('foo="is a fish", bar="as well"') + >>> type(d) is dict + True + >>> sorted(d.items()) + [('bar', 'as well'), ('foo', 'is a fish')] + + If there is no value for a key it will be `None`: + + >>> parse_dict_header('key_without_value') + {'key_without_value': None} + + To create a header from the :class:`dict` again, use the + :func:`dump_header` function. + + :param value: a string with a dict header. + :return: :class:`dict` + :rtype: dict + """ + result = {} + for item in _parse_list_header(value): + if '=' not in item: + result[item] = None + continue + name, value = item.split('=', 1) + if value[:1] == value[-1:] == '"': + value = unquote_header_value(value[1:-1]) + result[name] = value + return result + + +# From mitsuhiko/werkzeug (used with permission). +def unquote_header_value(value, is_filename=False): + r"""Unquotes a header value. (Reversal of :func:`quote_header_value`). + This does not use the real unquoting but what browsers are actually + using for quoting. + + :param value: the header value to unquote. 
+ :rtype: str + """ + if value and value[0] == value[-1] == '"': + # this is not the real unquoting, but fixing this so that the + # RFC is met will result in bugs with internet explorer and + # probably some other browsers as well. IE for example is + # uploading files with "C:\foo\bar.txt" as filename + value = value[1:-1] + + # if this is a filename and the starting characters look like + # a UNC path, then just return the value without quotes. Using the + # replace sequence below on a UNC path has the effect of turning + # the leading double slash into a single slash and then + # _fix_ie_filename() doesn't work correctly. See #458. + if not is_filename or value[:2] != '\\\\': + return value.replace('\\\\', '\\').replace('\\"', '"') + return value + + +def dict_from_cookiejar(cj): + """Returns a key/value dictionary from a CookieJar. + + :param cj: CookieJar object to extract cookies from. + :rtype: dict + """ + + cookie_dict = {} + + for cookie in cj: + cookie_dict[cookie.name] = cookie.value + + return cookie_dict + + +def add_dict_to_cookiejar(cj, cookie_dict): + """Returns a CookieJar from a key/value dictionary. + + :param cj: CookieJar to insert cookies into. + :param cookie_dict: Dict of key/values to insert into CookieJar. + :rtype: CookieJar + """ + + return cookiejar_from_dict(cookie_dict, cj) + + +def get_encodings_from_content(content): + """Returns encodings from given content string. + + :param content: bytestring to extract encodings from. + """ + warnings.warn(( + 'In requests 3.0, get_encodings_from_content will be removed. For ' + 'more information, please see the discussion on issue #2266. (This' + ' warning should only appear once.)'), + DeprecationWarning) + + charset_re = re.compile(r'<meta.*?charset=["\']*(.+?)["\'>]', flags=re.I) + pragma_re = re.compile(r'<meta.*?content=["\']*;?charset=(.+?)["\'>]', flags=re.I) + xml_re = re.compile(r'^<\?xml.*?encoding=["\']*(.+?)["\'>]') + + return (charset_re.findall(content) + + pragma_re.findall(content) + + xml_re.findall(content)) + + +def _parse_content_type_header(header): + """Returns content type and parameters from given header + + :param header: string + :return: tuple containing content type and dictionary of + parameters + """ + + tokens = header.split(';') + content_type, params = tokens[0].strip(), tokens[1:] + params_dict = {} + items_to_strip = "\"' " + + for param in params: + param = param.strip() + if param: + key, value = param, True + index_of_equals = param.find("=") + if index_of_equals != -1: + key = param[:index_of_equals].strip(items_to_strip) + value = param[index_of_equals + 1:].strip(items_to_strip) + params_dict[key] = value + return content_type, params_dict + + +def get_encoding_from_headers(headers): + """Returns encodings from given HTTP Header Dict. + + :param headers: dictionary to extract encoding from. 
+ :rtype: str + """ + + content_type = headers.get('content-type') + + if not content_type: + return None + + content_type, params = _parse_content_type_header(content_type) + + if 'charset' in params: + return params['charset'].strip("'\"") + + if 'text' in content_type: + return 'ISO-8859-1' + + +def stream_decode_response_unicode(iterator, r): + """Stream decodes a iterator.""" + + if r.encoding is None: + for item in iterator: + yield item + return + + decoder = codecs.getincrementaldecoder(r.encoding)(errors='replace') + for chunk in iterator: + rv = decoder.decode(chunk) + if rv: + yield rv + rv = decoder.decode(b'', final=True) + if rv: + yield rv + + +def iter_slices(string, slice_length): + """Iterate over slices of a string.""" + pos = 0 + if slice_length is None or slice_length <= 0: + slice_length = len(string) + while pos < len(string): + yield string[pos:pos + slice_length] + pos += slice_length + + +def get_unicode_from_response(r): + """Returns the requested content back in unicode. + + :param r: Response object to get unicode content from. + + Tried: + + 1. charset from content-type + 2. fall back and replace all unicode characters + + :rtype: str + """ + warnings.warn(( + 'In requests 3.0, get_unicode_from_response will be removed. For ' + 'more information, please see the discussion on issue #2266. (This' + ' warning should only appear once.)'), + DeprecationWarning) + + tried_encodings = [] + + # Try charset from content-type + encoding = get_encoding_from_headers(r.headers) + + if encoding: + try: + return str(r.content, encoding) + except UnicodeError: + tried_encodings.append(encoding) + + # Fall back: + try: + return str(r.content, encoding, errors='replace') + except TypeError: + return r.content + + +# The unreserved URI characters (RFC 3986) +UNRESERVED_SET = frozenset( + "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz" + "0123456789-._~") + + +def unquote_unreserved(uri): + """Un-escape any percent-escape sequences in a URI that are unreserved + characters. This leaves all reserved, illegal and non-ASCII bytes encoded. + + :rtype: str + """ + parts = uri.split('%') + for i in range(1, len(parts)): + h = parts[i][0:2] + if len(h) == 2 and h.isalnum(): + try: + c = chr(int(h, 16)) + except ValueError: + raise InvalidURL("Invalid percent-escape sequence: '%s'" % h) + + if c in UNRESERVED_SET: + parts[i] = c + parts[i][2:] + else: + parts[i] = '%' + parts[i] + else: + parts[i] = '%' + parts[i] + return ''.join(parts) + + +def requote_uri(uri): + """Re-quote the given URI. + + This function passes the given URI through an unquote/quote cycle to + ensure that it is fully and consistently quoted. + + :rtype: str + """ + safe_with_percent = "!#$%&'()*+,/:;=?@[]~" + safe_without_percent = "!#$&'()*+,/:;=?@[]~" + try: + # Unquote only the unreserved characters + # Then quote only illegal characters (do not quote reserved, + # unreserved, or '%') + return quote(unquote_unreserved(uri), safe=safe_with_percent) + except InvalidURL: + # We couldn't unquote the given URI, so let's try quoting it, but + # there may be unquoted '%'s in the URI. We need to make sure they're + # properly quoted so they do not cause issues elsewhere. 
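+ # Dropping '%' from the safe set here means any stray '%' in the URI gets encoded to '%25' instead of being passed through.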
+ return quote(uri, safe=safe_without_percent) + + +def address_in_network(ip, net): + """This function allows you to check if an IP belongs to a network subnet + + Example: returns True if ip = 192.168.1.1 and net = 192.168.1.0/24 + returns False if ip = 192.168.1.1 and net = 192.168.100.0/24 + + :rtype: bool + """ + ipaddr = struct.unpack('=L', socket.inet_aton(ip))[0] + netaddr, bits = net.split('/') + netmask = struct.unpack('=L', socket.inet_aton(dotted_netmask(int(bits))))[0] + network = struct.unpack('=L', socket.inet_aton(netaddr))[0] & netmask + return (ipaddr & netmask) == (network & netmask) + + +def dotted_netmask(mask): + """Converts mask from /xx format to xxx.xxx.xxx.xxx + + Example: if mask is 24 function returns 255.255.255.0 + + :rtype: str + """ + bits = 0xffffffff ^ (1 << 32 - mask) - 1 + return socket.inet_ntoa(struct.pack('>I', bits)) + + +def is_ipv4_address(string_ip): + """ + :rtype: bool + """ + try: + socket.inet_aton(string_ip) + except socket.error: + return False + return True + + +def is_valid_cidr(string_network): + """ + Very simple check of the cidr format in no_proxy variable. + + :rtype: bool + """ + if string_network.count('/') == 1: + try: + mask = int(string_network.split('/')[1]) + except ValueError: + return False + + if mask < 1 or mask > 32: + return False + + try: + socket.inet_aton(string_network.split('/')[0]) + except socket.error: + return False + else: + return False + return True + + +@contextlib.contextmanager +def set_environ(env_name, value): + """Set the environment variable 'env_name' to 'value' + + Save previous value, yield, and then restore the previous value stored in + the environment variable 'env_name'. + + If 'value' is None, do nothing""" + value_changed = value is not None + if value_changed: + old_value = os.environ.get(env_name) + os.environ[env_name] = value + try: + yield + finally: + if value_changed: + if old_value is None: + del os.environ[env_name] + else: + os.environ[env_name] = old_value + + +def should_bypass_proxies(url, no_proxy): + """ + Returns whether we should bypass proxies or not. + + :rtype: bool + """ + # Prioritize lowercase environment variables over uppercase + # to keep a consistent behaviour with other http projects (curl, wget). + get_proxy = lambda k: os.environ.get(k) or os.environ.get(k.upper()) + + # First check whether no_proxy is defined. If it is, check that the URL + # we're getting isn't in the no_proxy list. + no_proxy_arg = no_proxy + if no_proxy is None: + no_proxy = get_proxy('no_proxy') + parsed = urlparse(url) + + if no_proxy: + # We need to check whether we match here. We need to see if we match + # the end of the hostname, both with and without the port. + no_proxy = ( + host for host in no_proxy.replace(' ', '').split(',') if host + ) + + if is_ipv4_address(parsed.hostname): + for proxy_ip in no_proxy: + if is_valid_cidr(proxy_ip): + if address_in_network(parsed.hostname, proxy_ip): + return True + elif parsed.hostname == proxy_ip: + # If no_proxy ip was defined in plain IP notation instead of cidr notation & + # matches the IP of the index + return True + else: + host_with_port = parsed.hostname + if parsed.port: + host_with_port += ':{0}'.format(parsed.port) + + for host in no_proxy: + if parsed.hostname.endswith(host) or host_with_port.endswith(host): + # The URL does match something in no_proxy, so we don't want + # to apply the proxies on this URL. + return True + + # If the system proxy settings indicate that this URL should be bypassed, + # don't proxy. 
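+ # (proxy_bypass comes from urllib and consults the platform's proxy configuration, e.g. the Windows registry or the macOS system settings.)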
+ # The proxy_bypass function is incredibly buggy on OS X in early versions + # of Python 2.6, so allow this call to fail. Only catch the specific + # exceptions we've seen, though: this call failing in other ways can reveal + # legitimate problems. + with set_environ('no_proxy', no_proxy_arg): + try: + bypass = proxy_bypass(parsed.hostname) + except (TypeError, socket.gaierror): + bypass = False + + if bypass: + return True + + return False + + +def get_environ_proxies(url, no_proxy=None): + """ + Return a dict of environment proxies. + + :rtype: dict + """ + if should_bypass_proxies(url, no_proxy=no_proxy): + return {} + else: + return getproxies() + + +def select_proxy(url, proxies): + """Select a proxy for the url, if applicable. + + :param url: The url being for the request + :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs + """ + proxies = proxies or {} + urlparts = urlparse(url) + if urlparts.hostname is None: + return proxies.get(urlparts.scheme, proxies.get('all')) + + proxy_keys = [ + urlparts.scheme + '://' + urlparts.hostname, + urlparts.scheme, + 'all://' + urlparts.hostname, + 'all', + ] + proxy = None + for proxy_key in proxy_keys: + if proxy_key in proxies: + proxy = proxies[proxy_key] + break + + return proxy + + +def default_user_agent(name="python-requests"): + """ + Return a string representing the default user agent. + + :rtype: str + """ + return '%s/%s' % (name, __version__) + + +def default_headers(): + """ + :rtype: requests.structures.CaseInsensitiveDict + """ + return CaseInsensitiveDict({ + 'User-Agent': default_user_agent(), + 'Accept-Encoding': ', '.join(('gzip', 'deflate')), + 'Accept': '*/*', + 'Connection': 'keep-alive', + }) + + +def parse_header_links(value): + """Return a list of parsed link headers proxies. + + i.e. Link: <http:/.../front.jpeg>; rel=front; type="image/jpeg",<http://.../back.jpeg>; rel=back;type="image/jpeg" + + :rtype: list + """ + + links = [] + + replace_chars = ' \'"' + + value = value.strip(replace_chars) + if not value: + return links + + for val in re.split(', *<', value): + try: + url, params = val.split(';', 1) + except ValueError: + url, params = val, '' + + link = {'url': url.strip('<> \'"')} + + for param in params.split(';'): + try: + key, value = param.split('=') + except ValueError: + break + + link[key.strip(replace_chars)] = value.strip(replace_chars) + + links.append(link) + + return links + + +# Null bytes; no need to recreate these on each call to guess_json_utf +_null = '\x00'.encode('ascii') # encoding to ASCII for Python 3 +_null2 = _null * 2 +_null3 = _null * 3 + + +def guess_json_utf(data): + """ + :rtype: str + """ + # JSON always starts with two ASCII characters, so detection is as + # easy as counting the nulls and from their location and count + # determine the encoding. Also detect a BOM, if present. 
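+ # For example, b'{"' encoded as UTF-16-LE is 7B 00 22 00: nulls in the 2nd and 4th bytes identify the encoding even without a BOM.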
+ sample = data[:4] + if sample in (codecs.BOM_UTF32_LE, codecs.BOM_UTF32_BE): + return 'utf-32' # BOM included + if sample[:3] == codecs.BOM_UTF8: + return 'utf-8-sig' # BOM included, MS style (discouraged) + if sample[:2] in (codecs.BOM_UTF16_LE, codecs.BOM_UTF16_BE): + return 'utf-16' # BOM included + nullcount = sample.count(_null) + if nullcount == 0: + return 'utf-8' + if nullcount == 2: + if sample[::2] == _null2: # 1st and 3rd are null + return 'utf-16-be' + if sample[1::2] == _null2: # 2nd and 4th are null + return 'utf-16-le' + # Did not detect 2 valid UTF-16 ascii-range characters + if nullcount == 3: + if sample[:3] == _null3: + return 'utf-32-be' + if sample[1:] == _null3: + return 'utf-32-le' + # Did not detect a valid UTF-32 ascii-range character + return None + + +def prepend_scheme_if_needed(url, new_scheme): + """Given a URL that may or may not have a scheme, prepend the given scheme. + Does not replace a present scheme with the one provided as an argument. + + :rtype: str + """ + scheme, netloc, path, params, query, fragment = urlparse(url, new_scheme) + + # urlparse is a finicky beast, and sometimes decides that there isn't a + # netloc present. Assume that it's being over-cautious, and switch netloc + # and path if urlparse decided there was no netloc. + if not netloc: + netloc, path = path, netloc + + return urlunparse((scheme, netloc, path, params, query, fragment)) + + +def get_auth_from_url(url): + """Given a url with authentication components, extract them into a tuple of + username,password. + + :rtype: (str,str) + """ + parsed = urlparse(url) + + try: + auth = (unquote(parsed.username), unquote(parsed.password)) + except (AttributeError, TypeError): + auth = ('', '') + + return auth + + +# Moved outside of function to avoid recompile every call +_CLEAN_HEADER_REGEX_BYTE = re.compile(b'^\\S[^\\r\\n]*$|^$') +_CLEAN_HEADER_REGEX_STR = re.compile(r'^\S[^\r\n]*$|^$') + + +def check_header_validity(header): + """Verifies that header value is a string which doesn't contain + leading whitespace or return characters. This prevents unintended + header injection. + + :param header: tuple, in the format (name, value). + """ + name, value = header + + if isinstance(value, bytes): + pat = _CLEAN_HEADER_REGEX_BYTE + else: + pat = _CLEAN_HEADER_REGEX_STR + try: + if not pat.match(value): + raise InvalidHeader("Invalid return character or leading space in header: %s" % name) + except TypeError: + raise InvalidHeader("Value for header {%s: %s} must be of type str or " + "bytes, not %s" % (name, value, type(value))) + + +def urldefragauth(url): + """ + Given a url remove the fragment and the authentication part. + + :rtype: str + """ + scheme, netloc, path, params, query, fragment = urlparse(url) + + # see func:`prepend_scheme_if_needed` + if not netloc: + netloc, path = path, netloc + + netloc = netloc.rsplit('@', 1)[-1] + + return urlunparse((scheme, netloc, path, params, query, '')) + + +def rewind_body(prepared_request): + """Move file pointer back to its recorded starting position + so it can be read again on redirect. 
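+ Raises :class:`UnrewindableBodyError` if the body has no usable seek method, no recorded start position, or if seeking fails.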
+ """ + body_seek = getattr(prepared_request.body, 'seek', None) + if body_seek is not None and isinstance(prepared_request._body_position, integer_types): + try: + body_seek(prepared_request._body_position) + except (IOError, OSError): + raise UnrewindableBodyError("An error occurred when rewinding request " + "body for redirect.") + else: + raise UnrewindableBodyError("Unable to rewind request body for redirect.") diff --git a/env/lib/python3.6/site-packages/pip/_vendor/retrying.py b/env/lib/python3.6/site-packages/pip/_vendor/retrying.py new file mode 100644 index 0000000..6d1e627 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/retrying.py @@ -0,0 +1,267 @@ +## Copyright 2013-2014 Ray Holder +## +## Licensed under the Apache License, Version 2.0 (the "License"); +## you may not use this file except in compliance with the License. +## You may obtain a copy of the License at +## +## http://www.apache.org/licenses/LICENSE-2.0 +## +## Unless required by applicable law or agreed to in writing, software +## distributed under the License is distributed on an "AS IS" BASIS, +## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +## See the License for the specific language governing permissions and +## limitations under the License. + +import random +from pip._vendor import six +import sys +import time +import traceback + + +# sys.maxint / 2, since Python 3.2 doesn't have a sys.maxint... +MAX_WAIT = 1073741823 + + +def retry(*dargs, **dkw): + """ + Decorator function that instantiates the Retrying object + @param *dargs: positional arguments passed to Retrying object + @param **dkw: keyword arguments passed to the Retrying object + """ + # support both @retry and @retry() as valid syntax + if len(dargs) == 1 and callable(dargs[0]): + def wrap_simple(f): + + @six.wraps(f) + def wrapped_f(*args, **kw): + return Retrying().call(f, *args, **kw) + + return wrapped_f + + return wrap_simple(dargs[0]) + + else: + def wrap(f): + + @six.wraps(f) + def wrapped_f(*args, **kw): + return Retrying(*dargs, **dkw).call(f, *args, **kw) + + return wrapped_f + + return wrap + + +class Retrying(object): + + def __init__(self, + stop=None, wait=None, + stop_max_attempt_number=None, + stop_max_delay=None, + wait_fixed=None, + wait_random_min=None, wait_random_max=None, + wait_incrementing_start=None, wait_incrementing_increment=None, + wait_exponential_multiplier=None, wait_exponential_max=None, + retry_on_exception=None, + retry_on_result=None, + wrap_exception=False, + stop_func=None, + wait_func=None, + wait_jitter_max=None): + + self._stop_max_attempt_number = 5 if stop_max_attempt_number is None else stop_max_attempt_number + self._stop_max_delay = 100 if stop_max_delay is None else stop_max_delay + self._wait_fixed = 1000 if wait_fixed is None else wait_fixed + self._wait_random_min = 0 if wait_random_min is None else wait_random_min + self._wait_random_max = 1000 if wait_random_max is None else wait_random_max + self._wait_incrementing_start = 0 if wait_incrementing_start is None else wait_incrementing_start + self._wait_incrementing_increment = 100 if wait_incrementing_increment is None else wait_incrementing_increment + self._wait_exponential_multiplier = 1 if wait_exponential_multiplier is None else wait_exponential_multiplier + self._wait_exponential_max = MAX_WAIT if wait_exponential_max is None else wait_exponential_max + self._wait_jitter_max = 0 if wait_jitter_max is None else wait_jitter_max + + # TODO add chaining of stop behaviors + # stop behavior + stop_funcs 
= [] + if stop_max_attempt_number is not None: + stop_funcs.append(self.stop_after_attempt) + + if stop_max_delay is not None: + stop_funcs.append(self.stop_after_delay) + + if stop_func is not None: + self.stop = stop_func + + elif stop is None: + self.stop = lambda attempts, delay: any(f(attempts, delay) for f in stop_funcs) + + else: + self.stop = getattr(self, stop) + + # TODO add chaining of wait behaviors + # wait behavior + wait_funcs = [lambda *args, **kwargs: 0] + if wait_fixed is not None: + wait_funcs.append(self.fixed_sleep) + + if wait_random_min is not None or wait_random_max is not None: + wait_funcs.append(self.random_sleep) + + if wait_incrementing_start is not None or wait_incrementing_increment is not None: + wait_funcs.append(self.incrementing_sleep) + + if wait_exponential_multiplier is not None or wait_exponential_max is not None: + wait_funcs.append(self.exponential_sleep) + + if wait_func is not None: + self.wait = wait_func + + elif wait is None: + self.wait = lambda attempts, delay: max(f(attempts, delay) for f in wait_funcs) + + else: + self.wait = getattr(self, wait) + + # retry on exception filter + if retry_on_exception is None: + self._retry_on_exception = self.always_reject + else: + self._retry_on_exception = retry_on_exception + + # TODO simplify retrying by Exception types + # retry on result filter + if retry_on_result is None: + self._retry_on_result = self.never_reject + else: + self._retry_on_result = retry_on_result + + self._wrap_exception = wrap_exception + + def stop_after_attempt(self, previous_attempt_number, delay_since_first_attempt_ms): + """Stop after the previous attempt >= stop_max_attempt_number.""" + return previous_attempt_number >= self._stop_max_attempt_number + + def stop_after_delay(self, previous_attempt_number, delay_since_first_attempt_ms): + """Stop after the time from the first attempt >= stop_max_delay.""" + return delay_since_first_attempt_ms >= self._stop_max_delay + + def no_sleep(self, previous_attempt_number, delay_since_first_attempt_ms): + """Don't sleep at all before retrying.""" + return 0 + + def fixed_sleep(self, previous_attempt_number, delay_since_first_attempt_ms): + """Sleep a fixed amount of time between each retry.""" + return self._wait_fixed + + def random_sleep(self, previous_attempt_number, delay_since_first_attempt_ms): + """Sleep a random amount of time between wait_random_min and wait_random_max""" + return random.randint(self._wait_random_min, self._wait_random_max) + + def incrementing_sleep(self, previous_attempt_number, delay_since_first_attempt_ms): + """ + Sleep an incremental amount of time after each attempt, starting at + wait_incrementing_start and incrementing by wait_incrementing_increment + """ + result = self._wait_incrementing_start + (self._wait_incrementing_increment * (previous_attempt_number - 1)) + if result < 0: + result = 0 + return result + + def exponential_sleep(self, previous_attempt_number, delay_since_first_attempt_ms): + exp = 2 ** previous_attempt_number + result = self._wait_exponential_multiplier * exp + if result > self._wait_exponential_max: + result = self._wait_exponential_max + if result < 0: + result = 0 + return result + + def never_reject(self, result): + return False + + def always_reject(self, result): + return True + + def should_reject(self, attempt): + reject = False + if attempt.has_exception: + reject |= self._retry_on_exception(attempt.value[1]) + else: + reject |= self._retry_on_result(attempt.value) + + return reject + + def call(self, fn, *args, 
**kwargs): + start_time = int(round(time.time() * 1000)) + attempt_number = 1 + while True: + try: + attempt = Attempt(fn(*args, **kwargs), attempt_number, False) + except: + tb = sys.exc_info() + attempt = Attempt(tb, attempt_number, True) + + if not self.should_reject(attempt): + return attempt.get(self._wrap_exception) + + delay_since_first_attempt_ms = int(round(time.time() * 1000)) - start_time + if self.stop(attempt_number, delay_since_first_attempt_ms): + if not self._wrap_exception and attempt.has_exception: + # get() on an attempt with an exception should cause it to be raised, but raise just in case + raise attempt.get() + else: + raise RetryError(attempt) + else: + sleep = self.wait(attempt_number, delay_since_first_attempt_ms) + if self._wait_jitter_max: + jitter = random.random() * self._wait_jitter_max + sleep = sleep + max(0, jitter) + time.sleep(sleep / 1000.0) + + attempt_number += 1 + + +class Attempt(object): + """ + An Attempt encapsulates a call to a target function that may end as a + normal return value from the function or an Exception depending on what + occurred during the execution. + """ + + def __init__(self, value, attempt_number, has_exception): + self.value = value + self.attempt_number = attempt_number + self.has_exception = has_exception + + def get(self, wrap_exception=False): + """ + Return the return value of this Attempt instance or raise an Exception. + If wrap_exception is true, this Attempt is wrapped inside of a + RetryError before being raised. + """ + if self.has_exception: + if wrap_exception: + raise RetryError(self) + else: + six.reraise(self.value[0], self.value[1], self.value[2]) + else: + return self.value + + def __repr__(self): + if self.has_exception: + return "Attempts: {0}, Error:\n{1}".format(self.attempt_number, "".join(traceback.format_tb(self.value[2]))) + else: + return "Attempts: {0}, Value: {1}".format(self.attempt_number, self.value) + + +class RetryError(Exception): + """ + A RetryError encapsulates the last Attempt instance right before giving up. + """ + + def __init__(self, last_attempt): + self.last_attempt = last_attempt + + def __str__(self): + return "RetryError[{0}]".format(self.last_attempt) diff --git a/env/lib/python3.6/site-packages/pip/_vendor/six.py b/env/lib/python3.6/site-packages/pip/_vendor/six.py new file mode 100644 index 0000000..6bf4fd3 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/six.py @@ -0,0 +1,891 @@ +# Copyright (c) 2010-2017 Benjamin Peterson +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Utilities for writing code that runs on Python 2 and 3""" + +from __future__ import absolute_import + +import functools +import itertools +import operator +import sys +import types + +__author__ = "Benjamin Peterson <benjamin@python.org>" +__version__ = "1.11.0" + + +# Useful for very coarse version differentiation. +PY2 = sys.version_info[0] == 2 +PY3 = sys.version_info[0] == 3 +PY34 = sys.version_info[0:2] >= (3, 4) + +if PY3: + string_types = str, + integer_types = int, + class_types = type, + text_type = str + binary_type = bytes + + MAXSIZE = sys.maxsize +else: + string_types = basestring, + integer_types = (int, long) + class_types = (type, types.ClassType) + text_type = unicode + binary_type = str + + if sys.platform.startswith("java"): + # Jython always uses 32 bits. + MAXSIZE = int((1 << 31) - 1) + else: + # It's possible to have sizeof(long) != sizeof(Py_ssize_t). + class X(object): + + def __len__(self): + return 1 << 31 + try: + len(X()) + except OverflowError: + # 32-bit + MAXSIZE = int((1 << 31) - 1) + else: + # 64-bit + MAXSIZE = int((1 << 63) - 1) + del X + + +def _add_doc(func, doc): + """Add documentation to a function.""" + func.__doc__ = doc + + +def _import_module(name): + """Import module, returning the module after the last dot.""" + __import__(name) + return sys.modules[name] + + +class _LazyDescr(object): + + def __init__(self, name): + self.name = name + + def __get__(self, obj, tp): + result = self._resolve() + setattr(obj, self.name, result) # Invokes __set__. + try: + # This is a bit ugly, but it avoids running this again by + # removing this descriptor. + delattr(obj.__class__, self.name) + except AttributeError: + pass + return result + + +class MovedModule(_LazyDescr): + + def __init__(self, name, old, new=None): + super(MovedModule, self).__init__(name) + if PY3: + if new is None: + new = name + self.mod = new + else: + self.mod = old + + def _resolve(self): + return _import_module(self.mod) + + def __getattr__(self, attr): + _module = self._resolve() + value = getattr(_module, attr) + setattr(self, attr, value) + return value + + +class _LazyModule(types.ModuleType): + + def __init__(self, name): + super(_LazyModule, self).__init__(name) + self.__doc__ = self.__class__.__doc__ + + def __dir__(self): + attrs = ["__doc__", "__name__"] + attrs += [attr.name for attr in self._moved_attributes] + return attrs + + # Subclasses should override this + _moved_attributes = [] + + +class MovedAttribute(_LazyDescr): + + def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): + super(MovedAttribute, self).__init__(name) + if PY3: + if new_mod is None: + new_mod = name + self.mod = new_mod + if new_attr is None: + if old_attr is None: + new_attr = name + else: + new_attr = old_attr + self.attr = new_attr + else: + self.mod = old_mod + if old_attr is None: + old_attr = name + self.attr = old_attr + + def _resolve(self): + module = _import_module(self.mod) + return getattr(module, self.attr) + + +class _SixMetaPathImporter(object): + + """ + A meta path importer to import six.moves and its submodules. + + This class implements a PEP302 finder and loader. 
It should be compatible + with Python 2.5 and all existing versions of Python3 + """ + + def __init__(self, six_module_name): + self.name = six_module_name + self.known_modules = {} + + def _add_module(self, mod, *fullnames): + for fullname in fullnames: + self.known_modules[self.name + "." + fullname] = mod + + def _get_module(self, fullname): + return self.known_modules[self.name + "." + fullname] + + def find_module(self, fullname, path=None): + if fullname in self.known_modules: + return self + return None + + def __get_module(self, fullname): + try: + return self.known_modules[fullname] + except KeyError: + raise ImportError("This loader does not know module " + fullname) + + def load_module(self, fullname): + try: + # in case of a reload + return sys.modules[fullname] + except KeyError: + pass + mod = self.__get_module(fullname) + if isinstance(mod, MovedModule): + mod = mod._resolve() + else: + mod.__loader__ = self + sys.modules[fullname] = mod + return mod + + def is_package(self, fullname): + """ + Return true, if the named module is a package. + + We need this method to get correct spec objects with + Python 3.4 (see PEP451) + """ + return hasattr(self.__get_module(fullname), "__path__") + + def get_code(self, fullname): + """Return None + + Required, if is_package is implemented""" + self.__get_module(fullname) # eventually raises ImportError + return None + get_source = get_code # same as get_code + +_importer = _SixMetaPathImporter(__name__) + + +class _MovedItems(_LazyModule): + + """Lazy loading of moved objects""" + __path__ = [] # mark as package + + +_moved_attributes = [ + MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"), + MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"), + MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"), + MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"), + MovedAttribute("intern", "__builtin__", "sys"), + MovedAttribute("map", "itertools", "builtins", "imap", "map"), + MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"), + MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"), + MovedAttribute("getoutput", "commands", "subprocess"), + MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"), + MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"), + MovedAttribute("reduce", "__builtin__", "functools"), + MovedAttribute("shlex_quote", "pipes", "shlex", "quote"), + MovedAttribute("StringIO", "StringIO", "io"), + MovedAttribute("UserDict", "UserDict", "collections"), + MovedAttribute("UserList", "UserList", "collections"), + MovedAttribute("UserString", "UserString", "collections"), + MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"), + MovedAttribute("zip", "itertools", "builtins", "izip", "zip"), + MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"), + MovedModule("builtins", "__builtin__"), + MovedModule("configparser", "ConfigParser"), + MovedModule("copyreg", "copy_reg"), + MovedModule("dbm_gnu", "gdbm", "dbm.gnu"), + MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"), + MovedModule("http_cookiejar", "cookielib", "http.cookiejar"), + MovedModule("http_cookies", "Cookie", "http.cookies"), + MovedModule("html_entities", "htmlentitydefs", "html.entities"), + MovedModule("html_parser", "HTMLParser", "html.parser"), + MovedModule("http_client", "httplib", "http.client"), + MovedModule("email_mime_base", "email.MIMEBase", 
"email.mime.base"), + MovedModule("email_mime_image", "email.MIMEImage", "email.mime.image"), + MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"), + MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"), + MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"), + MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"), + MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"), + MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"), + MovedModule("cPickle", "cPickle", "pickle"), + MovedModule("queue", "Queue"), + MovedModule("reprlib", "repr"), + MovedModule("socketserver", "SocketServer"), + MovedModule("_thread", "thread", "_thread"), + MovedModule("tkinter", "Tkinter"), + MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"), + MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"), + MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"), + MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"), + MovedModule("tkinter_tix", "Tix", "tkinter.tix"), + MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"), + MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"), + MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"), + MovedModule("tkinter_colorchooser", "tkColorChooser", + "tkinter.colorchooser"), + MovedModule("tkinter_commondialog", "tkCommonDialog", + "tkinter.commondialog"), + MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"), + MovedModule("tkinter_font", "tkFont", "tkinter.font"), + MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"), + MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", + "tkinter.simpledialog"), + MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"), + MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"), + MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"), + MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"), + MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"), + MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"), +] +# Add windows specific modules. +if sys.platform == "win32": + _moved_attributes += [ + MovedModule("winreg", "_winreg"), + ] + +for attr in _moved_attributes: + setattr(_MovedItems, attr.name, attr) + if isinstance(attr, MovedModule): + _importer._add_module(attr, "moves." 
+ attr.name) +del attr + +_MovedItems._moved_attributes = _moved_attributes + +moves = _MovedItems(__name__ + ".moves") +_importer._add_module(moves, "moves") + + +class Module_six_moves_urllib_parse(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_parse""" + + +_urllib_parse_moved_attributes = [ + MovedAttribute("ParseResult", "urlparse", "urllib.parse"), + MovedAttribute("SplitResult", "urlparse", "urllib.parse"), + MovedAttribute("parse_qs", "urlparse", "urllib.parse"), + MovedAttribute("parse_qsl", "urlparse", "urllib.parse"), + MovedAttribute("urldefrag", "urlparse", "urllib.parse"), + MovedAttribute("urljoin", "urlparse", "urllib.parse"), + MovedAttribute("urlparse", "urlparse", "urllib.parse"), + MovedAttribute("urlsplit", "urlparse", "urllib.parse"), + MovedAttribute("urlunparse", "urlparse", "urllib.parse"), + MovedAttribute("urlunsplit", "urlparse", "urllib.parse"), + MovedAttribute("quote", "urllib", "urllib.parse"), + MovedAttribute("quote_plus", "urllib", "urllib.parse"), + MovedAttribute("unquote", "urllib", "urllib.parse"), + MovedAttribute("unquote_plus", "urllib", "urllib.parse"), + MovedAttribute("unquote_to_bytes", "urllib", "urllib.parse", "unquote", "unquote_to_bytes"), + MovedAttribute("urlencode", "urllib", "urllib.parse"), + MovedAttribute("splitquery", "urllib", "urllib.parse"), + MovedAttribute("splittag", "urllib", "urllib.parse"), + MovedAttribute("splituser", "urllib", "urllib.parse"), + MovedAttribute("splitvalue", "urllib", "urllib.parse"), + MovedAttribute("uses_fragment", "urlparse", "urllib.parse"), + MovedAttribute("uses_netloc", "urlparse", "urllib.parse"), + MovedAttribute("uses_params", "urlparse", "urllib.parse"), + MovedAttribute("uses_query", "urlparse", "urllib.parse"), + MovedAttribute("uses_relative", "urlparse", "urllib.parse"), +] +for attr in _urllib_parse_moved_attributes: + setattr(Module_six_moves_urllib_parse, attr.name, attr) +del attr + +Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes + +_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"), + "moves.urllib_parse", "moves.urllib.parse") + + +class Module_six_moves_urllib_error(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_error""" + + +_urllib_error_moved_attributes = [ + MovedAttribute("URLError", "urllib2", "urllib.error"), + MovedAttribute("HTTPError", "urllib2", "urllib.error"), + MovedAttribute("ContentTooShortError", "urllib", "urllib.error"), +] +for attr in _urllib_error_moved_attributes: + setattr(Module_six_moves_urllib_error, attr.name, attr) +del attr + +Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes + +_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"), + "moves.urllib_error", "moves.urllib.error") + + +class Module_six_moves_urllib_request(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_request""" + + +_urllib_request_moved_attributes = [ + MovedAttribute("urlopen", "urllib2", "urllib.request"), + MovedAttribute("install_opener", "urllib2", "urllib.request"), + MovedAttribute("build_opener", "urllib2", "urllib.request"), + MovedAttribute("pathname2url", "urllib", "urllib.request"), + MovedAttribute("url2pathname", "urllib", "urllib.request"), + MovedAttribute("getproxies", "urllib", "urllib.request"), + MovedAttribute("Request", "urllib2", "urllib.request"), + MovedAttribute("OpenerDirector", "urllib2", "urllib.request"), + MovedAttribute("HTTPDefaultErrorHandler", 
"urllib2", "urllib.request"), + MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"), + MovedAttribute("ProxyHandler", "urllib2", "urllib.request"), + MovedAttribute("BaseHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"), + MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"), + MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"), + MovedAttribute("FileHandler", "urllib2", "urllib.request"), + MovedAttribute("FTPHandler", "urllib2", "urllib.request"), + MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"), + MovedAttribute("UnknownHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"), + MovedAttribute("urlretrieve", "urllib", "urllib.request"), + MovedAttribute("urlcleanup", "urllib", "urllib.request"), + MovedAttribute("URLopener", "urllib", "urllib.request"), + MovedAttribute("FancyURLopener", "urllib", "urllib.request"), + MovedAttribute("proxy_bypass", "urllib", "urllib.request"), + MovedAttribute("parse_http_list", "urllib2", "urllib.request"), + MovedAttribute("parse_keqv_list", "urllib2", "urllib.request"), +] +for attr in _urllib_request_moved_attributes: + setattr(Module_six_moves_urllib_request, attr.name, attr) +del attr + +Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes + +_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"), + "moves.urllib_request", "moves.urllib.request") + + +class Module_six_moves_urllib_response(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_response""" + + +_urllib_response_moved_attributes = [ + MovedAttribute("addbase", "urllib", "urllib.response"), + MovedAttribute("addclosehook", "urllib", "urllib.response"), + MovedAttribute("addinfo", "urllib", "urllib.response"), + MovedAttribute("addinfourl", "urllib", "urllib.response"), +] +for attr in _urllib_response_moved_attributes: + setattr(Module_six_moves_urllib_response, attr.name, attr) +del attr + +Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes + +_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"), + "moves.urllib_response", "moves.urllib.response") + + +class Module_six_moves_urllib_robotparser(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_robotparser""" + + +_urllib_robotparser_moved_attributes = [ + MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"), +] +for attr in _urllib_robotparser_moved_attributes: + setattr(Module_six_moves_urllib_robotparser, attr.name, attr) +del attr + +Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes + +_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"), + "moves.urllib_robotparser", "moves.urllib.robotparser") + + +class 
Module_six_moves_urllib(types.ModuleType): + + """Create a six.moves.urllib namespace that resembles the Python 3 namespace""" + __path__ = [] # mark as package + parse = _importer._get_module("moves.urllib_parse") + error = _importer._get_module("moves.urllib_error") + request = _importer._get_module("moves.urllib_request") + response = _importer._get_module("moves.urllib_response") + robotparser = _importer._get_module("moves.urllib_robotparser") + + def __dir__(self): + return ['parse', 'error', 'request', 'response', 'robotparser'] + +_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"), + "moves.urllib") + + +def add_move(move): + """Add an item to six.moves.""" + setattr(_MovedItems, move.name, move) + + +def remove_move(name): + """Remove item from six.moves.""" + try: + delattr(_MovedItems, name) + except AttributeError: + try: + del moves.__dict__[name] + except KeyError: + raise AttributeError("no such move, %r" % (name,)) + + +if PY3: + _meth_func = "__func__" + _meth_self = "__self__" + + _func_closure = "__closure__" + _func_code = "__code__" + _func_defaults = "__defaults__" + _func_globals = "__globals__" +else: + _meth_func = "im_func" + _meth_self = "im_self" + + _func_closure = "func_closure" + _func_code = "func_code" + _func_defaults = "func_defaults" + _func_globals = "func_globals" + + +try: + advance_iterator = next +except NameError: + def advance_iterator(it): + return it.next() +next = advance_iterator + + +try: + callable = callable +except NameError: + def callable(obj): + return any("__call__" in klass.__dict__ for klass in type(obj).__mro__) + + +if PY3: + def get_unbound_function(unbound): + return unbound + + create_bound_method = types.MethodType + + def create_unbound_method(func, cls): + return func + + Iterator = object +else: + def get_unbound_function(unbound): + return unbound.im_func + + def create_bound_method(func, obj): + return types.MethodType(func, obj, obj.__class__) + + def create_unbound_method(func, cls): + return types.MethodType(func, None, cls) + + class Iterator(object): + + def next(self): + return type(self).__next__(self) + + callable = callable +_add_doc(get_unbound_function, + """Get the function out of a possibly unbound function""") + + +get_method_function = operator.attrgetter(_meth_func) +get_method_self = operator.attrgetter(_meth_self) +get_function_closure = operator.attrgetter(_func_closure) +get_function_code = operator.attrgetter(_func_code) +get_function_defaults = operator.attrgetter(_func_defaults) +get_function_globals = operator.attrgetter(_func_globals) + + +if PY3: + def iterkeys(d, **kw): + return iter(d.keys(**kw)) + + def itervalues(d, **kw): + return iter(d.values(**kw)) + + def iteritems(d, **kw): + return iter(d.items(**kw)) + + def iterlists(d, **kw): + return iter(d.lists(**kw)) + + viewkeys = operator.methodcaller("keys") + + viewvalues = operator.methodcaller("values") + + viewitems = operator.methodcaller("items") +else: + def iterkeys(d, **kw): + return d.iterkeys(**kw) + + def itervalues(d, **kw): + return d.itervalues(**kw) + + def iteritems(d, **kw): + return d.iteritems(**kw) + + def iterlists(d, **kw): + return d.iterlists(**kw) + + viewkeys = operator.methodcaller("viewkeys") + + viewvalues = operator.methodcaller("viewvalues") + + viewitems = operator.methodcaller("viewitems") + +_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.") +_add_doc(itervalues, "Return an iterator over the values of a dictionary.") +_add_doc(iteritems, + "Return an iterator over 
the (key, value) pairs of a dictionary.") +_add_doc(iterlists, + "Return an iterator over the (key, [values]) pairs of a dictionary.") + + +if PY3: + def b(s): + return s.encode("latin-1") + + def u(s): + return s + unichr = chr + import struct + int2byte = struct.Struct(">B").pack + del struct + byte2int = operator.itemgetter(0) + indexbytes = operator.getitem + iterbytes = iter + import io + StringIO = io.StringIO + BytesIO = io.BytesIO + _assertCountEqual = "assertCountEqual" + if sys.version_info[1] <= 1: + _assertRaisesRegex = "assertRaisesRegexp" + _assertRegex = "assertRegexpMatches" + else: + _assertRaisesRegex = "assertRaisesRegex" + _assertRegex = "assertRegex" +else: + def b(s): + return s + # Workaround for standalone backslash + + def u(s): + return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape") + unichr = unichr + int2byte = chr + + def byte2int(bs): + return ord(bs[0]) + + def indexbytes(buf, i): + return ord(buf[i]) + iterbytes = functools.partial(itertools.imap, ord) + import StringIO + StringIO = BytesIO = StringIO.StringIO + _assertCountEqual = "assertItemsEqual" + _assertRaisesRegex = "assertRaisesRegexp" + _assertRegex = "assertRegexpMatches" +_add_doc(b, """Byte literal""") +_add_doc(u, """Text literal""") + + +def assertCountEqual(self, *args, **kwargs): + return getattr(self, _assertCountEqual)(*args, **kwargs) + + +def assertRaisesRegex(self, *args, **kwargs): + return getattr(self, _assertRaisesRegex)(*args, **kwargs) + + +def assertRegex(self, *args, **kwargs): + return getattr(self, _assertRegex)(*args, **kwargs) + + +if PY3: + exec_ = getattr(moves.builtins, "exec") + + def reraise(tp, value, tb=None): + try: + if value is None: + value = tp() + if value.__traceback__ is not tb: + raise value.with_traceback(tb) + raise value + finally: + value = None + tb = None + +else: + def exec_(_code_, _globs_=None, _locs_=None): + """Execute code in a namespace.""" + if _globs_ is None: + frame = sys._getframe(1) + _globs_ = frame.f_globals + if _locs_ is None: + _locs_ = frame.f_locals + del frame + elif _locs_ is None: + _locs_ = _globs_ + exec("""exec _code_ in _globs_, _locs_""") + + exec_("""def reraise(tp, value, tb=None): + try: + raise tp, value, tb + finally: + tb = None +""") + + +if sys.version_info[:2] == (3, 2): + exec_("""def raise_from(value, from_value): + try: + if from_value is None: + raise value + raise value from from_value + finally: + value = None +""") +elif sys.version_info[:2] > (3, 2): + exec_("""def raise_from(value, from_value): + try: + raise value from from_value + finally: + value = None +""") +else: + def raise_from(value, from_value): + raise value + + +print_ = getattr(moves.builtins, "print", None) +if print_ is None: + def print_(*args, **kwargs): + """The new-style print function for Python 2.4 and 2.5.""" + fp = kwargs.pop("file", sys.stdout) + if fp is None: + return + + def write(data): + if not isinstance(data, basestring): + data = str(data) + # If the file has an encoding, encode unicode with it. 
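+ # ('file' and 'unicode' are Python 2 builtins; as the docstring above notes, this fallback only runs on Python 2.4/2.5.)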
+ if (isinstance(fp, file) and + isinstance(data, unicode) and + fp.encoding is not None): + errors = getattr(fp, "errors", None) + if errors is None: + errors = "strict" + data = data.encode(fp.encoding, errors) + fp.write(data) + want_unicode = False + sep = kwargs.pop("sep", None) + if sep is not None: + if isinstance(sep, unicode): + want_unicode = True + elif not isinstance(sep, str): + raise TypeError("sep must be None or a string") + end = kwargs.pop("end", None) + if end is not None: + if isinstance(end, unicode): + want_unicode = True + elif not isinstance(end, str): + raise TypeError("end must be None or a string") + if kwargs: + raise TypeError("invalid keyword arguments to print()") + if not want_unicode: + for arg in args: + if isinstance(arg, unicode): + want_unicode = True + break + if want_unicode: + newline = unicode("\n") + space = unicode(" ") + else: + newline = "\n" + space = " " + if sep is None: + sep = space + if end is None: + end = newline + for i, arg in enumerate(args): + if i: + write(sep) + write(arg) + write(end) +if sys.version_info[:2] < (3, 3): + _print = print_ + + def print_(*args, **kwargs): + fp = kwargs.get("file", sys.stdout) + flush = kwargs.pop("flush", False) + _print(*args, **kwargs) + if flush and fp is not None: + fp.flush() + +_add_doc(reraise, """Reraise an exception.""") + +if sys.version_info[0:2] < (3, 4): + def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS, + updated=functools.WRAPPER_UPDATES): + def wrapper(f): + f = functools.wraps(wrapped, assigned, updated)(f) + f.__wrapped__ = wrapped + return f + return wrapper +else: + wraps = functools.wraps + + +def with_metaclass(meta, *bases): + """Create a base class with a metaclass.""" + # This requires a bit of explanation: the basic idea is to make a dummy + # metaclass for one level of class instantiation that replaces itself with + # the actual metaclass. + class metaclass(type): + + def __new__(cls, name, this_bases, d): + return meta(name, bases, d) + + @classmethod + def __prepare__(cls, name, this_bases): + return meta.__prepare__(name, bases) + return type.__new__(metaclass, 'temporary_class', (), {}) + + +def add_metaclass(metaclass): + """Class decorator for creating a class with a metaclass.""" + def wrapper(cls): + orig_vars = cls.__dict__.copy() + slots = orig_vars.get('__slots__') + if slots is not None: + if isinstance(slots, str): + slots = [slots] + for slots_var in slots: + orig_vars.pop(slots_var) + orig_vars.pop('__dict__', None) + orig_vars.pop('__weakref__', None) + return metaclass(cls.__name__, cls.__bases__, orig_vars) + return wrapper + + +def python_2_unicode_compatible(klass): + """ + A decorator that defines __unicode__ and __str__ methods under Python 2. + Under Python 3 it does nothing. + + To support Python 2 and 3 with a single code base, define a __str__ method + returning text and apply this decorator to the class. + """ + if PY2: + if '__str__' not in klass.__dict__: + raise ValueError("@python_2_unicode_compatible cannot be applied " + "to %s because it doesn't define __str__()." % + klass.__name__) + klass.__unicode__ = klass.__str__ + klass.__str__ = lambda self: self.__unicode__().encode('utf-8') + return klass + + +# Complete the moves implementation. +# This code is at the end of this module to speed up module loading. +# Turn this module into a package. 
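+ # (Having a __path__ at all is what allows submodule imports such as "six.moves.urllib" to be routed through the meta path importer appended below.)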
+__path__ = [] # required for PEP 302 and PEP 451 +__package__ = __name__ # see PEP 366 @ReservedAssignment +if globals().get("__spec__") is not None: + __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable +# Remove other six meta path importers, since they cause problems. This can +# happen if six is removed from sys.modules and then reloaded. (Setuptools does +# this for some reason.) +if sys.meta_path: + for i, importer in enumerate(sys.meta_path): + # Here's some real nastiness: Another "instance" of the six module might + # be floating around. Therefore, we can't use isinstance() to check for + # the six meta path importer, since the other six instance will have + # inserted an importer with different class. + if (type(importer).__name__ == "_SixMetaPathImporter" and + importer.name == __name__): + del sys.meta_path[i] + break + del i, importer +# Finally, add the importer to the meta path import hook. +sys.meta_path.append(_importer) diff --git a/env/lib/python3.6/site-packages/pip/_vendor/urllib3/_collections.py b/env/lib/python3.6/site-packages/pip/_vendor/urllib3/_collections.py new file mode 100644 index 0000000..6e36b84 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/urllib3/_collections.py @@ -0,0 +1,332 @@ +from __future__ import absolute_import +try: + from collections.abc import Mapping, MutableMapping +except ImportError: + from collections import Mapping, MutableMapping +try: + from threading import RLock +except ImportError: # Platform-specific: No threads available + class RLock: + def __enter__(self): + pass + + def __exit__(self, exc_type, exc_value, traceback): + pass + + +try: # Python 2.7+ + from collections import OrderedDict +except ImportError: + from .packages.ordered_dict import OrderedDict +from .exceptions import InvalidHeader +from .packages.six import iterkeys, itervalues, PY3 + + +__all__ = ['RecentlyUsedContainer', 'HTTPHeaderDict'] + + +_Null = object() + + +class RecentlyUsedContainer(MutableMapping): + """ + Provides a thread-safe dict-like container which maintains up to + ``maxsize`` keys while throwing away the least-recently-used keys beyond + ``maxsize``. + + :param maxsize: + Maximum number of recent elements to retain. + + :param dispose_func: + Every time an item is evicted from the container, + ``dispose_func(value)`` is called. Callback which will get called + """ + + ContainerCls = OrderedDict + + def __init__(self, maxsize=10, dispose_func=None): + self._maxsize = maxsize + self.dispose_func = dispose_func + + self._container = self.ContainerCls() + self.lock = RLock() + + def __getitem__(self, key): + # Re-insert the item, moving it to the end of the eviction line. + with self.lock: + item = self._container.pop(key) + self._container[key] = item + return item + + def __setitem__(self, key, value): + evicted_value = _Null + with self.lock: + # Possibly evict the existing value of 'key' + evicted_value = self._container.get(key, _Null) + self._container[key] = value + + # If we didn't evict an existing value, we might have to evict the + # least recently used item from the beginning of the container. 
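+ # (popitem(last=False) removes in insertion order; together with the re-insert in __getitem__, that is the least recently used entry.)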
+ if len(self._container) > self._maxsize: + _key, evicted_value = self._container.popitem(last=False) + + if self.dispose_func and evicted_value is not _Null: + self.dispose_func(evicted_value) + + def __delitem__(self, key): + with self.lock: + value = self._container.pop(key) + + if self.dispose_func: + self.dispose_func(value) + + def __len__(self): + with self.lock: + return len(self._container) + + def __iter__(self): + raise NotImplementedError('Iteration over this class is unlikely to be threadsafe.') + + def clear(self): + with self.lock: + # Copy pointers to all values, then wipe the mapping + values = list(itervalues(self._container)) + self._container.clear() + + if self.dispose_func: + for value in values: + self.dispose_func(value) + + def keys(self): + with self.lock: + return list(iterkeys(self._container)) + + +class HTTPHeaderDict(MutableMapping): + """ + :param headers: + An iterable of field-value pairs. Must not contain multiple field names + when compared case-insensitively. + + :param kwargs: + Additional field-value pairs to pass in to ``dict.update``. + + A ``dict`` like container for storing HTTP Headers. + + Field names are stored and compared case-insensitively in compliance with + RFC 7230. Iteration provides the first case-sensitive key seen for each + case-insensitive pair. + + Using ``__setitem__`` syntax overwrites fields that compare equal + case-insensitively in order to maintain ``dict``'s api. For fields that + compare equal, instead create a new ``HTTPHeaderDict`` and use ``.add`` + in a loop. + + If multiple fields that are equal case-insensitively are passed to the + constructor or ``.update``, the behavior is undefined and some will be + lost. + + >>> headers = HTTPHeaderDict() + >>> headers.add('Set-Cookie', 'foo=bar') + >>> headers.add('set-cookie', 'baz=quxx') + >>> headers['content-length'] = '7' + >>> headers['SET-cookie'] + 'foo=bar, baz=quxx' + >>> headers['Content-Length'] + '7' + """ + + def __init__(self, headers=None, **kwargs): + super(HTTPHeaderDict, self).__init__() + self._container = OrderedDict() + if headers is not None: + if isinstance(headers, HTTPHeaderDict): + self._copy_from(headers) + else: + self.extend(headers) + if kwargs: + self.extend(kwargs) + + def __setitem__(self, key, val): + self._container[key.lower()] = [key, val] + return self._container[key.lower()] + + def __getitem__(self, key): + val = self._container[key.lower()] + return ', '.join(val[1:]) + + def __delitem__(self, key): + del self._container[key.lower()] + + def __contains__(self, key): + return key.lower() in self._container + + def __eq__(self, other): + if not isinstance(other, Mapping) and not hasattr(other, 'keys'): + return False + if not isinstance(other, type(self)): + other = type(self)(other) + return (dict((k.lower(), v) for k, v in self.itermerged()) == + dict((k.lower(), v) for k, v in other.itermerged())) + + def __ne__(self, other): + return not self.__eq__(other) + + if not PY3: # Python 2 + iterkeys = MutableMapping.iterkeys + itervalues = MutableMapping.itervalues + + __marker = object() + + def __len__(self): + return len(self._container) + + def __iter__(self): + # Only provide the originally cased names + for vals in self._container.values(): + yield vals[0] + + def pop(self, key, default=__marker): + '''D.pop(k[,d]) -> v, remove specified key and return the corresponding value. + If key is not found, d is returned if given, otherwise KeyError is raised. 
+ ''' + # Using the MutableMapping function directly fails due to the private marker. + # Using ordinary dict.pop would expose the internal structures. + # So let's reinvent the wheel. + try: + value = self[key] + except KeyError: + if default is self.__marker: + raise + return default + else: + del self[key] + return value + + def discard(self, key): + try: + del self[key] + except KeyError: + pass + + def add(self, key, val): + """Adds a (name, value) pair, doesn't overwrite the value if it already + exists. + + >>> headers = HTTPHeaderDict(foo='bar') + >>> headers.add('Foo', 'baz') + >>> headers['foo'] + 'bar, baz' + """ + key_lower = key.lower() + new_vals = [key, val] + # Keep the common case aka no item present as fast as possible + vals = self._container.setdefault(key_lower, new_vals) + if new_vals is not vals: + vals.append(val) + + def extend(self, *args, **kwargs): + """Generic import function for any type of header-like object. + Adapted version of MutableMapping.update in order to insert items + with self.add instead of self.__setitem__ + """ + if len(args) > 1: + raise TypeError("extend() takes at most 1 positional " + "arguments ({0} given)".format(len(args))) + other = args[0] if len(args) >= 1 else () + + if isinstance(other, HTTPHeaderDict): + for key, val in other.iteritems(): + self.add(key, val) + elif isinstance(other, Mapping): + for key in other: + self.add(key, other[key]) + elif hasattr(other, "keys"): + for key in other.keys(): + self.add(key, other[key]) + else: + for key, value in other: + self.add(key, value) + + for key, value in kwargs.items(): + self.add(key, value) + + def getlist(self, key, default=__marker): + """Returns a list of all the values for the named field. Returns an + empty list if the key doesn't exist.""" + try: + vals = self._container[key.lower()] + except KeyError: + if default is self.__marker: + return [] + return default + else: + return vals[1:] + + # Backwards compatibility for httplib + getheaders = getlist + getallmatchingheaders = getlist + iget = getlist + + # Backwards compatibility for http.cookiejar + get_all = getlist + + def __repr__(self): + return "%s(%s)" % (type(self).__name__, dict(self.itermerged())) + + def _copy_from(self, other): + for key in other: + val = other.getlist(key) + if isinstance(val, list): + # Don't need to convert tuples + val = list(val) + self._container[key.lower()] = [key] + val + + def copy(self): + clone = type(self)() + clone._copy_from(self) + return clone + + def iteritems(self): + """Iterate over all header lines, including duplicate ones.""" + for key in self: + vals = self._container[key.lower()] + for val in vals[1:]: + yield vals[0], val + + def itermerged(self): + """Iterate over all headers, merging duplicate ones together.""" + for key in self: + val = self._container[key.lower()] + yield val[0], ', '.join(val[1:]) + + def items(self): + return list(self.iteritems()) + + @classmethod + def from_httplib(cls, message): # Python 2 + """Read headers from a Python 2 httplib message object.""" + # python2.7 does not expose a proper API for exporting multiheaders + # efficiently. This function re-reads raw lines from the message + # object and extracts the multiheaders properly. + obs_fold_continued_leaders = (' ', '\t') + headers = [] + + for line in message.headers: + if line.startswith(obs_fold_continued_leaders): + if not headers: + # We received a header line that starts with OWS as described + # in RFC-7230 S3.2.4. 
This indicates a multiline header, but + # there exists no previous header to which we can attach it. + raise InvalidHeader( + 'Header continuation with no previous header: %s' % line + ) + else: + key, value = headers[-1] + headers[-1] = (key, value + ' ' + line.strip()) + continue + + key, value = line.split(':', 1) + headers.append((key, value.strip())) + + return cls(headers) diff --git a/env/lib/python3.6/site-packages/pip/_vendor/urllib3/connection.py b/env/lib/python3.6/site-packages/pip/_vendor/urllib3/connection.py new file mode 100644 index 0000000..a03b573 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/urllib3/connection.py @@ -0,0 +1,403 @@ +from __future__ import absolute_import +import datetime +import logging +import os +import sys +import socket +from socket import error as SocketError, timeout as SocketTimeout +import warnings +from .packages import six +from .packages.six.moves.http_client import HTTPConnection as _HTTPConnection +from .packages.six.moves.http_client import HTTPException # noqa: F401 + +try: # Compiled with SSL? + import ssl + BaseSSLError = ssl.SSLError +except (ImportError, AttributeError): # Platform-specific: No SSL. + ssl = None + + class BaseSSLError(BaseException): + pass + + +try: # Python 3: + # Not a no-op, we're adding this to the namespace so it can be imported. + ConnectionError = ConnectionError +except NameError: # Python 2: + class ConnectionError(Exception): + pass + + +from .exceptions import ( + NewConnectionError, + ConnectTimeoutError, + SubjectAltNameWarning, + SystemTimeWarning, +) +from .packages.ssl_match_hostname import match_hostname, CertificateError + +from .util.ssl_ import ( + resolve_cert_reqs, + resolve_ssl_version, + assert_fingerprint, + create_urllib3_context, + ssl_wrap_socket +) + + +from .util import connection + +from ._collections import HTTPHeaderDict + +log = logging.getLogger(__name__) + +port_by_scheme = { + 'http': 80, + 'https': 443, +} + +# When updating RECENT_DATE, move it to within two years of the current date, +# and not less than 6 months ago. +# Example: if Today is 2018-01-01, then RECENT_DATE should be any date on or +# after 2016-01-01 (today - 2 years) AND before 2017-07-01 (today - 6 months) +RECENT_DATE = datetime.date(2017, 6, 30) + + +class DummyConnection(object): + """Used to detect a failed ConnectionCls import.""" + pass + + +class HTTPConnection(_HTTPConnection, object): + """ + Based on httplib.HTTPConnection but provides an extra constructor + backwards-compatibility layer between older and newer Pythons. + + Additional keyword parameters are used to configure attributes of the connection. + Accepted parameters include: + + - ``strict``: See the documentation on :class:`urllib3.connectionpool.HTTPConnectionPool` + - ``source_address``: Set the source address for the current connection. + + .. note:: This is ignored for Python 2.6. It is only applied for 2.7 and 3.x + + - ``socket_options``: Set specific options on the underlying socket. If not specified, then + defaults are loaded from ``HTTPConnection.default_socket_options`` which includes disabling + Nagle's algorithm (sets TCP_NODELAY to 1) unless the connection is behind a proxy. + + For example, if you wish to enable TCP Keep Alive in addition to the defaults, + you might pass:: + + HTTPConnection.default_socket_options + [ + (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1), + ] + + Or you may want to disable the defaults by passing an empty list (e.g., ``[]``). 
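A short sketch of the socket-option pattern described above, assuming the vendored import paths; example.com is only a placeholder host. It keeps the defaults and adds TCP keepalive, then hands the list to a pool, which forwards it to every new connection it opens.

import socket
from pip._vendor.urllib3.connection import HTTPConnection
from pip._vendor.urllib3.connectionpool import HTTPConnectionPool

# Keep the default options (TCP_NODELAY) and add SO_KEEPALIVE on top of them.
keepalive_options = HTTPConnection.default_socket_options + [
    (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),
]
pool = HTTPConnectionPool('example.com', port=80,
                          socket_options=keepalive_options)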
+ """ + + default_port = port_by_scheme['http'] + + #: Disable Nagle's algorithm by default. + #: ``[(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]`` + default_socket_options = [(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)] + + #: Whether this connection verifies the host's certificate. + is_verified = False + + def __init__(self, *args, **kw): + if six.PY3: # Python 3 + kw.pop('strict', None) + + # Pre-set source_address in case we have an older Python like 2.6. + self.source_address = kw.get('source_address') + + if sys.version_info < (2, 7): # Python 2.6 + # _HTTPConnection on Python 2.6 will balk at this keyword arg, but + # not newer versions. We can still use it when creating a + # connection though, so we pop it *after* we have saved it as + # self.source_address. + kw.pop('source_address', None) + + #: The socket options provided by the user. If no options are + #: provided, we use the default options. + self.socket_options = kw.pop('socket_options', self.default_socket_options) + + # Superclass also sets self.source_address in Python 2.7+. + _HTTPConnection.__init__(self, *args, **kw) + + @property + def host(self): + """ + Getter method to remove any trailing dots that indicate the hostname is an FQDN. + + In general, SSL certificates don't include the trailing dot indicating a + fully-qualified domain name, and thus, they don't validate properly when + checked against a domain name that includes the dot. In addition, some + servers may not expect to receive the trailing dot when provided. + + However, the hostname with trailing dot is critical to DNS resolution; doing a + lookup with the trailing dot will properly only resolve the appropriate FQDN, + whereas a lookup without a trailing dot will search the system's search domain + list. Thus, it's important to keep the original host around for use only in + those cases where it's appropriate (i.e., when doing DNS lookup to establish the + actual TCP connection across which we're going to send HTTP requests). + """ + return self._dns_host.rstrip('.') + + @host.setter + def host(self, value): + """ + Setter for the `host` property. + + We assume that only urllib3 uses the _dns_host attribute; httplib itself + only uses `host`, and it seems reasonable that other libraries follow suit. + """ + self._dns_host = value + + def _new_conn(self): + """ Establish a socket connection and set nodelay settings on it. + + :return: New socket connection. + """ + extra_kw = {} + if self.source_address: + extra_kw['source_address'] = self.source_address + + if self.socket_options: + extra_kw['socket_options'] = self.socket_options + + try: + conn = connection.create_connection( + (self._dns_host, self.port), self.timeout, **extra_kw) + + except SocketTimeout as e: + raise ConnectTimeoutError( + self, "Connection to %s timed out. (connect timeout=%s)" % + (self.host, self.timeout)) + + except SocketError as e: + raise NewConnectionError( + self, "Failed to establish a new connection: %s" % e) + + return conn + + def _prepare_conn(self, conn): + self.sock = conn + # the _tunnel_host attribute was added in python 2.6.3 (via + # http://hg.python.org/cpython/rev/0f57b30a152f) so pythons 2.6(0-2) do + # not have them. + if getattr(self, '_tunnel_host', None): + # TODO: Fix tunnel so it doesn't depend on self.sock state. 
+ self._tunnel() + # Mark this connection as not reusable + self.auto_open = 0 + + def connect(self): + conn = self._new_conn() + self._prepare_conn(conn) + + def request_chunked(self, method, url, body=None, headers=None): + """ + Alternative to the common request method, which sends the + body with chunked encoding and not as one block + """ + headers = HTTPHeaderDict(headers if headers is not None else {}) + skip_accept_encoding = 'accept-encoding' in headers + skip_host = 'host' in headers + self.putrequest( + method, + url, + skip_accept_encoding=skip_accept_encoding, + skip_host=skip_host + ) + for header, value in headers.items(): + self.putheader(header, value) + if 'transfer-encoding' not in headers: + self.putheader('Transfer-Encoding', 'chunked') + self.endheaders() + + if body is not None: + stringish_types = six.string_types + (six.binary_type,) + if isinstance(body, stringish_types): + body = (body,) + for chunk in body: + if not chunk: + continue + if not isinstance(chunk, six.binary_type): + chunk = chunk.encode('utf8') + len_str = hex(len(chunk))[2:] + self.send(len_str.encode('utf-8')) + self.send(b'\r\n') + self.send(chunk) + self.send(b'\r\n') + + # After the if clause, to always have a closed body + self.send(b'0\r\n\r\n') + + +class HTTPSConnection(HTTPConnection): + default_port = port_by_scheme['https'] + + ssl_version = None + + def __init__(self, host, port=None, key_file=None, cert_file=None, + strict=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, + ssl_context=None, **kw): + + HTTPConnection.__init__(self, host, port, strict=strict, + timeout=timeout, **kw) + + self.key_file = key_file + self.cert_file = cert_file + self.ssl_context = ssl_context + + # Required property for Google AppEngine 1.9.0 which otherwise causes + # HTTPS requests to go out as HTTP. (See Issue #356) + self._protocol = 'https' + + def connect(self): + conn = self._new_conn() + self._prepare_conn(conn) + + if self.ssl_context is None: + self.ssl_context = create_urllib3_context( + ssl_version=resolve_ssl_version(None), + cert_reqs=resolve_cert_reqs(None), + ) + + self.sock = ssl_wrap_socket( + sock=conn, + keyfile=self.key_file, + certfile=self.cert_file, + ssl_context=self.ssl_context, + ) + + +class VerifiedHTTPSConnection(HTTPSConnection): + """ + Based on httplib.HTTPSConnection but wraps the socket with + SSL certification. + """ + cert_reqs = None + ca_certs = None + ca_cert_dir = None + ssl_version = None + assert_fingerprint = None + + def set_cert(self, key_file=None, cert_file=None, + cert_reqs=None, ca_certs=None, + assert_hostname=None, assert_fingerprint=None, + ca_cert_dir=None): + """ + This method should only be called once, before the connection is used. + """ + # If cert_reqs is not provided, we can try to guess. If the user gave + # us a cert database, we assume they want to use it: otherwise, if + # they gave us an SSL Context object we should use whatever is set for + # it. 
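A sketch of request_chunked() with an iterable body, assuming the vendored import path; httpbin.org is only an illustrative endpoint. Each chunk is framed as hex length, CRLF, data, CRLF, and the terminating 0\r\n\r\n is sent automatically.

from pip._vendor.urllib3.connection import HTTPConnection

def body_chunks():
    # Any iterable of bytes/str works; text chunks are encoded as UTF-8.
    yield b'hello, '
    yield b'world'

conn = HTTPConnection('httpbin.org', 80, timeout=10)
conn.request_chunked('POST', '/post', body=body_chunks())
response = conn.getresponse()
print(response.status)
conn.close()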
+ if cert_reqs is None: + if ca_certs or ca_cert_dir: + cert_reqs = 'CERT_REQUIRED' + elif self.ssl_context is not None: + cert_reqs = self.ssl_context.verify_mode + + self.key_file = key_file + self.cert_file = cert_file + self.cert_reqs = cert_reqs + self.assert_hostname = assert_hostname + self.assert_fingerprint = assert_fingerprint + self.ca_certs = ca_certs and os.path.expanduser(ca_certs) + self.ca_cert_dir = ca_cert_dir and os.path.expanduser(ca_cert_dir) + + def connect(self): + # Add certificate verification + conn = self._new_conn() + + hostname = self.host + if getattr(self, '_tunnel_host', None): + # _tunnel_host was added in Python 2.6.3 + # (See: http://hg.python.org/cpython/rev/0f57b30a152f) + + self.sock = conn + # Calls self._set_hostport(), so self.host is + # self._tunnel_host below. + self._tunnel() + # Mark this connection as not reusable + self.auto_open = 0 + + # Override the host with the one we're requesting data from. + hostname = self._tunnel_host + + is_time_off = datetime.date.today() < RECENT_DATE + if is_time_off: + warnings.warn(( + 'System time is way off (before {0}). This will probably ' + 'lead to SSL verification errors').format(RECENT_DATE), + SystemTimeWarning + ) + + # Wrap socket using verification with the root certs in + # trusted_root_certs + if self.ssl_context is None: + self.ssl_context = create_urllib3_context( + ssl_version=resolve_ssl_version(self.ssl_version), + cert_reqs=resolve_cert_reqs(self.cert_reqs), + ) + + context = self.ssl_context + context.verify_mode = resolve_cert_reqs(self.cert_reqs) + self.sock = ssl_wrap_socket( + sock=conn, + keyfile=self.key_file, + certfile=self.cert_file, + ca_certs=self.ca_certs, + ca_cert_dir=self.ca_cert_dir, + server_hostname=hostname, + ssl_context=context) + + if self.assert_fingerprint: + assert_fingerprint(self.sock.getpeercert(binary_form=True), + self.assert_fingerprint) + elif context.verify_mode != ssl.CERT_NONE \ + and not getattr(context, 'check_hostname', False) \ + and self.assert_hostname is not False: + # While urllib3 attempts to always turn off hostname matching from + # the TLS library, this cannot always be done. So we check whether + # the TLS Library still thinks it's matching hostnames. + cert = self.sock.getpeercert() + if not cert.get('subjectAltName', ()): + warnings.warn(( + 'Certificate for {0} has no `subjectAltName`, falling back to check for a ' + '`commonName` for now. This feature is being removed by major browsers and ' + 'deprecated by RFC 2818. (See https://github.com/shazow/urllib3/issues/497 ' + 'for details.)'.format(hostname)), + SubjectAltNameWarning + ) + _match_hostname(cert, self.assert_hostname or hostname) + + self.is_verified = ( + context.verify_mode == ssl.CERT_REQUIRED or + self.assert_fingerprint is not None + ) + + +def _match_hostname(cert, asserted_hostname): + try: + match_hostname(cert, asserted_hostname) + except CertificateError as e: + log.error( + 'Certificate did not match expected hostname: %s. ' + 'Certificate: %s', asserted_hostname, cert + ) + # Add cert to exception and reraise so client code can inspect + # the cert when catching the exception, if they want to + e._peer_cert = cert + raise + + +if ssl: + # Make a copy for testing. 
+ UnverifiedHTTPSConnection = HTTPSConnection + HTTPSConnection = VerifiedHTTPSConnection +else: + HTTPSConnection = DummyConnection diff --git a/env/lib/python3.6/site-packages/pip/_vendor/urllib3/connectionpool.py b/env/lib/python3.6/site-packages/pip/_vendor/urllib3/connectionpool.py new file mode 100644 index 0000000..8fcb0bc --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/urllib3/connectionpool.py @@ -0,0 +1,906 @@ +from __future__ import absolute_import +import errno +import logging +import sys +import warnings + +from socket import error as SocketError, timeout as SocketTimeout +import socket + + +from .exceptions import ( + ClosedPoolError, + ProtocolError, + EmptyPoolError, + HeaderParsingError, + HostChangedError, + LocationValueError, + MaxRetryError, + ProxyError, + ReadTimeoutError, + SSLError, + TimeoutError, + InsecureRequestWarning, + NewConnectionError, +) +from .packages.ssl_match_hostname import CertificateError +from .packages import six +from .packages.six.moves import queue +from .connection import ( + port_by_scheme, + DummyConnection, + HTTPConnection, HTTPSConnection, VerifiedHTTPSConnection, + HTTPException, BaseSSLError, +) +from .request import RequestMethods +from .response import HTTPResponse + +from .util.connection import is_connection_dropped +from .util.request import set_file_position +from .util.response import assert_header_parsing +from .util.retry import Retry +from .util.timeout import Timeout +from .util.url import get_host, Url, NORMALIZABLE_SCHEMES +from .util.queue import LifoQueue + + +xrange = six.moves.xrange + +log = logging.getLogger(__name__) + +_Default = object() + + +# Pool objects +class ConnectionPool(object): + """ + Base class for all connection pools, such as + :class:`.HTTPConnectionPool` and :class:`.HTTPSConnectionPool`. + """ + + scheme = None + QueueCls = LifoQueue + + def __init__(self, host, port=None): + if not host: + raise LocationValueError("No host specified.") + + self.host = _ipv6_host(host, self.scheme) + self._proxy_host = host.lower() + self.port = port + + def __str__(self): + return '%s(host=%r, port=%r)' % (type(self).__name__, + self.host, self.port) + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + self.close() + # Return False to re-raise any potential exceptions + return False + + def close(self): + """ + Close all pooled connections and disable the pool. + """ + pass + + +# This is taken from http://hg.python.org/cpython/file/7aaba721ebc0/Lib/socket.py#l252 +_blocking_errnos = set([errno.EAGAIN, errno.EWOULDBLOCK]) + + +class HTTPConnectionPool(ConnectionPool, RequestMethods): + """ + Thread-safe connection pool for one host. + + :param host: + Host used for this HTTP Connection (e.g. "localhost"), passed into + :class:`httplib.HTTPConnection`. + + :param port: + Port used for this HTTP Connection (None is equivalent to 80), passed + into :class:`httplib.HTTPConnection`. + + :param strict: + Causes BadStatusLine to be raised if the status line can't be parsed + as a valid HTTP/1.0 or 1.1 status line, passed into + :class:`httplib.HTTPConnection`. + + .. note:: + Only works in Python 2. This parameter is ignored in Python 3. + + :param timeout: + Socket timeout in seconds for each individual connection. This can + be a float or integer, which sets the timeout for the HTTP request, + or an instance of :class:`urllib3.util.Timeout` which gives you more + fine-grained control over request timeouts. 
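A sketch of the Timeout object mentioned above, assuming the vendored import paths and a placeholder host: it sets separate connect and read timeouts instead of one float covering both.

from pip._vendor.urllib3.util.timeout import Timeout
from pip._vendor.urllib3.connectionpool import HTTPConnectionPool

timeout = Timeout(connect=2.0, read=7.0)     # seconds
pool = HTTPConnectionPool('example.com', port=80, timeout=timeout)
# A single request can still override the pool-level default:
# pool.request('GET', '/', timeout=Timeout(connect=1.0, read=3.0))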
After the constructor has + been parsed, this is always a `urllib3.util.Timeout` object. + + :param maxsize: + Number of connections to save that can be reused. More than 1 is useful + in multithreaded situations. If ``block`` is set to False, more + connections will be created but they will not be saved once they've + been used. + + :param block: + If set to True, no more than ``maxsize`` connections will be used at + a time. When no free connections are available, the call will block + until a connection has been released. This is a useful side effect for + particular multithreaded situations where one does not want to use more + than maxsize connections per host to prevent flooding. + + :param headers: + Headers to include with all requests, unless other headers are given + explicitly. + + :param retries: + Retry configuration to use by default with requests in this pool. + + :param _proxy: + Parsed proxy URL, should not be used directly, instead, see + :class:`urllib3.connectionpool.ProxyManager`" + + :param _proxy_headers: + A dictionary with proxy headers, should not be used directly, + instead, see :class:`urllib3.connectionpool.ProxyManager`" + + :param \\**conn_kw: + Additional parameters are used to create fresh :class:`urllib3.connection.HTTPConnection`, + :class:`urllib3.connection.HTTPSConnection` instances. + """ + + scheme = 'http' + ConnectionCls = HTTPConnection + ResponseCls = HTTPResponse + + def __init__(self, host, port=None, strict=False, + timeout=Timeout.DEFAULT_TIMEOUT, maxsize=1, block=False, + headers=None, retries=None, + _proxy=None, _proxy_headers=None, + **conn_kw): + ConnectionPool.__init__(self, host, port) + RequestMethods.__init__(self, headers) + + self.strict = strict + + if not isinstance(timeout, Timeout): + timeout = Timeout.from_float(timeout) + + if retries is None: + retries = Retry.DEFAULT + + self.timeout = timeout + self.retries = retries + + self.pool = self.QueueCls(maxsize) + self.block = block + + self.proxy = _proxy + self.proxy_headers = _proxy_headers or {} + + # Fill the queue up so that doing get() on it will block properly + for _ in xrange(maxsize): + self.pool.put(None) + + # These are mostly for testing and debugging purposes. + self.num_connections = 0 + self.num_requests = 0 + self.conn_kw = conn_kw + + if self.proxy: + # Enable Nagle's algorithm for proxies, to avoid packet fragmentation. + # We cannot know if the user has added default socket options, so we cannot replace the + # list. + self.conn_kw.setdefault('socket_options', []) + + def _new_conn(self): + """ + Return a fresh :class:`HTTPConnection`. + """ + self.num_connections += 1 + log.debug("Starting new HTTP connection (%d): %s:%s", + self.num_connections, self.host, self.port or "80") + + conn = self.ConnectionCls(host=self.host, port=self.port, + timeout=self.timeout.connect_timeout, + strict=self.strict, **self.conn_kw) + return conn + + def _get_conn(self, timeout=None): + """ + Get a connection. Will return a pooled connection if one is available. + + If no connections are available and :prop:`.block` is ``False``, then a + fresh connection is returned. + + :param timeout: + Seconds to wait before giving up and raising + :class:`urllib3.exceptions.EmptyPoolError` if the pool is empty and + :prop:`.block` is ``True``. 
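A sketch of a pool sized for multithreaded use, following the maxsize/block parameters above (placeholder host): with block=True no more than maxsize sockets are opened, and extra callers wait for a free connection.

from pip._vendor.urllib3.connectionpool import HTTPConnectionPool

pool = HTTPConnectionPool('example.com', port=80, maxsize=10, block=True)
response = pool.request('GET', '/')          # inherited from RequestMethods
print(response.status, len(response.data))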
+ """ + conn = None + try: + conn = self.pool.get(block=self.block, timeout=timeout) + + except AttributeError: # self.pool is None + raise ClosedPoolError(self, "Pool is closed.") + + except queue.Empty: + if self.block: + raise EmptyPoolError(self, + "Pool reached maximum size and no more " + "connections are allowed.") + pass # Oh well, we'll create a new connection then + + # If this is a persistent connection, check if it got disconnected + if conn and is_connection_dropped(conn): + log.debug("Resetting dropped connection: %s", self.host) + conn.close() + if getattr(conn, 'auto_open', 1) == 0: + # This is a proxied connection that has been mutated by + # httplib._tunnel() and cannot be reused (since it would + # attempt to bypass the proxy) + conn = None + + return conn or self._new_conn() + + def _put_conn(self, conn): + """ + Put a connection back into the pool. + + :param conn: + Connection object for the current host and port as returned by + :meth:`._new_conn` or :meth:`._get_conn`. + + If the pool is already full, the connection is closed and discarded + because we exceeded maxsize. If connections are discarded frequently, + then maxsize should be increased. + + If the pool is closed, then the connection will be closed and discarded. + """ + try: + self.pool.put(conn, block=False) + return # Everything is dandy, done. + except AttributeError: + # self.pool is None. + pass + except queue.Full: + # This should never happen if self.block == True + log.warning( + "Connection pool is full, discarding connection: %s", + self.host) + + # Connection never got put back into the pool, close it. + if conn: + conn.close() + + def _validate_conn(self, conn): + """ + Called right before a request is made, after the socket is created. + """ + pass + + def _prepare_proxy(self, conn): + # Nothing to do for HTTP connections. + pass + + def _get_timeout(self, timeout): + """ Helper that always returns a :class:`urllib3.util.Timeout` """ + if timeout is _Default: + return self.timeout.clone() + + if isinstance(timeout, Timeout): + return timeout.clone() + else: + # User passed us an int/float. This is for backwards compatibility, + # can be removed later + return Timeout.from_float(timeout) + + def _raise_timeout(self, err, url, timeout_value): + """Is the error actually a timeout? Will raise a ReadTimeout or pass""" + + if isinstance(err, SocketTimeout): + raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value) + + # See the above comment about EAGAIN in Python 3. In Python 2 we have + # to specifically catch it and throw the timeout error + if hasattr(err, 'errno') and err.errno in _blocking_errnos: + raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value) + + # Catch possible read timeouts thrown as SSL errors. If not the + # case, rethrow the original. We need to do this because of: + # http://bugs.python.org/issue10272 + if 'timed out' in str(err) or 'did not complete (read)' in str(err): # Python 2.6 + raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value) + + def _make_request(self, conn, method, url, timeout=_Default, chunked=False, + **httplib_request_kw): + """ + Perform a request on a given urllib connection object taken from our + pool. + + :param conn: + a connection from one of our connection pools + + :param timeout: + Socket timeout in seconds for the request. 
This can be a + float or integer, which will set the same timeout value for + the socket connect and the socket read, or an instance of + :class:`urllib3.util.Timeout`, which gives you more fine-grained + control over your timeouts. + """ + self.num_requests += 1 + + timeout_obj = self._get_timeout(timeout) + timeout_obj.start_connect() + conn.timeout = timeout_obj.connect_timeout + + # Trigger any extra validation we need to do. + try: + self._validate_conn(conn) + except (SocketTimeout, BaseSSLError) as e: + # Py2 raises this as a BaseSSLError, Py3 raises it as socket timeout. + self._raise_timeout(err=e, url=url, timeout_value=conn.timeout) + raise + + # conn.request() calls httplib.*.request, not the method in + # urllib3.request. It also calls makefile (recv) on the socket. + if chunked: + conn.request_chunked(method, url, **httplib_request_kw) + else: + conn.request(method, url, **httplib_request_kw) + + # Reset the timeout for the recv() on the socket + read_timeout = timeout_obj.read_timeout + + # App Engine doesn't have a sock attr + if getattr(conn, 'sock', None): + # In Python 3 socket.py will catch EAGAIN and return None when you + # try and read into the file pointer created by http.client, which + # instead raises a BadStatusLine exception. Instead of catching + # the exception and assuming all BadStatusLine exceptions are read + # timeouts, check for a zero timeout before making the request. + if read_timeout == 0: + raise ReadTimeoutError( + self, url, "Read timed out. (read timeout=%s)" % read_timeout) + if read_timeout is Timeout.DEFAULT_TIMEOUT: + conn.sock.settimeout(socket.getdefaulttimeout()) + else: # None or a value + conn.sock.settimeout(read_timeout) + + # Receive the response from the server + try: + try: # Python 2.7, use buffering of HTTP responses + httplib_response = conn.getresponse(buffering=True) + except TypeError: # Python 2.6 and older, Python 3 + try: + httplib_response = conn.getresponse() + except Exception as e: + # Remove the TypeError from the exception chain in Python 3; + # otherwise it looks like a programming error was the cause. + six.raise_from(e, None) + except (SocketTimeout, BaseSSLError, SocketError) as e: + self._raise_timeout(err=e, url=url, timeout_value=read_timeout) + raise + + # AppEngine doesn't have a version attr. + http_version = getattr(conn, '_http_vsn_str', 'HTTP/?') + log.debug("%s://%s:%s \"%s %s %s\" %s %s", self.scheme, self.host, self.port, + method, url, http_version, httplib_response.status, + httplib_response.length) + + try: + assert_header_parsing(httplib_response.msg) + except (HeaderParsingError, TypeError) as hpe: # Platform-specific: Python 3 + log.warning( + 'Failed to parse headers (url=%s): %s', + self._absolute_url(url), hpe, exc_info=True) + + return httplib_response + + def _absolute_url(self, path): + return Url(scheme=self.scheme, host=self.host, port=self.port, path=path).url + + def close(self): + """ + Close all pooled connections and disable the pool. + """ + if self.pool is None: + return + # Disable access to the pool + old_pool, self.pool = self.pool, None + + try: + while True: + conn = old_pool.get(block=False) + if conn: + conn.close() + + except queue.Empty: + pass # Done. + + def is_same_host(self, url): + """ + Check if the given ``url`` is a member of the same host as this + connection pool. + """ + if url.startswith('/'): + return True + + # TODO: Add optional support for socket.gethostbyname checking. 
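Since ConnectionPool implements __enter__/__exit__, the close() method above also runs when the pool is used as a context manager; a brief sketch with a placeholder host and path:

from pip._vendor.urllib3.connectionpool import HTTPConnectionPool

with HTTPConnectionPool('example.com', port=80, maxsize=2) as pool:
    response = pool.request('GET', '/index.html')
    print(response.status)
# The pool is closed here; later _get_conn() calls raise ClosedPoolError.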
+ scheme, host, port = get_host(url) + + host = _ipv6_host(host, self.scheme) + + # Use explicit default port for comparison when none is given + if self.port and not port: + port = port_by_scheme.get(scheme) + elif not self.port and port == port_by_scheme.get(scheme): + port = None + + return (scheme, host, port) == (self.scheme, self.host, self.port) + + def urlopen(self, method, url, body=None, headers=None, retries=None, + redirect=True, assert_same_host=True, timeout=_Default, + pool_timeout=None, release_conn=None, chunked=False, + body_pos=None, **response_kw): + """ + Get a connection from the pool and perform an HTTP request. This is the + lowest level call for making a request, so you'll need to specify all + the raw details. + + .. note:: + + More commonly, it's appropriate to use a convenience method provided + by :class:`.RequestMethods`, such as :meth:`request`. + + .. note:: + + `release_conn` will only behave as expected if + `preload_content=False` because we want to make + `preload_content=False` the default behaviour someday soon without + breaking backwards compatibility. + + :param method: + HTTP request method (such as GET, POST, PUT, etc.) + + :param body: + Data to send in the request body (useful for creating + POST requests, see HTTPConnectionPool.post_url for + more convenience). + + :param headers: + Dictionary of custom headers to send, such as User-Agent, + If-None-Match, etc. If None, pool headers are used. If provided, + these headers completely replace any pool-specific headers. + + :param retries: + Configure the number of retries to allow before raising a + :class:`~urllib3.exceptions.MaxRetryError` exception. + + Pass ``None`` to retry until you receive a response. Pass a + :class:`~urllib3.util.retry.Retry` object for fine-grained control + over different types of retries. + Pass an integer number to retry connection errors that many times, + but no other types of errors. Pass zero to never retry. + + If ``False``, then retries are disabled and any exception is raised + immediately. Also, instead of raising a MaxRetryError on redirects, + the redirect response will be returned. + + :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int. + + :param redirect: + If True, automatically handle redirects (status codes 301, 302, + 303, 307, 308). Each redirect counts as a retry. Disabling retries + will disable redirect, too. + + :param assert_same_host: + If ``True``, will make sure that the host of the pool requests is + consistent else will raise HostChangedError. When False, you can + use the pool on an HTTP proxy and request foreign hosts. + + :param timeout: + If specified, overrides the default timeout for this one + request. It may be a float (in seconds) or an instance of + :class:`urllib3.util.Timeout`. + + :param pool_timeout: + If set and the pool is set to block=True, then this method will + block for ``pool_timeout`` seconds and raise EmptyPoolError if no + connection is available within the time period. + + :param release_conn: + If False, then the urlopen call will not release the connection + back into the pool once a response is received (but will release if + you read the entire contents of the response such as when + `preload_content=True`). This is useful if you're not preloading + the response's content immediately. You will need to call + ``r.release_conn()`` on the response ``r`` to return the connection + back into the pool. If None, it takes the value of + ``response_kw.get('preload_content', True)``. 
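A sketch of the release_conn behaviour described above, assuming the vendored import path and a placeholder URL: with preload_content=False the body is streamed and the caller returns the connection explicitly.

from pip._vendor.urllib3.connectionpool import HTTPConnectionPool

pool = HTTPConnectionPool('example.com', port=80)
response = pool.urlopen('GET', '/big-file', preload_content=False)
try:
    for chunk in response.stream(1024):
        pass                        # process each chunk as it arrives
finally:
    response.release_conn()         # hand the socket back to the pool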
+ + :param chunked: + If True, urllib3 will send the body using chunked transfer + encoding. Otherwise, urllib3 will send the body using the standard + content-length form. Defaults to False. + + :param int body_pos: + Position to seek to in file-like body in the event of a retry or + redirect. Typically this won't need to be set because urllib3 will + auto-populate the value when needed. + + :param \\**response_kw: + Additional parameters are passed to + :meth:`urllib3.response.HTTPResponse.from_httplib` + """ + if headers is None: + headers = self.headers + + if not isinstance(retries, Retry): + retries = Retry.from_int(retries, redirect=redirect, default=self.retries) + + if release_conn is None: + release_conn = response_kw.get('preload_content', True) + + # Check host + if assert_same_host and not self.is_same_host(url): + raise HostChangedError(self, url, retries) + + conn = None + + # Track whether `conn` needs to be released before + # returning/raising/recursing. Update this variable if necessary, and + # leave `release_conn` constant throughout the function. That way, if + # the function recurses, the original value of `release_conn` will be + # passed down into the recursive call, and its value will be respected. + # + # See issue #651 [1] for details. + # + # [1] <https://github.com/shazow/urllib3/issues/651> + release_this_conn = release_conn + + # Merge the proxy headers. Only do this in HTTP. We have to copy the + # headers dict so we can safely change it without those changes being + # reflected in anyone else's copy. + if self.scheme == 'http': + headers = headers.copy() + headers.update(self.proxy_headers) + + # Must keep the exception bound to a separate variable or else Python 3 + # complains about UnboundLocalError. + err = None + + # Keep track of whether we cleanly exited the except block. This + # ensures we do proper cleanup in finally. + clean_exit = False + + # Rewind body position, if needed. Record current position + # for future rewinds in the event of a redirect/retry. + body_pos = set_file_position(body, body_pos) + + try: + # Request a connection from the queue. + timeout_obj = self._get_timeout(timeout) + conn = self._get_conn(timeout=pool_timeout) + + conn.timeout = timeout_obj.connect_timeout + + is_new_proxy_conn = self.proxy is not None and not getattr(conn, 'sock', None) + if is_new_proxy_conn: + self._prepare_proxy(conn) + + # Make the request on the httplib connection object. + httplib_response = self._make_request(conn, method, url, + timeout=timeout_obj, + body=body, headers=headers, + chunked=chunked) + + # If we're going to release the connection in ``finally:``, then + # the response doesn't need to know about the connection. Otherwise + # it will also try to release it and we'll have a double-release + # mess. + response_conn = conn if not release_conn else None + + # Pass method to Response for length checking + response_kw['request_method'] = method + + # Import httplib's response into our own wrapper object + response = self.ResponseCls.from_httplib(httplib_response, + pool=self, + connection=response_conn, + retries=retries, + **response_kw) + + # Everything went great! + clean_exit = True + + except queue.Empty: + # Timed out by queue. + raise EmptyPoolError(self, "No pool connections are available.") + + except (TimeoutError, HTTPException, SocketError, ProtocolError, + BaseSSLError, SSLError, CertificateError) as e: + # Discard the connection for these exceptions. It will be + # replaced during the next _get_conn() call. 
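A sketch of a Retry policy rather than a bare integer, as the retries parameter above allows; the import paths and the endpoint are placeholders.

from pip._vendor.urllib3.util.retry import Retry
from pip._vendor.urllib3.connectionpool import HTTPConnectionPool

retry_policy = Retry(total=3, backoff_factor=0.5,
                     status_forcelist=[502, 503, 504])
pool = HTTPConnectionPool('example.com', port=80, retries=retry_policy)
response = pool.request('GET', '/flaky-endpoint')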
+ clean_exit = False + if isinstance(e, (BaseSSLError, CertificateError)): + e = SSLError(e) + elif isinstance(e, (SocketError, NewConnectionError)) and self.proxy: + e = ProxyError('Cannot connect to proxy.', e) + elif isinstance(e, (SocketError, HTTPException)): + e = ProtocolError('Connection aborted.', e) + + retries = retries.increment(method, url, error=e, _pool=self, + _stacktrace=sys.exc_info()[2]) + retries.sleep() + + # Keep track of the error for the retry warning. + err = e + + finally: + if not clean_exit: + # We hit some kind of exception, handled or otherwise. We need + # to throw the connection away unless explicitly told not to. + # Close the connection, set the variable to None, and make sure + # we put the None back in the pool to avoid leaking it. + conn = conn and conn.close() + release_this_conn = True + + if release_this_conn: + # Put the connection back to be reused. If the connection is + # expired then it will be None, which will get replaced with a + # fresh connection during _get_conn. + self._put_conn(conn) + + if not conn: + # Try again + log.warning("Retrying (%r) after connection " + "broken by '%r': %s", retries, err, url) + return self.urlopen(method, url, body, headers, retries, + redirect, assert_same_host, + timeout=timeout, pool_timeout=pool_timeout, + release_conn=release_conn, body_pos=body_pos, + **response_kw) + + def drain_and_release_conn(response): + try: + # discard any remaining response body, the connection will be + # released back to the pool once the entire response is read + response.read() + except (TimeoutError, HTTPException, SocketError, ProtocolError, + BaseSSLError, SSLError) as e: + pass + + # Handle redirect? + redirect_location = redirect and response.get_redirect_location() + if redirect_location: + if response.status == 303: + method = 'GET' + + try: + retries = retries.increment(method, url, response=response, _pool=self) + except MaxRetryError: + if retries.raise_on_redirect: + # Drain and release the connection for this response, since + # we're not returning it to be released manually. + drain_and_release_conn(response) + raise + return response + + # drain and return the connection to the pool before recursing + drain_and_release_conn(response) + + retries.sleep_for_retry(response) + log.debug("Redirecting %s -> %s", url, redirect_location) + return self.urlopen( + method, redirect_location, body, headers, + retries=retries, redirect=redirect, + assert_same_host=assert_same_host, + timeout=timeout, pool_timeout=pool_timeout, + release_conn=release_conn, body_pos=body_pos, + **response_kw) + + # Check if we should retry the HTTP response. + has_retry_after = bool(response.getheader('Retry-After')) + if retries.is_retry(method, response.status, has_retry_after): + try: + retries = retries.increment(method, url, response=response, _pool=self) + except MaxRetryError: + if retries.raise_on_status: + # Drain and release the connection for this response, since + # we're not returning it to be released manually. 
+ drain_and_release_conn(response) + raise + return response + + # drain and return the connection to the pool before recursing + drain_and_release_conn(response) + + retries.sleep(response) + log.debug("Retry: %s", url) + return self.urlopen( + method, url, body, headers, + retries=retries, redirect=redirect, + assert_same_host=assert_same_host, + timeout=timeout, pool_timeout=pool_timeout, + release_conn=release_conn, + body_pos=body_pos, **response_kw) + + return response + + +class HTTPSConnectionPool(HTTPConnectionPool): + """ + Same as :class:`.HTTPConnectionPool`, but HTTPS. + + When Python is compiled with the :mod:`ssl` module, then + :class:`.VerifiedHTTPSConnection` is used, which *can* verify certificates, + instead of :class:`.HTTPSConnection`. + + :class:`.VerifiedHTTPSConnection` uses one of ``assert_fingerprint``, + ``assert_hostname`` and ``host`` in this order to verify connections. + If ``assert_hostname`` is False, no verification is done. + + The ``key_file``, ``cert_file``, ``cert_reqs``, ``ca_certs``, + ``ca_cert_dir``, and ``ssl_version`` are only used if :mod:`ssl` is + available and are fed into :meth:`urllib3.util.ssl_wrap_socket` to upgrade + the connection socket into an SSL socket. + """ + + scheme = 'https' + ConnectionCls = HTTPSConnection + + def __init__(self, host, port=None, + strict=False, timeout=Timeout.DEFAULT_TIMEOUT, maxsize=1, + block=False, headers=None, retries=None, + _proxy=None, _proxy_headers=None, + key_file=None, cert_file=None, cert_reqs=None, + ca_certs=None, ssl_version=None, + assert_hostname=None, assert_fingerprint=None, + ca_cert_dir=None, **conn_kw): + + HTTPConnectionPool.__init__(self, host, port, strict, timeout, maxsize, + block, headers, retries, _proxy, _proxy_headers, + **conn_kw) + + if ca_certs and cert_reqs is None: + cert_reqs = 'CERT_REQUIRED' + + self.key_file = key_file + self.cert_file = cert_file + self.cert_reqs = cert_reqs + self.ca_certs = ca_certs + self.ca_cert_dir = ca_cert_dir + self.ssl_version = ssl_version + self.assert_hostname = assert_hostname + self.assert_fingerprint = assert_fingerprint + + def _prepare_conn(self, conn): + """ + Prepare the ``connection`` for :meth:`urllib3.util.ssl_wrap_socket` + and establish the tunnel if proxy is used. + """ + + if isinstance(conn, VerifiedHTTPSConnection): + conn.set_cert(key_file=self.key_file, + cert_file=self.cert_file, + cert_reqs=self.cert_reqs, + ca_certs=self.ca_certs, + ca_cert_dir=self.ca_cert_dir, + assert_hostname=self.assert_hostname, + assert_fingerprint=self.assert_fingerprint) + conn.ssl_version = self.ssl_version + return conn + + def _prepare_proxy(self, conn): + """ + Establish tunnel connection early, because otherwise httplib + would improperly set Host: header to proxy's IP:port. + """ + # Python 2.7+ + try: + set_tunnel = conn.set_tunnel + except AttributeError: # Platform-specific: Python 2.6 + set_tunnel = conn._set_tunnel + + if sys.version_info <= (2, 6, 4) and not self.proxy_headers: # Python 2.6.4 and older + set_tunnel(self._proxy_host, self.port) + else: + set_tunnel(self._proxy_host, self.port, self.proxy_headers) + + conn.connect() + + def _new_conn(self): + """ + Return a fresh :class:`httplib.HTTPSConnection`. 
+ """ + self.num_connections += 1 + log.debug("Starting new HTTPS connection (%d): %s:%s", + self.num_connections, self.host, self.port or "443") + + if not self.ConnectionCls or self.ConnectionCls is DummyConnection: + raise SSLError("Can't connect to HTTPS URL because the SSL " + "module is not available.") + + actual_host = self.host + actual_port = self.port + if self.proxy is not None: + actual_host = self.proxy.host + actual_port = self.proxy.port + + conn = self.ConnectionCls(host=actual_host, port=actual_port, + timeout=self.timeout.connect_timeout, + strict=self.strict, **self.conn_kw) + + return self._prepare_conn(conn) + + def _validate_conn(self, conn): + """ + Called right before a request is made, after the socket is created. + """ + super(HTTPSConnectionPool, self)._validate_conn(conn) + + # Force connect early to allow us to validate the connection. + if not getattr(conn, 'sock', None): # AppEngine might not have `.sock` + conn.connect() + + if not conn.is_verified: + warnings.warn(( + 'Unverified HTTPS request is being made. ' + 'Adding certificate verification is strongly advised. See: ' + 'https://urllib3.readthedocs.io/en/latest/advanced-usage.html' + '#ssl-warnings'), + InsecureRequestWarning) + + +def connection_from_url(url, **kw): + """ + Given a url, return an :class:`.ConnectionPool` instance of its host. + + This is a shortcut for not having to parse out the scheme, host, and port + of the url before creating an :class:`.ConnectionPool` instance. + + :param url: + Absolute URL string that must include the scheme. Port is optional. + + :param \\**kw: + Passes additional parameters to the constructor of the appropriate + :class:`.ConnectionPool`. Useful for specifying things like + timeout, maxsize, headers, etc. + + Example:: + + >>> conn = connection_from_url('http://google.com/') + >>> r = conn.request('GET', '/') + """ + scheme, host, port = get_host(url) + port = port or port_by_scheme.get(scheme, 80) + if scheme == 'https': + return HTTPSConnectionPool(host, port=port, **kw) + else: + return HTTPConnectionPool(host, port=port, **kw) + + +def _ipv6_host(host, scheme): + """ + Process IPv6 address literals + """ + + # httplib doesn't like it when we include brackets in IPv6 addresses + # Specifically, if we include brackets but also pass the port then + # httplib crazily doubles up the square brackets on the Host header. + # Instead, we need to make sure we never pass ``None`` as the port. + # However, for backward compatibility reasons we can't actually + # *assert* that. See http://bugs.python.org/issue28539 + # + # Also if an IPv6 address literal has a zone identifier, the + # percent sign might be URIencoded, convert it back into ASCII + if host.startswith('[') and host.endswith(']'): + host = host.replace('%25', '%').strip('[]') + if scheme in NORMALIZABLE_SCHEMES: + host = host.lower() + return host diff --git a/env/lib/python3.6/site-packages/pip/_vendor/urllib3/contrib/_securetransport/bindings.py b/env/lib/python3.6/site-packages/pip/_vendor/urllib3/contrib/_securetransport/bindings.py new file mode 100644 index 0000000..bcf41c0 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/urllib3/contrib/_securetransport/bindings.py @@ -0,0 +1,593 @@ +""" +This module uses ctypes to bind a whole bunch of functions and constants from +SecureTransport. The goal here is to provide the low-level API to +SecureTransport. These are essentially the C-level functions and constants, and +they're pretty gross to work with. 
+ +This code is a bastardised version of the code found in Will Bond's oscrypto +library. An enormous debt is owed to him for blazing this trail for us. For +that reason, this code should be considered to be covered both by urllib3's +license and by oscrypto's: + + Copyright (c) 2015-2016 Will Bond <will@wbond.net> + + Permission is hereby granted, free of charge, to any person obtaining a + copy of this software and associated documentation files (the "Software"), + to deal in the Software without restriction, including without limitation + the rights to use, copy, modify, merge, publish, distribute, sublicense, + and/or sell copies of the Software, and to permit persons to whom the + Software is furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + DEALINGS IN THE SOFTWARE. +""" +from __future__ import absolute_import + +import platform +from ctypes.util import find_library +from ctypes import ( + c_void_p, c_int32, c_char_p, c_size_t, c_byte, c_uint32, c_ulong, c_long, + c_bool +) +from ctypes import CDLL, POINTER, CFUNCTYPE + + +security_path = find_library('Security') +if not security_path: + raise ImportError('The library Security could not be found') + + +core_foundation_path = find_library('CoreFoundation') +if not core_foundation_path: + raise ImportError('The library CoreFoundation could not be found') + + +version = platform.mac_ver()[0] +version_info = tuple(map(int, version.split('.'))) +if version_info < (10, 8): + raise OSError( + 'Only OS X 10.8 and newer are supported, not %s.%s' % ( + version_info[0], version_info[1] + ) + ) + +Security = CDLL(security_path, use_errno=True) +CoreFoundation = CDLL(core_foundation_path, use_errno=True) + +Boolean = c_bool +CFIndex = c_long +CFStringEncoding = c_uint32 +CFData = c_void_p +CFString = c_void_p +CFArray = c_void_p +CFMutableArray = c_void_p +CFDictionary = c_void_p +CFError = c_void_p +CFType = c_void_p +CFTypeID = c_ulong + +CFTypeRef = POINTER(CFType) +CFAllocatorRef = c_void_p + +OSStatus = c_int32 + +CFDataRef = POINTER(CFData) +CFStringRef = POINTER(CFString) +CFArrayRef = POINTER(CFArray) +CFMutableArrayRef = POINTER(CFMutableArray) +CFDictionaryRef = POINTER(CFDictionary) +CFArrayCallBacks = c_void_p +CFDictionaryKeyCallBacks = c_void_p +CFDictionaryValueCallBacks = c_void_p + +SecCertificateRef = POINTER(c_void_p) +SecExternalFormat = c_uint32 +SecExternalItemType = c_uint32 +SecIdentityRef = POINTER(c_void_p) +SecItemImportExportFlags = c_uint32 +SecItemImportExportKeyParameters = c_void_p +SecKeychainRef = POINTER(c_void_p) +SSLProtocol = c_uint32 +SSLCipherSuite = c_uint32 +SSLContextRef = POINTER(c_void_p) +SecTrustRef = POINTER(c_void_p) +SSLConnectionRef = c_uint32 +SecTrustResultType = c_uint32 +SecTrustOptionFlags = c_uint32 +SSLProtocolSide = c_uint32 +SSLConnectionType = c_uint32 +SSLSessionOption = c_uint32 + + +try: + Security.SecItemImport.argtypes = [ + CFDataRef, + CFStringRef, + POINTER(SecExternalFormat), + 
POINTER(SecExternalItemType), + SecItemImportExportFlags, + POINTER(SecItemImportExportKeyParameters), + SecKeychainRef, + POINTER(CFArrayRef), + ] + Security.SecItemImport.restype = OSStatus + + Security.SecCertificateGetTypeID.argtypes = [] + Security.SecCertificateGetTypeID.restype = CFTypeID + + Security.SecIdentityGetTypeID.argtypes = [] + Security.SecIdentityGetTypeID.restype = CFTypeID + + Security.SecKeyGetTypeID.argtypes = [] + Security.SecKeyGetTypeID.restype = CFTypeID + + Security.SecCertificateCreateWithData.argtypes = [ + CFAllocatorRef, + CFDataRef + ] + Security.SecCertificateCreateWithData.restype = SecCertificateRef + + Security.SecCertificateCopyData.argtypes = [ + SecCertificateRef + ] + Security.SecCertificateCopyData.restype = CFDataRef + + Security.SecCopyErrorMessageString.argtypes = [ + OSStatus, + c_void_p + ] + Security.SecCopyErrorMessageString.restype = CFStringRef + + Security.SecIdentityCreateWithCertificate.argtypes = [ + CFTypeRef, + SecCertificateRef, + POINTER(SecIdentityRef) + ] + Security.SecIdentityCreateWithCertificate.restype = OSStatus + + Security.SecKeychainCreate.argtypes = [ + c_char_p, + c_uint32, + c_void_p, + Boolean, + c_void_p, + POINTER(SecKeychainRef) + ] + Security.SecKeychainCreate.restype = OSStatus + + Security.SecKeychainDelete.argtypes = [ + SecKeychainRef + ] + Security.SecKeychainDelete.restype = OSStatus + + Security.SecPKCS12Import.argtypes = [ + CFDataRef, + CFDictionaryRef, + POINTER(CFArrayRef) + ] + Security.SecPKCS12Import.restype = OSStatus + + SSLReadFunc = CFUNCTYPE(OSStatus, SSLConnectionRef, c_void_p, POINTER(c_size_t)) + SSLWriteFunc = CFUNCTYPE(OSStatus, SSLConnectionRef, POINTER(c_byte), POINTER(c_size_t)) + + Security.SSLSetIOFuncs.argtypes = [ + SSLContextRef, + SSLReadFunc, + SSLWriteFunc + ] + Security.SSLSetIOFuncs.restype = OSStatus + + Security.SSLSetPeerID.argtypes = [ + SSLContextRef, + c_char_p, + c_size_t + ] + Security.SSLSetPeerID.restype = OSStatus + + Security.SSLSetCertificate.argtypes = [ + SSLContextRef, + CFArrayRef + ] + Security.SSLSetCertificate.restype = OSStatus + + Security.SSLSetCertificateAuthorities.argtypes = [ + SSLContextRef, + CFTypeRef, + Boolean + ] + Security.SSLSetCertificateAuthorities.restype = OSStatus + + Security.SSLSetConnection.argtypes = [ + SSLContextRef, + SSLConnectionRef + ] + Security.SSLSetConnection.restype = OSStatus + + Security.SSLSetPeerDomainName.argtypes = [ + SSLContextRef, + c_char_p, + c_size_t + ] + Security.SSLSetPeerDomainName.restype = OSStatus + + Security.SSLHandshake.argtypes = [ + SSLContextRef + ] + Security.SSLHandshake.restype = OSStatus + + Security.SSLRead.argtypes = [ + SSLContextRef, + c_char_p, + c_size_t, + POINTER(c_size_t) + ] + Security.SSLRead.restype = OSStatus + + Security.SSLWrite.argtypes = [ + SSLContextRef, + c_char_p, + c_size_t, + POINTER(c_size_t) + ] + Security.SSLWrite.restype = OSStatus + + Security.SSLClose.argtypes = [ + SSLContextRef + ] + Security.SSLClose.restype = OSStatus + + Security.SSLGetNumberSupportedCiphers.argtypes = [ + SSLContextRef, + POINTER(c_size_t) + ] + Security.SSLGetNumberSupportedCiphers.restype = OSStatus + + Security.SSLGetSupportedCiphers.argtypes = [ + SSLContextRef, + POINTER(SSLCipherSuite), + POINTER(c_size_t) + ] + Security.SSLGetSupportedCiphers.restype = OSStatus + + Security.SSLSetEnabledCiphers.argtypes = [ + SSLContextRef, + POINTER(SSLCipherSuite), + c_size_t + ] + Security.SSLSetEnabledCiphers.restype = OSStatus + + Security.SSLGetNumberEnabledCiphers.argtype = [ + SSLContextRef, + 
POINTER(c_size_t) + ] + Security.SSLGetNumberEnabledCiphers.restype = OSStatus + + Security.SSLGetEnabledCiphers.argtypes = [ + SSLContextRef, + POINTER(SSLCipherSuite), + POINTER(c_size_t) + ] + Security.SSLGetEnabledCiphers.restype = OSStatus + + Security.SSLGetNegotiatedCipher.argtypes = [ + SSLContextRef, + POINTER(SSLCipherSuite) + ] + Security.SSLGetNegotiatedCipher.restype = OSStatus + + Security.SSLGetNegotiatedProtocolVersion.argtypes = [ + SSLContextRef, + POINTER(SSLProtocol) + ] + Security.SSLGetNegotiatedProtocolVersion.restype = OSStatus + + Security.SSLCopyPeerTrust.argtypes = [ + SSLContextRef, + POINTER(SecTrustRef) + ] + Security.SSLCopyPeerTrust.restype = OSStatus + + Security.SecTrustSetAnchorCertificates.argtypes = [ + SecTrustRef, + CFArrayRef + ] + Security.SecTrustSetAnchorCertificates.restype = OSStatus + + Security.SecTrustSetAnchorCertificatesOnly.argstypes = [ + SecTrustRef, + Boolean + ] + Security.SecTrustSetAnchorCertificatesOnly.restype = OSStatus + + Security.SecTrustEvaluate.argtypes = [ + SecTrustRef, + POINTER(SecTrustResultType) + ] + Security.SecTrustEvaluate.restype = OSStatus + + Security.SecTrustGetCertificateCount.argtypes = [ + SecTrustRef + ] + Security.SecTrustGetCertificateCount.restype = CFIndex + + Security.SecTrustGetCertificateAtIndex.argtypes = [ + SecTrustRef, + CFIndex + ] + Security.SecTrustGetCertificateAtIndex.restype = SecCertificateRef + + Security.SSLCreateContext.argtypes = [ + CFAllocatorRef, + SSLProtocolSide, + SSLConnectionType + ] + Security.SSLCreateContext.restype = SSLContextRef + + Security.SSLSetSessionOption.argtypes = [ + SSLContextRef, + SSLSessionOption, + Boolean + ] + Security.SSLSetSessionOption.restype = OSStatus + + Security.SSLSetProtocolVersionMin.argtypes = [ + SSLContextRef, + SSLProtocol + ] + Security.SSLSetProtocolVersionMin.restype = OSStatus + + Security.SSLSetProtocolVersionMax.argtypes = [ + SSLContextRef, + SSLProtocol + ] + Security.SSLSetProtocolVersionMax.restype = OSStatus + + Security.SecCopyErrorMessageString.argtypes = [ + OSStatus, + c_void_p + ] + Security.SecCopyErrorMessageString.restype = CFStringRef + + Security.SSLReadFunc = SSLReadFunc + Security.SSLWriteFunc = SSLWriteFunc + Security.SSLContextRef = SSLContextRef + Security.SSLProtocol = SSLProtocol + Security.SSLCipherSuite = SSLCipherSuite + Security.SecIdentityRef = SecIdentityRef + Security.SecKeychainRef = SecKeychainRef + Security.SecTrustRef = SecTrustRef + Security.SecTrustResultType = SecTrustResultType + Security.SecExternalFormat = SecExternalFormat + Security.OSStatus = OSStatus + + Security.kSecImportExportPassphrase = CFStringRef.in_dll( + Security, 'kSecImportExportPassphrase' + ) + Security.kSecImportItemIdentity = CFStringRef.in_dll( + Security, 'kSecImportItemIdentity' + ) + + # CoreFoundation time! 
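The declarations above all follow the same ctypes pattern: set argtypes and restype on a foreign function before calling it. A small, portable sketch of that pattern against CPython's own C API (CPython 3 only; the SecureTransport symbols themselves exist only on macOS):

import ctypes

pythonapi = ctypes.pythonapi
# Prototype: long PyLong_AsLong(PyObject *obj)
pythonapi.PyLong_AsLong.argtypes = [ctypes.py_object]
pythonapi.PyLong_AsLong.restype = ctypes.c_long

assert pythonapi.PyLong_AsLong(42) == 42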
+ CoreFoundation.CFRetain.argtypes = [ + CFTypeRef + ] + CoreFoundation.CFRetain.restype = CFTypeRef + + CoreFoundation.CFRelease.argtypes = [ + CFTypeRef + ] + CoreFoundation.CFRelease.restype = None + + CoreFoundation.CFGetTypeID.argtypes = [ + CFTypeRef + ] + CoreFoundation.CFGetTypeID.restype = CFTypeID + + CoreFoundation.CFStringCreateWithCString.argtypes = [ + CFAllocatorRef, + c_char_p, + CFStringEncoding + ] + CoreFoundation.CFStringCreateWithCString.restype = CFStringRef + + CoreFoundation.CFStringGetCStringPtr.argtypes = [ + CFStringRef, + CFStringEncoding + ] + CoreFoundation.CFStringGetCStringPtr.restype = c_char_p + + CoreFoundation.CFStringGetCString.argtypes = [ + CFStringRef, + c_char_p, + CFIndex, + CFStringEncoding + ] + CoreFoundation.CFStringGetCString.restype = c_bool + + CoreFoundation.CFDataCreate.argtypes = [ + CFAllocatorRef, + c_char_p, + CFIndex + ] + CoreFoundation.CFDataCreate.restype = CFDataRef + + CoreFoundation.CFDataGetLength.argtypes = [ + CFDataRef + ] + CoreFoundation.CFDataGetLength.restype = CFIndex + + CoreFoundation.CFDataGetBytePtr.argtypes = [ + CFDataRef + ] + CoreFoundation.CFDataGetBytePtr.restype = c_void_p + + CoreFoundation.CFDictionaryCreate.argtypes = [ + CFAllocatorRef, + POINTER(CFTypeRef), + POINTER(CFTypeRef), + CFIndex, + CFDictionaryKeyCallBacks, + CFDictionaryValueCallBacks + ] + CoreFoundation.CFDictionaryCreate.restype = CFDictionaryRef + + CoreFoundation.CFDictionaryGetValue.argtypes = [ + CFDictionaryRef, + CFTypeRef + ] + CoreFoundation.CFDictionaryGetValue.restype = CFTypeRef + + CoreFoundation.CFArrayCreate.argtypes = [ + CFAllocatorRef, + POINTER(CFTypeRef), + CFIndex, + CFArrayCallBacks, + ] + CoreFoundation.CFArrayCreate.restype = CFArrayRef + + CoreFoundation.CFArrayCreateMutable.argtypes = [ + CFAllocatorRef, + CFIndex, + CFArrayCallBacks + ] + CoreFoundation.CFArrayCreateMutable.restype = CFMutableArrayRef + + CoreFoundation.CFArrayAppendValue.argtypes = [ + CFMutableArrayRef, + c_void_p + ] + CoreFoundation.CFArrayAppendValue.restype = None + + CoreFoundation.CFArrayGetCount.argtypes = [ + CFArrayRef + ] + CoreFoundation.CFArrayGetCount.restype = CFIndex + + CoreFoundation.CFArrayGetValueAtIndex.argtypes = [ + CFArrayRef, + CFIndex + ] + CoreFoundation.CFArrayGetValueAtIndex.restype = c_void_p + + CoreFoundation.kCFAllocatorDefault = CFAllocatorRef.in_dll( + CoreFoundation, 'kCFAllocatorDefault' + ) + CoreFoundation.kCFTypeArrayCallBacks = c_void_p.in_dll(CoreFoundation, 'kCFTypeArrayCallBacks') + CoreFoundation.kCFTypeDictionaryKeyCallBacks = c_void_p.in_dll( + CoreFoundation, 'kCFTypeDictionaryKeyCallBacks' + ) + CoreFoundation.kCFTypeDictionaryValueCallBacks = c_void_p.in_dll( + CoreFoundation, 'kCFTypeDictionaryValueCallBacks' + ) + + CoreFoundation.CFTypeRef = CFTypeRef + CoreFoundation.CFArrayRef = CFArrayRef + CoreFoundation.CFStringRef = CFStringRef + CoreFoundation.CFDictionaryRef = CFDictionaryRef + +except (AttributeError): + raise ImportError('Error initializing ctypes') + + +class CFConst(object): + """ + A class object that acts as essentially a namespace for CoreFoundation + constants. + """ + kCFStringEncodingUTF8 = CFStringEncoding(0x08000100) + + +class SecurityConst(object): + """ + A class object that acts as essentially a namespace for Security constants. 
+ """ + kSSLSessionOptionBreakOnServerAuth = 0 + + kSSLProtocol2 = 1 + kSSLProtocol3 = 2 + kTLSProtocol1 = 4 + kTLSProtocol11 = 7 + kTLSProtocol12 = 8 + + kSSLClientSide = 1 + kSSLStreamType = 0 + + kSecFormatPEMSequence = 10 + + kSecTrustResultInvalid = 0 + kSecTrustResultProceed = 1 + # This gap is present on purpose: this was kSecTrustResultConfirm, which + # is deprecated. + kSecTrustResultDeny = 3 + kSecTrustResultUnspecified = 4 + kSecTrustResultRecoverableTrustFailure = 5 + kSecTrustResultFatalTrustFailure = 6 + kSecTrustResultOtherError = 7 + + errSSLProtocol = -9800 + errSSLWouldBlock = -9803 + errSSLClosedGraceful = -9805 + errSSLClosedNoNotify = -9816 + errSSLClosedAbort = -9806 + + errSSLXCertChainInvalid = -9807 + errSSLCrypto = -9809 + errSSLInternal = -9810 + errSSLCertExpired = -9814 + errSSLCertNotYetValid = -9815 + errSSLUnknownRootCert = -9812 + errSSLNoRootCert = -9813 + errSSLHostNameMismatch = -9843 + errSSLPeerHandshakeFail = -9824 + errSSLPeerUserCancelled = -9839 + errSSLWeakPeerEphemeralDHKey = -9850 + errSSLServerAuthCompleted = -9841 + errSSLRecordOverflow = -9847 + + errSecVerifyFailed = -67808 + errSecNoTrustSettings = -25263 + errSecItemNotFound = -25300 + errSecInvalidTrustSettings = -25262 + + # Cipher suites. We only pick the ones our default cipher string allows. + TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384 = 0xC02C + TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384 = 0xC030 + TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256 = 0xC02B + TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256 = 0xC02F + TLS_DHE_DSS_WITH_AES_256_GCM_SHA384 = 0x00A3 + TLS_DHE_RSA_WITH_AES_256_GCM_SHA384 = 0x009F + TLS_DHE_DSS_WITH_AES_128_GCM_SHA256 = 0x00A2 + TLS_DHE_RSA_WITH_AES_128_GCM_SHA256 = 0x009E + TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384 = 0xC024 + TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384 = 0xC028 + TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA = 0xC00A + TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA = 0xC014 + TLS_DHE_RSA_WITH_AES_256_CBC_SHA256 = 0x006B + TLS_DHE_DSS_WITH_AES_256_CBC_SHA256 = 0x006A + TLS_DHE_RSA_WITH_AES_256_CBC_SHA = 0x0039 + TLS_DHE_DSS_WITH_AES_256_CBC_SHA = 0x0038 + TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256 = 0xC023 + TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256 = 0xC027 + TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA = 0xC009 + TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA = 0xC013 + TLS_DHE_RSA_WITH_AES_128_CBC_SHA256 = 0x0067 + TLS_DHE_DSS_WITH_AES_128_CBC_SHA256 = 0x0040 + TLS_DHE_RSA_WITH_AES_128_CBC_SHA = 0x0033 + TLS_DHE_DSS_WITH_AES_128_CBC_SHA = 0x0032 + TLS_RSA_WITH_AES_256_GCM_SHA384 = 0x009D + TLS_RSA_WITH_AES_128_GCM_SHA256 = 0x009C + TLS_RSA_WITH_AES_256_CBC_SHA256 = 0x003D + TLS_RSA_WITH_AES_128_CBC_SHA256 = 0x003C + TLS_RSA_WITH_AES_256_CBC_SHA = 0x0035 + TLS_RSA_WITH_AES_128_CBC_SHA = 0x002F + TLS_AES_128_GCM_SHA256 = 0x1301 + TLS_AES_256_GCM_SHA384 = 0x1302 + TLS_CHACHA20_POLY1305_SHA256 = 0x1303 diff --git a/env/lib/python3.6/site-packages/pip/_vendor/urllib3/contrib/_securetransport/low_level.py b/env/lib/python3.6/site-packages/pip/_vendor/urllib3/contrib/_securetransport/low_level.py new file mode 100644 index 0000000..b13cd9e --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/urllib3/contrib/_securetransport/low_level.py @@ -0,0 +1,346 @@ +""" +Low-level helpers for the SecureTransport bindings. + +These are Python functions that are not directly related to the high-level APIs +but are necessary to get them to work. They include a whole bunch of low-level +CoreFoundation messing about and memory management. 
The concerns in this module +are almost entirely about trying to avoid memory leaks and providing +appropriate and useful assistance to the higher-level code. +""" +import base64 +import ctypes +import itertools +import re +import os +import ssl +import tempfile + +from .bindings import Security, CoreFoundation, CFConst + + +# This regular expression is used to grab PEM data out of a PEM bundle. +_PEM_CERTS_RE = re.compile( + b"-----BEGIN CERTIFICATE-----\n(.*?)\n-----END CERTIFICATE-----", re.DOTALL +) + + +def _cf_data_from_bytes(bytestring): + """ + Given a bytestring, create a CFData object from it. This CFData object must + be CFReleased by the caller. + """ + return CoreFoundation.CFDataCreate( + CoreFoundation.kCFAllocatorDefault, bytestring, len(bytestring) + ) + + +def _cf_dictionary_from_tuples(tuples): + """ + Given a list of Python tuples, create an associated CFDictionary. + """ + dictionary_size = len(tuples) + + # We need to get the dictionary keys and values out in the same order. + keys = (t[0] for t in tuples) + values = (t[1] for t in tuples) + cf_keys = (CoreFoundation.CFTypeRef * dictionary_size)(*keys) + cf_values = (CoreFoundation.CFTypeRef * dictionary_size)(*values) + + return CoreFoundation.CFDictionaryCreate( + CoreFoundation.kCFAllocatorDefault, + cf_keys, + cf_values, + dictionary_size, + CoreFoundation.kCFTypeDictionaryKeyCallBacks, + CoreFoundation.kCFTypeDictionaryValueCallBacks, + ) + + +def _cf_string_to_unicode(value): + """ + Creates a Unicode string from a CFString object. Used entirely for error + reporting. + + Yes, it annoys me quite a lot that this function is this complex. + """ + value_as_void_p = ctypes.cast(value, ctypes.POINTER(ctypes.c_void_p)) + + string = CoreFoundation.CFStringGetCStringPtr( + value_as_void_p, + CFConst.kCFStringEncodingUTF8 + ) + if string is None: + buffer = ctypes.create_string_buffer(1024) + result = CoreFoundation.CFStringGetCString( + value_as_void_p, + buffer, + 1024, + CFConst.kCFStringEncodingUTF8 + ) + if not result: + raise OSError('Error copying C string from CFStringRef') + string = buffer.value + if string is not None: + string = string.decode('utf-8') + return string + + +def _assert_no_error(error, exception_class=None): + """ + Checks the return code and throws an exception if there is an error to + report + """ + if error == 0: + return + + cf_error_string = Security.SecCopyErrorMessageString(error, None) + output = _cf_string_to_unicode(cf_error_string) + CoreFoundation.CFRelease(cf_error_string) + + if output is None or output == u'': + output = u'OSStatus %s' % error + + if exception_class is None: + exception_class = ssl.SSLError + + raise exception_class(output) + + +def _cert_array_from_pem(pem_bundle): + """ + Given a bundle of certs in PEM format, turns them into a CFArray of certs + that can be used to validate a cert chain. + """ + # Normalize the PEM bundle's line endings. 
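+    # (The _PEM_CERTS_RE pattern above only matches plain LF line endings.)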
+ pem_bundle = pem_bundle.replace(b"\r\n", b"\n") + + der_certs = [ + base64.b64decode(match.group(1)) + for match in _PEM_CERTS_RE.finditer(pem_bundle) + ] + if not der_certs: + raise ssl.SSLError("No root certificates specified") + + cert_array = CoreFoundation.CFArrayCreateMutable( + CoreFoundation.kCFAllocatorDefault, + 0, + ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks) + ) + if not cert_array: + raise ssl.SSLError("Unable to allocate memory!") + + try: + for der_bytes in der_certs: + certdata = _cf_data_from_bytes(der_bytes) + if not certdata: + raise ssl.SSLError("Unable to allocate memory!") + cert = Security.SecCertificateCreateWithData( + CoreFoundation.kCFAllocatorDefault, certdata + ) + CoreFoundation.CFRelease(certdata) + if not cert: + raise ssl.SSLError("Unable to build cert object!") + + CoreFoundation.CFArrayAppendValue(cert_array, cert) + CoreFoundation.CFRelease(cert) + except Exception: + # We need to free the array before the exception bubbles further. + # We only want to do that if an error occurs: otherwise, the caller + # should free. + CoreFoundation.CFRelease(cert_array) + + return cert_array + + +def _is_cert(item): + """ + Returns True if a given CFTypeRef is a certificate. + """ + expected = Security.SecCertificateGetTypeID() + return CoreFoundation.CFGetTypeID(item) == expected + + +def _is_identity(item): + """ + Returns True if a given CFTypeRef is an identity. + """ + expected = Security.SecIdentityGetTypeID() + return CoreFoundation.CFGetTypeID(item) == expected + + +def _temporary_keychain(): + """ + This function creates a temporary Mac keychain that we can use to work with + credentials. This keychain uses a one-time password and a temporary file to + store the data. We expect to have one keychain per socket. The returned + SecKeychainRef must be freed by the caller, including calling + SecKeychainDelete. + + Returns a tuple of the SecKeychainRef and the path to the temporary + directory that contains it. + """ + # Unfortunately, SecKeychainCreate requires a path to a keychain. This + # means we cannot use mkstemp to use a generic temporary file. Instead, + # we're going to create a temporary directory and a filename to use there. + # This filename will be 8 random bytes expanded into base64. We also need + # some random bytes to password-protect the keychain we're creating, so we + # ask for 40 random bytes. + random_bytes = os.urandom(40) + filename = base64.b16encode(random_bytes[:8]).decode('utf-8') + password = base64.b16encode(random_bytes[8:]) # Must be valid UTF-8 + tempdirectory = tempfile.mkdtemp() + + keychain_path = os.path.join(tempdirectory, filename).encode('utf-8') + + # We now want to create the keychain itself. + keychain = Security.SecKeychainRef() + status = Security.SecKeychainCreate( + keychain_path, + len(password), + password, + False, + None, + ctypes.byref(keychain) + ) + _assert_no_error(status) + + # Having created the keychain, we want to pass it off to the caller. + return keychain, tempdirectory + + +def _load_items_from_file(keychain, path): + """ + Given a single file, loads all the trust objects from it into arrays and + the keychain. + Returns a tuple of lists: the first list is a list of identities, the + second a list of certs. 
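+
+    For example (a sketch; the path is illustrative)::
+
+        identities, certs = _load_items_from_file(keychain, '/tmp/client.pem')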
+ """ + certificates = [] + identities = [] + result_array = None + + with open(path, 'rb') as f: + raw_filedata = f.read() + + try: + filedata = CoreFoundation.CFDataCreate( + CoreFoundation.kCFAllocatorDefault, + raw_filedata, + len(raw_filedata) + ) + result_array = CoreFoundation.CFArrayRef() + result = Security.SecItemImport( + filedata, # cert data + None, # Filename, leaving it out for now + None, # What the type of the file is, we don't care + None, # what's in the file, we don't care + 0, # import flags + None, # key params, can include passphrase in the future + keychain, # The keychain to insert into + ctypes.byref(result_array) # Results + ) + _assert_no_error(result) + + # A CFArray is not very useful to us as an intermediary + # representation, so we are going to extract the objects we want + # and then free the array. We don't need to keep hold of keys: the + # keychain already has them! + result_count = CoreFoundation.CFArrayGetCount(result_array) + for index in range(result_count): + item = CoreFoundation.CFArrayGetValueAtIndex( + result_array, index + ) + item = ctypes.cast(item, CoreFoundation.CFTypeRef) + + if _is_cert(item): + CoreFoundation.CFRetain(item) + certificates.append(item) + elif _is_identity(item): + CoreFoundation.CFRetain(item) + identities.append(item) + finally: + if result_array: + CoreFoundation.CFRelease(result_array) + + CoreFoundation.CFRelease(filedata) + + return (identities, certificates) + + +def _load_client_cert_chain(keychain, *paths): + """ + Load certificates and maybe keys from a number of files. Has the end goal + of returning a CFArray containing one SecIdentityRef, and then zero or more + SecCertificateRef objects, suitable for use as a client certificate trust + chain. + """ + # Ok, the strategy. + # + # This relies on knowing that macOS will not give you a SecIdentityRef + # unless you have imported a key into a keychain. This is a somewhat + # artificial limitation of macOS (for example, it doesn't necessarily + # affect iOS), but there is nothing inside Security.framework that lets you + # get a SecIdentityRef without having a key in a keychain. + # + # So the policy here is we take all the files and iterate them in order. + # Each one will use SecItemImport to have one or more objects loaded from + # it. We will also point at a keychain that macOS can use to work with the + # private key. + # + # Once we have all the objects, we'll check what we actually have. If we + # already have a SecIdentityRef in hand, fab: we'll use that. Otherwise, + # we'll take the first certificate (which we assume to be our leaf) and + # ask the keychain to give us a SecIdentityRef with that cert's associated + # key. + # + # We'll then return a CFArray containing the trust chain: one + # SecIdentityRef and then zero-or-more SecCertificateRef objects. The + # responsibility for freeing this CFArray will be with the caller. This + # CFArray must remain alive for the entire connection, so in practice it + # will be stored with a single SSLSocket, along with the reference to the + # keychain. + certificates = [] + identities = [] + + # Filter out bad paths. + paths = (path for path in paths if path) + + try: + for file_path in paths: + new_identities, new_certs = _load_items_from_file( + keychain, file_path + ) + identities.extend(new_identities) + certificates.extend(new_certs) + + # Ok, we have everything. The question is: do we have an identity? If + # not, we want to grab one from the first cert we have. 
+ if not identities: + new_identity = Security.SecIdentityRef() + status = Security.SecIdentityCreateWithCertificate( + keychain, + certificates[0], + ctypes.byref(new_identity) + ) + _assert_no_error(status) + identities.append(new_identity) + + # We now want to release the original certificate, as we no longer + # need it. + CoreFoundation.CFRelease(certificates.pop(0)) + + # We now need to build a new CFArray that holds the trust chain. + trust_chain = CoreFoundation.CFArrayCreateMutable( + CoreFoundation.kCFAllocatorDefault, + 0, + ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks), + ) + for item in itertools.chain(identities, certificates): + # ArrayAppendValue does a CFRetain on the item. That's fine, + # because the finally block will release our other refs to them. + CoreFoundation.CFArrayAppendValue(trust_chain, item) + + return trust_chain + finally: + for obj in itertools.chain(identities, certificates): + CoreFoundation.CFRelease(obj) diff --git a/env/lib/python3.6/site-packages/pip/_vendor/urllib3/contrib/appengine.py b/env/lib/python3.6/site-packages/pip/_vendor/urllib3/contrib/appengine.py new file mode 100644 index 0000000..59f2a61 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/urllib3/contrib/appengine.py @@ -0,0 +1,305 @@ +""" +This module provides a pool manager that uses Google App Engine's +`URLFetch Service <https://cloud.google.com/appengine/docs/python/urlfetch>`_. + +Example usage:: + + from pip._vendor.urllib3 import PoolManager + from pip._vendor.urllib3.contrib.appengine import AppEngineManager, is_appengine_sandbox + + if is_appengine_sandbox(): + # AppEngineManager uses AppEngine's URLFetch API behind the scenes + http = AppEngineManager() + else: + # PoolManager uses a socket-level API behind the scenes + http = PoolManager() + + r = http.request('GET', 'https://google.com/') + +There are `limitations <https://cloud.google.com/appengine/docs/python/\ +urlfetch/#Python_Quotas_and_limits>`_ to the URLFetch service and it may not be +the best choice for your application. There are three options for using +urllib3 on Google App Engine: + +1. You can use :class:`AppEngineManager` with URLFetch. URLFetch is + cost-effective in many circumstances as long as your usage is within the + limitations. +2. You can use a normal :class:`~urllib3.PoolManager` by enabling sockets. + Sockets also have `limitations and restrictions + <https://cloud.google.com/appengine/docs/python/sockets/\ + #limitations-and-restrictions>`_ and have a lower free quota than URLFetch. + To use sockets, be sure to specify the following in your ``app.yaml``:: + + env_variables: + GAE_USE_SOCKETS_HTTPLIB : 'true' + +3. If you are using `App Engine Flexible +<https://cloud.google.com/appengine/docs/flexible/>`_, you can use the standard +:class:`PoolManager` without any configuration or special environment variables. 
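+
+:class:`AppEngineManager` also accepts a couple of URLFetch-specific options;
+a minimal sketch (the values shown are the defaults)::
+
+    http = AppEngineManager(validate_certificate=True, urlfetch_retries=True)
+    r = http.request('GET', 'https://google.com/')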
+""" + +from __future__ import absolute_import +import logging +import os +import warnings +from ..packages.six.moves.urllib.parse import urljoin + +from ..exceptions import ( + HTTPError, + HTTPWarning, + MaxRetryError, + ProtocolError, + TimeoutError, + SSLError +) + +from ..packages.six import BytesIO +from ..request import RequestMethods +from ..response import HTTPResponse +from ..util.timeout import Timeout +from ..util.retry import Retry + +try: + from google.appengine.api import urlfetch +except ImportError: + urlfetch = None + + +log = logging.getLogger(__name__) + + +class AppEnginePlatformWarning(HTTPWarning): + pass + + +class AppEnginePlatformError(HTTPError): + pass + + +class AppEngineManager(RequestMethods): + """ + Connection manager for Google App Engine sandbox applications. + + This manager uses the URLFetch service directly instead of using the + emulated httplib, and is subject to URLFetch limitations as described in + the App Engine documentation `here + <https://cloud.google.com/appengine/docs/python/urlfetch>`_. + + Notably it will raise an :class:`AppEnginePlatformError` if: + * URLFetch is not available. + * If you attempt to use this on App Engine Flexible, as full socket + support is available. + * If a request size is more than 10 megabytes. + * If a response size is more than 32 megabtyes. + * If you use an unsupported request method such as OPTIONS. + + Beyond those cases, it will raise normal urllib3 errors. + """ + + def __init__(self, headers=None, retries=None, validate_certificate=True, + urlfetch_retries=True): + if not urlfetch: + raise AppEnginePlatformError( + "URLFetch is not available in this environment.") + + if is_prod_appengine_mvms(): + raise AppEnginePlatformError( + "Use normal urllib3.PoolManager instead of AppEngineManager" + "on Managed VMs, as using URLFetch is not necessary in " + "this environment.") + + warnings.warn( + "urllib3 is using URLFetch on Google App Engine sandbox instead " + "of sockets. 
To use sockets directly instead of URLFetch see " + "https://urllib3.readthedocs.io/en/latest/reference/urllib3.contrib.html.", + AppEnginePlatformWarning) + + RequestMethods.__init__(self, headers) + self.validate_certificate = validate_certificate + self.urlfetch_retries = urlfetch_retries + + self.retries = retries or Retry.DEFAULT + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + # Return False to re-raise any potential exceptions + return False + + def urlopen(self, method, url, body=None, headers=None, + retries=None, redirect=True, timeout=Timeout.DEFAULT_TIMEOUT, + **response_kw): + + retries = self._get_retries(retries, redirect) + + try: + follow_redirects = ( + redirect and + retries.redirect != 0 and + retries.total) + response = urlfetch.fetch( + url, + payload=body, + method=method, + headers=headers or {}, + allow_truncated=False, + follow_redirects=self.urlfetch_retries and follow_redirects, + deadline=self._get_absolute_timeout(timeout), + validate_certificate=self.validate_certificate, + ) + except urlfetch.DeadlineExceededError as e: + raise TimeoutError(self, e) + + except urlfetch.InvalidURLError as e: + if 'too large' in str(e): + raise AppEnginePlatformError( + "URLFetch request too large, URLFetch only " + "supports requests up to 10mb in size.", e) + raise ProtocolError(e) + + except urlfetch.DownloadError as e: + if 'Too many redirects' in str(e): + raise MaxRetryError(self, url, reason=e) + raise ProtocolError(e) + + except urlfetch.ResponseTooLargeError as e: + raise AppEnginePlatformError( + "URLFetch response too large, URLFetch only supports" + "responses up to 32mb in size.", e) + + except urlfetch.SSLCertificateError as e: + raise SSLError(e) + + except urlfetch.InvalidMethodError as e: + raise AppEnginePlatformError( + "URLFetch does not support method: %s" % method, e) + + http_response = self._urlfetch_response_to_http_response( + response, retries=retries, **response_kw) + + # Handle redirect? + redirect_location = redirect and http_response.get_redirect_location() + if redirect_location: + # Check for redirect response + if (self.urlfetch_retries and retries.raise_on_redirect): + raise MaxRetryError(self, url, "too many redirects") + else: + if http_response.status == 303: + method = 'GET' + + try: + retries = retries.increment(method, url, response=http_response, _pool=self) + except MaxRetryError: + if retries.raise_on_redirect: + raise MaxRetryError(self, url, "too many redirects") + return http_response + + retries.sleep_for_retry(http_response) + log.debug("Redirecting %s -> %s", url, redirect_location) + redirect_url = urljoin(url, redirect_location) + return self.urlopen( + method, redirect_url, body, headers, + retries=retries, redirect=redirect, + timeout=timeout, **response_kw) + + # Check if we should retry the HTTP response. + has_retry_after = bool(http_response.getheader('Retry-After')) + if retries.is_retry(method, http_response.status, has_retry_after): + retries = retries.increment( + method, url, response=http_response, _pool=self) + log.debug("Retry: %s", url) + retries.sleep(http_response) + return self.urlopen( + method, url, + body=body, headers=headers, + retries=retries, redirect=redirect, + timeout=timeout, **response_kw) + + return http_response + + def _urlfetch_response_to_http_response(self, urlfetch_resp, **response_kw): + + if is_prod_appengine(): + # Production GAE handles deflate encoding automatically, but does + # not remove the encoding header. 
+ content_encoding = urlfetch_resp.headers.get('content-encoding') + + if content_encoding == 'deflate': + del urlfetch_resp.headers['content-encoding'] + + transfer_encoding = urlfetch_resp.headers.get('transfer-encoding') + # We have a full response's content, + # so let's make sure we don't report ourselves as chunked data. + if transfer_encoding == 'chunked': + encodings = transfer_encoding.split(",") + encodings.remove('chunked') + urlfetch_resp.headers['transfer-encoding'] = ','.join(encodings) + + original_response = HTTPResponse( + # In order for decoding to work, we must present the content as + # a file-like object. + body=BytesIO(urlfetch_resp.content), + msg=urlfetch_resp.header_msg, + headers=urlfetch_resp.headers, + status=urlfetch_resp.status_code, + **response_kw + ) + + return HTTPResponse( + body=BytesIO(urlfetch_resp.content), + headers=urlfetch_resp.headers, + status=urlfetch_resp.status_code, + original_response=original_response, + **response_kw + ) + + def _get_absolute_timeout(self, timeout): + if timeout is Timeout.DEFAULT_TIMEOUT: + return None # Defer to URLFetch's default. + if isinstance(timeout, Timeout): + if timeout._read is not None or timeout._connect is not None: + warnings.warn( + "URLFetch does not support granular timeout settings, " + "reverting to total or default URLFetch timeout.", + AppEnginePlatformWarning) + return timeout.total + return timeout + + def _get_retries(self, retries, redirect): + if not isinstance(retries, Retry): + retries = Retry.from_int( + retries, redirect=redirect, default=self.retries) + + if retries.connect or retries.read or retries.redirect: + warnings.warn( + "URLFetch only supports total retries and does not " + "recognize connect, read, or redirect retry parameters.", + AppEnginePlatformWarning) + + return retries + + +def is_appengine(): + return (is_local_appengine() or + is_prod_appengine() or + is_prod_appengine_mvms()) + + +def is_appengine_sandbox(): + return is_appengine() and not is_prod_appengine_mvms() + + +def is_local_appengine(): + return ('APPENGINE_RUNTIME' in os.environ and + 'Development/' in os.environ['SERVER_SOFTWARE']) + + +def is_prod_appengine(): + return ('APPENGINE_RUNTIME' in os.environ and + 'Google App Engine/' in os.environ['SERVER_SOFTWARE'] and + not is_prod_appengine_mvms()) + + +def is_prod_appengine_mvms(): + return os.environ.get('GAE_VM', False) == 'true' diff --git a/env/lib/python3.6/site-packages/pip/_vendor/urllib3/contrib/ntlmpool.py b/env/lib/python3.6/site-packages/pip/_vendor/urllib3/contrib/ntlmpool.py new file mode 100644 index 0000000..642e99e --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/urllib3/contrib/ntlmpool.py @@ -0,0 +1,112 @@ +""" +NTLM authenticating pool, contributed by erikcederstran + +Issue #10, see: http://code.google.com/p/urllib3/issues/detail?id=10 +""" +from __future__ import absolute_import + +from logging import getLogger +from ntlm import ntlm + +from .. import HTTPSConnectionPool +from ..packages.six.moves.http_client import HTTPSConnection + + +log = getLogger(__name__) + + +class NTLMConnectionPool(HTTPSConnectionPool): + """ + Implements an NTLM authentication version of an urllib3 connection pool + """ + + scheme = 'https' + + def __init__(self, user, pw, authurl, *args, **kwargs): + """ + authurl is a random URL on the server that is protected by NTLM. + user is the Windows user, probably in the DOMAIN\\username format. + pw is the password for the user. 
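+
+        A minimal sketch (host, credentials and path are illustrative)::
+
+            pool = NTLMConnectionPool('DOMAIN\\user', 'secret', '/protected',
+                                      host='intranet.example')
+            r = pool.urlopen('GET', '/protected')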
+ """ + super(NTLMConnectionPool, self).__init__(*args, **kwargs) + self.authurl = authurl + self.rawuser = user + user_parts = user.split('\\', 1) + self.domain = user_parts[0].upper() + self.user = user_parts[1] + self.pw = pw + + def _new_conn(self): + # Performs the NTLM handshake that secures the connection. The socket + # must be kept open while requests are performed. + self.num_connections += 1 + log.debug('Starting NTLM HTTPS connection no. %d: https://%s%s', + self.num_connections, self.host, self.authurl) + + headers = {} + headers['Connection'] = 'Keep-Alive' + req_header = 'Authorization' + resp_header = 'www-authenticate' + + conn = HTTPSConnection(host=self.host, port=self.port) + + # Send negotiation message + headers[req_header] = ( + 'NTLM %s' % ntlm.create_NTLM_NEGOTIATE_MESSAGE(self.rawuser)) + log.debug('Request headers: %s', headers) + conn.request('GET', self.authurl, None, headers) + res = conn.getresponse() + reshdr = dict(res.getheaders()) + log.debug('Response status: %s %s', res.status, res.reason) + log.debug('Response headers: %s', reshdr) + log.debug('Response data: %s [...]', res.read(100)) + + # Remove the reference to the socket, so that it can not be closed by + # the response object (we want to keep the socket open) + res.fp = None + + # Server should respond with a challenge message + auth_header_values = reshdr[resp_header].split(', ') + auth_header_value = None + for s in auth_header_values: + if s[:5] == 'NTLM ': + auth_header_value = s[5:] + if auth_header_value is None: + raise Exception('Unexpected %s response header: %s' % + (resp_header, reshdr[resp_header])) + + # Send authentication message + ServerChallenge, NegotiateFlags = \ + ntlm.parse_NTLM_CHALLENGE_MESSAGE(auth_header_value) + auth_msg = ntlm.create_NTLM_AUTHENTICATE_MESSAGE(ServerChallenge, + self.user, + self.domain, + self.pw, + NegotiateFlags) + headers[req_header] = 'NTLM %s' % auth_msg + log.debug('Request headers: %s', headers) + conn.request('GET', self.authurl, None, headers) + res = conn.getresponse() + log.debug('Response status: %s %s', res.status, res.reason) + log.debug('Response headers: %s', dict(res.getheaders())) + log.debug('Response data: %s [...]', res.read()[:100]) + if res.status != 200: + if res.status == 401: + raise Exception('Server rejected request: wrong ' + 'username or password') + raise Exception('Wrong server response: %s %s' % + (res.status, res.reason)) + + res.fp = None + log.debug('Connection established') + return conn + + def urlopen(self, method, url, body=None, headers=None, retries=3, + redirect=True, assert_same_host=True): + if headers is None: + headers = {} + headers['Connection'] = 'Keep-Alive' + return super(NTLMConnectionPool, self).urlopen(method, url, body, + headers, retries, + redirect, + assert_same_host) diff --git a/env/lib/python3.6/site-packages/pip/_vendor/urllib3/contrib/pyopenssl.py b/env/lib/python3.6/site-packages/pip/_vendor/urllib3/contrib/pyopenssl.py new file mode 100644 index 0000000..6dd3a01 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/urllib3/contrib/pyopenssl.py @@ -0,0 +1,457 @@ +""" +SSL with SNI_-support for Python 2. Follow these instructions if you would +like to verify SSL certificates in Python 2. Note, the default libraries do +*not* do certificate checking; you need to do additional work to validate +certificates yourself. 
+ +This needs the following packages installed: + +* pyOpenSSL (tested with 16.0.0) +* cryptography (minimum 1.3.4, from pyopenssl) +* idna (minimum 2.0, from cryptography) + +However, pyopenssl depends on cryptography, which depends on idna, so while we +use all three directly here we end up having relatively few packages required. + +You can install them with the following command: + + pip install pyopenssl cryptography idna + +To activate certificate checking, call +:func:`~urllib3.contrib.pyopenssl.inject_into_urllib3` from your Python code +before you begin making HTTP requests. This can be done in a ``sitecustomize`` +module, or at any other time before your application begins using ``urllib3``, +like this:: + + try: + import urllib3.contrib.pyopenssl + urllib3.contrib.pyopenssl.inject_into_urllib3() + except ImportError: + pass + +Now you can use :mod:`urllib3` as you normally would, and it will support SNI +when the required modules are installed. + +Activating this module also has the positive side effect of disabling SSL/TLS +compression in Python 2 (see `CRIME attack`_). + +If you want to configure the default list of supported cipher suites, you can +set the ``urllib3.contrib.pyopenssl.DEFAULT_SSL_CIPHER_LIST`` variable. + +.. _sni: https://en.wikipedia.org/wiki/Server_Name_Indication +.. _crime attack: https://en.wikipedia.org/wiki/CRIME_(security_exploit) +""" +from __future__ import absolute_import + +import OpenSSL.SSL +from cryptography import x509 +from cryptography.hazmat.backends.openssl import backend as openssl_backend +from cryptography.hazmat.backends.openssl.x509 import _Certificate +try: + from cryptography.x509 import UnsupportedExtension +except ImportError: + # UnsupportedExtension is gone in cryptography >= 2.1.0 + class UnsupportedExtension(Exception): + pass + +from socket import timeout, error as SocketError +from io import BytesIO + +try: # Platform-specific: Python 2 + from socket import _fileobject +except ImportError: # Platform-specific: Python 3 + _fileobject = None + from ..packages.backports.makefile import backport_makefile + +import logging +import ssl +from ..packages import six +import sys + +from .. import util + +__all__ = ['inject_into_urllib3', 'extract_from_urllib3'] + +# SNI always works. +HAS_SNI = True + +# Map from urllib3 to PyOpenSSL compatible parameter-values. +_openssl_versions = { + ssl.PROTOCOL_SSLv23: OpenSSL.SSL.SSLv23_METHOD, + ssl.PROTOCOL_TLSv1: OpenSSL.SSL.TLSv1_METHOD, +} + +if hasattr(ssl, 'PROTOCOL_TLSv1_1') and hasattr(OpenSSL.SSL, 'TLSv1_1_METHOD'): + _openssl_versions[ssl.PROTOCOL_TLSv1_1] = OpenSSL.SSL.TLSv1_1_METHOD + +if hasattr(ssl, 'PROTOCOL_TLSv1_2') and hasattr(OpenSSL.SSL, 'TLSv1_2_METHOD'): + _openssl_versions[ssl.PROTOCOL_TLSv1_2] = OpenSSL.SSL.TLSv1_2_METHOD + +try: + _openssl_versions.update({ssl.PROTOCOL_SSLv3: OpenSSL.SSL.SSLv3_METHOD}) +except AttributeError: + pass + +_stdlib_to_openssl_verify = { + ssl.CERT_NONE: OpenSSL.SSL.VERIFY_NONE, + ssl.CERT_OPTIONAL: OpenSSL.SSL.VERIFY_PEER, + ssl.CERT_REQUIRED: + OpenSSL.SSL.VERIFY_PEER + OpenSSL.SSL.VERIFY_FAIL_IF_NO_PEER_CERT, +} +_openssl_to_stdlib_verify = dict( + (v, k) for k, v in _stdlib_to_openssl_verify.items() +) + +# OpenSSL will only write 16K at a time +SSL_WRITE_BLOCKSIZE = 16384 + +orig_util_HAS_SNI = util.HAS_SNI +orig_util_SSLContext = util.ssl_.SSLContext + + +log = logging.getLogger(__name__) + + +def inject_into_urllib3(): + 'Monkey-patch urllib3 with PyOpenSSL-backed SSL-support.' 
+ + _validate_dependencies_met() + + util.ssl_.SSLContext = PyOpenSSLContext + util.HAS_SNI = HAS_SNI + util.ssl_.HAS_SNI = HAS_SNI + util.IS_PYOPENSSL = True + util.ssl_.IS_PYOPENSSL = True + + +def extract_from_urllib3(): + 'Undo monkey-patching by :func:`inject_into_urllib3`.' + + util.ssl_.SSLContext = orig_util_SSLContext + util.HAS_SNI = orig_util_HAS_SNI + util.ssl_.HAS_SNI = orig_util_HAS_SNI + util.IS_PYOPENSSL = False + util.ssl_.IS_PYOPENSSL = False + + +def _validate_dependencies_met(): + """ + Verifies that PyOpenSSL's package-level dependencies have been met. + Throws `ImportError` if they are not met. + """ + # Method added in `cryptography==1.1`; not available in older versions + from cryptography.x509.extensions import Extensions + if getattr(Extensions, "get_extension_for_class", None) is None: + raise ImportError("'cryptography' module missing required functionality. " + "Try upgrading to v1.3.4 or newer.") + + # pyOpenSSL 0.14 and above use cryptography for OpenSSL bindings. The _x509 + # attribute is only present on those versions. + from OpenSSL.crypto import X509 + x509 = X509() + if getattr(x509, "_x509", None) is None: + raise ImportError("'pyOpenSSL' module missing required functionality. " + "Try upgrading to v0.14 or newer.") + + +def _dnsname_to_stdlib(name): + """ + Converts a dNSName SubjectAlternativeName field to the form used by the + standard library on the given Python version. + + Cryptography produces a dNSName as a unicode string that was idna-decoded + from ASCII bytes. We need to idna-encode that string to get it back, and + then on Python 3 we also need to convert to unicode via UTF-8 (the stdlib + uses PyUnicode_FromStringAndSize on it, which decodes via UTF-8). + """ + def idna_encode(name): + """ + Borrowed wholesale from the Python Cryptography Project. It turns out + that we can't just safely call `idna.encode`: it can explode for + wildcard names. This avoids that problem. + """ + from pip._vendor import idna + + for prefix in [u'*.', u'.']: + if name.startswith(prefix): + name = name[len(prefix):] + return prefix.encode('ascii') + idna.encode(name) + return idna.encode(name) + + name = idna_encode(name) + if sys.version_info >= (3, 0): + name = name.decode('utf-8') + return name + + +def get_subj_alt_name(peer_cert): + """ + Given an PyOpenSSL certificate, provides all the subject alternative names. + """ + # Pass the cert to cryptography, which has much better APIs for this. + if hasattr(peer_cert, "to_cryptography"): + cert = peer_cert.to_cryptography() + else: + # This is technically using private APIs, but should work across all + # relevant versions before PyOpenSSL got a proper API for this. + cert = _Certificate(openssl_backend, peer_cert._x509) + + # We want to find the SAN extension. Ask Cryptography to locate it (it's + # faster than looping in Python) + try: + ext = cert.extensions.get_extension_for_class( + x509.SubjectAlternativeName + ).value + except x509.ExtensionNotFound: + # No such extension, return the empty list. + return [] + except (x509.DuplicateExtension, UnsupportedExtension, + x509.UnsupportedGeneralNameType, UnicodeError) as e: + # A problem has been found with the quality of the certificate. Assume + # no SAN field is present. + log.warning( + "A problem was encountered with the certificate that prevented " + "urllib3 from finding the SubjectAlternativeName field. This can " + "affect certificate validation. The error was %s", + e, + ) + return [] + + # We want to return dNSName and iPAddress fields. 
We need to cast the IPs + # back to strings because the match_hostname function wants them as + # strings. + # Sadly the DNS names need to be idna encoded and then, on Python 3, UTF-8 + # decoded. This is pretty frustrating, but that's what the standard library + # does with certificates, and so we need to attempt to do the same. + names = [ + ('DNS', _dnsname_to_stdlib(name)) + for name in ext.get_values_for_type(x509.DNSName) + ] + names.extend( + ('IP Address', str(name)) + for name in ext.get_values_for_type(x509.IPAddress) + ) + + return names + + +class WrappedSocket(object): + '''API-compatibility wrapper for Python OpenSSL's Connection-class. + + Note: _makefile_refs, _drop() and _reuse() are needed for the garbage + collector of pypy. + ''' + + def __init__(self, connection, socket, suppress_ragged_eofs=True): + self.connection = connection + self.socket = socket + self.suppress_ragged_eofs = suppress_ragged_eofs + self._makefile_refs = 0 + self._closed = False + + def fileno(self): + return self.socket.fileno() + + # Copy-pasted from Python 3.5 source code + def _decref_socketios(self): + if self._makefile_refs > 0: + self._makefile_refs -= 1 + if self._closed: + self.close() + + def recv(self, *args, **kwargs): + try: + data = self.connection.recv(*args, **kwargs) + except OpenSSL.SSL.SysCallError as e: + if self.suppress_ragged_eofs and e.args == (-1, 'Unexpected EOF'): + return b'' + else: + raise SocketError(str(e)) + except OpenSSL.SSL.ZeroReturnError as e: + if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN: + return b'' + else: + raise + except OpenSSL.SSL.WantReadError: + if not util.wait_for_read(self.socket, self.socket.gettimeout()): + raise timeout('The read operation timed out') + else: + return self.recv(*args, **kwargs) + else: + return data + + def recv_into(self, *args, **kwargs): + try: + return self.connection.recv_into(*args, **kwargs) + except OpenSSL.SSL.SysCallError as e: + if self.suppress_ragged_eofs and e.args == (-1, 'Unexpected EOF'): + return 0 + else: + raise SocketError(str(e)) + except OpenSSL.SSL.ZeroReturnError as e: + if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN: + return 0 + else: + raise + except OpenSSL.SSL.WantReadError: + if not util.wait_for_read(self.socket, self.socket.gettimeout()): + raise timeout('The read operation timed out') + else: + return self.recv_into(*args, **kwargs) + + def settimeout(self, timeout): + return self.socket.settimeout(timeout) + + def _send_until_done(self, data): + while True: + try: + return self.connection.send(data) + except OpenSSL.SSL.WantWriteError: + if not util.wait_for_write(self.socket, self.socket.gettimeout()): + raise timeout() + continue + except OpenSSL.SSL.SysCallError as e: + raise SocketError(str(e)) + + def sendall(self, data): + total_sent = 0 + while total_sent < len(data): + sent = self._send_until_done(data[total_sent:total_sent + SSL_WRITE_BLOCKSIZE]) + total_sent += sent + + def shutdown(self): + # FIXME rethrow compatible exceptions should we ever use this + self.connection.shutdown() + + def close(self): + if self._makefile_refs < 1: + try: + self._closed = True + return self.connection.close() + except OpenSSL.SSL.Error: + return + else: + self._makefile_refs -= 1 + + def getpeercert(self, binary_form=False): + x509 = self.connection.get_peer_certificate() + + if not x509: + return x509 + + if binary_form: + return OpenSSL.crypto.dump_certificate( + OpenSSL.crypto.FILETYPE_ASN1, + x509) + + return { + 'subject': ( + (('commonName', 
x509.get_subject().CN),), + ), + 'subjectAltName': get_subj_alt_name(x509) + } + + def _reuse(self): + self._makefile_refs += 1 + + def _drop(self): + if self._makefile_refs < 1: + self.close() + else: + self._makefile_refs -= 1 + + +if _fileobject: # Platform-specific: Python 2 + def makefile(self, mode, bufsize=-1): + self._makefile_refs += 1 + return _fileobject(self, mode, bufsize, close=True) +else: # Platform-specific: Python 3 + makefile = backport_makefile + +WrappedSocket.makefile = makefile + + +class PyOpenSSLContext(object): + """ + I am a wrapper class for the PyOpenSSL ``Context`` object. I am responsible + for translating the interface of the standard library ``SSLContext`` object + to calls into PyOpenSSL. + """ + def __init__(self, protocol): + self.protocol = _openssl_versions[protocol] + self._ctx = OpenSSL.SSL.Context(self.protocol) + self._options = 0 + self.check_hostname = False + + @property + def options(self): + return self._options + + @options.setter + def options(self, value): + self._options = value + self._ctx.set_options(value) + + @property + def verify_mode(self): + return _openssl_to_stdlib_verify[self._ctx.get_verify_mode()] + + @verify_mode.setter + def verify_mode(self, value): + self._ctx.set_verify( + _stdlib_to_openssl_verify[value], + _verify_callback + ) + + def set_default_verify_paths(self): + self._ctx.set_default_verify_paths() + + def set_ciphers(self, ciphers): + if isinstance(ciphers, six.text_type): + ciphers = ciphers.encode('utf-8') + self._ctx.set_cipher_list(ciphers) + + def load_verify_locations(self, cafile=None, capath=None, cadata=None): + if cafile is not None: + cafile = cafile.encode('utf-8') + if capath is not None: + capath = capath.encode('utf-8') + self._ctx.load_verify_locations(cafile, capath) + if cadata is not None: + self._ctx.load_verify_locations(BytesIO(cadata)) + + def load_cert_chain(self, certfile, keyfile=None, password=None): + self._ctx.use_certificate_chain_file(certfile) + if password is not None: + self._ctx.set_passwd_cb(lambda max_length, prompt_twice, userdata: password) + self._ctx.use_privatekey_file(keyfile or certfile) + + def wrap_socket(self, sock, server_side=False, + do_handshake_on_connect=True, suppress_ragged_eofs=True, + server_hostname=None): + cnx = OpenSSL.SSL.Connection(self._ctx, sock) + + if isinstance(server_hostname, six.text_type): # Platform-specific: Python 3 + server_hostname = server_hostname.encode('utf-8') + + if server_hostname is not None: + cnx.set_tlsext_host_name(server_hostname) + + cnx.set_connect_state() + + while True: + try: + cnx.do_handshake() + except OpenSSL.SSL.WantReadError: + if not util.wait_for_read(sock, sock.gettimeout()): + raise timeout('select timed out') + continue + except OpenSSL.SSL.Error as e: + raise ssl.SSLError('bad handshake: %r' % e) + break + + return WrappedSocket(cnx, sock) + + +def _verify_callback(cnx, x509, err_no, err_depth, return_code): + return err_no == 0 diff --git a/env/lib/python3.6/site-packages/pip/_vendor/urllib3/contrib/securetransport.py b/env/lib/python3.6/site-packages/pip/_vendor/urllib3/contrib/securetransport.py new file mode 100644 index 0000000..77cb59e --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/urllib3/contrib/securetransport.py @@ -0,0 +1,804 @@ +""" +SecureTranport support for urllib3 via ctypes. + +This makes platform-native TLS available to urllib3 users on macOS without the +use of a compiler. 
This is an important feature because the Python Package +Index is moving to become a TLSv1.2-or-higher server, and the default OpenSSL +that ships with macOS is not capable of doing TLSv1.2. The only way to resolve +this is to give macOS users an alternative solution to the problem, and that +solution is to use SecureTransport. + +We use ctypes here because this solution must not require a compiler. That's +because pip is not allowed to require a compiler either. + +This is not intended to be a seriously long-term solution to this problem. +The hope is that PEP 543 will eventually solve this issue for us, at which +point we can retire this contrib module. But in the short term, we need to +solve the impending tire fire that is Python on Mac without this kind of +contrib module. So...here we are. + +To use this module, simply import and inject it:: + + import urllib3.contrib.securetransport + urllib3.contrib.securetransport.inject_into_urllib3() + +Happy TLSing! +""" +from __future__ import absolute_import + +import contextlib +import ctypes +import errno +import os.path +import shutil +import socket +import ssl +import threading +import weakref + +from .. import util +from ._securetransport.bindings import ( + Security, SecurityConst, CoreFoundation +) +from ._securetransport.low_level import ( + _assert_no_error, _cert_array_from_pem, _temporary_keychain, + _load_client_cert_chain +) + +try: # Platform-specific: Python 2 + from socket import _fileobject +except ImportError: # Platform-specific: Python 3 + _fileobject = None + from ..packages.backports.makefile import backport_makefile + +__all__ = ['inject_into_urllib3', 'extract_from_urllib3'] + +# SNI always works +HAS_SNI = True + +orig_util_HAS_SNI = util.HAS_SNI +orig_util_SSLContext = util.ssl_.SSLContext + +# This dictionary is used by the read callback to obtain a handle to the +# calling wrapped socket. This is a pretty silly approach, but for now it'll +# do. I feel like I should be able to smuggle a handle to the wrapped socket +# directly in the SSLConnectionRef, but for now this approach will work I +# guess. +# +# We need to lock around this structure for inserts, but we don't do it for +# reads/writes in the callbacks. The reasoning here goes as follows: +# +# 1. It is not possible to call into the callbacks before the dictionary is +# populated, so once in the callback the id must be in the dictionary. +# 2. The callbacks don't mutate the dictionary, they only read from it, and +# so cannot conflict with any of the insertions. +# +# This is good: if we had to lock in the callbacks we'd drastically slow down +# the performance of this code. +_connection_refs = weakref.WeakValueDictionary() +_connection_ref_lock = threading.Lock() + +# Limit writes to 16kB. This is OpenSSL's limit, but we'll cargo-cult it over +# for no better reason than we need *a* limit, and this one is right there. +SSL_WRITE_BLOCKSIZE = 16384 + +# This is our equivalent of util.ssl_.DEFAULT_CIPHERS, but expanded out to +# individual cipher suites. We need to do this because this is how +# SecureTransport wants them. 
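+# Each entry is the IANA-registered cipher suite code point, exposed as an
+# integer constant on SecurityConst (for example,
+# SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256 is 0xC02F); the whole
+# list is handed to Security.SSLSetEnabledCiphers further down.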
+CIPHER_SUITES = [ + SecurityConst.TLS_AES_256_GCM_SHA384, + SecurityConst.TLS_CHACHA20_POLY1305_SHA256, + SecurityConst.TLS_AES_128_GCM_SHA256, + SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384, + SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384, + SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256, + SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256, + SecurityConst.TLS_DHE_DSS_WITH_AES_256_GCM_SHA384, + SecurityConst.TLS_DHE_RSA_WITH_AES_256_GCM_SHA384, + SecurityConst.TLS_DHE_DSS_WITH_AES_128_GCM_SHA256, + SecurityConst.TLS_DHE_RSA_WITH_AES_128_GCM_SHA256, + SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384, + SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384, + SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA, + SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA, + SecurityConst.TLS_DHE_RSA_WITH_AES_256_CBC_SHA256, + SecurityConst.TLS_DHE_DSS_WITH_AES_256_CBC_SHA256, + SecurityConst.TLS_DHE_RSA_WITH_AES_256_CBC_SHA, + SecurityConst.TLS_DHE_DSS_WITH_AES_256_CBC_SHA, + SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256, + SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256, + SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA, + SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA, + SecurityConst.TLS_DHE_RSA_WITH_AES_128_CBC_SHA256, + SecurityConst.TLS_DHE_DSS_WITH_AES_128_CBC_SHA256, + SecurityConst.TLS_DHE_RSA_WITH_AES_128_CBC_SHA, + SecurityConst.TLS_DHE_DSS_WITH_AES_128_CBC_SHA, + SecurityConst.TLS_RSA_WITH_AES_256_GCM_SHA384, + SecurityConst.TLS_RSA_WITH_AES_128_GCM_SHA256, + SecurityConst.TLS_RSA_WITH_AES_256_CBC_SHA256, + SecurityConst.TLS_RSA_WITH_AES_128_CBC_SHA256, + SecurityConst.TLS_RSA_WITH_AES_256_CBC_SHA, + SecurityConst.TLS_RSA_WITH_AES_128_CBC_SHA, +] + +# Basically this is simple: for PROTOCOL_SSLv23 we turn it into a low of +# TLSv1 and a high of TLSv1.2. For everything else, we pin to that version. +_protocol_to_min_max = { + ssl.PROTOCOL_SSLv23: (SecurityConst.kTLSProtocol1, SecurityConst.kTLSProtocol12), +} + +if hasattr(ssl, "PROTOCOL_SSLv2"): + _protocol_to_min_max[ssl.PROTOCOL_SSLv2] = ( + SecurityConst.kSSLProtocol2, SecurityConst.kSSLProtocol2 + ) +if hasattr(ssl, "PROTOCOL_SSLv3"): + _protocol_to_min_max[ssl.PROTOCOL_SSLv3] = ( + SecurityConst.kSSLProtocol3, SecurityConst.kSSLProtocol3 + ) +if hasattr(ssl, "PROTOCOL_TLSv1"): + _protocol_to_min_max[ssl.PROTOCOL_TLSv1] = ( + SecurityConst.kTLSProtocol1, SecurityConst.kTLSProtocol1 + ) +if hasattr(ssl, "PROTOCOL_TLSv1_1"): + _protocol_to_min_max[ssl.PROTOCOL_TLSv1_1] = ( + SecurityConst.kTLSProtocol11, SecurityConst.kTLSProtocol11 + ) +if hasattr(ssl, "PROTOCOL_TLSv1_2"): + _protocol_to_min_max[ssl.PROTOCOL_TLSv1_2] = ( + SecurityConst.kTLSProtocol12, SecurityConst.kTLSProtocol12 + ) +if hasattr(ssl, "PROTOCOL_TLS"): + _protocol_to_min_max[ssl.PROTOCOL_TLS] = _protocol_to_min_max[ssl.PROTOCOL_SSLv23] + + +def inject_into_urllib3(): + """ + Monkey-patch urllib3 with SecureTransport-backed SSL-support. + """ + util.ssl_.SSLContext = SecureTransportContext + util.HAS_SNI = HAS_SNI + util.ssl_.HAS_SNI = HAS_SNI + util.IS_SECURETRANSPORT = True + util.ssl_.IS_SECURETRANSPORT = True + + +def extract_from_urllib3(): + """ + Undo monkey-patching by :func:`inject_into_urllib3`. + """ + util.ssl_.SSLContext = orig_util_SSLContext + util.HAS_SNI = orig_util_HAS_SNI + util.ssl_.HAS_SNI = orig_util_HAS_SNI + util.IS_SECURETRANSPORT = False + util.ssl_.IS_SECURETRANSPORT = False + + +def _read_callback(connection_id, data_buffer, data_length_pointer): + """ + SecureTransport read callback. 
This is called by ST to request that data + be returned from the socket. + """ + wrapped_socket = None + try: + wrapped_socket = _connection_refs.get(connection_id) + if wrapped_socket is None: + return SecurityConst.errSSLInternal + base_socket = wrapped_socket.socket + + requested_length = data_length_pointer[0] + + timeout = wrapped_socket.gettimeout() + error = None + read_count = 0 + + try: + while read_count < requested_length: + if timeout is None or timeout >= 0: + if not util.wait_for_read(base_socket, timeout): + raise socket.error(errno.EAGAIN, 'timed out') + + remaining = requested_length - read_count + buffer = (ctypes.c_char * remaining).from_address( + data_buffer + read_count + ) + chunk_size = base_socket.recv_into(buffer, remaining) + read_count += chunk_size + if not chunk_size: + if not read_count: + return SecurityConst.errSSLClosedGraceful + break + except (socket.error) as e: + error = e.errno + + if error is not None and error != errno.EAGAIN: + data_length_pointer[0] = read_count + if error == errno.ECONNRESET or error == errno.EPIPE: + return SecurityConst.errSSLClosedAbort + raise + + data_length_pointer[0] = read_count + + if read_count != requested_length: + return SecurityConst.errSSLWouldBlock + + return 0 + except Exception as e: + if wrapped_socket is not None: + wrapped_socket._exception = e + return SecurityConst.errSSLInternal + + +def _write_callback(connection_id, data_buffer, data_length_pointer): + """ + SecureTransport write callback. This is called by ST to request that data + actually be sent on the network. + """ + wrapped_socket = None + try: + wrapped_socket = _connection_refs.get(connection_id) + if wrapped_socket is None: + return SecurityConst.errSSLInternal + base_socket = wrapped_socket.socket + + bytes_to_write = data_length_pointer[0] + data = ctypes.string_at(data_buffer, bytes_to_write) + + timeout = wrapped_socket.gettimeout() + error = None + sent = 0 + + try: + while sent < bytes_to_write: + if timeout is None or timeout >= 0: + if not util.wait_for_write(base_socket, timeout): + raise socket.error(errno.EAGAIN, 'timed out') + chunk_sent = base_socket.send(data) + sent += chunk_sent + + # This has some needless copying here, but I'm not sure there's + # much value in optimising this data path. + data = data[chunk_sent:] + except (socket.error) as e: + error = e.errno + + if error is not None and error != errno.EAGAIN: + data_length_pointer[0] = sent + if error == errno.ECONNRESET or error == errno.EPIPE: + return SecurityConst.errSSLClosedAbort + raise + + data_length_pointer[0] = sent + + if sent != bytes_to_write: + return SecurityConst.errSSLWouldBlock + + return 0 + except Exception as e: + if wrapped_socket is not None: + wrapped_socket._exception = e + return SecurityConst.errSSLInternal + + +# We need to keep these two objects references alive: if they get GC'd while +# in use then SecureTransport could attempt to call a function that is in freed +# memory. That would be...uh...bad. Yeah, that's the word. Bad. +_read_callback_pointer = Security.SSLReadFunc(_read_callback) +_write_callback_pointer = Security.SSLWriteFunc(_write_callback) + + +class WrappedSocket(object): + """ + API-compatibility wrapper for Python's OpenSSL wrapped socket object. + + Note: _makefile_refs, _drop(), and _reuse() are needed for the garbage + collector of PyPy. 
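+
+    Instances are normally obtained from
+    :meth:`SecureTransportContext.wrap_socket` on an already-connected socket
+    rather than constructed directly, e.g. (a minimal sketch)::
+
+        context = SecureTransportContext(ssl.PROTOCOL_SSLv23)
+        wrapped = context.wrap_socket(sock, server_hostname='example.com')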
+ """ + def __init__(self, socket): + self.socket = socket + self.context = None + self._makefile_refs = 0 + self._closed = False + self._exception = None + self._keychain = None + self._keychain_dir = None + self._client_cert_chain = None + + # We save off the previously-configured timeout and then set it to + # zero. This is done because we use select and friends to handle the + # timeouts, but if we leave the timeout set on the lower socket then + # Python will "kindly" call select on that socket again for us. Avoid + # that by forcing the timeout to zero. + self._timeout = self.socket.gettimeout() + self.socket.settimeout(0) + + @contextlib.contextmanager + def _raise_on_error(self): + """ + A context manager that can be used to wrap calls that do I/O from + SecureTransport. If any of the I/O callbacks hit an exception, this + context manager will correctly propagate the exception after the fact. + This avoids silently swallowing those exceptions. + + It also correctly forces the socket closed. + """ + self._exception = None + + # We explicitly don't catch around this yield because in the unlikely + # event that an exception was hit in the block we don't want to swallow + # it. + yield + if self._exception is not None: + exception, self._exception = self._exception, None + self.close() + raise exception + + def _set_ciphers(self): + """ + Sets up the allowed ciphers. By default this matches the set in + util.ssl_.DEFAULT_CIPHERS, at least as supported by macOS. This is done + custom and doesn't allow changing at this time, mostly because parsing + OpenSSL cipher strings is going to be a freaking nightmare. + """ + ciphers = (Security.SSLCipherSuite * len(CIPHER_SUITES))(*CIPHER_SUITES) + result = Security.SSLSetEnabledCiphers( + self.context, ciphers, len(CIPHER_SUITES) + ) + _assert_no_error(result) + + def _custom_validate(self, verify, trust_bundle): + """ + Called when we have set custom validation. We do this in two cases: + first, when cert validation is entirely disabled; and second, when + using a custom trust DB. + """ + # If we disabled cert validation, just say: cool. + if not verify: + return + + # We want data in memory, so load it up. + if os.path.isfile(trust_bundle): + with open(trust_bundle, 'rb') as f: + trust_bundle = f.read() + + cert_array = None + trust = Security.SecTrustRef() + + try: + # Get a CFArray that contains the certs we want. + cert_array = _cert_array_from_pem(trust_bundle) + + # Ok, now the hard part. We want to get the SecTrustRef that ST has + # created for this connection, shove our CAs into it, tell ST to + # ignore everything else it knows, and then ask if it can build a + # chain. This is a buuuunch of code. + result = Security.SSLCopyPeerTrust( + self.context, ctypes.byref(trust) + ) + _assert_no_error(result) + if not trust: + raise ssl.SSLError("Failed to copy trust reference") + + result = Security.SecTrustSetAnchorCertificates(trust, cert_array) + _assert_no_error(result) + + result = Security.SecTrustSetAnchorCertificatesOnly(trust, True) + _assert_no_error(result) + + trust_result = Security.SecTrustResultType() + result = Security.SecTrustEvaluate( + trust, ctypes.byref(trust_result) + ) + _assert_no_error(result) + finally: + if trust: + CoreFoundation.CFRelease(trust) + + if cert_array is not None: + CoreFoundation.CFRelease(cert_array) + + # Ok, now we can look at what the result was. 
+ successes = ( + SecurityConst.kSecTrustResultUnspecified, + SecurityConst.kSecTrustResultProceed + ) + if trust_result.value not in successes: + raise ssl.SSLError( + "certificate verify failed, error code: %d" % + trust_result.value + ) + + def handshake(self, + server_hostname, + verify, + trust_bundle, + min_version, + max_version, + client_cert, + client_key, + client_key_passphrase): + """ + Actually performs the TLS handshake. This is run automatically by + wrapped socket, and shouldn't be needed in user code. + """ + # First, we do the initial bits of connection setup. We need to create + # a context, set its I/O funcs, and set the connection reference. + self.context = Security.SSLCreateContext( + None, SecurityConst.kSSLClientSide, SecurityConst.kSSLStreamType + ) + result = Security.SSLSetIOFuncs( + self.context, _read_callback_pointer, _write_callback_pointer + ) + _assert_no_error(result) + + # Here we need to compute the handle to use. We do this by taking the + # id of self modulo 2**31 - 1. If this is already in the dictionary, we + # just keep incrementing by one until we find a free space. + with _connection_ref_lock: + handle = id(self) % 2147483647 + while handle in _connection_refs: + handle = (handle + 1) % 2147483647 + _connection_refs[handle] = self + + result = Security.SSLSetConnection(self.context, handle) + _assert_no_error(result) + + # If we have a server hostname, we should set that too. + if server_hostname: + if not isinstance(server_hostname, bytes): + server_hostname = server_hostname.encode('utf-8') + + result = Security.SSLSetPeerDomainName( + self.context, server_hostname, len(server_hostname) + ) + _assert_no_error(result) + + # Setup the ciphers. + self._set_ciphers() + + # Set the minimum and maximum TLS versions. + result = Security.SSLSetProtocolVersionMin(self.context, min_version) + _assert_no_error(result) + result = Security.SSLSetProtocolVersionMax(self.context, max_version) + _assert_no_error(result) + + # If there's a trust DB, we need to use it. We do that by telling + # SecureTransport to break on server auth. We also do that if we don't + # want to validate the certs at all: we just won't actually do any + # authing in that case. + if not verify or trust_bundle is not None: + result = Security.SSLSetSessionOption( + self.context, + SecurityConst.kSSLSessionOptionBreakOnServerAuth, + True + ) + _assert_no_error(result) + + # If there's a client cert, we need to use it. + if client_cert: + self._keychain, self._keychain_dir = _temporary_keychain() + self._client_cert_chain = _load_client_cert_chain( + self._keychain, client_cert, client_key + ) + result = Security.SSLSetCertificate( + self.context, self._client_cert_chain + ) + _assert_no_error(result) + + while True: + with self._raise_on_error(): + result = Security.SSLHandshake(self.context) + + if result == SecurityConst.errSSLWouldBlock: + raise socket.timeout("handshake timed out") + elif result == SecurityConst.errSSLServerAuthCompleted: + self._custom_validate(verify, trust_bundle) + continue + else: + _assert_no_error(result) + break + + def fileno(self): + return self.socket.fileno() + + # Copy-pasted from Python 3.5 source code + def _decref_socketios(self): + if self._makefile_refs > 0: + self._makefile_refs -= 1 + if self._closed: + self.close() + + def recv(self, bufsiz): + buffer = ctypes.create_string_buffer(bufsiz) + bytes_read = self.recv_into(buffer, bufsiz) + data = buffer[:bytes_read] + return data + + def recv_into(self, buffer, nbytes=None): + # Read short on EOF. 
+ if self._closed: + return 0 + + if nbytes is None: + nbytes = len(buffer) + + buffer = (ctypes.c_char * nbytes).from_buffer(buffer) + processed_bytes = ctypes.c_size_t(0) + + with self._raise_on_error(): + result = Security.SSLRead( + self.context, buffer, nbytes, ctypes.byref(processed_bytes) + ) + + # There are some result codes that we want to treat as "not always + # errors". Specifically, those are errSSLWouldBlock, + # errSSLClosedGraceful, and errSSLClosedNoNotify. + if (result == SecurityConst.errSSLWouldBlock): + # If we didn't process any bytes, then this was just a time out. + # However, we can get errSSLWouldBlock in situations when we *did* + # read some data, and in those cases we should just read "short" + # and return. + if processed_bytes.value == 0: + # Timed out, no data read. + raise socket.timeout("recv timed out") + elif result in (SecurityConst.errSSLClosedGraceful, SecurityConst.errSSLClosedNoNotify): + # The remote peer has closed this connection. We should do so as + # well. Note that we don't actually return here because in + # principle this could actually be fired along with return data. + # It's unlikely though. + self.close() + else: + _assert_no_error(result) + + # Ok, we read and probably succeeded. We should return whatever data + # was actually read. + return processed_bytes.value + + def settimeout(self, timeout): + self._timeout = timeout + + def gettimeout(self): + return self._timeout + + def send(self, data): + processed_bytes = ctypes.c_size_t(0) + + with self._raise_on_error(): + result = Security.SSLWrite( + self.context, data, len(data), ctypes.byref(processed_bytes) + ) + + if result == SecurityConst.errSSLWouldBlock and processed_bytes.value == 0: + # Timed out + raise socket.timeout("send timed out") + else: + _assert_no_error(result) + + # We sent, and probably succeeded. Tell them how much we sent. + return processed_bytes.value + + def sendall(self, data): + total_sent = 0 + while total_sent < len(data): + sent = self.send(data[total_sent:total_sent + SSL_WRITE_BLOCKSIZE]) + total_sent += sent + + def shutdown(self): + with self._raise_on_error(): + Security.SSLClose(self.context) + + def close(self): + # TODO: should I do clean shutdown here? Do I have to? + if self._makefile_refs < 1: + self._closed = True + if self.context: + CoreFoundation.CFRelease(self.context) + self.context = None + if self._client_cert_chain: + CoreFoundation.CFRelease(self._client_cert_chain) + self._client_cert_chain = None + if self._keychain: + Security.SecKeychainDelete(self._keychain) + CoreFoundation.CFRelease(self._keychain) + shutil.rmtree(self._keychain_dir) + self._keychain = self._keychain_dir = None + return self.socket.close() + else: + self._makefile_refs -= 1 + + def getpeercert(self, binary_form=False): + # Urgh, annoying. + # + # Here's how we do this: + # + # 1. Call SSLCopyPeerTrust to get hold of the trust object for this + # connection. + # 2. Call SecTrustGetCertificateAtIndex for index 0 to get the leaf. + # 3. To get the CN, call SecCertificateCopyCommonName and process that + # string so that it's of the appropriate type. + # 4. To get the SAN, we need to do something a bit more complex: + # a. Call SecCertificateCopyValues to get the data, requesting + # kSecOIDSubjectAltName. + # b. Mess about with this dictionary to try to get the SANs out. + # + # This is gross. Really gross. It's going to be a few hundred LoC extra + # just to repeat something that SecureTransport can *already do*. 
So my + # operating assumption at this time is that what we want to do is + # instead to just flag to urllib3 that it shouldn't do its own hostname + # validation when using SecureTransport. + if not binary_form: + raise ValueError( + "SecureTransport only supports dumping binary certs" + ) + trust = Security.SecTrustRef() + certdata = None + der_bytes = None + + try: + # Grab the trust store. + result = Security.SSLCopyPeerTrust( + self.context, ctypes.byref(trust) + ) + _assert_no_error(result) + if not trust: + # Probably we haven't done the handshake yet. No biggie. + return None + + cert_count = Security.SecTrustGetCertificateCount(trust) + if not cert_count: + # Also a case that might happen if we haven't handshaked. + # Handshook? Handshaken? + return None + + leaf = Security.SecTrustGetCertificateAtIndex(trust, 0) + assert leaf + + # Ok, now we want the DER bytes. + certdata = Security.SecCertificateCopyData(leaf) + assert certdata + + data_length = CoreFoundation.CFDataGetLength(certdata) + data_buffer = CoreFoundation.CFDataGetBytePtr(certdata) + der_bytes = ctypes.string_at(data_buffer, data_length) + finally: + if certdata: + CoreFoundation.CFRelease(certdata) + if trust: + CoreFoundation.CFRelease(trust) + + return der_bytes + + def _reuse(self): + self._makefile_refs += 1 + + def _drop(self): + if self._makefile_refs < 1: + self.close() + else: + self._makefile_refs -= 1 + + +if _fileobject: # Platform-specific: Python 2 + def makefile(self, mode, bufsize=-1): + self._makefile_refs += 1 + return _fileobject(self, mode, bufsize, close=True) +else: # Platform-specific: Python 3 + def makefile(self, mode="r", buffering=None, *args, **kwargs): + # We disable buffering with SecureTransport because it conflicts with + # the buffering that ST does internally (see issue #1153 for more). + buffering = 0 + return backport_makefile(self, mode, buffering, *args, **kwargs) + +WrappedSocket.makefile = makefile + + +class SecureTransportContext(object): + """ + I am a wrapper class for the SecureTransport library, to translate the + interface of the standard library ``SSLContext`` object to calls into + SecureTransport. + """ + def __init__(self, protocol): + self._min_version, self._max_version = _protocol_to_min_max[protocol] + self._options = 0 + self._verify = False + self._trust_bundle = None + self._client_cert = None + self._client_key = None + self._client_key_passphrase = None + + @property + def check_hostname(self): + """ + SecureTransport cannot have its hostname checking disabled. For more, + see the comment on getpeercert() in this file. + """ + return True + + @check_hostname.setter + def check_hostname(self, value): + """ + SecureTransport cannot have its hostname checking disabled. For more, + see the comment on getpeercert() in this file. + """ + pass + + @property + def options(self): + # TODO: Well, crap. + # + # So this is the bit of the code that is the most likely to cause us + # trouble. Essentially we need to enumerate all of the SSL options that + # users might want to use and try to see if we can sensibly translate + # them, or whether we should just ignore them. + return self._options + + @options.setter + def options(self, value): + # TODO: Update in line with above. 
+ self._options = value + + @property + def verify_mode(self): + return ssl.CERT_REQUIRED if self._verify else ssl.CERT_NONE + + @verify_mode.setter + def verify_mode(self, value): + self._verify = True if value == ssl.CERT_REQUIRED else False + + def set_default_verify_paths(self): + # So, this has to do something a bit weird. Specifically, what it does + # is nothing. + # + # This means that, if we had previously had load_verify_locations + # called, this does not undo that. We need to do that because it turns + # out that the rest of the urllib3 code will attempt to load the + # default verify paths if it hasn't been told about any paths, even if + # the context itself was sometime earlier. We resolve that by just + # ignoring it. + pass + + def load_default_certs(self): + return self.set_default_verify_paths() + + def set_ciphers(self, ciphers): + # For now, we just require the default cipher string. + if ciphers != util.ssl_.DEFAULT_CIPHERS: + raise ValueError( + "SecureTransport doesn't support custom cipher strings" + ) + + def load_verify_locations(self, cafile=None, capath=None, cadata=None): + # OK, we only really support cadata and cafile. + if capath is not None: + raise ValueError( + "SecureTransport does not support cert directories" + ) + + self._trust_bundle = cafile or cadata + + def load_cert_chain(self, certfile, keyfile=None, password=None): + self._client_cert = certfile + self._client_key = keyfile + self._client_cert_passphrase = password + + def wrap_socket(self, sock, server_side=False, + do_handshake_on_connect=True, suppress_ragged_eofs=True, + server_hostname=None): + # So, what do we do here? Firstly, we assert some properties. This is a + # stripped down shim, so there is some functionality we don't support. + # See PEP 543 for the real deal. + assert not server_side + assert do_handshake_on_connect + assert suppress_ragged_eofs + + # Ok, we're good to go. Now we want to create the wrapped socket object + # and store it in the appropriate place. + wrapped_socket = WrappedSocket(sock) + + # Now we can handshake + wrapped_socket.handshake( + server_hostname, self._verify, self._trust_bundle, + self._min_version, self._max_version, self._client_cert, + self._client_key, self._client_key_passphrase + ) + return wrapped_socket diff --git a/env/lib/python3.6/site-packages/pip/_vendor/urllib3/contrib/socks.py b/env/lib/python3.6/site-packages/pip/_vendor/urllib3/contrib/socks.py new file mode 100644 index 0000000..811e312 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/urllib3/contrib/socks.py @@ -0,0 +1,192 @@ +# -*- coding: utf-8 -*- +""" +This module contains provisional support for SOCKS proxies from within +urllib3. This module supports SOCKS4 (specifically the SOCKS4A variant) and +SOCKS5. To enable its functionality, either install PySocks or install this +module with the ``socks`` extra. + +The SOCKS implementation supports the full range of urllib3 features. It also +supports the following SOCKS features: + +- SOCKS4 +- SOCKS4a +- SOCKS5 +- Usernames and passwords for the SOCKS proxy + +Known Limitations: + +- Currently PySocks does not support contacting remote websites via literal + IPv6 addresses. Any such connection attempt will fail. You must use a domain + name. +- Currently PySocks does not support IPv6 connections to the SOCKS proxy. Any + such connection attempt will fail. 
+""" +from __future__ import absolute_import + +try: + import socks +except ImportError: + import warnings + from ..exceptions import DependencyWarning + + warnings.warn(( + 'SOCKS support in urllib3 requires the installation of optional ' + 'dependencies: specifically, PySocks. For more information, see ' + 'https://urllib3.readthedocs.io/en/latest/contrib.html#socks-proxies' + ), + DependencyWarning + ) + raise + +from socket import error as SocketError, timeout as SocketTimeout + +from ..connection import ( + HTTPConnection, HTTPSConnection +) +from ..connectionpool import ( + HTTPConnectionPool, HTTPSConnectionPool +) +from ..exceptions import ConnectTimeoutError, NewConnectionError +from ..poolmanager import PoolManager +from ..util.url import parse_url + +try: + import ssl +except ImportError: + ssl = None + + +class SOCKSConnection(HTTPConnection): + """ + A plain-text HTTP connection that connects via a SOCKS proxy. + """ + def __init__(self, *args, **kwargs): + self._socks_options = kwargs.pop('_socks_options') + super(SOCKSConnection, self).__init__(*args, **kwargs) + + def _new_conn(self): + """ + Establish a new connection via the SOCKS proxy. + """ + extra_kw = {} + if self.source_address: + extra_kw['source_address'] = self.source_address + + if self.socket_options: + extra_kw['socket_options'] = self.socket_options + + try: + conn = socks.create_connection( + (self.host, self.port), + proxy_type=self._socks_options['socks_version'], + proxy_addr=self._socks_options['proxy_host'], + proxy_port=self._socks_options['proxy_port'], + proxy_username=self._socks_options['username'], + proxy_password=self._socks_options['password'], + proxy_rdns=self._socks_options['rdns'], + timeout=self.timeout, + **extra_kw + ) + + except SocketTimeout as e: + raise ConnectTimeoutError( + self, "Connection to %s timed out. (connect timeout=%s)" % + (self.host, self.timeout)) + + except socks.ProxyError as e: + # This is fragile as hell, but it seems to be the only way to raise + # useful errors here. + if e.socket_err: + error = e.socket_err + if isinstance(error, SocketTimeout): + raise ConnectTimeoutError( + self, + "Connection to %s timed out. (connect timeout=%s)" % + (self.host, self.timeout) + ) + else: + raise NewConnectionError( + self, + "Failed to establish a new connection: %s" % error + ) + else: + raise NewConnectionError( + self, + "Failed to establish a new connection: %s" % e + ) + + except SocketError as e: # Defensive: PySocks should catch all these. + raise NewConnectionError( + self, "Failed to establish a new connection: %s" % e) + + return conn + + +# We don't need to duplicate the Verified/Unverified distinction from +# urllib3/connection.py here because the HTTPSConnection will already have been +# correctly set to either the Verified or Unverified form by that module. This +# means the SOCKSHTTPSConnection will automatically be the correct type. +class SOCKSHTTPSConnection(SOCKSConnection, HTTPSConnection): + pass + + +class SOCKSHTTPConnectionPool(HTTPConnectionPool): + ConnectionCls = SOCKSConnection + + +class SOCKSHTTPSConnectionPool(HTTPSConnectionPool): + ConnectionCls = SOCKSHTTPSConnection + + +class SOCKSProxyManager(PoolManager): + """ + A version of the urllib3 ProxyManager that routes connections via the + defined SOCKS proxy. 
+ """ + pool_classes_by_scheme = { + 'http': SOCKSHTTPConnectionPool, + 'https': SOCKSHTTPSConnectionPool, + } + + def __init__(self, proxy_url, username=None, password=None, + num_pools=10, headers=None, **connection_pool_kw): + parsed = parse_url(proxy_url) + + if username is None and password is None and parsed.auth is not None: + split = parsed.auth.split(':') + if len(split) == 2: + username, password = split + if parsed.scheme == 'socks5': + socks_version = socks.PROXY_TYPE_SOCKS5 + rdns = False + elif parsed.scheme == 'socks5h': + socks_version = socks.PROXY_TYPE_SOCKS5 + rdns = True + elif parsed.scheme == 'socks4': + socks_version = socks.PROXY_TYPE_SOCKS4 + rdns = False + elif parsed.scheme == 'socks4a': + socks_version = socks.PROXY_TYPE_SOCKS4 + rdns = True + else: + raise ValueError( + "Unable to determine SOCKS version from %s" % proxy_url + ) + + self.proxy_url = proxy_url + + socks_options = { + 'socks_version': socks_version, + 'proxy_host': parsed.host, + 'proxy_port': parsed.port, + 'username': username, + 'password': password, + 'rdns': rdns + } + connection_pool_kw['_socks_options'] = socks_options + + super(SOCKSProxyManager, self).__init__( + num_pools, headers, **connection_pool_kw + ) + + self.pool_classes_by_scheme = SOCKSProxyManager.pool_classes_by_scheme diff --git a/env/lib/python3.6/site-packages/pip/_vendor/urllib3/exceptions.py b/env/lib/python3.6/site-packages/pip/_vendor/urllib3/exceptions.py new file mode 100644 index 0000000..7bbaa98 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/urllib3/exceptions.py @@ -0,0 +1,246 @@ +from __future__ import absolute_import +from .packages.six.moves.http_client import ( + IncompleteRead as httplib_IncompleteRead +) +# Base Exceptions + + +class HTTPError(Exception): + "Base exception used by this module." + pass + + +class HTTPWarning(Warning): + "Base warning used by this module." + pass + + +class PoolError(HTTPError): + "Base exception for errors caused within a pool." + def __init__(self, pool, message): + self.pool = pool + HTTPError.__init__(self, "%s: %s" % (pool, message)) + + def __reduce__(self): + # For pickling purposes. + return self.__class__, (None, None) + + +class RequestError(PoolError): + "Base exception for PoolErrors that have associated URLs." + def __init__(self, pool, url, message): + self.url = url + PoolError.__init__(self, pool, message) + + def __reduce__(self): + # For pickling purposes. + return self.__class__, (None, self.url, None) + + +class SSLError(HTTPError): + "Raised when SSL certificate fails in an HTTPS connection." + pass + + +class ProxyError(HTTPError): + "Raised when the connection to a proxy fails." + pass + + +class DecodeError(HTTPError): + "Raised when automatic decoding based on Content-Type fails." + pass + + +class ProtocolError(HTTPError): + "Raised when something unexpected happens mid-request/response." + pass + + +#: Renamed to ProtocolError but aliased for backwards compatibility. +ConnectionError = ProtocolError + + +# Leaf Exceptions + +class MaxRetryError(RequestError): + """Raised when the maximum number of retries is exceeded. 
+ + :param pool: The connection pool + :type pool: :class:`~urllib3.connectionpool.HTTPConnectionPool` + :param string url: The requested Url + :param exceptions.Exception reason: The underlying error + + """ + + def __init__(self, pool, url, reason=None): + self.reason = reason + + message = "Max retries exceeded with url: %s (Caused by %r)" % ( + url, reason) + + RequestError.__init__(self, pool, url, message) + + +class HostChangedError(RequestError): + "Raised when an existing pool gets a request for a foreign host." + + def __init__(self, pool, url, retries=3): + message = "Tried to open a foreign host with url: %s" % url + RequestError.__init__(self, pool, url, message) + self.retries = retries + + +class TimeoutStateError(HTTPError): + """ Raised when passing an invalid state to a timeout """ + pass + + +class TimeoutError(HTTPError): + """ Raised when a socket timeout error occurs. + + Catching this error will catch both :exc:`ReadTimeoutErrors + <ReadTimeoutError>` and :exc:`ConnectTimeoutErrors <ConnectTimeoutError>`. + """ + pass + + +class ReadTimeoutError(TimeoutError, RequestError): + "Raised when a socket timeout occurs while receiving data from a server" + pass + + +# This timeout error does not have a URL attached and needs to inherit from the +# base HTTPError +class ConnectTimeoutError(TimeoutError): + "Raised when a socket timeout occurs while connecting to a server" + pass + + +class NewConnectionError(ConnectTimeoutError, PoolError): + "Raised when we fail to establish a new connection. Usually ECONNREFUSED." + pass + + +class EmptyPoolError(PoolError): + "Raised when a pool runs out of connections and no more are allowed." + pass + + +class ClosedPoolError(PoolError): + "Raised when a request enters a pool after the pool has been closed." + pass + + +class LocationValueError(ValueError, HTTPError): + "Raised when there is something wrong with a given URL input." + pass + + +class LocationParseError(LocationValueError): + "Raised when get_host or similar fails to parse the URL input." + + def __init__(self, location): + message = "Failed to parse: %s" % location + HTTPError.__init__(self, message) + + self.location = location + + +class ResponseError(HTTPError): + "Used as a container for an error reason supplied in a MaxRetryError." + GENERIC_ERROR = 'too many error responses' + SPECIFIC_ERROR = 'too many {status_code} error responses' + + +class SecurityWarning(HTTPWarning): + "Warned when performing security reducing actions" + pass + + +class SubjectAltNameWarning(SecurityWarning): + "Warned when connecting to a host with a certificate missing a SAN." + pass + + +class InsecureRequestWarning(SecurityWarning): + "Warned when making an unverified HTTPS request." + pass + + +class SystemTimeWarning(SecurityWarning): + "Warned when system time is suspected to be wrong" + pass + + +class InsecurePlatformWarning(SecurityWarning): + "Warned when certain SSL configuration is not available on a platform." + pass + + +class SNIMissingWarning(HTTPWarning): + "Warned when making a HTTPS request without SNI available." + pass + + +class DependencyWarning(HTTPWarning): + """ + Warned when an attempt is made to import a module with missing optional + dependencies. + """ + pass + + +class ResponseNotChunked(ProtocolError, ValueError): + "Response needs to be chunked in order to read it as chunks." + pass + + +class BodyNotHttplibCompatible(HTTPError): + """ + Body should be httplib.HTTPResponse like (have an fp attribute which + returns raw chunks) for read_chunked(). 
+ """ + pass + + +class IncompleteRead(HTTPError, httplib_IncompleteRead): + """ + Response length doesn't match expected Content-Length + + Subclass of http_client.IncompleteRead to allow int value + for `partial` to avoid creating large objects on streamed + reads. + """ + def __init__(self, partial, expected): + super(IncompleteRead, self).__init__(partial, expected) + + def __repr__(self): + return ('IncompleteRead(%i bytes read, ' + '%i more expected)' % (self.partial, self.expected)) + + +class InvalidHeader(HTTPError): + "The header provided was somehow invalid." + pass + + +class ProxySchemeUnknown(AssertionError, ValueError): + "ProxyManager does not support the supplied scheme" + # TODO(t-8ch): Stop inheriting from AssertionError in v2.0. + + def __init__(self, scheme): + message = "Not supported proxy scheme %s" % scheme + super(ProxySchemeUnknown, self).__init__(message) + + +class HeaderParsingError(HTTPError): + "Raised by assert_header_parsing, but we convert it to a log.warning statement." + def __init__(self, defects, unparsed_data): + message = '%s, unparsed data: %r' % (defects or 'Unknown', unparsed_data) + super(HeaderParsingError, self).__init__(message) + + +class UnrewindableBodyError(HTTPError): + "urllib3 encountered an error when trying to rewind a body" + pass diff --git a/env/lib/python3.6/site-packages/pip/_vendor/urllib3/fields.py b/env/lib/python3.6/site-packages/pip/_vendor/urllib3/fields.py new file mode 100644 index 0000000..37fe64a --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/urllib3/fields.py @@ -0,0 +1,178 @@ +from __future__ import absolute_import +import email.utils +import mimetypes + +from .packages import six + + +def guess_content_type(filename, default='application/octet-stream'): + """ + Guess the "Content-Type" of a file. + + :param filename: + The filename to guess the "Content-Type" of using :mod:`mimetypes`. + :param default: + If no "Content-Type" can be guessed, default to `default`. + """ + if filename: + return mimetypes.guess_type(filename)[0] or default + return default + + +def format_header_param(name, value): + """ + Helper function to format and quote a single header parameter. + + Particularly useful for header parameters which might contain + non-ASCII values, like file names. This follows RFC 2231, as + suggested by RFC 2388 Section 4.4. + + :param name: + The name of the parameter, a string expected to be ASCII only. + :param value: + The value of the parameter, provided as a unicode string. + """ + if not any(ch in value for ch in '"\\\r\n'): + result = '%s="%s"' % (name, value) + try: + result.encode('ascii') + except (UnicodeEncodeError, UnicodeDecodeError): + pass + else: + return result + if not six.PY3 and isinstance(value, six.text_type): # Python 2: + value = value.encode('utf-8') + value = email.utils.encode_rfc2231(value, 'utf-8') + value = '%s*=%s' % (name, value) + return value + + +class RequestField(object): + """ + A data container for request body parameters. + + :param name: + The name of this request field. + :param data: + The data/value body. + :param filename: + An optional filename of the request field. + :param headers: + An optional dict-like object of headers to initially use for the field. 
+ """ + def __init__(self, name, data, filename=None, headers=None): + self._name = name + self._filename = filename + self.data = data + self.headers = {} + if headers: + self.headers = dict(headers) + + @classmethod + def from_tuples(cls, fieldname, value): + """ + A :class:`~urllib3.fields.RequestField` factory from old-style tuple parameters. + + Supports constructing :class:`~urllib3.fields.RequestField` from + parameter of key/value strings AND key/filetuple. A filetuple is a + (filename, data, MIME type) tuple where the MIME type is optional. + For example:: + + 'foo': 'bar', + 'fakefile': ('foofile.txt', 'contents of foofile'), + 'realfile': ('barfile.txt', open('realfile').read()), + 'typedfile': ('bazfile.bin', open('bazfile').read(), 'image/jpeg'), + 'nonamefile': 'contents of nonamefile field', + + Field names and filenames must be unicode. + """ + if isinstance(value, tuple): + if len(value) == 3: + filename, data, content_type = value + else: + filename, data = value + content_type = guess_content_type(filename) + else: + filename = None + content_type = None + data = value + + request_param = cls(fieldname, data, filename=filename) + request_param.make_multipart(content_type=content_type) + + return request_param + + def _render_part(self, name, value): + """ + Overridable helper function to format a single header parameter. + + :param name: + The name of the parameter, a string expected to be ASCII only. + :param value: + The value of the parameter, provided as a unicode string. + """ + return format_header_param(name, value) + + def _render_parts(self, header_parts): + """ + Helper function to format and quote a single header. + + Useful for single headers that are composed of multiple items. E.g., + 'Content-Disposition' fields. + + :param header_parts: + A sequence of (k, v) tuples or a :class:`dict` of (k, v) to format + as `k1="v1"; k2="v2"; ...`. + """ + parts = [] + iterable = header_parts + if isinstance(header_parts, dict): + iterable = header_parts.items() + + for name, value in iterable: + if value is not None: + parts.append(self._render_part(name, value)) + + return '; '.join(parts) + + def render_headers(self): + """ + Renders the headers for this request field. + """ + lines = [] + + sort_keys = ['Content-Disposition', 'Content-Type', 'Content-Location'] + for sort_key in sort_keys: + if self.headers.get(sort_key, False): + lines.append('%s: %s' % (sort_key, self.headers[sort_key])) + + for header_name, header_value in self.headers.items(): + if header_name not in sort_keys: + if header_value: + lines.append('%s: %s' % (header_name, header_value)) + + lines.append('\r\n') + return '\r\n'.join(lines) + + def make_multipart(self, content_disposition=None, content_type=None, + content_location=None): + """ + Makes this request field into a multipart request field. + + This method overrides "Content-Disposition", "Content-Type" and + "Content-Location" headers to the request parameter. + + :param content_type: + The 'Content-Type' of the request body. + :param content_location: + The 'Content-Location' of the request body. 
+ + """ + self.headers['Content-Disposition'] = content_disposition or 'form-data' + self.headers['Content-Disposition'] += '; '.join([ + '', self._render_parts( + (('name', self._name), ('filename', self._filename)) + ) + ]) + self.headers['Content-Type'] = content_type + self.headers['Content-Location'] = content_location diff --git a/env/lib/python3.6/site-packages/pip/_vendor/urllib3/filepost.py b/env/lib/python3.6/site-packages/pip/_vendor/urllib3/filepost.py new file mode 100644 index 0000000..78f1e19 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/urllib3/filepost.py @@ -0,0 +1,98 @@ +from __future__ import absolute_import +import binascii +import codecs +import os + +from io import BytesIO + +from .packages import six +from .packages.six import b +from .fields import RequestField + +writer = codecs.lookup('utf-8')[3] + + +def choose_boundary(): + """ + Our embarrassingly-simple replacement for mimetools.choose_boundary. + """ + boundary = binascii.hexlify(os.urandom(16)) + if six.PY3: + boundary = boundary.decode('ascii') + return boundary + + +def iter_field_objects(fields): + """ + Iterate over fields. + + Supports list of (k, v) tuples and dicts, and lists of + :class:`~urllib3.fields.RequestField`. + + """ + if isinstance(fields, dict): + i = six.iteritems(fields) + else: + i = iter(fields) + + for field in i: + if isinstance(field, RequestField): + yield field + else: + yield RequestField.from_tuples(*field) + + +def iter_fields(fields): + """ + .. deprecated:: 1.6 + + Iterate over fields. + + The addition of :class:`~urllib3.fields.RequestField` makes this function + obsolete. Instead, use :func:`iter_field_objects`, which returns + :class:`~urllib3.fields.RequestField` objects. + + Supports list of (k, v) tuples and dicts. + """ + if isinstance(fields, dict): + return ((k, v) for k, v in six.iteritems(fields)) + + return ((k, v) for k, v in fields) + + +def encode_multipart_formdata(fields, boundary=None): + """ + Encode a dictionary of ``fields`` using the multipart/form-data MIME format. + + :param fields: + Dictionary of fields or list of (key, :class:`~urllib3.fields.RequestField`). + + :param boundary: + If not specified, then a random boundary will be generated using + :func:`urllib3.filepost.choose_boundary`. + """ + body = BytesIO() + if boundary is None: + boundary = choose_boundary() + + for field in iter_field_objects(fields): + body.write(b('--%s\r\n' % (boundary))) + + writer(body).write(field.render_headers()) + data = field.data + + if isinstance(data, int): + data = str(data) # Backwards compatibility + + if isinstance(data, six.text_type): + writer(body).write(data) + else: + body.write(data) + + body.write(b'\r\n') + + body.write(b('--%s--\r\n' % (boundary))) + + content_type = str('multipart/form-data; boundary=%s' % boundary) + + return body.getvalue(), content_type diff --git a/env/lib/python3.6/site-packages/pip/_vendor/urllib3/packages/backports/makefile.py b/env/lib/python3.6/site-packages/pip/_vendor/urllib3/packages/backports/makefile.py new file mode 100644 index 0000000..75b80dc --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/urllib3/packages/backports/makefile.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +""" +backports.makefile +~~~~~~~~~~~~~~~~~~ + +Backports the Python 3 ``socket.makefile`` method for use with anything that +wants to create a "fake" socket object. 
+""" +import io + +from socket import SocketIO + + +def backport_makefile(self, mode="r", buffering=None, encoding=None, + errors=None, newline=None): + """ + Backport of ``socket.makefile`` from Python 3.5. + """ + if not set(mode) <= set(["r", "w", "b"]): + raise ValueError( + "invalid mode %r (only r, w, b allowed)" % (mode,) + ) + writing = "w" in mode + reading = "r" in mode or not writing + assert reading or writing + binary = "b" in mode + rawmode = "" + if reading: + rawmode += "r" + if writing: + rawmode += "w" + raw = SocketIO(self, rawmode) + self._makefile_refs += 1 + if buffering is None: + buffering = -1 + if buffering < 0: + buffering = io.DEFAULT_BUFFER_SIZE + if buffering == 0: + if not binary: + raise ValueError("unbuffered streams must be binary") + return raw + if reading and writing: + buffer = io.BufferedRWPair(raw, raw, buffering) + elif reading: + buffer = io.BufferedReader(raw, buffering) + else: + assert writing + buffer = io.BufferedWriter(raw, buffering) + if binary: + return buffer + text = io.TextIOWrapper(buffer, encoding, errors, newline) + text.mode = mode + return text diff --git a/env/lib/python3.6/site-packages/pip/_vendor/urllib3/packages/ordered_dict.py b/env/lib/python3.6/site-packages/pip/_vendor/urllib3/packages/ordered_dict.py new file mode 100644 index 0000000..4479363 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/urllib3/packages/ordered_dict.py @@ -0,0 +1,259 @@ +# Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy. +# Passes Python2.7's test suite and incorporates all the latest updates. +# Copyright 2009 Raymond Hettinger, released under the MIT License. +# http://code.activestate.com/recipes/576693/ +try: + from thread import get_ident as _get_ident +except ImportError: + from dummy_thread import get_ident as _get_ident + +try: + from _abcoll import KeysView, ValuesView, ItemsView +except ImportError: + pass + + +class OrderedDict(dict): + 'Dictionary that remembers insertion order' + # An inherited dict maps keys to values. + # The inherited dict provides __getitem__, __len__, __contains__, and get. + # The remaining methods are order-aware. + # Big-O running times for all methods are the same as for regular dictionaries. + + # The internal self.__map dictionary maps keys to links in a doubly linked list. + # The circular doubly linked list starts and ends with a sentinel element. + # The sentinel element never gets deleted (this simplifies the algorithm). + # Each link is stored as a list of length three: [PREV, NEXT, KEY]. + + def __init__(self, *args, **kwds): + '''Initialize an ordered dictionary. Signature is the same as for + regular dictionaries, but keyword arguments are not recommended + because their insertion order is arbitrary. + + ''' + if len(args) > 1: + raise TypeError('expected at most 1 arguments, got %d' % len(args)) + try: + self.__root + except AttributeError: + self.__root = root = [] # sentinel node + root[:] = [root, root, None] + self.__map = {} + self.__update(*args, **kwds) + + def __setitem__(self, key, value, dict_setitem=dict.__setitem__): + 'od.__setitem__(i, y) <==> od[i]=y' + # Setting a new item creates a new link which goes at the end of the linked + # list, and the inherited dictionary is updated with the new key/value pair. 
+ if key not in self: + root = self.__root + last = root[0] + last[1] = root[0] = self.__map[key] = [last, root, key] + dict_setitem(self, key, value) + + def __delitem__(self, key, dict_delitem=dict.__delitem__): + 'od.__delitem__(y) <==> del od[y]' + # Deleting an existing item uses self.__map to find the link which is + # then removed by updating the links in the predecessor and successor nodes. + dict_delitem(self, key) + link_prev, link_next, key = self.__map.pop(key) + link_prev[1] = link_next + link_next[0] = link_prev + + def __iter__(self): + 'od.__iter__() <==> iter(od)' + root = self.__root + curr = root[1] + while curr is not root: + yield curr[2] + curr = curr[1] + + def __reversed__(self): + 'od.__reversed__() <==> reversed(od)' + root = self.__root + curr = root[0] + while curr is not root: + yield curr[2] + curr = curr[0] + + def clear(self): + 'od.clear() -> None. Remove all items from od.' + try: + for node in self.__map.itervalues(): + del node[:] + root = self.__root + root[:] = [root, root, None] + self.__map.clear() + except AttributeError: + pass + dict.clear(self) + + def popitem(self, last=True): + '''od.popitem() -> (k, v), return and remove a (key, value) pair. + Pairs are returned in LIFO order if last is true or FIFO order if false. + + ''' + if not self: + raise KeyError('dictionary is empty') + root = self.__root + if last: + link = root[0] + link_prev = link[0] + link_prev[1] = root + root[0] = link_prev + else: + link = root[1] + link_next = link[1] + root[1] = link_next + link_next[0] = root + key = link[2] + del self.__map[key] + value = dict.pop(self, key) + return key, value + + # -- the following methods do not depend on the internal structure -- + + def keys(self): + 'od.keys() -> list of keys in od' + return list(self) + + def values(self): + 'od.values() -> list of values in od' + return [self[key] for key in self] + + def items(self): + 'od.items() -> list of (key, value) pairs in od' + return [(key, self[key]) for key in self] + + def iterkeys(self): + 'od.iterkeys() -> an iterator over the keys in od' + return iter(self) + + def itervalues(self): + 'od.itervalues -> an iterator over the values in od' + for k in self: + yield self[k] + + def iteritems(self): + 'od.iteritems -> an iterator over the (key, value) items in od' + for k in self: + yield (k, self[k]) + + def update(*args, **kwds): + '''od.update(E, **F) -> None. Update od from dict/iterable E and F. + + If E is a dict instance, does: for k in E: od[k] = E[k] + If E has a .keys() method, does: for k in E.keys(): od[k] = E[k] + Or if E is an iterable of items, does: for k, v in E: od[k] = v + In either case, this is followed by: for k, v in F.items(): od[k] = v + + ''' + if len(args) > 2: + raise TypeError('update() takes at most 2 positional ' + 'arguments (%d given)' % (len(args),)) + elif not args: + raise TypeError('update() takes at least 1 argument (0 given)') + self = args[0] + # Make progressively weaker assumptions about "other" + other = () + if len(args) == 2: + other = args[1] + if isinstance(other, dict): + for key in other: + self[key] = other[key] + elif hasattr(other, 'keys'): + for key in other.keys(): + self[key] = other[key] + else: + for key, value in other: + self[key] = value + for key, value in kwds.items(): + self[key] = value + + __update = update # let subclasses override update without breaking __init__ + + __marker = object() + + def pop(self, key, default=__marker): + '''od.pop(k[,d]) -> v, remove specified key and return the corresponding value. 
+ If key is not found, d is returned if given, otherwise KeyError is raised. + + ''' + if key in self: + result = self[key] + del self[key] + return result + if default is self.__marker: + raise KeyError(key) + return default + + def setdefault(self, key, default=None): + 'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od' + if key in self: + return self[key] + self[key] = default + return default + + def __repr__(self, _repr_running={}): + 'od.__repr__() <==> repr(od)' + call_key = id(self), _get_ident() + if call_key in _repr_running: + return '...' + _repr_running[call_key] = 1 + try: + if not self: + return '%s()' % (self.__class__.__name__,) + return '%s(%r)' % (self.__class__.__name__, self.items()) + finally: + del _repr_running[call_key] + + def __reduce__(self): + 'Return state information for pickling' + items = [[k, self[k]] for k in self] + inst_dict = vars(self).copy() + for k in vars(OrderedDict()): + inst_dict.pop(k, None) + if inst_dict: + return (self.__class__, (items,), inst_dict) + return self.__class__, (items,) + + def copy(self): + 'od.copy() -> a shallow copy of od' + return self.__class__(self) + + @classmethod + def fromkeys(cls, iterable, value=None): + '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S + and values equal to v (which defaults to None). + + ''' + d = cls() + for key in iterable: + d[key] = value + return d + + def __eq__(self, other): + '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive + while comparison to a regular mapping is order-insensitive. + + ''' + if isinstance(other, OrderedDict): + return len(self)==len(other) and self.items() == other.items() + return dict.__eq__(self, other) + + def __ne__(self, other): + return not self == other + + # -- the following methods are only used in Python 2.7 -- + + def viewkeys(self): + "od.viewkeys() -> a set-like object providing a view on od's keys" + return KeysView(self) + + def viewvalues(self): + "od.viewvalues() -> an object providing a view on od's values" + return ValuesView(self) + + def viewitems(self): + "od.viewitems() -> a set-like object providing a view on od's items" + return ItemsView(self) diff --git a/env/lib/python3.6/site-packages/pip/_vendor/urllib3/packages/six.py b/env/lib/python3.6/site-packages/pip/_vendor/urllib3/packages/six.py new file mode 100644 index 0000000..190c023 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/urllib3/packages/six.py @@ -0,0 +1,868 @@ +"""Utilities for writing code that runs on Python 2 and 3""" + +# Copyright (c) 2010-2015 Benjamin Peterson +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +from __future__ import absolute_import + +import functools +import itertools +import operator +import sys +import types + +__author__ = "Benjamin Peterson <benjamin@python.org>" +__version__ = "1.10.0" + + +# Useful for very coarse version differentiation. +PY2 = sys.version_info[0] == 2 +PY3 = sys.version_info[0] == 3 +PY34 = sys.version_info[0:2] >= (3, 4) + +if PY3: + string_types = str, + integer_types = int, + class_types = type, + text_type = str + binary_type = bytes + + MAXSIZE = sys.maxsize +else: + string_types = basestring, + integer_types = (int, long) + class_types = (type, types.ClassType) + text_type = unicode + binary_type = str + + if sys.platform.startswith("java"): + # Jython always uses 32 bits. + MAXSIZE = int((1 << 31) - 1) + else: + # It's possible to have sizeof(long) != sizeof(Py_ssize_t). + class X(object): + + def __len__(self): + return 1 << 31 + try: + len(X()) + except OverflowError: + # 32-bit + MAXSIZE = int((1 << 31) - 1) + else: + # 64-bit + MAXSIZE = int((1 << 63) - 1) + del X + + +def _add_doc(func, doc): + """Add documentation to a function.""" + func.__doc__ = doc + + +def _import_module(name): + """Import module, returning the module after the last dot.""" + __import__(name) + return sys.modules[name] + + +class _LazyDescr(object): + + def __init__(self, name): + self.name = name + + def __get__(self, obj, tp): + result = self._resolve() + setattr(obj, self.name, result) # Invokes __set__. + try: + # This is a bit ugly, but it avoids running this again by + # removing this descriptor. + delattr(obj.__class__, self.name) + except AttributeError: + pass + return result + + +class MovedModule(_LazyDescr): + + def __init__(self, name, old, new=None): + super(MovedModule, self).__init__(name) + if PY3: + if new is None: + new = name + self.mod = new + else: + self.mod = old + + def _resolve(self): + return _import_module(self.mod) + + def __getattr__(self, attr): + _module = self._resolve() + value = getattr(_module, attr) + setattr(self, attr, value) + return value + + +class _LazyModule(types.ModuleType): + + def __init__(self, name): + super(_LazyModule, self).__init__(name) + self.__doc__ = self.__class__.__doc__ + + def __dir__(self): + attrs = ["__doc__", "__name__"] + attrs += [attr.name for attr in self._moved_attributes] + return attrs + + # Subclasses should override this + _moved_attributes = [] + + +class MovedAttribute(_LazyDescr): + + def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): + super(MovedAttribute, self).__init__(name) + if PY3: + if new_mod is None: + new_mod = name + self.mod = new_mod + if new_attr is None: + if old_attr is None: + new_attr = name + else: + new_attr = old_attr + self.attr = new_attr + else: + self.mod = old_mod + if old_attr is None: + old_attr = name + self.attr = old_attr + + def _resolve(self): + module = _import_module(self.mod) + return getattr(module, self.attr) + + +class _SixMetaPathImporter(object): + + """ + A meta path importer to import six.moves and its submodules. + + This class implements a PEP302 finder and loader. 
It should be compatible + with Python 2.5 and all existing versions of Python3 + """ + + def __init__(self, six_module_name): + self.name = six_module_name + self.known_modules = {} + + def _add_module(self, mod, *fullnames): + for fullname in fullnames: + self.known_modules[self.name + "." + fullname] = mod + + def _get_module(self, fullname): + return self.known_modules[self.name + "." + fullname] + + def find_module(self, fullname, path=None): + if fullname in self.known_modules: + return self + return None + + def __get_module(self, fullname): + try: + return self.known_modules[fullname] + except KeyError: + raise ImportError("This loader does not know module " + fullname) + + def load_module(self, fullname): + try: + # in case of a reload + return sys.modules[fullname] + except KeyError: + pass + mod = self.__get_module(fullname) + if isinstance(mod, MovedModule): + mod = mod._resolve() + else: + mod.__loader__ = self + sys.modules[fullname] = mod + return mod + + def is_package(self, fullname): + """ + Return true, if the named module is a package. + + We need this method to get correct spec objects with + Python 3.4 (see PEP451) + """ + return hasattr(self.__get_module(fullname), "__path__") + + def get_code(self, fullname): + """Return None + + Required, if is_package is implemented""" + self.__get_module(fullname) # eventually raises ImportError + return None + get_source = get_code # same as get_code + +_importer = _SixMetaPathImporter(__name__) + + +class _MovedItems(_LazyModule): + + """Lazy loading of moved objects""" + __path__ = [] # mark as package + + +_moved_attributes = [ + MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"), + MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"), + MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"), + MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"), + MovedAttribute("intern", "__builtin__", "sys"), + MovedAttribute("map", "itertools", "builtins", "imap", "map"), + MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"), + MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"), + MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"), + MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"), + MovedAttribute("reduce", "__builtin__", "functools"), + MovedAttribute("shlex_quote", "pipes", "shlex", "quote"), + MovedAttribute("StringIO", "StringIO", "io"), + MovedAttribute("UserDict", "UserDict", "collections"), + MovedAttribute("UserList", "UserList", "collections"), + MovedAttribute("UserString", "UserString", "collections"), + MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"), + MovedAttribute("zip", "itertools", "builtins", "izip", "zip"), + MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"), + MovedModule("builtins", "__builtin__"), + MovedModule("configparser", "ConfigParser"), + MovedModule("copyreg", "copy_reg"), + MovedModule("dbm_gnu", "gdbm", "dbm.gnu"), + MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"), + MovedModule("http_cookiejar", "cookielib", "http.cookiejar"), + MovedModule("http_cookies", "Cookie", "http.cookies"), + MovedModule("html_entities", "htmlentitydefs", "html.entities"), + MovedModule("html_parser", "HTMLParser", "html.parser"), + MovedModule("http_client", "httplib", "http.client"), + MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"), + 
MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"), + MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"), + MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"), + MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"), + MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"), + MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"), + MovedModule("cPickle", "cPickle", "pickle"), + MovedModule("queue", "Queue"), + MovedModule("reprlib", "repr"), + MovedModule("socketserver", "SocketServer"), + MovedModule("_thread", "thread", "_thread"), + MovedModule("tkinter", "Tkinter"), + MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"), + MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"), + MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"), + MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"), + MovedModule("tkinter_tix", "Tix", "tkinter.tix"), + MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"), + MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"), + MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"), + MovedModule("tkinter_colorchooser", "tkColorChooser", + "tkinter.colorchooser"), + MovedModule("tkinter_commondialog", "tkCommonDialog", + "tkinter.commondialog"), + MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"), + MovedModule("tkinter_font", "tkFont", "tkinter.font"), + MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"), + MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", + "tkinter.simpledialog"), + MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"), + MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"), + MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"), + MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"), + MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"), + MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"), +] +# Add windows specific modules. +if sys.platform == "win32": + _moved_attributes += [ + MovedModule("winreg", "_winreg"), + ] + +for attr in _moved_attributes: + setattr(_MovedItems, attr.name, attr) + if isinstance(attr, MovedModule): + _importer._add_module(attr, "moves." 
+ attr.name) +del attr + +_MovedItems._moved_attributes = _moved_attributes + +moves = _MovedItems(__name__ + ".moves") +_importer._add_module(moves, "moves") + + +class Module_six_moves_urllib_parse(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_parse""" + + +_urllib_parse_moved_attributes = [ + MovedAttribute("ParseResult", "urlparse", "urllib.parse"), + MovedAttribute("SplitResult", "urlparse", "urllib.parse"), + MovedAttribute("parse_qs", "urlparse", "urllib.parse"), + MovedAttribute("parse_qsl", "urlparse", "urllib.parse"), + MovedAttribute("urldefrag", "urlparse", "urllib.parse"), + MovedAttribute("urljoin", "urlparse", "urllib.parse"), + MovedAttribute("urlparse", "urlparse", "urllib.parse"), + MovedAttribute("urlsplit", "urlparse", "urllib.parse"), + MovedAttribute("urlunparse", "urlparse", "urllib.parse"), + MovedAttribute("urlunsplit", "urlparse", "urllib.parse"), + MovedAttribute("quote", "urllib", "urllib.parse"), + MovedAttribute("quote_plus", "urllib", "urllib.parse"), + MovedAttribute("unquote", "urllib", "urllib.parse"), + MovedAttribute("unquote_plus", "urllib", "urllib.parse"), + MovedAttribute("urlencode", "urllib", "urllib.parse"), + MovedAttribute("splitquery", "urllib", "urllib.parse"), + MovedAttribute("splittag", "urllib", "urllib.parse"), + MovedAttribute("splituser", "urllib", "urllib.parse"), + MovedAttribute("uses_fragment", "urlparse", "urllib.parse"), + MovedAttribute("uses_netloc", "urlparse", "urllib.parse"), + MovedAttribute("uses_params", "urlparse", "urllib.parse"), + MovedAttribute("uses_query", "urlparse", "urllib.parse"), + MovedAttribute("uses_relative", "urlparse", "urllib.parse"), +] +for attr in _urllib_parse_moved_attributes: + setattr(Module_six_moves_urllib_parse, attr.name, attr) +del attr + +Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes + +_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"), + "moves.urllib_parse", "moves.urllib.parse") + + +class Module_six_moves_urllib_error(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_error""" + + +_urllib_error_moved_attributes = [ + MovedAttribute("URLError", "urllib2", "urllib.error"), + MovedAttribute("HTTPError", "urllib2", "urllib.error"), + MovedAttribute("ContentTooShortError", "urllib", "urllib.error"), +] +for attr in _urllib_error_moved_attributes: + setattr(Module_six_moves_urllib_error, attr.name, attr) +del attr + +Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes + +_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"), + "moves.urllib_error", "moves.urllib.error") + + +class Module_six_moves_urllib_request(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_request""" + + +_urllib_request_moved_attributes = [ + MovedAttribute("urlopen", "urllib2", "urllib.request"), + MovedAttribute("install_opener", "urllib2", "urllib.request"), + MovedAttribute("build_opener", "urllib2", "urllib.request"), + MovedAttribute("pathname2url", "urllib", "urllib.request"), + MovedAttribute("url2pathname", "urllib", "urllib.request"), + MovedAttribute("getproxies", "urllib", "urllib.request"), + MovedAttribute("Request", "urllib2", "urllib.request"), + MovedAttribute("OpenerDirector", "urllib2", "urllib.request"), + MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPCookieProcessor", "urllib2", 
"urllib.request"), + MovedAttribute("ProxyHandler", "urllib2", "urllib.request"), + MovedAttribute("BaseHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"), + MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"), + MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"), + MovedAttribute("FileHandler", "urllib2", "urllib.request"), + MovedAttribute("FTPHandler", "urllib2", "urllib.request"), + MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"), + MovedAttribute("UnknownHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"), + MovedAttribute("urlretrieve", "urllib", "urllib.request"), + MovedAttribute("urlcleanup", "urllib", "urllib.request"), + MovedAttribute("URLopener", "urllib", "urllib.request"), + MovedAttribute("FancyURLopener", "urllib", "urllib.request"), + MovedAttribute("proxy_bypass", "urllib", "urllib.request"), +] +for attr in _urllib_request_moved_attributes: + setattr(Module_six_moves_urllib_request, attr.name, attr) +del attr + +Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes + +_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"), + "moves.urllib_request", "moves.urllib.request") + + +class Module_six_moves_urllib_response(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_response""" + + +_urllib_response_moved_attributes = [ + MovedAttribute("addbase", "urllib", "urllib.response"), + MovedAttribute("addclosehook", "urllib", "urllib.response"), + MovedAttribute("addinfo", "urllib", "urllib.response"), + MovedAttribute("addinfourl", "urllib", "urllib.response"), +] +for attr in _urllib_response_moved_attributes: + setattr(Module_six_moves_urllib_response, attr.name, attr) +del attr + +Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes + +_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"), + "moves.urllib_response", "moves.urllib.response") + + +class Module_six_moves_urllib_robotparser(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_robotparser""" + + +_urllib_robotparser_moved_attributes = [ + MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"), +] +for attr in _urllib_robotparser_moved_attributes: + setattr(Module_six_moves_urllib_robotparser, attr.name, attr) +del attr + +Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes + +_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"), + "moves.urllib_robotparser", "moves.urllib.robotparser") + + +class Module_six_moves_urllib(types.ModuleType): + + """Create a six.moves.urllib namespace that resembles the Python 3 namespace""" + __path__ = [] # mark as package + parse = _importer._get_module("moves.urllib_parse") + error = _importer._get_module("moves.urllib_error") + request = 
_importer._get_module("moves.urllib_request") + response = _importer._get_module("moves.urllib_response") + robotparser = _importer._get_module("moves.urllib_robotparser") + + def __dir__(self): + return ['parse', 'error', 'request', 'response', 'robotparser'] + +_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"), + "moves.urllib") + + +def add_move(move): + """Add an item to six.moves.""" + setattr(_MovedItems, move.name, move) + + +def remove_move(name): + """Remove item from six.moves.""" + try: + delattr(_MovedItems, name) + except AttributeError: + try: + del moves.__dict__[name] + except KeyError: + raise AttributeError("no such move, %r" % (name,)) + + +if PY3: + _meth_func = "__func__" + _meth_self = "__self__" + + _func_closure = "__closure__" + _func_code = "__code__" + _func_defaults = "__defaults__" + _func_globals = "__globals__" +else: + _meth_func = "im_func" + _meth_self = "im_self" + + _func_closure = "func_closure" + _func_code = "func_code" + _func_defaults = "func_defaults" + _func_globals = "func_globals" + + +try: + advance_iterator = next +except NameError: + def advance_iterator(it): + return it.next() +next = advance_iterator + + +try: + callable = callable +except NameError: + def callable(obj): + return any("__call__" in klass.__dict__ for klass in type(obj).__mro__) + + +if PY3: + def get_unbound_function(unbound): + return unbound + + create_bound_method = types.MethodType + + def create_unbound_method(func, cls): + return func + + Iterator = object +else: + def get_unbound_function(unbound): + return unbound.im_func + + def create_bound_method(func, obj): + return types.MethodType(func, obj, obj.__class__) + + def create_unbound_method(func, cls): + return types.MethodType(func, None, cls) + + class Iterator(object): + + def next(self): + return type(self).__next__(self) + + callable = callable +_add_doc(get_unbound_function, + """Get the function out of a possibly unbound function""") + + +get_method_function = operator.attrgetter(_meth_func) +get_method_self = operator.attrgetter(_meth_self) +get_function_closure = operator.attrgetter(_func_closure) +get_function_code = operator.attrgetter(_func_code) +get_function_defaults = operator.attrgetter(_func_defaults) +get_function_globals = operator.attrgetter(_func_globals) + + +if PY3: + def iterkeys(d, **kw): + return iter(d.keys(**kw)) + + def itervalues(d, **kw): + return iter(d.values(**kw)) + + def iteritems(d, **kw): + return iter(d.items(**kw)) + + def iterlists(d, **kw): + return iter(d.lists(**kw)) + + viewkeys = operator.methodcaller("keys") + + viewvalues = operator.methodcaller("values") + + viewitems = operator.methodcaller("items") +else: + def iterkeys(d, **kw): + return d.iterkeys(**kw) + + def itervalues(d, **kw): + return d.itervalues(**kw) + + def iteritems(d, **kw): + return d.iteritems(**kw) + + def iterlists(d, **kw): + return d.iterlists(**kw) + + viewkeys = operator.methodcaller("viewkeys") + + viewvalues = operator.methodcaller("viewvalues") + + viewitems = operator.methodcaller("viewitems") + +_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.") +_add_doc(itervalues, "Return an iterator over the values of a dictionary.") +_add_doc(iteritems, + "Return an iterator over the (key, value) pairs of a dictionary.") +_add_doc(iterlists, + "Return an iterator over the (key, [values]) pairs of a dictionary.") + + +if PY3: + def b(s): + return s.encode("latin-1") + + def u(s): + return s + unichr = chr + import struct + int2byte = struct.Struct(">B").pack 
+ del struct + byte2int = operator.itemgetter(0) + indexbytes = operator.getitem + iterbytes = iter + import io + StringIO = io.StringIO + BytesIO = io.BytesIO + _assertCountEqual = "assertCountEqual" + if sys.version_info[1] <= 1: + _assertRaisesRegex = "assertRaisesRegexp" + _assertRegex = "assertRegexpMatches" + else: + _assertRaisesRegex = "assertRaisesRegex" + _assertRegex = "assertRegex" +else: + def b(s): + return s + # Workaround for standalone backslash + + def u(s): + return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape") + unichr = unichr + int2byte = chr + + def byte2int(bs): + return ord(bs[0]) + + def indexbytes(buf, i): + return ord(buf[i]) + iterbytes = functools.partial(itertools.imap, ord) + import StringIO + StringIO = BytesIO = StringIO.StringIO + _assertCountEqual = "assertItemsEqual" + _assertRaisesRegex = "assertRaisesRegexp" + _assertRegex = "assertRegexpMatches" +_add_doc(b, """Byte literal""") +_add_doc(u, """Text literal""") + + +def assertCountEqual(self, *args, **kwargs): + return getattr(self, _assertCountEqual)(*args, **kwargs) + + +def assertRaisesRegex(self, *args, **kwargs): + return getattr(self, _assertRaisesRegex)(*args, **kwargs) + + +def assertRegex(self, *args, **kwargs): + return getattr(self, _assertRegex)(*args, **kwargs) + + +if PY3: + exec_ = getattr(moves.builtins, "exec") + + def reraise(tp, value, tb=None): + if value is None: + value = tp() + if value.__traceback__ is not tb: + raise value.with_traceback(tb) + raise value + +else: + def exec_(_code_, _globs_=None, _locs_=None): + """Execute code in a namespace.""" + if _globs_ is None: + frame = sys._getframe(1) + _globs_ = frame.f_globals + if _locs_ is None: + _locs_ = frame.f_locals + del frame + elif _locs_ is None: + _locs_ = _globs_ + exec("""exec _code_ in _globs_, _locs_""") + + exec_("""def reraise(tp, value, tb=None): + raise tp, value, tb +""") + + +if sys.version_info[:2] == (3, 2): + exec_("""def raise_from(value, from_value): + if from_value is None: + raise value + raise value from from_value +""") +elif sys.version_info[:2] > (3, 2): + exec_("""def raise_from(value, from_value): + raise value from from_value +""") +else: + def raise_from(value, from_value): + raise value + + +print_ = getattr(moves.builtins, "print", None) +if print_ is None: + def print_(*args, **kwargs): + """The new-style print function for Python 2.4 and 2.5.""" + fp = kwargs.pop("file", sys.stdout) + if fp is None: + return + + def write(data): + if not isinstance(data, basestring): + data = str(data) + # If the file has an encoding, encode unicode with it. 
+ if (isinstance(fp, file) and + isinstance(data, unicode) and + fp.encoding is not None): + errors = getattr(fp, "errors", None) + if errors is None: + errors = "strict" + data = data.encode(fp.encoding, errors) + fp.write(data) + want_unicode = False + sep = kwargs.pop("sep", None) + if sep is not None: + if isinstance(sep, unicode): + want_unicode = True + elif not isinstance(sep, str): + raise TypeError("sep must be None or a string") + end = kwargs.pop("end", None) + if end is not None: + if isinstance(end, unicode): + want_unicode = True + elif not isinstance(end, str): + raise TypeError("end must be None or a string") + if kwargs: + raise TypeError("invalid keyword arguments to print()") + if not want_unicode: + for arg in args: + if isinstance(arg, unicode): + want_unicode = True + break + if want_unicode: + newline = unicode("\n") + space = unicode(" ") + else: + newline = "\n" + space = " " + if sep is None: + sep = space + if end is None: + end = newline + for i, arg in enumerate(args): + if i: + write(sep) + write(arg) + write(end) +if sys.version_info[:2] < (3, 3): + _print = print_ + + def print_(*args, **kwargs): + fp = kwargs.get("file", sys.stdout) + flush = kwargs.pop("flush", False) + _print(*args, **kwargs) + if flush and fp is not None: + fp.flush() + +_add_doc(reraise, """Reraise an exception.""") + +if sys.version_info[0:2] < (3, 4): + def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS, + updated=functools.WRAPPER_UPDATES): + def wrapper(f): + f = functools.wraps(wrapped, assigned, updated)(f) + f.__wrapped__ = wrapped + return f + return wrapper +else: + wraps = functools.wraps + + +def with_metaclass(meta, *bases): + """Create a base class with a metaclass.""" + # This requires a bit of explanation: the basic idea is to make a dummy + # metaclass for one level of class instantiation that replaces itself with + # the actual metaclass. + class metaclass(meta): + + def __new__(cls, name, this_bases, d): + return meta(name, bases, d) + return type.__new__(metaclass, 'temporary_class', (), {}) + + +def add_metaclass(metaclass): + """Class decorator for creating a class with a metaclass.""" + def wrapper(cls): + orig_vars = cls.__dict__.copy() + slots = orig_vars.get('__slots__') + if slots is not None: + if isinstance(slots, str): + slots = [slots] + for slots_var in slots: + orig_vars.pop(slots_var) + orig_vars.pop('__dict__', None) + orig_vars.pop('__weakref__', None) + return metaclass(cls.__name__, cls.__bases__, orig_vars) + return wrapper + + +def python_2_unicode_compatible(klass): + """ + A decorator that defines __unicode__ and __str__ methods under Python 2. + Under Python 3 it does nothing. + + To support Python 2 and 3 with a single code base, define a __str__ method + returning text and apply this decorator to the class. + """ + if PY2: + if '__str__' not in klass.__dict__: + raise ValueError("@python_2_unicode_compatible cannot be applied " + "to %s because it doesn't define __str__()." % + klass.__name__) + klass.__unicode__ = klass.__str__ + klass.__str__ = lambda self: self.__unicode__().encode('utf-8') + return klass + + +# Complete the moves implementation. +# This code is at the end of this module to speed up module loading. +# Turn this module into a package. 
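For reference, a minimal sketch of how the metaclass helpers above are typically used (illustrative only; it assumes the bundled module is importable as ``six``, as the vendored copy here is):

    import six

    class RegisteringMeta(type):
        """Illustrative metaclass that records every class it creates."""
        created = []

        def __new__(mcs, name, bases, namespace):
            cls = super(RegisteringMeta, mcs).__new__(mcs, name, bases, namespace)
            RegisteringMeta.created.append(name)
            return cls

    @six.add_metaclass(RegisteringMeta)
    class Widget(object):
        pass

    # add_metaclass() rebuilds Widget through RegisteringMeta, giving the same
    # result on Python 2 and 3 without the divergent __metaclass__ / metaclass=
    # syntax; using with_metaclass(RegisteringMeta) as a base is the other route.
    assert type(Widget) is RegisteringMeta
    assert RegisteringMeta.created == ["Widget"]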
+__path__ = [] # required for PEP 302 and PEP 451 +__package__ = __name__ # see PEP 366 @ReservedAssignment +if globals().get("__spec__") is not None: + __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable +# Remove other six meta path importers, since they cause problems. This can +# happen if six is removed from sys.modules and then reloaded. (Setuptools does +# this for some reason.) +if sys.meta_path: + for i, importer in enumerate(sys.meta_path): + # Here's some real nastiness: Another "instance" of the six module might + # be floating around. Therefore, we can't use isinstance() to check for + # the six meta path importer, since the other six instance will have + # inserted an importer with different class. + if (type(importer).__name__ == "_SixMetaPathImporter" and + importer.name == __name__): + del sys.meta_path[i] + break + del i, importer +# Finally, add the importer to the meta path import hook. +sys.meta_path.append(_importer) diff --git a/env/lib/python3.6/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.py b/env/lib/python3.6/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.py new file mode 100644 index 0000000..92c9bc7 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.py @@ -0,0 +1,157 @@ +"""The match_hostname() function from Python 3.3.3, essential when using SSL.""" + +# Note: This file is under the PSF license as the code comes from the python +# stdlib. http://docs.python.org/3/license.html + +import re +import sys + +# ipaddress has been backported to 2.6+ in pypi. If it is installed on the +# system, use it to handle IPAddress ServerAltnames (this was added in +# python-3.5) otherwise only do DNS matching. This allows +# backports.ssl_match_hostname to continue to be used all the way back to +# python-2.4. +try: + from pip._vendor import ipaddress +except ImportError: + ipaddress = None + +__version__ = '3.5.0.1' + + +class CertificateError(ValueError): + pass + + +def _dnsname_match(dn, hostname, max_wildcards=1): + """Matching according to RFC 6125, section 6.4.3 + + http://tools.ietf.org/html/rfc6125#section-6.4.3 + """ + pats = [] + if not dn: + return False + + # Ported from python3-syntax: + # leftmost, *remainder = dn.split(r'.') + parts = dn.split(r'.') + leftmost = parts[0] + remainder = parts[1:] + + wildcards = leftmost.count('*') + if wildcards > max_wildcards: + # Issue #17980: avoid denials of service by refusing more + # than one wildcard per fragment. A survey of established + # policy among SSL implementations showed it to be a + # reasonable choice. + raise CertificateError( + "too many wildcards in certificate DNS name: " + repr(dn)) + + # speed up common case w/o wildcards + if not wildcards: + return dn.lower() == hostname.lower() + + # RFC 6125, section 6.4.3, subitem 1. + # The client SHOULD NOT attempt to match a presented identifier in which + # the wildcard character comprises a label other than the left-most label. + if leftmost == '*': + # When '*' is a fragment by itself, it matches a non-empty dotless + # fragment. + pats.append('[^.]+') + elif leftmost.startswith('xn--') or hostname.startswith('xn--'): + # RFC 6125, section 6.4.3, subitem 3. + # The client SHOULD NOT attempt to match a presented identifier + # where the wildcard character is embedded within an A-label or + # U-label of an internationalized domain name. 
+ pats.append(re.escape(leftmost)) + else: + # Otherwise, '*' matches any dotless string, e.g. www* + pats.append(re.escape(leftmost).replace(r'\*', '[^.]*')) + + # add the remaining fragments, ignore any wildcards + for frag in remainder: + pats.append(re.escape(frag)) + + pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE) + return pat.match(hostname) + + +def _to_unicode(obj): + if isinstance(obj, str) and sys.version_info < (3,): + obj = unicode(obj, encoding='ascii', errors='strict') + return obj + +def _ipaddress_match(ipname, host_ip): + """Exact matching of IP addresses. + + RFC 6125 explicitly doesn't define an algorithm for this + (section 1.7.2 - "Out of Scope"). + """ + # OpenSSL may add a trailing newline to a subjectAltName's IP address + # Divergence from upstream: ipaddress can't handle byte str + ip = ipaddress.ip_address(_to_unicode(ipname).rstrip()) + return ip == host_ip + + +def match_hostname(cert, hostname): + """Verify that *cert* (in decoded format as returned by + SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125 + rules are followed, but IP addresses are not accepted for *hostname*. + + CertificateError is raised on failure. On success, the function + returns nothing. + """ + if not cert: + raise ValueError("empty or no certificate, match_hostname needs a " + "SSL socket or SSL context with either " + "CERT_OPTIONAL or CERT_REQUIRED") + try: + # Divergence from upstream: ipaddress can't handle byte str + host_ip = ipaddress.ip_address(_to_unicode(hostname)) + except ValueError: + # Not an IP address (common case) + host_ip = None + except UnicodeError: + # Divergence from upstream: Have to deal with ipaddress not taking + # byte strings. addresses should be all ascii, so we consider it not + # an ipaddress in this case + host_ip = None + except AttributeError: + # Divergence from upstream: Make ipaddress library optional + if ipaddress is None: + host_ip = None + else: + raise + dnsnames = [] + san = cert.get('subjectAltName', ()) + for key, value in san: + if key == 'DNS': + if host_ip is None and _dnsname_match(value, hostname): + return + dnsnames.append(value) + elif key == 'IP Address': + if host_ip is not None and _ipaddress_match(value, host_ip): + return + dnsnames.append(value) + if not dnsnames: + # The subject is only checked when there is no dNSName entry + # in subjectAltName + for sub in cert.get('subject', ()): + for key, value in sub: + # XXX according to RFC 2818, the most specific Common Name + # must be used. 
+ if key == 'commonName': + if _dnsname_match(value, hostname): + return + dnsnames.append(value) + if len(dnsnames) > 1: + raise CertificateError("hostname %r " + "doesn't match either of %s" + % (hostname, ', '.join(map(repr, dnsnames)))) + elif len(dnsnames) == 1: + raise CertificateError("hostname %r " + "doesn't match %r" + % (hostname, dnsnames[0])) + else: + raise CertificateError("no appropriate commonName or " + "subjectAltName fields were found") diff --git a/env/lib/python3.6/site-packages/pip/_vendor/urllib3/poolmanager.py b/env/lib/python3.6/site-packages/pip/_vendor/urllib3/poolmanager.py new file mode 100644 index 0000000..506a3c9 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/urllib3/poolmanager.py @@ -0,0 +1,449 @@ +from __future__ import absolute_import +import collections +import functools +import logging + +from ._collections import RecentlyUsedContainer +from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool +from .connectionpool import port_by_scheme +from .exceptions import LocationValueError, MaxRetryError, ProxySchemeUnknown +from .packages.six.moves.urllib.parse import urljoin +from .request import RequestMethods +from .util.url import parse_url +from .util.retry import Retry + + +__all__ = ['PoolManager', 'ProxyManager', 'proxy_from_url'] + + +log = logging.getLogger(__name__) + +SSL_KEYWORDS = ('key_file', 'cert_file', 'cert_reqs', 'ca_certs', + 'ssl_version', 'ca_cert_dir', 'ssl_context') + +# All known keyword arguments that could be provided to the pool manager, its +# pools, or the underlying connections. This is used to construct a pool key. +_key_fields = ( + 'key_scheme', # str + 'key_host', # str + 'key_port', # int + 'key_timeout', # int or float or Timeout + 'key_retries', # int or Retry + 'key_strict', # bool + 'key_block', # bool + 'key_source_address', # str + 'key_key_file', # str + 'key_cert_file', # str + 'key_cert_reqs', # str + 'key_ca_certs', # str + 'key_ssl_version', # str + 'key_ca_cert_dir', # str + 'key_ssl_context', # instance of ssl.SSLContext or urllib3.util.ssl_.SSLContext + 'key_maxsize', # int + 'key_headers', # dict + 'key__proxy', # parsed proxy url + 'key__proxy_headers', # dict + 'key_socket_options', # list of (level (int), optname (int), value (int or str)) tuples + 'key__socks_options', # dict + 'key_assert_hostname', # bool or string + 'key_assert_fingerprint', # str +) + +#: The namedtuple class used to construct keys for the connection pool. +#: All custom key schemes should include the fields in this key at a minimum. +PoolKey = collections.namedtuple('PoolKey', _key_fields) + + +def _default_key_normalizer(key_class, request_context): + """ + Create a pool key out of a request context dictionary. + + According to RFC 3986, both the scheme and host are case-insensitive. + Therefore, this function normalizes both before constructing the pool + key for an HTTPS request. If you wish to change this behaviour, provide + alternate callables to ``key_fn_by_scheme``. + + :param key_class: + The class to use when constructing the key. This should be a namedtuple + with the ``scheme`` and ``host`` keys at a minimum. + :type key_class: namedtuple + :param request_context: + A dictionary-like object that contain the context for a request. + :type request_context: dict + + :return: A namedtuple that can be used as a connection pool key. 
+ :rtype: PoolKey + """ + # Since we mutate the dictionary, make a copy first + context = request_context.copy() + context['scheme'] = context['scheme'].lower() + context['host'] = context['host'].lower() + + # These are both dictionaries and need to be transformed into frozensets + for key in ('headers', '_proxy_headers', '_socks_options'): + if key in context and context[key] is not None: + context[key] = frozenset(context[key].items()) + + # The socket_options key may be a list and needs to be transformed into a + # tuple. + socket_opts = context.get('socket_options') + if socket_opts is not None: + context['socket_options'] = tuple(socket_opts) + + # Map the kwargs to the names in the namedtuple - this is necessary since + # namedtuples can't have fields starting with '_'. + for key in list(context.keys()): + context['key_' + key] = context.pop(key) + + # Default to ``None`` for keys missing from the context + for field in key_class._fields: + if field not in context: + context[field] = None + + return key_class(**context) + + +#: A dictionary that maps a scheme to a callable that creates a pool key. +#: This can be used to alter the way pool keys are constructed, if desired. +#: Each PoolManager makes a copy of this dictionary so they can be configured +#: globally here, or individually on the instance. +key_fn_by_scheme = { + 'http': functools.partial(_default_key_normalizer, PoolKey), + 'https': functools.partial(_default_key_normalizer, PoolKey), +} + +pool_classes_by_scheme = { + 'http': HTTPConnectionPool, + 'https': HTTPSConnectionPool, +} + + +class PoolManager(RequestMethods): + """ + Allows for arbitrary requests while transparently keeping track of + necessary connection pools for you. + + :param num_pools: + Number of connection pools to cache before discarding the least + recently used pool. + + :param headers: + Headers to include with all requests, unless other headers are given + explicitly. + + :param \\**connection_pool_kw: + Additional parameters are used to create fresh + :class:`urllib3.connectionpool.ConnectionPool` instances. + + Example:: + + >>> manager = PoolManager(num_pools=2) + >>> r = manager.request('GET', 'http://google.com/') + >>> r = manager.request('GET', 'http://google.com/mail') + >>> r = manager.request('GET', 'http://yahoo.com/') + >>> len(manager.pools) + 2 + + """ + + proxy = None + + def __init__(self, num_pools=10, headers=None, **connection_pool_kw): + RequestMethods.__init__(self, headers) + self.connection_pool_kw = connection_pool_kw + self.pools = RecentlyUsedContainer(num_pools, + dispose_func=lambda p: p.close()) + + # Locally set the pool classes and keys so other PoolManagers can + # override them. + self.pool_classes_by_scheme = pool_classes_by_scheme + self.key_fn_by_scheme = key_fn_by_scheme.copy() + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + self.clear() + # Return False to re-raise any potential exceptions + return False + + def _new_pool(self, scheme, host, port, request_context=None): + """ + Create a new :class:`ConnectionPool` based on host, port, scheme, and + any additional pool keyword arguments. + + If ``request_context`` is provided, it is provided as keyword arguments + to the pool class used. This method is used to actually create the + connection pools handed out by :meth:`connection_from_url` and + companion methods. It is intended to be overridden for customization. 
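A quick sketch of what the normalizer above produces (a sketch only; it assumes this vendored copy is importable as plain ``urllib3``, which exposes the same module-level ``key_fn_by_scheme`` mapping):

    from urllib3.poolmanager import key_fn_by_scheme

    ctx_a = {"scheme": "HTTPS", "host": "Example.COM", "port": 443}
    ctx_b = {"scheme": "https", "host": "example.com", "port": 443}

    make_key = key_fn_by_scheme["https"]
    # Scheme and host are lowercased and every field missing from the context
    # defaults to None, so both contexts collapse to the same PoolKey and
    # therefore to the same connection pool.
    assert make_key(ctx_a) == make_key(ctx_b)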
+ """ + pool_cls = self.pool_classes_by_scheme[scheme] + if request_context is None: + request_context = self.connection_pool_kw.copy() + + # Although the context has everything necessary to create the pool, + # this function has historically only used the scheme, host, and port + # in the positional args. When an API change is acceptable these can + # be removed. + for key in ('scheme', 'host', 'port'): + request_context.pop(key, None) + + if scheme == 'http': + for kw in SSL_KEYWORDS: + request_context.pop(kw, None) + + return pool_cls(host, port, **request_context) + + def clear(self): + """ + Empty our store of pools and direct them all to close. + + This will not affect in-flight connections, but they will not be + re-used after completion. + """ + self.pools.clear() + + def connection_from_host(self, host, port=None, scheme='http', pool_kwargs=None): + """ + Get a :class:`ConnectionPool` based on the host, port, and scheme. + + If ``port`` isn't given, it will be derived from the ``scheme`` using + ``urllib3.connectionpool.port_by_scheme``. If ``pool_kwargs`` is + provided, it is merged with the instance's ``connection_pool_kw`` + variable and used to create the new connection pool, if one is + needed. + """ + + if not host: + raise LocationValueError("No host specified.") + + request_context = self._merge_pool_kwargs(pool_kwargs) + request_context['scheme'] = scheme or 'http' + if not port: + port = port_by_scheme.get(request_context['scheme'].lower(), 80) + request_context['port'] = port + request_context['host'] = host + + return self.connection_from_context(request_context) + + def connection_from_context(self, request_context): + """ + Get a :class:`ConnectionPool` based on the request context. + + ``request_context`` must at least contain the ``scheme`` key and its + value must be a key in ``key_fn_by_scheme`` instance variable. + """ + scheme = request_context['scheme'].lower() + pool_key_constructor = self.key_fn_by_scheme[scheme] + pool_key = pool_key_constructor(request_context) + + return self.connection_from_pool_key(pool_key, request_context=request_context) + + def connection_from_pool_key(self, pool_key, request_context=None): + """ + Get a :class:`ConnectionPool` based on the provided pool key. + + ``pool_key`` should be a namedtuple that only contains immutable + objects. At a minimum it must have the ``scheme``, ``host``, and + ``port`` fields. + """ + with self.pools.lock: + # If the scheme, host, or port doesn't match existing open + # connections, open a new ConnectionPool. + pool = self.pools.get(pool_key) + if pool: + return pool + + # Make a fresh ConnectionPool of the desired type + scheme = request_context['scheme'] + host = request_context['host'] + port = request_context['port'] + pool = self._new_pool(scheme, host, port, request_context=request_context) + self.pools[pool_key] = pool + + return pool + + def connection_from_url(self, url, pool_kwargs=None): + """ + Similar to :func:`urllib3.connectionpool.connection_from_url`. + + If ``pool_kwargs`` is not provided and a new pool needs to be + constructed, ``self.connection_pool_kw`` is used to initialize + the :class:`urllib3.connectionpool.ConnectionPool`. If ``pool_kwargs`` + is provided, it is used instead. Note that if a new pool does not + need to be created for the request, the provided ``pool_kwargs`` are + not used. 
+ """ + u = parse_url(url) + return self.connection_from_host(u.host, port=u.port, scheme=u.scheme, + pool_kwargs=pool_kwargs) + + def _merge_pool_kwargs(self, override): + """ + Merge a dictionary of override values for self.connection_pool_kw. + + This does not modify self.connection_pool_kw and returns a new dict. + Any keys in the override dictionary with a value of ``None`` are + removed from the merged dictionary. + """ + base_pool_kwargs = self.connection_pool_kw.copy() + if override: + for key, value in override.items(): + if value is None: + try: + del base_pool_kwargs[key] + except KeyError: + pass + else: + base_pool_kwargs[key] = value + return base_pool_kwargs + + def urlopen(self, method, url, redirect=True, **kw): + """ + Same as :meth:`urllib3.connectionpool.HTTPConnectionPool.urlopen` + with custom cross-host redirect logic and only sends the request-uri + portion of the ``url``. + + The given ``url`` parameter must be absolute, such that an appropriate + :class:`urllib3.connectionpool.ConnectionPool` can be chosen for it. + """ + u = parse_url(url) + conn = self.connection_from_host(u.host, port=u.port, scheme=u.scheme) + + kw['assert_same_host'] = False + kw['redirect'] = False + + if 'headers' not in kw: + kw['headers'] = self.headers.copy() + + if self.proxy is not None and u.scheme == "http": + response = conn.urlopen(method, url, **kw) + else: + response = conn.urlopen(method, u.request_uri, **kw) + + redirect_location = redirect and response.get_redirect_location() + if not redirect_location: + return response + + # Support relative URLs for redirecting. + redirect_location = urljoin(url, redirect_location) + + # RFC 7231, Section 6.4.4 + if response.status == 303: + method = 'GET' + + retries = kw.get('retries') + if not isinstance(retries, Retry): + retries = Retry.from_int(retries, redirect=redirect) + + # Strip headers marked as unsafe to forward to the redirected location. + # Check remove_headers_on_redirect to avoid a potential network call within + # conn.is_same_host() which may use socket.gethostbyname() in the future. + if (retries.remove_headers_on_redirect + and not conn.is_same_host(redirect_location)): + for header in retries.remove_headers_on_redirect: + kw['headers'].pop(header, None) + + try: + retries = retries.increment(method, url, response=response, _pool=conn) + except MaxRetryError: + if retries.raise_on_redirect: + raise + return response + + kw['retries'] = retries + kw['redirect'] = redirect + + log.info("Redirecting %s -> %s", url, redirect_location) + return self.urlopen(method, redirect_location, **kw) + + +class ProxyManager(PoolManager): + """ + Behaves just like :class:`PoolManager`, but sends all requests through + the defined proxy, using the CONNECT method for HTTPS URLs. + + :param proxy_url: + The URL of the proxy to be used. + + :param proxy_headers: + A dictionary containing headers that will be sent to the proxy. In case + of HTTP they are being sent with each request, while in the + HTTPS/CONNECT case they are sent only once. Could be used for proxy + authentication. 
+ + Example: + >>> proxy = urllib3.ProxyManager('http://localhost:3128/') + >>> r1 = proxy.request('GET', 'http://google.com/') + >>> r2 = proxy.request('GET', 'http://httpbin.org/') + >>> len(proxy.pools) + 1 + >>> r3 = proxy.request('GET', 'https://httpbin.org/') + >>> r4 = proxy.request('GET', 'https://twitter.com/') + >>> len(proxy.pools) + 3 + + """ + + def __init__(self, proxy_url, num_pools=10, headers=None, + proxy_headers=None, **connection_pool_kw): + + if isinstance(proxy_url, HTTPConnectionPool): + proxy_url = '%s://%s:%i' % (proxy_url.scheme, proxy_url.host, + proxy_url.port) + proxy = parse_url(proxy_url) + if not proxy.port: + port = port_by_scheme.get(proxy.scheme, 80) + proxy = proxy._replace(port=port) + + if proxy.scheme not in ("http", "https"): + raise ProxySchemeUnknown(proxy.scheme) + + self.proxy = proxy + self.proxy_headers = proxy_headers or {} + + connection_pool_kw['_proxy'] = self.proxy + connection_pool_kw['_proxy_headers'] = self.proxy_headers + + super(ProxyManager, self).__init__( + num_pools, headers, **connection_pool_kw) + + def connection_from_host(self, host, port=None, scheme='http', pool_kwargs=None): + if scheme == "https": + return super(ProxyManager, self).connection_from_host( + host, port, scheme, pool_kwargs=pool_kwargs) + + return super(ProxyManager, self).connection_from_host( + self.proxy.host, self.proxy.port, self.proxy.scheme, pool_kwargs=pool_kwargs) + + def _set_proxy_headers(self, url, headers=None): + """ + Sets headers needed by proxies: specifically, the Accept and Host + headers. Only sets headers not provided by the user. + """ + headers_ = {'Accept': '*/*'} + + netloc = parse_url(url).netloc + if netloc: + headers_['Host'] = netloc + + if headers: + headers_.update(headers) + return headers_ + + def urlopen(self, method, url, redirect=True, **kw): + "Same as HTTP(S)ConnectionPool.urlopen, ``url`` must be absolute." + u = parse_url(url) + + if u.scheme == "http": + # For proxied HTTPS requests, httplib sets the necessary headers + # on the CONNECT to the proxy. For HTTP, we'll definitely + # need to set 'Host' at the very least. + headers = kw.get('headers', self.headers) + kw['headers'] = self._set_proxy_headers(url, headers) + + return super(ProxyManager, self).urlopen(method, url, redirect=redirect, **kw) + + +def proxy_from_url(url, **kw): + return ProxyManager(proxy_url=url, **kw) diff --git a/env/lib/python3.6/site-packages/pip/_vendor/urllib3/request.py b/env/lib/python3.6/site-packages/pip/_vendor/urllib3/request.py new file mode 100644 index 0000000..1be3334 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/urllib3/request.py @@ -0,0 +1,150 @@ +from __future__ import absolute_import + +from .filepost import encode_multipart_formdata +from .packages.six.moves.urllib.parse import urlencode + + +__all__ = ['RequestMethods'] + + +class RequestMethods(object): + """ + Convenience mixin for classes who implement a :meth:`urlopen` method, such + as :class:`~urllib3.connectionpool.HTTPConnectionPool` and + :class:`~urllib3.poolmanager.PoolManager`. + + Provides behavior for making common types of HTTP request methods and + decides which type of request field encoding to use. + + Specifically, + + :meth:`.request_encode_url` is for sending requests whose fields are + encoded in the URL (such as GET, HEAD, DELETE). + + :meth:`.request_encode_body` is for sending requests whose fields are + encoded in the *body* of the request using multipart or www-form-urlencoded + (such as for POST, PUT, PATCH). 
+ + :meth:`.request` is for making any kind of request, it will look up the + appropriate encoding format and use one of the above two methods to make + the request. + + Initializer parameters: + + :param headers: + Headers to include with all requests, unless other headers are given + explicitly. + """ + + _encode_url_methods = set(['DELETE', 'GET', 'HEAD', 'OPTIONS']) + + def __init__(self, headers=None): + self.headers = headers or {} + + def urlopen(self, method, url, body=None, headers=None, + encode_multipart=True, multipart_boundary=None, + **kw): # Abstract + raise NotImplementedError("Classes extending RequestMethods must implement " + "their own ``urlopen`` method.") + + def request(self, method, url, fields=None, headers=None, **urlopen_kw): + """ + Make a request using :meth:`urlopen` with the appropriate encoding of + ``fields`` based on the ``method`` used. + + This is a convenience method that requires the least amount of manual + effort. It can be used in most situations, while still having the + option to drop down to more specific methods when necessary, such as + :meth:`request_encode_url`, :meth:`request_encode_body`, + or even the lowest level :meth:`urlopen`. + """ + method = method.upper() + + urlopen_kw['request_url'] = url + + if method in self._encode_url_methods: + return self.request_encode_url(method, url, fields=fields, + headers=headers, + **urlopen_kw) + else: + return self.request_encode_body(method, url, fields=fields, + headers=headers, + **urlopen_kw) + + def request_encode_url(self, method, url, fields=None, headers=None, + **urlopen_kw): + """ + Make a request using :meth:`urlopen` with the ``fields`` encoded in + the url. This is useful for request methods like GET, HEAD, DELETE, etc. + """ + if headers is None: + headers = self.headers + + extra_kw = {'headers': headers} + extra_kw.update(urlopen_kw) + + if fields: + url += '?' + urlencode(fields) + + return self.urlopen(method, url, **extra_kw) + + def request_encode_body(self, method, url, fields=None, headers=None, + encode_multipart=True, multipart_boundary=None, + **urlopen_kw): + """ + Make a request using :meth:`urlopen` with the ``fields`` encoded in + the body. This is useful for request methods like POST, PUT, PATCH, etc. + + When ``encode_multipart=True`` (default), then + :meth:`urllib3.filepost.encode_multipart_formdata` is used to encode + the payload with the appropriate content type. Otherwise + :meth:`urllib.urlencode` is used with the + 'application/x-www-form-urlencoded' content type. + + Multipart encoding must be used when posting files, and it's reasonably + safe to use it in other times too. However, it may break request + signing, such as with OAuth. + + Supports an optional ``fields`` parameter of key/value strings AND + key/filetuple. A filetuple is a (filename, data, MIME type) tuple where + the MIME type is optional. For example:: + + fields = { + 'foo': 'bar', + 'fakefile': ('foofile.txt', 'contents of foofile'), + 'realfile': ('barfile.txt', open('realfile').read()), + 'typedfile': ('bazfile.bin', open('bazfile').read(), + 'image/jpeg'), + 'nonamefile': 'contents of nonamefile field', + } + + When uploading a file, providing a filename (the first parameter of the + tuple) is optional but recommended to best mimic behavior of browsers. + + Note that if ``headers`` are supplied, the 'Content-Type' header will + be overwritten because it depends on the dynamic random boundary string + which is used to compose the body of the request. 
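For reference, a sketch of the encoding step that ``request_encode_body`` delegates to; ``encode_multipart_formdata`` is imported at the top of this module, and the boundary value below is an arbitrary illustration of why a caller-supplied Content-Type has to be rewritten:

    from urllib3.filepost import encode_multipart_formdata

    fields = {
        "foo": "bar",
        "fakefile": ("foofile.txt", "contents of foofile"),
    }
    body, content_type = encode_multipart_formdata(fields, boundary="xxBOUNDARYxx")

    # The boundary (explicit here, randomly generated otherwise) is baked into
    # the Content-Type header, so a pre-set value would be stale.
    assert content_type == "multipart/form-data; boundary=xxBOUNDARYxx"
    assert b'name="fakefile"; filename="foofile.txt"' in body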
The random boundary + string can be explicitly set with the ``multipart_boundary`` parameter. + """ + if headers is None: + headers = self.headers + + extra_kw = {'headers': {}} + + if fields: + if 'body' in urlopen_kw: + raise TypeError( + "request got values for both 'fields' and 'body', can only specify one.") + + if encode_multipart: + body, content_type = encode_multipart_formdata(fields, boundary=multipart_boundary) + else: + body, content_type = urlencode(fields), 'application/x-www-form-urlencoded' + + extra_kw['body'] = body + extra_kw['headers'] = {'Content-Type': content_type} + + extra_kw['headers'].update(headers) + extra_kw.update(urlopen_kw) + + return self.urlopen(method, url, **extra_kw) diff --git a/env/lib/python3.6/site-packages/pip/_vendor/urllib3/response.py b/env/lib/python3.6/site-packages/pip/_vendor/urllib3/response.py new file mode 100644 index 0000000..9873cb9 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/urllib3/response.py @@ -0,0 +1,676 @@ +from __future__ import absolute_import +from contextlib import contextmanager +import zlib +import io +import logging +from socket import timeout as SocketTimeout +from socket import error as SocketError + +from ._collections import HTTPHeaderDict +from .exceptions import ( + BodyNotHttplibCompatible, ProtocolError, DecodeError, ReadTimeoutError, + ResponseNotChunked, IncompleteRead, InvalidHeader +) +from .packages.six import string_types as basestring, binary_type, PY3 +from .packages.six.moves import http_client as httplib +from .connection import HTTPException, BaseSSLError +from .util.response import is_fp_closed, is_response_to_head + +log = logging.getLogger(__name__) + + +class DeflateDecoder(object): + + def __init__(self): + self._first_try = True + self._data = binary_type() + self._obj = zlib.decompressobj() + + def __getattr__(self, name): + return getattr(self._obj, name) + + def decompress(self, data): + if not data: + return data + + if not self._first_try: + return self._obj.decompress(data) + + self._data += data + try: + decompressed = self._obj.decompress(data) + if decompressed: + self._first_try = False + self._data = None + return decompressed + except zlib.error: + self._first_try = False + self._obj = zlib.decompressobj(-zlib.MAX_WBITS) + try: + return self.decompress(self._data) + finally: + self._data = None + + +class GzipDecoderState(object): + + FIRST_MEMBER = 0 + OTHER_MEMBERS = 1 + SWALLOW_DATA = 2 + + +class GzipDecoder(object): + + def __init__(self): + self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS) + self._state = GzipDecoderState.FIRST_MEMBER + + def __getattr__(self, name): + return getattr(self._obj, name) + + def decompress(self, data): + ret = binary_type() + if self._state == GzipDecoderState.SWALLOW_DATA or not data: + return ret + while True: + try: + ret += self._obj.decompress(data) + except zlib.error: + previous_state = self._state + # Ignore data after the first error + self._state = GzipDecoderState.SWALLOW_DATA + if previous_state == GzipDecoderState.OTHER_MEMBERS: + # Allow trailing garbage acceptable in other gzip clients + return ret + raise + data = self._obj.unused_data + if not data: + return ret + self._state = GzipDecoderState.OTHER_MEMBERS + self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS) + + +def _get_decoder(mode): + if mode == 'gzip': + return GzipDecoder() + + return DeflateDecoder() + + +class HTTPResponse(io.IOBase): + """ + HTTP Response container. 
+ + Backwards-compatible to httplib's HTTPResponse but the response ``body`` is + loaded and decoded on-demand when the ``data`` property is accessed. This + class is also compatible with the Python standard library's :mod:`io` + module, and can hence be treated as a readable object in the context of that + framework. + + Extra parameters for behaviour not present in httplib.HTTPResponse: + + :param preload_content: + If True, the response's body will be preloaded during construction. + + :param decode_content: + If True, will attempt to decode the body based on the + 'content-encoding' header. + + :param original_response: + When this HTTPResponse wrapper is generated from an httplib.HTTPResponse + object, it's convenient to include the original for debug purposes. It's + otherwise unused. + + :param retries: + The retries contains the last :class:`~urllib3.util.retry.Retry` that + was used during the request. + + :param enforce_content_length: + Enforce content length checking. Body returned by server must match + value of Content-Length header, if present. Otherwise, raise error. + """ + + CONTENT_DECODERS = ['gzip', 'deflate'] + REDIRECT_STATUSES = [301, 302, 303, 307, 308] + + def __init__(self, body='', headers=None, status=0, version=0, reason=None, + strict=0, preload_content=True, decode_content=True, + original_response=None, pool=None, connection=None, msg=None, + retries=None, enforce_content_length=False, + request_method=None, request_url=None): + + if isinstance(headers, HTTPHeaderDict): + self.headers = headers + else: + self.headers = HTTPHeaderDict(headers) + self.status = status + self.version = version + self.reason = reason + self.strict = strict + self.decode_content = decode_content + self.retries = retries + self.enforce_content_length = enforce_content_length + + self._decoder = None + self._body = None + self._fp = None + self._original_response = original_response + self._fp_bytes_read = 0 + self.msg = msg + self._request_url = request_url + + if body and isinstance(body, (basestring, binary_type)): + self._body = body + + self._pool = pool + self._connection = connection + + if hasattr(body, 'read'): + self._fp = body + + # Are we using the chunked-style of transfer encoding? + self.chunked = False + self.chunk_left = None + tr_enc = self.headers.get('transfer-encoding', '').lower() + # Don't incur the penalty of creating a list and then discarding it + encodings = (enc.strip() for enc in tr_enc.split(",")) + if "chunked" in encodings: + self.chunked = True + + # Determine length of response + self.length_remaining = self._init_length(request_method) + + # If requested, preload the body. + if preload_content and not self._body: + self._body = self.read(decode_content=decode_content) + + def get_redirect_location(self): + """ + Should we redirect and where to? + + :returns: Truthy redirect location string if we got a redirect status + code and valid location. ``None`` if redirect status and no + location. ``False`` if not a redirect status code. + """ + if self.status in self.REDIRECT_STATUSES: + return self.headers.get('location') + + return False + + def release_conn(self): + if not self._pool or not self._connection: + return + + self._pool._put_conn(self._connection) + self._connection = None + + @property + def data(self): + # For backwords-compat with earlier urllib3 0.4 and earlier. 
+ if self._body: + return self._body + + if self._fp: + return self.read(cache_content=True) + + @property + def connection(self): + return self._connection + + def isclosed(self): + return is_fp_closed(self._fp) + + def tell(self): + """ + Obtain the number of bytes pulled over the wire so far. May differ from + the amount of content returned by :meth:``HTTPResponse.read`` if bytes + are encoded on the wire (e.g, compressed). + """ + return self._fp_bytes_read + + def _init_length(self, request_method): + """ + Set initial length value for Response content if available. + """ + length = self.headers.get('content-length') + + if length is not None: + if self.chunked: + # This Response will fail with an IncompleteRead if it can't be + # received as chunked. This method falls back to attempt reading + # the response before raising an exception. + log.warning("Received response with both Content-Length and " + "Transfer-Encoding set. This is expressly forbidden " + "by RFC 7230 sec 3.3.2. Ignoring Content-Length and " + "attempting to process response as Transfer-Encoding: " + "chunked.") + return None + + try: + # RFC 7230 section 3.3.2 specifies multiple content lengths can + # be sent in a single Content-Length header + # (e.g. Content-Length: 42, 42). This line ensures the values + # are all valid ints and that as long as the `set` length is 1, + # all values are the same. Otherwise, the header is invalid. + lengths = set([int(val) for val in length.split(',')]) + if len(lengths) > 1: + raise InvalidHeader("Content-Length contained multiple " + "unmatching values (%s)" % length) + length = lengths.pop() + except ValueError: + length = None + else: + if length < 0: + length = None + + # Convert status to int for comparison + # In some cases, httplib returns a status of "_UNKNOWN" + try: + status = int(self.status) + except ValueError: + status = 0 + + # Check for responses that shouldn't include a body + if status in (204, 304) or 100 <= status < 200 or request_method == 'HEAD': + length = 0 + + return length + + def _init_decoder(self): + """ + Set-up the _decoder attribute if necessary. + """ + # Note: content-encoding value should be case-insensitive, per RFC 7230 + # Section 3.2 + content_encoding = self.headers.get('content-encoding', '').lower() + if self._decoder is None and content_encoding in self.CONTENT_DECODERS: + self._decoder = _get_decoder(content_encoding) + + def _decode(self, data, decode_content, flush_decoder): + """ + Decode the data passed in and potentially flush the decoder. + """ + try: + if decode_content and self._decoder: + data = self._decoder.decompress(data) + except (IOError, zlib.error) as e: + content_encoding = self.headers.get('content-encoding', '').lower() + raise DecodeError( + "Received response with content-encoding: %s, but " + "failed to decode it." % content_encoding, e) + + if flush_decoder and decode_content: + data += self._flush_decoder() + + return data + + def _flush_decoder(self): + """ + Flushes the decoder. Should only be called if the decoder is actually + being used. + """ + if self._decoder: + buf = self._decoder.decompress(b'') + return buf + self._decoder.flush() + + return b'' + + @contextmanager + def _error_catcher(self): + """ + Catch low-level python exceptions, instead re-raising urllib3 + variants, so that low-level exceptions are not leaked in the + high-level api. + + On exit, release the connection back to the pool. 
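A no-network sketch of the decode path above: the ``content-encoding`` header selects ``DeflateDecoder`` and ``read()`` hands back decompressed bytes (assuming the package imports as ``urllib3``; the payload is synthetic):

    import io
    import zlib

    from urllib3.response import HTTPResponse

    compressed = zlib.compress(b"hello world")   # zlib/deflate-wrapped payload
    resp = HTTPResponse(
        body=io.BytesIO(compressed),
        headers={"content-encoding": "deflate"},
        status=200,
        preload_content=False,
    )
    # _init_decoder() picks DeflateDecoder from the header, _decode() inflates.
    assert resp.read(decode_content=True) == b"hello world"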
+ """ + clean_exit = False + + try: + try: + yield + + except SocketTimeout: + # FIXME: Ideally we'd like to include the url in the ReadTimeoutError but + # there is yet no clean way to get at it from this context. + raise ReadTimeoutError(self._pool, None, 'Read timed out.') + + except BaseSSLError as e: + # FIXME: Is there a better way to differentiate between SSLErrors? + if 'read operation timed out' not in str(e): # Defensive: + # This shouldn't happen but just in case we're missing an edge + # case, let's avoid swallowing SSL errors. + raise + + raise ReadTimeoutError(self._pool, None, 'Read timed out.') + + except (HTTPException, SocketError) as e: + # This includes IncompleteRead. + raise ProtocolError('Connection broken: %r' % e, e) + + # If no exception is thrown, we should avoid cleaning up + # unnecessarily. + clean_exit = True + finally: + # If we didn't terminate cleanly, we need to throw away our + # connection. + if not clean_exit: + # The response may not be closed but we're not going to use it + # anymore so close it now to ensure that the connection is + # released back to the pool. + if self._original_response: + self._original_response.close() + + # Closing the response may not actually be sufficient to close + # everything, so if we have a hold of the connection close that + # too. + if self._connection: + self._connection.close() + + # If we hold the original response but it's closed now, we should + # return the connection back to the pool. + if self._original_response and self._original_response.isclosed(): + self.release_conn() + + def read(self, amt=None, decode_content=None, cache_content=False): + """ + Similar to :meth:`httplib.HTTPResponse.read`, but with two additional + parameters: ``decode_content`` and ``cache_content``. + + :param amt: + How much of the content to read. If specified, caching is skipped + because it doesn't make sense to cache partial content as the full + response. + + :param decode_content: + If True, will attempt to decode the body based on the + 'content-encoding' header. + + :param cache_content: + If True, will save the returned data such that the same result is + returned despite of the state of the underlying file object. This + is useful if you want the ``.data`` property to continue working + after having ``.read()`` the file object. (Overridden if ``amt`` is + set.) + """ + self._init_decoder() + if decode_content is None: + decode_content = self.decode_content + + if self._fp is None: + return + + flush_decoder = False + data = None + + with self._error_catcher(): + if amt is None: + # cStringIO doesn't like amt=None + data = self._fp.read() + flush_decoder = True + else: + cache_content = False + data = self._fp.read(amt) + if amt != 0 and not data: # Platform-specific: Buggy versions of Python. + # Close the connection when no data is returned + # + # This is redundant to what httplib/http.client _should_ + # already do. However, versions of python released before + # December 15, 2012 (http://bugs.python.org/issue16298) do + # not properly close the connection in all cases. There is + # no harm in redundantly calling close. + self._fp.close() + flush_decoder = True + if self.enforce_content_length and self.length_remaining not in (0, None): + # This is an edge case that httplib failed to cover due + # to concerns of backward compatibility. We're + # addressing it here to make sure IncompleteRead is + # raised during streaming, so all calls with incorrect + # Content-Length are caught. 
+ raise IncompleteRead(self._fp_bytes_read, self.length_remaining) + + if data: + self._fp_bytes_read += len(data) + if self.length_remaining is not None: + self.length_remaining -= len(data) + + data = self._decode(data, decode_content, flush_decoder) + + if cache_content: + self._body = data + + return data + + def stream(self, amt=2**16, decode_content=None): + """ + A generator wrapper for the read() method. A call will block until + ``amt`` bytes have been read from the connection or until the + connection is closed. + + :param amt: + How much of the content to read. The generator will return up to + much data per iteration, but may return less. This is particularly + likely when using compressed data. However, the empty string will + never be returned. + + :param decode_content: + If True, will attempt to decode the body based on the + 'content-encoding' header. + """ + if self.chunked and self.supports_chunked_reads(): + for line in self.read_chunked(amt, decode_content=decode_content): + yield line + else: + while not is_fp_closed(self._fp): + data = self.read(amt=amt, decode_content=decode_content) + + if data: + yield data + + @classmethod + def from_httplib(ResponseCls, r, **response_kw): + """ + Given an :class:`httplib.HTTPResponse` instance ``r``, return a + corresponding :class:`urllib3.response.HTTPResponse` object. + + Remaining parameters are passed to the HTTPResponse constructor, along + with ``original_response=r``. + """ + headers = r.msg + + if not isinstance(headers, HTTPHeaderDict): + if PY3: # Python 3 + headers = HTTPHeaderDict(headers.items()) + else: # Python 2 + headers = HTTPHeaderDict.from_httplib(headers) + + # HTTPResponse objects in Python 3 don't have a .strict attribute + strict = getattr(r, 'strict', 0) + resp = ResponseCls(body=r, + headers=headers, + status=r.status, + version=r.version, + reason=r.reason, + strict=strict, + original_response=r, + **response_kw) + return resp + + # Backwards-compatibility methods for httplib.HTTPResponse + def getheaders(self): + return self.headers + + def getheader(self, name, default=None): + return self.headers.get(name, default) + + # Backwards compatibility for http.cookiejar + def info(self): + return self.headers + + # Overrides from io.IOBase + def close(self): + if not self.closed: + self._fp.close() + + if self._connection: + self._connection.close() + + @property + def closed(self): + if self._fp is None: + return True + elif hasattr(self._fp, 'isclosed'): + return self._fp.isclosed() + elif hasattr(self._fp, 'closed'): + return self._fp.closed + else: + return True + + def fileno(self): + if self._fp is None: + raise IOError("HTTPResponse has no file to get a fileno from") + elif hasattr(self._fp, "fileno"): + return self._fp.fileno() + else: + raise IOError("The file-like object this HTTPResponse is wrapped " + "around has no file descriptor") + + def flush(self): + if self._fp is not None and hasattr(self._fp, 'flush'): + return self._fp.flush() + + def readable(self): + # This method is required for `io` module compatibility. + return True + + def readinto(self, b): + # This method is required for `io` module compatibility. + temp = self.read(len(b)) + if len(temp) == 0: + return 0 + else: + b[:len(temp)] = temp + return len(temp) + + def supports_chunked_reads(self): + """ + Checks if the underlying file-like object looks like a + httplib.HTTPResponse object. We do this by testing for the fp + attribute. If it is present we assume it returns raw chunks as + processed by read_chunked(). 
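A sketch of ``stream()`` against a plain file-like body, with no chunked encoding and no network involved (again assuming the package imports as ``urllib3``):

    import io

    from urllib3.response import HTTPResponse

    resp = HTTPResponse(body=io.BytesIO(b"a" * 10000), status=200,
                        preload_content=False)
    chunks = list(resp.stream(4096))

    # stream() keeps calling read(amt) until the underlying file object is
    # exhausted and closed, yielding only the non-empty reads.
    assert b"".join(chunks) == b"a" * 10000
    assert [len(c) for c in chunks] == [4096, 4096, 1808]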
+ """ + return hasattr(self._fp, 'fp') + + def _update_chunk_length(self): + # First, we'll figure out length of a chunk and then + # we'll try to read it from socket. + if self.chunk_left is not None: + return + line = self._fp.fp.readline() + line = line.split(b';', 1)[0] + try: + self.chunk_left = int(line, 16) + except ValueError: + # Invalid chunked protocol response, abort. + self.close() + raise httplib.IncompleteRead(line) + + def _handle_chunk(self, amt): + returned_chunk = None + if amt is None: + chunk = self._fp._safe_read(self.chunk_left) + returned_chunk = chunk + self._fp._safe_read(2) # Toss the CRLF at the end of the chunk. + self.chunk_left = None + elif amt < self.chunk_left: + value = self._fp._safe_read(amt) + self.chunk_left = self.chunk_left - amt + returned_chunk = value + elif amt == self.chunk_left: + value = self._fp._safe_read(amt) + self._fp._safe_read(2) # Toss the CRLF at the end of the chunk. + self.chunk_left = None + returned_chunk = value + else: # amt > self.chunk_left + returned_chunk = self._fp._safe_read(self.chunk_left) + self._fp._safe_read(2) # Toss the CRLF at the end of the chunk. + self.chunk_left = None + return returned_chunk + + def read_chunked(self, amt=None, decode_content=None): + """ + Similar to :meth:`HTTPResponse.read`, but with an additional + parameter: ``decode_content``. + + :param amt: + How much of the content to read. If specified, caching is skipped + because it doesn't make sense to cache partial content as the full + response. + + :param decode_content: + If True, will attempt to decode the body based on the + 'content-encoding' header. + """ + self._init_decoder() + # FIXME: Rewrite this method and make it a class with a better structured logic. + if not self.chunked: + raise ResponseNotChunked( + "Response is not chunked. " + "Header 'transfer-encoding: chunked' is missing.") + if not self.supports_chunked_reads(): + raise BodyNotHttplibCompatible( + "Body should be httplib.HTTPResponse like. " + "It should have have an fp attribute which returns raw chunks.") + + with self._error_catcher(): + # Don't bother reading the body of a HEAD request. + if self._original_response and is_response_to_head(self._original_response): + self._original_response.close() + return + + # If a response is already read and closed + # then return immediately. + if self._fp.fp is None: + return + + while True: + self._update_chunk_length() + if self.chunk_left == 0: + break + chunk = self._handle_chunk(amt) + decoded = self._decode(chunk, decode_content=decode_content, + flush_decoder=False) + if decoded: + yield decoded + + if decode_content: + # On CPython and PyPy, we should never need to flush the + # decoder. However, on Jython we *might* need to, so + # lets defensively do it anyway. + decoded = self._flush_decoder() + if decoded: # Platform-specific: Jython. + yield decoded + + # Chunk content ends with \r\n: discard it. + while True: + line = self._fp.fp.readline() + if not line: + # Some sites may not end with '\r\n'. + break + if line == b'\r\n': + break + + # We read everything; close the "file". + if self._original_response: + self._original_response.close() + + def geturl(self): + """ + Returns the URL that was the source of this response. + If the request that generated this response redirected, this method + will return the final redirect location. 
+ """ + if self.retries is not None and len(self.retries.history): + return self.retries.history[-1].redirect_location + else: + return self._request_url diff --git a/env/lib/python3.6/site-packages/pip/_vendor/urllib3/util/connection.py b/env/lib/python3.6/site-packages/pip/_vendor/urllib3/util/connection.py new file mode 100644 index 0000000..5cf488f --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/urllib3/util/connection.py @@ -0,0 +1,126 @@ +from __future__ import absolute_import +import socket +from .wait import NoWayToWaitForSocketError, wait_for_read + + +def is_connection_dropped(conn): # Platform-specific + """ + Returns True if the connection is dropped and should be closed. + + :param conn: + :class:`httplib.HTTPConnection` object. + + Note: For platforms like AppEngine, this will always return ``False`` to + let the platform handle connection recycling transparently for us. + """ + sock = getattr(conn, 'sock', False) + if sock is False: # Platform-specific: AppEngine + return False + if sock is None: # Connection already closed (such as by httplib). + return True + try: + # Returns True if readable, which here means it's been dropped + return wait_for_read(sock, timeout=0.0) + except NoWayToWaitForSocketError: # Platform-specific: AppEngine + return False + + +# This function is copied from socket.py in the Python 2.7 standard +# library test suite. Added to its signature is only `socket_options`. +# One additional modification is that we avoid binding to IPv6 servers +# discovered in DNS if the system doesn't have IPv6 functionality. +def create_connection(address, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, + source_address=None, socket_options=None): + """Connect to *address* and return the socket object. + + Convenience function. Connect to *address* (a 2-tuple ``(host, + port)``) and return the socket object. Passing the optional + *timeout* parameter will set the timeout on the socket instance + before attempting to connect. If no *timeout* is supplied, the + global default timeout setting returned by :func:`getdefaulttimeout` + is used. If *source_address* is set it must be a tuple of (host, port) + for the socket to bind as a source address before making the connection. + An host of '' or port 0 tells the OS to use the default. + """ + + host, port = address + if host.startswith('['): + host = host.strip('[]') + err = None + + # Using the value from allowed_gai_family() in the context of getaddrinfo lets + # us select whether to work with IPv4 DNS records, IPv6 records, or both. + # The original create_connection function always returns all records. + family = allowed_gai_family() + + for res in socket.getaddrinfo(host, port, family, socket.SOCK_STREAM): + af, socktype, proto, canonname, sa = res + sock = None + try: + sock = socket.socket(af, socktype, proto) + + # If provided, set socket level options before connecting. 
+ _set_socket_options(sock, socket_options) + + if timeout is not socket._GLOBAL_DEFAULT_TIMEOUT: + sock.settimeout(timeout) + if source_address: + sock.bind(source_address) + sock.connect(sa) + return sock + + except socket.error as e: + err = e + if sock is not None: + sock.close() + sock = None + + if err is not None: + raise err + + raise socket.error("getaddrinfo returns an empty list") + + +def _set_socket_options(sock, options): + if options is None: + return + + for opt in options: + sock.setsockopt(*opt) + + +def allowed_gai_family(): + """This function is designed to work in the context of + getaddrinfo, where family=socket.AF_UNSPEC is the default and + will perform a DNS search for both IPv6 and IPv4 records.""" + + family = socket.AF_INET + if HAS_IPV6: + family = socket.AF_UNSPEC + return family + + +def _has_ipv6(host): + """ Returns True if the system can bind an IPv6 address. """ + sock = None + has_ipv6 = False + + if socket.has_ipv6: + # has_ipv6 returns true if cPython was compiled with IPv6 support. + # It does not tell us if the system has IPv6 support enabled. To + # determine that we must bind to an IPv6 address. + # https://github.com/shazow/urllib3/pull/611 + # https://bugs.python.org/issue658327 + try: + sock = socket.socket(socket.AF_INET6) + sock.bind((host, 0)) + has_ipv6 = True + except Exception: + pass + + if sock: + sock.close() + return has_ipv6 + + +HAS_IPV6 = _has_ipv6('::1') diff --git a/env/lib/python3.6/site-packages/pip/_vendor/urllib3/util/queue.py b/env/lib/python3.6/site-packages/pip/_vendor/urllib3/util/queue.py new file mode 100644 index 0000000..d3d379a --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/urllib3/util/queue.py @@ -0,0 +1,21 @@ +import collections +from ..packages import six +from ..packages.six.moves import queue + +if six.PY2: + # Queue is imported for side effects on MS Windows. See issue #229. + import Queue as _unused_module_Queue # noqa: F401 + + +class LifoQueue(queue.Queue): + def _init(self, _): + self.queue = collections.deque() + + def _qsize(self, len=len): + return len(self.queue) + + def _put(self, item): + self.queue.append(item) + + def _get(self): + return self.queue.pop() diff --git a/env/lib/python3.6/site-packages/pip/_vendor/urllib3/util/request.py b/env/lib/python3.6/site-packages/pip/_vendor/urllib3/util/request.py new file mode 100644 index 0000000..3ddfcd5 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/urllib3/util/request.py @@ -0,0 +1,118 @@ +from __future__ import absolute_import +from base64 import b64encode + +from ..packages.six import b, integer_types +from ..exceptions import UnrewindableBodyError + +ACCEPT_ENCODING = 'gzip,deflate' +_FAILEDTELL = object() + + +def make_headers(keep_alive=None, accept_encoding=None, user_agent=None, + basic_auth=None, proxy_basic_auth=None, disable_cache=None): + """ + Shortcuts for generating request headers. + + :param keep_alive: + If ``True``, adds 'connection: keep-alive' header. + + :param accept_encoding: + Can be a boolean, list, or string. + ``True`` translates to 'gzip,deflate'. + List will get joined by comma. + String will be used as provided. + + :param user_agent: + String representing the user-agent you want, such as + "python-urllib3/0.6" + + :param basic_auth: + Colon-separated username:password string for 'authorization: basic ...' + auth header. + + :param proxy_basic_auth: + Colon-separated username:password string for 'proxy-authorization: basic ...' + auth header. 
+ + :param disable_cache: + If ``True``, adds 'cache-control: no-cache' header. + + Example:: + + >>> make_headers(keep_alive=True, user_agent="Batman/1.0") + {'connection': 'keep-alive', 'user-agent': 'Batman/1.0'} + >>> make_headers(accept_encoding=True) + {'accept-encoding': 'gzip,deflate'} + """ + headers = {} + if accept_encoding: + if isinstance(accept_encoding, str): + pass + elif isinstance(accept_encoding, list): + accept_encoding = ','.join(accept_encoding) + else: + accept_encoding = ACCEPT_ENCODING + headers['accept-encoding'] = accept_encoding + + if user_agent: + headers['user-agent'] = user_agent + + if keep_alive: + headers['connection'] = 'keep-alive' + + if basic_auth: + headers['authorization'] = 'Basic ' + \ + b64encode(b(basic_auth)).decode('utf-8') + + if proxy_basic_auth: + headers['proxy-authorization'] = 'Basic ' + \ + b64encode(b(proxy_basic_auth)).decode('utf-8') + + if disable_cache: + headers['cache-control'] = 'no-cache' + + return headers + + +def set_file_position(body, pos): + """ + If a position is provided, move file to that point. + Otherwise, we'll attempt to record a position for future use. + """ + if pos is not None: + rewind_body(body, pos) + elif getattr(body, 'tell', None) is not None: + try: + pos = body.tell() + except (IOError, OSError): + # This differentiates from None, allowing us to catch + # a failed `tell()` later when trying to rewind the body. + pos = _FAILEDTELL + + return pos + + +def rewind_body(body, body_pos): + """ + Attempt to rewind body to a certain position. + Primarily used for request redirects and retries. + + :param body: + File-like object that supports seek. + + :param int pos: + Position to seek to in file. + """ + body_seek = getattr(body, 'seek', None) + if body_seek is not None and isinstance(body_pos, integer_types): + try: + body_seek(body_pos) + except (IOError, OSError): + raise UnrewindableBodyError("An error occurred when rewinding request " + "body for redirect/retry.") + elif body_pos is _FAILEDTELL: + raise UnrewindableBodyError("Unable to record file position for rewinding " + "request body during a redirect/retry.") + else: + raise ValueError("body_pos must be of type integer, " + "instead it was %s." % type(body_pos)) diff --git a/env/lib/python3.6/site-packages/pip/_vendor/urllib3/util/response.py b/env/lib/python3.6/site-packages/pip/_vendor/urllib3/util/response.py new file mode 100644 index 0000000..67cf730 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/urllib3/util/response.py @@ -0,0 +1,81 @@ +from __future__ import absolute_import +from ..packages.six.moves import http_client as httplib + +from ..exceptions import HeaderParsingError + + +def is_fp_closed(obj): + """ + Checks whether a given file-like object is closed. + + :param obj: + The file-like object to check. + """ + + try: + # Check `isclosed()` first, in case Python3 doesn't set `closed`. + # GH Issue #928 + return obj.isclosed() + except AttributeError: + pass + + try: + # Check via the official file-like-object way. + return obj.closed + except AttributeError: + pass + + try: + # Check if the object is a container for another file-like object that + # gets released on exhaustion (e.g. HTTPResponse). + return obj.fp is None + except AttributeError: + pass + + raise ValueError("Unable to determine whether fp is closed.") + + +def assert_header_parsing(headers): + """ + Asserts whether all headers have been successfully parsed. + Extracts encountered errors from the result of parsing headers. + + Only works on Python 3. 
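A small sketch of the rewind helpers defined in ``util/request.py`` above, which the redirect/retry machinery uses to replay a request body (assuming the package imports as ``urllib3``):

    import io

    from urllib3.util.request import rewind_body, set_file_position

    body = io.BytesIO(b"payload")
    pos = set_file_position(body, None)   # records tell() before the first attempt
    assert pos == 0

    body.read()                           # the first attempt consumes the body
    rewind_body(body, pos)                # a retry/redirect seeks back to the start
    assert body.read() == b"payload"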
+ + :param headers: Headers to verify. + :type headers: `httplib.HTTPMessage`. + + :raises urllib3.exceptions.HeaderParsingError: + If parsing errors are found. + """ + + # This will fail silently if we pass in the wrong kind of parameter. + # To make debugging easier add an explicit check. + if not isinstance(headers, httplib.HTTPMessage): + raise TypeError('expected httplib.Message, got {0}.'.format( + type(headers))) + + defects = getattr(headers, 'defects', None) + get_payload = getattr(headers, 'get_payload', None) + + unparsed_data = None + if get_payload: # Platform-specific: Python 3. + unparsed_data = get_payload() + + if defects or unparsed_data: + raise HeaderParsingError(defects=defects, unparsed_data=unparsed_data) + + +def is_response_to_head(response): + """ + Checks whether the request of a response has been a HEAD-request. + Handles the quirks of AppEngine. + + :param conn: + :type conn: :class:`httplib.HTTPResponse` + """ + # FIXME: Can we do this somehow without accessing private httplib _method? + method = response._method + if isinstance(method, int): # Platform-specific: Appengine + return method == 3 + return method.upper() == 'HEAD' diff --git a/env/lib/python3.6/site-packages/pip/_vendor/urllib3/util/retry.py b/env/lib/python3.6/site-packages/pip/_vendor/urllib3/util/retry.py new file mode 100644 index 0000000..7ad3dc6 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/urllib3/util/retry.py @@ -0,0 +1,411 @@ +from __future__ import absolute_import +import time +import logging +from collections import namedtuple +from itertools import takewhile +import email +import re + +from ..exceptions import ( + ConnectTimeoutError, + MaxRetryError, + ProtocolError, + ReadTimeoutError, + ResponseError, + InvalidHeader, +) +from ..packages import six + + +log = logging.getLogger(__name__) + + +# Data structure for representing the metadata of requests that result in a retry. +RequestHistory = namedtuple('RequestHistory', ["method", "url", "error", + "status", "redirect_location"]) + + +class Retry(object): + """ Retry configuration. + + Each retry attempt will create a new Retry object with updated values, so + they can be safely reused. + + Retries can be defined as a default for a pool:: + + retries = Retry(connect=5, read=2, redirect=5) + http = PoolManager(retries=retries) + response = http.request('GET', 'http://example.com/') + + Or per-request (which overrides the default for the pool):: + + response = http.request('GET', 'http://example.com/', retries=Retry(10)) + + Retries can be disabled by passing ``False``:: + + response = http.request('GET', 'http://example.com/', retries=False) + + Errors will be wrapped in :class:`~urllib3.exceptions.MaxRetryError` unless + retries are disabled, in which case the causing exception will be raised. + + :param int total: + Total number of retries to allow. Takes precedence over other counts. + + Set to ``None`` to remove this constraint and fall back on other + counts. It's a good idea to set this to some sensibly-high value to + account for unexpected edge cases and avoid infinite retry loops. + + Set to ``0`` to fail on the first retry. + + Set to ``False`` to disable and imply ``raise_on_redirect=False``. + + :param int connect: + How many connection-related errors to retry on. + + These are errors raised before the request is sent to the remote server, + which we assume has not triggered the server to process the request. + + Set to ``0`` to fail on the first retry of this type. 
+ + :param int read: + How many times to retry on read errors. + + These errors are raised after the request was sent to the server, so the + request may have side-effects. + + Set to ``0`` to fail on the first retry of this type. + + :param int redirect: + How many redirects to perform. Limit this to avoid infinite redirect + loops. + + A redirect is a HTTP response with a status code 301, 302, 303, 307 or + 308. + + Set to ``0`` to fail on the first retry of this type. + + Set to ``False`` to disable and imply ``raise_on_redirect=False``. + + :param int status: + How many times to retry on bad status codes. + + These are retries made on responses, where status code matches + ``status_forcelist``. + + Set to ``0`` to fail on the first retry of this type. + + :param iterable method_whitelist: + Set of uppercased HTTP method verbs that we should retry on. + + By default, we only retry on methods which are considered to be + idempotent (multiple requests with the same parameters end with the + same state). See :attr:`Retry.DEFAULT_METHOD_WHITELIST`. + + Set to a ``False`` value to retry on any verb. + + :param iterable status_forcelist: + A set of integer HTTP status codes that we should force a retry on. + A retry is initiated if the request method is in ``method_whitelist`` + and the response status code is in ``status_forcelist``. + + By default, this is disabled with ``None``. + + :param float backoff_factor: + A backoff factor to apply between attempts after the second try + (most errors are resolved immediately by a second try without a + delay). urllib3 will sleep for:: + + {backoff factor} * (2 ^ ({number of total retries} - 1)) + + seconds. If the backoff_factor is 0.1, then :func:`.sleep` will sleep + for [0.0s, 0.2s, 0.4s, ...] between retries. It will never be longer + than :attr:`Retry.BACKOFF_MAX`. + + By default, backoff is disabled (set to 0). + + :param bool raise_on_redirect: Whether, if the number of redirects is + exhausted, to raise a MaxRetryError, or to return a response with a + response code in the 3xx range. + + :param bool raise_on_status: Similar meaning to ``raise_on_redirect``: + whether we should raise an exception, or return a response, + if status falls in ``status_forcelist`` range and retries have + been exhausted. + + :param tuple history: The history of the request encountered during + each call to :meth:`~Retry.increment`. The list is in the order + the requests occurred. Each list item is of class :class:`RequestHistory`. + + :param bool respect_retry_after_header: + Whether to respect Retry-After header on status codes defined as + :attr:`Retry.RETRY_AFTER_STATUS_CODES` or not. + + :param iterable remove_headers_on_redirect: + Sequence of headers to remove from the request when a response + indicating a redirect is returned before firing off the redirected + request. + """ + + DEFAULT_METHOD_WHITELIST = frozenset([ + 'HEAD', 'GET', 'PUT', 'DELETE', 'OPTIONS', 'TRACE']) + + RETRY_AFTER_STATUS_CODES = frozenset([413, 429, 503]) + + DEFAULT_REDIRECT_HEADERS_BLACKLIST = frozenset(['Authorization']) + + #: Maximum backoff time. 
+ BACKOFF_MAX = 120 + + def __init__(self, total=10, connect=None, read=None, redirect=None, status=None, + method_whitelist=DEFAULT_METHOD_WHITELIST, status_forcelist=None, + backoff_factor=0, raise_on_redirect=True, raise_on_status=True, + history=None, respect_retry_after_header=True, + remove_headers_on_redirect=DEFAULT_REDIRECT_HEADERS_BLACKLIST): + + self.total = total + self.connect = connect + self.read = read + self.status = status + + if redirect is False or total is False: + redirect = 0 + raise_on_redirect = False + + self.redirect = redirect + self.status_forcelist = status_forcelist or set() + self.method_whitelist = method_whitelist + self.backoff_factor = backoff_factor + self.raise_on_redirect = raise_on_redirect + self.raise_on_status = raise_on_status + self.history = history or tuple() + self.respect_retry_after_header = respect_retry_after_header + self.remove_headers_on_redirect = remove_headers_on_redirect + + def new(self, **kw): + params = dict( + total=self.total, + connect=self.connect, read=self.read, redirect=self.redirect, status=self.status, + method_whitelist=self.method_whitelist, + status_forcelist=self.status_forcelist, + backoff_factor=self.backoff_factor, + raise_on_redirect=self.raise_on_redirect, + raise_on_status=self.raise_on_status, + history=self.history, + remove_headers_on_redirect=self.remove_headers_on_redirect + ) + params.update(kw) + return type(self)(**params) + + @classmethod + def from_int(cls, retries, redirect=True, default=None): + """ Backwards-compatibility for the old retries format.""" + if retries is None: + retries = default if default is not None else cls.DEFAULT + + if isinstance(retries, Retry): + return retries + + redirect = bool(redirect) and None + new_retries = cls(retries, redirect=redirect) + log.debug("Converted retries value: %r -> %r", retries, new_retries) + return new_retries + + def get_backoff_time(self): + """ Formula for computing the current backoff + + :rtype: float + """ + # We want to consider only the last consecutive errors sequence (Ignore redirects). + consecutive_errors_len = len(list(takewhile(lambda x: x.redirect_location is None, + reversed(self.history)))) + if consecutive_errors_len <= 1: + return 0 + + backoff_value = self.backoff_factor * (2 ** (consecutive_errors_len - 1)) + return min(self.BACKOFF_MAX, backoff_value) + + def parse_retry_after(self, retry_after): + # Whitespace: https://tools.ietf.org/html/rfc7230#section-3.2.4 + if re.match(r"^\s*[0-9]+\s*$", retry_after): + seconds = int(retry_after) + else: + retry_date_tuple = email.utils.parsedate(retry_after) + if retry_date_tuple is None: + raise InvalidHeader("Invalid Retry-After header: %s" % retry_after) + retry_date = time.mktime(retry_date_tuple) + seconds = retry_date - time.time() + + if seconds < 0: + seconds = 0 + + return seconds + + def get_retry_after(self, response): + """ Get the value of Retry-After in seconds. """ + + retry_after = response.getheader("Retry-After") + + if retry_after is None: + return None + + return self.parse_retry_after(retry_after) + + def sleep_for_retry(self, response=None): + retry_after = self.get_retry_after(response) + if retry_after: + time.sleep(retry_after) + return True + + return False + + def _sleep_backoff(self): + backoff = self.get_backoff_time() + if backoff <= 0: + return + time.sleep(backoff) + + def sleep(self, response=None): + """ Sleep between retry attempts. + + This method will respect a server's ``Retry-After`` response header + and sleep the duration of the time requested. 
If that is not present, it + will use an exponential backoff. By default, the backoff factor is 0 and + this method will return immediately. + """ + + if response: + slept = self.sleep_for_retry(response) + if slept: + return + + self._sleep_backoff() + + def _is_connection_error(self, err): + """ Errors when we're fairly sure that the server did not receive the + request, so it should be safe to retry. + """ + return isinstance(err, ConnectTimeoutError) + + def _is_read_error(self, err): + """ Errors that occur after the request has been started, so we should + assume that the server began processing it. + """ + return isinstance(err, (ReadTimeoutError, ProtocolError)) + + def _is_method_retryable(self, method): + """ Checks if a given HTTP method should be retried upon, depending if + it is included on the method whitelist. + """ + if self.method_whitelist and method.upper() not in self.method_whitelist: + return False + + return True + + def is_retry(self, method, status_code, has_retry_after=False): + """ Is this method/status code retryable? (Based on whitelists and control + variables such as the number of total retries to allow, whether to + respect the Retry-After header, whether this header is present, and + whether the returned status code is on the list of status codes to + be retried upon on the presence of the aforementioned header) + """ + if not self._is_method_retryable(method): + return False + + if self.status_forcelist and status_code in self.status_forcelist: + return True + + return (self.total and self.respect_retry_after_header and + has_retry_after and (status_code in self.RETRY_AFTER_STATUS_CODES)) + + def is_exhausted(self): + """ Are we out of retries? """ + retry_counts = (self.total, self.connect, self.read, self.redirect, self.status) + retry_counts = list(filter(None, retry_counts)) + if not retry_counts: + return False + + return min(retry_counts) < 0 + + def increment(self, method=None, url=None, response=None, error=None, + _pool=None, _stacktrace=None): + """ Return a new Retry object with incremented retry counters. + + :param response: A response object, or None, if the server did not + return a response. + :type response: :class:`~urllib3.response.HTTPResponse` + :param Exception error: An error encountered during the request, or + None if the response was received successfully. + + :return: A new ``Retry`` object. + """ + if self.total is False and error: + # Disabled, indicate to re-raise the error. + raise six.reraise(type(error), error, _stacktrace) + + total = self.total + if total is not None: + total -= 1 + + connect = self.connect + read = self.read + redirect = self.redirect + status_count = self.status + cause = 'unknown' + status = None + redirect_location = None + + if error and self._is_connection_error(error): + # Connect retry? + if connect is False: + raise six.reraise(type(error), error, _stacktrace) + elif connect is not None: + connect -= 1 + + elif error and self._is_read_error(error): + # Read retry? + if read is False or not self._is_method_retryable(method): + raise six.reraise(type(error), error, _stacktrace) + elif read is not None: + read -= 1 + + elif response and response.get_redirect_location(): + # Redirect retry? 
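+            # When a redirect budget is set, a redirect response consumes one
+            # unit of it; the Location target and status code are recorded in
+            # the RequestHistory entry appended below.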
+ if redirect is not None: + redirect -= 1 + cause = 'too many redirects' + redirect_location = response.get_redirect_location() + status = response.status + + else: + # Incrementing because of a server error like a 500 in + # status_forcelist and a the given method is in the whitelist + cause = ResponseError.GENERIC_ERROR + if response and response.status: + if status_count is not None: + status_count -= 1 + cause = ResponseError.SPECIFIC_ERROR.format( + status_code=response.status) + status = response.status + + history = self.history + (RequestHistory(method, url, error, status, redirect_location),) + + new_retry = self.new( + total=total, + connect=connect, read=read, redirect=redirect, status=status_count, + history=history) + + if new_retry.is_exhausted(): + raise MaxRetryError(_pool, url, error or ResponseError(cause)) + + log.debug("Incremented Retry for (url='%s'): %r", url, new_retry) + + return new_retry + + def __repr__(self): + return ('{cls.__name__}(total={self.total}, connect={self.connect}, ' + 'read={self.read}, redirect={self.redirect}, status={self.status})').format( + cls=type(self), self=self) + + +# For backwards compatibility (equivalent to pre-v1.9): +Retry.DEFAULT = Retry(3) diff --git a/env/lib/python3.6/site-packages/pip/_vendor/urllib3/util/ssl_.py b/env/lib/python3.6/site-packages/pip/_vendor/urllib3/util/ssl_.py new file mode 100644 index 0000000..3254280 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/urllib3/util/ssl_.py @@ -0,0 +1,396 @@ +from __future__ import absolute_import +import errno +import warnings +import hmac +import socket + +from binascii import hexlify, unhexlify +from hashlib import md5, sha1, sha256 + +from ..exceptions import SSLError, InsecurePlatformWarning, SNIMissingWarning +from ..packages import six + + +SSLContext = None +HAS_SNI = False +IS_PYOPENSSL = False +IS_SECURETRANSPORT = False + +# Maps the length of a digest to a possible hash function producing this digest +HASHFUNC_MAP = { + 32: md5, + 40: sha1, + 64: sha256, +} + + +def _const_compare_digest_backport(a, b): + """ + Compare two digests of equal length in constant time. + + The digests must be of type str/bytes. + Returns True if the digests match, and False otherwise. + """ + result = abs(len(a) - len(b)) + for l, r in zip(bytearray(a), bytearray(b)): + result |= l ^ r + return result == 0 + + +_const_compare_digest = getattr(hmac, 'compare_digest', + _const_compare_digest_backport) + + +try: # Test for SSL features + import ssl + from ssl import wrap_socket, CERT_NONE, PROTOCOL_SSLv23 + from ssl import HAS_SNI # Has SNI? +except ImportError: + pass + + +try: + from ssl import OP_NO_SSLv2, OP_NO_SSLv3, OP_NO_COMPRESSION +except ImportError: + OP_NO_SSLv2, OP_NO_SSLv3 = 0x1000000, 0x2000000 + OP_NO_COMPRESSION = 0x20000 + + +# Python 2.7 and earlier didn't have inet_pton on non-Linux +# so we fallback on inet_aton in those cases. This means that +# we can only detect IPv4 addresses in this case. +if hasattr(socket, 'inet_pton'): + inet_pton = socket.inet_pton +else: + # Maybe we can use ipaddress if the user has urllib3[secure]? + try: + from pip._vendor import ipaddress + + def inet_pton(_, host): + if isinstance(host, six.binary_type): + host = host.decode('ascii') + return ipaddress.ip_address(host) + + except ImportError: # Platform-specific: Non-Linux + def inet_pton(_, host): + return socket.inet_aton(host) + + +# A secure default. 
+# Sources for more information on TLS ciphers: +# +# - https://wiki.mozilla.org/Security/Server_Side_TLS +# - https://www.ssllabs.com/projects/best-practices/index.html +# - https://hynek.me/articles/hardening-your-web-servers-ssl-ciphers/ +# +# The general intent is: +# - Prefer TLS 1.3 cipher suites +# - prefer cipher suites that offer perfect forward secrecy (DHE/ECDHE), +# - prefer ECDHE over DHE for better performance, +# - prefer any AES-GCM and ChaCha20 over any AES-CBC for better performance and +# security, +# - prefer AES-GCM over ChaCha20 because hardware-accelerated AES is common, +# - disable NULL authentication, MD5 MACs and DSS for security reasons. +DEFAULT_CIPHERS = ':'.join([ + 'TLS13-AES-256-GCM-SHA384', + 'TLS13-CHACHA20-POLY1305-SHA256', + 'TLS13-AES-128-GCM-SHA256', + 'ECDH+AESGCM', + 'ECDH+CHACHA20', + 'DH+AESGCM', + 'DH+CHACHA20', + 'ECDH+AES256', + 'DH+AES256', + 'ECDH+AES128', + 'DH+AES', + 'RSA+AESGCM', + 'RSA+AES', + '!aNULL', + '!eNULL', + '!MD5', +]) + +try: + from ssl import SSLContext # Modern SSL? +except ImportError: + import sys + + class SSLContext(object): # Platform-specific: Python 2 & 3.1 + supports_set_ciphers = ((2, 7) <= sys.version_info < (3,) or + (3, 2) <= sys.version_info) + + def __init__(self, protocol_version): + self.protocol = protocol_version + # Use default values from a real SSLContext + self.check_hostname = False + self.verify_mode = ssl.CERT_NONE + self.ca_certs = None + self.options = 0 + self.certfile = None + self.keyfile = None + self.ciphers = None + + def load_cert_chain(self, certfile, keyfile): + self.certfile = certfile + self.keyfile = keyfile + + def load_verify_locations(self, cafile=None, capath=None): + self.ca_certs = cafile + + if capath is not None: + raise SSLError("CA directories not supported in older Pythons") + + def set_ciphers(self, cipher_suite): + if not self.supports_set_ciphers: + raise TypeError( + 'Your version of Python does not support setting ' + 'a custom cipher suite. Please upgrade to Python ' + '2.7, 3.2, or later if you need this functionality.' + ) + self.ciphers = cipher_suite + + def wrap_socket(self, socket, server_hostname=None, server_side=False): + warnings.warn( + 'A true SSLContext object is not available. This prevents ' + 'urllib3 from configuring SSL appropriately and may cause ' + 'certain SSL connections to fail. You can upgrade to a newer ' + 'version of Python to solve this. For more information, see ' + 'https://urllib3.readthedocs.io/en/latest/advanced-usage.html' + '#ssl-warnings', + InsecurePlatformWarning + ) + kwargs = { + 'keyfile': self.keyfile, + 'certfile': self.certfile, + 'ca_certs': self.ca_certs, + 'cert_reqs': self.verify_mode, + 'ssl_version': self.protocol, + 'server_side': server_side, + } + if self.supports_set_ciphers: # Platform-specific: Python 2.7+ + return wrap_socket(socket, ciphers=self.ciphers, **kwargs) + else: # Platform-specific: Python 2.6 + return wrap_socket(socket, **kwargs) + + +def assert_fingerprint(cert, fingerprint): + """ + Checks if given fingerprint matches the supplied certificate. + + :param cert: + Certificate as bytes object. + :param fingerprint: + Fingerprint as string of hexdigits, can be interspersed by colons. + """ + + fingerprint = fingerprint.replace(':', '').lower() + digest_length = len(fingerprint) + hashfunc = HASHFUNC_MAP.get(digest_length) + if not hashfunc: + raise SSLError( + 'Fingerprint of invalid length: {0}'.format(fingerprint)) + + # We need encode() here for py32; works on py2 and p33. 
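+    # The mismatch check further down goes through _const_compare_digest
+    # (hmac.compare_digest where available, otherwise the backport defined
+    # above), so the comparison is resistant to timing attacks.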
+ fingerprint_bytes = unhexlify(fingerprint.encode()) + + cert_digest = hashfunc(cert).digest() + + if not _const_compare_digest(cert_digest, fingerprint_bytes): + raise SSLError('Fingerprints did not match. Expected "{0}", got "{1}".' + .format(fingerprint, hexlify(cert_digest))) + + +def resolve_cert_reqs(candidate): + """ + Resolves the argument to a numeric constant, which can be passed to + the wrap_socket function/method from the ssl module. + Defaults to :data:`ssl.CERT_NONE`. + If given a string it is assumed to be the name of the constant in the + :mod:`ssl` module or its abbreviation. + (So you can specify `REQUIRED` instead of `CERT_REQUIRED`. + If it's neither `None` nor a string we assume it is already the numeric + constant which can directly be passed to wrap_socket. + """ + if candidate is None: + return CERT_NONE + + if isinstance(candidate, str): + res = getattr(ssl, candidate, None) + if res is None: + res = getattr(ssl, 'CERT_' + candidate) + return res + + return candidate + + +def resolve_ssl_version(candidate): + """ + like resolve_cert_reqs + """ + if candidate is None: + return PROTOCOL_SSLv23 + + if isinstance(candidate, str): + res = getattr(ssl, candidate, None) + if res is None: + res = getattr(ssl, 'PROTOCOL_' + candidate) + return res + + return candidate + + +def create_urllib3_context(ssl_version=None, cert_reqs=None, + options=None, ciphers=None): + """All arguments have the same meaning as ``ssl_wrap_socket``. + + By default, this function does a lot of the same work that + ``ssl.create_default_context`` does on Python 3.4+. It: + + - Disables SSLv2, SSLv3, and compression + - Sets a restricted set of server ciphers + + If you wish to enable SSLv3, you can do:: + + from pip._vendor.urllib3.util import ssl_ + context = ssl_.create_urllib3_context() + context.options &= ~ssl_.OP_NO_SSLv3 + + You can do the same to enable compression (substituting ``COMPRESSION`` + for ``SSLv3`` in the last line above). + + :param ssl_version: + The desired protocol version to use. This will default to + PROTOCOL_SSLv23 which will negotiate the highest protocol that both + the server and your installation of OpenSSL support. + :param cert_reqs: + Whether to require the certificate verification. This defaults to + ``ssl.CERT_REQUIRED``. + :param options: + Specific OpenSSL options. These default to ``ssl.OP_NO_SSLv2``, + ``ssl.OP_NO_SSLv3``, ``ssl.OP_NO_COMPRESSION``. + :param ciphers: + Which cipher suites to allow the server to select. + :returns: + Constructed SSLContext object with specified options + :rtype: SSLContext + """ + context = SSLContext(ssl_version or ssl.PROTOCOL_SSLv23) + + # Setting the default here, as we may have no ssl module on import + cert_reqs = ssl.CERT_REQUIRED if cert_reqs is None else cert_reqs + + if options is None: + options = 0 + # SSLv2 is easily broken and is considered harmful and dangerous + options |= OP_NO_SSLv2 + # SSLv3 has several problems and is now dangerous + options |= OP_NO_SSLv3 + # Disable compression to prevent CRIME attacks for OpenSSL 1.0+ + # (issue #309) + options |= OP_NO_COMPRESSION + + context.options |= options + + if getattr(context, 'supports_set_ciphers', True): # Platform-specific: Python 2.6 + context.set_ciphers(ciphers or DEFAULT_CIPHERS) + + context.verify_mode = cert_reqs + if getattr(context, 'check_hostname', None) is not None: # Platform-specific: Python 3.2 + # We do our own verification, including fingerprints and alternative + # hostnames. 
So disable it here + context.check_hostname = False + return context + + +def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None, + ca_certs=None, server_hostname=None, + ssl_version=None, ciphers=None, ssl_context=None, + ca_cert_dir=None): + """ + All arguments except for server_hostname, ssl_context, and ca_cert_dir have + the same meaning as they do when using :func:`ssl.wrap_socket`. + + :param server_hostname: + When SNI is supported, the expected hostname of the certificate + :param ssl_context: + A pre-made :class:`SSLContext` object. If none is provided, one will + be created using :func:`create_urllib3_context`. + :param ciphers: + A string of ciphers we wish the client to support. This is not + supported on Python 2.6 as the ssl module does not support it. + :param ca_cert_dir: + A directory containing CA certificates in multiple separate files, as + supported by OpenSSL's -CApath flag or the capath argument to + SSLContext.load_verify_locations(). + """ + context = ssl_context + if context is None: + # Note: This branch of code and all the variables in it are no longer + # used by urllib3 itself. We should consider deprecating and removing + # this code. + context = create_urllib3_context(ssl_version, cert_reqs, + ciphers=ciphers) + + if ca_certs or ca_cert_dir: + try: + context.load_verify_locations(ca_certs, ca_cert_dir) + except IOError as e: # Platform-specific: Python 2.6, 2.7, 3.2 + raise SSLError(e) + # Py33 raises FileNotFoundError which subclasses OSError + # These are not equivalent unless we check the errno attribute + except OSError as e: # Platform-specific: Python 3.3 and beyond + if e.errno == errno.ENOENT: + raise SSLError(e) + raise + elif getattr(context, 'load_default_certs', None) is not None: + # try to load OS default certs; works well on Windows (require Python3.4+) + context.load_default_certs() + + if certfile: + context.load_cert_chain(certfile, keyfile) + + # If we detect server_hostname is an IP address then the SNI + # extension should not be used according to RFC3546 Section 3.1 + # We shouldn't warn the user if SNI isn't available but we would + # not be using SNI anyways due to IP address for server_hostname. + if ((server_hostname is not None and not is_ipaddress(server_hostname)) + or IS_SECURETRANSPORT): + if HAS_SNI and server_hostname is not None: + return context.wrap_socket(sock, server_hostname=server_hostname) + + warnings.warn( + 'An HTTPS request has been made, but the SNI (Server Name ' + 'Indication) extension to TLS is not available on this platform. ' + 'This may cause the server to present an incorrect TLS ' + 'certificate, which can cause validation failures. You can upgrade to ' + 'a newer version of Python to solve this. For more information, see ' + 'https://urllib3.readthedocs.io/en/latest/advanced-usage.html' + '#ssl-warnings', + SNIMissingWarning + ) + + return context.wrap_socket(sock) + + +def is_ipaddress(hostname): + """Detects whether the hostname given is an IP address. + + :param str hostname: Hostname to examine. + :return: True if the hostname is an IP address, False otherwise. + """ + if six.PY3 and isinstance(hostname, six.binary_type): + # IDN A-label bytes are ASCII compatible. 
+ hostname = hostname.decode('ascii') + + families = [socket.AF_INET] + if hasattr(socket, 'AF_INET6'): + families.append(socket.AF_INET6) + + for af in families: + try: + inet_pton(af, hostname) + except (socket.error, ValueError, OSError): + pass + else: + return True + return False diff --git a/env/lib/python3.6/site-packages/pip/_vendor/urllib3/util/timeout.py b/env/lib/python3.6/site-packages/pip/_vendor/urllib3/util/timeout.py new file mode 100644 index 0000000..cec817e --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/urllib3/util/timeout.py @@ -0,0 +1,242 @@ +from __future__ import absolute_import +# The default socket timeout, used by httplib to indicate that no timeout was +# specified by the user +from socket import _GLOBAL_DEFAULT_TIMEOUT +import time + +from ..exceptions import TimeoutStateError + +# A sentinel value to indicate that no timeout was specified by the user in +# urllib3 +_Default = object() + + +# Use time.monotonic if available. +current_time = getattr(time, "monotonic", time.time) + + +class Timeout(object): + """ Timeout configuration. + + Timeouts can be defined as a default for a pool:: + + timeout = Timeout(connect=2.0, read=7.0) + http = PoolManager(timeout=timeout) + response = http.request('GET', 'http://example.com/') + + Or per-request (which overrides the default for the pool):: + + response = http.request('GET', 'http://example.com/', timeout=Timeout(10)) + + Timeouts can be disabled by setting all the parameters to ``None``:: + + no_timeout = Timeout(connect=None, read=None) + response = http.request('GET', 'http://example.com/, timeout=no_timeout) + + + :param total: + This combines the connect and read timeouts into one; the read timeout + will be set to the time leftover from the connect attempt. In the + event that both a connect timeout and a total are specified, or a read + timeout and a total are specified, the shorter timeout will be applied. + + Defaults to None. + + :type total: integer, float, or None + + :param connect: + The maximum amount of time to wait for a connection attempt to a server + to succeed. Omitting the parameter will default the connect timeout to + the system default, probably `the global default timeout in socket.py + <http://hg.python.org/cpython/file/603b4d593758/Lib/socket.py#l535>`_. + None will set an infinite timeout for connection attempts. + + :type connect: integer, float, or None + + :param read: + The maximum amount of time to wait between consecutive + read operations for a response from the server. Omitting + the parameter will default the read timeout to the system + default, probably `the global default timeout in socket.py + <http://hg.python.org/cpython/file/603b4d593758/Lib/socket.py#l535>`_. + None will set an infinite timeout. + + :type read: integer, float, or None + + .. note:: + + Many factors can affect the total amount of time for urllib3 to return + an HTTP response. + + For example, Python's DNS resolver does not obey the timeout specified + on the socket. Other factors that can affect total request time include + high CPU load, high swap, the program running at a low priority level, + or other behaviors. + + In addition, the read and total timeouts only measure the time between + read operations on the socket connecting the client and the server, + not the total amount of time for the request to return a complete + response. For most requests, the timeout is raised because the server + has not sent the first byte in the specified time. 
This is not always + the case; if a server streams one byte every fifteen seconds, a timeout + of 20 seconds will not trigger, even though the request will take + several minutes to complete. + + If your goal is to cut off any request after a set amount of wall clock + time, consider having a second "watcher" thread to cut off a slow + request. + """ + + #: A sentinel object representing the default timeout value + DEFAULT_TIMEOUT = _GLOBAL_DEFAULT_TIMEOUT + + def __init__(self, total=None, connect=_Default, read=_Default): + self._connect = self._validate_timeout(connect, 'connect') + self._read = self._validate_timeout(read, 'read') + self.total = self._validate_timeout(total, 'total') + self._start_connect = None + + def __str__(self): + return '%s(connect=%r, read=%r, total=%r)' % ( + type(self).__name__, self._connect, self._read, self.total) + + @classmethod + def _validate_timeout(cls, value, name): + """ Check that a timeout attribute is valid. + + :param value: The timeout value to validate + :param name: The name of the timeout attribute to validate. This is + used to specify in error messages. + :return: The validated and casted version of the given value. + :raises ValueError: If it is a numeric value less than or equal to + zero, or the type is not an integer, float, or None. + """ + if value is _Default: + return cls.DEFAULT_TIMEOUT + + if value is None or value is cls.DEFAULT_TIMEOUT: + return value + + if isinstance(value, bool): + raise ValueError("Timeout cannot be a boolean value. It must " + "be an int, float or None.") + try: + float(value) + except (TypeError, ValueError): + raise ValueError("Timeout value %s was %s, but it must be an " + "int, float or None." % (name, value)) + + try: + if value <= 0: + raise ValueError("Attempted to set %s timeout to %s, but the " + "timeout cannot be set to a value less " + "than or equal to 0." % (name, value)) + except TypeError: # Python 3 + raise ValueError("Timeout value %s was %s, but it must be an " + "int, float or None." % (name, value)) + + return value + + @classmethod + def from_float(cls, timeout): + """ Create a new Timeout from a legacy timeout value. + + The timeout value used by httplib.py sets the same timeout on the + connect(), and recv() socket requests. This creates a :class:`Timeout` + object that sets the individual timeouts to the ``timeout`` value + passed to this function. + + :param timeout: The legacy timeout value. + :type timeout: integer, float, sentinel default object, or None + :return: Timeout object + :rtype: :class:`Timeout` + """ + return Timeout(read=timeout, connect=timeout) + + def clone(self): + """ Create a copy of the timeout object + + Timeout properties are stored per-pool but each request needs a fresh + Timeout object to ensure each one has its own start/stop configured. + + :return: a copy of the timeout object + :rtype: :class:`Timeout` + """ + # We can't use copy.deepcopy because that will also create a new object + # for _GLOBAL_DEFAULT_TIMEOUT, which socket.py uses as a sentinel to + # detect the user default. + return Timeout(connect=self._connect, read=self._read, + total=self.total) + + def start_connect(self): + """ Start the timeout clock, used during a connect() attempt + + :raises urllib3.exceptions.TimeoutStateError: if you attempt + to start a timer that has been started already. 
+ """ + if self._start_connect is not None: + raise TimeoutStateError("Timeout timer has already been started.") + self._start_connect = current_time() + return self._start_connect + + def get_connect_duration(self): + """ Gets the time elapsed since the call to :meth:`start_connect`. + + :return: Elapsed time. + :rtype: float + :raises urllib3.exceptions.TimeoutStateError: if you attempt + to get duration for a timer that hasn't been started. + """ + if self._start_connect is None: + raise TimeoutStateError("Can't get connect duration for timer " + "that has not started.") + return current_time() - self._start_connect + + @property + def connect_timeout(self): + """ Get the value to use when setting a connection timeout. + + This will be a positive float or integer, the value None + (never timeout), or the default system timeout. + + :return: Connect timeout. + :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None + """ + if self.total is None: + return self._connect + + if self._connect is None or self._connect is self.DEFAULT_TIMEOUT: + return self.total + + return min(self._connect, self.total) + + @property + def read_timeout(self): + """ Get the value for the read timeout. + + This assumes some time has elapsed in the connection timeout and + computes the read timeout appropriately. + + If self.total is set, the read timeout is dependent on the amount of + time taken by the connect timeout. If the connection time has not been + established, a :exc:`~urllib3.exceptions.TimeoutStateError` will be + raised. + + :return: Value to use for the read timeout. + :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None + :raises urllib3.exceptions.TimeoutStateError: If :meth:`start_connect` + has not yet been called on this object. + """ + if (self.total is not None and + self.total is not self.DEFAULT_TIMEOUT and + self._read is not None and + self._read is not self.DEFAULT_TIMEOUT): + # In case the connect timeout has not yet been established. + if self._start_connect is None: + return self._read + return max(0, min(self.total - self.get_connect_duration(), + self._read)) + elif self.total is not None and self.total is not self.DEFAULT_TIMEOUT: + return max(0, self.total - self.get_connect_duration()) + else: + return self._read diff --git a/env/lib/python3.6/site-packages/pip/_vendor/urllib3/util/url.py b/env/lib/python3.6/site-packages/pip/_vendor/urllib3/util/url.py new file mode 100644 index 0000000..6b6f996 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/urllib3/util/url.py @@ -0,0 +1,230 @@ +from __future__ import absolute_import +from collections import namedtuple + +from ..exceptions import LocationParseError + + +url_attrs = ['scheme', 'auth', 'host', 'port', 'path', 'query', 'fragment'] + +# We only want to normalize urls with an HTTP(S) scheme. +# urllib3 infers URLs without a scheme (None) to be http. +NORMALIZABLE_SCHEMES = ('http', 'https', None) + + +class Url(namedtuple('Url', url_attrs)): + """ + Datastructure for representing an HTTP URL. Used as a return value for + :func:`parse_url`. Both the scheme and host are normalized as they are + both case-insensitive according to RFC 3986. 
+ """ + __slots__ = () + + def __new__(cls, scheme=None, auth=None, host=None, port=None, path=None, + query=None, fragment=None): + if path and not path.startswith('/'): + path = '/' + path + if scheme: + scheme = scheme.lower() + if host and scheme in NORMALIZABLE_SCHEMES: + host = host.lower() + return super(Url, cls).__new__(cls, scheme, auth, host, port, path, + query, fragment) + + @property + def hostname(self): + """For backwards-compatibility with urlparse. We're nice like that.""" + return self.host + + @property + def request_uri(self): + """Absolute path including the query string.""" + uri = self.path or '/' + + if self.query is not None: + uri += '?' + self.query + + return uri + + @property + def netloc(self): + """Network location including host and port""" + if self.port: + return '%s:%d' % (self.host, self.port) + return self.host + + @property + def url(self): + """ + Convert self into a url + + This function should more or less round-trip with :func:`.parse_url`. The + returned url may not be exactly the same as the url inputted to + :func:`.parse_url`, but it should be equivalent by the RFC (e.g., urls + with a blank port will have : removed). + + Example: :: + + >>> U = parse_url('http://google.com/mail/') + >>> U.url + 'http://google.com/mail/' + >>> Url('http', 'username:password', 'host.com', 80, + ... '/path', 'query', 'fragment').url + 'http://username:password@host.com:80/path?query#fragment' + """ + scheme, auth, host, port, path, query, fragment = self + url = '' + + # We use "is not None" we want things to happen with empty strings (or 0 port) + if scheme is not None: + url += scheme + '://' + if auth is not None: + url += auth + '@' + if host is not None: + url += host + if port is not None: + url += ':' + str(port) + if path is not None: + url += path + if query is not None: + url += '?' + query + if fragment is not None: + url += '#' + fragment + + return url + + def __str__(self): + return self.url + + +def split_first(s, delims): + """ + Given a string and an iterable of delimiters, split on the first found + delimiter. Return two split parts and the matched delimiter. + + If not found, then the first part is the full input string. + + Example:: + + >>> split_first('foo/bar?baz', '?/=') + ('foo', 'bar?baz', '/') + >>> split_first('foo/bar?baz', '123') + ('foo/bar?baz', '', None) + + Scales linearly with number of delims. Not ideal for large number of delims. + """ + min_idx = None + min_delim = None + for d in delims: + idx = s.find(d) + if idx < 0: + continue + + if min_idx is None or idx < min_idx: + min_idx = idx + min_delim = d + + if min_idx is None or min_idx < 0: + return s, '', None + + return s[:min_idx], s[min_idx + 1:], min_delim + + +def parse_url(url): + """ + Given a url, return a parsed :class:`.Url` namedtuple. Best-effort is + performed to parse incomplete urls. Fields not provided will be None. + + Partly backwards-compatible with :mod:`urlparse`. + + Example:: + + >>> parse_url('http://google.com/mail/') + Url(scheme='http', host='google.com', port=None, path='/mail/', ...) + >>> parse_url('google.com:80') + Url(scheme=None, host='google.com', port=80, path=None, ...) + >>> parse_url('/foo?bar') + Url(scheme=None, host=None, port=None, path='/foo', query='bar', ...) + """ + + # While this code has overlap with stdlib's urlparse, it is much + # simplified for our needs and less annoying. + # Additionally, this implementations does silly things to be optimal + # on CPython. 
+ + if not url: + # Empty + return Url() + + scheme = None + auth = None + host = None + port = None + path = None + fragment = None + query = None + + # Scheme + if '://' in url: + scheme, url = url.split('://', 1) + + # Find the earliest Authority Terminator + # (http://tools.ietf.org/html/rfc3986#section-3.2) + url, path_, delim = split_first(url, ['/', '?', '#']) + + if delim: + # Reassemble the path + path = delim + path_ + + # Auth + if '@' in url: + # Last '@' denotes end of auth part + auth, url = url.rsplit('@', 1) + + # IPv6 + if url and url[0] == '[': + host, url = url.split(']', 1) + host += ']' + + # Port + if ':' in url: + _host, port = url.split(':', 1) + + if not host: + host = _host + + if port: + # If given, ports must be integers. No whitespace, no plus or + # minus prefixes, no non-integer digits such as ^2 (superscript). + if not port.isdigit(): + raise LocationParseError(url) + try: + port = int(port) + except ValueError: + raise LocationParseError(url) + else: + # Blank ports are cool, too. (rfc3986#section-3.2.3) + port = None + + elif not host and url: + host = url + + if not path: + return Url(scheme, auth, host, port, path, query, fragment) + + # Fragment + if '#' in path: + path, fragment = path.split('#', 1) + + # Query + if '?' in path: + path, query = path.split('?', 1) + + return Url(scheme, auth, host, port, path, query, fragment) + + +def get_host(url): + """ + Deprecated. Use :func:`parse_url` instead. + """ + p = parse_url(url) + return p.scheme or 'http', p.hostname, p.port diff --git a/env/lib/python3.6/site-packages/pip/_vendor/urllib3/util/wait.py b/env/lib/python3.6/site-packages/pip/_vendor/urllib3/util/wait.py new file mode 100644 index 0000000..fa686ef --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/urllib3/util/wait.py @@ -0,0 +1,153 @@ +import errno +from functools import partial +import select +import sys +try: + from time import monotonic +except ImportError: + from time import time as monotonic + +__all__ = ["NoWayToWaitForSocketError", "wait_for_read", "wait_for_write"] + + +class NoWayToWaitForSocketError(Exception): + pass + + +# How should we wait on sockets? +# +# There are two types of APIs you can use for waiting on sockets: the fancy +# modern stateful APIs like epoll/kqueue, and the older stateless APIs like +# select/poll. The stateful APIs are more efficient when you have a lots of +# sockets to keep track of, because you can set them up once and then use them +# lots of times. But we only ever want to wait on a single socket at a time +# and don't want to keep track of state, so the stateless APIs are actually +# more efficient. So we want to use select() or poll(). +# +# Now, how do we choose between select() and poll()? On traditional Unixes, +# select() has a strange calling convention that makes it slow, or fail +# altogether, for high-numbered file descriptors. The point of poll() is to fix +# that, so on Unixes, we prefer poll(). +# +# On Windows, there is no poll() (or at least Python doesn't provide a wrapper +# for it), but that's OK, because on Windows, select() doesn't have this +# strange calling convention; plain select() works fine. +# +# So: on Windows we use select(), and everywhere else we use poll(). We also +# fall back to select() in case poll() is somehow broken or missing. + +if sys.version_info >= (3, 5): + # Modern Python, that retries syscalls by default + def _retry_on_intr(fn, timeout): + return fn(timeout) +else: + # Old and broken Pythons. 
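+    # Before PEP 475 an interrupted call had to be retried by hand: the
+    # wrapper below re-invokes fn with the time remaining until the original
+    # deadline, swallowing EINTR on every attempt.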
+ def _retry_on_intr(fn, timeout): + if timeout is not None and timeout <= 0: + return fn(timeout) + + if timeout is None: + deadline = float("inf") + else: + deadline = monotonic() + timeout + + while True: + try: + return fn(timeout) + # OSError for 3 <= pyver < 3.5, select.error for pyver <= 2.7 + except (OSError, select.error) as e: + # 'e.args[0]' incantation works for both OSError and select.error + if e.args[0] != errno.EINTR: + raise + else: + timeout = deadline - monotonic() + if timeout < 0: + timeout = 0 + if timeout == float("inf"): + timeout = None + continue + + +def select_wait_for_socket(sock, read=False, write=False, timeout=None): + if not read and not write: + raise RuntimeError("must specify at least one of read=True, write=True") + rcheck = [] + wcheck = [] + if read: + rcheck.append(sock) + if write: + wcheck.append(sock) + # When doing a non-blocking connect, most systems signal success by + # marking the socket writable. Windows, though, signals success by marked + # it as "exceptional". We paper over the difference by checking the write + # sockets for both conditions. (The stdlib selectors module does the same + # thing.) + fn = partial(select.select, rcheck, wcheck, wcheck) + rready, wready, xready = _retry_on_intr(fn, timeout) + return bool(rready or wready or xready) + + +def poll_wait_for_socket(sock, read=False, write=False, timeout=None): + if not read and not write: + raise RuntimeError("must specify at least one of read=True, write=True") + mask = 0 + if read: + mask |= select.POLLIN + if write: + mask |= select.POLLOUT + poll_obj = select.poll() + poll_obj.register(sock, mask) + + # For some reason, poll() takes timeout in milliseconds + def do_poll(t): + if t is not None: + t *= 1000 + return poll_obj.poll(t) + + return bool(_retry_on_intr(do_poll, timeout)) + + +def null_wait_for_socket(*args, **kwargs): + raise NoWayToWaitForSocketError("no select-equivalent available") + + +def _have_working_poll(): + # Apparently some systems have a select.poll that fails as soon as you try + # to use it, either due to strange configuration or broken monkeypatching + # from libraries like eventlet/greenlet. + try: + poll_obj = select.poll() + poll_obj.poll(0) + except (AttributeError, OSError): + return False + else: + return True + + +def wait_for_socket(*args, **kwargs): + # We delay choosing which implementation to use until the first time we're + # called. We could do it at import time, but then we might make the wrong + # decision if someone goes wild with monkeypatching select.poll after + # we're imported. + global wait_for_socket + if _have_working_poll(): + wait_for_socket = poll_wait_for_socket + elif hasattr(select, "select"): + wait_for_socket = select_wait_for_socket + else: # Platform-specific: Appengine. + wait_for_socket = null_wait_for_socket + return wait_for_socket(*args, **kwargs) + + +def wait_for_read(sock, timeout=None): + """ Waits for reading to be available on a given socket. + Returns True if the socket is readable, or False if the timeout expired. + """ + return wait_for_socket(sock, read=True, timeout=timeout) + + +def wait_for_write(sock, timeout=None): + """ Waits for writing to be available on a given socket. + Returns True if the socket is readable, or False if the timeout expired. 
+ """ + return wait_for_socket(sock, write=True, timeout=timeout) diff --git a/env/lib/python3.6/site-packages/pip/_vendor/webencodings/labels.py b/env/lib/python3.6/site-packages/pip/_vendor/webencodings/labels.py new file mode 100644 index 0000000..29cbf91 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/webencodings/labels.py @@ -0,0 +1,231 @@ +""" + + webencodings.labels + ~~~~~~~~~~~~~~~~~~~ + + Map encoding labels to their name. + + :copyright: Copyright 2012 by Simon Sapin + :license: BSD, see LICENSE for details. + +""" + +# XXX Do not edit! +# This file is automatically generated by mklabels.py + +LABELS = { + 'unicode-1-1-utf-8': 'utf-8', + 'utf-8': 'utf-8', + 'utf8': 'utf-8', + '866': 'ibm866', + 'cp866': 'ibm866', + 'csibm866': 'ibm866', + 'ibm866': 'ibm866', + 'csisolatin2': 'iso-8859-2', + 'iso-8859-2': 'iso-8859-2', + 'iso-ir-101': 'iso-8859-2', + 'iso8859-2': 'iso-8859-2', + 'iso88592': 'iso-8859-2', + 'iso_8859-2': 'iso-8859-2', + 'iso_8859-2:1987': 'iso-8859-2', + 'l2': 'iso-8859-2', + 'latin2': 'iso-8859-2', + 'csisolatin3': 'iso-8859-3', + 'iso-8859-3': 'iso-8859-3', + 'iso-ir-109': 'iso-8859-3', + 'iso8859-3': 'iso-8859-3', + 'iso88593': 'iso-8859-3', + 'iso_8859-3': 'iso-8859-3', + 'iso_8859-3:1988': 'iso-8859-3', + 'l3': 'iso-8859-3', + 'latin3': 'iso-8859-3', + 'csisolatin4': 'iso-8859-4', + 'iso-8859-4': 'iso-8859-4', + 'iso-ir-110': 'iso-8859-4', + 'iso8859-4': 'iso-8859-4', + 'iso88594': 'iso-8859-4', + 'iso_8859-4': 'iso-8859-4', + 'iso_8859-4:1988': 'iso-8859-4', + 'l4': 'iso-8859-4', + 'latin4': 'iso-8859-4', + 'csisolatincyrillic': 'iso-8859-5', + 'cyrillic': 'iso-8859-5', + 'iso-8859-5': 'iso-8859-5', + 'iso-ir-144': 'iso-8859-5', + 'iso8859-5': 'iso-8859-5', + 'iso88595': 'iso-8859-5', + 'iso_8859-5': 'iso-8859-5', + 'iso_8859-5:1988': 'iso-8859-5', + 'arabic': 'iso-8859-6', + 'asmo-708': 'iso-8859-6', + 'csiso88596e': 'iso-8859-6', + 'csiso88596i': 'iso-8859-6', + 'csisolatinarabic': 'iso-8859-6', + 'ecma-114': 'iso-8859-6', + 'iso-8859-6': 'iso-8859-6', + 'iso-8859-6-e': 'iso-8859-6', + 'iso-8859-6-i': 'iso-8859-6', + 'iso-ir-127': 'iso-8859-6', + 'iso8859-6': 'iso-8859-6', + 'iso88596': 'iso-8859-6', + 'iso_8859-6': 'iso-8859-6', + 'iso_8859-6:1987': 'iso-8859-6', + 'csisolatingreek': 'iso-8859-7', + 'ecma-118': 'iso-8859-7', + 'elot_928': 'iso-8859-7', + 'greek': 'iso-8859-7', + 'greek8': 'iso-8859-7', + 'iso-8859-7': 'iso-8859-7', + 'iso-ir-126': 'iso-8859-7', + 'iso8859-7': 'iso-8859-7', + 'iso88597': 'iso-8859-7', + 'iso_8859-7': 'iso-8859-7', + 'iso_8859-7:1987': 'iso-8859-7', + 'sun_eu_greek': 'iso-8859-7', + 'csiso88598e': 'iso-8859-8', + 'csisolatinhebrew': 'iso-8859-8', + 'hebrew': 'iso-8859-8', + 'iso-8859-8': 'iso-8859-8', + 'iso-8859-8-e': 'iso-8859-8', + 'iso-ir-138': 'iso-8859-8', + 'iso8859-8': 'iso-8859-8', + 'iso88598': 'iso-8859-8', + 'iso_8859-8': 'iso-8859-8', + 'iso_8859-8:1988': 'iso-8859-8', + 'visual': 'iso-8859-8', + 'csiso88598i': 'iso-8859-8-i', + 'iso-8859-8-i': 'iso-8859-8-i', + 'logical': 'iso-8859-8-i', + 'csisolatin6': 'iso-8859-10', + 'iso-8859-10': 'iso-8859-10', + 'iso-ir-157': 'iso-8859-10', + 'iso8859-10': 'iso-8859-10', + 'iso885910': 'iso-8859-10', + 'l6': 'iso-8859-10', + 'latin6': 'iso-8859-10', + 'iso-8859-13': 'iso-8859-13', + 'iso8859-13': 'iso-8859-13', + 'iso885913': 'iso-8859-13', + 'iso-8859-14': 'iso-8859-14', + 'iso8859-14': 'iso-8859-14', + 'iso885914': 'iso-8859-14', + 'csisolatin9': 'iso-8859-15', + 'iso-8859-15': 'iso-8859-15', + 'iso8859-15': 'iso-8859-15', + 'iso885915': 
'iso-8859-15', + 'iso_8859-15': 'iso-8859-15', + 'l9': 'iso-8859-15', + 'iso-8859-16': 'iso-8859-16', + 'cskoi8r': 'koi8-r', + 'koi': 'koi8-r', + 'koi8': 'koi8-r', + 'koi8-r': 'koi8-r', + 'koi8_r': 'koi8-r', + 'koi8-u': 'koi8-u', + 'csmacintosh': 'macintosh', + 'mac': 'macintosh', + 'macintosh': 'macintosh', + 'x-mac-roman': 'macintosh', + 'dos-874': 'windows-874', + 'iso-8859-11': 'windows-874', + 'iso8859-11': 'windows-874', + 'iso885911': 'windows-874', + 'tis-620': 'windows-874', + 'windows-874': 'windows-874', + 'cp1250': 'windows-1250', + 'windows-1250': 'windows-1250', + 'x-cp1250': 'windows-1250', + 'cp1251': 'windows-1251', + 'windows-1251': 'windows-1251', + 'x-cp1251': 'windows-1251', + 'ansi_x3.4-1968': 'windows-1252', + 'ascii': 'windows-1252', + 'cp1252': 'windows-1252', + 'cp819': 'windows-1252', + 'csisolatin1': 'windows-1252', + 'ibm819': 'windows-1252', + 'iso-8859-1': 'windows-1252', + 'iso-ir-100': 'windows-1252', + 'iso8859-1': 'windows-1252', + 'iso88591': 'windows-1252', + 'iso_8859-1': 'windows-1252', + 'iso_8859-1:1987': 'windows-1252', + 'l1': 'windows-1252', + 'latin1': 'windows-1252', + 'us-ascii': 'windows-1252', + 'windows-1252': 'windows-1252', + 'x-cp1252': 'windows-1252', + 'cp1253': 'windows-1253', + 'windows-1253': 'windows-1253', + 'x-cp1253': 'windows-1253', + 'cp1254': 'windows-1254', + 'csisolatin5': 'windows-1254', + 'iso-8859-9': 'windows-1254', + 'iso-ir-148': 'windows-1254', + 'iso8859-9': 'windows-1254', + 'iso88599': 'windows-1254', + 'iso_8859-9': 'windows-1254', + 'iso_8859-9:1989': 'windows-1254', + 'l5': 'windows-1254', + 'latin5': 'windows-1254', + 'windows-1254': 'windows-1254', + 'x-cp1254': 'windows-1254', + 'cp1255': 'windows-1255', + 'windows-1255': 'windows-1255', + 'x-cp1255': 'windows-1255', + 'cp1256': 'windows-1256', + 'windows-1256': 'windows-1256', + 'x-cp1256': 'windows-1256', + 'cp1257': 'windows-1257', + 'windows-1257': 'windows-1257', + 'x-cp1257': 'windows-1257', + 'cp1258': 'windows-1258', + 'windows-1258': 'windows-1258', + 'x-cp1258': 'windows-1258', + 'x-mac-cyrillic': 'x-mac-cyrillic', + 'x-mac-ukrainian': 'x-mac-cyrillic', + 'chinese': 'gbk', + 'csgb2312': 'gbk', + 'csiso58gb231280': 'gbk', + 'gb2312': 'gbk', + 'gb_2312': 'gbk', + 'gb_2312-80': 'gbk', + 'gbk': 'gbk', + 'iso-ir-58': 'gbk', + 'x-gbk': 'gbk', + 'gb18030': 'gb18030', + 'hz-gb-2312': 'hz-gb-2312', + 'big5': 'big5', + 'big5-hkscs': 'big5', + 'cn-big5': 'big5', + 'csbig5': 'big5', + 'x-x-big5': 'big5', + 'cseucpkdfmtjapanese': 'euc-jp', + 'euc-jp': 'euc-jp', + 'x-euc-jp': 'euc-jp', + 'csiso2022jp': 'iso-2022-jp', + 'iso-2022-jp': 'iso-2022-jp', + 'csshiftjis': 'shift_jis', + 'ms_kanji': 'shift_jis', + 'shift-jis': 'shift_jis', + 'shift_jis': 'shift_jis', + 'sjis': 'shift_jis', + 'windows-31j': 'shift_jis', + 'x-sjis': 'shift_jis', + 'cseuckr': 'euc-kr', + 'csksc56011987': 'euc-kr', + 'euc-kr': 'euc-kr', + 'iso-ir-149': 'euc-kr', + 'korean': 'euc-kr', + 'ks_c_5601-1987': 'euc-kr', + 'ks_c_5601-1989': 'euc-kr', + 'ksc5601': 'euc-kr', + 'ksc_5601': 'euc-kr', + 'windows-949': 'euc-kr', + 'csiso2022kr': 'iso-2022-kr', + 'iso-2022-kr': 'iso-2022-kr', + 'utf-16be': 'utf-16be', + 'utf-16': 'utf-16le', + 'utf-16le': 'utf-16le', + 'x-user-defined': 'x-user-defined', +} diff --git a/env/lib/python3.6/site-packages/pip/_vendor/webencodings/mklabels.py b/env/lib/python3.6/site-packages/pip/_vendor/webencodings/mklabels.py new file mode 100644 index 0000000..295dc92 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/webencodings/mklabels.py @@ -0,0 +1,59 @@ 
+""" + + webencodings.mklabels + ~~~~~~~~~~~~~~~~~~~~~ + + Regenarate the webencodings.labels module. + + :copyright: Copyright 2012 by Simon Sapin + :license: BSD, see LICENSE for details. + +""" + +import json +try: + from urllib import urlopen +except ImportError: + from urllib.request import urlopen + + +def assert_lower(string): + assert string == string.lower() + return string + + +def generate(url): + parts = ['''\ +""" + + webencodings.labels + ~~~~~~~~~~~~~~~~~~~ + + Map encoding labels to their name. + + :copyright: Copyright 2012 by Simon Sapin + :license: BSD, see LICENSE for details. + +""" + +# XXX Do not edit! +# This file is automatically generated by mklabels.py + +LABELS = { +'''] + labels = [ + (repr(assert_lower(label)).lstrip('u'), + repr(encoding['name']).lstrip('u')) + for category in json.loads(urlopen(url).read().decode('ascii')) + for encoding in category['encodings'] + for label in encoding['labels']] + max_len = max(len(label) for label, name in labels) + parts.extend( + ' %s:%s %s,\n' % (label, ' ' * (max_len - len(label)), name) + for label, name in labels) + parts.append('}') + return ''.join(parts) + + +if __name__ == '__main__': + print(generate('http://encoding.spec.whatwg.org/encodings.json')) diff --git a/env/lib/python3.6/site-packages/pip/_vendor/webencodings/tests.py b/env/lib/python3.6/site-packages/pip/_vendor/webencodings/tests.py new file mode 100644 index 0000000..e12c10d --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/webencodings/tests.py @@ -0,0 +1,153 @@ +# coding: utf-8 +""" + + webencodings.tests + ~~~~~~~~~~~~~~~~~~ + + A basic test suite for Encoding. + + :copyright: Copyright 2012 by Simon Sapin + :license: BSD, see LICENSE for details. + +""" + +from __future__ import unicode_literals + +from . import (lookup, LABELS, decode, encode, iter_decode, iter_encode, + IncrementalDecoder, IncrementalEncoder, UTF8) + + +def assert_raises(exception, function, *args, **kwargs): + try: + function(*args, **kwargs) + except exception: + return + else: # pragma: no cover + raise AssertionError('Did not raise %s.' % exception) + + +def test_labels(): + assert lookup('utf-8').name == 'utf-8' + assert lookup('Utf-8').name == 'utf-8' + assert lookup('UTF-8').name == 'utf-8' + assert lookup('utf8').name == 'utf-8' + assert lookup('utf8').name == 'utf-8' + assert lookup('utf8 ').name == 'utf-8' + assert lookup(' \r\nutf8\t').name == 'utf-8' + assert lookup('u8') is None # Python label. + assert lookup('utf-8 ') is None # Non-ASCII white space. + + assert lookup('US-ASCII').name == 'windows-1252' + assert lookup('iso-8859-1').name == 'windows-1252' + assert lookup('latin1').name == 'windows-1252' + assert lookup('LATIN1').name == 'windows-1252' + assert lookup('latin-1') is None + assert lookup('LATİN1') is None # ASCII-only case insensitivity. 
+ + +def test_all_labels(): + for label in LABELS: + assert decode(b'', label) == ('', lookup(label)) + assert encode('', label) == b'' + for repeat in [0, 1, 12]: + output, _ = iter_decode([b''] * repeat, label) + assert list(output) == [] + assert list(iter_encode([''] * repeat, label)) == [] + decoder = IncrementalDecoder(label) + assert decoder.decode(b'') == '' + assert decoder.decode(b'', final=True) == '' + encoder = IncrementalEncoder(label) + assert encoder.encode('') == b'' + assert encoder.encode('', final=True) == b'' + # All encoding names are valid labels too: + for name in set(LABELS.values()): + assert lookup(name).name == name + + +def test_invalid_label(): + assert_raises(LookupError, decode, b'\xEF\xBB\xBF\xc3\xa9', 'invalid') + assert_raises(LookupError, encode, 'é', 'invalid') + assert_raises(LookupError, iter_decode, [], 'invalid') + assert_raises(LookupError, iter_encode, [], 'invalid') + assert_raises(LookupError, IncrementalDecoder, 'invalid') + assert_raises(LookupError, IncrementalEncoder, 'invalid') + + +def test_decode(): + assert decode(b'\x80', 'latin1') == ('€', lookup('latin1')) + assert decode(b'\x80', lookup('latin1')) == ('€', lookup('latin1')) + assert decode(b'\xc3\xa9', 'utf8') == ('é', lookup('utf8')) + assert decode(b'\xc3\xa9', UTF8) == ('é', lookup('utf8')) + assert decode(b'\xc3\xa9', 'ascii') == ('é', lookup('ascii')) + assert decode(b'\xEF\xBB\xBF\xc3\xa9', 'ascii') == ('é', lookup('utf8')) # UTF-8 with BOM + + assert decode(b'\xFE\xFF\x00\xe9', 'ascii') == ('é', lookup('utf-16be')) # UTF-16-BE with BOM + assert decode(b'\xFF\xFE\xe9\x00', 'ascii') == ('é', lookup('utf-16le')) # UTF-16-LE with BOM + assert decode(b'\xFE\xFF\xe9\x00', 'ascii') == ('\ue900', lookup('utf-16be')) + assert decode(b'\xFF\xFE\x00\xe9', 'ascii') == ('\ue900', lookup('utf-16le')) + + assert decode(b'\x00\xe9', 'UTF-16BE') == ('é', lookup('utf-16be')) + assert decode(b'\xe9\x00', 'UTF-16LE') == ('é', lookup('utf-16le')) + assert decode(b'\xe9\x00', 'UTF-16') == ('é', lookup('utf-16le')) + + assert decode(b'\xe9\x00', 'UTF-16BE') == ('\ue900', lookup('utf-16be')) + assert decode(b'\x00\xe9', 'UTF-16LE') == ('\ue900', lookup('utf-16le')) + assert decode(b'\x00\xe9', 'UTF-16') == ('\ue900', lookup('utf-16le')) + + +def test_encode(): + assert encode('é', 'latin1') == b'\xe9' + assert encode('é', 'utf8') == b'\xc3\xa9' + assert encode('é', 'utf8') == b'\xc3\xa9' + assert encode('é', 'utf-16') == b'\xe9\x00' + assert encode('é', 'utf-16le') == b'\xe9\x00' + assert encode('é', 'utf-16be') == b'\x00\xe9' + + +def test_iter_decode(): + def iter_decode_to_string(input, fallback_encoding): + output, _encoding = iter_decode(input, fallback_encoding) + return ''.join(output) + assert iter_decode_to_string([], 'latin1') == '' + assert iter_decode_to_string([b''], 'latin1') == '' + assert iter_decode_to_string([b'\xe9'], 'latin1') == 'é' + assert iter_decode_to_string([b'hello'], 'latin1') == 'hello' + assert iter_decode_to_string([b'he', b'llo'], 'latin1') == 'hello' + assert iter_decode_to_string([b'hell', b'o'], 'latin1') == 'hello' + assert iter_decode_to_string([b'\xc3\xa9'], 'latin1') == 'é' + assert iter_decode_to_string([b'\xEF\xBB\xBF\xc3\xa9'], 'latin1') == 'é' + assert iter_decode_to_string([ + b'\xEF\xBB\xBF', b'\xc3', b'\xa9'], 'latin1') == 'é' + assert iter_decode_to_string([ + b'\xEF\xBB\xBF', b'a', b'\xc3'], 'latin1') == 'a\uFFFD' + assert iter_decode_to_string([ + b'', b'\xEF', b'', b'', b'\xBB\xBF\xc3', b'\xa9'], 'latin1') == 'é' + assert 
iter_decode_to_string([b'\xEF\xBB\xBF'], 'latin1') == '' + assert iter_decode_to_string([b'\xEF\xBB'], 'latin1') == 'ï»' + assert iter_decode_to_string([b'\xFE\xFF\x00\xe9'], 'latin1') == 'é' + assert iter_decode_to_string([b'\xFF\xFE\xe9\x00'], 'latin1') == 'é' + assert iter_decode_to_string([ + b'', b'\xFF', b'', b'', b'\xFE\xe9', b'\x00'], 'latin1') == 'é' + assert iter_decode_to_string([ + b'', b'h\xe9', b'llo'], 'x-user-defined') == 'h\uF7E9llo' + + +def test_iter_encode(): + assert b''.join(iter_encode([], 'latin1')) == b'' + assert b''.join(iter_encode([''], 'latin1')) == b'' + assert b''.join(iter_encode(['é'], 'latin1')) == b'\xe9' + assert b''.join(iter_encode(['', 'é', '', ''], 'latin1')) == b'\xe9' + assert b''.join(iter_encode(['', 'é', '', ''], 'utf-16')) == b'\xe9\x00' + assert b''.join(iter_encode(['', 'é', '', ''], 'utf-16le')) == b'\xe9\x00' + assert b''.join(iter_encode(['', 'é', '', ''], 'utf-16be')) == b'\x00\xe9' + assert b''.join(iter_encode([ + '', 'h\uF7E9', '', 'llo'], 'x-user-defined')) == b'h\xe9llo' + + +def test_x_user_defined(): + encoded = b'2,\x0c\x0b\x1aO\xd9#\xcb\x0f\xc9\xbbt\xcf\xa8\xca' + decoded = '2,\x0c\x0b\x1aO\uf7d9#\uf7cb\x0f\uf7c9\uf7bbt\uf7cf\uf7a8\uf7ca' + encoded = b'aa' + decoded = 'aa' + assert decode(encoded, 'x-user-defined') == (decoded, lookup('x-user-defined')) + assert encode(decoded, 'x-user-defined') == encoded diff --git a/env/lib/python3.6/site-packages/pip/_vendor/webencodings/x_user_defined.py b/env/lib/python3.6/site-packages/pip/_vendor/webencodings/x_user_defined.py new file mode 100644 index 0000000..d16e326 --- /dev/null +++ b/env/lib/python3.6/site-packages/pip/_vendor/webencodings/x_user_defined.py @@ -0,0 +1,325 @@ +# coding: utf-8 +""" + + webencodings.x_user_defined + ~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + An implementation of the x-user-defined encoding. + + :copyright: Copyright 2012 by Simon Sapin + :license: BSD, see LICENSE for details. + +""" + +from __future__ import unicode_literals + +import codecs + + +### Codec APIs + +class Codec(codecs.Codec): + + def encode(self, input, errors='strict'): + return codecs.charmap_encode(input, errors, encoding_table) + + def decode(self, input, errors='strict'): + return codecs.charmap_decode(input, errors, decoding_table) + + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input, final=False): + return codecs.charmap_encode(input, self.errors, encoding_table)[0] + + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input, final=False): + return codecs.charmap_decode(input, self.errors, decoding_table)[0] + + +class StreamWriter(Codec, codecs.StreamWriter): + pass + + +class StreamReader(Codec, codecs.StreamReader): + pass + + +### encodings module API + +codec_info = codecs.CodecInfo( + name='x-user-defined', + encode=Codec().encode, + decode=Codec().decode, + incrementalencoder=IncrementalEncoder, + incrementaldecoder=IncrementalDecoder, + streamreader=StreamReader, + streamwriter=StreamWriter, +) + + +### Decoding Table + +# Python 3: +# for c in range(256): print(' %r' % chr(c if c < 128 else c + 0xF700)) +decoding_table = ( + '\x00' + '\x01' + '\x02' + '\x03' + '\x04' + '\x05' + '\x06' + '\x07' + '\x08' + '\t' + '\n' + '\x0b' + '\x0c' + '\r' + '\x0e' + '\x0f' + '\x10' + '\x11' + '\x12' + '\x13' + '\x14' + '\x15' + '\x16' + '\x17' + '\x18' + '\x19' + '\x1a' + '\x1b' + '\x1c' + '\x1d' + '\x1e' + '\x1f' + ' ' + '!' + '"' + '#' + '$' + '%' + '&' + "'" + '(' + ')' + '*' + '+' + ',' + '-' + '.' 
+ '/' + '0' + '1' + '2' + '3' + '4' + '5' + '6' + '7' + '8' + '9' + ':' + ';' + '<' + '=' + '>' + '?' + '@' + 'A' + 'B' + 'C' + 'D' + 'E' + 'F' + 'G' + 'H' + 'I' + 'J' + 'K' + 'L' + 'M' + 'N' + 'O' + 'P' + 'Q' + 'R' + 'S' + 'T' + 'U' + 'V' + 'W' + 'X' + 'Y' + 'Z' + '[' + '\\' + ']' + '^' + '_' + '`' + 'a' + 'b' + 'c' + 'd' + 'e' + 'f' + 'g' + 'h' + 'i' + 'j' + 'k' + 'l' + 'm' + 'n' + 'o' + 'p' + 'q' + 'r' + 's' + 't' + 'u' + 'v' + 'w' + 'x' + 'y' + 'z' + '{' + '|' + '}' + '~' + '\x7f' + '\uf780' + '\uf781' + '\uf782' + '\uf783' + '\uf784' + '\uf785' + '\uf786' + '\uf787' + '\uf788' + '\uf789' + '\uf78a' + '\uf78b' + '\uf78c' + '\uf78d' + '\uf78e' + '\uf78f' + '\uf790' + '\uf791' + '\uf792' + '\uf793' + '\uf794' + '\uf795' + '\uf796' + '\uf797' + '\uf798' + '\uf799' + '\uf79a' + '\uf79b' + '\uf79c' + '\uf79d' + '\uf79e' + '\uf79f' + '\uf7a0' + '\uf7a1' + '\uf7a2' + '\uf7a3' + '\uf7a4' + '\uf7a5' + '\uf7a6' + '\uf7a7' + '\uf7a8' + '\uf7a9' + '\uf7aa' + '\uf7ab' + '\uf7ac' + '\uf7ad' + '\uf7ae' + '\uf7af' + '\uf7b0' + '\uf7b1' + '\uf7b2' + '\uf7b3' + '\uf7b4' + '\uf7b5' + '\uf7b6' + '\uf7b7' + '\uf7b8' + '\uf7b9' + '\uf7ba' + '\uf7bb' + '\uf7bc' + '\uf7bd' + '\uf7be' + '\uf7bf' + '\uf7c0' + '\uf7c1' + '\uf7c2' + '\uf7c3' + '\uf7c4' + '\uf7c5' + '\uf7c6' + '\uf7c7' + '\uf7c8' + '\uf7c9' + '\uf7ca' + '\uf7cb' + '\uf7cc' + '\uf7cd' + '\uf7ce' + '\uf7cf' + '\uf7d0' + '\uf7d1' + '\uf7d2' + '\uf7d3' + '\uf7d4' + '\uf7d5' + '\uf7d6' + '\uf7d7' + '\uf7d8' + '\uf7d9' + '\uf7da' + '\uf7db' + '\uf7dc' + '\uf7dd' + '\uf7de' + '\uf7df' + '\uf7e0' + '\uf7e1' + '\uf7e2' + '\uf7e3' + '\uf7e4' + '\uf7e5' + '\uf7e6' + '\uf7e7' + '\uf7e8' + '\uf7e9' + '\uf7ea' + '\uf7eb' + '\uf7ec' + '\uf7ed' + '\uf7ee' + '\uf7ef' + '\uf7f0' + '\uf7f1' + '\uf7f2' + '\uf7f3' + '\uf7f4' + '\uf7f5' + '\uf7f6' + '\uf7f7' + '\uf7f8' + '\uf7f9' + '\uf7fa' + '\uf7fb' + '\uf7fc' + '\uf7fd' + '\uf7fe' + '\uf7ff' +) + +### Encoding table +encoding_table = codecs.charmap_build(decoding_table) diff --git a/env/lib/python3.6/site-packages/pkg_resources/_vendor/appdirs.py b/env/lib/python3.6/site-packages/pkg_resources/_vendor/appdirs.py new file mode 100644 index 0000000..f4dba09 --- /dev/null +++ b/env/lib/python3.6/site-packages/pkg_resources/_vendor/appdirs.py @@ -0,0 +1,552 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2005-2010 ActiveState Software Inc. +# Copyright (c) 2013 Eddy Petrișor + +"""Utilities for determining application-specific dirs. + +See <http://github.com/ActiveState/appdirs> for details and usage. +""" +# Dev Notes: +# - MSDN on where to store app data files: +# http://support.microsoft.com/default.aspx?scid=kb;en-us;310294#XSLTH3194121123120121120120 +# - Mac OS X: http://developer.apple.com/documentation/MacOSX/Conceptual/BPFileSystem/index.html +# - XDG spec for Un*x: http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html + +__version_info__ = (1, 4, 0) +__version__ = '.'.join(map(str, __version_info__)) + + +import sys +import os + +PY3 = sys.version_info[0] == 3 + +if PY3: + unicode = str + +if sys.platform.startswith('java'): + import platform + os_name = platform.java_ver()[3][0] + if os_name.startswith('Windows'): # "Windows XP", "Windows 7", etc. + system = 'win32' + elif os_name.startswith('Mac'): # "Mac OS X", etc. + system = 'darwin' + else: # "Linux", "SunOS", "FreeBSD", etc. + # Setting this to "linux2" is not ideal, but only Windows or Mac + # are actually checked for and the rest of the module expects + # *sys.platform* style strings. 
+ system = 'linux2' +else: + system = sys.platform + + + +def user_data_dir(appname=None, appauthor=None, version=None, roaming=False): + r"""Return full path to the user-specific data dir for this application. + + "appname" is the name of application. + If None, just the system directory is returned. + "appauthor" (only used on Windows) is the name of the + appauthor or distributing body for this application. Typically + it is the owning company name. This falls back to appname. You may + pass False to disable it. + "version" is an optional version path element to append to the + path. You might want to use this if you want multiple versions + of your app to be able to run independently. If used, this + would typically be "<major>.<minor>". + Only applied when appname is present. + "roaming" (boolean, default False) can be set True to use the Windows + roaming appdata directory. That means that for users on a Windows + network setup for roaming profiles, this user data will be + sync'd on login. See + <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx> + for a discussion of issues. + + Typical user data directories are: + Mac OS X: ~/Library/Application Support/<AppName> + Unix: ~/.local/share/<AppName> # or in $XDG_DATA_HOME, if defined + Win XP (not roaming): C:\Documents and Settings\<username>\Application Data\<AppAuthor>\<AppName> + Win XP (roaming): C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName> + Win 7 (not roaming): C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName> + Win 7 (roaming): C:\Users\<username>\AppData\Roaming\<AppAuthor>\<AppName> + + For Unix, we follow the XDG spec and support $XDG_DATA_HOME. + That means, by default "~/.local/share/<AppName>". + """ + if system == "win32": + if appauthor is None: + appauthor = appname + const = roaming and "CSIDL_APPDATA" or "CSIDL_LOCAL_APPDATA" + path = os.path.normpath(_get_win_folder(const)) + if appname: + if appauthor is not False: + path = os.path.join(path, appauthor, appname) + else: + path = os.path.join(path, appname) + elif system == 'darwin': + path = os.path.expanduser('~/Library/Application Support/') + if appname: + path = os.path.join(path, appname) + else: + path = os.getenv('XDG_DATA_HOME', os.path.expanduser("~/.local/share")) + if appname: + path = os.path.join(path, appname) + if appname and version: + path = os.path.join(path, version) + return path + + +def site_data_dir(appname=None, appauthor=None, version=None, multipath=False): + """Return full path to the user-shared data dir for this application. + + "appname" is the name of application. + If None, just the system directory is returned. + "appauthor" (only used on Windows) is the name of the + appauthor or distributing body for this application. Typically + it is the owning company name. This falls back to appname. You may + pass False to disable it. + "version" is an optional version path element to append to the + path. You might want to use this if you want multiple versions + of your app to be able to run independently. If used, this + would typically be "<major>.<minor>". + Only applied when appname is present. + "multipath" is an optional parameter only applicable to *nix + which indicates that the entire list of data dirs should be + returned. 
By default, the first item from XDG_DATA_DIRS is + returned, or '/usr/local/share/<AppName>', + if XDG_DATA_DIRS is not set + + Typical user data directories are: + Mac OS X: /Library/Application Support/<AppName> + Unix: /usr/local/share/<AppName> or /usr/share/<AppName> + Win XP: C:\Documents and Settings\All Users\Application Data\<AppAuthor>\<AppName> + Vista: (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.) + Win 7: C:\ProgramData\<AppAuthor>\<AppName> # Hidden, but writeable on Win 7. + + For Unix, this is using the $XDG_DATA_DIRS[0] default. + + WARNING: Do not use this on Windows. See the Vista-Fail note above for why. + """ + if system == "win32": + if appauthor is None: + appauthor = appname + path = os.path.normpath(_get_win_folder("CSIDL_COMMON_APPDATA")) + if appname: + if appauthor is not False: + path = os.path.join(path, appauthor, appname) + else: + path = os.path.join(path, appname) + elif system == 'darwin': + path = os.path.expanduser('/Library/Application Support') + if appname: + path = os.path.join(path, appname) + else: + # XDG default for $XDG_DATA_DIRS + # only first, if multipath is False + path = os.getenv('XDG_DATA_DIRS', + os.pathsep.join(['/usr/local/share', '/usr/share'])) + pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)] + if appname: + if version: + appname = os.path.join(appname, version) + pathlist = [os.sep.join([x, appname]) for x in pathlist] + + if multipath: + path = os.pathsep.join(pathlist) + else: + path = pathlist[0] + return path + + if appname and version: + path = os.path.join(path, version) + return path + + +def user_config_dir(appname=None, appauthor=None, version=None, roaming=False): + r"""Return full path to the user-specific config dir for this application. + + "appname" is the name of application. + If None, just the system directory is returned. + "appauthor" (only used on Windows) is the name of the + appauthor or distributing body for this application. Typically + it is the owning company name. This falls back to appname. You may + pass False to disable it. + "version" is an optional version path element to append to the + path. You might want to use this if you want multiple versions + of your app to be able to run independently. If used, this + would typically be "<major>.<minor>". + Only applied when appname is present. + "roaming" (boolean, default False) can be set True to use the Windows + roaming appdata directory. That means that for users on a Windows + network setup for roaming profiles, this user data will be + sync'd on login. See + <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx> + for a discussion of issues. + + Typical user data directories are: + Mac OS X: same as user_data_dir + Unix: ~/.config/<AppName> # or in $XDG_CONFIG_HOME, if defined + Win *: same as user_data_dir + + For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME. + That means, by deafult "~/.config/<AppName>". + """ + if system in ["win32", "darwin"]: + path = user_data_dir(appname, appauthor, None, roaming) + else: + path = os.getenv('XDG_CONFIG_HOME', os.path.expanduser("~/.config")) + if appname: + path = os.path.join(path, appname) + if appname and version: + path = os.path.join(path, version) + return path + + +def site_config_dir(appname=None, appauthor=None, version=None, multipath=False): + """Return full path to the user-shared data dir for this application. + + "appname" is the name of application. + If None, just the system directory is returned. 
+ "appauthor" (only used on Windows) is the name of the + appauthor or distributing body for this application. Typically + it is the owning company name. This falls back to appname. You may + pass False to disable it. + "version" is an optional version path element to append to the + path. You might want to use this if you want multiple versions + of your app to be able to run independently. If used, this + would typically be "<major>.<minor>". + Only applied when appname is present. + "multipath" is an optional parameter only applicable to *nix + which indicates that the entire list of config dirs should be + returned. By default, the first item from XDG_CONFIG_DIRS is + returned, or '/etc/xdg/<AppName>', if XDG_CONFIG_DIRS is not set + + Typical user data directories are: + Mac OS X: same as site_data_dir + Unix: /etc/xdg/<AppName> or $XDG_CONFIG_DIRS[i]/<AppName> for each value in + $XDG_CONFIG_DIRS + Win *: same as site_data_dir + Vista: (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.) + + For Unix, this is using the $XDG_CONFIG_DIRS[0] default, if multipath=False + + WARNING: Do not use this on Windows. See the Vista-Fail note above for why. + """ + if system in ["win32", "darwin"]: + path = site_data_dir(appname, appauthor) + if appname and version: + path = os.path.join(path, version) + else: + # XDG default for $XDG_CONFIG_DIRS + # only first, if multipath is False + path = os.getenv('XDG_CONFIG_DIRS', '/etc/xdg') + pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)] + if appname: + if version: + appname = os.path.join(appname, version) + pathlist = [os.sep.join([x, appname]) for x in pathlist] + + if multipath: + path = os.pathsep.join(pathlist) + else: + path = pathlist[0] + return path + + +def user_cache_dir(appname=None, appauthor=None, version=None, opinion=True): + r"""Return full path to the user-specific cache dir for this application. + + "appname" is the name of application. + If None, just the system directory is returned. + "appauthor" (only used on Windows) is the name of the + appauthor or distributing body for this application. Typically + it is the owning company name. This falls back to appname. You may + pass False to disable it. + "version" is an optional version path element to append to the + path. You might want to use this if you want multiple versions + of your app to be able to run independently. If used, this + would typically be "<major>.<minor>". + Only applied when appname is present. + "opinion" (boolean) can be False to disable the appending of + "Cache" to the base app data dir for Windows. See + discussion below. + + Typical user cache directories are: + Mac OS X: ~/Library/Caches/<AppName> + Unix: ~/.cache/<AppName> (XDG default) + Win XP: C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Cache + Vista: C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Cache + + On Windows the only suggestion in the MSDN docs is that local settings go in + the `CSIDL_LOCAL_APPDATA` directory. This is identical to the non-roaming + app data dir (the default returned by `user_data_dir` above). Apps typically + put cache data somewhere *under* the given dir here. Some examples: + ...\Mozilla\Firefox\Profiles\<ProfileName>\Cache + ...\Acme\SuperApp\Cache\1.0 + OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value. + This can be disabled with the `opinion=False` option. 
+ """ + if system == "win32": + if appauthor is None: + appauthor = appname + path = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA")) + if appname: + if appauthor is not False: + path = os.path.join(path, appauthor, appname) + else: + path = os.path.join(path, appname) + if opinion: + path = os.path.join(path, "Cache") + elif system == 'darwin': + path = os.path.expanduser('~/Library/Caches') + if appname: + path = os.path.join(path, appname) + else: + path = os.getenv('XDG_CACHE_HOME', os.path.expanduser('~/.cache')) + if appname: + path = os.path.join(path, appname) + if appname and version: + path = os.path.join(path, version) + return path + + +def user_log_dir(appname=None, appauthor=None, version=None, opinion=True): + r"""Return full path to the user-specific log dir for this application. + + "appname" is the name of application. + If None, just the system directory is returned. + "appauthor" (only used on Windows) is the name of the + appauthor or distributing body for this application. Typically + it is the owning company name. This falls back to appname. You may + pass False to disable it. + "version" is an optional version path element to append to the + path. You might want to use this if you want multiple versions + of your app to be able to run independently. If used, this + would typically be "<major>.<minor>". + Only applied when appname is present. + "opinion" (boolean) can be False to disable the appending of + "Logs" to the base app data dir for Windows, and "log" to the + base cache dir for Unix. See discussion below. + + Typical user cache directories are: + Mac OS X: ~/Library/Logs/<AppName> + Unix: ~/.cache/<AppName>/log # or under $XDG_CACHE_HOME if defined + Win XP: C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Logs + Vista: C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Logs + + On Windows the only suggestion in the MSDN docs is that local settings + go in the `CSIDL_LOCAL_APPDATA` directory. (Note: I'm interested in + examples of what some windows apps use for a logs dir.) + + OPINION: This function appends "Logs" to the `CSIDL_LOCAL_APPDATA` + value for Windows and appends "log" to the user cache dir for Unix. + This can be disabled with the `opinion=False` option. 
+ """ + if system == "darwin": + path = os.path.join( + os.path.expanduser('~/Library/Logs'), + appname) + elif system == "win32": + path = user_data_dir(appname, appauthor, version) + version = False + if opinion: + path = os.path.join(path, "Logs") + else: + path = user_cache_dir(appname, appauthor, version) + version = False + if opinion: + path = os.path.join(path, "log") + if appname and version: + path = os.path.join(path, version) + return path + + +class AppDirs(object): + """Convenience wrapper for getting application dirs.""" + def __init__(self, appname, appauthor=None, version=None, roaming=False, + multipath=False): + self.appname = appname + self.appauthor = appauthor + self.version = version + self.roaming = roaming + self.multipath = multipath + + @property + def user_data_dir(self): + return user_data_dir(self.appname, self.appauthor, + version=self.version, roaming=self.roaming) + + @property + def site_data_dir(self): + return site_data_dir(self.appname, self.appauthor, + version=self.version, multipath=self.multipath) + + @property + def user_config_dir(self): + return user_config_dir(self.appname, self.appauthor, + version=self.version, roaming=self.roaming) + + @property + def site_config_dir(self): + return site_config_dir(self.appname, self.appauthor, + version=self.version, multipath=self.multipath) + + @property + def user_cache_dir(self): + return user_cache_dir(self.appname, self.appauthor, + version=self.version) + + @property + def user_log_dir(self): + return user_log_dir(self.appname, self.appauthor, + version=self.version) + + +#---- internal support stuff + +def _get_win_folder_from_registry(csidl_name): + """This is a fallback technique at best. I'm not sure if using the + registry for this guarantees us the correct answer for all CSIDL_* + names. + """ + import _winreg + + shell_folder_name = { + "CSIDL_APPDATA": "AppData", + "CSIDL_COMMON_APPDATA": "Common AppData", + "CSIDL_LOCAL_APPDATA": "Local AppData", + }[csidl_name] + + key = _winreg.OpenKey( + _winreg.HKEY_CURRENT_USER, + r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders" + ) + dir, type = _winreg.QueryValueEx(key, shell_folder_name) + return dir + + +def _get_win_folder_with_pywin32(csidl_name): + from win32com.shell import shellcon, shell + dir = shell.SHGetFolderPath(0, getattr(shellcon, csidl_name), 0, 0) + # Try to make this a unicode path because SHGetFolderPath does + # not return unicode strings when there is unicode data in the + # path. + try: + dir = unicode(dir) + + # Downgrade to short path name if have highbit chars. See + # <http://bugs.activestate.com/show_bug.cgi?id=85099>. + has_high_char = False + for c in dir: + if ord(c) > 255: + has_high_char = True + break + if has_high_char: + try: + import win32api + dir = win32api.GetShortPathName(dir) + except ImportError: + pass + except UnicodeError: + pass + return dir + + +def _get_win_folder_with_ctypes(csidl_name): + import ctypes + + csidl_const = { + "CSIDL_APPDATA": 26, + "CSIDL_COMMON_APPDATA": 35, + "CSIDL_LOCAL_APPDATA": 28, + }[csidl_name] + + buf = ctypes.create_unicode_buffer(1024) + ctypes.windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf) + + # Downgrade to short path name if have highbit chars. See + # <http://bugs.activestate.com/show_bug.cgi?id=85099>. 
+ has_high_char = False + for c in buf: + if ord(c) > 255: + has_high_char = True + break + if has_high_char: + buf2 = ctypes.create_unicode_buffer(1024) + if ctypes.windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024): + buf = buf2 + + return buf.value + +def _get_win_folder_with_jna(csidl_name): + import array + from com.sun import jna + from com.sun.jna.platform import win32 + + buf_size = win32.WinDef.MAX_PATH * 2 + buf = array.zeros('c', buf_size) + shell = win32.Shell32.INSTANCE + shell.SHGetFolderPath(None, getattr(win32.ShlObj, csidl_name), None, win32.ShlObj.SHGFP_TYPE_CURRENT, buf) + dir = jna.Native.toString(buf.tostring()).rstrip("\0") + + # Downgrade to short path name if have highbit chars. See + # <http://bugs.activestate.com/show_bug.cgi?id=85099>. + has_high_char = False + for c in dir: + if ord(c) > 255: + has_high_char = True + break + if has_high_char: + buf = array.zeros('c', buf_size) + kernel = win32.Kernel32.INSTANCE + if kernal.GetShortPathName(dir, buf, buf_size): + dir = jna.Native.toString(buf.tostring()).rstrip("\0") + + return dir + +if system == "win32": + try: + import win32com.shell + _get_win_folder = _get_win_folder_with_pywin32 + except ImportError: + try: + from ctypes import windll + _get_win_folder = _get_win_folder_with_ctypes + except ImportError: + try: + import com.sun.jna + _get_win_folder = _get_win_folder_with_jna + except ImportError: + _get_win_folder = _get_win_folder_from_registry + + +#---- self test code + +if __name__ == "__main__": + appname = "MyApp" + appauthor = "MyCompany" + + props = ("user_data_dir", "site_data_dir", + "user_config_dir", "site_config_dir", + "user_cache_dir", "user_log_dir") + + print("-- app dirs (with optional 'version')") + dirs = AppDirs(appname, appauthor, version="1.0") + for prop in props: + print("%s: %s" % (prop, getattr(dirs, prop))) + + print("\n-- app dirs (without optional 'version')") + dirs = AppDirs(appname, appauthor) + for prop in props: + print("%s: %s" % (prop, getattr(dirs, prop))) + + print("\n-- app dirs (without optional 'appauthor')") + dirs = AppDirs(appname) + for prop in props: + print("%s: %s" % (prop, getattr(dirs, prop))) + + print("\n-- app dirs (with disabled 'appauthor')") + dirs = AppDirs(appname, appauthor=False) + for prop in props: + print("%s: %s" % (prop, getattr(dirs, prop))) diff --git a/env/lib/python3.6/site-packages/pkg_resources/_vendor/packaging/_compat.py b/env/lib/python3.6/site-packages/pkg_resources/_vendor/packaging/_compat.py new file mode 100644 index 0000000..210bb80 --- /dev/null +++ b/env/lib/python3.6/site-packages/pkg_resources/_vendor/packaging/_compat.py @@ -0,0 +1,30 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +from __future__ import absolute_import, division, print_function + +import sys + + +PY2 = sys.version_info[0] == 2 +PY3 = sys.version_info[0] == 3 + +# flake8: noqa + +if PY3: + string_types = str, +else: + string_types = basestring, + + +def with_metaclass(meta, *bases): + """ + Create a base class with a metaclass. + """ + # This requires a bit of explanation: the basic idea is to make a dummy + # metaclass for one level of class instantiation that replaces itself with + # the actual metaclass. 
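# (Illustrative aside, not part of the vendored _compat.py, whose with_metaclass()
# body continues right below: a minimal sketch of how the helper is typically
# used; Meta, Base and Widget are hypothetical names.)
#
# class Meta(type):
#     pass
#
# class Base(object):
#     pass
#
# class Widget(with_metaclass(Meta, Base)):
#     pass
#
# # Widget ends up created by Meta and inheriting from Base on Python 2 and 3.
# assert type(Widget) is Meta and issubclass(Widget, Base)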
+ class metaclass(meta): + def __new__(cls, name, this_bases, d): + return meta(name, bases, d) + return type.__new__(metaclass, 'temporary_class', (), {}) diff --git a/env/lib/python3.6/site-packages/pkg_resources/_vendor/packaging/_structures.py b/env/lib/python3.6/site-packages/pkg_resources/_vendor/packaging/_structures.py new file mode 100644 index 0000000..ccc2786 --- /dev/null +++ b/env/lib/python3.6/site-packages/pkg_resources/_vendor/packaging/_structures.py @@ -0,0 +1,68 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +from __future__ import absolute_import, division, print_function + + +class Infinity(object): + + def __repr__(self): + return "Infinity" + + def __hash__(self): + return hash(repr(self)) + + def __lt__(self, other): + return False + + def __le__(self, other): + return False + + def __eq__(self, other): + return isinstance(other, self.__class__) + + def __ne__(self, other): + return not isinstance(other, self.__class__) + + def __gt__(self, other): + return True + + def __ge__(self, other): + return True + + def __neg__(self): + return NegativeInfinity + +Infinity = Infinity() + + +class NegativeInfinity(object): + + def __repr__(self): + return "-Infinity" + + def __hash__(self): + return hash(repr(self)) + + def __lt__(self, other): + return True + + def __le__(self, other): + return True + + def __eq__(self, other): + return isinstance(other, self.__class__) + + def __ne__(self, other): + return not isinstance(other, self.__class__) + + def __gt__(self, other): + return False + + def __ge__(self, other): + return False + + def __neg__(self): + return Infinity + +NegativeInfinity = NegativeInfinity() diff --git a/env/lib/python3.6/site-packages/pkg_resources/_vendor/packaging/markers.py b/env/lib/python3.6/site-packages/pkg_resources/_vendor/packaging/markers.py new file mode 100644 index 0000000..c5d29cd --- /dev/null +++ b/env/lib/python3.6/site-packages/pkg_resources/_vendor/packaging/markers.py @@ -0,0 +1,287 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +from __future__ import absolute_import, division, print_function + +import operator +import os +import platform +import sys + +from pkg_resources.extern.pyparsing import ParseException, ParseResults, stringStart, stringEnd +from pkg_resources.extern.pyparsing import ZeroOrMore, Group, Forward, QuotedString +from pkg_resources.extern.pyparsing import Literal as L # noqa + +from ._compat import string_types +from .specifiers import Specifier, InvalidSpecifier + + +__all__ = [ + "InvalidMarker", "UndefinedComparison", "UndefinedEnvironmentName", + "Marker", "default_environment", +] + + +class InvalidMarker(ValueError): + """ + An invalid marker was found, users should refer to PEP 508. + """ + + +class UndefinedComparison(ValueError): + """ + An invalid operation was attempted on a value that doesn't support it. + """ + + +class UndefinedEnvironmentName(ValueError): + """ + A name was attempted to be used that does not exist inside of the + environment. 
+ """ + + +class Node(object): + + def __init__(self, value): + self.value = value + + def __str__(self): + return str(self.value) + + def __repr__(self): + return "<{0}({1!r})>".format(self.__class__.__name__, str(self)) + + +class Variable(Node): + pass + + +class Value(Node): + pass + + +VARIABLE = ( + L("implementation_version") | + L("platform_python_implementation") | + L("implementation_name") | + L("python_full_version") | + L("platform_release") | + L("platform_version") | + L("platform_machine") | + L("platform_system") | + L("python_version") | + L("sys_platform") | + L("os_name") | + L("os.name") | # PEP-345 + L("sys.platform") | # PEP-345 + L("platform.version") | # PEP-345 + L("platform.machine") | # PEP-345 + L("platform.python_implementation") | # PEP-345 + L("python_implementation") | # undocumented setuptools legacy + L("extra") +) +ALIASES = { + 'os.name': 'os_name', + 'sys.platform': 'sys_platform', + 'platform.version': 'platform_version', + 'platform.machine': 'platform_machine', + 'platform.python_implementation': 'platform_python_implementation', + 'python_implementation': 'platform_python_implementation' +} +VARIABLE.setParseAction(lambda s, l, t: Variable(ALIASES.get(t[0], t[0]))) + +VERSION_CMP = ( + L("===") | + L("==") | + L(">=") | + L("<=") | + L("!=") | + L("~=") | + L(">") | + L("<") +) + +MARKER_OP = VERSION_CMP | L("not in") | L("in") + +MARKER_VALUE = QuotedString("'") | QuotedString('"') +MARKER_VALUE.setParseAction(lambda s, l, t: Value(t[0])) + +BOOLOP = L("and") | L("or") + +MARKER_VAR = VARIABLE | MARKER_VALUE + +MARKER_ITEM = Group(MARKER_VAR + MARKER_OP + MARKER_VAR) +MARKER_ITEM.setParseAction(lambda s, l, t: tuple(t[0])) + +LPAREN = L("(").suppress() +RPAREN = L(")").suppress() + +MARKER_EXPR = Forward() +MARKER_ATOM = MARKER_ITEM | Group(LPAREN + MARKER_EXPR + RPAREN) +MARKER_EXPR << MARKER_ATOM + ZeroOrMore(BOOLOP + MARKER_EXPR) + +MARKER = stringStart + MARKER_EXPR + stringEnd + + +def _coerce_parse_result(results): + if isinstance(results, ParseResults): + return [_coerce_parse_result(i) for i in results] + else: + return results + + +def _format_marker(marker, first=True): + assert isinstance(marker, (list, tuple, string_types)) + + # Sometimes we have a structure like [[...]] which is a single item list + # where the single item is itself it's own list. In that case we want skip + # the rest of this function so that we don't get extraneous () on the + # outside. 
+ if (isinstance(marker, list) and len(marker) == 1 and + isinstance(marker[0], (list, tuple))): + return _format_marker(marker[0]) + + if isinstance(marker, list): + inner = (_format_marker(m, first=False) for m in marker) + if first: + return " ".join(inner) + else: + return "(" + " ".join(inner) + ")" + elif isinstance(marker, tuple): + return '{0} {1} "{2}"'.format(*marker) + else: + return marker + + +_operators = { + "in": lambda lhs, rhs: lhs in rhs, + "not in": lambda lhs, rhs: lhs not in rhs, + "<": operator.lt, + "<=": operator.le, + "==": operator.eq, + "!=": operator.ne, + ">=": operator.ge, + ">": operator.gt, +} + + +def _eval_op(lhs, op, rhs): + try: + spec = Specifier("".join([op, rhs])) + except InvalidSpecifier: + pass + else: + return spec.contains(lhs) + + oper = _operators.get(op) + if oper is None: + raise UndefinedComparison( + "Undefined {0!r} on {1!r} and {2!r}.".format(op, lhs, rhs) + ) + + return oper(lhs, rhs) + + +_undefined = object() + + +def _get_env(environment, name): + value = environment.get(name, _undefined) + + if value is _undefined: + raise UndefinedEnvironmentName( + "{0!r} does not exist in evaluation environment.".format(name) + ) + + return value + + +def _evaluate_markers(markers, environment): + groups = [[]] + + for marker in markers: + assert isinstance(marker, (list, tuple, string_types)) + + if isinstance(marker, list): + groups[-1].append(_evaluate_markers(marker, environment)) + elif isinstance(marker, tuple): + lhs, op, rhs = marker + + if isinstance(lhs, Variable): + lhs_value = _get_env(environment, lhs.value) + rhs_value = rhs.value + else: + lhs_value = lhs.value + rhs_value = _get_env(environment, rhs.value) + + groups[-1].append(_eval_op(lhs_value, op, rhs_value)) + else: + assert marker in ["and", "or"] + if marker == "or": + groups.append([]) + + return any(all(item) for item in groups) + + +def format_full_version(info): + version = '{0.major}.{0.minor}.{0.micro}'.format(info) + kind = info.releaselevel + if kind != 'final': + version += kind[0] + str(info.serial) + return version + + +def default_environment(): + if hasattr(sys, 'implementation'): + iver = format_full_version(sys.implementation.version) + implementation_name = sys.implementation.name + else: + iver = '0' + implementation_name = '' + + return { + "implementation_name": implementation_name, + "implementation_version": iver, + "os_name": os.name, + "platform_machine": platform.machine(), + "platform_release": platform.release(), + "platform_system": platform.system(), + "platform_version": platform.version(), + "python_full_version": platform.python_version(), + "platform_python_implementation": platform.python_implementation(), + "python_version": platform.python_version()[:3], + "sys_platform": sys.platform, + } + + +class Marker(object): + + def __init__(self, marker): + try: + self._markers = _coerce_parse_result(MARKER.parseString(marker)) + except ParseException as e: + err_str = "Invalid marker: {0!r}, parse error at {1!r}".format( + marker, marker[e.loc:e.loc + 8]) + raise InvalidMarker(err_str) + + def __str__(self): + return _format_marker(self._markers) + + def __repr__(self): + return "<Marker({0!r})>".format(str(self)) + + def evaluate(self, environment=None): + """Evaluate a marker. + + Return the boolean from evaluating the given marker against the + environment. environment is an optional argument to override all or + part of the determined environment. + + The environment is determined from the current Python process. 
+ """ + current_environment = default_environment() + if environment is not None: + current_environment.update(environment) + + return _evaluate_markers(self._markers, current_environment) diff --git a/env/lib/python3.6/site-packages/pkg_resources/_vendor/packaging/requirements.py b/env/lib/python3.6/site-packages/pkg_resources/_vendor/packaging/requirements.py new file mode 100644 index 0000000..0c8c4a3 --- /dev/null +++ b/env/lib/python3.6/site-packages/pkg_resources/_vendor/packaging/requirements.py @@ -0,0 +1,127 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +from __future__ import absolute_import, division, print_function + +import string +import re + +from pkg_resources.extern.pyparsing import stringStart, stringEnd, originalTextFor, ParseException +from pkg_resources.extern.pyparsing import ZeroOrMore, Word, Optional, Regex, Combine +from pkg_resources.extern.pyparsing import Literal as L # noqa +from pkg_resources.extern.six.moves.urllib import parse as urlparse + +from .markers import MARKER_EXPR, Marker +from .specifiers import LegacySpecifier, Specifier, SpecifierSet + + +class InvalidRequirement(ValueError): + """ + An invalid requirement was found, users should refer to PEP 508. + """ + + +ALPHANUM = Word(string.ascii_letters + string.digits) + +LBRACKET = L("[").suppress() +RBRACKET = L("]").suppress() +LPAREN = L("(").suppress() +RPAREN = L(")").suppress() +COMMA = L(",").suppress() +SEMICOLON = L(";").suppress() +AT = L("@").suppress() + +PUNCTUATION = Word("-_.") +IDENTIFIER_END = ALPHANUM | (ZeroOrMore(PUNCTUATION) + ALPHANUM) +IDENTIFIER = Combine(ALPHANUM + ZeroOrMore(IDENTIFIER_END)) + +NAME = IDENTIFIER("name") +EXTRA = IDENTIFIER + +URI = Regex(r'[^ ]+')("url") +URL = (AT + URI) + +EXTRAS_LIST = EXTRA + ZeroOrMore(COMMA + EXTRA) +EXTRAS = (LBRACKET + Optional(EXTRAS_LIST) + RBRACKET)("extras") + +VERSION_PEP440 = Regex(Specifier._regex_str, re.VERBOSE | re.IGNORECASE) +VERSION_LEGACY = Regex(LegacySpecifier._regex_str, re.VERBOSE | re.IGNORECASE) + +VERSION_ONE = VERSION_PEP440 ^ VERSION_LEGACY +VERSION_MANY = Combine(VERSION_ONE + ZeroOrMore(COMMA + VERSION_ONE), + joinString=",", adjacent=False)("_raw_spec") +_VERSION_SPEC = Optional(((LPAREN + VERSION_MANY + RPAREN) | VERSION_MANY)) +_VERSION_SPEC.setParseAction(lambda s, l, t: t._raw_spec or '') + +VERSION_SPEC = originalTextFor(_VERSION_SPEC)("specifier") +VERSION_SPEC.setParseAction(lambda s, l, t: t[1]) + +MARKER_EXPR = originalTextFor(MARKER_EXPR())("marker") +MARKER_EXPR.setParseAction( + lambda s, l, t: Marker(s[t._original_start:t._original_end]) +) +MARKER_SEPERATOR = SEMICOLON +MARKER = MARKER_SEPERATOR + MARKER_EXPR + +VERSION_AND_MARKER = VERSION_SPEC + Optional(MARKER) +URL_AND_MARKER = URL + Optional(MARKER) + +NAMED_REQUIREMENT = \ + NAME + Optional(EXTRAS) + (URL_AND_MARKER | VERSION_AND_MARKER) + +REQUIREMENT = stringStart + NAMED_REQUIREMENT + stringEnd + + +class Requirement(object): + """Parse a requirement. + + Parse a given requirement string into its parts, such as name, specifier, + URL, and extras. Raises InvalidRequirement on a badly-formed requirement + string. + """ + + # TODO: Can we test whether something is contained within a requirement? + # If so how do we do that? Do we need to test against the _name_ of + # the thing as well as the version? What about the markers? + # TODO: Can we normalize the name and extra name? 
+ + def __init__(self, requirement_string): + try: + req = REQUIREMENT.parseString(requirement_string) + except ParseException as e: + raise InvalidRequirement( + "Invalid requirement, parse error at \"{0!r}\"".format( + requirement_string[e.loc:e.loc + 8])) + + self.name = req.name + if req.url: + parsed_url = urlparse.urlparse(req.url) + if not (parsed_url.scheme and parsed_url.netloc) or ( + not parsed_url.scheme and not parsed_url.netloc): + raise InvalidRequirement("Invalid URL given") + self.url = req.url + else: + self.url = None + self.extras = set(req.extras.asList() if req.extras else []) + self.specifier = SpecifierSet(req.specifier) + self.marker = req.marker if req.marker else None + + def __str__(self): + parts = [self.name] + + if self.extras: + parts.append("[{0}]".format(",".join(sorted(self.extras)))) + + if self.specifier: + parts.append(str(self.specifier)) + + if self.url: + parts.append("@ {0}".format(self.url)) + + if self.marker: + parts.append("; {0}".format(self.marker)) + + return "".join(parts) + + def __repr__(self): + return "<Requirement({0!r})>".format(str(self)) diff --git a/env/lib/python3.6/site-packages/pkg_resources/_vendor/packaging/specifiers.py b/env/lib/python3.6/site-packages/pkg_resources/_vendor/packaging/specifiers.py new file mode 100644 index 0000000..7f5a76c --- /dev/null +++ b/env/lib/python3.6/site-packages/pkg_resources/_vendor/packaging/specifiers.py @@ -0,0 +1,774 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +from __future__ import absolute_import, division, print_function + +import abc +import functools +import itertools +import re + +from ._compat import string_types, with_metaclass +from .version import Version, LegacyVersion, parse + + +class InvalidSpecifier(ValueError): + """ + An invalid specifier was found, users should refer to PEP 440. + """ + + +class BaseSpecifier(with_metaclass(abc.ABCMeta, object)): + + @abc.abstractmethod + def __str__(self): + """ + Returns the str representation of this Specifier like object. This + should be representative of the Specifier itself. + """ + + @abc.abstractmethod + def __hash__(self): + """ + Returns a hash value for this Specifier like object. + """ + + @abc.abstractmethod + def __eq__(self, other): + """ + Returns a boolean representing whether or not the two Specifier like + objects are equal. + """ + + @abc.abstractmethod + def __ne__(self, other): + """ + Returns a boolean representing whether or not the two Specifier like + objects are not equal. + """ + + @abc.abstractproperty + def prereleases(self): + """ + Returns whether or not pre-releases as a whole are allowed by this + specifier. + """ + + @prereleases.setter + def prereleases(self, value): + """ + Sets whether or not pre-releases as a whole are allowed by this + specifier. + """ + + @abc.abstractmethod + def contains(self, item, prereleases=None): + """ + Determines if the given item is contained within this specifier. + """ + + @abc.abstractmethod + def filter(self, iterable, prereleases=None): + """ + Takes an iterable of items and filters them so that only items which + are contained within this specifier are allowed in it. 
+ """ + + +class _IndividualSpecifier(BaseSpecifier): + + _operators = {} + + def __init__(self, spec="", prereleases=None): + match = self._regex.search(spec) + if not match: + raise InvalidSpecifier("Invalid specifier: '{0}'".format(spec)) + + self._spec = ( + match.group("operator").strip(), + match.group("version").strip(), + ) + + # Store whether or not this Specifier should accept prereleases + self._prereleases = prereleases + + def __repr__(self): + pre = ( + ", prereleases={0!r}".format(self.prereleases) + if self._prereleases is not None + else "" + ) + + return "<{0}({1!r}{2})>".format( + self.__class__.__name__, + str(self), + pre, + ) + + def __str__(self): + return "{0}{1}".format(*self._spec) + + def __hash__(self): + return hash(self._spec) + + def __eq__(self, other): + if isinstance(other, string_types): + try: + other = self.__class__(other) + except InvalidSpecifier: + return NotImplemented + elif not isinstance(other, self.__class__): + return NotImplemented + + return self._spec == other._spec + + def __ne__(self, other): + if isinstance(other, string_types): + try: + other = self.__class__(other) + except InvalidSpecifier: + return NotImplemented + elif not isinstance(other, self.__class__): + return NotImplemented + + return self._spec != other._spec + + def _get_operator(self, op): + return getattr(self, "_compare_{0}".format(self._operators[op])) + + def _coerce_version(self, version): + if not isinstance(version, (LegacyVersion, Version)): + version = parse(version) + return version + + @property + def operator(self): + return self._spec[0] + + @property + def version(self): + return self._spec[1] + + @property + def prereleases(self): + return self._prereleases + + @prereleases.setter + def prereleases(self, value): + self._prereleases = value + + def __contains__(self, item): + return self.contains(item) + + def contains(self, item, prereleases=None): + # Determine if prereleases are to be allowed or not. + if prereleases is None: + prereleases = self.prereleases + + # Normalize item to a Version or LegacyVersion, this allows us to have + # a shortcut for ``"2.0" in Specifier(">=2") + item = self._coerce_version(item) + + # Determine if we should be supporting prereleases in this specifier + # or not, if we do not support prereleases than we can short circuit + # logic if this version is a prereleases. + if item.is_prerelease and not prereleases: + return False + + # Actually do the comparison to determine if this item is contained + # within this Specifier or not. + return self._get_operator(self.operator)(item, self.version) + + def filter(self, iterable, prereleases=None): + yielded = False + found_prereleases = [] + + kw = {"prereleases": prereleases if prereleases is not None else True} + + # Attempt to iterate over all the values in the iterable and if any of + # them match, yield them. + for version in iterable: + parsed_version = self._coerce_version(version) + + if self.contains(parsed_version, **kw): + # If our version is a prerelease, and we were not set to allow + # prereleases, then we'll store it for later incase nothing + # else matches this specifier. + if (parsed_version.is_prerelease and not + (prereleases or self.prereleases)): + found_prereleases.append(version) + # Either this is not a prerelease, or we should have been + # accepting prereleases from the begining. 
+ else: + yielded = True + yield version + + # Now that we've iterated over everything, determine if we've yielded + # any values, and if we have not and we have any prereleases stored up + # then we will go ahead and yield the prereleases. + if not yielded and found_prereleases: + for version in found_prereleases: + yield version + + +class LegacySpecifier(_IndividualSpecifier): + + _regex_str = ( + r""" + (?P<operator>(==|!=|<=|>=|<|>)) + \s* + (?P<version> + [^,;\s)]* # Since this is a "legacy" specifier, and the version + # string can be just about anything, we match everything + # except for whitespace, a semi-colon for marker support, + # a closing paren since versions can be enclosed in + # them, and a comma since it's a version separator. + ) + """ + ) + + _regex = re.compile( + r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE) + + _operators = { + "==": "equal", + "!=": "not_equal", + "<=": "less_than_equal", + ">=": "greater_than_equal", + "<": "less_than", + ">": "greater_than", + } + + def _coerce_version(self, version): + if not isinstance(version, LegacyVersion): + version = LegacyVersion(str(version)) + return version + + def _compare_equal(self, prospective, spec): + return prospective == self._coerce_version(spec) + + def _compare_not_equal(self, prospective, spec): + return prospective != self._coerce_version(spec) + + def _compare_less_than_equal(self, prospective, spec): + return prospective <= self._coerce_version(spec) + + def _compare_greater_than_equal(self, prospective, spec): + return prospective >= self._coerce_version(spec) + + def _compare_less_than(self, prospective, spec): + return prospective < self._coerce_version(spec) + + def _compare_greater_than(self, prospective, spec): + return prospective > self._coerce_version(spec) + + +def _require_version_compare(fn): + @functools.wraps(fn) + def wrapped(self, prospective, spec): + if not isinstance(prospective, Version): + return False + return fn(self, prospective, spec) + return wrapped + + +class Specifier(_IndividualSpecifier): + + _regex_str = ( + r""" + (?P<operator>(~=|==|!=|<=|>=|<|>|===)) + (?P<version> + (?: + # The identity operators allow for an escape hatch that will + # do an exact string match of the version you wish to install. + # This will not be parsed by PEP 440 and we cannot determine + # any semantic meaning from it. This operator is discouraged + # but included entirely as an escape hatch. + (?<====) # Only match for the identity operator + \s* + [^\s]* # We just match everything, except for whitespace + # since we are only testing for strict identity. + ) + | + (?: + # The (non)equality operators allow for wild card and local + # versions to be specified so we have to define these two + # operators separately to enable that. + (?<===|!=) # Only match for equals and not equals + + \s* + v? + (?:[0-9]+!)? # epoch + [0-9]+(?:\.[0-9]+)* # release + (?: # pre release + [-_\.]? + (a|b|c|rc|alpha|beta|pre|preview) + [-_\.]? + [0-9]* + )? + (?: # post release + (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) + )? + + # You cannot use a wild card and a dev or local version + # together so group them with a | and make them optional. + (?: + (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release + (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local + | + \.\* # Wild card syntax of .* + )? + ) + | + (?: + # The compatible operator requires at least two digits in the + # release segment. + (?<=~=) # Only match for the compatible operator + + \s* + v? + (?:[0-9]+!)? 
# epoch + [0-9]+(?:\.[0-9]+)+ # release (We have a + instead of a *) + (?: # pre release + [-_\.]? + (a|b|c|rc|alpha|beta|pre|preview) + [-_\.]? + [0-9]* + )? + (?: # post release + (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) + )? + (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release + ) + | + (?: + # All other operators only allow a sub set of what the + # (non)equality operators do. Specifically they do not allow + # local versions to be specified nor do they allow the prefix + # matching wild cards. + (?<!==|!=|~=) # We have special cases for these + # operators so we want to make sure they + # don't match here. + + \s* + v? + (?:[0-9]+!)? # epoch + [0-9]+(?:\.[0-9]+)* # release + (?: # pre release + [-_\.]? + (a|b|c|rc|alpha|beta|pre|preview) + [-_\.]? + [0-9]* + )? + (?: # post release + (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) + )? + (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release + ) + ) + """ + ) + + _regex = re.compile( + r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE) + + _operators = { + "~=": "compatible", + "==": "equal", + "!=": "not_equal", + "<=": "less_than_equal", + ">=": "greater_than_equal", + "<": "less_than", + ">": "greater_than", + "===": "arbitrary", + } + + @_require_version_compare + def _compare_compatible(self, prospective, spec): + # Compatible releases have an equivalent combination of >= and ==. That + # is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to + # implement this in terms of the other specifiers instead of + # implementing it ourselves. The only thing we need to do is construct + # the other specifiers. + + # We want everything but the last item in the version, but we want to + # ignore post and dev releases and we want to treat the pre-release as + # it's own separate segment. + prefix = ".".join( + list( + itertools.takewhile( + lambda x: (not x.startswith("post") and not + x.startswith("dev")), + _version_split(spec), + ) + )[:-1] + ) + + # Add the prefix notation to the end of our string + prefix += ".*" + + return (self._get_operator(">=")(prospective, spec) and + self._get_operator("==")(prospective, prefix)) + + @_require_version_compare + def _compare_equal(self, prospective, spec): + # We need special logic to handle prefix matching + if spec.endswith(".*"): + # In the case of prefix matching we want to ignore local segment. + prospective = Version(prospective.public) + # Split the spec out by dots, and pretend that there is an implicit + # dot in between a release segment and a pre-release segment. + spec = _version_split(spec[:-2]) # Remove the trailing .* + + # Split the prospective version out by dots, and pretend that there + # is an implicit dot in between a release segment and a pre-release + # segment. + prospective = _version_split(str(prospective)) + + # Shorten the prospective version to be the same length as the spec + # so that we can determine if the specifier is a prefix of the + # prospective version or not. + prospective = prospective[:len(spec)] + + # Pad out our two sides with zeros so that they both equal the same + # length. + spec, prospective = _pad_version(spec, prospective) + else: + # Convert our spec string into a Version + spec = Version(spec) + + # If the specifier does not have a local segment, then we want to + # act as if the prospective version also does not have a local + # segment. 
+ if not spec.local: + prospective = Version(prospective.public) + + return prospective == spec + + @_require_version_compare + def _compare_not_equal(self, prospective, spec): + return not self._compare_equal(prospective, spec) + + @_require_version_compare + def _compare_less_than_equal(self, prospective, spec): + return prospective <= Version(spec) + + @_require_version_compare + def _compare_greater_than_equal(self, prospective, spec): + return prospective >= Version(spec) + + @_require_version_compare + def _compare_less_than(self, prospective, spec): + # Convert our spec to a Version instance, since we'll want to work with + # it as a version. + spec = Version(spec) + + # Check to see if the prospective version is less than the spec + # version. If it's not we can short circuit and just return False now + # instead of doing extra unneeded work. + if not prospective < spec: + return False + + # This special case is here so that, unless the specifier itself + # includes is a pre-release version, that we do not accept pre-release + # versions for the version mentioned in the specifier (e.g. <3.1 should + # not match 3.1.dev0, but should match 3.0.dev0). + if not spec.is_prerelease and prospective.is_prerelease: + if Version(prospective.base_version) == Version(spec.base_version): + return False + + # If we've gotten to here, it means that prospective version is both + # less than the spec version *and* it's not a pre-release of the same + # version in the spec. + return True + + @_require_version_compare + def _compare_greater_than(self, prospective, spec): + # Convert our spec to a Version instance, since we'll want to work with + # it as a version. + spec = Version(spec) + + # Check to see if the prospective version is greater than the spec + # version. If it's not we can short circuit and just return False now + # instead of doing extra unneeded work. + if not prospective > spec: + return False + + # This special case is here so that, unless the specifier itself + # includes is a post-release version, that we do not accept + # post-release versions for the version mentioned in the specifier + # (e.g. >3.1 should not match 3.0.post0, but should match 3.2.post0). + if not spec.is_postrelease and prospective.is_postrelease: + if Version(prospective.base_version) == Version(spec.base_version): + return False + + # Ensure that we do not allow a local version of the version mentioned + # in the specifier, which is techincally greater than, to match. + if prospective.local is not None: + if Version(prospective.base_version) == Version(spec.base_version): + return False + + # If we've gotten to here, it means that prospective version is both + # greater than the spec version *and* it's not a pre-release of the + # same version in the spec. + return True + + def _compare_arbitrary(self, prospective, spec): + return str(prospective).lower() == str(spec).lower() + + @property + def prereleases(self): + # If there is an explicit prereleases set for this, then we'll just + # blindly use that. + if self._prereleases is not None: + return self._prereleases + + # Look at all of our specifiers and determine if they are inclusive + # operators, and if they are if they are including an explicit + # prerelease. + operator, version = self._spec + if operator in ["==", ">=", "<=", "~=", "==="]: + # The == specifier can include a trailing .*, if it does we + # want to remove before parsing. 
+ if operator == "==" and version.endswith(".*"): + version = version[:-2] + + # Parse the version, and if it is a pre-release than this + # specifier allows pre-releases. + if parse(version).is_prerelease: + return True + + return False + + @prereleases.setter + def prereleases(self, value): + self._prereleases = value + + +_prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$") + + +def _version_split(version): + result = [] + for item in version.split("."): + match = _prefix_regex.search(item) + if match: + result.extend(match.groups()) + else: + result.append(item) + return result + + +def _pad_version(left, right): + left_split, right_split = [], [] + + # Get the release segment of our versions + left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left))) + right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right))) + + # Get the rest of our versions + left_split.append(left[len(left_split[0]):]) + right_split.append(right[len(right_split[0]):]) + + # Insert our padding + left_split.insert( + 1, + ["0"] * max(0, len(right_split[0]) - len(left_split[0])), + ) + right_split.insert( + 1, + ["0"] * max(0, len(left_split[0]) - len(right_split[0])), + ) + + return ( + list(itertools.chain(*left_split)), + list(itertools.chain(*right_split)), + ) + + +class SpecifierSet(BaseSpecifier): + + def __init__(self, specifiers="", prereleases=None): + # Split on , to break each indidivual specifier into it's own item, and + # strip each item to remove leading/trailing whitespace. + specifiers = [s.strip() for s in specifiers.split(",") if s.strip()] + + # Parsed each individual specifier, attempting first to make it a + # Specifier and falling back to a LegacySpecifier. + parsed = set() + for specifier in specifiers: + try: + parsed.add(Specifier(specifier)) + except InvalidSpecifier: + parsed.add(LegacySpecifier(specifier)) + + # Turn our parsed specifiers into a frozen set and save them for later. + self._specs = frozenset(parsed) + + # Store our prereleases value so we can use it later to determine if + # we accept prereleases or not. + self._prereleases = prereleases + + def __repr__(self): + pre = ( + ", prereleases={0!r}".format(self.prereleases) + if self._prereleases is not None + else "" + ) + + return "<SpecifierSet({0!r}{1})>".format(str(self), pre) + + def __str__(self): + return ",".join(sorted(str(s) for s in self._specs)) + + def __hash__(self): + return hash(self._specs) + + def __and__(self, other): + if isinstance(other, string_types): + other = SpecifierSet(other) + elif not isinstance(other, SpecifierSet): + return NotImplemented + + specifier = SpecifierSet() + specifier._specs = frozenset(self._specs | other._specs) + + if self._prereleases is None and other._prereleases is not None: + specifier._prereleases = other._prereleases + elif self._prereleases is not None and other._prereleases is None: + specifier._prereleases = self._prereleases + elif self._prereleases == other._prereleases: + specifier._prereleases = self._prereleases + else: + raise ValueError( + "Cannot combine SpecifierSets with True and False prerelease " + "overrides." 
+ ) + + return specifier + + def __eq__(self, other): + if isinstance(other, string_types): + other = SpecifierSet(other) + elif isinstance(other, _IndividualSpecifier): + other = SpecifierSet(str(other)) + elif not isinstance(other, SpecifierSet): + return NotImplemented + + return self._specs == other._specs + + def __ne__(self, other): + if isinstance(other, string_types): + other = SpecifierSet(other) + elif isinstance(other, _IndividualSpecifier): + other = SpecifierSet(str(other)) + elif not isinstance(other, SpecifierSet): + return NotImplemented + + return self._specs != other._specs + + def __len__(self): + return len(self._specs) + + def __iter__(self): + return iter(self._specs) + + @property + def prereleases(self): + # If we have been given an explicit prerelease modifier, then we'll + # pass that through here. + if self._prereleases is not None: + return self._prereleases + + # If we don't have any specifiers, and we don't have a forced value, + # then we'll just return None since we don't know if this should have + # pre-releases or not. + if not self._specs: + return None + + # Otherwise we'll see if any of the given specifiers accept + # prereleases, if any of them do we'll return True, otherwise False. + return any(s.prereleases for s in self._specs) + + @prereleases.setter + def prereleases(self, value): + self._prereleases = value + + def __contains__(self, item): + return self.contains(item) + + def contains(self, item, prereleases=None): + # Ensure that our item is a Version or LegacyVersion instance. + if not isinstance(item, (LegacyVersion, Version)): + item = parse(item) + + # Determine if we're forcing a prerelease or not, if we're not forcing + # one for this particular filter call, then we'll use whatever the + # SpecifierSet thinks for whether or not we should support prereleases. + if prereleases is None: + prereleases = self.prereleases + + # We can determine if we're going to allow pre-releases by looking to + # see if any of the underlying items supports them. If none of them do + # and this item is a pre-release then we do not allow it and we can + # short circuit that here. + # Note: This means that 1.0.dev1 would not be contained in something + # like >=1.0.devabc however it would be in >=1.0.debabc,>0.0.dev0 + if not prereleases and item.is_prerelease: + return False + + # We simply dispatch to the underlying specs here to make sure that the + # given version is contained within all of them. + # Note: This use of all() here means that an empty set of specifiers + # will always return True, this is an explicit design decision. + return all( + s.contains(item, prereleases=prereleases) + for s in self._specs + ) + + def filter(self, iterable, prereleases=None): + # Determine if we're forcing a prerelease or not, if we're not forcing + # one for this particular filter call, then we'll use whatever the + # SpecifierSet thinks for whether or not we should support prereleases. + if prereleases is None: + prereleases = self.prereleases + + # If we have any specifiers, then we want to wrap our iterable in the + # filter method for each one, this will act as a logical AND amongst + # each specifier. + if self._specs: + for spec in self._specs: + iterable = spec.filter(iterable, prereleases=bool(prereleases)) + return iterable + # If we do not have any specifiers, then we need to have a rough filter + # which will filter out any pre-releases, unless there are no final + # releases, and which will filter out LegacyVersion in general. 
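+ # [Editor's note: illustrative aside, not part of the vendored file.]
+ # Doctest-style sketch of filter(), including the "rough filter" branch
+ # below for an empty specifier set (import path is an assumption):
+ #
+ #     >>> from packaging.specifiers import SpecifierSet
+ #     >>> list(SpecifierSet(">=1.0,<2.0").filter(["0.9", "1.1", "1.5", "2.0"]))
+ #     ['1.1', '1.5']
+ #     >>> list(SpecifierSet("").filter(["1.0", "2.0a1"]))    # drop pre-releases
+ #     ['1.0']
+ #     >>> list(SpecifierSet("").filter(["1.0a1", "1.0b2"]))  # unless only pre-releases exist
+ #     ['1.0a1', '1.0b2']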
+ else: + filtered = [] + found_prereleases = [] + + for item in iterable: + # Ensure that we some kind of Version class for this item. + if not isinstance(item, (LegacyVersion, Version)): + parsed_version = parse(item) + else: + parsed_version = item + + # Filter out any item which is parsed as a LegacyVersion + if isinstance(parsed_version, LegacyVersion): + continue + + # Store any item which is a pre-release for later unless we've + # already found a final version or we are accepting prereleases + if parsed_version.is_prerelease and not prereleases: + if not filtered: + found_prereleases.append(item) + else: + filtered.append(item) + + # If we've found no items except for pre-releases, then we'll go + # ahead and use the pre-releases + if not filtered and found_prereleases and prereleases is None: + return found_prereleases + + return filtered diff --git a/env/lib/python3.6/site-packages/pkg_resources/_vendor/packaging/utils.py b/env/lib/python3.6/site-packages/pkg_resources/_vendor/packaging/utils.py new file mode 100644 index 0000000..942387c --- /dev/null +++ b/env/lib/python3.6/site-packages/pkg_resources/_vendor/packaging/utils.py @@ -0,0 +1,14 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +from __future__ import absolute_import, division, print_function + +import re + + +_canonicalize_regex = re.compile(r"[-_.]+") + + +def canonicalize_name(name): + # This is taken from PEP 503. + return _canonicalize_regex.sub("-", name).lower() diff --git a/env/lib/python3.6/site-packages/pkg_resources/_vendor/packaging/version.py b/env/lib/python3.6/site-packages/pkg_resources/_vendor/packaging/version.py new file mode 100644 index 0000000..83b5ee8 --- /dev/null +++ b/env/lib/python3.6/site-packages/pkg_resources/_vendor/packaging/version.py @@ -0,0 +1,393 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +from __future__ import absolute_import, division, print_function + +import collections +import itertools +import re + +from ._structures import Infinity + + +__all__ = [ + "parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN" +] + + +_Version = collections.namedtuple( + "_Version", + ["epoch", "release", "dev", "pre", "post", "local"], +) + + +def parse(version): + """ + Parse the given version string and return either a :class:`Version` object + or a :class:`LegacyVersion` object depending on if the given version is + a valid PEP 440 version or a legacy version. + """ + try: + return Version(version) + except InvalidVersion: + return LegacyVersion(version) + + +class InvalidVersion(ValueError): + """ + An invalid version was found, users should refer to PEP 440. 
+ """ + + +class _BaseVersion(object): + + def __hash__(self): + return hash(self._key) + + def __lt__(self, other): + return self._compare(other, lambda s, o: s < o) + + def __le__(self, other): + return self._compare(other, lambda s, o: s <= o) + + def __eq__(self, other): + return self._compare(other, lambda s, o: s == o) + + def __ge__(self, other): + return self._compare(other, lambda s, o: s >= o) + + def __gt__(self, other): + return self._compare(other, lambda s, o: s > o) + + def __ne__(self, other): + return self._compare(other, lambda s, o: s != o) + + def _compare(self, other, method): + if not isinstance(other, _BaseVersion): + return NotImplemented + + return method(self._key, other._key) + + +class LegacyVersion(_BaseVersion): + + def __init__(self, version): + self._version = str(version) + self._key = _legacy_cmpkey(self._version) + + def __str__(self): + return self._version + + def __repr__(self): + return "<LegacyVersion({0})>".format(repr(str(self))) + + @property + def public(self): + return self._version + + @property + def base_version(self): + return self._version + + @property + def local(self): + return None + + @property + def is_prerelease(self): + return False + + @property + def is_postrelease(self): + return False + + +_legacy_version_component_re = re.compile( + r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE, +) + +_legacy_version_replacement_map = { + "pre": "c", "preview": "c", "-": "final-", "rc": "c", "dev": "@", +} + + +def _parse_version_parts(s): + for part in _legacy_version_component_re.split(s): + part = _legacy_version_replacement_map.get(part, part) + + if not part or part == ".": + continue + + if part[:1] in "0123456789": + # pad for numeric comparison + yield part.zfill(8) + else: + yield "*" + part + + # ensure that alpha/beta/candidate are before final + yield "*final" + + +def _legacy_cmpkey(version): + # We hardcode an epoch of -1 here. A PEP 440 version can only have a epoch + # greater than or equal to 0. This will effectively put the LegacyVersion, + # which uses the defacto standard originally implemented by setuptools, + # as before all PEP 440 versions. + epoch = -1 + + # This scheme is taken from pkg_resources.parse_version setuptools prior to + # it's adoption of the packaging library. + parts = [] + for part in _parse_version_parts(version.lower()): + if part.startswith("*"): + # remove "-" before a prerelease tag + if part < "*final": + while parts and parts[-1] == "*final-": + parts.pop() + + # remove trailing zeros from each series of numeric parts + while parts and parts[-1] == "00000000": + parts.pop() + + parts.append(part) + parts = tuple(parts) + + return epoch, parts + +# Deliberately not anchored to the start and end of the string, to make it +# easier for 3rd party code to reuse +VERSION_PATTERN = r""" + v? + (?: + (?:(?P<epoch>[0-9]+)!)? # epoch + (?P<release>[0-9]+(?:\.[0-9]+)*) # release segment + (?P<pre> # pre-release + [-_\.]? + (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview)) + [-_\.]? + (?P<pre_n>[0-9]+)? + )? + (?P<post> # post release + (?:-(?P<post_n1>[0-9]+)) + | + (?: + [-_\.]? + (?P<post_l>post|rev|r) + [-_\.]? + (?P<post_n2>[0-9]+)? + ) + )? + (?P<dev> # dev release + [-_\.]? + (?P<dev_l>dev) + [-_\.]? + (?P<dev_n>[0-9]+)? + )? + ) + (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))? 
# local version +""" + + +class Version(_BaseVersion): + + _regex = re.compile( + r"^\s*" + VERSION_PATTERN + r"\s*$", + re.VERBOSE | re.IGNORECASE, + ) + + def __init__(self, version): + # Validate the version and parse it into pieces + match = self._regex.search(version) + if not match: + raise InvalidVersion("Invalid version: '{0}'".format(version)) + + # Store the parsed out pieces of the version + self._version = _Version( + epoch=int(match.group("epoch")) if match.group("epoch") else 0, + release=tuple(int(i) for i in match.group("release").split(".")), + pre=_parse_letter_version( + match.group("pre_l"), + match.group("pre_n"), + ), + post=_parse_letter_version( + match.group("post_l"), + match.group("post_n1") or match.group("post_n2"), + ), + dev=_parse_letter_version( + match.group("dev_l"), + match.group("dev_n"), + ), + local=_parse_local_version(match.group("local")), + ) + + # Generate a key which will be used for sorting + self._key = _cmpkey( + self._version.epoch, + self._version.release, + self._version.pre, + self._version.post, + self._version.dev, + self._version.local, + ) + + def __repr__(self): + return "<Version({0})>".format(repr(str(self))) + + def __str__(self): + parts = [] + + # Epoch + if self._version.epoch != 0: + parts.append("{0}!".format(self._version.epoch)) + + # Release segment + parts.append(".".join(str(x) for x in self._version.release)) + + # Pre-release + if self._version.pre is not None: + parts.append("".join(str(x) for x in self._version.pre)) + + # Post-release + if self._version.post is not None: + parts.append(".post{0}".format(self._version.post[1])) + + # Development release + if self._version.dev is not None: + parts.append(".dev{0}".format(self._version.dev[1])) + + # Local version segment + if self._version.local is not None: + parts.append( + "+{0}".format(".".join(str(x) for x in self._version.local)) + ) + + return "".join(parts) + + @property + def public(self): + return str(self).split("+", 1)[0] + + @property + def base_version(self): + parts = [] + + # Epoch + if self._version.epoch != 0: + parts.append("{0}!".format(self._version.epoch)) + + # Release segment + parts.append(".".join(str(x) for x in self._version.release)) + + return "".join(parts) + + @property + def local(self): + version_string = str(self) + if "+" in version_string: + return version_string.split("+", 1)[1] + + @property + def is_prerelease(self): + return bool(self._version.dev or self._version.pre) + + @property + def is_postrelease(self): + return bool(self._version.post) + + +def _parse_letter_version(letter, number): + if letter: + # We consider there to be an implicit 0 in a pre-release if there is + # not a numeral associated with it. + if number is None: + number = 0 + + # We normalize any letters to their lower case form + letter = letter.lower() + + # We consider some words to be alternate spellings of other words and + # in those cases we want to normalize the spellings to our preferred + # spelling. + if letter == "alpha": + letter = "a" + elif letter == "beta": + letter = "b" + elif letter in ["c", "pre", "preview"]: + letter = "rc" + elif letter in ["rev", "r"]: + letter = "post" + + return letter, int(number) + if not letter and number: + # We assume if we are given a number, but we are not given a letter + # then this is using the implicit post release syntax (e.g. 
1.0-1) + letter = "post" + + return letter, int(number) + + +_local_version_seperators = re.compile(r"[\._-]") + + +def _parse_local_version(local): + """ + Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve"). + """ + if local is not None: + return tuple( + part.lower() if not part.isdigit() else int(part) + for part in _local_version_seperators.split(local) + ) + + +def _cmpkey(epoch, release, pre, post, dev, local): + # When we compare a release version, we want to compare it with all of the + # trailing zeros removed. So we'll use a reverse the list, drop all the now + # leading zeros until we come to something non zero, then take the rest + # re-reverse it back into the correct order and make it a tuple and use + # that for our sorting key. + release = tuple( + reversed(list( + itertools.dropwhile( + lambda x: x == 0, + reversed(release), + ) + )) + ) + + # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0. + # We'll do this by abusing the pre segment, but we _only_ want to do this + # if there is not a pre or a post segment. If we have one of those then + # the normal sorting rules will handle this case correctly. + if pre is None and post is None and dev is not None: + pre = -Infinity + # Versions without a pre-release (except as noted above) should sort after + # those with one. + elif pre is None: + pre = Infinity + + # Versions without a post segment should sort before those with one. + if post is None: + post = -Infinity + + # Versions without a development segment should sort after those with one. + if dev is None: + dev = Infinity + + if local is None: + # Versions without a local segment should sort before those with one. + local = -Infinity + else: + # Versions with a local segment need that segment parsed to implement + # the sorting rules in PEP440. + # - Alpha numeric segments sort before numeric segments + # - Alpha numeric segments sort lexicographically + # - Numeric segments sort numerically + # - Shorter versions sort before longer versions when the prefixes + # match exactly + local = tuple( + (i, "") if isinstance(i, int) else (-Infinity, i) + for i in local + ) + + return epoch, release, pre, post, dev, local diff --git a/env/lib/python3.6/site-packages/pkg_resources/_vendor/pyparsing.py b/env/lib/python3.6/site-packages/pkg_resources/_vendor/pyparsing.py new file mode 100644 index 0000000..a212243 --- /dev/null +++ b/env/lib/python3.6/site-packages/pkg_resources/_vendor/pyparsing.py @@ -0,0 +1,5696 @@ +# module pyparsing.py +# +# Copyright (c) 2003-2016 Paul T. McGuire +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be +# included in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
+# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__doc__ = \ +""" +pyparsing module - Classes and methods to define and execute parsing grammars + +The pyparsing module is an alternative approach to creating and executing simple grammars, +vs. the traditional lex/yacc approach, or the use of regular expressions. With pyparsing, you +don't need to learn a new syntax for defining grammars or matching expressions - the parsing module +provides a library of classes that you use to construct the grammar directly in Python. + +Here is a program to parse "Hello, World!" (or any greeting of the form +C{"<salutation>, <addressee>!"}), built up using L{Word}, L{Literal}, and L{And} elements +(L{'+'<ParserElement.__add__>} operator gives L{And} expressions, strings are auto-converted to +L{Literal} expressions):: + + from pyparsing import Word, alphas + + # define grammar of a greeting + greet = Word(alphas) + "," + Word(alphas) + "!" + + hello = "Hello, World!" + print (hello, "->", greet.parseString(hello)) + +The program outputs the following:: + + Hello, World! -> ['Hello', ',', 'World', '!'] + +The Python representation of the grammar is quite readable, owing to the self-explanatory +class names, and the use of '+', '|' and '^' operators. + +The L{ParseResults} object returned from L{ParserElement.parseString<ParserElement.parseString>} can be accessed as a nested list, a dictionary, or an +object with named attributes. + +The pyparsing module handles some of the problems that are typically vexing when writing text parsers: + - extra or missing whitespace (the above program will also handle "Hello,World!", "Hello , World !", etc.) 
+ - quoted strings + - embedded comments +""" + +__version__ = "2.1.10" +__versionTime__ = "07 Oct 2016 01:31 UTC" +__author__ = "Paul McGuire <ptmcg@users.sourceforge.net>" + +import string +from weakref import ref as wkref +import copy +import sys +import warnings +import re +import sre_constants +import collections +import pprint +import traceback +import types +from datetime import datetime + +try: + from _thread import RLock +except ImportError: + from threading import RLock + +try: + from collections import OrderedDict as _OrderedDict +except ImportError: + try: + from ordereddict import OrderedDict as _OrderedDict + except ImportError: + _OrderedDict = None + +#~ sys.stderr.write( "testing pyparsing module, version %s, %s\n" % (__version__,__versionTime__ ) ) + +__all__ = [ +'And', 'CaselessKeyword', 'CaselessLiteral', 'CharsNotIn', 'Combine', 'Dict', 'Each', 'Empty', +'FollowedBy', 'Forward', 'GoToColumn', 'Group', 'Keyword', 'LineEnd', 'LineStart', 'Literal', +'MatchFirst', 'NoMatch', 'NotAny', 'OneOrMore', 'OnlyOnce', 'Optional', 'Or', +'ParseBaseException', 'ParseElementEnhance', 'ParseException', 'ParseExpression', 'ParseFatalException', +'ParseResults', 'ParseSyntaxException', 'ParserElement', 'QuotedString', 'RecursiveGrammarException', +'Regex', 'SkipTo', 'StringEnd', 'StringStart', 'Suppress', 'Token', 'TokenConverter', +'White', 'Word', 'WordEnd', 'WordStart', 'ZeroOrMore', +'alphanums', 'alphas', 'alphas8bit', 'anyCloseTag', 'anyOpenTag', 'cStyleComment', 'col', +'commaSeparatedList', 'commonHTMLEntity', 'countedArray', 'cppStyleComment', 'dblQuotedString', +'dblSlashComment', 'delimitedList', 'dictOf', 'downcaseTokens', 'empty', 'hexnums', +'htmlComment', 'javaStyleComment', 'line', 'lineEnd', 'lineStart', 'lineno', +'makeHTMLTags', 'makeXMLTags', 'matchOnlyAtCol', 'matchPreviousExpr', 'matchPreviousLiteral', +'nestedExpr', 'nullDebugAction', 'nums', 'oneOf', 'opAssoc', 'operatorPrecedence', 'printables', +'punc8bit', 'pythonStyleComment', 'quotedString', 'removeQuotes', 'replaceHTMLEntity', +'replaceWith', 'restOfLine', 'sglQuotedString', 'srange', 'stringEnd', +'stringStart', 'traceParseAction', 'unicodeString', 'upcaseTokens', 'withAttribute', +'indentedBlock', 'originalTextFor', 'ungroup', 'infixNotation','locatedExpr', 'withClass', +'CloseMatch', 'tokenMap', 'pyparsing_common', +] + +system_version = tuple(sys.version_info)[:3] +PY_3 = system_version[0] == 3 +if PY_3: + _MAX_INT = sys.maxsize + basestring = str + unichr = chr + _ustr = str + + # build list of single arg builtins, that can be used as parse actions + singleArgBuiltins = [sum, len, sorted, reversed, list, tuple, set, any, all, min, max] + +else: + _MAX_INT = sys.maxint + range = xrange + + def _ustr(obj): + """Drop-in replacement for str(obj) that tries to be Unicode friendly. It first tries + str(obj). If that fails with a UnicodeEncodeError, then it tries unicode(obj). It + then < returns the unicode object | encodes it with the default encoding | ... >. + """ + if isinstance(obj,unicode): + return obj + + try: + # If this works, then _ustr(obj) has the same behaviour as str(obj), so + # it won't break any existing code. 
+ return str(obj) + + except UnicodeEncodeError: + # Else encode it + ret = unicode(obj).encode(sys.getdefaultencoding(), 'xmlcharrefreplace') + xmlcharref = Regex('&#\d+;') + xmlcharref.setParseAction(lambda t: '\\u' + hex(int(t[0][2:-1]))[2:]) + return xmlcharref.transformString(ret) + + # build list of single arg builtins, tolerant of Python version, that can be used as parse actions + singleArgBuiltins = [] + import __builtin__ + for fname in "sum len sorted reversed list tuple set any all min max".split(): + try: + singleArgBuiltins.append(getattr(__builtin__,fname)) + except AttributeError: + continue + +_generatorType = type((y for y in range(1))) + +def _xml_escape(data): + """Escape &, <, >, ", ', etc. in a string of data.""" + + # ampersand must be replaced first + from_symbols = '&><"\'' + to_symbols = ('&'+s+';' for s in "amp gt lt quot apos".split()) + for from_,to_ in zip(from_symbols, to_symbols): + data = data.replace(from_, to_) + return data + +class _Constants(object): + pass + +alphas = string.ascii_uppercase + string.ascii_lowercase +nums = "0123456789" +hexnums = nums + "ABCDEFabcdef" +alphanums = alphas + nums +_bslash = chr(92) +printables = "".join(c for c in string.printable if c not in string.whitespace) + +class ParseBaseException(Exception): + """base exception class for all parsing runtime exceptions""" + # Performance tuning: we construct a *lot* of these, so keep this + # constructor as small and fast as possible + def __init__( self, pstr, loc=0, msg=None, elem=None ): + self.loc = loc + if msg is None: + self.msg = pstr + self.pstr = "" + else: + self.msg = msg + self.pstr = pstr + self.parserElement = elem + self.args = (pstr, loc, msg) + + @classmethod + def _from_exception(cls, pe): + """ + internal factory method to simplify creating one type of ParseException + from another - avoids having __init__ signature conflicts among subclasses + """ + return cls(pe.pstr, pe.loc, pe.msg, pe.parserElement) + + def __getattr__( self, aname ): + """supported attributes by name are: + - lineno - returns the line number of the exception text + - col - returns the column number of the exception text + - line - returns the line containing the exception text + """ + if( aname == "lineno" ): + return lineno( self.loc, self.pstr ) + elif( aname in ("col", "column") ): + return col( self.loc, self.pstr ) + elif( aname == "line" ): + return line( self.loc, self.pstr ) + else: + raise AttributeError(aname) + + def __str__( self ): + return "%s (at char %d), (line:%d, col:%d)" % \ + ( self.msg, self.loc, self.lineno, self.column ) + def __repr__( self ): + return _ustr(self) + def markInputline( self, markerString = ">!<" ): + """Extracts the exception line from the input string, and marks + the location of the exception with a special symbol. 
+ """ + line_str = self.line + line_column = self.column - 1 + if markerString: + line_str = "".join((line_str[:line_column], + markerString, line_str[line_column:])) + return line_str.strip() + def __dir__(self): + return "lineno col line".split() + dir(type(self)) + +class ParseException(ParseBaseException): + """ + Exception thrown when parse expressions don't match class; + supported attributes by name are: + - lineno - returns the line number of the exception text + - col - returns the column number of the exception text + - line - returns the line containing the exception text + + Example:: + try: + Word(nums).setName("integer").parseString("ABC") + except ParseException as pe: + print(pe) + print("column: {}".format(pe.col)) + + prints:: + Expected integer (at char 0), (line:1, col:1) + column: 1 + """ + pass + +class ParseFatalException(ParseBaseException): + """user-throwable exception thrown when inconsistent parse content + is found; stops all parsing immediately""" + pass + +class ParseSyntaxException(ParseFatalException): + """just like L{ParseFatalException}, but thrown internally when an + L{ErrorStop<And._ErrorStop>} ('-' operator) indicates that parsing is to stop + immediately because an unbacktrackable syntax error has been found""" + pass + +#~ class ReparseException(ParseBaseException): + #~ """Experimental class - parse actions can raise this exception to cause + #~ pyparsing to reparse the input string: + #~ - with a modified input string, and/or + #~ - with a modified start location + #~ Set the values of the ReparseException in the constructor, and raise the + #~ exception in a parse action to cause pyparsing to use the new string/location. + #~ Setting the values as None causes no change to be made. + #~ """ + #~ def __init_( self, newstring, restartLoc ): + #~ self.newParseText = newstring + #~ self.reparseLoc = restartLoc + +class RecursiveGrammarException(Exception): + """exception thrown by L{ParserElement.validate} if the grammar could be improperly recursive""" + def __init__( self, parseElementList ): + self.parseElementTrace = parseElementList + + def __str__( self ): + return "RecursiveGrammarException: %s" % self.parseElementTrace + +class _ParseResultsWithOffset(object): + def __init__(self,p1,p2): + self.tup = (p1,p2) + def __getitem__(self,i): + return self.tup[i] + def __repr__(self): + return repr(self.tup[0]) + def setOffset(self,i): + self.tup = (self.tup[0],i) + +class ParseResults(object): + """ + Structured parse results, to provide multiple means of access to the parsed data: + - as a list (C{len(results)}) + - by list index (C{results[0], results[1]}, etc.) 
+ - by attribute (C{results.<resultsName>} - see L{ParserElement.setResultsName}) + + Example:: + integer = Word(nums) + date_str = (integer.setResultsName("year") + '/' + + integer.setResultsName("month") + '/' + + integer.setResultsName("day")) + # equivalent form: + # date_str = integer("year") + '/' + integer("month") + '/' + integer("day") + + # parseString returns a ParseResults object + result = date_str.parseString("1999/12/31") + + def test(s, fn=repr): + print("%s -> %s" % (s, fn(eval(s)))) + test("list(result)") + test("result[0]") + test("result['month']") + test("result.day") + test("'month' in result") + test("'minutes' in result") + test("result.dump()", str) + prints:: + list(result) -> ['1999', '/', '12', '/', '31'] + result[0] -> '1999' + result['month'] -> '12' + result.day -> '31' + 'month' in result -> True + 'minutes' in result -> False + result.dump() -> ['1999', '/', '12', '/', '31'] + - day: 31 + - month: 12 + - year: 1999 + """ + def __new__(cls, toklist=None, name=None, asList=True, modal=True ): + if isinstance(toklist, cls): + return toklist + retobj = object.__new__(cls) + retobj.__doinit = True + return retobj + + # Performance tuning: we construct a *lot* of these, so keep this + # constructor as small and fast as possible + def __init__( self, toklist=None, name=None, asList=True, modal=True, isinstance=isinstance ): + if self.__doinit: + self.__doinit = False + self.__name = None + self.__parent = None + self.__accumNames = {} + self.__asList = asList + self.__modal = modal + if toklist is None: + toklist = [] + if isinstance(toklist, list): + self.__toklist = toklist[:] + elif isinstance(toklist, _generatorType): + self.__toklist = list(toklist) + else: + self.__toklist = [toklist] + self.__tokdict = dict() + + if name is not None and name: + if not modal: + self.__accumNames[name] = 0 + if isinstance(name,int): + name = _ustr(name) # will always return a str, but use _ustr for consistency + self.__name = name + if not (isinstance(toklist, (type(None), basestring, list)) and toklist in (None,'',[])): + if isinstance(toklist,basestring): + toklist = [ toklist ] + if asList: + if isinstance(toklist,ParseResults): + self[name] = _ParseResultsWithOffset(toklist.copy(),0) + else: + self[name] = _ParseResultsWithOffset(ParseResults(toklist[0]),0) + self[name].__name = name + else: + try: + self[name] = toklist[0] + except (KeyError,TypeError,IndexError): + self[name] = toklist + + def __getitem__( self, i ): + if isinstance( i, (int,slice) ): + return self.__toklist[i] + else: + if i not in self.__accumNames: + return self.__tokdict[i][-1][0] + else: + return ParseResults([ v[0] for v in self.__tokdict[i] ]) + + def __setitem__( self, k, v, isinstance=isinstance ): + if isinstance(v,_ParseResultsWithOffset): + self.__tokdict[k] = self.__tokdict.get(k,list()) + [v] + sub = v[0] + elif isinstance(k,(int,slice)): + self.__toklist[k] = v + sub = v + else: + self.__tokdict[k] = self.__tokdict.get(k,list()) + [_ParseResultsWithOffset(v,0)] + sub = v + if isinstance(sub,ParseResults): + sub.__parent = wkref(self) + + def __delitem__( self, i ): + if isinstance(i,(int,slice)): + mylen = len( self.__toklist ) + del self.__toklist[i] + + # convert int to slice + if isinstance(i, int): + if i < 0: + i += mylen + i = slice(i, i+1) + # get removed indices + removed = list(range(*i.indices(mylen))) + removed.reverse() + # fixup indices in token dictionary + for name,occurrences in self.__tokdict.items(): + for j in removed: + for k, (value, position) in 
enumerate(occurrences): + occurrences[k] = _ParseResultsWithOffset(value, position - (position > j)) + else: + del self.__tokdict[i] + + def __contains__( self, k ): + return k in self.__tokdict + + def __len__( self ): return len( self.__toklist ) + def __bool__(self): return ( not not self.__toklist ) + __nonzero__ = __bool__ + def __iter__( self ): return iter( self.__toklist ) + def __reversed__( self ): return iter( self.__toklist[::-1] ) + def _iterkeys( self ): + if hasattr(self.__tokdict, "iterkeys"): + return self.__tokdict.iterkeys() + else: + return iter(self.__tokdict) + + def _itervalues( self ): + return (self[k] for k in self._iterkeys()) + + def _iteritems( self ): + return ((k, self[k]) for k in self._iterkeys()) + + if PY_3: + keys = _iterkeys + """Returns an iterator of all named result keys (Python 3.x only).""" + + values = _itervalues + """Returns an iterator of all named result values (Python 3.x only).""" + + items = _iteritems + """Returns an iterator of all named result key-value tuples (Python 3.x only).""" + + else: + iterkeys = _iterkeys + """Returns an iterator of all named result keys (Python 2.x only).""" + + itervalues = _itervalues + """Returns an iterator of all named result values (Python 2.x only).""" + + iteritems = _iteritems + """Returns an iterator of all named result key-value tuples (Python 2.x only).""" + + def keys( self ): + """Returns all named result keys (as a list in Python 2.x, as an iterator in Python 3.x).""" + return list(self.iterkeys()) + + def values( self ): + """Returns all named result values (as a list in Python 2.x, as an iterator in Python 3.x).""" + return list(self.itervalues()) + + def items( self ): + """Returns all named result key-values (as a list of tuples in Python 2.x, as an iterator in Python 3.x).""" + return list(self.iteritems()) + + def haskeys( self ): + """Since keys() returns an iterator, this method is helpful in bypassing + code that looks for the existence of any defined results names.""" + return bool(self.__tokdict) + + def pop( self, *args, **kwargs): + """ + Removes and returns item at specified index (default=C{last}). + Supports both C{list} and C{dict} semantics for C{pop()}. If passed no + argument or an integer argument, it will use C{list} semantics + and pop tokens from the list of parsed tokens. If passed a + non-integer argument (most likely a string), it will use C{dict} + semantics and pop the corresponding value from any defined + results names. A second default return value argument is + supported, just as in C{dict.pop()}. 
+ + Example:: + def remove_first(tokens): + tokens.pop(0) + print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321'] + print(OneOrMore(Word(nums)).addParseAction(remove_first).parseString("0 123 321")) # -> ['123', '321'] + + label = Word(alphas) + patt = label("LABEL") + OneOrMore(Word(nums)) + print(patt.parseString("AAB 123 321").dump()) + + # Use pop() in a parse action to remove named result (note that corresponding value is not + # removed from list form of results) + def remove_LABEL(tokens): + tokens.pop("LABEL") + return tokens + patt.addParseAction(remove_LABEL) + print(patt.parseString("AAB 123 321").dump()) + prints:: + ['AAB', '123', '321'] + - LABEL: AAB + + ['AAB', '123', '321'] + """ + if not args: + args = [-1] + for k,v in kwargs.items(): + if k == 'default': + args = (args[0], v) + else: + raise TypeError("pop() got an unexpected keyword argument '%s'" % k) + if (isinstance(args[0], int) or + len(args) == 1 or + args[0] in self): + index = args[0] + ret = self[index] + del self[index] + return ret + else: + defaultvalue = args[1] + return defaultvalue + + def get(self, key, defaultValue=None): + """ + Returns named result matching the given key, or if there is no + such name, then returns the given C{defaultValue} or C{None} if no + C{defaultValue} is specified. + + Similar to C{dict.get()}. + + Example:: + integer = Word(nums) + date_str = integer("year") + '/' + integer("month") + '/' + integer("day") + + result = date_str.parseString("1999/12/31") + print(result.get("year")) # -> '1999' + print(result.get("hour", "not specified")) # -> 'not specified' + print(result.get("hour")) # -> None + """ + if key in self: + return self[key] + else: + return defaultValue + + def insert( self, index, insStr ): + """ + Inserts new element at location index in the list of parsed tokens. + + Similar to C{list.insert()}. + + Example:: + print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321'] + + # use a parse action to insert the parse location in the front of the parsed results + def insert_locn(locn, tokens): + tokens.insert(0, locn) + print(OneOrMore(Word(nums)).addParseAction(insert_locn).parseString("0 123 321")) # -> [0, '0', '123', '321'] + """ + self.__toklist.insert(index, insStr) + # fixup indices in token dictionary + for name,occurrences in self.__tokdict.items(): + for k, (value, position) in enumerate(occurrences): + occurrences[k] = _ParseResultsWithOffset(value, position + (position > index)) + + def append( self, item ): + """ + Add single element to end of ParseResults list of elements. + + Example:: + print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321'] + + # use a parse action to compute the sum of the parsed integers, and add it to the end + def append_sum(tokens): + tokens.append(sum(map(int, tokens))) + print(OneOrMore(Word(nums)).addParseAction(append_sum).parseString("0 123 321")) # -> ['0', '123', '321', 444] + """ + self.__toklist.append(item) + + def extend( self, itemseq ): + """ + Add sequence of elements to end of ParseResults list of elements. 
+ + Example:: + patt = OneOrMore(Word(alphas)) + + # use a parse action to append the reverse of the matched strings, to make a palindrome + def make_palindrome(tokens): + tokens.extend(reversed([t[::-1] for t in tokens])) + return ''.join(tokens) + print(patt.addParseAction(make_palindrome).parseString("lskdj sdlkjf lksd")) # -> 'lskdjsdlkjflksddsklfjkldsjdksl' + """ + if isinstance(itemseq, ParseResults): + self += itemseq + else: + self.__toklist.extend(itemseq) + + def clear( self ): + """ + Clear all elements and results names. + """ + del self.__toklist[:] + self.__tokdict.clear() + + def __getattr__( self, name ): + try: + return self[name] + except KeyError: + return "" + + if name in self.__tokdict: + if name not in self.__accumNames: + return self.__tokdict[name][-1][0] + else: + return ParseResults([ v[0] for v in self.__tokdict[name] ]) + else: + return "" + + def __add__( self, other ): + ret = self.copy() + ret += other + return ret + + def __iadd__( self, other ): + if other.__tokdict: + offset = len(self.__toklist) + addoffset = lambda a: offset if a<0 else a+offset + otheritems = other.__tokdict.items() + otherdictitems = [(k, _ParseResultsWithOffset(v[0],addoffset(v[1])) ) + for (k,vlist) in otheritems for v in vlist] + for k,v in otherdictitems: + self[k] = v + if isinstance(v[0],ParseResults): + v[0].__parent = wkref(self) + + self.__toklist += other.__toklist + self.__accumNames.update( other.__accumNames ) + return self + + def __radd__(self, other): + if isinstance(other,int) and other == 0: + # useful for merging many ParseResults using sum() builtin + return self.copy() + else: + # this may raise a TypeError - so be it + return other + self + + def __repr__( self ): + return "(%s, %s)" % ( repr( self.__toklist ), repr( self.__tokdict ) ) + + def __str__( self ): + return '[' + ', '.join(_ustr(i) if isinstance(i, ParseResults) else repr(i) for i in self.__toklist) + ']' + + def _asStringList( self, sep='' ): + out = [] + for item in self.__toklist: + if out and sep: + out.append(sep) + if isinstance( item, ParseResults ): + out += item._asStringList() + else: + out.append( _ustr(item) ) + return out + + def asList( self ): + """ + Returns the parse results as a nested list of matching tokens, all converted to strings. + + Example:: + patt = OneOrMore(Word(alphas)) + result = patt.parseString("sldkj lsdkj sldkj") + # even though the result prints in string-like form, it is actually a pyparsing ParseResults + print(type(result), result) # -> <class 'pyparsing.ParseResults'> ['sldkj', 'lsdkj', 'sldkj'] + + # Use asList() to create an actual list + result_list = result.asList() + print(type(result_list), result_list) # -> <class 'list'> ['sldkj', 'lsdkj', 'sldkj'] + """ + return [res.asList() if isinstance(res,ParseResults) else res for res in self.__toklist] + + def asDict( self ): + """ + Returns the named parse results as a nested dictionary. 
+ + Example:: + integer = Word(nums) + date_str = integer("year") + '/' + integer("month") + '/' + integer("day") + + result = date_str.parseString('12/31/1999') + print(type(result), repr(result)) # -> <class 'pyparsing.ParseResults'> (['12', '/', '31', '/', '1999'], {'day': [('1999', 4)], 'year': [('12', 0)], 'month': [('31', 2)]}) + + result_dict = result.asDict() + print(type(result_dict), repr(result_dict)) # -> <class 'dict'> {'day': '1999', 'year': '12', 'month': '31'} + + # even though a ParseResults supports dict-like access, sometime you just need to have a dict + import json + print(json.dumps(result)) # -> Exception: TypeError: ... is not JSON serializable + print(json.dumps(result.asDict())) # -> {"month": "31", "day": "1999", "year": "12"} + """ + if PY_3: + item_fn = self.items + else: + item_fn = self.iteritems + + def toItem(obj): + if isinstance(obj, ParseResults): + if obj.haskeys(): + return obj.asDict() + else: + return [toItem(v) for v in obj] + else: + return obj + + return dict((k,toItem(v)) for k,v in item_fn()) + + def copy( self ): + """ + Returns a new copy of a C{ParseResults} object. + """ + ret = ParseResults( self.__toklist ) + ret.__tokdict = self.__tokdict.copy() + ret.__parent = self.__parent + ret.__accumNames.update( self.__accumNames ) + ret.__name = self.__name + return ret + + def asXML( self, doctag=None, namedItemsOnly=False, indent="", formatted=True ): + """ + (Deprecated) Returns the parse results as XML. Tags are created for tokens and lists that have defined results names. + """ + nl = "\n" + out = [] + namedItems = dict((v[1],k) for (k,vlist) in self.__tokdict.items() + for v in vlist) + nextLevelIndent = indent + " " + + # collapse out indents if formatting is not desired + if not formatted: + indent = "" + nextLevelIndent = "" + nl = "" + + selfTag = None + if doctag is not None: + selfTag = doctag + else: + if self.__name: + selfTag = self.__name + + if not selfTag: + if namedItemsOnly: + return "" + else: + selfTag = "ITEM" + + out += [ nl, indent, "<", selfTag, ">" ] + + for i,res in enumerate(self.__toklist): + if isinstance(res,ParseResults): + if i in namedItems: + out += [ res.asXML(namedItems[i], + namedItemsOnly and doctag is None, + nextLevelIndent, + formatted)] + else: + out += [ res.asXML(None, + namedItemsOnly and doctag is None, + nextLevelIndent, + formatted)] + else: + # individual token, see if there is a name for it + resTag = None + if i in namedItems: + resTag = namedItems[i] + if not resTag: + if namedItemsOnly: + continue + else: + resTag = "ITEM" + xmlBodyText = _xml_escape(_ustr(res)) + out += [ nl, nextLevelIndent, "<", resTag, ">", + xmlBodyText, + "</", resTag, ">" ] + + out += [ nl, indent, "</", selfTag, ">" ] + return "".join(out) + + def __lookup(self,sub): + for k,vlist in self.__tokdict.items(): + for v,loc in vlist: + if sub is v: + return k + return None + + def getName(self): + """ + Returns the results name for this token expression. Useful when several + different expressions might match at a particular location. 
+ + Example:: + integer = Word(nums) + ssn_expr = Regex(r"\d\d\d-\d\d-\d\d\d\d") + house_number_expr = Suppress('#') + Word(nums, alphanums) + user_data = (Group(house_number_expr)("house_number") + | Group(ssn_expr)("ssn") + | Group(integer)("age")) + user_info = OneOrMore(user_data) + + result = user_info.parseString("22 111-22-3333 #221B") + for item in result: + print(item.getName(), ':', item[0]) + prints:: + age : 22 + ssn : 111-22-3333 + house_number : 221B + """ + if self.__name: + return self.__name + elif self.__parent: + par = self.__parent() + if par: + return par.__lookup(self) + else: + return None + elif (len(self) == 1 and + len(self.__tokdict) == 1 and + next(iter(self.__tokdict.values()))[0][1] in (0,-1)): + return next(iter(self.__tokdict.keys())) + else: + return None + + def dump(self, indent='', depth=0, full=True): + """ + Diagnostic method for listing out the contents of a C{ParseResults}. + Accepts an optional C{indent} argument so that this string can be embedded + in a nested display of other data. + + Example:: + integer = Word(nums) + date_str = integer("year") + '/' + integer("month") + '/' + integer("day") + + result = date_str.parseString('12/31/1999') + print(result.dump()) + prints:: + ['12', '/', '31', '/', '1999'] + - day: 1999 + - month: 31 + - year: 12 + """ + out = [] + NL = '\n' + out.append( indent+_ustr(self.asList()) ) + if full: + if self.haskeys(): + items = sorted((str(k), v) for k,v in self.items()) + for k,v in items: + if out: + out.append(NL) + out.append( "%s%s- %s: " % (indent,(' '*depth), k) ) + if isinstance(v,ParseResults): + if v: + out.append( v.dump(indent,depth+1) ) + else: + out.append(_ustr(v)) + else: + out.append(repr(v)) + elif any(isinstance(vv,ParseResults) for vv in self): + v = self + for i,vv in enumerate(v): + if isinstance(vv,ParseResults): + out.append("\n%s%s[%d]:\n%s%s%s" % (indent,(' '*(depth)),i,indent,(' '*(depth+1)),vv.dump(indent,depth+1) )) + else: + out.append("\n%s%s[%d]:\n%s%s%s" % (indent,(' '*(depth)),i,indent,(' '*(depth+1)),_ustr(vv))) + + return "".join(out) + + def pprint(self, *args, **kwargs): + """ + Pretty-printer for parsed results as a list, using the C{pprint} module. + Accepts additional positional or keyword args as defined for the + C{pprint.pprint} method. 
(U{http://docs.python.org/3/library/pprint.html#pprint.pprint}) + + Example:: + ident = Word(alphas, alphanums) + num = Word(nums) + func = Forward() + term = ident | num | Group('(' + func + ')') + func <<= ident + Group(Optional(delimitedList(term))) + result = func.parseString("fna a,b,(fnb c,d,200),100") + result.pprint(width=40) + prints:: + ['fna', + ['a', + 'b', + ['(', 'fnb', ['c', 'd', '200'], ')'], + '100']] + """ + pprint.pprint(self.asList(), *args, **kwargs) + + # add support for pickle protocol + def __getstate__(self): + return ( self.__toklist, + ( self.__tokdict.copy(), + self.__parent is not None and self.__parent() or None, + self.__accumNames, + self.__name ) ) + + def __setstate__(self,state): + self.__toklist = state[0] + (self.__tokdict, + par, + inAccumNames, + self.__name) = state[1] + self.__accumNames = {} + self.__accumNames.update(inAccumNames) + if par is not None: + self.__parent = wkref(par) + else: + self.__parent = None + + def __getnewargs__(self): + return self.__toklist, self.__name, self.__asList, self.__modal + + def __dir__(self): + return (dir(type(self)) + list(self.keys())) + +collections.MutableMapping.register(ParseResults) + +def col (loc,strg): + """Returns current column within a string, counting newlines as line separators. + The first column is number 1. + + Note: the default parsing behavior is to expand tabs in the input string + before starting the parsing process. See L{I{ParserElement.parseString}<ParserElement.parseString>} for more information + on parsing strings containing C{<TAB>}s, and suggested methods to maintain a + consistent view of the parsed string, the parse location, and line and column + positions within the parsed string. + """ + s = strg + return 1 if 0<loc<len(s) and s[loc-1] == '\n' else loc - s.rfind("\n", 0, loc) + +def lineno(loc,strg): + """Returns current line number within a string, counting newlines as line separators. + The first line is number 1. + + Note: the default parsing behavior is to expand tabs in the input string + before starting the parsing process. See L{I{ParserElement.parseString}<ParserElement.parseString>} for more information + on parsing strings containing C{<TAB>}s, and suggested methods to maintain a + consistent view of the parsed string, the parse location, and line and column + positions within the parsed string. + """ + return strg.count("\n",0,loc) + 1 + +def line( loc, strg ): + """Returns the line of text containing loc within a string, counting newlines as line separators. 
+ """ + lastCR = strg.rfind("\n", 0, loc) + nextCR = strg.find("\n", loc) + if nextCR >= 0: + return strg[lastCR+1:nextCR] + else: + return strg[lastCR+1:] + +def _defaultStartDebugAction( instring, loc, expr ): + print (("Match " + _ustr(expr) + " at loc " + _ustr(loc) + "(%d,%d)" % ( lineno(loc,instring), col(loc,instring) ))) + +def _defaultSuccessDebugAction( instring, startloc, endloc, expr, toks ): + print ("Matched " + _ustr(expr) + " -> " + str(toks.asList())) + +def _defaultExceptionDebugAction( instring, loc, expr, exc ): + print ("Exception raised:" + _ustr(exc)) + +def nullDebugAction(*args): + """'Do-nothing' debug action, to suppress debugging output during parsing.""" + pass + +# Only works on Python 3.x - nonlocal is toxic to Python 2 installs +#~ 'decorator to trim function calls to match the arity of the target' +#~ def _trim_arity(func, maxargs=3): + #~ if func in singleArgBuiltins: + #~ return lambda s,l,t: func(t) + #~ limit = 0 + #~ foundArity = False + #~ def wrapper(*args): + #~ nonlocal limit,foundArity + #~ while 1: + #~ try: + #~ ret = func(*args[limit:]) + #~ foundArity = True + #~ return ret + #~ except TypeError: + #~ if limit == maxargs or foundArity: + #~ raise + #~ limit += 1 + #~ continue + #~ return wrapper + +# this version is Python 2.x-3.x cross-compatible +'decorator to trim function calls to match the arity of the target' +def _trim_arity(func, maxargs=2): + if func in singleArgBuiltins: + return lambda s,l,t: func(t) + limit = [0] + foundArity = [False] + + # traceback return data structure changed in Py3.5 - normalize back to plain tuples + if system_version[:2] >= (3,5): + def extract_stack(limit=0): + # special handling for Python 3.5.0 - extra deep call stack by 1 + offset = -3 if system_version == (3,5,0) else -2 + frame_summary = traceback.extract_stack(limit=-offset+limit-1)[offset] + return [(frame_summary.filename, frame_summary.lineno)] + def extract_tb(tb, limit=0): + frames = traceback.extract_tb(tb, limit=limit) + frame_summary = frames[-1] + return [(frame_summary.filename, frame_summary.lineno)] + else: + extract_stack = traceback.extract_stack + extract_tb = traceback.extract_tb + + # synthesize what would be returned by traceback.extract_stack at the call to + # user's parse action 'func', so that we don't incur call penalty at parse time + + LINE_DIFF = 6 + # IF ANY CODE CHANGES, EVEN JUST COMMENTS OR BLANK LINES, BETWEEN THE NEXT LINE AND + # THE CALL TO FUNC INSIDE WRAPPER, LINE_DIFF MUST BE MODIFIED!!!! 
+ this_line = extract_stack(limit=2)[-1] + pa_call_line_synth = (this_line[0], this_line[1]+LINE_DIFF) + + def wrapper(*args): + while 1: + try: + ret = func(*args[limit[0]:]) + foundArity[0] = True + return ret + except TypeError: + # re-raise TypeErrors if they did not come from our arity testing + if foundArity[0]: + raise + else: + try: + tb = sys.exc_info()[-1] + if not extract_tb(tb, limit=2)[-1][:2] == pa_call_line_synth: + raise + finally: + del tb + + if limit[0] <= maxargs: + limit[0] += 1 + continue + raise + + # copy func name to wrapper for sensible debug output + func_name = "<parse action>" + try: + func_name = getattr(func, '__name__', + getattr(func, '__class__').__name__) + except Exception: + func_name = str(func) + wrapper.__name__ = func_name + + return wrapper + +class ParserElement(object): + """Abstract base level parser element class.""" + DEFAULT_WHITE_CHARS = " \n\t\r" + verbose_stacktrace = False + + @staticmethod + def setDefaultWhitespaceChars( chars ): + r""" + Overrides the default whitespace chars + + Example:: + # default whitespace chars are space, <TAB> and newline + OneOrMore(Word(alphas)).parseString("abc def\nghi jkl") # -> ['abc', 'def', 'ghi', 'jkl'] + + # change to just treat newline as significant + ParserElement.setDefaultWhitespaceChars(" \t") + OneOrMore(Word(alphas)).parseString("abc def\nghi jkl") # -> ['abc', 'def'] + """ + ParserElement.DEFAULT_WHITE_CHARS = chars + + @staticmethod + def inlineLiteralsUsing(cls): + """ + Set class to be used for inclusion of string literals into a parser. + + Example:: + # default literal class used is Literal + integer = Word(nums) + date_str = integer("year") + '/' + integer("month") + '/' + integer("day") + + date_str.parseString("1999/12/31") # -> ['1999', '/', '12', '/', '31'] + + + # change to Suppress + ParserElement.inlineLiteralsUsing(Suppress) + date_str = integer("year") + '/' + integer("month") + '/' + integer("day") + + date_str.parseString("1999/12/31") # -> ['1999', '12', '31'] + """ + ParserElement._literalStringClass = cls + + def __init__( self, savelist=False ): + self.parseAction = list() + self.failAction = None + #~ self.name = "<unknown>" # don't define self.name, let subclasses try/except upcall + self.strRepr = None + self.resultsName = None + self.saveAsList = savelist + self.skipWhitespace = True + self.whiteChars = ParserElement.DEFAULT_WHITE_CHARS + self.copyDefaultWhiteChars = True + self.mayReturnEmpty = False # used when checking for left-recursion + self.keepTabs = False + self.ignoreExprs = list() + self.debug = False + self.streamlined = False + self.mayIndexError = True # used to optimize exception handling for subclasses that don't advance parse index + self.errmsg = "" + self.modalResults = True # used to mark results names as modal (report only last) or cumulative (list all) + self.debugActions = ( None, None, None ) #custom debug actions + self.re = None + self.callPreparse = True # used to avoid redundant calls to preParse + self.callDuringTry = False + + def copy( self ): + """ + Make a copy of this C{ParserElement}. Useful for defining different parse actions + for the same parsing pattern, using copies of the original parse element. 
+ + Example:: + integer = Word(nums).setParseAction(lambda toks: int(toks[0])) + integerK = integer.copy().addParseAction(lambda toks: toks[0]*1024) + Suppress("K") + integerM = integer.copy().addParseAction(lambda toks: toks[0]*1024*1024) + Suppress("M") + + print(OneOrMore(integerK | integerM | integer).parseString("5K 100 640K 256M")) + prints:: + [5120, 100, 655360, 268435456] + Equivalent form of C{expr.copy()} is just C{expr()}:: + integerM = integer().addParseAction(lambda toks: toks[0]*1024*1024) + Suppress("M") + """ + cpy = copy.copy( self ) + cpy.parseAction = self.parseAction[:] + cpy.ignoreExprs = self.ignoreExprs[:] + if self.copyDefaultWhiteChars: + cpy.whiteChars = ParserElement.DEFAULT_WHITE_CHARS + return cpy + + def setName( self, name ): + """ + Define name for this expression, makes debugging and exception messages clearer. + + Example:: + Word(nums).parseString("ABC") # -> Exception: Expected W:(0123...) (at char 0), (line:1, col:1) + Word(nums).setName("integer").parseString("ABC") # -> Exception: Expected integer (at char 0), (line:1, col:1) + """ + self.name = name + self.errmsg = "Expected " + self.name + if hasattr(self,"exception"): + self.exception.msg = self.errmsg + return self + + def setResultsName( self, name, listAllMatches=False ): + """ + Define name for referencing matching tokens as a nested attribute + of the returned parse results. + NOTE: this returns a *copy* of the original C{ParserElement} object; + this is so that the client can define a basic element, such as an + integer, and reference it in multiple places with different names. + + You can also set results names using the abbreviated syntax, + C{expr("name")} in place of C{expr.setResultsName("name")} - + see L{I{__call__}<__call__>}. + + Example:: + date_str = (integer.setResultsName("year") + '/' + + integer.setResultsName("month") + '/' + + integer.setResultsName("day")) + + # equivalent form: + date_str = integer("year") + '/' + integer("month") + '/' + integer("day") + """ + newself = self.copy() + if name.endswith("*"): + name = name[:-1] + listAllMatches=True + newself.resultsName = name + newself.modalResults = not listAllMatches + return newself + + def setBreak(self,breakFlag = True): + """Method to invoke the Python pdb debugger when this element is + about to be parsed. Set C{breakFlag} to True to enable, False to + disable. + """ + if breakFlag: + _parseMethod = self._parse + def breaker(instring, loc, doActions=True, callPreParse=True): + import pdb + pdb.set_trace() + return _parseMethod( instring, loc, doActions, callPreParse ) + breaker._originalParseMethod = _parseMethod + self._parse = breaker + else: + if hasattr(self._parse,"_originalParseMethod"): + self._parse = self._parse._originalParseMethod + return self + + def setParseAction( self, *fns, **kwargs ): + """ + Define action to perform when successfully matching parse element definition. + Parse action fn is a callable method with 0-3 arguments, called as C{fn(s,loc,toks)}, + C{fn(loc,toks)}, C{fn(toks)}, or just C{fn()}, where: + - s = the original string being parsed (see note below) + - loc = the location of the matching substring + - toks = a list of the matched tokens, packaged as a C{L{ParseResults}} object + If the functions in fns modify the tokens, they can return them as the return + value from fn, and the modified list of tokens will replace the original. + Otherwise, fn does not need to return any value. 
+ + Optional keyword arguments: + - callDuringTry = (default=C{False}) indicate if parse action should be run during lookaheads and alternate testing + + Note: the default parsing behavior is to expand tabs in the input string + before starting the parsing process. See L{I{parseString}<parseString>} for more information + on parsing strings containing C{<TAB>}s, and suggested methods to maintain a + consistent view of the parsed string, the parse location, and line and column + positions within the parsed string. + + Example:: + integer = Word(nums) + date_str = integer + '/' + integer + '/' + integer + + date_str.parseString("1999/12/31") # -> ['1999', '/', '12', '/', '31'] + + # use parse action to convert to ints at parse time + integer = Word(nums).setParseAction(lambda toks: int(toks[0])) + date_str = integer + '/' + integer + '/' + integer + + # note that integer fields are now ints, not strings + date_str.parseString("1999/12/31") # -> [1999, '/', 12, '/', 31] + """ + self.parseAction = list(map(_trim_arity, list(fns))) + self.callDuringTry = kwargs.get("callDuringTry", False) + return self + + def addParseAction( self, *fns, **kwargs ): + """ + Add parse action to expression's list of parse actions. See L{I{setParseAction}<setParseAction>}. + + See examples in L{I{copy}<copy>}. + """ + self.parseAction += list(map(_trim_arity, list(fns))) + self.callDuringTry = self.callDuringTry or kwargs.get("callDuringTry", False) + return self + + def addCondition(self, *fns, **kwargs): + """Add a boolean predicate function to expression's list of parse actions. See + L{I{setParseAction}<setParseAction>} for function call signatures. Unlike C{setParseAction}, + functions passed to C{addCondition} need to return boolean success/fail of the condition. + + Optional keyword arguments: + - message = define a custom message to be used in the raised exception + - fatal = if True, will raise ParseFatalException to stop parsing immediately; otherwise will raise ParseException + + Example:: + integer = Word(nums).setParseAction(lambda toks: int(toks[0])) + year_int = integer.copy() + year_int.addCondition(lambda toks: toks[0] >= 2000, message="Only support years 2000 and later") + date_str = year_int + '/' + integer + '/' + integer + + result = date_str.parseString("1999/12/31") # -> Exception: Only support years 2000 and later (at char 0), (line:1, col:1) + """ + msg = kwargs.get("message", "failed user-defined condition") + exc_type = ParseFatalException if kwargs.get("fatal", False) else ParseException + for fn in fns: + def pa(s,l,t): + if not bool(_trim_arity(fn)(s,l,t)): + raise exc_type(s,l,msg) + self.parseAction.append(pa) + self.callDuringTry = self.callDuringTry or kwargs.get("callDuringTry", False) + return self + + def setFailAction( self, fn ): + """Define action to perform if parsing fails at this expression. + Fail acton fn is a callable function that takes the arguments + C{fn(s,loc,expr,err)} where: + - s = string being parsed + - loc = location where expression match was attempted and failed + - expr = the parse expression that failed + - err = the exception thrown + The function returns no value. 
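A short sketch combining setParseAction and addCondition as documented above; it assumes the vendored pyparsing module is importable, and the year/date names are illustrative:

    from pyparsing import Word, nums, ParseException

    integer = Word(nums).setParseAction(lambda toks: int(toks[0]))
    year = integer.copy().addCondition(lambda toks: toks[0] >= 2000,
                                       message="Only support years 2000 and later")
    date_str = year + '/' + integer + '/' + integer

    print(date_str.parseString("2024/12/31"))   # -> [2024, '/', 12, '/', 31]
    try:
        date_str.parseString("1999/12/31")
    except ParseException as pe:
        print(pe)   # reports the failed condition at char 0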
It may throw C{L{ParseFatalException}} + if it is desired to stop parsing immediately.""" + self.failAction = fn + return self + + def _skipIgnorables( self, instring, loc ): + exprsFound = True + while exprsFound: + exprsFound = False + for e in self.ignoreExprs: + try: + while 1: + loc,dummy = e._parse( instring, loc ) + exprsFound = True + except ParseException: + pass + return loc + + def preParse( self, instring, loc ): + if self.ignoreExprs: + loc = self._skipIgnorables( instring, loc ) + + if self.skipWhitespace: + wt = self.whiteChars + instrlen = len(instring) + while loc < instrlen and instring[loc] in wt: + loc += 1 + + return loc + + def parseImpl( self, instring, loc, doActions=True ): + return loc, [] + + def postParse( self, instring, loc, tokenlist ): + return tokenlist + + #~ @profile + def _parseNoCache( self, instring, loc, doActions=True, callPreParse=True ): + debugging = ( self.debug ) #and doActions ) + + if debugging or self.failAction: + #~ print ("Match",self,"at loc",loc,"(%d,%d)" % ( lineno(loc,instring), col(loc,instring) )) + if (self.debugActions[0] ): + self.debugActions[0]( instring, loc, self ) + if callPreParse and self.callPreparse: + preloc = self.preParse( instring, loc ) + else: + preloc = loc + tokensStart = preloc + try: + try: + loc,tokens = self.parseImpl( instring, preloc, doActions ) + except IndexError: + raise ParseException( instring, len(instring), self.errmsg, self ) + except ParseBaseException as err: + #~ print ("Exception raised:", err) + if self.debugActions[2]: + self.debugActions[2]( instring, tokensStart, self, err ) + if self.failAction: + self.failAction( instring, tokensStart, self, err ) + raise + else: + if callPreParse and self.callPreparse: + preloc = self.preParse( instring, loc ) + else: + preloc = loc + tokensStart = preloc + if self.mayIndexError or loc >= len(instring): + try: + loc,tokens = self.parseImpl( instring, preloc, doActions ) + except IndexError: + raise ParseException( instring, len(instring), self.errmsg, self ) + else: + loc,tokens = self.parseImpl( instring, preloc, doActions ) + + tokens = self.postParse( instring, loc, tokens ) + + retTokens = ParseResults( tokens, self.resultsName, asList=self.saveAsList, modal=self.modalResults ) + if self.parseAction and (doActions or self.callDuringTry): + if debugging: + try: + for fn in self.parseAction: + tokens = fn( instring, tokensStart, retTokens ) + if tokens is not None: + retTokens = ParseResults( tokens, + self.resultsName, + asList=self.saveAsList and isinstance(tokens,(ParseResults,list)), + modal=self.modalResults ) + except ParseBaseException as err: + #~ print "Exception raised in user parse action:", err + if (self.debugActions[2] ): + self.debugActions[2]( instring, tokensStart, self, err ) + raise + else: + for fn in self.parseAction: + tokens = fn( instring, tokensStart, retTokens ) + if tokens is not None: + retTokens = ParseResults( tokens, + self.resultsName, + asList=self.saveAsList and isinstance(tokens,(ParseResults,list)), + modal=self.modalResults ) + + if debugging: + #~ print ("Matched",self,"->",retTokens.asList()) + if (self.debugActions[1] ): + self.debugActions[1]( instring, tokensStart, loc, self, retTokens ) + + return loc, retTokens + + def tryParse( self, instring, loc ): + try: + return self._parse( instring, loc, doActions=False )[0] + except ParseFatalException: + raise ParseException( instring, loc, self.errmsg, self) + + def canParseNext(self, instring, loc): + try: + self.tryParse(instring, loc) + except (ParseException, 
IndexError): + return False + else: + return True + + class _UnboundedCache(object): + def __init__(self): + cache = {} + self.not_in_cache = not_in_cache = object() + + def get(self, key): + return cache.get(key, not_in_cache) + + def set(self, key, value): + cache[key] = value + + def clear(self): + cache.clear() + + self.get = types.MethodType(get, self) + self.set = types.MethodType(set, self) + self.clear = types.MethodType(clear, self) + + if _OrderedDict is not None: + class _FifoCache(object): + def __init__(self, size): + self.not_in_cache = not_in_cache = object() + + cache = _OrderedDict() + + def get(self, key): + return cache.get(key, not_in_cache) + + def set(self, key, value): + cache[key] = value + if len(cache) > size: + cache.popitem(False) + + def clear(self): + cache.clear() + + self.get = types.MethodType(get, self) + self.set = types.MethodType(set, self) + self.clear = types.MethodType(clear, self) + + else: + class _FifoCache(object): + def __init__(self, size): + self.not_in_cache = not_in_cache = object() + + cache = {} + key_fifo = collections.deque([], size) + + def get(self, key): + return cache.get(key, not_in_cache) + + def set(self, key, value): + cache[key] = value + if len(cache) > size: + cache.pop(key_fifo.popleft(), None) + key_fifo.append(key) + + def clear(self): + cache.clear() + key_fifo.clear() + + self.get = types.MethodType(get, self) + self.set = types.MethodType(set, self) + self.clear = types.MethodType(clear, self) + + # argument cache for optimizing repeated calls when backtracking through recursive expressions + packrat_cache = {} # this is set later by enabledPackrat(); this is here so that resetCache() doesn't fail + packrat_cache_lock = RLock() + packrat_cache_stats = [0, 0] + + # this method gets repeatedly called during backtracking with the same arguments - + # we can cache these arguments and save ourselves the trouble of re-parsing the contained expression + def _parseCache( self, instring, loc, doActions=True, callPreParse=True ): + HIT, MISS = 0, 1 + lookup = (self, instring, loc, callPreParse, doActions) + with ParserElement.packrat_cache_lock: + cache = ParserElement.packrat_cache + value = cache.get(lookup) + if value is cache.not_in_cache: + ParserElement.packrat_cache_stats[MISS] += 1 + try: + value = self._parseNoCache(instring, loc, doActions, callPreParse) + except ParseBaseException as pe: + # cache a copy of the exception, without the traceback + cache.set(lookup, pe.__class__(*pe.args)) + raise + else: + cache.set(lookup, (value[0], value[1].copy())) + return value + else: + ParserElement.packrat_cache_stats[HIT] += 1 + if isinstance(value, Exception): + raise value + return (value[0], value[1].copy()) + + _parse = _parseNoCache + + @staticmethod + def resetCache(): + ParserElement.packrat_cache.clear() + ParserElement.packrat_cache_stats[:] = [0] * len(ParserElement.packrat_cache_stats) + + _packratEnabled = False + @staticmethod + def enablePackrat(cache_size_limit=128): + """Enables "packrat" parsing, which adds memoizing to the parsing logic. + Repeated parse attempts at the same string location (which happens + often in many complex grammars) can immediately return a cached value, + instead of re-executing parsing/validating code. Memoizing is done of + both valid results and parsing exceptions. 
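A minimal sketch of turning on the packrat cache described here, assuming the vendored module is importable as pyparsing; the grammar is a placeholder:

    import pyparsing
    from pyparsing import Word, nums

    # enable memoization before building/running grammars; default is a FIFO cache of 128 entries
    pyparsing.ParserElement.enablePackrat()
    # pyparsing.ParserElement.enablePackrat(None) would make the cache unbounded instead

    integer = Word(nums)
    print(integer.parseString("12345"))   # -> ['12345']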
+ + Parameters: + - cache_size_limit - (default=C{128}) - if an integer value is provided + will limit the size of the packrat cache; if None is passed, then + the cache size will be unbounded; if 0 is passed, the cache will + be effectively disabled. + + This speedup may break existing programs that use parse actions that + have side-effects. For this reason, packrat parsing is disabled when + you first import pyparsing. To activate the packrat feature, your + program must call the class method C{ParserElement.enablePackrat()}. If + your program uses C{psyco} to "compile as you go", you must call + C{enablePackrat} before calling C{psyco.full()}. If you do not do this, + Python will crash. For best results, call C{enablePackrat()} immediately + after importing pyparsing. + + Example:: + import pyparsing + pyparsing.ParserElement.enablePackrat() + """ + if not ParserElement._packratEnabled: + ParserElement._packratEnabled = True + if cache_size_limit is None: + ParserElement.packrat_cache = ParserElement._UnboundedCache() + else: + ParserElement.packrat_cache = ParserElement._FifoCache(cache_size_limit) + ParserElement._parse = ParserElement._parseCache + + def parseString( self, instring, parseAll=False ): + """ + Execute the parse expression with the given string. + This is the main interface to the client code, once the complete + expression has been built. + + If you want the grammar to require that the entire input string be + successfully parsed, then set C{parseAll} to True (equivalent to ending + the grammar with C{L{StringEnd()}}). + + Note: C{parseString} implicitly calls C{expandtabs()} on the input string, + in order to report proper column numbers in parse actions. + If the input string contains tabs and + the grammar uses parse actions that use the C{loc} argument to index into the + string being parsed, you can ensure you have a consistent view of the input + string by: + - calling C{parseWithTabs} on your grammar before calling C{parseString} + (see L{I{parseWithTabs}<parseWithTabs>}) + - define your parse action using the full C{(s,loc,toks)} signature, and + reference the input string using the parse action's C{s} argument + - explictly expand the tabs in your input string before calling + C{parseString} + + Example:: + Word('a').parseString('aaaaabaaa') # -> ['aaaaa'] + Word('a').parseString('aaaaabaaa', parseAll=True) # -> Exception: Expected end of text + """ + ParserElement.resetCache() + if not self.streamlined: + self.streamline() + #~ self.saveAsList = True + for e in self.ignoreExprs: + e.streamline() + if not self.keepTabs: + instring = instring.expandtabs() + try: + loc, tokens = self._parse( instring, 0 ) + if parseAll: + loc = self.preParse( instring, loc ) + se = Empty() + StringEnd() + se._parse( instring, loc ) + except ParseBaseException as exc: + if ParserElement.verbose_stacktrace: + raise + else: + # catch and re-raise exception from here, clears out pyparsing internal stack trace + raise exc + else: + return tokens + + def scanString( self, instring, maxMatches=_MAX_INT, overlap=False ): + """ + Scan the input string for expression matches. Each match will return the + matching tokens, start location, and end location. May be called with optional + C{maxMatches} argument, to clip scanning after 'n' matches are found. If + C{overlap} is specified, then overlapping matches will be reported. + + Note that the start and end locations are reported relative to the string + being parsed. 
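As a quick illustration of the parseAll flag documented for parseString (a sketch based on the docstring's own Word('a') example):

    from pyparsing import Word, ParseException

    expr = Word('a')
    print(expr.parseString('aaaaabaaa'))               # -> ['aaaaa'] (trailing text is ignored)
    try:
        expr.parseString('aaaaabaaa', parseAll=True)   # now the whole input must match
    except ParseException as pe:
        print(pe)                                      # -> Expected end of text ...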
See L{I{parseString}<parseString>} for more information on parsing + strings with embedded tabs. + + Example:: + source = "sldjf123lsdjjkf345sldkjf879lkjsfd987" + print(source) + for tokens,start,end in Word(alphas).scanString(source): + print(' '*start + '^'*(end-start)) + print(' '*start + tokens[0]) + + prints:: + + sldjf123lsdjjkf345sldkjf879lkjsfd987 + ^^^^^ + sldjf + ^^^^^^^ + lsdjjkf + ^^^^^^ + sldkjf + ^^^^^^ + lkjsfd + """ + if not self.streamlined: + self.streamline() + for e in self.ignoreExprs: + e.streamline() + + if not self.keepTabs: + instring = _ustr(instring).expandtabs() + instrlen = len(instring) + loc = 0 + preparseFn = self.preParse + parseFn = self._parse + ParserElement.resetCache() + matches = 0 + try: + while loc <= instrlen and matches < maxMatches: + try: + preloc = preparseFn( instring, loc ) + nextLoc,tokens = parseFn( instring, preloc, callPreParse=False ) + except ParseException: + loc = preloc+1 + else: + if nextLoc > loc: + matches += 1 + yield tokens, preloc, nextLoc + if overlap: + nextloc = preparseFn( instring, loc ) + if nextloc > loc: + loc = nextLoc + else: + loc += 1 + else: + loc = nextLoc + else: + loc = preloc+1 + except ParseBaseException as exc: + if ParserElement.verbose_stacktrace: + raise + else: + # catch and re-raise exception from here, clears out pyparsing internal stack trace + raise exc + + def transformString( self, instring ): + """ + Extension to C{L{scanString}}, to modify matching text with modified tokens that may + be returned from a parse action. To use C{transformString}, define a grammar and + attach a parse action to it that modifies the returned token list. + Invoking C{transformString()} on a target string will then scan for matches, + and replace the matched text patterns according to the logic in the parse + action. C{transformString()} returns the resulting transformed string. + + Example:: + wd = Word(alphas) + wd.setParseAction(lambda toks: toks[0].title()) + + print(wd.transformString("now is the winter of our discontent made glorious summer by this sun of york.")) + Prints:: + Now Is The Winter Of Our Discontent Made Glorious Summer By This Sun Of York. + """ + out = [] + lastE = 0 + # force preservation of <TAB>s, to minimize unwanted transformation of string, and to + # keep string locs straight between transformString and scanString + self.keepTabs = True + try: + for t,s,e in self.scanString( instring ): + out.append( instring[lastE:s] ) + if t: + if isinstance(t,ParseResults): + out += t.asList() + elif isinstance(t,list): + out += t + else: + out.append(t) + lastE = e + out.append(instring[lastE:]) + out = [o for o in out if o] + return "".join(map(_ustr,_flatten(out))) + except ParseBaseException as exc: + if ParserElement.verbose_stacktrace: + raise + else: + # catch and re-raise exception from here, clears out pyparsing internal stack trace + raise exc + + def searchString( self, instring, maxMatches=_MAX_INT ): + """ + Another extension to C{L{scanString}}, simplifying the access to the tokens found + to match the given parse expression. May be called with optional + C{maxMatches} argument, to clip searching after 'n' matches are found. 
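A small sketch of scanString and transformString working together, adapted from the docstring example above (the wd/text names are illustrative):

    from pyparsing import Word, alphas

    wd = Word(alphas)
    wd.setParseAction(lambda toks: toks[0].title())

    text = "now is the winter of our discontent"
    # scanString yields (tokens, start, end) for every non-overlapping match
    for tokens, start, end in wd.scanString(text):
        print(start, end, tokens[0])

    # transformString rewrites each match using the attached parse action
    print(wd.transformString(text))   # -> Now Is The Winter Of Our Discontent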
+ + Example:: + # a capitalized word starts with an uppercase letter, followed by zero or more lowercase letters + cap_word = Word(alphas.upper(), alphas.lower()) + + print(cap_word.searchString("More than Iron, more than Lead, more than Gold I need Electricity")) + prints:: + ['More', 'Iron', 'Lead', 'Gold', 'I'] + """ + try: + return ParseResults([ t for t,s,e in self.scanString( instring, maxMatches ) ]) + except ParseBaseException as exc: + if ParserElement.verbose_stacktrace: + raise + else: + # catch and re-raise exception from here, clears out pyparsing internal stack trace + raise exc + + def split(self, instring, maxsplit=_MAX_INT, includeSeparators=False): + """ + Generator method to split a string using the given expression as a separator. + May be called with optional C{maxsplit} argument, to limit the number of splits; + and the optional C{includeSeparators} argument (default=C{False}), if the separating + matching text should be included in the split results. + + Example:: + punc = oneOf(list(".,;:/-!?")) + print(list(punc.split("This, this?, this sentence, is badly punctuated!"))) + prints:: + ['This', ' this', '', ' this sentence', ' is badly punctuated', ''] + """ + splits = 0 + last = 0 + for t,s,e in self.scanString(instring, maxMatches=maxsplit): + yield instring[last:s] + if includeSeparators: + yield t[0] + last = e + yield instring[last:] + + def __add__(self, other ): + """ + Implementation of + operator - returns C{L{And}}. Adding strings to a ParserElement + converts them to L{Literal}s by default. + + Example:: + greet = Word(alphas) + "," + Word(alphas) + "!" + hello = "Hello, World!" + print (hello, "->", greet.parseString(hello)) + Prints:: + Hello, World! -> ['Hello', ',', 'World', '!'] + """ + if isinstance( other, basestring ): + other = ParserElement._literalStringClass( other ) + if not isinstance( other, ParserElement ): + warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), + SyntaxWarning, stacklevel=2) + return None + return And( [ self, other ] ) + + def __radd__(self, other ): + """ + Implementation of + operator when left operand is not a C{L{ParserElement}} + """ + if isinstance( other, basestring ): + other = ParserElement._literalStringClass( other ) + if not isinstance( other, ParserElement ): + warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), + SyntaxWarning, stacklevel=2) + return None + return other + self + + def __sub__(self, other): + """ + Implementation of - operator, returns C{L{And}} with error stop + """ + if isinstance( other, basestring ): + other = ParserElement._literalStringClass( other ) + if not isinstance( other, ParserElement ): + warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), + SyntaxWarning, stacklevel=2) + return None + return And( [ self, And._ErrorStop(), other ] ) + + def __rsub__(self, other ): + """ + Implementation of - operator when left operand is not a C{L{ParserElement}} + """ + if isinstance( other, basestring ): + other = ParserElement._literalStringClass( other ) + if not isinstance( other, ParserElement ): + warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), + SyntaxWarning, stacklevel=2) + return None + return other - self + + def __mul__(self,other): + """ + Implementation of * operator, allows use of C{expr * 3} in place of + C{expr + expr + expr}. Expressions may also me multiplied by a 2-integer + tuple, similar to C{{min,max}} multipliers in regular expressions. 
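A brief sketch of searchString and split, following the docstring examples (it assumes the oneOf helper defined further down in this module):

    from pyparsing import Word, alphas, oneOf

    # a capitalized word: one uppercase letter, then zero or more lowercase letters
    cap_word = Word(alphas.upper(), alphas.lower())
    for match in cap_word.searchString("More than Iron, more than Lead, more than Gold"):
        print(match[0])   # More / Iron / Lead / Gold

    # split() yields the text found between separator matches
    punc = oneOf(list(".,;:/-!?"))
    print(list(punc.split("This, this?, this sentence, is badly punctuated!")))
    # -> ['This', ' this', '', ' this sentence', ' is badly punctuated', '']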
Tuples + may also include C{None} as in: + - C{expr*(n,None)} or C{expr*(n,)} is equivalent + to C{expr*n + L{ZeroOrMore}(expr)} + (read as "at least n instances of C{expr}") + - C{expr*(None,n)} is equivalent to C{expr*(0,n)} + (read as "0 to n instances of C{expr}") + - C{expr*(None,None)} is equivalent to C{L{ZeroOrMore}(expr)} + - C{expr*(1,None)} is equivalent to C{L{OneOrMore}(expr)} + + Note that C{expr*(None,n)} does not raise an exception if + more than n exprs exist in the input stream; that is, + C{expr*(None,n)} does not enforce a maximum number of expr + occurrences. If this behavior is desired, then write + C{expr*(None,n) + ~expr} + """ + if isinstance(other,int): + minElements, optElements = other,0 + elif isinstance(other,tuple): + other = (other + (None, None))[:2] + if other[0] is None: + other = (0, other[1]) + if isinstance(other[0],int) and other[1] is None: + if other[0] == 0: + return ZeroOrMore(self) + if other[0] == 1: + return OneOrMore(self) + else: + return self*other[0] + ZeroOrMore(self) + elif isinstance(other[0],int) and isinstance(other[1],int): + minElements, optElements = other + optElements -= minElements + else: + raise TypeError("cannot multiply 'ParserElement' and ('%s','%s') objects", type(other[0]),type(other[1])) + else: + raise TypeError("cannot multiply 'ParserElement' and '%s' objects", type(other)) + + if minElements < 0: + raise ValueError("cannot multiply ParserElement by negative value") + if optElements < 0: + raise ValueError("second tuple value must be greater or equal to first tuple value") + if minElements == optElements == 0: + raise ValueError("cannot multiply ParserElement by 0 or (0,0)") + + if (optElements): + def makeOptionalList(n): + if n>1: + return Optional(self + makeOptionalList(n-1)) + else: + return Optional(self) + if minElements: + if minElements == 1: + ret = self + makeOptionalList(optElements) + else: + ret = And([self]*minElements) + makeOptionalList(optElements) + else: + ret = makeOptionalList(optElements) + else: + if minElements == 1: + ret = self + else: + ret = And([self]*minElements) + return ret + + def __rmul__(self, other): + return self.__mul__(other) + + def __or__(self, other ): + """ + Implementation of | operator - returns C{L{MatchFirst}} + """ + if isinstance( other, basestring ): + other = ParserElement._literalStringClass( other ) + if not isinstance( other, ParserElement ): + warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), + SyntaxWarning, stacklevel=2) + return None + return MatchFirst( [ self, other ] ) + + def __ror__(self, other ): + """ + Implementation of | operator when left operand is not a C{L{ParserElement}} + """ + if isinstance( other, basestring ): + other = ParserElement._literalStringClass( other ) + if not isinstance( other, ParserElement ): + warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), + SyntaxWarning, stacklevel=2) + return None + return other | self + + def __xor__(self, other ): + """ + Implementation of ^ operator - returns C{L{Or}} + """ + if isinstance( other, basestring ): + other = ParserElement._literalStringClass( other ) + if not isinstance( other, ParserElement ): + warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), + SyntaxWarning, stacklevel=2) + return None + return Or( [ self, other ] ) + + def __rxor__(self, other ): + """ + Implementation of ^ operator when left operand is not a C{L{ParserElement}} + """ + if isinstance( other, basestring ): + other = 
ParserElement._literalStringClass( other ) + if not isinstance( other, ParserElement ): + warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), + SyntaxWarning, stacklevel=2) + return None + return other ^ self + + def __and__(self, other ): + """ + Implementation of & operator - returns C{L{Each}} + """ + if isinstance( other, basestring ): + other = ParserElement._literalStringClass( other ) + if not isinstance( other, ParserElement ): + warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), + SyntaxWarning, stacklevel=2) + return None + return Each( [ self, other ] ) + + def __rand__(self, other ): + """ + Implementation of & operator when left operand is not a C{L{ParserElement}} + """ + if isinstance( other, basestring ): + other = ParserElement._literalStringClass( other ) + if not isinstance( other, ParserElement ): + warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), + SyntaxWarning, stacklevel=2) + return None + return other & self + + def __invert__( self ): + """ + Implementation of ~ operator - returns C{L{NotAny}} + """ + return NotAny( self ) + + def __call__(self, name=None): + """ + Shortcut for C{L{setResultsName}}, with C{listAllMatches=False}. + + If C{name} is given with a trailing C{'*'} character, then C{listAllMatches} will be + passed as C{True}. + + If C{name} is omitted, same as calling C{L{copy}}. + + Example:: + # these are equivalent + userdata = Word(alphas).setResultsName("name") + Word(nums+"-").setResultsName("socsecno") + userdata = Word(alphas)("name") + Word(nums+"-")("socsecno") + """ + if name is not None: + return self.setResultsName(name) + else: + return self.copy() + + def suppress( self ): + """ + Suppresses the output of this C{ParserElement}; useful to keep punctuation from + cluttering up returned output. + """ + return Suppress( self ) + + def leaveWhitespace( self ): + """ + Disables the skipping of whitespace before matching the characters in the + C{ParserElement}'s defined pattern. This is normally only used internally by + the pyparsing module, but may be needed in some whitespace-sensitive grammars. + """ + self.skipWhitespace = False + return self + + def setWhitespaceChars( self, chars ): + """ + Overrides the default whitespace chars + """ + self.skipWhitespace = True + self.whiteChars = chars + self.copyDefaultWhiteChars = False + return self + + def parseWithTabs( self ): + """ + Overrides default behavior to expand C{<TAB>}s to spaces before parsing the input string. + Must be called before C{parseString} when the input grammar contains elements that + match C{<TAB>} characters. + """ + self.keepTabs = True + return self + + def ignore( self, other ): + """ + Define expression to be ignored (e.g., comments) while doing pattern + matching; may be called repeatedly, to define multiple comment or other + ignorable patterns. + + Example:: + patt = OneOrMore(Word(alphas)) + patt.parseString('ablaj /* comment */ lskjd') # -> ['ablaj'] + + patt.ignore(cStyleComment) + patt.parseString('ablaj /* comment */ lskjd') # -> ['ablaj', 'lskjd'] + """ + if isinstance(other, basestring): + other = Suppress(other) + + if isinstance( other, Suppress ): + if other not in self.ignoreExprs: + self.ignoreExprs.append(other) + else: + self.ignoreExprs.append( Suppress( other.copy() ) ) + return self + + def setDebugActions( self, startAction, successAction, exceptionAction ): + """ + Enable display of debugging messages while doing pattern matching. 
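A compact sketch of the operator overloads and ignore() behaviour described in this block (greet/patt are illustrative names; cStyleComment is a helper defined later in this module):

    from pyparsing import Word, alphas, Suppress, OneOrMore, cStyleComment

    # '+' builds an And, '|' a MatchFirst; bare strings are promoted to Literal by default
    greet = Word(alphas) + Suppress(",") + Word(alphas) + Suppress("!")
    print(greet.parseString("Hello, World!"))   # -> ['Hello', 'World']

    # ignore() skips comment text wherever whitespace would normally be skipped
    patt = OneOrMore(Word(alphas))
    patt.ignore(cStyleComment)
    print(patt.parseString("ablaj /* comment */ lskjd"))   # -> ['ablaj', 'lskjd']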
+ """ + self.debugActions = (startAction or _defaultStartDebugAction, + successAction or _defaultSuccessDebugAction, + exceptionAction or _defaultExceptionDebugAction) + self.debug = True + return self + + def setDebug( self, flag=True ): + """ + Enable display of debugging messages while doing pattern matching. + Set C{flag} to True to enable, False to disable. + + Example:: + wd = Word(alphas).setName("alphaword") + integer = Word(nums).setName("numword") + term = wd | integer + + # turn on debugging for wd + wd.setDebug() + + OneOrMore(term).parseString("abc 123 xyz 890") + + prints:: + Match alphaword at loc 0(1,1) + Matched alphaword -> ['abc'] + Match alphaword at loc 3(1,4) + Exception raised:Expected alphaword (at char 4), (line:1, col:5) + Match alphaword at loc 7(1,8) + Matched alphaword -> ['xyz'] + Match alphaword at loc 11(1,12) + Exception raised:Expected alphaword (at char 12), (line:1, col:13) + Match alphaword at loc 15(1,16) + Exception raised:Expected alphaword (at char 15), (line:1, col:16) + + The output shown is that produced by the default debug actions - custom debug actions can be + specified using L{setDebugActions}. Prior to attempting + to match the C{wd} expression, the debugging message C{"Match <exprname> at loc <n>(<line>,<col>)"} + is shown. Then if the parse succeeds, a C{"Matched"} message is shown, or an C{"Exception raised"} + message is shown. Also note the use of L{setName} to assign a human-readable name to the expression, + which makes debugging and exception messages easier to understand - for instance, the default + name created for the C{Word} expression without calling C{setName} is C{"W:(ABCD...)"}. + """ + if flag: + self.setDebugActions( _defaultStartDebugAction, _defaultSuccessDebugAction, _defaultExceptionDebugAction ) + else: + self.debug = False + return self + + def __str__( self ): + return self.name + + def __repr__( self ): + return _ustr(self) + + def streamline( self ): + self.streamlined = True + self.strRepr = None + return self + + def checkRecursion( self, parseElementList ): + pass + + def validate( self, validateTrace=[] ): + """ + Check defined expressions for valid structure, check for infinite recursive definitions. + """ + self.checkRecursion( [] ) + + def parseFile( self, file_or_filename, parseAll=False ): + """ + Execute the parse expression on the given file or filename. + If a filename is specified (instead of a file object), + the entire file is opened, read, and closed before parsing. + """ + try: + file_contents = file_or_filename.read() + except AttributeError: + with open(file_or_filename, "r") as f: + file_contents = f.read() + try: + return self.parseString(file_contents, parseAll) + except ParseBaseException as exc: + if ParserElement.verbose_stacktrace: + raise + else: + # catch and re-raise exception from here, clears out pyparsing internal stack trace + raise exc + + def __eq__(self,other): + if isinstance(other, ParserElement): + return self is other or vars(self) == vars(other) + elif isinstance(other, basestring): + return self.matches(other) + else: + return super(ParserElement,self)==other + + def __ne__(self,other): + return not (self == other) + + def __hash__(self): + return hash(id(self)) + + def __req__(self,other): + return self == other + + def __rne__(self,other): + return not (self == other) + + def matches(self, testString, parseAll=True): + """ + Method for quick testing of a parser against a test string. Good for simple + inline microtests of sub expressions while building up larger parser. 
+ + Parameters: + - testString - to test against this expression for a match + - parseAll - (default=C{True}) - flag to pass to C{L{parseString}} when running tests + + Example:: + expr = Word(nums) + assert expr.matches("100") + """ + try: + self.parseString(_ustr(testString), parseAll=parseAll) + return True + except ParseBaseException: + return False + + def runTests(self, tests, parseAll=True, comment='#', fullDump=True, printResults=True, failureTests=False): + """ + Execute the parse expression on a series of test strings, showing each + test, the parsed results or where the parse failed. Quick and easy way to + run a parse expression against a list of sample strings. + + Parameters: + - tests - a list of separate test strings, or a multiline string of test strings + - parseAll - (default=C{True}) - flag to pass to C{L{parseString}} when running tests + - comment - (default=C{'#'}) - expression for indicating embedded comments in the test + string; pass None to disable comment filtering + - fullDump - (default=C{True}) - dump results as list followed by results names in nested outline; + if False, only dump nested list + - printResults - (default=C{True}) prints test output to stdout + - failureTests - (default=C{False}) indicates if these tests are expected to fail parsing + + Returns: a (success, results) tuple, where success indicates that all tests succeeded + (or failed if C{failureTests} is True), and the results contain a list of lines of each + test's output + + Example:: + number_expr = pyparsing_common.number.copy() + + result = number_expr.runTests(''' + # unsigned integer + 100 + # negative integer + -100 + # float with scientific notation + 6.02e23 + # integer with scientific notation + 1e-12 + ''') + print("Success" if result[0] else "Failed!") + + result = number_expr.runTests(''' + # stray character + 100Z + # missing leading digit before '.' + -.100 + # too many '.' + 3.14.159 + ''', failureTests=True) + print("Success" if result[0] else "Failed!") + prints:: + # unsigned integer + 100 + [100] + + # negative integer + -100 + [-100] + + # float with scientific notation + 6.02e23 + [6.02e+23] + + # integer with scientific notation + 1e-12 + [1e-12] + + Success + + # stray character + 100Z + ^ + FAIL: Expected end of text (at char 3), (line:1, col:4) + + # missing leading digit before '.' + -.100 + ^ + FAIL: Expected {real number with scientific notation | real number | signed integer} (at char 0), (line:1, col:1) + + # too many '.' + 3.14.159 + ^ + FAIL: Expected end of text (at char 4), (line:1, col:5) + + Success + + Each test string must be on a single line. If you want to test a string that spans multiple + lines, create a test like this:: + + expr.runTest(r"this is a test\\n of strings that spans \\n 3 lines") + + (Note that this is a raw string literal, you must include the leading 'r'.) 
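A minimal runTests sketch based on the docstring above (the test data is illustrative; parseAll defaults to True):

    from pyparsing import Word, nums

    integer = Word(nums).setParseAction(lambda toks: int(toks[0]))

    # each non-comment, non-blank line of the multiline string is run as a separate test
    success, results = integer.runTests('''
        # unsigned integer
        100
        # another one
        12345
        ''')
    print("Success" if success else "Failed!")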
+ """ + if isinstance(tests, basestring): + tests = list(map(str.strip, tests.rstrip().splitlines())) + if isinstance(comment, basestring): + comment = Literal(comment) + allResults = [] + comments = [] + success = True + for t in tests: + if comment is not None and comment.matches(t, False) or comments and not t: + comments.append(t) + continue + if not t: + continue + out = ['\n'.join(comments), t] + comments = [] + try: + t = t.replace(r'\n','\n') + result = self.parseString(t, parseAll=parseAll) + out.append(result.dump(full=fullDump)) + success = success and not failureTests + except ParseBaseException as pe: + fatal = "(FATAL)" if isinstance(pe, ParseFatalException) else "" + if '\n' in t: + out.append(line(pe.loc, t)) + out.append(' '*(col(pe.loc,t)-1) + '^' + fatal) + else: + out.append(' '*pe.loc + '^' + fatal) + out.append("FAIL: " + str(pe)) + success = success and failureTests + result = pe + except Exception as exc: + out.append("FAIL-EXCEPTION: " + str(exc)) + success = success and failureTests + result = exc + + if printResults: + if fullDump: + out.append('') + print('\n'.join(out)) + + allResults.append((t, result)) + + return success, allResults + + +class Token(ParserElement): + """ + Abstract C{ParserElement} subclass, for defining atomic matching patterns. + """ + def __init__( self ): + super(Token,self).__init__( savelist=False ) + + +class Empty(Token): + """ + An empty token, will always match. + """ + def __init__( self ): + super(Empty,self).__init__() + self.name = "Empty" + self.mayReturnEmpty = True + self.mayIndexError = False + + +class NoMatch(Token): + """ + A token that will never match. + """ + def __init__( self ): + super(NoMatch,self).__init__() + self.name = "NoMatch" + self.mayReturnEmpty = True + self.mayIndexError = False + self.errmsg = "Unmatchable token" + + def parseImpl( self, instring, loc, doActions=True ): + raise ParseException(instring, loc, self.errmsg, self) + + +class Literal(Token): + """ + Token to exactly match a specified string. + + Example:: + Literal('blah').parseString('blah') # -> ['blah'] + Literal('blah').parseString('blahfooblah') # -> ['blah'] + Literal('blah').parseString('bla') # -> Exception: Expected "blah" + + For case-insensitive matching, use L{CaselessLiteral}. + + For keyword matching (force word break before and after the matched string), + use L{Keyword} or L{CaselessKeyword}. 
+ """ + def __init__( self, matchString ): + super(Literal,self).__init__() + self.match = matchString + self.matchLen = len(matchString) + try: + self.firstMatchChar = matchString[0] + except IndexError: + warnings.warn("null string passed to Literal; use Empty() instead", + SyntaxWarning, stacklevel=2) + self.__class__ = Empty + self.name = '"%s"' % _ustr(self.match) + self.errmsg = "Expected " + self.name + self.mayReturnEmpty = False + self.mayIndexError = False + + # Performance tuning: this routine gets called a *lot* + # if this is a single character match string and the first character matches, + # short-circuit as quickly as possible, and avoid calling startswith + #~ @profile + def parseImpl( self, instring, loc, doActions=True ): + if (instring[loc] == self.firstMatchChar and + (self.matchLen==1 or instring.startswith(self.match,loc)) ): + return loc+self.matchLen, self.match + raise ParseException(instring, loc, self.errmsg, self) +_L = Literal +ParserElement._literalStringClass = Literal + +class Keyword(Token): + """ + Token to exactly match a specified string as a keyword, that is, it must be + immediately followed by a non-keyword character. Compare with C{L{Literal}}: + - C{Literal("if")} will match the leading C{'if'} in C{'ifAndOnlyIf'}. + - C{Keyword("if")} will not; it will only match the leading C{'if'} in C{'if x=1'}, or C{'if(y==2)'} + Accepts two optional constructor arguments in addition to the keyword string: + - C{identChars} is a string of characters that would be valid identifier characters, + defaulting to all alphanumerics + "_" and "$" + - C{caseless} allows case-insensitive matching, default is C{False}. + + Example:: + Keyword("start").parseString("start") # -> ['start'] + Keyword("start").parseString("starting") # -> Exception + + For case-insensitive matching, use L{CaselessKeyword}. 
+ """ + DEFAULT_KEYWORD_CHARS = alphanums+"_$" + + def __init__( self, matchString, identChars=None, caseless=False ): + super(Keyword,self).__init__() + if identChars is None: + identChars = Keyword.DEFAULT_KEYWORD_CHARS + self.match = matchString + self.matchLen = len(matchString) + try: + self.firstMatchChar = matchString[0] + except IndexError: + warnings.warn("null string passed to Keyword; use Empty() instead", + SyntaxWarning, stacklevel=2) + self.name = '"%s"' % self.match + self.errmsg = "Expected " + self.name + self.mayReturnEmpty = False + self.mayIndexError = False + self.caseless = caseless + if caseless: + self.caselessmatch = matchString.upper() + identChars = identChars.upper() + self.identChars = set(identChars) + + def parseImpl( self, instring, loc, doActions=True ): + if self.caseless: + if ( (instring[ loc:loc+self.matchLen ].upper() == self.caselessmatch) and + (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen].upper() not in self.identChars) and + (loc == 0 or instring[loc-1].upper() not in self.identChars) ): + return loc+self.matchLen, self.match + else: + if (instring[loc] == self.firstMatchChar and + (self.matchLen==1 or instring.startswith(self.match,loc)) and + (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen] not in self.identChars) and + (loc == 0 or instring[loc-1] not in self.identChars) ): + return loc+self.matchLen, self.match + raise ParseException(instring, loc, self.errmsg, self) + + def copy(self): + c = super(Keyword,self).copy() + c.identChars = Keyword.DEFAULT_KEYWORD_CHARS + return c + + @staticmethod + def setDefaultKeywordChars( chars ): + """Overrides the default Keyword chars + """ + Keyword.DEFAULT_KEYWORD_CHARS = chars + +class CaselessLiteral(Literal): + """ + Token to match a specified string, ignoring case of letters. + Note: the matched results will always be in the case of the given + match string, NOT the case of the input text. + + Example:: + OneOrMore(CaselessLiteral("CMD")).parseString("cmd CMD Cmd10") # -> ['CMD', 'CMD', 'CMD'] + + (Contrast with example for L{CaselessKeyword}.) + """ + def __init__( self, matchString ): + super(CaselessLiteral,self).__init__( matchString.upper() ) + # Preserve the defining literal. + self.returnString = matchString + self.name = "'%s'" % self.returnString + self.errmsg = "Expected " + self.name + + def parseImpl( self, instring, loc, doActions=True ): + if instring[ loc:loc+self.matchLen ].upper() == self.match: + return loc+self.matchLen, self.returnString + raise ParseException(instring, loc, self.errmsg, self) + +class CaselessKeyword(Keyword): + """ + Caseless version of L{Keyword}. + + Example:: + OneOrMore(CaselessKeyword("CMD")).parseString("cmd CMD Cmd10") # -> ['CMD', 'CMD'] + + (Contrast with example for L{CaselessLiteral}.) + """ + def __init__( self, matchString, identChars=None ): + super(CaselessKeyword,self).__init__( matchString, identChars, caseless=True ) + + def parseImpl( self, instring, loc, doActions=True ): + if ( (instring[ loc:loc+self.matchLen ].upper() == self.caselessmatch) and + (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen].upper() not in self.identChars) ): + return loc+self.matchLen, self.match + raise ParseException(instring, loc, self.errmsg, self) + +class CloseMatch(Token): + """ + A variation on L{Literal} which matches "close" matches, that is, + strings with at most 'n' mismatching characters. 
C{CloseMatch} takes parameters: + - C{match_string} - string to be matched + - C{maxMismatches} - (C{default=1}) maximum number of mismatches allowed to count as a match + + The results from a successful parse will contain the matched text from the input string and the following named results: + - C{mismatches} - a list of the positions within the match_string where mismatches were found + - C{original} - the original match_string used to compare against the input string + + If C{mismatches} is an empty list, then the match was an exact match. + + Example:: + patt = CloseMatch("ATCATCGAATGGA") + patt.parseString("ATCATCGAAXGGA") # -> (['ATCATCGAAXGGA'], {'mismatches': [[9]], 'original': ['ATCATCGAATGGA']}) + patt.parseString("ATCAXCGAAXGGA") # -> Exception: Expected 'ATCATCGAATGGA' (with up to 1 mismatches) (at char 0), (line:1, col:1) + + # exact match + patt.parseString("ATCATCGAATGGA") # -> (['ATCATCGAATGGA'], {'mismatches': [[]], 'original': ['ATCATCGAATGGA']}) + + # close match allowing up to 2 mismatches + patt = CloseMatch("ATCATCGAATGGA", maxMismatches=2) + patt.parseString("ATCAXCGAAXGGA") # -> (['ATCAXCGAAXGGA'], {'mismatches': [[4, 9]], 'original': ['ATCATCGAATGGA']}) + """ + def __init__(self, match_string, maxMismatches=1): + super(CloseMatch,self).__init__() + self.name = match_string + self.match_string = match_string + self.maxMismatches = maxMismatches + self.errmsg = "Expected %r (with up to %d mismatches)" % (self.match_string, self.maxMismatches) + self.mayIndexError = False + self.mayReturnEmpty = False + + def parseImpl( self, instring, loc, doActions=True ): + start = loc + instrlen = len(instring) + maxloc = start + len(self.match_string) + + if maxloc <= instrlen: + match_string = self.match_string + match_stringloc = 0 + mismatches = [] + maxMismatches = self.maxMismatches + + for match_stringloc,s_m in enumerate(zip(instring[loc:maxloc], self.match_string)): + src,mat = s_m + if src != mat: + mismatches.append(match_stringloc) + if len(mismatches) > maxMismatches: + break + else: + loc = match_stringloc + 1 + results = ParseResults([instring[start:loc]]) + results['original'] = self.match_string + results['mismatches'] = mismatches + return loc, results + + raise ParseException(instring, loc, self.errmsg, self) + + +class Word(Token): + """ + Token for matching words composed of allowed character sets. + Defined with string containing all allowed initial characters, + an optional string containing allowed body characters (if omitted, + defaults to the initial character set), and an optional minimum, + maximum, and/or exact length. The default value for C{min} is 1 (a + minimum value < 1 is not valid); the default values for C{max} and C{exact} + are 0, meaning no maximum or exact length restriction. An optional + C{excludeChars} parameter can list characters that might be found in + the input C{bodyChars} string; useful to define a word of all printables + except for one or two characters, for instance. + + L{srange} is useful for defining custom character set strings for defining + C{Word} expressions, using range notation from regular expression character sets. + + A common mistake is to use C{Word} to match a specific literal string, as in + C{Word("Address")}. Remember that C{Word} uses the string argument to define + I{sets} of matchable characters. This expression would match "Add", "AAA", + "dAred", or any other word made up of the characters 'A', 'd', 'r', 'e', and 's'. + To match an exact literal string, use L{Literal} or L{Keyword}. 
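A small CloseMatch sketch following the parameters and named results listed above (the DNA-style strings come from the docstring example):

    from pyparsing import CloseMatch

    patt = CloseMatch("ATCATCGAATGGA", maxMismatches=2)
    result = patt.parseString("ATCAXCGAAXGGA")
    print(result[0])              # the matched input text
    print(result['mismatches'])   # positions that differed, e.g. [4, 9]
    print(result['original'])     # the reference string compared against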
+ + pyparsing includes helper strings for building Words: + - L{alphas} + - L{nums} + - L{alphanums} + - L{hexnums} + - L{alphas8bit} (alphabetic characters in ASCII range 128-255 - accented, tilded, umlauted, etc.) + - L{punc8bit} (non-alphabetic characters in ASCII range 128-255 - currency, symbols, superscripts, diacriticals, etc.) + - L{printables} (any non-whitespace character) + + Example:: + # a word composed of digits + integer = Word(nums) # equivalent to Word("0123456789") or Word(srange("0-9")) + + # a word with a leading capital, and zero or more lowercase + capital_word = Word(alphas.upper(), alphas.lower()) + + # hostnames are alphanumeric, with leading alpha, and '-' + hostname = Word(alphas, alphanums+'-') + + # roman numeral (not a strict parser, accepts invalid mix of characters) + roman = Word("IVXLCDM") + + # any string of non-whitespace characters, except for ',' + csv_value = Word(printables, excludeChars=",") + """ + def __init__( self, initChars, bodyChars=None, min=1, max=0, exact=0, asKeyword=False, excludeChars=None ): + super(Word,self).__init__() + if excludeChars: + initChars = ''.join(c for c in initChars if c not in excludeChars) + if bodyChars: + bodyChars = ''.join(c for c in bodyChars if c not in excludeChars) + self.initCharsOrig = initChars + self.initChars = set(initChars) + if bodyChars : + self.bodyCharsOrig = bodyChars + self.bodyChars = set(bodyChars) + else: + self.bodyCharsOrig = initChars + self.bodyChars = set(initChars) + + self.maxSpecified = max > 0 + + if min < 1: + raise ValueError("cannot specify a minimum length < 1; use Optional(Word()) if zero-length word is permitted") + + self.minLen = min + + if max > 0: + self.maxLen = max + else: + self.maxLen = _MAX_INT + + if exact > 0: + self.maxLen = exact + self.minLen = exact + + self.name = _ustr(self) + self.errmsg = "Expected " + self.name + self.mayIndexError = False + self.asKeyword = asKeyword + + if ' ' not in self.initCharsOrig+self.bodyCharsOrig and (min==1 and max==0 and exact==0): + if self.bodyCharsOrig == self.initCharsOrig: + self.reString = "[%s]+" % _escapeRegexRangeChars(self.initCharsOrig) + elif len(self.initCharsOrig) == 1: + self.reString = "%s[%s]*" % \ + (re.escape(self.initCharsOrig), + _escapeRegexRangeChars(self.bodyCharsOrig),) + else: + self.reString = "[%s][%s]*" % \ + (_escapeRegexRangeChars(self.initCharsOrig), + _escapeRegexRangeChars(self.bodyCharsOrig),) + if self.asKeyword: + self.reString = r"\b"+self.reString+r"\b" + try: + self.re = re.compile( self.reString ) + except Exception: + self.re = None + + def parseImpl( self, instring, loc, doActions=True ): + if self.re: + result = self.re.match(instring,loc) + if not result: + raise ParseException(instring, loc, self.errmsg, self) + + loc = result.end() + return loc, result.group() + + if not(instring[ loc ] in self.initChars): + raise ParseException(instring, loc, self.errmsg, self) + + start = loc + loc += 1 + instrlen = len(instring) + bodychars = self.bodyChars + maxloc = start + self.maxLen + maxloc = min( maxloc, instrlen ) + while loc < maxloc and instring[loc] in bodychars: + loc += 1 + + throwException = False + if loc - start < self.minLen: + throwException = True + if self.maxSpecified and loc < instrlen and instring[loc] in bodychars: + throwException = True + if self.asKeyword: + if (start>0 and instring[start-1] in bodychars) or (loc<instrlen and instring[loc] in bodychars): + throwException = True + + if throwException: + raise ParseException(instring, loc, self.errmsg, self) + + return loc, 
instring[start:loc] + + def __str__( self ): + try: + return super(Word,self).__str__() + except Exception: + pass + + + if self.strRepr is None: + + def charsAsStr(s): + if len(s)>4: + return s[:4]+"..." + else: + return s + + if ( self.initCharsOrig != self.bodyCharsOrig ): + self.strRepr = "W:(%s,%s)" % ( charsAsStr(self.initCharsOrig), charsAsStr(self.bodyCharsOrig) ) + else: + self.strRepr = "W:(%s)" % charsAsStr(self.initCharsOrig) + + return self.strRepr + + +class Regex(Token): + """ + Token for matching strings that match a given regular expression. + Defined with string specifying the regular expression in a form recognized by the inbuilt Python re module. + If the given regex contains named groups (defined using C{(?P<name>...)}), these will be preserved as + named parse results. + + Example:: + realnum = Regex(r"[+-]?\d+\.\d*") + date = Regex(r'(?P<year>\d{4})-(?P<month>\d\d?)-(?P<day>\d\d?)') + # ref: http://stackoverflow.com/questions/267399/how-do-you-match-only-valid-roman-numerals-with-a-regular-expression + roman = Regex(r"M{0,4}(CM|CD|D?C{0,3})(XC|XL|L?X{0,3})(IX|IV|V?I{0,3})") + """ + compiledREtype = type(re.compile("[A-Z]")) + def __init__( self, pattern, flags=0): + """The parameters C{pattern} and C{flags} are passed to the C{re.compile()} function as-is. See the Python C{re} module for an explanation of the acceptable patterns and flags.""" + super(Regex,self).__init__() + + if isinstance(pattern, basestring): + if not pattern: + warnings.warn("null string passed to Regex; use Empty() instead", + SyntaxWarning, stacklevel=2) + + self.pattern = pattern + self.flags = flags + + try: + self.re = re.compile(self.pattern, self.flags) + self.reString = self.pattern + except sre_constants.error: + warnings.warn("invalid pattern (%s) passed to Regex" % pattern, + SyntaxWarning, stacklevel=2) + raise + + elif isinstance(pattern, Regex.compiledREtype): + self.re = pattern + self.pattern = \ + self.reString = str(pattern) + self.flags = flags + + else: + raise ValueError("Regex may only be constructed with a string or a compiled RE object") + + self.name = _ustr(self) + self.errmsg = "Expected " + self.name + self.mayIndexError = False + self.mayReturnEmpty = True + + def parseImpl( self, instring, loc, doActions=True ): + result = self.re.match(instring,loc) + if not result: + raise ParseException(instring, loc, self.errmsg, self) + + loc = result.end() + d = result.groupdict() + ret = ParseResults(result.group()) + if d: + for k in d: + ret[k] = d[k] + return loc,ret + + def __str__( self ): + try: + return super(Regex,self).__str__() + except Exception: + pass + + if self.strRepr is None: + self.strRepr = "Re:(%s)" % repr(self.pattern) + + return self.strRepr + + +class QuotedString(Token): + r""" + Token for matching strings that are delimited by quoting characters. 
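A brief sketch of Regex with named groups, as described in the class docstring (the date pattern mirrors the docstring's example; the input value is illustrative):

    from pyparsing import Regex

    # named groups in the pattern become named parse results
    date = Regex(r'(?P<year>\d{4})-(?P<month>\d\d?)-(?P<day>\d\d?)')
    result = date.parseString("1999-12-31")
    print(result[0])          # -> '1999-12-31'
    print(result['year'])     # -> '1999'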
+ + Defined with the following parameters: + - quoteChar - string of one or more characters defining the quote delimiting string + - escChar - character to escape quotes, typically backslash (default=C{None}) + - escQuote - special quote sequence to escape an embedded quote string (such as SQL's "" to escape an embedded ") (default=C{None}) + - multiline - boolean indicating whether quotes can span multiple lines (default=C{False}) + - unquoteResults - boolean indicating whether the matched text should be unquoted (default=C{True}) + - endQuoteChar - string of one or more characters defining the end of the quote delimited string (default=C{None} => same as quoteChar) + - convertWhitespaceEscapes - convert escaped whitespace (C{'\t'}, C{'\n'}, etc.) to actual whitespace (default=C{True}) + + Example:: + qs = QuotedString('"') + print(qs.searchString('lsjdf "This is the quote" sldjf')) + complex_qs = QuotedString('{{', endQuoteChar='}}') + print(complex_qs.searchString('lsjdf {{This is the "quote"}} sldjf')) + sql_qs = QuotedString('"', escQuote='""') + print(sql_qs.searchString('lsjdf "This is the quote with ""embedded"" quotes" sldjf')) + prints:: + [['This is the quote']] + [['This is the "quote"']] + [['This is the quote with "embedded" quotes']] + """ + def __init__( self, quoteChar, escChar=None, escQuote=None, multiline=False, unquoteResults=True, endQuoteChar=None, convertWhitespaceEscapes=True): + super(QuotedString,self).__init__() + + # remove white space from quote chars - wont work anyway + quoteChar = quoteChar.strip() + if not quoteChar: + warnings.warn("quoteChar cannot be the empty string",SyntaxWarning,stacklevel=2) + raise SyntaxError() + + if endQuoteChar is None: + endQuoteChar = quoteChar + else: + endQuoteChar = endQuoteChar.strip() + if not endQuoteChar: + warnings.warn("endQuoteChar cannot be the empty string",SyntaxWarning,stacklevel=2) + raise SyntaxError() + + self.quoteChar = quoteChar + self.quoteCharLen = len(quoteChar) + self.firstQuoteChar = quoteChar[0] + self.endQuoteChar = endQuoteChar + self.endQuoteCharLen = len(endQuoteChar) + self.escChar = escChar + self.escQuote = escQuote + self.unquoteResults = unquoteResults + self.convertWhitespaceEscapes = convertWhitespaceEscapes + + if multiline: + self.flags = re.MULTILINE | re.DOTALL + self.pattern = r'%s(?:[^%s%s]' % \ + ( re.escape(self.quoteChar), + _escapeRegexRangeChars(self.endQuoteChar[0]), + (escChar is not None and _escapeRegexRangeChars(escChar) or '') ) + else: + self.flags = 0 + self.pattern = r'%s(?:[^%s\n\r%s]' % \ + ( re.escape(self.quoteChar), + _escapeRegexRangeChars(self.endQuoteChar[0]), + (escChar is not None and _escapeRegexRangeChars(escChar) or '') ) + if len(self.endQuoteChar) > 1: + self.pattern += ( + '|(?:' + ')|(?:'.join("%s[^%s]" % (re.escape(self.endQuoteChar[:i]), + _escapeRegexRangeChars(self.endQuoteChar[i])) + for i in range(len(self.endQuoteChar)-1,0,-1)) + ')' + ) + if escQuote: + self.pattern += (r'|(?:%s)' % re.escape(escQuote)) + if escChar: + self.pattern += (r'|(?:%s.)' % re.escape(escChar)) + self.escCharReplacePattern = re.escape(self.escChar)+"(.)" + self.pattern += (r')*%s' % re.escape(self.endQuoteChar)) + + try: + self.re = re.compile(self.pattern, self.flags) + self.reString = self.pattern + except sre_constants.error: + warnings.warn("invalid pattern (%s) passed to Regex" % self.pattern, + SyntaxWarning, stacklevel=2) + raise + + self.name = _ustr(self) + self.errmsg = "Expected " + self.name + self.mayIndexError = False + self.mayReturnEmpty = True + + def 
parseImpl( self, instring, loc, doActions=True ): + result = instring[loc] == self.firstQuoteChar and self.re.match(instring,loc) or None + if not result: + raise ParseException(instring, loc, self.errmsg, self) + + loc = result.end() + ret = result.group() + + if self.unquoteResults: + + # strip off quotes + ret = ret[self.quoteCharLen:-self.endQuoteCharLen] + + if isinstance(ret,basestring): + # replace escaped whitespace + if '\\' in ret and self.convertWhitespaceEscapes: + ws_map = { + r'\t' : '\t', + r'\n' : '\n', + r'\f' : '\f', + r'\r' : '\r', + } + for wslit,wschar in ws_map.items(): + ret = ret.replace(wslit, wschar) + + # replace escaped characters + if self.escChar: + ret = re.sub(self.escCharReplacePattern,"\g<1>",ret) + + # replace escaped quotes + if self.escQuote: + ret = ret.replace(self.escQuote, self.endQuoteChar) + + return loc, ret + + def __str__( self ): + try: + return super(QuotedString,self).__str__() + except Exception: + pass + + if self.strRepr is None: + self.strRepr = "quoted string, starting with %s ending with %s" % (self.quoteChar, self.endQuoteChar) + + return self.strRepr + + +class CharsNotIn(Token): + """ + Token for matching words composed of characters I{not} in a given set (will + include whitespace in matched characters if not listed in the provided exclusion set - see example). + Defined with string containing all disallowed characters, and an optional + minimum, maximum, and/or exact length. The default value for C{min} is 1 (a + minimum value < 1 is not valid); the default values for C{max} and C{exact} + are 0, meaning no maximum or exact length restriction. + + Example:: + # define a comma-separated-value as anything that is not a ',' + csv_value = CharsNotIn(',') + print(delimitedList(csv_value).parseString("dkls,lsdkjf,s12 34,@!#,213")) + prints:: + ['dkls', 'lsdkjf', 's12 34', '@!#', '213'] + """ + def __init__( self, notChars, min=1, max=0, exact=0 ): + super(CharsNotIn,self).__init__() + self.skipWhitespace = False + self.notChars = notChars + + if min < 1: + raise ValueError("cannot specify a minimum length < 1; use Optional(CharsNotIn()) if zero-length char group is permitted") + + self.minLen = min + + if max > 0: + self.maxLen = max + else: + self.maxLen = _MAX_INT + + if exact > 0: + self.maxLen = exact + self.minLen = exact + + self.name = _ustr(self) + self.errmsg = "Expected " + self.name + self.mayReturnEmpty = ( self.minLen == 0 ) + self.mayIndexError = False + + def parseImpl( self, instring, loc, doActions=True ): + if instring[loc] in self.notChars: + raise ParseException(instring, loc, self.errmsg, self) + + start = loc + loc += 1 + notchars = self.notChars + maxlen = min( start+self.maxLen, len(instring) ) + while loc < maxlen and \ + (instring[loc] not in notchars): + loc += 1 + + if loc - start < self.minLen: + raise ParseException(instring, loc, self.errmsg, self) + + return loc, instring[start:loc] + + def __str__( self ): + try: + return super(CharsNotIn, self).__str__() + except Exception: + pass + + if self.strRepr is None: + if len(self.notChars) > 4: + self.strRepr = "!W:(%s...)" % self.notChars[:4] + else: + self.strRepr = "!W:(%s)" % self.notChars + + return self.strRepr + +class White(Token): + """ + Special matching class for matching whitespace. Normally, whitespace is ignored + by pyparsing grammars. This class is included when some whitespace structures + are significant. Define with a string containing the whitespace characters to be + matched; default is C{" \\t\\r\\n"}. 
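A short sketch of QuotedString and CharsNotIn based on the docstrings above (delimitedList is a helper defined later in this module):

    from pyparsing import QuotedString, CharsNotIn, delimitedList

    # SQL-style quoting: "" inside the quotes stands for a literal "
    sql_qs = QuotedString('"', escQuote='""')
    print(sql_qs.parseString('"quote with ""embedded"" quotes"'))
    # -> ['quote with "embedded" quotes']

    # CharsNotIn matches runs of characters *not* in the given set (whitespace included)
    csv_value = CharsNotIn(',')
    print(delimitedList(csv_value).parseString("dkls,lsdkjf,s12 34,@!#,213"))
    # -> ['dkls', 'lsdkjf', 's12 34', '@!#', '213']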
Also takes optional C{min}, C{max}, and C{exact} arguments, + as defined for the C{L{Word}} class. + """ + whiteStrs = { + " " : "<SPC>", + "\t": "<TAB>", + "\n": "<LF>", + "\r": "<CR>", + "\f": "<FF>", + } + def __init__(self, ws=" \t\r\n", min=1, max=0, exact=0): + super(White,self).__init__() + self.matchWhite = ws + self.setWhitespaceChars( "".join(c for c in self.whiteChars if c not in self.matchWhite) ) + #~ self.leaveWhitespace() + self.name = ("".join(White.whiteStrs[c] for c in self.matchWhite)) + self.mayReturnEmpty = True + self.errmsg = "Expected " + self.name + + self.minLen = min + + if max > 0: + self.maxLen = max + else: + self.maxLen = _MAX_INT + + if exact > 0: + self.maxLen = exact + self.minLen = exact + + def parseImpl( self, instring, loc, doActions=True ): + if not(instring[ loc ] in self.matchWhite): + raise ParseException(instring, loc, self.errmsg, self) + start = loc + loc += 1 + maxloc = start + self.maxLen + maxloc = min( maxloc, len(instring) ) + while loc < maxloc and instring[loc] in self.matchWhite: + loc += 1 + + if loc - start < self.minLen: + raise ParseException(instring, loc, self.errmsg, self) + + return loc, instring[start:loc] + + +class _PositionToken(Token): + def __init__( self ): + super(_PositionToken,self).__init__() + self.name=self.__class__.__name__ + self.mayReturnEmpty = True + self.mayIndexError = False + +class GoToColumn(_PositionToken): + """ + Token to advance to a specific column of input text; useful for tabular report scraping. + """ + def __init__( self, colno ): + super(GoToColumn,self).__init__() + self.col = colno + + def preParse( self, instring, loc ): + if col(loc,instring) != self.col: + instrlen = len(instring) + if self.ignoreExprs: + loc = self._skipIgnorables( instring, loc ) + while loc < instrlen and instring[loc].isspace() and col( loc, instring ) != self.col : + loc += 1 + return loc + + def parseImpl( self, instring, loc, doActions=True ): + thiscol = col( loc, instring ) + if thiscol > self.col: + raise ParseException( instring, loc, "Text not in expected column", self ) + newloc = loc + self.col - thiscol + ret = instring[ loc: newloc ] + return newloc, ret + + +class LineStart(_PositionToken): + """ + Matches if current position is at the beginning of a line within the parse string + + Example:: + + test = '''\ + AAA this line + AAA and this line + AAA but not this one + B AAA and definitely not this one + ''' + + for t in (LineStart() + 'AAA' + restOfLine).searchString(test): + print(t) + + Prints:: + ['AAA', ' this line'] + ['AAA', ' and this line'] + + """ + def __init__( self ): + super(LineStart,self).__init__() + self.errmsg = "Expected start of line" + + def parseImpl( self, instring, loc, doActions=True ): + if col(loc, instring) == 1: + return loc, [] + raise ParseException(instring, loc, self.errmsg, self) + +class LineEnd(_PositionToken): + """ + Matches if current position is at the end of a line within the parse string + """ + def __init__( self ): + super(LineEnd,self).__init__() + self.setWhitespaceChars( ParserElement.DEFAULT_WHITE_CHARS.replace("\n","") ) + self.errmsg = "Expected end of line" + + def parseImpl( self, instring, loc, doActions=True ): + if loc<len(instring): + if instring[loc] == "\n": + return loc+1, "\n" + else: + raise ParseException(instring, loc, self.errmsg, self) + elif loc == len(instring): + return loc+1, [] + else: + raise ParseException(instring, loc, self.errmsg, self) + +class StringStart(_PositionToken): + """ + Matches if current position is at the beginning of 
the parse string + """ + def __init__( self ): + super(StringStart,self).__init__() + self.errmsg = "Expected start of text" + + def parseImpl( self, instring, loc, doActions=True ): + if loc != 0: + # see if entire string up to here is just whitespace and ignoreables + if loc != self.preParse( instring, 0 ): + raise ParseException(instring, loc, self.errmsg, self) + return loc, [] + +class StringEnd(_PositionToken): + """ + Matches if current position is at the end of the parse string + """ + def __init__( self ): + super(StringEnd,self).__init__() + self.errmsg = "Expected end of text" + + def parseImpl( self, instring, loc, doActions=True ): + if loc < len(instring): + raise ParseException(instring, loc, self.errmsg, self) + elif loc == len(instring): + return loc+1, [] + elif loc > len(instring): + return loc, [] + else: + raise ParseException(instring, loc, self.errmsg, self) + +class WordStart(_PositionToken): + """ + Matches if the current position is at the beginning of a Word, and + is not preceded by any character in a given set of C{wordChars} + (default=C{printables}). To emulate the C{\b} behavior of regular expressions, + use C{WordStart(alphanums)}. C{WordStart} will also match at the beginning of + the string being parsed, or at the beginning of a line. + """ + def __init__(self, wordChars = printables): + super(WordStart,self).__init__() + self.wordChars = set(wordChars) + self.errmsg = "Not at the start of a word" + + def parseImpl(self, instring, loc, doActions=True ): + if loc != 0: + if (instring[loc-1] in self.wordChars or + instring[loc] not in self.wordChars): + raise ParseException(instring, loc, self.errmsg, self) + return loc, [] + +class WordEnd(_PositionToken): + """ + Matches if the current position is at the end of a Word, and + is not followed by any character in a given set of C{wordChars} + (default=C{printables}). To emulate the C{\b} behavior of regular expressions, + use C{WordEnd(alphanums)}. C{WordEnd} will also match at the end of + the string being parsed, or at the end of a line. + """ + def __init__(self, wordChars = printables): + super(WordEnd,self).__init__() + self.wordChars = set(wordChars) + self.skipWhitespace = False + self.errmsg = "Not at the end of a word" + + def parseImpl(self, instring, loc, doActions=True ): + instrlen = len(instring) + if instrlen>0 and loc<instrlen: + if (instring[loc] in self.wordChars or + instring[loc-1] not in self.wordChars): + raise ParseException(instring, loc, self.errmsg, self) + return loc, [] + + +class ParseExpression(ParserElement): + """ + Abstract subclass of ParserElement, for combining and post-processing parsed tokens. 
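+
+    Concrete subclasses defined below include C{L{And}}, C{L{Or}}, C{L{MatchFirst}}, and C{L{Each}}.
+    A minimal sketch, using only classes defined in this module::
+        greeting = Word(alphas) + ',' + Word(alphas) + '!'    # an And, built with the '+' operator
+        print(greeting.parseString("Hello, World!"))
+    prints::
+        ['Hello', ',', 'World', '!']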
+ """ + def __init__( self, exprs, savelist = False ): + super(ParseExpression,self).__init__(savelist) + if isinstance( exprs, _generatorType ): + exprs = list(exprs) + + if isinstance( exprs, basestring ): + self.exprs = [ ParserElement._literalStringClass( exprs ) ] + elif isinstance( exprs, collections.Iterable ): + exprs = list(exprs) + # if sequence of strings provided, wrap with Literal + if all(isinstance(expr, basestring) for expr in exprs): + exprs = map(ParserElement._literalStringClass, exprs) + self.exprs = list(exprs) + else: + try: + self.exprs = list( exprs ) + except TypeError: + self.exprs = [ exprs ] + self.callPreparse = False + + def __getitem__( self, i ): + return self.exprs[i] + + def append( self, other ): + self.exprs.append( other ) + self.strRepr = None + return self + + def leaveWhitespace( self ): + """Extends C{leaveWhitespace} defined in base class, and also invokes C{leaveWhitespace} on + all contained expressions.""" + self.skipWhitespace = False + self.exprs = [ e.copy() for e in self.exprs ] + for e in self.exprs: + e.leaveWhitespace() + return self + + def ignore( self, other ): + if isinstance( other, Suppress ): + if other not in self.ignoreExprs: + super( ParseExpression, self).ignore( other ) + for e in self.exprs: + e.ignore( self.ignoreExprs[-1] ) + else: + super( ParseExpression, self).ignore( other ) + for e in self.exprs: + e.ignore( self.ignoreExprs[-1] ) + return self + + def __str__( self ): + try: + return super(ParseExpression,self).__str__() + except Exception: + pass + + if self.strRepr is None: + self.strRepr = "%s:(%s)" % ( self.__class__.__name__, _ustr(self.exprs) ) + return self.strRepr + + def streamline( self ): + super(ParseExpression,self).streamline() + + for e in self.exprs: + e.streamline() + + # collapse nested And's of the form And( And( And( a,b), c), d) to And( a,b,c,d ) + # but only if there are no parse actions or resultsNames on the nested And's + # (likewise for Or's and MatchFirst's) + if ( len(self.exprs) == 2 ): + other = self.exprs[0] + if ( isinstance( other, self.__class__ ) and + not(other.parseAction) and + other.resultsName is None and + not other.debug ): + self.exprs = other.exprs[:] + [ self.exprs[1] ] + self.strRepr = None + self.mayReturnEmpty |= other.mayReturnEmpty + self.mayIndexError |= other.mayIndexError + + other = self.exprs[-1] + if ( isinstance( other, self.__class__ ) and + not(other.parseAction) and + other.resultsName is None and + not other.debug ): + self.exprs = self.exprs[:-1] + other.exprs[:] + self.strRepr = None + self.mayReturnEmpty |= other.mayReturnEmpty + self.mayIndexError |= other.mayIndexError + + self.errmsg = "Expected " + _ustr(self) + + return self + + def setResultsName( self, name, listAllMatches=False ): + ret = super(ParseExpression,self).setResultsName(name,listAllMatches) + return ret + + def validate( self, validateTrace=[] ): + tmp = validateTrace[:]+[self] + for e in self.exprs: + e.validate(tmp) + self.checkRecursion( [] ) + + def copy(self): + ret = super(ParseExpression,self).copy() + ret.exprs = [e.copy() for e in self.exprs] + return ret + +class And(ParseExpression): + """ + Requires all given C{ParseExpression}s to be found in the given order. + Expressions may be separated by whitespace. + May be constructed using the C{'+'} operator. + May also be constructed using the C{'-'} operator, which will suppress backtracking. 
+ + Example:: + integer = Word(nums) + name_expr = OneOrMore(Word(alphas)) + + expr = And([integer("id"),name_expr("name"),integer("age")]) + # more easily written as: + expr = integer("id") + name_expr("name") + integer("age") + """ + + class _ErrorStop(Empty): + def __init__(self, *args, **kwargs): + super(And._ErrorStop,self).__init__(*args, **kwargs) + self.name = '-' + self.leaveWhitespace() + + def __init__( self, exprs, savelist = True ): + super(And,self).__init__(exprs, savelist) + self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs) + self.setWhitespaceChars( self.exprs[0].whiteChars ) + self.skipWhitespace = self.exprs[0].skipWhitespace + self.callPreparse = True + + def parseImpl( self, instring, loc, doActions=True ): + # pass False as last arg to _parse for first element, since we already + # pre-parsed the string as part of our And pre-parsing + loc, resultlist = self.exprs[0]._parse( instring, loc, doActions, callPreParse=False ) + errorStop = False + for e in self.exprs[1:]: + if isinstance(e, And._ErrorStop): + errorStop = True + continue + if errorStop: + try: + loc, exprtokens = e._parse( instring, loc, doActions ) + except ParseSyntaxException: + raise + except ParseBaseException as pe: + pe.__traceback__ = None + raise ParseSyntaxException._from_exception(pe) + except IndexError: + raise ParseSyntaxException(instring, len(instring), self.errmsg, self) + else: + loc, exprtokens = e._parse( instring, loc, doActions ) + if exprtokens or exprtokens.haskeys(): + resultlist += exprtokens + return loc, resultlist + + def __iadd__(self, other ): + if isinstance( other, basestring ): + other = ParserElement._literalStringClass( other ) + return self.append( other ) #And( [ self, other ] ) + + def checkRecursion( self, parseElementList ): + subRecCheckList = parseElementList[:] + [ self ] + for e in self.exprs: + e.checkRecursion( subRecCheckList ) + if not e.mayReturnEmpty: + break + + def __str__( self ): + if hasattr(self,"name"): + return self.name + + if self.strRepr is None: + self.strRepr = "{" + " ".join(_ustr(e) for e in self.exprs) + "}" + + return self.strRepr + + +class Or(ParseExpression): + """ + Requires that at least one C{ParseExpression} is found. + If two expressions match, the expression that matches the longest string will be used. + May be constructed using the C{'^'} operator. + + Example:: + # construct Or using '^' operator + + number = Word(nums) ^ Combine(Word(nums) + '.' 
+ Word(nums)) + print(number.searchString("123 3.1416 789")) + prints:: + [['123'], ['3.1416'], ['789']] + """ + def __init__( self, exprs, savelist = False ): + super(Or,self).__init__(exprs, savelist) + if self.exprs: + self.mayReturnEmpty = any(e.mayReturnEmpty for e in self.exprs) + else: + self.mayReturnEmpty = True + + def parseImpl( self, instring, loc, doActions=True ): + maxExcLoc = -1 + maxException = None + matches = [] + for e in self.exprs: + try: + loc2 = e.tryParse( instring, loc ) + except ParseException as err: + err.__traceback__ = None + if err.loc > maxExcLoc: + maxException = err + maxExcLoc = err.loc + except IndexError: + if len(instring) > maxExcLoc: + maxException = ParseException(instring,len(instring),e.errmsg,self) + maxExcLoc = len(instring) + else: + # save match among all matches, to retry longest to shortest + matches.append((loc2, e)) + + if matches: + matches.sort(key=lambda x: -x[0]) + for _,e in matches: + try: + return e._parse( instring, loc, doActions ) + except ParseException as err: + err.__traceback__ = None + if err.loc > maxExcLoc: + maxException = err + maxExcLoc = err.loc + + if maxException is not None: + maxException.msg = self.errmsg + raise maxException + else: + raise ParseException(instring, loc, "no defined alternatives to match", self) + + + def __ixor__(self, other ): + if isinstance( other, basestring ): + other = ParserElement._literalStringClass( other ) + return self.append( other ) #Or( [ self, other ] ) + + def __str__( self ): + if hasattr(self,"name"): + return self.name + + if self.strRepr is None: + self.strRepr = "{" + " ^ ".join(_ustr(e) for e in self.exprs) + "}" + + return self.strRepr + + def checkRecursion( self, parseElementList ): + subRecCheckList = parseElementList[:] + [ self ] + for e in self.exprs: + e.checkRecursion( subRecCheckList ) + + +class MatchFirst(ParseExpression): + """ + Requires that at least one C{ParseExpression} is found. + If two expressions match, the first one listed is the one that will match. + May be constructed using the C{'|'} operator. + + Example:: + # construct MatchFirst using '|' operator + + # watch the order of expressions to match + number = Word(nums) | Combine(Word(nums) + '.' + Word(nums)) + print(number.searchString("123 3.1416 789")) # Fail! -> [['123'], ['3'], ['1416'], ['789']] + + # put more selective expression first + number = Combine(Word(nums) + '.' 
+ Word(nums)) | Word(nums) + print(number.searchString("123 3.1416 789")) # Better -> [['123'], ['3.1416'], ['789']] + """ + def __init__( self, exprs, savelist = False ): + super(MatchFirst,self).__init__(exprs, savelist) + if self.exprs: + self.mayReturnEmpty = any(e.mayReturnEmpty for e in self.exprs) + else: + self.mayReturnEmpty = True + + def parseImpl( self, instring, loc, doActions=True ): + maxExcLoc = -1 + maxException = None + for e in self.exprs: + try: + ret = e._parse( instring, loc, doActions ) + return ret + except ParseException as err: + if err.loc > maxExcLoc: + maxException = err + maxExcLoc = err.loc + except IndexError: + if len(instring) > maxExcLoc: + maxException = ParseException(instring,len(instring),e.errmsg,self) + maxExcLoc = len(instring) + + # only got here if no expression matched, raise exception for match that made it the furthest + else: + if maxException is not None: + maxException.msg = self.errmsg + raise maxException + else: + raise ParseException(instring, loc, "no defined alternatives to match", self) + + def __ior__(self, other ): + if isinstance( other, basestring ): + other = ParserElement._literalStringClass( other ) + return self.append( other ) #MatchFirst( [ self, other ] ) + + def __str__( self ): + if hasattr(self,"name"): + return self.name + + if self.strRepr is None: + self.strRepr = "{" + " | ".join(_ustr(e) for e in self.exprs) + "}" + + return self.strRepr + + def checkRecursion( self, parseElementList ): + subRecCheckList = parseElementList[:] + [ self ] + for e in self.exprs: + e.checkRecursion( subRecCheckList ) + + +class Each(ParseExpression): + """ + Requires all given C{ParseExpression}s to be found, but in any order. + Expressions may be separated by whitespace. + May be constructed using the C{'&'} operator. 
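+
+    A minimal sketch, ahead of the fuller example below::
+        name_attr = "name:" + Word(alphas)
+        age_attr = "age:" + Word(nums)
+        record = name_attr & age_attr    # both attributes must appear, but in either order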
+ + Example:: + color = oneOf("RED ORANGE YELLOW GREEN BLUE PURPLE BLACK WHITE BROWN") + shape_type = oneOf("SQUARE CIRCLE TRIANGLE STAR HEXAGON OCTAGON") + integer = Word(nums) + shape_attr = "shape:" + shape_type("shape") + posn_attr = "posn:" + Group(integer("x") + ',' + integer("y"))("posn") + color_attr = "color:" + color("color") + size_attr = "size:" + integer("size") + + # use Each (using operator '&') to accept attributes in any order + # (shape and posn are required, color and size are optional) + shape_spec = shape_attr & posn_attr & Optional(color_attr) & Optional(size_attr) + + shape_spec.runTests(''' + shape: SQUARE color: BLACK posn: 100, 120 + shape: CIRCLE size: 50 color: BLUE posn: 50,80 + color:GREEN size:20 shape:TRIANGLE posn:20,40 + ''' + ) + prints:: + shape: SQUARE color: BLACK posn: 100, 120 + ['shape:', 'SQUARE', 'color:', 'BLACK', 'posn:', ['100', ',', '120']] + - color: BLACK + - posn: ['100', ',', '120'] + - x: 100 + - y: 120 + - shape: SQUARE + + + shape: CIRCLE size: 50 color: BLUE posn: 50,80 + ['shape:', 'CIRCLE', 'size:', '50', 'color:', 'BLUE', 'posn:', ['50', ',', '80']] + - color: BLUE + - posn: ['50', ',', '80'] + - x: 50 + - y: 80 + - shape: CIRCLE + - size: 50 + + + color: GREEN size: 20 shape: TRIANGLE posn: 20,40 + ['color:', 'GREEN', 'size:', '20', 'shape:', 'TRIANGLE', 'posn:', ['20', ',', '40']] + - color: GREEN + - posn: ['20', ',', '40'] + - x: 20 + - y: 40 + - shape: TRIANGLE + - size: 20 + """ + def __init__( self, exprs, savelist = True ): + super(Each,self).__init__(exprs, savelist) + self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs) + self.skipWhitespace = True + self.initExprGroups = True + + def parseImpl( self, instring, loc, doActions=True ): + if self.initExprGroups: + self.opt1map = dict((id(e.expr),e) for e in self.exprs if isinstance(e,Optional)) + opt1 = [ e.expr for e in self.exprs if isinstance(e,Optional) ] + opt2 = [ e for e in self.exprs if e.mayReturnEmpty and not isinstance(e,Optional)] + self.optionals = opt1 + opt2 + self.multioptionals = [ e.expr for e in self.exprs if isinstance(e,ZeroOrMore) ] + self.multirequired = [ e.expr for e in self.exprs if isinstance(e,OneOrMore) ] + self.required = [ e for e in self.exprs if not isinstance(e,(Optional,ZeroOrMore,OneOrMore)) ] + self.required += self.multirequired + self.initExprGroups = False + tmpLoc = loc + tmpReqd = self.required[:] + tmpOpt = self.optionals[:] + matchOrder = [] + + keepMatching = True + while keepMatching: + tmpExprs = tmpReqd + tmpOpt + self.multioptionals + self.multirequired + failed = [] + for e in tmpExprs: + try: + tmpLoc = e.tryParse( instring, tmpLoc ) + except ParseException: + failed.append(e) + else: + matchOrder.append(self.opt1map.get(id(e),e)) + if e in tmpReqd: + tmpReqd.remove(e) + elif e in tmpOpt: + tmpOpt.remove(e) + if len(failed) == len(tmpExprs): + keepMatching = False + + if tmpReqd: + missing = ", ".join(_ustr(e) for e in tmpReqd) + raise ParseException(instring,loc,"Missing one or more required elements (%s)" % missing ) + + # add any unmatched Optionals, in case they have default values defined + matchOrder += [e for e in self.exprs if isinstance(e,Optional) and e.expr in tmpOpt] + + resultlist = [] + for e in matchOrder: + loc,results = e._parse(instring,loc,doActions) + resultlist.append(results) + + finalResults = sum(resultlist, ParseResults([])) + return loc, finalResults + + def __str__( self ): + if hasattr(self,"name"): + return self.name + + if self.strRepr is None: + self.strRepr = "{" + " & 
".join(_ustr(e) for e in self.exprs) + "}" + + return self.strRepr + + def checkRecursion( self, parseElementList ): + subRecCheckList = parseElementList[:] + [ self ] + for e in self.exprs: + e.checkRecursion( subRecCheckList ) + + +class ParseElementEnhance(ParserElement): + """ + Abstract subclass of C{ParserElement}, for combining and post-processing parsed tokens. + """ + def __init__( self, expr, savelist=False ): + super(ParseElementEnhance,self).__init__(savelist) + if isinstance( expr, basestring ): + if issubclass(ParserElement._literalStringClass, Token): + expr = ParserElement._literalStringClass(expr) + else: + expr = ParserElement._literalStringClass(Literal(expr)) + self.expr = expr + self.strRepr = None + if expr is not None: + self.mayIndexError = expr.mayIndexError + self.mayReturnEmpty = expr.mayReturnEmpty + self.setWhitespaceChars( expr.whiteChars ) + self.skipWhitespace = expr.skipWhitespace + self.saveAsList = expr.saveAsList + self.callPreparse = expr.callPreparse + self.ignoreExprs.extend(expr.ignoreExprs) + + def parseImpl( self, instring, loc, doActions=True ): + if self.expr is not None: + return self.expr._parse( instring, loc, doActions, callPreParse=False ) + else: + raise ParseException("",loc,self.errmsg,self) + + def leaveWhitespace( self ): + self.skipWhitespace = False + self.expr = self.expr.copy() + if self.expr is not None: + self.expr.leaveWhitespace() + return self + + def ignore( self, other ): + if isinstance( other, Suppress ): + if other not in self.ignoreExprs: + super( ParseElementEnhance, self).ignore( other ) + if self.expr is not None: + self.expr.ignore( self.ignoreExprs[-1] ) + else: + super( ParseElementEnhance, self).ignore( other ) + if self.expr is not None: + self.expr.ignore( self.ignoreExprs[-1] ) + return self + + def streamline( self ): + super(ParseElementEnhance,self).streamline() + if self.expr is not None: + self.expr.streamline() + return self + + def checkRecursion( self, parseElementList ): + if self in parseElementList: + raise RecursiveGrammarException( parseElementList+[self] ) + subRecCheckList = parseElementList[:] + [ self ] + if self.expr is not None: + self.expr.checkRecursion( subRecCheckList ) + + def validate( self, validateTrace=[] ): + tmp = validateTrace[:]+[self] + if self.expr is not None: + self.expr.validate(tmp) + self.checkRecursion( [] ) + + def __str__( self ): + try: + return super(ParseElementEnhance,self).__str__() + except Exception: + pass + + if self.strRepr is None and self.expr is not None: + self.strRepr = "%s:(%s)" % ( self.__class__.__name__, _ustr(self.expr) ) + return self.strRepr + + +class FollowedBy(ParseElementEnhance): + """ + Lookahead matching of the given parse expression. C{FollowedBy} + does I{not} advance the parsing position within the input string, it only + verifies that the specified parse expression matches at the current + position. C{FollowedBy} always returns a null token list. 
+ + Example:: + # use FollowedBy to match a label only if it is followed by a ':' + data_word = Word(alphas) + label = data_word + FollowedBy(':') + attr_expr = Group(label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)) + + OneOrMore(attr_expr).parseString("shape: SQUARE color: BLACK posn: upper left").pprint() + prints:: + [['shape', 'SQUARE'], ['color', 'BLACK'], ['posn', 'upper left']] + """ + def __init__( self, expr ): + super(FollowedBy,self).__init__(expr) + self.mayReturnEmpty = True + + def parseImpl( self, instring, loc, doActions=True ): + self.expr.tryParse( instring, loc ) + return loc, [] + + +class NotAny(ParseElementEnhance): + """ + Lookahead to disallow matching with the given parse expression. C{NotAny} + does I{not} advance the parsing position within the input string, it only + verifies that the specified parse expression does I{not} match at the current + position. Also, C{NotAny} does I{not} skip over leading whitespace. C{NotAny} + always returns a null token list. May be constructed using the '~' operator. + + Example:: + + """ + def __init__( self, expr ): + super(NotAny,self).__init__(expr) + #~ self.leaveWhitespace() + self.skipWhitespace = False # do NOT use self.leaveWhitespace(), don't want to propagate to exprs + self.mayReturnEmpty = True + self.errmsg = "Found unwanted token, "+_ustr(self.expr) + + def parseImpl( self, instring, loc, doActions=True ): + if self.expr.canParseNext(instring, loc): + raise ParseException(instring, loc, self.errmsg, self) + return loc, [] + + def __str__( self ): + if hasattr(self,"name"): + return self.name + + if self.strRepr is None: + self.strRepr = "~{" + _ustr(self.expr) + "}" + + return self.strRepr + +class _MultipleMatch(ParseElementEnhance): + def __init__( self, expr, stopOn=None): + super(_MultipleMatch, self).__init__(expr) + self.saveAsList = True + ender = stopOn + if isinstance(ender, basestring): + ender = ParserElement._literalStringClass(ender) + self.not_ender = ~ender if ender is not None else None + + def parseImpl( self, instring, loc, doActions=True ): + self_expr_parse = self.expr._parse + self_skip_ignorables = self._skipIgnorables + check_ender = self.not_ender is not None + if check_ender: + try_not_ender = self.not_ender.tryParse + + # must be at least one (but first see if we are the stopOn sentinel; + # if so, fail) + if check_ender: + try_not_ender(instring, loc) + loc, tokens = self_expr_parse( instring, loc, doActions, callPreParse=False ) + try: + hasIgnoreExprs = (not not self.ignoreExprs) + while 1: + if check_ender: + try_not_ender(instring, loc) + if hasIgnoreExprs: + preloc = self_skip_ignorables( instring, loc ) + else: + preloc = loc + loc, tmptokens = self_expr_parse( instring, preloc, doActions ) + if tmptokens or tmptokens.haskeys(): + tokens += tmptokens + except (ParseException,IndexError): + pass + + return loc, tokens + +class OneOrMore(_MultipleMatch): + """ + Repetition of one or more of the given expression. + + Parameters: + - expr - expression that must match one or more times + - stopOn - (default=C{None}) - expression for a terminating sentinel + (only required if the sentinel would ordinarily match the repetition + expression) + + Example:: + data_word = Word(alphas) + label = data_word + FollowedBy(':') + attr_expr = Group(label + Suppress(':') + OneOrMore(data_word).setParseAction(' '.join)) + + text = "shape: SQUARE posn: upper left color: BLACK" + OneOrMore(attr_expr).parseString(text).pprint() # Fail! 
read 'color' as data instead of next label -> [['shape', 'SQUARE color']] + + # use stopOn attribute for OneOrMore to avoid reading label string as part of the data + attr_expr = Group(label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)) + OneOrMore(attr_expr).parseString(text).pprint() # Better -> [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'BLACK']] + + # could also be written as + (attr_expr * (1,)).parseString(text).pprint() + """ + + def __str__( self ): + if hasattr(self,"name"): + return self.name + + if self.strRepr is None: + self.strRepr = "{" + _ustr(self.expr) + "}..." + + return self.strRepr + +class ZeroOrMore(_MultipleMatch): + """ + Optional repetition of zero or more of the given expression. + + Parameters: + - expr - expression that must match zero or more times + - stopOn - (default=C{None}) - expression for a terminating sentinel + (only required if the sentinel would ordinarily match the repetition + expression) + + Example: similar to L{OneOrMore} + """ + def __init__( self, expr, stopOn=None): + super(ZeroOrMore,self).__init__(expr, stopOn=stopOn) + self.mayReturnEmpty = True + + def parseImpl( self, instring, loc, doActions=True ): + try: + return super(ZeroOrMore, self).parseImpl(instring, loc, doActions) + except (ParseException,IndexError): + return loc, [] + + def __str__( self ): + if hasattr(self,"name"): + return self.name + + if self.strRepr is None: + self.strRepr = "[" + _ustr(self.expr) + "]..." + + return self.strRepr + +class _NullToken(object): + def __bool__(self): + return False + __nonzero__ = __bool__ + def __str__(self): + return "" + +_optionalNotMatched = _NullToken() +class Optional(ParseElementEnhance): + """ + Optional matching of the given expression. + + Parameters: + - expr - expression that must match zero or more times + - default (optional) - value to be returned if the optional expression is not found. + + Example:: + # US postal code can be a 5-digit zip, plus optional 4-digit qualifier + zip = Combine(Word(nums, exact=5) + Optional('-' + Word(nums, exact=4))) + zip.runTests(''' + # traditional ZIP code + 12345 + + # ZIP+4 form + 12101-0001 + + # invalid ZIP + 98765- + ''') + prints:: + # traditional ZIP code + 12345 + ['12345'] + + # ZIP+4 form + 12101-0001 + ['12101-0001'] + + # invalid ZIP + 98765- + ^ + FAIL: Expected end of text (at char 5), (line:1, col:6) + """ + def __init__( self, expr, default=_optionalNotMatched ): + super(Optional,self).__init__( expr, savelist=False ) + self.saveAsList = self.expr.saveAsList + self.defaultValue = default + self.mayReturnEmpty = True + + def parseImpl( self, instring, loc, doActions=True ): + try: + loc, tokens = self.expr._parse( instring, loc, doActions, callPreParse=False ) + except (ParseException,IndexError): + if self.defaultValue is not _optionalNotMatched: + if self.expr.resultsName: + tokens = ParseResults([ self.defaultValue ]) + tokens[self.expr.resultsName] = self.defaultValue + else: + tokens = [ self.defaultValue ] + else: + tokens = [] + return loc, tokens + + def __str__( self ): + if hasattr(self,"name"): + return self.name + + if self.strRepr is None: + self.strRepr = "[" + _ustr(self.expr) + "]" + + return self.strRepr + +class SkipTo(ParseElementEnhance): + """ + Token for skipping over all undefined text until the matched expression is found. 
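+
+    A minimal sketch: C{SkipTo(';')} matches all text up to, but not including, the next C{';'}::
+        print(SkipTo(';').parseString("a b c ; d"))    # -> ['a b c ']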
+ + Parameters: + - expr - target expression marking the end of the data to be skipped + - include - (default=C{False}) if True, the target expression is also parsed + (the skipped text and target expression are returned as a 2-element list). + - ignore - (default=C{None}) used to define grammars (typically quoted strings and + comments) that might contain false matches to the target expression + - failOn - (default=C{None}) define expressions that are not allowed to be + included in the skipped test; if found before the target expression is found, + the SkipTo is not a match + + Example:: + report = ''' + Outstanding Issues Report - 1 Jan 2000 + + # | Severity | Description | Days Open + -----+----------+-------------------------------------------+----------- + 101 | Critical | Intermittent system crash | 6 + 94 | Cosmetic | Spelling error on Login ('log|n') | 14 + 79 | Minor | System slow when running too many reports | 47 + ''' + integer = Word(nums) + SEP = Suppress('|') + # use SkipTo to simply match everything up until the next SEP + # - ignore quoted strings, so that a '|' character inside a quoted string does not match + # - parse action will call token.strip() for each matched token, i.e., the description body + string_data = SkipTo(SEP, ignore=quotedString) + string_data.setParseAction(tokenMap(str.strip)) + ticket_expr = (integer("issue_num") + SEP + + string_data("sev") + SEP + + string_data("desc") + SEP + + integer("days_open")) + + for tkt in ticket_expr.searchString(report): + print tkt.dump() + prints:: + ['101', 'Critical', 'Intermittent system crash', '6'] + - days_open: 6 + - desc: Intermittent system crash + - issue_num: 101 + - sev: Critical + ['94', 'Cosmetic', "Spelling error on Login ('log|n')", '14'] + - days_open: 14 + - desc: Spelling error on Login ('log|n') + - issue_num: 94 + - sev: Cosmetic + ['79', 'Minor', 'System slow when running too many reports', '47'] + - days_open: 47 + - desc: System slow when running too many reports + - issue_num: 79 + - sev: Minor + """ + def __init__( self, other, include=False, ignore=None, failOn=None ): + super( SkipTo, self ).__init__( other ) + self.ignoreExpr = ignore + self.mayReturnEmpty = True + self.mayIndexError = False + self.includeMatch = include + self.asList = False + if isinstance(failOn, basestring): + self.failOn = ParserElement._literalStringClass(failOn) + else: + self.failOn = failOn + self.errmsg = "No match found for "+_ustr(self.expr) + + def parseImpl( self, instring, loc, doActions=True ): + startloc = loc + instrlen = len(instring) + expr = self.expr + expr_parse = self.expr._parse + self_failOn_canParseNext = self.failOn.canParseNext if self.failOn is not None else None + self_ignoreExpr_tryParse = self.ignoreExpr.tryParse if self.ignoreExpr is not None else None + + tmploc = loc + while tmploc <= instrlen: + if self_failOn_canParseNext is not None: + # break if failOn expression matches + if self_failOn_canParseNext(instring, tmploc): + break + + if self_ignoreExpr_tryParse is not None: + # advance past ignore expressions + while 1: + try: + tmploc = self_ignoreExpr_tryParse(instring, tmploc) + except ParseBaseException: + break + + try: + expr_parse(instring, tmploc, doActions=False, callPreParse=False) + except (ParseException, IndexError): + # no match, advance loc in string + tmploc += 1 + else: + # matched skipto expr, done + break + + else: + # ran off the end of the input string without matching skipto expr, fail + raise ParseException(instring, loc, self.errmsg, self) + + # build up return 
values + loc = tmploc + skiptext = instring[startloc:loc] + skipresult = ParseResults(skiptext) + + if self.includeMatch: + loc, mat = expr_parse(instring,loc,doActions,callPreParse=False) + skipresult += mat + + return loc, skipresult + +class Forward(ParseElementEnhance): + """ + Forward declaration of an expression to be defined later - + used for recursive grammars, such as algebraic infix notation. + When the expression is known, it is assigned to the C{Forward} variable using the '<<' operator. + + Note: take care when assigning to C{Forward} not to overlook precedence of operators. + Specifically, '|' has a lower precedence than '<<', so that:: + fwdExpr << a | b | c + will actually be evaluated as:: + (fwdExpr << a) | b | c + thereby leaving b and c out as parseable alternatives. It is recommended that you + explicitly group the values inserted into the C{Forward}:: + fwdExpr << (a | b | c) + Converting to use the '<<=' operator instead will avoid this problem. + + See L{ParseResults.pprint} for an example of a recursive parser created using + C{Forward}. + """ + def __init__( self, other=None ): + super(Forward,self).__init__( other, savelist=False ) + + def __lshift__( self, other ): + if isinstance( other, basestring ): + other = ParserElement._literalStringClass(other) + self.expr = other + self.strRepr = None + self.mayIndexError = self.expr.mayIndexError + self.mayReturnEmpty = self.expr.mayReturnEmpty + self.setWhitespaceChars( self.expr.whiteChars ) + self.skipWhitespace = self.expr.skipWhitespace + self.saveAsList = self.expr.saveAsList + self.ignoreExprs.extend(self.expr.ignoreExprs) + return self + + def __ilshift__(self, other): + return self << other + + def leaveWhitespace( self ): + self.skipWhitespace = False + return self + + def streamline( self ): + if not self.streamlined: + self.streamlined = True + if self.expr is not None: + self.expr.streamline() + return self + + def validate( self, validateTrace=[] ): + if self not in validateTrace: + tmp = validateTrace[:]+[self] + if self.expr is not None: + self.expr.validate(tmp) + self.checkRecursion([]) + + def __str__( self ): + if hasattr(self,"name"): + return self.name + return self.__class__.__name__ + ": ..." + + # stubbed out for now - creates awful memory and perf issues + self._revertClass = self.__class__ + self.__class__ = _ForwardNoRecurse + try: + if self.expr is not None: + retString = _ustr(self.expr) + else: + retString = "None" + finally: + self.__class__ = self._revertClass + return self.__class__.__name__ + ": " + retString + + def copy(self): + if self.expr is not None: + return super(Forward,self).copy() + else: + ret = Forward() + ret <<= self + return ret + +class _ForwardNoRecurse(Forward): + def __str__( self ): + return "..." + +class TokenConverter(ParseElementEnhance): + """ + Abstract subclass of C{ParseExpression}, for converting parsed results. + """ + def __init__( self, expr, savelist=False ): + super(TokenConverter,self).__init__( expr )#, savelist ) + self.saveAsList = False + +class Combine(TokenConverter): + """ + Converter to concatenate all matching tokens to a single string. + By default, the matching patterns must also be contiguous in the input string; + this can be disabled by specifying C{'adjacent=False'} in the constructor. + + Example:: + real = Word(nums) + '.' + Word(nums) + print(real.parseString('3.1416')) # -> ['3', '.', '1416'] + # will also erroneously match the following + print(real.parseString('3. 
1416')) # -> ['3', '.', '1416'] + + real = Combine(Word(nums) + '.' + Word(nums)) + print(real.parseString('3.1416')) # -> ['3.1416'] + # no match when there are internal spaces + print(real.parseString('3. 1416')) # -> Exception: Expected W:(0123...) + """ + def __init__( self, expr, joinString="", adjacent=True ): + super(Combine,self).__init__( expr ) + # suppress whitespace-stripping in contained parse expressions, but re-enable it on the Combine itself + if adjacent: + self.leaveWhitespace() + self.adjacent = adjacent + self.skipWhitespace = True + self.joinString = joinString + self.callPreparse = True + + def ignore( self, other ): + if self.adjacent: + ParserElement.ignore(self, other) + else: + super( Combine, self).ignore( other ) + return self + + def postParse( self, instring, loc, tokenlist ): + retToks = tokenlist.copy() + del retToks[:] + retToks += ParseResults([ "".join(tokenlist._asStringList(self.joinString)) ], modal=self.modalResults) + + if self.resultsName and retToks.haskeys(): + return [ retToks ] + else: + return retToks + +class Group(TokenConverter): + """ + Converter to return the matched tokens as a list - useful for returning tokens of C{L{ZeroOrMore}} and C{L{OneOrMore}} expressions. + + Example:: + ident = Word(alphas) + num = Word(nums) + term = ident | num + func = ident + Optional(delimitedList(term)) + print(func.parseString("fn a,b,100")) # -> ['fn', 'a', 'b', '100'] + + func = ident + Group(Optional(delimitedList(term))) + print(func.parseString("fn a,b,100")) # -> ['fn', ['a', 'b', '100']] + """ + def __init__( self, expr ): + super(Group,self).__init__( expr ) + self.saveAsList = True + + def postParse( self, instring, loc, tokenlist ): + return [ tokenlist ] + +class Dict(TokenConverter): + """ + Converter to return a repetitive expression as a list, but also as a dictionary. + Each element can also be referenced using the first token in the expression as its key. + Useful for tabular report scraping when the first column can be used as a item key. + + Example:: + data_word = Word(alphas) + label = data_word + FollowedBy(':') + attr_expr = Group(label + Suppress(':') + OneOrMore(data_word).setParseAction(' '.join)) + + text = "shape: SQUARE posn: upper left color: light blue texture: burlap" + attr_expr = (label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)) + + # print attributes as plain groups + print(OneOrMore(attr_expr).parseString(text).dump()) + + # instead of OneOrMore(expr), parse using Dict(OneOrMore(Group(expr))) - Dict will auto-assign names + result = Dict(OneOrMore(Group(attr_expr))).parseString(text) + print(result.dump()) + + # access named fields as dict entries, or output as dict + print(result['shape']) + print(result.asDict()) + prints:: + ['shape', 'SQUARE', 'posn', 'upper left', 'color', 'light blue', 'texture', 'burlap'] + + [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'light blue'], ['texture', 'burlap']] + - color: light blue + - posn: upper left + - shape: SQUARE + - texture: burlap + SQUARE + {'color': 'light blue', 'posn': 'upper left', 'texture': 'burlap', 'shape': 'SQUARE'} + See more examples at L{ParseResults} of accessing fields by results name. 
+ """ + def __init__( self, expr ): + super(Dict,self).__init__( expr ) + self.saveAsList = True + + def postParse( self, instring, loc, tokenlist ): + for i,tok in enumerate(tokenlist): + if len(tok) == 0: + continue + ikey = tok[0] + if isinstance(ikey,int): + ikey = _ustr(tok[0]).strip() + if len(tok)==1: + tokenlist[ikey] = _ParseResultsWithOffset("",i) + elif len(tok)==2 and not isinstance(tok[1],ParseResults): + tokenlist[ikey] = _ParseResultsWithOffset(tok[1],i) + else: + dictvalue = tok.copy() #ParseResults(i) + del dictvalue[0] + if len(dictvalue)!= 1 or (isinstance(dictvalue,ParseResults) and dictvalue.haskeys()): + tokenlist[ikey] = _ParseResultsWithOffset(dictvalue,i) + else: + tokenlist[ikey] = _ParseResultsWithOffset(dictvalue[0],i) + + if self.resultsName: + return [ tokenlist ] + else: + return tokenlist + + +class Suppress(TokenConverter): + """ + Converter for ignoring the results of a parsed expression. + + Example:: + source = "a, b, c,d" + wd = Word(alphas) + wd_list1 = wd + ZeroOrMore(',' + wd) + print(wd_list1.parseString(source)) + + # often, delimiters that are useful during parsing are just in the + # way afterward - use Suppress to keep them out of the parsed output + wd_list2 = wd + ZeroOrMore(Suppress(',') + wd) + print(wd_list2.parseString(source)) + prints:: + ['a', ',', 'b', ',', 'c', ',', 'd'] + ['a', 'b', 'c', 'd'] + (See also L{delimitedList}.) + """ + def postParse( self, instring, loc, tokenlist ): + return [] + + def suppress( self ): + return self + + +class OnlyOnce(object): + """ + Wrapper for parse actions, to ensure they are only called once. + """ + def __init__(self, methodCall): + self.callable = _trim_arity(methodCall) + self.called = False + def __call__(self,s,l,t): + if not self.called: + results = self.callable(s,l,t) + self.called = True + return results + raise ParseException(s,l,"") + def reset(self): + self.called = False + +def traceParseAction(f): + """ + Decorator for debugging parse actions. + + When the parse action is called, this decorator will print C{">> entering I{method-name}(line:I{current_source_line}, I{parse_location}, I{matched_tokens})".} + When the parse action completes, the decorator will print C{"<<"} followed by the returned value, or any exception that the parse action raised. + + Example:: + wd = Word(alphas) + + @traceParseAction + def remove_duplicate_chars(tokens): + return ''.join(sorted(set(''.join(tokens))) + + wds = OneOrMore(wd).setParseAction(remove_duplicate_chars) + print(wds.parseString("slkdjs sld sldd sdlf sdljf")) + prints:: + >>entering remove_duplicate_chars(line: 'slkdjs sld sldd sdlf sdljf', 0, (['slkdjs', 'sld', 'sldd', 'sdlf', 'sdljf'], {})) + <<leaving remove_duplicate_chars (ret: 'dfjkls') + ['dfjkls'] + """ + f = _trim_arity(f) + def z(*paArgs): + thisFunc = f.__name__ + s,l,t = paArgs[-3:] + if len(paArgs)>3: + thisFunc = paArgs[0].__class__.__name__ + '.' + thisFunc + sys.stderr.write( ">>entering %s(line: '%s', %d, %r)\n" % (thisFunc,line(l,s),l,t) ) + try: + ret = f(*paArgs) + except Exception as exc: + sys.stderr.write( "<<leaving %s (exception: %s)\n" % (thisFunc,exc) ) + raise + sys.stderr.write( "<<leaving %s (ret: %r)\n" % (thisFunc,ret) ) + return ret + try: + z.__name__ = f.__name__ + except AttributeError: + pass + return z + +# +# global helpers +# +def delimitedList( expr, delim=",", combine=False ): + """ + Helper to define a delimited list of expressions - the delimiter defaults to ','. 
+ By default, the list elements and delimiters can have intervening whitespace, and + comments, but this can be overridden by passing C{combine=True} in the constructor. + If C{combine} is set to C{True}, the matching tokens are returned as a single token + string, with the delimiters included; otherwise, the matching tokens are returned + as a list of tokens, with the delimiters suppressed. + + Example:: + delimitedList(Word(alphas)).parseString("aa,bb,cc") # -> ['aa', 'bb', 'cc'] + delimitedList(Word(hexnums), delim=':', combine=True).parseString("AA:BB:CC:DD:EE") # -> ['AA:BB:CC:DD:EE'] + """ + dlName = _ustr(expr)+" ["+_ustr(delim)+" "+_ustr(expr)+"]..." + if combine: + return Combine( expr + ZeroOrMore( delim + expr ) ).setName(dlName) + else: + return ( expr + ZeroOrMore( Suppress( delim ) + expr ) ).setName(dlName) + +def countedArray( expr, intExpr=None ): + """ + Helper to define a counted list of expressions. + This helper defines a pattern of the form:: + integer expr expr expr... + where the leading integer tells how many expr expressions follow. + The matched tokens returns the array of expr tokens as a list - the leading count token is suppressed. + + If C{intExpr} is specified, it should be a pyparsing expression that produces an integer value. + + Example:: + countedArray(Word(alphas)).parseString('2 ab cd ef') # -> ['ab', 'cd'] + + # in this parser, the leading integer value is given in binary, + # '10' indicating that 2 values are in the array + binaryConstant = Word('01').setParseAction(lambda t: int(t[0], 2)) + countedArray(Word(alphas), intExpr=binaryConstant).parseString('10 ab cd ef') # -> ['ab', 'cd'] + """ + arrayExpr = Forward() + def countFieldParseAction(s,l,t): + n = t[0] + arrayExpr << (n and Group(And([expr]*n)) or Group(empty)) + return [] + if intExpr is None: + intExpr = Word(nums).setParseAction(lambda t:int(t[0])) + else: + intExpr = intExpr.copy() + intExpr.setName("arrayLen") + intExpr.addParseAction(countFieldParseAction, callDuringTry=True) + return ( intExpr + arrayExpr ).setName('(len) ' + _ustr(expr) + '...') + +def _flatten(L): + ret = [] + for i in L: + if isinstance(i,list): + ret.extend(_flatten(i)) + else: + ret.append(i) + return ret + +def matchPreviousLiteral(expr): + """ + Helper to define an expression that is indirectly defined from + the tokens matched in a previous expression, that is, it looks + for a 'repeat' of a previous expression. For example:: + first = Word(nums) + second = matchPreviousLiteral(first) + matchExpr = first + ":" + second + will match C{"1:1"}, but not C{"1:2"}. Because this matches a + previous literal, will also match the leading C{"1:1"} in C{"1:10"}. + If this is not desired, use C{matchPreviousExpr}. + Do I{not} use with packrat parsing enabled. + """ + rep = Forward() + def copyTokenToRepeater(s,l,t): + if t: + if len(t) == 1: + rep << t[0] + else: + # flatten t tokens + tflat = _flatten(t.asList()) + rep << And(Literal(tt) for tt in tflat) + else: + rep << Empty() + expr.addParseAction(copyTokenToRepeater, callDuringTry=True) + rep.setName('(prev) ' + _ustr(expr)) + return rep + +def matchPreviousExpr(expr): + """ + Helper to define an expression that is indirectly defined from + the tokens matched in a previous expression, that is, it looks + for a 'repeat' of a previous expression. For example:: + first = Word(nums) + second = matchPreviousExpr(first) + matchExpr = first + ":" + second + will match C{"1:1"}, but not C{"1:2"}. 
Because this matches by + expressions, will I{not} match the leading C{"1:1"} in C{"1:10"}; + the expressions are evaluated first, and then compared, so + C{"1"} is compared with C{"10"}. + Do I{not} use with packrat parsing enabled. + """ + rep = Forward() + e2 = expr.copy() + rep <<= e2 + def copyTokenToRepeater(s,l,t): + matchTokens = _flatten(t.asList()) + def mustMatchTheseTokens(s,l,t): + theseTokens = _flatten(t.asList()) + if theseTokens != matchTokens: + raise ParseException("",0,"") + rep.setParseAction( mustMatchTheseTokens, callDuringTry=True ) + expr.addParseAction(copyTokenToRepeater, callDuringTry=True) + rep.setName('(prev) ' + _ustr(expr)) + return rep + +def _escapeRegexRangeChars(s): + #~ escape these chars: ^-] + for c in r"\^-]": + s = s.replace(c,_bslash+c) + s = s.replace("\n",r"\n") + s = s.replace("\t",r"\t") + return _ustr(s) + +def oneOf( strs, caseless=False, useRegex=True ): + """ + Helper to quickly define a set of alternative Literals, and makes sure to do + longest-first testing when there is a conflict, regardless of the input order, + but returns a C{L{MatchFirst}} for best performance. + + Parameters: + - strs - a string of space-delimited literals, or a collection of string literals + - caseless - (default=C{False}) - treat all literals as caseless + - useRegex - (default=C{True}) - as an optimization, will generate a Regex + object; otherwise, will generate a C{MatchFirst} object (if C{caseless=True}, or + if creating a C{Regex} raises an exception) + + Example:: + comp_oper = oneOf("< = > <= >= !=") + var = Word(alphas) + number = Word(nums) + term = var | number + comparison_expr = term + comp_oper + term + print(comparison_expr.searchString("B = 12 AA=23 B<=AA AA>12")) + prints:: + [['B', '=', '12'], ['AA', '=', '23'], ['B', '<=', 'AA'], ['AA', '>', '12']] + """ + if caseless: + isequal = ( lambda a,b: a.upper() == b.upper() ) + masks = ( lambda a,b: b.upper().startswith(a.upper()) ) + parseElementClass = CaselessLiteral + else: + isequal = ( lambda a,b: a == b ) + masks = ( lambda a,b: b.startswith(a) ) + parseElementClass = Literal + + symbols = [] + if isinstance(strs,basestring): + symbols = strs.split() + elif isinstance(strs, collections.Iterable): + symbols = list(strs) + else: + warnings.warn("Invalid argument to oneOf, expected string or iterable", + SyntaxWarning, stacklevel=2) + if not symbols: + return NoMatch() + + i = 0 + while i < len(symbols)-1: + cur = symbols[i] + for j,other in enumerate(symbols[i+1:]): + if ( isequal(other, cur) ): + del symbols[i+j+1] + break + elif ( masks(cur, other) ): + del symbols[i+j+1] + symbols.insert(i,other) + cur = other + break + else: + i += 1 + + if not caseless and useRegex: + #~ print (strs,"->", "|".join( [ _escapeRegexChars(sym) for sym in symbols] )) + try: + if len(symbols)==len("".join(symbols)): + return Regex( "[%s]" % "".join(_escapeRegexRangeChars(sym) for sym in symbols) ).setName(' | '.join(symbols)) + else: + return Regex( "|".join(re.escape(sym) for sym in symbols) ).setName(' | '.join(symbols)) + except Exception: + warnings.warn("Exception creating Regex for oneOf, building MatchFirst", + SyntaxWarning, stacklevel=2) + + + # last resort, just use MatchFirst + return MatchFirst(parseElementClass(sym) for sym in symbols).setName(' | '.join(symbols)) + +def dictOf( key, value ): + """ + Helper to easily and clearly define a dictionary by specifying the respective patterns + for the key and value. 
Takes care of defining the C{L{Dict}}, C{L{ZeroOrMore}}, and C{L{Group}} tokens + in the proper order. The key pattern can include delimiting markers or punctuation, + as long as they are suppressed, thereby leaving the significant key text. The value + pattern can include named results, so that the C{Dict} results can include named token + fields. + + Example:: + text = "shape: SQUARE posn: upper left color: light blue texture: burlap" + attr_expr = (label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)) + print(OneOrMore(attr_expr).parseString(text).dump()) + + attr_label = label + attr_value = Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join) + + # similar to Dict, but simpler call format + result = dictOf(attr_label, attr_value).parseString(text) + print(result.dump()) + print(result['shape']) + print(result.shape) # object attribute access works too + print(result.asDict()) + prints:: + [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'light blue'], ['texture', 'burlap']] + - color: light blue + - posn: upper left + - shape: SQUARE + - texture: burlap + SQUARE + SQUARE + {'color': 'light blue', 'shape': 'SQUARE', 'posn': 'upper left', 'texture': 'burlap'} + """ + return Dict( ZeroOrMore( Group ( key + value ) ) ) + +def originalTextFor(expr, asString=True): + """ + Helper to return the original, untokenized text for a given expression. Useful to + restore the parsed fields of an HTML start tag into the raw tag text itself, or to + revert separate tokens with intervening whitespace back to the original matching + input text. By default, returns astring containing the original parsed text. + + If the optional C{asString} argument is passed as C{False}, then the return value is a + C{L{ParseResults}} containing any results names that were originally matched, and a + single token containing the original matched text from the input string. So if + the expression passed to C{L{originalTextFor}} contains expressions with defined + results names, you must set C{asString} to C{False} if you want to preserve those + results name values. + + Example:: + src = "this is test <b> bold <i>text</i> </b> normal text " + for tag in ("b","i"): + opener,closer = makeHTMLTags(tag) + patt = originalTextFor(opener + SkipTo(closer) + closer) + print(patt.searchString(src)[0]) + prints:: + ['<b> bold <i>text</i> </b>'] + ['<i>text</i>'] + """ + locMarker = Empty().setParseAction(lambda s,loc,t: loc) + endlocMarker = locMarker.copy() + endlocMarker.callPreparse = False + matchExpr = locMarker("_original_start") + expr + endlocMarker("_original_end") + if asString: + extractText = lambda s,l,t: s[t._original_start:t._original_end] + else: + def extractText(s,l,t): + t[:] = [s[t.pop('_original_start'):t.pop('_original_end')]] + matchExpr.setParseAction(extractText) + matchExpr.ignoreExprs = expr.ignoreExprs + return matchExpr + +def ungroup(expr): + """ + Helper to undo pyparsing's default grouping of And expressions, even + if all but one are non-empty. + """ + return TokenConverter(expr).setParseAction(lambda t:t[0]) + +def locatedExpr(expr): + """ + Helper to decorate a returned token with its starting and ending locations in the input string. 
+ This helper adds the following results names: + - locn_start = location where matched expression begins + - locn_end = location where matched expression ends + - value = the actual parsed results + + Be careful if the input text contains C{<TAB>} characters, you may want to call + C{L{ParserElement.parseWithTabs}} + + Example:: + wd = Word(alphas) + for match in locatedExpr(wd).searchString("ljsdf123lksdjjf123lkkjj1222"): + print(match) + prints:: + [[0, 'ljsdf', 5]] + [[8, 'lksdjjf', 15]] + [[18, 'lkkjj', 23]] + """ + locator = Empty().setParseAction(lambda s,l,t: l) + return Group(locator("locn_start") + expr("value") + locator.copy().leaveWhitespace()("locn_end")) + + +# convenience constants for positional expressions +empty = Empty().setName("empty") +lineStart = LineStart().setName("lineStart") +lineEnd = LineEnd().setName("lineEnd") +stringStart = StringStart().setName("stringStart") +stringEnd = StringEnd().setName("stringEnd") + +_escapedPunc = Word( _bslash, r"\[]-*.$+^?()~ ", exact=2 ).setParseAction(lambda s,l,t:t[0][1]) +_escapedHexChar = Regex(r"\\0?[xX][0-9a-fA-F]+").setParseAction(lambda s,l,t:unichr(int(t[0].lstrip(r'\0x'),16))) +_escapedOctChar = Regex(r"\\0[0-7]+").setParseAction(lambda s,l,t:unichr(int(t[0][1:],8))) +_singleChar = _escapedPunc | _escapedHexChar | _escapedOctChar | Word(printables, excludeChars=r'\]', exact=1) | Regex(r"\w", re.UNICODE) +_charRange = Group(_singleChar + Suppress("-") + _singleChar) +_reBracketExpr = Literal("[") + Optional("^").setResultsName("negate") + Group( OneOrMore( _charRange | _singleChar ) ).setResultsName("body") + "]" + +def srange(s): + r""" + Helper to easily define string ranges for use in Word construction. Borrows + syntax from regexp '[]' string range definitions:: + srange("[0-9]") -> "0123456789" + srange("[a-z]") -> "abcdefghijklmnopqrstuvwxyz" + srange("[a-z$_]") -> "abcdefghijklmnopqrstuvwxyz$_" + The input string must be enclosed in []'s, and the returned string is the expanded + character set joined into a single string. + The values enclosed in the []'s may be: + - a single character + - an escaped character with a leading backslash (such as C{\-} or C{\]}) + - an escaped hex character with a leading C{'\x'} (C{\x21}, which is a C{'!'} character) + (C{\0x##} is also supported for backwards compatibility) + - an escaped octal character with a leading C{'\0'} (C{\041}, which is a C{'!'} character) + - a range of any of the above, separated by a dash (C{'a-z'}, etc.) + - any combination of the above (C{'aeiouy'}, C{'a-zA-Z0-9_$'}, etc.) + """ + _expanded = lambda p: p if not isinstance(p,ParseResults) else ''.join(unichr(c) for c in range(ord(p[0]),ord(p[1])+1)) + try: + return "".join(_expanded(part) for part in _reBracketExpr.parseString(s).body) + except Exception: + return "" + +def matchOnlyAtCol(n): + """ + Helper method for defining parse actions that require matching at a specific + column in the input text. + """ + def verifyCol(strg,locn,toks): + if col(locn,strg) != n: + raise ParseException(strg,locn,"matched token not at column %d" % n) + return verifyCol + +def replaceWith(replStr): + """ + Helper method for common parse actions that simply return a literal value. Especially + useful when used with C{L{transformString<ParserElement.transformString>}()}. 
+ + Example:: + num = Word(nums).setParseAction(lambda toks: int(toks[0])) + na = oneOf("N/A NA").setParseAction(replaceWith(math.nan)) + term = na | num + + OneOrMore(term).parseString("324 234 N/A 234") # -> [324, 234, nan, 234] + """ + return lambda s,l,t: [replStr] + +def removeQuotes(s,l,t): + """ + Helper parse action for removing quotation marks from parsed quoted strings. + + Example:: + # by default, quotation marks are included in parsed results + quotedString.parseString("'Now is the Winter of our Discontent'") # -> ["'Now is the Winter of our Discontent'"] + + # use removeQuotes to strip quotation marks from parsed results + quotedString.setParseAction(removeQuotes) + quotedString.parseString("'Now is the Winter of our Discontent'") # -> ["Now is the Winter of our Discontent"] + """ + return t[0][1:-1] + +def tokenMap(func, *args): + """ + Helper to define a parse action by mapping a function to all elements of a ParseResults list.If any additional + args are passed, they are forwarded to the given function as additional arguments after + the token, as in C{hex_integer = Word(hexnums).setParseAction(tokenMap(int, 16))}, which will convert the + parsed data to an integer using base 16. + + Example (compare the last to example in L{ParserElement.transformString}:: + hex_ints = OneOrMore(Word(hexnums)).setParseAction(tokenMap(int, 16)) + hex_ints.runTests(''' + 00 11 22 aa FF 0a 0d 1a + ''') + + upperword = Word(alphas).setParseAction(tokenMap(str.upper)) + OneOrMore(upperword).runTests(''' + my kingdom for a horse + ''') + + wd = Word(alphas).setParseAction(tokenMap(str.title)) + OneOrMore(wd).setParseAction(' '.join).runTests(''' + now is the winter of our discontent made glorious summer by this sun of york + ''') + prints:: + 00 11 22 aa FF 0a 0d 1a + [0, 17, 34, 170, 255, 10, 13, 26] + + my kingdom for a horse + ['MY', 'KINGDOM', 'FOR', 'A', 'HORSE'] + + now is the winter of our discontent made glorious summer by this sun of york + ['Now Is The Winter Of Our Discontent Made Glorious Summer By This Sun Of York'] + """ + def pa(s,l,t): + return [func(tokn, *args) for tokn in t] + + try: + func_name = getattr(func, '__name__', + getattr(func, '__class__').__name__) + except Exception: + func_name = str(func) + pa.__name__ = func_name + + return pa + +upcaseTokens = tokenMap(lambda t: _ustr(t).upper()) +"""(Deprecated) Helper parse action to convert tokens to upper case. Deprecated in favor of L{pyparsing_common.upcaseTokens}""" + +downcaseTokens = tokenMap(lambda t: _ustr(t).lower()) +"""(Deprecated) Helper parse action to convert tokens to lower case. 
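# --- Editor's note: hedged sketch (editor-added) of the removeQuotes and tokenMap parse-action
# --- helpers described above; the sample inputs are illustrative only.
from pyparsing import OneOrMore, Word, hexnums, quotedString, removeQuotes, tokenMap

hex_ints = OneOrMore(Word(hexnums)).setParseAction(tokenMap(int, 16))
print(hex_ints.parseString("00 ff 1a").asList())          # -> [0, 255, 26]

qs = quotedString.copy().setParseAction(removeQuotes)      # strip the surrounding quotes
print(qs.parseString("'hello'")[0])                        # -> hello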
Deprecated in favor of L{pyparsing_common.downcaseTokens}""" + +def _makeTags(tagStr, xml): + """Internal helper to construct opening and closing tag expressions, given a tag name""" + if isinstance(tagStr,basestring): + resname = tagStr + tagStr = Keyword(tagStr, caseless=not xml) + else: + resname = tagStr.name + + tagAttrName = Word(alphas,alphanums+"_-:") + if (xml): + tagAttrValue = dblQuotedString.copy().setParseAction( removeQuotes ) + openTag = Suppress("<") + tagStr("tag") + \ + Dict(ZeroOrMore(Group( tagAttrName + Suppress("=") + tagAttrValue ))) + \ + Optional("/",default=[False]).setResultsName("empty").setParseAction(lambda s,l,t:t[0]=='/') + Suppress(">") + else: + printablesLessRAbrack = "".join(c for c in printables if c not in ">") + tagAttrValue = quotedString.copy().setParseAction( removeQuotes ) | Word(printablesLessRAbrack) + openTag = Suppress("<") + tagStr("tag") + \ + Dict(ZeroOrMore(Group( tagAttrName.setParseAction(downcaseTokens) + \ + Optional( Suppress("=") + tagAttrValue ) ))) + \ + Optional("/",default=[False]).setResultsName("empty").setParseAction(lambda s,l,t:t[0]=='/') + Suppress(">") + closeTag = Combine(_L("</") + tagStr + ">") + + openTag = openTag.setResultsName("start"+"".join(resname.replace(":"," ").title().split())).setName("<%s>" % resname) + closeTag = closeTag.setResultsName("end"+"".join(resname.replace(":"," ").title().split())).setName("</%s>" % resname) + openTag.tag = resname + closeTag.tag = resname + return openTag, closeTag + +def makeHTMLTags(tagStr): + """ + Helper to construct opening and closing tag expressions for HTML, given a tag name. Matches + tags in either upper or lower case, attributes with namespaces and with quoted or unquoted values. + + Example:: + text = '<td>More info at the <a href="http://pyparsing.wikispaces.com">pyparsing</a> wiki page</td>' + # makeHTMLTags returns pyparsing expressions for the opening and closing tags as a 2-tuple + a,a_end = makeHTMLTags("A") + link_expr = a + SkipTo(a_end)("link_text") + a_end + + for link in link_expr.searchString(text): + # attributes in the <A> tag (like "href" shown here) are also accessible as named results + print(link.link_text, '->', link.href) + prints:: + pyparsing -> http://pyparsing.wikispaces.com + """ + return _makeTags( tagStr, False ) + +def makeXMLTags(tagStr): + """ + Helper to construct opening and closing tag expressions for XML, given a tag name. Matches + tags only in the given upper/lower case. + + Example: similar to L{makeHTMLTags} + """ + return _makeTags( tagStr, True ) + +def withAttribute(*args,**attrDict): + """ + Helper to create a validating parse action to be used with start tags created + with C{L{makeXMLTags}} or C{L{makeHTMLTags}}. Use C{withAttribute} to qualify a starting tag + with a required attribute value, to avoid false matches on common tags such as + C{<TD>} or C{<DIV>}. + + Call C{withAttribute} with a series of attribute names and values. Specify the list + of filter attributes names and values as: + - keyword arguments, as in C{(align="right")}, or + - as an explicit dict with C{**} operator, when an attribute name is also a Python + reserved word, as in C{**{"class":"Customer", "align":"right"}} + - a list of name-value tuples, as in ( ("ns1:class", "Customer"), ("ns2:align","right") ) + For attribute names with a namespace prefix, you must use the second form. Attribute + names are matched insensitive to upper/lower case. + + If just testing for C{class} (with or without a namespace), use C{L{withClass}}. 
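# --- Editor's note: a small hedged sketch (editor-added) of the makeHTMLTags helper defined
# --- above, following its docstring example; the sample HTML string is a demo assumption.
from pyparsing import SkipTo, makeHTMLTags

text = '<a href="https://example.org">example</a>'
a, a_end = makeHTMLTags("A")                       # matches <a ...> / </a> in either case
link = a + SkipTo(a_end)("link_text") + a_end
for hit in link.searchString(text):
    print(hit.link_text, '->', hit.href)           # tag attributes become results names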
+ + To verify that the attribute exists, but without specifying a value, pass + C{withAttribute.ANY_VALUE} as the value. + + Example:: + html = ''' + <div> + Some text + <div type="grid">1 4 0 1 0</div> + <div type="graph">1,3 2,3 1,1</div> + <div>this has no type</div> + </div> + + ''' + div,div_end = makeHTMLTags("div") + + # only match div tag having a type attribute with value "grid" + div_grid = div().setParseAction(withAttribute(type="grid")) + grid_expr = div_grid + SkipTo(div | div_end)("body") + for grid_header in grid_expr.searchString(html): + print(grid_header.body) + + # construct a match with any div tag having a type attribute, regardless of the value + div_any_type = div().setParseAction(withAttribute(type=withAttribute.ANY_VALUE)) + div_expr = div_any_type + SkipTo(div | div_end)("body") + for div_header in div_expr.searchString(html): + print(div_header.body) + prints:: + 1 4 0 1 0 + + 1 4 0 1 0 + 1,3 2,3 1,1 + """ + if args: + attrs = args[:] + else: + attrs = attrDict.items() + attrs = [(k,v) for k,v in attrs] + def pa(s,l,tokens): + for attrName,attrValue in attrs: + if attrName not in tokens: + raise ParseException(s,l,"no matching attribute " + attrName) + if attrValue != withAttribute.ANY_VALUE and tokens[attrName] != attrValue: + raise ParseException(s,l,"attribute '%s' has value '%s', must be '%s'" % + (attrName, tokens[attrName], attrValue)) + return pa +withAttribute.ANY_VALUE = object() + +def withClass(classname, namespace=''): + """ + Simplified version of C{L{withAttribute}} when matching on a div class - made + difficult because C{class} is a reserved word in Python. + + Example:: + html = ''' + <div> + Some text + <div class="grid">1 4 0 1 0</div> + <div class="graph">1,3 2,3 1,1</div> + <div>this <div> has no class</div> + </div> + + ''' + div,div_end = makeHTMLTags("div") + div_grid = div().setParseAction(withClass("grid")) + + grid_expr = div_grid + SkipTo(div | div_end)("body") + for grid_header in grid_expr.searchString(html): + print(grid_header.body) + + div_any_type = div().setParseAction(withClass(withAttribute.ANY_VALUE)) + div_expr = div_any_type + SkipTo(div | div_end)("body") + for div_header in div_expr.searchString(html): + print(div_header.body) + prints:: + 1 4 0 1 0 + + 1 4 0 1 0 + 1,3 2,3 1,1 + """ + classattr = "%s:class" % namespace if namespace else "class" + return withAttribute(**{classattr : classname}) + +opAssoc = _Constants() +opAssoc.LEFT = object() +opAssoc.RIGHT = object() + +def infixNotation( baseExpr, opList, lpar=Suppress('('), rpar=Suppress(')') ): + """ + Helper method for constructing grammars of expressions made up of + operators working in a precedence hierarchy. Operators may be unary or + binary, left- or right-associative. Parse actions can also be attached + to operator expressions. The generated parser will also recognize the use + of parentheses to override operator precedences (see example below). + + Note: if you define a deep operator list, you may see performance issues + when using infixNotation. See L{ParserElement.enablePackrat} for a + mechanism to potentially improve your parser performance. 
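# --- Editor's note: hedged, editor-added sketch of withAttribute/withClass filtering start
# --- tags, based on the docstring examples above; the HTML snippet is illustrative.
from pyparsing import SkipTo, makeHTMLTags, withClass

html = '<div class="grid">1 4 0 1 0</div><div class="graph">1,3 2,3 1,1</div>'
div, div_end = makeHTMLTags("div")
div_grid = div().setParseAction(withClass("grid"))   # only accept <div class="grid">
grid = div_grid + SkipTo(div | div_end)("body")
for hit in grid.searchString(html):
    print(hit.body)                                  # -> 1 4 0 1 0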
+ + Parameters: + - baseExpr - expression representing the most basic element for the nested + - opList - list of tuples, one for each operator precedence level in the + expression grammar; each tuple is of the form + (opExpr, numTerms, rightLeftAssoc, parseAction), where: + - opExpr is the pyparsing expression for the operator; + may also be a string, which will be converted to a Literal; + if numTerms is 3, opExpr is a tuple of two expressions, for the + two operators separating the 3 terms + - numTerms is the number of terms for this operator (must + be 1, 2, or 3) + - rightLeftAssoc is the indicator whether the operator is + right or left associative, using the pyparsing-defined + constants C{opAssoc.RIGHT} and C{opAssoc.LEFT}. + - parseAction is the parse action to be associated with + expressions matching this operator expression (the + parse action tuple member may be omitted) + - lpar - expression for matching left-parentheses (default=C{Suppress('(')}) + - rpar - expression for matching right-parentheses (default=C{Suppress(')')}) + + Example:: + # simple example of four-function arithmetic with ints and variable names + integer = pyparsing_common.signed_integer + varname = pyparsing_common.identifier + + arith_expr = infixNotation(integer | varname, + [ + ('-', 1, opAssoc.RIGHT), + (oneOf('* /'), 2, opAssoc.LEFT), + (oneOf('+ -'), 2, opAssoc.LEFT), + ]) + + arith_expr.runTests(''' + 5+3*6 + (5+3)*6 + -2--11 + ''', fullDump=False) + prints:: + 5+3*6 + [[5, '+', [3, '*', 6]]] + + (5+3)*6 + [[[5, '+', 3], '*', 6]] + + -2--11 + [[['-', 2], '-', ['-', 11]]] + """ + ret = Forward() + lastExpr = baseExpr | ( lpar + ret + rpar ) + for i,operDef in enumerate(opList): + opExpr,arity,rightLeftAssoc,pa = (operDef + (None,))[:4] + termName = "%s term" % opExpr if arity < 3 else "%s%s term" % opExpr + if arity == 3: + if opExpr is None or len(opExpr) != 2: + raise ValueError("if numterms=3, opExpr must be a tuple or list of two expressions") + opExpr1, opExpr2 = opExpr + thisExpr = Forward().setName(termName) + if rightLeftAssoc == opAssoc.LEFT: + if arity == 1: + matchExpr = FollowedBy(lastExpr + opExpr) + Group( lastExpr + OneOrMore( opExpr ) ) + elif arity == 2: + if opExpr is not None: + matchExpr = FollowedBy(lastExpr + opExpr + lastExpr) + Group( lastExpr + OneOrMore( opExpr + lastExpr ) ) + else: + matchExpr = FollowedBy(lastExpr+lastExpr) + Group( lastExpr + OneOrMore(lastExpr) ) + elif arity == 3: + matchExpr = FollowedBy(lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr) + \ + Group( lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr ) + else: + raise ValueError("operator must be unary (1), binary (2), or ternary (3)") + elif rightLeftAssoc == opAssoc.RIGHT: + if arity == 1: + # try to avoid LR with this extra test + if not isinstance(opExpr, Optional): + opExpr = Optional(opExpr) + matchExpr = FollowedBy(opExpr.expr + thisExpr) + Group( opExpr + thisExpr ) + elif arity == 2: + if opExpr is not None: + matchExpr = FollowedBy(lastExpr + opExpr + thisExpr) + Group( lastExpr + OneOrMore( opExpr + thisExpr ) ) + else: + matchExpr = FollowedBy(lastExpr + thisExpr) + Group( lastExpr + OneOrMore( thisExpr ) ) + elif arity == 3: + matchExpr = FollowedBy(lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr) + \ + Group( lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr ) + else: + raise ValueError("operator must be unary (1), binary (2), or ternary (3)") + else: + raise ValueError("operator must indicate right or left associativity") + if pa: + matchExpr.setParseAction( pa ) + thisExpr <<= ( 
matchExpr.setName(termName) | lastExpr ) + lastExpr = thisExpr + ret <<= lastExpr + return ret + +operatorPrecedence = infixNotation +"""(Deprecated) Former name of C{L{infixNotation}}, will be dropped in a future release.""" + +dblQuotedString = Combine(Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*')+'"').setName("string enclosed in double quotes") +sglQuotedString = Combine(Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*")+"'").setName("string enclosed in single quotes") +quotedString = Combine(Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*')+'"'| + Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*")+"'").setName("quotedString using single or double quotes") +unicodeString = Combine(_L('u') + quotedString.copy()).setName("unicode string literal") + +def nestedExpr(opener="(", closer=")", content=None, ignoreExpr=quotedString.copy()): + """ + Helper method for defining nested lists enclosed in opening and closing + delimiters ("(" and ")" are the default). + + Parameters: + - opener - opening character for a nested list (default=C{"("}); can also be a pyparsing expression + - closer - closing character for a nested list (default=C{")"}); can also be a pyparsing expression + - content - expression for items within the nested lists (default=C{None}) + - ignoreExpr - expression for ignoring opening and closing delimiters (default=C{quotedString}) + + If an expression is not provided for the content argument, the nested + expression will capture all whitespace-delimited content between delimiters + as a list of separate values. + + Use the C{ignoreExpr} argument to define expressions that may contain + opening or closing characters that should not be treated as opening + or closing characters for nesting, such as quotedString or a comment + expression. Specify multiple expressions using an C{L{Or}} or C{L{MatchFirst}}. + The default is L{quotedString}, but if no expressions are to be ignored, + then pass C{None} for this argument. 
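# --- Editor's note: hedged, editor-added sketch of infixNotation, following the four-function
# --- arithmetic example in its docstring above.
from pyparsing import infixNotation, oneOf, opAssoc, pyparsing_common

integer = pyparsing_common.signed_integer
arith = infixNotation(integer,
    [
        ('-', 1, opAssoc.RIGHT),                 # unary minus
        (oneOf('* /'), 2, opAssoc.LEFT),
        (oneOf('+ -'), 2, opAssoc.LEFT),
    ])
print(arith.parseString("5+3*6").asList())       # -> [[5, '+', [3, '*', 6]]]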
+ + Example:: + data_type = oneOf("void int short long char float double") + decl_data_type = Combine(data_type + Optional(Word('*'))) + ident = Word(alphas+'_', alphanums+'_') + number = pyparsing_common.number + arg = Group(decl_data_type + ident) + LPAR,RPAR = map(Suppress, "()") + + code_body = nestedExpr('{', '}', ignoreExpr=(quotedString | cStyleComment)) + + c_function = (decl_data_type("type") + + ident("name") + + LPAR + Optional(delimitedList(arg), [])("args") + RPAR + + code_body("body")) + c_function.ignore(cStyleComment) + + source_code = ''' + int is_odd(int x) { + return (x%2); + } + + int dec_to_hex(char hchar) { + if (hchar >= '0' && hchar <= '9') { + return (ord(hchar)-ord('0')); + } else { + return (10+ord(hchar)-ord('A')); + } + } + ''' + for func in c_function.searchString(source_code): + print("%(name)s (%(type)s) args: %(args)s" % func) + + prints:: + is_odd (int) args: [['int', 'x']] + dec_to_hex (int) args: [['char', 'hchar']] + """ + if opener == closer: + raise ValueError("opening and closing strings cannot be the same") + if content is None: + if isinstance(opener,basestring) and isinstance(closer,basestring): + if len(opener) == 1 and len(closer)==1: + if ignoreExpr is not None: + content = (Combine(OneOrMore(~ignoreExpr + + CharsNotIn(opener+closer+ParserElement.DEFAULT_WHITE_CHARS,exact=1)) + ).setParseAction(lambda t:t[0].strip())) + else: + content = (empty.copy()+CharsNotIn(opener+closer+ParserElement.DEFAULT_WHITE_CHARS + ).setParseAction(lambda t:t[0].strip())) + else: + if ignoreExpr is not None: + content = (Combine(OneOrMore(~ignoreExpr + + ~Literal(opener) + ~Literal(closer) + + CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS,exact=1)) + ).setParseAction(lambda t:t[0].strip())) + else: + content = (Combine(OneOrMore(~Literal(opener) + ~Literal(closer) + + CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS,exact=1)) + ).setParseAction(lambda t:t[0].strip())) + else: + raise ValueError("opening and closing arguments must be strings if no content expression is given") + ret = Forward() + if ignoreExpr is not None: + ret <<= Group( Suppress(opener) + ZeroOrMore( ignoreExpr | ret | content ) + Suppress(closer) ) + else: + ret <<= Group( Suppress(opener) + ZeroOrMore( ret | content ) + Suppress(closer) ) + ret.setName('nested %s%s expression' % (opener,closer)) + return ret + +def indentedBlock(blockStatementExpr, indentStack, indent=True): + """ + Helper method for defining space-delimited indentation blocks, such as + those used to define block statements in Python source code. + + Parameters: + - blockStatementExpr - expression defining syntax of statement that + is repeated within the indented block + - indentStack - list created by caller to manage indentation stack + (multiple statementWithIndentedBlock expressions within a single grammar + should share a common indentStack) + - indent - boolean indicating whether block must be indented beyond the + the current level; set to False for block of left-most statements + (default=C{True}) + + A valid block must contain at least one C{blockStatement}. 
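# --- Editor's note: hedged, editor-added sketch of nestedExpr with its default delimiters;
# --- the parenthesized input string is a demo assumption.
from pyparsing import nestedExpr

# With no content expression, nestedExpr captures whitespace-delimited items between delimiters.
print(nestedExpr().parseString("(a (b c) d)").asList())    # -> [['a', ['b', 'c'], 'd']]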
+ + Example:: + data = ''' + def A(z): + A1 + B = 100 + G = A2 + A2 + A3 + B + def BB(a,b,c): + BB1 + def BBA(): + bba1 + bba2 + bba3 + C + D + def spam(x,y): + def eggs(z): + pass + ''' + + + indentStack = [1] + stmt = Forward() + + identifier = Word(alphas, alphanums) + funcDecl = ("def" + identifier + Group( "(" + Optional( delimitedList(identifier) ) + ")" ) + ":") + func_body = indentedBlock(stmt, indentStack) + funcDef = Group( funcDecl + func_body ) + + rvalue = Forward() + funcCall = Group(identifier + "(" + Optional(delimitedList(rvalue)) + ")") + rvalue << (funcCall | identifier | Word(nums)) + assignment = Group(identifier + "=" + rvalue) + stmt << ( funcDef | assignment | identifier ) + + module_body = OneOrMore(stmt) + + parseTree = module_body.parseString(data) + parseTree.pprint() + prints:: + [['def', + 'A', + ['(', 'z', ')'], + ':', + [['A1'], [['B', '=', '100']], [['G', '=', 'A2']], ['A2'], ['A3']]], + 'B', + ['def', + 'BB', + ['(', 'a', 'b', 'c', ')'], + ':', + [['BB1'], [['def', 'BBA', ['(', ')'], ':', [['bba1'], ['bba2'], ['bba3']]]]]], + 'C', + 'D', + ['def', + 'spam', + ['(', 'x', 'y', ')'], + ':', + [[['def', 'eggs', ['(', 'z', ')'], ':', [['pass']]]]]]] + """ + def checkPeerIndent(s,l,t): + if l >= len(s): return + curCol = col(l,s) + if curCol != indentStack[-1]: + if curCol > indentStack[-1]: + raise ParseFatalException(s,l,"illegal nesting") + raise ParseException(s,l,"not a peer entry") + + def checkSubIndent(s,l,t): + curCol = col(l,s) + if curCol > indentStack[-1]: + indentStack.append( curCol ) + else: + raise ParseException(s,l,"not a subentry") + + def checkUnindent(s,l,t): + if l >= len(s): return + curCol = col(l,s) + if not(indentStack and curCol < indentStack[-1] and curCol <= indentStack[-2]): + raise ParseException(s,l,"not an unindent") + indentStack.pop() + + NL = OneOrMore(LineEnd().setWhitespaceChars("\t ").suppress()) + INDENT = (Empty() + Empty().setParseAction(checkSubIndent)).setName('INDENT') + PEER = Empty().setParseAction(checkPeerIndent).setName('') + UNDENT = Empty().setParseAction(checkUnindent).setName('UNINDENT') + if indent: + smExpr = Group( Optional(NL) + + #~ FollowedBy(blockStatementExpr) + + INDENT + (OneOrMore( PEER + Group(blockStatementExpr) + Optional(NL) )) + UNDENT) + else: + smExpr = Group( Optional(NL) + + (OneOrMore( PEER + Group(blockStatementExpr) + Optional(NL) )) ) + blockStatementExpr.ignore(_bslash + LineEnd()) + return smExpr.setName('indented block') + +alphas8bit = srange(r"[\0xc0-\0xd6\0xd8-\0xf6\0xf8-\0xff]") +punc8bit = srange(r"[\0xa1-\0xbf\0xd7\0xf7]") + +anyOpenTag,anyCloseTag = makeHTMLTags(Word(alphas,alphanums+"_:").setName('any tag')) +_htmlEntityMap = dict(zip("gt lt amp nbsp quot apos".split(),'><& "\'')) +commonHTMLEntity = Regex('&(?P<entity>' + '|'.join(_htmlEntityMap.keys()) +");").setName("common HTML entity") +def replaceHTMLEntity(t): + """Helper parser action to replace common HTML entities with their special characters""" + return _htmlEntityMap.get(t.entity) + +# it's easy to get these comment structures wrong - they're very common, so may as well make them available +cStyleComment = Combine(Regex(r"/\*(?:[^*]|\*(?!/))*") + '*/').setName("C style comment") +"Comment of the form C{/* ... */}" + +htmlComment = Regex(r"<!--[\s\S]*?-->").setName("HTML comment") +"Comment of the form C{<!-- ... -->}" + +restOfLine = Regex(r".*").leaveWhitespace().setName("rest of line") +dblSlashComment = Regex(r"//(?:\\\n|[^\n])*").setName("// comment") +"Comment of the form C{// ... 
(to end of line)}" + +cppStyleComment = Combine(Regex(r"/\*(?:[^*]|\*(?!/))*") + '*/'| dblSlashComment).setName("C++ style comment") +"Comment of either form C{L{cStyleComment}} or C{L{dblSlashComment}}" + +javaStyleComment = cppStyleComment +"Same as C{L{cppStyleComment}}" + +pythonStyleComment = Regex(r"#.*").setName("Python style comment") +"Comment of the form C{# ... (to end of line)}" + +_commasepitem = Combine(OneOrMore(Word(printables, excludeChars=',') + + Optional( Word(" \t") + + ~Literal(",") + ~LineEnd() ) ) ).streamline().setName("commaItem") +commaSeparatedList = delimitedList( Optional( quotedString.copy() | _commasepitem, default="") ).setName("commaSeparatedList") +"""(Deprecated) Predefined expression of 1 or more printable words or quoted strings, separated by commas. + This expression is deprecated in favor of L{pyparsing_common.comma_separated_list}.""" + +# some other useful expressions - using lower-case class name since we are really using this as a namespace +class pyparsing_common: + """ + Here are some common low-level expressions that may be useful in jump-starting parser development: + - numeric forms (L{integers<integer>}, L{reals<real>}, L{scientific notation<sci_real>}) + - common L{programming identifiers<identifier>} + - network addresses (L{MAC<mac_address>}, L{IPv4<ipv4_address>}, L{IPv6<ipv6_address>}) + - ISO8601 L{dates<iso8601_date>} and L{datetime<iso8601_datetime>} + - L{UUID<uuid>} + - L{comma-separated list<comma_separated_list>} + Parse actions: + - C{L{convertToInteger}} + - C{L{convertToFloat}} + - C{L{convertToDate}} + - C{L{convertToDatetime}} + - C{L{stripHTMLTags}} + - C{L{upcaseTokens}} + - C{L{downcaseTokens}} + + Example:: + pyparsing_common.number.runTests(''' + # any int or real number, returned as the appropriate type + 100 + -100 + +100 + 3.14159 + 6.02e23 + 1e-12 + ''') + + pyparsing_common.fnumber.runTests(''' + # any int or real number, returned as float + 100 + -100 + +100 + 3.14159 + 6.02e23 + 1e-12 + ''') + + pyparsing_common.hex_integer.runTests(''' + # hex numbers + 100 + FF + ''') + + pyparsing_common.fraction.runTests(''' + # fractions + 1/2 + -3/4 + ''') + + pyparsing_common.mixed_integer.runTests(''' + # mixed fractions + 1 + 1/2 + -3/4 + 1-3/4 + ''') + + import uuid + pyparsing_common.uuid.setParseAction(tokenMap(uuid.UUID)) + pyparsing_common.uuid.runTests(''' + # uuid + 12345678-1234-5678-1234-567812345678 + ''') + prints:: + # any int or real number, returned as the appropriate type + 100 + [100] + + -100 + [-100] + + +100 + [100] + + 3.14159 + [3.14159] + + 6.02e23 + [6.02e+23] + + 1e-12 + [1e-12] + + # any int or real number, returned as float + 100 + [100.0] + + -100 + [-100.0] + + +100 + [100.0] + + 3.14159 + [3.14159] + + 6.02e23 + [6.02e+23] + + 1e-12 + [1e-12] + + # hex numbers + 100 + [256] + + FF + [255] + + # fractions + 1/2 + [0.5] + + -3/4 + [-0.75] + + # mixed fractions + 1 + [1] + + 1/2 + [0.5] + + -3/4 + [-0.75] + + 1-3/4 + [1.75] + + # uuid + 12345678-1234-5678-1234-567812345678 + [UUID('12345678-1234-5678-1234-567812345678')] + """ + + convertToInteger = tokenMap(int) + """ + Parse action for converting parsed integers to Python int + """ + + convertToFloat = tokenMap(float) + """ + Parse action for converting parsed numbers to Python float + """ + + integer = Word(nums).setName("integer").setParseAction(convertToInteger) + """expression that parses an unsigned integer, returns an int""" + + hex_integer = Word(hexnums).setName("hex integer").setParseAction(tokenMap(int,16)) + """expression that 
parses a hexadecimal integer, returns an int""" + + signed_integer = Regex(r'[+-]?\d+').setName("signed integer").setParseAction(convertToInteger) + """expression that parses an integer with optional leading sign, returns an int""" + + fraction = (signed_integer().setParseAction(convertToFloat) + '/' + signed_integer().setParseAction(convertToFloat)).setName("fraction") + """fractional expression of an integer divided by an integer, returns a float""" + fraction.addParseAction(lambda t: t[0]/t[-1]) + + mixed_integer = (fraction | signed_integer + Optional(Optional('-').suppress() + fraction)).setName("fraction or mixed integer-fraction") + """mixed integer of the form 'integer - fraction', with optional leading integer, returns float""" + mixed_integer.addParseAction(sum) + + real = Regex(r'[+-]?\d+\.\d*').setName("real number").setParseAction(convertToFloat) + """expression that parses a floating point number and returns a float""" + + sci_real = Regex(r'[+-]?\d+([eE][+-]?\d+|\.\d*([eE][+-]?\d+)?)').setName("real number with scientific notation").setParseAction(convertToFloat) + """expression that parses a floating point number with optional scientific notation and returns a float""" + + # streamlining this expression makes the docs nicer-looking + number = (sci_real | real | signed_integer).streamline() + """any numeric expression, returns the corresponding Python type""" + + fnumber = Regex(r'[+-]?\d+\.?\d*([eE][+-]?\d+)?').setName("fnumber").setParseAction(convertToFloat) + """any int or real number, returned as float""" + + identifier = Word(alphas+'_', alphanums+'_').setName("identifier") + """typical code identifier (leading alpha or '_', followed by 0 or more alphas, nums, or '_')""" + + ipv4_address = Regex(r'(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})(\.(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})){3}').setName("IPv4 address") + "IPv4 address (C{0.0.0.0 - 255.255.255.255})" + + _ipv6_part = Regex(r'[0-9a-fA-F]{1,4}').setName("hex_integer") + _full_ipv6_address = (_ipv6_part + (':' + _ipv6_part)*7).setName("full IPv6 address") + _short_ipv6_address = (Optional(_ipv6_part + (':' + _ipv6_part)*(0,6)) + "::" + Optional(_ipv6_part + (':' + _ipv6_part)*(0,6))).setName("short IPv6 address") + _short_ipv6_address.addCondition(lambda t: sum(1 for tt in t if pyparsing_common._ipv6_part.matches(tt)) < 8) + _mixed_ipv6_address = ("::ffff:" + ipv4_address).setName("mixed IPv6 address") + ipv6_address = Combine((_full_ipv6_address | _mixed_ipv6_address | _short_ipv6_address).setName("IPv6 address")).setName("IPv6 address") + "IPv6 address (long, short, or mixed form)" + + mac_address = Regex(r'[0-9a-fA-F]{2}([:.-])[0-9a-fA-F]{2}(?:\1[0-9a-fA-F]{2}){4}').setName("MAC address") + "MAC address xx:xx:xx:xx:xx (may also have '-' or '.' 
delimiters)" + + @staticmethod + def convertToDate(fmt="%Y-%m-%d"): + """ + Helper to create a parse action for converting parsed date string to Python datetime.date + + Params - + - fmt - format to be passed to datetime.strptime (default=C{"%Y-%m-%d"}) + + Example:: + date_expr = pyparsing_common.iso8601_date.copy() + date_expr.setParseAction(pyparsing_common.convertToDate()) + print(date_expr.parseString("1999-12-31")) + prints:: + [datetime.date(1999, 12, 31)] + """ + def cvt_fn(s,l,t): + try: + return datetime.strptime(t[0], fmt).date() + except ValueError as ve: + raise ParseException(s, l, str(ve)) + return cvt_fn + + @staticmethod + def convertToDatetime(fmt="%Y-%m-%dT%H:%M:%S.%f"): + """ + Helper to create a parse action for converting parsed datetime string to Python datetime.datetime + + Params - + - fmt - format to be passed to datetime.strptime (default=C{"%Y-%m-%dT%H:%M:%S.%f"}) + + Example:: + dt_expr = pyparsing_common.iso8601_datetime.copy() + dt_expr.setParseAction(pyparsing_common.convertToDatetime()) + print(dt_expr.parseString("1999-12-31T23:59:59.999")) + prints:: + [datetime.datetime(1999, 12, 31, 23, 59, 59, 999000)] + """ + def cvt_fn(s,l,t): + try: + return datetime.strptime(t[0], fmt) + except ValueError as ve: + raise ParseException(s, l, str(ve)) + return cvt_fn + + iso8601_date = Regex(r'(?P<year>\d{4})(?:-(?P<month>\d\d)(?:-(?P<day>\d\d))?)?').setName("ISO8601 date") + "ISO8601 date (C{yyyy-mm-dd})" + + iso8601_datetime = Regex(r'(?P<year>\d{4})-(?P<month>\d\d)-(?P<day>\d\d)[T ](?P<hour>\d\d):(?P<minute>\d\d)(:(?P<second>\d\d(\.\d*)?)?)?(?P<tz>Z|[+-]\d\d:?\d\d)?').setName("ISO8601 datetime") + "ISO8601 datetime (C{yyyy-mm-ddThh:mm:ss.s(Z|+-00:00)}) - trailing seconds, milliseconds, and timezone optional; accepts separating C{'T'} or C{' '}" + + uuid = Regex(r'[0-9a-fA-F]{8}(-[0-9a-fA-F]{4}){3}-[0-9a-fA-F]{12}').setName("UUID") + "UUID (C{xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx})" + + _html_stripper = anyOpenTag.suppress() | anyCloseTag.suppress() + @staticmethod + def stripHTMLTags(s, l, tokens): + """ + Parse action to remove HTML tags from web page HTML source + + Example:: + # strip HTML links from normal text + text = '<td>More info at the <a href="http://pyparsing.wikispaces.com">pyparsing</a> wiki page</td>' + td,td_end = makeHTMLTags("TD") + table_text = td + SkipTo(td_end).setParseAction(pyparsing_common.stripHTMLTags)("body") + td_end + + print(table_text.parseString(text).body) # -> 'More info at the pyparsing wiki page' + """ + return pyparsing_common._html_stripper.transformString(tokens[0]) + + _commasepitem = Combine(OneOrMore(~Literal(",") + ~LineEnd() + Word(printables, excludeChars=',') + + Optional( White(" \t") ) ) ).streamline().setName("commaItem") + comma_separated_list = delimitedList( Optional( quotedString.copy() | _commasepitem, default="") ).setName("comma separated list") + """Predefined expression of 1 or more printable words or quoted strings, separated by commas.""" + + upcaseTokens = staticmethod(tokenMap(lambda t: _ustr(t).upper())) + """Parse action to convert tokens to upper case.""" + + downcaseTokens = staticmethod(tokenMap(lambda t: _ustr(t).lower())) + """Parse action to convert tokens to lower case.""" + + +if __name__ == "__main__": + + selectToken = CaselessLiteral("select") + fromToken = CaselessLiteral("from") + + ident = Word(alphas, alphanums + "_$") + + columnName = delimitedList(ident, ".", combine=True).setParseAction(upcaseTokens) + columnNameList = Group(delimitedList(columnName)).setName("columns") + columnSpec 
= ('*' | columnNameList) + + tableName = delimitedList(ident, ".", combine=True).setParseAction(upcaseTokens) + tableNameList = Group(delimitedList(tableName)).setName("tables") + + simpleSQL = selectToken("command") + columnSpec("columns") + fromToken + tableNameList("tables") + + # demo runTests method, including embedded comments in test string + simpleSQL.runTests(""" + # '*' as column list and dotted table name + select * from SYS.XYZZY + + # caseless match on "SELECT", and casts back to "select" + SELECT * from XYZZY, ABC + + # list of column names, and mixed case SELECT keyword + Select AA,BB,CC from Sys.dual + + # multiple tables + Select A, B, C from Sys.dual, Table2 + + # invalid SELECT keyword - should fail + Xelect A, B, C from Sys.dual + + # incomplete command - should fail + Select + + # invalid column name - should fail + Select ^^^ frox Sys.dual + + """) + + pyparsing_common.number.runTests(""" + 100 + -100 + +100 + 3.14159 + 6.02e23 + 1e-12 + """) + + # any int or real number, returned as float + pyparsing_common.fnumber.runTests(""" + 100 + -100 + +100 + 3.14159 + 6.02e23 + 1e-12 + """) + + pyparsing_common.hex_integer.runTests(""" + 100 + FF + """) + + import uuid + pyparsing_common.uuid.setParseAction(tokenMap(uuid.UUID)) + pyparsing_common.uuid.runTests(""" + 12345678-1234-5678-1234-567812345678 + """) diff --git a/env/lib/python3.6/site-packages/pkg_resources/_vendor/six.py b/env/lib/python3.6/site-packages/pkg_resources/_vendor/six.py new file mode 100644 index 0000000..190c023 --- /dev/null +++ b/env/lib/python3.6/site-packages/pkg_resources/_vendor/six.py @@ -0,0 +1,868 @@ +"""Utilities for writing code that runs on Python 2 and 3""" + +# Copyright (c) 2010-2015 Benjamin Peterson +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +from __future__ import absolute_import + +import functools +import itertools +import operator +import sys +import types + +__author__ = "Benjamin Peterson <benjamin@python.org>" +__version__ = "1.10.0" + + +# Useful for very coarse version differentiation. +PY2 = sys.version_info[0] == 2 +PY3 = sys.version_info[0] == 3 +PY34 = sys.version_info[0:2] >= (3, 4) + +if PY3: + string_types = str, + integer_types = int, + class_types = type, + text_type = str + binary_type = bytes + + MAXSIZE = sys.maxsize +else: + string_types = basestring, + integer_types = (int, long) + class_types = (type, types.ClassType) + text_type = unicode + binary_type = str + + if sys.platform.startswith("java"): + # Jython always uses 32 bits. 
+ MAXSIZE = int((1 << 31) - 1) + else: + # It's possible to have sizeof(long) != sizeof(Py_ssize_t). + class X(object): + + def __len__(self): + return 1 << 31 + try: + len(X()) + except OverflowError: + # 32-bit + MAXSIZE = int((1 << 31) - 1) + else: + # 64-bit + MAXSIZE = int((1 << 63) - 1) + del X + + +def _add_doc(func, doc): + """Add documentation to a function.""" + func.__doc__ = doc + + +def _import_module(name): + """Import module, returning the module after the last dot.""" + __import__(name) + return sys.modules[name] + + +class _LazyDescr(object): + + def __init__(self, name): + self.name = name + + def __get__(self, obj, tp): + result = self._resolve() + setattr(obj, self.name, result) # Invokes __set__. + try: + # This is a bit ugly, but it avoids running this again by + # removing this descriptor. + delattr(obj.__class__, self.name) + except AttributeError: + pass + return result + + +class MovedModule(_LazyDescr): + + def __init__(self, name, old, new=None): + super(MovedModule, self).__init__(name) + if PY3: + if new is None: + new = name + self.mod = new + else: + self.mod = old + + def _resolve(self): + return _import_module(self.mod) + + def __getattr__(self, attr): + _module = self._resolve() + value = getattr(_module, attr) + setattr(self, attr, value) + return value + + +class _LazyModule(types.ModuleType): + + def __init__(self, name): + super(_LazyModule, self).__init__(name) + self.__doc__ = self.__class__.__doc__ + + def __dir__(self): + attrs = ["__doc__", "__name__"] + attrs += [attr.name for attr in self._moved_attributes] + return attrs + + # Subclasses should override this + _moved_attributes = [] + + +class MovedAttribute(_LazyDescr): + + def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): + super(MovedAttribute, self).__init__(name) + if PY3: + if new_mod is None: + new_mod = name + self.mod = new_mod + if new_attr is None: + if old_attr is None: + new_attr = name + else: + new_attr = old_attr + self.attr = new_attr + else: + self.mod = old_mod + if old_attr is None: + old_attr = name + self.attr = old_attr + + def _resolve(self): + module = _import_module(self.mod) + return getattr(module, self.attr) + + +class _SixMetaPathImporter(object): + + """ + A meta path importer to import six.moves and its submodules. + + This class implements a PEP302 finder and loader. It should be compatible + with Python 2.5 and all existing versions of Python3 + """ + + def __init__(self, six_module_name): + self.name = six_module_name + self.known_modules = {} + + def _add_module(self, mod, *fullnames): + for fullname in fullnames: + self.known_modules[self.name + "." + fullname] = mod + + def _get_module(self, fullname): + return self.known_modules[self.name + "." + fullname] + + def find_module(self, fullname, path=None): + if fullname in self.known_modules: + return self + return None + + def __get_module(self, fullname): + try: + return self.known_modules[fullname] + except KeyError: + raise ImportError("This loader does not know module " + fullname) + + def load_module(self, fullname): + try: + # in case of a reload + return sys.modules[fullname] + except KeyError: + pass + mod = self.__get_module(fullname) + if isinstance(mod, MovedModule): + mod = mod._resolve() + else: + mod.__loader__ = self + sys.modules[fullname] = mod + return mod + + def is_package(self, fullname): + """ + Return true, if the named module is a package. 
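# --- Editor's note: hedged, editor-added sketch showing how the six.moves machinery above
# --- (MovedAttribute, MovedModule, the meta path importer) is consumed from client code.
import six
from six.moves import range, zip                  # resolved lazily via MovedAttribute
from six.moves.urllib.parse import urlparse       # resolved via the _SixMetaPathImporter above

print(six.PY3, list(zip(range(3), "abc")))
print(urlparse("https://example.org/path").netloc)   # -> example.org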
+ + We need this method to get correct spec objects with + Python 3.4 (see PEP451) + """ + return hasattr(self.__get_module(fullname), "__path__") + + def get_code(self, fullname): + """Return None + + Required, if is_package is implemented""" + self.__get_module(fullname) # eventually raises ImportError + return None + get_source = get_code # same as get_code + +_importer = _SixMetaPathImporter(__name__) + + +class _MovedItems(_LazyModule): + + """Lazy loading of moved objects""" + __path__ = [] # mark as package + + +_moved_attributes = [ + MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"), + MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"), + MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"), + MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"), + MovedAttribute("intern", "__builtin__", "sys"), + MovedAttribute("map", "itertools", "builtins", "imap", "map"), + MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"), + MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"), + MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"), + MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"), + MovedAttribute("reduce", "__builtin__", "functools"), + MovedAttribute("shlex_quote", "pipes", "shlex", "quote"), + MovedAttribute("StringIO", "StringIO", "io"), + MovedAttribute("UserDict", "UserDict", "collections"), + MovedAttribute("UserList", "UserList", "collections"), + MovedAttribute("UserString", "UserString", "collections"), + MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"), + MovedAttribute("zip", "itertools", "builtins", "izip", "zip"), + MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"), + MovedModule("builtins", "__builtin__"), + MovedModule("configparser", "ConfigParser"), + MovedModule("copyreg", "copy_reg"), + MovedModule("dbm_gnu", "gdbm", "dbm.gnu"), + MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"), + MovedModule("http_cookiejar", "cookielib", "http.cookiejar"), + MovedModule("http_cookies", "Cookie", "http.cookies"), + MovedModule("html_entities", "htmlentitydefs", "html.entities"), + MovedModule("html_parser", "HTMLParser", "html.parser"), + MovedModule("http_client", "httplib", "http.client"), + MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"), + MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"), + MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"), + MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"), + MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"), + MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"), + MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"), + MovedModule("cPickle", "cPickle", "pickle"), + MovedModule("queue", "Queue"), + MovedModule("reprlib", "repr"), + MovedModule("socketserver", "SocketServer"), + MovedModule("_thread", "thread", "_thread"), + MovedModule("tkinter", "Tkinter"), + MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"), + MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"), + MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"), + MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"), + MovedModule("tkinter_tix", "Tix", "tkinter.tix"), + MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"), + 
MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"), + MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"), + MovedModule("tkinter_colorchooser", "tkColorChooser", + "tkinter.colorchooser"), + MovedModule("tkinter_commondialog", "tkCommonDialog", + "tkinter.commondialog"), + MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"), + MovedModule("tkinter_font", "tkFont", "tkinter.font"), + MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"), + MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", + "tkinter.simpledialog"), + MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"), + MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"), + MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"), + MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"), + MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"), + MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"), +] +# Add windows specific modules. +if sys.platform == "win32": + _moved_attributes += [ + MovedModule("winreg", "_winreg"), + ] + +for attr in _moved_attributes: + setattr(_MovedItems, attr.name, attr) + if isinstance(attr, MovedModule): + _importer._add_module(attr, "moves." + attr.name) +del attr + +_MovedItems._moved_attributes = _moved_attributes + +moves = _MovedItems(__name__ + ".moves") +_importer._add_module(moves, "moves") + + +class Module_six_moves_urllib_parse(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_parse""" + + +_urllib_parse_moved_attributes = [ + MovedAttribute("ParseResult", "urlparse", "urllib.parse"), + MovedAttribute("SplitResult", "urlparse", "urllib.parse"), + MovedAttribute("parse_qs", "urlparse", "urllib.parse"), + MovedAttribute("parse_qsl", "urlparse", "urllib.parse"), + MovedAttribute("urldefrag", "urlparse", "urllib.parse"), + MovedAttribute("urljoin", "urlparse", "urllib.parse"), + MovedAttribute("urlparse", "urlparse", "urllib.parse"), + MovedAttribute("urlsplit", "urlparse", "urllib.parse"), + MovedAttribute("urlunparse", "urlparse", "urllib.parse"), + MovedAttribute("urlunsplit", "urlparse", "urllib.parse"), + MovedAttribute("quote", "urllib", "urllib.parse"), + MovedAttribute("quote_plus", "urllib", "urllib.parse"), + MovedAttribute("unquote", "urllib", "urllib.parse"), + MovedAttribute("unquote_plus", "urllib", "urllib.parse"), + MovedAttribute("urlencode", "urllib", "urllib.parse"), + MovedAttribute("splitquery", "urllib", "urllib.parse"), + MovedAttribute("splittag", "urllib", "urllib.parse"), + MovedAttribute("splituser", "urllib", "urllib.parse"), + MovedAttribute("uses_fragment", "urlparse", "urllib.parse"), + MovedAttribute("uses_netloc", "urlparse", "urllib.parse"), + MovedAttribute("uses_params", "urlparse", "urllib.parse"), + MovedAttribute("uses_query", "urlparse", "urllib.parse"), + MovedAttribute("uses_relative", "urlparse", "urllib.parse"), +] +for attr in _urllib_parse_moved_attributes: + setattr(Module_six_moves_urllib_parse, attr.name, attr) +del attr + +Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes + +_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"), + "moves.urllib_parse", "moves.urllib.parse") + + +class Module_six_moves_urllib_error(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_error""" + + +_urllib_error_moved_attributes = [ + MovedAttribute("URLError", "urllib2", "urllib.error"), + 
MovedAttribute("HTTPError", "urllib2", "urllib.error"), + MovedAttribute("ContentTooShortError", "urllib", "urllib.error"), +] +for attr in _urllib_error_moved_attributes: + setattr(Module_six_moves_urllib_error, attr.name, attr) +del attr + +Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes + +_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"), + "moves.urllib_error", "moves.urllib.error") + + +class Module_six_moves_urllib_request(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_request""" + + +_urllib_request_moved_attributes = [ + MovedAttribute("urlopen", "urllib2", "urllib.request"), + MovedAttribute("install_opener", "urllib2", "urllib.request"), + MovedAttribute("build_opener", "urllib2", "urllib.request"), + MovedAttribute("pathname2url", "urllib", "urllib.request"), + MovedAttribute("url2pathname", "urllib", "urllib.request"), + MovedAttribute("getproxies", "urllib", "urllib.request"), + MovedAttribute("Request", "urllib2", "urllib.request"), + MovedAttribute("OpenerDirector", "urllib2", "urllib.request"), + MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"), + MovedAttribute("ProxyHandler", "urllib2", "urllib.request"), + MovedAttribute("BaseHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"), + MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"), + MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"), + MovedAttribute("FileHandler", "urllib2", "urllib.request"), + MovedAttribute("FTPHandler", "urllib2", "urllib.request"), + MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"), + MovedAttribute("UnknownHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"), + MovedAttribute("urlretrieve", "urllib", "urllib.request"), + MovedAttribute("urlcleanup", "urllib", "urllib.request"), + MovedAttribute("URLopener", "urllib", "urllib.request"), + MovedAttribute("FancyURLopener", "urllib", "urllib.request"), + MovedAttribute("proxy_bypass", "urllib", "urllib.request"), +] +for attr in _urllib_request_moved_attributes: + setattr(Module_six_moves_urllib_request, attr.name, attr) +del attr + +Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes + +_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"), + "moves.urllib_request", "moves.urllib.request") + + +class Module_six_moves_urllib_response(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_response""" + + +_urllib_response_moved_attributes = [ + MovedAttribute("addbase", "urllib", "urllib.response"), + MovedAttribute("addclosehook", "urllib", "urllib.response"), + MovedAttribute("addinfo", "urllib", "urllib.response"), + MovedAttribute("addinfourl", "urllib", 
"urllib.response"), +] +for attr in _urllib_response_moved_attributes: + setattr(Module_six_moves_urllib_response, attr.name, attr) +del attr + +Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes + +_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"), + "moves.urllib_response", "moves.urllib.response") + + +class Module_six_moves_urllib_robotparser(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_robotparser""" + + +_urllib_robotparser_moved_attributes = [ + MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"), +] +for attr in _urllib_robotparser_moved_attributes: + setattr(Module_six_moves_urllib_robotparser, attr.name, attr) +del attr + +Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes + +_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"), + "moves.urllib_robotparser", "moves.urllib.robotparser") + + +class Module_six_moves_urllib(types.ModuleType): + + """Create a six.moves.urllib namespace that resembles the Python 3 namespace""" + __path__ = [] # mark as package + parse = _importer._get_module("moves.urllib_parse") + error = _importer._get_module("moves.urllib_error") + request = _importer._get_module("moves.urllib_request") + response = _importer._get_module("moves.urllib_response") + robotparser = _importer._get_module("moves.urllib_robotparser") + + def __dir__(self): + return ['parse', 'error', 'request', 'response', 'robotparser'] + +_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"), + "moves.urllib") + + +def add_move(move): + """Add an item to six.moves.""" + setattr(_MovedItems, move.name, move) + + +def remove_move(name): + """Remove item from six.moves.""" + try: + delattr(_MovedItems, name) + except AttributeError: + try: + del moves.__dict__[name] + except KeyError: + raise AttributeError("no such move, %r" % (name,)) + + +if PY3: + _meth_func = "__func__" + _meth_self = "__self__" + + _func_closure = "__closure__" + _func_code = "__code__" + _func_defaults = "__defaults__" + _func_globals = "__globals__" +else: + _meth_func = "im_func" + _meth_self = "im_self" + + _func_closure = "func_closure" + _func_code = "func_code" + _func_defaults = "func_defaults" + _func_globals = "func_globals" + + +try: + advance_iterator = next +except NameError: + def advance_iterator(it): + return it.next() +next = advance_iterator + + +try: + callable = callable +except NameError: + def callable(obj): + return any("__call__" in klass.__dict__ for klass in type(obj).__mro__) + + +if PY3: + def get_unbound_function(unbound): + return unbound + + create_bound_method = types.MethodType + + def create_unbound_method(func, cls): + return func + + Iterator = object +else: + def get_unbound_function(unbound): + return unbound.im_func + + def create_bound_method(func, obj): + return types.MethodType(func, obj, obj.__class__) + + def create_unbound_method(func, cls): + return types.MethodType(func, None, cls) + + class Iterator(object): + + def next(self): + return type(self).__next__(self) + + callable = callable +_add_doc(get_unbound_function, + """Get the function out of a possibly unbound function""") + + +get_method_function = operator.attrgetter(_meth_func) +get_method_self = operator.attrgetter(_meth_self) +get_function_closure = operator.attrgetter(_func_closure) +get_function_code = operator.attrgetter(_func_code) +get_function_defaults = 
operator.attrgetter(_func_defaults) +get_function_globals = operator.attrgetter(_func_globals) + + +if PY3: + def iterkeys(d, **kw): + return iter(d.keys(**kw)) + + def itervalues(d, **kw): + return iter(d.values(**kw)) + + def iteritems(d, **kw): + return iter(d.items(**kw)) + + def iterlists(d, **kw): + return iter(d.lists(**kw)) + + viewkeys = operator.methodcaller("keys") + + viewvalues = operator.methodcaller("values") + + viewitems = operator.methodcaller("items") +else: + def iterkeys(d, **kw): + return d.iterkeys(**kw) + + def itervalues(d, **kw): + return d.itervalues(**kw) + + def iteritems(d, **kw): + return d.iteritems(**kw) + + def iterlists(d, **kw): + return d.iterlists(**kw) + + viewkeys = operator.methodcaller("viewkeys") + + viewvalues = operator.methodcaller("viewvalues") + + viewitems = operator.methodcaller("viewitems") + +_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.") +_add_doc(itervalues, "Return an iterator over the values of a dictionary.") +_add_doc(iteritems, + "Return an iterator over the (key, value) pairs of a dictionary.") +_add_doc(iterlists, + "Return an iterator over the (key, [values]) pairs of a dictionary.") + + +if PY3: + def b(s): + return s.encode("latin-1") + + def u(s): + return s + unichr = chr + import struct + int2byte = struct.Struct(">B").pack + del struct + byte2int = operator.itemgetter(0) + indexbytes = operator.getitem + iterbytes = iter + import io + StringIO = io.StringIO + BytesIO = io.BytesIO + _assertCountEqual = "assertCountEqual" + if sys.version_info[1] <= 1: + _assertRaisesRegex = "assertRaisesRegexp" + _assertRegex = "assertRegexpMatches" + else: + _assertRaisesRegex = "assertRaisesRegex" + _assertRegex = "assertRegex" +else: + def b(s): + return s + # Workaround for standalone backslash + + def u(s): + return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape") + unichr = unichr + int2byte = chr + + def byte2int(bs): + return ord(bs[0]) + + def indexbytes(buf, i): + return ord(buf[i]) + iterbytes = functools.partial(itertools.imap, ord) + import StringIO + StringIO = BytesIO = StringIO.StringIO + _assertCountEqual = "assertItemsEqual" + _assertRaisesRegex = "assertRaisesRegexp" + _assertRegex = "assertRegexpMatches" +_add_doc(b, """Byte literal""") +_add_doc(u, """Text literal""") + + +def assertCountEqual(self, *args, **kwargs): + return getattr(self, _assertCountEqual)(*args, **kwargs) + + +def assertRaisesRegex(self, *args, **kwargs): + return getattr(self, _assertRaisesRegex)(*args, **kwargs) + + +def assertRegex(self, *args, **kwargs): + return getattr(self, _assertRegex)(*args, **kwargs) + + +if PY3: + exec_ = getattr(moves.builtins, "exec") + + def reraise(tp, value, tb=None): + if value is None: + value = tp() + if value.__traceback__ is not tb: + raise value.with_traceback(tb) + raise value + +else: + def exec_(_code_, _globs_=None, _locs_=None): + """Execute code in a namespace.""" + if _globs_ is None: + frame = sys._getframe(1) + _globs_ = frame.f_globals + if _locs_ is None: + _locs_ = frame.f_locals + del frame + elif _locs_ is None: + _locs_ = _globs_ + exec("""exec _code_ in _globs_, _locs_""") + + exec_("""def reraise(tp, value, tb=None): + raise tp, value, tb +""") + + +if sys.version_info[:2] == (3, 2): + exec_("""def raise_from(value, from_value): + if from_value is None: + raise value + raise value from from_value +""") +elif sys.version_info[:2] > (3, 2): + exec_("""def raise_from(value, from_value): + raise value from from_value +""") +else: + def raise_from(value, from_value): + 
raise value + + +print_ = getattr(moves.builtins, "print", None) +if print_ is None: + def print_(*args, **kwargs): + """The new-style print function for Python 2.4 and 2.5.""" + fp = kwargs.pop("file", sys.stdout) + if fp is None: + return + + def write(data): + if not isinstance(data, basestring): + data = str(data) + # If the file has an encoding, encode unicode with it. + if (isinstance(fp, file) and + isinstance(data, unicode) and + fp.encoding is not None): + errors = getattr(fp, "errors", None) + if errors is None: + errors = "strict" + data = data.encode(fp.encoding, errors) + fp.write(data) + want_unicode = False + sep = kwargs.pop("sep", None) + if sep is not None: + if isinstance(sep, unicode): + want_unicode = True + elif not isinstance(sep, str): + raise TypeError("sep must be None or a string") + end = kwargs.pop("end", None) + if end is not None: + if isinstance(end, unicode): + want_unicode = True + elif not isinstance(end, str): + raise TypeError("end must be None or a string") + if kwargs: + raise TypeError("invalid keyword arguments to print()") + if not want_unicode: + for arg in args: + if isinstance(arg, unicode): + want_unicode = True + break + if want_unicode: + newline = unicode("\n") + space = unicode(" ") + else: + newline = "\n" + space = " " + if sep is None: + sep = space + if end is None: + end = newline + for i, arg in enumerate(args): + if i: + write(sep) + write(arg) + write(end) +if sys.version_info[:2] < (3, 3): + _print = print_ + + def print_(*args, **kwargs): + fp = kwargs.get("file", sys.stdout) + flush = kwargs.pop("flush", False) + _print(*args, **kwargs) + if flush and fp is not None: + fp.flush() + +_add_doc(reraise, """Reraise an exception.""") + +if sys.version_info[0:2] < (3, 4): + def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS, + updated=functools.WRAPPER_UPDATES): + def wrapper(f): + f = functools.wraps(wrapped, assigned, updated)(f) + f.__wrapped__ = wrapped + return f + return wrapper +else: + wraps = functools.wraps + + +def with_metaclass(meta, *bases): + """Create a base class with a metaclass.""" + # This requires a bit of explanation: the basic idea is to make a dummy + # metaclass for one level of class instantiation that replaces itself with + # the actual metaclass. + class metaclass(meta): + + def __new__(cls, name, this_bases, d): + return meta(name, bases, d) + return type.__new__(metaclass, 'temporary_class', (), {}) + + +def add_metaclass(metaclass): + """Class decorator for creating a class with a metaclass.""" + def wrapper(cls): + orig_vars = cls.__dict__.copy() + slots = orig_vars.get('__slots__') + if slots is not None: + if isinstance(slots, str): + slots = [slots] + for slots_var in slots: + orig_vars.pop(slots_var) + orig_vars.pop('__dict__', None) + orig_vars.pop('__weakref__', None) + return metaclass(cls.__name__, cls.__bases__, orig_vars) + return wrapper + + +def python_2_unicode_compatible(klass): + """ + A decorator that defines __unicode__ and __str__ methods under Python 2. + Under Python 3 it does nothing. + + To support Python 2 and 3 with a single code base, define a __str__ method + returning text and apply this decorator to the class. + """ + if PY2: + if '__str__' not in klass.__dict__: + raise ValueError("@python_2_unicode_compatible cannot be applied " + "to %s because it doesn't define __str__()." % + klass.__name__) + klass.__unicode__ = klass.__str__ + klass.__str__ = lambda self: self.__unicode__().encode('utf-8') + return klass + + +# Complete the moves implementation. 
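# --- Editor's note: hedged, editor-added sketch of the compatibility decorators defined above;
# --- the Meta / Base / Greeting classes are invented for the demo.
import six

class Meta(type):
    """Hypothetical metaclass used only for this demonstration."""
    pass

@six.add_metaclass(Meta)              # single-source way to set a metaclass on Py2 and Py3
class Base(object):
    pass

@six.python_2_unicode_compatible      # maps __str__ to __unicode__ under Python 2
class Greeting(object):
    def __str__(self):
        return u"hello"

print(type(Base) is Meta, str(Greeting()))   # -> True hello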
+# This code is at the end of this module to speed up module loading. +# Turn this module into a package. +__path__ = [] # required for PEP 302 and PEP 451 +__package__ = __name__ # see PEP 366 @ReservedAssignment +if globals().get("__spec__") is not None: + __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable +# Remove other six meta path importers, since they cause problems. This can +# happen if six is removed from sys.modules and then reloaded. (Setuptools does +# this for some reason.) +if sys.meta_path: + for i, importer in enumerate(sys.meta_path): + # Here's some real nastiness: Another "instance" of the six module might + # be floating around. Therefore, we can't use isinstance() to check for + # the six meta path importer, since the other six instance will have + # inserted an importer with different class. + if (type(importer).__name__ == "_SixMetaPathImporter" and + importer.name == __name__): + del sys.meta_path[i] + break + del i, importer +# Finally, add the importer to the meta path import hook. +sys.meta_path.append(_importer) diff --git a/env/lib/python3.6/site-packages/python_dateutil-2.6.1.dist-info/DESCRIPTION.rst b/env/lib/python3.6/site-packages/python_dateutil-2.6.1.dist-info/DESCRIPTION.rst new file mode 100644 index 0000000..ac2d8fd --- /dev/null +++ b/env/lib/python3.6/site-packages/python_dateutil-2.6.1.dist-info/DESCRIPTION.rst @@ -0,0 +1,5 @@ + +The dateutil module provides powerful extensions to the +datetime module available in the Python standard library. + + diff --git a/env/lib/python3.6/site-packages/python_dateutil-2.6.1.dist-info/INSTALLER b/env/lib/python3.6/site-packages/python_dateutil-2.6.1.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/env/lib/python3.6/site-packages/python_dateutil-2.6.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/env/lib/python3.6/site-packages/python_dateutil-2.6.1.dist-info/METADATA b/env/lib/python3.6/site-packages/python_dateutil-2.6.1.dist-info/METADATA new file mode 100644 index 0000000..35a546b --- /dev/null +++ b/env/lib/python3.6/site-packages/python_dateutil-2.6.1.dist-info/METADATA @@ -0,0 +1,31 @@ +Metadata-Version: 2.0 +Name: python-dateutil +Version: 2.6.1 +Summary: Extensions to the standard Python datetime module +Home-page: https://dateutil.readthedocs.io +Author: Paul Ganssle +Author-email: dateutil@python.org +License: Simplified BSD +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.2 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Topic :: Software Development :: Libraries +Requires: six +Requires-Dist: six (>=1.5) + + +The dateutil module provides powerful extensions to the +datetime module available in the Python standard library. 
+ + diff --git a/env/lib/python3.6/site-packages/python_dateutil-2.6.1.dist-info/RECORD b/env/lib/python3.6/site-packages/python_dateutil-2.6.1.dist-info/RECORD new file mode 100644 index 0000000..bb0ff90 --- /dev/null +++ b/env/lib/python3.6/site-packages/python_dateutil-2.6.1.dist-info/RECORD @@ -0,0 +1,37 @@ +dateutil/__init__.py,sha256=pmIYgg6QzDojrDZCuzpxEEeYheJBFXmtyfxwE2BIx7I,69 +dateutil/_common.py,sha256=l1Vl-vdei4Vm6fw7bDOPpIZU5sqSvL0BzCFE6y5m7kw,768 +dateutil/_version.py,sha256=OEgUyKiSf0dU1UUJl13GrtJMMn2vH0uuiFMtiLaPzFI,219 +dateutil/easter.py,sha256=6WQORXKs5kdAfUT3ojvc4fKITj91O-fTycZvlb2l12E,2629 +dateutil/parser.py,sha256=ZvxUmQsfztwix6RdJe2r1OFt5TZIY6woTymDEQRsltA,50939 +dateutil/relativedelta.py,sha256=Df_G3ykKPCTyiql3jBmwF2tr4mORFQcXfXwgfnRyT5o,23145 +dateutil/rrule.py,sha256=1YrItKAPQi-k1o-v5t_VGOmQd7Fyd5quIYUPTn7ORcs,61835 +dateutil/tzwin.py,sha256=7Ar4vdQCnnM0mKR3MUjbIKsZrBVfHgdwsJZc_mGYRew,59 +dateutil/tz/__init__.py,sha256=GpFGapfpCsnR8gswxLP4QcTjzImCGXmkrwr_fwF2ggw,207 +dateutil/tz/_common.py,sha256=27BskrtXNaI0WTRs9cfKkFlSv6GaS7zjCHFNLFYbVbU,11990 +dateutil/tz/tz.py,sha256=2IVXN5dfLcgAGHiXEF6rSYx2EE7Q6IceWI2czLqxz8Y,50578 +dateutil/tz/win.py,sha256=3MDzXRsoeOxZJFU6SB_-MTcZ6vTobp5iSCgH8-8Vyt4,11397 +dateutil/zoneinfo/__init__.py,sha256=h0718WwD-vp3565_Qh2ZxaGFwdJUEMs2FXjcjUDNn6Q,6747 +dateutil/zoneinfo/dateutil-zoneinfo.tar.gz,sha256=I1SBXwMhO3_qV6bu5UKVGx8VFlhpVYEcF_Qe1tqXRe8,138881 +dateutil/zoneinfo/rebuild.py,sha256=MCdwfmSyYjHzgv1LbSEn5F3oIbeIB-wxYWcwLHePHqw,1689 +python_dateutil-2.6.1.dist-info/DESCRIPTION.rst,sha256=WzfQq_9HAZsBuKLe06cpD3Kt7lxqKtcoW7nJgXllcUU,117 +python_dateutil-2.6.1.dist-info/METADATA,sha256=Kv00T0JSA6IWLR9J0J0_yO4LMPrIlfERVUrusRkyCT4,1112 +python_dateutil-2.6.1.dist-info/RECORD,, +python_dateutil-2.6.1.dist-info/WHEEL,sha256=o2k-Qa-RMNIJmUdIc7KU6VWR_ErNRbWNlxDIpl7lm34,110 +python_dateutil-2.6.1.dist-info/metadata.json,sha256=zrrY2iUSpoXqkN2ewmMXEzeH-gWX8rDDDcp-EvL_UZg,1113 +python_dateutil-2.6.1.dist-info/top_level.txt,sha256=4tjdWkhRZvF7LA_BYe_L9gB2w_p2a-z5y6ArjaRkot8,9 +python_dateutil-2.6.1.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1 +python_dateutil-2.6.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +dateutil/zoneinfo/__pycache__/rebuild.cpython-36.pyc,, +dateutil/zoneinfo/__pycache__/__init__.cpython-36.pyc,, +dateutil/__pycache__/parser.cpython-36.pyc,, +dateutil/__pycache__/rrule.cpython-36.pyc,, +dateutil/__pycache__/_version.cpython-36.pyc,, +dateutil/__pycache__/tzwin.cpython-36.pyc,, +dateutil/__pycache__/_common.cpython-36.pyc,, +dateutil/__pycache__/relativedelta.cpython-36.pyc,, +dateutil/__pycache__/__init__.cpython-36.pyc,, +dateutil/__pycache__/easter.cpython-36.pyc,, +dateutil/tz/__pycache__/tz.cpython-36.pyc,, +dateutil/tz/__pycache__/_common.cpython-36.pyc,, +dateutil/tz/__pycache__/win.cpython-36.pyc,, +dateutil/tz/__pycache__/__init__.cpython-36.pyc,, diff --git a/env/lib/python3.6/site-packages/python_dateutil-2.6.1.dist-info/WHEEL b/env/lib/python3.6/site-packages/python_dateutil-2.6.1.dist-info/WHEEL new file mode 100644 index 0000000..8b6dd1b --- /dev/null +++ b/env/lib/python3.6/site-packages/python_dateutil-2.6.1.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.29.0) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/env/lib/python3.6/site-packages/python_dateutil-2.6.1.dist-info/metadata.json b/env/lib/python3.6/site-packages/python_dateutil-2.6.1.dist-info/metadata.json new file mode 100644 index 
0000000..594b099 --- /dev/null +++ b/env/lib/python3.6/site-packages/python_dateutil-2.6.1.dist-info/metadata.json @@ -0,0 +1 @@ +{"classifiers": ["Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: BSD License", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.2", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Topic :: Software Development :: Libraries"], "extensions": {"python.details": {"contacts": [{"email": "dateutil@python.org", "name": "Paul Ganssle", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "https://dateutil.readthedocs.io"}}}, "extras": [], "generator": "bdist_wheel (0.29.0)", "license": "Simplified BSD", "metadata_version": "2.0", "name": "python-dateutil", "requires": "six", "run_requires": [{"requires": ["six (>=1.5)"]}], "summary": "Extensions to the standard Python datetime module", "version": "2.6.1"} \ No newline at end of file diff --git a/env/lib/python3.6/site-packages/python_dateutil-2.6.1.dist-info/top_level.txt b/env/lib/python3.6/site-packages/python_dateutil-2.6.1.dist-info/top_level.txt new file mode 100644 index 0000000..6650148 --- /dev/null +++ b/env/lib/python3.6/site-packages/python_dateutil-2.6.1.dist-info/top_level.txt @@ -0,0 +1 @@ +dateutil diff --git a/env/lib/python3.6/site-packages/python_dateutil-2.6.1.dist-info/zip-safe b/env/lib/python3.6/site-packages/python_dateutil-2.6.1.dist-info/zip-safe new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/env/lib/python3.6/site-packages/python_dateutil-2.6.1.dist-info/zip-safe @@ -0,0 +1 @@ + diff --git a/env/lib/python3.6/site-packages/pytz-2018.5.dist-info/DESCRIPTION.rst b/env/lib/python3.6/site-packages/pytz-2018.5.dist-info/DESCRIPTION.rst new file mode 100644 index 0000000..dea0b50 --- /dev/null +++ b/env/lib/python3.6/site-packages/pytz-2018.5.dist-info/DESCRIPTION.rst @@ -0,0 +1,589 @@ +pytz - World Timezone Definitions for Python +============================================ + +:Author: Stuart Bishop <stuart@stuartbishop.net> + +Introduction +~~~~~~~~~~~~ + +pytz brings the Olson tz database into Python. This library allows +accurate and cross platform timezone calculations using Python 2.4 +or higher. It also solves the issue of ambiguous times at the end +of daylight saving time, which you can read more about in the Python +Library Reference (``datetime.tzinfo``). + +Almost all of the Olson timezones are supported. + +.. note:: + + This library differs from the documented Python API for + tzinfo implementations; if you want to create local wallclock + times you need to use the ``localize()`` method documented in this + document. In addition, if you perform date arithmetic on local + times that cross DST boundaries, the result may be in an incorrect + timezone (ie. subtract 1 minute from 2002-10-27 1:00 EST and you get + 2002-10-27 0:59 EST instead of the correct 2002-10-27 1:59 EDT). A + ``normalize()`` method is provided to correct this. Unfortunately these + issues cannot be resolved without modifying the Python datetime + implementation (see PEP-431). 
+ + +Installation +~~~~~~~~~~~~ + +This package can either be installed from a .egg file using setuptools, +or from the tarball using the standard Python distutils. + +If you are installing from a tarball, run the following command as an +administrative user:: + + python setup.py install + +If you are installing using setuptools, you don't even need to download +anything as the latest version will be downloaded for you +from the Python package index:: + + easy_install --upgrade pytz + +If you already have the .egg file, you can use that too:: + + easy_install pytz-2008g-py2.6.egg + + +Example & Usage +~~~~~~~~~~~~~~~ + +Localized times and date arithmetic +----------------------------------- + +>>> from datetime import datetime, timedelta +>>> from pytz import timezone +>>> import pytz +>>> utc = pytz.utc +>>> utc.zone +'UTC' +>>> eastern = timezone('US/Eastern') +>>> eastern.zone +'US/Eastern' +>>> amsterdam = timezone('Europe/Amsterdam') +>>> fmt = '%Y-%m-%d %H:%M:%S %Z%z' + +This library only supports two ways of building a localized time. The +first is to use the ``localize()`` method provided by the pytz library. +This is used to localize a naive datetime (datetime with no timezone +information): + +>>> loc_dt = eastern.localize(datetime(2002, 10, 27, 6, 0, 0)) +>>> print(loc_dt.strftime(fmt)) +2002-10-27 06:00:00 EST-0500 + +The second way of building a localized time is by converting an existing +localized time using the standard ``astimezone()`` method: + +>>> ams_dt = loc_dt.astimezone(amsterdam) +>>> ams_dt.strftime(fmt) +'2002-10-27 12:00:00 CET+0100' + +Unfortunately using the tzinfo argument of the standard datetime +constructors ''does not work'' with pytz for many timezones. + +>>> datetime(2002, 10, 27, 12, 0, 0, tzinfo=amsterdam).strftime(fmt) +'2002-10-27 12:00:00 LMT+0020' + +It is safe for timezones without daylight saving transitions though, such +as UTC: + +>>> datetime(2002, 10, 27, 12, 0, 0, tzinfo=pytz.utc).strftime(fmt) +'2002-10-27 12:00:00 UTC+0000' + +The preferred way of dealing with times is to always work in UTC, +converting to localtime only when generating output to be read +by humans. + +>>> utc_dt = datetime(2002, 10, 27, 6, 0, 0, tzinfo=utc) +>>> loc_dt = utc_dt.astimezone(eastern) +>>> loc_dt.strftime(fmt) +'2002-10-27 01:00:00 EST-0500' + +This library also allows you to do date arithmetic using local +times, although it is more complicated than working in UTC as you +need to use the ``normalize()`` method to handle daylight saving time +and other timezone transitions. In this example, ``loc_dt`` is set +to the instant when daylight saving time ends in the US/Eastern +timezone. + +>>> before = loc_dt - timedelta(minutes=10) +>>> before.strftime(fmt) +'2002-10-27 00:50:00 EST-0500' +>>> eastern.normalize(before).strftime(fmt) +'2002-10-27 01:50:00 EDT-0400' +>>> after = eastern.normalize(before + timedelta(minutes=20)) +>>> after.strftime(fmt) +'2002-10-27 01:10:00 EST-0500' + +Creating local times is also tricky, and the reason why working with +local times is not recommended. Unfortunately, you cannot just pass +a ``tzinfo`` argument when constructing a datetime (see the next +section for more details) + +>>> dt = datetime(2002, 10, 27, 1, 30, 0) +>>> dt1 = eastern.localize(dt, is_dst=True) +>>> dt1.strftime(fmt) +'2002-10-27 01:30:00 EDT-0400' +>>> dt2 = eastern.localize(dt, is_dst=False) +>>> dt2.strftime(fmt) +'2002-10-27 01:30:00 EST-0500' + +Converting between timezones is more easily done, using the +standard astimezone method. 
+ +>>> utc_dt = utc.localize(datetime.utcfromtimestamp(1143408899)) +>>> utc_dt.strftime(fmt) +'2006-03-26 21:34:59 UTC+0000' +>>> au_tz = timezone('Australia/Sydney') +>>> au_dt = utc_dt.astimezone(au_tz) +>>> au_dt.strftime(fmt) +'2006-03-27 08:34:59 AEDT+1100' +>>> utc_dt2 = au_dt.astimezone(utc) +>>> utc_dt2.strftime(fmt) +'2006-03-26 21:34:59 UTC+0000' +>>> utc_dt == utc_dt2 +True + +You can take shortcuts when dealing with the UTC side of timezone +conversions. ``normalize()`` and ``localize()`` are not really +necessary when there are no daylight saving time transitions to +deal with. + +>>> utc_dt = datetime.utcfromtimestamp(1143408899).replace(tzinfo=utc) +>>> utc_dt.strftime(fmt) +'2006-03-26 21:34:59 UTC+0000' +>>> au_tz = timezone('Australia/Sydney') +>>> au_dt = au_tz.normalize(utc_dt.astimezone(au_tz)) +>>> au_dt.strftime(fmt) +'2006-03-27 08:34:59 AEDT+1100' +>>> utc_dt2 = au_dt.astimezone(utc) +>>> utc_dt2.strftime(fmt) +'2006-03-26 21:34:59 UTC+0000' + + +``tzinfo`` API +-------------- + +The ``tzinfo`` instances returned by the ``timezone()`` function have +been extended to cope with ambiguous times by adding an ``is_dst`` +parameter to the ``utcoffset()``, ``dst()`` && ``tzname()`` methods. + +>>> tz = timezone('America/St_Johns') + +>>> normal = datetime(2009, 9, 1) +>>> ambiguous = datetime(2009, 10, 31, 23, 30) + +The ``is_dst`` parameter is ignored for most timestamps. It is only used +during DST transition ambiguous periods to resolve that ambiguity. + +>>> tz.utcoffset(normal, is_dst=True) +datetime.timedelta(-1, 77400) +>>> tz.dst(normal, is_dst=True) +datetime.timedelta(0, 3600) +>>> tz.tzname(normal, is_dst=True) +'NDT' + +>>> tz.utcoffset(ambiguous, is_dst=True) +datetime.timedelta(-1, 77400) +>>> tz.dst(ambiguous, is_dst=True) +datetime.timedelta(0, 3600) +>>> tz.tzname(ambiguous, is_dst=True) +'NDT' + +>>> tz.utcoffset(normal, is_dst=False) +datetime.timedelta(-1, 77400) +>>> tz.dst(normal, is_dst=False) +datetime.timedelta(0, 3600) +>>> tz.tzname(normal, is_dst=False) +'NDT' + +>>> tz.utcoffset(ambiguous, is_dst=False) +datetime.timedelta(-1, 73800) +>>> tz.dst(ambiguous, is_dst=False) +datetime.timedelta(0) +>>> tz.tzname(ambiguous, is_dst=False) +'NST' + +If ``is_dst`` is not specified, ambiguous timestamps will raise +an ``pytz.exceptions.AmbiguousTimeError`` exception. + +>>> tz.utcoffset(normal) +datetime.timedelta(-1, 77400) +>>> tz.dst(normal) +datetime.timedelta(0, 3600) +>>> tz.tzname(normal) +'NDT' + +>>> import pytz.exceptions +>>> try: +... tz.utcoffset(ambiguous) +... except pytz.exceptions.AmbiguousTimeError: +... print('pytz.exceptions.AmbiguousTimeError: %s' % ambiguous) +pytz.exceptions.AmbiguousTimeError: 2009-10-31 23:30:00 +>>> try: +... tz.dst(ambiguous) +... except pytz.exceptions.AmbiguousTimeError: +... print('pytz.exceptions.AmbiguousTimeError: %s' % ambiguous) +pytz.exceptions.AmbiguousTimeError: 2009-10-31 23:30:00 +>>> try: +... tz.tzname(ambiguous) +... except pytz.exceptions.AmbiguousTimeError: +... print('pytz.exceptions.AmbiguousTimeError: %s' % ambiguous) +pytz.exceptions.AmbiguousTimeError: 2009-10-31 23:30:00 + + +Problems with Localtime +~~~~~~~~~~~~~~~~~~~~~~~ + +The major problem we have to deal with is that certain datetimes +may occur twice in a year. 
For example, in the US/Eastern timezone +on the last Sunday morning in October, the following sequence +happens: + + - 01:00 EDT occurs + - 1 hour later, instead of 2:00am the clock is turned back 1 hour + and 01:00 happens again (this time 01:00 EST) + +In fact, every instant between 01:00 and 02:00 occurs twice. This means +that if you try and create a time in the 'US/Eastern' timezone +the standard datetime syntax, there is no way to specify if you meant +before of after the end-of-daylight-saving-time transition. Using the +pytz custom syntax, the best you can do is make an educated guess: + +>>> loc_dt = eastern.localize(datetime(2002, 10, 27, 1, 30, 00)) +>>> loc_dt.strftime(fmt) +'2002-10-27 01:30:00 EST-0500' + +As you can see, the system has chosen one for you and there is a 50% +chance of it being out by one hour. For some applications, this does +not matter. However, if you are trying to schedule meetings with people +in different timezones or analyze log files it is not acceptable. + +The best and simplest solution is to stick with using UTC. The pytz +package encourages using UTC for internal timezone representation by +including a special UTC implementation based on the standard Python +reference implementation in the Python documentation. + +The UTC timezone unpickles to be the same instance, and pickles to a +smaller size than other pytz tzinfo instances. The UTC implementation +can be obtained as pytz.utc, pytz.UTC, or pytz.timezone('UTC'). + +>>> import pickle, pytz +>>> dt = datetime(2005, 3, 1, 14, 13, 21, tzinfo=utc) +>>> naive = dt.replace(tzinfo=None) +>>> p = pickle.dumps(dt, 1) +>>> naive_p = pickle.dumps(naive, 1) +>>> len(p) - len(naive_p) +17 +>>> new = pickle.loads(p) +>>> new == dt +True +>>> new is dt +False +>>> new.tzinfo is dt.tzinfo +True +>>> pytz.utc is pytz.UTC is pytz.timezone('UTC') +True + +Note that some other timezones are commonly thought of as the same (GMT, +Greenwich, Universal, etc.). The definition of UTC is distinct from these +other timezones, and they are not equivalent. For this reason, they will +not compare the same in Python. + +>>> utc == pytz.timezone('GMT') +False + +See the section `What is UTC`_, below. + +If you insist on working with local times, this library provides a +facility for constructing them unambiguously: + +>>> loc_dt = datetime(2002, 10, 27, 1, 30, 00) +>>> est_dt = eastern.localize(loc_dt, is_dst=True) +>>> edt_dt = eastern.localize(loc_dt, is_dst=False) +>>> print(est_dt.strftime(fmt) + ' / ' + edt_dt.strftime(fmt)) +2002-10-27 01:30:00 EDT-0400 / 2002-10-27 01:30:00 EST-0500 + +If you pass None as the is_dst flag to localize(), pytz will refuse to +guess and raise exceptions if you try to build ambiguous or non-existent +times. + +For example, 1:30am on 27th Oct 2002 happened twice in the US/Eastern +timezone when the clocks where put back at the end of Daylight Saving +Time: + +>>> dt = datetime(2002, 10, 27, 1, 30, 00) +>>> try: +... eastern.localize(dt, is_dst=None) +... except pytz.exceptions.AmbiguousTimeError: +... print('pytz.exceptions.AmbiguousTimeError: %s' % dt) +pytz.exceptions.AmbiguousTimeError: 2002-10-27 01:30:00 + +Similarly, 2:30am on 7th April 2002 never happened at all in the +US/Eastern timezone, as the clocks where put forward at 2:00am skipping +the entire hour: + +>>> dt = datetime(2002, 4, 7, 2, 30, 00) +>>> try: +... eastern.localize(dt, is_dst=None) +... except pytz.exceptions.NonExistentTimeError: +... 
print('pytz.exceptions.NonExistentTimeError: %s' % dt) +pytz.exceptions.NonExistentTimeError: 2002-04-07 02:30:00 + +Both of these exceptions share a common base class to make error handling +easier: + +>>> isinstance(pytz.AmbiguousTimeError(), pytz.InvalidTimeError) +True +>>> isinstance(pytz.NonExistentTimeError(), pytz.InvalidTimeError) +True + + +A special case is where countries change their timezone definitions +with no daylight savings time switch. For example, in 1915 Warsaw +switched from Warsaw time to Central European time with no daylight savings +transition. So at the stroke of midnight on August 5th 1915 the clocks +were wound back 24 minutes creating an ambiguous time period that cannot +be specified without referring to the timezone abbreviation or the +actual UTC offset. In this case midnight happened twice, neither time +during a daylight saving time period. pytz handles this transition by +treating the ambiguous period before the switch as daylight savings +time, and the ambiguous period after as standard time. + + +>>> warsaw = pytz.timezone('Europe/Warsaw') +>>> amb_dt1 = warsaw.localize(datetime(1915, 8, 4, 23, 59, 59), is_dst=True) +>>> amb_dt1.strftime(fmt) +'1915-08-04 23:59:59 WMT+0124' +>>> amb_dt2 = warsaw.localize(datetime(1915, 8, 4, 23, 59, 59), is_dst=False) +>>> amb_dt2.strftime(fmt) +'1915-08-04 23:59:59 CET+0100' +>>> switch_dt = warsaw.localize(datetime(1915, 8, 5, 00, 00, 00), is_dst=False) +>>> switch_dt.strftime(fmt) +'1915-08-05 00:00:00 CET+0100' +>>> str(switch_dt - amb_dt1) +'0:24:01' +>>> str(switch_dt - amb_dt2) +'0:00:01' + +The best way of creating a time during an ambiguous time period is +by converting from another timezone such as UTC: + +>>> utc_dt = datetime(1915, 8, 4, 22, 36, tzinfo=pytz.utc) +>>> utc_dt.astimezone(warsaw).strftime(fmt) +'1915-08-04 23:36:00 CET+0100' + +The standard Python way of handling all these ambiguities is not to +handle them, such as demonstrated in this example using the US/Eastern +timezone definition from the Python documentation (Note that this +implementation only works for dates between 1987 and 2006 - it is +included for tests only!): + +>>> from pytz.reference import Eastern # pytz.reference only for tests +>>> dt = datetime(2002, 10, 27, 0, 30, tzinfo=Eastern) +>>> str(dt) +'2002-10-27 00:30:00-04:00' +>>> str(dt + timedelta(hours=1)) +'2002-10-27 01:30:00-05:00' +>>> str(dt + timedelta(hours=2)) +'2002-10-27 02:30:00-05:00' +>>> str(dt + timedelta(hours=3)) +'2002-10-27 03:30:00-05:00' + +Notice the first two results? At first glance you might think they are +correct, but taking the UTC offset into account you find that they are +actually two hours appart instead of the 1 hour we asked for. + +>>> from pytz.reference import UTC # pytz.reference only for tests +>>> str(dt.astimezone(UTC)) +'2002-10-27 04:30:00+00:00' +>>> str((dt + timedelta(hours=1)).astimezone(UTC)) +'2002-10-27 06:30:00+00:00' + + +Country Information +~~~~~~~~~~~~~~~~~~~ + +A mechanism is provided to access the timezones commonly in use +for a particular country, looked up using the ISO 3166 country code. 
+It returns a list of strings that can be used to retrieve the relevant +tzinfo instance using ``pytz.timezone()``: + +>>> print(' '.join(pytz.country_timezones['nz'])) +Pacific/Auckland Pacific/Chatham + +The Olson database comes with a ISO 3166 country code to English country +name mapping that pytz exposes as a dictionary: + +>>> print(pytz.country_names['nz']) +New Zealand + + +What is UTC +~~~~~~~~~~~ + +'UTC' is `Coordinated Universal Time`_. It is a successor to, but distinct +from, Greenwich Mean Time (GMT) and the various definitions of Universal +Time. UTC is now the worldwide standard for regulating clocks and time +measurement. + +All other timezones are defined relative to UTC, and include offsets like +UTC+0800 - hours to add or subtract from UTC to derive the local time. No +daylight saving time occurs in UTC, making it a useful timezone to perform +date arithmetic without worrying about the confusion and ambiguities caused +by daylight saving time transitions, your country changing its timezone, or +mobile computers that roam through multiple timezones. + +.. _Coordinated Universal Time: https://en.wikipedia.org/wiki/Coordinated_Universal_Time + + +Helpers +~~~~~~~ + +There are two lists of timezones provided. + +``all_timezones`` is the exhaustive list of the timezone names that can +be used. + +>>> from pytz import all_timezones +>>> len(all_timezones) >= 500 +True +>>> 'Etc/Greenwich' in all_timezones +True + +``common_timezones`` is a list of useful, current timezones. It doesn't +contain deprecated zones or historical zones, except for a few I've +deemed in common usage, such as US/Eastern (open a bug report if you +think other timezones are deserving of being included here). It is also +a sequence of strings. + +>>> from pytz import common_timezones +>>> len(common_timezones) < len(all_timezones) +True +>>> 'Etc/Greenwich' in common_timezones +False +>>> 'Australia/Melbourne' in common_timezones +True +>>> 'US/Eastern' in common_timezones +True +>>> 'Canada/Eastern' in common_timezones +True +>>> 'Australia/Yancowinna' in all_timezones +True +>>> 'Australia/Yancowinna' in common_timezones +False + +Both ``common_timezones`` and ``all_timezones`` are alphabetically +sorted: + +>>> common_timezones_dupe = common_timezones[:] +>>> common_timezones_dupe.sort() +>>> common_timezones == common_timezones_dupe +True +>>> all_timezones_dupe = all_timezones[:] +>>> all_timezones_dupe.sort() +>>> all_timezones == all_timezones_dupe +True + +``all_timezones`` and ``common_timezones`` are also available as sets. + +>>> from pytz import all_timezones_set, common_timezones_set +>>> 'US/Eastern' in all_timezones_set +True +>>> 'US/Eastern' in common_timezones_set +True +>>> 'Australia/Victoria' in common_timezones_set +False + +You can also retrieve lists of timezones used by particular countries +using the ``country_timezones()`` function. It requires an ISO-3166 +two letter country code. + +>>> from pytz import country_timezones +>>> print(' '.join(country_timezones('ch'))) +Europe/Zurich +>>> print(' '.join(country_timezones('CH'))) +Europe/Zurich + + +Internationalization - i18n/l10n +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Pytz is an interface to the IANA database, which uses ASCII names. The `Unicode Consortium's Unicode Locales (CLDR) <http://cldr.unicode.org>`_ +project provides translations. Thomas Khyn's +`l18n <https://pypi.org/project/l18n/>`_ package can be used to access +these translations from Python. + + +License +~~~~~~~ + +MIT license. 
+ +This code is also available as part of Zope 3 under the Zope Public +License, Version 2.1 (ZPL). + +I'm happy to relicense this code if necessary for inclusion in other +open source projects. + + +Latest Versions +~~~~~~~~~~~~~~~ + +This package will be updated after releases of the Olson timezone +database. The latest version can be downloaded from the `Python Package +Index <https://pypi.org/project/pytz/>`_. The code that is used +to generate this distribution is hosted on launchpad.net and available +using git:: + + git clone https://git.launchpad.net/pytz + +A mirror on github is also available at https://github.com/stub42/pytz + +Announcements of new releases are made on +`Launchpad <https://launchpad.net/pytz>`_, and the +`Atom feed <http://feeds.launchpad.net/pytz/announcements.atom>`_ +hosted there. + + +Bugs, Feature Requests & Patches +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Bugs can be reported using `Launchpad <https://bugs.launchpad.net/pytz>`__. + + +Issues & Limitations +~~~~~~~~~~~~~~~~~~~~ + +- Offsets from UTC are rounded to the nearest whole minute, so timezones + such as Europe/Amsterdam pre 1937 will be up to 30 seconds out. This + is a limitation of the Python datetime library. + +- If you think a timezone definition is incorrect, I probably can't fix + it. pytz is a direct translation of the Olson timezone database, and + changes to the timezone definitions need to be made to this source. + If you find errors they should be reported to the time zone mailing + list, linked from http://www.iana.org/time-zones. + + +Further Reading +~~~~~~~~~~~~~~~ + +More info than you want to know about timezones: +http://www.twinsun.com/tz/tz-link.htm + + +Contact +~~~~~~~ + +Stuart Bishop <stuart@stuartbishop.net> + + + + diff --git a/env/lib/python3.6/site-packages/pytz-2018.5.dist-info/INSTALLER b/env/lib/python3.6/site-packages/pytz-2018.5.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/env/lib/python3.6/site-packages/pytz-2018.5.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/env/lib/python3.6/site-packages/pytz-2018.5.dist-info/LICENSE.txt b/env/lib/python3.6/site-packages/pytz-2018.5.dist-info/LICENSE.txt new file mode 100644 index 0000000..7c901fd --- /dev/null +++ b/env/lib/python3.6/site-packages/pytz-2018.5.dist-info/LICENSE.txt @@ -0,0 +1,19 @@ +Copyright (c) 2003-2018 Stuart Bishop <stuart@stuartbishop.net> + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. 
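A short, self-contained sketch of the localize()/normalize()/is_dst behaviour documented in the DESCRIPTION above. It is illustrative only and not one of the files added by this patch; it assumes the bundled pytz (env/lib/python3.6/site-packages/pytz), or any installed pytz, is importable.

    from datetime import datetime, timedelta

    import pytz

    eastern = pytz.timezone("US/Eastern")
    fmt = "%Y-%m-%d %H:%M:%S %Z%z"

    # 2002-10-27 01:30 wall-clock time happened twice; is_dst picks the reading.
    ambiguous = datetime(2002, 10, 27, 1, 30)
    print(eastern.localize(ambiguous, is_dst=True).strftime(fmt))   # ... EDT-0400
    print(eastern.localize(ambiguous, is_dst=False).strftime(fmt))  # ... EST-0500

    # is_dst=None refuses to guess for ambiguous or non-existent wall-clock times.
    try:
        eastern.localize(ambiguous, is_dst=None)
    except pytz.AmbiguousTimeError:
        print("ambiguous:", ambiguous)

    # Local-time arithmetic across a DST transition needs normalize().
    utc_dt = pytz.utc.localize(datetime(2002, 10, 27, 6, 0))
    loc_dt = utc_dt.astimezone(eastern)  # 2002-10-27 01:00 EST-0500
    print(eastern.normalize(loc_dt - timedelta(minutes=10)).strftime(fmt))  # 01:50 EDT-0400
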
diff --git a/env/lib/python3.6/site-packages/pytz-2018.5.dist-info/METADATA b/env/lib/python3.6/site-packages/pytz-2018.5.dist-info/METADATA new file mode 100644 index 0000000..847e122 --- /dev/null +++ b/env/lib/python3.6/site-packages/pytz-2018.5.dist-info/METADATA @@ -0,0 +1,623 @@ +Metadata-Version: 2.0 +Name: pytz +Version: 2018.5 +Summary: World timezone definitions, modern and historical +Home-page: http://pythonhosted.org/pytz +Author: Stuart Bishop +Author-email: stuart@stuartbishop.net +Maintainer: Stuart Bishop +Maintainer-email: stuart@stuartbishop.net +License: MIT +Download-URL: https://pypi.org/project/pytz/ +Keywords: timezone,tzinfo,datetime,olson,time +Platform: Independent +Classifier: Development Status :: 6 - Mature +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Natural Language :: English +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.4 +Classifier: Programming Language :: Python :: 2.5 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.0 +Classifier: Programming Language :: Python :: 3.1 +Classifier: Programming Language :: Python :: 3.2 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Topic :: Software Development :: Libraries :: Python Modules + +pytz - World Timezone Definitions for Python +============================================ + +:Author: Stuart Bishop <stuart@stuartbishop.net> + +Introduction +~~~~~~~~~~~~ + +pytz brings the Olson tz database into Python. This library allows +accurate and cross platform timezone calculations using Python 2.4 +or higher. It also solves the issue of ambiguous times at the end +of daylight saving time, which you can read more about in the Python +Library Reference (``datetime.tzinfo``). + +Almost all of the Olson timezones are supported. + +.. note:: + + This library differs from the documented Python API for + tzinfo implementations; if you want to create local wallclock + times you need to use the ``localize()`` method documented in this + document. In addition, if you perform date arithmetic on local + times that cross DST boundaries, the result may be in an incorrect + timezone (ie. subtract 1 minute from 2002-10-27 1:00 EST and you get + 2002-10-27 0:59 EST instead of the correct 2002-10-27 1:59 EDT). A + ``normalize()`` method is provided to correct this. Unfortunately these + issues cannot be resolved without modifying the Python datetime + implementation (see PEP-431). + + +Installation +~~~~~~~~~~~~ + +This package can either be installed from a .egg file using setuptools, +or from the tarball using the standard Python distutils. 
+ +If you are installing from a tarball, run the following command as an +administrative user:: + + python setup.py install + +If you are installing using setuptools, you don't even need to download +anything as the latest version will be downloaded for you +from the Python package index:: + + easy_install --upgrade pytz + +If you already have the .egg file, you can use that too:: + + easy_install pytz-2008g-py2.6.egg + + +Example & Usage +~~~~~~~~~~~~~~~ + +Localized times and date arithmetic +----------------------------------- + +>>> from datetime import datetime, timedelta +>>> from pytz import timezone +>>> import pytz +>>> utc = pytz.utc +>>> utc.zone +'UTC' +>>> eastern = timezone('US/Eastern') +>>> eastern.zone +'US/Eastern' +>>> amsterdam = timezone('Europe/Amsterdam') +>>> fmt = '%Y-%m-%d %H:%M:%S %Z%z' + +This library only supports two ways of building a localized time. The +first is to use the ``localize()`` method provided by the pytz library. +This is used to localize a naive datetime (datetime with no timezone +information): + +>>> loc_dt = eastern.localize(datetime(2002, 10, 27, 6, 0, 0)) +>>> print(loc_dt.strftime(fmt)) +2002-10-27 06:00:00 EST-0500 + +The second way of building a localized time is by converting an existing +localized time using the standard ``astimezone()`` method: + +>>> ams_dt = loc_dt.astimezone(amsterdam) +>>> ams_dt.strftime(fmt) +'2002-10-27 12:00:00 CET+0100' + +Unfortunately using the tzinfo argument of the standard datetime +constructors ''does not work'' with pytz for many timezones. + +>>> datetime(2002, 10, 27, 12, 0, 0, tzinfo=amsterdam).strftime(fmt) +'2002-10-27 12:00:00 LMT+0020' + +It is safe for timezones without daylight saving transitions though, such +as UTC: + +>>> datetime(2002, 10, 27, 12, 0, 0, tzinfo=pytz.utc).strftime(fmt) +'2002-10-27 12:00:00 UTC+0000' + +The preferred way of dealing with times is to always work in UTC, +converting to localtime only when generating output to be read +by humans. + +>>> utc_dt = datetime(2002, 10, 27, 6, 0, 0, tzinfo=utc) +>>> loc_dt = utc_dt.astimezone(eastern) +>>> loc_dt.strftime(fmt) +'2002-10-27 01:00:00 EST-0500' + +This library also allows you to do date arithmetic using local +times, although it is more complicated than working in UTC as you +need to use the ``normalize()`` method to handle daylight saving time +and other timezone transitions. In this example, ``loc_dt`` is set +to the instant when daylight saving time ends in the US/Eastern +timezone. + +>>> before = loc_dt - timedelta(minutes=10) +>>> before.strftime(fmt) +'2002-10-27 00:50:00 EST-0500' +>>> eastern.normalize(before).strftime(fmt) +'2002-10-27 01:50:00 EDT-0400' +>>> after = eastern.normalize(before + timedelta(minutes=20)) +>>> after.strftime(fmt) +'2002-10-27 01:10:00 EST-0500' + +Creating local times is also tricky, and the reason why working with +local times is not recommended. Unfortunately, you cannot just pass +a ``tzinfo`` argument when constructing a datetime (see the next +section for more details) + +>>> dt = datetime(2002, 10, 27, 1, 30, 0) +>>> dt1 = eastern.localize(dt, is_dst=True) +>>> dt1.strftime(fmt) +'2002-10-27 01:30:00 EDT-0400' +>>> dt2 = eastern.localize(dt, is_dst=False) +>>> dt2.strftime(fmt) +'2002-10-27 01:30:00 EST-0500' + +Converting between timezones is more easily done, using the +standard astimezone method. 
+ +>>> utc_dt = utc.localize(datetime.utcfromtimestamp(1143408899)) +>>> utc_dt.strftime(fmt) +'2006-03-26 21:34:59 UTC+0000' +>>> au_tz = timezone('Australia/Sydney') +>>> au_dt = utc_dt.astimezone(au_tz) +>>> au_dt.strftime(fmt) +'2006-03-27 08:34:59 AEDT+1100' +>>> utc_dt2 = au_dt.astimezone(utc) +>>> utc_dt2.strftime(fmt) +'2006-03-26 21:34:59 UTC+0000' +>>> utc_dt == utc_dt2 +True + +You can take shortcuts when dealing with the UTC side of timezone +conversions. ``normalize()`` and ``localize()`` are not really +necessary when there are no daylight saving time transitions to +deal with. + +>>> utc_dt = datetime.utcfromtimestamp(1143408899).replace(tzinfo=utc) +>>> utc_dt.strftime(fmt) +'2006-03-26 21:34:59 UTC+0000' +>>> au_tz = timezone('Australia/Sydney') +>>> au_dt = au_tz.normalize(utc_dt.astimezone(au_tz)) +>>> au_dt.strftime(fmt) +'2006-03-27 08:34:59 AEDT+1100' +>>> utc_dt2 = au_dt.astimezone(utc) +>>> utc_dt2.strftime(fmt) +'2006-03-26 21:34:59 UTC+0000' + + +``tzinfo`` API +-------------- + +The ``tzinfo`` instances returned by the ``timezone()`` function have +been extended to cope with ambiguous times by adding an ``is_dst`` +parameter to the ``utcoffset()``, ``dst()`` && ``tzname()`` methods. + +>>> tz = timezone('America/St_Johns') + +>>> normal = datetime(2009, 9, 1) +>>> ambiguous = datetime(2009, 10, 31, 23, 30) + +The ``is_dst`` parameter is ignored for most timestamps. It is only used +during DST transition ambiguous periods to resolve that ambiguity. + +>>> tz.utcoffset(normal, is_dst=True) +datetime.timedelta(-1, 77400) +>>> tz.dst(normal, is_dst=True) +datetime.timedelta(0, 3600) +>>> tz.tzname(normal, is_dst=True) +'NDT' + +>>> tz.utcoffset(ambiguous, is_dst=True) +datetime.timedelta(-1, 77400) +>>> tz.dst(ambiguous, is_dst=True) +datetime.timedelta(0, 3600) +>>> tz.tzname(ambiguous, is_dst=True) +'NDT' + +>>> tz.utcoffset(normal, is_dst=False) +datetime.timedelta(-1, 77400) +>>> tz.dst(normal, is_dst=False) +datetime.timedelta(0, 3600) +>>> tz.tzname(normal, is_dst=False) +'NDT' + +>>> tz.utcoffset(ambiguous, is_dst=False) +datetime.timedelta(-1, 73800) +>>> tz.dst(ambiguous, is_dst=False) +datetime.timedelta(0) +>>> tz.tzname(ambiguous, is_dst=False) +'NST' + +If ``is_dst`` is not specified, ambiguous timestamps will raise +an ``pytz.exceptions.AmbiguousTimeError`` exception. + +>>> tz.utcoffset(normal) +datetime.timedelta(-1, 77400) +>>> tz.dst(normal) +datetime.timedelta(0, 3600) +>>> tz.tzname(normal) +'NDT' + +>>> import pytz.exceptions +>>> try: +... tz.utcoffset(ambiguous) +... except pytz.exceptions.AmbiguousTimeError: +... print('pytz.exceptions.AmbiguousTimeError: %s' % ambiguous) +pytz.exceptions.AmbiguousTimeError: 2009-10-31 23:30:00 +>>> try: +... tz.dst(ambiguous) +... except pytz.exceptions.AmbiguousTimeError: +... print('pytz.exceptions.AmbiguousTimeError: %s' % ambiguous) +pytz.exceptions.AmbiguousTimeError: 2009-10-31 23:30:00 +>>> try: +... tz.tzname(ambiguous) +... except pytz.exceptions.AmbiguousTimeError: +... print('pytz.exceptions.AmbiguousTimeError: %s' % ambiguous) +pytz.exceptions.AmbiguousTimeError: 2009-10-31 23:30:00 + + +Problems with Localtime +~~~~~~~~~~~~~~~~~~~~~~~ + +The major problem we have to deal with is that certain datetimes +may occur twice in a year. 
For example, in the US/Eastern timezone +on the last Sunday morning in October, the following sequence +happens: + + - 01:00 EDT occurs + - 1 hour later, instead of 2:00am the clock is turned back 1 hour + and 01:00 happens again (this time 01:00 EST) + +In fact, every instant between 01:00 and 02:00 occurs twice. This means +that if you try and create a time in the 'US/Eastern' timezone +the standard datetime syntax, there is no way to specify if you meant +before of after the end-of-daylight-saving-time transition. Using the +pytz custom syntax, the best you can do is make an educated guess: + +>>> loc_dt = eastern.localize(datetime(2002, 10, 27, 1, 30, 00)) +>>> loc_dt.strftime(fmt) +'2002-10-27 01:30:00 EST-0500' + +As you can see, the system has chosen one for you and there is a 50% +chance of it being out by one hour. For some applications, this does +not matter. However, if you are trying to schedule meetings with people +in different timezones or analyze log files it is not acceptable. + +The best and simplest solution is to stick with using UTC. The pytz +package encourages using UTC for internal timezone representation by +including a special UTC implementation based on the standard Python +reference implementation in the Python documentation. + +The UTC timezone unpickles to be the same instance, and pickles to a +smaller size than other pytz tzinfo instances. The UTC implementation +can be obtained as pytz.utc, pytz.UTC, or pytz.timezone('UTC'). + +>>> import pickle, pytz +>>> dt = datetime(2005, 3, 1, 14, 13, 21, tzinfo=utc) +>>> naive = dt.replace(tzinfo=None) +>>> p = pickle.dumps(dt, 1) +>>> naive_p = pickle.dumps(naive, 1) +>>> len(p) - len(naive_p) +17 +>>> new = pickle.loads(p) +>>> new == dt +True +>>> new is dt +False +>>> new.tzinfo is dt.tzinfo +True +>>> pytz.utc is pytz.UTC is pytz.timezone('UTC') +True + +Note that some other timezones are commonly thought of as the same (GMT, +Greenwich, Universal, etc.). The definition of UTC is distinct from these +other timezones, and they are not equivalent. For this reason, they will +not compare the same in Python. + +>>> utc == pytz.timezone('GMT') +False + +See the section `What is UTC`_, below. + +If you insist on working with local times, this library provides a +facility for constructing them unambiguously: + +>>> loc_dt = datetime(2002, 10, 27, 1, 30, 00) +>>> est_dt = eastern.localize(loc_dt, is_dst=True) +>>> edt_dt = eastern.localize(loc_dt, is_dst=False) +>>> print(est_dt.strftime(fmt) + ' / ' + edt_dt.strftime(fmt)) +2002-10-27 01:30:00 EDT-0400 / 2002-10-27 01:30:00 EST-0500 + +If you pass None as the is_dst flag to localize(), pytz will refuse to +guess and raise exceptions if you try to build ambiguous or non-existent +times. + +For example, 1:30am on 27th Oct 2002 happened twice in the US/Eastern +timezone when the clocks where put back at the end of Daylight Saving +Time: + +>>> dt = datetime(2002, 10, 27, 1, 30, 00) +>>> try: +... eastern.localize(dt, is_dst=None) +... except pytz.exceptions.AmbiguousTimeError: +... print('pytz.exceptions.AmbiguousTimeError: %s' % dt) +pytz.exceptions.AmbiguousTimeError: 2002-10-27 01:30:00 + +Similarly, 2:30am on 7th April 2002 never happened at all in the +US/Eastern timezone, as the clocks where put forward at 2:00am skipping +the entire hour: + +>>> dt = datetime(2002, 4, 7, 2, 30, 00) +>>> try: +... eastern.localize(dt, is_dst=None) +... except pytz.exceptions.NonExistentTimeError: +... 
print('pytz.exceptions.NonExistentTimeError: %s' % dt) +pytz.exceptions.NonExistentTimeError: 2002-04-07 02:30:00 + +Both of these exceptions share a common base class to make error handling +easier: + +>>> isinstance(pytz.AmbiguousTimeError(), pytz.InvalidTimeError) +True +>>> isinstance(pytz.NonExistentTimeError(), pytz.InvalidTimeError) +True + + +A special case is where countries change their timezone definitions +with no daylight savings time switch. For example, in 1915 Warsaw +switched from Warsaw time to Central European time with no daylight savings +transition. So at the stroke of midnight on August 5th 1915 the clocks +were wound back 24 minutes creating an ambiguous time period that cannot +be specified without referring to the timezone abbreviation or the +actual UTC offset. In this case midnight happened twice, neither time +during a daylight saving time period. pytz handles this transition by +treating the ambiguous period before the switch as daylight savings +time, and the ambiguous period after as standard time. + + +>>> warsaw = pytz.timezone('Europe/Warsaw') +>>> amb_dt1 = warsaw.localize(datetime(1915, 8, 4, 23, 59, 59), is_dst=True) +>>> amb_dt1.strftime(fmt) +'1915-08-04 23:59:59 WMT+0124' +>>> amb_dt2 = warsaw.localize(datetime(1915, 8, 4, 23, 59, 59), is_dst=False) +>>> amb_dt2.strftime(fmt) +'1915-08-04 23:59:59 CET+0100' +>>> switch_dt = warsaw.localize(datetime(1915, 8, 5, 00, 00, 00), is_dst=False) +>>> switch_dt.strftime(fmt) +'1915-08-05 00:00:00 CET+0100' +>>> str(switch_dt - amb_dt1) +'0:24:01' +>>> str(switch_dt - amb_dt2) +'0:00:01' + +The best way of creating a time during an ambiguous time period is +by converting from another timezone such as UTC: + +>>> utc_dt = datetime(1915, 8, 4, 22, 36, tzinfo=pytz.utc) +>>> utc_dt.astimezone(warsaw).strftime(fmt) +'1915-08-04 23:36:00 CET+0100' + +The standard Python way of handling all these ambiguities is not to +handle them, such as demonstrated in this example using the US/Eastern +timezone definition from the Python documentation (Note that this +implementation only works for dates between 1987 and 2006 - it is +included for tests only!): + +>>> from pytz.reference import Eastern # pytz.reference only for tests +>>> dt = datetime(2002, 10, 27, 0, 30, tzinfo=Eastern) +>>> str(dt) +'2002-10-27 00:30:00-04:00' +>>> str(dt + timedelta(hours=1)) +'2002-10-27 01:30:00-05:00' +>>> str(dt + timedelta(hours=2)) +'2002-10-27 02:30:00-05:00' +>>> str(dt + timedelta(hours=3)) +'2002-10-27 03:30:00-05:00' + +Notice the first two results? At first glance you might think they are +correct, but taking the UTC offset into account you find that they are +actually two hours appart instead of the 1 hour we asked for. + +>>> from pytz.reference import UTC # pytz.reference only for tests +>>> str(dt.astimezone(UTC)) +'2002-10-27 04:30:00+00:00' +>>> str((dt + timedelta(hours=1)).astimezone(UTC)) +'2002-10-27 06:30:00+00:00' + + +Country Information +~~~~~~~~~~~~~~~~~~~ + +A mechanism is provided to access the timezones commonly in use +for a particular country, looked up using the ISO 3166 country code. 
+It returns a list of strings that can be used to retrieve the relevant +tzinfo instance using ``pytz.timezone()``: + +>>> print(' '.join(pytz.country_timezones['nz'])) +Pacific/Auckland Pacific/Chatham + +The Olson database comes with a ISO 3166 country code to English country +name mapping that pytz exposes as a dictionary: + +>>> print(pytz.country_names['nz']) +New Zealand + + +What is UTC +~~~~~~~~~~~ + +'UTC' is `Coordinated Universal Time`_. It is a successor to, but distinct +from, Greenwich Mean Time (GMT) and the various definitions of Universal +Time. UTC is now the worldwide standard for regulating clocks and time +measurement. + +All other timezones are defined relative to UTC, and include offsets like +UTC+0800 - hours to add or subtract from UTC to derive the local time. No +daylight saving time occurs in UTC, making it a useful timezone to perform +date arithmetic without worrying about the confusion and ambiguities caused +by daylight saving time transitions, your country changing its timezone, or +mobile computers that roam through multiple timezones. + +.. _Coordinated Universal Time: https://en.wikipedia.org/wiki/Coordinated_Universal_Time + + +Helpers +~~~~~~~ + +There are two lists of timezones provided. + +``all_timezones`` is the exhaustive list of the timezone names that can +be used. + +>>> from pytz import all_timezones +>>> len(all_timezones) >= 500 +True +>>> 'Etc/Greenwich' in all_timezones +True + +``common_timezones`` is a list of useful, current timezones. It doesn't +contain deprecated zones or historical zones, except for a few I've +deemed in common usage, such as US/Eastern (open a bug report if you +think other timezones are deserving of being included here). It is also +a sequence of strings. + +>>> from pytz import common_timezones +>>> len(common_timezones) < len(all_timezones) +True +>>> 'Etc/Greenwich' in common_timezones +False +>>> 'Australia/Melbourne' in common_timezones +True +>>> 'US/Eastern' in common_timezones +True +>>> 'Canada/Eastern' in common_timezones +True +>>> 'Australia/Yancowinna' in all_timezones +True +>>> 'Australia/Yancowinna' in common_timezones +False + +Both ``common_timezones`` and ``all_timezones`` are alphabetically +sorted: + +>>> common_timezones_dupe = common_timezones[:] +>>> common_timezones_dupe.sort() +>>> common_timezones == common_timezones_dupe +True +>>> all_timezones_dupe = all_timezones[:] +>>> all_timezones_dupe.sort() +>>> all_timezones == all_timezones_dupe +True + +``all_timezones`` and ``common_timezones`` are also available as sets. + +>>> from pytz import all_timezones_set, common_timezones_set +>>> 'US/Eastern' in all_timezones_set +True +>>> 'US/Eastern' in common_timezones_set +True +>>> 'Australia/Victoria' in common_timezones_set +False + +You can also retrieve lists of timezones used by particular countries +using the ``country_timezones()`` function. It requires an ISO-3166 +two letter country code. + +>>> from pytz import country_timezones +>>> print(' '.join(country_timezones('ch'))) +Europe/Zurich +>>> print(' '.join(country_timezones('CH'))) +Europe/Zurich + + +Internationalization - i18n/l10n +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Pytz is an interface to the IANA database, which uses ASCII names. The `Unicode Consortium's Unicode Locales (CLDR) <http://cldr.unicode.org>`_ +project provides translations. Thomas Khyn's +`l18n <https://pypi.org/project/l18n/>`_ package can be used to access +these translations from Python. + + +License +~~~~~~~ + +MIT license. 
+ +This code is also available as part of Zope 3 under the Zope Public +License, Version 2.1 (ZPL). + +I'm happy to relicense this code if necessary for inclusion in other +open source projects. + + +Latest Versions +~~~~~~~~~~~~~~~ + +This package will be updated after releases of the Olson timezone +database. The latest version can be downloaded from the `Python Package +Index <https://pypi.org/project/pytz/>`_. The code that is used +to generate this distribution is hosted on launchpad.net and available +using git:: + + git clone https://git.launchpad.net/pytz + +A mirror on github is also available at https://github.com/stub42/pytz + +Announcements of new releases are made on +`Launchpad <https://launchpad.net/pytz>`_, and the +`Atom feed <http://feeds.launchpad.net/pytz/announcements.atom>`_ +hosted there. + + +Bugs, Feature Requests & Patches +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Bugs can be reported using `Launchpad <https://bugs.launchpad.net/pytz>`__. + + +Issues & Limitations +~~~~~~~~~~~~~~~~~~~~ + +- Offsets from UTC are rounded to the nearest whole minute, so timezones + such as Europe/Amsterdam pre 1937 will be up to 30 seconds out. This + is a limitation of the Python datetime library. + +- If you think a timezone definition is incorrect, I probably can't fix + it. pytz is a direct translation of the Olson timezone database, and + changes to the timezone definitions need to be made to this source. + If you find errors they should be reported to the time zone mailing + list, linked from http://www.iana.org/time-zones. + + +Further Reading +~~~~~~~~~~~~~~~ + +More info than you want to know about timezones: +http://www.twinsun.com/tz/tz-link.htm + + +Contact +~~~~~~~ + +Stuart Bishop <stuart@stuartbishop.net> + + + + diff --git a/env/lib/python3.6/site-packages/pytz-2018.5.dist-info/RECORD b/env/lib/python3.6/site-packages/pytz-2018.5.dist-info/RECORD new file mode 100644 index 0000000..5fe4746 --- /dev/null +++ b/env/lib/python3.6/site-packages/pytz-2018.5.dist-info/RECORD @@ -0,0 +1,619 @@ +pytz/__init__.py,sha256=SWf8oUKu_S2NiDDN7Xxf7TUlMIahDDvv_HiC61e0__E,34156 +pytz/exceptions.py,sha256=_GCDPHpBk2r-CQIg3Kcyw8RCsLm2teJdnzT85bl5VsM,1329 +pytz/lazy.py,sha256=toeR5uDWKBj6ezsUZ4elNP6CEMtK7CO2jS9A30nsFbo,5404 +pytz/reference.py,sha256=zUtCki7JFEmrzrjNsfMD7YL0lWDxynKc1Ubo4iXSs74,3778 +pytz/tzfile.py,sha256=g2CMhXZ1PX2slgg5_Kk9TvmIkVKeOjbuONHEfZP6jMk,4745 +pytz/tzinfo.py,sha256=-5UjW-yqHbtO5NtSaWope7EbSdf2oTES26Kdlxjqdk0,19272 +pytz/zoneinfo/CET,sha256=PAApBF9vgLxahPG7jtNiMEVHWcVFeOuajBldFPRCITw,2102 +pytz/zoneinfo/CST6CDT,sha256=ROi1aeYAJ2R_mAGjPQtDvgEGptP2zQWWd-DtZcm4uDE,2294 +pytz/zoneinfo/Cuba,sha256=eHH4daiBn0FcKSUZ2xWQVWoNwabOaRv0969V5nFvuJQ,2437 +pytz/zoneinfo/EET,sha256=C_bSZpq0XBOhyb5Hw1GXL-tnF3C5CmHZ0xP8YLchsrQ,1876 +pytz/zoneinfo/EST,sha256=yedfESpJj_ADRFUcPFxKYr0V1cIY7pUfQ2OrIYxdiOs,127 +pytz/zoneinfo/EST5EDT,sha256=ec4n4DonUgkeiknMfnzMmsIC1sUt1dIkVx_oImL77sg,2294 +pytz/zoneinfo/Egypt,sha256=J5u-H6YtpnOHxjWTtgu2VSUu9cjxic9DRpCHdArytPw,1972 +pytz/zoneinfo/Eire,sha256=RL3F1j5bFmOGdJHMDTC4GCD8hpStD9XvUAuorGt_ul8,3531 +pytz/zoneinfo/Factory,sha256=RsnkmUKmJyK5vxc3_LgTDdFgk4-qfANYYu8CY3w3qh0,148 +pytz/zoneinfo/GB,sha256=sUxIYBnjyyWc-CNaDWpLw_9s-nJqFl8eot9APIrjG4Y,3687 +pytz/zoneinfo/GB-Eire,sha256=sUxIYBnjyyWc-CNaDWpLw_9s-nJqFl8eot9APIrjG4Y,3687 +pytz/zoneinfo/GMT,sha256=17OYeQlBNdE-_SgpN2kLQ_SLtTWXzj54aX9I3M6us-w,127 +pytz/zoneinfo/GMT+0,sha256=17OYeQlBNdE-_SgpN2kLQ_SLtTWXzj54aX9I3M6us-w,127 +pytz/zoneinfo/GMT-0,sha256=17OYeQlBNdE-_SgpN2kLQ_SLtTWXzj54aX9I3M6us-w,127 
+pytz/zoneinfo/GMT0,sha256=17OYeQlBNdE-_SgpN2kLQ_SLtTWXzj54aX9I3M6us-w,127 +pytz/zoneinfo/Greenwich,sha256=17OYeQlBNdE-_SgpN2kLQ_SLtTWXzj54aX9I3M6us-w,127 +pytz/zoneinfo/HST,sha256=RASL999hvc9FlywTQmsDnw002AlH1gomAxg7O2vkAn8,128 +pytz/zoneinfo/Hongkong,sha256=l4b75K2bgg9hpWZLO4XdYBqm6RUsk_9OFL1U7_348dU,1189 +pytz/zoneinfo/Iceland,sha256=nNzqaqHu2CdtP2Yg4MD_rpz8w3IsRD6mujkUeXXq-qM,1188 +pytz/zoneinfo/Iran,sha256=53RfdtZdlFhrpyM-e_9UkRiHSA-l4j8cU_5eV0P0seM,1718 +pytz/zoneinfo/Israel,sha256=YAGe4NHetuaZShpclRhh4dufmSpcAn-8TjYX-ULAIc8,2265 +pytz/zoneinfo/Jamaica,sha256=rduYyvNFm7ddbhTtdqpm5kK-rS0Gfn_oGBSk8CzxNQM,507 +pytz/zoneinfo/Japan,sha256=TVmM6j_lW0SvvZxvYD90iWDkqYBuHqHof8Lyu5ghQ3c,318 +pytz/zoneinfo/Kwajalein,sha256=CQLqlpOXlUrsuxL6X5HToAlr1EO7rqNqlu5Lk9lXuiU,259 +pytz/zoneinfo/Libya,sha256=j_U_cHKGP7VvHnEzk5K23n5QZ176QzO54DK2d8nJpSc,655 +pytz/zoneinfo/MET,sha256=H_8zGkQU6YCX0zvsGpu_KhVdmRtXrNG7TBH4VZ-05RQ,2102 +pytz/zoneinfo/MST,sha256=-PthAFYIe7PKjs9c3LUwXBZStkn95RL2BrnuGzVW-54,127 +pytz/zoneinfo/MST7MDT,sha256=hUUtAxUmYhF46bJMka9pt-zDDfRwNmaTeJVhNcTnNdA,2294 +pytz/zoneinfo/NZ,sha256=17UXU4eseOKfe5Ah5BFRJ1a-KD7T0YGZQu9dRezzOOQ,2460 +pytz/zoneinfo/NZ-CHAT,sha256=WAGfL6op3H23CBKTIwpyh2kFTdfA0PqelujEKZ5xMU0,2087 +pytz/zoneinfo/Navajo,sha256=9N88x0x50HCiWnkndE06QioF2GKpojShIQXFyWTvsi0,2453 +pytz/zoneinfo/PRC,sha256=lTYiu9frnrqMO56M1dXsmM6moIWp3rHEPknoiaFU00Q,414 +pytz/zoneinfo/PST8PDT,sha256=TY5pvUPo1x8PWOEVWTgU1owaaqRBJVsXs-mpKp1u_EY,2294 +pytz/zoneinfo/Poland,sha256=aOdJPBygUOQTQGKnSqSk_DIVnAQrTJ2NQMi_ydJzxfo,2705 +pytz/zoneinfo/Portugal,sha256=S75l1P8zlP-htK5v4iluMz9VutCuScpnF7YHblNJDqI,3469 +pytz/zoneinfo/ROC,sha256=Jc_QK8hHvcsR5YZEW6iGp2MV8fm-hvfnSUSm6OhkRUM,790 +pytz/zoneinfo/ROK,sha256=It6UZCqXimr0Y9sXXp-ToykuioGnSouSmJvCF0N57dY,531 +pytz/zoneinfo/Singapore,sha256=5pKf3kP_xIu6xAgbMbv3_UJkOzwm-t8yK96t6yPPx0g,424 +pytz/zoneinfo/Turkey,sha256=ldOJ2-YltB_ZTylyeN94QXWsuqtz3jhrzcJAcT43kVI,2166 +pytz/zoneinfo/UCT,sha256=s7dihjysJWmqM_faGSWEr9mWXO63Jj_tSth8GzXkx9g,127 +pytz/zoneinfo/UTC,sha256=PHGzWL6B4TscJOGZoRn9AB2825Dtx9RMLHrhdTIaAhU,127 +pytz/zoneinfo/Universal,sha256=PHGzWL6B4TscJOGZoRn9AB2825Dtx9RMLHrhdTIaAhU,127 +pytz/zoneinfo/W-SU,sha256=AtVVFtD51JeZgmC08Smu5Zhnt3qSC6LKDFixXshYjno,1544 +pytz/zoneinfo/WET,sha256=5efEYxKV5_FwheNTD5n8KYTMfkvbmgfbdwLejBjCqrE,1873 +pytz/zoneinfo/Zulu,sha256=PHGzWL6B4TscJOGZoRn9AB2825Dtx9RMLHrhdTIaAhU,127 +pytz/zoneinfo/iso3166.tab,sha256=_ovReSwOfsa0JQfFijukM-vdrb9MMDHqym2pxB3vN9I,4445 +pytz/zoneinfo/leapseconds,sha256=_UTC3ZyD4sru4wvZDMAfpTjmvCbbdW4W14aSxDcLAgU,2196 +pytz/zoneinfo/posixrules,sha256=X6bczDAzUuEZXENIsYnzCFAU2KVqGXbI6KMr1P7baf0,3545 +pytz/zoneinfo/tzdata.zi,sha256=1rIZCxXS5MTRUIbA4KN5HTjPrfUvtdXFlSgS4RjUj7c,106910 +pytz/zoneinfo/zone.tab,sha256=FM37LF4_cgHYLDaNOsIEo8-55Q2J3RBtQU4wLbV8FB4,19165 +pytz/zoneinfo/zone1970.tab,sha256=MSS5l7kYP3ApWFaQrjyzGv4eJxpLvb8F7wAtI9A3YLI,17781 +pytz/zoneinfo/Africa/Abidjan,sha256=1d7RJt-PaTzh_4PoWqTUQYXCve99ofkVshT1Pe_97kc,170 +pytz/zoneinfo/Africa/Accra,sha256=6gqJ7DwlM5D3RhB8PqaTkicNjfDcLSrtbyP0z_hSv5E,842 +pytz/zoneinfo/Africa/Addis_Ababa,sha256=8UO8uDuAvBrQu7itc2yFLmK762sxNEEr-ndoRmPtIio,285 +pytz/zoneinfo/Africa/Algiers,sha256=13fo7sueviaWkjSdqmtFskY-SjwtEHzNE5tiBsT6c8w,760 +pytz/zoneinfo/Africa/Asmara,sha256=8UO8uDuAvBrQu7itc2yFLmK762sxNEEr-ndoRmPtIio,285 +pytz/zoneinfo/Africa/Asmera,sha256=8UO8uDuAvBrQu7itc2yFLmK762sxNEEr-ndoRmPtIio,285 +pytz/zoneinfo/Africa/Bamako,sha256=1d7RJt-PaTzh_4PoWqTUQYXCve99ofkVshT1Pe_97kc,170 
+pytz/zoneinfo/Africa/Bangui,sha256=5AwzhvOlzYigPIEfow7Kw08xNo-WCueeSpDeKVxbGTg,171 +pytz/zoneinfo/Africa/Banjul,sha256=1d7RJt-PaTzh_4PoWqTUQYXCve99ofkVshT1Pe_97kc,170 +pytz/zoneinfo/Africa/Bissau,sha256=jdrROtwzze6Or1XPox78r9eTBa6N_MO-Bv94KZ8p8dg,208 +pytz/zoneinfo/Africa/Blantyre,sha256=PX5tF8q9qhgUpW3d7AJofhCHvDM0_pIK0miokr8IBRE,171 +pytz/zoneinfo/Africa/Brazzaville,sha256=5AwzhvOlzYigPIEfow7Kw08xNo-WCueeSpDeKVxbGTg,171 +pytz/zoneinfo/Africa/Bujumbura,sha256=PX5tF8q9qhgUpW3d7AJofhCHvDM0_pIK0miokr8IBRE,171 +pytz/zoneinfo/Africa/Cairo,sha256=J5u-H6YtpnOHxjWTtgu2VSUu9cjxic9DRpCHdArytPw,1972 +pytz/zoneinfo/Africa/Casablanca,sha256=es_hVvjPnP-Xq4-b5rgHkFiY0nZMtd9nFDRCDRfobWM,1643 +pytz/zoneinfo/Africa/Ceuta,sha256=OH0sNUEX_ioi9vO0KeQZOjMdPpPXAHpVxQs7nu3uY94,2059 +pytz/zoneinfo/Africa/Conakry,sha256=1d7RJt-PaTzh_4PoWqTUQYXCve99ofkVshT1Pe_97kc,170 +pytz/zoneinfo/Africa/Dakar,sha256=1d7RJt-PaTzh_4PoWqTUQYXCve99ofkVshT1Pe_97kc,170 +pytz/zoneinfo/Africa/Dar_es_Salaam,sha256=8UO8uDuAvBrQu7itc2yFLmK762sxNEEr-ndoRmPtIio,285 +pytz/zoneinfo/Africa/Djibouti,sha256=8UO8uDuAvBrQu7itc2yFLmK762sxNEEr-ndoRmPtIio,285 +pytz/zoneinfo/Africa/Douala,sha256=5AwzhvOlzYigPIEfow7Kw08xNo-WCueeSpDeKVxbGTg,171 +pytz/zoneinfo/Africa/El_Aaiun,sha256=y9Km_PfK3OXHdW4-AawIfk2KoAJcgxWbLwId-cAHdxI,1473 +pytz/zoneinfo/Africa/Freetown,sha256=1d7RJt-PaTzh_4PoWqTUQYXCve99ofkVshT1Pe_97kc,170 +pytz/zoneinfo/Africa/Gaborone,sha256=PX5tF8q9qhgUpW3d7AJofhCHvDM0_pIK0miokr8IBRE,171 +pytz/zoneinfo/Africa/Harare,sha256=PX5tF8q9qhgUpW3d7AJofhCHvDM0_pIK0miokr8IBRE,171 +pytz/zoneinfo/Africa/Johannesburg,sha256=_OxCRwkZBdiKC4aejlx-5rz7p9tsMQFluqlQeLC-Ma8,271 +pytz/zoneinfo/Africa/Juba,sha256=c9mGxwFzx2PpsmLb82cSC8Kk9j8AbB1BLqmtq3fgnaM,683 +pytz/zoneinfo/Africa/Kampala,sha256=8UO8uDuAvBrQu7itc2yFLmK762sxNEEr-ndoRmPtIio,285 +pytz/zoneinfo/Africa/Khartoum,sha256=Ulapb3g4LoLbgK2FkSQMOIb8WPmoP-lFBuOhkvvPL04,713 +pytz/zoneinfo/Africa/Kigali,sha256=PX5tF8q9qhgUpW3d7AJofhCHvDM0_pIK0miokr8IBRE,171 +pytz/zoneinfo/Africa/Kinshasa,sha256=5AwzhvOlzYigPIEfow7Kw08xNo-WCueeSpDeKVxbGTg,171 +pytz/zoneinfo/Africa/Lagos,sha256=5AwzhvOlzYigPIEfow7Kw08xNo-WCueeSpDeKVxbGTg,171 +pytz/zoneinfo/Africa/Libreville,sha256=5AwzhvOlzYigPIEfow7Kw08xNo-WCueeSpDeKVxbGTg,171 +pytz/zoneinfo/Africa/Lome,sha256=1d7RJt-PaTzh_4PoWqTUQYXCve99ofkVshT1Pe_97kc,170 +pytz/zoneinfo/Africa/Luanda,sha256=5AwzhvOlzYigPIEfow7Kw08xNo-WCueeSpDeKVxbGTg,171 +pytz/zoneinfo/Africa/Lubumbashi,sha256=PX5tF8q9qhgUpW3d7AJofhCHvDM0_pIK0miokr8IBRE,171 +pytz/zoneinfo/Africa/Lusaka,sha256=PX5tF8q9qhgUpW3d7AJofhCHvDM0_pIK0miokr8IBRE,171 +pytz/zoneinfo/Africa/Malabo,sha256=5AwzhvOlzYigPIEfow7Kw08xNo-WCueeSpDeKVxbGTg,171 +pytz/zoneinfo/Africa/Maputo,sha256=PX5tF8q9qhgUpW3d7AJofhCHvDM0_pIK0miokr8IBRE,171 +pytz/zoneinfo/Africa/Maseru,sha256=_OxCRwkZBdiKC4aejlx-5rz7p9tsMQFluqlQeLC-Ma8,271 +pytz/zoneinfo/Africa/Mbabane,sha256=_OxCRwkZBdiKC4aejlx-5rz7p9tsMQFluqlQeLC-Ma8,271 +pytz/zoneinfo/Africa/Mogadishu,sha256=8UO8uDuAvBrQu7itc2yFLmK762sxNEEr-ndoRmPtIio,285 +pytz/zoneinfo/Africa/Monrovia,sha256=P5ZyyYmDr1lbPGJ0z4E1coyIFaT5yY_7oENwdgnl0SI,233 +pytz/zoneinfo/Africa/Nairobi,sha256=8UO8uDuAvBrQu7itc2yFLmK762sxNEEr-ndoRmPtIio,285 +pytz/zoneinfo/Africa/Ndjamena,sha256=sTkcjt0js_c-C_rPi4eIAcWCBspCNJwwsi_Lfo0T3jo,225 +pytz/zoneinfo/Africa/Niamey,sha256=5AwzhvOlzYigPIEfow7Kw08xNo-WCueeSpDeKVxbGTg,171 +pytz/zoneinfo/Africa/Nouakchott,sha256=1d7RJt-PaTzh_4PoWqTUQYXCve99ofkVshT1Pe_97kc,170 +pytz/zoneinfo/Africa/Ouagadougou,sha256=1d7RJt-PaTzh_4PoWqTUQYXCve99ofkVshT1Pe_97kc,170 
+pytz/zoneinfo/Africa/Porto-Novo,sha256=5AwzhvOlzYigPIEfow7Kw08xNo-WCueeSpDeKVxbGTg,171 +pytz/zoneinfo/Africa/Sao_Tome,sha256=jZzEQajLff678jUQ26hRC6i3osrHZhsMzKs2hVXYZb8,234 +pytz/zoneinfo/Africa/Timbuktu,sha256=1d7RJt-PaTzh_4PoWqTUQYXCve99ofkVshT1Pe_97kc,170 +pytz/zoneinfo/Africa/Tripoli,sha256=j_U_cHKGP7VvHnEzk5K23n5QZ176QzO54DK2d8nJpSc,655 +pytz/zoneinfo/Africa/Tunis,sha256=7sw0Q20d2WxJ1rZx7WG8WUVI0oCpZ1N6llOEG1N6mpI,710 +pytz/zoneinfo/Africa/Windhoek,sha256=9tNzU8PPArsblQzcHMAk6YSaN0UyqPfk7guwC91LarE,988 +pytz/zoneinfo/America/Adak,sha256=xFyU0xZBPI9mav9l7R-Den4tOSJi3jHOWfrC6Woe3IE,2365 +pytz/zoneinfo/America/Anchorage,sha256=9d8Kb3-dQ8u9PnTTOiP-aGCA61WWX12SRrboWbPbnRg,2380 +pytz/zoneinfo/America/Anguilla,sha256=smWcJn91VcBkBQVmAjTL4Nf-6tOl4p9BJy4oodfRiWI,170 +pytz/zoneinfo/America/Antigua,sha256=smWcJn91VcBkBQVmAjTL4Nf-6tOl4p9BJy4oodfRiWI,170 +pytz/zoneinfo/America/Araguaina,sha256=-3_i0G6O5cXZqKVoyMs377sAhoJPONG8TYUF-WO1lw0,910 +pytz/zoneinfo/America/Aruba,sha256=Aj2HeTLzXYiXcvVh954mbIVBuYTgzivSV3I6r8fYg8E,212 +pytz/zoneinfo/America/Asuncion,sha256=Hin3zcUwQZrTHkr8j8Kt9A2Vd_VeIXSjqCNYoCd4DKI,2077 +pytz/zoneinfo/America/Atikokan,sha256=wwImtHK1B7WjC2lVfVbyT8yOlGcRDk0-wVwsUd5dJFw,345 +pytz/zoneinfo/America/Atka,sha256=xFyU0xZBPI9mav9l7R-Den4tOSJi3jHOWfrC6Woe3IE,2365 +pytz/zoneinfo/America/Bahia,sha256=LhwbXiV54VpiO_03dNtwOkm5N3kOaLRKa5mjCMbsumY,1050 +pytz/zoneinfo/America/Bahia_Banderas,sha256=nQ4Nn2FoY1pfmFUKFw1COFT_W2Ys4Nh0w8uncZm_HL0,1588 +pytz/zoneinfo/America/Barbados,sha256=mxp4V6Aa4qOumlHgtAz77ZG6xGhXntPAi1RGYna7sfo,344 +pytz/zoneinfo/America/Belem,sha256=hHSfYHXeWdxivWk0MZSAbP1VJkG5Eqb46WyDyZkUoYc,602 +pytz/zoneinfo/America/Belize,sha256=605XqMZX0MJT704KWvEYySjm5YdVZAgacUVjrd_Usxo,978 +pytz/zoneinfo/America/Blanc-Sablon,sha256=x9ODv7foUzEDD4CRqQVaX0JONnRAfKDHbM4GtaAxb9s,307 +pytz/zoneinfo/America/Boa_Vista,sha256=xXpj8iKAwsRlh0FJVu_I_KrcNu1CJYm0iSncx6tPZoA,658 +pytz/zoneinfo/America/Bogota,sha256=NhHeNPdl8tZewFk-ecZ43jYJJUmJhoDcRjm4xo9xN7o,271 +pytz/zoneinfo/America/Boise,sha256=MzU-8FzN2n3r51fPhl7nvXgDHzjGeXuPv8EfkBD1WhA,2403 +pytz/zoneinfo/America/Buenos_Aires,sha256=hBubypR_Ks2a3Gy1wQFxIE7s7B5G5gQmVPNVyJ3rxD8,1109 +pytz/zoneinfo/America/Cambridge_Bay,sha256=CurtWxBO6ZqxOp9bWnqvf2yaf1m979FdPJlRVRx7X28,2098 +pytz/zoneinfo/America/Campo_Grande,sha256=72crPAY39eZ3MH2idZvCYa-k8H5S_kUZf4WyEfi7k2Q,2016 +pytz/zoneinfo/America/Cancun,sha256=0iMWhz8wl5nG-X_vuKCoiX0N9Os3b4S7C3FgLsJA0E8,816 +pytz/zoneinfo/America/Caracas,sha256=lOjvrojgltbdCr2iZh2CbABPDFh_l3gssvkR7XyULxo,289 +pytz/zoneinfo/America/Catamarca,sha256=6ESo80xxwtBL_axrEekTtfIOqf3bXRRUM9CDEIfxxdY,1109 +pytz/zoneinfo/America/Cayenne,sha256=r43odEf3CTdZpZXMPUA-X6e1H8NSCVnE2lSVb_ushWo,224 +pytz/zoneinfo/America/Cayman,sha256=_E-7oUZToxhvPFtxn3ub99ilgkQBBkz21QggVZLlWp8,203 +pytz/zoneinfo/America/Chicago,sha256=FD8puVcXOkYAgYcjCjgSW9OgOz28ug3B0bhmEzH3FpM,3585 +pytz/zoneinfo/America/Chihuahua,sha256=p9FQ5xbbWxyWuw5QJSuAMG4dnovMK_kKOucHGQbnFRM,1522 +pytz/zoneinfo/America/Coral_Harbour,sha256=wwImtHK1B7WjC2lVfVbyT8yOlGcRDk0-wVwsUd5dJFw,345 +pytz/zoneinfo/America/Cordoba,sha256=JUow-bmwBVg1DQwfQKHZyHOARbt_6I5WyBu7n70WECY,1109 +pytz/zoneinfo/America/Costa_Rica,sha256=Tm_ymnduBTImv1kOvnNIlvkVDWkHTyKhahxxmaFITsU,341 +pytz/zoneinfo/America/Creston,sha256=l7dL7sNxS-UYIZ8kUzzCHt-bU9AfSreSpv4PiJc0Ses,233 +pytz/zoneinfo/America/Cuiaba,sha256=HfZRqJDrg60x5QP6EGl1Vfc1l0rkUBtIc11nzd9fSis,1988 +pytz/zoneinfo/America/Curacao,sha256=Aj2HeTLzXYiXcvVh954mbIVBuYTgzivSV3I6r8fYg8E,212 
+pytz/zoneinfo/America/Danmarkshavn,sha256=CH45zW8QtpRLaLHeVXKJ7zN2lGfxspgGuWoWz45Djm4,712 +pytz/zoneinfo/America/Dawson,sha256=LQG0thFN-LDtiqZGVPd3hC8gcpgY8VZqXzQdc-9wk9w,2093 +pytz/zoneinfo/America/Dawson_Creek,sha256=KdAkW8BNrcuxlqO2qBus4WlkURZqdBfVuyqGEPN-xbo,1059 +pytz/zoneinfo/America/Denver,sha256=9N88x0x50HCiWnkndE06QioF2GKpojShIQXFyWTvsi0,2453 +pytz/zoneinfo/America/Detroit,sha256=2QN63tOQ0w4uMPIwIGKtYEJlGyYTkocu93LK1a-u2So,2188 +pytz/zoneinfo/America/Dominica,sha256=smWcJn91VcBkBQVmAjTL4Nf-6tOl4p9BJy4oodfRiWI,170 +pytz/zoneinfo/America/Edmonton,sha256=ZnjCP4ZXO2yV5sdIoxeqjOkPaGNrza0Y2wgecJhvtU4,2402 +pytz/zoneinfo/America/Eirunepe,sha256=B3YSePXFiGfGRWJcm4GeoJ5HA9dlZOIxl55I1tLgiBo,690 +pytz/zoneinfo/America/El_Salvador,sha256=YyFbIToxUFv9VF_VKxEhTLYU8T8PVZEfQU7bRChuf4o,250 +pytz/zoneinfo/America/Ensenada,sha256=2Ce5W0-ha4xW2aFjY0HJESZX5WeuhLN6m_yhM64xurs,2356 +pytz/zoneinfo/America/Fort_Nelson,sha256=586aAc3TE9IDUhEkMDQcJi4bkBgt5JD8lcEy2qwRjOc,2249 +pytz/zoneinfo/America/Fort_Wayne,sha256=VcLz_rJB-IQ16YduduLGnd_Q39NqJztVH_SA4s-tmf4,1675 +pytz/zoneinfo/America/Fortaleza,sha256=yjzWzdTsX5RXQs1yqR5Tl1bgrS6sPfpir8ITlwYe6pk,742 +pytz/zoneinfo/America/Glace_Bay,sha256=5q8k47nxJAq7hhj6wybuOh7MzSXS-kk2sqKLCwiA5VA,2206 +pytz/zoneinfo/America/Godthab,sha256=NWzS1OdMlYYit3OFrgUJteqH5pHyEgcjD2_h-mfiIK8,1892 +pytz/zoneinfo/America/Goose_Bay,sha256=8MBsahhBzcN7-zEcEcq6HJdNDWwnclwEtpZX58oRKkk,3219 +pytz/zoneinfo/America/Grand_Turk,sha256=kISPq7i8_btOZvWmJMTn-oiWLxb4tgBfUnzYSr6_pXQ,1881 +pytz/zoneinfo/America/Grenada,sha256=smWcJn91VcBkBQVmAjTL4Nf-6tOl4p9BJy4oodfRiWI,170 +pytz/zoneinfo/America/Guadeloupe,sha256=smWcJn91VcBkBQVmAjTL4Nf-6tOl4p9BJy4oodfRiWI,170 +pytz/zoneinfo/America/Guatemala,sha256=eVzCXl_-glqLPYbu2YrV-bufbRUt8rYk8OKmOxfw7kM,306 +pytz/zoneinfo/America/Guayaquil,sha256=b1LAz_MhANeX5kAyxM4Z-lqUt1t2ePG-s8DLVJzcAPc,271 +pytz/zoneinfo/America/Guyana,sha256=_6TbnvnIopAjVd5Zs2k6o9q9IeegYFR3K_zXVJAuk0I,266 +pytz/zoneinfo/America/Halifax,sha256=Yn4YIYxvPD9EbElwq-gWRnLip7qUvPhBseBq9QiblOo,3438 +pytz/zoneinfo/America/Havana,sha256=eHH4daiBn0FcKSUZ2xWQVWoNwabOaRv0969V5nFvuJQ,2437 +pytz/zoneinfo/America/Hermosillo,sha256=7G6yHQaPHKjoCp6CUgatG1oEseirst2YHGyQt2hriCA,454 +pytz/zoneinfo/America/Indianapolis,sha256=VcLz_rJB-IQ16YduduLGnd_Q39NqJztVH_SA4s-tmf4,1675 +pytz/zoneinfo/America/Inuvik,sha256=6mTLwL-Sz4jIkXIi8R24g49C4tQTYM2Bu5P2aaK8aFs,1928 +pytz/zoneinfo/America/Iqaluit,sha256=64eXJQuL2MPQmJO08mH_sb7dZohpoFHI4sgPbA1jQIw,2046 +pytz/zoneinfo/America/Jamaica,sha256=rduYyvNFm7ddbhTtdqpm5kK-rS0Gfn_oGBSk8CzxNQM,507 +pytz/zoneinfo/America/Jujuy,sha256=Iz9D-JWwjyHNKJGPvbnCKJJRHAmWrGIS4BnD6GaspFU,1081 +pytz/zoneinfo/America/Juneau,sha256=gYWRPuaPfscs2Y787mjRtr0MME4wOi3GE7Bs2X5jM8g,2362 +pytz/zoneinfo/America/Knox_IN,sha256=nnXdbFLFM5yOKxlf-KxqchLixehLK-MCPMNVlywg2FY,2437 +pytz/zoneinfo/America/Kralendijk,sha256=Aj2HeTLzXYiXcvVh954mbIVBuYTgzivSV3I6r8fYg8E,212 +pytz/zoneinfo/America/La_Paz,sha256=HxCwE66oWtVcOz9l1o-gm7pKvn-1MRAYaYgj8KuQmog,257 +pytz/zoneinfo/America/Lima,sha256=X1FNUkWrud58Nfq-1MTMhvbAJ1SM0b4EzISxIoeOGtM,431 +pytz/zoneinfo/America/Los_Angeles,sha256=_qnWb_ZSLmnSIHPcToQXm3ysLjcrOY3C-v39thqesuE,2845 +pytz/zoneinfo/America/Louisville,sha256=d5xqbFZndPlPdU2tiTbqf39q-wKO2cdrdNjzdTVUOe4,2781 +pytz/zoneinfo/America/Lower_Princes,sha256=Aj2HeTLzXYiXcvVh954mbIVBuYTgzivSV3I6r8fYg8E,212 +pytz/zoneinfo/America/Maceio,sha256=MkHOjfAASgy12y6zgAo9_ncMwdJNFE_uJ4YsTddACEA,770 +pytz/zoneinfo/America/Managua,sha256=o8HwC8h57oTmI9JSUvH7j_2M9DzZyLi_Era17agEVoM,463 
+pytz/zoneinfo/America/Manaus,sha256=4SE-e5fPpYC0-ccow0_6ZKarJ3sGtViJOymcINNSjmw,630 +pytz/zoneinfo/America/Marigot,sha256=smWcJn91VcBkBQVmAjTL4Nf-6tOl4p9BJy4oodfRiWI,170 +pytz/zoneinfo/America/Martinique,sha256=4IfD5K4gI0oIZnrc9qtM0Dqzru5-A1J48nHdsLZc__I,257 +pytz/zoneinfo/America/Matamoros,sha256=Q5jYMd9L_P2br6Iqw1zVXbt9_V8lwThFLorfJ16hFzU,1416 +pytz/zoneinfo/America/Mazatlan,sha256=k0C3GbJQdzJizsYudx0SEQWu0WiDZyPfwwXjC7BM-7E,1564 +pytz/zoneinfo/America/Mendoza,sha256=1Q8kXPHus2UNu91Fcg3caxxeIq7eeYHyC57-LHrGjE0,1109 +pytz/zoneinfo/America/Menominee,sha256=Rs7VgOdINNLGioCmCuBaC3FQFLsrTa1nz5lKwgzirCI,2283 +pytz/zoneinfo/America/Merida,sha256=7Nz0sp97Q5FSuem1WfBNfUunWd2ULC_WhaTuNnmPyNE,1456 +pytz/zoneinfo/America/Metlakatla,sha256=Sa1eT8VJ0NsuLMbc0pNuyxK4WR28OoBv_PhD34JA9qA,1418 +pytz/zoneinfo/America/Mexico_City,sha256=iLw_yxqS7wU-CvSvkFPsvxKFX98Y-FmypU2Cbb-stlU,1618 +pytz/zoneinfo/America/Miquelon,sha256=Ifq8nCyv2O70avkSYEDM7W7yfWSNc9SJUReBQ-a7AGs,1696 +pytz/zoneinfo/America/Moncton,sha256=EwDVqTuzdsvyOokKI-oF-isilIbn631ZWceDQmD7t7c,3163 +pytz/zoneinfo/America/Monterrey,sha256=idrcqFLr9S5EBclYEyJVwl-xlWBNffm4RL9QtIoIZdg,1416 +pytz/zoneinfo/America/Montevideo,sha256=VQ78OdPhyeaQmqgKnQZ_k1XEeodPyvT1kwJAfvb5aOE,1564 +pytz/zoneinfo/America/Montreal,sha256=hC96ED36ycDCwzycw5KhE9JmrAZMXHSXiDq0G3l84ZQ,3503 +pytz/zoneinfo/America/Montserrat,sha256=smWcJn91VcBkBQVmAjTL4Nf-6tOl4p9BJy4oodfRiWI,170 +pytz/zoneinfo/America/Nassau,sha256=TJmdvMbGBFrL7fb9SPW3CyDrjYsL2WEtK-qI3AOqn00,2284 +pytz/zoneinfo/America/New_York,sha256=X6bczDAzUuEZXENIsYnzCFAU2KVqGXbI6KMr1P7baf0,3545 +pytz/zoneinfo/America/Nipigon,sha256=cQDKS_MEQhH_jncNreILaDiA85ZfFG673XLGRKrsfDc,2131 +pytz/zoneinfo/America/Nome,sha256=0xK8eX6xs4TM-6DECCLVCy4KvzfV2qQ90-JV_cdXOwA,2376 +pytz/zoneinfo/America/Noronha,sha256=y06Wj0Fe1T52kQjJ1flxDomHFq90U205twd7BCbzlg0,742 +pytz/zoneinfo/America/Ojinaga,sha256=oH_4_j7jUx-Gavyg95N9hqzsGCb7PqplXVQwDsItxxM,1522 +pytz/zoneinfo/America/Panama,sha256=_E-7oUZToxhvPFtxn3ub99ilgkQBBkz21QggVZLlWp8,203 +pytz/zoneinfo/America/Pangnirtung,sha256=G2pJdlPfkiDDCgIth-wqoTIK-eUbs83Eh1bnLXcKZzg,2108 +pytz/zoneinfo/America/Paramaribo,sha256=VwObMaaLMLoHMZFs_6e5obu1K_dlD3qmIFLomo_J7fY,296 +pytz/zoneinfo/America/Phoenix,sha256=nAE-z4K27R3eI1tfyYP-nSPI1W1hAyP3yUxro817RWQ,353 +pytz/zoneinfo/America/Port-au-Prince,sha256=sLYK0cVXxBWW4PzH_IubOURMeClbFHzuSV4pmFoiXaY,1455 +pytz/zoneinfo/America/Port_of_Spain,sha256=smWcJn91VcBkBQVmAjTL4Nf-6tOl4p9BJy4oodfRiWI,170 +pytz/zoneinfo/America/Porto_Acre,sha256=FshszMk8fr_v-uiA5DnOhIBV5CFXiYLVyr74quwV-AI,662 +pytz/zoneinfo/America/Porto_Velho,sha256=Kfjaqo_coJDfcUXmns3JNs0_7azqC3I_XJPaKoyA-XY,602 +pytz/zoneinfo/America/Puerto_Rico,sha256=fu5E4ct6yIX91QQoFcVIvLbKhM_43gB-UrU8C5rVPxk,255 +pytz/zoneinfo/America/Punta_Arenas,sha256=M-JrPIvz3XEAFR08GEKumqjvabFflhqi7uLh3DUIxuU,1911 +pytz/zoneinfo/America/Rainy_River,sha256=mnuN1Mlo3jH-Nf2f4HX62K_BWJIGcRn2OLWu8NIojYM,2131 +pytz/zoneinfo/America/Rankin_Inlet,sha256=s644zPowkevYA3_V4f0nFacgCJMvlFJqFfsap_rnItw,1930 +pytz/zoneinfo/America/Recife,sha256=zrttMMILALxDfU4KjGoMkdsSCjrvLCi8VpYPaCs_www,742 +pytz/zoneinfo/America/Regina,sha256=kp0HRXQHUpY3Ym0J9cqXW08FgB810L-sTjsAJ-_uZ3Y,994 +pytz/zoneinfo/America/Resolute,sha256=xSkUGAdwRyWwWRh7n0WAgPkRtt1DE-dBOdLH2sKHyQE,1930 +pytz/zoneinfo/America/Rio_Branco,sha256=FshszMk8fr_v-uiA5DnOhIBV5CFXiYLVyr74quwV-AI,662 +pytz/zoneinfo/America/Rosario,sha256=JUow-bmwBVg1DQwfQKHZyHOARbt_6I5WyBu7n70WECY,1109 +pytz/zoneinfo/America/Santa_Isabel,sha256=2Ce5W0-ha4xW2aFjY0HJESZX5WeuhLN6m_yhM64xurs,2356 
+pytz/zoneinfo/America/Santarem,sha256=eEQNAfTFt8E9Hb5lALpxGI79z5AGmXnICiJNgxyL2Xs,632 +pytz/zoneinfo/America/Santiago,sha256=A0WTAf1TQ-LArMtXwKvnGlyZBA3v4mSkPTK0MENXMe4,2538 +pytz/zoneinfo/America/Santo_Domingo,sha256=rfNJ5McxSqppnEiTxYmwd_bfp9mlTqnq5Fllj5r0Hcc,491 +pytz/zoneinfo/America/Sao_Paulo,sha256=5U8tGHsI7_0tGsnE9m1rwKMTZwSIeShVelx_dMI69Ag,2016 +pytz/zoneinfo/America/Scoresbysund,sha256=xmh3LUkybN43mNFYCJ0vnO1DeI-QQ2Cm3GelOGiyj5Y,1930 +pytz/zoneinfo/America/Shiprock,sha256=9N88x0x50HCiWnkndE06QioF2GKpojShIQXFyWTvsi0,2453 +pytz/zoneinfo/America/Sitka,sha256=unm4ns2OZNukEZ8sWvI3MWdVfEvHG3sTSrJS4LdIX9s,2350 +pytz/zoneinfo/America/St_Barthelemy,sha256=smWcJn91VcBkBQVmAjTL4Nf-6tOl4p9BJy4oodfRiWI,170 +pytz/zoneinfo/America/St_Johns,sha256=K5YKWNbT9qJycH-UH1WxW4uj_Q_VX4aA6oSvax6YuuA,3664 +pytz/zoneinfo/America/St_Kitts,sha256=smWcJn91VcBkBQVmAjTL4Nf-6tOl4p9BJy4oodfRiWI,170 +pytz/zoneinfo/America/St_Lucia,sha256=smWcJn91VcBkBQVmAjTL4Nf-6tOl4p9BJy4oodfRiWI,170 +pytz/zoneinfo/America/St_Thomas,sha256=smWcJn91VcBkBQVmAjTL4Nf-6tOl4p9BJy4oodfRiWI,170 +pytz/zoneinfo/America/St_Vincent,sha256=smWcJn91VcBkBQVmAjTL4Nf-6tOl4p9BJy4oodfRiWI,170 +pytz/zoneinfo/America/Swift_Current,sha256=a2Ap8ErAfDguINMV2npy2SBNgT74NYXwQlYuf2eIp48,574 +pytz/zoneinfo/America/Tegucigalpa,sha256=_SlanMaJqWZ4a24OydDxAXlqyLTwSm9SmxkpN986IRU,278 +pytz/zoneinfo/America/Thule,sha256=mkdKH8dkNDRw0xA2nNv21wB-phERbiW-po9g3fWmzWg,1528 +pytz/zoneinfo/America/Thunder_Bay,sha256=rvReFHQ2n1Lhr7HNfzEunwNcoTaGCSzyNRo1MTPhzuY,2211 +pytz/zoneinfo/America/Tijuana,sha256=2Ce5W0-ha4xW2aFjY0HJESZX5WeuhLN6m_yhM64xurs,2356 +pytz/zoneinfo/America/Toronto,sha256=hC96ED36ycDCwzycw5KhE9JmrAZMXHSXiDq0G3l84ZQ,3503 +pytz/zoneinfo/America/Tortola,sha256=smWcJn91VcBkBQVmAjTL4Nf-6tOl4p9BJy4oodfRiWI,170 +pytz/zoneinfo/America/Vancouver,sha256=i0FLmiC-Ca1hIU3ustv_O7G9Hp01Gnms6udX-g--kd0,2901 +pytz/zoneinfo/America/Virgin,sha256=smWcJn91VcBkBQVmAjTL4Nf-6tOl4p9BJy4oodfRiWI,170 +pytz/zoneinfo/America/Whitehorse,sha256=GGT-M9UeWHgKXIQApH58QubmZVq-SagswRpGEFkFgxM,2093 +pytz/zoneinfo/America/Winnipeg,sha256=d75cCPb46-UzD7hqYMREfqJUliBgn072p6emjRoS2dY,2891 +pytz/zoneinfo/America/Yakutat,sha256=B_GJ9JhzsTkpiey94Z0Z0zzWwWlTv35rknyi8QQPcQY,2314 +pytz/zoneinfo/America/Yellowknife,sha256=MUDef-QTb7SSDou47Y1wclbnjGcu384WV64iZy3mh2g,1980 +pytz/zoneinfo/America/Argentina/Buenos_Aires,sha256=hBubypR_Ks2a3Gy1wQFxIE7s7B5G5gQmVPNVyJ3rxD8,1109 +pytz/zoneinfo/America/Argentina/Catamarca,sha256=6ESo80xxwtBL_axrEekTtfIOqf3bXRRUM9CDEIfxxdY,1109 +pytz/zoneinfo/America/Argentina/ComodRivadavia,sha256=6ESo80xxwtBL_axrEekTtfIOqf3bXRRUM9CDEIfxxdY,1109 +pytz/zoneinfo/America/Argentina/Cordoba,sha256=JUow-bmwBVg1DQwfQKHZyHOARbt_6I5WyBu7n70WECY,1109 +pytz/zoneinfo/America/Argentina/Jujuy,sha256=Iz9D-JWwjyHNKJGPvbnCKJJRHAmWrGIS4BnD6GaspFU,1081 +pytz/zoneinfo/America/Argentina/La_Rioja,sha256=7cgtkiW4rjypEeqxT4IBqJFpKM-rIz4YL4B-cVzxhDU,1123 +pytz/zoneinfo/America/Argentina/Mendoza,sha256=1Q8kXPHus2UNu91Fcg3caxxeIq7eeYHyC57-LHrGjE0,1109 +pytz/zoneinfo/America/Argentina/Rio_Gallegos,sha256=92IGeyXMfmFBsGpurnd2TKzMy_6HFvE3DDPhaewuFyM,1109 +pytz/zoneinfo/America/Argentina/Salta,sha256=He7enBTtC03G5aQBEMcFYIPI_tVXuEqtwPoqBFQR5WA,1081 +pytz/zoneinfo/America/Argentina/San_Juan,sha256=EAwACwO5oOGcd42ega2OXCw020t9okxV1nwK2AUKZ8U,1123 +pytz/zoneinfo/America/Argentina/San_Luis,sha256=qxWxFBuHsTgeXK04b8N0tamiypIlBNpbhIB0-rkemrw,1139 +pytz/zoneinfo/America/Argentina/Tucuman,sha256=amI7vNIUTx5DiSd3CEveTiYTrm8iv5Le324dxuaCSLM,1137 
+pytz/zoneinfo/America/Argentina/Ushuaia,sha256=AvmpZCaCPHvyX_FaHyQQaeLLFZadSNDGzDGq0Tp9D0k,1109 +pytz/zoneinfo/America/Indiana/Indianapolis,sha256=VcLz_rJB-IQ16YduduLGnd_Q39NqJztVH_SA4s-tmf4,1675 +pytz/zoneinfo/America/Indiana/Knox,sha256=nnXdbFLFM5yOKxlf-KxqchLixehLK-MCPMNVlywg2FY,2437 +pytz/zoneinfo/America/Indiana/Marengo,sha256=ZLuHZpp8FhRUsoPlhV4o3nZVRQaAzEA-6gVy5YDrJiU,1731 +pytz/zoneinfo/America/Indiana/Petersburg,sha256=PrXdUQ1nf5EY7EvKeJLbTtGBci-6-U75yxpEFxhAAyE,1913 +pytz/zoneinfo/America/Indiana/Tell_City,sha256=JoxThon0gV-Grh0QC0sHKnhdXmiwPecbdPQ3E1vFKEM,1735 +pytz/zoneinfo/America/Indiana/Vevay,sha256=XPcorCZ8zlG93xh1IZvw6ADVqqF3lNwsdAgpN3P1FuU,1423 +pytz/zoneinfo/America/Indiana/Vincennes,sha256=ip2zj3V1-eKmJMlGYSiSfK7w7qfpo4QWec1usSmwGdM,1703 +pytz/zoneinfo/America/Indiana/Winamac,sha256=0tyRkge425W9gq_2i4pJim077CwzIgulOB68oXHcwJU,1787 +pytz/zoneinfo/America/Kentucky/Louisville,sha256=d5xqbFZndPlPdU2tiTbqf39q-wKO2cdrdNjzdTVUOe4,2781 +pytz/zoneinfo/America/Kentucky/Monticello,sha256=uHCB5Gu-aIgWp-A8_cW07-EL1rkstaeA32sMhq9MGjc,2361 +pytz/zoneinfo/America/North_Dakota/Beulah,sha256=9gTrQvAZf17e_NbUMFGgtk5uEyfb0tPPqU0DSLI-H6g,2389 +pytz/zoneinfo/America/North_Dakota/Center,sha256=CO_qSvmitqWrJehhFs_PS5PBa6m3si92LZyzSB7POsg,2389 +pytz/zoneinfo/America/North_Dakota/New_Salem,sha256=xaQ-tndsMmsSUPkU8A6eAOEhubUfY2ZfkPJFN22iAXo,2389 +pytz/zoneinfo/Antarctica/Casey,sha256=AIMlsO0bh5BHz9AwzNJMsv2q7s3q7cJz93qqiIreMTY,311 +pytz/zoneinfo/Antarctica/Davis,sha256=jex3sqIzidMMaApvsCQcrjLxwMcb8o_eFnm9sDWik5s,311 +pytz/zoneinfo/Antarctica/DumontDUrville,sha256=htHnKnytHZgX9X1FbjkYS3SUNqAng6Jy8hCZlK3fvVU,216 +pytz/zoneinfo/Antarctica/Macquarie,sha256=7cyqtCrc_H-khy_yK2P9wSijNU-n1EP4lbj3BozteI8,1543 +pytz/zoneinfo/Antarctica/Mawson,sha256=r6Suw22f-RmSlwtMZF4DiBDyXY5YEYpWVo28kiZwRUM,225 +pytz/zoneinfo/Antarctica/McMurdo,sha256=17UXU4eseOKfe5Ah5BFRJ1a-KD7T0YGZQu9dRezzOOQ,2460 +pytz/zoneinfo/Antarctica/Palmer,sha256=qj_H3RsfFZm_ce0yiuXbqB4JrD46kUaJ5-pf863CgYM,1432 +pytz/zoneinfo/Antarctica/Rothera,sha256=5n2et4tT06QVhlQCdY7KSVxpw0ftDKfVoCOMX3rAHYs,186 +pytz/zoneinfo/Antarctica/South_Pole,sha256=17UXU4eseOKfe5Ah5BFRJ1a-KD7T0YGZQu9dRezzOOQ,2460 +pytz/zoneinfo/Antarctica/Syowa,sha256=rvdl7Js_riztu68rUlUFAIqiTy9OBquuP3cKlnc4eHk,187 +pytz/zoneinfo/Antarctica/Troll,sha256=HYvf52cpJymYGZOmT9ii90WyQFjIvLzyG_OwttUHXC0,1176 +pytz/zoneinfo/Antarctica/Vostok,sha256=kD0gENiga-IM1XAA6J2S5vgwfco4EClCFaWM18fIY-I,187 +pytz/zoneinfo/Arctic/Longyearbyen,sha256=D6TmNdorF4-j6hP_OCnHAoRM-L1p4WvKjh0029kknQE,2251 +pytz/zoneinfo/Asia/Aden,sha256=MQLBdV2aZLLis2M4G79S1qAeuGak0s39DPfggyUXCU0,187 +pytz/zoneinfo/Asia/Almaty,sha256=RAFijG0qNkMO_d3sPZrrn_CR6F2qIdB4MpjN3-cMOMQ,1031 +pytz/zoneinfo/Asia/Amman,sha256=uhih-CP3fkeOcDDuuC3dcaXz-ubv31YyW3d2MEkS2gg,1877 +pytz/zoneinfo/Asia/Anadyr,sha256=tLiARdZiTiHLIzx8Yibt7oivtj4MqNTj31gNdKbIo0w,1222 +pytz/zoneinfo/Asia/Aqtau,sha256=4ueceTcbDIYBzI2p3GExRbYHIenFdDEO1Lf6mum6ows,1017 +pytz/zoneinfo/Asia/Aqtobe,sha256=zNKrBxj8imN7tEV25Mo_8mcVHMQIHcaX5p69Qm2-TB0,1047 +pytz/zoneinfo/Asia/Ashgabat,sha256=RihKzwD87pkYhu6Hn1B_lwvvQQWgXV4zBzagKzKdM3U,651 +pytz/zoneinfo/Asia/Ashkhabad,sha256=RihKzwD87pkYhu6Hn1B_lwvvQQWgXV4zBzagKzKdM3U,651 +pytz/zoneinfo/Asia/Atyrau,sha256=RIEq2hzMSatC3QTjxevo-fWSaCdlsOQAlyZiw0726TE,1025 +pytz/zoneinfo/Asia/Baghdad,sha256=lCupYy1WTx4p9g51p-31mNOwAVFezdfUA7FH5b9wPLE,1004 +pytz/zoneinfo/Asia/Bahrain,sha256=8V1FW1A6HZuZqbwV8n4Nh9m_PKyBAHCfajFA5jurVr0,225 +pytz/zoneinfo/Asia/Baku,sha256=jv_6GX9u4HR9YPSzfbiCPS9xLz33NQv7QEYWQdnnyjE,1269 
+pytz/zoneinfo/Asia/Bangkok,sha256=z4ZnA6BbBnBp2wX4dYTVyKNIm8qtPkG7ASYJkEkVwRs,220 +pytz/zoneinfo/Asia/Barnaul,sha256=tVZVLYgcdynyGk-xDF5144ha_AjlOUYdQNbk41nc3X8,1255 +pytz/zoneinfo/Asia/Beirut,sha256=q9-lCe2YJFWHPBA1li2GQq6LiKt19_GppM9-6lzhIO8,2175 +pytz/zoneinfo/Asia/Bishkek,sha256=i0Sc9krX1GuuQZZ4f0cBK_0Imar9fKx3-HlN13MLQe4,1045 +pytz/zoneinfo/Asia/Brunei,sha256=zRTIn0Dq7Of4f5Z569b9wjNWwe4x2gMUAMWYBgRMpNE,229 +pytz/zoneinfo/Asia/Calcutta,sha256=xx17wQ1Sxk9Z6ujqxwHBsVa77D_Z_nUJcL7RXptAj6U,312 +pytz/zoneinfo/Asia/Chita,sha256=O8U3tsP2L7zRNM5MjljDUaV3C5IW8kg_HjbY7JA1s7w,1257 +pytz/zoneinfo/Asia/Choibalsan,sha256=bqCCcv547xUFi1gh0FPpB-qTfbm7bKj3HLmZekTGQxU,991 +pytz/zoneinfo/Asia/Chongqing,sha256=lTYiu9frnrqMO56M1dXsmM6moIWp3rHEPknoiaFU00Q,414 +pytz/zoneinfo/Asia/Chungking,sha256=lTYiu9frnrqMO56M1dXsmM6moIWp3rHEPknoiaFU00Q,414 +pytz/zoneinfo/Asia/Colombo,sha256=YBCKWuwII2tekTLTPecmSc3wH4VMhtAai9YJ2CC1abo,413 +pytz/zoneinfo/Asia/Dacca,sha256=sXYxwfswM__eFTkdL77E49KbhG_Sz7CJiYxrYTCOt7I,370 +pytz/zoneinfo/Asia/Damascus,sha256=D0LVcC7lKUTd5DBxVp3mRaXWaKOFxKLgzYqkPTnS6iE,2320 +pytz/zoneinfo/Asia/Dhaka,sha256=sXYxwfswM__eFTkdL77E49KbhG_Sz7CJiYxrYTCOt7I,370 +pytz/zoneinfo/Asia/Dili,sha256=H2kd8kTXNhPedYl1rcpUVO6akYIbP0OC6pt5PvBLH8Q,253 +pytz/zoneinfo/Asia/Dubai,sha256=UsGWhPtJQ3c9hsQ_eMete0audVeorp7RZQg0K9MZZ4o,187 +pytz/zoneinfo/Asia/Dushanbe,sha256=6DJSSg0CCjQBXkIxgr7AWSDB5ztBSaqxuzG3R5oKj0w,621 +pytz/zoneinfo/Asia/Famagusta,sha256=Sj5mdZwGD_XZ4zgWl4FngWHfW5rNmuxhRvHU08_ZAws,2042 +pytz/zoneinfo/Asia/Gaza,sha256=Op34-3ClEwpyToQiHmO7GMyEz1xsh4h3i-4HDrczDqI,2295 +pytz/zoneinfo/Asia/Harbin,sha256=lTYiu9frnrqMO56M1dXsmM6moIWp3rHEPknoiaFU00Q,414 +pytz/zoneinfo/Asia/Hebron,sha256=k-HJa_9dJlIXpok5rhYOhrvuFtljlNfCuZWJBjQ17qQ,2323 +pytz/zoneinfo/Asia/Ho_Chi_Minh,sha256=AT_8zxoFqee1CbVfa5SVad2eZ2v8zhDIhv_-WXRUQOU,389 +pytz/zoneinfo/Asia/Hong_Kong,sha256=l4b75K2bgg9hpWZLO4XdYBqm6RUsk_9OFL1U7_348dU,1189 +pytz/zoneinfo/Asia/Hovd,sha256=zcZfkT8rZ80dIyhpRFRsQvq7ZHILeKrzL4hgWhlDaJo,921 +pytz/zoneinfo/Asia/Irkutsk,sha256=d-2KOKnz5KZaXe1qhGCJxMimDrJFxHb37iDWJ4AwPu8,1276 +pytz/zoneinfo/Asia/Istanbul,sha256=ldOJ2-YltB_ZTylyeN94QXWsuqtz3jhrzcJAcT43kVI,2166 +pytz/zoneinfo/Asia/Jakarta,sha256=fqHD5ToNP0DLbXck8brLT8oc8K6W2atC8A8tMNwN7jo,392 +pytz/zoneinfo/Asia/Jayapura,sha256=xvok3i6D9HGHi44Q7NaIKOs6uh9JzkYtrHchZhOG6ww,251 +pytz/zoneinfo/Asia/Jerusalem,sha256=YAGe4NHetuaZShpclRhh4dufmSpcAn-8TjYX-ULAIc8,2265 +pytz/zoneinfo/Asia/Kabul,sha256=OGuYuVsZu-xSxtjzNOBKF49PmfK4oc6jPBQjdWaNcic,229 +pytz/zoneinfo/Asia/Kamchatka,sha256=pF1YfHE0YHy2_q3mr5oEIDw4se1IH3x86OsQ582XLKw,1198 +pytz/zoneinfo/Asia/Karachi,sha256=_EsqaK15763s9S8zP6GcuqXdCEzcm_lquLZadcVZo3A,417 +pytz/zoneinfo/Asia/Kashgar,sha256=muiGjfVEHOSsM6rtd39epog-uVBQt9ZtHl7FZIyeP8w,187 +pytz/zoneinfo/Asia/Kathmandu,sha256=XFV7hsXw_dGdEFr704vZ2qrRzQdenv2-gFR93Kha5a4,238 +pytz/zoneinfo/Asia/Katmandu,sha256=XFV7hsXw_dGdEFr704vZ2qrRzQdenv2-gFR93Kha5a4,238 +pytz/zoneinfo/Asia/Khandyga,sha256=AWnyrYKDL2RmmEytnMZz-0CY7hXhSyFSHOVPN6P6beM,1311 +pytz/zoneinfo/Asia/Kolkata,sha256=xx17wQ1Sxk9Z6ujqxwHBsVa77D_Z_nUJcL7RXptAj6U,312 +pytz/zoneinfo/Asia/Krasnoyarsk,sha256=kSLsPfnS8eF2ft-8nM5J58_5VJHLneI0xFiPmF6zYcg,1243 +pytz/zoneinfo/Asia/Kuala_Lumpur,sha256=Jo08wp2umFT-obkQnaKHNgLEiAaZTT898LnKmGP89Zs,424 +pytz/zoneinfo/Asia/Kuching,sha256=DQxo0s3c-UMQVrJ7iEyJlR3kVqSE_flqKxDHj68ZW9g,521 +pytz/zoneinfo/Asia/Kuwait,sha256=MQLBdV2aZLLis2M4G79S1qAeuGak0s39DPfggyUXCU0,187 +pytz/zoneinfo/Asia/Macao,sha256=wrjNyhVaeMeuNMpcMBhXE8YLJJawHP0x-Zi7a9oW884,771 
+pytz/zoneinfo/Asia/Macau,sha256=wrjNyhVaeMeuNMpcMBhXE8YLJJawHP0x-Zi7a9oW884,771 +pytz/zoneinfo/Asia/Magadan,sha256=oy8CKyqps3D0GGYEfCi22WAHvsfn8F5P0aLwYREFfos,1258 +pytz/zoneinfo/Asia/Makassar,sha256=JPrJAWle9Dtz-os82eS_iTzrdXxSALZiiuag_HDwGVY,288 +pytz/zoneinfo/Asia/Manila,sha256=plPqwY61QQ1OaJIemnJqwyE9R4UjojT2rL1SZiHJjtc,367 +pytz/zoneinfo/Asia/Muscat,sha256=UsGWhPtJQ3c9hsQ_eMete0audVeorp7RZQg0K9MZZ4o,187 +pytz/zoneinfo/Asia/Nicosia,sha256=8qoqP3ekO3VYp1CKbNbFD999mR-dZNpZSP2QA5I7HXI,2016 +pytz/zoneinfo/Asia/Novokuznetsk,sha256=Rd8ggmbOQdzNrmpHtreCNaLnDE7rabKOMBJeA-e54NM,1197 +pytz/zoneinfo/Asia/Novosibirsk,sha256=U_VVwHg3jXJtttIDyWvufvybE4wQz9Y091CyjLYhK6U,1255 +pytz/zoneinfo/Asia/Omsk,sha256=4yv7l2J0ZXqJL1kYs_QuVsg42sBA4GrGDC02MYyA_Uk,1243 +pytz/zoneinfo/Asia/Oral,sha256=Td1mX4H5_-f6PHVA9QZd2tcidNoikTiF7v6GlRqFeZg,1039 +pytz/zoneinfo/Asia/Phnom_Penh,sha256=z4ZnA6BbBnBp2wX4dYTVyKNIm8qtPkG7ASYJkEkVwRs,220 +pytz/zoneinfo/Asia/Pontianak,sha256=JRasK8hP5kmKULyIZewA40mbOPL0hUA81QKFeKmNH9g,395 +pytz/zoneinfo/Asia/Pyongyang,sha256=QENKzB3BrAWp_z07sSRIYpykX3pBXwPrSxP1QWBh0pk,267 +pytz/zoneinfo/Asia/Qatar,sha256=8V1FW1A6HZuZqbwV8n4Nh9m_PKyBAHCfajFA5jurVr0,225 +pytz/zoneinfo/Asia/Qyzylorda,sha256=oUuEgD145jbzuaoGGUumf9IA16ejyospS_jWblF6mB4,1047 +pytz/zoneinfo/Asia/Rangoon,sha256=G0YFglrbrjxxNvPwVdfLrHb6rWJwNRbq-U_I0Q4d860,297 +pytz/zoneinfo/Asia/Riyadh,sha256=MQLBdV2aZLLis2M4G79S1qAeuGak0s39DPfggyUXCU0,187 +pytz/zoneinfo/Asia/Saigon,sha256=AT_8zxoFqee1CbVfa5SVad2eZ2v8zhDIhv_-WXRUQOU,389 +pytz/zoneinfo/Asia/Sakhalin,sha256=1q9n3YU-og7JKqOf3WR7cOwylgbnVlU2Aw291w8GIUg,1234 +pytz/zoneinfo/Asia/Samarkand,sha256=_ZKLVv8rb98eKMGY2Iceh5eUcxCd_DlaUdiq7Q77WSQ,619 +pytz/zoneinfo/Asia/Seoul,sha256=It6UZCqXimr0Y9sXXp-ToykuioGnSouSmJvCF0N57dY,531 +pytz/zoneinfo/Asia/Shanghai,sha256=lTYiu9frnrqMO56M1dXsmM6moIWp3rHEPknoiaFU00Q,414 +pytz/zoneinfo/Asia/Singapore,sha256=5pKf3kP_xIu6xAgbMbv3_UJkOzwm-t8yK96t6yPPx0g,424 +pytz/zoneinfo/Asia/Srednekolymsk,sha256=NcVF4k1hox9f1PpxLYtswJ7L393uEOW4WdaynlfZiAY,1244 +pytz/zoneinfo/Asia/Taipei,sha256=Jc_QK8hHvcsR5YZEW6iGp2MV8fm-hvfnSUSm6OhkRUM,790 +pytz/zoneinfo/Asia/Tashkent,sha256=hnTrUBzSXFQCWOlABs4VH5H2U4SegAqpeYZVG4nq1og,635 +pytz/zoneinfo/Asia/Tbilisi,sha256=vIjv31faZqqnHBXY-8NthyQq3Kd24QPd1VMapFyiEXc,1080 +pytz/zoneinfo/Asia/Tehran,sha256=53RfdtZdlFhrpyM-e_9UkRiHSA-l4j8cU_5eV0P0seM,1718 +pytz/zoneinfo/Asia/Tel_Aviv,sha256=YAGe4NHetuaZShpclRhh4dufmSpcAn-8TjYX-ULAIc8,2265 +pytz/zoneinfo/Asia/Thimbu,sha256=5UxNVlpL5fNCCbo1HHqt0QcdzPigOA1p4G6TakJSA6I,229 +pytz/zoneinfo/Asia/Thimphu,sha256=5UxNVlpL5fNCCbo1HHqt0QcdzPigOA1p4G6TakJSA6I,229 +pytz/zoneinfo/Asia/Tokyo,sha256=TVmM6j_lW0SvvZxvYD90iWDkqYBuHqHof8Lyu5ghQ3c,318 +pytz/zoneinfo/Asia/Tomsk,sha256=EULbQLkWeLSrPCk1NG9vC85qhDUzkqGrl9vroO4VgtU,1255 +pytz/zoneinfo/Asia/Ujung_Pandang,sha256=JPrJAWle9Dtz-os82eS_iTzrdXxSALZiiuag_HDwGVY,288 +pytz/zoneinfo/Asia/Ulaanbaatar,sha256=F6MdDqjq8NFIS1TlPWgD6uqoMnQNUho0Dh1cBz3pfiI,921 +pytz/zoneinfo/Asia/Ulan_Bator,sha256=F6MdDqjq8NFIS1TlPWgD6uqoMnQNUho0Dh1cBz3pfiI,921 +pytz/zoneinfo/Asia/Urumqi,sha256=muiGjfVEHOSsM6rtd39epog-uVBQt9ZtHl7FZIyeP8w,187 +pytz/zoneinfo/Asia/Ust-Nera,sha256=qw7b6IcYE-EVSNNGQVIYeKyhJjSkRoOUXSTvhQFr0Ko,1290 +pytz/zoneinfo/Asia/Vientiane,sha256=z4ZnA6BbBnBp2wX4dYTVyKNIm8qtPkG7ASYJkEkVwRs,220 +pytz/zoneinfo/Asia/Vladivostok,sha256=MutuFAWqBI5sujOW1LCa0E7QXCOdvLBU-C5Nu9LbvTE,1244 +pytz/zoneinfo/Asia/Yakutsk,sha256=VFA2qMtIBo1fb5i9KOuQu2wl0xNrWPAUhrh1eAUZII4,1243 +pytz/zoneinfo/Asia/Yangon,sha256=G0YFglrbrjxxNvPwVdfLrHb6rWJwNRbq-U_I0Q4d860,297 
+pytz/zoneinfo/Asia/Yekaterinburg,sha256=iBnv8pqQrSwNNYj1bW6XTZlBnoAQS_yTEydPCjPgtZA,1281 +pytz/zoneinfo/Asia/Yerevan,sha256=LkVgEeng2MGVjBe_NBFv6JoyOQKAEOfbYa5GASyPIwQ,1213 +pytz/zoneinfo/Atlantic/Azores,sha256=09_NnHfR4qSeFch6_55D8NUoOlMrS8bHr6IvoU-gw3c,3493 +pytz/zoneinfo/Atlantic/Bermuda,sha256=Ri0gWQTzLct5MX-D3bTeoVSNUYSSF9w-QroXw8x_zwg,2004 +pytz/zoneinfo/Atlantic/Canary,sha256=RhfLGqdVFAA_GBkI6cz8HT0GLvIrsBloZ9vlMOwuFBY,1911 +pytz/zoneinfo/Atlantic/Cape_Verde,sha256=96gTQu1YhPNP3Afm6_jw8yLkG6Pi05nX9Ra00odxNQs,284 +pytz/zoneinfo/Atlantic/Faeroe,sha256=axpXafj_ouwpvymN_9f7Mk5iXjb8UnwUu2a2Ug5vdqc,1829 +pytz/zoneinfo/Atlantic/Faroe,sha256=axpXafj_ouwpvymN_9f7Mk5iXjb8UnwUu2a2Ug5vdqc,1829 +pytz/zoneinfo/Atlantic/Jan_Mayen,sha256=D6TmNdorF4-j6hP_OCnHAoRM-L1p4WvKjh0029kknQE,2251 +pytz/zoneinfo/Atlantic/Madeira,sha256=JMYWeAWJ-2p-IpE-NAJSJRe6SnRgc4zNOPGjoOSiH0A,3484 +pytz/zoneinfo/Atlantic/Reykjavik,sha256=nNzqaqHu2CdtP2Yg4MD_rpz8w3IsRD6mujkUeXXq-qM,1188 +pytz/zoneinfo/Atlantic/South_Georgia,sha256=90Xco5ZMauPouIFm4Ntt9Ifuj25q1_saw61OarLgo2E,181 +pytz/zoneinfo/Atlantic/St_Helena,sha256=1d7RJt-PaTzh_4PoWqTUQYXCve99ofkVshT1Pe_97kc,170 +pytz/zoneinfo/Atlantic/Stanley,sha256=V-4n-sfXK6LDRyVwLlh2qidGKgmsS4QbQBIq_hA6TEE,1251 +pytz/zoneinfo/Australia/ACT,sha256=tUD44h7WprJiM24OsCDBirQ_KD6XdGE92YZCOVI-QjM,2223 +pytz/zoneinfo/Australia/Adelaide,sha256=c1R27vgWUtcYlXT4t6EclCqYaroktt3GRPvr0etJJFw,2238 +pytz/zoneinfo/Australia/Brisbane,sha256=dKyfXR0V7w9r2eacaHuQR_sfdJxZJ5R1aFch9XS0J8w,452 +pytz/zoneinfo/Australia/Broken_Hill,sha256=1kUWddO1r7hXLiy7TTgXMNoj2qO_y1dgH-b4FZhSN9s,2274 +pytz/zoneinfo/Australia/Canberra,sha256=tUD44h7WprJiM24OsCDBirQ_KD6XdGE92YZCOVI-QjM,2223 +pytz/zoneinfo/Australia/Currie,sha256=VDx6-uv92Qf41jfvzki_QcQH2nJli46cEvcgilTR2Eo,2223 +pytz/zoneinfo/Australia/Darwin,sha256=YXLYaHp4YI2ISwSQPTYFO9-1ZDNUGTCypCtAXLti3As,323 +pytz/zoneinfo/Australia/Eucla,sha256=Qsf5xEz42_1WSnU5uGJ42YKFR2V4vvS_wBzD_GHrsro,503 +pytz/zoneinfo/Australia/Hobart,sha256=3hu16C-Gd05wCCuQZGLgKgYiOOXE12FJVm4hscsxsjo,2335 +pytz/zoneinfo/Australia/LHI,sha256=CWJpde6GI4_V-FvCder62DvGlnCdeBRMwL1M7XWsry0,1889 +pytz/zoneinfo/Australia/Lindeman,sha256=Zj3zXwRKFcdDuXFuGDWVFH0MGDjpkUipRzYjrIIHa_k,522 +pytz/zoneinfo/Australia/Lord_Howe,sha256=CWJpde6GI4_V-FvCder62DvGlnCdeBRMwL1M7XWsry0,1889 +pytz/zoneinfo/Australia/Melbourne,sha256=JywfE9AeNealiFXLtTh4eVRRkorb8MjKKYK3nbH0UKc,2223 +pytz/zoneinfo/Australia/NSW,sha256=tUD44h7WprJiM24OsCDBirQ_KD6XdGE92YZCOVI-QjM,2223 +pytz/zoneinfo/Australia/North,sha256=YXLYaHp4YI2ISwSQPTYFO9-1ZDNUGTCypCtAXLti3As,323 +pytz/zoneinfo/Australia/Perth,sha256=Ts1KCFyp7Ft5AwFsLU4xEnYCSivNDDXUAoFljkhCH5M,479 +pytz/zoneinfo/Australia/Queensland,sha256=dKyfXR0V7w9r2eacaHuQR_sfdJxZJ5R1aFch9XS0J8w,452 +pytz/zoneinfo/Australia/South,sha256=c1R27vgWUtcYlXT4t6EclCqYaroktt3GRPvr0etJJFw,2238 +pytz/zoneinfo/Australia/Sydney,sha256=tUD44h7WprJiM24OsCDBirQ_KD6XdGE92YZCOVI-QjM,2223 +pytz/zoneinfo/Australia/Tasmania,sha256=3hu16C-Gd05wCCuQZGLgKgYiOOXE12FJVm4hscsxsjo,2335 +pytz/zoneinfo/Australia/Victoria,sha256=JywfE9AeNealiFXLtTh4eVRRkorb8MjKKYK3nbH0UKc,2223 +pytz/zoneinfo/Australia/West,sha256=Ts1KCFyp7Ft5AwFsLU4xEnYCSivNDDXUAoFljkhCH5M,479 +pytz/zoneinfo/Australia/Yancowinna,sha256=1kUWddO1r7hXLiy7TTgXMNoj2qO_y1dgH-b4FZhSN9s,2274 +pytz/zoneinfo/Brazil/Acre,sha256=FshszMk8fr_v-uiA5DnOhIBV5CFXiYLVyr74quwV-AI,662 +pytz/zoneinfo/Brazil/DeNoronha,sha256=y06Wj0Fe1T52kQjJ1flxDomHFq90U205twd7BCbzlg0,742 +pytz/zoneinfo/Brazil/East,sha256=5U8tGHsI7_0tGsnE9m1rwKMTZwSIeShVelx_dMI69Ag,2016 
+pytz/zoneinfo/Brazil/West,sha256=4SE-e5fPpYC0-ccow0_6ZKarJ3sGtViJOymcINNSjmw,630 +pytz/zoneinfo/Canada/Atlantic,sha256=Yn4YIYxvPD9EbElwq-gWRnLip7qUvPhBseBq9QiblOo,3438 +pytz/zoneinfo/Canada/Central,sha256=d75cCPb46-UzD7hqYMREfqJUliBgn072p6emjRoS2dY,2891 +pytz/zoneinfo/Canada/Eastern,sha256=hC96ED36ycDCwzycw5KhE9JmrAZMXHSXiDq0G3l84ZQ,3503 +pytz/zoneinfo/Canada/Mountain,sha256=ZnjCP4ZXO2yV5sdIoxeqjOkPaGNrza0Y2wgecJhvtU4,2402 +pytz/zoneinfo/Canada/Newfoundland,sha256=K5YKWNbT9qJycH-UH1WxW4uj_Q_VX4aA6oSvax6YuuA,3664 +pytz/zoneinfo/Canada/Pacific,sha256=i0FLmiC-Ca1hIU3ustv_O7G9Hp01Gnms6udX-g--kd0,2901 +pytz/zoneinfo/Canada/Saskatchewan,sha256=kp0HRXQHUpY3Ym0J9cqXW08FgB810L-sTjsAJ-_uZ3Y,994 +pytz/zoneinfo/Canada/Yukon,sha256=GGT-M9UeWHgKXIQApH58QubmZVq-SagswRpGEFkFgxM,2093 +pytz/zoneinfo/Chile/Continental,sha256=A0WTAf1TQ-LArMtXwKvnGlyZBA3v4mSkPTK0MENXMe4,2538 +pytz/zoneinfo/Chile/EasterIsland,sha256=CdfM__TGTQVJnXqxIiruUWk4Rtzk4kprAWWzGKRkPG8,2242 +pytz/zoneinfo/Etc/GMT,sha256=17OYeQlBNdE-_SgpN2kLQ_SLtTWXzj54aX9I3M6us-w,127 +pytz/zoneinfo/Etc/GMT+0,sha256=17OYeQlBNdE-_SgpN2kLQ_SLtTWXzj54aX9I3M6us-w,127 +pytz/zoneinfo/Etc/GMT+1,sha256=NdAE7bKgsRN64eo2We-OladTMw8HE_yUkp0PedgCGwc,148 +pytz/zoneinfo/Etc/GMT+10,sha256=R2JgPz9RwNUGPqVJ-aV4t-vybkf9cQmm40SVrD4Jsu0,149 +pytz/zoneinfo/Etc/GMT+11,sha256=iiNSHW6TMmKR29rPKFf4p4lwvvPdk6U1V9pMwuecNro,149 +pytz/zoneinfo/Etc/GMT+12,sha256=7HBG9-QSUvg5lQzgTj8g5BuiKOZ4quKkW1sFC6mQ5iY,149 +pytz/zoneinfo/Etc/GMT+2,sha256=ITGbjCY0qDSehMO-9CKZj23U95utkfefo4FFwfa2lN0,148 +pytz/zoneinfo/Etc/GMT+3,sha256=54Yd76CovF4O5Y2Kepk6wilQ4_7WCMlTLGgLdO9sxn8,148 +pytz/zoneinfo/Etc/GMT+4,sha256=Dwq3fFvspoIxSECQw47MHOIRsTVRHVQx3BmU-KJYDIk,148 +pytz/zoneinfo/Etc/GMT+5,sha256=vnzvMs8AlFILNE_EYbwodH5hfWBDuL4LCHHociXuhWg,148 +pytz/zoneinfo/Etc/GMT+6,sha256=0oXuyHOpGyZg_ymBYwRpMFbuYawem9NIXibEvMBn4EE,148 +pytz/zoneinfo/Etc/GMT+7,sha256=YxvlZZroNznhBW4Igom2Qsr00HvliH90xsyVTisOnlw,148 +pytz/zoneinfo/Etc/GMT+8,sha256=8O3l2BHg2LKDsYuArr5s5hcmdmTsMT_FvwHiiAqMQik,148 +pytz/zoneinfo/Etc/GMT+9,sha256=aqtVL5R5hrALLUP_KKMlere4iWcyK5zgZ-RcXqlswBQ,148 +pytz/zoneinfo/Etc/GMT-0,sha256=17OYeQlBNdE-_SgpN2kLQ_SLtTWXzj54aX9I3M6us-w,127 +pytz/zoneinfo/Etc/GMT-1,sha256=1ffwaC5xAA3jQ_zify6M_5435QywZL8PYSRdx_9oBu0,149 +pytz/zoneinfo/Etc/GMT-10,sha256=L9z9AMG-RjKYkdqStG9JJYs1wJ654RA-N4mj1YM463g,150 +pytz/zoneinfo/Etc/GMT-11,sha256=RDnIp9WoyHxHt6gb0ulTTIxnb2ENQDj987OVEIml25E,150 +pytz/zoneinfo/Etc/GMT-12,sha256=XwwsIc7EAg7DEWwDjKn_Xlqehj3bf8C-unE2wyGwWFE,150 +pytz/zoneinfo/Etc/GMT-13,sha256=DE5r_2NUQGN48r2xZfrgJfoQD-jH12xs-qu3Fvbwlso,150 +pytz/zoneinfo/Etc/GMT-14,sha256=RoX5LvpbvbYl3Y1kVKNAr4rAUQMItrZoR61fe8PE_IQ,150 +pytz/zoneinfo/Etc/GMT-2,sha256=UwM1smrAMG7cjwaDqDC8Hn9REa0ijfS3TBl9LLnDE4c,149 +pytz/zoneinfo/Etc/GMT-3,sha256=pZ4eSnByIqwi_vs6bcSVzvJmhyqU1R5cqGL_3nTvDEw,149 +pytz/zoneinfo/Etc/GMT-4,sha256=fWRx-INdpeeQb4Ig3ZZ0tmRXP-5lDwootatRqlSkUk4,149 +pytz/zoneinfo/Etc/GMT-5,sha256=M6Q5EwBIyLZACtCCsuQBHHuF-v6RceExEKqG8ma-36Q,149 +pytz/zoneinfo/Etc/GMT-6,sha256=8q4WvZo6mnV4jKE6KBvMOVZ8k6r1rVQC_L_rrEc7bPc,149 +pytz/zoneinfo/Etc/GMT-7,sha256=sON9m_SW83W3wCToG2rllDzLrOD_vstoTYvRhHxcuTo,149 +pytz/zoneinfo/Etc/GMT-8,sha256=XsZ4EfvOE-4jEj7uYHkb6MtfnIRFGuDYKXc4r5t_DMo,149 +pytz/zoneinfo/Etc/GMT-9,sha256=Fu1XzXw1d_3CLVdoOEHpIrIIpTXmEl5oa-T4cCp19IU,149 +pytz/zoneinfo/Etc/GMT0,sha256=17OYeQlBNdE-_SgpN2kLQ_SLtTWXzj54aX9I3M6us-w,127 +pytz/zoneinfo/Etc/Greenwich,sha256=17OYeQlBNdE-_SgpN2kLQ_SLtTWXzj54aX9I3M6us-w,127 +pytz/zoneinfo/Etc/UCT,sha256=s7dihjysJWmqM_faGSWEr9mWXO63Jj_tSth8GzXkx9g,127 
+pytz/zoneinfo/Etc/UTC,sha256=PHGzWL6B4TscJOGZoRn9AB2825Dtx9RMLHrhdTIaAhU,127 +pytz/zoneinfo/Etc/Universal,sha256=PHGzWL6B4TscJOGZoRn9AB2825Dtx9RMLHrhdTIaAhU,127 +pytz/zoneinfo/Etc/Zulu,sha256=PHGzWL6B4TscJOGZoRn9AB2825Dtx9RMLHrhdTIaAhU,127 +pytz/zoneinfo/Europe/Amsterdam,sha256=ioE6xrjRtop5YCQsrlMloiaf0ceRsgP40i8t-jthuoc,2949 +pytz/zoneinfo/Europe/Andorra,sha256=rdVQXEcyJeM6iEoCEFYQqblQA_QpGVYkuVPBj3cTF68,1751 +pytz/zoneinfo/Europe/Astrakhan,sha256=oCdWH0k8AqBNaZkDoI6eeKx26zpxnEdJ2a6UgEGLqtg,1197 +pytz/zoneinfo/Europe/Athens,sha256=eZCQVRICwLhBf4NvrPdQSVc90cJ7XmretYT8xBQFETk,2271 +pytz/zoneinfo/Europe/Belfast,sha256=sUxIYBnjyyWc-CNaDWpLw_9s-nJqFl8eot9APIrjG4Y,3687 +pytz/zoneinfo/Europe/Belgrade,sha256=6VdUNiO6q6hJmbQBiOfglIRxt1qP9PiKuyZ-dz_rjlw,1957 +pytz/zoneinfo/Europe/Berlin,sha256=frk9y6YD1Sj99TYWDvaRHBb4NK_PiM4jo4K5f_KDGdQ,2335 +pytz/zoneinfo/Europe/Bratislava,sha256=pQvkcKIt6d7z5P7HvNldXX4k5bEe30SKgrBNGdo9PlI,2338 +pytz/zoneinfo/Europe/Brussels,sha256=83KpA6alfaQ6b1ANas7M_HToANff6Ip-r1bKJWRVfmY,2970 +pytz/zoneinfo/Europe/Bucharest,sha256=OzoAFzM7L0ZuWcisPcDPeqTwpGCAQKMYD3UrGdapNSY,2221 +pytz/zoneinfo/Europe/Budapest,sha256=tn8sRpCofylOpdNa45Z8iqi94ieus2w4dyheTpShdBg,2405 +pytz/zoneinfo/Europe/Busingen,sha256=vEX4xsgZBHfNquRvdwWfq3T96SoC_Fe3M_B8uaVemKM,1918 +pytz/zoneinfo/Europe/Chisinau,sha256=V0nwHHjQwv1Q0NwigMGVfOBBntv8fEBzxn5tp4FT2Mg,2445 +pytz/zoneinfo/Europe/Copenhagen,sha256=0tmjWe8C0q_ik_QpxP1g_AT7-NHYNDybIk3PwRbAEag,2160 +pytz/zoneinfo/Europe/Dublin,sha256=RL3F1j5bFmOGdJHMDTC4GCD8hpStD9XvUAuorGt_ul8,3531 +pytz/zoneinfo/Europe/Gibraltar,sha256=x5CI9nul0_qa2Ym9Vzv97w6GyJ4xDqcLw-AeFNyhB14,3061 +pytz/zoneinfo/Europe/Guernsey,sha256=sUxIYBnjyyWc-CNaDWpLw_9s-nJqFl8eot9APIrjG4Y,3687 +pytz/zoneinfo/Europe/Helsinki,sha256=7X2J-uH7QKlYLt1-A-0C1_6BukVrnB7Y1u5fC5MarUU,1909 +pytz/zoneinfo/Europe/Isle_of_Man,sha256=sUxIYBnjyyWc-CNaDWpLw_9s-nJqFl8eot9APIrjG4Y,3687 +pytz/zoneinfo/Europe/Istanbul,sha256=ldOJ2-YltB_ZTylyeN94QXWsuqtz3jhrzcJAcT43kVI,2166 +pytz/zoneinfo/Europe/Jersey,sha256=sUxIYBnjyyWc-CNaDWpLw_9s-nJqFl8eot9APIrjG4Y,3687 +pytz/zoneinfo/Europe/Kaliningrad,sha256=RQosb_adJitcXY71hEZOnIJ0s4mhw5t6vFqnBgjSlCU,1518 +pytz/zoneinfo/Europe/Kiev,sha256=JCkS3zIS4HJd7UqrJf2GnFLxPDzmGXZKiDrcu9k3r8U,2097 +pytz/zoneinfo/Europe/Kirov,sha256=pEJnMTy6Q_tnFiKvWxfNooXe8YT2Eh6OxgBxZGQ-PCU,1167 +pytz/zoneinfo/Europe/Lisbon,sha256=S75l1P8zlP-htK5v4iluMz9VutCuScpnF7YHblNJDqI,3469 +pytz/zoneinfo/Europe/Ljubljana,sha256=6VdUNiO6q6hJmbQBiOfglIRxt1qP9PiKuyZ-dz_rjlw,1957 +pytz/zoneinfo/Europe/London,sha256=sUxIYBnjyyWc-CNaDWpLw_9s-nJqFl8eot9APIrjG4Y,3687 +pytz/zoneinfo/Europe/Luxembourg,sha256=kLdiWSdMeKQPNKpbWFRbVAnt-7ov0I76GzAIlstAYu4,2974 +pytz/zoneinfo/Europe/Madrid,sha256=dBA60eSPcfTNm20cA9zZe1jYe7iv-wKx1pZ7IEA269Y,2637 +pytz/zoneinfo/Europe/Malta,sha256=fEE0yNN70VnjH9c56LG4IDqfMCN4i9nIO4EJ42Hu5dU,2629 +pytz/zoneinfo/Europe/Mariehamn,sha256=7X2J-uH7QKlYLt1-A-0C1_6BukVrnB7Y1u5fC5MarUU,1909 +pytz/zoneinfo/Europe/Minsk,sha256=yCqoMaaP7ByRjSM5PXlf752_TQlIeRvLproJ9Fs4JsQ,1370 +pytz/zoneinfo/Europe/Monaco,sha256=fHIzWYiEF7hqZqYJ_90L7PgWc8uz6LARExCItNJva80,2953 +pytz/zoneinfo/Europe/Moscow,sha256=AtVVFtD51JeZgmC08Smu5Zhnt3qSC6LKDFixXshYjno,1544 +pytz/zoneinfo/Europe/Nicosia,sha256=8qoqP3ekO3VYp1CKbNbFD999mR-dZNpZSP2QA5I7HXI,2016 +pytz/zoneinfo/Europe/Oslo,sha256=D6TmNdorF4-j6hP_OCnHAoRM-L1p4WvKjh0029kknQE,2251 +pytz/zoneinfo/Europe/Paris,sha256=c1sI4nN94rR-efWW81dLWp6QGeVtLq0M3BfAsp6EpYU,2971 +pytz/zoneinfo/Europe/Podgorica,sha256=6VdUNiO6q6hJmbQBiOfglIRxt1qP9PiKuyZ-dz_rjlw,1957 
+pytz/zoneinfo/Europe/Prague,sha256=pQvkcKIt6d7z5P7HvNldXX4k5bEe30SKgrBNGdo9PlI,2338 +pytz/zoneinfo/Europe/Riga,sha256=edEN67qidDRY0N7B-3HTxXbOqA0kX4SBnagqJdk8FAE,2235 +pytz/zoneinfo/Europe/Rome,sha256=Pv8u8s3Gfi9c_USGbNtNRIlWuJ6uiw439jEm8gaO6Ik,2692 +pytz/zoneinfo/Europe/Samara,sha256=UieObyK_kA-u2kJmB4z6f-0lzB1WU700XPMJD95ukRQ,1253 +pytz/zoneinfo/Europe/San_Marino,sha256=Pv8u8s3Gfi9c_USGbNtNRIlWuJ6uiw439jEm8gaO6Ik,2692 +pytz/zoneinfo/Europe/Sarajevo,sha256=6VdUNiO6q6hJmbQBiOfglIRxt1qP9PiKuyZ-dz_rjlw,1957 +pytz/zoneinfo/Europe/Saratov,sha256=KasqBfY0EmVqFDUV_lchio4ZuckW39Bd4VqHr8wNmEk,1197 +pytz/zoneinfo/Europe/Simferopol,sha256=se5vcU_Yj9Yf7231T5WrrLgN0wNsJemhBwj-ybEcNM8,1490 +pytz/zoneinfo/Europe/Skopje,sha256=6VdUNiO6q6hJmbQBiOfglIRxt1qP9PiKuyZ-dz_rjlw,1957 +pytz/zoneinfo/Europe/Sofia,sha256=FoE_sw8uu3gqgGzgZkAU3fv5IYkNMuw9E5i9GCv5JFw,2130 +pytz/zoneinfo/Europe/Stockholm,sha256=B7JC-ePYFQZjv69Bf-fSCZJ_wpn6xId4m3CEGVbDUzU,1918 +pytz/zoneinfo/Europe/Tallinn,sha256=48S6kWwlUAxwnFY5XAQKutYqg0-vr1FjqJl0t_ZrAZo,2187 +pytz/zoneinfo/Europe/Tirane,sha256=YtvGBqMqX1DOyobG-W0IjqaJvO1goWI8mG8EXN6ccwo,2098 +pytz/zoneinfo/Europe/Tiraspol,sha256=V0nwHHjQwv1Q0NwigMGVfOBBntv8fEBzxn5tp4FT2Mg,2445 +pytz/zoneinfo/Europe/Ulyanovsk,sha256=nP6H4QhGU2nxTb9fju2ShQKPZQDAnQbMPnh76UxVy5E,1281 +pytz/zoneinfo/Europe/Uzhgorod,sha256=CYV1tOxlmXWMhbytjdIdibyiE6n4kMDq1t79FIeHBfM,2103 +pytz/zoneinfo/Europe/Vaduz,sha256=vEX4xsgZBHfNquRvdwWfq3T96SoC_Fe3M_B8uaVemKM,1918 +pytz/zoneinfo/Europe/Vatican,sha256=Pv8u8s3Gfi9c_USGbNtNRIlWuJ6uiw439jEm8gaO6Ik,2692 +pytz/zoneinfo/Europe/Vienna,sha256=vkLDKtfFTxXc9d2UJeKJ_SC-vphuXowkChHf7sRlUPc,2237 +pytz/zoneinfo/Europe/Vilnius,sha256=da3AqQaznig_XlAgmEo280tPWO8dMJnvvImf8H8DX34,2199 +pytz/zoneinfo/Europe/Volgograd,sha256=DYtZim_d5NJa9aBEmZbqFZnl0RsmfAOZZzM60iLpobY,1167 +pytz/zoneinfo/Europe/Warsaw,sha256=aOdJPBygUOQTQGKnSqSk_DIVnAQrTJ2NQMi_ydJzxfo,2705 +pytz/zoneinfo/Europe/Zagreb,sha256=6VdUNiO6q6hJmbQBiOfglIRxt1qP9PiKuyZ-dz_rjlw,1957 +pytz/zoneinfo/Europe/Zaporozhye,sha256=tsUSe1JRiBjjtCEeieXh6aR5zKZfd2Ogr7FF1RLDjjQ,2115 +pytz/zoneinfo/Europe/Zurich,sha256=vEX4xsgZBHfNquRvdwWfq3T96SoC_Fe3M_B8uaVemKM,1918 +pytz/zoneinfo/Indian/Antananarivo,sha256=8UO8uDuAvBrQu7itc2yFLmK762sxNEEr-ndoRmPtIio,285 +pytz/zoneinfo/Indian/Chagos,sha256=-dL8AQ0RKF2EMCSOepyhbF_meUnnaIZvALO-cKoOqHE,225 +pytz/zoneinfo/Indian/Christmas,sha256=bQlKPZsCLtBPxT5KZlWIvXP07q7pwjZntGlEusXbvgU,182 +pytz/zoneinfo/Indian/Cocos,sha256=OlfERtZzSgdGWbhU7VbOxTxAgxozwQUs5u9LXw9rAAk,191 +pytz/zoneinfo/Indian/Comoro,sha256=8UO8uDuAvBrQu7itc2yFLmK762sxNEEr-ndoRmPtIio,285 +pytz/zoneinfo/Indian/Kerguelen,sha256=Tcqj3AwmKAl0mdLP03qyrXABHuMb6ffkU5HqL67mOwc,187 +pytz/zoneinfo/Indian/Mahe,sha256=gqbjMTmwOU6xpcuf-xUKQt9db8LFatJkIoXx1eVT_Rc,187 +pytz/zoneinfo/Indian/Maldives,sha256=tbkzs_xVSRRYfGr5VwKkwNk5QUGHN6iJU3LqUUqn1HU,220 +pytz/zoneinfo/Indian/Mauritius,sha256=8ldGbe0M4aMkxj5uvDo5NU5s3gUE8dqjUTbGhVJ47w8,267 +pytz/zoneinfo/Indian/Mayotte,sha256=8UO8uDuAvBrQu7itc2yFLmK762sxNEEr-ndoRmPtIio,285 +pytz/zoneinfo/Indian/Reunion,sha256=w9lerOsoBqgrHywJPz1zygz6ADStBEau--jEkE9qlV4,187 +pytz/zoneinfo/Mexico/BajaNorte,sha256=2Ce5W0-ha4xW2aFjY0HJESZX5WeuhLN6m_yhM64xurs,2356 +pytz/zoneinfo/Mexico/BajaSur,sha256=k0C3GbJQdzJizsYudx0SEQWu0WiDZyPfwwXjC7BM-7E,1564 +pytz/zoneinfo/Mexico/General,sha256=iLw_yxqS7wU-CvSvkFPsvxKFX98Y-FmypU2Cbb-stlU,1618 +pytz/zoneinfo/Pacific/Apia,sha256=-ySjHlON060LIs9HiNgMsX15E0YiUQ4qpnyHEtCXIcs,1134 +pytz/zoneinfo/Pacific/Auckland,sha256=17UXU4eseOKfe5Ah5BFRJ1a-KD7T0YGZQu9dRezzOOQ,2460 
+pytz/zoneinfo/Pacific/Bougainville,sha256=wbZw1DSqbATL9ztkegflvn3PL_MGY-EMJODwz6vlWzY,296 +pytz/zoneinfo/Pacific/Chatham,sha256=WAGfL6op3H23CBKTIwpyh2kFTdfA0PqelujEKZ5xMU0,2087 +pytz/zoneinfo/Pacific/Chuuk,sha256=xS5vy1vR7giv8GVpgU-WEFwMGLf3n1C5AaC9fa95Kv0,183 +pytz/zoneinfo/Pacific/Easter,sha256=CdfM__TGTQVJnXqxIiruUWk4Rtzk4kprAWWzGKRkPG8,2242 +pytz/zoneinfo/Pacific/Efate,sha256=hdeSr_wnXfGhvc-QZ8tZ9rGri7k8RQzuEpOoFXpDxq0,492 +pytz/zoneinfo/Pacific/Enderbury,sha256=W_Lhk3ldSo7Ii83PM4CX36ccJUMy7TI159MnDqcFHPc,259 +pytz/zoneinfo/Pacific/Fakaofo,sha256=6827uX2Pp8miDs9ilk0gfx7YHnOjy-p33IvlFElQr20,221 +pytz/zoneinfo/Pacific/Fiji,sha256=QB9V_GcrmcI6UyDH4vqxWJkyMxOKuz9_kXHvsyMh3u8,1104 +pytz/zoneinfo/Pacific/Funafuti,sha256=gS8nZXbK5rvQE11AcA_eT85k-DD3X-qSjKvnfFHc5Xk,183 +pytz/zoneinfo/Pacific/Galapagos,sha256=NyfsZvcdhillY3fB86AExc-t4PbFK42ot8O6LTaZhgM,268 +pytz/zoneinfo/Pacific/Gambier,sha256=q9T35Rcx0lnjDsSzPCvLiZ4UfuEC6yeKGpsruAAcZNs,186 +pytz/zoneinfo/Pacific/Guadalcanal,sha256=pps6s6bmVBkzgxYJq4u8PtW_D_Z45Rkia435ZrSXPyA,188 +pytz/zoneinfo/Pacific/Guam,sha256=ZhNXbr302da-XrhJU50_dKT7E3rPa8-M2iDSjlxhLnc,225 +pytz/zoneinfo/Pacific/Honolulu,sha256=iIhYJL-RWppvOacDB5jagl40vnnRs1wkRGlqgcFZ8ug,276 +pytz/zoneinfo/Pacific/Johnston,sha256=iIhYJL-RWppvOacDB5jagl40vnnRs1wkRGlqgcFZ8ug,276 +pytz/zoneinfo/Pacific/Kiritimati,sha256=t480Gz9wPF3FCIBZI8keOITZGui9Hi-C2bKLIwjNju8,263 +pytz/zoneinfo/Pacific/Kosrae,sha256=sv8zf7bWQERLCFeLaUqG3WnGBYg0CU3pxqD0JxU1BwY,251 +pytz/zoneinfo/Pacific/Kwajalein,sha256=CQLqlpOXlUrsuxL6X5HToAlr1EO7rqNqlu5Lk9lXuiU,259 +pytz/zoneinfo/Pacific/Majuro,sha256=AUaIy2JAAQ9YuWZzUVzowgW99tikAznhN4zrKy_IrB0,221 +pytz/zoneinfo/Pacific/Marquesas,sha256=WmPeaBtT17_HKMXUkbKYarRzR6ny_RWm87b_l40vgm0,195 +pytz/zoneinfo/Pacific/Midway,sha256=chiirjhs1eiYGpQPa1b2-bYKZfPjvS7B_mydQ7rE2xo,196 +pytz/zoneinfo/Pacific/Nauru,sha256=ycOpbPxH12SVRZr8X8UDPknxXoTqD6bBNgzPY5nQPD0,282 +pytz/zoneinfo/Pacific/Niue,sha256=wWxzYx8oxBNR__kNMQi8V1HL1AAQ_fhy2hEqEOlzmlM,266 +pytz/zoneinfo/Pacific/Norfolk,sha256=MlnCrsJnd9hekhlCy_br-PUsIvJoqIfZ3aw9mJcqUYA,323 +pytz/zoneinfo/Pacific/Noumea,sha256=E-GLS7Qmw3ObNON9bNwA1IhyGgWGXN7KlK92JG9VpN4,328 +pytz/zoneinfo/Pacific/Pago_Pago,sha256=chiirjhs1eiYGpQPa1b2-bYKZfPjvS7B_mydQ7rE2xo,196 +pytz/zoneinfo/Pacific/Palau,sha256=Oa-WwW4Z_WAm0WzmQpH1kwQgp99T03Clri3oKmFnU4U,182 +pytz/zoneinfo/Pacific/Pitcairn,sha256=sK1MvocrTSCNRbxtMmKQzSQMGIYVDw7kJjg4YnaosLA,223 +pytz/zoneinfo/Pacific/Pohnpei,sha256=K_gUCmShapwdLwLACqVi80lhcli5YQOB6Ba6gAxZIVg,183 +pytz/zoneinfo/Pacific/Ponape,sha256=K_gUCmShapwdLwLACqVi80lhcli5YQOB6Ba6gAxZIVg,183 +pytz/zoneinfo/Pacific/Port_Moresby,sha256=H7RhP7S_JG9TfiZeRB_l9icTA330AzjP2Ay312joyl8,206 +pytz/zoneinfo/Pacific/Rarotonga,sha256=lpWohSiWZFEetl-TGGD1hOfFRD1vBbELUZeseDTUfN4,602 +pytz/zoneinfo/Pacific/Saipan,sha256=ZhNXbr302da-XrhJU50_dKT7E3rPa8-M2iDSjlxhLnc,225 +pytz/zoneinfo/Pacific/Samoa,sha256=chiirjhs1eiYGpQPa1b2-bYKZfPjvS7B_mydQ7rE2xo,196 +pytz/zoneinfo/Pacific/Tahiti,sha256=yaImId23N7XWNCaR3C2U4mXIGw50MBC2cTmG2xIvyFU,187 +pytz/zoneinfo/Pacific/Tarawa,sha256=KP6jhSgTWlT9ZC_j0ru0Gqjaa3yJLDmRqyYSqBFE55k,183 +pytz/zoneinfo/Pacific/Tongatapu,sha256=FNW_On_qIetsnmOXDR2tW5--3F9bD9P1Bp7nT3oPTY0,393 +pytz/zoneinfo/Pacific/Truk,sha256=xS5vy1vR7giv8GVpgU-WEFwMGLf3n1C5AaC9fa95Kv0,183 +pytz/zoneinfo/Pacific/Wake,sha256=A0anjPYQvEPq6HwKMy0wrFz5yVABpCZHMVY8zPPDgzE,183 +pytz/zoneinfo/Pacific/Wallis,sha256=g3aZvQetpj1jL8IwOmh-XvnhlOBjuseGBViFJYIG7lw,183 +pytz/zoneinfo/Pacific/Yap,sha256=xS5vy1vR7giv8GVpgU-WEFwMGLf3n1C5AaC9fa95Kv0,183 
+pytz/zoneinfo/US/Alaska,sha256=9d8Kb3-dQ8u9PnTTOiP-aGCA61WWX12SRrboWbPbnRg,2380 +pytz/zoneinfo/US/Aleutian,sha256=xFyU0xZBPI9mav9l7R-Den4tOSJi3jHOWfrC6Woe3IE,2365 +pytz/zoneinfo/US/Arizona,sha256=nAE-z4K27R3eI1tfyYP-nSPI1W1hAyP3yUxro817RWQ,353 +pytz/zoneinfo/US/Central,sha256=FD8puVcXOkYAgYcjCjgSW9OgOz28ug3B0bhmEzH3FpM,3585 +pytz/zoneinfo/US/East-Indiana,sha256=VcLz_rJB-IQ16YduduLGnd_Q39NqJztVH_SA4s-tmf4,1675 +pytz/zoneinfo/US/Eastern,sha256=X6bczDAzUuEZXENIsYnzCFAU2KVqGXbI6KMr1P7baf0,3545 +pytz/zoneinfo/US/Hawaii,sha256=iIhYJL-RWppvOacDB5jagl40vnnRs1wkRGlqgcFZ8ug,276 +pytz/zoneinfo/US/Indiana-Starke,sha256=nnXdbFLFM5yOKxlf-KxqchLixehLK-MCPMNVlywg2FY,2437 +pytz/zoneinfo/US/Michigan,sha256=2QN63tOQ0w4uMPIwIGKtYEJlGyYTkocu93LK1a-u2So,2188 +pytz/zoneinfo/US/Mountain,sha256=9N88x0x50HCiWnkndE06QioF2GKpojShIQXFyWTvsi0,2453 +pytz/zoneinfo/US/Pacific,sha256=_qnWb_ZSLmnSIHPcToQXm3ysLjcrOY3C-v39thqesuE,2845 +pytz/zoneinfo/US/Samoa,sha256=chiirjhs1eiYGpQPa1b2-bYKZfPjvS7B_mydQ7rE2xo,196 +pytz-2018.5.dist-info/DESCRIPTION.rst,sha256=qigr7c8XIWZGXKXaH_1bM5ycnjR9t5TmRhlqePtHJ9E,19403 +pytz-2018.5.dist-info/LICENSE.txt,sha256=OfB8cqG_2jScvSe6ybyx5vjFtOXMP631aQBAbozAt5I,1088 +pytz-2018.5.dist-info/METADATA,sha256=HDsrBDoBISPtyedbLng0B1vyCuwlqfQEGJRFqioZfyQ,20795 +pytz-2018.5.dist-info/RECORD,, +pytz-2018.5.dist-info/WHEEL,sha256=kdsN-5OJAZIiHN-iO4Rhl82KyS0bDWf4uBwMbkNafr8,110 +pytz-2018.5.dist-info/metadata.json,sha256=omslXNiOJw_TkDNyYMu16QcLXjN-NUtIG924dGnzlZM,1505 +pytz-2018.5.dist-info/top_level.txt,sha256=6xRYlt934v1yHb1JIrXgHyGxn3cqACvd-yE8ski_kcc,5 +pytz-2018.5.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1 +pytz-2018.5.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +pytz/__pycache__/exceptions.cpython-36.pyc,, +pytz/__pycache__/tzinfo.cpython-36.pyc,, +pytz/__pycache__/reference.cpython-36.pyc,, +pytz/__pycache__/lazy.cpython-36.pyc,, +pytz/__pycache__/tzfile.cpython-36.pyc,, +pytz/__pycache__/__init__.cpython-36.pyc,, diff --git a/env/lib/python3.6/site-packages/pytz-2018.5.dist-info/WHEEL b/env/lib/python3.6/site-packages/pytz-2018.5.dist-info/WHEEL new file mode 100644 index 0000000..7332a41 --- /dev/null +++ b/env/lib/python3.6/site-packages/pytz-2018.5.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.30.0) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/env/lib/python3.6/site-packages/pytz-2018.5.dist-info/metadata.json b/env/lib/python3.6/site-packages/pytz-2018.5.dist-info/metadata.json new file mode 100644 index 0000000..006d441 --- /dev/null +++ b/env/lib/python3.6/site-packages/pytz-2018.5.dist-info/metadata.json @@ -0,0 +1 @@ +{"classifiers": ["Development Status :: 6 - Mature", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Natural Language :: English", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.4", "Programming Language :: Python :: 2.5", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.0", "Programming Language :: Python :: 3.1", "Programming Language :: Python :: 3.2", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Topic :: Software Development :: Libraries :: Python Modules"], "download_url": 
"https://pypi.org/project/pytz/", "extensions": {"python.details": {"contacts": [{"email": "stuart@stuartbishop.net", "name": "Stuart Bishop", "role": "author"}, {"email": "stuart@stuartbishop.net", "name": "Stuart Bishop", "role": "maintainer"}], "document_names": {"description": "DESCRIPTION.rst", "license": "LICENSE.txt"}, "project_urls": {"Home": "http://pythonhosted.org/pytz"}}}, "generator": "bdist_wheel (0.30.0)", "keywords": ["timezone", "tzinfo", "datetime", "olson", "time"], "license": "MIT", "metadata_version": "2.0", "name": "pytz", "platform": "Independent", "summary": "World timezone definitions, modern and historical", "version": "2018.5"} \ No newline at end of file diff --git a/env/lib/python3.6/site-packages/pytz-2018.5.dist-info/top_level.txt b/env/lib/python3.6/site-packages/pytz-2018.5.dist-info/top_level.txt new file mode 100644 index 0000000..af44f19 --- /dev/null +++ b/env/lib/python3.6/site-packages/pytz-2018.5.dist-info/top_level.txt @@ -0,0 +1 @@ +pytz diff --git a/env/lib/python3.6/site-packages/pytz-2018.5.dist-info/zip-safe b/env/lib/python3.6/site-packages/pytz-2018.5.dist-info/zip-safe new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/env/lib/python3.6/site-packages/pytz-2018.5.dist-info/zip-safe @@ -0,0 +1 @@ + diff --git a/env/lib/python3.6/site-packages/pytz/exceptions.py b/env/lib/python3.6/site-packages/pytz/exceptions.py new file mode 100644 index 0000000..18df33e --- /dev/null +++ b/env/lib/python3.6/site-packages/pytz/exceptions.py @@ -0,0 +1,48 @@ +''' +Custom exceptions raised by pytz. +''' + +__all__ = [ + 'UnknownTimeZoneError', 'InvalidTimeError', 'AmbiguousTimeError', + 'NonExistentTimeError', +] + + +class UnknownTimeZoneError(KeyError): + '''Exception raised when pytz is passed an unknown timezone. + + >>> isinstance(UnknownTimeZoneError(), LookupError) + True + + This class is actually a subclass of KeyError to provide backwards + compatibility with code relying on the undocumented behavior of earlier + pytz releases. + + >>> isinstance(UnknownTimeZoneError(), KeyError) + True + ''' + pass + + +class InvalidTimeError(Exception): + '''Base class for invalid time exceptions.''' + + +class AmbiguousTimeError(InvalidTimeError): + '''Exception raised when attempting to create an ambiguous wallclock time. + + At the end of a DST transition period, a particular wallclock time will + occur twice (once before the clocks are set back, once after). Both + possibilities may be correct, unless further information is supplied. + + See DstTzInfo.normalize() for more info + ''' + + +class NonExistentTimeError(InvalidTimeError): + '''Exception raised when attempting to create a wallclock time that + cannot exist. + + At the start of a DST transition period, the wallclock time jumps forward. + The instants jumped over never occur. + ''' diff --git a/env/lib/python3.6/site-packages/pytz/lazy.py b/env/lib/python3.6/site-packages/pytz/lazy.py new file mode 100644 index 0000000..39344fc --- /dev/null +++ b/env/lib/python3.6/site-packages/pytz/lazy.py @@ -0,0 +1,172 @@ +from threading import RLock +try: + from collections.abc import Mapping as DictMixin +except ImportError: # Python < 3.3 + try: + from UserDict import DictMixin # Python 2 + except ImportError: # Python 3.0-3.3 + from collections import Mapping as DictMixin + + +# With lazy loading, we might end up with multiple threads triggering +# it at the same time. We need a lock. 
+_fill_lock = RLock() + + +class LazyDict(DictMixin): + """Dictionary populated on first use.""" + data = None + + def __getitem__(self, key): + if self.data is None: + _fill_lock.acquire() + try: + if self.data is None: + self._fill() + finally: + _fill_lock.release() + return self.data[key.upper()] + + def __contains__(self, key): + if self.data is None: + _fill_lock.acquire() + try: + if self.data is None: + self._fill() + finally: + _fill_lock.release() + return key in self.data + + def __iter__(self): + if self.data is None: + _fill_lock.acquire() + try: + if self.data is None: + self._fill() + finally: + _fill_lock.release() + return iter(self.data) + + def __len__(self): + if self.data is None: + _fill_lock.acquire() + try: + if self.data is None: + self._fill() + finally: + _fill_lock.release() + return len(self.data) + + def keys(self): + if self.data is None: + _fill_lock.acquire() + try: + if self.data is None: + self._fill() + finally: + _fill_lock.release() + return self.data.keys() + + +class LazyList(list): + """List populated on first use.""" + + _props = [ + '__str__', '__repr__', '__unicode__', + '__hash__', '__sizeof__', '__cmp__', + '__lt__', '__le__', '__eq__', '__ne__', '__gt__', '__ge__', + 'append', 'count', 'index', 'extend', 'insert', 'pop', 'remove', + 'reverse', 'sort', '__add__', '__radd__', '__iadd__', '__mul__', + '__rmul__', '__imul__', '__contains__', '__len__', '__nonzero__', + '__getitem__', '__setitem__', '__delitem__', '__iter__', + '__reversed__', '__getslice__', '__setslice__', '__delslice__'] + + def __new__(cls, fill_iter=None): + + if fill_iter is None: + return list() + + # We need a new class as we will be dynamically messing with its + # methods. + class LazyList(list): + pass + + fill_iter = [fill_iter] + + def lazy(name): + def _lazy(self, *args, **kw): + _fill_lock.acquire() + try: + if len(fill_iter) > 0: + list.extend(self, fill_iter.pop()) + for method_name in cls._props: + delattr(LazyList, method_name) + finally: + _fill_lock.release() + return getattr(list, name)(self, *args, **kw) + return _lazy + + for name in cls._props: + setattr(LazyList, name, lazy(name)) + + new_list = LazyList() + return new_list + +# Not all versions of Python declare the same magic methods. +# Filter out properties that don't exist in this version of Python +# from the list. 
+LazyList._props = [prop for prop in LazyList._props if hasattr(list, prop)] + + +class LazySet(set): + """Set populated on first use.""" + + _props = ( + '__str__', '__repr__', '__unicode__', + '__hash__', '__sizeof__', '__cmp__', + '__lt__', '__le__', '__eq__', '__ne__', '__gt__', '__ge__', + '__contains__', '__len__', '__nonzero__', + '__getitem__', '__setitem__', '__delitem__', '__iter__', + '__sub__', '__and__', '__xor__', '__or__', + '__rsub__', '__rand__', '__rxor__', '__ror__', + '__isub__', '__iand__', '__ixor__', '__ior__', + 'add', 'clear', 'copy', 'difference', 'difference_update', + 'discard', 'intersection', 'intersection_update', 'isdisjoint', + 'issubset', 'issuperset', 'pop', 'remove', + 'symmetric_difference', 'symmetric_difference_update', + 'union', 'update') + + def __new__(cls, fill_iter=None): + + if fill_iter is None: + return set() + + class LazySet(set): + pass + + fill_iter = [fill_iter] + + def lazy(name): + def _lazy(self, *args, **kw): + _fill_lock.acquire() + try: + if len(fill_iter) > 0: + for i in fill_iter.pop(): + set.add(self, i) + for method_name in cls._props: + delattr(LazySet, method_name) + finally: + _fill_lock.release() + return getattr(set, name)(self, *args, **kw) + return _lazy + + for name in cls._props: + setattr(LazySet, name, lazy(name)) + + new_set = LazySet() + return new_set + +# Not all versions of Python declare the same magic methods. +# Filter out properties that don't exist in this version of Python +# from the list. +LazySet._props = [prop for prop in LazySet._props if hasattr(set, prop)] diff --git a/env/lib/python3.6/site-packages/pytz/reference.py b/env/lib/python3.6/site-packages/pytz/reference.py new file mode 100644 index 0000000..f765ca0 --- /dev/null +++ b/env/lib/python3.6/site-packages/pytz/reference.py @@ -0,0 +1,140 @@ +''' +Reference tzinfo implementations from the Python docs. +Used for testing against as they are only correct for the years +1987 to 2006. Do not use these for real code. +''' + +from datetime import tzinfo, timedelta, datetime +from pytz import HOUR, ZERO, UTC + +__all__ = [ + 'FixedOffset', + 'LocalTimezone', + 'USTimeZone', + 'Eastern', + 'Central', + 'Mountain', + 'Pacific', + 'UTC' +] + + +# A class building tzinfo objects for fixed-offset time zones. +# Note that FixedOffset(0, "UTC") is a different way to build a +# UTC tzinfo object. +class FixedOffset(tzinfo): + """Fixed offset in minutes east from UTC.""" + + def __init__(self, offset, name): + self.__offset = timedelta(minutes=offset) + self.__name = name + + def utcoffset(self, dt): + return self.__offset + + def tzname(self, dt): + return self.__name + + def dst(self, dt): + return ZERO + + +import time as _time + +STDOFFSET = timedelta(seconds=-_time.timezone) +if _time.daylight: + DSTOFFSET = timedelta(seconds=-_time.altzone) +else: + DSTOFFSET = STDOFFSET + +DSTDIFF = DSTOFFSET - STDOFFSET + + +# A class capturing the platform's idea of local time. 
+class LocalTimezone(tzinfo): + + def utcoffset(self, dt): + if self._isdst(dt): + return DSTOFFSET + else: + return STDOFFSET + + def dst(self, dt): + if self._isdst(dt): + return DSTDIFF + else: + return ZERO + + def tzname(self, dt): + return _time.tzname[self._isdst(dt)] + + def _isdst(self, dt): + tt = (dt.year, dt.month, dt.day, + dt.hour, dt.minute, dt.second, + dt.weekday(), 0, -1) + stamp = _time.mktime(tt) + tt = _time.localtime(stamp) + return tt.tm_isdst > 0 + +Local = LocalTimezone() + + +def first_sunday_on_or_after(dt): + days_to_go = 6 - dt.weekday() + if days_to_go: + dt += timedelta(days_to_go) + return dt + + +# In the US, DST starts at 2am (standard time) on the first Sunday in April. +DSTSTART = datetime(1, 4, 1, 2) +# and ends at 2am (DST time; 1am standard time) on the last Sunday of Oct. +# which is the first Sunday on or after Oct 25. +DSTEND = datetime(1, 10, 25, 1) + + +# A complete implementation of current DST rules for major US time zones. +class USTimeZone(tzinfo): + + def __init__(self, hours, reprname, stdname, dstname): + self.stdoffset = timedelta(hours=hours) + self.reprname = reprname + self.stdname = stdname + self.dstname = dstname + + def __repr__(self): + return self.reprname + + def tzname(self, dt): + if self.dst(dt): + return self.dstname + else: + return self.stdname + + def utcoffset(self, dt): + return self.stdoffset + self.dst(dt) + + def dst(self, dt): + if dt is None or dt.tzinfo is None: + # An exception may be sensible here, in one or both cases. + # It depends on how you want to treat them. The default + # fromutc() implementation (called by the default astimezone() + # implementation) passes a datetime with dt.tzinfo is self. + return ZERO + assert dt.tzinfo is self + + # Find first Sunday in April & the last in October. + start = first_sunday_on_or_after(DSTSTART.replace(year=dt.year)) + end = first_sunday_on_or_after(DSTEND.replace(year=dt.year)) + + # Can't compare naive to aware objects, so strip the timezone from + # dt first. + if start <= dt.replace(tzinfo=None) < end: + return HOUR + else: + return ZERO + +Eastern = USTimeZone(-5, "Eastern", "EST", "EDT") +Central = USTimeZone(-6, "Central", "CST", "CDT") +Mountain = USTimeZone(-7, "Mountain", "MST", "MDT") +Pacific = USTimeZone(-8, "Pacific", "PST", "PDT") diff --git a/env/lib/python3.6/site-packages/pytz/tzfile.py b/env/lib/python3.6/site-packages/pytz/tzfile.py new file mode 100644 index 0000000..25117f3 --- /dev/null +++ b/env/lib/python3.6/site-packages/pytz/tzfile.py @@ -0,0 +1,134 @@ +#!/usr/bin/env python +''' +$Id: tzfile.py,v 1.8 2004/06/03 00:15:24 zenzen Exp $ +''' + +from datetime import datetime +from struct import unpack, calcsize + +from pytz.tzinfo import StaticTzInfo, DstTzInfo, memorized_ttinfo +from pytz.tzinfo import memorized_datetime, memorized_timedelta + + +def _byte_string(s): + """Cast a string or byte string to an ASCII byte string.""" + return s.encode('ASCII') + +_NULL = _byte_string('\0') + + +def _std_string(s): + """Cast a string or byte string to an ASCII string.""" + return str(s.decode('ASCII')) + + +def build_tzinfo(zone, fp): + head_fmt = '>4s c 15x 6l' + head_size = calcsize(head_fmt) + (magic, format, ttisgmtcnt, ttisstdcnt, leapcnt, timecnt, + typecnt, charcnt) = unpack(head_fmt, fp.read(head_size)) + + # Make sure it is a tzfile(5) file + assert magic == _byte_string('TZif'), 'Got magic %s' % repr(magic) + + # Read out the transition times, localtime indices and ttinfo structures. 
+ data_fmt = '>%(timecnt)dl %(timecnt)dB %(ttinfo)s %(charcnt)ds' % dict( + timecnt=timecnt, ttinfo='lBB' * typecnt, charcnt=charcnt) + data_size = calcsize(data_fmt) + data = unpack(data_fmt, fp.read(data_size)) + + # make sure we unpacked the right number of values + assert len(data) == 2 * timecnt + 3 * typecnt + 1 + transitions = [memorized_datetime(trans) + for trans in data[:timecnt]] + lindexes = list(data[timecnt:2 * timecnt]) + ttinfo_raw = data[2 * timecnt:-1] + tznames_raw = data[-1] + del data + + # Process ttinfo into separate structs + ttinfo = [] + tznames = {} + i = 0 + while i < len(ttinfo_raw): + # have we looked up this timezone name yet? + tzname_offset = ttinfo_raw[i + 2] + if tzname_offset not in tznames: + nul = tznames_raw.find(_NULL, tzname_offset) + if nul < 0: + nul = len(tznames_raw) + tznames[tzname_offset] = _std_string( + tznames_raw[tzname_offset:nul]) + ttinfo.append((ttinfo_raw[i], + bool(ttinfo_raw[i + 1]), + tznames[tzname_offset])) + i += 3 + + # Now build the timezone object + if len(ttinfo) == 1 or len(transitions) == 0: + ttinfo[0][0], ttinfo[0][2] + cls = type(zone, (StaticTzInfo,), dict( + zone=zone, + _utcoffset=memorized_timedelta(ttinfo[0][0]), + _tzname=ttinfo[0][2])) + else: + # Early dates use the first standard time ttinfo + i = 0 + while ttinfo[i][1]: + i += 1 + if ttinfo[i] == ttinfo[lindexes[0]]: + transitions[0] = datetime.min + else: + transitions.insert(0, datetime.min) + lindexes.insert(0, i) + + # calculate transition info + transition_info = [] + for i in range(len(transitions)): + inf = ttinfo[lindexes[i]] + utcoffset = inf[0] + if not inf[1]: + dst = 0 + else: + for j in range(i - 1, -1, -1): + prev_inf = ttinfo[lindexes[j]] + if not prev_inf[1]: + break + dst = inf[0] - prev_inf[0] # dst offset + + # Bad dst? Look further. DST > 24 hours happens when + # a timzone has moved across the international dateline. + if dst <= 0 or dst > 3600 * 3: + for j in range(i + 1, len(transitions)): + stdinf = ttinfo[lindexes[j]] + if not stdinf[1]: + dst = inf[0] - stdinf[0] + if dst > 0: + break # Found a useful std time. + + tzname = inf[2] + + # Round utcoffset and dst to the nearest minute or the + # datetime library will complain. Conversions to these timezones + # might be up to plus or minus 30 seconds out, but it is + # the best we can do. 
+ utcoffset = int((utcoffset + 30) // 60) * 60 + dst = int((dst + 30) // 60) * 60 + transition_info.append(memorized_ttinfo(utcoffset, dst, tzname)) + + cls = type(zone, (DstTzInfo,), dict( + zone=zone, + _utc_transition_times=transitions, + _transition_info=transition_info)) + + return cls() + +if __name__ == '__main__': + import os.path + from pprint import pprint + base = os.path.join(os.path.dirname(__file__), 'zoneinfo') + tz = build_tzinfo('Australia/Melbourne', + open(os.path.join(base, 'Australia', 'Melbourne'), 'rb')) + tz = build_tzinfo('US/Eastern', + open(os.path.join(base, 'US', 'Eastern'), 'rb')) + pprint(tz._utc_transition_times) diff --git a/env/lib/python3.6/site-packages/pytz/tzinfo.py b/env/lib/python3.6/site-packages/pytz/tzinfo.py new file mode 100644 index 0000000..725978d --- /dev/null +++ b/env/lib/python3.6/site-packages/pytz/tzinfo.py @@ -0,0 +1,577 @@ +'''Base classes and helpers for building zone specific tzinfo classes''' + +from datetime import datetime, timedelta, tzinfo +from bisect import bisect_right +try: + set +except NameError: + from sets import Set as set + +import pytz +from pytz.exceptions import AmbiguousTimeError, NonExistentTimeError + +__all__ = [] + +_timedelta_cache = {} + + +def memorized_timedelta(seconds): + '''Create only one instance of each distinct timedelta''' + try: + return _timedelta_cache[seconds] + except KeyError: + delta = timedelta(seconds=seconds) + _timedelta_cache[seconds] = delta + return delta + +_epoch = datetime.utcfromtimestamp(0) +_datetime_cache = {0: _epoch} + + +def memorized_datetime(seconds): + '''Create only one instance of each distinct datetime''' + try: + return _datetime_cache[seconds] + except KeyError: + # NB. We can't just do datetime.utcfromtimestamp(seconds) as this + # fails with negative values under Windows (Bug #90096) + dt = _epoch + timedelta(seconds=seconds) + _datetime_cache[seconds] = dt + return dt + +_ttinfo_cache = {} + + +def memorized_ttinfo(*args): + '''Create only one instance of each distinct tuple''' + try: + return _ttinfo_cache[args] + except KeyError: + ttinfo = ( + memorized_timedelta(args[0]), + memorized_timedelta(args[1]), + args[2] + ) + _ttinfo_cache[args] = ttinfo + return ttinfo + +_notime = memorized_timedelta(0) + + +def _to_seconds(td): + '''Convert a timedelta to seconds''' + return td.seconds + td.days * 24 * 60 * 60 + + +class BaseTzInfo(tzinfo): + # Overridden in subclass + _utcoffset = None + _tzname = None + zone = None + + def __str__(self): + return self.zone + + +class StaticTzInfo(BaseTzInfo): + '''A timezone that has a constant offset from UTC + + These timezones are rare, as most locations have changed their + offset at some point in their history + ''' + def fromutc(self, dt): + '''See datetime.tzinfo.fromutc''' + if dt.tzinfo is not None and dt.tzinfo is not self: + raise ValueError('fromutc: dt.tzinfo is not self') + return (dt + self._utcoffset).replace(tzinfo=self) + + def utcoffset(self, dt, is_dst=None): + '''See datetime.tzinfo.utcoffset + + is_dst is ignored for StaticTzInfo, and exists only to + retain compatibility with DstTzInfo. + ''' + return self._utcoffset + + def dst(self, dt, is_dst=None): + '''See datetime.tzinfo.dst + + is_dst is ignored for StaticTzInfo, and exists only to + retain compatibility with DstTzInfo. + ''' + return _notime + + def tzname(self, dt, is_dst=None): + '''See datetime.tzinfo.tzname + + is_dst is ignored for StaticTzInfo, and exists only to + retain compatibility with DstTzInfo. 
+ ''' + return self._tzname + + def localize(self, dt, is_dst=False): + '''Convert naive time to local time''' + if dt.tzinfo is not None: + raise ValueError('Not naive datetime (tzinfo is already set)') + return dt.replace(tzinfo=self) + + def normalize(self, dt, is_dst=False): + '''Correct the timezone information on the given datetime. + + This is normally a no-op, as StaticTzInfo timezones never have + ambiguous cases to correct: + + >>> from pytz import timezone + >>> gmt = timezone('GMT') + >>> isinstance(gmt, StaticTzInfo) + True + >>> dt = datetime(2011, 5, 8, 1, 2, 3, tzinfo=gmt) + >>> gmt.normalize(dt) is dt + True + + The supported method of converting between timezones is to use + datetime.astimezone(). Currently normalize() also works: + + >>> la = timezone('America/Los_Angeles') + >>> dt = la.localize(datetime(2011, 5, 7, 1, 2, 3)) + >>> fmt = '%Y-%m-%d %H:%M:%S %Z (%z)' + >>> gmt.normalize(dt).strftime(fmt) + '2011-05-07 08:02:03 GMT (+0000)' + ''' + if dt.tzinfo is self: + return dt + if dt.tzinfo is None: + raise ValueError('Naive time - no tzinfo set') + return dt.astimezone(self) + + def __repr__(self): + return '<StaticTzInfo %r>' % (self.zone,) + + def __reduce__(self): + # Special pickle to zone remains a singleton and to cope with + # database changes. + return pytz._p, (self.zone,) + + +class DstTzInfo(BaseTzInfo): + '''A timezone that has a variable offset from UTC + + The offset might change if daylight saving time comes into effect, + or at a point in history when the region decides to change their + timezone definition. + ''' + # Overridden in subclass + + # Sorted list of DST transition times, UTC + _utc_transition_times = None + + # [(utcoffset, dstoffset, tzname)] corresponding to + # _utc_transition_times entries + _transition_info = None + + zone = None + + # Set in __init__ + + _tzinfos = None + _dst = None # DST offset + + def __init__(self, _inf=None, _tzinfos=None): + if _inf: + self._tzinfos = _tzinfos + self._utcoffset, self._dst, self._tzname = _inf + else: + _tzinfos = {} + self._tzinfos = _tzinfos + self._utcoffset, self._dst, self._tzname = ( + self._transition_info[0]) + _tzinfos[self._transition_info[0]] = self + for inf in self._transition_info[1:]: + if inf not in _tzinfos: + _tzinfos[inf] = self.__class__(inf, _tzinfos) + + def fromutc(self, dt): + '''See datetime.tzinfo.fromutc''' + if (dt.tzinfo is not None and + getattr(dt.tzinfo, '_tzinfos', None) is not self._tzinfos): + raise ValueError('fromutc: dt.tzinfo is not self') + dt = dt.replace(tzinfo=None) + idx = max(0, bisect_right(self._utc_transition_times, dt) - 1) + inf = self._transition_info[idx] + return (dt + inf[0]).replace(tzinfo=self._tzinfos[inf]) + + def normalize(self, dt): + '''Correct the timezone information on the given datetime + + If date arithmetic crosses DST boundaries, the tzinfo + is not magically adjusted. This method normalizes the + tzinfo to the correct one. + + To test, first we need to do some setup + + >>> from pytz import timezone + >>> utc = timezone('UTC') + >>> eastern = timezone('US/Eastern') + >>> fmt = '%Y-%m-%d %H:%M:%S %Z (%z)' + + We next create a datetime right on an end-of-DST transition point, + the instant when the wallclocks are wound back one hour. + + >>> utc_dt = datetime(2002, 10, 27, 6, 0, 0, tzinfo=utc) + >>> loc_dt = utc_dt.astimezone(eastern) + >>> loc_dt.strftime(fmt) + '2002-10-27 01:00:00 EST (-0500)' + + Now, if we subtract a few minutes from it, note that the timezone + information has not changed. 
+ + >>> before = loc_dt - timedelta(minutes=10) + >>> before.strftime(fmt) + '2002-10-27 00:50:00 EST (-0500)' + + But we can fix that by calling the normalize method + + >>> before = eastern.normalize(before) + >>> before.strftime(fmt) + '2002-10-27 01:50:00 EDT (-0400)' + + The supported method of converting between timezones is to use + datetime.astimezone(). Currently, normalize() also works: + + >>> th = timezone('Asia/Bangkok') + >>> am = timezone('Europe/Amsterdam') + >>> dt = th.localize(datetime(2011, 5, 7, 1, 2, 3)) + >>> fmt = '%Y-%m-%d %H:%M:%S %Z (%z)' + >>> am.normalize(dt).strftime(fmt) + '2011-05-06 20:02:03 CEST (+0200)' + ''' + if dt.tzinfo is None: + raise ValueError('Naive time - no tzinfo set') + + # Convert dt in localtime to UTC + offset = dt.tzinfo._utcoffset + dt = dt.replace(tzinfo=None) + dt = dt - offset + # convert it back, and return it + return self.fromutc(dt) + + def localize(self, dt, is_dst=False): + '''Convert naive time to local time. + + This method should be used to construct localtimes, rather + than passing a tzinfo argument to a datetime constructor. + + is_dst is used to determine the correct timezone in the ambigous + period at the end of daylight saving time. + + >>> from pytz import timezone + >>> fmt = '%Y-%m-%d %H:%M:%S %Z (%z)' + >>> amdam = timezone('Europe/Amsterdam') + >>> dt = datetime(2004, 10, 31, 2, 0, 0) + >>> loc_dt1 = amdam.localize(dt, is_dst=True) + >>> loc_dt2 = amdam.localize(dt, is_dst=False) + >>> loc_dt1.strftime(fmt) + '2004-10-31 02:00:00 CEST (+0200)' + >>> loc_dt2.strftime(fmt) + '2004-10-31 02:00:00 CET (+0100)' + >>> str(loc_dt2 - loc_dt1) + '1:00:00' + + Use is_dst=None to raise an AmbiguousTimeError for ambiguous + times at the end of daylight saving time + + >>> try: + ... loc_dt1 = amdam.localize(dt, is_dst=None) + ... except AmbiguousTimeError: + ... print('Ambiguous') + Ambiguous + + is_dst defaults to False + + >>> amdam.localize(dt) == amdam.localize(dt, False) + True + + is_dst is also used to determine the correct timezone in the + wallclock times jumped over at the start of daylight saving time. + + >>> pacific = timezone('US/Pacific') + >>> dt = datetime(2008, 3, 9, 2, 0, 0) + >>> ploc_dt1 = pacific.localize(dt, is_dst=True) + >>> ploc_dt2 = pacific.localize(dt, is_dst=False) + >>> ploc_dt1.strftime(fmt) + '2008-03-09 02:00:00 PDT (-0700)' + >>> ploc_dt2.strftime(fmt) + '2008-03-09 02:00:00 PST (-0800)' + >>> str(ploc_dt2 - ploc_dt1) + '1:00:00' + + Use is_dst=None to raise a NonExistentTimeError for these skipped + times. + + >>> try: + ... loc_dt1 = pacific.localize(dt, is_dst=None) + ... except NonExistentTimeError: + ... print('Non-existent') + Non-existent + ''' + if dt.tzinfo is not None: + raise ValueError('Not naive datetime (tzinfo is already set)') + + # Find the two best possibilities. + possible_loc_dt = set() + for delta in [timedelta(days=-1), timedelta(days=1)]: + loc_dt = dt + delta + idx = max(0, bisect_right( + self._utc_transition_times, loc_dt) - 1) + inf = self._transition_info[idx] + tzinfo = self._tzinfos[inf] + loc_dt = tzinfo.normalize(dt.replace(tzinfo=tzinfo)) + if loc_dt.replace(tzinfo=None) == dt: + possible_loc_dt.add(loc_dt) + + if len(possible_loc_dt) == 1: + return possible_loc_dt.pop() + + # If there are no possibly correct timezones, we are attempting + # to convert a time that never happened - the time period jumped + # during the start-of-DST transition period. + if len(possible_loc_dt) == 0: + # If we refuse to guess, raise an exception. 
+            if is_dst is None:
+                raise NonExistentTimeError(dt)
+
+            # If we are forcing the pre-DST side of the DST transition, we
+            # obtain the correct timezone by winding the clock forward a few
+            # hours.
+            elif is_dst:
+                return self.localize(
+                    dt + timedelta(hours=6), is_dst=True) - timedelta(hours=6)
+
+            # If we are forcing the post-DST side of the DST transition, we
+            # obtain the correct timezone by winding the clock back.
+            else:
+                return self.localize(
+                    dt - timedelta(hours=6),
+                    is_dst=False) + timedelta(hours=6)
+
+        # If we get this far, we have multiple possible timezones - this
+        # is an ambiguous case occurring during the end-of-DST transition.
+
+        # If told to be strict, raise an exception since we have an
+        # ambiguous case
+        if is_dst is None:
+            raise AmbiguousTimeError(dt)
+
+        # Filter out the possibilities that don't match the requested
+        # is_dst
+        filtered_possible_loc_dt = [
+            p for p in possible_loc_dt if bool(p.tzinfo._dst) == is_dst
+        ]
+
+        # Hopefully we only have one possibility left. Return it.
+        if len(filtered_possible_loc_dt) == 1:
+            return filtered_possible_loc_dt[0]
+
+        if len(filtered_possible_loc_dt) == 0:
+            filtered_possible_loc_dt = list(possible_loc_dt)
+
+        # If we get this far, we are in a weird timezone transition
+        # where the clocks have been wound back but is_dst is the same
+        # in both (eg. Europe/Warsaw 1915 when they switched to CET).
+        # At this point, we just have to guess unless we allow more
+        # hints to be passed in (such as the UTC offset or abbreviation),
+        # but that is just getting silly.
+        #
+        # Choose the earliest (by UTC) applicable timezone if is_dst=True
+        # Choose the latest (by UTC) applicable timezone if is_dst=False
+        # i.e., behave like end-of-DST transition
+        dates = {}  # utc -> local
+        for local_dt in filtered_possible_loc_dt:
+            utc_time = (
+                local_dt.replace(tzinfo=None) - local_dt.tzinfo._utcoffset)
+            assert utc_time not in dates
+            dates[utc_time] = local_dt
+        return dates[[min, max][not is_dst](dates)]
+
+    def utcoffset(self, dt, is_dst=None):
+        '''See datetime.tzinfo.utcoffset
+
+        The is_dst parameter may be used to remove ambiguity during DST
+        transitions.
+
+        >>> from pytz import timezone
+        >>> tz = timezone('America/St_Johns')
+        >>> ambiguous = datetime(2009, 10, 31, 23, 30)
+
+        >>> str(tz.utcoffset(ambiguous, is_dst=False))
+        '-1 day, 20:30:00'
+
+        >>> str(tz.utcoffset(ambiguous, is_dst=True))
+        '-1 day, 21:30:00'
+
+        >>> try:
+        ...     tz.utcoffset(ambiguous)
+        ... except AmbiguousTimeError:
+        ...     print('Ambiguous')
+        Ambiguous
+
+        '''
+        if dt is None:
+            return None
+        elif dt.tzinfo is not self:
+            dt = self.localize(dt, is_dst)
+            return dt.tzinfo._utcoffset
+        else:
+            return self._utcoffset
+
+    def dst(self, dt, is_dst=None):
+        '''See datetime.tzinfo.dst
+
+        The is_dst parameter may be used to remove ambiguity during DST
+        transitions.
+
+        >>> from pytz import timezone
+        >>> tz = timezone('America/St_Johns')
+
+        >>> normal = datetime(2009, 9, 1)
+
+        >>> str(tz.dst(normal))
+        '1:00:00'
+        >>> str(tz.dst(normal, is_dst=False))
+        '1:00:00'
+        >>> str(tz.dst(normal, is_dst=True))
+        '1:00:00'
+
+        >>> ambiguous = datetime(2009, 10, 31, 23, 30)
+
+        >>> str(tz.dst(ambiguous, is_dst=False))
+        '0:00:00'
+        >>> str(tz.dst(ambiguous, is_dst=True))
+        '1:00:00'
+        >>> try:
+        ...     tz.dst(ambiguous)
+        ... except AmbiguousTimeError:
+        ...     print('Ambiguous')
+        Ambiguous
+
+        '''
+        if dt is None:
+            return None
+        elif dt.tzinfo is not self:
+            dt = self.localize(dt, is_dst)
+            return dt.tzinfo._dst
+        else:
+            return self._dst
+
+    def tzname(self, dt, is_dst=None):
+        '''See datetime.tzinfo.tzname
+
+        The is_dst parameter may be used to remove ambiguity during DST
+        transitions.
+
+        >>> from pytz import timezone
+        >>> tz = timezone('America/St_Johns')
+
+        >>> normal = datetime(2009, 9, 1)
+
+        >>> tz.tzname(normal)
+        'NDT'
+        >>> tz.tzname(normal, is_dst=False)
+        'NDT'
+        >>> tz.tzname(normal, is_dst=True)
+        'NDT'
+
+        >>> ambiguous = datetime(2009, 10, 31, 23, 30)
+
+        >>> tz.tzname(ambiguous, is_dst=False)
+        'NST'
+        >>> tz.tzname(ambiguous, is_dst=True)
+        'NDT'
+        >>> try:
+        ...     tz.tzname(ambiguous)
+        ... except AmbiguousTimeError:
+        ...     print('Ambiguous')
+        Ambiguous
+        '''
+        if dt is None:
+            return self.zone
+        elif dt.tzinfo is not self:
+            dt = self.localize(dt, is_dst)
+            return dt.tzinfo._tzname
+        else:
+            return self._tzname
+
+    def __repr__(self):
+        if self._dst:
+            dst = 'DST'
+        else:
+            dst = 'STD'
+        if self._utcoffset > _notime:
+            return '<DstTzInfo %r %s+%s %s>' % (
+                self.zone, self._tzname, self._utcoffset, dst
+            )
+        else:
+            return '<DstTzInfo %r %s%s %s>' % (
+                self.zone, self._tzname, self._utcoffset, dst
+            )
+
+    def __reduce__(self):
+        # Special pickle to zone remains a singleton and to cope with
+        # database changes.
+        return pytz._p, (
+            self.zone,
+            _to_seconds(self._utcoffset),
+            _to_seconds(self._dst),
+            self._tzname
+        )
+
+
+def unpickler(zone, utcoffset=None, dstoffset=None, tzname=None):
+    """Factory function for unpickling pytz tzinfo instances.
+
+    This is shared for both StaticTzInfo and DstTzInfo instances, because
+    database changes could cause a zone's implementation to switch between
+    these two base classes and we can't break pickles on a pytz version
+    upgrade.
+    """
+    # Raises a KeyError if zone no longer exists, which should never happen
+    # and would be a bug.
+    tz = pytz.timezone(zone)
+
+    # A StaticTzInfo - just return it
+    if utcoffset is None:
+        return tz
+
+    # This pickle was created from a DstTzInfo. We need to
+    # determine which of the list of tzinfo instances for this zone
+    # to use in order to restore the state of any datetime instances using
+    # it correctly.
+    utcoffset = memorized_timedelta(utcoffset)
+    dstoffset = memorized_timedelta(dstoffset)
+    try:
+        return tz._tzinfos[(utcoffset, dstoffset, tzname)]
+    except KeyError:
+        # The particular state requested in this timezone no longer exists.
+        # This indicates a corrupt pickle, or the timezone database has been
+        # corrected violently enough to make this particular
+        # (utcoffset,dstoffset) no longer exist in the zone, or the
+        # abbreviation has been changed.
+        pass
+
+    # See if we can find an entry differing only by tzname. Abbreviations
+    # get changed from the initial guess by the database maintainers to
+    # match reality when this information is discovered.
+    for localized_tz in tz._tzinfos.values():
+        if (localized_tz._utcoffset == utcoffset and
+                localized_tz._dst == dstoffset):
+            return localized_tz
+
+    # This (utcoffset, dstoffset) information has been removed from the
+    # zone. Add it back. This might occur when the database maintainers have
+    # corrected incorrect information. datetime instances using this
+    # incorrect information will continue to do so, exactly as they were
+    # before being pickled. This is purely an overly paranoid safety net - I
+    # doubt this will ever be needed in real life.
+ inf = (utcoffset, dstoffset, tzname) + tz._tzinfos[inf] = tz.__class__(inf, tz._tzinfos) + return tz._tzinfos[inf] diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Abidjan b/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Abidjan new file mode 100644 index 0000000000000000000000000000000000000000..6fd1af32daec193239ab6b472526fd3d6bdb2f76 GIT binary patch literal 170 zcmWHE%1kq2zyM4@5fBCe7@MO3$eHwPk_Q9h|Nnn1KvF=!;^P~_;10wf5JG}!KfpQ| Q82$qRep9)C#v5<}0M)@5+W-In literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Accra b/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Accra new file mode 100644 index 0000000000000000000000000000000000000000..8726e80df27f8d6794b295674a18a71c9cab4c85 GIT binary patch literal 842 zcmcK2J!n%=9ES0KK9&Yr7h4crvI#}Kgl5W8rz{Z<m@XNlAOv)95J9L22Rn6<4mBVF zhlFah%?IH&YW%!}M6HP&2!c3O=_ul&R8Zo1j&7YY^n{z=C6GYg=Vo&k^9k8^BK8w5 zj%P3Kr?h>$>*f9O+;v?Z{#NVLby?p!W$yj_qVE?jsLIC~d2p>@9=40JkvnQOi|=(c zbyn4Ggr$D$rfHl{>c*dzYR<QHb0?+z_&4!`%cd2H>sG0x9_=p3<K++LN$-=i^Evah zGO3@XZ>i@ir{%@*z`VS4Rll0}p@PgW9eiC<os%b|GpL&Gp+()TeOA5iYw~*auz52) zB5$wUG24DdzdJWj{o7gwNih-*jrAIhMt%F2CE5r@QDT`hS(){`gePfx_wQuKSa9$E zckC~ZSQ1&&X%|ISb=qZ-b&-XUm64^9wUNb<)sf|q^^pQd1*a{6)NtA&NEN3ogVb@_ zLP#Z~6jBQ*hEzk!A@z`gNJXS1QqyURB2}HXEK=8L3nP`0(nxKjI8q%ckJQJ5djewi IVen#q0by*H{r~^~ literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Addis_Ababa b/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Addis_Ababa new file mode 100644 index 0000000000000000000000000000000000000000..39631f21486c61a19639d73d7571ef0840176b3e GIT binary patch literal 285 zcmWHE%1kq2zyPd35fBCe7+a(P$l3Vr)}JkhW?eWw(f!6-{fAc=7@3$E85opWKq|Ei z7+4q>lr<PQ7#P&nFz`V{e0)O~TpdFgv<-}m4M3!cDM&FCgplBXA5cC2fdJ$X5Djt+ Thz2<bL{r64TtN5g8gl^vna4UY literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Algiers b/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Algiers new file mode 100644 index 0000000000000000000000000000000000000000..2a25f3ac268fd5c605c58d36260759c8f8d125bc GIT binary patch literal 760 zcmc(cyDtPm9Dsj&)*ixH&q5S}N|2oJ4_JbDoWv#`k=#`mqM0b<B8pSe(Rv+DDN%|j zXxvrai9{3{{UjO{=35GhLTNTL-(+?-+5J9$YBt#+zebTcVKN%cWM7vy%UWVIw$Yc; z8xO5&^K?yZo%QSOm3Ot1Y0;U)f!<A*>b>?#y}vl5vt7$Nd+ex-4@c!DPSUx@;>^{0 zr+d9Bax-7+-i|b@`@yh!xxACt=_4IVo$CTGugk6;By!lJ%TrV0&P=+M3!bV<cBiZ3 zOKwdpE|wiC_;J}zp}B0woQBziZAT>ZWIk3xQL#lLuJ!dVWt@pw>l>^J&ZYM2?$EF= zqh^lxj<NXSvGL4C#Il07z8C**UrpxGz}MUNJ0E7u<p-W2{vZwm8jlc{5T6jI5U&uo z5Wf(|5YG_T0gZ2nbBOnV#y!M8qyeM@qy?l0qzR-8zd;*FA4nqxogl3k^nx^lbc3{m O^!wL<C8laywB!@(boU$p literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Asmara b/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Asmara new file mode 100644 index 0000000000000000000000000000000000000000..39631f21486c61a19639d73d7571ef0840176b3e GIT binary patch literal 285 zcmWHE%1kq2zyPd35fBCe7+a(P$l3Vr)}JkhW?eWw(f!6-{fAc=7@3$E85opWKq|Ei z7+4q>lr<PQ7#P&nFz`V{e0)O~TpdFgv<-}m4M3!cDM&FCgplBXA5cC2fdJ$X5Djt+ Thz2<bL{r64TtN5g8gl^vna4UY literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Asmera b/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Asmera new file mode 100644 index 0000000000000000000000000000000000000000..39631f21486c61a19639d73d7571ef0840176b3e GIT binary patch literal 285 zcmWHE%1kq2zyPd35fBCe7+a(P$l3Vr)}JkhW?eWw(f!6-{fAc=7@3$E85opWKq|Ei z7+4q>lr<PQ7#P&nFz`V{e0)O~TpdFgv<-}m4M3!cDM&FCgplBXA5cC2fdJ$X5Djt+ Thz2<bL{r64TtN5g8gl^vna4UY literal 0 HcmV?d00001 diff --git 
a/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Bamako b/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Bamako new file mode 100644 index 0000000000000000000000000000000000000000..6fd1af32daec193239ab6b472526fd3d6bdb2f76 GIT binary patch literal 170 zcmWHE%1kq2zyM4@5fBCe7@MO3$eHwPk_Q9h|Nnn1KvF=!;^P~_;10wf5JG}!KfpQ| Q82$qRep9)C#v5<}0M)@5+W-In literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Bangui b/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Bangui new file mode 100644 index 0000000000000000000000000000000000000000..b1c97cc5a77eb187cc8ea8a4031a45a9bf153b35 GIT binary patch literal 171 zcmWHE%1kq2zyM4@5fBCe7@MO3$XOWpIe>wYfq~foB*iDdz~bW@!VvBl0^%}+kYL&m Tunq=>|3HA>R4$<Lx`tc;I6M;> literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Banjul b/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Banjul new file mode 100644 index 0000000000000000000000000000000000000000..6fd1af32daec193239ab6b472526fd3d6bdb2f76 GIT binary patch literal 170 zcmWHE%1kq2zyM4@5fBCe7@MO3$eHwPk_Q9h|Nnn1KvF=!;^P~_;10wf5JG}!KfpQ| Q82$qRep9)C#v5<}0M)@5+W-In literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Bissau b/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Bissau new file mode 100644 index 0000000000000000000000000000000000000000..8e32be3e6e843379e69000273552ab8588c5ca34 GIT binary patch literal 208 zcmWHE%1kq2zyQoZ5fBCe7@Ma7$eHwP&IHc%L;(gyrvLvxrZ6x7$qx)HAax8JKE5Fg lx(0>}?m(J>5rjfWF#ZSF5Ri%g|AVXnaY?X>3uvnW7XTd?B!vI~ literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Blantyre b/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Blantyre new file mode 100644 index 0000000000000000000000000000000000000000..5b871dbaa7c2969f6b4dfc854184a29010bfb2cc GIT binary patch literal 171 zcmWHE%1kq2zyM4@5fBCe7@MO3$Z2vr`h|g!fkCbZBqgK3z~bW@!r<%}0^%}+kYL&m Tunq=>|3HA>R4$<Lx<*_8th^MU literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Brazzaville b/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Brazzaville new file mode 100644 index 0000000000000000000000000000000000000000..b1c97cc5a77eb187cc8ea8a4031a45a9bf153b35 GIT binary patch literal 171 zcmWHE%1kq2zyM4@5fBCe7@MO3$XOWpIe>wYfq~foB*iDdz~bW@!VvBl0^%}+kYL&m Tunq=>|3HA>R4$<Lx`tc;I6M;> literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Bujumbura b/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Bujumbura new file mode 100644 index 0000000000000000000000000000000000000000..5b871dbaa7c2969f6b4dfc854184a29010bfb2cc GIT binary patch literal 171 zcmWHE%1kq2zyM4@5fBCe7@MO3$Z2vr`h|g!fkCbZBqgK3z~bW@!r<%}0^%}+kYL&m Tunq=>|3HA>R4$<Lx<*_8th^MU literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Cairo b/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Cairo new file mode 100644 index 0000000000000000000000000000000000000000..ba097504459673e254831edea689c2f8143990df GIT binary patch literal 1972 zcmcK4Z%oxy9LMo<K@qQ5UbPhS8X6>Gl#oH>O)9+(4cD*T%Sy-Uhc#Wb+-c;NQsd0E znq_Oo16^(DGFzoqW?3|K%eF$aIN8Wew>-3#WQHdMjhjU0ee79Vt_OY@_r?4(o}Bk5 zz4Vdg<>FsA%>4=9T(kS;=h4c2w6%O>+A>Z0;Ds}${i|+$=+JxN;S)3T*E_n*H(&PY zj+d7k{myFr?X#gd^1{xbyW*j+yRS4jHL5i{)fwtvPiO4uj~^Gmbu`<ajlJT`2fL+R zU8H{B@U!h*x<s69oFx0^tXKVYo8`GFF?GJ;h&?})6ay1;<Um)u$P853KMxG53&;1{ z!EI~RUweDy&~tl)c)iF*R{Wx}nwQAzg>f;g;RiW<<{KhsUd)cj>r}agNtxR-QbdP8 zWuqMlHR|M`&D-_48hx-#Uhzu5xboeH?U*NvRqVx0a%^3_7`HMm$5(cVtL_Qx)%oMq 
zge@(0LZ(LK%dIy5c$1pcm$29D7iw~+l=028#gsi;?X_x!D%f~bPHlKiOj}bgubW>W z3YSfnMMWufec}r_J?D8*96x7^PkyFKM3XIPuU0d=-j_FQZxp2mezZ5Pe@or8Wt^Gy zSe`Cx?l<L&di2c?9yPN|w7#Xf)l@`3)Ro2Ghm~hLf~wq4!>aFg1-Es-8qV3>5+vH5 z4sUN+70iA6fv~#qzTl1xYs{VXjry)9>dm}Fot}U1T$3C-7%V6&FbmA1K}{^n)O-@t z3++#4(Mzqt-NzzfZNolYyY7Bd`(`Fct-95ucC6E>V|&AN;o2Z8a+$uev;O<*;_nqA zUgut)D47*;PjPaA7<uvM+Qn&+Os4oq^Y;-x(wBbI%KbC{rvKnSxEpf!%)Z6jkvHyj z?;LsS$a_cLJo4_5w~xGkqyeM@qy?l0qzSL<0%^nR`al{%Izd`NdO?~&x<T4O`av2( zIzn1PdP17=x~`D6ysj^#F{Cr3HKaH6&G7@>ajrd}KcqpVLyi`a9yyvsx<uOax;{A? z^}0@xR*_zjW|3|=+C}<B8b&(iXc_65qiLjTq;0S3o1<~B>l|qv=^bev>7Ju~q<>@s zkR9OI0%Q+3HUZfMWE+ru;MfSS+X-YVylyX$%|Ld8V>^)jKsE%~5oAk{JwY}F*%f44 zkbOZm#_M(l*&46g8)S2k-9feo*&k$skR3v{2-zcKlaO6Pwh7rMWTU)pr;x4ky1ha+ x3)wAXyO90zx()NX9YeOv>-Nm+HVxS|ahYZ<a*0{X{^$4puVIV2l`Ace{tf(fqG134 literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Casablanca b/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Casablanca new file mode 100644 index 0000000000000000000000000000000000000000..65de34457986ee755ffd614de6378dbee965a64d GIT binary patch literal 1643 zcmd7SYiP}J9LMqh#xl2|hzMVowI{<i*C%ext=ZVwIkU6bW*c^KJ?75XpM(b}CBo8I zu0>i>6c6OVSW2ShHd3xnaEQ#U`M!UYnn#}bt^fb^Kj*YP+2?&i1y$u~-d`uq-r?db zw2S8+zTNf*zSK3e_VllZ&ql64c;nwWnihRD`ndP#ZBq1GV)dkN*WN|q7jN_98<s{p zzwY97DV-ke+Bnfq*z+OM?QX#Do^&kI_u@%!K%eE&8Ixc7GcFH~WDM!+W$bQ>%xu2u zXBNGPWF5HV&l<HWI=fCaJN~j}ryi2*ic=b>*{Xs2H#H~jh~_L=sll-uG<a^12Aiiz zC^J`cdlzbM%~Q!e(_ZH!kJUM!Cum-9hvscLE8$)t3EvaVmyens4r>0VF;Z|bRtrAY z$=t-NI=88h6zV`Jyp^tn-z%l)!dEHwW2E?Sy%sk%$-I5dQZnzElvIbML~FEkQ;U|Z z9V+vS9_oTAdu8GHD_Y)foL2Ndq80C-O63n<t8VPjMUTsL@#!jE(oic)ch1yhCrf2{ z<uHwGPnQ*e45=<pl$EK;vT9lfS=}Q+){GdUYobwE*DX%h-M%C1-@eif^%rF0twyam zcw0A}SRtF&pV!*zCsMoTaJ?5B)8?mLo7ZhW|M%Cit*tH3?SJnjbdPC|UPqq2uD_l$ zvqE0p)F8Kzmu}ae6Z(gBwfz3IRy(W=SsJo7r!5Xy9kM*9tq)nC(^klzV~I{%BeF;i zt3;NGtP@!%vQlKJ9M*~~)@iFnmg}_jA`3=Vj4T;hGqPx8)lOTs)7FhF+-WOEmhQB* zBa3(1>YcWHWc^41POHFaB_K68tq7zFr<H-!;j}`KN}N`T(`rGAaauJ<IZmqwDadIR zIjtn5CZs6CqbjGBh17)<=CsP3RvJ<pQXEnpQXWztQXo>H(@I2YM2bYJM9M_!L<&VJ zMM_0#MT$kLMao6$MG8hLc3R0u%}y&CsTwI8sT(OAsT?UCsonYC7vIUg1XB1mNX{BL QeDv^?<gC;b95J%fPb9Cfs{jB1 literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Ceuta b/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Ceuta new file mode 100644 index 0000000000000000000000000000000000000000..aaa657ffde7908600a3ec66dff03ca6e91ba13f6 GIT binary patch literal 2059 zcmdtiUrd#C9LMqBqn>00^rfY?MA3-QI>G^j)F9LoFcmc9q@)s(h?;mho|J*9=QNpC zW9-|;YOeWjI&3V-9OebJmMbUAv9?Cdnrqd{KWoY=r;RcCy??oR(M5M%^n3R3dN|{Z zo9FWmt=iO>XZ~@<+fTSTF?;hoF>Zex{Ca!j(2?-kZr_ba_vNrT(SLlX=XjoZ|M0TN z$$bx)4_@D4PDKLoQ$M^JAI^-<A3l2{lCZfgo^Wo<(7jiZOj3RMP;!4sJf-8e$bDZ` z#U<-#<e@WNW@@`i`p#EnT2oY}uURJ#SA-;^s8k;D7fNPYmOPsDpk(=inssxw&iFY~ zXAVx($37pgkN4hI|FNq&tMgmUZu>%Wwtb|5h7+1wdr|VDy^>$tC$pEoB?VIsNMT-! 
z6pqAX&g4#=bE#hD4maz(kAw2$cQsnnRj0*$^Yp2`3$)~&G@aj=p$ppP%EB_A1{)^J z)0vmGG&mw<zJ4vw_+HAd|00XVekzLw&dcJTj>?jrZmGENiB@*B%hLCb=(2EFo@w8s zRV&}n<xT6g`iUkDRg~+qla}fV|4dzRE1)$K%B1F-`}Mh-Gi23=f9UGLL|Jn<Rcd>G zmFHu>OI_!HtgRcA`d!!bg_1L}uJ(d9r0<vY#XZ`Xa9TG^QQdImZQVE$)lKJH^~Fme z-TdxqZ92DAw!BuX;jU_VX=A!Z_7=$2sv?OtPLh{%r^>d{KV^HWk>;EU+C225w4~h9 zmM=e(*6SB_$H^{vWuQ-Y?mw%$dJgFB)?*s$h^aTm8#DIri^n_HINv@0%{4kYJkGv- z9BYbr33s1OvN37Xyi`6n$rY6$v$B-CI%I<OA;^P0p7A{QCC>}}`%53Uf9hWP+wMEY zjM}ri@U)W=@H<05#^AJrKt_QK0~rT05M(6CP@Hxw$Y7jyH2hnJ<Fw;J2E<`R$dHgR zA%j9jg$#?sxR8M%BSVIUj13taGCE{<$oP-}I_(INAtGZ$28oOk874ALWT41Mk)a}E zMFxwE78x!wUSz;dJ7Q$WPCI5~(8#EfVI$*429Asz89Fj{Wbnx7k>MlbM*@IE011K9 z#sCR|(?$Ua0}=-$5J)7DP$02Df`LQ>2?r7nBp^sckdQcSOpu^BZB&r3AaOwggG2@i z4H6q9I7oDm@F4L)0)#{e2@w(_BuGvhB_vEpoRB~vkwQX+#0m)(5-lWLNW74MArV7D zhQtgBn$t!N37gZ#4GA0)IV5yQ?2zCg(L=&#{#WHov_+hiOA`H+`Pl{80e@v~0CVya F{{prT0UiJV literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Conakry b/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Conakry new file mode 100644 index 0000000000000000000000000000000000000000..6fd1af32daec193239ab6b472526fd3d6bdb2f76 GIT binary patch literal 170 zcmWHE%1kq2zyM4@5fBCe7@MO3$eHwPk_Q9h|Nnn1KvF=!;^P~_;10wf5JG}!KfpQ| Q82$qRep9)C#v5<}0M)@5+W-In literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Dakar b/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Dakar new file mode 100644 index 0000000000000000000000000000000000000000..6fd1af32daec193239ab6b472526fd3d6bdb2f76 GIT binary patch literal 170 zcmWHE%1kq2zyM4@5fBCe7@MO3$eHwPk_Q9h|Nnn1KvF=!;^P~_;10wf5JG}!KfpQ| Q82$qRep9)C#v5<}0M)@5+W-In literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Dar_es_Salaam b/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Dar_es_Salaam new file mode 100644 index 0000000000000000000000000000000000000000..39631f21486c61a19639d73d7571ef0840176b3e GIT binary patch literal 285 zcmWHE%1kq2zyPd35fBCe7+a(P$l3Vr)}JkhW?eWw(f!6-{fAc=7@3$E85opWKq|Ei z7+4q>lr<PQ7#P&nFz`V{e0)O~TpdFgv<-}m4M3!cDM&FCgplBXA5cC2fdJ$X5Djt+ Thz2<bL{r64TtN5g8gl^vna4UY literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Djibouti b/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Djibouti new file mode 100644 index 0000000000000000000000000000000000000000..39631f21486c61a19639d73d7571ef0840176b3e GIT binary patch literal 285 zcmWHE%1kq2zyPd35fBCe7+a(P$l3Vr)}JkhW?eWw(f!6-{fAc=7@3$E85opWKq|Ei z7+4q>lr<PQ7#P&nFz`V{e0)O~TpdFgv<-}m4M3!cDM&FCgplBXA5cC2fdJ$X5Djt+ Thz2<bL{r64TtN5g8gl^vna4UY literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Douala b/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Douala new file mode 100644 index 0000000000000000000000000000000000000000..b1c97cc5a77eb187cc8ea8a4031a45a9bf153b35 GIT binary patch literal 171 zcmWHE%1kq2zyM4@5fBCe7@MO3$XOWpIe>wYfq~foB*iDdz~bW@!VvBl0^%}+kYL&m Tunq=>|3HA>R4$<Lx`tc;I6M;> literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/El_Aaiun b/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/El_Aaiun new file mode 100644 index 0000000000000000000000000000000000000000..f5f8ffbc61716d3a421f555fa962a2b3e488d78e GIT binary patch literal 1473 zcmd7RNl27&7{~EfGYZ!{L{UUfi*(V{HoH`WW#UrmjQif=QdXn3s98=OvO`Hh5T(ci z(E>$C1O<8U6iE<PND&r7w47qvD5992@1Jg+It2ZP_x;SgFsJwX^H(%CEc5<2QTBw3 zQ(+h16N7de&JIn+JQ_M3^Dfqlt=|-i8(kNS@B0`?cv2XgvE*VPd;ObW_T7bnoJDiI zoKxe0+=-E3UiIrh{<%BB%_~lZwhX8iOuwrI=@+D+@ro99AJf8T5433Npcb{Xt1qKV 
zeK!`UZz4<ldBs{ht4fQzC#CpWl$IoCXvx>LTI%c7(xca<Y^Gnzo{E;sXDu)DY5A8` zQgJ&{E4~g$WzvXNj?b1Voi9}nGqvhRlT_dOE;Ydjsp%imn$dBoJv$+FwXdYEtxW2) zTk8*h)2#;=%eLz0x_!eL*|BC=8|JRo#(9I<_+e6-eg?Jqey{F)u~m0nZPu21J+k{m zuI{;9FMFF(G;lmqS_^ZetszO;)01Ui)->6l7%vA>7wLgeNIDXxYRA}P>3siIyM}Jb zp@*Z|eSS<2Uuu;joj0|o?WOee^$&RwQzHM#XG%EyNj)zd{t)7g#?OeDZm-ny;_~wS zUP@ZJSGv*Xx0~NR<>vp3Jih(@SJ)1j@RL-WmJ5eukZh22kbIDYkc^y`5|WeCl0vdV z(n9h=5<@aWQgd2vPD>8S&S~i(`8h2?BtxgA=(HS>B#|tgmZsD4L=tscrbwzz%N0r1 zY1ty_IxSzPC5&Y3w3LyYot89`wbRmeTHZ+FNalt|>Q2iYNgm1GY3VyHe`Es443H@x zb3i76%mSH))8>Ip1epmk6=W{RWRTe)(?RBgObD3~G9_eA$fS^2A=7f&ypV}GZDz>S qkhvj~LuQ9e51Ah_LFfN9!)V)N8skjPU%qtZ(zN9K^fXLe9{meZcW0FV literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Freetown b/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Freetown new file mode 100644 index 0000000000000000000000000000000000000000..6fd1af32daec193239ab6b472526fd3d6bdb2f76 GIT binary patch literal 170 zcmWHE%1kq2zyM4@5fBCe7@MO3$eHwPk_Q9h|Nnn1KvF=!;^P~_;10wf5JG}!KfpQ| Q82$qRep9)C#v5<}0M)@5+W-In literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Gaborone b/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Gaborone new file mode 100644 index 0000000000000000000000000000000000000000..5b871dbaa7c2969f6b4dfc854184a29010bfb2cc GIT binary patch literal 171 zcmWHE%1kq2zyM4@5fBCe7@MO3$Z2vr`h|g!fkCbZBqgK3z~bW@!r<%}0^%}+kYL&m Tunq=>|3HA>R4$<Lx<*_8th^MU literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Harare b/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Harare new file mode 100644 index 0000000000000000000000000000000000000000..5b871dbaa7c2969f6b4dfc854184a29010bfb2cc GIT binary patch literal 171 zcmWHE%1kq2zyM4@5fBCe7@MO3$Z2vr`h|g!fkCbZBqgK3z~bW@!r<%}0^%}+kYL&m Tunq=>|3HA>R4$<Lx<*_8th^MU literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Johannesburg b/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Johannesburg new file mode 100644 index 0000000000000000000000000000000000000000..ddf3652e159e4c3cd844bdedf4784dcd29da23c7 GIT binary patch literal 271 zcmWHE%1kq2zyK^j5fBCeHXsJEIU9gPliT@>GwXU9&d$p(IM<igaK0|EfRULA2pJfp z9DoWKL?u9~wG0@6q>KUsi;r&zL$G6T2uKbLLI_*Q4pa!X@&{DUe;~-Mc60#IASZxm ckTXCu$SEKi<QxzUauNfHZsG#DP}hhH09Bqn8vp<R literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Juba b/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Juba new file mode 100644 index 0000000000000000000000000000000000000000..9fa711904dff7519d3061e1bc81527f5dba0769b GIT binary patch literal 683 zcmcK1yDtPm9Ki9pr@OU>^E$^}Y(gO}5<#I6uUsM?$t1TGg{d5cD0C8+tB{DsAE5C_ zM5WPcM4}t5C`Ls=p`&8H8!Ck;%x31ZyU8Y-@6VrKSsD;OR@A;>v%2i&K3%oP#o^t( zILMgL#*hqezMIHGSw~gH#73TVd`y{|PERJh36m5h?TBvUJZ#Ha6F2UaD^r(GrtWxD z*Pq5z!`8Y??>wr;@`-L*zERE71>G`rrdkV&GSjoC+T4AeO{}Tx%e8EO-8CJ0M&@o; zP5!*1hpR<TghRhv5h3zN_K!txf5`rR&J#)QMQV1=7n9>94qp`Q{$BncTD*OI^kG-f zglF3YXoK`Y8X=vKR!A?T8PW}Dhx7}y4UvvWOQa{#6zPhzMfxI*k<LhKq&LzW>5jBV a`Ulz>Aag)wfy@J$3I5ky96O}GLFW@PZiCMN literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Kampala b/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Kampala new file mode 100644 index 0000000000000000000000000000000000000000..39631f21486c61a19639d73d7571ef0840176b3e GIT binary patch literal 285 zcmWHE%1kq2zyPd35fBCe7+a(P$l3Vr)}JkhW?eWw(f!6-{fAc=7@3$E85opWKq|Ei z7+4q>lr<PQ7#P&nFz`V{e0)O~TpdFgv<-}m4M3!cDM&FCgplBXA5cC2fdJ$X5Djt+ Thz2<bL{r64TtN5g8gl^vna4UY literal 0 HcmV?d00001 diff --git 
a/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Khartoum b/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Khartoum new file mode 100644 index 0000000000000000000000000000000000000000..f2c9e303797396981ce1900c06cb51e65f94b830 GIT binary patch literal 713 zcmcK1yDvjg0LSrj`_f)5>Q$w<LkH1Fq%l~;D^0{BIZ5w=!I_jn3^s|T6B4ob11v%! zCX3Y~65Fwwjzq%1V5^DqEs4P(2B*3Ab9<8}&G+XotS;xpj}x|UxHwsR@tmyK+r`nH z5Qiz_Y!1o5)|&|~mUKv!O?c#4N5+(?>-A*Rn=mm^)UL=H_kKs#n}~^D#bx61$ut~~ z>c-QsYTDkA$=yfQTsqM$D>tfjI;Y#F&QyDDNv8VtRY&|lr=#mC{c<fkU-wLxo{^c` zHPd}w*29&8H(z~_BH;Y;2?m7d;ctunK_?_c#uG7~MdI^$cFq@*<3(=1DA?=!_<smf z{rcIvJ+(l5w~>HQNGv265)BE5#6toi5s{Ec%$hbR5)}!H#6<!lk&)0yY$P}m9SM)b iM+QJfK!!lZsA&g5MnQ%_#z6)`M#6s|%C*}K<lPUR4To+3 literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Kigali b/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Kigali new file mode 100644 index 0000000000000000000000000000000000000000..5b871dbaa7c2969f6b4dfc854184a29010bfb2cc GIT binary patch literal 171 zcmWHE%1kq2zyM4@5fBCe7@MO3$Z2vr`h|g!fkCbZBqgK3z~bW@!r<%}0^%}+kYL&m Tunq=>|3HA>R4$<Lx<*_8th^MU literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Kinshasa b/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Kinshasa new file mode 100644 index 0000000000000000000000000000000000000000..b1c97cc5a77eb187cc8ea8a4031a45a9bf153b35 GIT binary patch literal 171 zcmWHE%1kq2zyM4@5fBCe7@MO3$XOWpIe>wYfq~foB*iDdz~bW@!VvBl0^%}+kYL&m Tunq=>|3HA>R4$<Lx`tc;I6M;> literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Lagos b/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Lagos new file mode 100644 index 0000000000000000000000000000000000000000..b1c97cc5a77eb187cc8ea8a4031a45a9bf153b35 GIT binary patch literal 171 zcmWHE%1kq2zyM4@5fBCe7@MO3$XOWpIe>wYfq~foB*iDdz~bW@!VvBl0^%}+kYL&m Tunq=>|3HA>R4$<Lx`tc;I6M;> literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Libreville b/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Libreville new file mode 100644 index 0000000000000000000000000000000000000000..b1c97cc5a77eb187cc8ea8a4031a45a9bf153b35 GIT binary patch literal 171 zcmWHE%1kq2zyM4@5fBCe7@MO3$XOWpIe>wYfq~foB*iDdz~bW@!VvBl0^%}+kYL&m Tunq=>|3HA>R4$<Lx`tc;I6M;> literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Lome b/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Lome new file mode 100644 index 0000000000000000000000000000000000000000..6fd1af32daec193239ab6b472526fd3d6bdb2f76 GIT binary patch literal 170 zcmWHE%1kq2zyM4@5fBCe7@MO3$eHwPk_Q9h|Nnn1KvF=!;^P~_;10wf5JG}!KfpQ| Q82$qRep9)C#v5<}0M)@5+W-In literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Luanda b/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Luanda new file mode 100644 index 0000000000000000000000000000000000000000..b1c97cc5a77eb187cc8ea8a4031a45a9bf153b35 GIT binary patch literal 171 zcmWHE%1kq2zyM4@5fBCe7@MO3$XOWpIe>wYfq~foB*iDdz~bW@!VvBl0^%}+kYL&m Tunq=>|3HA>R4$<Lx`tc;I6M;> literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Lubumbashi b/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Lubumbashi new file mode 100644 index 0000000000000000000000000000000000000000..5b871dbaa7c2969f6b4dfc854184a29010bfb2cc GIT binary patch literal 171 zcmWHE%1kq2zyM4@5fBCe7@MO3$Z2vr`h|g!fkCbZBqgK3z~bW@!r<%}0^%}+kYL&m Tunq=>|3HA>R4$<Lx<*_8th^MU literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Lusaka 
b/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Lusaka new file mode 100644 index 0000000000000000000000000000000000000000..5b871dbaa7c2969f6b4dfc854184a29010bfb2cc GIT binary patch literal 171 zcmWHE%1kq2zyM4@5fBCe7@MO3$Z2vr`h|g!fkCbZBqgK3z~bW@!r<%}0^%}+kYL&m Tunq=>|3HA>R4$<Lx<*_8th^MU literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Malabo b/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Malabo new file mode 100644 index 0000000000000000000000000000000000000000..b1c97cc5a77eb187cc8ea8a4031a45a9bf153b35 GIT binary patch literal 171 zcmWHE%1kq2zyM4@5fBCe7@MO3$XOWpIe>wYfq~foB*iDdz~bW@!VvBl0^%}+kYL&m Tunq=>|3HA>R4$<Lx`tc;I6M;> literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Maputo b/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Maputo new file mode 100644 index 0000000000000000000000000000000000000000..5b871dbaa7c2969f6b4dfc854184a29010bfb2cc GIT binary patch literal 171 zcmWHE%1kq2zyM4@5fBCe7@MO3$Z2vr`h|g!fkCbZBqgK3z~bW@!r<%}0^%}+kYL&m Tunq=>|3HA>R4$<Lx<*_8th^MU literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Maseru b/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Maseru new file mode 100644 index 0000000000000000000000000000000000000000..ddf3652e159e4c3cd844bdedf4784dcd29da23c7 GIT binary patch literal 271 zcmWHE%1kq2zyK^j5fBCeHXsJEIU9gPliT@>GwXU9&d$p(IM<igaK0|EfRULA2pJfp z9DoWKL?u9~wG0@6q>KUsi;r&zL$G6T2uKbLLI_*Q4pa!X@&{DUe;~-Mc60#IASZxm ckTXCu$SEKi<QxzUauNfHZsG#DP}hhH09Bqn8vp<R literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Mbabane b/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Mbabane new file mode 100644 index 0000000000000000000000000000000000000000..ddf3652e159e4c3cd844bdedf4784dcd29da23c7 GIT binary patch literal 271 zcmWHE%1kq2zyK^j5fBCeHXsJEIU9gPliT@>GwXU9&d$p(IM<igaK0|EfRULA2pJfp z9DoWKL?u9~wG0@6q>KUsi;r&zL$G6T2uKbLLI_*Q4pa!X@&{DUe;~-Mc60#IASZxm ckTXCu$SEKi<QxzUauNfHZsG#DP}hhH09Bqn8vp<R literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Mogadishu b/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Mogadishu new file mode 100644 index 0000000000000000000000000000000000000000..39631f21486c61a19639d73d7571ef0840176b3e GIT binary patch literal 285 zcmWHE%1kq2zyPd35fBCe7+a(P$l3Vr)}JkhW?eWw(f!6-{fAc=7@3$E85opWKq|Ei z7+4q>lr<PQ7#P&nFz`V{e0)O~TpdFgv<-}m4M3!cDM&FCgplBXA5cC2fdJ$X5Djt+ Thz2<bL{r64TtN5g8gl^vna4UY literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Monrovia b/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Monrovia new file mode 100644 index 0000000000000000000000000000000000000000..b434c67fa5a3808cea974aaeec9cb4befa99c3bd GIT binary patch literal 233 zcmWHE%1kq2zyK^j5fBCeW*`Q!c^ZJkg7};{%%`IA7@3&=|Nr`gfdNdi0Le*E4IDnc uAq>7i><+{*5JK1*n3X&~pql;zK~&YUIUpKjEl3z-H3Nxua{(P<zy$z(wKJFi literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Nairobi b/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Nairobi new file mode 100644 index 0000000000000000000000000000000000000000..39631f21486c61a19639d73d7571ef0840176b3e GIT binary patch literal 285 zcmWHE%1kq2zyPd35fBCe7+a(P$l3Vr)}JkhW?eWw(f!6-{fAc=7@3$E85opWKq|Ei z7+4q>lr<PQ7#P&nFz`V{e0)O~TpdFgv<-}m4M3!cDM&FCgplBXA5cC2fdJ$X5Djt+ Thz2<bL{r64TtN5g8gl^vna4UY literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Ndjamena b/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Ndjamena new file mode 100644 
index 0000000000000000000000000000000000000000..bbfe19d60a5fc8b8d1a5e5319e4d687e2a2b078d GIT binary patch literal 225 zcmWHE%1kq2zyQoZ5fBCe79a+(c^iPlq-PB<QBs!fN*t7#Nut85sCvfWi!X0w8rV v3XB{+z99_ZjvyQy0+IoO5E3l;0nx(n9|%B}fw&+GK{QF0aseHoYsduvED#(Q literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Niamey b/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Niamey new file mode 100644 index 0000000000000000000000000000000000000000..b1c97cc5a77eb187cc8ea8a4031a45a9bf153b35 GIT binary patch literal 171 zcmWHE%1kq2zyM4@5fBCe7@MO3$XOWpIe>wYfq~foB*iDdz~bW@!VvBl0^%}+kYL&m Tunq=>|3HA>R4$<Lx`tc;I6M;> literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Nouakchott b/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Nouakchott new file mode 100644 index 0000000000000000000000000000000000000000..6fd1af32daec193239ab6b472526fd3d6bdb2f76 GIT binary patch literal 170 zcmWHE%1kq2zyM4@5fBCe7@MO3$eHwPk_Q9h|Nnn1KvF=!;^P~_;10wf5JG}!KfpQ| Q82$qRep9)C#v5<}0M)@5+W-In literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Ouagadougou b/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Ouagadougou new file mode 100644 index 0000000000000000000000000000000000000000..6fd1af32daec193239ab6b472526fd3d6bdb2f76 GIT binary patch literal 170 zcmWHE%1kq2zyM4@5fBCe7@MO3$eHwPk_Q9h|Nnn1KvF=!;^P~_;10wf5JG}!KfpQ| Q82$qRep9)C#v5<}0M)@5+W-In literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Porto-Novo b/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Porto-Novo new file mode 100644 index 0000000000000000000000000000000000000000..b1c97cc5a77eb187cc8ea8a4031a45a9bf153b35 GIT binary patch literal 171 zcmWHE%1kq2zyM4@5fBCe7@MO3$XOWpIe>wYfq~foB*iDdz~bW@!VvBl0^%}+kYL&m Tunq=>|3HA>R4$<Lx`tc;I6M;> literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Sao_Tome b/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Sao_Tome new file mode 100644 index 0000000000000000000000000000000000000000..a4ece7ff2b7aba7f8e99fb8ee4ddbe1d439056cf GIT binary patch literal 234 zcmWHE%1kq2zyK^j5fBCeW*`Q!c^ZJkq-T8%QJx(Fj7&gbwg3i(|Np-y1I0lAsEAL1 zfy2i)guxw%!yQ9_f{b7sLf9IZl{`N{W&y!}Ac(X1YXG7_)`G-9Rx^-jHy6+$x`tc; Dq#`8m literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Timbuktu b/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Timbuktu new file mode 100644 index 0000000000000000000000000000000000000000..6fd1af32daec193239ab6b472526fd3d6bdb2f76 GIT binary patch literal 170 zcmWHE%1kq2zyM4@5fBCe7@MO3$eHwPk_Q9h|Nnn1KvF=!;^P~_;10wf5JG}!KfpQ| Q82$qRep9)C#v5<}0M)@5+W-In literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Tripoli b/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Tripoli new file mode 100644 index 0000000000000000000000000000000000000000..b32e2202f572b8ca6ef3c5cf1d9e787a24f2c328 GIT binary patch literal 655 zcmcK1y)Oe{9Ki9Xt$}*US=39>YHJ`OImJk57onkU5{bt^(oWAP25I~WOa>97K}6ET zAO?%6iNq#Co+cUzi5v^D@O)2ACWBx5Jm0%Z+FU-5vyfZ0#jmL`PgqRUEUudiX4`)~ zkKdjiTX(TTSzf!c$`32LGB~Cx2POTKxo}&yCS<7gO@%)Cb?alF+jg@g+e=4o$JxCM z7uVg+y^!wOu2<cg&oZK>RCIYm_hc_sEEUl4q~A>>MrH5#iEC$G<WOovC5Q8Rc6Ue2 zIoE}3u2>L$pZ5t=`+di5p70f-VbQ#&2uFP8rJF2K$F*pjo^ixvdY;V@X|sOD`GdfF z^+%s(kf3N#L?|j=jSNM{s}Z6o@i$0Ow7eQIiW)_ZqDK*=C{iRTniNrrDn*u}OA)3h cQ=}={6mg0=MV_M1a02vyP9bQHWWWx70c9krqyPW_ literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Tunis 
b/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Tunis new file mode 100644 index 0000000000000000000000000000000000000000..4bd3885a96f61bbf9c0db6b42956b02d6e2bccc4 GIT binary patch literal 710 zcmchUyGjE=6o${{A{s~>5h6+oNsO`3BBD)5WlVy23%G?^r4S+AB6$Ey0~XN-u+U0T z@D)@rsEN1MB8!6QWD5%$(fJm%(8A6$ob&AtGcbI=xm;Qum0#7ScW5fAoA0i5J+_L4 z`Sa9<U0d8K)*hnv<)dj|9hIDVYg#q-d#a7our%t&>UwuvZnnqNZRN?nTMMiExo|O9 zYdWFIq1Ab?;;3?;72QiYvGkBLFx>0JBLQbHeP$*632UMpbTa96nT)rp+047j`7f&Z zSX1R+p4F81L>WGh*W>elFa7spiF9o0$BHK@K9NYj(Jm5F68xI%%*>in(4EOHFid^i zFt|jSe`|^9W3HAr_lC%auBBY}D?}4S6-1Xytqh_Kq7I@DqR^$*=u)eM=!7VRXoaYS z=;awGhG>SUhUkVUhiHeWhv<hy0Eqz-1tboNNFcF5qJhNYQbz=dN&X?HfDUdX8TbTO CBFtt0 literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Windhoek b/env/lib/python3.6/site-packages/pytz/zoneinfo/Africa/Windhoek new file mode 100644 index 0000000000000000000000000000000000000000..f5d40bafc3912fe7c65f47cb63e52fc6b9b932c0 GIT binary patch literal 988 zcmc(dO-NKx97q3SI_L;`)g%cQ;zCVP$Fa$e@?%s+^CPW!GAalnWVxuRHU<)0%;kvJ zmFU7nEnEnSD_sgf5h~H9phe6o30H!MgsAsCTnH|rRlniAGd$kg+;g0v@v(qhnb$ty zVnKWH|Aw}=sqmMlJF9WMyA*p-<Xujb$3Kl!jOVM$H^0T-ajL4Gr076NLRE+N^zEz@ z6D<0yg9krNjdw-YeBCm&zh-pZ`a4tqF{ba%O`CgbacOutW*TRoOViL>)g0@SQ2k@o z(i@Q0o9(Kt^18JBxuwDxM-u*#uG){Yb;pNdbN~CH?tGtSx;DP)?x}s#Gry@HjP9D= ziACMlvSuERY{;X+ITLAK5GQ?A^%qP^|GrUC8J6hwD>aa4lED>6d6KRrr6m93Pei5K zvyj|8Nfya35T8h?-(&9?%9H6n$?5KKq|9Gc?UzU>;z%^)NUOar&-we=-?8tzXMcS0 z;hEjD$*$KB7Z4v1C%lFih#QC>h$Dt4h%1OMh%<;ch&z|oAH<<c>k;A-;uGQ&;uYeS z;TPhV;ThtZ;Tz(d;T_`MrS%VKz@_Z~(gLIhNE47QAZ<YUfHVT>1kwtm7f3UVZXoS2 d`hhg$(sl%C$))WH(iEdB`JZ9=Y-!~I-#K{P&1e7s literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Adak b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Adak new file mode 100644 index 0000000000000000000000000000000000000000..5696e0f8bedee72515c43f686881a561af339db9 GIT binary patch literal 2365 zcmciCZ%ma{0LSsm-^)cYDj-Bm!Hl7RhrdBt9tc86<UwwrWC#iolmVg)3ox8hdC{a; z&Wf15=#OjEF}7yX*qNL2g>zkSYhaF=n{(Efu39oj-p=>b){C0&@;rN<&*6@@zpuBZ z`Na&^zmCWJ!pj*jFZYBu%;$W6p>J}<Z*7zHj=VkjoVs(W-SL0wm9ArzPQc+D8Q7KW z1hsF{!SxlskcuoFnxE-&r^jgbn>&4;$T{s9x$cDDyrdWWIPF_FeM?5@G2f!Er{#So zrku!k$K?I}r=17(ejp!g9&)0#?32+Yy-rMCy?iLS&WTOz(ThC=PF&zN9k)7hdE76K zUUDT<#ZPAFrDvkm!(T?}Wgp&B%R7r@Lf^DXtV)(ET7DOg<SdXYOQ*!6(X%pX)oGFB z_vqyCA(1@uvtIR6uUI{PL8n}NPo)ljq0>gY)tcV7v>2>a=}n)@j1H+Xiw9&@WunSX z+9h)`gVkehugr~45V_ZKWZry`c>G+feB%47Vy(*3PmY}vPYuNAb?=`LvUN`9A2_QD z$}j1s8$MBmYrfG%h5f4N*Bi1p^<z<daYB|v92O;`qq20iMLaw7t={n6K~>g$Qg8g~ zB~`w4P&-FURmIvpdQ)eLs*JDJ-l|2)JD;bkvTv%Yt5tIIvIw#HY_8mL=Z2{HC{8{< zGa<Gfn3pe%kBZuc8M$rv9Z^?yTh|{tskWzH);pR8RYSzMZY<uT8fTB|rle}sbYZ`4 zcIT<)kp|szZHapMXrFw=mn(L5?vSslIMG^FB-@V6i}su(xvO<Xbi{b%?s6@5`&a7D z+{>!-2e<Bu9amlB*Y)1H<Lb5H^ZNCv{i?fnME9I(P`$NB^uA-ysDQwrfZ&k3J)vRl zd-(YMHQQY-zrWFO1^fLSbpc`ayU_j;Y41^dU9o<DUt7RZdtbbv%<I}%Y2Iq{I$l?S zxfc|AUHR+HWxctW6TpY_*TwtaBMUeGz{`2tyxbG!_{$u>IqS~+F@ZgYYHE@3Ap^9U z5h6oGJ4P-Hl4C{*3=<hAGEiis$WW27B7?P>(IUgOn(-n7Mn;Sb85uJ&Xk^sLu#s^i z14l-V3>_IeGI(V4$ncTzBLT3Q2v|)BkQg9AK%#(z0f_?=2qY3nD3Dkn!9b#cgae6( z)dU2Ih}DDyi3t)EBq~T)khma$K_Y{M28j(493(nOc#!xY0YV~VH6cP`gaiqR5)vjP zPDr4TNFkv@Vub_?i53zrBwkh%FeGAD6EY-bRueQNYDn0SxFLZ<B8P+yi5(I=Bzj2r zkoZ|m0Fek<O$d<~T1^m<C?a7*;)nzii6jzAB$h}pk!T{}MB<4A6p5(SgcOOX)dUrZ zs?~%Qi7OIVB(g|ok=P=^MWTy@7l|(tU?jp;6JjLBRug0-%2pF*B+f{nkw_z<Mq-Tw z8;Q2dR695@B=l~N+Y@#VAD8R1EA;XIiuXVD{`(eg%APJv+EdCh(=yUTN?Ce(T6&rY F{}YG!e4hXS literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Anchorage 
b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Anchorage new file mode 100644 index 0000000000000000000000000000000000000000..6c8bdf226900d7f4c6a4d47338e6742d4d6747cd GIT binary patch literal 2380 zcmciCeN2^A0LSs?B5>gbMG2`$^d$2B;sFJrC@%;CA^~!{36V--cUTDoWB5j5I&v#< zv^kuOj&p3aNjuZ6bS=7Rtxe1kZoP~}>DEk^uJtyH<aWNNw*IL3U!G^r^Euq{@9*nr zXn8uv`qv59zwmMn>X&=UZv7eCpXZ-Q__=MOsaX6zQ6^_kZE^?RSS75%Qn%x!6cN;& z?4HwJqJo=N`a^1yRA^D6-<chzoUd%~heu7T@R3Qk>xc8|j&H{N5no;vk^T|?ou|jd zU2mRqM;#s!cke&$zGqv%nA>u|9bMlpVk+9)_ZHNNd8upN^B1+M1>wuwu|c&eHa$8j z_Qzti@N%Mzn^>&wJL8n`CqmWzN3Tk|t3W)^J0=%-7m9=}-`R_EZ;Hg#=j^1IuSIg& zaXWc-My0wA*r`)rskG@fJAL$hm2vT9nK^V+Wu4qAm-M`-#H%$j+b2a%M~+-twnr?h zi<Y^`%_1-7SGnA|O614Qwezo}h=Q44><2$~iH9aXw+mI0didSb_9F+w)QTfVY<KIl zD%$&wEUrGURyH4&B}?8?r6oIL=}(tLS>~W!cJZvJh}>>hoE#RbuQl40uYI7_eAX?i zb{<oYzTF_J8~3T2Lxr-o(5KdR#ml<53gz)y((^~U^5$Na-fzpry7+*-?xQqOKYPh; z7>*E6Or5nG_g)uIjt<-Fo5#h5p+0+K)%U8Y@0ffl^Mcys+b5eNmD*h9lbf#%C|`1g z^nKE)TAb;!WyGttT#1y=9O@9yj;Gnrbv-7wstCK)yIi#OU$@)y7KrCt$L)@o8PQpN z!tM;rQ(gHNWY^Shs(ZeY-J_Fg+l>Laeds;4<BLwYv*&fS>y%gatnXDX4CYHm&>Tl_ z$gQ5xFz0Q20)dWh%hKPCR&XHD+vW%}-w5+dl)1<7wPFK-{@spvb5B@P<*|zD^!vDe zYxG;rALS(;tDZ}xz7$pJn?4RcoWCxf|K6HQ{{t^)K)>8mZt#~r0ex1Sx%nyX>MJTo zHi+!dsJDph5zQvKuuG2KCa_OrqsUH?ts;9xHjC`msJDyk*Qhs)>=@ZHvS(z|$gYuX zBl|`+j_e%SI<j|U^T_Uz?IZg~8h~`bs9RvvJwTd(bOC7t(g&mwNGFh1AiY4Efpi0D z2htCuAx7O1q$Nh(6Qn6fSCF<KeL)(7bOvb+(i@~XNOzF-ApJoagmegLkx};uX%f;U zq)kYlkVYY$LRy9N3TYP7Eu>vYzmSF*b;ppF8FkN)rWtkDkhURxLmG#44rv|IJEVC? z_mK7>{X-gP)Ez`xXw*GKnrPHrMB0e-5osjSNu-rXFOg;<-9*}n^b=_)(ov+PM%`1S zsYcyZq^(BXSER8>XOY$-y+xXfbQfta(qE*(NQaRY8+DJ7CL48^kv1E3pOHo*okm)X z^crb4(ru*OmX3FDP)O*l9%p#iZG0^2LmkQgz2W~v{O_BKx);#-jH;zsIawm3DmyzX IJ4?9!1Y+B7Z2$lO literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Anguilla b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Anguilla new file mode 100644 index 0000000000000000000000000000000000000000..447efbe2c967cc5642b58f51aff86b67073134fb GIT binary patch literal 170 zcmWHE%1kq2zyM4@5fBCe7@MO3$eC<zyoQ1C|Nmnl3=IGOA3wmr;^P~_;1~?#0zn7~ Vru_ix0GaatKYmlWfX16}0RSy&9%BFi literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Antigua b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Antigua new file mode 100644 index 0000000000000000000000000000000000000000..447efbe2c967cc5642b58f51aff86b67073134fb GIT binary patch literal 170 zcmWHE%1kq2zyM4@5fBCe7@MO3$eC<zyoQ1C|Nmnl3=IGOA3wmr;^P~_;1~?#0zn7~ Vru_ix0GaatKYmlWfX16}0RSy&9%BFi literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Araguaina b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Araguaina new file mode 100644 index 0000000000000000000000000000000000000000..8b295a98bac071e39551940705ea050839e37b9d GIT binary patch literal 910 zcmcK2KS<PJ7{KwLW+InZi-Uvunz$J}-S5=XOj?B$g&*{1XsXDT8XR;Gm1q(Qmx5Z} zF&yL_&!z%RofJ_7(eui&hKjNuXYu-c-$o|~-{ZZ{ad5}o^L>)LBM-CDiE~ch;c`0k z^160PpI*(4O5tLEsqpWHUHFlc_r+1Olb@HJuVvY7`0REU?wGx!dMUmwn*F^uQc7a; z>$P{k&)+kLAMVSax+a?rw@bRbWXhQhSALh;qi@yncV@yiuD+0_v2ELIJDiEvY|HeB zT)wbkTW?=-tzVv+wyvM9ZFAYk)VN%29I)A2#>Lf-O)j5EF8ydaO|x{aJ+fVgpIpB1 z+;s0BNYBf5du_$L-Y0eT`r@qg4Su&n&mPFI>9&*J`>3+w)Q?K<H`b%b`-4(NmG@ig zQO(VPB)TG5y|E67lbm9(>Hm-DH+s*2ks$|l(8#cnfg?i?>fn*#2Q>f^0SSS`K!PAq zkT6IbBoGoQsG*QpK@EmPL&EX%iH8J4A|fG?m`G40YEZ)>af2EdiHw9sVk5zk=s^vS s#1HBMAWMKO0<sLqLLf_lEC#Y1$bukCf-DM7>VG}6Wy$pF;`GItV_67C%>V!Z literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Argentina/Buenos_Aires 
b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Argentina/Buenos_Aires new file mode 100644 index 0000000000000000000000000000000000000000..e4866ce1778fd2678a85dc6791a6c1b3fcaa5d1e GIT binary patch literal 1109 zcmc)IPe{{Y9LMqB($yM5gMuO=7AZ0#c+lCRKlY0qtZ)|_6rKtq2!f(R22ra^r(h5x z2o#rIIs`o(EYX!uEn9k!!Hftx42o_=#Tv`Xr|<iLpy*Jye&gBeVW%Jaywd|Gj(2*0 zJj>-54o{C9?rSc|*XXsZx_7?czrTJ~Pi)Nk6AP!+<k|~na(qlrHSYR_>o3%Umv79& zb74I_wazyKS^a2u*nhmUsEQZM=E;V-o;lL&&n$JQ(%ytARqv>0mvjDH>ZYE1f5ptF zteV&FOeNN>7m~+JZFfl3M(XBs@T97DM*RBQ5#3mwG>uY4eW?xli<NEqYq8sWD;!kc zZ}<C4L*2?=jr%Lc4(L$psr=^LxNeQ)@@<Jvy6wZ+eEX(Vs=a(TAM-w{nBI}^s8-aL z8;7Ea`cu7qx<mI4uGGD+GSPjDqk4bgXf$K34J>Q=$4{{Nmrs+s2d$lrd!Dt$ve#^F z$#@}apO*tI*1oO=BHX=}zEs*v%Gs*oa=PUFzDfG;EA9EYQbR60^5XW)(5*gXHDozt zJ!C<rtcWb>lr@n>kyVjpk#&)Uk(H69k+qS<k=2pqow7bsfKw_!N<eCGN)bpEPALPa z11SWl#3`j9wIIbfr5dChr__TKgj9r-gw%u-g{~?tQWpO$bs>czl_8}ewK=6Yq&laR oht%hk0+9-l5|J8_B9SVcQYKO-QYdER)x7_oX?T~!+tU^P0h%oABme*a literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Argentina/Catamarca b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Argentina/Catamarca new file mode 100644 index 0000000000000000000000000000000000000000..9fe9ad6470939d78d5dfd94276f0ff332f1a91e6 GIT binary patch literal 1109 zcmc)IJ!n%=9ES1RG|^ZK78Dc_sn}8y6%K7<#gBR<LqW_^14<_c5d=YTu!3lGaS{Sj z1fiwTu0oGPwKUZp8*8*f38X~?hk)XysKgMzdi*~}9K=C4@8#UzCDSGElO8yEqBHp8 zS|Pu1xO(OAzV5O-ZeP!;dl&lk{f*De<mRlNTs)(u)?c(!6JsV{zpJNjJXa51ytWU| zH<_9I25kqj=F!NAp50SWg-a#-cvH>H9SiHZ<+v&y=&{AhxO#eJNYAHknfZ5D?Lx|_ z1@qRHV+pgEJZ`J|qN+Mtv!5cTRIRf`*WQep`r4$e7t88%bx?mP?=)Wv3HxpOu=;+d zUoQ_Ql)KigSB@Pt(YDjMtwR&0tz{_J-t*D4zdx7j*s?}-l#b?NfzvUwI~T8%)wY{Q zT6=0w%&wWZ(fJQbFRiwHgR4y6%S>y=IoA+s{Kq>I{&|P6F%ptz#JSmY5I9#T1!3ok zHi$a6RBC8;?p39sh4-NOP%0fH<?K-1a=PUFzDfG;EA9WaQe9rS<ip+B;qmRrYRGcP zddPxaSrJ*%D{CT)BC8_HBI_axBP%0IBWojzBda6Jdu4s30IyVllz`OWl_HQTyix{I z2T}-9iC0QNYC($eN;OD1Ua1Eu2&o7u38@Jw3Vl_)NLl>1)P)p=RECs>)aI4qkm|fr p9#WrI3PdVIN<?ZzibSgPN|{KVUMUna@@c{U&$MZ;#M|4|^aHgk>@xrW literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Argentina/ComodRivadavia b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Argentina/ComodRivadavia new file mode 100644 index 0000000000000000000000000000000000000000..9fe9ad6470939d78d5dfd94276f0ff332f1a91e6 GIT binary patch literal 1109 zcmc)IJ!n%=9ES1RG|^ZK78Dc_sn}8y6%K7<#gBR<LqW_^14<_c5d=YTu!3lGaS{Sj z1fiwTu0oGPwKUZp8*8*f38X~?hk)XysKgMzdi*~}9K=C4@8#UzCDSGElO8yEqBHp8 zS|Pu1xO(OAzV5O-ZeP!;dl&lk{f*De<mRlNTs)(u)?c(!6JsV{zpJNjJXa51ytWU| zH<_9I25kqj=F!NAp50SWg-a#-cvH>H9SiHZ<+v&y=&{AhxO#eJNYAHknfZ5D?Lx|_ z1@qRHV+pgEJZ`J|qN+Mtv!5cTRIRf`*WQep`r4$e7t88%bx?mP?=)Wv3HxpOu=;+d zUoQ_Ql)KigSB@Pt(YDjMtwR&0tz{_J-t*D4zdx7j*s?}-l#b?NfzvUwI~T8%)wY{Q zT6=0w%&wWZ(fJQbFRiwHgR4y6%S>y=IoA+s{Kq>I{&|P6F%ptz#JSmY5I9#T1!3ok zHi$a6RBC8;?p39sh4-NOP%0fH<?K-1a=PUFzDfG;EA9WaQe9rS<ip+B;qmRrYRGcP zddPxaSrJ*%D{CT)BC8_HBI_axBP%0IBWojzBda6Jdu4s30IyVllz`OWl_HQTyix{I z2T}-9iC0QNYC($eN;OD1Ua1Eu2&o7u38@Jw3Vl_)NLl>1)P)p=RECs>)aI4qkm|fr p9#WrI3PdVIN<?ZzibSgPN|{KVUMUna@@c{U&$MZ;#M|4|^aHgk>@xrW literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Argentina/Cordoba b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Argentina/Cordoba new file mode 100644 index 0000000000000000000000000000000000000000..8c58f8c23eb0d66d80714a9789b5c20abf043ca2 GIT binary patch literal 1109 zcmc)IKWLLd9Eb5YZKE+13@9if60xNuA{=dF#6R_w3{^2p4Je%yL=XhU!3v_$#YqTA z5QLV-c2zuwYH6cAjWyb#1X2;fA)vS^A~D3OJ%7)MgE;8s8!n$C(@VZjX7I%EZtst0 
znf$`x>662K^(FZlyOvY;&JXza*FD#h8*={S!f7?N=7O1;7}wLyyMFQdOZDK@Tl4T- zSkFwa_03>TKN=bFAMYrs(#48-vc92bkF@!-OL0}+lQiYp9rf&T-k(d~)N>!MnEA9- z^ZLE1#u9oVb<EUvg;agCVLk^>sz!IjZ@e4T%~dJWELYW+`jEd^-KxKq66Ra+p!$Am zz+W0pD0{WnUp~HHhdNFbHsvRDM<ie9On%axAI}!LHm+1%mBWRYXZ@JoUWnJKYV(al z(PZPP-Zm50{X;8s|LbgY@8X!=S3DZcT5AI>t^fE5w*CCHV5_?atu0kN&waO9TQ*+E z+832TyR~m>fe3f6{Xjb7rQ~c;y>fcw{Ju%%?<?*8wNg_qJo4i9%<yfEtcEOytcNV< zlogRBow6pfD6%TDEV3@LFtReTG_p3bII=pjyi?Xk3UEpVNC`*{PALMZ!YO4Sbs&Wx zl{lpoq!y$Yr&NQK<CJ=kf{==kl8~B^qR>^vMats8r7ol}q%x#5q&BA%hg9d3@{szR lQXo<xQX*0#QY2EPQ_4i@L<+^Myqfp_GY#*Qc>8+7KLGQ$?e72p literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Argentina/Jujuy b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Argentina/Jujuy new file mode 100644 index 0000000000000000000000000000000000000000..a74ba046226060a01c6255b6714a40211f739a24 GIT binary patch literal 1081 zcmc)IPe{{Y9LMqBa@HCi1_eb#2r1?wcu?1(G@jT&h<34o;Hg6dK~QwaAj-OQh+q&S z2ozUdqn-}dP|GKm%{<6pMnoM(MYkkkjrGr`@B4uc(V=eremr~q7(4vf=bafkeyl6_ z<60}<aJYKq@VxP&eB8R0Rrk*g=m%S#nZmZLE-ar?Q=87)smTd5-MFV`uD?(ZU%s=C z&PL7b^k!{`vgYyFn4a5LRK*Kr`($g~%pVEs`PH~89Z1?z^{!gDl+%mpn`Uw4vRz6$ zwPZfnN@v0>r;gfMUqsc$>-KZ=303ck>H7O|)7X%*jZ#H@sSWF|mEGoBF=4;Y99BPW z59rm=gmPDV^ty?ICen5?zdbi;+G4qUd-9WM|9B?f5g)W2Z(f-lD_3-<*_)47D{AMB z!PaE`so66dH~quwP5<kW))D7iXid{U-pyfd`7t)hX?AXIAqbo+mV>Z!B^yMXdtMH; zIQO<1it!w@97<<`l$>3vM^3k#-<QbzeVe{t+ce~XOJ3WZ9=%gQwnFwoHbZvv%67<p zUfB@Y5!n*i6WJ8m71<Wq7uguu8QB`y+bf$RyL)AOWPhXquXKR4;FTVbCXg<WHoVdY z(g@OtS6V@O@k%pDH%L23KS)DJN9bGPL3-l9r75H<q%EW`q%p5_hP39D-jL>y?vVD7 c{*VTd4v`kU(j$(@s|5c$%4n~oy1zU66E?ExkN^Mx literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Argentina/La_Rioja b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Argentina/La_Rioja new file mode 100644 index 0000000000000000000000000000000000000000..cb184d6a80d3c4976cb78d82b37409190edc65f3 GIT binary patch literal 1123 zcmd7QPe{{Y9LMpWx|J=VK|v7_M*hiF@Sv_mf1mImL|$wVc<K;B5ELCUh+17b1%ntt zpt$lH^mJH>vwUpXN=OD%BIqC#-HMDg)<2)V@269AsAIqJ?DeqIkA2?Bfs=!s!5_~G z`Gv!CP!9KXm*wmB^|ZQwp<h4P_}q+dPV4dcGiqY}MLUrjGx^#*J$d7$did(CeRMuz zrt%xK9Y~w$krDlPZ$T9<mF$yERWoxuq-U06s(2`2i;H*Fvnv@r+jq;%ez<Dq`kb0G z?`^pwZsvPV*vkG!RT-_?&*4+5+8Nc=ccZ4Zw#U|rW%Z>pq`#JTnQw)-{XThA{kYSw zmxtrZUF+5>#}1pumebj-nVe~fX0ok`Pp0+bxorEEHLCqZF1xL*O?L!Ks>AHb#um$J z`^{s`iRx3cdn#sheo5(t)wXwNmFaz*YEC)l>gpT*@e>aH`qX<G!WedLIvE7c6-q(K zxuOjkom(i?H97ZYu`bFzXgboD40`13RNZpA<orHN^6yh=S;$XSlM9c$xjQ>-b|BLs z^C1%=GkRr8WKOS4ip+{ki_D8mjLeKojm(Wqj?9iskIe6t1dt58k^+(gl7v^XK+^C^ z9!MfcCP*q?$puLU$;K<`Ao+MDAtWOtB_t;#DI_cOX>pOf_-{!J$qY#i$qh-)E7>9G tc_lw2K_o-3q=@8*B#C5+q>1F|l|+$Dy^<=X<Q0Sepx4MgsrW!w<R{OP?j8UD literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Argentina/Mendoza b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Argentina/Mendoza new file mode 100644 index 0000000000000000000000000000000000000000..5e8c44c8934960e2010e7b0505966747cb2c6e50 GIT binary patch literal 1109 zcmc)IPe{{Y9LMqBbgLyE1cUw%A*7g#;6Ysrs`ZEmBihBJz*C1P0-@-TL4Q`4PQf5X z5Gby^20aCqsO3}3RzfnE5kZGV(XEJBV_Et1eLv74I@GP-c=q~j3^w+84-6hV(i!}5 zt&n#(TuC|Huf8BpH?CyV-823A-uh=|a$`nME}l?RYtGrJiE%SsyRB!gzEt;Ly|WKa zH<;P!wb~A5%)`-9{b)yC<<A%G;|*0acPOmqmSd{Wldy&IE%o$bR?nxdoB5BI>_W<^ z1@pm{I^t%r_pq((imJ+3)qaT#scL7FuD%~LwdP)1E0omN%CKH4Z8P8Uar=E{zxr{r zUoVfumAl-nSB~#B(bnU+P1y<4+LX<;B|e+BPp5M2>$-LO^Ss%-bkug3JtkIuqqkf; z*pjF|G23TjMw@2Sb0BH^hF6)s*XfqDb1qa@|BsJIn1}yxI8q;}lUEVvX3qwJbNONr zcCKK9sB<rhp+@K4mP1Y42aWqu13|Bxt*Tp2mz>`>8Tk83yML`zlN&Di;qK(f9r;e@ 
zSPfYYSr1vzD=Q*PdSy*yQDjwQS!7*gVPs`wX=H6=ab$I5d9SRG6yTK#kP?s@yix>G zg;&Zz>OcxXD)CAwNG(V)Ua1Bt$1C+91tAq7B_TB-MWL^X8!3zbmb#F_kjjwKklMUb s98#TE%0ucy3PdVIN<?ZzibSeJ%0%k)N}-sRpBDW8OdEDeyveSHpJY<*WB>pF literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Argentina/Rio_Gallegos b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Argentina/Rio_Gallegos new file mode 100644 index 0000000000000000000000000000000000000000..966a529ba9ed8a4fc6385077cf188f95d47665e3 GIT binary patch literal 1109 zcmc)IPe{{Y9LMqB)YV!-gMuO?j1-v>Jou+Yf9#1JglHEV6rMUr5C}zw45F+{r(h5x z2ozUdgPsCQoaIx?Rvu(9C4vq@(XGf>WBv2#`+guOI@GP-k7uvncKWf;JKcZcct`NZ zwMu^BaP`RHdBa8dxOp|B?w#w?_cuQ`lUp)+a^aMk+IZehO^lm-?T((l_Ch^)`Nlpx z8#OceP1^Qn%%jm!{diA76)u$QlcuVfJ=UOSSK3waK*AQ6ZmVaPvU)Cc!_2+EZ0A!> z&6{_&9Pc&@$pKs07g3e5s{IrmRMn1{uD%^Jwe?9`E0)#g%8>q2-f6xTy6v~=!|MC3 zKD{#1t=yF^y?XqhiL{)|ZOcxWmRL5|n)qm1Kb*<6ZC$6@N=I|?U|Ge@?p*s)S#7_5 zq&ZQ2YIe=E8=YTPdU37o9a>|0Ukx`8JLf`m_5XN>8-BU#!*%j4?A%OK5I9#T1r5#> zZ4hy8u@q`_?)6eA#&ghkD3uP9a(1XLIh}HT-z5F_mG=K!sU{Cx^5X9F$lU>CHDozt zJ!C<ztcWb>l{Jw?kyVjpk#&)Uk(H69k+qS<k=2pqy|O-1fLAI&N<eDxN)bpEUMT~q z11SWl#4DvBwIIcKr5dChuhfGSgj9r-gw%u-g}y2tq%8hh>Ou-bDnm*`YV%5QNOfK* p52?>91tJw9B_cH<MIu#trA(wwuM~>I@@m2V&osJM;_c~-{s0qJ>%ss4 literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Argentina/Salta b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Argentina/Salta new file mode 100644 index 0000000000000000000000000000000000000000..b19aa222f91d07c4fcdc8b50497244c64f2df1a2 GIT binary patch literal 1081 zcmc)IPe{{Y9LMqBbk-U|gMuO=j1<`-cu<#78c*yXM7x+!c<K;A5F{Nkh+17b1%ntt zpt$lX^>naAEuUPr@*smPBIqzEx)l*?tbaaz-%oUi4t47{p1mG+`mxV@X!zubuHcVn zjr_vl>662K<0bjJbuFvzogdWqw>&d5+p>D5GNNWTU9hv$lV+}QSI=L6p&q<^V;`Q2 znuWQ|+74&Uqw#UQxUZlJ7fbf>*1B0b7S>Cv2~|AMYm3!8>gna2UQXXM%kQt)m9$eU z=AA9alcth7ZfpG!Rhy{WPc5fZy(^~cZzoJ+L&`RaW%ao>s=t(Xo3DkW{WgDCeV-cC zt7A#!uJ-7)lLt+t{d9guZrZfRa`}$lkEY|p*?i~r^{TUUG#?L~j+?#tM76AT-Z;|M zTYqBqEF{dp=sGj-D$|y6&V`zq|MAlj{`qObW;rd+Elvf2bA?h6cCKiHh;z?Np;qT! 
zS3@!GLF=LPP>_<dOZCX<mh<}(Lx10<|JODRx$wwqyE9|AE67&JUdU$1ZeH0A+0QE* zB0C~mB6}j6BD*5nBKsm6BReBoBYS&gb7Xg~Y>(`ZG~ksEkQThs1JVT21=5CB`al{% zI`K*?NH1P#2I&TA2k8fC2<ZrYOI)NU{#%+tx<cAQ`a&A>N@qxGUg-^K&MVy^?IHak a4I&*PEqbL#%*d+*|2xWPpQO6KJNg3zGUb2( literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Argentina/San_Juan b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Argentina/San_Juan new file mode 100644 index 0000000000000000000000000000000000000000..9e5ade6100bb4a12e62827a596c0fcc29df53915 GIT binary patch literal 1123 zcmd7QJ7`l;9LMpSrqLJ*1{4$#sn~~!2!}q5_&g;;LF`fkLMI0igo5H=1<~l@Bm^V~ zLQA4u1y6@)Nvpk@)@Z>JNJRvPfa0d8#1LOS{=dgb9CY-Clg}a3CBJWK<m8DS?~iAd z{KDZmD2My{OY(K=T1MSHKdkR<dTOS(Wc2j%X;s{C!7mmj%}jGj&t89_?!SEFKR6dL zb2A&YKaw#ICnofxy(Lw;Sn=mKH_XDZR=uziSLK0(U#{IzPcCQm;?PaA`2LE&G-TD1 zdFNMS{bo6N+^_Est9q{Ce-4eRMo&~X-sVhmUD9ustLjUAOn<HJGT%!5{`c7<>c{P2 zy)xdf?A1QKdh(D7cbv*^%N9&WG@I{Cd@`LM&gQ$fu2tR73i<6_T{`A{R57zBAFox_ zjvGhY6OG4a_gviQV$KW%l^z^hV+LQP+tb$CKuho+KcUuNpB6_jgduC^<~`5aQpIbv zw(NUhYoAvFZPvc71)|)&w!=dyFDYlI>XXwe=l5Y!f1gUrLT;+2TzKTo?V0hZS!6n7 zK4d~<MyE`P%;}U#ky(*xk$I7ck(rUHk-3q{k=c>yk@=mH0Fr@IQb2M*l5k2ENE%Mb z14#tQ1WCmyxgg0P**GN~Bp;_Fgk*%Igye)Ig=B>;EiRH5|1F6jnIWkmxgp6pB|9WN sr{squh-B!L6p<W}B#|tUG?6@!M3GFLk}9U<6}|tU*T_Dp_&{&uC$tjnjQ{`u literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Argentina/San_Luis b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Argentina/San_Luis new file mode 100644 index 0000000000000000000000000000000000000000..af8aa99860119c9fd4af7b9356f7b548b771ac1b GIT binary patch literal 1139 zcmd7QPe{{Y9LMqB+<v%5he1Jq>5^gz!Gmrs=-(qdtRyeCLwM>CK@b!jGKjJ+odTgo zC&iU?3w)w1nU;?&TS<vABf1Giw}QgPD)s66evWmhW54nEdKe0BpLf3J#PPP^kEc=I z;qdI2!+q@~`El!7zj|=KTR+_R!i;U{*JJaiRcYM?TN)iU<Mq3G;>KI`=-mhV_*}wF zj<46Yr{7Es4(cbn%c^{F&OY5-Gt)<7dU~Z*RXQ@ZvM{2aUoPmGuA64&^A$VW<<zYC zWUJfLW<Gn&F7A!1#i5%07M@hKwxq6o95VGaSzE7E)%V3-{iC|m{4A&K(!^o4e5YHl z45XF2n$nHKohIINs<^c<YMPRTVsqxJX<oivY}t^~Ew9RETW*uh)Sj7LldVRVhD=9P z>0IwBle@lD%vHP1fiGwE!Fwsw`TB_NbIwH?qW|~`W4z>-!)VwL#`v=^bZ+Wp5I9$! 
z3u4YyY!G+u&0J))bMF@-N$!E%dFN19KFG@1p;B_%<@`QR{_m4%S;|jVmkW=40(WL$ z<UTSXG9xl2GN)H2MP~KNw8*^3#K_FZ)X3b(<jCyE^vL{30!Rj4Ndd{hD@h<(AZd6d z4<r$<WP+rE<bovQm28l7kbJz75R#EsQbKY<l0vdV(n9h=pBNX(jPI7zklc{uknE83 xypkW1pjR?PQbck@l0>pZ(nRv~N}^uL6iF4y6-gHR<X;T_qiz#>r00F@iC?^;`#Asr literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Argentina/Tucuman b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Argentina/Tucuman new file mode 100644 index 0000000000000000000000000000000000000000..bbb03a0c705626dd22776818b61035ff501b7932 GIT binary patch literal 1137 zcmd7QPe{{Y9LMqBrmH1FgMuO=7AfK)c<@h){_%(hEAnDO;i(`3At*Xz5Vg8=3WOLz zpt!VS)YHKdxAMtlD+5`~h`@tTbSo-sEGwVB?<YD$hdTEA@$B{6PCxc}clY!jYYP6j zmdP(1E-i=W)fePr_)5RJd$vp8TldtAZRpoyvnSQ~nsau1bi`yUcXaOROLhO%8~fl) zjhV=<)wZYKJRBU<lRNS%f4*oRtuLFY!_|6fDWM8&Nn4n|t)5&=>*>zxX8QdlJJadZ zjCp5Ejjd+3{it2o6;%sEW&1gDT$P(*y8L#?R93ayN};5_EcEHc(pK{|-)g_*4yf-p zyY$jPt8$lH^zxB?CR%qQvnf4l>SF0kee#p3|8P3fuyLhoC?3i*22MAc?U}@UNo~G% zFrF+wHrpl=MrS`N{p`8!=v!er=2G!Ji^FDb?npf4oC}4k{_&1f|GdLk6$$%q5$7i7 zg21_aF{pN~V1uZ0FN&dB=U&f;Vmt@6`#ZaXb~#&Ai=1XTzpvB%_r2PF?NyNnE_ro# zYT(ulWItp>WJhF6uk49z>XluQZIOMEjgg&^t&zQv&5_-a?UDVF2E5V%(t=ldK$<|h z@Jbs<A6{t$=>%y7>BTF}Al)GCc%>htA+L0Vw1o78G=+49w1vJe9;7k;TRKBpLwZA+ zL%Q=ydq{s?X%OkqD=i{DB26M)B5fjldZkgMQ?Im&^olf#DS6f4e+ai`r{ui5x#kCJ CU-o<e literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Argentina/Ushuaia b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Argentina/Ushuaia new file mode 100644 index 0000000000000000000000000000000000000000..07e4e9f0bd5f87ae067f613f52ce4327dae4f882 GIT binary patch literal 1109 zcmc)IJ!n%=9ES0mHqjUh78Dc_sn}8)5f1$@;zvD_p&-Vg29!<?A`}G0!3v_$MI3~J z6hUZdw5#ChP_1n>M`Mk4D1o$y;1E#U6qOj_SC9YafS@?&<_#ymn=HBHebRmX$J&EG zu4VEKhpSr-_iHZ5+s&(k>dx6-eRuscGr4h4PcEEPQ)|!JsfjT&T^rXk*Pg5UFJ9XR zXJRHdy-wS{LGy5AL_gY@SNZcr`*=gu%pM8r*`>HD>`mB0`L=p`aY)alZkV}um+XAX zsd@9(mfE_^Lh`7s?2f9+Xw`m-99Px$MqPa~YHF*KwpJ*q&y@lFrL@g_&3D;vGY8f8 zTfKT|xJ$V!oqGA$J`-&|k=;BrVVWC<vMq^^rse(VZ0n{~s<n7H+ZKFKZDvO{UM{IE z*AF!%s!z=JT-@k;2b5l1X?q4%n4XuJri^ngR9F9xuSocpSAC>TK1H0%)q=ped@%?+ zSFl0Uxy53r!MRuEP$Tz2!~Rq{NXpr&I^}f8`F)f0-&fl6b0zsPf60TpQ^Vtzk=2mp zkoAxSy|N;*q*vBN7DZM?mPOV@7DiS^mPXb_7DrY`miNl~NC94{04V{f!7D`|Rd}Tg zqz<GIq!O=`g4BW(<CSWVa=cOxQV>!RQW8=VQWW~CxRJ8>bEyj{45<t$4XMp5#Ua&s qr97lQuM~(>h?I!bh!lxb>6J2(I=xaTX5`U=|DS1Wm&DuM5&Hq8dF)dF literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Aruba b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Aruba new file mode 100644 index 0000000000000000000000000000000000000000..d308336bec9a539742ca3885c44a4d3c5a674463 GIT binary patch literal 212 zcmWHE%1kq2zyQoZ5fBCe7@MyF$eApsr~GZk8xICXrvLx<$1*Vd|9@Zy1Iz#a#}6=Y r`S^w~=o*+98!$Kqhk&#KK?n&J{D5cyn)&}f$RZF;l4V>#dri0im60!* literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Asuncion b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Asuncion new file mode 100644 index 0000000000000000000000000000000000000000..3c61ddb5a7a9956e2448ec7185edea75df08bd3e GIT binary patch literal 2077 zcmdVaUrd#C9LMn^nI@7RGzC<$KN^W}_#V$0V8`%BYLfEDKcIq@{}qhzpoC(7L~Ghw zONy3x<#2AgY4KH-<wf$to61$NVYS&-a@TA!)cGgoc>2AcwRO=&m#v3Azt^*8aKZL` z-sM#_n`Z_7agA_qxViSZo9AG&JHAM{)pGyL%Vy-g0Zr^~u%i!M)TGmY*avqF$(Yt` zGqx(M!G^UqdE-1CSMi}4|LoWLP=1=3ka15YrWM<iQ8}72*lbd-ZjiK}JM5%yf0N1Q z^X-&RCu{nVTJ!M1GMU<Z#6A+=r;o1dHIFSHsgKWJWu`@AGQBWno|sUl86!GP#_gl> zWdA#6h8AgN_m_6&xlxjJ@}$X*Cu!*2Uu;fUze?RW6V9!cSt~;}GWLu{^MW>dw_9>2 
z-mtlSziZy@OE$mjJDvUGS^HF5yT&?u&7AhL@^pQREjakG%&q#wm~FeIu)g0s6FH^x zmiCzW<KNQ-;WK8z&A2X{^u1l!yG)A))jrn|)kT*-u#3B5QheOAC2i?adZ5piHG8ro zK4g}b*2=OaSIqKIsg%nFQ!z$lMevweai>^U4(vCpehO)2PqnG~JgBSBy=9;O=!U*< zc(Z-6@sh4-Dza<WT$go~nRfl$UfD1^$!?tbnQTh!x0}2}vL#`(+45_XRQFvoHJz(v zYge~<>BLU0Z98pVe!EiZ_O_aB)iI54ZMXGB>AHQ<`*uf$rwy4q>?_Grq%pC=HVzEQ z&Z~KLSI-q`x|n8n>jinWBiFpva!g)tO*L=C_sgE`gJzHCdBYO!OBnuN{tWlLZw3MZ z|8&n^*}wyy_d`!0iF24cc~e|c9w>BownW_>ao_IWSNXqx^>4c0_P_eOfBl*J$Gw^Z z$t}nVc+C=!H6V*XR)H)7SqHKZWF^Q_khLI-K~{q-2U!oYAY?_pZb`_RkVPS@LY9TB z3t1SlGGuAU+K|N|t3#HDtPfcrvO;8uzHW`kB9T=h%S6_REEHKOvQ%WP$YPPzBFjbA zi!9jJtr%IduUj*+Xk^vMvXONo3rALtEFD=pvUp_m$nuf(BLzSzfRw=3)c`4iud4!5 z2BZ#1A&^QSr9f(d6a%RSQVygZNI{T_ASFR+f)vHqRRt*vQWvB!NM(@HAhkh?gH#78 z4^khbKuCp<5+OB0isb96gp|qG)d?vSQYoZVNUe}!A=N_4h13fv7*a8$WJt}Bq9Ij7 z%I53ph7=B|98x-@c1ZD%>LKMr>W382*EJyU|Ic9J92dues5>JCStVf!g+r1fStXHB Il!wGW0YAGNZ2$lO literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Atikokan b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Atikokan new file mode 100644 index 0000000000000000000000000000000000000000..5708b55ac6bcb7580498bed9721a43fbd5a1773f GIT binary patch literal 345 zcmWHE%1kq2zyNGO5fBCeb|40^B^rRlyd4W0=I{DhaN<XJ!s(8G4VRR^6kJN={J_M> z#K_FT`v3nb83u;`|95U+WcmMp^#TSCFq;QV3V=uk5g*?W24@!_4hG_IAPxv&a0RkK zfDuZD5Ox*^P$}41KfroGw*LQL^sXfZM1!0OqCrjt(IDr7XpoaZG|1Ut8t8NeD!ZKv I=owQk0JN5Mg#Z8m literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Atka b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Atka new file mode 100644 index 0000000000000000000000000000000000000000..5696e0f8bedee72515c43f686881a561af339db9 GIT binary patch literal 2365 zcmciCZ%ma{0LSsm-^)cYDj-Bm!Hl7RhrdBt9tc86<UwwrWC#iolmVg)3ox8hdC{a; z&Wf15=#OjEF}7yX*qNL2g>zkSYhaF=n{(Efu39oj-p=>b){C0&@;rN<&*6@@zpuBZ z`Na&^zmCWJ!pj*jFZYBu%;$W6p>J}<Z*7zHj=VkjoVs(W-SL0wm9ArzPQc+D8Q7KW z1hsF{!SxlskcuoFnxE-&r^jgbn>&4;$T{s9x$cDDyrdWWIPF_FeM?5@G2f!Er{#So zrku!k$K?I}r=17(ejp!g9&)0#?32+Yy-rMCy?iLS&WTOz(ThC=PF&zN9k)7hdE76K zUUDT<#ZPAFrDvkm!(T?}Wgp&B%R7r@Lf^DXtV)(ET7DOg<SdXYOQ*!6(X%pX)oGFB z_vqyCA(1@uvtIR6uUI{PL8n}NPo)ljq0>gY)tcV7v>2>a=}n)@j1H+Xiw9&@WunSX z+9h)`gVkehugr~45V_ZKWZry`c>G+feB%47Vy(*3PmY}vPYuNAb?=`LvUN`9A2_QD z$}j1s8$MBmYrfG%h5f4N*Bi1p^<z<daYB|v92O;`qq20iMLaw7t={n6K~>g$Qg8g~ zB~`w4P&-FURmIvpdQ)eLs*JDJ-l|2)JD;bkvTv%Yt5tIIvIw#HY_8mL=Z2{HC{8{< zGa<Gfn3pe%kBZuc8M$rv9Z^?yTh|{tskWzH);pR8RYSzMZY<uT8fTB|rle}sbYZ`4 zcIT<)kp|szZHapMXrFw=mn(L5?vSslIMG^FB-@V6i}su(xvO<Xbi{b%?s6@5`&a7D z+{>!-2e<Bu9amlB*Y)1H<Lb5H^ZNCv{i?fnME9I(P`$NB^uA-ysDQwrfZ&k3J)vRl zd-(YMHQQY-zrWFO1^fLSbpc`ayU_j;Y41^dU9o<DUt7RZdtbbv%<I}%Y2Iq{I$l?S zxfc|AUHR+HWxctW6TpY_*TwtaBMUeGz{`2tyxbG!_{$u>IqS~+F@ZgYYHE@3Ap^9U z5h6oGJ4P-Hl4C{*3=<hAGEiis$WW27B7?P>(IUgOn(-n7Mn;Sb85uJ&Xk^sLu#s^i z14l-V3>_IeGI(V4$ncTzBLT3Q2v|)BkQg9AK%#(z0f_?=2qY3nD3Dkn!9b#cgae6( z)dU2Ih}DDyi3t)EBq~T)khma$K_Y{M28j(493(nOc#!xY0YV~VH6cP`gaiqR5)vjP zPDr4TNFkv@Vub_?i53zrBwkh%FeGAD6EY-bRueQNYDn0SxFLZ<B8P+yi5(I=Bzj2r zkoZ|m0Fek<O$d<~T1^m<C?a7*;)nzii6jzAB$h}pk!T{}MB<4A6p5(SgcOOX)dUrZ zs?~%Qi7OIVB(g|ok=P=^MWTy@7l|(tU?jp;6JjLBRug0-%2pF*B+f{nkw_z<Mq-Tw z8;Q2dR695@B=l~N+Y@#VAD8R1EA;XIiuXVD{`(eg%APJv+EdCh(=yUTN?Ce(T6&rY F{}YG!e4hXS literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Bahia b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Bahia new file mode 100644 index 0000000000000000000000000000000000000000..6008a5749d01875a6995189735aa8b4e8747d250 GIT binary patch literal 1050 zcmciAKS<PJ9LMpWRuWffOGA+Qn21<seY@YKSD7_MLK?vj`ZF|zWDYen)DYUxBosv? 
z*z!yf4e>0&83YEFHW^U_rsvf)RYOMEaTZVC_tV%ELC^8K*KzPWj^F2<oEYfOM1PzW z`VALnpI&@kJFIWF?hS6r*Yy<hKlj`5#jHFmoHw(%`!f5nB6Azxy1DV=W`22t6mA#H z!u*&Nlh}M6_3qoMlV)k+lzgvku;n9ZDNj8ymDCGYxm&i&pK9dC$R%6<?5Z?ecx@Z) zPG{m7+jMzAHm{ks&BxZe=6B~zOXnBY@^Z?wHq}Y%qmpfl7Nzaxh}rUCo!dH?vFV8i z(%wI5w%JkHe&CLg;fu1PzSm}s8W-1`G1*)q+45u4VH%}l=Ctixdg^ldD`wY1Np@dL z+db3Xb)BuXdnd0;ci(5*H+Ik+?zv%xy!TOc)jzzdy?^sQio7ost5$md>Up&KP;U~o zOGd9)A1f#Miz&*#Z_!`$o=K2df;tT{4>A!lQ&6Wu<_hX$$ZW`T$b86z$c)I8$ehTe z$gDw~7MVAw6C*PtQ}g$k8<`xL9hn}PA4veoAgC!IIRrHcB#WS?f#ea?M3797RFGVd zWRPr-bdY?IgpiDol#rZ)niP^%P}4&4LJ~tVLsCO>Ly|+XL()U?LlVRxeRlsL!&H|h I*&U~T0UU~E(EtDd literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Bahia_Banderas b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Bahia_Banderas new file mode 100644 index 0000000000000000000000000000000000000000..21e2b719f33d6195a65cbce81764adc92167b989 GIT binary patch literal 1588 zcmdUuTSyd90EW++VwvDzB~b)XffQzHYIZYiO*7ptx|&(7YuVa%(J@V|yc|e`AaV?X zNQ5A|QKUhcW)HpOOITz?B?LYQMFpJ#Ns5@xH}w)hP;dQ*Gv6?<`}BR@(qJG-{I+oY zh0A>U@;+1Q$Hm?^X7{J6(cVC@|Bidu-xrzc@B7xK9=@&dzv#H7gfL}dWvh`?8zYzI z1dQZ@37HbR-bhUvk;}pt8ELaWi?r8~#`3Q(#ftH0Yh~{fv8pG+T0PJu(%au#YdTv* zM)e~rv$a#M-PB{bE1G21lGE0@tTLG$RcGZa$dow~9Y${Ge3|>A*4Qu>CiD7=jQpW# z%5!$1*m!+hZEBb*HlKX1wv@-pf&=}kFmsCZ=69;1x$k6gT$9==hGfZ%b7skCx7;?~ zXqMjZklUa6%pF&6h@H1HOy3Dhl%1Jp`h$l=d2_njmFpF|eREaC!c?&*Jwyef3uNV- zF;)2~S?>KdsDi^Wa^I^fYX8j%d0_B?d9Y(dR$aPmR@dB<X4?s~ru3ya6xeUpt{4z? z*&g%otS(W%B&h1Yw1|eNJk>B#E{=Rytd8Dwi(`+YRO5v**>vNhYCfJRTTVSy!WH72 zNv_a8oUo98JGR~B6+%BP5o+7l&Wp*m-7ZBW?=Hi(`+Ho|e|*pPc!j6XE8O~S*4LAz z-}U`<-oG$v;^$9oJ<NjG1u+a_8N@V)+BS%B@P~E$ktum%AH+b2g%A@VHbRVqSP3!H zp|%rZD8o{SsSH~o#xkshn9HyiVlc#Fh{+I}Ax1;2hM4V8+YK??p|%`iI>dH}@eu17 z=0ogf6ac9JQUas~ND+`KAZ0-6fE40TR{|*oQVXOQNHvUdAoVZ`f>gvP2~rcIC`eU| zvLJOq3UjC{gOmoT4N@GWI!Jkt`XB{DDuk2>sgY46q)JAakUANKI@FaiN_D7fWfTjk nmQgOGUP!@^isAoPa)b^&RR<pDNsdp9Pl)p*Cd8-2Cq#S)zm>O& literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Barbados b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Barbados new file mode 100644 index 0000000000000000000000000000000000000000..6339936014862e144f8beb04b55b617f9834c3dc GIT binary patch literal 344 zcmWHE%1kq2zyK^j5fBCeZXgD+1sZ_F%1V`|J6e;U@-cjRz&EAm0{?V|2LdH|7X-^5 zJrD{BxF8(=KS9L6E`WiNnF$L2|34wkzyKy${{O#vfsy0?|KkT3czk?A7@UCE(FKTu zLx6@uKnMv=`T^DR9|%BB1JNMofrLR$1k*rgf@qLaK{Uv@AR6Rk5DjuRhz2<wL{sQ| JE}&mbxBynQR5btq literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Belem b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Belem new file mode 100644 index 0000000000000000000000000000000000000000..b8e13b02fe93679946331a1736945d033ea82525 GIT binary patch literal 602 zcmb`@JxIeq7=Yol{vc6wa&WLcofVEo!9m1T&_T$egOdo|Gq^}~RvZ;{xltS>W829^ z)JZ9bP}&-C)J5<oq{{idaTEl>3zsJZF64Xt<>Few`LR0W4V#sh&2!(T-1d%xpgb@a zl|N@q<#|F^TY;)io#^^wq8mf^VWYC7n(aZ|I&7)C=7ElUPrdBh@U?42y`8S=_x_xT z7sqsbex?$)7A8lrX+L%AkL?XJe7UQ0>$fIvCPU?2nUSrc9__s_C2Osd&HQp@t*z}i zj<pX_rqkM+Yo}*^!FR@WLAEEq&GP>+YW#gfzGy8YBV(t^=*aj;0whDKq(E|{N)jXs zk_O3xBtkMFsgPVqG9+87q(kzhN<t(fl9JyiCz2G&iljyIB8jmiXa6CyJ1xm)JogLX C#sD7x literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Belize b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Belize new file mode 100644 index 0000000000000000000000000000000000000000..7dcc4fc5b77151e0997e46204d584dfb6b464c31 GIT binary patch literal 978 zcmciAPe@cj0LSt7c7=^CBqYL%gr`y{79$gcJXp}d;6<+(f^_mAbrB<opg<2&SH(kr 
zE~}-zblY72AiHImf0o7U-zi2sIMTsW(jlZ%(|jjyoxC*5+t0G_F7y3`m#=r!dH<X; z`wtJN!9M)1=&@e|t*g0ox=|0FIj@GgtMrp&o#JV$(Zl6|7&*0}N4DxjbojT7J{=IF z2`xvn+hS~UL1rFQs28PSnZ5N>y^QY3@k?J-?*277QJ+_nUA=nh;A1r%bm-~ryJ{wN zR?n<I68ZhLIzM+u6n^Z}uhLh<>(v@L+ubDI6!yxuZPjA#;dlA2DK6%3f0heJ4yi>k zA(!^}s@O0fi<{q6sd`+ON=3Eo_v_^-Qtvlz>Xp7F@nQa={@C?QB-+<xqVOfCym~`c zW=;lP%-{7V9*f6}$p)Th%;a;w%$Uremo%o3^UGV#h2GKBiQ_4+`9kP5pAPN){XO9> zZvEHJJH{SvA1Q!TKuRDrkRnJGq>R(nK?*r-C8U(o)<TLQ)sS*XJ)|H~5h;n(M2aF+ zk+M!(7b)ztm66g;TN^2kR7c7q^^pZ2D?pZjtN~dBvI=Aw$U2aPIPFT1r8w<cki|Id fYLMk1>p>QTtcW};$!XVwEXw&`SCzCgt55y`P>t&N literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Blanc-Sablon b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Blanc-Sablon new file mode 100644 index 0000000000000000000000000000000000000000..abcde7d98693a9d583928c6f1acdf46b190a500a GIT binary patch literal 307 zcmWHE%1kq2zyPd35fBCeHXsJEMH+y_ydA9x^LO11INk9m;ga%~f=dZ+FEBAOF|#oJ z|9`54f#LuEs}~qq{{KIIfPn+d<^hueKE5FgjxInP48-9;91sFDje&s?OoR}21v^k7 w*cCrOI)LCm5X9M90?q&bALJ?!4RRTX2DuJIgIoxvfv#kr+?iZJ*PCzw0K7tFCIA2c literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Boa_Vista b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Boa_Vista new file mode 100644 index 0000000000000000000000000000000000000000..f7769048cb33e11fb0a1486696473e8820666be9 GIT binary patch literal 658 zcmb`^yGz4B9Ki9kJ}C9IxHwd+T@?;n(2B^Zs|rFu@DES~Cl}qSt{nvtbZ}Is8(VS6 zR@?+1L+NXA5DHq*T3Y4&edFjLf)_5I5Xh1Hp1ZoSHtzfwZSsc6h{@!+dsl9U7pY37 zZ{;QPxn#1nIel5$SNX{+ov(&;Vc^*-WY<-(71O2Tswx-Hbj6)juSY%JTgRrVdt3Uw zchoeNr*tECtD518*F5hut!lsiNT*HY`a}=zJer}?c`q6-n&G`|J<|1HChB+IWadC6 z%Qc;RikOtO)(HlFxq{Z_qK;$j{Y{`<o;@eDu;@BtDlRi2TVc3=*nZ>t6Isz(G9W2@ zB?poO$%3Tul{`oyU&(}|LUJL=kZedgBp;Fx$%v%%m7GXYU&)H3Me_3dBt|kLsgc}B dawI#F9?6d!fv+3`rey7ZI*RbL9Lr21`~|^N5w8FM literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Bogota b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Bogota new file mode 100644 index 0000000000000000000000000000000000000000..d8934466bb728c88b211dba172369f5105b6300e GIT binary patch literal 271 zcmWHE%1kq2zyK^j5fBCeRv-qk1sZ_FjEK+zE#~3??R~Kh_5c6>XJlq#X8!+w*8~Oz zFv;@&|M3Hi9RL6C+`z!&;~T=@1jM=qCO~Wo)D8h5ge_$Qssvm51F8pT*Z;UVCLcgF e$Ppl6kV8N;$T1+g9w^5^mW#M-fbO$1<pKa8PC)wr literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Boise b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Boise new file mode 100644 index 0000000000000000000000000000000000000000..ada6d64b1afc93b62445fb8697075fd6be6ed3b6 GIT binary patch literal 2403 zcmd_qeN5F=9LMnkg2;`BQ;8v<l9o3J5Kt1xj0!}A8-!Q58etU3u2$efH!Z|tP>jDg zOGlJR(37@iD!OK4LtMkPmCaqZrnRu=nq{MhO%@No&inLdfAnu#U-rAN-|k@JpU>O7 z!M`b6{Np0bH$2>n=HWd_nUCJLR`<O+7^nN5Pwne}`I<hwsx0}V&L|yR^ha~BE?)dO z{-m7v(I+M^R?Ddm^TqV3Jn0-x5~3$jhICAe(5I&4jI9YOtZuFjub5Ob3np|#=BSDo zSfC^2u!=l#T)M_yQ`ddjucI!V7SY3PI_CX;5&L$ZlrOc3>pP#7H|%N<H~Q=4?DfSW zu4I**lb0fHO1VwOFDz1XBV95f)U6VJTdWhmj#Np(S$f`?pVa*0SM-AZb1FG7Pu{%u zs9Lzrk&8BeEN;p9O5VEWU9mXs1L;oH!aaRir?|F>l+iOf^@>;AKJ>ayyU?uCd!N%8 zCo0vF)<&JVZ<)$!Y?s;F=cuK{o8+>}ah2m<E$_(wOf8Q{mAQ%6MD7n!GH>FN$Upmw zyz}!xvErCZ-!*tptnB((-+f?@C}{db-_w3T-CKT4-&em|6)t%}7Zq+)MZb^8;`A0# zeBqERiLMnTCpzVts|BL;l{a<Sr!~sc(xLBvZ>1`)@#~82Wc9#`Qe7DcRcn*dwRhdO z$~zgWt8&h&s_)XIZ~iyJHxeV)PmhQVZ;Z(YM-Pdb_Mm)ds8iI|pOhPWw}^*5=XG62 zhk7J^KtI~(SM|~RbwhEfYPcHEjqY^Sc+RK&5wXgDBwug-{#Ui7yIMYWK1Mtq$dgYT z9TQDeiL$vXD7NNI$!$$1#rC+%az}Z$*fBk!1Gxh#F#3hw8NXlc96GIcjR(}zz5V)` zi$2xT+NO7(%2%zm4SG*ck_rtmKjAaN!e{=cejI0CtPqZKP=|y$PL~q19Os?BkSO~p 
z`CiPn@9||Guc#<A&swkWRGX*5JiMFlxDfjZ-hZE5kNIUgx$(dM%KQh8QyZ2rj7$)j zAu>f|j>sgDSt8T4nt37<MP`aj6`3pA$@0Q%IWt{gz8n)qW{gZ3nKLqJWY$(QZDih7 zGjU|*$kdU!b4(tYJ;(Hs`Ew)y$pA+RkQ{I%0m%YK8jw67i9j-8HK{;yfg}UT29gdW zA4o!wj36mNa>9`mBr6<gLGofXi9s@BHK{>z!;u^$J4kwv{2&QJGK8cE$q|wyBuhw| zkUSxYLNaAFsX}takt`%z9O**x#gQ;1V;m_%a)u-g$r_S2ByU!eI3#mclR6}KR+Bs= zdmQOQ@`of4$sm$KB!@^6kt`x<MDl1gi9|AKHK{~$X*J12vdNK7B%d4!MKa2fQY5Dw zNky`Xq!r04l2|0OR+CyJw^ox}B)e9VUL?Orf{_e!q!`IDN0N~&bEFx`Ge@G4Oj}K= zkz89%vXN|CO}deMBMC<`j-(vPIg)fF>*9aZb(hKga+CWs&(e(SjLbAoR#rw<MyBg8 DiZO5? literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Buenos_Aires b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Buenos_Aires new file mode 100644 index 0000000000000000000000000000000000000000..e4866ce1778fd2678a85dc6791a6c1b3fcaa5d1e GIT binary patch literal 1109 zcmc)IPe{{Y9LMqB($yM5gMuO=7AZ0#c+lCRKlY0qtZ)|_6rKtq2!f(R22ra^r(h5x z2o#rIIs`o(EYX!uEn9k!!Hftx42o_=#Tv`Xr|<iLpy*Jye&gBeVW%Jaywd|Gj(2*0 zJj>-54o{C9?rSc|*XXsZx_7?czrTJ~Pi)Nk6AP!+<k|~na(qlrHSYR_>o3%Umv79& zb74I_wazyKS^a2u*nhmUsEQZM=E;V-o;lL&&n$JQ(%ytARqv>0mvjDH>ZYE1f5ptF zteV&FOeNN>7m~+JZFfl3M(XBs@T97DM*RBQ5#3mwG>uY4eW?xli<NEqYq8sWD;!kc zZ}<C4L*2?=jr%Lc4(L$psr=^LxNeQ)@@<Jvy6wZ+eEX(Vs=a(TAM-w{nBI}^s8-aL z8;7Ea`cu7qx<mI4uGGD+GSPjDqk4bgXf$K34J>Q=$4{{Nmrs+s2d$lrd!Dt$ve#^F z$#@}apO*tI*1oO=BHX=}zEs*v%Gs*oa=PUFzDfG;EA9EYQbR60^5XW)(5*gXHDozt zJ!C<rtcWb>lr@n>kyVjpk#&)Uk(H69k+qS<k=2pqow7bsfKw_!N<eCGN)bpEPALPa z11SWl#3`j9wIIbfr5dChr__TKgj9r-gw%u-g{~?tQWpO$bs>czl_8}ewK=6Yq&laR oht%hk0+9-l5|J8_B9SVcQYKO-QYdER)x7_oX?T~!+tU^P0h%oABme*a literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Cambridge_Bay b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Cambridge_Bay new file mode 100644 index 0000000000000000000000000000000000000000..d322f01ed1d11856e0f17d1ba5d5f6ca867533c3 GIT binary patch literal 2098 zcmd^<ZA{fw9LIk=6nK3)n_(zmKBQNOd%2(_keLtA4e18q6_E(Fz`Ls!r0&iN@w9M^ zSDvY3ZYGOW^F=H1VXfvya}8fqG<V&a(~0ZJ&Bvj&$!2jqpR?*sZ}4?rcK)BU|L%o1 zd_LiZ=;k8%&nDV89M*0R_xajB`tA<t@46?M_`E;b;+rz%UO_|K=)%1t)1uAlvGqa8 zsC-^cFU^(5a~G=2xfOcG1dp1T?A0^xE-+a?Pte)%bdz)DhMsl&x_P2+NY9Sts5$Q) z({tA*)VwX<%ag@FsrjovmjxN$C~uw-@7NWS>)9^3!)Hw1^{_lO@R7+s-==-NdyW5O ztuE+@n81O>x-inIigrxbi-Mcg;@VMN?0rE!U35_|Ny}3uS@)#m>X?+?|8}G-^;%8Y z+1u)wOQ&UNzsEd#`iLyschfv~c#kY^{lTp0Jgiq%_nTGCyLEZNep6AtL09}fsDi$B z37$WyDknEd<;iZf`qpx(`rs4u;*X6w)ZS%Y`eK=`Zj73mp4s~4rB$XjmaNxg`%HN4 zuR8qaR8v=cR@eQOuj*(0BK3n))cUbOY4~_Vy)t}M8aw0a)q!qlYCfel^lp`np>t+a zSC@Xxcf!0LiR$Lb2Te<`O1IpKnTXe?BSZBjnl@EOKPxj^uHMmGd)BFK=cdRTu~PNs zu@Pyl%TjIo;__DUUut{nDcO;6MeVHak)31rO|0aEjtyTnyD|^zT?1d6w?||8o!&n4 z?uB~Y-toTK{biZ%XlgNg4rOaeNlH#hO?hN|KWUGqr;YQY<M@U4U(GI&`1Hf1RQF6z zB#s!Flt_HlpQPNI40oTwU2?d~oPXZ)Nnao!p><&i)!4Jfp7L@YVOhnoFDmR2NpgS4 zJ^sChA3}=JzrUZbLk@@_5J?=ZC=gj7!tgbufr!J92O<zdB8W&3nIJ+rTB#snLF9r6 z29XRR8bmgTa1iMr;xXid2*{8SBBG;}5hA3cl@cN*Lr#dG5J@4TLS%&q3y~HgE<|34 zzz~TcB12?`2<>R4W{3@u+tCURk(?nqM0SSo5a}7>L*$1901^R62p}<Vv_XJG!O?~R z5(h^c2uLItp@75!5)4Q*AmM<-0}>EOL?9u7#Kh4C1rilU8x}}h9Bp7Akzs@e5*tQv zAkkrj2NEAffFKcqga{HNNRS{=a<pNB#L3YH3KA(t8!AYwAi;t}ixDnJychw4M2rzK zNX!^PgG9~Ih7A%oM;kau<Q#42AhCl44-!2{_#p9v1Q7nOBj~ZwTw;5fA6n!u@(1!m Lg@yh?f57tqQ<e|c literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Campo_Grande b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Campo_Grande new file mode 100644 index 0000000000000000000000000000000000000000..de52bb68143db08d381e441ba26a84c62048f0b5 GIT binary patch literal 2016 
zcmc)Ke@xVM9LMpKiaFAn%gtC}7&9Z`d>?n|MMiIBilzsW7ZPeIOo=o}VnkR9vPPSK zxS7i|D_U#w$5x-VHo;~4tlAulg_Ts2{Ftk@8e6d?o{)X}JU{)}AN8Nd?jARG{P^Sj zd?l;uwibl`^|--(hbNCV_vHPwR`>GSiI!N`t?Pd8x>jns$Cm2pp}jH?|5yh`r|QMb z-|WTiS{a<o(V;g+Wq9zIjwF}JAFofhmv7oG<F-Ms+>&Qf<%OC$`;AOwjM$0e>1J|t zrvABapULVus<WGaGIw<r+wAC|nbY>X-hK18CZ}??&AGH&=Eg&I?)iS1Hz!Nyof$K^ zp+IvF9+vxly~EDmrY6sRrukdHmT>2Lx?uT7BCQ=7$y#p;)`e{}y;)-MYK^7(WMOu` zF6?VEi^k8}c-O1)!0?zB?k_YC_Dr{n>+duVeg3u<mHuTOJ}^x+JYtq~r0ddI*QB`o zoEBdhmSu^}cG<|JDVcFlOHREjD<+IxdFW+%WZ-~Z)%uQEeY(*mHnp45BNeu6Wv8rZ ze@@G@OU$D+kLlV_u1OXz(&R78r6MO&D|+sg%Ct$XJf0zsUmDb^S4L(1&<?xdsV`;Y zCu{5zD~vq(R)O6V={1`>%CvgM`=+KgqBRp=$d;1Zbjw!_ruLr8TKi$r)TMu?bw_r} z(?56XwwATBy|3RsQ+2=W_~4YSFPtUM?mujIW?q$s`dzl+>JQRbw$V0TI3rE*7H#_U zf@!{egEpTWmlsk++Oqdev+Mh8eX)9vd8sQ+cdt4sdk+1mp|mO2_bUi`dqbfh=s!2* z#vnLrLsM5LlA(Nwx@XKCD39cSA9cI?m_O>2{|SC$5OCbcfqUJNBZrP0J96+|cl5~N zd)@IP0U!||As{gzK_F2eVIXlJfp}dcNGM(x3la<x4gWsjAn_mpArT=VAu%CAd0kXU zSY8(w5}4OThJ@yIu_3`B(IMd>@gV^s5h5WXF(N@CQ6gb_U7SduUKc46DiSLaED|je zE)p*iFcL8mG7>WqG!iuuw%5gt1nzZ_BcXd;>`3rP^ho$f{Kx<xBY+G6G6u*XAftc` z12PWCKzQ9qAVcAGV}T5Y*Np};9LRVe1A>eQG9<{DAcKO83NkFnxF7@Lbt8iejn|D0 zGB{p0I>_*N-S{8_gp3d}M93H+gM^F{GEB%gAp_-gBZUl=*Nqi2SY9_;$Z&bxcp(FZ zj2JRx$e1C6hKw5eVRN?5|EGtxHrK=TojLyc|Czds#cuYBV(v9sl$VG^!jZ5nki0}e JILe!hzX46hU$6iG literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Cancun b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Cancun new file mode 100644 index 0000000000000000000000000000000000000000..7e69f73de44698b65b61038ea40972f1e7c7affa GIT binary patch literal 816 zcmchUPe>F|9LIm-j+k3uV0qD_u!M(bZEGv@-|jYS?zZkaCr=ZYiy-XKspEr|PX4(H zUL*(v#X2o5U6v4p&|?%)U4rQNE~04ke4jz^&>_6^9o~Fq2Ilhl_|tQ<N%?E-_6vs% z*u(P`WpA$<UxJ<&AA=j!s!l#V3I@HrzFGMiq!I%<wJpJ|<ZgJoqfgSkzjXTNAGve< zLEqhX)xE}hJ@opwy8muT4=;aLBdehvt+muxeK8y_y;PZ*TA00FSGjCCe9$$ky#B$^ zJA5vA_iC8`{6rr9I0z@+dQ#Zy*2R@;@@R8km!4ge$%Rk4{5YX1^G!W9)~Wo$s`mH3 zi8!$~F4y_j6*FdQUBsA;HF1sEX*m~+X}*z5+?~sXqA!^#UvgP{EHjp8^UtC0@bouh zhelpe3DF5r3egHt3(*_VDu!r=sAlMnXq7XxL)1g`LlQtTKvF<*K$1YRK+-_+KoUVR vK~h0-MYPEn*&^C>jC_!UjEs<!jGU09jI5Bfki3w@kjxSPU+TE6pNgLVO=iwS literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Caracas b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Caracas new file mode 100644 index 0000000000000000000000000000000000000000..c8cab1af26b03a7008d34078efdf5ae093f50c10 GIT binary patch literal 289 zcmWHE%1kq2zyK^j5fBCeHXsJEg&KgwWH}w1Z!_L_xJOSa2v=QMQ2+n`e?}%|CT8aU z{|`DaFaSvx2A2Q-5A0yz`2YX-0R}!F-w+08Al5Z7F*X2^KvN(fgs|Q0K$T#-e?avB qt^1$ZF698CL2d!jAoqZzL2d%kAa{Z2dY~KwxsKzq0Xo&rgbM%zHA%++ literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Catamarca b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Catamarca new file mode 100644 index 0000000000000000000000000000000000000000..9fe9ad6470939d78d5dfd94276f0ff332f1a91e6 GIT binary patch literal 1109 zcmc)IJ!n%=9ES1RG|^ZK78Dc_sn}8y6%K7<#gBR<LqW_^14<_c5d=YTu!3lGaS{Sj z1fiwTu0oGPwKUZp8*8*f38X~?hk)XysKgMzdi*~}9K=C4@8#UzCDSGElO8yEqBHp8 zS|Pu1xO(OAzV5O-ZeP!;dl&lk{f*De<mRlNTs)(u)?c(!6JsV{zpJNjJXa51ytWU| zH<_9I25kqj=F!NAp50SWg-a#-cvH>H9SiHZ<+v&y=&{AhxO#eJNYAHknfZ5D?Lx|_ z1@qRHV+pgEJZ`J|qN+Mtv!5cTRIRf`*WQep`r4$e7t88%bx?mP?=)Wv3HxpOu=;+d zUoQ_Ql)KigSB@Pt(YDjMtwR&0tz{_J-t*D4zdx7j*s?}-l#b?NfzvUwI~T8%)wY{Q zT6=0w%&wWZ(fJQbFRiwHgR4y6%S>y=IoA+s{Kq>I{&|P6F%ptz#JSmY5I9#T1!3ok zHi$a6RBC8;?p39sh4-NOP%0fH<?K-1a=PUFzDfG;EA9WaQe9rS<ip+B;qmRrYRGcP zddPxaSrJ*%D{CT)BC8_HBI_axBP%0IBWojzBda6Jdu4s30IyVllz`OWl_HQTyix{I 
z2T}-9iC0QNYC($eN;OD1Ua1Eu2&o7u38@Jw3Vl_)NLl>1)P)p=RECs>)aI4qkm|fr p9#WrI3PdVIN<?ZzibSgPN|{KVUMUna@@c{U&$MZ;#M|4|^aHgk>@xrW literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Cayenne b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Cayenne new file mode 100644 index 0000000000000000000000000000000000000000..6db640981f2b845769834e57142817da8bc61c31 GIT binary patch literal 224 zcmWHE%1kq2zyQoZ5fBCe79a+(c^ZJk#4p+tejheHQ2+n`e+EV-rvLxXePCet|Nr;_ z2A2Q-uU=r_@bL{{&^0gtVq>6s5C|c`jvo*$KokG}2iXOpLH2=!>w#=y?c}ln+HYsf F1pxi^IW_<Q literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Cayman b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Cayman new file mode 100644 index 0000000000000000000000000000000000000000..5c1c06372c6dc8610ffd14f74e923bdcb9b21d31 GIT binary patch literal 203 zcmWHE%1kq2zyQoZ5fBCeCLji}c^ZI3_m{*Mj7<OkZ!KV80Fn|6EdT%S+`z!$;~T=@ r48*R%AwX3i5JK2wm@Pa%AX<PX{{NrZt|kDYK{hcEXB8LFR#Pqj=|C+j literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Chicago b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Chicago new file mode 100644 index 0000000000000000000000000000000000000000..3dd8f0fa82a60710c0711f35dee93ef5013ca796 GIT binary patch literal 3585 zcmeI!X;77A6u|L=OQ?yOM&c4$mSiHEp=g+zG6aRVAg*Dqh^ag(qNw5HL|RQ{Wu>;@ zhGvVQk_wrKh~{WcDN2$`xS>(#<C5Xhy*>ZeX~vnF$&Y>LFfYH$oy#!8hx0#iLzBjZ z$lu1(zQe=(Y9C(vX!|X5jlW*@)it$zegnPY{iAB-z7$#Y;_oIa;-FqVy40*0P%0mL zFIH<iZk4sJ9P??DkL1&`cg<&)=gQ~#Gt9c;k@7|65R;wLOV+19ZZ>>4LT^kQta9e` z)L%w+Rhx#l(VP8Rsx9HQb?#j*mDl2|&Z{U_TWi;nt-A}=*QYCFTXvbrFWMv97Z#X; zp(V0oeYh&jcujW3W|*QelVw-nII}zERsBuFr_7!X6ZM|zv1+eZAHDZTq}ungOCM}= zMI9Vj*A!2`EQfmhVh#^HEG4b?n;$~A$PvjlrQS=WwEQjeqid5sS}@HVD_g9OfAXd( z%TLoky)spmXAaXRlH%0K^lti8R3~*hp_M)}DBM&;hRfN`zUG{tul(FpOr`y=RbD-5 zeyKPqzm^o4s@+9$e)|b^A-i0gWjocyg@yXk+|}xGa+ba_VuAWCcD}yaJxTo@I9^vj z@{+mMFj`-)lW4A2C(4a;QRe26DEVVgd*jM&FYdKoMwWPq$ASx{#*7P6b4q<xYvkR! zcHi^rt{zp|Gw6He`MRI3+i0VzyKa@#Gw-Oo%NObTrR$AX!F1gq?`_lYleeYO(iC&g zD=E@AE#5RqikEwbgc<LsFuAWwbJMh4bMbkwvual7rZlh7Of|pcqg$M~p;{ieuJ7M* zT={M|rdzGpu3D!yln3UmP!A5gC~YQeGY^Fp%fr#jOk3|w;`iu0<L5e{+tnLw+Lh(# zM=K*t`}}vcfAJ{QA#;Wf$my*D(}(DwMQv4ZVyc8ptD`zb4wlFISE^9IuF@&wTh-aK zrG&LOXTmC6(&ffp({*>bJbv_Z6P|rjciZ}gd17Ii?*4%?J(3G_&y3gAld&sxuQAE0 zcVLE&=-p36T;3v)9VVH`;-wPh6>Fk$W=nKcuzC8!#rm0&J}PETn(nhXNW~5xru)9v zSoI6<ru$F7q6V~VrQ-&csJNRRIzBX4#h>w&!M=yg;9Vj^T$|0%tdlaVY>643Q6$gi z&oT*P*2sv=;pVxRLOpUpni|z1OOH+*rp9>9*JC5Qsj*e#b)sJ@mAF4zCwY3Pq;>v! 
zLd7Zd{CiO{@jJJfl-gb<XP+?1@m``9?le<EFUZv7)n=Oa0ZEBnU{YLFIyEdyrIvlC zXZp-nGxIm-wCeF{R^}o-`$)8!lRjO)kn69~69(zIOB$#e9(V9n^R~~_s(srJck;{s zozJ-4>kf&y-FfRhYPsE?EtfjHmio>+jhyfI-g^I;m^kUx+dc#0B*H$u2HB@?oZW49 zJpLl?-}hpb{j9SWt8e|1{p)UbLQU6lWKSZy64{r?&P4VmvOAIeiR@5hk0QGi*{6<n zry_gR(e74czal#p*|W&5MfNSSbCJD^>|SL5B0Ct_!^kd1_A#=Pk-h9_cQdk|9qo=r z_B67qk$sKqY-DdEyBpcx$PP#LII_!;eU9vOWUo8g-Hz;cN4w*ZJ&){qN4xKlosaB& zWcMTcA87#60i*?vwg*TPkS-u?K>C0*0_g<O3ZxfEGmvf|?Lhj0Gz94g(h{U6NK+hb zSCF<KeL)(7bOvb+(i@~XNOzF-ApJoagmegL5z-^1Nl2F*ZJUrjA&o*hg-)w@L9ZCw zEQW3&?PBN`(lDfBNXw9(IohTnU30W;W9S>wIHYq(>yX|d&12{u(msa%Aq`~cAkspH z9wJR-=pxcahCVvlMk1X=T8Z=$X(rN5q@4`?L>kJ_QKY2|Jw=+z&{ap<R-~_vwy{WO zk=7!;MVgCr7illjU!=hd9Y$Kr&|{>@3|&Ur%+O~?+h~SPBduoWHPURP+eo{Sej^P> zI*zm)={eGLr0b5h?F@Z)w2f!zyrXSBL+_F1Bi%>ZkMtk80gyWYxdo7W0J#Z}yTH-j z2FQKjXm13DJHgT33JmuGax*a84an`la6ce71j8MH+!Dw=f!q|xU4h&d$bI2xZw%zl zaJ07ua&I`=n*+H!klTad{y=UJhC2kgMHuc8<R)RbOOV@y;XZM+Hwtp6INDnUxmO(R z&4S!5$nApMFUSpp+%d>4gWNN6N7u>!2hBeoH1+Jg)5+dF{xKZ`LIQ&PV}gSNf&+r; F{Ry|ZcJBZH literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Chihuahua b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Chihuahua new file mode 100644 index 0000000000000000000000000000000000000000..e3adbdbfb25b557db2a2edfc721c365d90706233 GIT binary patch literal 1522 zcmdUuYiN#P9EbnoHEXTh773A1qP3Hq%y9>sv3IlW9ka2$W8N8?ZN}Oh@1hkA_mn8d z5@E?wJ2;h3LLt+NNiE5i6isq!J?`t}gYrrF;&*pH*V9wax9jp0R`{ajj|JK<TvlW+ z?_;!mobMQ@xjMMHufw;(+nLhs?FxzWb`3Rsh$^p_=(?bq+35umll!(NcE%DF7u~Dl zN6u0S6F*Bre~6m%{kbH53N>>(p2)nmU^D;bDOqs%omqHdk1VQwWRmus(8<|tCZ%kb zPK`KX7N?fzv{744`t&57{-s4_xTfihXLV}n>k)ccSH8-8I91%o<5kubmt;4LSIe8< zNKR?2$*sI6c}e4pC-bu8Px)$Agf+`bd0`61UepDBx6P_g2Xx_`Gp4AgQm?*rORZ_o z(#88vtCFLU+FPMj>29~I%~-3}6-P+f%v800!D#V?dQJJHk5WFEY&Hz_NJaN_v+-r8 zY`PX`D(^nkRV@Q%^M!U@UHib)G#%5mh5c%auTj?}-d9`GiuAS#t*SnvLF&I9R1KpF zrJ=W4HNKxG+i&Ek9gio=&T~~}*Hw|-d$Y}+L$5>vhH-}5;|ln<$8k<Ji#X0HBQD2j zJu5+ubG<Dfg!cgT^LwV-Bknwpq;OO1yS=~9^B3BD`Sqh?hdvO6AR0kbg6QO@Rf^xx zilG)nFNR_e%^<2lbb~18r_~Ok9z#Eff(#8IDl&A0D9O+gq9#O7h@ucpA*w=jg(&N% z)fS?zpH^Rp!VrxiDl>G3D9z9sqBcWsh~f;*A*wTUhbYg`9-_XVR)0tWkPILxKyrX2 z0m%ZA1|$ziB8*HRsW5VZB*VxCl8&D?A4o!sj36m7a)KlU$qJGdBrix}kjx;dL2`p6 z2gweSo}V^9NP>Ra3?V5pa)czw$P$t!BTx7rBpPh{jkn!~xnshk!Xv}nQIX-X;gP{V E0A>k%D*ylh literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Coral_Harbour b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Coral_Harbour new file mode 100644 index 0000000000000000000000000000000000000000..5708b55ac6bcb7580498bed9721a43fbd5a1773f GIT binary patch literal 345 zcmWHE%1kq2zyNGO5fBCeb|40^B^rRlyd4W0=I{DhaN<XJ!s(8G4VRR^6kJN={J_M> z#K_FT`v3nb83u;`|95U+WcmMp^#TSCFq;QV3V=uk5g*?W24@!_4hG_IAPxv&a0RkK zfDuZD5Ox*^P$}41KfroGw*LQL^sXfZM1!0OqCrjt(IDr7XpoaZG|1Ut8t8NeD!ZKv I=owQk0JN5Mg#Z8m literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Cordoba b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Cordoba new file mode 100644 index 0000000000000000000000000000000000000000..8c58f8c23eb0d66d80714a9789b5c20abf043ca2 GIT binary patch literal 1109 zcmc)IKWLLd9Eb5YZKE+13@9if60xNuA{=dF#6R_w3{^2p4Je%yL=XhU!3v_$#YqTA z5QLV-c2zuwYH6cAjWyb#1X2;fA)vS^A~D3OJ%7)MgE;8s8!n$C(@VZjX7I%EZtst0 znf$`x>662K^(FZlyOvY;&JXza*FD#h8*={S!f7?N=7O1;7}wLyyMFQdOZDK@Tl4T- zSkFwa_03>TKN=bFAMYrs(#48-vc92bkF@!-OL0}+lQiYp9rf&T-k(d~)N>!MnEA9- z^ZLE1#u9oVb<EUvg;agCVLk^>sz!IjZ@e4T%~dJWELYW+`jEd^-KxKq66Ra+p!$Am zz+W0pD0{WnUp~HHhdNFbHsvRDM<ie9On%axAI}!LHm+1%mBWRYXZ@JoUWnJKYV(al 
z(PZPP-Zm50{X;8s|LbgY@8X!=S3DZcT5AI>t^fE5w*CCHV5_?atu0kN&waO9TQ*+E z+832TyR~m>fe3f6{Xjb7rQ~c;y>fcw{Ju%%?<?*8wNg_qJo4i9%<yfEtcEOytcNV< zlogRBow6pfD6%TDEV3@LFtReTG_p3bII=pjyi?Xk3UEpVNC`*{PALMZ!YO4Sbs&Wx zl{lpoq!y$Yr&NQK<CJ=kf{==kl8~B^qR>^vMats8r7ol}q%x#5q&BA%hg9d3@{szR lQXo<xQX*0#QY2EPQ_4i@L<+^Myqfp_GY#*Qc>8+7KLGQ$?e72p literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Costa_Rica b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Costa_Rica new file mode 100644 index 0000000000000000000000000000000000000000..c247133e334bee3b7802741383bddedb9cdf6cc3 GIT binary patch literal 341 zcmWHE%1kq2zyK^j5fBCeE+7W61sj0G;um7Rf@Yoxg4^=~gvGQIgr^4ts84^8ppm!j zf@b-l1kFP?FEBDQLE-=Z8;uzlz$DB6|2sD@a{m9ndI1Bkk8cP=uosYUb^+qx5TL;j z5JK2F+(4CJ=lp=``40q{?OMJd8st2XFvy8u8t6<A4RR`&209l+gPaVaLCyxzAg6<A Ppz|3ha6cE&GiF=>7LHDf literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Creston b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Creston new file mode 100644 index 0000000000000000000000000000000000000000..798f627a81e25f9657c12909f4b0878f55eea9d2 GIT binary patch literal 233 zcmWHE%1kq2zyK^j5fBCeW*`Q!c^ZJk>}%cy^L|=0FfuXz|3B#n1H=FSb0;vc{QuwI zz`y}v`}l@1_y&hC1OPD%gb=m{W+l%LsHXow5NBJk2SkIc1<@d@Nwu2`=ooV@0J&g0 AN&o-= literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Cuiaba b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Cuiaba new file mode 100644 index 0000000000000000000000000000000000000000..145c89e0f88a210dd7fa3d953d2f62af803b62b9 GIT binary patch literal 1988 zcmc)Ke@xVM9LMpKiaE-ftIb$p7&9Z`d>==8k+HWjMbiVx3kkIprbLn?F(N1dU8BuE z+=^+M6|K4XW2;YFo8Yp2R&9>i!b&Phe$1s?jjh-cPsqM~o}d2gkNVGJcaIx8e*E!% zz7mzSTk}Hydfeo`!;?pwd-DFacK5RHSZi0$9qWGYxn5#=$Cv8K;axHq`&0+Vrt5|D z-|U6n8X20((&4wqWMt@wjwY7KA8*aDS8l133EQAoZ_hEwvI0$>{!S*-M(yO$R5LYp zm;SkDkICpeq_dkZnftnnY-V)G%;|VZ@4xkXlT|U>W?kGVb7LVp_uPQYo0FmQPK}%F zP@vge2jrn&@3Hf@smZZlXztc;CER^j7cBooq`gxk8S72ny0DF=HcNi2O7oNbvM@7O z7xp)qMHA<2tmjR6WMo_m_7<2&`)1h1_4k^`zI;y$Oa3xTI#YG&tm{%#c2<k7j>xk3 zX1i>3$`sG+(&7^z$cjm0SMGmJ9v^(eu4;eZtUlRj<C}Jyl7r>8bY-`!+5MuHWfq$! zs-M)gp=^^VTBM0zmP>h7x|a7nC>1GFT5&W@p1L@sm9LM<`r++%!?Rz@#?RK+r&kzx z=G{EIDe{%s+*ztsGe0!dH4&|zJR@6*@6;{dG?<zPu4v822~(T;gVrA0A<tdz)om?n zrLKR#K41B;Z2#zltuL4*FYG;FccfpFhWb|9aP22)EZt}u&!3W}Sc^7&e%>_Sy+NCg zPsq#3LT%agj%ocdQ(viSGq3if=+0G#q;3Dt8cLaVBfo;+OkF4x1OsQM-5dm`ZD{)H zcp{W5QTNPuXUQY+->2N+KITt3>3@RX7zCU&a@JmV+Q@k$Cyt!C*PS|Y?p}BD$k`*O zkDNb}0FnWc0+Iug1d@f<rGez(b%`LEAgS>0lM9jzk`0m$k`Iy)l9AV?gyiIPNg-Kz zU0O(9UY8h>8Il^38<HH79g-fBACe%FA(A4Jqt_*gWa)KjB6%W-BAFtoBDo^TBH1G8 zBKaZ-BN=;L%1F*$mo$>K*QJf*jU<j_j--y{jwFv{kED;}k4ykE1IQF0bKrH8fXsr| zO#?CyUN;fQOdwN%%mp$T$ZR0ffy@UoA;^p%Q-aKi*G&pCD_%D($h>&n#2_={byI`P z4Kg{%>>$&F%nvd_$P6J<gv^oGO%gIoUN=q1JbB$jAv5K5Q-#bGGFixMA=8D-7cyb! 
xXUy3?|DXHs=y3OcBS($D{eOlnZLu4<!hH7{EzF5WBH>6_7D!G!FC67f+TR%~S#SUV literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Curacao b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Curacao new file mode 100644 index 0000000000000000000000000000000000000000..d308336bec9a539742ca3885c44a4d3c5a674463 GIT binary patch literal 212 zcmWHE%1kq2zyQoZ5fBCe7@MyF$eApsr~GZk8xICXrvLx<$1*Vd|9@Zy1Iz#a#}6=Y r`S^w~=o*+98!$Kqhk&#KK?n&J{D5cyn)&}f$RZF;l4V>#dri0im60!* literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Danmarkshavn b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Danmarkshavn new file mode 100644 index 0000000000000000000000000000000000000000..ad68c722f8f56d14a0a9b3e2e98fe56ee17256e8 GIT binary patch literal 712 zcmci9JxD@P7=Yn(Q!B*bfTdRUYh^!L7mcR2hzK+q?i6iB5Hv-jgF$1%7qk`B(Adw? zXf4qcK@cv{5J*G>EfGk|>AXTy5VZ8kInPBA^!>c~mF0~3G4=Kx4wJBlXI-?{ZgED1 z^M$#l?58v*UL_KKkf`rQqVE?HtDH*9-GQ`TZcAI~T*r@2bo<`E?${FDS=i8Bt82P@ zVNv(wJe`=B(!Hs1-4{>l{!mOO1Cx^c7?XjgJ{c@WWaz3vhR>>!I((7z&b^FmUd!lu zN!+Dlai3>h<@oD-zrF&-yxCuDO!-#%zwgSU(`XF0{Vz&|a(Pds-K;$sHEo}javVP3 z_WqEj%9rdIJ0xPrgrq`pA<2+zHElX1ACeHsh@?bvB1w^~NLnN>k{HR1q(*Wh$&u_x k`kFRBvI1lc$SRO^AS*%Ef~*Ev53-_~|Ffo`-J2Wy0%5kDYXATM literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Dawson b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Dawson new file mode 100644 index 0000000000000000000000000000000000000000..61c96889b074b751386ba0923b180e4eecbea226 GIT binary patch literal 2093 zcmdtie@xVM9LMp4RFq`<Y>FZs_EAg(;rJooDfY{7C&~%Md3k20#*aZNBy?&Co*S!K zE8gkUw(9!G{HL`hj%;ogGq>8Ba~ZVOvN6|IHjOp4Eh@*?&+}9N^+$iU^}T(*kMDg4 z_uuOo+1T1#=KIGr)xP0y?YD>Lc?o+<p1Cma&e>dJE=?ae-ankUaB$JUaPF@i!&}Ss zZ^dyLU(~PCN`96Zt-JKh*m{*&6VbDlEmgDgD|OcV2AMN0OJ}E6NcPQgbI<p)#XmgD z<b3kG%sn-3?md24bbPU%ci>~0AI;FYZQrT|6~E~F)_kNEW`3#j3X&>sV$9^HCsh9E z7pCCaHnn)@lqnqPk)q^b6Byhg#XZlNlEW*dwB?X4>s%m9LS4GNVTx4bMRjG_PqH*E zsDu8=DtI+pFZ=VVTK?H>U3K|$wPGO4+<*2x^+4Yr=D{~#RS&g)XCCf-S60>ynMWF5 zk?LY&R#oqiRkucTsOV)C8ab_NGPbLl!DD*O*gCcLtwB@!b(@6u9Wm=Ztd+V=yG{Mk zfIPY)W*&<xY4BH>$oiWSxt(L86<<m8hvj<1+#71cr+yuq7*QKfOz0;@PpeJ6SM-xZ z$JFM=^ZKdeUbQ7WYPP;~M79;3Gf%hdmd1>J(-ewH(^#)*$*Yo<i_NArEk|14Uv1ic z^yHbNO?vw!zuFNG>76GhRC_dEcl2FR&sN^8ceS5aotb00tL}vA@}`-1@SMa)ubVwt z{jz81qS-s%E6*oSnisxlmVG_1o9;8KrDt=e+5bj?q^8)<U6WELr%p-x4?dpPUF-9C zUSA?*lII=h@J)5k)7@Qp-rJHglV{%?_ncE3j`(8L_B7b@xIOiJw=e4Li+U?pio5@1 z<llFYuz&7$F#h-ddG_jl@wE?-T_D>)_JM2!*$J{0r`-#(8Duy7EZaf$gKP-d5way@ zPspZ_T_M|Y+I=A#Lw1I24cQyAIb?Up_K^J{8$@=9Y!TTbvPq}iC9+MY-6yh9WT(hh zk-Z|DMRtp97uheeVPwb1mXSRpn?`nxY};w~jcgp*IkI(R@5ttn-6Pva_K!3G=>XCK zqz6b7oVE){8=STeNF$uK6G$tNULegtx`DI<=?Bsfq$5a6ke(n-aoVmRZE@PZAdPX_ z&LFKpdV@3v=?>B!q(4Z5kPaa&LVAQW3F#8jCa3Ka(kQ3x6w)fE?G@52q+3Y4kbWTz zLpp}E4CxutG^A@t+nly<NaLKgb4cr)ws%PLknSPvL;8m_5a}S&Lf`*AhUvDFL0ief T@RC4TprkNdS{f(~l%(GQc_b}} literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Dawson_Creek b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Dawson_Creek new file mode 100644 index 0000000000000000000000000000000000000000..78f9076308013c2c0b46f8d1b9700abae2f435d3 GIT binary patch literal 1059 zcmc)IOGs2v9LMo<$1%l({)7V&E)E2#Z2~I_CE5fUB$-Y0SQsuGaFv2kD-%@EO`aA( z8|`45f+!*aK?>VMP@5(yr^htCY?xlY%JI?M&iBx+MYQS;_k0e+h>PDhe!L@5tNwV_ zxNmrQ>fFnHL$5nV$8Xz<6EX8(XroPTTIfu+ZI&m8XZ2H^lV>{~>nZh6Qn6b)mANL* z3y<uJNh2@QH|?t{S55lPdHZ_ktjUa?vTr&%&D)-1_T9b~^S-s-eyG`QrW=mxkLwRg zwyI8N=e9|%q*CXSG5IuI(4TKC%9nc?{q<Z*zFl9nGhGuhJDj$2XYQK$laqGg#3l3n 
zz?l89XTU6O?YH^rF<Dxdu*;?WvYhYJE7^psPPOZw_Zy`!-k`(FTSQ%`(ZOK3L{6{O zMP1b<+E!(YgBcSEB1KVm{PU+c42K()3d0c-MBTktmHK<RKZR3O>~D&z=DqGc<lZCh zZH}uZcjpOypYgx1Qty81SIPUo;lGAq>+bGd$V$jk$Xdu^Ubh;u9I_s=AhIH|q}Q#9 zEQ+j(EQ_p*ER3v-ERC#<ERL*>ERU>@6o6EKl;CwWAVqjx6-XIK9Y`TaB}geqEl4p) zHAp!~JxD=FMMz0VO-NBlRbE#XQWsJfQW^TvxKJC9D-Ng*DG#X+DG;ep{r?QhT$ek` FegO+5`(*$C literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Denver b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Denver new file mode 100644 index 0000000000000000000000000000000000000000..7fc669171f88e8e1fb0c1483bb83e746e5f1c779 GIT binary patch literal 2453 zcmdtjeN5F=9LMnkqQH%ZQ;8v<nU)CgtR#>b8FWLuL3o9$5k`URY6U)Y(?UE3#rVT< zR*YFDf|Ayni4SWwHq<p-TiG0LO>3b&mt~`eO%k}b^FAy8>5u+w>&t$;e%!&IpEvGR z-Zfd`A2->2!ozi$hxe(9ANJ?zJ^i7o`=tck^V$z;Z>?YNYndW?3oq&3_WkO^wg^2m z=l6!8>R53#-KR(Ab%;NrJ^EUhPh1;)Mvi^&5##48<irPg!hbwh2Hs2%VrRSzYW0iY zXD8&O^>Hesdb*xmI<BVVkLl2iVHLU~TZhY&D*WJK=@{9oZn)H=BQBf}ktdsV)O$T5 z`mJs$Uu_mQw!I*4+EOcS_SVR$E1e>y=m9w`H%Z)*G*8CPE>zRQ9WpLBQN{f_SI2)D zt`dgA^o&zKs+or`>sx!ys9C-l^0w`V)a(@jIcM!h;`Zz><Q+@j5p!eSmx;+*B>FGv zB*zAkG<-@YUv`T-2lnZda}6rB>qVV*v`nQp)#;2^7O2d+7MZninwsxiBNvp7s_euE z<y~2ys)eD+GAI73$oVcp=8jzud8dDtcYoF|7WFywJ^j1I;`X2Py}P!F{Q8geeJ#7x zl9E1sf6Z1^kp8kRELg1ye;bs})JEYvcR&_JR*9mcZF1?Ad{O-R8+zF%mCDuFsvmlH zu_~$b>e9|x>fuGjy37}>mM5fY_lmETdpuf~XP;K(-=s*-%&&xJFiNiU4~kX2Bl3~q z1ER8JNIp8yCaP+V$<<x!#AB|ry1KPhJ)U|*KT+pZHIW^<)>*7-ulRIbVydb;<I&#G zXyrYar`LY_i(1!NA)h=OC7$x-%BK&Fi2Cw)+0Z^D)@M)14fV&w#+Zw8Q%R@T<R8<% zoFmFN{JGv7+o3iOoX}fFed@Wc9{v1zk7{gc)?1I~sivx0y=`ZL3J&_~Yf{Md*S|md z?+pZYcL)&(yxkoXV&1g~v+oi1yIkgS3s-@8mYb)-Jf&{4A|Zn8H}}7<Z;$y!yS`EW z!d$>yRY*i1vPNW)$SRR#BI`sJimcRXmWr$uS*+Ep7FjN`USz?@imhhJ$eNKwBdbQc zY+hJ5XBG~uoMY+8+L6U0t4EfPtlw%1fK<S0N`TY=DFRXjjxr#1;3x!A364@AwcscQ zQVouBAobuV2vQNGBuGuHrYJ~Nkg_0kK?;LZ1}P0v8>Bc$bvVj{)Q6)$NQJDXL`aRS zrbtMYILd_72`Lm(DWp_Lt&n0N)k4aJ)C(yXQZb}tNX@LKXh_vK%7)a9qi{&&I7)}q zj-z-;^^o!*^+O7XRM2Wlh}6((iilLvYRZVzk)x1EC6Q7hwM2@ER1+yDQct9yNJXut zq)1Jzrl?3&t){F<T{#MiRF<Q(NNqWai&U4RyhweK0wWbhN{rOlYKn|h*=ov+)Y)nZ zjZ_*bHBxJiVk6b&C^u4Xj)Ef<=O{T+bE_#jQgy2-J5qP6DLhhnr1VJbk>VrON6L@X jUtDkg|1SRy^Iu`1`R|b8nxB@HmXYGh%uLHn%W(V&DI1f# literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Detroit b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Detroit new file mode 100644 index 0000000000000000000000000000000000000000..e3ea5c3ef1219fdc1b6e6663d2bf3a38c27df689 GIT binary patch literal 2188 zcmdtiZ%oxy9LMn=L|_+4C@EB;f{cOux2tHk!hjL#71N6;njQ_jyF&3zjX@dOf=ssN znhtX{mT8S@jnr(pim5qkYh}tR`!hVX#e)`gj2^hy_;ucAJ?l}=TKDdp*WKNIyHD=t z9chehD);{JO0?f_c=g)D=ggh<>iIBd&4JK9@6%_ET*&=HE~p9pb#TC3^bYF8%1e?| zdRiqtH(jntJFEiUAqkjgRI=`oly6?qsVAdy?TKzZ_4TJs+JQ|veRs3DuJb;9eXQ0@ zTV0?t>hjGE^HcPV`N<|TCtppUGG=DPC#xC12V~|CV=C+XFLKkVb1M7D=W_GGBkGoR z&)oXdUX|1EqsiSosBa6NF|!suuW!$I&Eyqq)p?U2Nq(R~=a0N91wYTzvxjy`;c!G1 z^~a?6*j!c8wOC5`W~#Di)Rb@eQ_ZQZGj}ZiUIp`Jo4MsfYF<*RshIh#t{DBz%%Au` zFF1SN-1%L<4h{Y)cfG$;-@WGxx#z`=y0Y!4EbQK)?ycV^RZSaIbxFI_R9C5*@c~m? 
zv|iT^_nNxYg}Uxot64Obs~5ktTb6uPslwY^Wa+y(s(x))mh~m5`$HwN{IOBBA}d`Y z4WFpU#JH>szM)qBG}WxiKC4%qxolQX4(P_i!)DD$uU^}I+&nncs@FB`H|zVCY88G* zHtcFq8;knnp=ek&rFKYjZHa0gYm{hSx{7`ll33!nioKOBn@7J?Eqx12>$%JN;r1-^ z$l$PUYnU{$=eT|}sLhtP{d#N0X|t`qQ*WERB<&S_s(s{?bYym@j-f-+d8JW3-rpln zTnwr0T~T@Ry=>LBu1<Em;3+RI{;GudtN#OU-#<Fb^L+o!m%RkvKk>FV#rHox8JFVj zG<Q#15{`IPD<WPE=K*_`*;5^{|8>uc`}>aki<Mk)&+x0h&svbhAge)^<FxBR7KE&b z$5|4xCZ}B#vMOX*PP;B-VaUpmr6FrW7Kf}3Sst=JWP!*EktHH)L>7sx5?Lm)PN!Wc zvQlKJ$Xb!bBCAD~i>wz}FtTD~$;g_KMI)<r+GQi_cG`s_D@T@&tQ}cAvU+6s$oi22 zAQeDLfYbmf0#XH}3`iZEwh%}qkWwJEK#GA>11Sem52PSSMUavpH9?AkRK;n_g4D%n z3xia~X-k9D1}P3w9i%)+eUJhn6+%jc)CegOQYEJ?6H+IqEfi8Ir!5syE2LORwUBZl z^+F1UR17H@QZuAzNY#+CA$4=w!XcG&+R`DlbK2q|)kDgM)DI~jQbDAINDYx9B2`4n zh}6+(3yD<HX-kRJ(rJr{R1+yDQct9yNJWv7;{UUz0h`*OO>AL!PH}m0X<@jmthlVW GH1HR^1#Eo) literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Dominica b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Dominica new file mode 100644 index 0000000000000000000000000000000000000000..447efbe2c967cc5642b58f51aff86b67073134fb GIT binary patch literal 170 zcmWHE%1kq2zyM4@5fBCe7@MO3$eC<zyoQ1C|Nmnl3=IGOA3wmr;^P~_;1~?#0zn7~ Vru_ix0GaatKYmlWfX16}0RSy&9%BFi literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Edmonton b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Edmonton new file mode 100644 index 0000000000000000000000000000000000000000..d02fbcd47f845bd101a7ec97150df7821b826357 GIT binary patch literal 2402 zcmdtiZ%oxy9LMnkp}>tlrxHU!BP|I6<c~Lj%%~tjyg_h5G{Pv5U92FrJ1xXh;E0}h zDq}X1KqY9cFm;W_2AadIKRS0^E-TTp(pfh8v&F)=o%eaf)}tP^^<}^N`T=7+`n=t1 zJnPcMKW>Kk2@lt49^Pju^YP-?iSu291NzOOVAZ|rW!>%DuJ*iiN$+i)DfZQWqI;c3 z)mx<(WnWaU>d*LC_D_8zK5Y-vzJ<Rx`l_SF=Vce<>A0`OpA*l>$x9VtYA{Rs`(s5= z^&CB;_^O(jGpU19$5cq%Ssf}5D~A}-j`2O}+Ved+?97M=Kir}tKI{>ZZ+A=idW*QO z{RMgbmRfOxr$)|NoiCya?w7N(62y%Ox5?-Qd1_9mL(UD1S95<|q+`AfRk6MhJ@3em zYX19|^-Vp;Rh)O8y!rV7wV=!|7uJ6!ZgGAiZ(Z@8SQK?s#wThKKXpzgI5vocu_HS1 zvRm9fyjLfkYE;Qx+jYvp61BLwPN%-QM5WcW%Jhx1RYv|gxuj%5IpZIYccg!*mIf!v z%$Pq!=EX3XHF-v4ANyI}`PGnEw%?)e8rm(E@AygI-MLNVG@Q`)w05d{i}vgLYPPD} z#johR+_ft2w^5m&+$8c(^~r+pDp7E-U9Py2BMRT>)hka|DpymRe(;0ks;JVVi#y`f zL(2+vi8oM{#wKfb*>}o)HBy&5kE!zSlVrvG3!-8)Lav?~6>Ij5%ZJDML}jZ_J~G@c zs%j3&wO#AQqpp*>x~)w;mV7`zUguFY;X8G0exa(p;?;HW$*S&nh4utTD$l#wy8ee> z)cTH9@`;lX;z@6od}?4^G?d54#vMNKwDT{yq2Z9&7<Eo=D(VoMrY5yF^MLY>oz<J8 zcdE_9BYMk(S3TR+qo4btLNztF=&gg<s=2CGZ`&2CL}0+QuWN#)eKY+R|HZrC|5~>Y ze*Zh&0YQHMZY@IWdzk%{D_w5k$~8}^c~+UH*llJbM1cKp|BJaz*uUdH`TfienI1Af zWP(;RLu87`9Fa*PvqYwe%oCZY)yx!`Dl%8AnJh9}WV*<FkqM)nF)wDy9CJn{jm#RE zHZpHy;>gUcX6ne?k;!w+9+^JJ{E-B3WB^G4M-GrAaAW~V14kZ^L?D?!Qeic@K$3xE z14##x4<sQ-Mv#;sIYE-bkrgB@9C<+!V>OvUQe!o_L6XCf9V9(Sevkwq8A4Ko<OoR; zk|iWfNS=^HA(=u_Wi`1%lEslNBwZZ&LK4Q2F(hRiIYW|$WDQ9hk~buAR+Bj-bykx* zBzaboJtTb``9l(jWDrRql0zhkNEVSaB6&m-X*HQdQfW20M3QMW*+kOGkxwL{92rGY z%8^qfsT^5F(u(92Ni33CB(+wPTO_$wlU*deR+C>O!AORY6m#SlNis*4ku-DU8A&up zrjb-zO|Fq-TTQl+bX!fnk%S`|M^cXD97#Hob^ITv-C>Hq)RaHTm64L3lA7d7OG`;h HNp<`UV`q_f literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Eirunepe b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Eirunepe new file mode 100644 index 0000000000000000000000000000000000000000..41047f290049eed111a7e1c0e6c605666cdea5f1 GIT binary patch literal 690 zcmcK1y-Pw-7=ZDsDTGBW5k&N5YeHW_M8q8(ijHxoXlN4p1A=I|HUvRSQxq+(IW>hg z#Mz{#+bU`XZAlP)i6*D_x!4p04V}yJcetP&pVwL2Oif!qR#@J!S>v)fcc<m!D7&A| z^`@*`YhCA`mAb4P+QqpWRjl=^(m=~A<+to|b4pcC8}?oKLRFm=`}KIxdkgQ_?>enM 
zdggUKnNan@z1@gBd5yC^-K<5`=fQ~{y3VTM?H4_gNqVC*k9ur(Per>P^n|`uu~gMg zY`ALG4QbaHV+8}hJ{`fJF;{Y9V+uMDGA4g+b#k`a=cQ%GicQYSj?3TrUd|tK)Z1T@ z+ZeJSX?!IQk_gF!r1F(qNHSl^hNMICAqkO;NJ=Cpk`&2`r1h1&NMc{fjHE_#bA6H{ j*^%@}eq;p57?4pQ<3L7&j0Ii!?SC3gWI;x?7>|4blHVW? literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/El_Salvador b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/El_Salvador new file mode 100644 index 0000000000000000000000000000000000000000..9b8bc7a8778a0b8caea494cd5995318ca30d4fa8 GIT binary patch literal 250 zcmWHE%1kq2zyQoZ5fBCeHXsJEc^ZJk;;YLP6lUFdpcpedK&fiq17(}q00t&TApHM- zO#%bM|NlETFtYsrzj^@!hmUUvgR=_|2ZsQ4f<Oofw*7!;0h;*#Kgd=P7i2St2H6gx QK`sE%q`86%=pZvL03CTYLjV8( literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Ensenada b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Ensenada new file mode 100644 index 0000000000000000000000000000000000000000..29c83e71ffa6b071c52b2360295bf55009c09b76 GIT binary patch literal 2356 zcmdtiZ%oxy9LMnsp(Ki)RFwE*C#4t&SN@A){}j7|+(5aKn2{NO24#>CO3@7+TWihn zMJMGdIyrm5S`o9z{xNdPtu<C*E6m22DKLy8){4q;JMUi)d(^Ym@9vz}-QDi)?t{<U zx2w7`&GnCGuKk3=(_s(qBU|kCTJNtdT}_2`-P6rey_My4??x-vnXu`p&nAwv46gd6 zZt%qWb>Bpnx`x`{p1S1UD&OaAm&;|zawQ={Mm_mzZ1FZV9-pMfr_<F$=t4Cy7@@9? zPN^Syep8d@2Gx)47u5WbZnNORfC}$#Hw)kER*Smc)apdLy5U*Ti<|1zlIs0>Y3U9X zk>}IP)~!{`6S8#V%3`^3UZjo+&XlO>3=@56xx@@EGqE54E-QLw%uTOe6y3O5-+bsJ zSy>XUSJiy0;xd2Ix8#4QZjJa-$0v5G_}NL55Z0m+hCern6MNO_z8;fw-Y?0W$Bg^* z9$Djm+@u`aD5+IPby|I#q<b24M)6$9j4#nyY2&gsG+SrK%u(56QF`5<V`}}mYdYuR zr)tAnk><7!URSrb{bBBS@rb&!_B(S|YnN;+>@%CnpOf4*#%#{rCwDJ8Y&>y=;+c6x z=Y^z7-q0bPe|d>)ed)9*__{{C&Bx6>y#-RZ`+zCxaLcw0rDl7hlH!;g<J&PUzH70j zB=ajNxw2mGT=BEo`ALi}ojtF1otoA64Zovyw~pxh`<_>2<!AK+olR<wci24m%yHS9 z+;1MLIw0lYCryQ?R4OK0O;vo3R1H*`>d;uJeq)QN84bw89TobKFJjcbMvs29dsfw! 
zB<Q-f5%pNse7(Q+tg4Tg)D4BFR6}5%Y0U1I#^I~xVB|?T*f(IBW?JQm&TjMMP^C2c z+s#vFwury1-aP$6qPT*BgMzOM4w+-G|IRBAI9lLx1p+UND<lwTYjIuA`=ABRt(<#l zf!F8q7W-U9oLqbEwWr8ucVeHQzi;2aSlEp76@E1kU}?zOki{XZLzaiE4_P3xLPxtq zWR1uokyRqgMAnHc6j>><RAjA=cCpB69qn?F^&$&KR*Wo}zr&i5MI)<5mW`|%SvazC zWa-G-k;NmcN0yJQA1MG*0i*<uwgyNMkSZW$K<a=L0;vR23ZxcDF_3B?<v{9z6vWY1 z1SyH5tqD>Tq$)^Rkh&m+K`Mik2B{5F9Hcr(d64=b1wtx>l*rN62q_X$C8SJ9osdEy zl|o8|)CwsUQZ1xhNWGAPAr(VP=4fk%6wT3A4JjK^H>7Y#<&e@LwL^-BR1Ya1Qa_}C zj<$kG2_0<>ks><UDk5b>>WCB)sU%WLq?SlAk!m94MCyqY6saguQb$`;q^ORzsz_NK zZC#PVB9%o-i_{h=E>c~jyhweK0wWbhO6+KBj1<|?Rv9U?qpdSiXr$6esgYVE#YU=) d|NnA_*{o;VtS5QX-D&QWByVb}JJp>M_7|antVjR= literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Fort_Nelson b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Fort_Nelson new file mode 100644 index 0000000000000000000000000000000000000000..5923cc6888f881c8672d96be9e23fac7166b1c20 GIT binary patch literal 2249 zcmdtiUrg0y9LMn=2m(jQZzf8F^`s_(DTW{@nrS$~KtK)_ty#9B;c99Y*4Bs?t>_;Q zIs3DUzS&KzCNgHTkqmEKt*i;lM2yHv7)mNy0ura^{abfkbldu!J<sd6v%|Rhyo2{t zR-}9XcqY4VI6U?4@SafPuEqlg_5HSZ`&C_{cFwu9t#k7{`L67OIc5gs`^+wL-1}O( z<M*5HzP)l{;;8m?SUK6-sy{S+Vtd<n>yLG>*}le|`cvgLd#buzf6m`vf7z6yzoul^ z(|LEB{%IvLki6Us3@?$vs3bGk882r}PnfeUmu2X1pZTq!Th29)>-p_%a-qIgho3oQ zFK+G7OZR<fe=pjjf2?}mUQVmkk@<UMbZUi;Mc2yM$aCh(K!uDSf5`m##T_znAkTz8 zN)_*|6chGhtb{)~#Z1~h-$ras*2t28oxD0mqZXgF(dLZC$Tv3N9nrw~eRfJ;v&MEG zk+_Zqz3Q`OncB41$lDEab)7ZSp4uVTR6b^=mzPO=e$dQVmL)S&mYRgwrS{sG1d|w^ zX%mMt^tw|sZBpk9opor`Uf(jJH+;}zP1OQ(<Fg0t?2U0|&XyB0H}jmiY2D{CFaD@W zPHmLri6Ko1)JRJ2x0*WmfGlWl(VM%f?ZU=gx~TPjn^ygVF5b1q-tzG4CVj_TyCm;% zlTkX^W+rbmOVbBzR#djhPKuQ5{zS9vO1~^WJZ^G&zLXXF5_DzT$8u}^n67%~Rk>~J zNnKsrWY-k6>+KJ|Y;)68*XBNI*Iw*0c?(~XysrHwKkgC9Z++9O8(J@SzT2t=$F|tQ zr+4f6PYZ0(y*st|jYYO(MY(RM5?h*-qrtLa8yugd;bGyEBHZ=g_(X<6uZ((LDD<8U ziwK44YrJUxJ=VX4+nZic81#yB-6?hFE_aH9UZH#E3H$pF{`-8a`>8*l5&v)auc6SU zl~pewb3!JC%nF&7ubUS#F=S@Q)R4I$lk;`6L#Bt!51Ak`Lu87`9Fa*PvqYwe%oCX? zGE-!#$Xt=h`nuU7)Ae=pMJ9~Q7@0CMXJpdItdVIW^F}6)%p93IGIwP1$n25nBlGul z2|zM{qyWhQ`bpr0EbzEA0C_+Xfn)+n1(FLS8AvvebRhXa65{JJf}{k=36c~fD@a<9 zyda4|GJ~WB$qkYmBs)lYko+JCLNes*QiS9PNfMGJBuz-3kVGMwLQ;j~3P~1{EhJq? 
zzI<K6kc|1dlp#4ol7?grNgI+kBymXQkklc$Lz0JN4@n=AKO})j27O%$ksKmPM6!sa z5y>NxNF<X;Dv?|w$wab=q!Y=fuS+PBQD2u*B&WVEsYq6lv?6&$5{qOONiC9FB)Ld- Vk@O<@^>qpM{-+HNxF}}@{sJZt2OIzZ literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Fort_Wayne b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Fort_Wayne new file mode 100644 index 0000000000000000000000000000000000000000..4a92c06593d33d3969756f482e5d3d4b773984ef GIT binary patch literal 1675 zcmdVaT}+K}0LSs?kk@(LFc&RW7jAmD%q$zL)}m8hP9)@yXes=a&Q2uH1yVP-Da?{F zud{4KTr|uuW;Thu)jw}D*7heCne9CO-+%60xN+k-d!Em8&Q9xG{Ju}1pk!mR^T#p5 ze8S1G-kjV=y5`b!I@UdY<A-eCH_knA^p$+DJlcKGl_?+Y54%rdzRM?TN9m5re&^}D zFS;|Y(|I=ik$#?X&FOMl^oz+yoEPelei?3cU)`+Gue%%FH|Gw@?)ECTr><P8rj>4Q zNkG1>nd`pGnkC;CPIEtfo1#CP{~m6O)ZQ6SIgMfMtL;_k2|<~Wo+dK-&+5#$H7c{C zUT2M+ud*(e=>>f;YT>aunf+{@@K=}0oU73yca>i*YKRbvQxoKp%8z1c<U|?ByDtJi z`{lBf^J3YD@j5^1uFAjjRj&x2RRyOy^vdpfwQ5hRUfo`%3X2-`nx-5j{8!}K+ETGD zu0gIZ_KTviA-N$lL2T$Nki|15isDD9vSfI_D7hFVH+}3Br44g*+2gNjb1+hGY3Wc~ z^TJvlXjR)%lrG=es46Dk*4uNd)b?<X49;#4!R}i!G&Lkb?I&f`K!MoNv{&xzN)^>L z#d23$l&C4plDiK(Vu;VahDQ9p8GJi<9X4dx@PF{~yp}nR<9XLF`64{;LbEf{-jA`@ z30$2?o_Fu2Z)&zb;H0ISbE!F(n{!dX$uRdB<}(hTy+Yvcb1O1mwsRX8{44VdJg;zQ zxEYxrGC^d9$P|$|TFoSpSt8T4nt37<MP`aj6`3nCS!A}zbggE-$b^v@BU47^j7%Dt zH8O2v-pIs}nIlt2=8jAr?d-W>`W$BdKmtexND5Yy1CoT*WPzk%HF+S3AekVkAh{sP zAlV@4Ao(B(AsHblSxrtzQdW}{l9tuvg(QY#hNOn%h9rk%hopz(ha`w(h@^<*h$Lw> zSt4m#O`b@iR+A}`Dv~RbERrpfE|M>jFp@EnGLkcrwAEydq-`~MBZ*s0=1A&D?nv@T o_DK3j{>UT1`A?q#qs^ls#XK5f{WIf};}c{3NlEcZ@rk2<0ei4<R{#J2 literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Fortaleza b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Fortaleza new file mode 100644 index 0000000000000000000000000000000000000000..22396bb5151aa9345133b2835aeedcc4d8b401d2 GIT binary patch literal 742 zcmb`^Pbh<70LSrn{-qHvii3Gr4vKm*+Lk3(;-GfG!AWX4aBy(ZI5;Usipz79gWluj z<ih4eA~pMmi{e7e+UW6oUq=_pv)+By>Rs=?zwmTquFv{$D$O@soNjaRxqj8WZSBvu z#2QEPvCm-@f9{uu*(H$*9LUsTNv2zG?R0!fWXjDlyPFlc%#O^5ed2Xn+i%q~qHr`T z-y1xtIME}EiDOZ6U)ZI+qAEYt$d8Q`)poimJ<HdsT@Bd6ccwblBC@kCsl1a-w)bvP zbopQGuFHh*7f$RzY+VF%_cHi0Dh89<4lOjOsMgx5aQ*64q4mv!Wodn%cU5V9b#B#; zjfbsX*=H`F`J<fhAL10hZ!y29HIa}|jwTio42gz>b2RagfQ}|25)z4t1Vy4EVUf5< zU?egU+R?;Df;*b%NO&YZkIw+e2*?n~7|0;VD9A95W*lT7M>7&K6fzbvn4=jD84ei_ Rqvl!ubwGE>j5*|Ue*r;HC9?nk literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Glace_Bay b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Glace_Bay new file mode 100644 index 0000000000000000000000000000000000000000..f58522b674c2e06fb5a184130fbc470ae9db72de GIT binary patch literal 2206 zcmdtiUrg0y9LMp4AQ6dCL7AY5R*1oozlEZKg)t1`s~i+m=vzSqRwxNVlG)ONi)LG) zWsc@@Sr=sub7ULC%Dl+x0jt?s_>jYk7E7%)H<Q!zer~(!uJt>6ey`uocFwsupLb|& z)1%pee_T`DI~=Z;+~GN4ySr__@J?>uz)pSh=uh%x_s=>q?~jcmI$>;Ip^i1>OO*bm zQ)NV=i-z^8%)=5hr(ds*-z~9$UX4B9A=lU~I_-y7P25n8PXD;wT>EaDjeot`T-US4 zUf;Rc+|abhCe$RG#L^^tWB#PMY5o;EV{Ve%9COCbjJP5*FZ`uRV`n6JcvNQ{J0`O~ z9MW6%za?|pF4~mmcFNqUZ|%J1{pQxZBlflx&zsv5U$dzT8cgcsN17H}V$w$U=z?F; z%pHTBnttj&$>>|B!T$NOaNAPNd?i-08f$EJ>#veiQebmeeJy#Zv+SMOAIM$NVVj?H z(&V2Tvx_djZx(;?nJxJ7WwYdv)uo@bn!9&>s)cWaOi{}LUAAMB+*9#_-dkTO#S0(P z<;8ij{P(@KBx9v1In`xL<8n-Cf1O=1k!bGUw^hr&&zAD8T7BS1f>f+8RO$UwRxU}= zRc)uGGC5L1RYxUsd0bcL?UU8x5w?2vfT{lctgV^cYt|k(Zr6==ne{sk+S<W7Q&-<( zAM7hKTK={^^kS_%oYAe1G!{yIT(dTmq)5X=nKq_IO5;~SZHgY3ruX8t`P>QFushv8 z{>@qQMEi96<e}rHrRu!hxa*+Vls95qTYAh>iHGgxiVbG-<gm8qcT4+dzjn-MmX5(* z?YvYbPxo!nt*3+1wXH^<8Hkr{bp`tDo{JKg5-}w*%H948uW<Op$v_|+{?0^1x#yuk 
zjDMcy@9|~jp}?|YcPiak<Bo*fA3hL>@VC&v7|JF8IesY|W-!QTkl`TXK?dY?BSMCR zjES#dP{^o|VIku}28N6b8JgFP4H+CVI%Igr_>ci2BSeOXj1d_mGD>8a$T*RKA|pkH zij39k28)aq87?wjWWdOXks%{vMh1<H8W}b+Ze-xd$h~gp$k@GZ@W|+q;UnWm0)RvS z2>}uVBnU_pkT4)|Kmvh80tp2Y3$F_X5)C9ANIZ~$AQ3@Ag2V&~3KA70EJ$3Cz#x(F zy3io8@w(t3(eb+QAn`#0ghU7l5fURLNJx~BFd=b50)<4%>q3RZ%IkuKM9b^Kg~SU9 z7!olgWJt`ApdnF1!iK~R2^<nRBy>pZye@c1^t>*7Nc_AmfJg+95F#-|f`~*B2_q6m zB#=lXkx(MB^txao(e%1-BJuRPfFcn^LW;x`2`Um*B&_(qifgQEZmw%@dU;MTJD8bX Mo|P5M3TDRs4J6`am;e9( literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Godthab b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Godthab new file mode 100644 index 0000000000000000000000000000000000000000..ea293cc4066c0e01ffafcc98e2db8a710596997e GIT binary patch literal 1892 zcmdVaYfQ~?9LMofX{?!jNkSzgxt!{Bj&iG9QVku~gItnJ5)wj+IE<NDv#+f&!!Xv& zhzH?8E^{42Y}U+eOmjE0VQy=GzxUrf^2igLe>?xzSz8a<zVDB3+JZul<BuoU{Du$D zD)Zrc>-FZfdwr23)LS{Oz301z#k|!Hp^r2?=$3|mJf|I-j%%m8`?d3>?HY0BtVABI zm8d=Yq)U}bbma!=T3jmKX3UlDIX;O=&6FO_6p4+Dlb)?RN?dS;#(nOmy`INv@5T<= z=SoZMTlZC+hhA%Z^#e`VbVCzYoKaW7F-^+5FYeMh@ubyDzX>}eIcl$@xXUG_*)Ibk zs&&BQ*)s4)nGQOeA%h?0YHCfsrqvJ9^v%O`$nHoTS{S3lHVu^FS;3l75FsOCUutGX zvt$Kd*X)=llKuXLj10OUBO9*CsHTH5y7q|V+&r(|ZB;Vn=m8yDQX=E3HtP5(TXjP5 zJe}CTSbaI!Iw^dNPImUz$zNQW8<r)x_uA>y&%I>YsZTn+F+^tUZ!39q?_{R`qvTgN z$gKQEnZ5Rn&KYuA=H}hhg6Lf`FRfM!Tb<PTQL6J_?bM>?QeAMZLKi;v>7v8awfO2X zS-d4pOKK*{lA>r`x;a^vjZc-*!f;uh)J0Zgev_4L9a5GUrez<VNqL(uT7KuURJ^~h zt54L(nudC<+;vgc*6!7H6^GQn&98pTvK#>|0)zhfPoQPpHdkd?jdvU^e!rWZ2DGv) z-Z@^MBi`jPAGbNZfPh~w-(Q^USMw)4=0CJJT^`OCIbr0CkyA#_+18via@NRcBj=5r zICAF5sUzo(oIG;&$mt{Jk0gL(fTV!rfFxmSvOv;6@<0+nGC@*7azT<ovO&^8@<9?p zGD1>9azc`_HCZ8P*_ynN#E{I8)R5ee<dE!;^pO0J1d$Ap6p<W}B#|tUG;K|uNTRkT zQzTU+S0q^^TO?g1UnF59V<crHXC!GPYb0%3lQ)vMt;rlo9myR@9?2d_AITq?0AvP` zDM01`nFM4OkZC~X0htI}GZV;EAaj9C1~MDSbRhGAOb9X~$dn*+f=miBE6B7U^MXu_ zt(h5QYHZEiAd`d44l+H+{2&vA%n&j~w*S{0g`}E^O81zzJKgE^BqS%e664)YZ<6cR H*QAi2VUWP1 literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Goose_Bay b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Goose_Bay new file mode 100644 index 0000000000000000000000000000000000000000..b4b945e8d8d26ff300b54916bcbea257817addcf GIT binary patch literal 3219 zcmeI!TToPW0LSrNQAE6fSG=U6CaACiO0Ks6!&ngHq=1VF9u*^S4KozY6K3O#Fw=}P z4rrQ#HaT7%8dIFyY$7a?&Cxnw(3*rt2{Kz1L=AOi`+tu;FylkxOpl$Nv!6e^2OfW4 zQkLYr5n*`d>McIu=DHwm&VlcXw=#R-hx<?0D-~r)wn}@r`OKy}N51`Np!vJ>nj>cm z?whM-lpi^_+}~b3tn`Q@y50VLw-dqV$JE)+UoQ#1U^r#39d~bSt=!_cR21v{DZ$H9 z_vQp={lsR=&lVr2vsbm{a?nlZ<v)&EuK4$~Ua75eG&JU0e?7X!(pZsZy;_`X(aOx$ zYdPVT>xILuP4NRQzh$_s&B6CAEpY+P-v{_RZcKROywTR~xar;CyjfG{__L+L={kPO zaqHXx=k0GQEO(B5ZEf9uz|yv3zxD2x4=n9@Tdnt&Wm)bgue3ghde!pRlzG;U;1!OJ zUshWm_BY9oeCDf<Iuf+UH$qjnGemph=&yF#`)QuTZK`2+t7hDON$plpt9h+$mAcD! 
z<Q`J3<Q-ij_Y6BJ`HU%-d-?5>d<~_tZ|g>>w^ktcxm=?5txT8ueYrvHe{{X(x5ujb z7cbTZY?`SK%$cDLN*}2Xjvt{7F?Xs%gP&*tV@F8Ce6DK4jZdWEZEktQwX4#|nnwAB zGiRhx$1CO0pL{BfSzqrC+?rz>n`(ED%PnXaAGz24VnViS!eGT6G%4N{)VW8Q=sVdp zv2n99>1JQo<cbU><kBQtXjznED(_>PQW&U&?YwOZ&z|FsSf|>i#*KDQTX@128Dw<7 z6j5TE?%m`zkNDhWZmDw5c)Zg!^W0(gtSkAh*~iW*bG}M*y}aX)67{j!6`fb4#B58o z%}vf#Udftni=C3F%!?gno7Z0Gjth-+#a)u!@qLH7;>+i{6I>5n^Y<-N66=T9l9U)_ z!54$1<fT!vRQf=AZFZo%aJ^GnG}22>Nj)v4JZ_fLBKJvY%|^{S%C1_^Hfiadd(|Ze ztF+e}6?N&h!&*i~rka^mtSu{xR^_Ba^78E&(u&X`d1ZE#l+`y^UKJN8t-6&cX9syn z*%wW6j(4+^^O>KV+fps9-W8(Rer!_LZ0M)Gb*xIwOKsKG?l`Qz9a*cb%PUsj8GKU9 zPhPF&ch<-o%tg|M#&UUMK(4g0qEz10ktn@eRv>S7nk1z#U4GB*ClzMSlDF)>D;c{P zy}EaKdH3k@L)R~!J^S>0j#sbec=mbzZr5i$p6aCrgU9njiJ`m4({Rw}BhC)}?Bv|w zH1^To{XCvJ)i_8zUug*Nc$zfh$futqCZ!k_EE0FDxFvBfPBFxYbBy>bCRQBu%lH)D zpB_{GeV{IihxLK#_~(~B;>w&edpt5&$Y>$Mg^U+6V91CeLxzkQGHA%CA;X4@8!~Xn z$mxoqL&gpnJY6w*$nYWKhYTPxg2)geV~7l*t{6pR7<I)sA_IwxBr=r9SR#Xoj3zRi z$ao?Hii{{Sq{x^egQ_b=6&Y4tF|NqKA|s0oEi$&q;3A`o3@<Xi$N(cFj0`a{#>gNe zqpT~285w6}pmoJaBSVdhH8R+!k2VK}n@fy0FyP3DBSVgiIWp+Ts3XIUj5{*$$jIx8 zp-09Z8GK~)k>N+i9|-^w0VD)S43HooQ9#0g!~qEe5(y*}x*`@xFpy{<;XvYn1O$l) z5)vdPNKlZdAYnn`f&@laL<R|su80j193(nOc#!xY0YV~#gb0Zd5+o!_NSKf~A%Q|7 zg@j61#0m)(5-lWLNW74MArV7DhQtgB8WJ@mY)IUY!0C#}A)(V1u|tBVE24*l4~ZWV zKqP`l2$2{fK}4d6gb|4&5=dPUNhFlIB9=%nbwxCha3b+U0*XWw2`Lg&B&bMKk+33h zMFNXN774Ach%FLaT@hU*yt*R3NPv+DBOyj&j072pG7@Ga&Pbq<NF$-v6|qKwtt+C9 zgj-j{8wofPaU|qO%#olYQAfgV_`mG`LD_vp`KO8h078<cnj%bLAxYukrf^f3@81mQ By}ked literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Grand_Turk b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Grand_Turk new file mode 100644 index 0000000000000000000000000000000000000000..4c8ca6f7fe8e432b6d0a2f511d7993546f3a5c68 GIT binary patch literal 1881 zcmdVaT}ah;9LMp0PFh$}R&G{i&1!N?j}PsXl|7is>0~Ec9fMh`Z>1ehTK-mc&>pOy zA$1esu)&59Jun8twH_!|vbLyhdO{kb!hUt(MP&WTn<((V-}_$|!7jFoZuXbIFFB}J z-}fg{9otao{NovHKjGzRv@bu8-ebSD4L?1QvcJ}hY+k5G#e!z^7uh;B<TD@59Hu|^ z4K-<*J{dEl&x}nNDq~+Hsc{ecB>naiHU82~$vAmKO*nE=CdM7}$=)`}ta@Z7ZS2&O z1DDN|MThjKsV7WU_Ew!W@Q3mxRcT+(ah3gehMwBhs&cv`lG_ne{!`N>ucboex1~uz zG-?XBypn0bkoj!ceF<buHPZ{bWX6ZdrfA$<UDW%`%zS-8&$@Ek%)Zy5i#wmHIlu1F za}VB9^S)cB=QsSKO7`uR1(n~Z(wenWmKRs$Wu;Qy|APtUuF=8n116MQqC=<Z&7!_Z zy5jr&YVn`*CA_mvEjgPhm8-&PX?vo4UYw_vZR?fg=_x8wbxk6#`_+oTk5YZC+pO$4 zpjYiXW4`RF*Q;v|n>8Ivw1iKqwXJosF1KB+kA|ftxk=Rq^Q5+~T1B%`BznDA#Rm6F z?57O1vG<PDwa+s3H=pZG@pQAfvs*V*4H$LsjQ%R1&6bA4dTZ+Mrm?bFHx4{d@uGH# z_gqp<X-(49byPLKtd{K^U#lH|70b?+sM>WdLt0jc)b4K`8I<tB`}#`!um8z)FD`W) z*FBfuB)abTpPeDDd-b<L!}#uuc=O5PaKx#wZ~1cjmPMQr>$^|n-*0xC{eACdU%vfA z*FLe<jC|yV`ItLKZW*~}<ff6kMs6FqZ{)_2J4bHa)80FB^T^#Jw~yRE(g4x{(gM;0 z(ge~4(gxB8(uk++1Zl<7_JTBnbc3{m^n)~nbcD2o^n^5pbcM8q^o2BrbcVF%X?sJO zL%KuSL;6D+L^?!TM0!M;M7l)UMEXP;^|YNLt$NyCk!C$@w@AB4zevMK$4JXa&q&iq z*GSt)-$>)0wsWL)q<5rwq<f@&q<>@skR3p_0NDd%6Odg%w!zcx1F{jGb|;Xn@U(k@ zYzDF$$aWz6fouq}BgmE@dxC5VvMb28c-nnIHpbKL46-$zc5jf)L3Rh(9%O%z4MKJZ g*&^rvIz~x$r2=-Pa>CR6h5r1Ua6y5;z@MM=H!J1*O8@`> literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Grenada b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Grenada new file mode 100644 index 0000000000000000000000000000000000000000..447efbe2c967cc5642b58f51aff86b67073134fb GIT binary patch literal 170 zcmWHE%1kq2zyM4@5fBCe7@MO3$eC<zyoQ1C|Nmnl3=IGOA3wmr;^P~_;1~?#0zn7~ Vru_ix0GaatKYmlWfX16}0RSy&9%BFi literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Guadeloupe b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Guadeloupe new file mode 
100644 index 0000000000000000000000000000000000000000..447efbe2c967cc5642b58f51aff86b67073134fb GIT binary patch literal 170 zcmWHE%1kq2zyM4@5fBCe7@MO3$eC<zyoQ1C|Nmnl3=IGOA3wmr;^P~_;1~?#0zn7~ Vru_ix0GaatKYmlWfX16}0RSy&9%BFi literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Guatemala b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Guatemala new file mode 100644 index 0000000000000000000000000000000000000000..abf943be0fe277c7435ac9e5d51718028ef216b2 GIT binary patch literal 306 zcmWHE%1kq2zyQoZ5fBCeE+7W6c^ZJk{JF30u!pWmU_U$Uf}~DGg5>!p0qU>bJ<vE2 zbHODfD8W_UJAi?S5eonRU#-Hx@c;kL4U8;6_5ubDAKwrLXBQw24gs150wE;0;0Htt v(8T}$K`sGtL9PMQKo@~%kgLEn&}ASR<T?-yav_KYxe`Q^?ouwGqs_Pgm;yx` literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Guayaquil b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Guayaquil new file mode 100644 index 0000000000000000000000000000000000000000..92de38bed407d9d7154cd3875edbc2e01c993f6f GIT binary patch literal 271 zcmWHE%1kq2zyK^j5fBCeRv-qk1sZ_Fwk1vy+G2k$Xcs<nsQ>@}KO-{}GxPudTQwLM z{{P>W!NBtW|M3Hi9RL6C+`z!&;~T;d2*kPuCO~Wo)D8h5ge_$Qssvm51F8pT*Z<6R hwFnRmas)^i<Pb0obPR~D2g)&!<svQ{p!@7hxd7s;LFWJf literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Guyana b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Guyana new file mode 100644 index 0000000000000000000000000000000000000000..7d2987677d321fa44fafb72a1a6032432d0c64d7 GIT binary patch literal 266 zcmWHE%1kq2zyK^j5fBCeRv-qkg&KgwjGL7mT(w(l)a!p6)c^ngpMjBynfd?!lNAgM z|NozMVPN_H|LO$>uK)j!A7J3~@eN_nH83_Y1(85(0yF>uLP)Uo2UHKxw*MgOL0pg% UKs3l1AesWFaM=JIW@o|$04A(H7ytkO literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Halifax b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Halifax new file mode 100644 index 0000000000000000000000000000000000000000..f86ece4c4032634b3526187c5f86364077b9a9b1 GIT binary patch literal 3438 zcmeI!X;76_9LMpCm=UE?iki4l7HWtOD27Y%SQ?{(oMcf_@MM~SrDWogggR--OsP$o zWk!pJOQn}ELruKyj%FPo4NLK;p=MfQ8*bdr_q^>z<4iBR%yU0?81C@y_x-0nmi=Um z=O3?|%x`#j)tZOTE$5iGIdvbuzwE~v-#dp}I7Mai<$Ifti4Wgx?XGZ2oR!a~yB~#h zbc$2n_Z43o?p@t~tGoJWFYnrR@40Ia_H{nKHrM@R+iT9Jzf`;HUTJXFe>uqY&Q5bm z)-88G8=C7aU0C35=)A@Ge3EoGhWfl;4135e3u^7%)cGghrZbnlTN)qmZK-JGZ1rTi zTi=cLmS5W7Zd?4l<EuL3+n$x+?09vdyHgZ7yPnB)cRO>OJyZI-dqc)L`+86E?Ymsy z-QP<1_8*bn0}D>d%DaEht&|s3Ro-F!t^18Slvbt>cPdfe#V*s;p4Zip(0qMF=c%JL zv*fWgQ$<aAraWGlE_6}6JdqtIP8NjAQ-j-z(+M5*nLFC3vk?LMY~2-guE{BVuHvZr z`SfmmzNB2$Eh?6mW*3UzCNGe`kC`a`NY0Ue#wUrtdL+vFF4^jG^C)>GXsEhUKSW<W z+g)9&j?~xpHCGK~&9r}2o$@Tbr~_WEQjMlp>cHHKqOtr{-XtnT6SqR%>}(K0ol4~` zA#aLc&k`AYafY~6PnWkHc|$ezWyofmrm5y@@^r}CBh~GNBlI0J`>Gb%eRRu=b}Dpm zm~Iu<pjvmiuG`!dChiP6uG<D&7j5gV$*>d0MZ1a`*?#+W(V@gAJASZ2bjrJ=!{^Qy zccp!+@6P!^b&lPr?-}x<x;OMK9no#PifH&;b`4HYU2B%gZs)qH`^sm^$f`#~_o7iU zs<eyfQSg9tUJn-06Ebwnl=GrzLO<PW_}3yfqP^}N;}!Qe@$0y-uT<RW6S~jUb*k^4 zox0x-i&g*iT0XF4ih6L-4jKPWs&Xf<lmq5HD;`RINj^L-MGWlmlpHiLRt)-WnNH~b zs7k2H*Mpn(RD(;i^pFdoYUuJ=GVySXNXpNY!#1`Q$)n??Sn{WMw12o9o>whW+6Btg zw9O**YOPF<T`tmV1N6uaWoqQEQ#zwznR;yHZau0dUyYu(N@td5sjP8@dQ6e4WKyww z{N+qBw)+D4#DsV;u4#@OpAas_Ur3Y_A_B#PgHbZONv+8KG(_f{-YX`)8L6NC=9GG7 zS~ERq{cbfm?V`?Iv`Rf2TdAi^E>zEjZqQScC#tCp6>?hK0x_+oRL*FVBW9E@ku&QP z#q&ke<*e!`kzbG@UnmO^1zG*%?4_54r%}KS{scC@!7tqCKltze-tXTfJs!XRkP2w* z_wV(2g6!wp?0ZOJQmSXbK=Y)SXM}k~su?PeC&0d?-oU`s+wbw8{C<WG89QX~kkLbi z4;epX0Fe<yh7cJ;s~JRO6s=|$k#V$|fkZ|U8A@a<k-<bp6B$lqJdpuKMid!RWK5Ak zMMf1FR%BeQW?+$#wVI(t#@1>E7a3h-c#-i%1{fJ(WQdV5Mg|!fWn`F<akiR)Mn>9d 
zh8h`bWU#Gfw2|RP#v2)MWW<pnN5&i(bgLP4WZ02$M+P1ld1UC3u}20U8GU5<k?}_Y zU^NjyLSQv9K!Sio0SN;V2P6<kB#=-bu|R@>L<0#25)Z2h2oez_BuGrq289nqg_j8n z5EmpcNMw-EAhAJ$gG2`j4-y|FKvokWBt%GzkRTyZLc)Z^2?-PuDI`=#tdL+K(L%z7 z#0v=+5;3a@84@!jXh_tMupx0n0*6En2^|tUBzQ>lknkb#vzh=R5ww~RA~8gQh(r+y zBN9g>kVqtvP$IEJf{8>E2`3UyB%nw{ttO;MOp%}>QANUv#1#oF5?Lg)NNkbdBGE;{ zi^SJz0*pl1YC?>}*lL1|L>UP)5@#gPNTiWaBe6z;jYJy>Hxh5F2{;mQs|h(0bE^qD z5_Kf(NZgUYBauf!kHj7cJ`#N-{7C$f0{}S!tmY6vjsdGV2#}+|Y7PVBI6w{r<VZjc z1>{&j4hH0CKn@4wct8#a<cP4ELjpM_tmdFVjtZ+eERf>@IWUkT135I1V*@!j@c*NO gGvr^6j$m_(^fEU|WKz$lm?$SQDLOhTI?4(D8&gR`p#T5? literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Havana b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Havana new file mode 100644 index 0000000000000000000000000000000000000000..1a58fcdc988ea6ec1bb660ceabe85ea19e5b0774 GIT binary patch literal 2437 zcmdtjZ%kEn9LMp)-yCXL{y`%Nlf+6;u<Ixk&n5~6SHHy76okkSycl6dLZaqcNwzk6 zP?AYJsEoN)8ri^Vbe8yMj^;PWVWTkv<3XES%NZKU?f3q*wzeMhpdNM3&Uu~P-Mjnf z^FD#XV0oJJw<Fqo!o|^SE}kdtF}L?Oe!1tP-q)n9rr14GRIX=w^WC!<IePZ<Dz7_n zlXN$)_P*{KFFhGoz20{&%Qta--biVbj)Zo(*Oz4J^`Qdq*ZvG0ZQbkscJi>^XxQwI zbvPPo9rB!}K@Dp->4n#JX~fn+H?s75iSkYHqTLg6)3h9ST*4t4H&XA#I1Li>Q>8op zVo)Z0yVslO<;u;+H+#3Vy)Bb=9dvKa|50x%Z}%prcI%WC2fV38&+D`~^<Hd7nNE)` z_GZMb&>6!^z1v6UYTUUE?j8LxI`c@T8^5DK?rcnO6AFDYt2*4hYgxX|UUkV$oV`%* zPC4bynfkIO#SVH&W34iGq(|==*eCb)9n<?dx5&Kq{hHk3mz1VDoxgj&ET}Ei)QVh9 zdt|XL%ud$(-Sy&|d0rN!9hF5l+9f@%ThoVo^?}h3^}$o0Xh#35y7)+sEctAkW;Py^ zr3VA*mKW*rET1gD@``3B#!B{ghh=5Nby@kb=tD#OlG|J*dEF!Ge{r9#`eK!?uDz)8 z`Y)2dc(OeF%mpckyDEV-pGsh~PuBXHW$mRax-Py$*L9uO^<!^p;k)fxG_X@Qyl_B^ zJ4>{rtX>~&akbR{hCJ3#EE|(|$m11DrEKB`*_1s?HeJq?ilhjs_$ony(LYG=gK()F zJ}q1J$Lka4hO}zCqfZ_=snu%+b!%g*KIQAuZPokq>8bB&&FU>$Gj>9@r@tiI2M$Ty z^h&AgY>;QK<;jkgpwwSTk)6A9Wmm@}*<F$>&o%ulVG-f~_KXbw%NZ5<-<_dQ+Zo3( zch89og-&~6<3ge1N1X|O-uWcYA8>NawJghA1p%`j#|aCwIDvoO-CO3Hc6ZnQ_=)+q zP$<|iw*%QBvPEQ%$R@33m&i7)W}nDLt!AglR*}6Tn?-huY}ab`i)`3xc8qKp*)y_f zWY=i7%>(=9FdGMUj%*#-JF<CX_sI5<{UZ&qnhqc>u$mqqO+dPUv;pY@(g>szNGp(D zAk9F!fwTkZ2htFvBS=fErYA^KkggzYLHdF;2I&ma8l*Q!bCB*J?Lqp3GzjUC)wBrd z5z-{2OGulLJ|T@lI)$_f=@rr}q+3Y4kbWTzLpo+PEkk-{HBCdhhO`ao8`3zWb4cru z-XYCHx@R@*L;7bm4MaL<H7!JXh%^!DBGN{rk4PhtP9m*DdWkd>>891R6X~bbG!*Hm z)wC4pDbiG=t4Ldsz9Nl9I*YUx=`GS+q`OFak^WjugOLtfO^cBpTTPRZE+cJ5`iwLh z=`_-6q}NEZk!~aHM*3|v4M#d|H7!SaZZ%Cux{kCR={wSRr1MDYk={H1=itYfH-OK) Z3Fi41rlh4Tn7?42KQ%Qa)jXxf{0?B7_b&hd literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Hermosillo b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Hermosillo new file mode 100644 index 0000000000000000000000000000000000000000..ec435c23bc47f925bd70754b38bc8b1d2c4d3943 GIT binary patch literal 454 zcmWHE%1kq2zyNGO5fBCe0U!pkMH+y_(rqsa)_=--uq`btVY~CCgdIF)2|NC_eK`H9 zFyY3;Egu*d3K|U4TLg@X#Tty=(*#U{el(b>dkB~r-D+T9WCBBGMiwyq|9|=q28RFt z=T2Z?`Tu|Q0tSx%|N9#lctC7M0f@MdZwP~La0r7l5C;IUuS*Ef6$lVQg8PA%fx&+u z0Qmz%gZu)bLH+^JAU}a<kiWn*&~G3b<UbG%@gvBXf(8%?@+*i2`4>ck{0yQ&{sz$? 
Pzk_I6`JW3IIObdcn4XX> literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Indiana/Indianapolis b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Indiana/Indianapolis new file mode 100644 index 0000000000000000000000000000000000000000..4a92c06593d33d3969756f482e5d3d4b773984ef GIT binary patch literal 1675 zcmdVaT}+K}0LSs?kk@(LFc&RW7jAmD%q$zL)}m8hP9)@yXes=a&Q2uH1yVP-Da?{F zud{4KTr|uuW;Thu)jw}D*7heCne9CO-+%60xN+k-d!Em8&Q9xG{Ju}1pk!mR^T#p5 ze8S1G-kjV=y5`b!I@UdY<A-eCH_knA^p$+DJlcKGl_?+Y54%rdzRM?TN9m5re&^}D zFS;|Y(|I=ik$#?X&FOMl^oz+yoEPelei?3cU)`+Gue%%FH|Gw@?)ECTr><P8rj>4Q zNkG1>nd`pGnkC;CPIEtfo1#CP{~m6O)ZQ6SIgMfMtL;_k2|<~Wo+dK-&+5#$H7c{C zUT2M+ud*(e=>>f;YT>aunf+{@@K=}0oU73yca>i*YKRbvQxoKp%8z1c<U|?ByDtJi z`{lBf^J3YD@j5^1uFAjjRj&x2RRyOy^vdpfwQ5hRUfo`%3X2-`nx-5j{8!}K+ETGD zu0gIZ_KTviA-N$lL2T$Nki|15isDD9vSfI_D7hFVH+}3Br44g*+2gNjb1+hGY3Wc~ z^TJvlXjR)%lrG=es46Dk*4uNd)b?<X49;#4!R}i!G&Lkb?I&f`K!MoNv{&xzN)^>L z#d23$l&C4plDiK(Vu;VahDQ9p8GJi<9X4dx@PF{~yp}nR<9XLF`64{;LbEf{-jA`@ z30$2?o_Fu2Z)&zb;H0ISbE!F(n{!dX$uRdB<}(hTy+Yvcb1O1mwsRX8{44VdJg;zQ zxEYxrGC^d9$P|$|TFoSpSt8T4nt37<MP`aj6`3nCS!A}zbggE-$b^v@BU47^j7%Dt zH8O2v-pIs}nIlt2=8jAr?d-W>`W$BdKmtexND5Yy1CoT*WPzk%HF+S3AekVkAh{sP zAlV@4Ao(B(AsHblSxrtzQdW}{l9tuvg(QY#hNOn%h9rk%hopz(ha`w(h@^<*h$Lw> zSt4m#O`b@iR+A}`Dv~RbERrpfE|M>jFp@EnGLkcrwAEydq-`~MBZ*s0=1A&D?nv@T o_DK3j{>UT1`A?q#qs^ls#XK5f{WIf};}c{3NlEcZ@rk2<0ei4<R{#J2 literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Indiana/Knox b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Indiana/Knox new file mode 100644 index 0000000000000000000000000000000000000000..cc785da97de0a5614613f9ba6e502d7dc5f525b5 GIT binary patch literal 2437 zcmd_rUrg0y9LMo52q6eaCWd5SS}_9&Y$$E8wFP5`#6Nk!KM+Q0h-E%1kf_n))+8^Q zrA@&a>LQnGhSpSU3r(rDnlm&BQVB4LS;(VtK}zTJydN*KKlgpv@4U_qC|931-bH24 zPm{k~i2a0z+hrf#$7uUfzb{Ge{`7aXXLF?9yX%7b=5?PwJ9$u@EeSQ}^Uq7$#M9>c zw4>54jiw{IPCB~YGC%kZ>kB8=nv0z~^`-r9s?O#r{o|H3s;j<Gcb9opPj#lg?3u29 zDvsAz=4Y7RoDBJSMx5zOh?Bk><IGk2+po3`nO}N)<kz+q)Bk0QTsz*a1{%Ax*?me~ z-+EFHF0N5Ghf3vEpT`Whd*qL=5{<7ZQT!i77^#hrfSLgl`0{{^SRJl{N=E76IoH%( zPxR}Mly6nY8wolz>SGmp;5`X5Z>v#V+jV%yK@)MJLPs9kW=8MdCQ)_e=I$-!GN!7) z+*4K{V;82IXivI~dpy?MJ0(_PCe2XeM-EGD;CK~#BSzoXeM?Pfy{Yg2{E~`0bWz9e zJ+3BJj+O^D?NyWVugl~WpP2{K&dEc$yUoMVhb7^WO(wzDs;7i4F<FmRs_d+}D*Lyi zk~4Ls$vIaio`~7T^T`Iu?N2pNy}MJ-Y@4O>Hdg6bM^e=6qC!1q#~3v?BU3+JF{tKE zh}YiyHsu`-&;@Bts^ChTEQtHgEcilXq3?)U)X*b^owcUuwHA4%dA%uKx=$9@7nx`C zPU@1HD)rpd2EC-TP%Vwvte53vs%8Dlb!kGpDm|U6%R&NF*?}azqW7|TVTVUvJmWVj zD--3V#%{B!AVSpEQ)YGAfUH^dzF8aHD&@0lOu4ULSEe_p%FZ)-UCd^+uKAFz8d|Q_ z*KgMw+H=*$>I(fzQ<ADKp08i6jZ}dF_G?6tgSU773jUkV@BGv5)8K#j-hcYv{Qi%= z7V-O!9SjKa`#;<xBi;LO_Zr2kjJ^H&%sj6=nPs1O_L*;=IbO-O@3{c?3f_NT-#hk) z-TH=Z|1103{r=(+zCFkqkwqe_M3#xH6IrO!t`u1+vQ}iV$ZC=0I_-Lq1tTj)mW-?! 
zSv0b0WZB5Nk%c2GN0yGP9a%iGdSv-dyMCkqPFn$_1V{~#A|O>j%7D}XDFjjpq!dUk zkYXU!KvxdlP!G-)grg!zNjPeP6a}dYQWm5xPFonHGEQ3>j@lr_L8^n42dNKIAdU(l zCE}<NQY4NlA!XvI6H+LSN+G4<sFl+e3#k@TE~H*a!H|j}CF7_WQZ$aLA!XyJ8&Wuq z$~kT6klG=|L#pSr<>RQI(-x4Uf=*jPjv69GM5>6C5ve0mNTiZTDUn(t#YC#<wB<zV z>9hsqsHoGHl%u9dQ8}uLl$E2dNMSiDi<B0rEmB;hx=4AE`Z{fakqSF)iIEyRZIO{G zBW32OGg4@dN+YG_s5Me-j%p+2=BT&R796R#)0P~mxziRMsX9`2r0z)Jk;)^bM`|yw hy#I^bhuOad)9qh{q`b$HrzNK(<)x-3rzWR_{RxkBPOAU_ literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Indiana/Marengo b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Indiana/Marengo new file mode 100644 index 0000000000000000000000000000000000000000..a23d7b7596fc3019a2b6847a7593007f985e7e13 GIT binary patch literal 1731 zcmdVaZD`F=0LStFwI^G~2u-aidm%M@z$8sgw$^ZGxR_y%$7?nm%nUR41sh5zZIWgp zk7=Gm5)w)hi{xRBr_mT^=EWrU?|hH&!V7unfBXMG|8Cpbi{JP73X0|ra{jm?%s1Rz zesl947ch^y1Brn>7rw~)s;+^(yB^D18-sy+37_S?S&{llQ*Ges;cfCo<%qz`oT2hn zVc)>(0kN_<JKcHHV~Bp+Ki+x!skd%1f34+ihkn=m!g+t<k#4<y&H1qXqW;*R0>N`} z+CAthr#7fP)z4L@l}d-mpa>P;WLTaO;Tg|lM9NhWv89iUj5#SIkL}P=x=Lgwm&vTm zQ6j7Tkj@@BQ)M@9);ZB5RnDmrJ-RhPjoH0Pj(w0JycNYV_iR5gZn{_I)r5-i>B(}! zie@n}uDkS2x+#2L+vVi6BVzLFZaP2yy2`)Srl$rEse=8DdRo(FHGTaVJ>yb|Dx6!Z zXV&E^;XN&9RTqod18d|QzgNtSE|>GNlf}H&0_pGHUHEUO%cAghQFJU`E^NLpife}J zk~?i`QCXZ`+|a0&ObTkb^^97Yru4EUwQ6~<tGaaDDpeYMCd-D^h_a?DvOKn2lwaB> zS9KJK)pZ->nuqD4qS7zdo{JZig*kHFPDga|m`_Nkh1Z{-u<&0W&#$-N-~H>G>o!C? zj_aO3?g@3>qxDW_``*PqV|Y3}UH8UiPwZH)&l#0z?uq7}V(vVjlV#qs%y(QK`vl+L zC%C};u$^GXkKZ!?f$J8A1nZGWBC|xMiOdt3C^A#4nJO|@WU|O?k?A7yMJ9~Q7@0CM zXRDbsGHa`uHZpHy;>gUAsUveoJ9%E1J(rn2Fn=TgBm*P`BnKo3Bnzua1Ifc`5<xOS zQbBUDnq-h{tR@{KAFD|S$p}da$q7jc$qGpe$qPvg$qY#i$<1n#L$b4)^pO0lCP5@a zBt;}gBuOMoBuykwBvB+&Bvm9=Bv~X|t4SBh*J=_*GPatOk(`mFk*txlk-U+_k<5|Q zk=&8wk?gG|eI$RYIRcPlz-o>H<TyZ%1msvijt1m-K#mCHm~iaD@PD2YQRetaGsj1w TcW~06q?AN&YHCtyQcBc!Z&+XJ literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Indiana/Petersburg b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Indiana/Petersburg new file mode 100644 index 0000000000000000000000000000000000000000..f16cb30406e3b28e82ab528d0781280ab5f13a59 GIT binary patch literal 1913 zcmdtiU1-&H9LMqBaZhgU_z)Gl7`w=e5$@3%L@YAe7^ggL=JBCr^S9DYYqNc=ZOt<M zUz95$qz9%%%!wAFnD*dPgqVp55tXB`0{auWhBfz_vszl`-}n6sqUfTF?)o48=k?#& z4(#Ih{-m1Q9++$XapmbJ++1CH^IRP0+rVpO!M1mPk-?4AgY7SWB_BSM2~N+xB&S>R z-M8sm!gJRb$#6rh`R=M4-AHMP8M%0^JFnk9|LLUre)t>n!y8|^(UT|4h3DUOWBbCu zZF^tFHy;l!Zg^EDI(G(_R`kh_jT?iXZf%pFD<2FlPffealWB8hB;kHNlQ6%Xn&UEu z=a^_$+?lO$6YCpuQyv>LS!)U<yLDQS({w>{=Zyw=Ri8-SbEQH4%r_-}?+#PoUXp3y zi$UQR`(6B4cQF0P3$CuRN9yYqO8vwk(@?(JH4JTXiTG_{;;jyM$LQ?v&R3q5`Jd0T z$#w0r;NW$(ad}dj`m^oA+Df^rd)O{2o+YWpr)=uVge<AqZI_&z;g*%0440i4cXwxY zh0QyM+&yPEh0C8h;O;%z5w@({;#LeS2yOChY3*yb_m%g{{pqA#8Sj-<4V8A)XtShC zXW8^ewbGV5VcYhW$V0=Q+4laMUB}1c;p(1Z_wc@<uyb+7$>sy$nwro((zzvk^qS+Y zt8s1EmHAqFZtAx^XWx_FqF&p3bcd{+Y_^XNJS|V0skQ6Yr{&4RC3byFLN;tQc1lcN zS=k;Q|N6@L+fRS~CP$xga{t3Cie4`_CW?;ik7Y;Eo<TF!KNtG<nY^2;qv(TUv7-6O zl)1f5??rkq)w?NW>h-x^KjVt|cS!yF8Sd6U_GftVk8kOJ5JfFnnL*@CBj*}9+sOGw z&Ny<;y?WM>^NyT(<lG}?A36VCZ2;-Ot1TcsAWa}$AZ;LhAdMiMAgv(1Ak84%AnhRi zAPsr7BcvsyC!{I#UGYF$T-p~GjUk<J(Hhbl(j3wq(w<lQLmKpIhg`IX^oTTxbcwWy z^oca;)lRu+)vLWC&3d(4q+O(6q+z6Eq-CUMq-msUq-~^cq;aoyj<oL8-jU|L+C9=f z(m%2R$POS|fb0RX3CJ!W+kor?vJuEmAY0+py+Ag@tGj`0hgbIl*$`w$kS#&>1lbg1 zSCDN%_66A(WM`1A@#@|no8#5pLAJ-M`-5x{vO~xgA$x>u60%FkHW|M^|ATQV(9NpR W%_>XYP%*cnsw`PuT~S?8Rq#9Z#I;iZ literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Indiana/Tell_City 
b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Indiana/Tell_City new file mode 100644 index 0000000000000000000000000000000000000000..0250bf90f8fc8716a77163e7c1b92663fc6d96f7 GIT binary patch literal 1735 zcmdtiOGs2v7{Ku}X%8hMq{4;hqJ>10X)jtR1Sz65K5|l1v!|Jrc`Ow*v$F_o*CI%Z zm<-H_=tW@G!opsQ2uUNNAVy6K%{-}rLCo#^4+T++Xxn$W_jkRFyYGLz`K5bSh(9L8 zeBow_&CPqHG7n$7Tb;lARkk+HQy0#@kxx$qRPW*`*}Ee|_f<`b7wbl~-&dgeOTBWS zAyEycu95Fb68%F<BIIyNf_NXkN`Hun5+5cO>5<t&j69##qr(H@<IUH4?D0eK>0ZAa zzwTGM!lyr<>{b)UF3QRBHZ`@aNq)&_RDsS&?ObyFqB)>lP5u6)<<&AdDN80#-WDm# z%5+NK8Ic;6p;K=i5ou%bdc(P9wee-T^wig?^o}^0QCOfdPlwB_ghaKeYD#8DMl0{; zm(u&qrE*qx%ABDnkr(ws=RFc)OQ1vNw+)Dbw=KHx_(QR^t4<f~ZWr5pg?hWETkUA7 zl{=TUsa+)na(7sxDo#n1#bbL_NldgXd6uS1XS-zSRkzwVJSg{{P8A1w99>quR2;nC zugi17ggp5`SF9cvhsrPN!wa8_%8Vvm85mR5YujY?+a6UD(I{)WZmQbpy|T`CN*#HX zChHrj)X`42Y$(cA$6Dse87^}L1zULi{hB$;T)%((*S{U-+GQae=gtjRu;W~56?5$S zT>G5QQ!H?tC-+?uzy4;U$1Bz+nLFFux#rIFie&SiY`!rr`~3Oh{re*BF~7BoIQ`>4 z9WzBifmUQOk>x}d6j@SaQITb}nuSG{7Fk?md65N1mKa%NWSNnLMwS{`Y-G8uX2Fpq zx0*#qmK|AmWa*K`N0uK60Eqwz0f_+#0*L}`7`zY%V*&vpK|(=dv6^5QqOqEA7~-*- zfEXe|LPBCff<mG~!b0Lg0z)E0LPKJ+n&6P=tR_5$_^c*Ch6s@m8Dd0&WQY<8lOawd zP$W_$R3ug;SR`7j2^WdiY63<gwwjQUn313vqDI1Ih#Lu<A#x;ihS-td8KSqE@R9hf z<^Vtr0joI(ki!5u5RgLwIT(<`0XZO$LqgbN;eR+KLd{{a#vCec&&t>pv2kuse0*$t HY+UGfYubM# literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Indiana/Vevay b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Indiana/Vevay new file mode 100644 index 0000000000000000000000000000000000000000..e934de61adb342d238789c5dab9155408f8cd778 GIT binary patch literal 1423 zcmdVZOGs2v0Eh8AI`)vPgaQ|Xi(ItP@ex9VfuSOn<8w5dOpm1vrsb(@8p~`JmF1@0 z7!hU^B}F8ZVJ)O0Du@V_A_(jh<W4wZ3nQA_`A*9gZQ67%=l(9k49w#D2Sc6v3!Fc$ z<>nI(SFJg``(oxXax*)2_3?K(dTw>>+U1Y(_4!0>I_sO9Zd<9p3~Y@hCVkqy<tk@5 zp*@42Rnl>#lVw79#ZQ@1qeN+LM3$A7h_b~oU7pjb%4ddjMcOu1alcz{pU+Y|u3V6n z?~6sCze`p<+#sr(1F|OW6}3gVvhLWNsQ0DIV8bgB{IMt-3vY|YuWNKu<_p#Id_nI@ zjH%F#8NGXUNHw3C(0iu3RZIJ@ZXKyoBJe=A4R(pWIdQo!91!hkQQ1+RD>~*wGQ2Te zgx?g&&Si_D^KPa*F!xS$#kc70w+rfE#HSD4n^8Rt2`w*9sKbRyAL$uZN7p~oz14lH zH}OeEHpfL|_Nk0!L`8J!y6jsDi4!Ad<;f34qJJPPPfccuftCt+`jR7(JmyaJCZ#O< zN4M*a7dwvYJ{tFUUH8tYv%<cwvd^_VoprAJ@`)#-G7xl1O3kS^XQw$eL8r{TmzmGF zJoX8}zeiDLzHFmd`u!{O4_vn;IWdYvf`o#^f&_y^gM?!>@gM=AjffXQ;xaJ-K_O8g zVIgrLfgzD0p;=9ANN`pY9TJ|^#D@fkM2Lil#E1lmM2Uon#EAroM2duJHL)VWT1~V_ zxK<M{5-<`m5;77q5;PJu5;hVy5;zh$5;_t)61>$!kA!bE@goDUnh_vFK*oR!0vQD| z3}hV0K#-9jLqW!Z4903kgAB)N#)AyVYDR<%2^kYIC}dQ~u#j;f19Se1g{7L|6`J8? T2R8W&{CU}d{Ct1DKQHwc0I<R? literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Indiana/Vincennes b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Indiana/Vincennes new file mode 100644 index 0000000000000000000000000000000000000000..adbdbeee629ea60b60c32d0742a9a69ab5990eba GIT binary patch literal 1703 zcmdVaOKeP00Eh8ATF-a~>7wa^g-H5PR76HXv}uY_kMXLigSL}u>G7J>tL_5D#%?Mi zqoN)m4dQK4j|dVG(IP^^smBnMGlrxhnd^MVj)la|o!tAonY2w8|L+7#sw&4ge_TDy zH{4tm=H@-!HIL}gO!wfG@3LuUfA`S-XY%f@q}w*)t881=TeojWI1eX(&?>sv?WhXM zm-Um}S4HFH>(C_iW>lJt75bgGgU9K2BQu<LUxw-T=C8hg(4{}bUOFGoJk_1IZaSZ? zcF51Cl&jZ7b$ss)_sg~;GEv*?eqGuizZLIvlb6!9cg#~xV^aGXI#hm6SQg~ZmIaB6 zPGQz6UD&=y7xkN>iq6*PX`Lg~^aH!)jK`BjU`w@}d11IHE)B?8kyJ6;pCjjNh>5xB zgJf{t9TEJIkn{6Si1}{@=!F@#)WVx_y(oEHl{B~O#n1Ps((UK<lGYkkR^F(WMrSG! 
zI474iREy<V5xF815as>qWJO_)sOT(_p^<|`=$>Cz^+<@SmJGQv_DEDmChD5|akVO( zu2-LGSGDt!TJAls*5oO@wzg5N8+t>pFW#uuCtt|$gop?~zb@<2>O@`ZVY#uZL~M%g zkei?Q#g_Vz+<GZP)Rz^>ZTlS2&1XI-sTN*;|8(zRKITg+@Bi}gyq37*c;4mHzEsaU z+2r)H?|toaAWvtA=iR>MOPdi0I#ct_J=fd|%sngU6qxq{^BtGZJ|Xz`;Z>R+w!`cC z^;_mY@Vv5=WD_z(WQ@olkx?SUM8;_~14TxP3>6tGGFW7^$Z(PIA_GQ7j0_nWv(*e5 z8MW068yPn;aAf4j(2=ntgGWY>3?CUk5&#kb+7Ngl1}+l>5Cswj5{K0U!bK!j6ABWG z)dYh?gM@>`g9L;`goK2|gam~|g@lE~Wi^2zky%Y>NNiRU91<N89uglCAQB-GA`&AK zBoZYOCK4wSC=#jFgo?y!HNhg$T1~h}yhy-E#7M|U%t+8k)JWJ!+(_U^<W>_p61&v| zk3?@Z;Un=Q2LN&iAO`_*7$64%aws_V82BGgg+Au6$TNpUW?*dgnC#J+f!y5e-0abP Feggguh2;PM literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Indiana/Winamac b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Indiana/Winamac new file mode 100644 index 0000000000000000000000000000000000000000..b34f7b27eee88867fd131f0e9b87b4c3b4c071c4 GIT binary patch literal 1787 zcmdVaNo-9~7{KxKw8j_{#G-kGg-x|eipY}?ZJOd4UkxpFj7_yTrlvL2U6?o6w1`xN z7()p)MH)nqh=^9HsZI?gr5tKek-Y2tk0pu3&bhhwck}XI@)qC!i)7|59wL4_?d>Pr zoIHE;9_QId*^zkfz?JW+d`nmF;J#<-?zX5`J>Z+Fp4ZXTtZ5MsM}IK28MWe3zkB9! z@<mZ6D$J8!d&LvI+dPd{de5${GSBNvy%)!KsrsrS@8$NDN|$AM4Y?8Zs(6(5I(?XW zlik;QJ0oNo(?a6i@Su4=Feu)C?QNRuzi4{UYCbf+5+Bb#HO;qfich<*n$H#5i(ZN| z{!w3x(x?e+ZO|!+1u8XVib`#{DAESa)oC?5OnR3wI{jR}8P_~OkKeytO?Wg~hS%q* zi5L3INwdRha!IV55=vB4*EGs$aXnNd<Bp8{Xi?LXPsr(SyP286TYBcrFJ@Nsn9e+0 zW3uXZ=-Hbp&77)yoxQNs%q^R!W%#_Bw>3}BA5@|i<b>tIE`@4QTB2OkoT+jK_K-RE zLMpdii_AS0R7)CbWnRfhlYjq<URn@mmQ~c~<rz_<c2?>Y$=a-3UaD91x?xsND$=W? zFIB;a5?N4xT^05zl!aA?R8ecDTwAtTt*Z;k^~E`A!=<1s&Q4bw_Xyc0V6T{12d{s= z+P1S-pbgJ|zkL7neIb1R@|i%a@1HCe9o&0o_w3G7^z{AP*8+Vegd<{XioK`VdxpIy zM?|W9Pqm+M0`3WszmH|U{jwX&)?dG}|G@XNW1{8Am?EQ!j4Lv-$k-yI>$KyGj4(3B z$S5P@jEpog*2ri(?RX<2j*K}n>d3ewBae(dGWy8)BMBfGASoa@AW0xuAZa*l9!Mfi zn+cK%k_(ayk`0m$x_o#cA&$)mND0XaNy=%n;*gfp=7l8Yw3#8PA-N&RA=x46A^9N* zA{innA~_;SI&GFnnogT1lBm;WilmC<iX@9<i=>O>izJL>jHHa@j3kX@jil|gc_WEC zZRSYoPMbTDJd!<<K9WDO0+2O;tO8^mAS(e`3&?74+Vy~}2&Y{W$f|JKb%Cr5WNjd; s16d!)3PIKgvPy(o9{=wy>10<-vRyIp;h_mb5(dYIladmW5(anr2@}h^zW@LL literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Indianapolis b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Indianapolis new file mode 100644 index 0000000000000000000000000000000000000000..4a92c06593d33d3969756f482e5d3d4b773984ef GIT binary patch literal 1675 zcmdVaT}+K}0LSs?kk@(LFc&RW7jAmD%q$zL)}m8hP9)@yXes=a&Q2uH1yVP-Da?{F zud{4KTr|uuW;Thu)jw}D*7heCne9CO-+%60xN+k-d!Em8&Q9xG{Ju}1pk!mR^T#p5 ze8S1G-kjV=y5`b!I@UdY<A-eCH_knA^p$+DJlcKGl_?+Y54%rdzRM?TN9m5re&^}D zFS;|Y(|I=ik$#?X&FOMl^oz+yoEPelei?3cU)`+Gue%%FH|Gw@?)ECTr><P8rj>4Q zNkG1>nd`pGnkC;CPIEtfo1#CP{~m6O)ZQ6SIgMfMtL;_k2|<~Wo+dK-&+5#$H7c{C zUT2M+ud*(e=>>f;YT>aunf+{@@K=}0oU73yca>i*YKRbvQxoKp%8z1c<U|?ByDtJi z`{lBf^J3YD@j5^1uFAjjRj&x2RRyOy^vdpfwQ5hRUfo`%3X2-`nx-5j{8!}K+ETGD zu0gIZ_KTviA-N$lL2T$Nki|15isDD9vSfI_D7hFVH+}3Br44g*+2gNjb1+hGY3Wc~ z^TJvlXjR)%lrG=es46Dk*4uNd)b?<X49;#4!R}i!G&Lkb?I&f`K!MoNv{&xzN)^>L z#d23$l&C4plDiK(Vu;VahDQ9p8GJi<9X4dx@PF{~yp}nR<9XLF`64{;LbEf{-jA`@ z30$2?o_Fu2Z)&zb;H0ISbE!F(n{!dX$uRdB<}(hTy+Yvcb1O1mwsRX8{44VdJg;zQ zxEYxrGC^d9$P|$|TFoSpSt8T4nt37<MP`aj6`3nCS!A}zbggE-$b^v@BU47^j7%Dt zH8O2v-pIs}nIlt2=8jAr?d-W>`W$BdKmtexND5Yy1CoT*WPzk%HF+S3AekVkAh{sP zAlV@4Ao(B(AsHblSxrtzQdW}{l9tuvg(QY#hNOn%h9rk%hopz(ha`w(h@^<*h$Lw> zSt4m#O`b@iR+A}`Dv~RbERrpfE|M>jFp@EnGLkcrwAEydq-`~MBZ*s0=1A&D?nv@T o_DK3j{>UT1`A?q#qs^ls#XK5f{WIf};}c{3NlEcZ@rk2<0ei4<R{#J2 literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Inuvik 
b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Inuvik new file mode 100644 index 0000000000000000000000000000000000000000..1388e8a4d93dc8e04680aa2aa9e33f32495e26fd GIT binary patch literal 1928 zcmdVaT}%~Z9LMoT(u}jv#}#IRN>m;p&f$QPfUhNVLOMctBs9X8z|+wR-*)B;;@?GM zT=AbV)}}IDH7{Ce>ROu{%{9Er=5TAhEbP6_)M;JFW^sDH&)T}^$}8ue9X@Al+&$l4 zd`)s=srlbcaDU;$?Q$Q!XN&vw{k<#XX7MXBI;YP}>CwCglCAc^)lrjO^NgKZQD7b_ zm}95qSIfhh0Xsb-DAWI#tvNqtO78G|I^*o`GV`O$I_v0p$xF?!kL*7#`K!|Q(M_M5 z$I5=Nk1zkw%+CJI1`E{$$1Z6>V4EozIje=2<K~IMceLo@76}c!so_%%Qrwx)$XoNI zB++F{w@;P1(T#Rq!>E)6pS4exek1cI7TWTho2LA?`)$RouTAB-Yj(l6XUxJA0bO+F zuvy%DMVB1hXO^~qp-*)kl&9-X=(5(mQdN9FtE<*Z^`ApF8tO37i^ptDR*R`Q)nk`m zU219%y{FH8*(|Y+Ze8*5VySCRYJGp6JioA38&VmvGB>30RX<DougTh2c1{|9DY8v7 ze=<!&lkDoTA+zS)>-NQwW2U)l*uFH_V_I5I+qDCm&AQkHUEkd;8$u`b<wR0iv--3x zS}Sc=Q<?~dByqk;lM^RP@`Fm<^y>}T-2Z}o^};0cTB^dne*C&=Z_Ke<dWX%{vcK)N z_S0s2_9eTcuHWn!yQQh}lad<wPIpf0lbwT~>aNk0>>fC(d%kLtj?Oo9?<bYg+0v%_ z-p&=1F)rht@ps_Iy)Ad?^!rD~jZde0#bl<_ht*{Hzf1{5BBs8oDsJjm#!ZZm`~Kg> zZ{rn@{`+rfcf4lgMI*18<7FeS8+qXzuN-;l9IqXD@yM%3UcT48exv}T0;B|_2BZk2 z3Zx9A4x|u{N{~`GYC($ex@wSeysjRkAdZTVl8~B^qL8YPvXHuv!jQ_4(vaGa;*jc) z^1QA-q(F`ekrFv-M2h675-F3TPNYzzQlwO*R-{<3s}?EO>*_@c_PUCZk~wNdibkqN z%0}u&3P&nON=Ir(iubzek@CH+eq;f>ZUvAf;8+7>5ja)>Sq6@EKo){yC6J{+)&f}! zWHpfG@VfOt7R2jT1X&WVTN7kakX1pJg=1Zih2dBkWNA3o23Z`A)j^iW>(&QZAg@~? wWQn|PjgUn`RtZ@qWSx+OLRJdz-lYn-@tW_(t0*=%TpErP#Y#%TCE-ZmA5g@<L;wH) literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Iqaluit b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Iqaluit new file mode 100644 index 0000000000000000000000000000000000000000..0785ac576173f7b7a5e7c2740df5f1f3492a4950 GIT binary patch literal 2046 zcmd_qe@xVM9LMqR5ve<6jTME8m{8Fn+#T2{8kq*{cG?N)VM?ZN4ZEFCd{VpNkB~N- z%YU4+VOx#m`Y~5)WM-?iu`Fk;Riez+s?n#m_@hPpC~mRX?%U`2>Bm3)Lx1<ZeZP<I zzI?gA?)gf!Wge~d{&fWH6Hbo3_T+v+uf2TUd7}4H`89P(CiQQT<t-C;W`=Ie#LZ0` zD|JyKWNuzMOWzWjX^P82GJD3DnUgnD=3EV`xj&3a$;DsPyt5ai^wgJX{)tm^Yt}Oh zo<1gJtv{Ob9RvEd*g13is-yajqSs7C<t|;3`&fm7tvWRFs;c~XiC#E(NL3A|q`E(& z!lxHYq-V8?9xIlbblTMJ{9P8s6Xve9-$|@up;=rzC`+ann7X;&=(^FIw{-F|v+Vpu zbN9FXx_;nSb<YRAdijwdb?*z?^op*x)yjhh<i6(TR73k#X^do5Q)7cPjh{5}>Mc4x zeApxkR_es*PP1yPT(5rdS#|$cD<rwUL#=tYOqw?(RZCyKJWwA|Yo8pIbtQ!=)%vNV zCdbwK*z2<XryI<M((`)5xhrO4?xb#eW!OA4a#(LVc*Z<D*r_+SA2(b2*Jw$;qqZLE zkZsj{>XCF(+6%hX_IO0LkF}|EMWLj>s8^Y3<C1x^RP7kOEFFEzOy`9w`mt<@d3<14 zceUn>I&wxo5z}U8*Kxh8=p(bcd5_+m`(9=1`XoDYR&^J5OZVW*YR^QQ?CpO>?fbf3 z_V=XKQ}2~Z&*p?W@VqD9^?|(W^75zlnKpfD?@j3~`vbFw?|<@PAm8`jf6JTU`<I6T zv)q$HcP;Y$H(v5)^Vh(<zn}AFbu{WVwWPeJb@ps*<YLbUQ(l98&|vSqz@J}lum9Yo z{=?6I;;(=A?Lh`e3Qn5?k_3_kuagFn2a*Vq36ctu3z7_yjnk%s<bx!HWQ3%I<b))J zWQC-K<b@=LWQL@M<mR-=A=x=?dPsgqf=Gr)ib#$~l1P?Fnn<2VqDZDlsz|O#vPia0 zn=S|WA_*fIBPk;}BS|A!BWWXfBZ(uKBdH^~J8kkv_D-8VlE2eV05Su}6d-eeOad|s z$TT4HfJ_836UbB`bK$g;fy{=}P6sj{PCFsUj385j%n33n$gCjKg3JptG04myQ-jP6 zGC9cXIPLTx^W(G=gv^lBP7yLk$Rr`NgiI4MPsl_eGlfhQGFQlCA+zPQ(}m2J(@q#N vV@^9|$ebaQhRhl=ZOFVK6Nmq^nG4!cjM+V`N-hf5hND%<nwoG;I2!x|P;*0t literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Jamaica b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Jamaica new file mode 100644 index 0000000000000000000000000000000000000000..7aedd262a57f096cda47a8542b3b66633561641d GIT binary patch literal 507 zcmb`DJx{_=6hI$^L};s25fDe6%4p)?u2F(abkV7ckZxUFjEP@yVA92vurQD~ILOB4 zZ*Y5mz~qfgJnyM)nz(qI`%creNzaK+lirT{F$H<TGBsK5gG)Ic$}fwhK_e{%qqsDA zG_JXgJ$;k<??b&j>!&NzYh4~+nc%6XSMSfv+WC>LB!_1Gq^YZ|(A4&PT@O98u@%OR z;)mIEyg29Nf4QuEb(G4^g0<6URkZePmh+E0k!s5xbY=G=`KPam{x+%0o0-Y{`x`5h 
z+-+Y#%y^6)#4w{+LQEmH5Mzin#2jJ|DFCSeDFLYgDFUeiDFdkkDFmqmDW(1=l`HLb F-A~yjc0>RG literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Jujuy b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Jujuy new file mode 100644 index 0000000000000000000000000000000000000000..a74ba046226060a01c6255b6714a40211f739a24 GIT binary patch literal 1081 zcmc)IPe{{Y9LMqBa@HCi1_eb#2r1?wcu?1(G@jT&h<34o;Hg6dK~QwaAj-OQh+q&S z2ozUdqn-}dP|GKm%{<6pMnoM(MYkkkjrGr`@B4uc(V=eremr~q7(4vf=bafkeyl6_ z<60}<aJYKq@VxP&eB8R0Rrk*g=m%S#nZmZLE-ar?Q=87)smTd5-MFV`uD?(ZU%s=C z&PL7b^k!{`vgYyFn4a5LRK*Kr`($g~%pVEs`PH~89Z1?z^{!gDl+%mpn`Uw4vRz6$ zwPZfnN@v0>r;gfMUqsc$>-KZ=303ck>H7O|)7X%*jZ#H@sSWF|mEGoBF=4;Y99BPW z59rm=gmPDV^ty?ICen5?zdbi;+G4qUd-9WM|9B?f5g)W2Z(f-lD_3-<*_)47D{AMB z!PaE`so66dH~quwP5<kW))D7iXid{U-pyfd`7t)hX?AXIAqbo+mV>Z!B^yMXdtMH; zIQO<1it!w@97<<`l$>3vM^3k#-<QbzeVe{t+ce~XOJ3WZ9=%gQwnFwoHbZvv%67<p zUfB@Y5!n*i6WJ8m71<Wq7uguu8QB`y+bf$RyL)AOWPhXquXKR4;FTVbCXg<WHoVdY z(g@OtS6V@O@k%pDH%L23KS)DJN9bGPL3-l9r75H<q%EW`q%p5_hP39D-jL>y?vVD7 c{*VTd4v`kU(j$(@s|5c$%4n~oy1zU66E?ExkN^Mx literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Juneau b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Juneau new file mode 100644 index 0000000000000000000000000000000000000000..d00668ad189cc7627b9301e08c00d84f1688ab92 GIT binary patch literal 2362 zcmciCZA{fw0LSrr5xCqd)GZ+qiH=Voj~=e^RFt6@Arct3n-Ezfc88UqFovR}rX#nC zi){{P(Q%Hgw$wA-O4q{DxwcG3u=Ow(rCYO{y4KTdB)9WDwDlt1=Kt>eKZpN#`}_Lq znx6AH{`0ua7Y<L4IowmXo7cHLNor)?Z!IGYsosebWg`4ehS$-TE}XB#dLuffi)kAd zh9Vo%RMaX@D7wV0T=Htj_1ZG!_6~*IhffL5&pI^y`=cuMoAchdk9*Z^Cy#n>fA@&+ z?(g;9u_q+%+}!G&(X>g#*RB-_<$iHjQGs`6dcH`UTN9e)P83NId7-4tcyH1#OZDta z3smyRe0}$6mr6Mnt?xPblM?Mka?Xx(YHnb*oVWf*F+cC7T(IIZk(%(GOv`vrq)m+L zbkFM|ee7GEaiv9M4xZ9kqpzy${)0N__y)D8>t!w9s8+c_C4H?vwYY4jytgJ^<)t;s zeBU3c!1b^!Or9YMFE5lu<G+jhKK01^FMK76b*g^g!;|8{P>f!3;DC6j`HC*t^?_Pi z^|@ZwxL=hnI--}CcB<vSUX*3oeWGmij4Y4aBFc{s$Q9S>Ma7$+=#^h~sLHK}^rP?B zsjB+jx_V!+T2&m>kF}?$n&fir4>*+nuS^}tyQBg?JR(=8gvIK!8M1cbqNp2)l~0bH z5%s&S%clkh#G1xo`E-A`c&75IZs<Owp3NT8YlFL0W1QCO%7SX$wY@r+R<45Q+jNsF zQ#Bn9==GQ5)bsmV<qN|Z;>GsI<pv!qnga#0rRTcXm_JKyY91D?3FES@>X>K?&(!UO zL#lo3Pu-EIRmb23z4^vowWa?f-T8H!+S>KD-gY9Oy4LK_FZC5FXT&sT<dnat$*<8d zu3PvEhj)xS9N}=!c1L75{8q>rW8bm%?d7{O-uzFCBhlWI?S0P5O24DJ#+)b2sWzw7 z@2KUNZzUDxQc_`V=5g}knY{R?9+}7d1Ba*A9PX(%_{%(D^DH@i^9$V8?K_K%4;i4< zj1cV*xiCf^Ge}^R$S{#{A_GN6iVPJQtJMq^8Lia}7a1=yU}VI|kdZMXgGNS;3>z6Y zGH_(%$k36oBZEgqj|?9fztsf5Y9fGy0Eqz-1SASb7?3z1fj}aGgaU~L5)337NI0w} z9!Nl}CL%~kkeDDrL85|$1&Iq17$h=CXpq<-!9k*fga?Ta5+JLI5E3FJMo5s5C?R1& z;)DbWi4+nlBvweUkZ2*{vYL1y0kfKjAtAGxm?1$!qK1SGi5n6)ByvdTkk}!?L!yU- z&uZd_1kh?Ch=kB;Vu%D0i6Rn4B#uZRkw_w;L}G~q6Nx4gP9&a4K&>XCNJy<Frbtk& zCaOqSk+>p(MIwuY7KtqqTqL?kc#-%b0k)b5BO$h$7$ZTpnkXY-M&gVF8i_O#Y9!W3 zupOq@kr7c-{-P(py4*3h@aJ%hc9O;aHQfJ}`=6VKq!$tQtjfhXz8slVnVXxFn<G7c E1GEiWo&W#< literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Kentucky/Louisville b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Kentucky/Louisville new file mode 100644 index 0000000000000000000000000000000000000000..fdf2e88b48cecddf4eafa6d8a41ba7363e4874d6 GIT binary patch literal 2781 zcmd_re@s<n9LMp4A_$5~`2!WNr4>VwUxvTJ(v;m$ubN&=(eTmKJJj-<7(p4{=C5>X z+H_FXNJ~v?j?`>gjntSqTbVMQh2aAwm5DxPE6cpM=Y1;X)*t=XKb^aCUeC^8@XvjJ z{P~6RGM#^1(e@2D*B*OwP7K;d<@S`|tMC0Pn^$!RZrM~PTZ?{iwsnn{V{zky$If`= zljlOglYM`Zle1#X$+`+vyER5m<(+cs4i=fS^%cRdcCMHD>eAr3HH$@8P7gK|`lYdK zc<}4oLGn#O@8J351ZnbSJKqlWnD3H2&iCCMbHV=g3qLoSADSASi>GT$^N||o(uWOl 
zxk?9(XTAJ>W|+Bgw3iO;Pc-2-!rG|_nTXX5y491~w3d*HR5zr}M6IGmoRjGE!z%ju z?h?~!kHqZSXkyJO)lRRJ_FufF;|>O-!+{mL<Id%#(@XPo{F=GumeL3Gt%Y9Gd0Ic+ zB{$i09ok-ZOKxWp`XsCFZJSMxR_#=eA7iCwL$gXeaZ!35I<7psKa<{DcB|Wp9drA$ z8&sdXM$>nGmA)gZ#@spP1$|eS?Ix*TkxmMIB+0RPI=OzE^t&)v_pe?pDYbqzpt4X> z_YP8NWn(3MLxRc(1We}Q-_$^FuDN^CS(TO4-wevER)eGC%#fa6>LE>+&Cu%~=wU}r zn0x9fb#~Py8UB7y-@EQ}x$ouKdc>kVGIGT-b^nBoGHT8&H9D<Waz>9*IahZW?|_-w zTf5fe#*Ngudl#B9&3*OQSJue5(<78`=>i%5ZXY#ahEFDzN2&+2(`3>UO=@yty!i8u zD*yGXG9~LxHRXIqGu3lMPp!FPriFIs{H?WSdi`2GW5qu6aP>l6FsH)ItQ@bE?;V-7 zdV!ifpj;ja_|%-Z5}E5wQ*)d1C6E-a0-t6}VboPsxXUB+n@*_(<-^Rv<5%>f#fj#z zs#?7$FJxrhKK*!>Hj5Wk=%Ox%&5{YFdP(S<6b~s^#r21zB%wr=RKG5z*Yed<l`G}x zGudisSwNoI?@?t1xw7m<N41Lh>(e^2<@ooj7TZMK^oh9XeDnMN>KqPNZE~D&_`ur{ zk>T)Lo1M1qxxM@B#M9{<4u5<wB4M1*?~EF4@5%Ol$leqEPL4h2*mqnJ?i2j~{>|TO zf7t!zU;Fd7?0*mr7qkv-MoNIx04V}e1*8l}9gsq}+Dag$Kx*M?i{Wakfs_NO2T~BE zB1lP)njl3%s)CdSsS8pVq%ufpklG-{L8^n42dR&%Ef7*6q(n%KkRl;fLdt~H2`Lo1 zmEu6DxNNO}Vj<N+%7xSmDHu{Qq-3tPW=PRoZPk#nA$3Cvhg1$J9a1}_cu4h-@*(v@ z3W!t?DIrorq=-lrkutj4IwFNcDv6X5sU=cOq?$-Mk$NHpMJkGv6sajvR99P7q^z#C zu1H~#$|9vjYKs&XsV-7pq`pXjkqRRvMrw={8L2W-W>;Hhq|ivEky0bIMv9G88!0zZ zZ=~Qz#gURDHAjl>YO9Wv-PP6|DZHz#JW_h3_DJ!O>LcYx>W?e{vI58wAZvguf~#Ex zWEouTIv@+-YF7eT3S=#i#XwdASq@}9kOe_j1X&VfO^`)FRs~rWSGz9A!noR%L6*kV zt_`v{$m$@=gRBp-K*$OqON6WuvPj4(A<N`y*9loDSG!WkQn}i-LKX{IEo8Zn^+Fa5 mSuteEoR<CiUr$-AUAQc}a4EilshO$iDZY%1)Qr^h*gpU+Ex#TB literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Kentucky/Monticello b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Kentucky/Monticello new file mode 100644 index 0000000000000000000000000000000000000000..60991aa38f5ca016d9f87bd6cb5b436ec799a10c GIT binary patch literal 2361 zcmd_qUrd#C9LMn=f8?JaP*R9Q1tmlNV<Zw428>}IAw3X~;R9j}q4=42gfe{GEYq#I zDWd%u!fcIdPLgO^i?lU!{aGTb*(`dxh&4C#M|5Ff=jr$Ub=gH*>#pzFb6yWlHf}y| zZ++_%dE##us{e(D>(vkMiMIage<Q;_`u=Zn;DrVD;47Eq=P&y0iz&a#i;dyx#-$4J z$N6;SKb0=rp(rJeM2Ubs*Hqy4Ya(b%qzrDHZ_lf`Dnp8=?a-VHGW4ZXJ3Qv33_pHM zM5tal-#KVUemkt9&TO+6oIaqU-#j2<UhY(}dppHV-8Jgw)*7*}u0X|A6o^|^B&b`{ z5=4CRa=9q%hDZopBols#w-^67Clkkiw{JW9y-a%NJNx#*kK`S`XmRJRL77}VBT_bh zsP4+25O=RSs+Po!iqxgMRH}d6PK(&6(k4gjrB}*S`q&{mW1>lB_HVbdMoMLNPo14} zaEZ)q@rk^xVRBh{t+;3H6`7w}DDKVsLM{(Y6a|ZaRs~ajv10BsRd{Y(-FLa)DLQdg z-v5E^Jh1<QeDKvKr?}&!T)BIv^-yK6ENO1EO0#{kthB@`n;lZ+nH!w)iG8XfYNb;# z(ymrbr#KJ4wpXtHrr7fAXp?K+OSUT4dt_B#u=Pk$wp_by%37BgE4|g{E$`f{tjT}N zs`)8e)h3;DYCoA#b^alz{`Cp<*yKKE{q9lq_*lEs(0o{J=wIVlo_A&Ao;Ir~vrlep z@mS4KU2;=-wzX-xUbdvhS}k7|$<~lrtMz!2+&uNQ)z(+2+P|7{p7bTEr%p^b9o2p% z_m4VT@*VYb$6;q{+{dc3vfJtO{~&z@eU@+Xtn7;Kvbx5O$?og*)-(Oj%V#eYSvz`K z<a4Kzte%Dnx$}sy0t56XDA>U3zy5^G`_DeEJKQOR>z*DC2zK4K2Sk{8k2K#guKVek zfQ7t^_|+b-C@Iy?I{iGVpDM2?(?6H#pSb|@4c>p=*dhI&&Bm_(^;`NcaNUL={{XT% zWOvB+ko_SWM0RM@TSWGVY!b~bd10HJ-Y3UKk)3jE71=AYS!B1!c8z+!$cBx2#~fQm z_Ka*A*)_6lWZxVcM|RG!b!6`xn@4ufv3+F!91TD^z|jJu?g7#SqzgzJkUk)dKsv$E z3Zxet%|N=r(GH{^91St*jvy^D>YgA?LArvp1?daY7^E{uYmnY>GzaMpM|+U|a5M<% z5J!uQx<?#MLb`;s3F#BkD5O(JtB_tH%|g0`v<vAM(lDd$7}7GM?itcFqwX5gHl%Mz z<B-lFtwVZ;G!N+>(mterNCS<!gGdXFx`#*;jk=3S8<9RDjYK+$v=Zqh(oCeANIQ{! 
zA`L}4inP?Idx|vGsJn`^)u{W5G#2SB(psdqNOO_yBJD-`i!>PNFw$b9?lID2qwX@& zW~1&i(rBd9NUM=vBh5y-jkH^sTK^B?9ie-kue+Y%S(cTTm6PGg&CSZq%8B?BJN%qm literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Knox_IN b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Knox_IN new file mode 100644 index 0000000000000000000000000000000000000000..cc785da97de0a5614613f9ba6e502d7dc5f525b5 GIT binary patch literal 2437 zcmd_rUrg0y9LMo52q6eaCWd5SS}_9&Y$$E8wFP5`#6Nk!KM+Q0h-E%1kf_n))+8^Q zrA@&a>LQnGhSpSU3r(rDnlm&BQVB4LS;(VtK}zTJydN*KKlgpv@4U_qC|931-bH24 zPm{k~i2a0z+hrf#$7uUfzb{Ge{`7aXXLF?9yX%7b=5?PwJ9$u@EeSQ}^Uq7$#M9>c zw4>54jiw{IPCB~YGC%kZ>kB8=nv0z~^`-r9s?O#r{o|H3s;j<Gcb9opPj#lg?3u29 zDvsAz=4Y7RoDBJSMx5zOh?Bk><IGk2+po3`nO}N)<kz+q)Bk0QTsz*a1{%Ax*?me~ z-+EFHF0N5Ghf3vEpT`Whd*qL=5{<7ZQT!i77^#hrfSLgl`0{{^SRJl{N=E76IoH%( zPxR}Mly6nY8wolz>SGmp;5`X5Z>v#V+jV%yK@)MJLPs9kW=8MdCQ)_e=I$-!GN!7) z+*4K{V;82IXivI~dpy?MJ0(_PCe2XeM-EGD;CK~#BSzoXeM?Pfy{Yg2{E~`0bWz9e zJ+3BJj+O^D?NyWVugl~WpP2{K&dEc$yUoMVhb7^WO(wzDs;7i4F<FmRs_d+}D*Lyi zk~4Ls$vIaio`~7T^T`Iu?N2pNy}MJ-Y@4O>Hdg6bM^e=6qC!1q#~3v?BU3+JF{tKE zh}YiyHsu`-&;@Bts^ChTEQtHgEcilXq3?)U)X*b^owcUuwHA4%dA%uKx=$9@7nx`C zPU@1HD)rpd2EC-TP%Vwvte53vs%8Dlb!kGpDm|U6%R&NF*?}azqW7|TVTVUvJmWVj zD--3V#%{B!AVSpEQ)YGAfUH^dzF8aHD&@0lOu4ULSEe_p%FZ)-UCd^+uKAFz8d|Q_ z*KgMw+H=*$>I(fzQ<ADKp08i6jZ}dF_G?6tgSU773jUkV@BGv5)8K#j-hcYv{Qi%= z7V-O!9SjKa`#;<xBi;LO_Zr2kjJ^H&%sj6=nPs1O_L*;=IbO-O@3{c?3f_NT-#hk) z-TH=Z|1103{r=(+zCFkqkwqe_M3#xH6IrO!t`u1+vQ}iV$ZC=0I_-Lq1tTj)mW-?! zSv0b0WZB5Nk%c2GN0yGP9a%iGdSv-dyMCkqPFn$_1V{~#A|O>j%7D}XDFjjpq!dUk zkYXU!KvxdlP!G-)grg!zNjPeP6a}dYQWm5xPFonHGEQ3>j@lr_L8^n42dNKIAdU(l zCE}<NQY4NlA!XvI6H+LSN+G4<sFl+e3#k@TE~H*a!H|j}CF7_WQZ$aLA!XyJ8&Wuq z$~kT6klG=|L#pSr<>RQI(-x4Uf=*jPjv69GM5>6C5ve0mNTiZTDUn(t#YC#<wB<zV z>9hsqsHoGHl%u9dQ8}uLl$E2dNMSiDi<B0rEmB;hx=4AE`Z{fakqSF)iIEyRZIO{G zBW32OGg4@dN+YG_s5Me-j%p+2=BT&R796R#)0P~mxziRMsX9`2r0z)Jk;)^bM`|yw hy#I^bhuOad)9qh{q`b$HrzNK(<)x-3rzWR_{RxkBPOAU_ literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Kralendijk b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Kralendijk new file mode 100644 index 0000000000000000000000000000000000000000..d308336bec9a539742ca3885c44a4d3c5a674463 GIT binary patch literal 212 zcmWHE%1kq2zyQoZ5fBCe7@MyF$eApsr~GZk8xICXrvLx<$1*Vd|9@Zy1Iz#a#}6=Y r`S^w~=o*+98!$Kqhk&#KK?n&J{D5cyn)&}f$RZF;l4V>#dri0im60!* literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/La_Paz b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/La_Paz new file mode 100644 index 0000000000000000000000000000000000000000..bc3df523516d19191aa9d9b88ecc97c3ea1acd4c GIT binary patch literal 257 zcmWHE%1kq2zyK^j5fBCe7+atL$k`z`?a7XJSFhCn|NozniJAHT{{u1%3}BMw|NnD3 zj2!>}A3wmr<Kr8`;0(l0!66K~1|}eLz#xRMeXKx*AUg$qK(qi&|NlR;T{;CsgX{;> WKsSJ->wz2wavZ{C19X<12^Rnvh($mE literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Lima b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Lima new file mode 100644 index 0000000000000000000000000000000000000000..44280a5c13866d06e34821f8d276178b14d97f21 GIT binary patch literal 431 zcmWHE%1kq2zyK^j5fBCeK_CXPc^ZI3Pl>~o!{@yMj<`N{II8b;;h4Pcf#du-7moku zKOiS@BtWh|)<OQ~_X`RI6Aq}(dwfAnwB~@WDszDD>^Tng|NsAIWM)Ev%>VyyKf=HO zB-en*;|Ca7{{P>(fq}!vH-tggzyyd*f!0Dm2w}$x0abz>`va;6Xy^aTcI7=F8svBo z4e|hp26+NRgFFJFL7oB8AP<3Qpr=5_fIJ4Kft~}=AP<6QkS9Sj$fF<{<XI36@-UbN WdKyI60}W%K$md)(z;LiL<pKbPQHQty literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Los_Angeles 
b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Los_Angeles new file mode 100644 index 0000000000000000000000000000000000000000..c0ce4402f60a135017d49933afdbee55916b9be3 GIT binary patch literal 2845 zcmd_rdra0<9LMnoMG+J|nJ5w3NlgR+0Y&jr=52tAs33_MUJ@9lp+cx-NQ*LS#+S@X zH#(ZNTp60PSrnlyS8J9*bKWAdRHotuE%DOxbl&IR{^;-4d3JuU-=4vr&-;@(yC5&Y z^N-WPe#7N_W-sqO%Iu@+y%h6yO^BH1qs-RVugiDq!c2X?YfJ0r4^T(E#dD69?Udu* zujE9XsFRKF%FjDKl*Zb%^2@rHrKzewel1ukr%ETwZ>cln_na6x9XV9aBu`RjyH8im z5z(r-b%;6_&{dtQ4^ijOTviu$Uso*$>(#}|W9m}%MY+7JMqQ~mEUjzy%hke<<=WhL z<d5`M<<BwC$@Re{(l&6Ly3uWx-0W1YZnizEZ#Czs+edTtU*DuC@4jU1dp}BfwnS>b z7rLl+D?97<%La=7{0P%wdXRJ+8)yRJE=a)o@g`7xFM&J1)IpbbN#~|&)8*J96<k+o zx_(-%y6yN#tJf;k9qUAQUsa^;ELg64WY1I~shPUxh~cVN<S-rDH$(0U4Ao)n5+$s4 zsJZ)8FA1;jX?pL!A$@kYnR~XL5M44z-}}rL(l;wu_gi#a^-sK{@0;|w8W8fMj)<yK z5#AOP8C0es8-FlS=N70zwYyDpL#f16Z7{KW=E>mFCrsRiaT1^ZicTo%FGG@x_0Wut zk{FSthb1)2@PH(p6y8B4oek3?Zk<&l58l?JPJF9I?+P{d*L<KJsJLmyys=(ASa{Nm zE#D#I(reB5g)hm3!NyFSutX+aZP3XvFRJ8*y*f2`u}a;uSx;)2swTg=$4oi0NYYks zG*dsBBI$Ehm}#40W%}rBGowUFM))X`IkQzVZ}&D?i9bo!>5+O?pDSwC*Wo(b+n{D| z_v(im_o_MNP5P1A%_?W%0X?^Bm715<Xy&ioC<|ih%%k}$WMS|&lbf6^xh>@;KVp>R zAIdWY0llSQXNp;L#wU+$%GHYxhpQze$@=kXuPV%n)JrRx)Usin_42|4swkvI7pHGm z#lApOl2j)pjpxnE&~37^_K;cCRxVFgRhy@d=E>^PO7rx-6e-OqGHbR(NjpFLY47jg z^}qA+`6>r^JU-uB;^*)4Rg`%;x%V#asivf5dZtaVH^bhC?46crf6*R~pL=BfdkJdn zAG#8>@gMg1?8)&Dt3%3w)B!03QVFCKNG+VU7)Ujcav=3U3W8JwDG5>&q$o&Lkg_0k zK?;LZ1}P0v8>Bc$b)2?5NPV2PKuCp<5+OB0iiA`NDHBpBq)<qukWwMFLW+e{3n>>; zFQi~jTQQ_$NX?L<p{p7%l#OHS1{4md98x-@c1ZD%>LKNG+WH{{blM6cB}8h76cMQ+ zQbweXNFk9*BBexXi4+s5CQ?qMo=8EFiXtU-+L|IoMXHLF6{#yySfsK@X_49@#YL)% zlozQlQeda8Fj8Wttuaz$q{>K{kvb!VMk<Yz8mTo>Y^2&qxsiG!1xG55l-y}+juaiK zI#PC|?nvR0$|I#mYL65jsXkJEr2fbPIPD4`OW?F?fGmR3t^%?Q$T}bkfvg0w6v$d2 zi-D{LvK+{IAPeHOD}pSE)2<1!C{DX7$g&{of-DTOGRV>(YlAEfvO37}AnSuH5VAtZ z5;^S}A&ca+tAs3*)2<V;P{>LlONFczvRKG!A<KoV7qVc;iXltpv}=Yen$xZtvTRPf yZpgwRD~Bu{vUbSgA*+WhpT}+8|8Dw%>>?)FWsFW65}Obk7o8R#9~&PV7xXtE&6qp@ literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Louisville b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Louisville new file mode 100644 index 0000000000000000000000000000000000000000..fdf2e88b48cecddf4eafa6d8a41ba7363e4874d6 GIT binary patch literal 2781 zcmd_re@s<n9LMp4A_$5~`2!WNr4>VwUxvTJ(v;m$ubN&=(eTmKJJj-<7(p4{=C5>X z+H_FXNJ~v?j?`>gjntSqTbVMQh2aAwm5DxPE6cpM=Y1;X)*t=XKb^aCUeC^8@XvjJ z{P~6RGM#^1(e@2D*B*OwP7K;d<@S`|tMC0Pn^$!RZrM~PTZ?{iwsnn{V{zky$If`= zljlOglYM`Zle1#X$+`+vyER5m<(+cs4i=fS^%cRdcCMHD>eAr3HH$@8P7gK|`lYdK zc<}4oLGn#O@8J351ZnbSJKqlWnD3H2&iCCMbHV=g3qLoSADSASi>GT$^N||o(uWOl zxk?9(XTAJ>W|+Bgw3iO;Pc-2-!rG|_nTXX5y491~w3d*HR5zr}M6IGmoRjGE!z%ju z?h?~!kHqZSXkyJO)lRRJ_FufF;|>O-!+{mL<Id%#(@XPo{F=GumeL3Gt%Y9Gd0Ic+ zB{$i09ok-ZOKxWp`XsCFZJSMxR_#=eA7iCwL$gXeaZ!35I<7psKa<{DcB|Wp9drA$ z8&sdXM$>nGmA)gZ#@spP1$|eS?Ix*TkxmMIB+0RPI=OzE^t&)v_pe?pDYbqzpt4X> z_YP8NWn(3MLxRc(1We}Q-_$^FuDN^CS(TO4-wevER)eGC%#fa6>LE>+&Cu%~=wU}r zn0x9fb#~Py8UB7y-@EQ}x$ouKdc>kVGIGT-b^nBoGHT8&H9D<Waz>9*IahZW?|_-w zTf5fe#*Ngudl#B9&3*OQSJue5(<78`=>i%5ZXY#ahEFDzN2&+2(`3>UO=@yty!i8u zD*yGXG9~LxHRXIqGu3lMPp!FPriFIs{H?WSdi`2GW5qu6aP>l6FsH)ItQ@bE?;V-7 zdV!ifpj;ja_|%-Z5}E5wQ*)d1C6E-a0-t6}VboPsxXUB+n@*_(<-^Rv<5%>f#fj#z zs#?7$FJxrhKK*!>Hj5Wk=%Ox%&5{YFdP(S<6b~s^#r21zB%wr=RKG5z*Yed<l`G}x zGudisSwNoI?@?t1xw7m<N41Lh>(e^2<@ooj7TZMK^oh9XeDnMN>KqPNZE~D&_`ur{ zk>T)Lo1M1qxxM@B#M9{<4u5<wB4M1*?~EF4@5%Ol$leqEPL4h2*mqnJ?i2j~{>|TO zf7t!zU;Fd7?0*mr7qkv-MoNIx04V}e1*8l}9gsq}+Dag$Kx*M?i{Wakfs_NO2T~BE zB1lP)njl3%s)CdSsS8pVq%ufpklG-{L8^n42dR&%Ef7*6q(n%KkRl;fLdt~H2`Lo1 
zmEu6DxNNO}Vj<N+%7xSmDHu{Qq-3tPW=PRoZPk#nA$3Cvhg1$J9a1}_cu4h-@*(v@ z3W!t?DIrorq=-lrkutj4IwFNcDv6X5sU=cOq?$-Mk$NHpMJkGv6sajvR99P7q^z#C zu1H~#$|9vjYKs&XsV-7pq`pXjkqRRvMrw={8L2W-W>;Hhq|ivEky0bIMv9G88!0zZ zZ=~Qz#gURDHAjl>YO9Wv-PP6|DZHz#JW_h3_DJ!O>LcYx>W?e{vI58wAZvguf~#Ex zWEouTIv@+-YF7eT3S=#i#XwdASq@}9kOe_j1X&VfO^`)FRs~rWSGz9A!noR%L6*kV zt_`v{$m$@=gRBp-K*$OqON6WuvPj4(A<N`y*9loDSG!WkQn}i-LKX{IEo8Zn^+Fa5 mSuteEoR<CiUr$-AUAQc}a4EilshO$iDZY%1)Qr^h*gpU+Ex#TB literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Lower_Princes b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Lower_Princes new file mode 100644 index 0000000000000000000000000000000000000000..d308336bec9a539742ca3885c44a4d3c5a674463 GIT binary patch literal 212 zcmWHE%1kq2zyQoZ5fBCe7@MyF$eApsr~GZk8xICXrvLx<$1*Vd|9@Zy1Iz#a#}6=Y r`S^w~=o*+98!$Kqhk&#KK?n&J{D5cyn)&}f$RZF;l4V>#dri0im60!* literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Maceio b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Maceio new file mode 100644 index 0000000000000000000000000000000000000000..54442dc737ede2aab1b597ee71a1db3aa8e33aa1 GIT binary patch literal 770 zcmb`^KP&@L0LSt3H&Rj-gF!vSgyd;SH5#!J9V9nkFp22IU?3qSVUie0Sl%cGxv@+Z z)I@|x8me?;A^u!i^1SaEEhJuZ{WMLkx&FTPXnZ1Q{WvA&8!k?(x%gbQXx`R$Cg+pY z{n_MapW1usl>3<(k?!7=>4&_$uDf%t_eR7`p;l(LGUE1TQ)cauc-hd-YuT81I~bSm z{(y3a+N67YDDu8@C%^5g!ehDoSY1%{Cu=e=d!-swmm@-_s&OeUn<`VPdAP=DzMmE? z;b*7i;#kPVIoVMkQ{lHGr#rbKBDXmic^(iwDeXk3{3@Zfwu*~>`6|{rvus&f=dwkm zT3?=76@xL`YL`KCh0I^(*ngPG{l3NgqSnlXOy+53L#9LKLnibzGa^%ZnmLh4ky(*x zk$I7ck(rUHk-3q{J<aUM^qyvZBmt6v$0r4n14)8pLDC?3kVKv)6Ozi)<U*1m*^qQd aJ|v;1$%v#xa$>?f+dn7uMNQt`knam56e{Ha literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Managua b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Managua new file mode 100644 index 0000000000000000000000000000000000000000..c543ffd475e374b39c6800fcc69c3c06b6eb5dd3 GIT binary patch literal 463 zcmWHE%1kq2zyNGO5fBCe0U!pkMH+y_UR{qDY|bSKoJ;Hi1OtCO5WJEfAZ&Q+f$)~V z0L{dS3R(xf1GJs*J}^4@KENsBzyoLd%m9~=yareK$OJ|vW+oOOWQNlJ|F4%}U;vU5 z3@rcuuU^2w@&EtM4GcUWl2HI6;^P~_;0wgg!66K;K<w-i0(1oegb;SWAW${f{Xd}E z{sTc~yG{y-2Kfgh4Du6*2Kfs_gZu`jf&K&0AU}d>pg%!0$gdz8<X;dC@-v7A`5Q!o b{0^c){s+;ZFaXh@aA2TCKyU#=$czgBd@pf= literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Manaus b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Manaus new file mode 100644 index 0000000000000000000000000000000000000000..855cb02c4082719f3a51c8401288c013efefe683 GIT binary patch literal 630 zcmb`^IZFdU7=YnTJP7e>Y%DyYZ32TyAtEv!r3%7=;17rhRyNv2ZPSaOg{4|enMjHO zi(n^;79ojg>;fuyghUy?cP#}$aF*G3SlD6bS$}msJ?{M&ZSsW4h{<H#y(72%v&?+1 zZ{;cXxn%Osv--Tcr%DqAU8+ZPdEn8O^J}Wo8q?Loy1K8N=$b#HUJiQfYsZFqvzz+8 zH*Olsle$s7QO#)0HcvZEtKP3avRN~Dd8CK7AI$LaoE=G4Ol)^ck9OUexV_Mc^tH-3 z=e%&}*DLH?vEX^m-Cl*-Wo^C4!lLgbR8nS24#4pLaOlSOE3%_=9G(IYR1hc-6buST zP(h)<1Qi?#5Cw?>MZuzgQP3!G6g&!GP(h?X1{F*SC<T?z2bO|M0j3~RpefiCa0)uj Q$X@>fADxn)FrA8i0o?5eRR910 literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Marigot b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Marigot new file mode 100644 index 0000000000000000000000000000000000000000..447efbe2c967cc5642b58f51aff86b67073134fb GIT binary patch literal 170 zcmWHE%1kq2zyM4@5fBCe7@MO3$eC<zyoQ1C|Nmnl3=IGOA3wmr;^P~_;1~?#0zn7~ Vru_ix0GaatKYmlWfX16}0RSy&9%BFi literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Martinique 
b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Martinique new file mode 100644 index 0000000000000000000000000000000000000000..f9e2399c9d2254d5c447fbd0b6dc6be303569262 GIT binary patch literal 257 zcmWHE%1kq2zyK^j5fBCe7+bIb$eFnKgp06mo`Z;q*ak)>W~TrDkL_V#0Fx~L{~tfV z!1@3G)eDTgKE5FgZf-!rF&KzlLO=$AK?q?BS%C^cmJ0rWXaO4k|9@t?$Po|?asWse T<OmQAatH%iF5v<?&4dd85=%V} literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Matamoros b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Matamoros new file mode 100644 index 0000000000000000000000000000000000000000..5671d25816de263b5de2d310bd974fed88238605 GIT binary patch literal 1416 zcmc)JPe{{Y9LMozB`A^wfe<7L6?kYZH`A7BSz1GLnmTQ%nUy-VKYL0YYGw~4dMXcr z35A4cmk{eRj7Y-j5`_dI(m^OmB9NY-NJ#wkynh`#b%?&>=Sv6kviB!Y)!gV2f81L0 z4i6VJ55KQd=Eqp{UEP+7uVQP<6YUN~gr_x5ru%|oSJ`Kok(wnkJ+I{M*d*cI{7HKk zSH+&6FLl=U7_~P#t+OXqRL<iYdf&jj+J9T>-0)L%p!d2wSTdsW>ccXBXRkVxUn&o8 zXjHy!>C(4+O%%i=%7VEDQMm9!9=UT%70nh)e|M)Uj;6_ynrd})aHA~E&QWFU-(-1G ziV9TB%D{?4Rc1`e%8$t+nEX};UkGu0HL9ydKZxr2Azjn^T%4Hd(zW&X#L3|rUFV-x zr}{hP=^dl$OsHDcC-kWXUyf{8YEq%p6d8J5sG4IPvU%L8S{4`N*}(#FZpPB*BiqG= ziT65OnIPoAGkr1Ri?|dX)2)ebL|aL}Zd+Ya5$~vs%+IKfO?|Rs>Y?gfZjxQYH`V32 zLfPHZuC7cvWl!xfb#-XH6pl6j<YieSxk6ah{ktN@vK~Ef#1;7iBCp&$`2q8ta)H0- zVEOk)%zv~jdf-o*;OGKrgQE|m5sprfRycY=n&Ifi*0jUX57H3Q5z-RU6VepY719>c z7t$Eg8PXck8`2!oovmq)qd!~IAV-Hti%5@1lSr3Hn@FEXqe!Pnt4Oc5rdgz0ThlH_ zzqY1fj*gL*IeJE#=I9z}o1<@}ainvkb)<Ksd8B(=(>~I_t=Ryw16#8NWDm$DICg<- zgJU1aMmTnYY=vVl$YwZpV{5j9?8nw@2-%UX*%GoRWK+nlkZmFRLN<o~duQ=xc9~{& TPJg<~<4SY--ENoLl@|XC#x-QS literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Mazatlan b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Mazatlan new file mode 100644 index 0000000000000000000000000000000000000000..afa94c2ac5c1679d39143eae062b812070876492 GIT binary patch literal 1564 zcmdUuYiLhl9LIlSF0GZvatV=8qP3gNm>Ih;V}G;lj~TZAhX0IRZM0$AQ=|xcrbM}x z2uqXfl1q6dmk?UHq$JsrvZiH7$MZRq7hZYch41M(pVO&xUVT2^++u%}{IS9I4Tt5~ z!}ADjZ)e-OD_VNXUbp#I_}Y`7_&S1T`Z~Vv>AC;D()X<CYL7^Tjx5@(qN+ye8L561 zoz<^nrY=%5qh9FPA=A~Yai3*YSFoD>?YYGDg_t>Qk0t(ckePe^gd{Y6F!PQz$o%q$ zCUN&Mos@CeB(Ja2DdC6Af|Pu{a9FiTosy_i`<ql+;AEZltV%6@J4i3-Sgz6^OcKx0 zShe(0pk&mHQOowflgxq`lT~s@vJ*!eZ~8@9KJklL5xQSi%1e_o^1RM@ebcP!+oyAH zoieLCOZ1uxH`LnJr8;l#NtJ&%Li>ufDya8JVOpVDmlrPUr=_S33B$!7;xk3#KS@z< zlG*sZQ;MHVF`Hhs%jPSCP08)2y0od=l$~qU<(2nL#hxR&GPg@@@z?6AxVx%);cC5g zY_r-HUL)JS9#A#Ia;4@)xvKpbFWawWsvVCe$j&pRrmjV#epiNRIQT{+V1RoK3<&%O zm*X6d7jc{uMgkqD`LqmmoJK9dJO`+s@6$bA@nm}?*`8(gcv8I9h2Qi3g+=|pK6C7_ z31Sq)Du`JSyIfksAeO<OrtzZ+%^l+))<MjJ*atBXVj;vth>b3-kq|2xW<u;_7z(kJ zVJgH{hOrQ9A?8Btg%}L67-BNSW|!7zh}ABw*$}%ShC?i8m=3X>VLZfohWQZt83jNp zV3Yu<fl&mc3YWGFNF9(uAeBH$fz$#i22u^A97sKkf*=(!N`lnHC<;=QOIsGCE=FOH z${3|VYJ(I9sSZ*eq&`T2kP0CsLTZE*38~VhEfZ3wOIs+UQbwteS{cPcs%4Z5|Brfu ZY}&Cl=}=E}SY%j4s3$TaEG8@>=m(pAkqZC- literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Mendoza b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Mendoza new file mode 100644 index 0000000000000000000000000000000000000000..5e8c44c8934960e2010e7b0505966747cb2c6e50 GIT binary patch literal 1109 zcmc)IPe{{Y9LMqBbgLyE1cUw%A*7g#;6Ysrs`ZEmBihBJz*C1P0-@-TL4Q`4PQf5X z5Gby^20aCqsO3}3RzfnE5kZGV(XEJBV_Et1eLv74I@GP-c=q~j3^w+84-6hV(i!}5 zt&n#(TuC|Huf8BpH?CyV-823A-uh=|a$`nME}l?RYtGrJiE%SsyRB!gzEt;Ly|WKa zH<;P!wb~A5%)`-9{b)yC<<A%G;|*0acPOmqmSd{Wldy&IE%o$bR?nxdoB5BI>_W<^ z1@pm{I^t%r_pq((imJ+3)qaT#scL7FuD%~LwdP)1E0omN%CKH4Z8P8Uar=E{zxr{r zUoVfumAl-nSB~#B(bnU+P1y<4+LX<;B|e+BPp5M2>$-LO^Ss%-bkug3JtkIuqqkf; 
[GIT binary patch data: end of the preceding zoneinfo blob, followed by new files (new file mode 100644) added under env/lib/python3.6/site-packages/pytz/zoneinfo/America/, each as a base85-encoded binary patch. File names and "literal" (uncompressed) sizes in bytes:]

 Menominee                 literal 2283
 Merida                    literal 1456
 Metlakatla                literal 1418
 Mexico_City               literal 1618
 Miquelon                  literal 1696
 Moncton                   literal 3163
 Monterrey                 literal 1416
 Montevideo                literal 1564
 Montreal                  literal 3503
 Montserrat                literal 170
 Nassau                    literal 2284
 New_York                  literal 3545
 Nipigon                   literal 2131
 Nome                      literal 2376
 Noronha                   literal 742
 North_Dakota/Beulah       literal 2389
 North_Dakota/Center       literal 2389
 North_Dakota/New_Salem    literal 2389
 Ojinaga                   literal 1522
 Panama                    literal 203
 Pangnirtung               literal 2108
 Paramaribo                literal 296
 Phoenix                   literal 353
 Port-au-Prince            literal 1455
 Port_of_Spain             literal 170
 Porto_Acre                literal 662
 Porto_Velho               literal 602
 Puerto_Rico               literal 255
 Punta_Arenas              literal 1911
 Rainy_River               literal 2131
 Rankin_Inlet              literal 1930
 Recife                    literal 742
 Regina                    literal 994
 Resolute                  literal 1930
 Rio_Branco                literal 662
 Rosario                   literal 1109
 Santa_Isabel              literal 2356
 Santarem                  literal 632
 Santiago                  literal 2538
 Santo_Domingo             literal 491
 Sao_Paulo                 literal 2016
 Scoresbysund              literal 1930
 Shiprock                  literal 2453
 Sitka                     literal 2350
 St_Barthelemy             literal 170
 St_Johns                  literal 3664
 St_Kitts                  literal 170
 St_Lucia                  literal 170
 St_Thomas                 literal 170
 St_Vincent                literal 170
 Swift_Current             literal 574
 Tegucigalpa               literal 278
 Thule                     literal 1528
 Thunder_Bay               literal 2211
 Tijuana                   literal 2356
 Toronto                   literal 3503
 Tortola                   literal 170
 Vancouver                 literal 2901
znd;%UYh=cWWhQgOHktX!43o8Bt<2h<U}lfcl{sEzvSZT4Gq>G%I{Qmb>T#2EZk(Ps z;HsMUMU2k%H>&x&{Q6Pfezl;iSwB|0L**?!q8C=KSBo-zviOB<X32;;dAx9~SsGC# z`R-hk-&Q7tvFWC;zCemX`kSJUr^&MO0rSN6e7*ep7`4Ld)=yUZRdG(7URmC(R*mkd zR~H{qC6R4<P1Y{8CJ-v#v^wMUU6gfERc2jny{zvjGf!7m%QGhn%!bkm*?3@@Da|X9 zO*`XFa8PiUuJ-ZY`~(88yy0>M0`Hihu7N=LW>*jAJ=}S!8JQl}tSR<p+xv*Uvpn{{ z+~o>#9-e=%!{_#gP8~Y<3wr|g<W2BiLTZ5&1E~g54o6!Lq##H|kdh!ZL5hM@1u2W8 ztqW2Zq%ufpklG-{L8^n42dNKIAf!S_iI5s0MMA2Cl*!T72`QALtrSu!q*h3=kZK|2 zLh6MS45=7WGNfim(U7VkWkc$Q6b`AJqb(g$JEV9>_0TCFFVv4?3kXyYDIrorq=-lr zkuoB6L<)&i5-FvlttC=Sq?$-Mk$NHpMJkGv6sajvRHUj%S&_OTg+(fhloqM2qb)8{ zU8KB7eUSnq6-G*o)EFr;Qe~vfNS%>FBb9cvrABJ)Xp4<h8!0zZZ=~Qz#gURDHAjk$ zR2?ZhQg@{ANac~zBei$5#Yd`-lpm=-vH-{mAWML(0kR0lDj>^%tOK$T$Vxccr9jrg z(JltE8jf~3ko7<o1X&SeNsu)`76n-qWLc1PK^6vC8ArP`$l5sC#X(lb(Jl|NKF9(g zD}*c&vPQ@vA*+Ne6S7XoLLn=KEETd=j&`w-)pE4Ug{+sOT`*+DkR?Ob3|TZ})sSUF z)(u%WWaW^hL)OmGE*`Raj&}Kw^>effh^!#8gvc5qi-@cuvW&<&y8gfO7-m;9&93M0 U%uxx+35mlqladmW5)#Ay2A}k~%K!iX literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Virgin b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Virgin new file mode 100644 index 0000000000000000000000000000000000000000..447efbe2c967cc5642b58f51aff86b67073134fb GIT binary patch literal 170 zcmWHE%1kq2zyM4@5fBCe7@MO3$eC<zyoQ1C|Nmnl3=IGOA3wmr;^P~_;1~?#0zn7~ Vru_ix0GaatKYmlWfX16}0RSy&9%BFi literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Whitehorse b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Whitehorse new file mode 100644 index 0000000000000000000000000000000000000000..6b62e2d3c39a8406cdc087b387fbdac0709f9141 GIT binary patch literal 2093 zcmdtie`wTo9LMq7Qm4zo2XZ>soqcrX+}v(I+~(=*mrc&u&MkLdo|(Ve54N-|w@xjc zCyXK^_AXI_?EE<VM<kpKMX-hm1tF4c5s@(@D5Dsn1+A2Q`#eAOUw`yx(D(R$AD{cO z-G8rVbVFN9x$hs>RQraTYrnlY&rRA#>cW+Q;}`Rcxi)>^ME`K|${Py@hVy>y9Nt=? 
ze=AAI_`-g5H?Ubf(6&?0h_6%GwNX8D=@K=opi1Y=Yn0j3a&&HbrR3hJFb{q=OZ>w# zP2l6-WzN}g^U#Uwq7#es+yfuUyjYgbYyU>gue_xnUj3n3ko|?uFHEWYi7``<nN$U% zpPRy;wy8x!XHC&aj})g4nc(0SDd~CHlpb0kWv%b%@~-)^INYr(8mCBQeoR-D|0qi` zLOSH1tU@<)_0qdH)v{0T>FVpBspSJX=8=o1)uVlXn8)6GOFiE4t$CvNl&q*9GEX+W zDK#a=tgP7~D}Nc$;o^fTJaSIgW^GrsgGcr1u{G+cV}qvdt9FU(J#5yTuao+XyG+B8 zpgg@iZq_E0H2SMebln|^-V2ym<(CrsewkiB=eAn^iC@PjM%0Fr6Z+ZFb82Jn4gK8E zQMIY*vfiB9qqamw&DPfs%eLZ6=K0oL(v;P2n!|Bv9_ux&`PI^TwZ*h$1f=bQRi^z1 zPhL3EthZnDs~w53e(}tN>WCHS&b}M!rK<b&&W_8fD|<|L*Pm3~-ZYa4U6RD;&t`W{ zzw92mYW9rx$}6ce=GCuTWN**gX5WQX(zB_{?0>gV($nnczDenm)2C$o2OrNn(B$)Z zUSBe8l0A3&rn>WV_f($uzNF3I?3?Y*fx1Z47q78*qrIQ8cLU$;iw66m!OE59o_`tr z_Z=kdpSvB5|NVcSz4pC!@EEcSWE;pnkc}WaLAK(wdqFmX>;~BmKhJ)U4Iw*1wuI~n z*%Y!XWLr+VFJxoL&XBDkdqXyd><-x;vOi>l$PSS$B6~zO>9o5<w&}F{L^g`-6xk}W zS7fuuZjtRG`$aa4>=@ZHvS(z|$gYuXJMF%ajUzipwvOx_**vm)Wc$eekp>_gKw5zG z0BM5Lb^&RF)Aj*rgwu8cX$8^?q!~yzkai&bKpKK{1ZfG<6Qn6l+ZCiOPTLoxF;3eV zq%}xykmexWLE3}#2Wb$}A*4k}kB}xIT|(OAw0%Mv<+PnbTIIC8LYjqi3uzb9FQj2e z$B>pGJwuv?bPZ{n)AkK%oYQs=X`R#d4rw0JJ*0g||BwbE9Yk8_`@hF9(^fKMD_Im- S94rr(7DdX+f@Q(d%s&C-P%%gV literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Winnipeg b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Winnipeg new file mode 100644 index 0000000000000000000000000000000000000000..2ffe3d8d8e012445aa961fc53b38934681dd2a2c GIT binary patch literal 2891 zcmeH|YfP1O9LImh8;GK2VuS>y6+;mL#qdH|>KG(~dPEd5H4L$g#~X?l{56@ES8A4* zQKXJqt`$0$;$>uN&aIq7lq8K1m7GdGsavST>HqoH3%A^S)62eR|L61Y49+=k_<X!m z^WKV+f6dQ4;jlyIaNk4CrNr}~zU0`!^wNqQ`pLn2&e83|^;lZ5`~BKhJ=6EDb7n?I zw|ebtIXiK>dv4M>sp);n{V{&OoR@O9HfWpF)^Bt#n76xdGTXgaw?%*6wZW;YEYx4t z&3Eccv-DD)*J&t9)XT|3oGa79^=f3W)0i|ue(gTky%rHB*KYT8ubXdky{g&0(byn2 zYmU23CyvW+8xOm8^2_vY>`eDw*A%_)Um*9JbLGLcOldivEPtLHCao3y#rJur$kssd ztZ9`ti*HHWc_G?vMkj5b-l82wHmP4so%;7atsT1^(E!<_0q#!iRKH0(*M1{`M;2+9 z13RSauJ@(ewpH@Px`oocaF#rommxi-BuP+mg7h33DNjX)NN}Gq`m}$GgtUp!klVr9 ztD&QYR^8WUzP+JgdoF7Dj#JvZpsPIlUb*&}^t<$(^MgDWUoFq4d?hageJv6FHb_Kk zl|}~4m&m%k+V8qo`d5CaQPqwPC|#`4dnf3?qA41)X|TrT7D(KJo;oNgQwC3H*7%5F zG9>ONebFye5_<h135~5X^g*o*J5evgFC3B)<t>_c=wo?leVx8svQkFoAJJEeOZ3&W zojNLKg^t>vUq5<av5px%-WhXeza$Nq>n2riljOiOH+k<eNok68U;Ai_j;$Hzq%JSi zar<JNwCpTR-`w39KO#{lEV}DV3=LQBq#DQD;?a!w3Mb=gm}G{Xbu+&cnbNw?om$o) zS#?|8?Be6{dgW4gTFz(kMrpP?J@u&0SX1cC98jij=4LrLfva^^Qlc}fX}0D@ggd#X zQZ&!c<K%rArE?lDJ8y4JmUmA3+_?q)<=yg2Zhl6fIO~tO^WtyG{QOVd1wmD^Fl~*y zu(e4G63U!{y3@KOc(t>na*q}^&vurUZqjAvQ=H{Ri*!Xrlv6Zqvaa0P#cAWQueR;} zx%vCYM_t=@_-|j2{lG_kiHD#0d}TL9e7*y_J?(tHFSd)nz3*(V-5!2EHq|SmMw>Iy zoXO^-d(FSNh{xlx5b-`<4~NaKT0J!LH)cMwoGZIOfat=~C<CGmh&mwpfG7l_5r|4G zjZPp+foKJy7KmOTih*bbq8dx18;Ei&jdmdFf#?UKAc%$_DuU<;q9llxAZmi>38E;7 zrXZ@aG`fN)%hG5IqArNOAPR$M45BiK&LB#IXwA~74Wc)Q;vkxXR(0H<J7$!}&>lm5 z4E;eA2+<%!g%BM=lnBuxM2(h4j}S#dG|5mUM3)R@GPKE1CqthMg)%hCP$@&F45c!( z3Q;RVua-u!5Y0kV3(+k^xe)C_)XUH>M8OOVGgQpbF+<4=En6BjL-cHE6b;cdL)8#n zLzE5CHbmVJeM1z^&^Scp44p%i&d@qU?F_wJ8pSg-&rm%>_YCDjw9imKME{HgKr#SH z0VD^OCJB%%K+<4o^1w)hrO5;%6_zF!jATHv0Z9iWACQDVG6G2nBqxxhK(Yc!i>1j6 zBr%pIGmO+&n%ppw1IZ2}J&^n`5(LQ*BSnxLL6QW?5+qHKJV6p=X)*;#m8HoQBw3aw zTaa`?^2JCPBx8(}L2|}O8YF9sv_bO5NSvj~93*v?CU=nJS(@xY(g(>OB!Q3&LQ)9H kAtZ_Ne-n*Bjr&Y4hnQ?er4EXYi;js(jg5_tjgATU3t;rI(f|Me literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Yakutat b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Yakutat new file mode 100644 index 0000000000000000000000000000000000000000..523b0a1081ade2feb2e163279c0b9960f8550099 GIT binary patch literal 2314 
zcmciCZA{fw0LStFMc@KKQ9?XOx04Sbj~-B-iZT?UD-sabn-Ezjc88T<Foq8hn2y{k z$~K3y!EuhQwrpp*m9D_jwYE$~xb-lK(ydu8UF&HU$?bg4=Bt{on&-ds|6K0fc=!AI z8d{#rbN+SQ<{K_&z+66idd=(fp2d1{!EbGoP0PgWu{ARIMv-s^3#IFYWD(MpB<E}? zP@zrvI;=KPg_kC1xBN@FU#iuSv0tmm!)IjF&tvMQ??!d>S5qS9gJFI1@lkQhoA1lm z{lntcJwx)gt$ku{ORtQp-yq_>>*ejm<su=yOeW56Qc01iGC5?WO3sXnP5xzto_Btc zN|{`!?>OmJo+IJ<&VxTH(OE3->N~CG`{&68n|=@r^RLQ9t3DB_@!!g{jJHJE?2Jy2 zdRe4Tf1@)lwu#KK6FTeciz<8Qpw2nESuN>#Udz{NRBk}Yy!Je`v|^`RRu`x8)0$;L z-XE&a{eUb=nJbDeESAMHzl*y+j*|CGeIZJ8s=oKV<Kn(oBJ}bD2gLoY7j@~bch!pO zPxZ>?{i<xq+q%4LyDI<noUF(m6cuMrNpH+H;XOJcS6ylpm9KxOSAX86s&*XG553c% zsvCFfntdgzwj`h*?)0d-6tDLA9p(EwQ~UGJEC2To%C(-LSo=|ite-t68b+e!qtmBE z<F3o{v9S@cu6aT}KGZLssQO7a^&e8}v&Z#@z;4wXqxHs$fZBLzuMVVnRp3mAZgFR- zmcxF%=|YTpYG1p2dLlzS)44`&*3qKXUntuKE{iP%N%Gm&3DF)uBRi^(h>l=_?kpNt zozs8nu0*Z6#-{YvD|^+pp;5j4%MP`pXIOV1^Q)e9efqh<BIOF1;|jg*6CQR0pJ1@t z<v77$Uz-ye3=Z_VBJ4ZbzGLk>&i<OTy2|I&*O~i>xi#jN`JB>9`!e5}WB$)IeE&VB z2=hCa^QyUg_FUmFbAsk9KY8^t?Cvi-fs70pn$?UA?cn&p=s0G0!1#~>A|phGh>Q^# zBr-~?874ALs~IRVQe>#eSdqaZqeX^`j29U&GGb)N$e58qBcn!!jf@)^I5Kjp8M@Vs z9T_|_dSv*>_>ll05kNwK!~h8b5(Oj-NF0zrSWP65P*_bYkYFIuK*E8<0|^Kc5hNr? zOpu@;Q9;6j#03cq5*Z{kRudZ}I7oDm@F4L)0)#{e2@w(_BuGe<kT4-}LIPzqkwQXc zHL*g1Wi`=4!iB^O2^bPFBxFd;kf0$^L&Ao{4GEmpL=Fj^)x-`7p4CJT2_F(aB!EZ+ zkq{y=M1qJ!5eXv_M<kF)B#}^BO)QaMT1_;Oa9T|~k$@r*MM8?i6bUL4RV1uPT#>*c zkwrplHL*p4Yc<hD!fQ40MFNaO7zr^FV<gB(l#wtyrp%!sVb^`!;WzMcobf)g_`e1E fTA{5`rImHQVgjC3wKOL$M`l&!=H}$)$f$n+=weO| literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Yellowknife b/env/lib/python3.6/site-packages/pytz/zoneinfo/America/Yellowknife new file mode 100644 index 0000000000000000000000000000000000000000..d9d6eff70d7aadc55e202b7d31ccb1359870b81a GIT binary patch literal 1980 zcmdtie@xVM9LMnwV5v9M&sG=;nrV4L+;N~JkeL;9Azc`rh(wqM-cBn>-OUp5-NG^c z;=OXL&1AW1{?STO*J^Gw*YKaRxpQm%Sh#*~mJO>-Hj8hc=jY%4>aVum?(X5-ZvOLm zzLIU}9hK%Eccc3kKHPry;qME?eT|hCnvVzbHM{JOp6s?lbFI9sXKKZv$(z!h_U0{d zQ_%3BonKR8ZYfz|3zyc*g4rRvFgGj<e_O6a-_Dle>?~b$_GelA?q$98*m+r!S!8c} z;e;&R6xd}uKQXsgePi!f|BhK+@R1FdstHeD(vr|_Q!;T@OD`wQonx<S*~K1-j2zPF zsb(qfOKI%oRZ@}ax0QS5%gXo;yQ+Cgs=^Q1yDGnw)iX<Nb<s6b{ZqcJx&DQzJ@>1< z`>Qi%&1gvPIdjyk9r{J@J#x^j>;6>l>pvp*H;w88od=|@{3Wfg+bZ?HkK1^p*TgR# zw+(YTOv9-`yZ*{L)A;I}`tWD%lIR`KN8Vd2P3>vjFuX(_UDK$|nOxae9MR;a?<M)? 
zJZ-5uCoMme+1AD1nbz^ScFXj*X?tVRJ~naOwD)K2<70!Sqw}=gI<m_=k+`7S1_oq% z<fJ~CN=xUQ!`c;Zl&&ioO@$+pI^U}4ne!z5cCGIG@v7_^-fW+`FxNbtsj<(Tm^9rj zMYd-sYo4w8%kJ(zZT1vgvU{6`&EDzjnyEf1nTfA;U*TcdH};`EH<gj+M~><K&s(Lp z??pZEey#L%bm_rYipAvR<mS!DyWv0h<6fD&Ab9JwoEiRmRuCLjlOF^hjOJMXF9nfU z%p^7^O=5%lY;>QxI=+(bhdKV|d?)|K+n(~j;=c_7-Z=8kk+<%3?;Uyb$h+rfynW>T zb2NZ-z|jKI1JVT2h1a!#^no;jbb_>k^nx^lbc3{m^uy5*(h)~XNKanZ6w;N~wT1M> z(HPPh(i+km(j3wq(jL+u(jd|y(jw9$(j?NQ*R_fC$<Zj%DMzbFuN=)H-Ey>x^oul% zbd0o&^z3y_BVBu4+eqJD*ErHSN9#!MNb^YdNc%|t$Oa%gfNTM>2VS=c$S!!@HX!@p zbsK@~1jkk&d%>|8$Zl|K2eKa=8-nZzvL(o#Ae(~hiq~xmvM*k@G04t%-PRy`gKQ46 zI~?1C><`BVAv?seMaUj;Y!b3dUbjuiK6%|nAv@)DTZQZuvRTM(A=`!Q7qVgaf7!8+ ZTe#J3+{zLwqm|KES)!sMS`m$f{sy2q-OvC4 literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Antarctica/Casey b/env/lib/python3.6/site-packages/pytz/zoneinfo/Antarctica/Casey new file mode 100644 index 0000000000000000000000000000000000000000..d0bbacc8a9647d6d31b36a561c6b5922c71d505c GIT binary patch literal 311 zcmWHE%1kq2zyK^j5fBCeP9O%cc^ZJkKe;mvUbom3y!%gm@LM@Y!Qc7pga|I{hNva{ z0`>p@|7T!iVq{`wVq^rW1%rYHkmgw%7&sUhKx|zD0|spa3kGdNLy$a39b*Uyj`#sK wm4N|h*ME>>KwOZ6Ks3lvAR6Q_5Dju1hz2<jM1vd&qRDnBmkrSAcDfc^0IIGt!vFvP literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Antarctica/Davis b/env/lib/python3.6/site-packages/pytz/zoneinfo/Antarctica/Davis new file mode 100644 index 0000000000000000000000000000000000000000..40a992664e2b228b0f6ab99a7aa655df53015dcf GIT binary patch literal 311 zcmWHE%1kq2zyK^j5fBCeP9O%cc^ZJk^EnO--`wvD{BvK;;B`yHz<c^H2fvl84E&wH zAE^KT|33pG10xeN6EibVEf^#ffHb=WFmNz1fY`bQ1`OH;=0I!;l4k_r5E2~m18gb- x1JJJjAjg1ckb^)p$Wb5wau|pPISxdF90;O8js(&5K(%B!l*<O_bUR&hE&$zhJeL3f literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Antarctica/DumontDUrville b/env/lib/python3.6/site-packages/pytz/zoneinfo/Antarctica/DumontDUrville new file mode 100644 index 0000000000000000000000000000000000000000..06863534c4c733f9a1a11800b614ec901e64cc6c GIT binary patch literal 216 zcmWHE%1kq2zyM4@5fBCeRv-qkIU0b(l|5w*_e>HJp4%`r)c^ngpMjBqkr5~k20aTH ySab~x7_<!y!2A#r4F3T&18C-dkR>1*WD$r4Sq74>2eR>6$Ylex+D_NdfC~T|oGK## literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Antarctica/Macquarie b/env/lib/python3.6/site-packages/pytz/zoneinfo/Antarctica/Macquarie new file mode 100644 index 0000000000000000000000000000000000000000..aea2be77cccb5187ecacca5673cc5b42543dd3c9 GIT binary patch literal 1543 zcmdUuNk~<37)QUWK1FjV6EmmO2It{fnWjF|oLv+olA;z=ri-9Og+%EGS#Oh@5|Ruw zLtKR>HY@})gH}e1$`%b+i->4ggar<-|GDL=MXMJ59`~Nh<^L9scTR0XTT8b5F+cN$ zi#3{y=TJ4bzGv-vZ6faLfP4JlY+$P_Jm{9iu?|^!R4>b?M%6Q%FH(0&Eq9D~6^)4Z z@jUejiWbM-D0R%|s$b@k`aO?S|ByH8e|u2_zD((={`C_0;<W^IEJ*OZ`x4UpN>=v{ z${N?Cgtm9d+Suz7w!cNfmd{CeZl$dA-Xjt5hc)7zQzET$jT}CtQFBQe?Jm=psQ`^V zny4GDj7eO5hsNt;O(^M;jcp%gQ^Hx<Ts0$GybefW@`xmUyCX@T<}~?fucXXOYwGxE zNxM0u={=2-(e2jE6E4X*)~3#;2F<R!AX^IybX!iVY>$c6obYPNSrXkbpC-94-)i3Q z2i-aGQ1d5Kv|wmX3i|`qb$(ikIzH>JBSTW${6tI2+_Kv>rllD-v@Etq%N;dR;de_a zz7I;}`^!=_+a=YHPD;()7U}eOJeHT`ZTUF<!N=Fv{FU}D4v3G))ltiT<;}0>mG=^y zo|PrVW&3K)t+ZBBolXu~mbG%e_U{HYnX|S*KYoA7{5a1UN4P+Yf>;GH3t|_<Fo<Ok z)1Yk|4;aUcbpZ1i_Av~ESjaHZrm+!Xq)lTb#7vvUPKcopOBtpzY-JeBu$Eyi!(N8L z42u~i+cY*ajAmHPFx#fFn_;+3V>!fhi0u&LA=X38XV?!Z08#;?1V#;vA{bRL%CKqb zU=#wWgi#8l7Dh3UY8d4}>R}WFsR&XMqb5jEjH)1I*)(-Q3S(3TDGgE^q&P@*jPfA$ zF$#oK$S4s~Bcn)2m5?$abutRIX)0xu%BYo5ETdZ1X_S=zBi(=kld~(`=?wS@y2w-r literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Antarctica/Mawson 
b/env/lib/python3.6/site-packages/pytz/zoneinfo/Antarctica/Mawson new file mode 100644 index 0000000000000000000000000000000000000000..5197dd97b9f871f533ff418b1870e593530cd501 GIT binary patch literal 225 zcmWHE%1kq2zyQoZ5fBCe79a+(c^ZJkBL$-duUkqE_5c6>XJBMv0t$gaNCHTmTL1%x ru7LrAwt*QCo5JKsu;T|*577AkAiF?ZkbNMU6g#<WfcD$znsNaEFL)jU literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Antarctica/McMurdo b/env/lib/python3.6/site-packages/pytz/zoneinfo/Antarctica/McMurdo new file mode 100644 index 0000000000000000000000000000000000000000..a5f5b6d5e60f15ebdbb747228006e8fe06dd4a01 GIT binary patch literal 2460 zcmd_rdrXye9LMqJpwhVKWemws!@O`gTvUW2LIninl6fQ~qi>S%pdpkbbb(AGa|~<R zvNfZU955{rNzhUdL#TBjTa#+pH=TL5LUT2xEbMtd>%Z3evp;&y&hz^1>^y&*KR)l{ z$}cI3HU4(pV12@yYnS!rJbt5fYi)0HA6U92udV95d2r!*Y0v93-wExLL-8$U$EX(R z45~9bf9%!6!=>htkDBz|!5s6wb8bEQ_7(TB6CY~Vwn2Ay<32sU?uff*^EN$^z03W6 zVY&7?YuqO@>!r_M-2D+{(towuJoVKp$zDN!sMo$d>V0XC_%!^kzJtfrujY*UcQ$H3 zLAT!ca;=U{+O6ZNigkR*GddwJLnn+BX<&S~242`KK~vKusBg6dJHjNmX_4IYov%#V zGD~jmz9=EZJ~H|B0hy9EBvUuG$t}@d8d~y+P7C-!Z*@9ky2k>Y;df4EoSmjKhk9jJ zkEe!x+$7<9LnY#^trA%`Mx6C)C2GwDi7w5On8hDRY;K9p4%;hp;+E)K?|O*~jMTWF zTXf!)0FD2=P7}VkthXI3)!V!LC2>cN-qG-rB(0CpJ8Mo!^2%(PU(hZK5~F2d(o2#u zX`G~nR7vW!UnFfLUlv`h)$}uol5x6N7at3eB^?=>*<eW4&T!4HJ}WtuetK8Yh%S{d zxjRjDS+cLVruXRb$rmNp^G(gYJ|Ope|E%VH+a@bcnwsDBqTIW`R#rB*>V3}_OTly7 z_5QL95wk)c$O)HK>A6}M<tGo$jMvrvm!xRyaw!`2(c;VV#5_HuC7(@{H65So+SY6G z(9Q!|`ocG|u5yQ#m3K+Gw93OPzSc*Qx6As3qgpYgLLQy?nr<+1Wy7yeYo*3Z)t80( zSa+~IemGU1Xo=G&cdwG_twCC|DMg+#e^U>K$EbgLd5#|QpS}#EanPEBvA^Fj+A!Ye zG`tL>xtsH$0f#?l!#Z=%%yJpo`OCQ3rxX{@84ibyb#wju1YfnjW>0YVuZON%*Zhvm zW@M7cERks<^F$_!%oLd_GFN1>w$^Ns=_2z*CXCD&nKCkGWYWm2k!d6IMkbET9GN;Y zcVzO&>}{>-BlG9yk^oyP14s&x93V+RvVf!k$pexIBojy~kX#_iu(h&*q{G(A2a*t5 zD<eosY^|IiNkOuLqy@<fk{Bd2NNSMWAjv_pgQN$^50W5TD?><%kQ^aNLb8OU3CR<Z zC?r!zs*qeE$wIP)qzlOxk}z8<V@S%7oFPd=vWBD$$s3Y5By&jWklZ23L$ZgY56K^r zKwB$=ND7f0B1uHDh@=t8Ba%oYlSnF&Tq4OtvWcV<$tRLfB%?@5ZLORlNky`Xq!r04 zl2|0ONNSPXBFROvi=-FHFOpy+!$^ve9NSt+MzV~g8Obw}Xe84}s*zkH$wsn`q#MaM zl5iyBNXn6%BT2WlvW}!3$vcvGB=bn>k=!H6N3xHk-}s-j-<yAi$e7sd{1jJ2R)TY` QGddz`jx)v?9W~qgPuMCAVE_OC literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Antarctica/Palmer b/env/lib/python3.6/site-packages/pytz/zoneinfo/Antarctica/Palmer new file mode 100644 index 0000000000000000000000000000000000000000..43a01d3e62d45456e5f7317d2a9d17a5e746058d GIT binary patch literal 1432 zcmdVZOGs2v0Eh8AI_aQEg^Sc)R@%#~qvK;V9Zx>cw9Ir!34<tzbTiVTHi71%B%%it zwGiw95n+0?MJ|jE6-f`MLBw1HK_L`@^uWfnZs)t^s#Ub=Uhe!ZGt8aEeE-0nrh}Q* zABV@h!^IIW7xxR>%**8E+t%cGmkP(1=<v|6nEHBDejm<J(+{)t%wV1P(OoNlc4iCv zbe4?1Ua34O!7Gc!ph}3^s}n-IRpNM&PU`ciB`+G~((_4bS+8F%Z<$dm&L+#`)=053 z7$H~XO^eml?_^5iYmw@+bn4W7vF3AFuN}K8)_r@V*FU(YHjLiTY1g{c#=9Llz5Te_ zbg5Zy-hV)ewv#fWxJ+g4Ym(l=QIRF9WOi(+$cY<}xltp^XDyJvXC9IFK27J}`XIJE ziPHtAo{Pep6WZV0BZ@jgTGl;PTMymQ#olgJQgKn2#-CAT8U3>CON%OBbU~I+M2Lz{ zN94A_pr{Nr$f^NLZSTvMJ03@i>Y8@Fv$IFl6p!g$$IhwRv`JalcvuBuU&;F1aZ%s< zQ8$#FP_4FYTM@3g(;ew{&5Qcy)9p5YkvVL8NLy~(9(n2V*!J5uuKC<s=6?2_xlb)m zx)4^n$TFALT)#_jxn>_@4E#kO;n{DQU$<>KaWt(Uy*Qd?kZzE6kbaPckdDx4i5t@s zkEbc5E2J%?FGtfD(izek(i_qo(jC$s(jU?w(jn3!(j(F&(xszm6X_Fa6zLRc73mde z7U>pg7wH#i80i>k8R;2m8tEEo8|fQq9O>NAw2t(SG>>$Tw2$<UYyjB-vIS%h$R?0o zAlpFp;b=C3?8MP*1=$O-8Duxec98ub8$x!3Yzf&DvMFR&$hMGuAscfvJ43dHttO%W NVQ(=7W{G}p%r6}>oFxDN literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Antarctica/Rothera b/env/lib/python3.6/site-packages/pytz/zoneinfo/Antarctica/Rothera new file mode 100644 index 
0000000000000000000000000000000000000000..56913f8a10cb61305ddc4025c51f72e56b195781 GIT binary patch literal 186 zcmWHE%1kq2zyM4@5fBCeW*`Q!IU0ZjFOx1q{r~^}85kLXJRtc0|LO$>7F`1a23-SV but*3AM*aZnfDj;~L4bhqTsA<9?2Nepj*AyY literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Antarctica/South_Pole b/env/lib/python3.6/site-packages/pytz/zoneinfo/Antarctica/South_Pole new file mode 100644 index 0000000000000000000000000000000000000000..a5f5b6d5e60f15ebdbb747228006e8fe06dd4a01 GIT binary patch literal 2460 zcmd_rdrXye9LMqJpwhVKWemws!@O`gTvUW2LIninl6fQ~qi>S%pdpkbbb(AGa|~<R zvNfZU955{rNzhUdL#TBjTa#+pH=TL5LUT2xEbMtd>%Z3evp;&y&hz^1>^y&*KR)l{ z$}cI3HU4(pV12@yYnS!rJbt5fYi)0HA6U92udV95d2r!*Y0v93-wExLL-8$U$EX(R z45~9bf9%!6!=>htkDBz|!5s6wb8bEQ_7(TB6CY~Vwn2Ay<32sU?uff*^EN$^z03W6 zVY&7?YuqO@>!r_M-2D+{(towuJoVKp$zDN!sMo$d>V0XC_%!^kzJtfrujY*UcQ$H3 zLAT!ca;=U{+O6ZNigkR*GddwJLnn+BX<&S~242`KK~vKusBg6dJHjNmX_4IYov%#V zGD~jmz9=EZJ~H|B0hy9EBvUuG$t}@d8d~y+P7C-!Z*@9ky2k>Y;df4EoSmjKhk9jJ zkEe!x+$7<9LnY#^trA%`Mx6C)C2GwDi7w5On8hDRY;K9p4%;hp;+E)K?|O*~jMTWF zTXf!)0FD2=P7}VkthXI3)!V!LC2>cN-qG-rB(0CpJ8Mo!^2%(PU(hZK5~F2d(o2#u zX`G~nR7vW!UnFfLUlv`h)$}uol5x6N7at3eB^?=>*<eW4&T!4HJ}WtuetK8Yh%S{d zxjRjDS+cLVruXRb$rmNp^G(gYJ|Ope|E%VH+a@bcnwsDBqTIW`R#rB*>V3}_OTly7 z_5QL95wk)c$O)HK>A6}M<tGo$jMvrvm!xRyaw!`2(c;VV#5_HuC7(@{H65So+SY6G z(9Q!|`ocG|u5yQ#m3K+Gw93OPzSc*Qx6As3qgpYgLLQy?nr<+1Wy7yeYo*3Z)t80( zSa+~IemGU1Xo=G&cdwG_twCC|DMg+#e^U>K$EbgLd5#|QpS}#EanPEBvA^Fj+A!Ye zG`tL>xtsH$0f#?l!#Z=%%yJpo`OCQ3rxX{@84ibyb#wju1YfnjW>0YVuZON%*Zhvm zW@M7cERks<^F$_!%oLd_GFN1>w$^Ns=_2z*CXCD&nKCkGWYWm2k!d6IMkbET9GN;Y zcVzO&>}{>-BlG9yk^oyP14s&x93V+RvVf!k$pexIBojy~kX#_iu(h&*q{G(A2a*t5 zD<eosY^|IiNkOuLqy@<fk{Bd2NNSMWAjv_pgQN$^50W5TD?><%kQ^aNLb8OU3CR<Z zC?r!zs*qeE$wIP)qzlOxk}z8<V@S%7oFPd=vWBD$$s3Y5By&jWklZ23L$ZgY56K^r zKwB$=ND7f0B1uHDh@=t8Ba%oYlSnF&Tq4OtvWcV<$tRLfB%?@5ZLORlNky`Xq!r04 zl2|0ONNSPXBFROvi=-FHFOpy+!$^ve9NSt+MzV~g8Obw}Xe84}s*zkH$wsn`q#MaM zl5iyBNXn6%BT2WlvW}!3$vcvGB=bn>k=!H6N3xHk-}s-j-<yAi$e7sd{1jJ2R)TY` QGddz`jx)v?9W~qgPuMCAVE_OC literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Antarctica/Syowa b/env/lib/python3.6/site-packages/pytz/zoneinfo/Antarctica/Syowa new file mode 100644 index 0000000000000000000000000000000000000000..94a9d5a282eab90fadd38e144b3987bf6f4e7da6 GIT binary patch literal 187 zcmWHE%1kq2zyM4@5fBCeW*`Q!IU0b(^NkS<_5c6>XJBLm^1wjLfPqEVz<@#9z!=OA cA;HKWP&0rg{s$Qi0tAfbvH@CTr)$gw0Lh>jKL7v# literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Antarctica/Troll b/env/lib/python3.6/site-packages/pytz/zoneinfo/Antarctica/Troll new file mode 100644 index 0000000000000000000000000000000000000000..3757faccb282a975e3130e5ed9a84bc0e5881a79 GIT binary patch literal 1176 zcmc)IPe_w-9LMoznlMJi*FZe<#~^~Dsm~_TMGe$#Sk5w+nP&D!r`B4jC9bjsK}3jr z4?%bcib5h{bm-5aLlhAPGP+1~h(MttLV^nGAnW)3)vZ%>=o!D~^=yY7p3ghh-X9M- ze_Tt<FC4CrIXo{tXKuC2B2I0%K<m0!O8xrx8me2AaKQ_Wta~ew`FGNg|3n(HQ?hmL zhHT5+md5GF8XX^#raRYldon3I#?EW=?n}Be(W5P!6B=ub=&s@>ZS`-|)&*DFio(+N zYK89kwn5q-eASK_pX|L>D4q8{$-eX#>AIYi{arJ1VDzIN-11Nkbxvz{$%OP&XEg4; zuf3~Pdp}&!zQsZ9pGxWBH!(eOt3wmd2ju9*YE4eI$g#c>J$|852AZp6FkUP}fi-d> z^g~V-Ix<{dq{E+QWn}q+j=X#(srlD>>h7eR&OXvJ6Hj$Cb5+l#Zfbfwt?67Y=j40x z{`He*-mAS<t@ZqR>0bNQTk5*bCO2q~>o^{dJ+Z$S?ECNk1%|Vl@sI(L5s@L0F_A&7 zW>jQYs~Hy=7#SHE8W|fI92p%M9vL4A0Eqwz0f_+#!fK*G!mye+kU)?~kWi3VkYJE# zkZ_QAkbsbgkdTm=kf4yLkg%*KE+jA{G9)x4HY7MCIwU+KJ|sXSLL@{aMkGiiN+e9H zi4zIbY9d8KMPfyQMWRK*MdC#Qw*HTZeN`su8rR%|HKjp+w4$uC%q<W2qXE}GRrr1a D*BAYi literal 0 HcmV?d00001 diff --git 
a/env/lib/python3.6/site-packages/pytz/zoneinfo/Antarctica/Vostok b/env/lib/python3.6/site-packages/pytz/zoneinfo/Antarctica/Vostok new file mode 100644 index 0000000000000000000000000000000000000000..9fa335c4478a8205ecacb645c636ebc07ad714b0 GIT binary patch literal 187 zcmWHE%1kq2zyM4@5fBCeW*`Q!IU0b(%ZSc~`v3p`GcYm&d0-Hdz`&wwV8EblU<Ton bVB`;|89)>NgNz1o2^i011GLCa*Nh7QJb@Xr literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Arctic/Longyearbyen b/env/lib/python3.6/site-packages/pytz/zoneinfo/Arctic/Longyearbyen new file mode 100644 index 0000000000000000000000000000000000000000..239c0174d361ff520c0c39431f2158837b82c6e0 GIT binary patch literal 2251 zcmdtie@xVM9LMqRfk?4(Z-2m9fRIS1cJdqi5tq^l%)GOd(?~^75wR}H%NV3anRBi& zeCt@|$f@O+HN*a(`~$6(+GyeBFXpUVTdb@!x<tZitUk|gYyDAM|Mf@T@9z8f?z{W` zaofF~fu-wea;(1{vE~UMju!Lb{`Tk0rOnl6wO!a))qT`o)pKROoPOP@;a9!#$*w#( zvn5^5h7#o5s#v*tBvr2+^2zrvd!&DRhWyZwB9U5=AIlwbz4W>aOz)E$t_w2gJS9I# zM29j*%24E-jtTFMjP=izak;-}eA-n_82h<8qfe`I;9VWxcSP?vzhCb>u~QR|9haoT z9g_UYKAF(0lCtS}NezbNuH~y`qAwt6g~c+-T_EX6F1h=*@#2c{s%tP$Cx4Z$Q+gA0 z>Zw@0r}L(|4}PoDT0hl{tsiUVhGUvl{ibGDT#}qnr{sFNByZ76lApX+3UV5xV7N(U zB(~~|%PVE(uk||XxL5A|tXvD*E7j9AOYhrOq_f+SbWTm07Hyp=_m{+|w>nYgreD!w z@354_e59pmUr1^H*D^2qeVG^TmIwM?lldKQh_B~8^|v(3g2M;&!MZwmsQCq5`0$Im zD7Z$;rUy0PE7ir$1-isNMVAa^X?c8!lwTa9j|@(hrSII(Wxa8-eE(>v=)5K?ng*n@ zH7r$?y|Qxice-l!QCVHlqtz*UWR0goYi@a4*Cwm3{bsk;4u^DIccVUfIiQanTBgAd z*URJEJzCdZCQsC+=#$&>W&OfJ3Dr2|sq6`|q4;NcdbB0=nekd5`BEB24Qa!flhW9K zNuPPET{echbkm*>baTgEeYWwSHnlWqlq1R!J>nnEsF;!e{b^Zo<IEq~N=kK%u&hyH z-TLSCk0*aU_xS^sx44W;fHxfGKywrL_u1?)U$kd)(|*UYeltg?e^L;ck*%2$GACqG z$gGfQA@f2ehRh6^8ZtLza>(qE=^^t&CWy=snIbYrWRl1%k!d3Hv^5ju=P*-bs>ocC z$s)5wrfX~Fi%b}qF*0Rj&d8*ZStHX%=8a4onK?3bWbVl1k=Y~DN9J#95`bg?Ndb}r zBne0skTf89KoWsu0!amu3nUpxHjs26`LHz!K{8@%Qi9|JNeYq`BrQl@ki;OFK~jU{ z21yQ*9V9(Sevkwq8L~AgLULqll7wUlNfVMMBvDAFkW?YLLXw4K3rQD}FC<||#%xW> zkeu0?q#;>D(uU*>NgR?nBy~vckmMoRL(+%j4@n@BK_rDp4sA^mkt`x<MDmCv63HZz zN+g#^GLdW|=|u90BoxUgl2Rn6wkD}aR&7mMk-Q>_MKX(|7RfD=TqL{J|FFApCdJdT UiL%?Dn~|T9<@RT1VP<aJ?<;p%>i_@% literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Aden b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Aden new file mode 100644 index 0000000000000000000000000000000000000000..e71bc4e802cbdfd9f90598029e0fda04c4074ab3 GIT binary patch literal 187 zcmWHE%1kq2zyM4@5fBCeW*`Q!IU0b(RcW&=_5c6>XJBMxV9-7T6k*UZU|{j_4PnqW jFa~iMLP#+32UrIK1JK0(AfrKC0>*RM04=i9HRb{Uf07!( literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Almaty b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Almaty new file mode 100644 index 0000000000000000000000000000000000000000..49a4b4de7b31924d138a6e22aa0a16af1868cf1a GIT binary patch literal 1031 zcmd6lPe_wt9Dv{H{KGOBfzEAOX4YKGoYSr4zkXdJ!D`3|iGqkygn|$mL4|^O>ky$A z@h~ttM3+P_;o%o_^iUvOROn!b4kd&K#dN*Tm%`9fx89HUecrv__QAL3N#7Xgi<tw% zV^5fjxSj01BX)V?f4n?5y%sGGJ<V2B&Uk&L?`nLT-(FOHU(NcL{NBLAKsGp^(W<xo zT0PaPN1u1;u_vusb3dZTZ&qn-_hCKJwxe~CH9Z;ps;3Tp){uK!LhC=Je)UHx{AeN- zUi>7dKYozN?2MdwH7yO}<8t=dm^2P$Bzm`Bnr^++*x&<g?ikXRo@*MfzOIQ_lP0#4 znhai*<Vrx#Z$+hb{<pR*9g+6ee(89>E}dgra$)j|$h{T0IQmP6bGe*x6&C!x+(mg; zaY^2X{2T1u#$2wm|Ao1fGIoFGp{vxG^lekNx9**|?8*M`bYDrENHlKeIXe?P#<-l} zY@H?j4^lREe`en$ha89?j*$cr1(D^@3WG?4h=a(32!u$4h=j<52!%+6h=s_72!=?8 zh=$092zO|uL&QVmJG22nB7lSdi2)J>Bnn6vkT@WLKq7&J0*M6@3?v$dHXKMi4sAe? 
lh#(<BVuAz(i3$=HBrZr`kjUV$b@E?^=4rPHb~YqDe*g<+-Uk2x literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Amman b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Amman new file mode 100644 index 0000000000000000000000000000000000000000..c3f0994a75301237adcad05cb13a085a59359c70 GIT binary patch literal 1877 zcmdVaYfQ~?9LMqhkz7v4-2*e1A|%JDj*?3*p%SID94#SVN-dY-NXz{a9+?L*vzZz3 zfY{9SfqUiwJ@B9F=K6ITv#{A*H{<vIo6WG9ht665*XgXDeczv8L0O64`t22NzTt;g zmHFXwn>FTfqVAU6BD7Rndg=qMqLZc7s|U6_e6G5$b__&3f1{o|OYAn6drRBeD|Wm6 z;nF^I(C&8VrgY!c+U`;NMS8B&KxF8lM9ppp^vb;;y(d1fql-^Vp8@CXzR4BRFJg}! z6R}uh9#07Le|lD88%yi~r{<`)e!M+!M~3>UkK1wUizPmPpC%Nii$AqN291l6#HhWJ z6z!J5u8oqsY>Onn+@nJtuh5~_&&jZcLLGi|x1`k7%7`sBGP0&pMs3fL(M5TZTK-<g zWQBBWN|BE91~n};PSZVuH6yBnW;AutOt()m8$auWm+fU@{XNaP{8qBJev(Q1Z%9t% zUCBLK7?_;6OM<z_bV}#7lIP#1d0%#Fe#dIfzrR|izAey#GqZHswG^GcbF9ud(nAYZ z_R*O;hKtN-sk4@Mm)YsjQdIC&iu<;ZIl~@FN%#vX?Rrs4pIwo%u!mZ9>7dMgazW?q z+br`h*6D)H2eiCygI1Jp(uFmpx@c0lhKe$Eu`i&NStYWhtzWCW<E83Dj#PVMrTTV) zEN$}0vXh-;dE;wYvF)9#Jnq(2RkvmJ)+V)@x&HPJb2;zkVZ6V8{^i;1m*4;VS3iAg zSy6}0L0Pf!uI852Ez)ZJ<H<K!*+DBUZ8DD_f8}!Vw1WR|t{<KMu&l4<#o6*Jr;D5~ za>9=0jFD4zH0O+*G;-F+X(Q*2oH%mk$f+aej+{Jl_Q>fY=Z_?SWPqfA<bWiBWPzmN zX!1Z3K{7#7L2^NoL9#*8LGnQoLNY>9LUKZqLb5{Aax{4%i6NOGsUf)`$sySx=^^<c z2_hLHDIz%{Ng`PyX(D+#nnaOI9Zjl8u1K;-wn(~2zDUAI#z@LY&PdWo){Z7^ByUHP zIFdP%I+8n*Jd!<<K9WB&0muv>Q-I6?G6~2mAk%=%gQJ-UWF{QVR3LMKOa?L=$aEm{ zflLT8BL<5pLFNRR6l7MAX+h@2(M$|7Gmd6zkhwu72bmpYdXV`+CJ6tt8FHId8t7-A YyxD_%NxsB5f1)=#A>MpUic4^R11fmc_5c6? literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Anadyr b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Anadyr new file mode 100644 index 0000000000000000000000000000000000000000..0e623cf746b29bf73989ffec96f26c49e737ccc7 GIT binary patch literal 1222 zcmdVZPe_wt9KiACbz6-U6qV^*EB~a+)@++5mO7Vz65BL_oeJScS<s(DM2A2^qk<rV zf(~U;K_x>XcBr5*@Gzo-M28@-f(MeIOYBgP6|Cp`u3*urWAAvL&%1Z;2HWqu)Sg2J zqw252W3I3`I?dvI)u`E~y#9%Yx5s8m1HSO;@?E#e+yj}ih0EdckC)=!xooClwlwaW zu9h|TR*HYZDQho$k#)l#r1HpXsoMKosuPc;rgciz2d_)5Z%pczoRWYutplI0=?(K& zbM+_Vx%yYbdSmve4n7#vo9+zg(3K9|a5k(rpQzH|p%pqZ7?bF6uQaw-O4IJ|vc+E_ z&5?J~{9|5PDi(Ch+h?-%+Z(-YIxVepPj%bftZsjDQ^(Ft>W=Z#I)321-hSqWboLz9 zUHeC+yEdtNy7tKqSA*`Y>6YG43F&jxN#Bc*?0oN*#QkNGoQ>xCFXoSp<nwuDIZ7N( z%RF3*iXMyIMNf)f`19P{=2B{M$o!ZpJ)@jTWwqs2>d{NJl-)9W;rj1$%iJ@bQOgV0 zll>_bj6}_9GON+72p^@C#ZfqAPyNGWzy5lg`Eq$Cj7*u!Od6RsGI3iob!76$^pOIP z3Xl?z8jvE8Dv&aeI*>w;N{~{JT99IpYHUq8NIghFwx%MaB%~&!D5NT+ETk@^Fr+f1 zG^949IHWqHJX=#AQlPD=5GfI<5h)U>5-Ag@6Dbs_6e$&{6)6^}7AY607b)1*RE(5t eYidS{Myf{2M(V~9GyDIea8H~0AH+hDsOKjf4;u^s literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Aqtau b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Aqtau new file mode 100644 index 0000000000000000000000000000000000000000..5803a3d3e3fac98ec8bebf8a7cce414a79e137a4 GIT binary patch literal 1017 zcmd6lPe{{I9Kheq>6jv(+-zyn{w!VUTHBa4ZC#?ngh%`!h=O-0LGvU=9Rh<!5Oj$0 z6dod@LqsU#Nres`Mc<*DRPWHGO9$(q=vd#+Z$*KpZoS8QpSSnh2HWT3T^Soqt9>IZ zH!Ma*7U$M`vb}Dfaf@@6Vq0`Q6$vfGBRk)pN55{(w{Irm2iI=T$Ck_X(3@MfV|LVb zKDulV-@Rz#6N9$vYOn1s#q5znvpt%u*kiG8_IT50n+P`P#E(6F;>)h-xie>aK77)> zs~bA`?!7+wazXdaywLsSr+Q%OiAi0*Z%$o%>(Y}C-RYsnE_3Oo%XUn<TzbUiDxS;7 z#!dcX&JFIC%$b#_JG<F$3a_$eXsOi<KZuxf&$dkQ#*R5(ZgJDqYE=d54%FAl3^x2V z8$-1hp~n3)7*uK?E`Ni{7gfDd7qki~<r&q?9+3U_P3;_zr+?mA8u3&zm64T|mE&ua z3NUI%cJCj2Z2$V0{FEwQAa1zE55$pA@dR-N@da@P@dj}R@dt4T@d$AV@d<GX@d|MZ z@e6Sb@$6GvLwrM=`xNgG_YnV(1|S_kT7dKbX+ogr0@4Pg4@e`BP9UxLlwKgs_>^uS k?Lhj0Gz94g(h{U6NK=rmAZ@{Eaq&O)6)s49!+qKCPt^<F0{{R3 literal 0 
HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Aqtobe b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Aqtobe new file mode 100644 index 0000000000000000000000000000000000000000..808a50261335b48599f0a77e448de23b301ea78a GIT binary patch literal 1047 zcmd6lJ!n%=7(j2*Mq|>!McW#y&)?TJ*0!+;sitXMf<uLm5-3Ou8ER2*5{rU`N(nBC ziw+_V#fpPCq@a@u9UKe&2DhRvT|{tD2c?GeJ1-pqo!q>Sd(X?gFM+&sN|&zAq}+}X zw;d)UZIk=GcWimt`QXZ%rCMR1f3Lc~W5sKaE_>~NelB)={ZZ{)_q_wFH>-(v6+QTB zR=b|g=%M=)dU)=Fde?^a$Yie`9ZTr3T$>(G*7QW;yPk}HQ9lwF|JPsX-q<Yn+*&I4 zeEclEAJ!!KdPPn>UzWZ{&*XIFq4eLV$iVch3{EX->iTUxGqRvV<5QaMn$}EeR5P`r zW)qW={nV|)n|V3=KBni^J;}ZBW#nyG@^|;h=#vd8T>d8KD^BO@^|}j%TUuMfc13nI zUAv=AADR!ikJvkTZ12Kl3$E3<qPVDYrE;i^)!Ev-Q<RXMHlA!e!|B+?l1mN@*-YEa z@MF$}g3-7S*3v)7`Mv!;`%iTwK}4~QEQm0Ow16fKA`c=EA`v1IA`>DMA{8PQA{QbU zA{inYA{!zcA{`<=pvi{>0Er-=g#d{G5(Fd)NEnbfAb~(4frJ8y1riJ-8b~;hcmi5L qkca|WNRXHyK|!K|gawHU5*Q>hNNAAQ;Ji8cuY-%_EXaIcI<^Iu9Nj|z literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Ashgabat b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Ashgabat new file mode 100644 index 0000000000000000000000000000000000000000..046c472827eb47d46e31cd69611cff04e45400c0 GIT binary patch literal 651 zcmc)Hy-NaN9KiA4rIx8fOQ~Hi_Ii~B5-9}|4iOCo4hKOrI0->(*bvyzR{aMJfm<}V zv<dYGG(3AlOG`^bH54V+?|UW$O-((=^L&p-@3{M%yR}nUkYA%$cbE)YC+B^Kx?Chr zSFh@=Lcep|7)act;^FH|{Qc!T@%Y?G-Z`nk+iJsZ9+{!bJu`e(F(W6NX7pg)q;{9h z*!G+mFWP1zA2XBbmYK4j%yjhLIH73CdHpQSJbaXA_v_`F=XqiUy4puD^yBF62?RuP zO??tsDo9YoEn8ucwUWd-YiGyOo%VnDwYcF*I<u%Vr!&h9#M0rP_>21oins5}^hrHP zC@gG4L*bzi`Bf$g6@`mJMq#7SQTQl?6h;aqg_A-`VWrSgcqzmbX1_{J;iiz&n!dw7 Nut)OxIacPgkuUo>fWrU) literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Ashkhabad b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Ashkhabad new file mode 100644 index 0000000000000000000000000000000000000000..046c472827eb47d46e31cd69611cff04e45400c0 GIT binary patch literal 651 zcmc)Hy-NaN9KiA4rIx8fOQ~Hi_Ii~B5-9}|4iOCo4hKOrI0->(*bvyzR{aMJfm<}V zv<dYGG(3AlOG`^bH54V+?|UW$O-((=^L&p-@3{M%yR}nUkYA%$cbE)YC+B^Kx?Chr zSFh@=Lcep|7)act;^FH|{Qc!T@%Y?G-Z`nk+iJsZ9+{!bJu`e(F(W6NX7pg)q;{9h z*!G+mFWP1zA2XBbmYK4j%yjhLIH73CdHpQSJbaXA_v_`F=XqiUy4puD^yBF62?RuP zO??tsDo9YoEn8ucwUWd-YiGyOo%VnDwYcF*I<u%Vr!&h9#M0rP_>21oins5}^hrHP zC@gG4L*bzi`Bf$g6@`mJMq#7SQTQl?6h;aqg_A-`VWrSgcqzmbX1_{J;iiz&n!dw7 Nut)OxIacPgkuUo>fWrU) literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Atyrau b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Atyrau new file mode 100644 index 0000000000000000000000000000000000000000..27072eb51cd838727233db12dee866fae3b81967 GIT binary patch literal 1025 zcmd6lKWI}y9DpxvV?$D;gP7D<jelbkTVK;Aq&5BHQaVI9Bv2?8Zm0#tNh~c?sFcvD zTb;zAXmJpS6zZgcgQMtga4TvCmo6QugQb@E`(CURoZP(Q?z`OM4SC;J8oxP}F@KG) z-mn<Gx;P)XuiLY>W&2_^Vvok&d(qHZBD(W!CAPU;Y1>F9j=h_y#9x)=`15ILe>5d0 zmd51dy-`Wr?vqp3Q*yc(motSHIh)>-bMY-XANeB5V8kZB|Fjpr?z)cIC$8h;XPbKa z!KT;W*o&)cw)4Sb+f`n+m+qFWH#u#)CsrhLXHI$s7Nqyugk;+%C6^hJ+}?=f<F{P? 
zQ?A;#TXg-eW7W$WU9Rvn>jqx7y1~V$8+y3yhHvb+E9Jx0`C6@J0u7B#4LXB|4$S7z zfge0*4*i+IpfO%WKZ8kmrpcHQYeL49oM~YX=)QkbKL_+_e%Q3u?~7MUChhg;%IeDL z;?wE~FzQG4(my!)@%uIXJ2l)u{BVsUh$o-w3gQdm4B`#q4&o2u5aJQy65<o$6yg=) z7UCD;7~&b?+Nb)4IEQ%msqP{EAq_w}fV2SV0n!Ad3rHJ~J|K-iI)StT>BXlu1L?-6 nwgc%0(h#I0NK25OAWcEKg0uzc3(l*H|FyAjK`R{W%!Ypfi5lAf literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Baghdad b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Baghdad new file mode 100644 index 0000000000000000000000000000000000000000..3aacd78b1d503a3adf7fd3a9085d1c1eed94fa16 GIT binary patch literal 1004 zcmc(dKS-2u9EZPWUWkT9i=BTmv$9jKUOn^P@>So|bUEQ2j)oF+s7NXra*&Yxf)EZu zP(wi>f>A>_=pqP0G+0D~Eh6ghTMk{C0;w>r-}AZz4$;)-<#*4^@g5xS^JK0L-E5M5 zJ7~Y*u#`RAA6EM@<#?vlxfPWDEO~cydnt%*bophUpRSZIee^44KLnL8ht09c8>VXX ziaCC#*Hrhr=0rMfYP>RYvZ2VFD&IAyi@uuLXl$@{XGG3yEoyx2m(+dg(E3G38{XcR z#Ea{4cJj5J8-1qE-F0aksL-ZhK$3nS&5cj=eD%1t99foB-h!mQ{ZKdS$%WO2dU5%l zwl2(z_hv@go@cdvVpTgH#KgaSO)vFNNoV@8bhUla?!<e&Tosj`;vMM~SI2U>TqN(n z-jyHyhbvzs{>r`;iBF5YuXCh8#PuVG{=853Wu()ddM9a5GmlFo5<)V6fBmj~GJO5t z_kJSw-{yuM&22(-@HI+8S}hPY5Iqn@5KRzO5M2;u5N!~35PcAZ5RDL(5S<XE5UnAt zT83VTVuogjYKU%za)x$@dWe2T0+0+CDKK(?Bmv2Skw!?H2P6?jCXiGhxiFGpWCKYD lk`E&xNJfm57&$?bf@H-=i;))`v##VnHCE`^^4b&0!rvMC%&q_c literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Bahrain b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Bahrain new file mode 100644 index 0000000000000000000000000000000000000000..a0c5f669628d8ecd61c398bc475cf2405d20000b GIT binary patch literal 225 zcmWHE%1kq2zyQoZ5fBCe79a+(c^ZJk!cTJzSh^-1sQ>@}KLaBZ69a=m08p60!U3dC y%YcEy$2WvQ+rR{fjX@GX5JG|-KOkBdfX4p^*#+W)>;uuH*vVxBwBJtGm<s@Gqaf1& literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Baku b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Baku new file mode 100644 index 0000000000000000000000000000000000000000..a17d1ad8c8ef1fe25c033ca2ff88b75da7627828 GIT binary patch literal 1269 zcmdVZO-R#m9LMqR+GaW}cCo3;p3IiIV(r0ZO<Vaev{^nZg{at}q(Wjv-6VfdNe3w? zJVc&5ghf%ORM??QU_#I#li&wYK~kNfuwwn*{~AOcqGO-&-+TUj0QY+>wg1>aQ2v<9 z+~Hzv=Hj(mnWvlOV=Zr<mvoi-u4Px0%?3P$kK>+2+nutvk}aQ)1S)1OWW7^Y^va2I zx^n!qUNw3|uO8~tYX%c~?SZIXm#ooMu?oH3U#zRWU-gFKciQJHPy0T)(i>kDWon)- zW@;ZUWCE`jvRe+$xr3+fyMvE1*-+yx3HMx-Nabaz4<48LZzm<{9Z}I2z0&aIsM<Ci zk?r#xDt4n=HBS0f(@03|xV~J)2TIk>^L4VT^^<Boyi8iE=TvL+2WfLoszlWjNxXk1 z?e=@pKKnpAUXID`+vAd)xvM%aos~TkSJmDVY3Uk0qq_TtRL@|)>fL%srIHP5pC_*R z{9ZMj%jLvoU-IMOaQ^ZrSX$t)7Z&Be_m|p5Hcy#1SZV{}FmHO7txzNymy-N*{*KMu z`Tb{-ohhjcgv=E&SC~H|Hfz>P>#Nj1ob>0Pzh?fq94AgWb#8O=l+&jOuxb=gBv3R^ zL{L;vWKeWagiw@Fq)@a_#8A{w<WTfb1W^=OHIgWrD55B;D6%NJD8j57WfW-)Z47Y? 
zbykf$hCZuCAVVQVB10oZBt<1fCPgPjC`BnlDn%<pEJZCtE=8|ZBbcGss*%jl%n;2` oO_9yeO%cvePLWR0P7zO0Pmxd2Plt_-|Kk(jikZ(w)26WNJFq|ijsO4v literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Bangkok b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Bangkok new file mode 100644 index 0000000000000000000000000000000000000000..8db5e8a61ee9e63164ce7f07969557fafa982dde GIT binary patch literal 220 zcmWHE%1kq2zyQoZ5fBCe7@Ma7$XS$?ex&~Y|No3kObiThHXwN*$-=;pRKURD;~T=@ z1jO0~<{$|m2qA1dP%qdDo*!Tx3=BZi|A%dBJp!UZ)`5iUfoujM?c}ln+Ha?8&IJIu C{UtE~ literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Barnaul b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Barnaul new file mode 100644 index 0000000000000000000000000000000000000000..60efb41b45ba1dc51ad67f65be0361085ba9cb2b GIT binary patch literal 1255 zcmdVZT}V?=0KoCHoNmkr3CfSPvdorE^SElRrgqAf=%zsop)v^V!=Pw{&<BMJtzIHV z6hs6eN>33N`gjPTLj}D^4`KEoQc^GVP*D<v#ybCNgCGcc>|D<MogMB5+xaiC^WgrV z_+xm?4wDfzlXXGL+y-8pp1C(TS-QY~B)za`NEEsU3ktu#OclKwOBavli;|IDY432i zwdmFk%XhuST6`g5Ejd+bl^*h0OFQOS%VHU;Eb_@(9+<Ssz0a)``HwBX^P}`ny_PHA zzETy(&#Q{jM{?DZJ2G&0NLCJAkyV$^$<_VGWOYxstbQ6#$-M_u&F)J&c=WJdQ{SW4 zZr`TY`Py|TSffKf!aD42Q{jmcy?$D%4Z|}!GCp6`-7Hr152jSZ*=e=$+L%)NCe)_B z&w6w8p=#_J)mzF3RJ8HF-kRH|n#!)~rVlrCbIwWK{OpWwdDEd|x4U$F<b-Oy7}bf` z7uo*xz3R<mGQyEFXZCXD{&wZLvM=1OyuV&fm(wXkTfg~jMPiRI>txE|7NRpL=4S8N z9f#SOeWo+XnrGsz2@#MXGs9-q@-f0;hH%(R`<D2JL;m*bZRWdWIBev=+2+uZgJ*E~ zwk7~10we?mF(5%8Q6OO;aUg*pkszTUu^_=9(IDX<@gM=&nuw5)keF;uP)JlrSV&w* zU`S+0XtpLcBse5GBs?TOBtRrWBt%;iBNC*oi4qADi4zGFi4+MHi4_SJi53YLi5CeN vi5LkPi5Usn)<lhjZENC20!Jc8LPugpf=8mqUem;X5x%F+{3RNyYCS&z3(gxL literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Beirut b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Beirut new file mode 100644 index 0000000000000000000000000000000000000000..72f0896341a76e6be611d04e0b1aaec26f6f4a45 GIT binary patch literal 2175 zcmd7Se@xVM9LMo5AciPvwPqnC280239vD9|k&-SlIenWal8Jm2PlALJg%mQJX}M*r z?3~eRuGEXlF(ks8xms&YUBi!C^pC@RR;`#K(4<u}SD)v%Kl`Kp^n1JS<NLiI+x>B` zXK;1x#$4xLM~r=iC&w%H$#ud`dwMriZ4S5Yk|RrZs-wldI+C7Y-pgpyo%dWdotJxL zD07b)8u!Sh$)BoA14>^W9yY^$VYzZ{vl;2QLq@xgs<D<bJt4AI#b!OLCziyRo2Gmy zacKoA?#e|SA6sYQ&)p)Ee;zecjvthSGpTCo{<C`8`>|?z<1w8$?lm*dzonA~)~n<m zEqC@EFtfrjI_2OQHM^-m?mC=pQdfN@X*E?Uy>L|Dz2IY&k(erTl84NkYvnrAsW+MZ zlXO;Zzsf#xTs+}!<=y?d%-y<Q`8G63PGy(L4ZNo36}@Wi^=#33$&DsIVWrN$9yEnB z*GS=DzPaz(LYaT+c60xQ6j>0-Q$;7EW#Nl6RPmcX$^)T4)PuVny~uw-l~jMPA4)%^ z{3T!Mhhq<^($t799XV+h$L!UM``<Q?{!pivbhVkXZ(fo>`#Q7qSgkB;C^52ci7a2G z)MJ}GvZ7#-3YJZil{3;*dDbsd{@X-Vaa)3}_-;r&K0c~f_nlQwe0yH6IrNjM?EOfu zZ98qM!Uwdf`oOH)(Ie}N_nGR-uxyysU_wRBvN7&uRg+vHHAD64$?Nl__VhAUH<%<( zb>^r|Ul;27y|=1{NTPna=2!L1o<H^G<u{wg&_%t)chzk5f1#VE513}}pfr!2Fx#en zF57g6**@})v~;(c*8UgdxsEr~j;@eAzr97ZwU<hC)D84-oY{}q|FDzki<;m#Nh!{x zvOv)B`&V!Z@{T=%|GkKI`!RPBzq{XXoWJZ*-uLAv$U=~nAWK2kf-DAE4YC|pyB=gg zu69Mpl3eYYkVPS@LY9TB3t1SlGJYOQL)L~Y4p|+tJXgCuWPz@Bg~$?-H6n{dR*5VV zStqhkWTnVbk+mX=MOKR}7g;Z|U}VL}l94qdi$+$BEZfzt8(FxkT{*IJWbMe}k<}y1 zN7j!N0I2{{0;C2=5s)gl+A<(@aJ7X%DuI*&sRdFDq#8&$ka{2mK`Mfj1gQy96r?Ih zS&+K8+QJ}}akZsEYJ(I9sSZ*eq&`T2kP0CsLTZE*38@lNCZtZTwopi=Ty3e4S|P<k zs)dvbsTWc(q+&?PkeVSyL#l?94XGPaIHYo}wsc7Ckm4cLL&}HL4=Es0L8OF84Ur-u zRYb~&)DbBpQc2gb6Yf2=<ep+8)kMmP)DtNvQc<L&NKKs^D5g=-|0}2Qwyd698k-%M S=gsr_=K8V&Ilh0c;{OIN*MD#T literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Bishkek b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Bishkek new file mode 100644 index 
0000000000000000000000000000000000000000..e3f81ee3327308132b4e9910fffef558850d0ce4 GIT binary patch literal 1045 zcmd6lO-K|`0D#}Pn=EU=bSRxxbNbzMQ^!?z*WWh57}14@QHJOsQOZ(>PMz`x6eZ{o z9i_v9=|Q>_bcuf6A!rX>f(`?+&_NL|8Q~#WY<u6R6m{y>d%X8O9y5oT?{ltZN8{qJ z5mq-WMnWyl`jpz1jS2n5!lrd3I-WaP^<1wEJu@q}*B(}FtmcdrLqGN*n~S`<E32Q5 z$(qNmJpQ0x*52!o=Ixj~ajjaO96TgXb^eg2W1I3!<g+|m@li&DJL%}wm-M+e3$kuK zY1O}5G-K}#>->{Bvtg-dHO^$srrCmJ-54;NN8eiU%vi3a`@YwDWjNnfbIVJ_Q~AVB z#!E$p>{Ox2YcD44j@O61&K1M%T55E=Ul#42S>3%bU$E0RzquEuxAVPypWNQ!BXhD; zDhVx6wmX9dew*dmuNl(*n1O&0eP7iN5tlBDpb*#ZXdxk-5pi&T-@nn+&F(YK2xp!d z%s3)uwW*a<iyth6riOpm+nj$e^L_6f^)V$3K`e2NDTpnIF`vpB#2myP#2~~X#3aNf z#3;ln#4N-v#4yA%#5BY<#5lydPh}orA5s9M0!Rsv8X!eLs(_RMsRL37q!LIekXj(c zK&tVn%7N75QxycM2vQQHCP-0`svu=S>VgyosSHw@PgNV7RBryy;=*03#h!*l_$T?; B-P-^F literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Brunei b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Brunei new file mode 100644 index 0000000000000000000000000000000000000000..cad16b0dfea2ec12074c8cb674cffedbac27b0c6 GIT binary patch literal 229 zcmWHE%1kq2zyQoZ5fBCe79a+(`5J)4+Ab!SUFq%}_5c6>XJBMvVqnPL0~BV+tN^Ji zXkg&-@eN_nHZV6f0Fo9UDIf?T!ImEoEet>l{)21-(I6W^!u3El@wRf=09|0GYrzEo DhVUm{ literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Calcutta b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Calcutta new file mode 100644 index 0000000000000000000000000000000000000000..b57972dd8ab55785d8cb21b90b5277d8fc850fdc GIT binary patch literal 312 zcmWHE%1kq2zyK^j5fBCeHXsJEg&KfF``kUdPTlU&IKx^fab~K~ic2LzGZ>kefslbA z=mJmygSQ(<wQmFi2LnTN1|yG$ZwP}g5PJrPFlZZ?85@8Ufk6m%yIFyfAPfRPGr(-I zA7Cy6!+#)9+a;j^qMe_=HUQBeH-Tu7yFfI^ZD1PcJ`fFZBLmQpEI<f!<qeQgK(_*2 h33Mya2B2Gcz-|@r0lE{ndl?wGfS%AbwK6u~0sv>)ONsyh literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Chita b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Chita new file mode 100644 index 0000000000000000000000000000000000000000..95f56456e5264b11295251f941115a9b877a64e8 GIT binary patch literal 1257 zcmdVZPe_wt9Ki8sZMvZnze{N?bEV5{YIBvibXuv2Iw@)tC4tZ%$|6P-6+{aCJxKfx zMGzf~{y|8W{v6C(3Kb$$o~(mlhq{<VL=v%{@0){Br;a@@@AG;0?A>5{zHegpp#u@~ z*T}UioQx)WaxJ}fy7g|h|H<tUv!HZ)a$&)x$$a0%nf%#jT?L;8lZz&%Cl`-4B>h8i zT`~~U!kd-4w692u&WHw%xO7?P7hT>yrNy=5S`r%3p#Q0^$i1(np0`r^by!w@cpfM_ zbvRHqeos~nUzgD1%Tj*lv{YQ}mdf*;vif*j!uw)UwdbHljy39<Sevfh7Swfx<ysx_ zYW2_gTH`N~n)fpr{gEx}hu&!I#7C*S`&nXxucZFMYiYPKARG2Sl8xtH=%(gt(zy45 zHU&>gb7P-w&e|m{#YeSe`h>Q+w`%LyPHmftX#2yc#z(hE#}&UO+FzO--^KzxsZ`3i z-1B}Po~(=`+nezs^Nl~ReO|ZQnC>q7x0+<H@fg$B@A4UQR!v@dPOrG^O8OpS?@6zl zym&{#gu*o@R28;H`7+~jhI2VLCH`Tbzt4Tf{=^jfjqEvteMj~l*?&jd0MY@{0@4H0 z1kwf42GR%82+|4C3epSG4AKqK4$=?OkfZGgX$k4c(Kdy2g|vnAg*1kAhO~zChBSwC zhqQ<Ehct+E=xAF+dUUi+B3&YFB7Gu_BAp_wBE2HbBHbeGBK;x_BON0xBRxCXrjf24 iZQDrSNaIN7Nb5-N44TIt+rt0UeNLTyCh9AqIlln5o(JLp literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Choibalsan b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Choibalsan new file mode 100644 index 0000000000000000000000000000000000000000..15b358f2f4ddec8b2543f883ada86d2aa3771136 GIT binary patch literal 991 zcmd7QJxCj29LMp0Vx*!%ZBb~osW0_C#zgD;dz%!8nog;TMR*h`NIE=&TT6WgDYVUS zgHut-pjgGxv1368H#>>L4cg*j4_d)3K40k|bg^6SxO^@L2O-~Ia$@#ntNeA`uur%+ z+U>=Cv9TZT_J7phEI4%Toa~yr2d8f7-k>h~Ql;)~RqEhUK;2L0=<?U!RYmGpSH9R) zRZn;HgWgS59oo<}WocDg@KM(VKD%|l*S-3q4{pP^_g>@IS8ixK<u$E7bHj@XuX!Qr zM&=@3bmq3(GC87K6A|^Wzgf41Z>jcRk?tt`rFu@*oZjsf)3>_p^e-ASurTio&P|)4 znJMQ{GHxEnV@|BC*2Jr8oF~DU4FBBpo|k;ejOI@1(X%(1vE!JY&t|jY^9ORSfBJLn 
z=@&_j+y6qQwFE@8kvx&NjueWdGrl73efFN`BZ;JhqMi2YvRAi+TB5vo<w*WRfb&1^ z+25Td1Ye1P1VN%8VURdTAS4nJ3W<dTL!u$!ka$QyBq9<LiFv6FibO@iB5{$xNMs~5 z5*rDQL`T9S@sR-_BS40Li~$)0G74lE$T*OJAR|GBf{X<j3^E#IILLT77!WcdWJs6V TF(HG(|6)}6eRgmI;ja7(v&hqR literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Chongqing b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Chongqing new file mode 100644 index 0000000000000000000000000000000000000000..dbd132f2b0bcc8beab08e04b182751795c853127 GIT binary patch literal 414 zcma)%y$%6E6ov01AsdnK0RGwCh(;k=S&0xTQ;84_wi_?7<`F!PCs;~}D7?f(B^vIT zl7ch2`)zh+C+8E>VAZ0p#Q6&b$@1Vmt@shmEEPQ+dAwxQ={D8*Lz@c0P8P$BDh-yh zJRhox=gVq;O|{%Y*PQ{??_KRC8|0oVI%a(=qV1LMrEqU0h@_&_Xe`L@@k|6ZIO2E7 z93L|!ALb9D7bk4{9*EM0TpUDs5CS+32?Qb_WIzakkOCnFLJol-2uVX01tDw5!t~*5 M#r`q2S-#n^-;pX<T>t<8 literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Chungking b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Chungking new file mode 100644 index 0000000000000000000000000000000000000000..dbd132f2b0bcc8beab08e04b182751795c853127 GIT binary patch literal 414 zcma)%y$%6E6ov01AsdnK0RGwCh(;k=S&0xTQ;84_wi_?7<`F!PCs;~}D7?f(B^vIT zl7ch2`)zh+C+8E>VAZ0p#Q6&b$@1Vmt@shmEEPQ+dAwxQ={D8*Lz@c0P8P$BDh-yh zJRhox=gVq;O|{%Y*PQ{??_KRC8|0oVI%a(=qV1LMrEqU0h@_&_Xe`L@@k|6ZIO2E7 z93L|!ALb9D7bk4{9*EM0TpUDs5CS+32?Qb_WIzakkOCnFLJol-2uVX01tDw5!t~*5 M#r`q2S-#n^-;pX<T>t<8 literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Colombo b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Colombo new file mode 100644 index 0000000000000000000000000000000000000000..28fe4307d75db1d500d815b4269cfd82de7cbfe7 GIT binary patch literal 413 zcmWHE%1kq2zyKUT5fBCeP9O%cB^rQ4``o=Ur=w&v&P>%_aH+Jx!f+XbhEXAdf{Xnt zjr#xp|1&Z%voNu;F)=YPc-;Z2XYhK$z{0@b8v!yRB!Q8Sfgw7BQ3ymb0LhR92A~K? z*2g!5!54_N4NQ#<fTS4+16k-Ggs_9TfO^0V{sGktbl3l|Z8KFsG{^%W8srHO4e|(x n26+ag9ONMo4e}I-26+rb*8|ls(A;-iHemnT>6%&@8*l*ty`oU( literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Dacca b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Dacca new file mode 100644 index 0000000000000000000000000000000000000000..98881f093ae76753e8757dc3fa50fcf7fd1f8b63 GIT binary patch literal 370 zcmWHE%1kq2zyNGO5fBCe4j=}xWg39QsoQNE&af6roSAC0;_iw|GrX*i2zcAvm{9-! 
z|9?g%W+oO^78VAEkRw2)3_%wdfb8fDkO{sK417TG1O^cXhNJ>U2_N4O1`i<CHZU_b z0FtI)(hSHl2Wf?a5W?=`1S$r*?*~*D&?)~j+uHVkXpnnBG|0_h8t85i4RSk3ImrDW Y8srBMT@RFFpoK@cY=B<1(>3D)07ULl`2YX_ literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Damascus b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Damascus new file mode 100644 index 0000000000000000000000000000000000000000..ac457646bb0205eabde9526bff8ae41d33df550b GIT binary patch literal 2320 zcmdVaeN0t#9LMn=AVP>}q$?i=q$0+{1+IVv2o@4)7yAVl$Aij`C<zHV3K0s(acw@$ ztSxIU(pChh)moZ!%dxgfo4V$&TCv4yM~h`ocH}{WR66hDpZ(Q;^}W0Ib$0LgkI&m* zUc25a{&7LhH@vz1&YS0{rt|T}(AzC<Zc8@1)*Ue3s=D3m&gn3EmJOS|QB}sFn6J&l zA?@~+7mCDC#cRfJ<05k;YpXF@@~(L`+HYUWxFD`gWY}X7--@vdC++dEhs0kW|6yDo z_*~rRZZdB633Fn5uPqLyiGZ2`Be0{!3|bLkPpbOO3{H5-o}3pWLc%{YLKhU9p}&vY zVZl2@*m;jJ<x;o^fB%><^-QiAQCDV9JJKko`<^%M8aX9q4b&U6`&WuNeM3f6$$SyL zd(em}*dp%UHOrot-6ZDM9kdsCKQd!u^Nocwmz#0HaYo$L3Tx3%FUor^&QbAaXUc@5 z2`aIxQYUp5Se`v?GP$i)rPRNtQ!9n?$~3(==b}nWeNv`JeX24dJN1&lSFI((=jFYF z(bj$6_+{qlVwH7bhtBTZZY|w^Qs(UZP~~pEte4g0s{7Y8>IZzAth}YYa(PmZ%8$Gu z9}Lg13gUBQ!Dz8sF=eA(ac<mN`AeXD`1qa5cjiY~xNl6YI(%9_(lnstu0!%sf1fI< z-7SmLURVC29$hkHn_BJNsaKCwtJ25~y7apat8B=p%loRViZ4BS&4D~??a}FaU3;um z*_kBO+7PR%b((xE`)8}V{EB>h;biqh=2vok@Fi6<`?#zbJgaJhzR<O&-ccKVIHv2m z_o}Bpd0E%*Jz_O<J*%H?*kc*bmFbPE8mvv#S-R09ttMZod?vzcH791u=F2mzmhic< zr9V||{`IEZa^eovdd`w<uZ*hp53cL2&Hbul-}hPs2K*;qLd*+w{`EzCO28x`=EjKI ze1(3IpI^*}pI@Bw|BE60dFxNagmVlE85J@tWL(I=kdYxnL&k;-4jCOXJY;;x0Fe<Q zLqx{t>I@PYrK>YcWSq!Ak&z-pMR%+`Fj&qREyr+?@gf68MvM#@88gSAkx{!k!$!vK z>I@tiImghEu_J>=M$a*PWc(ZfKq9~q0we|;K|rFw5e6g<9DzV0frJ8y1riKbCmKjN zka!>gK_Y^L1c?a}6eKE0Sdh3Nfk7gJga(NX5*#Etu1<K6_#gp7B7}qpi4hVcBuYq_ zkT@ZMLL!BP3W*gGEF@Y;xLlohApvuBB8G$vi5U_!Bx*?5khmd%Ln4QS4v8HSJS2KZ z_*|X%ApvxCB8Y?#i6Ih1B#KBFkvJlOL?VfV5{V@eOeC5}IFWcF0d;jEiiFhFi766P zB&tYQk+>p(MIwuY7KtqqTqL?kc#-%b0d{pFjD*<Li7^snB+5vbkvJoPMk0;>`%s5D a%}(-C@5I8zo^(%oa!O)hYRavru)hIHh^b=$ literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Dhaka b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Dhaka new file mode 100644 index 0000000000000000000000000000000000000000..98881f093ae76753e8757dc3fa50fcf7fd1f8b63 GIT binary patch literal 370 zcmWHE%1kq2zyNGO5fBCe4j=}xWg39QsoQNE&af6roSAC0;_iw|GrX*i2zcAvm{9-! 
z|9?g%W+oO^78VAEkRw2)3_%wdfb8fDkO{sK417TG1O^cXhNJ>U2_N4O1`i<CHZU_b z0FtI)(hSHl2Wf?a5W?=`1S$r*?*~*D&?)~j+uHVkXpnnBG|0_h8t85i4RSk3ImrDW Y8srBMT@RFFpoK@cY=B<1(>3D)07ULl`2YX_ literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Dili b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Dili new file mode 100644 index 0000000000000000000000000000000000000000..c94fa610f952981a865abe4a9d9cbdc26946ab3f GIT binary patch literal 253 zcmWHE%1kq2zyQoZ5fBCeHXsJEc^ZJkq-PRGPR}&@!2Qag!1C}+hWh{i|1&T$F)}eR zFqG~ADqtvRU|?ZjsGGpR;o}>^plx6Q#FiilAP6DBwjU5J3_uJ1gKP!SAe%wLAlpGS W$ORx!50oR`6<jtz7uo4rasdD#5->Xe literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Dubai b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Dubai new file mode 100644 index 0000000000000000000000000000000000000000..c12f31a141db7e6e02f7274e114c1cd6be2ff1e6 GIT binary patch literal 187 zcmWHE%1kq2zyM4@5fBCeW*`Q!IU0b(!cQ|-)c^ngpMjB)fx-9&P=vw4fq})xH-tgk jzy!o)2qD49A7C8}3_uhAgNz1o2^i011GLCa*MtiIP}?0H literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Dushanbe b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Dushanbe new file mode 100644 index 0000000000000000000000000000000000000000..67c772b4d9d711dba166b802bfc5e1f64ae004da GIT binary patch literal 621 zcmchUJxc>Y5J30zG$ATnqsb*v<L3!M2tpK8B$XgUNO6UTg;*#73oENg0=9zw0E+`V zu~D$Im|ASCZ7j5~F^z>5dd@oy2v#=EGV_*U7iQo3tJTdZ^DE-oA&Immd#9o0)ZK4f zo-_-+-d@m`x^R;5^IY=n@hJ855V&`)lfJ12nX6sNo^470@rn!_EXiPPR&v{U8CuKA zaJffD=00RJ-;}Y;y^MF=if6x<yqD+F#PwmN9)_W@+S|UC-SJ~}#$v{l7{V-8jBU)u zj-_+?>!!O^TOCU~oGCB+CSNFO&S*|^)mR$QMvMQ4*S~&W(g8y(Kupla2E+)&N<=jS zu>&y#u>>&%u>~;(u?8^*u?H~-u?R5<u?aB>u?jI8QSCwu!@AD?f0h&T`UeY>#l$CU CZF_?N literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Famagusta b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Famagusta new file mode 100644 index 0000000000000000000000000000000000000000..021f8a2dd78a0a493f58af2ae4107dfddb4f2d40 GIT binary patch literal 2042 zcmdtiZ%kEn9LMo<LsnkYzGy~v`D2nPc!3L7Objw3#6Mg;#!X8ljtLhmqC_Dm?Yc>< zIp)49n{(-A6IpAHbkllZf2KKSo1?R(4_afLvQ=xs+RWLXvHHD__NWKWr=7cdUT0_b zKKs2t@wJ;0CDwngyUiQETuJlgIemw@9epi3JbvT@FF3r{P5ZOKoABv*dt!f?Gild% z?!Db1N7`4q=^MwK$+ez6B{t2wFM8IVntR5%e@efdk#fYzxYlDoaOEv$+E>l?gP%7! zp^v-W%ws#ftb++RySLHH*|x-;-cs#7v_9XRQIqH8mQQdW4ySoDL!<5^X(QgO)HHk6 z^`JBRhY>sP(p4w_+?V#8lS59p|Abx8cibuL-e(tW-Rnde+U?@HL9ZmX$BkB~H@9Gm zTbkA8%`54Yd1FbLAL`Tj!yBaRuMS=CNu@mY&9hoQP_GrI7wF?Xi*(^TnfgQ`R~L1c z$&-sywXz{3PtE*Et18E2aq1UZo%^j+kNzl2Q_jfJp$oF?$`M&U_`cL!{7hrJdu7G3 zcXefJtJu9eb=8_(`gBX9);`*z@tSIVCS!%J4(I9W8xdXOCf(<*&z7|x{-*0L2W9=? 
zbg4UeRh~~?lls0Paq2J2hV8%T3kyGzjdd5bA?JWJRt#$5o>SVCrP}oKer+CW(@ht; z^u^)0zVzNYZ8^VLUVghmTL)_8mF66MwWm}zuPT?eM22iB&X%oJe@J_}B^^amwd2?C zrE}5^?R3vc*XY;!`tbpIW9YPQJ8)LF4<6DtyZSY`JE_)Pfjj%8jJy3O_4Yrx^$W|& zxnxeCl^+R=v#iYN*7$$#H{aw(tcf?@)WqUeRaGswxHTsd<zIn-6}UzG4$k}U{MTH6 zn~QVLGiM(;|40K!2S^J@4@eV87f2gOA4nreCrB$uFGw@KrW>Rkq#vXqq$8vyq$i{) zq${K?q%Wi~q%)*7q&K8Fq&r{J9?~DuAkrbyBGMz$B+@0)CekO;DAFm?D$*;`EYhv7 zX&33&*EEcDjI@mOj5LjOjkJyQjWmvQj<k;Sjx>*SkF<~U?`t*y*#Td(1;`#Cn}F;B zvJJ>SARB?~1hN&#ULc!+>;|$O$bR^m4MBDU*%D+=kWE2$1=$v4UyzMKb_UrRWN(no zL3Rh(9%O%z4e~WRglrMAN6022yM$~LvQNlHAv=X^6|z^zW+A(UY!|X$$cFiv9YeOv v*X$XxX~?c2+lK5LvT?}H;s0stf@T5>N*Kd%ELvDv7%7T|W5p5wX>RZzDF@N9 literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Gaza b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Gaza new file mode 100644 index 0000000000000000000000000000000000000000..60d0de00ad21c8a7260046dc49fdfa9e040838f1 GIT binary patch literal 2295 zcmd7SZA?{l0LSrj1xaDl$ZXCLLL!QC@$z)TBnY~onAj<=Opu2^$%E$8DbYcI+?uhO zE`~kL$-)<mjZG)!m~)LDSdXUN6Kbus*1s1jLx`h1C^)76_wcf*>s{yU+|Pd(?wh;& z`+5tvRwioyx`Na@JX|g6;XK!;UcN8*RGz=`m44y)J?*D&M@@for8GNh&7b!R+4t@b z#>I**-=%HR?00V#{i9BOAmpqVxGc=;gOk=^_YQ5y-)Q~)@l9>G>$G9jF7n-IxoX`k zEYL=F&o@R_9`cQCXfwtm!hE;feb%k1Y;9s*kTLPsIc;)cg>`3fmOiWgpqzEL%bYzv zAc8xR%<#`|%cl<9FeA=Si3Q?_9{Khu5tZ9*K3(#OTo_rbM;E%pA}vFY2^o?xzg#sJ zU-!!;Uk2#0mj=bs&IRVO&njfx0f*^4x=O~EcIwOPVx?>KH>T^_BQl{pM<izUiX_hx zksQ@7Qlf)}+tDQ42S?;HPnL<)Adj9p)gV_43-j4OOXW&`mYMcNrhM*Xw3*(vP-Zj- zna?-Ql$k|u$*huV@`cPZmdDv4vLC;z=REqG%<11{tQwprSDy(pbAOzaYmTLf7f-~u zt}W{^^FEkpU6<0Uzf}FZ&^NWnm%TmW70)q!eR7-d=5E(F%<Yr;vFr5wv0iKAoO5F1 z<#wxJ<hUs8Zn8FAd`}d8xWy_yZHSV6?--@6homT}GRo?A%2(5Kjm<^6ERR@bRHP-! zihH3(RrnLK>W^V#%bjt#^}7)3wJUSPwvHid``2S)N0Z;$+5W58Re8!X_Ma86=e%#d zQGQhHj@e^Xdm6-^#||5Nok!%};cBBMv`*Ie^Nre}V)^FDRHN=fhOBR%Z#0~UlKX1h zR^vzMVus_vcVNJM?-`Dn+z<ZzfBvhcMe6Eztu1vq0yQl>Qk!j`L+x|e^gC<wyjphl zdS1L*u17uUY+sIPyboW|VfCN(if&JTM4M7qL(iHnWL3zrkaZyoLso_?4OttqIAnFm z@{sk}ss$n|v{g$))`%<;StYVeWSz)Dk(DA#Mb?Te7FjK_Tx7krYQe~gZPk*IH6x4W z>tNNkYT3xTk%c2GN0yGP9a%iGdSv;?`jG-46+lXW)WB910jUB~2BZ#1A&^QSr9f(d z6a%RSQVv^H52PSSMUavpH9?AkRK-@61*r>C7+X~tq%=rvkm4ZKLCS;F2PqIzA*4h| zjgTTCRYJ;y)X7#A3aJ!QDx_9Ov5;yZ<wEL(6bz{tQZl4wNYRk0A!S4Ah7`_LRSqeg zt*RYTJfwO^`H=b{1w<-{ln|*QQbeSRNEwkjB89Y7l|)Kut7?f96R9RrPNbelL6M3g zB}HnA6cwo|QdXp{NMVu6BBixewMB|+tE!8X7pX5&V5GuGiIEy3Mb;jc%=-p8An<>j d<Y3imXCl>(%S(z+j(0Clh|5cG#iwuz{s)+r^!ES& literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Harbin b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Harbin new file mode 100644 index 0000000000000000000000000000000000000000..dbd132f2b0bcc8beab08e04b182751795c853127 GIT binary patch literal 414 zcma)%y$%6E6ov01AsdnK0RGwCh(;k=S&0xTQ;84_wi_?7<`F!PCs;~}D7?f(B^vIT zl7ch2`)zh+C+8E>VAZ0p#Q6&b$@1Vmt@shmEEPQ+dAwxQ={D8*Lz@c0P8P$BDh-yh zJRhox=gVq;O|{%Y*PQ{??_KRC8|0oVI%a(=qV1LMrEqU0h@_&_Xe`L@@k|6ZIO2E7 z93L|!ALb9D7bk4{9*EM0TpUDs5CS+32?Qb_WIzakkOCnFLJol-2uVX01tDw5!t~*5 M#r`q2S-#n^-;pX<T>t<8 literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Hebron b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Hebron new file mode 100644 index 0000000000000000000000000000000000000000..a2e1b364f9b50814468b5bf76290bfeec2bac37c GIT binary patch literal 2323 zcmd7SZA?{l0LSrj1xaDl%xvZeArVEn$m0!@An1Z#6FTLU3G(nKdC+`1B{~QYYQ$!` zm}xbq3tuocmQBpD<{Uk+9!<L^)LP5dzZVKah@(6xIHmvh@Up2}uR3Sve*U{~-`w5b z*PFk&JYM_P6`<bX;X0@u&T|^n%lCPo%5zt~($62itNrxti0NxCmu7pl`E#R?J@5Qr zTqx^kxVS}{eXfn7Z^Wth2b~uEmxOt3V8R;c+@=ls>aD*&yrB(soHDHHg$>saT(xfG 
z=V`+`=NTg__BV{KYca+`XEoe(^;kD2Gqv%#0mk@WXSIp(<<{**8TyRceR9T~4s+&M zzX)tgFhf4SC7;}T-3&c9DdvmAde~bhMYyNae5&Xpxge}ikI0V|3$-*oGH6gn{&Lk^ zbj>Fhf9a=3T^tZg+UJ`~&y>mNy$;iPWTlKLZr7L9M9J7y-<Ywz56QUFED@jHEfU;| zMPhiXNQwv)E=QAa?HiU)KVBk|1KfJ@WSv|-B+O_2ES4*L8D`2C>GIhV5oT)30-4qv zU_MtrU8WbjDKm<C<@4#ES#D>W$b9sUp7ro=GOKTgv2tLpTy-Wu_xv~^S076eFMNDi zt|{p<vk#3Q%}MIka|(iFPUB_$#me7>zTtp;$=fAfb|2H%CbkH#XRE$$PLIru%F%O2 zyRG%J&x-YzTCKd{<08Ma$=Yz?T~YA<Cadt2A&U0AZ4@8fFGW#>QBu2IzLM%OHWuiz zG<2y^mJ%<^?gkqbA&<$5KZcA=x5wn>?}DsXugn%(+6JwyUyq7yO+IUT>#t%*`AN%Y zJS|?!I%vIKdPMAu++|g|>%^`{-ZOSP56j&{l}1%?jjZzJ8r6e^@{JS8M$P#&S=&6% zsQV;b?x}KF^&g~)X^#8f{(kqor#Yr`-~aRf;jfw&)~kNu+LBm@zovzRX*2C}uzj92 z_0F1Xua=p)mKU$)ajQq2?aMKR_rZHQr2f<1)2*qGXp`!y>stK;vM*#~$j*?hA$vnM zhwKj79<o1VgUAkT)fSOG+Nw<=yF|8$>=W52vQuQM$X=1nBD+Pli|iNKFtTG?wPj?_ zwrbPJu90o?b+B(+wQ*$U$kvg)Bb!Hdk8B^=KhglC14s*y9w1FXx?ro?fb;=r1kwql z6-Y0TW+2@_+JW=~X^5@r2+|UyCrDF}t{`ne`eLgZgLDRIjjiep(j25aNPCd}APqt~ zgtQ3h5z-{2OGulLJ|T@lI%TU`h4cz(7Sb)GT}Z!>h9Mn8T4t+yW~-WpbPZ`6(l?}W zNav8&A-zMIXREr0w9i)c4{0FML8OI950NG!T}0Z5^bu(!(n+M1NH39Q+Ny3M?X*?> zL>h{86lp2aQ>3X#SCO_NeMK6JbQWnX(p#jtNOzI;+N%B{4YpMsMp}&Y7-=%nWu(nW rpOHpu4~pnL6Yb~!zm9aEs<$(qjz?!F#3aVJmc>P9$Hm4ZaSHqgVbA@L literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Ho_Chi_Minh b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Ho_Chi_Minh new file mode 100644 index 0000000000000000000000000000000000000000..92642679c8f9e68c125a8c6fa740ebf61df82417 GIT binary patch literal 389 zcmWHE%1kq2zyNGO5fBCeZXgD+MH_%bM~`#E#KqcNXHB0qT#P(daLL;4!&Rf*0#CL* zX?UwYgMrKTI79va|Nj{nnV4Can1PUqfguH`k%1xk8v_diLs9_)Cj&!40|PGuL)`=h zL5PTtZwNyGh|o4L2Vx5#wghW}5g{bF^9NKD&>8<h?gh~xH-l)9yFoO_?I0TDeh>}v g1BeFs14M)T0s<iafIvM^jM{$UvH|+mPS>0Z0QvW1Z2$lO literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Hong_Kong b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Hong_Kong new file mode 100644 index 0000000000000000000000000000000000000000..dc9058e4b578ca8c9bb954af1bbe26a964eaf408 GIT binary patch literal 1189 zcmd7QOKgl$9ES1JxivGCw22K1;+B(05gVrxw3R5GRu?8|QlVubhN3I1CL<z|5Rpho zNGz=WMMPLQHkNu3;#y<Sx^=3h>e7xGTH`z)iG`KeIFs-DolG*Dd7oHQeCrbR#|gS` zcsQ%w!@V@=UI*h%*{-2R+nrt}H>3NK_v*H^-QRxIJUCt}534Vko>SN4(TXaYS~yoy z`3jq!5|Z@e8TQHW8+mFc?6X_l(tCEg>ASE&Ki@xYUYzLCFB{_K)t>YE^`fZhZ|;!H z)N(VhvRwvpYwh67!!np!V227j<*hBT@A?{K_~eLv-`T21_TIH0WS#!lc*TA?v{-+x zykkbUmFclrmrb^2RDYS5Fu7@)bw1c)@?)#?*Zykrt+!dmZ*Q^_*Q@0F#Y$T^Ge^|X z*|sPV6yL6Z^=}-IKz+srYXUm7)NhLCXLRv_<t7~J)ZuK0iDb^{Nj>dm@{J?9^mK(O z+ubV4SLE}%=ifd`?TENPSS5}X1(a$_sxWs|^7CF-A5&|h>v_dgt@HPZ{l$d2Uq36j z!<5LJ$fU@u$h643$i&FZ$kfQ($mE`Gc4T@_H$Rd9k^zzek^_<ik_D0mk_VCqk_nOu zk_(cIr^^OO$J6D5B!pyyq=e*zB!y&!q=n>#B!*;$q=w{%B!^^&q=)3^=@LXTL{da@ zM3O`=OD?3zad`rXBAFtoBDo^TBH1G8db)g(ggsrxNXkggNYY5w_#dPlakZD5$PYD~ B5Gw!x literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Hovd b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Hovd new file mode 100644 index 0000000000000000000000000000000000000000..f367a550ff9affc140c118081917effaa31d1f13 GIT binary patch literal 921 zcmchVJ1j#{0ESOJ5>etV60cJ4N7b_)LF>JsgXAJ1(ndtQ5(cBk8AK4n8M-mtK`<EZ zU?CO>CL6IBF%Tn(K~m>yY$l7pIsNXvxlNk?`_0jrsXFm13Hk|#`1Il0kos%m_A8R+ ze6=!O?p5Z4Q)JzYsqFKB%sKWe*PdJE?&PVw<v8h{cc}caCt1+{s0v%JWs&Df6=xsG z66c{RO}G-J@0Ui|)1fH8J1{CPw}j_p)2Q5^6W++IQMDcv)k_0L&1AKx9iLWp!G2lq z?^8anS2nmRRby(JY<an`TTjlcw*6DvAK9}4>$`UQ(yG-lxng&kVXLbrWOw_zte&Dy zyVn(R52YX3!-)%W_+#51c^;H&(P&gS;^Y4Ki5Ft%P1hjIFr6$)N0JZ|iy~zpXbMkF zqdv|0H1SF-<{$d~{P~UkYf*aUmaZXvLpmq)4(T4!Kg0pV1H=Wy2gC`)3&ah?55y6~ 
z6U0@F+84wb!5hRK!5_pS!6U>a!6(Ei!7Icq#4p4##52S-#5cq_#5=@2#6M&MkR3p_ j0NDd%6Odg%wgK4(WFs-^oj|q%|7$PFZF)TcZ&UI&a<;sc literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Irkutsk b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Irkutsk new file mode 100644 index 0000000000000000000000000000000000000000..84136366d176279caf6b7a55c0cb1e270b41d334 GIT binary patch literal 1276 zcmdVZPe_wt9Ki8sxz)135t`Q2o-$i&t=24?I{(zPvE~lNh!`1ISqL3QL>NepOu9sL z$PUusN9HX$X@m$Ocqovdj0y?*d+8De5k=X0zV8}H9Xj{EJkR&t^S&GGeLrt9)qOIk z{@PZW6$V?x433L4X1cYiXmV_L)>Y!^%&aaOoG*0_s?u){(`6qXWY*06m|r`S$hap` zQhp~c6*t3DdBrd5dMibrE|T>}lx&E9lB$Ncve7py)$VE8v|>y=_BYz|`H9~A{&B$D zb3WjGI;yu!k7(cM4ZZdDW$nM-qqkk^*4xjfbl^l>@8~!u!7~S>rtz@sJXj;UD(a;+ zSR%Cx<q~oSb?D_!srz26>nC1I!`v4g9$C<h<1cj6z$@K6G_G5^ru3fvXR^0#SVxZE zm)7b5-4+>=eU4K)TGcDjkNpxWI4ZG6U9$gOi^T6nrG2JDC$4%W8Gr8T%Vx8-CH8{c z?pV5LU*=r&VeyGSkC!_fN_9?|JIZWV>R4JimAY`vW^Q#lqg?qpzhW~h`D=I_`E})L zPb5{$3}0Zk8Fgla_*tcFxh?l0Hz)u7xLeJY*5m&A`-|q^W)I!F*i3o+6at)v0fhpE zgH=O<!h%AB!h=GD!h}MF!i7SH!iGYJ!iPeL!iYkN!ihr4s$oT;Md4-D5Th`oP@{08 zkfX4p(4+995Tr1qP^56AkfgA*YG_h;S~Wx|Oes_;Tq$HJY$<dpd?|z}j46~UoGGL! rtSPi9ysaAI6y{b9bqaS1c?x?9eF}ftXAG$S^c^S+n>VD%A1eF>x<V9k literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Istanbul b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Istanbul new file mode 100644 index 0000000000000000000000000000000000000000..9a53b3a39063e86859b12aa755b78cfb38a26709 GIT binary patch literal 2166 zcmd_qe@v8h0LSs~aUAC%)b)e#2L33-Ls3H@It;<Wlc5Ja>4}@+_AJqKI`%DVt8C*I z%*}N3aoJ>}IZ`4=&bD-ITIsP`!!UYMED2aoYqmKz{nnyi%&zbIskWMb)cUvYv*%vN zaq!3OKJWOtEt^WkO=qfpg`3l^Z=Qz|`oY>g(b<)iYj#~cZ6DgW!8yF+1*4}iVZK&# z!tN~$JFkb^?7pHRv+t`L_8UL%a*n>g$QWq5W)7tK?XxQ;%#Vf=#^B&F^W5cr<I^Xu zI_I|rj0=Y<&EeWK<6<n^`E2eE`}5-K&ZWS5dnE0EIdY}O{^I%(bM$<(F*a<Q<Hz^f z-+q76{I2DcJ+V2%{9$Iq7Tu>D&(`C1TH+PQyYgi_ealJHSN@if(fX3~R~||DC!ZF9 zP;Yl&bg`4E{B~Aw$jlm<ZDjXUnz!`sGj8q3GlM&~8@E5+nY^Q+Lfl#RTr#I%JaJdJ zIhpG{lbGhOPKI{vH$r3klldQBu%~zB7&G>4S2Le)QM0zKQFk{lSNFv3R|Qo?YWDn) zD$LJRbEZzJurDmb6VqhT<$#<!{EM9T!Bts&Voc8O`B*O4-L4jPyeaQ}s#}({?30Vu zzpF~0JfO;!98~42o>livPpFF0M^(j*$5bTm85tR0rYa}b$;I!@Q4fq($*SI3x#XQ0 zvU+EstmzVR=_bE?up?hR6ulyAH~gxW6%NX1t)rryrOtBSS*xz#%V6ELZ-VvS{$Tyk zKyby^hl4AR^#x<+&R7k5$F0VruUZecwg!zo?bah}U$j<j+i0zhoDRlgb=I2PMysj% zpw;waiM95Vg+^N{mGXGg{>zo_`}0ckX88Z@<((3^>GJu6$Scy{NRhW$qzf^x#4|;R z{8=Jfh{#gUZM-6K{{H%3-@|WX4RKMg?`U*24{=dkQm*eZ{;NJ7eSUxD{vhK2yp<09 zPIoIm{qcwTqe?ZsJ#Y!x4zeFty&+^r$d-^jA)7*Wg=`Dim#f|wvNL3B$lhG_=8)YX z+e7w;Y!KNYvPEQ%$R?3pBHKjviEI?vsjJ>9vR7oY$ZnDCBKt))jO-ZQE%UkQJp-FY zc8zQs**CIrWar4%UG?5MY~EGx9@##!e^=cAqytC`kRBjSK)QgmfkPjVMj)L)T7mQe zX@;xr2GS0sA4o%xjvy^TdV(|s=?c;oq%TNgkj}X3)*!t>nuBx)X%EsLq(MlBkQO05 zLYjnh32773C!|qGr(AWbkY2g!W+B}|+J*EBX&BNmq-99YkftGBL)wP)4QU+GIal2} zq<2X3Ty^)5_POf*Aq_-2h_n#tA<{&oi%1)hJ|c}oI*GIr=_S%kSKUpdo!F+o>*D_y O(99|w*XjjjnZE%*@2d&` literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Jakarta b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Jakarta new file mode 100644 index 0000000000000000000000000000000000000000..37b4edded8bcd470f1afeab00ad16b6b1090ebd3 GIT binary patch literal 392 zcmWHE%1kq2zyRz(5fBCe4j=}x6&irV%8J$pyTtA#oZjEuaVhLw!F8Ks9Jl_fF?`v& zuY-|^nT45^nT>%VMFFT3M6xh2q$_|-$gE)CV_>M8z#ziFP|(02!N8DIz#!w}8^YiO z#M%btMg}0#7(`kEu?0i8rxVB!BoIQ_t(-vRV7LB&>iZ7__Gz98AR6R$5Djuahz9uq dM1%YRqCtKE(IEeTXpo;6XzeL3pg(oZxd1b?U(^5q literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Jayapura 
b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Jayapura new file mode 100644 index 0000000000000000000000000000000000000000..39ddc84363768c6b407df1077f69ba9dfca8494c GIT binary patch literal 251 zcmWHE%1kq2zyK^j5fBCe7+a_T$k`=!aK?p*ogcnzU6jGV$i&RR(EI|VsBQvCUF!@6 z4ltX~$2WvQ+rScpjSU#WJwrf>z#xPK8-GCc{09P%%^(_NJBTLV1zbSq=vs0C0F<0E AHvj+t literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Jerusalem b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Jerusalem new file mode 100644 index 0000000000000000000000000000000000000000..df5119935c5b01676e63d1fb1efd7273b8b4b452 GIT binary patch literal 2265 zcmdtie@s<n9LMp8WDL=qEV0%QuuMboa)Ap{Ng4_Qf!yN86a$VCn5p!b#55tF?1vc^ z&SkCnbIaK1heO4-<|1p;*+y_umZ_LeKft>LcU*$tru4i|t=jsdKim4U!|Uu0{$YIH zfs*A_DdHa&W<KHJ8qLFdlrrCTd$V3UvG|~F@A<<2^+z{F$MLInXJeJNo7dUjylm-{ zuYV;^Rko<E71};sGQm21A@=BZiHGHxm~4G^e9$?2>5x9x>J#0c#_99=4e@<zhq|!u zbEO+<#Sg{x;>Wf;>!&oI{qxRiqG#enySL_)z8HB@1*eUOV6e~*uWQiZH(Kn`gTGi2 zO(}MC+mN2L`HCIWK4QgM@A~659<V0o9=7i)+pX`8EA>w)@mlwYIsU0*`t{Urd+lkL zI`zFrNBQHs`mE{AvG$B3l{#T_h&^-10_`bp_9w27*WR3Cc2dsUI(c@rKPBmq<-7By zof^5_O1;wLpA{OeW_5<h^ucm<U-z)S-|m+W>|bKdes8bL*n7}=@THA%PTd|ocTJjn z$iL6Z%x@I)ath`Agw-M|X3)tVTO_jkDx3v<sUqj%fXr>bUF5axbRPb2SmbXR?G$W} zQVTcjkcBnZl)o&~c_eT~J-RYi7Ws~-K>iH**o0lGIN?@Ve076b6cz6*>RPH64~%h2 zTIZ=HUH#7EyDU-K+Ub<lXNvN-4mein6tT3vLq3raCMrr><de}sQJJw#R)vMCWs_>; zvYuY`)Qw`f{MZTg^!aqT;{6ZQ%HsiN)hk<6byJ%2Ol7r_uioLTUf3X>t-S86@vIUx zS>4Wa;|fKsXIR!=P7-U!os(-jZWHS+e<q**ctEW0*dy!SiB$FbUz9J@{;D>-Ss_D4 z-SX#G=wGk@)teBL-GA@6ArUIXbZ<zw5YcgBtPtsQL&pF9Nq#{<<e4XTzIlq!y!`zF z|Gu5A=I7n*4E^zoBj$=f`SLolC1g*?rjT7B+d}q*Yz)~MvNdFH$mWpUxti@E`$IN} z?9kP05!oZMNo1GEHj#ZI8%1`CY!%rnvRP!e$aazaA{%x!JLb>9mR-%Bkxe7JMz)RY z8`(Irb7bqt-jU5CyGOQ<>>p`>tLXsJ0;C5>6Ob+-Z9w{fGy>@a(h8&(NHdUbAnic< zfi%R`bOdRMtLX{S6r?LiTadmWjX^qtv<B%7(j25aNP9T?gEWYvLr9BUO^=W!adZi3 z6GxwrMsaisX%*5dq*+L}kai*cLK=p23~3qCGgs3zq-(CGZAjmc#vz?UTF22lq<I|O zL)ypDKcs;i9Yk8l(L<z(NEcmA8<9RDjYK+$v=Zqh(oCeANIQ{!A`Ru}DAH1no+3@< z=&Gw}D@R{lO=CGai?o)bw@7o5?jr3)`YZmA0gf<Po|$a=oKTSB@p*iS$!Q4%NnTH? H#~bk*I^)#B literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Kabul b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Kabul new file mode 100644 index 0000000000000000000000000000000000000000..80429ec408040a47f02ad8ce33fd203013071acf GIT binary patch literal 229 zcmWHE%1kq2zyQoZ5fBCe7@MyF$hq+Ix<mc{|Nj}8m>3uw9sorcEF2hE7#Qq57&v@< zLm0FTOhDM!03-zjA%rae>IPfF_XDhhfdOd#|ID^o3qUl;LXdDhkj+4}tz0%>H`wW# JSQ#5|0RWOzC!YWS literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Kamchatka b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Kamchatka new file mode 100644 index 0000000000000000000000000000000000000000..fab27defad0cacc1110a5f513671def88583e0e9 GIT binary patch literal 1198 zcmd7QT}V@57{Ku}owY0|4D&0?k94gy+h&QS&gDm9n?bRwYVgn(^l=f<MIfP3K~O<~ zM6IZczD7js!h&Lf7bCim@FEDT;DIFQCU#Mf6|DDvmayomyUuve@9aHigYEgpb{;$s zwEj94m>m{Jk6BzV88xT-JFa9N-%d=GyFKA$mGM-CYxhLO>}7xDr%U0g=}fA6vOHXq zZjj{<mrCsor>r>tRaOpvl)A%jq<+^+X^1?P#*PQF%6na!YA#6g!js~5Ce{7rny#L? zn)RFrXFacnbxr1oddK6s_Rco-jdg3w8NaSOUa$UrWg3WwBzUY!TD$6G{q`TSp|(ic z0`H~m=Zv&h&uaU-7qapDTiujSO2_nb?Yx`Ou4lJ2boQQhCr)X2|2f@!`lj^sAJX2v zqte$D)&AZ+vZbU&2O9fi;B!O<i<@Qel~1;Qa7pCRVu?-$vqKkiM@MqGoaHDk%D<c? 
z1+RJY3*Hpo`19I4=CrJ_wD~iwM9MPPPo^9$%SujIi{{RAI}Wpxe}=o!p_t_j1k7qR zD?c>{2VZ3$v45E6x8I*JKPbmkk;$^nbdd=oQ?@meMy8ES9GN;Yd1U%X0Z0W%2}lh{ z5l9tC8Au&SAxI@iDYm8-q!^?cTT>2F4^j|P5mFLT6H*jX6;c*b7g88f8B!Wjo2@Ah zsm|7vht!7@h*XG_h}4J_iByS{iPVV{id2e}iqwh}i&SfC%0=q6H3cITBPAm>BSqtg Snf?D;b!n%mJ>&}nOMd|b>ln%a literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Karachi b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Karachi new file mode 100644 index 0000000000000000000000000000000000000000..b7dcaab8f2068adbf69cae0b1cd80f98e7a2da13 GIT binary patch literal 417 zcmWHE%1kq2zyNGO5fBCe9v}v>WgCD*XWgGAXQmphxKtuE<L-(FE13LO8`!LO-C#Rk zV}Xa`0s~KO_XVC$?izT#sa9ZMWMX7yWdTD52D>{z?F_yV3@i)`(HV?f3=D1o3<3-c zAqk8kV78=>ZwQ07fvK?pkTe66rVIhz!685*1f&-ZLP&7*52&vHKmc+)hz5B8M1wp5 qqCp-3DFb;1M1wp8qCuVl(IAh3XprZ?G|+<}nx>xQ0{U9llnVgqLRHiN literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Kashgar b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Kashgar new file mode 100644 index 0000000000000000000000000000000000000000..b44a1e19e9b1212585e55bd65be77665f4ecf6b9 GIT binary patch literal 187 zcmWHE%1kq2zyM4@5fBCeW*`Q!IU0b(hJU+K>i_@$&%nsYzz`$@6k!NSU|{j_4PnqW jFavQJLP#+32UrIK1JK0(AfrKC0>*RM04=i9HRA#R5AqyS literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Kathmandu b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Kathmandu new file mode 100644 index 0000000000000000000000000000000000000000..0cbd2952bb480dd403bc567bdc412c5d92813af1 GIT binary patch literal 238 zcmWHE%1kq2zyQoZ5fBCe79a+(1sZ_F!cVm=auNnB>i_@$&%nsU#K7SH2Pn+o8v#-m zaEF1*$2WvQ+rZS=07ROYf+T?;gamtjK(sIbt@sbJ55xu838G20m&*q13Oij>D-%;L E05l&Z<p2Nx literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Katmandu b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Katmandu new file mode 100644 index 0000000000000000000000000000000000000000..0cbd2952bb480dd403bc567bdc412c5d92813af1 GIT binary patch literal 238 zcmWHE%1kq2zyQoZ5fBCe79a+(1sZ_F!cVm=auNnB>i_@$&%nsU#K7SH2Pn+o8v#-m zaEF1*$2WvQ+rZS=07ROYf+T?;gamtjK(sIbt@sbJ55xu838G20m&*q13Oij>D-%;L E05l&Z<p2Nx literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Khandyga b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Khandyga new file mode 100644 index 0000000000000000000000000000000000000000..918369539a5b7d587be3ec826019c7bfa0a13f09 GIT binary patch literal 1311 zcmdVZPe_wt9KiAC)y+RM<2htnX0F^?HZ@%(E}ctl$=r;nosvN4uN5((LqX^R{jr0@ za|i<QP?U9%@K7C=xAag*P<gTrf;$v}iF7C;*7JRBFzD2=-^=@c-}l}74mLhd;^@Gs zfcR@l%^h|oVs_5gDRa30A^qjqgBelgJCa;qF}zS-baT1<$BX`o&#B~w`K5)8v*Bdr zR9tVG2<fV^2EBQtT326{+I`;ATl!Y?*4U!1X`Ry^|BUulKG)ky9&4ZDz4U#XmfO=W z-L;p`xohVh$sN;orT^(IS@-aYtiN+sHeBzMJ1@m$<B5=LI(9|}E_Uf%p&q^auvhP? 
zs?*JZLf!mpt!}CG$d-@GI{33d?wxw4TjxK?w(+krlzJ`OZ@iV^(Fqw|NhkN692Xte zQmQjDEV_=5sr}vo5$PIK2b?j{UDL0+mrko_ey57QiK?DOkBU9<tN3h(=)GO44qnJR zl!(0&LzzrQSowLYo5Pv26%^)N<bL3ExQdE%w~}IqLx`a=^WTc366Sp5o>e5opcZSg z$Lt-;+*y5QbkdSMDa@0r$I(e)?(kN;HzE9uEyCZ_Xc{za;>(3)kBNPiJtY1C!1q5d znctlO0tEvD2nq@c47&yg1qcNR1quZV1q=lZ1r7xd1rP-h1rh}l1r!Ap1r`ODT?34Q zi~`NB!A1c`K}Uf{!AAi|K}dl}!AJo~K}ms0!ASvX*C3@pwQI0az*5js;8O5X08<cC zAlo&VDWDlpQ(!aTrT}L^PJzyV-L3&oL2uW<r{Je70Lmg@SO}EGKv@vXkTLP!FA7(i KSvc+WLDz4A0uDX^ literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Kolkata b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Kolkata new file mode 100644 index 0000000000000000000000000000000000000000..b57972dd8ab55785d8cb21b90b5277d8fc850fdc GIT binary patch literal 312 zcmWHE%1kq2zyK^j5fBCeHXsJEg&KfF``kUdPTlU&IKx^fab~K~ic2LzGZ>kefslbA z=mJmygSQ(<wQmFi2LnTN1|yG$ZwP}g5PJrPFlZZ?85@8Ufk6m%yIFyfAPfRPGr(-I zA7Cy6!+#)9+a;j^qMe_=HUQBeH-Tu7yFfI^ZD1PcJ`fFZBLmQpEI<f!<qeQgK(_*2 h33Mya2B2Gcz-|@r0lE{ndl?wGfS%AbwK6u~0sv>)ONsyh literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Krasnoyarsk b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Krasnoyarsk new file mode 100644 index 0000000000000000000000000000000000000000..faec35d30403b70fccff6de6a523af81467036b8 GIT binary patch literal 1243 zcmdVZO-R#W9Ki8sIcqo*FQv6sOS7fZ`n76WQ=2U_opy*t*b7B3Vk9KJP=~-lRuB=3 z>JnW-i4GAzsV?EEB05MxL>(kbvY<m2(IJUg&-cFuQm2kR<N1G{J^OF4J>Mm_9q0<1 zzlPWDFd1<>Sr@15<@)5zulEMV0<(gL(z8nkO^IiqxMcEGs`TT_^qf(znLDyQ?Hlga zdAB;%f2~dDpO5N-lhqpN@$14JMOv1~XnFLDE((olh3~m8_CD00`?CbUy_F^7Z(@}v z&c-TVJdmnKcO-OoP^t&7NX_Llvb6uWEIZmQvaeH?@9fv`;a$3-p-We8Zqil$jT#A; zYvgB@*7;&m_qtG5|0t6+!xI`EbxZvXuQc5MAdRO#$=a(=B)0#VG@TmPb<IN(-}_kC zS6q_j_)XoAe@t4+&uPoVMQzRN(blI&wC&v{P2Aq1?IQ=J<3g<_6Ym3knM}sG@}^}U z?))i_=`*H0nfk(?YmeJ)%+@0Ni<;zaW7oYYm&cery{0gC&F#4CPVOFK@5$|(!uF1& z2}#7xIy-CmFyqP&XPvzy|KW(g|9+qSh8d0;Ic~N&a^%<<9KEAGek1`T0|qG|IUq?O zSs-a3c_4`(nINekxgg0P*&yj4`8e8ykc^O&9BocWQb<-vT1Z|<Vn}94YDjKKa!7Va zdPsgqf{r#rBt=J?Ba$SNC6XqRCz2?VDUvFZE0QddEs`#hFOo2lF_N;Q%^6AB(PoXL cjpU6aj%1Fcj(xU)|0wr@diz&stf?*d1<Lsg1ONa4 literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Kuala_Lumpur b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Kuala_Lumpur new file mode 100644 index 0000000000000000000000000000000000000000..5c95ebcdc377086c8c646e251569792fe3602c4d GIT binary patch literal 424 zcmWHE%1kq2zyKUT5fBCeP9O%c6&ip<TXXB;UFm)k4sbn5IJNm{!s&_e9G5(DKZs5I z%2EIS|9?g%W)@a9R(5s<hWId`dWOVU1{MZ}qymr;=?aWIAd&${W>zo=F)-9kV31&7 zC}?1i@$n5|2nJ$p19K2IG60drAkq?uEkH)0fDpnC=K^W~JNyS!C(wQW>&!w|foPCN tKs3lRAR6Q$5DoGahz5BKM1wpB(hl+<h^_~UG0@GITsA<z+v!?x0RVnQVY>hT literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Kuching b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Kuching new file mode 100644 index 0000000000000000000000000000000000000000..62b5389229af5216670b89966a2ddebe56695154 GIT binary patch literal 521 zcmWHE%1kq2zyNGO5fBCe5g-P!B^rRl+Ag*UyVBh|_E)Pn?Efqsa4>dF!@;Wy0uI|f zU^u+(@`a-c!VO1fum>DxbZ0nTX?5Ym<1-8=qxWApb>x4;X`9ahrzb`*T=K~MQ2+n` ze+EWoCKSlR%*?=$UI4U)A+v&kg@K{efRT%Vp`d|5fPtZI0>~vGwvTTJgSLUWu>p{@ zFanc6mL*6V9E6bIuOCodKqvhN`42>c{0O2!{shxNzk+Cxf59}+&mbD)Z!it?JBSAP zA4~(o07Qer0ZaqK0z`wt14M(u1Vn?v1w@0w222CP2SkIy2;{7KAe)xq#AO2vLpxmy FE&yxUwsimi literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Kuwait b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Kuwait new file mode 100644 index 
0000000000000000000000000000000000000000..e71bc4e802cbdfd9f90598029e0fda04c4074ab3 GIT binary patch literal 187 zcmWHE%1kq2zyM4@5fBCeW*`Q!IU0b(RcW&=_5c6>XJBMxV9-7T6k*UZU|{j_4PnqW jFa~iMLP#+32UrIK1JK0(AfrKC0>*RM04=i9HRb{Uf07!( literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Macao b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Macao new file mode 100644 index 0000000000000000000000000000000000000000..2c20a32651d9da10d1d6f63495c3cf1fd55c7f16 GIT binary patch literal 771 zcmcJMKP&@L9Dv_dTUGV1#HJAvUL;M#;^I$&N-WBti9|vrL@-n*7HMu02@x@1GKn06 zfs&hym?RdXO^u48^{?JnqlLlm^4|AymmA*qg@&dkn#7N>t2ZpBK`qW6Lv3pbpSYX5 zw323@ydONY9u^zrWAnI?n)J%2>I40`CnaCRm`K-eCo_&wJ@XWhuQy%#+jZD{zZkJT zPCCuc{SGU;Rcnfs8cT~gOk2cm<qhAO_TZ7>Xy1%F{ToJpT|DWkjEe&2xh!}-)7^<Z zS$MUpdrp^S(eaXAyc3Zn>usWR!IvzX^$G7xYqUJvVpQ~3MMa)w``shN^t`&1h^=bs zhEYSfzW3f>NCdjo8VK=OV}<@;D$QSNvT85}u?8^*u?H~-u?R5<u?aB>u?jH@v74hZ z46zI`4Y3U|4s+HyV4hL-0R<oxASEC*AVnZmAY~wR7z*X6DnUv?YC(!YszJ&@>M<0A SRAeX#sR{qDs9WXr>+UaoVev)) literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Macau b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Macau new file mode 100644 index 0000000000000000000000000000000000000000..2c20a32651d9da10d1d6f63495c3cf1fd55c7f16 GIT binary patch literal 771 zcmcJMKP&@L9Dv_dTUGV1#HJAvUL;M#;^I$&N-WBti9|vrL@-n*7HMu02@x@1GKn06 zfs&hym?RdXO^u48^{?JnqlLlm^4|AymmA*qg@&dkn#7N>t2ZpBK`qW6Lv3pbpSYX5 zw323@ydONY9u^zrWAnI?n)J%2>I40`CnaCRm`K-eCo_&wJ@XWhuQy%#+jZD{zZkJT zPCCuc{SGU;Rcnfs8cT~gOk2cm<qhAO_TZ7>Xy1%F{ToJpT|DWkjEe&2xh!}-)7^<Z zS$MUpdrp^S(eaXAyc3Zn>usWR!IvzX^$G7xYqUJvVpQ~3MMa)w``shN^t`&1h^=bs zhEYSfzW3f>NCdjo8VK=OV}<@;D$QSNvT85}u?8^*u?H~-u?R5<u?aB>u?jH@v74hZ z46zI`4Y3U|4s+HyV4hL-0R<oxASEC*AVnZmAY~wR7z*X6DnUv?YC(!YszJ&@>M<0A SRAeX#sR{qDs9WXr>+UaoVev)) literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Magadan b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Magadan new file mode 100644 index 0000000000000000000000000000000000000000..2db063560c48c67f0280212a320b183d49d8cffd GIT binary patch literal 1258 zcmdVZPe_wt9Ki8sZMLNzB*;Ilm1eeVsoSz;Z7xeqY#R+3L{<>=2Wpoe(Lw59P#q$z zOLQ1gLWLG9I+#Rw*dgeUM2K{d4nlR1ARfX)MX;Xly9T399ec*}ex5yhH`t!<QiEqs z2i0Gr!t5{^J!Z15x@0bo>O$ks?oBSP@$O7k*QRqduCs4zzTfStUAmL3TbRqOoo!6I zXZ*4*+aR8?)v|uXE*mZ`OMUv2G#q&+8)L7eG5lOM`5sD>J0qJbu8Y?(t-Z?=ddtTN z)jV`iHP2tuTVGz#zNxg{cK@huxs}kZSE9P@e4F;4tk;2KaS5LD%J$AyX&+c6J3Lhq z3VfE(&#%(qcIuAz^Rny5lHNTtE#ZaNI`UvncfNS6qgSW&p3F_%b?Ua>dpRrndWUrP zi5t?>G^l&KMfR6PbYJ6u^nE!j{q}b0e-oAiAF3tx#3S+9piYeDB^Apq9&(SV;e0-? 
zZ1xp}hoh|IvC>)cr1XV9*Dj~Sq15Pc^Rudvab?z%Ih#wV%u`iaycTzCW~X?MGWQhs zRb@PpQoexS%%GV8K1|sP!&(<Esed@<Z@=GXzG9y9M$VjV&K)^><oqp714su*3rG)0 z6G#_G8%Q5WBS<GmD@ZR$Ge|c`J4ioBLzbo^q$LJDS(>Jhu8_8nzL3U{&XCrS-jL>y z?vVD7{*VTd4lPZKNRO7LNu*1pO{7nxQKVC(Risy>S)^N}U8G;6VWeZEWu#|I(=^hx jrD+@K8)+Qr9BCct9ci91Y+CrAx-XBI|3tJU5Gelz??EM8 literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Makassar b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Makassar new file mode 100644 index 0000000000000000000000000000000000000000..3a5dcb27007e1a2ffa67bbf3a30095303b5d67a2 GIT binary patch literal 288 zcmWHE%1kq2zyPd35fBCe7+bUf$XWO)cET>Ps}oLl$TnOGdso1~$i&RTz>xm|q!LI1 z*#!*@91IL~6Bu~FYyltN5C&f$);6#JVoQc_&k#qDYA6UH!395{dj10e$R!{e<QfnS RauJB8j;pvpuGF>Q0szU5Jx2fl literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Manila b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Manila new file mode 100644 index 0000000000000000000000000000000000000000..06859a70d9b6a7a10abe85b38775b97319409608 GIT binary patch literal 367 zcmWHE%1kq2zyPd35fBCeE+7W6c^ZJk!8u<V4v85Qoc{Ej;lfJQf=4-)43DlZ`oLf6 z)*z5p^`ZX%|Nl&kOw2&c$i&3JP*?!e#85YZk%fVwpn-t{#Abl9eSAY0v<)nQ*aD;o z3PP~Ehz%$W!XUr{#2_}$4-gLsfR_Fjd3Z+vL|4n(PXN&%mx5@JYe6)~#UL8wY7h-_ yIhY2z9%Kf{2Ot{c3lLoolw@FJW@2UmVkTzh|NrH|;-vcs7=$)JKicV9Z~*`$(PRh! literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Muscat b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Muscat new file mode 100644 index 0000000000000000000000000000000000000000..c12f31a141db7e6e02f7274e114c1cd6be2ff1e6 GIT binary patch literal 187 zcmWHE%1kq2zyM4@5fBCeW*`Q!IU0b(!cQ|-)c^ngpMjB)fx-9&P=vw4fq})xH-tgk jzy!o)2qD49A7C8}3_uhAgNz1o2^i011GLCa*MtiIP}?0H literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Nicosia b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Nicosia new file mode 100644 index 0000000000000000000000000000000000000000..3e663b215327d8899a4b3fbe4623f066630b97b2 GIT binary patch literal 2016 zcmdVaeN0t#9LMoP$d(r$zG#Sgc`(Toy1)g*#K4CL@PVtxxEZO$QQ?9`lqdwI;wG}@ znER@1&Q)7YVy!vU%^o&;nC6^rj?PxDwZ=MWtLB8YnX`1ve($sOSAV(nPv`8O*V)~@ z|NY*d==!a(0_$H_g87CI*Ix7CeIhUKzn=c-_>q%dVEC|`c(>l0@Wn-YVt=tS>A+9! 
z<nEv&ZEM`5Eo07<YR{e;nc+QBc-~IVJm*ZC+Ha@CA9qr2_1M#|?{{W=(`3*5s?iC4 z*6pUA*yp9a9dpxr8@!BN%iUScmENNpv)$QMSzczz1oyE}qBkcv>OP)0;>}G+wCCOq zIP-oUv9rFt>11E{+J559kQ3@ZZRhlza&o(m*m*k+JK_2^JHK|&D~KF&3rp3TpR?U9 zN^A8N6m-afu`XE{?9+wAo22+&yDs{?OrH9oMoR|jwDjyEUEH%wm%Np#OJkY3th-pA zu1L_b`k*{B=T|K+8<UEJ%UYTFqg0OmA}iw0$%>&%vhw<ISvB~fR9*Q}BL{nB^@;a% zO-qZ|z58_S^9S_V<_4{PvRR{5mHJ%DYF!t~(sg&jT9aHMHQ!Cq7jDm!^&kJC8?FXq z<GV>xd*-IR*mX<l`i8`*yDFP@-_XrVK9Mc8SF}Fkm^73QYV6^Uv@uP!@wcPeG}fwH zFLmlm!%=<tgALkzahtsIW~sIeRLiSP8TwjJk!)LABCW9$*`A*+JIeo(wj@j1^OCjw z_iNHI>5g`|7o>Cad);|zK;9TStGkY!*WH8f=$_7g?K;?{)<ba*cE^wVPjAc0SZ<D> zl^u>7XIZJUtnvRm`KBrowaUw@c|`eDT%7-iKEMJ0lmD6PzPUK)ymQ!*1CJbf<lrNR z9|-`700{w!0SN+$0to|&0|^9)#Mgv^#DWBaM1zEb#DfHcM1+Kd#DoNeM1_Qf#DxTg zM23XsYhpu!L!v{%L*hdML?T2&L}EmOM508(MB+pOMIuE)^);~~!TOqLk#LcCk${ni zk&uy?k)V;Nk+6}tk-(A2k<gLYk>Gtz^ho%=CVpf9kP$$J02u>h5Rg$oh5;D|WFU}{ zK!ySt3uG{S&1fLQ;cLbN84zSdkRd_F1Q`@$RFGjo#swJ|WMq(`LB<9d9AtEm;qf)& zgA5QdLdXyyV}uM6GD^rWA>)J$6f#oCP$6T53>Gq4$Z+|Z@j?d7*Nhl4WXPBygNBS6 kGHl4W;s18v0%q@W3Ru2Sq%gNAH=I`(isXm=xA}p;0b|0?X8-^I literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Novokuznetsk b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Novokuznetsk new file mode 100644 index 0000000000000000000000000000000000000000..ed4b248276dbabd6b9424bd5f7252729419570f0 GIT binary patch literal 1197 zcmd7QPe_wt9Ki8sIm?`cmvXgM%VkTa^=s8!O>Jt+{1+7KV1Fq3BL+g!A1VkOWCani zs4me>iHL|_RG08DDx!lFMASi|BndiX9Xb>d>-oMWEIM`T8Sndf_q^{0+wYs&cAz_A z{#rBb4hO5r9$c5E?dhgBki9=R7MLA6n3>}pGG#@BrDfk=r@bFvW#*20%)F898Q*Y^ z&cD^A{%dWz;Cx&co~+hDuU{AKn5E_IISt0Y=;H90R`_1%63-(Ixj#$j>pNLG{x(s0 z{A{A~<wL1@d{@HvhNODviqu>_Bg+Pk$?_vTBKx{z#m)hZ9NMKT>$`Q;mPTFe->A_@ zP@_MqH0Dc4>`jTT`B5%whbJ^X>Xy3e9;tuuK^jhfl66;~N@D+WX*@NqP02gby!VN& zuec=1<{P@9@Tjx|&uPoVMQttU)z)W+we9^TZNI%$J4Q|<Ixi%%{kdGuxC*9CzTAa* zujw=L-sIo-^V&Y<HfD3y{+TAV+t};Aw5!ONJ$<HR>O8gMvOAN{aJQo~Wx^7*N6a3z ze1UPXoGVVLf0*XC-=DHSD92Qh$+FFKkqNVyvZI|eGHqnyET)c39+^H;08#-`0#XA~ z1X2Z322ux72vP}BileOsDF&&=(UybMgA{~Rgp`ETgcOBTg_MQVg%pNVhLncX=4gvU zs&lmEA@v~zA{8PfA~hmKB2^+~B6T8#B9$VgBDEsLBGo$Da*=u+ZNW&zNXbaeNYU7D RXaE0JU0i2tZ>Xs){sqhn1EK%` literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Novosibirsk b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Novosibirsk new file mode 100644 index 0000000000000000000000000000000000000000..a5d39dffc110700f3f423b7ddd2f525dc09b50bc GIT binary patch literal 1255 zcmdVZPe{{I0KoCLoNmkr2}<W$S!!m}`nhVZruNE~=$t`}qB02WFBFZCs0dt`bcq<{ zAtDIb=oImwKZg){RL9XF#12DB>e8j6JQP;zeSand9in6J$NPQX9={E?_gP}^vBN>} z$MBdPCL?Ml>#~%&jpn^I4>D6#i|S6M7guCOxjR!<{`F<5;@w2rn=ciWxqWHh*ip6Q z&K~8z*`=0Vj;Lkl8&uVCzgphAK&^-sRCVN|stHV~THgz`vh=B{bAGVure9mD-oBFc zXD-S5@h8^mXZNhY{j3%E(IXnN8QC~8BG+6TmTL!2OY7hf*|h(v4xT!p*R>Al^*eXy z4gOs^6l~I=?_nMG^~msKrQSGW$xUP5bR@q}w%qp0)<@H_ZFoj*zA+)?p-H)A=#$<W zeJtDi$Mv?_Q5kK2sJ9mn$&Tvly5s#V-C1-_cRs(MyWaHb*xi2JojWVzSE4!*`)uvL zmeYfULP0o+=FMKt;@_?k7cbndlD}R~m(wXke9Zi|B5^>Nbu#5}3(=Pp^XKk!I}Wom z`%GVwHP3X%6Cz-R%nX~^%*P0a8Ny*N?OWm>4*AQkx0&x&;IJtN&NhcmId}$#Z`S~z zAfP~CfPn&nf`S5rf`bBtf`kHvf`tNxf`$Tzf`<agu0ceBM8Ra&fTEzHz_M#_F#twE zMuA4bMgd1bM}bGdM*&DdNP%eAV5ESwYfw^PQgBj$Qjk)hQm|6MQqWT1Qt(m$QxH=i rQ!rCN+cl^uu<aV$6yOx(6zCM}6z~-EbkLaiFW`Gx%wM9dvDxzz1GyQD literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Omsk b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Omsk new file mode 100644 index 0000000000000000000000000000000000000000..5e0d9b67a3ee9bf444359b006b2da687604ef190 GIT binary patch literal 1243 
zcmdVZUr19?9Ki9joHf)yg3>uH%WT=QrsXpK^(&pSoFy?L!XUB|p&&w}qOdUEA|ebT zf+j{!5oGB@2>pVdLZU!=5h2JPdPt#sD9qOR-ZdEY)MNK@?&s|6Zm^y2jUDbjS!w<{ zvh50kqtOoLxtHx_=!uMt4$lQw`Y**-<&Wg%c^((%egAkP|I<v|JLSzSnCOoC#(TB! zVV4%&J*2CzMRd)@8ZAB*(6t?fx-Pn0OKN}W`oNr)`rhk??6>N7FN%MDRyNLjZZA7` ztG#UUjcj`PLIO`mWb^%DDIXk^EmsF*YfrBPk9SGMk(*k1_Ox!R@6qiC_G?wqK@C+_ zXlSuot9|>Vdb&V&E(E1!{D;;~t&qA0Ua5aJFAW0=vg^(}krUIhyZ?*s2|t&{&Pi=5 z9g=Y4m^NqjOH0XZZJE8Nk&Fu(d3{Cqe(2EFN1Ym-I4^D2!y0S-8th9X62_IWBzd|s z)6S*K(q5#$@#oy*b{o_5%KoFKx!u_Lu`@1@F|ng2Cv{D&xa>;m9%JuGt(%-^Tg(K4 zRd(#KBgB^(SMqS?$xG}Xw)orc&)Hv?V5^bsW|=KVwjJ4eN4xz<0!Ri(3P=t}5=a(E z8b}^UB1k4kDo8F!GDtQ^I!Hc_HX$S<Bqc|i6Ot5?6_OT`7m^r~8Il^38<HH79g-fB zACjP>%@9e^(dLLGiDZeSiR6hSie!qUisXtUi)4$Wi{y(WjAV?Y>}YdFl6JINBWWXf ZBZ(uKBdKGbZQwu3J*&<>3Jv9<tY1XR7FqxR literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Oral b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Oral new file mode 100644 index 0000000000000000000000000000000000000000..b8eb58d135f62054868473914994d242e1b8be07 GIT binary patch literal 1039 zcmd6lJ!n%=7(j2@J`HJ+4%Q^bYSh};#MYShg;dj8m*S8@kOzgL@C~)#&r}p0gbD^h zbP#nC2gTwbA`x^_(ZNyV4sNw_2NwsIieRZFe&?lwpi4LJ<$mXJ9|<Jq1XHJG3g)jB z(;E)U*TeJX%Q_w>Z-06*-(Ky?T(56QyiLcW&(iVb@Ang5m+HxdOnU443-#2in%(x~ zyzRa>W4GU!v^y>xwdu3NcIWY|-Bn51-Q^D3lUucWQomep`!|>Il9Krmlf7S7r0?QG z>HGM(ntit@x#u5b-~1ctzf+fi+5_2t?v~`IugHNDPhDa5nj0Ls>4uJ-aenu#D;6eP zaW!zI)M+U#7TxelMGn4BxI+sAQhwyi$jeR{y&9Lpcb8=B<gy&8ZE|ysM#Dr}TH9J% zbbA}t_QojNYvLc%^Nh*&=zn3ddDCXhc-2IW2_(|NG&=vh8Lvk4ZE4JOHt#DFfyw0u z_4s;<dWQHaV<N2PBQscsn4fDO)8ErT5JVE!h=Rz12n(swAmSkMAOay0AtE6%AwnTi zAz~qNA%Y>2A)+C&A;KZjL#lX)d`JKxH3CQokQg9AK%#(z0f_?=h{6yFBos(2kYFIu vK*9;B@jwC!sS!a!g2V&~3KA70EJ$3Cz#x%9LW6Va<iCwAR@NLx`~BE&jKSbH literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Phnom_Penh b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Phnom_Penh new file mode 100644 index 0000000000000000000000000000000000000000..8db5e8a61ee9e63164ce7f07969557fafa982dde GIT binary patch literal 220 zcmWHE%1kq2zyQoZ5fBCe7@Ma7$XS$?ex&~Y|No3kObiThHXwN*$-=;pRKURD;~T=@ z1jO0~<{$|m2qA1dP%qdDo*!Tx3=BZi|A%dBJp!UZ)`5iUfoujM?c}ln+Ha?8&IJIu C{UtE~ literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Pontianak b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Pontianak new file mode 100644 index 0000000000000000000000000000000000000000..ec98c62bab86bc3afbe58caef094734022600c82 GIT binary patch literal 395 zcmWHE%1kq2zyRz(5fBCeP9O%c<r{!R_y0bIU1IkcPFF7BxD@uT;JVE*j$8lL7`|-X z*P(c;q=A8viJ6IoiIt6kAq}J+M6xh2WL7Y6Ffi0jVBlk5C}?010+V743`qqH(muW+ z3;{r_ZD4L}03<De*n%P4GsF={IDyPS0wE;0_Xkwpe;@$48$^TL4x&Ns2hkuufM}3E XKs3lNAR6Q!kal|bi3{jWU2`q~jo)5v literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Pyongyang b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Pyongyang new file mode 100644 index 0000000000000000000000000000000000000000..dc24926e80f24b564669b5979736a191114a998d GIT binary patch literal 267 zcmWHE%1kq2zyK^j5fBCeHXsJEc^ZI3_w|o+COs4TaY;Y1AoLtlLDZX?3I;|dW=3WP zhSDcM1q|gKAk}pf7&yQri;r&zgLiNUgBK9PKnMx8{(x%w4+J3FK{Ut(AR6QfkTl38 PAeubaZ~>jCYsm!wM71|t literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Qatar b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Qatar new file mode 100644 index 0000000000000000000000000000000000000000..a0c5f669628d8ecd61c398bc475cf2405d20000b GIT binary patch literal 225 zcmWHE%1kq2zyQoZ5fBCe79a+(c^ZJk!cTJzSh^-1sQ>@}KLaBZ69a=m08p60!U3dC 
y%YcEy$2WvQ+rR{fjX@GX5JG|-KOkBdfX4p^*#+W)>;uuH*vVxBwBJtGm<s@Gqaf1& literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Qyzylorda b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Qyzylorda new file mode 100644 index 0000000000000000000000000000000000000000..0fc7fada699081c12e65301b7daa240ab530ab79 GIT binary patch literal 1047 zcmd6lJ!n%=7(j2*rZ!0jm)h1?ef96Pjnx{PkZO{~B{)?0kbod9WT*wfNh}Hq6$xE* zQx|avDh}e1f=()Qa4h%@Zbe+Wh~UsVC^f|IymUx$a`Qg!JvaBg7f8-2U%E1%a(|7u zoiG_`o9uVrvE}8!Qti#NMzPnwSKrgW;`Mbedwst)YyDq0>H}-Ow|DhMJ@Kxp`(7>R z;FEdXfB&K$xP4B&tCKo3GpYxt6M87uqlc3XJ(BpYN8?}AkHp3Q@mq%1w<;qyAIZok zEslQpD9P6=a_srCj6HlR$Ey$I#PzC-&n?Kw*_x)V-O^L}yE<`UR?~xXnn_J*rcu&t zVn(u`hjntRAgABQ^vs$kxfj0V--e~I6qTvR>r%Y@P0m)GE;gG@7YcWDc7*MUbhTZ( zBGLAbJND72bNPz>7cN_Loz9iSbvswCgnC$=t?efz3EAD&ldZcvIeotDlH(IL(>61F znRB6FwD!ST{s%cfx8Jkh)I<_Q6vxPd2!luqXyPF9AOay0AtE6%AwnTiAz~qNA%Y>2 zA)+C&A;KZjA>sp?d`JM02m)FNkQg9AK%#(z0f_?=2qY3nD3Dkn!9b#cgae5upalep pD4>M|i3t)EBq~T)khma$K_Y{M28j(Wnv?%JxLD3&ER1Dhe*khm-6H@1 literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Rangoon b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Rangoon new file mode 100644 index 0000000000000000000000000000000000000000..3cc2aafac4e27bcbb2de4b08c6451c1a56452808 GIT binary patch literal 297 zcmWHE%1kq2zyPd35fBCe7+a_T$XWQQIPmnRKN1&brGKdZ|NlQD6EhPN14EQPNF|VD zVPJ^PVBlb2sGGpR2Vwj8hA;#Hv9^Jku>p{@1Sy4r5W?280hNNBAoK&O2Wad6ux*_e kK{UuYAR6Q(5Djt`NLf9Q%|M~sxNN{Kw$nAUGB)4>0I_91JOBUy literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Riyadh b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Riyadh new file mode 100644 index 0000000000000000000000000000000000000000..e71bc4e802cbdfd9f90598029e0fda04c4074ab3 GIT binary patch literal 187 zcmWHE%1kq2zyM4@5fBCeW*`Q!IU0b(RcW&=_5c6>XJBMxV9-7T6k*UZU|{j_4PnqW jFa~iMLP#+32UrIK1JK0(AfrKC0>*RM04=i9HRb{Uf07!( literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Saigon b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Saigon new file mode 100644 index 0000000000000000000000000000000000000000..92642679c8f9e68c125a8c6fa740ebf61df82417 GIT binary patch literal 389 zcmWHE%1kq2zyNGO5fBCeZXgD+MH_%bM~`#E#KqcNXHB0qT#P(daLL;4!&Rf*0#CL* zX?UwYgMrKTI79va|Nj{nnV4Can1PUqfguH`k%1xk8v_diLs9_)Cj&!40|PGuL)`=h zL5PTtZwNyGh|o4L2Vx5#wghW}5g{bF^9NKD&>8<h?gh~xH-l)9yFoO_?I0TDeh>}v g1BeFs14M)T0s<iafIvM^jM{$UvH|+mPS>0Z0QvW1Z2$lO literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Sakhalin b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Sakhalin new file mode 100644 index 0000000000000000000000000000000000000000..8d6b4dfe21720eb32abd17fcd627ee9567f93836 GIT binary patch literal 1234 zcmd7ROGs2v7{Kvwn(>hpM3h>Srj0h)q|TknG#~j)Y-UJeXwd_LUYMIhw1@&dP!^Fy zi&leJi^PJ6kO+}k3@FsXA|zT!n_i1HaTXF9#B~1Ggd$qB>$}|hzaCJS`F<u2_neN% zAG6$^a5L@p=6$WUkE`az_)K`rtPAe$&M)ju7v!FORj_cUx$yIF_xjnH^oHp`w?7qD z8y}XcqM<dac+jghU7AxRy`NO+kvD2{^qC6OkE^oK16A%%sx2#SsG#qu3C@k0tsh6F z;=%!`czxY$o4RO1PkPPvdq+*>?YODB(qyX7SDWz365||;smQsY+R<30b{;IyyNZf+ zjkBO@zKgE)=S%I|_qy)eyzEI$>H67OX&8AYjT0$p8km&5$zf?eH6ksSCv<DaHEBCB zq}$7Tq@%4*@5_oxXW+Q*{BlNjd0TbY%PzhDLz#{~3hCH%i^OlP(uwGVIrMc*`ZJk~ zc)ZINFJIO#S9Z>CKV;|p?egXLd?EwS?bj-Ox5U2RPkVAjl8<HO(sSv=V^0R}%Id}Q zSUe%2aGivlh`k+qhxsGoVca(!iGSGb{LkOAA1lL-DSPIaT~qc=*|}T0cgpT5`==P7 zIG|Xdc%Yb|xS-ge_@Ee}IH6dfc%hh~xN&RkQ2a0$a%&w?EKxjBOi^4>Y*Bnsj8U9X ztWmsC%u(D??76l6C<fhHhZKtp9w{anTr${X@X275!6}1PidP1+6t@g^DSjCYyS0uf kmfc#<6w?&f6x$5GDaI+znSN{Gf4833VE+%ADjg^92NM?%(f|Me literal 0 
HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Samarkand b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Samarkand new file mode 100644 index 0000000000000000000000000000000000000000..10c7af7fed8b1118aeb50229697655055a02d5d5 GIT binary patch literal 619 zcmci8y-UMD7=ZCNX{)8};G%7fAMrD45sFwppp;JHP$5G=5Qk0$!Bt#@I_RW-f`i~D z4s~!7aTOeHcW`lW&_Nv(Eb)D+AmZd=j=Luu9Nhham9^TG{QBB;hso#Y<iBHEm*ed2 z?AcMX-0AH#x-yq;I)3V=-<}UM_fL)Njpue>Z#JCs9n*8NVR{d0rf+}Q^lvR1cYWRr ztWKE0ierX~DKngFni1#Gj3#f57fbry%ey~z_u!9LFY@&;48^h|(TJ@h*7jrAwn*tp zZz6MLiHa=v5*G=oR*F@0{obi81(M58>n!M;;gE=BeO7`$JpJ|SjNUASfWp8w6ci2$ zNvq02p`q|lh$u`HDhd~cjKW5tqwrA(DU1|K3MYk>!rH3RQg|uEw61UX2j)ak`Ad_9 F#0R(bef0nU literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Seoul b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Seoul new file mode 100644 index 0000000000000000000000000000000000000000..312ec40a112d5ab54ea2d92d735735be4b27a44d GIT binary patch literal 531 zcmWHE%1kq2zyNGO5fBCeF(3x91sZ@v_w^4Ilb(tFxa6$<;gP@QhbP(Fe>@eI$at2- zT=87c=fw;Cf{GW<zhu1P52|=|{94BA_a+r@mhZ@TdtR*KUHhzz52l3`3X2maDCVV4 zP-=FXpzP;4fq{{UnUMtvGP43914B6n&_agt4h9wmhPnw1Kpv0;vRh{`@-Q&;EMVmE z@eN_{4h~`P0%C8M5Rh^>2qD2|KcKq)0|Cf~AR6RL5DoGvhz9u<M1y<`qCvg}(IB6L zXprwgG$;hXG%yrEG$<s%G%z$kG$=$sG$>R+G$>?1G$?dHG$@2X&H;rIhz5lehz5lg Ph^AeLaRGx;*OChWrb4=L literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Shanghai b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Shanghai new file mode 100644 index 0000000000000000000000000000000000000000..dbd132f2b0bcc8beab08e04b182751795c853127 GIT binary patch literal 414 zcma)%y$%6E6ov01AsdnK0RGwCh(;k=S&0xTQ;84_wi_?7<`F!PCs;~}D7?f(B^vIT zl7ch2`)zh+C+8E>VAZ0p#Q6&b$@1Vmt@shmEEPQ+dAwxQ={D8*Lz@c0P8P$BDh-yh zJRhox=gVq;O|{%Y*PQ{??_KRC8|0oVI%a(=qV1LMrEqU0h@_&_Xe`L@@k|6ZIO2E7 z93L|!ALb9D7bk4{9*EM0TpUDs5CS+32?Qb_WIzakkOCnFLJol-2uVX01tDw5!t~*5 M#r`q2S-#n^-;pX<T>t<8 literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Singapore b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Singapore new file mode 100644 index 0000000000000000000000000000000000000000..78583666698a0332b822c0cd15daa58abf60eb27 GIT binary patch literal 424 zcmWHE%1kq2zyKUT5fBCeP9O%c6&ip<TXXB;UFm)k4sbn5IJNm{!s&_e9G5(DKZs5I z%2EIS|9?g%W)@a9R(5s<hQwH)dJxINz>rh`G9q1pkq1OF0Ljb>1|bH9x(N&t3=9Pg z3^G2xAq>GltZiTp!bS!l(ilWq0<i_iC=?Jv*x_724Pb}=fa(Og?|+?H@L~`R@(73q qc?Lv-JOrXao&wPzkAY~A=Rn#)9t6?#Krsfo`I5^9=yy9^3oZbMHDSa6 literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Srednekolymsk b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Srednekolymsk new file mode 100644 index 0000000000000000000000000000000000000000..16b1cd8f9757d845a7966ae83543750bdfc79e63 GIT binary patch literal 1244 zcmdVZPe_wt9Ki8sZRS!B66BxeN?W#Use7lHwYjWiV%uoQE?J%W1GUSK=pc14s1A|+ zIYfsM6;x<p(ZM9b!wx}*BtoQvbP%$G1o03aDuVTV-!&L@>ew@$_w(%8yTSH+mmWHK zBBcHrm1c*@h?&W{`n<V3uJ2fUet&#njemQprY@7Kb)R}y`~7Zj-O`;@{rqfh?Tjzw znGQ<BM3Z=Lua<SAPFa6$SsF8+r0LLm*${s%zVHj#7<eSjo~&%Dydr+rl=d%=>CKB{ zs%7|qYMHyNx4b&51CtrO^}%7?dNZlpE=G0xnRXpK)~Kx`2??F{OGi(ebPlePZQg3> zvOY`K&#%($Dc9W}=48i@CB1WcO2YGRbmZZz?s@q{M=wq4UD@lp_xLTn`@)3m=^NIu zqt|3_^N{X~iR>$j=ziaz^nW=h1I|tvcpH}eA8RE3)GLXZkWOCBOFBNWFp|&bmBYEB z@NkutJXV&MJSlzQ&$Zj-QtC#7`9;;}J!RJ8Ifq-R>@!tWycTyHW~X?MGWQhsRaGLH zRskz$X2?v74^xi9u-Ao4`X7$?+wb?8Z<yz(k>h5YBS(%MIeJ@j{73>w21p7>4oDJ6 z7DyUM9!MfcCP*qsE=V#+Hb^>1KDH(yBqIhX*_xb?q>!wTw2-`z#E{I8)R5ee<dE!; z^pO0J1Z_=*NQ$;5M<hukOC(JsPb5(!QzTU+S0q^^TO?g1UnF59V<cr;lQWXEt;rfm 
c8_63<9LXF>9Y;(9|55H05%X7wwpv!jFFREuFaQ7m literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Taipei b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Taipei new file mode 100644 index 0000000000000000000000000000000000000000..748873bed9a167d0226bad2246556c45432880d6 GIT binary patch literal 790 zcmcK1Jt)L+9LMqRd3J8VWKkFtB{CUAc^K$S{uE`Cvi&@iMEWfTEG7@5Vo^r2=mwM$ z1CBfAJagyy7XyRh_dYfQi{JJCzpj7(8}9RVT01*R<;O|1Pq;V*_TqcGwzt)$I=$nx z=-sgay;rqh{Iz+q@Aa93a-Rf}I#e+GSqCH2=I|q5LK|D^=(I(L=fxa%_sGd&yE+{l zma~aTbKbVBFZwpsWyQT*mE@@F+%Ab^&Z%hJtd70}%*{nWZv7s0x1Of&mnzM}>$^No zt(m7_u{Qm0D*3%xQXWQCYIsDuw^vL?Xhvr`VdaW*#sBFe(iOG;N0gQXk)d%(;qLN$ zJ@Ga;Qr+lCy}hby9A5lB&hLX2+E-(Py?y`Y!=B!4lZ6b3jED@0jEM}2jEW44jEf8$ z(~gV`jf{;9j*N~BkBpB5Kq4R^kQhi1BnlD+iGu`+X(J(_kXT4CBpMP9iH8J4A~Fvl XImASQB2kgBNL=~%x^5f2sLcHdVF6z% literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Tashkent b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Tashkent new file mode 100644 index 0000000000000000000000000000000000000000..6f7dea4abca3b703de677eba9a88fe02436b1ad5 GIT binary patch literal 635 zcmci9Jxjwt9DwnQMy*w$i?%UU>uZ!C1fdEll1UIMbjTp$P#lzkLnkMtf~$icz#-r! zE(&fU_W@j-T^#D*pj`xY(D;9%1;NS13wKXA61e;ltd=*X)UT0{9VWw;$$e^1mJ@5g z{o=UpbvwIZPv$(8PMqb^Z}*3phgxXeTB+Xaa%f*}>+I>K?mJr1{Rc~WU}sk6wp=~9 zmeoUrE<HT=p-0@h9<}fESjUZa%y-{;e)7j_kHv}7l~;|TNEyvdUzgeP<7#a)45j=R zIf+^<DpRTTiXn#!N~*K5Hg*iz;hDm6pj<C6b4KPguPQ@Ed?QQn2ZGn{m*j*I6ci4& zA)&BPXyOVFg^0pLp`vh6$S7<SItm|!kitlzq;OJ5DXbJ)3U6E?rZ7{eX;r@AAGwqB K@-r4D^T|&sXn$D% literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Tbilisi b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Tbilisi new file mode 100644 index 0000000000000000000000000000000000000000..4b2d2e296e7626c26b8ee570b59d171a8f2f72a9 GIT binary patch literal 1080 zcmdUtKWI}y9DwiDRP#p%huS7-{nM&3{iA1_#HXh9T2Pk?4+#N9A%iUl4jqb<P{C$% zDlQf$6$f#6gE*89PY`raC#ROkEh-9viy*f0{CzJ82oBEP<L*1|T>{DXRW4qg9aVpg zl$<abC7JBIZp!jFz3hBi+wOJc?gqPh-s<l78`J&M@q50gAiZAHnb-9oySik1pWL#2 z_vY>H+cS30jjG*y?X2B*$+i8{c{@<f*!{yPdmy`M52n7_TxYtL`<|>F`jD8**VerJ zyZ7GUi|1zOY2Xb%d|*cI-1iPI*3FUm>qcL>ppRZ&3dY7~{lfW4zt~p|N}~gQ>6aV0 zS)<)XCOE#)uTQLQ1}E2Jy8I|@##cV-iTZ{)wfst(nT9cqm;U5p@3*PbpXV0BFmyU% zTbfVj)<2$Y@s0!!uUhZh_Y(0~OsRY*e+aAW2enElb;fgcDCHWpv%R)Y964#;aSeO! 
zOrNW$%9JcadQ4_fW`S>1%8}9BwbshNH64?yk)|K5&&V$etIt*(h&G5iUZW495TY@n zsD$W*D1~T+sD<c-D28Z;sD|i<D2Hf=sE6o>Bml_(l0rnu0g?nHi-?j2Bo9a;kW3({ zKyra31IY%G4kRB)LXeCgDMgf=AW21(tRQJY@`5A=$qbSjBsWNMknAE#dXW4e3Bm<& QqyB>qlVxdiVx*A#4Y(KMMF0Q* literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Tehran b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Tehran new file mode 100644 index 0000000000000000000000000000000000000000..3157f806b7d7fe650ec6726f31398824caf82cec GIT binary patch literal 1718 zcmdVaOKeP00EhA02NkKd+7c0ohIsaMJJZKe+Kvj2=~GQx+IqB@K4{Sw>eVg~LKnnB z<DFE}s0ATTBNYphBS=b0NJ+5Kg@y!)AQ|TyHezLA;ZE-T&D^AmCjakv%4)q1>yInM zyy52Znw#%a+RdYD^B(<pC{l;rydcAy<6RM*E<0i*-8H_rT1PH+tEjYhIy!!WnjO=s zXAhlLbMBwgb8k&kvAqK_?re#O?+nrj?T1w2mNaQs_KT##I_=2*pq!5PI(bHmN{OB> zQ%CoR)Xyb4?d=OCo*tI-?nQ|7zHmAJ(h0GkD_dtA(JFIStIVqZE?gBodSOAL%FasA zi;}LXrEl&@`J_NByPGfFHwHz{#SWc&GDPJaye{+G=8J-wVO?0>pqA&<>!OU0YDL_6 z>6to96ptI!C03a#c|S&$zU~w&A03se?nH^QtG9J|_Yzfc{F7XLpiQi4nWfiOUsmf% zdv>p{m3yxjxn_HcXDv8$(a3jo+3^@5n<@{qljV87LC9w|u(p%6_%!oKtQ-)+g#a zyt<+3o!U~>CASvGh;56f%Eq*A(KNG2H&1$|T0#!Uman0rb+}J&AIefY?q}+q{dZJb zZ=>9G=8M>Uv|I0Kk5zjcp38leYsEgF&u0Y$1_uQPj{2vcK+Cd)834~<%SsZ~7|U8x z7!YPz!evETR+bEy`15^kp2zZ-JJxnOZSzbqcQUv6_50=di}ii^^{3_^_OU`_iM(cw z$Rd$d`Zdc$)`=_>St+tqWUYS9Vv*G%%SG0UEEriavSeh<$fA)|Bg;nCjVv5lIkI%W zX6?x0k<}y1N7j!NfK-5#fYg8#fmDH%fz*K%f>eT(g4BW(gH(f*gVci*gj9r-gw%u- zg;a%<h17)<=GRn)l!nxX6o*uYl!w%Z6o^!al!(-b6p2)cl!?@d6zbPhij<1fiWG}f zi<FDhixiAhjFgPjj1-Mjjg*bljTDYl?$?x#)Q%L7RF9O8)Q_A1$SHuF1juQCoCtnR l8~^XAFm{3e<j9P7raNu(-JBblac*HJ+Hr12YI34I_6L+)nlb<Y literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Tel_Aviv b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Tel_Aviv new file mode 100644 index 0000000000000000000000000000000000000000..df5119935c5b01676e63d1fb1efd7273b8b4b452 GIT binary patch literal 2265 zcmdtie@s<n9LMp8WDL=qEV0%QuuMboa)Ap{Ng4_Qf!yN86a$VCn5p!b#55tF?1vc^ z&SkCnbIaK1heO4-<|1p;*+y_umZ_LeKft>LcU*$tru4i|t=jsdKim4U!|Uu0{$YIH zfs*A_DdHa&W<KHJ8qLFdlrrCTd$V3UvG|~F@A<<2^+z{F$MLInXJeJNo7dUjylm-{ zuYV;^Rko<E71};sGQm21A@=BZiHGHxm~4G^e9$?2>5x9x>J#0c#_99=4e@<zhq|!u zbEO+<#Sg{x;>Wf;>!&oI{qxRiqG#enySL_)z8HB@1*eUOV6e~*uWQiZH(Kn`gTGi2 zO(}MC+mN2L`HCIWK4QgM@A~659<V0o9=7i)+pX`8EA>w)@mlwYIsU0*`t{Urd+lkL zI`zFrNBQHs`mE{AvG$B3l{#T_h&^-10_`bp_9w27*WR3Cc2dsUI(c@rKPBmq<-7By zof^5_O1;wLpA{OeW_5<h^ucm<U-z)S-|m+W>|bKdes8bL*n7}=@THA%PTd|ocTJjn z$iL6Z%x@I)ath`Agw-M|X3)tVTO_jkDx3v<sUqj%fXr>bUF5axbRPb2SmbXR?G$W} zQVTcjkcBnZl)o&~c_eT~J-RYi7Ws~-K>iH**o0lGIN?@Ve076b6cz6*>RPH64~%h2 zTIZ=HUH#7EyDU-K+Ub<lXNvN-4mein6tT3vLq3raCMrr><de}sQJJw#R)vMCWs_>; zvYuY`)Qw`f{MZTg^!aqT;{6ZQ%HsiN)hk<6byJ%2Ol7r_uioLTUf3X>t-S86@vIUx zS>4Wa;|fKsXIR!=P7-U!os(-jZWHS+e<q**ctEW0*dy!SiB$FbUz9J@{;D>-Ss_D4 z-SX#G=wGk@)teBL-GA@6ArUIXbZ<zw5YcgBtPtsQL&pF9Nq#{<<e4XTzIlq!y!`zF z|Gu5A=I7n*4E^zoBj$=f`SLolC1g*?rjT7B+d}q*Yz)~MvNdFH$mWpUxti@E`$IN} z?9kP05!oZMNo1GEHj#ZI8%1`CY!%rnvRP!e$aazaA{%x!JLb>9mR-%Bkxe7JMz)RY z8`(Irb7bqt-jU5CyGOQ<>>p`>tLXsJ0;C5>6Ob+-Z9w{fGy>@a(h8&(NHdUbAnic< zfi%R`bOdRMtLX{S6r?LiTadmWjX^qtv<B%7(j25aNP9T?gEWYvLr9BUO^=W!adZi3 z6GxwrMsaisX%*5dq*+L}kai*cLK=p23~3qCGgs3zq-(CGZAjmc#vz?UTF22lq<I|O zL)ypDKcs;i9Yk8l(L<z(NEcmA8<9RDjYK+$v=Zqh(oCeANIQ{!A`Ru}DAH1no+3@< z=&Gw}D@R{lO=CGai?o)bw@7o5?jr3)`YZmA0gf<Po|$a=oKTSB@p*iS$!Q4%NnTH? 
H#~bk*I^)#B literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Thimbu b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Thimbu new file mode 100644 index 0000000000000000000000000000000000000000..a8bddb9fa333da5c60c48132507687b2da74470a GIT binary patch literal 229 zcmWHE%1kq2zyQoZ5fBCe79a+(`5J)4)n}q5iiy4}>i_@$&%nsU#J~{30~BWPjR2_& zNnqge@eN_nHZV0d0Fq`PDIf?T!ImEoEet>l{)21-aX~hMXwq!uvH`llPS=bJ0M+&( Aa{vGU literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Thimphu b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Thimphu new file mode 100644 index 0000000000000000000000000000000000000000..a8bddb9fa333da5c60c48132507687b2da74470a GIT binary patch literal 229 zcmWHE%1kq2zyQoZ5fBCe79a+(`5J)4)n}q5iiy4}>i_@$&%nsU#J~{30~BWPjR2_& zNnqge@eN_nHZV0d0Fq`PDIf?T!ImEoEet>l{)21-aX~hMXwq!uvH`llPS=bJ0M+&( Aa{vGU literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Tokyo b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Tokyo new file mode 100644 index 0000000000000000000000000000000000000000..8ad44ba981a2d97d40d3de2c8bf29606be50aa4b GIT binary patch literal 318 zcmWHE%1kq2zyK^j5fBCeP9O%cc^ZJkbvvel>u<du-1zaU;O1G~2e;W7KHOf*|KQHE zM<4F?e0;#n$OM5549(0y^$a}=7=fDWCNOY7NFU!21}_&N4h{iHGlFmkVMlNQ)qx%H z1EdQG{sTekp(O<%8ssQ24RjcY200EygB%E=L5>8|K!<{8kYhnK$iWO`xtI&+d|gW} E0KK7P_y7O^ literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Tomsk b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Tomsk new file mode 100644 index 0000000000000000000000000000000000000000..919b0031d1cc782d9789984e00f074fb6f17c4e1 GIT binary patch literal 1255 zcmdVZT}V?=0KoCHyt=VT_)=Q7GM6o#)@jvTZQ3<kHm3zK2+Jt6#2{!y&<BAFtzIJb zP!JJ>IO-+hKpzhwbg3T2hY%w~N~#B6EXoH%Vx9jrVboKPoy)oZv$GG_&Tq*>Cwqe8 zui@4^Oh!yk)+K3u8~nWQ-P56|ie>)OndN0!QR*5hDgE&_T{b<QSux=eD@P7z%7=T^ zs)rrQd$(1sz8+C)F4d}vQ(m>UbE&FK<diS+Rjmt5sjBifYQ5)`@*7{IfA)ji@aer- zeg2wRJ@!)8ynZYLPqMN$drQ{cyec;ioRgc*_DXrYLvB7Ypn_+Psx1vYYU_a}wavR% zg@Qg6`dOpG<)#cz7OU;^m2$`MjEYPcGJ4-5qodQR;rXoDcxB$)d1u@-PfVIk{okxz zu@`1@_n5W2YS4@|KeP4}_M0uf+g8iWJu6;t(Ta~=wpu@ST8T&9R@=x0v;9WQN+!O` zgYh}5FPF;+N5PVX%P7pdikx{5@}6)xi~hI_r(p=O|AGE(Me>-?>r~p|5~3?5iWl#T zI}W|G@S3g^YhKgVo)iHY(le}QJs%?+dI*QTv~S6OIOMtCZ`0o`$6-?roNW%Ba_|fe z->wEgK|q1P00RXC1qB5L1qTHP1qlTT1q%fX1q}rb1rG&~U5$tWiGs<l21P+dfknYZ z0Y*Vafkweb0Y^bcfk(kd0Z2h;S3{&=w5vf<P*Px0a8iKU)krB&DOf3BDQGEhDR?P> vDTpbMDVQms?P}B%*mgB;3UCT?3Umr~3U~^7+NVwY7x3Lt{g-I0t9Snb_fHt= literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Ujung_Pandang b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Ujung_Pandang new file mode 100644 index 0000000000000000000000000000000000000000..3a5dcb27007e1a2ffa67bbf3a30095303b5d67a2 GIT binary patch literal 288 zcmWHE%1kq2zyPd35fBCe7+bUf$XWO)cET>Ps}oLl$TnOGdso1~$i&RTz>xm|q!LI1 z*#!*@91IL~6Bu~FYyltN5C&f$);6#JVoQc_&k#qDYA6UH!395{dj10e$R!{e<QfnS RauJB8j;pvpuGF>Q0szU5Jx2fl literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Ulaanbaatar b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Ulaanbaatar new file mode 100644 index 0000000000000000000000000000000000000000..94ddfea5f2df592be9967734c16563c72e63b021 GIT binary patch literal 921 zcmciAJ1m1y7=YnZDiN2sBoepQT~Vc|dt3L34x~t22GU5x;0z{&Gl&WK2LodG2OAr~ zU^EzPE`t~uEb=Ev43au;V>4Nt=JaWsHfg^13y;mt)Qexou0P>&eERY_UFoOwo7aJ~ z<VPd><x1r|d|A2IA(eOGmH9`t%CVm*of{ddVCh{J&b_Ik@q1a^f2T^?&t<9mOqJ#B z$@1h~<+7hzuJ=<>@w8)A-tLI1%SFq5vM8z#CM?hPgs52yTD1#7;f)kpb(3SNJ`|7* 
z0l)Hj3S^_hshU!4vhC&AY(F_N{0E0-V0+65u5Fqf3)1L}%$r@|A)~u@(CqQ~jNZ}) zv(GV@KAg2@jwDRUk&k6_^tnf_Mx#+-i;w%`BVLFI^AOXLnXDMLL?NUSDFdOfaC@8e zZPmAhuf$IH5A*%}`HlW-QRd8BW{u1nnK?3dWcJAXkp@Tyqy^FgX@Yb?+8}+9Mo1^5 zRZQKBLo=ishjvIm4h@lx99kkhIW$GOB5jerNMock(i-WFG)KB4?UDY-4j_Ag>;ke6 g$W9=8f$RpdAIOfx)O&*L3jWu=B>DCB1U)TD-}#Kg!vFvP literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Ulan_Bator b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Ulan_Bator new file mode 100644 index 0000000000000000000000000000000000000000..94ddfea5f2df592be9967734c16563c72e63b021 GIT binary patch literal 921 zcmciAJ1m1y7=YnZDiN2sBoepQT~Vc|dt3L34x~t22GU5x;0z{&Gl&WK2LodG2OAr~ zU^EzPE`t~uEb=Ev43au;V>4Nt=JaWsHfg^13y;mt)Qexou0P>&eERY_UFoOwo7aJ~ z<VPd><x1r|d|A2IA(eOGmH9`t%CVm*of{ddVCh{J&b_Ik@q1a^f2T^?&t<9mOqJ#B z$@1h~<+7hzuJ=<>@w8)A-tLI1%SFq5vM8z#CM?hPgs52yTD1#7;f)kpb(3SNJ`|7* z0l)Hj3S^_hshU!4vhC&AY(F_N{0E0-V0+65u5Fqf3)1L}%$r@|A)~u@(CqQ~jNZ}) zv(GV@KAg2@jwDRUk&k6_^tnf_Mx#+-i;w%`BVLFI^AOXLnXDMLL?NUSDFdOfaC@8e zZPmAhuf$IH5A*%}`HlW-QRd8BW{u1nnK?3dWcJAXkp@Tyqy^FgX@Yb?+8}+9Mo1^5 zRZQKBLo=ishjvIm4h@lx99kkhIW$GOB5jerNMock(i-WFG)KB4?UDY-4j_Ag>;ke6 g$W9=8f$RpdAIOfx)O&*L3jWu=B>DCB1U)TD-}#Kg!vFvP literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Urumqi b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Urumqi new file mode 100644 index 0000000000000000000000000000000000000000..b44a1e19e9b1212585e55bd65be77665f4ecf6b9 GIT binary patch literal 187 zcmWHE%1kq2zyM4@5fBCeW*`Q!IU0b(hJU+K>i_@$&%nsYzz`$@6k!NSU|{j_4PnqW jFavQJLP#+32UrIK1JK0(AfrKC0>*RM04=i9HRA#R5AqyS literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Ust-Nera b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Ust-Nera new file mode 100644 index 0000000000000000000000000000000000000000..7431eb97fc4eaae4d8b4bd9449821d4d11a4b183 GIT binary patch literal 1290 zcmdVZJ7`l;9Dwl?li0L66w$UCqph{Z8rvqZmei!N+7~s6mYT(?lMm1!{1JzuU<*D# ztdBt)g7`qC3RN6xk<cN7;$RUe#i8n?4n>fIWKa>}`EDFSog6)vbH97fO#-?1casB0 z4hPj=W3gFbGCIxVxN6w!?!5bud3yJSTISs?6;-KhrR(UM${)AdtLAR@EuWpvu9&Lr zb5HtZ<yeh)ZY+^iBX(JRDks&c&r-AZt*nW?l-lq!S?hZ!b?&sRTXa#pjtT9}jq3HE zMpgabE>%BsL2r0|Qv1eJdgHx)y5U+vH=d2^rV~xtf3R8y_Qxf7+$)>f8l}0vR5p9c zr6ur1T7G?#kh?^O-p@$u&pEwiazethuXN=8v~J5h($RC{dTaWMZa;KgZ#y$4+q(vJ z$AQbzSvR1&Iz+mi5#3YUFFjxPNUyzFdS8cS$HxkZJ@!a^DyS2e^0M<-u00vc=%IW* zuWSn(c58DkEZRz3MJL4<<{y_jT`r~0cbnU)WLP<rI(=1{mGl$t_k*mheEzY`tdHDQ zOS!7d)xu|3i6@fE=MS0Ls(gWfnL#uC{IpWG!YG^+_Q`*E?%)4>(fs8+&!1v|fdh&K ziU+I41jPl#2E_-(2*nA-3dIY>48;w_4#f|}5XBM262%k6lvU%3VvB(<tHv0`8O0jK z8^s*O9mO8SAH^WWA;luaBgG`erB!2-;?t@zN^wfDO7TiDOL0rFOYuuFOmR%HOz})H vO>s@JP4R8j7^gV5YOGVdQ_NG`Q|wdxQ{DkOWGwv8??G9_yd}|wfWPcF#)vJ@ literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Vientiane b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Vientiane new file mode 100644 index 0000000000000000000000000000000000000000..8db5e8a61ee9e63164ce7f07969557fafa982dde GIT binary patch literal 220 zcmWHE%1kq2zyQoZ5fBCe7@Ma7$XS$?ex&~Y|No3kObiThHXwN*$-=;pRKURD;~T=@ z1jO0~<{$|m2qA1dP%qdDo*!Tx3=BZi|A%dBJp!UZ)`5iUfoujM?c}ln+Ha?8&IJIu C{UtE~ literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Vladivostok b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Vladivostok new file mode 100644 index 0000000000000000000000000000000000000000..80b170bca4100c8915a0eab6602589e22bce9ee3 GIT binary patch literal 1244 zcmdVZPe_w-9LMqRbk<OVb;wyuv(%O?+a62NoGV>6)*M9ak~J9g5AhZq><{{diU=e+ 
zbSRMsOzL3KAq~cEStkh(br~J3ymSen4jRPzy`ME0b?Vq>Z12a%Dd_LD<lxY`Hu-B+ z<_=F5F;A}DYF-|nh^A(z=2kZO`qMSFmli#)ktNUC(_^(?X40E;ON(0;th6^9)~z!E zt($Dn`tgmr?P{4e41d$@Czo|c{Jl1IztJZDOKtW((VbQI)#rRIzVCCgYkAh)GLmw) zyt^m6=WmJs#bw#^XjoeBo|3)S6B4)>mf)FySgAp6yU?!tdP2JYxJTRT>b1jK(~h5_ zo!%Ph{P0P;eyqxY?7Vj8auS;UC_S01gm1i(gA)(r(AjC}y_V6w{+kjxJ*kJAhonC; zsz)l~5^YRt^y_(zmG^1vZA_1TZqoQOzYZ+)O5*kgO~x}TWBGhu9OY$&hqI#Ov2K0I z7p33$^V;Qfid>m8|4|veBj$SIxx*zg{!prmuf;nKbEo(oG4Cnfm+FB;QvAU#@mrR8 z2Ki;;D9pYtyd?kOh*y9AocRm$95v;*xy_MNj-7J!cFplq2v8VMC{Q?1NKjZ%Xi#`i zh)|eNs8G02$WYi&=ur6BHG~+9D3t6PP7G2ERt#DUUJPOkW(;Z!ZWMA9b`*LPeiVXs z4MPe=yM`l$B!eY|CW9w~D1#}3DuXM7EQKwDE`=|HFoiLLvR%WOLfWojO`%QUO(D); WPN7c6jDi1%du7Oc6vC}mu<{o<5B_}s literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Yakutsk b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Yakutsk new file mode 100644 index 0000000000000000000000000000000000000000..220ad3db5fe8c36d1c32b88fd789ff9c71d3e94e GIT binary patch literal 1243 zcmdVZO-R#W9Ki8sIbTqT-=(ycxze?4YIBur>9krVx=B%kC@F$o=#3arln^QOdXV@X ziXb}l9)xu1<zU`YsF0xYWE})MR0osjP(-Zf`(J}mr;a^;{{PRjXa5bh=lf4?-M1%X z{u(8Ag_9ApCv)kum+NmoZGUug#LV(<O3f}mH&Iq}cDn5QlO5$B2UBy#rzYl(wx&Eo z37t0()rzb2I={bCD^G}e54v<g_ZM9lpVX?RF|7`asL%6Q7nR&qzx$2&zYfdd_fNew z#}9aG#_q_H;mZ<ucwTC6^+?^t{ZfCrTb3S4NN`tF8n*4#(BYUai+1YrjXqsbQLBxi zLT&sxQ^THW3BQ}x$d3Y9IrLha#y?2&?avY&d?77oUP<ef0a>;Cfvi6DOxLtslGx6B zy4H70+G72>E`N)(R~^#!siWGFw?R8zZr0ApkjC#vG%>nfx-NJ$8Gmki)9JKv<;}<( z?);pipfKk}?i+v3i`;HwdT!W%)TH{1o%dgJ6&Z6<O=<R;U2)l!>^;WblU+BZiLRsx z1j8oK5VS`4GULh&XP&tv|6z;2{r;T&g=w}L*=`0~j%+)!^^SJ?kpz$okQ9&{kR*^S zkTj4ykVKG7kW`RdkYtc-kaUoI9Bo2KMo3DIHYX%0Br7B>BrhZ}Br_y6BsU~EBs(NM zBtIlUN1Gv%qNB|bNfOBtNfXHvNfgNxNfpTzNfyZ#Nf*f%Nf^l(N!iinj3n)7vqsWJ b@<tLzGDlKp^x6jgquh&|?W53A7b*S)6q5sF literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Yangon b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Yangon new file mode 100644 index 0000000000000000000000000000000000000000..3cc2aafac4e27bcbb2de4b08c6451c1a56452808 GIT binary patch literal 297 zcmWHE%1kq2zyPd35fBCe7+a_T$XWQQIPmnRKN1&brGKdZ|NlQD6EhPN14EQPNF|VD zVPJ^PVBlb2sGGpR2Vwj8hA;#Hv9^Jku>p{@1Sy4r5W?280hNNBAoK&O2Wad6ux*_e kK{UuYAR6Q(5Djt`NLf9Q%|M~sxNN{Kw$nAUGB)4>0I_91JOBUy literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Yekaterinburg b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Yekaterinburg new file mode 100644 index 0000000000000000000000000000000000000000..c1abb935c655ad4a8288bc3bd9445847e832f270 GIT binary patch literal 1281 zcmdVZPe_wt9Ki8s>S~)FBr2_Ct<0t_Yprd}nscSjtO*Y_2tsL)2}PG8DJU>vR8SOA zr|MuCp87K(PZb$>wE7*ogj7&oBJfZh3LESBzH2b*)UoH~dB5-byxU;!`*~CQ4h;s> zU!%&bFd3a@a=iGA+1=dl^xj@Ho}U=a#g}+5W|!7ZdTJcc{53zmj?{kol3h0A^(>!0 zk##@G$clSIQg>@mR$kgGt4{9_&yiMH-PbH@l5VMwR>@jlP8!^Evaa&Ic<q(i`|YQ0 zoSjcMog7U!y?&>gr(S8_qe;E~?znEben)S}T+<tmXS9D{NN?&Nk-)K2vbpW71b6pK zsBS>Qfp!V!5)yIu>B!qgX`PShEl*vtb;hHkV_x0%xJ<{+Id%JuSsg$4S#QfI+1~X+ zcO3X2oed9lSI3kj>{oSn{g`ym-It#73)1uQitPM&Sav-)D#_{dy7%%PNhNLZ;e0-? 
zY-JVYh0VU8WLxMcIVrvH=dsgnS1R(x+*TEQt}4uNTsf3VrETVZC$y?A?u#onvr_yF z&!o7ns*}AbrOovDgJy=!4Dqu{*$Sg@QrM^d;hBH`{YCS4^E`XX^Jidy%Q&D|pm?xq zOi)}<Y*2hqj8L3VtWdmA%uw7=>`?qr3{f0WEKxjBOj$LqD7GlRtQun!XB2A`ZxnMB zcNBXRe-wihhZKtxj}(&>msX8UichP?D8(toD#a_sEX6IwF2yg!FvT&&GQ~5+G{rT= oHpRD9W1QmLs<BS-PBBk$Pq9z&Plt_x|MeYkMa>%$YYDl20dKP)N&o-= literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Yerevan b/env/lib/python3.6/site-packages/pytz/zoneinfo/Asia/Yerevan new file mode 100644 index 0000000000000000000000000000000000000000..4c4e045bd3a47ef74883c0ec74261e8c6a41dc1a GIT binary patch literal 1213 zcmd7QPe_wt9Ki8sZ7Z7+UTT|5`(w7|zh34xR!v=+E^7;k(IGlKQ3m}vyo6APU_sU? zqN0MNsDpF}sSX|jGj`}AsE6p(3Vsj~6h#C<S+Sn)n}XP>Tkp&Bejc9vf$jIr?>~AZ zs{Xos=7h!7WfuGDv*vL>IK1=S%c}mB;hV)(ff>tRF`f3$x*i4It75Pew`!)&7ei03 z%IeW`QhRGq>Mjn*nlt-k?TL)6JCKz1*@)DqYGgxmr8I<o%Ero%67~jl_^VHEdb`|? zyqL8cpM9{K#$MUYBhz-vwWoIL&_jFk;9c7~e8rAV+Qqi^%M#mrT;jDSB@xX@;#Xdh zp<_DvIw4zrWcAjGfNU$Z>eRisZXd7K9T)t%bNGu+4}90#hgL{ePf2$l`YbydCUj5t zbLsWo(V6-&$$T1@KKBjjo4GB!-kg@*4+@f<x~_9|iFEGLLD@5UN*BuIvU0hX{JA~e z1^3crbGOI6@Obgwyy#IX^4R>Ds?kzr-`nr1P%4>LRrAmJ6PGzz{GMzsubQkjv*KpO z_!{MME@$gJ^8YZ^Z*w0rKdj7jkqL9ml#xjz({?lyN2ZQU9+^H;08#-`0#XA~1X2Z3 z22ux72vP}B3Q`MFjH9UrDF>+sDF~?uDG8|wDGI3yDGR9!DGaF$DGjO3(G-VN=V;19 z>O%@dDnv>|YD9`eszk~}>O=}fDn&{~YDJ1gs&zEwBK10&f{}`ml98H`q8&}uNZD90 Qv;Y6=_N7es9W626Z&TF~XaE2J literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Atlantic/Azores b/env/lib/python3.6/site-packages/pytz/zoneinfo/Atlantic/Azores new file mode 100644 index 0000000000000000000000000000000000000000..1895e1b1e19120123e67367e55de3f2a5dac99cb GIT binary patch literal 3493 zcmeI!XKYn<7{~Eb8B&HmjLInEN@Y|Ebg*sVFbX0R1d3FKDAZDhj0%Jywm}?dgO3Jh zfgXlnDwH7zN}&fCQd$avRgq2EAVWYvg4h4|5EBv;<%Pbp&Ap#;b8pg+^!I(@`wvfw zwEpr{!MwtQuS4d+&(+e*)4M0LoQ!TWHRJbUXU^O}otu}b^HOJNX4YPD29MSG>7{i+ z`)j&z^jo?pBvd}>nJZbP%W2lt&tzHN7Rg>>%W`L=u6Vt!tep9VtQy-^KTlbttDp4g z+Q?*G_uz3|=P%MPFK*ME?cd3Ubz5a)=2Xr7aEZ7X6=c(-neuh=6xp0GQns{vShu!{ zlWlE2mv0ib>o@yuX<k4#&AS|}JC3DF{+cn8f8MnU(vq!$1G%#Ey<W0&^RTqSlw7ND z>weueAxd^Fonh_nJ>S~>$p-zlZ;*VOo@DKbe8t-HYNqae`lx)@b&nLa{@MOM^pO2S z^<DOlfv)}I&6W1PGYjkkUrw+Ou5#Q%3*K=LzdPMMGW|vOXiB1cY)D_XxOZpwc!!qm ziO7cT$-33uQ^B$J>2fXX)3+MfXU<i#&+ZSj&+YilKL6D@`{%3!_Jxmk+85v4XkVJL z+`c@*v40tw?_TMX<6iB!%)J&h&;2#zZTEWRH1|e{aqf*PL);1(q4L06HRQn=u`1(R zXvM?^S}DH1Q@K;1R%uqo2@1LCJXHCtQ?<l?r|Ojgr&@9Tit2l_of?}no#32&U(KaC zzFKpZ`D&-n^VNCjZJ%#cn(yI(<9v0y5AlUW_whXv*2!0|W^-Tt^7VZUO7(IYT<hR8 zJQ3xD7CquTx;4mYwEBjIEjpu(XaA&4X5?$r@i`ivxJ)Ak?UTrq)e;r6UYhlOUz*pN zD=i`?NsHU5(kghiwmSHnw7xY_+id74kL^#;=*5FIW_=rN`&xT#mr+ygq!4XCqqTJC zTuM7821}=Ur!=<XZRuPpPrHO1mM#}gO4kybq-)`Jd7^lsbj!|??ghCTH!EFwES;}C zM~{{#(_hhE{od5xqn_73Ek|j5_b%GEY7c!XEL5Mm9<B*Noh9LiO8WG*hSL9wt2&^l ztPFH2%AnQf<(bqgGI(~OJUh5Zo||@7hqPNOLkAUTVx5oV`Iu}?Dziq1)lwaH`U6eA zoub3HPu39!<8|bx19a53aq_|&F*<s29~qNeN5{U_T*mc^mXxHb@?u1786SI7CRDU! zV$&d<c<Go-df>WF+Py_4U)ZNpRxFmOh3oaDkG|Gv*>m;f$%`~~R;s4@{r-T`_mwDF z>K?!EH}AXqaZjHMyuZxfewQs*{=fPz@Asd&WLbXyiH255zyI>3fHHo6@e#|se)rG3 zV&=yR5MH;;dwAv*s|rtlzTQ19-g?|T8iz-iN2Jv#JlyIR8_!#;0KRzk{$+Ut1jPUI z0v<LW@D}jKollzI*6)8NCRmY0L{`z$EF-dx$U-73i7X|umdIivtBEWpvYyC-A}i`? 
[base85 data omitted]
literal 0
HcmV?d00001

diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Atlantic/Bermuda b/env/lib/python3.6/site-packages/pytz/zoneinfo/Atlantic/Bermuda
new file mode 100644
index 0000000000000000000000000000000000000000..548d979bd1ece82ce76c18fff8a4d9a919ba531c
GIT binary patch
literal 2004
[base85 data omitted]
literal 0
HcmV?d00001

diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Atlantic/Canary b/env/lib/python3.6/site-packages/pytz/zoneinfo/Atlantic/Canary
new file mode 100644
index 0000000000000000000000000000000000000000..544f443a096dc1edb447ff85e71d0f17e40a227e
GIT binary patch
literal 1911
[base85 data omitted]
literal 0
HcmV?d00001

diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Atlantic/Cape_Verde b/env/lib/python3.6/site-packages/pytz/zoneinfo/Atlantic/Cape_Verde
new file mode 100644
index 0000000000000000000000000000000000000000..6bda6db7604c316edf78226eed2f785ee5f8aecd
GIT binary patch
literal 284
[base85 data omitted]
literal 0
HcmV?d00001

diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Atlantic/Faeroe b/env/lib/python3.6/site-packages/pytz/zoneinfo/Atlantic/Faeroe
new file mode 100644
index 0000000000000000000000000000000000000000..c4865186b035081c6f216f75486310e78f03c716
GIT binary patch
literal 1829
[base85 data omitted]
literal 0
HcmV?d00001

diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Atlantic/Faroe b/env/lib/python3.6/site-packages/pytz/zoneinfo/Atlantic/Faroe
new file mode 100644
index 0000000000000000000000000000000000000000..c4865186b035081c6f216f75486310e78f03c716
GIT binary patch
literal 1829
[base85 data omitted]
literal 0
HcmV?d00001

diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Atlantic/Jan_Mayen b/env/lib/python3.6/site-packages/pytz/zoneinfo/Atlantic/Jan_Mayen
new file mode 100644
index 0000000000000000000000000000000000000000..239c0174d361ff520c0c39431f2158837b82c6e0
GIT binary patch
literal 2251
[base85 data omitted]
literal 0
HcmV?d00001

diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Atlantic/Madeira b/env/lib/python3.6/site-packages/pytz/zoneinfo/Atlantic/Madeira
new file mode 100644
index 0000000000000000000000000000000000000000..e25f8a599622cb45cd2afd7bf739e4d1ed5ed589
GIT binary patch
literal 3484
[base85 data omitted]
literal 0
HcmV?d00001

diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Atlantic/Reykjavik b/env/lib/python3.6/site-packages/pytz/zoneinfo/Atlantic/Reykjavik
new file mode 100644
index 0000000000000000000000000000000000000000..dc49c32470906fb2ff94fc599c73141597443f98
GIT binary patch
literal 1188
[base85 data omitted]
literal 0
HcmV?d00001

diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Atlantic/South_Georgia b/env/lib/python3.6/site-packages/pytz/zoneinfo/Atlantic/South_Georgia
new file mode 100644
index 0000000000000000000000000000000000000000..56b383b16db1ec8394d33e31863e468ed4bbc9db
GIT binary patch
literal 181
[base85 data omitted]
literal 0
HcmV?d00001

diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Atlantic/St_Helena b/env/lib/python3.6/site-packages/pytz/zoneinfo/Atlantic/St_Helena
new file mode 100644
index 0000000000000000000000000000000000000000..6fd1af32daec193239ab6b472526fd3d6bdb2f76
GIT binary patch
literal 170
[base85 data omitted]
literal 0
HcmV?d00001

diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Atlantic/Stanley b/env/lib/python3.6/site-packages/pytz/zoneinfo/Atlantic/Stanley
new file mode 100644
index 0000000000000000000000000000000000000000..3649415bd14a3356419efd7340b430e645509e0c
GIT binary patch
literal 1251
[base85 data omitted]
literal 0
HcmV?d00001

diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Australia/ACT b/env/lib/python3.6/site-packages/pytz/zoneinfo/Australia/ACT
new file mode 100644
index 0000000000000000000000000000000000000000..aaed12ca284d69e3a8ba25891701790bde7f6743
GIT binary patch
literal 2223
[base85 data omitted]
literal 0
HcmV?d00001

diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Australia/Adelaide b/env/lib/python3.6/site-packages/pytz/zoneinfo/Australia/Adelaide
new file mode 100644
index 0000000000000000000000000000000000000000..4f331a87df4ed78be0d00b1a82b5c66c773069c9
GIT binary patch
literal 2238
[base85 data omitted]
literal 0
HcmV?d00001

diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Australia/Brisbane b/env/lib/python3.6/site-packages/pytz/zoneinfo/Australia/Brisbane
new file mode 100644
index 0000000000000000000000000000000000000000..a327d83b7696f39c01a9b5cdff1d4f0fd4b1f94d
GIT binary patch
literal 452
[base85 data omitted]
literal 0
HcmV?d00001

diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Australia/Broken_Hill b/env/lib/python3.6/site-packages/pytz/zoneinfo/Australia/Broken_Hill
new file mode 100644
index 0000000000000000000000000000000000000000..768b167857dd82807b5ef912be31518086c77141
GIT binary patch
literal 2274
[base85 data omitted]
literal 0
HcmV?d00001

diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Australia/Canberra b/env/lib/python3.6/site-packages/pytz/zoneinfo/Australia/Canberra
new file mode 100644
index 0000000000000000000000000000000000000000..aaed12ca284d69e3a8ba25891701790bde7f6743
GIT binary patch
literal 2223
[base85 data omitted]
literal 0
HcmV?d00001

diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Australia/Currie b/env/lib/python3.6/site-packages/pytz/zoneinfo/Australia/Currie
new file mode 100644
index 0000000000000000000000000000000000000000..a3f6f29a49617167750848c71e463faf6f3974fc
GIT binary patch
literal 2223
[base85 data omitted]
literal 0
HcmV?d00001

diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Australia/Darwin b/env/lib/python3.6/site-packages/pytz/zoneinfo/Australia/Darwin
new file mode 100644
index 0000000000000000000000000000000000000000..c6ae9a7ba253089d4fcdb9668b70aaad20dc94ad
GIT binary patch
literal 323
[base85 data omitted]
literal 0
HcmV?d00001

diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Australia/Eucla b/env/lib/python3.6/site-packages/pytz/zoneinfo/Australia/Eucla
new file mode 100644
index 0000000000000000000000000000000000000000..99f07a9fe53bfa85ef4c41f90dfbae413aaf12e4
GIT binary patch
literal 503
[base85 data omitted]
literal 0
HcmV?d00001

diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Australia/Hobart b/env/lib/python3.6/site-packages/pytz/zoneinfo/Australia/Hobart
new file mode 100644
index 0000000000000000000000000000000000000000..07784ce5d751f040ba7ab43713f32cb92ce9afb1
GIT binary patch
literal 2335
[base85 data omitted]
literal 0
HcmV?d00001

diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Australia/LHI b/env/lib/python3.6/site-packages/pytz/zoneinfo/Australia/LHI
new file mode 100644
index 0000000000000000000000000000000000000000..57597b0b9743475c547021ff8cdaa0049729d0e0
GIT binary patch
literal 1889
[base85 data omitted]
literal 0
HcmV?d00001

diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Australia/Lindeman b/env/lib/python3.6/site-packages/pytz/zoneinfo/Australia/Lindeman
new file mode 100644
index 0000000000000000000000000000000000000000..71ca143f29f2e5799b865478d05a0e88465d92f2
GIT binary patch
literal 522
[base85 data omitted]
literal 0
HcmV?d00001

diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Australia/Lord_Howe b/env/lib/python3.6/site-packages/pytz/zoneinfo/Australia/Lord_Howe
new file mode 100644
index 0000000000000000000000000000000000000000..57597b0b9743475c547021ff8cdaa0049729d0e0
GIT binary patch
literal 1889
[base85 data omitted]
literal 0
HcmV?d00001

diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Australia/Melbourne b/env/lib/python3.6/site-packages/pytz/zoneinfo/Australia/Melbourne
new file mode 100644
index 0000000000000000000000000000000000000000..ec8dfe038c2d10aed29763ef8f664c8f0cd35c8c
GIT binary patch
literal 2223
[base85 data omitted]
literal 0
HcmV?d00001

diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Australia/NSW b/env/lib/python3.6/site-packages/pytz/zoneinfo/Australia/NSW
new file mode 100644
index 0000000000000000000000000000000000000000..aaed12ca284d69e3a8ba25891701790bde7f6743
GIT binary patch
literal 2223
[base85 data omitted]
literal 0
HcmV?d00001

diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Australia/North b/env/lib/python3.6/site-packages/pytz/zoneinfo/Australia/North
new file mode 100644
index 0000000000000000000000000000000000000000..c6ae9a7ba253089d4fcdb9668b70aaad20dc94ad
GIT binary patch
literal 323
[base85 data omitted]
literal 0
HcmV?d00001

diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Australia/Perth b/env/lib/python3.6/site-packages/pytz/zoneinfo/Australia/Perth
new file mode 100644
index 0000000000000000000000000000000000000000..85c26d509a81d77ebfd6525b4777ed32cd6c9e1a
GIT binary patch
literal 479
[base85 data omitted]
literal 0
HcmV?d00001

diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Australia/Queensland b/env/lib/python3.6/site-packages/pytz/zoneinfo/Australia/Queensland
new file mode 100644
index 0000000000000000000000000000000000000000..a327d83b7696f39c01a9b5cdff1d4f0fd4b1f94d
GIT binary patch
literal 452
[base85 data omitted]
literal 0
HcmV?d00001

diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Australia/South b/env/lib/python3.6/site-packages/pytz/zoneinfo/Australia/South
new file mode 100644
index 0000000000000000000000000000000000000000..4f331a87df4ed78be0d00b1a82b5c66c773069c9
GIT binary patch
literal 2238
[base85 data omitted]
literal 0
HcmV?d00001

diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Australia/Sydney b/env/lib/python3.6/site-packages/pytz/zoneinfo/Australia/Sydney
new file mode 100644
index 0000000000000000000000000000000000000000..aaed12ca284d69e3a8ba25891701790bde7f6743
GIT binary patch
literal 2223
[base85 data omitted]
literal 0
HcmV?d00001

diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Australia/Tasmania b/env/lib/python3.6/site-packages/pytz/zoneinfo/Australia/Tasmania
new file mode 100644
index 0000000000000000000000000000000000000000..07784ce5d751f040ba7ab43713f32cb92ce9afb1
GIT binary patch
literal 2335
[base85 data omitted]
literal 0
HcmV?d00001

diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Australia/Victoria b/env/lib/python3.6/site-packages/pytz/zoneinfo/Australia/Victoria
new file mode 100644
index 0000000000000000000000000000000000000000..ec8dfe038c2d10aed29763ef8f664c8f0cd35c8c
GIT binary patch
literal 2223
[base85 data omitted]
literal 0
HcmV?d00001

diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Australia/West b/env/lib/python3.6/site-packages/pytz/zoneinfo/Australia/West
new file mode 100644
index 0000000000000000000000000000000000000000..85c26d509a81d77ebfd6525b4777ed32cd6c9e1a
GIT binary patch
literal 479
[base85 data omitted]
literal 0
HcmV?d00001

diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Australia/Yancowinna b/env/lib/python3.6/site-packages/pytz/zoneinfo/Australia/Yancowinna
new file mode 100644
index 0000000000000000000000000000000000000000..768b167857dd82807b5ef912be31518086c77141
GIT binary patch
literal 2274
[base85 data omitted]
literal 0
HcmV?d00001

diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Brazil/Acre b/env/lib/python3.6/site-packages/pytz/zoneinfo/Brazil/Acre
new file mode 100644
index 0000000000000000000000000000000000000000..b612ac23562126db6b0aa3ce26dea6760c15be24
GIT binary patch
literal 662
[base85 data omitted]
literal 0
HcmV?d00001

diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Brazil/DeNoronha b/env/lib/python3.6/site-packages/pytz/zoneinfo/Brazil/DeNoronha
new file mode 100644
index 0000000000000000000000000000000000000000..6d91f91452d0c8474f917c41add3395838504ac6
GIT binary patch
literal 742
[base85 data omitted]
literal 0
HcmV?d00001

diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Brazil/East b/env/lib/python3.6/site-packages/pytz/zoneinfo/Brazil/East
new file mode 100644
index 0000000000000000000000000000000000000000..308a545ce5931939fb6353fa4273aedf00b49372
GIT binary patch
literal 2016
[base85 data omitted]
literal 0
HcmV?d00001

diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Brazil/West b/env/lib/python3.6/site-packages/pytz/zoneinfo/Brazil/West
new file mode 100644
index 0000000000000000000000000000000000000000..855cb02c4082719f3a51c8401288c013efefe683
GIT binary patch
literal 630
[base85 data omitted]
literal 0
HcmV?d00001

diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/CET b/env/lib/python3.6/site-packages/pytz/zoneinfo/CET
new file mode 100644
index 0000000000000000000000000000000000000000..4c4f8ef9aed8bf567ce727c33ba1b97da6f6ee7d
GIT binary patch
literal 2102
[base85 data omitted]
literal 0
HcmV?d00001

diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/CST6CDT b/env/lib/python3.6/site-packages/pytz/zoneinfo/CST6CDT
new file mode 100644
index 0000000000000000000000000000000000000000..5c8a1d9a3ea46457985198597d90f95462a70168
GIT binary patch
literal 2294
[base85 data omitted]
literal 0
HcmV?d00001

diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Canada/Atlantic b/env/lib/python3.6/site-packages/pytz/zoneinfo/Canada/Atlantic
new file mode 100644
index 0000000000000000000000000000000000000000..f86ece4c4032634b3526187c5f86364077b9a9b1
GIT binary patch
literal 3438
[base85 data omitted]
literal 0
HcmV?d00001

diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Canada/Central b/env/lib/python3.6/site-packages/pytz/zoneinfo/Canada/Central
new file mode 100644
index 0000000000000000000000000000000000000000..2ffe3d8d8e012445aa961fc53b38934681dd2a2c
GIT binary patch
literal 2891
[base85 data omitted]
literal 0
HcmV?d00001

diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Canada/Eastern b/env/lib/python3.6/site-packages/pytz/zoneinfo/Canada/Eastern
new file mode 100644
index 0000000000000000000000000000000000000000..7b4682a39e2fc97450c99eed4576d2a4614bf294
GIT binary patch
literal 3503
[base85 data omitted]
literal 0
HcmV?d00001

diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Canada/Mountain b/env/lib/python3.6/site-packages/pytz/zoneinfo/Canada/Mountain
new file mode 100644
index 0000000000000000000000000000000000000000..d02fbcd47f845bd101a7ec97150df7821b826357
GIT binary patch
literal 2402
[base85 data omitted]
literal 0
HcmV?d00001

diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Canada/Newfoundland b/env/lib/python3.6/site-packages/pytz/zoneinfo/Canada/Newfoundland
new file mode 100644
index 0000000000000000000000000000000000000000..a1d14854af6b82bfe814f2c64aac01bc053d5fcb
GIT binary patch
literal 3664
[base85 data omitted]
literal 0
HcmV?d00001

diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Canada/Pacific b/env/lib/python3.6/site-packages/pytz/zoneinfo/Canada/Pacific
new file mode 100644
index 0000000000000000000000000000000000000000..9b5d924173e6e71c2c0a73cf2aca368d3af002e6
GIT binary patch
literal 2901
[base85 data omitted]
literal 0
HcmV?d00001

diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Canada/Saskatchewan b/env/lib/python3.6/site-packages/pytz/zoneinfo/Canada/Saskatchewan
new file mode 100644
index 0000000000000000000000000000000000000000..5fe8d6b618e34c4c87a7eac43f7a27af41161d02
GIT binary patch
literal 994
[base85 data omitted]
literal 0
HcmV?d00001

diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Canada/Yukon b/env/lib/python3.6/site-packages/pytz/zoneinfo/Canada/Yukon
new file mode 100644
index 0000000000000000000000000000000000000000..6b62e2d3c39a8406cdc087b387fbdac0709f9141
GIT binary patch
literal 2093
[base85 data omitted]
literal 0
HcmV?d00001

diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Chile/Continental b/env/lib/python3.6/site-packages/pytz/zoneinfo/Chile/Continental
new file mode 100644
index 0000000000000000000000000000000000000000..ab766a41bc06db2956154da6e2fb0dfc3e9c1b2a
GIT binary patch
literal 2538
[base85 data omitted]
literal 0
HcmV?d00001

diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Chile/EasterIsland b/env/lib/python3.6/site-packages/pytz/zoneinfo/Chile/EasterIsland
new file mode 100644
index 0000000000000000000000000000000000000000..060bef81898b9b45492ce2e2e35fb218f5cc213a
GIT binary patch
literal 2242
[base85 data omitted]
literal 0
HcmV?d00001

diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Cuba b/env/lib/python3.6/site-packages/pytz/zoneinfo/Cuba
new file mode 100644
index 0000000000000000000000000000000000000000..1a58fcdc988ea6ec1bb660ceabe85ea19e5b0774
GIT binary patch
literal 2437
[base85 data omitted]
literal 0
HcmV?d00001

diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/EET b/env/lib/python3.6/site-packages/pytz/zoneinfo/EET
new file mode 100644
index 0000000000000000000000000000000000000000..beb273a24838c96e81f0469e3827bea20ff930c3
GIT binary patch
literal 1876
zcmd7ST};(=9LMoPj+LzFi-wRqS%fGa;qaIcM1}{Za#Y}Gq#_xJQba+-z$DgU&J}Z?
zHdb@x)XA80dh!D60;%~pTgKcRYgW!JSK7l|n>wGy=I{N#?W(K#pPjw_JKMSYzCYo$ ztu1-Je>@MlU-<HDbzk16KQ`Q?*pFu0;{!oiSCQhHGc_ZesF|T9_C)?Y#i#$NSu=lD z!nE&|aN}zwUim^#Uf8SICqGuwv2ja26tR@g_gLznEN$ESHm9xAo?72zPgjR6y)<ag z_=_wfIn(AonrNBP70SF-pn1P$X#V6ZWu1Ol3r6qAKm5BE4*jU?f$x;lbxeWg!^&;A zXnCEZmR~kz1+|}8VajeR%ImSBseW6OG^9nBHd^uhZk3Ez*mFPCt8}DMWn(3Je*0pT zf0nElTGF+6px9olj8;W+k}b)&s$j*GRYrfSs`SfNb^SM6I_<bEoj7aDuI#tvk%Lx! z?i+=64%&)C`?RvX-Bt~LpqiIHRBhWP)jiv$aCMbdC#=vK|9q{v9Z-FIrPcp1LoeT& zXKTN_sdbYvwtjD%HH`jc8~Sfp<Ise?(l}`w`~J|Y<zHD-!#Opl?Xpc}5w$#WRGU+z z&DTEFmZ?s)p6%6Zm%@7ez&f>^>999;l&O8B&feUTrnj~iT1QQ(b+#nf+qtRM6})Tj z#QCf{Ctlru|6)C{x7Bn0l=WV}sI5mv?A?hmZQFH1eUaUIuXkAeJNxB}`p-VTtU#1I zwd6Uz=wPre><b3Nz9{#E|HYZQ|NA&q<Xn-HMa~vEUF3X`6GqM$Ic4OWk(2gxXN{aT za^A>^BWI4BI&$vF$s=cvoIY~?NCHR(ND4>}ND@dEo-Pd}4<r#J6C@QR7bF=Z8zdbh zA0#0pBP1mxCnPB(D^Hgel9#7T49N^h4ap5j4#^Hl56KTn5Xlfp5y=rr63G%t6Uo!l zC5mL~=~6{<MUq9bMbbs`MG{6bMp8y{Mv_LdM$$&|_H>COnR~j_k=&8wk?fK5k^GSf zKxP1$0%Q)5NkC=+nFeGYkcmKM!qZI!G8f2XAhUr?2QnYXgdj75ObIe4$fO{%f=mlC zFUZ6oGvn!|2ALaAH#x}cAk%})4>Cc>3?Wm5|I;~&al5oIkA?Dw^0N!G13CHrP;S6` HD~Ndj&PSQE literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/EST b/env/lib/python3.6/site-packages/pytz/zoneinfo/EST new file mode 100644 index 0000000000000000000000000000000000000000..ae346633c1690d49530e760f8506218bfa9feef1 GIT binary patch literal 127 zcmWHE%1kq2zyORu5fFv}5S!)y|D78c7+ixxfSeG*`e0_T{D7H)YycO~98)d;2yPBR literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/EST5EDT b/env/lib/python3.6/site-packages/pytz/zoneinfo/EST5EDT new file mode 100644 index 0000000000000000000000000000000000000000..54541fc271644e44973989a27f3846a16800caf5 GIT binary patch literal 2294 zcmdtiZ%oxy9LMnslIR5zDhd^;pkg5Z?J6P_CXB9jh4f-bre6)bLnuyaHz>oJDCyQ* z)1ZH&EHi6!WM<Q9OwE~FGgDTxKf_~NtX0%8df+7Q?Yz&^9`&qs?#}OZcAtOV!G`A5 zC7yqrIQtKm^Mbv+ll%5$sMkLFMPA?HQM+C~C7(ZQ)Y+^*)ma(SmoL?uv7hGXzs{AJ zD<`Mv$f0x-eLJc>y%7`BeL=@QuFW_VkvO@cuBz2CzT&)!FFGdi&rMbnlXuI+eLIcU zY>@=LStWk+hE6&XQdb}D(v#lVWRhQ6ty8u(nQJ=k(bqOto9mY5>QsM@xqfz{z9A>U zq-EyF<Ow5YN^F8m`NONGUKo+|Gry{7M^8(}{x8&xyY|aXVUM}_nH`c@_k+n=)30yw z9XGeme_r2~y2oVauG86(PgRb$PUj50rE-6srKb;ctGvOW<o7kJf&(+9uycVb+L0#3 zp^zzAds${w`_1i(z87EibThMLKxV}!nbN7>>eAug&FryHblHhBX3lqgy1f54b;rj~ z>pQo9sqT7Zm9A)eU(M~>D0kQFRP!2FN@ZbKRaMTDs?oisI)8<(9^7X9NprRTK&zQQ zlBE~Cx>eo#b%g{rw5Ww2W=hSnfU50@ll#gG)uP9SWpR3n3f7&J;Mk~I;(J$?{5Z+f zXPnUW$1j?tk-fTM_n>)TXq#Tvb<jLG(5ja=_L>!a3$+A3P%FDzWL18TdMFf-#-w)D zR9z@dBMmB)og$%A<*Ir7s5I}(P-}+2l9rw_(|Y=%emI<N9_b&{ZFLc&wjb1w`m|Zw z)~na09y9A}I`sO;c@-}0k?_z_)t=Ta?E`PBj!O;lMBirh<hgR$&>2!s9m<f-<$ksC zMUQx5JTbB3?B~Dqi$*^?661+R55MP$bMFc6n^;vD^i(aj_kMe8gN!4F8~hg|irT}F zkTD^HLPmuQ3mKQw4h$I?GBjjt$l%b8jt9fz*zo}aL`H}V5g8*gNMw}AFr9Xs$UvQT zq{vW_u_A*-MvDv=880$mWW>mjkuf8KMn;Vc8yPn;aAf4j(4BVd$l#IDBg03=j|2dT z01^Tu21pQ)C?H`#;(!FgX(NGz!f9iH1Otf%5)LFDNI;N?AR$3wf&>ML3KAA1E=XXI z$RMF{+Snk$L860%2Z;|7AS6Oah>#c|K|-R0gb9fg5-6vQ6cQ?@jTI6sr;Qd8E+k$^ zz>tU`Awyz@1PzHA5;i1mNZ_0{a!BZ$Hg-txoHlw$_>lM^0YoB*gb;}#5=115NEneg zB7sCAiG<Q=V~GUQX`_jR(`n<01QdxV5>h0lNKlceB4I`1iUbykED~C$jV%&fr;RQW uUZ;&O5@004NQjXbBSA)@jQ^W3du^?Kw%U1t83iQ;MR|eZ;)3FWBJbZZQm>H! 
literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Egypt b/env/lib/python3.6/site-packages/pytz/zoneinfo/Egypt new file mode 100644 index 0000000000000000000000000000000000000000..ba097504459673e254831edea689c2f8143990df GIT binary patch literal 1972 zcmcK4Z%oxy9LMo<K@qQ5UbPhS8X6>Gl#oH>O)9+(4cD*T%Sy-Uhc#Wb+-c;NQsd0E znq_Oo16^(DGFzoqW?3|K%eF$aIN8Wew>-3#WQHdMjhjU0ee79Vt_OY@_r?4(o}Bk5 zz4Vdg<>FsA%>4=9T(kS;=h4c2w6%O>+A>Z0;Ds}${i|+$=+JxN;S)3T*E_n*H(&PY zj+d7k{myFr?X#gd^1{xbyW*j+yRS4jHL5i{)fwtvPiO4uj~^Gmbu`<ajlJT`2fL+R zU8H{B@U!h*x<s69oFx0^tXKVYo8`GFF?GJ;h&?})6ay1;<Um)u$P853KMxG53&;1{ z!EI~RUweDy&~tl)c)iF*R{Wx}nwQAzg>f;g;RiW<<{KhsUd)cj>r}agNtxR-QbdP8 zWuqMlHR|M`&D-_48hx-#Uhzu5xboeH?U*NvRqVx0a%^3_7`HMm$5(cVtL_Qx)%oMq zge@(0LZ(LK%dIy5c$1pcm$29D7iw~+l=028#gsi;?X_x!D%f~bPHlKiOj}bgubW>W z3YSfnMMWufec}r_J?D8*96x7^PkyFKM3XIPuU0d=-j_FQZxp2mezZ5Pe@or8Wt^Gy zSe`Cx?l<L&di2c?9yPN|w7#Xf)l@`3)Ro2Ghm~hLf~wq4!>aFg1-Es-8qV3>5+vH5 z4sUN+70iA6fv~#qzTl1xYs{VXjry)9>dm}Fot}U1T$3C-7%V6&FbmA1K}{^n)O-@t z3++#4(Mzqt-NzzfZNolYyY7Bd`(`Fct-95ucC6E>V|&AN;o2Z8a+$uev;O<*;_nqA zUgut)D47*;PjPaA7<uvM+Qn&+Os4oq^Y;-x(wBbI%KbC{rvKnSxEpf!%)Z6jkvHyj z?;LsS$a_cLJo4_5w~xGkqyeM@qy?l0qzSL<0%^nR`al{%Izd`NdO?~&x<T4O`av2( zIzn1PdP17=x~`D6ysj^#F{Cr3HKaH6&G7@>ajrd}KcqpVLyi`a9yyvsx<uOax;{A? z^}0@xR*_zjW|3|=+C}<B8b&(iXc_65qiLjTq;0S3o1<~B>l|qv=^bev>7Ju~q<>@s zkR9OI0%Q+3HUZfMWE+ru;MfSS+X-YVylyX$%|Ld8V>^)jKsE%~5oAk{JwY}F*%f44 zkbOZm#_M(l*&46g8)S2k-9feo*&k$skR3v{2-zcKlaO6Pwh7rMWTU)pr;x4ky1ha+ x3)wAXyO90zx()NX9YeOv>-Nm+HVxS|ahYZ<a*0{X{^$4puVIV2l`Ace{tf(fqG134 literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Eire b/env/lib/python3.6/site-packages/pytz/zoneinfo/Eire new file mode 100644 index 0000000000000000000000000000000000000000..655daf37889fd12380c7f055f2cdd85dd4665e03 GIT binary patch literal 3531 zcmeI!Sy0tw7{~FyAqu#?np>js8g3Zk0hiPyat}pvIVov|mZByZk};qeIhCd3IFAiw zrsnHTF1X=}<{Ii`E-5a!Yc9C2i3{5M{nvEUMK@k_(K&O@=XmCD>F@h^dk;y7aQ^WK zu;1|Fan8Q@T=5h8Hnr}GNmCD&DnD(H$4sBI)qFo~x|uP#nlYcR(=$^ZnOOs;>Di-p zm^ra)%-pDldTvIPGp|lRJ-=SA&TJ5>GcOmK1<ro6Ag_{MSa3rx+McKv@BCF~&5zYf zOqyBxg-0*@YOh(I6k}E-FE`m;!_3O)-g=csnbi>~W=*+UW^IKyv-bJ{v#!9=IfrlR z^+$W@4Qux6AD2Bg8^2wtH%$%Hn^Pz2EhCHc*0?mit=k;)Q}aGLx8-KNJ?@~{p6_oC zM4!@mHEWrJv5WMf(E9pt*hGE!{?|IcY?98ud_^BUSH&FrdAB~k?Y=pYeaW26s;p0C z>@ug5Uo~fjj?ibjH!#0>#hi;MZO%t7))xXp^u>x^bMav*T~IV!U&_C#FCXurudLgn zudaAtu6?&mUmr8b+?X+1-|Qc4Zl%ZT+no~ho%q)JZmlxr_o!ff&$(_2gM4-2seR`D z;|u!1rWNMlMIZBM)Iw9VJtxU0DOVQ{iPOGaGj$37mb#>8ye?T#UHhv<?Z4x(4!9Ak zOD(TvO7HdPGLs&evP&!Ia>Gv;H6_fH@3zTQNK@w7@L8r}+$~cn__9=Po-0*+evztG zGo|X;CGuS1c&WB^k_4Wfuc|M~QZ+V@Q#B`u3Ys}o>BMAJYfKmQe2iBG_iv<Xhc;7n zI(3%1rK_rX4H`<lyG<k{pqzx{Rh9+?H|2%wLJ8fuUm9lQOV|%9rO}A>;u$wt!uuXk z5y{!Aab%8a()}CNw8nJREFx7kD@s?*12d%g@&2mC!xU+`p{;u9Xq>c~7cY@HE#>90 zttDz|b$KNrSXzJ9Lbd7OD{Th^s&;iQNc*-$s)O%N=@@)cb-Z?7Mf+@4(Ytr3PG@JU z&RGjo%&v{nW#R-CyJ)7oHf)&cI^k34)@!_UPaG&cUQ85kOh<XWO04t@4UwMrJQ7!_ zgNi#;LEgAqU-e#fNBZOis5eb{)i?W!dMo|5iqF`s`o-s|{-ZC++fl34JAHS_fS@UA zU}Tmg_^p&dHADtooGgQjl4Zz_G<o;9SB7Txk;L5L>b)-`W!StP>ixk%k~Frd8s4py zN=~SvJ_xU=Mzp`LJ}mF3ltz^#<;EG6TK1l#?%k%+t{st)OXsOkyL05DDO+T8)^zze zZH}Z*OqXKM6!-c2S;DvYQ-1oF^!pDypFDX`+i~oBIj6Ye_yra7bDXlOSZTXQj-%Yq z_M1OHJ5Dk88^`^P;}_G#>$J14=pJ6DjeT{rpY3n#!@jwH<o?s~{`(M~x4(3Uu<)<H zdtx8G_5~b6hSAlIBQlW4NFqatj3qLd$Y>(NiHs*QpvZ{2+95^86d6=xR9)?`BID|6 z2NoGwWN4AGMFtlcU1WHX@kIt08DV6IkugRF85w0{n2~XIwF8ZeG&0o4SR;dtj5add z$ao_Ij*K`m<j9yKgN}?kGVHE)+>wDtMjjb@WbBc_ceSIB48N-#e<T1%1dtFQF+hTV zL;(o{5(gv@NF<O@AhAG#fkeaAh69NQ5)dRJNJx;FAVEQ*f`kQ$3lbP4GDv8U*dW0{ 
zqJxCT)y4-25E3CIL`aN~Ao1UTC?R1&;)DbWiIl4i6%s2~8!RMRNVt%AApt`ohJ*}> z84@%kYDn0SxFLZ<B8P+yiJhwr9uhqyd`SF|03s1YLWsl=2_h0jB#cNLkw7AmL_&$g z($xkNiKeR!ClXI2ph!fKkRmZff{H{H2`ds;B(O+ik<cQsb+y4oqU&nIi^LZRFcM)T z#7K;hAR|#m!i>Zj2{aOEB-BW(kzgaycD3O~;_Ye!jzk;@ITCXu=t$I&up@Ct0*^!< z2|W^fB=|`5U2XW0_`BKz067AXLjXAjkb?j@3XsD9IS!Bm0XY(oLjgG!kb?m^8j!;Q zIUZc?0f8J5$RU9o6Uae<92Ll6fgBgefq@(u$f1E88_2<d939BvfgB&M_5eYS5LbJM zAjb%DkRV41a+n~;338wyM~d_Rm7<=`l@egjn1<o@hVg`U3HO9G4YM|eM&SW}06*^b Ai~s-t literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT b/env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT new file mode 100644 index 0000000000000000000000000000000000000000..c05e45fddbba6a96807d30915e25a16c100257e5 GIT binary patch literal 127 ucmWHE%1kq2zyORu5fFv}5Ss<UarX@YGC~OJgPFnd17-@c0bD?H47dQeKL=s} literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT+0 b/env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT+0 new file mode 100644 index 0000000000000000000000000000000000000000..c05e45fddbba6a96807d30915e25a16c100257e5 GIT binary patch literal 127 ucmWHE%1kq2zyORu5fFv}5Ss<UarX@YGC~OJgPFnd17-@c0bD?H47dQeKL=s} literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT+1 b/env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT+1 new file mode 100644 index 0000000000000000000000000000000000000000..082986e76d623d5b0d47e7b690cbabd9cf9a0dee GIT binary patch literal 148 zcmWHE%1kq2zyORu5fBCeCLji}SsH*u{r~^}85sWm|M-D{LD#?#C=x<~2|s}9VF1%4 LE*qeUc7|L4a%mO_ literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT+10 b/env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT+10 new file mode 100644 index 0000000000000000000000000000000000000000..23276cd13aa7a8fe46d109e6a9a2f33372d271a8 GIT binary patch literal 149 zcmWHE%1kq2zyORu5fBCeCLji}SsH*u{r~^}85sWmFHT@!&^0swiiD70!VjQ&7{D}% M%LZtoouL62029g-8~^|S literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT+11 b/env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT+11 new file mode 100644 index 0000000000000000000000000000000000000000..28c579dcab62c958f1766847fb316f64e8854123 GIT binary patch literal 149 zcmWHE%1kq2zyORu5fBCeCLji}SsH*u{r~^}85sWmPYqyT&^0s!iiD70!VjQ&7{D}% M%LZtoouMHY00%V_<NyEw literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT+12 b/env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT+12 new file mode 100644 index 0000000000000000000000000000000000000000..c740603969f103c523c13b04b57ed2eed427d908 GIT binary patch literal 149 zcmWHE%1kq2zyORu5fBCeCLji}SsH*u{r~^}85sWm4|iZ-&@}|<3?adUA3*gmfN2t! L4bVh8LnAH#`jHc? literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT+2 b/env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT+2 new file mode 100644 index 0000000000000000000000000000000000000000..721cde2f3896162d84430f832fc5426bf2edb9ad GIT binary patch literal 148 zcmWHE%1kq2zyORu5fBCeCLji}SsH*u{r~^}85sWmfBb-fLDvAJGlT>aegM_O0H#S? 
KHb4{YjJN=14HeJ; literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT+3 b/env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT+3 new file mode 100644 index 0000000000000000000000000000000000000000..ae06bcb654b641b86c40254dd0adce48af80c5bf GIT binary patch literal 148 zcmWHE%1kq2zyORu5fBCeCLji}SsH*u{r~^}85sWmzj}dzLD#?-C=x<~2|s}9VF1%4 LE*qeUcE(%)S+NzE literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT+4 b/env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT+4 new file mode 100644 index 0000000000000000000000000000000000000000..5a7f878c98d53b58dfa59738c26f0689a1b6c6da GIT binary patch literal 148 zcmWHE%1kq2zyORu5fBCeCLji}SsH*u{r~^}85sWmKYoCLLD#?pC=x<~2|s}9VF1%4 LE*qeUb|zc^O;r_O literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT+5 b/env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT+5 new file mode 100644 index 0000000000000000000000000000000000000000..18cbf1fe2bb028343e36285553af105cd1ed15cf GIT binary patch literal 148 zcmWHE%1kq2zyORu5fBCeCLji}SsH*u{r~^}85sWm-?@Q-LD#?(C=x<~2|s}9VF1%4 LE*qeUcBWhaK=~CY literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT+6 b/env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT+6 new file mode 100644 index 0000000000000000000000000000000000000000..1aa4be88302308a9a764d0dd632b74b3325592f5 GIT binary patch literal 148 zcmWHE%1kq2zyORu5fBCeCLji}SsH*u{r~^}85sWmU%h~VLD#?xC=x<~2|s}9VF1%4 LE*qeUc4k}vG@TUi literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT+7 b/env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT+7 new file mode 100644 index 0000000000000000000000000000000000000000..cd8ed49af3204f76cf25844942015205fe7e360b GIT binary patch literal 148 zcmWHE%1kq2zyORu5fBCeCLji}SsH*u{r~^}85sWmpF4qpLD#?>C=x<~2|s}9VF1%4 LE*qeUcII3FC_xms literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT+8 b/env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT+8 new file mode 100644 index 0000000000000000000000000000000000000000..e0ba6b8897028777fa930ce4a832307f716df241 GIT binary patch literal 148 zcmWHE%1kq2zyORu5fBCeCLji}SsH*u{r~^}85sWm?+0>p4J?2nAtada1E?MbFiqmJ K0h(xM!36*t<rIJb literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT+9 b/env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT+9 new file mode 100644 index 0000000000000000000000000000000000000000..eee1bcb70e8f59e5a4fdcd04fbbd9acec88fa04b GIT binary patch literal 148 zcmWHE%1kq2zyORu5fBCeCLji}SsH*u{r~^}85sWmZ!BP7&^53GiiD70!VjQ&7{D}% L%LZtooh26l4~Y~= literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT-0 b/env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT-0 new file mode 100644 index 0000000000000000000000000000000000000000..c05e45fddbba6a96807d30915e25a16c100257e5 GIT binary patch literal 127 ucmWHE%1kq2zyORu5fFv}5Ss<UarX@YGC~OJgPFnd17-@c0bD?H47dQeKL=s} literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT-1 b/env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT-1 new file mode 100644 index 0000000000000000000000000000000000000000..4ff8701406074eaf82aa119ad75049e7d41b0c6e GIT binary patch literal 149 zcmWHE%1kq2zyORu5fBCeCLji}SsH*u{r~^}ffNIu00V=zfgz9;LV^iDKw6;y(<Ck% Kpow<6hFk#O7Y?le literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT-10 b/env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT-10 new file mode 100644 index 
0000000000000000000000000000000000000000..e12e461d50a0a48872d7a6d814936abaaa406782 GIT binary patch literal 150 zcmWHE%1kq2zyORu5fBCeCLji}SsH*u{r~^}ffPf}0tN<cLjxcygai|QfV4sZrb%2j LKojkB4Gp*eWk?YD literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT-11 b/env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT-11 new file mode 100644 index 0000000000000000000000000000000000000000..37f273974d5ab91321e2d236121f27e957a0d9ab GIT binary patch literal 150 zcmWHE%1kq2zyORu5fBCeCLji}SsH*u{r~^}ffU264Gaw0hK4{^2ni<q0BMB+Op~~5 MfF|1M8X9r|0CMsXIRF3v literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT-12 b/env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT-12 new file mode 100644 index 0000000000000000000000000000000000000000..09297f1bc24ec1d8c42eb6129d3a4641286f5a99 GIT binary patch literal 150 zcmWHE%1kq2zyORu5fBCeCLji}SsH*u{r~^}ffU1v0}Kq>h9I3GB$)66q!kJ<P2#cv LnrNqMXv75ofSeI| literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT-13 b/env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT-13 new file mode 100644 index 0000000000000000000000000000000000000000..97ae1e140a4db63082e006a9efff0ceeeb25aee8 GIT binary patch literal 150 zcmWHE%1kq2zyORu5fBCeCLji}SsH*u{r~^}ffU2G3k(d}hQ>fv2ni<q0BMB+Op~~5 MfF|1M8X9u}0FFixxc~qF literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT-14 b/env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT-14 new file mode 100644 index 0000000000000000000000000000000000000000..58d6d1b2adcec0a790f8266afc78e02e653befe7 GIT binary patch literal 150 zcmWHE%1kq2zyORu5fBCeCLji}SsH*u{r~^}ffU1$2Mi3_h9*E(2ni<q0BMB+Op~~5 MfF|1M8k%qc0Gsd;`2YX_ literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT-2 b/env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT-2 new file mode 100644 index 0000000000000000000000000000000000000000..f0dc70625cf4e9dd2fdef1ed8d3c91827199be5d GIT binary patch literal 149 zcmWHE%1kq2zyORu5fBCeCLji}SsH*u{r~^}ffR#`0t17#0Z3;E2`2miX@vqzlelbv KCfey5aRC7Af)4Ef literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT-3 b/env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT-3 new file mode 100644 index 0000000000000000000000000000000000000000..a0790fe9cd25455191d6e49e9557897e926d0d46 GIT binary patch literal 149 zcmWHE%1kq2zyORu5fBCeCLji}SsH*u{r~^}ffR$50Rw}!fiaL3LV^iDKw6;y(<Ck% Kpow<6##{jT?GG&g literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT-4 b/env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT-4 new file mode 100644 index 0000000000000000000000000000000000000000..a75a173dc6414570ed2a5da2de82f8500714207d GIT binary patch literal 149 zcmWHE%1kq2zyORu5fBCeCLji}SsH*u{r~^}ffR#<0|SG$feDZmLV^iDKw6;y(<Ck% Kpow<6CR_jqXAf-v literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT-5 b/env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT-5 new file mode 100644 index 0000000000000000000000000000000000000000..85ebf22e8f941954a5b58e4a26071a5ae6135897 GIT binary patch literal 149 zcmWHE%1kq2zyORu5fBCeCLji}SsH*u{r~^}ffR#V00V=zfhmv`LV^iDKw6;y(<Ck% Kpow<6rd$9N(hscw literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT-6 b/env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT-6 new file mode 100644 index 0000000000000000000000000000000000000000..95def1f9eaf83160e6dcd3d3e883fa8d4f6b0d31 GIT binary patch literal 149 zcmWHE%1kq2zyORu5fBCeCLji}SsH*u{r~^}ffPeX0t17#ff<NJf(buBTA={bBrY4E 
JiFUeXTmT|H5A6T| literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT-7 b/env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT-7 new file mode 100644 index 0000000000000000000000000000000000000000..c6a776e95bda241701c82f9ee62e2c12f1f7c360 GIT binary patch literal 149 zcmWHE%1kq2zyORu5fBCeCLji}SsH*u{r~^}ffPeh0Rw}!fjN*BLV^iDKw6;y(<Ck% Kpow<6=3D?Ts1Pjx literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT-8 b/env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT-8 new file mode 100644 index 0000000000000000000000000000000000000000..f74a16f98a8710c8ef291b73e1c3e66e368a6203 GIT binary patch literal 149 zcmWHE%1kq2zyORu5fBCeCLji}SsH*u{r~^}ffPdlkfUv20c3@cV8RcORw%$UiOU9P JqMfb<7XUpI5N!Ye literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT-9 b/env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT-9 new file mode 100644 index 0000000000000000000000000000000000000000..9b647c0fa95c3838ab9a1fd7caa888468c53d654 GIT binary patch literal 149 zcmWHE%1kq2zyORu5fBCeCLji}SsH*u{r~^}ffPgC1O^6e14|$)gai|QfV4sZrb%2j KKojkBEx7<neh{qy literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT0 b/env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/GMT0 new file mode 100644 index 0000000000000000000000000000000000000000..c05e45fddbba6a96807d30915e25a16c100257e5 GIT binary patch literal 127 ucmWHE%1kq2zyORu5fFv}5Ss<UarX@YGC~OJgPFnd17-@c0bD?H47dQeKL=s} literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/Greenwich b/env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/Greenwich new file mode 100644 index 0000000000000000000000000000000000000000..c05e45fddbba6a96807d30915e25a16c100257e5 GIT binary patch literal 127 ucmWHE%1kq2zyORu5fFv}5Ss<UarX@YGC~OJgPFnd17-@c0bD?H47dQeKL=s} literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/UCT b/env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/UCT new file mode 100644 index 0000000000000000000000000000000000000000..40147b9e8349c50b9b5459d34a8bf683c91b182f GIT binary patch literal 127 ucmWHE%1kq2zyORu5fFv}5Ss<U33UzuGC~OJgPFnd17-@c0bD?H47dQf)dy|> literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/UTC b/env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/UTC new file mode 100644 index 0000000000000000000000000000000000000000..c3b97f1a199421d6d9625b280316d99b85a4a4e8 GIT binary patch literal 127 ucmWHE%1kq2zyORu5fFv}5Ss<U2@P=uGC~OJgPFnd17-@c0bD?H47dQg2nTKe literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/Universal b/env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/Universal new file mode 100644 index 0000000000000000000000000000000000000000..c3b97f1a199421d6d9625b280316d99b85a4a4e8 GIT binary patch literal 127 ucmWHE%1kq2zyORu5fFv}5Ss<U2@P=uGC~OJgPFnd17-@c0bD?H47dQg2nTKe literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/Zulu b/env/lib/python3.6/site-packages/pytz/zoneinfo/Etc/Zulu new file mode 100644 index 0000000000000000000000000000000000000000..c3b97f1a199421d6d9625b280316d99b85a4a4e8 GIT binary patch literal 127 ucmWHE%1kq2zyORu5fFv}5Ss<U2@P=uGC~OJgPFnd17-@c0bD?H47dQg2nTKe literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Amsterdam b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Amsterdam new file mode 100644 index 0000000000000000000000000000000000000000..6dae5e4702f5b87a85a9f099e2aaef7ea2f0c6a4 GIT binary patch literal 2949 
zcmeIzdrXye9LMp4LK;2dS4<$qTSRz4Jb;p#q=aZD5bB9Y!Al})ks%Vp(&(br>Noez zvCOG?Lr{l^hPMIUZ%rkuQEIw2UD?IR&74|e^?N_nmg|q&`lEk(9-iOpoWssvjQ7Vk zVOCz8=Py@~{e*{Whkbb6^JROiaAK~kX!<mJ<=NfNs`7o#dzF#S>SZC$8ci~5i*GvX z3L?z<%yUk8Mu6FnQ14VkPBfKK2h7Gvm&~R%>&>R-3C`xri%eD2MW^c64726?Jg2&H zwAtFY+|<0@*KFI?&1^4t#Cd<|IHxwFkF#U$BD2%$IJ<m%oZao4ojt=#%$^@kJ9S}m zOx>9)rvBCjr(t)a*?aDc+4pXZ+24?34wNi42P;pQgXeRy4`me<94<XvaAZ(s!O<x< zt{&@Ic=h;*?HMO8kC9WGLbY*SvV6F7fSi6cQa&mSk&n}dYr8%z1?|eGY0z7prTxMb zm7)agkQ=4JzA){WHbU->|4BPVe5WBD&ueJwQyO}0uZCUPs`q@dMmryRTf5XZNO)DX zbX~bxx*3)3^Iwr3`GwLmdz##v>XV3}DRN(Qf<%T#$^A~4L<J;k)b#<{>&r;(-4vpI zj<nIfwKp}o{2T38c1~j!p3we9_3F*tqOmz=C9bem;uGs+z_@p0VAoZW5LY4zEu}K3 zOPLP3FjWTsT&zPHlI6kAvvg?1WKFCaq7N-j(qSvYb$DKcCM_H+504Dc<lHVYBJ#4P zB)7=OfDd$3#1}HE`70US`hbjXJT8x3S}$X&H%scteVVq!NP5*;eQfq@$uO_!*eBl9 zarx7A`~&&wOC6<;ho)<0bZ^bP;nl28BPHvzV14p>FPX6ON1fOdDA{W|NKWk)nN)gB zCYLqJl*vspb-}m#)UaJLE$5`>c3&>j6RS0^?GByMRdvSK%XDT-q0Tx!PoKWv(`PnL z)cm7!<k>e8b#}#gd2VKREm%BI=8PRGg?XVeH@2G;rQDL|J9wnHe<v-zdQnP(ZfMEr z!!ob=tiDiFAul%8>HOsfbwTwieQ92~mM$sPR(`Eo`~SncO~4)Y4s3g;d$+sG`LAw| z$G^<}FM5K@{ruh6ZEt_=U*mD?C;R<^|2Tuh6Xx-R_wWn9{rv57&eSxYCw;unlVt7d z^~QPa8Eenv6rX+UFYM)KFMIIwr^~+c{e4At+HbfOx%vB7f3a7q;Ia8gRa|XZkh&m+ zK`Mik2B{5F9Hcr(d64=b1wtx>lnAL2QY2SfC8SJ9om_39kV+w?LTZH+3#k@TE~H*a z!H|j}B|~b46b-4Gt1TN+H&<IYq;g2<klG=|L#l_A52>H4Eg(`sq=ZNfks=~hM9PTN z5h)~6Nu-oWEs<g()#T4gIbCf%k%A%>MM{d)6e%iFRivy)U6H~fm36hHMQV!_7pX2% zUZlQAfsqO$B}QtD6d9>9Qf8#iNTHERBc*n=wML4KR2wNbQg5W-NX3zoBQ-~gj#M2f zJ5qO~@JQv6(!1K)BgJ>M)kn&Y)E`*@WCf5VK-K_R1Y{MEWkA*eSqNk$kflJ@0$B`K zyBf%HxZ3qV76e%lWJ!=UK^6sB6=YeEbwL&eSs7$$khMV;$JMS5vOKPKeUJr0RtQ-l zWQ~wTLRJY`CS;wEg+f*eSt?|$ki|k)3t28#yI#nGAuEO~8M0=`q9Ln>EE}?J$ig8j zhb$ekcF5u(tA{L~t6e{20bT71B1?#@A+m_bDk95>tRu3Jp8qG(PG``+oyb5tr2S%9 U(&)7Kn1M0g=(Jcb_Ky$z6}+vK{{R30 literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Andorra b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Andorra new file mode 100644 index 0000000000000000000000000000000000000000..b06de7a5904dd87bc1c43c023418bf2829c01df0 GIT binary patch literal 1751 zcmdVaT};h!9LMqhQB7-R-)v;l1Cqkgb5TlioLUsBKjk5b9>ftPtvtn;nYH%4Xv{E7 zYi4OS;UZ=XW30K*3^N<^I5Qg-v&QfJcj>~N&A*-VI&0_LecoStd2Mx~^~V!n{=<vM zXI^{`-fi9<W<Rw;yL%+;*a-=5Q;FEIN5=aaWWvg|GBMwl$PBMca;HjEXtYcoF;=1j zJR1EqSz}&B>6C#nI`!6Yo!0wJ-JS1rddCxu-FHvp8n0-4<vC3#dn}0!y^@sKC&@*J zBqi*)q$V~?>QJl99M_>UpH)cOk0wpO>XBJbOEsgbTr>O9b@u*j%{m&YbE+dXdtaK& z%?VIX<v5ua^+vs(A;}52tGSWSCHLcN$@9A|dHr`}{>#&{py#aQ-@m2>?QOEK`;;!K ztCPiTd$q9SpceV6w0MS3?fhI_60%U2x~J&U!FVklog<}>M(MJzF|z#97hN$BBr8t_ zOIhy+S=IVk$~*dHb@_l)?0m0lvM$Tovin*YaYU*zd$c<6qSl0|*1SEe>xLS%_D+kg ze`f23Gb_}0yIwXP$ke*7V%fAVLO1VEk@~_6X{Zj7EeYY$=>0BRgDq)_8?8;BUP$xE zL2Z6;Q(8Vg)@|pzWP5*~?l^KoclI3DT`is3+TN<JVSfJq>d&%V?1abPvPOy(Xj!4- zT_b*f&M&a760dD}oL8~U*IX{=&HnoUH<~Xx1N_GC%=6PcyHYQ7AcN##l*llVaUugn zMv4p-87neaWVFa|k?|q}b~GbKhK!6E88k9#WZ1~Kk%1#4M~04!9T_|_dSv*>_>lk{ zO$0~?jwS{q2qX$53?vRD5F`>L6eJcT7$h1b93&njAS5CrBu5hy5|pEf3JD8|3keK~ z3<(X14G9j34hav54+#*75DC%I#E1mxXre^IMB+pOMIuE)MPfyQMWRK*MdC#QMj}Q+ zMq);Sb~I5VVIy%Pfg_P4p(C*)!6VTl;Un=Q2LL$&$RR+E0df!=%~3!OgQGbP$bmqP s1ac^lV}TqD<Y*v=!}>2SN07ND(-S=2V%uGi6q^zo?=DD)$GD`RU#;eS#{d8T literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Astrakhan b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Astrakhan new file mode 100644 index 0000000000000000000000000000000000000000..90d7c2a810846febe4c2a6c60e8c0156505d4d91 GIT binary patch literal 1197 zcmd7QJ7`l;7{Kupn>00Xa8MiDq&{jI`iNfBq(n_?Y?_8@3PnPPs!)6s1i=r<pdhqh 
z?WBSth$tc$9DENtStUppvF+qmg$qtTkh6$l(Rlthb#f3l@8z7|<!}>7zJFol=vYks zaV<9|EUsR&cwRYSwmX4r@kz-)v^sLNye2Sj)z{6X>%Y4m1wN?q+J%JGFnhM#IDN?u z-aBK5t{3g5b4ToTr-$wJ$8z?D{YiUczSV9{HQ1Xv{B}6#(=Gl*9q~jY@+BaxuRo2p z&3u&h$8V&gG$+x^&t>!EL)lWiDc0eOvh~2Uj-9xoJG*b__})`G5t`6lv3<Jh$3dNJ zJSoYyeR|vCh-`n>qIWE$By~F@-A|jOXDTc^uPI57d1cpRQ1@ovOXko|-4}i(*^I6G zJ@+No{6gnGztjUYB|R{ISMPo|rT08Iuk*9FWbndqUC4(<hrh0p@k*tlTs6z8x7)LH z*LrKcZnnSQZm-*|RCI;;GgX_V%ya*ct4^t8TA8!z{`<sbPO9&4o*yizcB|8@gjsRE zK)JY_C$_>rO!M2)r_2wkFjZu-95Y>H!pM{z&7_fOBNIoaj!YhzK2iWu0a5}|15yN1 z1yTl52T}-92~vuqsRbzpsRk(rsRt<tsR$_vsR=0xsR}6zsS7C#sm#%ohScV0ibJYH z%0ucy3PdVIN<?ZzibSeJ%0%i!3PmbKN=0gQG{qv-I+}8kdXa*Wijk6$nvtS$+|2&} Nt=g9|wf98hzF%%35ugA7 literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Athens b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Athens new file mode 100644 index 0000000000000000000000000000000000000000..0001602fdccd0bbb1849adf237c93aa93ea80a73 GIT binary patch literal 2271 zcmd_qZ%kEn9LMo<g`0R){37uu0nvz1yYe6W6GV8uFcS>nYNR3<h+;%a#2_(l#auJy zzAaWcGp!hN%&tA4oI`3^IV~W!#+sFb<;pDQSn9MDtKa+N)>;pG){}mB=e!PQm+^$p z+qbT+Hr@KiahW$<oIU2^d6wVY_WS4hkGEYKJAUfu*)u;^k9{hKO9n5ll%d|D@!>b4 z$3B0lZu@BX>s6!2TqRR3Z>*XceXVNhP{5itaL}6Gy~ZEfS>d12vB)pmev`18o#Qi0 zd&k2Io~^n)<77!h!cQ6*{*6Wj4QbTX_ci*$QN81fLptlD7d7U@fW#gNNZe}&Wp<}Z zeA_cJr@m3<Zm5<!i+qxh<CVMI*^(HWBzI4XmL#V~ldfm#yzdfq{z#-Q_++x)(|1$d z-IsJ>*H@a{@tLMHozT>pw>7QeoTNAQNk(qJWR|`xS#bv>JH17+C)#CkOqVV`ze$$- z+N?_lJaX^X<(kt|sk!}2^}gNtnzuhz@2^eJ{Ej8EtiY+BniyH0cu~Ec2`O-%(ZYmL zDZKK%tO)v0Rt%q(2QIuND+9e!H28rQ@9mUTN8Zo}8ycjf^EqAp&>k(V->hYe>eW|N zsB5BD>00-EU3(){%OeV;{L7j8@b!7J?!9Zee#9jk4uwfY-(}g@epM>FhNY@<L^kdE zQ6I@WCe;;#S`)udHs=PkHsoF15~sT5hgWp#M5ET7Zq;q)efsF(^;&;whdlOtt~T_P z$>Uq&b^Got*|9oD8f&BEiL}|$<o!jS46~#;B|@9Wzm=BI8`^UAV`;r|PM><GM|KYP zYumn)x+`!%pKk5e_Py=u2$~c$$r<bnHr_Vb{_rL!WXga2v8>1(^H0k?*s>O+IzlWf zc8)dOvO>iQ<Jl4M_xFE&r>NLx<(tdvE#v01JoaUN`13f-x%Fr{9KL_w(F^9=_Kt4e z`nBK9Y07(bIkGEcTefCj$i|SJAzMTChHMVm9kM-Jvp@a}8$@<!YqrQ?j~q6M>=M}~ zvQK2A$WD>1B6~$Pi|iKJuC3WGvSDP$$d-{kBb!Ecjcgm)H?nbL=g8KPy(61Pc8_cy z*}tu60MY@Z1xOE&CLmov+JN){X#~;<q!mankY*s=K-z)y!`3te>4>dq3DOg!DM(k4 zwjg~$8iRBOX${gFq&Y};koF+`K^lZ~$kwz7>5;8z64E84O-P@RMj@R-T7~oqX%^Bg zq+LkAkcJ^0vo$S4dS+{yhI9>S8`3wVaY*No)*-z^nul}`X&=%*q=85WkrpC7v^7mc zx`?z9=_Areq?1T1kzOLrM7oKz6X_??P^6<sOOc-1nx-OMwKZ);`ie9b=`7M(q_;?O ik?vam$MCvLnir-MX?Jl(a#nI`N`|{QE!BR?bo~Kn113@c literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Belfast b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Belfast new file mode 100644 index 0000000000000000000000000000000000000000..4527515ca3f249a44599be855b3e12800ebe480d GIT binary patch literal 3687 zcmeI!dvwor9LMqBn$hO=nHU-N(OjC={5BD438AqqmtXotj4)(rmX?v0Q>h%sdF!Yp z<U?Y)%x%kMU5MsB*GZFmE_1(T%%;!t>o})#oKr`C_xqiB{Lb&^cR0@Re!e`th9*Y2 z?s+_D-{FVH75l^M!1wGg(;6)@)Asu5>D%3A#+-HLtLZb$%#7N`e7IWAO1))f51OvO z9<$NRiC<~HX;Y%-HteV8HO|wSO~Q5NnPM~FwcE@usG%1WUDOLVB<V$)cIm8n@p`dI zGfO^l>!qLWFw2r-&GM9GCcA5d$%*NuzjG_IA}ZCata{n3s^>F6=R@i(EQ)-zEM1 z{+IL*D|hQ3mzJ5IzR1yQrUmP@qcZf*qf7NVZ<=1;ZI1b+WpAC=dad5z-D@@!`kT!` zjbwAi%d%x>J=yy9Q?hNTOY-}9)pj{5JBIg_ohd<TXGxUWHMC6c9uQ{s#2nTI_3E2~ zcBN8q=&;%wzfkWj^ELY-Ch2`QKGTI&l6B#kvwHvWn&$T{`TD^68|Gm4X>%wmNFScE z)f`EA-W(k^QXlKy#QfnAb3Ce&IT5``p9~Jur>c9*sgjDisC0xrU3gBPInYC&UA<kO zTYl4=|8l9mFmAB9ICHGN)IY{tPEXKRIw$I@32pVY`jyR}Z9?^R*9B7?;-`xb?=&~c zPU@R$mYb4OzUJ1L1*UXEZnAH3o-P;W)qY(wb$S2Rx`KOxu257*`>Q1Fzp+dQT#VNh zm(@0vcDQxr$+t|ECDnA*5eJQ$8esyvtufWolzAv}wyEyDY-)s_k)W1&Qqy;v)T)&! 
zwT>;8hl?jj?RAqS_{cm}XJMAAyY>@RZ=$G>S;Lf0N>TO4#i>VPJu0++Q`I26g=*Nj zi!`cKOEqrtxHP`@goFiDm9T;!X;O4a9?LG4@J+kr@hOE8@okPY9r?YuKgkgH$p)^- zKEJ7`lx)>3I#)f>{d3j4?hMr;YLseGnyy*~Pmz`f`m0tYsnYs~_UftqUU_<Mf<))G zmS@Jdl{VAr$g_!|()NQ^s$EAvX+JPnJ=gH0bZB3yI{IyrPN9cXr}HONjPF_%lfO}Q zKK8Zhk~Lq&Zv9E(CQVfF3unm-!-uP`6W^C^FHMl{NrR-vlS$%<?IbVOjF+C_Vbb%u zTf8+oD(|n=<mGFRs$Sn+mEHvb>J<~H`edI~ebcX~gem!|UqXTEKlZe|+Gd3s&}XX* z44JA1MQ2IkgE=y|uE^k188W0aMTTxnlh+P-WLRczNy;0cUjH~+hR^Mx-WU=h$>W== z5#63vDTy`Jo00X@$PPEuTY)Z>+O&qGUOcKsRk<#scC1%v=YNyYOXjLE`ML7W)SqQ+ z)(m+!ZH}Z*N|y(G?)T~IbN9(NeedJ@<;vgtoBhiF)3d@qefVELyM6n1j=f;6D$1uE za@6W*pGv&rvhV!;eLjti^SIje@VGkRbM}SH$H&M1INwFjzu(TQcm5bxVDX)Ax$Ix! zcI`g?Taa7oXzwX<Q<1xh+*ahiA~zPfv&gMQ?k#e2k-LlBUgZ8FH`vkMVdNG&+Ix)L zWaKU*w;8$5$c;wsG;*tvdyU*|<ZdIk8@b=e4M*-ca?2g<Jx6Xja@UdDj@)<T#v^wg zx%J4sM{YiH_mSI=+<&A2NCzBk3y>Z-+9n`fK-z%x0cix%38WQBFC1+%kZvIDK>C3+ z#L;%d(Y6HXiKA@_(iKPB7NjqZwlPR&kk%l*L7Ibf2Wbz|AEZG@hmaN_Jwlp<bO~vb zqwN#YD5O(JtB_tH%|g0`v<vAM(lDfBNXw9(Ax%TNhO`aoo1<+U(mAAcNbiv5A>HG@ z8|_2-hcpoBAksod+e4&@j<$<P8<9RDjYK+$v=Zqh(oCeANIQ{!A`L}4inJ8zDbiF& z+f}5kNMDi0BArEAi}V(0F4A43y-0tN1|uCtT8#7<X|kj3GSX&8+h?TFNT-ojBfUnN zjdUAnx1;Sh4;qei9BDbybEN4=*Bx!!k-j_H#v`3aT95P|X+F|@r2R<$kqv<C0Avdw zdjQ!4$Sy#(0kRJq?M6U$f}`CE$X-A;1F{>C?SSkDWJ4f30@)JCo<KGQvMZ2nf$R%M zyD^ZR;b^x8vNw>;f$R=sdm#G**&xUcLAD68N03c|>=I;~Ao~Q_D9BE6v|9z)E68R+ zb_=pyko|&e7-YvFTL#%P$fiMd4YF;JeS>TqWal{At%K|xN4t5D-Ggi&Wd9%=2-!i% u7DD!r>wf1D6X@>q|3PH`740Ajuv5uxCsTM_vxw#q?xvC9aglCb1^fkK<6HXx literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Belgrade b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Belgrade new file mode 100644 index 0000000000000000000000000000000000000000..79c25d70ef09aaeec21f0a10a029650967172a80 GIT binary patch literal 1957 zcmdVaYfQ~?9LMqhL0H4S7)2!{xg6a~D7WL3s9ZY8CAlQGB%zgCF3rrEeOY4-b6vxT z2jM}?T<4O6HN%WC*O}Qcw>5t6|7Np!;F-<;od5sTPMse8-XG7D`8ko+A6E<W2{%`< zx%s*ETJyNxB<4x&%h7W4u#eX5cgd}7(Ne!YSn9ty@^9}elRIfc)z_Vs@B8eycDlP) zJ73(YerL{#|IsQ5*tJKxl&J)+S|?p|3#8k$+0s45BSG<Q>EVo(VE+*5*~(W!yj&Xc zDO!6y57yod9ktKp7TUM^i#iX!)_&y=G_>@FhAlgz;n~MDBJ;jP7F0`ALXAX^-!3r$ zyCpWVNMajHB+jo~;~veF0pAOC;8~XpdYGZ{m06llGf)R_9Hv8d`s>h~ARSgZK!zuJ zsVm!0Mg+f9x2sVSy{>6e&|^t@_d=4Jo|ojht1{}@0U2F&L{e(cY3i0TNjthv$K>V7 z*s={eZqjBQpF2m>`{$}BB}pgvr0GOwZ=Lu#Tr)Z(O2)l*I{8yCnR4o*PHpg(X?xpB zX7yW{Uh+Y*%IjoCR)fr3{YGaEIW4m@Yc)HtLgpk?X-><NIyXRd?yDU-udzVqUoF-J zk372Y&{WO6vP2ebO3=K@bXhzvQ1ds&$dYmKQjp^#OC!3-GWS<m-qw=Bunt=I{;3qT z`K(3tm!$aJeO+;)QdZX0=&Fhfy1Ht&t|>mKC0k0=(X2^R^KzKCSMz`QvaI%T=6qRg z#A^2I>EG4S(z1Sk^4pYDkL7Z6r+ds791h-G4u|LOr+J0>L;Ey;vA<(kKg=~f{{2Yg zbR#DmIpxSnM@~C(;%&{TM@~L+`uVpMU~4KsN<eBria@GB%0TKs3PCDCN<nHtib1MD z%0cQu3bHj7AtfO-Aw?loA!Q+TA%!88A*CU;A;lrpA>|?UAqCo+3Xu|RO^ryANR>#L zNS#QbNTo=rNUcb*NVQ10NWDnGNX1CWwx(vJXj@Y?QZ`aIQaDmMQaVyQQan;UQa(~Y zvH-{mAWL9t)&N-qTeAwtG9c@KECjL=$WkC{fh-2H8pv`W>wzo?vLeWmAZvmwimh1{ zWLc1PK^6vC8DwdYwLumKSsi3~ko7?p2w5RyiI6oy7RlDE60%ITW}T3QLRJb{DrBvY k#X?pKSuX4Uo-S`QY5gMDG-qm5XiR9hGc_U{!=k)@0!L5Jf&c&j literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Berlin b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Berlin new file mode 100644 index 0000000000000000000000000000000000000000..b4f2a2af6de4526f909f743425c0858d32cd14ad GIT binary patch literal 2335 zcmd_qZ%kEn9LMo<`IExPeZ}CKfKWF=><X8^i9u#pU<PQ)%}6CA5VQ#6Y79!lk~!Cy z`|23XA^9i9tQqwHZ4I^AztKVx>}F`LEmveU$7W8dvHHDFTebC|XFciMd(P|JGk9>f z`+57;Y^q7O{&9qvC!8F6&B=Y_4s+>>aIWtPT%0&C)~DS)$MuzWR1dax>R?!d4viPd z+1H#pe8elC?#q;OFD1%|KUzjkbXcQhr+17VULm8GGAGVg#pq{6E)C?2Bm`>5HE?u` 
z^?7o?TyW2kFTy%xEa;4mU2T%c;m(O!B}?VDj9)a$^@B#w`cj?2XVrQAw9dZrrrv)3 zkj{C3ug07_C9%i)B<{dLncJokzwJdysPoGm>#F3=VxPEj^W`pgwj{<T$=wmNB`MUa zNjEZe-ZzPw9EjHWA5PPI`X|-hb5$3#U(}S=Pc(JwN%d5}p=sq~lJ4)9jJyHKT-702 zaov)g-Xz&mEwV7CT^C-il$>81b<ruW+;^!=b9*Z^Z(xz$ziWvu?uyk1YFxUcHAfyS z2vu)&j4VwY*L?4k6oh`Pg|4rp@Y=VsEclEp8y=L0uDmKmeXmRL(7Rf)r%je0JFF|} z>*e9Lox1YTmvvR$1}(j}PJP9N`iOJ6u68Hu>YE-diz<+^3z7QRjd`->#80|55H9Nu z&6M)~ALa3u>r&A^EbA)*Qn~$mePZ$3QdK^r)$#jfLtdZOOh2x*ajLc7b?U|`zit|A z)+aCf^r@q3weF(^d3txA*7ugm=8f_C%&shHSeYyS8mDYYn=4!Mf0k!wTGE&rrHvC` zOVf;-+H~#%X}&h5&%M<v&kqmiw*Bwv_P%a?p}9v}_Oxh_BPclJ&sV4;gpap4>_<V< zrvLXZ%Zk`-e%DrPf+NJTW(Ys^zu(XIIBvD9Kdyg0FD~&}-uzN7oaU1`91in#aAyvO z!}ssg+GqaIp4O!OKbG~IIW`}uoJ6K(YvzVb4w)S?J!F2!1d$mcQ$*&7OcI$TGEG}E zPh_IDW~R1gs>ocC$s)70HPf{<^F=0%%ov$6GG|*eX}%`2My8F-8<{vVb7bnaX70%3 zk=Y~DN9K<t0LcK70wf1W5|At)X+ZLTBm&6<k_sdjwk8=!Hjs26`9KnaWCTeGk`p8; zNLG-vAbCL&gJcFt4U!vMlN=;FwkADDevkwq8A4Ko<OoR;k|iWfNS=^HA(=u_h2#oJ z7LqMnlP)A*wkBan#*maDIYW|$WDQ9hk~buANam2#A-O}6hh)#zqz}oTtw|t~K_rDp z4v{1xSwzx^<Pk|El1U_$NG_3NBH2XJiR9DPBoxUgl2Rn6NK%okB56hPiX;}vERtFz zw@7l4>>}w!@@s1njAYo>q!`IDl4K;yNScv6BZ)>bZT*ir9c~)FAdQ;4OEOZjQatXG KG!Ld`g#Qj|*MPME literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Bratislava b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Bratislava new file mode 100644 index 0000000000000000000000000000000000000000..ba82f311b74838347de6dda464a22e95a16e0408 GIT binary patch literal 2338 zcmc)MdrXye9LMqJfXHRazG!eru7!lukvoJ2nH_<lU@9jgl|V(*B7&1KC=E;ITx0I5 zGMz)xjInA)ZDFmUwXUOucs&`GYs-~s<=o6kHCDg(6ERx;@AGiZ>&L@(Ha0%*;aRr6 zCd2yQ6=a_9;@V+eoWnMow~k59st(_|tNVs}w6p85zWl!G{?<L(AKah=7Z=E}*PS|e z&@IPzX32>c6Xm2gLQWohr|Q(IKsmiRTF(@pk~6z{<9&YKcwb4be3jfQXI<0e>)_oo z<fxaSOHCrbR@aABC+QSVq)siJC%0w(q2UQXYsA!V)#-mso!36l$O~`j?WYguv`=<u z)RChSeW*ub_U)J0HkG)IFGzf?SMFG`TJ9|INJ3tLOn2o-Vsw(+H7Qb(0^FK(Jxgc& zkf_PN2)+BWK)t7TSY2J0bY}ZGO=<m7Q#TyZwCXoCy<$i*yuFf{-zQm1c1w0lr{rWb zNzO=%<VLk??)jB6>(54=ebg=YeN(P^-Ibc(H(T%DI#=g(MC$`J2|BlRmONM(pzi7@ znU{D`3)~}881RJ_C447ESALXY|Bs}2uwUk1cvTkkydfn6A8P6LHd%P+6@93#P8PLo z(Z!Fvq)TeoXxY8B>M1GGhn)*`sViBR-bmB(@Iom+J4GM8K0}th`<pKJg~*BnVN%ii zi#*nHO)A?5rK-{=D>q%%Rde2x)fEF;9k*B3<o9Sz&|zI0qq_E|J-TkhtLyul_3`r_ zed4v{TKjo}Jh?4j>$=P2sdaH$zcpJL7Uzk##wkyy$I6C+-{qMwOBz$dwejlr(iD0_ zn@)Tx%~yu>*|)ppxxqf&xc6h-)YGZYH+N~v_7?SX`1wy5yB+2jF#5o6V&JW~Eo<^N z^LK4U$2%riR;XCP|2<olWAgYXi%LC~yP%9Wk5y#8F_*)^mwh=Lo|`Yqd@LVrU%N)n z;lra3{xVl=@y=naJ$~7irSUy$Ll%du4p|<uK4gK&3XvruYeW`_tP)wKtyw3sP-LaZ zQf<vzk;U4Y)gsG9){86{S+T8IGQS>cMiz~%8d)~7Ze-!e%8{kpnzbW~M^=w4A6Y+A z0Hgv)36L5fML?>6lmV#&QV66HNGXt7*qUM>)j-OD)B`C9QW2yiNKKHUAXP!ig46{m z3{n}SG)Qf1O>vOw*qZVn^+5`RR0t^%QX`~DNR^N>A$39ug;WYD6;dmtSV*;OO}UVI z*_whO6+=pf)C?&aQZ=M(NZpXaA(cZ)htv)!9#TD9Q$D1Awx)nc1(6aWHAIStR1qm7 zQb(kaNF|X{BDF+{iBuCQCsI#aQ&6O$NJ){JB1J{2ij)<pD^ggbvPfx>+9Jh8s*98t zsjsamFj8S#Q(~mXNRg2$BV|VFj1(HFwB_(ORSp=t{mgOV=mY=2pj&bO+i6p4(`i#{ y^DjxLSpT$n>}*;bHvXq2CeSx4_V{F*bBKxg%yhEuD$PvEPDyi>rl(<QX2{<symJ`< literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Brussels b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Brussels new file mode 100644 index 0000000000000000000000000000000000000000..d8f19a6312a2c271e09103ca2674b33c6c33c439 GIT binary patch literal 2970 zcmc)Ldu)w&9LMo<+8&ppzF1qT=4@S8ta@*U>0GJ=t!qc8u2aQSSDi^~C|P5S$k!r6 z${bW(Mwzv4E#_VsosBX!k&rRlV$Wv6wlwy8KOz#g#D9LL&+|H`d7A#yzCURrCTGPs zo;Z!nceptF%*FeFx#qT_SKqnCjYD&bYcE-=E|%%)k_!EFuj-nj<+`TkUX*9-25aq{ z%G`Ak&8_t_Q>>EsdDe!}A-ScY@z%z$Vrx?er?t8MY-{taxZEu_##o<S7-Vfd(Zkwy 
za-&tYF~r*L8JN4{LzngW!YS6y+!W6jc{@GjgCaZ?eMh)=MTuv3%nZ+-rhj<$+`s18 z``bCszVnORmEZmDuByK3-oNLp`@ouoo`WkY++P;Ab|1=L?mj$ef%`~$f%|B;NYAlO z8Sdj<4!KXHpZA=&)X-Diua*1Mjs9|abD-9&OOmfYj*~MP2gjbt8Li*s1<1Dpy6f4c z-Q`^W5N*<-F1Ja^c=cNpBu$HwRi-Csv&=AUp4L)Zr1p~MV*ghEkY6;Q#Sa?jeOd!= zAJCT9wri`i9t}FOR9jc=m*A~s(q`2fd491<+gbC(ot-Byj2tg7rld(o*JODqJV8Q( z!{p`0EhWsAq+xgBwB3)P+P*eGI~=R8uaw`@@RFO_aoHt}DEdkxr&noI<~EHUbx~sS z$|W|jQsRbuBJpijOF~S(B-9m3r`F4~)8%o}`JWluWq*>q_I<i`EghqYm0k4p`8~8- zaj<sJ3eg@#ouy}QmnLPlmR_MZG&!kGdb>_)pO7ok=hk)U>wQT2)>KQsYwM+d*%nDT zbx>26ES3RV*XqEW92vCu106j4BOQ`GL5IGYt!XKJ^o_s)IxM`s4!awr>HfVX{X%nn z^G-V%vFlIGsP&bRo@O$t{8t%Wcw5FStC6u|YGvH)pY^S7yJh^SQ<~X!rA$aH)2s#+ zI<bxF#GjVyq`EwvTwS1VUry60n=&-}_%xZiAW?HlhsryX+G_6nc$qf1tK?+`%Dd6e z%k<>G<h^DNnGxx)GpwtU?{`=8&m5J4TNicaj#7EQrc!6EJgl?JR_mOC5-nU(s7^1J zm$#3%PlI~(pZdk~-Y#xFUJZSp)yv`V2{Qkr93O|nPaF*#j$pU5@#F8E%^p7U;K@hN z^h`~23{OsTB-z(cKA4+1PN(@Y$NoY4vor0#+hKm2gNsvTF5U;-<C8h}%^9}q%5|K2 zGrJUN3epu@(-x#JNMn%BAg!@Ay+N9TbO&h<(jTNjNQaOXAw5ExgmlT)v<c}G(kP@; zNUM-uA<aU%g|rLl7t%1KV@S)8o*_*`x@K$IhV%_-9MU<Ybx7}!<{{lf+K2QHX&}-; zq=iTiktQNtv^8x+`iL|V=_Jxhq?brDZA~|kb|U>m8j5t3zX4imYkG<_73nI{R-~^; zW0B4xtwnl^G#BZvt!XdPU!=iEhmjT|Jw}?0bQx(g(r2X6NT-ojBfUnNjdUAnx2@?n z(r~2XNXwC)BTYxTj<g-=JJNWh^GNHF-XqONx{tKq*7P6Q09&&I$QB@bfNTP?3&=Jg z`+#f&vJ=QwAbWvq2C^H-b|CwKY>2Jd5oAkj&7L5eg6s;iEy%tg8-wf&vNg!wAe)2i z4zfMS{vaD<Yjz0PB3rXZ$R;7XglrSCPsm0gJB4f&vRBAvA-jca7qVZ-h9NtKY?-au zGi1|{T|>4F**9e4kex%e4%s_o^N`&`wh!4qWCM{MM7Ge@>>;v=wq_TRZAA7F*+^t3 zk*!4b64^|L)62V_w~x!m)v$hpr+)FgPd#ofZ{J4G>h*9n&32mAG{>x_pRikI7v=Hy vPXC9`Jb3caGbv^}|93y17*OWa*UW6k=p-|-;i<6^@extssnJmw8SDEm0cgd8 literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Bucharest b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Bucharest new file mode 100644 index 0000000000000000000000000000000000000000..e0eac4ce33159ec614c51379c6c48935a114b470 GIT binary patch literal 2221 zcmdtie@xVM9LMp`1y}6)u@4e~0ivRTcEACmXy7jbvN%OKja1}oA!41BsO(7eZt0v; za_@|3mTtHlvu4B}lo=YSWVE@Hez?{e^Sf--nzJ@?)*P$P^IQKk*ZQZwdf)CIx8uk7 zgV!^#d|gAny5-``Cp@{`=E?i`h<Q2D^I6=<*(27;pAK2?Uw+Z*ySU9dHTZ^o`n^}} z5034z`(J6ZKisw6J`-AHpKV-ZpR1W`4?L1*e^fNlKA)9ff1Dg^$G`QdHKy9GO7Pz~ zbZ^0sm6$Ocxi29aNs8%@B#rb$lCK<zjQxCH<o<KpBPpleR;kB3RobDK)VTH+RC?P+ zH9lCcCahelCRUcKjN%z;k~d3DP93WrxXYt5-G!0NaIc+t-D72ad()co#Z_zSz_9gT zZ`AT0?YFWgb=uh-Jyy<+m#cC&?X!IKo2&9_yY2kuEkgw*eR|rG{kkyiu%4bD($h!7 zx+tYXiiTI~nKxTx*6C9H(APCm+*KzfeY0ft&UrHDwN#ngkRkJS%+&MC-BMbgq9308 zy_A)X>T>rdQju|4S6ur}FNis#7eoj3qgP(nk9EJPD=+kme{Z{9c>IVgYHHF|?b~JX z<GW=^aE(;Y2uh%`LY5{ilx5y2vh0RWY7)zJ&6i{3iR)Q<`MbZ!ic227vNK-S_FmPi z!Xvt_BdV>sOM3OzALYq8C-hUb7o<M@pk7nbEe&z+$l5fKwLct?#?fY3H`ppq4+rF# zV=E+heuG}Wt3;Z*s`ayt>GIspLcL*ev2Jci(i`)}=}l$7>CN#<x8x>D%g^8F&^<RK zG&G=FuYD!Y_jKtk(LQNAcviM{AC_&cM<u*BEH3wLF@Jx=x?^wojPLmCL-SQ0x8Lz! 
zKa@%inSWYdW0jifbD96+)bZ->e|lf(52*R(SyoofOThf7%k}4zU*mEG{(UZc%r`o7 z`IR3r_q(~^MLQ0e4>BP~Gb3b5$efT#A+tiJh0F_?7&0?tYRKG>$sw~triaXrzljMV zGeo9{%n_L+GD~Edj%J?7M3I>yQ$^;=F<Fk;a!i+Fz8n+gm@&tcIp)kUX^vTQOq*lg zj%MPHX6DG$k+~z2M`n*qADO?SNdS@oBn3zgkR%{kK+=HZ0Z9as2}hF(Bo~e*8Avve zbRhXa5`ts|NePk@Bq>N%khCCqK@x*x21yN)8%L8IBs-2KJxG3#1R)thQiS9PNfMGJ zBuz-3kVGMwLQ;j~%F!eX$(Ex@7m_a|VMxZ1lp#4ol7?grNgI+kBymXQkklc$Lz0JN z&(Wk0$sdwHB!fr_ksKmPM6!sa5y>NxNF<X;Dv?|w$wad0Xwr$~)6pap$taRiB&SGH mk*p$VMe?ft$9?sf=w|1WU$4I)r!dEtTj2HQ`J8vtJbwUKI6@Er literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Budapest b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Budapest new file mode 100644 index 0000000000000000000000000000000000000000..3ddf6a528983fc02f9ec0a534756e4d6c17e0e70 GIT binary patch literal 2405 zcmdtiZ%kEn9LMp)zf5NCONLhigd#!;`FDZDAhR1V5=`V~Bodg2e<DI62BlHSTyxBQ zYfOiTq7hj$Y76ZFS_##3ArVFf>M~Kl{xFK1W@GhxpC_&Lpl3bkoZa&}oO|z+J3eps ziuJ{D)<2FZ<_Ry32J_<nrWehnahjvBv43!U_duIAwH(qtF}Y=X-i&D8Tf0-6clCwr z`?yn2OmvlXk1UpxZ#lGQze~P&Ena$G4wK&N9%WxPRGshJ)=_wBX{&yf(I@>$UGjBA zyPTf=hI})1hYWZg(}BxXGWb)po*B-Q3;Pd@4`u&ScyY_wvP;ge^TQS2jgR;pE*y3D zS))I9>Wr*J>lWvA^$-0)XU;gQ4zH8ynD|g<4ZW+gPc`eUU9W1uk)slLuuXz?@0B@q zD#4pxl8};exozcYxjoY@p~<OoM`WUe1%}I=(`HGyk4wX^#OvHkVH(jtQ}6n8ir(FR zO(R<_>%96wjjH`zqbrVROwl_Un?E3N<?Z53>5%wkJ0u~fNfP6#ByqAv<_Fa4{Bx@$ z>Gw)qaMUIDp2^kZ)&foGSfKZ9U8D;e1NHvmP+e4;BoCzdsH-SI9t<1NRM(`W`JB-7 z&~GJuY*;e9j!8z(ad~Lyby?i@wq$mHq*>eRWXZvO`fzEfEUnw3*^h41WhHAg=bjRE zXQt~TjwQN0GD4SMjnQ2HG|4@Evp#lZuB<rxtLF9l%F1Rx$#4Hr9<P~@g8CjQEa;b2 zo4?m57JeYB^SiYuxIxyWv}y5;hjeX_>e|trx^A*u*B`Ie4d>kY<bgab`D~*+^-7AC zw&uvw>w>jxYl3XdPL}dwhddKIM=DZ(lV|-bsf_m5%JB<QHT|kq^?oANV*~o!d#&<( zPls-5=+w<^P5MH0i`HzfQBMy~FK=@(r?;0m?Z3I<v>)@9&kg_iX<2?3&Huj@7~<h= zS<}Uu`uF?4o@8dZEmvv|7dPMVFbA3&_rI@gm-(Q*wrloxEb9+*Y#4h#5LutCSs=1P zWQoWcZOtN)RU*qo)`=_>St+tqWUa_zk=5Fo<s$1v7L2SISu(O_WYNf~k!2(6wlxb! 
zR*ozkSv#_LWcA4Mk@X`5Kq_EsO2E%W4LB$QQU#<8NF8iVA&^QSr9f(d6a%RSQVygZ zNI{T_ASFR+f)oX*3Q`uNE=XZ)O=XbMAhkh?gH#784^khbKuCp<5+OB0iiA`NDHBpB zq)@h|Qb?(6O|6h(A=N_4h13fv7*a8$WJt}Bq9Ij7%7)YpDI8Kcq;$5Xc1ZDTP4$rS zA@xHFh*S_MAyPx6h)5NYG9q<E3W-z_DW$EcB~naVQ%$6tNIj8)A{9kSiqsS-DpFOX ztVmsv!XlMLN{iGMDXy)lE>d2kzDR+Q3L_;(YK#;asWMV#q|Qj8kxC<_Mrw@|+tySY zDYvbuH&Sq<;z-Gnnj=L=s*aT1vQtjI|Ced^HMO4?OZOwQoKXo;F_BrZF&ORi{S!K& B&MW`` literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Busingen b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Busingen new file mode 100644 index 0000000000000000000000000000000000000000..9c2b600b103dc4d1f49b5f087055e19d3e031129 GIT binary patch literal 1918 zcmciCUrd#C9LMqB0S*~U_N9i$0t7oE{yFj=LW4|?z^DVIoRk#AKL~0>K}Mh$G#PV^ zxvv|mxnj{|%sHYiP%rS$(sHK6+KM$S=cY5wnsZYI)>!@CPdBW(_IY-m*K>diE`IM% zaQ%+zbn~CfZGXat>mB>yeP)||T=eWT7q8WuOA`Sze74j?dw<hePyDQ&&~Mdq@3hXo z{<%K+eUCnM?x4n;JSDOH0}}V~Co-o~CBEf7NvLg*x#e5s=|E7tx%o2Bmo160NizSj z*^=b;Ytne8F1V4X3rA<^qOTv(#e<XT>-|-ibY0fu_Dh<w>!hYueWqy@W0Kx5C>eP} zlDXys$%^Zi?DQtdo@$kxm@dt^vRRh?*{I7-`Q_Q4$~3pHQuBtE>2n<`boocI`h2xl zSF|scl?86~SH;Mx#G9J$pOOOiH(KbuDus7`k)qHqrD)`Wym0-nyf|=F0>fvt_&}$W z^na=^)z!(%o%?k4y7zTW?KUlardET2LVd+kqHBE%b?ro|mPHpx*^iIwtK$n~{qg&{ zVKhR@d!nRb@ORnRdQU35Mr2dvsBGSQTemDfAzLekwJQFQY|9(a>KVthCQh~H)`z-% zszG;LXx5!qg8JH#4O)A?US8jyr*(a$^2YXfeX}D=>R0DVL$ycVN}D6Q@*l|CQAQe5 zqP6kvHED{R(5COclIA;Oy8DYh*)uYvEr-tP-hpm?x4Bna5438C%if`35BoMI{11Do zOl*QH%$P_qk4}GISsXO}{8Ao4{>tTY9>M=Vv*Grae7KtJhxe#SzS-+9d(FFhyAA7q z2=747vZFoE$eBjYH5X?aIp4?`ceLjmIqS%IN6tKQ?vb;PoPVSNqywY{qz9x4qzj}C zqz|MKq!UNm3epSG4AKqK4$=?O5YiFS64DdW6w(#a7Sb2e7}A-eZ4K$o(Kd&4hqQ<E zhct+Eh_s0Gh%||GiL{CIi8P9IinNOK>S&uqx^=YeBK;x_BON0xBRwNcBV8kHBYh){ zBb_6yBfUG?=8^6lZTm?7$Oa%gfNTM>2goKMyMSy1vJc2cAUlC<1+o{&W+1!aXtx8| z4`f4-9YMAP*%M?_kX=Ex1=$y5W00Lewg%Z7WOI<+akSfm?2n_}AY_M-EkgDP*(79_ zkZnTt$+$v8>>cKQ*tan=_#fCQyHIwg?AJ&!GpD}?>`wiAtNs})`;4&TwIq!h^A%?# PXC<fliqldtB_rZ*zi7H} literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Chisinau b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Chisinau new file mode 100644 index 0000000000000000000000000000000000000000..2109b52a734c1eadbc8ffa4fbdd5e6a53fbbb60c GIT binary patch literal 2445 zcmeIzeN0t#9LMo5@hBo)*NA5|OhCQzD2gRY;>8ruiy)E-o{|s+EmRB>@HgieO#>5a zR;*ZSDw{G~j8f33Oj!|7W2=VN*_ve!)3eIyGFQL%Ay%zFT7UFkpSyG4c!y3mzc2FF zY%EQZzs;dP;bD#X;p=Fn-(D_%CE$&YkNr+9_|Z6Zt;IO~#WP0tV4cy^*Jhr1^F{OQ zv3=%ShilB<hRx>N)y3vJWh=~g3)9VW4<?%LrAC?OWA8FAgm{^KJ<Z1ZPu9Bozs_=e z*cN<X;MEMr;KA9Bq2&jRpqQgWcZXa|5B3{z2mgF$Z^*ZSL!l!X>0$lNL*YHGR>bjV zt;m)=)};O0tjRSc)|84>);()8t*Gp|*3|j2*1gV9Yg&ZA6+Qlz6`hf7#(1ZwnCsCh z_HwYAZd@^@4}WjWI5+5?**Rc1UpwiZb*RIL+uQ7p-+s_YD5-KM7PXs6TXzj5r=5`5 zD;p#wvPtG7ZIU^+D<w6&UZq~jlX<tw)O}}C<o=OmYW|U2mDcT43+j{A!j}S7dTEe) zVDC&><hr3UN+!tSX`d-q#x=?G?p8~tek4n-ekw~nUzeo=r)Alf&q-E$i)8noR5^`x zvix|nTCv3t_x>uCyShfLtXQq`<`%2`?0ITcaJnjRhO2@bQK~Q~RSG}xQV(4ZlQmsm zsfUMuk+rR!Qq=jm6z{$)>kjpbvF?IAQhQN7y0A^w7oAllllRGEX|1Z%@2J`kxkqid z^0X?uU7|Mjm8(scGFADp9940Cy=-oXS6hy_<ngkJ>WTUosmzU&t)=5+TcSg@yS|eu z#}Be2-dF9ghNU|28&y5jB|EQvpq}bDEV~9ysG1i#Rc(8ds@wUz+TB>GJnr!J^!n?| zXPmE}uitq8(O1BPKVCliJH37W!<R_7pZ<?5VLyqNNJzElU(HDH2oQ;wA`{2nM?VR4 zND!ZRgpR$Zrh81{`z31Zee9F5KOs9OUo!NwNIx!D9&h>s`_aG1$y=<SEdBx>9;1)_ z1t0u*`Tu;EP5S%oyS(}PZ~RrSpsVjD<xVNLO1W3N`erG2OSxUj{Zek2a>tZgrra~- zrYUz#xoygQQ*NAc=agHg+&kswDR)n~eaih)44^o`KMM;e9#BlMt6iYjK=FZM1jPx8 z6%;QhW>DOq*g^4wVhF_%iX{|JD5lueu25{Tt9_vuLvhBgwua&j#T<$|6niNCPz<6t zM6rnC5yd2iOAMPBKH1eqF`TlitzvjZF^l0A#V(3p6vHTvQ7of)#xRZI8pAe<Zw%uo 
z&e_%0F}$;@&11O7u#e#%#XyFG6bl(1QcR?{NU@RPBgIIHlXkV06ff;+GbwH|?4<a~ zFqGja!%~K)3{x4dQfy`TN->t<EX7)iw|2F;6n821Qv9VDOmUcDF~wts$rP6vHZy!? z7|n2+VKu{RyV`7q+jh0x48JLcGaRQ_PVt;#I>mMQANGF_yTAT%Fe`~449=Y7xRkgA MXHH@QjZgOf4F;`^qyPW_ literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Copenhagen b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Copenhagen new file mode 100644 index 0000000000000000000000000000000000000000..be87cf162e1a1a8470574ae2c62b92f61589b903 GIT binary patch literal 2160 zcmciCdrXye9LMqJoWl_#WnVNn7B>qCts{5H3utx(W^%l7R8kR5L@6RUDT7ieGv*p| zUmeRia#=Z6O{hPp{=mz$%W&b5vNLAobUL-$%*~uuWA%GKj^V1m{XKi0*Tciy{DaRs zu&ky!+xpLmG+($mE#~HZ^vmYap47UkJ$$6U*SB;--<7#?@_mnn-toz)-MP}gEmgh_ z#>qDmL{B%pCf~+)YItR*gwLIqfszwCaP+v03GKNy)<0YB%lT7dQ-0C7u|KHCby7Xo zkLb9IAL{+5_v-_n?b7%|hb7@)mn6RVmL#>RByWC2#@7br!4+%dq0)e)6cot>Z@#1^ zq{+jh#z~snr)f8Gb>gK|ofMAKN4|*EN4syS_rOm&x$Qem-~N?mY&@iyRh^nuaYnL( z-I7z-Be{#-ki5iwlAqlu`Gd_eCB99koLwzb|7y@_hkY{r`*JPlsMNxqX*#22md<QX z(8sG&bk_E%@<g#)eO2)?JN1ed`39xfeL_o8&PmDD%QDAxOy-1o<;jch%G|CGq_pp- z`ggX<yn}D+{JJ_>(E6$_T>82$s$HvPkJW0Rv_uzs=IIjeBwcbdQ_EwErF>wFK6PWF zEc@hlT^^2-75ihPqWf1_*?e6p+d{IcGAygN{H#yU{8ZLd^l4S{URhh%rPU)p)^&-h z>wes$>j#5c)7zxaoDJx+?=9EbFYD#G9fexgQ6|r?Pu2}Bc~ZZyK!VjCc_AxFHWvLM zn_?_!$cWX3YZs(3`ldGae=bc|&*+OEb;wJh9^Jh6xNhm%r(2s2X!Fiybvm35SHvBc z(>>zuUzT-mpZNo|62?0sEGt^9dxoC3zYKr(`2&`(sEkK|f8j6(%}e0_=P=UzlAE)` z+`Px!;wN+dHm9cLR5#X-`YjikiLIFmG8ber$ZU}5AoD>cgv<z;5;7-bQpl`q&9snt z*_w&*e`03H)R4I$lS5{QOwZQL51Ak`Lu87`9Fa*PvqYwe%oCX?GE-!#$Xt=hBC|!N zi_F*7Oc<FlGG%1W$fS{3BhyCajZ7SwIWl!*?#Sek*(1|O=5K2ffMmedqyWhQk_03R zNE(nlAc;USfusV-1(FOT8%R2kd>{!yGGc2|g5<>3Bn8O|k`^Q{NMex8AgMuegCqyZ z4w4=uKS+X*4B46#Avv-&NkX!OqzTCrk|-ooNUD%rA<06rg`^9~7m_d}V@S%7oY|VB zAz4GxhU5)N9FjRCbx7`z<RRHZ(ud>^Ng$FzB!x&0ZA}u9EZUkhB6&m-iDVK<C6Y@d znMgJ*hs)`5M!4^|Tq7dy{xuX%lTQ;)lSxyzXt72QJ#T**{$5Hx|CP|;hMKo1)78mY U6xQp{NzY5q^!l?hF(W7HA9HUhBme*a literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Dublin b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Dublin new file mode 100644 index 0000000000000000000000000000000000000000..655daf37889fd12380c7f055f2cdd85dd4665e03 GIT binary patch literal 3531 zcmeI!Sy0tw7{~FyAqu#?np>js8g3Zk0hiPyat}pvIVov|mZByZk};qeIhCd3IFAiw zrsnHTF1X=}<{Ii`E-5a!Yc9C2i3{5M{nvEUMK@k_(K&O@=XmCD>F@h^dk;y7aQ^WK zu;1|Fan8Q@T=5h8Hnr}GNmCD&DnD(H$4sBI)qFo~x|uP#nlYcR(=$^ZnOOs;>Di-p zm^ra)%-pDldTvIPGp|lRJ-=SA&TJ5>GcOmK1<ro6Ag_{MSa3rx+McKv@BCF~&5zYf zOqyBxg-0*@YOh(I6k}E-FE`m;!_3O)-g=csnbi>~W=*+UW^IKyv-bJ{v#!9=IfrlR z^+$W@4Qux6AD2Bg8^2wtH%$%Hn^Pz2EhCHc*0?mit=k;)Q}aGLx8-KNJ?@~{p6_oC zM4!@mHEWrJv5WMf(E9pt*hGE!{?|IcY?98ud_^BUSH&FrdAB~k?Y=pYeaW26s;p0C z>@ug5Uo~fjj?ibjH!#0>#hi;MZO%t7))xXp^u>x^bMav*T~IV!U&_C#FCXurudLgn zudaAtu6?&mUmr8b+?X+1-|Qc4Zl%ZT+no~ho%q)JZmlxr_o!ff&$(_2gM4-2seR`D z;|u!1rWNMlMIZBM)Iw9VJtxU0DOVQ{iPOGaGj$37mb#>8ye?T#UHhv<?Z4x(4!9Ak zOD(TvO7HdPGLs&evP&!Ia>Gv;H6_fH@3zTQNK@w7@L8r}+$~cn__9=Po-0*+evztG zGo|X;CGuS1c&WB^k_4Wfuc|M~QZ+V@Q#B`u3Ys}o>BMAJYfKmQe2iBG_iv<Xhc;7n zI(3%1rK_rX4H`<lyG<k{pqzx{Rh9+?H|2%wLJ8fuUm9lQOV|%9rO}A>;u$wt!uuXk z5y{!Aab%8a()}CNw8nJREFx7kD@s?*12d%g@&2mC!xU+`p{;u9Xq>c~7cY@HE#>90 zttDz|b$KNrSXzJ9Lbd7OD{Th^s&;iQNc*-$s)O%N=@@)cb-Z?7Mf+@4(Ytr3PG@JU z&RGjo%&v{nW#R-CyJ)7oHf)&cI^k34)@!_UPaG&cUQ85kOh<XWO04t@4UwMrJQ7!_ zgNi#;LEgAqU-e#fNBZOis5eb{)i?W!dMo|5iqF`s`o-s|{-ZC++fl34JAHS_fS@UA zU}Tmg_^p&dHADtooGgQjl4Zz_G<o;9SB7Txk;L5L>b)-`W!StP>ixk%k~Frd8s4py zN=~SvJ_xU=Mzp`LJ}mF3ltz^#<;EG6TK1l#?%k%+t{st)OXsOkyL05DDO+T8)^zze zZH}Z*OqXKM6!-c2S;DvYQ-1oF^!pDypFDX`+i~oBIj6Ye_yra7bDXlOSZTXQj-%Yq 
diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Gibraltar b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Gibraltar
new file mode 100644
index 0000000000000000000000000000000000000000..a7105faaeb14ccf0a5d3588e74be709fab224275
Binary files /dev/null and b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Gibraltar differ
diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Guernsey b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Guernsey
new file mode 100644
index 0000000000000000000000000000000000000000..4527515ca3f249a44599be855b3e12800ebe480d
Binary files /dev/null and b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Guernsey differ
diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Helsinki b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Helsinki
new file mode 100644
index 0000000000000000000000000000000000000000..29b3c817f4637e98623c5f76a6078f18157b5cfe
Binary files /dev/null and b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Helsinki differ
diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Isle_of_Man b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Isle_of_Man
new file mode 100644
index 0000000000000000000000000000000000000000..4527515ca3f249a44599be855b3e12800ebe480d
Binary files /dev/null and b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Isle_of_Man differ
diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Istanbul b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Istanbul
new file mode 100644
index 0000000000000000000000000000000000000000..9a53b3a39063e86859b12aa755b78cfb38a26709
Binary files /dev/null and b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Istanbul differ
diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Jersey b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Jersey
new file mode 100644
index 0000000000000000000000000000000000000000..4527515ca3f249a44599be855b3e12800ebe480d
Binary files /dev/null and b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Jersey differ
diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Kaliningrad b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Kaliningrad
new file mode 100644
index 0000000000000000000000000000000000000000..37280d05f9a993b27468b028bcd8f0774a3ce017
Binary files /dev/null and b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Kaliningrad differ
diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Kiev b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Kiev
new file mode 100644
index 0000000000000000000000000000000000000000..b3e20a7e3946dd522e50db39a81c0e5f4f1bd619
Binary files /dev/null and b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Kiev differ
diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Kirov b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Kirov
new file mode 100644
index 0000000000000000000000000000000000000000..40b558f82117925877dc9ad67522db00a7b169d4
Binary files /dev/null and b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Kirov differ
diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Lisbon b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Lisbon
new file mode 100644
index 0000000000000000000000000000000000000000..a85653044e0094363ea0f30d017a20ccdce2227d
Binary files /dev/null and b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Lisbon differ
diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Ljubljana b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Ljubljana
new file mode 100644
index 0000000000000000000000000000000000000000..79c25d70ef09aaeec21f0a10a029650967172a80
Binary files /dev/null and b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Ljubljana differ
diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/London b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/London
new file mode 100644
index 0000000000000000000000000000000000000000..4527515ca3f249a44599be855b3e12800ebe480d
Binary files /dev/null and b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/London differ
diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Luxembourg b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Luxembourg
new file mode 100644
index 0000000000000000000000000000000000000000..6fae86c53176e605311e09823fee55d07d157405
Binary files /dev/null and b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Luxembourg differ
diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Madrid b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Madrid
new file mode 100644
index 0000000000000000000000000000000000000000..9b51a73bd52e9afe7d788f548213ef57415f65ee
Binary files /dev/null and b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Madrid differ
diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Malta b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Malta
new file mode 100644
index 0000000000000000000000000000000000000000..c1208e2d2ec5e948a30dc3976edee6921d3d4aa7
Binary files /dev/null and b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Malta differ
diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Mariehamn b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Mariehamn
new file mode 100644
index 0000000000000000000000000000000000000000..29b3c817f4637e98623c5f76a6078f18157b5cfe
Binary files /dev/null and b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Mariehamn differ
diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Minsk b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Minsk
new file mode 100644
index 0000000000000000000000000000000000000000..60041a41890eae1902b2410f1225a38cc7614a13
Binary files /dev/null and b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Minsk differ
diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Monaco b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Monaco
new file mode 100644
index 0000000000000000000000000000000000000000..0b40f1ec9321e0b521a0d36a8e6ca9a9de08282f
Binary files /dev/null and b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Monaco differ
diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Moscow b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Moscow
new file mode 100644
index 0000000000000000000000000000000000000000..906bd05f344ae9a806c1b20b8e2bfd7061d35fa1
Binary files /dev/null and b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Moscow differ
diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Nicosia b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Nicosia
new file mode 100644
index 0000000000000000000000000000000000000000..3e663b215327d8899a4b3fbe4623f066630b97b2
Binary files /dev/null and b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Nicosia differ
diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Oslo b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Oslo
new file mode 100644
index 0000000000000000000000000000000000000000..239c0174d361ff520c0c39431f2158837b82c6e0
Binary files /dev/null and b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Oslo differ
diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Paris b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Paris
new file mode 100644
index 0000000000000000000000000000000000000000..cf6e2e2ee95355039a90146a7f77d14224551b65
Binary files /dev/null and b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Paris differ
diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Podgorica b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Podgorica
new file mode 100644
index 0000000000000000000000000000000000000000..79c25d70ef09aaeec21f0a10a029650967172a80
Binary files /dev/null and b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Podgorica differ
diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Prague b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Prague
new file mode 100644
index 0000000000000000000000000000000000000000..ba82f311b74838347de6dda464a22e95a16e0408
Binary files /dev/null and b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Prague differ
diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Riga b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Riga
new file mode 100644
index 0000000000000000000000000000000000000000..b729ee8c2ee26c7bd74535ead106e8160afdf181
Binary files /dev/null and b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Riga differ
diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Rome b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Rome
new file mode 100644
index 0000000000000000000000000000000000000000..bdd3449e76a51eb8ea7693fbc1ebeb5be5c1e5b9
Binary files /dev/null and b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Rome differ
diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Samara b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Samara
new file mode 100644
index 0000000000000000000000000000000000000000..0539acfd78ae58d4503ea88c2027c4b489a207e8
Binary files /dev/null and b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Samara differ
diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/San_Marino b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/San_Marino
new file mode 100644
index 0000000000000000000000000000000000000000..bdd3449e76a51eb8ea7693fbc1ebeb5be5c1e5b9
Binary files /dev/null and b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/San_Marino differ
diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Sarajevo b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Sarajevo
new file mode 100644
index 0000000000000000000000000000000000000000..79c25d70ef09aaeec21f0a10a029650967172a80
Binary files /dev/null and b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Sarajevo differ
diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Saratov b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Saratov
new file mode 100644
index 0000000000000000000000000000000000000000..e8cd6b10efe42e43315c12b28d87cb69bbb0c8aa
Binary files /dev/null and b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Saratov differ
diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Simferopol b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Simferopol
new file mode 100644
index 0000000000000000000000000000000000000000..f3b42b004dcc2af1d69949ff18f35fdfa4cf6088
Binary files /dev/null and b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Simferopol differ
diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Skopje b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Skopje
new file mode 100644
index 0000000000000000000000000000000000000000..79c25d70ef09aaeec21f0a10a029650967172a80
Binary files /dev/null and b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Skopje differ
diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Sofia b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Sofia
new file mode 100644
index 0000000000000000000000000000000000000000..763e074795b3c7143984c9b334bd6f2f5a50445d
Binary files /dev/null and b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Sofia differ
diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Stockholm b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Stockholm
new file mode 100644
index 0000000000000000000000000000000000000000..43c7f2e23f3c37c24d39e78f8822b12c5a74b5eb
Binary files /dev/null and b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Stockholm differ
diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Tallinn b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Tallinn
new file mode 100644
index 0000000000000000000000000000000000000000..18f903fa6fd864237a679dd06c6a55eecce89d98
Binary files /dev/null and b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Tallinn differ
diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Tirane b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Tirane
new file mode 100644
index 0000000000000000000000000000000000000000..52c16a42bf1ab1b5db5c1e3d4b808ab37384ad18
Binary files /dev/null and b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Tirane differ
diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Tiraspol b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Tiraspol
new file mode 100644
index 0000000000000000000000000000000000000000..2109b52a734c1eadbc8ffa4fbdd5e6a53fbbb60c
Binary files /dev/null and b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Tiraspol differ
diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Ulyanovsk b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Ulyanovsk
new file mode 100644
index 0000000000000000000000000000000000000000..c280f430fad27873cf7b20af55bf47c816ea1190
Binary files /dev/null and b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Ulyanovsk differ
diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Uzhgorod b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Uzhgorod
new file mode 100644
index 0000000000000000000000000000000000000000..8ddba9097f804a762815a54e47708bf6bfb5814f
Binary files /dev/null and b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Uzhgorod differ
diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Vaduz b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Vaduz
new file mode 100644
index 0000000000000000000000000000000000000000..9c2b600b103dc4d1f49b5f087055e19d3e031129
Binary files /dev/null and b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Vaduz differ
diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Vatican b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Vatican
new file mode 100644
index 0000000000000000000000000000000000000000..bdd3449e76a51eb8ea7693fbc1ebeb5be5c1e5b9
Binary files /dev/null and b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Vatican differ
zL<<R*tBDs9FeG9~$dH&JL31@xL&Ao{4GA0)IV5yQ?2zCg(L=(A#19D|5<w({NDPr6 zB2jcTVMOAH1QLlP5=tbNNHCFTBH={hi3AjhC=ya6rbtkcsJfc4B5`#!fkh&VgcgY{ z5?my@NO+O>A^}DsjD#48F%o1X%1D@zIJ=raBawDBp+;hj1RIGq5^f~kNWhVZBOym( zjszWvIudpy?ye^ANaS5j=#khX!AGKxgdd4NG62X3AVYwR0Wt{4C?Lati~}+d$Vj-F zp+Lq084P4Jkl{ea0~ruxM35mt#snD@WK@u0LB<6c7-VEz&CnoY<7x&6869MJknuqV z2pJ({h>$U|JU(yB8RYl!@%sEh?%8b9ue+|kR>AlFY@Q=CN@kAC_?XQI6RTbGwdqOA l^DHw;{~o56Wiof6X03WBv0L%k$q6Y5iSgM<iP$GO^dBI59Af|g literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Vienna b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Vienna new file mode 100644 index 0000000000000000000000000000000000000000..9c0fac5369e4924ceb9d6fe9f39ac14b0a712c98 GIT binary patch literal 2237 zcmdtie@xVM9LMo5zcQHF+YHY5Atn)GC%++4$m~2A%2~*1BoY{iT7+;Z1JkaIxz>oi zb4=$De;{Mk?CKA!HT*HN8Z9K$E<<x|xw2n#EOXi#tIzZMNB_9ifBn(-cHhTmxBLEa z+r6INRU7KFtXr;0<`dprubDUJJ6<%m?#Ygt?%;*-y<-9G={utPK2Y7?zFQCedZMYf zDOL_GuGFE(W*xq=R8GI?(2-u3e7-YBMz<%+8DET?@w?^h#hmeTwQ>4I$yo{JpOP<A z2jsjnR=$edCDS}9<9BC|)<h@$tTEHRR!7)rbzD2H(=WfH_nbSRv8P_wxTD7;{%}AN z_U@M%9V&@iUy_*(KDl>wt;{O*N>YJa?sMiza(s&1KY6;OM7T8NdXCQiE?HB9F*@ht zN&3LRO?CGDsB^n6Xj=Pcn!f3%X4Ji{nN?$w<r|Rf!a>PdzDsfwdL%EaRq`hMGB2)6 z=UrMO`G2(N{9`UzaIr!Q`m42YaK1jYW05ZGj@O6llXOvgzC2PCp{}|(S)6=D-L45K ziuhEElfIGStKUmW*hwiF8ImQJ56RNNTT(iFLOq=wvh46deYCMr9_x5n%N~DKmp81{ z@&_B#TUx9u9Lsd2GgVjK$k2-DBB?lkmp*ZQwyb*ZH?0gt$?5}BrE1_8dD4GPs=G#{ zraCBVw*90}Eqq^UtA@2MagVGm3~2rBM|53+>bf6x>-q_wZWwCQr!RT+nKvr6;ge>0 z_LV|y>@Sy%>l3wUN3JxN6^O6iA<t#bkWKF2W%E=^TGFGnW&B%dopM84M?aFbt7E$5 z-F|s~WKg&6`B1k7dh~_1KJ|C{H8dnNEIcgSzH#{b7!n$B+kgGCtUJrhf7ptj84_+; zQ^bn=r*o;tYq{Lz+`K#yVlFfX@4qjm(|plh%uV|l%lgw?8+)tMkp<bB6(LJP)`Tny zSrxJ@WL?O@kd+}zL)L~Y4p|+tJY;>gW`W2GktHH)L>7sx5?Lm)PJRvxMOKO|6<I5? zSX;ANWVy(Ckp&|wMwX1M8Cf*4YGm2Sx{-w=D@T@&tQ}cAvU*#yd}RGd0gwtHB|vI` z6alFMQU;_BNFk6)Af-TRffNI&hOH?FQV&~G5TqhVNsyW#MM0{9lm)2^QW&H%NNJGT zAjLtdgOmrUkF6;XQXyMYBBVx0k&r4OWkTwN6bh*nQYxfYNU@M=A>~5qWortCRLs_t z45=AXG^A=s*^s&+g+nTbln$vKQaq%3NcoWZAq7M#XlqJ{)DS5mQbnYUNF9+vB9%l+ ziPRD)CQ?nLoJc*9f+7{QH6=xAYHNy$R23;JQdgv~NM(`IBDJ;tf4Pk^1)iHpi=CeA QwA{1|rzbN5)3c-g0&7TFt^fc4 literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Vilnius b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Vilnius new file mode 100644 index 0000000000000000000000000000000000000000..da380af0ed2f4f6eb399ab4a3bf3a0d9b6a7ebc0 GIT binary patch literal 2199 zcmeIye@xVM9LMp`1*g|V=Nk+U0z@JK>4f8#+9rw}Kc;}9ocNQZ!V`-~(H!_AzAJOC zG54w1%B9O+qjJRj!|ac2S#yo*!uyA{My*yW8*A>{%9(AfKF=>utNv(h{n_XCy~lUo zJK!H)&+x|Wkz)0?lVqN7arT;v`y9)>9b1*zaPsLDFP{1#XrGRBwG0hh+413q{8;pA zt$h5NUk{%O$(83yB^E6IGCB8^*sPS#D*T?&2LCTJE~Sk5VyUB{iaVpv$I>pIu+s;g zwKI-AY0vK6WoPbhwdb_2wX-%?+jrG1w6hoI*>?w0?YZfxDks6Ka@}QC?)4li?;F20 zZ^UEeU%qMGbAHSUoEf#|_eHIOgKt}fy9cdc>&sSA^GB+<vtN~zzphHx99E@gFSV3q z9G3;fU9w=jM;4{^=_N@sTb3MJrkB2yt}7zhdfCBy<-TgS4z;F9<=m^fDl{(D?hkZL z_7_rf?ORzM_pU4-9+DMbpO)Ievr-p*N7o<em6Zc0^{S2zY3O}QuU_|zUemr+H!f_~ z;kp{V*1u9W1?K6d8$rE3xmwnLlB73W&y$T8e$typyt4U3qBNiXUhePtQMU9AOUsrK zdEmg9esJlV@=$YBw`Lxdt>uF{;(0@F%g}n;cQ5F+@lL&cs9Qh$d00R4>L%U({!V%H zaJla2Z<NQ{GWCu_WwLYiV(E<dWmnN`*<E#09#2%Vr!ZOXvA>co-woXryDZ(;KGl2A z^~=8DxAgv_@96`B$MqB4XLQez9__j%E<VofnKpgeboZ?jFYj%WuY}wG`il3>@JW24 z{1-2!{1?qXnVNY{#Va+dO?i~^iAqo^eU1{P@`LJ*Kc2W!c+UHx3RlL|r&IT?tf`-x z_|Bhyk1MOLKCIT6tJYkhs<5guSAAoxxo_mA(mXIX6UXIJ$~Ac=AN~G`%N73T4ec{u zcQ*9%<j?(TPSb^GC9)}GSID-IeIXk|c7|*X*&DJsWOvB+_%rMe*&wn*N3%s_kH{vG zT_W2=_K9p1*(tJBWUt6(k=-KOMfQtqn8S`aY}wK58QC<lYh>HVzLAY1J4d#T>>b%W 
zvU?8O=dgbc4RACaKw5zGz|k}T=>pOQN7DzS5lAPHRv^7Vnt^l!X$R5|q#=%`BS=de zO;3=fAYDP)g7gJx4AL2-HArue<{;fc+Jp25X^^Ao5Yi$?(<7uwNSBZ{A$>v`g>(vO z71ArDSxC2#b|L*j8isTXX_=$x8PYVQYe?IWz9Ef6I)}6l=^fHMq<cvFkp3YJL^_DH z(9!e|X`-X)BGN{rk4PhtP9m*DdWkes{U6PL7fr9}?fhaI8>lZSC@TmSmIUgHg3ev3 F_cv^;2-^Sv literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Volgograd b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Volgograd new file mode 100644 index 0000000000000000000000000000000000000000..f4cb64f16c730b1da6fe427a786ae939712559d5 GIT binary patch literal 1167 zcmd7QJ7`m37>DsMHZdi2aM89lsh66@^rA6oO4PKbrcJ1(P$W21h2o_k2);-M1)&9N zClwSyL=nZ{;&sr;DnVStwv$^c9B}f2yh}kW8o%e%Ne6NB9M1V24#|-GpW@K5k+}Ke zsJ3soIC|{G^U861yS<Q`e_E;;Tot)iUR^gEtF4~N)PD0kuKQ@p^>fMCn(1@phN;U= z@cvmRbYs+MJb%<#duE@r?s(o=e<0;-C`6s6^d{ZdR<Ge;K$~k8HR6j%WT8%?Z$A&W zy!s@qPv)hqG$ZX-UdZN&N3vz~mc))+lC1})G=B1`Cc18F$KKPL42^4Ne7|;nKcuOK zQ<8eutJ@ZbWc%}G-7%Mz^xdp<J!_Qi$*}CaZX`3}m!65B?&|v>*~34yH~dEWvX16_ z4<z68QuAM4YkyTq`)BXz?)Q_r=ivn{Oy7}#izl>L2n~-_Diz~dR<(3`eeSv3&*@#_ zZ+EZXZ%njg|3}jjGhSm-8UEVbpl8MJ&mQ}p&lCoVrZtwZSBJfleARgD;Xc?~@gD~K z`s+*fcT^Y>8I#uxij0a3>uSeE21Z6khDOFl21iCmhDXLn0ze`_LO^0bf<U4`!a(A1 zwSgd!AfX_!Ai*HfAmJeKAORr}At50#AweNgAz``NxRAhHZDdGjNNh-ONOVYeNPI|u zNQ6j;NQ_93NR&vJNSsKZt~OF6R971-5-bue5-t)i5-^V0&HuZI18JLhcY7l66LKOE A0ssI2 literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Warsaw b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Warsaw new file mode 100644 index 0000000000000000000000000000000000000000..5cbba412eef406b4baf89100f2c4a8e1f5d12798 GIT binary patch literal 2705 zcmeIzdrXye9LMn=AgP4(#T$xTG$NG{K@=4sGXiIVc{$$jhK8s`hL;FU;#o4+jJa<N zafp-_F?C9Ap)JulyUmsnUeK^vD_4Re!_c%fX217itu<@?)qnlY&hvWC**Skc?~f;Q zMa~Fw*SXhz!<)0)zPS%}?4@FNRAZ%oP*!Erh4vlIwOUnuP+xmb_4VRyx^w#>vuo*5 zQ&YGh>qP!na`N(I`D{;+HoTD{pO=o5MrTh}WAR~gYHMlU>CN?7XO`T~J3Bc-n-=bu zb3^Ood~~pU(Y;)ny^m{iYk{<QPMel%bx!LyZ#h>^l{;;{%T3$icgzoatIgHzTg{JW zGfn&Y=gqacB=b||T+>k&X09iWGdBtoou7k4O^>L~ydKpvoq$*T&3(nA9a%fX3C!;A z^z`^Sy)qI^@Ax*SPsBy1Z?6U?$mf_7)N#P+cWIZ?|4gY9{J}<Nz>&92$iAH>bjPbE ztmGvVzG00Sn44_|&7WxoT{zc%f4WB^l2YY?=md!j87vR__mjcCDLVMZNFDNBqz-NB zt5F|y)rabCX>@g~4lBE$F~y(i@U=%YHhaIuEohbztLr2_xn4$2FPBlFRgy5GKoUBO zWXymv9dmK9jQurV6OX3IxG(2uQq3Ywu20m5H%-v-l_B~_PJ~V<9xIQg`D#k`0C_C( zvZkhVN}BH{Ix*sFnRxYkndEa^CN-Rp$1lAplWX@%`pILOv86<&?AxVJtXwHmOE&7X z*_(BG?oypGI#)gE6Ln_L6rB}4RA=3c)j56AWX}1XI`_s9$vkvj=Qa7s{GEZapsr08 z7Iny?vIfao)Fg`wujrET@5+-4PHJ{|g)B|3)tqhzby=wDvX*VSymPg#II&Kjy6DlT zchA$@k5|bvFDL8Dni=x!@^H=DG)h)YOOn+&L9!+;OxC9UBF_aH$sgWF^V`3bf`FS^ z(D<ROyV|Vl57fvD4fVRA;(aZwt<o3QRcq0fBK7j|_UYo=rQ1DSeY}0`jrX0;+}`!# zhkx_(>F)R6zl`a1z&=V`yBHHP(5ss<0b+Xm^*$=rE0AwY-#@Q^{4zbmV`kfTO6m+Q z9+S#Vs=eWO*<&y6{hiOAe}BsN*k8I&`K{mociWy>hZ?6MPd)PFyV|E8DF9Ldqy$I} zkRl*eK+1sB0VxDh38WNAEs$ck+G-%>K<a@M1gQv85~L<bQIM)2WpTB2K?>t)D}$5< zsSQ#bS6dyVJV<?z0wEPbN`%x1DH2j8q)bSiI26j&RthN<hgu=ULaK$73#k`}f*}<{ zN`}<T)fNq@8d5f-Zb;!cRE|UGIMj|q@i<hEL-|~7{g4786+}vi)DS5mQbku=Mx>5N zA(2WVr9^6p6cec?Qck3vNI{W`A|*v?iWC*8s;ez4Qdd`7SfsK@X_49@#YL)%lozQl zQedRQNQsdeBSl84jFcIvv#TvMQfXIPYNXakv5{&c<woj_6db8IQgWo`NYRn1BV|YG z?rIB<RNmE=9;rQ2e5CqF`H}h~3xKQuvINK)Ad7&k0<sLqIv@*ytc0sw3S=#i#XwdA zSq@}9kOe_j1X&VfO^`)FRs~rWWL=PjK~~1qE)B9au6A*d)j^gASs!G9kQG9f2w5ZZ g{~Z0(9QoPF8WzWRMQ6mvjEaek&WMY};qiXI0efH@v;Y7A literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Zagreb b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Zagreb new file mode 100644 index 0000000000000000000000000000000000000000..79c25d70ef09aaeec21f0a10a029650967172a80 GIT 
binary patch literal 1957 zcmdVaYfQ~?9LMqhL0H4S7)2!{xg6a~D7WL3s9ZY8CAlQGB%zgCF3rrEeOY4-b6vxT z2jM}?T<4O6HN%WC*O}Qcw>5t6|7Np!;F-<;od5sTPMse8-XG7D`8ko+A6E<W2{%`< zx%s*ETJyNxB<4x&%h7W4u#eX5cgd}7(Ne!YSn9ty@^9}elRIfc)z_Vs@B8eycDlP) zJ73(YerL{#|IsQ5*tJKxl&J)+S|?p|3#8k$+0s45BSG<Q>EVo(VE+*5*~(W!yj&Xc zDO!6y57yod9ktKp7TUM^i#iX!)_&y=G_>@FhAlgz;n~MDBJ;jP7F0`ALXAX^-!3r$ zyCpWVNMajHB+jo~;~veF0pAOC;8~XpdYGZ{m06llGf)R_9Hv8d`s>h~ARSgZK!zuJ zsVm!0Mg+f9x2sVSy{>6e&|^t@_d=4Jo|ojht1{}@0U2F&L{e(cY3i0TNjthv$K>V7 z*s={eZqjBQpF2m>`{$}BB}pgvr0GOwZ=Lu#Tr)Z(O2)l*I{8yCnR4o*PHpg(X?xpB zX7yW{Uh+Y*%IjoCR)fr3{YGaEIW4m@Yc)HtLgpk?X-><NIyXRd?yDU-udzVqUoF-J zk372Y&{WO6vP2ebO3=K@bXhzvQ1ds&$dYmKQjp^#OC!3-GWS<m-qw=Bunt=I{;3qT z`K(3tm!$aJeO+;)QdZX0=&Fhfy1Ht&t|>mKC0k0=(X2^R^KzKCSMz`QvaI%T=6qRg z#A^2I>EG4S(z1Sk^4pYDkL7Z6r+ds791h-G4u|LOr+J0>L;Ey;vA<(kKg=~f{{2Yg zbR#DmIpxSnM@~C(;%&{TM@~L+`uVpMU~4KsN<eBria@GB%0TKs3PCDCN<nHtib1MD z%0cQu3bHj7AtfO-Aw?loA!Q+TA%!88A*CU;A;lrpA>|?UAqCo+3Xu|RO^ryANR>#L zNS#QbNTo=rNUcb*NVQ10NWDnGNX1CWwx(vJXj@Y?QZ`aIQaDmMQaVyQQan;UQa(~Y zvH-{mAWL9t)&N-qTeAwtG9c@KECjL=$WkC{fh-2H8pv`W>wzo?vLeWmAZvmwimh1{ zWLc1PK^6vC8DwdYwLumKSsi3~ko7?p2w5RyiI6oy7RlDE60%ITW}T3QLRJb{DrBvY k#X?pKSuX4Uo-S`QY5gMDG-qm5XiR9hGc_U{!=k)@0!L5Jf&c&j literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Zaporozhye b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Zaporozhye new file mode 100644 index 0000000000000000000000000000000000000000..6f148505b239ad9adebfd94606f1e4f311fb354c GIT binary patch literal 2115 zcmeIyZ%kEn9LMqB6)3tEeRabN0YagHig3AzqCsNU3p2p3awSrUYawb8AsPe!L@Vc7 zWA58zIcIK`jEWhv4ebHC{<&t2Zuddf9J#h?(VV%nm9uiJe(&RGZ9V8w&pK!4yv{wq z-~pd^Wc{|*fceL9hke7vvCCdO&w0+?hSHDwPYt9;KOLHgk4@CbXK(p5_IglaXTR)7 z&3hw0EA8t_U-Ed^_xqjir(I5or;i6K?;1N8&p30;$vpA0llAKJ&g{M&PWGM_C#P+l zbN9w7=bqXnPHuU=Gp8WknVac#?wx+a$xF)X$h(%-k^lWq;dx(NiOxUwd3eD{ELt$| zPIzJe$!OuegW;l`XVl;FwiY*CltAZ*lvIq$qP4F`Y1UC$9O#zClf6=w(XVBfHp$W( zUApXSQ0^OV)bhb*tr%UV_wQe!%a3GgWoxdk*tb+3s7lgcONOkRJE5WAq*Nt+qSd+I zNcGhpWL3fkvMP379{ldO)C><v?bv%-cc4#JpLk6lYHyct-wV3t;g@u6+h(m_(x#Ex zYF+1Btqle9wBfp68&j*K@vE8o$hCY~fBF~QaKS4ZkIj&#kt_0O@6XcQACqYF1=+Os zvOc!_U3t7|Ok1)K%jSw<ZB0I<Te4KQ{CG&WPIl_H^F8{+rHDTH<_2y1c)L9HVuiL3 z*2~jdv$bP?scc_UE}gAD*-<=Oc7}eFXJ#1bDoWKZ=UeGcxvt&ubJBD5qV765D7#~$ zy65nRx_9`fKHD>(y$5>LGcDnk#H7T8TW_ApxBcz(PP=(dzx}^Bjmi1d{*#%UxJfi7 zGsk$1neR6##-xau$s5nCse4(aCu8cXQ_r4k-f-t<YwIFrfj<!Nn@}iX>gsDu&>9TY z^I@fZX&*Q|9`}O34v#1D@5|V2zujGi`$yXkf7qkp^he3aQjoPEi$PX{EC*SSt6dPk zo)sZWLe}JJ7lo_}Sr)P`WMRn4kfkAOLl%du4p|<uK4gK&3XvruYeW|5YFCLY6Imy+ zP-LaZQjxVHi$zw8EEicXvS4Jz$dZvYBa3#mt45aXYS)b{99cQCbY$(w;*r%O%SYCa z6ac9JQUas~ND+`KAZ2j1bwCQ?YAb=10;vU345S)JIgolF1wkr;lmw{>QWT^rNLgHM zU68`K+R7lML282(2dNHH9;7}<fshIzB|>V16bY#kQYNHMNTFP9rI1n~wL*%8R0}B= zQZJ-nNX3wnAvHsahExqH8&Wr<aIUs;Na<W{?U3Rj)kDgM)DI~jQbDAI=KmCuMz8JX Z!T`N2s4FQfE%X<a6x0>_-KRy~KLI`|_-OzD literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Zurich b/env/lib/python3.6/site-packages/pytz/zoneinfo/Europe/Zurich new file mode 100644 index 0000000000000000000000000000000000000000..9c2b600b103dc4d1f49b5f087055e19d3e031129 GIT binary patch literal 1918 zcmciCUrd#C9LMqB0S*~U_N9i$0t7oE{yFj=LW4|?z^DVIoRk#AKL~0>K}Mh$G#PV^ zxvv|mxnj{|%sHYiP%rS$(sHK6+KM$S=cY5wnsZYI)>!@CPdBW(_IY-m*K>diE`IM% zaQ%+zbn~CfZGXat>mB>yeP)||T=eWT7q8WuOA`Sze74j?dw<hePyDQ&&~Mdq@3hXo z{<%K+eUCnM?x4n;JSDOH0}}V~Co-o~CBEf7NvLg*x#e5s=|E7tx%o2Bmo160NizSj z*^=b;Ytne8F1V4X3rA<^qOTv(#e<XT>-|-ibY0fu_Dh<w>!hYueWqy@W0Kx5C>eP} zlDXys$%^Zi?DQtdo@$kxm@dt^vRRh?*{I7-`Q_Q4$~3pHQuBtE>2n<`boocI`h2xl 
zSF|scl?86~SH;Mx#G9J$pOOOiH(KbuDus7`k)qHqrD)`Wym0-nyf|=F0>fvt_&}$W z^na=^)z!(%o%?k4y7zTW?KUlardET2LVd+kqHBE%b?ro|mPHpx*^iIwtK$n~{qg&{ zVKhR@d!nRb@ORnRdQU35Mr2dvsBGSQTemDfAzLekwJQFQY|9(a>KVthCQh~H)`z-% zszG;LXx5!qg8JH#4O)A?US8jyr*(a$^2YXfeX}D=>R0DVL$ycVN}D6Q@*l|CQAQe5 zqP6kvHED{R(5COclIA;Oy8DYh*)uYvEr-tP-hpm?x4Bna5438C%if`35BoMI{11Do zOl*QH%$P_qk4}GISsXO}{8Ao4{>tTY9>M=Vv*Grae7KtJhxe#SzS-+9d(FFhyAA7q z2=747vZFoE$eBjYH5X?aIp4?`ceLjmIqS%IN6tKQ?vb;PoPVSNqywY{qz9x4qzj}C zqz|MKq!UNm3epSG4AKqK4$=?O5YiFS64DdW6w(#a7Sb2e7}A-eZ4K$o(Kd&4hqQ<E zhct+Eh_s0Gh%||GiL{CIi8P9IinNOK>S&uqx^=YeBK;x_BON0xBRwNcBV8kHBYh){ zBb_6yBfUG?=8^6lZTm?7$Oa%gfNTM>2goKMyMSy1vJc2cAUlC<1+o{&W+1!aXtx8| z4`f4-9YMAP*%M?_kX=Ex1=$y5W00Lewg%Z7WOI<+akSfm?2n_}AY_M-EkgDP*(79_ zkZnTt$+$v8>>cKQ*tan=_#fCQyHIwg?AJ&!GpD}?>`wiAtNs})`;4&TwIq!h^A%?# PXC<fliqldtB_rZ*zi7H} literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Factory b/env/lib/python3.6/site-packages/pytz/zoneinfo/Factory new file mode 100644 index 0000000000000000000000000000000000000000..afeeb88d062851fca5523d23a73edaa1d8f5ff6c GIT binary patch literal 148 zcmWHE%1kq2zyORu5fBCeCLji}SsH*u{r~^}ffN|%8W;fS5E4xI0o4hjFiqmJ0h(xM Gzy$!q_YOt? literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/GB b/env/lib/python3.6/site-packages/pytz/zoneinfo/GB new file mode 100644 index 0000000000000000000000000000000000000000..4527515ca3f249a44599be855b3e12800ebe480d GIT binary patch literal 3687 zcmeI!dvwor9LMqBn$hO=nHU-N(OjC={5BD438AqqmtXotj4)(rmX?v0Q>h%sdF!Yp z<U?Y)%x%kMU5MsB*GZFmE_1(T%%;!t>o})#oKr`C_xqiB{Lb&^cR0@Re!e`th9*Y2 z?s+_D-{FVH75l^M!1wGg(;6)@)Asu5>D%3A#+-HLtLZb$%#7N`e7IWAO1))f51OvO z9<$NRiC<~HX;Y%-HteV8HO|wSO~Q5NnPM~FwcE@usG%1WUDOLVB<V$)cIm8n@p`dI zGfO^l>!qLWFw2r-&GM9GCcA5d$%*NuzjG_IA}ZCata{n3s^>F6=R@i(EQ)-zEM1 z{+IL*D|hQ3mzJ5IzR1yQrUmP@qcZf*qf7NVZ<=1;ZI1b+WpAC=dad5z-D@@!`kT!` zjbwAi%d%x>J=yy9Q?hNTOY-}9)pj{5JBIg_ohd<TXGxUWHMC6c9uQ{s#2nTI_3E2~ zcBN8q=&;%wzfkWj^ELY-Ch2`QKGTI&l6B#kvwHvWn&$T{`TD^68|Gm4X>%wmNFScE z)f`EA-W(k^QXlKy#QfnAb3Ce&IT5``p9~Jur>c9*sgjDisC0xrU3gBPInYC&UA<kO zTYl4=|8l9mFmAB9ICHGN)IY{tPEXKRIw$I@32pVY`jyR}Z9?^R*9B7?;-`xb?=&~c zPU@R$mYb4OzUJ1L1*UXEZnAH3o-P;W)qY(wb$S2Rx`KOxu257*`>Q1Fzp+dQT#VNh zm(@0vcDQxr$+t|ECDnA*5eJQ$8esyvtufWolzAv}wyEyDY-)s_k)W1&Qqy;v)T)&! zwT>;8hl?jj?RAqS_{cm}XJMAAyY>@RZ=$G>S;Lf0N>TO4#i>VPJu0++Q`I26g=*Nj zi!`cKOEqrtxHP`@goFiDm9T;!X;O4a9?LG4@J+kr@hOE8@okPY9r?YuKgkgH$p)^- zKEJ7`lx)>3I#)f>{d3j4?hMr;YLseGnyy*~Pmz`f`m0tYsnYs~_UftqUU_<Mf<))G zmS@Jdl{VAr$g_!|()NQ^s$EAvX+JPnJ=gH0bZB3yI{IyrPN9cXr}HONjPF_%lfO}Q zKK8Zhk~Lq&Zv9E(CQVfF3unm-!-uP`6W^C^FHMl{NrR-vlS$%<?IbVOjF+C_Vbb%u zTf8+oD(|n=<mGFRs$Sn+mEHvb>J<~H`edI~ebcX~gem!|UqXTEKlZe|+Gd3s&}XX* z44JA1MQ2IkgE=y|uE^k188W0aMTTxnlh+P-WLRczNy;0cUjH~+hR^Mx-WU=h$>W== z5#63vDTy`Jo00X@$PPEuTY)Z>+O&qGUOcKsRk<#scC1%v=YNyYOXjLE`ML7W)SqQ+ z)(m+!ZH}Z*N|y(G?)T~IbN9(NeedJ@<;vgtoBhiF)3d@qefVELyM6n1j=f;6D$1uE za@6W*pGv&rvhV!;eLjti^SIje@VGkRbM}SH$H&M1INwFjzu(TQcm5bxVDX)Ax$Ix! 
zcI`g?Taa7oXzwX<Q<1xh+*ahiA~zPfv&gMQ?k#e2k-LlBUgZ8FH`vkMVdNG&+Ix)L zWaKU*w;8$5$c;wsG;*tvdyU*|<ZdIk8@b=e4M*-ca?2g<Jx6Xja@UdDj@)<T#v^wg zx%J4sM{YiH_mSI=+<&A2NCzBk3y>Z-+9n`fK-z%x0cix%38WQBFC1+%kZvIDK>C3+ z#L;%d(Y6HXiKA@_(iKPB7NjqZwlPR&kk%l*L7Ibf2Wbz|AEZG@hmaN_Jwlp<bO~vb zqwN#YD5O(JtB_tH%|g0`v<vAM(lDfBNXw9(Ax%TNhO`aoo1<+U(mAAcNbiv5A>HG@ z8|_2-hcpoBAksod+e4&@j<$<P8<9RDjYK+$v=Zqh(oCeANIQ{!A`L}4inJ8zDbiF& z+f}5kNMDi0BArEAi}V(0F4A43y-0tN1|uCtT8#7<X|kj3GSX&8+h?TFNT-ojBfUnN zjdUAnx1;Sh4;qei9BDbybEN4=*Bx!!k-j_H#v`3aT95P|X+F|@r2R<$kqv<C0Avdw zdjQ!4$Sy#(0kRJq?M6U$f}`CE$X-A;1F{>C?SSkDWJ4f30@)JCo<KGQvMZ2nf$R%M zyD^ZR;b^x8vNw>;f$R=sdm#G**&xUcLAD68N03c|>=I;~Ao~Q_D9BE6v|9z)E68R+ zb_=pyko|&e7-YvFTL#%P$fiMd4YF;JeS>TqWal{At%K|xN4t5D-Ggi&Wd9%=2-!i% u7DD!r>wf1D6X@>q|3PH`740Ajuv5uxCsTM_vxw#q?xvC9aglCb1^fkK<6HXx literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/GB-Eire b/env/lib/python3.6/site-packages/pytz/zoneinfo/GB-Eire new file mode 100644 index 0000000000000000000000000000000000000000..4527515ca3f249a44599be855b3e12800ebe480d GIT binary patch literal 3687 zcmeI!dvwor9LMqBn$hO=nHU-N(OjC={5BD438AqqmtXotj4)(rmX?v0Q>h%sdF!Yp z<U?Y)%x%kMU5MsB*GZFmE_1(T%%;!t>o})#oKr`C_xqiB{Lb&^cR0@Re!e`th9*Y2 z?s+_D-{FVH75l^M!1wGg(;6)@)Asu5>D%3A#+-HLtLZb$%#7N`e7IWAO1))f51OvO z9<$NRiC<~HX;Y%-HteV8HO|wSO~Q5NnPM~FwcE@usG%1WUDOLVB<V$)cIm8n@p`dI zGfO^l>!qLWFw2r-&GM9GCcA5d$%*NuzjG_IA}ZCata{n3s^>F6=R@i(EQ)-zEM1 z{+IL*D|hQ3mzJ5IzR1yQrUmP@qcZf*qf7NVZ<=1;ZI1b+WpAC=dad5z-D@@!`kT!` zjbwAi%d%x>J=yy9Q?hNTOY-}9)pj{5JBIg_ohd<TXGxUWHMC6c9uQ{s#2nTI_3E2~ zcBN8q=&;%wzfkWj^ELY-Ch2`QKGTI&l6B#kvwHvWn&$T{`TD^68|Gm4X>%wmNFScE z)f`EA-W(k^QXlKy#QfnAb3Ce&IT5``p9~Jur>c9*sgjDisC0xrU3gBPInYC&UA<kO zTYl4=|8l9mFmAB9ICHGN)IY{tPEXKRIw$I@32pVY`jyR}Z9?^R*9B7?;-`xb?=&~c zPU@R$mYb4OzUJ1L1*UXEZnAH3o-P;W)qY(wb$S2Rx`KOxu257*`>Q1Fzp+dQT#VNh zm(@0vcDQxr$+t|ECDnA*5eJQ$8esyvtufWolzAv}wyEyDY-)s_k)W1&Qqy;v)T)&! zwT>;8hl?jj?RAqS_{cm}XJMAAyY>@RZ=$G>S;Lf0N>TO4#i>VPJu0++Q`I26g=*Nj zi!`cKOEqrtxHP`@goFiDm9T;!X;O4a9?LG4@J+kr@hOE8@okPY9r?YuKgkgH$p)^- zKEJ7`lx)>3I#)f>{d3j4?hMr;YLseGnyy*~Pmz`f`m0tYsnYs~_UftqUU_<Mf<))G zmS@Jdl{VAr$g_!|()NQ^s$EAvX+JPnJ=gH0bZB3yI{IyrPN9cXr}HONjPF_%lfO}Q zKK8Zhk~Lq&Zv9E(CQVfF3unm-!-uP`6W^C^FHMl{NrR-vlS$%<?IbVOjF+C_Vbb%u zTf8+oD(|n=<mGFRs$Sn+mEHvb>J<~H`edI~ebcX~gem!|UqXTEKlZe|+Gd3s&}XX* z44JA1MQ2IkgE=y|uE^k188W0aMTTxnlh+P-WLRczNy;0cUjH~+hR^Mx-WU=h$>W== z5#63vDTy`Jo00X@$PPEuTY)Z>+O&qGUOcKsRk<#scC1%v=YNyYOXjLE`ML7W)SqQ+ z)(m+!ZH}Z*N|y(G?)T~IbN9(NeedJ@<;vgtoBhiF)3d@qefVELyM6n1j=f;6D$1uE za@6W*pGv&rvhV!;eLjti^SIje@VGkRbM}SH$H&M1INwFjzu(TQcm5bxVDX)Ax$Ix! 
zcI`g?Taa7oXzwX<Q<1xh+*ahiA~zPfv&gMQ?k#e2k-LlBUgZ8FH`vkMVdNG&+Ix)L zWaKU*w;8$5$c;wsG;*tvdyU*|<ZdIk8@b=e4M*-ca?2g<Jx6Xja@UdDj@)<T#v^wg zx%J4sM{YiH_mSI=+<&A2NCzBk3y>Z-+9n`fK-z%x0cix%38WQBFC1+%kZvIDK>C3+ z#L;%d(Y6HXiKA@_(iKPB7NjqZwlPR&kk%l*L7Ibf2Wbz|AEZG@hmaN_Jwlp<bO~vb zqwN#YD5O(JtB_tH%|g0`v<vAM(lDfBNXw9(Ax%TNhO`aoo1<+U(mAAcNbiv5A>HG@ z8|_2-hcpoBAksod+e4&@j<$<P8<9RDjYK+$v=Zqh(oCeANIQ{!A`L}4inJ8zDbiF& z+f}5kNMDi0BArEAi}V(0F4A43y-0tN1|uCtT8#7<X|kj3GSX&8+h?TFNT-ojBfUnN zjdUAnx1;Sh4;qei9BDbybEN4=*Bx!!k-j_H#v`3aT95P|X+F|@r2R<$kqv<C0Avdw zdjQ!4$Sy#(0kRJq?M6U$f}`CE$X-A;1F{>C?SSkDWJ4f30@)JCo<KGQvMZ2nf$R%M zyD^ZR;b^x8vNw>;f$R=sdm#G**&xUcLAD68N03c|>=I;~Ao~Q_D9BE6v|9z)E68R+ zb_=pyko|&e7-YvFTL#%P$fiMd4YF;JeS>TqWal{At%K|xN4t5D-Ggi&Wd9%=2-!i% u7DD!r>wf1D6X@>q|3PH`740Ajuv5uxCsTM_vxw#q?xvC9aglCb1^fkK<6HXx literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/GMT b/env/lib/python3.6/site-packages/pytz/zoneinfo/GMT new file mode 100644 index 0000000000000000000000000000000000000000..c05e45fddbba6a96807d30915e25a16c100257e5 GIT binary patch literal 127 ucmWHE%1kq2zyORu5fFv}5Ss<UarX@YGC~OJgPFnd17-@c0bD?H47dQeKL=s} literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/GMT+0 b/env/lib/python3.6/site-packages/pytz/zoneinfo/GMT+0 new file mode 100644 index 0000000000000000000000000000000000000000..c05e45fddbba6a96807d30915e25a16c100257e5 GIT binary patch literal 127 ucmWHE%1kq2zyORu5fFv}5Ss<UarX@YGC~OJgPFnd17-@c0bD?H47dQeKL=s} literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/GMT-0 b/env/lib/python3.6/site-packages/pytz/zoneinfo/GMT-0 new file mode 100644 index 0000000000000000000000000000000000000000..c05e45fddbba6a96807d30915e25a16c100257e5 GIT binary patch literal 127 ucmWHE%1kq2zyORu5fFv}5Ss<UarX@YGC~OJgPFnd17-@c0bD?H47dQeKL=s} literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/GMT0 b/env/lib/python3.6/site-packages/pytz/zoneinfo/GMT0 new file mode 100644 index 0000000000000000000000000000000000000000..c05e45fddbba6a96807d30915e25a16c100257e5 GIT binary patch literal 127 ucmWHE%1kq2zyORu5fFv}5Ss<UarX@YGC~OJgPFnd17-@c0bD?H47dQeKL=s} literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Greenwich b/env/lib/python3.6/site-packages/pytz/zoneinfo/Greenwich new file mode 100644 index 0000000000000000000000000000000000000000..c05e45fddbba6a96807d30915e25a16c100257e5 GIT binary patch literal 127 ucmWHE%1kq2zyORu5fFv}5Ss<UarX@YGC~OJgPFnd17-@c0bD?H47dQeKL=s} literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/HST b/env/lib/python3.6/site-packages/pytz/zoneinfo/HST new file mode 100644 index 0000000000000000000000000000000000000000..03e4db076900c74add3918ad293c77f771787467 GIT binary patch literal 128 zcmWHE%1kq2zyORu5fFv}5S!)y|KbD&29MwnASZ;dKA0ISKVYUH8^8rL$IyTa0N$$& AN&o-= literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Hongkong b/env/lib/python3.6/site-packages/pytz/zoneinfo/Hongkong new file mode 100644 index 0000000000000000000000000000000000000000..dc9058e4b578ca8c9bb954af1bbe26a964eaf408 GIT binary patch literal 1189 zcmd7QOKgl$9ES1JxivGCw22K1;+B(05gVrxw3R5GRu?8|QlVubhN3I1CL<z|5Rpho zNGz=WMMPLQHkNu3;#y<Sx^=3h>e7xGTH`z)iG`KeIFs-DolG*Dd7oHQeCrbR#|gS` zcsQ%w!@V@=UI*h%*{-2R+nrt}H>3NK_v*H^-QRxIJUCt}534Vko>SN4(TXaYS~yoy z`3jq!5|Z@e8TQHW8+mFc?6X_l(tCEg>ASE&Ki@xYUYzLCFB{_K)t>YE^`fZhZ|;!H z)N(VhvRwvpYwh67!!np!V227j<*hBT@A?{K_~eLv-`T21_TIH0WS#!lc*TA?v{-+x zykkbUmFclrmrb^2RDYS5Fu7@)bw1c)@?)#?*Zykrt+!dmZ*Q^_*Q@0F#Y$T^Ge^|X 
z*|sPV6yL6Z^=}-IKz+srYXUm7)NhLCXLRv_<t7~J)ZuK0iDb^{Nj>dm@{J?9^mK(O z+ubV4SLE}%=ifd`?TENPSS5}X1(a$_sxWs|^7CF-A5&|h>v_dgt@HPZ{l$d2Uq36j z!<5LJ$fU@u$h643$i&FZ$kfQ($mE`Gc4T@_H$Rd9k^zzek^_<ik_D0mk_VCqk_nOu zk_(cIr^^OO$J6D5B!pyyq=e*zB!y&!q=n>#B!*;$q=w{%B!^^&q=)3^=@LXTL{da@ zM3O`=OD?3zad`rXBAFtoBDo^TBH1G8db)g(ggsrxNXkggNYY5w_#dPlakZD5$PYD~ B5Gw!x literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Iceland b/env/lib/python3.6/site-packages/pytz/zoneinfo/Iceland new file mode 100644 index 0000000000000000000000000000000000000000..dc49c32470906fb2ff94fc599c73141597443f98 GIT binary patch literal 1188 zcmd7QPe_w-9LMqBnvSrDL7-$%-zYVZVEsW5ifFMq)DE#h)Ws1MBn2r&w;GlZ{UJTn zY1KFJ6#auhFm?!nL``d%wwBpnb83Gy%Vt^Y_kMQh(5X&64|`q@&$C_jd55>}*}cO2 zad_+#J{$r2;ocjymy?mB8f#o&&L@sHT&Ud=z1ZvaYN*~^YN=>wUR@S#Zav_=8!R?$ zYs#c8Rdl1hbiKCUDV0u7iFRJ`>%IOi={mJUyW`j8{$7thh&4)LQ(7O^U)7%S4t=!i zjP_P0q%TmTkLTT#WJySqQ)eYLXTPML9FnJlMcUuGO`dhHl7Xg!`utj<yr|o%FHdLW z)s9Miy)Pwi{PT4%7?-z;e&|p|Qq!}FWw`K`4v$RB$Ye}MI|k%k-(h`!`L=v$iRi}@ zO)_@QujAFn<kOKVnFy(Vu39HwR@Uj{+Qss9?oQ1t%$Ll>2A%pjB;R_L>i4u;r{e)J zGhDw<ciump`PuAf#QwMTaOGvQ<6~xK?wWhzyqLW?7&hfTpIPQxZZ_I`zU*>2?@ahF zM$F`XVzTyNOk_}GRAg9WTx4KmWMpVZJ2o;nGP<K39vR=!27pB1XhT3^K!QM`K*B)c zKmtJ`K|(=dL4rY|LBc`eakK#;5jonBkeHC5kf@NbkhqY*kjRkGkl2vmkm!)`kob@Q zkq8}ah)9e`kVuqBm`I#Rph%=hs7S0xut>B>xJbN6z(~Z7He@7bBxodR{BOc8u%-J7 FegQpVSd{<( literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Indian/Antananarivo b/env/lib/python3.6/site-packages/pytz/zoneinfo/Indian/Antananarivo new file mode 100644 index 0000000000000000000000000000000000000000..39631f21486c61a19639d73d7571ef0840176b3e GIT binary patch literal 285 zcmWHE%1kq2zyPd35fBCe7+a(P$l3Vr)}JkhW?eWw(f!6-{fAc=7@3$E85opWKq|Ei z7+4q>lr<PQ7#P&nFz`V{e0)O~TpdFgv<-}m4M3!cDM&FCgplBXA5cC2fdJ$X5Djt+ Thz2<bL{r64TtN5g8gl^vna4UY literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Indian/Chagos b/env/lib/python3.6/site-packages/pytz/zoneinfo/Indian/Chagos new file mode 100644 index 0000000000000000000000000000000000000000..0e5e7192795decc47a8d338520ce12197ae1cd31 GIT binary patch literal 225 zcmWHE%1kq2zyQoZ5fBCe79a+(c^ZI3XWjQX2G8zpsQ>@}KLaBZ69a?u6QD4ITL4I1 yNCE?gk8cQrwt*=Sn}H;NAcO=ven7M^0FD0-vJ1oo*$1LYv6IUNXuqAV85aO6nk5+k literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Indian/Christmas b/env/lib/python3.6/site-packages/pytz/zoneinfo/Indian/Christmas new file mode 100644 index 0000000000000000000000000000000000000000..066c1e9fa6e0f6aa4121fe0f2378512523cd5fbd GIT binary patch literal 182 zcmWHE%1kq2zyM4@5fBCe7@MO3$f^JT|34!m14FV5NGhp-fyKu+ghAWD9K>Y^A#55m lP#MTXjvpWmKmauGf3eugCm=S+bOzkUbJ+kbvePx^0swfE88QF> literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Indian/Cocos b/env/lib/python3.6/site-packages/pytz/zoneinfo/Indian/Cocos new file mode 100644 index 0000000000000000000000000000000000000000..34a2457bef2a41129dccb62f4320ad66a52f64da GIT binary patch literal 191 zcmWHE%1kq2zyM4@5fBCe7@MmB$f^JT|34!m14GmukW_RA1B;Ju2!pnPnXv&#fFXpi qdCWj%ATzmsfHVLB(8&Kap=wJ&Y>@d3_$=VE0b6FLYi4C^zy$!wEgQ`M literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Indian/Comoro b/env/lib/python3.6/site-packages/pytz/zoneinfo/Indian/Comoro new file mode 100644 index 0000000000000000000000000000000000000000..39631f21486c61a19639d73d7571ef0840176b3e GIT binary patch literal 285 zcmWHE%1kq2zyPd35fBCe7+a(P$l3Vr)}JkhW?eWw(f!6-{fAc=7@3$E85opWKq|Ei z7+4q>lr<PQ7#P&nFz`V{e0)O~TpdFgv<-}m4M3!cDM&FCgplBXA5cC2fdJ$X5Djt+ 
Thz2<bL{r64TtN5g8gl^vna4UY literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Indian/Kerguelen b/env/lib/python3.6/site-packages/pytz/zoneinfo/Indian/Kerguelen new file mode 100644 index 0000000000000000000000000000000000000000..e7d4d3d0660ed68ccf7a9fbe6cc0b41eea23dde5 GIT binary patch literal 187 zcmWHE%1kq2zyM4@5fBCeW*`Q!IU0b(t;D2;`v3p`GcYm&d0^lcz`&wwV8EblU<&4k ckYMBws2M;L|AUMMaS0gDWdpRxPS=zR01C<(82|tP literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Indian/Mahe b/env/lib/python3.6/site-packages/pytz/zoneinfo/Indian/Mahe new file mode 100644 index 0000000000000000000000000000000000000000..db8ac687561c40e85c29806b30966d0d7f7d8076 GIT binary patch literal 187 zcmWHE%1kq2zyM4@5fBCeW*`Q!IU0aON6NF7`v3p`GcYnTFc|*<iZEC>FtGUehA?Ow jn1HwpAtV_21FVCA0chfXkkKG60pq!BfEL;5ns5OC3tb#( literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Indian/Maldives b/env/lib/python3.6/site-packages/pytz/zoneinfo/Indian/Maldives new file mode 100644 index 0000000000000000000000000000000000000000..3f1a76e55bc1a7445d9dc2df31be87d6b35b4f74 GIT binary patch literal 220 zcmWHE%1kq2zyQoZ5fBCe7@Ma7$a$-Oct-vI|Nj}8m>3vbUV!9*BntzBTL1%xk8cQr zFA!@Rn1UpLAcU~-K)qlqcz%F&Ffag3{~xw(z66K{SqBoX2eKK6w3EvQXuqAVDHi~T C^d|iP literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Indian/Mauritius b/env/lib/python3.6/site-packages/pytz/zoneinfo/Indian/Mauritius new file mode 100644 index 0000000000000000000000000000000000000000..fd8d911129bc22fe5a1d1ff93e30a070d3452d84 GIT binary patch literal 267 zcmWHE%1kq2zyQoZ5fBCeb|40^c^ZI3XFcl-3D&m`61R&CJej9G@I2Rlq5l8>{|row zK*+?vVEO{6ioq>_k%fW5!hwOq$2WvQ+rSivO+XSr5JG~LKOkBdfHwRGSq<WXtOwIT aCxB>>Gr%;^DIl6e=Wy8ooo1(N!UX``@hg=8 literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Indian/Mayotte b/env/lib/python3.6/site-packages/pytz/zoneinfo/Indian/Mayotte new file mode 100644 index 0000000000000000000000000000000000000000..39631f21486c61a19639d73d7571ef0840176b3e GIT binary patch literal 285 zcmWHE%1kq2zyPd35fBCe7+a(P$l3Vr)}JkhW?eWw(f!6-{fAc=7@3$E85opWKq|Ei z7+4q>lr<PQ7#P&nFz`V{e0)O~TpdFgv<-}m4M3!cDM&FCgplBXA5cC2fdJ$X5Djt+ Thz2<bL{r64TtN5g8gl^vna4UY literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Indian/Reunion b/env/lib/python3.6/site-packages/pytz/zoneinfo/Indian/Reunion new file mode 100644 index 0000000000000000000000000000000000000000..d5f9aa49d5e0f99abbb104072db7c781137365c4 GIT binary patch literal 187 zcmWHE%1kq2zyM4@5fBCeW*`Q!IU0b(#50x+_5c6>XJBMxU@!p+Ffdp+FtGUehA?Ow hK=>gf82JNi0s{lk#Qz|pL0kgHbJ+kbvePx;0suyn8S?-D literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Iran b/env/lib/python3.6/site-packages/pytz/zoneinfo/Iran new file mode 100644 index 0000000000000000000000000000000000000000..3157f806b7d7fe650ec6726f31398824caf82cec GIT binary patch literal 1718 zcmdVaOKeP00EhA02NkKd+7c0ohIsaMJJZKe+Kvj2=~GQx+IqB@K4{Sw>eVg~LKnnB z<DFE}s0ATTBNYphBS=b0NJ+5Kg@y!)AQ|TyHezLA;ZE-T&D^AmCjakv%4)q1>yInM zyy52Znw#%a+RdYD^B(<pC{l;rydcAy<6RM*E<0i*-8H_rT1PH+tEjYhIy!!WnjO=s zXAhlLbMBwgb8k&kvAqK_?re#O?+nrj?T1w2mNaQs_KT##I_=2*pq!5PI(bHmN{OB> zQ%CoR)Xyb4?d=OCo*tI-?nQ|7zHmAJ(h0GkD_dtA(JFIStIVqZE?gBodSOAL%FasA zi;}LXrEl&@`J_NByPGfFHwHz{#SWc&GDPJaye{+G=8J-wVO?0>pqA&<>!OU0YDL_6 z>6to96ptI!C03a#c|S&$zU~w&A03se?nH^QtG9J|_Yzfc{F7XLpiQi4nWfiOUsmf% zdv>p{m3yxjxn_HcXDv8$(a3jo+3^@5n<@{qljV87LC9w|u(p%6_%!oKtQ-)+g#a zyt<+3o!U~>CASvGh;56f%Eq*A(KNG2H&1$|T0#!Uman0rb+}J&AIefY?q}+q{dZJb 
zZ=>9G=8M>Uv|I0Kk5zjcp38leYsEgF&u0Y$1_uQPj{2vcK+Cd)834~<%SsZ~7|U8x z7!YPz!evETR+bEy`15^kp2zZ-JJxnOZSzbqcQUv6_50=di}ii^^{3_^_OU`_iM(cw z$Rd$d`Zdc$)`=_>St+tqWUYS9Vv*G%%SG0UEEriavSeh<$fA)|Bg;nCjVv5lIkI%W zX6?x0k<}y1N7j!NfK-5#fYg8#fmDH%fz*K%f>eT(g4BW(gH(f*gVci*gj9r-gw%u- zg;a%<h17)<=GRn)l!nxX6o*uYl!w%Z6o^!al!(-b6p2)cl!?@d6zbPhij<1fiWG}f zi<FDhixiAhjFgPjj1-Mjjg*bljTDYl?$?x#)Q%L7RF9O8)Q_A1$SHuF1juQCoCtnR l8~^XAFm{3e<j9P7raNu(-JBblac*HJ+Hr12YI34I_6L+)nlb<Y literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Israel b/env/lib/python3.6/site-packages/pytz/zoneinfo/Israel new file mode 100644 index 0000000000000000000000000000000000000000..df5119935c5b01676e63d1fb1efd7273b8b4b452 GIT binary patch literal 2265 zcmdtie@s<n9LMp8WDL=qEV0%QuuMboa)Ap{Ng4_Qf!yN86a$VCn5p!b#55tF?1vc^ z&SkCnbIaK1heO4-<|1p;*+y_umZ_LeKft>LcU*$tru4i|t=jsdKim4U!|Uu0{$YIH zfs*A_DdHa&W<KHJ8qLFdlrrCTd$V3UvG|~F@A<<2^+z{F$MLInXJeJNo7dUjylm-{ zuYV;^Rko<E71};sGQm21A@=BZiHGHxm~4G^e9$?2>5x9x>J#0c#_99=4e@<zhq|!u zbEO+<#Sg{x;>Wf;>!&oI{qxRiqG#enySL_)z8HB@1*eUOV6e~*uWQiZH(Kn`gTGi2 zO(}MC+mN2L`HCIWK4QgM@A~659<V0o9=7i)+pX`8EA>w)@mlwYIsU0*`t{Urd+lkL zI`zFrNBQHs`mE{AvG$B3l{#T_h&^-10_`bp_9w27*WR3Cc2dsUI(c@rKPBmq<-7By zof^5_O1;wLpA{OeW_5<h^ucm<U-z)S-|m+W>|bKdes8bL*n7}=@THA%PTd|ocTJjn z$iL6Z%x@I)ath`Agw-M|X3)tVTO_jkDx3v<sUqj%fXr>bUF5axbRPb2SmbXR?G$W} zQVTcjkcBnZl)o&~c_eT~J-RYi7Ws~-K>iH**o0lGIN?@Ve076b6cz6*>RPH64~%h2 zTIZ=HUH#7EyDU-K+Ub<lXNvN-4mein6tT3vLq3raCMrr><de}sQJJw#R)vMCWs_>; zvYuY`)Qw`f{MZTg^!aqT;{6ZQ%HsiN)hk<6byJ%2Ol7r_uioLTUf3X>t-S86@vIUx zS>4Wa;|fKsXIR!=P7-U!os(-jZWHS+e<q**ctEW0*dy!SiB$FbUz9J@{;D>-Ss_D4 z-SX#G=wGk@)teBL-GA@6ArUIXbZ<zw5YcgBtPtsQL&pF9Nq#{<<e4XTzIlq!y!`zF z|Gu5A=I7n*4E^zoBj$=f`SLolC1g*?rjT7B+d}q*Yz)~MvNdFH$mWpUxti@E`$IN} z?9kP05!oZMNo1GEHj#ZI8%1`CY!%rnvRP!e$aazaA{%x!JLb>9mR-%Bkxe7JMz)RY z8`(Irb7bqt-jU5CyGOQ<>>p`>tLXsJ0;C5>6Ob+-Z9w{fGy>@a(h8&(NHdUbAnic< zfi%R`bOdRMtLX{S6r?LiTadmWjX^qtv<B%7(j25aNP9T?gEWYvLr9BUO^=W!adZi3 z6GxwrMsaisX%*5dq*+L}kai*cLK=p23~3qCGgs3zq-(CGZAjmc#vz?UTF22lq<I|O zL)ypDKcs;i9Yk8l(L<z(NEcmA8<9RDjYK+$v=Zqh(oCeANIQ{!A`Ru}DAH1no+3@< z=&Gw}D@R{lO=CGai?o)bw@7o5?jr3)`YZmA0gf<Po|$a=oKTSB@p*iS$!Q4%NnTH? 
H#~bk*I^)#B literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Jamaica b/env/lib/python3.6/site-packages/pytz/zoneinfo/Jamaica new file mode 100644 index 0000000000000000000000000000000000000000..7aedd262a57f096cda47a8542b3b66633561641d GIT binary patch literal 507 zcmb`DJx{_=6hI$^L};s25fDe6%4p)?u2F(abkV7ckZxUFjEP@yVA92vurQD~ILOB4 zZ*Y5mz~qfgJnyM)nz(qI`%creNzaK+lirT{F$H<TGBsK5gG)Ic$}fwhK_e{%qqsDA zG_JXgJ$;k<??b&j>!&NzYh4~+nc%6XSMSfv+WC>LB!_1Gq^YZ|(A4&PT@O98u@%OR z;)mIEyg29Nf4QuEb(G4^g0<6URkZePmh+E0k!s5xbY=G=`KPam{x+%0o0-Y{`x`5h z+-+Y#%y^6)#4w{+LQEmH5Mzin#2jJ|DFCSeDFLYgDFUeiDFdkkDFmqmDW(1=l`HLb F-A~yjc0>RG literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Japan b/env/lib/python3.6/site-packages/pytz/zoneinfo/Japan new file mode 100644 index 0000000000000000000000000000000000000000..8ad44ba981a2d97d40d3de2c8bf29606be50aa4b GIT binary patch literal 318 zcmWHE%1kq2zyK^j5fBCeP9O%cc^ZJkbvvel>u<du-1zaU;O1G~2e;W7KHOf*|KQHE zM<4F?e0;#n$OM5549(0y^$a}=7=fDWCNOY7NFU!21}_&N4h{iHGlFmkVMlNQ)qx%H z1EdQG{sTekp(O<%8ssQ24RjcY200EygB%E=L5>8|K!<{8kYhnK$iWO`xtI&+d|gW} E0KK7P_y7O^ literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Kwajalein b/env/lib/python3.6/site-packages/pytz/zoneinfo/Kwajalein new file mode 100644 index 0000000000000000000000000000000000000000..1a27122ee09410095d433adabe6bf2eb772710d0 GIT binary patch literal 259 zcmWHE%1kq2zyK^j5fBCe7+atL$obzU9iUUP=Rp1c|Nj}8n3)+E<~#rjGtAn+!1Dip zxB~+R1H*~~3_L!*Aq?7vh77ufMnD>2LkMB}Sb@fY>=gI`G8hPeR{gItlTZNBAp1dL XAUA;MdY}*kIS%2n0XoYL=sYd}4N5Vv literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Libya b/env/lib/python3.6/site-packages/pytz/zoneinfo/Libya new file mode 100644 index 0000000000000000000000000000000000000000..b32e2202f572b8ca6ef3c5cf1d9e787a24f2c328 GIT binary patch literal 655 zcmcK1y)Oe{9Ki9Xt$}*US=39>YHJ`OImJk57onkU5{bt^(oWAP25I~WOa>97K}6ET zAO?%6iNq#Co+cUzi5v^D@O)2ACWBx5Jm0%Z+FU-5vyfZ0#jmL`PgqRUEUudiX4`)~ zkKdjiTX(TTSzf!c$`32LGB~Cx2POTKxo}&yCS<7gO@%)Cb?alF+jg@g+e=4o$JxCM z7uVg+y^!wOu2<cg&oZK>RCIYm_hc_sEEUl4q~A>>MrH5#iEC$G<WOovC5Q8Rc6Ue2 zIoE}3u2>L$pZ5t=`+di5p70f-VbQ#&2uFP8rJF2K$F*pjo^ixvdY;V@X|sOD`GdfF z^+%s(kf3N#L?|j=jSNM{s}Z6o@i$0Ow7eQIiW)_ZqDK*=C{iRTniNrrDn*u}OA)3h cQ=}={6mg0=MV_M1a02vyP9bQHWWWx70c9krqyPW_ literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/MET b/env/lib/python3.6/site-packages/pytz/zoneinfo/MET new file mode 100644 index 0000000000000000000000000000000000000000..71963d533e444362250dec5465ec58517ab6c09d GIT binary patch literal 2102 zcmdVaUrd#C9LMoP#7buLMT0{EViKZt<UgbasT~i@<WS1VNF_87wTR+G3`%1ab55E2 z)L70TX~mc|J9UAzhSvH!T1Zx74a>FV%Kpu{nX^A*^?N^8ZC!O&zq4nrXXm+jE}qXj zvTj>*k?$Ypa`z7xXP3LUpVmL!zp;Pf)cFIGF%1kJ)pKu32YdEubXuFn&aSc(FZnh8 zLeM^Wro>J@nPZ<uQ|+^wWx8sxQAvAd+0{KES$mn1n{qWHlBSvA6?RSW?@G!3NvSix zl0W{0{1@I*+L>2%?dOLy>%FIye(bnq9En-xfrB=?SC+N?37gXrwd*!C+T7}hWtWHS z`aqfGWaQes>1mdm5LE8P63zcMM++uWb;Ac&=*Hn+6&U<M3;Rwfujgasw;xkM(<>^h zpR}UruoYK~Sjn1atu%AM%8ELzY^vK9rT1yk=}oryj}9$49<-aktW)_=gDOUr=$4(! 
zwX{D&%bK&byl1i9TA83=Q@X9lIjc}`$|@5+QdRcXR(0+>TN(e3tsEb<+s?dbt70!( z_1N1A@9njkBhTyh)>gZt_bIKuYq!?4Y*y_}Es9iE=}v!*)&>@6?WF?MrBqto7c+GC z#rd}G^<T7pBGEP+O1ApppY5LR3)av#ZW|jWY}1Y(b??$Qtg(JfO<DVGb45(eS02@t zOliya`?Pf`s%@iPy6<#E_a9!bmJi$Pfn6199jdhlw`S>~ou$^cy4<48etWoZwzY?T zvqzGB){&p0j`QDGXVN8go_ycB&Q0pk*M{t|@eys`|E_k#2K0E>pt|>V%NKW9{D0x+ z^JUD5b02Jy`4awq9}d+<eBn^UJ&NPGFY+%w{y*Je0G@6H$Pkb*AcH_gfeZr~2Qm<3 zB*;*Zu^@v%M#K9t9ArGmfRGU(Lqf)c3<?<)GAv|V$iO_^$dI8SV?zdqj1CzdGCpL0 z$Ow@kB4b1biHs5%CNfTBpvXu)-B6LSB7;RniwqYTFEU_c#K@45F(ZRUMvV*`88<R; zWaOT1=*ZYT-QbbYBg03=j|2dT01^Tu21pQ)C?H`#;(!DKi3Ab~Bo>}77)UfcT{w_< zAOS%lf`kN#2@(_}Do9w6xFCT+B7=kmiH)ZV4iX(t7ak-&NPv(CAt6FygaiqR5)vjP zPDr4TNFkv@Vub_?iI%4e7ZNWdU`WJ}kRdTcf`&v52^$hOBydRNkkBEqLxP7y&(nnu viJzwnAQC|&gh&jLAR<vj!ifLNI3~JME-a*(fpBqNX<k7fTv&kl#fg6bFZT@= literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/MST b/env/lib/python3.6/site-packages/pytz/zoneinfo/MST new file mode 100644 index 0000000000000000000000000000000000000000..a1bee7c6f0b76b6b03373eba880f52e8e83471ac GIT binary patch literal 127 zcmWHE%1kq2zyORu5fFv}5S!)y|G5(w7<_|6fSeG*`e0_T{D7H)YycO~9CI!J^w$mJ literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/MST7MDT b/env/lib/python3.6/site-packages/pytz/zoneinfo/MST7MDT new file mode 100644 index 0000000000000000000000000000000000000000..726a7e57176567044d585800c37cbc917b441d22 GIT binary patch literal 2294 zcmdtidrZ}39LMo5NFgUKPZtaU&9o#45Kt0MW>nA#=>*{s&<LYIcC-R7owN|2f@1v9 zd{)LRlc1ZmW?t89Y^ZCvwz9e7*0dIOzgRZ9Y_YhUp7--_fAwGM%bxT4jsHLIz^dSy z0{O?q*#Gdj=k4QL^}XRWFAv7*{-@K;z>7cX4}0QGc-|jv;l>pCYwQs<{zIKioT*il zA9!WzNU@5%nJ%(3O+|H2N%W(WYTEi#9n(0|TvRooFD@B3v3Vmp_L)Kxr;h2ky$6(M z^c8){7Xv2#>`_TL)@c&o8<3>8`jvX2Q!ed!QeC#CSuPJYsVi3cB)R-{HKRCFuFSkn zrOYkUGvhoeH9AA5{x;vFeHEwE!_&>I<3H)y2hN+T22ScZp;_wcC-&*ND<W#%+K=R# z!Y|de%ifXs$?vO-EF&3H=S-$&gJg~zH(BQca{bV&Ci`@o&gpx~<Q}Tl3p!d%-gAp| zeoL1s*f>Kk^sP~gs>gI;#vSU0f=~70*eq3)_NNqmAFqnX&x-fNFY3n624%^9kGW}Z zw=CWMv$=WKHYsWS*xb^!OW#_#-`v);RhKS!)|8d5)@8pBD_>5#_)hOp<p~W^eyB$+ zyHFw(FTHN={G?v{+q=zO?=IDq^+8kBJ4fHWq{38(qV@9h91~dajSfsCnVP~Ay5`$# zRX6)<sT)pIE2oBK)oY{bo{>FL-xXH(4)sVw(_yu`Z=KxdKV=%byY>A!2h9U5LEV(F z!!-LUbn}IfY01dZEhp<tFg8gC_j=9R?|#+mdTZ5#rxN9%P_cS=->9_Kq^Y*;VOd`| zsW!A8mW|2h)TYW_*)%n7LPZC4XykLVIc105Jap7-84Kyh`UcG7XX<o&N2l3(#H%|R zn$5PI=~|*BDtemz`7ixNB5(IcNhGq{NR0cQ?tbI_r2+9Tx3AW|ssQ7N;syT2h$8kF z5;7)aP{^o|VIkvk+JPY>LxzTo4H+D|(ec6XI6FRIfE*)4hKP(286+}FWSCAnPGq1? zJ5pq*$XJoVa*P%kF2{J00dtHP88XM1kwJ5e8W}dnxRHS)BS(hrv|~pGkBlA}J~DnJ z07wLo5Fjx?g1`|4Bn%vJKmy^kkw8M>w6Q>f!4VB497sHnfFKb;LW0Bu2?`PwBrHf= zkia03K|<rSu|a~v5gjBv9PvQ{#1SDRL>w_ff`mi~2@?`0Bv4KpDI`=*8!IGOP8%&G zTpaO20)|8k2^kVIBxp#~kgy?fLjvctkwZe~w6Q~i=d{s7!p9LmB!C<dL_)|BLnMeC zQAEOs#1RQ35=kVKP8&-km`)o_B%DqgPb8p7M3Ink#1siCM^urpa>Nx0EJtLK&^m2w zk>EOQbdm5nZG4dcBN0YIjKmlTG7@F{--Ov?YrWW3JKMi7w;(q!+n=AGo1dHK`5U$l BL=yl2 literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Mexico/BajaNorte b/env/lib/python3.6/site-packages/pytz/zoneinfo/Mexico/BajaNorte new file mode 100644 index 0000000000000000000000000000000000000000..29c83e71ffa6b071c52b2360295bf55009c09b76 GIT binary patch literal 2356 zcmdtiZ%oxy9LMnsp(Ki)RFwE*C#4t&SN@A){}j7|+(5aKn2{NO24#>CO3@7+TWihn zMJMGdIyrm5S`o9z{xNdPtu<C*E6m22DKLy8){4q;JMUi)d(^Ym@9vz}-QDi)?t{<U zx2w7`&GnCGuKk3=(_s(qBU|kCTJNtdT}_2`-P6rey_My4??x-vnXu`p&nAwv46gd6 zZt%qWb>Bpnx`x`{p1S1UD&OaAm&;|zawQ={Mm_mzZ1FZV9-pMfr_<F$=t4Cy7@@9? 
zPN^Syep8d@2Gx)47u5WbZnNORfC}$#Hw)kER*Smc)apdLy5U*Ti<|1zlIs0>Y3U9X zk>}IP)~!{`6S8#V%3`^3UZjo+&XlO>3=@56xx@@EGqE54E-QLw%uTOe6y3O5-+bsJ zSy>XUSJiy0;xd2Ix8#4QZjJa-$0v5G_}NL55Z0m+hCern6MNO_z8;fw-Y?0W$Bg^* z9$Djm+@u`aD5+IPby|I#q<b24M)6$9j4#nyY2&gsG+SrK%u(56QF`5<V`}}mYdYuR zr)tAnk><7!URSrb{bBBS@rb&!_B(S|YnN;+>@%CnpOf4*#%#{rCwDJ8Y&>y=;+c6x z=Y^z7-q0bPe|d>)ed)9*__{{C&Bx6>y#-RZ`+zCxaLcw0rDl7hlH!;g<J&PUzH70j zB=ajNxw2mGT=BEo`ALi}ojtF1otoA64Zovyw~pxh`<_>2<!AK+olR<wci24m%yHS9 z+;1MLIw0lYCryQ?R4OK0O;vo3R1H*`>d;uJeq)QN84bw89TobKFJjcbMvs29dsfw! zB<Q-f5%pNse7(Q+tg4Tg)D4BFR6}5%Y0U1I#^I~xVB|?T*f(IBW?JQm&TjMMP^C2c z+s#vFwury1-aP$6qPT*BgMzOM4w+-G|IRBAI9lLx1p+UND<lwTYjIuA`=ABRt(<#l zf!F8q7W-U9oLqbEwWr8ucVeHQzi;2aSlEp76@E1kU}?zOki{XZLzaiE4_P3xLPxtq zWR1uokyRqgMAnHc6j>><RAjA=cCpB69qn?F^&$&KR*Wo}zr&i5MI)<5mW`|%SvazC zWa-G-k;NmcN0yJQA1MG*0i*<uwgyNMkSZW$K<a=L0;vR23ZxcDF_3B?<v{9z6vWY1 z1SyH5tqD>Tq$)^Rkh&m+K`Mik2B{5F9Hcr(d64=b1wtx>l*rN62q_X$C8SJ9osdEy zl|o8|)CwsUQZ1xhNWGAPAr(VP=4fk%6wT3A4JjK^H>7Y#<&e@LwL^-BR1Ya1Qa_}C zj<$kG2_0<>ks><UDk5b>>WCB)sU%WLq?SlAk!m94MCyqY6saguQb$`;q^ORzsz_NK zZC#PVB9%o-i_{h=E>c~jyhweK0wWbhO6+KBj1<|?Rv9U?qpdSiXr$6esgYVE#YU=) d|NnA_*{o;VtS5QX-D&QWByVb}JJp>M_7|antVjR= literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Mexico/BajaSur b/env/lib/python3.6/site-packages/pytz/zoneinfo/Mexico/BajaSur new file mode 100644 index 0000000000000000000000000000000000000000..afa94c2ac5c1679d39143eae062b812070876492 GIT binary patch literal 1564 zcmdUuYiLhl9LIlSF0GZvatV=8qP3gNm>Ih;V}G;lj~TZAhX0IRZM0$AQ=|xcrbM}x z2uqXfl1q6dmk?UHq$JsrvZiH7$MZRq7hZYch41M(pVO&xUVT2^++u%}{IS9I4Tt5~ z!}ADjZ)e-OD_VNXUbp#I_}Y`7_&S1T`Z~Vv>AC;D()X<CYL7^Tjx5@(qN+ye8L561 zoz<^nrY=%5qh9FPA=A~Yai3*YSFoD>?YYGDg_t>Qk0t(ckePe^gd{Y6F!PQz$o%q$ zCUN&Mos@CeB(Ja2DdC6Af|Pu{a9FiTosy_i`<ql+;AEZltV%6@J4i3-Sgz6^OcKx0 zShe(0pk&mHQOowflgxq`lT~s@vJ*!eZ~8@9KJklL5xQSi%1e_o^1RM@ebcP!+oyAH zoieLCOZ1uxH`LnJr8;l#NtJ&%Li>ufDya8JVOpVDmlrPUr=_S33B$!7;xk3#KS@z< zlG*sZQ;MHVF`Hhs%jPSCP08)2y0od=l$~qU<(2nL#hxR&GPg@@@z?6AxVx%);cC5g zY_r-HUL)JS9#A#Ia;4@)xvKpbFWawWsvVCe$j&pRrmjV#epiNRIQT{+V1RoK3<&%O zm*X6d7jc{uMgkqD`LqmmoJK9dJO`+s@6$bA@nm}?*`8(gcv8I9h2Qi3g+=|pK6C7_ z31Sq)Du`JSyIfksAeO<OrtzZ+%^l+))<MjJ*atBXVj;vth>b3-kq|2xW<u;_7z(kJ zVJgH{hOrQ9A?8Btg%}L67-BNSW|!7zh}ABw*$}%ShC?i8m=3X>VLZfohWQZt83jNp zV3Yu<fl&mc3YWGFNF9(uAeBH$fz$#i22u^A97sKkf*=(!N`lnHC<;=QOIsGCE=FOH z${3|VYJ(I9sSZ*eq&`T2kP0CsLTZE*38~VhEfZ3wOIs+UQbwteS{cPcs%4Z5|Brfu ZY}&Cl=}=E}SY%j4s3$TaEG8@>=m(pAkqZC- literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Mexico/General b/env/lib/python3.6/site-packages/pytz/zoneinfo/Mexico/General new file mode 100644 index 0000000000000000000000000000000000000000..f11e3d2d66a2d7c21f498df96c3fd1db3a0d8817 GIT binary patch literal 1618 zcmdUuOGs2v0EUldWF{DuEqXvi5P_NR%*r&a!F2M`q-JGf*4U)!NlUD>4k$rTmW${i zLKM9y)E4>377-*Eg<(Wx7z0I!EjkL4^tzqzQma-$oBqSO-(}!3o9}ZKcs)_#k4-ke za9Odryw6hRakXPuwvCnhIy|}V&XfUnS8$xW>-%y2=xw{Z_gqX?-{w(Y-^fr_zkjc< zzcV*$@It8XO?CaV$dYOiRTU_s9r+?AXGF#>P7rZXPvr6`^F;jIVIBYBhgk9Tu}&Bd zP%Aqg>Q!yu)asr_op}1KTGK3bQpE$6+;B>+&AhBqc2&sKWesXwYPMV-;!%!8(b6$d zFVX@+W!kIVV#B-7GQG<oGM>)V&htye#@pjMvu2veI{8v(7cEvfW!-vH@>J!@Xx5t- zd{nt%bvjQxRr#S8WxlUXZ5gkX1rM6k))$3x+s#(7{ce&hJaI<sIJ;1~y_KTqXrkVk z<`Tt)3-qofabkC(zxK??Q6=-=>yoh;wdZ@U_72Qedxvi5(mNBXtoM=J*EFcguU(fF z`|m4x{Dj<J@LE)QN@Z0-k2tX2DG$zR5!K7Qy86p8Q4^A`YX*zNp-)To;d?3KNPmc~ zy|P2qwT|ke^~vg3<8v+i{5g}%39z5M{o7+%SAvDGtd=t(z_M;#5`mW0-R>7`Kc{Cn zUBbD^B~rMl=H|JK9>3@M3#}%8ePo%T7eq0LW)RgNy4f_!L9~OY2hq=_Q4pe`O`{@2 
zM~IUAEm|_vgy_jo6rw3ZRfw((Wo;U58R|mxg(wWs7@{&nXNb}ets!bd^oA%7(Hx>W zM0betHjVZS^=%sc83{l#V59)afsq6x3q~4{JRpfcGJ&K5$pw-OBpXONHcdW|gdiC~ zQi9~fND7h_BP~c?jKm<BF;au%#z+p59V9)QCO=4mkPIOyLUM#83CR+YCL~WtqL54( zsX}sPBn!!wk*-aXFC$@_CSyj*kenGwL$ZdX4apn+2Z;xn^5abTVa}NF$nc0TXJkZp IY<NV_PXY<#QUCw| literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/NZ b/env/lib/python3.6/site-packages/pytz/zoneinfo/NZ new file mode 100644 index 0000000000000000000000000000000000000000..a5f5b6d5e60f15ebdbb747228006e8fe06dd4a01 GIT binary patch literal 2460 zcmd_rdrXye9LMqJpwhVKWemws!@O`gTvUW2LIninl6fQ~qi>S%pdpkbbb(AGa|~<R zvNfZU955{rNzhUdL#TBjTa#+pH=TL5LUT2xEbMtd>%Z3evp;&y&hz^1>^y&*KR)l{ z$}cI3HU4(pV12@yYnS!rJbt5fYi)0HA6U92udV95d2r!*Y0v93-wExLL-8$U$EX(R z45~9bf9%!6!=>htkDBz|!5s6wb8bEQ_7(TB6CY~Vwn2Ay<32sU?uff*^EN$^z03W6 zVY&7?YuqO@>!r_M-2D+{(towuJoVKp$zDN!sMo$d>V0XC_%!^kzJtfrujY*UcQ$H3 zLAT!ca;=U{+O6ZNigkR*GddwJLnn+BX<&S~242`KK~vKusBg6dJHjNmX_4IYov%#V zGD~jmz9=EZJ~H|B0hy9EBvUuG$t}@d8d~y+P7C-!Z*@9ky2k>Y;df4EoSmjKhk9jJ zkEe!x+$7<9LnY#^trA%`Mx6C)C2GwDi7w5On8hDRY;K9p4%;hp;+E)K?|O*~jMTWF zTXf!)0FD2=P7}VkthXI3)!V!LC2>cN-qG-rB(0CpJ8Mo!^2%(PU(hZK5~F2d(o2#u zX`G~nR7vW!UnFfLUlv`h)$}uol5x6N7at3eB^?=>*<eW4&T!4HJ}WtuetK8Yh%S{d zxjRjDS+cLVruXRb$rmNp^G(gYJ|Ope|E%VH+a@bcnwsDBqTIW`R#rB*>V3}_OTly7 z_5QL95wk)c$O)HK>A6}M<tGo$jMvrvm!xRyaw!`2(c;VV#5_HuC7(@{H65So+SY6G z(9Q!|`ocG|u5yQ#m3K+Gw93OPzSc*Qx6As3qgpYgLLQy?nr<+1Wy7yeYo*3Z)t80( zSa+~IemGU1Xo=G&cdwG_twCC|DMg+#e^U>K$EbgLd5#|QpS}#EanPEBvA^Fj+A!Ye zG`tL>xtsH$0f#?l!#Z=%%yJpo`OCQ3rxX{@84ibyb#wju1YfnjW>0YVuZON%*Zhvm zW@M7cERks<^F$_!%oLd_GFN1>w$^Ns=_2z*CXCD&nKCkGWYWm2k!d6IMkbET9GN;Y zcVzO&>}{>-BlG9yk^oyP14s&x93V+RvVf!k$pexIBojy~kX#_iu(h&*q{G(A2a*t5 zD<eosY^|IiNkOuLqy@<fk{Bd2NNSMWAjv_pgQN$^50W5TD?><%kQ^aNLb8OU3CR<Z zC?r!zs*qeE$wIP)qzlOxk}z8<V@S%7oFPd=vWBD$$s3Y5By&jWklZ23L$ZgY56K^r zKwB$=ND7f0B1uHDh@=t8Ba%oYlSnF&Tq4OtvWcV<$tRLfB%?@5ZLORlNky`Xq!r04 zl2|0ONNSPXBFROvi=-FHFOpy+!$^ve9NSt+MzV~g8Obw}Xe84}s*zkH$wsn`q#MaM zl5iyBNXn6%BT2WlvW}!3$vcvGB=bn>k=!H6N3xHk-}s-j-<yAi$e7sd{1jJ2R)TY` QGddz`jx)v?9W~qgPuMCAVE_OC literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/NZ-CHAT b/env/lib/python3.6/site-packages/pytz/zoneinfo/NZ-CHAT new file mode 100644 index 0000000000000000000000000000000000000000..957c80b79a30f38d7db99fc06b16c3255ccc50e7 GIT binary patch literal 2087 zcmdVaS!`5g7>Dt1Dv)%#0g|Q^DEroFr=96Si*+o}mKHixrHoKZd1aYt!2&|1b)l>U z2_X_N;8is&K~jtbOsa{{LZZeC_7GemzCr>Qq%6@W3U>US<E0l~5@#~s@61W(qRII` z)in)azxmgzzrDkU*Jk_SI(ef#E`Pu8i`bOEH1^nGb>BNIaqW*a{@XK}(0W}HKRc*N zbv-)p{WcvGIG}@DBKmaNRvj|GLWlH)HMzi}$+x#i%II=Q`Ffe8y3!@}$V?gbbG!`S zK3PWe+?BLQoQ(YNqKqo*mC+kd$e8R{9oukT$0gm-@m`lqh?%Yv6K=}H8{>3R@0T+9 z>;O$acSJlLV<qF`9g?}Tzj$}8kgVmmCA%>wzFFUhKh&T(=^c`rKU?$MyCgq3Q}cg0 zs#AVT(t;~Hweac#oqDoSpXpANqP<o6Z2Nr)to7-0t(T>^c8*M|`&6bEWy_4fJ}DVK zSW44cr1a6RQr1@^Gk<T>^6N!XaV?^=PN&K2t_rPeHxk_M(K+jHNL6!!J|FJW7bIO? 
zEK{9Z952-q&g#68cO^97kcOfc<)vHObpB5#WWiVUTGRcWy!=U<)PB^d3*U`M-8*}9 z(W(lO`X+s)$|H-*L%Jj@L6%M`&}E7DBs^%IgdfIf<iQlFzt*b_mxjvnt_!-N^O3x| z|F|~p{!vyo@6}bSyJfX>%4-XL&^5(-WNqOoZ5q`iuMhn|*O`#4`(u+fYk{<Uw?yCQ zNtHK0FV(k>=Ih%B7R&k_DcZWZL^jm_sT-ovsH<PhQ&03AV@$_m`!JegC8nP-XKGz; zV@_6@f&ZM9!D{32`F(lz%*o~S+tV1Ai{BVi{ol|07W-$;Gyl-}g)#Quj!x?;*@|p{ z+w8#6ZUNZ?vI%4t$TpCDAR9q;f@}rZ3$htxH;#5Y$bKB{hL9aO+ASe_a<rR5c7<#U z*%z`gWM|0Mki8+BLw1L357{5GL1c%Hc8ka!kxe4IM7D|S6WJ)TQ)H{iUXjfryG6E( z>=)TEvSUZPWn|CDrjcDE+eY?{Y#iA+vUOzd$mWsVBil#zk2C=3fTL{z(gUOkNEeVc zAbmg@fph|C1=0(o8Avyfb|C#g8iI5LX$jI3N81#nD@a?Az95Z3I)k(Z=?&5xq&rA^ zkp3VILOO)B2<Z{hBuCpNq)kYlkVYY$LRy9N3TYP7Eu>vYzmSF@9Yb1%^bBbl(ltlh zHl%Mz<B-lFtwVZ;G!N+>(mterj<x~j{~keik(0$hrq4e$H_v{tr3_>Q3%z;X>@03& O1arMUd!1vi-G2iks25xS literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/Navajo b/env/lib/python3.6/site-packages/pytz/zoneinfo/Navajo new file mode 100644 index 0000000000000000000000000000000000000000..7fc669171f88e8e1fb0c1483bb83e746e5f1c779 GIT binary patch literal 2453 zcmdtjeN5F=9LMnkqQH%ZQ;8v<nU)CgtR#>b8FWLuL3o9$5k`URY6U)Y(?UE3#rVT< zR*YFDf|Ayni4SWwHq<p-TiG0LO>3b&mt~`eO%k}b^FAy8>5u+w>&t$;e%!&IpEvGR z-Zfd`A2->2!ozi$hxe(9ANJ?zJ^i7o`=tck^V$z;Z>?YNYndW?3oq&3_WkO^wg^2m z=l6!8>R53#-KR(Ab%;NrJ^EUhPh1;)Mvi^&5##48<irPg!hbwh2Hs2%VrRSzYW0iY zXD8&O^>Hesdb*xmI<BVVkLl2iVHLU~TZhY&D*WJK=@{9oZn)H=BQBf}ktdsV)O$T5 z`mJs$Uu_mQw!I*4+EOcS_SVR$E1e>y=m9w`H%Z)*G*8CPE>zRQ9WpLBQN{f_SI2)D zt`dgA^o&zKs+or`>sx!ys9C-l^0w`V)a(@jIcM!h;`Zz><Q+@j5p!eSmx;+*B>FGv zB*zAkG<-@YUv`T-2lnZda}6rB>qVV*v`nQp)#;2^7O2d+7MZninwsxiBNvp7s_euE z<y~2ys)eD+GAI73$oVcp=8jzud8dDtcYoF|7WFywJ^j1I;`X2Py}P!F{Q8geeJ#7x zl9E1sf6Z1^kp8kRELg1ye;bs})JEYvcR&_JR*9mcZF1?Ad{O-R8+zF%mCDuFsvmlH zu_~$b>e9|x>fuGjy37}>mM5fY_lmETdpuf~XP;K(-=s*-%&&xJFiNiU4~kX2Bl3~q z1ER8JNIp8yCaP+V$<<x!#AB|ry1KPhJ)U|*KT+pZHIW^<)>*7-ulRIbVydb;<I&#G zXyrYar`LY_i(1!NA)h=OC7$x-%BK&Fi2Cw)+0Z^D)@M)14fV&w#+Zw8Q%R@T<R8<% zoFmFN{JGv7+o3iOoX}fFed@Wc9{v1zk7{gc)?1I~sivx0y=`ZL3J&_~Yf{Md*S|md z?+pZYcL)&(yxkoXV&1g~v+oi1yIkgS3s-@8mYb)-Jf&{4A|Zn8H}}7<Z;$y!yS`EW z!d$>yRY*i1vPNW)$SRR#BI`sJimcRXmWr$uS*+Ep7FjN`USz?@imhhJ$eNKwBdbQc zY+hJ5XBG~uoMY+8+L6U0t4EfPtlw%1fK<S0N`TY=DFRXjjxr#1;3x!A364@AwcscQ zQVouBAobuV2vQNGBuGuHrYJ~Nkg_0kK?;LZ1}P0v8>Bc$bvVj{)Q6)$NQJDXL`aRS zrbtMYILd_72`Lm(DWp_Lt&n0N)k4aJ)C(yXQZb}tNX@LKXh_vK%7)a9qi{&&I7)}q zj-z-;^^o!*^+O7XRM2Wlh}6((iilLvYRZVzk)x1EC6Q7hwM2@ER1+yDQct9yNJXut zq)1Jzrl?3&t){F<T{#MiRF<Q(NNqWai&U4RyhweK0wWbhN{rOlYKn|h*=ov+)Y)nZ zjZ_*bHBxJiVk6b&C^u4Xj)Ef<=O{T+bE_#jQgy2-J5qP6DLhhnr1VJbk>VrON6L@X jUtDkg|1SRy^Iu`1`R|b8nxB@HmXYGh%uLHn%W(V&DI1f# literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/PRC b/env/lib/python3.6/site-packages/pytz/zoneinfo/PRC new file mode 100644 index 0000000000000000000000000000000000000000..dbd132f2b0bcc8beab08e04b182751795c853127 GIT binary patch literal 414 zcma)%y$%6E6ov01AsdnK0RGwCh(;k=S&0xTQ;84_wi_?7<`F!PCs;~}D7?f(B^vIT zl7ch2`)zh+C+8E>VAZ0p#Q6&b$@1Vmt@shmEEPQ+dAwxQ={D8*Lz@c0P8P$BDh-yh zJRhox=gVq;O|{%Y*PQ{??_KRC8|0oVI%a(=qV1LMrEqU0h@_&_Xe`L@@k|6ZIO2E7 z93L|!ALb9D7bk4{9*EM0TpUDs5CS+32?Qb_WIzakkOCnFLJol-2uVX01tDw5!t~*5 M#r`q2S-#n^-;pX<T>t<8 literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/PST8PDT b/env/lib/python3.6/site-packages/pytz/zoneinfo/PST8PDT new file mode 100644 index 0000000000000000000000000000000000000000..6242ac04c09fd4e4952cd16503e954dcbdacec2e GIT binary patch literal 2294 zcmdtieN5F=9LMnsqQq039#A5%lbXa8u7HA9u?H1*!9*~wBxWYY!ypY6x<VP*VwE+= zmmF$Ox1)0{*9;xA*(@TrTCLFov}V~DYx7|htE??j;&$HW-~Q^q*10>s*V+C5^A4_S 
[GIT binary patch payloads omitted. This stretch of the diff adds the following new files (mode 100644) under env/lib/python3.6/site-packages/pytz/zoneinfo/; the number after each name is the file size in bytes from its "literal" header:
 Pacific/Apia (1134), Pacific/Auckland (2460), Pacific/Bougainville (296), Pacific/Chatham (2087),
 Pacific/Chuuk (183), Pacific/Easter (2242), Pacific/Efate (492), Pacific/Enderbury (259), Pacific/Fakaofo (221),
 Pacific/Fiji (1104), Pacific/Funafuti (183), Pacific/Galapagos (268), Pacific/Gambier (186),
 Pacific/Guadalcanal (188), Pacific/Guam (225), Pacific/Honolulu (276), Pacific/Johnston (276),
 Pacific/Kiritimati (263), Pacific/Kosrae (251), Pacific/Kwajalein (259), Pacific/Majuro (221),
 Pacific/Marquesas (195), Pacific/Midway (196), Pacific/Nauru (282), Pacific/Niue (266), Pacific/Norfolk (323),
 Pacific/Noumea (328), Pacific/Pago_Pago (196), Pacific/Palau (182), Pacific/Pitcairn (223), Pacific/Pohnpei (183),
 Pacific/Ponape (183), Pacific/Port_Moresby (206), Pacific/Rarotonga (602), Pacific/Saipan (225),
 Pacific/Samoa (196), Pacific/Tahiti (187), Pacific/Tarawa (183), Pacific/Tongatapu (393), Pacific/Truk (183),
 Pacific/Wake (183), Pacific/Wallis (183), Pacific/Yap (183), Poland (2705), Portugal (3469), ROC (790),
 ROK (531), Singapore (424), Turkey (2166), UCT (127), US/Alaska (2380), US/Aleutian (2365), US/Arizona (353),
 US/Central (3585), US/East-Indiana (1675), US/Eastern (3545), US/Hawaii (276), US/Indiana-Starke (2437),
 US/Michigan (2188), US/Mountain (2453), US/Pacific (2845), US/Samoa (196), UTC (127), Universal (127),
 W-SU (1544), WET (1873), Zulu (127).]
diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/iso3166.tab b/env/lib/python3.6/site-packages/pytz/zoneinfo/iso3166.tab
new file mode 100644
index 0000000..c2e0f8e
--- /dev/null
+++ b/env/lib/python3.6/site-packages/pytz/zoneinfo/iso3166.tab
@@ -0,0 +1,274 @@
+# ISO 3166 alpha-2 country codes
+#
+# This file is in the public domain, so clarified as of
+# 2009-05-17 by Arthur David Olson.
+#
+# From Paul Eggert (2015-05-02):
+# This file contains a table of two-letter country codes. Columns are
+# separated by a single tab. Lines beginning with '#' are comments.
+# All text uses UTF-8 encoding. The columns of the table are as follows:
+#
+# 1. ISO 3166-1 alpha-2 country code, current as of
+# ISO 3166-1 N905 (2016-11-15). See: Updates on ISO 3166-1
+# http://isotc.iso.org/livelink/livelink/Open/16944257
+# 2. The usual English name for the coded region,
+# chosen so that alphabetic sorting of subsets produces helpful lists.
+# This is not the same as the English name in the ISO 3166 tables.
+#
+# The table is sorted by country code.
+#
+# This table is intended as an aid for users, to help them select time
+# zone data appropriate for their practical needs. It is not intended
+# to take or endorse any position on legal or territorial claims.
+#
+#country-
+#code name of country, territory, area, or subdivision
+AD Andorra
+AE United Arab Emirates
+AF Afghanistan
+AG Antigua & Barbuda
+AI Anguilla
+AL Albania
+AM Armenia
+AO Angola
+AQ Antarctica
+AR Argentina
+AS Samoa (American)
+AT Austria
+AU Australia
+AW Aruba
+AX Åland Islands
+AZ Azerbaijan
+BA Bosnia & Herzegovina
+BB Barbados
+BD Bangladesh
+BE Belgium
+BF Burkina Faso
+BG Bulgaria
+BH Bahrain
+BI Burundi
+BJ Benin
+BL St Barthelemy
+BM Bermuda
+BN Brunei
+BO Bolivia
+BQ Caribbean NL
+BR Brazil
+BS Bahamas
+BT Bhutan
+BV Bouvet Island
+BW Botswana
+BY Belarus
+BZ Belize
+CA Canada
+CC Cocos (Keeling) Islands
+CD Congo (Dem. Rep.)
+CF Central African Rep.
+CG Congo (Rep.)
+CH Switzerland
+CI Côte d'Ivoire
+CK Cook Islands
+CL Chile
+CM Cameroon
+CN China
+CO Colombia
+CR Costa Rica
+CU Cuba
+CV Cape Verde
+CW Curaçao
+CX Christmas Island
+CY Cyprus
+CZ Czech Republic
+DE Germany
+DJ Djibouti
+DK Denmark
+DM Dominica
+DO Dominican Republic
+DZ Algeria
+EC Ecuador
+EE Estonia
+EG Egypt
+EH Western Sahara
+ER Eritrea
+ES Spain
+ET Ethiopia
+FI Finland
+FJ Fiji
+FK Falkland Islands
+FM Micronesia
+FO Faroe Islands
+FR France
+GA Gabon
+GB Britain (UK)
+GD Grenada
+GE Georgia
+GF French Guiana
+GG Guernsey
+GH Ghana
+GI Gibraltar
+GL Greenland
+GM Gambia
+GN Guinea
+GP Guadeloupe
+GQ Equatorial Guinea
+GR Greece
+GS South Georgia & the South Sandwich Islands
+GT Guatemala
+GU Guam
+GW Guinea-Bissau
+GY Guyana
+HK Hong Kong
+HM Heard Island & McDonald Islands
+HN Honduras
+HR Croatia
+HT Haiti
+HU Hungary
+ID Indonesia
+IE Ireland
+IL Israel
+IM Isle of Man
+IN India
+IO British Indian Ocean Territory
+IQ Iraq
+IR Iran
+IS Iceland
+IT Italy
+JE Jersey
+JM Jamaica
+JO Jordan
+JP Japan
+KE Kenya
+KG Kyrgyzstan
+KH Cambodia
+KI Kiribati
+KM Comoros
+KN St Kitts & Nevis
+KP Korea (North)
+KR Korea (South)
+KW Kuwait
+KY Cayman Islands
+KZ Kazakhstan
+LA Laos
+LB Lebanon
+LC St Lucia
+LI Liechtenstein
+LK Sri Lanka
+LR Liberia
+LS Lesotho
+LT Lithuania
+LU Luxembourg
+LV Latvia
+LY Libya
+MA Morocco
+MC Monaco
+MD Moldova
+ME Montenegro
+MF St Martin (French)
+MG Madagascar
+MH Marshall Islands
+MK Macedonia
+ML Mali
+MM Myanmar (Burma)
+MN Mongolia
+MO Macau
+MP Northern Mariana Islands
+MQ Martinique
+MR Mauritania
+MS Montserrat
+MT Malta
+MU Mauritius
+MV Maldives
+MW Malawi
+MX Mexico
+MY Malaysia
+MZ Mozambique
+NA Namibia
+NC New Caledonia
+NE Niger
+NF Norfolk Island
+NG Nigeria
+NI Nicaragua
+NL Netherlands
+NO Norway
+NP Nepal
+NR Nauru
+NU Niue
+NZ New Zealand
+OM Oman
+PA Panama
+PE Peru
+PF French Polynesia
+PG Papua New Guinea
+PH Philippines
+PK Pakistan
+PL Poland
+PM St Pierre & Miquelon
+PN Pitcairn
+PR Puerto Rico
+PS Palestine
+PT Portugal
+PW Palau
+PY Paraguay
+QA Qatar
+RE Réunion
+RO Romania
+RS Serbia
+RU Russia
+RW Rwanda
+SA Saudi Arabia
+SB Solomon Islands
+SC Seychelles
+SD Sudan
+SE Sweden
+SG Singapore
+SH St Helena
+SI Slovenia
+SJ Svalbard & Jan Mayen
+SK Slovakia
+SL Sierra Leone
+SM San Marino
+SN Senegal
+SO Somalia
+SR Suriname
+SS South Sudan
+ST Sao Tome & Principe
+SV El Salvador
+SX St Maarten (Dutch)
+SY Syria
+SZ Swaziland
+TC Turks & Caicos Is
+TD Chad
+TF French Southern & Antarctic Lands
+TG Togo
+TH Thailand
+TJ Tajikistan
+TK Tokelau
+TL East Timor
+TM Turkmenistan
+TN Tunisia
+TO Tonga
+TR Turkey
+TT Trinidad & Tobago
+TV Tuvalu
+TW Taiwan
+TZ Tanzania
+UA Ukraine
+UG Uganda
+UM US minor outlying islands
+US United States
+UY Uruguay
+UZ Uzbekistan
+VA Vatican City
+VC St Vincent
+VE Venezuela
+VG Virgin Islands (UK)
+VI Virgin Islands (US)
+VN Vietnam
+VU Vanuatu
+WF Wallis & Futuna
+WS Samoa (western)
+YE Yemen
+YT Mayotte
+ZA South Africa
+ZM Zambia
+ZW Zimbabwe
diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/leapseconds b/env/lib/python3.6/site-packages/pytz/zoneinfo/leapseconds
new file mode 100644
index 0000000..358e741
--- /dev/null
+++ b/env/lib/python3.6/site-packages/pytz/zoneinfo/leapseconds
@@ -0,0 +1,61 @@
+# Allowance for leap seconds added to each time zone file.
+
+# This file is in the public domain.
+
+# This file is generated automatically from the data in the public-domain
+# leap-seconds.list file, which is copied from:
+# ftp://ftp.nist.gov/pub/time/leap-seconds.list
+# For more about leap-seconds.list, please see
+# The NTP Timescale and Leap Seconds
+# https://www.eecis.udel.edu/~mills/leap.html
+
+# The International Earth Rotation and Reference Systems Service
+# periodically uses leap seconds to keep UTC to within 0.9 s of UT1
+# (which measures the true angular orientation of the earth in space); see
+# Levine J. Coordinated Universal Time and the leap second.
+# URSI Radio Sci Bull. 2016;89(4):30-6. doi:10.23919/URSIRSB.2016.7909995
+# http://ieeexplore.ieee.org/document/7909995/
+# There were no leap seconds before 1972, because the official mechanism
+# accounting for the discrepancy between atomic time and the earth's rotation
+# did not exist until the early 1970s.
+
+# The correction (+ or -) is made at the given time, so lines
+# will typically look like:
+# Leap YEAR MON DAY 23:59:60 + R/S
+# or
+# Leap YEAR MON DAY 23:59:59 - R/S
+
+# If the leapsecond is Rolling (R) the given time is local time.
+# If the leapsecond is Stationary (S) the given time is UTC.
+ +# Leap YEAR MONTH DAY HH:MM:SS CORR R/S +Leap 1972 Jun 30 23:59:60 + S +Leap 1972 Dec 31 23:59:60 + S +Leap 1973 Dec 31 23:59:60 + S +Leap 1974 Dec 31 23:59:60 + S +Leap 1975 Dec 31 23:59:60 + S +Leap 1976 Dec 31 23:59:60 + S +Leap 1977 Dec 31 23:59:60 + S +Leap 1978 Dec 31 23:59:60 + S +Leap 1979 Dec 31 23:59:60 + S +Leap 1981 Jun 30 23:59:60 + S +Leap 1982 Jun 30 23:59:60 + S +Leap 1983 Jun 30 23:59:60 + S +Leap 1985 Jun 30 23:59:60 + S +Leap 1987 Dec 31 23:59:60 + S +Leap 1989 Dec 31 23:59:60 + S +Leap 1990 Dec 31 23:59:60 + S +Leap 1992 Jun 30 23:59:60 + S +Leap 1993 Jun 30 23:59:60 + S +Leap 1994 Jun 30 23:59:60 + S +Leap 1995 Dec 31 23:59:60 + S +Leap 1997 Jun 30 23:59:60 + S +Leap 1998 Dec 31 23:59:60 + S +Leap 2005 Dec 31 23:59:60 + S +Leap 2008 Dec 31 23:59:60 + S +Leap 2012 Jun 30 23:59:60 + S +Leap 2015 Jun 30 23:59:60 + S +Leap 2016 Dec 31 23:59:60 + S + +# Updated through IERS Bulletin C55 +# File expires on: 28 December 2018 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/posixrules b/env/lib/python3.6/site-packages/pytz/zoneinfo/posixrules new file mode 100644 index 0000000000000000000000000000000000000000..7553fee37a5d03e9163ee19b1ced730a02345cfb GIT binary patch literal 3545 zcmd_sSy0tw7{~FW;u1<|in&Fm6{0L|xKd(jgnGmUQpxbGKnAsVN+m4AN|bP>tkJ=? z!Q9a_a=|1E(F`4%HgPS*S5s0GdzBW_I;Z#h-geP=(N%xve?Dg%818=GCn+U!T5r!k zp2qfnczG_{m+x&}v>!$5LS@CrKdJW?d1U3=U#eB<XJz$*i+auU4`gl3dbMuCL9;%j zKo$4dWQu)j^~Tn(nT^%?`u#dFo60lw=Bn}LgQ8KoWLsbJVQ!?}^6og<nlVyso7YFS z$498rDIu~W>{j*B;NN9u6QjyHo{+MdLuyyRuVz=}cJ;}*W9HM6Z*=*-GP8ThR$Z~? z9kVBEnckcCg83{lTklJoYCeyiq$|DiWPk7=eIPPb4%AOn2ZQ3|;PHX#i&u;s>iUZu zQa5zfoO9-I+$nt|xzZf%yjvfODK^JFEA@$x#pZ-wpuh92m+vdm^~vf2Ikn+sRb4(q zP8XypUF4NBnGdS7xzX}NLN|3TwUwNo7^Q3CBh8QfTj~p8!RBJyYx+`?tLD;ghxJc2 zRp#>19lEx%)LhwJrG73sBxXgay1Hb$T${gK)nygRFH`5LUlViWw;_+H-=kBczT30< zkKkCj-fXhIUO&m)xG-4%d3=!h>%bk_x3iP+ulH-ua-V6Ce?~WaR+~oRQvvEPX*^b| zCUK{wY0tf?>8tJKmX>SOEt{8_K(k0S*9)b^iB&qNB13L1%hSOd7MPZAP1CIk(#>si zAJVNe<4v2%-E~MpxM@4Eg}yz!xoOuWT(xgjYdSP+t~y)`l#XX=Ri|$+%N={ZR-s$I zk~>#!QJu3r=B}5PsxHZAP1orq`tF#0=AMyn=zBxfnXvA&beQim2@g!x;ni!U`=$Q6 zM|r+PR3)j%qD+a})=x#}j*^~B+o@g|8K(C$*HxeR1k-o?Nfi^;!}RN2uKG6(G6On( zrw7#hYzE%=L=UR`)(rl>NXM33k^6SNsPA9$jSP9`aUGYnRfguxR}UmElVNF(so~Mt zGGh2JHKMNA#79om@l}gWLeNm1ux+LpS=&{QdbdDEAB|Jqc{60pjxH*3idV)K2B>kd z(K3EcjhfJ@l_Vt}P)RrH<l&f&>f!UjW>RRSp0w|(nd~dpDQl|CBh`!bl)O^&X!%T? 
znzr0bEgGYhce^~6KSMnpStw6rcvV_Zj-<y&tMu9w$p~wuGQNzJ%qDdzb91OnuQ{S- z6b>>ozrL!U%<g2KDyh<0$vz`XO7+t*+B}oBT+a&GYi1|T)w6x4C3`@j%C6ocIqh;( zPWft?Tc4tyD_SVeACFb@ax>(GouMi>H9_XT=}`?E+~mJT0XO*zH~R<vyPx;_KQ8ik z{QmtF4FdfBvJXAY-1iplv*l=Sl4rzl`%bX$MEj0SvRmfyG;kkD|Gt5>_6OYt*7F<o z@!Kc0k$D~2L}V9{ZAA7F*+^t3k*!4b64^{-H<9gh+WkZ})M<AV*-~UrkxfN*71>r~ zUy+SPb{5%MWN(qpMRph2USxlf4R+ccMz+{#_ZZn^WS5a`M)n!mXk@36tw#15*=%ID zk?ltI8`*HD-Em~gop#TWO-FX!X}2BOcVy#{okzAF*?VO3k==LN?ML<>X#mmzqy<P1 zkR~8qK-z%x0cix%38WQBFOX&+-9Xxb^uuWzf^-CF3DOg!DM(k4wjg~$8iRBOX${gF zq&Y};koF+`K^o+=9YR`!^ayDZx-RjBHu2a#0gXaBg|rIk71AuETS&W{wqHoYoVH^~ z%aEQSO+&hdv<>MS(m14ZNb8W^A<aX&hqMprAJRaigGdXVwueX)kuD-_MEZy{66qw; zN~D)aGm&m0?L_*CG}LK3inP>edx|s_=_=Azq_0S0k<KEmMS6=g7wIn2UZlTBgOLs+ zEq2--BTYuSjI<f)Gty|J(@3k4UL(y$x{b6O={M4Fr|me>a;NP%(sZZoI?{He??~g3 z&LgcydXF?8=|0kar2ohb;IwxDatk=^J%HQ<PJ0(1w*hh=AU6VXCm^>1axWk^19CSY zw*zuNAU6bZM<BO^)7}%vP2se61#(+B?R|mV7|5N0+#1Nef!rL(-GSU5$o+xbAjlnp z+#*hUk03XR)7~Y>ZQ``|338(#cM5W=AomJ#vmkd1a=UnL`V#q{9xs9Rrirn)O@y~k SRPU&s5#C<CqIyO34E!5Kf4#l{ literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/tzdata.zi b/env/lib/python3.6/site-packages/pytz/zoneinfo/tzdata.zi new file mode 100644 index 0000000..412e919 --- /dev/null +++ b/env/lib/python3.6/site-packages/pytz/zoneinfo/tzdata.zi @@ -0,0 +1,4162 @@ +# version unknown +# This zic input file is in the public domain. +R A 1916 o - Jun 14 23s 1 S +R A 1916 1919 - O Sun>=1 23s 0 - +R A 1917 o - Mar 24 23s 1 S +R A 1918 o - Mar 9 23s 1 S +R A 1919 o - Mar 1 23s 1 S +R A 1920 o - F 14 23s 1 S +R A 1920 o - O 23 23s 0 - +R A 1921 o - Mar 14 23s 1 S +R A 1921 o - Jun 21 23s 0 - +R A 1939 o - S 11 23s 1 S +R A 1939 o - N 19 1 0 - +R A 1944 1945 - Ap M>=1 2 1 S +R A 1944 o - O 8 2 0 - +R A 1945 o - S 16 1 0 - +R A 1971 o - Ap 25 23s 1 S +R A 1971 o - S 26 23s 0 - +R A 1977 o - May 6 0 1 S +R A 1977 o - O 21 0 0 - +R A 1978 o - Mar 24 1 1 S +R A 1978 o - S 22 3 0 - +R A 1980 o - Ap 25 0 1 S +R A 1980 o - O 31 2 0 - +Z Africa/Algiers 0:12:12 - LMT 1891 Mar 15 0:1 +0:9:21 - PMT 1911 Mar 11 +0 A WE%sT 1940 F 25 2 +1 A CE%sT 1946 O 7 +0 - WET 1956 Ja 29 +1 - CET 1963 Ap 14 +0 A WE%sT 1977 O 21 +1 A CE%sT 1979 O 26 +0 A WE%sT 1981 May +1 - CET +Z Atlantic/Cape_Verde -1:34:4 - LMT 1912 Ja 1 2u +-2 - -02 1942 S +-2 1 -01 1945 O 15 +-2 - -02 1975 N 25 2 +-1 - -01 +Z Africa/Ndjamena 1:0:12 - LMT 1912 +1 - WAT 1979 O 14 +1 1 WAST 1980 Mar 8 +1 - WAT +Z Africa/Abidjan -0:16:8 - LMT 1912 +0 - GMT +Li Africa/Abidjan Africa/Bamako +Li Africa/Abidjan Africa/Banjul +Li Africa/Abidjan Africa/Conakry +Li Africa/Abidjan Africa/Dakar +Li Africa/Abidjan Africa/Freetown +Li Africa/Abidjan Africa/Lome +Li Africa/Abidjan Africa/Nouakchott +Li Africa/Abidjan Africa/Ouagadougou +Li Africa/Abidjan Atlantic/St_Helena +R B 1940 o - Jul 15 0 1 S +R B 1940 o - O 1 0 0 - +R B 1941 o - Ap 15 0 1 S +R B 1941 o - S 16 0 0 - +R B 1942 1944 - Ap 1 0 1 S +R B 1942 o - O 27 0 0 - +R B 1943 1945 - N 1 0 0 - +R B 1945 o - Ap 16 0 1 S +R B 1957 o - May 10 0 1 S +R B 1957 1958 - O 1 0 0 - +R B 1958 o - May 1 0 1 S +R B 1959 1981 - May 1 1 1 S +R B 1959 1965 - S 30 3 0 - +R B 1966 1994 - O 1 3 0 - +R B 1982 o - Jul 25 1 1 S +R B 1983 o - Jul 12 1 1 S +R B 1984 1988 - May 1 1 1 S +R B 1989 o - May 6 1 1 S +R B 1990 1994 - May 1 1 1 S +R B 1995 2010 - Ap lastF 0s 1 S +R B 1995 2005 - S lastTh 24 0 - +R B 2006 o - S 21 24 0 - +R B 2007 o - S Th>=1 24 0 - +R B 2008 o - Au lastTh 24 0 - +R B 2009 o - Au 20 24 0 - +R B 2010 o - Au 10 24 0 - +R B 2010 o - S 9 24 1 S +R B 2010 o - S lastTh 24 0 - +R B 
2014 o - May 15 24 1 S +R B 2014 o - Jun 26 24 0 - +R B 2014 o - Jul 31 24 1 S +R B 2014 o - S lastTh 24 0 - +Z Africa/Cairo 2:5:9 - LMT 1900 O +2 B EE%sT +R C 1920 1942 - S 1 0 0:20 - +R C 1920 1942 - D 31 0 0 - +Z Africa/Accra -0:0:52 - LMT 1918 +0 C GMT/+0020 +Z Africa/Bissau -1:2:20 - LMT 1912 Ja 1 1u +-1 - -01 1975 +0 - GMT +Z Africa/Nairobi 2:27:16 - LMT 1928 Jul +3 - EAT 1930 +2:30 - +0230 1940 +2:45 - +0245 1960 +3 - EAT +Li Africa/Nairobi Africa/Addis_Ababa +Li Africa/Nairobi Africa/Asmara +Li Africa/Nairobi Africa/Dar_es_Salaam +Li Africa/Nairobi Africa/Djibouti +Li Africa/Nairobi Africa/Kampala +Li Africa/Nairobi Africa/Mogadishu +Li Africa/Nairobi Indian/Antananarivo +Li Africa/Nairobi Indian/Comoro +Li Africa/Nairobi Indian/Mayotte +Z Africa/Monrovia -0:43:8 - LMT 1882 +-0:43:8 - MMT 1919 Mar +-0:44:30 - MMT 1972 Ja 7 +0 - GMT +R D 1951 o - O 14 2 1 S +R D 1952 o - Ja 1 0 0 - +R D 1953 o - O 9 2 1 S +R D 1954 o - Ja 1 0 0 - +R D 1955 o - S 30 0 1 S +R D 1956 o - Ja 1 0 0 - +R D 1982 1984 - Ap 1 0 1 S +R D 1982 1985 - O 1 0 0 - +R D 1985 o - Ap 6 0 1 S +R D 1986 o - Ap 4 0 1 S +R D 1986 o - O 3 0 0 - +R D 1987 1989 - Ap 1 0 1 S +R D 1987 1989 - O 1 0 0 - +R D 1997 o - Ap 4 0 1 S +R D 1997 o - O 4 0 0 - +R D 2013 o - Mar lastF 1 1 S +R D 2013 o - O lastF 2 0 - +Z Africa/Tripoli 0:52:44 - LMT 1920 +1 D CE%sT 1959 +2 - EET 1982 +1 D CE%sT 1990 May 4 +2 - EET 1996 S 30 +1 D CE%sT 1997 O 4 +2 - EET 2012 N 10 2 +1 D CE%sT 2013 O 25 2 +2 - EET +R E 1982 o - O 10 0 1 - +R E 1983 o - Mar 21 0 0 - +R E 2008 o - O lastSun 2 1 - +R E 2009 o - Mar lastSun 2 0 - +Z Indian/Mauritius 3:50 - LMT 1907 +4 E +04/+05 +R F 1939 o - S 12 0 1 S +R F 1939 o - N 19 0 0 - +R F 1940 o - F 25 0 1 S +R F 1945 o - N 18 0 0 - +R F 1950 o - Jun 11 0 1 S +R F 1950 o - O 29 0 0 - +R F 1967 o - Jun 3 12 1 S +R F 1967 o - O 1 0 0 - +R F 1974 o - Jun 24 0 1 S +R F 1974 o - S 1 0 0 - +R F 1976 1977 - May 1 0 1 S +R F 1976 o - Au 1 0 0 - +R F 1977 o - S 28 0 0 - +R F 1978 o - Jun 1 0 1 S +R F 1978 o - Au 4 0 0 - +R F 2008 o - Jun 1 0 1 S +R F 2008 o - S 1 0 0 - +R F 2009 o - Jun 1 0 1 S +R F 2009 o - Au 21 0 0 - +R F 2010 o - May 2 0 1 S +R F 2010 o - Au 8 0 0 - +R F 2011 o - Ap 3 0 1 S +R F 2011 o - Jul 31 0 0 - +R F 2012 2013 - Ap lastSun 2 1 S +R F 2012 o - Jul 20 3 0 - +R F 2012 o - Au 20 2 1 S +R F 2012 o - S 30 3 0 - +R F 2013 o - Jul 7 3 0 - +R F 2013 o - Au 10 2 1 S +R F 2013 ma - O lastSun 3 0 - +R F 2014 2021 - Mar lastSun 2 1 S +R F 2014 o - Jun 28 3 0 - +R F 2014 o - Au 2 2 1 S +R F 2015 o - Jun 14 3 0 - +R F 2015 o - Jul 19 2 1 S +R F 2016 o - Jun 5 3 0 - +R F 2016 o - Jul 10 2 1 S +R F 2017 o - May 21 3 0 - +R F 2017 o - Jul 2 2 1 S +R F 2018 o - May 13 3 0 - +R F 2018 o - Jun 17 2 1 S +R F 2019 o - May 5 3 0 - +R F 2019 o - Jun 9 2 1 S +R F 2020 o - Ap 19 3 0 - +R F 2020 o - May 24 2 1 S +R F 2021 o - Ap 11 3 0 - +R F 2021 o - May 16 2 1 S +R F 2022 o - May 8 2 1 S +R F 2023 o - Ap 23 2 1 S +R F 2024 o - Ap 14 2 1 S +R F 2025 o - Ap 6 2 1 S +R F 2026 ma - Mar lastSun 2 1 S +R F 2036 o - O 19 3 0 - +R F 2037 o - O 4 3 0 - +Z Africa/Casablanca -0:30:20 - LMT 1913 O 26 +0 F WE%sT 1984 Mar 16 +1 - CET 1986 +0 F WE%sT +Z Africa/El_Aaiun -0:52:48 - LMT 1934 +-1 - -01 1976 Ap 14 +0 F WE%sT +Z Africa/Maputo 2:10:20 - LMT 1903 Mar +2 - CAT +Li Africa/Maputo Africa/Blantyre +Li Africa/Maputo Africa/Bujumbura +Li Africa/Maputo Africa/Gaborone +Li Africa/Maputo Africa/Harare +Li Africa/Maputo Africa/Kigali +Li Africa/Maputo Africa/Lubumbashi +Li Africa/Maputo Africa/Lusaka +R G 1994 o - Mar 21 0 -1 WAT +R G 1994 
2017 - S Sun>=1 2 0 CAT +R G 1995 2017 - Ap Sun>=1 2 -1 WAT +Z Africa/Windhoek 1:8:24 - LMT 1892 F 8 +1:30 - +0130 1903 Mar +2 - SAST 1942 S 20 2 +2 1 SAST 1943 Mar 21 2 +2 - SAST 1990 Mar 21 +2 G %s +Z Africa/Lagos 0:13:36 - LMT 1919 S +1 - WAT +Li Africa/Lagos Africa/Bangui +Li Africa/Lagos Africa/Brazzaville +Li Africa/Lagos Africa/Douala +Li Africa/Lagos Africa/Kinshasa +Li Africa/Lagos Africa/Libreville +Li Africa/Lagos Africa/Luanda +Li Africa/Lagos Africa/Malabo +Li Africa/Lagos Africa/Niamey +Li Africa/Lagos Africa/Porto-Novo +Z Indian/Reunion 3:41:52 - LMT 1911 Jun +4 - +04 +Z Africa/Sao_Tome 0:26:56 - LMT 1884 +-0:36:45 - LMT 1912 Ja 1 0u +0 - GMT 2018 Ja 1 1 +1 - WAT +Z Indian/Mahe 3:41:48 - LMT 1906 Jun +4 - +04 +R H 1942 1943 - S Sun>=15 2 1 - +R H 1943 1944 - Mar Sun>=15 2 0 - +Z Africa/Johannesburg 1:52 - LMT 1892 F 8 +1:30 - SAST 1903 Mar +2 H SAST +Li Africa/Johannesburg Africa/Maseru +Li Africa/Johannesburg Africa/Mbabane +R I 1970 o - May 1 0 1 S +R I 1970 1985 - O 15 0 0 - +R I 1971 o - Ap 30 0 1 S +R I 1972 1985 - Ap lastSun 0 1 S +Z Africa/Khartoum 2:10:8 - LMT 1931 +2 I CA%sT 2000 Ja 15 12 +3 - EAT 2017 N +2 - CAT +Z Africa/Juba 2:6:28 - LMT 1931 +2 I CA%sT 2000 Ja 15 12 +3 - EAT +R J 1939 o - Ap 15 23s 1 S +R J 1939 o - N 18 23s 0 - +R J 1940 o - F 25 23s 1 S +R J 1941 o - O 6 0 0 - +R J 1942 o - Mar 9 0 1 S +R J 1942 o - N 2 3 0 - +R J 1943 o - Mar 29 2 1 S +R J 1943 o - Ap 17 2 0 - +R J 1943 o - Ap 25 2 1 S +R J 1943 o - O 4 2 0 - +R J 1944 1945 - Ap M>=1 2 1 S +R J 1944 o - O 8 0 0 - +R J 1945 o - S 16 0 0 - +R J 1977 o - Ap 30 0s 1 S +R J 1977 o - S 24 0s 0 - +R J 1978 o - May 1 0s 1 S +R J 1978 o - O 1 0s 0 - +R J 1988 o - Jun 1 0s 1 S +R J 1988 1990 - S lastSun 0s 0 - +R J 1989 o - Mar 26 0s 1 S +R J 1990 o - May 1 0s 1 S +R J 2005 o - May 1 0s 1 S +R J 2005 o - S 30 1s 0 - +R J 2006 2008 - Mar lastSun 2s 1 S +R J 2006 2008 - O lastSun 2s 0 - +Z Africa/Tunis 0:40:44 - LMT 1881 May 12 +0:9:21 - PMT 1911 Mar 11 +1 J CE%sT +Z Antarctica/Casey 0 - -00 1969 +8 - +08 2009 O 18 2 +11 - +11 2010 Mar 5 2 +8 - +08 2011 O 28 2 +11 - +11 2012 F 21 17u +8 - +08 2016 O 22 +11 - +11 2018 Mar 11 4 +8 - +08 +Z Antarctica/Davis 0 - -00 1957 Ja 13 +7 - +07 1964 N +0 - -00 1969 F +7 - +07 2009 O 18 2 +5 - +05 2010 Mar 10 20u +7 - +07 2011 O 28 2 +5 - +05 2012 F 21 20u +7 - +07 +Z Antarctica/Mawson 0 - -00 1954 F 13 +6 - +06 2009 O 18 2 +5 - +05 +Z Indian/Kerguelen 0 - -00 1950 +5 - +05 +Z Antarctica/DumontDUrville 0 - -00 1947 +10 - +10 1952 Ja 14 +0 - -00 1956 N +10 - +10 +Z Antarctica/Syowa 0 - -00 1957 Ja 29 +3 - +03 +R K 2005 ma - Mar lastSun 1u 2 +02 +R K 2004 ma - O lastSun 1u 0 +00 +Z Antarctica/Troll 0 - -00 2005 F 12 +0 K %s +Z Antarctica/Vostok 0 - -00 1957 D 16 +6 - +06 +Z Antarctica/Rothera 0 - -00 1976 D +-3 - -03 +Z Asia/Kabul 4:36:48 - LMT 1890 +4 - +04 1945 +4:30 - +0430 +R L 2011 o - Mar lastSun 2s 1 - +R L 2011 o - O lastSun 2s 0 - +Z Asia/Yerevan 2:58 - LMT 1924 May 2 +3 - +03 1957 Mar +4 M +04/+05 1991 Mar 31 2s +3 M +03/+04 1995 S 24 2s +4 - +04 1997 +4 M +04/+05 2011 +4 L +04/+05 +R N 1997 2015 - Mar lastSun 4 1 - +R N 1997 2015 - O lastSun 5 0 - +Z Asia/Baku 3:19:24 - LMT 1924 May 2 +3 - +03 1957 Mar +4 M +04/+05 1991 Mar 31 2s +3 M +03/+04 1992 S lastSun 2s +4 - +04 1996 +4 O +04/+05 1997 +4 N +04/+05 +R P 2009 o - Jun 19 23 1 - +R P 2009 o - D 31 24 0 - +Z Asia/Dhaka 6:1:40 - LMT 1890 +5:53:20 - HMT 1941 O +6:30 - +0630 1942 May 15 +5:30 - +0530 1942 S +6:30 - +0630 1951 S 30 +6 - +06 2009 +6 P +06/+07 +Z Asia/Thimphu 5:58:36 - LMT 1947 Au 15 +5:30 - 
+0530 1987 O +6 - +06 +Z Indian/Chagos 4:49:40 - LMT 1907 +5 - +05 1996 +6 - +06 +Z Asia/Brunei 7:39:40 - LMT 1926 Mar +7:30 - +0730 1933 +8 - +08 +Z Asia/Yangon 6:24:47 - LMT 1880 +6:24:47 - RMT 1920 +6:30 - +0630 1942 May +9 - +09 1945 May 3 +6:30 - +0630 +R Q 1940 o - Jun 3 0 1 D +R Q 1940 1941 - O 1 0 0 S +R Q 1941 o - Mar 16 0 1 D +R R 1986 o - May 4 0 1 D +R R 1986 1991 - S Sun>=11 0 0 S +R R 1987 1991 - Ap Sun>=10 0 1 D +Z Asia/Shanghai 8:5:43 - LMT 1901 +8 Q C%sT 1949 +8 R C%sT +Z Asia/Urumqi 5:50:20 - LMT 1928 +6 - +06 +R S 1941 o - Ap 1 3:30 1 S +R S 1941 o - S 30 3:30 0 - +R S 1946 o - Ap 20 3:30 1 S +R S 1946 o - D 1 3:30 0 - +R S 1947 o - Ap 13 3:30 1 S +R S 1947 o - D 30 3:30 0 - +R S 1948 o - May 2 3:30 1 S +R S 1948 1951 - O lastSun 3:30 0 - +R S 1952 o - O 25 3:30 0 - +R S 1949 1953 - Ap Sun>=1 3:30 1 S +R S 1953 o - N 1 3:30 0 - +R S 1954 1964 - Mar Sun>=18 3:30 1 S +R S 1954 o - O 31 3:30 0 - +R S 1955 1964 - N Sun>=1 3:30 0 - +R S 1965 1976 - Ap Sun>=16 3:30 1 S +R S 1965 1976 - O Sun>=16 3:30 0 - +R S 1973 o - D 30 3:30 1 S +R S 1979 o - May Sun>=8 3:30 1 S +R S 1979 o - O Sun>=16 3:30 0 - +Z Asia/Hong_Kong 7:36:42 - LMT 1904 O 30 +8 S HK%sT 1941 D 25 +9 - JST 1945 S 15 +8 S HK%sT +R T 1946 o - May 15 0 1 D +R T 1946 o - O 1 0 0 S +R T 1947 o - Ap 15 0 1 D +R T 1947 o - N 1 0 0 S +R T 1948 1951 - May 1 0 1 D +R T 1948 1951 - O 1 0 0 S +R T 1952 o - Mar 1 0 1 D +R T 1952 1954 - N 1 0 0 S +R T 1953 1959 - Ap 1 0 1 D +R T 1955 1961 - O 1 0 0 S +R T 1960 1961 - Jun 1 0 1 D +R T 1974 1975 - Ap 1 0 1 D +R T 1974 1975 - O 1 0 0 S +R T 1979 o - Jul 1 0 1 D +R T 1979 o - O 1 0 0 S +Z Asia/Taipei 8:6 - LMT 1896 +8 - CST 1937 O +9 - JST 1945 S 21 1 +8 T C%sT +R U 1961 1962 - Mar Sun>=16 3:30 1 D +R U 1961 1964 - N Sun>=1 3:30 0 S +R U 1963 o - Mar Sun>=16 0 1 D +R U 1964 o - Mar Sun>=16 3:30 1 D +R U 1965 o - Mar Sun>=16 0 1 D +R U 1965 o - O 31 0 0 S +R U 1966 1971 - Ap Sun>=16 3:30 1 D +R U 1966 1971 - O Sun>=16 3:30 0 S +R U 1972 1974 - Ap Sun>=15 0 1 D +R U 1972 1973 - O Sun>=15 0 0 S +R U 1974 1977 - O Sun>=15 3:30 0 S +R U 1975 1977 - Ap Sun>=15 3:30 1 D +R U 1978 1980 - Ap Sun>=15 0 1 D +R U 1978 1980 - O Sun>=15 0 0 S +Z Asia/Macau 7:34:20 - LMT 1911 D 31 16u +8 U C%sT +R V 1975 o - Ap 13 0 1 S +R V 1975 o - O 12 0 0 - +R V 1976 o - May 15 0 1 S +R V 1976 o - O 11 0 0 - +R V 1977 1980 - Ap Sun>=1 0 1 S +R V 1977 o - S 25 0 0 - +R V 1978 o - O 2 0 0 - +R V 1979 1997 - S lastSun 0 0 - +R V 1981 1998 - Mar lastSun 0 1 S +Z Asia/Nicosia 2:13:28 - LMT 1921 N 14 +2 V EE%sT 1998 S +2 O EE%sT +Z Asia/Famagusta 2:15:48 - LMT 1921 N 14 +2 V EE%sT 1998 S +2 O EE%sT 2016 S 8 +3 - +03 2017 O 29 1u +2 O EE%sT +Li Asia/Nicosia Europe/Nicosia +Z Asia/Tbilisi 2:59:11 - LMT 1880 +2:59:11 - TBMT 1924 May 2 +3 - +03 1957 Mar +4 M +04/+05 1991 Mar 31 2s +3 M +03/+04 1992 +3 W +03/+04 1994 S lastSun +4 W +04/+05 1996 O lastSun +4 1 +05 1997 Mar lastSun +4 W +04/+05 2004 Jun 27 +3 M +03/+04 2005 Mar lastSun 2 +4 - +04 +Z Asia/Dili 8:22:20 - LMT 1912 +8 - +08 1942 F 21 23 +9 - +09 1976 May 3 +8 - +08 2000 S 17 +9 - +09 +Z Asia/Kolkata 5:53:28 - LMT 1854 Jun 28 +5:53:20 - HMT 1870 +5:21:10 - MMT 1906 +5:30 - IST 1941 O +5:30 1 +0630 1942 May 15 +5:30 - IST 1942 S +5:30 1 +0630 1945 O 15 +5:30 - IST +Z Asia/Jakarta 7:7:12 - LMT 1867 Au 10 +7:7:12 - BMT 1923 D 31 23:47:12 +7:20 - +0720 1932 N +7:30 - +0730 1942 Mar 23 +9 - +09 1945 S 23 +7:30 - +0730 1948 May +8 - +08 1950 May +7:30 - +0730 1964 +7 - WIB +Z Asia/Pontianak 7:17:20 - LMT 1908 May +7:17:20 - PMT 1932 N +7:30 - +0730 1942 Ja 29 +9 - 
+09 1945 S 23 +7:30 - +0730 1948 May +8 - +08 1950 May +7:30 - +0730 1964 +8 - WITA 1988 +7 - WIB +Z Asia/Makassar 7:57:36 - LMT 1920 +7:57:36 - MMT 1932 N +8 - +08 1942 F 9 +9 - +09 1945 S 23 +8 - WITA +Z Asia/Jayapura 9:22:48 - LMT 1932 N +9 - +09 1944 S +9:30 - +0930 1964 +9 - WIT +R X 1978 1980 - Mar 21 0 1 - +R X 1978 o - O 21 0 0 - +R X 1979 o - S 19 0 0 - +R X 1980 o - S 23 0 0 - +R X 1991 o - May 3 0 1 - +R X 1992 1995 - Mar 22 0 1 - +R X 1991 1995 - S 22 0 0 - +R X 1996 o - Mar 21 0 1 - +R X 1996 o - S 21 0 0 - +R X 1997 1999 - Mar 22 0 1 - +R X 1997 1999 - S 22 0 0 - +R X 2000 o - Mar 21 0 1 - +R X 2000 o - S 21 0 0 - +R X 2001 2003 - Mar 22 0 1 - +R X 2001 2003 - S 22 0 0 - +R X 2004 o - Mar 21 0 1 - +R X 2004 o - S 21 0 0 - +R X 2005 o - Mar 22 0 1 - +R X 2005 o - S 22 0 0 - +R X 2008 o - Mar 21 0 1 - +R X 2008 o - S 21 0 0 - +R X 2009 2011 - Mar 22 0 1 - +R X 2009 2011 - S 22 0 0 - +R X 2012 o - Mar 21 0 1 - +R X 2012 o - S 21 0 0 - +R X 2013 2015 - Mar 22 0 1 - +R X 2013 2015 - S 22 0 0 - +R X 2016 o - Mar 21 0 1 - +R X 2016 o - S 21 0 0 - +R X 2017 2019 - Mar 22 0 1 - +R X 2017 2019 - S 22 0 0 - +R X 2020 o - Mar 21 0 1 - +R X 2020 o - S 21 0 0 - +R X 2021 2023 - Mar 22 0 1 - +R X 2021 2023 - S 22 0 0 - +R X 2024 o - Mar 21 0 1 - +R X 2024 o - S 21 0 0 - +R X 2025 2027 - Mar 22 0 1 - +R X 2025 2027 - S 22 0 0 - +R X 2028 2029 - Mar 21 0 1 - +R X 2028 2029 - S 21 0 0 - +R X 2030 2031 - Mar 22 0 1 - +R X 2030 2031 - S 22 0 0 - +R X 2032 2033 - Mar 21 0 1 - +R X 2032 2033 - S 21 0 0 - +R X 2034 2035 - Mar 22 0 1 - +R X 2034 2035 - S 22 0 0 - +R X 2036 ma - Mar 21 0 1 - +R X 2036 ma - S 21 0 0 - +Z Asia/Tehran 3:25:44 - LMT 1916 +3:25:44 - TMT 1946 +3:30 - +0330 1977 N +4 X +04/+05 1979 +3:30 X +0330/+0430 +R Y 1982 o - May 1 0 1 - +R Y 1982 1984 - O 1 0 0 - +R Y 1983 o - Mar 31 0 1 - +R Y 1984 1985 - Ap 1 0 1 - +R Y 1985 1990 - S lastSun 1s 0 - +R Y 1986 1990 - Mar lastSun 1s 1 - +R Y 1991 2007 - Ap 1 3s 1 - +R Y 1991 2007 - O 1 3s 0 - +Z Asia/Baghdad 2:57:40 - LMT 1890 +2:57:36 - BMT 1918 +3 - +03 1982 May +3 Y +03/+04 +R Z 1940 o - Jun 1 0 1 D +R Z 1942 1944 - N 1 0 0 S +R Z 1943 o - Ap 1 2 1 D +R Z 1944 o - Ap 1 0 1 D +R Z 1945 o - Ap 16 0 1 D +R Z 1945 o - N 1 2 0 S +R Z 1946 o - Ap 16 2 1 D +R Z 1946 o - N 1 0 0 S +R Z 1948 o - May 23 0 2 DD +R Z 1948 o - S 1 0 1 D +R Z 1948 1949 - N 1 2 0 S +R Z 1949 o - May 1 0 1 D +R Z 1950 o - Ap 16 0 1 D +R Z 1950 o - S 15 3 0 S +R Z 1951 o - Ap 1 0 1 D +R Z 1951 o - N 11 3 0 S +R Z 1952 o - Ap 20 2 1 D +R Z 1952 o - O 19 3 0 S +R Z 1953 o - Ap 12 2 1 D +R Z 1953 o - S 13 3 0 S +R Z 1954 o - Jun 13 0 1 D +R Z 1954 o - S 12 0 0 S +R Z 1955 o - Jun 11 2 1 D +R Z 1955 o - S 11 0 0 S +R Z 1956 o - Jun 3 0 1 D +R Z 1956 o - S 30 3 0 S +R Z 1957 o - Ap 29 2 1 D +R Z 1957 o - S 22 0 0 S +R Z 1974 o - Jul 7 0 1 D +R Z 1974 o - O 13 0 0 S +R Z 1975 o - Ap 20 0 1 D +R Z 1975 o - Au 31 0 0 S +R Z 1985 o - Ap 14 0 1 D +R Z 1985 o - S 15 0 0 S +R Z 1986 o - May 18 0 1 D +R Z 1986 o - S 7 0 0 S +R Z 1987 o - Ap 15 0 1 D +R Z 1987 o - S 13 0 0 S +R Z 1988 o - Ap 10 0 1 D +R Z 1988 o - S 4 0 0 S +R Z 1989 o - Ap 30 0 1 D +R Z 1989 o - S 3 0 0 S +R Z 1990 o - Mar 25 0 1 D +R Z 1990 o - Au 26 0 0 S +R Z 1991 o - Mar 24 0 1 D +R Z 1991 o - S 1 0 0 S +R Z 1992 o - Mar 29 0 1 D +R Z 1992 o - S 6 0 0 S +R Z 1993 o - Ap 2 0 1 D +R Z 1993 o - S 5 0 0 S +R Z 1994 o - Ap 1 0 1 D +R Z 1994 o - Au 28 0 0 S +R Z 1995 o - Mar 31 0 1 D +R Z 1995 o - S 3 0 0 S +R Z 1996 o - Mar 15 0 1 D +R Z 1996 o - S 16 0 0 S +R Z 1997 o - Mar 21 0 1 D +R Z 1997 o - S 14 0 0 S +R 
Z 1998 o - Mar 20 0 1 D +R Z 1998 o - S 6 0 0 S +R Z 1999 o - Ap 2 2 1 D +R Z 1999 o - S 3 2 0 S +R Z 2000 o - Ap 14 2 1 D +R Z 2000 o - O 6 1 0 S +R Z 2001 o - Ap 9 1 1 D +R Z 2001 o - S 24 1 0 S +R Z 2002 o - Mar 29 1 1 D +R Z 2002 o - O 7 1 0 S +R Z 2003 o - Mar 28 1 1 D +R Z 2003 o - O 3 1 0 S +R Z 2004 o - Ap 7 1 1 D +R Z 2004 o - S 22 1 0 S +R Z 2005 o - Ap 1 2 1 D +R Z 2005 o - O 9 2 0 S +R Z 2006 2010 - Mar F>=26 2 1 D +R Z 2006 o - O 1 2 0 S +R Z 2007 o - S 16 2 0 S +R Z 2008 o - O 5 2 0 S +R Z 2009 o - S 27 2 0 S +R Z 2010 o - S 12 2 0 S +R Z 2011 o - Ap 1 2 1 D +R Z 2011 o - O 2 2 0 S +R Z 2012 o - Mar F>=26 2 1 D +R Z 2012 o - S 23 2 0 S +R Z 2013 ma - Mar F>=23 2 1 D +R Z 2013 ma - O lastSun 2 0 S +Z Asia/Jerusalem 2:20:54 - LMT 1880 +2:20:40 - JMT 1918 +2 Z I%sT +R a 1948 o - May Sat>=1 24 1 D +R a 1948 1951 - S Sun>=9 0 0 S +R a 1949 o - Ap Sat>=1 24 1 D +R a 1950 1951 - May Sat>=1 24 1 D +Z Asia/Tokyo 9:18:59 - LMT 1887 D 31 15u +9 a J%sT +R b 1973 o - Jun 6 0 1 S +R b 1973 1975 - O 1 0 0 - +R b 1974 1977 - May 1 0 1 S +R b 1976 o - N 1 0 0 - +R b 1977 o - O 1 0 0 - +R b 1978 o - Ap 30 0 1 S +R b 1978 o - S 30 0 0 - +R b 1985 o - Ap 1 0 1 S +R b 1985 o - O 1 0 0 - +R b 1986 1988 - Ap F>=1 0 1 S +R b 1986 1990 - O F>=1 0 0 - +R b 1989 o - May 8 0 1 S +R b 1990 o - Ap 27 0 1 S +R b 1991 o - Ap 17 0 1 S +R b 1991 o - S 27 0 0 - +R b 1992 o - Ap 10 0 1 S +R b 1992 1993 - O F>=1 0 0 - +R b 1993 1998 - Ap F>=1 0 1 S +R b 1994 o - S F>=15 0 0 - +R b 1995 1998 - S F>=15 0s 0 - +R b 1999 o - Jul 1 0s 1 S +R b 1999 2002 - S lastF 0s 0 - +R b 2000 2001 - Mar lastTh 0s 1 S +R b 2002 2012 - Mar lastTh 24 1 S +R b 2003 o - O 24 0s 0 - +R b 2004 o - O 15 0s 0 - +R b 2005 o - S lastF 0s 0 - +R b 2006 2011 - O lastF 0s 0 - +R b 2013 o - D 20 0 0 - +R b 2014 ma - Mar lastTh 24 1 S +R b 2014 ma - O lastF 0s 0 - +Z Asia/Amman 2:23:44 - LMT 1931 +2 b EE%sT +Z Asia/Almaty 5:7:48 - LMT 1924 May 2 +5 - +05 1930 Jun 21 +6 M +06/+07 1991 Mar 31 2s +5 M +05/+06 1992 Ja 19 2s +6 M +06/+07 2004 O 31 2s +6 - +06 +Z Asia/Qyzylorda 4:21:52 - LMT 1924 May 2 +4 - +04 1930 Jun 21 +5 - +05 1981 Ap +5 1 +06 1981 O +6 - +06 1982 Ap +5 M +05/+06 1991 Mar 31 2s +4 M +04/+05 1991 S 29 2s +5 M +05/+06 1992 Ja 19 2s +6 M +06/+07 1992 Mar 29 2s +5 M +05/+06 2004 O 31 2s +6 - +06 +Z Asia/Aqtobe 3:48:40 - LMT 1924 May 2 +4 - +04 1930 Jun 21 +5 - +05 1981 Ap +5 1 +06 1981 O +6 - +06 1982 Ap +5 M +05/+06 1991 Mar 31 2s +4 M +04/+05 1992 Ja 19 2s +5 M +05/+06 2004 O 31 2s +5 - +05 +Z Asia/Aqtau 3:21:4 - LMT 1924 May 2 +4 - +04 1930 Jun 21 +5 - +05 1981 O +6 - +06 1982 Ap +5 M +05/+06 1991 Mar 31 2s +4 M +04/+05 1992 Ja 19 2s +5 M +05/+06 1994 S 25 2s +4 M +04/+05 2004 O 31 2s +5 - +05 +Z Asia/Atyrau 3:27:44 - LMT 1924 May 2 +3 - +03 1930 Jun 21 +5 - +05 1981 O +6 - +06 1982 Ap +5 M +05/+06 1991 Mar 31 2s +4 M +04/+05 1992 Ja 19 2s +5 M +05/+06 1999 Mar 28 2s +4 M +04/+05 2004 O 31 2s +5 - +05 +Z Asia/Oral 3:25:24 - LMT 1924 May 2 +3 - +03 1930 Jun 21 +5 - +05 1981 Ap +5 1 +06 1981 O +6 - +06 1982 Ap +5 M +05/+06 1989 Mar 26 2s +4 M +04/+05 1992 Ja 19 2s +5 M +05/+06 1992 Mar 29 2s +4 M +04/+05 2004 O 31 2s +5 - +05 +R c 1992 1996 - Ap Sun>=7 0s 1 - +R c 1992 1996 - S lastSun 0 0 - +R c 1997 2005 - Mar lastSun 2:30 1 - +R c 1997 2004 - O lastSun 2:30 0 - +Z Asia/Bishkek 4:58:24 - LMT 1924 May 2 +5 - +05 1930 Jun 21 +6 M +06/+07 1991 Mar 31 2s +5 M +05/+06 1991 Au 31 2 +5 c +05/+06 2005 Au 12 +6 - +06 +R d 1948 o - Jun 1 0 1 D +R d 1948 o - S 13 0 0 S +R d 1949 o - Ap 3 0 1 D +R d 1949 1951 - S Sun>=8 0 0 S +R d 1950 o - Ap 
1 0 1 D +R d 1951 o - May 6 0 1 D +R d 1955 o - May 5 0 1 D +R d 1955 o - S 9 0 0 S +R d 1956 o - May 20 0 1 D +R d 1956 o - S 30 0 0 S +R d 1957 1960 - May Sun>=1 0 1 D +R d 1957 1960 - S Sun>=18 0 0 S +R d 1987 1988 - May Sun>=8 2 1 D +R d 1987 1988 - O Sun>=8 3 0 S +Z Asia/Seoul 8:27:52 - LMT 1908 Ap +8:30 - KST 1912 +9 - JST 1945 S 8 +9 - KST 1954 Mar 21 +8:30 d K%sT 1961 Au 10 +9 d K%sT +Z Asia/Pyongyang 8:23 - LMT 1908 Ap +8:30 - KST 1912 +9 - JST 1945 Au 24 +9 - KST 2015 Au 15 +8:30 - KST 2018 May 5 +9 - KST +R e 1920 o - Mar 28 0 1 S +R e 1920 o - O 25 0 0 - +R e 1921 o - Ap 3 0 1 S +R e 1921 o - O 3 0 0 - +R e 1922 o - Mar 26 0 1 S +R e 1922 o - O 8 0 0 - +R e 1923 o - Ap 22 0 1 S +R e 1923 o - S 16 0 0 - +R e 1957 1961 - May 1 0 1 S +R e 1957 1961 - O 1 0 0 - +R e 1972 o - Jun 22 0 1 S +R e 1972 1977 - O 1 0 0 - +R e 1973 1977 - May 1 0 1 S +R e 1978 o - Ap 30 0 1 S +R e 1978 o - S 30 0 0 - +R e 1984 1987 - May 1 0 1 S +R e 1984 1991 - O 16 0 0 - +R e 1988 o - Jun 1 0 1 S +R e 1989 o - May 10 0 1 S +R e 1990 1992 - May 1 0 1 S +R e 1992 o - O 4 0 0 - +R e 1993 ma - Mar lastSun 0 1 S +R e 1993 1998 - S lastSun 0 0 - +R e 1999 ma - O lastSun 0 0 - +Z Asia/Beirut 2:22 - LMT 1880 +2 e EE%sT +R f 1935 1941 - S 14 0 0:20 - +R f 1935 1941 - D 14 0 0 - +Z Asia/Kuala_Lumpur 6:46:46 - LMT 1901 +6:55:25 - SMT 1905 Jun +7 - +07 1933 +7 0:20 +0720 1936 +7:20 - +0720 1941 S +7:30 - +0730 1942 F 16 +9 - +09 1945 S 12 +7:30 - +0730 1982 +8 - +08 +Z Asia/Kuching 7:21:20 - LMT 1926 Mar +7:30 - +0730 1933 +8 f +08/+0820 1942 F 16 +9 - +09 1945 S 12 +8 - +08 +Z Indian/Maldives 4:54 - LMT 1880 +4:54 - MMT 1960 +5 - +05 +R g 1983 1984 - Ap 1 0 1 - +R g 1983 o - O 1 0 0 - +R g 1985 1998 - Mar lastSun 0 1 - +R g 1984 1998 - S lastSun 0 0 - +R g 2001 o - Ap lastSat 2 1 - +R g 2001 2006 - S lastSat 2 0 - +R g 2002 2006 - Mar lastSat 2 1 - +R g 2015 2016 - Mar lastSat 2 1 - +R g 2015 2016 - S lastSat 0 0 - +Z Asia/Hovd 6:6:36 - LMT 1905 Au +6 - +06 1978 +7 g +07/+08 +Z Asia/Ulaanbaatar 7:7:32 - LMT 1905 Au +7 - +07 1978 +8 g +08/+09 +Z Asia/Choibalsan 7:38 - LMT 1905 Au +7 - +07 1978 +8 - +08 1983 Ap +9 g +09/+10 2008 Mar 31 +8 g +08/+09 +Z Asia/Kathmandu 5:41:16 - LMT 1920 +5:30 - +0530 1986 +5:45 - +0545 +R h 2002 o - Ap Sun>=2 0 1 S +R h 2002 o - O Sun>=2 0 0 - +R h 2008 o - Jun 1 0 1 S +R h 2008 2009 - N 1 0 0 - +R h 2009 o - Ap 15 0 1 S +Z Asia/Karachi 4:28:12 - LMT 1907 +5:30 - +0530 1942 S +5:30 1 +0630 1945 O 15 +5:30 - +0530 1951 S 30 +5 - +05 1971 Mar 26 +5 h PK%sT +R i 1999 2005 - Ap F>=15 0 1 S +R i 1999 2003 - O F>=15 0 0 - +R i 2004 o - O 1 1 0 - +R i 2005 o - O 4 2 0 - +R i 2006 2007 - Ap 1 0 1 S +R i 2006 o - S 22 0 0 - +R i 2007 o - S Th>=8 2 0 - +R i 2008 2009 - Mar lastF 0 1 S +R i 2008 o - S 1 0 0 - +R i 2009 o - S F>=1 1 0 - +R i 2010 o - Mar 26 0 1 S +R i 2010 o - Au 11 0 0 - +R i 2011 o - Ap 1 0:1 1 S +R i 2011 o - Au 1 0 0 - +R i 2011 o - Au 30 0 1 S +R i 2011 o - S 30 0 0 - +R i 2012 2014 - Mar lastTh 24 1 S +R i 2012 o - S 21 1 0 - +R i 2013 o - S F>=21 0 0 - +R i 2014 2015 - O F>=21 0 0 - +R i 2015 o - Mar lastF 24 1 S +R i 2016 ma - Mar Sat>=22 1 1 S +R i 2016 ma - O lastSat 1 0 - +Z Asia/Gaza 2:17:52 - LMT 1900 O +2 Z EET/EEST 1948 May 15 +2 B EE%sT 1967 Jun 5 +2 Z I%sT 1996 +2 b EE%sT 1999 +2 i EE%sT 2008 Au 29 +2 - EET 2008 S +2 i EE%sT 2010 +2 - EET 2010 Mar 27 0:1 +2 i EE%sT 2011 Au +2 - EET 2012 +2 i EE%sT +Z Asia/Hebron 2:20:23 - LMT 1900 O +2 Z EET/EEST 1948 May 15 +2 B EE%sT 1967 Jun 5 +2 Z I%sT 1996 +2 b EE%sT 1999 +2 i EE%sT +R j 1936 o - N 1 0 1 - +R j 1937 o - F 1 0 
0 - +R j 1954 o - Ap 12 0 1 - +R j 1954 o - Jul 1 0 0 - +R j 1978 o - Mar 22 0 1 - +R j 1978 o - S 21 0 0 - +Z Asia/Manila -15:56 - LMT 1844 D 31 +8:4 - LMT 1899 May 11 +8 j +08/+09 1942 May +9 - +09 1944 N +8 j +08/+09 +Z Asia/Qatar 3:26:8 - LMT 1920 +4 - +04 1972 Jun +3 - +03 +Li Asia/Qatar Asia/Bahrain +Z Asia/Riyadh 3:6:52 - LMT 1947 Mar 14 +3 - +03 +Li Asia/Riyadh Asia/Aden +Li Asia/Riyadh Asia/Kuwait +Z Asia/Singapore 6:55:25 - LMT 1901 +6:55:25 - SMT 1905 Jun +7 - +07 1933 +7 0:20 +0720 1936 +7:20 - +0720 1941 S +7:30 - +0730 1942 F 16 +9 - +09 1945 S 12 +7:30 - +0730 1982 +8 - +08 +Z Asia/Colombo 5:19:24 - LMT 1880 +5:19:32 - MMT 1906 +5:30 - +0530 1942 Ja 5 +5:30 0:30 +06 1942 S +5:30 1 +0630 1945 O 16 2 +5:30 - +0530 1996 May 25 +6:30 - +0630 1996 O 26 0:30 +6 - +06 2006 Ap 15 0:30 +5:30 - +0530 +R k 1920 1923 - Ap Sun>=15 2 1 S +R k 1920 1923 - O Sun>=1 2 0 - +R k 1962 o - Ap 29 2 1 S +R k 1962 o - O 1 2 0 - +R k 1963 1965 - May 1 2 1 S +R k 1963 o - S 30 2 0 - +R k 1964 o - O 1 2 0 - +R k 1965 o - S 30 2 0 - +R k 1966 o - Ap 24 2 1 S +R k 1966 1976 - O 1 2 0 - +R k 1967 1978 - May 1 2 1 S +R k 1977 1978 - S 1 2 0 - +R k 1983 1984 - Ap 9 2 1 S +R k 1983 1984 - O 1 2 0 - +R k 1986 o - F 16 2 1 S +R k 1986 o - O 9 2 0 - +R k 1987 o - Mar 1 2 1 S +R k 1987 1988 - O 31 2 0 - +R k 1988 o - Mar 15 2 1 S +R k 1989 o - Mar 31 2 1 S +R k 1989 o - O 1 2 0 - +R k 1990 o - Ap 1 2 1 S +R k 1990 o - S 30 2 0 - +R k 1991 o - Ap 1 0 1 S +R k 1991 1992 - O 1 0 0 - +R k 1992 o - Ap 8 0 1 S +R k 1993 o - Mar 26 0 1 S +R k 1993 o - S 25 0 0 - +R k 1994 1996 - Ap 1 0 1 S +R k 1994 2005 - O 1 0 0 - +R k 1997 1998 - Mar lastM 0 1 S +R k 1999 2006 - Ap 1 0 1 S +R k 2006 o - S 22 0 0 - +R k 2007 o - Mar lastF 0 1 S +R k 2007 o - N F>=1 0 0 - +R k 2008 o - Ap F>=1 0 1 S +R k 2008 o - N 1 0 0 - +R k 2009 o - Mar lastF 0 1 S +R k 2010 2011 - Ap F>=1 0 1 S +R k 2012 ma - Mar lastF 0 1 S +R k 2009 ma - O lastF 0 0 - +Z Asia/Damascus 2:25:12 - LMT 1920 +2 k EE%sT +Z Asia/Dushanbe 4:35:12 - LMT 1924 May 2 +5 - +05 1930 Jun 21 +6 M +06/+07 1991 Mar 31 2s +5 1 +05/+06 1991 S 9 2s +5 - +05 +Z Asia/Bangkok 6:42:4 - LMT 1880 +6:42:4 - BMT 1920 Ap +7 - +07 +Li Asia/Bangkok Asia/Phnom_Penh +Li Asia/Bangkok Asia/Vientiane +Z Asia/Ashgabat 3:53:32 - LMT 1924 May 2 +4 - +04 1930 Jun 21 +5 M +05/+06 1991 Mar 31 2 +4 M +04/+05 1992 Ja 19 2 +5 - +05 +Z Asia/Dubai 3:41:12 - LMT 1920 +4 - +04 +Li Asia/Dubai Asia/Muscat +Z Asia/Samarkand 4:27:53 - LMT 1924 May 2 +4 - +04 1930 Jun 21 +5 - +05 1981 Ap +5 1 +06 1981 O +6 - +06 1982 Ap +5 M +05/+06 1992 +5 - +05 +Z Asia/Tashkent 4:37:11 - LMT 1924 May 2 +5 - +05 1930 Jun 21 +6 M +06/+07 1991 Mar 31 2 +5 M +05/+06 1992 +5 - +05 +Z Asia/Ho_Chi_Minh 7:6:40 - LMT 1906 Jul +7:6:30 - PLMT 1911 May +7 - +07 1942 D 31 23 +8 - +08 1945 Mar 14 23 +9 - +09 1945 S 2 +7 - +07 1947 Ap +8 - +08 1955 Jul +7 - +07 1959 D 31 23 +8 - +08 1975 Jun 13 +7 - +07 +R l 1917 o - Ja 1 0:1 1 D +R l 1917 o - Mar 25 2 0 S +R l 1942 o - Ja 1 2 1 D +R l 1942 o - Mar 29 2 0 S +R l 1942 o - S 27 2 1 D +R l 1943 1944 - Mar lastSun 2 0 S +R l 1943 o - O 3 2 1 D +Z Australia/Darwin 8:43:20 - LMT 1895 F +9 - ACST 1899 May +9:30 l AC%sT +R m 1974 o - O lastSun 2s 1 D +R m 1975 o - Mar Sun>=1 2s 0 S +R m 1983 o - O lastSun 2s 1 D +R m 1984 o - Mar Sun>=1 2s 0 S +R m 1991 o - N 17 2s 1 D +R m 1992 o - Mar Sun>=1 2s 0 S +R m 2006 o - D 3 2s 1 D +R m 2007 2009 - Mar lastSun 2s 0 S +R m 2007 2008 - O lastSun 2s 1 D +Z Australia/Perth 7:43:24 - LMT 1895 D +8 l AW%sT 1943 Jul +8 m AW%sT +Z Australia/Eucla 8:35:28 - LMT 1895 
D +8:45 l +0845/+0945 1943 Jul +8:45 m +0845/+0945 +R n 1971 o - O lastSun 2s 1 D +R n 1972 o - F lastSun 2s 0 S +R n 1989 1991 - O lastSun 2s 1 D +R n 1990 1992 - Mar Sun>=1 2s 0 S +R o 1992 1993 - O lastSun 2s 1 D +R o 1993 1994 - Mar Sun>=1 2s 0 S +Z Australia/Brisbane 10:12:8 - LMT 1895 +10 l AE%sT 1971 +10 n AE%sT +Z Australia/Lindeman 9:55:56 - LMT 1895 +10 l AE%sT 1971 +10 n AE%sT 1992 Jul +10 o AE%sT +R p 1971 1985 - O lastSun 2s 1 D +R p 1986 o - O 19 2s 1 D +R p 1987 2007 - O lastSun 2s 1 D +R p 1972 o - F 27 2s 0 S +R p 1973 1985 - Mar Sun>=1 2s 0 S +R p 1986 1990 - Mar Sun>=15 2s 0 S +R p 1991 o - Mar 3 2s 0 S +R p 1992 o - Mar 22 2s 0 S +R p 1993 o - Mar 7 2s 0 S +R p 1994 o - Mar 20 2s 0 S +R p 1995 2005 - Mar lastSun 2s 0 S +R p 2006 o - Ap 2 2s 0 S +R p 2007 o - Mar lastSun 2s 0 S +R p 2008 ma - Ap Sun>=1 2s 0 S +R p 2008 ma - O Sun>=1 2s 1 D +Z Australia/Adelaide 9:14:20 - LMT 1895 F +9 - ACST 1899 May +9:30 l AC%sT 1971 +9:30 p AC%sT +R q 1967 o - O Sun>=1 2s 1 D +R q 1968 o - Mar lastSun 2s 0 S +R q 1968 1985 - O lastSun 2s 1 D +R q 1969 1971 - Mar Sun>=8 2s 0 S +R q 1972 o - F lastSun 2s 0 S +R q 1973 1981 - Mar Sun>=1 2s 0 S +R q 1982 1983 - Mar lastSun 2s 0 S +R q 1984 1986 - Mar Sun>=1 2s 0 S +R q 1986 o - O Sun>=15 2s 1 D +R q 1987 1990 - Mar Sun>=15 2s 0 S +R q 1987 o - O Sun>=22 2s 1 D +R q 1988 1990 - O lastSun 2s 1 D +R q 1991 1999 - O Sun>=1 2s 1 D +R q 1991 2005 - Mar lastSun 2s 0 S +R q 2000 o - Au lastSun 2s 1 D +R q 2001 ma - O Sun>=1 2s 1 D +R q 2006 o - Ap Sun>=1 2s 0 S +R q 2007 o - Mar lastSun 2s 0 S +R q 2008 ma - Ap Sun>=1 2s 0 S +Z Australia/Hobart 9:49:16 - LMT 1895 S +10 - AEST 1916 O 1 2 +10 1 AEDT 1917 F +10 l AE%sT 1967 +10 q AE%sT +Z Australia/Currie 9:35:28 - LMT 1895 S +10 - AEST 1916 O 1 2 +10 1 AEDT 1917 F +10 l AE%sT 1971 Jul +10 q AE%sT +R r 1971 1985 - O lastSun 2s 1 D +R r 1972 o - F lastSun 2s 0 S +R r 1973 1985 - Mar Sun>=1 2s 0 S +R r 1986 1990 - Mar Sun>=15 2s 0 S +R r 1986 1987 - O Sun>=15 2s 1 D +R r 1988 1999 - O lastSun 2s 1 D +R r 1991 1994 - Mar Sun>=1 2s 0 S +R r 1995 2005 - Mar lastSun 2s 0 S +R r 2000 o - Au lastSun 2s 1 D +R r 2001 2007 - O lastSun 2s 1 D +R r 2006 o - Ap Sun>=1 2s 0 S +R r 2007 o - Mar lastSun 2s 0 S +R r 2008 ma - Ap Sun>=1 2s 0 S +R r 2008 ma - O Sun>=1 2s 1 D +Z Australia/Melbourne 9:39:52 - LMT 1895 F +10 l AE%sT 1971 +10 r AE%sT +R s 1971 1985 - O lastSun 2s 1 D +R s 1972 o - F 27 2s 0 S +R s 1973 1981 - Mar Sun>=1 2s 0 S +R s 1982 o - Ap Sun>=1 2s 0 S +R s 1983 1985 - Mar Sun>=1 2s 0 S +R s 1986 1989 - Mar Sun>=15 2s 0 S +R s 1986 o - O 19 2s 1 D +R s 1987 1999 - O lastSun 2s 1 D +R s 1990 1995 - Mar Sun>=1 2s 0 S +R s 1996 2005 - Mar lastSun 2s 0 S +R s 2000 o - Au lastSun 2s 1 D +R s 2001 2007 - O lastSun 2s 1 D +R s 2006 o - Ap Sun>=1 2s 0 S +R s 2007 o - Mar lastSun 2s 0 S +R s 2008 ma - Ap Sun>=1 2s 0 S +R s 2008 ma - O Sun>=1 2s 1 D +Z Australia/Sydney 10:4:52 - LMT 1895 F +10 l AE%sT 1971 +10 s AE%sT +Z Australia/Broken_Hill 9:25:48 - LMT 1895 F +10 - AEST 1896 Au 23 +9 - ACST 1899 May +9:30 l AC%sT 1971 +9:30 s AC%sT 2000 +9:30 p AC%sT +R t 1981 1984 - O lastSun 2 1 - +R t 1982 1985 - Mar Sun>=1 2 0 - +R t 1985 o - O lastSun 2 0:30 - +R t 1986 1989 - Mar Sun>=15 2 0 - +R t 1986 o - O 19 2 0:30 - +R t 1987 1999 - O lastSun 2 0:30 - +R t 1990 1995 - Mar Sun>=1 2 0 - +R t 1996 2005 - Mar lastSun 2 0 - +R t 2000 o - Au lastSun 2 0:30 - +R t 2001 2007 - O lastSun 2 0:30 - +R t 2006 o - Ap Sun>=1 2 0 - +R t 2007 o - Mar lastSun 2 0 - +R t 2008 ma - Ap Sun>=1 2 0 - +R t 2008 ma - O Sun>=1 2 0:30 - 
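Editor's note: tzdata.zi above is machine-generated zic input in its compact form: 'R' introduces a Rule line, 'Z' a Zone, 'Li' a Link, rule names are shortened to short tokens ('A', 'B', ..., '%', '&'), 'o' in the TO column means "only" (same year as FROM), 'ma' means maximum, and month names are cut to an unambiguous prefix (Ja, F, Mar, Ap, May, Jun, Jul, Au, S, O, N, D). As an illustration only (column names follow the zic documentation; this is a sketch, not a zic parser), a Rule line from the excerpt splits into named columns like so:

# Illustrative sketch: name the columns of a compact zic Rule ('R') line.
# Columns per zic(8): NAME FROM TO TYPE IN ON AT SAVE LETTER/S
# (TYPE is always '-' in modern tzdata).
RULE_FIELDS = ("name", "from", "to", "type", "in", "on", "at", "save", "letters")

def parse_rule(line):
    tag, *rest = line.split()
    if tag != "R" or len(rest) != len(RULE_FIELDS):
        raise ValueError("not a compact zic Rule line: %r" % line)
    return dict(zip(RULE_FIELDS, rest))

# Usage, with the first rule of the excerpt:
# parse_rule("R A 1916 o - Jun 14 23s 1 S")
#   ->  {'name': 'A', 'from': '1916', 'to': 'o', 'type': '-',
#        'in': 'Jun', 'on': '14', 'at': '23s', 'save': '1', 'letters': 'S'}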
+Z Australia/Lord_Howe 10:36:20 - LMT 1895 F +10 - AEST 1981 Mar +10:30 t +1030/+1130 1985 Jul +10:30 t +1030/+11 +Z Antarctica/Macquarie 0 - -00 1899 N +10 - AEST 1916 O 1 2 +10 1 AEDT 1917 F +10 l AE%sT 1919 Ap 1 0s +0 - -00 1948 Mar 25 +10 l AE%sT 1967 +10 q AE%sT 2010 Ap 4 3 +11 - +11 +Z Indian/Christmas 7:2:52 - LMT 1895 F +7 - +07 +Z Indian/Cocos 6:27:40 - LMT 1900 +6:30 - +0630 +R u 1998 1999 - N Sun>=1 2 1 - +R u 1999 2000 - F lastSun 3 0 - +R u 2009 o - N 29 2 1 - +R u 2010 o - Mar lastSun 3 0 - +R u 2010 2013 - O Sun>=21 2 1 - +R u 2011 o - Mar Sun>=1 3 0 - +R u 2012 2013 - Ja Sun>=18 3 0 - +R u 2014 o - Ja Sun>=18 2 0 - +R u 2014 ma - N Sun>=1 2 1 - +R u 2015 ma - Ja Sun>=14 3 0 - +Z Pacific/Fiji 11:55:44 - LMT 1915 O 26 +12 u +12/+13 +Z Pacific/Gambier -8:59:48 - LMT 1912 O +-9 - -09 +Z Pacific/Marquesas -9:18 - LMT 1912 O +-9:30 - -0930 +Z Pacific/Tahiti -9:58:16 - LMT 1912 O +-10 - -10 +Z Pacific/Guam -14:21 - LMT 1844 D 31 +9:39 - LMT 1901 +10 - GST 2000 D 23 +10 - ChST +Li Pacific/Guam Pacific/Saipan +Z Pacific/Tarawa 11:32:4 - LMT 1901 +12 - +12 +Z Pacific/Enderbury -11:24:20 - LMT 1901 +-12 - -12 1979 O +-11 - -11 1994 D 31 +13 - +13 +Z Pacific/Kiritimati -10:29:20 - LMT 1901 +-10:40 - -1040 1979 O +-10 - -10 1994 D 31 +14 - +14 +Z Pacific/Majuro 11:24:48 - LMT 1901 +11 - +11 1969 O +12 - +12 +Z Pacific/Kwajalein 11:9:20 - LMT 1901 +11 - +11 1969 O +-12 - -12 1993 Au 20 +12 - +12 +Z Pacific/Chuuk 10:7:8 - LMT 1901 +10 - +10 +Z Pacific/Pohnpei 10:32:52 - LMT 1901 +11 - +11 +Z Pacific/Kosrae 10:51:56 - LMT 1901 +11 - +11 1969 O +12 - +12 1999 +11 - +11 +Z Pacific/Nauru 11:7:40 - LMT 1921 Ja 15 +11:30 - +1130 1942 Mar 15 +9 - +09 1944 Au 15 +11:30 - +1130 1979 May +12 - +12 +R v 1977 1978 - D Sun>=1 0 1 - +R v 1978 1979 - F 27 0 0 - +R v 1996 o - D 1 2s 1 - +R v 1997 o - Mar 2 2s 0 - +Z Pacific/Noumea 11:5:48 - LMT 1912 Ja 13 +11 v +11/+12 +R w 1927 o - N 6 2 1 S +R w 1928 o - Mar 4 2 0 M +R w 1928 1933 - O Sun>=8 2 0:30 S +R w 1929 1933 - Mar Sun>=15 2 0 M +R w 1934 1940 - Ap lastSun 2 0 M +R w 1934 1940 - S lastSun 2 0:30 S +R w 1946 o - Ja 1 0 0 S +R w 1974 o - N Sun>=1 2s 1 D +R x 1974 o - N Sun>=1 2:45s 1 - +R w 1975 o - F lastSun 2s 0 S +R x 1975 o - F lastSun 2:45s 0 - +R w 1975 1988 - O lastSun 2s 1 D +R x 1975 1988 - O lastSun 2:45s 1 - +R w 1976 1989 - Mar Sun>=1 2s 0 S +R x 1976 1989 - Mar Sun>=1 2:45s 0 - +R w 1989 o - O Sun>=8 2s 1 D +R x 1989 o - O Sun>=8 2:45s 1 - +R w 1990 2006 - O Sun>=1 2s 1 D +R x 1990 2006 - O Sun>=1 2:45s 1 - +R w 1990 2007 - Mar Sun>=15 2s 0 S +R x 1990 2007 - Mar Sun>=15 2:45s 0 - +R w 2007 ma - S lastSun 2s 1 D +R x 2007 ma - S lastSun 2:45s 1 - +R w 2008 ma - Ap Sun>=1 2s 0 S +R x 2008 ma - Ap Sun>=1 2:45s 0 - +Z Pacific/Auckland 11:39:4 - LMT 1868 N 2 +11:30 w NZ%sT 1946 +12 w NZ%sT +Z Pacific/Chatham 12:13:48 - LMT 1868 N 2 +12:15 - +1215 1946 +12:45 x +1245/+1345 +Li Pacific/Auckland Antarctica/McMurdo +R y 1978 o - N 12 0 0:30 - +R y 1979 1991 - Mar Sun>=1 0 0 - +R y 1979 1990 - O lastSun 0 0:30 - +Z Pacific/Rarotonga -10:39:4 - LMT 1901 +-10:30 - -1030 1978 N 12 +-10 y -10/-0930 +Z Pacific/Niue -11:19:40 - LMT 1901 +-11:20 - -1120 1951 +-11:30 - -1130 1978 O +-11 - -11 +Z Pacific/Norfolk 11:11:52 - LMT 1901 +11:12 - +1112 1951 +11:30 - +1130 1974 O 27 2 +11:30 1 +1230 1975 Mar 2 2 +11:30 - +1130 2015 O 4 2 +11 - +11 +Z Pacific/Palau 8:57:56 - LMT 1901 +9 - +09 +Z Pacific/Port_Moresby 9:48:40 - LMT 1880 +9:48:32 - PMMT 1895 +10 - +10 +Z Pacific/Bougainville 10:22:16 - LMT 1880 +9:48:32 - PMMT 1895 +10 - +10 1942 Jul +9 - +09 1945 
Au 21 +10 - +10 2014 D 28 2 +11 - +11 +Z Pacific/Pitcairn -8:40:20 - LMT 1901 +-8:30 - -0830 1998 Ap 27 +-8 - -08 +Z Pacific/Pago_Pago 12:37:12 - LMT 1892 Jul 5 +-11:22:48 - LMT 1911 +-11 - SST +Li Pacific/Pago_Pago Pacific/Midway +R z 2010 o - S lastSun 0 1 - +R z 2011 o - Ap Sat>=1 4 0 - +R z 2011 o - S lastSat 3 1 - +R z 2012 ma - Ap Sun>=1 4 0 - +R z 2012 ma - S lastSun 3 1 - +Z Pacific/Apia 12:33:4 - LMT 1892 Jul 5 +-11:26:56 - LMT 1911 +-11:30 - -1130 1950 +-11 z -11/-10 2011 D 29 24 +13 z +13/+14 +Z Pacific/Guadalcanal 10:39:48 - LMT 1912 O +11 - +11 +Z Pacific/Fakaofo -11:24:56 - LMT 1901 +-11 - -11 2011 D 30 +13 - +13 +R ! 1999 o - O 7 2s 1 - +R ! 2000 o - Mar 19 2s 0 - +R ! 2000 2001 - N Sun>=1 2 1 - +R ! 2001 2002 - Ja lastSun 2 0 - +R ! 2016 o - N Sun>=1 2 1 - +R ! 2017 o - Ja Sun>=15 3 0 - +Z Pacific/Tongatapu 12:19:20 - LMT 1901 +12:20 - +1220 1941 +13 - +13 1999 +13 ! +13/+14 +Z Pacific/Funafuti 11:56:52 - LMT 1901 +12 - +12 +Z Pacific/Wake 11:6:28 - LMT 1901 +12 - +12 +R $ 1983 o - S 25 0 1 - +R $ 1984 1991 - Mar Sun>=23 0 0 - +R $ 1984 o - O 23 0 1 - +R $ 1985 1991 - S Sun>=23 0 1 - +R $ 1992 1993 - Ja Sun>=23 0 0 - +R $ 1992 o - O Sun>=23 0 1 - +Z Pacific/Efate 11:13:16 - LMT 1912 Ja 13 +11 $ +11/+12 +Z Pacific/Wallis 12:15:20 - LMT 1901 +12 - +12 +R % 1916 o - May 21 2s 1 BST +R % 1916 o - O 1 2s 0 GMT +R % 1917 o - Ap 8 2s 1 BST +R % 1917 o - S 17 2s 0 GMT +R % 1918 o - Mar 24 2s 1 BST +R % 1918 o - S 30 2s 0 GMT +R % 1919 o - Mar 30 2s 1 BST +R % 1919 o - S 29 2s 0 GMT +R % 1920 o - Mar 28 2s 1 BST +R % 1920 o - O 25 2s 0 GMT +R % 1921 o - Ap 3 2s 1 BST +R % 1921 o - O 3 2s 0 GMT +R % 1922 o - Mar 26 2s 1 BST +R % 1922 o - O 8 2s 0 GMT +R % 1923 o - Ap Sun>=16 2s 1 BST +R % 1923 1924 - S Sun>=16 2s 0 GMT +R % 1924 o - Ap Sun>=9 2s 1 BST +R % 1925 1926 - Ap Sun>=16 2s 1 BST +R % 1925 1938 - O Sun>=2 2s 0 GMT +R % 1927 o - Ap Sun>=9 2s 1 BST +R % 1928 1929 - Ap Sun>=16 2s 1 BST +R % 1930 o - Ap Sun>=9 2s 1 BST +R % 1931 1932 - Ap Sun>=16 2s 1 BST +R % 1933 o - Ap Sun>=9 2s 1 BST +R % 1934 o - Ap Sun>=16 2s 1 BST +R % 1935 o - Ap Sun>=9 2s 1 BST +R % 1936 1937 - Ap Sun>=16 2s 1 BST +R % 1938 o - Ap Sun>=9 2s 1 BST +R % 1939 o - Ap Sun>=16 2s 1 BST +R % 1939 o - N Sun>=16 2s 0 GMT +R % 1940 o - F Sun>=23 2s 1 BST +R % 1941 o - May Sun>=2 1s 2 BDST +R % 1941 1943 - Au Sun>=9 1s 1 BST +R % 1942 1944 - Ap Sun>=2 1s 2 BDST +R % 1944 o - S Sun>=16 1s 1 BST +R % 1945 o - Ap M>=2 1s 2 BDST +R % 1945 o - Jul Sun>=9 1s 1 BST +R % 1945 1946 - O Sun>=2 2s 0 GMT +R % 1946 o - Ap Sun>=9 2s 1 BST +R % 1947 o - Mar 16 2s 1 BST +R % 1947 o - Ap 13 1s 2 BDST +R % 1947 o - Au 10 1s 1 BST +R % 1947 o - N 2 2s 0 GMT +R % 1948 o - Mar 14 2s 1 BST +R % 1948 o - O 31 2s 0 GMT +R % 1949 o - Ap 3 2s 1 BST +R % 1949 o - O 30 2s 0 GMT +R % 1950 1952 - Ap Sun>=14 2s 1 BST +R % 1950 1952 - O Sun>=21 2s 0 GMT +R % 1953 o - Ap Sun>=16 2s 1 BST +R % 1953 1960 - O Sun>=2 2s 0 GMT +R % 1954 o - Ap Sun>=9 2s 1 BST +R % 1955 1956 - Ap Sun>=16 2s 1 BST +R % 1957 o - Ap Sun>=9 2s 1 BST +R % 1958 1959 - Ap Sun>=16 2s 1 BST +R % 1960 o - Ap Sun>=9 2s 1 BST +R % 1961 1963 - Mar lastSun 2s 1 BST +R % 1961 1968 - O Sun>=23 2s 0 GMT +R % 1964 1967 - Mar Sun>=19 2s 1 BST +R % 1968 o - F 18 2s 1 BST +R % 1972 1980 - Mar Sun>=16 2s 1 BST +R % 1972 1980 - O Sun>=23 2s 0 GMT +R % 1981 1995 - Mar lastSun 1u 1 BST +R % 1981 1989 - O Sun>=23 1u 0 GMT +R % 1990 1995 - O Sun>=22 1u 0 GMT +Z Europe/London -0:1:15 - LMT 1847 D 1 0s +0 % %s 1968 O 27 +1 - BST 1971 O 31 2u +0 % %s 1996 +0 O GMT/BST +Li Europe/London Europe/Jersey 
+Li Europe/London Europe/Guernsey +Li Europe/London Europe/Isle_of_Man +R & 1971 o - O 31 2u -1 - +R & 1972 1980 - Mar Sun>=16 2u 0 - +R & 1972 1980 - O Sun>=23 2u -1 - +R & 1981 ma - Mar lastSun 1u 0 - +R & 1981 1989 - O Sun>=23 1u -1 - +R & 1990 1995 - O Sun>=22 1u -1 - +R & 1996 ma - O lastSun 1u -1 - +Z Europe/Dublin -0:25 - LMT 1880 Au 2 +-0:25:21 - DMT 1916 May 21 2s +-0:25:21 1 IST 1916 O 1 2s +0 % %s 1921 D 6 +0 % GMT/IST 1940 F 25 2s +0 1 IST 1946 O 6 2s +0 - GMT 1947 Mar 16 2s +0 1 IST 1947 N 2 2s +0 - GMT 1948 Ap 18 2s +0 % GMT/IST 1968 O 27 +1 & IST/GMT +R O 1977 1980 - Ap Sun>=1 1u 1 S +R O 1977 o - S lastSun 1u 0 - +R O 1978 o - O 1 1u 0 - +R O 1979 1995 - S lastSun 1u 0 - +R O 1981 ma - Mar lastSun 1u 1 S +R O 1996 ma - O lastSun 1u 0 - +R ' 1977 1980 - Ap Sun>=1 1s 1 S +R ' 1977 o - S lastSun 1s 0 - +R ' 1978 o - O 1 1s 0 - +R ' 1979 1995 - S lastSun 1s 0 - +R ' 1981 ma - Mar lastSun 1s 1 S +R ' 1996 ma - O lastSun 1s 0 - +R ( 1916 o - Ap 30 23 1 S +R ( 1916 o - O 1 1 0 - +R ( 1917 1918 - Ap M>=15 2s 1 S +R ( 1917 1918 - S M>=15 2s 0 - +R ( 1940 o - Ap 1 2s 1 S +R ( 1942 o - N 2 2s 0 - +R ( 1943 o - Mar 29 2s 1 S +R ( 1943 o - O 4 2s 0 - +R ( 1944 1945 - Ap M>=1 2s 1 S +R ( 1944 o - O 2 2s 0 - +R ( 1945 o - S 16 2s 0 - +R ( 1977 1980 - Ap Sun>=1 2s 1 S +R ( 1977 o - S lastSun 2s 0 - +R ( 1978 o - O 1 2s 0 - +R ( 1979 1995 - S lastSun 2s 0 - +R ( 1981 ma - Mar lastSun 2s 1 S +R ( 1996 ma - O lastSun 2s 0 - +R W 1977 1980 - Ap Sun>=1 0 1 S +R W 1977 o - S lastSun 0 0 - +R W 1978 o - O 1 0 0 - +R W 1979 1995 - S lastSun 0 0 - +R W 1981 ma - Mar lastSun 0 1 S +R W 1996 ma - O lastSun 0 0 - +R M 1917 o - Jul 1 23 1 MST +R M 1917 o - D 28 0 0 MMT +R M 1918 o - May 31 22 2 MDST +R M 1918 o - S 16 1 1 MST +R M 1919 o - May 31 23 2 MDST +R M 1919 o - Jul 1 0u 1 MSD +R M 1919 o - Au 16 0 0 MSK +R M 1921 o - F 14 23 1 MSD +R M 1921 o - Mar 20 23 2 +05 +R M 1921 o - S 1 0 1 MSD +R M 1921 o - O 1 0 0 - +R M 1981 1984 - Ap 1 0 1 S +R M 1981 1983 - O 1 0 0 - +R M 1984 1995 - S lastSun 2s 0 - +R M 1985 2010 - Mar lastSun 2s 1 S +R M 1996 2010 - O lastSun 2s 0 - +Z WET 0 O WE%sT +Z CET 1 ( CE%sT +Z MET 1 ( ME%sT +Z EET 2 O EE%sT +R ) 1940 o - Jun 16 0 1 S +R ) 1942 o - N 2 3 0 - +R ) 1943 o - Mar 29 2 1 S +R ) 1943 o - Ap 10 3 0 - +R ) 1974 o - May 4 0 1 S +R ) 1974 o - O 2 0 0 - +R ) 1975 o - May 1 0 1 S +R ) 1975 o - O 2 0 0 - +R ) 1976 o - May 2 0 1 S +R ) 1976 o - O 3 0 0 - +R ) 1977 o - May 8 0 1 S +R ) 1977 o - O 2 0 0 - +R ) 1978 o - May 6 0 1 S +R ) 1978 o - O 1 0 0 - +R ) 1979 o - May 5 0 1 S +R ) 1979 o - S 30 0 0 - +R ) 1980 o - May 3 0 1 S +R ) 1980 o - O 4 0 0 - +R ) 1981 o - Ap 26 0 1 S +R ) 1981 o - S 27 0 0 - +R ) 1982 o - May 2 0 1 S +R ) 1982 o - O 3 0 0 - +R ) 1983 o - Ap 18 0 1 S +R ) 1983 o - O 1 0 0 - +R ) 1984 o - Ap 1 0 1 S +Z Europe/Tirane 1:19:20 - LMT 1914 +1 - CET 1940 Jun 16 +1 ) CE%sT 1984 Jul +1 O CE%sT +Z Europe/Andorra 0:6:4 - LMT 1901 +0 - WET 1946 S 30 +1 - CET 1985 Mar 31 2 +1 O CE%sT +R * 1920 o - Ap 5 2s 1 S +R * 1920 o - S 13 2s 0 - +R * 1946 o - Ap 14 2s 1 S +R * 1946 1948 - O Sun>=1 2s 0 - +R * 1947 o - Ap 6 2s 1 S +R * 1948 o - Ap 18 2s 1 S +R * 1980 o - Ap 6 0 1 S +R * 1980 o - S 28 0 0 - +Z Europe/Vienna 1:5:21 - LMT 1893 Ap +1 ( CE%sT 1920 +1 * CE%sT 1940 Ap 1 2s +1 ( CE%sT 1945 Ap 2 2s +1 1 CEST 1945 Ap 12 2s +1 - CET 1946 +1 * CE%sT 1981 +1 O CE%sT +Z Europe/Minsk 1:50:16 - LMT 1880 +1:50 - MMT 1924 May 2 +2 - EET 1930 Jun 21 +3 - MSK 1941 Jun 28 +1 ( CE%sT 1944 Jul 3 +3 M MSK/MSD 1990 +3 - MSK 1991 Mar 31 2s +2 M EE%sT 2011 Mar 27 2s +3 - +03 
+R + 1918 o - Mar 9 0s 1 S +R + 1918 1919 - O Sat>=1 23s 0 - +R + 1919 o - Mar 1 23s 1 S +R + 1920 o - F 14 23s 1 S +R + 1920 o - O 23 23s 0 - +R + 1921 o - Mar 14 23s 1 S +R + 1921 o - O 25 23s 0 - +R + 1922 o - Mar 25 23s 1 S +R + 1922 1927 - O Sat>=1 23s 0 - +R + 1923 o - Ap 21 23s 1 S +R + 1924 o - Mar 29 23s 1 S +R + 1925 o - Ap 4 23s 1 S +R + 1926 o - Ap 17 23s 1 S +R + 1927 o - Ap 9 23s 1 S +R + 1928 o - Ap 14 23s 1 S +R + 1928 1938 - O Sun>=2 2s 0 - +R + 1929 o - Ap 21 2s 1 S +R + 1930 o - Ap 13 2s 1 S +R + 1931 o - Ap 19 2s 1 S +R + 1932 o - Ap 3 2s 1 S +R + 1933 o - Mar 26 2s 1 S +R + 1934 o - Ap 8 2s 1 S +R + 1935 o - Mar 31 2s 1 S +R + 1936 o - Ap 19 2s 1 S +R + 1937 o - Ap 4 2s 1 S +R + 1938 o - Mar 27 2s 1 S +R + 1939 o - Ap 16 2s 1 S +R + 1939 o - N 19 2s 0 - +R + 1940 o - F 25 2s 1 S +R + 1944 o - S 17 2s 0 - +R + 1945 o - Ap 2 2s 1 S +R + 1945 o - S 16 2s 0 - +R + 1946 o - May 19 2s 1 S +R + 1946 o - O 7 2s 0 - +Z Europe/Brussels 0:17:30 - LMT 1880 +0:17:30 - BMT 1892 May 1 12 +0 - WET 1914 N 8 +1 - CET 1916 May +1 ( CE%sT 1918 N 11 11u +0 + WE%sT 1940 May 20 2s +1 ( CE%sT 1944 S 3 +1 + CE%sT 1977 +1 O CE%sT +R , 1979 o - Mar 31 23 1 S +R , 1979 o - O 1 1 0 - +R , 1980 1982 - Ap Sat>=1 23 1 S +R , 1980 o - S 29 1 0 - +R , 1981 o - S 27 2 0 - +Z Europe/Sofia 1:33:16 - LMT 1880 +1:56:56 - IMT 1894 N 30 +2 - EET 1942 N 2 3 +1 ( CE%sT 1945 +1 - CET 1945 Ap 2 3 +2 - EET 1979 Mar 31 23 +2 , EE%sT 1982 S 26 3 +2 ( EE%sT 1991 +2 W EE%sT 1997 +2 O EE%sT +R . 1945 o - Ap M>=1 2s 1 S +R . 1945 o - O 1 2s 0 - +R . 1946 o - May 6 2s 1 S +R . 1946 1949 - O Sun>=1 2s 0 - +R . 1947 1948 - Ap Sun>=15 2s 1 S +R . 1949 o - Ap 9 2s 1 S +Z Europe/Prague 0:57:44 - LMT 1850 +0:57:44 - PMT 1891 O +1 ( CE%sT 1945 May 9 +1 . CE%sT 1946 D 1 3 +1 -1 GMT 1947 F 23 2 +1 . 
CE%sT 1979 +1 O CE%sT +R / 1916 o - May 14 23 1 S +R / 1916 o - S 30 23 0 - +R / 1940 o - May 15 0 1 S +R / 1945 o - Ap 2 2s 1 S +R / 1945 o - Au 15 2s 0 - +R / 1946 o - May 1 2s 1 S +R / 1946 o - S 1 2s 0 - +R / 1947 o - May 4 2s 1 S +R / 1947 o - Au 10 2s 0 - +R / 1948 o - May 9 2s 1 S +R / 1948 o - Au 8 2s 0 - +Z Europe/Copenhagen 0:50:20 - LMT 1890 +0:50:20 - CMT 1894 +1 / CE%sT 1942 N 2 2s +1 ( CE%sT 1945 Ap 2 2 +1 / CE%sT 1980 +1 O CE%sT +Z Atlantic/Faroe -0:27:4 - LMT 1908 Ja 11 +0 - WET 1981 +0 O WE%sT +R : 1991 1992 - Mar lastSun 2 1 D +R : 1991 1992 - S lastSun 2 0 S +R : 1993 2006 - Ap Sun>=1 2 1 D +R : 1993 2006 - O lastSun 2 0 S +R : 2007 ma - Mar Sun>=8 2 1 D +R : 2007 ma - N Sun>=1 2 0 S +Z America/Danmarkshavn -1:14:40 - LMT 1916 Jul 28 +-3 - -03 1980 Ap 6 2 +-3 O -03/-02 1996 +0 - GMT +Z America/Scoresbysund -1:27:52 - LMT 1916 Jul 28 +-2 - -02 1980 Ap 6 2 +-2 ( -02/-01 1981 Mar 29 +-1 O -01/+00 +Z America/Godthab -3:26:56 - LMT 1916 Jul 28 +-3 - -03 1980 Ap 6 2 +-3 O -03/-02 +Z America/Thule -4:35:8 - LMT 1916 Jul 28 +-4 : A%sT +Z Europe/Tallinn 1:39 - LMT 1880 +1:39 - TMT 1918 F +1 ( CE%sT 1919 Jul +1:39 - TMT 1921 May +2 - EET 1940 Au 6 +3 - MSK 1941 S 15 +1 ( CE%sT 1944 S 22 +3 M MSK/MSD 1989 Mar 26 2s +2 1 EEST 1989 S 24 2s +2 ( EE%sT 1998 S 22 +2 O EE%sT 1999 O 31 4 +2 - EET 2002 F 21 +2 O EE%sT +R ; 1942 o - Ap 2 24 1 S +R ; 1942 o - O 4 1 0 - +R ; 1981 1982 - Mar lastSun 2 1 S +R ; 1981 1982 - S lastSun 3 0 - +Z Europe/Helsinki 1:39:49 - LMT 1878 May 31 +1:39:49 - HMT 1921 May +2 ; EE%sT 1983 +2 O EE%sT +Li Europe/Helsinki Europe/Mariehamn +R < 1916 o - Jun 14 23s 1 S +R < 1916 1919 - O Sun>=1 23s 0 - +R < 1917 o - Mar 24 23s 1 S +R < 1918 o - Mar 9 23s 1 S +R < 1919 o - Mar 1 23s 1 S +R < 1920 o - F 14 23s 1 S +R < 1920 o - O 23 23s 0 - +R < 1921 o - Mar 14 23s 1 S +R < 1921 o - O 25 23s 0 - +R < 1922 o - Mar 25 23s 1 S +R < 1922 1938 - O Sat>=1 23s 0 - +R < 1923 o - May 26 23s 1 S +R < 1924 o - Mar 29 23s 1 S +R < 1925 o - Ap 4 23s 1 S +R < 1926 o - Ap 17 23s 1 S +R < 1927 o - Ap 9 23s 1 S +R < 1928 o - Ap 14 23s 1 S +R < 1929 o - Ap 20 23s 1 S +R < 1930 o - Ap 12 23s 1 S +R < 1931 o - Ap 18 23s 1 S +R < 1932 o - Ap 2 23s 1 S +R < 1933 o - Mar 25 23s 1 S +R < 1934 o - Ap 7 23s 1 S +R < 1935 o - Mar 30 23s 1 S +R < 1936 o - Ap 18 23s 1 S +R < 1937 o - Ap 3 23s 1 S +R < 1938 o - Mar 26 23s 1 S +R < 1939 o - Ap 15 23s 1 S +R < 1939 o - N 18 23s 0 - +R < 1940 o - F 25 2 1 S +R < 1941 o - May 5 0 2 M +R < 1941 o - O 6 0 1 S +R < 1942 o - Mar 9 0 2 M +R < 1942 o - N 2 3 1 S +R < 1943 o - Mar 29 2 2 M +R < 1943 o - O 4 3 1 S +R < 1944 o - Ap 3 2 2 M +R < 1944 o - O 8 1 1 S +R < 1945 o - Ap 2 2 2 M +R < 1945 o - S 16 3 0 - +R < 1976 o - Mar 28 1 1 S +R < 1976 o - S 26 1 0 - +Z Europe/Paris 0:9:21 - LMT 1891 Mar 15 0:1 +0:9:21 - PMT 1911 Mar 11 0:1 +0 < WE%sT 1940 Jun 14 23 +1 ( CE%sT 1944 Au 25 +0 < WE%sT 1945 S 16 3 +1 < CE%sT 1977 +1 O CE%sT +R = 1946 o - Ap 14 2s 1 S +R = 1946 o - O 7 2s 0 - +R = 1947 1949 - O Sun>=1 2s 0 - +R = 1947 o - Ap 6 3s 1 S +R = 1947 o - May 11 2s 2 M +R = 1947 o - Jun 29 3 1 S +R = 1948 o - Ap 18 2s 1 S +R = 1949 o - Ap 10 2s 1 S +R > 1945 o - May 24 2 2 M +R > 1945 o - S 24 3 1 S +R > 1945 o - N 18 2s 0 - +Z Europe/Berlin 0:53:28 - LMT 1893 Ap +1 ( CE%sT 1945 May 24 2 +1 > CE%sT 1946 +1 = CE%sT 1980 +1 O CE%sT +Li Europe/Zurich Europe/Busingen +Z Europe/Gibraltar -0:21:24 - LMT 1880 Au 2 0s +0 % %s 1957 Ap 14 2 +1 - CET 1982 +1 O CE%sT +R ? 1932 o - Jul 7 0 1 S +R ? 1932 o - S 1 0 0 - +R ? 1941 o - Ap 7 0 1 S +R ? 1942 o - N 2 3 0 - +R ? 
1943 o - Mar 30 0 1 S +R ? 1943 o - O 4 0 0 - +R ? 1952 o - Jul 1 0 1 S +R ? 1952 o - N 2 0 0 - +R ? 1975 o - Ap 12 0s 1 S +R ? 1975 o - N 26 0s 0 - +R ? 1976 o - Ap 11 2s 1 S +R ? 1976 o - O 10 2s 0 - +R ? 1977 1978 - Ap Sun>=1 2s 1 S +R ? 1977 o - S 26 2s 0 - +R ? 1978 o - S 24 4 0 - +R ? 1979 o - Ap 1 9 1 S +R ? 1979 o - S 29 2 0 - +R ? 1980 o - Ap 1 0 1 S +R ? 1980 o - S 28 0 0 - +Z Europe/Athens 1:34:52 - LMT 1895 S 14 +1:34:52 - AMT 1916 Jul 28 0:1 +2 ? EE%sT 1941 Ap 30 +1 ? CE%sT 1944 Ap 4 +2 ? EE%sT 1981 +2 O EE%sT +R @ 1918 o - Ap 1 3 1 S +R @ 1918 o - S 16 3 0 - +R @ 1919 o - Ap 15 3 1 S +R @ 1919 o - N 24 3 0 - +R @ 1945 o - May 1 23 1 S +R @ 1945 o - N 1 0 0 - +R @ 1946 o - Mar 31 2s 1 S +R @ 1946 1949 - O Sun>=1 2s 0 - +R @ 1947 1949 - Ap Sun>=4 2s 1 S +R @ 1950 o - Ap 17 2s 1 S +R @ 1950 o - O 23 2s 0 - +R @ 1954 1955 - May 23 0 1 S +R @ 1954 1955 - O 3 0 0 - +R @ 1956 o - Jun Sun>=1 0 1 S +R @ 1956 o - S lastSun 0 0 - +R @ 1957 o - Jun Sun>=1 1 1 S +R @ 1957 o - S lastSun 3 0 - +R @ 1980 o - Ap 6 1 1 S +Z Europe/Budapest 1:16:20 - LMT 1890 O +1 ( CE%sT 1918 +1 @ CE%sT 1941 Ap 8 +1 ( CE%sT 1945 +1 @ CE%sT 1980 S 28 2s +1 O CE%sT +R [ 1917 1919 - F 19 23 1 - +R [ 1917 o - O 21 1 0 - +R [ 1918 1919 - N 16 1 0 - +R [ 1921 o - Mar 19 23 1 - +R [ 1921 o - Jun 23 1 0 - +R [ 1939 o - Ap 29 23 1 - +R [ 1939 o - O 29 2 0 - +R [ 1940 o - F 25 2 1 - +R [ 1940 1941 - N Sun>=2 1s 0 - +R [ 1941 1942 - Mar Sun>=2 1s 1 - +R [ 1943 1946 - Mar Sun>=1 1s 1 - +R [ 1942 1948 - O Sun>=22 1s 0 - +R [ 1947 1967 - Ap Sun>=1 1s 1 - +R [ 1949 o - O 30 1s 0 - +R [ 1950 1966 - O Sun>=22 1s 0 - +R [ 1967 o - O 29 1s 0 - +Z Atlantic/Reykjavik -1:28 - LMT 1908 +-1 [ -01/+00 1968 Ap 7 1s +0 - GMT +R \ 1916 o - Jun 3 24 1 S +R \ 1916 1917 - S 30 24 0 - +R \ 1917 o - Mar 31 24 1 S +R \ 1918 o - Mar 9 24 1 S +R \ 1918 o - O 6 24 0 - +R \ 1919 o - Mar 1 24 1 S +R \ 1919 o - O 4 24 0 - +R \ 1920 o - Mar 20 24 1 S +R \ 1920 o - S 18 24 0 - +R \ 1940 o - Jun 14 24 1 S +R \ 1942 o - N 2 2s 0 - +R \ 1943 o - Mar 29 2s 1 S +R \ 1943 o - O 4 2s 0 - +R \ 1944 o - Ap 2 2s 1 S +R \ 1944 o - S 17 2s 0 - +R \ 1945 o - Ap 2 2 1 S +R \ 1945 o - S 15 1 0 - +R \ 1946 o - Mar 17 2s 1 S +R \ 1946 o - O 6 2s 0 - +R \ 1947 o - Mar 16 0s 1 S +R \ 1947 o - O 5 0s 0 - +R \ 1948 o - F 29 2s 1 S +R \ 1948 o - O 3 2s 0 - +R \ 1966 1968 - May Sun>=22 0s 1 S +R \ 1966 o - S 24 24 0 - +R \ 1967 1969 - S Sun>=22 0s 0 - +R \ 1969 o - Jun 1 0s 1 S +R \ 1970 o - May 31 0s 1 S +R \ 1970 o - S lastSun 0s 0 - +R \ 1971 1972 - May Sun>=22 0s 1 S +R \ 1971 o - S lastSun 0s 0 - +R \ 1972 o - O 1 0s 0 - +R \ 1973 o - Jun 3 0s 1 S +R \ 1973 1974 - S lastSun 0s 0 - +R \ 1974 o - May 26 0s 1 S +R \ 1975 o - Jun 1 0s 1 S +R \ 1975 1977 - S lastSun 0s 0 - +R \ 1976 o - May 30 0s 1 S +R \ 1977 1979 - May Sun>=22 0s 1 S +R \ 1978 o - O 1 0s 0 - +R \ 1979 o - S 30 0s 0 - +Z Europe/Rome 0:49:56 - LMT 1866 S 22 +0:49:56 - RMT 1893 O 31 23:49:56 +1 \ CE%sT 1943 S 10 +1 ( CE%sT 1944 Jun 4 +1 \ CE%sT 1980 +1 O CE%sT +Li Europe/Rome Europe/Vatican +Li Europe/Rome Europe/San_Marino +R ] 1989 1996 - Mar lastSun 2s 1 S +R ] 1989 1996 - S lastSun 2s 0 - +Z Europe/Riga 1:36:34 - LMT 1880 +1:36:34 - RMT 1918 Ap 15 2 +1:36:34 1 LST 1918 S 16 3 +1:36:34 - RMT 1919 Ap 1 2 +1:36:34 1 LST 1919 May 22 3 +1:36:34 - RMT 1926 May 11 +2 - EET 1940 Au 5 +3 - MSK 1941 Jul +1 ( CE%sT 1944 O 13 +3 M MSK/MSD 1989 Mar lastSun 2s +2 1 EEST 1989 S lastSun 2s +2 ] EE%sT 1997 Ja 21 +2 O EE%sT 2000 F 29 +2 - EET 2001 Ja 2 +2 O EE%sT +Li Europe/Zurich Europe/Vaduz +Z Europe/Vilnius 1:41:16 - 
LMT 1880 +1:24 - WMT 1917 +1:35:36 - KMT 1919 O 10 +1 - CET 1920 Jul 12 +2 - EET 1920 O 9 +1 - CET 1940 Au 3 +3 - MSK 1941 Jun 24 +1 ( CE%sT 1944 Au +3 M MSK/MSD 1989 Mar 26 2s +2 M EE%sT 1991 S 29 2s +2 ( EE%sT 1998 +2 - EET 1998 Mar 29 1u +1 O CE%sT 1999 O 31 1u +2 - EET 2003 +2 O EE%sT +R ^ 1916 o - May 14 23 1 S +R ^ 1916 o - O 1 1 0 - +R ^ 1917 o - Ap 28 23 1 S +R ^ 1917 o - S 17 1 0 - +R ^ 1918 o - Ap M>=15 2s 1 S +R ^ 1918 o - S M>=15 2s 0 - +R ^ 1919 o - Mar 1 23 1 S +R ^ 1919 o - O 5 3 0 - +R ^ 1920 o - F 14 23 1 S +R ^ 1920 o - O 24 2 0 - +R ^ 1921 o - Mar 14 23 1 S +R ^ 1921 o - O 26 2 0 - +R ^ 1922 o - Mar 25 23 1 S +R ^ 1922 o - O Sun>=2 1 0 - +R ^ 1923 o - Ap 21 23 1 S +R ^ 1923 o - O Sun>=2 2 0 - +R ^ 1924 o - Mar 29 23 1 S +R ^ 1924 1928 - O Sun>=2 1 0 - +R ^ 1925 o - Ap 5 23 1 S +R ^ 1926 o - Ap 17 23 1 S +R ^ 1927 o - Ap 9 23 1 S +R ^ 1928 o - Ap 14 23 1 S +R ^ 1929 o - Ap 20 23 1 S +Z Europe/Luxembourg 0:24:36 - LMT 1904 Jun +1 ^ CE%sT 1918 N 25 +0 ^ WE%sT 1929 O 6 2s +0 + WE%sT 1940 May 14 3 +1 ( WE%sT 1944 S 18 3 +1 + CE%sT 1977 +1 O CE%sT +R _ 1973 o - Mar 31 0s 1 S +R _ 1973 o - S 29 0s 0 - +R _ 1974 o - Ap 21 0s 1 S +R _ 1974 o - S 16 0s 0 - +R _ 1975 1979 - Ap Sun>=15 2 1 S +R _ 1975 1980 - S Sun>=15 2 0 - +R _ 1980 o - Mar 31 2 1 S +Z Europe/Malta 0:58:4 - LMT 1893 N 2 0s +1 \ CE%sT 1973 Mar 31 +1 _ CE%sT 1981 +1 O CE%sT +R ` 1997 ma - Mar lastSun 2 1 S +R ` 1997 ma - O lastSun 3 0 - +Z Europe/Chisinau 1:55:20 - LMT 1880 +1:55 - CMT 1918 F 15 +1:44:24 - BMT 1931 Jul 24 +2 { EE%sT 1940 Au 15 +2 1 EEST 1941 Jul 17 +1 ( CE%sT 1944 Au 24 +3 M MSK/MSD 1990 May 6 2 +2 M EE%sT 1992 +2 W EE%sT 1997 +2 ` EE%sT +Z Europe/Monaco 0:29:32 - LMT 1891 Mar 15 +0:9:21 - PMT 1911 Mar 11 +0 < WE%sT 1945 S 16 3 +1 < CE%sT 1977 +1 O CE%sT +R | 1916 o - May 1 0 1 NST +R | 1916 o - O 1 0 0 AMT +R | 1917 o - Ap 16 2s 1 NST +R | 1917 o - S 17 2s 0 AMT +R | 1918 1921 - Ap M>=1 2s 1 NST +R | 1918 1921 - S lastM 2s 0 AMT +R | 1922 o - Mar lastSun 2s 1 NST +R | 1922 1936 - O Sun>=2 2s 0 AMT +R | 1923 o - Jun F>=1 2s 1 NST +R | 1924 o - Mar lastSun 2s 1 NST +R | 1925 o - Jun F>=1 2s 1 NST +R | 1926 1931 - May 15 2s 1 NST +R | 1932 o - May 22 2s 1 NST +R | 1933 1936 - May 15 2s 1 NST +R | 1937 o - May 22 2s 1 NST +R | 1937 o - Jul 1 0 1 S +R | 1937 1939 - O Sun>=2 2s 0 - +R | 1938 1939 - May 15 2s 1 S +R | 1945 o - Ap 2 2s 1 S +R | 1945 o - S 16 2s 0 - +Z Europe/Amsterdam 0:19:32 - LMT 1835 +0:19:32 | %s 1937 Jul +0:20 | +0020/+0120 1940 May 16 +1 ( CE%sT 1945 Ap 2 2 +1 | CE%sT 1977 +1 O CE%sT +R } 1916 o - May 22 1 1 S +R } 1916 o - S 30 0 0 - +R } 1945 o - Ap 2 2s 1 S +R } 1945 o - O 1 2s 0 - +R } 1959 1964 - Mar Sun>=15 2s 1 S +R } 1959 1965 - S Sun>=15 2s 0 - +R } 1965 o - Ap 25 2s 1 S +Z Europe/Oslo 0:43 - LMT 1895 +1 } CE%sT 1940 Au 10 23 +1 ( CE%sT 1945 Ap 2 2 +1 } CE%sT 1980 +1 O CE%sT +Li Europe/Oslo Arctic/Longyearbyen +R ~ 1918 1919 - S 16 2s 0 - +R ~ 1919 o - Ap 15 2s 1 S +R ~ 1944 o - Ap 3 2s 1 S +R ~ 1944 o - O 4 2 0 - +R ~ 1945 o - Ap 29 0 1 S +R ~ 1945 o - N 1 0 0 - +R ~ 1946 o - Ap 14 0s 1 S +R ~ 1946 o - O 7 2s 0 - +R ~ 1947 o - May 4 2s 1 S +R ~ 1947 1949 - O Sun>=1 2s 0 - +R ~ 1948 o - Ap 18 2s 1 S +R ~ 1949 o - Ap 10 2s 1 S +R ~ 1957 o - Jun 2 1s 1 S +R ~ 1957 1958 - S lastSun 1s 0 - +R ~ 1958 o - Mar 30 1s 1 S +R ~ 1959 o - May 31 1s 1 S +R ~ 1959 1961 - O Sun>=1 1s 0 - +R ~ 1960 o - Ap 3 1s 1 S +R ~ 1961 1964 - May lastSun 1s 1 S +R ~ 1962 1964 - S lastSun 1s 0 - +Z Europe/Warsaw 1:24 - LMT 1880 +1:24 - WMT 1915 Au 5 +1 ( CE%sT 1918 S 16 3 +2 ~ EE%sT 1922 Jun +1 ~ CE%sT 
1940 Jun 23 2 +1 ( CE%sT 1944 O +1 ~ CE%sT 1977 +1 ' CE%sT 1988 +1 O CE%sT +R AA 1916 o - Jun 17 23 1 S +R AA 1916 o - N 1 1 0 - +R AA 1917 o - F 28 23s 1 S +R AA 1917 1921 - O 14 23s 0 - +R AA 1918 o - Mar 1 23s 1 S +R AA 1919 o - F 28 23s 1 S +R AA 1920 o - F 29 23s 1 S +R AA 1921 o - F 28 23s 1 S +R AA 1924 o - Ap 16 23s 1 S +R AA 1924 o - O 14 23s 0 - +R AA 1926 o - Ap 17 23s 1 S +R AA 1926 1929 - O Sat>=1 23s 0 - +R AA 1927 o - Ap 9 23s 1 S +R AA 1928 o - Ap 14 23s 1 S +R AA 1929 o - Ap 20 23s 1 S +R AA 1931 o - Ap 18 23s 1 S +R AA 1931 1932 - O Sat>=1 23s 0 - +R AA 1932 o - Ap 2 23s 1 S +R AA 1934 o - Ap 7 23s 1 S +R AA 1934 1938 - O Sat>=1 23s 0 - +R AA 1935 o - Mar 30 23s 1 S +R AA 1936 o - Ap 18 23s 1 S +R AA 1937 o - Ap 3 23s 1 S +R AA 1938 o - Mar 26 23s 1 S +R AA 1939 o - Ap 15 23s 1 S +R AA 1939 o - N 18 23s 0 - +R AA 1940 o - F 24 23s 1 S +R AA 1940 1941 - O 5 23s 0 - +R AA 1941 o - Ap 5 23s 1 S +R AA 1942 1945 - Mar Sat>=8 23s 1 S +R AA 1942 o - Ap 25 22s 2 M +R AA 1942 o - Au 15 22s 1 S +R AA 1942 1945 - O Sat>=24 23s 0 - +R AA 1943 o - Ap 17 22s 2 M +R AA 1943 1945 - Au Sat>=25 22s 1 S +R AA 1944 1945 - Ap Sat>=21 22s 2 M +R AA 1946 o - Ap Sat>=1 23s 1 S +R AA 1946 o - O Sat>=1 23s 0 - +R AA 1947 1949 - Ap Sun>=1 2s 1 S +R AA 1947 1949 - O Sun>=1 2s 0 - +R AA 1951 1965 - Ap Sun>=1 2s 1 S +R AA 1951 1965 - O Sun>=1 2s 0 - +R AA 1977 o - Mar 27 0s 1 S +R AA 1977 o - S 25 0s 0 - +R AA 1978 1979 - Ap Sun>=1 0s 1 S +R AA 1978 o - O 1 0s 0 - +R AA 1979 1982 - S lastSun 1s 0 - +R AA 1980 o - Mar lastSun 0s 1 S +R AA 1981 1982 - Mar lastSun 1s 1 S +R AA 1983 o - Mar lastSun 2s 1 S +Z Europe/Lisbon -0:36:45 - LMT 1884 +-0:36:45 - LMT 1912 Ja 1 0u +0 AA WE%sT 1966 Ap 3 2 +1 - CET 1976 S 26 1 +0 AA WE%sT 1983 S 25 1s +0 ' WE%sT 1992 S 27 1s +1 O CE%sT 1996 Mar 31 1u +0 O WE%sT +Z Atlantic/Azores -1:42:40 - LMT 1884 +-1:54:32 - HMT 1912 Ja 1 2u +-2 AA -02/-01 1942 Ap 25 22s +-2 AA +00 1942 Au 15 22s +-2 AA -02/-01 1943 Ap 17 22s +-2 AA +00 1943 Au 28 22s +-2 AA -02/-01 1944 Ap 22 22s +-2 AA +00 1944 Au 26 22s +-2 AA -02/-01 1945 Ap 21 22s +-2 AA +00 1945 Au 25 22s +-2 AA -02/-01 1966 Ap 3 2 +-1 AA -01/+00 1983 S 25 1s +-1 ' -01/+00 1992 S 27 1s +0 O WE%sT 1993 Mar 28 1u +-1 O -01/+00 +Z Atlantic/Madeira -1:7:36 - LMT 1884 +-1:7:36 - FMT 1912 Ja 1 1u +-1 AA -01/+00 1942 Ap 25 22s +-1 AA +01 1942 Au 15 22s +-1 AA -01/+00 1943 Ap 17 22s +-1 AA +01 1943 Au 28 22s +-1 AA -01/+00 1944 Ap 22 22s +-1 AA +01 1944 Au 26 22s +-1 AA -01/+00 1945 Ap 21 22s +-1 AA +01 1945 Au 25 22s +-1 AA -01/+00 1966 Ap 3 2 +0 AA WE%sT 1983 S 25 1s +0 O WE%sT +R { 1932 o - May 21 0s 1 S +R { 1932 1939 - O Sun>=1 0s 0 - +R { 1933 1939 - Ap Sun>=2 0s 1 S +R { 1979 o - May 27 0 1 S +R { 1979 o - S lastSun 0 0 - +R { 1980 o - Ap 5 23 1 S +R { 1980 o - S lastSun 1 0 - +R { 1991 1993 - Mar lastSun 0s 1 S +R { 1991 1993 - S lastSun 0s 0 - +Z Europe/Bucharest 1:44:24 - LMT 1891 O +1:44:24 - BMT 1931 Jul 24 +2 { EE%sT 1981 Mar 29 2s +2 ( EE%sT 1991 +2 { EE%sT 1994 +2 W EE%sT 1997 +2 O EE%sT +Z Europe/Kaliningrad 1:22 - LMT 1893 Ap +1 ( CE%sT 1945 +2 ~ CE%sT 1946 +3 M MSK/MSD 1989 Mar 26 2s +2 M EE%sT 2011 Mar 27 2s +3 - +03 2014 O 26 2s +2 - EET +Z Europe/Moscow 2:30:17 - LMT 1880 +2:30:17 - MMT 1916 Jul 3 +2:31:19 M %s 1919 Jul 1 0u +3 M %s 1921 O +3 M MSK/MSD 1922 O +2 - EET 1930 Jun 21 +3 M MSK/MSD 1991 Mar 31 2s +2 M EE%sT 1992 Ja 19 2s +3 M MSK/MSD 2011 Mar 27 2s +4 - MSK 2014 O 26 2s +3 - MSK +Z Europe/Simferopol 2:16:24 - LMT 1880 +2:16 - SMT 1924 May 2 +2 - EET 1930 Jun 21 +3 - MSK 1941 N +1 ( CE%sT 1944 Ap 13 +3 
M MSK/MSD 1990 +3 - MSK 1990 Jul 1 2 +2 - EET 1992 +2 W EE%sT 1994 May +3 W MSK/MSD 1996 Mar 31 0s +3 1 MSD 1996 O 27 3s +3 M MSK/MSD 1997 +3 - MSK 1997 Mar lastSun 1u +2 O EE%sT 2014 Mar 30 2 +4 - MSK 2014 O 26 2s +3 - MSK +Z Europe/Astrakhan 3:12:12 - LMT 1924 May +3 - +03 1930 Jun 21 +4 M +04/+05 1989 Mar 26 2s +3 M +03/+04 1991 Mar 31 2s +4 - +04 1992 Mar 29 2s +3 M +03/+04 2011 Mar 27 2s +4 - +04 2014 O 26 2s +3 - +03 2016 Mar 27 2s +4 - +04 +Z Europe/Volgograd 2:57:40 - LMT 1920 Ja 3 +3 - +03 1930 Jun 21 +4 - +04 1961 N 11 +4 M +04/+05 1988 Mar 27 2s +3 M +03/+04 1991 Mar 31 2s +4 - +04 1992 Mar 29 2s +3 M +03/+04 2011 Mar 27 2s +4 - +04 2014 O 26 2s +3 - +03 +Z Europe/Saratov 3:4:18 - LMT 1919 Jul 1 0u +3 - +03 1930 Jun 21 +4 M +04/+05 1988 Mar 27 2s +3 M +03/+04 1991 Mar 31 2s +4 - +04 1992 Mar 29 2s +3 M +03/+04 2011 Mar 27 2s +4 - +04 2014 O 26 2s +3 - +03 2016 D 4 2s +4 - +04 +Z Europe/Kirov 3:18:48 - LMT 1919 Jul 1 0u +3 - +03 1930 Jun 21 +4 M +04/+05 1989 Mar 26 2s +3 M +03/+04 1991 Mar 31 2s +4 - +04 1992 Mar 29 2s +3 M +03/+04 2011 Mar 27 2s +4 - +04 2014 O 26 2s +3 - +03 +Z Europe/Samara 3:20:20 - LMT 1919 Jul 1 0u +3 - +03 1930 Jun 21 +4 - +04 1935 Ja 27 +4 M +04/+05 1989 Mar 26 2s +3 M +03/+04 1991 Mar 31 2s +2 M +02/+03 1991 S 29 2s +3 - +03 1991 O 20 3 +4 M +04/+05 2010 Mar 28 2s +3 M +03/+04 2011 Mar 27 2s +4 - +04 +Z Europe/Ulyanovsk 3:13:36 - LMT 1919 Jul 1 0u +3 - +03 1930 Jun 21 +4 M +04/+05 1989 Mar 26 2s +3 M +03/+04 1991 Mar 31 2s +2 M +02/+03 1992 Ja 19 2s +3 M +03/+04 2011 Mar 27 2s +4 - +04 2014 O 26 2s +3 - +03 2016 Mar 27 2s +4 - +04 +Z Asia/Yekaterinburg 4:2:33 - LMT 1916 Jul 3 +3:45:5 - PMT 1919 Jul 15 4 +4 - +04 1930 Jun 21 +5 M +05/+06 1991 Mar 31 2s +4 M +04/+05 1992 Ja 19 2s +5 M +05/+06 2011 Mar 27 2s +6 - +06 2014 O 26 2s +5 - +05 +Z Asia/Omsk 4:53:30 - LMT 1919 N 14 +5 - +05 1930 Jun 21 +6 M +06/+07 1991 Mar 31 2s +5 M +05/+06 1992 Ja 19 2s +6 M +06/+07 2011 Mar 27 2s +7 - +07 2014 O 26 2s +6 - +06 +Z Asia/Barnaul 5:35 - LMT 1919 D 10 +6 - +06 1930 Jun 21 +7 M +07/+08 1991 Mar 31 2s +6 M +06/+07 1992 Ja 19 2s +7 M +07/+08 1995 May 28 +6 M +06/+07 2011 Mar 27 2s +7 - +07 2014 O 26 2s +6 - +06 2016 Mar 27 2s +7 - +07 +Z Asia/Novosibirsk 5:31:40 - LMT 1919 D 14 6 +6 - +06 1930 Jun 21 +7 M +07/+08 1991 Mar 31 2s +6 M +06/+07 1992 Ja 19 2s +7 M +07/+08 1993 May 23 +6 M +06/+07 2011 Mar 27 2s +7 - +07 2014 O 26 2s +6 - +06 2016 Jul 24 2s +7 - +07 +Z Asia/Tomsk 5:39:51 - LMT 1919 D 22 +6 - +06 1930 Jun 21 +7 M +07/+08 1991 Mar 31 2s +6 M +06/+07 1992 Ja 19 2s +7 M +07/+08 2002 May 1 3 +6 M +06/+07 2011 Mar 27 2s +7 - +07 2014 O 26 2s +6 - +06 2016 May 29 2s +7 - +07 +Z Asia/Novokuznetsk 5:48:48 - LMT 1924 May +6 - +06 1930 Jun 21 +7 M +07/+08 1991 Mar 31 2s +6 M +06/+07 1992 Ja 19 2s +7 M +07/+08 2010 Mar 28 2s +6 M +06/+07 2011 Mar 27 2s +7 - +07 +Z Asia/Krasnoyarsk 6:11:26 - LMT 1920 Ja 6 +6 - +06 1930 Jun 21 +7 M +07/+08 1991 Mar 31 2s +6 M +06/+07 1992 Ja 19 2s +7 M +07/+08 2011 Mar 27 2s +8 - +08 2014 O 26 2s +7 - +07 +Z Asia/Irkutsk 6:57:5 - LMT 1880 +6:57:5 - IMT 1920 Ja 25 +7 - +07 1930 Jun 21 +8 M +08/+09 1991 Mar 31 2s +7 M +07/+08 1992 Ja 19 2s +8 M +08/+09 2011 Mar 27 2s +9 - +09 2014 O 26 2s +8 - +08 +Z Asia/Chita 7:33:52 - LMT 1919 D 15 +8 - +08 1930 Jun 21 +9 M +09/+10 1991 Mar 31 2s +8 M +08/+09 1992 Ja 19 2s +9 M +09/+10 2011 Mar 27 2s +10 - +10 2014 O 26 2s +8 - +08 2016 Mar 27 2 +9 - +09 +Z Asia/Yakutsk 8:38:58 - LMT 1919 D 15 +8 - +08 1930 Jun 21 +9 M +09/+10 1991 Mar 31 2s +8 M +08/+09 1992 Ja 19 2s +9 M +09/+10 2011 Mar 27 2s +10 - 
+10 2014 O 26 2s +9 - +09 +Z Asia/Vladivostok 8:47:31 - LMT 1922 N 15 +9 - +09 1930 Jun 21 +10 M +10/+11 1991 Mar 31 2s +9 M +09/+10 1992 Ja 19 2s +10 M +10/+11 2011 Mar 27 2s +11 - +11 2014 O 26 2s +10 - +10 +Z Asia/Khandyga 9:2:13 - LMT 1919 D 15 +8 - +08 1930 Jun 21 +9 M +09/+10 1991 Mar 31 2s +8 M +08/+09 1992 Ja 19 2s +9 M +09/+10 2004 +10 M +10/+11 2011 Mar 27 2s +11 - +11 2011 S 13 0s +10 - +10 2014 O 26 2s +9 - +09 +Z Asia/Sakhalin 9:30:48 - LMT 1905 Au 23 +9 - +09 1945 Au 25 +11 M +11/+12 1991 Mar 31 2s +10 M +10/+11 1992 Ja 19 2s +11 M +11/+12 1997 Mar lastSun 2s +10 M +10/+11 2011 Mar 27 2s +11 - +11 2014 O 26 2s +10 - +10 2016 Mar 27 2s +11 - +11 +Z Asia/Magadan 10:3:12 - LMT 1924 May 2 +10 - +10 1930 Jun 21 +11 M +11/+12 1991 Mar 31 2s +10 M +10/+11 1992 Ja 19 2s +11 M +11/+12 2011 Mar 27 2s +12 - +12 2014 O 26 2s +10 - +10 2016 Ap 24 2s +11 - +11 +Z Asia/Srednekolymsk 10:14:52 - LMT 1924 May 2 +10 - +10 1930 Jun 21 +11 M +11/+12 1991 Mar 31 2s +10 M +10/+11 1992 Ja 19 2s +11 M +11/+12 2011 Mar 27 2s +12 - +12 2014 O 26 2s +11 - +11 +Z Asia/Ust-Nera 9:32:54 - LMT 1919 D 15 +8 - +08 1930 Jun 21 +9 M +09/+10 1981 Ap +11 M +11/+12 1991 Mar 31 2s +10 M +10/+11 1992 Ja 19 2s +11 M +11/+12 2011 Mar 27 2s +12 - +12 2011 S 13 0s +11 - +11 2014 O 26 2s +10 - +10 +Z Asia/Kamchatka 10:34:36 - LMT 1922 N 10 +11 - +11 1930 Jun 21 +12 M +12/+13 1991 Mar 31 2s +11 M +11/+12 1992 Ja 19 2s +12 M +12/+13 2010 Mar 28 2s +11 M +11/+12 2011 Mar 27 2s +12 - +12 +Z Asia/Anadyr 11:49:56 - LMT 1924 May 2 +12 - +12 1930 Jun 21 +13 M +13/+14 1982 Ap 1 0s +12 M +12/+13 1991 Mar 31 2s +11 M +11/+12 1992 Ja 19 2s +12 M +12/+13 2010 Mar 28 2s +11 M +11/+12 2011 Mar 27 2s +12 - +12 +Z Europe/Belgrade 1:22 - LMT 1884 +1 - CET 1941 Ap 18 23 +1 ( CE%sT 1945 +1 - CET 1945 May 8 2s +1 1 CEST 1945 S 16 2s +1 - CET 1982 N 27 +1 O CE%sT +Li Europe/Belgrade Europe/Ljubljana +Li Europe/Belgrade Europe/Podgorica +Li Europe/Belgrade Europe/Sarajevo +Li Europe/Belgrade Europe/Skopje +Li Europe/Belgrade Europe/Zagreb +Li Europe/Prague Europe/Bratislava +R AB 1918 o - Ap 15 23 1 S +R AB 1918 1919 - O 6 24s 0 - +R AB 1919 o - Ap 6 23 1 S +R AB 1924 o - Ap 16 23 1 S +R AB 1924 o - O 4 24s 0 - +R AB 1926 o - Ap 17 23 1 S +R AB 1926 1929 - O Sat>=1 24s 0 - +R AB 1927 o - Ap 9 23 1 S +R AB 1928 o - Ap 15 0 1 S +R AB 1929 o - Ap 20 23 1 S +R AB 1937 o - Jun 16 23 1 S +R AB 1937 o - O 2 24s 0 - +R AB 1938 o - Ap 2 23 1 S +R AB 1938 o - Ap 30 23 2 M +R AB 1938 o - O 2 24 1 S +R AB 1939 o - O 7 24s 0 - +R AB 1942 o - May 2 23 1 S +R AB 1942 o - S 1 1 0 - +R AB 1943 1946 - Ap Sat>=13 23 1 S +R AB 1943 1944 - O Sun>=1 1 0 - +R AB 1945 1946 - S lastSun 1 0 - +R AB 1949 o - Ap 30 23 1 S +R AB 1949 o - O 2 1 0 - +R AB 1974 1975 - Ap Sat>=12 23 1 S +R AB 1974 1975 - O Sun>=1 1 0 - +R AB 1976 o - Mar 27 23 1 S +R AB 1976 1977 - S lastSun 1 0 - +R AB 1977 o - Ap 2 23 1 S +R AB 1978 o - Ap 2 2s 1 S +R AB 1978 o - O 1 2s 0 - +R AC 1967 o - Jun 3 12 1 S +R AC 1967 o - O 1 0 0 - +R AC 1974 o - Jun 24 0 1 S +R AC 1974 o - S 1 0 0 - +R AC 1976 1977 - May 1 0 1 S +R AC 1976 o - Au 1 0 0 - +R AC 1977 o - S 28 0 0 - +R AC 1978 o - Jun 1 0 1 S +R AC 1978 o - Au 4 0 0 - +Z Europe/Madrid -0:14:44 - LMT 1900 D 31 23:45:16 +0 AB WE%sT 1940 Mar 16 23 +1 AB CE%sT 1979 +1 O CE%sT +Z Africa/Ceuta -0:21:16 - LMT 1900 D 31 23:38:44 +0 - WET 1918 May 6 23 +0 1 WEST 1918 O 7 23 +0 - WET 1924 +0 AB WE%sT 1929 +0 AC WE%sT 1984 Mar 16 +1 - CET 1986 +1 O CE%sT +Z Atlantic/Canary -1:1:36 - LMT 1922 Mar +-1 - -01 1946 S 30 1 +0 - WET 1980 Ap 6 0s +0 1 WEST 1980 S 28 
1u +0 O WE%sT +Z Europe/Stockholm 1:12:12 - LMT 1879 +1:0:14 - SET 1900 +1 - CET 1916 May 14 23 +1 1 CEST 1916 O 1 1 +1 - CET 1980 +1 O CE%sT +R AD 1941 1942 - May M>=1 1 1 S +R AD 1941 1942 - O M>=1 2 0 - +Z Europe/Zurich 0:34:8 - LMT 1853 Jul 16 +0:29:46 - BMT 1894 Jun +1 AD CE%sT 1981 +1 O CE%sT +R AE 1916 o - May 1 0 1 S +R AE 1916 o - O 1 0 0 - +R AE 1920 o - Mar 28 0 1 S +R AE 1920 o - O 25 0 0 - +R AE 1921 o - Ap 3 0 1 S +R AE 1921 o - O 3 0 0 - +R AE 1922 o - Mar 26 0 1 S +R AE 1922 o - O 8 0 0 - +R AE 1924 o - May 13 0 1 S +R AE 1924 1925 - O 1 0 0 - +R AE 1925 o - May 1 0 1 S +R AE 1940 o - Jun 30 0 1 S +R AE 1940 o - O 5 0 0 - +R AE 1940 o - D 1 0 1 S +R AE 1941 o - S 21 0 0 - +R AE 1942 o - Ap 1 0 1 S +R AE 1942 o - N 1 0 0 - +R AE 1945 o - Ap 2 0 1 S +R AE 1945 o - O 8 0 0 - +R AE 1946 o - Jun 1 0 1 S +R AE 1946 o - O 1 0 0 - +R AE 1947 1948 - Ap Sun>=16 0 1 S +R AE 1947 1950 - O Sun>=2 0 0 - +R AE 1949 o - Ap 10 0 1 S +R AE 1950 o - Ap 19 0 1 S +R AE 1951 o - Ap 22 0 1 S +R AE 1951 o - O 8 0 0 - +R AE 1962 o - Jul 15 0 1 S +R AE 1962 o - O 8 0 0 - +R AE 1964 o - May 15 0 1 S +R AE 1964 o - O 1 0 0 - +R AE 1970 1972 - May Sun>=2 0 1 S +R AE 1970 1972 - O Sun>=2 0 0 - +R AE 1973 o - Jun 3 1 1 S +R AE 1973 o - N 4 3 0 - +R AE 1974 o - Mar 31 2 1 S +R AE 1974 o - N 3 5 0 - +R AE 1975 o - Mar 30 0 1 S +R AE 1975 1976 - O lastSun 0 0 - +R AE 1976 o - Jun 1 0 1 S +R AE 1977 1978 - Ap Sun>=1 0 1 S +R AE 1977 o - O 16 0 0 - +R AE 1979 1980 - Ap Sun>=1 3 1 S +R AE 1979 1982 - O M>=11 0 0 - +R AE 1981 1982 - Mar lastSun 3 1 S +R AE 1983 o - Jul 31 0 1 S +R AE 1983 o - O 2 0 0 - +R AE 1985 o - Ap 20 0 1 S +R AE 1985 o - S 28 0 0 - +R AE 1986 1993 - Mar lastSun 1s 1 S +R AE 1986 1995 - S lastSun 1s 0 - +R AE 1994 o - Mar 20 1s 1 S +R AE 1995 2006 - Mar lastSun 1s 1 S +R AE 1996 2006 - O lastSun 1s 0 - +Z Europe/Istanbul 1:55:52 - LMT 1880 +1:56:56 - IMT 1910 O +2 AE EE%sT 1978 O 15 +3 AE +03/+04 1985 Ap 20 +2 AE EE%sT 2007 +2 O EE%sT 2011 Mar 27 1u +2 - EET 2011 Mar 28 1u +2 O EE%sT 2014 Mar 30 1u +2 - EET 2014 Mar 31 1u +2 O EE%sT 2015 O 25 1u +2 1 EEST 2015 N 8 1u +2 O EE%sT 2016 S 7 +3 - +03 +Li Europe/Istanbul Asia/Istanbul +Z Europe/Kiev 2:2:4 - LMT 1880 +2:2:4 - KMT 1924 May 2 +2 - EET 1930 Jun 21 +3 - MSK 1941 S 20 +1 ( CE%sT 1943 N 6 +3 M MSK/MSD 1990 Jul 1 2 +2 1 EEST 1991 S 29 3 +2 W EE%sT 1995 +2 O EE%sT +Z Europe/Uzhgorod 1:29:12 - LMT 1890 O +1 - CET 1940 +1 ( CE%sT 1944 O +1 1 CEST 1944 O 26 +1 - CET 1945 Jun 29 +3 M MSK/MSD 1990 +3 - MSK 1990 Jul 1 2 +1 - CET 1991 Mar 31 3 +2 - EET 1992 +2 W EE%sT 1995 +2 O EE%sT +Z Europe/Zaporozhye 2:20:40 - LMT 1880 +2:20 - +0220 1924 May 2 +2 - EET 1930 Jun 21 +3 - MSK 1941 Au 25 +1 ( CE%sT 1943 O 25 +3 M MSK/MSD 1991 Mar 31 2 +2 W EE%sT 1995 +2 O EE%sT +R AF 1918 1919 - Mar lastSun 2 1 D +R AF 1918 1919 - O lastSun 2 0 S +R AF 1942 o - F 9 2 1 W +R AF 1945 o - Au 14 23u 1 P +R AF 1945 o - S lastSun 2 0 S +R AF 1967 2006 - O lastSun 2 0 S +R AF 1967 1973 - Ap lastSun 2 1 D +R AF 1974 o - Ja 6 2 1 D +R AF 1975 o - F 23 2 1 D +R AF 1976 1986 - Ap lastSun 2 1 D +R AF 1987 2006 - Ap Sun>=1 2 1 D +R AF 2007 ma - Mar Sun>=8 2 1 D +R AF 2007 ma - N Sun>=1 2 0 S +Z EST -5 - EST +Z MST -7 - MST +Z HST -10 - HST +Z EST5EDT -5 AF E%sT +Z CST6CDT -6 AF C%sT +Z MST7MDT -7 AF M%sT +Z PST8PDT -8 AF P%sT +R AG 1920 o - Mar lastSun 2 1 D +R AG 1920 o - O lastSun 2 0 S +R AG 1921 1966 - Ap lastSun 2 1 D +R AG 1921 1954 - S lastSun 2 0 S +R AG 1955 1966 - O lastSun 2 0 S +Z America/New_York -4:56:2 - LMT 1883 N 18 12:3:58 +-5 AF E%sT 1920 +-5 AG E%sT 1942 
+-5 AF E%sT 1946 +-5 AG E%sT 1967 +-5 AF E%sT +R AH 1920 o - Jun 13 2 1 D +R AH 1920 1921 - O lastSun 2 0 S +R AH 1921 o - Mar lastSun 2 1 D +R AH 1922 1966 - Ap lastSun 2 1 D +R AH 1922 1954 - S lastSun 2 0 S +R AH 1955 1966 - O lastSun 2 0 S +Z America/Chicago -5:50:36 - LMT 1883 N 18 12:9:24 +-6 AF C%sT 1920 +-6 AH C%sT 1936 Mar 1 2 +-5 - EST 1936 N 15 2 +-6 AH C%sT 1942 +-6 AF C%sT 1946 +-6 AH C%sT 1967 +-6 AF C%sT +Z America/North_Dakota/Center -6:45:12 - LMT 1883 N 18 12:14:48 +-7 AF M%sT 1992 O 25 2 +-6 AF C%sT +Z America/North_Dakota/New_Salem -6:45:39 - LMT 1883 N 18 12:14:21 +-7 AF M%sT 2003 O 26 2 +-6 AF C%sT +Z America/North_Dakota/Beulah -6:47:7 - LMT 1883 N 18 12:12:53 +-7 AF M%sT 2010 N 7 2 +-6 AF C%sT +R AI 1920 1921 - Mar lastSun 2 1 D +R AI 1920 o - O lastSun 2 0 S +R AI 1921 o - May 22 2 0 S +R AI 1965 1966 - Ap lastSun 2 1 D +R AI 1965 1966 - O lastSun 2 0 S +Z America/Denver -6:59:56 - LMT 1883 N 18 12:0:4 +-7 AF M%sT 1920 +-7 AI M%sT 1942 +-7 AF M%sT 1946 +-7 AI M%sT 1967 +-7 AF M%sT +R AJ 1948 o - Mar 14 2:1 1 D +R AJ 1949 o - Ja 1 2 0 S +R AJ 1950 1966 - Ap lastSun 1 1 D +R AJ 1950 1961 - S lastSun 2 0 S +R AJ 1962 1966 - O lastSun 2 0 S +Z America/Los_Angeles -7:52:58 - LMT 1883 N 18 12:7:2 +-8 AF P%sT 1946 +-8 AJ P%sT 1967 +-8 AF P%sT +Z America/Juneau 15:2:19 - LMT 1867 O 19 15:33:32 +-8:57:41 - LMT 1900 Au 20 12 +-8 - PST 1942 +-8 AF P%sT 1946 +-8 - PST 1969 +-8 AF P%sT 1980 Ap 27 2 +-9 AF Y%sT 1980 O 26 2 +-8 AF P%sT 1983 O 30 2 +-9 AF Y%sT 1983 N 30 +-9 AF AK%sT +Z America/Sitka 14:58:47 - LMT 1867 O 19 15:30 +-9:1:13 - LMT 1900 Au 20 12 +-8 - PST 1942 +-8 AF P%sT 1946 +-8 - PST 1969 +-8 AF P%sT 1983 O 30 2 +-9 AF Y%sT 1983 N 30 +-9 AF AK%sT +Z America/Metlakatla 15:13:42 - LMT 1867 O 19 15:44:55 +-8:46:18 - LMT 1900 Au 20 12 +-8 - PST 1942 +-8 AF P%sT 1946 +-8 - PST 1969 +-8 AF P%sT 1983 O 30 2 +-8 - PST 2015 N 1 2 +-9 AF AK%sT +Z America/Yakutat 14:41:5 - LMT 1867 O 19 15:12:18 +-9:18:55 - LMT 1900 Au 20 12 +-9 - YST 1942 +-9 AF Y%sT 1946 +-9 - YST 1969 +-9 AF Y%sT 1983 N 30 +-9 AF AK%sT +Z America/Anchorage 14:0:24 - LMT 1867 O 19 14:31:37 +-9:59:36 - LMT 1900 Au 20 12 +-10 - AST 1942 +-10 AF A%sT 1967 Ap +-10 - AHST 1969 +-10 AF AH%sT 1983 O 30 2 +-9 AF Y%sT 1983 N 30 +-9 AF AK%sT +Z America/Nome 12:58:22 - LMT 1867 O 19 13:29:35 +-11:1:38 - LMT 1900 Au 20 12 +-11 - NST 1942 +-11 AF N%sT 1946 +-11 - NST 1967 Ap +-11 - BST 1969 +-11 AF B%sT 1983 O 30 2 +-9 AF Y%sT 1983 N 30 +-9 AF AK%sT +Z America/Adak 12:13:22 - LMT 1867 O 19 12:44:35 +-11:46:38 - LMT 1900 Au 20 12 +-11 - NST 1942 +-11 AF N%sT 1946 +-11 - NST 1967 Ap +-11 - BST 1969 +-11 AF B%sT 1983 O 30 2 +-10 AF AH%sT 1983 N 30 +-10 AF H%sT +Z Pacific/Honolulu -10:31:26 - LMT 1896 Ja 13 12 +-10:30 - HST 1933 Ap 30 2 +-10:30 1 HDT 1933 May 21 12 +-10:30 - HST 1942 F 9 2 +-10:30 1 HDT 1945 S 30 2 +-10:30 - HST 1947 Jun 8 2 +-10 - HST +Z America/Phoenix -7:28:18 - LMT 1883 N 18 11:31:42 +-7 AF M%sT 1944 Ja 1 0:1 +-7 - MST 1944 Ap 1 0:1 +-7 AF M%sT 1944 O 1 0:1 +-7 - MST 1967 +-7 AF M%sT 1968 Mar 21 +-7 - MST +Z America/Boise -7:44:49 - LMT 1883 N 18 12:15:11 +-8 AF P%sT 1923 May 13 2 +-7 AF M%sT 1974 +-7 - MST 1974 F 3 2 +-7 AF M%sT +R AK 1941 o - Jun 22 2 1 D +R AK 1941 1954 - S lastSun 2 0 S +R AK 1946 1954 - Ap lastSun 2 1 D +Z America/Indiana/Indianapolis -5:44:38 - LMT 1883 N 18 12:15:22 +-6 AF C%sT 1920 +-6 AK C%sT 1942 +-6 AF C%sT 1946 +-6 AK C%sT 1955 Ap 24 2 +-5 - EST 1957 S 29 2 +-6 - CST 1958 Ap 27 2 +-5 - EST 1969 +-5 AF E%sT 1971 +-5 - EST 2006 +-5 AF E%sT +R AL 1951 o - Ap lastSun 2 1 D +R AL 
1951 o - S lastSun 2 0 S +R AL 1954 1960 - Ap lastSun 2 1 D +R AL 1954 1960 - S lastSun 2 0 S +Z America/Indiana/Marengo -5:45:23 - LMT 1883 N 18 12:14:37 +-6 AF C%sT 1951 +-6 AL C%sT 1961 Ap 30 2 +-5 - EST 1969 +-5 AF E%sT 1974 Ja 6 2 +-6 1 CDT 1974 O 27 2 +-5 AF E%sT 1976 +-5 - EST 2006 +-5 AF E%sT +R AM 1946 o - Ap lastSun 2 1 D +R AM 1946 o - S lastSun 2 0 S +R AM 1953 1954 - Ap lastSun 2 1 D +R AM 1953 1959 - S lastSun 2 0 S +R AM 1955 o - May 1 0 1 D +R AM 1956 1963 - Ap lastSun 2 1 D +R AM 1960 o - O lastSun 2 0 S +R AM 1961 o - S lastSun 2 0 S +R AM 1962 1963 - O lastSun 2 0 S +Z America/Indiana/Vincennes -5:50:7 - LMT 1883 N 18 12:9:53 +-6 AF C%sT 1946 +-6 AM C%sT 1964 Ap 26 2 +-5 - EST 1969 +-5 AF E%sT 1971 +-5 - EST 2006 Ap 2 2 +-6 AF C%sT 2007 N 4 2 +-5 AF E%sT +R AN 1946 o - Ap lastSun 2 1 D +R AN 1946 o - S lastSun 2 0 S +R AN 1953 1954 - Ap lastSun 2 1 D +R AN 1953 1959 - S lastSun 2 0 S +R AN 1955 o - May 1 0 1 D +R AN 1956 1963 - Ap lastSun 2 1 D +R AN 1960 o - O lastSun 2 0 S +R AN 1961 o - S lastSun 2 0 S +R AN 1962 1963 - O lastSun 2 0 S +Z America/Indiana/Tell_City -5:47:3 - LMT 1883 N 18 12:12:57 +-6 AF C%sT 1946 +-6 AN C%sT 1964 Ap 26 2 +-5 - EST 1969 +-5 AF E%sT 1971 +-5 - EST 2006 Ap 2 2 +-6 AF C%sT +R AO 1955 o - May 1 0 1 D +R AO 1955 1960 - S lastSun 2 0 S +R AO 1956 1964 - Ap lastSun 2 1 D +R AO 1961 1964 - O lastSun 2 0 S +Z America/Indiana/Petersburg -5:49:7 - LMT 1883 N 18 12:10:53 +-6 AF C%sT 1955 +-6 AO C%sT 1965 Ap 25 2 +-5 - EST 1966 O 30 2 +-6 AF C%sT 1977 O 30 2 +-5 - EST 2006 Ap 2 2 +-6 AF C%sT 2007 N 4 2 +-5 AF E%sT +R AP 1947 1961 - Ap lastSun 2 1 D +R AP 1947 1954 - S lastSun 2 0 S +R AP 1955 1956 - O lastSun 2 0 S +R AP 1957 1958 - S lastSun 2 0 S +R AP 1959 1961 - O lastSun 2 0 S +Z America/Indiana/Knox -5:46:30 - LMT 1883 N 18 12:13:30 +-6 AF C%sT 1947 +-6 AP C%sT 1962 Ap 29 2 +-5 - EST 1963 O 27 2 +-6 AF C%sT 1991 O 27 2 +-5 - EST 2006 Ap 2 2 +-6 AF C%sT +R AQ 1946 1960 - Ap lastSun 2 1 D +R AQ 1946 1954 - S lastSun 2 0 S +R AQ 1955 1956 - O lastSun 2 0 S +R AQ 1957 1960 - S lastSun 2 0 S +Z America/Indiana/Winamac -5:46:25 - LMT 1883 N 18 12:13:35 +-6 AF C%sT 1946 +-6 AQ C%sT 1961 Ap 30 2 +-5 - EST 1969 +-5 AF E%sT 1971 +-5 - EST 2006 Ap 2 2 +-6 AF C%sT 2007 Mar 11 2 +-5 AF E%sT +Z America/Indiana/Vevay -5:40:16 - LMT 1883 N 18 12:19:44 +-6 AF C%sT 1954 Ap 25 2 +-5 - EST 1969 +-5 AF E%sT 1973 +-5 - EST 2006 +-5 AF E%sT +R AR 1921 o - May 1 2 1 D +R AR 1921 o - S 1 2 0 S +R AR 1941 1961 - Ap lastSun 2 1 D +R AR 1941 o - S lastSun 2 0 S +R AR 1946 o - Jun 2 2 0 S +R AR 1950 1955 - S lastSun 2 0 S +R AR 1956 1960 - O lastSun 2 0 S +Z America/Kentucky/Louisville -5:43:2 - LMT 1883 N 18 12:16:58 +-6 AF C%sT 1921 +-6 AR C%sT 1942 +-6 AF C%sT 1946 +-6 AR C%sT 1961 Jul 23 2 +-5 - EST 1968 +-5 AF E%sT 1974 Ja 6 2 +-6 1 CDT 1974 O 27 2 +-5 AF E%sT +Z America/Kentucky/Monticello -5:39:24 - LMT 1883 N 18 12:20:36 +-6 AF C%sT 1946 +-6 - CST 1968 +-6 AF C%sT 2000 O 29 2 +-5 AF E%sT +R AS 1948 o - Ap lastSun 2 1 D +R AS 1948 o - S lastSun 2 0 S +Z America/Detroit -5:32:11 - LMT 1905 +-6 - CST 1915 May 15 2 +-5 - EST 1942 +-5 AF E%sT 1946 +-5 AS E%sT 1973 +-5 AF E%sT 1975 +-5 - EST 1975 Ap 27 2 +-5 AF E%sT +R AT 1946 o - Ap lastSun 2 1 D +R AT 1946 o - S lastSun 2 0 S +R AT 1966 o - Ap lastSun 2 1 D +R AT 1966 o - O lastSun 2 0 S +Z America/Menominee -5:50:27 - LMT 1885 S 18 12 +-6 AF C%sT 1946 +-6 AT C%sT 1969 Ap 27 2 +-5 - EST 1973 Ap 29 2 +-6 AF C%sT +R AU 1918 o - Ap 14 2 1 D +R AU 1918 o - O 27 2 0 S +R AU 1942 o - F 9 2 1 W +R AU 1945 o - Au 14 23u 1 P 
+R AU 1945 o - S 30 2 0 S +R AU 1974 1986 - Ap lastSun 2 1 D +R AU 1974 2006 - O lastSun 2 0 S +R AU 1987 2006 - Ap Sun>=1 2 1 D +R AU 2007 ma - Mar Sun>=8 2 1 D +R AU 2007 ma - N Sun>=1 2 0 S +R AV 1917 o - Ap 8 2 1 D +R AV 1917 o - S 17 2 0 S +R AV 1919 o - May 5 23 1 D +R AV 1919 o - Au 12 23 0 S +R AV 1920 1935 - May Sun>=1 23 1 D +R AV 1920 1935 - O lastSun 23 0 S +R AV 1936 1941 - May M>=9 0 1 D +R AV 1936 1941 - O M>=2 0 0 S +R AV 1946 1950 - May Sun>=8 2 1 D +R AV 1946 1950 - O Sun>=2 2 0 S +R AV 1951 1986 - Ap lastSun 2 1 D +R AV 1951 1959 - S lastSun 2 0 S +R AV 1960 1986 - O lastSun 2 0 S +R AV 1987 o - Ap Sun>=1 0:1 1 D +R AV 1987 2006 - O lastSun 0:1 0 S +R AV 1988 o - Ap Sun>=1 0:1 2 DD +R AV 1989 2006 - Ap Sun>=1 0:1 1 D +R AV 2007 2011 - Mar Sun>=8 0:1 1 D +R AV 2007 2010 - N Sun>=1 0:1 0 S +Z America/St_Johns -3:30:52 - LMT 1884 +-3:30:52 AV N%sT 1918 +-3:30:52 AU N%sT 1919 +-3:30:52 AV N%sT 1935 Mar 30 +-3:30 AV N%sT 1942 May 11 +-3:30 AU N%sT 1946 +-3:30 AV N%sT 2011 N +-3:30 AU N%sT +Z America/Goose_Bay -4:1:40 - LMT 1884 +-3:30:52 - NST 1918 +-3:30:52 AU N%sT 1919 +-3:30:52 - NST 1935 Mar 30 +-3:30 - NST 1936 +-3:30 AV N%sT 1942 May 11 +-3:30 AU N%sT 1946 +-3:30 AV N%sT 1966 Mar 15 2 +-4 AV A%sT 2011 N +-4 AU A%sT +R AW 1916 o - Ap 1 0 1 D +R AW 1916 o - O 1 0 0 S +R AW 1920 o - May 9 0 1 D +R AW 1920 o - Au 29 0 0 S +R AW 1921 o - May 6 0 1 D +R AW 1921 1922 - S 5 0 0 S +R AW 1922 o - Ap 30 0 1 D +R AW 1923 1925 - May Sun>=1 0 1 D +R AW 1923 o - S 4 0 0 S +R AW 1924 o - S 15 0 0 S +R AW 1925 o - S 28 0 0 S +R AW 1926 o - May 16 0 1 D +R AW 1926 o - S 13 0 0 S +R AW 1927 o - May 1 0 1 D +R AW 1927 o - S 26 0 0 S +R AW 1928 1931 - May Sun>=8 0 1 D +R AW 1928 o - S 9 0 0 S +R AW 1929 o - S 3 0 0 S +R AW 1930 o - S 15 0 0 S +R AW 1931 1932 - S M>=24 0 0 S +R AW 1932 o - May 1 0 1 D +R AW 1933 o - Ap 30 0 1 D +R AW 1933 o - O 2 0 0 S +R AW 1934 o - May 20 0 1 D +R AW 1934 o - S 16 0 0 S +R AW 1935 o - Jun 2 0 1 D +R AW 1935 o - S 30 0 0 S +R AW 1936 o - Jun 1 0 1 D +R AW 1936 o - S 14 0 0 S +R AW 1937 1938 - May Sun>=1 0 1 D +R AW 1937 1941 - S M>=24 0 0 S +R AW 1939 o - May 28 0 1 D +R AW 1940 1941 - May Sun>=1 0 1 D +R AW 1946 1949 - Ap lastSun 2 1 D +R AW 1946 1949 - S lastSun 2 0 S +R AW 1951 1954 - Ap lastSun 2 1 D +R AW 1951 1954 - S lastSun 2 0 S +R AW 1956 1959 - Ap lastSun 2 1 D +R AW 1956 1959 - S lastSun 2 0 S +R AW 1962 1973 - Ap lastSun 2 1 D +R AW 1962 1973 - O lastSun 2 0 S +Z America/Halifax -4:14:24 - LMT 1902 Jun 15 +-4 AW A%sT 1918 +-4 AU A%sT 1919 +-4 AW A%sT 1942 F 9 2s +-4 AU A%sT 1946 +-4 AW A%sT 1974 +-4 AU A%sT +Z America/Glace_Bay -3:59:48 - LMT 1902 Jun 15 +-4 AU A%sT 1953 +-4 AW A%sT 1954 +-4 - AST 1972 +-4 AW A%sT 1974 +-4 AU A%sT +R AX 1933 1935 - Jun Sun>=8 1 1 D +R AX 1933 1935 - S Sun>=8 1 0 S +R AX 1936 1938 - Jun Sun>=1 1 1 D +R AX 1936 1938 - S Sun>=1 1 0 S +R AX 1939 o - May 27 1 1 D +R AX 1939 1941 - S Sat>=21 1 0 S +R AX 1940 o - May 19 1 1 D +R AX 1941 o - May 4 1 1 D +R AX 1946 1972 - Ap lastSun 2 1 D +R AX 1946 1956 - S lastSun 2 0 S +R AX 1957 1972 - O lastSun 2 0 S +R AX 1993 2006 - Ap Sun>=1 0:1 1 D +R AX 1993 2006 - O lastSun 0:1 0 S +Z America/Moncton -4:19:8 - LMT 1883 D 9 +-5 - EST 1902 Jun 15 +-4 AU A%sT 1933 +-4 AX A%sT 1942 +-4 AU A%sT 1946 +-4 AX A%sT 1973 +-4 AU A%sT 1993 +-4 AX A%sT 2007 +-4 AU A%sT +Z America/Blanc-Sablon -3:48:28 - LMT 1884 +-4 AU A%sT 1970 +-4 - AST +R AY 1919 o - Mar 30 23:30 1 D +R AY 1919 o - O 26 0 0 S +R AY 1920 o - May 2 2 1 D +R AY 1920 o - S 26 0 0 S +R AY 1921 o - May 15 2 1 D +R AY 1921 o 
- S 15 2 0 S +R AY 1922 1923 - May Sun>=8 2 1 D +R AY 1922 1926 - S Sun>=15 2 0 S +R AY 1924 1927 - May Sun>=1 2 1 D +R AY 1927 1932 - S lastSun 2 0 S +R AY 1928 1931 - Ap lastSun 2 1 D +R AY 1932 o - May 1 2 1 D +R AY 1933 1940 - Ap lastSun 2 1 D +R AY 1933 o - O 1 2 0 S +R AY 1934 1939 - S lastSun 2 0 S +R AY 1945 1946 - S lastSun 2 0 S +R AY 1946 o - Ap lastSun 2 1 D +R AY 1947 1949 - Ap lastSun 0 1 D +R AY 1947 1948 - S lastSun 0 0 S +R AY 1949 o - N lastSun 0 0 S +R AY 1950 1973 - Ap lastSun 2 1 D +R AY 1950 o - N lastSun 2 0 S +R AY 1951 1956 - S lastSun 2 0 S +R AY 1957 1973 - O lastSun 2 0 S +Z America/Toronto -5:17:32 - LMT 1895 +-5 AU E%sT 1919 +-5 AY E%sT 1942 F 9 2s +-5 AU E%sT 1946 +-5 AY E%sT 1974 +-5 AU E%sT +Z America/Thunder_Bay -5:57 - LMT 1895 +-6 - CST 1910 +-5 - EST 1942 +-5 AU E%sT 1970 +-5 AY E%sT 1973 +-5 - EST 1974 +-5 AU E%sT +Z America/Nipigon -5:53:4 - LMT 1895 +-5 AU E%sT 1940 S 29 +-5 1 EDT 1942 F 9 2s +-5 AU E%sT +Z America/Rainy_River -6:18:16 - LMT 1895 +-6 AU C%sT 1940 S 29 +-6 1 CDT 1942 F 9 2s +-6 AU C%sT +Z America/Atikokan -6:6:28 - LMT 1895 +-6 AU C%sT 1940 S 29 +-6 1 CDT 1942 F 9 2s +-6 AU C%sT 1945 S 30 2 +-5 - EST +R AZ 1916 o - Ap 23 0 1 D +R AZ 1916 o - S 17 0 0 S +R AZ 1918 o - Ap 14 2 1 D +R AZ 1918 o - O 27 2 0 S +R AZ 1937 o - May 16 2 1 D +R AZ 1937 o - S 26 2 0 S +R AZ 1942 o - F 9 2 1 W +R AZ 1945 o - Au 14 23u 1 P +R AZ 1945 o - S lastSun 2 0 S +R AZ 1946 o - May 12 2 1 D +R AZ 1946 o - O 13 2 0 S +R AZ 1947 1949 - Ap lastSun 2 1 D +R AZ 1947 1949 - S lastSun 2 0 S +R AZ 1950 o - May 1 2 1 D +R AZ 1950 o - S 30 2 0 S +R AZ 1951 1960 - Ap lastSun 2 1 D +R AZ 1951 1958 - S lastSun 2 0 S +R AZ 1959 o - O lastSun 2 0 S +R AZ 1960 o - S lastSun 2 0 S +R AZ 1963 o - Ap lastSun 2 1 D +R AZ 1963 o - S 22 2 0 S +R AZ 1966 1986 - Ap lastSun 2s 1 D +R AZ 1966 2005 - O lastSun 2s 0 S +R AZ 1987 2005 - Ap Sun>=1 2s 1 D +Z America/Winnipeg -6:28:36 - LMT 1887 Jul 16 +-6 AZ C%sT 2006 +-6 AU C%sT +R Aa 1918 o - Ap 14 2 1 D +R Aa 1918 o - O 27 2 0 S +R Aa 1930 1934 - May Sun>=1 0 1 D +R Aa 1930 1934 - O Sun>=1 0 0 S +R Aa 1937 1941 - Ap Sun>=8 0 1 D +R Aa 1937 o - O Sun>=8 0 0 S +R Aa 1938 o - O Sun>=1 0 0 S +R Aa 1939 1941 - O Sun>=8 0 0 S +R Aa 1942 o - F 9 2 1 W +R Aa 1945 o - Au 14 23u 1 P +R Aa 1945 o - S lastSun 2 0 S +R Aa 1946 o - Ap Sun>=8 2 1 D +R Aa 1946 o - O Sun>=8 2 0 S +R Aa 1947 1957 - Ap lastSun 2 1 D +R Aa 1947 1957 - S lastSun 2 0 S +R Aa 1959 o - Ap lastSun 2 1 D +R Aa 1959 o - O lastSun 2 0 S +R Ab 1957 o - Ap lastSun 2 1 D +R Ab 1957 o - O lastSun 2 0 S +R Ab 1959 1961 - Ap lastSun 2 1 D +R Ab 1959 o - O lastSun 2 0 S +R Ab 1960 1961 - S lastSun 2 0 S +Z America/Regina -6:58:36 - LMT 1905 S +-7 Aa M%sT 1960 Ap lastSun 2 +-6 - CST +Z America/Swift_Current -7:11:20 - LMT 1905 S +-7 AU M%sT 1946 Ap lastSun 2 +-7 Aa M%sT 1950 +-7 Ab M%sT 1972 Ap lastSun 2 +-6 - CST +R Ac 1918 1919 - Ap Sun>=8 2 1 D +R Ac 1918 o - O 27 2 0 S +R Ac 1919 o - May 27 2 0 S +R Ac 1920 1923 - Ap lastSun 2 1 D +R Ac 1920 o - O lastSun 2 0 S +R Ac 1921 1923 - S lastSun 2 0 S +R Ac 1942 o - F 9 2 1 W +R Ac 1945 o - Au 14 23u 1 P +R Ac 1945 o - S lastSun 2 0 S +R Ac 1947 o - Ap lastSun 2 1 D +R Ac 1947 o - S lastSun 2 0 S +R Ac 1967 o - Ap lastSun 2 1 D +R Ac 1967 o - O lastSun 2 0 S +R Ac 1969 o - Ap lastSun 2 1 D +R Ac 1969 o - O lastSun 2 0 S +R Ac 1972 1986 - Ap lastSun 2 1 D +R Ac 1972 2006 - O lastSun 2 0 S +Z America/Edmonton -7:33:52 - LMT 1906 S +-7 Ac M%sT 1987 +-7 AU M%sT +R Ad 1918 o - Ap 14 2 1 D +R Ad 1918 o - O 27 2 0 S +R Ad 1942 o - F 9 2 1 W +R Ad 
1945 o - Au 14 23u 1 P +R Ad 1945 o - S 30 2 0 S +R Ad 1946 1986 - Ap lastSun 2 1 D +R Ad 1946 o - O 13 2 0 S +R Ad 1947 1961 - S lastSun 2 0 S +R Ad 1962 2006 - O lastSun 2 0 S +Z America/Vancouver -8:12:28 - LMT 1884 +-8 Ad P%sT 1987 +-8 AU P%sT +Z America/Dawson_Creek -8:0:56 - LMT 1884 +-8 AU P%sT 1947 +-8 Ad P%sT 1972 Au 30 2 +-7 - MST +Z America/Fort_Nelson -8:10:47 - LMT 1884 +-8 Ad P%sT 1946 +-8 - PST 1947 +-8 Ad P%sT 1987 +-8 AU P%sT 2015 Mar 8 2 +-7 - MST +Z America/Creston -7:46:4 - LMT 1884 +-7 - MST 1916 O +-8 - PST 1918 Jun 2 +-7 - MST +R Ae 1918 o - Ap 14 2 1 D +R Ae 1918 o - O 27 2 0 S +R Ae 1919 o - May 25 2 1 D +R Ae 1919 o - N 1 0 0 S +R Ae 1942 o - F 9 2 1 W +R Ae 1945 o - Au 14 23u 1 P +R Ae 1945 o - S 30 2 0 S +R Ae 1965 o - Ap lastSun 0 2 DD +R Ae 1965 o - O lastSun 2 0 S +R Ae 1980 1986 - Ap lastSun 2 1 D +R Ae 1980 2006 - O lastSun 2 0 S +R Ae 1987 2006 - Ap Sun>=1 2 1 D +Z America/Pangnirtung 0 - -00 1921 +-4 Ae A%sT 1995 Ap Sun>=1 2 +-5 AU E%sT 1999 O 31 2 +-6 AU C%sT 2000 O 29 2 +-5 AU E%sT +Z America/Iqaluit 0 - -00 1942 Au +-5 Ae E%sT 1999 O 31 2 +-6 AU C%sT 2000 O 29 2 +-5 AU E%sT +Z America/Resolute 0 - -00 1947 Au 31 +-6 Ae C%sT 2000 O 29 2 +-5 - EST 2001 Ap 1 3 +-6 AU C%sT 2006 O 29 2 +-5 - EST 2007 Mar 11 3 +-6 AU C%sT +Z America/Rankin_Inlet 0 - -00 1957 +-6 Ae C%sT 2000 O 29 2 +-5 - EST 2001 Ap 1 3 +-6 AU C%sT +Z America/Cambridge_Bay 0 - -00 1920 +-7 Ae M%sT 1999 O 31 2 +-6 AU C%sT 2000 O 29 2 +-5 - EST 2000 N 5 +-6 - CST 2001 Ap 1 3 +-7 AU M%sT +Z America/Yellowknife 0 - -00 1935 +-7 Ae M%sT 1980 +-7 AU M%sT +Z America/Inuvik 0 - -00 1953 +-8 Ae P%sT 1979 Ap lastSun 2 +-7 Ae M%sT 1980 +-7 AU M%sT +Z America/Whitehorse -9:0:12 - LMT 1900 Au 20 +-9 Ae Y%sT 1967 May 28 +-8 Ae P%sT 1980 +-8 AU P%sT +Z America/Dawson -9:17:40 - LMT 1900 Au 20 +-9 Ae Y%sT 1973 O 28 +-8 Ae P%sT 1980 +-8 AU P%sT +R Af 1939 o - F 5 0 1 D +R Af 1939 o - Jun 25 0 0 S +R Af 1940 o - D 9 0 1 D +R Af 1941 o - Ap 1 0 0 S +R Af 1943 o - D 16 0 1 W +R Af 1944 o - May 1 0 0 S +R Af 1950 o - F 12 0 1 D +R Af 1950 o - Jul 30 0 0 S +R Af 1996 2000 - Ap Sun>=1 2 1 D +R Af 1996 2000 - O lastSun 2 0 S +R Af 2001 o - May Sun>=1 2 1 D +R Af 2001 o - S lastSun 2 0 S +R Af 2002 ma - Ap Sun>=1 2 1 D +R Af 2002 ma - O lastSun 2 0 S +Z America/Cancun -5:47:4 - LMT 1922 Ja 1 0:12:56 +-6 - CST 1981 D 23 +-5 Af E%sT 1998 Au 2 2 +-6 Af C%sT 2015 F 1 2 +-5 - EST +Z America/Merida -5:58:28 - LMT 1922 Ja 1 0:1:32 +-6 - CST 1981 D 23 +-5 - EST 1982 D 2 +-6 Af C%sT +Z America/Matamoros -6:40 - LMT 1921 D 31 23:20 +-6 - CST 1988 +-6 AF C%sT 1989 +-6 Af C%sT 2010 +-6 AF C%sT +Z America/Monterrey -6:41:16 - LMT 1921 D 31 23:18:44 +-6 - CST 1988 +-6 AF C%sT 1989 +-6 Af C%sT +Z America/Mexico_City -6:36:36 - LMT 1922 Ja 1 0:23:24 +-7 - MST 1927 Jun 10 23 +-6 - CST 1930 N 15 +-7 - MST 1931 May 1 23 +-6 - CST 1931 O +-7 - MST 1932 Ap +-6 Af C%sT 2001 S 30 2 +-6 - CST 2002 F 20 +-6 Af C%sT +Z America/Ojinaga -6:57:40 - LMT 1922 Ja 1 0:2:20 +-7 - MST 1927 Jun 10 23 +-6 - CST 1930 N 15 +-7 - MST 1931 May 1 23 +-6 - CST 1931 O +-7 - MST 1932 Ap +-6 - CST 1996 +-6 Af C%sT 1998 +-6 - CST 1998 Ap Sun>=1 3 +-7 Af M%sT 2010 +-7 AF M%sT +Z America/Chihuahua -7:4:20 - LMT 1921 D 31 23:55:40 +-7 - MST 1927 Jun 10 23 +-6 - CST 1930 N 15 +-7 - MST 1931 May 1 23 +-6 - CST 1931 O +-7 - MST 1932 Ap +-6 - CST 1996 +-6 Af C%sT 1998 +-6 - CST 1998 Ap Sun>=1 3 +-7 Af M%sT +Z America/Hermosillo -7:23:52 - LMT 1921 D 31 23:36:8 +-7 - MST 1927 Jun 10 23 +-6 - CST 1930 N 15 +-7 - MST 1931 May 1 23 +-6 - CST 1931 O +-7 - MST 1932 Ap +-6 - 
CST 1942 Ap 24 +-7 - MST 1949 Ja 14 +-8 - PST 1970 +-7 Af M%sT 1999 +-7 - MST +Z America/Mazatlan -7:5:40 - LMT 1921 D 31 23:54:20 +-7 - MST 1927 Jun 10 23 +-6 - CST 1930 N 15 +-7 - MST 1931 May 1 23 +-6 - CST 1931 O +-7 - MST 1932 Ap +-6 - CST 1942 Ap 24 +-7 - MST 1949 Ja 14 +-8 - PST 1970 +-7 Af M%sT +Z America/Bahia_Banderas -7:1 - LMT 1921 D 31 23:59 +-7 - MST 1927 Jun 10 23 +-6 - CST 1930 N 15 +-7 - MST 1931 May 1 23 +-6 - CST 1931 O +-7 - MST 1932 Ap +-6 - CST 1942 Ap 24 +-7 - MST 1949 Ja 14 +-8 - PST 1970 +-7 Af M%sT 2010 Ap 4 2 +-6 Af C%sT +Z America/Tijuana -7:48:4 - LMT 1922 Ja 1 0:11:56 +-7 - MST 1924 +-8 - PST 1927 Jun 10 23 +-7 - MST 1930 N 15 +-8 - PST 1931 Ap +-8 1 PDT 1931 S 30 +-8 - PST 1942 Ap 24 +-8 1 PWT 1945 Au 14 23u +-8 1 PPT 1945 N 12 +-8 - PST 1948 Ap 5 +-8 1 PDT 1949 Ja 14 +-8 - PST 1954 +-8 AJ P%sT 1961 +-8 - PST 1976 +-8 AF P%sT 1996 +-8 Af P%sT 2001 +-8 AF P%sT 2002 F 20 +-8 Af P%sT 2010 +-8 AF P%sT +R Ag 1964 1975 - O lastSun 2 0 S +R Ag 1964 1975 - Ap lastSun 2 1 D +Z America/Nassau -5:9:30 - LMT 1912 Mar 2 +-5 Ag E%sT 1976 +-5 AF E%sT +R Ah 1977 o - Jun 12 2 1 D +R Ah 1977 1978 - O Sun>=1 2 0 S +R Ah 1978 1980 - Ap Sun>=15 2 1 D +R Ah 1979 o - S 30 2 0 S +R Ah 1980 o - S 25 2 0 S +Z America/Barbados -3:58:29 - LMT 1924 +-3:58:29 - BMT 1932 +-4 Ah A%sT +R Ai 1918 1942 - O Sun>=2 0 0:30 -0530 +R Ai 1919 1943 - F Sun>=9 0 0 CST +R Ai 1973 o - D 5 0 1 CDT +R Ai 1974 o - F 9 0 0 CST +R Ai 1982 o - D 18 0 1 CDT +R Ai 1983 o - F 12 0 0 CST +Z America/Belize -5:52:48 - LMT 1912 Ap +-6 Ai %s +Z Atlantic/Bermuda -4:19:18 - LMT 1930 Ja 1 2 +-4 - AST 1974 Ap 28 2 +-4 AU A%sT 1976 +-4 AF A%sT +R Aj 1979 1980 - F lastSun 0 1 D +R Aj 1979 1980 - Jun Sun>=1 0 0 S +R Aj 1991 1992 - Ja Sat>=15 0 1 D +R Aj 1991 o - Jul 1 0 0 S +R Aj 1992 o - Mar 15 0 0 S +Z America/Costa_Rica -5:36:13 - LMT 1890 +-5:36:13 - SJMT 1921 Ja 15 +-6 Aj C%sT +R Ak 1928 o - Jun 10 0 1 D +R Ak 1928 o - O 10 0 0 S +R Ak 1940 1942 - Jun Sun>=1 0 1 D +R Ak 1940 1942 - S Sun>=1 0 0 S +R Ak 1945 1946 - Jun Sun>=1 0 1 D +R Ak 1945 1946 - S Sun>=1 0 0 S +R Ak 1965 o - Jun 1 0 1 D +R Ak 1965 o - S 30 0 0 S +R Ak 1966 o - May 29 0 1 D +R Ak 1966 o - O 2 0 0 S +R Ak 1967 o - Ap 8 0 1 D +R Ak 1967 1968 - S Sun>=8 0 0 S +R Ak 1968 o - Ap 14 0 1 D +R Ak 1969 1977 - Ap lastSun 0 1 D +R Ak 1969 1971 - O lastSun 0 0 S +R Ak 1972 1974 - O 8 0 0 S +R Ak 1975 1977 - O lastSun 0 0 S +R Ak 1978 o - May 7 0 1 D +R Ak 1978 1990 - O Sun>=8 0 0 S +R Ak 1979 1980 - Mar Sun>=15 0 1 D +R Ak 1981 1985 - May Sun>=5 0 1 D +R Ak 1986 1989 - Mar Sun>=14 0 1 D +R Ak 1990 1997 - Ap Sun>=1 0 1 D +R Ak 1991 1995 - O Sun>=8 0s 0 S +R Ak 1996 o - O 6 0s 0 S +R Ak 1997 o - O 12 0s 0 S +R Ak 1998 1999 - Mar lastSun 0s 1 D +R Ak 1998 2003 - O lastSun 0s 0 S +R Ak 2000 2003 - Ap Sun>=1 0s 1 D +R Ak 2004 o - Mar lastSun 0s 1 D +R Ak 2006 2010 - O lastSun 0s 0 S +R Ak 2007 o - Mar Sun>=8 0s 1 D +R Ak 2008 o - Mar Sun>=15 0s 1 D +R Ak 2009 2010 - Mar Sun>=8 0s 1 D +R Ak 2011 o - Mar Sun>=15 0s 1 D +R Ak 2011 o - N 13 0s 0 S +R Ak 2012 o - Ap 1 0s 1 D +R Ak 2012 ma - N Sun>=1 0s 0 S +R Ak 2013 ma - Mar Sun>=8 0s 1 D +Z America/Havana -5:29:28 - LMT 1890 +-5:29:36 - HMT 1925 Jul 19 12 +-5 Ak C%sT +R Al 1966 o - O 30 0 1 EDT +R Al 1967 o - F 28 0 0 EST +R Al 1969 1973 - O lastSun 0 0:30 -0430 +R Al 1970 o - F 21 0 0 EST +R Al 1971 o - Ja 20 0 0 EST +R Al 1972 1974 - Ja 21 0 0 EST +Z America/Santo_Domingo -4:39:36 - LMT 1890 +-4:40 - SDMT 1933 Ap 1 12 +-5 Al %s 1974 O 27 +-4 - AST 2000 O 29 2 +-5 AF E%sT 2000 D 3 1 +-4 - AST +R Am 1987 1988 - May 
Sun>=1 0 1 D +R Am 1987 1988 - S lastSun 0 0 S +Z America/El_Salvador -5:56:48 - LMT 1921 +-6 Am C%sT +R An 1973 o - N 25 0 1 D +R An 1974 o - F 24 0 0 S +R An 1983 o - May 21 0 1 D +R An 1983 o - S 22 0 0 S +R An 1991 o - Mar 23 0 1 D +R An 1991 o - S 7 0 0 S +R An 2006 o - Ap 30 0 1 D +R An 2006 o - O 1 0 0 S +Z America/Guatemala -6:2:4 - LMT 1918 O 5 +-6 An C%sT +R Ao 1983 o - May 8 0 1 D +R Ao 1984 1987 - Ap lastSun 0 1 D +R Ao 1983 1987 - O lastSun 0 0 S +R Ao 1988 1997 - Ap Sun>=1 1s 1 D +R Ao 1988 1997 - O lastSun 1s 0 S +R Ao 2005 2006 - Ap Sun>=1 0 1 D +R Ao 2005 2006 - O lastSun 0 0 S +R Ao 2012 2015 - Mar Sun>=8 2 1 D +R Ao 2012 2015 - N Sun>=1 2 0 S +R Ao 2017 ma - Mar Sun>=8 2 1 D +R Ao 2017 ma - N Sun>=1 2 0 S +Z America/Port-au-Prince -4:49:20 - LMT 1890 +-4:49 - PPMT 1917 Ja 24 12 +-5 Ao E%sT +R Ap 1987 1988 - May Sun>=1 0 1 D +R Ap 1987 1988 - S lastSun 0 0 S +R Ap 2006 o - May Sun>=1 0 1 D +R Ap 2006 o - Au M>=1 0 0 S +Z America/Tegucigalpa -5:48:52 - LMT 1921 Ap +-6 Ap C%sT +Z America/Jamaica -5:7:10 - LMT 1890 +-5:7:10 - KMT 1912 F +-5 - EST 1974 +-5 AF E%sT 1984 +-5 - EST +Z America/Martinique -4:4:20 - LMT 1890 +-4:4:20 - FFMT 1911 May +-4 - AST 1980 Ap 6 +-4 1 ADT 1980 S 28 +-4 - AST +R Aq 1979 1980 - Mar Sun>=16 0 1 D +R Aq 1979 1980 - Jun M>=23 0 0 S +R Aq 2005 o - Ap 10 0 1 D +R Aq 2005 o - O Sun>=1 0 0 S +R Aq 2006 o - Ap 30 2 1 D +R Aq 2006 o - O Sun>=1 1 0 S +Z America/Managua -5:45:8 - LMT 1890 +-5:45:12 - MMT 1934 Jun 23 +-6 - CST 1973 May +-5 - EST 1975 F 16 +-6 Aq C%sT 1992 Ja 1 4 +-5 - EST 1992 S 24 +-6 - CST 1993 +-5 - EST 1997 +-6 Aq C%sT +Z America/Panama -5:18:8 - LMT 1890 +-5:19:36 - CMT 1908 Ap 22 +-5 - EST +Li America/Panama America/Cayman +Z America/Puerto_Rico -4:24:25 - LMT 1899 Mar 28 12 +-4 - AST 1942 May 3 +-4 AF A%sT 1946 +-4 - AST +Z America/Miquelon -3:44:40 - LMT 1911 May 15 +-4 - AST 1980 May +-3 - -03 1987 +-3 AU -03/-02 +Z America/Grand_Turk -4:44:32 - LMT 1890 +-5:7:10 - KMT 1912 F +-5 - EST 1979 +-5 AF E%sT 2015 N Sun>=1 2 +-4 - AST 2018 Mar 11 3 +-5 AF E%sT +R Ar 1930 o - D 1 0 1 - +R Ar 1931 o - Ap 1 0 0 - +R Ar 1931 o - O 15 0 1 - +R Ar 1932 1940 - Mar 1 0 0 - +R Ar 1932 1939 - N 1 0 1 - +R Ar 1940 o - Jul 1 0 1 - +R Ar 1941 o - Jun 15 0 0 - +R Ar 1941 o - O 15 0 1 - +R Ar 1943 o - Au 1 0 0 - +R Ar 1943 o - O 15 0 1 - +R Ar 1946 o - Mar 1 0 0 - +R Ar 1946 o - O 1 0 1 - +R Ar 1963 o - O 1 0 0 - +R Ar 1963 o - D 15 0 1 - +R Ar 1964 1966 - Mar 1 0 0 - +R Ar 1964 1966 - O 15 0 1 - +R Ar 1967 o - Ap 2 0 0 - +R Ar 1967 1968 - O Sun>=1 0 1 - +R Ar 1968 1969 - Ap Sun>=1 0 0 - +R Ar 1974 o - Ja 23 0 1 - +R Ar 1974 o - May 1 0 0 - +R Ar 1988 o - D 1 0 1 - +R Ar 1989 1993 - Mar Sun>=1 0 0 - +R Ar 1989 1992 - O Sun>=15 0 1 - +R Ar 1999 o - O Sun>=1 0 1 - +R Ar 2000 o - Mar 3 0 0 - +R Ar 2007 o - D 30 0 1 - +R Ar 2008 2009 - Mar Sun>=15 0 0 - +R Ar 2008 o - O Sun>=15 0 1 - +Z America/Argentina/Buenos_Aires -3:53:48 - LMT 1894 O 31 +-4:16:48 - CMT 1920 May +-4 - -04 1930 D +-4 Ar -04/-03 1969 O 5 +-3 Ar -03/-02 1999 O 3 +-4 Ar -04/-03 2000 Mar 3 +-3 Ar -03/-02 +Z America/Argentina/Cordoba -4:16:48 - LMT 1894 O 31 +-4:16:48 - CMT 1920 May +-4 - -04 1930 D +-4 Ar -04/-03 1969 O 5 +-3 Ar -03/-02 1991 Mar 3 +-4 - -04 1991 O 20 +-3 Ar -03/-02 1999 O 3 +-4 Ar -04/-03 2000 Mar 3 +-3 Ar -03/-02 +Z America/Argentina/Salta -4:21:40 - LMT 1894 O 31 +-4:16:48 - CMT 1920 May +-4 - -04 1930 D +-4 Ar -04/-03 1969 O 5 +-3 Ar -03/-02 1991 Mar 3 +-4 - -04 1991 O 20 +-3 Ar -03/-02 1999 O 3 +-4 Ar -04/-03 2000 Mar 3 +-3 Ar -03/-02 2008 O 18 +-3 - -03 +Z 
America/Argentina/Tucuman -4:20:52 - LMT 1894 O 31 +-4:16:48 - CMT 1920 May +-4 - -04 1930 D +-4 Ar -04/-03 1969 O 5 +-3 Ar -03/-02 1991 Mar 3 +-4 - -04 1991 O 20 +-3 Ar -03/-02 1999 O 3 +-4 Ar -04/-03 2000 Mar 3 +-3 - -03 2004 Jun +-4 - -04 2004 Jun 13 +-3 Ar -03/-02 +Z America/Argentina/La_Rioja -4:27:24 - LMT 1894 O 31 +-4:16:48 - CMT 1920 May +-4 - -04 1930 D +-4 Ar -04/-03 1969 O 5 +-3 Ar -03/-02 1991 Mar +-4 - -04 1991 May 7 +-3 Ar -03/-02 1999 O 3 +-4 Ar -04/-03 2000 Mar 3 +-3 - -03 2004 Jun +-4 - -04 2004 Jun 20 +-3 Ar -03/-02 2008 O 18 +-3 - -03 +Z America/Argentina/San_Juan -4:34:4 - LMT 1894 O 31 +-4:16:48 - CMT 1920 May +-4 - -04 1930 D +-4 Ar -04/-03 1969 O 5 +-3 Ar -03/-02 1991 Mar +-4 - -04 1991 May 7 +-3 Ar -03/-02 1999 O 3 +-4 Ar -04/-03 2000 Mar 3 +-3 - -03 2004 May 31 +-4 - -04 2004 Jul 25 +-3 Ar -03/-02 2008 O 18 +-3 - -03 +Z America/Argentina/Jujuy -4:21:12 - LMT 1894 O 31 +-4:16:48 - CMT 1920 May +-4 - -04 1930 D +-4 Ar -04/-03 1969 O 5 +-3 Ar -03/-02 1990 Mar 4 +-4 - -04 1990 O 28 +-4 1 -03 1991 Mar 17 +-4 - -04 1991 O 6 +-3 1 -02 1992 +-3 Ar -03/-02 1999 O 3 +-4 Ar -04/-03 2000 Mar 3 +-3 Ar -03/-02 2008 O 18 +-3 - -03 +Z America/Argentina/Catamarca -4:23:8 - LMT 1894 O 31 +-4:16:48 - CMT 1920 May +-4 - -04 1930 D +-4 Ar -04/-03 1969 O 5 +-3 Ar -03/-02 1991 Mar 3 +-4 - -04 1991 O 20 +-3 Ar -03/-02 1999 O 3 +-4 Ar -04/-03 2000 Mar 3 +-3 - -03 2004 Jun +-4 - -04 2004 Jun 20 +-3 Ar -03/-02 2008 O 18 +-3 - -03 +Z America/Argentina/Mendoza -4:35:16 - LMT 1894 O 31 +-4:16:48 - CMT 1920 May +-4 - -04 1930 D +-4 Ar -04/-03 1969 O 5 +-3 Ar -03/-02 1990 Mar 4 +-4 - -04 1990 O 15 +-4 1 -03 1991 Mar +-4 - -04 1991 O 15 +-4 1 -03 1992 Mar +-4 - -04 1992 O 18 +-3 Ar -03/-02 1999 O 3 +-4 Ar -04/-03 2000 Mar 3 +-3 - -03 2004 May 23 +-4 - -04 2004 S 26 +-3 Ar -03/-02 2008 O 18 +-3 - -03 +R As 2008 2009 - Mar Sun>=8 0 0 - +R As 2007 2008 - O Sun>=8 0 1 - +Z America/Argentina/San_Luis -4:25:24 - LMT 1894 O 31 +-4:16:48 - CMT 1920 May +-4 - -04 1930 D +-4 Ar -04/-03 1969 O 5 +-3 Ar -03/-02 1990 +-3 1 -02 1990 Mar 14 +-4 - -04 1990 O 15 +-4 1 -03 1991 Mar +-4 - -04 1991 Jun +-3 - -03 1999 O 3 +-4 1 -03 2000 Mar 3 +-3 - -03 2004 May 31 +-4 - -04 2004 Jul 25 +-3 Ar -03/-02 2008 Ja 21 +-4 As -04/-03 2009 O 11 +-3 - -03 +Z America/Argentina/Rio_Gallegos -4:36:52 - LMT 1894 O 31 +-4:16:48 - CMT 1920 May +-4 - -04 1930 D +-4 Ar -04/-03 1969 O 5 +-3 Ar -03/-02 1999 O 3 +-4 Ar -04/-03 2000 Mar 3 +-3 - -03 2004 Jun +-4 - -04 2004 Jun 20 +-3 Ar -03/-02 2008 O 18 +-3 - -03 +Z America/Argentina/Ushuaia -4:33:12 - LMT 1894 O 31 +-4:16:48 - CMT 1920 May +-4 - -04 1930 D +-4 Ar -04/-03 1969 O 5 +-3 Ar -03/-02 1999 O 3 +-4 Ar -04/-03 2000 Mar 3 +-3 - -03 2004 May 30 +-4 - -04 2004 Jun 20 +-3 Ar -03/-02 2008 O 18 +-3 - -03 +Li America/Curacao America/Aruba +Z America/La_Paz -4:32:36 - LMT 1890 +-4:32:36 - CMT 1931 O 15 +-4:32:36 1 BST 1932 Mar 21 +-4 - -04 +R At 1931 o - O 3 11 1 - +R At 1932 1933 - Ap 1 0 0 - +R At 1932 o - O 3 0 1 - +R At 1949 1952 - D 1 0 1 - +R At 1950 o - Ap 16 1 0 - +R At 1951 1952 - Ap 1 0 0 - +R At 1953 o - Mar 1 0 0 - +R At 1963 o - D 9 0 1 - +R At 1964 o - Mar 1 0 0 - +R At 1965 o - Ja 31 0 1 - +R At 1965 o - Mar 31 0 0 - +R At 1965 o - D 1 0 1 - +R At 1966 1968 - Mar 1 0 0 - +R At 1966 1967 - N 1 0 1 - +R At 1985 o - N 2 0 1 - +R At 1986 o - Mar 15 0 0 - +R At 1986 o - O 25 0 1 - +R At 1987 o - F 14 0 0 - +R At 1987 o - O 25 0 1 - +R At 1988 o - F 7 0 0 - +R At 1988 o - O 16 0 1 - +R At 1989 o - Ja 29 0 0 - +R At 1989 o - O 15 0 1 - +R At 1990 o - F 11 0 0 - +R At 1990 o - O 
21 0 1 - +R At 1991 o - F 17 0 0 - +R At 1991 o - O 20 0 1 - +R At 1992 o - F 9 0 0 - +R At 1992 o - O 25 0 1 - +R At 1993 o - Ja 31 0 0 - +R At 1993 1995 - O Sun>=11 0 1 - +R At 1994 1995 - F Sun>=15 0 0 - +R At 1996 o - F 11 0 0 - +R At 1996 o - O 6 0 1 - +R At 1997 o - F 16 0 0 - +R At 1997 o - O 6 0 1 - +R At 1998 o - Mar 1 0 0 - +R At 1998 o - O 11 0 1 - +R At 1999 o - F 21 0 0 - +R At 1999 o - O 3 0 1 - +R At 2000 o - F 27 0 0 - +R At 2000 2001 - O Sun>=8 0 1 - +R At 2001 2006 - F Sun>=15 0 0 - +R At 2002 o - N 3 0 1 - +R At 2003 o - O 19 0 1 - +R At 2004 o - N 2 0 1 - +R At 2005 o - O 16 0 1 - +R At 2006 o - N 5 0 1 - +R At 2007 o - F 25 0 0 - +R At 2007 o - O Sun>=8 0 1 - +R At 2008 2017 - O Sun>=15 0 1 - +R At 2008 2011 - F Sun>=15 0 0 - +R At 2012 o - F Sun>=22 0 0 - +R At 2013 2014 - F Sun>=15 0 0 - +R At 2015 o - F Sun>=22 0 0 - +R At 2016 2022 - F Sun>=15 0 0 - +R At 2018 ma - N Sun>=1 0 1 - +R At 2023 o - F Sun>=22 0 0 - +R At 2024 2025 - F Sun>=15 0 0 - +R At 2026 o - F Sun>=22 0 0 - +R At 2027 2033 - F Sun>=15 0 0 - +R At 2034 o - F Sun>=22 0 0 - +R At 2035 2036 - F Sun>=15 0 0 - +R At 2037 o - F Sun>=22 0 0 - +R At 2038 ma - F Sun>=15 0 0 - +Z America/Noronha -2:9:40 - LMT 1914 +-2 At -02/-01 1990 S 17 +-2 - -02 1999 S 30 +-2 At -02/-01 2000 O 15 +-2 - -02 2001 S 13 +-2 At -02/-01 2002 O +-2 - -02 +Z America/Belem -3:13:56 - LMT 1914 +-3 At -03/-02 1988 S 12 +-3 - -03 +Z America/Santarem -3:38:48 - LMT 1914 +-4 At -04/-03 1988 S 12 +-4 - -04 2008 Jun 24 +-3 - -03 +Z America/Fortaleza -2:34 - LMT 1914 +-3 At -03/-02 1990 S 17 +-3 - -03 1999 S 30 +-3 At -03/-02 2000 O 22 +-3 - -03 2001 S 13 +-3 At -03/-02 2002 O +-3 - -03 +Z America/Recife -2:19:36 - LMT 1914 +-3 At -03/-02 1990 S 17 +-3 - -03 1999 S 30 +-3 At -03/-02 2000 O 15 +-3 - -03 2001 S 13 +-3 At -03/-02 2002 O +-3 - -03 +Z America/Araguaina -3:12:48 - LMT 1914 +-3 At -03/-02 1990 S 17 +-3 - -03 1995 S 14 +-3 At -03/-02 2003 S 24 +-3 - -03 2012 O 21 +-3 At -03/-02 2013 S +-3 - -03 +Z America/Maceio -2:22:52 - LMT 1914 +-3 At -03/-02 1990 S 17 +-3 - -03 1995 O 13 +-3 At -03/-02 1996 S 4 +-3 - -03 1999 S 30 +-3 At -03/-02 2000 O 22 +-3 - -03 2001 S 13 +-3 At -03/-02 2002 O +-3 - -03 +Z America/Bahia -2:34:4 - LMT 1914 +-3 At -03/-02 2003 S 24 +-3 - -03 2011 O 16 +-3 At -03/-02 2012 O 21 +-3 - -03 +Z America/Sao_Paulo -3:6:28 - LMT 1914 +-3 At -03/-02 1963 O 23 +-3 1 -02 1964 +-3 At -03/-02 +Z America/Campo_Grande -3:38:28 - LMT 1914 +-4 At -04/-03 +Z America/Cuiaba -3:44:20 - LMT 1914 +-4 At -04/-03 2003 S 24 +-4 - -04 2004 O +-4 At -04/-03 +Z America/Porto_Velho -4:15:36 - LMT 1914 +-4 At -04/-03 1988 S 12 +-4 - -04 +Z America/Boa_Vista -4:2:40 - LMT 1914 +-4 At -04/-03 1988 S 12 +-4 - -04 1999 S 30 +-4 At -04/-03 2000 O 15 +-4 - -04 +Z America/Manaus -4:0:4 - LMT 1914 +-4 At -04/-03 1988 S 12 +-4 - -04 1993 S 28 +-4 At -04/-03 1994 S 22 +-4 - -04 +Z America/Eirunepe -4:39:28 - LMT 1914 +-5 At -05/-04 1988 S 12 +-5 - -05 1993 S 28 +-5 At -05/-04 1994 S 22 +-5 - -05 2008 Jun 24 +-4 - -04 2013 N 10 +-5 - -05 +Z America/Rio_Branco -4:31:12 - LMT 1914 +-5 At -05/-04 1988 S 12 +-5 - -05 2008 Jun 24 +-4 - -04 2013 N 10 +-5 - -05 +R Au 1927 1931 - S 1 0 1 - +R Au 1928 1932 - Ap 1 0 0 - +R Au 1968 o - N 3 4u 1 - +R Au 1969 o - Mar 30 3u 0 - +R Au 1969 o - N 23 4u 1 - +R Au 1970 o - Mar 29 3u 0 - +R Au 1971 o - Mar 14 3u 0 - +R Au 1970 1972 - O Sun>=9 4u 1 - +R Au 1972 1986 - Mar Sun>=9 3u 0 - +R Au 1973 o - S 30 4u 1 - +R Au 1974 1987 - O Sun>=9 4u 1 - +R Au 1987 o - Ap 12 3u 0 - +R Au 1988 1990 - Mar Sun>=9 3u 0 - +R Au 1988 
1989 - O Sun>=9 4u 1 - +R Au 1990 o - S 16 4u 1 - +R Au 1991 1996 - Mar Sun>=9 3u 0 - +R Au 1991 1997 - O Sun>=9 4u 1 - +R Au 1997 o - Mar 30 3u 0 - +R Au 1998 o - Mar Sun>=9 3u 0 - +R Au 1998 o - S 27 4u 1 - +R Au 1999 o - Ap 4 3u 0 - +R Au 1999 2010 - O Sun>=9 4u 1 - +R Au 2000 2007 - Mar Sun>=9 3u 0 - +R Au 2008 o - Mar 30 3u 0 - +R Au 2009 o - Mar Sun>=9 3u 0 - +R Au 2010 o - Ap Sun>=1 3u 0 - +R Au 2011 o - May Sun>=2 3u 0 - +R Au 2011 o - Au Sun>=16 4u 1 - +R Au 2012 2014 - Ap Sun>=23 3u 0 - +R Au 2012 2014 - S Sun>=2 4u 1 - +R Au 2016 ma - May Sun>=9 3u 0 - +R Au 2016 ma - Au Sun>=9 4u 1 - +Z America/Santiago -4:42:46 - LMT 1890 +-4:42:46 - SMT 1910 Ja 10 +-5 - -05 1916 Jul +-4:42:46 - SMT 1918 S 10 +-4 - -04 1919 Jul +-4:42:46 - SMT 1927 S +-5 Au -05/-04 1932 S +-4 - -04 1942 Jun +-5 - -05 1942 Au +-4 - -04 1946 Jul 15 +-4 1 -03 1946 S +-4 - -04 1947 Ap +-5 - -05 1947 May 21 23 +-4 Au -04/-03 +Z America/Punta_Arenas -4:43:40 - LMT 1890 +-4:42:46 - SMT 1910 Ja 10 +-5 - -05 1916 Jul +-4:42:46 - SMT 1918 S 10 +-4 - -04 1919 Jul +-4:42:46 - SMT 1927 S +-5 Au -05/-04 1932 S +-4 - -04 1942 Jun +-5 - -05 1942 Au +-4 - -04 1947 Ap +-5 - -05 1947 May 21 23 +-4 Au -04/-03 2016 D 4 +-3 - -03 +Z Pacific/Easter -7:17:28 - LMT 1890 +-7:17:28 - EMT 1932 S +-7 Au -07/-06 1982 Mar 14 3u +-6 Au -06/-05 +Z Antarctica/Palmer 0 - -00 1965 +-4 Ar -04/-03 1969 O 5 +-3 Ar -03/-02 1982 May +-4 Au -04/-03 2016 D 4 +-3 - -03 +R Av 1992 o - May 3 0 1 - +R Av 1993 o - Ap 4 0 0 - +Z America/Bogota -4:56:16 - LMT 1884 Mar 13 +-4:56:16 - BMT 1914 N 23 +-5 Av -05/-04 +Z America/Curacao -4:35:47 - LMT 1912 F 12 +-4:30 - -0430 1965 +-4 - AST +Li America/Curacao America/Lower_Princes +Li America/Curacao America/Kralendijk +R Aw 1992 o - N 28 0 1 - +R Aw 1993 o - F 5 0 0 - +Z America/Guayaquil -5:19:20 - LMT 1890 +-5:14 - QMT 1931 +-5 Aw -05/-04 +Z Pacific/Galapagos -5:58:24 - LMT 1931 +-5 - -05 1986 +-6 Aw -06/-05 +R Ax 1937 1938 - S lastSun 0 1 - +R Ax 1938 1942 - Mar Sun>=19 0 0 - +R Ax 1939 o - O 1 0 1 - +R Ax 1940 1942 - S lastSun 0 1 - +R Ax 1943 o - Ja 1 0 0 - +R Ax 1983 o - S lastSun 0 1 - +R Ax 1984 1985 - Ap lastSun 0 0 - +R Ax 1984 o - S 16 0 1 - +R Ax 1985 2000 - S Sun>=9 0 1 - +R Ax 1986 2000 - Ap Sun>=16 0 0 - +R Ax 2001 2010 - Ap Sun>=15 2 0 - +R Ax 2001 2010 - S Sun>=1 2 1 - +Z Atlantic/Stanley -3:51:24 - LMT 1890 +-3:51:24 - SMT 1912 Mar 12 +-4 Ax -04/-03 1983 May +-3 Ax -03/-02 1985 S 15 +-4 Ax -04/-03 2010 S 5 2 +-3 - -03 +Z America/Cayenne -3:29:20 - LMT 1911 Jul +-4 - -04 1967 O +-3 - -03 +Z America/Guyana -3:52:40 - LMT 1915 Mar +-3:45 - -0345 1975 Jul 31 +-3 - -03 1991 +-4 - -04 +R Ay 1975 1988 - O 1 0 1 - +R Ay 1975 1978 - Mar 1 0 0 - +R Ay 1979 1991 - Ap 1 0 0 - +R Ay 1989 o - O 22 0 1 - +R Ay 1990 o - O 1 0 1 - +R Ay 1991 o - O 6 0 1 - +R Ay 1992 o - Mar 1 0 0 - +R Ay 1992 o - O 5 0 1 - +R Ay 1993 o - Mar 31 0 0 - +R Ay 1993 1995 - O 1 0 1 - +R Ay 1994 1995 - F lastSun 0 0 - +R Ay 1996 o - Mar 1 0 0 - +R Ay 1996 2001 - O Sun>=1 0 1 - +R Ay 1997 o - F lastSun 0 0 - +R Ay 1998 2001 - Mar Sun>=1 0 0 - +R Ay 2002 2004 - Ap Sun>=1 0 0 - +R Ay 2002 2003 - S Sun>=1 0 1 - +R Ay 2004 2009 - O Sun>=15 0 1 - +R Ay 2005 2009 - Mar Sun>=8 0 0 - +R Ay 2010 ma - O Sun>=1 0 1 - +R Ay 2010 2012 - Ap Sun>=8 0 0 - +R Ay 2013 ma - Mar Sun>=22 0 0 - +Z America/Asuncion -3:50:40 - LMT 1890 +-3:50:40 - AMT 1931 O 10 +-4 - -04 1972 O +-3 - -03 1974 Ap +-4 Ay -04/-03 +R Az 1938 o - Ja 1 0 1 - +R Az 1938 o - Ap 1 0 0 - +R Az 1938 1939 - S lastSun 0 1 - +R Az 1939 1940 - Mar Sun>=24 0 0 - +R Az 1986 1987 - Ja 1 0 1 - +R 
Az 1986 1987 - Ap 1 0 0 - +R Az 1990 o - Ja 1 0 1 - +R Az 1990 o - Ap 1 0 0 - +R Az 1994 o - Ja 1 0 1 - +R Az 1994 o - Ap 1 0 0 - +Z America/Lima -5:8:12 - LMT 1890 +-5:8:36 - LMT 1908 Jul 28 +-5 Az -05/-04 +Z Atlantic/South_Georgia -2:26:8 - LMT 1890 +-2 - -02 +Z America/Paramaribo -3:40:40 - LMT 1911 +-3:40:52 - PMT 1935 +-3:40:36 - PMT 1945 O +-3:30 - -0330 1984 O +-3 - -03 +Z America/Port_of_Spain -4:6:4 - LMT 1912 Mar 2 +-4 - AST +Li America/Port_of_Spain America/Anguilla +Li America/Port_of_Spain America/Antigua +Li America/Port_of_Spain America/Dominica +Li America/Port_of_Spain America/Grenada +Li America/Port_of_Spain America/Guadeloupe +Li America/Port_of_Spain America/Marigot +Li America/Port_of_Spain America/Montserrat +Li America/Port_of_Spain America/St_Barthelemy +Li America/Port_of_Spain America/St_Kitts +Li America/Port_of_Spain America/St_Lucia +Li America/Port_of_Spain America/St_Thomas +Li America/Port_of_Spain America/St_Vincent +Li America/Port_of_Spain America/Tortola +R A! 1923 1925 - O 1 0 0:30 - +R A! 1924 1926 - Ap 1 0 0 - +R A! 1933 1938 - O lastSun 0 0:30 - +R A! 1934 1941 - Mar lastSat 24 0 - +R A! 1939 o - O 1 0 0:30 - +R A! 1940 o - O 27 0 0:30 - +R A! 1941 o - Au 1 0 0:30 - +R A! 1942 o - D 14 0 0:30 - +R A! 1943 o - Mar 14 0 0 - +R A! 1959 o - May 24 0 0:30 - +R A! 1959 o - N 15 0 0 - +R A! 1960 o - Ja 17 0 1 - +R A! 1960 o - Mar 6 0 0 - +R A! 1965 o - Ap 4 0 1 - +R A! 1965 o - S 26 0 0 - +R A! 1968 o - May 27 0 0:30 - +R A! 1968 o - D 1 0 0 - +R A! 1970 o - Ap 25 0 1 - +R A! 1970 o - Jun 14 0 0 - +R A! 1972 o - Ap 23 0 1 - +R A! 1972 o - Jul 16 0 0 - +R A! 1974 o - Ja 13 0 1:30 - +R A! 1974 o - Mar 10 0 0:30 - +R A! 1974 o - S 1 0 0 - +R A! 1974 o - D 22 0 1 - +R A! 1975 o - Mar 30 0 0 - +R A! 1976 o - D 19 0 1 - +R A! 1977 o - Mar 6 0 0 - +R A! 1977 o - D 4 0 1 - +R A! 1978 1979 - Mar Sun>=1 0 0 - +R A! 1978 o - D 17 0 1 - +R A! 1979 o - Ap 29 0 1 - +R A! 1980 o - Mar 16 0 0 - +R A! 1987 o - D 14 0 1 - +R A! 1988 o - F 28 0 0 - +R A! 1988 o - D 11 0 1 - +R A! 1989 o - Mar 5 0 0 - +R A! 1989 o - O 29 0 1 - +R A! 1990 o - F 25 0 0 - +R A! 1990 1991 - O Sun>=21 0 1 - +R A! 1991 1992 - Mar Sun>=1 0 0 - +R A! 1992 o - O 18 0 1 - +R A! 1993 o - F 28 0 0 - +R A! 2004 o - S 19 0 1 - +R A! 2005 o - Mar 27 2 0 - +R A! 2005 o - O 9 2 1 - +R A! 2006 2015 - Mar Sun>=8 2 0 - +R A! 2006 2014 - O Sun>=1 2 1 - +Z America/Montevideo -3:44:51 - LMT 1908 Jun 10 +-3:44:51 - MMT 1920 May +-4 - -04 1923 O +-3:30 A! -0330/-03 1942 D 14 +-3 A! -03/-0230 1960 +-3 A! -03/-02 1968 +-3 A! -03/-0230 1970 +-3 A! -03/-02 1974 +-3 A! -03/-0130 1974 Mar 10 +-3 A! -03/-0230 1974 D 22 +-3 A! 
-03/-02 +Z America/Caracas -4:27:44 - LMT 1890 +-4:27:40 - CMT 1912 F 12 +-4:30 - -0430 1965 +-4 - -04 2007 D 9 3 +-4:30 - -0430 2016 May 1 2:30 +-4 - -04 +Z Etc/GMT 0 - GMT +Z Etc/UTC 0 - UTC +Z Etc/UCT 0 - UCT +Li Etc/GMT GMT +Li Etc/UTC Etc/Universal +Li Etc/UTC Etc/Zulu +Li Etc/GMT Etc/Greenwich +Li Etc/GMT Etc/GMT-0 +Li Etc/GMT Etc/GMT+0 +Li Etc/GMT Etc/GMT0 +Z Etc/GMT-14 14 - +14 +Z Etc/GMT-13 13 - +13 +Z Etc/GMT-12 12 - +12 +Z Etc/GMT-11 11 - +11 +Z Etc/GMT-10 10 - +10 +Z Etc/GMT-9 9 - +09 +Z Etc/GMT-8 8 - +08 +Z Etc/GMT-7 7 - +07 +Z Etc/GMT-6 6 - +06 +Z Etc/GMT-5 5 - +05 +Z Etc/GMT-4 4 - +04 +Z Etc/GMT-3 3 - +03 +Z Etc/GMT-2 2 - +02 +Z Etc/GMT-1 1 - +01 +Z Etc/GMT+1 -1 - -01 +Z Etc/GMT+2 -2 - -02 +Z Etc/GMT+3 -3 - -03 +Z Etc/GMT+4 -4 - -04 +Z Etc/GMT+5 -5 - -05 +Z Etc/GMT+6 -6 - -06 +Z Etc/GMT+7 -7 - -07 +Z Etc/GMT+8 -8 - -08 +Z Etc/GMT+9 -9 - -09 +Z Etc/GMT+10 -10 - -10 +Z Etc/GMT+11 -11 - -11 +Z Etc/GMT+12 -12 - -12 +Z Factory 0 - -00 +Li Africa/Nairobi Africa/Asmera +Li Africa/Abidjan Africa/Timbuktu +Li America/Argentina/Catamarca America/Argentina/ComodRivadavia +Li America/Adak America/Atka +Li America/Argentina/Buenos_Aires America/Buenos_Aires +Li America/Argentina/Catamarca America/Catamarca +Li America/Atikokan America/Coral_Harbour +Li America/Argentina/Cordoba America/Cordoba +Li America/Tijuana America/Ensenada +Li America/Indiana/Indianapolis America/Fort_Wayne +Li America/Indiana/Indianapolis America/Indianapolis +Li America/Argentina/Jujuy America/Jujuy +Li America/Indiana/Knox America/Knox_IN +Li America/Kentucky/Louisville America/Louisville +Li America/Argentina/Mendoza America/Mendoza +Li America/Toronto America/Montreal +Li America/Rio_Branco America/Porto_Acre +Li America/Argentina/Cordoba America/Rosario +Li America/Tijuana America/Santa_Isabel +Li America/Denver America/Shiprock +Li America/Port_of_Spain America/Virgin +Li Pacific/Auckland Antarctica/South_Pole +Li Asia/Ashgabat Asia/Ashkhabad +Li Asia/Kolkata Asia/Calcutta +Li Asia/Shanghai Asia/Chongqing +Li Asia/Shanghai Asia/Chungking +Li Asia/Dhaka Asia/Dacca +Li Asia/Shanghai Asia/Harbin +Li Asia/Urumqi Asia/Kashgar +Li Asia/Kathmandu Asia/Katmandu +Li Asia/Macau Asia/Macao +Li Asia/Yangon Asia/Rangoon +Li Asia/Ho_Chi_Minh Asia/Saigon +Li Asia/Jerusalem Asia/Tel_Aviv +Li Asia/Thimphu Asia/Thimbu +Li Asia/Makassar Asia/Ujung_Pandang +Li Asia/Ulaanbaatar Asia/Ulan_Bator +Li Atlantic/Faroe Atlantic/Faeroe +Li Europe/Oslo Atlantic/Jan_Mayen +Li Australia/Sydney Australia/ACT +Li Australia/Sydney Australia/Canberra +Li Australia/Lord_Howe Australia/LHI +Li Australia/Sydney Australia/NSW +Li Australia/Darwin Australia/North +Li Australia/Brisbane Australia/Queensland +Li Australia/Adelaide Australia/South +Li Australia/Hobart Australia/Tasmania +Li Australia/Melbourne Australia/Victoria +Li Australia/Perth Australia/West +Li Australia/Broken_Hill Australia/Yancowinna +Li America/Rio_Branco Brazil/Acre +Li America/Noronha Brazil/DeNoronha +Li America/Sao_Paulo Brazil/East +Li America/Manaus Brazil/West +Li America/Halifax Canada/Atlantic +Li America/Winnipeg Canada/Central +Li America/Toronto Canada/Eastern +Li America/Edmonton Canada/Mountain +Li America/St_Johns Canada/Newfoundland +Li America/Vancouver Canada/Pacific +Li America/Regina Canada/Saskatchewan +Li America/Whitehorse Canada/Yukon +Li America/Santiago Chile/Continental +Li Pacific/Easter Chile/EasterIsland +Li America/Havana Cuba +Li Africa/Cairo Egypt +Li Europe/Dublin Eire +Li Europe/London Europe/Belfast +Li Europe/Chisinau Europe/Tiraspol +Li 
Europe/London GB +Li Europe/London GB-Eire +Li Etc/GMT GMT+0 +Li Etc/GMT GMT-0 +Li Etc/GMT GMT0 +Li Etc/GMT Greenwich +Li Asia/Hong_Kong Hongkong +Li Atlantic/Reykjavik Iceland +Li Asia/Tehran Iran +Li Asia/Jerusalem Israel +Li America/Jamaica Jamaica +Li Asia/Tokyo Japan +Li Pacific/Kwajalein Kwajalein +Li Africa/Tripoli Libya +Li America/Tijuana Mexico/BajaNorte +Li America/Mazatlan Mexico/BajaSur +Li America/Mexico_City Mexico/General +Li Pacific/Auckland NZ +Li Pacific/Chatham NZ-CHAT +Li America/Denver Navajo +Li Asia/Shanghai PRC +Li Pacific/Honolulu Pacific/Johnston +Li Pacific/Pohnpei Pacific/Ponape +Li Pacific/Pago_Pago Pacific/Samoa +Li Pacific/Chuuk Pacific/Truk +Li Pacific/Chuuk Pacific/Yap +Li Europe/Warsaw Poland +Li Europe/Lisbon Portugal +Li Asia/Taipei ROC +Li Asia/Seoul ROK +Li Asia/Singapore Singapore +Li Europe/Istanbul Turkey +Li Etc/UCT UCT +Li America/Anchorage US/Alaska +Li America/Adak US/Aleutian +Li America/Phoenix US/Arizona +Li America/Chicago US/Central +Li America/Indiana/Indianapolis US/East-Indiana +Li America/New_York US/Eastern +Li Pacific/Honolulu US/Hawaii +Li America/Indiana/Knox US/Indiana-Starke +Li America/Detroit US/Michigan +Li America/Denver US/Mountain +Li America/Los_Angeles US/Pacific +Li Pacific/Pago_Pago US/Samoa +Li Etc/UTC UTC +Li Etc/UTC Universal +Li Europe/Moscow W-SU +Li Etc/UTC Zulu diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/zone.tab b/env/lib/python3.6/site-packages/pytz/zoneinfo/zone.tab new file mode 100644 index 0000000..f92c919 --- /dev/null +++ b/env/lib/python3.6/site-packages/pytz/zoneinfo/zone.tab @@ -0,0 +1,448 @@ +# tz zone descriptions (deprecated version) +# +# This file is in the public domain, so clarified as of +# 2009-05-17 by Arthur David Olson. +# +# From Paul Eggert (2014-07-31): +# This file is intended as a backward-compatibility aid for older programs. +# New programs should use zone1970.tab. This file is like zone1970.tab (see +# zone1970.tab's comments), but with the following additional restrictions: +# +# 1. This file contains only ASCII characters. +# 2. The first data column contains exactly one country code. +# +# Because of (2), each row stands for an area that is the intersection +# of a region identified by a country code and of a zone where civil +# clocks have agreed since 1970; this is a narrower definition than +# that of zone1970.tab. +# +# This table is intended as an aid for users, to help them select time +# zone data entries appropriate for their practical needs. It is not +# intended to take or endorse any position on legal or territorial claims. 
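As the header above describes, each zone.tab data row is the intersection of one country code and one zone whose clocks have agreed since 1970, with four whitespace-separated columns: country code, ISO 6709 coordinates, TZ name, and an optional comment. A minimal sketch of reading those rows in Python — assuming a local copy of zone.tab and a hypothetical helper name read_zone_tab, not anything shipped by pytz — could look like:

# zone_tab_sketch.py -- illustrative only; not part of the patched package.
# Parses zone.tab rows into {TZ name: (country code, ISO 6709 coordinates, comment)}.
def read_zone_tab(path="zone.tab"):
    zones = {}
    with open(path, encoding="ascii") as f:  # the file is ASCII-only per restriction (1) above
        for line in f:
            line = line.strip()
            if not line or line.startswith("#"):  # skip blanks and comment lines
                continue
            # country code, coordinates, TZ name, then an optional free-text comment
            country, coords, tz, *rest = line.split(maxsplit=3)
            zones[tz] = (country, coords, rest[0] if rest else "")
    return zones

# e.g. read_zone_tab()["Europe/Andorra"] -> ("AD", "+4230+00131", "")

In practice, code using this package would normally go through pytz.country_timezones (which is built from this table) rather than parsing zone.tab directly; the sketch is only meant to show what the columns mean.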
+# +#country- +#code coordinates TZ comments +AD +4230+00131 Europe/Andorra +AE +2518+05518 Asia/Dubai +AF +3431+06912 Asia/Kabul +AG +1703-06148 America/Antigua +AI +1812-06304 America/Anguilla +AL +4120+01950 Europe/Tirane +AM +4011+04430 Asia/Yerevan +AO -0848+01314 Africa/Luanda +AQ -7750+16636 Antarctica/McMurdo New Zealand time - McMurdo, South Pole +AQ -6617+11031 Antarctica/Casey Casey +AQ -6835+07758 Antarctica/Davis Davis +AQ -6640+14001 Antarctica/DumontDUrville Dumont-d'Urville +AQ -6736+06253 Antarctica/Mawson Mawson +AQ -6448-06406 Antarctica/Palmer Palmer +AQ -6734-06808 Antarctica/Rothera Rothera +AQ -690022+0393524 Antarctica/Syowa Syowa +AQ -720041+0023206 Antarctica/Troll Troll +AQ -7824+10654 Antarctica/Vostok Vostok +AR -3436-05827 America/Argentina/Buenos_Aires Buenos Aires (BA, CF) +AR -3124-06411 America/Argentina/Cordoba Argentina (most areas: CB, CC, CN, ER, FM, MN, SE, SF) +AR -2447-06525 America/Argentina/Salta Salta (SA, LP, NQ, RN) +AR -2411-06518 America/Argentina/Jujuy Jujuy (JY) +AR -2649-06513 America/Argentina/Tucuman Tucuman (TM) +AR -2828-06547 America/Argentina/Catamarca Catamarca (CT); Chubut (CH) +AR -2926-06651 America/Argentina/La_Rioja La Rioja (LR) +AR -3132-06831 America/Argentina/San_Juan San Juan (SJ) +AR -3253-06849 America/Argentina/Mendoza Mendoza (MZ) +AR -3319-06621 America/Argentina/San_Luis San Luis (SL) +AR -5138-06913 America/Argentina/Rio_Gallegos Santa Cruz (SC) +AR -5448-06818 America/Argentina/Ushuaia Tierra del Fuego (TF) +AS -1416-17042 Pacific/Pago_Pago +AT +4813+01620 Europe/Vienna +AU -3133+15905 Australia/Lord_Howe Lord Howe Island +AU -5430+15857 Antarctica/Macquarie Macquarie Island +AU -4253+14719 Australia/Hobart Tasmania (most areas) +AU -3956+14352 Australia/Currie Tasmania (King Island) +AU -3749+14458 Australia/Melbourne Victoria +AU -3352+15113 Australia/Sydney New South Wales (most areas) +AU -3157+14127 Australia/Broken_Hill New South Wales (Yancowinna) +AU -2728+15302 Australia/Brisbane Queensland (most areas) +AU -2016+14900 Australia/Lindeman Queensland (Whitsunday Islands) +AU -3455+13835 Australia/Adelaide South Australia +AU -1228+13050 Australia/Darwin Northern Territory +AU -3157+11551 Australia/Perth Western Australia (most areas) +AU -3143+12852 Australia/Eucla Western Australia (Eucla) +AW +1230-06958 America/Aruba +AX +6006+01957 Europe/Mariehamn +AZ +4023+04951 Asia/Baku +BA +4352+01825 Europe/Sarajevo +BB +1306-05937 America/Barbados +BD +2343+09025 Asia/Dhaka +BE +5050+00420 Europe/Brussels +BF +1222-00131 Africa/Ouagadougou +BG +4241+02319 Europe/Sofia +BH +2623+05035 Asia/Bahrain +BI -0323+02922 Africa/Bujumbura +BJ +0629+00237 Africa/Porto-Novo +BL +1753-06251 America/St_Barthelemy +BM +3217-06446 Atlantic/Bermuda +BN +0456+11455 Asia/Brunei +BO -1630-06809 America/La_Paz +BQ +120903-0681636 America/Kralendijk +BR -0351-03225 America/Noronha Atlantic islands +BR -0127-04829 America/Belem Para (east); Amapa +BR -0343-03830 America/Fortaleza Brazil (northeast: MA, PI, CE, RN, PB) +BR -0803-03454 America/Recife Pernambuco +BR -0712-04812 America/Araguaina Tocantins +BR -0940-03543 America/Maceio Alagoas, Sergipe +BR -1259-03831 America/Bahia Bahia +BR -2332-04637 America/Sao_Paulo Brazil (southeast: GO, DF, MG, ES, RJ, SP, PR, SC, RS) +BR -2027-05437 America/Campo_Grande Mato Grosso do Sul +BR -1535-05605 America/Cuiaba Mato Grosso +BR -0226-05452 America/Santarem Para (west) +BR -0846-06354 America/Porto_Velho Rondonia +BR +0249-06040 America/Boa_Vista Roraima +BR -0308-06001 America/Manaus 
Amazonas (east) +BR -0640-06952 America/Eirunepe Amazonas (west) +BR -0958-06748 America/Rio_Branco Acre +BS +2505-07721 America/Nassau +BT +2728+08939 Asia/Thimphu +BW -2439+02555 Africa/Gaborone +BY +5354+02734 Europe/Minsk +BZ +1730-08812 America/Belize +CA +4734-05243 America/St_Johns Newfoundland; Labrador (southeast) +CA +4439-06336 America/Halifax Atlantic - NS (most areas); PE +CA +4612-05957 America/Glace_Bay Atlantic - NS (Cape Breton) +CA +4606-06447 America/Moncton Atlantic - New Brunswick +CA +5320-06025 America/Goose_Bay Atlantic - Labrador (most areas) +CA +5125-05707 America/Blanc-Sablon AST - QC (Lower North Shore) +CA +4339-07923 America/Toronto Eastern - ON, QC (most areas) +CA +4901-08816 America/Nipigon Eastern - ON, QC (no DST 1967-73) +CA +4823-08915 America/Thunder_Bay Eastern - ON (Thunder Bay) +CA +6344-06828 America/Iqaluit Eastern - NU (most east areas) +CA +6608-06544 America/Pangnirtung Eastern - NU (Pangnirtung) +CA +484531-0913718 America/Atikokan EST - ON (Atikokan); NU (Coral H) +CA +4953-09709 America/Winnipeg Central - ON (west); Manitoba +CA +4843-09434 America/Rainy_River Central - ON (Rainy R, Ft Frances) +CA +744144-0944945 America/Resolute Central - NU (Resolute) +CA +624900-0920459 America/Rankin_Inlet Central - NU (central) +CA +5024-10439 America/Regina CST - SK (most areas) +CA +5017-10750 America/Swift_Current CST - SK (midwest) +CA +5333-11328 America/Edmonton Mountain - AB; BC (E); SK (W) +CA +690650-1050310 America/Cambridge_Bay Mountain - NU (west) +CA +6227-11421 America/Yellowknife Mountain - NT (central) +CA +682059-1334300 America/Inuvik Mountain - NT (west) +CA +4906-11631 America/Creston MST - BC (Creston) +CA +5946-12014 America/Dawson_Creek MST - BC (Dawson Cr, Ft St John) +CA +5848-12242 America/Fort_Nelson MST - BC (Ft Nelson) +CA +4916-12307 America/Vancouver Pacific - BC (most areas) +CA +6043-13503 America/Whitehorse Pacific - Yukon (south) +CA +6404-13925 America/Dawson Pacific - Yukon (north) +CC -1210+09655 Indian/Cocos +CD -0418+01518 Africa/Kinshasa Dem. Rep. of Congo (west) +CD -1140+02728 Africa/Lubumbashi Dem. Rep. 
of Congo (east) +CF +0422+01835 Africa/Bangui +CG -0416+01517 Africa/Brazzaville +CH +4723+00832 Europe/Zurich +CI +0519-00402 Africa/Abidjan +CK -2114-15946 Pacific/Rarotonga +CL -3327-07040 America/Santiago Chile (most areas) +CL -5309-07055 America/Punta_Arenas Region of Magallanes +CL -2709-10926 Pacific/Easter Easter Island +CM +0403+00942 Africa/Douala +CN +3114+12128 Asia/Shanghai Beijing Time +CN +4348+08735 Asia/Urumqi Xinjiang Time +CO +0436-07405 America/Bogota +CR +0956-08405 America/Costa_Rica +CU +2308-08222 America/Havana +CV +1455-02331 Atlantic/Cape_Verde +CW +1211-06900 America/Curacao +CX -1025+10543 Indian/Christmas +CY +3510+03322 Asia/Nicosia Cyprus (most areas) +CY +3507+03357 Asia/Famagusta Northern Cyprus +CZ +5005+01426 Europe/Prague +DE +5230+01322 Europe/Berlin Germany (most areas) +DE +4742+00841 Europe/Busingen Busingen +DJ +1136+04309 Africa/Djibouti +DK +5540+01235 Europe/Copenhagen +DM +1518-06124 America/Dominica +DO +1828-06954 America/Santo_Domingo +DZ +3647+00303 Africa/Algiers +EC -0210-07950 America/Guayaquil Ecuador (mainland) +EC -0054-08936 Pacific/Galapagos Galapagos Islands +EE +5925+02445 Europe/Tallinn +EG +3003+03115 Africa/Cairo +EH +2709-01312 Africa/El_Aaiun +ER +1520+03853 Africa/Asmara +ES +4024-00341 Europe/Madrid Spain (mainland) +ES +3553-00519 Africa/Ceuta Ceuta, Melilla +ES +2806-01524 Atlantic/Canary Canary Islands +ET +0902+03842 Africa/Addis_Ababa +FI +6010+02458 Europe/Helsinki +FJ -1808+17825 Pacific/Fiji +FK -5142-05751 Atlantic/Stanley +FM +0725+15147 Pacific/Chuuk Chuuk/Truk, Yap +FM +0658+15813 Pacific/Pohnpei Pohnpei/Ponape +FM +0519+16259 Pacific/Kosrae Kosrae +FO +6201-00646 Atlantic/Faroe +FR +4852+00220 Europe/Paris +GA +0023+00927 Africa/Libreville +GB +513030-0000731 Europe/London +GD +1203-06145 America/Grenada +GE +4143+04449 Asia/Tbilisi +GF +0456-05220 America/Cayenne +GG +492717-0023210 Europe/Guernsey +GH +0533-00013 Africa/Accra +GI +3608-00521 Europe/Gibraltar +GL +6411-05144 America/Godthab Greenland (most areas) +GL +7646-01840 America/Danmarkshavn National Park (east coast) +GL +7029-02158 America/Scoresbysund Scoresbysund/Ittoqqortoormiit +GL +7634-06847 America/Thule Thule/Pituffik +GM +1328-01639 Africa/Banjul +GN +0931-01343 Africa/Conakry +GP +1614-06132 America/Guadeloupe +GQ +0345+00847 Africa/Malabo +GR +3758+02343 Europe/Athens +GS -5416-03632 Atlantic/South_Georgia +GT +1438-09031 America/Guatemala +GU +1328+14445 Pacific/Guam +GW +1151-01535 Africa/Bissau +GY +0648-05810 America/Guyana +HK +2217+11409 Asia/Hong_Kong +HN +1406-08713 America/Tegucigalpa +HR +4548+01558 Europe/Zagreb +HT +1832-07220 America/Port-au-Prince +HU +4730+01905 Europe/Budapest +ID -0610+10648 Asia/Jakarta Java, Sumatra +ID -0002+10920 Asia/Pontianak Borneo (west, central) +ID -0507+11924 Asia/Makassar Borneo (east, south); Sulawesi/Celebes, Bali, Nusa Tengarra; Timor (west) +ID -0232+14042 Asia/Jayapura New Guinea (West Papua / Irian Jaya); Malukus/Moluccas +IE +5320-00615 Europe/Dublin +IL +314650+0351326 Asia/Jerusalem +IM +5409-00428 Europe/Isle_of_Man +IN +2232+08822 Asia/Kolkata +IO -0720+07225 Indian/Chagos +IQ +3321+04425 Asia/Baghdad +IR +3540+05126 Asia/Tehran +IS +6409-02151 Atlantic/Reykjavik +IT +4154+01229 Europe/Rome +JE +491101-0020624 Europe/Jersey +JM +175805-0764736 America/Jamaica +JO +3157+03556 Asia/Amman +JP +353916+1394441 Asia/Tokyo +KE -0117+03649 Africa/Nairobi +KG +4254+07436 Asia/Bishkek +KH +1133+10455 Asia/Phnom_Penh +KI +0125+17300 Pacific/Tarawa Gilbert Islands +KI -0308-17105 
Pacific/Enderbury Phoenix Islands +KI +0152-15720 Pacific/Kiritimati Line Islands +KM -1141+04316 Indian/Comoro +KN +1718-06243 America/St_Kitts +KP +3901+12545 Asia/Pyongyang +KR +3733+12658 Asia/Seoul +KW +2920+04759 Asia/Kuwait +KY +1918-08123 America/Cayman +KZ +4315+07657 Asia/Almaty Kazakhstan (most areas) +KZ +4448+06528 Asia/Qyzylorda Qyzylorda/Kyzylorda/Kzyl-Orda +KZ +5017+05710 Asia/Aqtobe Aqtobe/Aktobe +KZ +4431+05016 Asia/Aqtau Mangghystau/Mankistau +KZ +4707+05156 Asia/Atyrau Atyrau/Atirau/Gur'yev +KZ +5113+05121 Asia/Oral West Kazakhstan +LA +1758+10236 Asia/Vientiane +LB +3353+03530 Asia/Beirut +LC +1401-06100 America/St_Lucia +LI +4709+00931 Europe/Vaduz +LK +0656+07951 Asia/Colombo +LR +0618-01047 Africa/Monrovia +LS -2928+02730 Africa/Maseru +LT +5441+02519 Europe/Vilnius +LU +4936+00609 Europe/Luxembourg +LV +5657+02406 Europe/Riga +LY +3254+01311 Africa/Tripoli +MA +3339-00735 Africa/Casablanca +MC +4342+00723 Europe/Monaco +MD +4700+02850 Europe/Chisinau +ME +4226+01916 Europe/Podgorica +MF +1804-06305 America/Marigot +MG -1855+04731 Indian/Antananarivo +MH +0709+17112 Pacific/Majuro Marshall Islands (most areas) +MH +0905+16720 Pacific/Kwajalein Kwajalein +MK +4159+02126 Europe/Skopje +ML +1239-00800 Africa/Bamako +MM +1647+09610 Asia/Yangon +MN +4755+10653 Asia/Ulaanbaatar Mongolia (most areas) +MN +4801+09139 Asia/Hovd Bayan-Olgiy, Govi-Altai, Hovd, Uvs, Zavkhan +MN +4804+11430 Asia/Choibalsan Dornod, Sukhbaatar +MO +2214+11335 Asia/Macau +MP +1512+14545 Pacific/Saipan +MQ +1436-06105 America/Martinique +MR +1806-01557 Africa/Nouakchott +MS +1643-06213 America/Montserrat +MT +3554+01431 Europe/Malta +MU -2010+05730 Indian/Mauritius +MV +0410+07330 Indian/Maldives +MW -1547+03500 Africa/Blantyre +MX +1924-09909 America/Mexico_City Central Time +MX +2105-08646 America/Cancun Eastern Standard Time - Quintana Roo +MX +2058-08937 America/Merida Central Time - Campeche, Yucatan +MX +2540-10019 America/Monterrey Central Time - Durango; Coahuila, Nuevo Leon, Tamaulipas (most areas) +MX +2550-09730 America/Matamoros Central Time US - Coahuila, Nuevo Leon, Tamaulipas (US border) +MX +2313-10625 America/Mazatlan Mountain Time - Baja California Sur, Nayarit, Sinaloa +MX +2838-10605 America/Chihuahua Mountain Time - Chihuahua (most areas) +MX +2934-10425 America/Ojinaga Mountain Time US - Chihuahua (US border) +MX +2904-11058 America/Hermosillo Mountain Standard Time - Sonora +MX +3232-11701 America/Tijuana Pacific Time US - Baja California +MX +2048-10515 America/Bahia_Banderas Central Time - Bahia de Banderas +MY +0310+10142 Asia/Kuala_Lumpur Malaysia (peninsula) +MY +0133+11020 Asia/Kuching Sabah, Sarawak +MZ -2558+03235 Africa/Maputo +NA -2234+01706 Africa/Windhoek +NC -2216+16627 Pacific/Noumea +NE +1331+00207 Africa/Niamey +NF -2903+16758 Pacific/Norfolk +NG +0627+00324 Africa/Lagos +NI +1209-08617 America/Managua +NL +5222+00454 Europe/Amsterdam +NO +5955+01045 Europe/Oslo +NP +2743+08519 Asia/Kathmandu +NR -0031+16655 Pacific/Nauru +NU -1901-16955 Pacific/Niue +NZ -3652+17446 Pacific/Auckland New Zealand (most areas) +NZ -4357-17633 Pacific/Chatham Chatham Islands +OM +2336+05835 Asia/Muscat +PA +0858-07932 America/Panama +PE -1203-07703 America/Lima +PF -1732-14934 Pacific/Tahiti Society Islands +PF -0900-13930 Pacific/Marquesas Marquesas Islands +PF -2308-13457 Pacific/Gambier Gambier Islands +PG -0930+14710 Pacific/Port_Moresby Papua New Guinea (most areas) +PG -0613+15534 Pacific/Bougainville Bougainville +PH +1435+12100 Asia/Manila +PK +2452+06703 Asia/Karachi +PL 
+5215+02100 Europe/Warsaw +PM +4703-05620 America/Miquelon +PN -2504-13005 Pacific/Pitcairn +PR +182806-0660622 America/Puerto_Rico +PS +3130+03428 Asia/Gaza Gaza Strip +PS +313200+0350542 Asia/Hebron West Bank +PT +3843-00908 Europe/Lisbon Portugal (mainland) +PT +3238-01654 Atlantic/Madeira Madeira Islands +PT +3744-02540 Atlantic/Azores Azores +PW +0720+13429 Pacific/Palau +PY -2516-05740 America/Asuncion +QA +2517+05132 Asia/Qatar +RE -2052+05528 Indian/Reunion +RO +4426+02606 Europe/Bucharest +RS +4450+02030 Europe/Belgrade +RU +5443+02030 Europe/Kaliningrad MSK-01 - Kaliningrad +RU +554521+0373704 Europe/Moscow MSK+00 - Moscow area +RU +4457+03406 Europe/Simferopol MSK+00 - Crimea +RU +4844+04425 Europe/Volgograd MSK+00 - Volgograd +RU +5836+04939 Europe/Kirov MSK+00 - Kirov +RU +4621+04803 Europe/Astrakhan MSK+01 - Astrakhan +RU +5134+04602 Europe/Saratov MSK+01 - Saratov +RU +5420+04824 Europe/Ulyanovsk MSK+01 - Ulyanovsk +RU +5312+05009 Europe/Samara MSK+01 - Samara, Udmurtia +RU +5651+06036 Asia/Yekaterinburg MSK+02 - Urals +RU +5500+07324 Asia/Omsk MSK+03 - Omsk +RU +5502+08255 Asia/Novosibirsk MSK+04 - Novosibirsk +RU +5322+08345 Asia/Barnaul MSK+04 - Altai +RU +5630+08458 Asia/Tomsk MSK+04 - Tomsk +RU +5345+08707 Asia/Novokuznetsk MSK+04 - Kemerovo +RU +5601+09250 Asia/Krasnoyarsk MSK+04 - Krasnoyarsk area +RU +5216+10420 Asia/Irkutsk MSK+05 - Irkutsk, Buryatia +RU +5203+11328 Asia/Chita MSK+06 - Zabaykalsky +RU +6200+12940 Asia/Yakutsk MSK+06 - Lena River +RU +623923+1353314 Asia/Khandyga MSK+06 - Tomponsky, Ust-Maysky +RU +4310+13156 Asia/Vladivostok MSK+07 - Amur River +RU +643337+1431336 Asia/Ust-Nera MSK+07 - Oymyakonsky +RU +5934+15048 Asia/Magadan MSK+08 - Magadan +RU +4658+14242 Asia/Sakhalin MSK+08 - Sakhalin Island +RU +6728+15343 Asia/Srednekolymsk MSK+08 - Sakha (E); North Kuril Is +RU +5301+15839 Asia/Kamchatka MSK+09 - Kamchatka +RU +6445+17729 Asia/Anadyr MSK+09 - Bering Sea +RW -0157+03004 Africa/Kigali +SA +2438+04643 Asia/Riyadh +SB -0932+16012 Pacific/Guadalcanal +SC -0440+05528 Indian/Mahe +SD +1536+03232 Africa/Khartoum +SE +5920+01803 Europe/Stockholm +SG +0117+10351 Asia/Singapore +SH -1555-00542 Atlantic/St_Helena +SI +4603+01431 Europe/Ljubljana +SJ +7800+01600 Arctic/Longyearbyen +SK +4809+01707 Europe/Bratislava +SL +0830-01315 Africa/Freetown +SM +4355+01228 Europe/San_Marino +SN +1440-01726 Africa/Dakar +SO +0204+04522 Africa/Mogadishu +SR +0550-05510 America/Paramaribo +SS +0451+03137 Africa/Juba +ST +0020+00644 Africa/Sao_Tome +SV +1342-08912 America/El_Salvador +SX +180305-0630250 America/Lower_Princes +SY +3330+03618 Asia/Damascus +SZ -2618+03106 Africa/Mbabane +TC +2128-07108 America/Grand_Turk +TD +1207+01503 Africa/Ndjamena +TF -492110+0701303 Indian/Kerguelen +TG +0608+00113 Africa/Lome +TH +1345+10031 Asia/Bangkok +TJ +3835+06848 Asia/Dushanbe +TK -0922-17114 Pacific/Fakaofo +TL -0833+12535 Asia/Dili +TM +3757+05823 Asia/Ashgabat +TN +3648+01011 Africa/Tunis +TO -2110-17510 Pacific/Tongatapu +TR +4101+02858 Europe/Istanbul +TT +1039-06131 America/Port_of_Spain +TV -0831+17913 Pacific/Funafuti +TW +2503+12130 Asia/Taipei +TZ -0648+03917 Africa/Dar_es_Salaam +UA +5026+03031 Europe/Kiev Ukraine (most areas) +UA +4837+02218 Europe/Uzhgorod Ruthenia +UA +4750+03510 Europe/Zaporozhye Zaporozh'ye/Zaporizhia; Lugansk/Luhansk (east) +UG +0019+03225 Africa/Kampala +UM +2813-17722 Pacific/Midway Midway Islands +UM +1917+16637 Pacific/Wake Wake Island +US +404251-0740023 America/New_York Eastern (most areas) +US +421953-0830245 America/Detroit Eastern 
- MI (most areas) +US +381515-0854534 America/Kentucky/Louisville Eastern - KY (Louisville area) +US +364947-0845057 America/Kentucky/Monticello Eastern - KY (Wayne) +US +394606-0860929 America/Indiana/Indianapolis Eastern - IN (most areas) +US +384038-0873143 America/Indiana/Vincennes Eastern - IN (Da, Du, K, Mn) +US +410305-0863611 America/Indiana/Winamac Eastern - IN (Pulaski) +US +382232-0862041 America/Indiana/Marengo Eastern - IN (Crawford) +US +382931-0871643 America/Indiana/Petersburg Eastern - IN (Pike) +US +384452-0850402 America/Indiana/Vevay Eastern - IN (Switzerland) +US +415100-0873900 America/Chicago Central (most areas) +US +375711-0864541 America/Indiana/Tell_City Central - IN (Perry) +US +411745-0863730 America/Indiana/Knox Central - IN (Starke) +US +450628-0873651 America/Menominee Central - MI (Wisconsin border) +US +470659-1011757 America/North_Dakota/Center Central - ND (Oliver) +US +465042-1012439 America/North_Dakota/New_Salem Central - ND (Morton rural) +US +471551-1014640 America/North_Dakota/Beulah Central - ND (Mercer) +US +394421-1045903 America/Denver Mountain (most areas) +US +433649-1161209 America/Boise Mountain - ID (south); OR (east) +US +332654-1120424 America/Phoenix MST - Arizona (except Navajo) +US +340308-1181434 America/Los_Angeles Pacific +US +611305-1495401 America/Anchorage Alaska (most areas) +US +581807-1342511 America/Juneau Alaska - Juneau area +US +571035-1351807 America/Sitka Alaska - Sitka area +US +550737-1313435 America/Metlakatla Alaska - Annette Island +US +593249-1394338 America/Yakutat Alaska - Yakutat +US +643004-1652423 America/Nome Alaska (west) +US +515248-1763929 America/Adak Aleutian Islands +US +211825-1575130 Pacific/Honolulu Hawaii +UY -345433-0561245 America/Montevideo +UZ +3940+06648 Asia/Samarkand Uzbekistan (west) +UZ +4120+06918 Asia/Tashkent Uzbekistan (east) +VA +415408+0122711 Europe/Vatican +VC +1309-06114 America/St_Vincent +VE +1030-06656 America/Caracas +VG +1827-06437 America/Tortola +VI +1821-06456 America/St_Thomas +VN +1045+10640 Asia/Ho_Chi_Minh +VU -1740+16825 Pacific/Efate +WF -1318-17610 Pacific/Wallis +WS -1350-17144 Pacific/Apia +YE +1245+04512 Asia/Aden +YT -1247+04514 Indian/Mayotte +ZA -2615+02800 Africa/Johannesburg +ZM -1525+02817 Africa/Lusaka +ZW -1750+03103 Africa/Harare diff --git a/env/lib/python3.6/site-packages/pytz/zoneinfo/zone1970.tab b/env/lib/python3.6/site-packages/pytz/zoneinfo/zone1970.tab new file mode 100644 index 0000000..2d90ed7 --- /dev/null +++ b/env/lib/python3.6/site-packages/pytz/zoneinfo/zone1970.tab @@ -0,0 +1,382 @@ +# tz zone descriptions +# +# This file is in the public domain. +# +# From Paul Eggert (2017-10-01): +# This file contains a table where each row stands for a zone where +# civil time stamps have agreed since 1970. Columns are separated by +# a single tab. Lines beginning with '#' are comments. All text uses +# UTF-8 encoding. The columns of the table are as follows: +# +# 1. The countries that overlap the zone, as a comma-separated list +# of ISO 3166 2-character country codes. See the file 'iso3166.tab'. +# 2. Latitude and longitude of the zone's principal location +# in ISO 6709 sign-degrees-minutes-seconds format, +# either ±DDMM±DDDMM or ±DDMMSS±DDDMMSS, +# first latitude (+ is north), then longitude (+ is east). +# 3. Zone name used in value of TZ environment variable. +# Please see the theory.html file for how zone names are chosen. +# If multiple zones overlap a country, each has a row in the +# table, with each column 1 containing the country code. 
+# 4. Comments; present if and only if a country has multiple zones. +# +# If a zone covers multiple countries, the most-populous city is used, +# and that country is listed first in column 1; any other countries +# are listed alphabetically by country code. The table is sorted +# first by country code, then (if possible) by an order within the +# country that (1) makes some geographical sense, and (2) puts the +# most populous zones first, where that does not contradict (1). +# +# This table is intended as an aid for users, to help them select time +# zone data entries appropriate for their practical needs. It is not +# intended to take or endorse any position on legal or territorial claims. +# +#country- +#codes coordinates TZ comments +AD +4230+00131 Europe/Andorra +AE,OM +2518+05518 Asia/Dubai +AF +3431+06912 Asia/Kabul +AL +4120+01950 Europe/Tirane +AM +4011+04430 Asia/Yerevan +AQ -6617+11031 Antarctica/Casey Casey +AQ -6835+07758 Antarctica/Davis Davis +AQ -6640+14001 Antarctica/DumontDUrville Dumont-d'Urville +AQ -6736+06253 Antarctica/Mawson Mawson +AQ -6448-06406 Antarctica/Palmer Palmer +AQ -6734-06808 Antarctica/Rothera Rothera +AQ -690022+0393524 Antarctica/Syowa Syowa +AQ -720041+0023206 Antarctica/Troll Troll +AQ -7824+10654 Antarctica/Vostok Vostok +AR -3436-05827 America/Argentina/Buenos_Aires Buenos Aires (BA, CF) +AR -3124-06411 America/Argentina/Cordoba Argentina (most areas: CB, CC, CN, ER, FM, MN, SE, SF) +AR -2447-06525 America/Argentina/Salta Salta (SA, LP, NQ, RN) +AR -2411-06518 America/Argentina/Jujuy Jujuy (JY) +AR -2649-06513 America/Argentina/Tucuman Tucumán (TM) +AR -2828-06547 America/Argentina/Catamarca Catamarca (CT); Chubut (CH) +AR -2926-06651 America/Argentina/La_Rioja La Rioja (LR) +AR -3132-06831 America/Argentina/San_Juan San Juan (SJ) +AR -3253-06849 America/Argentina/Mendoza Mendoza (MZ) +AR -3319-06621 America/Argentina/San_Luis San Luis (SL) +AR -5138-06913 America/Argentina/Rio_Gallegos Santa Cruz (SC) +AR -5448-06818 America/Argentina/Ushuaia Tierra del Fuego (TF) +AS,UM -1416-17042 Pacific/Pago_Pago Samoa, Midway +AT +4813+01620 Europe/Vienna +AU -3133+15905 Australia/Lord_Howe Lord Howe Island +AU -5430+15857 Antarctica/Macquarie Macquarie Island +AU -4253+14719 Australia/Hobart Tasmania (most areas) +AU -3956+14352 Australia/Currie Tasmania (King Island) +AU -3749+14458 Australia/Melbourne Victoria +AU -3352+15113 Australia/Sydney New South Wales (most areas) +AU -3157+14127 Australia/Broken_Hill New South Wales (Yancowinna) +AU -2728+15302 Australia/Brisbane Queensland (most areas) +AU -2016+14900 Australia/Lindeman Queensland (Whitsunday Islands) +AU -3455+13835 Australia/Adelaide South Australia +AU -1228+13050 Australia/Darwin Northern Territory +AU -3157+11551 Australia/Perth Western Australia (most areas) +AU -3143+12852 Australia/Eucla Western Australia (Eucla) +AZ +4023+04951 Asia/Baku +BB +1306-05937 America/Barbados +BD +2343+09025 Asia/Dhaka +BE +5050+00420 Europe/Brussels +BG +4241+02319 Europe/Sofia +BM +3217-06446 Atlantic/Bermuda +BN +0456+11455 Asia/Brunei +BO -1630-06809 America/La_Paz +BR -0351-03225 America/Noronha Atlantic islands +BR -0127-04829 America/Belem Pará (east); Amapá +BR -0343-03830 America/Fortaleza Brazil (northeast: MA, PI, CE, RN, PB) +BR -0803-03454 America/Recife Pernambuco +BR -0712-04812 America/Araguaina Tocantins +BR -0940-03543 America/Maceio Alagoas, Sergipe +BR -1259-03831 America/Bahia Bahia +BR -2332-04637 America/Sao_Paulo Brazil (southeast: GO, DF, MG, ES, RJ, SP, PR, SC, RS) +BR -2027-05437 
America/Campo_Grande Mato Grosso do Sul +BR -1535-05605 America/Cuiaba Mato Grosso +BR -0226-05452 America/Santarem Pará (west) +BR -0846-06354 America/Porto_Velho Rondônia +BR +0249-06040 America/Boa_Vista Roraima +BR -0308-06001 America/Manaus Amazonas (east) +BR -0640-06952 America/Eirunepe Amazonas (west) +BR -0958-06748 America/Rio_Branco Acre +BS +2505-07721 America/Nassau +BT +2728+08939 Asia/Thimphu +BY +5354+02734 Europe/Minsk +BZ +1730-08812 America/Belize +CA +4734-05243 America/St_Johns Newfoundland; Labrador (southeast) +CA +4439-06336 America/Halifax Atlantic - NS (most areas); PE +CA +4612-05957 America/Glace_Bay Atlantic - NS (Cape Breton) +CA +4606-06447 America/Moncton Atlantic - New Brunswick +CA +5320-06025 America/Goose_Bay Atlantic - Labrador (most areas) +CA +5125-05707 America/Blanc-Sablon AST - QC (Lower North Shore) +CA +4339-07923 America/Toronto Eastern - ON, QC (most areas) +CA +4901-08816 America/Nipigon Eastern - ON, QC (no DST 1967-73) +CA +4823-08915 America/Thunder_Bay Eastern - ON (Thunder Bay) +CA +6344-06828 America/Iqaluit Eastern - NU (most east areas) +CA +6608-06544 America/Pangnirtung Eastern - NU (Pangnirtung) +CA +484531-0913718 America/Atikokan EST - ON (Atikokan); NU (Coral H) +CA +4953-09709 America/Winnipeg Central - ON (west); Manitoba +CA +4843-09434 America/Rainy_River Central - ON (Rainy R, Ft Frances) +CA +744144-0944945 America/Resolute Central - NU (Resolute) +CA +624900-0920459 America/Rankin_Inlet Central - NU (central) +CA +5024-10439 America/Regina CST - SK (most areas) +CA +5017-10750 America/Swift_Current CST - SK (midwest) +CA +5333-11328 America/Edmonton Mountain - AB; BC (E); SK (W) +CA +690650-1050310 America/Cambridge_Bay Mountain - NU (west) +CA +6227-11421 America/Yellowknife Mountain - NT (central) +CA +682059-1334300 America/Inuvik Mountain - NT (west) +CA +4906-11631 America/Creston MST - BC (Creston) +CA +5946-12014 America/Dawson_Creek MST - BC (Dawson Cr, Ft St John) +CA +5848-12242 America/Fort_Nelson MST - BC (Ft Nelson) +CA +4916-12307 America/Vancouver Pacific - BC (most areas) +CA +6043-13503 America/Whitehorse Pacific - Yukon (south) +CA +6404-13925 America/Dawson Pacific - Yukon (north) +CC -1210+09655 Indian/Cocos +CH,DE,LI +4723+00832 Europe/Zurich Swiss time +CI,BF,GM,GN,ML,MR,SH,SL,SN,TG +0519-00402 Africa/Abidjan +CK -2114-15946 Pacific/Rarotonga +CL -3327-07040 America/Santiago Chile (most areas) +CL -5309-07055 America/Punta_Arenas Region of Magallanes +CL -2709-10926 Pacific/Easter Easter Island +CN +3114+12128 Asia/Shanghai Beijing Time +CN +4348+08735 Asia/Urumqi Xinjiang Time +CO +0436-07405 America/Bogota +CR +0956-08405 America/Costa_Rica +CU +2308-08222 America/Havana +CV +1455-02331 Atlantic/Cape_Verde +CW,AW,BQ,SX +1211-06900 America/Curacao +CX -1025+10543 Indian/Christmas +CY +3510+03322 Asia/Nicosia Cyprus (most areas) +CY +3507+03357 Asia/Famagusta Northern Cyprus +CZ,SK +5005+01426 Europe/Prague +DE +5230+01322 Europe/Berlin Germany (most areas) +DK +5540+01235 Europe/Copenhagen +DO +1828-06954 America/Santo_Domingo +DZ +3647+00303 Africa/Algiers +EC -0210-07950 America/Guayaquil Ecuador (mainland) +EC -0054-08936 Pacific/Galapagos Galápagos Islands +EE +5925+02445 Europe/Tallinn +EG +3003+03115 Africa/Cairo +EH +2709-01312 Africa/El_Aaiun +ES +4024-00341 Europe/Madrid Spain (mainland) +ES +3553-00519 Africa/Ceuta Ceuta, Melilla +ES +2806-01524 Atlantic/Canary Canary Islands +FI,AX +6010+02458 Europe/Helsinki +FJ -1808+17825 Pacific/Fiji +FK -5142-05751 Atlantic/Stanley +FM +0725+15147 
Pacific/Chuuk Chuuk/Truk, Yap +FM +0658+15813 Pacific/Pohnpei Pohnpei/Ponape +FM +0519+16259 Pacific/Kosrae Kosrae +FO +6201-00646 Atlantic/Faroe +FR +4852+00220 Europe/Paris +GB,GG,IM,JE +513030-0000731 Europe/London +GE +4143+04449 Asia/Tbilisi +GF +0456-05220 America/Cayenne +GH +0533-00013 Africa/Accra +GI +3608-00521 Europe/Gibraltar +GL +6411-05144 America/Godthab Greenland (most areas) +GL +7646-01840 America/Danmarkshavn National Park (east coast) +GL +7029-02158 America/Scoresbysund Scoresbysund/Ittoqqortoormiit +GL +7634-06847 America/Thule Thule/Pituffik +GR +3758+02343 Europe/Athens +GS -5416-03632 Atlantic/South_Georgia +GT +1438-09031 America/Guatemala +GU,MP +1328+14445 Pacific/Guam +GW +1151-01535 Africa/Bissau +GY +0648-05810 America/Guyana +HK +2217+11409 Asia/Hong_Kong +HN +1406-08713 America/Tegucigalpa +HT +1832-07220 America/Port-au-Prince +HU +4730+01905 Europe/Budapest +ID -0610+10648 Asia/Jakarta Java, Sumatra +ID -0002+10920 Asia/Pontianak Borneo (west, central) +ID -0507+11924 Asia/Makassar Borneo (east, south); Sulawesi/Celebes, Bali, Nusa Tengarra; Timor (west) +ID -0232+14042 Asia/Jayapura New Guinea (West Papua / Irian Jaya); Malukus/Moluccas +IE +5320-00615 Europe/Dublin +IL +314650+0351326 Asia/Jerusalem +IN +2232+08822 Asia/Kolkata +IO -0720+07225 Indian/Chagos +IQ +3321+04425 Asia/Baghdad +IR +3540+05126 Asia/Tehran +IS +6409-02151 Atlantic/Reykjavik +IT,SM,VA +4154+01229 Europe/Rome +JM +175805-0764736 America/Jamaica +JO +3157+03556 Asia/Amman +JP +353916+1394441 Asia/Tokyo +KE,DJ,ER,ET,KM,MG,SO,TZ,UG,YT -0117+03649 Africa/Nairobi +KG +4254+07436 Asia/Bishkek +KI +0125+17300 Pacific/Tarawa Gilbert Islands +KI -0308-17105 Pacific/Enderbury Phoenix Islands +KI +0152-15720 Pacific/Kiritimati Line Islands +KP +3901+12545 Asia/Pyongyang +KR +3733+12658 Asia/Seoul +KZ +4315+07657 Asia/Almaty Kazakhstan (most areas) +KZ +4448+06528 Asia/Qyzylorda Qyzylorda/Kyzylorda/Kzyl-Orda +KZ +5017+05710 Asia/Aqtobe Aqtöbe/Aktobe +KZ +4431+05016 Asia/Aqtau Mangghystaū/Mankistau +KZ +4707+05156 Asia/Atyrau Atyraū/Atirau/Gur'yev +KZ +5113+05121 Asia/Oral West Kazakhstan +LB +3353+03530 Asia/Beirut +LK +0656+07951 Asia/Colombo +LR +0618-01047 Africa/Monrovia +LT +5441+02519 Europe/Vilnius +LU +4936+00609 Europe/Luxembourg +LV +5657+02406 Europe/Riga +LY +3254+01311 Africa/Tripoli +MA +3339-00735 Africa/Casablanca +MC +4342+00723 Europe/Monaco +MD +4700+02850 Europe/Chisinau +MH +0709+17112 Pacific/Majuro Marshall Islands (most areas) +MH +0905+16720 Pacific/Kwajalein Kwajalein +MM +1647+09610 Asia/Yangon +MN +4755+10653 Asia/Ulaanbaatar Mongolia (most areas) +MN +4801+09139 Asia/Hovd Bayan-Ölgii, Govi-Altai, Hovd, Uvs, Zavkhan +MN +4804+11430 Asia/Choibalsan Dornod, Sükhbaatar +MO +2214+11335 Asia/Macau +MQ +1436-06105 America/Martinique +MT +3554+01431 Europe/Malta +MU -2010+05730 Indian/Mauritius +MV +0410+07330 Indian/Maldives +MX +1924-09909 America/Mexico_City Central Time +MX +2105-08646 America/Cancun Eastern Standard Time - Quintana Roo +MX +2058-08937 America/Merida Central Time - Campeche, Yucatán +MX +2540-10019 America/Monterrey Central Time - Durango; Coahuila, Nuevo León, Tamaulipas (most areas) +MX +2550-09730 America/Matamoros Central Time US - Coahuila, Nuevo León, Tamaulipas (US border) +MX +2313-10625 America/Mazatlan Mountain Time - Baja California Sur, Nayarit, Sinaloa +MX +2838-10605 America/Chihuahua Mountain Time - Chihuahua (most areas) +MX +2934-10425 America/Ojinaga Mountain Time US - Chihuahua (US border) +MX +2904-11058 America/Hermosillo Mountain 
Standard Time - Sonora +MX +3232-11701 America/Tijuana Pacific Time US - Baja California +MX +2048-10515 America/Bahia_Banderas Central Time - Bahía de Banderas +MY +0310+10142 Asia/Kuala_Lumpur Malaysia (peninsula) +MY +0133+11020 Asia/Kuching Sabah, Sarawak +MZ,BI,BW,CD,MW,RW,ZM,ZW -2558+03235 Africa/Maputo Central Africa Time +NA -2234+01706 Africa/Windhoek +NC -2216+16627 Pacific/Noumea +NF -2903+16758 Pacific/Norfolk +NG,AO,BJ,CD,CF,CG,CM,GA,GQ,NE +0627+00324 Africa/Lagos West Africa Time +NI +1209-08617 America/Managua +NL +5222+00454 Europe/Amsterdam +NO,SJ +5955+01045 Europe/Oslo +NP +2743+08519 Asia/Kathmandu +NR -0031+16655 Pacific/Nauru +NU -1901-16955 Pacific/Niue +NZ,AQ -3652+17446 Pacific/Auckland New Zealand time +NZ -4357-17633 Pacific/Chatham Chatham Islands +PA,KY +0858-07932 America/Panama +PE -1203-07703 America/Lima +PF -1732-14934 Pacific/Tahiti Society Islands +PF -0900-13930 Pacific/Marquesas Marquesas Islands +PF -2308-13457 Pacific/Gambier Gambier Islands +PG -0930+14710 Pacific/Port_Moresby Papua New Guinea (most areas) +PG -0613+15534 Pacific/Bougainville Bougainville +PH +1435+12100 Asia/Manila +PK +2452+06703 Asia/Karachi +PL +5215+02100 Europe/Warsaw +PM +4703-05620 America/Miquelon +PN -2504-13005 Pacific/Pitcairn +PR +182806-0660622 America/Puerto_Rico +PS +3130+03428 Asia/Gaza Gaza Strip +PS +313200+0350542 Asia/Hebron West Bank +PT +3843-00908 Europe/Lisbon Portugal (mainland) +PT +3238-01654 Atlantic/Madeira Madeira Islands +PT +3744-02540 Atlantic/Azores Azores +PW +0720+13429 Pacific/Palau +PY -2516-05740 America/Asuncion +QA,BH +2517+05132 Asia/Qatar +RE,TF -2052+05528 Indian/Reunion Réunion, Crozet, Scattered Islands +RO +4426+02606 Europe/Bucharest +RS,BA,HR,ME,MK,SI +4450+02030 Europe/Belgrade +RU +5443+02030 Europe/Kaliningrad MSK-01 - Kaliningrad +RU +554521+0373704 Europe/Moscow MSK+00 - Moscow area +RU +4457+03406 Europe/Simferopol MSK+00 - Crimea +RU +4844+04425 Europe/Volgograd MSK+00 - Volgograd +RU +5836+04939 Europe/Kirov MSK+00 - Kirov +RU +4621+04803 Europe/Astrakhan MSK+01 - Astrakhan +RU +5134+04602 Europe/Saratov MSK+01 - Saratov +RU +5420+04824 Europe/Ulyanovsk MSK+01 - Ulyanovsk +RU +5312+05009 Europe/Samara MSK+01 - Samara, Udmurtia +RU +5651+06036 Asia/Yekaterinburg MSK+02 - Urals +RU +5500+07324 Asia/Omsk MSK+03 - Omsk +RU +5502+08255 Asia/Novosibirsk MSK+04 - Novosibirsk +RU +5322+08345 Asia/Barnaul MSK+04 - Altai +RU +5630+08458 Asia/Tomsk MSK+04 - Tomsk +RU +5345+08707 Asia/Novokuznetsk MSK+04 - Kemerovo +RU +5601+09250 Asia/Krasnoyarsk MSK+04 - Krasnoyarsk area +RU +5216+10420 Asia/Irkutsk MSK+05 - Irkutsk, Buryatia +RU +5203+11328 Asia/Chita MSK+06 - Zabaykalsky +RU +6200+12940 Asia/Yakutsk MSK+06 - Lena River +RU +623923+1353314 Asia/Khandyga MSK+06 - Tomponsky, Ust-Maysky +RU +4310+13156 Asia/Vladivostok MSK+07 - Amur River +RU +643337+1431336 Asia/Ust-Nera MSK+07 - Oymyakonsky +RU +5934+15048 Asia/Magadan MSK+08 - Magadan +RU +4658+14242 Asia/Sakhalin MSK+08 - Sakhalin Island +RU +6728+15343 Asia/Srednekolymsk MSK+08 - Sakha (E); North Kuril Is +RU +5301+15839 Asia/Kamchatka MSK+09 - Kamchatka +RU +6445+17729 Asia/Anadyr MSK+09 - Bering Sea +SA,KW,YE +2438+04643 Asia/Riyadh +SB -0932+16012 Pacific/Guadalcanal +SC -0440+05528 Indian/Mahe +SD +1536+03232 Africa/Khartoum +SE +5920+01803 Europe/Stockholm +SG +0117+10351 Asia/Singapore +SR +0550-05510 America/Paramaribo +SS +0451+03137 Africa/Juba +ST +0020+00644 Africa/Sao_Tome +SV +1342-08912 America/El_Salvador +SY +3330+03618 Asia/Damascus +TC +2128-07108 
America/Grand_Turk +TD +1207+01503 Africa/Ndjamena +TF -492110+0701303 Indian/Kerguelen Kerguelen, St Paul Island, Amsterdam Island +TH,KH,LA,VN +1345+10031 Asia/Bangkok Indochina (most areas) +TJ +3835+06848 Asia/Dushanbe +TK -0922-17114 Pacific/Fakaofo +TL -0833+12535 Asia/Dili +TM +3757+05823 Asia/Ashgabat +TN +3648+01011 Africa/Tunis +TO -2110-17510 Pacific/Tongatapu +TR +4101+02858 Europe/Istanbul +TT,AG,AI,BL,DM,GD,GP,KN,LC,MF,MS,VC,VG,VI +1039-06131 America/Port_of_Spain +TV -0831+17913 Pacific/Funafuti +TW +2503+12130 Asia/Taipei +UA +5026+03031 Europe/Kiev Ukraine (most areas) +UA +4837+02218 Europe/Uzhgorod Ruthenia +UA +4750+03510 Europe/Zaporozhye Zaporozh'ye/Zaporizhia; Lugansk/Luhansk (east) +UM +1917+16637 Pacific/Wake Wake Island +US +404251-0740023 America/New_York Eastern (most areas) +US +421953-0830245 America/Detroit Eastern - MI (most areas) +US +381515-0854534 America/Kentucky/Louisville Eastern - KY (Louisville area) +US +364947-0845057 America/Kentucky/Monticello Eastern - KY (Wayne) +US +394606-0860929 America/Indiana/Indianapolis Eastern - IN (most areas) +US +384038-0873143 America/Indiana/Vincennes Eastern - IN (Da, Du, K, Mn) +US +410305-0863611 America/Indiana/Winamac Eastern - IN (Pulaski) +US +382232-0862041 America/Indiana/Marengo Eastern - IN (Crawford) +US +382931-0871643 America/Indiana/Petersburg Eastern - IN (Pike) +US +384452-0850402 America/Indiana/Vevay Eastern - IN (Switzerland) +US +415100-0873900 America/Chicago Central (most areas) +US +375711-0864541 America/Indiana/Tell_City Central - IN (Perry) +US +411745-0863730 America/Indiana/Knox Central - IN (Starke) +US +450628-0873651 America/Menominee Central - MI (Wisconsin border) +US +470659-1011757 America/North_Dakota/Center Central - ND (Oliver) +US +465042-1012439 America/North_Dakota/New_Salem Central - ND (Morton rural) +US +471551-1014640 America/North_Dakota/Beulah Central - ND (Mercer) +US +394421-1045903 America/Denver Mountain (most areas) +US +433649-1161209 America/Boise Mountain - ID (south); OR (east) +US +332654-1120424 America/Phoenix MST - Arizona (except Navajo) +US +340308-1181434 America/Los_Angeles Pacific +US +611305-1495401 America/Anchorage Alaska (most areas) +US +581807-1342511 America/Juneau Alaska - Juneau area +US +571035-1351807 America/Sitka Alaska - Sitka area +US +550737-1313435 America/Metlakatla Alaska - Annette Island +US +593249-1394338 America/Yakutat Alaska - Yakutat +US +643004-1652423 America/Nome Alaska (west) +US +515248-1763929 America/Adak Aleutian Islands +US,UM +211825-1575130 Pacific/Honolulu Hawaii +UY -345433-0561245 America/Montevideo +UZ +3940+06648 Asia/Samarkand Uzbekistan (west) +UZ +4120+06918 Asia/Tashkent Uzbekistan (east) +VE +1030-06656 America/Caracas +VN +1045+10640 Asia/Ho_Chi_Minh Vietnam (south) +VU -1740+16825 Pacific/Efate +WF -1318-17610 Pacific/Wallis +WS -1350-17144 Pacific/Apia +ZA,LS,SZ -2615+02800 Africa/Johannesburg diff --git a/env/lib/python3.6/site-packages/requests-2.19.1.dist-info/DESCRIPTION.rst b/env/lib/python3.6/site-packages/requests-2.19.1.dist-info/DESCRIPTION.rst new file mode 100644 index 0000000..2a58be5 --- /dev/null +++ b/env/lib/python3.6/site-packages/requests-2.19.1.dist-info/DESCRIPTION.rst @@ -0,0 +1,1684 @@ +Requests: HTTP for Humans +========================= + +.. image:: https://img.shields.io/pypi/v/requests.svg + :target: https://pypi.org/project/requests/ + +.. image:: https://img.shields.io/pypi/l/requests.svg + :target: https://pypi.org/project/requests/ + +.. 
image:: https://img.shields.io/pypi/pyversions/requests.svg + :target: https://pypi.org/project/requests/ + +.. image:: https://codecov.io/github/requests/requests/coverage.svg?branch=master + :target: https://codecov.io/github/requests/requests + :alt: codecov.io + +.. image:: https://img.shields.io/github/contributors/requests/requests.svg + :target: https://github.com/requests/requests/graphs/contributors + +.. image:: https://img.shields.io/badge/Say%20Thanks-!-1EAEDB.svg + :target: https://saythanks.io/to/kennethreitz + +Requests is the only *Non-GMO* HTTP library for Python, safe for human +consumption. + +.. image:: https://farm5.staticflickr.com/4317/35198386374_1939af3de6_k_d.jpg + +Behold, the power of Requests: + +.. code-block:: python + + >>> r = requests.get('https://api.github.com/user', auth=('user', 'pass')) + >>> r.status_code + 200 + >>> r.headers['content-type'] + 'application/json; charset=utf8' + >>> r.encoding + 'utf-8' + >>> r.text + u'{"type":"User"...' + >>> r.json() + {u'disk_usage': 368627, u'private_gists': 484, ...} + +See `the similar code, sans Requests <https://gist.github.com/973705>`_. + +.. image:: https://raw.githubusercontent.com/requests/requests/master/docs/_static/requests-logo-small.png + :target: http://docs.python-requests.org/ + + +Requests allows you to send *organic, grass-fed* HTTP/1.1 requests, without the +need for manual labor. There's no need to manually add query strings to your +URLs, or to form-encode your POST data. Keep-alive and HTTP connection pooling +are 100% automatic, thanks to `urllib3 <https://github.com/shazow/urllib3>`_. + +Besides, all the cool kids are doing it. Requests is one of the most +downloaded Python packages of all time, pulling in over 11,000,000 downloads +every month. You don't want to be left out! + +Feature Support +--------------- + +Requests is ready for today's web. + +- International Domains and URLs +- Keep-Alive & Connection Pooling +- Sessions with Cookie Persistence +- Browser-style SSL Verification +- Basic/Digest Authentication +- Elegant Key/Value Cookies +- Automatic Decompression +- Automatic Content Decoding +- Unicode Response Bodies +- Multipart File Uploads +- HTTP(S) Proxy Support +- Connection Timeouts +- Streaming Downloads +- ``.netrc`` Support +- Chunked Requests + +Requests officially supports Python 2.7 & 3.4–3.6, and runs great on PyPy. + +Installation +------------ + +To install Requests, simply use `pipenv <http://pipenv.org/>`_ (or pip, of course): + +.. code-block:: bash + + $ pipenv install requests + ✨🍰✨ + +Satisfaction guaranteed. + +Documentation +------------- + +Fantastic documentation is available at http://docs.python-requests.org/, for a limited time only. + + +How to Contribute +----------------- + +#. Check for open issues or open a fresh issue to start a discussion around a feature idea or a bug. There is a `Contributor Friendly`_ tag for issues that should be ideal for people who are not very familiar with the codebase yet. +#. Fork `the repository`_ on GitHub to start making your changes to the **master** branch (or branch off of it). +#. Write a test which shows that the bug was fixed or that the feature works as expected. +#. Send a pull request and bug the maintainer until it gets merged and published. :) Make sure to add yourself to AUTHORS_. + +.. _`the repository`: https://github.com/requests/requests +.. _AUTHORS: https://github.com/requests/requests/blob/master/AUTHORS.rst +.. 
_Contributor Friendly: https://github.com/requests/requests/issues?direction=desc&labels=Contributor+Friendly&page=1&sort=updated&state=open + + +.. :changelog: + +Release History +--------------- + +dev ++++ + +- [Short description of non-trivial change.] + +2.19.1 (2018-06-14) ++++++++++++++++++++ + +**Bugfixes** + +- Fixed issue where status_codes.py's ``init`` function failed trying to append to + a ``__doc__`` value of ``None``. + +2.19.0 (2018-06-12) ++++++++++++++++++++ + +**Improvements** + +- Warn user about possible slowdown when using cryptography version < 1.3.4 +- Check for invalid host in proxy URL, before forwarding request to adapter. +- Fragments are now properly maintained across redirects. (RFC7231 7.1.2) +- Removed use of cgi module to expedite library load time. +- Added support for SHA-256 and SHA-512 digest auth algorithms. +- Minor performance improvement to ``Request.content``. +- Migrate to using collections.abc for 3.7 compatibility. + +**Bugfixes** + +- Parsing empty ``Link`` headers with ``parse_header_links()`` no longer return one bogus entry. +- Fixed issue where loading the default certificate bundle from a zip archive + would raise an ``IOError``. +- Fixed issue with unexpected ``ImportError`` on windows system which do not support ``winreg`` module. +- DNS resolution in proxy bypass no longer includes the username and password in + the request. This also fixes the issue of DNS queries failing on macOS. +- Properly normalize adapter prefixes for url comparison. +- Passing ``None`` as a file pointer to the ``files`` param no longer raises an exception. +- Calling ``copy`` on a ``RequestsCookieJar`` will now preserve the cookie policy correctly. + +**Dependencies** + +- We now support idna v2.7. +- We now support urllib3 v1.23. + +2.18.4 (2017-08-15) ++++++++++++++++++++ + +**Improvements** + +- Error messages for invalid headers now include the header name for easier debugging + +**Dependencies** + +- We now support idna v2.6. + +2.18.3 (2017-08-02) ++++++++++++++++++++ + +**Improvements** + +- Running ``$ python -m requests.help`` now includes the installed version of idna. + +**Bugfixes** + +- Fixed issue where Requests would raise ``ConnectionError`` instead of + ``SSLError`` when encountering SSL problems when using urllib3 v1.22. + +2.18.2 (2017-07-25) ++++++++++++++++++++ + +**Bugfixes** + +- ``requests.help`` no longer fails on Python 2.6 due to the absence of + ``ssl.OPENSSL_VERSION_NUMBER``. + +**Dependencies** + +- We now support urllib3 v1.22. + +2.18.1 (2017-06-14) ++++++++++++++++++++ + +**Bugfixes** + +- Fix an error in the packaging whereby the ``*.whl`` contained incorrect data + that regressed the fix in v2.17.3. + +2.18.0 (2017-06-14) ++++++++++++++++++++ + +**Improvements** + +- ``Response`` is now a context manager, so can be used directly in a ``with`` statement + without first having to be wrapped by ``contextlib.closing()``. + +**Bugfixes** + +- Resolve installation failure if multiprocessing is not available +- Resolve tests crash if multiprocessing is not able to determine the number of CPU cores +- Resolve error swallowing in utils set_environ generator + + +2.17.3 (2017-05-29) ++++++++++++++++++++ + +**Improvements** + +- Improved ``packages`` namespace identity support, for monkeypatching libraries. + + +2.17.2 (2017-05-29) ++++++++++++++++++++ + +**Improvements** + +- Improved ``packages`` namespace identity support, for monkeypatching libraries. 
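As a quick illustration of the context-manager behaviour noted under 2.18.0 above, a streamed response can now be closed by a ``with`` block directly, without ``contextlib.closing()``; the URL below is a placeholder:

.. code-block:: python

    import requests

    # The underlying connection is released when the block exits.
    with requests.get('https://example.com/large-file', stream=True) as r:
        for chunk in r.iter_content(chunk_size=8192):
            ...  # process each chunk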
+ + +2.17.1 (2017-05-29) ++++++++++++++++++++ + +**Improvements** + +- Improved ``packages`` namespace identity support, for monkeypatching libraries. + + +2.17.0 (2017-05-29) ++++++++++++++++++++ + +**Improvements** + +- Removal of the 301 redirect cache. This improves thread-safety. + + +2.16.5 (2017-05-28) ++++++++++++++++++++ + +- Improvements to ``$ python -m requests.help``. + +2.16.4 (2017-05-27) ++++++++++++++++++++ + +- Introduction of the ``$ python -m requests.help`` command, for debugging with maintainers! + +2.16.3 (2017-05-27) ++++++++++++++++++++ + +- Further restored the ``requests.packages`` namespace for compatibility reasons. + +2.16.2 (2017-05-27) ++++++++++++++++++++ + +- Further restored the ``requests.packages`` namespace for compatibility reasons. + +No code modification (noted below) should be necessary any longer. + +2.16.1 (2017-05-27) ++++++++++++++++++++ + +- Restored the ``requests.packages`` namespace for compatibility reasons. +- Bugfix for ``urllib3`` version parsing. + +**Note**: code that was written to import against the ``requests.packages`` +namespace previously will have to import code that rests at this module-level +now. + +For example:: + + from requests.packages.urllib3.poolmanager import PoolManager + +Will need to be re-written to be:: + + from requests.packages import urllib3 + urllib3.poolmanager.PoolManager + +Or, even better:: + + from urllib3.poolmanager import PoolManager + +2.16.0 (2017-05-26) ++++++++++++++++++++ + +- Unvendor ALL the things! + +2.15.1 (2017-05-26) ++++++++++++++++++++ + +- Everyone makes mistakes. + +2.15.0 (2017-05-26) ++++++++++++++++++++ + +**Improvements** + +- Introduction of the ``Response.next`` property, for getting the next + ``PreparedResponse`` from a redirect chain (when ``allow_redirects=False``). +- Internal refactoring of ``__version__`` module. + +**Bugfixes** + +- Restored once-optional parameter for ``requests.utils.get_environ_proxies()``. + +2.14.2 (2017-05-10) ++++++++++++++++++++ + +**Bugfixes** + +- Changed a less-than to an equal-to and an or in the dependency markers to + widen compatibility with older setuptools releases. + +2.14.1 (2017-05-09) ++++++++++++++++++++ + +**Bugfixes** + +- Changed the dependency markers to widen compatibility with older pip + releases. + +2.14.0 (2017-05-09) ++++++++++++++++++++ + +**Improvements** + +- It is now possible to pass ``no_proxy`` as a key to the ``proxies`` + dictionary to provide handling similar to the ``NO_PROXY`` environment + variable. +- When users provide invalid paths to certificate bundle files or directories + Requests now raises ``IOError``, rather than failing at the time of the HTTPS + request with a fairly inscrutable certificate validation error. +- The behavior of ``SessionRedirectMixin`` was slightly altered. + ``resolve_redirects`` will now detect a redirect by calling + ``get_redirect_target(response)`` instead of directly + querying ``Response.is_redirect`` and ``Response.headers['location']``. + Advanced users will be able to process malformed redirects more easily. +- Changed the internal calculation of elapsed request time to have higher + resolution on Windows. +- Added ``win_inet_pton`` as conditional dependency for the ``[socks]`` extra + on Windows with Python 2.7. 
+- Changed the proxy bypass implementation on Windows: the proxy bypass + check doesn't use forward and reverse DNS requests anymore +- URLs with schemes that begin with ``http`` but are not ``http`` or ``https`` + no longer have their host parts forced to lowercase. + +**Bugfixes** + +- Much improved handling of non-ASCII ``Location`` header values in redirects. + Fewer ``UnicodeDecodeErrors`` are encountered on Python 2, and Python 3 now + correctly understands that Latin-1 is unlikely to be the correct encoding. +- If an attempt to ``seek`` file to find out its length fails, we now + appropriately handle that by aborting our content-length calculations. +- Restricted ``HTTPDigestAuth`` to only respond to auth challenges made on 4XX + responses, rather than to all auth challenges. +- Fixed some code that was firing ``DeprecationWarning`` on Python 3.6. +- The dismayed person emoticon (``/o\\``) no longer has a big head. I'm sure + this is what you were all worrying about most. + + +**Miscellaneous** + +- Updated bundled urllib3 to v1.21.1. +- Updated bundled chardet to v3.0.2. +- Updated bundled idna to v2.5. +- Updated bundled certifi to 2017.4.17. + +2.13.0 (2017-01-24) ++++++++++++++++++++ + +**Features** + +- Only load the ``idna`` library when we've determined we need it. This will + save some memory for users. + +**Miscellaneous** + +- Updated bundled urllib3 to 1.20. +- Updated bundled idna to 2.2. + +2.12.5 (2017-01-18) ++++++++++++++++++++ + +**Bugfixes** + +- Fixed an issue with JSON encoding detection, specifically detecting + big-endian UTF-32 with BOM. + +2.12.4 (2016-12-14) ++++++++++++++++++++ + +**Bugfixes** + +- Fixed regression from 2.12.2 where non-string types were rejected in the + basic auth parameters. While support for this behaviour has been readded, + the behaviour is deprecated and will be removed in the future. + +2.12.3 (2016-12-01) ++++++++++++++++++++ + +**Bugfixes** + +- Fixed regression from v2.12.1 for URLs with schemes that begin with "http". + These URLs have historically been processed as though they were HTTP-schemed + URLs, and so have had parameters added. This was removed in v2.12.2 in an + overzealous attempt to resolve problems with IDNA-encoding those URLs. This + change was reverted: the other fixes for IDNA-encoding have been judged to + be sufficient to return to the behaviour Requests had before v2.12.0. + +2.12.2 (2016-11-30) ++++++++++++++++++++ + +**Bugfixes** + +- Fixed several issues with IDNA-encoding URLs that are technically invalid but + which are widely accepted. Requests will now attempt to IDNA-encode a URL if + it can but, if it fails, and the host contains only ASCII characters, it will + be passed through optimistically. This will allow users to opt-in to using + IDNA2003 themselves if they want to, and will also allow technically invalid + but still common hostnames. +- Fixed an issue where URLs with leading whitespace would raise + ``InvalidSchema`` errors. +- Fixed an issue where some URLs without the HTTP or HTTPS schemes would still + have HTTP URL preparation applied to them. +- Fixed an issue where Unicode strings could not be used in basic auth. +- Fixed an issue encountered by some Requests plugins where constructing a + Response object would cause ``Response.content`` to raise an + ``AttributeError``. + +2.12.1 (2016-11-16) ++++++++++++++++++++ + +**Bugfixes** + +- Updated setuptools 'security' extra for the new PyOpenSSL backend in urllib3. + +**Miscellaneous** + +- Updated bundled urllib3 to 1.19.1. 
+ +2.12.0 (2016-11-15) ++++++++++++++++++++ + +**Improvements** + +- Updated support for internationalized domain names from IDNA2003 to IDNA2008. + This updated support is required for several forms of IDNs and is mandatory + for .de domains. +- Much improved heuristics for guessing content lengths: Requests will no + longer read an entire ``StringIO`` into memory. +- Much improved logic for recalculating ``Content-Length`` headers for + ``PreparedRequest`` objects. +- Improved tolerance for file-like objects that have no ``tell`` method but + do have a ``seek`` method. +- Anything that is a subclass of ``Mapping`` is now treated like a dictionary + by the ``data=`` keyword argument. +- Requests now tolerates empty passwords in proxy credentials, rather than + stripping the credentials. +- If a request is made with a file-like object as the body and that request is + redirected with a 307 or 308 status code, Requests will now attempt to + rewind the body object so it can be replayed. + +**Bugfixes** + +- When calling ``response.close``, the call to ``close`` will be propagated + through to non-urllib3 backends. +- Fixed issue where the ``ALL_PROXY`` environment variable would be preferred + over scheme-specific variables like ``HTTP_PROXY``. +- Fixed issue where non-UTF8 reason phrases got severely mangled by falling + back to decoding using ISO 8859-1 instead. +- Fixed a bug where Requests would not correctly correlate cookies set when + using custom Host headers if those Host headers did not use the native + string type for the platform. + +**Miscellaneous** + +- Updated bundled urllib3 to 1.19. +- Updated bundled certifi certs to 2016.09.26. + +2.11.1 (2016-08-17) ++++++++++++++++++++ + +**Bugfixes** + +- Fixed a bug when using ``iter_content`` with ``decode_unicode=True`` for + streamed bodies would raise ``AttributeError``. This bug was introduced in + 2.11. +- Strip Content-Type and Transfer-Encoding headers from the header block when + following a redirect that transforms the verb from POST/PUT to GET. + +2.11.0 (2016-08-08) ++++++++++++++++++++ + +**Improvements** + +- Added support for the ``ALL_PROXY`` environment variable. +- Reject header values that contain leading whitespace or newline characters to + reduce risk of header smuggling. + +**Bugfixes** + +- Fixed occasional ``TypeError`` when attempting to decode a JSON response that + occurred in an error case. Now correctly returns a ``ValueError``. +- Requests would incorrectly ignore a non-CIDR IP address in the ``NO_PROXY`` + environment variables: Requests now treats it as a specific IP. +- Fixed a bug when sending JSON data that could cause us to encounter obscure + OpenSSL errors in certain network conditions (yes, really). +- Added type checks to ensure that ``iter_content`` only accepts integers and + ``None`` for chunk sizes. +- Fixed issue where responses whose body had not been fully consumed would have + the underlying connection closed but not returned to the connection pool, + which could cause Requests to hang in situations where the ``HTTPAdapter`` + had been configured to use a blocking connection pool. + +**Miscellaneous** + +- Updated bundled urllib3 to 1.16. +- Some previous releases accidentally accepted non-strings as acceptable header values. This release does not. + +2.10.0 (2016-04-29) ++++++++++++++++++++ + +**New Features** + +- SOCKS Proxy Support! (requires PySocks; ``$ pip install requests[socks]``) + +**Miscellaneous** + +- Updated bundled urllib3 to 1.15.1. 
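To sketch the SOCKS support introduced in 2.10.0 above: once the ``[socks]`` extra is installed, a SOCKS proxy is configured through the ordinary ``proxies`` dictionary. The proxy address and target URL below are placeholders:

.. code-block:: python

    import requests

    # Requires the PySocks extra:  pip install requests[socks]
    proxies = {
        'http': 'socks5://127.0.0.1:1080',
        'https': 'socks5://127.0.0.1:1080',
    }
    r = requests.get('https://example.com', proxies=proxies)
    print(r.status_code)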
+ +2.9.2 (2016-04-29) +++++++++++++++++++ + +**Improvements** + +- Change built-in CaseInsensitiveDict (used for headers) to use OrderedDict + as its underlying datastore. + +**Bugfixes** + +- Don't use redirect_cache if allow_redirects=False +- When passed objects that throw exceptions from ``tell()``, send them via + chunked transfer encoding instead of failing. +- Raise a ProxyError for proxy related connection issues. + +2.9.1 (2015-12-21) +++++++++++++++++++ + +**Bugfixes** + +- Resolve regression introduced in 2.9.0 that made it impossible to send binary + strings as bodies in Python 3. +- Fixed errors when calculating cookie expiration dates in certain locales. + +**Miscellaneous** + +- Updated bundled urllib3 to 1.13.1. + +2.9.0 (2015-12-15) +++++++++++++++++++ + +**Minor Improvements** (Backwards compatible) + +- The ``verify`` keyword argument now supports being passed a path to a + directory of CA certificates, not just a single-file bundle. +- Warnings are now emitted when sending files opened in text mode. +- Added the 511 Network Authentication Required status code to the status code + registry. + +**Bugfixes** + +- For file-like objects that are not seeked to the very beginning, we now + send the content length for the number of bytes we will actually read, rather + than the total size of the file, allowing partial file uploads. +- When uploading file-like objects, if they are empty or have no obvious + content length we set ``Transfer-Encoding: chunked`` rather than + ``Content-Length: 0``. +- We correctly receive the response in buffered mode when uploading chunked + bodies. +- We now handle being passed a query string as a bytestring on Python 3, by + decoding it as UTF-8. +- Sessions are now closed in all cases (exceptional and not) when using the + functional API rather than leaking and waiting for the garbage collector to + clean them up. +- Correctly handle digest auth headers with a malformed ``qop`` directive that + contains no token, by treating it the same as if no ``qop`` directive was + provided at all. +- Minor performance improvements when removing specific cookies by name. + +**Miscellaneous** + +- Updated urllib3 to 1.13. + +2.8.1 (2015-10-13) +++++++++++++++++++ + +**Bugfixes** + +- Update certificate bundle to match ``certifi`` 2015.9.6.2's weak certificate + bundle. +- Fix a bug in 2.8.0 where requests would raise ``ConnectTimeout`` instead of + ``ConnectionError`` +- When using the PreparedRequest flow, requests will now correctly respect the + ``json`` parameter. Broken in 2.8.0. +- When using the PreparedRequest flow, requests will now correctly handle a + Unicode-string method name on Python 2. Broken in 2.8.0. + +2.8.0 (2015-10-05) +++++++++++++++++++ + +**Minor Improvements** (Backwards Compatible) + +- Requests now supports per-host proxies. This allows the ``proxies`` + dictionary to have entries of the form + ``{'<scheme>://<hostname>': '<proxy>'}``. Host-specific proxies will be used + in preference to the previously-supported scheme-specific ones, but the + previous syntax will continue to work. +- ``Response.raise_for_status`` now prints the URL that failed as part of the + exception message. +- ``requests.utils.get_netrc_auth`` now takes an ``raise_errors`` kwarg, + defaulting to ``False``. When ``True``, errors parsing ``.netrc`` files cause + exceptions to be thrown. +- Change to bundled projects import logic to make it easier to unbundle + requests downstream. 
+- Changed the default User-Agent string to avoid leaking data on Linux: now + contains only the requests version. + +**Bugfixes** + +- The ``json`` parameter to ``post()`` and friends will now only be used if + neither ``data`` nor ``files`` are present, consistent with the + documentation. +- We now ignore empty fields in the ``NO_PROXY`` environment variable. +- Fixed problem where ``httplib.BadStatusLine`` would get raised if combining + ``stream=True`` with ``contextlib.closing``. +- Prevented bugs where we would attempt to return the same connection back to + the connection pool twice when sending a Chunked body. +- Miscellaneous minor internal changes. +- Digest Auth support is now thread safe. + +**Updates** + +- Updated urllib3 to 1.12. + +2.7.0 (2015-05-03) +++++++++++++++++++ + +This is the first release that follows our new release process. For more, see +`our documentation +<http://docs.python-requests.org/en/latest/community/release-process/>`_. + +**Bugfixes** + +- Updated urllib3 to 1.10.4, resolving several bugs involving chunked transfer + encoding and response framing. + +2.6.2 (2015-04-23) +++++++++++++++++++ + +**Bugfixes** + +- Fix regression where compressed data that was sent as chunked data was not + properly decompressed. (#2561) + +2.6.1 (2015-04-22) +++++++++++++++++++ + +**Bugfixes** + +- Remove VendorAlias import machinery introduced in v2.5.2. + +- Simplify the PreparedRequest.prepare API: We no longer require the user to + pass an empty list to the hooks keyword argument. (c.f. #2552) + +- Resolve redirects now receives and forwards all of the original arguments to + the adapter. (#2503) + +- Handle UnicodeDecodeErrors when trying to deal with a unicode URL that + cannot be encoded in ASCII. (#2540) + +- Populate the parsed path of the URI field when performing Digest + Authentication. (#2426) + +- Copy a PreparedRequest's CookieJar more reliably when it is not an instance + of RequestsCookieJar. (#2527) + +2.6.0 (2015-03-14) +++++++++++++++++++ + +**Bugfixes** + +- CVE-2015-2296: Fix handling of cookies on redirect. Previously a cookie + without a host value set would use the hostname for the redirected URL + exposing requests users to session fixation attacks and potentially cookie + stealing. This was disclosed privately by Matthew Daley of + `BugFuzz <https://bugfuzz.com>`_. This affects all versions of requests from + v2.1.0 to v2.5.3 (inclusive on both ends). + +- Fix error when requests is an ``install_requires`` dependency and ``python + setup.py test`` is run. (#2462) + +- Fix error when urllib3 is unbundled and requests continues to use the + vendored import location. + +- Include fixes to ``urllib3``'s header handling. + +- Requests' handling of unvendored dependencies is now more restrictive. + +**Features and Improvements** + +- Support bytearrays when passed as parameters in the ``files`` argument. + (#2468) + +- Avoid data duplication when creating a request with ``str``, ``bytes``, or + ``bytearray`` input to the ``files`` argument. + +2.5.3 (2015-02-24) +++++++++++++++++++ + +**Bugfixes** + +- Revert changes to our vendored certificate bundle. For more context see + (#2455, #2456, and http://bugs.python.org/issue23476) + +2.5.2 (2015-02-23) +++++++++++++++++++ + +**Features and Improvements** + +- Add sha256 fingerprint support. (`shazow/urllib3#540`_) + +- Improve the performance of headers. (`shazow/urllib3#544`_) + +**Bugfixes** + +- Copy pip's import machinery. 
When downstream redistributors remove + requests.packages.urllib3 the import machinery will continue to let those + same symbols work. Example usage in requests' documentation and 3rd-party + libraries relying on the vendored copies of urllib3 will work without having + to fallback to the system urllib3. + +- Attempt to quote parts of the URL on redirect if unquoting and then quoting + fails. (#2356) + +- Fix filename type check for multipart form-data uploads. (#2411) + +- Properly handle the case where a server issuing digest authentication + challenges provides both auth and auth-int qop-values. (#2408) + +- Fix a socket leak. (`shazow/urllib3#549`_) + +- Fix multiple ``Set-Cookie`` headers properly. (`shazow/urllib3#534`_) + +- Disable the built-in hostname verification. (`shazow/urllib3#526`_) + +- Fix the behaviour of decoding an exhausted stream. (`shazow/urllib3#535`_) + +**Security** + +- Pulled in an updated ``cacert.pem``. + +- Drop RC4 from the default cipher list. (`shazow/urllib3#551`_) + +.. _shazow/urllib3#551: https://github.com/shazow/urllib3/pull/551 +.. _shazow/urllib3#549: https://github.com/shazow/urllib3/pull/549 +.. _shazow/urllib3#544: https://github.com/shazow/urllib3/pull/544 +.. _shazow/urllib3#540: https://github.com/shazow/urllib3/pull/540 +.. _shazow/urllib3#535: https://github.com/shazow/urllib3/pull/535 +.. _shazow/urllib3#534: https://github.com/shazow/urllib3/pull/534 +.. _shazow/urllib3#526: https://github.com/shazow/urllib3/pull/526 + +2.5.1 (2014-12-23) +++++++++++++++++++ + +**Behavioural Changes** + +- Only catch HTTPErrors in raise_for_status (#2382) + +**Bugfixes** + +- Handle LocationParseError from urllib3 (#2344) +- Handle file-like object filenames that are not strings (#2379) +- Unbreak HTTPDigestAuth handler. Allow new nonces to be negotiated (#2389) + +2.5.0 (2014-12-01) +++++++++++++++++++ + +**Improvements** + +- Allow usage of urllib3's Retry object with HTTPAdapters (#2216) +- The ``iter_lines`` method on a response now accepts a delimiter with which + to split the content (#2295) + +**Behavioural Changes** + +- Add deprecation warnings to functions in requests.utils that will be removed + in 3.0 (#2309) +- Sessions used by the functional API are always closed (#2326) +- Restrict requests to HTTP/1.1 and HTTP/1.0 (stop accepting HTTP/0.9) (#2323) + +**Bugfixes** + +- Only parse the URL once (#2353) +- Allow Content-Length header to always be overridden (#2332) +- Properly handle files in HTTPDigestAuth (#2333) +- Cap redirect_cache size to prevent memory abuse (#2299) +- Fix HTTPDigestAuth handling of redirects after authenticating successfully + (#2253) +- Fix crash with custom method parameter to Session.request (#2317) +- Fix how Link headers are parsed using the regular expression library (#2271) + +**Documentation** + +- Add more references for interlinking (#2348) +- Update CSS for theme (#2290) +- Update width of buttons and sidebar (#2289) +- Replace references of Gittip with Gratipay (#2282) +- Add link to changelog in sidebar (#2273) + +2.4.3 (2014-10-06) +++++++++++++++++++ + +**Bugfixes** + +- Unicode URL improvements for Python 2. +- Re-order JSON param for backwards compat. +- Automatically defrag authentication schemes from host/pass URIs. (`#2249 <https://github.com/requests/requests/issues/2249>`_) + + +2.4.2 (2014-10-05) +++++++++++++++++++ + +**Improvements** + +- FINALLY! Add json parameter for uploads! 
(`#2258 <https://github.com/requests/requests/pull/2258>`_) +- Support for bytestring URLs on Python 3.x (`#2238 <https://github.com/requests/requests/pull/2238>`_) + +**Bugfixes** + +- Avoid getting stuck in a loop (`#2244 <https://github.com/requests/requests/pull/2244>`_) +- Multiple calls to iter* fail with unhelpful error. (`#2240 <https://github.com/requests/requests/issues/2240>`_, `#2241 <https://github.com/requests/requests/issues/2241>`_) + +**Documentation** + +- Correct redirection introduction (`#2245 <https://github.com/requests/requests/pull/2245/>`_) +- Added example of how to send multiple files in one request. (`#2227 <https://github.com/requests/requests/pull/2227/>`_) +- Clarify how to pass a custom set of CAs (`#2248 <https://github.com/requests/requests/pull/2248/>`_) + + + +2.4.1 (2014-09-09) +++++++++++++++++++ + +- Now has a "security" package extras set, ``$ pip install requests[security]`` +- Requests will now use Certifi if it is available. +- Capture and re-raise urllib3 ProtocolError +- Bugfix for responses that attempt to redirect to themselves forever (wtf?). + + +2.4.0 (2014-08-29) +++++++++++++++++++ + +**Behavioral Changes** + +- ``Connection: keep-alive`` header is now sent automatically. + +**Improvements** + +- Support for connect timeouts! Timeout now accepts a tuple (connect, read) which is used to set individual connect and read timeouts. +- Allow copying of PreparedRequests without headers/cookies. +- Updated bundled urllib3 version. +- Refactored settings loading from environment -- new `Session.merge_environment_settings`. +- Handle socket errors in iter_content. + + +2.3.0 (2014-05-16) +++++++++++++++++++ + +**API Changes** + +- New ``Response`` property ``is_redirect``, which is true when the + library could have processed this response as a redirection (whether + or not it actually did). +- The ``timeout`` parameter now affects requests with both ``stream=True`` and + ``stream=False`` equally. +- The change in v2.0.0 to mandate explicit proxy schemes has been reverted. + Proxy schemes now default to ``http://``. +- The ``CaseInsensitiveDict`` used for HTTP headers now behaves like a normal + dictionary when references as string or viewed in the interpreter. + +**Bugfixes** + +- No longer expose Authorization or Proxy-Authorization headers on redirect. + Fix CVE-2014-1829 and CVE-2014-1830 respectively. +- Authorization is re-evaluated each redirect. +- On redirect, pass url as native strings. +- Fall-back to autodetected encoding for JSON when Unicode detection fails. +- Headers set to ``None`` on the ``Session`` are now correctly not sent. +- Correctly honor ``decode_unicode`` even if it wasn't used earlier in the same + response. +- Stop advertising ``compress`` as a supported Content-Encoding. +- The ``Response.history`` parameter is now always a list. +- Many, many ``urllib3`` bugfixes. + +2.2.1 (2014-01-23) +++++++++++++++++++ + +**Bugfixes** + +- Fixes incorrect parsing of proxy credentials that contain a literal or encoded '#' character. +- Assorted urllib3 fixes. + +2.2.0 (2014-01-09) +++++++++++++++++++ + +**API Changes** + +- New exception: ``ContentDecodingError``. Raised instead of ``urllib3`` + ``DecodeError`` exceptions. + +**Bugfixes** + +- Avoid many many exceptions from the buggy implementation of ``proxy_bypass`` on OS X in Python 2.6. +- Avoid crashing when attempting to get authentication credentials from ~/.netrc when running as a user without a home directory. 
+- Use the correct pool size for pools of connections to proxies. +- Fix iteration of ``CookieJar`` objects. +- Ensure that cookies are persisted over redirect. +- Switch back to using chardet, since it has merged with charade. + +2.1.0 (2013-12-05) +++++++++++++++++++ + +- Updated CA Bundle, of course. +- Cookies set on individual Requests through a ``Session`` (e.g. via ``Session.get()``) are no longer persisted to the ``Session``. +- Clean up connections when we hit problems during chunked upload, rather than leaking them. +- Return connections to the pool when a chunked upload is successful, rather than leaking it. +- Match the HTTPbis recommendation for HTTP 301 redirects. +- Prevent hanging when using streaming uploads and Digest Auth when a 401 is received. +- Values of headers set by Requests are now always the native string type. +- Fix previously broken SNI support. +- Fix accessing HTTP proxies using proxy authentication. +- Unencode HTTP Basic usernames and passwords extracted from URLs. +- Support for IP address ranges for no_proxy environment variable +- Parse headers correctly when users override the default ``Host:`` header. +- Avoid munging the URL in case of case-sensitive servers. +- Looser URL handling for non-HTTP/HTTPS urls. +- Accept unicode methods in Python 2.6 and 2.7. +- More resilient cookie handling. +- Make ``Response`` objects pickleable. +- Actually added MD5-sess to Digest Auth instead of pretending to like last time. +- Updated internal urllib3. +- Fixed @Lukasa's lack of taste. + +2.0.1 (2013-10-24) +++++++++++++++++++ + +- Updated included CA Bundle with new mistrusts and automated process for the future +- Added MD5-sess to Digest Auth +- Accept per-file headers in multipart file POST messages. +- Fixed: Don't send the full URL on CONNECT messages. +- Fixed: Correctly lowercase a redirect scheme. +- Fixed: Cookies not persisted when set via functional API. +- Fixed: Translate urllib3 ProxyError into a requests ProxyError derived from ConnectionError. +- Updated internal urllib3 and chardet. + +2.0.0 (2013-09-24) +++++++++++++++++++ + +**API Changes:** + +- Keys in the Headers dictionary are now native strings on all Python versions, + i.e. bytestrings on Python 2, unicode on Python 3. +- Proxy URLs now *must* have an explicit scheme. A ``MissingSchema`` exception + will be raised if they don't. +- Timeouts now apply to read time if ``Stream=False``. +- ``RequestException`` is now a subclass of ``IOError``, not ``RuntimeError``. +- Added new method to ``PreparedRequest`` objects: ``PreparedRequest.copy()``. +- Added new method to ``Session`` objects: ``Session.update_request()``. This + method updates a ``Request`` object with the data (e.g. cookies) stored on + the ``Session``. +- Added new method to ``Session`` objects: ``Session.prepare_request()``. This + method updates and prepares a ``Request`` object, and returns the + corresponding ``PreparedRequest`` object. +- Added new method to ``HTTPAdapter`` objects: ``HTTPAdapter.proxy_headers()``. + This should not be called directly, but improves the subclass interface. +- ``httplib.IncompleteRead`` exceptions caused by incorrect chunked encoding + will now raise a Requests ``ChunkedEncodingError`` instead. +- Invalid percent-escape sequences now cause a Requests ``InvalidURL`` + exception to be raised. +- HTTP 208 no longer uses reason phrase ``"im_used"``. Correctly uses + ``"already_reported"``. +- HTTP 226 reason added (``"im_used"``). 
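+
+A brief sketch of the ``Session.prepare_request()`` flow listed above, using a
+hypothetical URL and header name; the session merges its own state (cookies,
+headers) into the returned ``PreparedRequest`` before it is sent::
+
+    import requests
+
+    s = requests.Session()
+    s.headers['X-Example'] = 'demo'   # hypothetical session-level header
+
+    req = requests.Request('GET', 'https://httpbin.org/get', params={'q': 'test'})
+    prepped = s.prepare_request(req)  # merges session cookies/headers into the request
+    resp = s.send(prepped)
+    print(resp.status_code, prepped.url)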
+ +**Bugfixes:** + +- Vastly improved proxy support, including the CONNECT verb. Special thanks to + the many contributors who worked towards this improvement. +- Cookies are now properly managed when 401 authentication responses are + received. +- Chunked encoding fixes. +- Support for mixed case schemes. +- Better handling of streaming downloads. +- Retrieve environment proxies from more locations. +- Minor cookies fixes. +- Improved redirect behaviour. +- Improved streaming behaviour, particularly for compressed data. +- Miscellaneous small Python 3 text encoding bugs. +- ``.netrc`` no longer overrides explicit auth. +- Cookies set by hooks are now correctly persisted on Sessions. +- Fix problem with cookies that specify port numbers in their host field. +- ``BytesIO`` can be used to perform streaming uploads. +- More generous parsing of the ``no_proxy`` environment variable. +- Non-string objects can be passed in data values alongside files. + +1.2.3 (2013-05-25) +++++++++++++++++++ + +- Simple packaging fix + + +1.2.2 (2013-05-23) +++++++++++++++++++ + +- Simple packaging fix + + +1.2.1 (2013-05-20) +++++++++++++++++++ + +- 301 and 302 redirects now change the verb to GET for all verbs, not just + POST, improving browser compatibility. +- Python 3.3.2 compatibility +- Always percent-encode location headers +- Fix connection adapter matching to be most-specific first +- new argument to the default connection adapter for passing a block argument +- prevent a KeyError when there's no link headers + +1.2.0 (2013-03-31) +++++++++++++++++++ + +- Fixed cookies on sessions and on requests +- Significantly change how hooks are dispatched - hooks now receive all the + arguments specified by the user when making a request so hooks can make a + secondary request with the same parameters. This is especially necessary for + authentication handler authors +- certifi support was removed +- Fixed bug where using OAuth 1 with body ``signature_type`` sent no data +- Major proxy work thanks to @Lukasa including parsing of proxy authentication + from the proxy url +- Fix DigestAuth handling too many 401s +- Update vendored urllib3 to include SSL bug fixes +- Allow keyword arguments to be passed to ``json.loads()`` via the + ``Response.json()`` method +- Don't send ``Content-Length`` header by default on ``GET`` or ``HEAD`` + requests +- Add ``elapsed`` attribute to ``Response`` objects to time how long a request + took. +- Fix ``RequestsCookieJar`` +- Sessions and Adapters are now picklable, i.e., can be used with the + multiprocessing library +- Update charade to version 1.0.3 + +The change in how hooks are dispatched will likely cause a great deal of +issues. + +1.1.0 (2013-01-10) +++++++++++++++++++ + +- CHUNKED REQUESTS +- Support for iterable response bodies +- Assume servers persist redirect params +- Allow explicit content types to be specified for file data +- Make merge_kwargs case-insensitive when looking up keys + +1.0.3 (2012-12-18) +++++++++++++++++++ + +- Fix file upload encoding bug +- Fix cookie behavior + +1.0.2 (2012-12-17) +++++++++++++++++++ + +- Proxy fix for HTTPAdapter. + +1.0.1 (2012-12-17) +++++++++++++++++++ + +- Cert verification exception bug. +- Proxy fix for HTTPAdapter. 
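+
+As a small illustration of two items from the 1.2.0 notes above (keyword
+arguments forwarded to ``json.loads()`` via ``Response.json()``, and the
+``elapsed`` attribute), with a hypothetical JSON endpoint::
+
+    from decimal import Decimal
+
+    import requests
+
+    r = requests.get('https://httpbin.org/json')   # hypothetical JSON endpoint
+    data = r.json(parse_float=Decimal)              # kwargs are passed to json.loads()
+    print(type(data), r.elapsed.total_seconds())    # elapsed is a datetime.timedelta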
+ +1.0.0 (2012-12-17) +++++++++++++++++++ + +- Massive Refactor and Simplification +- Switch to Apache 2.0 license +- Swappable Connection Adapters +- Mountable Connection Adapters +- Mutable ProcessedRequest chain +- /s/prefetch/stream +- Removal of all configuration +- Standard library logging +- Make Response.json() callable, not property. +- Usage of new charade project, which provides python 2 and 3 simultaneous chardet. +- Removal of all hooks except 'response' +- Removal of all authentication helpers (OAuth, Kerberos) + +This is not a backwards compatible change. + +0.14.2 (2012-10-27) ++++++++++++++++++++ + +- Improved mime-compatible JSON handling +- Proxy fixes +- Path hack fixes +- Case-Insensitive Content-Encoding headers +- Support for CJK parameters in form posts + + +0.14.1 (2012-10-01) ++++++++++++++++++++ + +- Python 3.3 Compatibility +- Simply default accept-encoding +- Bugfixes + + +0.14.0 (2012-09-02) +++++++++++++++++++++ + +- No more iter_content errors if already downloaded. + +0.13.9 (2012-08-25) ++++++++++++++++++++ + +- Fix for OAuth + POSTs +- Remove exception eating from dispatch_hook +- General bugfixes + +0.13.8 (2012-08-21) ++++++++++++++++++++ + +- Incredible Link header support :) + +0.13.7 (2012-08-19) ++++++++++++++++++++ + +- Support for (key, value) lists everywhere. +- Digest Authentication improvements. +- Ensure proxy exclusions work properly. +- Clearer UnicodeError exceptions. +- Automatic casting of URLs to strings (fURL and such) +- Bugfixes. + +0.13.6 (2012-08-06) ++++++++++++++++++++ + +- Long awaited fix for hanging connections! + +0.13.5 (2012-07-27) ++++++++++++++++++++ + +- Packaging fix + +0.13.4 (2012-07-27) ++++++++++++++++++++ + +- GSSAPI/Kerberos authentication! +- App Engine 2.7 Fixes! +- Fix leaking connections (from urllib3 update) +- OAuthlib path hack fix +- OAuthlib URL parameters fix. + +0.13.3 (2012-07-12) ++++++++++++++++++++ + +- Use simplejson if available. +- Do not hide SSLErrors behind Timeouts. +- Fixed param handling with urls containing fragments. +- Significantly improved information in User Agent. +- client certificates are ignored when verify=False + +0.13.2 (2012-06-28) ++++++++++++++++++++ + +- Zero dependencies (once again)! +- New: Response.reason +- Sign querystring parameters in OAuth 1.0 +- Client certificates no longer ignored when verify=False +- Add openSUSE certificate support + +0.13.1 (2012-06-07) ++++++++++++++++++++ + +- Allow passing a file or file-like object as data. +- Allow hooks to return responses that indicate errors. +- Fix Response.text and Response.json for body-less responses. + +0.13.0 (2012-05-29) ++++++++++++++++++++ + +- Removal of Requests.async in favor of `grequests <https://github.com/kennethreitz/grequests>`_ +- Allow disabling of cookie persistence. +- New implementation of safe_mode +- cookies.get now supports default argument +- Session cookies not saved when Session.request is called with return_response=False +- Env: no_proxy support. +- RequestsCookieJar improvements. +- Various bug fixes. + +0.12.1 (2012-05-08) ++++++++++++++++++++ + +- New ``Response.json`` property. +- Ability to add string file uploads. +- Fix out-of-range issue with iter_lines. +- Fix iter_content default size. +- Fix POST redirects containing files. + +0.12.0 (2012-05-02) ++++++++++++++++++++ + +- EXPERIMENTAL OAUTH SUPPORT! +- Proper CookieJar-backed cookies interface with awesome dict-like interface. +- Speed fix for non-iterated content chunks. +- Move ``pre_request`` to a more usable place. 
+- New ``pre_send`` hook. +- Lazily encode data, params, files. +- Load system Certificate Bundle if ``certify`` isn't available. +- Cleanups, fixes. + +0.11.2 (2012-04-22) ++++++++++++++++++++ + +- Attempt to use the OS's certificate bundle if ``certifi`` isn't available. +- Infinite digest auth redirect fix. +- Multi-part file upload improvements. +- Fix decoding of invalid %encodings in URLs. +- If there is no content in a response don't throw an error the second time that content is attempted to be read. +- Upload data on redirects. + +0.11.1 (2012-03-30) ++++++++++++++++++++ + +* POST redirects now break RFC to do what browsers do: Follow up with a GET. +* New ``strict_mode`` configuration to disable new redirect behavior. + + +0.11.0 (2012-03-14) ++++++++++++++++++++ + +* Private SSL Certificate support +* Remove select.poll from Gevent monkeypatching +* Remove redundant generator for chunked transfer encoding +* Fix: Response.ok raises Timeout Exception in safe_mode + +0.10.8 (2012-03-09) ++++++++++++++++++++ + +* Generate chunked ValueError fix +* Proxy configuration by environment variables +* Simplification of iter_lines. +* New `trust_env` configuration for disabling system/environment hints. +* Suppress cookie errors. + +0.10.7 (2012-03-07) ++++++++++++++++++++ + +* `encode_uri` = False + +0.10.6 (2012-02-25) ++++++++++++++++++++ + +* Allow '=' in cookies. + +0.10.5 (2012-02-25) ++++++++++++++++++++ + +* Response body with 0 content-length fix. +* New async.imap. +* Don't fail on netrc. + + +0.10.4 (2012-02-20) ++++++++++++++++++++ + +* Honor netrc. + +0.10.3 (2012-02-20) ++++++++++++++++++++ + +* HEAD requests don't follow redirects anymore. +* raise_for_status() doesn't raise for 3xx anymore. +* Make Session objects picklable. +* ValueError for invalid schema URLs. + +0.10.2 (2012-01-15) ++++++++++++++++++++ + +* Vastly improved URL quoting. +* Additional allowed cookie key values. +* Attempted fix for "Too many open files" Error +* Replace unicode errors on first pass, no need for second pass. +* Append '/' to bare-domain urls before query insertion. +* Exceptions now inherit from RuntimeError. +* Binary uploads + auth fix. +* Bugfixes. + + +0.10.1 (2012-01-23) ++++++++++++++++++++ + +* PYTHON 3 SUPPORT! +* Dropped 2.5 Support. (*Backwards Incompatible*) + +0.10.0 (2012-01-21) ++++++++++++++++++++ + +* ``Response.content`` is now bytes-only. (*Backwards Incompatible*) +* New ``Response.text`` is unicode-only. +* If no ``Response.encoding`` is specified and ``chardet`` is available, ``Response.text`` will guess an encoding. +* Default to ISO-8859-1 (Western) encoding for "text" subtypes. +* Removal of `decode_unicode`. (*Backwards Incompatible*) +* New multiple-hooks system. +* New ``Response.register_hook`` for registering hooks within the pipeline. +* ``Response.url`` is now Unicode. + +0.9.3 (2012-01-18) +++++++++++++++++++ + +* SSL verify=False bugfix (apparent on windows machines). + +0.9.2 (2012-01-18) +++++++++++++++++++ + +* Asynchronous async.send method. +* Support for proper chunk streams with boundaries. +* session argument for Session classes. +* Print entire hook tracebacks, not just exception instance. +* Fix response.iter_lines from pending next line. +* Fix but in HTTP-digest auth w/ URI having query strings. +* Fix in Event Hooks section. +* Urllib3 update. + + +0.9.1 (2012-01-06) +++++++++++++++++++ + +* danger_mode for automatic Response.raise_for_status() +* Response.iter_lines refactor + +0.9.0 (2011-12-28) +++++++++++++++++++ + +* verify ssl is default. 
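+
+A minimal sketch of the bytes/unicode split described in the 0.10.0 notes above
+(URL hypothetical): ``Response.content`` holds the raw byte payload, while
+``Response.text`` decodes it using ``Response.encoding``, which is taken from
+the headers or guessed when none is specified::
+
+    import requests
+
+    r = requests.get('https://httpbin.org/encoding/utf8')  # hypothetical endpoint
+    print(type(r.content))    # <class 'bytes'>, the raw body
+    print(r.encoding)         # from headers, or guessed if absent
+    r.encoding = 'utf-8'      # can be overridden before reading r.text
+    print(type(r.text))       # <class 'str'>, the decoded body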
+ + +0.8.9 (2011-12-28) +++++++++++++++++++ + +* Packaging fix. + + +0.8.8 (2011-12-28) +++++++++++++++++++ + +* SSL CERT VERIFICATION! +* Release of Cerifi: Mozilla's cert list. +* New 'verify' argument for SSL requests. +* Urllib3 update. + +0.8.7 (2011-12-24) +++++++++++++++++++ + +* iter_lines last-line truncation fix +* Force safe_mode for async requests +* Handle safe_mode exceptions more consistently +* Fix iteration on null responses in safe_mode + +0.8.6 (2011-12-18) +++++++++++++++++++ + +* Socket timeout fixes. +* Proxy Authorization support. + +0.8.5 (2011-12-14) +++++++++++++++++++ + +* Response.iter_lines! + +0.8.4 (2011-12-11) +++++++++++++++++++ + +* Prefetch bugfix. +* Added license to installed version. + +0.8.3 (2011-11-27) +++++++++++++++++++ + +* Converted auth system to use simpler callable objects. +* New session parameter to API methods. +* Display full URL while logging. + +0.8.2 (2011-11-19) +++++++++++++++++++ + +* New Unicode decoding system, based on over-ridable `Response.encoding`. +* Proper URL slash-quote handling. +* Cookies with ``[``, ``]``, and ``_`` allowed. + +0.8.1 (2011-11-15) +++++++++++++++++++ + +* URL Request path fix +* Proxy fix. +* Timeouts fix. + +0.8.0 (2011-11-13) +++++++++++++++++++ + +* Keep-alive support! +* Complete removal of Urllib2 +* Complete removal of Poster +* Complete removal of CookieJars +* New ConnectionError raising +* Safe_mode for error catching +* prefetch parameter for request methods +* OPTION method +* Async pool size throttling +* File uploads send real names +* Vendored in urllib3 + +0.7.6 (2011-11-07) +++++++++++++++++++ + +* Digest authentication bugfix (attach query data to path) + +0.7.5 (2011-11-04) +++++++++++++++++++ + +* Response.content = None if there was an invalid response. +* Redirection auth handling. + +0.7.4 (2011-10-26) +++++++++++++++++++ + +* Session Hooks fix. + +0.7.3 (2011-10-23) +++++++++++++++++++ + +* Digest Auth fix. + + +0.7.2 (2011-10-23) +++++++++++++++++++ + +* PATCH Fix. + + +0.7.1 (2011-10-23) +++++++++++++++++++ + +* Move away from urllib2 authentication handling. +* Fully Remove AuthManager, AuthObject, &c. +* New tuple-based auth system with handler callbacks. + + +0.7.0 (2011-10-22) +++++++++++++++++++ + +* Sessions are now the primary interface. +* Deprecated InvalidMethodException. +* PATCH fix. +* New config system (no more global settings). + + +0.6.6 (2011-10-19) +++++++++++++++++++ + +* Session parameter bugfix (params merging). + + +0.6.5 (2011-10-18) +++++++++++++++++++ + +* Offline (fast) test suite. +* Session dictionary argument merging. + + +0.6.4 (2011-10-13) +++++++++++++++++++ + +* Automatic decoding of unicode, based on HTTP Headers. +* New ``decode_unicode`` setting. +* Removal of ``r.read/close`` methods. +* New ``r.faw`` interface for advanced response usage.* +* Automatic expansion of parameterized headers. + + +0.6.3 (2011-10-13) +++++++++++++++++++ + +* Beautiful ``requests.async`` module, for making async requests w/ gevent. + + +0.6.2 (2011-10-09) +++++++++++++++++++ + +* GET/HEAD obeys allow_redirects=False. + + +0.6.1 (2011-08-20) +++++++++++++++++++ + +* Enhanced status codes experience ``\o/`` +* Set a maximum number of redirects (``settings.max_redirects``) +* Full Unicode URL support +* Support for protocol-less redirects. +* Allow for arbitrary request types. 
+* Bugfixes + + +0.6.0 (2011-08-17) +++++++++++++++++++ + +* New callback hook system +* New persistent sessions object and context manager +* Transparent Dict-cookie handling +* Status code reference object +* Removed Response.cached +* Added Response.request +* All args are kwargs +* Relative redirect support +* HTTPError handling improvements +* Improved https testing +* Bugfixes + + +0.5.1 (2011-07-23) +++++++++++++++++++ + +* International Domain Name Support! +* Access headers without fetching entire body (``read()``) +* Use lists as dicts for parameters +* Add Forced Basic Authentication +* Forced Basic is default authentication type +* ``python-requests.org`` default User-Agent header +* CaseInsensitiveDict lower-case caching +* Response.history bugfix + + +0.5.0 (2011-06-21) +++++++++++++++++++ + +* PATCH Support +* Support for Proxies +* HTTPBin Test Suite +* Redirect Fixes +* settings.verbose stream writing +* Querystrings for all methods +* URLErrors (Connection Refused, Timeout, Invalid URLs) are treated as explicitly raised + ``r.requests.get('hwe://blah'); r.raise_for_status()`` + + +0.4.1 (2011-05-22) +++++++++++++++++++ + +* Improved Redirection Handling +* New 'allow_redirects' param for following non-GET/HEAD Redirects +* Settings module refactoring + + +0.4.0 (2011-05-15) +++++++++++++++++++ + +* Response.history: list of redirected responses +* Case-Insensitive Header Dictionaries! +* Unicode URLs + + +0.3.4 (2011-05-14) +++++++++++++++++++ + +* Urllib2 HTTPAuthentication Recursion fix (Basic/Digest) +* Internal Refactor +* Bytes data upload Bugfix + + + +0.3.3 (2011-05-12) +++++++++++++++++++ + +* Request timeouts +* Unicode url-encoded data +* Settings context manager and module + + +0.3.2 (2011-04-15) +++++++++++++++++++ + +* Automatic Decompression of GZip Encoded Content +* AutoAuth Support for Tupled HTTP Auth + + +0.3.1 (2011-04-01) +++++++++++++++++++ + +* Cookie Changes +* Response.read() +* Poster fix + + +0.3.0 (2011-02-25) +++++++++++++++++++ + +* Automatic Authentication API Change +* Smarter Query URL Parameterization +* Allow file uploads and POST data together +* New Authentication Manager System + - Simpler Basic HTTP System + - Supports all build-in urllib2 Auths + - Allows for custom Auth Handlers + + +0.2.4 (2011-02-19) +++++++++++++++++++ + +* Python 2.5 Support +* PyPy-c v1.4 Support +* Auto-Authentication tests +* Improved Request object constructor + +0.2.3 (2011-02-15) +++++++++++++++++++ + +* New HTTPHandling Methods + - Response.__nonzero__ (false if bad HTTP Status) + - Response.ok (True if expected HTTP Status) + - Response.error (Logged HTTPError if bad HTTP Status) + - Response.raise_for_status() (Raises stored HTTPError) + + +0.2.2 (2011-02-14) +++++++++++++++++++ + +* Still handles request in the event of an HTTPError. (Issue #2) +* Eventlet and Gevent Monkeypatch support. +* Cookie Support (Issue #1) + + +0.2.1 (2011-02-14) +++++++++++++++++++ + +* Added file attribute to POST and PUT requests for multipart-encode file uploads. +* Added Request.url attribute for context and redirects + + +0.2.0 (2011-02-14) +++++++++++++++++++ + +* Birth! 
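+
+For reference, the HTTP-handling helpers introduced around 0.2.3 above still
+follow the same pattern; a tiny sketch with a hypothetical failing URL::
+
+    import requests
+
+    r = requests.get('https://httpbin.org/status/404')  # hypothetical 404 endpoint
+    print(r.ok)                # False for 4XX/5XX responses
+    try:
+        r.raise_for_status()   # raises requests.HTTPError for 4XX/5XX
+    except requests.HTTPError as exc:
+        print('request failed:', exc)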
+ + +0.0.1 (2011-02-13) +++++++++++++++++++ + +* Frustration +* Conception + + diff --git a/env/lib/python3.6/site-packages/requests-2.19.1.dist-info/INSTALLER b/env/lib/python3.6/site-packages/requests-2.19.1.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/env/lib/python3.6/site-packages/requests-2.19.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/env/lib/python3.6/site-packages/requests-2.19.1.dist-info/LICENSE.txt b/env/lib/python3.6/site-packages/requests-2.19.1.dist-info/LICENSE.txt new file mode 100644 index 0000000..2e68b82 --- /dev/null +++ b/env/lib/python3.6/site-packages/requests-2.19.1.dist-info/LICENSE.txt @@ -0,0 +1,13 @@ +Copyright 2018 Kenneth Reitz + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/env/lib/python3.6/site-packages/requests-2.19.1.dist-info/METADATA b/env/lib/python3.6/site-packages/requests-2.19.1.dist-info/METADATA new file mode 100644 index 0000000..ba21618 --- /dev/null +++ b/env/lib/python3.6/site-packages/requests-2.19.1.dist-info/METADATA @@ -0,0 +1,1721 @@ +Metadata-Version: 2.0 +Name: requests +Version: 2.19.1 +Summary: Python HTTP for Humans. +Home-page: http://python-requests.org +Author: Kenneth Reitz +Author-email: me@kennethreitz.org +License: Apache 2.0 +Description-Content-Type: text/x-rst +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Natural Language :: English +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Requires-Python: >=2.6, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.* +Requires-Dist: chardet (<3.1.0,>=3.0.2) +Requires-Dist: idna (<2.8,>=2.5) +Requires-Dist: urllib3 (<1.24,>=1.21.1) +Requires-Dist: certifi (>=2017.4.17) +Provides-Extra: security +Requires-Dist: pyOpenSSL (>=0.14); extra == 'security' +Requires-Dist: cryptography (>=1.3.4); extra == 'security' +Requires-Dist: idna (>=2.0.0); extra == 'security' +Provides-Extra: socks +Requires-Dist: PySocks (!=1.5.7,>=1.5.6); extra == 'socks' +Provides-Extra: socks +Requires-Dist: win-inet-pton; sys_platform == "win32" and (python_version == "2.7" or python_version == "2.6") and extra == 'socks' + +Requests: HTTP for Humans +========================= + +.. image:: https://img.shields.io/pypi/v/requests.svg + :target: https://pypi.org/project/requests/ + +.. image:: https://img.shields.io/pypi/l/requests.svg + :target: https://pypi.org/project/requests/ + +.. image:: https://img.shields.io/pypi/pyversions/requests.svg + :target: https://pypi.org/project/requests/ + +.. 
image:: https://codecov.io/github/requests/requests/coverage.svg?branch=master + :target: https://codecov.io/github/requests/requests + :alt: codecov.io + +.. image:: https://img.shields.io/github/contributors/requests/requests.svg + :target: https://github.com/requests/requests/graphs/contributors + +.. image:: https://img.shields.io/badge/Say%20Thanks-!-1EAEDB.svg + :target: https://saythanks.io/to/kennethreitz + +Requests is the only *Non-GMO* HTTP library for Python, safe for human +consumption. + +.. image:: https://farm5.staticflickr.com/4317/35198386374_1939af3de6_k_d.jpg + +Behold, the power of Requests: + +.. code-block:: python + + >>> r = requests.get('https://api.github.com/user', auth=('user', 'pass')) + >>> r.status_code + 200 + >>> r.headers['content-type'] + 'application/json; charset=utf8' + >>> r.encoding + 'utf-8' + >>> r.text + u'{"type":"User"...' + >>> r.json() + {u'disk_usage': 368627, u'private_gists': 484, ...} + +See `the similar code, sans Requests <https://gist.github.com/973705>`_. + +.. image:: https://raw.githubusercontent.com/requests/requests/master/docs/_static/requests-logo-small.png + :target: http://docs.python-requests.org/ + + +Requests allows you to send *organic, grass-fed* HTTP/1.1 requests, without the +need for manual labor. There's no need to manually add query strings to your +URLs, or to form-encode your POST data. Keep-alive and HTTP connection pooling +are 100% automatic, thanks to `urllib3 <https://github.com/shazow/urllib3>`_. + +Besides, all the cool kids are doing it. Requests is one of the most +downloaded Python packages of all time, pulling in over 11,000,000 downloads +every month. You don't want to be left out! + +Feature Support +--------------- + +Requests is ready for today's web. + +- International Domains and URLs +- Keep-Alive & Connection Pooling +- Sessions with Cookie Persistence +- Browser-style SSL Verification +- Basic/Digest Authentication +- Elegant Key/Value Cookies +- Automatic Decompression +- Automatic Content Decoding +- Unicode Response Bodies +- Multipart File Uploads +- HTTP(S) Proxy Support +- Connection Timeouts +- Streaming Downloads +- ``.netrc`` Support +- Chunked Requests + +Requests officially supports Python 2.7 & 3.4–3.6, and runs great on PyPy. + +Installation +------------ + +To install Requests, simply use `pipenv <http://pipenv.org/>`_ (or pip, of course): + +.. code-block:: bash + + $ pipenv install requests + ✨🍰✨ + +Satisfaction guaranteed. + +Documentation +------------- + +Fantastic documentation is available at http://docs.python-requests.org/, for a limited time only. + + +How to Contribute +----------------- + +#. Check for open issues or open a fresh issue to start a discussion around a feature idea or a bug. There is a `Contributor Friendly`_ tag for issues that should be ideal for people who are not very familiar with the codebase yet. +#. Fork `the repository`_ on GitHub to start making your changes to the **master** branch (or branch off of it). +#. Write a test which shows that the bug was fixed or that the feature works as expected. +#. Send a pull request and bug the maintainer until it gets merged and published. :) Make sure to add yourself to AUTHORS_. + +.. _`the repository`: https://github.com/requests/requests +.. _AUTHORS: https://github.com/requests/requests/blob/master/AUTHORS.rst +.. _Contributor Friendly: https://github.com/requests/requests/issues?direction=desc&labels=Contributor+Friendly&page=1&sort=updated&state=open + + +.. 
:changelog: + +Release History +--------------- + +dev ++++ + +- [Short description of non-trivial change.] + +2.19.1 (2018-06-14) ++++++++++++++++++++ + +**Bugfixes** + +- Fixed issue where status_codes.py's ``init`` function failed trying to append to + a ``__doc__`` value of ``None``. + +2.19.0 (2018-06-12) ++++++++++++++++++++ + +**Improvements** + +- Warn user about possible slowdown when using cryptography version < 1.3.4 +- Check for invalid host in proxy URL, before forwarding request to adapter. +- Fragments are now properly maintained across redirects. (RFC7231 7.1.2) +- Removed use of cgi module to expedite library load time. +- Added support for SHA-256 and SHA-512 digest auth algorithms. +- Minor performance improvement to ``Request.content``. +- Migrate to using collections.abc for 3.7 compatibility. + +**Bugfixes** + +- Parsing empty ``Link`` headers with ``parse_header_links()`` no longer return one bogus entry. +- Fixed issue where loading the default certificate bundle from a zip archive + would raise an ``IOError``. +- Fixed issue with unexpected ``ImportError`` on windows system which do not support ``winreg`` module. +- DNS resolution in proxy bypass no longer includes the username and password in + the request. This also fixes the issue of DNS queries failing on macOS. +- Properly normalize adapter prefixes for url comparison. +- Passing ``None`` as a file pointer to the ``files`` param no longer raises an exception. +- Calling ``copy`` on a ``RequestsCookieJar`` will now preserve the cookie policy correctly. + +**Dependencies** + +- We now support idna v2.7. +- We now support urllib3 v1.23. + +2.18.4 (2017-08-15) ++++++++++++++++++++ + +**Improvements** + +- Error messages for invalid headers now include the header name for easier debugging + +**Dependencies** + +- We now support idna v2.6. + +2.18.3 (2017-08-02) ++++++++++++++++++++ + +**Improvements** + +- Running ``$ python -m requests.help`` now includes the installed version of idna. + +**Bugfixes** + +- Fixed issue where Requests would raise ``ConnectionError`` instead of + ``SSLError`` when encountering SSL problems when using urllib3 v1.22. + +2.18.2 (2017-07-25) ++++++++++++++++++++ + +**Bugfixes** + +- ``requests.help`` no longer fails on Python 2.6 due to the absence of + ``ssl.OPENSSL_VERSION_NUMBER``. + +**Dependencies** + +- We now support urllib3 v1.22. + +2.18.1 (2017-06-14) ++++++++++++++++++++ + +**Bugfixes** + +- Fix an error in the packaging whereby the ``*.whl`` contained incorrect data + that regressed the fix in v2.17.3. + +2.18.0 (2017-06-14) ++++++++++++++++++++ + +**Improvements** + +- ``Response`` is now a context manager, so can be used directly in a ``with`` statement + without first having to be wrapped by ``contextlib.closing()``. + +**Bugfixes** + +- Resolve installation failure if multiprocessing is not available +- Resolve tests crash if multiprocessing is not able to determine the number of CPU cores +- Resolve error swallowing in utils set_environ generator + + +2.17.3 (2017-05-29) ++++++++++++++++++++ + +**Improvements** + +- Improved ``packages`` namespace identity support, for monkeypatching libraries. + + +2.17.2 (2017-05-29) ++++++++++++++++++++ + +**Improvements** + +- Improved ``packages`` namespace identity support, for monkeypatching libraries. + + +2.17.1 (2017-05-29) ++++++++++++++++++++ + +**Improvements** + +- Improved ``packages`` namespace identity support, for monkeypatching libraries. 
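+
+The context-manager behaviour added in 2.18.0 above lets a streamed response be
+closed automatically without ``contextlib.closing()``; a short sketch with a
+hypothetical URL::
+
+    import requests
+
+    # The connection is released when the with-block exits, even on error.
+    with requests.get('https://httpbin.org/bytes/1024', stream=True) as r:
+        for chunk in r.iter_content(chunk_size=256):
+            if chunk:
+                pass  # process each chunk here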
+ + +2.17.0 (2017-05-29) ++++++++++++++++++++ + +**Improvements** + +- Removal of the 301 redirect cache. This improves thread-safety. + + +2.16.5 (2017-05-28) ++++++++++++++++++++ + +- Improvements to ``$ python -m requests.help``. + +2.16.4 (2017-05-27) ++++++++++++++++++++ + +- Introduction of the ``$ python -m requests.help`` command, for debugging with maintainers! + +2.16.3 (2017-05-27) ++++++++++++++++++++ + +- Further restored the ``requests.packages`` namespace for compatibility reasons. + +2.16.2 (2017-05-27) ++++++++++++++++++++ + +- Further restored the ``requests.packages`` namespace for compatibility reasons. + +No code modification (noted below) should be necessary any longer. + +2.16.1 (2017-05-27) ++++++++++++++++++++ + +- Restored the ``requests.packages`` namespace for compatibility reasons. +- Bugfix for ``urllib3`` version parsing. + +**Note**: code that was written to import against the ``requests.packages`` +namespace previously will have to import code that rests at this module-level +now. + +For example:: + + from requests.packages.urllib3.poolmanager import PoolManager + +Will need to be re-written to be:: + + from requests.packages import urllib3 + urllib3.poolmanager.PoolManager + +Or, even better:: + + from urllib3.poolmanager import PoolManager + +2.16.0 (2017-05-26) ++++++++++++++++++++ + +- Unvendor ALL the things! + +2.15.1 (2017-05-26) ++++++++++++++++++++ + +- Everyone makes mistakes. + +2.15.0 (2017-05-26) ++++++++++++++++++++ + +**Improvements** + +- Introduction of the ``Response.next`` property, for getting the next + ``PreparedResponse`` from a redirect chain (when ``allow_redirects=False``). +- Internal refactoring of ``__version__`` module. + +**Bugfixes** + +- Restored once-optional parameter for ``requests.utils.get_environ_proxies()``. + +2.14.2 (2017-05-10) ++++++++++++++++++++ + +**Bugfixes** + +- Changed a less-than to an equal-to and an or in the dependency markers to + widen compatibility with older setuptools releases. + +2.14.1 (2017-05-09) ++++++++++++++++++++ + +**Bugfixes** + +- Changed the dependency markers to widen compatibility with older pip + releases. + +2.14.0 (2017-05-09) ++++++++++++++++++++ + +**Improvements** + +- It is now possible to pass ``no_proxy`` as a key to the ``proxies`` + dictionary to provide handling similar to the ``NO_PROXY`` environment + variable. +- When users provide invalid paths to certificate bundle files or directories + Requests now raises ``IOError``, rather than failing at the time of the HTTPS + request with a fairly inscrutable certificate validation error. +- The behavior of ``SessionRedirectMixin`` was slightly altered. + ``resolve_redirects`` will now detect a redirect by calling + ``get_redirect_target(response)`` instead of directly + querying ``Response.is_redirect`` and ``Response.headers['location']``. + Advanced users will be able to process malformed redirects more easily. +- Changed the internal calculation of elapsed request time to have higher + resolution on Windows. +- Added ``win_inet_pton`` as conditional dependency for the ``[socks]`` extra + on Windows with Python 2.7. +- Changed the proxy bypass implementation on Windows: the proxy bypass + check doesn't use forward and reverse DNS requests anymore +- URLs with schemes that begin with ``http`` but are not ``http`` or ``https`` + no longer have their host parts forced to lowercase. + +**Bugfixes** + +- Much improved handling of non-ASCII ``Location`` header values in redirects. 
+ Fewer ``UnicodeDecodeErrors`` are encountered on Python 2, and Python 3 now + correctly understands that Latin-1 is unlikely to be the correct encoding. +- If an attempt to ``seek`` file to find out its length fails, we now + appropriately handle that by aborting our content-length calculations. +- Restricted ``HTTPDigestAuth`` to only respond to auth challenges made on 4XX + responses, rather than to all auth challenges. +- Fixed some code that was firing ``DeprecationWarning`` on Python 3.6. +- The dismayed person emoticon (``/o\\``) no longer has a big head. I'm sure + this is what you were all worrying about most. + + +**Miscellaneous** + +- Updated bundled urllib3 to v1.21.1. +- Updated bundled chardet to v3.0.2. +- Updated bundled idna to v2.5. +- Updated bundled certifi to 2017.4.17. + +2.13.0 (2017-01-24) ++++++++++++++++++++ + +**Features** + +- Only load the ``idna`` library when we've determined we need it. This will + save some memory for users. + +**Miscellaneous** + +- Updated bundled urllib3 to 1.20. +- Updated bundled idna to 2.2. + +2.12.5 (2017-01-18) ++++++++++++++++++++ + +**Bugfixes** + +- Fixed an issue with JSON encoding detection, specifically detecting + big-endian UTF-32 with BOM. + +2.12.4 (2016-12-14) ++++++++++++++++++++ + +**Bugfixes** + +- Fixed regression from 2.12.2 where non-string types were rejected in the + basic auth parameters. While support for this behaviour has been readded, + the behaviour is deprecated and will be removed in the future. + +2.12.3 (2016-12-01) ++++++++++++++++++++ + +**Bugfixes** + +- Fixed regression from v2.12.1 for URLs with schemes that begin with "http". + These URLs have historically been processed as though they were HTTP-schemed + URLs, and so have had parameters added. This was removed in v2.12.2 in an + overzealous attempt to resolve problems with IDNA-encoding those URLs. This + change was reverted: the other fixes for IDNA-encoding have been judged to + be sufficient to return to the behaviour Requests had before v2.12.0. + +2.12.2 (2016-11-30) ++++++++++++++++++++ + +**Bugfixes** + +- Fixed several issues with IDNA-encoding URLs that are technically invalid but + which are widely accepted. Requests will now attempt to IDNA-encode a URL if + it can but, if it fails, and the host contains only ASCII characters, it will + be passed through optimistically. This will allow users to opt-in to using + IDNA2003 themselves if they want to, and will also allow technically invalid + but still common hostnames. +- Fixed an issue where URLs with leading whitespace would raise + ``InvalidSchema`` errors. +- Fixed an issue where some URLs without the HTTP or HTTPS schemes would still + have HTTP URL preparation applied to them. +- Fixed an issue where Unicode strings could not be used in basic auth. +- Fixed an issue encountered by some Requests plugins where constructing a + Response object would cause ``Response.content`` to raise an + ``AttributeError``. + +2.12.1 (2016-11-16) ++++++++++++++++++++ + +**Bugfixes** + +- Updated setuptools 'security' extra for the new PyOpenSSL backend in urllib3. + +**Miscellaneous** + +- Updated bundled urllib3 to 1.19.1. + +2.12.0 (2016-11-15) ++++++++++++++++++++ + +**Improvements** + +- Updated support for internationalized domain names from IDNA2003 to IDNA2008. + This updated support is required for several forms of IDNs and is mandatory + for .de domains. +- Much improved heuristics for guessing content lengths: Requests will no + longer read an entire ``StringIO`` into memory. 
+- Much improved logic for recalculating ``Content-Length`` headers for + ``PreparedRequest`` objects. +- Improved tolerance for file-like objects that have no ``tell`` method but + do have a ``seek`` method. +- Anything that is a subclass of ``Mapping`` is now treated like a dictionary + by the ``data=`` keyword argument. +- Requests now tolerates empty passwords in proxy credentials, rather than + stripping the credentials. +- If a request is made with a file-like object as the body and that request is + redirected with a 307 or 308 status code, Requests will now attempt to + rewind the body object so it can be replayed. + +**Bugfixes** + +- When calling ``response.close``, the call to ``close`` will be propagated + through to non-urllib3 backends. +- Fixed issue where the ``ALL_PROXY`` environment variable would be preferred + over scheme-specific variables like ``HTTP_PROXY``. +- Fixed issue where non-UTF8 reason phrases got severely mangled by falling + back to decoding using ISO 8859-1 instead. +- Fixed a bug where Requests would not correctly correlate cookies set when + using custom Host headers if those Host headers did not use the native + string type for the platform. + +**Miscellaneous** + +- Updated bundled urllib3 to 1.19. +- Updated bundled certifi certs to 2016.09.26. + +2.11.1 (2016-08-17) ++++++++++++++++++++ + +**Bugfixes** + +- Fixed a bug when using ``iter_content`` with ``decode_unicode=True`` for + streamed bodies would raise ``AttributeError``. This bug was introduced in + 2.11. +- Strip Content-Type and Transfer-Encoding headers from the header block when + following a redirect that transforms the verb from POST/PUT to GET. + +2.11.0 (2016-08-08) ++++++++++++++++++++ + +**Improvements** + +- Added support for the ``ALL_PROXY`` environment variable. +- Reject header values that contain leading whitespace or newline characters to + reduce risk of header smuggling. + +**Bugfixes** + +- Fixed occasional ``TypeError`` when attempting to decode a JSON response that + occurred in an error case. Now correctly returns a ``ValueError``. +- Requests would incorrectly ignore a non-CIDR IP address in the ``NO_PROXY`` + environment variables: Requests now treats it as a specific IP. +- Fixed a bug when sending JSON data that could cause us to encounter obscure + OpenSSL errors in certain network conditions (yes, really). +- Added type checks to ensure that ``iter_content`` only accepts integers and + ``None`` for chunk sizes. +- Fixed issue where responses whose body had not been fully consumed would have + the underlying connection closed but not returned to the connection pool, + which could cause Requests to hang in situations where the ``HTTPAdapter`` + had been configured to use a blocking connection pool. + +**Miscellaneous** + +- Updated bundled urllib3 to 1.16. +- Some previous releases accidentally accepted non-strings as acceptable header values. This release does not. + +2.10.0 (2016-04-29) ++++++++++++++++++++ + +**New Features** + +- SOCKS Proxy Support! (requires PySocks; ``$ pip install requests[socks]``) + +**Miscellaneous** + +- Updated bundled urllib3 to 1.15.1. + +2.9.2 (2016-04-29) +++++++++++++++++++ + +**Improvements** + +- Change built-in CaseInsensitiveDict (used for headers) to use OrderedDict + as its underlying datastore. + +**Bugfixes** + +- Don't use redirect_cache if allow_redirects=False +- When passed objects that throw exceptions from ``tell()``, send them via + chunked transfer encoding instead of failing. 
+- Raise a ProxyError for proxy related connection issues. + +2.9.1 (2015-12-21) +++++++++++++++++++ + +**Bugfixes** + +- Resolve regression introduced in 2.9.0 that made it impossible to send binary + strings as bodies in Python 3. +- Fixed errors when calculating cookie expiration dates in certain locales. + +**Miscellaneous** + +- Updated bundled urllib3 to 1.13.1. + +2.9.0 (2015-12-15) +++++++++++++++++++ + +**Minor Improvements** (Backwards compatible) + +- The ``verify`` keyword argument now supports being passed a path to a + directory of CA certificates, not just a single-file bundle. +- Warnings are now emitted when sending files opened in text mode. +- Added the 511 Network Authentication Required status code to the status code + registry. + +**Bugfixes** + +- For file-like objects that are not seeked to the very beginning, we now + send the content length for the number of bytes we will actually read, rather + than the total size of the file, allowing partial file uploads. +- When uploading file-like objects, if they are empty or have no obvious + content length we set ``Transfer-Encoding: chunked`` rather than + ``Content-Length: 0``. +- We correctly receive the response in buffered mode when uploading chunked + bodies. +- We now handle being passed a query string as a bytestring on Python 3, by + decoding it as UTF-8. +- Sessions are now closed in all cases (exceptional and not) when using the + functional API rather than leaking and waiting for the garbage collector to + clean them up. +- Correctly handle digest auth headers with a malformed ``qop`` directive that + contains no token, by treating it the same as if no ``qop`` directive was + provided at all. +- Minor performance improvements when removing specific cookies by name. + +**Miscellaneous** + +- Updated urllib3 to 1.13. + +2.8.1 (2015-10-13) +++++++++++++++++++ + +**Bugfixes** + +- Update certificate bundle to match ``certifi`` 2015.9.6.2's weak certificate + bundle. +- Fix a bug in 2.8.0 where requests would raise ``ConnectTimeout`` instead of + ``ConnectionError`` +- When using the PreparedRequest flow, requests will now correctly respect the + ``json`` parameter. Broken in 2.8.0. +- When using the PreparedRequest flow, requests will now correctly handle a + Unicode-string method name on Python 2. Broken in 2.8.0. + +2.8.0 (2015-10-05) +++++++++++++++++++ + +**Minor Improvements** (Backwards Compatible) + +- Requests now supports per-host proxies. This allows the ``proxies`` + dictionary to have entries of the form + ``{'<scheme>://<hostname>': '<proxy>'}``. Host-specific proxies will be used + in preference to the previously-supported scheme-specific ones, but the + previous syntax will continue to work. +- ``Response.raise_for_status`` now prints the URL that failed as part of the + exception message. +- ``requests.utils.get_netrc_auth`` now takes an ``raise_errors`` kwarg, + defaulting to ``False``. When ``True``, errors parsing ``.netrc`` files cause + exceptions to be thrown. +- Change to bundled projects import logic to make it easier to unbundle + requests downstream. +- Changed the default User-Agent string to avoid leaking data on Linux: now + contains only the requests version. + +**Bugfixes** + +- The ``json`` parameter to ``post()`` and friends will now only be used if + neither ``data`` nor ``files`` are present, consistent with the + documentation. +- We now ignore empty fields in the ``NO_PROXY`` environment variable. 
+- Fixed problem where ``httplib.BadStatusLine`` would get raised if combining + ``stream=True`` with ``contextlib.closing``. +- Prevented bugs where we would attempt to return the same connection back to + the connection pool twice when sending a Chunked body. +- Miscellaneous minor internal changes. +- Digest Auth support is now thread safe. + +**Updates** + +- Updated urllib3 to 1.12. + +2.7.0 (2015-05-03) +++++++++++++++++++ + +This is the first release that follows our new release process. For more, see +`our documentation +<http://docs.python-requests.org/en/latest/community/release-process/>`_. + +**Bugfixes** + +- Updated urllib3 to 1.10.4, resolving several bugs involving chunked transfer + encoding and response framing. + +2.6.2 (2015-04-23) +++++++++++++++++++ + +**Bugfixes** + +- Fix regression where compressed data that was sent as chunked data was not + properly decompressed. (#2561) + +2.6.1 (2015-04-22) +++++++++++++++++++ + +**Bugfixes** + +- Remove VendorAlias import machinery introduced in v2.5.2. + +- Simplify the PreparedRequest.prepare API: We no longer require the user to + pass an empty list to the hooks keyword argument. (c.f. #2552) + +- Resolve redirects now receives and forwards all of the original arguments to + the adapter. (#2503) + +- Handle UnicodeDecodeErrors when trying to deal with a unicode URL that + cannot be encoded in ASCII. (#2540) + +- Populate the parsed path of the URI field when performing Digest + Authentication. (#2426) + +- Copy a PreparedRequest's CookieJar more reliably when it is not an instance + of RequestsCookieJar. (#2527) + +2.6.0 (2015-03-14) +++++++++++++++++++ + +**Bugfixes** + +- CVE-2015-2296: Fix handling of cookies on redirect. Previously a cookie + without a host value set would use the hostname for the redirected URL + exposing requests users to session fixation attacks and potentially cookie + stealing. This was disclosed privately by Matthew Daley of + `BugFuzz <https://bugfuzz.com>`_. This affects all versions of requests from + v2.1.0 to v2.5.3 (inclusive on both ends). + +- Fix error when requests is an ``install_requires`` dependency and ``python + setup.py test`` is run. (#2462) + +- Fix error when urllib3 is unbundled and requests continues to use the + vendored import location. + +- Include fixes to ``urllib3``'s header handling. + +- Requests' handling of unvendored dependencies is now more restrictive. + +**Features and Improvements** + +- Support bytearrays when passed as parameters in the ``files`` argument. + (#2468) + +- Avoid data duplication when creating a request with ``str``, ``bytes``, or + ``bytearray`` input to the ``files`` argument. + +2.5.3 (2015-02-24) +++++++++++++++++++ + +**Bugfixes** + +- Revert changes to our vendored certificate bundle. For more context see + (#2455, #2456, and http://bugs.python.org/issue23476) + +2.5.2 (2015-02-23) +++++++++++++++++++ + +**Features and Improvements** + +- Add sha256 fingerprint support. (`shazow/urllib3#540`_) + +- Improve the performance of headers. (`shazow/urllib3#544`_) + +**Bugfixes** + +- Copy pip's import machinery. When downstream redistributors remove + requests.packages.urllib3 the import machinery will continue to let those + same symbols work. Example usage in requests' documentation and 3rd-party + libraries relying on the vendored copies of urllib3 will work without having + to fallback to the system urllib3. + +- Attempt to quote parts of the URL on redirect if unquoting and then quoting + fails. 
(#2356) + +- Fix filename type check for multipart form-data uploads. (#2411) + +- Properly handle the case where a server issuing digest authentication + challenges provides both auth and auth-int qop-values. (#2408) + +- Fix a socket leak. (`shazow/urllib3#549`_) + +- Fix multiple ``Set-Cookie`` headers properly. (`shazow/urllib3#534`_) + +- Disable the built-in hostname verification. (`shazow/urllib3#526`_) + +- Fix the behaviour of decoding an exhausted stream. (`shazow/urllib3#535`_) + +**Security** + +- Pulled in an updated ``cacert.pem``. + +- Drop RC4 from the default cipher list. (`shazow/urllib3#551`_) + +.. _shazow/urllib3#551: https://github.com/shazow/urllib3/pull/551 +.. _shazow/urllib3#549: https://github.com/shazow/urllib3/pull/549 +.. _shazow/urllib3#544: https://github.com/shazow/urllib3/pull/544 +.. _shazow/urllib3#540: https://github.com/shazow/urllib3/pull/540 +.. _shazow/urllib3#535: https://github.com/shazow/urllib3/pull/535 +.. _shazow/urllib3#534: https://github.com/shazow/urllib3/pull/534 +.. _shazow/urllib3#526: https://github.com/shazow/urllib3/pull/526 + +2.5.1 (2014-12-23) +++++++++++++++++++ + +**Behavioural Changes** + +- Only catch HTTPErrors in raise_for_status (#2382) + +**Bugfixes** + +- Handle LocationParseError from urllib3 (#2344) +- Handle file-like object filenames that are not strings (#2379) +- Unbreak HTTPDigestAuth handler. Allow new nonces to be negotiated (#2389) + +2.5.0 (2014-12-01) +++++++++++++++++++ + +**Improvements** + +- Allow usage of urllib3's Retry object with HTTPAdapters (#2216) +- The ``iter_lines`` method on a response now accepts a delimiter with which + to split the content (#2295) + +**Behavioural Changes** + +- Add deprecation warnings to functions in requests.utils that will be removed + in 3.0 (#2309) +- Sessions used by the functional API are always closed (#2326) +- Restrict requests to HTTP/1.1 and HTTP/1.0 (stop accepting HTTP/0.9) (#2323) + +**Bugfixes** + +- Only parse the URL once (#2353) +- Allow Content-Length header to always be overridden (#2332) +- Properly handle files in HTTPDigestAuth (#2333) +- Cap redirect_cache size to prevent memory abuse (#2299) +- Fix HTTPDigestAuth handling of redirects after authenticating successfully + (#2253) +- Fix crash with custom method parameter to Session.request (#2317) +- Fix how Link headers are parsed using the regular expression library (#2271) + +**Documentation** + +- Add more references for interlinking (#2348) +- Update CSS for theme (#2290) +- Update width of buttons and sidebar (#2289) +- Replace references of Gittip with Gratipay (#2282) +- Add link to changelog in sidebar (#2273) + +2.4.3 (2014-10-06) +++++++++++++++++++ + +**Bugfixes** + +- Unicode URL improvements for Python 2. +- Re-order JSON param for backwards compat. +- Automatically defrag authentication schemes from host/pass URIs. (`#2249 <https://github.com/requests/requests/issues/2249>`_) + + +2.4.2 (2014-10-05) +++++++++++++++++++ + +**Improvements** + +- FINALLY! Add json parameter for uploads! (`#2258 <https://github.com/requests/requests/pull/2258>`_) +- Support for bytestring URLs on Python 3.x (`#2238 <https://github.com/requests/requests/pull/2238>`_) + +**Bugfixes** + +- Avoid getting stuck in a loop (`#2244 <https://github.com/requests/requests/pull/2244>`_) +- Multiple calls to iter* fail with unhelpful error. 
(`#2240 <https://github.com/requests/requests/issues/2240>`_, `#2241 <https://github.com/requests/requests/issues/2241>`_) + +**Documentation** + +- Correct redirection introduction (`#2245 <https://github.com/requests/requests/pull/2245/>`_) +- Added example of how to send multiple files in one request. (`#2227 <https://github.com/requests/requests/pull/2227/>`_) +- Clarify how to pass a custom set of CAs (`#2248 <https://github.com/requests/requests/pull/2248/>`_) + + + +2.4.1 (2014-09-09) +++++++++++++++++++ + +- Now has a "security" package extras set, ``$ pip install requests[security]`` +- Requests will now use Certifi if it is available. +- Capture and re-raise urllib3 ProtocolError +- Bugfix for responses that attempt to redirect to themselves forever (wtf?). + + +2.4.0 (2014-08-29) +++++++++++++++++++ + +**Behavioral Changes** + +- ``Connection: keep-alive`` header is now sent automatically. + +**Improvements** + +- Support for connect timeouts! Timeout now accepts a tuple (connect, read) which is used to set individual connect and read timeouts. +- Allow copying of PreparedRequests without headers/cookies. +- Updated bundled urllib3 version. +- Refactored settings loading from environment -- new `Session.merge_environment_settings`. +- Handle socket errors in iter_content. + + +2.3.0 (2014-05-16) +++++++++++++++++++ + +**API Changes** + +- New ``Response`` property ``is_redirect``, which is true when the + library could have processed this response as a redirection (whether + or not it actually did). +- The ``timeout`` parameter now affects requests with both ``stream=True`` and + ``stream=False`` equally. +- The change in v2.0.0 to mandate explicit proxy schemes has been reverted. + Proxy schemes now default to ``http://``. +- The ``CaseInsensitiveDict`` used for HTTP headers now behaves like a normal + dictionary when references as string or viewed in the interpreter. + +**Bugfixes** + +- No longer expose Authorization or Proxy-Authorization headers on redirect. + Fix CVE-2014-1829 and CVE-2014-1830 respectively. +- Authorization is re-evaluated each redirect. +- On redirect, pass url as native strings. +- Fall-back to autodetected encoding for JSON when Unicode detection fails. +- Headers set to ``None`` on the ``Session`` are now correctly not sent. +- Correctly honor ``decode_unicode`` even if it wasn't used earlier in the same + response. +- Stop advertising ``compress`` as a supported Content-Encoding. +- The ``Response.history`` parameter is now always a list. +- Many, many ``urllib3`` bugfixes. + +2.2.1 (2014-01-23) +++++++++++++++++++ + +**Bugfixes** + +- Fixes incorrect parsing of proxy credentials that contain a literal or encoded '#' character. +- Assorted urllib3 fixes. + +2.2.0 (2014-01-09) +++++++++++++++++++ + +**API Changes** + +- New exception: ``ContentDecodingError``. Raised instead of ``urllib3`` + ``DecodeError`` exceptions. + +**Bugfixes** + +- Avoid many many exceptions from the buggy implementation of ``proxy_bypass`` on OS X in Python 2.6. +- Avoid crashing when attempting to get authentication credentials from ~/.netrc when running as a user without a home directory. +- Use the correct pool size for pools of connections to proxies. +- Fix iteration of ``CookieJar`` objects. +- Ensure that cookies are persisted over redirect. +- Switch back to using chardet, since it has merged with charade. + +2.1.0 (2013-12-05) +++++++++++++++++++ + +- Updated CA Bundle, of course. +- Cookies set on individual Requests through a ``Session`` (e.g. 
via ``Session.get()``) are no longer persisted to the ``Session``. +- Clean up connections when we hit problems during chunked upload, rather than leaking them. +- Return connections to the pool when a chunked upload is successful, rather than leaking it. +- Match the HTTPbis recommendation for HTTP 301 redirects. +- Prevent hanging when using streaming uploads and Digest Auth when a 401 is received. +- Values of headers set by Requests are now always the native string type. +- Fix previously broken SNI support. +- Fix accessing HTTP proxies using proxy authentication. +- Unencode HTTP Basic usernames and passwords extracted from URLs. +- Support for IP address ranges for no_proxy environment variable +- Parse headers correctly when users override the default ``Host:`` header. +- Avoid munging the URL in case of case-sensitive servers. +- Looser URL handling for non-HTTP/HTTPS urls. +- Accept unicode methods in Python 2.6 and 2.7. +- More resilient cookie handling. +- Make ``Response`` objects pickleable. +- Actually added MD5-sess to Digest Auth instead of pretending to like last time. +- Updated internal urllib3. +- Fixed @Lukasa's lack of taste. + +2.0.1 (2013-10-24) +++++++++++++++++++ + +- Updated included CA Bundle with new mistrusts and automated process for the future +- Added MD5-sess to Digest Auth +- Accept per-file headers in multipart file POST messages. +- Fixed: Don't send the full URL on CONNECT messages. +- Fixed: Correctly lowercase a redirect scheme. +- Fixed: Cookies not persisted when set via functional API. +- Fixed: Translate urllib3 ProxyError into a requests ProxyError derived from ConnectionError. +- Updated internal urllib3 and chardet. + +2.0.0 (2013-09-24) +++++++++++++++++++ + +**API Changes:** + +- Keys in the Headers dictionary are now native strings on all Python versions, + i.e. bytestrings on Python 2, unicode on Python 3. +- Proxy URLs now *must* have an explicit scheme. A ``MissingSchema`` exception + will be raised if they don't. +- Timeouts now apply to read time if ``Stream=False``. +- ``RequestException`` is now a subclass of ``IOError``, not ``RuntimeError``. +- Added new method to ``PreparedRequest`` objects: ``PreparedRequest.copy()``. +- Added new method to ``Session`` objects: ``Session.update_request()``. This + method updates a ``Request`` object with the data (e.g. cookies) stored on + the ``Session``. +- Added new method to ``Session`` objects: ``Session.prepare_request()``. This + method updates and prepares a ``Request`` object, and returns the + corresponding ``PreparedRequest`` object. +- Added new method to ``HTTPAdapter`` objects: ``HTTPAdapter.proxy_headers()``. + This should not be called directly, but improves the subclass interface. +- ``httplib.IncompleteRead`` exceptions caused by incorrect chunked encoding + will now raise a Requests ``ChunkedEncodingError`` instead. +- Invalid percent-escape sequences now cause a Requests ``InvalidURL`` + exception to be raised. +- HTTP 208 no longer uses reason phrase ``"im_used"``. Correctly uses + ``"already_reported"``. +- HTTP 226 reason added (``"im_used"``). + +**Bugfixes:** + +- Vastly improved proxy support, including the CONNECT verb. Special thanks to + the many contributors who worked towards this improvement. +- Cookies are now properly managed when 401 authentication responses are + received. +- Chunked encoding fixes. +- Support for mixed case schemes. +- Better handling of streaming downloads. +- Retrieve environment proxies from more locations. +- Minor cookies fixes. 
+- Improved redirect behaviour. +- Improved streaming behaviour, particularly for compressed data. +- Miscellaneous small Python 3 text encoding bugs. +- ``.netrc`` no longer overrides explicit auth. +- Cookies set by hooks are now correctly persisted on Sessions. +- Fix problem with cookies that specify port numbers in their host field. +- ``BytesIO`` can be used to perform streaming uploads. +- More generous parsing of the ``no_proxy`` environment variable. +- Non-string objects can be passed in data values alongside files. + +1.2.3 (2013-05-25) +++++++++++++++++++ + +- Simple packaging fix + + +1.2.2 (2013-05-23) +++++++++++++++++++ + +- Simple packaging fix + + +1.2.1 (2013-05-20) +++++++++++++++++++ + +- 301 and 302 redirects now change the verb to GET for all verbs, not just + POST, improving browser compatibility. +- Python 3.3.2 compatibility +- Always percent-encode location headers +- Fix connection adapter matching to be most-specific first +- new argument to the default connection adapter for passing a block argument +- prevent a KeyError when there's no link headers + +1.2.0 (2013-03-31) +++++++++++++++++++ + +- Fixed cookies on sessions and on requests +- Significantly change how hooks are dispatched - hooks now receive all the + arguments specified by the user when making a request so hooks can make a + secondary request with the same parameters. This is especially necessary for + authentication handler authors +- certifi support was removed +- Fixed bug where using OAuth 1 with body ``signature_type`` sent no data +- Major proxy work thanks to @Lukasa including parsing of proxy authentication + from the proxy url +- Fix DigestAuth handling too many 401s +- Update vendored urllib3 to include SSL bug fixes +- Allow keyword arguments to be passed to ``json.loads()`` via the + ``Response.json()`` method +- Don't send ``Content-Length`` header by default on ``GET`` or ``HEAD`` + requests +- Add ``elapsed`` attribute to ``Response`` objects to time how long a request + took. +- Fix ``RequestsCookieJar`` +- Sessions and Adapters are now picklable, i.e., can be used with the + multiprocessing library +- Update charade to version 1.0.3 + +The change in how hooks are dispatched will likely cause a great deal of +issues. + +1.1.0 (2013-01-10) +++++++++++++++++++ + +- CHUNKED REQUESTS +- Support for iterable response bodies +- Assume servers persist redirect params +- Allow explicit content types to be specified for file data +- Make merge_kwargs case-insensitive when looking up keys + +1.0.3 (2012-12-18) +++++++++++++++++++ + +- Fix file upload encoding bug +- Fix cookie behavior + +1.0.2 (2012-12-17) +++++++++++++++++++ + +- Proxy fix for HTTPAdapter. + +1.0.1 (2012-12-17) +++++++++++++++++++ + +- Cert verification exception bug. +- Proxy fix for HTTPAdapter. + +1.0.0 (2012-12-17) +++++++++++++++++++ + +- Massive Refactor and Simplification +- Switch to Apache 2.0 license +- Swappable Connection Adapters +- Mountable Connection Adapters +- Mutable ProcessedRequest chain +- /s/prefetch/stream +- Removal of all configuration +- Standard library logging +- Make Response.json() callable, not property. +- Usage of new charade project, which provides python 2 and 3 simultaneous chardet. +- Removal of all hooks except 'response' +- Removal of all authentication helpers (OAuth, Kerberos) + +This is not a backwards compatible change. 
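The 1.0.0 and 1.2.0 entries above describe the mountable transport-adapter interface and the ``Response.json()`` callable (which forwards keyword arguments to ``json.loads()``). A minimal sketch of how those pieces are typically used together; the endpoint URL and retry count are placeholders rather than anything the changelog prescribes::

    import requests
    from requests.adapters import HTTPAdapter

    session = requests.Session()
    # Mount a swappable transport adapter for every https:// URL prefix.
    session.mount('https://', HTTPAdapter(max_retries=3))

    response = session.get('https://httpbin.org/get')
    # json() is a method, not a property, and keyword arguments are
    # passed through to json.loads().
    data = response.json(parse_float=str)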
+
+0.14.2 (2012-10-27)
++++++++++++++++++++
+
+- Improved mime-compatible JSON handling
+- Proxy fixes
+- Path hack fixes
+- Case-Insensitive Content-Encoding headers
+- Support for CJK parameters in form posts
+
+
+0.14.1 (2012-10-01)
++++++++++++++++++++
+
+- Python 3.3 Compatibility
+- Simplify default accept-encoding
+- Bugfixes
+
+
+0.14.0 (2012-09-02)
+++++++++++++++++++++
+
+- No more iter_content errors if already downloaded.
+
+0.13.9 (2012-08-25)
++++++++++++++++++++
+
+- Fix for OAuth + POSTs
+- Remove exception eating from dispatch_hook
+- General bugfixes
+
+0.13.8 (2012-08-21)
++++++++++++++++++++
+
+- Incredible Link header support :)
+
+0.13.7 (2012-08-19)
++++++++++++++++++++
+
+- Support for (key, value) lists everywhere.
+- Digest Authentication improvements.
+- Ensure proxy exclusions work properly.
+- Clearer UnicodeError exceptions.
+- Automatic casting of URLs to strings (fURL and such)
+- Bugfixes.
+
+0.13.6 (2012-08-06)
++++++++++++++++++++
+
+- Long awaited fix for hanging connections!
+
+0.13.5 (2012-07-27)
++++++++++++++++++++
+
+- Packaging fix
+
+0.13.4 (2012-07-27)
++++++++++++++++++++
+
+- GSSAPI/Kerberos authentication!
+- App Engine 2.7 Fixes!
+- Fix leaking connections (from urllib3 update)
+- OAuthlib path hack fix
+- OAuthlib URL parameters fix.
+
+0.13.3 (2012-07-12)
++++++++++++++++++++
+
+- Use simplejson if available.
+- Do not hide SSLErrors behind Timeouts.
+- Fixed param handling with urls containing fragments.
+- Significantly improved information in User Agent.
+- client certificates are ignored when verify=False
+
+0.13.2 (2012-06-28)
++++++++++++++++++++
+
+- Zero dependencies (once again)!
+- New: Response.reason
+- Sign querystring parameters in OAuth 1.0
+- Client certificates no longer ignored when verify=False
+- Add openSUSE certificate support
+
+0.13.1 (2012-06-07)
++++++++++++++++++++
+
+- Allow passing a file or file-like object as data.
+- Allow hooks to return responses that indicate errors.
+- Fix Response.text and Response.json for body-less responses.
+
+0.13.0 (2012-05-29)
++++++++++++++++++++
+
+- Removal of Requests.async in favor of `grequests <https://github.com/kennethreitz/grequests>`_
+- Allow disabling of cookie persistence.
+- New implementation of safe_mode
+- cookies.get now supports default argument
+- Session cookies not saved when Session.request is called with return_response=False
+- Env: no_proxy support.
+- RequestsCookieJar improvements.
+- Various bug fixes.
+
+0.12.1 (2012-05-08)
++++++++++++++++++++
+
+- New ``Response.json`` property.
+- Ability to add string file uploads.
+- Fix out-of-range issue with iter_lines.
+- Fix iter_content default size.
+- Fix POST redirects containing files.
+
+0.12.0 (2012-05-02)
++++++++++++++++++++
+
+- EXPERIMENTAL OAUTH SUPPORT!
+- Proper CookieJar-backed cookies interface with awesome dict-like interface.
+- Speed fix for non-iterated content chunks.
+- Move ``pre_request`` to a more usable place.
+- New ``pre_send`` hook.
+- Lazily encode data, params, files.
+- Load system Certificate Bundle if ``certifi`` isn't available.
+- Cleanups, fixes.
+
+0.11.2 (2012-04-22)
++++++++++++++++++++
+
+- Attempt to use the OS's certificate bundle if ``certifi`` isn't available.
+- Infinite digest auth redirect fix.
+- Multi-part file upload improvements.
+- Fix decoding of invalid %encodings in URLs.
+- If there is no content in a response don't throw an error the second time that content is attempted to be read.
+- Upload data on redirects.
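The 0.13.1 and 0.12.1 entries above add file-like request bodies and string file uploads; a short sketch of both, assuming a placeholder endpoint and file name::

    import requests

    # String file upload: a (filename, content, content-type) tuple stands in
    # for an on-disk file in the multipart body.
    files = {'report': ('report.csv', 'name,value\nfoo,1\n', 'text/csv')}
    requests.post('https://httpbin.org/post', files=files)

    # A file or file-like object passed as data becomes the raw request body.
    with open('payload.bin', 'rb') as body:
        requests.post('https://httpbin.org/post', data=body)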
+
+0.11.1 (2012-03-30)
++++++++++++++++++++
+
+* POST redirects now break RFC to do what browsers do: Follow up with a GET.
+* New ``strict_mode`` configuration to disable new redirect behavior.
+
+
+0.11.0 (2012-03-14)
++++++++++++++++++++
+
+* Private SSL Certificate support
+* Remove select.poll from Gevent monkeypatching
+* Remove redundant generator for chunked transfer encoding
+* Fix: Response.ok raises Timeout Exception in safe_mode
+
+0.10.8 (2012-03-09)
++++++++++++++++++++
+
+* Generate chunked ValueError fix
+* Proxy configuration by environment variables
+* Simplification of iter_lines.
+* New `trust_env` configuration for disabling system/environment hints.
+* Suppress cookie errors.
+
+0.10.7 (2012-03-07)
++++++++++++++++++++
+
+* `encode_uri` = False
+
+0.10.6 (2012-02-25)
++++++++++++++++++++
+
+* Allow '=' in cookies.
+
+0.10.5 (2012-02-25)
++++++++++++++++++++
+
+* Response body with 0 content-length fix.
+* New async.imap.
+* Don't fail on netrc.
+
+
+0.10.4 (2012-02-20)
++++++++++++++++++++
+
+* Honor netrc.
+
+0.10.3 (2012-02-20)
++++++++++++++++++++
+
+* HEAD requests don't follow redirects anymore.
+* raise_for_status() doesn't raise for 3xx anymore.
+* Make Session objects picklable.
+* ValueError for invalid schema URLs.
+
+0.10.2 (2012-01-15)
++++++++++++++++++++
+
+* Vastly improved URL quoting.
+* Additional allowed cookie key values.
+* Attempted fix for "Too many open files" Error
+* Replace unicode errors on first pass, no need for second pass.
+* Append '/' to bare-domain urls before query insertion.
+* Exceptions now inherit from RuntimeError.
+* Binary uploads + auth fix.
+* Bugfixes.
+
+
+0.10.1 (2012-01-23)
++++++++++++++++++++
+
+* PYTHON 3 SUPPORT!
+* Dropped 2.5 Support. (*Backwards Incompatible*)
+
+0.10.0 (2012-01-21)
++++++++++++++++++++
+
+* ``Response.content`` is now bytes-only. (*Backwards Incompatible*)
+* New ``Response.text`` is unicode-only.
+* If no ``Response.encoding`` is specified and ``chardet`` is available, ``Response.text`` will guess an encoding.
+* Default to ISO-8859-1 (Western) encoding for "text" subtypes.
+* Removal of `decode_unicode`. (*Backwards Incompatible*)
+* New multiple-hooks system.
+* New ``Response.register_hook`` for registering hooks within the pipeline.
+* ``Response.url`` is now Unicode.
+
+0.9.3 (2012-01-18)
+++++++++++++++++++
+
+* SSL verify=False bugfix (apparent on windows machines).
+
+0.9.2 (2012-01-18)
+++++++++++++++++++
+
+* Asynchronous async.send method.
+* Support for proper chunk streams with boundaries.
+* session argument for Session classes.
+* Print entire hook tracebacks, not just exception instance.
+* Fix response.iter_lines from pending next line.
+* Fix bug in HTTP-digest auth w/ URI having query strings.
+* Fix in Event Hooks section.
+* Urllib3 update.
+
+
+0.9.1 (2012-01-06)
+++++++++++++++++++
+
+* danger_mode for automatic Response.raise_for_status()
+* Response.iter_lines refactor
+
+0.9.0 (2011-12-28)
+++++++++++++++++++
+
+* verify ssl is default.
+
+
+0.8.9 (2011-12-28)
+++++++++++++++++++
+
+* Packaging fix.
+
+
+0.8.8 (2011-12-28)
+++++++++++++++++++
+
+* SSL CERT VERIFICATION!
+* Release of Certifi: Mozilla's cert list.
+* New 'verify' argument for SSL requests.
+* Urllib3 update.
+
+0.8.7 (2011-12-24)
+++++++++++++++++++
+
+* iter_lines last-line truncation fix
+* Force safe_mode for async requests
+* Handle safe_mode exceptions more consistently
+* Fix iteration on null responses in safe_mode
+
+0.8.6 (2011-12-18)
+++++++++++++++++++
+
+* Socket timeout fixes.
+* Proxy Authorization support. + +0.8.5 (2011-12-14) +++++++++++++++++++ + +* Response.iter_lines! + +0.8.4 (2011-12-11) +++++++++++++++++++ + +* Prefetch bugfix. +* Added license to installed version. + +0.8.3 (2011-11-27) +++++++++++++++++++ + +* Converted auth system to use simpler callable objects. +* New session parameter to API methods. +* Display full URL while logging. + +0.8.2 (2011-11-19) +++++++++++++++++++ + +* New Unicode decoding system, based on over-ridable `Response.encoding`. +* Proper URL slash-quote handling. +* Cookies with ``[``, ``]``, and ``_`` allowed. + +0.8.1 (2011-11-15) +++++++++++++++++++ + +* URL Request path fix +* Proxy fix. +* Timeouts fix. + +0.8.0 (2011-11-13) +++++++++++++++++++ + +* Keep-alive support! +* Complete removal of Urllib2 +* Complete removal of Poster +* Complete removal of CookieJars +* New ConnectionError raising +* Safe_mode for error catching +* prefetch parameter for request methods +* OPTION method +* Async pool size throttling +* File uploads send real names +* Vendored in urllib3 + +0.7.6 (2011-11-07) +++++++++++++++++++ + +* Digest authentication bugfix (attach query data to path) + +0.7.5 (2011-11-04) +++++++++++++++++++ + +* Response.content = None if there was an invalid response. +* Redirection auth handling. + +0.7.4 (2011-10-26) +++++++++++++++++++ + +* Session Hooks fix. + +0.7.3 (2011-10-23) +++++++++++++++++++ + +* Digest Auth fix. + + +0.7.2 (2011-10-23) +++++++++++++++++++ + +* PATCH Fix. + + +0.7.1 (2011-10-23) +++++++++++++++++++ + +* Move away from urllib2 authentication handling. +* Fully Remove AuthManager, AuthObject, &c. +* New tuple-based auth system with handler callbacks. + + +0.7.0 (2011-10-22) +++++++++++++++++++ + +* Sessions are now the primary interface. +* Deprecated InvalidMethodException. +* PATCH fix. +* New config system (no more global settings). + + +0.6.6 (2011-10-19) +++++++++++++++++++ + +* Session parameter bugfix (params merging). + + +0.6.5 (2011-10-18) +++++++++++++++++++ + +* Offline (fast) test suite. +* Session dictionary argument merging. + + +0.6.4 (2011-10-13) +++++++++++++++++++ + +* Automatic decoding of unicode, based on HTTP Headers. +* New ``decode_unicode`` setting. +* Removal of ``r.read/close`` methods. +* New ``r.faw`` interface for advanced response usage.* +* Automatic expansion of parameterized headers. + + +0.6.3 (2011-10-13) +++++++++++++++++++ + +* Beautiful ``requests.async`` module, for making async requests w/ gevent. + + +0.6.2 (2011-10-09) +++++++++++++++++++ + +* GET/HEAD obeys allow_redirects=False. + + +0.6.1 (2011-08-20) +++++++++++++++++++ + +* Enhanced status codes experience ``\o/`` +* Set a maximum number of redirects (``settings.max_redirects``) +* Full Unicode URL support +* Support for protocol-less redirects. +* Allow for arbitrary request types. +* Bugfixes + + +0.6.0 (2011-08-17) +++++++++++++++++++ + +* New callback hook system +* New persistent sessions object and context manager +* Transparent Dict-cookie handling +* Status code reference object +* Removed Response.cached +* Added Response.request +* All args are kwargs +* Relative redirect support +* HTTPError handling improvements +* Improved https testing +* Bugfixes + + +0.5.1 (2011-07-23) +++++++++++++++++++ + +* International Domain Name Support! 
+* Access headers without fetching entire body (``read()``) +* Use lists as dicts for parameters +* Add Forced Basic Authentication +* Forced Basic is default authentication type +* ``python-requests.org`` default User-Agent header +* CaseInsensitiveDict lower-case caching +* Response.history bugfix + + +0.5.0 (2011-06-21) +++++++++++++++++++ + +* PATCH Support +* Support for Proxies +* HTTPBin Test Suite +* Redirect Fixes +* settings.verbose stream writing +* Querystrings for all methods +* URLErrors (Connection Refused, Timeout, Invalid URLs) are treated as explicitly raised + ``r.requests.get('hwe://blah'); r.raise_for_status()`` + + +0.4.1 (2011-05-22) +++++++++++++++++++ + +* Improved Redirection Handling +* New 'allow_redirects' param for following non-GET/HEAD Redirects +* Settings module refactoring + + +0.4.0 (2011-05-15) +++++++++++++++++++ + +* Response.history: list of redirected responses +* Case-Insensitive Header Dictionaries! +* Unicode URLs + + +0.3.4 (2011-05-14) +++++++++++++++++++ + +* Urllib2 HTTPAuthentication Recursion fix (Basic/Digest) +* Internal Refactor +* Bytes data upload Bugfix + + + +0.3.3 (2011-05-12) +++++++++++++++++++ + +* Request timeouts +* Unicode url-encoded data +* Settings context manager and module + + +0.3.2 (2011-04-15) +++++++++++++++++++ + +* Automatic Decompression of GZip Encoded Content +* AutoAuth Support for Tupled HTTP Auth + + +0.3.1 (2011-04-01) +++++++++++++++++++ + +* Cookie Changes +* Response.read() +* Poster fix + + +0.3.0 (2011-02-25) +++++++++++++++++++ + +* Automatic Authentication API Change +* Smarter Query URL Parameterization +* Allow file uploads and POST data together +* New Authentication Manager System + - Simpler Basic HTTP System + - Supports all build-in urllib2 Auths + - Allows for custom Auth Handlers + + +0.2.4 (2011-02-19) +++++++++++++++++++ + +* Python 2.5 Support +* PyPy-c v1.4 Support +* Auto-Authentication tests +* Improved Request object constructor + +0.2.3 (2011-02-15) +++++++++++++++++++ + +* New HTTPHandling Methods + - Response.__nonzero__ (false if bad HTTP Status) + - Response.ok (True if expected HTTP Status) + - Response.error (Logged HTTPError if bad HTTP Status) + - Response.raise_for_status() (Raises stored HTTPError) + + +0.2.2 (2011-02-14) +++++++++++++++++++ + +* Still handles request in the event of an HTTPError. (Issue #2) +* Eventlet and Gevent Monkeypatch support. +* Cookie Support (Issue #1) + + +0.2.1 (2011-02-14) +++++++++++++++++++ + +* Added file attribute to POST and PUT requests for multipart-encode file uploads. +* Added Request.url attribute for context and redirects + + +0.2.0 (2011-02-14) +++++++++++++++++++ + +* Birth! 
+ + +0.0.1 (2011-02-13) +++++++++++++++++++ + +* Frustration +* Conception + + diff --git a/env/lib/python3.6/site-packages/requests-2.19.1.dist-info/RECORD b/env/lib/python3.6/site-packages/requests-2.19.1.dist-info/RECORD new file mode 100644 index 0000000..c9cb234 --- /dev/null +++ b/env/lib/python3.6/site-packages/requests-2.19.1.dist-info/RECORD @@ -0,0 +1,44 @@ +requests/__init__.py,sha256=CfBVrS2i2DRqW4l_YWg0amv2aE40uzKipJOhl5f_ym4,4056 +requests/__version__.py,sha256=rJ2xgNOLhjspGkNPfgXTBctqqvsf2uJMFTaE0rlVtbI,436 +requests/_internal_utils.py,sha256=Zx3PnEUccyfsB-ie11nZVAW8qClJy0gx1qNME7rgT18,1096 +requests/adapters.py,sha256=i3hSRejv346malcOuw46ivOBDxCyip0SBuMy3sylvmA,21236 +requests/api.py,sha256=zub9ENcEUT2m9gwgBgqH5RIRDfrx2kwRpZ7L6hX3mcw,6261 +requests/auth.py,sha256=oRSQkBYcLkTEssudkzoR1UW1Glb1ts3p1RWusgHf1YU,10208 +requests/certs.py,sha256=dOB5rV2DZ13dEhq9BUa_4hd5kAqg59e_zUZB00faYz8,453 +requests/compat.py,sha256=aY4WEIpX7kYXzD4h4RhcO5QwkurS79Ssg5m0fWEflMc,1723 +requests/cookies.py,sha256=olUaLeNci_z1K-Bn5PeEKllSspmQqN9-s8Ug7CasaPE,18346 +requests/exceptions.py,sha256=Q8YeWWxiHHXhkEynLpMgC_6_r_ZTYw2aITs9wCSAZNY,3185 +requests/help.py,sha256=vKOUvN6TMonwqumMjRaG9BgsYw7-Ei6aMWY71vtHCx8,3606 +requests/hooks.py,sha256=HXAHoC1FNTFRZX6-lNdvPM7Tst4kvGwYTN-AOKRxoRU,767 +requests/models.py,sha256=Eus9H6VuYZ3hkvFqpK6chs5HsU4Uhubm2k7015AeRII,34095 +requests/packages.py,sha256=Q2rF0L5mc3wQAvc6q_lAVtPTDOaOeFgD-7kWSQLkjEQ,542 +requests/sessions.py,sha256=71MK2HCadovka1vAx9dyDFWAuw69KgRPRBpd0HWSEAo,27829 +requests/status_codes.py,sha256=pgw-xlnxO5zHQWn3fKps2cxwQehKzPxEbdhIrMQe6Ec,4128 +requests/structures.py,sha256=zoP8qly2Jak5e89HwpqjN1z2diztI-_gaqts1raJJBc,2981 +requests/utils.py,sha256=3OxbbLUQFVdm84fdBD9nduXvhw6hIzj59mhvBomKuJI,30156 +requests-2.19.1.dist-info/DESCRIPTION.rst,sha256=n_VLfuLN3E966vObL5yznSnEemZi9-mN_NYciEbtu1U,50439 +requests-2.19.1.dist-info/LICENSE.txt,sha256=gqhp_k6WdEmVbSalRq3HYqys4CiFLOgboWw8Wx12sVs,581 +requests-2.19.1.dist-info/METADATA,sha256=xyt8lFsqw5Ub6FgGyQi_zaONQ6uCh8ULkonZeblqHzg,52012 +requests-2.19.1.dist-info/RECORD,, +requests-2.19.1.dist-info/WHEEL,sha256=kdsN-5OJAZIiHN-iO4Rhl82KyS0bDWf4uBwMbkNafr8,110 +requests-2.19.1.dist-info/metadata.json,sha256=aoLNiq-pX6UqPnwJr037D19kMMoHnl8qHmS1HFmI-zw,1768 +requests-2.19.1.dist-info/top_level.txt,sha256=fMSVmHfb5rbGOo6xv-O_tUX6j-WyixssE-SnwcDRxNQ,9 +requests-2.19.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +requests/__pycache__/help.cpython-36.pyc,, +requests/__pycache__/_internal_utils.cpython-36.pyc,, +requests/__pycache__/__version__.cpython-36.pyc,, +requests/__pycache__/exceptions.cpython-36.pyc,, +requests/__pycache__/certs.cpython-36.pyc,, +requests/__pycache__/structures.cpython-36.pyc,, +requests/__pycache__/sessions.cpython-36.pyc,, +requests/__pycache__/status_codes.cpython-36.pyc,, +requests/__pycache__/models.cpython-36.pyc,, +requests/__pycache__/compat.cpython-36.pyc,, +requests/__pycache__/adapters.cpython-36.pyc,, +requests/__pycache__/packages.cpython-36.pyc,, +requests/__pycache__/hooks.cpython-36.pyc,, +requests/__pycache__/auth.cpython-36.pyc,, +requests/__pycache__/utils.cpython-36.pyc,, +requests/__pycache__/cookies.cpython-36.pyc,, +requests/__pycache__/__init__.cpython-36.pyc,, +requests/__pycache__/api.cpython-36.pyc,, diff --git a/env/lib/python3.6/site-packages/requests-2.19.1.dist-info/WHEEL b/env/lib/python3.6/site-packages/requests-2.19.1.dist-info/WHEEL new file mode 100644 index 0000000..7332a41 --- /dev/null +++ 
b/env/lib/python3.6/site-packages/requests-2.19.1.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.30.0) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/env/lib/python3.6/site-packages/requests-2.19.1.dist-info/metadata.json b/env/lib/python3.6/site-packages/requests-2.19.1.dist-info/metadata.json new file mode 100644 index 0000000..c844f60 --- /dev/null +++ b/env/lib/python3.6/site-packages/requests-2.19.1.dist-info/metadata.json @@ -0,0 +1 @@ +{"classifiers": ["Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "Natural Language :: English", "License :: OSI Approved :: Apache Software License", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy"], "description_content_type": "text/x-rst", "extensions": {"python.details": {"contacts": [{"email": "me@kennethreitz.org", "name": "Kenneth Reitz", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst", "license": "LICENSE.txt"}, "project_urls": {"Home": "http://python-requests.org"}}}, "extras": ["security", "socks"], "generator": "bdist_wheel (0.30.0)", "license": "Apache 2.0", "metadata_version": "2.0", "name": "requests", "requires_python": ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*", "run_requires": [{"extra": "socks", "requires": ["PySocks (!=1.5.7,>=1.5.6)"]}, {"requires": ["certifi (>=2017.4.17)", "chardet (<3.1.0,>=3.0.2)", "idna (<2.8,>=2.5)", "urllib3 (<1.24,>=1.21.1)"]}, {"extra": "security", "requires": ["cryptography (>=1.3.4)", "idna (>=2.0.0)", "pyOpenSSL (>=0.14)"]}, {"environment": "sys_platform == \"win32\" and (python_version == \"2.7\" or python_version == \"2.6\")", "extra": "socks", "requires": ["win-inet-pton"]}], "summary": "Python HTTP for Humans.", "test_requires": [{"requires": ["PySocks (!=1.5.7,>=1.5.6)", "pytest (>=2.8.0)", "pytest-cov", "pytest-httpbin (==0.0.7)", "pytest-mock", "pytest-xdist"]}], "version": "2.19.1"} \ No newline at end of file diff --git a/env/lib/python3.6/site-packages/requests-2.19.1.dist-info/top_level.txt b/env/lib/python3.6/site-packages/requests-2.19.1.dist-info/top_level.txt new file mode 100644 index 0000000..f229360 --- /dev/null +++ b/env/lib/python3.6/site-packages/requests-2.19.1.dist-info/top_level.txt @@ -0,0 +1 @@ +requests diff --git a/env/lib/python3.6/site-packages/requests/_internal_utils.py b/env/lib/python3.6/site-packages/requests/_internal_utils.py new file mode 100644 index 0000000..759d9a5 --- /dev/null +++ b/env/lib/python3.6/site-packages/requests/_internal_utils.py @@ -0,0 +1,42 @@ +# -*- coding: utf-8 -*- + +""" +requests._internal_utils +~~~~~~~~~~~~~~ + +Provides utility functions that are consumed internally by Requests +which depend on extremely few external helpers (such as compat) +""" + +from .compat import is_py2, builtin_str, str + + +def to_native_string(string, encoding='ascii'): + """Given a string object, regardless of type, returns a representation of + that string in the native string type, encoding and decoding where + necessary. This assumes ASCII unless told otherwise. 
+ """ + if isinstance(string, builtin_str): + out = string + else: + if is_py2: + out = string.encode(encoding) + else: + out = string.decode(encoding) + + return out + + +def unicode_is_ascii(u_string): + """Determine if unicode string only contains ASCII characters. + + :param str u_string: unicode string to check. Must be unicode + and not Python 2 `str`. + :rtype: bool + """ + assert isinstance(u_string, str) + try: + u_string.encode('ascii') + return True + except UnicodeEncodeError: + return False diff --git a/env/lib/python3.6/site-packages/requests/adapters.py b/env/lib/python3.6/site-packages/requests/adapters.py new file mode 100644 index 0000000..a4b0284 --- /dev/null +++ b/env/lib/python3.6/site-packages/requests/adapters.py @@ -0,0 +1,530 @@ +# -*- coding: utf-8 -*- + +""" +requests.adapters +~~~~~~~~~~~~~~~~~ + +This module contains the transport adapters that Requests uses to define +and maintain connections. +""" + +import os.path +import socket + +from urllib3.poolmanager import PoolManager, proxy_from_url +from urllib3.response import HTTPResponse +from urllib3.util import parse_url +from urllib3.util import Timeout as TimeoutSauce +from urllib3.util.retry import Retry +from urllib3.exceptions import ClosedPoolError +from urllib3.exceptions import ConnectTimeoutError +from urllib3.exceptions import HTTPError as _HTTPError +from urllib3.exceptions import MaxRetryError +from urllib3.exceptions import NewConnectionError +from urllib3.exceptions import ProxyError as _ProxyError +from urllib3.exceptions import ProtocolError +from urllib3.exceptions import ReadTimeoutError +from urllib3.exceptions import SSLError as _SSLError +from urllib3.exceptions import ResponseError + +from .models import Response +from .compat import urlparse, basestring +from .utils import (DEFAULT_CA_BUNDLE_PATH, extract_zipped_paths, + get_encoding_from_headers, prepend_scheme_if_needed, + get_auth_from_url, urldefragauth, select_proxy) +from .structures import CaseInsensitiveDict +from .cookies import extract_cookies_to_jar +from .exceptions import (ConnectionError, ConnectTimeout, ReadTimeout, SSLError, + ProxyError, RetryError, InvalidSchema, InvalidProxyURL) +from .auth import _basic_auth_str + +try: + from urllib3.contrib.socks import SOCKSProxyManager +except ImportError: + def SOCKSProxyManager(*args, **kwargs): + raise InvalidSchema("Missing dependencies for SOCKS support.") + +DEFAULT_POOLBLOCK = False +DEFAULT_POOLSIZE = 10 +DEFAULT_RETRIES = 0 +DEFAULT_POOL_TIMEOUT = None + + +class BaseAdapter(object): + """The Base Transport Adapter""" + + def __init__(self): + super(BaseAdapter, self).__init__() + + def send(self, request, stream=False, timeout=None, verify=True, + cert=None, proxies=None): + """Sends PreparedRequest object. Returns Response object. + + :param request: The :class:`PreparedRequest <PreparedRequest>` being sent. + :param stream: (optional) Whether to stream the request content. + :param timeout: (optional) How long to wait for the server to send + data before giving up, as a float, or a :ref:`(connect timeout, + read timeout) <timeouts>` tuple. + :type timeout: float or tuple + :param verify: (optional) Either a boolean, in which case it controls whether we verify + the server's TLS certificate, or a string, in which case it must be a path + to a CA bundle to use + :param cert: (optional) Any user-provided SSL certificate to be trusted. + :param proxies: (optional) The proxies dictionary to apply to the request. 
+ """ + raise NotImplementedError + + def close(self): + """Cleans up adapter specific items.""" + raise NotImplementedError + + +class HTTPAdapter(BaseAdapter): + """The built-in HTTP Adapter for urllib3. + + Provides a general-case interface for Requests sessions to contact HTTP and + HTTPS urls by implementing the Transport Adapter interface. This class will + usually be created by the :class:`Session <Session>` class under the + covers. + + :param pool_connections: The number of urllib3 connection pools to cache. + :param pool_maxsize: The maximum number of connections to save in the pool. + :param max_retries: The maximum number of retries each connection + should attempt. Note, this applies only to failed DNS lookups, socket + connections and connection timeouts, never to requests where data has + made it to the server. By default, Requests does not retry failed + connections. If you need granular control over the conditions under + which we retry a request, import urllib3's ``Retry`` class and pass + that instead. + :param pool_block: Whether the connection pool should block for connections. + + Usage:: + + >>> import requests + >>> s = requests.Session() + >>> a = requests.adapters.HTTPAdapter(max_retries=3) + >>> s.mount('http://', a) + """ + __attrs__ = ['max_retries', 'config', '_pool_connections', '_pool_maxsize', + '_pool_block'] + + def __init__(self, pool_connections=DEFAULT_POOLSIZE, + pool_maxsize=DEFAULT_POOLSIZE, max_retries=DEFAULT_RETRIES, + pool_block=DEFAULT_POOLBLOCK): + if max_retries == DEFAULT_RETRIES: + self.max_retries = Retry(0, read=False) + else: + self.max_retries = Retry.from_int(max_retries) + self.config = {} + self.proxy_manager = {} + + super(HTTPAdapter, self).__init__() + + self._pool_connections = pool_connections + self._pool_maxsize = pool_maxsize + self._pool_block = pool_block + + self.init_poolmanager(pool_connections, pool_maxsize, block=pool_block) + + def __getstate__(self): + return dict((attr, getattr(self, attr, None)) for attr in + self.__attrs__) + + def __setstate__(self, state): + # Can't handle by adding 'proxy_manager' to self.__attrs__ because + # self.poolmanager uses a lambda function, which isn't pickleable. + self.proxy_manager = {} + self.config = {} + + for attr, value in state.items(): + setattr(self, attr, value) + + self.init_poolmanager(self._pool_connections, self._pool_maxsize, + block=self._pool_block) + + def init_poolmanager(self, connections, maxsize, block=DEFAULT_POOLBLOCK, **pool_kwargs): + """Initializes a urllib3 PoolManager. + + This method should not be called from user code, and is only + exposed for use when subclassing the + :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`. + + :param connections: The number of urllib3 connection pools to cache. + :param maxsize: The maximum number of connections to save in the pool. + :param block: Block when no free connections are available. + :param pool_kwargs: Extra keyword arguments used to initialize the Pool Manager. + """ + # save these values for pickling + self._pool_connections = connections + self._pool_maxsize = maxsize + self._pool_block = block + + self.poolmanager = PoolManager(num_pools=connections, maxsize=maxsize, + block=block, strict=True, **pool_kwargs) + + def proxy_manager_for(self, proxy, **proxy_kwargs): + """Return urllib3 ProxyManager for the given proxy. + + This method should not be called from user code, and is only + exposed for use when subclassing the + :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`. 
+ + :param proxy: The proxy to return a urllib3 ProxyManager for. + :param proxy_kwargs: Extra keyword arguments used to configure the Proxy Manager. + :returns: ProxyManager + :rtype: urllib3.ProxyManager + """ + if proxy in self.proxy_manager: + manager = self.proxy_manager[proxy] + elif proxy.lower().startswith('socks'): + username, password = get_auth_from_url(proxy) + manager = self.proxy_manager[proxy] = SOCKSProxyManager( + proxy, + username=username, + password=password, + num_pools=self._pool_connections, + maxsize=self._pool_maxsize, + block=self._pool_block, + **proxy_kwargs + ) + else: + proxy_headers = self.proxy_headers(proxy) + manager = self.proxy_manager[proxy] = proxy_from_url( + proxy, + proxy_headers=proxy_headers, + num_pools=self._pool_connections, + maxsize=self._pool_maxsize, + block=self._pool_block, + **proxy_kwargs) + + return manager + + def cert_verify(self, conn, url, verify, cert): + """Verify a SSL certificate. This method should not be called from user + code, and is only exposed for use when subclassing the + :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`. + + :param conn: The urllib3 connection object associated with the cert. + :param url: The requested URL. + :param verify: Either a boolean, in which case it controls whether we verify + the server's TLS certificate, or a string, in which case it must be a path + to a CA bundle to use + :param cert: The SSL certificate to verify. + """ + if url.lower().startswith('https') and verify: + + cert_loc = None + + # Allow self-specified cert location. + if verify is not True: + cert_loc = verify + + if not cert_loc: + cert_loc = extract_zipped_paths(DEFAULT_CA_BUNDLE_PATH) + + if not cert_loc or not os.path.exists(cert_loc): + raise IOError("Could not find a suitable TLS CA certificate bundle, " + "invalid path: {0}".format(cert_loc)) + + conn.cert_reqs = 'CERT_REQUIRED' + + if not os.path.isdir(cert_loc): + conn.ca_certs = cert_loc + else: + conn.ca_cert_dir = cert_loc + else: + conn.cert_reqs = 'CERT_NONE' + conn.ca_certs = None + conn.ca_cert_dir = None + + if cert: + if not isinstance(cert, basestring): + conn.cert_file = cert[0] + conn.key_file = cert[1] + else: + conn.cert_file = cert + conn.key_file = None + if conn.cert_file and not os.path.exists(conn.cert_file): + raise IOError("Could not find the TLS certificate file, " + "invalid path: {0}".format(conn.cert_file)) + if conn.key_file and not os.path.exists(conn.key_file): + raise IOError("Could not find the TLS key file, " + "invalid path: {0}".format(conn.key_file)) + + def build_response(self, req, resp): + """Builds a :class:`Response <requests.Response>` object from a urllib3 + response. This should not be called from user code, and is only exposed + for use when subclassing the + :class:`HTTPAdapter <requests.adapters.HTTPAdapter>` + + :param req: The :class:`PreparedRequest <PreparedRequest>` used to generate the response. + :param resp: The urllib3 response object. + :rtype: requests.Response + """ + response = Response() + + # Fallback to None if there's no status_code, for whatever reason. + response.status_code = getattr(resp, 'status', None) + + # Make headers case-insensitive. + response.headers = CaseInsensitiveDict(getattr(resp, 'headers', {})) + + # Set encoding. 
+ response.encoding = get_encoding_from_headers(response.headers) + response.raw = resp + response.reason = response.raw.reason + + if isinstance(req.url, bytes): + response.url = req.url.decode('utf-8') + else: + response.url = req.url + + # Add new cookies from the server. + extract_cookies_to_jar(response.cookies, req, resp) + + # Give the Response some context. + response.request = req + response.connection = self + + return response + + def get_connection(self, url, proxies=None): + """Returns a urllib3 connection for the given URL. This should not be + called from user code, and is only exposed for use when subclassing the + :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`. + + :param url: The URL to connect to. + :param proxies: (optional) A Requests-style dictionary of proxies used on this request. + :rtype: urllib3.ConnectionPool + """ + proxy = select_proxy(url, proxies) + + if proxy: + proxy = prepend_scheme_if_needed(proxy, 'http') + proxy_url = parse_url(proxy) + if not proxy_url.host: + raise InvalidProxyURL("Please check proxy URL. It is malformed" + " and could be missing the host.") + proxy_manager = self.proxy_manager_for(proxy) + conn = proxy_manager.connection_from_url(url) + else: + # Only scheme should be lower case + parsed = urlparse(url) + url = parsed.geturl() + conn = self.poolmanager.connection_from_url(url) + + return conn + + def close(self): + """Disposes of any internal state. + + Currently, this closes the PoolManager and any active ProxyManager, + which closes any pooled connections. + """ + self.poolmanager.clear() + for proxy in self.proxy_manager.values(): + proxy.clear() + + def request_url(self, request, proxies): + """Obtain the url to use when making the final request. + + If the message is being sent through a HTTP proxy, the full URL has to + be used. Otherwise, we should only use the path portion of the URL. + + This should not be called from user code, and is only exposed for use + when subclassing the + :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`. + + :param request: The :class:`PreparedRequest <PreparedRequest>` being sent. + :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs. + :rtype: str + """ + proxy = select_proxy(request.url, proxies) + scheme = urlparse(request.url).scheme + + is_proxied_http_request = (proxy and scheme != 'https') + using_socks_proxy = False + if proxy: + proxy_scheme = urlparse(proxy).scheme.lower() + using_socks_proxy = proxy_scheme.startswith('socks') + + url = request.path_url + if is_proxied_http_request and not using_socks_proxy: + url = urldefragauth(request.url) + + return url + + def add_headers(self, request, **kwargs): + """Add any headers needed by the connection. As of v2.0 this does + nothing by default, but is left for overriding by users that subclass + the :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`. + + This should not be called from user code, and is only exposed for use + when subclassing the + :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`. + + :param request: The :class:`PreparedRequest <PreparedRequest>` to add headers to. + :param kwargs: The keyword arguments from the call to send(). + """ + pass + + def proxy_headers(self, proxy): + """Returns a dictionary of the headers to add to any request sent + through a proxy. This works with urllib3 magic to ensure that they are + correctly sent to the proxy, rather than in a tunnelled request if + CONNECT is being used. 
+ + This should not be called from user code, and is only exposed for use + when subclassing the + :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`. + + :param proxies: The url of the proxy being used for this request. + :rtype: dict + """ + headers = {} + username, password = get_auth_from_url(proxy) + + if username: + headers['Proxy-Authorization'] = _basic_auth_str(username, + password) + + return headers + + def send(self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None): + """Sends PreparedRequest object. Returns Response object. + + :param request: The :class:`PreparedRequest <PreparedRequest>` being sent. + :param stream: (optional) Whether to stream the request content. + :param timeout: (optional) How long to wait for the server to send + data before giving up, as a float, or a :ref:`(connect timeout, + read timeout) <timeouts>` tuple. + :type timeout: float or tuple or urllib3 Timeout object + :param verify: (optional) Either a boolean, in which case it controls whether + we verify the server's TLS certificate, or a string, in which case it + must be a path to a CA bundle to use + :param cert: (optional) Any user-provided SSL certificate to be trusted. + :param proxies: (optional) The proxies dictionary to apply to the request. + :rtype: requests.Response + """ + + conn = self.get_connection(request.url, proxies) + + self.cert_verify(conn, request.url, verify, cert) + url = self.request_url(request, proxies) + self.add_headers(request, stream=stream, timeout=timeout, verify=verify, cert=cert, proxies=proxies) + + chunked = not (request.body is None or 'Content-Length' in request.headers) + + if isinstance(timeout, tuple): + try: + connect, read = timeout + timeout = TimeoutSauce(connect=connect, read=read) + except ValueError as e: + # this may raise a string formatting error. + err = ("Invalid timeout {0}. Pass a (connect, read) " + "timeout tuple, or a single float to set " + "both timeouts to the same value".format(timeout)) + raise ValueError(err) + elif isinstance(timeout, TimeoutSauce): + pass + else: + timeout = TimeoutSauce(connect=timeout, read=timeout) + + try: + if not chunked: + resp = conn.urlopen( + method=request.method, + url=url, + body=request.body, + headers=request.headers, + redirect=False, + assert_same_host=False, + preload_content=False, + decode_content=False, + retries=self.max_retries, + timeout=timeout + ) + + # Send the request. + else: + if hasattr(conn, 'proxy_pool'): + conn = conn.proxy_pool + + low_conn = conn._get_conn(timeout=DEFAULT_POOL_TIMEOUT) + + try: + low_conn.putrequest(request.method, + url, + skip_accept_encoding=True) + + for header, value in request.headers.items(): + low_conn.putheader(header, value) + + low_conn.endheaders() + + for i in request.body: + low_conn.send(hex(len(i))[2:].encode('utf-8')) + low_conn.send(b'\r\n') + low_conn.send(i) + low_conn.send(b'\r\n') + low_conn.send(b'0\r\n\r\n') + + # Receive the response from the server + try: + # For Python 2.7+ versions, use buffering of HTTP + # responses + r = low_conn.getresponse(buffering=True) + except TypeError: + # For compatibility with Python 2.6 versions and back + r = low_conn.getresponse() + + resp = HTTPResponse.from_httplib( + r, + pool=conn, + connection=low_conn, + preload_content=False, + decode_content=False + ) + except: + # If we hit any problems here, clean up the connection. + # Then, reraise so that we can handle the actual exception. 
+ low_conn.close() + raise + + except (ProtocolError, socket.error) as err: + raise ConnectionError(err, request=request) + + except MaxRetryError as e: + if isinstance(e.reason, ConnectTimeoutError): + # TODO: Remove this in 3.0.0: see #2811 + if not isinstance(e.reason, NewConnectionError): + raise ConnectTimeout(e, request=request) + + if isinstance(e.reason, ResponseError): + raise RetryError(e, request=request) + + if isinstance(e.reason, _ProxyError): + raise ProxyError(e, request=request) + + if isinstance(e.reason, _SSLError): + # This branch is for urllib3 v1.22 and later. + raise SSLError(e, request=request) + + raise ConnectionError(e, request=request) + + except ClosedPoolError as e: + raise ConnectionError(e, request=request) + + except _ProxyError as e: + raise ProxyError(e) + + except (_SSLError, _HTTPError) as e: + if isinstance(e, _SSLError): + # This branch is for urllib3 versions earlier than v1.22 + raise SSLError(e, request=request) + elif isinstance(e, ReadTimeoutError): + raise ReadTimeout(e, request=request) + else: + raise + + return self.build_response(request, resp) diff --git a/env/lib/python3.6/site-packages/requests/api.py b/env/lib/python3.6/site-packages/requests/api.py new file mode 100644 index 0000000..a2cc84d --- /dev/null +++ b/env/lib/python3.6/site-packages/requests/api.py @@ -0,0 +1,152 @@ +# -*- coding: utf-8 -*- + +""" +requests.api +~~~~~~~~~~~~ + +This module implements the Requests API. + +:copyright: (c) 2012 by Kenneth Reitz. +:license: Apache2, see LICENSE for more details. +""" + +from . import sessions + + +def request(method, url, **kwargs): + """Constructs and sends a :class:`Request <Request>`. + + :param method: method for the new :class:`Request` object. + :param url: URL for the new :class:`Request` object. + :param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`. + :param data: (optional) Dictionary or list of tuples ``[(key, value)]`` (will be form-encoded), bytes, or file-like object to send in the body of the :class:`Request`. + :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`. + :param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`. + :param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`. + :param files: (optional) Dictionary of ``'name': file-like-objects`` (or ``{'name': file-tuple}``) for multipart encoding upload. + ``file-tuple`` can be a 2-tuple ``('filename', fileobj)``, 3-tuple ``('filename', fileobj, 'content_type')`` + or a 4-tuple ``('filename', fileobj, 'content_type', custom_headers)``, where ``'content-type'`` is a string + defining the content type of the given file and ``custom_headers`` a dict-like object containing additional headers + to add for the file. + :param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth. + :param timeout: (optional) How many seconds to wait for the server to send data + before giving up, as a float, or a :ref:`(connect timeout, read + timeout) <timeouts>` tuple. + :type timeout: float or tuple + :param allow_redirects: (optional) Boolean. Enable/disable GET/OPTIONS/POST/PUT/PATCH/DELETE/HEAD redirection. Defaults to ``True``. + :type allow_redirects: bool + :param proxies: (optional) Dictionary mapping protocol to the URL of the proxy. 
+ :param verify: (optional) Either a boolean, in which case it controls whether we verify + the server's TLS certificate, or a string, in which case it must be a path + to a CA bundle to use. Defaults to ``True``. + :param stream: (optional) if ``False``, the response content will be immediately downloaded. + :param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair. + :return: :class:`Response <Response>` object + :rtype: requests.Response + + Usage:: + + >>> import requests + >>> req = requests.request('GET', 'http://httpbin.org/get') + <Response [200]> + """ + + # By using the 'with' statement we are sure the session is closed, thus we + # avoid leaving sockets open which can trigger a ResourceWarning in some + # cases, and look like a memory leak in others. + with sessions.Session() as session: + return session.request(method=method, url=url, **kwargs) + + +def get(url, params=None, **kwargs): + r"""Sends a GET request. + + :param url: URL for the new :class:`Request` object. + :param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :return: :class:`Response <Response>` object + :rtype: requests.Response + """ + + kwargs.setdefault('allow_redirects', True) + return request('get', url, params=params, **kwargs) + + +def options(url, **kwargs): + r"""Sends an OPTIONS request. + + :param url: URL for the new :class:`Request` object. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :return: :class:`Response <Response>` object + :rtype: requests.Response + """ + + kwargs.setdefault('allow_redirects', True) + return request('options', url, **kwargs) + + +def head(url, **kwargs): + r"""Sends a HEAD request. + + :param url: URL for the new :class:`Request` object. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :return: :class:`Response <Response>` object + :rtype: requests.Response + """ + + kwargs.setdefault('allow_redirects', False) + return request('head', url, **kwargs) + + +def post(url, data=None, json=None, **kwargs): + r"""Sends a POST request. + + :param url: URL for the new :class:`Request` object. + :param data: (optional) Dictionary (will be form-encoded), bytes, or file-like object to send in the body of the :class:`Request`. + :param json: (optional) json data to send in the body of the :class:`Request`. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :return: :class:`Response <Response>` object + :rtype: requests.Response + """ + + return request('post', url, data=data, json=json, **kwargs) + + +def put(url, data=None, **kwargs): + r"""Sends a PUT request. + + :param url: URL for the new :class:`Request` object. + :param data: (optional) Dictionary (will be form-encoded), bytes, or file-like object to send in the body of the :class:`Request`. + :param json: (optional) json data to send in the body of the :class:`Request`. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :return: :class:`Response <Response>` object + :rtype: requests.Response + """ + + return request('put', url, data=data, **kwargs) + + +def patch(url, data=None, **kwargs): + r"""Sends a PATCH request. + + :param url: URL for the new :class:`Request` object. + :param data: (optional) Dictionary (will be form-encoded), bytes, or file-like object to send in the body of the :class:`Request`. + :param json: (optional) json data to send in the body of the :class:`Request`. 
+ :param \*\*kwargs: Optional arguments that ``request`` takes. + :return: :class:`Response <Response>` object + :rtype: requests.Response + """ + + return request('patch', url, data=data, **kwargs) + + +def delete(url, **kwargs): + r"""Sends a DELETE request. + + :param url: URL for the new :class:`Request` object. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :return: :class:`Response <Response>` object + :rtype: requests.Response + """ + + return request('delete', url, **kwargs) diff --git a/env/lib/python3.6/site-packages/requests/auth.py b/env/lib/python3.6/site-packages/requests/auth.py new file mode 100644 index 0000000..4ae4594 --- /dev/null +++ b/env/lib/python3.6/site-packages/requests/auth.py @@ -0,0 +1,305 @@ +# -*- coding: utf-8 -*- + +""" +requests.auth +~~~~~~~~~~~~~ + +This module contains the authentication handlers for Requests. +""" + +import os +import re +import time +import hashlib +import threading +import warnings + +from base64 import b64encode + +from .compat import urlparse, str, basestring +from .cookies import extract_cookies_to_jar +from ._internal_utils import to_native_string +from .utils import parse_dict_header + +CONTENT_TYPE_FORM_URLENCODED = 'application/x-www-form-urlencoded' +CONTENT_TYPE_MULTI_PART = 'multipart/form-data' + + +def _basic_auth_str(username, password): + """Returns a Basic Auth string.""" + + # "I want us to put a big-ol' comment on top of it that + # says that this behaviour is dumb but we need to preserve + # it because people are relying on it." + # - Lukasa + # + # These are here solely to maintain backwards compatibility + # for things like ints. This will be removed in 3.0.0. + if not isinstance(username, basestring): + warnings.warn( + "Non-string usernames will no longer be supported in Requests " + "3.0.0. Please convert the object you've passed in ({0!r}) to " + "a string or bytes object in the near future to avoid " + "problems.".format(username), + category=DeprecationWarning, + ) + username = str(username) + + if not isinstance(password, basestring): + warnings.warn( + "Non-string passwords will no longer be supported in Requests " + "3.0.0. 
Please convert the object you've passed in ({0!r}) to " + "a string or bytes object in the near future to avoid " + "problems.".format(password), + category=DeprecationWarning, + ) + password = str(password) + # -- End Removal -- + + if isinstance(username, str): + username = username.encode('latin1') + + if isinstance(password, str): + password = password.encode('latin1') + + authstr = 'Basic ' + to_native_string( + b64encode(b':'.join((username, password))).strip() + ) + + return authstr + + +class AuthBase(object): + """Base class that all auth implementations derive from""" + + def __call__(self, r): + raise NotImplementedError('Auth hooks must be callable.') + + +class HTTPBasicAuth(AuthBase): + """Attaches HTTP Basic Authentication to the given Request object.""" + + def __init__(self, username, password): + self.username = username + self.password = password + + def __eq__(self, other): + return all([ + self.username == getattr(other, 'username', None), + self.password == getattr(other, 'password', None) + ]) + + def __ne__(self, other): + return not self == other + + def __call__(self, r): + r.headers['Authorization'] = _basic_auth_str(self.username, self.password) + return r + + +class HTTPProxyAuth(HTTPBasicAuth): + """Attaches HTTP Proxy Authentication to a given Request object.""" + + def __call__(self, r): + r.headers['Proxy-Authorization'] = _basic_auth_str(self.username, self.password) + return r + + +class HTTPDigestAuth(AuthBase): + """Attaches HTTP Digest Authentication to the given Request object.""" + + def __init__(self, username, password): + self.username = username + self.password = password + # Keep state in per-thread local storage + self._thread_local = threading.local() + + def init_per_thread_state(self): + # Ensure state is initialized just once per-thread + if not hasattr(self._thread_local, 'init'): + self._thread_local.init = True + self._thread_local.last_nonce = '' + self._thread_local.nonce_count = 0 + self._thread_local.chal = {} + self._thread_local.pos = None + self._thread_local.num_401_calls = None + + def build_digest_header(self, method, url): + """ + :rtype: str + """ + + realm = self._thread_local.chal['realm'] + nonce = self._thread_local.chal['nonce'] + qop = self._thread_local.chal.get('qop') + algorithm = self._thread_local.chal.get('algorithm') + opaque = self._thread_local.chal.get('opaque') + hash_utf8 = None + + if algorithm is None: + _algorithm = 'MD5' + else: + _algorithm = algorithm.upper() + # lambdas assume digest modules are imported at the top level + if _algorithm == 'MD5' or _algorithm == 'MD5-SESS': + def md5_utf8(x): + if isinstance(x, str): + x = x.encode('utf-8') + return hashlib.md5(x).hexdigest() + hash_utf8 = md5_utf8 + elif _algorithm == 'SHA': + def sha_utf8(x): + if isinstance(x, str): + x = x.encode('utf-8') + return hashlib.sha1(x).hexdigest() + hash_utf8 = sha_utf8 + elif _algorithm == 'SHA-256': + def sha256_utf8(x): + if isinstance(x, str): + x = x.encode('utf-8') + return hashlib.sha256(x).hexdigest() + hash_utf8 = sha256_utf8 + elif _algorithm == 'SHA-512': + def sha512_utf8(x): + if isinstance(x, str): + x = x.encode('utf-8') + return hashlib.sha512(x).hexdigest() + hash_utf8 = sha512_utf8 + + KD = lambda s, d: hash_utf8("%s:%s" % (s, d)) + + if hash_utf8 is None: + return None + + # XXX not implemented yet + entdig = None + p_parsed = urlparse(url) + #: path is request-uri defined in RFC 2616 which should not be empty + path = p_parsed.path or "/" + if p_parsed.query: + path += '?' 
+ p_parsed.query + + A1 = '%s:%s:%s' % (self.username, realm, self.password) + A2 = '%s:%s' % (method, path) + + HA1 = hash_utf8(A1) + HA2 = hash_utf8(A2) + + if nonce == self._thread_local.last_nonce: + self._thread_local.nonce_count += 1 + else: + self._thread_local.nonce_count = 1 + ncvalue = '%08x' % self._thread_local.nonce_count + s = str(self._thread_local.nonce_count).encode('utf-8') + s += nonce.encode('utf-8') + s += time.ctime().encode('utf-8') + s += os.urandom(8) + + cnonce = (hashlib.sha1(s).hexdigest()[:16]) + if _algorithm == 'MD5-SESS': + HA1 = hash_utf8('%s:%s:%s' % (HA1, nonce, cnonce)) + + if not qop: + respdig = KD(HA1, "%s:%s" % (nonce, HA2)) + elif qop == 'auth' or 'auth' in qop.split(','): + noncebit = "%s:%s:%s:%s:%s" % ( + nonce, ncvalue, cnonce, 'auth', HA2 + ) + respdig = KD(HA1, noncebit) + else: + # XXX handle auth-int. + return None + + self._thread_local.last_nonce = nonce + + # XXX should the partial digests be encoded too? + base = 'username="%s", realm="%s", nonce="%s", uri="%s", ' \ + 'response="%s"' % (self.username, realm, nonce, path, respdig) + if opaque: + base += ', opaque="%s"' % opaque + if algorithm: + base += ', algorithm="%s"' % algorithm + if entdig: + base += ', digest="%s"' % entdig + if qop: + base += ', qop="auth", nc=%s, cnonce="%s"' % (ncvalue, cnonce) + + return 'Digest %s' % (base) + + def handle_redirect(self, r, **kwargs): + """Reset num_401_calls counter on redirects.""" + if r.is_redirect: + self._thread_local.num_401_calls = 1 + + def handle_401(self, r, **kwargs): + """ + Takes the given response and tries digest-auth, if needed. + + :rtype: requests.Response + """ + + # If response is not 4xx, do not auth + # See https://github.com/requests/requests/issues/3772 + if not 400 <= r.status_code < 500: + self._thread_local.num_401_calls = 1 + return r + + if self._thread_local.pos is not None: + # Rewind the file position indicator of the body to where + # it was to resend the request. + r.request.body.seek(self._thread_local.pos) + s_auth = r.headers.get('www-authenticate', '') + + if 'digest' in s_auth.lower() and self._thread_local.num_401_calls < 2: + + self._thread_local.num_401_calls += 1 + pat = re.compile(r'digest ', flags=re.IGNORECASE) + self._thread_local.chal = parse_dict_header(pat.sub('', s_auth, count=1)) + + # Consume content and release the original connection + # to allow our new request to reuse the same one. + r.content + r.close() + prep = r.request.copy() + extract_cookies_to_jar(prep._cookies, r.request, r.raw) + prep.prepare_cookies(prep._cookies) + + prep.headers['Authorization'] = self.build_digest_header( + prep.method, prep.url) + _r = r.connection.send(prep, **kwargs) + _r.history.append(r) + _r.request = prep + + return _r + + self._thread_local.num_401_calls = 1 + return r + + def __call__(self, r): + # Initialize per-thread state, if needed + self.init_per_thread_state() + # If we have a saved nonce, skip the 401 + if self._thread_local.last_nonce: + r.headers['Authorization'] = self.build_digest_header(r.method, r.url) + try: + self._thread_local.pos = r.body.tell() + except AttributeError: + # In the case of HTTPDigestAuth being reused and the body of + # the previous request was a file-like object, pos has the + # file position of the previous body. Ensure it's set to + # None. 
+ self._thread_local.pos = None + r.register_hook('response', self.handle_401) + r.register_hook('response', self.handle_redirect) + self._thread_local.num_401_calls = 1 + + return r + + def __eq__(self, other): + return all([ + self.username == getattr(other, 'username', None), + self.password == getattr(other, 'password', None) + ]) + + def __ne__(self, other): + return not self == other diff --git a/env/lib/python3.6/site-packages/requests/certs.py b/env/lib/python3.6/site-packages/requests/certs.py new file mode 100644 index 0000000..d1a378d --- /dev/null +++ b/env/lib/python3.6/site-packages/requests/certs.py @@ -0,0 +1,18 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +""" +requests.certs +~~~~~~~~~~~~~~ + +This module returns the preferred default CA certificate bundle. There is +only one — the one from the certifi package. + +If you are packaging Requests, e.g., for a Linux distribution or a managed +environment, you can change the definition of where() to return a separately +packaged CA bundle. +""" +from certifi import where + +if __name__ == '__main__': + print(where()) diff --git a/env/lib/python3.6/site-packages/requests/compat.py b/env/lib/python3.6/site-packages/requests/compat.py new file mode 100644 index 0000000..6b9c6fa --- /dev/null +++ b/env/lib/python3.6/site-packages/requests/compat.py @@ -0,0 +1,71 @@ +# -*- coding: utf-8 -*- + +""" +requests.compat +~~~~~~~~~~~~~~~ + +This module handles import compatibility issues between Python 2 and +Python 3. +""" + +import chardet + +import sys + +# ------- +# Pythons +# ------- + +# Syntax sugar. +_ver = sys.version_info + +#: Python 2.x? +is_py2 = (_ver[0] == 2) + +#: Python 3.x? +is_py3 = (_ver[0] == 3) + +try: + import simplejson as json +except ImportError: + import json + +# --------- +# Specifics +# --------- + +if is_py2: + from urllib import ( + quote, unquote, quote_plus, unquote_plus, urlencode, getproxies, + proxy_bypass, proxy_bypass_environment, getproxies_environment) + from urlparse import urlparse, urlunparse, urljoin, urlsplit, urldefrag + from urllib2 import parse_http_list + import cookielib + from Cookie import Morsel + from StringIO import StringIO + from collections import Callable, Mapping, MutableMapping + + from urllib3.packages.ordered_dict import OrderedDict + + builtin_str = str + bytes = str + str = unicode + basestring = basestring + numeric_types = (int, long, float) + integer_types = (int, long) + +elif is_py3: + from urllib.parse import urlparse, urlunparse, urljoin, urlsplit, urlencode, quote, unquote, quote_plus, unquote_plus, urldefrag + from urllib.request import parse_http_list, getproxies, proxy_bypass, proxy_bypass_environment, getproxies_environment + from http import cookiejar as cookielib + from http.cookies import Morsel + from io import StringIO + from collections import OrderedDict + from collections.abc import Callable, Mapping, MutableMapping + + builtin_str = str + str = str + bytes = bytes + basestring = (str, bytes) + numeric_types = (int, float) + integer_types = (int,) diff --git a/env/lib/python3.6/site-packages/requests/cookies.py b/env/lib/python3.6/site-packages/requests/cookies.py new file mode 100644 index 0000000..50883a8 --- /dev/null +++ b/env/lib/python3.6/site-packages/requests/cookies.py @@ -0,0 +1,546 @@ +# -*- coding: utf-8 -*- + +""" +requests.cookies +~~~~~~~~~~~~~~~~ + +Compatibility code to be able to use `cookielib.CookieJar` with requests. + +requests.utils imports from here, so be careful with imports. 
+""" + +import copy +import time +import calendar + +from ._internal_utils import to_native_string +from .compat import cookielib, urlparse, urlunparse, Morsel, MutableMapping + +try: + import threading +except ImportError: + import dummy_threading as threading + + +class MockRequest(object): + """Wraps a `requests.Request` to mimic a `urllib2.Request`. + + The code in `cookielib.CookieJar` expects this interface in order to correctly + manage cookie policies, i.e., determine whether a cookie can be set, given the + domains of the request and the cookie. + + The original request object is read-only. The client is responsible for collecting + the new headers via `get_new_headers()` and interpreting them appropriately. You + probably want `get_cookie_header`, defined below. + """ + + def __init__(self, request): + self._r = request + self._new_headers = {} + self.type = urlparse(self._r.url).scheme + + def get_type(self): + return self.type + + def get_host(self): + return urlparse(self._r.url).netloc + + def get_origin_req_host(self): + return self.get_host() + + def get_full_url(self): + # Only return the response's URL if the user hadn't set the Host + # header + if not self._r.headers.get('Host'): + return self._r.url + # If they did set it, retrieve it and reconstruct the expected domain + host = to_native_string(self._r.headers['Host'], encoding='utf-8') + parsed = urlparse(self._r.url) + # Reconstruct the URL as we expect it + return urlunparse([ + parsed.scheme, host, parsed.path, parsed.params, parsed.query, + parsed.fragment + ]) + + def is_unverifiable(self): + return True + + def has_header(self, name): + return name in self._r.headers or name in self._new_headers + + def get_header(self, name, default=None): + return self._r.headers.get(name, self._new_headers.get(name, default)) + + def add_header(self, key, val): + """cookielib has no legitimate use for this method; add it back if you find one.""" + raise NotImplementedError("Cookie headers should be added with add_unredirected_header()") + + def add_unredirected_header(self, name, value): + self._new_headers[name] = value + + def get_new_headers(self): + return self._new_headers + + @property + def unverifiable(self): + return self.is_unverifiable() + + @property + def origin_req_host(self): + return self.get_origin_req_host() + + @property + def host(self): + return self.get_host() + + +class MockResponse(object): + """Wraps a `httplib.HTTPMessage` to mimic a `urllib.addinfourl`. + + ...what? Basically, expose the parsed HTTP headers from the server response + the way `cookielib` expects to see them. + """ + + def __init__(self, headers): + """Make a MockResponse for `cookielib` to read. + + :param headers: a httplib.HTTPMessage or analogous carrying the headers + """ + self._headers = headers + + def info(self): + return self._headers + + def getheaders(self, name): + self._headers.getheaders(name) + + +def extract_cookies_to_jar(jar, request, response): + """Extract the cookies from the response into a CookieJar. 
+ + :param jar: cookielib.CookieJar (not necessarily a RequestsCookieJar) + :param request: our own requests.Request object + :param response: urllib3.HTTPResponse object + """ + if not (hasattr(response, '_original_response') and + response._original_response): + return + # the _original_response field is the wrapped httplib.HTTPResponse object, + req = MockRequest(request) + # pull out the HTTPMessage with the headers and put it in the mock: + res = MockResponse(response._original_response.msg) + jar.extract_cookies(res, req) + + +def get_cookie_header(jar, request): + """ + Produce an appropriate Cookie header string to be sent with `request`, or None. + + :rtype: str + """ + r = MockRequest(request) + jar.add_cookie_header(r) + return r.get_new_headers().get('Cookie') + + +def remove_cookie_by_name(cookiejar, name, domain=None, path=None): + """Unsets a cookie by name, by default over all domains and paths. + + Wraps CookieJar.clear(), is O(n). + """ + clearables = [] + for cookie in cookiejar: + if cookie.name != name: + continue + if domain is not None and domain != cookie.domain: + continue + if path is not None and path != cookie.path: + continue + clearables.append((cookie.domain, cookie.path, cookie.name)) + + for domain, path, name in clearables: + cookiejar.clear(domain, path, name) + + +class CookieConflictError(RuntimeError): + """There are two cookies that meet the criteria specified in the cookie jar. + Use .get and .set and include domain and path args in order to be more specific. + """ + + +class RequestsCookieJar(cookielib.CookieJar, MutableMapping): + """Compatibility class; is a cookielib.CookieJar, but exposes a dict + interface. + + This is the CookieJar we create by default for requests and sessions that + don't specify one, since some clients may expect response.cookies and + session.cookies to support dict operations. + + Requests does not use the dict interface internally; it's just for + compatibility with external client code. All requests code should work + out of the box with externally provided instances of ``CookieJar``, e.g. + ``LWPCookieJar`` and ``FileCookieJar``. + + Unlike a regular CookieJar, this class is pickleable. + + .. warning:: dictionary operations that are normally O(1) may be O(n). + """ + + def get(self, name, default=None, domain=None, path=None): + """Dict-like get() that also supports optional domain and path args in + order to resolve naming collisions from using one cookie jar over + multiple domains. + + .. warning:: operation is O(n), not O(1). + """ + try: + return self._find_no_duplicates(name, domain, path) + except KeyError: + return default + + def set(self, name, value, **kwargs): + """Dict-like set() that also supports optional domain and path args in + order to resolve naming collisions from using one cookie jar over + multiple domains. + """ + # support client code that unsets cookies by assignment of a None value: + if value is None: + remove_cookie_by_name(self, name, domain=kwargs.get('domain'), path=kwargs.get('path')) + return + + if isinstance(value, Morsel): + c = morsel_to_cookie(value) + else: + c = create_cookie(name, value, **kwargs) + self.set_cookie(c) + return c + + def iterkeys(self): + """Dict-like iterkeys() that returns an iterator of names of cookies + from the jar. + + .. seealso:: itervalues() and iteritems(). + """ + for cookie in iter(self): + yield cookie.name + + def keys(self): + """Dict-like keys() that returns a list of names of cookies from the + jar. + + .. seealso:: values() and items(). 
+ """ + return list(self.iterkeys()) + + def itervalues(self): + """Dict-like itervalues() that returns an iterator of values of cookies + from the jar. + + .. seealso:: iterkeys() and iteritems(). + """ + for cookie in iter(self): + yield cookie.value + + def values(self): + """Dict-like values() that returns a list of values of cookies from the + jar. + + .. seealso:: keys() and items(). + """ + return list(self.itervalues()) + + def iteritems(self): + """Dict-like iteritems() that returns an iterator of name-value tuples + from the jar. + + .. seealso:: iterkeys() and itervalues(). + """ + for cookie in iter(self): + yield cookie.name, cookie.value + + def items(self): + """Dict-like items() that returns a list of name-value tuples from the + jar. Allows client-code to call ``dict(RequestsCookieJar)`` and get a + vanilla python dict of key value pairs. + + .. seealso:: keys() and values(). + """ + return list(self.iteritems()) + + def list_domains(self): + """Utility method to list all the domains in the jar.""" + domains = [] + for cookie in iter(self): + if cookie.domain not in domains: + domains.append(cookie.domain) + return domains + + def list_paths(self): + """Utility method to list all the paths in the jar.""" + paths = [] + for cookie in iter(self): + if cookie.path not in paths: + paths.append(cookie.path) + return paths + + def multiple_domains(self): + """Returns True if there are multiple domains in the jar. + Returns False otherwise. + + :rtype: bool + """ + domains = [] + for cookie in iter(self): + if cookie.domain is not None and cookie.domain in domains: + return True + domains.append(cookie.domain) + return False # there is only one domain in jar + + def get_dict(self, domain=None, path=None): + """Takes as an argument an optional domain and path and returns a plain + old Python dict of name-value pairs of cookies that meet the + requirements. + + :rtype: dict + """ + dictionary = {} + for cookie in iter(self): + if ( + (domain is None or cookie.domain == domain) and + (path is None or cookie.path == path) + ): + dictionary[cookie.name] = cookie.value + return dictionary + + def __contains__(self, name): + try: + return super(RequestsCookieJar, self).__contains__(name) + except CookieConflictError: + return True + + def __getitem__(self, name): + """Dict-like __getitem__() for compatibility with client code. Throws + exception if there are more than one cookie with name. In that case, + use the more explicit get() method instead. + + .. warning:: operation is O(n), not O(1). + """ + return self._find_no_duplicates(name) + + def __setitem__(self, name, value): + """Dict-like __setitem__ for compatibility with client code. Throws + exception if there is already a cookie of that name in the jar. In that + case, use the more explicit set() method instead. + """ + self.set(name, value) + + def __delitem__(self, name): + """Deletes a cookie given a name. Wraps ``cookielib.CookieJar``'s + ``remove_cookie_by_name()``. 
+ """ + remove_cookie_by_name(self, name) + + def set_cookie(self, cookie, *args, **kwargs): + if hasattr(cookie.value, 'startswith') and cookie.value.startswith('"') and cookie.value.endswith('"'): + cookie.value = cookie.value.replace('\\"', '') + return super(RequestsCookieJar, self).set_cookie(cookie, *args, **kwargs) + + def update(self, other): + """Updates this jar with cookies from another CookieJar or dict-like""" + if isinstance(other, cookielib.CookieJar): + for cookie in other: + self.set_cookie(copy.copy(cookie)) + else: + super(RequestsCookieJar, self).update(other) + + def _find(self, name, domain=None, path=None): + """Requests uses this method internally to get cookie values. + + If there are conflicting cookies, _find arbitrarily chooses one. + See _find_no_duplicates if you want an exception thrown if there are + conflicting cookies. + + :param name: a string containing name of cookie + :param domain: (optional) string containing domain of cookie + :param path: (optional) string containing path of cookie + :return: cookie.value + """ + for cookie in iter(self): + if cookie.name == name: + if domain is None or cookie.domain == domain: + if path is None or cookie.path == path: + return cookie.value + + raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path)) + + def _find_no_duplicates(self, name, domain=None, path=None): + """Both ``__get_item__`` and ``get`` call this function: it's never + used elsewhere in Requests. + + :param name: a string containing name of cookie + :param domain: (optional) string containing domain of cookie + :param path: (optional) string containing path of cookie + :raises KeyError: if cookie is not found + :raises CookieConflictError: if there are multiple cookies + that match name and optionally domain and path + :return: cookie.value + """ + toReturn = None + for cookie in iter(self): + if cookie.name == name: + if domain is None or cookie.domain == domain: + if path is None or cookie.path == path: + if toReturn is not None: # if there are multiple cookies that meet passed in criteria + raise CookieConflictError('There are multiple cookies with name, %r' % (name)) + toReturn = cookie.value # we will eventually return this as long as no cookie conflict + + if toReturn: + return toReturn + raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path)) + + def __getstate__(self): + """Unlike a normal CookieJar, this class is pickleable.""" + state = self.__dict__.copy() + # remove the unpickleable RLock object + state.pop('_cookies_lock') + return state + + def __setstate__(self, state): + """Unlike a normal CookieJar, this class is pickleable.""" + self.__dict__.update(state) + if '_cookies_lock' not in self.__dict__: + self._cookies_lock = threading.RLock() + + def copy(self): + """Return a copy of this RequestsCookieJar.""" + new_cj = RequestsCookieJar() + new_cj.set_policy(self.get_policy()) + new_cj.update(self) + return new_cj + + def get_policy(self): + """Return the CookiePolicy instance used.""" + return self._policy + + +def _copy_cookie_jar(jar): + if jar is None: + return None + + if hasattr(jar, 'copy'): + # We're dealing with an instance of RequestsCookieJar + return jar.copy() + # We're dealing with a generic CookieJar instance + new_jar = copy.copy(jar) + new_jar.clear() + for cookie in jar: + new_jar.set_cookie(copy.copy(cookie)) + return new_jar + + +def create_cookie(name, value, **kwargs): + """Make a cookie from underspecified parameters. 
+ + By default, the pair of `name` and `value` will be set for the domain '' + and sent on every request (this is sometimes called a "supercookie"). + """ + result = dict( + version=0, + name=name, + value=value, + port=None, + domain='', + path='/', + secure=False, + expires=None, + discard=True, + comment=None, + comment_url=None, + rest={'HttpOnly': None}, + rfc2109=False,) + + badargs = set(kwargs) - set(result) + if badargs: + err = 'create_cookie() got unexpected keyword arguments: %s' + raise TypeError(err % list(badargs)) + + result.update(kwargs) + result['port_specified'] = bool(result['port']) + result['domain_specified'] = bool(result['domain']) + result['domain_initial_dot'] = result['domain'].startswith('.') + result['path_specified'] = bool(result['path']) + + return cookielib.Cookie(**result) + + +def morsel_to_cookie(morsel): + """Convert a Morsel object into a Cookie containing the one k/v pair.""" + + expires = None + if morsel['max-age']: + try: + expires = int(time.time() + int(morsel['max-age'])) + except ValueError: + raise TypeError('max-age: %s must be integer' % morsel['max-age']) + elif morsel['expires']: + time_template = '%a, %d-%b-%Y %H:%M:%S GMT' + expires = calendar.timegm( + time.strptime(morsel['expires'], time_template) + ) + return create_cookie( + comment=morsel['comment'], + comment_url=bool(morsel['comment']), + discard=False, + domain=morsel['domain'], + expires=expires, + name=morsel.key, + path=morsel['path'], + port=None, + rest={'HttpOnly': morsel['httponly']}, + rfc2109=False, + secure=bool(morsel['secure']), + value=morsel.value, + version=morsel['version'] or 0, + ) + + +def cookiejar_from_dict(cookie_dict, cookiejar=None, overwrite=True): + """Returns a CookieJar from a key/value dictionary. + + :param cookie_dict: Dict of key/values to insert into CookieJar. + :param cookiejar: (optional) A cookiejar to add the cookies to. + :param overwrite: (optional) If False, will not replace cookies + already in the jar with new ones. + """ + if cookiejar is None: + cookiejar = RequestsCookieJar() + + if cookie_dict is not None: + names_from_jar = [cookie.name for cookie in cookiejar] + for name in cookie_dict: + if overwrite or (name not in names_from_jar): + cookiejar.set_cookie(create_cookie(name, cookie_dict[name])) + + return cookiejar + + +def merge_cookies(cookiejar, cookies): + """Add cookies to cookiejar and returns a merged CookieJar. + + :param cookiejar: CookieJar object to add the cookies to. + :param cookies: Dictionary or CookieJar object to be added. + """ + if not isinstance(cookiejar, cookielib.CookieJar): + raise ValueError('You can only merge into CookieJar') + + if isinstance(cookies, dict): + cookiejar = cookiejar_from_dict( + cookies, cookiejar=cookiejar, overwrite=False) + elif isinstance(cookies, cookielib.CookieJar): + try: + cookiejar.update(cookies) + except AttributeError: + for cookie_in_jar in cookies: + cookiejar.set_cookie(cookie_in_jar) + + return cookiejar diff --git a/env/lib/python3.6/site-packages/requests/exceptions.py b/env/lib/python3.6/site-packages/requests/exceptions.py new file mode 100644 index 0000000..a80cad8 --- /dev/null +++ b/env/lib/python3.6/site-packages/requests/exceptions.py @@ -0,0 +1,126 @@ +# -*- coding: utf-8 -*- + +""" +requests.exceptions +~~~~~~~~~~~~~~~~~~~ + +This module contains the set of Requests' exceptions. 
+""" +from urllib3.exceptions import HTTPError as BaseHTTPError + + +class RequestException(IOError): + """There was an ambiguous exception that occurred while handling your + request. + """ + + def __init__(self, *args, **kwargs): + """Initialize RequestException with `request` and `response` objects.""" + response = kwargs.pop('response', None) + self.response = response + self.request = kwargs.pop('request', None) + if (response is not None and not self.request and + hasattr(response, 'request')): + self.request = self.response.request + super(RequestException, self).__init__(*args, **kwargs) + + +class HTTPError(RequestException): + """An HTTP error occurred.""" + + +class ConnectionError(RequestException): + """A Connection error occurred.""" + + +class ProxyError(ConnectionError): + """A proxy error occurred.""" + + +class SSLError(ConnectionError): + """An SSL error occurred.""" + + +class Timeout(RequestException): + """The request timed out. + + Catching this error will catch both + :exc:`~requests.exceptions.ConnectTimeout` and + :exc:`~requests.exceptions.ReadTimeout` errors. + """ + + +class ConnectTimeout(ConnectionError, Timeout): + """The request timed out while trying to connect to the remote server. + + Requests that produced this error are safe to retry. + """ + + +class ReadTimeout(Timeout): + """The server did not send any data in the allotted amount of time.""" + + +class URLRequired(RequestException): + """A valid URL is required to make a request.""" + + +class TooManyRedirects(RequestException): + """Too many redirects.""" + + +class MissingSchema(RequestException, ValueError): + """The URL schema (e.g. http or https) is missing.""" + + +class InvalidSchema(RequestException, ValueError): + """See defaults.py for valid schemas.""" + + +class InvalidURL(RequestException, ValueError): + """The URL provided was somehow invalid.""" + + +class InvalidHeader(RequestException, ValueError): + """The header value provided was somehow invalid.""" + + +class InvalidProxyURL(InvalidURL): + """The proxy URL provided is invalid.""" + + +class ChunkedEncodingError(RequestException): + """The server declared chunked encoding but sent an invalid chunk.""" + + +class ContentDecodingError(RequestException, BaseHTTPError): + """Failed to decode response content""" + + +class StreamConsumedError(RequestException, TypeError): + """The content for this response was already consumed""" + + +class RetryError(RequestException): + """Custom retries logic failed""" + + +class UnrewindableBodyError(RequestException): + """Requests encountered an error when trying to rewind a body""" + +# Warnings + + +class RequestsWarning(Warning): + """Base warning for Requests.""" + pass + + +class FileModeWarning(RequestsWarning, DeprecationWarning): + """A file was opened in text mode, but Requests determined its binary length.""" + pass + + +class RequestsDependencyWarning(RequestsWarning): + """An imported dependency doesn't match the expected version range.""" + pass diff --git a/env/lib/python3.6/site-packages/requests/help.py b/env/lib/python3.6/site-packages/requests/help.py new file mode 100644 index 0000000..06e06b2 --- /dev/null +++ b/env/lib/python3.6/site-packages/requests/help.py @@ -0,0 +1,120 @@ +"""Module containing bug report helper(s).""" +from __future__ import print_function + +import json +import platform +import sys +import ssl + +import idna +import urllib3 +import chardet + +from . 
import __version__ as requests_version + +try: + from urllib3.contrib import pyopenssl +except ImportError: + pyopenssl = None + OpenSSL = None + cryptography = None +else: + import OpenSSL + import cryptography + + +def _implementation(): + """Return a dict with the Python implementation and version. + + Provide both the name and the version of the Python implementation + currently running. For example, on CPython 2.7.5 it will return + {'name': 'CPython', 'version': '2.7.5'}. + + This function works best on CPython and PyPy: in particular, it probably + doesn't work for Jython or IronPython. Future investigation should be done + to work out the correct shape of the code for those platforms. + """ + implementation = platform.python_implementation() + + if implementation == 'CPython': + implementation_version = platform.python_version() + elif implementation == 'PyPy': + implementation_version = '%s.%s.%s' % (sys.pypy_version_info.major, + sys.pypy_version_info.minor, + sys.pypy_version_info.micro) + if sys.pypy_version_info.releaselevel != 'final': + implementation_version = ''.join([ + implementation_version, sys.pypy_version_info.releaselevel + ]) + elif implementation == 'Jython': + implementation_version = platform.python_version() # Complete Guess + elif implementation == 'IronPython': + implementation_version = platform.python_version() # Complete Guess + else: + implementation_version = 'Unknown' + + return {'name': implementation, 'version': implementation_version} + + +def info(): + """Generate information for a bug report.""" + try: + platform_info = { + 'system': platform.system(), + 'release': platform.release(), + } + except IOError: + platform_info = { + 'system': 'Unknown', + 'release': 'Unknown', + } + + implementation_info = _implementation() + urllib3_info = {'version': urllib3.__version__} + chardet_info = {'version': chardet.__version__} + + pyopenssl_info = { + 'version': None, + 'openssl_version': '', + } + if OpenSSL: + pyopenssl_info = { + 'version': OpenSSL.__version__, + 'openssl_version': '%x' % OpenSSL.SSL.OPENSSL_VERSION_NUMBER, + } + cryptography_info = { + 'version': getattr(cryptography, '__version__', ''), + } + idna_info = { + 'version': getattr(idna, '__version__', ''), + } + + # OPENSSL_VERSION_NUMBER doesn't exist in the Python 2.6 ssl module. + system_ssl = getattr(ssl, 'OPENSSL_VERSION_NUMBER', None) + system_ssl_info = { + 'version': '%x' % system_ssl if system_ssl is not None else '' + } + + return { + 'platform': platform_info, + 'implementation': implementation_info, + 'system_ssl': system_ssl_info, + 'using_pyopenssl': pyopenssl is not None, + 'pyOpenSSL': pyopenssl_info, + 'urllib3': urllib3_info, + 'chardet': chardet_info, + 'cryptography': cryptography_info, + 'idna': idna_info, + 'requests': { + 'version': requests_version, + }, + } + + +def main(): + """Pretty-print the bug information as JSON.""" + print(json.dumps(info(), sort_keys=True, indent=2)) + + +if __name__ == '__main__': + main() diff --git a/env/lib/python3.6/site-packages/requests/hooks.py b/env/lib/python3.6/site-packages/requests/hooks.py new file mode 100644 index 0000000..32b32de --- /dev/null +++ b/env/lib/python3.6/site-packages/requests/hooks.py @@ -0,0 +1,34 @@ +# -*- coding: utf-8 -*- + +""" +requests.hooks +~~~~~~~~~~~~~~ + +This module provides the capabilities for the Requests hooks system. + +Available hooks: + +``response``: + The response generated from a Request. 
+""" +HOOKS = ['response'] + + +def default_hooks(): + return dict((event, []) for event in HOOKS) + +# TODO: response is the only one + + +def dispatch_hook(key, hooks, hook_data, **kwargs): + """Dispatches a hook dictionary on a given piece of data.""" + hooks = hooks or dict() + hooks = hooks.get(key) + if hooks: + if hasattr(hooks, '__call__'): + hooks = [hooks] + for hook in hooks: + _hook_data = hook(hook_data, **kwargs) + if _hook_data is not None: + hook_data = _hook_data + return hook_data diff --git a/env/lib/python3.6/site-packages/requests/models.py b/env/lib/python3.6/site-packages/requests/models.py new file mode 100644 index 0000000..3d0e1f4 --- /dev/null +++ b/env/lib/python3.6/site-packages/requests/models.py @@ -0,0 +1,952 @@ +# -*- coding: utf-8 -*- + +""" +requests.models +~~~~~~~~~~~~~~~ + +This module contains the primary objects that power Requests. +""" + +import datetime +import sys + +# Import encoding now, to avoid implicit import later. +# Implicit import within threads may cause LookupError when standard library is in a ZIP, +# such as in Embedded Python. See https://github.com/requests/requests/issues/3578. +import encodings.idna + +from urllib3.fields import RequestField +from urllib3.filepost import encode_multipart_formdata +from urllib3.util import parse_url +from urllib3.exceptions import ( + DecodeError, ReadTimeoutError, ProtocolError, LocationParseError) + +from io import UnsupportedOperation +from .hooks import default_hooks +from .structures import CaseInsensitiveDict + +from .auth import HTTPBasicAuth +from .cookies import cookiejar_from_dict, get_cookie_header, _copy_cookie_jar +from .exceptions import ( + HTTPError, MissingSchema, InvalidURL, ChunkedEncodingError, + ContentDecodingError, ConnectionError, StreamConsumedError) +from ._internal_utils import to_native_string, unicode_is_ascii +from .utils import ( + guess_filename, get_auth_from_url, requote_uri, + stream_decode_response_unicode, to_key_val_list, parse_header_links, + iter_slices, guess_json_utf, super_len, check_header_validity) +from .compat import ( + Callable, Mapping, + cookielib, urlunparse, urlsplit, urlencode, str, bytes, + is_py2, chardet, builtin_str, basestring) +from .compat import json as complexjson +from .status_codes import codes + +#: The set of HTTP status codes that indicate an automatically +#: processable redirect. +REDIRECT_STATI = ( + codes.moved, # 301 + codes.found, # 302 + codes.other, # 303 + codes.temporary_redirect, # 307 + codes.permanent_redirect, # 308 +) + +DEFAULT_REDIRECT_LIMIT = 30 +CONTENT_CHUNK_SIZE = 10 * 1024 +ITER_CHUNK_SIZE = 512 + + +class RequestEncodingMixin(object): + @property + def path_url(self): + """Build the path URL to use.""" + + url = [] + + p = urlsplit(self.url) + + path = p.path + if not path: + path = '/' + + url.append(path) + + query = p.query + if query: + url.append('?') + url.append(query) + + return ''.join(url) + + @staticmethod + def _encode_params(data): + """Encode parameters in a piece of data. + + Will successfully encode parameters when passed as a dict or a list of + 2-tuples. Order is retained if data is a list of 2-tuples but arbitrary + if parameters are supplied as a dict. 
+ """ + + if isinstance(data, (str, bytes)): + return data + elif hasattr(data, 'read'): + return data + elif hasattr(data, '__iter__'): + result = [] + for k, vs in to_key_val_list(data): + if isinstance(vs, basestring) or not hasattr(vs, '__iter__'): + vs = [vs] + for v in vs: + if v is not None: + result.append( + (k.encode('utf-8') if isinstance(k, str) else k, + v.encode('utf-8') if isinstance(v, str) else v)) + return urlencode(result, doseq=True) + else: + return data + + @staticmethod + def _encode_files(files, data): + """Build the body for a multipart/form-data request. + + Will successfully encode files when passed as a dict or a list of + tuples. Order is retained if data is a list of tuples but arbitrary + if parameters are supplied as a dict. + The tuples may be 2-tuples (filename, fileobj), 3-tuples (filename, fileobj, contentype) + or 4-tuples (filename, fileobj, contentype, custom_headers). + """ + if (not files): + raise ValueError("Files must be provided.") + elif isinstance(data, basestring): + raise ValueError("Data must not be a string.") + + new_fields = [] + fields = to_key_val_list(data or {}) + files = to_key_val_list(files or {}) + + for field, val in fields: + if isinstance(val, basestring) or not hasattr(val, '__iter__'): + val = [val] + for v in val: + if v is not None: + # Don't call str() on bytestrings: in Py3 it all goes wrong. + if not isinstance(v, bytes): + v = str(v) + + new_fields.append( + (field.decode('utf-8') if isinstance(field, bytes) else field, + v.encode('utf-8') if isinstance(v, str) else v)) + + for (k, v) in files: + # support for explicit filename + ft = None + fh = None + if isinstance(v, (tuple, list)): + if len(v) == 2: + fn, fp = v + elif len(v) == 3: + fn, fp, ft = v + else: + fn, fp, ft, fh = v + else: + fn = guess_filename(v) or k + fp = v + + if isinstance(fp, (str, bytes, bytearray)): + fdata = fp + elif hasattr(fp, 'read'): + fdata = fp.read() + elif fp is None: + continue + else: + fdata = fp + + rf = RequestField(name=k, data=fdata, filename=fn, headers=fh) + rf.make_multipart(content_type=ft) + new_fields.append(rf) + + body, content_type = encode_multipart_formdata(new_fields) + + return body, content_type + + +class RequestHooksMixin(object): + def register_hook(self, event, hook): + """Properly register a hook.""" + + if event not in self.hooks: + raise ValueError('Unsupported event specified, with event name "%s"' % (event)) + + if isinstance(hook, Callable): + self.hooks[event].append(hook) + elif hasattr(hook, '__iter__'): + self.hooks[event].extend(h for h in hook if isinstance(h, Callable)) + + def deregister_hook(self, event, hook): + """Deregister a previously registered hook. + Returns True if the hook existed, False if not. + """ + + try: + self.hooks[event].remove(hook) + return True + except ValueError: + return False + + +class Request(RequestHooksMixin): + """A user-created :class:`Request <Request>` object. + + Used to prepare a :class:`PreparedRequest <PreparedRequest>`, which is sent to the server. + + :param method: HTTP method to use. + :param url: URL to send. + :param headers: dictionary of headers to send. + :param files: dictionary of {filename: fileobject} files to multipart upload. + :param data: the body to attach to the request. If a dictionary is provided, form-encoding will take place. + :param json: json for the body to attach to the request (if files or data is not specified). + :param params: dictionary of URL parameters to append to the URL. + :param auth: Auth handler or (user, pass) tuple. 
+ :param cookies: dictionary or CookieJar of cookies to attach to this request. + :param hooks: dictionary of callback hooks, for internal usage. + + Usage:: + + >>> import requests + >>> req = requests.Request('GET', 'http://httpbin.org/get') + >>> req.prepare() + <PreparedRequest [GET]> + """ + + def __init__(self, + method=None, url=None, headers=None, files=None, data=None, + params=None, auth=None, cookies=None, hooks=None, json=None): + + # Default empty dicts for dict params. + data = [] if data is None else data + files = [] if files is None else files + headers = {} if headers is None else headers + params = {} if params is None else params + hooks = {} if hooks is None else hooks + + self.hooks = default_hooks() + for (k, v) in list(hooks.items()): + self.register_hook(event=k, hook=v) + + self.method = method + self.url = url + self.headers = headers + self.files = files + self.data = data + self.json = json + self.params = params + self.auth = auth + self.cookies = cookies + + def __repr__(self): + return '<Request [%s]>' % (self.method) + + def prepare(self): + """Constructs a :class:`PreparedRequest <PreparedRequest>` for transmission and returns it.""" + p = PreparedRequest() + p.prepare( + method=self.method, + url=self.url, + headers=self.headers, + files=self.files, + data=self.data, + json=self.json, + params=self.params, + auth=self.auth, + cookies=self.cookies, + hooks=self.hooks, + ) + return p + + +class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): + """The fully mutable :class:`PreparedRequest <PreparedRequest>` object, + containing the exact bytes that will be sent to the server. + + Generated from either a :class:`Request <Request>` object or manually. + + Usage:: + + >>> import requests + >>> req = requests.Request('GET', 'http://httpbin.org/get') + >>> r = req.prepare() + <PreparedRequest [GET]> + + >>> s = requests.Session() + >>> s.send(r) + <Response [200]> + """ + + def __init__(self): + #: HTTP verb to send to the server. + self.method = None + #: HTTP URL to send the request to. + self.url = None + #: dictionary of HTTP headers. + self.headers = None + # The `CookieJar` used to create the Cookie header will be stored here + # after prepare_cookies is called + self._cookies = None + #: request body to send to the server. + self.body = None + #: dictionary of callback hooks, for internal usage. + self.hooks = default_hooks() + #: integer denoting starting position of a readable file-like body. + self._body_position = None + + def prepare(self, + method=None, url=None, headers=None, files=None, data=None, + params=None, auth=None, cookies=None, hooks=None, json=None): + """Prepares the entire request with the given parameters.""" + + self.prepare_method(method) + self.prepare_url(url, params) + self.prepare_headers(headers) + self.prepare_cookies(cookies) + self.prepare_body(data, files, json) + self.prepare_auth(auth, url) + + # Note that prepare_auth must be last to enable authentication schemes + # such as OAuth to work on a fully prepared request. + + # This MUST go after prepare_auth. 
Authenticators could add a hook + self.prepare_hooks(hooks) + + def __repr__(self): + return '<PreparedRequest [%s]>' % (self.method) + + def copy(self): + p = PreparedRequest() + p.method = self.method + p.url = self.url + p.headers = self.headers.copy() if self.headers is not None else None + p._cookies = _copy_cookie_jar(self._cookies) + p.body = self.body + p.hooks = self.hooks + p._body_position = self._body_position + return p + + def prepare_method(self, method): + """Prepares the given HTTP method.""" + self.method = method + if self.method is not None: + self.method = to_native_string(self.method.upper()) + + @staticmethod + def _get_idna_encoded_host(host): + import idna + + try: + host = idna.encode(host, uts46=True).decode('utf-8') + except idna.IDNAError: + raise UnicodeError + return host + + def prepare_url(self, url, params): + """Prepares the given HTTP URL.""" + #: Accept objects that have string representations. + #: We're unable to blindly call unicode/str functions + #: as this will include the bytestring indicator (b'') + #: on python 3.x. + #: https://github.com/requests/requests/pull/2238 + if isinstance(url, bytes): + url = url.decode('utf8') + else: + url = unicode(url) if is_py2 else str(url) + + # Remove leading whitespaces from url + url = url.lstrip() + + # Don't do any URL preparation for non-HTTP schemes like `mailto`, + # `data` etc to work around exceptions from `url_parse`, which + # handles RFC 3986 only. + if ':' in url and not url.lower().startswith('http'): + self.url = url + return + + # Support for unicode domain names and paths. + try: + scheme, auth, host, port, path, query, fragment = parse_url(url) + except LocationParseError as e: + raise InvalidURL(*e.args) + + if not scheme: + error = ("Invalid URL {0!r}: No schema supplied. Perhaps you meant http://{0}?") + error = error.format(to_native_string(url, 'utf8')) + + raise MissingSchema(error) + + if not host: + raise InvalidURL("Invalid URL %r: No host supplied" % url) + + # In general, we want to try IDNA encoding the hostname if the string contains + # non-ASCII characters. This allows users to automatically get the correct IDNA + # behaviour. For strings containing only ASCII characters, we need to also verify + # it doesn't start with a wildcard (*), before allowing the unencoded hostname. + if not unicode_is_ascii(host): + try: + host = self._get_idna_encoded_host(host) + except UnicodeError: + raise InvalidURL('URL has an invalid label.') + elif host.startswith(u'*'): + raise InvalidURL('URL has an invalid label.') + + # Carefully reconstruct the network location + netloc = auth or '' + if netloc: + netloc += '@' + netloc += host + if port: + netloc += ':' + str(port) + + # Bare domains aren't valid URLs. 
+ if not path: + path = '/' + + if is_py2: + if isinstance(scheme, str): + scheme = scheme.encode('utf-8') + if isinstance(netloc, str): + netloc = netloc.encode('utf-8') + if isinstance(path, str): + path = path.encode('utf-8') + if isinstance(query, str): + query = query.encode('utf-8') + if isinstance(fragment, str): + fragment = fragment.encode('utf-8') + + if isinstance(params, (str, bytes)): + params = to_native_string(params) + + enc_params = self._encode_params(params) + if enc_params: + if query: + query = '%s&%s' % (query, enc_params) + else: + query = enc_params + + url = requote_uri(urlunparse([scheme, netloc, path, None, query, fragment])) + self.url = url + + def prepare_headers(self, headers): + """Prepares the given HTTP headers.""" + + self.headers = CaseInsensitiveDict() + if headers: + for header in headers.items(): + # Raise exception on invalid header value. + check_header_validity(header) + name, value = header + self.headers[to_native_string(name)] = value + + def prepare_body(self, data, files, json=None): + """Prepares the given HTTP body data.""" + + # Check if file, fo, generator, iterator. + # If not, run through normal process. + + # Nottin' on you. + body = None + content_type = None + + if not data and json is not None: + # urllib3 requires a bytes-like body. Python 2's json.dumps + # provides this natively, but Python 3 gives a Unicode string. + content_type = 'application/json' + body = complexjson.dumps(json) + if not isinstance(body, bytes): + body = body.encode('utf-8') + + is_stream = all([ + hasattr(data, '__iter__'), + not isinstance(data, (basestring, list, tuple, Mapping)) + ]) + + try: + length = super_len(data) + except (TypeError, AttributeError, UnsupportedOperation): + length = None + + if is_stream: + body = data + + if getattr(body, 'tell', None) is not None: + # Record the current file position before reading. + # This will allow us to rewind a file in the event + # of a redirect. + try: + self._body_position = body.tell() + except (IOError, OSError): + # This differentiates from None, allowing us to catch + # a failed `tell()` later when trying to rewind the body + self._body_position = object() + + if files: + raise NotImplementedError('Streamed bodies and files are mutually exclusive.') + + if length: + self.headers['Content-Length'] = builtin_str(length) + else: + self.headers['Transfer-Encoding'] = 'chunked' + else: + # Multi-part file uploads. + if files: + (body, content_type) = self._encode_files(files, data) + else: + if data: + body = self._encode_params(data) + if isinstance(data, basestring) or hasattr(data, 'read'): + content_type = None + else: + content_type = 'application/x-www-form-urlencoded' + + self.prepare_content_length(body) + + # Add content-type if it wasn't explicitly provided. + if content_type and ('content-type' not in self.headers): + self.headers['Content-Type'] = content_type + + self.body = body + + def prepare_content_length(self, body): + """Prepare Content-Length header based on request method and body""" + if body is not None: + length = super_len(body) + if length: + # If length exists, set it. Otherwise, we fallback + # to Transfer-Encoding: chunked. + self.headers['Content-Length'] = builtin_str(length) + elif self.method not in ('GET', 'HEAD') and self.headers.get('Content-Length') is None: + # Set Content-Length to 0 for methods that can have a body + # but don't provide one. (i.e. 
not GET or HEAD) + self.headers['Content-Length'] = '0' + + def prepare_auth(self, auth, url=''): + """Prepares the given HTTP auth data.""" + + # If no Auth is explicitly provided, extract it from the URL first. + if auth is None: + url_auth = get_auth_from_url(self.url) + auth = url_auth if any(url_auth) else None + + if auth: + if isinstance(auth, tuple) and len(auth) == 2: + # special-case basic HTTP auth + auth = HTTPBasicAuth(*auth) + + # Allow auth to make its changes. + r = auth(self) + + # Update self to reflect the auth changes. + self.__dict__.update(r.__dict__) + + # Recompute Content-Length + self.prepare_content_length(self.body) + + def prepare_cookies(self, cookies): + """Prepares the given HTTP cookie data. + + This function eventually generates a ``Cookie`` header from the + given cookies using cookielib. Due to cookielib's design, the header + will not be regenerated if it already exists, meaning this function + can only be called once for the life of the + :class:`PreparedRequest <PreparedRequest>` object. Any subsequent calls + to ``prepare_cookies`` will have no actual effect, unless the "Cookie" + header is removed beforehand. + """ + if isinstance(cookies, cookielib.CookieJar): + self._cookies = cookies + else: + self._cookies = cookiejar_from_dict(cookies) + + cookie_header = get_cookie_header(self._cookies, self) + if cookie_header is not None: + self.headers['Cookie'] = cookie_header + + def prepare_hooks(self, hooks): + """Prepares the given hooks.""" + # hooks can be passed as None to the prepare method and to this + # method. To prevent iterating over None, simply use an empty list + # if hooks is False-y + hooks = hooks or [] + for event in hooks: + self.register_hook(event, hooks[event]) + + +class Response(object): + """The :class:`Response <Response>` object, which contains a + server's response to an HTTP request. + """ + + __attrs__ = [ + '_content', 'status_code', 'headers', 'url', 'history', + 'encoding', 'reason', 'cookies', 'elapsed', 'request' + ] + + def __init__(self): + self._content = False + self._content_consumed = False + self._next = None + + #: Integer Code of responded HTTP Status, e.g. 404 or 200. + self.status_code = None + + #: Case-insensitive Dictionary of Response Headers. + #: For example, ``headers['content-encoding']`` will return the + #: value of a ``'Content-Encoding'`` response header. + self.headers = CaseInsensitiveDict() + + #: File-like object representation of response (for advanced usage). + #: Use of ``raw`` requires that ``stream=True`` be set on the request. + # This requirement does not apply for use internally to Requests. + self.raw = None + + #: Final URL location of Response. + self.url = None + + #: Encoding to decode with when accessing r.text. + self.encoding = None + + #: A list of :class:`Response <Response>` objects from + #: the history of the Request. Any redirect responses will end + #: up here. The list is sorted from the oldest to the most recent request. + self.history = [] + + #: Textual reason of responded HTTP Status, e.g. "Not Found" or "OK". + self.reason = None + + #: A CookieJar of Cookies the server sent back. + self.cookies = cookiejar_from_dict({}) + + #: The amount of time elapsed between sending the request + #: and the arrival of the response (as a timedelta). + #: This property specifically measures the time taken between sending + #: the first byte of the request and finishing parsing the headers. 
It + #: is therefore unaffected by consuming the response content or the + #: value of the ``stream`` keyword argument. + self.elapsed = datetime.timedelta(0) + + #: The :class:`PreparedRequest <PreparedRequest>` object to which this + #: is a response. + self.request = None + + def __enter__(self): + return self + + def __exit__(self, *args): + self.close() + + def __getstate__(self): + # Consume everything; accessing the content attribute makes + # sure the content has been fully read. + if not self._content_consumed: + self.content + + return dict( + (attr, getattr(self, attr, None)) + for attr in self.__attrs__ + ) + + def __setstate__(self, state): + for name, value in state.items(): + setattr(self, name, value) + + # pickled objects do not have .raw + setattr(self, '_content_consumed', True) + setattr(self, 'raw', None) + + def __repr__(self): + return '<Response [%s]>' % (self.status_code) + + def __bool__(self): + """Returns True if :attr:`status_code` is less than 400. + + This attribute checks if the status code of the response is between + 400 and 600 to see if there was a client error or a server error. If + the status code, is between 200 and 400, this will return True. This + is **not** a check to see if the response code is ``200 OK``. + """ + return self.ok + + def __nonzero__(self): + """Returns True if :attr:`status_code` is less than 400. + + This attribute checks if the status code of the response is between + 400 and 600 to see if there was a client error or a server error. If + the status code, is between 200 and 400, this will return True. This + is **not** a check to see if the response code is ``200 OK``. + """ + return self.ok + + def __iter__(self): + """Allows you to use a response as an iterator.""" + return self.iter_content(128) + + @property + def ok(self): + """Returns True if :attr:`status_code` is less than 400, False if not. + + This attribute checks if the status code of the response is between + 400 and 600 to see if there was a client error or a server error. If + the status code is between 200 and 400, this will return True. This + is **not** a check to see if the response code is ``200 OK``. + """ + try: + self.raise_for_status() + except HTTPError: + return False + return True + + @property + def is_redirect(self): + """True if this Response is a well-formed HTTP redirect that could have + been processed automatically (by :meth:`Session.resolve_redirects`). + """ + return ('location' in self.headers and self.status_code in REDIRECT_STATI) + + @property + def is_permanent_redirect(self): + """True if this Response one of the permanent versions of redirect.""" + return ('location' in self.headers and self.status_code in (codes.moved_permanently, codes.permanent_redirect)) + + @property + def next(self): + """Returns a PreparedRequest for the next request in a redirect chain, if there is one.""" + return self._next + + @property + def apparent_encoding(self): + """The apparent encoding, provided by the chardet library.""" + return chardet.detect(self.content)['encoding'] + + def iter_content(self, chunk_size=1, decode_unicode=False): + """Iterates over the response data. When stream=True is set on the + request, this avoids reading the content at once into memory for + large responses. The chunk size is the number of bytes it should + read into memory. This is not necessarily the length of each item + returned as decoding can take place. + + chunk_size must be of type int or None. 
A value of None will + function differently depending on the value of `stream`. + stream=True will read data as it arrives in whatever size the + chunks are received. If stream=False, data is returned as + a single chunk. + + If decode_unicode is True, content will be decoded using the best + available encoding based on the response. + """ + + def generate(): + # Special case for urllib3. + if hasattr(self.raw, 'stream'): + try: + for chunk in self.raw.stream(chunk_size, decode_content=True): + yield chunk + except ProtocolError as e: + raise ChunkedEncodingError(e) + except DecodeError as e: + raise ContentDecodingError(e) + except ReadTimeoutError as e: + raise ConnectionError(e) + else: + # Standard file-like object. + while True: + chunk = self.raw.read(chunk_size) + if not chunk: + break + yield chunk + + self._content_consumed = True + + if self._content_consumed and isinstance(self._content, bool): + raise StreamConsumedError() + elif chunk_size is not None and not isinstance(chunk_size, int): + raise TypeError("chunk_size must be an int, it is instead a %s." % type(chunk_size)) + # simulate reading small chunks of the content + reused_chunks = iter_slices(self._content, chunk_size) + + stream_chunks = generate() + + chunks = reused_chunks if self._content_consumed else stream_chunks + + if decode_unicode: + chunks = stream_decode_response_unicode(chunks, self) + + return chunks + + def iter_lines(self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=None, delimiter=None): + """Iterates over the response data, one line at a time. When + stream=True is set on the request, this avoids reading the + content at once into memory for large responses. + + .. note:: This method is not reentrant safe. + """ + + pending = None + + for chunk in self.iter_content(chunk_size=chunk_size, decode_unicode=decode_unicode): + + if pending is not None: + chunk = pending + chunk + + if delimiter: + lines = chunk.split(delimiter) + else: + lines = chunk.splitlines() + + if lines and lines[-1] and chunk and lines[-1][-1] == chunk[-1]: + pending = lines.pop() + else: + pending = None + + for line in lines: + yield line + + if pending is not None: + yield pending + + @property + def content(self): + """Content of the response, in bytes.""" + + if self._content is False: + # Read the contents. + if self._content_consumed: + raise RuntimeError( + 'The content for this response was already consumed') + + if self.status_code == 0 or self.raw is None: + self._content = None + else: + self._content = b''.join(self.iter_content(CONTENT_CHUNK_SIZE)) or b'' + + self._content_consumed = True + # don't need to release the connection; that's been handled by urllib3 + # since we exhausted the data. + return self._content + + @property + def text(self): + """Content of the response, in unicode. + + If Response.encoding is None, encoding will be guessed using + ``chardet``. + + The encoding of the response content is determined based solely on HTTP + headers, following RFC 2616 to the letter. If you can take advantage of + non-HTTP knowledge to make a better guess at the encoding, you should + set ``r.encoding`` appropriately before accessing this property. + """ + + # Try charset from content-type + content = None + encoding = self.encoding + + if not self.content: + return str('') + + # Fallback to auto-detected encoding. + if self.encoding is None: + encoding = self.apparent_encoding + + # Decode unicode from given encoding. 
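# Standalone usage sketch of the encoding fallback implemented here: when the
# headers carry no charset, r.encoding is None and r.text decodes with the
# chardet guess exposed as r.apparent_encoding. Assumes this vendored requests
# package is importable; the URL is illustrative only.
import requests

resp = requests.get('https://httpbin.org/encoding/utf8')
if resp.encoding is None:                    # no charset in the Content-Type header
    resp.encoding = resp.apparent_encoding   # pin the guessed encoding explicitly
body = resp.text                             # decoded using resp.encoding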
+ try: + content = str(self.content, encoding, errors='replace') + except (LookupError, TypeError): + # A LookupError is raised if the encoding was not found which could + # indicate a misspelling or similar mistake. + # + # A TypeError can be raised if encoding is None + # + # So we try blindly encoding. + content = str(self.content, errors='replace') + + return content + + def json(self, **kwargs): + r"""Returns the json-encoded content of a response, if any. + + :param \*\*kwargs: Optional arguments that ``json.loads`` takes. + :raises ValueError: If the response body does not contain valid json. + """ + + if not self.encoding and self.content and len(self.content) > 3: + # No encoding set. JSON RFC 4627 section 3 states we should expect + # UTF-8, -16 or -32. Detect which one to use; If the detection or + # decoding fails, fall back to `self.text` (using chardet to make + # a best guess). + encoding = guess_json_utf(self.content) + if encoding is not None: + try: + return complexjson.loads( + self.content.decode(encoding), **kwargs + ) + except UnicodeDecodeError: + # Wrong UTF codec detected; usually because it's not UTF-8 + # but some other 8-bit codec. This is an RFC violation, + # and the server didn't bother to tell us what codec *was* + # used. + pass + return complexjson.loads(self.text, **kwargs) + + @property + def links(self): + """Returns the parsed header links of the response, if any.""" + + header = self.headers.get('link') + + # l = MultiDict() + l = {} + + if header: + links = parse_header_links(header) + + for link in links: + key = link.get('rel') or link.get('url') + l[key] = link + + return l + + def raise_for_status(self): + """Raises stored :class:`HTTPError`, if one occurred.""" + + http_error_msg = '' + if isinstance(self.reason, bytes): + # We attempt to decode utf-8 first because some servers + # choose to localize their reason strings. If the string + # isn't utf-8, we fall back to iso-8859-1 for all other + # encodings. (See PR #3538) + try: + reason = self.reason.decode('utf-8') + except UnicodeDecodeError: + reason = self.reason.decode('iso-8859-1') + else: + reason = self.reason + + if 400 <= self.status_code < 500: + http_error_msg = u'%s Client Error: %s for url: %s' % (self.status_code, reason, self.url) + + elif 500 <= self.status_code < 600: + http_error_msg = u'%s Server Error: %s for url: %s' % (self.status_code, reason, self.url) + + if http_error_msg: + raise HTTPError(http_error_msg, response=self) + + def close(self): + """Releases the connection back to the pool. Once this method has been + called the underlying ``raw`` object must not be accessed again. + + *Note: Should not normally need to be called explicitly.* + """ + if not self._content_consumed: + self.raw.close() + + release_conn = getattr(self.raw, 'release_conn', None) + if release_conn is not None: + release_conn() diff --git a/env/lib/python3.6/site-packages/requests/packages.py b/env/lib/python3.6/site-packages/requests/packages.py new file mode 100644 index 0000000..7232fe0 --- /dev/null +++ b/env/lib/python3.6/site-packages/requests/packages.py @@ -0,0 +1,14 @@ +import sys + +# This code exists for backwards compatibility reasons. +# I don't like it either. Just look the other way. 
:) + +for package in ('urllib3', 'idna', 'chardet'): + locals()[package] = __import__(package) + # This traversal is apparently necessary such that the identities are + # preserved (requests.packages.urllib3.* is urllib3.*) + for mod in list(sys.modules): + if mod == package or mod.startswith(package + '.'): + sys.modules['requests.packages.' + mod] = sys.modules[mod] + +# Kinda cool, though, right? diff --git a/env/lib/python3.6/site-packages/requests/sessions.py b/env/lib/python3.6/site-packages/requests/sessions.py new file mode 100644 index 0000000..ba13526 --- /dev/null +++ b/env/lib/python3.6/site-packages/requests/sessions.py @@ -0,0 +1,741 @@ +# -*- coding: utf-8 -*- + +""" +requests.session +~~~~~~~~~~~~~~~~ + +This module provides a Session object to manage and persist settings across +requests (cookies, auth, proxies). +""" +import os +import sys +import time +from datetime import timedelta + +from .auth import _basic_auth_str +from .compat import cookielib, is_py3, OrderedDict, urljoin, urlparse, Mapping +from .cookies import ( + cookiejar_from_dict, extract_cookies_to_jar, RequestsCookieJar, merge_cookies) +from .models import Request, PreparedRequest, DEFAULT_REDIRECT_LIMIT +from .hooks import default_hooks, dispatch_hook +from ._internal_utils import to_native_string +from .utils import to_key_val_list, default_headers +from .exceptions import ( + TooManyRedirects, InvalidSchema, ChunkedEncodingError, ContentDecodingError) + +from .structures import CaseInsensitiveDict +from .adapters import HTTPAdapter + +from .utils import ( + requote_uri, get_environ_proxies, get_netrc_auth, should_bypass_proxies, + get_auth_from_url, rewind_body +) + +from .status_codes import codes + +# formerly defined here, reexposed here for backward compatibility +from .models import REDIRECT_STATI + +# Preferred clock, based on which one is more accurate on a given system. +if sys.platform == 'win32': + try: # Python 3.4+ + preferred_clock = time.perf_counter + except AttributeError: # Earlier than Python 3. + preferred_clock = time.clock +else: + preferred_clock = time.time + + +def merge_setting(request_setting, session_setting, dict_class=OrderedDict): + """Determines appropriate setting for a given request, taking into account + the explicit setting on that request, and the setting in the session. If a + setting is a dictionary, they will be merged together using `dict_class` + """ + + if session_setting is None: + return request_setting + + if request_setting is None: + return session_setting + + # Bypass if not a dictionary (e.g. verify) + if not ( + isinstance(session_setting, Mapping) and + isinstance(request_setting, Mapping) + ): + return request_setting + + merged_setting = dict_class(to_key_val_list(session_setting)) + merged_setting.update(to_key_val_list(request_setting)) + + # Remove keys that are set to None. Extract keys first to avoid altering + # the dictionary during iteration. + none_keys = [k for (k, v) in merged_setting.items() if v is None] + for key in none_keys: + del merged_setting[key] + + return merged_setting + + +def merge_hooks(request_hooks, session_hooks, dict_class=OrderedDict): + """Properly merges both requests and session hooks. + + This is necessary because when request_hooks == {'response': []}, the + merge breaks Session hooks entirely. 
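# Standalone usage sketch of merge_setting as documented above: per-request
# values win over Session defaults, and keys explicitly set to None are dropped
# from the merged mapping. Assumes this vendored requests package is importable;
# the header names and values are illustrative only.
from requests.sessions import merge_setting
from requests.structures import CaseInsensitiveDict

session_headers = CaseInsensitiveDict({'Accept': '*/*', 'X-Token': 'secret'})
request_headers = {'Accept': 'application/json', 'X-Token': None}

merged = merge_setting(request_headers, session_headers,
                       dict_class=CaseInsensitiveDict)
# merged == {'Accept': 'application/json'}: the request value wins and the
# None entry removes X-Token entirely.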
+ """ + if session_hooks is None or session_hooks.get('response') == []: + return request_hooks + + if request_hooks is None or request_hooks.get('response') == []: + return session_hooks + + return merge_setting(request_hooks, session_hooks, dict_class) + + +class SessionRedirectMixin(object): + + def get_redirect_target(self, resp): + """Receives a Response. Returns a redirect URI or ``None``""" + # Due to the nature of how requests processes redirects this method will + # be called at least once upon the original response and at least twice + # on each subsequent redirect response (if any). + # If a custom mixin is used to handle this logic, it may be advantageous + # to cache the redirect location onto the response object as a private + # attribute. + if resp.is_redirect: + location = resp.headers['location'] + # Currently the underlying http module on py3 decode headers + # in latin1, but empirical evidence suggests that latin1 is very + # rarely used with non-ASCII characters in HTTP headers. + # It is more likely to get UTF8 header rather than latin1. + # This causes incorrect handling of UTF8 encoded location headers. + # To solve this, we re-encode the location in latin1. + if is_py3: + location = location.encode('latin1') + return to_native_string(location, 'utf8') + return None + + def resolve_redirects(self, resp, req, stream=False, timeout=None, + verify=True, cert=None, proxies=None, yield_requests=False, **adapter_kwargs): + """Receives a Response. Returns a generator of Responses or Requests.""" + + hist = [] # keep track of history + + url = self.get_redirect_target(resp) + previous_fragment = urlparse(req.url).fragment + while url: + prepared_request = req.copy() + + # Update history and keep track of redirects. + # resp.history must ignore the original request in this loop + hist.append(resp) + resp.history = hist[1:] + + try: + resp.content # Consume socket so it can be released + except (ChunkedEncodingError, ContentDecodingError, RuntimeError): + resp.raw.read(decode_content=False) + + if len(resp.history) >= self.max_redirects: + raise TooManyRedirects('Exceeded %s redirects.' % self.max_redirects, response=resp) + + # Release the connection back into the pool. + resp.close() + + # Handle redirection without scheme (see: RFC 1808 Section 4) + if url.startswith('//'): + parsed_rurl = urlparse(resp.url) + url = '%s:%s' % (to_native_string(parsed_rurl.scheme), url) + + # Normalize url case and attach previous fragment if needed (RFC 7231 7.1.2) + parsed = urlparse(url) + if parsed.fragment == '' and previous_fragment: + parsed = parsed._replace(fragment=previous_fragment) + elif parsed.fragment: + previous_fragment = parsed.fragment + url = parsed.geturl() + + # Facilitate relative 'location' headers, as allowed by RFC 7231. + # (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource') + # Compliant with RFC3986, we percent encode the url. 
+ if not parsed.netloc: + url = urljoin(resp.url, requote_uri(url)) + else: + url = requote_uri(url) + + prepared_request.url = to_native_string(url) + + self.rebuild_method(prepared_request, resp) + + # https://github.com/requests/requests/issues/1084 + if resp.status_code not in (codes.temporary_redirect, codes.permanent_redirect): + # https://github.com/requests/requests/issues/3490 + purged_headers = ('Content-Length', 'Content-Type', 'Transfer-Encoding') + for header in purged_headers: + prepared_request.headers.pop(header, None) + prepared_request.body = None + + headers = prepared_request.headers + try: + del headers['Cookie'] + except KeyError: + pass + + # Extract any cookies sent on the response to the cookiejar + # in the new request. Because we've mutated our copied prepared + # request, use the old one that we haven't yet touched. + extract_cookies_to_jar(prepared_request._cookies, req, resp.raw) + merge_cookies(prepared_request._cookies, self.cookies) + prepared_request.prepare_cookies(prepared_request._cookies) + + # Rebuild auth and proxy information. + proxies = self.rebuild_proxies(prepared_request, proxies) + self.rebuild_auth(prepared_request, resp) + + # A failed tell() sets `_body_position` to `object()`. This non-None + # value ensures `rewindable` will be True, allowing us to raise an + # UnrewindableBodyError, instead of hanging the connection. + rewindable = ( + prepared_request._body_position is not None and + ('Content-Length' in headers or 'Transfer-Encoding' in headers) + ) + + # Attempt to rewind consumed file-like object. + if rewindable: + rewind_body(prepared_request) + + # Override the original request. + req = prepared_request + + if yield_requests: + yield req + else: + + resp = self.send( + req, + stream=stream, + timeout=timeout, + verify=verify, + cert=cert, + proxies=proxies, + allow_redirects=False, + **adapter_kwargs + ) + + extract_cookies_to_jar(self.cookies, prepared_request, resp.raw) + + # extract redirect url, if any, for the next loop + url = self.get_redirect_target(resp) + yield resp + + def rebuild_auth(self, prepared_request, response): + """When being redirected we may want to strip authentication from the + request to avoid leaking credentials. This method intelligently removes + and reapplies authentication where possible to avoid credential loss. + """ + headers = prepared_request.headers + url = prepared_request.url + + if 'Authorization' in headers: + # If we get redirected to a new host, we should strip out any + # authentication headers. + original_parsed = urlparse(response.request.url) + redirect_parsed = urlparse(url) + + if (original_parsed.hostname != redirect_parsed.hostname): + del headers['Authorization'] + + # .netrc might have more auth for us on our new host. + new_auth = get_netrc_auth(url) if self.trust_env else None + if new_auth is not None: + prepared_request.prepare_auth(new_auth) + + return + + def rebuild_proxies(self, prepared_request, proxies): + """This method re-evaluates the proxy configuration by considering the + environment variables. If we are redirected to a URL covered by + NO_PROXY, we strip the proxy configuration. Otherwise, we set missing + proxy keys for this URL (in case they were stripped by a previous + redirect). + + This method also replaces the Proxy-Authorization header where + necessary. 
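# Standalone usage sketch of the proxy mapping consumed by rebuild_proxies and
# select_proxy: keys may name a scheme or a scheme plus host, and credentials in
# a proxy URL are turned into a Proxy-Authorization header. Assumes this
# vendored requests package is importable; hosts and ports are illustrative only.
import requests

proxies = {
    'http': 'http://user:pass@10.0.0.1:3128',             # all plain-HTTP traffic
    'https://internal.example': 'http://10.0.0.2:3128',   # one specific host only
}
session = requests.Session()
session.proxies.update(proxies)
session.get('http://example.org/', timeout=10)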
+ + :rtype: dict + """ + proxies = proxies if proxies is not None else {} + headers = prepared_request.headers + url = prepared_request.url + scheme = urlparse(url).scheme + new_proxies = proxies.copy() + no_proxy = proxies.get('no_proxy') + + bypass_proxy = should_bypass_proxies(url, no_proxy=no_proxy) + if self.trust_env and not bypass_proxy: + environ_proxies = get_environ_proxies(url, no_proxy=no_proxy) + + proxy = environ_proxies.get(scheme, environ_proxies.get('all')) + + if proxy: + new_proxies.setdefault(scheme, proxy) + + if 'Proxy-Authorization' in headers: + del headers['Proxy-Authorization'] + + try: + username, password = get_auth_from_url(new_proxies[scheme]) + except KeyError: + username, password = None, None + + if username and password: + headers['Proxy-Authorization'] = _basic_auth_str(username, password) + + return new_proxies + + def rebuild_method(self, prepared_request, response): + """When being redirected we may want to change the method of the request + based on certain specs or browser behavior. + """ + method = prepared_request.method + + # http://tools.ietf.org/html/rfc7231#section-6.4.4 + if response.status_code == codes.see_other and method != 'HEAD': + method = 'GET' + + # Do what the browsers do, despite standards... + # First, turn 302s into GETs. + if response.status_code == codes.found and method != 'HEAD': + method = 'GET' + + # Second, if a POST is responded to with a 301, turn it into a GET. + # This bizarre behaviour is explained in Issue 1704. + if response.status_code == codes.moved and method == 'POST': + method = 'GET' + + prepared_request.method = method + + +class Session(SessionRedirectMixin): + """A Requests session. + + Provides cookie persistence, connection-pooling, and configuration. + + Basic Usage:: + + >>> import requests + >>> s = requests.Session() + >>> s.get('http://httpbin.org/get') + <Response [200]> + + Or as a context manager:: + + >>> with requests.Session() as s: + >>> s.get('http://httpbin.org/get') + <Response [200]> + """ + + __attrs__ = [ + 'headers', 'cookies', 'auth', 'proxies', 'hooks', 'params', 'verify', + 'cert', 'prefetch', 'adapters', 'stream', 'trust_env', + 'max_redirects', + ] + + def __init__(self): + + #: A case-insensitive dictionary of headers to be sent on each + #: :class:`Request <Request>` sent from this + #: :class:`Session <Session>`. + self.headers = default_headers() + + #: Default Authentication tuple or object to attach to + #: :class:`Request <Request>`. + self.auth = None + + #: Dictionary mapping protocol or protocol and host to the URL of the proxy + #: (e.g. {'http': 'foo.bar:3128', 'http://host.name': 'foo.bar:4012'}) to + #: be used on each :class:`Request <Request>`. + self.proxies = {} + + #: Event-handling hooks. + self.hooks = default_hooks() + + #: Dictionary of querystring data to attach to each + #: :class:`Request <Request>`. The dictionary values may be lists for + #: representing multivalued query parameters. + self.params = {} + + #: Stream response content default. + self.stream = False + + #: SSL Verification default. + self.verify = True + + #: SSL client certificate default, if String, path to ssl client + #: cert file (.pem). If Tuple, ('cert', 'key') pair. + self.cert = None + + #: Maximum number of redirects allowed. If the request exceeds this + #: limit, a :class:`TooManyRedirects` exception is raised. + #: This defaults to requests.models.DEFAULT_REDIRECT_LIMIT, which is + #: 30. 
+ self.max_redirects = DEFAULT_REDIRECT_LIMIT + + #: Trust environment settings for proxy configuration, default + #: authentication and similar. + self.trust_env = True + + #: A CookieJar containing all currently outstanding cookies set on this + #: session. By default it is a + #: :class:`RequestsCookieJar <requests.cookies.RequestsCookieJar>`, but + #: may be any other ``cookielib.CookieJar`` compatible object. + self.cookies = cookiejar_from_dict({}) + + # Default connection adapters. + self.adapters = OrderedDict() + self.mount('https://', HTTPAdapter()) + self.mount('http://', HTTPAdapter()) + + def __enter__(self): + return self + + def __exit__(self, *args): + self.close() + + def prepare_request(self, request): + """Constructs a :class:`PreparedRequest <PreparedRequest>` for + transmission and returns it. The :class:`PreparedRequest` has settings + merged from the :class:`Request <Request>` instance and those of the + :class:`Session`. + + :param request: :class:`Request` instance to prepare with this + session's settings. + :rtype: requests.PreparedRequest + """ + cookies = request.cookies or {} + + # Bootstrap CookieJar. + if not isinstance(cookies, cookielib.CookieJar): + cookies = cookiejar_from_dict(cookies) + + # Merge with session cookies + merged_cookies = merge_cookies( + merge_cookies(RequestsCookieJar(), self.cookies), cookies) + + # Set environment's basic authentication if not explicitly set. + auth = request.auth + if self.trust_env and not auth and not self.auth: + auth = get_netrc_auth(request.url) + + p = PreparedRequest() + p.prepare( + method=request.method.upper(), + url=request.url, + files=request.files, + data=request.data, + json=request.json, + headers=merge_setting(request.headers, self.headers, dict_class=CaseInsensitiveDict), + params=merge_setting(request.params, self.params), + auth=merge_setting(auth, self.auth), + cookies=merged_cookies, + hooks=merge_hooks(request.hooks, self.hooks), + ) + return p + + def request(self, method, url, + params=None, data=None, headers=None, cookies=None, files=None, + auth=None, timeout=None, allow_redirects=True, proxies=None, + hooks=None, stream=None, verify=None, cert=None, json=None): + """Constructs a :class:`Request <Request>`, prepares it and sends it. + Returns :class:`Response <Response>` object. + + :param method: method for the new :class:`Request` object. + :param url: URL for the new :class:`Request` object. + :param params: (optional) Dictionary or bytes to be sent in the query + string for the :class:`Request`. + :param data: (optional) Dictionary, bytes, or file-like object to send + in the body of the :class:`Request`. + :param json: (optional) json to send in the body of the + :class:`Request`. + :param headers: (optional) Dictionary of HTTP Headers to send with the + :class:`Request`. + :param cookies: (optional) Dict or CookieJar object to send with the + :class:`Request`. + :param files: (optional) Dictionary of ``'filename': file-like-objects`` + for multipart encoding upload. + :param auth: (optional) Auth tuple or callable to enable + Basic/Digest/Custom HTTP Auth. + :param timeout: (optional) How long to wait for the server to send + data before giving up, as a float, or a :ref:`(connect timeout, + read timeout) <timeouts>` tuple. + :type timeout: float or tuple + :param allow_redirects: (optional) Set to True by default. + :type allow_redirects: bool + :param proxies: (optional) Dictionary mapping protocol or protocol and + hostname to the URL of the proxy. 
+ :param stream: (optional) whether to immediately download the response + content. Defaults to ``False``. + :param verify: (optional) Either a boolean, in which case it controls whether we verify + the server's TLS certificate, or a string, in which case it must be a path + to a CA bundle to use. Defaults to ``True``. + :param cert: (optional) if String, path to ssl client cert file (.pem). + If Tuple, ('cert', 'key') pair. + :rtype: requests.Response + """ + # Create the Request. + req = Request( + method=method.upper(), + url=url, + headers=headers, + files=files, + data=data or {}, + json=json, + params=params or {}, + auth=auth, + cookies=cookies, + hooks=hooks, + ) + prep = self.prepare_request(req) + + proxies = proxies or {} + + settings = self.merge_environment_settings( + prep.url, proxies, stream, verify, cert + ) + + # Send the request. + send_kwargs = { + 'timeout': timeout, + 'allow_redirects': allow_redirects, + } + send_kwargs.update(settings) + resp = self.send(prep, **send_kwargs) + + return resp + + def get(self, url, **kwargs): + r"""Sends a GET request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :rtype: requests.Response + """ + + kwargs.setdefault('allow_redirects', True) + return self.request('GET', url, **kwargs) + + def options(self, url, **kwargs): + r"""Sends a OPTIONS request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :rtype: requests.Response + """ + + kwargs.setdefault('allow_redirects', True) + return self.request('OPTIONS', url, **kwargs) + + def head(self, url, **kwargs): + r"""Sends a HEAD request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :rtype: requests.Response + """ + + kwargs.setdefault('allow_redirects', False) + return self.request('HEAD', url, **kwargs) + + def post(self, url, data=None, json=None, **kwargs): + r"""Sends a POST request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`. + :param json: (optional) json to send in the body of the :class:`Request`. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :rtype: requests.Response + """ + + return self.request('POST', url, data=data, json=json, **kwargs) + + def put(self, url, data=None, **kwargs): + r"""Sends a PUT request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :rtype: requests.Response + """ + + return self.request('PUT', url, data=data, **kwargs) + + def patch(self, url, data=None, **kwargs): + r"""Sends a PATCH request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :rtype: requests.Response + """ + + return self.request('PATCH', url, data=data, **kwargs) + + def delete(self, url, **kwargs): + r"""Sends a DELETE request. 
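# Standalone usage sketch of the Session verb helpers documented above: headers,
# params and cookies set on the session are merged into every request it sends.
# Assumes this vendored requests package is importable; URL, token and params
# are illustrative only.
import requests

with requests.Session() as session:
    session.headers.update({'Authorization': 'Bearer <token>'})
    session.params = {'page': 1}
    resp = session.get('https://httpbin.org/get', timeout=(3.05, 10))
    resp.raise_for_status()
    payload = resp.json()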
Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :rtype: requests.Response + """ + + return self.request('DELETE', url, **kwargs) + + def send(self, request, **kwargs): + """Send a given PreparedRequest. + + :rtype: requests.Response + """ + # Set defaults that the hooks can utilize to ensure they always have + # the correct parameters to reproduce the previous request. + kwargs.setdefault('stream', self.stream) + kwargs.setdefault('verify', self.verify) + kwargs.setdefault('cert', self.cert) + kwargs.setdefault('proxies', self.proxies) + + # It's possible that users might accidentally send a Request object. + # Guard against that specific failure case. + if isinstance(request, Request): + raise ValueError('You can only send PreparedRequests.') + + # Set up variables needed for resolve_redirects and dispatching of hooks + allow_redirects = kwargs.pop('allow_redirects', True) + stream = kwargs.get('stream') + hooks = request.hooks + + # Get the appropriate adapter to use + adapter = self.get_adapter(url=request.url) + + # Start time (approximately) of the request + start = preferred_clock() + + # Send the request + r = adapter.send(request, **kwargs) + + # Total elapsed time of the request (approximately) + elapsed = preferred_clock() - start + r.elapsed = timedelta(seconds=elapsed) + + # Response manipulation hooks + r = dispatch_hook('response', hooks, r, **kwargs) + + # Persist cookies + if r.history: + + # If the hooks create history then we want those cookies too + for resp in r.history: + extract_cookies_to_jar(self.cookies, resp.request, resp.raw) + + extract_cookies_to_jar(self.cookies, request, r.raw) + + # Redirect resolving generator. + gen = self.resolve_redirects(r, request, **kwargs) + + # Resolve redirects if allowed. + history = [resp for resp in gen] if allow_redirects else [] + + # Shuffle things around if there's history. + if history: + # Insert the first (original) request at the start + history.insert(0, r) + # Get the last request made + r = history.pop() + r.history = history + + # If redirects aren't being followed, store the response on the Request for Response.next(). + if not allow_redirects: + try: + r._next = next(self.resolve_redirects(r, request, yield_requests=True, **kwargs)) + except StopIteration: + pass + + if not stream: + r.content + + return r + + def merge_environment_settings(self, url, proxies, stream, verify, cert): + """ + Check the environment and merge it with some settings. + + :rtype: dict + """ + # Gather clues from the surrounding environment. + if self.trust_env: + # Set environment's proxies. + no_proxy = proxies.get('no_proxy') if proxies is not None else None + env_proxies = get_environ_proxies(url, no_proxy=no_proxy) + for (k, v) in env_proxies.items(): + proxies.setdefault(k, v) + + # Look for requests environment configuration and be compatible + # with cURL. + if verify is True or verify is None: + verify = (os.environ.get('REQUESTS_CA_BUNDLE') or + os.environ.get('CURL_CA_BUNDLE')) + + # Merge all the kwargs. + proxies = merge_setting(proxies, self.proxies) + stream = merge_setting(stream, self.stream) + verify = merge_setting(verify, self.verify) + cert = merge_setting(cert, self.cert) + + return {'verify': verify, 'proxies': proxies, 'stream': stream, + 'cert': cert} + + def get_adapter(self, url): + """ + Returns the appropriate connection adapter for the given URL. 
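# Standalone usage sketch of transport adapters and get_adapter: mount() keys
# adapters by URL prefix and the longest matching prefix wins. Assumes the
# vendored requests and urllib3 packages are importable; the host and retry
# settings are illustrative only.
import requests
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry

session = requests.Session()
retrying = HTTPAdapter(max_retries=Retry(total=3, backoff_factor=0.5))
session.mount('https://api.example.com/', retrying)
session.get_adapter('https://api.example.com/v1/users')  # -> the retrying adapter
session.get_adapter('https://other.example.org/')        # -> the default HTTPAdapter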
+ + :rtype: requests.adapters.BaseAdapter + """ + for (prefix, adapter) in self.adapters.items(): + + if url.lower().startswith(prefix.lower()): + return adapter + + # Nothing matches :-/ + raise InvalidSchema("No connection adapters were found for '%s'" % url) + + def close(self): + """Closes all adapters and as such the session""" + for v in self.adapters.values(): + v.close() + + def mount(self, prefix, adapter): + """Registers a connection adapter to a prefix. + + Adapters are sorted in descending order by prefix length. + """ + self.adapters[prefix] = adapter + keys_to_move = [k for k in self.adapters if len(k) < len(prefix)] + + for key in keys_to_move: + self.adapters[key] = self.adapters.pop(key) + + def __getstate__(self): + state = dict((attr, getattr(self, attr, None)) for attr in self.__attrs__) + return state + + def __setstate__(self, state): + for attr, value in state.items(): + setattr(self, attr, value) + + +def session(): + """ + Returns a :class:`Session` for context-management. + + :rtype: Session + """ + + return Session() diff --git a/env/lib/python3.6/site-packages/requests/status_codes.py b/env/lib/python3.6/site-packages/requests/status_codes.py new file mode 100644 index 0000000..ff462c6 --- /dev/null +++ b/env/lib/python3.6/site-packages/requests/status_codes.py @@ -0,0 +1,120 @@ +# -*- coding: utf-8 -*- + +""" +The ``codes`` object defines a mapping from common names for HTTP statuses +to their numerical codes, accessible either as attributes or as dictionary +items. + +>>> requests.codes['temporary_redirect'] +307 +>>> requests.codes.teapot +418 +>>> requests.codes['\o/'] +200 + +Some codes have multiple names, and both upper- and lower-case versions of +the names are allowed. For example, ``codes.ok``, ``codes.OK``, and +``codes.okay`` all correspond to the HTTP status code 200. +""" + +from .structures import LookupDict + +_codes = { + + # Informational. + 100: ('continue',), + 101: ('switching_protocols',), + 102: ('processing',), + 103: ('checkpoint',), + 122: ('uri_too_long', 'request_uri_too_long'), + 200: ('ok', 'okay', 'all_ok', 'all_okay', 'all_good', '\\o/', '✓'), + 201: ('created',), + 202: ('accepted',), + 203: ('non_authoritative_info', 'non_authoritative_information'), + 204: ('no_content',), + 205: ('reset_content', 'reset'), + 206: ('partial_content', 'partial'), + 207: ('multi_status', 'multiple_status', 'multi_stati', 'multiple_stati'), + 208: ('already_reported',), + 226: ('im_used',), + + # Redirection. + 300: ('multiple_choices',), + 301: ('moved_permanently', 'moved', '\\o-'), + 302: ('found',), + 303: ('see_other', 'other'), + 304: ('not_modified',), + 305: ('use_proxy',), + 306: ('switch_proxy',), + 307: ('temporary_redirect', 'temporary_moved', 'temporary'), + 308: ('permanent_redirect', + 'resume_incomplete', 'resume',), # These 2 to be removed in 3.0 + + # Client Error. 
+ 400: ('bad_request', 'bad'), + 401: ('unauthorized',), + 402: ('payment_required', 'payment'), + 403: ('forbidden',), + 404: ('not_found', '-o-'), + 405: ('method_not_allowed', 'not_allowed'), + 406: ('not_acceptable',), + 407: ('proxy_authentication_required', 'proxy_auth', 'proxy_authentication'), + 408: ('request_timeout', 'timeout'), + 409: ('conflict',), + 410: ('gone',), + 411: ('length_required',), + 412: ('precondition_failed', 'precondition'), + 413: ('request_entity_too_large',), + 414: ('request_uri_too_large',), + 415: ('unsupported_media_type', 'unsupported_media', 'media_type'), + 416: ('requested_range_not_satisfiable', 'requested_range', 'range_not_satisfiable'), + 417: ('expectation_failed',), + 418: ('im_a_teapot', 'teapot', 'i_am_a_teapot'), + 421: ('misdirected_request',), + 422: ('unprocessable_entity', 'unprocessable'), + 423: ('locked',), + 424: ('failed_dependency', 'dependency'), + 425: ('unordered_collection', 'unordered'), + 426: ('upgrade_required', 'upgrade'), + 428: ('precondition_required', 'precondition'), + 429: ('too_many_requests', 'too_many'), + 431: ('header_fields_too_large', 'fields_too_large'), + 444: ('no_response', 'none'), + 449: ('retry_with', 'retry'), + 450: ('blocked_by_windows_parental_controls', 'parental_controls'), + 451: ('unavailable_for_legal_reasons', 'legal_reasons'), + 499: ('client_closed_request',), + + # Server Error. + 500: ('internal_server_error', 'server_error', '/o\\', '✗'), + 501: ('not_implemented',), + 502: ('bad_gateway',), + 503: ('service_unavailable', 'unavailable'), + 504: ('gateway_timeout',), + 505: ('http_version_not_supported', 'http_version'), + 506: ('variant_also_negotiates',), + 507: ('insufficient_storage',), + 509: ('bandwidth_limit_exceeded', 'bandwidth'), + 510: ('not_extended',), + 511: ('network_authentication_required', 'network_auth', 'network_authentication'), +} + +codes = LookupDict(name='status_codes') + +def _init(): + for code, titles in _codes.items(): + for title in titles: + setattr(codes, title, code) + if not title.startswith(('\\', '/')): + setattr(codes, title.upper(), code) + + def doc(code): + names = ', '.join('``%s``' % n for n in _codes[code]) + return '* %d: %s' % (code, names) + + global __doc__ + __doc__ = (__doc__ + '\n' + + '\n'.join(doc(code) for code in sorted(_codes)) + if __doc__ is not None else None) + +_init() diff --git a/env/lib/python3.6/site-packages/requests/structures.py b/env/lib/python3.6/site-packages/requests/structures.py new file mode 100644 index 0000000..da930e2 --- /dev/null +++ b/env/lib/python3.6/site-packages/requests/structures.py @@ -0,0 +1,103 @@ +# -*- coding: utf-8 -*- + +""" +requests.structures +~~~~~~~~~~~~~~~~~~~ + +Data structures that power Requests. +""" + +from .compat import OrderedDict, Mapping, MutableMapping + + +class CaseInsensitiveDict(MutableMapping): + """A case-insensitive ``dict``-like object. + + Implements all methods and operations of + ``MutableMapping`` as well as dict's ``copy``. Also + provides ``lower_items``. + + All keys are expected to be strings. The structure remembers the + case of the last key to be set, and ``iter(instance)``, + ``keys()``, ``items()``, ``iterkeys()``, and ``iteritems()`` + will contain case-sensitive keys. 
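# Standalone usage sketch of the codes LookupDict built by _init() above: every
# name in _codes becomes both an attribute and a dictionary key, plus an
# upper-case alias for most names. Assumes this vendored requests package is
# importable.
from requests.status_codes import codes

codes.ok                       # 200
codes['temporary_redirect']    # 307
codes.TEAPOT                   # 418
codes.get('no_such_name', 0)   # unknown names fall through to the default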
However, querying and contains + testing is case insensitive:: + + cid = CaseInsensitiveDict() + cid['Accept'] = 'application/json' + cid['aCCEPT'] == 'application/json' # True + list(cid) == ['Accept'] # True + + For example, ``headers['content-encoding']`` will return the + value of a ``'Content-Encoding'`` response header, regardless + of how the header name was originally stored. + + If the constructor, ``.update``, or equality comparison + operations are given keys that have equal ``.lower()``s, the + behavior is undefined. + """ + + def __init__(self, data=None, **kwargs): + self._store = OrderedDict() + if data is None: + data = {} + self.update(data, **kwargs) + + def __setitem__(self, key, value): + # Use the lowercased key for lookups, but store the actual + # key alongside the value. + self._store[key.lower()] = (key, value) + + def __getitem__(self, key): + return self._store[key.lower()][1] + + def __delitem__(self, key): + del self._store[key.lower()] + + def __iter__(self): + return (casedkey for casedkey, mappedvalue in self._store.values()) + + def __len__(self): + return len(self._store) + + def lower_items(self): + """Like iteritems(), but with all lowercase keys.""" + return ( + (lowerkey, keyval[1]) + for (lowerkey, keyval) + in self._store.items() + ) + + def __eq__(self, other): + if isinstance(other, Mapping): + other = CaseInsensitiveDict(other) + else: + return NotImplemented + # Compare insensitively + return dict(self.lower_items()) == dict(other.lower_items()) + + # Copy is required + def copy(self): + return CaseInsensitiveDict(self._store.values()) + + def __repr__(self): + return str(dict(self.items())) + + +class LookupDict(dict): + """Dictionary lookup object.""" + + def __init__(self, name=None): + self.name = name + super(LookupDict, self).__init__() + + def __repr__(self): + return '<lookup \'%s\'>' % (self.name) + + def __getitem__(self, key): + # We allow fall-through here, so values default to None + + return self.__dict__.get(key, None) + + def get(self, key, default=None): + return self.__dict__.get(key, default) diff --git a/env/lib/python3.6/site-packages/requests/utils.py b/env/lib/python3.6/site-packages/requests/utils.py new file mode 100644 index 0000000..431f6be --- /dev/null +++ b/env/lib/python3.6/site-packages/requests/utils.py @@ -0,0 +1,976 @@ +# -*- coding: utf-8 -*- + +""" +requests.utils +~~~~~~~~~~~~~~ + +This module provides utility functions that are used within Requests +that are also useful for external consumption. +""" + +import codecs +import contextlib +import io +import os +import re +import socket +import struct +import sys +import tempfile +import warnings +import zipfile + +from .__version__ import __version__ +from . 
import certs +# to_native_string is unused here, but imported here for backwards compatibility +from ._internal_utils import to_native_string +from .compat import parse_http_list as _parse_list_header +from .compat import ( + quote, urlparse, bytes, str, OrderedDict, unquote, getproxies, + proxy_bypass, urlunparse, basestring, integer_types, is_py3, + proxy_bypass_environment, getproxies_environment, Mapping) +from .cookies import cookiejar_from_dict +from .structures import CaseInsensitiveDict +from .exceptions import ( + InvalidURL, InvalidHeader, FileModeWarning, UnrewindableBodyError) + +NETRC_FILES = ('.netrc', '_netrc') + +DEFAULT_CA_BUNDLE_PATH = certs.where() + + +if sys.platform == 'win32': + # provide a proxy_bypass version on Windows without DNS lookups + + def proxy_bypass_registry(host): + try: + if is_py3: + import winreg + else: + import _winreg as winreg + except ImportError: + return False + + try: + internetSettings = winreg.OpenKey(winreg.HKEY_CURRENT_USER, + r'Software\Microsoft\Windows\CurrentVersion\Internet Settings') + # ProxyEnable could be REG_SZ or REG_DWORD, normalizing it + proxyEnable = int(winreg.QueryValueEx(internetSettings, + 'ProxyEnable')[0]) + # ProxyOverride is almost always a string + proxyOverride = winreg.QueryValueEx(internetSettings, + 'ProxyOverride')[0] + except OSError: + return False + if not proxyEnable or not proxyOverride: + return False + + # make a check value list from the registry entry: replace the + # '<local>' string by the localhost entry and the corresponding + # canonical entry. + proxyOverride = proxyOverride.split(';') + # now check if we match one of the registry values. + for test in proxyOverride: + if test == '<local>': + if '.' not in host: + return True + test = test.replace(".", r"\.") # mask dots + test = test.replace("*", r".*") # change glob sequence + test = test.replace("?", r".") # change glob char + if re.match(test, host, re.I): + return True + return False + + def proxy_bypass(host): # noqa + """Return True, if the host should be bypassed. + + Checks proxy settings gathered from the environment, if specified, + or the registry. + """ + if getproxies_environment(): + return proxy_bypass_environment(host) + else: + return proxy_bypass_registry(host) + + +def dict_to_sequence(d): + """Returns an internal sequence dictionary update.""" + + if hasattr(d, 'items'): + d = d.items() + + return d + + +def super_len(o): + total_length = None + current_position = 0 + + if hasattr(o, '__len__'): + total_length = len(o) + + elif hasattr(o, 'len'): + total_length = o.len + + elif hasattr(o, 'fileno'): + try: + fileno = o.fileno() + except io.UnsupportedOperation: + pass + else: + total_length = os.fstat(fileno).st_size + + # Having used fstat to determine the file length, we need to + # confirm that this file was opened up in binary mode. + if 'b' not in o.mode: + warnings.warn(( + "Requests has determined the content-length for this " + "request using the binary size of the file: however, the " + "file has been opened in text mode (i.e. without the 'b' " + "flag in the mode). This may lead to an incorrect " + "content-length. In Requests 3.0, support will be removed " + "for files in text mode."), + FileModeWarning + ) + + if hasattr(o, 'tell'): + try: + current_position = o.tell() + except (OSError, IOError): + # This can happen in some weird situations, such as when the file + # is actually a special file descriptor like stdin. 
In this + # instance, we don't know what the length is, so set it to zero and + # let requests chunk it instead. + if total_length is not None: + current_position = total_length + else: + if hasattr(o, 'seek') and total_length is None: + # StringIO and BytesIO have seek but no useable fileno + try: + # seek to end of file + o.seek(0, 2) + total_length = o.tell() + + # seek back to current position to support + # partially read file-like objects + o.seek(current_position or 0) + except (OSError, IOError): + total_length = 0 + + if total_length is None: + total_length = 0 + + return max(0, total_length - current_position) + + +def get_netrc_auth(url, raise_errors=False): + """Returns the Requests tuple auth for a given url from netrc.""" + + try: + from netrc import netrc, NetrcParseError + + netrc_path = None + + for f in NETRC_FILES: + try: + loc = os.path.expanduser('~/{0}'.format(f)) + except KeyError: + # os.path.expanduser can fail when $HOME is undefined and + # getpwuid fails. See http://bugs.python.org/issue20164 & + # https://github.com/requests/requests/issues/1846 + return + + if os.path.exists(loc): + netrc_path = loc + break + + # Abort early if there isn't one. + if netrc_path is None: + return + + ri = urlparse(url) + + # Strip port numbers from netloc. This weird `if...encode`` dance is + # used for Python 3.2, which doesn't support unicode literals. + splitstr = b':' + if isinstance(url, str): + splitstr = splitstr.decode('ascii') + host = ri.netloc.split(splitstr)[0] + + try: + _netrc = netrc(netrc_path).authenticators(host) + if _netrc: + # Return with login / password + login_i = (0 if _netrc[0] else 1) + return (_netrc[login_i], _netrc[2]) + except (NetrcParseError, IOError): + # If there was a parsing error or a permissions issue reading the file, + # we'll just skip netrc auth unless explicitly asked to raise errors. + if raise_errors: + raise + + # AppEngine hackiness. + except (ImportError, AttributeError): + pass + + +def guess_filename(obj): + """Tries to guess the filename of the given object.""" + name = getattr(obj, 'name', None) + if (name and isinstance(name, basestring) and name[0] != '<' and + name[-1] != '>'): + return os.path.basename(name) + + +def extract_zipped_paths(path): + """Replace nonexistent paths that look like they refer to a member of a zip + archive with the location of an extracted copy of the target, or else + just return the provided path unchanged. + """ + if os.path.exists(path): + # this is already a valid path, no need to do anything further + return path + + # find the first valid part of the provided path and treat that as a zip archive + # assume the rest of the path is the name of a member in the archive + archive, member = os.path.split(path) + while archive and not os.path.exists(archive): + archive, prefix = os.path.split(archive) + member = '/'.join([prefix, member]) + + if not zipfile.is_zipfile(archive): + return path + + zip_file = zipfile.ZipFile(archive) + if member not in zip_file.namelist(): + return path + + # we have a valid zip archive and a valid member of that archive + tmp = tempfile.gettempdir() + extracted_path = os.path.join(tmp, *member.split('/')) + if not os.path.exists(extracted_path): + extracted_path = zip_file.extract(member, path=tmp) + + return extracted_path + + +def from_key_val_list(value): + """Take an object and test to see if it can be represented as a + dictionary. 
Unless it can not be represented as such, return an + OrderedDict, e.g., + + :: + + >>> from_key_val_list([('key', 'val')]) + OrderedDict([('key', 'val')]) + >>> from_key_val_list('string') + ValueError: need more than 1 value to unpack + >>> from_key_val_list({'key': 'val'}) + OrderedDict([('key', 'val')]) + + :rtype: OrderedDict + """ + if value is None: + return None + + if isinstance(value, (str, bytes, bool, int)): + raise ValueError('cannot encode objects that are not 2-tuples') + + return OrderedDict(value) + + +def to_key_val_list(value): + """Take an object and test to see if it can be represented as a + dictionary. If it can be, return a list of tuples, e.g., + + :: + + >>> to_key_val_list([('key', 'val')]) + [('key', 'val')] + >>> to_key_val_list({'key': 'val'}) + [('key', 'val')] + >>> to_key_val_list('string') + ValueError: cannot encode objects that are not 2-tuples. + + :rtype: list + """ + if value is None: + return None + + if isinstance(value, (str, bytes, bool, int)): + raise ValueError('cannot encode objects that are not 2-tuples') + + if isinstance(value, Mapping): + value = value.items() + + return list(value) + + +# From mitsuhiko/werkzeug (used with permission). +def parse_list_header(value): + """Parse lists as described by RFC 2068 Section 2. + + In particular, parse comma-separated lists where the elements of + the list may include quoted-strings. A quoted-string could + contain a comma. A non-quoted string could have quotes in the + middle. Quotes are removed automatically after parsing. + + It basically works like :func:`parse_set_header` just that items + may appear multiple times and case sensitivity is preserved. + + The return value is a standard :class:`list`: + + >>> parse_list_header('token, "quoted value"') + ['token', 'quoted value'] + + To create a header from the :class:`list` again, use the + :func:`dump_header` function. + + :param value: a string with a list header. + :return: :class:`list` + :rtype: list + """ + result = [] + for item in _parse_list_header(value): + if item[:1] == item[-1:] == '"': + item = unquote_header_value(item[1:-1]) + result.append(item) + return result + + +# From mitsuhiko/werkzeug (used with permission). +def parse_dict_header(value): + """Parse lists of key, value pairs as described by RFC 2068 Section 2 and + convert them into a python dict: + + >>> d = parse_dict_header('foo="is a fish", bar="as well"') + >>> type(d) is dict + True + >>> sorted(d.items()) + [('bar', 'as well'), ('foo', 'is a fish')] + + If there is no value for a key it will be `None`: + + >>> parse_dict_header('key_without_value') + {'key_without_value': None} + + To create a header from the :class:`dict` again, use the + :func:`dump_header` function. + + :param value: a string with a dict header. + :return: :class:`dict` + :rtype: dict + """ + result = {} + for item in _parse_list_header(value): + if '=' not in item: + result[item] = None + continue + name, value = item.split('=', 1) + if value[:1] == value[-1:] == '"': + value = unquote_header_value(value[1:-1]) + result[name] = value + return result + + +# From mitsuhiko/werkzeug (used with permission). +def unquote_header_value(value, is_filename=False): + r"""Unquotes a header value. (Reversal of :func:`quote_header_value`). + This does not use the real unquoting but what browsers are actually + using for quoting. + + :param value: the header value to unquote. 
+ :rtype: str + """ + if value and value[0] == value[-1] == '"': + # this is not the real unquoting, but fixing this so that the + # RFC is met will result in bugs with internet explorer and + # probably some other browsers as well. IE for example is + # uploading files with "C:\foo\bar.txt" as filename + value = value[1:-1] + + # if this is a filename and the starting characters look like + # a UNC path, then just return the value without quotes. Using the + # replace sequence below on a UNC path has the effect of turning + # the leading double slash into a single slash and then + # _fix_ie_filename() doesn't work correctly. See #458. + if not is_filename or value[:2] != '\\\\': + return value.replace('\\\\', '\\').replace('\\"', '"') + return value + + +def dict_from_cookiejar(cj): + """Returns a key/value dictionary from a CookieJar. + + :param cj: CookieJar object to extract cookies from. + :rtype: dict + """ + + cookie_dict = {} + + for cookie in cj: + cookie_dict[cookie.name] = cookie.value + + return cookie_dict + + +def add_dict_to_cookiejar(cj, cookie_dict): + """Returns a CookieJar from a key/value dictionary. + + :param cj: CookieJar to insert cookies into. + :param cookie_dict: Dict of key/values to insert into CookieJar. + :rtype: CookieJar + """ + + return cookiejar_from_dict(cookie_dict, cj) + + +def get_encodings_from_content(content): + """Returns encodings from given content string. + + :param content: bytestring to extract encodings from. + """ + warnings.warn(( + 'In requests 3.0, get_encodings_from_content will be removed. For ' + 'more information, please see the discussion on issue #2266. (This' + ' warning should only appear once.)'), + DeprecationWarning) + + charset_re = re.compile(r'<meta.*?charset=["\']*(.+?)["\'>]', flags=re.I) + pragma_re = re.compile(r'<meta.*?content=["\']*;?charset=(.+?)["\'>]', flags=re.I) + xml_re = re.compile(r'^<\?xml.*?encoding=["\']*(.+?)["\'>]') + + return (charset_re.findall(content) + + pragma_re.findall(content) + + xml_re.findall(content)) + + +def _parse_content_type_header(header): + """Returns content type and parameters from given header + + :param header: string + :return: tuple containing content type and dictionary of + parameters + """ + + tokens = header.split(';') + content_type, params = tokens[0].strip(), tokens[1:] + params_dict = {} + items_to_strip = "\"' " + + for param in params: + param = param.strip() + if param: + key, value = param, True + index_of_equals = param.find("=") + if index_of_equals != -1: + key = param[:index_of_equals].strip(items_to_strip) + value = param[index_of_equals + 1:].strip(items_to_strip) + params_dict[key] = value + return content_type, params_dict + + +def get_encoding_from_headers(headers): + """Returns encodings from given HTTP Header Dict. + + :param headers: dictionary to extract encoding from. 
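# Standalone usage sketch of get_encoding_from_headers as documented here: an
# explicit charset parameter wins, and bare text/* types fall back to
# ISO-8859-1. Assumes this vendored requests package is importable; the header
# values are illustrative only.
from requests.structures import CaseInsensitiveDict
from requests.utils import get_encoding_from_headers

get_encoding_from_headers(
    CaseInsensitiveDict({'Content-Type': 'text/html; charset="utf-8"'}))  # 'utf-8'
get_encoding_from_headers(
    CaseInsensitiveDict({'Content-Type': 'text/plain'}))                  # 'ISO-8859-1'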
+ :rtype: str + """ + + content_type = headers.get('content-type') + + if not content_type: + return None + + content_type, params = _parse_content_type_header(content_type) + + if 'charset' in params: + return params['charset'].strip("'\"") + + if 'text' in content_type: + return 'ISO-8859-1' + + +def stream_decode_response_unicode(iterator, r): + """Stream decodes a iterator.""" + + if r.encoding is None: + for item in iterator: + yield item + return + + decoder = codecs.getincrementaldecoder(r.encoding)(errors='replace') + for chunk in iterator: + rv = decoder.decode(chunk) + if rv: + yield rv + rv = decoder.decode(b'', final=True) + if rv: + yield rv + + +def iter_slices(string, slice_length): + """Iterate over slices of a string.""" + pos = 0 + if slice_length is None or slice_length <= 0: + slice_length = len(string) + while pos < len(string): + yield string[pos:pos + slice_length] + pos += slice_length + + +def get_unicode_from_response(r): + """Returns the requested content back in unicode. + + :param r: Response object to get unicode content from. + + Tried: + + 1. charset from content-type + 2. fall back and replace all unicode characters + + :rtype: str + """ + warnings.warn(( + 'In requests 3.0, get_unicode_from_response will be removed. For ' + 'more information, please see the discussion on issue #2266. (This' + ' warning should only appear once.)'), + DeprecationWarning) + + tried_encodings = [] + + # Try charset from content-type + encoding = get_encoding_from_headers(r.headers) + + if encoding: + try: + return str(r.content, encoding) + except UnicodeError: + tried_encodings.append(encoding) + + # Fall back: + try: + return str(r.content, encoding, errors='replace') + except TypeError: + return r.content + + +# The unreserved URI characters (RFC 3986) +UNRESERVED_SET = frozenset( + "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz" + "0123456789-._~") + + +def unquote_unreserved(uri): + """Un-escape any percent-escape sequences in a URI that are unreserved + characters. This leaves all reserved, illegal and non-ASCII bytes encoded. + + :rtype: str + """ + parts = uri.split('%') + for i in range(1, len(parts)): + h = parts[i][0:2] + if len(h) == 2 and h.isalnum(): + try: + c = chr(int(h, 16)) + except ValueError: + raise InvalidURL("Invalid percent-escape sequence: '%s'" % h) + + if c in UNRESERVED_SET: + parts[i] = c + parts[i][2:] + else: + parts[i] = '%' + parts[i] + else: + parts[i] = '%' + parts[i] + return ''.join(parts) + + +def requote_uri(uri): + """Re-quote the given URI. + + This function passes the given URI through an unquote/quote cycle to + ensure that it is fully and consistently quoted. + + :rtype: str + """ + safe_with_percent = "!#$%&'()*+,/:;=?@[]~" + safe_without_percent = "!#$&'()*+,/:;=?@[]~" + try: + # Unquote only the unreserved characters + # Then quote only illegal characters (do not quote reserved, + # unreserved, or '%') + return quote(unquote_unreserved(uri), safe=safe_with_percent) + except InvalidURL: + # We couldn't unquote the given URI, so let's try quoting it, but + # there may be unquoted '%'s in the URI. We need to make sure they're + # properly quoted so they do not cause issues elsewhere. 
+ return quote(uri, safe=safe_without_percent) + + +def address_in_network(ip, net): + """This function allows you to check if an IP belongs to a network subnet + + Example: returns True if ip = 192.168.1.1 and net = 192.168.1.0/24 + returns False if ip = 192.168.1.1 and net = 192.168.100.0/24 + + :rtype: bool + """ + ipaddr = struct.unpack('=L', socket.inet_aton(ip))[0] + netaddr, bits = net.split('/') + netmask = struct.unpack('=L', socket.inet_aton(dotted_netmask(int(bits))))[0] + network = struct.unpack('=L', socket.inet_aton(netaddr))[0] & netmask + return (ipaddr & netmask) == (network & netmask) + + +def dotted_netmask(mask): + """Converts mask from /xx format to xxx.xxx.xxx.xxx + + Example: if mask is 24 function returns 255.255.255.0 + + :rtype: str + """ + bits = 0xffffffff ^ (1 << 32 - mask) - 1 + return socket.inet_ntoa(struct.pack('>I', bits)) + + +def is_ipv4_address(string_ip): + """ + :rtype: bool + """ + try: + socket.inet_aton(string_ip) + except socket.error: + return False + return True + + +def is_valid_cidr(string_network): + """ + Very simple check of the cidr format in no_proxy variable. + + :rtype: bool + """ + if string_network.count('/') == 1: + try: + mask = int(string_network.split('/')[1]) + except ValueError: + return False + + if mask < 1 or mask > 32: + return False + + try: + socket.inet_aton(string_network.split('/')[0]) + except socket.error: + return False + else: + return False + return True + + +@contextlib.contextmanager +def set_environ(env_name, value): + """Set the environment variable 'env_name' to 'value' + + Save previous value, yield, and then restore the previous value stored in + the environment variable 'env_name'. + + If 'value' is None, do nothing""" + value_changed = value is not None + if value_changed: + old_value = os.environ.get(env_name) + os.environ[env_name] = value + try: + yield + finally: + if value_changed: + if old_value is None: + del os.environ[env_name] + else: + os.environ[env_name] = old_value + + +def should_bypass_proxies(url, no_proxy): + """ + Returns whether we should bypass proxies or not. + + :rtype: bool + """ + # Prioritize lowercase environment variables over uppercase + # to keep a consistent behaviour with other http projects (curl, wget). + get_proxy = lambda k: os.environ.get(k) or os.environ.get(k.upper()) + + # First check whether no_proxy is defined. If it is, check that the URL + # we're getting isn't in the no_proxy list. + no_proxy_arg = no_proxy + if no_proxy is None: + no_proxy = get_proxy('no_proxy') + parsed = urlparse(url) + + if no_proxy: + # We need to check whether we match here. We need to see if we match + # the end of the hostname, both with and without the port. + no_proxy = ( + host for host in no_proxy.replace(' ', '').split(',') if host + ) + + if is_ipv4_address(parsed.hostname): + for proxy_ip in no_proxy: + if is_valid_cidr(proxy_ip): + if address_in_network(parsed.hostname, proxy_ip): + return True + elif parsed.hostname == proxy_ip: + # If no_proxy ip was defined in plain IP notation instead of cidr notation & + # matches the IP of the index + return True + else: + host_with_port = parsed.hostname + if parsed.port: + host_with_port += ':{0}'.format(parsed.port) + + for host in no_proxy: + if parsed.hostname.endswith(host) or host_with_port.endswith(host): + # The URL does match something in no_proxy, so we don't want + # to apply the proxies on this URL. + return True + + # If the system proxy settings indicate that this URL should be bypassed, + # don't proxy. 
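# Standalone usage sketch of the helpers behind the no_proxy matching above.
# Assumes this vendored requests package is importable; the addresses are
# illustrative only.
from requests.utils import address_in_network, dotted_netmask, is_valid_cidr

dotted_netmask(24)                                     # '255.255.255.0'
is_valid_cidr('192.168.1.0/24')                        # True
address_in_network('192.168.1.77', '192.168.1.0/24')   # True
address_in_network('10.0.0.5', '192.168.1.0/24')       # False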
+ # The proxy_bypass function is incredibly buggy on OS X in early versions + # of Python 2.6, so allow this call to fail. Only catch the specific + # exceptions we've seen, though: this call failing in other ways can reveal + # legitimate problems. + with set_environ('no_proxy', no_proxy_arg): + try: + bypass = proxy_bypass(parsed.hostname) + except (TypeError, socket.gaierror): + bypass = False + + if bypass: + return True + + return False + + +def get_environ_proxies(url, no_proxy=None): + """ + Return a dict of environment proxies. + + :rtype: dict + """ + if should_bypass_proxies(url, no_proxy=no_proxy): + return {} + else: + return getproxies() + + +def select_proxy(url, proxies): + """Select a proxy for the url, if applicable. + + :param url: The url being for the request + :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs + """ + proxies = proxies or {} + urlparts = urlparse(url) + if urlparts.hostname is None: + return proxies.get(urlparts.scheme, proxies.get('all')) + + proxy_keys = [ + urlparts.scheme + '://' + urlparts.hostname, + urlparts.scheme, + 'all://' + urlparts.hostname, + 'all', + ] + proxy = None + for proxy_key in proxy_keys: + if proxy_key in proxies: + proxy = proxies[proxy_key] + break + + return proxy + + +def default_user_agent(name="python-requests"): + """ + Return a string representing the default user agent. + + :rtype: str + """ + return '%s/%s' % (name, __version__) + + +def default_headers(): + """ + :rtype: requests.structures.CaseInsensitiveDict + """ + return CaseInsensitiveDict({ + 'User-Agent': default_user_agent(), + 'Accept-Encoding': ', '.join(('gzip', 'deflate')), + 'Accept': '*/*', + 'Connection': 'keep-alive', + }) + + +def parse_header_links(value): + """Return a list of parsed link headers proxies. + + i.e. Link: <http:/.../front.jpeg>; rel=front; type="image/jpeg",<http://.../back.jpeg>; rel=back;type="image/jpeg" + + :rtype: list + """ + + links = [] + + replace_chars = ' \'"' + + value = value.strip(replace_chars) + if not value: + return links + + for val in re.split(', *<', value): + try: + url, params = val.split(';', 1) + except ValueError: + url, params = val, '' + + link = {'url': url.strip('<> \'"')} + + for param in params.split(';'): + try: + key, value = param.split('=') + except ValueError: + break + + link[key.strip(replace_chars)] = value.strip(replace_chars) + + links.append(link) + + return links + + +# Null bytes; no need to recreate these on each call to guess_json_utf +_null = '\x00'.encode('ascii') # encoding to ASCII for Python 3 +_null2 = _null * 2 +_null3 = _null * 3 + + +def guess_json_utf(data): + """ + :rtype: str + """ + # JSON always starts with two ASCII characters, so detection is as + # easy as counting the nulls and from their location and count + # determine the encoding. Also detect a BOM, if present. 
+ sample = data[:4] + if sample in (codecs.BOM_UTF32_LE, codecs.BOM_UTF32_BE): + return 'utf-32' # BOM included + if sample[:3] == codecs.BOM_UTF8: + return 'utf-8-sig' # BOM included, MS style (discouraged) + if sample[:2] in (codecs.BOM_UTF16_LE, codecs.BOM_UTF16_BE): + return 'utf-16' # BOM included + nullcount = sample.count(_null) + if nullcount == 0: + return 'utf-8' + if nullcount == 2: + if sample[::2] == _null2: # 1st and 3rd are null + return 'utf-16-be' + if sample[1::2] == _null2: # 2nd and 4th are null + return 'utf-16-le' + # Did not detect 2 valid UTF-16 ascii-range characters + if nullcount == 3: + if sample[:3] == _null3: + return 'utf-32-be' + if sample[1:] == _null3: + return 'utf-32-le' + # Did not detect a valid UTF-32 ascii-range character + return None + + +def prepend_scheme_if_needed(url, new_scheme): + """Given a URL that may or may not have a scheme, prepend the given scheme. + Does not replace a present scheme with the one provided as an argument. + + :rtype: str + """ + scheme, netloc, path, params, query, fragment = urlparse(url, new_scheme) + + # urlparse is a finicky beast, and sometimes decides that there isn't a + # netloc present. Assume that it's being over-cautious, and switch netloc + # and path if urlparse decided there was no netloc. + if not netloc: + netloc, path = path, netloc + + return urlunparse((scheme, netloc, path, params, query, fragment)) + + +def get_auth_from_url(url): + """Given a url with authentication components, extract them into a tuple of + username,password. + + :rtype: (str,str) + """ + parsed = urlparse(url) + + try: + auth = (unquote(parsed.username), unquote(parsed.password)) + except (AttributeError, TypeError): + auth = ('', '') + + return auth + + +# Moved outside of function to avoid recompile every call +_CLEAN_HEADER_REGEX_BYTE = re.compile(b'^\\S[^\\r\\n]*$|^$') +_CLEAN_HEADER_REGEX_STR = re.compile(r'^\S[^\r\n]*$|^$') + + +def check_header_validity(header): + """Verifies that header value is a string which doesn't contain + leading whitespace or return characters. This prevents unintended + header injection. + + :param header: tuple, in the format (name, value). + """ + name, value = header + + if isinstance(value, bytes): + pat = _CLEAN_HEADER_REGEX_BYTE + else: + pat = _CLEAN_HEADER_REGEX_STR + try: + if not pat.match(value): + raise InvalidHeader("Invalid return character or leading space in header: %s" % name) + except TypeError: + raise InvalidHeader("Value for header {%s: %s} must be of type str or " + "bytes, not %s" % (name, value, type(value))) + + +def urldefragauth(url): + """ + Given a url remove the fragment and the authentication part. + + :rtype: str + """ + scheme, netloc, path, params, query, fragment = urlparse(url) + + # see func:`prepend_scheme_if_needed` + if not netloc: + netloc, path = path, netloc + + netloc = netloc.rsplit('@', 1)[-1] + + return urlunparse((scheme, netloc, path, params, query, '')) + + +def rewind_body(prepared_request): + """Move file pointer back to its recorded starting position + so it can be read again on redirect. 
+ """ + body_seek = getattr(prepared_request.body, 'seek', None) + if body_seek is not None and isinstance(prepared_request._body_position, integer_types): + try: + body_seek(prepared_request._body_position) + except (IOError, OSError): + raise UnrewindableBodyError("An error occurred when rewinding request " + "body for redirect.") + else: + raise UnrewindableBodyError("Unable to rewind request body for redirect.") diff --git a/env/lib/python3.6/site-packages/requests_oauthlib-1.0.0.dist-info/INSTALLER b/env/lib/python3.6/site-packages/requests_oauthlib-1.0.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/env/lib/python3.6/site-packages/requests_oauthlib-1.0.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/env/lib/python3.6/site-packages/requests_oauthlib-1.0.0.dist-info/LICENSE.txt b/env/lib/python3.6/site-packages/requests_oauthlib-1.0.0.dist-info/LICENSE.txt new file mode 100644 index 0000000..de09f40 --- /dev/null +++ b/env/lib/python3.6/site-packages/requests_oauthlib-1.0.0.dist-info/LICENSE.txt @@ -0,0 +1,15 @@ +ISC License + +Copyright (c) 2014 Kenneth Reitz. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/env/lib/python3.6/site-packages/requests_oauthlib-1.0.0.dist-info/METADATA b/env/lib/python3.6/site-packages/requests_oauthlib-1.0.0.dist-info/METADATA new file mode 100644 index 0000000..fbfa223 --- /dev/null +++ b/env/lib/python3.6/site-packages/requests_oauthlib-1.0.0.dist-info/METADATA @@ -0,0 +1,204 @@ +Metadata-Version: 2.1 +Name: requests-oauthlib +Version: 1.0.0 +Summary: OAuthlib authentication support for Requests. +Home-page: https://github.com/requests/requests-oauthlib +Author: Kenneth Reitz +Author-email: me@kennethreitz.com +License: ISC +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Natural Language :: English +Classifier: License :: OSI Approved :: BSD License +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.* +Provides-Extra: rsa +Requires-Dist: oauthlib (>=0.6.2) +Requires-Dist: requests (>=2.0.0) +Provides-Extra: rsa +Requires-Dist: oauthlib[rsa] (>=0.6.2); extra == 'rsa' +Requires-Dist: requests (>=2.0.0); extra == 'rsa' + +Requests-OAuthlib |build-status| |coverage-status| |docs| +========================================================= + +This project provides first-class OAuth library support for `Requests <http://python-requests.org>`_. 
+ +The OAuth 1 workflow +-------------------- + +OAuth 1 can seem overly complicated and it sure has its quirks. Luckily, +requests_oauthlib hides most of these and let you focus at the task at hand. + +Accessing protected resources using requests_oauthlib is as simple as: + +.. code-block:: pycon + + >>> from requests_oauthlib import OAuth1Session + >>> twitter = OAuth1Session('client_key', + client_secret='client_secret', + resource_owner_key='resource_owner_key', + resource_owner_secret='resource_owner_secret') + >>> url = 'https://api.twitter.com/1/account/settings.json' + >>> r = twitter.get(url) + +Before accessing resources you will need to obtain a few credentials from your +provider (e.g. Twitter) and authorization from the user for whom you wish to +retrieve resources for. You can read all about this in the full +`OAuth 1 workflow guide on RTD <https://requests-oauthlib.readthedocs.io/en/latest/oauth1_workflow.html>`_. + +The OAuth 2 workflow +-------------------- + +OAuth 2 is generally simpler than OAuth 1 but comes in more flavours. The most +common being the Authorization Code Grant, also known as the WebApplication +flow. + +Fetching a protected resource after obtaining an access token can be extremely +simple. However, before accessing resources you will need to obtain a few +credentials from your provider (e.g. Google) and authorization from the user +for whom you wish to retrieve resources for. You can read all about this in the +full `OAuth 2 workflow guide on RTD <https://requests-oauthlib.readthedocs.io/en/latest/oauth2_workflow.html>`_. + +Installation +------------- + +To install requests and requests_oauthlib you can use pip: + +.. code-block:: bash + + $ pip install requests requests_oauthlib + +.. |build-status| image:: https://travis-ci.org/requests/requests-oauthlib.svg?branch=master + :target: https://travis-ci.org/requests/requests-oauthlib +.. |coverage-status| image:: https://img.shields.io/coveralls/requests/requests-oauthlib.svg + :target: https://coveralls.io/r/requests/requests-oauthlib +.. |docs| image:: https://readthedocs.org/projects/requests-oauthlib/badge/ + :alt: Documentation Status + :scale: 100% + :target: https://requests-oauthlib.readthedocs.io/ + + +History +------- + +UNRELEASED +++++++++++ + +nothing yet + +v1.0.0 (4 June 2018) +++++++++++++++++++++ + +- **Removed support for Python 2.6 and Python 3.3.** + This project now supports Python 2.7, and Python 3.4 and above. +- Added several examples to the documentation. +- Added plentymarkets compliance fix. +- Added a ``token`` property to OAuth1Session, to match the corresponding + ``token`` property on OAuth2Session. + +v0.8.0 (14 February 2017) ++++++++++++++++++++++++++ + +- Added Fitbit compliance fix. +- Fixed an issue where newlines in the response body for the access token + request would cause errors when trying to extract the token. +- Fixed an issue introduced in v0.7.0 where users passing ``auth`` to several + methods would encounter conflicts with the ``client_id`` and + ``client_secret``-derived auth. The user-supplied ``auth`` argument is now + used in preference to those options. + +v0.7.0 (22 September 2016) +++++++++++++++++++++++++++ + +- Allowed ``OAuth2Session.request`` to take the ``client_id`` and + ``client_secret`` parameters for the purposes of automatic token refresh, + which may need them. + +v0.6.2 (12 July 2016) ++++++++++++++++++++++ + +- Use ``client_id`` and ``client_secret`` for the Authorization header if + provided. 
+- Allow explicit bypass of the Authorization header by setting ``auth=False``. +- Pass through the ``proxies`` kwarg when refreshing tokens. +- Miscellaneous cleanups. + +v0.6.1 (19 February 2016) ++++++++++++++++++++++++++ + +- Fixed a bug when sending authorization in headers with no username and + password present. +- Make sure we clear the session token before obtaining a new one. +- Some improvements to the Slack compliance fix. +- Avoid timing problems around token refresh. +- Allow passing arbitrary arguments to requests when calling + ``fetch_request_token`` and ``fetch_access_token``. + +v0.6.0 (14 December 2015) ++++++++++++++++++++++++++ + +- Add compliance fix for Slack. +- Add compliance fix for Mailchimp. +- ``TokenRequestDenied`` exceptions now carry the entire response, not just the + status code. +- Pass through keyword arguments when refreshing tokens automatically. +- Send authorization in headers, not just body, to maximize compatibility. +- More getters/setters available for OAuth2 session client values. +- Allow sending custom headers when refreshing tokens, and set some defaults. + + +v0.5.0 (4 May 2015) ++++++++++++++++++++ +- Fix ``TypeError`` being raised instead of ``TokenMissing`` error. +- Raise requests exceptions on 4XX and 5XX responses in the OAuth2 flow. +- Avoid ``AttributeError`` when initializing the ``OAuth2Session`` class + without complete client information. + +v0.4.2 (16 October 2014) +++++++++++++++++++++++++ +- New ``authorized`` property on OAuth1Session and OAuth2Session, which allows + you to easily determine if the session is already authorized with OAuth tokens + or not. +- New ``TokenMissing`` and ``VerifierMissing`` exception classes for OAuth1Session: + this will make it easier to catch and identify these exceptions. + +v0.4.1 (6 June 2014) +++++++++++++++++++++ +- New install target ``[rsa]`` for people using OAuth1 RSA-SHA1 signature + method. +- Fixed bug in OAuth2 where supplied state param was not used in auth url. +- OAuth2 HTTPS checking can be disabled by setting environment variable + ``OAUTHLIB_INSECURE_TRANSPORT``. +- OAuth1 now re-authorize upon redirects. +- OAuth1 token fetching now raise a detailed error message when the + response body is incorrectly encoded or the request was denied. +- Added support for custom OAuth1 clients. +- OAuth2 compliance fix for Sina Weibo. +- Multiple fixes to facebook compliance fix. +- Compliance fixes now re-encode body properly as bytes in Python 3. +- Logging now properly done under ``requests_oauthlib`` namespace instead + of piggybacking on oauthlib namespace. +- Logging introduced for OAuth1 auth and session. + +v0.4.0 (29 September 2013) +++++++++++++++++++++++++++ +- OAuth1Session methods only return unicode strings. #55. +- Renamed requests_oauthlib.core to requests_oauthlib.oauth1_auth for consistency. #79. +- Added Facebook compliance fix and access_token_response hook to OAuth2Session. #63. +- Added LinkedIn compliance fix. +- Added refresh_token_response compliance hook, invoked before parsing the refresh token. +- Correctly limit compliance hooks to running only once! +- Content type guessing should only be done when no content type is given +- OAuth1 now updates r.headers instead of replacing it with non case insensitive dict +- Remove last use of Response.content (in OAuth1Session). #44. 
+- State param can now be supplied in OAuth2Session.authorize_url + + diff --git a/env/lib/python3.6/site-packages/requests_oauthlib-1.0.0.dist-info/RECORD b/env/lib/python3.6/site-packages/requests_oauthlib-1.0.0.dist-info/RECORD new file mode 100644 index 0000000..7d55d9b --- /dev/null +++ b/env/lib/python3.6/site-packages/requests_oauthlib-1.0.0.dist-info/RECORD @@ -0,0 +1,34 @@ +requests_oauthlib/__init__.py,sha256=Kj1G2mmIsjxvCNNH7v4fr_1DOrima2RbhiJUmzCFSDw,517 +requests_oauthlib/oauth1_auth.py,sha256=XVk2Xs6P1lw-RL98VUNS0CkCuQc6fF5kxGEX-CE6rV0,3549 +requests_oauthlib/oauth1_session.py,sha256=uLHw3YjAWDAYQt1YKhmuzf-6OXCV8Ji173bwfS2tX_s,17096 +requests_oauthlib/oauth2_auth.py,sha256=LcLvn222F5N2-rZezCrbb2LK3ayLt26CEDMOBwEFL0o,1542 +requests_oauthlib/oauth2_session.py,sha256=d1HBPmVRx8atWN0cPX5vXG5pTmBgAljGjg1zfkT1cn0,17210 +requests_oauthlib/compliance_fixes/__init__.py,sha256=-6xxxZQPscFTK9BXsqXBtb1ks8clnCrBm0ZEQWoMfAg,358 +requests_oauthlib/compliance_fixes/douban.py,sha256=ScsTLV8TffXulTxB5xSui7E7SA_LPakqU3hxZjnYft0,473 +requests_oauthlib/compliance_fixes/facebook.py,sha256=QU0VAMb2Vfz_WqiXq4SkblM3Z5uvqIZzNpaBk-Jt_3I,1119 +requests_oauthlib/compliance_fixes/fitbit.py,sha256=8ejsjFGkDp8s5p4eSut7M6vEfTsyGiYA6Jcx1PYjltI,906 +requests_oauthlib/compliance_fixes/linkedin.py,sha256=e_Am10sByk99x_XbSiEjEgxfiQ3IMKWrt67uc123rVY,824 +requests_oauthlib/compliance_fixes/mailchimp.py,sha256=ZmB1Tka44YejemalNDPpKLoPu2-kr0K4COEl88DO944,756 +requests_oauthlib/compliance_fixes/plentymarkets.py,sha256=yoSPzqsgx9W_PGrScryT3P1G0bP6QWmDVAXv8d4ku9w,761 +requests_oauthlib/compliance_fixes/slack.py,sha256=KpJHuC9DOqwHfKmjZeDpCeWeBrnDznrSdtYPFcIUaLg,1453 +requests_oauthlib/compliance_fixes/weibo.py,sha256=7DytnOaDHGGHa5ECMFxkHOASgfzWEXsvAn-whE6soZI,482 +requests_oauthlib-1.0.0.dist-info/LICENSE.txt,sha256=rgGEavrYqCkf5qCJZvMBWvmo_2ddhLmB-Xk8Ei94dug,745 +requests_oauthlib-1.0.0.dist-info/METADATA,sha256=LZDSNz0JKWIbunueXqVw7iKGt54-xil2NTKlAu40UO0,8194 +requests_oauthlib-1.0.0.dist-info/RECORD,, +requests_oauthlib-1.0.0.dist-info/WHEEL,sha256=J3CsTk7Mf2JNUyhImI-mjX-fmI4oDjyiXgWT4qgZiCE,110 +requests_oauthlib-1.0.0.dist-info/top_level.txt,sha256=vx1R42DWBO64h6iu8Gco0F01TVTPWXSRWBaV1GwVRTQ,18 +requests_oauthlib-1.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +requests_oauthlib/__pycache__/oauth2_session.cpython-36.pyc,, +requests_oauthlib/__pycache__/oauth1_session.cpython-36.pyc,, +requests_oauthlib/__pycache__/__init__.cpython-36.pyc,, +requests_oauthlib/__pycache__/oauth2_auth.cpython-36.pyc,, +requests_oauthlib/__pycache__/oauth1_auth.cpython-36.pyc,, +requests_oauthlib/compliance_fixes/__pycache__/plentymarkets.cpython-36.pyc,, +requests_oauthlib/compliance_fixes/__pycache__/linkedin.cpython-36.pyc,, +requests_oauthlib/compliance_fixes/__pycache__/douban.cpython-36.pyc,, +requests_oauthlib/compliance_fixes/__pycache__/weibo.cpython-36.pyc,, +requests_oauthlib/compliance_fixes/__pycache__/mailchimp.cpython-36.pyc,, +requests_oauthlib/compliance_fixes/__pycache__/facebook.cpython-36.pyc,, +requests_oauthlib/compliance_fixes/__pycache__/slack.cpython-36.pyc,, +requests_oauthlib/compliance_fixes/__pycache__/__init__.cpython-36.pyc,, +requests_oauthlib/compliance_fixes/__pycache__/fitbit.cpython-36.pyc,, diff --git a/env/lib/python3.6/site-packages/requests_oauthlib-1.0.0.dist-info/WHEEL b/env/lib/python3.6/site-packages/requests_oauthlib-1.0.0.dist-info/WHEEL new file mode 100644 index 0000000..f21b51c --- /dev/null +++ 
b/env/lib/python3.6/site-packages/requests_oauthlib-1.0.0.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.31.0) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/env/lib/python3.6/site-packages/requests_oauthlib-1.0.0.dist-info/top_level.txt b/env/lib/python3.6/site-packages/requests_oauthlib-1.0.0.dist-info/top_level.txt new file mode 100644 index 0000000..55d4f90 --- /dev/null +++ b/env/lib/python3.6/site-packages/requests_oauthlib-1.0.0.dist-info/top_level.txt @@ -0,0 +1 @@ +requests_oauthlib diff --git a/env/lib/python3.6/site-packages/requests_oauthlib/compliance_fixes/douban.py b/env/lib/python3.6/site-packages/requests_oauthlib/compliance_fixes/douban.py new file mode 100644 index 0000000..2e45b3b --- /dev/null +++ b/env/lib/python3.6/site-packages/requests_oauthlib/compliance_fixes/douban.py @@ -0,0 +1,18 @@ +import json + +from oauthlib.common import to_unicode + + +def douban_compliance_fix(session): + + def fix_token_type(r): + token = json.loads(r.text) + token.setdefault('token_type', 'Bearer') + fixed_token = json.dumps(token) + r._content = to_unicode(fixed_token).encode('utf-8') + return r + + session._client_default_token_placement = 'query' + session.register_compliance_hook('access_token_response', fix_token_type) + + return session diff --git a/env/lib/python3.6/site-packages/requests_oauthlib/compliance_fixes/facebook.py b/env/lib/python3.6/site-packages/requests_oauthlib/compliance_fixes/facebook.py new file mode 100644 index 0000000..07181c3 --- /dev/null +++ b/env/lib/python3.6/site-packages/requests_oauthlib/compliance_fixes/facebook.py @@ -0,0 +1,33 @@ +from json import dumps +try: + from urlparse import parse_qsl +except ImportError: + from urllib.parse import parse_qsl + +from oauthlib.common import to_unicode + + +def facebook_compliance_fix(session): + + def _compliance_fix(r): + # if Facebook claims to be sending us json, let's trust them. + if 'application/json' in r.headers.get('content-type', {}): + return r + + # Facebook returns a content-type of text/plain when sending their + # x-www-form-urlencoded responses, along with a 200. If not, let's + # assume we're getting JSON and bail on the fix. + if 'text/plain' in r.headers.get('content-type', {}) and r.status_code == 200: + token = dict(parse_qsl(r.text, keep_blank_values=True)) + else: + return r + + expires = token.get('expires') + if expires is not None: + token['expires_in'] = expires + token['token_type'] = 'Bearer' + r._content = to_unicode(dumps(token)).encode('UTF-8') + return r + + session.register_compliance_hook('access_token_response', _compliance_fix) + return session diff --git a/env/lib/python3.6/site-packages/requests_oauthlib/compliance_fixes/fitbit.py b/env/lib/python3.6/site-packages/requests_oauthlib/compliance_fixes/fitbit.py new file mode 100644 index 0000000..cec533b --- /dev/null +++ b/env/lib/python3.6/site-packages/requests_oauthlib/compliance_fixes/fitbit.py @@ -0,0 +1,26 @@ +""" +The Fitbit API breaks from the OAuth2 RFC standard by returning an "errors" +object list, rather than a single "error" string. This puts hooks in place so +that oauthlib can process an error in the results from access token and refresh +token responses. This is necessary to prevent getting the generic red herring +MissingTokenError. 
+""" + +from json import loads, dumps + +from oauthlib.common import to_unicode + + +def fitbit_compliance_fix(session): + + def _missing_error(r): + token = loads(r.text) + if 'errors' in token: + # Set the error to the first one we have + token['error'] = token['errors'][0]['errorType'] + r._content = to_unicode(dumps(token)).encode('UTF-8') + return r + + session.register_compliance_hook('access_token_response', _missing_error) + session.register_compliance_hook('refresh_token_response', _missing_error) + return session diff --git a/env/lib/python3.6/site-packages/requests_oauthlib/compliance_fixes/linkedin.py b/env/lib/python3.6/site-packages/requests_oauthlib/compliance_fixes/linkedin.py new file mode 100644 index 0000000..e697ced --- /dev/null +++ b/env/lib/python3.6/site-packages/requests_oauthlib/compliance_fixes/linkedin.py @@ -0,0 +1,24 @@ +from json import loads, dumps + +from oauthlib.common import add_params_to_uri, to_unicode + + +def linkedin_compliance_fix(session): + + def _missing_token_type(r): + token = loads(r.text) + token['token_type'] = 'Bearer' + r._content = to_unicode(dumps(token)).encode('UTF-8') + return r + + def _non_compliant_param_name(url, headers, data): + token = [('oauth2_access_token', session.access_token)] + url = add_params_to_uri(url, token) + return url, headers, data + + session._client.default_token_placement = 'query' + session.register_compliance_hook('access_token_response', + _missing_token_type) + session.register_compliance_hook('protected_request', + _non_compliant_param_name) + return session diff --git a/env/lib/python3.6/site-packages/requests_oauthlib/compliance_fixes/mailchimp.py b/env/lib/python3.6/site-packages/requests_oauthlib/compliance_fixes/mailchimp.py new file mode 100644 index 0000000..ee9bc94 --- /dev/null +++ b/env/lib/python3.6/site-packages/requests_oauthlib/compliance_fixes/mailchimp.py @@ -0,0 +1,22 @@ +import json + +from oauthlib.common import to_unicode + +def mailchimp_compliance_fix(session): + def _null_scope(r): + token = json.loads(r.text) + if 'scope' in token and token['scope'] is None: + token.pop('scope') + r._content = to_unicode(json.dumps(token)).encode('utf-8') + return r + + def _non_zero_expiration(r): + token = json.loads(r.text) + if 'expires_in' in token and token['expires_in'] == 0: + token['expires_in'] = 3600 + r._content = to_unicode(json.dumps(token)).encode('utf-8') + return r + + session.register_compliance_hook('access_token_response', _null_scope) + session.register_compliance_hook('access_token_response', _non_zero_expiration) + return session diff --git a/env/lib/python3.6/site-packages/requests_oauthlib/compliance_fixes/plentymarkets.py b/env/lib/python3.6/site-packages/requests_oauthlib/compliance_fixes/plentymarkets.py new file mode 100644 index 0000000..1b8a236 --- /dev/null +++ b/env/lib/python3.6/site-packages/requests_oauthlib/compliance_fixes/plentymarkets.py @@ -0,0 +1,27 @@ +from json import dumps, loads +import re + +from oauthlib.common import to_unicode + + +def plentymarkets_compliance_fix(session): + + def _to_snake_case(n): + return re.sub('(.)([A-Z][a-z]+)', r'\1_\2', n).lower() + + def _compliance_fix(r): + # Plenty returns the Token in CamelCase instead of _ + if 'application/json' in r.headers.get('content-type', {}) and r.status_code == 200: + token = loads(r.text) + else: + return r + + fixed_token = {} + for k, v in token.items(): + fixed_token[_to_snake_case(k)] = v + + r._content = to_unicode(dumps(fixed_token)).encode('UTF-8') + return r + + 
session.register_compliance_hook('access_token_response', _compliance_fix) + return session diff --git a/env/lib/python3.6/site-packages/requests_oauthlib/compliance_fixes/slack.py b/env/lib/python3.6/site-packages/requests_oauthlib/compliance_fixes/slack.py new file mode 100644 index 0000000..ab2d938 --- /dev/null +++ b/env/lib/python3.6/site-packages/requests_oauthlib/compliance_fixes/slack.py @@ -0,0 +1,37 @@ +try: + from urlparse import urlparse, parse_qs +except ImportError: + from urllib.parse import urlparse, parse_qs + +from oauthlib.common import add_params_to_uri + + +def slack_compliance_fix(session): + def _non_compliant_param_name(url, headers, data): + # If the user has already specified the token, either in the URL + # or in a data dictionary, then there's nothing to do. + # If the specified token is different from ``session.access_token``, + # we assume the user intends to override the access token. + url_query = dict(parse_qs(urlparse(url).query)) + token = url_query.get("token") + if not token and isinstance(data, dict): + token = data.get("token") + + if token: + # Nothing to do, just return. + return url, headers, data + + if not data: + data = {"token": session.access_token} + elif isinstance(data, dict): + data["token"] = session.access_token + else: + # ``data`` is something other than a dict: maybe a stream, + # maybe a file object, maybe something else. We can't easily + # modify it, so we'll set the token by modifying the URL instead. + token = [('token', session.access_token)] + url = add_params_to_uri(url, token) + return url, headers, data + + session.register_compliance_hook('protected_request', _non_compliant_param_name) + return session diff --git a/env/lib/python3.6/site-packages/requests_oauthlib/compliance_fixes/weibo.py b/env/lib/python3.6/site-packages/requests_oauthlib/compliance_fixes/weibo.py new file mode 100644 index 0000000..28aca32 --- /dev/null +++ b/env/lib/python3.6/site-packages/requests_oauthlib/compliance_fixes/weibo.py @@ -0,0 +1,17 @@ +from json import loads, dumps + +from oauthlib.common import to_unicode + + +def weibo_compliance_fix(session): + + def _missing_token_type(r): + token = loads(r.text) + token['token_type'] = 'Bearer' + r._content = to_unicode(dumps(token)).encode('UTF-8') + return r + + session._client.default_token_placement = 'query' + session.register_compliance_hook('access_token_response', + _missing_token_type) + return session diff --git a/env/lib/python3.6/site-packages/requests_oauthlib/oauth1_auth.py b/env/lib/python3.6/site-packages/requests_oauthlib/oauth1_auth.py new file mode 100644 index 0000000..4626384 --- /dev/null +++ b/env/lib/python3.6/site-packages/requests_oauthlib/oauth1_auth.py @@ -0,0 +1,95 @@ +# -*- coding: utf-8 -*- +from __future__ import unicode_literals + +import logging + +from oauthlib.common import extract_params +from oauthlib.oauth1 import Client, SIGNATURE_HMAC, SIGNATURE_TYPE_AUTH_HEADER +from oauthlib.oauth1 import SIGNATURE_TYPE_BODY +from requests.compat import is_py3 +from requests.utils import to_native_string +from requests.auth import AuthBase + +CONTENT_TYPE_FORM_URLENCODED = 'application/x-www-form-urlencoded' +CONTENT_TYPE_MULTI_PART = 'multipart/form-data' + +if is_py3: + unicode = str + +log = logging.getLogger(__name__) + +# OBS!: Correct signing of requests are conditional on invoking OAuth1 +# as the last step of preparing a request, or at least having the +# content-type set properly. 
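+# The note above is easiest to see in a usage sketch: OAuth1 (defined below)
+# is handed to requests as the ``auth`` handler, so signing happens while the
+# request is being prepared. The credentials and endpoint URL here are
+# placeholders, not values from any real provider.
+#
+# .. code-block:: python
+#
+#     import requests
+#     from requests_oauthlib import OAuth1
+#
+#     auth = OAuth1('client_key', client_secret='client_secret',
+#                   resource_owner_key='resource_owner_key',
+#                   resource_owner_secret='resource_owner_secret')
+#     # Signing runs during request preparation, after the body and
+#     # content-type have been set, as required by the note above.
+#     r = requests.get('https://api.example.com/protected/resource', auth=auth)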
+class OAuth1(AuthBase): + """Signs the request using OAuth 1 (RFC5849)""" + + client_class = Client + + def __init__(self, client_key, + client_secret=None, + resource_owner_key=None, + resource_owner_secret=None, + callback_uri=None, + signature_method=SIGNATURE_HMAC, + signature_type=SIGNATURE_TYPE_AUTH_HEADER, + rsa_key=None, verifier=None, + decoding='utf-8', + client_class=None, + force_include_body=False, + **kwargs): + + try: + signature_type = signature_type.upper() + except AttributeError: + pass + + client_class = client_class or self.client_class + + self.force_include_body = force_include_body + + self.client = client_class(client_key, client_secret, resource_owner_key, + resource_owner_secret, callback_uri, signature_method, + signature_type, rsa_key, verifier, decoding=decoding, **kwargs) + + def __call__(self, r): + """Add OAuth parameters to the request. + + Parameters may be included from the body if the content-type is + urlencoded, if no content type is set a guess is made. + """ + # Overwriting url is safe here as request will not modify it past + # this point. + log.debug('Signing request %s using client %s', r, self.client) + + content_type = r.headers.get('Content-Type', '') + if (not content_type and extract_params(r.body) + or self.client.signature_type == SIGNATURE_TYPE_BODY): + content_type = CONTENT_TYPE_FORM_URLENCODED + if not isinstance(content_type, unicode): + content_type = content_type.decode('utf-8') + + is_form_encoded = (CONTENT_TYPE_FORM_URLENCODED in content_type) + + log.debug('Including body in call to sign: %s', + is_form_encoded or self.force_include_body) + + if is_form_encoded: + r.headers['Content-Type'] = CONTENT_TYPE_FORM_URLENCODED + r.url, headers, r.body = self.client.sign( + unicode(r.url), unicode(r.method), r.body or '', r.headers) + elif self.force_include_body: + # To allow custom clients to work on non form encoded bodies. + r.url, headers, r.body = self.client.sign( + unicode(r.url), unicode(r.method), r.body or '', r.headers) + else: + # Omit body data in the signing of non form-encoded requests + r.url, headers, _ = self.client.sign( + unicode(r.url), unicode(r.method), None, r.headers) + + r.prepare_headers(headers) + r.url = to_native_string(r.url) + log.debug('Updated url: %s', r.url) + log.debug('Updated headers: %s', headers) + log.debug('Updated body: %r', r.body) + return r diff --git a/env/lib/python3.6/site-packages/requests_oauthlib/oauth1_session.py b/env/lib/python3.6/site-packages/requests_oauthlib/oauth1_session.py new file mode 100644 index 0000000..53b7b6d --- /dev/null +++ b/env/lib/python3.6/site-packages/requests_oauthlib/oauth1_session.py @@ -0,0 +1,396 @@ +from __future__ import unicode_literals + +try: + from urlparse import urlparse +except ImportError: + from urllib.parse import urlparse + +import logging + +from oauthlib.common import add_params_to_uri +from oauthlib.common import urldecode as _urldecode +from oauthlib.oauth1 import ( + SIGNATURE_HMAC, SIGNATURE_RSA, SIGNATURE_TYPE_AUTH_HEADER +) +import requests + +from . 
import OAuth1 + + +log = logging.getLogger(__name__) + + +def urldecode(body): + """Parse query or json to python dictionary""" + try: + return _urldecode(body) + except: + import json + return json.loads(body) + + +class TokenRequestDenied(ValueError): + + def __init__(self, message, response): + super(TokenRequestDenied, self).__init__(message) + self.response = response + + @property + def status_code(self): + """For backwards-compatibility purposes""" + return self.response.status_code + + +class TokenMissing(ValueError): + def __init__(self, message, response): + super(TokenMissing, self).__init__(message) + self.response = response + + +class VerifierMissing(ValueError): + pass + + +class OAuth1Session(requests.Session): + """Request signing and convenience methods for the oauth dance. + + What is the difference between OAuth1Session and OAuth1? + + OAuth1Session actually uses OAuth1 internally and its purpose is to assist + in the OAuth workflow through convenience methods to prepare authorization + URLs and parse the various token and redirection responses. It also provide + rudimentary validation of responses. + + An example of the OAuth workflow using a basic CLI app and Twitter. + + >>> # Credentials obtained during the registration. + >>> client_key = 'client key' + >>> client_secret = 'secret' + >>> callback_uri = 'https://127.0.0.1/callback' + >>> + >>> # Endpoints found in the OAuth provider API documentation + >>> request_token_url = 'https://api.twitter.com/oauth/request_token' + >>> authorization_url = 'https://api.twitter.com/oauth/authorize' + >>> access_token_url = 'https://api.twitter.com/oauth/access_token' + >>> + >>> oauth_session = OAuth1Session(client_key,client_secret=client_secret, callback_uri=callback_uri) + >>> + >>> # First step, fetch the request token. + >>> oauth_session.fetch_request_token(request_token_url) + { + 'oauth_token': 'kjerht2309u', + 'oauth_token_secret': 'lsdajfh923874', + } + >>> + >>> # Second step. Follow this link and authorize + >>> oauth_session.authorization_url(authorization_url) + 'https://api.twitter.com/oauth/authorize?oauth_token=sdf0o9823sjdfsdf&oauth_callback=https%3A%2F%2F127.0.0.1%2Fcallback' + >>> + >>> # Third step. Fetch the access token + >>> redirect_response = raw_input('Paste the full redirect URL here.') + >>> oauth_session.parse_authorization_response(redirect_response) + { + 'oauth_token: 'kjerht2309u', + 'oauth_token_secret: 'lsdajfh923874', + 'oauth_verifier: 'w34o8967345', + } + >>> oauth_session.fetch_access_token(access_token_url) + { + 'oauth_token': 'sdf0o9823sjdfsdf', + 'oauth_token_secret': '2kjshdfp92i34asdasd', + } + >>> # Done. You can now make OAuth requests. + >>> status_url = 'http://api.twitter.com/1/statuses/update.json' + >>> new_status = {'status': 'hello world!'} + >>> oauth_session.post(status_url, data=new_status) + <Response [200]> + """ + + def __init__(self, client_key, + client_secret=None, + resource_owner_key=None, + resource_owner_secret=None, + callback_uri=None, + signature_method=SIGNATURE_HMAC, + signature_type=SIGNATURE_TYPE_AUTH_HEADER, + rsa_key=None, + verifier=None, + client_class=None, + force_include_body=False, + **kwargs): + """Construct the OAuth 1 session. + + :param client_key: A client specific identifier. + :param client_secret: A client specific secret used to create HMAC and + plaintext signatures. + :param resource_owner_key: A resource owner key, also referred to as + request token or access token depending on + when in the workflow it is used. 
+ :param resource_owner_secret: A resource owner secret obtained with + either a request or access token. Often + referred to as token secret. + :param callback_uri: The URL the user is redirect back to after + authorization. + :param signature_method: Signature methods determine how the OAuth + signature is created. The three options are + oauthlib.oauth1.SIGNATURE_HMAC (default), + oauthlib.oauth1.SIGNATURE_RSA and + oauthlib.oauth1.SIGNATURE_PLAIN. + :param signature_type: Signature type decides where the OAuth + parameters are added. Either in the + Authorization header (default) or to the URL + query parameters or the request body. Defined as + oauthlib.oauth1.SIGNATURE_TYPE_AUTH_HEADER, + oauthlib.oauth1.SIGNATURE_TYPE_QUERY and + oauthlib.oauth1.SIGNATURE_TYPE_BODY + respectively. + :param rsa_key: The private RSA key as a string. Can only be used with + signature_method=oauthlib.oauth1.SIGNATURE_RSA. + :param verifier: A verifier string to prove authorization was granted. + :param client_class: A subclass of `oauthlib.oauth1.Client` to use with + `requests_oauthlib.OAuth1` instead of the default + :param force_include_body: Always include the request body in the + signature creation. + :param **kwargs: Additional keyword arguments passed to `OAuth1` + """ + super(OAuth1Session, self).__init__() + self._client = OAuth1(client_key, + client_secret=client_secret, + resource_owner_key=resource_owner_key, + resource_owner_secret=resource_owner_secret, + callback_uri=callback_uri, + signature_method=signature_method, + signature_type=signature_type, + rsa_key=rsa_key, + verifier=verifier, + client_class=client_class, + force_include_body=force_include_body, + **kwargs) + self.auth = self._client + + @property + def token(self): + oauth_token = self._client.client.resource_owner_key + oauth_token_secret = self._client.client.resource_owner_secret + oauth_verifier = self._client.client.verifier + + token_dict = {} + if oauth_token: + token_dict["oauth_token"] = oauth_token + if oauth_token_secret: + token_dict["oauth_token_secret"] = oauth_token_secret + if oauth_verifier: + token_dict["oauth_verifier"] = oauth_verifier + + return token_dict + + @token.setter + def token(self, value): + self._populate_attributes(value) + + @property + def authorized(self): + """Boolean that indicates whether this session has an OAuth token + or not. If `self.authorized` is True, you can reasonably expect + OAuth-protected requests to the resource to succeed. If + `self.authorized` is False, you need the user to go through the OAuth + authentication dance before OAuth-protected requests to the resource + will succeed. + """ + if self._client.client.signature_method == SIGNATURE_RSA: + # RSA only uses resource_owner_key + return bool(self._client.client.resource_owner_key) + else: + # other methods of authentication use all three pieces + return ( + bool(self._client.client.client_secret) and + bool(self._client.client.resource_owner_key) and + bool(self._client.client.resource_owner_secret) + ) + + def authorization_url(self, url, request_token=None, **kwargs): + """Create an authorization URL by appending request_token and optional + kwargs to url. + + This is the second step in the OAuth 1 workflow. The user should be + redirected to this authorization URL, grant access to you, and then + be redirected back to you. The redirection back can either be specified + during client registration or by supplying a callback URI per request. + + :param url: The authorization endpoint URL. 
+ :param request_token: The previously obtained request token. + :param kwargs: Optional parameters to append to the URL. + :returns: The authorization URL with new parameters embedded. + + An example using a registered default callback URI. + + >>> request_token_url = 'https://api.twitter.com/oauth/request_token' + >>> authorization_url = 'https://api.twitter.com/oauth/authorize' + >>> oauth_session = OAuth1Session('client-key', client_secret='secret') + >>> oauth_session.fetch_request_token(request_token_url) + { + 'oauth_token': 'sdf0o9823sjdfsdf', + 'oauth_token_secret': '2kjshdfp92i34asdasd', + } + >>> oauth_session.authorization_url(authorization_url) + 'https://api.twitter.com/oauth/authorize?oauth_token=sdf0o9823sjdfsdf' + >>> oauth_session.authorization_url(authorization_url, foo='bar') + 'https://api.twitter.com/oauth/authorize?oauth_token=sdf0o9823sjdfsdf&foo=bar' + + An example using an explicit callback URI. + + >>> request_token_url = 'https://api.twitter.com/oauth/request_token' + >>> authorization_url = 'https://api.twitter.com/oauth/authorize' + >>> oauth_session = OAuth1Session('client-key', client_secret='secret', callback_uri='https://127.0.0.1/callback') + >>> oauth_session.fetch_request_token(request_token_url) + { + 'oauth_token': 'sdf0o9823sjdfsdf', + 'oauth_token_secret': '2kjshdfp92i34asdasd', + } + >>> oauth_session.authorization_url(authorization_url) + 'https://api.twitter.com/oauth/authorize?oauth_token=sdf0o9823sjdfsdf&oauth_callback=https%3A%2F%2F127.0.0.1%2Fcallback' + """ + kwargs['oauth_token'] = request_token or self._client.client.resource_owner_key + log.debug('Adding parameters %s to url %s', kwargs, url) + return add_params_to_uri(url, kwargs.items()) + + def fetch_request_token(self, url, realm=None, **request_kwargs): + """Fetch a request token. + + This is the first step in the OAuth 1 workflow. A request token is + obtained by making a signed post request to url. The token is then + parsed from the application/x-www-form-urlencoded response and ready + to be used to construct an authorization url. + + :param url: The request token endpoint URL. + :param realm: A list of realms to request access to. + :param \*\*request_kwargs: Optional arguments passed to ''post'' + function in ''requests.Session'' + :returns: The response in dict format. + + Note that a previously set callback_uri will be reset for your + convenience, or else signature creation will be incorrect on + consecutive requests. + + >>> request_token_url = 'https://api.twitter.com/oauth/request_token' + >>> oauth_session = OAuth1Session('client-key', client_secret='secret') + >>> oauth_session.fetch_request_token(request_token_url) + { + 'oauth_token': 'sdf0o9823sjdfsdf', + 'oauth_token_secret': '2kjshdfp92i34asdasd', + } + """ + self._client.client.realm = ' '.join(realm) if realm else None + token = self._fetch_token(url, **request_kwargs) + log.debug('Resetting callback_uri and realm (not needed in next phase).') + self._client.client.callback_uri = None + self._client.client.realm = None + return token + + def fetch_access_token(self, url, verifier=None, **request_kwargs): + """Fetch an access token. + + This is the final step in the OAuth 1 workflow. An access token is + obtained using all previously obtained credentials, including the + verifier from the authorization step. + + Note that a previously set verifier will be reset for your + convenience, or else signature creation will be incorrect on + consecutive requests. 
+ + >>> access_token_url = 'https://api.twitter.com/oauth/access_token' + >>> redirect_response = 'https://127.0.0.1/callback?oauth_token=kjerht2309uf&oauth_token_secret=lsdajfh923874&oauth_verifier=w34o8967345' + >>> oauth_session = OAuth1Session('client-key', client_secret='secret') + >>> oauth_session.parse_authorization_response(redirect_response) + { + 'oauth_token: 'kjerht2309u', + 'oauth_token_secret: 'lsdajfh923874', + 'oauth_verifier: 'w34o8967345', + } + >>> oauth_session.fetch_access_token(access_token_url) + { + 'oauth_token': 'sdf0o9823sjdfsdf', + 'oauth_token_secret': '2kjshdfp92i34asdasd', + } + """ + if verifier: + self._client.client.verifier = verifier + if not getattr(self._client.client, 'verifier', None): + raise VerifierMissing('No client verifier has been set.') + token = self._fetch_token(url, **request_kwargs) + log.debug('Resetting verifier attribute, should not be used anymore.') + self._client.client.verifier = None + return token + + def parse_authorization_response(self, url): + """Extract parameters from the post authorization redirect response URL. + + :param url: The full URL that resulted from the user being redirected + back from the OAuth provider to you, the client. + :returns: A dict of parameters extracted from the URL. + + >>> redirect_response = 'https://127.0.0.1/callback?oauth_token=kjerht2309uf&oauth_token_secret=lsdajfh923874&oauth_verifier=w34o8967345' + >>> oauth_session = OAuth1Session('client-key', client_secret='secret') + >>> oauth_session.parse_authorization_response(redirect_response) + { + 'oauth_token: 'kjerht2309u', + 'oauth_token_secret: 'lsdajfh923874', + 'oauth_verifier: 'w34o8967345', + } + """ + log.debug('Parsing token from query part of url %s', url) + token = dict(urldecode(urlparse(url).query)) + log.debug('Updating internal client token attribute.') + self._populate_attributes(token) + self.token = token + return token + + def _populate_attributes(self, token): + if 'oauth_token' in token: + self._client.client.resource_owner_key = token['oauth_token'] + else: + raise TokenMissing( + 'Response does not contain a token: {resp}'.format(resp=token), + token, + ) + if 'oauth_token_secret' in token: + self._client.client.resource_owner_secret = ( + token['oauth_token_secret']) + if 'oauth_verifier' in token: + self._client.client.verifier = token['oauth_verifier'] + + def _fetch_token(self, url, **request_kwargs): + log.debug('Fetching token from %s using client %s', url, self._client.client) + r = self.post(url, **request_kwargs) + + if r.status_code >= 400: + error = "Token request failed with code %s, response was '%s'." + raise TokenRequestDenied(error % (r.status_code, r.text), r) + + log.debug('Decoding token from response "%s"', r.text) + try: + token = dict(urldecode(r.text.strip())) + except ValueError as e: + error = ("Unable to decode token from token response. " + "This is commonly caused by an unsuccessful request where" + " a non urlencoded error message is returned. " + "The decoding error was %s""" % e) + raise ValueError(error) + + log.debug('Obtained token %s', token) + log.debug('Updating internal client attributes from token data.') + self._populate_attributes(token) + self.token = token + return token + + def rebuild_auth(self, prepared_request, response): + """ + When being redirected we should always strip Authorization + header, since nonce may not be reused as per OAuth spec. 
+ """ + if 'Authorization' in prepared_request.headers: + # If we get redirected to a new host, we should strip out + # any authentication headers. + prepared_request.headers.pop('Authorization', True) + prepared_request.prepare_auth(self.auth) + return diff --git a/env/lib/python3.6/site-packages/requests_oauthlib/oauth2_auth.py b/env/lib/python3.6/site-packages/requests_oauthlib/oauth2_auth.py new file mode 100644 index 0000000..0ce58cc --- /dev/null +++ b/env/lib/python3.6/site-packages/requests_oauthlib/oauth2_auth.py @@ -0,0 +1,36 @@ +from __future__ import unicode_literals +from oauthlib.oauth2 import WebApplicationClient, InsecureTransportError +from oauthlib.oauth2 import is_secure_transport +from requests.auth import AuthBase + + +class OAuth2(AuthBase): + """Adds proof of authorization (OAuth2 token) to the request.""" + + def __init__(self, client_id=None, client=None, token=None): + """Construct a new OAuth 2 authorization object. + + :param client_id: Client id obtained during registration + :param client: :class:`oauthlib.oauth2.Client` to be used. Default is + WebApplicationClient which is useful for any + hosted application but not mobile or desktop. + :param token: Token dictionary, must include access_token + and token_type. + """ + self._client = client or WebApplicationClient(client_id, token=token) + if token: + for k, v in token.items(): + setattr(self._client, k, v) + + def __call__(self, r): + """Append an OAuth 2 token to the request. + + Note that currently HTTPS is required for all requests. There may be + a token type that allows for plain HTTP in the future and then this + should be updated to allow plain HTTP on a white list basis. + """ + if not is_secure_transport(r.url): + raise InsecureTransportError() + r.url, r.headers, r.body = self._client.add_token(r.url, + http_method=r.method, body=r.body, headers=r.headers) + return r diff --git a/env/lib/python3.6/site-packages/requests_oauthlib/oauth2_session.py b/env/lib/python3.6/site-packages/requests_oauthlib/oauth2_session.py new file mode 100644 index 0000000..e5ad72c --- /dev/null +++ b/env/lib/python3.6/site-packages/requests_oauthlib/oauth2_session.py @@ -0,0 +1,376 @@ +from __future__ import unicode_literals + +import logging + +from oauthlib.common import generate_token, urldecode +from oauthlib.oauth2 import WebApplicationClient, InsecureTransportError +from oauthlib.oauth2 import TokenExpiredError, is_secure_transport +import requests + +log = logging.getLogger(__name__) + + +class TokenUpdated(Warning): + def __init__(self, token): + super(TokenUpdated, self).__init__() + self.token = token + + +class OAuth2Session(requests.Session): + """Versatile OAuth 2 extension to :class:`requests.Session`. + + Supports any grant type adhering to :class:`oauthlib.oauth2.Client` spec + including the four core OAuth 2 grants. + + Can be used to create authorization urls, fetch tokens and access protected + resources using the :class:`requests.Session` interface you are used to. + + - :class:`oauthlib.oauth2.WebApplicationClient` (default): Authorization Code Grant + - :class:`oauthlib.oauth2.MobileApplicationClient`: Implicit Grant + - :class:`oauthlib.oauth2.LegacyApplicationClient`: Password Credentials Grant + - :class:`oauthlib.oauth2.BackendApplicationClient`: Client Credentials Grant + + Note that the only time you will be using Implicit Grant from python is if + you are driving a user agent able to obtain URL fragments. 
+ """ + + def __init__(self, client_id=None, client=None, auto_refresh_url=None, + auto_refresh_kwargs=None, scope=None, redirect_uri=None, token=None, + state=None, token_updater=None, **kwargs): + """Construct a new OAuth 2 client session. + + :param client_id: Client id obtained during registration + :param client: :class:`oauthlib.oauth2.Client` to be used. Default is + WebApplicationClient which is useful for any + hosted application but not mobile or desktop. + :param scope: List of scopes you wish to request access to + :param redirect_uri: Redirect URI you registered as callback + :param token: Token dictionary, must include access_token + and token_type. + :param state: State string used to prevent CSRF. This will be given + when creating the authorization url and must be supplied + when parsing the authorization response. + Can be either a string or a no argument callable. + :auto_refresh_url: Refresh token endpoint URL, must be HTTPS. Supply + this if you wish the client to automatically refresh + your access tokens. + :auto_refresh_kwargs: Extra arguments to pass to the refresh token + endpoint. + :token_updater: Method with one argument, token, to be used to update + your token database on automatic token refresh. If not + set a TokenUpdated warning will be raised when a token + has been refreshed. This warning will carry the token + in its token argument. + :param kwargs: Arguments to pass to the Session constructor. + """ + super(OAuth2Session, self).__init__(**kwargs) + self._client = client or WebApplicationClient(client_id, token=token) + self.token = token or {} + self.scope = scope + self.redirect_uri = redirect_uri + self.state = state or generate_token + self._state = state + self.auto_refresh_url = auto_refresh_url + self.auto_refresh_kwargs = auto_refresh_kwargs or {} + self.token_updater = token_updater + + # Allow customizations for non compliant providers through various + # hooks to adjust requests and responses. + self.compliance_hook = { + 'access_token_response': set(), + 'refresh_token_response': set(), + 'protected_request': set(), + } + + def new_state(self): + """Generates a state string to be used in authorizations.""" + try: + self._state = self.state() + log.debug('Generated new state %s.', self._state) + except TypeError: + self._state = self.state + log.debug('Re-using previously supplied state %s.', self._state) + return self._state + + @property + def client_id(self): + return getattr(self._client, "client_id", None) + + @client_id.setter + def client_id(self, value): + self._client.client_id = value + + @client_id.deleter + def client_id(self): + del self._client.client_id + + @property + def token(self): + return getattr(self._client, "token", None) + + @token.setter + def token(self, value): + self._client.token = value + self._client._populate_attributes(value) + + @property + def access_token(self): + return getattr(self._client, "access_token", None) + + @access_token.setter + def access_token(self, value): + self._client.access_token = value + + @access_token.deleter + def access_token(self): + del self._client.access_token + + @property + def authorized(self): + """Boolean that indicates whether this session has an OAuth token + or not. If `self.authorized` is True, you can reasonably expect + OAuth-protected requests to the resource to succeed. If + `self.authorized` is False, you need the user to go through the OAuth + authentication dance before OAuth-protected requests to the resource + will succeed. 
+ """ + return bool(self.access_token) + + def authorization_url(self, url, state=None, **kwargs): + """Form an authorization URL. + + :param url: Authorization endpoint url, must be HTTPS. + :param state: An optional state string for CSRF protection. If not + given it will be generated for you. + :param kwargs: Extra parameters to include. + :return: authorization_url, state + """ + state = state or self.new_state() + return self._client.prepare_request_uri(url, + redirect_uri=self.redirect_uri, + scope=self.scope, + state=state, + **kwargs), state + + def fetch_token(self, token_url, code=None, authorization_response=None, + body='', auth=None, username=None, password=None, method='POST', + timeout=None, headers=None, verify=True, proxies=None, **kwargs): + """Generic method for fetching an access token from the token endpoint. + + If you are using the MobileApplicationClient you will want to use + token_from_fragment instead of fetch_token. + + :param token_url: Token endpoint URL, must use HTTPS. + :param code: Authorization code (used by WebApplicationClients). + :param authorization_response: Authorization response URL, the callback + URL of the request back to you. Used by + WebApplicationClients instead of code. + :param body: Optional application/x-www-form-urlencoded body to add the + include in the token request. Prefer kwargs over body. + :param auth: An auth tuple or method as accepted by requests. + :param username: Username used by LegacyApplicationClients. + :param password: Password used by LegacyApplicationClients. + :param method: The HTTP method used to make the request. Defaults + to POST, but may also be GET. Other methods should + be added as needed. + :param headers: Dict to default request headers with. + :param timeout: Timeout of the request in seconds. + :param verify: Verify SSL certificate. + :param kwargs: Extra parameters to include in the token request. 
+ :return: A token dict + """ + if not is_secure_transport(token_url): + raise InsecureTransportError() + + if not code and authorization_response: + self._client.parse_request_uri_response(authorization_response, + state=self._state) + code = self._client.code + elif not code and isinstance(self._client, WebApplicationClient): + code = self._client.code + if not code: + raise ValueError('Please supply either code or ' + 'authorization_response parameters.') + + + body = self._client.prepare_request_body(code=code, body=body, + redirect_uri=self.redirect_uri, username=username, + password=password, **kwargs) + + client_id = kwargs.get('client_id', '') + if auth is None: + if client_id: + log.debug('Encoding client_id "%s" with client_secret as Basic auth credentials.', client_id) + client_secret = kwargs.get('client_secret', '') + client_secret = client_secret if client_secret is not None else '' + auth = requests.auth.HTTPBasicAuth(client_id, client_secret) + elif username: + if password is None: + raise ValueError('Username was supplied, but not password.') + log.debug('Encoding username, password as Basic auth credentials.') + auth = requests.auth.HTTPBasicAuth(username, password) + + headers = headers or { + 'Accept': 'application/json', + 'Content-Type': 'application/x-www-form-urlencoded;charset=UTF-8', + } + self.token = {} + if method.upper() == 'POST': + r = self.post(token_url, data=dict(urldecode(body)), + timeout=timeout, headers=headers, auth=auth, + verify=verify, proxies=proxies) + log.debug('Prepared fetch token request body %s', body) + elif method.upper() == 'GET': + # if method is not 'POST', switch body to querystring and GET + r = self.get(token_url, params=dict(urldecode(body)), + timeout=timeout, headers=headers, auth=auth, + verify=verify, proxies=proxies) + log.debug('Prepared fetch token request querystring %s', body) + else: + raise ValueError('The method kwarg must be POST or GET.') + + log.debug('Request to fetch token completed with status %s.', + r.status_code) + log.debug('Request headers were %s', r.request.headers) + log.debug('Request body was %s', r.request.body) + log.debug('Response headers were %s and content %s.', + r.headers, r.text) + log.debug('Invoking %d token response hooks.', + len(self.compliance_hook['access_token_response'])) + for hook in self.compliance_hook['access_token_response']: + log.debug('Invoking hook %s.', hook) + r = hook(r) + + self._client.parse_request_body_response(r.text, scope=self.scope) + self.token = self._client.token + log.debug('Obtained token %s.', self.token) + return self.token + + def token_from_fragment(self, authorization_response): + """Parse token from the URI fragment, used by MobileApplicationClients. + + :param authorization_response: The full URL of the redirect back to you + :return: A token dict + """ + self._client.parse_request_uri_response(authorization_response, + state=self._state) + self.token = self._client.token + return self.token + + def refresh_token(self, token_url, refresh_token=None, body='', auth=None, + timeout=None, headers=None, verify=True, proxies=None, **kwargs): + """Fetch a new access token using a refresh token. + + :param token_url: The token endpoint, must be HTTPS. + :param refresh_token: The refresh_token to use. + :param body: Optional application/x-www-form-urlencoded body to add the + include in the token request. Prefer kwargs over body. + :param auth: An auth tuple or method as accepted by requests. + :param timeout: Timeout of the request in seconds. 
+        :param verify: Verify SSL certificate.
+        :param kwargs: Extra parameters to include in the token request.
+        :return: A token dict
+        """
+        if not token_url:
+            raise ValueError('No token endpoint set for auto_refresh.')
+
+        if not is_secure_transport(token_url):
+            raise InsecureTransportError()
+
+        refresh_token = refresh_token or self.token.get('refresh_token')
+
+        log.debug('Adding auto refresh keyword arguments %s.',
+                  self.auto_refresh_kwargs)
+        kwargs.update(self.auto_refresh_kwargs)
+        body = self._client.prepare_refresh_body(body=body,
+                refresh_token=refresh_token, scope=self.scope, **kwargs)
+        log.debug('Prepared refresh token request body %s', body)
+
+        if headers is None:
+            headers = {
+                'Accept': 'application/json',
+                'Content-Type': (
+                    'application/x-www-form-urlencoded;charset=UTF-8'
+                ),
+            }
+
+        r = self.post(token_url, data=dict(urldecode(body)), auth=auth,
+                      timeout=timeout, headers=headers, verify=verify, withhold_token=True, proxies=proxies)
+        log.debug('Request to refresh token completed with status %s.',
+                  r.status_code)
+        log.debug('Response headers were %s and content %s.',
+                  r.headers, r.text)
+        log.debug('Invoking %d token response hooks.',
+                  len(self.compliance_hook['refresh_token_response']))
+        for hook in self.compliance_hook['refresh_token_response']:
+            log.debug('Invoking hook %s.', hook)
+            r = hook(r)
+
+        self.token = self._client.parse_request_body_response(r.text, scope=self.scope)
+        if 'refresh_token' not in self.token:
+            log.debug('No new refresh token given. Re-using old.')
+            self.token['refresh_token'] = refresh_token
+        return self.token
+
+    def request(self, method, url, data=None, headers=None, withhold_token=False,
+                client_id=None, client_secret=None, **kwargs):
+        """Intercept all requests and add the OAuth 2 token if present."""
+        if not is_secure_transport(url):
+            raise InsecureTransportError()
+        if self.token and not withhold_token:
+            log.debug('Invoking %d protected resource request hooks.',
+                      len(self.compliance_hook['protected_request']))
+            for hook in self.compliance_hook['protected_request']:
+                log.debug('Invoking hook %s.', hook)
+                url, headers, data = hook(url, headers, data)
+
+            log.debug('Adding token %s to request.', self.token)
+            try:
+                url, headers, data = self._client.add_token(url,
+                        http_method=method, body=data, headers=headers)
+            # Attempt to retrieve and save new access token if expired
+            except TokenExpiredError:
+                if self.auto_refresh_url:
+                    log.debug('Auto refresh is set, attempting to refresh at %s.',
+                              self.auto_refresh_url)
+
+                    # We mustn't pass auth twice.
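+                    # Build Basic auth from any explicit client credentials,
+                    # refresh the token, hand the new token to token_updater
+                    # if one is registered (otherwise TokenUpdated is raised),
+                    # and then retry attaching the token to the request.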
+                    auth = kwargs.pop('auth', None)
+                    if client_id and client_secret and (auth is None):
+                        log.debug('Encoding client_id "%s" with client_secret as Basic auth credentials.', client_id)
+                        auth = requests.auth.HTTPBasicAuth(client_id, client_secret)
+                    token = self.refresh_token(
+                        self.auto_refresh_url, auth=auth, **kwargs
+                    )
+                    if self.token_updater:
+                        log.debug('Updating token to %s using %s.',
+                                  token, self.token_updater)
+                        self.token_updater(token)
+                        url, headers, data = self._client.add_token(url,
+                                http_method=method, body=data, headers=headers)
+                    else:
+                        raise TokenUpdated(token)
+                else:
+                    raise
+
+        log.debug('Requesting url %s using method %s.', url, method)
+        log.debug('Supplying headers %s and data %s', headers, data)
+        log.debug('Passing through keyword arguments %s.', kwargs)
+        return super(OAuth2Session, self).request(method, url,
+                headers=headers, data=data, **kwargs)
+
+    def register_compliance_hook(self, hook_type, hook):
+        """Register a hook for request/response tweaking.
+
+        Available hooks are:
+        access_token_response invoked before token parsing.
+        refresh_token_response invoked before refresh token parsing.
+        protected_request invoked before making a request.
+
+        If you find a new hook is needed, please send a GitHub pull request
+        or open an issue.
+        """
+        if hook_type not in self.compliance_hook:
+            raise ValueError('Hook type %s is not in %s.' %
+                             (hook_type, self.compliance_hook))
+        self.compliance_hook[hook_type].add(hook)
diff --git a/env/lib/python3.6/site-packages/setuptools-28.8.0.dist-info/DESCRIPTION.rst b/env/lib/python3.6/site-packages/setuptools-28.8.0.dist-info/DESCRIPTION.rst
new file mode 100644
index 0000000..f2dad24
--- /dev/null
+++ b/env/lib/python3.6/site-packages/setuptools-28.8.0.dist-info/DESCRIPTION.rst
@@ -0,0 +1,243 @@
+===============================
+Installing and Using Setuptools
+===============================
+
+.. contents:: **Table of Contents**
+
+
+.. image:: https://setuptools.readthedocs.io/en/latest/?badge=latest
+    :target: https://setuptools.readthedocs.io
+
+-------------------------
+Installation Instructions
+-------------------------
+
+The recommended way to bootstrap setuptools on any system is to download
+`ez_setup.py`_ and run it using the target Python environment. Different
+operating systems have different recommended techniques to accomplish this
+basic routine, so below are some examples to get you started.
+
+Setuptools requires Python 2.6 or later. To install setuptools
+on Python 2.4 or Python 2.5, use the `bootstrap script for Setuptools 1.x
+<https://raw.githubusercontent.com/pypa/setuptools/bootstrap-py24/ez_setup.py>`_.
+
+The link provided to ez_setup.py is a bookmark to the bootstrap script for
+the latest known stable release.
+
+.. _ez_setup.py: https://bootstrap.pypa.io/ez_setup.py
+
+Windows (Powershell 3 or later)
+===============================
+
+For best results, uninstall previous versions FIRST (see `Uninstalling`_).
+
+Using Windows 8 (which includes PowerShell 3) or earlier versions of Windows
+with PowerShell 3 installed, it's possible to install with one simple
+Powershell command. Start up Powershell and paste this command::
+
+    > (Invoke-WebRequest https://bootstrap.pypa.io/ez_setup.py).Content | python -
+
+..
image:: https://badges.gitter.im/pypa/setuptools.svg + :alt: Join the chat at https://gitter.im/pypa/setuptools + :target: https://gitter.im/pypa/setuptools?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge + +You must start the Powershell with Administrative privileges or you may choose +to install a user-local installation:: + + > (Invoke-WebRequest https://bootstrap.pypa.io/ez_setup.py).Content | python - --user + +If you have Python 3.3 or later, you can use the ``py`` command to install to +different Python versions. For example, to install to Python 3.3 if you have +Python 2.7 installed:: + + > (Invoke-WebRequest https://bootstrap.pypa.io/ez_setup.py).Content | py -3 - + +The recommended way to install setuptools on Windows is to download +`ez_setup.py`_ and run it. The script will download the appropriate +distribution file and install it for you. + +Once installation is complete, you will find an ``easy_install`` program in +your Python ``Scripts`` subdirectory. For simple invocation and best results, +add this directory to your ``PATH`` environment variable, if it is not already +present. If you did a user-local install, the ``Scripts`` subdirectory is +``$env:APPDATA\Python\Scripts``. + + +Windows (simplified) +==================== + +For Windows without PowerShell 3 or for installation without a command-line, +download `ez_setup.py`_ using your preferred web browser or other technique +and "run" that file. + + +Unix (wget) +=========== + +Most Linux distributions come with wget. + +Download `ez_setup.py`_ and run it using the target Python version. The script +will download the appropriate version and install it for you:: + + > wget https://bootstrap.pypa.io/ez_setup.py -O - | python + +Note that you will may need to invoke the command with superuser privileges to +install to the system Python:: + + > wget https://bootstrap.pypa.io/ez_setup.py -O - | sudo python + +Alternatively, Setuptools may be installed to a user-local path:: + + > wget https://bootstrap.pypa.io/ez_setup.py -O - | python - --user + +Note that on some older systems (noted on Debian 6 and CentOS 5 installations), +`wget` may refuse to download `ez_setup.py`, complaining that the certificate common name `*.c.ssl.fastly.net` +does not match the host name `bootstrap.pypa.io`. In addition, the `ez_setup.py` script may then encounter similar problems using +`wget` internally to download `setuptools-x.y.zip`, complaining that the certificate common name of `www.python.org` does not match the +host name `pypi.python.org`. Those are known issues, related to a bug in the older versions of `wget` +(see `Issue 59 <https://bitbucket.org/pypa/pypi/issue/59#comment-5881915>`_). If you happen to encounter them, +install Setuptools as follows:: + + > wget --no-check-certificate https://bootstrap.pypa.io/ez_setup.py + > python ez_setup.py --insecure + + +Unix including Mac OS X (curl) +============================== + +If your system has curl installed, follow the ``wget`` instructions but +replace ``wget`` with ``curl`` and ``-O`` with ``-o``. For example:: + + > curl https://bootstrap.pypa.io/ez_setup.py -o - | python + + +Advanced Installation +===================== + +For more advanced installation options, such as installing to custom +locations or prefixes, download and extract the source +tarball from `Setuptools on PyPI <https://pypi.python.org/pypi/setuptools>`_ +and run setup.py with any supported distutils and Setuptools options. 
+For example:: + + setuptools-x.x$ python setup.py install --prefix=/opt/setuptools + +Use ``--help`` to get a full options list, but we recommend consulting +the `EasyInstall manual`_ for detailed instructions, especially `the section +on custom installation locations`_. + +.. _EasyInstall manual: https://pythonhosted.org/setuptools/EasyInstall +.. _the section on custom installation locations: https://pythonhosted.org/setuptools/EasyInstall#custom-installation-locations + + +Downloads +========= + +All setuptools downloads can be found at `the project's home page in the Python +Package Index`_. Scroll to the very bottom of the page to find the links. + +.. _the project's home page in the Python Package Index: https://pypi.python.org/pypi/setuptools + +In addition to the PyPI downloads, the development version of ``setuptools`` +is available from the `Bitbucket repo`_, and in-development versions of the +`0.6 branch`_ are available as well. + +.. _Bitbucket repo: https://bitbucket.org/pypa/setuptools/get/default.tar.gz#egg=setuptools-dev +.. _0.6 branch: http://svn.python.org/projects/sandbox/branches/setuptools-0.6/#egg=setuptools-dev06 + +Uninstalling +============ + +On Windows, if Setuptools was installed using an ``.exe`` or ``.msi`` +installer, simply use the uninstall feature of "Add/Remove Programs" in the +Control Panel. + +Otherwise, to uninstall Setuptools or Distribute, regardless of the Python +version, delete all ``setuptools*`` and ``distribute*`` files and +directories from your system's ``site-packages`` directory +(and any other ``sys.path`` directories) FIRST. + +If you are upgrading or otherwise plan to re-install Setuptools or Distribute, +nothing further needs to be done. If you want to completely remove Setuptools, +you may also want to remove the 'easy_install' and 'easy_install-x.x' scripts +and associated executables installed to the Python scripts directory. + +-------------------------------- +Using Setuptools and EasyInstall +-------------------------------- + +Here are some of the available manuals, tutorials, and other resources for +learning about Setuptools, Python Eggs, and EasyInstall: + +* `The EasyInstall user's guide and reference manual`_ +* `The setuptools Developer's Guide`_ +* `The pkg_resources API reference`_ +* `The Internal Structure of Python Eggs`_ + +Questions, comments, and bug reports should be directed to the `distutils-sig +mailing list`_. If you have written (or know of) any tutorials, documentation, +plug-ins, or other resources for setuptools users, please let us know about +them there, so this reference list can be updated. If you have working, +*tested* patches to correct problems or add features, you may submit them to +the `setuptools bug tracker`_. + +.. _setuptools bug tracker: https://github.com/pypa/setuptools/issues +.. _The Internal Structure of Python Eggs: https://setuptools.readthedocs.io/en/latest/formats.html +.. _The setuptools Developer's Guide: https://setuptools.readthedocs.io/en/latest/developer-guide.html +.. _The pkg_resources API reference: https://setuptools.readthedocs.io/en/latest/pkg_resources.html +.. _The EasyInstall user's guide and reference manual: https://setuptools.readthedocs.io/en/latest/easy_install.html +.. _distutils-sig mailing list: http://mail.python.org/pipermail/distutils-sig/ + + +------- +Credits +------- + +* The original design for the ``.egg`` format and the ``pkg_resources`` API was + co-created by Phillip Eby and Bob Ippolito. 
Bob also implemented the first + version of ``pkg_resources``, and supplied the OS X operating system version + compatibility algorithm. + +* Ian Bicking implemented many early "creature comfort" features of + easy_install, including support for downloading via Sourceforge and + Subversion repositories. Ian's comments on the Web-SIG about WSGI + application deployment also inspired the concept of "entry points" in eggs, + and he has given talks at PyCon and elsewhere to inform and educate the + community about eggs and setuptools. + +* Jim Fulton contributed time and effort to build automated tests of various + aspects of ``easy_install``, and supplied the doctests for the command-line + ``.exe`` wrappers on Windows. + +* Phillip J. Eby is the seminal author of setuptools, and + first proposed the idea of an importable binary distribution format for + Python application plug-ins. + +* Significant parts of the implementation of setuptools were funded by the Open + Source Applications Foundation, to provide a plug-in infrastructure for the + Chandler PIM application. In addition, many OSAF staffers (such as Mike + "Code Bear" Taylor) contributed their time and stress as guinea pigs for the + use of eggs and setuptools, even before eggs were "cool". (Thanks, guys!) + +* Tarek Ziadé is the principal author of the Distribute fork, which + re-invigorated the community on the project, encouraged renewed innovation, + and addressed many defects. + +* Since the merge with Distribute, Jason R. Coombs is the + maintainer of setuptools. The project is maintained in coordination with + the Python Packaging Authority (PyPA) and the larger Python community. + +.. _files: + + +--------------- +Code of Conduct +--------------- + +Everyone interacting in the setuptools project's codebases, issue trackers, +chat rooms, and mailing lists is expected to follow the +`PyPA Code of Conduct`_. + +.. 
_PyPA Code of Conduct: https://www.pypa.io/en/latest/code-of-conduct/ + + diff --git a/env/lib/python3.6/site-packages/setuptools-28.8.0.dist-info/INSTALLER b/env/lib/python3.6/site-packages/setuptools-28.8.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/env/lib/python3.6/site-packages/setuptools-28.8.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/env/lib/python3.6/site-packages/setuptools-28.8.0.dist-info/METADATA b/env/lib/python3.6/site-packages/setuptools-28.8.0.dist-info/METADATA new file mode 100644 index 0000000..a38609c --- /dev/null +++ b/env/lib/python3.6/site-packages/setuptools-28.8.0.dist-info/METADATA @@ -0,0 +1,272 @@ +Metadata-Version: 2.0 +Name: setuptools +Version: 28.8.0 +Summary: Easily download, build, install, upgrade, and uninstall Python packages +Home-page: https://github.com/pypa/setuptools +Author: Python Packaging Authority +Author-email: distutils-sig@python.org +License: UNKNOWN +Keywords: CPAN PyPI distutils eggs package management +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: System :: Archiving :: Packaging +Classifier: Topic :: System :: Systems Administration +Classifier: Topic :: Utilities +Provides-Extra: certs +Requires-Dist: certifi (==2016.9.26); extra == 'certs' +Provides-Extra: ssl +Requires-Dist: wincertstore (==0.2); sys_platform=='win32' and extra == 'ssl' + +=============================== +Installing and Using Setuptools +=============================== + +.. contents:: **Table of Contents** + + +.. image:: https://setuptools.readthedocs.io/en/latest/?badge=latest + :target: https://setuptools.readthedocs.io + +------------------------- +Installation Instructions +------------------------- + +The recommended way to bootstrap setuptools on any system is to download +`ez_setup.py`_ and run it using the target Python environment. Different +operating systems have different recommended techniques to accomplish this +basic routine, so below are some examples to get you started. + +Setuptools requires Python 2.6 or later. To install setuptools +on Python 2.4 or Python 2.5, use the `bootstrap script for Setuptools 1.x +<https://raw.githubusercontent.com/pypa/setuptools/bootstrap-py24/ez_setup.py>`_. + +The link provided to ez_setup.py is a bookmark to bootstrap script for the +latest known stable release. + +.. _ez_setup.py: https://bootstrap.pypa.io/ez_setup.py + +Windows (Powershell 3 or later) +=============================== + +For best results, uninstall previous versions FIRST (see `Uninstalling`_). + +Using Windows 8 (which includes PowerShell 3) or earlier versions of Windows +with PowerShell 3 installed, it's possible to install with one simple +Powershell command. Start up Powershell and paste this command:: + + > (Invoke-WebRequest https://bootstrap.pypa.io/ez_setup.py).Content | python - + +.. 
image:: https://badges.gitter.im/pypa/setuptools.svg + :alt: Join the chat at https://gitter.im/pypa/setuptools + :target: https://gitter.im/pypa/setuptools?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge + +You must start the Powershell with Administrative privileges or you may choose +to install a user-local installation:: + + > (Invoke-WebRequest https://bootstrap.pypa.io/ez_setup.py).Content | python - --user + +If you have Python 3.3 or later, you can use the ``py`` command to install to +different Python versions. For example, to install to Python 3.3 if you have +Python 2.7 installed:: + + > (Invoke-WebRequest https://bootstrap.pypa.io/ez_setup.py).Content | py -3 - + +The recommended way to install setuptools on Windows is to download +`ez_setup.py`_ and run it. The script will download the appropriate +distribution file and install it for you. + +Once installation is complete, you will find an ``easy_install`` program in +your Python ``Scripts`` subdirectory. For simple invocation and best results, +add this directory to your ``PATH`` environment variable, if it is not already +present. If you did a user-local install, the ``Scripts`` subdirectory is +``$env:APPDATA\Python\Scripts``. + + +Windows (simplified) +==================== + +For Windows without PowerShell 3 or for installation without a command-line, +download `ez_setup.py`_ using your preferred web browser or other technique +and "run" that file. + + +Unix (wget) +=========== + +Most Linux distributions come with wget. + +Download `ez_setup.py`_ and run it using the target Python version. The script +will download the appropriate version and install it for you:: + + > wget https://bootstrap.pypa.io/ez_setup.py -O - | python + +Note that you will may need to invoke the command with superuser privileges to +install to the system Python:: + + > wget https://bootstrap.pypa.io/ez_setup.py -O - | sudo python + +Alternatively, Setuptools may be installed to a user-local path:: + + > wget https://bootstrap.pypa.io/ez_setup.py -O - | python - --user + +Note that on some older systems (noted on Debian 6 and CentOS 5 installations), +`wget` may refuse to download `ez_setup.py`, complaining that the certificate common name `*.c.ssl.fastly.net` +does not match the host name `bootstrap.pypa.io`. In addition, the `ez_setup.py` script may then encounter similar problems using +`wget` internally to download `setuptools-x.y.zip`, complaining that the certificate common name of `www.python.org` does not match the +host name `pypi.python.org`. Those are known issues, related to a bug in the older versions of `wget` +(see `Issue 59 <https://bitbucket.org/pypa/pypi/issue/59#comment-5881915>`_). If you happen to encounter them, +install Setuptools as follows:: + + > wget --no-check-certificate https://bootstrap.pypa.io/ez_setup.py + > python ez_setup.py --insecure + + +Unix including Mac OS X (curl) +============================== + +If your system has curl installed, follow the ``wget`` instructions but +replace ``wget`` with ``curl`` and ``-O`` with ``-o``. For example:: + + > curl https://bootstrap.pypa.io/ez_setup.py -o - | python + + +Advanced Installation +===================== + +For more advanced installation options, such as installing to custom +locations or prefixes, download and extract the source +tarball from `Setuptools on PyPI <https://pypi.python.org/pypi/setuptools>`_ +and run setup.py with any supported distutils and Setuptools options. 
+For example:: + + setuptools-x.x$ python setup.py install --prefix=/opt/setuptools + +Use ``--help`` to get a full options list, but we recommend consulting +the `EasyInstall manual`_ for detailed instructions, especially `the section +on custom installation locations`_. + +.. _EasyInstall manual: https://pythonhosted.org/setuptools/EasyInstall +.. _the section on custom installation locations: https://pythonhosted.org/setuptools/EasyInstall#custom-installation-locations + + +Downloads +========= + +All setuptools downloads can be found at `the project's home page in the Python +Package Index`_. Scroll to the very bottom of the page to find the links. + +.. _the project's home page in the Python Package Index: https://pypi.python.org/pypi/setuptools + +In addition to the PyPI downloads, the development version of ``setuptools`` +is available from the `Bitbucket repo`_, and in-development versions of the +`0.6 branch`_ are available as well. + +.. _Bitbucket repo: https://bitbucket.org/pypa/setuptools/get/default.tar.gz#egg=setuptools-dev +.. _0.6 branch: http://svn.python.org/projects/sandbox/branches/setuptools-0.6/#egg=setuptools-dev06 + +Uninstalling +============ + +On Windows, if Setuptools was installed using an ``.exe`` or ``.msi`` +installer, simply use the uninstall feature of "Add/Remove Programs" in the +Control Panel. + +Otherwise, to uninstall Setuptools or Distribute, regardless of the Python +version, delete all ``setuptools*`` and ``distribute*`` files and +directories from your system's ``site-packages`` directory +(and any other ``sys.path`` directories) FIRST. + +If you are upgrading or otherwise plan to re-install Setuptools or Distribute, +nothing further needs to be done. If you want to completely remove Setuptools, +you may also want to remove the 'easy_install' and 'easy_install-x.x' scripts +and associated executables installed to the Python scripts directory. + +-------------------------------- +Using Setuptools and EasyInstall +-------------------------------- + +Here are some of the available manuals, tutorials, and other resources for +learning about Setuptools, Python Eggs, and EasyInstall: + +* `The EasyInstall user's guide and reference manual`_ +* `The setuptools Developer's Guide`_ +* `The pkg_resources API reference`_ +* `The Internal Structure of Python Eggs`_ + +Questions, comments, and bug reports should be directed to the `distutils-sig +mailing list`_. If you have written (or know of) any tutorials, documentation, +plug-ins, or other resources for setuptools users, please let us know about +them there, so this reference list can be updated. If you have working, +*tested* patches to correct problems or add features, you may submit them to +the `setuptools bug tracker`_. + +.. _setuptools bug tracker: https://github.com/pypa/setuptools/issues +.. _The Internal Structure of Python Eggs: https://setuptools.readthedocs.io/en/latest/formats.html +.. _The setuptools Developer's Guide: https://setuptools.readthedocs.io/en/latest/developer-guide.html +.. _The pkg_resources API reference: https://setuptools.readthedocs.io/en/latest/pkg_resources.html +.. _The EasyInstall user's guide and reference manual: https://setuptools.readthedocs.io/en/latest/easy_install.html +.. _distutils-sig mailing list: http://mail.python.org/pipermail/distutils-sig/ + + +------- +Credits +------- + +* The original design for the ``.egg`` format and the ``pkg_resources`` API was + co-created by Phillip Eby and Bob Ippolito. 
Bob also implemented the first + version of ``pkg_resources``, and supplied the OS X operating system version + compatibility algorithm. + +* Ian Bicking implemented many early "creature comfort" features of + easy_install, including support for downloading via Sourceforge and + Subversion repositories. Ian's comments on the Web-SIG about WSGI + application deployment also inspired the concept of "entry points" in eggs, + and he has given talks at PyCon and elsewhere to inform and educate the + community about eggs and setuptools. + +* Jim Fulton contributed time and effort to build automated tests of various + aspects of ``easy_install``, and supplied the doctests for the command-line + ``.exe`` wrappers on Windows. + +* Phillip J. Eby is the seminal author of setuptools, and + first proposed the idea of an importable binary distribution format for + Python application plug-ins. + +* Significant parts of the implementation of setuptools were funded by the Open + Source Applications Foundation, to provide a plug-in infrastructure for the + Chandler PIM application. In addition, many OSAF staffers (such as Mike + "Code Bear" Taylor) contributed their time and stress as guinea pigs for the + use of eggs and setuptools, even before eggs were "cool". (Thanks, guys!) + +* Tarek Ziadé is the principal author of the Distribute fork, which + re-invigorated the community on the project, encouraged renewed innovation, + and addressed many defects. + +* Since the merge with Distribute, Jason R. Coombs is the + maintainer of setuptools. The project is maintained in coordination with + the Python Packaging Authority (PyPA) and the larger Python community. + +.. _files: + + +--------------- +Code of Conduct +--------------- + +Everyone interacting in the setuptools project's codebases, issue trackers, +chat rooms, and mailing lists is expected to follow the +`PyPA Code of Conduct`_. + +.. 
_PyPA Code of Conduct: https://www.pypa.io/en/latest/code-of-conduct/ + + diff --git a/env/lib/python3.6/site-packages/setuptools-28.8.0.dist-info/RECORD b/env/lib/python3.6/site-packages/setuptools-28.8.0.dist-info/RECORD new file mode 100644 index 0000000..46a8005 --- /dev/null +++ b/env/lib/python3.6/site-packages/setuptools-28.8.0.dist-info/RECORD @@ -0,0 +1,143 @@ +easy_install.py,sha256=MDC9vt5AxDsXX5qcKlBz2TnW6Tpuv_AobnfhCJ9X3PM,126 +pkg_resources/__init__.py,sha256=Z_i79ylo01fXF7p2lnMG7Ov4fLu0O-HLX-3JHMk0FCI,103301 +pkg_resources/_vendor/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pkg_resources/_vendor/appdirs.py,sha256=tgGaL0m4Jo2VeuGfoOOifLv7a7oUEJu2n1vRkqoPw-0,22374 +pkg_resources/_vendor/pyparsing.py,sha256=PifeLY3-WhIcBVzLtv0U4T_pwDtPruBhBCkg5vLqa28,229867 +pkg_resources/_vendor/six.py,sha256=A6hdJZVjI3t_geebZ9BzUvwRrIXo0lfwzQlM2LcKyas,30098 +pkg_resources/_vendor/packaging/__about__.py,sha256=E9KR5UJ_8U9K-R1mScu6FmkXtbnlFEEFjEXJp-7LxNU,720 +pkg_resources/_vendor/packaging/__init__.py,sha256=_vNac5TrzwsrzbOFIbF-5cHqc_Y2aPT2D7zrIR06BOo,513 +pkg_resources/_vendor/packaging/_compat.py,sha256=Vi_A0rAQeHbU-a9X0tt1yQm9RqkgQbDSxzRw8WlU9kA,860 +pkg_resources/_vendor/packaging/_structures.py,sha256=RImECJ4c_wTlaTYYwZYLHEiebDMaAJmK1oPARhw1T5o,1416 +pkg_resources/_vendor/packaging/markers.py,sha256=ndShKOQb_OgpQkFju6LR-2msB1La5u5iAfD5MIqXE4c,7939 +pkg_resources/_vendor/packaging/requirements.py,sha256=SikL2UynbsT0qtY9ltqngndha_sfo0w6XGFhAhoSoaQ,4355 +pkg_resources/_vendor/packaging/specifiers.py,sha256=SAMRerzO3fK2IkFZCaZkuwZaL_EGqHNOz4pni4vhnN0,28025 +pkg_resources/_vendor/packaging/utils.py,sha256=3m6WvPm6NNxE8rkTGmn0r75B_GZSGg7ikafxHsBN1WA,421 +pkg_resources/_vendor/packaging/version.py,sha256=OwGnxYfr2ghNzYx59qWIBkrK3SnB6n-Zfd1XaLpnnM0,11556 +pkg_resources/extern/__init__.py,sha256=JUtlHHvlxHSNuB4pWqNjcx7n6kG-fwXg7qmJ2zNJlIY,2487 +setuptools/__init__.py,sha256=hW5vya8Yp46AiWmYcALuQbGI7148nQSYVLsEIJLet54,5050 +setuptools/archive_util.py,sha256=Z58-gbZQ0j92UJy7X7uZevwI28JTVEXd__AjKy4aw78,6613 +setuptools/cli-32.exe,sha256=dfEuovMNnA2HLa3jRfMPVi5tk4R7alCbpTvuxtCyw0Y,65536 +setuptools/cli-64.exe,sha256=KLABu5pyrnokJCv6skjXZ6GsXeyYHGcqOUT3oHI3Xpo,74752 +setuptools/cli.exe,sha256=dfEuovMNnA2HLa3jRfMPVi5tk4R7alCbpTvuxtCyw0Y,65536 +setuptools/depends.py,sha256=h5tbRzianODRkECVsK4sLBbsOfpY1TGEnp9Zm0wg2To,6473 +setuptools/dist.py,sha256=5WUqgcvehkRw97T8NpV66vwneVp77LI9zOmWkPvgHoE,37129 +setuptools/extension.py,sha256=uc6nHI-MxwmNCNPbUiBnybSyqhpJqjbhvOQ-emdvt_E,1729 +setuptools/glob.py,sha256=Y-fpv8wdHZzv9DPCaGACpMSBWJ6amq_1e0R_i8_el4w,5207 +setuptools/gui-32.exe,sha256=XBr0bHMA6Hpz2s9s9Bzjl-PwXfa9nH4ie0rFn4V2kWA,65536 +setuptools/gui-64.exe,sha256=aYKMhX1IJLn4ULHgWX0sE0yREUt6B3TEHf_jOw6yNyE,75264 +setuptools/gui.exe,sha256=XBr0bHMA6Hpz2s9s9Bzjl-PwXfa9nH4ie0rFn4V2kWA,65536 +setuptools/launch.py,sha256=sd7ejwhBocCDx_wG9rIs0OaZ8HtmmFU8ZC6IR_S0Lvg,787 +setuptools/lib2to3_ex.py,sha256=t5e12hbR2pi9V4ezWDTB4JM-AISUnGOkmcnYHek3xjg,2013 +setuptools/monkey.py,sha256=qHoZT9IgmFM-_yF7BjCoPtXg4DYF1H49t_1nLq7dPRo,5337 +setuptools/msvc.py,sha256=6cKqwOxTH8pJNMrNyRLBGB6Efz2rxTTdQQJABUW-6WU,37091 +setuptools/namespaces.py,sha256=I2YrskOZU2ctBS8jdFwAc9q4M8SxbOr0DsFLxxO7gfA,2648 +setuptools/package_index.py,sha256=eYtVpQiaP_4RRFp8zsNbsvNGPAzyOUDM2Lf_q4hePcs,39947 +setuptools/py26compat.py,sha256=VRGHC7z2gliR4_uICJsQNodUcNUzybpus3BrJkWbnK4,679 +setuptools/py27compat.py,sha256=Yu8hW3y8Djps_IenCuz5xZ9OD9qQ-kmBIoVGVpWeQ5Y,330 
+setuptools/py31compat.py,sha256=qGRk3tefux8HbhNzhM0laR3mD8vhAZtffZgzLkBMXJs,1645 +setuptools/sandbox.py,sha256=0aNeoJ2tCLhHmQZlx8DQPhuuhGt2690Q7PoGLz6KJ30,14324 +setuptools/script (dev).tmpl,sha256=f7MR17dTkzaqkCMSVseyOCMVrPVSMdmTQsaB8cZzfuI,201 +setuptools/script.tmpl,sha256=WGTt5piezO27c-Dbx6l5Q4T3Ff20A5z7872hv3aAhYY,138 +setuptools/site-patch.py,sha256=BVt6yIrDMXJoflA5J6DJIcsJUfW_XEeVhOzelTTFDP4,2307 +setuptools/ssl_support.py,sha256=qUzJ_2WeFWBVkGoN638qC42Uzs-Wvmb8cZub9gT1xuI,8131 +setuptools/unicode_utils.py,sha256=NOiZ_5hD72A6w-4wVj8awHFM3n51Kmw1Ic_vx15XFqw,996 +setuptools/version.py,sha256=fvr31nm9BOi4wvMhGVoU0VwmyGeeEAeF3fh33z84wx4,138 +setuptools/windows_support.py,sha256=5GrfqSP2-dLGJoZTq2g6dCKkyQxxa2n5IQiXlJCoYEE,714 +setuptools/command/__init__.py,sha256=FCLDWoQEGsNexQviqbeJ4MEYGYpYVbbwc7i-9gjzBkM,563 +setuptools/command/alias.py,sha256=KjpE0sz_SDIHv3fpZcIQK-sCkJz-SrC6Gmug6b9Nkc8,2426 +setuptools/command/bdist_egg.py,sha256=XDamu6-cfyYrqd67YGQ5gWo-0c8kuWqkPy1vYpfsAxw,17178 +setuptools/command/bdist_rpm.py,sha256=B7l0TnzCGb-0nLlm6rS00jWLkojASwVmdhW2w5Qz_Ak,1508 +setuptools/command/bdist_wininst.py,sha256=_6dz3lpB1tY200LxKPLM7qgwTCceOMgaWFF-jW2-pm0,637 +setuptools/command/build_ext.py,sha256=dO89j-IC0dAjSty1sSZxvi0LSdkPGR_ZPXFuAAFDZj4,13049 +setuptools/command/build_py.py,sha256=FwU7GNUnd2dT3yEtsTPsJBcl9a7loOcN6GB4uQ0W_IM,9596 +setuptools/command/develop.py,sha256=cHScw5hhILsZM3pk-ddORnd51aLFIbi-4gRm3yTnS_k,7384 +setuptools/command/easy_install.py,sha256=AlmasFuULxUXXgTcUhARle87a74rwStU5nSQn9Oakjw,85720 +setuptools/command/egg_info.py,sha256=f89c5gu9hWS89XheFLD0h0yONVmB4Z3FJR5hkAjn0SY,25110 +setuptools/command/install.py,sha256=a0EZpL_A866KEdhicTGbuyD_TYl1sykfzdrri-zazT4,4683 +setuptools/command/install_egg_info.py,sha256=bMgeIeRiXzQ4DAGPV1328kcjwQjHjOWU4FngAWLV78Q,2203 +setuptools/command/install_lib.py,sha256=11mxf0Ch12NsuYwS8PHwXBRvyh671QAM4cTRh7epzG0,3840 +setuptools/command/install_scripts.py,sha256=UD0rEZ6861mTYhIdzcsqKnUl8PozocXWl9VBQ1VTWnc,2439 +setuptools/command/launcher manifest.xml,sha256=xlLbjWrB01tKC0-hlVkOKkiSPbzMml2eOPtJ_ucCnbE,628 +setuptools/command/py36compat.py,sha256=SzjZcOxF7zdFUT47Zv2n7AM3H8koDys_0OpS-n9gIfc,4986 +setuptools/command/register.py,sha256=bHlMm1qmBbSdahTOT8w6UhA-EgeQIz7p6cD-qOauaiI,270 +setuptools/command/rotate.py,sha256=co5C1EkI7P0GGT6Tqz-T2SIj2LBJTZXYELpmao6d4KQ,2164 +setuptools/command/saveopts.py,sha256=za7QCBcQimKKriWcoCcbhxPjUz30gSB74zuTL47xpP4,658 +setuptools/command/sdist.py,sha256=cu745bayFZMEEGqzMg6kUbIYY9ZKXAbJjPBDXznNUyc,6650 +setuptools/command/setopt.py,sha256=NTWDyx-gjDF-txf4dO577s7LOzHVoKR0Mq33rFxaRr8,5085 +setuptools/command/test.py,sha256=IqkQ6rKdQsPh2BiLEXIx5FpzfGMpZttGl8dMa9utxVY,8535 +setuptools/command/upload.py,sha256=6LbdC9NWFe19tb8u-6L3Hriei-nVgtqfO7tn0Si7yLo,1077 +setuptools/command/upload_docs.py,sha256=0Ft5uo7Unnk4lJdZtIm6Nzq7hlyux-9zIYcXC8k_Czs,7275 +setuptools/extern/__init__.py,sha256=ZtCLYQ8JTtOtm7SYoxekZw-UzY3TR50SRIUaeqr2ROk,131 +setuptools-28.8.0.dist-info/DESCRIPTION.rst,sha256=UnjDOK_xBlHg-QN2JbfTbA55nmD_kStv89yxF9aOkqU,10257 +setuptools-28.8.0.dist-info/METADATA,sha256=Xf6UtJ6jxpg-nF5OHBErvUwMGLz-4nizC7iwdYmOcGc,11486 +setuptools-28.8.0.dist-info/RECORD,, +setuptools-28.8.0.dist-info/WHEEL,sha256=o2k-Qa-RMNIJmUdIc7KU6VWR_ErNRbWNlxDIpl7lm34,110 +setuptools-28.8.0.dist-info/dependency_links.txt,sha256=HlkCFkoK5TbZ5EMLbLKYhLcY_E31kBWD8TqW2EgmatQ,239 +setuptools-28.8.0.dist-info/entry_points.txt,sha256=zPeIHv66mpCraHdiLlHQjxKDe-6gFIepEbEIjCvu8x4,2885 
+setuptools-28.8.0.dist-info/metadata.json,sha256=EIv4nnlOffwLVnetEwhIkFs74vm5jfPJHsxZrjQyXFE,4614 +setuptools-28.8.0.dist-info/top_level.txt,sha256=2HUXVVwA4Pff1xgTFr3GsTXXKaPaO6vlG6oNJ_4u4Tg,38 +setuptools-28.8.0.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1 +../../../bin/easy_install,sha256=eZpzBw2Us4y60rOGnh1X8yW1DCViN6mUyi8-7J6n4Ng,268 +../../../bin/easy_install-3.6,sha256=eZpzBw2Us4y60rOGnh1X8yW1DCViN6mUyi8-7J6n4Ng,268 +setuptools-28.8.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +__pycache__/easy_install.cpython-36.pyc,, +setuptools/__pycache__/package_index.cpython-36.pyc,, +setuptools/__pycache__/py31compat.cpython-36.pyc,, +setuptools/__pycache__/sandbox.cpython-36.pyc,, +setuptools/__pycache__/windows_support.cpython-36.pyc,, +setuptools/__pycache__/version.cpython-36.pyc,, +setuptools/__pycache__/site-patch.cpython-36.pyc,, +setuptools/__pycache__/py26compat.cpython-36.pyc,, +setuptools/__pycache__/launch.cpython-36.pyc,, +setuptools/__pycache__/unicode_utils.cpython-36.pyc,, +setuptools/__pycache__/ssl_support.cpython-36.pyc,, +setuptools/__pycache__/depends.cpython-36.pyc,, +setuptools/__pycache__/glob.cpython-36.pyc,, +setuptools/__pycache__/msvc.cpython-36.pyc,, +setuptools/__pycache__/py27compat.cpython-36.pyc,, +setuptools/__pycache__/lib2to3_ex.cpython-36.pyc,, +setuptools/__pycache__/monkey.cpython-36.pyc,, +setuptools/__pycache__/dist.cpython-36.pyc,, +setuptools/__pycache__/namespaces.cpython-36.pyc,, +setuptools/__pycache__/__init__.cpython-36.pyc,, +setuptools/__pycache__/extension.cpython-36.pyc,, +setuptools/__pycache__/archive_util.cpython-36.pyc,, +setuptools/command/__pycache__/alias.cpython-36.pyc,, +setuptools/command/__pycache__/register.cpython-36.pyc,, +setuptools/command/__pycache__/install_lib.cpython-36.pyc,, +setuptools/command/__pycache__/setopt.cpython-36.pyc,, +setuptools/command/__pycache__/bdist_egg.cpython-36.pyc,, +setuptools/command/__pycache__/bdist_rpm.cpython-36.pyc,, +setuptools/command/__pycache__/develop.cpython-36.pyc,, +setuptools/command/__pycache__/build_py.cpython-36.pyc,, +setuptools/command/__pycache__/upload.cpython-36.pyc,, +setuptools/command/__pycache__/sdist.cpython-36.pyc,, +setuptools/command/__pycache__/install.cpython-36.pyc,, +setuptools/command/__pycache__/egg_info.cpython-36.pyc,, +setuptools/command/__pycache__/py36compat.cpython-36.pyc,, +setuptools/command/__pycache__/easy_install.cpython-36.pyc,, +setuptools/command/__pycache__/build_ext.cpython-36.pyc,, +setuptools/command/__pycache__/rotate.cpython-36.pyc,, +setuptools/command/__pycache__/upload_docs.cpython-36.pyc,, +setuptools/command/__pycache__/saveopts.cpython-36.pyc,, +setuptools/command/__pycache__/__init__.cpython-36.pyc,, +setuptools/command/__pycache__/test.cpython-36.pyc,, +setuptools/command/__pycache__/bdist_wininst.cpython-36.pyc,, +setuptools/command/__pycache__/install_scripts.cpython-36.pyc,, +setuptools/command/__pycache__/install_egg_info.cpython-36.pyc,, +setuptools/extern/__pycache__/__init__.cpython-36.pyc,, +pkg_resources/_vendor/packaging/__pycache__/_structures.cpython-36.pyc,, +pkg_resources/_vendor/packaging/__pycache__/version.cpython-36.pyc,, +pkg_resources/_vendor/packaging/__pycache__/requirements.cpython-36.pyc,, +pkg_resources/_vendor/packaging/__pycache__/markers.cpython-36.pyc,, +pkg_resources/_vendor/packaging/__pycache__/_compat.cpython-36.pyc,, +pkg_resources/_vendor/packaging/__pycache__/specifiers.cpython-36.pyc,, 
+pkg_resources/_vendor/packaging/__pycache__/__about__.cpython-36.pyc,, +pkg_resources/_vendor/packaging/__pycache__/utils.cpython-36.pyc,, +pkg_resources/_vendor/packaging/__pycache__/__init__.cpython-36.pyc,, +pkg_resources/_vendor/__pycache__/appdirs.cpython-36.pyc,, +pkg_resources/_vendor/__pycache__/six.cpython-36.pyc,, +pkg_resources/_vendor/__pycache__/__init__.cpython-36.pyc,, +pkg_resources/_vendor/__pycache__/pyparsing.cpython-36.pyc,, +pkg_resources/__pycache__/__init__.cpython-36.pyc,, +pkg_resources/extern/__pycache__/__init__.cpython-36.pyc,, diff --git a/env/lib/python3.6/site-packages/setuptools-28.8.0.dist-info/WHEEL b/env/lib/python3.6/site-packages/setuptools-28.8.0.dist-info/WHEEL new file mode 100644 index 0000000..8b6dd1b --- /dev/null +++ b/env/lib/python3.6/site-packages/setuptools-28.8.0.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.29.0) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/env/lib/python3.6/site-packages/setuptools-28.8.0.dist-info/dependency_links.txt b/env/lib/python3.6/site-packages/setuptools-28.8.0.dist-info/dependency_links.txt new file mode 100644 index 0000000..e87d021 --- /dev/null +++ b/env/lib/python3.6/site-packages/setuptools-28.8.0.dist-info/dependency_links.txt @@ -0,0 +1,2 @@ +https://files.pythonhosted.org/packages/source/c/certifi/certifi-2016.9.26.tar.gz#md5=baa81e951a29958563689d868ef1064d +https://files.pythonhosted.org/packages/source/w/wincertstore/wincertstore-0.2.zip#md5=ae728f2f007185648d0c7a8679b361e2 diff --git a/env/lib/python3.6/site-packages/setuptools-28.8.0.dist-info/entry_points.txt b/env/lib/python3.6/site-packages/setuptools-28.8.0.dist-info/entry_points.txt new file mode 100644 index 0000000..c3dd284 --- /dev/null +++ b/env/lib/python3.6/site-packages/setuptools-28.8.0.dist-info/entry_points.txt @@ -0,0 +1,63 @@ +[console_scripts] +easy_install = setuptools.command.easy_install:main +easy_install-3.5 = setuptools.command.easy_install:main + +[distutils.commands] +alias = setuptools.command.alias:alias +bdist_egg = setuptools.command.bdist_egg:bdist_egg +bdist_rpm = setuptools.command.bdist_rpm:bdist_rpm +bdist_wininst = setuptools.command.bdist_wininst:bdist_wininst +build_ext = setuptools.command.build_ext:build_ext +build_py = setuptools.command.build_py:build_py +develop = setuptools.command.develop:develop +easy_install = setuptools.command.easy_install:easy_install +egg_info = setuptools.command.egg_info:egg_info +install = setuptools.command.install:install +install_egg_info = setuptools.command.install_egg_info:install_egg_info +install_lib = setuptools.command.install_lib:install_lib +install_scripts = setuptools.command.install_scripts:install_scripts +register = setuptools.command.register:register +rotate = setuptools.command.rotate:rotate +saveopts = setuptools.command.saveopts:saveopts +sdist = setuptools.command.sdist:sdist +setopt = setuptools.command.setopt:setopt +test = setuptools.command.test:test +upload = setuptools.command.upload:upload +upload_docs = setuptools.command.upload_docs:upload_docs + +[distutils.setup_keywords] +convert_2to3_doctests = setuptools.dist:assert_string_list +dependency_links = setuptools.dist:assert_string_list +eager_resources = setuptools.dist:assert_string_list +entry_points = setuptools.dist:check_entry_points +exclude_package_data = setuptools.dist:check_package_data +extras_require = setuptools.dist:check_extras +include_package_data = setuptools.dist:assert_bool +install_requires = 
setuptools.dist:check_requirements +namespace_packages = setuptools.dist:check_nsp +package_data = setuptools.dist:check_package_data +packages = setuptools.dist:check_packages +python_requires = setuptools.dist:check_specifier +setup_requires = setuptools.dist:check_requirements +test_loader = setuptools.dist:check_importable +test_runner = setuptools.dist:check_importable +test_suite = setuptools.dist:check_test_suite +tests_require = setuptools.dist:check_requirements +use_2to3 = setuptools.dist:assert_bool +use_2to3_exclude_fixers = setuptools.dist:assert_string_list +use_2to3_fixers = setuptools.dist:assert_string_list +zip_safe = setuptools.dist:assert_bool + +[egg_info.writers] +PKG-INFO = setuptools.command.egg_info:write_pkg_info +dependency_links.txt = setuptools.command.egg_info:overwrite_arg +depends.txt = setuptools.command.egg_info:warn_depends_obsolete +eager_resources.txt = setuptools.command.egg_info:overwrite_arg +entry_points.txt = setuptools.command.egg_info:write_entries +namespace_packages.txt = setuptools.command.egg_info:overwrite_arg +requires.txt = setuptools.command.egg_info:write_requirements +top_level.txt = setuptools.command.egg_info:write_toplevel_names + +[setuptools.installation] +eggsecutable = setuptools.command.easy_install:bootstrap + diff --git a/env/lib/python3.6/site-packages/setuptools-28.8.0.dist-info/metadata.json b/env/lib/python3.6/site-packages/setuptools-28.8.0.dist-info/metadata.json new file mode 100644 index 0000000..258f532 --- /dev/null +++ b/env/lib/python3.6/site-packages/setuptools-28.8.0.dist-info/metadata.json @@ -0,0 +1 @@ +{"classifiers": ["Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Topic :: Software Development :: Libraries :: Python Modules", "Topic :: System :: Archiving :: Packaging", "Topic :: System :: Systems Administration", "Topic :: Utilities"], "extensions": {"python.commands": {"wrap_console": {"easy_install": "setuptools.command.easy_install:main", "easy_install-3.5": "setuptools.command.easy_install:main"}}, "python.details": {"contacts": [{"email": "distutils-sig@python.org", "name": "Python Packaging Authority", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "https://github.com/pypa/setuptools"}}, "python.exports": {"console_scripts": {"easy_install": "setuptools.command.easy_install:main", "easy_install-3.5": "setuptools.command.easy_install:main"}, "distutils.commands": {"alias": "setuptools.command.alias:alias", "bdist_egg": "setuptools.command.bdist_egg:bdist_egg", "bdist_rpm": "setuptools.command.bdist_rpm:bdist_rpm", "bdist_wininst": "setuptools.command.bdist_wininst:bdist_wininst", "build_ext": "setuptools.command.build_ext:build_ext", "build_py": "setuptools.command.build_py:build_py", "develop": "setuptools.command.develop:develop", "easy_install": "setuptools.command.easy_install:easy_install", "egg_info": "setuptools.command.egg_info:egg_info", "install": "setuptools.command.install:install", "install_egg_info": "setuptools.command.install_egg_info:install_egg_info", "install_lib": "setuptools.command.install_lib:install_lib", "install_scripts": 
"setuptools.command.install_scripts:install_scripts", "register": "setuptools.command.register:register", "rotate": "setuptools.command.rotate:rotate", "saveopts": "setuptools.command.saveopts:saveopts", "sdist": "setuptools.command.sdist:sdist", "setopt": "setuptools.command.setopt:setopt", "test": "setuptools.command.test:test", "upload": "setuptools.command.upload:upload", "upload_docs": "setuptools.command.upload_docs:upload_docs"}, "distutils.setup_keywords": {"convert_2to3_doctests": "setuptools.dist:assert_string_list", "dependency_links": "setuptools.dist:assert_string_list", "eager_resources": "setuptools.dist:assert_string_list", "entry_points": "setuptools.dist:check_entry_points", "exclude_package_data": "setuptools.dist:check_package_data", "extras_require": "setuptools.dist:check_extras", "include_package_data": "setuptools.dist:assert_bool", "install_requires": "setuptools.dist:check_requirements", "namespace_packages": "setuptools.dist:check_nsp", "package_data": "setuptools.dist:check_package_data", "packages": "setuptools.dist:check_packages", "python_requires": "setuptools.dist:check_specifier", "setup_requires": "setuptools.dist:check_requirements", "test_loader": "setuptools.dist:check_importable", "test_runner": "setuptools.dist:check_importable", "test_suite": "setuptools.dist:check_test_suite", "tests_require": "setuptools.dist:check_requirements", "use_2to3": "setuptools.dist:assert_bool", "use_2to3_exclude_fixers": "setuptools.dist:assert_string_list", "use_2to3_fixers": "setuptools.dist:assert_string_list", "zip_safe": "setuptools.dist:assert_bool"}, "egg_info.writers": {"PKG-INFO": "setuptools.command.egg_info:write_pkg_info", "dependency_links.txt": "setuptools.command.egg_info:overwrite_arg", "depends.txt": "setuptools.command.egg_info:warn_depends_obsolete", "eager_resources.txt": "setuptools.command.egg_info:overwrite_arg", "entry_points.txt": "setuptools.command.egg_info:write_entries", "namespace_packages.txt": "setuptools.command.egg_info:overwrite_arg", "requires.txt": "setuptools.command.egg_info:write_requirements", "top_level.txt": "setuptools.command.egg_info:write_toplevel_names"}, "setuptools.installation": {"eggsecutable": "setuptools.command.easy_install:bootstrap"}}}, "extras": ["certs", "ssl"], "generator": "bdist_wheel (0.29.0)", "keywords": ["CPAN", "PyPI", "distutils", "eggs", "package", "management"], "metadata_version": "2.0", "name": "setuptools", "run_requires": [{"extra": "certs", "requires": ["certifi (==2016.9.26)"]}, {"environment": "sys_platform=='win32'", "extra": "ssl", "requires": ["wincertstore (==0.2)"]}], "summary": "Easily download, build, install, upgrade, and uninstall Python packages", "version": "28.8.0"} \ No newline at end of file diff --git a/env/lib/python3.6/site-packages/setuptools-28.8.0.dist-info/top_level.txt b/env/lib/python3.6/site-packages/setuptools-28.8.0.dist-info/top_level.txt new file mode 100644 index 0000000..4577c6a --- /dev/null +++ b/env/lib/python3.6/site-packages/setuptools-28.8.0.dist-info/top_level.txt @@ -0,0 +1,3 @@ +easy_install +pkg_resources +setuptools diff --git a/env/lib/python3.6/site-packages/setuptools-28.8.0.dist-info/zip-safe b/env/lib/python3.6/site-packages/setuptools-28.8.0.dist-info/zip-safe new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/env/lib/python3.6/site-packages/setuptools-28.8.0.dist-info/zip-safe @@ -0,0 +1 @@ + diff --git a/env/lib/python3.6/site-packages/setuptools/archive_util.py b/env/lib/python3.6/site-packages/setuptools/archive_util.py new file 
mode 100644 index 0000000..cc82b3d --- /dev/null +++ b/env/lib/python3.6/site-packages/setuptools/archive_util.py @@ -0,0 +1,173 @@ +"""Utilities for extracting common archive formats""" + +import zipfile +import tarfile +import os +import shutil +import posixpath +import contextlib +from distutils.errors import DistutilsError + +from pkg_resources import ensure_directory, ContextualZipFile + +__all__ = [ + "unpack_archive", "unpack_zipfile", "unpack_tarfile", "default_filter", + "UnrecognizedFormat", "extraction_drivers", "unpack_directory", +] + + +class UnrecognizedFormat(DistutilsError): + """Couldn't recognize the archive type""" + + +def default_filter(src, dst): + """The default progress/filter callback; returns True for all files""" + return dst + + +def unpack_archive(filename, extract_dir, progress_filter=default_filter, + drivers=None): + """Unpack `filename` to `extract_dir`, or raise ``UnrecognizedFormat`` + + `progress_filter` is a function taking two arguments: a source path + internal to the archive ('/'-separated), and a filesystem path where it + will be extracted. The callback must return the desired extract path + (which may be the same as the one passed in), or else ``None`` to skip + that file or directory. The callback can thus be used to report on the + progress of the extraction, as well as to filter the items extracted or + alter their extraction paths. + + `drivers`, if supplied, must be a non-empty sequence of functions with the + same signature as this function (minus the `drivers` argument), that raise + ``UnrecognizedFormat`` if they do not support extracting the designated + archive type. The `drivers` are tried in sequence until one is found that + does not raise an error, or until all are exhausted (in which case + ``UnrecognizedFormat`` is raised). If you do not supply a sequence of + drivers, the module's ``extraction_drivers`` constant will be used, which + means that ``unpack_zipfile`` and ``unpack_tarfile`` will be tried, in that + order. + """ + for driver in drivers or extraction_drivers: + try: + driver(filename, extract_dir, progress_filter) + except UnrecognizedFormat: + continue + else: + return + else: + raise UnrecognizedFormat( + "Not a recognized archive type: %s" % filename + ) + + +def unpack_directory(filename, extract_dir, progress_filter=default_filter): + """"Unpack" a directory, using the same interface as for archives + + Raises ``UnrecognizedFormat`` if `filename` is not a directory + """ + if not os.path.isdir(filename): + raise UnrecognizedFormat("%s is not a directory" % filename) + + paths = { + filename: ('', extract_dir), + } + for base, dirs, files in os.walk(filename): + src, dst = paths[base] + for d in dirs: + paths[os.path.join(base, d)] = src + d + '/', os.path.join(dst, d) + for f in files: + target = os.path.join(dst, f) + target = progress_filter(src + f, target) + if not target: + # skip non-files + continue + ensure_directory(target) + f = os.path.join(base, f) + shutil.copyfile(f, target) + shutil.copystat(f, target) + + +def unpack_zipfile(filename, extract_dir, progress_filter=default_filter): + """Unpack zip `filename` to `extract_dir` + + Raises ``UnrecognizedFormat`` if `filename` is not a zipfile (as determined + by ``zipfile.is_zipfile()``). See ``unpack_archive()`` for an explanation + of the `progress_filter` argument. 
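+
+    For example, a minimal ``progress_filter`` (an illustrative sketch only;
+    the callback name below is arbitrary) could skip ``__pycache__`` entries
+    and extract everything else unchanged::
+
+        def skip_pycache(src, dst):
+            # return the destination path to extract to, or None to skip
+            return None if '__pycache__' in src.split('/') else dst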
+ """ + + if not zipfile.is_zipfile(filename): + raise UnrecognizedFormat("%s is not a zip file" % (filename,)) + + with ContextualZipFile(filename) as z: + for info in z.infolist(): + name = info.filename + + # don't extract absolute paths or ones with .. in them + if name.startswith('/') or '..' in name.split('/'): + continue + + target = os.path.join(extract_dir, *name.split('/')) + target = progress_filter(name, target) + if not target: + continue + if name.endswith('/'): + # directory + ensure_directory(target) + else: + # file + ensure_directory(target) + data = z.read(info.filename) + with open(target, 'wb') as f: + f.write(data) + unix_attributes = info.external_attr >> 16 + if unix_attributes: + os.chmod(target, unix_attributes) + + +def unpack_tarfile(filename, extract_dir, progress_filter=default_filter): + """Unpack tar/tar.gz/tar.bz2 `filename` to `extract_dir` + + Raises ``UnrecognizedFormat`` if `filename` is not a tarfile (as determined + by ``tarfile.open()``). See ``unpack_archive()`` for an explanation + of the `progress_filter` argument. + """ + try: + tarobj = tarfile.open(filename) + except tarfile.TarError: + raise UnrecognizedFormat( + "%s is not a compressed or uncompressed tar file" % (filename,) + ) + with contextlib.closing(tarobj): + # don't do any chowning! + tarobj.chown = lambda *args: None + for member in tarobj: + name = member.name + # don't extract absolute paths or ones with .. in them + if not name.startswith('/') and '..' not in name.split('/'): + prelim_dst = os.path.join(extract_dir, *name.split('/')) + + # resolve any links and to extract the link targets as normal + # files + while member is not None and (member.islnk() or member.issym()): + linkpath = member.linkname + if member.issym(): + base = posixpath.dirname(member.name) + linkpath = posixpath.join(base, linkpath) + linkpath = posixpath.normpath(linkpath) + member = tarobj._getmember(linkpath) + + if member is not None and (member.isfile() or member.isdir()): + final_dst = progress_filter(name, prelim_dst) + if final_dst: + if final_dst.endswith(os.sep): + final_dst = final_dst[:-1] + try: + # XXX Ugh + tarobj._extract_member(member, final_dst) + except tarfile.ExtractError: + # chown/chmod/mkfifo/mknode/makedev failed + pass + return True + + +extraction_drivers = unpack_directory, unpack_zipfile, unpack_tarfile diff --git a/env/lib/python3.6/site-packages/setuptools/cli-32.exe b/env/lib/python3.6/site-packages/setuptools/cli-32.exe new file mode 100644 index 0000000000000000000000000000000000000000..b1487b7819e7286577a043c7726fbe0ca1543083 GIT binary patch literal 65536 zcmeFae|%KMxj%k3yGc&ShO@v10t8qfC>m5WpovRhA=wa=z=p_%6%z1@blsvwI0vv2 zNIY4alVK~j)mwY3trY!Sy|tffZ$+^cObBMdpZutbN^PuECoa`kXb2K>zVBzw<_Fq) zU-$d^{_*|%@qt&)nVIv<%rnnC&oeX6JTqHy>n_PINs<G9rYTAL@TPx0@%--}9r!$a z((i^#&t<$Zd7o|Z8<TGd-?_=NVdM9{v+=gOJh$I=_ub!9J^yrvXQOtv=gzx5rAw<k zcYSZ|9am>%4a-Xw9jfY!Ot@}WQUBkK=MqH|Mf{(O%J6=?F0E)R-u5-_q9XB5EmFjL zRMB1HZ7a&fd)b}0hpCKjVjS>G(qfxk>Uow`_J8Y;?6yo>h9td;lqFW`r_=Cu;je?@ zJ}aCeNvRaYzy7!6vsuJK8t7Ip04X137Vm)<B}y|cNYZo>`v3N5I`@q}=|CK){8#_3 zR`1xV;$zJbJP0ppD|Paae;!F%bM?lxx2d-wfQV@O6ujTW-;jSkRCTolCLPMh2Nx=) zGP{NVA?TB&mP=FqZ|whc3RJSvJUJGyHOs!nBie<k<-z=e)r`kVud+vM0lsONB<Y9b z0<+))qcqReE=`GTutop6y*iN=`x&*3EzZknc4W?3rP&uIJaeXK<D%wvS9N4nkT;0D zPW$-+vpsE9St6ytWVaCXsHU`%GVdR^wE=Xv01fto0vp%r_OvPOWj3j{W@V_Y;fxbp zySskme5v4&(U>PA7G%%m<=|b-UJ~!-boN$bi#jT{Hcy&A=Niq?KHpr`Y-?=MzKk{I zIl-)f*v>o`q`5M7OP+gKtTfLZsOCS(qPDr~x8=!_5`6-VLD0EMY5XaI$Uqq@V-Jap 
zR-V}6Ja=V~*CHdz@F4Rb<?;{KZ*yd>ij_JtwPEG;g{#zT!Uq*Py$3gDv`Z2tYF|X8 zYEi!^3#I2mi!9?8K!AuX>_C;=ltI=m5eE7*@I4UZ&p}=3ho&bc^h3P|C;`K|s)PJt z@!8GLOb})@Yp*SMou>fLhC@WZw%7ar>1Sm0aW&hPm&@Wqv5z<cJW4gM&zmkfJJ+a@ zj6&r=dVrlbR^{dLe--p{MqAX8%7LY}g_XQXq&T82+UL#6!luP}xs6BE?<fb3E#r6f ze^S%+ZFw$9UEExnmrHC?k~jf28Qa}v(?%Aw6cJb9i=;f%LL7GNV)O&mRYm+WAK2)J zoc6N?AE0A$CG}^`sG(_iS>i_&0GwOEjRhPMrYB*+WA64e$@ELiFO?ay?gvgcC<n$Y z<L^1CK%h$vSZG@q;PL(x?eqG1V1nyS(*z5;SA+M!_HB5xgCaCQzioLANgKIa^30b| zP)0-wnAuW?PuhpB1D*9VD+*d7r2(|XN$tU(8-F?I^V~ojiGY&$x^&Sr^ySP^J_*UW zrARijT__0kuL5&8h*xu#MI`axM$bS5AWndQ;JM+aKJrO?BE}`X#TVcgz$PT9E&8Dq zZ6JXIg6WKy%Zx0-)XbKtWRx0n<OM3tY=>1!dbl2?B=#{!9_2$Llg!~3%n@58CG`RW z1LPlkk=p2eFSa3N`&F?g@~A1mHitQyVq0yNK4^CN8joui^5gTpuf^0f+qMtEYVL?F z$fu`~#PaZA)VQ4Amx;XbZ%EJqQT~UlXZwx7HHW!>vn=MgCVU7v0(=qWSe%!~9KS(N zgLM=3LHzO$mU+*{wx!#)wXd#auhgvU=lF&*IVnT+hZ`~0nCHPOETKA3I;S!sQ8$^{ zZcv4UbEsTEpxvZ3yazYCQD1%G)vA+(ndH~oy5$RmDNA{h9?j)8QlvdBd-|V!63d!_ zr{P-1vS(7D+|itM9Rk61MnI<ijY!Ly%7^jv=YUlg`cLmOwOJ@HClJm79G^?wO8q+) z2vf7m?6nYbY6S#*GNiuY5H+x^+G@?tJP#TL9re>+K~KhBa?C)KKh+E*p-K?e54p;H z-uNb0vkbWyR)1lbnp%G$OG`vjpo}PU*o}&pp;`PEODluTuiNcFBFmELneD_AsyG+G zkGm*r)oMJHmxrXL#=Plxfj%;6&nXBm<I#%{teK#)2aU^vKFj+G2|d8ZfX<DYT4pfZ zfo|^HD@jrnxXrnoJ(D*BEsHtwkuBFp`spvA2GpIQLK~G_Fij)vWt2{I(c2x~KW)!t zCOE{y+%GQUQ^og%kazlaaoZ=NV(uK8O?>)d`#6i)km>UtDzrb-*V{hPU&@;WB&3=+ zxL1-^s(vuM%+x$5wc!b>TMmX_2j=|8Kt*)b-4;r#_ff_ny|oEKpX@DE=!THWD9l;8 zEWjV=HO&BTAtLP*tp;IMlM0_Vn8(sUqI$?Nv_U1G^tEZC@of=jxa%BH_{Ai!MYo}y zE@)vjviC#f;TCVZ=HXtX$EDFgCrJNz+eAX#tsgc!-#{X?u;vu7>K}|6xr+Y+O$ixV zZ+D5)r){a?S581&?=jW!dQYD^njLNZDwQ49Kbq9~QJUTP@Z(p`mlCNjK7uj2dw$*y z?Fs@NOQ3Fcxb;G+-Z81QBhBuJS%CWlpf9gp&E>m+$xzI$NMcrT+APveYg4QEVhkj# zC+2qrf~MxI;{Q2Zk_`Xps%rkG7-Dkc{@y;QZ4Oz0#y`#fgd*BZP3DWK6>a+@*L<mM zcZ+wv6pXlQp*qv|N$8nGnzy|!owe_wFT`9w_5eJz=cRm7?ApYLBWTQ~Z~Xh0d`OLq zTT$CqaQsCoH<7xV;0<Sr-s;g0IvOs}L}lA&k-l0$xByYj4z~8BGDno!&c4z=oz(hi z8grx*iDYlPN`q&LaV@ehXt=Ne8MeK-x}c@DjsM$J%twl6LU~JSD&H^}!^3Q<i@!_g zv@vrzI}>D@EZXPo+Bl`5Zw>0+GLF5OFNogis^p(SM>i~SO7+N+7^b&-f@XG3hYwRL zs{rPg^&WTKXuZW1;J*Vf^E(^LEqH+VoqCH0;~Qle%pqFtZQVGjSX7wPu*PZbFwOi{ zG*lGy6QCZdX|wX?4#`^~>lfT8wQf{0k4{L2{|oR+{f=JfFn@0V9WOeR5QLU=M!U6~ zB7d(sir<zi(J(xWuRwrR^cpgzK1ceMKSTyn=7h94qQ})c3tBJ-kufbC-S8FZ{*A-+ z;wE$p2;6zcG#Z^Q=wCTDUVHvM{Uf{T%s<wYuE%Y9r%meyA9u+1R(iScdR70ky|pt% zO*{K56g<p=`;6dF!Rj_V9Z4Kex3fBWL}~ny1nH|{??HFC&$rtV!@%g$GEs~YjUt-3 zyg5y8xAoVl=3`2GjRmRwg}nzj?Kb^myE<wR3=lWy37hs;ROnh+ySnXsoC;P)_ZOlx zK7zQFs(oe^qFNu3t$Ssyg|9J2k2}y#^%uW0`}(%CH2YD#%Pcs^MniW#E!k`h>Z!)# z>Ws#2b>jJh;6zDv(pxgML&lgyPQ#zcbb!!sgpiDoqu{tG6%!Ja>nvz7KufAa>qaA# z=oV|HC9oE}Y-%~C<~B7KIy+)gcYDw!`k|a8<5gBx6?_n^Hfnl`YGk#JRXDw`Y3W5Z zF72K~Dqd=&sK!kRIocXZ$WcQ@HMx}F(UwwzM=dX^$<yW*)lApsLU0ONe1#L$wDK}< z+m`P7xi@OFy|1a`^g5Sax&QBIL?i`BM9fM)?J~l{Rc2^%VhrUz829&peWXrWCnHlz z(^x9cG-`TL;&SCcT7aJf@*!}hy(}@hIc?50YSx@pYQ~(aH5qypGnehQvcielAG{aU zX~0_@&*J%hxyYZhxenZpYC#MBj39u^sFM>J%<uNLp{5+>??vDyuV3EiM+4QdBA;io zzdv6tSFL<#t<s2TfRwNG7HQKrPlW>QrIPdbG7F+JhObn}j(kln(mY$%K{!!5k#)1E ziz+3WTCrR!=CNXVR%|-O_{kh9N!CV3M%Px+KVv3eg)|H^tUYmMQB9Bbm&lY5<g+!A z3q(W{bNLa7G-%8GR2a%BXjxsm@<>uSRpgw1Z~T#cB&t&nSAs!Ug_}|kVHMz$WCS?l zqwD<1@hy6X9b^#7A}+?pyqY#|7U^Uy<!oE$R#G6OIHC7~?928tC#m||`Rwb!vt=?X zUvCU&<zZuqgAMm)Z5TgaQb)3^o#QYflyA_|`O&KZm&VE*-qc-V@o_Xmrh)G=FTI?~ zaUiwZw;@Gy>*X6#P>C%ujL9h3=b(@6wKWGF78?2)w89yy=;G^09Q<ASzGu)Qw(X;0 z{;ohoCMo#dETWJz;bQfN@r_l;$_tKiy+f|A>y^}WR?(y1w&Cj}$@F5L2YsfEL<3pY z8Z-dF^8sAbhP4Aqi=v(obhDs>e#QftDyng66L`)T%)98HH5&8BF<Y>v2#E?5hTb_9 zH2mD~chFE=MQHmw0&)Lo6u2YqKeGV1@zG*g<1#Bwv#zb_%-_+JlMrxKd<~ir3Ze1+ zy(_eP6{~SYKhV+(S~~v~1yt)79UHaSeZ5h0^WBheRNU;+TO4|;1L|kljg`GxMRVY5 
zgy-B?`L%XKbD$65%Wkaf(<V0uOoUxGf)z4#f3Kscu6N_X#60DBpQ${*$V`+W)Q3=C zVh%!IBlLCRI)r)=>P<|yYD*~1E|lWFafIgb%{TqMMK!$}&wwd`weq~AJfD%@n)sU_ zUiHfyy0+TP&cgr)(wf;G1RCO$+F-8vOp><HO7p|jNn-Q6t|xsd^WT9I=Ikc$B){h> zOt(p4nn%&aNx*RFpHZMF4f(Ufvk=7?JRPMYo=R06O@dN!hp9(J{WAdZdPL@b!%!G% zLqHJ$fo+g=B{EqW3P?d+m=J67#;*QZ08JwbS`rFm!NrD0j{xSFfN^d-(+{H;KZnVO zq>c^Kn`akV>TQ^)nUX?$=?!SjnvZ-^xEv3@Td*3+ToB$GLi`Q1f1eLu;*Pvh0=OLj zdhtFgHl&UZQ-JSB8KgFySnsCLa+gvITEM<JVb|Z0=_NNbv&@H6(`bHB@Igt@ghI@c zl*U&;NMph*gq!`YU((D;uXAEi{}>T?_A^wxGy~aKk5P9rYN}h!*-ueoBA*hw4DFOr zciPZ8^v@j#d(UsI=5c%~N>l%e$W7+;ycJQ_!+(R9k!HS|Ec90*HCfot5kX%T)t%N- zi~Jqxa4NIzB;-ca!0JvWei7b)=I>ieG+2$PYbd;x;wr_LQoMggi&;CG;F7fIhG-(% zJ!c$nrEc$qdPCdkvnu1mRQk}y|2ztlU(w@aFd)D-lsL#-NVQSwulrLY!m_|0v*K-t zB7y%f8D%CG3s<7iT|s_@7ZVu%+>P|Sc?3OwD#DH8xgHD=<f-VsApaaa9sX=8nv;#Z z`k}l%#O<|7rBhsro=L%+c2xoT1-LwYZBh#O<!BUXr-(Z|lREpYkzkpMTP0~-Q7W02 zwZh$V@M_pc5wh%Sm%o^4qt8t_^m(klPsMxqW>>+Hq9%@@@^GtBaXR79?>LQ?^WZ#C z2`ni`a{1lFpInCsiUb$05edblZ^2mnBP=hXEp>8aJojRG7BaJEcKD<{j}yzhTP#U? z=Aa#XBtim8=Gg?r4Uj`5WN-&1pw{2h8%&)Z;9p{i7uubJoO^Qd2$-{7c$u@ERF>y& zqN~6wdfjPB!z|)D^aBs!k+_=q&oG%~7!{|m@ca2}v;&KPJ2>;78Umj~@P&9JSqLha zzlFYP<2&bKzVZaVB-Mc?2YHnu!LA|`O$fbh{3s#N;_-HA4$=p_MZ|rGufc4|OmzUu z^JPvljA~1&s$+Aa<w()zNx!G<0L@dyGr)f#BOMeS6)ST`QZT9-X)BDf9E^O4EH=;B zE*o==+8m?Sfptj=P=j*yt%Pm3WkA!^$&z|GbdnQQQMu~aAXl=XRo6Mq&w=2&97(@S z($~pS2zk2aJAG=JelIfRnTs4-Gueoy6w{_W-;!`D2U;p&H9!}KX!)wyGt%13G>Z>O zBaXr}qS-H-6;8gFl+j!hB|&HG__QCH?uAZY6+qd0>UH`KS<+@;OtPgV@|*2uh0NaK zb;wtOjM^yvHpr<LUa2YUt!L-)wNxOQvg7UAl}UBoaAs>tzb)z&!{3Y1&uQu2YF0;6 z-&pJkNPw~TIeP9tMbGFy@$3@M*Ts{I=TY%&5zoVT@~P)d6APo+yaISwqj*6}fd26l zSTkcVuiyVH03~%8i#~&ZzGlPMWCA!0Gf#IJR{FI;?gP_@en$)RA<KPQ>9elZzErW? z-z!$}DeP6T*8k_BYkgYiUq~IY)=yyvyM1}}O7uIRM!^y9drD&sLd~O$*hyeu#5%<D zB|MuR{sPa&<4WTs;8UXSCjiNK>=0hc&P=2=ADrQtvtr8#<-kGZK>Z2~i+YDr(2b== zcR`DCps{r;k|OD?J&uqOeF)jSt;!F64YPom7yZ+9fQ}L6K;B(=8G8lk_6m~j6~x@z zCDMtQotu#j_2}HA-lTK8dcDqNby|73nvIwet;T0PM(}dy%>!Xa=e&Wit+N2(1_4tK zJ>Ho&@F}G;2jTj!uGD5=No4gi+tKUoGxifUO6&p|zC}*Q`Nt@!^HZd-C<VXUGE6z} zYOGW~YKVB}>-c2srIvNJB1pwv_RV7Hs}lRAC|1y*^It@P6dqcjDCIs;$|7}n{a0bN zwEnC0YEJ!ETa@VSNVnP}A=G&bfqB<!qf3&BkW{O;I*ahh!r#?-)j-(OIT_(*`<&~w z3HA5cW@%$e`m=&S$*g^tLCz@<0M`kCCyB^pUPuD`kpR{zjc?QYPNne;dVddtKfN`j zaX-DcDvf*Ty+UdHHQvTv;)Yn1ge#yte=uO|J&YiKVh)%++R_{)&I_qiSd0WOwwE}M zKLJhMY%j5@ZER5*pMVy>1mb=`bXK5zVw9e>%7YwwQE9vvGOqVjDG&Y)-L5pEZIaIC zt1d9l3jE3C<x2EN7|!Ysdg9Sts0z6xi~B92`HDn$#vVI|kHS`EJa!sEBl<X=N~|0e z#G}+#WRvWC64CQfBGXLJSBXA?#3B7;AUgP28#eff33<>jm|E(KL}PG`1?WOK18iyR zr@EEK-#D<=?b9-MKLq7qL@AMpXFN*8q(*e^0F2H-_4k1j+Inw(tI~Km%BD8|oIZZL z3U#LP!ouD_m~3*fC^b0{i;`Lh@J}(6VsVI}X;M5&;!2eyMl~<&Z4!WS0Y`~eMhmOX z*{Fz-wZUowjBH+3?(n{;&a#?E?5n&i88K>u>i%i|!DBr`8qsAZj-fVnlD&ENu7UOj zcr8tPJKsdI-m^h@@FMC~8b8KU@3}+S`I1Qgj`G7<7-#jKJJoyip1alQde8Ti=;Qd- zEqbZmLK{d(>TSv1K-&|`*$o3Y^LH_kih}8`ftlRO=24yNSd>_EospK1t)P)MNSMz5 zMFbXV!)H|iohdPqaK2TlCsdyXsw|yVJM_5R`8Fcji2AR-qupV#6XH@LR3unydzvBM z4f~1F_TbC*c}(zSLwgMXgM4Bpq**9!s9VzD=qH!e1;$?DRCY2k%qp0&7j#pf$VRk@ zJ}vAuqB{{t3Z*G@GUUh<RahMtFhwyjk)sMzr4_lDBo%wm1?Ew<pEzDWl-uxWJxW(S zme6Q9$r7u~*=q@WxCI^x)$b=M|BjXmCLRK`hJZRJi82A?y-FLA>=QH+(oZ~6)oG_G zm7oW8n-SZG)I^@nHz|$JLoI;48x87n8XKNR#<&=^F9+-;eGV0gPPh}0%>uwt*&h7^ zikjIJeH*WM^eCR-1*y{y7<3vkDAAj#<hY}|)uZNEl<988lt+1aVQ<1g!t+y1WES>P zqW!0sNgW>q8t;8)$CzynZ~LYZ=TGX#rStC(HZCa)yTB3evmPy_-~(OswN&RE!Vcqf zp@Gi}J#;B+uy|&hmNr=+9n;P-K_62nm1xV3H2SPw#e|IhbXfof`+6|7-a1piP-HwN z7^H{2zdg+^sM$1pNn(G@e>T6pEQuKCV2I4dULmNrfxpt(oApIA)u1V4mx*V)ZKf|V zchNeer}=!|H??#5LN6WbNlX_CYfykKg_THOR9^_2FTwuZg0(8r_mh$V#aE#VnGn{e zeCl;DfP%p?tggB$k@J+TKa!uwd@4m9VSVvf-3M5SiBUWMu?`fM{}^?u#Rg7oj438} 
zF(JrR5f9(+cj98FDW)K7zZihT$5@OwgKx%nE3=G6vK4Y@Bde<-Gp$1S)m91meo|RL zn<`b;MO(K26BC3>4jV6|nK2@IAd(jIpM#El1d*~p8E?Q^LTFiSdXY#}J?38eXq6wU zILE&{2PF4XZYiYgP2}og_GW_ZL=T`a(o6hRfQ6D1w{88ns)Va232{Fagx$LRq%S0O zl)0Az+ySZ5pA=~!CT4ui_9ihZH^Qxh#U26>6Z7Hbqn#h2z5ie)Ybiu*0bt+kjg>s@ zjA<Te+x6L%J}EKXCyl?tC*6y`SMYZff1{CJnvdz?E#UyIH1B}!gaNm%H|Bp7#ui@( z%oNtXQp6YWU}CIctPO>{aix*=UiZ)(*qFTw&sY<UCyANuK8K{sX1gzSn6XuE_vK0L zzG=hSeU~9x*zTJ}dxI>C@-?(l4s4*jzOJb5O{H-dahv}rm2DF96vkFyo8F5}t^)$F zZ(9oMi~Bo>vl1%_AO0!k4`R(0WECATr`T9CY<emo<caMP7+pC8BYll5)vw8`??*{r zQwa1doJQE+frH9%)8A24O!>DxmPlhFq~FmY!A0jT?5Z*B+?Z-mztE>vHrpWqH$Nq7 znQ$bS14=<K=P<2<wbKUBCzDz~Nwd$g_PdY~mJ)PknIrr-mL;(=XMopVX(6vP9zl!D zG8t8u=>F3%*>!CDalr@dER`@@Y?!6d@*<PA64UCJIO-D{+shmcuo$LBx>vxe+Ey;C zzAb-8pA`ZV>?nizOJLlY2g_U%w^_#AX+&7PCq<)De2EOb$F4aLln1f;?205wZvaM# zVFVXXgXYER?xJ1UNedWLbhw#43pHVVJOXQCT7oAT1xqP@drH6g1<S->K{s|^C-D8~ zII-`VG_Cp(PnuTk%;)M~Y9hy;0G87Oi^b`fGFXmJv{=-iJc*G;s){U*MNc7w4PZX$ zFG5NYGosTWBeCdAJRx94bOr)R^%*-w;fF~?jmJo-7}k16tTxu|e7FZm>vqP@h}UDJ zMb_<%9ulu7Tg2<vB$|&tC^RDTJ7N`%xTwhn&1g*%jMzDVutmMrtSTNQWXCw9mbgHc zSQk?Rq?y?(K)r~>PMX=bAQTgbqx%Agz--_|=gN^3-U*{nC`=`o*^BWB5aoD5zDc^L zbCPah$}ndW(fDOKfCnSmYs?O0|98q>)A^t1Kmi5fV)^NK<0K|?>Ztkpg{wAx87u#* zeqqFx;gPHrpt<9XQ}|ZXmRbrVBf~@9!{b|~w(2b~o%2V>(ripi+vjs*FBxfV+~`j# zwUV4ks{+SXm<c0&r6KeC5rkopzl66j6a9?+$nen{e9~GIIv0{&3jd(>d9E1#@;j=6 z)uOkr_4gLM5-{%ICcH@ey-Dse{MZBUT1zu282Bo>*21v||3a&=U&8)UQ`x`eDO#(a z$+2t;o8*GowEI!b(%StdRN6V}iP(KElBg`U#9@D{z*)%O`vf>Iabn-XiXWl4ADbAC zbxL$JvcOIfTh5KDUbfOny8snu^oxD!YWTy%94p!42i&pJ2V91~3)1fIfdSdg-sO4d z0#s^?wrun5SjhZ6>?CT{-mI^K=Fel0?4c+GlPClQ3ODjHfx<bfb!|YLTAMfm$~F|; zzUi(GI2jc0gto%WFHCQ)PbR4%le@x}%Msf$Gn>-kp8?Z8kIzIS{LZ2kPIYA1qR0t$ zn7?WzV-v+FcYYJ4Hb@syr5~l=QXFk8m(jW!<oq3}hoUN{(zpzPWU;St4WBx5kz$$J zstdZw%J~Xa)f0lN%jHF>w}53gPr_z=9*MvMv}fS8675hU*yDz=>Qxqp`&p8$PzafG z#m<%=%AZ_k$Zh6-SXSFN%1V}W(ZY$4no;C;s{g~%TEA5qZDWZ>Vk4~|HI(T3pO(1a zDly^=Z=limT__6dNkqF<O)qXlFWR+|h=Y&CAT5mkLH;f(3SopqcV`3xyoaI#cJoZI zim;&G0GtxTkTVqo4z&eA!rAH-<PNvS(l(>HhpOr_vsaOh;YYEgH_}4<XGm>}xWc;# zn?;DgBeLc+Ou7F;1!12zVqb04b$E-(L8Pvlop1dlMR<bP+lzA4QYLl#oVuz6cm(EQ z;W=YB{ik))y=}SxV~#Y-JE9cTiWGBJ8vh#n6tWyja?=(jex4Nl0ne6Hft8KlkV35y z+y&dDCbKdpJ6!*f9e$D*QZ(PwG9*?lf;3mNx%oX9!Dm#%Tj>sXK7|7O2c;w@PH!A` z$}(qT%e{);@wHLrOr+~eoF4r(b2T#R>l_%jYgt>r>5{5}aWNyvNppn~*97@Ca5!n) zRB&u!64`2fsMa0iy>Oxm@QbJ?bpB*$d`r@}3#0zCM9#0Uq@}4Awna{XqNUUrOuWc% zslzKgZj_jgN(3Qdj%SMs)!HOMgJ?$SA5m?n;P?V#d2f=I&$4o7cdM>mQ?y*xMg;gx zgc(g7CW7dRu|;*V=I(Ayq5ilg`3a_A7|!c@Ic8!~S)viH$y!IUBc2WN3Q-Bvj^$c3 z5<sx!+AtAP?XbA>`_KmLmGEEV1Gd_1d=iz5E(t<VUtR&}*5~|vF-8WPHZkV-dpSZz zp_pr!Gxc~5uY<A@^EYRi-j}!SIA#*7YuofZ0ZDU<FPT}zCJ=W74^VFOBqlYZ^z9Ct znpJI{sOCq(3^0R-^me(SFPx2e+bIFLTI}*=5Tu69@DqdIKdD`5F%49^IqMZF*38aD z71(fbhEG!8)PhF}%!TM2><dpIQPFbva~SF(6L|_oSg~2j>p!M007t}T351I#sty)U z+#Si`84w_Buz4?P3V#KB5SPf|6%DG44C5i97KEp0qBcViqnfK8ixAqFYTieA`GW(w zAaRLIV{Rh7ntx26`g<b-#gL;{Hz3<k?DQn<ll%HHt7-aNNgEa5Q|P1E;2FVHjLjkQ z`T-Xxw7Q2{9Y#SISPD$<Tbr+rbgU>ie*R0Z-#Na;r%mD}%<5Jvs_7s90pggwVaNJy z;Gz5ncB#LFXNdQ_W-sV26M91L>)3K<zv8-CZ&&nBu)9dR+1}I*&}Lh1fJ$0Sh=Bu1 zZIV!tHtTQUYHDH4Y44xZ5%^qP#jpQBOzXUV(rydFEg-4H)}rs&NhB^VDy~OgsRcp) zBQj;caunT&@|oX7tBL@ERuek?2okS5fdLs%LT$*NCE(OF3x;97gEqE-ocb9DFl2Q! zgtm63uT#EgNyte@*InzB9Z1=+&_xdqJ!aCwM~?tK*3e@^?B#m2W|4N3p`^dmSjEDp zr5EJ*DeEctDj!a93cWB2&A~*29n=53!&rXK`>HxJ|5fbYYy!?SjKig2`8l{-`R#sJ z{y|JM;N@7?!z#|5{daszTz&pedK?9JQ8F;@qU0|0D_iceAI?7tSL#Z>U6e&#kwgbP zkkbtwSlf+Cu<f@_ncfPo253+zF_re*BqkMOz=e-l@dSF=3tHNe6Mx!NOm-RZ<2n>! 
z2^i*I1ua#Wv>X0&z_aSn73?s&*dqlVd-T@)W9p>J$FO7ZOZr;Fjpb*IiZ0<kj-=(t z)3frtzZVEN)Zu&;5GEyyDoKyR4}t#_Nqfj|4VZ{Qpi+zi1s_y<&#G{Aa&GbPMOY+9 zMu&t)2l!LwN5#q;zBt0;6CDn2Z&SxMOE<QuqarD*i|U-p1COE7rnIv5v>VIdYQtLL z+vF=8tIkQ-iCW8@Pz=4^uQuJ=>}nca<}1w6IQAlU`d|lyHiM6o3qDTHh2A>nrl2_S zA+q^%P|?VQl|Hvwh66uk?P7j%C%U{@zVS76a{Yy?)f|yCw>|CZvLrN|l>4FS+vXAI zH~1Q@M_VFOIwyh-O%sQD3<-Z4nfz%+pMuT$dA}3f(Y)N<c#Ca<Hc{-Aj|5{d<1iXZ zo-tGXE}|+3jBfS)BafO0JZ&L^nBNGx!%&i(k|jT2v%Ep@)Id7GlWuGz+R=G5+`2DW z)a`k83dV!1XXu&z6g?+ALC@Kb)3f+dJlE~aJ}h2YFNxQLN5m`jA@Q2FOT4byiPxhK zrncaPvkrTn6K}_!eR#*Pnmk1DXa@$0c&dc34gYu3$34$Yo-f5ypTaYP)@Z5EAVe%L z79fULyzOojc5hm0T5GmFJpjT`w=@qL21F6dx9}hS>_d<iZ+bBSNLanucs{{|sq9Nu zZ%5j$dIA$Db&Ad%>KL78sm^jCQ2QJXENk|S6i>1Swe1^0VH!|z6vhVJ3d~qpZgqg? zzXJ`{qP%dJwHn(Uw4c1)+4_+yvo*He^{Zd~>O~p~F~0$D{+lmT#%8yz$>m$BosT^* z0nr20&}O%cv?bbkjJiUE8qVZG$Ol*3*xZhC4DtbUv%|~|qj@h=J~GK)1f2?6ni^AS zZU9&Mjpv%9p98c#N(mlVtgend_5~7@=MO8-+r5XkjLvWM1!50n(f5dF84tfLw0Q}( zm*9+g613dxj758q1+@iGGXVyKBgR-iD*K=c=}3jXt{(VYjZ9Vis|CbfrAYwv)gXY_ zQ4v6I3!prr+D<=J)7@%Qhu1Goo8W5RnM%bbM$r5yo02?~go2uOrV+Uka(kl)NYvB= ziJ(Qrc=R;N`2{d8IC6yuvxg}q);OGU*^kC<_2?JJZgJKx9*$a$VY4ft=wFT9f@+7O zj$`$od74}ad%Gmf_rA69AldC`VZZbwE$pF`3rQ)z)dl0=BiP1ZJ-dY$-og#)1bxSP zNgczsgfSnLVGH~D`xwSpJO32GZILW~7K4{qB>)7j@ZQ<NRquK%CdOgGwE<m;>40L* znbh<k|G`<n?<OE)VVDVMWCQ4WfcB5bU=AtqL#CZZ1^b}qlhbb~9C*-Gk;ZxAT`V0Y zybkv}y{}K37*C}jNCD~Cih>GjdU1BZa@I@C(fhvEMh*p00h0JY@9QPky)JkP4t`7= zqP*~?>!A&M*52<x2k*Th{F-zns1|+)7*@OCH45wZaE#_Jpf@pHc?`&iqX9+x9zkQ3 z#(yT{uqtVpS=@!-#!nke{xxk-Yyf0~*(t(n5msJ^!~C*MP!4Ndq{RF@00SGz1&Krf zl7x`PN^-FpYdVe!k1rrQ)O`+Ple1_!S03m=74>zWqxiQFifLao4{wB9^g%?F=gS~0 zM>_u(!b6Igk78KGX%zF_BQvo$i2dd%>Ll%S;>zYS8{}-d^88%#^8m>@n(H6JN4eBH z0j1d%dV4m1hFL&aSv{tK$Ix%EF=8gH*LA?R>-5G>76)qa5?U!q{5zOkM$(KDXRO2( zGaf}bx2|K?&R=KDobU79gq@AE{9S-_z5ubTUu>V?@OfJ|ccbj>v{^6<LJ%vN_+lT5 zs+VQoBJBbzaqyAIfg+76Ibk<ohp|+arK#>CO_g}6Xg2YP5?z6EY1!XzyS@qf0Ycyo zuOK0K^{@C^(P8ojvDHkzYo|CVWwttu893J<y#^+hB@U&rn!3T0f)?HX1<Az8=m$z; z84_P?0&WlocJb_!`cw(tn=;==vp-BaJ7}^<vkj)5GB<|@BxD3D3m20zCAX#9AzLA% zHeAJuNh-{DyURAfZT&N3>rN%fv?<X)A_D19F*sY|SK`=n3hiSh@}3UycJ4WiH(bHN zbUmqcI2E<H#I??F`i~;nm*C<{G3o5OtmefzxlK(?W9UPt^?{_R4jL<mG)z;|t{nRI z35>GnumQA32}vG6{NITX#smVXGT-f&W{?OLdm#JQzu|LRVj9_7JPjAE=2mf)a`9Ab zAy_6`@*nHK5Zl4;M_QX+{4AWn;AI>6ng`K$p?E4K0IPv1nYAu|;3Z1JysS<AUUB&Z z&@#*(cou0$s4dFTZe<VbvtnZq!)oOs{F}_@DHn%f0h22Bz;l-Xygvx=wvPbJ=czn? 
za4`J^1Sw++(os(-O7^h_4k30Gv1ow*3jo*yuOlp`=K1je*G1A%BvDKgg|#5YBM4&7 z6Fcw+#8`T96Shm$F-4CMRvOmRzlU3yc>^y2SSS?R4u@cwoDv##^y~sxs3TZ9P{;%d zV4{fxRJ6JmKGh2ygURWXjF~(9skC^I_ki6)F#9EEOd#ZJVmWw7$<^jN><83bny&>Y zLev|G5KaS;mcdAD^#EG;S!iW2dlFE;4^Gs>Ag}%LHh~9<rUs`{k*H`89YP}tZwN9_ z5Nb4>{Qrg)EWdHM7sD`c1JExBvYFoV>hx-(khc<7V#FIC<h0_$S~x^Q-Xqi}81h0S z`z(%QOf59lZteEL8@Cf<Egd#yUDjAzwgL0B?HFrwc{U|)Sf3nluR1}w+xceXKz4pV zDF<3R#md&RV)B~jccRiE>scXhtpKePdPzHNO}c{S>_$Md+4Z2J`3~AJd3QY$$aFIX z`~CFMe8)VB4>GIofqW${KcIdLn~0fokH)b<em8~*vP0#B*Wwcfs_7_=ve2~sD0Cwh z4X~qPqW%M5l^nSL-&NiFUsQeeSbx>K{=2Hp>_(s@oc@#bn%UH3)&+`=hYRR5kn9dZ z4t}=DW@k4MKznW507XWFA~^)<B}jO2XA!N;-9#m#*l;v`Co<_-f^MC^gCL=EAEC~D z;8WB52Ias8vj}~36ULEv*{WTgK1{L~8r$6<UY<ovHi3v~o-iID>W8V7CdN|4i6qAM z4ebxmQmUl=ftwL8iI;^*g+j63Erc38A%+wZ;C|f;g&~0xDhNPW0h~tJdNR=LCeA_F z+`OLKFu)Did$N&(XP^abKo7X0_}Qc+i1%iQ04)<N6RtU%hyow&e})9WON1!ABurbj zSe5(+yGE=FcDHWzM$lQ1Z?>CA%1Iyuqv1qukiSCW1Bc&-h@49tFbOAM`K$%MhYGq; z(=Mdb8GBlv@Exc~)FVe+e8f?}(3glDZXwD$X&-}Zr%EHufLK``s0(E{f(m10Gpv~1 zip{cOe+QoUHphy6YQ=n3>^&=1YQ<i&V&ztBzZF|mOkGKpJVOZ}R|iHdYfRoAhPD`o zCJfAjO>5Ar<~s<uzn7}5Uivr6h%|Jr#I~<T-l^66Eav$kuMl+A-Czo(;)D~h21A_* zQ`$fw6Ok*(FQ;<(B5a<J1c>h2oIp|=g`GTNh0%lGX3!tM2{;A|w$fM&6xeLy#&FBW zLg$8`qxT*s`p<kP{FI20Bq8#+h)~a(@94z@fxIM8dq{xP(RwifN@|u~OhA%2g_*aT zWO5IE*-dg3Po<1&m-?_UCn%BE66HNfnNu2R6tx5x!vsx*e~$$I3b+71-N?j8VH#)w z2u!(M#6@{R?1`9`T<@Vo{xRYha7AVO8L$Pq_Kxt1N(i1+U@-~+tM2Jnl;!>0eF79t za`&uDxqFzE1tpCq?*5dbmvA>3m(ux<kWSVVOF6@ag?XYYR>Ap^S5b0}94oOE(<En$ z!u;GijRYIYiiCzU!>x6)Op5~OTCvw2;0wtUob>WYcvweLn*2RYH5c0bU(rF-f+I~e zJ?;Jr(tMPJ0|^`4<^~5H^sJ2edjcqjt{$0)Qv~`U4^)Gz(0`5=KwY!|f-Tvtyx{Mh z>UY-HodcW0prhZm;p_foQ6+hf2l<u`8iBB-=?pz}zcz*!!uA`N$aE~WIpFqu4VnV? zo-95=e42t!iI1_GgLA`ZxTinmQW}4NG`2+6JNk^_*djq;ddC;~VR*GW0Rc<))4~;g z2LDMLdW{_CRVQa6OiuGzWHovkZVzODhQ2)jTTloaCA8|ORvPQ6bQ~a?8!NZrbl8%d z{GLVLi#U9?eL^*zV&kXaC_#%Te{Z5fKkPxRwAFGijIrd5F`k?;MzdBpU9)32kS*M< zlV`D$N30zl6+ZY?Rh9fosNJat!B{j>Ohc{B6>^iD7!8eD4O5Y*?yiCAaCS<~NYV+e zhRHr%y%HyDErVkvwwGnv>kvLO-rTR7pmo&@vJdL!n2n#~q3B!C%!r+T--lM~JvOCr zmX&ZPC4eH3zMZf!;lp@*Xt+p=5T$WG!r={2V83@`)=~Ac2U1bZXBG-lfSt0eBkU(X zBsp=58&D1u0S23U?Wx6=&4)aSdmK=~W#JVlCwwu5)X?WQ^p~LYyTw0bl>rj~{NsJV zan9z#Apbr&%YW{*w@2(R&YC`73g3c4@(;rh-7PqhhQ|>F-4+^^RuM2Fc83FigO{62 zKsg6dy~={YUOskRc7jj<O28b9t{nuDlkIVNY*KhSN~-23iv>*Ly2!btcgsodhiaaF z(Nrfzump#s%=((j!^xyq;0+K8nAcaC*^fYXVZw?9q@DMn+llsSHX>hA1Z0_%q`Njc zOeE)5^kMVbq|hXU=vWCIk%UpXI(fk9RTw<1<4v^u?B%~hoHUL1ymCKHgxQDre~Ohj z^d85?E!F&ORD%QiC617{XH)q;;lk9jDTT%DaafQPuv#zQ^bu7ATt>$hVvAy<Po&l) zQ`Ku*FQ%YzkMOr)#t!YFqg%9OjU#5@jI<-jUlJea_!hV`L^fQ}WQ@nK%X)Ym(obiW z9tIf5EK1lz(3lRSMsjd~A6sX1%pMaYPQ&yaAU|(83}~9OpspSw#gHj%|E5y|0NeO4 z0BMnlU|#@v$PWp-o#nJ_3GVAS=aUZ5qZ)f*?VA*a6EWiCUEJaA+xVr>vB7<upy=`6 zK~=->`GOD2F7$Fc8S&#d-jJr7(>HPy^SbCOY;q)zN!e7K+yM^r=h#~t3dIqrFK`n< zCWLBTQF)H?&_Q-k_@P+0N#J~Z@;EFjpJP9)yfEKg6;xihC#~Q(ZYh#;qTQRvvpOgC zSG^ZDX0R2q{XOr+jl&k`Ez`a4Y{Y_Htc?20qPHk7(ifJ`L-K^L%WiOp6rg*D1{_>^ z;NUXg%>qvs%rFQj3@McOm7u2O$gv!KdljX@JDk1*#1|Q)^fF&wE1z`!sNP{qPFaTf z#0ZxdTwg#Zrfdbr#r}<G`Ve<5>=F&}qOo#d(l#A<^XgOJ1`lz$Z!2mWEtukH0>@N` zI(+e;%#kF%0kCc1td+=iIaw0-kj`l9*ONiM1}sR^L(3Awf~$6`=uBEivRA8$iqzrk z<aa-C>a9-u``*_!e*WDSr~RP!@FuyaNORz<w6!}i45Y_!lRPR*7HIuqs^%oOKH$_z zb{PF46zPWuuqA7Z3T%rxjU{W~_pV=%l_;%~SymVo!+=B2WA+Q)ckA-Ld&J4MuhQ4z z#0D!CpC{1g1@=DyA@7N8e`Ynk*a6$Vw)ltG`_eMvWot>`6Sc*=`r{20Us4QXqV>Iz z;&Y3C+#iop{OaOZfBb%mPb_}0KmGv4hZp~d;^`>A8F6#-TI_P32pQYg!Yu)ftTa!+ z{uwgL)?fr&xw?NG0)Ol&1iAOjp@)wirFbMw2l&deh}glRfCFAZUw*gSY1d@E#p!L| zcm_?kSID*A)=jDO8Fa2`GiOs7{QWP{k8Kf8xSW{bCfJvg{t72C>gg9VcPv)3Sz9C} zl;5gO!Jmx3wfU`DDc=MRNFFc6>2FLjZiC<*AQX4gBeBNZvWlG$Ck^4`(=M~L#I3AN 
z=ZZQ<=V@wwITqVLe6Qc^)IUzSk%F-<@xKocdb{b77=3`+yqg}0VF#$yyXleKx(x8q zXoKPJ2;u&Px(;y0NszV3-=U>rAo$xWa9e^a16By_P?Ufn|H6y1It-12KgUIfHl8g7 z7yZFlxCZI4A1z&LR2+>jT)Pv+P|DR7H{moQ%MuKgP26LDwW#7$-B?y}iWsYUl~FnZ z&Yh<cAMow45#X>w(w`zbS;{1H%i1b)c}FNQ7L>)=Sn}GzaaLSC^e5^9@$FK?um#wU zRT`XTjfHCqTKF048dwrX9I+U57-WGxD=v+$5>fc}gsF4yLQYHNlmC*L{dfna`*0e$ zCb{(s5*8dO9s}l79%^N+q(2(!Iw+3C3*c!b_>FDg)t4Z%X0Ud1HbwY0vVlOWC{*E5 z3eo0n4Qw%kNHeLSP<Xjrsc&`JwLIo?7kg5FJXXyvo=mUd#Z%~&UM%^3YSU7AiI}?6 zy#nDMuEtV9?9IWr({HIv<>gpr!CpmYRxzSr7|bE|d>kDyr&zTu400V?93i@~t2qsu zQlCW}3*oR2#)HpV$S9^0t62TLW|dHtSP<mPkb#{nsh?XMQm>8Js`xTM1D1xmCBdoy z-*z>4Ma*#qW?WO=7MzSR%zl<E^DmkLBW{O`>C*@~NxvK`uO|k~sUb)^<dW*=e<V4W zMnQ=t!l$iy3S0)N3R;3jI{O>8sN-Zl2B*tv1_`TQb{M0;-Su;)XfE7y<nR6M6x=jd zMsw;pW;(nH<mR-d6gU$(n<pyIx4|ENB6*3R4WrC-ItvQxV1=_e&Gb8)Y-Okb)ir*A z!=Si*L3_IXq6gP!UChvafs!2U3rulz7%fv8JAno+{_v=dIT>17S>o)H#K+<TSy|~| zC=kT$JA|OiwBaas!I4Bt+5GystJDjG?Pb`c!&HqfdBA3-t-f#y#)GazRzV9~bNsz@ zU7o-9SSOq<M=lbTr>t6l1|8A9q_&_B)#U<587SO5CqrF``|^r$AT|Ktsl14$T4-ce za~hgwHO|CRs=uX)EIv93VlOk(@oBlUtTTuK7}?X?QzW7oWpH&4M<QBMyAs9Ob&q7) z`Y)q6<HT|*SY0%MtmEL)L$Cx`6ZS9!Az0NkVLiN7tm*o0I#+GXo{r9iX*eBigO7k6 zccrl9@X7B9R8__5&hcTGmC;7nA!jjaoww;G?C)bOv}pnBY5g=M=1|~Oe?83E?*ObT z1b2ullG*Kj)j=xY2n;<|0p)w>%(WrTUt>*4ewWE9BqqPRHvlmm_(No#gNRobd_evZ z+SM>R!?{Uy##0G`SS>NtvOMWMTeV@4lofmE1MY<qC1BMPZ2%DYLs?nHT^Fw+iN)6y zO;U&ZeCuExzhJ%o#%4c@+TgX3AFn#r;|o;d9u@yN^BwqvfGXDn_|p&|OiOzan_PwU zc@HMe=Kw{<2Xeve<@?Zfa<an64KvR(D2}xyR>AjOh0R^N-^_lBlDfQSmBx*rAug;L zM(!9F>Cv6v?hBwUz5vxg@PW1yw$>+*LwF9MzF;+fI$y|j@&kEp_OHE3z@WXsn_)V- z1cT&0WZgr4WI!*4bewMw`Ew>U9kx%!7N&kjj}V-y>X(;%;`=>pC^)<uSF@sRYR37a zd&m<Zu?9Cmp|#ns6Z%?jf!1SYA4a&K%d*qa`;drZW(l|!g7cp%@OKq-!8t4az*3Z) z$c&!VaOoFramws6glqKqcZ}IoLG9}PR*+c2QCZ;*Se7lD0qJJp&c6*VTy#icV=n&$ z)>E+vv_SaXhzrNC#5mlI)<GwsnRPM)D|6*Qsm-Bx_+W^(T71}sD+*G#f-=^?(m#i$ zyQ<E&V&w}T>1LbWO8cBktOV@~+J%;q{#VHtvxzI4k{34Nq7>`8CeG&fBIk9Dr`5ct zK~6Zm<0YADO5%;!e7Ysik>A=Do8LDO`g$PLn+yr{iY|f>Xin^6u{xLctmgJ!-0T90 zz=0_S+?+ba3Q)xDIRDZBo-%iA9?#>jfepC}D1a!agS&um`A-gQm~YxgqS#fm!mUIf z1#Y-|$o(QML)T$<^?Jyzf|@d`tAf1nIm+wgD$0mUuu@=y0YN4<)%$P25nPB|*Lg2) znZXxP?NbJBB0Bz-s2v;WIG+mylbh+CcOl$_c?7iv?r$W|0%qC}n6U`QDx8&7)xn4@ zR^hI!GHRT#SDD!)tH|hv%aszXr7RUPT&DILw#1A5O5yuTlnxY-xX}?3??vT-)p%30 zZu_lhR_9X0t!2}tu0z|P>_D<XS%FQ62zMjaoA7NS7q>xArfE_=?XQ3PN+99B#9u@m zbhF0mK^!`8XSQh5(aA1^o#gDuP9h}Z-No9@uSNP{)=qExvBW}zS0RP2Q3K4e&SM`O z`|Q}s%p=;l^JiHXpm4_@zPQeRVn4QVxEF9+<c*3Ku$wcM<m1D5T%K9*0YWlD&hzi% zAmaNHdzGEQU1+GM_Ml7Br`1EI#4WX0B%&_D%nb~4mM;rbR)#%y4xE{=TpkYLN=SLF zF%A7irzmD(c?9Sg1!LI;C)_WvKD;Gwmi|>Abl%@KUmcsZIkxJzE|v)=fBimO-}<`n zGQh?(Pr)ID7pdDR;zlI#?Aix~nBnFzuv8n#!uk0Q+SJ@faB2bS!%b0g!D0T(y(U)A z;T&@V_`wA$CZ7v3gHvk+44Pr2>?2Wz(<5%fWLKE?<eK;7nD<QQ*-1dm*l-(f75j{a z^@8JMP&1EV%7ae-jD5*kv1_q<Cial&>k)i6%}+2qfk<?{OE?a?RPvux;>KUvFkOzj zd*x-7CT^JH&k5#n)*O_v+Y)Y~xo*Q7K<<vy(4Mk)w(vup0x!@*e*kCD6c`Mdi7DVe zuzAFgu??Uvp8%*e&nACxxVb7n*p22@RkPx?kOjS%G(EWtH(*-^F2iqO(rH<iD!{X$ z&~DQGFh^;_u?2&huoC2T7r=Q!9LK^=UKKGZ8HF%CwUt?Zvx7eS?~*@*c6G#ATa+ri zU9-vd@=J0zz|2DdLY?=a0KVjPEH!5Gh2pguF6;^Tq~AwiyZ~vIldHIH1dD*Dh%jL! 
zW3q_Shm+ZLJfYF~I(i#=52(P+>UQXlQ0EIsO1kwbQM&F^EDHr0nh^tqwh)D2B7?_n zilAi&`QQE=G)hu@5lxJ9;K%_k0oJMH<2)NCd6<`o@)-0kXC=MmSfHk`cDiQkG`}$q z6y~3x0xU+5+li9FoOHubIR>^gcpbyJc)-h;taj85W;S(+Ri@{gWqvXhWtv(Cf0>$e z$lbp%!;Bqs(+)|yc1RbX^k5a#NV3>Jpjg%eryF=Q*T`t}QyBQb7ImkwPZNC^B_zF( zX9T(9EIyHg$#JkFe-8TyIOC_SA3Sie8c8r`C00{j8cFzr7LXdYIx2CGz~tKqz*{(& zWQ18k{xfpq06{0AH#WZ!<c#9H1ZDO2H;*II#%JQ$xeYyx{G<64#0HT$euNgO*ceY7 z7y1~}VN77XuWg<l=_ok9f}Fx#n{xSI0VW)4t)jVxIB1AT<b1e;yP&|nq$>(Di9HWr zfsSP->B2i6qq!$mQ&>m2y&rCJ<(~y}+y7L>SNvLN4Kb7IUjt@^Au7Aq<MG`iZu{ZH z2pnq44>)mgC1zF|GxQc*KD;q8ux7+CO`gv4T{Ko#v%dU$!4bW!U*Im9JC8WPF|nPt zQeq*D8N(MD6*w)9sp$!PsEXxY%SOT9ngx4}<vnn*#_-mC(59)aUpa2lznZt%9+`J5 zyV>ErS=JWN_Ex?Am1omf_Ueg5Y;lU?{E5k{_LcT!Xj6f}<gtm|*i9V+Umo2@ekb^d zRfaq{<banNtCHDD2Yj9E73Yjw9kimtbD0cBDWF9=8AEEV>Cr#788zpWDC|YJ$FPUh z^t4`dMCO4fZ?5%zxH*M=Xos;&<U)4uJ4kuQ`#w&Lz%TzEhxZ;?^Bxd5U-WDm!(Kb_ z`T2JytH5`$-Jwk;q^?bji{0EI(x0=irB4Fidw?cNk=Y^#T?r^kWQ$~Di3}pcCmQQZ z>_9=AzOOXaqY@0rG3PNB0<=u~L&(1bPZ>||5?Nc*401J9D1EI>2oMpc)z>K!eDq!w zWId4pJ{e<0SWvfgUui~8;tB!e0$GPZg&c_gjv992vsk0RI|H+_UL(yYoe9_aE)!P2 zv-rMyo0xoC1|XKT4GhI*zXTBuOFl_z{YbHwJAY4ehpI{}P{enUC0TYxKo(J)Q?)+o zPc%`NTIC|Oue`(pD0kK0TOw&0`Wi={NYS^#1LF=-92g$o5lI*&2ldDrAOR~9u{q%g zHfPzy@A-#gi$|QPjFr2w<?`2jkQMWBoRAlw-c*9!?9lI$-9kF{sMI1@eJI^1ruGT@ z;O?ymVf9Ak!{CA4xLLTH_PZ@^cu`O-16q>Q84g3yg;!hkRLbSDa_teq*X_0o`0%0m z(D0WWy)eqKb)m*1j<Dnr#%mW{2Y3?YVW$p7jx;yB2CAXfCVr+bkxkrxwcTN+5@M{( zg()+`mF4~RVsHSP4@)__$AvX#!ftOV!DV6>SlgW~LW&z_k`#mg{XMrDKH2a&a2oX{ z?OepcE{Zi*>!*tSUT2tkG>HrbRGDl&kD=FMKan;-2`q;f|CSQ=YW`cTolfk)%-73% zOugw0wkplou3o$h7v3;b#eKb96b(4y^&A0;q|(}Mk@gyv)|f}9l4nS4sS|gb8}sGZ zO$f-we22dF=cU4(<fWezzciPXG#~D3ZEQhTH7zN@@vE&4!D0}}&(0s89FQ3<+wWh2 zVdX6dA(kF4EIgd--TX>uv@xxpDeTp6XtZ-|X)jLLEb@LC+g8-eCK(kjtbdgsE(c=x zl>sG62d=SkaaMWIix5;#>jejNV2^%b-sZH(ybzhoS3A6`Wv#^0Zx=k9#*sAk#1`9x zg4;z3?lMvrV-u6~Rw%f^kB{!61`g42OJ$U1K-n#IupP2-FDB}){5NeCy=0G3e)uGy z={N<B)R>N?vBlS7%Ty@Y)vV@REcc>O<AQ>u{538kBpWw7NTb{=<LM2_T6Oc{bZC)L zq(#yly6M@JTVFSdw8&dS^uyR#>8?`tR>C8`xnfJdp*$J|(n#)?bC)n}^~OrC!yU@T zVjJ$LMG6d0#)4j>^tztTIUpTYdxdx@G1@zaF24f)0ZVMg&AqWz1-(pjwe~rdVDvzO z-Y1$=+YR3lC0b8S)_Uo4{|6AqyL4bc>7xPVO$-}qT0gyq4-P0x#DF5ce2dr^P(bf3 zLfLMSQ7Y+M4K~wW!@_5v!isY-=a=kWA|<&cgT6Q8DJMrZkTtDeIj1>vAOx}s<@_d1 zY3fgWLCU#Eko8R>E54!e9Ya3e>xd=Ex?~7h{Vv09l;-qeraP3u-MfVXsF0zO?5U(` z^wu%@M_m}8!JSo$^b4L~bzP?Zrg`FXy`slVWP$DUSIvU%6Q9vAoh9_%dzcqgIhc3q z@}8-EneS@D^fouVF}x=?a_>oP2b(|z{}(Xt0p>kzWdchg+-o<OvkN(|P3FwF<lB22 zyO1NBKMo%ib`td@_oFgWXoh+tY|tTgv&*ot5|>_Rs(&#i2qa5f%mtOBe}#Du+bI~2 zZQE5kwSsVd3kSKe_+S=4mY1@k{<aLq^{eck8$o<nH4>kaw)wW?FWyyJU`~A#Uh`JL zC^X_(4ZV3}Ve|;}X2m&n%LNA;mXCSQmr4GExNpatrWV`RjbtrmH#xjF$=WK&l8~Uf z%h+2a;JvYJh2Tb`=FHSpO{E6@`V_5zRh+@VKRGio1JYxG?G!_z1wDCepMo4(CV&7s z`DRCQqR@kSWcGcBajydvvhR~(P#Uo<28GnmnK#J>04fQ<sFag<)mogH+1CoLYyy|o zO|7rXl(bC2dXSngGQ4b%NqaN4HI>q&0U%j}44QEt&ADPPS*R}Q5R;-4pJ&_vMFtyk zrZLP|Jc5KCx=`z~A0xR&(sdB)b8L9*UYju&w&ii&2{g`v+?Z>L$%2-yPopGKtA-p~ z;230bvKz@5dvT^1>y%u+_W<l3^e=f2Mls@;H)pmb7U23pUA+On5dz<tAUnwqO(&O) z-@Zf#i4(X+NvB)D>QYe>n7J$$!|t#Ef3ua=4%>5a07wiT;uz~;TG0K3O2$tJV2_vX z<wi&2hY;episL$buxb~G@ZaqhD9~<#ldeEiom3dk^8G6S+k*UG9;YhmdV^wDdg$7i zYy^q7QGAe}CLn77-*<W(mN11dQ4Jo=z_kM~9U9SD@Xs>#7K-OgJc~4!Fa~$Rwt#y= zF6U1H87y3Xh*#3CI2x7k(E~Vk9snp7+t@me<EoX|EbEe$H0wtN?D6Imc_|+py=d&6 zj^djhyByE@i@0gE{-RBri9zW6G1^nOjL$=fz-T6)`i-i71%jhTI!jOwE`RW-Bj^%d z%Yt+}P64AEXd&~?XJ{}vyFCWMXKCG~>5h7(aTg*yL6&#lde}D0-LYscFo1b8z|zcF z=|;?hsF~e?nGj`O19-rRR8?-oQH20f%<NP6&K?ug5(Qv)GCBu2ah-tjzyi?Sh?XMS z9HsW*V!r5iAj8d>OtiY71;1!Qdm~Y*3>VqQ^{u$;DZ4o^t7-YUri#DQ%{Ta|6WoB5 zxLG;S8sP7q5sguAWHG8U|22CBHi~@S!^#6sqF}&AeMrZ`dk&Zq6H$0jS-0Vpm;#Z+ 
zcx--IKv>!jfr&Y2#0&%?sklR_61Kw_6;z39&4@0^+?Ey5au8UB3~=lbtqs83eJ;SF z)RjyE`7FmCBHR@KW1?ynBSx~f7VRYh8Bt;`WoI_N>-(ww67EL?3k{SB9EKFy?mw4x zNx?^9tJ3#VQ8s1gTZouZD&G|43Onx{_?OH{(IzV|6cij;r}u%>ttBP8Kqkf5OYO6| zISIJT6lr|gG%SPHc?BhvXqf5|g{CC&RIk7#ECEA&=RJ8tfxQ9`YMF%%j;<Do`jq=G ze2umI<@nBqH;=NgY`R66#fBTDN@3@4d?+|VEC5ypf4&UvVwMz&jsV9+X(J}dT@~Oi z53=C$Bf&{5MugCxBwmy91#iTn<%oDIT$_s6!}Qe@UDZ5te*IU&@WTayTJ2Jn&teRm zFth><`>7BU4v{$McG4;(AIJV;(HTe&fO)7~OG*a2d4a%}AZ&tG-Zo|DjUtVz&KE6# zK|;BIG0N`r;EN>~5P2nf3=J!yCRHGPut|i6{v_r9R+Gxu!{V#em&ywx=g(iKqgkVM z(X5n6*2;B8j?bryHm4+C>kOCA*C2SNkJ`8Qf8M@-qM=t%V6c6+iZsGwNc-kd`+WE! z8nlf-V&7^A$!Ylo)2yZLnPasDjj-({Nc)?jDY)r}+F)<D33;)eXo0=mYQa-bdmCRa z=ne+M%d@bkiFLt#Ss9B_x%sW)p2z@e4Ftn<G%hK)C-EygjXy~WndnZ|mfs$THO{8Y z|44vUr+qI0dOzIpTEc1V6Ih&&lvS2sTdlVQTJ-TS&>%4nEEA)w^m7O1UQ$=)%zlP} zONt<-{v=5uc!5Ob((?8FlqPBG_5A`yy(*GgTO=eDzcw)%Cfejy)<gu2nTdHx>77Ex z+r+g=xe)r^2ZO8N!1}^*V(pyA-+7+$=YkacLj-k?*razdfk?h!qSY%gODK4wmWO{X zPPn<koQ7)-a9ZSJ(``KerInZeKokeNC>0|XuNcVV1N(22`Mm(ZQJ2*NaMqCiDU9+M z!*Ep){R&PjSKN&TXB%-Z8Ou}-EWXyEe`Hf%4)7vUG#K5Py}NWKF4h=LWVJ4`xw?l+ zf$Qz*#Ax1&B9oMHh)QX0(Qh&(3~9y?#uxFkLpqg8m&eFGXqyws$+nH+za1!u+Vt<p z3G-sxK%2(#9}NHq10x@oY|K%sF>@|$jDp4t7maBT@by!vG1&J_?=DS4W3Hu<x?>6w zu^D>0gT`DfGs$gel^vGnqMFm{Sbi<)U=^ovM}T{v_J7pCAK<HK;4i5rYraFfgY*j$ zGNyO$V3#gw78UcBTEs20XoQTC*g71?|MMF#H(D_Gc^3R00hwTMkv3e;yLj+XLh4+s z%q$AYYHm69mA4F2o_BSZ4x8Y>-2wQGBXnZ^mrGc?bvo8MSvz1spgD`Uk!U$&1RXiB ziRLDk1WeoL$6{zZ(?vgjfdRksQ|J|JABy`ECh`m*He~nmN52(q!R-kxq=%5#(KIn} zL~My()Fw7f<R<|!B!jiL=kA;iaIxQchU-5gPQZSrtYPQET@3_-e9tiO_aRp&{Z^HZ zJHTlb-mWRlN|Wqch>H;>;rMA{+(1;m2|oZ);nqGU6zokoKJN)7dKi3EIEij9ciXht zv8{BCA-qf{#{6gCkKc>mtqAa$FGGaMK#t4K@nbN(oBm8cIMe$S7UyjwVs!oZt(d7| zb7u36v2AI6Mx7gFOt#8!i!#n&PTXIHyGV1R3^>@om0y9&buceznv`%ftx7WsYkJ68 z{~S5%M*=IvZ_I!|FZ|~vJF-4R!5u?^u^+US9nODKzmT%6BDOV&Lb4ea3U_`R1vJAA zm;KzPN&FU+$qq-ZTw&O#+%e=Ff|CJ>;X`W~@D#>A8Uzz08Hu~S8w&sUN9<g|BW^3$ zeDDWS+=KJ@svzxwe_1r4kyb#3RaN9WA71+znNrbv@VxF4Ql`pAF@Yqq`}ct17!psV zq!f@EJ-2-d-LBzxEh@}WWgmXVs9Qe*)^O*ymV5o~I-Ae%yLS^jyf&1^XHYoC{>CSW zMaZFqcBaJ7AbD{0QyR{S8-5R)eFl}o|Dq<3+(O(~@Q@@qUI8rpFf@<leWElzh=lDW z)_%r$l)v$YSm`{uSi+of%P9Ush&DTfJ?-4M^g7PABt~Gr2|w`?LQ+OtA{xQo2$vMn zALoi-m~Whm0>R7YtXnVW*CkLFO;bNc&1^Q&q^imS5H5D_u)|n@dtbATexLU{scQ8K z{0foM_$;z`D{_?w{|y0C%Z20&&Dpt&zQ4BJpWKci^kI?7NTNTQzcmF_o`V!e;%S6F zJS-FAa39pi-)sRKso=2>!1=<ZMWAmv04DozN>vs8dX%H8Dv@R(LV%#G#~Sxxe+^nk zsF9cd2PUF0g@!sqqHC~&(nUH^^o|=R5a~Cl2D*y$vd2Tp+J6RX39$y8jC@|dM``>3 zErhERybREN)Ngz)K(XBinxhZ?z-DtnP*59RErJ3Uc=n_hba%dh+}n%wo{lYr=q9UE zNAnjagDSo7TKZ!=T~H-1s4|QE+%D-??CRk+dI9(x8jC{;Ek6>v6A|<R6a@NsXpOjc zKQRr&fnN?f3iknkINBK=n}q6c-%%H^KL6qP?y1PmW4)*>F|MDKC@eYBn%UGK26~-S zGl-TwzX2rlBrtR0_pr!G^)Di+J$6S2j0<80!7u-pfeRop27#nBXiP?;sZB=^zi}n7 zAr7(_6R7j)KmsR<{*jkNW#yot?{0$VS<-$1guRjcj<CrZ6tWJlryd|on$(z0fQeZ{ z#GL%UL}IEaM9A-3=oFIQINm~jIRZj{bHEhoLVj}w<<~><>k{(o9F*Uje);_sb@7}A zvkP7}TkuPvgR*;^=>84a4Ul{9rG1P|boI`dV;+7?wu*naOZ0FxRS61_^r9v-4);#E zY5N&2uGCzxSQS4)W<PLwLM!Md;Sk7!y>sa|*9KaGF6Q$mfW3*gX-Hq_MK4Yyrgnj; zodHzA?*st-l3xx)@D%p)2KtC<gxqJJBc|xVR~(!A<Ufcb;;}o<40QkWhyFqLPeCF& zUUWY=@zTB@-A65jP50X#GBh0^|NI6BAud|sn^B*+S>|_(x0A0EZx^o>Z#NH$cMe}d z@9X(O5%utS;+@BD5bx>y8u6aNFBk8be3E$2;$y@+mn-63$kWAp4mbZdVdyhA`}jEo z&CR9!jChyx)8f6DpAzo?|ATnn!e1Bf75tERui`I>_Zt43c(3Kph<BJjA>QlxqvE}R zKP28N-znZ(d82r5<J<5i6rQgKm+`wP_4!5$-Y$Yo6kH*K<Oj|xM39s+Um$`HQSb&4 ze1w8CM39`j_+$}$oPwi8@CgcLir`Zeln~Sp%^0}xQgn(so27YE#mx!O1AoLmInKr6 z*Vh))T?$BfO{8pwKTANQ1o?}U@{K~a<KP~y*G%U5iB*cro4O*I617s?-qcmelucGj zjyH8pGUYZaCD)s}Hkq>2O7VD8!^xClk+M0@JA1uI3G#eO>Bk1M4dD+9c}&Na7W~x4 z^W9I2X`?aIn(tqUC}u^N3E@Iznw~oF3u^DPqlM#C$AYCAxt@OBJiKYxf-=kv?Mt<@ 
z@X&POMyy+@81d_RUncfmaw-S2oM7@C!T;0Vxd290UW<AsGbBR@%pgI-dk|0*#3&CF z0ydEZf)W@AB&3QG$zT#g5|h1oSON(XY?3jR+SaPa(~79Ix3<SVL~XStKodZUAXZU1 z6_itV&TupyBg7h+`>lV^B$Ei%bK85*z2}~RmA&`>e*f!VYyE3s2}W2t*mRDL+r|C9 z-BHe;*vF%45dPr)Anr&THpVEgmMG^A`}nF4xLvr{9lmX$=(*rPy-;UNcrz=pvd2^n zSL)zXy(+bgPpeXY3}em*(8-p1R3Xtv6xu5|ZyY%94b*Ei^$HB@{&Xygz<DtdNR|Bx zU*#HVe2GU;&gE_E8LA+eOC;w|J8TKbaD*ED<(~3Q?p?lTe-tiXQn=BF(db8%VEA10 zqjfj*F!LkAhBIjH)zBdUP6W@y^tR*dZX2T-g?7<1ql_su>SZ$vqKpY~r}R<HrfX(; zv@s0F!7~eNh70}%wqxT?8Hk-Aw7+e{t|KRWyQ21--OY-m>4}Ze^cBgxPX`g{_}Sgj z;{Nz*KOU0)AzWJ|{oj-ROTOmlKz&%Al>X0?;}_&#p&K`I^QR^C95bfVxkWI_+D`>} zt>jK%J**<`M(5?Cj?edJXX?3IZ!;XX-nOD`GBoXw3DKcgA;t75cZw>n{P>CB`0p+K zcAB=$-}-B*tgp>p$pu-PZ65}AingU;cc-aP{CS#uZd=cv$ANvoIBDKk^!U`zi)x%3 zO}h2-qJ1qkU#m*}V0Y?_%kHo$RFtnJ+SeK_Wq7hX)HW*&_EV*V7;VM3zT1~HZlWN` zKoT$!a07{e3vdAbjBlN4$hhwmPm`y~^EA)XJllD;^X%Z+!LyTRCr|jI_jNVdg@vQp z+HIYo=I{rl(xt$9;9f}^>G<1FMlUsve79;Ja*=r%*&;MYIBb)C4ZNt7u23h8@9Bhr zpMU&B7x}i|PcFf;Z_?6_@=99aKKaz@lS$Gi9h8L-5_p@PKNA5D&^XsN?nwPSo9_eF zdLOFR`$a_3QnpZ-p1%4Z+V`RAh5Cq)+akhI18NxRvkz>(52a_FTXLDI5iv;namw&C z@GIa&U@veGcnx?Tpsh#J)+2c)@=WBJz%zlTizmXO--_pnfa<p#Jh7_%Ejv$?=tuUA z)kfNP=x-nqm<)v5m~zts5q+V)scl3*SYa%;UVRsyY&^f(dg~9Wg%*hhYoYxJLPx|( zyLhoMjaZk#yErH2VR^I5Oc=}*dj)i^)fj9R?+BBm{H^{s0yly{HDz~!Ux|pkc2Z$% z1RP@FrXY0vJ?72C$q&4u)bxi8Qd?B9Ca7OE?$5#PV6w{Px{`#Vi9)<uL<~64Vi^(j z{uYI9q^XIkTQmRVvF<Xo_+M{3%rxjjqI;bXkmz3Q4rr0+GWcdg2<-cE5*?hX?^y|a zqfY`hD*@Qy{@sC_J!XYVj#E8^JW#)$6NdR?h5ES~Q24v-L}0jiRd;IUbd|m@`?%7u z6(;G$QxmlO`j?$B?<asFdi_+gu!vrk9Xus%V-9;<P?BsUUWAe`&^JHc(VCtp0y2TY zeAt`P6Y#=GR%|4Dd<7_0j*6g0ai8LLgtLVQ?wh@h^8|OQoLjkV2~~lc!NH-AC`?#X zU|h*U9a4eO@iBK&tYdZpu4wu|m>#>Dr^J1SBolnyV}9RqJggkQ8*<!YIsQsHJ{WRb zgJb@VNBN=_2}O@s$$QLY%KZ`Cx62<emqjU~B$z(WWBwA);B@&y$NiHMQgn5k(I+F| zI8mJ<hBak(E-pc6{WR<^Pw)*Ak2!-5dZT}BHcjN#0x8?2T%?<Xk}*kwAQMDuPZuvE zw@dl(9O5zOhCDeQbSZ!Ie&K0O3AuB8krRwMKM+9f&4QPNZX(e^a(m;@#?jE0HlaPi zW+ZISaC3N@s2&Xi)yD|)B3QYRyw`_+s75N(T97zMx>+(SQV0ZRd4+J6-wAV;j}bDG zv%Io9W*{f53OE^I*<~OQmV|J^>++U~gs?uqU)AONpuecLv!SalJPu)+X(BJ{f_@Sb zzO^&8k<xE5KP7$i;fRz0N(t@exF<=CJE`V<4f3LJpW4$C*_V3`wrBcn122ur<%VUP zIaNq$X58;#VsVx&x!8>7HQx#X)yd+Fi7lCizq9=a15F?HhL8a-u~!iV24Y#T^QU!{ zzy%a@KNyVRv@S+2W^M_82|+%>&P54kmL$+nE{9_yh&RjZ#d!=%aOw5)#$eD|pOKzl zro`tR4>7@@#^heAX)EMxiF)EM$opT5EPsMOt83~$^A}r{yuZuunYhI78Nb9#po4sS z9bXXlmrD%Xd|2k;BD{-CLiQf4p4jVY!aTfX$$?N4<?e#qS_tYheH+J5#sp=mK7R7r ztGKn`kN;%@_T%N+!p2{6Z{ZT_-a^JN9p-#lPvqq`UINcau?sDe5S*&13s<cQ{V=h> z@HW_`44C#^9PeKepR(9t^ix+E_T()7&373PfdQcx5<zy$(J;r}aA*9o#h&H)EAnsV zhC=XgnA)F!bh*%4PMgox2{FJ0W+`hvSAozyW=uAZJkndnBcE@U`kLxa(bQrQg(0>d zW6?^fPSE2)<fAw4=kNH<ShYBv(>R)C9OLM|7oMi*QJXFi0yOtBOB^24%Q{IIMghjK zzr7ECJkUUM1NN;M!~Gh^%nP*Ee0G%)<I7Hr4j}e0$*|!FWfgkly*H7k&|m6qP%q=1 z_oeUxSLDi?&yt{SW+p(3hn&+GJ8M1G+LtRQhd7PJkL8Ms*1k@cF@)g8AQj3!Yq?>c zCt3Vlio;UG%JAx0$gewJc0L!s@JzE^cQ}9hvac;EFoH{5<fmWL_;O8KLCvSba9?Nh zwYh!G`%|+Ms)kW$2NydlFE{L|2iA_|)2@vFqJ=tf5!QCxN`EmbmE&cz2;9sCKj%NK zNU*&L(?_cAXF>-zKgHecr=pD6z7x@U|5~UW$gZvHPc0`w^<R6LnFJT&OlD$KtHz+$ zU>an11p`i85cF8iVrFY$?WJRB(CCI_ao25US9JC2K$r@F#Bi9TUS4RZ?!KMRv9o(o zPU$Cx$&J{e^&=Q?X!rREbDV+EOBaQpQGbW?%0`C$h0ZJXAAtLYapTDIO5#5%+&Dq} z!I2;2bK6AzECtpB-Di+5JFiIU;IrLf&wpM~Ww_vZC6vZz<Y@vYfMdX6U>~pxcpd=9 z{X3jjBr|_dDm@aI2+R_f|Ly0MM}H{!s`HA6*9)9i9;YmFq9Me#U-5nn(D(?SG0uBl zk<ef5yrR+#r`3(sf7y8@l=f1xxCJN#N&y|%2-E@J2k4u>!+AwA^9P^d@AJSu;JCPi z`{r*suPE$5&KG&P=1Z_&gjTD2wu{9r-#M_eGc`i>i!uiI&P5v|&!lC*8wa(xpP(gC zDA#L{I2=Uuk-28IymRPqfSIt[c}i<OXTz6k>I#RErv3nvcIClH@!{vM)zJ_weD zu_-L8NU*G<xQC7$Bg`f~d>lC{d0L!!VW10^+~>qmNB~Y8H+F}!P8_d(PpvjzMJQmr z)F<LB!IdzF`7%cck^aLb_J<@DD#CfB0B$E^bzV@-Vr`q!&`=<s^68_Wa_GZ_v^?aY 
zU=VZGXAzm5x{LcyVkUd8JxnNsqtS!3fw-nje@5tui@0AmI$b-*P5O7)s<z9AVj!{a zusK!aLirXkGmKBs9|=}}+<^)RB1ao<^{^>kX;2B~<|3JfJeWv@IXo~nTtp$}Gjie> zs8UDG*kid(%i5QCBp~MA;#I186PI-nZ&k7!k8BiLJSuR>h7ArSYHD~<iO|JiNP|OD zR=9Lm@@Ua+Eq87EAwAZBPGrH*)zP)xEF>B0I<PUu3WRluor4HwG59U@*GT3C4#)*> z=T6L{zqglekt0JjG5z&|GWb4?+B5+{p^fgTufl_KesA{@I&g7rNq==^SGc5GcM%$N zDBG2)qExz*Z;jGN_-iD-y8i2BCq)p}2lKcspLg>w-;qwg(()HXrZa3jd!}spuwBVX zwmX!iwU<Qo&ds@10tJ4pnneT?LI)M|HS1v7YY$x9Bv-SsJ$Cl+xPAV;6Eqk-srxG9 z{LT5_#k!V#{GO}ibh%Xvw5jxHs@yzGY~@?`(yJD$GqsX;X$pypI5DT^o5eVu9#Z@z zw!tumU}_j8#vZXTB&Vb!;K(WYBw))aIfHo=I@urFFfxYS9PyXWVFQN5U;5Dw%tIz$ zw`nTQR_c;mZr;Y5QwPf3_^KR#GvcZKkFXD~jQGWdi~_bGh!>?#7uoQnunw|OlU~+c z^L5Ak3zWhaA4B^FhMMboO0k*O2GL)lD9_<$5b>czbCvKcSt+u*gA*=%dH>Q-Bc11h zzO7jbXN)&5mBf=w2anK6P$YcJZQoWa2#E!v{hFKxxm7Fc)Fc9iC35{|Lp7bIDjrhC zgMiGf4r2yquH{U7WdMio;XS4Y%Ry{q7#kv#gZ07i`7eo#MMh_o68E*Fd_#nrri^4b zX+slbsv>+8pmck%oLDU<yTk`c&RTk8mVQAOK~qMQ#2raos*zaqlvJZo>L()8NRJ#Z z8DReF_eq2zsjEXGs)yS{k}ykS1B!ZrY0f6O65^lslJv3g&wfpDg-&EwF8wrc=hSwm zPlV&n%%yE_@onOwK?)`GNJ6MQ0drMuBYWCH5dkD)uErh@*k}#GcFl<-;;TN+5vb|b zctkCv;*zL7f)A;QuO%(81r0)&aUz4EQu;kA!k@7i8RZ)koMaWW`5cC6n@{w!!J$5d zx}l)4VP4xL=BKi&c^{n_Qi`q@G{vimblcVR53b#<Dz&@nl0LRIeY=p^I1%{g=J)$y zJ4tny{}tcKG0i7qLLJtU;jl;LnJu8bQak(kB&;UDjom{#=dp=&3s}YXYz3C()*?Ie zpOr>*X$FUOQFm!A8JKahNSiBdY+x3bJZfD8n{--FLUM4+Mx@{vM<W!B9QJEa7>_ep zkk)U=K8R(rhU(X_faI*ZO}cn`5t*O}lx^j8|0rt-)o=Axn^DGcQTi!#7hxLTq?|HQ zB;T6(nrsCeYK0_o%)IO+CP{n#+|;w1ZmvD2c-J{i88bp63RjyKOE!B!D3U{RCs*Zh z&^%65VM(J34230U4bHS}M@SYS9TEK}c%)2<$h1|T;##zRtjRt@#1T%J=kAhOiw+Z% z7DpyWVK@6%9K^uVD9LDKj)dR^aZK6$@Lt)l;sj@`QSzBm{TlLG{JKM_^60Zr2w~nr zr>P-BaV8OjjWm?hQ3$ZCx+lyD%q`~4iNF9xWKi$t&pzBhwN9Dq-o^v9@=abLR#|<P zZAhQVQAqt{KX8b!o72`jV*h~V{I<6~6`|CSYi!tcFRq-OP_ri!l#8;keBk$FyRh37 zh-vx<nho1V<uSlQEH;(ry7_afSZop_PK$8boQKoq+i)shoyMOs4}aFK<j<xGJnq14 zb2)CC*WtE#b4An68qy4#ciQ16Pbjcq3r`~(syir#2qbbvYtKWddcXwdfk_9bi9C9n ze)1pT^3siP-~5MsCpR}_o2eh^LneJBm*p>KZqkLal4YCRR9VNhIM|rBqmzzcImvcx z66fD`zj4}M-A;gyA17cSC-oI$`q?*q&8~)Qv|C#(aSFd|hYbf}FFVB?n3Q?Svt+Td z#AW4x=9X}?aizE|`r{}3l-H&b6-{_j#STR!lD001vu;K>KT;*^ChCevBwCMFpg{JI zv``4YsjK1&142Pl%%A#u3rbGso1<_fngd1`+}!pMu@z5Me_5UFxiPYKqFL4_`WXmY zeWJrZUKzrrMuBcHupOq4Wr12sE*T-*CXh;FA=)Q+BMN(?DJ!kq?%Ww`xlG3e;lz2t zY?tl;i?gHO_79VwJ_cThq^>FqRUPlqS?IuI+CfSbNkv_1l~7eGaCwRmuOF|ic1ac2 z9ldo$TN~LhX~J01P75nyi&d8=Y@QNZ5e<=6v_R3rM}nN}5ae`^LV&sAD<=;*z=!~` zvJ0@i!orMuT*5kyXNzJnxfU!+#FTW(syy@yj7XX8#zD_9TWBSg(;KZ25VO;is;-&R zf(29n3U}agkC`j4sjX{=`D1EkCC@enOA~v{GOLYQKAdPN6+?W+QE4fLMhrW4RG<SI z@?qI-KY>bH5^K(rm4T}`=ra<6GP2}cRBE9K8^r(O+ZvKpJDL~qNguPmwQZp-8m7V@ zN^KFU8@Q*E7UJswZD=OYtct4KqA&NDKSOfc-#M>@o#)4;YLqtENdFS^3K9&dFBr|M z*loqE3X2sMmi8hv#7H5<kgna*Z>rqGc_y=ShEbHT^m7S`?4d%B+(-6dYGI-*t5E+< z^P3gqvBIHjFQNKiDKj-p;Y*MmMAXOK^8{gVhrBn?Un}%9(JqaGPiann?Ll$aX-{n1 z!AnT<v!xN*zo+dH+)yR$d)}fNUUOcJ)Xz$%vH5mur0%L;@p((;IW$raH52Q@7``Z{ z?rO>WyjwZ7y=hrziEYVZVX)-}D^!8a+Bc<5#*3h1xvWqS7I$WL>iwNNvp;P<;TX`| zOF6ZibFB4T(YJC~mj~?Ev*ln|9sgYVFTcLiEi{YE;!ZWj>X*aK9|va;HulW-D`RH9 zw=O#R&of(j+rwMS%oCi;+oFskQ}@q2q4x)O3<fKs&%WtzzFD};-G{Hxx)V?F$WHWF z7(*i07&g=2&}`P4G>k5e6yDx`kLvQs@M`+D)vGA+`X6%Dl9YOA?Qrurfg>XqT9E@^ zgWxOT&hX+yo>7=HCb!3BO$p54I3{j@qbN!+nu>Ti*O~vw`5RU!f_JXS+*x#-zFp@m zr}GGVhgT1=p-TFp#dtAVjM3QdpDoi{l*z?1s=d~(E;Fkn=*i8+oB<M)E&5W?I^M)M zknOw+hdKDcP%Q}tuai)WoEa!7&-Iumsf3KA>cJ3Ib?Vh+rZWNZ$pO`dl8LcBv_cAA zc18lYB|rc<0u%wEdTGEup|%_S`L>@ui4LTkvnNApm<q=y*er!iCv8V>#>+b4WIF<} z^J}=w7L&$J%unXCb|Wy{z3WVlMDNhz3o7S-3)6oqjx)7WX0HTEH<C-Do)>{-=9>q+ zXXtoVPHKfVJMk8bt&h;MII}u~0l79^#`5CdW6Ef!eb|E&Q{UJ$n$yP;^Jd)qhw~ej zB?c~nN*%0zm%$}MD%|<q*x?^2$-sGY)_qDIsjoQeKH{k^*%_~Mm`JG>VZuS8W+Qtf 
zS+Uu?;oSPL<h#s;p3UgxZ3c;@9(LZhh9?&RH`z;Ufi?^GL|RbrQ|i$u#k>L}G`jMH zn3`(J{6K%B(Gykos(!d}z)Wr!%sjC6=V@s)qG1MJN~uoVlq{jeI#XKPMI;@L^`RBZ z<X%K$e<C_&9&p~HQ%fuI$-p5?U{jDsR}QoVqzzw}E77mP5v&U`27f1F&0F8zlxE2) ze=M@fh-;2;q_!ewec2frY%fKQkh6Y#Ck=~JBu;z6vOFXzd7O1mkt`yaC)8Gn>0Fhm zEI{|uQr0z1gk4W{mj*%4Z*00DBL5ko{4X}2{Dl0wAi#aSmq_r~FBHL|;}P&0k>OU! zhx64h5vSKwffV0W4JQs2dFBrfQx(B{AK=BGc`U!}S&BFnE6QSvw?`~m^}8j(4$IzQ z_WzjR?fD!VI8Aa=N;O96$f<JeDN}@@k24)dnpa7nV{o~|y480HWd%qi09M-w5HA7H z5t)dJA9OeU2(Ddz+nofIxgaM#sfN{v)}n+p872aEFyGb(<(TUTpJ(1Bv9RRP<lWbe zn*X9W;yA^EqlAv1#u2Gg|1wrNw~{@z1W#o_GFNuVYLs|BsZ*hkg_h`Il0YDiCHm+W zmS~Y0wwCC%sMd>IWzW@IV2KtfOm4MwFVU~FM5pwL+-yY-+$4mvEEjvjP+5JUm8n(w zTE>U0(q9W!VAi2soP~_07HUw%Pt_tTYxD^79a6Fw-(PjP4xwLxv3Ycv!%RV}m`xvC zX`nx*(H@IF+EJ)392Ul)-t@Oj>L>VGb7%C~V}eWde6yYkCcYR2>L5_BFiz*D#3I_* zY)|v0XvW#xv=Y0=d;t!!=&NUW2H8t2>2H>>rUwQga=@Hd8s$Z+x+rNk0%K7J*cGvn za#2GFTwHgcx}(hY&AoeJJ>OtvvdouZfGLkWz?5@JX6KrhfDJ0`xz(qU+f2hY)2ykx zl5dMrs#`m^OO;aljpVNpXHI7j?NBazjFr-P<5NZ{lysyym6ILI!i}auR#r=s8-sHH zo|F}x&aDr!mLdRfA3dBON<#lrL!uSm7=o9syd*hDuX`F0HkX``(5Ixonj|KOyUg3^ zQc-Q1zi|oXoEJ7t`z@l)r8HbVnV=3@R147(4T%Z?MF>|u+vhb+dmd}f?PMV8SW8Om zNGeF;<~ukE61hiT7Fejt`7XmU^|R{ev+p#`i$*Qly)%e2TjDu=LV)p<*h6u5gyTBv zF2X}pxW+%<Fj!P}AZas9RZ`k$Jvv1owwn8%W?{}x!+bkqQCghlz9l!;d?w_cXMXg@ z&=}JPT7tF@L2ahnMB72@q!wG|Y3@>;eRIVAvq#45Tg=WlQSFR|)0f>5G`p(9xM7}| zFKtPEbWZkN=1qLjD*3c&W=C5QZ78nOyIt7^bEIKqkTQs5B8y0Tx?-c7F3RU`pPOs` z_?hl<U&@p~CMd0Mfz5AN1#S&Vwsi0NvWloHbK|_KEOMjJm}q8E=E&9JuvOv6IZ8ov zcoQ8$o#cQM?=kPAi}LePW480inT%^k+4bRRjjowT_3NF_?RV~cwfUrD02;pIjR9GK zQO@U%q%4cq2SOIu>A-(AYe*|k@#n%-mt4P66m+?M)nmWXqWP-^>As_PEzQPQQFQR8 z8-h3Q39C3Q91oVz2*#A-KL%2bY;8!cmJ9uHA`|<v{z~0`eQ`+GHZb5=o_|mCd#>C8 z$NX`>3!Xc-34zzMQ(s0p^HbkPL0@}t>MK)QkhQHnsYONA8Y3sjLq95yD8o_vXX;;L z>_rtUVz~Yrx{&>y!BX_$%=h%m(WLsmNbc^@hvIY`rx=`G3p{Y^ZC06YKwy@l-|)Hh zU=6u>PjJFvP!kJ(Tc+sbM_EIjrY|G=W}4NvvWB>k^nM4`K&TNt=8t0byviN1Lph6= zm_yLKL?eam;`vUGWXllNQpvgH+$3sPb_yL=Bg|EjmK*vv&mK-$JqW8%=|ASK>2#&P z_Hr|Y5Dkgu7#^X*C_?v-?p6bh!n7?WmSW!JeSwnSm}M7T5((zV1Sgd@d05#6N@`iq zIof-m%Wyrh&Os_zmvwFpf)UBIy{<8BeDtovo%NaL&_|tBV$bJ-C;E$apFPY)zG1$1 z&owMVml>CDJKAdL5zE6EYkt$pYmLfF?wDG0`I8N*#DQu4-A7E6KcN`U27=18Fz;s6 zgRIKZJ=&bE;>8osoUL9Ryh=TbC>SSDx$a_ae4Sb3Y{(ciQKVJ&x*C=an(TMl4xLH2 zXX$$5{C?<{&`X7#bw|C!?@WU>(wf=M60Egk4C)t`yyBd`(C=(qFld4VoFf6R4+pHN zK8Ll6cJ>?zJRuIOK|)?8A%{uGgm6egv3W?S%i_2=V{%GzdHk`#X)(c}lhxAXtow#+ zFHp)}cHUdTEBD@=-@HTIVx!PQ#~t7^T8*<#^hS~|xc9~6%di^At;m{`IHO;U1JyJ& z?$6LV#Y%45gWjnIu3a5-`VNydN5;meS;L)mKjUK-hMMbbbJA&Cbq9~|S=gw!q$wS} z<Z(t^y7;u%;xGk;LG3lcOw_zt$NHvB?!ZTuJIo+vtIY)W*7UDg7nZYhgoJ`|`U@?# zf&SRW>>!$M`UNJWuIMmgl*gmkLk_ZS(?`c%lMZ(&XFK8NP#)0^vSl6vFEG>}Yt=qY z>WCarV-#iQR(@uObO3d9Zj~Ae<}6f(n;Hky?Oz`=r|lj-I0#^gmZN5;ee)19uN-uf zbLW7xnioz$Qqpv@afoy00q1WU<dahvrqv*^Tb#kb-RY_O47=@EAgz1AjGqJEU%$BD z#{P{%{LcENgC^i$Gs0h&&6#v8aM9Ug50ykMQMk~#qpD^cswS=IIHD-)jLMD@Eu?Zl zXzx^j#tYp#^O##HK)x^gH2Y8oBzw6P^DLtqvNE>|&pEgH8343To6masFPXZZ+i2fw zw(TOJh6NWV1zH#tgBTU7eP2E-U^0`E%lVvRweM3##v6R|Hc)r2ZWu6UP8uu_SKF^7 z5Ei+b&tX|(bW>KeN_C)b7q?VhC2@*pFT<#gaK20zQb%f_ppm8Xf&=AdHBgp?2g=0N zzUt06{THYVS>0fh!O|&%MP5GTWr9DpB_rmtxWJV%cw()<Th-`+9pNw^epR)x<&H5y zNn}p<5E>yvDADh1(g)ek#K;gD6diD^_G>B>y~3*2ri=>?y@k#|fr6r^y=jEkKl3E7 z4M}aqf+KgXac<4$1&vT`xA250AV##H0=5ek@I!)vK3Iwme$0oDmHS)WNy*wIdYTYj zZRu7LFxIS58JMfP!&x-K4>+HK()5vW=nSz9Me#w3T`4{giqU44ixK<NS-`KgQcF~+ z$)Xx~#$%3oPu5N7C1^%ShRb#_>rd!tunBaOeaO;`@Gg0VSi}FyYeUlc*jfuoTFFEd zOR8Z4RTBHrnM_v=qLS_KTIyGvYt1|?i!+C4y??`sV=b9MS0Ju6Q)C6T`W3;Z%o85d ziENh~l0#_RtCgzGELP8JHB9M!#^AHfT3W1T^h?P+q1$V+gEe9y%{FPzuSsRs@Ay-r z&&$%MWa*cg*GZ8R;SHL@d5gHczoSYe+a|;+l&uAZooROH4pP=g`GeNXPLfFzb`#S1 
z2_-JE19Kg4B`^wb`OGw9drEbu!t~n%qeIJiU}$Ld55)5#)skz}?aZlPlQ8z#UJ#-| zYO^vmzd2P;V*j5ETWQQ}A;NIjCB|%xCEmF;jXrG6JdLv!xSAK@X@Sdl!B-26nk^;Q zowGGGn&>N2cRRN_tq77S`L(hZ^0u`V19Af$;OpSM*@-NJvG_<B4C7r?o87^iy*8Wb zMrpq6c67@_sMBrzt2>@@hy5J^v<IIiJ1y|!Q!YK$isdqQoTPDML_TG>d5CVZ8v5tF zwQ7lkRx1I6-#=R@`m)Md`q#Na+?08k)vz7fn~b?P7;2Kt8t}>IiMVUrKGxYujGZWb zLanz`MzcgG7IDuLahiX|7e$b)I}hh9p%{<(HOiH54&kp~Ytv~>ArTCn#S8~^$oQ)X zh^?`%yGTMs6NUtL_ntBL;MAmDP#8v#36b}%i_U$y`ln#i)B;*>S*Pvjco$ClL? z%=q~elnuXpj0WVh4c6?B5^b?x@W;C;BYJ#|yQV(-^BV8xS@qdyP_7}XGtF%KKWAjn zLectNCDB|O$s?N`pgU^fn(!runKLO{ZL*IDdN#goZ=z)9FDy|a4b+7tIf&rq{hz40 z&UP~#62@?Yv#|LPJJk&HQ3e)?F*x^tH_b5TT8Z=h%QKll3XntrekU{W1ucz%R_!vl zu6JTwtI@B2wku%k4*@aLHLf+aS<jd)!%M#cTQ)o{<ty6y;vrvlB!}@s{CO0_`ltZs z3fJ>dHs*_rgZ{Wh2W%`KXEPa`u}qU^8Nd`Gtzm`f-1-zBi0iySJ$H?3COIw5Sts}8 z<+Vm%m)h*yTBpLCW?Q^x1F!Vd+Cd-yYm=~2?%cW>C+BZ7&rJ<xIqNRtBg?sU36IuH zGk8uOY8JK)$4P80(iq7HrP*8qcI&NRs5o4XL)iMFv+i5c$~Hy3oMB$wp_-Th?yNKL zAangr28eU(Pbpw+wfW(1ey17vQuDUsxUj8DIfV^QQ0G0jGyEy5^P3)CLis=cawvai z-5gx4GVHJ%DF#_>{WkI2`jH<!Izhz8W}oAaF^s~#^M*_X2XtOm#D*kvo)l8G*-}>+ z<t5PsS#I^dD)cT0YpM^@RaIwOUV(>b9w~ZgNut<T7H`U!4Nfz|w82YY^r-kX#J6>( zRG;4bHiKMr_Jpiv$aIiF9yPwvac%awnv<K8gmQS^5Q443>2~cp8C&!2=C}j(2#tMi zjAaHm5bPpSUwa%RYp-#*{ngfz;(tXArj2S*S=&8{L(57D#>Sy>ye}&aBu|6{WXYoR zJy=+9jhe&f&&Pd^I=}K3&D!?hXM~&KKNL|-rI@I}J}9IBm%CT4Pr(h2lA`RU!W}#z zTt1O71J@X3uEEEm16dpYC#BMwiUd{3p3PQWl4fnzvSl_Q9@M}hNeE;-!hE}nWGGc1 zPd%s4GDneKLvjGcS1HB`9XaviNE~IJ5)rQKQ@w;(FbQa{p*Dyv{NvkHXAi;5a-v(C z`r^gH3Wfzd%G^(xROzgOnu~kNc%v|Y{{$u`D4$wu6mDT|WDAsPz{x$PmVRmi?cZF+ z-U3yHJ4XL3ya%Jx{3B1Os@RU`W_KkhwTO`EP<`_mS~KR8U+7dTIE{Ja&Tt#Gon$nl zE(dWJp-%nLFGR6dIAy<_TXIXDnE(n>ay2-K8OIy5nAx_qmLyOgtQ6Fj%*-=qe@HKi z0nCq$syuW4!}7)5RiQ;?m+>J6id0FQbux>KbU4=#b?)3Fg%G{}A@pSk=NYO@J@Gx( z+{gD5$inzGt&2vIBM=9%&Ys$We)D#=;$X>?T(d~*H3&8|nSsg$L4-o()4BCDnT9d8 zE_0<UD}u4Lw;fd;UFHK1Sw-$AMSfUDn)r(v5hd^Sk`)Y2*Ymsk6l$eaD9LZJB+_ZC z?#wseq9VdWMx##Wq_ehmu!z%RL@#$oFo~*F_DyBDl?uh~G*>`&P_=OS)^ylwt2<5* zvwCk}v{^^0RD(Mo4Ce-R%T811{Z?J%>mVhkZSqsZUab`AH#ms$5NI#mLjx`}s<cDr zd(bT?x#j~c4Ean`t;tA{$e7DliznxUyYchy8+U-d7c;x*N+iTJseQy>ob@d<%w|L( zocFxQ+iwIN$`Lbg(^wA>sk1CDaCHq1dn;88aoAtv)vqavty0V_rw}n1A$&%RTW^fp zY)}2T(vF=bG5SC~B*4=@Q8ksK&3H(1Umvsi=+-mqUO_!8b(bJ>RT_kck`^w4=oz2- zwmQq2dD6<s{fq(TOjQ^`MAUW8j=)Q)pKZQtBiUBnNhi3h<-*+j`^bGNgVvX9{sEGR zNO&hvNz2S>)<X=Yal0`ZAdBD?=G#SKJjZ;G*RVweNW@0_IHN=HbIvdd$%?KtCDDXl zS-puTv{HE}Vwupja?ML6W68l~ZcsT0fl8=k*}`^H<U@)jw_TZWQdA3@6ACGl0(xdK zv6O82hzlWrpNr9j5G_^2VwJ3Rizru3uw+-GLsw+ulN!^ZTID%+Zm>hOs(rtPvK;BG z{Y=ms-NO?H{RW<b%v>f<@R!l@1ap~PGv8k0k3-q__{PCC@7C5Fh^ikPxV*RPmYM_6 z0kfvSzBw?k$ERj&%~qlI8?ow$vto~Q!31rW=wT=8P}xDGS$oy?u<(xFOYiHeWgsP# zT)aFG=O0)ID^^KfcN36{h|5_lk9ol<i^Xs#!VJ1=)5TyRo4{4=Mm$HcD9|-JJ&<fh zkv<f^_enN#g)O(Tku&Sh7?;YX7>2Erhw1%VG`GJQ^J0PAl8jr?Yx*E!U4=K2it(Ud zQ6rhrtZtLI1dW*3;fTHQ-7(GY#w6b|7=sK8vsi6UF!k;QP1I`7T{{)D%r}j9f6JY_ z`axh=-H>^}`P?qy;<rl2GrJD5de^xKlln23Oy<F+EPK<&BrJD#Zc35s&LNx|Ji}&J zXm_K>er7j3=la1cXR(2P^}~G5U@)^Y9R^W~(Yf&ei6pNG>XS)n>Z@{y@SU?&+x_PP zwi4TIm{g4?h9h`GI^_u<CDQ?3teJ-(%{L@AWgch0dr;Ksu;h1GD-v@Vd?KD%8=f^m z;~-ZoK9U+x<NkT(4r1pAmLrJ72_nawwuDKdgr0<*Fp4!2$;P1$QjoiH>ccL{tvDS( zC7i=<#ERSNqK5joFl%3Dof%|KBvEU5qQ@ea%d`kN0xVuIHgfZRyPgfKsk;4%Cssd! 
zRZy@kcG~O{Xfb=dB)TDUpTCpV$~J|+y5e-hioLf6Tpsh<?=bFK?P5~WABz$q<20L1 zgK^Njk^zL6F8vdO>o_n_hSP(E;qsV|s#j?^8BAB(5Hf@{N#z(eFM>tMXu;~1uk&K# zE;Rzpm%)M=;(^<h1j!5clYZyCd5BydPFZnUI5nru$8oe_LALrZ21JRzsDzD_MOjK( zk00E|rj4;t{uou#?P7|O!p$-N?LHWDp|9zbIyggai<?WN4itPete-Y-G=orT;ji9@ zLZ=ymGJHhw=e8|l=poY$b}_LL$-0_PXX|5f%|!A;LiZHb1)@|=P1CS_a;kCA%$JSh zxHn`U3rtF09;IJZvp#yJae2*p+iYVjBMKEb-&RqNfxq_i50rAjaJMzrB+u3l!Dye9 ziMZoyHmr2-3XD;W@iY-=yLLglF9DNcS7U9=rn>O${@GT2SY*Q<WH6{6fu7s|*TK2< zT3P#Nn0GR%^BYE+f1!axn_2WK8jB`q6;Wudt(Y3NX71&$7WkD1)-24lgPvS-^RHD$ z_24>}7pOi8US|%YNHQuI9Dx}gPKACg9BY2xSRbtn$9iuY9oSBsmKgV3c(wEn=%-nK zD|%o2NhvE{vveJc2sn-K3I^M)_Ob0-oNJyT-AUD_7&*4H{_58PGyIvmsB7>#GLE9O zM_%Yt+6~?L-bud7E~=~mV~m!R6?=_4{MCo0O}Rex{k}23X2mR8`5ssCbIoY$sMFI9 zV=R9en4=k(1bGJ`JxbOSr0X_SY1>&{IxnuM;$(R1rZhlZsNjrRzXB)?&li~var z?B}%klDLWDf^4)nO#Q>nX4L#{frSueKHj{6e&Bw?L>`d{`ZHFsWS3ZmQoc`R>p!Zt z)MWNo*@Q0+(@KUAHQ#)n2!1ZmKjktmg>5tXOlEwvo@l;@bE{CFH1qfBRZ%~VD0^FK zYxkW_5R7B$+uR~XI@m1DA|0`t2h;L9#E9HeM)1wN?ybHta2K0&yD%+>v34#tOPGE6 z`4T2CtnhJRUgKcr&fU(Poo6zxgN->hy>T#X%%RSme-YWd)|AY6<Q>vM0lNYNQ&yn% zUR-P#5K5nU)Yx-dWQHOQ5Jo1y$g%9Mk}!8IeeMr47nESfX>;2=StXRpPm!JqVOg!O zss1JtXWbeChf1w%MT>HGxYweE6iHzp10k|K23P|lvUm(HB!wrCOfHOAC+sN2t35LB zOh)u5<f*#!IgOW4DXvp=1(w6XCDf~{2e47@U+w>B9syRTR=6tT`Fqj2nANt5guo2m zFRo1DZ{oTuaTy*M?|e>p@X=?|N4fNYq|h*m3`rtjb3S)K(tr~W*Ak!p*pjtM&|QE` z1g;w|3YQ_Trwmq5RfH^6ge+BrELDUoRfH^6gsiVr1gXj)W9({XO@BJWxitVf8QE40 zLOB<V*u~}OEb%~M+|m&GzUoKm-f$<4BQ9%Yue(_y!71{a^buyY_Xq#|XDDPs%>2Ws z#?1K7`D%?yj@5<1AMJ1LLKc%*@PGU7yMNKNXMh&qIPd`w1JXJYm<B8WRsu!9-9SC? zFz__+B5(jW4s-yHF5&^nKrT=M+zs3V+z<Q!*a;j0jsd5DGl2bbjG6(Xfr&seun_n< zPy*Z!JPqsx{seRYgCIwZ1g-=!fTchQPzP)SegOOo_$_c4I0bY7age!&1CxR40S|CH zPzG!S?gbtLegW(T4g>E39l%IX`-wm@a3j$7_kLoU_KWm1ZQ4y~+M(s#*}g5UJIHUI zPSYM7*7F_qSY1$D>MeBZ<?cJYy4$<HSa+`~FZ8-sSC+4FS5%g-@>W$%;b7krZdIkX zK=(%axhGU<{MY7`8>NNrvT{ksyGmSfD<~6()x~9nZqEk2sJu*h8hXL)rCx%Nv^H*R zh4Ps~G%44(vEA{?E4*bY)KyihDvK-hDHR(epUO-M>aj|vX=}79ZIxE8Rcc=TP0<Rq zQvT7GTA603_bVh>ZDN^GT57!tV<JYH(52a8w3uj@Ju@@2pZumLX&x2Wo$Og2>(H)C zO3L#<8gjb@-_RT@i&pZ}wDlG1`8fyy(bwVN;ozTqYEO+#*R)Fkeo@gjd%u`iNB_71 z@dF1rU4t(gk}&k*OA?0-A2D*&=rQiGmyR1h;j+soUUB85$yZIeI_a8gr%szb<GSRO znW?j8U;nkV^c&`6WX_$JHUGw&7Gy76<XOBVXDJptm*;=|=37?WdfUo^+gBBOSKm=o zTykgWnzHhWyDF=6W9_>28}9zb#_CO*6`47+OuE!lUR<VoD=E`WTBf!{Tgcx9+EndY zS}cRN1**Im-riy7mR8NJ^m;X(IbJ=tpwv+B^CI5UOH0dFN#shSOfO#Jb$cr-%PZZQ zHjvI;x?oXGj^!esTF(51^CCXAj78b$^B4BGESZrsb=ttV^fGrrMMY`xssg>3AyZUP z<z7?3uq?n`*S%{hbQ!Xx<pm7gBCmUnJDhiE@$Hobl^fi})VZ?KyGk$JFeT1Y>Mf}9 zGO)|^f>p#MMnvkDSGlW<ii+||e7pr~+^Z@4n(|67Y4Ey6m0*f0Jmr`2O&u6_l{>ws z7zSx)=geOaF>~~y;wpDRRh4(m?WG&sg+^s@*&XgOl3FXppd!U(#d>i;Y4P1E`M9ML zo;e~F_7c;5yKx8K?hWNeWn@{WxaaF`g03mA(%q%ScX~-(s#EE$GD>xK`D*v7g3?mS zjFyrzUA3xwO@*4`6R%!XT6u+gwNbW8wW*rn1wDl-tI{itRXUaDzw*o|EzK?{E>m@v zdS5H`R@1wz+_<C2T~$%Aij{)k41fZrb3}thw%0X%+N-<nUaRw#EVbHOFQU-pWvjeX zzIuB|K2o+M$zu*FN%?v*C=B^un=JlDnOb!iIXxlVMc#r6tF)wZ?R8&L$92UK5mmqS z#G7%!cvX7gm&BVc@hS{P+uGtv-6$yS=^*Jzm4TFtIdOruzpcDXmhGz<II?=Hg|)j} z*Q7|io_eeGlzC89PInc0*A}nx_Jj?!k#~Is^M*}9TBc`as&>9cwU0rLp)hM0cEx%T zdqSa%f;;<$zi_*RA{7?s1r%YR)#VY>Qce0w?_GwsN(v*Rd`W15p#xdT))X_L7<AI# zGTe<aqe>cZUBTaR%G35qstwOO?!9I7T6x(TZ<$UVB&=$~^M);`yu*-yRjR=yteQ`& zS;TaiuobdCcdtZ}ge-4fHG(xQyLeS)c~$vp-JM&kYB^`pr0(`uU@dwqPg)%FVak*# z+AQ|&J1SYt$_iMKjj}t-%GZ@$PalSwFjLm(v2k&1q7rPTTO#x0<g^R2zWR;gT^RfF zdm!SyiFdUb;*JiC?svpDyWh7(yu<A4cIU1@_xpDu-eYQN?y0G*VMDgvQ*+OjnuLD+ z*patx-AaLyl4?9P^_oMQczLoXuZI1WP1)nACwuqAn)(`IX>7|yMMVxr?D~p|brlu8 z_G7&NzyG<lzW*kIA6ftU`ke1O3ry+D{?%z;{MS2tt=97|O8aX6B2(C+_56#5xcycB zh2y*bzwdwT3;pj#!{h(q5fD||{SSfXuk;J|pggxk_56#D`fC5e@y|D=|6^`{Z3akA 
z3H%G^C|^DAE)ntm5B&Ou|7x}E3FXpy-mSN&D47H`wOf33TkrX1eM6)F-llKex9!{a zf9Jd3d*J&IKJ@TEJo1k}_~E15AKUTx6Hor=sUQE3pFI83pZ(J_KmWxqfA#Fn=bnGz z*S~r3rQiN;SM%;Ydw<{3x^Mr1mk<8o&?|?Jyn6JtKfeCPu{Ym(`}jZq>75fN-+k}Y zzx?@qv+Z94r~mDP58FTb_m4Y1Idiu2)4zPy#pTGq`9O5x1J74F5dCM@|35qbzq$SY z+JW@K{^~&bpI!f~teI=p%&Zd9gjUFJvOAlfTV6Ks)3UR#E-bv77k-{>O-lzj6LXGJ zM`vwe`P%OHMVywzImcVUk<<#1Zrov1>6&(<QL56o5nNf)O0TFa7MetMLFK9<o^!po zR~j5t#qY*~GWAM6lD<Z|lBPylk`7QtybY3u#Fw}dN6RVDjmkniB)!UF^|rLgsH_UP z<#`LsyrGY!pwZ%-U0$YqbBxflK$o~0@if9~gp)8D{u+n;5RD~|qiOlN99<oH#C=(n zw{p?#C7cuH_Z*Ui;(_0Sf+{_oGv-=I4i!d)a<jgzWVCE(N(Fa#Zzx}%t}V;STr&0A zDH#hOKaeL`QvwP?c_<b&wAzO%Q*#=CcAz<E6&i;&qN!*xX*hm!7A;(~Z0UGy3TIyV z4%3sS+^&+reNCZqzlFRuaH?3dq`X`*;Fo1R{+IsNT$HXIhC^v1_TlT;X^TN)A3A?h zkaeNtX&N+m^$dT%0qstH;qQHY{9hc`+y7vM|Bol6X)git3&+1V!hhEEG%XE?^zWPh zdoz3cAC8DG@qV7#+dndY@lTy?`OAAO@8NRv&1cv3R=5lKfBdxz`;SUb(^3HWT`2xl z^LqRDE$3%9_V({vzB?Cwx&Kc+J#~9A;{8~k_9|b}6Yd)k?|t)|p5Hsa$aLQRdYbkj zAir>ZBmJ+sIZe9;i1gppryTXS_V$nL*F@;USBGfC;q?2K?~0NO$CrF(miG4V8~^$Z zz5OHem-q{7zuf=oExrBw_UHKT_4e<Z{!8Ega{r~<d;9k-|I1JG_U}6{zx^Z2U*q?O zCwuz5Z#fqHtamzn{fl<@_U~KI0SD5wrJs^X=r>3MojVc!>izt0p32|GQ&|!<&s*lL zgt#=vqLj_iD@!xiLc4)ag`Y0mhdDx04|5>O?0E&n`rPu$94I-ZUTbI6zNgJmypm8b zw#R?6K}3&8G^?PjuoMj96G=6@ywE81&V^XJ5Sk64-_kOLVn3%6QZdB99CllX;qZc@ z7kCTSdcWZQm!4Ftg!43Ql0B!?3odbKG&x8?(hCbA7K8uvi;85TR7l)8<!jbZq6Nie zWZy1jwbFsHBXz%C(#X*ZEk}505=Y9rbVG$#n`QYHK*g*Oq##}U9hg(8msadkf$Qu` z!_>R(7W^M7e*=<zSs3Zivh2&sic|{~X0Bfal11&wPBAgY*eTrwy<d->UzOp7hJJ^) z(nEEn>)w|f1UFHnFHL(gIt%)yVs2=UsdtN!af>R6N2;LxK6<|NfDkslh4af`eF+6m z)0!jQ!9K$7ITAO0jz`lHq%{_0X3P5tN(1MlxKNE5FdyxD`_j@X0$BW%S@IR)qI^x> zyE!eh<x3T@LwX~k^goMeuceCoIv?ET`}REAT8$y?O!NZihau7+qv_X_ImC15+au{^ zg*g?)WmY%e6eSsE_E0u+bm3l9rE9w+&o6pt3oZ~NPph-%6&HHv6cto1EzcH8@eLbv zueSUA=`dO!SN&kk8ci#(=UOyz)dKmp#fG<XgU4H`xH7N_RC$>_CDPVQi&xzl8mB*r zXq(Ugqj7T7_*7`$Qn*y<Rchq&raf$1qL(f!TL+S>{aBS?iP!3mTf-#?^-i5iIkYIy zvkydkGkwAIZ-|;(YE%_T+BX=hS9>d&X@8DhFekg9!fHo)VvMc3EtZyt8%Q%FL(vv# z)_jt-m-$7!IlWy7(<b>ZP|O!=%4zS*IFa1D*?m7zHOeWzo6==yb4tsryrBtvuQggi z>ruM)a71ku8G41G%jkWeSExKKMrK~bDzG86%1Nf!ErdI}rlO$I+g;n--Y%5-n3OSM z9OV{N77Jr0UArlB$->M9oCgX^IV_dgmcUk!bT#ddR-D2`tF7<Lq%A_7EAtph04cpH zgwBAy-GGlqoBj9i|LzvpB?|HQ$<v}xh05y+JtH0nS_#&3!JqgG{P*v_Ti~m<z`{SL z{pRPxewXpD<I>dFDt#B-`T)nMV2ubY{4f4woL&rs$D}RvZs(Z@^aBP0$f0Qcfmk3O zaD<-XCf`y7@e`h0*iX`xxbj3Rhsr~yi?|I2E((F<Jr)r6>41EvhrZ{8zFFW^oFyUm zoY0eHTBV=QQ}SjxR_Uza=>}MEkw-%21CX*xJ)}G}fRwp5^xVQz{C$A<*8x%<xd3<t z@Pp9zcAiqc#{tRjM}UNT4v;z>0>u9fK>QPF6ltGuoAKJcHblus#4r3Eeullm-+iBb z{ri6ZweT1652y2A@9DbW&#J5Yg1`S7ZE<0ygjK%_6UF~))L&|G!66XZ$uBqr-2Zjj zfSUY2J`{?Ef`>)h9gnkNt=zI<%h*uoJo%3Gvi%9`S^L8iUGkQ;sYX4YB7F0Xw|2NK z?=SqVMfO#GX`$z{Uom`oDEv;szw+3r$A)YF@|gM9%~oO&f4kG)v|Ysz-BF9*y7eu$ zcH3JeZ(SP^(t52udhAappr>84$%<L}Zx-!tPAFt}4gW&KztLga@bq3O{H@<o&c0<8 zd)47zQ6Nog|1eFf_$W=QADON_Nd6LDp3>KX=g3d?)=o1`;TQ*b%AWlwPua^IJY^Ce ze?Lv_#ZU7T9HXA+5T3X26r5%}&tW{f{+y-_=ed{X2%h)y6kMT@=V+c8Jjd`n@h@qb zo99zJ$MSsURGP91=Hj`YZ;j^$9_{a?X?OEH!BYm?ah^e*2YDWXzWY^x;iK><NmuF= zT9h<tpA!21!H?6l?*iL^dx3hO4yXav0~J6Ka0}o8vVd7YGB6ED0wx0!f$@MF7zrc- z34jZT2kb!Sztbmx2}t-8JdXi~fxW<sz%#((z@xw;z&2nbPyzI}_w>2+=@jadL7(4y z#b1Zbp`VPADB?+6d4_+|PVRo+k#0QiPsT~)ucpF^-~N%s&+_Cfjr9Hxzk4$Nw)lss zmkZ@sGN!|sN4^W6LqL8q7E^(*12QhY4?GLJ27C+*reTtRg@9a?3CEd<Up}x7cmVhn sa1{7=KrVY;4P*nQ!2j#Nzb3L0-REZu{lfJw?Z8eMa0{>$=sSM?C)~1m4*&oF literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/setuptools/cli-64.exe b/env/lib/python3.6/site-packages/setuptools/cli-64.exe new file mode 100644 index 0000000000000000000000000000000000000000..675e6bf3743f3d3011c238657e7128ee9960ef7f GIT binary patch 
literal 74752 zcmeFad3;nw);Hdr?j}u==7yyqfJg%kqCtqpC80t4LPu^(N8=-ER75n&prFR&UceDB z@phavWskhi=#1m|%%F}lj?UsZGsvQt5J<wlxB%iv+^cPuAew~rzTZ>Todne9_xygp zKi+>{KBRBmT2Gxib?VePr|Op8w9@9V*=$byS(eSV22c7I6u<xdPaBf^ja=8y_RqdM zMy;_&c8r=e|E_9ZWz~H@sk-eRU&U?r-g}?!yZugIm2t1{u6uo<tFQIlbKf0zPV{)P z{Hdx3p3OZsJoLz%^k3!LlXGT?_n*zl!t?Wj+&S0c89qN_PPKRroO6qKy5>w4&mnWJ z$MZk#s+do8oC$GRiOqJ$BTifH-`O?kw07GVTXsfYo9!LM+%035<l~tu!a+MdD4b!l zx#$P~(ob6@QVCi32fWp!3#G~;R#uXJP`*?Q1#MsC+HK=SDD^YfZaV=`{(t{#x7k)o zP=BzhiTa&Obfld17JdjI>U*jm2#J3_n{DpIsylAeZ?oA}or@^cX*&;p@8Yl5zaYqC zqReLd_+ljZfRn*^ItAvsb0S~E#7db_^bvivWg&Uk_wpg@|NZxW0s~rXw%@JA7W#9w znC{QhVoUu#b(VUadc9_T;ft^jG;@np*brtX*3qDS^H;5NPdwDuuEig)w2D?9%(2-D zI|{#yRD9iR8?D95?Ge^qXDz=|8CgU9QI*v>6KammHk?*-@|>EZqYYnO$MQiT*8IwB zjcsG6_)Vxma~#U=Xm-rjtfpi}VFwC1Cur7YyoLi`)=#&Vu0f#zy$X$$g*3L%uW3y8 zmuYONzr5Kox_P?Yrm@-nV3;*)<|dyyN4-Uz-LyUZkNTT;gI4>+ToAv;T(1p4{=!XK zEb1>4F$Xl(sI2a*v18FK`oNW%)lhSElHqI)TC-QUqg#xxw0P7X1TG@+NBu#}xJW$Y z4{GsQ{sQzzi-r6?etCazhNb=jn^N~z-~hqkY$f^}g8yCNU9xZn3QMGGaTEl`MFX9C zG^<s!wrGyln&R1p8$mpEuS^ZJR%JJ%CnC~F_JWC^1fz-owidt!7;Jo($7U15xt3-u zUy3=Y#UB^>k^_1rR8RtYQ(Z&ZG}fxIF8)$B1zR-ss6<%dcHRYkqOqs_HH5(0O@!H7 z(-{Bn=}Th=WLG2XbB!I3m$?Ojp&R@&FvUVkV@K53GMlm?8)Q{d_^}qt<JSQ}bq%^# z85y!6Wu_fu!h<5xXjfL}<24xlQolK<Y}moa%gnBlx{vj6u;wHYVoUM>LZgkr!HyQY z(XX%piOS;*!3)0(v9>){ouv<muoj}vo%}U`p*cDWEvoX_VEsf5bo|t5S$>_)(%i?U zS|zq{MF|F?IUKvFnF@^q@cbE|2r&0wnTB_zh%nk~0w9tZmW7^zXwRVMAE05(%JFqu zi~-E^@F=^jZj0_N+-rF+c@HZ$%}<d0_%!MT$rJu_iQe0gTG&7sJ)p%S{>o5%#{9y) zvDf^><cadi=%<{1=JIB@%@)4_lic$tKm*-W&POiG`_)0B_u0q`nyieVZjA~AiER|o zPeDoHmXg8-5KZA0ypAW5Be*Q@ODI~`V2tOVyU<?T`_lXL(B|^nK`vC{X@3_%QoE@Q zk6W7<;LupaUuJH#Vy-7pi{-r)b%;2kR)X8|hSJskLRLE=U2XP{R2!8YKC`*r{Gk^= zyn%S3<b(-Hsq3jbVRkZH!9lBme{1X;utZF+Nc<Z6vSC-UDO+X6Z~hv#8j%!o?1=<+ zEd4ZGu@z|HN~Y-k_J7-KrED`MRfM(i3<Z%XMtf3Li#p?XS<4C{%=vz}Vh1qx1d4<m z+xgr52n$o*mjyuWV$Osd2|%-S_Zf5)W}5^X1QQf<GI;F`>h&rSL^*gD7~pzOHv=pn zZpOX|VMKkAilc(3scUTLaN!oqd+b0OM&e5aa-zmVIg^N-3ba7uqC91!t)^(Ao-0Z= zBRe=&VB_K>f*4`+Pn0a&i?Yl$8QqaZV>2w}Ro8`hpBI~vsjPOLi(vhXzC8J=&Bped zU6wJL|AUwqsICB*_!{IcXlEQCj!$<ajsQlYi2^( &sjKl@1{;unAiW2w^OujNoW z+s1GGSx<J&+NxO_wZOh=MOmE@ZP49QvUKMZkCAB3K%I|@I?-k|+Emw|J{xyq05F-y zq7$V8l2oRcow-7Yh^cOL;xdHl)f~cwpX#{~ZSyaWVW!KqqDW)=HMWc2eUv6Y*DyJJ zd<PmpV>@Y{fyvVRn1*ukl8i(qo?7gm{xW32isz5Se(%>1j-a2k4wb|wT)GbP)~3cw z?6fpLj~Sq`9YkM)yDZB*We>-k{xAm5y?nH0Ho2{x^Hypsn|E~r0<*<Uahmy+U5m}= zGCmb!!{0-iAbH9V4jiJiWkbU(=Y8Ht#jK`Y2}?gSAwHl{38mHoTDRHs^TO;c0K(t; zJur}@Zp6KBL8hecMc8IO7nuZRlY>jx=2YhD6NHvl9yo4U5tiyIlU>#Dq@mTY2oce0 zScIx+t*YHbRIT2s&bjqw$p*oU67G{!71sDN2sxTN5)0-<Vw&&T>oL1Aw=ob$3lFj* ztVs)OQ=VuDG#Tgc$T*v=MF_RTL4A^~749wE!fzjIvze_{!i$bjkvG#thW==gNvR?q zqN9=c9sWvw6oprI%*YEWbx$CY=-}BgsJF|~&ojGDfwn3zlecP(M_rM)Yu~wcoB82L zZNc91uwxJ?*>iE0-InZ+zyt&|243NM1(`ag6+L8(rCNqjEnXsf)~Gdhxy%nxd<%-_ zG<2v%HTr0NH-P%#9@h8)$xbV9#5j)t>pPHUVJX`#82c>$e2P5Fi^z73?Zb3>4H-a4 zyZAo{B_wtgf!oXxBcR1yzjoPeO~Gr4i!#^3fZeu!5V{O<&s;;BtE4N?q(qtks-WJO zD~v3>0nlkN*NA*{4_W;X4Io~{Mogf@=VYQSm6*9^7%EIIDcl0W%13KjY>-_uHx_7S zBM3Ta*CEci_MQineL{VRdq*QvNnCS;!G7c3CFAYj=nW|}g_(0Bp(?@#*~8{BOV7sd zDcx0Cx7X;?l5q+PV%P#V+gK1b6L#Y@;%u9I)LB}a`E+cYYNlR9TO8fRcYr1|=D8ki zBiH!EGQ4k>xDX4mXDLK0EpVV}G7x2RQ+WU4iC8DJH7~s={+*}g@6kFx*BXyG1VJP& zk4O6F@~-nB`>b1#rzEqq_{;*!TY-&T3J_Vpd32D*-d(1cjk$bl@7z}+_r*QACEP&D zVFxw8wdzuUVu0Idf!4+O%DVgW6fJ*iFL*i=X9BYTeFhw6BWnKWO#uf<A%qV=u}o3c zRpkjdrpb(P0%2Wu#uU7F_=8fI=C=Y|;*J>j;l&UybT5BxG@`(Cv-v9sK`sc!KoDR) z67}ijJN2A5PZ=2nO;9zBVYAC!b*-{`Z+NXe^)IaaZ4aV@RcC9R2h0yL^*)jOMlF^L z;kuNyhRwFi!;OhPMzMU!#EV1kKX2Z=l`FMaf1;|ewZ-_h6!2u#_t&h(u+?gGG$|v4 
zHp+zm;o76Nvuw8N0?Hq|1`@?JxhMxg>6-ocYeRWFIR4u4*JbQaJ`RvWfLCeik3W>a zk1T?~etHvy@Z|K;PCs47?)I7-zb!EfMA;h!J^hcc1Etvwx*tQ>u`yF0zXD5Ky|cd( z{fLlbZ3N_cCQ^(~lR075)TG6n=-@`+HY03uch$J?TI-bfw>;v2tg<_7eq)su?g_88 zNnF;J*6q=^gv|!G5@o0}RXt%pRsE9a$MydHx{-RlOKar0BA0%9D(ZTf<J#2gjGi39 zRMbT>#|5d^vE5aSOvMb88FJ;TQa6RBDfP#(RV&<!vCge3>1fQ<voKoq{n6{>Vf4>e zHMI8t#jeT2Ao(bv`ZIKiLhh=*sWGP#4Q@o)t1`u?Cy!7I+f(zogymtrMc5YA{HROq zusI`ak3LXkL3e3InX_|$#IXlFE;43MxT5JwHYitP({q{T)*Lh49jZgobClJp!)$BU zo+LyUZVj_7g1QsGhU6pWQYllhRv}>zkD+^~3H)*$Bbgb}+xSQ<;`f1gBW$Av`I&Dx z2crSD+_YWn2O`LmcO5N%w9$t&Xnp}X^Y{K2FlZ61txwY6v7?X$3-^|?qikzzmcLR9 z9MiKRfo}{Y64<CKYr)`biP!K;uZJUntwxSk{J4K5qKyy14N_tKok-wwnY4<MT4WN1 z_4Sd!hcfA9O8T=*qOiV7_KqDY8mMQBoiCQ!jf)T01ST630EIpZW9m>I#&Td&*J2qF z@)G(Q#-?r8cnF+(wfKYfq?__O)cV01?J&R5P~i~$PTG?FQe*<`E(kHnAuAkHCh49j zv-Q4HCK^~TjwGF0d;#q(iv}9Iw7}>3qzEuDHUfz%e^;dVQPET7kr#V6y^GJ1O|z5K z@-b?8hz1C*(E^=S5nw_e6=6G56|6$hMfa1OC*a<}hls*Jie9GWzpoWP?I&C;x{7ue z4C^ZOZaY7W!At@e)TQMgqFkb)@gi4uUE7eWa4*&6RO<)%AqM>~)Wx<YonW4o5f=5= z;GM7oKsPQT6cNCl^te&X5Nf0!#jHZ!MX2aHl=x6a3D88{pbTRyA2xz$><+)rww`o> zJrWbP>=VHYSyOTVh-4o>jF+`w;<lI@vI(}mOF)_hB(#yL=GHm4U`h!(1=rMR^J;!k z7A9Hwm=x_bc9;ae8q`3-P3QhFYb+gpuyo9Rgs~=+4&O^VQ}Eh|zo>M~ZV}s}Q7n`+ zG&RPDMJy0jI=n$ctPg^WYPMm8-O1k-g6C}7ed>^P%uQw8%8YIn+rwYAfad}1kc|FX zV`J{T&PK~JGLAH9jazaPx16@tH>-JA!1gM24+Cy~_#yxwn+_(hvVr;$8>q2*(!Fc3 znc%%1Z#J#Jd-TDqrWLVuu1EW#5jWp_A!Pxau4)n%il@8v;ewIWi)@}dDO+Fu2duNG z9yLwR?GQC&7+zE4$!MOQhiP#{xi900@{qmv8Y<S|pgHwtLouneiUS6~b1i^?sl4he zH{0CF>uFEmE8NS+f&FOMq5I4=Iml~YKA5&<J|VzCAUp!4aER?sqI^vd=^^FSv&z91 z-Oz*;+4LMLT41gskWZ>&5f2La2_um!c$45?Br(nf%0OEiAmB;b>LDvByYe@O3UNGn zod#vdJ2d7&`Y9mwTn!o!+ZafF&_omg>WA>urXil+l!bx|{Y7@Re@PZ;6$+q0ON#wk zLE#o2xP(X+!#_8*ljt6N1bW7wWB>yqS_FJ~eR@fxg=XXm`?M8<`eM16ywSLUmf5SY zxx7;AY@|(*@xhhxL4D`derPH4YL9g(i}z^Ej#Z&An4Ga$NEldp!t2s&?;<S9?N-FG zH(a<eT-T&G0?@*SCJp3k?zftvd-Zdo9r_rp@$+1Sha)^B6;=?=meI~=hfz<(&;u!R zu>(B282#MF-$QpncdwrWX1*xE1cfb#mJHv`n$^}TKeimt>>$O9V=L0p`Js>;A3_ZF zYL@rZ78&Ve+pOK9^l5FqiUB~1_Ykt7&b4l|k(lVC7a1NslEM%|tIrpTLz?@To5x62 zW)5mDgX+aLHE^ivOX3{`)CwkOPj=EJi2|r)2qZ|%tZbr<3~NuiWTJP;6t9s@nNy!S z8wAS^=y~YrV+iwglf`b|O@J?_h{M1bI=x~WJv=w#!Iz_BXzC`s{|2f23Xx^RB#~um z0UpVIKhyzpY9TeJk3_-qsP0nPm;!<=+@i+IGA!=^#8aQn=&Rt3q^im5y^IG-SQ~pc z#EuGl^1WwcXJ$_QD|9?|C3*trZgD+DF9?O|$3BK&-9e>p7hW;=D@Oo=uP0I%QYoog z>Kc^j?_}ZvO57_FyC~5YVI2emmK}((m|U9qH5fMb|61TwRSy3RWi8G$GLoNC1eB=? z|Ai>NpFc#;Sf=$R8XZpc{!}L5)k&`l@EXDP(-jGD9St3!(H)O9nVyhTQVlW*NU{#2 zaTbwd+;b9?#b2ZSe%w1$MrGl_|AeTOqyx^9h*^s@2(QMt7T3?g!3ZBJc$=HALV}8| zYz_+GX?Y7<NcsZyD``ETr7GCHRDrl@p!O#2#;#C=F=Y0{Y`l@GAQYcwPh2gMwhOH~ zqS(g7REm-Fj~nL`wp+2;;ZIGa;5PmrspnSgs_A`l>ixXb^I?z(#s8s5J|CuM-187f zke^M}#ax|7@u0bzlJ|swx2E(aDA<Z!S?^$tx?ZbrO+^3&kG+kDqp`M#Or=mKAEdQ2 z8CaVQp=w^Sme(CM-dsaceZR%&JVOc(7C+gADCLPJQK*kB{05<ua5!CT^GBOgOR$_} zU_1O<EPI4{8()ZpOz;@~J`_BB>ZEkmVX3Uulr@*Ks@+-tL0L1vsaEnRG^TY84`i(! zPFW@*!Sb%$EPDTU?7jJWK@ol(s~6vYc`7gQ8=gUxY@U*e>Pt~yLn{Y(zeNgIOeVBW z|3*xNxh_NTNX&IP9vbud@L-<7RORzuqC^)>gSvwT75EnP!ZR_l$sw!@TCgBiYeXjy zy`5V`ePlBseK}+u;#Z_AxD*Q!-p41d7epd-ROOgN^YgS=rH}Mgr_JqB_JF&TjS92- zi%Ro9>rkEZN=X#@Ji-!6-FxT=wEHow75c5+#g{3MKsy4$n3Kb%cSQni%ENy|4mSM+ zh0Wg}Y(D6;DN&LN&467W3jT^2P@u85!;ThfH>Q3)4fpbDwRV}UqWYdTW4vZgok_BR zem3Z48bbWPu+jr%{RDZ3*$&H_k7zd2six$2RJM!HKtIFmiXgkzSz1vF3dI%$@8iRc zeL@GmLogJ}yRQj@aV0Wa5M!Hi1D93bowy7mTiB4C7iJIm3cn2JTg4L>%|f?w+01Vv zfe)%KlijPnL<=0P%FzN{)tPEXiPL9HG6OcfFM1W|(#Ir+Xl#~$33~Q-XhHjgfQM2? 
zi)!tLk&#-OSoN|1n2Z}R9o}3JW()AF*23(g-qSrTmoD|^3f-X(D--9SMU3?mD&azj z{t8&*P7sJ@Hb5`F-*5u{f&7~<M9f@@Su7f}TpOWg>71TNGL%sfiH{veLS02y*qn00 zX5_CWLp{H80FW1Ro&Ym8uqaIjT|jP(IfTYEHr)>~FG&j76D`yIRG?+Ln;sA(kt@4) zW*!+7MSC!<Hpq1Z#!~QWSVx6r6pLelP|qprZqI{o_HOlA*k<y^K{i`$MV|E)bjKBb z5b7BGRph2QOIn8Ln3e}j?T1un{xsKSxKzuQ9A{2*TT47pBGkiBnW3z1OuCf~Tll9F zKx|OwJNr748I~i(qw4l9kBIfV#||x4<1jlKX6@|V;EDuolGr=J6+5hLybcs$UT*2m zx`PjWmg*1WIAYI1s!@pRKUAOE5hPG$r5a1<Ibm~&0NLI@c`2YMTu~~vk?b8bb2gfR z4H_*OL-<r+)GRvB=q~~J`{mrilm!4gegpt&|FkW3?H9YjP$5uX`7IvO;@pZD8j=Gf zvCb#41v79-nC&iQ3CxkXFh}AsE5zFIpgB^GzcT*95z8upQX}xLq4MWIe1!+k6pN{O zAAhx<%~tfZ*r@7?hAm$`O?D}FlM4GJL{Zh;Wpzx?3r6Ce_Fa~x)U87vT3-fu@Qi!6 z9YLNzi$0zd%3~rG4anGnj8L6o$25{O)TIj=%1a&5Ej6&cC$pe)K$hPl3-Aqf^tn{} zY$`oeD780|CL0=Qsm*@8kxD^tU8AdfAK?A5z9a$8kM%`mEr|=z7lD*x`m4belT@-} z&GHB7C!{j${T>%;4R!M8O7!zS)WxTTzC&G4N@&e$Q3Ky-Fo(X3?kkVBB1gQWZA$s# z0h+R5^E73{qwaQK!u&u<I#jk*tJtVjK;1m36-ke0<zh@5k2%rSY_?Sm>{X%<034`? zm1sQ{9TAw64kXh_@1_H*(t%&0S@WnJ>MI0bzus(i-Jv|T9PB}f)&NYiOI4z@qcXdu zE79FFnq4JIbfSovp+v`uz_t24W>>iq{aC!+qz^H>Zd0OUuQ0nRl;|H(ETK7xCBs;4 zZiZQBqdrMv<p{j1k5iR(A7?9X*s2Ho8hfQOl(OY-+|!j9fD(kwvV<EUjg5HbFzPuB z<&@gFsQ{hB)K}JhksW5Y*h&JODr;Vg8T616f&zB48+me(M~RYR9POm5)|AkQxu^&f zm-q%vol#d$Nqs_z@@i=pS@{}}k7i1!lr{0}pcr=*eHejC%L(4(Ky^h)7v4hjRv%53 zcv?IYr2rXem6R5&+3Zuz?ZFZZeq5%j?1&OSAIMfWU=VDH1qhm5cPfv1QO@l8$?{!h z*Ih~!FyrlBCHgNBxKD{bB?6WDon}|H68#SR!R#`W=ynmkM5%il6|Ff3Z^>(|)_I}g z{xD0JjTwO4_*%=~rtLYJ90kk}My_ZV7)fSXt)Zg+I(TR!Wjma|4U8g`U;;X@B)HeC z`$Aa*^09$4%vFWJR1*F8fw|6WnnV6bff~Q&oBEKyG<mHm1Yb%EQK7!csbRKE3_o85 zVF*(PEhy0?(0-^Ln|!)!UhL9jM(olwP7@1hq=71RZ5EotYN`>XC{>yC$f?dMO;J;F zq8M+gV-RWz>Y1g=8zo)IAs9bAaz$L9(h7u~C9DLhQsnWJ1~x8phdcKZY;IX`mZ-SO zQNkK9Jj>kb1~InTs`+teN#IC{a`llA7P7fyy204J0i;0HGknXKtw55dvYo26Qw?l= z$c4IfXf2R0j5*tRIKmp@(+bS4;^hw2(NgcwtZm8N<e5WNsBeI3t^6h^{;2)Fz-ve` zN$MdI>su2jP@)h~!7;X3NNRQzBu)SyMnAZe{KQaGKo+L}RBKN?ht%cgs__lCP^pSt z`~l!kgTK*}NT4lkCZvDXne3x(psX}0u@CzA7=oaFFoBa=1$J6d!L4}NC={YqBE;Y? 
z1bIzr^O_MHPgdp^s8aT32s<;MwOeH;3L9!at3jkbA{1zc0Kq)Zpla?G^*|)T#Itr6 zHVEj41-c9<N<E7y$EQAODV?JxaK1s~@&#zIiI#^ZY;i#}gq~3GEPuIDHxvC6gLwfV z&Rv~J6nK6z8*z3$mtOM4&LFnbuO<5<HbWO#d`XUBq~&`S`M=E1*ZraVPNe5xxkXol zuo1I&{_f*%!Qd<+2muj_-Ny&PvW={6eF%P?rxhsR&!GUS4iz@Qid3c>fv)BEYb*(M z6ogP>Bt$Ym+A82jT|=|o+NGJBGx+L2dPW!*GO7IpSJ%fyptzc!0^w0noc{uCh{<!z z_@e+nIYvCNCIL6W<k0Re>?5?@A+w{NAn0l7FoIei)SZXA`DKTwk=AP>5#r9!VYG4; zbc2@CE1AaRVnt#PX5(xux|3Rg46&Zk3W$}i&JX8;P?6NilL+vr6ak)TMa3tfQbq&` zA!I<mFbR1Fi=q$n9ENm~R=Oo$=wv}4VSO@w=j-|SU8sBTyV&?8(L{Fgv6{;l8nCUj z&}&Yz28<#%u^1Bx0bk-?1Xd8A_(GX-i7}|=A^Sx}Kllw~h^WNXNS;zC;xFuu|5iy{ zO7V9n(Mj|K%RPslV6-FY3C=o%o=cRdLQkxBnRwC)HCvEvP+7f0tXF&?c8rA`foAB- zfhde0kPlIkPx;QWfG9v6ocxs%%>ezLo?$pL0ON^YgO{VX=NUswm?5Sm7?KkI6{1U6 zXW}tDr^j<v(}Ep}>)P(bGLiC4!ble!p{BSa1|4KEONrlvBp?Tdp`-$8m=({dq4M#N zwwp2}Cd;BeT}8`d^b7EtuaCy>`T9Wo7ASRjvIciTNmZ5TBLnutNzz^b-I<9a6f(DG zBtA!g&{0W0<@7U)ezX$yA^JeUvP3iT@c(cTnUNP4=`cve<4dVp=VRRu7X4GmlZnNk zQt0ry_pFuJZ7hLb#av&?rd0dIN)Q=MRiEV@u^OB9b>)Z%#cyvVE5;!-6Jh&H3axOU z#c-22`XEta%$2|<NM+k&o>tloxop{_4BB5ky`=s@Sl_ZOwRw8qtdiJ+Ify92OK}!{ zCR0oqVj^L)sT^YVbG-{!H8Iam5rI{AssDB*8Wuy1xs0}zDA|xA@%c`zq9E+}ZoLh1 zN^zbN$rIcPE+O$a;Eu#EE<+8X4+Q^62|p^(@51)%6mtzlvg+6rbLAosjx!1Pfok=8 zfU7kXMKwPRIlK=}b@#byGjlbOCEjWYG%bySP)7U{ugOdRL-8uJ)WD(T%Qf>dOJ9KB zQ~I6Q{MzjL9D2AhnOHx|`{X}q@oLe-k&4gA9}L1b*3glq3qFR}?gta-LykcZnQSU# z1$P)jmb-2h_7!~Rd9q}tinT5$DMsmSAj4`2)5f{k9XP)9;Sz>g!8#6U3l5fRjuGb) z#Ad*v9bw><-lt}!yC(Ti^K^HuikWB85^Xkqw+8fMl>|OhLeLw3^$(hQ?HYNmTuCS` z5$fbah$g@<)nbLp>ISnb!=T!N$-c1t8BPS<aDGU^Iywcb%bK2(%mqCqCsJOm#erF2 zsn#Z7Q8O)v^5`{qXP&$JkW1l0G=c581NkEmB8X(M{r6$(4-LhG1*NQ_s9Oa<x@_oe zil9w~P2xPFR$=eznJuY_aybZ!0B|t%EbK^Oc7@)+b0bt`<Oc&^OwbNWR*Ko7L-Jbl zINIf9hiH8xO=CRj&m|JY+C<N8N6RwHJ6xdZX}_DA$MPJ+s)D)7?|%sIkR}2IQ;}d~ zL7IGXg_J-cc(k<Ai;xpUwXkpC-3M#O`6!+A(UQXf8%Z0o{+{<22%c0rNzX%^HnOSc zh!**4@U*;lz5;Y^Vf!ubwFptGn&k~52<1f%RAuhCmcbWZL|I28b{*9shB}9`!}k-d z3wz5C?BAi9g5usYpc6#F4uqloW#8~%9?GHH!y;hq*f7ITN}2)<R$8z$h(O7)!aB@5 z3xP){;LgZH+vNEm5ZcBEY2nsL5Gli`k(O@zcC4!BenKPyt9vLObO*BZe5)bs*ll*5 zU-eB~{nG5}zqrpDY))-WwT&TA)|$Zxn@9Vp$`vrsJgKr!qcf%NTP%Tvc{%P1d<u*^ zp(4sfTjOD9f<EwuUg;y#>4QXix4ovYSDxd5Ow=(5Hr8QCfHTuah$DnJBk{6a2pj<- z{#XVoA$4$Cf0g$47kU<Q3O;P^!0%4J|3Va(t~cY0U4Q)!W?vtv!Owb`SoiNZgo99E z#4i!Avg68(lYx^4wAbD07f=)snKH_BuMP9DHdI2VxdcZG$f83H!W5st!i4n|1VH1( z?}7l9YWlolS0Ob$nwoy*Z@rryE}K@B87I`h2?K?D8iy1~_RKT{q}}>)7&?TRNWcK= zF9Gm)Pv0kLaPbBdf5FBcQ0&CK6Hxp%g@7jzkBuUr_*M;kYi#&`fa3djPx}=Yb_hcL zTm}Ad+Cot8+qAwM{5~+gZeV`?S3*e|7<V@?->HG`jP<?9SYkt{#e{Lai7a843T0n} zjPITZY#-!7{uXM)938^1g$#gEfPWTZAax$ch7bnl6#1m-2X=Welm&$y@vH3oZb$|z z<8vIObqb8AA85BNyDL)h5tiZEa4NgfoYH2~%dTWOZ5?W!sps->n2f~h`&iA8FZ|~5 zK}#<{=1G(pxv(vUgV^D}5IuN?$;c153QCT!5m|VjY5G61S!8tZB_CT$EQo&wen<kX zn8xsT0>lL%fD|7|`4RY-npcQ{Kj3#v$uKVORP(S@+w@CVasC6jIJI&<KZ_i6*|oVL z)`HGoKiOu3bfU27dC`Uk6tnGQY<gZY)0~;-gM*~TX6Bj|Zqcj`1!OF{oAd<lkaL#Q zdsr|s`NaS;If37eZeV`8Xn{CeSyz$Qui8sHgJ&VCqsbxIdSHoc5XxGKb&|ng6@bn; z61&5n*W<GjVux`iLJk4-e`TSCTu^B2vI0{xaI!^-KY~VaHV4SvYZoKIZTj6XG;^qJ zO?@t`9y|BJIDzz6D4peSF+>-ua2GZP@nYg0Sb@i4{S2XTe{y(9U57CknKCer!(_6m zggOD^c-Tl5idqJJj*3sBVylG!5*q+HOr*S`x>4j?8ZP3s*rH)=x&uoUjhXNRX%e{; z8K|Lq?qCcF33-x-KwED6faH1zknBD4LATw2(`>VlTdZac;xw4-sdkW1JO|5OHqRI> zOcm!NI`bn$L+uZNAh3UFlTeP!p#wZc1dp6CAfJjB&Cw7x{hLTiIM@x#Y5Y@*k1*P( zq4WRxA(8BHja{nMb?C#*hun5J;S&4szeFiJ`BL&OG0#EsExB6Y<We|B3+r@_=s_RL zd;CQS8#(i10ueLq;c!yBEi{j=3~JJ`MPulmHFhBt!+ZdpbmK`JT!0^k(3`+^bE{BP z4B>f0q1?P`1m{?(qz&$-Hlq6DngjC3`F}b@s)wZ~F)^I1Ir-q)@t`5z1oBLAXN6D1 zON$L>um~$R355`!hqslooH0oZ15x#(KFL=oTtk+(BiOK~igqM(!?D>XZArLWZR58i z6?Ev?ismiv(|<}&XY~KHLAgcFX|Zylb6R|A7oGWV9MsGyhv10AN%IC)22rCw_Z}js 
za}M=POyH^rbqick9kBH5r<DMF@j~($o7M&mkrrsF_HzxOeqX|)Uh`Wzg;nYnP5IkV zNj`O!ri8k%n3-1F;ym=@8z@oWwG569zX56yFr9Bs{T$IYsKPNpULGlMvrVfzsK3(U zpo)_((n}xtLO>HC3VWd(+un2s#LyxN$d%}ElqK(?=r;(^@_K+AQ%0#P;E$;fBfS>f ziS{XvyhefejrMwbvtu$eIgn~f(Q{R;DYij$qzQ3KF@K3%D>C3pNxHG7n#nff6L=%? zND*9{izev<Yl>#W2TWwHzDFM0BL|wfgv6oA0jZR0SJ*{)C@)dF0ojd=9LRFP3Ok_6 zpE6M&oyt1C*@1&qa1cwq=bc$JKEtjBniu6ZmjL-MW9zUUvl$-n%?_f#G5o(MiUhAS z#|whd-?58NuY;IMrwe#JbB2f^$lirBz1Xv=?5N7x`IL8wfI|N9A!YSJHM-O>!WfCE zjY%CMud#aKXVc&xb>o<3;@HI41wC|oIzdHeN_7hjXBiQ5ImR?dHej}q?NQfa?F4IR zg&-vO<o509NZNvLN!%oPAniNEZiDZ*gu01c1qttNY$xieg1F~{uV~^N{{zXnBes8y z2WY08<ST3w<`VYH`OIo$g?<47?oxl5O;<I@@EBIA0463%!T}rTM<|4ig6mOKN?~6F z<;zI_RZcpRx!5xtt-=V5ragfGAm%DZo3wQiuVw>Sk?RvG4m&!f#9V*-lHQ_Xmxb4t zk=WvT1d)AdGvTU12<W5&V-HXPY|s%Nl?qo{-ahDD%+-#3ay1zZ)<kEMK7Ah9<DTDP znpxgGcrmALMJAh(CG#DF+THTLjD&U6l-O}RMP+I?5wJfZ7h|Hp5SrM4B@Hl<3npCO zUfM%Cp@Uj{S*{wN*+*4gZ3@M1apKR7znpnTUIIt@!+R)^e{zL$q?`dbRAa!v5QlS% zZ5{P-g|oOGzNL+t`8lQhAe$Gm7M465%cb*LH7<g}mAxMiX+EqJF^5?go~lsaSl*H7 z5}eS8t0>W_c*?P_tk1xK1#4rVsp`8GA^-JI#lpJ)=YXzHo~x|B!4A@H2*J5_u$sRc zO7bh?5hsoZPP4z_<FD@~7TA)pA~V`xyveS}5t~cWpj8s7uq&L{a!FE&`YW+HNcp)4 zlHtnbVxJqdAs@Rw2l<MKKFIO{(ku`(Myk)s5NpDDK}d6aKg1uj@x3D8V5b*>FDT+t zrJhA8+P)J68kRO}sXH8YJ*TE`?uzIjYLDy=jtqT3O<y0yplE$9VJex~ES}J@G?MSQ z*@Uf9(r&zwyqs2pt4073zf<EupV>8Zu^aWpr}>gOD!uhXU05#8s0U}stj55bRoI0- z>K7vf-Re8=u_5?q4541ggL(lfhL4B`pjX1h)yMyxMFZT$Qm&j&VI73x*Id&83WX<w z#-3b*K=R(T9z1v_7AGv1zoR&+1fB*XZpA{VhiC;ktKD>1(B;Qn!{4P^$+08Q3J;tU zupNVnE~X_j_A^nKxy})97|(Xo29HowCfgw0HfqCCI@8CuLYzzOu7vNvt@2DyP@X4+ zeTC<um*&`WG1qP8@l(dw7S}L@fn?0R$DhU8A-q4Y70{%3VzR_Me$p7w;%WykkU4Kh z&g5I>@e>BluYmEixZX;ov7j@#zMHWE+>|LB%pDB%W+4}(ZSKU((a(Rsg?`d(A<~1o zAPi=TvtC^|;|1@8o!kX+ERhFlfZTJzzaesLgMA>(Hml^=ZYwT=(is8Ou|4egg4{XG zqpqq%t;Hc6DN#BVT?;EZg}ablc@?|We>{UNLz5Ey3=uRf#qRl$RAjS=yy`4c`4Cs( zx9q^~YPmBuCnr>Vhu^0>5*Il_{&7XK{p0lWi^}c#cx82wvRbnTjxP4*??RoIjsQS4 zS<bNIt#JN!<2wMBQIu!Asl~52d+jMyP~&!o9h*cNyUJOc_&uhDKHf|?^|Q=`N6%FQ z+acODC5NqXV)021Ttl|qWX>9=8xPl-{&<UBkrRr|b0;0KInc2!&jp)X+Xq#Hza`r6 zEFLip3|6Uo6~Y#FGKqH(hw0MOGi>eQUAFKZV0Of=gGh9Isjj1?t~4I{GMBsuit_Xe zif**)6O`5carVI;*u9vHB^QoRSHLd!mg=@sY^h^=VD};*zcHg|sIe=Ib*0qtUTOYY z#(E&G_G{`JL8|-Bubq0H`L##SA;rM3^|Ej4W#87zzO5I1n*%T3>vM4u@=K@al=5mO zF}Zo9CfS%lc!O^#WOeKXNjnh%?O+o3-%Aq!lbE^+g6sBH@76K&)`62~2@wL@dhUdM z7TQgoOR_)vEloN|e;e=y2amvXrxJY(w6N9(GUT)2Z38hIA{=R^mm*$czm(IoRb3;p z+=xwSEC3@Pl;oVwHij5S<~qN~{Bz3OZrUwln8w5lc1nXWJYfuaKYrqCxTryYJl26I zEhc~gudsJK(u#5!N*x@?Z5^(&Fk)~+pbdj$1@+&O3)^&O%rz$o@Ta?Dt{X)lC+3<( zfqkTI!!g8{{sMwH=2`}4kFCn9p_#e!)L2xj$7*D4q%6q~W!BnbGy#?kLADj4p=V92 zkJ^3bb!Ym3wvDwGv4myAU^HD39ZG8_<tl(*o7`3=-^UDJ0O<g1%Yp|!^UT2u_0z=% zp`Ti8M5#!1*kvc0zCq{n$pL8`FkpY1GQS7wI(8o)1MmC>xM)cgZqii<w0^D93GHr; z0``TFfbJ0TTY-vw2y}Ml)Z0kpHU_Q5Kv?`Rep_5K5d~;z`4zf7uxGh1lbaS+J07V* zFVLVr0J)`w_-~+5zei&xDP~E3cbi#cGvGDLd?I3tKG=j1-Jb^pfiS9pzdDtwVR@(L z7}_gGsmwu@a(l1%@5nuknFXR`gFb^An}({2D55q&OoZ<dd6<T%H);@}<?rIJ%eXSi zhS$H!SE`0TE5qfK6nE()0b#`%X0Dx!7=rw5&@Gyv4BVj1@dwL=iv_a(Yd_M8XSC}B z;3rIbge>Z<i<eS9^Pw(U3E9=|UMYnlrNu`FmW|gjgef74_KGH)z!C$HVf%K>1gvPa zgaDxxl`CAWL@KnTsdtIOp7%6jWO`gJm*!#kLkan-xU8K{G2~*)MO9?rwCNJSh$RKb zRD0sY0W!ORJ$fzmy4|cHT-ZskjGidbCxI9h$Ku;Vb}a9`fDG9|l)ZqI?>#`u_Z}eW zy*H5a_7OTy12SaC0nIaj6me$)8M4<ClsH;LaHe%w?^3r^!vB;A>mPwJd=edtV_W%C zSOIW0Rv#J0%UDbT)x?GoXOms+U@?)vZp_AGg7eYcE;J)Z5iRTG3DMI2w9NAdlz``b zTIT7;w}|v78-S=}{#vp1K82aRQj0T+gTg6^uJY^AEV!o3@Nc5?wA3<a7p0JZAk^R6 zvHc(V6g;|N*|f$g6v9|oV?7k2`OG})P@#F$(mj@!(oN3`hyW47P1h16C3T>wsVq(! z#9hxn2Vi2gs{m7rdKQ4TwbT+rrBHJ%8A+x$*LKnac&XnlG83bgd?{aaiJ6jh+fv-h zi+;!+WsCIK`UaGMVw%i)t|Nkfn<9z{Wbj-tpOv!20h%2o$ced--roqAEpHp>j(PT? 
z0@h`Dhy9xHC=T0dam~Jt`~kSi1wv`c6f(~rsV%nK@^+vkrW#@gL*DxqBaeF_D9)Ve zhL$*)$)8RL0SkiAyCQFoHa;aU`uP2Fut*;Q9ZfF3e@Cw&67xcME_VyY#3)&qtZtyB zDX1TMS53Z6lyBwo%_rZ4j={wT$hS(F=9F(s<Xea69;*@fq-sBr5vwQy=k1@tLx{^e z5HH8*XTT`rZMKH8VB?L$5nJ>TVxb*^BLCcp=(L#Khd+UGD`ml}u&BsE3CSwb!>H$z z66grjURq$PAB&Mb3>B?^liKdm`<a*HBp2m)9m=-Uux5}CF;=Tf1h}(PtgdIC^5;SB zeEa7@!#o!&%U{G0-TEs?46Y9#3zO1a6GJRF#y5US71H4A7ckEoBrVf8_d@|hosBIJ zTBEZNIER9`)Htspvc_O<!?f<6(WD#gt)7~zRUE~cOKk6g@Mz^nS|O;!Z?&tn$7xn9 z78;abN`nFg$^(htp;FdKGIOx;6da#c@8quxO6@2Km|*=s{j^&T*1zVD;n^JZufPL_ zkSp!UffP%rh^0iFKf`q^bWD7fzbKMYN-%Yh*tM$IFjJCHabPPecdNG*2zA`xBIr2e z8MU(11_LUlVUT6~m18zz`%x}Vu+hylQm;cM+qv);@3pG~E*Lf)<=DMTU;dcpPB9EX z^)6ri0aQ{m^R$Zgj>d;!bb0?H5<L0>Y++h}Jbe*x)X@mXIKEM&jYeAX!$Pa05w7~N z2i+Zwxk{8eN=N+64^F`$JT@~Ab_%4KZC{(M8L(9RNjR2I;)^$6l%+E|M8Lb`+gx%) z&xV-$?*YQdA;h2(Y^33kPF4{mN_!CoBE2>@e?cxZqqrEv!KVAI*1*?rI$u6C1P`p8 z{K8ShN0K*~TYP{ZaXDzkJZ0%)%u}auPJr#ypyrQz2Vp-%cTfn&-z{(x$k~|81c5GW zK|fWuPajgam+i!6JA=oHiO{+%CHgg}7n3~~N{fPedvfsW01NXIr#O+7ZRW4~sOi8- zrEW8FDyxx=m>za|3!%Y+rj4vXr}=}!d=LSZ`c%5!3}*x{es2$|!1W)vYAN8>v*|jM zhFtUbkgCJ@QOvi{;#%x5Y`l63%^o=Pl1wh6<{}DA%wtZCV`GP;+mKXik<bipP=uig zTG)mq{`Enq0<!U~|3%}qE6m>JU9bj$sJ&<EEBV1g=yTj#O6A18TZLPiUDG~5otAg; ze~Jb#KvgH6rs_T8kZs*@;@E%uu?km+3Oy&FPT>78)VR?M*qyTI3Kaj0B9Hc`s=V)f zC}8}Zs5nyezA8G2qm5j@=tp3kgsK6{d=x>S1h0Z&?+3f(q^uRtH&eD!N5j=D)a>Rz z|FP_Ezb~-x>2C-Nxjs0QfDxW3!W<}Bi=7DA(fa>Ixa=a%b)oPZnV?l1gcTsnBJaET zSoA5(X1(v0_$4Ki2DeYtVtH=_7E@Ba5a<`C1o}BbE`tmpN0-i7VZikvsqx1v2781# zb=4*eHUxeeXa0NeMrlKN3L%mb(z1;>3>&{PkAEkOE3II&d^sspVy<&O1q3ly9z7ta zxZ*G>_M!6?J<PO6FP*Y^k<|}03q9;%-qbACBF~{u0KsLb6L<Vz_tQ$Rlc)){KOESk zJd72Xa1_oz5sBXi->H*s<>4se$i94pW*KV_2R2vFT4&3}OJJj>OxvwFc58v%RsAW? z8-N_DPAE%;L3D%8^Ln2ac&F+LN_&oa6=>3nwMHD|h@aI3r7Hg|)bQxo3;;ss@E;Se zNS*2CrcCmSr1z;h?nXCK8l|9|t+d0UDcf^vAIW4~@BuQ4cJ9ZGQUb>UKa!=!NBrt} zfFGZ_5|1A~XW1hOomTEXS#JLS+j2v8VM_#U9T1q!Uxax9j1l%k5Zl*wBYC>q#TwVj zgLiJ-K__-Av?;h{1YWttbl%R$StrlgU6Y3!=#DgPk5s5r;7=66i3LX^l*_?EaGNgg z1D&ibuLO#{v)MH{kiM(3nCf<Hgmhh{sH8@29A6UHR`nsZAO&~Gwe*kh2TMQPSO)x- z4sC2n+n-05<~L$prkHxnCz?kJ3;G-R$j;qnn>{6}i_7H17+g-{$4GPq&2G`1)}AEJ z(qTrX#slqup+Grq@h34uK?O0|)zV;XB-vW-fqM%GJ}BhaQGPq{M+$YKS?JAH5Z`3= ztI$rQ!qr!ZReOpj>jTNn+uWF|HMTi%T#;xrK~deW)lTHXjXrONaV1l9I;x4VY3@?0 z^Afz^x(JuyiNtPlLz{adK_?{;WjBOR+Yr&{OD|C8V*j8AyV7YMbt`pTz~MD^Aj(sX zU)8a-lx+<K_AEOu-1vbLo9I=@qLS*kF}E}}+up@IGbp#K1iy|}<Xrl0?c|^1E>yPu zWn?vST1<MH_)9LToxBn$>9|^oyS;WYcw2WIP1xjBwUd9*E3S^>Cf81m_lkR%;>OiZ zeymsABNR8Fb}~3#gOMfMC7Fr+f*=ql0&oT{Cg6frh>(Nx)iHsH#79_D!H~q<InxA< z@$~%tJ;Ijf75VsweEbs+!AId|j$mRHR4z33kc7yNL2fUp8%Llx7VZj_g&k~<`FVyC zCDoG%JPY7Npe7vvk`UuiqCXP>r(SA)-bbHc9<%GW@>Q_WNwtkON<ZzcuGI&mc5)AD zhQ=q8U}PQ}9%)bX%EXJP5oyPv@j}|Sc=V)U)F^GAOxxW%Eotx<sBiFEq>T*eKo<xq zTDb~^urUVp&fEq?>5Wd(;x|I&nIcwPHrHCkPkXI)QML@s`}l1*;yJ;e9EoPjWV7Mk z&GM@c6T9bN=5`|!Cc_T2R$BL^k)_5<9sGeNC_Ui1<c59jZE)z7=5aSPN5`}E{^oI~ zo)ZCwEeb(0s!U!GVH=3jBT%(LW%36KLvQak28P&bB9E3w==V|lC0(KjB^EQ!U0Xpw zduR*9T(=?YXr;*jJ)ZDJcw`j{VAXAPONCzn^AsUd@=YFV2Lp;Z{Qxf$;9YXavfgkb zbKsESVZWrd*e=z2JLzKE@CY1&4hV3&0Jkw95)-f@Yi1}Wpet-hpVfqeW_7UJNfS4S z2>Oe8ir)n(f<V>Np0J}@-gzr%gRmbP0AF(0)FCuGvc+t$ykn3Ab`%25`sCdd<i1Jt z-k0i0>qD?5^>jhG$lt);oS0`Wc1m<=R?n2XqaIa<;K8`wp|(hzqRls#<T;J8Ea;o+ zbNynd?wvY{9{r|{rbp&fTkzL*qYwWXl+W9RJkZU9!C(Il{%UzU>(A6J_U5Yv=F}bk z1~v^Bze)J?k9ZZF2pVOG8pDZBw;*xKR9uJv8`U;`jI`5n_-U<hz{d9(EbT&a!Cgf> zu%8GVr|ex9qXz0F*ujXq5XQBo`khqzHI%LiOpRCC_32v0SHk?K!I#cPMPr#%rYb_# zcgTIMJR|={#KTYCLUyyo4G$j8u^+V?&!Q!3J6c5}Gcb)cbL`i61!<iFqwyY0VazrX zn82Tcy*%Dba+kp1n8?ig$%2chV8Ra6{jfh^k8HKjKNn}J;gYACcVcR=521WeTS!xl z?(fyXA~V9~CU@bNHG$Daf7tuK46YuHl^f0rj3<lf`d9KC%v|B9&x9|7vbvB`cJgyE 
z7lDd_XJ$ZZ5Epa|#{~XMu;!Fc?}OjI#xqn&-{u)ON=v7c3OneUSaD@nO#nx;Y65)? zacdE-Lqa^b3|PR&x;q@3;wSJ_t53=fo1|>;zX;6MQO9WGlIT`r1pF8J;UKZSrf4*( z!96Y6<m+G8fqt;|J&9z0Tuz4e`!r|bLS`J2F2OysMv}-wzZ%Y8?kPTf#+1JLbRgtX zWkV~EU?x+6;pkz%734A^I!^^tct~a=2?%MTIDrGJDRCplBh?NzC8C|gAjDBuTyVMa zBWIs8hZp>-ytjl%YYRL}!S+cQ1nKX^EG5#vl~g40sk5QFO7ElK=GpAJY9G=q?*uHN zps+gR)?!l^fkR<>5N2(LgIw8R;nu{d9CE@SEr`?+yiP)X1y0;(YXK?!8>s~jSI^ce zu))xvHmtq|heF{$w5LiV<!GGfTJBPyg>bg_)GK^WQ?>pCwT1*8$EL2w>{K!24WZbG zmk<`N>4b%{wCjj)OzyTho#9&>WS;xcWw-^xD^88;ew;7dZd_=2e<M0f`vN_u#T7;# zBI@KQ_)9>-V4eVC%&sL$XlKkbiNbUYbse(6L}GX?@6Fxi#j*nzPvGx34pfYR&fakf zfpd(`bl@v;R4k&O0xkczwg)R#Q{moF{AxR{z(6c6D7%A>g`7guS_M}FUqH7Et}*9L zLKikAoAe8Ms-SYB0$BSO!YhT?w&mT3vT9(Hkxiz$u`oS{*|!)c_zP2|a9pbn?9}_B z_ex!a2FhD2;>FG=IvEk6A|JT6)qtnbm3p@4H(`5R(N1;l5%#_=07D8_R9u7#5;l~i z%eZhwBN*C_v#Bkloh2#<Llpx>TS_dlbIFx(KFBpF4%!QM9mvTbDY4@s&y_(`F6P=y znm5dmG2~iNAbo;}>{{WTLpPj)Vn2kyD3%r>QwzG6`yb}&{1-~YYofrWy>a2QhtB^s z*evXaP-1mLnsc=wIk|{bUImu73Dppk2)>LUR>5%LLCbqlukcFBg4_@kWa45(knem^ z1akTsLMDAGA~I&bwx%%ETqJNPqJ;KGVk7QGYvIl}5t>h6p;(Y6tXP%BmIOaN_b0)z zWxo^btFWOIDtV#`x&UfC|K(LETf2$UX!)fwint$9AQ4Kvyb$u`hFcnG5ly;Nc~<sh z24e9~tle1i&7-Fb4_^d#7O7`T{zu)GB@+XlJAnA=al)h0TS<e!8hfj$a2KeuA>@Wi zEtnk5FBRS}fU(yBDOnwlK=CS8Ye)-1Mo9Zb@MHfVng+>|2U$wrDLlr;+G^515wIm; zaMFHa!kGabI;|e)+h6|wT$993&u=gM(+z3|v_D}Px9Q5fl`CjQ;0mc*U&u6$gx93+ zpX#~W3RW*%EC?-`JA$hfJ8>b^p75AAbq>>47s_3O)eQGHifgEf5uTI^k3x8ejLyO} zRBOQq?NGMi_mucODSl6g-{a!<nD{*^e!FNz@Ba@e^=z?g#h$14K*{zvcDuB%oEHLB z_;8^imVmjqBt#qyA+tf?ZDU|0uz68GEwDq+h@A_0`S<83y*bRjR=5^UG}c3l{QQ=k zDgVKqvpg{@E6^13DwrqWD{-I3<UvrOI_CaYhz)?Y)#3$%lsbq+aQ~18HibH99`3`A zXo2s*90Mm8dEf;~(|IRf_!2hAU!%$v@nsGEG1ZP!b>JAJbMDb9_wqEDOLyW?UDHw5 z;wk)Plo9@q-v@T{cAQkC%9N;vuJx`^9H*@B1HWSOFD2%m%J>=fc|@RTZFk}wib$!< zV}BM}b(PI@N+%lN1bS21Q&kuda0nPTy^A#%>*_-g=r`+wi)A^bP9ZSR=6}LG^mEI5 z$8uU`eyY@UQX}8TPvk}5XBT?$BOUyBTXzS4awgn#iw-CNn;Dv-`~#_wD{3;wKCm0z zm9#=|N{1^V5c6o;;-zB02c?FllpF<}6+^p&H{8bkHN@w&;P5v7I?P8>%{NI*LeC&% z5`&8MW*M;!u??J1?8-(0#4AXxdyWX1&y#$Kp90j<>6stt4$>MmfWL%X{Qd4oDbPZV zowj3xfe9M#4L6)rj}nBqwr;Dqi!XUMq*EL*I2&Y~oUNJ1+7?eoPws>EL@pV12Q}i( zM1{EZ(DH8Xf%(2-*A2*rD<=W-2nln(W*%=_L{@d4P4Hdz-@wO5ArVrf<*i=|L86s! z*-9ryl5cZ&I^jN<@UlptZm&P1PX*+%j9wikA^QT%l=uv|VIK(x8mh<eMikRVE$zLr zPvLUk7Gk=%$w2uVOj!690v|D!#sa!Xtj;@mlb{e98GW!8I9}bK?#qnlWD*jZ_y>O^ zxX(B;Ld%rEw-hILA%{4=F@{eTV9Y)pjKM@4WdI|)C3%H7IWd{XFg<}ed@DmakD%Gc zTUs#5TR9(3yPpSKIG&M&JHyQJ1alU@3)GH_b;jGwiaZ;gUXv@P5c32q(49p5!hQt0 zIDpb161WdM(E!DRpFfM%Q`!$f_dQI3zY3chYe|j+U_rf)d0U<>na7tuFO<jIxEC{% zP_>O8N0e+BGORrKMmQjjnpW7XDHx8PzJE75l-~yPbM!9=NjFp<QVPE;#8GHY8>Wf_ zU=hI*z((qc&-x%AXmcVT1~^9*2|M8TMpK}%FQBFE=|52<!j99mZ*kXq*t&%qPvOAo zXCrYsr9Fb_TUNTjDpyzNN>MPQBe?q%woDmf<77Ab!egg%_X~D?rP>ivU{><Lth7y- zm7c;xMqj^%ew^H64@0U#{Yz2*mCV_W?3wNwCHgL+`L!_5k-8fPrLkZ)V2qLTKajKd z#z6!GZd+26$D1tg&wolIsziT}QrJH9#a<5gKjFplE<h59HUcpmf=YQw-Iq#qF;YmA zQvSLJbyDU!Q^?Wq-d&Mhf^FVW+~$2g$A%70)^Fo>kH?!;bLkK`YWvg`p&^m_i2oM( z5rX=Vf3|Agfg}QRb}~%YD{T{f(=UPpqn6(kcHq+wuvq<k7qtO-E+mU$a`1~mnZm@j zh|=JBf0im41tt#V<b%=~uA>YfEF38n5+;_Ya@xh<z5!hQkX`{GrjB<Jp0K7%@qEk! 
zKsP7k$gP6#IVZjhEk>s3U=Fm>xW_@jPZ)(o&+@*uL}HY_dccmW`6nDp{lVge{)qA@ zZF2?UZ~{q*{*79rRZDXFVEsZm_wV`hRuB(W8;X};JCM`ZUA^U<o2vU$6ovbH#J==F z9BU5ZdoXu`gzSQZGK?Y0s}2msJhLln9=d|tQXa?EyG<FrvRtCPN;sN74*rk<WKrs% zoVCG&5Rl;_wH@;?142BUPBxZUEz}TeQu8;dfz8Upb}%MPbKGG8Y9?c49WGv4;~*kZ zqCdscJnmBJ?nHn$ZBC1<d_RJ*yu^N3-B&n7QLE)j7Ws~jZ7Y#0SqPz)P-YoWXQSGa z&s*Ma7a_bq`AhNs49J*aPf0W^<_8FVD`=9;pI-=aq;*n|>Ip>0uk{eM2DSJ<{XPhY zIM};c_Mm#)3Me|P%~P_B?E1kf&RfxcI8Zl2z(BC}s5Q`LtJ<xN0v91sf{NqwO`-e- zfZzrQbU{f_^g-C>wD{v9PkMI2j~0M~Z(oe@*U~j;`R!T-9a9K2E02=Nmu+50GbxSM ztH99`(&gcVLH$mwLMCDlN*!c-*|X8;nJD#ReY*hn)PUGGXAlV(%DmWM)og}mDE&2x zzj-lO>+o88^b~b-^AC4(RO|nso7({=O_D1C`j2+?T}U!#boFxT>PEzi(Ygvlu8Kp* zG<z$-^U?z~@wCq5KvIUU8uenM_?wq{tv&VvxNa5X`kt9iv%E4NA4tH1=J$0#HLO|W z@BHihjfH#nbcL`HNDXdk)}N2=;JPyEQ4N5jvzFacRIAvDVa_2^D8aHD_u%srn8K0` zXrcUOVgfjKs*8cocEEfe3Uoa5deUuq&qpNNk5}cfR**kCDSHe4pu+tBa38|P-;h96 zh}A_<mHe8B<^4&jO6<n9!h?y&kP-e#)q+AErs}rwr#GU8<wvm+!=ByTYfT91*=o%c z|1jLLg;ahK^0m;_{x%*)(DdOdEyU-ar1kSrKdpu2EBpyoRFdH9>AiLnEuOtEQ;{-; zw26qdJ-y754hvVf(&w-$4v-W5S^UFB;L(Z|@wEt~oJ6on5<M4MfkVop&ma^S@te)q zftXJqjC)eCcG995iBEkR(dMW4_D4tgOy=xVHbe^C<_C5opRYi5sI{WIR&jZ2FX`cd z2C*I|?*V$g8;iqzR6$3m0B0Kem#|GR<s*Ua<bn5xmk;l*hZl&NA*Uey4lqH8Am@s7 zH1{nkm7O@Vxh&Zni9hp6{H-KWq#J2sA5XeILRad;Ed}r}GObg_K>pkAT1kL_S{@op zrT(vkn5hqMBE&o^5OYX_gONbYSQF9aM?lQMa@@J`EfA9@5Hprv(_NWdT6&>m-Ww7n zKZQ5KhkiQmh@u@K_{-?|h?<Eg=xlJ_uZn2c$g;fp{X}JC?uLBe<zCc{BWYiup43oo zqnk%B1A4K?9K+x4PWWEipKlOt6Mp6j)ZnUgd45EQh7jM=+X6rTIjT9cg4Ep<&!HN~ z%!^3U-bXhr<6IJS59Fd%_MF_)7O6OlYBPqy*Ga>2JsmD%!j&q0W@EAzzZO>`ZpFRt zi?i|3q-nsw2q*c>Z^LIMKwVn?0Z~@&XoG3J25L$}Uq*5^^k9i879gcPd@tuQnhcl- zWhJzgr`sCE-Tenj13Qd<Vfpj6;X@}b!<#-N9C&-t07`U)>d#H`(!gfpa)fvcJ^kKQ z^uqgx|MqoIZ4()g%H(Yy3vk;<HIVR8>Xbb8`YVZI2sOOu*%V%c6=PdT@dCHui?Cf# z1M+e>nuM_7*7U!hhNI_j4ipzhuAt>mob*yBZ`LP@<6g<+xYMI^C|bvo0`GxO!njeP z55UJ-ijFCDF0l3xKB|Re%Wm8V10g9oBY}^qhAFF|#)mT${|ELLkSpk(xSd+yNcE>G z+mzo7DfqmS`U!qsgWj%#JZFpLN>GKOAw4X(k@yH!NdYgmjwkJluGZpu{wa-}LS58~ zB3mi#X=NAfraooO`7LO~7pkAwT`$C(l+)arGPIa@5><!l7v@{Z_d@mg{JYnFU}rDK zBnwHR8u(EWJP<U~ASTL0L?eV+NVFMCZ`9)Ve;>ZTz?~$8h11~62Yh@fYVVB$oZcbI z!|IfVS70Fpz$&a=r=>lHi0#4ada>!bINSo!D0WMk7BkAV*s{6U72UfEG*h@)i<RVs znAiD+&9(v32KaO-I}nML=7wS=SRTKLUFXI|E)>7l3I+BVSHp$sHi)JrY=<}-D8HO1 z*rVl*+zTECO>PN$I}|(rl?~A34!68#-$To+_c^>mXCG2R?}TFBC-4?wx8Ul6(#lX^ z*Yb;1wgn$3QS)~Mi;DEDuw!#zmvI>G<|=E<Z&dR)tAWO4St0oRhGM0aNnDEC8Y@A` zca-RCKn>88=(Pxx5E<4`40|4iNBC%l0-qU~xX(Pq<~lq7izW(gV#H~b;VDhfQhXTT zL$~U9+ww*MX{4en6o5P56x5-uhZUIqDe8uQ!%C^XZgb*(yqjsyKdmj?*+~Oj6`2{2 zT%L>Bjc*~vRRw1w7Q-ro!EbBlH_b*Z*n{HyVi4vdCHe_wNK58+Y|oOpJnt(SIpG!t zOEKJ^am=1FHPAEyVj`?0SJ=h?Zb<5_0IlVHZz0LIfkq`d6FJ#+HmozyX+f>XO5G(i z*Kv&d4P>J8v=!}Ypk0ZM5_MijmoR>qRUKe;HNb=#fb4@CkZj2D7_{Uzl*cw=yv9nF z$a-)aX-ZnU5A`JuibCzn=Smc4ogD%Nup>n-5hytCdnmZ!<`fE`DF_Gl>myqnqWc5+ z&@aiEra?H<z~Uw_&;*LO4t69Qbf?Vsc6SJXKnh1MA*92;us~u!zg%_%;Gp}k0qi9E zErJDsMkBi$ElE$hSE4gOr{$f5D!{GdGuuPO7Z@)7*m?{`{OZ(OE#6pjVh3=8WjMk< z3k5pKdIK`592AP-zU<eDyx`vstDl1{apDR`KHo><#_7xssS{SBaD**eLc>T0q^97# z@L(ifTFG{^UFeAH4X;Bn(#gR=4R@|16(25P4XCg?i{<^`ZX(TA5Wh1N*oIrYk0)|b z9m0|{m){QOs4!^=ZzTT>Nc%*pi!Z{lU{K_N#aTVHteGESk!s=_Zlr<v2<CL6&4c>b z)WGEOnk3PsaJ23jl~O0!<eh~FlV)i}BM=UOY337PgA50XCDa%!az%g-S95Bd&I8!7 z5+}q9XCdyml7j^d;Cn+&G$i<v30-~!s^$-k#CR-2LL0m#aP4;p*Qd&{8PAWvfSDX6 zOQ+hR(m;_Y3;Wt#DBJ}#NZ<$^k=n@{Q3C4@-PL&lwr2PM{tYoC_m<{qg**7+r>KkI zhYb9Xfgi^2^rhvuANZzACEZ>i&e~%QKA=Kfwi^|&sDBNJAOzXD0Z&?h%LoDFtX+h} zml26zfrju42t%7m^fw-_tME$Kw!DLPAHN#@6A(h?r<}Ft_Hx#)46~bavEIXBn~vau z50Les7jF*|Z!Z9E2Y)v-@OJdc^`B1x9KqY&A?BH|HsvQ&c(9bUhuAS(!X962CqkNv z!2saiID|lg2QH_-oDY7`q`PBNzeVqomssA}KcPg=CwP?{d}k=;*@w4KV5brtC+Sd$ 
z(xEr-a;1*^*_bgOA4SNd8$wy7v-6fE7`O6L);t`Z(?lcSxq?O<`z&t`T8vb*g#sT* zZlu0W+;;hVZB2^*J_LeTd?WZQT(eS?eQ}!6WOe6K1k3&GdLrvKV!1d*d|cjn+s$&H zCrdk6E;@)aqvMI?!fOGyiBL|4K`CXMh_=b?moNNJB5wh<V8d|aCVOydwYwfzK{eh8 zE1esHzZB6j(02o(F?R$fITw88(pO1*OAxmRu{$f#7W!#`Bx!Y>JLq&g(J9H%*su`` zp_|yR!$pvO3=v@tOrwV*@G|5|bz~ntHw=yqAVfZu0D&$Rgk^af=K&h9mg6)ncJUWi z6I;V1aML9C;#Xo41ThITOoB2@g52JdASLUjY!Gw1=Ri<iX~wssd^au28>(pz1ZfTw z5#b~8N%Wg&p5_28zVg;HT%siie<DN`5dN8`6iD(0rsO9q=ALGa?QM_6_u}C4tvvi& z&>Q?C-Bq{I$80X4V+YwQoLTsejgV$L8Z%%mWQZ_1&dmy)LPw)h_sA%xh;f$UTY8NN zmvM~@ICPxoc4lcJQG7zL9iQ6E#7!kMc1=z6{XDcG8bCv^KOzzz)T4jt@A)B^{=S|M zmRp=zbmGSGSy^tdXrC5S+amN?Jr>Gpr`Rs>ojny=V|**`Ei^VVL8p&;*SAuuJx1=& zRsULp3T;ZBGfT+}Wd*g`#u~f>j4yB?l5(sG;yuE0WP1^%sW1MnapPi)tXyg=53k`| zip!%oAH`udGzKZYjpCsnkE8&zS}C@jV!MnN!?m1RfIX5Pib+7qFZ->9<oo^p0|zU^ zj@B~=2;a?4kC7N4%}iwU8YD45h;w!iQhI>OdIrc$fU0SrVU4#N-2()!Ljwe*Uw0G# z!|@4abrB}o(J&1V&R^iWh8Q3qZjfw7#V1+&8*hu@sg}djGu~o+z_S+1@xfTouyhZT z9G}Ks;}c1>NBHd`{DKl9SwQ`)EE<F`r?@tXgFS3k)^5NhMu>**8VqDaLM8{ujmZB0 z-T17doe7=gY{P^R_o|V>h=tw!KVc!J!z(-{19`kg27G+642<XZ%0L0XQv|a4Eixj= zXUTxZXUaespC$w4yjTY2@&Xx{&(D#8B7U|ERC2EjEa5pKzzApDCd0%w`M2;S)EHYy zVJ^eOR``1|yo$oRW%vaOZ<67cDZEC8u~^yopJlj#!mDJsmBNq9@NNp%%kX{*FO}go z3RlW7r|=yz+)m+g8SbKRM25*(i3eqv4kz)8WS9gtK3<0ND14R-`zV|%!{Vs4Q-%vD zzUyVt_aX{^A;Uomx5+Rac;;`(a2bVLDQu?hPlU;CTF*G+dtIKs&%k=>;?If__<CEw zW33V~D`iYBV!o3x%e!k5G((GHPhH_WWPD3zyiOLyaSP8@88cnRj7Lm^jJZI@U`6(< zmN6q`Oc7%KEMq(}CWx44Wz6xv39^I^-Sec3Nl;9xd(!8m0AH~r+oXq-L~i2G6GHWN zUi6ogLgh@=5;R(oKhu&-da0Y6=q{<gWDby*+rawgQtSIC-@t8D_;Rjb?{FoALIZc- zB*{3aAeq058sx1`tFTJ{3(hLS{{>gD?#C5XaKVy4dxhrbasqD%fj58>q50_x%}*N8 z$EYf@DgFSU&%M+GD8A5%uT?<Aw~RboIuV9{Vtq!~+6d?-U}3WxpC@rG?rHJ(WC(|@ zMtu7BV`|z_QlEu}mAZN0T%xM%P<^Psg;NG)$tRofjU0QrV~Kl^rMq80fZ%<A?Z@Cw zzStY?EfSY%y&WH!??&e5gv@@x<<F_2(Lg}*U%=&7w0Zi!p7m6Ix{lWP;qrrZ_*&id z7(3K?L;72FpRVk2|2gBcb=%<Aoc?Ux8$F+^!-wkVdv#d++^G-NwIr4F$LerKg;w$Z z`8VqrooY#a=}z|JH2B3TIGVaJ2>wg<$<8ce0%^~zR>T=!rIt2hBt}VBWO|NFHx6s4 zdUykULT@D`l??q-^hXPzhMP4Uu+aiori=)Jn8Ts0Tw^MNn5ChtJOjGCMjw3!cn7Up z>GktB>GH!x-;w+ki8x7<Uc3KT4!-f*swrEb*pRLF_#F74_{V05zDiky?O+#-F3<<y zdJDexPidvG1}%5;1}09nhWu0LQvjrO4ni{m5wM7|545~TZxV)-zVJNQfTBrULxACe zKb7}qe?g_GkAkPZc3pFa+kKK$UPUA*LT}RR+~ohnPBDT{MjOIT(f>3!g*ILqDxL>H z21b1IXOeJ!O|!GNq2dUlf5=cVfq(FVFjTC=<A*H=yUCG*P;x)*pMkJmmWl!0mI}J3 z0MdPOFt6;ciPwp`HEF9L1DXb7#d-W*+2oAwjAt4vZb>ys$eRB{)(XM9e3q;2zo^aw z@>5O^p+52TCQzaWCw<+iPc|h7;ss}tr~42AC7DfRqJzD-T~zD7eKoarfUkerF9TX~ zY#bol;2U6v`S>?50&p?x(uzks{vxnkN6Rk^ZHMk5kA%BOIf0D}8Rs6wx&}g6jRZkD zCFKZELNz6TV&2*SP~+Y@kzwcmZtq;+qb{z+Kbr?EAz>3pAd%N1QPC)dhc*z<UD)VG z5{wW8TOSE|m}p4W<hKZl5Zqu1OImByTD3|kZShg{Rz<XG1IWV{;G6nPebirEt*MoV zFY^DM`TaHt0b1|v?d|8@e;0l^^PAs1&YU?jb7tnu8I(w;lOT57B^;k0wm#47`h2qf zd~mMy`DW|0tLt-`{``*pS<WM4`<+yi@E7%*QRMYBt6{7&bf#^zgB3|CoLj$3R`!^I z?-2*8Rq?xUVB>B#K-65zP(C#-7PQ7ojBwH;@&SW8qjf%QVvCajqt%$)`Kka+fLiw; zc=fq_t#YfE`nWA+FUfd2UnW%FeKZD6Vz?grBrS3VspjkKb{XT%XIW5}gvM}K%39MI z!S`|YcXYb!??}>e4<<pvNwIu2Z?HeGBKJHupXH0;V?yY|cGmo?#=c_Ez6+NT_2V2g zRo$U4VwNU_zK9JD4#yw34LXbq$9DjmlRlES(dKQk<Je09$lmgKV4byd6cU?(q$eZk z@#bYmkFbmgx<L)Jj0B&62q;E^Ka`4*RJgBG*tC5^SOzq7c-O~^)u7s2&?@JO#RR^Y ztJoej_dab=D&bKXj?K?_-4}m0!D5U{q!xrhJJZgV^#x|R*<u%qkIKxumUv8WC0)@A zW|`jK!t7Vnq0>;E5g)goy=Tqgyo_NzZ;q7;Q}mrUtz)}YKhQ(&b4S#dx6gePanZG2 zit_Ks3;(e&Y?^1Slw$~=7;%NoL5^1J3!Y@=YMPX1x)0I))uobsGrix{-cIY0TP86O z_jSyYXZf4CY^!(GSh1Ukj$3}q#SU-u%G_f#-^nc%`n-+#q-IvaMF!?u*XGJMEF-W4 z<Am9qo>f_*sq<vmx`9Eif(XWkcE&_FGxAMVu#fef>|HBog9n*&Bt749Wx9SSM(O3s z%Q13$gyHl)F0~ZNY0O<@BsJ#F6CbDe9PfQRS)i05IhZb?g99ZLha=_%!Qyge`&(iP z!`F+@JmEz;Uhn?T**p+*IjkCYj(1;c9J)}hC!Y_sXGf0l?r#-!Q{&{8ygS8nO2(D3 
z%mqW6o<=#pVQ^@t)63O;#|GnapIJC8v@=dlvmL{!7tg+J&R_;_`L4XTS?avN>$?Bz z*e`4{{D`L1xr{Jz!QuRM1Sf~Lh1y~aCsw0StG*JF1y4ZrcC@*i?Yr$tq#+5%fil$Z zl02)nWyb8=GqiL6JF(yBs?Kk|NCLzdG5g;+!tN#G!iX-G@Z_*HD!ZHA+eg-UG?p^u z@_^`e;?<l@d#~#-v$VYlt$E=c2%VaL!!JyVAG(I)Dj0-M8vi4R&JjTKyl<rSY5Sh+ zi&{GVn9|r~eoSK!S-`k}K5)w~VR31MvMq?>*~X2yg9*7`1c&eQlyGd_e1hOwL6;85 zd_dx|v^Iit)`?pLhLOe5ZR+P|$qJinQ}bPv?h7~rgIK}sZrs~ElHPeX`T4_%&lIv@ zK5d&X!zl`Hi43^&e{SuG%YnCU(Lu&46sS3u!{Vw_s}WLscI<7fhD2g%Y2m#!(P14% z(nr%QVc}+qlRJFtIuRCD;nu>!d-<EbMyuhJZFqMH3%(Cj54DB|Ne?}P)m_Q<9=g}w zY2jN6?jxWC!U8E+dJX;YyY3)@_JPO%GrubdOFZ}~fwd|_k(I@XUEh0Wai*1pkfTI| zgDRO9Sv$*?Tp*gFNCn2RIGhGXM)Q-+`LHS1E$+u243uQh=bA^%Y=|T#_qc{WM$U*& zYJw7$J;S2V)R-Sbm`VujF)A5icJPWu^TA-E`9go8SkeZ|hy5>>tNA9~muSZLWJlLy zsr+@OWmEYwgJ~vAXzFin(01Tf^3s|1a1mYy76q>f9d{G{_<VJql~9*HASyumtQ1Y* zFl|8L^3Jq$i4sma(MHBVx;z9CKTExxX}1!JZf;PeG^$9-_V`g`NWY;XpK#<vQeZ1U zbZeSrYzRG771ihNdG@hLR0cYt7eK#a3`F~%n~J!(k#kxo{a4Bv0J~neYAPzZp^l)( zAIu?}=a9T;_GgP`KQ_fhU*5H$Z)J0==*#zN^;&5%a$naTxdR1k6#SZQ2X8?*+ZS#Y zBP?EyQ!UN*=Kf_#7Uo(}&&+)b{arQ{AL~a*8Nc+(eP>!R1lJMKVi@QzTP~6PxgGUm zJUMj^<JhqF(1^I2Cei~+*sg8z(Ri3Q{7f3uNhEs&e5H+jBMiRPsw)c*<Q`VzwrezG zq|&&A{c-4tpGzy;>RRC-<;XfFUns-0H<3VeKG`jkN@K@Rt-i4Pbwrlx+@!ugXNk5H zEgh6v2jOPh4>ev<!11HOOYgZCo}ALRGdMLg^_=C@cJKtI_32!fXe2_gV1~B!5lMU$ z69Ju(_(w58fZ|p&I9YL<hp{J!K!4}$(LTg{2xrJGx35^85z3X!XheyTcEqZ8H@+HG z@NCFUx?~M_UQXWxo|ofhLqR&dO`YJ$l{R7DH}nsp<a0LYrgs{i(A3)+1>F-5L3ij8 z&=s+1&rFT*HxxE8R+MiBo1fg)g>lT0FxJS*cp=R>&3v2Sl*-)D6)kcRsE^A{T6ZU? zpXe`RBQ5Cx+}M=vala-jxtsR+xQ~d{mT+7$w-4NCr&I$xTwD}pG?&Xho)A!vL1D3D z#J*B5+m<p-EeJ>Z<I~C6R;HQ}Ha@UU(1(^xNL0ZIE$8+#&!KO--g?iVp-r%_?5W$_ zDc1qLIQq*@--JX<Y#hnJz**Ad8R3EtL@3Ni?o9js4C#683YCKqDDrv45~E*g6-$iB zpqc{r-EkxekV-PgnvV06j9veS-KF5km%B*9AEWsz7l9|5_tU$}#ssP~?N8GPAEify zHehGnvXF_Q;F)9>>h!o;ZX-ZJS?4)n%%F%0uk>4zQ#PvQ2mJa9E37TKLeG=NzUde? zU2!+A(ACf<*DCfHNmzRz)<&;1I(L)Cp}&vg)uJ#vCKAi#MplIVcZ%-kzMu}yxtepV zlo3jZ&i*3r5x*`JfzIUiB}YLsrwil5Oh{*Bf#=3wgvUN+t__d%?~gEn%-{4)oal{j zGS4iCHN)FCwZ;2lO&^-f?nnj#A1W@CM-rsqXOT#|o5q-z`>|^UFP244p-Gl}k|Ra> zrmU88c9?sA3O~`eWXqJv@Rz*?7V(6_7QpUM{JV6ONKA>l*>I5?vse;oIA)v2iCqHs zHc!8VP)Q=~rj_hPG=6o{hw-wtjY&{W>P6QuE`M5d_*%DdP|tz<;zxj5(aH@IUt_{k zLR)pW^$zrdD4{hfvo$On6o7*~)&`w5Hwwq!wFE4zF?Ni|=x(nz68l&jVlk$(k7p3v z33Xu(eTN4c`)nVZw;_v3XFNuRs6SmTO-Lq6o;kCllXb6H@s?rL(i{rMdvr#kEyRNB z!w>K!FFZ=Fv)DsN*?bKYKw~KUk&nYZSQpQI232~=q-9Pz=QZ=`m{EYB;i=Fy>2Q=* z{p1_F|D9=R_UA_XbMUI|TnokvLVc%E!o83v#r)tdJcN>6d%{?zaD88d3d+>4YhSqL zX#2vuatJB=!nV4@6kFY4rYJJ3MP00Akt1?*Uidjw6KtiMT|IPesz5S)KqQYkSPAWp z?|`9szMQkMX4M0>E7`S%`;tX86^)8N6qM<cbkE9W@<>C5>OAywo;x)83q|bcNAg@R z$Mq$yrl%=WVeWndB^{BIwap9plPzN&>t`Uy+*9->kXW$~;TJ_7;vth`$!K4DGtf8b z8WlXbJ8F+;T9e4un>dNM*biV`VlKRHnc4g7W+@ZrnztL%j+lT&6?m;P?W41G-j;pp z!dpbAdB2{FaU!2x=45tHQQ}xWNhlMHH?s(#Pcao{%l>oCVqRM+{Lww<OD_JN*1eF^ z*V7W(7jv46+ThZMR%1$@YXci_o4qaG--|u-IB#f^8!ybD+di>)==JV|JO;XWU+&Y! zv%ajS(I4Bwx@qq@wG61te-2pJQplQklPD?sTl{-OuKH{dm@&1RYIfX+>&QzL@qFr< zd?5!$bqV2*WqQ9~)^eWoFXz2;*_98=1S~tWC{+bVBfr@9NDb$kmBx2_N=K0b*9Otc z5QWJYPF6&<Ct<bDt!9U`EKV+<gK0S7vp6)Rc4h79!lhfvLQmJ8>XeAtiJmefLXjS` zr{;;Q929e@!4pi!(Th9y$J`etMTrcTy^NRH0M-S2)|^KV8gU|RnK$FI`V!J+z$@pN zH-E;U@J}fyP*M>Ky@Y&>H}nKF6D>H4FU|2Az7GgJ<=69vG05P*)E-zjMd$Pj?&jlO zD+w7+62m%Tzo7d=jC=@*Ju`dEjGmheO+DXQy&XQ1X2GF7>=vWOG=f#f5qMybCyNOr z-Q)QfSooR_PulG{QgL~rMzm@R<q<B?_uh;*uafuN?F-ZKX`C`?YS3j>rTG@cgH72d z+Tx6`iWbX6BgZmKrRSMQbsY8Vu}+PY(slQZ+%uM~rvjoC{b*lkV?M<|bUorfU7tQX zcf477gT3LxVc%X1X<qdsP6TWa3d?mp!V<QHHclVu=%dXO{zmj%qDQWh0zV-YsMlS! 
zsuwf09p(xoAKhgYv}DGJD%F8n0%?0G+`6=jxb_jpr*MYT#aIu=BVLxMPktby+Yu}W z{``j|0iLl8^b_8&iu{78lWdV8&m&T>UnHj@h$dHKQLjv$q}2wrh|cuNEDSOU)n>OF z=F2@FMWM%J2I5$nE+b))rLwcj9LScI{w&L}*Ln!Sy3ZoahJjczKC*@C+7Or1ZbCoW zkfnvi4b^sg=Dzkn3T0`&MbY)J)5D)i<1E_rjoAKt-rUft%Q@1s^4`ow0*isq<v<L4 zUJFo<(PCA^ZLYoECZ#>;Ay^|{2qvM)gL1KKC`dB*U7gto4143aKLQ_Gi@uWLdOT%q zQMV`=6WD%nhtEruvAxKg{s%$D)ij>QDJSYSSb8@`l54~2Oc^3JwK@B5>MAEU;Y3y5 z!`3lqC>{{2G`1{l+3XO?m&ln{ZXdGx$ow!S&Gwi(P=b&amBAeVhgl+Rzn}bQOu@<K zda3YUY-=z1KEbjl_*hCnLgY0&i1v-u*964s$|nEvuXJCtQ7GgOEk@&iPyr*LunX7W zq3_oR`i_HCn4A+jc!XFY1Qu|$_C^QNkgR)*!N+a(BP?~lI@EfwD_bbnL+P%>Qo8GD zB~|8<rZf(cV2`QBnm&4@NE~ZqeP0$kX!b&SEiZFLA>X1a4>-rrILlenU^yN2PPwnP zGwp5<vC2fO(4#l2Sek3iTA>z2C=xOBs-6iIhzjcS61&GRTt+ekJX>=B#uuK|C0v}Q z`APO}`<oBIc{Z|Q{LjL4#RX8+T4R_e<3kB`?~%F}Mp{aY@Ycw?>}?++7s}#}RyhpE zXVrtgRx_l(equef=0i<)jtZy!22S(-PPkrl4!`g<=b_p87qk<dc`ap~xi4u&@^mCq z#33n+ZD_?B4=4?*e+l03%Xvs^jz~sl+8@rKA*9XiN|kjUWagJdS-3gPgSRi-vPSaH zeRk;uT9<sgH|sg>z2oABe)+Laq3ZZ)cqfMdHu*4f*KCCiuMj!bm%ByO&v&q!MwIUG zpGCuC-9`tDq>>&gkJoHN{QD)X&zHMx30Ep&!S8-bD)84pZ|=*%w|(K?i0tOejff89 z0AILT^mdJYWae6N4`1?fcgTEgOZ$Z+l$ZO|QayP)SHC>BG(iuS?H*ncp_8?k{O75f zETJAH9Ur<TIi~)loQt?TC2z3tjNHJ%625D)vp#;Z-?5MdIk{~k^1()_iFP?gJn3gr z=A~IW=IUt75HUH-2{&{{e%6lsZlS&M0~RoUbn#~{HBwO4;miH2tLbAJMt)Q<cP%YP zgHkKVTiW4sP~1GdOF-{dk{7FTq9lLXDU?zqb3-&XN$zJPx4n<8CH~hZVO&NeIKmYb zvA1cZ&A;lv0Rr130a17cH1+&bFX(or-LJ{!YWiHNBitgTk1k~$TA=F)7}Y}EE;PC{ zT8z(G$d0L>cZmM!xTDQ8E<M>U4FbF9T`seAPY0PN>XK;P)2@<qtDhR@cVU<3v}Xtu zgnmP>*m7^w6kY!#!gJ!ng|r(~-M97pemeLgAEJ2LC2#+3HMDD)+3j&R9`Kw=@mM!1 z2uFN0#s2wW&Qlbj);<Rc{nFyw_k?fpE<v;X8S@8!5h8bRl(k7QVfAA3sG^`nw<3rh z-i^X(7i*Xg6Ig^Mv1a+=*Ve3uz(RR%_|-##t|BM~0tqTph+Sp^__g1m<KW*Kq0`87 z+RfBz;8y8n)Dzn~ZgOXS31x&szLN2Lm${XVzWng><`cm1Hl`s=bFqzHBebZ<={4Cn zR9@_%<7(@9n?w@@@AY6Gw)D33_|m20Dm#C-2t5TS+}Gnq(Ysr@`$<c=`&;O^_QEAP z+%lRmCy~MSds2p@4z`;G3kKV%W-eQT)?mZ1#SshXVeP@T==(<>Y}*@k3Y{`(vBq0H zY4L=MlF`*klf`&evZ6!o-Jc;eo)PvqH9Z(-A%GrodyltrBRvv!vbm1DEi~Gh`E?$7 z{1y2xAoAZL1|v)NSLl+CkdxfQ#)F8=oVnA=1m5sla?~!<oK6PaCDuo^>|$SV9gOvn zu9{JWxgWTiUc&ttEruEMbLNB00fb{IK>#Demd>~wLTEzKgA;94T+4CV+pK`(ahTV2 zBNq>zwuiSMc>bAHntU#@r4j9oa1wBvv$M5e(%9hM&ekr|glj-c&mx#qZw-!ov>%C@ zC!k;@mNl@;MYk;CbZ9&M^;X8_JnWcl4ZdH{e5#1R0S4wp{^rvzCP#9zwm!VMpBR%0 zCY^Eto<_D=x!*cYcA4p+pjMgnvhwYjjbx^UXnj{H7ALXKlb8FAA?oGtXgiYTjl^LB z_RZCj!B%5iLGu`rKFBMp+D<{X-U<=1L#!hN6nTzUC;(E%4P4$XliGtEZ!ah_Mdmn@ zZECGIfNf?L!{LBq{NcXd#wGD;s;g-&$$E1xj91v8&=^v9eVdA0(R^CHq|C8C%r)<S zhiaCC)2mk#u3*vvVq7aR%Jw6t>{aHgQt1?^vS3opUS$l29ru!!1B;QO$J8tf_nq7H z$Dqk7N7N{oSi{@x3h5Oj?5vWbccU)sHxyRruq4s|Dj#0eg-UxpT#Ko<y{fQzY~&&` zb*&J=9PF-PBev!27?xpH%Z@`qS!;JT1)Q=9)#7V01k&nlRt~NvnK`qlRnVNd18&{n zBwZ@PAWI*1Bo<*|n34*IIv%zs4oKfI=D900LkW^K^7XxkPys+-XA`ugD8}^fvA7|% zS6eW%*e=on^RE1?m;JHDTxPfOB$iMp3H#QZfcx@vDb3d4fY7t(LxhBtP7+$vtJZ<D zkQqjQ&YaH+xH6Rdl;J>piY%Y@U-5ouKb9>@#_+>g<`mGBp`25E=CDU}5k$U4#pQgl znI~<b<uyH#I^5KJfMpcXce0l=Jk|`6$zk_Ci9P2pB0rg>u%RUfg-^H?5qF<I_wAt1 z98HP3X`%%LyMLGjWjr}dI(u)F+bgivzNl=yG11JKRPPLql!*uT#6lh`;wvIHN4K{k znA7ZEiBZ1^t_`xQF+2{&#C~SZ1mhOhhFI4lPjC98v;Piuz?0<Aa^!K>Bb&HLLmSH6 zs@<*?boNKW3AMQPN<LX<k`=B<-^rWNf9>3~in~gKe?==2Q_p(YtMj<*39NS?cdh>0 z#9#VNTc>8QFoT|vbd$uUMwSqp{v$F{)MH<f<(}RCaEw&ej>a5iY++0>uN^3<$-1%V z|0T=T`RqeG=y~49;cpmxlNWmkh%yuD$a4@Lf*IyUve0|#Kg40F%C(PV<%11%+R&#= zU~=P)70k>-@8O1PIOKw1@Grcu8+&qWsLu$m{!1fAjl^8QD&IKgdL-CK2x|>p3x}9< zNSWRBu{r}$erdm(&*4w8L(sGe*Lo~%Tq}v^zGl4WTeW0d4#qbLmKW3M-QDSRJ-JIZ z_tN;o)e~E^rJj32?;T|SAyRI?-}XYpo4d#Bnzjd4C?q2-%xn)1H8(a&u@Xtnd|o@H zYiXY<2&~RrgIh0hI?M-NB~nY$D9VMF*^F?LE)%z*W_zM97%%W{OdyKv`}?i^+EoSF z{k)TRa2p%`QXrPZFs)LkqLI9zXF9#HujjYSad=y*_WM@)vitcacN+7f0Z3sIDH!LW 
zk5;%cA?i&WIs~E|kSLS9jc9C)jeaD~WQjAJI2qk>tO#EaRpLyJR*c9C>?zY^635vx z?Aq~Q%To0&8F0&3-Q?Wv>dm|miq81^kKkm-WsnC0BOj4#hg7f>yV2FOm~Wti?QNOO zP-g?Yjn}AzVBbc}M8rkn8_TnuU-`>WRC}v1`~fG3WjOZ~<eIL~WIAbWjmNtxE^`Xz zF%t0baL7GLUwN9}`BZxZ`pFWH$KSbwk-uSRK5Ix=olOY#!%A&TyCv4OwLd{P3aAm& z1;k8<KIkW<w3HM`&MxkQ<D|G^S|KA_yRM$ZtiT9T#OyOWJ9`$;ZyekBxK1d+IKi_r zE1JhD>loom-?)B}v-5M`3c8}fg7Mp86Cx9AcCxbeQ|snMFC*gFX_3>mGdepBm)xTl z|2v$dO-EFaTb}80T`Lo}2ra3b&>oAPF_C^kD@~qo#GCbrFoJ7^tUTv_>S{89UTuml zKkJ=+v5lOGihZa3x59(r*CNTGFXNV_gKYgEK6_(dqsN<;^SDZ$=upOcbd1wnPc}K^ z4dSGlE!RZH8816_?LQ*z&eq(`K@2Q!#=vsq;-2{Vja;${eHpWo7O*5`Rcw?{_(G&f zp)X^DhxtyHl(P0jQf*@Ge?1RjrR+s>{7Xy`5L*kvk826voAuTUCP&neTST0n@S?UL zV{evJoC=?Edtq>JXIlPP+&j#HpstaAABOU=MK>`Q<&5~*Q#;vTwTS9*-LyUSljbGa z{&pc)?rV=pQ#J-vdMC|MM`7NXEmOu6Lg&!cU5v|`WoBjQ0KA)rUnL`dGFl!iH;awu z80(6Fma`9bv2IM|q-4#yaqXMQk7Kp%Uml5dWwvLrE@bBv-BU3(@9w9BlyyL7+C|LI zX|yZuBY^O)t7#oB*r{epZyr8N7p`*Bjrw4$F{83M3kH@vqSYjfjF+hR^zfP#t>Tr% z*^?u4h0jwDNh%m$**u8ZhShiaw{Mn#g<Yapv+e~XBOxgWy^+fSv}opOk;JI~7V&S! zP#~&+xgWZ&y-(Qw*l3>8zjU#EBKKH8X^XU)^L4dG8H8Gq<HXOKCA#LnK8QVo57>5( zRClJGb~4+WT--3!{2ePP)|h7Q*3NkFYaj8AtjI3l07&@5$bE3n%Y18>OED3}Pc(nU z8^hJIuDIR9vaS;ICMHdms>8hQN$f?UZ^f{B6uoz@1=sd@wC$N;<}?zY@CHX<GP-gh z#r8B<YQh^FfnEJBh~`fH>KYk%UlpQ;KP(9Ex9#(Mjkh=S{>Z}1-`56uXvPI@ZHQ*9 zX@VT-ZURIV-&t$zE`s^mB8`3fU8ITu25a-kb#p6I|19%vD|Sf7mZ4gT)HC)^t=N%T zB+<0D*%}f1KG<?`qb`zyu`V(2v&(E?8iZzGnmM@(4f9-`H1aIpL&RiD>_q(?YzK7( z>z&_;R(>M=Rf(u6TknS$__5Z<lM9+X>3%NE>M8he{WT?EGxwoJudJBAzTLAv9iNsu zNAsfFWouxMF5#jF@|vFGob{rO-VMo-zN{$+e5<%qtRS=4yla58IirUJZ}C9&Lab3d z_9s_;+Wu|I(-$Sm<x4V)6&V__c?qA(VmE7sN?Kg2ck~X~W^2sdWfW&UZ%js~Y@F$# zV9hz9{+;GvT)j-r=sciH)|Eo1_OFmue5e;@pla$goaCs;@e}XwN!1f!9r{b!V;e8t z$EEWKwI_4S1%F1%pA7lq3Vq=ThJCqThIhGc+{C@s;T@6wtN=y&grASZgm;CvJw}pZ zzrsIyvvJl`nN1lvQx(Y>Crwop#TYSFG4RV9jmS8DssbrvK<;K^X#1)30p9S(k(4K- zeMJ(UARx9QIAj2coZcrIc@?FQqJ|Nx;`=T@fZBa*Q>KaU`bKX{-g4TmRvIayd>&&k zrZGM_hCiPsho0t+bm9qKB$e2ZAm1=<fFEJqMqha!8tKnVG7Htb4AURY{5K(QtQ=|? 
zWxhgPS){%P*LEd5V6MR#=Bg1emX)JcL6H&2?}wDTd66o>W-Z$?jHHt0nC(Iog^T_6 zX(vhuOf-sWt!stMh@~fO^@g{P-h|1E=~~Cn)6`*1Iy_a-+|N}VB(2jWeJjyV#`H)u znCma=kJf6kOnVQpFP$IuZB=sg=3r;qIVb4hZxDqscd`u^&S`%R;xmKmOndcsJ#Z9S z>Fikix6+Bx>9Df(G>ORkX<ldA>7c{i8NW7z_-$87lrM6tOd9%l8+Upl{Xz#~gK;>S z<74xZOO1}(BXbNv`g>iO=>=3#x$z}@rV;m}cjH@WI1wr^<I&S@cC=hMjb8Mu{VRRg zZ(MO5x#nT>vUxMC=xzGkSQPHh=^PQSe#P<)Rp66K&M-R+HX(CD1UHJnW$%l0>Fo?J z>=<{et$J3X17^O$f*B)fI-5?OW4Lq_`PWC3CusnpD7}dsWU0=~BLnexKo>$|A=YRf zmG-{kFTrHkrFirvIqdQ00g;&g9pP=GH*pgO7@RYe?N5}~c>^5BTZ}TYcmrhe7N_)` z9dRl+X622#7mAF0)IlqgBw(L`zLo1NZ)dcdvKqasNpOKReO{W1YsJ01!E?t^>{ilM z9#@mx=q%1gV~GG1WxkIOLd<o`ByjG>3kQV0iCdTx`UY!}HF&w6T&?r6B-ik#-Yljw zZXI@qYlR$UWs}p_d61D)PRnZgL!D)EN`tPkHA=2p@sQ@ww4{sfSP!LC%AC*ovi>Ai znq<}5E!=ZCeWvfz-~FDOUwti}gT9qb8j<!liQ?kwMBmhdoveKwBfN!lVSdcIkM1d( z)3Lkq9>`1;w1T5G3T!!;H&}J(YWjlFJW9lNVWKFO0V_l#H}}(pS3nKdbzg%L6mfn3 zBaJrPMd^ONLzm9g^tR=x8Dh0~QjB1ZUTzVx2=?B`rHn9I*;XRMZgD<e)>d;S$7pq# z7k~>|ak(EXd&8a`l=b(lx>uLgY670d50*u5IqYr*9%qd+$6v<UWKZ=>?yB1gpEQ=I z<Sg4{Cbzcrb^20r<ZwYjaFiY(h90G96*!&lp3DMkh$fh~3A02u<FMQP8JQG@EziR{ zE)m7MJ1>gwmV(oNb*7CYk|qsiN*+Fz1a_E9uaNb(q1XV>rvc~#<QRZ1-n7Q@bmu{; zbuCk*_Gzqf>ta5mwNSr6f%Zkh6+BND8<!xfnYU-|5d4-u)hPM(SU^R0Cj3-$kskgF zn*DBV&3#^og||@2o9MToxAC+W%?q(CJjT2?ARU<&YkIA>n49V>sYtIvwlrl*M(n#e zePPc5!e%pmQFtk`hcDa{Du<k;V-YdIXD$?hr-LB=5G<{XNvzO}@t4uT$XXypp!CSa z(+zqQF0{0D4|OLVi4(<CgreG45Qg;&S}%!aCm1zn%i>QA@k39|6U%+w=bKpv+H5W8 zaV+a4!X9M_$rK$CNo9_#8olCYD0R!&Gf#9g*w4Vm$_{gv)9UG7#gYMEsD1E$NuLxk zKhz^6D{68g<TL72vxzA;^2)(b#4#ja>Oo{**$PVUDT3+EfqjLRamsKzJ1P0OJE@6d zLAYBc)e3a>l2?w6Z~G9sT3^mMgR9wIHFmP<m5&XUZN8jrW7A_7QU~TjM6<`33c|O~ zv#M`a@@~(C*&kbRJ74m154u*Y!QpM0JBeWCtd9k2uIC`YO8mud?47c5`kKFGUaTx6 zUM;i~wLA9M(5aBSDhp1NkS__Pg6QCQL8OO3sIfQau}WAVilPMDX@1mtlwjjz=cr|A zOe6{1SY||riCho(k&EG!mf5G8cQVkDgp~GpI-+EjuE-GE_n^z#G6J?_u$MlC3eg%d zX3ZVC1O+W6@v;Q`sF2VqWYbP!b*lkAvgs&j-Fmr1*=Zh2N(C(w`<lzy6)DX6lP{c; z-x4>4d&RQLK#S@P6o%t6x$jr5YOEqTnCkFF;u$2Tt@oJcp`A+*x$XGX`7*El*vZsb z7I*^JJRBKeW{^(-@>e5x>Z0xPG4~o`l}?ts8>Kqf*g(qIX*TG(VIk{6y(`r{5nwMx zc#z&#>z((!--h#gT5BJBkP|@4$6Zw%d)-7m${HaZv{8g#jNBw^-h;39;>`A2EL8Ye z(fh$BQ0q)<94Xu-CPP~0g3AuQ;rYgJsVlZkw+F|WGpSm8rExmWFkdc|R#PKFB_^9? z4+(h@-SbQ2SkIQn6on>Jv8L?{x3NH%pZktK{7Rmya68`juhqi`>)^Lom@FL{dBf~S z%AuV2V1M%+XlzMkauS)rk2qN*)tUCn2&r>eafcivI29ZtbFR5aIzuLBJI!s>niSI2 zR1ACL@$@dKd?dyjiMW4{e`u$F|2zK9UD~?iapuCVjLfiR6Rh^XI1DL-RSzaXO#?`U z#AW8U)2!}FT<&T>KSN*HK;K~L*;zHA536&J<Fn>W$y!F#WYeXyLFAHi7?D{h%95y@ zbp^58C`0&wgmZSLoloAf{Qz6_qeTuOUWBT*kEyrSQYA+?rY^(Cg=hj$6FE`|V$4YT zEN4L(9r^IPh{kz*FURupIloqTdFwpPN<TYomCuoLmTSX>4rffOclmqNnDV)v-0gkg zODq6+5cTE(@ioLEkjQ*v1S00S1tQ@2r!^KhoQ>%8Kg+16a+dS1&`8Yg<$taAkBOuc z%HdoVNsfL834C%IxyUovccbJLae4Q@KD6~X)vB0_frOOIDdn;E6izTVR|{RsGu@)& z2_1WEJik_j`lyV7kp%3MF&S%iz!`e~pg;x(y@@b;PL~mX^v~M}J)tw)-g0)FujNwa zoBMsMK4msLi1RkafTbxM$z0l3>(M;yC}f`MG3S#%?Kl_E8v$$nd>&Y|BMysk4{uIR z@PIdGk%Q^nHuU-}pFjPsifm<g#WXd$QfB2@q{*Iic=-D@dX;G}fCcbV#jq?F3HF*y z#I+(5Ih}CKvz^Z{k9kwf9&e$6EdS~XILH-x1h?xEOUJx&Q(J6HL3&(e^Xg1lJ!N0W ztQQ(KTdQWYa97iHM96&ytxx(Znb;R_cW{e8F2AKXHg4%$lv%{4R?F~<L90+Y$X2g? 
zs-_TmrZ6^ji+9yD=lbLz#;Wq!#A%L+^!2Qq<PRluQe<|Gu&?dRmtBrcJ#z3({?r)n z&3&^gC#<%=hb_&eLs;#yqf0~`AL}C@d!J-5$1V-qZ8Db?LpD@FGa8G?bkYfklp-$y z8T5Fei)!M~I<#h9kt06YT5m^$9en9fGMO>UT^(-%B~2+jJ(l@C6oRrSh&^XsPkxd5 z&^IwbxkmE%^Vk>5{WO>*!a@<Vwa&EHhDc=IWT9RX#%{lOl|8QCBK`E9Pp&BnD1_=v z+mHc|##_p#_%I_~hmY(%y3BXkc(eLieduWUQ*EHsB^b(Doac}|F#8NeINmXXB&>59 zi#Qs2)hR-qePSyZVXi8#rIIts?Np8Hk@!l!NsE|Q**wj;D*ggqVeXaFxIl$V&Go{- zJ|R@L2mm?anutKgDG5uP;I*5j32t$=Ea{8ZLM-EX&_sbtD2hlZm0%`Av;5}1^66MP zG;a3qDwgTiPN_;+7;Hz-7J&_oKg??)7I;}O7dd2P=)hptid6*bZfBN2vb~H7F(iDI zIYV%PhB@ArDRENGMTlX@m=o}iMcqPs{Mps?UEu=M9vJ;1m|bIC-7Z94OL<(h6d(G- zX}5k)gsWFsF<k#6NqRTC<=1JyZNVY=VHXN|<~B-K*!&$SSi7ts<%R$J;8b7Ecw@|} z81A5%yu}!4{`Mw`oi>B0c`Y^Zj{LH%+_jRt%Hf^7E%;VmcyE5$^N~|MIafH0?8e10 zlY=MaTo4;P&f9WU9CuCnW1letRto)e3Pzv!d<@3NK9iGSJmVFeqqi_w>x*skvFYjY zPYNpI1dAe*bTqv-z>%I-b1zaZ1IjF^G5@3q!9Vz7KZLDyb(vKa7WwA+IY+@vVg@BN zKcs?S9ZF~xmq)qLtj0;<w=1c+_I`A5G$S@xVC4s70XtjB;X@{1Lk`xFOHu_hM1zw2 z@W_I&Hf*PNpL1kc1<B!A)3H&DS*g7*s{No;&~ljzZe#>*MNEj@qjgup`UXuD>Dfll z4-cVuGCF3x<d1#TeE5;0h-|mmiMdHkry}J2!?svAx*~Ex2gQC+FqX?;=WUzbskX%; zu${@_3|EtAd*@|QSBR#&{IO|EE`U4A-j+`LkN0aT`D4E-5bDqHhTlY$3<g6?-sR7F zEkAaMISQPPC{xF2oC=j0{;?pn6_p+-<pD`5xY0L>7Ux=V1GM#*VU*iyAEX+7$=tc& zC`tZDi3qsylXXufIGATXe3YQq5mYxCX)7maqZT^CfTKm2BN1Z1ipWhMBHd$m{7f;+ z{T(i<l)vGmvU$>Mc4GMJF8D+zUeJ76VVCcZ@fEHuK)mHd*vokYTK?2ZO4!x6T}<a@ z*|@@VJ4Z!MG50~GkXxBMg<5*d@3orDLh`$y#)5m%{>@*&D?u)E+L)@Re6oiYKZq`A zhmLPHlSo)aPGFcCwccS2-?t^kNH>3s?{-=DRc4iTCJ95osO1Kxe_D>x=O{$JL(u&L zwlU~<MDJrlr+JDL1L@^-GfPnHeJhj5BBmDvk7ytvvP`C<Io?T&MAZXv@LBUbT9p;H zOi0zG>M@5MO>~{ujc}mmaU5K`s(;hd#=uSQI#K@UzdQG{Ao{sicVZU?d%*<#D$*zS zFMgNrD}pvX9c;~EnOXEsy3>@YJHl0ow52M9Bot4WXE2JkJE5ap?xUS0=NP%RKOB-? z)gs3WrrReI4^h7mi|{DVQ{7sDW&g8CM6##I@#^3dQ$djKE?pGe-S!N5@FhYjW)+93 z$k0h}+(}<bj&{)Rg%%ig@7w}8G9ZW7las~f9n1YQ*afac>xFNX{dZJ)b7v&ivkRI# zW8js2E4{HZQX?nI+u-_R1*Bg&R6LJ~q@oR@jrJ!S{ibn-AzjSOx;6}fx$!>6%HmYX z;uXoFZzW{sTV?;<Bs1H}Vz!mVY%7b|Ru;3ZEN1I0HuuQlMx8}v?hC<_D%mr^Y#vH? 
znH1AL%Kmd^7+O`pKB&-sJsz0GYK!UI(M6!1b*U?|rh6kvY7-i_Pb41J>!{XM4&*5B z<ksLmY*yxTbS*9?CHQ$xN`cGA#rGUv>+$PhPb~B?OCPD3Xp3Yz3&pfFS4|dV?Jjgp zd#R!zJnT4TjhrNWsbO%Xclo=jqp;;R)j_XA7m9C?ok8M?3=fATlZQucGGMCm5jwLa z<_(i6Cd(`rZPEU8$RCBCXe332)f_GBxur8<PSYcV$SC0#!cMLK((9XbyfA`%(CdT0 ztdP`^KGR;8*?u_n8FPV^IZ1byybBF0p|wXyi2J*JBH<;lCetgEN2TvD7aSf*+f_1) zkMKdq$nE-IW73TVOC-u1+V#EbgZakvXc@b)$JG@8DouELc@7<0E8AjW{`EjsDj;-C zfTel_+9&28RtZGr&hO<p2(g?Sz7bpYvKkhx1iSh?=1Vz;#1#K<VUgLm=?LB>_Wb#f z%C?SfPq7e)CNErIeHh*K;V`<e_M*(#uJ5|olK-Qufh+SP>5RMi%A<?R+U0jb*Z4(F zDw~5B)2hw(;^lRhFk<vxyo?Rc@r0i-f7`0l@?5lql>hzvKTd)5ayuKpr)>DT4LfWY zlWKiG#)jE8^xLq+hK3E7*zgB7yxoTP+3;~2?zG|CHvHIz2W>c5^e6b8WWzIT_+1+= zvf*kQuCd``Hr#2$w{7^54fokFX0Vlhq7Bn+c#;h#+wdG4&a+{q4Ffi8wBgM*Tx-Mo zZ1|)N|71fYqdLEI8;-Z3--h#TxX6ar*>H^wAF$yz8@Ac-&o(@0!(`dt<Ckf}i8egP zhTpYejSZLD@Om4rwc&j>eB6f5+3;N(erCg%3@g868y;)Ji8j2@hE+CPWW!Z9)X4sg zKUK%b{;N_`W?QiM5(}=s)PlXEn)g`#1w)VgJsQ5Uw7RCE+-=mkFRd`#6^p73cUfI| zg}bu8Zh<>cUsqPq&@dKNsP1rO^%bQ?MbB^U;~EtI^>2Dzu%_HyTPJB%l*t#{zqD37 zE30eE-9?Lys=8VoAZV1%uc;uIXj{o|^r(RTI+p0xyY^Pot@w3;idr4|l!mhU>VPpe zu-N`ySDy#+MHa?NEl>@rOx3A+Rl&cps$A9ZPpL7gRt2>iwFh~x4c63HPW|3TsXnZI zvN#^wNA-zGj?2r-i<jSN*{VoKaOV`w>+4kC$<Cfz#Ngw0i`=4|B~>N-lv)&6#Lr0x zv{0N*fRlgns(;Bj4qcBA*w7IZ8yDZFud`o5|HPyLuH=+~gHqE54@u8BX6UftBSyMM z9XmSnxZ_V4bK*%^C!aF*)a-HNCrmu;^zY<Mnw&dj>KSKxywj%p^3FQjpMTDbg2I{S z7M(Y1b}_qF^Dg-A_b$BX;!8?O=a-dNR9;$Dec9zT3u@~ESJXEc!G%{YT71>jORibE zOmD9XV)emVqk2JwyQ03nuHLOwl3gLi1?SG5ZTV`i+4(ci?(wR8=N5YNXLkF{Iz4;B z#H0jot-CZ3sHrY1HL9uVs?rAcf>PM36o130SP(FT<!b6mVZEvf_jGqO|C;Lg^`-TT z-PN^ab@lZXWk${7u?a;r6{QUoFlMb$T1HG_^ho`L26sa+5U8u?OGW7dcO?Z_P*-0; z8aNkd48}&wBlt~7N;t*s?M5R=+J&?83wm(AQB~dGE^TP2STMh4vAaB2UtN2tyOyLD z3K|roy0+S=F0HA)N++LCEaBm8DR2cb-SdN&^6p+-7p(7z>sWWb;U?&Ux(35tQ+;^_ zsY`L{D;k0|hP$rPT~=CCBbh-d!ReH;x&;B<M8}+3R#ShXyE0f?rfI5MXlXZ6wGBpn zu*{(F{MR3SH8q8$)wR0pQtt6mZrwC%>w=e7xf=qdWwdmH*VK{iAq4A5uW`NT)m8Qi ztMX<QTl6-nK)SBBtYYl9r$^6xvL&DCq$W6aXHqU<z<+#>d=J*@9s};_4&kn<C=FOC zNx1L)jdEUD-6Nu|yY6_WA2nWsQT{jLohI=DK{#$<b-fWRt?8~LsZE`M;6=MQ3jHss ztCg<zRG3G4VBINp;WciO#Op4%?gMEH4RusmdBwu&vI;A#v}5uaXVa--QGoVC=PuOg zZlMy&3a9B5BxgI^0$8xxsG@%_7mm2RXB<iQ==8B8m6sZ&-Kgk%k}Ou}(Oh+BP+xIH zu%bbb6Yig7cRp0AQBl93nuZ253J*v#2-XH0gs4}R{x^07lqXx$^@#1EqL!Mht6fl0 zYuM$H@S3hi3}0G*X;1<;bd_Gh>-JVjCuc~54%AiG8eKh=BqQBlh30Oi)YWD6bq#fu zhWq?#UE1kcSzUA~usTH{Xaa3v?AWnt3S;x7_4IbNrS#gt+RJO}uB<(SdbLTJC;j-S zgaige2{zfSYeP2KRIALTqCa*cTjQcHK$K?=d2iu8I(A90AM|?XtjHnXukZEFG5SNk zv&4DG`;U9Q_i1dru5o!I190qhjn`e<m>M6?2)ts&3J}lEZY*kCshn!e2{}b`8yR02 zgo}z+f|h$s<H|;2DTd*ysw$_m@1j89%0S?-@s}X~U;o^y_rEd7MApCFUyk(dM>6_b z|C-d{{|*hmTy_6*sBibLXA0M<?td|CPk)<#(fIEFuj}3_{Nc4)^*_x4j^$nd9N+R6 ztwDj;I=cVGIKJJ#X#B%V|DW~wdo4h6O66ZPM|taZC#!E+U^`gv@ZYYq-Jz0Ix7%_# ztcj}K5*n9Z8){l{-S<~EuL`ej`N0pb|IrOUzVW7;e{#!DZ@umIpWSiinxC)z#kybq z>euV<y8E7ce{<jc5B$e(AAIQH4UcSm^s(PP{=}2NZ{4(c%TrsoZQt?qGtWNv{LWpw zUwHAQmtT4HwLO1${f#%@di$NWKfe3k`yc%2L$m#($j6`j`O}WSeD>GR_wL(&;EON6 z`uZDmV*k+z(9tJ2-)aK%uP*<;I{$x|{(o-*di3vl0{X8mzu!N3!Gg&R(Pau%&hKP* zAwRb`7W30BrLgeS^72!ym!d*8F?r<Yt0-fRSW$1iDK)ch;UVwmG9#1Evnv8jd#!-p z;HAL^)Mw8L*675~K?axj-avh|tWgw})|XY;37%Ckzdp!>*nU;#l-BB3@|C<4=}X#* zG$lQrTH-I3v?Luxe2JrGmm0zPaz5}otG?QHDOFq*tZ(RgQ)+HSd2K}xk7C4h`CM36 zt3%BW+OX7+bR@pSQG}B)itifLvn!%&F>{#~*IhZ=(335N|D1-3`g7-B#@r;odxGw@ z3&{6^(gwrJ9Cu+wQC%Pyus+~#`B}-SLe`~9FRhqXx5$b)XLjDK3FF853JR?7-~l>d z1#;jBs!)JW&;pV`83+WOAQx1Fc+e11LQx?szv<`BJa<lUrW(uqTi&DVQDf)pWbj{5 zuKh2Rzg%OrnAyyNS#@=i$+!49MkJ~cMt?P;JVA{p?x#jfbgB{Kk7-NaJ-9VvWV}k6 zc)dz;tX6#}|9bQ_ixAQsN#Z{e|6$tSk)EK^iJwmVbmFIvPu)GRH90Vf{5#T=dY$d) zDO|-X@8Z6X?VU0Doy1=Dv*?|FsQ<7&Y8d{h_&YJEdq^B-jB*ywIwai;cONwXEu_93 
z@olkzm~6o_n+@%hVex9%{PfnrfwYp;Y^7Fbi8`TDOEORyI0hO0j~0O(83`(5qDy7W zO6wTZma^N`niNPZ>0jjN6Qlan$7DNFV^r#Ile6{vc-~!c$~Cc%a*gjFNEw!(hLyY2 zu!#fIu=@0l!EILAqj|k|f>IxkVL8sut6xH#N|@MBCCus*h=zIOB<c;^ZY7LBN1Q{& zO#`|UmAgDexr>vPoAllF!#b>*NewuX`>152FXxVd;}csQ=*9FKAD`_=hyLX}#eJ!Z zK2jHfj1&8-Ars44^8T($?ikRPxI3ZM8R%Qmr^u?)9nh+uJ4v~p%1~}2ojiw--(cl- z3{)8%L)y}Ichjz9vQjlXLPzIRV82+^&+)j5fxeoKMn9E7{u$(-LH-%z(^?$~F)Cqv zpX?ODxx61ZJ5}<m#MWr}XHeEHJR58prAU1|m8de{%MAD`S}zhFR8?OeeG|_vJN(Y+ zN?pc#r~U3obE-6hr@XI91BbNnDXorFr%DB{RPaj0FLiu!Am#9IyQ4UrdzMl^<Vk<m z<`G?QPF-(SS_!1pkF-d0R&v1Mf*;EJ!xst4Ro_40NQ_a5jue%V*;frLe@G3S_@El- zctG_JSTqkXk4({N_7&Q6@xqhz=R;;HHPOyDV<fbih}>4+U2DSMIiO|H2^tyD2)br~ z3$*Gg!zr_r`j97@R*LX5{2MLfBj+piJWrvWmxWKCE_{U6tL7?o6Hlcb=5E|C@LU&- zGbm0Cn%Gwj8t>9&kT_#6Q0hXSXq+o>ujh%zv1pa7T*WTs`Yp5?;#5Pxe@HQqw1$iy z6wr0}a)0VEfjXovXQj01^7bt2__Ve`yHmRO=rMLvuP#yQP8&D7y%zPe+f%gMAC@Y0 z%zP&NgcI2N`y~9P@;E4qz?2~g;Fk<;E;XcnP)ACeYj;v>|E@Y~W7KS@RO*lK5`mvi zk9g7iKIdEPrI>x>yFkbAL^T}V9u990hlhq!zTx9D+J@|=t@PxhS<pt>f{{f1(jJPb zYxpapo^Vcwa!w<yC||-ulDDI8jOy#S&FVwI!7;E8yqBy7{&qkhsU)$;O1~d`>QpY$ zPtkoD@3^D*?hg`gp;9B?lN6Q8I2BwcUJ*OoQ5k!r{=+>K8VyZQL(2!Kp%atT&{;z| zteUZSLg;w%Ql&29nQ5n)lF~<|OiWZMvxJffCDFXkT*i(#&v)!_R{0WD!VP@_);N=_ z(&3wQ`or`atiCqml%%|oMk@IaqK*ctLDL8PHlf4W)@OHIYfO>V-p~hAR@qZ1JG}Q| z|3JpLq|-(l$!aA1_fXOsGGSo-fR4nrgx${8Xx}L9%!&uE5=QgufEYDke1bI|%!<kW zdu4z1W_aQ!-DP(SPEdm>!(h@ITtBcadG~<U#6bTNtL`4Q`6C7XNQOUL(0+g#euK>) zy1uP8nxflH5@k+QLuN@!=%#n<os6+OQ95R@j~utzq6H+e_+y}5Hu}V_@l5x<^d$y; z3H_(thwqNo&*ke-Y~!hj)}szTfbj4rc)*)_43+RP<kRv?r5@y2YKNbQ`-5L8b%*_~ z@q$mKPh*%=87K75%b1=@&zaQGzpdZyzOC_rxRTiHXgvy(>+$hgp!8?6Vv4MOoPL5n z#O^D)`h>sStJEKUqtqik`KdTXCA<hfrOKGVycim%LSx2ws~;~;gdX(e_3%h$!fAsi zq-^eujo_<!N@O4SDScLIM|Vvo6ge`W;o3vxiG=LG-%b*@DRl-<w4FFcC8$voGt{Wh zj_F8m8@xNUbzmT+BsnUZ6s4rbs?@c~0ar<PfAi^1rH1WNYIn5ENA7Pry8D~%`gg>~ zsQ8Jjh7Iedh9TeeC_zzw@Xr{{xYxUOiY%FHk<^XuzmlLIG`xZSOVb$I7AHaDM3s6& zav(iLdIak?Q}&%ZqHl-8f9pk9wEDMRghhvcwO+(*$JrIN74>WkO}BQwrW^G&c?;Qd zK`otchV1@NXJ@uc1E4-`ZfUh~R$cvUc3)~LtQjZ!8`HJ^f*s7O)I+heD~PGL(<D)U zX>EB8GxoibYGGY@u%_ZHHehG6&qC-oR9-E6RMYF({$+D-HnUhZxRv^IOhHBI!ivNE zzwA!MN*EdL)VSF-70lU>jUfj?#9Lm@1~6+7eH=ZN7_N}G)9V&20HcEHTC%?*c9u~y zr}j#w)Om~4=YqMFDry%(i8Ca{*+#kLNe?V32=>K`0~KnD^|h2e%79G0y{eV<i<$~( z+N(IZamCSnxGs9$qp=CHDPJ3%+N*-NIki=qUf@&45(l&(I|zg(M;zE4_4DqS{03hI zyX2Qv)E7~BsmME}bmv=Js8%7Bx<&j7>gp~J2F|i~zNr9N5BZUNnO+)TT|;<+ol`@7 zC^*Xcf!_X7>Q^y-_CC+5uRu~<tKHrjb~e>Tx-3OP1XV0<@AM+2QiVR}<`s(jb?`f% z{rz&yQ>-+o*Qj~f`Y)1wJPP=zto`(O_c+d~X&?b&u@>T$Hwa+8ohfe`jRR6=Jutk# z2UUyp)@yz_^(f&jRMl;9bEzH8gQ_E@fIUNdI}mPsEG9pyhtRtYy|v}D1J$(_V-z?f z^Stg|&Dn-%G&FeCCdvQs532AeG3Kh3adWH7E2dYK))&_m%8v20#YTnNa^!U2_PaIR zDRqz49;Mc4U#l%L`;I*?SW&;YsG?qLY@kA*@rKHmNu3l|mtAgi_`N;oWwRy(o2@xp zFToU}#o}$yJdaD=rSq9pVG(nMj%~MfYWXKU-f8M^$#f_mY^aj>(}I<i74@{rwwQwH zg{1+DW>7sNwyWI5bx~rdcYB7S+#aj737w_&5pVjTK7?tP{0p@5h1DR{$HE_ydz8)8 zJr@0{uL3)tnqE`aP+>Rk>n+Z(`!27#tw(9j4H|)<A)I{cA))4~1ZkH&`iQIS9#Jy& zs@aMTCs0~n(N)^>5A^}-w*<!?Jac|&eYGfMc-4%&Su^trScfaGVIi|Bb{47xk}mDZ zic@}WrS*Qi(88`jX`@O#E7)r!4489%5Iq`b_Rs#c<yrbz(R`xshwPFhN538&ip=de z`sc&GNO*bv{rfis{!M}ZIt9kBedm;)GUt8%BKM1xSYRnQ(b9MAYKxy+?;U@&AV+TW zuhG_T{IBPH<d~B0V4i6Ej<wx!z;vE?o+O?=JYpaK4N`5<)oDZVOXLys<XeB9=r>7M z;tF)}NFLHPiC+p2%L@7t|4}^RkGT&W&TGF<x8E5UbR3o`b-39!q<h!tvuvpIrW@Da z7XaNnbkvF?=jhd1_)9qipGF?RdASX*1xi^$Jo3GXNAN)(NQt`b9rpXrfr9Tk9x3au zc_iE;JW?j6)cX5tK>3~yQG`D72wkE-N7P}%-tWCWAJ$j@qv8Lv@&B{<{Abhe9lrN_ z@BIJ${?DL5@=<?QZtkQ0{u$W(&!>5G<qQj#qbmpe&*S>f%JHZyU`v%pWdZj;3!{H& zy8qi*VvIFkaKyyv;b$EKe95(ouN`F*^;hp$j-UV1g3Ir0`&wL{rHvY{C;X;gy#5Qf z_4%;B%MV&!9veRVEyH{5@EZufYwi1Mk5M12HP>QEqSvo0{iQ$GG0sCEIq&t0Uw5lZ 
zUcc=1@x4Mbp1-u`?Y1wJ8n@Jn`T0Rhj^dbcrv#qfE5`rSIO93x(0N-gG}OQPyU^ip z(V}Slk@4^N+M;ix!~Py?!QI&wEV9cTO*{IoY`zrXwkIt_wvyjGOgu@PsLV9Reis={ zeh0p=zDLF468qimq|_MuU1T!(9XMcx7nxIjyY2Tu)~i}$zl+Q(zbgAZ!+KR7`yF)< z{d3yyY-#G>?)_H!B5TTTz5PDIdQ~g!ceaD{&uzcE?RRsZ6@Qfd-m%wuKh}OPvfpLz zM1CIoorOjH%eLRIvfthIyKcnzrQ7dOVms~koLjAY{<|Q}S<eI30HtoC^?_6WqWtoi z-7bsbEj}r*q2Go+8+vRw#fCXH%(mee8@g?nY(r(k&*QB0O&h*%!!{efX~R7>eA$M( zZTOrGci8YL8@Af;aT{*5;R7~YW5XM5xY~x%^qcJWB{no{SY^W!8y4BnW5XO9PPE|| z8z$RO*{~lIxM-Ub!bjWVSgRVk{(9_oT{F$1(?1HA*}rIiAvj2$QCx&SqHSD|Xk>yW z-#Y$c^#et-i^coD{44VPWAWQ;dblT8^yu9`^?sLeMSf8zZfWzmJm2M!_WBc^hk0J+ z`74iXYi9Gz<XIqv=NFBK%9N71?3Fw>^E|}!63=Hm$%H+Xr;tai2mfFA{XOmSm|nkF z`xh;HP9LkDvTZoVhHe}7<h5v=|J9HV^+TRTeH^L-cmV_2jkrsI_b`}={{z66c@ok6 zX#+aZt-KfiWZ)+}k4s!&RNu0v-lXVURxk)A_H}6ZFz(L@FYpPT_i+n+gXd-3Ch#H# z#bUy9=3AY^fVd7f=eSh^kKkYcU$XsQ2BI#Y!^8o<%Ohbf1cq#P6L2e!q~l}2{56lb zMVDeLkA&X={FJ8%16Uovn;0mu_NHzD9zR;C9W<5_V82W&ZX$3M&y9px4Lt5RrEbT4 z0C?Q-R+ursQrle)yvlap2;9zdFX49p9VeiJG5|dp;DfgNA>bJ-6m2BTBH%kbf^!@2 zO4j>K@dvKr5&T8(<&;y{!^52obkIp=<BkJP;_={~0u1p;I!(Y=c>MV90iKWb-I9I| zH4iwIPUAxSJ-}1YwQR(l4Xor5`UHSCodIt6-vS(dCS@UR6>uew;3IIo?H2fF9?7=@ zc%jG2OW->^PZ7QiSmCwYRlp7&%~!xvrYZHN-~epnd0)Z<FPIL0QZE+*f59W^uLIuV z0|)R~2OOKHQ~~a6;DbC;#^-<!orTRE+yW2q2>k{A`fR1v;J+St&~KGX<)h!n(<=VJ z$9aSf0{hHhEX3alyp>1Nza6-&P^mq*8-Y`1!t=NVKF1?GBXIh8$WdII<O5>YKuyFg zu$)I|DDZ8DA1R~zeCnM?%D4#l2~RoU6X!BF;gRqYfq&wWtC&n+%{;4I0<Y(hxB|B_ zAZ#Se4q*OwE&l@GobRDCjQ>2~2Nx>!wWI?~x`eT!KkXejn@94({(`!hN7B3n__GqF zG6}N=_y~`L*$C|55!z~4YPrV%FSgxnz)|zz3F2k~&*oWz+Yc<~k#wqnr<Yr_EeF0* zNn0aK2k^K{p(*Zc;CvpzryTf89*K*62-Rx41%6s()oBOt_m@##;<f@eTu#46oo)nP zwt#xUT?5?6lP&(h%WKhp#oY*8$K%KSK5%xO#Sg{6pYllk-VS`vcDEvv?5<}HLU@7i z^9cWZ3|!k_)$cmsm4@YJBVP>+GG`!6D)47K!jo%&gBKD8|8(HOYoG(}MZmk3Qcm3W z0)M{@y5nvIUe!ohl4$S1tPpjC`($ACN_Y-;4KSt|TH}rb)`n>pxC6j1cy7n-`yuV< zN6-y-HgFM-v`2wSH(373z@PFwM3~!wSNzy=8^8~2_sW~-D{i)Uzzv-H6WS8t=K=5G zk-EDVxaOzS3;qH-c!X90Pruc2`+y(t#KBi4@Uov#*SKqdxARDNf%ERL@)8)hllDaz zfxqUFyw(FBUjtv^FYuJLv{~Ak2ly$EwB-)q?Z2SRgc0aoXQeN28_!DoJAjG5hF5S4 zyoBcf?h@b!cfnUK+V$PYS@&4!7Xk0#5j^h&e#mn&VNBrYdo8}r1a9S#w!Z`T)o-XT z!h8*^xgXxZE%53Gs4v`2z=i(-KDZYFXKkP##9a)0i%06Q4Y>Ca%Y6X2{&(O^7=c3` zxA-j`IN%9uyz>En!XtRz0vxgxJ|=uRaMd=(Al$2gt9HU;;JF&Oco%I1_Yz>rZi@#} zfj7NqkEg)wmuc^W5x9*<ml1gLE8vg+Ex^}!B;P&2U+kg(!hapG@h$MiEin0QD}90A z@W>eLe21O%HjB>5f25z`2}oT4<t)TRa26<M9*SxE0yo%hfxB$Cz`eFx^!r60U&0F% zy>@X66diVP3lzO`aSL2#yRQS@X}bkJXuDg1qPH#K1&WTg;3iP?pT%FG=+TP5K+(+< nw?NT@6}Ldqah31_e`34u06t>71&U6lgcmsMed+*O$?yLG6?YM| literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/setuptools/cli.exe b/env/lib/python3.6/site-packages/setuptools/cli.exe new file mode 100644 index 0000000000000000000000000000000000000000..b1487b7819e7286577a043c7726fbe0ca1543083 GIT binary patch literal 65536 zcmeFae|%KMxj%k3yGc&ShO@v10t8qfC>m5WpovRhA=wa=z=p_%6%z1@blsvwI0vv2 zNIY4alVK~j)mwY3trY!Sy|tffZ$+^cObBMdpZutbN^PuECoa`kXb2K>zVBzw<_Fq) zU-$d^{_*|%@qt&)nVIv<%rnnC&oeX6JTqHy>n_PINs<G9rYTAL@TPx0@%--}9r!$a z((i^#&t<$Zd7o|Z8<TGd-?_=NVdM9{v+=gOJh$I=_ub!9J^yrvXQOtv=gzx5rAw<k zcYSZ|9am>%4a-Xw9jfY!Ot@}WQUBkK=MqH|Mf{(O%J6=?F0E)R-u5-_q9XB5EmFjL zRMB1HZ7a&fd)b}0hpCKjVjS>G(qfxk>Uow`_J8Y;?6yo>h9td;lqFW`r_=Cu;je?@ zJ}aCeNvRaYzy7!6vsuJK8t7Ip04X137Vm)<B}y|cNYZo>`v3N5I`@q}=|CK){8#_3 zR`1xV;$zJbJP0ppD|Paae;!F%bM?lxx2d-wfQV@O6ujTW-;jSkRCTolCLPMh2Nx=) zGP{NVA?TB&mP=FqZ|whc3RJSvJUJGyHOs!nBie<k<-z=e)r`kVud+vM0lsONB<Y9b z0<+))qcqReE=`GTutop6y*iN=`x&*3EzZknc4W?3rP&uIJaeXK<D%wvS9N4nkT;0D zPW$-+vpsE9St6ytWVaCXsHU`%GVdR^wE=Xv01fto0vp%r_OvPOWj3j{W@V_Y;fxbp zySskme5v4&(U>PA7G%%m<=|b-UJ~!-boN$bi#jT{Hcy&A=Niq?KHpr`Y-?=MzKk{I 
zIl-)f*v>o`q`5M7OP+gKtTfLZsOCS(qPDr~x8=!_5`6-VLD0EMY5XaI$Uqq@V-Jap zR-V}6Ja=V~*CHdz@F4Rb<?;{KZ*yd>ij_JtwPEG;g{#zT!Uq*Py$3gDv`Z2tYF|X8 zYEi!^3#I2mi!9?8K!AuX>_C;=ltI=m5eE7*@I4UZ&p}=3ho&bc^h3P|C;`K|s)PJt z@!8GLOb})@Yp*SMou>fLhC@WZw%7ar>1Sm0aW&hPm&@Wqv5z<cJW4gM&zmkfJJ+a@ zj6&r=dVrlbR^{dLe--p{MqAX8%7LY}g_XQXq&T82+UL#6!luP}xs6BE?<fb3E#r6f ze^S%+ZFw$9UEExnmrHC?k~jf28Qa}v(?%Aw6cJb9i=;f%LL7GNV)O&mRYm+WAK2)J zoc6N?AE0A$CG}^`sG(_iS>i_&0GwOEjRhPMrYB*+WA64e$@ELiFO?ay?gvgcC<n$Y z<L^1CK%h$vSZG@q;PL(x?eqG1V1nyS(*z5;SA+M!_HB5xgCaCQzioLANgKIa^30b| zP)0-wnAuW?PuhpB1D*9VD+*d7r2(|XN$tU(8-F?I^V~ojiGY&$x^&Sr^ySP^J_*UW zrARijT__0kuL5&8h*xu#MI`axM$bS5AWndQ;JM+aKJrO?BE}`X#TVcgz$PT9E&8Dq zZ6JXIg6WKy%Zx0-)XbKtWRx0n<OM3tY=>1!dbl2?B=#{!9_2$Llg!~3%n@58CG`RW z1LPlkk=p2eFSa3N`&F?g@~A1mHitQyVq0yNK4^CN8joui^5gTpuf^0f+qMtEYVL?F z$fu`~#PaZA)VQ4Amx;XbZ%EJqQT~UlXZwx7HHW!>vn=MgCVU7v0(=qWSe%!~9KS(N zgLM=3LHzO$mU+*{wx!#)wXd#auhgvU=lF&*IVnT+hZ`~0nCHPOETKA3I;S!sQ8$^{ zZcv4UbEsTEpxvZ3yazYCQD1%G)vA+(ndH~oy5$RmDNA{h9?j)8QlvdBd-|V!63d!_ zr{P-1vS(7D+|itM9Rk61MnI<ijY!Ly%7^jv=YUlg`cLmOwOJ@HClJm79G^?wO8q+) z2vf7m?6nYbY6S#*GNiuY5H+x^+G@?tJP#TL9re>+K~KhBa?C)KKh+E*p-K?e54p;H z-uNb0vkbWyR)1lbnp%G$OG`vjpo}PU*o}&pp;`PEODluTuiNcFBFmELneD_AsyG+G zkGm*r)oMJHmxrXL#=Plxfj%;6&nXBm<I#%{teK#)2aU^vKFj+G2|d8ZfX<DYT4pfZ zfo|^HD@jrnxXrnoJ(D*BEsHtwkuBFp`spvA2GpIQLK~G_Fij)vWt2{I(c2x~KW)!t zCOE{y+%GQUQ^og%kazlaaoZ=NV(uK8O?>)d`#6i)km>UtDzrb-*V{hPU&@;WB&3=+ zxL1-^s(vuM%+x$5wc!b>TMmX_2j=|8Kt*)b-4;r#_ff_ny|oEKpX@DE=!THWD9l;8 zEWjV=HO&BTAtLP*tp;IMlM0_Vn8(sUqI$?Nv_U1G^tEZC@of=jxa%BH_{Ai!MYo}y zE@)vjviC#f;TCVZ=HXtX$EDFgCrJNz+eAX#tsgc!-#{X?u;vu7>K}|6xr+Y+O$ixV zZ+D5)r){a?S581&?=jW!dQYD^njLNZDwQ49Kbq9~QJUTP@Z(p`mlCNjK7uj2dw$*y z?Fs@NOQ3Fcxb;G+-Z81QBhBuJS%CWlpf9gp&E>m+$xzI$NMcrT+APveYg4QEVhkj# zC+2qrf~MxI;{Q2Zk_`Xps%rkG7-Dkc{@y;QZ4Oz0#y`#fgd*BZP3DWK6>a+@*L<mM zcZ+wv6pXlQp*qv|N$8nGnzy|!owe_wFT`9w_5eJz=cRm7?ApYLBWTQ~Z~Xh0d`OLq zTT$CqaQsCoH<7xV;0<Sr-s;g0IvOs}L}lA&k-l0$xByYj4z~8BGDno!&c4z=oz(hi z8grx*iDYlPN`q&LaV@ehXt=Ne8MeK-x}c@DjsM$J%twl6LU~JSD&H^}!^3Q<i@!_g zv@vrzI}>D@EZXPo+Bl`5Zw>0+GLF5OFNogis^p(SM>i~SO7+N+7^b&-f@XG3hYwRL zs{rPg^&WTKXuZW1;J*Vf^E(^LEqH+VoqCH0;~Qle%pqFtZQVGjSX7wPu*PZbFwOi{ zG*lGy6QCZdX|wX?4#`^~>lfT8wQf{0k4{L2{|oR+{f=JfFn@0V9WOeR5QLU=M!U6~ zB7d(sir<zi(J(xWuRwrR^cpgzK1ceMKSTyn=7h94qQ})c3tBJ-kufbC-S8FZ{*A-+ z;wE$p2;6zcG#Z^Q=wCTDUVHvM{Uf{T%s<wYuE%Y9r%meyA9u+1R(iScdR70ky|pt% zO*{K56g<p=`;6dF!Rj_V9Z4Kex3fBWL}~ny1nH|{??HFC&$rtV!@%g$GEs~YjUt-3 zyg5y8xAoVl=3`2GjRmRwg}nzj?Kb^myE<wR3=lWy37hs;ROnh+ySnXsoC;P)_ZOlx zK7zQFs(oe^qFNu3t$Ssyg|9J2k2}y#^%uW0`}(%CH2YD#%Pcs^MniW#E!k`h>Z!)# z>Ws#2b>jJh;6zDv(pxgML&lgyPQ#zcbb!!sgpiDoqu{tG6%!Ja>nvz7KufAa>qaA# z=oV|HC9oE}Y-%~C<~B7KIy+)gcYDw!`k|a8<5gBx6?_n^Hfnl`YGk#JRXDw`Y3W5Z zF72K~Dqd=&sK!kRIocXZ$WcQ@HMx}F(UwwzM=dX^$<yW*)lApsLU0ONe1#L$wDK}< z+m`P7xi@OFy|1a`^g5Sax&QBIL?i`BM9fM)?J~l{Rc2^%VhrUz829&peWXrWCnHlz z(^x9cG-`TL;&SCcT7aJf@*!}hy(}@hIc?50YSx@pYQ~(aH5qypGnehQvcielAG{aU zX~0_@&*J%hxyYZhxenZpYC#MBj39u^sFM>J%<uNLp{5+>??vDyuV3EiM+4QdBA;io zzdv6tSFL<#t<s2TfRwNG7HQKrPlW>QrIPdbG7F+JhObn}j(kln(mY$%K{!!5k#)1E ziz+3WTCrR!=CNXVR%|-O_{kh9N!CV3M%Px+KVv3eg)|H^tUYmMQB9Bbm&lY5<g+!A z3q(W{bNLa7G-%8GR2a%BXjxsm@<>uSRpgw1Z~T#cB&t&nSAs!Ug_}|kVHMz$WCS?l zqwD<1@hy6X9b^#7A}+?pyqY#|7U^Uy<!oE$R#G6OIHC7~?928tC#m||`Rwb!vt=?X zUvCU&<zZuqgAMm)Z5TgaQb)3^o#QYflyA_|`O&KZm&VE*-qc-V@o_Xmrh)G=FTI?~ zaUiwZw;@Gy>*X6#P>C%ujL9h3=b(@6wKWGF78?2)w89yy=;G^09Q<ASzGu)Qw(X;0 z{;ohoCMo#dETWJz;bQfN@r_l;$_tKiy+f|A>y^}WR?(y1w&Cj}$@F5L2YsfEL<3pY z8Z-dF^8sAbhP4Aqi=v(obhDs>e#QftDyng66L`)T%)98HH5&8BF<Y>v2#E?5hTb_9 zH2mD~chFE=MQHmw0&)Lo6u2YqKeGV1@zG*g<1#Bwv#zb_%-_+JlMrxKd<~ir3Ze1+ 
zy(_eP6{~SYKhV+(S~~v~1yt)79UHaSeZ5h0^WBheRNU;+TO4|;1L|kljg`GxMRVY5 zgy-B?`L%XKbD$65%Wkaf(<V0uOoUxGf)z4#f3Kscu6N_X#60DBpQ${*$V`+W)Q3=C zVh%!IBlLCRI)r)=>P<|yYD*~1E|lWFafIgb%{TqMMK!$}&wwd`weq~AJfD%@n)sU_ zUiHfyy0+TP&cgr)(wf;G1RCO$+F-8vOp><HO7p|jNn-Q6t|xsd^WT9I=Ikc$B){h> zOt(p4nn%&aNx*RFpHZMF4f(Ufvk=7?JRPMYo=R06O@dN!hp9(J{WAdZdPL@b!%!G% zLqHJ$fo+g=B{EqW3P?d+m=J67#;*QZ08JwbS`rFm!NrD0j{xSFfN^d-(+{H;KZnVO zq>c^Kn`akV>TQ^)nUX?$=?!SjnvZ-^xEv3@Td*3+ToB$GLi`Q1f1eLu;*Pvh0=OLj zdhtFgHl&UZQ-JSB8KgFySnsCLa+gvITEM<JVb|Z0=_NNbv&@H6(`bHB@Igt@ghI@c zl*U&;NMph*gq!`YU((D;uXAEi{}>T?_A^wxGy~aKk5P9rYN}h!*-ueoBA*hw4DFOr zciPZ8^v@j#d(UsI=5c%~N>l%e$W7+;ycJQ_!+(R9k!HS|Ec90*HCfot5kX%T)t%N- zi~Jqxa4NIzB;-ca!0JvWei7b)=I>ieG+2$PYbd;x;wr_LQoMggi&;CG;F7fIhG-(% zJ!c$nrEc$qdPCdkvnu1mRQk}y|2ztlU(w@aFd)D-lsL#-NVQSwulrLY!m_|0v*K-t zB7y%f8D%CG3s<7iT|s_@7ZVu%+>P|Sc?3OwD#DH8xgHD=<f-VsApaaa9sX=8nv;#Z z`k}l%#O<|7rBhsro=L%+c2xoT1-LwYZBh#O<!BUXr-(Z|lREpYkzkpMTP0~-Q7W02 zwZh$V@M_pc5wh%Sm%o^4qt8t_^m(klPsMxqW>>+Hq9%@@@^GtBaXR79?>LQ?^WZ#C z2`ni`a{1lFpInCsiUb$05edblZ^2mnBP=hXEp>8aJojRG7BaJEcKD<{j}yzhTP#U? z=Aa#XBtim8=Gg?r4Uj`5WN-&1pw{2h8%&)Z;9p{i7uubJoO^Qd2$-{7c$u@ERF>y& zqN~6wdfjPB!z|)D^aBs!k+_=q&oG%~7!{|m@ca2}v;&KPJ2>;78Umj~@P&9JSqLha zzlFYP<2&bKzVZaVB-Mc?2YHnu!LA|`O$fbh{3s#N;_-HA4$=p_MZ|rGufc4|OmzUu z^JPvljA~1&s$+Aa<w()zNx!G<0L@dyGr)f#BOMeS6)ST`QZT9-X)BDf9E^O4EH=;B zE*o==+8m?Sfptj=P=j*yt%Pm3WkA!^$&z|GbdnQQQMu~aAXl=XRo6Mq&w=2&97(@S z($~pS2zk2aJAG=JelIfRnTs4-Gueoy6w{_W-;!`D2U;p&H9!}KX!)wyGt%13G>Z>O zBaXr}qS-H-6;8gFl+j!hB|&HG__QCH?uAZY6+qd0>UH`KS<+@;OtPgV@|*2uh0NaK zb;wtOjM^yvHpr<LUa2YUt!L-)wNxOQvg7UAl}UBoaAs>tzb)z&!{3Y1&uQu2YF0;6 z-&pJkNPw~TIeP9tMbGFy@$3@M*Ts{I=TY%&5zoVT@~P)d6APo+yaISwqj*6}fd26l zSTkcVuiyVH03~%8i#~&ZzGlPMWCA!0Gf#IJR{FI;?gP_@en$)RA<KPQ>9elZzErW? z-z!$}DeP6T*8k_BYkgYiUq~IY)=yyvyM1}}O7uIRM!^y9drD&sLd~O$*hyeu#5%<D zB|MuR{sPa&<4WTs;8UXSCjiNK>=0hc&P=2=ADrQtvtr8#<-kGZK>Z2~i+YDr(2b== zcR`DCps{r;k|OD?J&uqOeF)jSt;!F64YPom7yZ+9fQ}L6K;B(=8G8lk_6m~j6~x@z zCDMtQotu#j_2}HA-lTK8dcDqNby|73nvIwet;T0PM(}dy%>!Xa=e&Wit+N2(1_4tK zJ>Ho&@F}G;2jTj!uGD5=No4gi+tKUoGxifUO6&p|zC}*Q`Nt@!^HZd-C<VXUGE6z} zYOGW~YKVB}>-c2srIvNJB1pwv_RV7Hs}lRAC|1y*^It@P6dqcjDCIs;$|7}n{a0bN zwEnC0YEJ!ETa@VSNVnP}A=G&bfqB<!qf3&BkW{O;I*ahh!r#?-)j-(OIT_(*`<&~w z3HA5cW@%$e`m=&S$*g^tLCz@<0M`kCCyB^pUPuD`kpR{zjc?QYPNne;dVddtKfN`j zaX-DcDvf*Ty+UdHHQvTv;)Yn1ge#yte=uO|J&YiKVh)%++R_{)&I_qiSd0WOwwE}M zKLJhMY%j5@ZER5*pMVy>1mb=`bXK5zVw9e>%7YwwQE9vvGOqVjDG&Y)-L5pEZIaIC zt1d9l3jE3C<x2EN7|!Ysdg9Sts0z6xi~B92`HDn$#vVI|kHS`EJa!sEBl<X=N~|0e z#G}+#WRvWC64CQfBGXLJSBXA?#3B7;AUgP28#eff33<>jm|E(KL}PG`1?WOK18iyR zr@EEK-#D<=?b9-MKLq7qL@AMpXFN*8q(*e^0F2H-_4k1j+Inw(tI~Km%BD8|oIZZL z3U#LP!ouD_m~3*fC^b0{i;`Lh@J}(6VsVI}X;M5&;!2eyMl~<&Z4!WS0Y`~eMhmOX z*{Fz-wZUowjBH+3?(n{;&a#?E?5n&i88K>u>i%i|!DBr`8qsAZj-fVnlD&ENu7UOj zcr8tPJKsdI-m^h@@FMC~8b8KU@3}+S`I1Qgj`G7<7-#jKJJoyip1alQde8Ti=;Qd- zEqbZmLK{d(>TSv1K-&|`*$o3Y^LH_kih}8`ftlRO=24yNSd>_EospK1t)P)MNSMz5 zMFbXV!)H|iohdPqaK2TlCsdyXsw|yVJM_5R`8Fcji2AR-qupV#6XH@LR3unydzvBM z4f~1F_TbC*c}(zSLwgMXgM4Bpq**9!s9VzD=qH!e1;$?DRCY2k%qp0&7j#pf$VRk@ zJ}vAuqB{{t3Z*G@GUUh<RahMtFhwyjk)sMzr4_lDBo%wm1?Ew<pEzDWl-uxWJxW(S zme6Q9$r7u~*=q@WxCI^x)$b=M|BjXmCLRK`hJZRJi82A?y-FLA>=QH+(oZ~6)oG_G zm7oW8n-SZG)I^@nHz|$JLoI;48x87n8XKNR#<&=^F9+-;eGV0gPPh}0%>uwt*&h7^ zikjIJeH*WM^eCR-1*y{y7<3vkDAAj#<hY}|)uZNEl<988lt+1aVQ<1g!t+y1WES>P zqW!0sNgW>q8t;8)$CzynZ~LYZ=TGX#rStC(HZCa)yTB3evmPy_-~(OswN&RE!Vcqf zp@Gi}J#;B+uy|&hmNr=+9n;P-K_62nm1xV3H2SPw#e|IhbXfof`+6|7-a1piP-HwN z7^H{2zdg+^sM$1pNn(G@e>T6pEQuKCV2I4dULmNrfxpt(oApIA)u1V4mx*V)ZKf|V zchNeer}=!|H??#5LN6WbNlX_CYfykKg_THOR9^_2FTwuZg0(8r_mh$V#aE#VnGn{e 
zeCl;DfP%p?tggB$k@J+TKa!uwd@4m9VSVvf-3M5SiBUWMu?`fM{}^?u#Rg7oj438} zF(JrR5f9(+cj98FDW)K7zZihT$5@OwgKx%nE3=G6vK4Y@Bde<-Gp$1S)m91meo|RL zn<`b;MO(K26BC3>4jV6|nK2@IAd(jIpM#El1d*~p8E?Q^LTFiSdXY#}J?38eXq6wU zILE&{2PF4XZYiYgP2}og_GW_ZL=T`a(o6hRfQ6D1w{88ns)Va232{Fagx$LRq%S0O zl)0Az+ySZ5pA=~!CT4ui_9ihZH^Qxh#U26>6Z7Hbqn#h2z5ie)Ybiu*0bt+kjg>s@ zjA<Te+x6L%J}EKXCyl?tC*6y`SMYZff1{CJnvdz?E#UyIH1B}!gaNm%H|Bp7#ui@( z%oNtXQp6YWU}CIctPO>{aix*=UiZ)(*qFTw&sY<UCyANuK8K{sX1gzSn6XuE_vK0L zzG=hSeU~9x*zTJ}dxI>C@-?(l4s4*jzOJb5O{H-dahv}rm2DF96vkFyo8F5}t^)$F zZ(9oMi~Bo>vl1%_AO0!k4`R(0WECATr`T9CY<emo<caMP7+pC8BYll5)vw8`??*{r zQwa1doJQE+frH9%)8A24O!>DxmPlhFq~FmY!A0jT?5Z*B+?Z-mztE>vHrpWqH$Nq7 znQ$bS14=<K=P<2<wbKUBCzDz~Nwd$g_PdY~mJ)PknIrr-mL;(=XMopVX(6vP9zl!D zG8t8u=>F3%*>!CDalr@dER`@@Y?!6d@*<PA64UCJIO-D{+shmcuo$LBx>vxe+Ey;C zzAb-8pA`ZV>?nizOJLlY2g_U%w^_#AX+&7PCq<)De2EOb$F4aLln1f;?205wZvaM# zVFVXXgXYER?xJ1UNedWLbhw#43pHVVJOXQCT7oAT1xqP@drH6g1<S->K{s|^C-D8~ zII-`VG_Cp(PnuTk%;)M~Y9hy;0G87Oi^b`fGFXmJv{=-iJc*G;s){U*MNc7w4PZX$ zFG5NYGosTWBeCdAJRx94bOr)R^%*-w;fF~?jmJo-7}k16tTxu|e7FZm>vqP@h}UDJ zMb_<%9ulu7Tg2<vB$|&tC^RDTJ7N`%xTwhn&1g*%jMzDVutmMrtSTNQWXCw9mbgHc zSQk?Rq?y?(K)r~>PMX=bAQTgbqx%Agz--_|=gN^3-U*{nC`=`o*^BWB5aoD5zDc^L zbCPah$}ndW(fDOKfCnSmYs?O0|98q>)A^t1Kmi5fV)^NK<0K|?>Ztkpg{wAx87u#* zeqqFx;gPHrpt<9XQ}|ZXmRbrVBf~@9!{b|~w(2b~o%2V>(ripi+vjs*FBxfV+~`j# zwUV4ks{+SXm<c0&r6KeC5rkopzl66j6a9?+$nen{e9~GIIv0{&3jd(>d9E1#@;j=6 z)uOkr_4gLM5-{%ICcH@ey-Dse{MZBUT1zu282Bo>*21v||3a&=U&8)UQ`x`eDO#(a z$+2t;o8*GowEI!b(%StdRN6V}iP(KElBg`U#9@D{z*)%O`vf>Iabn-XiXWl4ADbAC zbxL$JvcOIfTh5KDUbfOny8snu^oxD!YWTy%94p!42i&pJ2V91~3)1fIfdSdg-sO4d z0#s^?wrun5SjhZ6>?CT{-mI^K=Fel0?4c+GlPClQ3ODjHfx<bfb!|YLTAMfm$~F|; zzUi(GI2jc0gto%WFHCQ)PbR4%le@x}%Msf$Gn>-kp8?Z8kIzIS{LZ2kPIYA1qR0t$ zn7?WzV-v+FcYYJ4Hb@syr5~l=QXFk8m(jW!<oq3}hoUN{(zpzPWU;St4WBx5kz$$J zstdZw%J~Xa)f0lN%jHF>w}53gPr_z=9*MvMv}fS8675hU*yDz=>Qxqp`&p8$PzafG z#m<%=%AZ_k$Zh6-SXSFN%1V}W(ZY$4no;C;s{g~%TEA5qZDWZ>Vk4~|HI(T3pO(1a zDly^=Z=limT__6dNkqF<O)qXlFWR+|h=Y&CAT5mkLH;f(3SopqcV`3xyoaI#cJoZI zim;&G0GtxTkTVqo4z&eA!rAH-<PNvS(l(>HhpOr_vsaOh;YYEgH_}4<XGm>}xWc;# zn?;DgBeLc+Ou7F;1!12zVqb04b$E-(L8Pvlop1dlMR<bP+lzA4QYLl#oVuz6cm(EQ z;W=YB{ik))y=}SxV~#Y-JE9cTiWGBJ8vh#n6tWyja?=(jex4Nl0ne6Hft8KlkV35y z+y&dDCbKdpJ6!*f9e$D*QZ(PwG9*?lf;3mNx%oX9!Dm#%Tj>sXK7|7O2c;w@PH!A` z$}(qT%e{);@wHLrOr+~eoF4r(b2T#R>l_%jYgt>r>5{5}aWNyvNppn~*97@Ca5!n) zRB&u!64`2fsMa0iy>Oxm@QbJ?bpB*$d`r@}3#0zCM9#0Uq@}4Awna{XqNUUrOuWc% zslzKgZj_jgN(3Qdj%SMs)!HOMgJ?$SA5m?n;P?V#d2f=I&$4o7cdM>mQ?y*xMg;gx zgc(g7CW7dRu|;*V=I(Ayq5ilg`3a_A7|!c@Ic8!~S)viH$y!IUBc2WN3Q-Bvj^$c3 z5<sx!+AtAP?XbA>`_KmLmGEEV1Gd_1d=iz5E(t<VUtR&}*5~|vF-8WPHZkV-dpSZz zp_pr!Gxc~5uY<A@^EYRi-j}!SIA#*7YuofZ0ZDU<FPT}zCJ=W74^VFOBqlYZ^z9Ct znpJI{sOCq(3^0R-^me(SFPx2e+bIFLTI}*=5Tu69@DqdIKdD`5F%49^IqMZF*38aD z71(fbhEG!8)PhF}%!TM2><dpIQPFbva~SF(6L|_oSg~2j>p!M007t}T351I#sty)U z+#Si`84w_Buz4?P3V#KB5SPf|6%DG44C5i97KEp0qBcViqnfK8ixAqFYTieA`GW(w zAaRLIV{Rh7ntx26`g<b-#gL;{Hz3<k?DQn<ll%HHt7-aNNgEa5Q|P1E;2FVHjLjkQ z`T-Xxw7Q2{9Y#SISPD$<Tbr+rbgU>ie*R0Z-#Na;r%mD}%<5Jvs_7s90pggwVaNJy z;Gz5ncB#LFXNdQ_W-sV26M91L>)3K<zv8-CZ&&nBu)9dR+1}I*&}Lh1fJ$0Sh=Bu1 zZIV!tHtTQUYHDH4Y44xZ5%^qP#jpQBOzXUV(rydFEg-4H)}rs&NhB^VDy~OgsRcp) zBQj;caunT&@|oX7tBL@ERuek?2okS5fdLs%LT$*NCE(OF3x;97gEqE-ocb9DFl2Q! zgtm63uT#EgNyte@*InzB9Z1=+&_xdqJ!aCwM~?tK*3e@^?B#m2W|4N3p`^dmSjEDp zr5EJ*DeEctDj!a93cWB2&A~*29n=53!&rXK`>HxJ|5fbYYy!?SjKig2`8l{-`R#sJ z{y|JM;N@7?!z#|5{daszTz&pedK?9JQ8F;@qU0|0D_iceAI?7tSL#Z>U6e&#kwgbP zkkbtwSlf+Cu<f@_ncfPo253+zF_re*BqkMOz=e-l@dSF=3tHNe6Mx!NOm-RZ<2n>! 
z2^i*I1ua#Wv>X0&z_aSn73?s&*dqlVd-T@)W9p>J$FO7ZOZr;Fjpb*IiZ0<kj-=(t z)3frtzZVEN)Zu&;5GEyyDoKyR4}t#_Nqfj|4VZ{Qpi+zi1s_y<&#G{Aa&GbPMOY+9 zMu&t)2l!LwN5#q;zBt0;6CDn2Z&SxMOE<QuqarD*i|U-p1COE7rnIv5v>VIdYQtLL z+vF=8tIkQ-iCW8@Pz=4^uQuJ=>}nca<}1w6IQAlU`d|lyHiM6o3qDTHh2A>nrl2_S zA+q^%P|?VQl|Hvwh66uk?P7j%C%U{@zVS76a{Yy?)f|yCw>|CZvLrN|l>4FS+vXAI zH~1Q@M_VFOIwyh-O%sQD3<-Z4nfz%+pMuT$dA}3f(Y)N<c#Ca<Hc{-Aj|5{d<1iXZ zo-tGXE}|+3jBfS)BafO0JZ&L^nBNGx!%&i(k|jT2v%Ep@)Id7GlWuGz+R=G5+`2DW z)a`k83dV!1XXu&z6g?+ALC@Kb)3f+dJlE~aJ}h2YFNxQLN5m`jA@Q2FOT4byiPxhK zrncaPvkrTn6K}_!eR#*Pnmk1DXa@$0c&dc34gYu3$34$Yo-f5ypTaYP)@Z5EAVe%L z79fULyzOojc5hm0T5GmFJpjT`w=@qL21F6dx9}hS>_d<iZ+bBSNLanucs{{|sq9Nu zZ%5j$dIA$Db&Ad%>KL78sm^jCQ2QJXENk|S6i>1Swe1^0VH!|z6vhVJ3d~qpZgqg? zzXJ`{qP%dJwHn(Uw4c1)+4_+yvo*He^{Zd~>O~p~F~0$D{+lmT#%8yz$>m$BosT^* z0nr20&}O%cv?bbkjJiUE8qVZG$Ol*3*xZhC4DtbUv%|~|qj@h=J~GK)1f2?6ni^AS zZU9&Mjpv%9p98c#N(mlVtgend_5~7@=MO8-+r5XkjLvWM1!50n(f5dF84tfLw0Q}( zm*9+g613dxj758q1+@iGGXVyKBgR-iD*K=c=}3jXt{(VYjZ9Vis|CbfrAYwv)gXY_ zQ4v6I3!prr+D<=J)7@%Qhu1Goo8W5RnM%bbM$r5yo02?~go2uOrV+Uka(kl)NYvB= ziJ(Qrc=R;N`2{d8IC6yuvxg}q);OGU*^kC<_2?JJZgJKx9*$a$VY4ft=wFT9f@+7O zj$`$od74}ad%Gmf_rA69AldC`VZZbwE$pF`3rQ)z)dl0=BiP1ZJ-dY$-og#)1bxSP zNgczsgfSnLVGH~D`xwSpJO32GZILW~7K4{qB>)7j@ZQ<NRquK%CdOgGwE<m;>40L* znbh<k|G`<n?<OE)VVDVMWCQ4WfcB5bU=AtqL#CZZ1^b}qlhbb~9C*-Gk;ZxAT`V0Y zybkv}y{}K37*C}jNCD~Cih>GjdU1BZa@I@C(fhvEMh*p00h0JY@9QPky)JkP4t`7= zqP*~?>!A&M*52<x2k*Th{F-zns1|+)7*@OCH45wZaE#_Jpf@pHc?`&iqX9+x9zkQ3 z#(yT{uqtVpS=@!-#!nke{xxk-Yyf0~*(t(n5msJ^!~C*MP!4Ndq{RF@00SGz1&Krf zl7x`PN^-FpYdVe!k1rrQ)O`+Ple1_!S03m=74>zWqxiQFifLao4{wB9^g%?F=gS~0 zM>_u(!b6Igk78KGX%zF_BQvo$i2dd%>Ll%S;>zYS8{}-d^88%#^8m>@n(H6JN4eBH z0j1d%dV4m1hFL&aSv{tK$Ix%EF=8gH*LA?R>-5G>76)qa5?U!q{5zOkM$(KDXRO2( zGaf}bx2|K?&R=KDobU79gq@AE{9S-_z5ubTUu>V?@OfJ|ccbj>v{^6<LJ%vN_+lT5 zs+VQoBJBbzaqyAIfg+76Ibk<ohp|+arK#>CO_g}6Xg2YP5?z6EY1!XzyS@qf0Ycyo zuOK0K^{@C^(P8ojvDHkzYo|CVWwttu893J<y#^+hB@U&rn!3T0f)?HX1<Az8=m$z; z84_P?0&WlocJb_!`cw(tn=;==vp-BaJ7}^<vkj)5GB<|@BxD3D3m20zCAX#9AzLA% zHeAJuNh-{DyURAfZT&N3>rN%fv?<X)A_D19F*sY|SK`=n3hiSh@}3UycJ4WiH(bHN zbUmqcI2E<H#I??F`i~;nm*C<{G3o5OtmefzxlK(?W9UPt^?{_R4jL<mG)z;|t{nRI z35>GnumQA32}vG6{NITX#smVXGT-f&W{?OLdm#JQzu|LRVj9_7JPjAE=2mf)a`9Ab zAy_6`@*nHK5Zl4;M_QX+{4AWn;AI>6ng`K$p?E4K0IPv1nYAu|;3Z1JysS<AUUB&Z z&@#*(cou0$s4dFTZe<VbvtnZq!)oOs{F}_@DHn%f0h22Bz;l-Xygvx=wvPbJ=czn? 
za4`J^1Sw++(os(-O7^h_4k30Gv1ow*3jo*yuOlp`=K1je*G1A%BvDKgg|#5YBM4&7 z6Fcw+#8`T96Shm$F-4CMRvOmRzlU3yc>^y2SSS?R4u@cwoDv##^y~sxs3TZ9P{;%d zV4{fxRJ6JmKGh2ygURWXjF~(9skC^I_ki6)F#9EEOd#ZJVmWw7$<^jN><83bny&>Y zLev|G5KaS;mcdAD^#EG;S!iW2dlFE;4^Gs>Ag}%LHh~9<rUs`{k*H`89YP}tZwN9_ z5Nb4>{Qrg)EWdHM7sD`c1JExBvYFoV>hx-(khc<7V#FIC<h0_$S~x^Q-Xqi}81h0S z`z(%QOf59lZteEL8@Cf<Egd#yUDjAzwgL0B?HFrwc{U|)Sf3nluR1}w+xceXKz4pV zDF<3R#md&RV)B~jccRiE>scXhtpKePdPzHNO}c{S>_$Md+4Z2J`3~AJd3QY$$aFIX z`~CFMe8)VB4>GIofqW${KcIdLn~0fokH)b<em8~*vP0#B*Wwcfs_7_=ve2~sD0Cwh z4X~qPqW%M5l^nSL-&NiFUsQeeSbx>K{=2Hp>_(s@oc@#bn%UH3)&+`=hYRR5kn9dZ z4t}=DW@k4MKznW507XWFA~^)<B}jO2XA!N;-9#m#*l;v`Co<_-f^MC^gCL=EAEC~D z;8WB52Ias8vj}~36ULEv*{WTgK1{L~8r$6<UY<ovHi3v~o-iID>W8V7CdN|4i6qAM z4ebxmQmUl=ftwL8iI;^*g+j63Erc38A%+wZ;C|f;g&~0xDhNPW0h~tJdNR=LCeA_F z+`OLKFu)Did$N&(XP^abKo7X0_}Qc+i1%iQ04)<N6RtU%hyow&e})9WON1!ABurbj zSe5(+yGE=FcDHWzM$lQ1Z?>CA%1Iyuqv1qukiSCW1Bc&-h@49tFbOAM`K$%MhYGq; z(=Mdb8GBlv@Exc~)FVe+e8f?}(3glDZXwD$X&-}Zr%EHufLK``s0(E{f(m10Gpv~1 zip{cOe+QoUHphy6YQ=n3>^&=1YQ<i&V&ztBzZF|mOkGKpJVOZ}R|iHdYfRoAhPD`o zCJfAjO>5Ar<~s<uzn7}5Uivr6h%|Jr#I~<T-l^66Eav$kuMl+A-Czo(;)D~h21A_* zQ`$fw6Ok*(FQ;<(B5a<J1c>h2oIp|=g`GTNh0%lGX3!tM2{;A|w$fM&6xeLy#&FBW zLg$8`qxT*s`p<kP{FI20Bq8#+h)~a(@94z@fxIM8dq{xP(RwifN@|u~OhA%2g_*aT zWO5IE*-dg3Po<1&m-?_UCn%BE66HNfnNu2R6tx5x!vsx*e~$$I3b+71-N?j8VH#)w z2u!(M#6@{R?1`9`T<@Vo{xRYha7AVO8L$Pq_Kxt1N(i1+U@-~+tM2Jnl;!>0eF79t za`&uDxqFzE1tpCq?*5dbmvA>3m(ux<kWSVVOF6@ag?XYYR>Ap^S5b0}94oOE(<En$ z!u;GijRYIYiiCzU!>x6)Op5~OTCvw2;0wtUob>WYcvweLn*2RYH5c0bU(rF-f+I~e zJ?;Jr(tMPJ0|^`4<^~5H^sJ2edjcqjt{$0)Qv~`U4^)Gz(0`5=KwY!|f-Tvtyx{Mh z>UY-HodcW0prhZm;p_foQ6+hf2l<u`8iBB-=?pz}zcz*!!uA`N$aE~WIpFqu4VnV? zo-95=e42t!iI1_GgLA`ZxTinmQW}4NG`2+6JNk^_*djq;ddC;~VR*GW0Rc<))4~;g z2LDMLdW{_CRVQa6OiuGzWHovkZVzODhQ2)jTTloaCA8|ORvPQ6bQ~a?8!NZrbl8%d z{GLVLi#U9?eL^*zV&kXaC_#%Te{Z5fKkPxRwAFGijIrd5F`k?;MzdBpU9)32kS*M< zlV`D$N30zl6+ZY?Rh9fosNJat!B{j>Ohc{B6>^iD7!8eD4O5Y*?yiCAaCS<~NYV+e zhRHr%y%HyDErVkvwwGnv>kvLO-rTR7pmo&@vJdL!n2n#~q3B!C%!r+T--lM~JvOCr zmX&ZPC4eH3zMZf!;lp@*Xt+p=5T$WG!r={2V83@`)=~Ac2U1bZXBG-lfSt0eBkU(X zBsp=58&D1u0S23U?Wx6=&4)aSdmK=~W#JVlCwwu5)X?WQ^p~LYyTw0bl>rj~{NsJV zan9z#Apbr&%YW{*w@2(R&YC`73g3c4@(;rh-7PqhhQ|>F-4+^^RuM2Fc83FigO{62 zKsg6dy~={YUOskRc7jj<O28b9t{nuDlkIVNY*KhSN~-23iv>*Ly2!btcgsodhiaaF z(Nrfzump#s%=((j!^xyq;0+K8nAcaC*^fYXVZw?9q@DMn+llsSHX>hA1Z0_%q`Njc zOeE)5^kMVbq|hXU=vWCIk%UpXI(fk9RTw<1<4v^u?B%~hoHUL1ymCKHgxQDre~Ohj z^d85?E!F&ORD%QiC617{XH)q;;lk9jDTT%DaafQPuv#zQ^bu7ATt>$hVvAy<Po&l) zQ`Ku*FQ%YzkMOr)#t!YFqg%9OjU#5@jI<-jUlJea_!hV`L^fQ}WQ@nK%X)Ym(obiW z9tIf5EK1lz(3lRSMsjd~A6sX1%pMaYPQ&yaAU|(83}~9OpspSw#gHj%|E5y|0NeO4 z0BMnlU|#@v$PWp-o#nJ_3GVAS=aUZ5qZ)f*?VA*a6EWiCUEJaA+xVr>vB7<upy=`6 zK~=->`GOD2F7$Fc8S&#d-jJr7(>HPy^SbCOY;q)zN!e7K+yM^r=h#~t3dIqrFK`n< zCWLBTQF)H?&_Q-k_@P+0N#J~Z@;EFjpJP9)yfEKg6;xihC#~Q(ZYh#;qTQRvvpOgC zSG^ZDX0R2q{XOr+jl&k`Ez`a4Y{Y_Htc?20qPHk7(ifJ`L-K^L%WiOp6rg*D1{_>^ z;NUXg%>qvs%rFQj3@McOm7u2O$gv!KdljX@JDk1*#1|Q)^fF&wE1z`!sNP{qPFaTf z#0ZxdTwg#Zrfdbr#r}<G`Ve<5>=F&}qOo#d(l#A<^XgOJ1`lz$Z!2mWEtukH0>@N` zI(+e;%#kF%0kCc1td+=iIaw0-kj`l9*ONiM1}sR^L(3Awf~$6`=uBEivRA8$iqzrk z<aa-C>a9-u``*_!e*WDSr~RP!@FuyaNORz<w6!}i45Y_!lRPR*7HIuqs^%oOKH$_z zb{PF46zPWuuqA7Z3T%rxjU{W~_pV=%l_;%~SymVo!+=B2WA+Q)ckA-Ld&J4MuhQ4z z#0D!CpC{1g1@=DyA@7N8e`Ynk*a6$Vw)ltG`_eMvWot>`6Sc*=`r{20Us4QXqV>Iz z;&Y3C+#iop{OaOZfBb%mPb_}0KmGv4hZp~d;^`>A8F6#-TI_P32pQYg!Yu)ftTa!+ z{uwgL)?fr&xw?NG0)Ol&1iAOjp@)wirFbMw2l&deh}glRfCFAZUw*gSY1d@E#p!L| zcm_?kSID*A)=jDO8Fa2`GiOs7{QWP{k8Kf8xSW{bCfJvg{t72C>gg9VcPv)3Sz9C} zl;5gO!Jmx3wfU`DDc=MRNFFc6>2FLjZiC<*AQX4gBeBNZvWlG$Ck^4`(=M~L#I3AN 
z=ZZQ<=V@wwITqVLe6Qc^)IUzSk%F-<@xKocdb{b77=3`+yqg}0VF#$yyXleKx(x8q zXoKPJ2;u&Px(;y0NszV3-=U>rAo$xWa9e^a16By_P?Ufn|H6y1It-12KgUIfHl8g7 z7yZFlxCZI4A1z&LR2+>jT)Pv+P|DR7H{moQ%MuKgP26LDwW#7$-B?y}iWsYUl~FnZ z&Yh<cAMow45#X>w(w`zbS;{1H%i1b)c}FNQ7L>)=Sn}GzaaLSC^e5^9@$FK?um#wU zRT`XTjfHCqTKF048dwrX9I+U57-WGxD=v+$5>fc}gsF4yLQYHNlmC*L{dfna`*0e$ zCb{(s5*8dO9s}l79%^N+q(2(!Iw+3C3*c!b_>FDg)t4Z%X0Ud1HbwY0vVlOWC{*E5 z3eo0n4Qw%kNHeLSP<Xjrsc&`JwLIo?7kg5FJXXyvo=mUd#Z%~&UM%^3YSU7AiI}?6 zy#nDMuEtV9?9IWr({HIv<>gpr!CpmYRxzSr7|bE|d>kDyr&zTu400V?93i@~t2qsu zQlCW}3*oR2#)HpV$S9^0t62TLW|dHtSP<mPkb#{nsh?XMQm>8Js`xTM1D1xmCBdoy z-*z>4Ma*#qW?WO=7MzSR%zl<E^DmkLBW{O`>C*@~NxvK`uO|k~sUb)^<dW*=e<V4W zMnQ=t!l$iy3S0)N3R;3jI{O>8sN-Zl2B*tv1_`TQb{M0;-Su;)XfE7y<nR6M6x=jd zMsw;pW;(nH<mR-d6gU$(n<pyIx4|ENB6*3R4WrC-ItvQxV1=_e&Gb8)Y-Okb)ir*A z!=Si*L3_IXq6gP!UChvafs!2U3rulz7%fv8JAno+{_v=dIT>17S>o)H#K+<TSy|~| zC=kT$JA|OiwBaas!I4Bt+5GystJDjG?Pb`c!&HqfdBA3-t-f#y#)GazRzV9~bNsz@ zU7o-9SSOq<M=lbTr>t6l1|8A9q_&_B)#U<587SO5CqrF``|^r$AT|Ktsl14$T4-ce za~hgwHO|CRs=uX)EIv93VlOk(@oBlUtTTuK7}?X?QzW7oWpH&4M<QBMyAs9Ob&q7) z`Y)q6<HT|*SY0%MtmEL)L$Cx`6ZS9!Az0NkVLiN7tm*o0I#+GXo{r9iX*eBigO7k6 zccrl9@X7B9R8__5&hcTGmC;7nA!jjaoww;G?C)bOv}pnBY5g=M=1|~Oe?83E?*ObT z1b2ullG*Kj)j=xY2n;<|0p)w>%(WrTUt>*4ewWE9BqqPRHvlmm_(No#gNRobd_evZ z+SM>R!?{Uy##0G`SS>NtvOMWMTeV@4lofmE1MY<qC1BMPZ2%DYLs?nHT^Fw+iN)6y zO;U&ZeCuExzhJ%o#%4c@+TgX3AFn#r;|o;d9u@yN^BwqvfGXDn_|p&|OiOzan_PwU zc@HMe=Kw{<2Xeve<@?Zfa<an64KvR(D2}xyR>AjOh0R^N-^_lBlDfQSmBx*rAug;L zM(!9F>Cv6v?hBwUz5vxg@PW1yw$>+*LwF9MzF;+fI$y|j@&kEp_OHE3z@WXsn_)V- z1cT&0WZgr4WI!*4bewMw`Ew>U9kx%!7N&kjj}V-y>X(;%;`=>pC^)<uSF@sRYR37a zd&m<Zu?9Cmp|#ns6Z%?jf!1SYA4a&K%d*qa`;drZW(l|!g7cp%@OKq-!8t4az*3Z) z$c&!VaOoFramws6glqKqcZ}IoLG9}PR*+c2QCZ;*Se7lD0qJJp&c6*VTy#icV=n&$ z)>E+vv_SaXhzrNC#5mlI)<GwsnRPM)D|6*Qsm-Bx_+W^(T71}sD+*G#f-=^?(m#i$ zyQ<E&V&w}T>1LbWO8cBktOV@~+J%;q{#VHtvxzI4k{34Nq7>`8CeG&fBIk9Dr`5ct zK~6Zm<0YADO5%;!e7Ysik>A=Do8LDO`g$PLn+yr{iY|f>Xin^6u{xLctmgJ!-0T90 zz=0_S+?+ba3Q)xDIRDZBo-%iA9?#>jfepC}D1a!agS&um`A-gQm~YxgqS#fm!mUIf z1#Y-|$o(QML)T$<^?Jyzf|@d`tAf1nIm+wgD$0mUuu@=y0YN4<)%$P25nPB|*Lg2) znZXxP?NbJBB0Bz-s2v;WIG+mylbh+CcOl$_c?7iv?r$W|0%qC}n6U`QDx8&7)xn4@ zR^hI!GHRT#SDD!)tH|hv%aszXr7RUPT&DILw#1A5O5yuTlnxY-xX}?3??vT-)p%30 zZu_lhR_9X0t!2}tu0z|P>_D<XS%FQ62zMjaoA7NS7q>xArfE_=?XQ3PN+99B#9u@m zbhF0mK^!`8XSQh5(aA1^o#gDuP9h}Z-No9@uSNP{)=qExvBW}zS0RP2Q3K4e&SM`O z`|Q}s%p=;l^JiHXpm4_@zPQeRVn4QVxEF9+<c*3Ku$wcM<m1D5T%K9*0YWlD&hzi% zAmaNHdzGEQU1+GM_Ml7Br`1EI#4WX0B%&_D%nb~4mM;rbR)#%y4xE{=TpkYLN=SLF zF%A7irzmD(c?9Sg1!LI;C)_WvKD;Gwmi|>Abl%@KUmcsZIkxJzE|v)=fBimO-}<`n zGQh?(Pr)ID7pdDR;zlI#?Aix~nBnFzuv8n#!uk0Q+SJ@faB2bS!%b0g!D0T(y(U)A z;T&@V_`wA$CZ7v3gHvk+44Pr2>?2Wz(<5%fWLKE?<eK;7nD<QQ*-1dm*l-(f75j{a z^@8JMP&1EV%7ae-jD5*kv1_q<Cial&>k)i6%}+2qfk<?{OE?a?RPvux;>KUvFkOzj zd*x-7CT^JH&k5#n)*O_v+Y)Y~xo*Q7K<<vy(4Mk)w(vup0x!@*e*kCD6c`Mdi7DVe zuzAFgu??Uvp8%*e&nACxxVb7n*p22@RkPx?kOjS%G(EWtH(*-^F2iqO(rH<iD!{X$ z&~DQGFh^;_u?2&huoC2T7r=Q!9LK^=UKKGZ8HF%CwUt?Zvx7eS?~*@*c6G#ATa+ri zU9-vd@=J0zz|2DdLY?=a0KVjPEH!5Gh2pguF6;^Tq~AwiyZ~vIldHIH1dD*Dh%jL! 
zW3q_Shm+ZLJfYF~I(i#=52(P+>UQXlQ0EIsO1kwbQM&F^EDHr0nh^tqwh)D2B7?_n zilAi&`QQE=G)hu@5lxJ9;K%_k0oJMH<2)NCd6<`o@)-0kXC=MmSfHk`cDiQkG`}$q z6y~3x0xU+5+li9FoOHubIR>^gcpbyJc)-h;taj85W;S(+Ri@{gWqvXhWtv(Cf0>$e z$lbp%!;Bqs(+)|yc1RbX^k5a#NV3>Jpjg%eryF=Q*T`t}QyBQb7ImkwPZNC^B_zF( zX9T(9EIyHg$#JkFe-8TyIOC_SA3Sie8c8r`C00{j8cFzr7LXdYIx2CGz~tKqz*{(& zWQ18k{xfpq06{0AH#WZ!<c#9H1ZDO2H;*II#%JQ$xeYyx{G<64#0HT$euNgO*ceY7 z7y1~}VN77XuWg<l=_ok9f}Fx#n{xSI0VW)4t)jVxIB1AT<b1e;yP&|nq$>(Di9HWr zfsSP->B2i6qq!$mQ&>m2y&rCJ<(~y}+y7L>SNvLN4Kb7IUjt@^Au7Aq<MG`iZu{ZH z2pnq44>)mgC1zF|GxQc*KD;q8ux7+CO`gv4T{Ko#v%dU$!4bW!U*Im9JC8WPF|nPt zQeq*D8N(MD6*w)9sp$!PsEXxY%SOT9ngx4}<vnn*#_-mC(59)aUpa2lznZt%9+`J5 zyV>ErS=JWN_Ex?Am1omf_Ueg5Y;lU?{E5k{_LcT!Xj6f}<gtm|*i9V+Umo2@ekb^d zRfaq{<banNtCHDD2Yj9E73Yjw9kimtbD0cBDWF9=8AEEV>Cr#788zpWDC|YJ$FPUh z^t4`dMCO4fZ?5%zxH*M=Xos;&<U)4uJ4kuQ`#w&Lz%TzEhxZ;?^Bxd5U-WDm!(Kb_ z`T2JytH5`$-Jwk;q^?bji{0EI(x0=irB4Fidw?cNk=Y^#T?r^kWQ$~Di3}pcCmQQZ z>_9=AzOOXaqY@0rG3PNB0<=u~L&(1bPZ>||5?Nc*401J9D1EI>2oMpc)z>K!eDq!w zWId4pJ{e<0SWvfgUui~8;tB!e0$GPZg&c_gjv992vsk0RI|H+_UL(yYoe9_aE)!P2 zv-rMyo0xoC1|XKT4GhI*zXTBuOFl_z{YbHwJAY4ehpI{}P{enUC0TYxKo(J)Q?)+o zPc%`NTIC|Oue`(pD0kK0TOw&0`Wi={NYS^#1LF=-92g$o5lI*&2ldDrAOR~9u{q%g zHfPzy@A-#gi$|QPjFr2w<?`2jkQMWBoRAlw-c*9!?9lI$-9kF{sMI1@eJI^1ruGT@ z;O?ymVf9Ak!{CA4xLLTH_PZ@^cu`O-16q>Q84g3yg;!hkRLbSDa_teq*X_0o`0%0m z(D0WWy)eqKb)m*1j<Dnr#%mW{2Y3?YVW$p7jx;yB2CAXfCVr+bkxkrxwcTN+5@M{( zg()+`mF4~RVsHSP4@)__$AvX#!ftOV!DV6>SlgW~LW&z_k`#mg{XMrDKH2a&a2oX{ z?OepcE{Zi*>!*tSUT2tkG>HrbRGDl&kD=FMKan;-2`q;f|CSQ=YW`cTolfk)%-73% zOugw0wkplou3o$h7v3;b#eKb96b(4y^&A0;q|(}Mk@gyv)|f}9l4nS4sS|gb8}sGZ zO$f-we22dF=cU4(<fWezzciPXG#~D3ZEQhTH7zN@@vE&4!D0}}&(0s89FQ3<+wWh2 zVdX6dA(kF4EIgd--TX>uv@xxpDeTp6XtZ-|X)jLLEb@LC+g8-eCK(kjtbdgsE(c=x zl>sG62d=SkaaMWIix5;#>jejNV2^%b-sZH(ybzhoS3A6`Wv#^0Zx=k9#*sAk#1`9x zg4;z3?lMvrV-u6~Rw%f^kB{!61`g42OJ$U1K-n#IupP2-FDB}){5NeCy=0G3e)uGy z={N<B)R>N?vBlS7%Ty@Y)vV@REcc>O<AQ>u{538kBpWw7NTb{=<LM2_T6Oc{bZC)L zq(#yly6M@JTVFSdw8&dS^uyR#>8?`tR>C8`xnfJdp*$J|(n#)?bC)n}^~OrC!yU@T zVjJ$LMG6d0#)4j>^tztTIUpTYdxdx@G1@zaF24f)0ZVMg&AqWz1-(pjwe~rdVDvzO z-Y1$=+YR3lC0b8S)_Uo4{|6AqyL4bc>7xPVO$-}qT0gyq4-P0x#DF5ce2dr^P(bf3 zLfLMSQ7Y+M4K~wW!@_5v!isY-=a=kWA|<&cgT6Q8DJMrZkTtDeIj1>vAOx}s<@_d1 zY3fgWLCU#Eko8R>E54!e9Ya3e>xd=Ex?~7h{Vv09l;-qeraP3u-MfVXsF0zO?5U(` z^wu%@M_m}8!JSo$^b4L~bzP?Zrg`FXy`slVWP$DUSIvU%6Q9vAoh9_%dzcqgIhc3q z@}8-EneS@D^fouVF}x=?a_>oP2b(|z{}(Xt0p>kzWdchg+-o<OvkN(|P3FwF<lB22 zyO1NBKMo%ib`td@_oFgWXoh+tY|tTgv&*ot5|>_Rs(&#i2qa5f%mtOBe}#Du+bI~2 zZQE5kwSsVd3kSKe_+S=4mY1@k{<aLq^{eck8$o<nH4>kaw)wW?FWyyJU`~A#Uh`JL zC^X_(4ZV3}Ve|;}X2m&n%LNA;mXCSQmr4GExNpatrWV`RjbtrmH#xjF$=WK&l8~Uf z%h+2a;JvYJh2Tb`=FHSpO{E6@`V_5zRh+@VKRGio1JYxG?G!_z1wDCepMo4(CV&7s z`DRCQqR@kSWcGcBajydvvhR~(P#Uo<28GnmnK#J>04fQ<sFag<)mogH+1CoLYyy|o zO|7rXl(bC2dXSngGQ4b%NqaN4HI>q&0U%j}44QEt&ADPPS*R}Q5R;-4pJ&_vMFtyk zrZLP|Jc5KCx=`z~A0xR&(sdB)b8L9*UYju&w&ii&2{g`v+?Z>L$%2-yPopGKtA-p~ z;230bvKz@5dvT^1>y%u+_W<l3^e=f2Mls@;H)pmb7U23pUA+On5dz<tAUnwqO(&O) z-@Zf#i4(X+NvB)D>QYe>n7J$$!|t#Ef3ua=4%>5a07wiT;uz~;TG0K3O2$tJV2_vX z<wi&2hY;episL$buxb~G@ZaqhD9~<#ldeEiom3dk^8G6S+k*UG9;YhmdV^wDdg$7i zYy^q7QGAe}CLn77-*<W(mN11dQ4Jo=z_kM~9U9SD@Xs>#7K-OgJc~4!Fa~$Rwt#y= zF6U1H87y3Xh*#3CI2x7k(E~Vk9snp7+t@me<EoX|EbEe$H0wtN?D6Imc_|+py=d&6 zj^djhyByE@i@0gE{-RBri9zW6G1^nOjL$=fz-T6)`i-i71%jhTI!jOwE`RW-Bj^%d z%Yt+}P64AEXd&~?XJ{}vyFCWMXKCG~>5h7(aTg*yL6&#lde}D0-LYscFo1b8z|zcF z=|;?hsF~e?nGj`O19-rRR8?-oQH20f%<NP6&K?ug5(Qv)GCBu2ah-tjzyi?Sh?XMS z9HsW*V!r5iAj8d>OtiY71;1!Qdm~Y*3>VqQ^{u$;DZ4o^t7-YUri#DQ%{Ta|6WoB5 zxLG;S8sP7q5sguAWHG8U|22CBHi~@S!^#6sqF}&AeMrZ`dk&Zq6H$0jS-0Vpm;#Z+ 
zcx--IKv>!jfr&Y2#0&%?sklR_61Kw_6;z39&4@0^+?Ey5au8UB3~=lbtqs83eJ;SF z)RjyE`7FmCBHR@KW1?ynBSx~f7VRYh8Bt;`WoI_N>-(ww67EL?3k{SB9EKFy?mw4x zNx?^9tJ3#VQ8s1gTZouZD&G|43Onx{_?OH{(IzV|6cij;r}u%>ttBP8Kqkf5OYO6| zISIJT6lr|gG%SPHc?BhvXqf5|g{CC&RIk7#ECEA&=RJ8tfxQ9`YMF%%j;<Do`jq=G ze2umI<@nBqH;=NgY`R66#fBTDN@3@4d?+|VEC5ypf4&UvVwMz&jsV9+X(J}dT@~Oi z53=C$Bf&{5MugCxBwmy91#iTn<%oDIT$_s6!}Qe@UDZ5te*IU&@WTayTJ2Jn&teRm zFth><`>7BU4v{$McG4;(AIJV;(HTe&fO)7~OG*a2d4a%}AZ&tG-Zo|DjUtVz&KE6# zK|;BIG0N`r;EN>~5P2nf3=J!yCRHGPut|i6{v_r9R+Gxu!{V#em&ywx=g(iKqgkVM z(X5n6*2;B8j?bryHm4+C>kOCA*C2SNkJ`8Qf8M@-qM=t%V6c6+iZsGwNc-kd`+WE! z8nlf-V&7^A$!Ylo)2yZLnPasDjj-({Nc)?jDY)r}+F)<D33;)eXo0=mYQa-bdmCRa z=ne+M%d@bkiFLt#Ss9B_x%sW)p2z@e4Ftn<G%hK)C-EygjXy~WndnZ|mfs$THO{8Y z|44vUr+qI0dOzIpTEc1V6Ih&&lvS2sTdlVQTJ-TS&>%4nEEA)w^m7O1UQ$=)%zlP} zONt<-{v=5uc!5Ob((?8FlqPBG_5A`yy(*GgTO=eDzcw)%Cfejy)<gu2nTdHx>77Ex z+r+g=xe)r^2ZO8N!1}^*V(pyA-+7+$=YkacLj-k?*razdfk?h!qSY%gODK4wmWO{X zPPn<koQ7)-a9ZSJ(``KerInZeKokeNC>0|XuNcVV1N(22`Mm(ZQJ2*NaMqCiDU9+M z!*Ep){R&PjSKN&TXB%-Z8Ou}-EWXyEe`Hf%4)7vUG#K5Py}NWKF4h=LWVJ4`xw?l+ zf$Qz*#Ax1&B9oMHh)QX0(Qh&(3~9y?#uxFkLpqg8m&eFGXqyws$+nH+za1!u+Vt<p z3G-sxK%2(#9}NHq10x@oY|K%sF>@|$jDp4t7maBT@by!vG1&J_?=DS4W3Hu<x?>6w zu^D>0gT`DfGs$gel^vGnqMFm{Sbi<)U=^ovM}T{v_J7pCAK<HK;4i5rYraFfgY*j$ zGNyO$V3#gw78UcBTEs20XoQTC*g71?|MMF#H(D_Gc^3R00hwTMkv3e;yLj+XLh4+s z%q$AYYHm69mA4F2o_BSZ4x8Y>-2wQGBXnZ^mrGc?bvo8MSvz1spgD`Uk!U$&1RXiB ziRLDk1WeoL$6{zZ(?vgjfdRksQ|J|JABy`ECh`m*He~nmN52(q!R-kxq=%5#(KIn} zL~My()Fw7f<R<|!B!jiL=kA;iaIxQchU-5gPQZSrtYPQET@3_-e9tiO_aRp&{Z^HZ zJHTlb-mWRlN|Wqch>H;>;rMA{+(1;m2|oZ);nqGU6zokoKJN)7dKi3EIEij9ciXht zv8{BCA-qf{#{6gCkKc>mtqAa$FGGaMK#t4K@nbN(oBm8cIMe$S7UyjwVs!oZt(d7| zb7u36v2AI6Mx7gFOt#8!i!#n&PTXIHyGV1R3^>@om0y9&buceznv`%ftx7WsYkJ68 z{~S5%M*=IvZ_I!|FZ|~vJF-4R!5u?^u^+US9nODKzmT%6BDOV&Lb4ea3U_`R1vJAA zm;KzPN&FU+$qq-ZTw&O#+%e=Ff|CJ>;X`W~@D#>A8Uzz08Hu~S8w&sUN9<g|BW^3$ zeDDWS+=KJ@svzxwe_1r4kyb#3RaN9WA71+znNrbv@VxF4Ql`pAF@Yqq`}ct17!psV zq!f@EJ-2-d-LBzxEh@}WWgmXVs9Qe*)^O*ymV5o~I-Ae%yLS^jyf&1^XHYoC{>CSW zMaZFqcBaJ7AbD{0QyR{S8-5R)eFl}o|Dq<3+(O(~@Q@@qUI8rpFf@<leWElzh=lDW z)_%r$l)v$YSm`{uSi+of%P9Ush&DTfJ?-4M^g7PABt~Gr2|w`?LQ+OtA{xQo2$vMn zALoi-m~Whm0>R7YtXnVW*CkLFO;bNc&1^Q&q^imS5H5D_u)|n@dtbATexLU{scQ8K z{0foM_$;z`D{_?w{|y0C%Z20&&Dpt&zQ4BJpWKci^kI?7NTNTQzcmF_o`V!e;%S6F zJS-FAa39pi-)sRKso=2>!1=<ZMWAmv04DozN>vs8dX%H8Dv@R(LV%#G#~Sxxe+^nk zsF9cd2PUF0g@!sqqHC~&(nUH^^o|=R5a~Cl2D*y$vd2Tp+J6RX39$y8jC@|dM``>3 zErhERybREN)Ngz)K(XBinxhZ?z-DtnP*59RErJ3Uc=n_hba%dh+}n%wo{lYr=q9UE zNAnjagDSo7TKZ!=T~H-1s4|QE+%D-??CRk+dI9(x8jC{;Ek6>v6A|<R6a@NsXpOjc zKQRr&fnN?f3iknkINBK=n}q6c-%%H^KL6qP?y1PmW4)*>F|MDKC@eYBn%UGK26~-S zGl-TwzX2rlBrtR0_pr!G^)Di+J$6S2j0<80!7u-pfeRop27#nBXiP?;sZB=^zi}n7 zAr7(_6R7j)KmsR<{*jkNW#yot?{0$VS<-$1guRjcj<CrZ6tWJlryd|on$(z0fQeZ{ z#GL%UL}IEaM9A-3=oFIQINm~jIRZj{bHEhoLVj}w<<~><>k{(o9F*Uje);_sb@7}A zvkP7}TkuPvgR*;^=>84a4Ul{9rG1P|boI`dV;+7?wu*naOZ0FxRS61_^r9v-4);#E zY5N&2uGCzxSQS4)W<PLwLM!Md;Sk7!y>sa|*9KaGF6Q$mfW3*gX-Hq_MK4Yyrgnj; zodHzA?*st-l3xx)@D%p)2KtC<gxqJJBc|xVR~(!A<Ufcb;;}o<40QkWhyFqLPeCF& zUUWY=@zTB@-A65jP50X#GBh0^|NI6BAud|sn^B*+S>|_(x0A0EZx^o>Z#NH$cMe}d z@9X(O5%utS;+@BD5bx>y8u6aNFBk8be3E$2;$y@+mn-63$kWAp4mbZdVdyhA`}jEo z&CR9!jChyx)8f6DpAzo?|ATnn!e1Bf75tERui`I>_Zt43c(3Kph<BJjA>QlxqvE}R zKP28N-znZ(d82r5<J<5i6rQgKm+`wP_4!5$-Y$Yo6kH*K<Oj|xM39s+Um$`HQSb&4 ze1w8CM39`j_+$}$oPwi8@CgcLir`Zeln~Sp%^0}xQgn(so27YE#mx!O1AoLmInKr6 z*Vh))T?$BfO{8pwKTANQ1o?}U@{K~a<KP~y*G%U5iB*cro4O*I617s?-qcmelucGj zjyH8pGUYZaCD)s}Hkq>2O7VD8!^xClk+M0@JA1uI3G#eO>Bk1M4dD+9c}&Na7W~x4 z^W9I2X`?aIn(tqUC}u^N3E@Iznw~oF3u^DPqlM#C$AYCAxt@OBJiKYxf-=kv?Mt<@ 
z@X&POMyy+@81d_RUncfmaw-S2oM7@C!T;0Vxd290UW<AsGbBR@%pgI-dk|0*#3&CF z0ydEZf)W@AB&3QG$zT#g5|h1oSON(XY?3jR+SaPa(~79Ix3<SVL~XStKodZUAXZU1 z6_itV&TupyBg7h+`>lV^B$Ei%bK85*z2}~RmA&`>e*f!VYyE3s2}W2t*mRDL+r|C9 z-BHe;*vF%45dPr)Anr&THpVEgmMG^A`}nF4xLvr{9lmX$=(*rPy-;UNcrz=pvd2^n zSL)zXy(+bgPpeXY3}em*(8-p1R3Xtv6xu5|ZyY%94b*Ei^$HB@{&Xygz<DtdNR|Bx zU*#HVe2GU;&gE_E8LA+eOC;w|J8TKbaD*ED<(~3Q?p?lTe-tiXQn=BF(db8%VEA10 zqjfj*F!LkAhBIjH)zBdUP6W@y^tR*dZX2T-g?7<1ql_su>SZ$vqKpY~r}R<HrfX(; zv@s0F!7~eNh70}%wqxT?8Hk-Aw7+e{t|KRWyQ21--OY-m>4}Ze^cBgxPX`g{_}Sgj z;{Nz*KOU0)AzWJ|{oj-ROTOmlKz&%Al>X0?;}_&#p&K`I^QR^C95bfVxkWI_+D`>} zt>jK%J**<`M(5?Cj?edJXX?3IZ!;XX-nOD`GBoXw3DKcgA;t75cZw>n{P>CB`0p+K zcAB=$-}-B*tgp>p$pu-PZ65}AingU;cc-aP{CS#uZd=cv$ANvoIBDKk^!U`zi)x%3 zO}h2-qJ1qkU#m*}V0Y?_%kHo$RFtnJ+SeK_Wq7hX)HW*&_EV*V7;VM3zT1~HZlWN` zKoT$!a07{e3vdAbjBlN4$hhwmPm`y~^EA)XJllD;^X%Z+!LyTRCr|jI_jNVdg@vQp z+HIYo=I{rl(xt$9;9f}^>G<1FMlUsve79;Ja*=r%*&;MYIBb)C4ZNt7u23h8@9Bhr zpMU&B7x}i|PcFf;Z_?6_@=99aKKaz@lS$Gi9h8L-5_p@PKNA5D&^XsN?nwPSo9_eF zdLOFR`$a_3QnpZ-p1%4Z+V`RAh5Cq)+akhI18NxRvkz>(52a_FTXLDI5iv;namw&C z@GIa&U@veGcnx?Tpsh#J)+2c)@=WBJz%zlTizmXO--_pnfa<p#Jh7_%Ejv$?=tuUA z)kfNP=x-nqm<)v5m~zts5q+V)scl3*SYa%;UVRsyY&^f(dg~9Wg%*hhYoYxJLPx|( zyLhoMjaZk#yErH2VR^I5Oc=}*dj)i^)fj9R?+BBm{H^{s0yly{HDz~!Ux|pkc2Z$% z1RP@FrXY0vJ?72C$q&4u)bxi8Qd?B9Ca7OE?$5#PV6w{Px{`#Vi9)<uL<~64Vi^(j z{uYI9q^XIkTQmRVvF<Xo_+M{3%rxjjqI;bXkmz3Q4rr0+GWcdg2<-cE5*?hX?^y|a zqfY`hD*@Qy{@sC_J!XYVj#E8^JW#)$6NdR?h5ES~Q24v-L}0jiRd;IUbd|m@`?%7u z6(;G$QxmlO`j?$B?<asFdi_+gu!vrk9Xus%V-9;<P?BsUUWAe`&^JHc(VCtp0y2TY zeAt`P6Y#=GR%|4Dd<7_0j*6g0ai8LLgtLVQ?wh@h^8|OQoLjkV2~~lc!NH-AC`?#X zU|h*U9a4eO@iBK&tYdZpu4wu|m>#>Dr^J1SBolnyV}9RqJggkQ8*<!YIsQsHJ{WRb zgJb@VNBN=_2}O@s$$QLY%KZ`Cx62<emqjU~B$z(WWBwA);B@&y$NiHMQgn5k(I+F| zI8mJ<hBak(E-pc6{WR<^Pw)*Ak2!-5dZT}BHcjN#0x8?2T%?<Xk}*kwAQMDuPZuvE zw@dl(9O5zOhCDeQbSZ!Ie&K0O3AuB8krRwMKM+9f&4QPNZX(e^a(m;@#?jE0HlaPi zW+ZISaC3N@s2&Xi)yD|)B3QYRyw`_+s75N(T97zMx>+(SQV0ZRd4+J6-wAV;j}bDG zv%Io9W*{f53OE^I*<~OQmV|J^>++U~gs?uqU)AONpuecLv!SalJPu)+X(BJ{f_@Sb zzO^&8k<xE5KP7$i;fRz0N(t@exF<=CJE`V<4f3LJpW4$C*_V3`wrBcn122ur<%VUP zIaNq$X58;#VsVx&x!8>7HQx#X)yd+Fi7lCizq9=a15F?HhL8a-u~!iV24Y#T^QU!{ zzy%a@KNyVRv@S+2W^M_82|+%>&P54kmL$+nE{9_yh&RjZ#d!=%aOw5)#$eD|pOKzl zro`tR4>7@@#^heAX)EMxiF)EM$opT5EPsMOt83~$^A}r{yuZuunYhI78Nb9#po4sS z9bXXlmrD%Xd|2k;BD{-CLiQf4p4jVY!aTfX$$?N4<?e#qS_tYheH+J5#sp=mK7R7r ztGKn`kN;%@_T%N+!p2{6Z{ZT_-a^JN9p-#lPvqq`UINcau?sDe5S*&13s<cQ{V=h> z@HW_`44C#^9PeKepR(9t^ix+E_T()7&373PfdQcx5<zy$(J;r}aA*9o#h&H)EAnsV zhC=XgnA)F!bh*%4PMgox2{FJ0W+`hvSAozyW=uAZJkndnBcE@U`kLxa(bQrQg(0>d zW6?^fPSE2)<fAw4=kNH<ShYBv(>R)C9OLM|7oMi*QJXFi0yOtBOB^24%Q{IIMghjK zzr7ECJkUUM1NN;M!~Gh^%nP*Ee0G%)<I7Hr4j}e0$*|!FWfgkly*H7k&|m6qP%q=1 z_oeUxSLDi?&yt{SW+p(3hn&+GJ8M1G+LtRQhd7PJkL8Ms*1k@cF@)g8AQj3!Yq?>c zCt3Vlio;UG%JAx0$gewJc0L!s@JzE^cQ}9hvac;EFoH{5<fmWL_;O8KLCvSba9?Nh zwYh!G`%|+Ms)kW$2NydlFE{L|2iA_|)2@vFqJ=tf5!QCxN`EmbmE&cz2;9sCKj%NK zNU*&L(?_cAXF>-zKgHecr=pD6z7x@U|5~UW$gZvHPc0`w^<R6LnFJT&OlD$KtHz+$ zU>an11p`i85cF8iVrFY$?WJRB(CCI_ao25US9JC2K$r@F#Bi9TUS4RZ?!KMRv9o(o zPU$Cx$&J{e^&=Q?X!rREbDV+EOBaQpQGbW?%0`C$h0ZJXAAtLYapTDIO5#5%+&Dq} z!I2;2bK6AzECtpB-Di+5JFiIU;IrLf&wpM~Ww_vZC6vZz<Y@vYfMdX6U>~pxcpd=9 z{X3jjBr|_dDm@aI2+R_f|Ly0MM}H{!s`HA6*9)9i9;YmFq9Me#U-5nn(D(?SG0uBl zk<ef5yrR+#r`3(sf7y8@l=f1xxCJN#N&y|%2-E@J2k4u>!+AwA^9P^d@AJSu;JCPi z`{r*suPE$5&KG&P=1Z_&gjTD2wu{9r-#M_eGc`i>i!uiI&P5v|&!lC*8wa(xpP(gC zDA#L{I2=Uuk-28IymRPqfSIt[c}i<OXTz6k>I#RErv3nvcIClH@!{vM)zJ_weD zu_-L8NU*G<xQC7$Bg`f~d>lC{d0L!!VW10^+~>qmNB~Y8H+F}!P8_d(PpvjzMJQmr z)F<LB!IdzF`7%cck^aLb_J<@DD#CfB0B$E^bzV@-Vr`q!&`=<s^68_Wa_GZ_v^?aY 
zU=VZGXAzm5x{LcyVkUd8JxnNsqtS!3fw-nje@5tui@0AmI$b-*P5O7)s<z9AVj!{a zusK!aLirXkGmKBs9|=}}+<^)RB1ao<^{^>kX;2B~<|3JfJeWv@IXo~nTtp$}Gjie> zs8UDG*kid(%i5QCBp~MA;#I186PI-nZ&k7!k8BiLJSuR>h7ArSYHD~<iO|JiNP|OD zR=9Lm@@Ua+Eq87EAwAZBPGrH*)zP)xEF>B0I<PUu3WRluor4HwG59U@*GT3C4#)*> z=T6L{zqglekt0JjG5z&|GWb4?+B5+{p^fgTufl_KesA{@I&g7rNq==^SGc5GcM%$N zDBG2)qExz*Z;jGN_-iD-y8i2BCq)p}2lKcspLg>w-;qwg(()HXrZa3jd!}spuwBVX zwmX!iwU<Qo&ds@10tJ4pnneT?LI)M|HS1v7YY$x9Bv-SsJ$Cl+xPAV;6Eqk-srxG9 z{LT5_#k!V#{GO}ibh%Xvw5jxHs@yzGY~@?`(yJD$GqsX;X$pypI5DT^o5eVu9#Z@z zw!tumU}_j8#vZXTB&Vb!;K(WYBw))aIfHo=I@urFFfxYS9PyXWVFQN5U;5Dw%tIz$ zw`nTQR_c;mZr;Y5QwPf3_^KR#GvcZKkFXD~jQGWdi~_bGh!>?#7uoQnunw|OlU~+c z^L5Ak3zWhaA4B^FhMMboO0k*O2GL)lD9_<$5b>czbCvKcSt+u*gA*=%dH>Q-Bc11h zzO7jbXN)&5mBf=w2anK6P$YcJZQoWa2#E!v{hFKxxm7Fc)Fc9iC35{|Lp7bIDjrhC zgMiGf4r2yquH{U7WdMio;XS4Y%Ry{q7#kv#gZ07i`7eo#MMh_o68E*Fd_#nrri^4b zX+slbsv>+8pmck%oLDU<yTk`c&RTk8mVQAOK~qMQ#2raos*zaqlvJZo>L()8NRJ#Z z8DReF_eq2zsjEXGs)yS{k}ykS1B!ZrY0f6O65^lslJv3g&wfpDg-&EwF8wrc=hSwm zPlV&n%%yE_@onOwK?)`GNJ6MQ0drMuBYWCH5dkD)uErh@*k}#GcFl<-;;TN+5vb|b zctkCv;*zL7f)A;QuO%(81r0)&aUz4EQu;kA!k@7i8RZ)koMaWW`5cC6n@{w!!J$5d zx}l)4VP4xL=BKi&c^{n_Qi`q@G{vimblcVR53b#<Dz&@nl0LRIeY=p^I1%{g=J)$y zJ4tny{}tcKG0i7qLLJtU;jl;LnJu8bQak(kB&;UDjom{#=dp=&3s}YXYz3C()*?Ie zpOr>*X$FUOQFm!A8JKahNSiBdY+x3bJZfD8n{--FLUM4+Mx@{vM<W!B9QJEa7>_ep zkk)U=K8R(rhU(X_faI*ZO}cn`5t*O}lx^j8|0rt-)o=Axn^DGcQTi!#7hxLTq?|HQ zB;T6(nrsCeYK0_o%)IO+CP{n#+|;w1ZmvD2c-J{i88bp63RjyKOE!B!D3U{RCs*Zh z&^%65VM(J34230U4bHS}M@SYS9TEK}c%)2<$h1|T;##zRtjRt@#1T%J=kAhOiw+Z% z7DpyWVK@6%9K^uVD9LDKj)dR^aZK6$@Lt)l;sj@`QSzBm{TlLG{JKM_^60Zr2w~nr zr>P-BaV8OjjWm?hQ3$ZCx+lyD%q`~4iNF9xWKi$t&pzBhwN9Dq-o^v9@=abLR#|<P zZAhQVQAqt{KX8b!o72`jV*h~V{I<6~6`|CSYi!tcFRq-OP_ri!l#8;keBk$FyRh37 zh-vx<nho1V<uSlQEH;(ry7_afSZop_PK$8boQKoq+i)shoyMOs4}aFK<j<xGJnq14 zb2)CC*WtE#b4An68qy4#ciQ16Pbjcq3r`~(syir#2qbbvYtKWddcXwdfk_9bi9C9n ze)1pT^3siP-~5MsCpR}_o2eh^LneJBm*p>KZqkLal4YCRR9VNhIM|rBqmzzcImvcx z66fD`zj4}M-A;gyA17cSC-oI$`q?*q&8~)Qv|C#(aSFd|hYbf}FFVB?n3Q?Svt+Td z#AW4x=9X}?aizE|`r{}3l-H&b6-{_j#STR!lD001vu;K>KT;*^ChCevBwCMFpg{JI zv``4YsjK1&142Pl%%A#u3rbGso1<_fngd1`+}!pMu@z5Me_5UFxiPYKqFL4_`WXmY zeWJrZUKzrrMuBcHupOq4Wr12sE*T-*CXh;FA=)Q+BMN(?DJ!kq?%Ww`xlG3e;lz2t zY?tl;i?gHO_79VwJ_cThq^>FqRUPlqS?IuI+CfSbNkv_1l~7eGaCwRmuOF|ic1ac2 z9ldo$TN~LhX~J01P75nyi&d8=Y@QNZ5e<=6v_R3rM}nN}5ae`^LV&sAD<=;*z=!~` zvJ0@i!orMuT*5kyXNzJnxfU!+#FTW(syy@yj7XX8#zD_9TWBSg(;KZ25VO;is;-&R zf(29n3U}agkC`j4sjX{=`D1EkCC@enOA~v{GOLYQKAdPN6+?W+QE4fLMhrW4RG<SI z@?qI-KY>bH5^K(rm4T}`=ra<6GP2}cRBE9K8^r(O+ZvKpJDL~qNguPmwQZp-8m7V@ zN^KFU8@Q*E7UJswZD=OYtct4KqA&NDKSOfc-#M>@o#)4;YLqtENdFS^3K9&dFBr|M z*loqE3X2sMmi8hv#7H5<kgna*Z>rqGc_y=ShEbHT^m7S`?4d%B+(-6dYGI-*t5E+< z^P3gqvBIHjFQNKiDKj-p;Y*MmMAXOK^8{gVhrBn?Un}%9(JqaGPiann?Ll$aX-{n1 z!AnT<v!xN*zo+dH+)yR$d)}fNUUOcJ)Xz$%vH5mur0%L;@p((;IW$raH52Q@7``Z{ z?rO>WyjwZ7y=hrziEYVZVX)-}D^!8a+Bc<5#*3h1xvWqS7I$WL>iwNNvp;P<;TX`| zOF6ZibFB4T(YJC~mj~?Ev*ln|9sgYVFTcLiEi{YE;!ZWj>X*aK9|va;HulW-D`RH9 zw=O#R&of(j+rwMS%oCi;+oFskQ}@q2q4x)O3<fKs&%WtzzFD};-G{Hxx)V?F$WHWF z7(*i07&g=2&}`P4G>k5e6yDx`kLvQs@M`+D)vGA+`X6%Dl9YOA?Qrurfg>XqT9E@^ zgWxOT&hX+yo>7=HCb!3BO$p54I3{j@qbN!+nu>Ti*O~vw`5RU!f_JXS+*x#-zFp@m zr}GGVhgT1=p-TFp#dtAVjM3QdpDoi{l*z?1s=d~(E;Fkn=*i8+oB<M)E&5W?I^M)M zknOw+hdKDcP%Q}tuai)WoEa!7&-Iumsf3KA>cJ3Ib?Vh+rZWNZ$pO`dl8LcBv_cAA zc18lYB|rc<0u%wEdTGEup|%_S`L>@ui4LTkvnNApm<q=y*er!iCv8V>#>+b4WIF<} z^J}=w7L&$J%unXCb|Wy{z3WVlMDNhz3o7S-3)6oqjx)7WX0HTEH<C-Do)>{-=9>q+ zXXtoVPHKfVJMk8bt&h;MII}u~0l79^#`5CdW6Ef!eb|E&Q{UJ$n$yP;^Jd)qhw~ej zB?c~nN*%0zm%$}MD%|<q*x?^2$-sGY)_qDIsjoQeKH{k^*%_~Mm`JG>VZuS8W+Qtf 
zS+Uu?;oSPL<h#s;p3UgxZ3c;@9(LZhh9?&RH`z;Ufi?^GL|RbrQ|i$u#k>L}G`jMH zn3`(J{6K%B(Gykos(!d}z)Wr!%sjC6=V@s)qG1MJN~uoVlq{jeI#XKPMI;@L^`RBZ z<X%K$e<C_&9&p~HQ%fuI$-p5?U{jDsR}QoVqzzw}E77mP5v&U`27f1F&0F8zlxE2) ze=M@fh-;2;q_!ewec2frY%fKQkh6Y#Ck=~JBu;z6vOFXzd7O1mkt`yaC)8Gn>0Fhm zEI{|uQr0z1gk4W{mj*%4Z*00DBL5ko{4X}2{Dl0wAi#aSmq_r~FBHL|;}P&0k>OU! zhx64h5vSKwffV0W4JQs2dFBrfQx(B{AK=BGc`U!}S&BFnE6QSvw?`~m^}8j(4$IzQ z_WzjR?fD!VI8Aa=N;O96$f<JeDN}@@k24)dnpa7nV{o~|y480HWd%qi09M-w5HA7H z5t)dJA9OeU2(Ddz+nofIxgaM#sfN{v)}n+p872aEFyGb(<(TUTpJ(1Bv9RRP<lWbe zn*X9W;yA^EqlAv1#u2Gg|1wrNw~{@z1W#o_GFNuVYLs|BsZ*hkg_h`Il0YDiCHm+W zmS~Y0wwCC%sMd>IWzW@IV2KtfOm4MwFVU~FM5pwL+-yY-+$4mvEEjvjP+5JUm8n(w zTE>U0(q9W!VAi2soP~_07HUw%Pt_tTYxD^79a6Fw-(PjP4xwLxv3Ycv!%RV}m`xvC zX`nx*(H@IF+EJ)392Ul)-t@Oj>L>VGb7%C~V}eWde6yYkCcYR2>L5_BFiz*D#3I_* zY)|v0XvW#xv=Y0=d;t!!=&NUW2H8t2>2H>>rUwQga=@Hd8s$Z+x+rNk0%K7J*cGvn za#2GFTwHgcx}(hY&AoeJJ>OtvvdouZfGLkWz?5@JX6KrhfDJ0`xz(qU+f2hY)2ykx zl5dMrs#`m^OO;aljpVNpXHI7j?NBazjFr-P<5NZ{lysyym6ILI!i}auR#r=s8-sHH zo|F}x&aDr!mLdRfA3dBON<#lrL!uSm7=o9syd*hDuX`F0HkX``(5Ixonj|KOyUg3^ zQc-Q1zi|oXoEJ7t`z@l)r8HbVnV=3@R147(4T%Z?MF>|u+vhb+dmd}f?PMV8SW8Om zNGeF;<~ukE61hiT7Fejt`7XmU^|R{ev+p#`i$*Qly)%e2TjDu=LV)p<*h6u5gyTBv zF2X}pxW+%<Fj!P}AZas9RZ`k$Jvv1owwn8%W?{}x!+bkqQCghlz9l!;d?w_cXMXg@ z&=}JPT7tF@L2ahnMB72@q!wG|Y3@>;eRIVAvq#45Tg=WlQSFR|)0f>5G`p(9xM7}| zFKtPEbWZkN=1qLjD*3c&W=C5QZ78nOyIt7^bEIKqkTQs5B8y0Tx?-c7F3RU`pPOs` z_?hl<U&@p~CMd0Mfz5AN1#S&Vwsi0NvWloHbK|_KEOMjJm}q8E=E&9JuvOv6IZ8ov zcoQ8$o#cQM?=kPAi}LePW480inT%^k+4bRRjjowT_3NF_?RV~cwfUrD02;pIjR9GK zQO@U%q%4cq2SOIu>A-(AYe*|k@#n%-mt4P66m+?M)nmWXqWP-^>As_PEzQPQQFQR8 z8-h3Q39C3Q91oVz2*#A-KL%2bY;8!cmJ9uHA`|<v{z~0`eQ`+GHZb5=o_|mCd#>C8 z$NX`>3!Xc-34zzMQ(s0p^HbkPL0@}t>MK)QkhQHnsYONA8Y3sjLq95yD8o_vXX;;L z>_rtUVz~Yrx{&>y!BX_$%=h%m(WLsmNbc^@hvIY`rx=`G3p{Y^ZC06YKwy@l-|)Hh zU=6u>PjJFvP!kJ(Tc+sbM_EIjrY|G=W}4NvvWB>k^nM4`K&TNt=8t0byviN1Lph6= zm_yLKL?eam;`vUGWXllNQpvgH+$3sPb_yL=Bg|EjmK*vv&mK-$JqW8%=|ASK>2#&P z_Hr|Y5Dkgu7#^X*C_?v-?p6bh!n7?WmSW!JeSwnSm}M7T5((zV1Sgd@d05#6N@`iq zIof-m%Wyrh&Os_zmvwFpf)UBIy{<8BeDtovo%NaL&_|tBV$bJ-C;E$apFPY)zG1$1 z&owMVml>CDJKAdL5zE6EYkt$pYmLfF?wDG0`I8N*#DQu4-A7E6KcN`U27=18Fz;s6 zgRIKZJ=&bE;>8osoUL9Ryh=TbC>SSDx$a_ae4Sb3Y{(ciQKVJ&x*C=an(TMl4xLH2 zXX$$5{C?<{&`X7#bw|C!?@WU>(wf=M60Egk4C)t`yyBd`(C=(qFld4VoFf6R4+pHN zK8Ll6cJ>?zJRuIOK|)?8A%{uGgm6egv3W?S%i_2=V{%GzdHk`#X)(c}lhxAXtow#+ zFHp)}cHUdTEBD@=-@HTIVx!PQ#~t7^T8*<#^hS~|xc9~6%di^At;m{`IHO;U1JyJ& z?$6LV#Y%45gWjnIu3a5-`VNydN5;meS;L)mKjUK-hMMbbbJA&Cbq9~|S=gw!q$wS} z<Z(t^y7;u%;xGk;LG3lcOw_zt$NHvB?!ZTuJIo+vtIY)W*7UDg7nZYhgoJ`|`U@?# zf&SRW>>!$M`UNJWuIMmgl*gmkLk_ZS(?`c%lMZ(&XFK8NP#)0^vSl6vFEG>}Yt=qY z>WCarV-#iQR(@uObO3d9Zj~Ae<}6f(n;Hky?Oz`=r|lj-I0#^gmZN5;ee)19uN-uf zbLW7xnioz$Qqpv@afoy00q1WU<dahvrqv*^Tb#kb-RY_O47=@EAgz1AjGqJEU%$BD z#{P{%{LcENgC^i$Gs0h&&6#v8aM9Ug50ykMQMk~#qpD^cswS=IIHD-)jLMD@Eu?Zl zXzx^j#tYp#^O##HK)x^gH2Y8oBzw6P^DLtqvNE>|&pEgH8343To6masFPXZZ+i2fw zw(TOJh6NWV1zH#tgBTU7eP2E-U^0`E%lVvRweM3##v6R|Hc)r2ZWu6UP8uu_SKF^7 z5Ei+b&tX|(bW>KeN_C)b7q?VhC2@*pFT<#gaK20zQb%f_ppm8Xf&=AdHBgp?2g=0N zzUt06{THYVS>0fh!O|&%MP5GTWr9DpB_rmtxWJV%cw()<Th-`+9pNw^epR)x<&H5y zNn}p<5E>yvDADh1(g)ek#K;gD6diD^_G>B>y~3*2ri=>?y@k#|fr6r^y=jEkKl3E7 z4M}aqf+KgXac<4$1&vT`xA250AV##H0=5ek@I!)vK3Iwme$0oDmHS)WNy*wIdYTYj zZRu7LFxIS58JMfP!&x-K4>+HK()5vW=nSz9Me#w3T`4{giqU44ixK<NS-`KgQcF~+ z$)Xx~#$%3oPu5N7C1^%ShRb#_>rd!tunBaOeaO;`@Gg0VSi}FyYeUlc*jfuoTFFEd zOR8Z4RTBHrnM_v=qLS_KTIyGvYt1|?i!+C4y??`sV=b9MS0Ju6Q)C6T`W3;Z%o85d ziENh~l0#_RtCgzGELP8JHB9M!#^AHfT3W1T^h?P+q1$V+gEe9y%{FPzuSsRs@Ay-r z&&$%MWa*cg*GZ8R;SHL@d5gHczoSYe+a|;+l&uAZooROH4pP=g`GeNXPLfFzb`#S1 
z2_-JE19Kg4B`^wb`OGw9drEbu!t~n%qeIJiU}$Ld55)5#)skz}?aZlPlQ8z#UJ#-| zYO^vmzd2P;V*j5ETWQQ}A;NIjCB|%xCEmF;jXrG6JdLv!xSAK@X@Sdl!B-26nk^;Q zowGGGn&>N2cRRN_tq77S`L(hZ^0u`V19Af$;OpSM*@-NJvG_<B4C7r?o87^iy*8Wb zMrpq6c67@_sMBrzt2>@@hy5J^v<IIiJ1y|!Q!YK$isdqQoTPDML_TG>d5CVZ8v5tF zwQ7lkRx1I6-#=R@`m)Md`q#Na+?08k)vz7fn~b?P7;2Kt8t}>IiMVUrKGxYujGZWb zLanz`MzcgG7IDuLahiX|7e$b)I}hh9p%{<(HOiH54&kp~Ytv~>ArTCn#S8~^$oQ)X zh^?`%yGTMs6NUtL_ntBL;MAmDP#8v#36b}%i_U$y`ln#i)B;*>S*Pvjco$ClL? z%=q~elnuXpj0WVh4c6?B5^b?x@W;C;BYJ#|yQV(-^BV8xS@qdyP_7}XGtF%KKWAjn zLectNCDB|O$s?N`pgU^fn(!runKLO{ZL*IDdN#goZ=z)9FDy|a4b+7tIf&rq{hz40 z&UP~#62@?Yv#|LPJJk&HQ3e)?F*x^tH_b5TT8Z=h%QKll3XntrekU{W1ucz%R_!vl zu6JTwtI@B2wku%k4*@aLHLf+aS<jd)!%M#cTQ)o{<ty6y;vrvlB!}@s{CO0_`ltZs z3fJ>dHs*_rgZ{Wh2W%`KXEPa`u}qU^8Nd`Gtzm`f-1-zBi0iySJ$H?3COIw5Sts}8 z<+Vm%m)h*yTBpLCW?Q^x1F!Vd+Cd-yYm=~2?%cW>C+BZ7&rJ<xIqNRtBg?sU36IuH zGk8uOY8JK)$4P80(iq7HrP*8qcI&NRs5o4XL)iMFv+i5c$~Hy3oMB$wp_-Th?yNKL zAangr28eU(Pbpw+wfW(1ey17vQuDUsxUj8DIfV^QQ0G0jGyEy5^P3)CLis=cawvai z-5gx4GVHJ%DF#_>{WkI2`jH<!Izhz8W}oAaF^s~#^M*_X2XtOm#D*kvo)l8G*-}>+ z<t5PsS#I^dD)cT0YpM^@RaIwOUV(>b9w~ZgNut<T7H`U!4Nfz|w82YY^r-kX#J6>( zRG;4bHiKMr_Jpiv$aIiF9yPwvac%awnv<K8gmQS^5Q443>2~cp8C&!2=C}j(2#tMi zjAaHm5bPpSUwa%RYp-#*{ngfz;(tXArj2S*S=&8{L(57D#>Sy>ye}&aBu|6{WXYoR zJy=+9jhe&f&&Pd^I=}K3&D!?hXM~&KKNL|-rI@I}J}9IBm%CT4Pr(h2lA`RU!W}#z zTt1O71J@X3uEEEm16dpYC#BMwiUd{3p3PQWl4fnzvSl_Q9@M}hNeE;-!hE}nWGGc1 zPd%s4GDneKLvjGcS1HB`9XaviNE~IJ5)rQKQ@w;(FbQa{p*Dyv{NvkHXAi;5a-v(C z`r^gH3Wfzd%G^(xROzgOnu~kNc%v|Y{{$u`D4$wu6mDT|WDAsPz{x$PmVRmi?cZF+ z-U3yHJ4XL3ya%Jx{3B1Os@RU`W_KkhwTO`EP<`_mS~KR8U+7dTIE{Ja&Tt#Gon$nl zE(dWJp-%nLFGR6dIAy<_TXIXDnE(n>ay2-K8OIy5nAx_qmLyOgtQ6Fj%*-=qe@HKi z0nCq$syuW4!}7)5RiQ;?m+>J6id0FQbux>KbU4=#b?)3Fg%G{}A@pSk=NYO@J@Gx( z+{gD5$inzGt&2vIBM=9%&Ys$We)D#=;$X>?T(d~*H3&8|nSsg$L4-o()4BCDnT9d8 zE_0<UD}u4Lw;fd;UFHK1Sw-$AMSfUDn)r(v5hd^Sk`)Y2*Ymsk6l$eaD9LZJB+_ZC z?#wseq9VdWMx##Wq_ehmu!z%RL@#$oFo~*F_DyBDl?uh~G*>`&P_=OS)^ylwt2<5* zvwCk}v{^^0RD(Mo4Ce-R%T811{Z?J%>mVhkZSqsZUab`AH#ms$5NI#mLjx`}s<cDr zd(bT?x#j~c4Ean`t;tA{$e7DliznxUyYchy8+U-d7c;x*N+iTJseQy>ob@d<%w|L( zocFxQ+iwIN$`Lbg(^wA>sk1CDaCHq1dn;88aoAtv)vqavty0V_rw}n1A$&%RTW^fp zY)}2T(vF=bG5SC~B*4=@Q8ksK&3H(1Umvsi=+-mqUO_!8b(bJ>RT_kck`^w4=oz2- zwmQq2dD6<s{fq(TOjQ^`MAUW8j=)Q)pKZQtBiUBnNhi3h<-*+j`^bGNgVvX9{sEGR zNO&hvNz2S>)<X=Yal0`ZAdBD?=G#SKJjZ;G*RVweNW@0_IHN=HbIvdd$%?KtCDDXl zS-puTv{HE}Vwupja?ML6W68l~ZcsT0fl8=k*}`^H<U@)jw_TZWQdA3@6ACGl0(xdK zv6O82hzlWrpNr9j5G_^2VwJ3Rizru3uw+-GLsw+ulN!^ZTID%+Zm>hOs(rtPvK;BG z{Y=ms-NO?H{RW<b%v>f<@R!l@1ap~PGv8k0k3-q__{PCC@7C5Fh^ikPxV*RPmYM_6 z0kfvSzBw?k$ERj&%~qlI8?ow$vto~Q!31rW=wT=8P}xDGS$oy?u<(xFOYiHeWgsP# zT)aFG=O0)ID^^KfcN36{h|5_lk9ol<i^Xs#!VJ1=)5TyRo4{4=Mm$HcD9|-JJ&<fh zkv<f^_enN#g)O(Tku&Sh7?;YX7>2Erhw1%VG`GJQ^J0PAl8jr?Yx*E!U4=K2it(Ud zQ6rhrtZtLI1dW*3;fTHQ-7(GY#w6b|7=sK8vsi6UF!k;QP1I`7T{{)D%r}j9f6JY_ z`axh=-H>^}`P?qy;<rl2GrJD5de^xKlln23Oy<F+EPK<&BrJD#Zc35s&LNx|Ji}&J zXm_K>er7j3=la1cXR(2P^}~G5U@)^Y9R^W~(Yf&ei6pNG>XS)n>Z@{y@SU?&+x_PP zwi4TIm{g4?h9h`GI^_u<CDQ?3teJ-(%{L@AWgch0dr;Ksu;h1GD-v@Vd?KD%8=f^m z;~-ZoK9U+x<NkT(4r1pAmLrJ72_nawwuDKdgr0<*Fp4!2$;P1$QjoiH>ccL{tvDS( zC7i=<#ERSNqK5joFl%3Dof%|KBvEU5qQ@ea%d`kN0xVuIHgfZRyPgfKsk;4%Cssd! 
zRZy@kcG~O{Xfb=dB)TDUpTCpV$~J|+y5e-hioLf6Tpsh<?=bFK?P5~WABz$q<20L1 zgK^Njk^zL6F8vdO>o_n_hSP(E;qsV|s#j?^8BAB(5Hf@{N#z(eFM>tMXu;~1uk&K# zE;Rzpm%)M=;(^<h1j!5clYZyCd5BydPFZnUI5nru$8oe_LALrZ21JRzsDzD_MOjK( zk00E|rj4;t{uou#?P7|O!p$-N?LHWDp|9zbIyggai<?WN4itPete-Y-G=orT;ji9@ zLZ=ymGJHhw=e8|l=poY$b}_LL$-0_PXX|5f%|!A;LiZHb1)@|=P1CS_a;kCA%$JSh zxHn`U3rtF09;IJZvp#yJae2*p+iYVjBMKEb-&RqNfxq_i50rAjaJMzrB+u3l!Dye9 ziMZoyHmr2-3XD;W@iY-=yLLglF9DNcS7U9=rn>O${@GT2SY*Q<WH6{6fu7s|*TK2< zT3P#Nn0GR%^BYE+f1!axn_2WK8jB`q6;Wudt(Y3NX71&$7WkD1)-24lgPvS-^RHD$ z_24>}7pOi8US|%YNHQuI9Dx}gPKACg9BY2xSRbtn$9iuY9oSBsmKgV3c(wEn=%-nK zD|%o2NhvE{vveJc2sn-K3I^M)_Ob0-oNJyT-AUD_7&*4H{_58PGyIvmsB7>#GLE9O zM_%Yt+6~?L-bud7E~=~mV~m!R6?=_4{MCo0O}Rex{k}23X2mR8`5ssCbIoY$sMFI9 zV=R9en4=k(1bGJ`JxbOSr0X_SY1>&{IxnuM;$(R1rZhlZsNjrRzXB)?&li~var z?B}%klDLWDf^4)nO#Q>nX4L#{frSueKHj{6e&Bw?L>`d{`ZHFsWS3ZmQoc`R>p!Zt z)MWNo*@Q0+(@KUAHQ#)n2!1ZmKjktmg>5tXOlEwvo@l;@bE{CFH1qfBRZ%~VD0^FK zYxkW_5R7B$+uR~XI@m1DA|0`t2h;L9#E9HeM)1wN?ybHta2K0&yD%+>v34#tOPGE6 z`4T2CtnhJRUgKcr&fU(Poo6zxgN->hy>T#X%%RSme-YWd)|AY6<Q>vM0lNYNQ&yn% zUR-P#5K5nU)Yx-dWQHOQ5Jo1y$g%9Mk}!8IeeMr47nESfX>;2=StXRpPm!JqVOg!O zss1JtXWbeChf1w%MT>HGxYweE6iHzp10k|K23P|lvUm(HB!wrCOfHOAC+sN2t35LB zOh)u5<f*#!IgOW4DXvp=1(w6XCDf~{2e47@U+w>B9syRTR=6tT`Fqj2nANt5guo2m zFRo1DZ{oTuaTy*M?|e>p@X=?|N4fNYq|h*m3`rtjb3S)K(tr~W*Ak!p*pjtM&|QE` z1g;w|3YQ_Trwmq5RfH^6ge+BrELDUoRfH^6gsiVr1gXj)W9({XO@BJWxitVf8QE40 zLOB<V*u~}OEb%~M+|m&GzUoKm-f$<4BQ9%Yue(_y!71{a^buyY_Xq#|XDDPs%>2Ws z#?1K7`D%?yj@5<1AMJ1LLKc%*@PGU7yMNKNXMh&qIPd`w1JXJYm<B8WRsu!9-9SC? zFz__+B5(jW4s-yHF5&^nKrT=M+zs3V+z<Q!*a;j0jsd5DGl2bbjG6(Xfr&seun_n< zPy*Z!JPqsx{seRYgCIwZ1g-=!fTchQPzP)SegOOo_$_c4I0bY7age!&1CxR40S|CH zPzG!S?gbtLegW(T4g>E39l%IX`-wm@a3j$7_kLoU_KWm1ZQ4y~+M(s#*}g5UJIHUI zPSYM7*7F_qSY1$D>MeBZ<?cJYy4$<HSa+`~FZ8-sSC+4FS5%g-@>W$%;b7krZdIkX zK=(%axhGU<{MY7`8>NNrvT{ksyGmSfD<~6()x~9nZqEk2sJu*h8hXL)rCx%Nv^H*R zh4Ps~G%44(vEA{?E4*bY)KyihDvK-hDHR(epUO-M>aj|vX=}79ZIxE8Rcc=TP0<Rq zQvT7GTA603_bVh>ZDN^GT57!tV<JYH(52a8w3uj@Ju@@2pZumLX&x2Wo$Og2>(H)C zO3L#<8gjb@-_RT@i&pZ}wDlG1`8fyy(bwVN;ozTqYEO+#*R)Fkeo@gjd%u`iNB_71 z@dF1rU4t(gk}&k*OA?0-A2D*&=rQiGmyR1h;j+soUUB85$yZIeI_a8gr%szb<GSRO znW?j8U;nkV^c&`6WX_$JHUGw&7Gy76<XOBVXDJptm*;=|=37?WdfUo^+gBBOSKm=o zTykgWnzHhWyDF=6W9_>28}9zb#_CO*6`47+OuE!lUR<VoD=E`WTBf!{Tgcx9+EndY zS}cRN1**Im-riy7mR8NJ^m;X(IbJ=tpwv+B^CI5UOH0dFN#shSOfO#Jb$cr-%PZZQ zHjvI;x?oXGj^!esTF(51^CCXAj78b$^B4BGESZrsb=ttV^fGrrMMY`xssg>3AyZUP z<z7?3uq?n`*S%{hbQ!Xx<pm7gBCmUnJDhiE@$Hobl^fi})VZ?KyGk$JFeT1Y>Mf}9 zGO)|^f>p#MMnvkDSGlW<ii+||e7pr~+^Z@4n(|67Y4Ey6m0*f0Jmr`2O&u6_l{>ws z7zSx)=geOaF>~~y;wpDRRh4(m?WG&sg+^s@*&XgOl3FXppd!U(#d>i;Y4P1E`M9ML zo;e~F_7c;5yKx8K?hWNeWn@{WxaaF`g03mA(%q%ScX~-(s#EE$GD>xK`D*v7g3?mS zjFyrzUA3xwO@*4`6R%!XT6u+gwNbW8wW*rn1wDl-tI{itRXUaDzw*o|EzK?{E>m@v zdS5H`R@1wz+_<C2T~$%Aij{)k41fZrb3}thw%0X%+N-<nUaRw#EVbHOFQU-pWvjeX zzIuB|K2o+M$zu*FN%?v*C=B^un=JlDnOb!iIXxlVMc#r6tF)wZ?R8&L$92UK5mmqS z#G7%!cvX7gm&BVc@hS{P+uGtv-6$yS=^*Jzm4TFtIdOruzpcDXmhGz<II?=Hg|)j} z*Q7|io_eeGlzC89PInc0*A}nx_Jj?!k#~Is^M*}9TBc`as&>9cwU0rLp)hM0cEx%T zdqSa%f;;<$zi_*RA{7?s1r%YR)#VY>Qce0w?_GwsN(v*Rd`W15p#xdT))X_L7<AI# zGTe<aqe>cZUBTaR%G35qstwOO?!9I7T6x(TZ<$UVB&=$~^M);`yu*-yRjR=yteQ`& zS;TaiuobdCcdtZ}ge-4fHG(xQyLeS)c~$vp-JM&kYB^`pr0(`uU@dwqPg)%FVak*# z+AQ|&J1SYt$_iMKjj}t-%GZ@$PalSwFjLm(v2k&1q7rPTTO#x0<g^R2zWR;gT^RfF zdm!SyiFdUb;*JiC?svpDyWh7(yu<A4cIU1@_xpDu-eYQN?y0G*VMDgvQ*+OjnuLD+ z*patx-AaLyl4?9P^_oMQczLoXuZI1WP1)nACwuqAn)(`IX>7|yMMVxr?D~p|brlu8 z_G7&NzyG<lzW*kIA6ftU`ke1O3ry+D{?%z;{MS2tt=97|O8aX6B2(C+_56#5xcycB zh2y*bzwdwT3;pj#!{h(q5fD||{SSfXuk;J|pggxk_56#D`fC5e@y|D=|6^`{Z3akA 
z3H%G^C|^DAE)ntm5B&Ou|7x}E3FXpy-mSN&D47H`wOf33TkrX1eM6)F-llKex9!{a zf9Jd3d*J&IKJ@TEJo1k}_~E15AKUTx6Hor=sUQE3pFI83pZ(J_KmWxqfA#Fn=bnGz z*S~r3rQiN;SM%;Ydw<{3x^Mr1mk<8o&?|?Jyn6JtKfeCPu{Ym(`}jZq>75fN-+k}Y zzx?@qv+Z94r~mDP58FTb_m4Y1Idiu2)4zPy#pTGq`9O5x1J74F5dCM@|35qbzq$SY z+JW@K{^~&bpI!f~teI=p%&Zd9gjUFJvOAlfTV6Ks)3UR#E-bv77k-{>O-lzj6LXGJ zM`vwe`P%OHMVywzImcVUk<<#1Zrov1>6&(<QL56o5nNf)O0TFa7MetMLFK9<o^!po zR~j5t#qY*~GWAM6lD<Z|lBPylk`7QtybY3u#Fw}dN6RVDjmkniB)!UF^|rLgsH_UP z<#`LsyrGY!pwZ%-U0$YqbBxflK$o~0@if9~gp)8D{u+n;5RD~|qiOlN99<oH#C=(n zw{p?#C7cuH_Z*Ui;(_0Sf+{_oGv-=I4i!d)a<jgzWVCE(N(Fa#Zzx}%t}V;STr&0A zDH#hOKaeL`QvwP?c_<b&wAzO%Q*#=CcAz<E6&i;&qN!*xX*hm!7A;(~Z0UGy3TIyV z4%3sS+^&+reNCZqzlFRuaH?3dq`X`*;Fo1R{+IsNT$HXIhC^v1_TlT;X^TN)A3A?h zkaeNtX&N+m^$dT%0qstH;qQHY{9hc`+y7vM|Bol6X)git3&+1V!hhEEG%XE?^zWPh zdoz3cAC8DG@qV7#+dndY@lTy?`OAAO@8NRv&1cv3R=5lKfBdxz`;SUb(^3HWT`2xl z^LqRDE$3%9_V({vzB?Cwx&Kc+J#~9A;{8~k_9|b}6Yd)k?|t)|p5Hsa$aLQRdYbkj zAir>ZBmJ+sIZe9;i1gppryTXS_V$nL*F@;USBGfC;q?2K?~0NO$CrF(miG4V8~^$Z zz5OHem-q{7zuf=oExrBw_UHKT_4e<Z{!8Ega{r~<d;9k-|I1JG_U}6{zx^Z2U*q?O zCwuz5Z#fqHtamzn{fl<@_U~KI0SD5wrJs^X=r>3MojVc!>izt0p32|GQ&|!<&s*lL zgt#=vqLj_iD@!xiLc4)ag`Y0mhdDx04|5>O?0E&n`rPu$94I-ZUTbI6zNgJmypm8b zw#R?6K}3&8G^?PjuoMj96G=6@ywE81&V^XJ5Sk64-_kOLVn3%6QZdB99CllX;qZc@ z7kCTSdcWZQm!4Ftg!43Ql0B!?3odbKG&x8?(hCbA7K8uvi;85TR7l)8<!jbZq6Nie zWZy1jwbFsHBXz%C(#X*ZEk}505=Y9rbVG$#n`QYHK*g*Oq##}U9hg(8msadkf$Qu` z!_>R(7W^M7e*=<zSs3Zivh2&sic|{~X0Bfal11&wPBAgY*eTrwy<d->UzOp7hJJ^) z(nEEn>)w|f1UFHnFHL(gIt%)yVs2=UsdtN!af>R6N2;LxK6<|NfDkslh4af`eF+6m z)0!jQ!9K$7ITAO0jz`lHq%{_0X3P5tN(1MlxKNE5FdyxD`_j@X0$BW%S@IR)qI^x> zyE!eh<x3T@LwX~k^goMeuceCoIv?ET`}REAT8$y?O!NZihau7+qv_X_ImC15+au{^ zg*g?)WmY%e6eSsE_E0u+bm3l9rE9w+&o6pt3oZ~NPph-%6&HHv6cto1EzcH8@eLbv zueSUA=`dO!SN&kk8ci#(=UOyz)dKmp#fG<XgU4H`xH7N_RC$>_CDPVQi&xzl8mB*r zXq(Ugqj7T7_*7`$Qn*y<Rchq&raf$1qL(f!TL+S>{aBS?iP!3mTf-#?^-i5iIkYIy zvkydkGkwAIZ-|;(YE%_T+BX=hS9>d&X@8DhFekg9!fHo)VvMc3EtZyt8%Q%FL(vv# z)_jt-m-$7!IlWy7(<b>ZP|O!=%4zS*IFa1D*?m7zHOeWzo6==yb4tsryrBtvuQggi z>ruM)a71ku8G41G%jkWeSExKKMrK~bDzG86%1Nf!ErdI}rlO$I+g;n--Y%5-n3OSM z9OV{N77Jr0UArlB$->M9oCgX^IV_dgmcUk!bT#ddR-D2`tF7<Lq%A_7EAtph04cpH zgwBAy-GGlqoBj9i|LzvpB?|HQ$<v}xh05y+JtH0nS_#&3!JqgG{P*v_Ti~m<z`{SL z{pRPxewXpD<I>dFDt#B-`T)nMV2ubY{4f4woL&rs$D}RvZs(Z@^aBP0$f0Qcfmk3O zaD<-XCf`y7@e`h0*iX`xxbj3Rhsr~yi?|I2E((F<Jr)r6>41EvhrZ{8zFFW^oFyUm zoY0eHTBV=QQ}SjxR_Uza=>}MEkw-%21CX*xJ)}G}fRwp5^xVQz{C$A<*8x%<xd3<t z@Pp9zcAiqc#{tRjM}UNT4v;z>0>u9fK>QPF6ltGuoAKJcHblus#4r3Eeullm-+iBb z{ri6ZweT1652y2A@9DbW&#J5Yg1`S7ZE<0ygjK%_6UF~))L&|G!66XZ$uBqr-2Zjj zfSUY2J`{?Ef`>)h9gnkNt=zI<%h*uoJo%3Gvi%9`S^L8iUGkQ;sYX4YB7F0Xw|2NK z?=SqVMfO#GX`$z{Uom`oDEv;szw+3r$A)YF@|gM9%~oO&f4kG)v|Ysz-BF9*y7eu$ zcH3JeZ(SP^(t52udhAappr>84$%<L}Zx-!tPAFt}4gW&KztLga@bq3O{H@<o&c0<8 zd)47zQ6Nog|1eFf_$W=QADON_Nd6LDp3>KX=g3d?)=o1`;TQ*b%AWlwPua^IJY^Ce ze?Lv_#ZU7T9HXA+5T3X26r5%}&tW{f{+y-_=ed{X2%h)y6kMT@=V+c8Jjd`n@h@qb zo99zJ$MSsURGP91=Hj`YZ;j^$9_{a?X?OEH!BYm?ah^e*2YDWXzWY^x;iK><NmuF= zT9h<tpA!21!H?6l?*iL^dx3hO4yXav0~J6Ka0}o8vVd7YGB6ED0wx0!f$@MF7zrc- z34jZT2kb!Sztbmx2}t-8JdXi~fxW<sz%#((z@xw;z&2nbPyzI}_w>2+=@jadL7(4y z#b1Zbp`VPADB?+6d4_+|PVRo+k#0QiPsT~)ucpF^-~N%s&+_Cfjr9Hxzk4$Nw)lss zmkZ@sGN!|sN4^W6LqL8q7E^(*12QhY4?GLJ27C+*reTtRg@9a?3CEd<Up}x7cmVhn sa1{7=KrVY;4P*nQ!2j#Nzb3L0-REZu{lfJw?Z8eMa0{>$=sSM?C)~1m4*&oF literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/setuptools/command/alias.py b/env/lib/python3.6/site-packages/setuptools/command/alias.py new file mode 100644 index 0000000..4532b1c --- /dev/null +++ 
b/env/lib/python3.6/site-packages/setuptools/command/alias.py @@ -0,0 +1,80 @@ +from distutils.errors import DistutilsOptionError + +from setuptools.extern.six.moves import map + +from setuptools.command.setopt import edit_config, option_base, config_file + + +def shquote(arg): + """Quote an argument for later parsing by shlex.split()""" + for c in '"', "'", "\\", "#": + if c in arg: + return repr(arg) + if arg.split() != [arg]: + return repr(arg) + return arg + + +class alias(option_base): + """Define a shortcut that invokes one or more commands""" + + description = "define a shortcut to invoke one or more commands" + command_consumes_arguments = True + + user_options = [ + ('remove', 'r', 'remove (unset) the alias'), + ] + option_base.user_options + + boolean_options = option_base.boolean_options + ['remove'] + + def initialize_options(self): + option_base.initialize_options(self) + self.args = None + self.remove = None + + def finalize_options(self): + option_base.finalize_options(self) + if self.remove and len(self.args) != 1: + raise DistutilsOptionError( + "Must specify exactly one argument (the alias name) when " + "using --remove" + ) + + def run(self): + aliases = self.distribution.get_option_dict('aliases') + + if not self.args: + print("Command Aliases") + print("---------------") + for alias in aliases: + print("setup.py alias", format_alias(alias, aliases)) + return + + elif len(self.args) == 1: + alias, = self.args + if self.remove: + command = None + elif alias in aliases: + print("setup.py alias", format_alias(alias, aliases)) + return + else: + print("No alias definition found for %r" % alias) + return + else: + alias = self.args[0] + command = ' '.join(map(shquote, self.args[1:])) + + edit_config(self.filename, {'aliases': {alias: command}}, self.dry_run) + + +def format_alias(name, aliases): + source, command = aliases[name] + if source == config_file('global'): + source = '--global-config ' + elif source == config_file('user'): + source = '--user-config ' + elif source == config_file('local'): + source = '' + else: + source = '--filename=%r' % source + return source + name + ' ' + command diff --git a/env/lib/python3.6/site-packages/setuptools/command/bdist_egg.py b/env/lib/python3.6/site-packages/setuptools/command/bdist_egg.py new file mode 100644 index 0000000..8cd9dfe --- /dev/null +++ b/env/lib/python3.6/site-packages/setuptools/command/bdist_egg.py @@ -0,0 +1,472 @@ +"""setuptools.command.bdist_egg + +Build .egg distributions""" + +from distutils.errors import DistutilsSetupError +from distutils.dir_util import remove_tree, mkpath +from distutils import log +from types import CodeType +import sys +import os +import textwrap +import marshal + +from setuptools.extern import six + +from pkg_resources import get_build_platform, Distribution, ensure_directory +from pkg_resources import EntryPoint +from setuptools.extension import Library +from setuptools import Command + +try: + # Python 2.7 or >=3.2 + from sysconfig import get_path, get_python_version + + def _get_purelib(): + return get_path("purelib") +except ImportError: + from distutils.sysconfig import get_python_lib, get_python_version + + def _get_purelib(): + return get_python_lib(False) + + +def strip_module(filename): + if '.' 
in filename: + filename = os.path.splitext(filename)[0] + if filename.endswith('module'): + filename = filename[:-6] + return filename + + +def write_stub(resource, pyfile): + _stub_template = textwrap.dedent(""" + def __bootstrap__(): + global __bootstrap__, __loader__, __file__ + import sys, pkg_resources, imp + __file__ = pkg_resources.resource_filename(__name__, %r) + __loader__ = None; del __bootstrap__, __loader__ + imp.load_dynamic(__name__,__file__) + __bootstrap__() + """).lstrip() + with open(pyfile, 'w') as f: + f.write(_stub_template % resource) + + +class bdist_egg(Command): + description = "create an \"egg\" distribution" + + user_options = [ + ('bdist-dir=', 'b', + "temporary directory for creating the distribution"), + ('plat-name=', 'p', "platform name to embed in generated filenames " + "(default: %s)" % get_build_platform()), + ('exclude-source-files', None, + "remove all .py files from the generated egg"), + ('keep-temp', 'k', + "keep the pseudo-installation tree around after " + + "creating the distribution archive"), + ('dist-dir=', 'd', + "directory to put final built distributions in"), + ('skip-build', None, + "skip rebuilding everything (for testing/debugging)"), + ] + + boolean_options = [ + 'keep-temp', 'skip-build', 'exclude-source-files' + ] + + def initialize_options(self): + self.bdist_dir = None + self.plat_name = None + self.keep_temp = 0 + self.dist_dir = None + self.skip_build = 0 + self.egg_output = None + self.exclude_source_files = None + + def finalize_options(self): + ei_cmd = self.ei_cmd = self.get_finalized_command("egg_info") + self.egg_info = ei_cmd.egg_info + + if self.bdist_dir is None: + bdist_base = self.get_finalized_command('bdist').bdist_base + self.bdist_dir = os.path.join(bdist_base, 'egg') + + if self.plat_name is None: + self.plat_name = get_build_platform() + + self.set_undefined_options('bdist', ('dist_dir', 'dist_dir')) + + if self.egg_output is None: + + # Compute filename of the output egg + basename = Distribution( + None, None, ei_cmd.egg_name, ei_cmd.egg_version, + get_python_version(), + self.distribution.has_ext_modules() and self.plat_name + ).egg_name() + + self.egg_output = os.path.join(self.dist_dir, basename + '.egg') + + def do_install_data(self): + # Hack for packages that install data to install's --install-lib + self.get_finalized_command('install').install_lib = self.bdist_dir + + site_packages = os.path.normcase(os.path.realpath(_get_purelib())) + old, self.distribution.data_files = self.distribution.data_files, [] + + for item in old: + if isinstance(item, tuple) and len(item) == 2: + if os.path.isabs(item[0]): + realpath = os.path.realpath(item[0]) + normalized = os.path.normcase(realpath) + if normalized == site_packages or normalized.startswith( + site_packages + os.sep + ): + item = realpath[len(site_packages) + 1:], item[1] + # XXX else: raise ??? 
+ self.distribution.data_files.append(item) + + try: + log.info("installing package data to %s", self.bdist_dir) + self.call_command('install_data', force=0, root=None) + finally: + self.distribution.data_files = old + + def get_outputs(self): + return [self.egg_output] + + def call_command(self, cmdname, **kw): + """Invoke reinitialized command `cmdname` with keyword args""" + for dirname in INSTALL_DIRECTORY_ATTRS: + kw.setdefault(dirname, self.bdist_dir) + kw.setdefault('skip_build', self.skip_build) + kw.setdefault('dry_run', self.dry_run) + cmd = self.reinitialize_command(cmdname, **kw) + self.run_command(cmdname) + return cmd + + def run(self): + # Generate metadata first + self.run_command("egg_info") + # We run install_lib before install_data, because some data hacks + # pull their data path from the install_lib command. + log.info("installing library code to %s", self.bdist_dir) + instcmd = self.get_finalized_command('install') + old_root = instcmd.root + instcmd.root = None + if self.distribution.has_c_libraries() and not self.skip_build: + self.run_command('build_clib') + cmd = self.call_command('install_lib', warn_dir=0) + instcmd.root = old_root + + all_outputs, ext_outputs = self.get_ext_outputs() + self.stubs = [] + to_compile = [] + for (p, ext_name) in enumerate(ext_outputs): + filename, ext = os.path.splitext(ext_name) + pyfile = os.path.join(self.bdist_dir, strip_module(filename) + + '.py') + self.stubs.append(pyfile) + log.info("creating stub loader for %s", ext_name) + if not self.dry_run: + write_stub(os.path.basename(ext_name), pyfile) + to_compile.append(pyfile) + ext_outputs[p] = ext_name.replace(os.sep, '/') + + if to_compile: + cmd.byte_compile(to_compile) + if self.distribution.data_files: + self.do_install_data() + + # Make the EGG-INFO directory + archive_root = self.bdist_dir + egg_info = os.path.join(archive_root, 'EGG-INFO') + self.mkpath(egg_info) + if self.distribution.scripts: + script_dir = os.path.join(egg_info, 'scripts') + log.info("installing scripts to %s", script_dir) + self.call_command('install_scripts', install_dir=script_dir, + no_ep=1) + + self.copy_metadata_to(egg_info) + native_libs = os.path.join(egg_info, "native_libs.txt") + if all_outputs: + log.info("writing %s", native_libs) + if not self.dry_run: + ensure_directory(native_libs) + libs_file = open(native_libs, 'wt') + libs_file.write('\n'.join(all_outputs)) + libs_file.write('\n') + libs_file.close() + elif os.path.isfile(native_libs): + log.info("removing %s", native_libs) + if not self.dry_run: + os.unlink(native_libs) + + write_safety_flag( + os.path.join(archive_root, 'EGG-INFO'), self.zip_safe() + ) + + if os.path.exists(os.path.join(self.egg_info, 'depends.txt')): + log.warn( + "WARNING: 'depends.txt' will not be used by setuptools 0.6!\n" + "Use the install_requires/extras_require setup() args instead." 
+ ) + + if self.exclude_source_files: + self.zap_pyfiles() + + # Make the archive + make_zipfile(self.egg_output, archive_root, verbose=self.verbose, + dry_run=self.dry_run, mode=self.gen_header()) + if not self.keep_temp: + remove_tree(self.bdist_dir, dry_run=self.dry_run) + + # Add to 'Distribution.dist_files' so that the "upload" command works + getattr(self.distribution, 'dist_files', []).append( + ('bdist_egg', get_python_version(), self.egg_output)) + + def zap_pyfiles(self): + log.info("Removing .py files from temporary directory") + for base, dirs, files in walk_egg(self.bdist_dir): + for name in files: + if name.endswith('.py'): + path = os.path.join(base, name) + log.debug("Deleting %s", path) + os.unlink(path) + + def zip_safe(self): + safe = getattr(self.distribution, 'zip_safe', None) + if safe is not None: + return safe + log.warn("zip_safe flag not set; analyzing archive contents...") + return analyze_egg(self.bdist_dir, self.stubs) + + def gen_header(self): + epm = EntryPoint.parse_map(self.distribution.entry_points or '') + ep = epm.get('setuptools.installation', {}).get('eggsecutable') + if ep is None: + return 'w' # not an eggsecutable, do it the usual way. + + if not ep.attrs or ep.extras: + raise DistutilsSetupError( + "eggsecutable entry point (%r) cannot have 'extras' " + "or refer to a module" % (ep,) + ) + + pyver = sys.version[:3] + pkg = ep.module_name + full = '.'.join(ep.attrs) + base = ep.attrs[0] + basename = os.path.basename(self.egg_output) + + header = ( + "#!/bin/sh\n" + 'if [ `basename $0` = "%(basename)s" ]\n' + 'then exec python%(pyver)s -c "' + "import sys, os; sys.path.insert(0, os.path.abspath('$0')); " + "from %(pkg)s import %(base)s; sys.exit(%(full)s())" + '" "$@"\n' + 'else\n' + ' echo $0 is not the correct name for this egg file.\n' + ' echo Please rename it back to %(basename)s and try again.\n' + ' exec false\n' + 'fi\n' + ) % locals() + + if not self.dry_run: + mkpath(os.path.dirname(self.egg_output), dry_run=self.dry_run) + f = open(self.egg_output, 'w') + f.write(header) + f.close() + return 'a' + + def copy_metadata_to(self, target_dir): + "Copy metadata (egg info) to the target_dir" + # normalize the path (so that a forward-slash in egg_info will + # match using startswith below) + norm_egg_info = os.path.normpath(self.egg_info) + prefix = os.path.join(norm_egg_info, '') + for path in self.ei_cmd.filelist.files: + if path.startswith(prefix): + target = os.path.join(target_dir, path[len(prefix):]) + ensure_directory(target) + self.copy_file(path, target) + + def get_ext_outputs(self): + """Get a list of relative paths to C extensions in the output distro""" + + all_outputs = [] + ext_outputs = [] + + paths = {self.bdist_dir: ''} + for base, dirs, files in os.walk(self.bdist_dir): + for filename in files: + if os.path.splitext(filename)[1].lower() in NATIVE_EXTENSIONS: + all_outputs.append(paths[base] + filename) + for filename in dirs: + paths[os.path.join(base, filename)] = (paths[base] + + filename + '/') + + if self.distribution.has_ext_modules(): + build_cmd = self.get_finalized_command('build_ext') + for ext in build_cmd.extensions: + if isinstance(ext, Library): + continue + fullname = build_cmd.get_ext_fullname(ext.name) + filename = build_cmd.get_ext_filename(fullname) + if not os.path.basename(filename).startswith('dl-'): + if os.path.exists(os.path.join(self.bdist_dir, filename)): + ext_outputs.append(filename) + + return all_outputs, ext_outputs + + +NATIVE_EXTENSIONS = dict.fromkeys('.dll .so .dylib .pyd'.split()) + + +def 
walk_egg(egg_dir): + """Walk an unpacked egg's contents, skipping the metadata directory""" + walker = os.walk(egg_dir) + base, dirs, files = next(walker) + if 'EGG-INFO' in dirs: + dirs.remove('EGG-INFO') + yield base, dirs, files + for bdf in walker: + yield bdf + + +def analyze_egg(egg_dir, stubs): + # check for existing flag in EGG-INFO + for flag, fn in safety_flags.items(): + if os.path.exists(os.path.join(egg_dir, 'EGG-INFO', fn)): + return flag + if not can_scan(): + return False + safe = True + for base, dirs, files in walk_egg(egg_dir): + for name in files: + if name.endswith('.py') or name.endswith('.pyw'): + continue + elif name.endswith('.pyc') or name.endswith('.pyo'): + # always scan, even if we already know we're not safe + safe = scan_module(egg_dir, base, name, stubs) and safe + return safe + + +def write_safety_flag(egg_dir, safe): + # Write or remove zip safety flag file(s) + for flag, fn in safety_flags.items(): + fn = os.path.join(egg_dir, fn) + if os.path.exists(fn): + if safe is None or bool(safe) != flag: + os.unlink(fn) + elif safe is not None and bool(safe) == flag: + f = open(fn, 'wt') + f.write('\n') + f.close() + + +safety_flags = { + True: 'zip-safe', + False: 'not-zip-safe', +} + + +def scan_module(egg_dir, base, name, stubs): + """Check whether module possibly uses unsafe-for-zipfile stuff""" + + filename = os.path.join(base, name) + if filename[:-1] in stubs: + return True # Extension module + pkg = base[len(egg_dir) + 1:].replace(os.sep, '.') + module = pkg + (pkg and '.' or '') + os.path.splitext(name)[0] + if sys.version_info < (3, 3): + skip = 8 # skip magic & date + else: + skip = 12 # skip magic & date & file size + f = open(filename, 'rb') + f.read(skip) + code = marshal.load(f) + f.close() + safe = True + symbols = dict.fromkeys(iter_symbols(code)) + for bad in ['__file__', '__path__']: + if bad in symbols: + log.warn("%s: module references %s", module, bad) + safe = False + if 'inspect' in symbols: + for bad in [ + 'getsource', 'getabsfile', 'getsourcefile', 'getfile' + 'getsourcelines', 'findsource', 'getcomments', 'getframeinfo', + 'getinnerframes', 'getouterframes', 'stack', 'trace' + ]: + if bad in symbols: + log.warn("%s: module MAY be using inspect.%s", module, bad) + safe = False + return safe + + +def iter_symbols(code): + """Yield names and strings used by `code` and its nested code objects""" + for name in code.co_names: + yield name + for const in code.co_consts: + if isinstance(const, six.string_types): + yield const + elif isinstance(const, CodeType): + for name in iter_symbols(const): + yield name + + +def can_scan(): + if not sys.platform.startswith('java') and sys.platform != 'cli': + # CPython, PyPy, etc. + return True + log.warn("Unable to analyze compiled code on this platform.") + log.warn("Please ask the author to include a 'zip_safe'" + " setting (either True or False) in the package's setup.py") + + +# Attribute names of options for commands that might need to be convinced to +# install to the egg build directory + +INSTALL_DIRECTORY_ATTRS = [ + 'install_lib', 'install_dir', 'install_data', 'install_base' +] + + +def make_zipfile(zip_filename, base_dir, verbose=0, dry_run=0, compress=True, + mode='w'): + """Create a zip file from all the files under 'base_dir'. The output + zip file will be named 'base_dir' + ".zip". Uses either the "zipfile" + Python module (if available) or the InfoZIP "zip" utility (if installed + and found on the default search path). If neither tool is available, + raises DistutilsExecError. 
Returns the name of the output zip file. + """ + import zipfile + + mkpath(os.path.dirname(zip_filename), dry_run=dry_run) + log.info("creating '%s' and adding '%s' to it", zip_filename, base_dir) + + def visit(z, dirname, names): + for name in names: + path = os.path.normpath(os.path.join(dirname, name)) + if os.path.isfile(path): + p = path[len(base_dir) + 1:] + if not dry_run: + z.write(path, p) + log.debug("adding '%s'", p) + + compression = zipfile.ZIP_DEFLATED if compress else zipfile.ZIP_STORED + if not dry_run: + z = zipfile.ZipFile(zip_filename, mode, compression=compression) + for dirname, dirs, files in os.walk(base_dir): + visit(z, dirname, files) + z.close() + else: + for dirname, dirs, files in os.walk(base_dir): + visit(None, dirname, files) + return zip_filename diff --git a/env/lib/python3.6/site-packages/setuptools/command/bdist_rpm.py b/env/lib/python3.6/site-packages/setuptools/command/bdist_rpm.py new file mode 100644 index 0000000..7073092 --- /dev/null +++ b/env/lib/python3.6/site-packages/setuptools/command/bdist_rpm.py @@ -0,0 +1,43 @@ +import distutils.command.bdist_rpm as orig + + +class bdist_rpm(orig.bdist_rpm): + """ + Override the default bdist_rpm behavior to do the following: + + 1. Run egg_info to ensure the name and version are properly calculated. + 2. Always run 'install' using --single-version-externally-managed to + disable eggs in RPM distributions. + 3. Replace dash with underscore in the version numbers for better RPM + compatibility. + """ + + def run(self): + # ensure distro name is up-to-date + self.run_command('egg_info') + + orig.bdist_rpm.run(self) + + def _make_spec_file(self): + version = self.distribution.get_version() + rpmversion = version.replace('-', '_') + spec = orig.bdist_rpm._make_spec_file(self) + line23 = '%define version ' + version + line24 = '%define version ' + rpmversion + spec = [ + line.replace( + "Source0: %{name}-%{version}.tar", + "Source0: %{name}-%{unmangled_version}.tar" + ).replace( + "setup.py install ", + "setup.py install --single-version-externally-managed " + ).replace( + "%setup", + "%setup -n %{name}-%{unmangled_version}" + ).replace(line23, line24) + for line in spec + ] + insert_loc = spec.index(line24) + 1 + unmangled_version = "%define unmangled_version " + version + spec.insert(insert_loc, unmangled_version) + return spec diff --git a/env/lib/python3.6/site-packages/setuptools/command/bdist_wininst.py b/env/lib/python3.6/site-packages/setuptools/command/bdist_wininst.py new file mode 100644 index 0000000..073de97 --- /dev/null +++ b/env/lib/python3.6/site-packages/setuptools/command/bdist_wininst.py @@ -0,0 +1,21 @@ +import distutils.command.bdist_wininst as orig + + +class bdist_wininst(orig.bdist_wininst): + def reinitialize_command(self, command, reinit_subcommands=0): + """ + Supplement reinitialize_command to work around + http://bugs.python.org/issue20819 + """ + cmd = self.distribution.reinitialize_command( + command, reinit_subcommands) + if command in ('install', 'install_lib'): + cmd.install_lib = None + return cmd + + def run(self): + self._is_running = True + try: + orig.bdist_wininst.run(self) + finally: + self._is_running = False diff --git a/env/lib/python3.6/site-packages/setuptools/command/build_ext.py b/env/lib/python3.6/site-packages/setuptools/command/build_ext.py new file mode 100644 index 0000000..36f53f0 --- /dev/null +++ b/env/lib/python3.6/site-packages/setuptools/command/build_ext.py @@ -0,0 +1,328 @@ +import os +import sys +import itertools +import imp +from 
distutils.command.build_ext import build_ext as _du_build_ext +from distutils.file_util import copy_file +from distutils.ccompiler import new_compiler +from distutils.sysconfig import customize_compiler, get_config_var +from distutils.errors import DistutilsError +from distutils import log + +from setuptools.extension import Library +from setuptools.extern import six + +try: + # Attempt to use Cython for building extensions, if available + from Cython.Distutils.build_ext import build_ext as _build_ext +except ImportError: + _build_ext = _du_build_ext + +# make sure _config_vars is initialized +get_config_var("LDSHARED") +from distutils.sysconfig import _config_vars as _CONFIG_VARS + + +def _customize_compiler_for_shlib(compiler): + if sys.platform == "darwin": + # building .dylib requires additional compiler flags on OSX; here we + # temporarily substitute the pyconfig.h variables so that distutils' + # 'customize_compiler' uses them before we build the shared libraries. + tmp = _CONFIG_VARS.copy() + try: + # XXX Help! I don't have any idea whether these are right... + _CONFIG_VARS['LDSHARED'] = ( + "gcc -Wl,-x -dynamiclib -undefined dynamic_lookup") + _CONFIG_VARS['CCSHARED'] = " -dynamiclib" + _CONFIG_VARS['SO'] = ".dylib" + customize_compiler(compiler) + finally: + _CONFIG_VARS.clear() + _CONFIG_VARS.update(tmp) + else: + customize_compiler(compiler) + + +have_rtld = False +use_stubs = False +libtype = 'shared' + +if sys.platform == "darwin": + use_stubs = True +elif os.name != 'nt': + try: + import dl + use_stubs = have_rtld = hasattr(dl, 'RTLD_NOW') + except ImportError: + pass + +if_dl = lambda s: s if have_rtld else '' + + +def get_abi3_suffix(): + """Return the file extension for an abi3-compliant Extension()""" + for suffix, _, _ in (s for s in imp.get_suffixes() if s[2] == imp.C_EXTENSION): + if '.abi3' in suffix: # Unix + return suffix + elif suffix == '.pyd': # Windows + return suffix + + +class build_ext(_build_ext): + def run(self): + """Build extensions in build directory, then copy if --inplace""" + old_inplace, self.inplace = self.inplace, 0 + _build_ext.run(self) + self.inplace = old_inplace + if old_inplace: + self.copy_extensions_to_source() + + def copy_extensions_to_source(self): + build_py = self.get_finalized_command('build_py') + for ext in self.extensions: + fullname = self.get_ext_fullname(ext.name) + filename = self.get_ext_filename(fullname) + modpath = fullname.split('.') + package = '.'.join(modpath[:-1]) + package_dir = build_py.get_package_dir(package) + dest_filename = os.path.join(package_dir, + os.path.basename(filename)) + src_filename = os.path.join(self.build_lib, filename) + + # Always copy, even if source is older than destination, to ensure + # that the right extensions for the current Python/platform are + # used. 
+ copy_file( + src_filename, dest_filename, verbose=self.verbose, + dry_run=self.dry_run + ) + if ext._needs_stub: + self.write_stub(package_dir or os.curdir, ext, True) + + def get_ext_filename(self, fullname): + filename = _build_ext.get_ext_filename(self, fullname) + if fullname in self.ext_map: + ext = self.ext_map[fullname] + use_abi3 = ( + six.PY3 + and getattr(ext, 'py_limited_api') + and get_abi3_suffix() + ) + if use_abi3: + so_ext = _get_config_var_837('EXT_SUFFIX') + filename = filename[:-len(so_ext)] + filename = filename + get_abi3_suffix() + if isinstance(ext, Library): + fn, ext = os.path.splitext(filename) + return self.shlib_compiler.library_filename(fn, libtype) + elif use_stubs and ext._links_to_dynamic: + d, fn = os.path.split(filename) + return os.path.join(d, 'dl-' + fn) + return filename + + def initialize_options(self): + _build_ext.initialize_options(self) + self.shlib_compiler = None + self.shlibs = [] + self.ext_map = {} + + def finalize_options(self): + _build_ext.finalize_options(self) + self.extensions = self.extensions or [] + self.check_extensions_list(self.extensions) + self.shlibs = [ext for ext in self.extensions + if isinstance(ext, Library)] + if self.shlibs: + self.setup_shlib_compiler() + for ext in self.extensions: + ext._full_name = self.get_ext_fullname(ext.name) + for ext in self.extensions: + fullname = ext._full_name + self.ext_map[fullname] = ext + + # distutils 3.1 will also ask for module names + # XXX what to do with conflicts? + self.ext_map[fullname.split('.')[-1]] = ext + + ltd = self.shlibs and self.links_to_dynamic(ext) or False + ns = ltd and use_stubs and not isinstance(ext, Library) + ext._links_to_dynamic = ltd + ext._needs_stub = ns + filename = ext._file_name = self.get_ext_filename(fullname) + libdir = os.path.dirname(os.path.join(self.build_lib, filename)) + if ltd and libdir not in ext.library_dirs: + ext.library_dirs.append(libdir) + if ltd and use_stubs and os.curdir not in ext.runtime_library_dirs: + ext.runtime_library_dirs.append(os.curdir) + + def setup_shlib_compiler(self): + compiler = self.shlib_compiler = new_compiler( + compiler=self.compiler, dry_run=self.dry_run, force=self.force + ) + _customize_compiler_for_shlib(compiler) + + if self.include_dirs is not None: + compiler.set_include_dirs(self.include_dirs) + if self.define is not None: + # 'define' option is a list of (name,value) tuples + for (name, value) in self.define: + compiler.define_macro(name, value) + if self.undef is not None: + for macro in self.undef: + compiler.undefine_macro(macro) + if self.libraries is not None: + compiler.set_libraries(self.libraries) + if self.library_dirs is not None: + compiler.set_library_dirs(self.library_dirs) + if self.rpath is not None: + compiler.set_runtime_library_dirs(self.rpath) + if self.link_objects is not None: + compiler.set_link_objects(self.link_objects) + + # hack so distutils' build_extension() builds a library instead + compiler.link_shared_object = link_shared_object.__get__(compiler) + + def get_export_symbols(self, ext): + if isinstance(ext, Library): + return ext.export_symbols + return _build_ext.get_export_symbols(self, ext) + + def build_extension(self, ext): + ext._convert_pyx_sources_to_lang() + _compiler = self.compiler + try: + if isinstance(ext, Library): + self.compiler = self.shlib_compiler + _build_ext.build_extension(self, ext) + if ext._needs_stub: + cmd = self.get_finalized_command('build_py').build_lib + self.write_stub(cmd, ext) + finally: + self.compiler = _compiler + + def 
links_to_dynamic(self, ext): + """Return true if 'ext' links to a dynamic lib in the same package""" + # XXX this should check to ensure the lib is actually being built + # XXX as dynamic, and not just using a locally-found version or a + # XXX static-compiled version + libnames = dict.fromkeys([lib._full_name for lib in self.shlibs]) + pkg = '.'.join(ext._full_name.split('.')[:-1] + ['']) + return any(pkg + libname in libnames for libname in ext.libraries) + + def get_outputs(self): + return _build_ext.get_outputs(self) + self.__get_stubs_outputs() + + def __get_stubs_outputs(self): + # assemble the base name for each extension that needs a stub + ns_ext_bases = ( + os.path.join(self.build_lib, *ext._full_name.split('.')) + for ext in self.extensions + if ext._needs_stub + ) + # pair each base with the extension + pairs = itertools.product(ns_ext_bases, self.__get_output_extensions()) + return list(base + fnext for base, fnext in pairs) + + def __get_output_extensions(self): + yield '.py' + yield '.pyc' + if self.get_finalized_command('build_py').optimize: + yield '.pyo' + + def write_stub(self, output_dir, ext, compile=False): + log.info("writing stub loader for %s to %s", ext._full_name, + output_dir) + stub_file = (os.path.join(output_dir, *ext._full_name.split('.')) + + '.py') + if compile and os.path.exists(stub_file): + raise DistutilsError(stub_file + " already exists! Please delete.") + if not self.dry_run: + f = open(stub_file, 'w') + f.write( + '\n'.join([ + "def __bootstrap__():", + " global __bootstrap__, __file__, __loader__", + " import sys, os, pkg_resources, imp" + if_dl(", dl"), + " __file__ = pkg_resources.resource_filename" + "(__name__,%r)" + % os.path.basename(ext._file_name), + " del __bootstrap__", + " if '__loader__' in globals():", + " del __loader__", + if_dl(" old_flags = sys.getdlopenflags()"), + " old_dir = os.getcwd()", + " try:", + " os.chdir(os.path.dirname(__file__))", + if_dl(" sys.setdlopenflags(dl.RTLD_NOW)"), + " imp.load_dynamic(__name__,__file__)", + " finally:", + if_dl(" sys.setdlopenflags(old_flags)"), + " os.chdir(old_dir)", + "__bootstrap__()", + "" # terminal \n + ]) + ) + f.close() + if compile: + from distutils.util import byte_compile + + byte_compile([stub_file], optimize=0, + force=True, dry_run=self.dry_run) + optimize = self.get_finalized_command('install_lib').optimize + if optimize > 0: + byte_compile([stub_file], optimize=optimize, + force=True, dry_run=self.dry_run) + if os.path.exists(stub_file) and not self.dry_run: + os.unlink(stub_file) + + +if use_stubs or os.name == 'nt': + # Build shared libraries + # + def link_shared_object( + self, objects, output_libname, output_dir=None, libraries=None, + library_dirs=None, runtime_library_dirs=None, export_symbols=None, + debug=0, extra_preargs=None, extra_postargs=None, build_temp=None, + target_lang=None): + self.link( + self.SHARED_LIBRARY, objects, output_libname, + output_dir, libraries, library_dirs, runtime_library_dirs, + export_symbols, debug, extra_preargs, extra_postargs, + build_temp, target_lang + ) +else: + # Build static libraries everywhere else + libtype = 'static' + + def link_shared_object( + self, objects, output_libname, output_dir=None, libraries=None, + library_dirs=None, runtime_library_dirs=None, export_symbols=None, + debug=0, extra_preargs=None, extra_postargs=None, build_temp=None, + target_lang=None): + # XXX we need to either disallow these attrs on Library instances, + # or warn/abort here if set, or something... 
+ # libraries=None, library_dirs=None, runtime_library_dirs=None, + # export_symbols=None, extra_preargs=None, extra_postargs=None, + # build_temp=None + + assert output_dir is None # distutils build_ext doesn't pass this + output_dir, filename = os.path.split(output_libname) + basename, ext = os.path.splitext(filename) + if self.library_filename("x").startswith('lib'): + # strip 'lib' prefix; this is kludgy if some platform uses + # a different prefix + basename = basename[3:] + + self.create_static_lib( + objects, basename, output_dir, debug, target_lang + ) + + +def _get_config_var_837(name): + """ + In https://github.com/pypa/setuptools/pull/837, we discovered + Python 3.3.0 exposes the extension suffix under the name 'SO'. + """ + if sys.version_info < (3, 3, 1): + name = 'SO' + return get_config_var(name) diff --git a/env/lib/python3.6/site-packages/setuptools/command/build_py.py b/env/lib/python3.6/site-packages/setuptools/command/build_py.py new file mode 100644 index 0000000..289e6fb --- /dev/null +++ b/env/lib/python3.6/site-packages/setuptools/command/build_py.py @@ -0,0 +1,270 @@ +from glob import glob +from distutils.util import convert_path +import distutils.command.build_py as orig +import os +import fnmatch +import textwrap +import io +import distutils.errors +import itertools + +from setuptools.extern import six +from setuptools.extern.six.moves import map, filter, filterfalse + +try: + from setuptools.lib2to3_ex import Mixin2to3 +except ImportError: + + class Mixin2to3: + def run_2to3(self, files, doctests=True): + "do nothing" + + +class build_py(orig.build_py, Mixin2to3): + """Enhanced 'build_py' command that includes data files with packages + + The data files are specified via a 'package_data' argument to 'setup()'. + See 'setuptools.dist.Distribution' for more details. + + Also, this version of the 'build_py' command allows you to specify both + 'py_modules' and 'packages' in the same setup operation. + """ + + def finalize_options(self): + orig.build_py.finalize_options(self) + self.package_data = self.distribution.package_data + self.exclude_package_data = (self.distribution.exclude_package_data or + {}) + if 'data_files' in self.__dict__: + del self.__dict__['data_files'] + self.__updated_files = [] + self.__doctests_2to3 = [] + + def run(self): + """Build modules, packages, and copy data files to build directory""" + if not self.py_modules and not self.packages: + return + + if self.py_modules: + self.build_modules() + + if self.packages: + self.build_packages() + self.build_package_data() + + self.run_2to3(self.__updated_files, False) + self.run_2to3(self.__updated_files, True) + self.run_2to3(self.__doctests_2to3, True) + + # Only compile actual .py files, using our base class' idea of what our + # output files are. 
+ self.byte_compile(orig.build_py.get_outputs(self, include_bytecode=0)) + + def __getattr__(self, attr): + "lazily compute data files" + if attr == 'data_files': + self.data_files = self._get_data_files() + return self.data_files + return orig.build_py.__getattr__(self, attr) + + def build_module(self, module, module_file, package): + if six.PY2 and isinstance(package, six.string_types): + # avoid errors on Python 2 when unicode is passed (#190) + package = package.split('.') + outfile, copied = orig.build_py.build_module(self, module, module_file, + package) + if copied: + self.__updated_files.append(outfile) + return outfile, copied + + def _get_data_files(self): + """Generate list of '(package,src_dir,build_dir,filenames)' tuples""" + self.analyze_manifest() + return list(map(self._get_pkg_data_files, self.packages or ())) + + def _get_pkg_data_files(self, package): + # Locate package source directory + src_dir = self.get_package_dir(package) + + # Compute package build directory + build_dir = os.path.join(*([self.build_lib] + package.split('.'))) + + # Strip directory from globbed filenames + filenames = [ + os.path.relpath(file, src_dir) + for file in self.find_data_files(package, src_dir) + ] + return package, src_dir, build_dir, filenames + + def find_data_files(self, package, src_dir): + """Return filenames for package's data files in 'src_dir'""" + patterns = self._get_platform_patterns( + self.package_data, + package, + src_dir, + ) + globs_expanded = map(glob, patterns) + # flatten the expanded globs into an iterable of matches + globs_matches = itertools.chain.from_iterable(globs_expanded) + glob_files = filter(os.path.isfile, globs_matches) + files = itertools.chain( + self.manifest_files.get(package, []), + glob_files, + ) + return self.exclude_data_files(package, src_dir, files) + + def build_package_data(self): + """Copy data files into build directory""" + for package, src_dir, build_dir, filenames in self.data_files: + for filename in filenames: + target = os.path.join(build_dir, filename) + self.mkpath(os.path.dirname(target)) + srcfile = os.path.join(src_dir, filename) + outf, copied = self.copy_file(srcfile, target) + srcfile = os.path.abspath(srcfile) + if (copied and + srcfile in self.distribution.convert_2to3_doctests): + self.__doctests_2to3.append(outf) + + def analyze_manifest(self): + self.manifest_files = mf = {} + if not self.distribution.include_package_data: + return + src_dirs = {} + for package in self.packages or (): + # Locate package source directory + src_dirs[assert_relative(self.get_package_dir(package))] = package + + self.run_command('egg_info') + ei_cmd = self.get_finalized_command('egg_info') + for path in ei_cmd.filelist.files: + d, f = os.path.split(assert_relative(path)) + prev = None + oldf = f + while d and d != prev and d not in src_dirs: + prev = d + d, df = os.path.split(d) + f = os.path.join(df, f) + if d in src_dirs: + if path.endswith('.py') and f == oldf: + continue # it's a module, not data + mf.setdefault(src_dirs[d], []).append(path) + + def get_data_files(self): + pass # Lazily compute data files in _get_data_files() function. 
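# --- Editor's note (not part of the vendored setuptools diff above) ----------
# A minimal, hypothetical sketch of the data-file resolution performed by
# find_data_files()/exclude_data_files() in this build_py command: include
# patterns from package_data (the '' catch-all key plus the package's own key)
# are globbed relative to the package source directory, then exclude patterns
# are applied with fnmatch and duplicates are dropped while preserving order.
# The function name and the 'mypkg' layout below are assumptions made purely
# for illustration; they do not appear in the patched source.
import itertools
import os
from fnmatch import fnmatch
from glob import glob


def resolve_package_data(src_dir, package, package_data, exclude_package_data):
    # Combine the catch-all ('') patterns with the package-specific ones.
    include = itertools.chain(package_data.get('', []),
                              package_data.get(package, []))
    # Expand each include pattern relative to the package source directory.
    matched = itertools.chain.from_iterable(
        glob(os.path.join(src_dir, pattern)) for pattern in include)
    files = [path for path in matched if os.path.isfile(path)]
    # Expand exclude patterns the same way and drop anything they match,
    # keeping the first occurrence of each remaining file.
    exclude = [os.path.join(src_dir, pattern)
               for pattern in itertools.chain(exclude_package_data.get('', []),
                                              exclude_package_data.get(package, []))]
    seen, kept = set(), []
    for path in files:
        if path in seen or any(fnmatch(path, pattern) for pattern in exclude):
            continue
        seen.add(path)
        kept.append(path)
    return kept


# Example usage (hypothetical project layout):
#   resolve_package_data('mypkg', 'mypkg',
#                        package_data={'': ['*.txt'], 'mypkg': ['data/*.json']},
#                        exclude_package_data={'mypkg': ['data/private_*.json']})
# ------------------------------------------------------------------------------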
+ + def check_package(self, package, package_dir): + """Check namespace packages' __init__ for declare_namespace""" + try: + return self.packages_checked[package] + except KeyError: + pass + + init_py = orig.build_py.check_package(self, package, package_dir) + self.packages_checked[package] = init_py + + if not init_py or not self.distribution.namespace_packages: + return init_py + + for pkg in self.distribution.namespace_packages: + if pkg == package or pkg.startswith(package + '.'): + break + else: + return init_py + + with io.open(init_py, 'rb') as f: + contents = f.read() + if b'declare_namespace' not in contents: + raise distutils.errors.DistutilsError( + "Namespace package problem: %s is a namespace package, but " + "its\n__init__.py does not call declare_namespace()! Please " + 'fix it.\n(See the setuptools manual under ' + '"Namespace Packages" for details.)\n"' % (package,) + ) + return init_py + + def initialize_options(self): + self.packages_checked = {} + orig.build_py.initialize_options(self) + + def get_package_dir(self, package): + res = orig.build_py.get_package_dir(self, package) + if self.distribution.src_root is not None: + return os.path.join(self.distribution.src_root, res) + return res + + def exclude_data_files(self, package, src_dir, files): + """Filter filenames for package's data files in 'src_dir'""" + files = list(files) + patterns = self._get_platform_patterns( + self.exclude_package_data, + package, + src_dir, + ) + match_groups = ( + fnmatch.filter(files, pattern) + for pattern in patterns + ) + # flatten the groups of matches into an iterable of matches + matches = itertools.chain.from_iterable(match_groups) + bad = set(matches) + keepers = ( + fn + for fn in files + if fn not in bad + ) + # ditch dupes + return list(_unique_everseen(keepers)) + + @staticmethod + def _get_platform_patterns(spec, package, src_dir): + """ + yield platfrom-specific path patterns (suitable for glob + or fn_match) from a glob-based spec (such as + self.package_data or self.exclude_package_data) + matching package in src_dir. + """ + raw_patterns = itertools.chain( + spec.get('', []), + spec.get(package, []), + ) + return ( + # Each pattern has to be converted to a platform-specific path + os.path.join(src_dir, convert_path(pattern)) + for pattern in raw_patterns + ) + + +# from Python docs +def _unique_everseen(iterable, key=None): + "List unique elements, preserving order. Remember all elements ever seen." + # unique_everseen('AAAABBBCCDAABBB') --> A B C D + # unique_everseen('ABBCcAD', str.lower) --> A B C D + seen = set() + seen_add = seen.add + if key is None: + for element in filterfalse(seen.__contains__, iterable): + seen_add(element) + yield element + else: + for element in iterable: + k = key(element) + if k not in seen: + seen_add(k) + yield element + + +def assert_relative(path): + if not os.path.isabs(path): + return path + from distutils.errors import DistutilsSetupError + + msg = textwrap.dedent(""" + Error: setup script specifies an absolute path: + + %s + + setup() arguments must *always* be /-separated paths relative to the + setup.py directory, *never* absolute paths. 
+ """).lstrip() % path + raise DistutilsSetupError(msg) diff --git a/env/lib/python3.6/site-packages/setuptools/command/develop.py b/env/lib/python3.6/site-packages/setuptools/command/develop.py new file mode 100644 index 0000000..3eb8612 --- /dev/null +++ b/env/lib/python3.6/site-packages/setuptools/command/develop.py @@ -0,0 +1,197 @@ +from distutils.util import convert_path +from distutils import log +from distutils.errors import DistutilsError, DistutilsOptionError +import os +import glob +import io + +from setuptools.extern import six + +from pkg_resources import Distribution, PathMetadata, normalize_path +from setuptools.command.easy_install import easy_install +import setuptools + + +class develop(easy_install): + """Set up package for development""" + + description = "install package in 'development mode'" + + user_options = easy_install.user_options + [ + ("uninstall", "u", "Uninstall this source package"), + ("egg-path=", None, "Set the path to be used in the .egg-link file"), + ] + + boolean_options = easy_install.boolean_options + ['uninstall'] + + command_consumes_arguments = False # override base + + def run(self): + if self.uninstall: + self.multi_version = True + self.uninstall_link() + else: + self.install_for_development() + self.warn_deprecated_options() + + def initialize_options(self): + self.uninstall = None + self.egg_path = None + easy_install.initialize_options(self) + self.setup_path = None + self.always_copy_from = '.' # always copy eggs installed in curdir + + def finalize_options(self): + ei = self.get_finalized_command("egg_info") + if ei.broken_egg_info: + template = "Please rename %r to %r before using 'develop'" + args = ei.egg_info, ei.broken_egg_info + raise DistutilsError(template % args) + self.args = [ei.egg_name] + + easy_install.finalize_options(self) + self.expand_basedirs() + self.expand_dirs() + # pick up setup-dir .egg files only: no .egg-info + self.package_index.scan(glob.glob('*.egg')) + + egg_link_fn = ei.egg_name + '.egg-link' + self.egg_link = os.path.join(self.install_dir, egg_link_fn) + self.egg_base = ei.egg_base + if self.egg_path is None: + self.egg_path = os.path.abspath(ei.egg_base) + + target = normalize_path(self.egg_base) + egg_path = normalize_path(os.path.join(self.install_dir, + self.egg_path)) + if egg_path != target: + raise DistutilsOptionError( + "--egg-path must be a relative path from the install" + " directory to " + target + ) + + # Make a distribution for the package's source + self.dist = Distribution( + target, + PathMetadata(target, os.path.abspath(ei.egg_info)), + project_name=ei.egg_name + ) + + p = self.egg_base.replace(os.sep, '/') + if p != os.curdir: + p = '../' * (p.count('/') + 1) + self.setup_path = p + p = normalize_path(os.path.join(self.install_dir, self.egg_path, p)) + if p != normalize_path(os.curdir): + raise DistutilsOptionError( + "Can't get a consistent path to setup script from" + " installation directory", p, normalize_path(os.curdir)) + + def install_for_development(self): + if six.PY3 and getattr(self.distribution, 'use_2to3', False): + # If we run 2to3 we can not do this inplace: + + # Ensure metadata is up-to-date + self.reinitialize_command('build_py', inplace=0) + self.run_command('build_py') + bpy_cmd = self.get_finalized_command("build_py") + build_path = normalize_path(bpy_cmd.build_lib) + + # Build extensions + self.reinitialize_command('egg_info', egg_base=build_path) + self.run_command('egg_info') + + self.reinitialize_command('build_ext', inplace=0) + self.run_command('build_ext') + 
+ # Fixup egg-link and easy-install.pth + ei_cmd = self.get_finalized_command("egg_info") + self.egg_path = build_path + self.dist.location = build_path + # XXX + self.dist._provider = PathMetadata(build_path, ei_cmd.egg_info) + else: + # Without 2to3 inplace works fine: + self.run_command('egg_info') + + # Build extensions in-place + self.reinitialize_command('build_ext', inplace=1) + self.run_command('build_ext') + + self.install_site_py() # ensure that target dir is site-safe + if setuptools.bootstrap_install_from: + self.easy_install(setuptools.bootstrap_install_from) + setuptools.bootstrap_install_from = None + + # create an .egg-link in the installation dir, pointing to our egg + log.info("Creating %s (link to %s)", self.egg_link, self.egg_base) + if not self.dry_run: + with open(self.egg_link, "w") as f: + f.write(self.egg_path + "\n" + self.setup_path) + # postprocess the installed distro, fixing up .pth, installing scripts, + # and handling requirements + self.process_distribution(None, self.dist, not self.no_deps) + + def uninstall_link(self): + if os.path.exists(self.egg_link): + log.info("Removing %s (link to %s)", self.egg_link, self.egg_base) + egg_link_file = open(self.egg_link) + contents = [line.rstrip() for line in egg_link_file] + egg_link_file.close() + if contents not in ([self.egg_path], + [self.egg_path, self.setup_path]): + log.warn("Link points to %s: uninstall aborted", contents) + return + if not self.dry_run: + os.unlink(self.egg_link) + if not self.dry_run: + self.update_pth(self.dist) # remove any .pth link to us + if self.distribution.scripts: + # XXX should also check for entry point scripts! + log.warn("Note: you must uninstall or replace scripts manually!") + + def install_egg_scripts(self, dist): + if dist is not self.dist: + # Installing a dependency, so fall back to normal behavior + return easy_install.install_egg_scripts(self, dist) + + # create wrapper scripts in the script dir, pointing to dist.scripts + + # new-style... + self.install_wrapper_scripts(dist) + + # ...and old-style + for script_name in self.distribution.scripts or []: + script_path = os.path.abspath(convert_path(script_name)) + script_name = os.path.basename(script_path) + with io.open(script_path) as strm: + script_text = strm.read() + self.install_script(dist, script_name, script_text, script_path) + + def install_wrapper_scripts(self, dist): + dist = VersionlessRequirement(dist) + return easy_install.install_wrapper_scripts(self, dist) + + +class VersionlessRequirement(object): + """ + Adapt a pkg_resources.Distribution to simply return the project + name as the 'requirement' so that scripts will work across + multiple versions. + + >>> dist = Distribution(project_name='foo', version='1.0') + >>> str(dist.as_requirement()) + 'foo==1.0' + >>> adapted_dist = VersionlessRequirement(dist) + >>> str(adapted_dist.as_requirement()) + 'foo' + """ + + def __init__(self, dist): + self.__dist = dist + + def __getattr__(self, name): + return getattr(self.__dist, name) + + def as_requirement(self): + return self.project_name diff --git a/env/lib/python3.6/site-packages/setuptools/command/easy_install.py b/env/lib/python3.6/site-packages/setuptools/command/easy_install.py new file mode 100644 index 0000000..03dd676 --- /dev/null +++ b/env/lib/python3.6/site-packages/setuptools/command/easy_install.py @@ -0,0 +1,2287 @@ +#!/usr/bin/env python +""" +Easy Install +------------ + +A tool for doing automatic download/extract/build of distutils-based Python +packages. 
For detailed documentation, see the accompanying EasyInstall.txt +file, or visit the `EasyInstall home page`__. + +__ https://setuptools.readthedocs.io/en/latest/easy_install.html + +""" + +from glob import glob +from distutils.util import get_platform +from distutils.util import convert_path, subst_vars +from distutils.errors import ( + DistutilsArgError, DistutilsOptionError, + DistutilsError, DistutilsPlatformError, +) +from distutils.command.install import INSTALL_SCHEMES, SCHEME_KEYS +from distutils import log, dir_util +from distutils.command.build_scripts import first_line_re +from distutils.spawn import find_executable +import sys +import os +import zipimport +import shutil +import tempfile +import zipfile +import re +import stat +import random +import textwrap +import warnings +import site +import struct +import contextlib +import subprocess +import shlex +import io + +from setuptools.extern import six +from setuptools.extern.six.moves import configparser, map + +from setuptools import Command +from setuptools.sandbox import run_setup +from setuptools.py31compat import get_path, get_config_vars +from setuptools.command import setopt +from setuptools.archive_util import unpack_archive +from setuptools.package_index import ( + PackageIndex, parse_requirement_arg, URL_SCHEME, +) +from setuptools.command import bdist_egg, egg_info +from pkg_resources import ( + yield_lines, normalize_path, resource_string, ensure_directory, + get_distribution, find_distributions, Environment, Requirement, + Distribution, PathMetadata, EggMetadata, WorkingSet, DistributionNotFound, + VersionConflict, DEVELOP_DIST, +) +import pkg_resources + +# Turn on PEP440Warnings +warnings.filterwarnings("default", category=pkg_resources.PEP440Warning) + +__all__ = [ + 'samefile', 'easy_install', 'PthDistributions', 'extract_wininst_cfg', + 'main', 'get_exe_prefixes', +] + + +def is_64bit(): + return struct.calcsize("P") == 8 + + +def samefile(p1, p2): + """ + Determine if two paths reference the same file. + + Augments os.path.samefile to work on Windows and + suppresses errors if the path doesn't exist. 
+ """ + both_exist = os.path.exists(p1) and os.path.exists(p2) + use_samefile = hasattr(os.path, 'samefile') and both_exist + if use_samefile: + return os.path.samefile(p1, p2) + norm_p1 = os.path.normpath(os.path.normcase(p1)) + norm_p2 = os.path.normpath(os.path.normcase(p2)) + return norm_p1 == norm_p2 + + +if six.PY2: + + def _to_ascii(s): + return s + + def isascii(s): + try: + six.text_type(s, 'ascii') + return True + except UnicodeError: + return False +else: + + def _to_ascii(s): + return s.encode('ascii') + + def isascii(s): + try: + s.encode('ascii') + return True + except UnicodeError: + return False + + +_one_liner = lambda text: textwrap.dedent(text).strip().replace('\n', '; ') + + +class easy_install(Command): + """Manage a download/build/install process""" + description = "Find/get/install Python packages" + command_consumes_arguments = True + + user_options = [ + ('prefix=', None, "installation prefix"), + ("zip-ok", "z", "install package as a zipfile"), + ("multi-version", "m", "make apps have to require() a version"), + ("upgrade", "U", "force upgrade (searches PyPI for latest versions)"), + ("install-dir=", "d", "install package to DIR"), + ("script-dir=", "s", "install scripts to DIR"), + ("exclude-scripts", "x", "Don't install scripts"), + ("always-copy", "a", "Copy all needed packages to install dir"), + ("index-url=", "i", "base URL of Python Package Index"), + ("find-links=", "f", "additional URL(s) to search for packages"), + ("build-directory=", "b", + "download/extract/build in DIR; keep the results"), + ('optimize=', 'O', + "also compile with optimization: -O1 for \"python -O\", " + "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"), + ('record=', None, + "filename in which to record list of installed files"), + ('always-unzip', 'Z', "don't install as a zipfile, no matter what"), + ('site-dirs=', 'S', "list of directories where .pth files work"), + ('editable', 'e', "Install specified packages in editable form"), + ('no-deps', 'N', "don't install dependencies"), + ('allow-hosts=', 'H', "pattern(s) that hostnames must match"), + ('local-snapshots-ok', 'l', + "allow building eggs from local checkouts"), + ('version', None, "print version information and exit"), + ('no-find-links', None, + "Don't load find-links defined in packages being installed") + ] + boolean_options = [ + 'zip-ok', 'multi-version', 'exclude-scripts', 'upgrade', 'always-copy', + 'editable', + 'no-deps', 'local-snapshots-ok', 'version' + ] + + if site.ENABLE_USER_SITE: + help_msg = "install in user site-package '%s'" % site.USER_SITE + user_options.append(('user', None, help_msg)) + boolean_options.append('user') + + negative_opt = {'always-unzip': 'zip-ok'} + create_index = PackageIndex + + def initialize_options(self): + # the --user option seems to be an opt-in one, + # so the default should be False. 
+ self.user = 0 + self.zip_ok = self.local_snapshots_ok = None + self.install_dir = self.script_dir = self.exclude_scripts = None + self.index_url = None + self.find_links = None + self.build_directory = None + self.args = None + self.optimize = self.record = None + self.upgrade = self.always_copy = self.multi_version = None + self.editable = self.no_deps = self.allow_hosts = None + self.root = self.prefix = self.no_report = None + self.version = None + self.install_purelib = None # for pure module distributions + self.install_platlib = None # non-pure (dists w/ extensions) + self.install_headers = None # for C/C++ headers + self.install_lib = None # set to either purelib or platlib + self.install_scripts = None + self.install_data = None + self.install_base = None + self.install_platbase = None + if site.ENABLE_USER_SITE: + self.install_userbase = site.USER_BASE + self.install_usersite = site.USER_SITE + else: + self.install_userbase = None + self.install_usersite = None + self.no_find_links = None + + # Options not specifiable via command line + self.package_index = None + self.pth_file = self.always_copy_from = None + self.site_dirs = None + self.installed_projects = {} + self.sitepy_installed = False + # Always read easy_install options, even if we are subclassed, or have + # an independent instance created. This ensures that defaults will + # always come from the standard configuration file(s)' "easy_install" + # section, even if this is a "develop" or "install" command, or some + # other embedding. + self._dry_run = None + self.verbose = self.distribution.verbose + self.distribution._set_command_options( + self, self.distribution.get_option_dict('easy_install') + ) + + def delete_blockers(self, blockers): + extant_blockers = ( + filename for filename in blockers + if os.path.exists(filename) or os.path.islink(filename) + ) + list(map(self._delete_path, extant_blockers)) + + def _delete_path(self, path): + log.info("Deleting %s", path) + if self.dry_run: + return + + is_tree = os.path.isdir(path) and not os.path.islink(path) + remover = rmtree if is_tree else os.unlink + remover(path) + + @staticmethod + def _render_version(): + """ + Render the Setuptools version and installation details, then exit. + """ + ver = sys.version[:3] + dist = get_distribution('setuptools') + tmpl = 'setuptools {dist.version} from {dist.location} (Python {ver})' + print(tmpl.format(**locals())) + raise SystemExit() + + def finalize_options(self): + self.version and self._render_version() + + py_version = sys.version.split()[0] + prefix, exec_prefix = get_config_vars('prefix', 'exec_prefix') + + self.config_vars = { + 'dist_name': self.distribution.get_name(), + 'dist_version': self.distribution.get_version(), + 'dist_fullname': self.distribution.get_fullname(), + 'py_version': py_version, + 'py_version_short': py_version[0:3], + 'py_version_nodot': py_version[0] + py_version[2], + 'sys_prefix': prefix, + 'prefix': prefix, + 'sys_exec_prefix': exec_prefix, + 'exec_prefix': exec_prefix, + # Only python 3.2+ has abiflags + 'abiflags': getattr(sys, 'abiflags', ''), + } + + if site.ENABLE_USER_SITE: + self.config_vars['userbase'] = self.install_userbase + self.config_vars['usersite'] = self.install_usersite + + self._fix_install_dir_for_user_site() + + self.expand_basedirs() + self.expand_dirs() + + self._expand( + 'install_dir', 'script_dir', 'build_directory', + 'site_dirs', + ) + # If a non-default installation directory was specified, default the + # script directory to match it. 
+ if self.script_dir is None: + self.script_dir = self.install_dir + + if self.no_find_links is None: + self.no_find_links = False + + # Let install_dir get set by install_lib command, which in turn + # gets its info from the install command, and takes into account + # --prefix and --home and all that other crud. + self.set_undefined_options( + 'install_lib', ('install_dir', 'install_dir') + ) + # Likewise, set default script_dir from 'install_scripts.install_dir' + self.set_undefined_options( + 'install_scripts', ('install_dir', 'script_dir') + ) + + if self.user and self.install_purelib: + self.install_dir = self.install_purelib + self.script_dir = self.install_scripts + # default --record from the install command + self.set_undefined_options('install', ('record', 'record')) + # Should this be moved to the if statement below? It's not used + # elsewhere + normpath = map(normalize_path, sys.path) + self.all_site_dirs = get_site_dirs() + if self.site_dirs is not None: + site_dirs = [ + os.path.expanduser(s.strip()) for s in + self.site_dirs.split(',') + ] + for d in site_dirs: + if not os.path.isdir(d): + log.warn("%s (in --site-dirs) does not exist", d) + elif normalize_path(d) not in normpath: + raise DistutilsOptionError( + d + " (in --site-dirs) is not on sys.path" + ) + else: + self.all_site_dirs.append(normalize_path(d)) + if not self.editable: + self.check_site_dir() + self.index_url = self.index_url or "https://pypi.python.org/simple" + self.shadow_path = self.all_site_dirs[:] + for path_item in self.install_dir, normalize_path(self.script_dir): + if path_item not in self.shadow_path: + self.shadow_path.insert(0, path_item) + + if self.allow_hosts is not None: + hosts = [s.strip() for s in self.allow_hosts.split(',')] + else: + hosts = ['*'] + if self.package_index is None: + self.package_index = self.create_index( + self.index_url, search_path=self.shadow_path, hosts=hosts, + ) + self.local_index = Environment(self.shadow_path + sys.path) + + if self.find_links is not None: + if isinstance(self.find_links, six.string_types): + self.find_links = self.find_links.split() + else: + self.find_links = [] + if self.local_snapshots_ok: + self.package_index.scan_egg_links(self.shadow_path + sys.path) + if not self.no_find_links: + self.package_index.add_find_links(self.find_links) + self.set_undefined_options('install_lib', ('optimize', 'optimize')) + if not isinstance(self.optimize, int): + try: + self.optimize = int(self.optimize) + if not (0 <= self.optimize <= 2): + raise ValueError + except ValueError: + raise DistutilsOptionError("--optimize must be 0, 1, or 2") + + if self.editable and not self.build_directory: + raise DistutilsArgError( + "Must specify a build directory (-b) when using --editable" + ) + if not self.args: + raise DistutilsArgError( + "No urls, filenames, or requirements specified (see --help)") + + self.outputs = [] + + def _fix_install_dir_for_user_site(self): + """ + Fix the install_dir if "--user" was used. 
+ """ + if not self.user or not site.ENABLE_USER_SITE: + return + + self.create_home_path() + if self.install_userbase is None: + msg = "User base directory is not specified" + raise DistutilsPlatformError(msg) + self.install_base = self.install_platbase = self.install_userbase + scheme_name = os.name.replace('posix', 'unix') + '_user' + self.select_scheme(scheme_name) + + def _expand_attrs(self, attrs): + for attr in attrs: + val = getattr(self, attr) + if val is not None: + if os.name == 'posix' or os.name == 'nt': + val = os.path.expanduser(val) + val = subst_vars(val, self.config_vars) + setattr(self, attr, val) + + def expand_basedirs(self): + """Calls `os.path.expanduser` on install_base, install_platbase and + root.""" + self._expand_attrs(['install_base', 'install_platbase', 'root']) + + def expand_dirs(self): + """Calls `os.path.expanduser` on install dirs.""" + dirs = [ + 'install_purelib', + 'install_platlib', + 'install_lib', + 'install_headers', + 'install_scripts', + 'install_data', + ] + self._expand_attrs(dirs) + + def run(self): + if self.verbose != self.distribution.verbose: + log.set_verbosity(self.verbose) + try: + for spec in self.args: + self.easy_install(spec, not self.no_deps) + if self.record: + outputs = self.outputs + if self.root: # strip any package prefix + root_len = len(self.root) + for counter in range(len(outputs)): + outputs[counter] = outputs[counter][root_len:] + from distutils import file_util + + self.execute( + file_util.write_file, (self.record, outputs), + "writing list of installed files to '%s'" % + self.record + ) + self.warn_deprecated_options() + finally: + log.set_verbosity(self.distribution.verbose) + + def pseudo_tempname(self): + """Return a pseudo-tempname base in the install directory. + This code is intentionally naive; if a malicious party can write to + the target directory you're already in deep doodoo. + """ + try: + pid = os.getpid() + except Exception: + pid = random.randint(0, sys.maxsize) + return os.path.join(self.install_dir, "test-easy-install-%s" % pid) + + def warn_deprecated_options(self): + pass + + def check_site_dir(self): + """Verify that self.install_dir is .pth-capable dir, if needed""" + + instdir = normalize_path(self.install_dir) + pth_file = os.path.join(instdir, 'easy-install.pth') + + # Is it a configured, PYTHONPATH, implicit, or explicit site dir? + is_site_dir = instdir in self.all_site_dirs + + if not is_site_dir and not self.multi_version: + # No? 
Then directly test whether it does .pth file processing + is_site_dir = self.check_pth_processing() + else: + # make sure we can write to target dir + testfile = self.pseudo_tempname() + '.write-test' + test_exists = os.path.exists(testfile) + try: + if test_exists: + os.unlink(testfile) + open(testfile, 'w').close() + os.unlink(testfile) + except (OSError, IOError): + self.cant_write_to_target() + + if not is_site_dir and not self.multi_version: + # Can't install non-multi to non-site dir + raise DistutilsError(self.no_default_version_msg()) + + if is_site_dir: + if self.pth_file is None: + self.pth_file = PthDistributions(pth_file, self.all_site_dirs) + else: + self.pth_file = None + + PYTHONPATH = os.environ.get('PYTHONPATH', '').split(os.pathsep) + if instdir not in map(normalize_path, filter(None, PYTHONPATH)): + # only PYTHONPATH dirs need a site.py, so pretend it's there + self.sitepy_installed = True + elif self.multi_version and not os.path.exists(pth_file): + self.sitepy_installed = True # don't need site.py in this case + self.pth_file = None # and don't create a .pth file + self.install_dir = instdir + + __cant_write_msg = textwrap.dedent(""" + can't create or remove files in install directory + + The following error occurred while trying to add or remove files in the + installation directory: + + %s + + The installation directory you specified (via --install-dir, --prefix, or + the distutils default setting) was: + + %s + """).lstrip() + + __not_exists_id = textwrap.dedent(""" + This directory does not currently exist. Please create it and try again, or + choose a different installation directory (using the -d or --install-dir + option). + """).lstrip() + + __access_msg = textwrap.dedent(""" + Perhaps your account does not have write access to this directory? If the + installation directory is a system-owned directory, you may need to sign in + as the administrator or "root" account. If you do not have administrative + access to this machine, you may wish to choose a different installation + directory, preferably one that is listed in your PYTHONPATH environment + variable. + + For information on other options, you may wish to consult the + documentation at: + + https://setuptools.readthedocs.io/en/latest/easy_install.html + + Please make the appropriate changes for your system and try again. + """).lstrip() + + def cant_write_to_target(self): + msg = self.__cant_write_msg % (sys.exc_info()[1], self.install_dir,) + + if not os.path.exists(self.install_dir): + msg += '\n' + self.__not_exists_id + else: + msg += '\n' + self.__access_msg + raise DistutilsError(msg) + + def check_pth_processing(self): + """Empirically verify whether .pth files are supported in inst. 
dir""" + instdir = self.install_dir + log.info("Checking .pth file support in %s", instdir) + pth_file = self.pseudo_tempname() + ".pth" + ok_file = pth_file + '.ok' + ok_exists = os.path.exists(ok_file) + tmpl = _one_liner(""" + import os + f = open({ok_file!r}, 'w') + f.write('OK') + f.close() + """) + '\n' + try: + if ok_exists: + os.unlink(ok_file) + dirname = os.path.dirname(ok_file) + if not os.path.exists(dirname): + os.makedirs(dirname) + f = open(pth_file, 'w') + except (OSError, IOError): + self.cant_write_to_target() + else: + try: + f.write(tmpl.format(**locals())) + f.close() + f = None + executable = sys.executable + if os.name == 'nt': + dirname, basename = os.path.split(executable) + alt = os.path.join(dirname, 'pythonw.exe') + use_alt = ( + basename.lower() == 'python.exe' and + os.path.exists(alt) + ) + if use_alt: + # use pythonw.exe to avoid opening a console window + executable = alt + + from distutils.spawn import spawn + + spawn([executable, '-E', '-c', 'pass'], 0) + + if os.path.exists(ok_file): + log.info( + "TEST PASSED: %s appears to support .pth files", + instdir + ) + return True + finally: + if f: + f.close() + if os.path.exists(ok_file): + os.unlink(ok_file) + if os.path.exists(pth_file): + os.unlink(pth_file) + if not self.multi_version: + log.warn("TEST FAILED: %s does NOT support .pth files", instdir) + return False + + def install_egg_scripts(self, dist): + """Write all the scripts for `dist`, unless scripts are excluded""" + if not self.exclude_scripts and dist.metadata_isdir('scripts'): + for script_name in dist.metadata_listdir('scripts'): + if dist.metadata_isdir('scripts/' + script_name): + # The "script" is a directory, likely a Python 3 + # __pycache__ directory, so skip it. + continue + self.install_script( + dist, script_name, + dist.get_metadata('scripts/' + script_name) + ) + self.install_wrapper_scripts(dist) + + def add_output(self, path): + if os.path.isdir(path): + for base, dirs, files in os.walk(path): + for filename in files: + self.outputs.append(os.path.join(base, filename)) + else: + self.outputs.append(path) + + def not_editable(self, spec): + if self.editable: + raise DistutilsArgError( + "Invalid argument %r: you can't use filenames or URLs " + "with --editable (except via the --find-links option)." 
+ % (spec,) + ) + + def check_editable(self, spec): + if not self.editable: + return + + if os.path.exists(os.path.join(self.build_directory, spec.key)): + raise DistutilsArgError( + "%r already exists in %s; can't do a checkout there" % + (spec.key, self.build_directory) + ) + + def easy_install(self, spec, deps=False): + tmpdir = tempfile.mkdtemp(prefix="easy_install-") + if not self.editable: + self.install_site_py() + + try: + if not isinstance(spec, Requirement): + if URL_SCHEME(spec): + # It's a url, download it to tmpdir and process + self.not_editable(spec) + dl = self.package_index.download(spec, tmpdir) + return self.install_item(None, dl, tmpdir, deps, True) + + elif os.path.exists(spec): + # Existing file or directory, just process it directly + self.not_editable(spec) + return self.install_item(None, spec, tmpdir, deps, True) + else: + spec = parse_requirement_arg(spec) + + self.check_editable(spec) + dist = self.package_index.fetch_distribution( + spec, tmpdir, self.upgrade, self.editable, + not self.always_copy, self.local_index + ) + if dist is None: + msg = "Could not find suitable distribution for %r" % spec + if self.always_copy: + msg += " (--always-copy skips system and development eggs)" + raise DistutilsError(msg) + elif dist.precedence == DEVELOP_DIST: + # .egg-info dists don't need installing, just process deps + self.process_distribution(spec, dist, deps, "Using") + return dist + else: + return self.install_item(spec, dist.location, tmpdir, deps) + + finally: + if os.path.exists(tmpdir): + rmtree(tmpdir) + + def install_item(self, spec, download, tmpdir, deps, install_needed=False): + + # Installation is also needed if file in tmpdir or is not an egg + install_needed = install_needed or self.always_copy + install_needed = install_needed or os.path.dirname(download) == tmpdir + install_needed = install_needed or not download.endswith('.egg') + install_needed = install_needed or ( + self.always_copy_from is not None and + os.path.dirname(normalize_path(download)) == + normalize_path(self.always_copy_from) + ) + + if spec and not install_needed: + # at this point, we know it's a local .egg, we just don't know if + # it's already installed. + for dist in self.local_index[spec.project_name]: + if dist.location == download: + break + else: + install_needed = True # it's not in the local index + + log.info("Processing %s", os.path.basename(download)) + + if install_needed: + dists = self.install_eggs(spec, download, tmpdir) + for dist in dists: + self.process_distribution(spec, dist, deps) + else: + dists = [self.egg_distribution(download)] + self.process_distribution(spec, dists[0], deps, "Using") + + if spec is not None: + for dist in dists: + if dist in spec: + return dist + + def select_scheme(self, name): + """Sets the install directories by applying the install schemes.""" + # it's the caller's problem if they supply a bad name! 
+ scheme = INSTALL_SCHEMES[name] + for key in SCHEME_KEYS: + attrname = 'install_' + key + if getattr(self, attrname) is None: + setattr(self, attrname, scheme[key]) + + def process_distribution(self, requirement, dist, deps=True, *info): + self.update_pth(dist) + self.package_index.add(dist) + if dist in self.local_index[dist.key]: + self.local_index.remove(dist) + self.local_index.add(dist) + self.install_egg_scripts(dist) + self.installed_projects[dist.key] = dist + log.info(self.installation_report(requirement, dist, *info)) + if (dist.has_metadata('dependency_links.txt') and + not self.no_find_links): + self.package_index.add_find_links( + dist.get_metadata_lines('dependency_links.txt') + ) + if not deps and not self.always_copy: + return + elif requirement is not None and dist.key != requirement.key: + log.warn("Skipping dependencies for %s", dist) + return # XXX this is not the distribution we were looking for + elif requirement is None or dist not in requirement: + # if we wound up with a different version, resolve what we've got + distreq = dist.as_requirement() + requirement = Requirement(str(distreq)) + log.info("Processing dependencies for %s", requirement) + try: + distros = WorkingSet([]).resolve( + [requirement], self.local_index, self.easy_install + ) + except DistributionNotFound as e: + raise DistutilsError(str(e)) + except VersionConflict as e: + raise DistutilsError(e.report()) + if self.always_copy or self.always_copy_from: + # Force all the relevant distros to be copied or activated + for dist in distros: + if dist.key not in self.installed_projects: + self.easy_install(dist.as_requirement()) + log.info("Finished processing dependencies for %s", requirement) + + def should_unzip(self, dist): + if self.zip_ok is not None: + return not self.zip_ok + if dist.has_metadata('not-zip-safe'): + return True + if not dist.has_metadata('zip-safe'): + return True + return False + + def maybe_move(self, spec, dist_filename, setup_base): + dst = os.path.join(self.build_directory, spec.key) + if os.path.exists(dst): + msg = ( + "%r already exists in %s; build directory %s will not be kept" + ) + log.warn(msg, spec.key, self.build_directory, setup_base) + return setup_base + if os.path.isdir(dist_filename): + setup_base = dist_filename + else: + if os.path.dirname(dist_filename) == setup_base: + os.unlink(dist_filename) # get it out of the tmp dir + contents = os.listdir(setup_base) + if len(contents) == 1: + dist_filename = os.path.join(setup_base, contents[0]) + if os.path.isdir(dist_filename): + # if the only thing there is a directory, move it instead + setup_base = dist_filename + ensure_directory(dst) + shutil.move(setup_base, dst) + return dst + + def install_wrapper_scripts(self, dist): + if self.exclude_scripts: + return + for args in ScriptWriter.best().get_args(dist): + self.write_script(*args) + + def install_script(self, dist, script_name, script_text, dev_path=None): + """Generate a legacy script wrapper and install it""" + spec = str(dist.as_requirement()) + is_script = is_python_script(script_text, script_name) + + if is_script: + body = self._load_template(dev_path) % locals() + script_text = ScriptWriter.get_header(script_text) + body + self.write_script(script_name, _to_ascii(script_text), 'b') + + @staticmethod + def _load_template(dev_path): + """ + There are a couple of template scripts in the package. This + function loads one of them and prepares it for use. 
+ """ + # See https://github.com/pypa/setuptools/issues/134 for info + # on script file naming and downstream issues with SVR4 + name = 'script.tmpl' + if dev_path: + name = name.replace('.tmpl', ' (dev).tmpl') + + raw_bytes = resource_string('setuptools', name) + return raw_bytes.decode('utf-8') + + def write_script(self, script_name, contents, mode="t", blockers=()): + """Write an executable file to the scripts directory""" + self.delete_blockers( # clean up old .py/.pyw w/o a script + [os.path.join(self.script_dir, x) for x in blockers] + ) + log.info("Installing %s script to %s", script_name, self.script_dir) + target = os.path.join(self.script_dir, script_name) + self.add_output(target) + + mask = current_umask() + if not self.dry_run: + ensure_directory(target) + if os.path.exists(target): + os.unlink(target) + with open(target, "w" + mode) as f: + f.write(contents) + chmod(target, 0o777 - mask) + + def install_eggs(self, spec, dist_filename, tmpdir): + # .egg dirs or files are already built, so just return them + if dist_filename.lower().endswith('.egg'): + return [self.install_egg(dist_filename, tmpdir)] + elif dist_filename.lower().endswith('.exe'): + return [self.install_exe(dist_filename, tmpdir)] + + # Anything else, try to extract and build + setup_base = tmpdir + if os.path.isfile(dist_filename) and not dist_filename.endswith('.py'): + unpack_archive(dist_filename, tmpdir, self.unpack_progress) + elif os.path.isdir(dist_filename): + setup_base = os.path.abspath(dist_filename) + + if (setup_base.startswith(tmpdir) # something we downloaded + and self.build_directory and spec is not None): + setup_base = self.maybe_move(spec, dist_filename, setup_base) + + # Find the setup.py file + setup_script = os.path.join(setup_base, 'setup.py') + + if not os.path.exists(setup_script): + setups = glob(os.path.join(setup_base, '*', 'setup.py')) + if not setups: + raise DistutilsError( + "Couldn't find a setup script in %s" % + os.path.abspath(dist_filename) + ) + if len(setups) > 1: + raise DistutilsError( + "Multiple setup scripts in %s" % + os.path.abspath(dist_filename) + ) + setup_script = setups[0] + + # Now run it, and return the result + if self.editable: + log.info(self.report_editable(spec, setup_script)) + return [] + else: + return self.build_and_install(setup_script, setup_base) + + def egg_distribution(self, egg_path): + if os.path.isdir(egg_path): + metadata = PathMetadata(egg_path, os.path.join(egg_path, + 'EGG-INFO')) + else: + metadata = EggMetadata(zipimport.zipimporter(egg_path)) + return Distribution.from_filename(egg_path, metadata=metadata) + + def install_egg(self, egg_path, tmpdir): + destination = os.path.join( + self.install_dir, + os.path.basename(egg_path), + ) + destination = os.path.abspath(destination) + if not self.dry_run: + ensure_directory(destination) + + dist = self.egg_distribution(egg_path) + if not samefile(egg_path, destination): + if os.path.isdir(destination) and not os.path.islink(destination): + dir_util.remove_tree(destination, dry_run=self.dry_run) + elif os.path.exists(destination): + self.execute( + os.unlink, + (destination,), + "Removing " + destination, + ) + try: + new_dist_is_zipped = False + if os.path.isdir(egg_path): + if egg_path.startswith(tmpdir): + f, m = shutil.move, "Moving" + else: + f, m = shutil.copytree, "Copying" + elif self.should_unzip(dist): + self.mkpath(destination) + f, m = self.unpack_and_compile, "Extracting" + else: + new_dist_is_zipped = True + if egg_path.startswith(tmpdir): + f, m = shutil.move, "Moving" + else: 
+ f, m = shutil.copy2, "Copying" + self.execute( + f, + (egg_path, destination), + (m + " %s to %s") % ( + os.path.basename(egg_path), + os.path.dirname(destination) + ), + ) + update_dist_caches( + destination, + fix_zipimporter_caches=new_dist_is_zipped, + ) + except Exception: + update_dist_caches(destination, fix_zipimporter_caches=False) + raise + + self.add_output(destination) + return self.egg_distribution(destination) + + def install_exe(self, dist_filename, tmpdir): + # See if it's valid, get data + cfg = extract_wininst_cfg(dist_filename) + if cfg is None: + raise DistutilsError( + "%s is not a valid distutils Windows .exe" % dist_filename + ) + # Create a dummy distribution object until we build the real distro + dist = Distribution( + None, + project_name=cfg.get('metadata', 'name'), + version=cfg.get('metadata', 'version'), platform=get_platform(), + ) + + # Convert the .exe to an unpacked egg + egg_path = os.path.join(tmpdir, dist.egg_name() + '.egg') + dist.location = egg_path + egg_tmp = egg_path + '.tmp' + _egg_info = os.path.join(egg_tmp, 'EGG-INFO') + pkg_inf = os.path.join(_egg_info, 'PKG-INFO') + ensure_directory(pkg_inf) # make sure EGG-INFO dir exists + dist._provider = PathMetadata(egg_tmp, _egg_info) # XXX + self.exe_to_egg(dist_filename, egg_tmp) + + # Write EGG-INFO/PKG-INFO + if not os.path.exists(pkg_inf): + f = open(pkg_inf, 'w') + f.write('Metadata-Version: 1.0\n') + for k, v in cfg.items('metadata'): + if k != 'target_version': + f.write('%s: %s\n' % (k.replace('_', '-').title(), v)) + f.close() + script_dir = os.path.join(_egg_info, 'scripts') + # delete entry-point scripts to avoid duping + self.delete_blockers([ + os.path.join(script_dir, args[0]) + for args in ScriptWriter.get_args(dist) + ]) + # Build .egg file from tmpdir + bdist_egg.make_zipfile( + egg_path, egg_tmp, verbose=self.verbose, dry_run=self.dry_run, + ) + # install the .egg + return self.install_egg(egg_path, tmpdir) + + def exe_to_egg(self, dist_filename, egg_tmp): + """Extract a bdist_wininst to the directories an egg would use""" + # Check for .pth file and set up prefix translations + prefixes = get_exe_prefixes(dist_filename) + to_compile = [] + native_libs = [] + top_level = {} + + def process(src, dst): + s = src.lower() + for old, new in prefixes: + if s.startswith(old): + src = new + src[len(old):] + parts = src.split('/') + dst = os.path.join(egg_tmp, *parts) + dl = dst.lower() + if dl.endswith('.pyd') or dl.endswith('.dll'): + parts[-1] = bdist_egg.strip_module(parts[-1]) + top_level[os.path.splitext(parts[0])[0]] = 1 + native_libs.append(src) + elif dl.endswith('.py') and old != 'SCRIPTS/': + top_level[os.path.splitext(parts[0])[0]] = 1 + to_compile.append(dst) + return dst + if not src.endswith('.pth'): + log.warn("WARNING: can't process %s", src) + return None + + # extract, tracking .pyd/.dll->native_libs and .py -> to_compile + unpack_archive(dist_filename, egg_tmp, process) + stubs = [] + for res in native_libs: + if res.lower().endswith('.pyd'): # create stubs for .pyd's + parts = res.split('/') + resource = parts[-1] + parts[-1] = bdist_egg.strip_module(parts[-1]) + '.py' + pyfile = os.path.join(egg_tmp, *parts) + to_compile.append(pyfile) + stubs.append(pyfile) + bdist_egg.write_stub(resource, pyfile) + self.byte_compile(to_compile) # compile .py's + bdist_egg.write_safety_flag( + os.path.join(egg_tmp, 'EGG-INFO'), + bdist_egg.analyze_egg(egg_tmp, stubs)) # write zip-safety flag + + for name in 'top_level', 'native_libs': + if locals()[name]: + txt = 
os.path.join(egg_tmp, 'EGG-INFO', name + '.txt') + if not os.path.exists(txt): + f = open(txt, 'w') + f.write('\n'.join(locals()[name]) + '\n') + f.close() + + __mv_warning = textwrap.dedent(""" + Because this distribution was installed --multi-version, before you can + import modules from this package in an application, you will need to + 'import pkg_resources' and then use a 'require()' call similar to one of + these examples, in order to select the desired version: + + pkg_resources.require("%(name)s") # latest installed version + pkg_resources.require("%(name)s==%(version)s") # this exact version + pkg_resources.require("%(name)s>=%(version)s") # this version or higher + """).lstrip() + + __id_warning = textwrap.dedent(""" + Note also that the installation directory must be on sys.path at runtime for + this to work. (e.g. by being the application's script directory, by being on + PYTHONPATH, or by being added to sys.path by your code.) + """) + + def installation_report(self, req, dist, what="Installed"): + """Helpful installation message for display to package users""" + msg = "\n%(what)s %(eggloc)s%(extras)s" + if self.multi_version and not self.no_report: + msg += '\n' + self.__mv_warning + if self.install_dir not in map(normalize_path, sys.path): + msg += '\n' + self.__id_warning + + eggloc = dist.location + name = dist.project_name + version = dist.version + extras = '' # TODO: self.report_extras(req, dist) + return msg % locals() + + __editable_msg = textwrap.dedent(""" + Extracted editable version of %(spec)s to %(dirname)s + + If it uses setuptools in its setup script, you can activate it in + "development" mode by going to that directory and running:: + + %(python)s setup.py develop + + See the setuptools documentation for the "develop" command for more info. + """).lstrip() + + def report_editable(self, spec, setup_script): + dirname = os.path.dirname(setup_script) + python = sys.executable + return '\n' + self.__editable_msg % locals() + + def run_setup(self, setup_script, setup_base, args): + sys.modules.setdefault('distutils.command.bdist_egg', bdist_egg) + sys.modules.setdefault('distutils.command.egg_info', egg_info) + + args = list(args) + if self.verbose > 2: + v = 'v' * (self.verbose - 1) + args.insert(0, '-' + v) + elif self.verbose < 2: + args.insert(0, '-q') + if self.dry_run: + args.insert(0, '-n') + log.info( + "Running %s %s", setup_script[len(setup_base) + 1:], ' '.join(args) + ) + try: + run_setup(setup_script, args) + except SystemExit as v: + raise DistutilsError("Setup script exited with %s" % (v.args[0],)) + + def build_and_install(self, setup_script, setup_base): + args = ['bdist_egg', '--dist-dir'] + + dist_dir = tempfile.mkdtemp( + prefix='egg-dist-tmp-', dir=os.path.dirname(setup_script) + ) + try: + self._set_fetcher_options(os.path.dirname(setup_script)) + args.append(dist_dir) + + self.run_setup(setup_script, setup_base, args) + all_eggs = Environment([dist_dir]) + eggs = [] + for key in all_eggs: + for dist in all_eggs[key]: + eggs.append(self.install_egg(dist.location, setup_base)) + if not eggs and not self.dry_run: + log.warn("No eggs found in %s (setup script problem?)", + dist_dir) + return eggs + finally: + rmtree(dist_dir) + log.set_verbosity(self.verbose) # restore our log verbosity + + def _set_fetcher_options(self, base): + """ + When easy_install is about to run bdist_egg on a source dist, that + source dist might have 'setup_requires' directives, requiring + additional fetching. 
Ensure the fetcher options given to easy_install + are available to that command as well. + """ + # find the fetch options from easy_install and write them out + # to the setup.cfg file. + ei_opts = self.distribution.get_option_dict('easy_install').copy() + fetch_directives = ( + 'find_links', 'site_dirs', 'index_url', 'optimize', + 'site_dirs', 'allow_hosts', + ) + fetch_options = {} + for key, val in ei_opts.items(): + if key not in fetch_directives: + continue + fetch_options[key.replace('_', '-')] = val[1] + # create a settings dictionary suitable for `edit_config` + settings = dict(easy_install=fetch_options) + cfg_filename = os.path.join(base, 'setup.cfg') + setopt.edit_config(cfg_filename, settings) + + def update_pth(self, dist): + if self.pth_file is None: + return + + for d in self.pth_file[dist.key]: # drop old entries + if self.multi_version or d.location != dist.location: + log.info("Removing %s from easy-install.pth file", d) + self.pth_file.remove(d) + if d.location in self.shadow_path: + self.shadow_path.remove(d.location) + + if not self.multi_version: + if dist.location in self.pth_file.paths: + log.info( + "%s is already the active version in easy-install.pth", + dist, + ) + else: + log.info("Adding %s to easy-install.pth file", dist) + self.pth_file.add(dist) # add new entry + if dist.location not in self.shadow_path: + self.shadow_path.append(dist.location) + + if not self.dry_run: + + self.pth_file.save() + + if dist.key == 'setuptools': + # Ensure that setuptools itself never becomes unavailable! + # XXX should this check for latest version? + filename = os.path.join(self.install_dir, 'setuptools.pth') + if os.path.islink(filename): + os.unlink(filename) + f = open(filename, 'wt') + f.write(self.pth_file.make_relative(dist.location) + '\n') + f.close() + + def unpack_progress(self, src, dst): + # Progress filter for unpacking + log.debug("Unpacking %s to %s", src, dst) + return dst # only unpack-and-compile skips files for dry run + + def unpack_and_compile(self, egg_path, destination): + to_compile = [] + to_chmod = [] + + def pf(src, dst): + if dst.endswith('.py') and not src.startswith('EGG-INFO/'): + to_compile.append(dst) + elif dst.endswith('.dll') or dst.endswith('.so'): + to_chmod.append(dst) + self.unpack_progress(src, dst) + return not self.dry_run and dst or None + + unpack_archive(egg_path, destination, pf) + self.byte_compile(to_compile) + if not self.dry_run: + for f in to_chmod: + mode = ((os.stat(f)[stat.ST_MODE]) | 0o555) & 0o7755 + chmod(f, mode) + + def byte_compile(self, to_compile): + if sys.dont_write_bytecode: + self.warn('byte-compiling is disabled, skipping.') + return + + from distutils.util import byte_compile + + try: + # try to make the byte compile messages quieter + log.set_verbosity(self.verbose - 1) + + byte_compile(to_compile, optimize=0, force=1, dry_run=self.dry_run) + if self.optimize: + byte_compile( + to_compile, optimize=self.optimize, force=1, + dry_run=self.dry_run, + ) + finally: + log.set_verbosity(self.verbose) # restore original verbosity + + __no_default_msg = textwrap.dedent(""" + bad install directory or PYTHONPATH + + You are attempting to install a package to a directory that is not + on PYTHONPATH and which Python does not read ".pth" files from. 
The + installation directory you specified (via --install-dir, --prefix, or + the distutils default setting) was: + + %s + + and your PYTHONPATH environment variable currently contains: + + %r + + Here are some of your options for correcting the problem: + + * You can choose a different installation directory, i.e., one that is + on PYTHONPATH or supports .pth files + + * You can add the installation directory to the PYTHONPATH environment + variable. (It must then also be on PYTHONPATH whenever you run + Python and want to use the package(s) you are installing.) + + * You can set up the installation directory to support ".pth" files by + using one of the approaches described here: + + https://setuptools.readthedocs.io/en/latest/easy_install.html#custom-installation-locations + + + Please make the appropriate changes for your system and try again.""").lstrip() + + def no_default_version_msg(self): + template = self.__no_default_msg + return template % (self.install_dir, os.environ.get('PYTHONPATH', '')) + + def install_site_py(self): + """Make sure there's a site.py in the target dir, if needed""" + + if self.sitepy_installed: + return # already did it, or don't need to + + sitepy = os.path.join(self.install_dir, "site.py") + source = resource_string("setuptools", "site-patch.py") + source = source.decode('utf-8') + current = "" + + if os.path.exists(sitepy): + log.debug("Checking existing site.py in %s", self.install_dir) + with io.open(sitepy) as strm: + current = strm.read() + + if not current.startswith('def __boot():'): + raise DistutilsError( + "%s is not a setuptools-generated site.py; please" + " remove it." % sitepy + ) + + if current != source: + log.info("Creating %s", sitepy) + if not self.dry_run: + ensure_directory(sitepy) + with io.open(sitepy, 'w', encoding='utf-8') as strm: + strm.write(source) + self.byte_compile([sitepy]) + + self.sitepy_installed = True + + def create_home_path(self): + """Create directories under ~.""" + if not self.user: + return + home = convert_path(os.path.expanduser("~")) + for name, path in six.iteritems(self.config_vars): + if path.startswith(home) and not os.path.isdir(path): + self.debug_print("os.makedirs('%s', 0o700)" % path) + os.makedirs(path, 0o700) + + INSTALL_SCHEMES = dict( + posix=dict( + install_dir='$base/lib/python$py_version_short/site-packages', + script_dir='$base/bin', + ), + ) + + DEFAULT_SCHEME = dict( + install_dir='$base/Lib/site-packages', + script_dir='$base/Scripts', + ) + + def _expand(self, *attrs): + config_vars = self.get_finalized_command('install').config_vars + + if self.prefix: + # Set default install_dir/scripts from --prefix + config_vars = config_vars.copy() + config_vars['base'] = self.prefix + scheme = self.INSTALL_SCHEMES.get(os.name, self.DEFAULT_SCHEME) + for attr, val in scheme.items(): + if getattr(self, attr, None) is None: + setattr(self, attr, val) + + from distutils.util import subst_vars + + for attr in attrs: + val = getattr(self, attr) + if val is not None: + val = subst_vars(val, config_vars) + if os.name == 'posix': + val = os.path.expanduser(val) + setattr(self, attr, val) + + +def get_site_dirs(): + # return a list of 'site' dirs + sitedirs = [_f for _f in os.environ.get('PYTHONPATH', + '').split(os.pathsep) if _f] + prefixes = [sys.prefix] + if sys.exec_prefix != sys.prefix: + prefixes.append(sys.exec_prefix) + for prefix in prefixes: + if prefix: + if sys.platform in ('os2emx', 'riscos'): + sitedirs.append(os.path.join(prefix, "Lib", "site-packages")) + elif os.sep == '/': + 
sitedirs.extend([ + os.path.join( + prefix, + "lib", + "python" + sys.version[:3], + "site-packages", + ), + os.path.join(prefix, "lib", "site-python"), + ]) + else: + sitedirs.extend([ + prefix, + os.path.join(prefix, "lib", "site-packages"), + ]) + if sys.platform == 'darwin': + # for framework builds *only* we add the standard Apple + # locations. Currently only per-user, but /Library and + # /Network/Library could be added too + if 'Python.framework' in prefix: + home = os.environ.get('HOME') + if home: + home_sp = os.path.join( + home, + 'Library', + 'Python', + sys.version[:3], + 'site-packages', + ) + sitedirs.append(home_sp) + lib_paths = get_path('purelib'), get_path('platlib') + for site_lib in lib_paths: + if site_lib not in sitedirs: + sitedirs.append(site_lib) + + if site.ENABLE_USER_SITE: + sitedirs.append(site.USER_SITE) + + try: + sitedirs.extend(site.getsitepackages()) + except AttributeError: + pass + + sitedirs = list(map(normalize_path, sitedirs)) + + return sitedirs + + +def expand_paths(inputs): + """Yield sys.path directories that might contain "old-style" packages""" + + seen = {} + + for dirname in inputs: + dirname = normalize_path(dirname) + if dirname in seen: + continue + + seen[dirname] = 1 + if not os.path.isdir(dirname): + continue + + files = os.listdir(dirname) + yield dirname, files + + for name in files: + if not name.endswith('.pth'): + # We only care about the .pth files + continue + if name in ('easy-install.pth', 'setuptools.pth'): + # Ignore .pth files that we control + continue + + # Read the .pth file + f = open(os.path.join(dirname, name)) + lines = list(yield_lines(f)) + f.close() + + # Yield existing non-dupe, non-import directory lines from it + for line in lines: + if not line.startswith("import"): + line = normalize_path(line.rstrip()) + if line not in seen: + seen[line] = 1 + if not os.path.isdir(line): + continue + yield line, os.listdir(line) + + +def extract_wininst_cfg(dist_filename): + """Extract configuration data from a bdist_wininst .exe + + Returns a configparser.RawConfigParser, or None + """ + f = open(dist_filename, 'rb') + try: + endrec = zipfile._EndRecData(f) + if endrec is None: + return None + + prepended = (endrec[9] - endrec[5]) - endrec[6] + if prepended < 12: # no wininst data here + return None + f.seek(prepended - 12) + + tag, cfglen, bmlen = struct.unpack("<iii", f.read(12)) + if tag not in (0x1234567A, 0x1234567B): + return None # not a valid tag + + f.seek(prepended - (12 + cfglen)) + init = {'version': '', 'target_version': ''} + cfg = configparser.RawConfigParser(init) + try: + part = f.read(cfglen) + # Read up to the first null byte. + config = part.split(b'\0', 1)[0] + # Now the config is in bytes, but for RawConfigParser, it should + # be text, so decode it. 
+ config = config.decode(sys.getfilesystemencoding()) + cfg.readfp(six.StringIO(config)) + except configparser.Error: + return None + if not cfg.has_section('metadata') or not cfg.has_section('Setup'): + return None + return cfg + + finally: + f.close() + + +def get_exe_prefixes(exe_filename): + """Get exe->egg path translations for a given .exe file""" + + prefixes = [ + ('PURELIB/', ''), + ('PLATLIB/pywin32_system32', ''), + ('PLATLIB/', ''), + ('SCRIPTS/', 'EGG-INFO/scripts/'), + ('DATA/lib/site-packages', ''), + ] + z = zipfile.ZipFile(exe_filename) + try: + for info in z.infolist(): + name = info.filename + parts = name.split('/') + if len(parts) == 3 and parts[2] == 'PKG-INFO': + if parts[1].endswith('.egg-info'): + prefixes.insert(0, ('/'.join(parts[:2]), 'EGG-INFO/')) + break + if len(parts) != 2 or not name.endswith('.pth'): + continue + if name.endswith('-nspkg.pth'): + continue + if parts[0].upper() in ('PURELIB', 'PLATLIB'): + contents = z.read(name) + if six.PY3: + contents = contents.decode() + for pth in yield_lines(contents): + pth = pth.strip().replace('\\', '/') + if not pth.startswith('import'): + prefixes.append((('%s/%s/' % (parts[0], pth)), '')) + finally: + z.close() + prefixes = [(x.lower(), y) for x, y in prefixes] + prefixes.sort() + prefixes.reverse() + return prefixes + + +class PthDistributions(Environment): + """A .pth file with Distribution paths in it""" + + dirty = False + + def __init__(self, filename, sitedirs=()): + self.filename = filename + self.sitedirs = list(map(normalize_path, sitedirs)) + self.basedir = normalize_path(os.path.dirname(self.filename)) + self._load() + Environment.__init__(self, [], None, None) + for path in yield_lines(self.paths): + list(map(self.add, find_distributions(path, True))) + + def _load(self): + self.paths = [] + saw_import = False + seen = dict.fromkeys(self.sitedirs) + if os.path.isfile(self.filename): + f = open(self.filename, 'rt') + for line in f: + if line.startswith('import'): + saw_import = True + continue + path = line.rstrip() + self.paths.append(path) + if not path.strip() or path.strip().startswith('#'): + continue + # skip non-existent paths, in case somebody deleted a package + # manually, and duplicate paths as well + path = self.paths[-1] = normalize_path( + os.path.join(self.basedir, path) + ) + if not os.path.exists(path) or path in seen: + self.paths.pop() # skip it + self.dirty = True # we cleaned up, so we're dirty now :) + continue + seen[path] = 1 + f.close() + + if self.paths and not saw_import: + self.dirty = True # ensure anything we touch has import wrappers + while self.paths and not self.paths[-1].strip(): + self.paths.pop() + + def save(self): + """Write changed .pth file back to disk""" + if not self.dirty: + return + + rel_paths = list(map(self.make_relative, self.paths)) + if rel_paths: + log.debug("Saving %s", self.filename) + lines = self._wrap_lines(rel_paths) + data = '\n'.join(lines) + '\n' + + if os.path.islink(self.filename): + os.unlink(self.filename) + with open(self.filename, 'wt') as f: + f.write(data) + + elif os.path.exists(self.filename): + log.debug("Deleting empty %s", self.filename) + os.unlink(self.filename) + + self.dirty = False + + @staticmethod + def _wrap_lines(lines): + return lines + + def add(self, dist): + """Add `dist` to the distribution map""" + new_path = ( + dist.location not in self.paths and ( + dist.location not in self.sitedirs or + # account for '.' 
being in PYTHONPATH + dist.location == os.getcwd() + ) + ) + if new_path: + self.paths.append(dist.location) + self.dirty = True + Environment.add(self, dist) + + def remove(self, dist): + """Remove `dist` from the distribution map""" + while dist.location in self.paths: + self.paths.remove(dist.location) + self.dirty = True + Environment.remove(self, dist) + + def make_relative(self, path): + npath, last = os.path.split(normalize_path(path)) + baselen = len(self.basedir) + parts = [last] + sep = os.altsep == '/' and '/' or os.sep + while len(npath) >= baselen: + if npath == self.basedir: + parts.append(os.curdir) + parts.reverse() + return sep.join(parts) + npath, last = os.path.split(npath) + parts.append(last) + else: + return path + + +class RewritePthDistributions(PthDistributions): + @classmethod + def _wrap_lines(cls, lines): + yield cls.prelude + for line in lines: + yield line + yield cls.postlude + + prelude = _one_liner(""" + import sys + sys.__plen = len(sys.path) + """) + postlude = _one_liner(""" + import sys + new = sys.path[sys.__plen:] + del sys.path[sys.__plen:] + p = getattr(sys, '__egginsert', 0) + sys.path[p:p] = new + sys.__egginsert = p + len(new) + """) + + +if os.environ.get('SETUPTOOLS_SYS_PATH_TECHNIQUE', 'raw') == 'rewrite': + PthDistributions = RewritePthDistributions + + +def _first_line_re(): + """ + Return a regular expression based on first_line_re suitable for matching + strings. + """ + if isinstance(first_line_re.pattern, str): + return first_line_re + + # first_line_re in Python >=3.1.4 and >=3.2.1 is a bytes pattern. + return re.compile(first_line_re.pattern.decode()) + + +def auto_chmod(func, arg, exc): + if func is os.remove and os.name == 'nt': + chmod(arg, stat.S_IWRITE) + return func(arg) + et, ev, _ = sys.exc_info() + six.reraise(et, (ev[0], ev[1] + (" %s %s" % (func, arg)))) + + +def update_dist_caches(dist_path, fix_zipimporter_caches): + """ + Fix any globally cached `dist_path` related data + + `dist_path` should be a path of a newly installed egg distribution (zipped + or unzipped). + + sys.path_importer_cache contains finder objects that have been cached when + importing data from the original distribution. Any such finders need to be + cleared since the replacement distribution might be packaged differently, + e.g. a zipped egg distribution might get replaced with an unzipped egg + folder or vice versa. Having the old finders cached may then cause Python + to attempt loading modules from the replacement distribution using an + incorrect loader. + + zipimport.zipimporter objects are Python loaders charged with importing + data packaged inside zip archives. If stale loaders referencing the + original distribution, are left behind, they can fail to load modules from + the replacement distribution. E.g. if an old zipimport.zipimporter instance + is used to load data from a new zipped egg archive, it may cause the + operation to attempt to locate the requested data in the wrong location - + one indicated by the original distribution's zip archive directory + information. Such an operation may then fail outright, e.g. report having + read a 'bad local file header', or even worse, it may fail silently & + return invalid data. + + zipimport._zip_directory_cache contains cached zip archive directory + information for all existing zipimport.zipimporter instances and all such + instances connected to the same archive share the same cached directory + information. 
+ + If asked, and the underlying Python implementation allows it, we can fix + all existing zipimport.zipimporter instances instead of having to track + them down and remove them one by one, by updating their shared cached zip + archive directory information. This, of course, assumes that the + replacement distribution is packaged as a zipped egg. + + If not asked to fix existing zipimport.zipimporter instances, we still do + our best to clear any remaining zipimport.zipimporter related cached data + that might somehow later get used when attempting to load data from the new + distribution and thus cause such load operations to fail. Note that when + tracking down such remaining stale data, we can not catch every conceivable + usage from here, and we clear only those that we know of and have found to + cause problems if left alive. Any remaining caches should be updated by + whomever is in charge of maintaining them, i.e. they should be ready to + handle us replacing their zip archives with new distributions at runtime. + + """ + # There are several other known sources of stale zipimport.zipimporter + # instances that we do not clear here, but might if ever given a reason to + # do so: + # * Global setuptools pkg_resources.working_set (a.k.a. 'master working + # set') may contain distributions which may in turn contain their + # zipimport.zipimporter loaders. + # * Several zipimport.zipimporter loaders held by local variables further + # up the function call stack when running the setuptools installation. + # * Already loaded modules may have their __loader__ attribute set to the + # exact loader instance used when importing them. Python 3.4 docs state + # that this information is intended mostly for introspection and so is + # not expected to cause us problems. + normalized_path = normalize_path(dist_path) + _uncache(normalized_path, sys.path_importer_cache) + if fix_zipimporter_caches: + _replace_zip_directory_cache_data(normalized_path) + else: + # Here, even though we do not want to fix existing and now stale + # zipimporter cache information, we still want to remove it. Related to + # Python's zip archive directory information cache, we clear each of + # its stale entries in two phases: + # 1. Clear the entry so attempting to access zip archive information + # via any existing stale zipimport.zipimporter instances fails. + # 2. Remove the entry from the cache so any newly constructed + # zipimport.zipimporter instances do not end up using old stale + # zip archive directory information. + # This whole stale data removal step does not seem strictly necessary, + # but has been left in because it was done before we started replacing + # the zip archive directory information cache content if possible, and + # there are no relevant unit tests that we can depend on to tell us if + # this is really needed. + _remove_and_clear_zip_directory_cache_data(normalized_path) + + +def _collect_zipimporter_cache_entries(normalized_path, cache): + """ + Return zipimporter cache entry keys related to a given normalized path. + + Alternative path spellings (e.g. those using different character case or + those using alternative path separators) related to the same path are + included. Any sub-path entries are included as well, i.e. those + corresponding to zip archives embedded in other zip archives. 
+ + """ + result = [] + prefix_len = len(normalized_path) + for p in cache: + np = normalize_path(p) + if (np.startswith(normalized_path) and + np[prefix_len:prefix_len + 1] in (os.sep, '')): + result.append(p) + return result + + +def _update_zipimporter_cache(normalized_path, cache, updater=None): + """ + Update zipimporter cache data for a given normalized path. + + Any sub-path entries are processed as well, i.e. those corresponding to zip + archives embedded in other zip archives. + + Given updater is a callable taking a cache entry key and the original entry + (after already removing the entry from the cache), and expected to update + the entry and possibly return a new one to be inserted in its place. + Returning None indicates that the entry should not be replaced with a new + one. If no updater is given, the cache entries are simply removed without + any additional processing, the same as if the updater simply returned None. + + """ + for p in _collect_zipimporter_cache_entries(normalized_path, cache): + # N.B. pypy's custom zipimport._zip_directory_cache implementation does + # not support the complete dict interface: + # * Does not support item assignment, thus not allowing this function + # to be used only for removing existing cache entries. + # * Does not support the dict.pop() method, forcing us to use the + # get/del patterns instead. For more detailed information see the + # following links: + # https://github.com/pypa/setuptools/issues/202#issuecomment-202913420 + # https://bitbucket.org/pypy/pypy/src/dd07756a34a41f674c0cacfbc8ae1d4cc9ea2ae4/pypy/module/zipimport/interp_zipimport.py#cl-99 + old_entry = cache[p] + del cache[p] + new_entry = updater and updater(p, old_entry) + if new_entry is not None: + cache[p] = new_entry + + +def _uncache(normalized_path, cache): + _update_zipimporter_cache(normalized_path, cache) + + +def _remove_and_clear_zip_directory_cache_data(normalized_path): + def clear_and_remove_cached_zip_archive_directory_data(path, old_entry): + old_entry.clear() + + _update_zipimporter_cache( + normalized_path, zipimport._zip_directory_cache, + updater=clear_and_remove_cached_zip_archive_directory_data) + + +# PyPy Python implementation does not allow directly writing to the +# zipimport._zip_directory_cache and so prevents us from attempting to correct +# its content. The best we can do there is clear the problematic cache content +# and have PyPy repopulate it as needed. The downside is that if there are any +# stale zipimport.zipimporter instances laying around, attempting to use them +# will fail due to not having its zip archive directory information available +# instead of being automatically corrected to use the new correct zip archive +# directory information. +if '__pypy__' in sys.builtin_module_names: + _replace_zip_directory_cache_data = \ + _remove_and_clear_zip_directory_cache_data +else: + + def _replace_zip_directory_cache_data(normalized_path): + def replace_cached_zip_archive_directory_data(path, old_entry): + # N.B. In theory, we could load the zip directory information just + # once for all updated path spellings, and then copy it locally and + # update its contained path strings to contain the correct + # spelling, but that seems like a way too invasive move (this cache + # structure is not officially documented anywhere and could in + # theory change with new Python releases) for no significant + # benefit. 
+ old_entry.clear() + zipimport.zipimporter(path) + old_entry.update(zipimport._zip_directory_cache[path]) + return old_entry + + _update_zipimporter_cache( + normalized_path, zipimport._zip_directory_cache, + updater=replace_cached_zip_archive_directory_data) + + +def is_python(text, filename='<string>'): + "Is this string a valid Python script?" + try: + compile(text, filename, 'exec') + except (SyntaxError, TypeError): + return False + else: + return True + + +def is_sh(executable): + """Determine if the specified executable is a .sh (contains a #! line)""" + try: + with io.open(executable, encoding='latin-1') as fp: + magic = fp.read(2) + except (OSError, IOError): + return executable + return magic == '#!' + + +def nt_quote_arg(arg): + """Quote a command line argument according to Windows parsing rules""" + return subprocess.list2cmdline([arg]) + + +def is_python_script(script_text, filename): + """Is this text, as a whole, a Python script? (as opposed to shell/bat/etc. + """ + if filename.endswith('.py') or filename.endswith('.pyw'): + return True # extension says it's Python + if is_python(script_text, filename): + return True # it's syntactically valid Python + if script_text.startswith('#!'): + # It begins with a '#!' line, so check if 'python' is in it somewhere + return 'python' in script_text.splitlines()[0].lower() + + return False # Not any Python I can recognize + + +try: + from os import chmod as _chmod +except ImportError: + # Jython compatibility + def _chmod(*args): + pass + + +def chmod(path, mode): + log.debug("changing mode of %s to %o", path, mode) + try: + _chmod(path, mode) + except os.error as e: + log.debug("chmod failed: %s", e) + + +class CommandSpec(list): + """ + A command spec for a #! header, specified as a list of arguments akin to + those passed to Popen. + """ + + options = [] + split_args = dict() + + @classmethod + def best(cls): + """ + Choose the best CommandSpec class based on environmental conditions. + """ + return cls + + @classmethod + def _sys_executable(cls): + _default = os.path.normpath(sys.executable) + return os.environ.get('__PYVENV_LAUNCHER__', _default) + + @classmethod + def from_param(cls, param): + """ + Construct a CommandSpec from a parameter to build_scripts, which may + be None. + """ + if isinstance(param, cls): + return param + if isinstance(param, list): + return cls(param) + if param is None: + return cls.from_environment() + # otherwise, assume it's a string. + return cls.from_string(param) + + @classmethod + def from_environment(cls): + return cls([cls._sys_executable()]) + + @classmethod + def from_string(cls, string): + """ + Construct a command spec from a simple string representing a command + line parseable by shlex.split. + """ + items = shlex.split(string, **cls.split_args) + return cls(items) + + def install_options(self, script_text): + self.options = shlex.split(self._extract_options(script_text)) + cmdline = subprocess.list2cmdline(self) + if not isascii(cmdline): + self.options[:0] = ['-x'] + + @staticmethod + def _extract_options(orig_script): + """ + Extract any options from the first line of the script. 
+ """ + first = (orig_script + '\n').splitlines()[0] + match = _first_line_re().match(first) + options = match.group(1) or '' if match else '' + return options.strip() + + def as_header(self): + return self._render(self + list(self.options)) + + @staticmethod + def _strip_quotes(item): + _QUOTES = '"\'' + for q in _QUOTES: + if item.startswith(q) and item.endswith(q): + return item[1:-1] + return item + + @staticmethod + def _render(items): + cmdline = subprocess.list2cmdline( + CommandSpec._strip_quotes(item.strip()) for item in items) + return '#!' + cmdline + '\n' + + +# For pbr compat; will be removed in a future version. +sys_executable = CommandSpec._sys_executable() + + +class WindowsCommandSpec(CommandSpec): + split_args = dict(posix=False) + + +class ScriptWriter(object): + """ + Encapsulates behavior around writing entry point scripts for console and + gui apps. + """ + + template = textwrap.dedent(""" + # EASY-INSTALL-ENTRY-SCRIPT: %(spec)r,%(group)r,%(name)r + __requires__ = %(spec)r + import re + import sys + from pkg_resources import load_entry_point + + if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) + sys.exit( + load_entry_point(%(spec)r, %(group)r, %(name)r)() + ) + """).lstrip() + + command_spec_class = CommandSpec + + @classmethod + def get_script_args(cls, dist, executable=None, wininst=False): + # for backward compatibility + warnings.warn("Use get_args", DeprecationWarning) + writer = (WindowsScriptWriter if wininst else ScriptWriter).best() + header = cls.get_script_header("", executable, wininst) + return writer.get_args(dist, header) + + @classmethod + def get_script_header(cls, script_text, executable=None, wininst=False): + # for backward compatibility + warnings.warn("Use get_header", DeprecationWarning) + if wininst: + executable = "python.exe" + cmd = cls.command_spec_class.best().from_param(executable) + cmd.install_options(script_text) + return cmd.as_header() + + @classmethod + def get_args(cls, dist, header=None): + """ + Yield write_script() argument tuples for a distribution's + console_scripts and gui_scripts entry points. + """ + if header is None: + header = cls.get_header() + spec = str(dist.as_requirement()) + for type_ in 'console', 'gui': + group = type_ + '_scripts' + for name, ep in dist.get_entry_map(group).items(): + cls._ensure_safe_name(name) + script_text = cls.template % locals() + args = cls._get_script_args(type_, name, header, script_text) + for res in args: + yield res + + @staticmethod + def _ensure_safe_name(name): + """ + Prevent paths in *_scripts entry point names. + """ + has_path_sep = re.search(r'[\\/]', name) + if has_path_sep: + raise ValueError("Path separators not allowed in script names") + + @classmethod + def get_writer(cls, force_windows): + # for backward compatibility + warnings.warn("Use best", DeprecationWarning) + return WindowsScriptWriter.best() if force_windows else cls.best() + + @classmethod + def best(cls): + """ + Select the best ScriptWriter for this environment. + """ + if sys.platform == 'win32' or (os.name == 'java' and os._name == 'nt'): + return WindowsScriptWriter.best() + else: + return cls + + @classmethod + def _get_script_args(cls, type_, name, header, script_text): + # Simply write the stub with no extension. + yield (name, header + script_text) + + @classmethod + def get_header(cls, script_text="", executable=None): + """Create a #! 
line, getting options (if any) from script_text""" + cmd = cls.command_spec_class.best().from_param(executable) + cmd.install_options(script_text) + return cmd.as_header() + + +class WindowsScriptWriter(ScriptWriter): + command_spec_class = WindowsCommandSpec + + @classmethod + def get_writer(cls): + # for backward compatibility + warnings.warn("Use best", DeprecationWarning) + return cls.best() + + @classmethod + def best(cls): + """ + Select the best ScriptWriter suitable for Windows + """ + writer_lookup = dict( + executable=WindowsExecutableLauncherWriter, + natural=cls, + ) + # for compatibility, use the executable launcher by default + launcher = os.environ.get('SETUPTOOLS_LAUNCHER', 'executable') + return writer_lookup[launcher] + + @classmethod + def _get_script_args(cls, type_, name, header, script_text): + "For Windows, add a .py extension" + ext = dict(console='.pya', gui='.pyw')[type_] + if ext not in os.environ['PATHEXT'].lower().split(';'): + msg = ( + "{ext} not listed in PATHEXT; scripts will not be " + "recognized as executables." + ).format(**locals()) + warnings.warn(msg, UserWarning) + old = ['.pya', '.py', '-script.py', '.pyc', '.pyo', '.pyw', '.exe'] + old.remove(ext) + header = cls._adjust_header(type_, header) + blockers = [name + x for x in old] + yield name + ext, header + script_text, 't', blockers + + @classmethod + def _adjust_header(cls, type_, orig_header): + """ + Make sure 'pythonw' is used for gui and and 'python' is used for + console (regardless of what sys.executable is). + """ + pattern = 'pythonw.exe' + repl = 'python.exe' + if type_ == 'gui': + pattern, repl = repl, pattern + pattern_ob = re.compile(re.escape(pattern), re.IGNORECASE) + new_header = pattern_ob.sub(string=orig_header, repl=repl) + return new_header if cls._use_header(new_header) else orig_header + + @staticmethod + def _use_header(new_header): + """ + Should _adjust_header use the replaced header? + + On non-windows systems, always use. On + Windows systems, only use the replaced header if it resolves + to an executable on the system. + """ + clean_header = new_header[2:-1].strip('"') + return sys.platform != 'win32' or find_executable(clean_header) + + +class WindowsExecutableLauncherWriter(WindowsScriptWriter): + @classmethod + def _get_script_args(cls, type_, name, header, script_text): + """ + For Windows, add a .py extension and an .exe launcher + """ + if type_ == 'gui': + launcher_type = 'gui' + ext = '-script.pyw' + old = ['.pyw'] + else: + launcher_type = 'cli' + ext = '-script.py' + old = ['.py', '.pyc', '.pyo'] + hdr = cls._adjust_header(type_, header) + blockers = [name + x for x in old] + yield (name + ext, hdr + script_text, 't', blockers) + yield ( + name + '.exe', get_win_launcher(launcher_type), + 'b' # write in binary mode + ) + if not is_64bit(): + # install a manifest for the launcher to prevent Windows + # from detecting it as an installer (which it will for + # launchers like easy_install.exe). Consider only + # adding a manifest for launchers detected as installers. + # See Distribute #143 for details. + m_name = name + '.exe.manifest' + yield (m_name, load_launcher_manifest(name), 't') + + +# for backward-compatibility +get_script_args = ScriptWriter.get_script_args +get_script_header = ScriptWriter.get_script_header + + +def get_win_launcher(type): + """ + Load the Windows launcher (executable) suitable for launching a script. + + `type` should be either 'cli' or 'gui' + + Returns the executable as a byte string. 
+ """ + launcher_fn = '%s.exe' % type + if is_64bit(): + launcher_fn = launcher_fn.replace(".", "-64.") + else: + launcher_fn = launcher_fn.replace(".", "-32.") + return resource_string('setuptools', launcher_fn) + + +def load_launcher_manifest(name): + manifest = pkg_resources.resource_string(__name__, 'launcher manifest.xml') + if six.PY2: + return manifest % vars() + else: + return manifest.decode('utf-8') % vars() + + +def rmtree(path, ignore_errors=False, onerror=auto_chmod): + return shutil.rmtree(path, ignore_errors, onerror) + + +def current_umask(): + tmp = os.umask(0o022) + os.umask(tmp) + return tmp + + +def bootstrap(): + # This function is called when setuptools*.egg is run using /bin/sh + import setuptools + + argv0 = os.path.dirname(setuptools.__path__[0]) + sys.argv[0] = argv0 + sys.argv.append(argv0) + main() + + +def main(argv=None, **kw): + from setuptools import setup + from setuptools.dist import Distribution + + class DistributionWithoutHelpCommands(Distribution): + common_usage = "" + + def _show_help(self, *args, **kw): + with _patch_usage(): + Distribution._show_help(self, *args, **kw) + + if argv is None: + argv = sys.argv[1:] + + with _patch_usage(): + setup( + script_args=['-q', 'easy_install', '-v'] + argv, + script_name=sys.argv[0] or 'easy_install', + distclass=DistributionWithoutHelpCommands, + **kw + ) + + +@contextlib.contextmanager +def _patch_usage(): + import distutils.core + USAGE = textwrap.dedent(""" + usage: %(script)s [options] requirement_or_url ... + or: %(script)s --help + """).lstrip() + + def gen_usage(script_name): + return USAGE % dict( + script=os.path.basename(script_name), + ) + + saved = distutils.core.gen_usage + distutils.core.gen_usage = gen_usage + try: + yield + finally: + distutils.core.gen_usage = saved diff --git a/env/lib/python3.6/site-packages/setuptools/command/egg_info.py b/env/lib/python3.6/site-packages/setuptools/command/egg_info.py new file mode 100644 index 0000000..6cc8f4c --- /dev/null +++ b/env/lib/python3.6/site-packages/setuptools/command/egg_info.py @@ -0,0 +1,697 @@ +"""setuptools.command.egg_info + +Create a distribution's .egg-info directory and contents""" + +from distutils.filelist import FileList as _FileList +from distutils.errors import DistutilsInternalError +from distutils.util import convert_path +from distutils import log +import distutils.errors +import distutils.filelist +import os +import re +import sys +import io +import warnings +import time +import collections + +from setuptools.extern import six +from setuptools.extern.six.moves import map + +from setuptools import Command +from setuptools.command.sdist import sdist +from setuptools.command.sdist import walk_revctrl +from setuptools.command.setopt import edit_config +from setuptools.command import bdist_egg +from pkg_resources import ( + parse_requirements, safe_name, parse_version, + safe_version, yield_lines, EntryPoint, iter_entry_points, to_filename) +import setuptools.unicode_utils as unicode_utils +from setuptools.glob import glob + +from pkg_resources.extern import packaging + +try: + from setuptools_svn import svn_utils +except ImportError: + pass + + +def translate_pattern(glob): + """ + Translate a file path glob like '*.txt' in to a regular expression. + This differs from fnmatch.translate which allows wildcards to match + directory separators. It also knows about '**/' which matches any number of + directories. + """ + pat = '' + + # This will split on '/' within [character classes]. This is deliberate. 
+ chunks = glob.split(os.path.sep) + + sep = re.escape(os.sep) + valid_char = '[^%s]' % (sep,) + + for c, chunk in enumerate(chunks): + last_chunk = c == len(chunks) - 1 + + # Chunks that are a literal ** are globstars. They match anything. + if chunk == '**': + if last_chunk: + # Match anything if this is the last component + pat += '.*' + else: + # Match '(name/)*' + pat += '(?:%s+%s)*' % (valid_char, sep) + continue # Break here as the whole path component has been handled + + # Find any special characters in the remainder + i = 0 + chunk_len = len(chunk) + while i < chunk_len: + char = chunk[i] + if char == '*': + # Match any number of name characters + pat += valid_char + '*' + elif char == '?': + # Match a name character + pat += valid_char + elif char == '[': + # Character class + inner_i = i + 1 + # Skip initial !/] chars + if inner_i < chunk_len and chunk[inner_i] == '!': + inner_i = inner_i + 1 + if inner_i < chunk_len and chunk[inner_i] == ']': + inner_i = inner_i + 1 + + # Loop till the closing ] is found + while inner_i < chunk_len and chunk[inner_i] != ']': + inner_i = inner_i + 1 + + if inner_i >= chunk_len: + # Got to the end of the string without finding a closing ] + # Do not treat this as a matching group, but as a literal [ + pat += re.escape(char) + else: + # Grab the insides of the [brackets] + inner = chunk[i + 1:inner_i] + char_class = '' + + # Class negation + if inner[0] == '!': + char_class = '^' + inner = inner[1:] + + char_class += re.escape(inner) + pat += '[%s]' % (char_class,) + + # Skip to the end ] + i = inner_i + else: + pat += re.escape(char) + i += 1 + + # Join each chunk with the dir separator + if not last_chunk: + pat += sep + + return re.compile(pat + r'\Z(?ms)') + + +class egg_info(Command): + description = "create a distribution's .egg-info directory" + + user_options = [ + ('egg-base=', 'e', "directory containing .egg-info directories" + " (default: top of the source tree)"), + ('tag-svn-revision', 'r', + "Add subversion revision ID to version number"), + ('tag-date', 'd', "Add date stamp (e.g. 20050528) to version number"), + ('tag-build=', 'b', "Specify explicit tag to add to version number"), + ('no-svn-revision', 'R', + "Don't add subversion revision ID [default]"), + ('no-date', 'D', "Don't include date stamp [default]"), + ] + + boolean_options = ['tag-date', 'tag-svn-revision'] + negative_opt = { + 'no-svn-revision': 'tag-svn-revision', + 'no-date': 'tag-date', + } + + def initialize_options(self): + self.egg_name = None + self.egg_version = None + self.egg_base = None + self.egg_info = None + self.tag_build = None + self.tag_svn_revision = 0 + self.tag_date = 0 + self.broken_egg_info = False + self.vtags = None + + def save_version_info(self, filename): + """ + Materialize the values of svn_revision and date into the + build tag. Install these keys in a deterministic order + to avoid arbitrary reordering on subsequent builds. 
+ """ + # python 2.6 compatibility + odict = getattr(collections, 'OrderedDict', dict) + egg_info = odict() + # follow the order these keys would have been added + # when PYTHONHASHSEED=0 + egg_info['tag_build'] = self.tags() + egg_info['tag_date'] = 0 + egg_info['tag_svn_revision'] = 0 + edit_config(filename, dict(egg_info=egg_info)) + + def finalize_options(self): + self.egg_name = safe_name(self.distribution.get_name()) + self.vtags = self.tags() + self.egg_version = self.tagged_version() + + parsed_version = parse_version(self.egg_version) + + try: + is_version = isinstance(parsed_version, packaging.version.Version) + spec = ( + "%s==%s" if is_version else "%s===%s" + ) + list( + parse_requirements(spec % (self.egg_name, self.egg_version)) + ) + except ValueError: + raise distutils.errors.DistutilsOptionError( + "Invalid distribution name or version syntax: %s-%s" % + (self.egg_name, self.egg_version) + ) + + if self.egg_base is None: + dirs = self.distribution.package_dir + self.egg_base = (dirs or {}).get('', os.curdir) + + self.ensure_dirname('egg_base') + self.egg_info = to_filename(self.egg_name) + '.egg-info' + if self.egg_base != os.curdir: + self.egg_info = os.path.join(self.egg_base, self.egg_info) + if '-' in self.egg_name: + self.check_broken_egg_info() + + # Set package version for the benefit of dumber commands + # (e.g. sdist, bdist_wininst, etc.) + # + self.distribution.metadata.version = self.egg_version + + # If we bootstrapped around the lack of a PKG-INFO, as might be the + # case in a fresh checkout, make sure that any special tags get added + # to the version info + # + pd = self.distribution._patched_dist + if pd is not None and pd.key == self.egg_name.lower(): + pd._version = self.egg_version + pd._parsed_version = parse_version(self.egg_version) + self.distribution._patched_dist = None + + def write_or_delete_file(self, what, filename, data, force=False): + """Write `data` to `filename` or delete if empty + + If `data` is non-empty, this routine is the same as ``write_file()``. + If `data` is empty but not ``None``, this is the same as calling + ``delete_file(filename)`. If `data` is ``None``, then this is a no-op + unless `filename` exists, in which case a warning is issued about the + orphaned file (if `force` is false), or deleted (if `force` is true). + """ + if data: + self.write_file(what, filename, data) + elif os.path.exists(filename): + if data is None and not force: + log.warn( + "%s not set in setup(), but %s exists", what, filename + ) + return + else: + self.delete_file(filename) + + def write_file(self, what, filename, data): + """Write `data` to `filename` (if not a dry run) after announcing it + + `what` is used in a log message to identify what is being written + to the file. + """ + log.info("writing %s to %s", what, filename) + if six.PY3: + data = data.encode("utf-8") + if not self.dry_run: + f = open(filename, 'wb') + f.write(data) + f.close() + + def delete_file(self, filename): + """Delete `filename` (if not a dry run) after announcing it""" + log.info("deleting %s", filename) + if not self.dry_run: + os.unlink(filename) + + def tagged_version(self): + version = self.distribution.get_version() + # egg_info may be called more than once for a distribution, + # in which case the version string already contains all tags. 
+ if self.vtags and version.endswith(self.vtags): + return safe_version(version) + return safe_version(version + self.vtags) + + def run(self): + self.mkpath(self.egg_info) + installer = self.distribution.fetch_build_egg + for ep in iter_entry_points('egg_info.writers'): + ep.require(installer=installer) + writer = ep.resolve() + writer(self, ep.name, os.path.join(self.egg_info, ep.name)) + + # Get rid of native_libs.txt if it was put there by older bdist_egg + nl = os.path.join(self.egg_info, "native_libs.txt") + if os.path.exists(nl): + self.delete_file(nl) + + self.find_sources() + + def tags(self): + version = '' + if self.tag_build: + version += self.tag_build + if self.tag_svn_revision: + warnings.warn( + "tag_svn_revision is deprecated and will not be honored " + "in a future release" + ) + version += '-r%s' % self.get_svn_revision() + if self.tag_date: + version += time.strftime("-%Y%m%d") + return version + + @staticmethod + def get_svn_revision(): + if 'svn_utils' not in globals(): + return "0" + return str(svn_utils.SvnInfo.load(os.curdir).get_revision()) + + def find_sources(self): + """Generate SOURCES.txt manifest file""" + manifest_filename = os.path.join(self.egg_info, "SOURCES.txt") + mm = manifest_maker(self.distribution) + mm.manifest = manifest_filename + mm.run() + self.filelist = mm.filelist + + def check_broken_egg_info(self): + bei = self.egg_name + '.egg-info' + if self.egg_base != os.curdir: + bei = os.path.join(self.egg_base, bei) + if os.path.exists(bei): + log.warn( + "-" * 78 + '\n' + "Note: Your current .egg-info directory has a '-' in its name;" + '\nthis will not work correctly with "setup.py develop".\n\n' + 'Please rename %s to %s to correct this problem.\n' + '-' * 78, + bei, self.egg_info + ) + self.broken_egg_info = self.egg_info + self.egg_info = bei # make it work for now + + +class FileList(_FileList): + # Implementations of the various MANIFEST.in commands + + def process_template_line(self, line): + # Parse the line: split it up, make sure the right number of words + # is there, and return the relevant words. 'action' is always + # defined: it's the first word of the line. Which of the other + # three are defined depends on the action; it'll be either + # patterns, (dir and patterns), or (dir_pattern). + (action, patterns, dir, dir_pattern) = self._parse_template_line(line) + + # OK, now we know that the action is valid and we have the + # right number of words on the line for that action -- so we + # can proceed with minimal error-checking. 
+ if action == 'include': + self.debug_print("include " + ' '.join(patterns)) + for pattern in patterns: + if not self.include(pattern): + log.warn("warning: no files found matching '%s'", pattern) + + elif action == 'exclude': + self.debug_print("exclude " + ' '.join(patterns)) + for pattern in patterns: + if not self.exclude(pattern): + log.warn(("warning: no previously-included files " + "found matching '%s'"), pattern) + + elif action == 'global-include': + self.debug_print("global-include " + ' '.join(patterns)) + for pattern in patterns: + if not self.global_include(pattern): + log.warn(("warning: no files found matching '%s' " + "anywhere in distribution"), pattern) + + elif action == 'global-exclude': + self.debug_print("global-exclude " + ' '.join(patterns)) + for pattern in patterns: + if not self.global_exclude(pattern): + log.warn(("warning: no previously-included files matching " + "'%s' found anywhere in distribution"), + pattern) + + elif action == 'recursive-include': + self.debug_print("recursive-include %s %s" % + (dir, ' '.join(patterns))) + for pattern in patterns: + if not self.recursive_include(dir, pattern): + log.warn(("warning: no files found matching '%s' " + "under directory '%s'"), + pattern, dir) + + elif action == 'recursive-exclude': + self.debug_print("recursive-exclude %s %s" % + (dir, ' '.join(patterns))) + for pattern in patterns: + if not self.recursive_exclude(dir, pattern): + log.warn(("warning: no previously-included files matching " + "'%s' found under directory '%s'"), + pattern, dir) + + elif action == 'graft': + self.debug_print("graft " + dir_pattern) + if not self.graft(dir_pattern): + log.warn("warning: no directories found matching '%s'", + dir_pattern) + + elif action == 'prune': + self.debug_print("prune " + dir_pattern) + if not self.prune(dir_pattern): + log.warn(("no previously-included directories found " + "matching '%s'"), dir_pattern) + + else: + raise DistutilsInternalError( + "this cannot happen: invalid action '%s'" % action) + + def _remove_files(self, predicate): + """ + Remove all files from the file list that match the predicate. + Return True if any matching files were removed + """ + found = False + for i in range(len(self.files) - 1, -1, -1): + if predicate(self.files[i]): + self.debug_print(" removing " + self.files[i]) + del self.files[i] + found = True + return found + + def include(self, pattern): + """Include files that match 'pattern'.""" + found = [f for f in glob(pattern) if not os.path.isdir(f)] + self.extend(found) + return bool(found) + + def exclude(self, pattern): + """Exclude files that match 'pattern'.""" + match = translate_pattern(pattern) + return self._remove_files(match.match) + + def recursive_include(self, dir, pattern): + """ + Include all files anywhere in 'dir/' that match the pattern. + """ + full_pattern = os.path.join(dir, '**', pattern) + found = [f for f in glob(full_pattern, recursive=True) + if not os.path.isdir(f)] + self.extend(found) + return bool(found) + + def recursive_exclude(self, dir, pattern): + """ + Exclude any file anywhere in 'dir/' that match the pattern. 
+ """ + match = translate_pattern(os.path.join(dir, '**', pattern)) + return self._remove_files(match.match) + + def graft(self, dir): + """Include all files from 'dir/'.""" + found = distutils.filelist.findall(dir) + self.extend(found) + return bool(found) + + def prune(self, dir): + """Filter out files from 'dir/'.""" + match = translate_pattern(os.path.join(dir, '**')) + return self._remove_files(match.match) + + def global_include(self, pattern): + """ + Include all files anywhere in the current directory that match the + pattern. This is very inefficient on large file trees. + """ + if self.allfiles is None: + self.findall() + match = translate_pattern(os.path.join('**', pattern)) + found = [f for f in self.allfiles if match.match(f)] + self.extend(found) + return bool(found) + + def global_exclude(self, pattern): + """ + Exclude all files anywhere that match the pattern. + """ + match = translate_pattern(os.path.join('**', pattern)) + return self._remove_files(match.match) + + def append(self, item): + if item.endswith('\r'): # Fix older sdists built on Windows + item = item[:-1] + path = convert_path(item) + + if self._safe_path(path): + self.files.append(path) + + def extend(self, paths): + self.files.extend(filter(self._safe_path, paths)) + + def _repair(self): + """ + Replace self.files with only safe paths + + Because some owners of FileList manipulate the underlying + ``files`` attribute directly, this method must be called to + repair those paths. + """ + self.files = list(filter(self._safe_path, self.files)) + + def _safe_path(self, path): + enc_warn = "'%s' not %s encodable -- skipping" + + # To avoid accidental trans-codings errors, first to unicode + u_path = unicode_utils.filesys_decode(path) + if u_path is None: + log.warn("'%s' in unexpected encoding -- skipping" % path) + return False + + # Must ensure utf-8 encodability + utf8_path = unicode_utils.try_encode(u_path, "utf-8") + if utf8_path is None: + log.warn(enc_warn, path, 'utf-8') + return False + + try: + # accept is either way checks out + if os.path.exists(u_path) or os.path.exists(utf8_path): + return True + # this will catch any encode errors decoding u_path + except UnicodeEncodeError: + log.warn(enc_warn, path, sys.getfilesystemencoding()) + + +class manifest_maker(sdist): + template = "MANIFEST.in" + + def initialize_options(self): + self.use_defaults = 1 + self.prune = 1 + self.manifest_only = 1 + self.force_manifest = 1 + + def finalize_options(self): + pass + + def run(self): + self.filelist = FileList() + if not os.path.exists(self.manifest): + self.write_manifest() # it must exist so it'll get in the list + self.add_defaults() + if os.path.exists(self.template): + self.read_template() + self.prune_file_list() + self.filelist.sort() + self.filelist.remove_duplicates() + self.write_manifest() + + def _manifest_normalize(self, path): + path = unicode_utils.filesys_decode(path) + return path.replace(os.sep, '/') + + def write_manifest(self): + """ + Write the file list in 'self.filelist' to the manifest file + named by 'self.manifest'. 
+ """ + self.filelist._repair() + + # Now _repairs should encodability, but not unicode + files = [self._manifest_normalize(f) for f in self.filelist.files] + msg = "writing manifest file '%s'" % self.manifest + self.execute(write_file, (self.manifest, files), msg) + + def warn(self, msg): # suppress missing-file warnings from sdist + if not msg.startswith("standard file not found:"): + sdist.warn(self, msg) + + def add_defaults(self): + sdist.add_defaults(self) + self.filelist.append(self.template) + self.filelist.append(self.manifest) + rcfiles = list(walk_revctrl()) + if rcfiles: + self.filelist.extend(rcfiles) + elif os.path.exists(self.manifest): + self.read_manifest() + ei_cmd = self.get_finalized_command('egg_info') + self.filelist.graft(ei_cmd.egg_info) + + def prune_file_list(self): + build = self.get_finalized_command('build') + base_dir = self.distribution.get_fullname() + self.filelist.prune(build.build_base) + self.filelist.prune(base_dir) + sep = re.escape(os.sep) + self.filelist.exclude_pattern(r'(^|' + sep + r')(RCS|CVS|\.svn)' + sep, + is_regex=1) + + +def write_file(filename, contents): + """Create a file with the specified name and write 'contents' (a + sequence of strings without line terminators) to it. + """ + contents = "\n".join(contents) + + # assuming the contents has been vetted for utf-8 encoding + contents = contents.encode("utf-8") + + with open(filename, "wb") as f: # always write POSIX-style manifest + f.write(contents) + + +def write_pkg_info(cmd, basename, filename): + log.info("writing %s", filename) + if not cmd.dry_run: + metadata = cmd.distribution.metadata + metadata.version, oldver = cmd.egg_version, metadata.version + metadata.name, oldname = cmd.egg_name, metadata.name + try: + # write unescaped data to PKG-INFO, so older pkg_resources + # can still parse it + metadata.write_pkg_info(cmd.egg_info) + finally: + metadata.name, metadata.version = oldname, oldver + + safe = getattr(cmd.distribution, 'zip_safe', None) + + bdist_egg.write_safety_flag(cmd.egg_info, safe) + + +def warn_depends_obsolete(cmd, basename, filename): + if os.path.exists(filename): + log.warn( + "WARNING: 'depends.txt' is not used by setuptools 0.6!\n" + "Use the install_requires/extras_require setup() args instead." 
+        )
+
+
+def _write_requirements(stream, reqs):
+    lines = yield_lines(reqs or ())
+    append_cr = lambda line: line + '\n'
+    lines = map(append_cr, lines)
+    stream.writelines(lines)
+
+
+def write_requirements(cmd, basename, filename):
+    dist = cmd.distribution
+    data = six.StringIO()
+    _write_requirements(data, dist.install_requires)
+    extras_require = dist.extras_require or {}
+    for extra in sorted(extras_require):
+        data.write('\n[{extra}]\n'.format(**vars()))
+        _write_requirements(data, extras_require[extra])
+    cmd.write_or_delete_file("requirements", filename, data.getvalue())
+
+
+def write_setup_requirements(cmd, basename, filename):
+    data = six.StringIO()
+    _write_requirements(data, cmd.distribution.setup_requires)
+    cmd.write_or_delete_file("setup-requirements", filename, data.getvalue())
+
+
+def write_toplevel_names(cmd, basename, filename):
+    pkgs = dict.fromkeys(
+        [
+            k.split('.', 1)[0]
+            for k in cmd.distribution.iter_distribution_names()
+        ]
+    )
+    cmd.write_file("top-level names", filename, '\n'.join(sorted(pkgs)) + '\n')
+
+
+def overwrite_arg(cmd, basename, filename):
+    write_arg(cmd, basename, filename, True)
+
+
+def write_arg(cmd, basename, filename, force=False):
+    argname = os.path.splitext(basename)[0]
+    value = getattr(cmd.distribution, argname, None)
+    if value is not None:
+        value = '\n'.join(value) + '\n'
+    cmd.write_or_delete_file(argname, filename, value, force)
+
+
+def write_entries(cmd, basename, filename):
+    ep = cmd.distribution.entry_points
+
+    if isinstance(ep, six.string_types) or ep is None:
+        data = ep
+    elif ep is not None:
+        data = []
+        for section, contents in sorted(ep.items()):
+            if not isinstance(contents, six.string_types):
+                contents = EntryPoint.parse_group(section, contents)
+                contents = '\n'.join(sorted(map(str, contents.values())))
+            data.append('[%s]\n%s\n\n' % (section, contents))
+        data = ''.join(data)
+
+    cmd.write_or_delete_file('entry points', filename, data, True)
+
+
+def get_pkg_info_revision():
+    """
+    Get a -r### off of PKG-INFO Version in case this is an sdist of
+    a subversion revision.
+    """
+    warnings.warn("get_pkg_info_revision is deprecated.", DeprecationWarning)
+    if os.path.exists('PKG-INFO'):
+        with io.open('PKG-INFO') as f:
+            for line in f:
+                match = re.match(r"Version:.*-r(\d+)\s*$", line)
+                if match:
+                    return int(match.group(1))
+    return 0
diff --git a/env/lib/python3.6/site-packages/setuptools/command/install.py b/env/lib/python3.6/site-packages/setuptools/command/install.py
new file mode 100644
index 0000000..31a5ddb
--- /dev/null
+++ b/env/lib/python3.6/site-packages/setuptools/command/install.py
@@ -0,0 +1,125 @@
+from distutils.errors import DistutilsArgError
+import inspect
+import glob
+import warnings
+import platform
+import distutils.command.install as orig
+
+import setuptools
+
+# Prior to numpy 1.9, NumPy relies on the '_install' name, so provide it for
+# now.
See https://github.com/pypa/setuptools/issues/199/ +_install = orig.install + + +class install(orig.install): + """Use easy_install to install the package, w/dependencies""" + + user_options = orig.install.user_options + [ + ('old-and-unmanageable', None, "Try not to use this!"), + ('single-version-externally-managed', None, + "used by system package builders to create 'flat' eggs"), + ] + boolean_options = orig.install.boolean_options + [ + 'old-and-unmanageable', 'single-version-externally-managed', + ] + new_commands = [ + ('install_egg_info', lambda self: True), + ('install_scripts', lambda self: True), + ] + _nc = dict(new_commands) + + def initialize_options(self): + orig.install.initialize_options(self) + self.old_and_unmanageable = None + self.single_version_externally_managed = None + + def finalize_options(self): + orig.install.finalize_options(self) + if self.root: + self.single_version_externally_managed = True + elif self.single_version_externally_managed: + if not self.root and not self.record: + raise DistutilsArgError( + "You must specify --record or --root when building system" + " packages" + ) + + def handle_extra_path(self): + if self.root or self.single_version_externally_managed: + # explicit backward-compatibility mode, allow extra_path to work + return orig.install.handle_extra_path(self) + + # Ignore extra_path when installing an egg (or being run by another + # command without --root or --single-version-externally-managed + self.path_file = None + self.extra_dirs = '' + + def run(self): + # Explicit request for old-style install? Just do it + if self.old_and_unmanageable or self.single_version_externally_managed: + return orig.install.run(self) + + if not self._called_from_setup(inspect.currentframe()): + # Run in backward-compatibility mode to support bdist_* commands. + orig.install.run(self) + else: + self.do_egg_install() + + @staticmethod + def _called_from_setup(run_frame): + """ + Attempt to detect whether run() was called from setup() or by another + command. If called by setup(), the parent caller will be the + 'run_command' method in 'distutils.dist', and *its* caller will be + the 'run_commands' method. If called any other way, the + immediate caller *might* be 'run_command', but it won't have been + called by 'run_commands'. Return True in that case or if a call stack + is unavailable. Return False otherwise. + """ + if run_frame is None: + msg = "Call stack not available. bdist_* commands may fail." + warnings.warn(msg) + if platform.python_implementation() == 'IronPython': + msg = "For best results, pass -X:Frames to enable call stack." + warnings.warn(msg) + return True + res = inspect.getouterframes(run_frame)[2] + caller, = res[:1] + info = inspect.getframeinfo(caller) + caller_module = caller.f_globals.get('__name__', '') + return ( + caller_module == 'distutils.dist' + and info.function == 'run_commands' + ) + + def do_egg_install(self): + + easy_install = self.distribution.get_command_class('easy_install') + + cmd = easy_install( + self.distribution, args="x", root=self.root, record=self.record, + ) + cmd.ensure_finalized() # finalize before bdist_egg munges install cmd + cmd.always_copy_from = '.' 
# make sure local-dir eggs get installed + + # pick up setup-dir .egg files only: no .egg-info + cmd.package_index.scan(glob.glob('*.egg')) + + self.run_command('bdist_egg') + args = [self.distribution.get_command_obj('bdist_egg').egg_output] + + if setuptools.bootstrap_install_from: + # Bootstrap self-installation of setuptools + args.insert(0, setuptools.bootstrap_install_from) + + cmd.args = args + cmd.run() + setuptools.bootstrap_install_from = None + + +# XXX Python 3.1 doesn't see _nc if this is inside the class +install.sub_commands = ( + [cmd for cmd in orig.install.sub_commands if cmd[0] not in install._nc] + + install.new_commands +) diff --git a/env/lib/python3.6/site-packages/setuptools/command/install_egg_info.py b/env/lib/python3.6/site-packages/setuptools/command/install_egg_info.py new file mode 100644 index 0000000..edc4718 --- /dev/null +++ b/env/lib/python3.6/site-packages/setuptools/command/install_egg_info.py @@ -0,0 +1,62 @@ +from distutils import log, dir_util +import os + +from setuptools import Command +from setuptools import namespaces +from setuptools.archive_util import unpack_archive +import pkg_resources + + +class install_egg_info(namespaces.Installer, Command): + """Install an .egg-info directory for the package""" + + description = "Install an .egg-info directory for the package" + + user_options = [ + ('install-dir=', 'd', "directory to install to"), + ] + + def initialize_options(self): + self.install_dir = None + + def finalize_options(self): + self.set_undefined_options('install_lib', + ('install_dir', 'install_dir')) + ei_cmd = self.get_finalized_command("egg_info") + basename = pkg_resources.Distribution( + None, None, ei_cmd.egg_name, ei_cmd.egg_version + ).egg_name() + '.egg-info' + self.source = ei_cmd.egg_info + self.target = os.path.join(self.install_dir, basename) + self.outputs = [] + + def run(self): + self.run_command('egg_info') + if os.path.isdir(self.target) and not os.path.islink(self.target): + dir_util.remove_tree(self.target, dry_run=self.dry_run) + elif os.path.exists(self.target): + self.execute(os.unlink, (self.target,), "Removing " + self.target) + if not self.dry_run: + pkg_resources.ensure_directory(self.target) + self.execute( + self.copytree, (), "Copying %s to %s" % (self.source, self.target) + ) + self.install_namespaces() + + def get_outputs(self): + return self.outputs + + def copytree(self): + # Copy the .egg-info tree to site-packages + def skimmer(src, dst): + # filter out source-control directories; note that 'src' is always + # a '/'-separated path, regardless of platform. 'dst' is a + # platform-specific path. 
+ for skip in '.svn/', 'CVS/': + if src.startswith(skip) or '/' + skip in src: + return None + self.outputs.append(dst) + log.debug("Copying %s to %s", src, dst) + return dst + + unpack_archive(self.source, self.target, skimmer) diff --git a/env/lib/python3.6/site-packages/setuptools/command/install_lib.py b/env/lib/python3.6/site-packages/setuptools/command/install_lib.py new file mode 100644 index 0000000..2b31c3e --- /dev/null +++ b/env/lib/python3.6/site-packages/setuptools/command/install_lib.py @@ -0,0 +1,121 @@ +import os +import imp +from itertools import product, starmap +import distutils.command.install_lib as orig + + +class install_lib(orig.install_lib): + """Don't add compiled flags to filenames of non-Python files""" + + def run(self): + self.build() + outfiles = self.install() + if outfiles is not None: + # always compile, in case we have any extension stubs to deal with + self.byte_compile(outfiles) + + def get_exclusions(self): + """ + Return a collections.Sized collections.Container of paths to be + excluded for single_version_externally_managed installations. + """ + all_packages = ( + pkg + for ns_pkg in self._get_SVEM_NSPs() + for pkg in self._all_packages(ns_pkg) + ) + + excl_specs = product(all_packages, self._gen_exclusion_paths()) + return set(starmap(self._exclude_pkg_path, excl_specs)) + + def _exclude_pkg_path(self, pkg, exclusion_path): + """ + Given a package name and exclusion path within that package, + compute the full exclusion path. + """ + parts = pkg.split('.') + [exclusion_path] + return os.path.join(self.install_dir, *parts) + + @staticmethod + def _all_packages(pkg_name): + """ + >>> list(install_lib._all_packages('foo.bar.baz')) + ['foo.bar.baz', 'foo.bar', 'foo'] + """ + while pkg_name: + yield pkg_name + pkg_name, sep, child = pkg_name.rpartition('.') + + def _get_SVEM_NSPs(self): + """ + Get namespace packages (list) but only for + single_version_externally_managed installations and empty otherwise. + """ + # TODO: is it necessary to short-circuit here? i.e. what's the cost + # if get_finalized_command is called even when namespace_packages is + # False? + if not self.distribution.namespace_packages: + return [] + + install_cmd = self.get_finalized_command('install') + svem = install_cmd.single_version_externally_managed + + return self.distribution.namespace_packages if svem else [] + + @staticmethod + def _gen_exclusion_paths(): + """ + Generate file paths to be excluded for namespace packages (bytecode + cache files). + """ + # always exclude the package module itself + yield '__init__.py' + + yield '__init__.pyc' + yield '__init__.pyo' + + if not hasattr(imp, 'get_tag'): + return + + base = os.path.join('__pycache__', '__init__.' 
+ imp.get_tag()) + yield base + '.pyc' + yield base + '.pyo' + yield base + '.opt-1.pyc' + yield base + '.opt-2.pyc' + + def copy_tree( + self, infile, outfile, + preserve_mode=1, preserve_times=1, preserve_symlinks=0, level=1 + ): + assert preserve_mode and preserve_times and not preserve_symlinks + exclude = self.get_exclusions() + + if not exclude: + return orig.install_lib.copy_tree(self, infile, outfile) + + # Exclude namespace package __init__.py* files from the output + + from setuptools.archive_util import unpack_directory + from distutils import log + + outfiles = [] + + def pf(src, dst): + if dst in exclude: + log.warn("Skipping installation of %s (namespace package)", + dst) + return False + + log.info("copying %s -> %s", src, os.path.dirname(dst)) + outfiles.append(dst) + return dst + + unpack_directory(infile, outfile, pf) + return outfiles + + def get_outputs(self): + outputs = orig.install_lib.get_outputs(self) + exclude = self.get_exclusions() + if exclude: + return [f for f in outputs if f not in exclude] + return outputs diff --git a/env/lib/python3.6/site-packages/setuptools/command/install_scripts.py b/env/lib/python3.6/site-packages/setuptools/command/install_scripts.py new file mode 100644 index 0000000..1623427 --- /dev/null +++ b/env/lib/python3.6/site-packages/setuptools/command/install_scripts.py @@ -0,0 +1,65 @@ +from distutils import log +import distutils.command.install_scripts as orig +import os +import sys + +from pkg_resources import Distribution, PathMetadata, ensure_directory + + +class install_scripts(orig.install_scripts): + """Do normal script install, plus any egg_info wrapper scripts""" + + def initialize_options(self): + orig.install_scripts.initialize_options(self) + self.no_ep = False + + def run(self): + import setuptools.command.easy_install as ei + + self.run_command("egg_info") + if self.distribution.scripts: + orig.install_scripts.run(self) # run first to set up self.outfiles + else: + self.outfiles = [] + if self.no_ep: + # don't install entry point scripts into .egg file! + return + + ei_cmd = self.get_finalized_command("egg_info") + dist = Distribution( + ei_cmd.egg_base, PathMetadata(ei_cmd.egg_base, ei_cmd.egg_info), + ei_cmd.egg_name, ei_cmd.egg_version, + ) + bs_cmd = self.get_finalized_command('build_scripts') + exec_param = getattr(bs_cmd, 'executable', None) + bw_cmd = self.get_finalized_command("bdist_wininst") + is_wininst = getattr(bw_cmd, '_is_running', False) + writer = ei.ScriptWriter + if is_wininst: + exec_param = "python.exe" + writer = ei.WindowsScriptWriter + if exec_param == sys.executable: + # In case the path to the Python executable contains a space, wrap + # it so it's not split up. 
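install_lib.get_exclusions() above crosses every namespace package (and its parents) with every generated exclusion filename to build the set of paths that single_version_externally_managed installs must drop. A minimal standalone sketch of that product/starmap combination, using made-up directory and package names:

    import os
    from itertools import product, starmap

    install_dir = '/tmp/site-packages'             # placeholder install target
    packages = ['ns', 'ns.pkg']                    # namespace packages and parents
    exclusion_files = ['__init__.py', '__init__.pyc']

    def exclude_path(pkg, name):
        # mirrors _exclude_pkg_path: package dots become path components
        return os.path.join(install_dir, *(pkg.split('.') + [name]))

    print(sorted(set(starmap(exclude_path, product(packages, exclusion_files)))))
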
+ exec_param = [exec_param] + # resolve the writer to the environment + writer = writer.best() + cmd = writer.command_spec_class.best().from_param(exec_param) + for args in writer.get_args(dist, cmd.as_header()): + self.write_script(*args) + + def write_script(self, script_name, contents, mode="t", *ignored): + """Write an executable file to the scripts directory""" + from setuptools.command.easy_install import chmod, current_umask + + log.info("Installing %s script to %s", script_name, self.install_dir) + target = os.path.join(self.install_dir, script_name) + self.outfiles.append(target) + + mask = current_umask() + if not self.dry_run: + ensure_directory(target) + f = open(target, "w" + mode) + f.write(contents) + f.close() + chmod(target, 0o777 - mask) diff --git a/env/lib/python3.6/site-packages/setuptools/command/launcher manifest.xml b/env/lib/python3.6/site-packages/setuptools/command/launcher manifest.xml new file mode 100644 index 0000000..5972a96 --- /dev/null +++ b/env/lib/python3.6/site-packages/setuptools/command/launcher manifest.xml @@ -0,0 +1,15 @@ +<?xml version="1.0" encoding="UTF-8" standalone="yes"?> +<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0"> + <assemblyIdentity version="1.0.0.0" + processorArchitecture="X86" + name="%(name)s" + type="win32"/> + <!-- Identify the application security requirements. --> + <trustInfo xmlns="urn:schemas-microsoft-com:asm.v3"> + <security> + <requestedPrivileges> + <requestedExecutionLevel level="asInvoker" uiAccess="false"/> + </requestedPrivileges> + </security> + </trustInfo> +</assembly> diff --git a/env/lib/python3.6/site-packages/setuptools/command/py36compat.py b/env/lib/python3.6/site-packages/setuptools/command/py36compat.py new file mode 100644 index 0000000..61063e7 --- /dev/null +++ b/env/lib/python3.6/site-packages/setuptools/command/py36compat.py @@ -0,0 +1,136 @@ +import os +from glob import glob +from distutils.util import convert_path +from distutils.command import sdist + +from setuptools.extern.six.moves import filter + + +class sdist_add_defaults: + """ + Mix-in providing forward-compatibility for functionality as found in + distutils on Python 3.7. + + Do not edit the code in this class except to update functionality + as implemented in distutils. Instead, override in the subclass. + """ + + def add_defaults(self): + """Add all the default files to self.filelist: + - README or README.txt + - setup.py + - test/test*.py + - all pure Python modules mentioned in setup script + - all files pointed by package_data (build_py) + - all files defined in data_files. + - all files defined as scripts. + - all C sources listed as part of extensions or C libraries + in the setup script (doesn't catch C headers!) + Warns if (README or README.txt) or setup.py are missing; everything + else is optional. 
+ """ + self._add_defaults_standards() + self._add_defaults_optional() + self._add_defaults_python() + self._add_defaults_data_files() + self._add_defaults_ext() + self._add_defaults_c_libs() + self._add_defaults_scripts() + + @staticmethod + def _cs_path_exists(fspath): + """ + Case-sensitive path existence check + + >>> sdist_add_defaults._cs_path_exists(__file__) + True + >>> sdist_add_defaults._cs_path_exists(__file__.upper()) + False + """ + if not os.path.exists(fspath): + return False + # make absolute so we always have a directory + abspath = os.path.abspath(fspath) + directory, filename = os.path.split(abspath) + return filename in os.listdir(directory) + + def _add_defaults_standards(self): + standards = [self.READMES, self.distribution.script_name] + for fn in standards: + if isinstance(fn, tuple): + alts = fn + got_it = False + for fn in alts: + if self._cs_path_exists(fn): + got_it = True + self.filelist.append(fn) + break + + if not got_it: + self.warn("standard file not found: should have one of " + + ', '.join(alts)) + else: + if self._cs_path_exists(fn): + self.filelist.append(fn) + else: + self.warn("standard file '%s' not found" % fn) + + def _add_defaults_optional(self): + optional = ['test/test*.py', 'setup.cfg'] + for pattern in optional: + files = filter(os.path.isfile, glob(pattern)) + self.filelist.extend(files) + + def _add_defaults_python(self): + # build_py is used to get: + # - python modules + # - files defined in package_data + build_py = self.get_finalized_command('build_py') + + # getting python files + if self.distribution.has_pure_modules(): + self.filelist.extend(build_py.get_source_files()) + + # getting package_data files + # (computed in build_py.data_files by build_py.finalize_options) + for pkg, src_dir, build_dir, filenames in build_py.data_files: + for filename in filenames: + self.filelist.append(os.path.join(src_dir, filename)) + + def _add_defaults_data_files(self): + # getting distribution.data_files + if self.distribution.has_data_files(): + for item in self.distribution.data_files: + if isinstance(item, str): + # plain file + item = convert_path(item) + if os.path.isfile(item): + self.filelist.append(item) + else: + # a (dirname, filenames) tuple + dirname, filenames = item + for f in filenames: + f = convert_path(f) + if os.path.isfile(f): + self.filelist.append(f) + + def _add_defaults_ext(self): + if self.distribution.has_ext_modules(): + build_ext = self.get_finalized_command('build_ext') + self.filelist.extend(build_ext.get_source_files()) + + def _add_defaults_c_libs(self): + if self.distribution.has_c_libraries(): + build_clib = self.get_finalized_command('build_clib') + self.filelist.extend(build_clib.get_source_files()) + + def _add_defaults_scripts(self): + if self.distribution.has_scripts(): + build_scripts = self.get_finalized_command('build_scripts') + self.filelist.extend(build_scripts.get_source_files()) + + +if hasattr(sdist.sdist, '_add_defaults_standards'): + # disable the functionality already available upstream + class sdist_add_defaults: + pass diff --git a/env/lib/python3.6/site-packages/setuptools/command/register.py b/env/lib/python3.6/site-packages/setuptools/command/register.py new file mode 100644 index 0000000..8d6336a --- /dev/null +++ b/env/lib/python3.6/site-packages/setuptools/command/register.py @@ -0,0 +1,10 @@ +import distutils.command.register as orig + + +class register(orig.register): + __doc__ = orig.register.__doc__ + + def run(self): + # Make sure that we are using valid current name/version info + 
self.run_command('egg_info') + orig.register.run(self) diff --git a/env/lib/python3.6/site-packages/setuptools/command/rotate.py b/env/lib/python3.6/site-packages/setuptools/command/rotate.py new file mode 100644 index 0000000..b89353f --- /dev/null +++ b/env/lib/python3.6/site-packages/setuptools/command/rotate.py @@ -0,0 +1,66 @@ +from distutils.util import convert_path +from distutils import log +from distutils.errors import DistutilsOptionError +import os +import shutil + +from setuptools.extern import six + +from setuptools import Command + + +class rotate(Command): + """Delete older distributions""" + + description = "delete older distributions, keeping N newest files" + user_options = [ + ('match=', 'm', "patterns to match (required)"), + ('dist-dir=', 'd', "directory where the distributions are"), + ('keep=', 'k', "number of matching distributions to keep"), + ] + + boolean_options = [] + + def initialize_options(self): + self.match = None + self.dist_dir = None + self.keep = None + + def finalize_options(self): + if self.match is None: + raise DistutilsOptionError( + "Must specify one or more (comma-separated) match patterns " + "(e.g. '.zip' or '.egg')" + ) + if self.keep is None: + raise DistutilsOptionError("Must specify number of files to keep") + try: + self.keep = int(self.keep) + except ValueError: + raise DistutilsOptionError("--keep must be an integer") + if isinstance(self.match, six.string_types): + self.match = [ + convert_path(p.strip()) for p in self.match.split(',') + ] + self.set_undefined_options('bdist', ('dist_dir', 'dist_dir')) + + def run(self): + self.run_command("egg_info") + from glob import glob + + for pattern in self.match: + pattern = self.distribution.get_name() + '*' + pattern + files = glob(os.path.join(self.dist_dir, pattern)) + files = [(os.path.getmtime(f), f) for f in files] + files.sort() + files.reverse() + + log.info("%d file(s) matching %s", len(files), pattern) + files = files[self.keep:] + for (t, f) in files: + log.info("Deleting %s", f) + if not self.dry_run: + if os.path.isdir(f): + shutil.rmtree(f) + else: + os.unlink(f) diff --git a/env/lib/python3.6/site-packages/setuptools/command/saveopts.py b/env/lib/python3.6/site-packages/setuptools/command/saveopts.py new file mode 100644 index 0000000..611cec5 --- /dev/null +++ b/env/lib/python3.6/site-packages/setuptools/command/saveopts.py @@ -0,0 +1,22 @@ +from setuptools.command.setopt import edit_config, option_base + + +class saveopts(option_base): + """Save command-line options to a file""" + + description = "save supplied options to setup.cfg or other config file" + + def run(self): + dist = self.distribution + settings = {} + + for cmd in dist.command_options: + + if cmd == 'saveopts': + continue # don't save our own options! 
+ + for opt, (src, val) in dist.get_option_dict(cmd).items(): + if src == "command line": + settings.setdefault(cmd, {})[opt] = val + + edit_config(self.filename, settings, self.dry_run) diff --git a/env/lib/python3.6/site-packages/setuptools/command/sdist.py b/env/lib/python3.6/site-packages/setuptools/command/sdist.py new file mode 100644 index 0000000..9975753 --- /dev/null +++ b/env/lib/python3.6/site-packages/setuptools/command/sdist.py @@ -0,0 +1,202 @@ +from distutils import log +import distutils.command.sdist as orig +import os +import sys +import io +import contextlib + +from setuptools.extern import six + +from .py36compat import sdist_add_defaults + +import pkg_resources + +_default_revctrl = list + + +def walk_revctrl(dirname=''): + """Find all files under revision control""" + for ep in pkg_resources.iter_entry_points('setuptools.file_finders'): + for item in ep.load()(dirname): + yield item + + +class sdist(sdist_add_defaults, orig.sdist): + """Smart sdist that finds anything supported by revision control""" + + user_options = [ + ('formats=', None, + "formats for source distribution (comma-separated list)"), + ('keep-temp', 'k', + "keep the distribution tree around after creating " + + "archive file(s)"), + ('dist-dir=', 'd', + "directory to put the source distribution archive(s) in " + "[default: dist]"), + ] + + negative_opt = {} + + READMES = 'README', 'README.rst', 'README.txt' + + def run(self): + self.run_command('egg_info') + ei_cmd = self.get_finalized_command('egg_info') + self.filelist = ei_cmd.filelist + self.filelist.append(os.path.join(ei_cmd.egg_info, 'SOURCES.txt')) + self.check_readme() + + # Run sub commands + for cmd_name in self.get_sub_commands(): + self.run_command(cmd_name) + + # Call check_metadata only if no 'check' command + # (distutils <= 2.6) + import distutils.command + + if 'check' not in distutils.command.__all__: + self.check_metadata() + + self.make_distribution() + + dist_files = getattr(self.distribution, 'dist_files', []) + for file in self.archive_files: + data = ('sdist', '', file) + if data not in dist_files: + dist_files.append(data) + + def initialize_options(self): + orig.sdist.initialize_options(self) + + self._default_to_gztar() + + def _default_to_gztar(self): + # only needed on Python prior to 3.6. + if sys.version_info >= (3, 6, 0, 'beta', 1): + return + self.formats = ['gztar'] + + def make_distribution(self): + """ + Workaround for #516 + """ + with self._remove_os_link(): + orig.sdist.make_distribution(self) + + @staticmethod + @contextlib.contextmanager + def _remove_os_link(): + """ + In a context, remove and restore os.link if it exists + """ + + class NoValue: + pass + + orig_val = getattr(os, 'link', NoValue) + try: + del os.link + except Exception: + pass + try: + yield + finally: + if orig_val is not NoValue: + setattr(os, 'link', orig_val) + + def __read_template_hack(self): + # This grody hack closes the template file (MANIFEST.in) if an + # exception occurs during read_template. + # Doing so prevents an error when easy_install attempts to delete the + # file. + try: + orig.sdist.read_template(self) + except Exception: + _, _, tb = sys.exc_info() + tb.tb_next.tb_frame.f_locals['template'].close() + raise + + # Beginning with Python 2.7.2, 3.1.4, and 3.2.1, this leaky file handle + # has been fixed, so only override the method if we're using an earlier + # Python. 
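make_distribution() above wraps the distutils implementation in _remove_os_link(), the "#516" workaround noted in its docstring, so that hard-linking is never attempted while the archive is built. The context manager can be exercised on its own; a rough sketch, assuming the vendored setuptools shown here is the copy importable on sys.path:

    import os
    from setuptools.command.sdist import sdist

    print(hasattr(os, 'link'))        # True on most POSIX Pythons
    with sdist._remove_os_link():
        print(hasattr(os, 'link'))    # False inside the context
    print(hasattr(os, 'link'))        # restored on exit
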
+ has_leaky_handle = ( + sys.version_info < (2, 7, 2) + or (3, 0) <= sys.version_info < (3, 1, 4) + or (3, 2) <= sys.version_info < (3, 2, 1) + ) + if has_leaky_handle: + read_template = __read_template_hack + + def _add_defaults_python(self): + """getting python files""" + if self.distribution.has_pure_modules(): + build_py = self.get_finalized_command('build_py') + self.filelist.extend(build_py.get_source_files()) + # This functionality is incompatible with include_package_data, and + # will in fact create an infinite recursion if include_package_data + # is True. Use of include_package_data will imply that + # distutils-style automatic handling of package_data is disabled + if not self.distribution.include_package_data: + for _, src_dir, _, filenames in build_py.data_files: + self.filelist.extend([os.path.join(src_dir, filename) + for filename in filenames]) + + def _add_defaults_data_files(self): + """ + Don't add any data files, but why? + """ + + def check_readme(self): + for f in self.READMES: + if os.path.exists(f): + return + else: + self.warn( + "standard file not found: should have one of " + + ', '.join(self.READMES) + ) + + def make_release_tree(self, base_dir, files): + orig.sdist.make_release_tree(self, base_dir, files) + + # Save any egg_info command line options used to create this sdist + dest = os.path.join(base_dir, 'setup.cfg') + if hasattr(os, 'link') and os.path.exists(dest): + # unlink and re-copy, since it might be hard-linked, and + # we don't want to change the source version + os.unlink(dest) + self.copy_file('setup.cfg', dest) + + self.get_finalized_command('egg_info').save_version_info(dest) + + def _manifest_is_not_generated(self): + # check for special comment used in 2.7.1 and higher + if not os.path.isfile(self.manifest): + return False + + with io.open(self.manifest, 'rb') as fp: + first_line = fp.readline() + return (first_line != + '# file GENERATED by distutils, do NOT edit\n'.encode()) + + def read_manifest(self): + """Read the manifest file (named by 'self.manifest') and use it to + fill in 'self.filelist', the list of files to include in the source + distribution. + """ + log.info("reading manifest file '%s'", self.manifest) + manifest = open(self.manifest, 'rb') + for line in manifest: + # The manifest must contain UTF-8. See #303. 
+ if six.PY3: + try: + line = line.decode('UTF-8') + except UnicodeDecodeError: + log.warn("%r not UTF-8 decodable -- skipping" % line) + continue + # ignore comments and blank lines + line = line.strip() + if line.startswith('#') or not line: + continue + self.filelist.append(line) + manifest.close() diff --git a/env/lib/python3.6/site-packages/setuptools/command/setopt.py b/env/lib/python3.6/site-packages/setuptools/command/setopt.py new file mode 100644 index 0000000..7e57cc0 --- /dev/null +++ b/env/lib/python3.6/site-packages/setuptools/command/setopt.py @@ -0,0 +1,149 @@ +from distutils.util import convert_path +from distutils import log +from distutils.errors import DistutilsOptionError +import distutils +import os + +from setuptools.extern.six.moves import configparser + +from setuptools import Command + +__all__ = ['config_file', 'edit_config', 'option_base', 'setopt'] + + +def config_file(kind="local"): + """Get the filename of the distutils, local, global, or per-user config + + `kind` must be one of "local", "global", or "user" + """ + if kind == 'local': + return 'setup.cfg' + if kind == 'global': + return os.path.join( + os.path.dirname(distutils.__file__), 'distutils.cfg' + ) + if kind == 'user': + dot = os.name == 'posix' and '.' or '' + return os.path.expanduser(convert_path("~/%spydistutils.cfg" % dot)) + raise ValueError( + "config_file() type must be 'local', 'global', or 'user'", kind + ) + + +def edit_config(filename, settings, dry_run=False): + """Edit a configuration file to include `settings` + + `settings` is a dictionary of dictionaries or ``None`` values, keyed by + command/section name. A ``None`` value means to delete the entire section, + while a dictionary lists settings to be changed or deleted in that section. + A setting of ``None`` means to delete that setting. 
+ """ + log.debug("Reading configuration from %s", filename) + opts = configparser.RawConfigParser() + opts.read([filename]) + for section, options in settings.items(): + if options is None: + log.info("Deleting section [%s] from %s", section, filename) + opts.remove_section(section) + else: + if not opts.has_section(section): + log.debug("Adding new section [%s] to %s", section, filename) + opts.add_section(section) + for option, value in options.items(): + if value is None: + log.debug( + "Deleting %s.%s from %s", + section, option, filename + ) + opts.remove_option(section, option) + if not opts.options(section): + log.info("Deleting empty [%s] section from %s", + section, filename) + opts.remove_section(section) + else: + log.debug( + "Setting %s.%s to %r in %s", + section, option, value, filename + ) + opts.set(section, option, value) + + log.info("Writing %s", filename) + if not dry_run: + with open(filename, 'w') as f: + opts.write(f) + + +class option_base(Command): + """Abstract base class for commands that mess with config files""" + + user_options = [ + ('global-config', 'g', + "save options to the site-wide distutils.cfg file"), + ('user-config', 'u', + "save options to the current user's pydistutils.cfg file"), + ('filename=', 'f', + "configuration file to use (default=setup.cfg)"), + ] + + boolean_options = [ + 'global-config', 'user-config', + ] + + def initialize_options(self): + self.global_config = None + self.user_config = None + self.filename = None + + def finalize_options(self): + filenames = [] + if self.global_config: + filenames.append(config_file('global')) + if self.user_config: + filenames.append(config_file('user')) + if self.filename is not None: + filenames.append(self.filename) + if not filenames: + filenames.append(config_file('local')) + if len(filenames) > 1: + raise DistutilsOptionError( + "Must specify only one configuration file option", + filenames + ) + self.filename, = filenames + + +class setopt(option_base): + """Save command-line options to a file""" + + description = "set an option in setup.cfg or another config file" + + user_options = [ + ('command=', 'c', 'command to set an option for'), + ('option=', 'o', 'option to set'), + ('set-value=', 's', 'value of the option'), + ('remove', 'r', 'remove (unset) the value'), + ] + option_base.user_options + + boolean_options = option_base.boolean_options + ['remove'] + + def initialize_options(self): + option_base.initialize_options(self) + self.command = None + self.option = None + self.set_value = None + self.remove = None + + def finalize_options(self): + option_base.finalize_options(self) + if self.command is None or self.option is None: + raise DistutilsOptionError("Must specify --command *and* --option") + if self.set_value is None and not self.remove: + raise DistutilsOptionError("Must specify --set-value or --remove") + + def run(self): + edit_config( + self.filename, { + self.command: {self.option.replace('-', '_'): self.set_value} + }, + self.dry_run + ) diff --git a/env/lib/python3.6/site-packages/setuptools/command/test.py b/env/lib/python3.6/site-packages/setuptools/command/test.py new file mode 100644 index 0000000..270674e --- /dev/null +++ b/env/lib/python3.6/site-packages/setuptools/command/test.py @@ -0,0 +1,247 @@ +import os +import operator +import sys +import contextlib +import itertools +from distutils.errors import DistutilsOptionError +from unittest import TestLoader + +from setuptools.extern import six +from setuptools.extern.six.moves import map, filter + +from pkg_resources 
import (resource_listdir, resource_exists, normalize_path, + working_set, _namespace_packages, + add_activation_listener, require, EntryPoint) +from setuptools import Command +from setuptools.py31compat import unittest_main + + +class ScanningLoader(TestLoader): + def loadTestsFromModule(self, module, pattern=None): + """Return a suite of all tests cases contained in the given module + + If the module is a package, load tests from all the modules in it. + If the module has an ``additional_tests`` function, call it and add + the return value to the tests. + """ + tests = [] + tests.append(TestLoader.loadTestsFromModule(self, module)) + + if hasattr(module, "additional_tests"): + tests.append(module.additional_tests()) + + if hasattr(module, '__path__'): + for file in resource_listdir(module.__name__, ''): + if file.endswith('.py') and file != '__init__.py': + submodule = module.__name__ + '.' + file[:-3] + else: + if resource_exists(module.__name__, file + '/__init__.py'): + submodule = module.__name__ + '.' + file + else: + continue + tests.append(self.loadTestsFromName(submodule)) + + if len(tests) != 1: + return self.suiteClass(tests) + else: + return tests[0] # don't create a nested suite for only one return + + +# adapted from jaraco.classes.properties:NonDataProperty +class NonDataProperty(object): + def __init__(self, fget): + self.fget = fget + + def __get__(self, obj, objtype=None): + if obj is None: + return self + return self.fget(obj) + + +class test(Command): + """Command to run unit tests after in-place build""" + + description = "run unit tests after in-place build" + + user_options = [ + ('test-module=', 'm', "Run 'test_suite' in specified module"), + ('test-suite=', 's', + "Test suite to run (e.g. 'some_module.test_suite')"), + ('test-runner=', 'r', "Test runner to use"), + ] + + def initialize_options(self): + self.test_suite = None + self.test_module = None + self.test_loader = None + self.test_runner = None + + def finalize_options(self): + + if self.test_suite and self.test_module: + msg = "You may specify a module or a suite, but not both" + raise DistutilsOptionError(msg) + + if self.test_suite is None: + if self.test_module is None: + self.test_suite = self.distribution.test_suite + else: + self.test_suite = self.test_module + ".test_suite" + + if self.test_loader is None: + self.test_loader = getattr(self.distribution, 'test_loader', None) + if self.test_loader is None: + self.test_loader = "setuptools.command.test:ScanningLoader" + if self.test_runner is None: + self.test_runner = getattr(self.distribution, 'test_runner', None) + + @NonDataProperty + def test_args(self): + return list(self._test_args()) + + def _test_args(self): + if self.verbose: + yield '--verbose' + if self.test_suite: + yield self.test_suite + + def with_project_on_sys_path(self, func): + """ + Backward compatibility for project_on_sys_path context. 
+ """ + with self.project_on_sys_path(): + func() + + @contextlib.contextmanager + def project_on_sys_path(self, include_dists=[]): + with_2to3 = six.PY3 and getattr(self.distribution, 'use_2to3', False) + + if with_2to3: + # If we run 2to3 we can not do this inplace: + + # Ensure metadata is up-to-date + self.reinitialize_command('build_py', inplace=0) + self.run_command('build_py') + bpy_cmd = self.get_finalized_command("build_py") + build_path = normalize_path(bpy_cmd.build_lib) + + # Build extensions + self.reinitialize_command('egg_info', egg_base=build_path) + self.run_command('egg_info') + + self.reinitialize_command('build_ext', inplace=0) + self.run_command('build_ext') + else: + # Without 2to3 inplace works fine: + self.run_command('egg_info') + + # Build extensions in-place + self.reinitialize_command('build_ext', inplace=1) + self.run_command('build_ext') + + ei_cmd = self.get_finalized_command("egg_info") + + old_path = sys.path[:] + old_modules = sys.modules.copy() + + try: + project_path = normalize_path(ei_cmd.egg_base) + sys.path.insert(0, project_path) + working_set.__init__() + add_activation_listener(lambda dist: dist.activate()) + require('%s==%s' % (ei_cmd.egg_name, ei_cmd.egg_version)) + with self.paths_on_pythonpath([project_path]): + yield + finally: + sys.path[:] = old_path + sys.modules.clear() + sys.modules.update(old_modules) + working_set.__init__() + + @staticmethod + @contextlib.contextmanager + def paths_on_pythonpath(paths): + """ + Add the indicated paths to the head of the PYTHONPATH environment + variable so that subprocesses will also see the packages at + these paths. + + Do this in a context that restores the value on exit. + """ + nothing = object() + orig_pythonpath = os.environ.get('PYTHONPATH', nothing) + current_pythonpath = os.environ.get('PYTHONPATH', '') + try: + prefix = os.pathsep.join(paths) + to_join = filter(None, [prefix, current_pythonpath]) + new_path = os.pathsep.join(to_join) + if new_path: + os.environ['PYTHONPATH'] = new_path + yield + finally: + if orig_pythonpath is nothing: + os.environ.pop('PYTHONPATH', None) + else: + os.environ['PYTHONPATH'] = orig_pythonpath + + @staticmethod + def install_dists(dist): + """ + Install the requirements indicated by self.distribution and + return an iterable of the dists that were built. + """ + ir_d = dist.fetch_build_eggs(dist.install_requires or []) + tr_d = dist.fetch_build_eggs(dist.tests_require or []) + return itertools.chain(ir_d, tr_d) + + def run(self): + installed_dists = self.install_dists(self.distribution) + + cmd = ' '.join(self._argv) + if self.dry_run: + self.announce('skipping "%s" (dry run)' % cmd) + return + + self.announce('running "%s"' % cmd) + + paths = map(operator.attrgetter('location'), installed_dists) + with self.paths_on_pythonpath(paths): + with self.project_on_sys_path(): + self.run_tests() + + def run_tests(self): + # Purge modules under test from sys.modules. The test loader will + # re-import them from the build location. Required when 2to3 is used + # with namespace packages. + if six.PY3 and getattr(self.distribution, 'use_2to3', False): + module = self.test_suite.split('.')[0] + if module in _namespace_packages: + del_modules = [] + if module in sys.modules: + del_modules.append(module) + module += '.' 
+ for name in sys.modules: + if name.startswith(module): + del_modules.append(name) + list(map(sys.modules.__delitem__, del_modules)) + + unittest_main( + None, None, self._argv, + testLoader=self._resolve_as_ep(self.test_loader), + testRunner=self._resolve_as_ep(self.test_runner), + ) + + @property + def _argv(self): + return ['unittest'] + self.test_args + + @staticmethod + def _resolve_as_ep(val): + """ + Load the indicated attribute value, called, as a as if it were + specified as an entry point. + """ + if val is None: + return + parsed = EntryPoint.parse("x=" + val) + return parsed.resolve()() diff --git a/env/lib/python3.6/site-packages/setuptools/command/upload.py b/env/lib/python3.6/site-packages/setuptools/command/upload.py new file mode 100644 index 0000000..484baa5 --- /dev/null +++ b/env/lib/python3.6/site-packages/setuptools/command/upload.py @@ -0,0 +1,38 @@ +import getpass +from distutils.command import upload as orig + + +class upload(orig.upload): + """ + Override default upload behavior to obtain password + in a variety of different ways. + """ + + def finalize_options(self): + orig.upload.finalize_options(self) + # Attempt to obtain password. Short circuit evaluation at the first + # sign of success. + self.password = ( + self.password or + self._load_password_from_keyring() or + self._prompt_for_password() + ) + + def _load_password_from_keyring(self): + """ + Attempt to load password from keyring. Suppress Exceptions. + """ + try: + keyring = __import__('keyring') + return keyring.get_password(self.repository, self.username) + except Exception: + pass + + def _prompt_for_password(self): + """ + Prompt for a password on the tty. Suppress Exceptions. + """ + try: + return getpass.getpass() + except (Exception, KeyboardInterrupt): + pass diff --git a/env/lib/python3.6/site-packages/setuptools/command/upload_docs.py b/env/lib/python3.6/site-packages/setuptools/command/upload_docs.py new file mode 100644 index 0000000..269dc2d --- /dev/null +++ b/env/lib/python3.6/site-packages/setuptools/command/upload_docs.py @@ -0,0 +1,206 @@ +# -*- coding: utf-8 -*- +"""upload_docs + +Implements a Distutils 'upload_docs' subcommand (upload documentation to +PyPI's pythonhosted.org). +""" + +from base64 import standard_b64encode +from distutils import log +from distutils.errors import DistutilsOptionError +import os +import socket +import zipfile +import tempfile +import shutil +import itertools +import functools + +from setuptools.extern import six +from setuptools.extern.six.moves import http_client, urllib + +from pkg_resources import iter_entry_points +from .upload import upload + + +def _encode(s): + errors = 'surrogateescape' if six.PY3 else 'strict' + return s.encode('utf-8', errors) + + +class upload_docs(upload): + # override the default repository as upload_docs isn't + # supported by Warehouse (and won't be). 
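upload.finalize_options() above resolves the password lazily: an explicit value wins, then the keyring, then an interactive prompt, with the or-chain short-circuiting at the first truthy result. A minimal sketch of that fallback pattern, where the repository URL and username are placeholders:

    import getpass

    def password_from_keyring(repository, username):
        try:
            import keyring                    # optional dependency, as in the command
            return keyring.get_password(repository, username)
        except Exception:
            return None

    password = (
        None                                  # e.g. nothing came from .pypirc
        or password_from_keyring('https://upload.pypi.org/legacy/', 'example-user')
        or getpass.getpass()                  # last resort: interactive prompt
    )
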
+ DEFAULT_REPOSITORY = 'https://pypi.python.org/pypi/' + + description = 'Upload documentation to PyPI' + + user_options = [ + ('repository=', 'r', + "url of repository [default: %s]" % upload.DEFAULT_REPOSITORY), + ('show-response', None, + 'display full response text from server'), + ('upload-dir=', None, 'directory to upload'), + ] + boolean_options = upload.boolean_options + + def has_sphinx(self): + if self.upload_dir is None: + for ep in iter_entry_points('distutils.commands', 'build_sphinx'): + return True + + sub_commands = [('build_sphinx', has_sphinx)] + + def initialize_options(self): + upload.initialize_options(self) + self.upload_dir = None + self.target_dir = None + + def finalize_options(self): + log.warn("Upload_docs command is deprecated. Use RTD instead.") + upload.finalize_options(self) + if self.upload_dir is None: + if self.has_sphinx(): + build_sphinx = self.get_finalized_command('build_sphinx') + self.target_dir = build_sphinx.builder_target_dir + else: + build = self.get_finalized_command('build') + self.target_dir = os.path.join(build.build_base, 'docs') + else: + self.ensure_dirname('upload_dir') + self.target_dir = self.upload_dir + self.announce('Using upload directory %s' % self.target_dir) + + def create_zipfile(self, filename): + zip_file = zipfile.ZipFile(filename, "w") + try: + self.mkpath(self.target_dir) # just in case + for root, dirs, files in os.walk(self.target_dir): + if root == self.target_dir and not files: + raise DistutilsOptionError( + "no files found in upload directory '%s'" + % self.target_dir) + for name in files: + full = os.path.join(root, name) + relative = root[len(self.target_dir):].lstrip(os.path.sep) + dest = os.path.join(relative, name) + zip_file.write(full, dest) + finally: + zip_file.close() + + def run(self): + # Run sub commands + for cmd_name in self.get_sub_commands(): + self.run_command(cmd_name) + + tmp_dir = tempfile.mkdtemp() + name = self.distribution.metadata.get_name() + zip_file = os.path.join(tmp_dir, "%s.zip" % name) + try: + self.create_zipfile(zip_file) + self.upload_file(zip_file) + finally: + shutil.rmtree(tmp_dir) + + @staticmethod + def _build_part(item, sep_boundary): + key, values = item + title = '\nContent-Disposition: form-data; name="%s"' % key + # handle multiple entries for the same name + if not isinstance(values, list): + values = [values] + for value in values: + if isinstance(value, tuple): + title += '; filename="%s"' % value[0] + value = value[1] + else: + value = _encode(value) + yield sep_boundary + yield _encode(title) + yield b"\n\n" + yield value + if value and value[-1:] == b'\r': + yield b'\n' # write an extra newline (lurve Macs) + + @classmethod + def _build_multipart(cls, data): + """ + Build up the MIME payload for the POST data + """ + boundary = b'--------------GHSKFJDLGDS7543FJKLFHRE75642756743254' + sep_boundary = b'\n--' + boundary + end_boundary = sep_boundary + b'--' + end_items = end_boundary, b"\n", + builder = functools.partial( + cls._build_part, + sep_boundary=sep_boundary, + ) + part_groups = map(builder, data.items()) + parts = itertools.chain.from_iterable(part_groups) + body_items = itertools.chain(parts, end_items) + content_type = 'multipart/form-data; boundary=%s' % boundary + return b''.join(body_items), content_type + + def upload_file(self, filename): + with open(filename, 'rb') as f: + content = f.read() + meta = self.distribution.metadata + data = { + ':action': 'doc_upload', + 'name': meta.get_name(), + 'content': (os.path.basename(filename), content), + } + # 
set up the authentication + credentials = _encode(self.username + ':' + self.password) + credentials = standard_b64encode(credentials) + if six.PY3: + credentials = credentials.decode('ascii') + auth = "Basic " + credentials + + body, ct = self._build_multipart(data) + + self.announce("Submitting documentation to %s" % (self.repository), + log.INFO) + + # build the Request + # We can't use urllib2 since we need to send the Basic + # auth right with the first request + schema, netloc, url, params, query, fragments = \ + urllib.parse.urlparse(self.repository) + assert not params and not query and not fragments + if schema == 'http': + conn = http_client.HTTPConnection(netloc) + elif schema == 'https': + conn = http_client.HTTPSConnection(netloc) + else: + raise AssertionError("unsupported schema " + schema) + + data = '' + try: + conn.connect() + conn.putrequest("POST", url) + content_type = ct + conn.putheader('Content-type', content_type) + conn.putheader('Content-length', str(len(body))) + conn.putheader('Authorization', auth) + conn.endheaders() + conn.send(body) + except socket.error as e: + self.announce(str(e), log.ERROR) + return + + r = conn.getresponse() + if r.status == 200: + self.announce('Server response (%s): %s' % (r.status, r.reason), + log.INFO) + elif r.status == 301: + location = r.getheader('Location') + if location is None: + location = 'https://pythonhosted.org/%s/' % meta.get_name() + self.announce('Upload successful. Visit %s' % location, + log.INFO) + else: + self.announce('Upload failed (%s): %s' % (r.status, r.reason), + log.ERROR) + if self.show_response: + print('-' * 75, r.read(), '-' * 75) diff --git a/env/lib/python3.6/site-packages/setuptools/depends.py b/env/lib/python3.6/site-packages/setuptools/depends.py new file mode 100644 index 0000000..89d39a5 --- /dev/null +++ b/env/lib/python3.6/site-packages/setuptools/depends.py @@ -0,0 +1,217 @@ +import sys +import imp +import marshal +from distutils.version import StrictVersion +from imp import PKG_DIRECTORY, PY_COMPILED, PY_SOURCE, PY_FROZEN + +from setuptools.extern import six + +__all__ = [ + 'Require', 'find_module', 'get_module_constant', 'extract_constant' +] + + +class Require: + """A prerequisite to building or installing a distribution""" + + def __init__(self, name, requested_version, module, homepage='', + attribute=None, format=None): + + if format is None and requested_version is not None: + format = StrictVersion + + if format is not None: + requested_version = format(requested_version) + if attribute is None: + attribute = '__version__' + + self.__dict__.update(locals()) + del self.self + + def full_name(self): + """Return full package/distribution name, w/version""" + if self.requested_version is not None: + return '%s-%s' % (self.name, self.requested_version) + return self.name + + def version_ok(self, version): + """Is 'version' sufficiently up-to-date?""" + return self.attribute is None or self.format is None or \ + str(version) != "unknown" and version >= self.requested_version + + def get_version(self, paths=None, default="unknown"): + """Get version number of installed module, 'None', or 'default' + + Search 'paths' for module. If not found, return 'None'. If found, + return the extracted version attribute, or 'default' if no version + attribute was specified, or the value cannot be determined without + importing the module. The version is formatted according to the + requirement's version format (if any), unless it is 'None' or the + supplied 'default'. 
+ """ + + if self.attribute is None: + try: + f, p, i = find_module(self.module, paths) + if f: + f.close() + return default + except ImportError: + return None + + v = get_module_constant(self.module, self.attribute, default, paths) + + if v is not None and v is not default and self.format is not None: + return self.format(v) + + return v + + def is_present(self, paths=None): + """Return true if dependency is present on 'paths'""" + return self.get_version(paths) is not None + + def is_current(self, paths=None): + """Return true if dependency is present and up-to-date on 'paths'""" + version = self.get_version(paths) + if version is None: + return False + return self.version_ok(version) + + +def _iter_code(code): + """Yield '(op,arg)' pair for each operation in code object 'code'""" + + from array import array + from dis import HAVE_ARGUMENT, EXTENDED_ARG + + bytes = array('b', code.co_code) + eof = len(code.co_code) + + ptr = 0 + extended_arg = 0 + + while ptr < eof: + + op = bytes[ptr] + + if op >= HAVE_ARGUMENT: + + arg = bytes[ptr + 1] + bytes[ptr + 2] * 256 + extended_arg + ptr += 3 + + if op == EXTENDED_ARG: + long_type = six.integer_types[-1] + extended_arg = arg * long_type(65536) + continue + + else: + arg = None + ptr += 1 + + yield op, arg + + +def find_module(module, paths=None): + """Just like 'imp.find_module()', but with package support""" + + parts = module.split('.') + + while parts: + part = parts.pop(0) + f, path, (suffix, mode, kind) = info = imp.find_module(part, paths) + + if kind == PKG_DIRECTORY: + parts = parts or ['__init__'] + paths = [path] + + elif parts: + raise ImportError("Can't find %r in %s" % (parts, module)) + + return info + + +def get_module_constant(module, symbol, default=-1, paths=None): + """Find 'module' by searching 'paths', and extract 'symbol' + + Return 'None' if 'module' does not exist on 'paths', or it does not define + 'symbol'. If the module defines 'symbol' as a constant, return the + constant. Otherwise, return 'default'.""" + + try: + f, path, (suffix, mode, kind) = find_module(module, paths) + except ImportError: + # Module doesn't exist + return None + + try: + if kind == PY_COMPILED: + f.read(8) # skip magic & date + code = marshal.load(f) + elif kind == PY_FROZEN: + code = imp.get_frozen_object(module) + elif kind == PY_SOURCE: + code = compile(f.read(), path, 'exec') + else: + # Not something we can parse; we'll have to import it. :( + if module not in sys.modules: + imp.load_module(module, f, path, (suffix, mode, kind)) + return getattr(sys.modules[module], symbol, None) + + finally: + if f: + f.close() + + return extract_constant(code, symbol, default) + + +def extract_constant(code, symbol, default=-1): + """Extract the constant value of 'symbol' from 'code' + + If the name 'symbol' is bound to a constant value by the Python code + object 'code', return that value. If 'symbol' is bound to an expression, + return 'default'. Otherwise, return 'None'. + + Return value is based on the first assignment to 'symbol'. 'symbol' must + be a global, or at least a non-"fast" local in the code block. That is, + only 'STORE_NAME' and 'STORE_GLOBAL' opcodes are checked, and 'symbol' + must be present in 'code.co_names'. 
+ """ + + if symbol not in code.co_names: + # name's not there, can't possibly be an assigment + return None + + name_idx = list(code.co_names).index(symbol) + + STORE_NAME = 90 + STORE_GLOBAL = 97 + LOAD_CONST = 100 + + const = default + + for op, arg in _iter_code(code): + + if op == LOAD_CONST: + const = code.co_consts[arg] + elif arg == name_idx and (op == STORE_NAME or op == STORE_GLOBAL): + return const + else: + const = default + + +def _update_globals(): + """ + Patch the globals to remove the objects not available on some platforms. + + XXX it'd be better to test assertions about bytecode instead. + """ + + if not sys.platform.startswith('java') and sys.platform != 'cli': + return + incompatible = 'extract_constant', 'get_module_constant' + for name in incompatible: + del globals()[name] + __all__.remove(name) + + +_update_globals() diff --git a/env/lib/python3.6/site-packages/setuptools/dist.py b/env/lib/python3.6/site-packages/setuptools/dist.py new file mode 100644 index 0000000..612040c --- /dev/null +++ b/env/lib/python3.6/site-packages/setuptools/dist.py @@ -0,0 +1,914 @@ +__all__ = ['Distribution'] + +import re +import os +import warnings +import numbers +import distutils.log +import distutils.core +import distutils.cmd +import distutils.dist +from distutils.errors import (DistutilsOptionError, DistutilsPlatformError, + DistutilsSetupError) +from distutils.util import rfc822_escape + +from setuptools.extern import six +from setuptools.extern.six.moves import map +from pkg_resources.extern import packaging + +from setuptools.depends import Require +from setuptools import windows_support +from setuptools.monkey import get_unpatched +import pkg_resources + + +def _get_unpatched(cls): + warnings.warn("Do not call this function", DeprecationWarning) + return get_unpatched(cls) + + +# Based on Python 3.5 version +def write_pkg_file(self, file): + """Write the PKG-INFO format data to a file object. + """ + version = '1.0' + if (self.provides or self.requires or self.obsoletes or + self.classifiers or self.download_url): + version = '1.1' + # Setuptools specific for PEP 345 + if hasattr(self, 'python_requires'): + version = '1.2' + + file.write('Metadata-Version: %s\n' % version) + file.write('Name: %s\n' % self.get_name()) + file.write('Version: %s\n' % self.get_version()) + file.write('Summary: %s\n' % self.get_description()) + file.write('Home-page: %s\n' % self.get_url()) + file.write('Author: %s\n' % self.get_contact()) + file.write('Author-email: %s\n' % self.get_contact_email()) + file.write('License: %s\n' % self.get_license()) + if self.download_url: + file.write('Download-URL: %s\n' % self.download_url) + + long_desc = rfc822_escape(self.get_long_description()) + file.write('Description: %s\n' % long_desc) + + keywords = ','.join(self.get_keywords()) + if keywords: + file.write('Keywords: %s\n' % keywords) + + self._write_list(file, 'Platform', self.get_platforms()) + self._write_list(file, 'Classifier', self.get_classifiers()) + + # PEP 314 + self._write_list(file, 'Requires', self.get_requires()) + self._write_list(file, 'Provides', self.get_provides()) + self._write_list(file, 'Obsoletes', self.get_obsoletes()) + + # Setuptools specific for PEP 345 + if hasattr(self, 'python_requires'): + file.write('Requires-Python: %s\n' % self.python_requires) + + +# from Python 3.4 +def write_pkg_info(self, base_dir): + """Write the PKG-INFO file into the release tree. 
+ """ + with open(os.path.join(base_dir, 'PKG-INFO'), 'w', + encoding='UTF-8') as pkg_info: + self.write_pkg_file(pkg_info) + + +sequence = tuple, list + + +def check_importable(dist, attr, value): + try: + ep = pkg_resources.EntryPoint.parse('x=' + value) + assert not ep.extras + except (TypeError, ValueError, AttributeError, AssertionError): + raise DistutilsSetupError( + "%r must be importable 'module:attrs' string (got %r)" + % (attr, value) + ) + + +def assert_string_list(dist, attr, value): + """Verify that value is a string list or None""" + try: + assert ''.join(value) != value + except (TypeError, ValueError, AttributeError, AssertionError): + raise DistutilsSetupError( + "%r must be a list of strings (got %r)" % (attr, value) + ) + + +def check_nsp(dist, attr, value): + """Verify that namespace packages are valid""" + ns_packages = value + assert_string_list(dist, attr, ns_packages) + for nsp in ns_packages: + if not dist.has_contents_for(nsp): + raise DistutilsSetupError( + "Distribution contains no modules or packages for " + + "namespace package %r" % nsp + ) + parent, sep, child = nsp.rpartition('.') + if parent and parent not in ns_packages: + distutils.log.warn( + "WARNING: %r is declared as a package namespace, but %r" + " is not: please correct this in setup.py", nsp, parent + ) + + +def check_extras(dist, attr, value): + """Verify that extras_require mapping is valid""" + try: + for k, v in value.items(): + if ':' in k: + k, m = k.split(':', 1) + if pkg_resources.invalid_marker(m): + raise DistutilsSetupError("Invalid environment marker: " + m) + list(pkg_resources.parse_requirements(v)) + except (TypeError, ValueError, AttributeError): + raise DistutilsSetupError( + "'extras_require' must be a dictionary whose values are " + "strings or lists of strings containing valid project/version " + "requirement specifiers." 
+ ) + + +def assert_bool(dist, attr, value): + """Verify that value is True, False, 0, or 1""" + if bool(value) != value: + tmpl = "{attr!r} must be a boolean value (got {value!r})" + raise DistutilsSetupError(tmpl.format(attr=attr, value=value)) + + +def check_requirements(dist, attr, value): + """Verify that install_requires is a valid requirements list""" + try: + list(pkg_resources.parse_requirements(value)) + except (TypeError, ValueError) as error: + tmpl = ( + "{attr!r} must be a string or list of strings " + "containing valid project/version requirement specifiers; {error}" + ) + raise DistutilsSetupError(tmpl.format(attr=attr, error=error)) + + +def check_specifier(dist, attr, value): + """Verify that value is a valid version specifier""" + try: + packaging.specifiers.SpecifierSet(value) + except packaging.specifiers.InvalidSpecifier as error: + tmpl = ( + "{attr!r} must be a string or list of strings " + "containing valid version specifiers; {error}" + ) + raise DistutilsSetupError(tmpl.format(attr=attr, error=error)) + + +def check_entry_points(dist, attr, value): + """Verify that entry_points map is parseable""" + try: + pkg_resources.EntryPoint.parse_map(value) + except ValueError as e: + raise DistutilsSetupError(e) + + +def check_test_suite(dist, attr, value): + if not isinstance(value, six.string_types): + raise DistutilsSetupError("test_suite must be a string") + + +def check_package_data(dist, attr, value): + """Verify that value is a dictionary of package names to glob lists""" + if isinstance(value, dict): + for k, v in value.items(): + if not isinstance(k, str): + break + try: + iter(v) + except TypeError: + break + else: + return + raise DistutilsSetupError( + attr + " must be a dictionary mapping package names to lists of " + "wildcard patterns" + ) + + +def check_packages(dist, attr, value): + for pkgname in value: + if not re.match(r'\w+(\.\w+)*', pkgname): + distutils.log.warn( + "WARNING: %r not a valid package name; please use only " + ".-separated package names in setup.py", pkgname + ) + + +_Distribution = get_unpatched(distutils.core.Distribution) + + +class Distribution(_Distribution): + """Distribution with support for features, tests, and package data + + This is an enhanced version of 'distutils.dist.Distribution' that + effectively adds the following new optional keyword arguments to 'setup()': + + 'install_requires' -- a string or sequence of strings specifying project + versions that the distribution requires when installed, in the format + used by 'pkg_resources.require()'. They will be installed + automatically when the package is installed. If you wish to use + packages that are not available in PyPI, or want to give your users an + alternate download location, you can add a 'find_links' option to the + '[easy_install]' section of your project's 'setup.cfg' file, and then + setuptools will scan the listed web pages for links that satisfy the + requirements. + + 'extras_require' -- a dictionary mapping names of optional "extras" to the + additional requirement(s) that using those extras incurs. For example, + this:: + + extras_require = dict(reST = ["docutils>=0.3", "reSTedit"]) + + indicates that the distribution can optionally provide an extra + capability called "reST", but it can only be used if docutils and + reSTedit are installed. If the user installs your package using + EasyInstall and requests one of your extras, the corresponding + additional requirements will be installed if needed. 
+ + 'features' **deprecated** -- a dictionary mapping option names to + 'setuptools.Feature' + objects. Features are a portion of the distribution that can be + included or excluded based on user options, inter-feature dependencies, + and availability on the current system. Excluded features are omitted + from all setup commands, including source and binary distributions, so + you can create multiple distributions from the same source tree. + Feature names should be valid Python identifiers, except that they may + contain the '-' (minus) sign. Features can be included or excluded + via the command line options '--with-X' and '--without-X', where 'X' is + the name of the feature. Whether a feature is included by default, and + whether you are allowed to control this from the command line, is + determined by the Feature object. See the 'Feature' class for more + information. + + 'test_suite' -- the name of a test suite to run for the 'test' command. + If the user runs 'python setup.py test', the package will be installed, + and the named test suite will be run. The format is the same as + would be used on a 'unittest.py' command line. That is, it is the + dotted name of an object to import and call to generate a test suite. + + 'package_data' -- a dictionary mapping package names to lists of filenames + or globs to use to find data files contained in the named packages. + If the dictionary has filenames or globs listed under '""' (the empty + string), those names will be searched for in every package, in addition + to any names for the specific package. Data files found using these + names/globs will be installed along with the package, in the same + location as the package. Note that globs are allowed to reference + the contents of non-package subdirectories, as long as you use '/' as + a path separator. (Globs are automatically converted to + platform-specific paths at runtime.) + + In addition to these new keywords, this class also has several new methods + for manipulating the distribution's contents. For example, the 'include()' + and 'exclude()' methods can be thought of as in-place add and subtract + commands that add or remove packages, modules, extensions, and so on from + the distribution. They are used by the feature subsystem to configure the + distribution for the included and excluded features. + """ + + _patched_dist = None + + def patch_missing_pkg_info(self, attrs): + # Fake up a replacement for the data that would normally come from + # PKG-INFO, but which might not yet be built if this is a fresh + # checkout. 
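The keyword arguments documented in the class docstring above map directly onto setup() calls. A small illustrative setup script using them, where all project, package, and requirement names are placeholders:

    from setuptools import setup

    setup(
        name='example-pkg',
        version='0.1.0',
        packages=['example_pkg'],
        install_requires=['requests>=2.0'],            # installed automatically with the package
        extras_require={'docs': ['docutils>=0.3']},    # optional "docs" extra
        package_data={'example_pkg': ['data/*.json']}, # data files shipped inside the package
        test_suite='example_pkg.tests',                # run by `python setup.py test`
    )
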
+ # + if not attrs or 'name' not in attrs or 'version' not in attrs: + return + key = pkg_resources.safe_name(str(attrs['name'])).lower() + dist = pkg_resources.working_set.by_key.get(key) + if dist is not None and not dist.has_metadata('PKG-INFO'): + dist._version = pkg_resources.safe_version(str(attrs['version'])) + self._patched_dist = dist + + def __init__(self, attrs=None): + have_package_data = hasattr(self, "package_data") + if not have_package_data: + self.package_data = {} + _attrs_dict = attrs or {} + if 'features' in _attrs_dict or 'require_features' in _attrs_dict: + Feature.warn_deprecated() + self.require_features = [] + self.features = {} + self.dist_files = [] + self.src_root = attrs and attrs.pop("src_root", None) + self.patch_missing_pkg_info(attrs) + # Make sure we have any eggs needed to interpret 'attrs' + if attrs is not None: + self.dependency_links = attrs.pop('dependency_links', []) + assert_string_list(self, 'dependency_links', self.dependency_links) + if attrs and 'setup_requires' in attrs: + self.fetch_build_eggs(attrs['setup_requires']) + for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'): + vars(self).setdefault(ep.name, None) + _Distribution.__init__(self, attrs) + if isinstance(self.metadata.version, numbers.Number): + # Some people apparently take "version number" too literally :) + self.metadata.version = str(self.metadata.version) + + if self.metadata.version is not None: + try: + ver = packaging.version.Version(self.metadata.version) + normalized_version = str(ver) + if self.metadata.version != normalized_version: + warnings.warn( + "Normalizing '%s' to '%s'" % ( + self.metadata.version, + normalized_version, + ) + ) + self.metadata.version = normalized_version + except (packaging.version.InvalidVersion, TypeError): + warnings.warn( + "The version specified (%r) is an invalid version, this " + "may not work as expected with newer versions of " + "setuptools, pip, and PyPI. Please see PEP 440 for more " + "details." 
% self.metadata.version + ) + if getattr(self, 'python_requires', None): + self.metadata.python_requires = self.python_requires + + def parse_command_line(self): + """Process features after parsing command line options""" + result = _Distribution.parse_command_line(self) + if self.features: + self._finalize_features() + return result + + def _feature_attrname(self, name): + """Convert feature name to corresponding option attribute name""" + return 'with_' + name.replace('-', '_') + + def fetch_build_eggs(self, requires): + """Resolve pre-setup requirements""" + resolved_dists = pkg_resources.working_set.resolve( + pkg_resources.parse_requirements(requires), + installer=self.fetch_build_egg, + replace_conflicting=True, + ) + for dist in resolved_dists: + pkg_resources.working_set.add(dist, replace=True) + return resolved_dists + + def finalize_options(self): + _Distribution.finalize_options(self) + if self.features: + self._set_global_opts_from_features() + + for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'): + value = getattr(self, ep.name, None) + if value is not None: + ep.require(installer=self.fetch_build_egg) + ep.load()(self, ep.name, value) + if getattr(self, 'convert_2to3_doctests', None): + # XXX may convert to set here when we can rely on set being builtin + self.convert_2to3_doctests = [os.path.abspath(p) for p in self.convert_2to3_doctests] + else: + self.convert_2to3_doctests = [] + + def get_egg_cache_dir(self): + egg_cache_dir = os.path.join(os.curdir, '.eggs') + if not os.path.exists(egg_cache_dir): + os.mkdir(egg_cache_dir) + windows_support.hide_file(egg_cache_dir) + readme_txt_filename = os.path.join(egg_cache_dir, 'README.txt') + with open(readme_txt_filename, 'w') as f: + f.write('This directory contains eggs that were downloaded ' + 'by setuptools to build, test, and run plug-ins.\n\n') + f.write('This directory caches those eggs to prevent ' + 'repeated downloads.\n\n') + f.write('However, it is safe to delete this directory.\n\n') + + return egg_cache_dir + + def fetch_build_egg(self, req): + """Fetch an egg needed for building""" + + try: + cmd = self._egg_fetcher + cmd.package_index.to_scan = [] + except AttributeError: + from setuptools.command.easy_install import easy_install + dist = self.__class__({'script_args': ['easy_install']}) + dist.parse_config_files() + opts = dist.get_option_dict('easy_install') + keep = ( + 'find_links', 'site_dirs', 'index_url', 'optimize', + 'site_dirs', 'allow_hosts' + ) + for key in list(opts): + if key not in keep: + del opts[key] # don't use any other settings + if self.dependency_links: + links = self.dependency_links[:] + if 'find_links' in opts: + links = opts['find_links'][1].split() + links + opts['find_links'] = ('setup', links) + install_dir = self.get_egg_cache_dir() + cmd = easy_install( + dist, args=["x"], install_dir=install_dir, exclude_scripts=True, + always_copy=False, build_directory=None, editable=False, + upgrade=False, multi_version=True, no_report=True, user=False + ) + cmd.ensure_finalized() + self._egg_fetcher = cmd + return cmd.easy_install(req) + + def _set_global_opts_from_features(self): + """Add --with-X/--without-X options based on optional features""" + + go = [] + no = self.negative_opt.copy() + + for name, feature in self.features.items(): + self._set_feature(name, None) + feature.validate(self) + + if feature.optional: + descr = feature.description + incdef = ' (default)' + excdef = '' + if not feature.include_by_default(): + excdef, incdef = incdef, excdef + + 
go.append(('with-' + name, None, 'include ' + descr + incdef)) + go.append(('without-' + name, None, 'exclude ' + descr + excdef)) + no['without-' + name] = 'with-' + name + + self.global_options = self.feature_options = go + self.global_options + self.negative_opt = self.feature_negopt = no + + def _finalize_features(self): + """Add/remove features and resolve dependencies between them""" + + # First, flag all the enabled items (and thus their dependencies) + for name, feature in self.features.items(): + enabled = self.feature_is_included(name) + if enabled or (enabled is None and feature.include_by_default()): + feature.include_in(self) + self._set_feature(name, 1) + + # Then disable the rest, so that off-by-default features don't + # get flagged as errors when they're required by an enabled feature + for name, feature in self.features.items(): + if not self.feature_is_included(name): + feature.exclude_from(self) + self._set_feature(name, 0) + + def get_command_class(self, command): + """Pluggable version of get_command_class()""" + if command in self.cmdclass: + return self.cmdclass[command] + + for ep in pkg_resources.iter_entry_points('distutils.commands', command): + ep.require(installer=self.fetch_build_egg) + self.cmdclass[command] = cmdclass = ep.load() + return cmdclass + else: + return _Distribution.get_command_class(self, command) + + def print_commands(self): + for ep in pkg_resources.iter_entry_points('distutils.commands'): + if ep.name not in self.cmdclass: + # don't require extras as the commands won't be invoked + cmdclass = ep.resolve() + self.cmdclass[ep.name] = cmdclass + return _Distribution.print_commands(self) + + def get_command_list(self): + for ep in pkg_resources.iter_entry_points('distutils.commands'): + if ep.name not in self.cmdclass: + # don't require extras as the commands won't be invoked + cmdclass = ep.resolve() + self.cmdclass[ep.name] = cmdclass + return _Distribution.get_command_list(self) + + def _set_feature(self, name, status): + """Set feature's inclusion status""" + setattr(self, self._feature_attrname(name), status) + + def feature_is_included(self, name): + """Return 1 if feature is included, 0 if excluded, 'None' if unknown""" + return getattr(self, self._feature_attrname(name)) + + def include_feature(self, name): + """Request inclusion of feature named 'name'""" + + if self.feature_is_included(name) == 0: + descr = self.features[name].description + raise DistutilsOptionError( + descr + " is required, but was excluded or is not available" + ) + self.features[name].include_in(self) + self._set_feature(name, 1) + + def include(self, **attrs): + """Add items to distribution that are named in keyword arguments + + For example, 'dist.exclude(py_modules=["x"])' would add 'x' to + the distribution's 'py_modules' attribute, if it was not already + there. + + Currently, this method only supports inclusion for attributes that are + lists or tuples. If you need to add support for adding to other + attributes in this or a subclass, you can add an '_include_X' method, + where 'X' is the name of the attribute. The method will be called with + the value passed to 'include()'. So, 'dist.include(foo={"bar":"baz"})' + will try to call 'dist._include_foo({"bar":"baz"})', which can then + handle whatever special inclusion logic is needed. 
+ """ + for k, v in attrs.items(): + include = getattr(self, '_include_' + k, None) + if include: + include(v) + else: + self._include_misc(k, v) + + def exclude_package(self, package): + """Remove packages, modules, and extensions in named package""" + + pfx = package + '.' + if self.packages: + self.packages = [ + p for p in self.packages + if p != package and not p.startswith(pfx) + ] + + if self.py_modules: + self.py_modules = [ + p for p in self.py_modules + if p != package and not p.startswith(pfx) + ] + + if self.ext_modules: + self.ext_modules = [ + p for p in self.ext_modules + if p.name != package and not p.name.startswith(pfx) + ] + + def has_contents_for(self, package): + """Return true if 'exclude_package(package)' would do something""" + + pfx = package + '.' + + for p in self.iter_distribution_names(): + if p == package or p.startswith(pfx): + return True + + def _exclude_misc(self, name, value): + """Handle 'exclude()' for list/tuple attrs without a special handler""" + if not isinstance(value, sequence): + raise DistutilsSetupError( + "%s: setting must be a list or tuple (%r)" % (name, value) + ) + try: + old = getattr(self, name) + except AttributeError: + raise DistutilsSetupError( + "%s: No such distribution setting" % name + ) + if old is not None and not isinstance(old, sequence): + raise DistutilsSetupError( + name + ": this setting cannot be changed via include/exclude" + ) + elif old: + setattr(self, name, [item for item in old if item not in value]) + + def _include_misc(self, name, value): + """Handle 'include()' for list/tuple attrs without a special handler""" + + if not isinstance(value, sequence): + raise DistutilsSetupError( + "%s: setting must be a list (%r)" % (name, value) + ) + try: + old = getattr(self, name) + except AttributeError: + raise DistutilsSetupError( + "%s: No such distribution setting" % name + ) + if old is None: + setattr(self, name, value) + elif not isinstance(old, sequence): + raise DistutilsSetupError( + name + ": this setting cannot be changed via include/exclude" + ) + else: + setattr(self, name, old + [item for item in value if item not in old]) + + def exclude(self, **attrs): + """Remove items from distribution that are named in keyword arguments + + For example, 'dist.exclude(py_modules=["x"])' would remove 'x' from + the distribution's 'py_modules' attribute. Excluding packages uses + the 'exclude_package()' method, so all of the package's contained + packages, modules, and extensions are also excluded. + + Currently, this method only supports exclusion from attributes that are + lists or tuples. If you need to add support for excluding from other + attributes in this or a subclass, you can add an '_exclude_X' method, + where 'X' is the name of the attribute. The method will be called with + the value passed to 'exclude()'. So, 'dist.exclude(foo={"bar":"baz"})' + will try to call 'dist._exclude_foo({"bar":"baz"})', which can then + handle whatever special exclusion logic is needed. 
+ """ + for k, v in attrs.items(): + exclude = getattr(self, '_exclude_' + k, None) + if exclude: + exclude(v) + else: + self._exclude_misc(k, v) + + def _exclude_packages(self, packages): + if not isinstance(packages, sequence): + raise DistutilsSetupError( + "packages: setting must be a list or tuple (%r)" % (packages,) + ) + list(map(self.exclude_package, packages)) + + def _parse_command_opts(self, parser, args): + # Remove --with-X/--without-X options when processing command args + self.global_options = self.__class__.global_options + self.negative_opt = self.__class__.negative_opt + + # First, expand any aliases + command = args[0] + aliases = self.get_option_dict('aliases') + while command in aliases: + src, alias = aliases[command] + del aliases[command] # ensure each alias can expand only once! + import shlex + args[:1] = shlex.split(alias, True) + command = args[0] + + nargs = _Distribution._parse_command_opts(self, parser, args) + + # Handle commands that want to consume all remaining arguments + cmd_class = self.get_command_class(command) + if getattr(cmd_class, 'command_consumes_arguments', None): + self.get_option_dict(command)['args'] = ("command line", nargs) + if nargs is not None: + return [] + + return nargs + + def get_cmdline_options(self): + """Return a '{cmd: {opt:val}}' map of all command-line options + + Option names are all long, but do not include the leading '--', and + contain dashes rather than underscores. If the option doesn't take + an argument (e.g. '--quiet'), the 'val' is 'None'. + + Note that options provided by config files are intentionally excluded. + """ + + d = {} + + for cmd, opts in self.command_options.items(): + + for opt, (src, val) in opts.items(): + + if src != "command line": + continue + + opt = opt.replace('_', '-') + + if val == 0: + cmdobj = self.get_command_obj(cmd) + neg_opt = self.negative_opt.copy() + neg_opt.update(getattr(cmdobj, 'negative_opt', {})) + for neg, pos in neg_opt.items(): + if pos == opt: + opt = neg + val = None + break + else: + raise AssertionError("Shouldn't be able to get here") + + elif val == 1: + val = None + + d.setdefault(cmd, {})[opt] = val + + return d + + def iter_distribution_names(self): + """Yield all packages, modules, and extension names in distribution""" + + for pkg in self.packages or (): + yield pkg + + for module in self.py_modules or (): + yield module + + for ext in self.ext_modules or (): + if isinstance(ext, tuple): + name, buildinfo = ext + else: + name = ext.name + if name.endswith('module'): + name = name[:-6] + yield name + + def handle_display_options(self, option_order): + """If there were any non-global "display-only" options + (--help-commands or the metadata display options) on the command + line, display the requested info and return true; else return + false. + """ + import sys + + if six.PY2 or self.help_commands: + return _Distribution.handle_display_options(self, option_order) + + # Stdout may be StringIO (e.g. in tests) + import io + if not isinstance(sys.stdout, io.TextIOWrapper): + return _Distribution.handle_display_options(self, option_order) + + # Don't wrap stdout if utf-8 is already the encoding. Provides + # workaround for #334. 
+        if sys.stdout.encoding.lower() in ('utf-8', 'utf8'):
+            return _Distribution.handle_display_options(self, option_order)
+
+        # Print metadata in UTF-8 no matter the platform
+        encoding = sys.stdout.encoding
+        errors = sys.stdout.errors
+        newline = sys.platform != 'win32' and '\n' or None
+        line_buffering = sys.stdout.line_buffering
+
+        sys.stdout = io.TextIOWrapper(
+            sys.stdout.detach(), 'utf-8', errors, newline, line_buffering)
+        try:
+            return _Distribution.handle_display_options(self, option_order)
+        finally:
+            sys.stdout = io.TextIOWrapper(
+                sys.stdout.detach(), encoding, errors, newline, line_buffering)
+
+
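The stdout re-wrapping used by handle_display_options() above (detach the buffer, re-wrap it as UTF-8, then restore the original encoding even on error) is worth seeing in isolation. A minimal standalone sketch of the same idea using only the standard library; the helper name with_utf8_stdout is ours, not part of setuptools:

    import io
    import sys


    def with_utf8_stdout(fn):
        # Re-wrap sys.stdout as UTF-8 around fn(), then restore the old encoding.
        if not isinstance(sys.stdout, io.TextIOWrapper):
            return fn()  # e.g. a StringIO stand-in during tests: nothing to re-wrap
        encoding, errors = sys.stdout.encoding, sys.stdout.errors
        sys.stdout = io.TextIOWrapper(sys.stdout.detach(), 'utf-8', errors)
        try:
            return fn()
        finally:
            sys.stdout = io.TextIOWrapper(sys.stdout.detach(), encoding, errors)


    # Writes UTF-8 bytes regardless of the console's default encoding.
    with_utf8_stdout(lambda: print(u'\u00e9'))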
+ """ + + @staticmethod + def warn_deprecated(): + warnings.warn( + "Features are deprecated and will be removed in a future " + "version. See https://github.com/pypa/setuptools/issues/65.", + DeprecationWarning, + stacklevel=3, + ) + + def __init__(self, description, standard=False, available=True, + optional=True, require_features=(), remove=(), **extras): + self.warn_deprecated() + + self.description = description + self.standard = standard + self.available = available + self.optional = optional + if isinstance(require_features, (str, Require)): + require_features = require_features, + + self.require_features = [ + r for r in require_features if isinstance(r, str) + ] + er = [r for r in require_features if not isinstance(r, str)] + if er: + extras['require_features'] = er + + if isinstance(remove, str): + remove = remove, + self.remove = remove + self.extras = extras + + if not remove and not require_features and not extras: + raise DistutilsSetupError( + "Feature %s: must define 'require_features', 'remove', or at least one" + " of 'packages', 'py_modules', etc." + ) + + def include_by_default(self): + """Should this feature be included by default?""" + return self.available and self.standard + + def include_in(self, dist): + """Ensure feature and its requirements are included in distribution + + You may override this in a subclass to perform additional operations on + the distribution. Note that this method may be called more than once + per feature, and so should be idempotent. + + """ + + if not self.available: + raise DistutilsPlatformError( + self.description + " is required, " + "but is not available on this platform" + ) + + dist.include(**self.extras) + + for f in self.require_features: + dist.include_feature(f) + + def exclude_from(self, dist): + """Ensure feature is excluded from distribution + + You may override this in a subclass to perform additional operations on + the distribution. This method will be called at most once per + feature, and only after all included features have been asked to + include themselves. + """ + + dist.exclude(**self.extras) + + if self.remove: + for item in self.remove: + dist.exclude_package(item) + + def validate(self, dist): + """Verify that feature makes sense in context of distribution + + This method is called by the distribution just before it parses its + command line. It checks to ensure that the 'remove' attribute, if any, + contains only valid package/module names that are present in the base + distribution when 'setup()' is called. You may override it in a + subclass to perform any other required validation of the feature + against a target distribution. + """ + + for item in self.remove: + if not dist.has_contents_for(item): + raise DistutilsSetupError( + "%s wants to be able to remove %s, but the distribution" + " doesn't contain any packages or modules under %s" + % (self.description, item, item) + ) diff --git a/env/lib/python3.6/site-packages/setuptools/extension.py b/env/lib/python3.6/site-packages/setuptools/extension.py new file mode 100644 index 0000000..2946889 --- /dev/null +++ b/env/lib/python3.6/site-packages/setuptools/extension.py @@ -0,0 +1,57 @@ +import re +import functools +import distutils.core +import distutils.errors +import distutils.extension + +from setuptools.extern.six.moves import map + +from .monkey import get_unpatched + + +def _have_cython(): + """ + Return True if Cython can be imported. 
+ """ + cython_impl = 'Cython.Distutils.build_ext' + try: + # from (cython_impl) import build_ext + __import__(cython_impl, fromlist=['build_ext']).build_ext + return True + except Exception: + pass + return False + + +# for compatibility +have_pyrex = _have_cython + +_Extension = get_unpatched(distutils.core.Extension) + + +class Extension(_Extension): + """Extension that uses '.c' files in place of '.pyx' files""" + + def __init__(self, name, sources, *args, **kw): + # The *args is needed for compatibility as calls may use positional + # arguments. py_limited_api may be set only via keyword. + self.py_limited_api = kw.pop("py_limited_api", False) + _Extension.__init__(self, name, sources, *args, **kw) + + def _convert_pyx_sources_to_lang(self): + """ + Replace sources with .pyx extensions to sources with the target + language extension. This mechanism allows language authors to supply + pre-converted sources but to prefer the .pyx sources. + """ + if _have_cython(): + # the build has Cython, so allow it to compile the .pyx files + return + lang = self.language or '' + target_ext = '.cpp' if lang.lower() == 'c++' else '.c' + sub = functools.partial(re.sub, '.pyx$', target_ext) + self.sources = list(map(sub, self.sources)) + + +class Library(Extension): + """Just like a regular Extension, but built as a library instead""" diff --git a/env/lib/python3.6/site-packages/setuptools/glob.py b/env/lib/python3.6/site-packages/setuptools/glob.py new file mode 100644 index 0000000..6c781de --- /dev/null +++ b/env/lib/python3.6/site-packages/setuptools/glob.py @@ -0,0 +1,176 @@ +""" +Filename globbing utility. Mostly a copy of `glob` from Python 3.5. + +Changes include: + * `yield from` and PEP3102 `*` removed. + * `bytes` changed to `six.binary_type`. + * Hidden files are not ignored. +""" + +import os +import re +import fnmatch +from setuptools.extern.six import binary_type + +__all__ = ["glob", "iglob", "escape"] + + +def glob(pathname, recursive=False): + """Return a list of paths matching a pathname pattern. + + The pattern may contain simple shell-style wildcards a la + fnmatch. However, unlike fnmatch, filenames starting with a + dot are special cases that are not matched by '*' and '?' + patterns. + + If recursive is true, the pattern '**' will match any files and + zero or more directories and subdirectories. + """ + return list(iglob(pathname, recursive=recursive)) + + +def iglob(pathname, recursive=False): + """Return an iterator which yields the paths matching a pathname pattern. + + The pattern may contain simple shell-style wildcards a la + fnmatch. However, unlike fnmatch, filenames starting with a + dot are special cases that are not matched by '*' and '?' + patterns. + + If recursive is true, the pattern '**' will match any files and + zero or more directories and subdirectories. 
+ """ + it = _iglob(pathname, recursive) + if recursive and _isrecursive(pathname): + s = next(it) # skip empty string + assert not s + return it + + +def _iglob(pathname, recursive): + dirname, basename = os.path.split(pathname) + if not has_magic(pathname): + if basename: + if os.path.lexists(pathname): + yield pathname + else: + # Patterns ending with a slash should match only directories + if os.path.isdir(dirname): + yield pathname + return + if not dirname: + if recursive and _isrecursive(basename): + for x in glob2(dirname, basename): + yield x + else: + for x in glob1(dirname, basename): + yield x + return + # `os.path.split()` returns the argument itself as a dirname if it is a + # drive or UNC path. Prevent an infinite recursion if a drive or UNC path + # contains magic characters (i.e. r'\\?\C:'). + if dirname != pathname and has_magic(dirname): + dirs = _iglob(dirname, recursive) + else: + dirs = [dirname] + if has_magic(basename): + if recursive and _isrecursive(basename): + glob_in_dir = glob2 + else: + glob_in_dir = glob1 + else: + glob_in_dir = glob0 + for dirname in dirs: + for name in glob_in_dir(dirname, basename): + yield os.path.join(dirname, name) + + +# These 2 helper functions non-recursively glob inside a literal directory. +# They return a list of basenames. `glob1` accepts a pattern while `glob0` +# takes a literal basename (so it only has to check for its existence). + + +def glob1(dirname, pattern): + if not dirname: + if isinstance(pattern, binary_type): + dirname = os.curdir.encode('ASCII') + else: + dirname = os.curdir + try: + names = os.listdir(dirname) + except OSError: + return [] + return fnmatch.filter(names, pattern) + + +def glob0(dirname, basename): + if not basename: + # `os.path.split()` returns an empty basename for paths ending with a + # directory separator. 'q*x/' should match only directories. + if os.path.isdir(dirname): + return [basename] + else: + if os.path.lexists(os.path.join(dirname, basename)): + return [basename] + return [] + + +# This helper function recursively yields relative pathnames inside a literal +# directory. + + +def glob2(dirname, pattern): + assert _isrecursive(pattern) + yield pattern[:0] + for x in _rlistdir(dirname): + yield x + + +# Recursively yields relative pathnames inside a literal directory. +def _rlistdir(dirname): + if not dirname: + if isinstance(dirname, binary_type): + dirname = binary_type(os.curdir, 'ASCII') + else: + dirname = os.curdir + try: + names = os.listdir(dirname) + except os.error: + return + for x in names: + yield x + path = os.path.join(dirname, x) if dirname else x + for y in _rlistdir(path): + yield os.path.join(x, y) + + +magic_check = re.compile('([*?[])') +magic_check_bytes = re.compile(b'([*?[])') + + +def has_magic(s): + if isinstance(s, binary_type): + match = magic_check_bytes.search(s) + else: + match = magic_check.search(s) + return match is not None + + +def _isrecursive(pattern): + if isinstance(pattern, binary_type): + return pattern == b'**' + else: + return pattern == '**' + + +def escape(pathname): + """Escape all special characters. + """ + # Escaping is done by wrapping any of "*?[" between square brackets. + # Metacharacters do not work in the drive part and shouldn't be escaped. 
+ drive, pathname = os.path.splitdrive(pathname) + if isinstance(pathname, binary_type): + pathname = magic_check_bytes.sub(br'[\1]', pathname) + else: + pathname = magic_check.sub(r'[\1]', pathname) + return drive + pathname diff --git a/env/lib/python3.6/site-packages/setuptools/gui-32.exe b/env/lib/python3.6/site-packages/setuptools/gui-32.exe new file mode 100644 index 0000000000000000000000000000000000000000..f8d3509653ba8f80ca7f3aa7f95616142ba83a94 GIT binary patch literal 65536 zcmeFae|%KMxj%k3yGc&SCTD>S1PQP}R5YmQ5=~qJi^+zl1UE)DtPsG8blp-*!#RLg z0>QIub24npZS_`f<yJ2Gx%RfbwfBl*uV6xG0{-MjRTOJur8;p@W1&fqnDc!<b2dM) z?S0+v>-)#|`^OhvIcH|hGc(UT^E}VYJoC(K^_@E<yCg{t{F$aC?Zcb?`Ni{pesFxw zo%Wkt>DjE;rth;Yer@_4k$X3I);E0Tn+<n;+jI9__ucm$)$@&eJPq1?o_p`}RNPkU z`Sy3#+;eqK&X~ef(Wh%$Pd;(of3Tsy@11*-?Gf=`u?u)lX)Iw+;(cKCl`JOSKK7sD zeHA+<-V4}nyl=nv?g*9f_b?6yBx$kDF4=y~YKCCCB)cu!mL*9qBV~z|I{q@eUHI#w zxZet=Nm4pR@o(rY`E3@_kcQ7q0+8}iX7L_=QKB^Wyd=#Mq5o%(=5t@`n=ZtG%HR8U zwR+EH6(2u6f(PM6ZKcj0_0J<otFLZYbC-ITBt;MrZJ&Yn>-Zb>&yT9Ew!oxAMfl)C z#Z+d`C?Ev=lGJ)}%Ksnx|0)G)SVf_n2-;d?f9!~MzIJJ-=wKb=iHfW2QCpC29wSNm zA=ztsPZ<@3t`2ENV!bW?>DIbrM&c*bCbqaRzr~R~Z-r)Gl=RG-p<NO;x4P=0D?)s` z$m_KCdCiWD6_v>}ugUHp=<&@N<(0nQZ)pc;t^f@UfdU)Xs*a2q9hEj|W&QGS`}Q+V zaO>`-aSJ8yAtP2OBNk%M7Utt!$6gfgmQ40WtW_PKSW_r1oOg}p=vZj3XtBjwwJ#E} zLMNCsnAlP1f|%AM?kIHMo~S5v2kZEcbEs|ZrY(iCq{N>@V-R$%P-2fEhzyjmCh@Sy zXyr*PE_By~_)26%86IRFp<L0yrY(-_6^RN*wl=1!sbqzkNBE#Zr|)1xR)-`}qV{=I zsuT5#vQT;fwD0ZwJO~iAMI5M-JD`zRj|c<(+4vp|@n?~!ADWe%G6eO$3}GdB)>9Ya zkBHB1hGv2=t60ZM@2flwcy2#L^lN{0=%0Q@MjzL)ErkWFb2Ro*N07ImOt!9YmgwvP zqh2yflmnST)@Q6JEa3kv=;e&Js^gRcx7ile@Me+Xh_`B=wJ3|47Z(=9j;P;M4jj9k ze|zYYnyGIobV=&smWsjxVw3XZ39!ke-gcWd&f8i_T!k-^@^CA0*s%-oQ>v?$_-7%o z(GNN8XT7J;F$I$PlNQv_oLiavAq4>E7I2dQhlE)vSn!y;BSSI+5(`L`#@q*i(+$dj ziMR82oKzstr3NgrEei6^p%m@2rUhVv>rK-H3%XZ<_rUh;c(a2dG)%uOg$_v@w_EZo zlu%GsR0^7TQkP%ahpqsf^)t)7t<j1g+Tx`4;LnY}eDrxiuoH=ZlK9$8(KPhsobi4M z$psZiHuGF42=%W3b2x}s^KXwz;=hfa!6-nS00F@ZB2Rzdm-tMKM|!J2$OpkDB&e<W zp=IqLfdhi+jGDI_IfSX1CsWBNHQ^`>)|hz?tCY-06G}<$V~#?~heoED!!4L2akG@t z3k(cUbnpdgqwk%>`n0WAC7vv#rU2V~=4eiAwpse1#pRD3*UlGpF7&;UP%~^>-Uq9> zqqY#gDuX1JM-HRLrTl?x<n8>L1RW6Nzt8%&-UwXtnfuqbCmh#A4k1U7-%L3c7Zx(d zuhG+B-K2d4zoLVczO#ufnYJw*t5&k#)-NC8`0Z!%(?;tLH)1SS=)o%@p*m1Hza}bC zH<@{EP=$nZv|K=--J~^q2RFJ=UsK7|s*{A7<k#1>>2riBOI3;<EmbyBr2Q;!)*t;6 z%bAU*;bM7n=w0Oq89^D~`RGjkug?ON9(0;MXlio>B9VN6@g>xk)TvhhOKNMSeI?sb zNT@@qXG7GtAEH*Z*I7+?xX^=^+#cd{e*xu~c+oK%QC`k~8T1Fj`XSd4etuu)23Ly= znHbY_evF#lbUsH*M$@PjpbB6kZlDn4%Pfry7Wc9o2a;HxjOT7A9>$Ks0zkIpxF}-P z4%J+UwB{X!v+x4J<l9l;41|Nc`2wVB4jNck69S=U@yowNLO-xFpm5`+mK}<8p^v+1 z@>vU3b1r4SD4dNJCLBe`P~a!!^eLzUU1z9JMV04G)5v%Ur4xPh4u|g#Tc-(r0PB00 z<2OM*Q-Cajywm3kTRsx?bLZ%s;?w6_FF__SF*1GDPvs6}`fAHZ`iq5gfrnJz3GS7o z<!S&dC^NOtiE-fBC#iZl6nPcM^GAV==(P<NR;%_=#!(%&0YabZIMPv&92tc<Zx7b+ zhXzbD$Xkg{J4C}ln^mO37mVbwG|+Ar#F^zd@x=IC!wbGLO_1QAONu%pJ?DT&$271> zuc4jxwz7KJ_rCH-tFJ@z@NXc!Q<?yrLiCS+GL^7*>xa$m*N_NRtT_d&`a7duuH`>P zd%}h`&|B{GYny6$%@oA-ep8*S_YbNQ*wMBx)7fGDgK2FaWZ0dLJaOehDVhGlqZp`r z7Zz^Qt{~7!1nOpo+s>!!UDMjSGVG3o1-MTD`U{)X0)7~njK(aO!mRqVS*o4ZX4diz z7)@AzBH#*!OwC!#-^rCEBXGL5j{ilBGX<T2fkEhQ4%vX(Kg~1H*mhHs`C@8C`##CF zP-@@Z>RTv<qVAQ@pPBn4bWbwF*U^~CI`+^PVzL7sfQR?ISVY=gn;M0{7SlKW)I}fC zqn9jO+3r350+pLg-%ap_Gfi*v=m#C!&(myW%O}ynm4I*oqK+MG>rZEnIJKR9see4J z?c)sQ$RrZUz7CZ}&@|&(WWQ<q`Sr-K<@HtG)|Ku2_)JVn%I2W6B{iM@WID!(VycU$ zAsB9F=2CVh#57s7&)3s1WBcH0)V=8v_Ii;ZdYh|;kGm9nx5OzmAxm<M-r)(EdHG#_ z%&)8hSU}eM-Hj9UR#%Y!30j>6oZG7`cz^_)daDP69Az2FAzJQhYnWChD$L)$+G%bx z&7w9mR1|a&sE6y@t-J-J@>a|Gc{fUJ9G}Xg6OuprJK#0?Jp<5bfq@`8o;q|BAqcJM 
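To see the recursive '**' behaviour and the one functional difference this module's docstring calls out (hidden files are not ignored), here is a throwaway layout under a temporary directory, run against this vendored copy; the file names are arbitrary:

    import os
    import tempfile

    from setuptools.glob import escape, glob

    root = tempfile.mkdtemp()
    os.makedirs(os.path.join(root, 'pkg', 'sub'))
    for rel in ('pkg/a.txt', 'pkg/.hidden.txt', 'pkg/sub/b.txt'):
        open(os.path.join(root, *rel.split('/')), 'w').close()

    # '**' with recursive=True descends into subdirectories, and '.hidden.txt'
    # is matched as well, unlike the stdlib glob.
    print(sorted(glob(os.path.join(root, 'pkg', '**', '*.txt'), recursive=True)))

    # escape() wraps the magic characters *, ? and [ in brackets so they match literally.
    print(escape(os.path.join(root, 'weird[name]?.txt')))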
diff --git a/env/lib/python3.6/site-packages/setuptools/gui-32.exe b/env/lib/python3.6/site-packages/setuptools/gui-32.exe
new file mode 100644
index 0000000000000000000000000000000000000000..f8d3509653ba8f80ca7f3aa7f95616142ba83a94
GIT binary patch
literal 65536
[base85-encoded binary payload for gui-32.exe omitted]
zW1kqKnE?(2yNJ}+AP33XYaQ-DjkTl%URHx?gIZM9bWh^&vQmaIb7&mz%1Q&t6CnXv zvM7BI7WVDcY7U<}ANN`6{PLSLYx{j46K-1IrKoBu#Y7GEL16{B+`URV18z`Bin5yu zcd$*kd?H~6t})W=&lhW}wl@B|%cZ*&3ChQw%~oBOW^LB8Wi}xm)W9N12xL4We7g%| zDAgQIJ*&?&pCx|7^dO3_Qj9hoIq{=N9AzCB5w4u$y@XgWIcTq?Hi#~K=PjzUhhXLa zieqi+3l|D27#8qI(@UDFbXGylf4{A}j5i1a`1fF9g7T@gM&TCb2DU({2Atd@YU!sY z(EiOO>@84LxMNf!ya%JxG;pD+VmqRn-8Dq1MTAU;>YI<zn(=Ss7e3W07WC@w{M(N) zno*a7xQkGyUJVFQ>}5{bFXWZooNo>R1u454oWxAviCN5S+ge9!p*~nCs4tt5Z_aw3 zUK9hH9~#y9=G+J5jk~Kti~4sN2x6f~mBhJ4W^suQ=Nh8UZF{8LqW3?HzWf9-Bvq!K zd_B_K=j+|p*QT|xNOA-dAlBJaThMRb!B!k9o0Mmkh`k2EhOT6wazPNGP<eH3Jwc`s zjIGODA<K$jY#r@~)rT(g-uta0$4QZA$Vij#qDDl?dp&OjgVXiQ?mmU;f>y1H++{A5 zL^^FXodxC^4ranbMx##W#M8D8u!s|vieB!Mp=7G&>zm3>D;0{}X%>P$s#-Yxt54eN zYEHHhvu1B_l<6i_s==KPhI0eEWv40heyc9>RxXWQ<0wcGd$`gBH{l`5L!iBM4-L4` zsL~Ff??Jbq<eK-kFyymLwI(A)B4e&VEuNeYzRb74zA*>rdokmiu0%py6FY|g#aZ7% z!)!tn!g<FpdHRK*L%CvRZVKxGB6XI<1+K2aVP8q_g{cioc?@WZVyhH$%PB+*MhKq~ z<JlV$HrZ1@^w}}gBt{>ohXnZXk5o;iXw&YO+}HKnba?BjwJ)QdmAXri*(wdfLrIGi zVFf75<hRsW*8EUfd3u~Nz<iA-3lUM*IZp<kPyKk)?HkCp`ZhYjWi1!xrr$*GQ<=2B zWb<uEA|m0POeHNds@eB5n8xhJXn-t&SD0(NlQ%c<7_q1TiP-2EW1Lj{oKuWKvZ5<Z zNpwiBtlr=wv{G>tu}tV%dFEx3vE<+~hpHUppdnPU9AUdD@*%~N+pf$wDXN9d35AqN z0X;L0SW32h`1ugPPsHd#n3gJHv68V0+cd<IU5yQ2kxfi)OowWf@7%fG4%Mpe-CD|W zsI%^4L2q;qE*|>zxPr`#7Z?0xl(=9nvufwsYXb==`ySgkxc2S3+5<85gM*j%_T5~2 zAU0^$7TGri2ljla9bLOssQpH~I^q=WkuDgg?GiogWF0O$h%{@j+8+M2s`t|C<DD5> zcG1#cLSSGqtXL&^-AzC)AueaJeC7qGEEdC|2s7xejTeE1Yy?-e8;KmnVnEmE^x$;! zJERBQ(2o<n!Va*qku&QPj7w!y48z&ehv{)Gnmf>peX(F(S>`hIn%;+4*DG^L#ken^ zsFBQQR=0^<f<{d2VAS6D_NC2l_nUt6U<@+M&t|o4W9r=rnyA&Cy>>EanSTn;ftK5L z#X(?L)sS_-`SdQ~;@>JA&+K}U)q9JJFsUClBnPryY|6GbZAiv4c<06xx$Ydsxxq7R zc7=8~dhDlm!*i}5%yJeVjH@5!=j4>tnGS;}#pv8{fJCMjhV&~*Y4UI75aB;-tFZ^p z25n`w<(O<uB!(k&eLCd{A|-PYyjU~KywYS%Sx4FL?h~~-Ecqv`6^XeFK9R_*jm(;m z@gi3&?v@%*<No>Pmxx^uT#6tPCx~40(S=MBCG;fhgpooLJIeJ7QjoiH>cuX}6`ly9 z63$^a;>GVZQA2%Hn6<C5&I~g5!Y#0tCweS;xlD_aBf#PXV<RvBSL@ionrb>8du-KX zSRGa3Bn>%jXfb=VEVdzQU!arL$}xq%T6m(NaPP99%VS>q4aQxoU2IAQ;!#3moM5wQ zFkUndFj5fHrGNV2I|dAt;WVYYJmyUGC=Dlr>1vxs#X4xY6AYVQf<?(_!RnU3^CIJR zH3H3B!Gam$!CRCB$+KT4{mwaa5V<^<Qg}i*H7CqR@w8!~w&oxPN{POpjE$5<SxQ>Z zH@J;W8{%UE{ZvV}i!DkDmtmf`3&vddZ7QV>O_ST==AWew6nqq{pLTC7gHUP_sM&`? 
zr)h#Rd_eJMw=ZGnA=3?ZF`*I3y4o|d^h@*1B=SQ-_c+!CVpL8|Q?Pw<ym8Qs7mTC$ zH{=`%PMp3pM!%|dUF;0w^4fK_S;lBal*jzt-74x4@YlG&Kq(gtcUyDq^jZ2#Fxn?( zA@2B!4J+Wgf|shs_%RV^yADCSF9wrhS7U9=p}O$xerKyWD6(PG8DXkNpeHxLb#QLI zR@VM$rcCOBhEe9dG;nw``>wP#P0%W$&{}&bHEhk=%U><{ln2%<%(NFhdFH0)R7dsT zI(t^AJ_=oD4x>miDi|EWX&z360WA`1Zr@l<-Ld|-jSlP}PD?-cY<RWw4(O*@zYM)E zf#j6JS1et}A_7h$yo^D3t9@+y7Ur3!NOxk*aYl~qbfD&y;Iu&2F6tV(j*Md{?V)G; zly+!$zPFLDGK?xKz@<h@O5tAP)<DfcX;ZFGeXDQGx0b7VmaO<ASMl@AScJ~Vwx=C_ zVSSf@If{WvkUt=#*DJ_<RuJ217DZ;DnVO8Q$5FHEM}>!_4vqJACP_iVNErc=6xh!R zvrzm*aX}7R947zkP3G;{-2w|?%zUi*duj%~Z!b<Xf<Dixu<Q~`P|A0P?l%srEp<Bk zt8Bs-MQ9~IA!vc==Wl=u^gCR}Ww32Voytm#)sxIkc()4m37hTeQBgk*!S?IkaE1uR zG5IZS5hERJ9))NRTNm!(1oLWQMDHn2TMf}$ePi%;Ht7ywS`K6FTxgat`w9vqOnyY+ z<NW-_!Ooq#ojW^EWnKpxb98#+VAz;Lojd;`vU#m3S&7Iyq=N!>1qY@SqV`^VY#0zq zpK;jOvphOOkp_q$lb_~TDs07nLbQs)z)`yV9$+pg!HyHACUvt^ev0%|7|UvXMfEqC zIJc}OaJbaU7PTmMhkGqrNRbr2l=?@v$M=`1u@zlBh8L2;<47hCMywNdl;YJMnsX{M zb|mstU3y02#Z-#x6kWlkaBvCr+f@VDDEF@ld@zRqt5U06zC`|Bu(sbSTh)-@G@dW= zCG$6F?HBO5BskXjwD90#Po<A^=>tijVI&!nM9}7Z`hcVXCmyaPU;1NA)+#}F0kROd zZoD8;hWwr~SV2`0vQ-hXRS~jP5wcYgvQ-hXKUWc?DlZwMS21h)(;3dKLD0$Qwqg*< zxnTG%E=Om}2PDQV4WaLLGo&M(G={jWmA&p}i3F#}Z_-DY?cN{y^Ajj!Ld^XAn8vKc zPk3vMnI5kTgFiOV+J!78v!L(q!M|`%9C!&h4x9o8fh3LvW&(?W5}*p$3~U1)2A%?1 zfY*TIKo{WZA|8+iECYPNX5eeU1Hj|JuYlKpHsAzs7D)U=(~^MkKr)a9<N>z;KHvf1 zDd0um9iR)i2=dQZ;96iFa5LZo?gZ`w9tU;;Ex-}r1keRs09olWU<xoBSPGN@Yk)1l zJ-`ov=YRvi5#Uci7cdr7IvGd<76E;KCz8^%x6@ItaATTwc4?ZXtpLKm8~-^?`_8bQ z_lW<hqSA72v0JZn-|E%f-gTwAdu3&@*S*SDx!PUjt6b@=uAam}x+mO9pSMW&Mt^gU ztJe6hWmFpF#qNqqNyocVeDN!)5RX-*6~%7PdcCBwLVYy!qFc(n1Q8trV@6l0FO!HS z<r*`(J6>g#w?c)ws(Pibv`U{;wSF!6__8Rd$10tst=6iwm0G3d)4cqfq!nxB{L{1v zT7_n)=PM*xZ9;`nUT!@KBcPu&p-Z#%)B44_>{(e^aq^p*ta(&m_jJ$Fc!zdfa&o>0 zQjFUz`@7~?QL=)crmd@5$In3sh^!6=j)Q;ls_ht^PA3EWVq$IfxPI}D{s{vT2M%(& z248UDkf9e{oHXo`;Uh+ly3{@TvN2=FjlX=t6<?Tm<yDiePQK>a$y26IyKZ{QjMSO4 zzWAlI^y@P+vu4l9o_oWM^K#}d@GM-EyBG_ZOAG$#rke|wEniV|%gSQ!s#{A+%Wf-Q zT~S$eyRTX|)~sE({>xw4P_uE9BI{;VNSAslODlA*k22k;Wifu{^LL&$S-X}N%j9XE zDsQH@ci7qG)w6wGuZElJ)$@wV4fQ-H>N&l<ymF;P_8Ap=>1war>+@Cm+?qC!&Rslj zL2j<)Bd=QS-1&2&UbV~xIq7rf_xLQDmOOdNz=ZS)cTrVUdFjd`y_6wSQdI3;UBs{~ z!e7_DtE+SwvgMUU4BZm1JHs8xyS(%kUy*OUyOcWneBPCM`T9u-o^o$dwU>cip%<+r zCNZK?zr5OAZB$iN`uO54TJ2s%;a6AsyrjY7YE^<ss_>Lw$~Spn!d33{o?;lJos&Cv zUewIdOG>NVMb*{b)wh(dcNZJJ(u!N%6(qGria|w6D@yg!qVm!&tK<_FOL*ppRM<;Q z_btY)yt~&|8oubVPIAxH-2`1-S*^RvOK<a%x>U#Ktv1SacjYSg%A)de$&8kgGF`Q@ za&?uO;uEf3S?;^Sy~?OqsoGS{@S>hVRaEOfW2H{z`L8}^mY3%gl~$;_OTDj^daLPO zQEA*-;;ybLTFFX5a0WmT(>bcaqTB15KJC?AcdylXixyk$t(Q>f%8HfVNuR$xBp)eT zvgDCLN>aX_42r|wubnR6jS98uFmifAxJ$f6RaR+9=i2K&qmFA!qavz)>xnn*yz#2_ z;?IaTRpM0{jJ7qUKHVrP@97}vNtJ<=i#c(gwqIUZA<OpF3>;a#)xz3cu4_^xUQfN% zddfVguB5w)y=zKWdV9i#+sM1Fih0APAT84~GgUiZquR$H$8ea{47*ajggv2HM!{`; z!=Jxh!jX!L^dgEd(CYH2X{jc?&wIP!t(L;bC|?v_VCX<rvel(bC<dMMw+wfq!l;%8 zTwC;aobt4NvTDO~j(cwfy;fPV+FPMh2MMd%@SI_be771Buv#^^gjMrt6^ocI6Shj$ z=kAqAl91)it46S<<&>`URaRH7(%pHbs+JiOCw8~TJZsTodD0S?50fTM(q^)E-|AyE zt0-bcHY#qbs9am|Mfxz@gjupik4{Kn6O~{y+!C1|CzV~0(baDx&%#KT-@Q@KO+2g3 z5Px(|bU!05+5NmN>KW!*w?DG^-Ot~MdhS<Sdq-_uEgQ1!j@mmm*A9t`V@KY)bt?r* zPOkOT)@u%J!sXLF`L*n~Y|0)_J=wb_)YjJ$OJiFuDJgL{;@4GGt*xr+wIB2OfBes_ z_5C*i{K)#(_shB7v%!=;>)#gb)Bk#huhV+|#b}@JUvvtawVr>m5R*U8zes%d|M>pb zKGpwjG%Ef-9sx0R-Tx3U{#?IE4~n}vrsrR5%;)<TiGQv!{U7uDYcoJ{8p6Lwj`G&? 
z>=Kdc|G=+r_|I3{o=`5W=h=FSiIGWATesQ2W$PVZt#4=y+}ZTCySCl^^>5ts&3nIf z-~A7K`@!#g_j?a*fB2C{AA9`!JAUxPAN}~BpZLj>KmC`VJ@xaQPe1eQbHDiI^S}D_ zuIAl)_Wq`&b>IF2FTD7#FTH&5&~FdF^6G1^A9>@=w~qeq_kU<R_Vyo-|Jyt7n(coI zp7{6o-tYL}&mW%r=+x=XGk^KGi_3_A^MUC62cFM$Ao{Pa|9^G<e{=i)wFBw-zpDf3 ze|7z{vuCVcJ)>Gk6IwC9E8RK#-14xVpO%wzb#d|4Jn-}6Xj(eJnV55&Iy!6fE7x>C zFW|H!-nrf?j-*zAbmLZ|TGzB2jB=I64dBX>R(h4MRA>@8MZT3KxU;>t_zVuJ^6iGA z3iU`nlD<Z|lBPylk`7Qoy!DcX#Fw}dN6RhJ4PP-IBt2iLdRkm!_^QKx`QG9RZ}?>~ zXta3eR92|3xklJ6(j~4&JdN-g;UtX4ca1}Sn8uRN(X?`HuC5L};=iQY>sxS38Rvw# zJ%?nWc<^mrQMI1V8FLLJhbp5=`C0E)GFlEarJ`HC*H^Af*OugFEt-7oq|AAcAIOue zDFFqcJQRx>TJ1xXsW}ZmJJ1}o3XMY>(NwgUG#tN-1@jjySv*#o#F<y#BlM(6x2R<B zUtO&HZziwxoGMl?s;ra@_+?wpf9h}T1?k#BID$5bJzdkDEY-A!?mu@@kWr!JX&N+d z<wo9*Lc5b+<b7YC@4p<=`+I%V_rHvT-Y0<HF5Fkb&ywDqQQ=CaqB9SWUnHNt<+w1l z_xFQQ@g?4|KHp#L^ZmA2R(uJ29na^>r{jxOxbuA<lXm{^Iq7LyDImY|#V?%G`+MJV zPJ~7(zw^ca_WaNO{yR@k-A+V3AL-K`-&@oZ?nhD2ecRnz&^y2AbOzj%rd<liFH+v< z?}dCT>hpb9pK?62tatqAe$8H<rY#5L7fHWw`JOH7{XIIq#5+*l`+MK`FRkzWy>I;A z*M0W)UvKXHy>EX$_08Vj`=+0B-)Db6zP<PNzU9B^@!sG2&d<?1tnV7X!teL=dEasz zeWG_deZP0^?)|-QJ->Y*O}qIFnS_5Aagx&7B5%Fj|K+XxZM>C5F>|~XULQoJ42xox zq5I0S)<DC7ufsQ8xDXjaT90rdD(v}1rTXkjUoI4#a<8>RYTwi{6wf3ajBWBKHi+p_ ziDnm76qkcZd?cynR2CcM-q{ds=R><8^qX3iQ0_B)kc=S;=CbQT6xXzqvGcq|YrLQG z|4UCQR>Jw3HqoA2?ggi~ES4OkAnC=$5RJiu;$otiDOD0TqjL3XN;I#ug6wBX47Pr# zlU1_Wr)wQjdMjmEKGGUrw89iyo^Y)s6{*4E^;KTv-ZQ=BURtqF1+KF%j!^NsTkwY} ze*@BeMFjcKvh7PMN>mFKXRTWavPJDlTro2)wNsY!ets=>Zgr*?TKcVCpNHy7*S#w_ z2#%siU~uYUv!Qb;CWrR0dbSuEH>;9(q{`ZFV&_T^2!YdEJhuWCm{9UGtvT8sEF|Ke zD{<2^JeoE{T4q63jy$(f8aODW#cIre0cl^fFD|bpfW=ptDQ{tJ%9rH1o8vM|-c%7! zO4~=3{)wpeTCB*hbHQ=GWzVOr)fm!F#m<9{7$y-inx3P~VctXE9!ak#&aEn~usZd| z7|AfJhr*ew3m2n0UE3vje)@wp?>sT`wJrAi(qeB$Ns(`HWsXpcuV1fwwcY1Vhtc|| z>IZAqXj+jy&!Ua17AUYSG`zm`9<NVvXJ8ko@-lnMq^%d1uDmTgDt{E!HsJwA<K(Kb zs?fj1aI4a*)i~uzd%(6xFJDrz7GziZfhxfwuhkvPA|(j-&K8w&cu}Bd?~QtA`hxLa zA2Yk$s4kJTuQyh$^7@!*@5Ii_$SJC_+L4~P)Yjb=iz_1yq?ys7Xp1y!Zb{qAY$9Gp zZy&<6OaAi|6ULgN+PgANB=>H%-;Y#{a!bEV=`yv9^2%y&c)H$cjh66wl&(DxRhtEd zUS;SqdhhKODqrg-GcQ-~p7ZO&tDIzty+F9MtE-B9-tOAw_4c9EN2H8V<0!AlS1Jse zbnV8hMf0=faV{t>=g?GPTLgPS($%zAtvJOCR$1@kr7gmpEAtpkL`ts;p)+7_G2o}s zX8-&9|FZ>li2^!);#w4{a5-IJH_Ab<NwA&s{^YyB|Nj2B1wL;J%zr2C7e5{L>&!om zNmFB|{B7`Sfa6oBRs<IQlRp`!7XgtmX$wEwapk&a954_-4n^w^!~=<dBkYQwyh{<} zoABf!-y~g$D=u0vR30*2#BVTgK^P?O(SZ0*1>`+F{GJhhXJJ=y7KQzD!!FCSO1}VC z@@5%U>8!?e11z-K2*3wOS*0FQo?1Z4To-mX<H~nGAm6tDQXaW*cLng>@cVXLDc_@j z<oA6*!aWU0on8Xu`|E&wPohzzeIjkfWB1w+BQH_E$a}<%e2TpHb^Ctr`~KI$pYMAl zoqs&nb>5#<SNC~;{}^p?ex`&~zw;Bt|1s(>wK(q(2=C<Q9RluuoHn2)|ILR&$x!gH zSi9p<Hmnt!*KZyj?wrT}U_ESq%yR3#Cla)pmbS50xjP8o{K%V+xUJ8h`df$WtNhZ! 
z?$1AG`1El2orHh+;o}cqqW#;$=EFBxiADYGPJiQe6+?72Eqrs?n{I9Sn`Lia8x_)e ztUG+<_ifP8uGwhCEdO_lW|t8T8Ck<W74dKM*mg;JuN3~)cPVGzvWk7^$gd=rrgglJ z-J}oFwE7Y0+I{3N;l-7{7Cc9OvbT1cX$r@95m)x?hj3*tci_q-KKgE&+KYdTD>z0y z?uEEF;|fkQ7IzqK*E?z2CAfQWhvVLfE4V^2?kL<$+)HuW{w+;&<L<y6jr-*BH0?56 z7w$S-4R<|G#~;(QFXOi1%3wQ+8^V1NcNuiu&jSn}g-1!cQm62uq)Gdf(f9X#n5NwW zYy<8D>VYjlEwB!#0!o0J0S}N3%mk(bQ-EaPN?-yo7H|V2fFxiD-~ti>JJ9)O`UEfm z3Ezf$1ULxn1%3%U2|Nls1Uv|A12zCvK!1BrpG%)kqCT1Q`JGq%b=VaC$ry<tp2QV5 z@{@LQ$9+S(@ti*yC(*y!Dl2}+2Nplele;+j^MCl+lliyBKS;e?D5H`w9mzcUS@;_Q z@{_Tc3j7lw<KkO@C}w>H_z)OO!z2Uq0lAnGi8F(51;AS1Uf?O<Fz{zUE>~U+<N)Qs ffA`;C6IqGv^RtD2k$RV(<URs$Gq4!wJAVETV*lf- literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/setuptools/gui-64.exe b/env/lib/python3.6/site-packages/setuptools/gui-64.exe new file mode 100644 index 0000000000000000000000000000000000000000..330c51a5dde15a0bb610a48cd0ca11770c914dae GIT binary patch literal 75264 zcmeFadwf*Y)jvFwnIS`x;e^XT0FeO(MS~a}F9`!WhfMU0Of(kMsHo8(V!frwIe?W* z;+fb?HdA?8+uGK)RPE!XtyXK1i(*0`7w+JN0IkF;dl=CmnuP25eb+uSNkDzx=l%Wj z{`2x7bN1QSwbx#I?X}lhd!ORlR#<Eni^YyV!?0LZ<4OMl;`e|4X-D#)v1<oe-Wa%T z+-hrh+ql{D@2~PyR6cTF<=qc?%I|*o;YU=@J@<MlwTC_TKkNzKFw67MBXjSa;&Nqp zlT}Z+^ZDQ3clGAh)L-D(Yprv|`<B+Jc<!s1(^`(_qYqu*S}2}(wLT=Cq1J)od3)<T zJb!e5`FyG)1#wA{#WME^yJh5S?8a1Fr)7dAGi{*7@&RHVG-J2s;+ZYN0V_QyoMy2& z=m-B&PfG<-2}$^el<HKWWLd<Tm82e&FBwBYi+!-wGD(DzKV?>nGoydR|7Ez-Vp(B= z`n?rQQSV)(BIV?J_#uF(@5z23B>s6Uma-|8bMIE~#`s@=DAZ}W5P$pd*Y95dWHH6e zX8H7TBzS<6;dt5w=6Z7?U&E9NGo$Du`fABS@~H3RL)QQQ-~X2wP@;3ZP9^%FH(QCS z-W(;m*z1vJ%Qwk4EBY6nF#AZ++YDbrh@D(ZgZK3-O82f<aG+I*J!&ZBt-J)|>g)0y z4wrw`Y#Fb_O08kmS!*o4R~lPQ{gS0sS(B@e&C%>ebK?B!W8*bXZP(IaLDu~G9EELR zr}>XjgJL_7+tqBFqZmzzG+!4A*(WQ;CcK9HhwBQB#j8<hNWVgtn}rnipjT0t>Mc>& zVsB})ZG3Z~)uOOD-av>oEBZ!{e5ZVeJf~@E>L2wt=N6^ri!w|Cg*o0Dg8aUXN;Kjv z5ixre)+ntSsIcRaHg)I<#b~HLcClt}4j6Olosl-}OC=WZ27rrjY`HgpnHP=)y#XaQ z+na~}DAAzT!*3W24zbvqXOU`O0S*uh%#k9`A^1NP-eDFVg2E=!l^6;F<D!A?U5e4F z7;TEJwYp%A=0p%r)orHwTPri0(GwA=CHlccP=djS0b2`T0}K{^z-6(B;ao#AmoEn& zQesbue2F3b5~?VHy(_P#Yzk{tSPx&9Nx>F{EjJP7+sd5;F?+^aO$e;nNSM7Vh4KHH zz7)3C>}r@DQrL-DiBk|5y1~1_r+tRPj>^#`7HNGZ$g0TqsS?fM_oBJl2GuQ%4O);g z(+V=-B_dMmlvd^9H4r(h-X4(FZ{zu9W=B!&r)nrreToRNC9xNw@!Ie}SBq5}<ZD2p z^i)IO(!)X4vCF76)FENkLiD+vZv_~Nt=nf%mCpw1rYNA}-<^@=rBs&Y0T$UPvV_Wu zFc8h5=w;1R=sW<=Ujyp}%!5~?;9V&qw9aZjh~!$sKu<xmXVLTb&@g7@q}n!Z2y;C? z&T6S`Q=PuuhWm<tgLBjT1j$cIp<a+Y;Xj+`y#uMf2EyoGB^LHp1Y_6E_wA0p<t1iM zlvhGOrSwzAKX6(sv0E_7UCRL)=%!*mavAO~_Y=L(L0-^gMHqD}R3JcXBcFcqihONF zz6KDDuMMx0h~x+^!~Itjt!>aI@#7A(7jyshLwYD>yb|O>C7$v25F|AlJMg%xi2)9U zg}o*EW+UqO6>2fuccBguN7PDi8}4AL+ULw_C#R|%{R7oT%nqO3Tz~%1k00JbywK!? 
zag$QlQFlV@RH&STR{j4`*w<i*m|o%7jn*Zju4B_Sn;E};C1f-rDQMdj_HSGKd8m9d z(89;2i|%jzkHu2VHephQSqC2?Au`EmPnp%C&e;9NlDsgpe;6v?28{g*MMAc%{IfxX zg=rs}1wid$&IE07K(lz~S#%U)8wDE#6BKhYFzXiiW|;`06ub)zaGk4{0p<}mV_yd` zqMmU1F~QU1)fRNv*Jikn?@hr-d@0YIsIg$y#Y9ediobC|jx^R%oj*m*7A2dJ9URNQ zVPOJ6j4=8qO8R!AEOSgncg&*EYYpb`;Wc_~I^P2cl(p+UhBlt>AjSns%R}!^fW!s8 z%m9?JLR<V8;37K6!_$Nk3@Z9JFG)ju%&SN&Z&hM%Wl=iY!e`d?Wmk;Nim^fQ@2Qfc zRcVn1)j2IgwNG<t@#Zwtxm?tVHkYAIc{S>@a4(RK2|N*i-zp$UW{O&wqXZFA*(t4Z zT!&DdoJIZjQazWVZGP-HX1BRM<SVRQVLSMOV>IEpf(hZ_aWsI&_R-t|W2HH9C(6Z& z(&88!%*{8vCCGwR&Kr(C?^O^Eqo1_)6vZZAxfXNPBFBoXv>Z2r>J_$)Xli_qVd$r= zp{U&(!hkuKdKA6MX>3<mCLe$_MQ?FZjG}*ORifASXrGJG;D@>mLl8M-2>B0C+LCe7 z*a(^-%Fp_cw;&7Xu3v`52XzPzXxfBTX#tg6Eb4_J_8!3DYySc~Sd;yPR7sr-vrT*f zG70=9h8M9-$;^+QB;>Sm`GjGFS+c{-?686-4X}dchsagI@)M<1s%9h6vwW9)=Uun= zXMhTG-+zwP!d!RZR~9@n-Xj{onqLB;M{$Ouft+wu@yxmzvmJ9CgLKTdpB-gQihqmr zs|J6Qc0ONmp2gB4gk9pO9+S=acKh1+e^0bn^j0J8COSircT+{~_`xDo$s!-4`{CGJ zZv`h}UeR@JPC%;t6(Wg7KA(VkdkpnLz2`LOt{gLav(k9X5so=pF0fkkkH;zx>@E%2 zhJngm6Em!q#9#!@K|o>P9gb&_scT05GHoK&GKy+()0AM1N@I^h{|Lp~P&})lOU|!W z$MaVJ)c5yrqZg2DH~dGn3kk5|p)^B_*;c{mXM5*UWSJY0oeJB7sb(35&QRn(2_+<k z<%9d&DaJ*KIie1$r719rxGHnZ@mnqHke}9u^wqSrN;v#YQn(4A3d)W;3Xp}{flMXp zaOI+V$m)ft0C6ii<{U~q2+)z(d7+t@zIqfYOf2%XVOotwYf5yORna%(DS9KwJz-TL z-Z?fPcj7bZL(Dw{nTleHEd+KPbI+e-1)Vn}(G+6#4TP#N8)gmZ#|<?Tzo%74aqVtx zKug+bERZ1s+-*Z%NRL~!w}{hi^iXGMt>!<&hN^nHm$p8tgAYER2G?~BL5ih1-iU5( zHE|&pX4iudwG{u}%Bet9XF7%37f!*tp{)Mv%i`aKO71SD`;gLj+$IPjeswH7IGazy zK2}=$K#r8iP+~Ll4EHQ-_>zE__3OumDQw>oNpH;NgZk&b4!I}x<u>64Qa-X#^P4NL z1St0kP+Aw}N^5_TBPqF?`@z#4KO2}=(PzM+H=^cu-xY9>R6_Uw6iXy&ZDo#t;|Vik zj6is~H)9gsx!!;&T=VC!870n%fgfD}aYJ=;Y~_g%)J)zr9z+)Q2BIJcup|@pspUNR zoHsAUzd-&Wy~kNOOIo!%w8onJ7m{Axh3G)#xk~q5{iAesKsdKiiDpCCE@rJEz2oXo zV|;*CV7{c|#ikCPH*emG6-sn4QB}xj)4nMNJQ;O^6{9g^v}#>V(%687GU0!y=9uLi zi=`@$@<(rkgmGgw$_4Oj$6p7^<H7OQiN7ALJ@FJk4x*1z(_s9e1b)mS2(;6iD1;}c zmrnZW(ROxLXL&90*&xdPDCp~dnC&gjY*4)z!mbVJ>ZE!se|7f3Qsfh2JH`e;uBIbJ z`#g~qVogm-)Q%2r0B+MlI(Jr{7g}SS7XOxpZIE4dhV-wEV&AUN8jFd`n&R4BYFkKe za7qz|I+NAY>XEE|QRLG)?_gC+zTU4i@@$byy(bxUvzcR7^7Y!j9D!uiWoC{`lCKkc zs~DS%8ER(8HeaRMX*5l#Keo+^Z#Tv|yRxXOF<s5TXw?lyuM<bmKTqYz{sR=fF$aU> zp@gb~=n{pTl>?JwP9++gh_Y6ui&0M;r53g(=W`Lu!F&s|Hd+6qNA9xN!)%v2RAvEZ zae0ZoyFF~%1s)fkuq#yFbR8R(t+2vurZ^SbOlOyDlhiC}m2A^HI+dph(Z0<g)+VSs z{#!^zVlEXk8EX|1cJU~>cg6<5T*pX;hBP-R91VLtAl@+Bpg^AHX_GJ-V9QNg#r`0S zJUKVf@<$tgNQe3tkUO9EzKB5!W5s=%29F(sZ0Orv%#N|m(b?V##eZDQ2>ZX*q_BU3 zDy;#7v&7%RFTEZK`!{P@O2Jd!6^Pb81~*8C)epk{LuS%SN@_8aD6Fmv`#(05{y|B9 zGm|K+t~7hc4&)D2GsR9AOYMe*N2>i(waI`&9fvWsNsnVWu*hq$j0jl@eGOp~Hxz8f zw_AxlW=%LLuT8ESuF#J2YXudKQ17KJ+CJdKw;QlKAlf8G)Z3<Ath%PnQ3p<&qG7!_ zny@Re2WYREKUCYH_z$TUhk=2KVMtrKJHiFaMNg$CUhd!Y4*s;LRbi*7<>S=y2n7(_ zsQ9}p!@z_(F3h$kD_Du53w}Z}pn!WDzg-jtQq&S9_d})N886{t!S%G;U|3hFcU$@8 z$dv#vs7uK`K)FOklSHoGx}@H^>~h^OudgBgU#N?1PT0XbE5a<|t;RcH2Y_x^Kqw-B zU8!-Sm=V;-Ac|RuybDm#O(^lP86`jyb%QdriTutnL}PQk9?Lq?5%x(;*uqzW7qX_r z5D>{8emOF(0TZ`Gosdni4PFG&%p*~bR5y3sc?YJHpi^*7l{T~b7bPK*qmP?nzrv1? 
zI9QDuNVw^453$DL(ff-hv?Gi)p?LIe+NpxqhQ0a46LyN&7KLJ=w4tdnDI{Wnu;S4T z3SvDFWMsVqE9`c@Pe_Y%Xg8`t*3mbX^eQ)cS!^GFRs62|v18H(D~*lW^ST=iLrXi_ zq%^i=$NzlBTHh?^U;*1L)jkfm`Q=cjD$znPffWtZkLXZ^)nO-u&`j`Nmm`zb;$7-+ zR^5u&TF2snXvE0}`X~$Fbd)=hqoB~KjuwohPGoc4MA-)NLzn=l9yJwacZnL(G`BAD zq%{}jU|JlN9!WbYEwlDtL&Z8A(5EjPiAklD@6`aF<8}y`(wp{Dy~CNfnRW~w-)?>$ z*pGr8yGLK0g}m0K!)e>*5ds_p!Yi+^Sc0rQf%4S>qz9!p&nX34bV4(hZ&9<TXr8{3 zKt3glMLZznCyYe4;7x*mk;GUAl!3O=Mgt&0TYY3@%C39_WIu@GiJKHCM?Ro25718@ zsq3oIfY{_f>Vsw?A5bsDQ<;Hy{zq&h^as89R@S~KgR~5JP^cxuUM|nq#+RWF0<^L- z_7^4z^o>8s02)NJF!=Ji)RIUG&DeVDjQU{%vD{4Epxr{t?Dg1qUZ-?7(pE|P=(^aj zf%9rUHl%qq$9trOyA)={sxS~tPTM3T3@kmNwW+mt0T$&>BW&9p@@)v!HmQvO)Ys6Y zfPD3KqbagmJwMW=PEZ;TWg|Qq;StHOgm9)AZI5(mbyN(UFl8>bm)}r;es1BOD}gHJ z`uizhChrnVP}qiO$?)8+7#;ocW6SYh+ei^}v<>O#{76WSk01s+IOvO#k#@Gl*eOb% z(bk(70HnBgARFpj<3t<rN)Nr5;dx^z3?a1YBB4m6xsSPdoMdHYqvq16UTk9h2PzK} z@5rN8FhTpWlWs{AKrJI6L1JcQ5^bazyHX|N{Yxf!joFkwz5ZMfEZeK*pr^|a<{5sW z32+kN4^zbDQ_<U)`=?vz;hKpDUy6>QsoU^=0Qltf_)%hG#)>S{J$NJreP0Lk=@Y0q zbu0>wqPqWpy3tDs1nX;)V<l;ZI}P#Fr?dJhcq6H9a{4dhfg;wy_66B7flodh_*|h+ z|0DDYRw;54=x%Y;(+fhux{1pWtlclw?!YSszj_QH@Lfz{NTsBPscn!Ve=-wqr^MkR zv4;{pVb(=3VA+8fi^-+vUx8smE1>vKS7z}8Q&3Mqx|WvsoFbrHmG~ZtW9__&p3!vU zT{N0W^{zJ)@cIq5?fg}|hOzy0g#BDaLq}<JCt*#dCnS|*gUkdZQH#;Y+Keh=uEU@# z{?;jQr<i-78FieZUP9Cg(g|mnh&hD?39s6DEsmw&V1y4Dyv@l!MS_g2Y!(XOX}Bk} zkn{!YSI~MuOI4tEsRD7+K<$qI7`s9d#*kU#bMQv0f?#ZhHGYFg+A6f{h+-S!(<#QB ze|*hFgppQ4%Ax5L+`^wtJ_li!Oz-u{_n#)8yNUb|-<5AZcheKJ3KHb^P<2tq!DD#P z+)c`R!qh`Lz?C$X=qI*cw>N_{Ru|u9vCJ!QeEvSxt$UPm$H)%|b(epDcg5CRlTT(< zHPg30YKkI>>(^vL)|ywK<n)it*H@FgKWJgUoL=Alf~R{BEB&e|RXV%3BD7J7Hr^q` z1KY0@3WdP9g6UaU_%sJ!a~W6=hQh*sc4?9s@qa--#7jYem}$uQF%~A|e3EizQ_eej zb27?#E*SU<zEYz6k7lgF3S!{{kYKn=Hwi2~iak27mPNQ0mGQ-aWM1M+d>_<!{C*%^ z6dy=YEr<fNTTu%pX*zUP|DsH-(_ko#EcQMqy$Ly4UW0`NOJ33DFavFnNO9j`l<T2M zQ@dZIV$Gl~z861<QLIOQONe<`-jT8zkz4t8{H|av3CC(;!{L}I;)U4lIU!c%39(Ov zNCM_KiNAxz3}ZbhK12|j0{w5a6ccfNjuNf#kk0E2{!q*wbr!R6A@-B};@pE>vVC4L ziBpHdEH2gl8;!wY5LH^CBimVUmGlJEFCdsZvshtI*xw;N{sMBa!jlx%e~+;KnB5{p zNV3%ZR&^wJG*Oqr-VfPYjGbT~bwn6TtK^y`mh!5HI<!fOKD|2!wW{ZWXum{=zXVwb z=o}=bNQiAS+<OqsX4*~lov3UFe;54>v1<Zsmc6*V7*vjJ4&En)Y<q-WeVbrPhMP5E zpgurm1EO$Kw*RWCAIGo4sQVfc^Fr)VkMD3O*C?2>U^cpy&1QZR_J34)mD#<jD-{2+ z$}Gj-Q<W}v71=%7#k$|34n(i~J?ezS2!+k|E<(><gO+tb5O^rIwaCU!7%r)$DV6^a zn-(&d1Ta>4A@%^CRSL$dKg&qTwu`;lLjUN&>c%<f6vICbfD_aG4Y0-=zQ8Qh8=z}% z*X)3QD1XI_DWjN$qA|nqFjO_&g*haLY31SA#NDL2DenpC(@t8n+%@C`z^@wu<VEc# z!O%4<Y=xi;$evM~(8Wdzy$}@>BcbX&*;44G0xgA3dO#ROuFRU5IcbBF1}B(n8_cx` z23YWXSX_m*6$@;hQ1MA?@5zCHx3B6PY*l$9m{?7Dj`1aQ)8$?e>ID3iXQ#MRN)G9o zkpoP%Lo(EVnvGd48<xa*`V6PB$OT129gLr8(yGRUQ(E7~Kc5U@gSo&y(3VIuY)L*> zyL)L^$N+t|ZLy+<*s&1nWcvd3aoT9H4+8buj4iwt6ro>jsP@|Z%MK>{16hz*e1K{+ z=NDER%%qg9T+}Cb1qf8LQia9UtdPD)fNUL{xDrtK>Wjrzlzo6^&P6k@YojG?1fLF! z>iHLHgH1qQyP6xAvH)P)4*)>@Ib)k%^Tp0Ij0$sf9mT`6Vz(lOhGZ{Ez4J-*!3<m! 
zVmpgj9CM@$CQdwN2U#Z`G)GGDSHkBWHH;!CM*RCUnLh{O^X)%dw5H}g{LMiYOa3!r zv#Ux9wvBZ(*-hD<)ZnKe&dT}@qpL6{5RSQ?*<lz`?ONoaHEM_p&zO55z?J<i>LgN1 zPY9PcAY&CWLj8(e*I3eW7eCNYT5OB7Rl}a2$bjAgSxS%v_=ZaR0xEqjl^!V+;~PjD z4z0GS5r3+YN<sHst;&24;QgV#BmmA2^+jea@k`Jbft2Iwn}Pa^WwMRU_6F!DC^PII zpAxDOdFml4a%cc`@fo2rk=KzTTQOQ>|JMpktp7mwrRA;25i9DLR=RMABCX#vLt4Mw z*$GVOA4v(D%r-0K8<cXWtcSHC>8XtDZ!DI^<94()hi#VqyQRpZ00$~&DN=_8NdzuV z1rn*GeW}38RNyygRzGHi3Jd|*#5d_ZbEPMjf;~u)YJjQt$WnxMWqMDc6xm6m*;6D% zrihqprN~4Pn590X_moPJPsQ79>Il8(ZYe@G551>cioAegam7w783u5D6AVWi)Qc5X zioibgJXu=%X{Pj!rE17;vEM2|DNF8#T|Mz3C_&gPi8~Qe*qGuYsOJb2TypouJai6I zUt0S`W{BNkDe`yAta%M)&@w3qCGI9C@?;~A6d~n0+DTQdNWn2#s0b7n{~Ar5Raak0 zb#jsPW^oT$5gU+?W=gP_HSymB#JJ1o!x&UrO7JFz%JoG(cni{7T_joJ8S#u417xI; zlb9t?y~!i%TLVQHe5}+Bh?3b+DRxmB0_!mdmiPk*>OJ>L%iSoa_uRL1hu(9)6amb5 zdsvG6O9UQ~BEJ)X3iV#Sr%H-^3;v+@Xi{XWh+ZVszK@DlpO3f1ETeT^uwXDu8+v0J zAlJT9a<?eEjwQwcGlY?^zY-WpWEic%{J|=CXd`7ilDh?rA{b`^I<O?T?5zDlS`G5C zfHRcILYOLweEMja{l?~?H=HNOZv46~=q*mnl7;Y0X+bJ9Ffl#EmWbi!lOZT!>YxQF zvIrU!xoe|Gb<B%inMjLXnZjxOK^keG%9N3?nkqyoQe`?lvZ^wQlhl-$BF3BQ7>1ex zYI?EsPEk){1jY}KY!Nr0xEx`75i5ea6?t66{tZi<q3(8q&1qJgAu6u46|n{k&l0D+ zUW{#~tbf{F<Ud*@-EcIBg{+LsKN!1rfE1{UMz>Aa3?wNs+b$d1W&h@74%Dqe^MQOJ z%-QZEknLhK^7Nj9r8e2tQfE_)Es34v?L$?_?|^EJ+$Jawsr`Y#Yf#cjt3o6;u-cy| zMIh&bV{9>y)NIR(p9K1~L2y&KPm_~C79;_bYfe9h)TI~5vGsRQsq!8CQOKC&!}K%~ zu&Ar)*g>%F!~l6cWu-}pz0`{12!i^-1WqaC*sVnbx8fz^P>5EEAcGGQ<TX<x*o@#L zvSPnTm9lq(*xh-IoiaP=Yp6L`jYxG&(BBCGg1L%OHFt`7AQEBX89RLq0{T(@9u3M? z*96M(xrbUx<*4>wq|vy10a|RL<>7{@f@lam!GhV|QmJ+(`X>hS5<;A_DxE0sqC_U* ztZFvB<cd8*bg@@S3`T64DzbPI9K%S<_iXa1nV+kAgSp*E&%$zxt_EOzW*@xf;qSqe zEg}d3VT#?uhrv3ItWI?Ve(h%z$m7qU0ICl98eoYkQ8j<h(w`_S0hJbnP+}xRGC<l& z;749fv)$OC=$q2`4D1Tb8KGUuObsfyx_Vw1%CGrJ5SEML{Fi7$WIe9EAiz&d5D%<L zz)c`AvbPI+2yJuC?5HOIdRjb+pjL<V=AmvL?h-Z9dQBuk+!=Zh*w{fgXeqUlDa>4~ zNbJFEoP$Moe+!Ty)-zfGvC`Fg;k*#cH#Pet0xUO0fIqjQ;!{vdBZ7nwGR=Q^2=WdV zMGxjVO!OqJ^h&<a>w-W+>QwyBS99_Epz6Z!LhaW?6Pbx8tFL}ggMFrjUb7O_U=-Q$ zg_uYPc;XKuP)~f~3u)RF+OX<n*2}a(@JL7#QSlp)Jk2NKFYS&0Mv7la@pGlf#q<Qr zJ)fRnv}5TB&N_mgi=>D|Ppo(8c+v_rN04nmTD48ASG)(iNne-089H|$3gZXlLzLvx zzBLRW3Qz~8ekn!LK)+{Z7>x|Tc>K5E<>>8&+Q=fNiD?OjB*lJ%=pxn~e-h8aSk@|9 zu!AvG*%@CVQofFBse)tVBzMH1gDhrCvD=UY<iNO;kU$NyV_DTyJ{DAVQik|cv#3Xv z(eecK68z?><MDfuIuyToQf-b|gEKBAtBMaW1J?K{>_G{)>G7i!(zm9?4<SJ4sGy%x z`k75XN)h`QeV|}TTx@NB<RCI5&oI)1kov)sRM*bOx*y1YL&%fyg`iUC0eknX71(Vo zf^SBdCux_e`C<i#jHar`aKD6Aa>d$GL<D2^w2~#{0GbK2_9CAV^0#PC5=S2+N`(Iy zwBs_{8g;3pCU;meNuktURajK_7%X_1hTL2@Frz5?SQaAk@lue1pQ#j6f|zhfZz_eD zeMA4kl}*fb9wM;nF81CdMM7ezF_+P{6d^lQI5yv|l;?$P->$PjPASNd!a0Il!L1|~ z1Ki=*<tMQ_6MZ1~$C~h?0`-1u&rUPPCM3(YjZw#22!vwH1blCm{2jpM>hk>R?}r>7 z45xehT)Bxk9-%Fv(c*7f908$>DZ^_b9l%h$%naFoVChmtzsgV_!0&1GUTl6XR`pJL zI5C;nAj2JggBGtAH54vCNIqr|zOjamEq>rri0xi5fdS-r1d+)iLsoExFl5<lN%_L} zU1*j}m$BAmCB!Jb4`diEA=)@MJN+jXKVHO8D_F+?<$?XBifzpM0|2q^H)u!bKdla^ zp6RSkENd=w*2tK71})Kg<F~6pKSq)NpcI7e`PqNc)az8p`{g=9X^~J#{}Ryz_?1f3 zC#`DGd(t$jEsz)p`=Mq>&<O{MB&<`CusV#wtVA}M6{b*LrNxF>VaUctU{TQxo3#8! 
zyffEufN8irXad`F8}gH?hDa9Me-F0)&`>;<SIo-udsP6W4~O0+9~x=cH7+D-{eHW~ z)gUMWz{ccrup@=(7J37h0~$5*rGbAZXa^-L#OzQZd98j5?eeSxw7!wHG8XY>6NzGN zqGzx3W{Kf$d7V)8jMqucV|fl>Rl!{4r<UOz(uAL2$`_0*K$EXbNC^~zS4=Ct2suGi z3mXaEJ+PRpLFt5tmK+Y)NZK&#?|Xld;7O*F^gP0DA-jx<Xpz4fPs2SJ(D~X}yWuuo zLp)kl4EGlZLV1w|1)4Lar1751DC>5_uBBSUP_L%!@Fzv<!e;Y5`T(e=p!|2O?*dV< zy&-6j+1EUfgL3Hhs4!SNHq0=#lBPg`r57v>B2Z$YurPBSjfNRagJ<TUZSs5&2yNp7 zv~VjVh?HQ|@`N4%tLpoo5{bZaAB+W@{tPwOXb9PM>OB`#ejSq!>pg=P4p@!Nsimo= zF$l_9Jse^E*dSTD21cHzWfp9-LzheXzJ(^RFj2=G2R{SG?NAYAqpeABhC%u*{nEFj z(uaxkUYn1vU!E6w^T19!3JGwCdJ=Jj5PLXQk_~~wPsAThLnWkAPU)}C(2J0x@ezF+ zez)_vJ`^|IcP14$Zu=IdV-Km)TVEyC{U;9LAm|@61MxCDAzgdQe@cS}yjT4KiUJ~& zhMnHEVLsM|3g|Q!;kW`i>Y)Z<&W~eZ!ukpVpz-4OLjX%QePMy)z&B`mJT+Z>M$;{b zN7J%&?Mc~xQbXas#vw(LO*91oX}5kDhAv@h5-`AmOaOTL`hKwjw{bvms|m$+%)3_z z0e?&)Ko(FO1r*=N{%^GP{|``n7w;)wWnY&d<U=y>j}sh%df%t@<-YF%v-PMz34ob; z1~6|R9=lcm^R4XvR$JGPj7@9^wU{u_H<2~%N}=ovlL6n=10^+irB|ay%+V2i7UTqs zg5jQr7)YHbupxxeI!Qh$`hjg<3}v3LD|Wq={}__NirAet(mMIaTsG8dS#p24{1Yt0 zPB^Arr%&s!s3q62td1@@M_04?>*yTu`T<5W<O{EUV%XwKka<5uFv^8(F{~Va_&d>q ztJ#eFh|8elFdMT9?=yApCl;fLnoB$>yjl1`@Iw-4#WaS`6d=w60VMfI(ig$Q<QyLc zey`UyEls<+Th4({U{SAN1-XxA<0Q;Q{2X!sX0x(`tOcF_7@HhOClV{ni8MSa=^dw{ zg*l0IeP)gaPL>LrnXQ*QMYAdtkkQOu(i6PHoU^3f!-A2{F9%;pOy)mEH!wdPv_PCI ztu4<PROP0f!Ltz6(d2V5Sz?K75XxE;>m-9gmkFJ7I6Bvx)93dSWJhq$!W;tX{|cXh zTu^B2F#OYB!6`N=_5>Qmc^@Emsa1>wx2Qjcv6@3|tE*+Oh}7?ay#ncXQaa1xVu&u6 z;f|~g;|0V$umVrS`WZyy-o)sl+AeK4GNoZ0N14g86zm3!li<LcBWf9T2o<kE#YPJO zBsKu%Fp=_#>PC@oXt;>iVvB~gX)cy38Z+Tb(j;=n(@;b2+`$+U5^_u)0&V%<IzYQ! z5FpvV^~ao64UV_XLT)jd6^PSdvM+angko7(_A>dP@xoMb5u*S3F`}XNhd|(OU)&^= z@#fG0o_vDGoG~Du@)pI`5YoLHNlMt?3(Fb&6V~E!07Z#ibQ@L7PAKe3rM62QtuJ$0 z;mFG{V|TtxDckvC@=(#wNAoS&ivQGNxLgYhcb4eE0K@$PWdv+=KmZenm}wt}Gqu}7 z^XPcx05aOz6o&2@6LY8-<^$-Y7f<3a1bjh+-UPOrOrfY4!E;7Jxq1B<&aqMnUjaV6 zgQ)(5VuSo~(M_m0q%S^&iD75WiO1GV0uAvdkY|!ROMD7mTEsCyVC6PpG~@G-YlT@( zyI2eZQT5Xvldn*?noN5~v0+aZ?Mh^aqH|7J5^&kt!tX&U=+LzQ%^PmzrPOpr|IZkd zJIpyPH2UbA5}W=!og=aBSM+HI;LO8G^9EK1QDZRQ^&vr>b)auz0#~0xNg{AXb->co zPAdWU;-%zwHlqU?BE{cQ<>iX-yr1j!^xF@apz}Mrg;nYfMSAs^Nj|lPA_aS}nCV8x z!W{JDk5Hn(^BEl7a9@btU{TgC(x?9#(H5w}F+tuMD{!+#sok%>-eSWsIZNVYdKqB8 z5YR-3B#C^#JVc8qAeSO1P?kKDBBVp5<#jJPw~UkP;nS&(BE1$|lJ-bXyhVZ7t=2kg zvu!FgIgo0K(Q{d@F0ep!qzQ3a(tnLy^=WX&B;8n3^;C=Y89W+!dp_Kw^DkD1R_D)w zADPHp^^kcKkeqPJ2#F&TLy{@8>aC(Yl$WSogX~5|4rIBc-U_I4r%h4EC$mm!w&AcA zoXnE%IcFD*U29eR%?q-di$IG1z}8_MW;49#n{6~NC-6T|6bW8uOXLuYUc)XvwGLt` zohjh;%^4zw0NV$Le6eSh*)f@Q@}9j!Ktb=MptNeg99e7|qm9MX#-t9C=UE-`vl;NQ zx^+S`acpAjf*yLkrJ$nIO?3+mCzzdzgIjP!pfP0|*e-bu)=sd7RtQ3ZPj20sili-g zTl_YY2hzSn>^AtV<nBYe3KHI(*iO_@1u<9bOPV+@{5Q$DV-`V!OxuQ1lCQ8$C?o8b z@;z0^3jG2E+{NA!iz+LS;W4aK0ZdGkgabU#k5C931xG$ArLZTA@+GAIDkU9B8TJgd zs4Fp^_5=cesKbsnY3m|h^#-sa$A3|A<~Ss3aom2G-Xda`g~U0CZE;+R$bqz(a7;!> zY$upwSG(Eld=%c63|AQL*Z%@Vx8oV)Ggp&WCV|><-su;J2L@(hni=jTc+saXKqiZp zVdi@R`3(0QB&?;T#E#<{DpRwOfc*iv7!w7C(D-^RX#kttIN?5b-!9S#?N?$;vgO#! 
z0kZUFQ!sjm9e+;zWz9SKS8${s{Tn56Pu1JUnlk{$b~G3mV(^!-tffBI+Y9R8pW3MC zhbZNH*}RzZSn_bxm;67f9R!8r%{_RS=EDjRbA*N9?F#jc;okDR#R5k*;wn;PI-cg( zSJb89(1WqT-&FZ+eb9R|RI%_bz&WFv6BkIUZn1*28-j4q9WLkYgp&NaSlEsuhcm3N zd-$U}LH<zG)u%@qw0GGxSz>cZ8ng-`6?Tms+bNS&BHjvY4wAkyf@JvbuNM2<fCc&3 z%~{BoPxL{S7m#M2pfOT?Rs>lS&LBdX<8z^TMH}BK0uFX&5%`lLE?H^{O40V6AW*Qh zVN2a*v#MFu1GDQR!>B#7JJ{0HA=Lvt6oaC5HH4`|db4;!$I?jt=Xw*iN(rm>PU31> z4Xz&pMEpsP1w4As$c0YS7n|WpWXbe42z6n(IIA9<RWlm>?^a?Ly4)*92)fl@z+Z;o zqcJ?w6NLDWaFg}$|76er_pqcp=rvdeq4?ETH-JLn$)K>OS0j*kc#R7W-i^fx%jKUa zjw*qt!I(@egldphkaIe9n*m)u&L8ciTFJ4)--<&mCt*7V6@By{D)lo_m^t1RZy3)` z-2$&tRA#n8x^2{krF5o;KLK$rxw{g+19zF{f&%6lRoGYf*7soYn)p6uwM9R1TASG7 zXhs-F#@q`$i?u^|kj@g&Bza<@NI!8(8`9!<rZ?vx<V?J$pE#-E3=9}gi=#T3#sc=l zx?aW#aFeENFn2K2+l5?^vbhs8M?a(Qp`SEci1eT?2!Wa6yjTy;iNQNzJ9j`Fi|2qE zAou(Sla_6PeIUd($>bbwDaeP?83Eb0HDvpO+&T1Pj>>qA!66(;5jtsI11ma(dyrjv z6T8*B{){a{lN33K2%45+_k3wGvROo4e-5d9h^z3C+pxP@YLDKT6)b?DAw3ZjIfCBv z^5=NZQ!mOdwW^b(Rr%5?#p*w{(4D&jbzV6J099w$L$>!qxm&ew0a#joj`pq+yXM?A zr%^$*(;2dD6lv^wdrka#Obd0A9=EIK=y8{tE&I1Zv};O?T5ZSTlNh?1Y`cl9)pjQy zj@5(l7QH4b7@g-#*rInr$F?*ZY;Mf}R1N+X@4&NQ%$HxF$F*-l*uqXG{sH1JUHW=< z^;VEe?7@eC*)fmpN22YpycQK(ietgU+2lQtpQB!qf2&oUEUg-h^AlG8&V^(wxpa(N z54+rZveQbj#kQ^foeO~c#<cvA+Kv#`m15h!i*w)8)&X%fUs2x(Qq`+}Wmj|buUu*t zDF#NZGyAsA?AtoCZ|g+g?u4iC&Dl6<dDt#GCB2zWOl}^jNj9Vr-r%1KSsi;p(oTdy zJD9}V!1+n@R!v<6!S#B)_v#q>>%d90gb0CcJ-5R?3+*P)CfT3;ktQ9azx8;7gNMJ+ zE=8UMEv)f?4EY>*+d#~Q2uGUf#fVqfugz)NDz6q<KEtLo>W7gJN^<TbwLas>T<aB? ze@>Y@b*rI`QkZzbPHDsYWJlVn4&o=jg5w(W#}i*gloA!dfLB<%o@hn6G^rL&=$0-= z>po0esrDq|Ojc0$4SBT{+M|w)1i&wJMjZ|j$cj2F6xc)RHXLQV<?kSf<Blb8_Sh`F z8Jw9tPmV^EI;=*<2FjB7*vwjUoF>4M5y(~_9C^-+x`@?tVQ;37Xxmt05c60v3P#iV z$Vgf{DOVo++RSZb;zP{v5#VoNTL!%NnJWV?)K3Q=hJGs1F~`~|)n+w2(eyPspGyu% z=K%wM2X6@Z{|)Opb|0St@B9|HXqmQ-gu@54ekIeX?_P}p_Jxpu<_h^OPsTn3Iy-&3 zi$rd1*cuFk!H?j##nFAlWP7w5Al)9=v$-!bH!ZAY68a+a0uAb;kXx!~1LJR0A5xf3 zidoX%-L2<aG<e=JkBDefhwBic2Xnt55Jold!mFqnmUCu~k^OS)oi1`vrQF&t{#$r8 zqOm+tvO&F;8k>Qt@+qPwPE3UF5_y<{sCTLnq2%u1Z<}!?lnt-1n6Fd~f7T3_Qc}#} z0W+l)XOzCC3^4@x-Oy~H3Ch4V${c&FRJd3m``s8PrQq65bqIWoX^)UWy>;+n%BL^u zp_P!`;Ov*;6DchoIufnDjUh}5QM6ao;RF^Rf(%=?VkTfkt04pkt*E)e)tE?ymNfZp zqOk8hg%~qECYPG#VfaG{`KzF$lTJcpW6MQVq~XNsBEX0x1xH=`;=~~|tA;fVQH zuO?hrg&l!*ZBGL+GLG7J2CZ1$`vDoWf++g|X}<RXX}<RXN$>rE9700knLq}uIOKU2 zkRtAEAcNLAf)dAb2+ouaYaew>Cj3tev%z5)!!M?zb!;>L9aaFGuT{r}@G=pTK-RHg z#QA2&GguVD{+*bO#|7u3`(kKDkRsZwm&Zj*?J1e(M<@aB{glizh_{LKryGE%MD7~e zA@kFi*(;P7qc|v>euJ*^o6#(|rkUYCMCU1~W#@KEApt?Czqexhzv;K|3WsIWn7EEY z(CHWx*HDP&Gjq*Dh59i=bs26-*Ily_0V0H(t|3Uu+>0ltvN){}bKLkGfQi<u1WYY5 z+~D!3A%;q!<{C1R6gJm%(*t<9Y^TUfjN0T&xuQ!<rx+qgGuDlMm_5oA>Ctr!NQYvY z%zBPL0aZ#=7g0<ggJ*;JtT0RLrP)D(oR|x#{f&Uxa4!elG1pR5z<LaKGv1Pl9VMn% z*OET~m$^VFO&K3^&7!v0PT1*0-Ytk74tehzjJ)CgZ;I1rI-w;_r1NLuLcoF`^n}RU zr;Sg_iyr<HbFfGs0v$~@zi3;(Ap(U-5#hPqD;N`_WFfM;fs&@7e&}5l^KFXxR%*U^ z%r~K9aPT4KTZNfsH{TYSZ(X8$tXklcs{PE2SV<8vhyG_ggt)v7@#bj!3>byH%~n$u zY`k&6qD>tm7TOUgQnnq@DKUEh{}sxuFbiIfMa3MHpjky~7}Z=-0v(0gOYu+NiN#1A zg^KQbm)h=82kBSiG#KT08_Kriu%?j@F;=T91h{jOtgdgK^1F9n5!wn*4h&HlR+hhu zA<Fy>BnC$eO_0)E5kqWljBov%Dr~25zJ$3RAZeM#dF`)-uJl}NfzTSAr!d^>5tkh2 z)kM}9>@Aqqy)&A0qy5#QWlH%moZH0qE&z{K{%R`(mDpWYx#k4TiiJXh5=d%Lpg?&v z{wGw*x=CgZG@gdz)2i+KDtB^63HZ(p)V<-Q-Fl$zEpHUh=7_f*4_IZcvnGa8ETtlr z5^;tNSGb^U$Q=3Mq*8*(!^Eyt#)g@ago*=OS#!5~I8UhKhUY`aVV-j<Np3KpVj2Zm z##=FA6Sg0v;uIX+c4O*w$YfgvfAKT@`x*K2WA|?Q@<$bCl3@U<eSFnNP)W_qQOY~J z8Xt$z<-<=%@E8cNg=qou^ku+NS0fzb_y&<S9%+e>eMVO!T=k=mIlCIOr3iJDjtS}? 
zorXhrbY>3h6iCxMzS3LMV5xXXIF?_`ed{sGrZYN3z=`Ht89Ab7Ld?B?s4#K}F=!Xo zXgH*kRYZ!=UW9>2XJzL;kPXc!t{$<mLa)*4{|Zj$OGgIbfwi5lA4hy7af{yO0R-`@ zK`Z)cL!F?XK8<q%Y`X$Af6U$RIr@fsEQI548{7o4HYCzPpgAq*r|k5oBYeBrc5JrO zxEt~<c>+k0uRy(+?AcIS<keXd!`}v2n4dTaimYrCFBDDtPf4|#kW*TPY{c}i(|Zsa zENI%u3Ur1)ILrrOP^m{;nTB(Qm)GqA^teI<*Eji{Y9?Kj(vYp67*TlyKa&0)T3mx2 zhJ_nYG3Y&T=p~uljQRpmU}7$PdI2_eNV*$IH3kXI@CHQ~nxLExEb(s-LluyXGyg#2 zwIjsd=aDPK40E5YujKm=pwBV)G3@@$yS#jD&5kco3pUXcejysX1XaEG3{~&ijcjXA z5XbiYP=)oPLf4DP$$vKlrRV~To@ooNLGfQwWGzL;+>d`OV4Nu`4(ER;i%#NrB)7nF zg$ejwST9D^fMpnppijiBLYMtORy$=ahrXGz726taV8Lc5AN51o-~Uix;TOLrEM$A& zP=d<q3NQzX)?g<BcJ#=95iWa(b6qO@MkXue`(XtLvG9jZ{@P#yY4(Rs6ThTnQsDN9 zS`4=XSWHUwLZE*zDbU|3<TA(r=I9Q>RKS3%Ba-6}s>EQA(Wi$uVz43b(>U|z!5d8* z%I^>&DIq1>hy%5;>vH(F!no23Hp`ciLM7^W_cK5cb!?;u1QkaNM#TYizM_wr_U##x zHZQXJK|p~X_6T3rEY>0yLk0XQ)QLNUu=`Qz^<rv*wTJv0rN^-X6OKZ;C&RHv;5&87 zDLo!R9NCwb(JW(~A^)bT*=sG?c=2ygq!~LE+fK#5vvM%yc?Xa~)d^+ED2Q&*dEV?% z{2x?aLut=Zul!AFfzpVB9I<nHpj735gc=?lJNhZLv7J9DUXeP}$#pYnr%3vcs^c3s z5vW2!2$-{#c33oJ`)&dxnT!iQKt|E-cHB}Wa4hg+veej^!oL9g*z{?5eE(U^K1t|| za-+?1!~WlvYr<mx4zzVZU?zVV<^?cD*z7=TUs<)p8FClI%iezwsn?i?_MEDXP5_rH z({O7EJah}_te%#&);yqhV-9Y(JKD50TrN+8Ctet*7i^7CGzW&kg}QVA^s|<nA}IOJ zWjAI)60gi)veUK!l6IvelS;X9Qjvd4<;T>5Da0osAY8)g50{qL|3C*g+ETXY@x{4~ zSfeSX4s(m<l*9twMn1NCr`};ritXaEIx!wT8cS9OF&6aOrrM2N2@8KbA8+Q^pdBz5 zs7nmK9J3V^aRKdcDRBeI+2($@zp&tea*iG2Hw%Z${epg>L#rnq%Ia34op8D1rET=K zt6-`+lw7{`4cSU#hh4EX61~PLs`s_Zj$F7Q=-m*mc#7bF2}~k0oW-P<y8<t`e!`)- z!qMBD(CnU!)2RtWSvBF`HbOM|*B7aC(SOo|U1!&iIi*@I;BdPE2XhU@uWZ{~%r*!8 zyOvxSYW&EK4fRT7kx7l*m|Yy5W9?zCgYf@nj?eIGYemk*`)a2C9Cxm=b^kzCEvrSR zr;fkGf|{u-kdlh4p}2c$rh?D)#?j<WTwgQwm;K^uDQ;@b)L6f`$0_c-nyF9ri+h6N zhSW?2_iNBH%yvnBV!tE^#OVN>hl>ihpdljU;JkKJAR_(=)>kkmF^|qRM`Ju)H~yQj z<q~#}sB4z_HX9GYQ<+OfF#Z(OFEsX$ipZuxE-=X(OrS&-t_u~uF1AZQlqN+;4J884 z0yq(<P6dD@#Mq?B&qTnk7VC!wsFU^MR`o9a)V`DoM;WJ{arf8Du;h`Zau;fb_UDED zL`|-hc%;12E8;JsMx_1TOnd5#G>jUhEi}_A`llr{{tWdE9*nf9p;jIcRJ39x3SpBB z>P>8h()3n4Y4jVR{!9`pF1Bl}<Y&BAIVf8i=6&pL9QT~;O^ijeolwXD+&CV+;PS#F z#QHfHyH!hv`LGME71titGUQmXjbG3N1qj@joUqlkfm^T8PdK4PI+3Xk)=${gtT4E3 zeh^YpMdFe$TThf8hT0A4lmDhLbofqfXppTU@@RR2ewX7f;SfbAv4FV-qE~DeZHJh{ zim<JfCIfVO!ZYECl_-D}xYcPY|MHlty$w~o%a?S50Y&XzfR_&NE<Awq#7<=PAJAOv z*VGo<Asg=}9Bd07{sYhl0d5E2)`o<m0#;;A4@L!azJ}DfO*m^-1$rGeaU+SKzo={P zUXUUP^rJJLu&EmE0rj+5Xvb#2lNdF91kH|2F&hkb69jD7`huWYk9pSxxpES{zeM$< zbR*cFx}HV^|0nk8#5}XHYoZghYPz{o>Qj3N9Rse5sL2;6YIF5PId*L#3wWk`9KRf? 
zx~Gq$$Drxs>5)F&68NoE8^C`CMf6r78}#yE@YmPCUk&$f>V%n(cx&I<<}(VWFZd7m zi-X^iAi^A@;0?RWbr?d39B@@=ul9Qu;y8;%^<fY$sP>Q72Eu-AVCi8!(yC0p0DBa4 zfjj`nG{18ivLjG$gC+22a@p=xFMJ<Q&(o(L!L%nJc8jwGWA=j!LbDB#XEe<bkb-5} zbX@KLTiF(VnzZDxIX0_k;UFyjLW07*OZ=b0^n@D&9Jitd!Z29Tm>9wY|GiYY0i~<` z(_<A@wNNSlQkWqX`1CEJqS16JQyC^%1M+7pACUV4V(J|*VZjvOgeQ?=1Bxu#vuJ4o zwTedGX{XeQL-7i-J|D*GZ@~sI(@AgxZw&PFywk~T1BCIy77)f0X2IVfY>8VjY~Syf z*eByX=q<z9Zny@@`n{Nz>|-cF<QCGHqx-v6u;;XpzR~GBOyf2f<90Z(YCMJx1H^cu zfUdSB561L*TU|PQDx_6DO4-i;jEM$R3_UvoQUkbbWHgw^-viaBJ?a4b4%Gfkl?-gY z7DswP2U~nyz=(PM7^p{eRQm^N;sz#M?Sy#hT`}%yaE7AOyab+X3`p986O;{pApSWj z>KLzG5!tMbfgi;n9B8&y=Z{A<xN|0x&K%Ts5eatgiYEr+qBXQXpgA3vP2;e35$@2{ z5=0*A4RAtpPV=bOP8+Be0wGsQ>s$Fo+BBfRX!LMUJrS<xJQYmhA(4qBAf$=n1P+X* z_^lX^WINa#iFV?{5Jz2c!1c?EoCD4tUhvM+{*o%qJ$Sfc$swT>q~8UGK%~FtAZm|I zuZFoLwV#8#X|tp91Ed@75-jPUFybdlbo%cwB``e*vlh)pF7>dqE8=tzIfIZk#?)23 zO`DB!ocvMN08;ulR`DOHnxm9sqoY85S#={0r^1hESEWKqS_jd!xm$uZ#NOFgukd|M z)_Nam4GKDrPCw8}lFSxgLohmK2g1Tdp0H4oa$yk;(!I8?vwVC5%=IgD8SaVj&XZ%R z7v~(eYL^=BcSMJ2f1+l!I37YCBI?9A!~HF!Am+LYF?!D;DYzYS1cm81>{?`jsYY`f z?q$8@#gYeCQ{e9e4t7j{?Z9>#f%CQQRNzZ;n9Qf2JSF#pvJ0zalW%u0c7qkyc_0>- zt<9z5DdVZqaxVM7fQ}nn<AdFVE^LlAs+aUtLFGgR@H%)9-Z8Xf81Byjw(Q@iWs=G8 z55RMXeS>i_+?$X9<wv5*zg-=O-b=M%8YuT)M7-FcMW!MmnD4=gVKm^W^(3F2xlP!n zmv>T~ApuMefFZ>%DxQN1;ue&oi^Xu=BpBMRbEz$)1w`dwsA8aKYl{WGj9eP$gIojR zz`t-Cf{YH55<5Tgpvk9lQAeD#kC-D9$i*Yi^i3kNYlWK--Qfy~9e|u-SrhWSpnG#4 z#vG&nh0^fe$g?Q#T>9*Ri+&3>3p*y1Y2A<{9d;xq7Le*K&u|}vj7m@<_#T2-fkVFi zxZk5+_zlW}+z?XC#NQ)=eE9Rj*o>|wWYT9a!V}t+)xKnNVgG?J7PoM8%+KEd&2+zu z&~k*#`HQWkkO+FWWC--#2L&gab~{*@ub~*`0iq1L&}tI@_4O!Uvyswh`KL0HxbIOQ z5(>tgAo690S{i8)PdJl#R`g{CdEuXs9Uyb)$4+Z5eh8{sQ|FiXQEl6zDSlT3$get2 zcz3#2&_J-p{wg!vZ7Qt~I-%YRB*yc<qWIa$BeOc*0GkIEB%KbP2pJ{iqroryC($*? zmb}@Lx>w=7Hqla@^3Q->3j>t$Srd*G=+GJUK=<GA`u}ZBCU*LM`{AE%gxjmUgr(e~ zO7m9K)2zUiSa-dct{n}nPTi-~cUKoIaJVQD8arngS4DQ?f~{Sl3Gb>LX1E@dyAdlI z?xPgfY84=SaWXs(;SpwZ2Cmgw17>K2kb~dT;`fyJJt=-qh~MMl_n7$Yp;i5o*G;Lb z&8if*-r5O;-&5Fa)4q0I5LDs81&vq+%5Y(cIHp1-4FCJu(6E2gf<cOZo0=BA0P_0t z=qSC}^npgG1`a*OvISng3-*xjT*F7Ybr1i1E4eZz9#NQiC{?Jj`D{pnG%W&h!2`pj zT5L?=ieerf6{@LuxbHix_`d~%^q*Sbf=4P%>FxZPm$5-FM{6zO3nIJ}L5354;2Na= z?$dDh^Li+wJN~GyLe#Zz8ut>g<I!T@k-;d|K?1e_z>3PGh=Q*5uTUKAtQ!CyXYzHW z1t6L6AoiI=pefCJ`~!-JMTBZU`Zw{A*-X3X(1T{6!!>&<3xfu3$;VChVjaf0x24!n zY*L38nB}BeiNHXczksRg=Y~77gqE70O10h8$anFx_$A<{5WV<;4wi1|?cjZ9!+kSF z^!aRlWGV;qoAiml-GT0Y*CzlUS2)(OaIx6jL8+ohMaMvAw?fl|H{3j44mo}exV(j5 z0#lZ$a=c4SLf2);BnH)RH!dc&A-18D3mmyffQSXj^+vdTfvvj|f8~{cI_brHUvH4s zsUbWUx%iKIBTb<eD)p329Sls+IN{fHT7xkImyHsHxQ1`DxLYvsV@Rkt?(hpxMq-Yl zAMaRLh@LzNvNV?sbNe9x#x0J9`?EfnA1QDwL_S=h37G%zwSYNS(NA<NAPYZdh~ckq zPQm|O`1r4o2uad#zxWu0iB>)x?-=a&`QlW<lV*ZfBv7~4oz<s2a-T-8j*y^z31&*{ zTDXKC4fz|YCh*ItnsJN!D;AQtoY_W97q==%ufm*$Z$0oa6KO1<7sU#_oi_;zp^;IC zEB+HzgX#XySXMd?bh9Qt_yvOdtm7-RR0({WBIOR`5JyQS@K?~7GH%Y9U<@bX*a$OQ zW=rB4af)LqKLzRq=I|{L=|X}A=fPSq$y+&}L_45I9XKkIfNRCfNd$8S{|^Qqm;6k! 
z=;b*UI!V{(fo{SA-A&jlY+0a-y(o=AfXVh(4N!b|`EbCMyq8?~D)%u3o(sTmE7o}c zET9h1@6NF#a`-FH3q|%8?#9d{RBhq8f1!NTFyvVC5FX)xIBH5^v^sAzdivpy(V^T9 zn8Kg`8$zZ_tOqH+!#*6#=Co-l-wPHIC<1Jx9yvGw`9Paf_|E~%xO{#e9^V;FfyO1k z5^Yi6K#?#zLD$&D94E2C2{oR^;n{;@aZ;u;jA>9({D4s^*Q-)~AgwE~^E9?iX=3wa z)ds?QsC(y&R&|Bk6_jA&a>2y4MVPpLhlz~7eg$1Ux#}KC17Pr%K>gP-dndA|JFBJ0 zK1A~tXl_XLjzim6up2PO$XSV;1-A|(AaL`OBt6w+xL<jcMpTMCk5bq|48(p8cTwR5 z_i7;tL>q=E4nd`~sP?cFS%?(U<dnYcLY<VkRu{4~Jc;Wwi?G!@hTF+6a-t<Te7}#I zMxJVx^~EFLH13h>gCoLqVecL02N&vs-Z`>97fA%>oJ5GOdfFoTrd|eTN+q``WW%Q| zU_JZ!4r&83UC=Cw$-yrNWeRiO0!o9b;T+jy6qq=alMhQ}xQQ|d4`fry#1d6XI~m-4 zfNLmHD*!~*Ne;pj)^t-uFI)t4b3%@}T@e275bpqq>-^2g$+Dmo$DI-ae!?iMi-!B( z3r&p9K(jb;n0wN;*c&K#&>NPP11lDRIGl!(BCk?wv}&0GS)lGgx`V*A6}vf6Z7^1Z zEkRaeZ}m8Dm#q796oo5(*t+;J9I+1IdpGxjgsg&u(zFrMn>Gx^JiRAl9=d{?Tb{yI z!cA%YvRom(NjRE+9(*(X$RgE3Ic$M9BOt@2ZrkQz1_XI1m8>l?TBsq`B<F6F{hOr6 ztzb-;ZMaVZ)J%p`=zwZh+lYvy$WQUqPdKF7dlBGQ!eEn>F~bN(bK>pr0I0W#qDISg zEc`7UA(z6}u^>V%!SoWK&O)^({$jX?EkL+E@oVw^XOQt<v9BZ=7V`rHzZo=1rr0k8 zIYO$!J&z#OlZcMZauKx#l-L_y4+KOUGTvnNpz6GOC_9Wz(=xQoy5Ta;e$jt8b2mc3 zK(OYRG1OwI+$s1ai4s&CpQj4uHUNZ40D&$`35Y%jJE0PLO5{n+F5HW+5h19TWBip= z4N7jOQcg!E{LRvGGC#9TYiTB>(0V;MTHJKMI0wa9dweA_5qpqo-%IsuJbETd{ZQX7 z!JRoE`Aum=0-7{0I$YM9;iXD{jpA=!6qZB0)*L%c-Q4v3-IQDY7v20qHR=62fc}GB z-3LkLtgc>7UEP3qF<RGS$YpULnr3eWcwTCtrkv54EJ(`mo1<QA5P$QMuQkVC1lO&E zT#vnbYCnkyUXhCrKHx#~`zD|o)->|H{%!6C-|k&KL2Lw)gPWZ7#pn*MPNQjG4dCe9 zXYUkM%C}>fvxpRmu<XWMp5{I_pagT9i3u3)eN|%MGi`7s2>QF0y`6C4JTf9#J6@$H zTS5Npl-XPG2N|vij}IVhyov;>LaZ)=s?2Yu81A1XtHh36@$HX4iH!JOPo<!c$Emt4 zJbMFbSPHKn&}ZGIerrNN&6KOBc}L;KFQoDp8)-V817hNDBdB|Dtry~RPtp3h+)HaA z`7OJ#qLKt(NAEQoY4PlTu}kl|4x5Zv+f&Od>9KGnEq(5*d@nilpTloPGceTT^NU2& z1JN|Cl0?rw!+$_p{%3^zW7ciN4n+SI!npSpYbPz5;n?)I5UqcXZ<%zJ&Sds(X?-}) zsefeEa{1{7aFcw#2M?3Kh|6gENe_qL5$kc{A)x15$W<$-g05g5&Q}gDVjJOBfCRc9 z2%acz{$y`G{CQC`<P@aO1rvk_a)C%kbMt$%o!#70vpJGN=9BnaL83@6(!@TV^nHY` z<cDbT;O(Rvr?sJcNN=r#8qxwnKB{|#5HtPRCPK`!0x<^^I6Dc%OneT}`X@ll{!-lk z@eL4@BM>u@Zvr4mjGQe{?OSi6<frhA_}EKlFHy8B2;Utw7f~}21-*^o{^L)GhP4dC z{Zs`}8JXT8AGmoGb>n#4J-tonTj++=tAJkYF(>d)Z-Tk3^&5^m&9(_YWdb$0`aO9@ zkz`ef@2PEpm#3kcvnxp5|BY%OGcO=Xdk@_ljWbfvJ&?Ot^|R)lHebfUSc^6iepd>X z>q5A%3Ae7)`H`tgY!<F*+>Cqd7iQuEQ8R#nF?RCb--6F(fV!02y`rqSqYb3=8mK7+ zeF@3g(1pdP8Gw}b@ckUwXfjZbifAiOH%E$Z5$rAYZ_@^a%%Ar)4?1xb-qaBx|N9Gu zP@*GPcR_*|`!{J<Bg9X={XKhn;fchDAc-}R0jtEkdE^1yJW>TDe3Cq|kG=j1q8LIA zpa171UW6rMOHsiCPR$c$JD>{WrEq!)V)w47ubqLT=Wr$!msr-*awtxn$x}C}Q^e7; zMB=<Nqq8Vl#gYO~hR;H{-C+R0$6AVxNwp5J_8>kQhGfI4-3kLGDLcddPbx=AtDwq< zV-`Ojk~8EAy0dP(;y+sTxy&}^HbV-&u&8dbmw)q?VXTEbXNhK;pbAApYFKc?@=>gk z0$yw#Pgxh-pv2VN(+WF{x~LV&Y^4z%Fv(VS&~EB;)|}gdMm)i~DZTYV%t<=%tu8@} z@uyLBu<pTJBk}KGT`s>LpnPX%Z;r{*b)=RBCgIaX@IcT^ffz3l5seUPA<?ESzEz3+ z<h$^V`vLfJ0Uz%~?fr3plSD*$Se;Vv3M?c6Sc$dkjI<{au{Cg0KQ>*4gEkP2qIZ-i zQLR*oE-AyV=;wa|&G<Gc(W0Cnb9>iYEbAd{fKL~*z2Rtab}(9m<?-w2O-^j&g0Y8< zpns2c1Khc4Aet7jZQ`7w`DH-C9t}4R^WZiFHLHldAB<kK`)z1*M;q>|9;9W~-Go=@ z?SoSAgJ9JCFT91>9k@oJxFYD^vGj78wc&#+a_+W3e!iL!vTgG3(2l_MU1p8BjdJcL z+26P%BMATFV6?a*feU(DqeUqBffShor~#T3nT0?RkzqB(u)oxyH@LaVe^5)u{p>+j zX7Bz3O%&V;iIXv-lbRsx)%A~^vh97t{X8HIm-htya4npMI+S&=LeoD<UjLu}U{!qE zV#i&5x6__~Mn|Z-n+CWtJTn%)IvcYa-*$@063%HXgk=VU-_gl$n}b@g2gO;+08B_y z<TK2Wmh`PK5GJyD4jj0XMi*GBVJpRvf6CNA(+G$Ov!ZNa9|O2SQ*Q-m4fn|hNWS$q zN|Bk!$!@Y>oq<jZYDHG;ETXxNBjpE>2}}z%0@>dwMaGFbZ=wq!KhCJ~v)XE4LiR)U z!97tH<aiRAatq318!<^?MT^XOa5HLBT6z-o#rKOsolDD16e!(Y0tK)og|84OxbQnD zxaIaF3ZN+n`P<d8EjH2pp?u_FIw{*AoOxh%6BuX$Mcf2i5)R!{=7)Pb1VA8#qnFs~ z<KFxv2Gpy~jsP5VA9jH4WWz-;&)=wJ_M#=>O7%)~2Iw^0H~bjgg`I0=XRzQB&B1M$ zbV}@o<lDDv!E~GB+khJ^!(nzX=<g;A4#=otSTKs~yx%7Bg0DR+e>S$rj_V}(d=HHq 
zr}IOkPFR7$VYXxu4I>@anud4Z{&1|gg6(8G&=IpYycWesCkJOa+#!!te29fLpu*lP zhT95g!{x0YetXcr1^0}fh-afZgiX?1dJmklLZl(QmHbB_?GvdkybMQ_L6LhGX7tgr zqJM%#s)?_^l?LV$nAC|j_p1|=1C!0G6GWH7>AP=KitS{VxBK=d^y2bHARGeIV^4t% zG8}F;p~hg5D+GMVnv>&n-Th$XMRtf6b|3EBG6xG7!1t4yXh`s77P^QDRLz%-#ds`1 zLI=Dxa0Ph~SGk&FGl|~^BW7ZpSvuJkl?IALS;PJDd=%~>SHz=qTx&bO93`;s(7mB2 zVQ+>%;snHy+*_QZ__pzJzoRaKA2RSm27Va3*OQXpzULb?6?7euIQNe=c&`j~nFSTF zh?l(mgOHsY@T3K}gb+ZE<M~MZ2O<&7QxJX;VQ4dn{wCpdC0^+YnGf)eZwwzd3<x3f zlaAwM{T#<Du;yoDy@&I-xES8F9`xhw0pjg>;O*e=ngZUAJ~>|hEx-}H-5F%AFrXBA zW8eN_)){2SaUpzcp_K?}ItBxPyZ;U$kl=y)>#F;}51LeGbowxqOI%^N7tf<amjkaR z2j3oyy1L&)q<^~<InSg+DMAPEz{{mt@~30ke0<~~oo*{-7545s7Gc~<i&^t%cySYr zfaeMtvF$P3lhI<hyd&uU#N<Zu+r({`&R13^`R_6i#KK#_XW<%_r0mO6j3%Qumn2y3 z!JCP!JBa1tNb?Ev{@q@d`xkDqTyzlUS0@q6h35ipHldshgHp^k5^a+UGJod3h`a^Z zf(^r|oNU6$)ouZ>f@<7hR$LZ@zZTIl(6<oLm^*@#TmZiE*Ht9G#fe)4*}WBL3;onU zlC-*(4LcK0bYgQnHf+Q~=vMffa4Dr1LqwPZ)9B*}yac&u?EnOO@Hu60Yycth$pi@W z!XPZe{n5RE2CU@-O^Y4;TmlAK<YFgHf^&W&CP4s`K*1y^!6eA;KM9huZc>+D);k9R z=Jjg)<gdjXFlpJmEt}>*faX9x5k3h0Y4n?Dp5_28zUJ*}xX?=w{uGERApEmWOpxRa zOqrkLC_Bp{+h-5N_wV3-E<OH7&>Q?Sot1af$9b-xBM_PO_6&TNM@X|>jcKqJGDPSc zXLyB9p{voZy38oMh_M&r+klO6hjybGu&Fp*ZqHCeqWC0WXGrfz$E_(ec1=z6JwUV} z8bCv^KOzzz2&8|h?-L@J`d*+1mRp>kwBz>k*%?l-Xpa(=JHqstKo-pCq}U$u-9Q;y zV|@GXJv25p{u9U^{p(wy)Ep;Q?8<+wMuiqB$DSeO1Tz9kO=C6Q0mc_NoJl!W2k;(d zS!R1-sc9hoZgk?3j*M(-EC;WlY>LaFI1j~PHZ%q(zJubS9}g!1Gg>LOlVW?cmqRt2 zT7W&09+FN#nqMkh1IhQh{Ra+Kglw&64-mc!o*E-DK#Cqu>o-VZfDmWz9i-F%mGlje z9tTy^K*Jhu)p`dAT!#h-O26JF{+Htu%;+IZbfRGzAe;rkcN#H3K-@6185y6L9jv`C zhNsFLp1$!G;{%?x&>SC(1r1B@Fqz}i*l&Eo$@U1pJ%nFSLO27cpPfO25aJZqL2>OA zw-a!Q5u)L{5d#@EAu|WaiO9kK)A+2Voe7<v>%fE&cf66oh=rVdfG`x!%;u+HDu%Tu zhks)RJUn3rCh?EWKpx*K0-1c584=*EW<cTZn1K?$$_$k9zng(F{=6BO&wp<Q^7${! zKn0JQfknJp1Q_9rt7e$kCZBJHS5SD4878*EOU&>}3J1+FEwen|4F7||lg%)eE(`aV z;RXs1GsCSEcADXx6h8S6LI7*0aHkpWpzx<=m{Yjj40lp^s~PU0aDy2phb8`o8K#3$ z{6#ZN0vmtE4ChdIg&FoxIAVsyvF$}>IFI5VG{gB6E;GXc3ePsfboiPpX1IjH(<rPb z?{b96ZbsiY<NIT-3s%B<>fpmg34D#t?;2~y*v*)1#JJ6vuU}2oBxr^f$G*BkImq}8 zc95v7jWV*CIQro_WX8N{#!Ny?hZ*x1GX^WN>jN|9mu5^pVz!zwHD*izF&oU7N6Z-L z&|Ry|m^&yY**(+eBoANZB-^BmltfPA&y$07R{poYB^4@XtCpbAYWOQH$)uOMy@~F% zg4-%iMTm=bVEuE*b%PV{;ASj*30SaqxD!I5f#d`k2PGu)>#6qfz(`^xR_TAiSw;B2 z;5yiLT$cqmEc0i#(EMCY;Ef>ghEO6jKLerpNdap69{?TE4^Vt@6kpDOh;L{)xBw#r zAH}+~kg);KO~%4z)ea?aMeiB$_<RY6u10*y_)}`yR#caPhNaqh;9R1r%wSz`uz^z! 
zC5fk-@x2}mEsBoCA3~Pieti#uXHrhGg?<l$?|Qip!SvBoflIm08ZsJtk$H%aIS9B+ zOEsDJ7jU^5ZJznBZ#^|X#Yb!WX!8Sn`1;<>7(3K?OX}NupRee1|2gY3d|TjGo%#&l zJAI$u!-x0i`+HdYoXHRHwIrm}$M<kXhF0<a{Wtg+ovKNGxzFs!8Ssl$a6ENk82p#4 zQ|%erWYV4)t%%dUOfGHOSd5Y?ndw<(x^_fC)uS8elYlEAsidh_qCbisHQcV?fREzG zGNpwP#2gN0WNXtA#4HVF<Y>_4HG1f?#@lG!O0A#2Pn91n`i|r;NyJI$^xFH!vhdB~ zRz+%qV#92`&*#7c#XmMf^p(wgYzKQ_bb&qqS8ec%Uh30J;~vXfm^ft{^iHGC5|Gxp z3~B+0fccbtsNo)Yn=qsdgy+GfD4M{P2pBH-Q@LOG8!AnH<UINH?&`Tt=P6Qo<&&TY zy-B|_oY~^+2zLI?UUz`+*eS;FS6)ooDQXc&>Ccnec+*hv7f`l;%n&p#>DWv`*6wGh z7>elcGgM6GH=#aQ4yN=~OPkw%n(^QZ#K3@(p8#Pqfv|p-iXpw03c54l|Fm}|@KqJp z<DZv>JhJc-NFZT-NK_Psu-FCy^*wme7fCci5VS4{Sxht}F}aV$A_NkY@JN5w@>5&2 zTC3JpTm4%Xv}zM}+=v^Zb)l{|eW-B*+<5=*nR{On0<`{?{`&d<e|>Os=FXkv%$YMY zXJ*cvLAnnOHs2+@y`}mk&K6Ez=)DTrK=ZR%akBZg_BQ|69kB0a#q)PrSqiZ#kG5N( z`!07lR^1|LzG_`7^%?2uo1{c7h*QT-`}(NRAYM2hJ<E*;i)2a%l0(K=I`wy3g0<%k zoZ*V-Wl#-F9FT3ekL(lk<|nBER16RLr;d2=H&A(v48Lr&g{ws)p=E)fBHA#n=Jkwg zFv4y=Xx1s8k3&8*$OkyaPg(@HQwMksMbc6d45!VIaC|<=`drifIbVMsX@8ElK2PZW ze473omU$$xLoB~zhn`eV#b4BOMw3@33s9^xgwyue!L|^LFb=|m5E)|+B8kXZ!`P2; zU~jJrAgZpVD4-e_OTu?aj9}6$@&V&NH|Tu!id|3!j5cFhc((w|ky>{$c(siHt#+%I z`nb8}3zG4MUm{f8ei{QOL0pf0m=^j0saEOib{Uh*(<K{%jODPFwWc$Y@8{az2b!bo z??}>euO~sc--EAaKl=kKa?f%LTb>wUCWJohXU)&5?JE=QyL}l^_hqB0>TdcnYDH4h zm(hX2!PxYhpu@yqY%;JVDPG>jm@e6I?6Y5GZ~0`R@k8^VO=G{1^kgJG!F&_nV?_Au zSMrGlHPA9xeCDrNWy4@`oK&x*!u_Mdrk(GvlK~AK-n(PPg3*s}K(m}HBjfpI9%8%F z42aScl!|{;hBdRE*Zr}V5-iHNL~218G@N$nJkn*Bn<X~7Zj^w5Rm77e9?})PV3z6q zt;~K!B{~h&8S!!Z*?ZO;&dXTV^XycZqJLBrIWK-=s~&QnIjYXQefFb}i@Wtwlz&HV z@Gk{H(_DOw97Xuhh$(0ZaJ*uF;AHbYO_Q=rcQ36=o4#AvH`DuFot?BExiu4Gb>BoS zf11CUE4O;rjTak^=(y#zUhMEjt^gjY`A%-k&}VMUNwgUqE;KMNsILK*Z&+zy3C0Nt zot|~$L{sO<pmiIBTuTv%ZF(*$#JQ1g#|8RX-^t#!b}o33ImhGkELW!M-%hu13yhVU zEDWdjajB(Hc4N*`BdIZGf%rJZ=LGNL$pWPe$$@kU9T+H~I3Teg02Y@s+us~j5WH4| z=E*O>C*A{}vw0xsa#%LzEbsod7<8drPd?k!nH3u9J<ulVrp76)xwnev^o%9Z%mtg; zccP%*Fu3VCr<ZF4j|;@)Jhgau({nL$nr<j3Up)J_IRhEI<+*a-WU2Ffuj{^VqQA7s z@DrL+cqL(C0wehA2uurZYuX!SII&=bTJ;i07B~^r+cD-BY~O8HB3Vi}4z!_um*iQu zEi-EWo?+nwZ$*Ert2(dcA_)*>L>+kRD7%-83nRN(!jsL`sO)a`Y#&+Y;aJL)iwq*$ zi9h0O+&kR|tEKHtZp#hsK<L!`%fZ^%9E5Oej<hDtxfY^x1kpVATWNjT)+qa;vbT#& z@Fgov`)CXz3mE6q2flL$EG~^uwgpi<+qe;TAU@~Iz=-{xVvf+8PY_%y=+Xh1_e)$B zwnmc99pV;&;q<wYZR!utl@&JGrslgS-RE--2C;&h=D3G?6uol;`T2v1PZh9XK69Hd z!zl`Hi43^AZ?pEq<-lE!=pbViI?0^P>6RNP2s`$+RzoAPv{u7>9M)hABkAL5mauR= z#mO1*-mgShSch8+3-9E$e}h)Tsqf?6EiCxnQ@zw0P9!~~1=XEw-=TZ(tror|;64&c zAS{rArPq*v-_?f@v=4>`m`@PU#!QO`KO?YKW!S<8vbd%Dd*3Yn@C&QMg&f5q98^-B z7%!8fk(OK_nxaSr#&I~D1_n>_lFi+)DOW!pz%~t(WYFizNlbnaRjepMJmienQ=6cK zWm~bZX~uD!D^?W{*ke>M#F)II(R?V7Xg;4H6ieD|`LO@>sE|+(526|4lO0`;rSivl zC@NoOFfD{>n(^#Uv`xCTyoA$UJ_oOZO9NLm9sdyi_zWYkBoxsS5)~kQUW%r0gf^gX zIp<soH8OcNG6vG6^rPK~_*v@3{tcn%<_1+rqY9;LkM)uv{e}vC$gvYifvo`1t$9?& zhNdl*5q<97XW9!zWuPl^q4mqgK(zn4HHlj!Ije=ze}$X@5H_V=xb`X{xuK4r#~(~H zn^%&&X!d7`W<U1LMPJ_aa9l-8wCKzCY4uuZGW7fIJ#q)fKv3{&z8Sm);VfUUMGV4t zIa0ME%bWAb@^P4sMLjd;4fJ=}RD7&IA!Yp1EBE0v1A^;_XfX`*m#&h?{+zD*v7YQ& zhjCm`duT*l%~QfMNcP$$AA^V4?-pU(lS%d{_(~i5Rv3J%RaX`s$UUsaZP#eXNTqQJ z`eV=&Kbuy#)wRY!%Aq@$d?9vsHj_YPKG`Fa>PdptTLoW3WU0zYI`KA^XiMn4P->lw zn{7YTctrunj|MNj=NGWj^tf<fM$?ST8maBTiA?L$xw_FvgkXUTZFeM;_$Vd{!lBql zF@b>M)^EVcirX@rJwXKeK{rQQsyP;ClUp>Ttj>s9W=11QjI<+Gy?gN0sDfuhPSQ&H z;D*cTo4_-On+*l&^xDJV$@Mxx-?#J+qU3WX=%$AaPt%M)t`u}nIt<-mM?qJ_rh^3< z;cqEyVzemV3^q${>c)66&Lc3^$jW#j%{k4SV}&tK?v56^2-GL$ByITxsGsC7Wg{)A z12^`qd)@WPN^bjpUox1pr5cmWO$bgqrM<FQcZ9eo%xHe`Gx-#e7lUF`iG8I$b~a_2 znjehx$LEo=txPpLh)EQ^GuE_xa-s@MZat^J`6PYYwbpwE4Q;Z0ebC44VY!;<g)v`+ zeUlR{vGJ#L+?*#(o*m48PlUpZWbA97B|WcQp>i++MLv&Mh4f3UVigh@R8!zNJ=^L_ 
z0a8ikSkv*9BxBeA5%)TH^5kBW;65~e<zn+hbBy4@#ssP~ojYlSkJ6(;8+@%BA2LxC zyoBtU!X8)aO$5j<4WAXnB#Wr<O1~vJWuaPr(66u4!t#@==~>d)KMNzPYkrHX=||8f z$13*ClCbtbtc_f+w5v_ykl^EpwJ6Mv4MlU&k`>|dTSfPCe?SN4Tuq*pGC~Q_*<a*6 z<ky8F(COR+<;ZX0gkkJGbWO9zf#=3w1;;;T-X0w9KM-O9nb-bpjOdNGo2TbTo5Ahv zdt-gkrVmYKcPIma4;2^6BMDOQ3KHpb(-?De_PN$T1<N|9&_rw*b;^+<eQQ_iSv$-s z;V1f*ESU%x{?b>#;&?(~i=d+^HVPLKQ(^}jE^>PpOCk+Jw|Sh{MR0HP^p9^UPNdzm zkv%DdcDH{JE3<#hlX6lovW9W_PSN3O+r~jX2l9&_0cuSfw_SXLIZ+91)!kG^W!t!D zu|AwB98?Dfd8`dOYi<;b-T5Q1u*TT2BBQ&#+F<QtF^I*O@jih;@FS=TbLjg-(AY;y z#JmYvOgiJSGDHpjku)KF7I5C&$Yk9s7R6;)wKRu<vBf$g(H3IC^`ZOuk{cW?S8ME{ zqinef3ZO9*{Hu?{K3F=>c?wl}$)t5&dN{4fPsfY`1ih7Nx+)!x(yE_)WA{ItcAEXU z(f%B`aywU)@q$nvHj25U5~Y|Q{{|1CWcQvhmN8t{{8W5f^ZR%23s)a&UwBtGA!T3K zR(F_gt2>-6iVU}J4~JWqIzrdy2A@GS!B)E2MSVned)I<w@SsQ@wXhP}9p48-^E^53 zW6i1uY*(^t4fiFBXet^NujZHPlXOqZX7V}g7NH4(e$F$8Cx4-c9Vd}ISKV=yimQ1i zWh%%yV4$QUa<aC$A%C)D%wzow1etq^-UJdWb`;MPMIPdb%##<~-`N86O}$D5PU(r- zE1K3Mvh^m;A}%%rSeKX&uWJF^tYBA{1qr!jZRSxEu&4sBh124#ye(VV?QAFKaZ#yE z#yFMFE^{)wrzml(nktkD#G1G24d-oq$&&r&o0pPPYq>wN=X}Y<Kh(Mxasqp1eCIMw zn^7BFK+$GQ&viY_2HYlZtM^Z0TRq0x)b7R$lkB!nG#+}rJ3g0zF4mW`(|Fo9ZYTO< zn^`yQJExWbmHE#>>z*lD6K@tJWq+%GkH}TW31&>~W|(EDxEwk5=mmmhKeeaQhfl5$ z0K+Twe!r~cJn2V7!(+)qG6BnKTAHc?V~}6$JFQ0W&6>bn&|5kR<+~mhy$n&9jEZJj zVQWvqYT>PBm$WQSE}(;HIN`GxG^KWp+jF#upk-3^Xfh;1ksh;WlndVk#B^)mL^D8{ zj#1oo*Kv256eTo5_A*|w52P-6+FU>n8ge3Snb+g8`V!J+z$@dZH-E;W@J}fyP*UCb z!st8Yz&?5cnu%I-`O*@*`)WYb7Qdc9jAcTwReNA*6`j*BxhF83mLnm9Np~Fa;W+uw zB(~M;F*9=hkb53vjRp$}r>_<82{x2bV;ae-;}7t_Aka7_kaUmd5oEXofu3hc#c{*n zbLP6ult;Kk-@!A<yi(qCwl7Y{r*Zn!83C77mF6214>o0=XtOiKDq1uXjcm&>mWbyf z)v<EhYn>V?rTZQpx$`VbPX$CP`q4NLHnSOsu0{N(>(giFPB35liM`>%`Pn|gkonQI zoCtVW3My9z2}{`4;y8VzqmMCf`Ww;jBYNmcDex0gfqLClt9n()LggBc8|W@8zcn*T zRH??+5J=lh;RdK#q-!5>%*Gi^7h^#jk9bL<KKY)EZbz{UnD%cZ0iMwe^ppQ=6*-sQ zfhB+F<q>-MW!x)-XmU*#^~%&qT5X*c(V1SER~bw~wF&Tsg>vUeVbfzW197ZKmyxj0 zQrX#MUd{fJ{w&L}t38BZ-DfFg%Rnp{AK5~6JsgwWX+l5RkfnviZP}6A1GabmMY9lT zM%Kf=7yMWnXJPxdVu$ou^I<Lg7^6IE@6Bu^uoxR%1;p6sYJhr-7R&vK=3oe|@v<j9 z1Z(6A!6Y>NNx4`y6eO8)uFq@)2E8%dWq}W^MPH9`EuONrs9Thb31T)qcy6kU?S<y7 zSB2!R=1DYFIZ^kprFUZ_xgK7hDNVh7uQQ>&yPVw06H$2&TF0QFc%4|Lv1Mt?Zii65 zSkAn16Oz?O<^?gSw#PhJuPZW;!F>crSVir;kNjv%fobM&sqj8*YcEMo{BbWOAR+Q? 
zJBaqJ)z{RC<&}2-s;_k?x=|?PZ(4@N|Db$EKw%fI=6lX;?+1M+LMlw&2^~B_ED-|p zx#oML18GRsJ;vhWHv1Enx?kVab_g=`)jhJUwTjYRZ;P!mmo%kukOX^7)pF;GTp>Y` zIM&Geev?#RG-9KxS<A~@m&mus$^*`^G|sY2HyTjjjja~3s`1q6#3~iBLXY08VrlL$ z--aY2L>7t|dS&l~@<j#pS&7|i7{(N^l;}*&0T^F+T9<HHn&v0jyG<}N;XE5zF+^x# zy5@YSYOOIWkTr&4>fR%DFO2jlH5S|&dYirN!{kC)+|eqB!PwbXfWB5Uq`!XRZfebk zn(jOmOnVk4_5M+~UUUw>^tI%o+4%|DiO$^C(s0g;T9G^($rN!&3S%2vvBm>R!|GqW zH~3O6(wZZb5l;JZ1`Q!?Nq4HO^B^<7D9XYuX~lT^f~~hn{y9&tIA80MZ}*OShCBGU zM52FQ^cGYdKMp>}A%J!tX7*aFu)#I=>nNK={d@<zX+-G>|7j#V7H)LFP%7!6@_5xY z#J@XfeZHJ+%emeW3xfAiQh~n)dUIY1yy*-6PGmP<PDpeh2l#?jqPJ`G_hDji%{_d{ z&DkOIwauLul2C5WmKA#Pc8-2|W<|UnE;~KEB0?u?F?j$afGkbDN;;|Os^qBp7qc(o zBA493##3?|2ut{`Y0moCX@19I7UbmSkI;J?r6xM%81d9wq|7VE>6q&yF`J0VVNSTA zC-T#F<hKj#l^?Kx`6G)zOF$>Tw9A+CnX7pp4I?iin7dY#p+Tt?<Sp&+c_?mvuUkOx zQIgk28c~uz?NmxBlDWY^DaqYJa@+gaTH>EQ3F9&%QFK>C#NMWrHb2vW>j-R<1VrH( z(A4u!y`URT+cjOt=4$?2sw3DcrH?FS9bTZj2pG{q-7Yk`G*XPuS;&s6UvQZI>BM8r zGcG;FE)4>^=v}U~bx#Lb`;Z6|y-U)gerlZ8ja{x&_X4^g^c#A`7P~sSAS{Z{iwPFc zZcugK)>|L-Jia3zqIlXZZ%<ec?OM<7@fe8*JZDlo){XLmAs<aKAuq^zibB-d=Ru)6 zExvt6_!jSCG~1stfBcCMxr?K$&58-D7rRI0`K`JYLG)k;3a8zyVLn7)5t@YRFMMOo zdI&6(_Xc+#7IYm!F;GZQnL_L`R|Jt`exc*w-xi|N$aUJy)N0^X>1EUt+dFP@XMUMO z$>ET%Wjx<yP9>4N;IrmLU{EF-Omm+#CsYe9%Cq}SHV&5;d+E5^dfw?o69w<P!9Hl| zZR_!+TgO#){<MfGIN`pQfGB$RD0e?;DR=iBXGG4a6FFxoovx+h+6R}&aLZ`MoJ0oO z;N_G7@%89~?Ix*J2HP3teQXI@gAKzLM=Yd=jqLwjeeA)uvr(rImPv~>-s(w<Cs>$_ zu1=b)fwPho8FGL7DMI59f*z-)2jeUR&_izD@%Cr5P$X>5yMUI3M&~k-PL4YM9)m9F z2sz2UY&<adW^v|DD-(EwZ^%)*O!E;6*HdDBRLd^*vuj|izv`+PU6AvhOH3)L$LPYC zF+XGefjNM1EG4MJ;IXAME{71B?<IsUyOJwHPCLX3NG^wYT^qOr@w9`y1*pG|Sf$D1 zQe7I+7a>jpZgYm)@~4gud=YNzHcyx;)giM8Ce>R5qaN)~qUL-UODt>bFrTGc7IC_1 zJN@-m#^zjXnQaZco8K})MBq9G=B56Y(^ilpIaymD-kcAOsrge+U52NTWmX)pj=NoE zK1e~WGV5jKn=>29ObgNa-c+`Au+Nj5^Q|H3<!?Re6jYp$jS1KYoxxUPTYk$}k{&4~ z%&<bdPpX7SutVHI2q?1eN+H`vAZ1*~Me;JKJ;d?${8Ce7j?>wu)_McjiDoez4jAeW z#(5i;$Eq2w=G)2Gn|)!d!Ul!LkizSmUF5px)2@@0Io5~i=mT$2&2n&h{d&UXPhCWe z)e@uh0CLI~$~+6|N`Wf!r&fQVj1jQo7o_FDYNY5fwaCJKc$@whFj?h@7zPuIcpa`L zy@C`>a+9NXqbA1{kb?>^mWLWZC9VgRPGsFM=H9+g1uf%47m=xJjR@vocNH74t!GBD z46|N#9P&%sda}vqlvPs=z7|6ut-7onT+K3bW>G7@C36Sdy2DAjka@#0m<~G<OR;cF zNrgil57`q3r0*zmbF*eBL9$xDzVjd|00``Cg0>b$nf^T%H>CDy3+An?MQDL}SKhdn z{Lw{Rthe@LmQW}O`_`O*8~Qyd&DOvGj{2HaO~Ohi3$5u@-+={$%rN>h=5AiVm7(Nk z3<u(~#q#OAi}%C(u`E$Ch9Ax_r-P;p<(%R(hd-i=Ao49LF6W8eJZTH9uN-5-_-><w z-_FunTdx@+lf#~U5_`^HNPaR)VM9v}3eT@V#NF@Dc{AWMZ&=;Cf6xMg-9P*e%6PJw zbRNEzV`Ww>-E<|5NVeXXXl75XcLqku#DhC)A&(XDWf7Yrr$9rP)J&+ru-|0Y!?LR} zA_m3`Z}wzQHg0r19PN5!XZv5A2|L&UPm)8+p~qd1v~#J4HkP?nyIpJOAdZF;YH^*E ziCrx@ldN!s;-+mv|25pc&LOr}(Tc>>v|jcKAHQG{>)prSuK(V_U;0g3r)HfngPxJ} zu!&8LTZP#4AE8mA9{aK^_jLG!QBqku8nczLnVikl10^+CHx~WBWZ62Odw2)E!23A- z4THCPv4_CXnJEYf*$5AT4D%Fn*L&*GIINxP&QYv<u%S*H`j`n!PV9zeX68-r;D&2B z<bq-H3@_})o*WzMvxDnDY2>Jpm<w3vo9Mh73HA}fT0__3A?8j>!PfWf0IOV`zvXlA zW9$$#ufugWmNr&P;yJGvFZk9ipO}pSPO39ED(vkDdtFcNlFhv|{%{S(W^JkGo~CyW zvHuV%v)^xeKIF~W<8{s411q$XkrrmQ2Zoua=v)&?&h%=hbS<4T1cCLLx8c@{oDTE; z-9&0l@_Hohp4q`>T_$d3&GJNEFkax@7*7=0_vgg%%{bTPXZ80^+riCnyhwqr0eaUK zs7NGl(^Fw@^lN#o^BmsR$^)qRX7%??3mXd~0Z3sgDH!LXk5;fYKH^OrIs~E|lqgfd z-4Pfc`AD2;5@!T)GJ4`z5xyj<#F-YU7?Bs)Q>MuzPPAp%O%uVErRrTW;Fhww$+_M2 zn|L7<o$)n~;AF>T^63~D`7610Nd-%>8(q!I_y#)I{+8JcbvD4;c$JC|#5H0jA|@2u zSeE7d+Fy#I+8YJI_c%c;!?`Cv$8<GKqm$Owc)aUkGN)r6BOVVAhuo9&^{aW|EuA6g zCo8lbe|QHYf5Wgm){w9~8z1P8rP`=YORU@5`2^u8phip=5HlhApr4e|Qc@r}ySOiA zNpZ!r!qf@c^`oiG3XA|nEc`(@+`E8&<G9AhbwcsRiJrCNB6+N{juEc)P3#{!GcV_j zfGZL#5W6ipJ~Y{8Co5||wQh=y;z%HJdVfYZY`El3zt}(HByBpP{G75(k88C|+(NXZ z9zuI8dPar%3#~MHf+6p?4}}q2Yh>j)=VMp13H0iX)4XwS?T>EcOjPt+oeu~P244v! 
zH+>beG96^=2l3e({R%za%<RWi@)U<M-l1ch>3Xu+A#V^T)pT4H8E3rg*meGdw8L#V zn*tbF-h`3m(8ay+^BXy2)$~==T3W#Jly%V&Lg5RMrZ#;Q9XP^wnxr&tPbk$U)`8b@ z5mriHFekmp6ald{Klr$o@V(>Sc;4iQ8gh$>^OIlD7G&(rk~QPuQ|zM!28YwCn6olm zUA>%<R*-%dhVrpRHzfz<jM#?hVfI%oqIz8azCHTGmgQOgP9a#%E00N2HU?C9r_NKy zVBWJ^r;jaw&P_k+W?a@RGb@@7!n?WnRWR}=qvgSJv)Fl#vaTp-J@ZgE>qb>fP1dX% z)47TKI9A*F)zMg2W_uRvLUvBkZHcmZcL=4WtOK|&`4n-v*8H9T!oRNOJ8;2H>vQ_@ z@EN*r6;n6pgR#c!ik5LOu;dY`CShc}M8&6<*VITAuPw@&7Md@7o_bhPf!K<cLCiL+ zzSF;blMF2E5=EP}&m$QLNkQoAX&gX{WS$mEjQGDJ{w){^L=`aS1J~-`3)>$T$y555 zZnjV49t|jMkydlQuGR>HP%Cnr_*wHMUGv`@!k)o<Y`cf5!fEryvAxN~5yQ+0tfbgV z+dl1#1;5Ubh(=8Z7jXb2_(ACRaF3sFopM1ZqWDSXP~I4>K4WTR#qAlEb(NU?`C_R$ zEa;iUUL^Wf)|%we?DKF%xwg-vZO;rhA0~;(f942GYj-ZB*qH`PP5v`SVAsD5qB%2$ zT_pqWZXs&$gZ$tD+dj{5yuD5Djw-nPU2UL;W}NTVhG@o{7m^_9p4OeNAk|y(efCm~ zedljT6$1>GHB;C1ZA|^gnIo;(2MA*g)qP_pS+PSkNTO+PvNa<1eX!-?MqMNYV_jn4 zXP4Q)GziVWH1qd5AwAF9jI$-(GF{U|Oib6Dq`!mhHQmAb=6A~yi`GoiD@FQ~t@pzW z{8;Pb$@wjwbU&AO^%i_q?Q5irZ00`L=#>@o*S34^PRFOU*3q)`X4x9p!<)Zl>HWFQ z&v4Fq=|=Cv$)Pybl<R!!xZf;4v&j6-0BLhZFA3h_fj0tJqj>CnSAE)nZORje66LDp znMH~Wjp*F?&t<WjHA5vWuFX4U$78_8oLxrIxMz)N=#)(~AEaO{*-Z&ya~-ZeW@L39 z(B;;}LZ{BJkyd=D7iOSp>NK3>sL1g{@1IE36Jj0uE862;Uc8S>=h4)e%q<)I86$r( z<d3WAOHUx^%lRs}%eA4MJGO&6LJ6z@h57}b4Mhca1-Cs$l48HYKW3A0#tfNF8QC)w z$r&flP!z=&IYTTN$QzBwIAMkYDPus+CSzFV1o{APa9=3p329%U_$LU6?FbGTKq9C2 ziAG*UDWtGr<hs}ss}Z0&j%&^|@x8mz+nT$IwyTv!3Mrq*7>sF*4~O#S<K(8D+}BP# z!Hc948{*{~#trZztlNl__hF#~UXod;=4H74Xy&~Rd86e}%V;wXDq5r-g=@PK9xzjd zw5szqFqW00HbIdQ$nS@g9lS^tV6&EO8Aeh`bL@5@io(Ty`@*pj0uzm_NMeO=Js+ee zZSw}Vk7>u`#VoDk=V|UTrXHCpXdd9I5R%sElD?H_Qtw0qIsVcFv{tj2gC1^QIxpzk zs^sX+p>Wz|C+Okt8o1G%$)8|$=Q9wW8C*E+(D8cUD6rBom;SAEj??L|vNeN5Wd5`u zoOa%c#D6RBYqOL6z3nQA@`ZjblZJlY#^*et{!Is?12H(6<74xZ3zm-GBXbNv`bXWF z=>=3#x$(t+suB02cjH@YI1wr^<I&r0cBEX{jb8Mu{cC;LZ(MUVx#nW?vSkyj=xzSo zSQ<>=bdHEuchRj-1wN_d46_U*SBY_SjM9S37cbDIcQU-NW89;*>RF2pnE5gbW{jxm zY&v;{asevxua78C(f~-yXeS3*sxx!RKs@f(h0s`tHJV4Iy|4KskPN#NjcJ#|9v=+| zMJ03vw~cA%CJ-<<YX;k&D6jJdIG(pCWsKtukjYz&(szc$sKD5@8+0!e8uh4yRwhZn zJ_CJg@36d`k#5Rr^sZ*X1=jR=X)3NY_wokMQPZl8bd|@|EVoOGv(Z>C07aQ=@Ii>V zdZh%;*|&H=)3-5;vzxxfT4Xg|t|!;)ye!Ez__22!(;2r8yTi3c4zse!=?foX<doC0 zn*LB{rJT~BYix^<t42JeIW#ZtraRU{DU~u8vc9Z8iIpZ<wRQ{lTuz_q`}mK4;ucz8 ztLKn!ZL>zC^L3)QxW>^p<4~Bjuc5+QNEc=?>pr@tY)QxN$~z!4L(mG0(I~LxU|wg{ zp{w~zM)L>}JB5iNSk_q~LOD4fFTMh5xUT*Nl%R;~n!jqa;Vw$|%N@FOuI4u_Pt6eP z#gk$Lvh{L{kVUZfJ}za1(Mq=x8Fq{D`NnNE&%WO-^CECTD=z1~m4CKp2c-#~b@%GB zT1~*y_}<FMjf*|az~iiTX8TK7o9wNe$h~=6;giO)l<bx5W^&u!IHxZqTMifG2S)1w zV%Ra7R=(5e?#(Q)#;qXkZN@Co^*HQyfAJU!!<Off9hZpWJ)IZDcT2(Pzrtzf5=oN= zGbJyNCV?I1r**RaHVhj8`ZNH2fE)wR#hck!mhL=6wcgGYsdFZ4+`5=gX)V+*QJ{T+ zaQV;D#m2<TYUa(EI|RQ~TN)+5UJIz`&IGr#6zbtWzs2v?*4!5~vGCSZ{5twA=xyxu zqIn^fg~yt1FtWv(KI<*!X|-C;=(P0MnlmLM_T8MmpywcAvlzc9ycF5P7w#;TLr&7M zh?w9r7mL8tMG$`zEUk>Gtk8`0m(sz=S|CNB^vK1f4fH5nu4(HY>P|cqBZ{dAO*Jng z4C@!PUJ}g)Flxz?#h)nRH*HxUmiv0nH?t13$y(6kSk{?@J;oB!g`y)OsmzmAqnG^* zrEVE}7Km;J`x)3+*<tQ-T0PxvEE({H+6V6!^+^%)13f~rq9!LoDy|?k31eQUU8q0G z;cd}j64_U_g3^17V0v?4e}QG3GT6yZN?y)$)Wr2*)gxAG1v-1l>t4<~3%;F0=xTl0 z6AiA0+ig6@s#hL1Sho4HvyAq~E~F03#fWB)F`b8RpJi3wtl-_A3$s7AMpkF?at^uH z+=j#3I)5s`%sKl6f3D~tz*_vpZ~U#Ya{7wDbwRW&Bz`OelN|!~*cuRQsJ7}U67of% zQ~(_uFNpLK2sQTRGi(XvqY7&o5&vu3F@oJGJ4dZ6qC!dF#xf&1Oyqjdk6a9=w9cJi z-pW9WCWVyt1UjN*mafPU+xMVbpe<;Mp2ZjRDO8Boh%u{wp-Yh8S{y4&z^CdG=t4F> zN30$-phwz|fz|*)i)4=@rTo?@apo5+dKQd(-xtizYmJ%Cy=H|AL5u3GD+tD9a`&)Y z8(B$mFx8RwjsEE}rZ-}}$2>PdYedM+%lk`YUc1l9)L0gH>aKbyG}3G(pM2!6M(||r zKolQyuOU|HB!SPRFl=lfWjtqopi9O=)`fbvu4f{^UW)J_y|30g?|sJ&=k-KG#Em`3 z$spz9zABErwo{L?MkwQZA%0PEtF3ttzS@g3+i$Q?;b%qf$L*jNPP=WSaF>`2X`K%) 
zJM@O<*Tbc**f!lBm}qW-hPDFMBRGS6xlme7wFs4%Y?eJF<VAGPFOg$Cn;(<e0-1_6 zZC`LN3v_uoZ~22S=ei2E<9*-ldiY=+{6-6t6~jV*Hm@S(rtH{2f;m@bCsLW5L}u_K z&N!0dex4Ch=dj`qIY@90IELn3b&+(2OwOJ&w^3_SNLO<a?2X6HT~hf-j1Lm=z#jjw zu>ZhY{_rks-SK$yuT-Wb{+VH%a9ud<(_u&<ta>mBY92r;BrY>Q?kMg~T<&UMU0h$; zK;K~L*;zHA536&J<kRNalC_Me$!3$zMy86=Tg^dHmPF;OD~SD(G6WAwIA=I*F?q}O z1Dw~N78xX7h^n`bsjC_Ya+G80GK^f<M*&d!EN6Zx9r=izi==h!@Nz6akMnB<m$xmz ztn||}*ZCaTXSg1|(BX_~^R9Y_8dE;klO5jYzrq5L2T^YU5MM(q0*TBwRv==YTOb0S ze`aI8!`X;V|I>_mDti_03XR09KK`q<e^e-)P!8wHP;%ruNZ^y*$VH-oxQ&um$mKoo z+OW3cRhwci1`<*-Ck-I7r*NYAy(*z=S*BZbJfUN+jpx~wsE-a-BomK)GA3g!4md;a zALPs6pf?fb&g(g~ziQuJLQf6{J6q3;@wHyceDi>B-N(#k2XWrU7_cIRBbh7Wv>wev zjsoVX9&<OD(2nl|^hLm$KX1L1fgf>LjC**qvjYdc*-ITv=eD8OZ~46c$4au5;T6-= z>`Ix}=aMFS^}!J_U`@B224Devf*6+NBEvqDiI_HIBBv9Mc{=<Q^O)Dg?D1wA$f~ce zfSp`TkKlGaV(FMywC{~>%}<Z1Xjz{rtEcP>neT(Vzr|WLqlLSguO>pyTWEdKU&+Ki zpL>j3{V{p1MbR-U=A+CaHnmzuthiiQi4L;OYoDqqK%OaxPTlNXH`94{asXphd2Hge zM1|r!Yp42~;=>e~T_fykJM(0hqrF!SzG)vDle{^vcjtuF_II$Qxnc;bU3PSdsN-XO zWS{p*26ODvKwz26iXj`S0DA?D_gKemlTO?(FLg5L@j@5X%%OE?&AcL8e6qCOjtD#W z(xLc<(EMoi-vA{|DLg%vxd1MMvM7i>W5$qQsJ`jjsDNB!d0rv=VmTiN#)+^{$ZRc~ zb^xB!Z?aG?31hckyh<O}Z=wEr&nL$e1r*|h({`uBqg*#9%BLc7gwwX*BYTf7^E@`* ztiDzsI$E`5FDP{jhO!ptIcyKiK0^_V9eox_Sm!jBay0VirwDcSi>+XUxyszu3eG5Z zQZ=qeVz1_#w1@>2Ei;|#Vwdp>bFZDr1u9&yt``RO3!$<^0LT{C6a+F(Nm$whuUs!p zaI>>@c^p~`(TwK-69q1zC?cU$g4s+d@>=5L({XZW++0~6DVDiGJEbZ`80tjO7J&_o zKg??)7I;}O7dd29)4{>6HR}l0)6Oh`B&U=LF(iDYIa_dnhS}cM=`m8xg@|Fun3M63 zM%_YteB^4rK)3+42S&dTX4iI@1MNcOwwA?2O7Vd|nD*EOB3$ie#qf@wNYWkbmfxlQ zwgrad1zjlUnbY8if|l<~!8&CHDL44hA7=QnCmCbcMR5nsw9UpS^MQYt*lCv&HMg}o z){$4bmAh7w*Ezh?wgukE4StbV`fO-|C;JMAk=3{?YFgmr?DL}o$9r4Ph~d6TfAmvk zot45#It8O&Y#s*Vqo2yoFrM;?&e0o~to23j^|9&c@lOpX<3x)hQ*|`GHc*LzllcWw zE(6LOsWJc5$$?jW(I3Fpy1LBQ%PjIO@N<rWnZ#^LX#SAOgLNpOxdT$$BmWyXDg1UN zHP_jn4vuET1`Diwzbs&92|0Yo1Z>E`I&w*?U<Q*H=LJXQ2en~4z5ARk%PL3?Pn(X7 zTFgrAdr|KBC4!dAT(p4^xD7EOdXLs!3F=!kQKV-ZJuf(f;>qYZ?nQs}Zu6l>jv=xo z+KIVIOs68`eRW&38(k5(po3!nK`@rfXcugo6;|7#5!g=WaE7Z{w7ql3QCA|r`J>Zr zUI2HLzA2sdU+&XX@<)H2FVvsy4Ze;l84QLry~{uDmAvR7=4fy_s!YAKSPEF6%%DCE zu@#jbDdj;)DzMQvl@{k(a~-txmtL4zXtfVg4ZdhT_wX^2Jf0*OIxf&Xnc!fa{?IXk zeszge>)Fy)PSi#%bc6xNim+26M1LKUn?OXm$L{#)VwU^+TvjQ6gGo*ErP(}(t*#L^ zOL6DnX^Xmj<M0)(%}2cDL|6<X9+Td+l(4&RyO_?+vT=p!c8-diYF<XoHMx~JQ)*C; z`F&QCSx7#QVzc00cwp0)@JfKooc0XTQ$E>4J01lB+PcIyzm<S0bRxsl=(`=pi2a+R zjC3=OPupePSDCL9z+Mb|LCXzH|Fj&X&ryhcM{oTqwlU~<MDJrV`=CA$Lwfn1c`K2R zevi*X(C(-P5<)9wI-2dBx>Qs>5C^#<i&kZYEfrFAt9s01M-yEqb|W09c^nVdu1jd% zX$)+C+llf=LPyT00rYc!6vi$L_JRreb*Nv?Cw`ajYl1fK476pl%q)5*J!#6+9pS3D zm*NTY3`WsTCv>#SeXO(O93$8Ehnu8VwaD?jSvX539-@9B7U5Bzr@FNQ%Ymnnh-6QZ z<JE!brU5~Ex^z)=ciS`Mbr%b%m{lCEB10#^aVLE#I@&>h5?Wx`J-iumWIztDCwm;5 zcP#hMW*4{utrxykB<!g0=FCp6XBRYQ_P`}^72fFCsiBkPZE*c@0@9ZZ6VIWcRJ38V z(f(wk|4hy>q>GtZ*TX|#ZoG$DSxk^DUY0E4Dj+-GDiS(KX0DaRTq}#YRu*%uEaqBS z%+*<J>XpR?okc~?^MR8q*fYUw9!hta6w^M+{!3;UT2;V4sL**W9+<}38x`KsO`zU& zd6X0U`fU0X;$a?*YF+0*j`B`x3+%^cWgbV@VzN^LpJ%7!yL{~kbTY~8{`Ima*0hhM zkJL=GMKYZQVp^K3CiBO26u4%-Se_poemt{AP7=P@Fu20I>TT6k(0Y^VqSv7d#W%pt zAaO;8M+{FU50Bhrkfkp$C@3~JO{JJDvs|=U`_m!+wMlQOC@kb?S#JY_j!Z0jg%BAf z_<Yc5W;Y)3%{pFq$x$Me7LYp9XWCZ_MG#1R%DlzOoTR(UZJ{S<SP2b2N<zV;1+zrL zJ6RSp4#(_KnX;OHS$HH`iSl8`Q9kGx_jP};G3lm;HppcDTle?w4`u?5&C0$9dtBWC zpwi@>tFr0X+SnEg@~;=NQUOg@)v;8MKs(V&y>}%LnLEc<Wiym;Zg>N>>}549QVDkT zdCcf+jYA}+_y-FL&Bpelco*CA=ff)7I=X$o?%lhS*W{PocJqer4@c02wHIYB>He;Z zE%`sn8n`kqwmw7<^XTHTcKQ9LtNbD-mCnP9Y1Jls@$#;V88P}UUPcG!d4f-w547ph zcrMyZ%Kz(sZE|}Vzt?T}sSTZ}mj6&2PO_ojhQ&5qYQyz5++f4IZ1|uJx7l!y4d1un zK^r<npMc+B8;-Z(OdFnO!+INDYr{KixY33$*zkQD?zdsoU@QFrHXLfhOdDp|aHb9C 
z*l?i@>uk8fhHGtjqYZy=!^dp6&4#;ec*ut7GV1Zmvf)`aEVkj5HoVq`zp&v(8}6{- zn>IXX!+x^g#c!|;$J%hZ4fAcd(1!IkY_{R`HoV)0kJ)gW4PUb1yEgpFhVdCzzC&#| z)`rt;m~TVFhK)A7)`qv+P$U00{wy6T`;%BRnrp$kFR`Gr(t>@X?zq?Tzi`;mzemDX zlvGuhm${8v_od~AyL@St;V!K$D|c7a*Di9`)z_AmH#Cf=^Xds#T3=pbl=uGTKE6Tm zU;k#+2CB>4HMNpfd8vG{{Yz@Zv!be|%w4$5sI0Bg0Rl$J!s>E@N&hInF{A7B*YQNR z-nF-yWyP<pE3eU^Pi-izuc|Y~*DYJ31I((e&jtBH3uC1gsRmW5YE``|=ihi$rmFd; z)L0fB1KNF(jyJX@P+e^~^?N_1`mr|1;&F68)h{YJCO0=XR(_{tsX_@c)}39rAkL}2 zpOrPgkj~ldmT_G<iz|!yDYdk2DL*G6(9&=^0Z#tOtNtZtJ9ItXZ$n2^bWCi&IA{O( zgv6u)uH=+~gHqE54@u7$I&Aoek)zzBj~kPD{0S$HJ?Z3er<^)|Le|7dlc${az3*pF zot86w#t%;ScxTS?<(_e-KkuyB`2}a6Q+V#2xkc>iEI9vyA6|IT#g`P9EG#W6ueh|b z>axqL7uD3(T~Xg)1Qst@y6nmyEx&5TO1=Foh}8#bjH*TD?(+Kj+IqKANp^)4<)1Tm zuH~z}=H{J!X0KP}JEy>#cXp4@obP2#o{|*rt#Oys)m2xOmKar3b!AC|dr=8&Rf4}^ zlrO3?gypJhOJKdqa`!BEB>(EFh4m%%%iL8prM30-<)udTvhneS)#W7(<uGQAQBq1w zV)RP=#0GampsudAo-gGki`*3yU{P&-IceZrq%jyDDUaYcIVt{Bx3>q40BIM@&CBn_ z`9@_`gS(`mp?uN8>SgY-Kz&usrS2M%S}bT#kgA$0qpGC3>Pnq_e368Qx23@4#B?tV zT*|w9S#6-cH?HH|d4`*yi)tGTcXid}<)kjfsV{E`R2%Nv3U_Hqb+u#$r39x_OKTU^ z=_WdMLTPpVN$!e3O{u1-ZlNVTNYykL^?_1@!t-B$^i@|ElvLH|vP-!qNx5~?tf>uL zTIp`6D=DR=6TG^XY!4$?Z+cDaL$B_#ms^!Lr^uqWQ3=wuHKpa_zdJp8=aVJ*%px_x zu_u!<2?PF<vgLcAM)w$SPfrMUWqC=Rm6C+}{@*C)lB!-2b=~#E``$6*H5g@oBi?Be zuPy+`Ev~9J0wvWwl_a&PGZ4IJ7ssIgCABru^-h3!qzBfWVmDqBr%Jq@a_c^jw$M;Z zm6eq*t|~3J!b&?PpNTe|%9qyBe(2nVIz25^LRsN7odV=+hg$>-RvDG_?`6Ufm-mh% z=^mRtcBHZrqofBFolla*3cZ@E?hNY7uLzVk2y(*xbL`HCN;S&s7gf>FU`F8qX$FCs zK!Xr<Ny&d>S3r5PG+mF{9?EN|$=aGl<u!&~9tp4MderbG^_K=Da6@<LCA@BL6?Afj zH0Zk8sv4uar;=o(`zzPn&6KmMw7#~Xw!(0qSEWlkYuvbQy5w7(q7XEmwlIGDcr~4| z`O<oNyP6Vu?Lf`tHML7>en7q2q|B9md~|#~1EK_*=GL_#n^3Av<{FV7+lXywp>_XI zE;;PImG{WlC4qk2=bf_@hkd`c&pXx=4*Sj$;9>7S?epHRvGMB0RgDb5(N{NKy}B_q zHkJ{1&6+hJo|V;D*tk|X)z}lW3+Fd7zA^|G7On*?_t?g@jl@z6!<ChlPG{WGy1FHG zbw`Z91o>b6bF04p#v&70|N4G8+Pfdg=x_aNR!9CjJp3xv^UtBa+rQo^tX4h$qS(Iu zF8?C&-T$lW-YWc&wOaW<%>j;8-Txfl@fWE<fvX)o|Dqh<?O!DRk){8S`ux2XAUUP- zFOs9Y^|+JOcPy|StZ(@5R@$CW$*RX~xg6Gn)ouxmt5!EPueth~wJqy{>sx>PZ`c0h zx}R?N_v>%C@n=83>E>I0aqDfry!}^q+<Dip@BYni@45GPzrXMP|MS2f9(?HGM>anC z*dHH%;>ka?wQt(IW$U)>J9a+x^fS*sx2xm%7hZhn<=wCBdG)nFzy8LXZ|(id+wZ*l z-uoYzoqrAO`|zWWyFU5!v(LZSf8gMkUw!=zmP*xsbpmwk3C?$#0R6Me|Ig0<zfAwX zHvv8NcRd09XP4japSEbxw1&tsg(~BBio1ZHTO7;y>6TJZFrln$g7s2Zz$PD${Cwr5 z%n{4$tv994u3dcC`#H?W<n!F}I;Oo=KyTpEK!d>@<bi6P_*ux{65m@_UnOf41ts;R zm3D$>lrO9gFd?>I)mbGq`jvboFGc#2wjxbQkEe$C%OovHM-gA*sJSIZpuUU`{LZMa zvRz6QRR-!Cy5E$VUtU&I-piv1F<m|v)Yj-wa|1RkF(e&{FL4y%B#h#_M)l0{$Xd*N zrp2{O<{EmkrSPBEP+ot|!poSO<n>I@y><clo?p^nc$woaE-$RD3)ER3@VES|<WvFc zQYDv`&#YZ)#hf=cch2NV<9+%0R(S9L9k2p9a0FE-z$a({NuUe_f=-YNszE$x2q~ec z5SHJpbIv|zUQwnR&-`27BkNJ)7wTm2UsR_3FO<Jr^R$fF%%VB9wUWtq_&G)<s*y&5 z8d(;vMi%u~Bd0jk$Vo%@rgsc(%NP}_lBQg%k{s(*Kgz#xlv0HV>5e4vABF#L?LV4) zy0|UjIdpR}xsq1i#eF;59Lf5fNH6)7+LCv;|L}flIR2^lJIl^G{F^gMIg92TmTrc- zpBmtpt>U_3_eR%6WeGl6Z0x2Ck5$7Lrne2QODj&zQfluwQL9sGeTGu!4`t)`ZHo|& zjChqX#icUlq;(D2o6_NGOR7sOPAGKri&FjSqp}>SQ7ZL;<Sd6PM!BZ+Q?5w~b&mKL z6^}c9Qop*C;qhvCnM)0yGC&QlPwyJMH??D6TXJ0_zt2uo>YK4jEr{eN=}w9&>_0G0 z4J=Dn1E&m810AU<0a{8NP*+hWD>Z;e@VyVek8%G5cqM5Fbhs0hyDUYyi;|U_eBJfK zyR6ztt#c&zQ^`ggXLEs*65AZ;j`W`to8?G%s`N6RqBxb#xAaMbO?9eN{8I5t#V>VI za$Uwr32MlcGBw0;flBTgus5+IzRg(|SKP1As_Pvf*x#L`+*>k~+einGA>c4rxg7&l zM%R$NX&pVZesCHSC>|-tg&ZPr^p95k9gnLh>O<4r=&v%!KZE=;$UkFJTAL$19z1#A zyL9*tJT*NX@litWtQ09<S%1psRLOG^+ah$nb*557W+`<&G?HJ6)a#Z+l>r}TkY1#I zBQ*Y@PpMz>+-HYB4)>EhZ`tpTG^a{4c*^2b8n~rRN@+_u(yt?u|F6za>K&egk@%Xn z@zAzEw1viVlIt8U_@^uZK8jbadiW?YN+mi{R7R%o!h`U_AK-=iH7^Js*D<e5(YzL? 
zc`cIHz_XRQoG0}itE?HLpv4sAxcZ*jlK9!(bbtm1G=Ody-~uhW@m@6tWyHBXX{A{F znH9+^0i}}BJg3@uS@>AIAED)&eDCBr!wz!@_wnfNR7Bzoicy26#Hm4(T)JIEf!FHu zmAaoN5@##!Z+IecELtTiSCLD(9)MOuoN5U84=DnY){seq>U15wlt4YjQ%BU*oRqz~ z-g}pIQrg}@9Vy*>GN4$gT|6so+#E3u6*Ci_wqc~)XD+0@@!Uo@fqlRK48L1=gtrBz z42cK7WN>q-A@zg0Quew!lG+k<c_oaeLa7&d+U<OGdc=$5S9GaTr95x&U7%w`q8b73 zj(~SZz(XS_--t;Wdxvz;Mtbwn9B3oFZX{8^@Ou$;4S!|S6VB;S&Y7g8dB~}G2vn3K zE=t8YZc>hc{ouJ|HSmD}bxFmEg;u)#;ZLV>NxG4EbNbck{%}rIVT$et3B&gY?yoFX z>MuNDyKET~z<bIS(IXrc(MRh;+-P2>42xI8$_A)mQ<BuMIYYXvTC(^<=#{vVQ&~LY z-xZ7rpVCjIOi5HJbA+n##gV*6H9{|*A$B+m=R_5M9XRX0Bw3}yL+SLB>DO6(Nye#3 zxuc9!@*hNf4OD|>4R|2F%el8-M@(Ck-Os_k%A!XK^nedvNT|!0m~`40BUz22zaK_= zLnaTbAJCP!H@?H!7U>_Q%~|o_Tf%7G9T24kOp4F?du4w32HFu%q|A=N@oF%*4<?<# z^#k`NcMNDttV<}i>hB?&M^fOCWO&2{%?GFv*I7K0qT5Rn<x5mU=12?Zq3t`jvhj0U zFPhnHK7;+`m`(PWF6EFmfl`+)4}ElG{ImL2`Vxb_g#OX)yE`IvGW$;YC!X9$-RZt~ z0O8?L@PRk=SS#V9$Y;@AO8u1QVmo{)?ybMZVxRr4@uICrpT<zCGEPj$&6t%+&zaPf zu(y9lTw82iOmTEpr0(h>xD!X0VKw}t`)>LP`VhOX=<XAq{~f0>f3MUHy?Ll8Ma91W z52eZ&$vheQrb1t20jnP`N`xNt<@NAIX8dV`C#P)ci;du``AGN>9!j5++SOBw@pgMl zA|2AYPTDavz5Q@GB%ZPI@A1vPZAy*Y-ivQW$E(p(GSui#hjyj!9o&)HHn1+GI5{HI z6sDv`tJK?*>s-Y>{m-sl^uIj!M`$2CF$ekQ=>1SvPe0Vd7mnB{6+4Ahv*G>KaOA*V zB`Hjx92sL65Bt_yp(V2|l{(Y3hQ>un&^l42UYA^#l_I@?^{bHm=&s1yk?>#o5*Dqp zY<-4*=}TDj_-E-$%ypbuUQ=GrhS4l*M{Jf+U!A*{y%^NF`DTb#z$|ubyEOyqW9FAs z8E4ei&t+Gpy4;$Hs_WG(t=C`&^D6aV^xSe{>TNbj)L&9lR?STQ3rV%0wk%Lxeg+$} zXS4r8=s&C68uqSc)w3<krr%s<w`_bX^-)xQdCB7PBmBSWNySPSd2T|?-0E`X^2bGy zgCOE9D`R7rwTXU?-pUPENZeVqixz=VepL<GPnQ@>vDtmBlS#E#{UUmfQ9Z9_36t;K zrRsAji<J)w8bX^NSTV{hPo-X!G^IR6%j(Ki8|xa?<<PWGadKjcBKvdQ^t?x76JWEx zCNkw$`7!fqDmA^xy_BU7XhGr-2n~-Ia5?7Zj;Oo_Upc$ymzLKTQh2GzTcs@LSzD`C zk(9bo{PJsSFAvn6Veg1j0kTf=6ZtZ$q>l9t;R$wB4fTQGDC-J(TTH3DqWtWMo>5=U zy36g_?X70VQ(dIXQYa);MdJ3(DnxD<TAh<yAnw<|?>zSX%QR210-;`^=0zo-Q1<sI z;G?o8)a%{jIHS6O1c=2NiC5krfc18|ylFHJN)7eG@V;JDEz(=Ed1cg^gtt&tH^t1S zb~F#FuBd$W676k5xbd;5yoi26-#YZxl+CTHs<GH0-yxaj_Uv}fHAK^)!K>OO|06%B zf@8#(uhz!QuPQ5_RasJBR9hfB$upN3<!bZM(}CN6tLaXud#wT~b%*w~+9Ine(dP!r z>z5Ul*K17<R0}lTQ28>clcK89%WZzXw->!^)`VblHJ9t9nIg1XybYSeajD<veCCu} z#9X6e+ijg%zM<DUO&u<o1?2+`l@fnuprWF@o>sXxDt}f%Nu5PAsGbqsUGAdV2r<;# zy+cuMkJa*o&eGP1H|ua8!gNah`C2K%YR+n(@Q36cVKa4)MZc;m!Oo{<Ro6C@+l~2J zi!<!L%d2kcRhn}GMqR)VPX75}q2{Z2X_s@2?jGSvyGN8vy=tza!>KE&FYhkxd58Oe z^&5g?FP=HCq`pd&HN0we?wqr8^I4xOt7d_-GI|aw29hrA$%<2UPKEV;g3!XQKxv~& zJuTR4Bn+5yVF3LaX!hUr+na0YV@1-7ydSnpk{tPZY$!6e<Jg~%_#)xu`Pc7X6!;ef z{__-=bo*PU{){>9vlqEvsK7$Wg(q41uH9|xbL+k9GYfJgMgJPnqxnbtqz_;TUbk(* zA=-Aw0MmJ5d6Ibg@yG%CIG#ivrwzqV-UU7RmcSGFCh1CCfi50NU%DpoOW|P|K|kU@ znn(Ok<B@miGUa`i{muZO<FGWT{aK#WkZxS3&oZITo9<fsF9N!G=#UjhveB!x@RxE3 zK8-wr^C}yz21;3)c;tICkK~U&kP>y<U)b-z1PXq4@JLx%lF;EE0ZN&k<B{*L@W}Uc zh$8ff&<v;kIU<f@y!ZKhL|@%E{(m(5e>DC-I{m)*^nLG}|G(b<5fn&1=FiH_eazoK z0-OK&G>@&EVc~LY<$(WrT>nuy9+L%Zsq&aC;QmKp^iNIq|8<raYt0uNQ86+st2-Fr zi&rmOJ=!MfU2j>AU*2iKRk!Z_MqHj1jT+uf`1W7D_A9sb`G~)(4q09v8$R?M!+Y)U z4-<aZ?eE?`RK0h*dHWBKo&Jhn>KNxDkevJ4#jm;5C9hrf+N2}Hzqseky<aLdafOB1 z=Z7pgietE82|TM$jQ^=|#&hc7^R_-{sDJi%p~K&zMd75Q<KOj-Mc+n;{XN=(9a$DE zw96eyJMyk<z7*lMH!VbVlHUSMAW0m}w7{|UyU1wrJNTvbJt7tt+wXQKrN-LtB9qDQ z;6W?A$ei-)u-^w+uj)4YU1VPQRod?v)~oW^@2H9BpVNM4+fx5J4p`}ntSP^{?e~$^ zt6FQnv;CugZu{MBznlB5_@nLjt}m?j<LviB`(5@#<ma*9SwPgkEc^XR``vB7>qd-U zy8Rv@w$px3zsbtyzYEfw^*rD<pwvyIK5*(^gkL_j+ht*_#V7eT^xM#9Lyrxo+c4XP zSvDMNL$?i+ZK!Pcd5o35X~TDIxYvfS+i;H!U$S9`4WF^$4jXQ<VVeyfx8X(`-fzR( zY`DRO>unfGzuA6YZbQR{l{PH4VWAB@Hq5r+6dR7UVX_UC4f`{Ji?lf*e55^&x2mE0 zug7lJ)iW(R{a4{i`xogi1P948f{XA+q>T#_jZDzwTh}L6KTtTgNWA~kze3-CE&g7c z9`4B&J^J=fecxqVkzWLgTiSdM&jmcvUT@%ei037q&v<0}GK=SIo<&l4evx>nMk$%g 
zF5$VJ=Ruwqc|PSyChP>B0v@rh`~So5?`fAu_4!5Hzew4$`&sprWy7&Hblb2uuSMeg zKMm<nKj2x~&!M`2=QE&fz+DWyhrvwz?+3obQ<mURdx1`LF7L%Z8TcX3=jZ_S*2C<r zgDJY0moNx^PI!U$@w|>(;3GWG;1>8Y&*Qic0v9nTPLFda&U~v27!WH5I27l&RTGck z&<uRX_J0?c!XPPOQh}H8NSJEiPi^-G;LAJ`ricOa5gu`i?!?PH5`GUb%ro6ZLvSl~ zCj(^}INS~V-Wc{e@UH`AWLj~D1Aoqg#WZy@@U-Ju<H7Ab0XL7NpAEdib{oKTw)+m? zD?IB7zXzCdBKvu`Q-Pap_ZHyUEEL^=|61V3Jd$P?a3|}1ujBs=@J$|FFTho&GA>gW z&A@qdl2dRm0Jie@a9<02g-6oa13YPhQu+9w0{kscG46YTKcUkwaBl#vLZ|XZ+|59_ z`%dVy1=#rm#sK{H0k1fny6f*yj{{%l5qt!GW4i^;^jP`&fcNuAUIHh3iGzCz@KM|S zIM6rK;wyoxcoIp!88~GY`;oW>{*LE1I<fnK2Y94h2Z8&1;7OQ+z}30bk;DbYonhtc z20G8gmH_?&^Ld2+0>9>I#7NjTz&UwVxr%_hcsdCG4KVpEiw*)Wm<?~>e<AQA9w}EB z@Wlemmf-&q@Y=I6ugUd60^R3WJR|UibCE;wzY&OC1Lc!2z>PdYLxGd#S!FcNV<(75 z%J>Y>JD)ltd@*nhkAz<foVI}Wi~n?B8;_Jr;JZ8$S762kO6?%baNsRG;(r_PH$SA# z@V^&0^&)6an$v+Lmw-3!GT^UyBrohQsK4?^+<m}Lim4m?KL;k1SYZ-@J|4kA;Bwn7 z@B!QHSxEizR1>!u_-mf^l0I+?kL0xjczu~g+bzJ;E~Wp$zYw^F=XKoI0ypspK3jmX zl~!ErLnycH7WgwB!RKb+(^XdeJ_Eeza>`CRHv_L@(Kj6)*Z@4EhC0IS2X5f;h(GYm zTC4@(E(SL9EWo`5I2rjv+Q<~(G9Kw4mIF82?%S?_{~IU^;RSBtk?_v|R~uGcHv{Jf zEcXK7r#y9p{~UPLVv9c;f%|zj;C~Q!-U|2z_X1$cN@#+6J@D2>>M@D>1zxa<I>KEB zOlYPoxD$bSE#QwkANV#;Bkp&BXRn7ZaTfw#<=Kck^IG)AuY-SZCj%GoNZS<nq3z!V zT=rASPTO1#Ja>cTE&^_~-IdpKT{n-^g$Oj?zmfJun%Tf0kJRIOVB^mf89FrVz%8^A zIQS;ZoeA8^lTMf&z_WfqedEptF6WW(0<&+m@)B5h8~%h5cny!_wHA2uFQGGTfl0qY zh6H~%a2JoX>ki=ZJCs^W7=h|eD}8}?@!W`i2XNo7p$~3>r{7Iq0}dYG*586B?&0^K z>wK@3eiksuBY3U{Zs+mg#(s&4{+-3cF~B={q_4Xh_~+l#XA$Ogf%h{;;}-bC{{t`L zE(4zT0Qlfu0G#v)^GDoMfKTv9J+=W~-e|e^0M|Ya&V&(ofJgZ4An>Cntg$ciNn}VK z!E-6_z*g|beGqurcFG8D)xgVkL2GdX&+mXga9;@ggh%+{b70^_%0;~|1tz}?&iD(w zi$|9cxOg}11plSLM|dRPjliZ?!5RN%VDX#q3~qs4Jd(b^H{P;vHi7s2#iDZ;@CR?h zPt=Q?%aF4Y>!rN_<;=rN;3H6U`^7C#^!CLq@MYWm7Etu>#b2Q4$BSE_=&y@g;2E}C z;3c-Z0w_A+5=P)pZMW!ux7%)kqMt3e2^4*22`^CekHuf0=<kYKpy<GgTcGH-N_c@w eZ1)PF=$(qcK+(UH@B+W#DTHqS`u*>u!2bmiE2a|w literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/setuptools/gui.exe b/env/lib/python3.6/site-packages/setuptools/gui.exe new file mode 100644 index 0000000000000000000000000000000000000000..f8d3509653ba8f80ca7f3aa7f95616142ba83a94 GIT binary patch literal 65536 zcmeFae|%KMxj%k3yGc&SCTD>S1PQP}R5YmQ5=~qJi^+zl1UE)DtPsG8blp-*!#RLg z0>QIub24npZS_`f<yJ2Gx%RfbwfBl*uV6xG0{-MjRTOJur8;p@W1&fqnDc!<b2dM) z?S0+v>-)#|`^OhvIcH|hGc(UT^E}VYJoC(K^_@E<yCg{t{F$aC?Zcb?`Ni{pesFxw zo%Wkt>DjE;rth;Yer@_4k$X3I);E0Tn+<n;+jI9__ucm$)$@&eJPq1?o_p`}RNPkU z`Sy3#+;eqK&X~ef(Wh%$Pd;(of3Tsy@11*-?Gf=`u?u)lX)Iw+;(cKCl`JOSKK7sD zeHA+<-V4}nyl=nv?g*9f_b?6yBx$kDF4=y~YKCCCB)cu!mL*9qBV~z|I{q@eUHI#w zxZet=Nm4pR@o(rY`E3@_kcQ7q0+8}iX7L_=QKB^Wyd=#Mq5o%(=5t@`n=ZtG%HR8U zwR+EH6(2u6f(PM6ZKcj0_0J<otFLZYbC-ITBt;MrZJ&Yn>-Zb>&yT9Ew!oxAMfl)C z#Z+d`C?Ev=lGJ)}%Ksnx|0)G)SVf_n2-;d?f9!~MzIJJ-=wKb=iHfW2QCpC29wSNm zA=ztsPZ<@3t`2ENV!bW?>DIbrM&c*bCbqaRzr~R~Z-r)Gl=RG-p<NO;x4P=0D?)s` z$m_KCdCiWD6_v>}ugUHp=<&@N<(0nQZ)pc;t^f@UfdU)Xs*a2q9hEj|W&QGS`}Q+V zaO>`-aSJ8yAtP2OBNk%M7Utt!$6gfgmQ40WtW_PKSW_r1oOg}p=vZj3XtBjwwJ#E} zLMNCsnAlP1f|%AM?kIHMo~S5v2kZEcbEs|ZrY(iCq{N>@V-R$%P-2fEhzyjmCh@Sy zXyr*PE_By~_)26%86IRFp<L0yrY(-_6^RN*wl=1!sbqzkNBE#Zr|)1xR)-`}qV{=I zsuT5#vQT;fwD0ZwJO~iAMI5M-JD`zRj|c<(+4vp|@n?~!ADWe%G6eO$3}GdB)>9Ya zkBHB1hGv2=t60ZM@2flwcy2#L^lN{0=%0Q@MjzL)ErkWFb2Ro*N07ImOt!9YmgwvP zqh2yflmnST)@Q6JEa3kv=;e&Js^gRcx7ile@Me+Xh_`B=wJ3|47Z(=9j;P;M4jj9k ze|zYYnyGIobV=&smWsjxVw3XZ39!ke-gcWd&f8i_T!k-^@^CA0*s%-oQ>v?$_-7%o z(GNN8XT7J;F$I$PlNQv_oLiavAq4>E7I2dQhlE)vSn!y;BSSI+5(`L`#@q*i(+$dj ziMR82oKzstr3NgrEei6^p%m@2rUhVv>rK-H3%XZ<_rUh;c(a2dG)%uOg$_v@w_EZo zlu%GsR0^7TQkP%ahpqsf^)t)7t<j1g+Tx`4;LnY}eDrxiuoH=ZlK9$8(KPhsobi4M 
z$psZiHuGF42=%W3b2x}s^KXwz;=hfa!6-nS00F@ZB2Rzdm-tMKM|!J2$OpkDB&e<W zp=IqLfdhi+jGDI_IfSX1CsWBNHQ^`>)|hz?tCY-06G}<$V~#?~heoED!!4L2akG@t z3k(cUbnpdgqwk%>`n0WAC7vv#rU2V~=4eiAwpse1#pRD3*UlGpF7&;UP%~^>-Uq9> zqqY#gDuX1JM-HRLrTl?x<n8>L1RW6Nzt8%&-UwXtnfuqbCmh#A4k1U7-%L3c7Zx(d zuhG+B-K2d4zoLVczO#ufnYJw*t5&k#)-NC8`0Z!%(?;tLH)1SS=)o%@p*m1Hza}bC zH<@{EP=$nZv|K=--J~^q2RFJ=UsK7|s*{A7<k#1>>2riBOI3;<EmbyBr2Q;!)*t;6 z%bAU*;bM7n=w0Oq89^D~`RGjkug?ON9(0;MXlio>B9VN6@g>xk)TvhhOKNMSeI?sb zNT@@qXG7GtAEH*Z*I7+?xX^=^+#cd{e*xu~c+oK%QC`k~8T1Fj`XSd4etuu)23Ly= znHbY_evF#lbUsH*M$@PjpbB6kZlDn4%Pfry7Wc9o2a;HxjOT7A9>$Ks0zkIpxF}-P z4%J+UwB{X!v+x4J<l9l;41|Nc`2wVB4jNck69S=U@yowNLO-xFpm5`+mK}<8p^v+1 z@>vU3b1r4SD4dNJCLBe`P~a!!^eLzUU1z9JMV04G)5v%Ur4xPh4u|g#Tc-(r0PB00 z<2OM*Q-Cajywm3kTRsx?bLZ%s;?w6_FF__SF*1GDPvs6}`fAHZ`iq5gfrnJz3GS7o z<!S&dC^NOtiE-fBC#iZl6nPcM^GAV==(P<NR;%_=#!(%&0YabZIMPv&92tc<Zx7b+ zhXzbD$Xkg{J4C}ln^mO37mVbwG|+Ar#F^zd@x=IC!wbGLO_1QAONu%pJ?DT&$271> zuc4jxwz7KJ_rCH-tFJ@z@NXc!Q<?yrLiCS+GL^7*>xa$m*N_NRtT_d&`a7duuH`>P zd%}h`&|B{GYny6$%@oA-ep8*S_YbNQ*wMBx)7fGDgK2FaWZ0dLJaOehDVhGlqZp`r z7Zz^Qt{~7!1nOpo+s>!!UDMjSGVG3o1-MTD`U{)X0)7~njK(aO!mRqVS*o4ZX4diz z7)@AzBH#*!OwC!#-^rCEBXGL5j{ilBGX<T2fkEhQ4%vX(Kg~1H*mhHs`C@8C`##CF zP-@@Z>RTv<qVAQ@pPBn4bWbwF*U^~CI`+^PVzL7sfQR?ISVY=gn;M0{7SlKW)I}fC zqn9jO+3r350+pLg-%ap_Gfi*v=m#C!&(myW%O}ynm4I*oqK+MG>rZEnIJKR9see4J z?c)sQ$RrZUz7CZ}&@|&(WWQ<q`Sr-K<@HtG)|Ku2_)JVn%I2W6B{iM@WID!(VycU$ zAsB9F=2CVh#57s7&)3s1WBcH0)V=8v_Ii;ZdYh|;kGm9nx5OzmAxm<M-r)(EdHG#_ z%&)8hSU}eM-Hj9UR#%Y!30j>6oZG7`cz^_)daDP69Az2FAzJQhYnWChD$L)$+G%bx z&7w9mR1|a&sE6y@t-J-J@>a|Gc{fUJ9G}Xg6OuprJK#0?Jp<5bfq@`8o;q|BAqcJM zjQ48!rGWu;JZ~<LXe=JXw;{l)2MihWpCi@?07-K~${g|I>b>4p%t2&K3ny&<l5~GV zu3pxR9szB;9|4i-*m?a+N5i#!@8}=cRcFz$=1jfQrgz)4Ua)YNY;U8N3$K^;Kib>6 z)6|T!KS#l1EVxey4i&6w$J3D-fJnmY;zyL&4<!g*Eqe#L!`;_mM+^g_OUp(vN<5Be z^757py~8$Cr&@$5?KKvp_9ylZ;IzB+5AEvs5img9peJqGr>M}ieC4Y4zD_DwoiJ30 z5_=SJD^>f%DnzwDB3tkBl@`9nM7`62cB()9jX5~Dm1WqE>OH3SAe#W)`7_C8+pfMB zJFd=-^{P|*4uT0K)k$y3)D9UFllj~KNTvgXauGr@LJse7Q7R@RDA(z2H9$+ML+eE& zl=voVrX{czY;0=zrsg&^7y3DBQcnlbCHkTK6wlSv)Ot^a>WupS(t25KWYtdJD_Ul0 zy-WLUG9529T3YX>gnVr^CFHB&()t2Q@MyPDf=8_?tuNH(m)6hH=0j$@t^Sg!YDQJ1 zuYFT*)BGE?V&5z3C3>UFt~~e`G$NV?B%)>wUwRqg;i@z=IXRJXAM6bDgMFlKS|1}* zTJt0-&ot@>P~uYMKt_<u$P@-s+AEV2S~BKcqvp(8p=QmyT9cttF;Z={RhCTEe&@TO zUJAU`$*i*|AeRR6H#UONQ7ve}-xCCI8I5u>iv`@icGQ&50s{!#;tR+P0W?sZB=UJS z28Qw#@F%T&Xsr_aIZ!Op21>PA8)rgy4p7O3{6Pz%JAtoM$hIO)F4a7n)<P~(I+1mw zsEaBknp&{}E9S9cg;s19#kgY<l_YBuq7zou(m!JkZ_XDZ4C_c<Sz6z({V6&l4AE>$ z761{^!~%XE(hS<N02PLEysfKNE<cjeOV#;(?@T_jk3@Cm;TkXqt9DZgBCHyGl8OLl ze024loZPB+*+B-OCpyKzSXkfg%OQ2FrJZf>ewuU#=}f4+5c{H|(n(tWZhp^o;Mq!< zRjo5}SyjYX;$XSHob{6zO6oY4v*QvB236~|OfFpmxC~b5@TKpZgpU&#G7W#1xq3O3 z<3MV!e|?(f)~nX1p%Pni43kl^-$5TcR@NVMSZL^H&<bawx`(eNaR~J2`!Iu(Y+J`C z0zJW~Oj7XExkMpn(#4t%;~T4%mFFE*dY9bPI3TH+th!&nYyDR#lIdl<5c*6ThX%5o z)o1{K7XrAx9cu@a7Dqi{sAWL~{fq}PRa)=Vrtpf1n0nDaYar&YVxnNp4wBU<488MS z$Ov#F&_$zgEukIg3U&rgqrh#QfipJ&H-3{?*0{{-)2wH6CJS^m=O+bRE#HY|gu`h3 zQ11%GUd!rT@l#r+x3&A9Q9zx3!O@^49vFz58}EaJqv95q-s;fX98f>E-&ixCRksAc zLU`VdHD75rv;+qczU;=DL2Y_V&_vjEBUm9@4-7a;8wVN=CKo8r`Ay}yo6Te;LW2km zCg&ma6+&MnuR~}6p@HNqtG1-l;zB9z8^>xc|3Wh`P+C9Ga0W~Xtd-{^<+-e)w&b4$ z@#<dU(6x1DULnRdkk-ueAh5lYQn#C{Kar$Ow9<TkRf^br*Y%_?W&Q~$VHP)oC;9HH zFyAJHX&yxvrvM`re?)<zG~~~V%taK#?<|y#csf;eGzCh<9i|=?_0I;xt5KQHpov;L z0t+x44o?z#lG!W+1*D-aOo%nPp=W3UKr;w$Yf^zMxL9ud2w;v07-z$oAsD^vS<E{m zby9@hJWyh(w=tq-N(%FBH=s4EKk!SDDm?gZ!D=Y;rpVJ_#J@uO_xbUq(@|JK0CxjG zFWX1OhSkXt3h+-+2B}Ra*1Ku6+@(}+E7&(b;`$3RaW^!x%;!_nXlmd+RbD!!1QR4B z_FE9rm@*gPmVoPDY0{)OI<ctVMFcMX1r<MMHnOpPqw!?iR5zQ&PgCM#k=SEs?-`A! 
z4XsQ6%z?14uc40j6+x?IsGlNoi+Mf&0#Vk_Kfue#FyBrUdP=0G3VR(9^kr$|X)V1p z(52>5nT;nQH;igvjVF^ojjTuW_pKostir4{9NA29mEyNid}uN|4TxhrlC)WdXd>FZ z?h-VBx_toZ4Q;2-s*De{^r4;Sf;^URlfi%h+fm{Ob0O76slOabjS9;G-(|(y5k&(3 zek#h$5I=h*8r>7(VIL+i{Pd0V+%%S+M@0Bp@q8Q%5#q(@z7U^EjPS`!G$(+(`k}%- z#O*6nN~f#>J!8|-`3^7o1-QI(ZAuFG<!BUXr|7cC9O~=~<E*93KqBxcL|`r$JUY0_ zXdKvAeWxU?Elnp|vsSWu9$wq`QH0F=+T|}~+vqdKAAFvq?^E&4-RSZjDSd_`s65hU zRG&`TX^nKMyq3SQ0JH<6%FzP8jJTHXf?$dS7hfb2>L9cj-g!Tk8}ZggIXanNhBaH* z%$w8Ym-akCd{i@ElJ?9)<M@uU6qL**g5q}2PGrmCpJS01uI2wm>6rRw2KnzPg>MHL zWA%sB4CVRi!%2H|Ot>Z(icp)l{Aa9616{Nh!pveS`i2Ma03DLWEO3U&EX$~V4~xO) zi_s8B{5_ln-a`((@w7x)Y?Ng>9x2X(W=@XB{D&Y@N&83*@i)+~?fi2zq<b^Kg`y+v z5aP88t>nK&lp^`u!hZ&&FuC{jXb#dH{4o*tBfc6Xo9PY^qOa0PMpSJ{ZCzqsyow}p zf%M<BWuSR#dCqtgW@LiS;}ezcXc|UfBV(CSnU7I2nZp(sTV-Ruu`=IS>A><O4X8m8 z`<KIx+&Zk48f8hn92h!L6_u+_3i0uI(7<b*=4U`~ZN8*mCh2QsDU3Y53!Q#7L%$!H z3eB4xo3q*2<}}l$JlC3ZDhFC?g1j3YAEs5VX3xrKH#01r4Y8i&cuYB30<u}{<a<eR z%{NgJ^vkx7hmh%A<n-49l)a-~r*D%bZ8pX)TSl^|#co#1><!+CeC5cfjpuKIoO;QX zn!?_AW&vMA1)?e2-dwpnrP{Zj*_<|HxB9IS7{EyBwDfcxYouv%BJm`o#n}5SJ@>yy z&-gy^>=Dmb#gmKYQSodQ&%=1~zFyPB`l*;#0}pG&_qGP<A3uSmH3t5s{m%eUQpd3P zFA&gIum6fH1&3i4>aB!9U}cE=Aq(N(&^msURe%fvtfy@-U04P7ip72!ds&zS{&BQP zfb0S1(?^*E(%8XXe_@jn|0by6J>q*uiPa<2GTum>1O`T;OFUo1v-y$F@r)f;V$*<6 zxxSwOBxBbhyp$c;NNYJb+cR(3rm@O_gUW%XWq<TbdY9tu#j>Q=+o~LhwQWXHG_$SW z5jNrvBb%>H`Q9&KJunO7*<L^=h;ktBPP~l0f^>TYN%sn3?(GrjM9l7u$cB1!?on^i zxm~?p=dyZfRh62Dm=dqUXFWmia`&ynVMq6Z;jpdSi|}><(*!Z>E*$=p)}4=V)0bCj zv$1@#`k8GT@C_RK2^%GGo{Z!or=xEdC3Sy{6c(r8w_3+22VPE8$VUwk?|v1ZjJ?#d z?luIe*vr0NEPYiH|0;?VH0b^(Q6Pm!7br@3K$LQ`y0q!bh+5I~<vKOL>B~(@{BERM z?U4}bzJtJg>$C~wsYFPs)mz=A_+;Vl>b`0??CGA4aEpE3_1cuC2W)e-iRD9CL7-ID zLCiMic?H0A0^lhkGFc%~0KX@IHA?JFdf%(WUZeMSFj1hlro{Hsd$SVTOYdb$?3Z{O zdx;woaT2be^4!6ovG*{7T!u=A;%kW$=Y`c7EJ1>o*h`$ppM(Z)v6oxb##)uwlhE!L zK|BbE?rM}zjMBeG`2mMsRATo-#`XSM<p+O8w<|HUP15;7)dl8RhCjKgN{Rmvqg>NL zPiK55szNTw;(m*0{!-DMiCyRLQJA!hU8fN=;!ohIB&twBXPo+q?3dk7A=(!wGR*;f zmH4Ab9Mw+-q9dQRF(aRtkO%#|sinU_GzQmLfG(6X%$CM}s#}Tu+JSZPpq9P+VJHV9 zPKiuBJL5!5YDD)oz~~%Qe-}8Rt@jtTDY45@HnsU*=;L2kq0UjBUo;Smkm)WFrzQsz zaZ(FGek(>;EF>{BP3w%4xKbs_@hyu6ngw8|fTKh!qlHy>F)CtYnXuY`0oli@9KP4p zxmNRteU+CaBSCFY-H#O=Jk~#|5j}R|7;01ZpAg)=bGW@hevqcf-LE5A?_aO{-~#Ga zVjtqE_ur%Jcu}N(Q~CZ}jI(<Gz3O-M{`=HfdjEHn_!IcnD|)HPLK{d(>RqYcK--f` z*$u-u^BYl7987l&tm;-akLp~@;>4P3jf|vh1&xdm!gT*1BCt>!eya-TOo@qvzBZ|e zQ2iNDWtptbp?AvNZz7_NZTj+?+C3IKAuc7urGmA#W*FkVeLpeU9(>ulfC;|b-cb+0 z5TB6^X%<Qw>XtM(`pIQ=fw7l3m7PqEu?nW_-d^ex*@!pOr$qxsd<Oz4p)`d~h8&rq z3ajISrYI&Ma?}RR;$;Pxhb{D=3(TWzKXJT%s9^iYO(<RUSVE)ar%J3fi`NkNI14-+ zZrV>${!Og_Ogsu`H35A(O_T{B-&NY!RG*-ckbdHk+HO0|vjjb;+l<6Mq$Ue>zCnpS z2ekn9jv3VFG&VekjGbcGz8tU@^*K}|I^kYGwg>=6O-KB9C~8h~{7t+%<45rXFG$@q z7euEagA%`$O73*@wt3Wii!!}!nDQtuEgDEVNO&H@L}t+dCE6duOzQXu&}83R+a_*t z_&PR>?K`O-m-^lvX<SMec7h|`W&K*3_mnRBT55ETVuwp~p@I8^9=ez{SZ8*-mN8u* zozTuQK_62nm3Zs64En5I#e|GLc6$(Z{nJ=O=xuZK^QFcv!65zY-K`mRLCxmeCCUAX zz}cdX$`oRtgCQ~-dxfCh1^&upuQ!#>QA4JXT_&C#wmJUf{F~PzJ;U$!y{?@r5_;)a ze{z;kSR(>#DXe7X%}ph+4-@QPELf`|eLpD~P<#ctkO^UZ+OJ**V<{Lc%j&ADlKD^D zh9X7D?5ESzvDO!l)qQ}Km>9K-c6Fh+qFvOf78^LViKdv`C4?Z?Mm>D}Ux<sHrkH}T z{bB$T9}@}U489THt;{kO)K<u$jjOAT&an#NS6e0M`$=U1ZK_mV8*knE4JHVe8aAHK zFcU=dU^F8UI0qg3C?b`?O8zG-Foc%XW|fLW)no3Zk5>7K>T~>yb3k%G<(9(Q-eiF; zW^X3gPV@i@BfZ3523R;XaoaM4t4g?fQV<VPLD<~ePx?Yq$D4a8z-364{**`yGcn_9 zu{VoRIR+OHmUtLIOw5N{j&^^5_Wq5TtfdgKQ-D3T*Ov2llcss3edmNCzcld*zqAN{ zPvP$i{0-pmrYrr@dVGuC5m`p7(tDsgVeD<hs`T;Hsx-BTiu$7-OpNcxSQ`%eI+Yl0 z+3uk^uu;4d&qOngC&@V-eut#XW`{q0jImkn@E1xQ{!7Pn_%B1Wq{Ba#_7PbQ<=fsy zIk3<2>e|xA*Ok~9;<mt1D%&LHDM>8Dmc9>rVFv`@;FdHt*cs>|&PpyPe0UP`2eD=g 
zvFfgbQ|!MPHa(pX@+5W&jIJDok-l1%npPJ!4WXp3E&+NLPGjwF!I|Z_iN$Cc<=?U^ znZZOzzo$!rJI}YV`NpupW2zzj{GeLXVuu9W`n0TN!|A}^<;Os!&SP2^>!5w2kEXSK zlwqH1ZHplztSactN=M`gEK3rV&LEFnX(6w~j-W+mrHrb}^}uPE_qw+H$a{*Nr4ow8 zzFGz?FS2RJF{5dTqbb?YQR&zY>tcGecNr|O?N!1;-1-;v**su^4QMcbISfGyV8u(} zHrJScDG^rhPt&Lre=<w&w`&dr<q@ntyCOx>8-P)A48e6~K=WdCcfqdgpaqO6I^4`F zK}}d6kG*)cjinU7J8j5RgJojK+lx)wDSSUVPHfMn%&-B(Q)XB@^Sg$Yn#i#yh~@O~ zVsRFx43?7=Ef)2sPGY2yYNLx2@%IoSZ-cY2)IzclGvc!#BZ>GNJRx94d^Q3p^_h5& z!jF)M8oNlT7}k16tTxu}c%&amYj-5hh}SOCB5QZV4~f@Pt>X1d63xedAT%NiI1<&4 zPEnH$n$emj7>RQLVK)z0v#L&k)I^8W+9{AF*2UBSh?;rJK)tBMPMUdlAe0b@qx*u0 zz--_|=gQGEUJdhoI6@_ud5iH05LI|VzDc?VJ|^iFrVO)~h{mtX2Rs<jUT=0GdoE?K z@BUA8pnw8#vHWzrb`q00b^Jp8{8bHKB&t5u&yU@d8_ih;nmb;558vwB(<^{vG&k%! zJh^pdo8AgDJAVQjA;2wTpWlrwXQZ|B#86U&mE=rW6*#udOc?ZQ44FTOV3_sr7x6ac zpr5hbACXG@(i#&w7m{89U!rw|t_1#yx@tppqPMRN40wMVH16RhJWc`wDK%sSuvOl( zhGtSQ23Gg1ffEq^g;!y3h5f0%X2>^&JPJgM^)vaFePM&_EvDU)I+oE9Fs07GIqHqX z11^%P9Ja(^f5Yo6;XnHbcrS5cpTmkjM)3ePJsfM5_ylButt7FO8?^&$xs!Gcs?X>b z2Gv#YpGi2Dv&9d&6BQ4+j6e@0KF|+?vzxumV=x1vQd_)ri+|f97U*XuQLFZPQzNv0 zA%k>}M&Ys)3L$~QjeLSY;hfdNb|6kIP96bux0l|%;oDvCM=09?jfL4?gx*}APLf3? zdW9{Oqqf`4JW7W@2etzE<v<4eN~O!3>bQtSkrV7NztT#^ri)SK{5ncM`jbVKA(V8A zqm5NETDO0WB>jd|L}{&4iQSGss@PZfoA}gSfE3HzR_E;{tLUXvReu=XF_)L7-vPGW zI1T&ug(L<K(H?`(O0+|jU^^TJtCv|P+|^R7g+j>uD|W&H7y!uIhCFTlmu0not*lf@ z%PpJ;soA9gr~1Dvt?jQ$qirwINSJ_!P(z8X|80r;trDZo$YvUmPe56~N*V7}HN7l` zUbJiFQ3s!dfm&=5g!m1pD2!1O-JKPJcN0a2?d;iL6=5p90XQYcAZI!V9BvPRgvII= z<UY6B(l`@%0aevw=B*$-!(YX+-pB~^A0xFr>WVx{*aQ%P2W9=~sEz*<6$Ha^)DE+C zm#>U`NgC@|U)x7%!fC|bQJSw-Fsaw?)Kw+OUnVmHjbnB*a9TIrTV@F`=E$%dDJoE{ zNHOPT@UOs6VaxZVAY)PTUsB>f>;z*ISlRduY1A6QU9eATGOKj5!%ZL9;a7P+P4oXu zhQz9+kmfozzo;Lh`0P4(oZbabsc?{gTtRZ;^mW2kS?P?m-mmCgUm2CoWTw8v>Cs;? zS0SUm)`78mC2JotUs5$NFlJ#(0K^R^uL<!j;BeBq>EPJpG_u$FQLQ_~`{8sI<jY~X z5BHr6Pi{>ac%$yfJ|br?mbEn9!Zyl#plAg(29qyxaq993=Nu)WqY^=ggyWgg5_M&Y zpdmD4((h4i*n9jYW9dMOmd~&%XK$OXUQ@bM*2V_;Erb~neJY5aoK)H<Ywq5*H0qCQ zQlDTBhDE(`fMYf$RVHI_W!Ab<9q|m-x1tiL9m@*|+ZJFb*@nrGYKJMFZ$cZex59sk z57?Ts@o7{px+DZaeQ6n_Tc7ur#TXrI+SG*OFI5N`C1So|&e1#bc_WmSn8P_M^})g| z$1$5&wX$6=6p%E(_=1_WYzlEl=m6zLPhw&-Uf=4lsX2A#i8_81%m7n(SnrUx4@UAZ zcY9Ajt`fU~Sp=zJ^Zdlf_m5UCx0nX1-JJVdD%Q-iJb55^UDP*sf=9gOB6JS+k*AQT zX!-nE40q9~JPo6)*xcm752*{l5sA41;nJz9gLNkFi{|qz2oN^pd>1r@w}B5jB_~LP z2GvBz@Gwye!c#g`n=Ob@$5oF-2yJ2=AEdmT4d;TyC9{qB$;>+bA$=O^jVu&HK4E_b zWIKwTm7;yh4<KPRO`k7m<AZz#eH2?iV|fL}=dgMGu(uRi4MCOo8We<q#cTTB*m!lc zYnk_W-xt1sb8@R+o5nBn4Yi_<{&5{~%;2!Y{U-2GeuZ7_FW^by>(lJs-b$e-^uex8 z_YNtpTlEe_{|I}9wEOK#Uk`1z=?18z#e^6*kkn=swo*x(4YhC;wXpuQ?+@x&e6FkI z8K=b5&i4oHt`OV^Qc7$M*n^!!;^NY>CiIo+4e=k6IRn<Ccmv930T-<-f(Tk2(H%gL zc-;vM$cPedNA?^6r)F3%teroKHnxMD`WXi>WQ{b0wsmK&RX%S`$|=X#ookhCNZGc? 
zMGp@>=Fr1Wk03o((_?+&r6#oIX6-0LNq?%hiiHo%0Lbwe>-T<H1phgOUKoYuVWPo~ z>3`g2EIsFYSshpOGWKvb0B0J;;R3Pr9Ne=4_JFJCASN1ch-~a<)#uLsJH92a?)!t@ ziGq7585s9aau52IEp^!s7afJ`bq(Jt%A&4Fp#vW95D%=z4hro*uT^HX!3zQ!R7%dI z%{YlkWf*Ybj#f5>UUqM5dusBp-*XyMDxo5XAHRVjECJKc!11LP6L%wU4tUl+zKk7) z-t<VpU60>cbWELAvkSWx|4Lu$xv}(&QQafl&5^VedHR?41qOhCL(SzYfG{apR7rXi zehd6DB<&$TH((+Lff_Licu&>&&Z=;Xa&GeQ02a#831Q&@0{)cwt77%-W*x#g6dew3 zZ&xR^NH?~t<D+S-N*kTZL%UFEb4F!H#*LM5&0%fuh4Pn7Qs*V@M6IPxD24&wmmBVH zaWzk<^q1so9GjG9{ICT=o53f_1)nJAB449(Lr9zu5!nLysAyc$N}t~%!{MK@_OJlC zA6?!e-}s6;z3KebYQD%>(2;R<WeOUO%|p=iZR1$<8+?-@XiIcP_f*iKdFp5nBjJA| zlmE>}5E$jTfD_!&veX^B!!|{mD)!dLfiakI7!4&)nwbF?Q56J6xBCB<2Ts%>w%swm z5p;*KBsC>VeZc1WcEMA_>6oUa+}=pE|FnRHTlYl^yFJg$z<7}J3wq`~P0uM$(zEyp zdX_zo=h_{4hs7)BMe&;QsCcD6EMAxH6tAmx;Pv<q(p&Mu*@!*Qinn9WKD-lHQ68dr zybA+GXS#&24gYu3$34$ZUnq5^KaFP=t<%zffe^90ScDj20k=CQY~QrpwAO8V`T>NY z?pKA-Fd&Lp!bN`fM?ZqJfYZweK*9>n#u>pxsO*bYa7Ws&dJ+>Tb%xFz>O`IAsLm=O zQ2QL1+O_W+C!P+B$?f~bQkVu*9G$TNH?NtfET{|e3vWV$wJOgaW^Kk+2kj|ub+&!r z%5F<+b^ZM3KYxLSLd<UfT=e=&l(EHaYj*i>)A|w*O+oYkHMGSoBW;P+hf!CE(DpM0 z5b}`~H#WHA9D{t&+~_d#B52-Al#k5v7eFU(YjZ4}1Rw7A4d+_op8>QZP6-}Zt*%b& z`Wy+$bBC4Z?7qXBCKR>#gNcW8=zG+2J1;>KfMPkenBcs6613dtOvDF}1+@iHGXVyL z<Hr4%MR`xvA|0vF*LB06>yW9I-&s!VRgnTfUyT5WT@?XTEPx7$YC8f{O>dh`&23to zF~!xgBb|y(j-~lg9wm7w2?aIp$RKhh<&KyLNYvB=$&f|G&iHAR^HX5#J#vKzvqvZ; z5zD1q_M?eAJ^F=7o19IHb5YANY<MLV{mV(4P;D;iIM(!ur`eUXcSzDg-y01F$#zGJ z`)Ma>aSx^JC#C#K4-ABlVk?97?-pKri`J`C^lj@Tbt2mo!F*JPJ?y@BF^sVe{vm+d zqdEL61~0Kn00=xne8s}G?|LjIF2RCpJ-QOp0mYg#shJ`Ey|aMdO+dz?2ouoA2GDf? z9U76r98&W8OgoJV_Ce35rr%IF@VKibjibJerNfk0;jX6-4r)_7(<um2Ksq*~ppyCl zoHekV`;znY!LPJ&qd`=FBv0vs1LW%01JA;dkI6%n7v6XMv}w;eh8*tT?Kg^FQ|<(H z!uJ5fYA?J@VFAy@X#PBU6VsJlKt`M*DBbrc8mq+qk&wfxq;*bN4}uLJZ#Vf@v`MiZ zklW2}5nh9^@_Z*uFk1xWu+~LNBEW+%vXNYnNO+MXgfvlJK&!FisPOnrU~%IChq1v~ zx|Ayq^`nZW#?Mgv8we$|&s%b1aHBqmi1J(|gyl&0|3P?EF=J5-t3HilzI9{{76*x6 zKTVyaolaiaQfY&n%~GD5Pre=?SyxNb!}usy_@<yV+ah28#!oN{sH|+lH1HVu4R%J% zg!RTQ_=25o=w_Wjt+Sj~N)rDjW|z?nquiM&cO{I+QO=!f*|iJT8gmx<{kLFu<1Bw0 zAl=VHESnbFr#Sq+wvD|gdn;`i%!Lpn%BQ|Ch@zTg*?+Tko|QZJIOIT)My(9TB-mjr zm1SwF2S`&TpDryX9#P`UP%bU|hwRsvKtDhT+>zBJ1RbB^Yju~&e}L^~@^yQUlTv1@ zBA9`54bp31Vp;A`Vs+FFo;0-R!Oux1PR36uu}UPq&<xxl4(!6&r}UW;ygg;Uk7j?E zbav5Xk!BlAd(Ye$8J3W-tTIwY%9LE1?uKlIjg^sFRz^}`zTI279&YZRAX{%bNv2JS z{~i%Yhl;`362EfCp7+o`Rxa=95^v|8(|E&m98A}r-soD(7MHu$8qUB`B>R(Gd?_QH z-I&v|IKQB|xp^Xe=(awPG&MqF<&%bKZr+(s-#&t279BQ>_IM%5!-)So5yF^4AhqV( zL(&Wq!D<g=Km9X4w<j+pdy8lL1*^HWT%}yxc7~?S6A0Ep=5TNs--@($z3dtIhrug1 z`V|kM@4}twlmM)Tr)1W;{Gk^q3G=dc^*d!%Q$WiId*~UYAz@`{zIG>jXrC3Eh!|EY z7vSS$K1aFuPf!CESr0vX5x~160L22pe2&WF2S?JMN02hMS{W-)vY$P42(hb(MT7jG z0Kgu46=5+oFX{|(T_hbv62&x8SSw;YiXi4Zi37hwjAfQJW6M;XSo$borC~ii8Pgl{ z23`)Za5%9Q4#YA!CT!o<zY|=cj%Ar>YBo>+6HO(c(p3ZS!CvGTNzSBX%-rEqrFFu3 z0Co?<?3bD`fsn<-a`2Lp>&&;<_o%rvUkg%%s5cxToQ5N<Bay_aVYD8w(8^-=6rlb9 zoUX?}UWelC0uK~T4Nj*bQPBuGghm`55oDks)Mz;Qe+?~Ie>>rh48y<;K;Ii;b9{a3 ztU9BFw-Hxj#G4%AwBo~BI7~y{qtquD^1>whtP>}mT4}6p>h;5OwHsqC9ZqIF)>vD) z9`m%V7;6i79wo0|ml|-tf?lQpw*fhjoj*v*f!0om%5|)ayzKeCsC3kNR>)f$KpTZ# z(oS2Gu8>(A12ijc0u{}-(1z)|n~*@Jn~B)-r;p}a=23i*SyMmcD|z_=^+VW1hTN%f z(vZ(5bO4ecS%Xg)sAi!w$^tEC9))hiq5*bPOw_*ztWpE_|GlaQ{!Z2H$A+rj`9D={ z=EZ=LI3$p&*UY0PvmQ`%vRUl96ePQckb_@ts@ZwX1kkaveV8H>K#_cc^bsVyzH^9H z=5C@AQ7jit-+@eej-XrjZy-qM+$X4WAH<%?*C+=za1i?FCX6GUl`D33`!UI0WNdYV zc!d@**%TtCdBS*zs2`zLnixwFCz2Rj*LOTbOR4gXhi*l@yt6VwDin(KJ|WcL2{ELQ z01xS2_@d%yBd;a^VFhp+mFvhrvzs^vVRPd;PL|GLdruy6@N~4G9q0j96kkkAf_QJX z2+%UYGU1xVL=^aR|05&-o+3oyB@x=T#j51j9Ez_8cDG*jM$lQ1uh>l_<s=Y-(QuMC z#D7cT17F~WiJVIuFbOAN`CJKp4|{u2(@vz*nS5HG@NK9_)FVe-{DU_DLtmnD<S<cQ 
zrhN>uohmV!0kO(LP#4N@EEUEoXInA56`O0t{sKJlZJrhT*oyhB*gICN!iv3O#j32> zek-=3jJlF4`2{6_TwNHotTB0O1lr;fG+}riY+8d}9p6U4L%mdI_0qplMx>#0CAM`P z^3JT|XEDzY`-GsY?(L>fDo!{8YcSNAFr^I_G8MT({BkOn2e5fU5+J&7BR1$EhzL7* z)C!{q|C&MXejRWO7HlQ95-6}@;>JkpheGE@o~8F5C;HEPEAq66kR&1Ugosejns4c4 z1cAIHP<u##)CqbS0ZM9)UPeHYIIvl`n`Ckiec4TN)R|5hAHL0xg*icqyp|~MNy(fN zqfyinU<?y975;A|@JEh<CyFUMACGCE1t2ixb`cll39%<)T5`RI68VRSW55-a@n3)~ z(6#qOnrk3<R)J+G0Ia%aNKsY|arX&OIK|y_FXrwsRu+^rnYjC7ieALsWL(PRKSVlN zQ!M2S8y4n?u0%EGkG+hN>*Ykbt&Ao)n-mt{*6AhKP?jY%94~Hblx12JK-Y@>_8|Ya z@ic!yo#WtT9ZhQv^f%X^?+AQJXI8yOn(O;J0_UZLC<zA`*1OI14muNBlL+(&Q4U>I zvK2;A{g4N$!BrACM+=}HS^&Y8>{gx+49pBTn;Or7&0)~d?^^%W(6Xq8yvIX)Ll=!e z*wS={pMFrA$mhcL+bNOhSZs5^_4yh!1ui~0e3JMy1D}!~Vl@W`hY4^|f7+$QzK1ln zMAo|oja+PzpfJ7bbNw(p+ns=bCHrT>9ey@n*N$Ez=Xur1SBo$?&gYQTNOpk^Xaw}_ zR6l~)D4|tHof2!J(sAHyexk~T(_~BXi~4W&UBF?rtyAjg)El2yL=?b=>p-$vKkPxR zwAFGyjIrd9F_|1PCa^X*UbAC3yDeO=Q^&Sbr?DL#6@K`&wKcp2YIo*AFcyszm!j5| zYPnfXPJl+OgQ-YV_ZoaNtm<&qO3g~q3GRleK3%mOhj1-}V-2>KW!mcyelxy;ubQEC z)hx0P>gL3T&+t(6O=xD+&fle0>-{z*HrGlxLJ6P<q;CgoO!zPvAGTkhMTinxh;U>* z6xe^eG3%&($pfjV<2y?PZeXVz>$Lmt-X}S6iyKo8lmZ5udmZUzmo0=mihCbW!DW$U zC?|3ujnvSR;S!V~*Z7@Q8ITD0$oqlgyp1Ix{w_Jpf9A7yMC~ukowZPk+<`)h4#N-~ zx`B|O;c=|D*FvM(Dgs8t-bfH|@N`=*_|`ds>J=6Y_VcmpvIB$y(5+twa-`bh^4O%v zER<BoOVDTNkK}dHb14s(lfL)WLj8iNPK#m*4oR8&6_tmROqT-baL~NI*35epx(gFl zEFkTCC8p;@do>S{8j64{(^7QTCPawj{E9(rUYit}h7g@Mp(B+rD%YhBM7<1yhjko^ zmY)OsH;9v_@%1SW(nOfOU-XAWxkK-FG;FHl#i#~n`^z0+U;l=xeZq~Ye?uDUw0FXS zq=3~1_=XRtBH%J1u?Slf4StbYpGsA)ZM%?$#y!g4gc&=$hmLyDlC={t181roA^xKH zK*znnonf-!iY8+`hF#XfJ0bma#_17&frO%jJp_&EKzcMEXZ^8tMkn$yLF%Dl`Yw>4 z?>r1>nzNv;ej>%FDeTauQzHP|`F8+mk%?fR2YJXB3A>$Dv}_6O>pJI`4$z|xdtn_L z6oykV;-p@u!#CLQh0w8~eVm}^@jpS;!SMOKAImQEat9glJ8{GzLpNtNa1>+tdtj3z zb%M&K;`9!1SUAt#w!K80p86b@7Gy)H)|OV~D-R!J2Zb++b^AohUj#H{RrBnJmFE|_ zYeUNO-_7tI$E`+ke!O?%WY*}!{;KbMLl#>m+u!kBXc%*o-a5<oRs$C7Vr4W`*0BFc zbTH!TgX9T+m)+nHDM<Ge4LiB?!^vgXqXphBm|+l51X2iZ9#GSA<X8&4uA($}h|`y# z_#%UpKISiM<J0<%>Rq<flx4JEjBty=O$T(8%H};T_HRVfM;(yDF3~7Y8Y>4TZF7J( zuYC{P;2|#eZ$@ns1XCPM;#jMHR0+Iqo+R;gfNhVIEl0M?$&$E-bVmD-o(%ETU_qK5 zT9z0VTCrP2XVN;7y<A&bs^+qj-#X>g+nn}yeXlfp_N`W@{h;sg2D!9UbKq>XwL38e zq{ncRI$BE>X#GOE<|NlX;M7fa82thi>H7$<C992UY>PRKC9C24uAi5c_&!R{iJ)Q_ zaOio=e%|+XW8t@sIN8<}`Wl?tU}fU-6#9IV{SQFMcVf#QS^WTZz_zX_`#$!*w5-m` zH6-xKm1R4J;@c^{qzuMH>wApi^UHoT6pvH<>axU8{6UIOE&IVx{2_|xmi>_8nJB*n zadYDu>~fw68(Y`FEdh<JF;Bq$88#|cV+35jYG@n+f9xp%x%bSYho2r5c%)1R#ML=O z>`-aY0k5DhzSZlrYqH+z^mR0xLDTKk@=9OZhIIN2I@h<G#Z(4=_Y3r6d(;yN5;Ii7 zzMS$`IEhhDzmUCcv6{!)qiNxyHgyL6Wc;luYSSwC25>;?I4VwyW0G+f1n&T$xSJly z)#j!Z>;$g|Bg4t3LuMJtJ6XHV6?LA@Gt{CgEVf(T88SN!jZ-e9VBAUm#{oibH$9RQ z4p5tS(<3?N0JVBIJyKhjK|TR(Falj++}F_91<p7LvX%zAv`h>H2Y(B<CAczRh0p;- z2^jJ*ydbM%&^Y*WTySWU*=^vW-x-TmBOUgm+twJ>M>`j-*@0px<!XzYa7>Zq2!_fd z?y<jITK!(*Bv$<%F;?9Qqhc%^Jl{*6;#*-Oz<~v8vy{_{j!KzkZdy}oF6{~@CxNm! 
zOG{omIQ}Z}JN`gjAiiCU7`6b1u*!hrtg&c~x0Q438dwrX9I+U57-4}u%Px+t5K;K{ ztf$Vs7db7JPyS10-V<Gz?!#&1n$*@WNa#IMHWAFJJlw|GNcy)oc2OLQ7r@g>@N3(^ z%P&G^^+@ezF-7<mvVlOWC{*E53eo0nJ!~-}NHb}BiSTl}Qs3;dYlY13F7u@SXp)*& zHl1F%Wi#lNStj`(qocRwV(L!!5JV2F!csx(&57+{Ow!C!VXq`GthHD%9d4y@@W3}d z^h>zQ!m|l?sHj(CaaV|o+_Jn!u--yr&%?AH<Sz2{0FJiGO5F42*_2t?l7UUDzli1U zkRddkcYk7<Fo)4;SyYJ9^NIVPKtInbQ*DbvJcb>VFkK)fvVRhFEUM$v!Pjt!3mawm z$cOr0u}Y{--h>0H$iPmPH_a~#tJg+twfrpT3RoIRmxOAAyzy!<5uD&a$ss{`>32d< zFhttVlHvaaQ((lOBmugVkdySwv9Nm*6o6ntcZQ)%Aof&0-zuOeDA7Fov^5QaM?$T) zHDqM6KVt{HldRJaBw5WOT@a8R#&`%%)BG8l3pXwW2L5XXF21XzDf>J#6V3{9OGa}V ze3hInQ<dl1;d1{HO>%(rcr%lZo5J{5?QF>~1I}h!B`QF5u~Rs2ipwChpEX_Z;6|?t zS=vuglB44$6TCJcp=C;}8)#79sg8MBT1I8^?2_b%;sY6R>Fg;G#63WSpv$!3ShV*@ zGOco9)BF|cdBXNG>;YmXNOw+PuhiC5G6Ta+Pcp~b3eTUw0Nvgf7&z7qU(Rtii^|hh z+=K=l(Y~OzfCbd00!JAr+&V8yU4-lV%5dg32;iCgT~aG(WKK&4nrAi6#7b?brO6!r zd<w)~X=dWnQfFm%2x<}8Gdt2Gq8Mdxb?1_<gavOoinHq;$+QjKjd8|_)mo^obP5^Y z!QJqhHLdkP1acOtZJx3YPBGSMU^g+nQ9KKs3(IpR+6ET{92kdJ1Kj@mgSEAZ#&diO zCVjNecF0+VS{H1%1?~e_YHhfQ^|yVTmT)L=+`m4^3*Q1*PZ-`7SERDr2kSyqz!BJy ztOBa`(3M_Bu?tTuS;?(4HABVRdiQ!DrUQS7%(KuSb>36tj-g!*n>Ku>RA*;8K@h7Y zXIh3Wy??VdCYrWv4}HK5RiXqes^Z%LMDA8rR&n*l%Sd9KYfGo8xqkmz7~juZuRpWm zXHXlQLW(+TkM;Y5b-30gaL#-SE+?SMHSnB!6a5C_AU3@g%m04N%g+IdY#Zd^Il#kc zJNa;7VgM`BFHjt7Pp*J_y$X}Q_Mn;fG$r-;&ML76&=B|Mj3IB23-stM>hK3q7yl4) z3c&~3PMC6^L=NGYg!)2t{NIa&T&F&eW9ZP*o&*eo19&q+r=wu++=r}t$W0CCrI8Bt z?;&^5lp@9Mtk@yd@97tUQ(O1al8^lV4HFH{2Y0GD@pd(<@8}+KbV#noom6OT-m8SZ zHsICz&Ah`1dwVQ1AiWQXI3})uYbChAId7oH+XLUP%mcTf<YadItcL5yaH&*wk0Cs- z``$8&se+ZOhFU>l2|s9s?}qu+GD(o?7bga`z(b7AVKfwQ9bd&7(*ohyh+`4}Ub+Og zv~|&8Yi1q(z`|cSP+@cEU4GcPtrj1);c|rZ&7h1mZVgY->F%t)Hmt1SgWY1&+h`wk ziIt#zPP^Pv%D*f1Vm5JwRO$jLT-;(^AH~_i0pz?cc3Lg`8R!Yedb}i4O-sI(SZGo$ zMQ!bgg@ePPuZBYdsgTgG=p#sh=EN=;YjpX}YHr_!jV{m#ESP4%jjCI$Fh$&sGdARG zV{Y3xncoc?+o-#V&cN^r^5AYFTt<{n8}c7wSq7U?=`yzxe;l~sE+qF0w9H+L-P`LS zyb5Z{uB#34r~ixcI=Kr)c1o~<NIV@uCN}MdZsZYch+NnCE^M03|AgwIGlp+Qy3eW| z8}&E?3<Oh~_1)h_xEb>lY7N}$NT3DGrK4abA)Kgo*3{O8qP9e}yQbEtcfuZK=8>=> zqZ=+=N_-_{sg~iAwcoHMUl`H~|DeR_&;rTZH|c#rd1w{h)U0FwDVo)N8{&f2<jFM3 zHE9d99Y{7JEU-Bd;r{(O;X6exbR(Wpmr6~vfB)B46j7lve*tySO&_m@aInFh-Kxz( zC%X`Kk~1YciI9wU4{PsRgY?6!gWmRI$wdgSKnh*!2AE^r$4(vl<k-pVBigyXv#bYD zxNZ<%Tzwzek2U1_0JlkQP<(*hn6;z`A134OMeiwuWQ3f3@8YoIyApeuoxt5}sAnav zQq(VPf>4QDbFm0TU4)q%80Ig<ZH+aNXYL(7mtnb79KtP?@*3k(^cS7fn1kgPpl5q0 zvGq>4cVPW_N8w!k%Rwl;KX1G`F?VBP#ecb2HVzT!58yi4SA`b?HokcpJnUbfZl{PF zk>oRLejvmQH=%*0+DR7r7CLCtbRWUtdQMc0GX~zneB53WmY7JsxgPxBf|Zod2bsaC z^#TUXFw*vsD8s3eZn3<={BD8y-F)-Avv^(#5HmvD4qVGVp>f@NoD6p6G0b_;>7TGK zSQ~alR?VS_5WXJ4chmd`;}eKP*Ud!gqJH>H{<sD=5YvY2Qrsmh-(G`xqMJV}n8#Uv zP^OD2chX#X%4<OGp3_jDvaeY9xz2!>=^E&IvG)+-cV%M^_&01SS0H0MKv$grs5Or# ze{;CeD&O0U=GE4*vNezey^K^nxg<}=whvsAzk~U#Wx3i9o(+e0lk$hTOUuO;4{qj4 zl2>04XBKhf3p<6i#H3_&!u-@$Y5C=joC$cF{3W!jqt2D3>B5^fj~M$Vm|SQkqX41q z2T%b2<P|Js=I{^2YZYANlkj<;Okn&Cqz!pI)0U$v@(dBi@hSwcUPkG;WY(QbXmr1d z-iF=-DsbbnLw|(3pGQ*4ZCHu_2obUD6l7>Y3>2D36oLt^mS3MHXxT;nz5fClr6_(g z&5ZNmC;~14*6HL!T?_*!%vVHtjCz-|@_{NWfYVq9UHf&K-&hC=^N&yg7CXr8M9E-I zy78zABU=W%n&G@W?8Qu0LFxuGkGjMv)ARK*Kbna$O|6T+L`^#69$NTe%8totm!w@g zstZths1|A@RqXFjEbE6;4?L#pWi+}9BOlnJ@if*Y@t06S%G-H%h(Gyfd?E*y<6uV~ z#6AVi5o+s34s={NLIlf5uA;m&lJFu6NR3z>mHe*2<gXEcH*zS&2y;W+XH}$5LvL(+ zEyRl`&i{bYhx(h}je^_xt4QkJf*wZx3H$(JBgou`7*3bKRsOip$CwXe2J3re<E&_x z_xLh$I(Ka-;0C~i<E~XSAB#9>h>?FG+|6B3U|-OciP^-Shp#}#vXgWHA5YNa6U!+q zq};yuH@J$<g1PN~sO5)$A+&~=N)4?sb0QFx-Rto9))BY;aB?gTO%(;5xJVOItA;GS z6_+75B!}0e7^caSdZCNP>N+-9bU!#^pzU+qcXRI%2RJ6N!&X5ogfS!cW}_M>(lIwZ zfe*Ebf@|4$_;a(+fU&e6F5DR2dJoz(we3sCE&7)WHrk^L?qs(*e7DNlO|*U1q<`tz 
zFp0f<BAHm6=IA>yeZ{_t!7Obi5STtGS&+D;Yxv9K`^c{aAF<4kr-vQzf@8HZTke1_ zmA(3$ai@cpRCwMl!x0N;(N4*zTI>7u4{b*MIVBEz6z)~*XZ8JU7aY+A;K^H8`rhA| z#@@HXm?m-|yYDTeyybfrCsN?||6PagyRzmxAaK6m*)Wm4a^kbTx2CJWcd^}}O(&$T zO<t0?wM(QwYhg>D1is$|nkYqPH#_KxLQx{SSvHo)AToTevB1O*7qscSN~{T$U_eed zkFhYIW!is2{v~+Ic>0#e+UgdNtGQYkY->h<h<IsJqawiv@MS^P6G`BcHA#d8bu0E& zWaTHX5I`=Fbre+Cf%tEzVJALG#01`1n3W9}8Ain%xbF9uuqvL#_uX5>?AtOhv79Yn zC|3L;L^vY(C8_NL#a`w7Z<;&Q)?kGqzKblWva^D+h~g})^-+JanYz>}7pa3)<rYAd ztLgr7Nz2k#I|fCHz8M}K_mJYi@c5QU!YDbSM^*y~SgDB32}iIw%Oid-I-FQM_DoHp z%8f0ZPqEmb2{}&T3s7G=!ESWu-<I7%I`*j4B3P9u-6*5>3H#&j%?M%nM&-lef!)5j zxF+{ot!{W}P%Xn+lGGUvThXOjoAq?c<+5_^5yIE&whQ>kp@q=!7ai>|DzP=9c19f$ z$s>&8F1nuZB+A21Ac`DkZgdS-L#<8zL|-DCxMORp!%Qc{SfvY7W`--&hwRbd0Jad8 zc=lZv7M)4Ey|o<on4M?s_qGZtj?Ez{2LA{8?=<|f;dkJ~>n+;3sDoV)i>|hh75n`- zH-jEcA%g)`CS%Vo^jhM_(t0R?r8p(9shquB^hR5^6FWQ$^{ReTZ$6`7g^<`efS2LI z`*Ubd|3D8#gO1K7jsQi{X>oV6_6pY4m`A6R=Sku=CoWqz7RrfR5Ri?94t>qPR0wyK z7ypI$rKPgG<?vuztQB3=yrdk*yEZ!ni$Nqm={r6>C^KCCKePnH(pwNhEInLUcsSYH zMK#c96Wcyf*vntjXy@2%131BRv+s+<meK(>&8T)^0jzv~DG<Z29w_ku0@xTitNg%+ z5L8dwc?Wc0zkYtf#*FBKFqz|5Iee>Rt=!UY=RF%PA!+PSEVc;+x04jyWuz`9C8z0a zP;et3AKyt09HrxKlTn%hWp|r{ZIg}rF;RCFy>6=>AcKtZ{igs;$2D+d$8_A5SbQzE zWQCGl#p=%`3N9G+E+|OKU+*%)vT>_}G|H_qp1!cG)wL|ngccc3S|rn<o1P5?O^xG8 zi@Y&PKTJwg?5tpKBt7DrD{<S`lt)Y;jpQLYcM03pK%(M0T<2^ow&BiPq`>lI+%#ZR zT-V<{52V9tuLLh8L3{Ji<yXM}V2RDRbs(|AJHRwo+n{3!Mh_(DgQ7_*d*Pd+#G9ze z+5mkX`T*kiZW|s@25CTf9m9s2F+}g&kpX3i7*NEQzalmU6wrH<P_~<7luG(mgH3k8 zu<#kKu=-rW`31Y5NJ(zbpzp1C%BhhJWX%{-&KV9J2!X6ZIloR*nx+$<lX5N<WPP2; zif?Fq*Qk&8I}$0fE*VAEfXlEO75M|0>5gV__imv8s%5AodpfBay=|iYK@SFKaA)n! z`gu>Nt}$DG-8}J`UfpjdbHH}`%ci&Y#3wXN=Lo&`4(0{54(6M=w14Jc_S@PRz1<CO z58ufK?mMY%V^gT$zXS6QVBXP|C$S{L-FYK9dyw<mRL-o6zP;1XgB*GM3HZRUlc*=P z-<6d{Gt?Vl;|{Z1U51U7yYv!M{gW|8AX)BWE~p&+OU!%N4#9YA%g&0K)r9jKI4BOA zDYN*os)CgcwIvtV!Lomhf%vd$BtIr?^VgEUcxQ#zocTJu@~whVXw<U`dh^Jl_z~#M z>T~Rl^A0wq2=ksVQv3&T--<cSN^FnE$Xv{BarkbLwH1&hAwi9ou{TJ-2NGLKz>P-z znVBn^D-8S%Dw>y7pTWRCJv%uY(qn<`5JRE`J$=%kf*e{lfB-uER!3^0(2sg#_74u@ zeg`UK|3HdCiDBCf3TcQlZ;=fE)DVDCBd73MX>n%uU>mry8C=>pv#Bv#(y|5XL25qF z^05&n9mv|!TtSltfaHuYXx0NX=SsY2p}M3?Oo~o?mUROZ8H~u;#u#JqSQ2{ZLaoPs zjN}?g*Fmh$vE0P{He)`F%a{13&^QZnW3DA83tFarDJ79wHRQxiju9p&yOE5s7iX5S zPAT9u2VnQ0f2q4R-q|na&DrhAn{dUUuHF#hhY!*=#Yui>7P*An_97irPU5O2oo*Uy zOh-vz=E?#LyJLd<zBXDrY%Rb6BQbbjLFbGdr3IZAHR<>@1MDHwJ>lqR{3b&uuKRc$ zRa&(RM0m(TfwmKzbj_mbq{47k@OqTc9^%<gP!){>A+hT{dTmTLg5;Yh9^SeHWDVf^ zPG5p0ObJX>BS$}QtpRL@Mtm;(zl^;l;yDM;Qq3i-!QHSe;4YHOc?FQc!u3kLQijC| zsD%F~sDR}K4dDj>ip4gzraN(+OJc5dkxPd4`v&&TmSu%$r;c7Q_Rd1_&ATqgv*|(_ z?NHdXIT(ccj?t#VW&9LM1V(fCO9+gvYLQh{cRA|8<q{rsEL{q0S&;6=DPwd4Eo9!r zW)iLHV!I&tETgv~)6t~Fb|S(Vncn^DVBD;7C*lRb0QSuw%P{9=8VL`gW?mO&LX>$m z-~lI6RXK*E5J9AvdGFyn+a;(a3c&7Xd>(S*x&q~)n?QFXUV&&!oZ5%W|Ki_-47X%6 z(Q0oier1I=N8(f&F4phVH{(93yq4hH=B4MFtN%i`>qOJ&mZjva%7L~Zf16w=u@t|N zC8*A#SM1f;Df0UcD-S(|f&m-%BOMFxd0<LRMB$-j-MCk73Ph5VvHN8KVQD`KCgGqF zGZ>7f<DRA(*bWm^Pz|n5Bf6w=TUJEN0bvC)z;Q^lHVAw7xgd*ES279YvmA$ra903~ ziK<zG7|GsNx|axK#EH3-9eMb!@2B=lxPuWaG+ZWd7*%LT;9Sl{1s{d2O5aaK*_0h` zAY#U;d{dMw?7Z{fzcMdPo31?X^&VNP4}#Qf<>k6SCe7GO?X$W$1$etD()gv9Vi~;F zCn%}JBUFzlG%bavdIc_e2^!)%?=Kt;>=SrU%PeegG`3XKr#yK6E3D-&$9I<7GTy?n z`3_|+%QY&LlI~o5@E#!+04sw(UjlbAOA19tfaBt{6O-buYH*haS#ZIU;3SqHLg-Hs zuSrFMHxltGM10k*4W;Z6`f7@<Y8kh%>B}+rAq7FL4k^cPF$PXBT7m8RsSpzmmpDjw z(ki70#|jhi*+>t9d8k}VN=CZ*CV?+O*aWS7?aGcDMH*FIBw7N4g!15Gl-=#Y7fUc8 z@=E*|8dge8sz&-qlL!y}Da!v>O{!#%h_6;(D$kEwxNxnGW=+sVv(lnD%hwwDe!ni- zoR)g6HC%rGcEK}))V{s{`}Tc<hF(E|k@npw(g=@H?OQ<Y^W%$X&=vwo{8d9pPOHwF z=1S_Gc~)D{2-{wQw7)Kzg4=|s4fYP3kQeKT7T7zi7Ca5L*YJ|JHx!C2&B3B3(F6Ns 
zO(H?%7PX1HD1)pGw?xy?yOiLb#1H<&ew-3A(VeWls3Vw&6;tNFCBUlFzLx-f?{9l0 z>9qC<EY3&D3QMr9)>{HC`gjazkX!(kNl;e$`2}+?sVj5N5W~RbMG#Yeilh*{Kq7N- z`TBlJleBgEegUIi6-{4RDkK!Ye(|3$(WdsYeuJPfC%GUcy$8s6o4ht97ee3rVQ>{3 z*i>?fSUVT;29du2q~QO6pzaa7^iC!aDH2SyYB^>J-q%+0le@$TI#;BJhU*x>X_1dz zx5<3Im6y*H#lbF0#fZf#2J+6~4Y=t%4*)nya{)$p3vFvi*Ad5XiK~d{2YC_&;{G)_ z^N738ShjLt@wE>91DpC%ke8C8!RXHHy%lqCamNHAt94P%)%{coTzgL^C-6sytKd%{ zXq3?0V#s7l7}AWv0d&MKAn8;p*_K`XXxr1skZRj_e%o+C)TVz&PM8<lhud@szj_!z z7#R6;&svQ+YBgrw#f?$Wm|W4Ajv!w*lNy7K-^|{M3^e9i8mYTxAQ8Kvr@Ls()v{CE zhE~~Oc`mI#txn>vp$=Ak8g~#pgOEkaztzB*z)dvpU#TW*zC*i%^otfUrgsg<oidAx zdCQmoC2)sbB}zs~Y#m<0mwXN8Eei%e7lYqNAQKEO>xN5v5AXO1A$2ZMX_kg%wV(<c z%bUh1&$)Ul#!PYGZUX$=5<0QyizTeXI(=)M+#R+c(40lwc(fEUf{q;CM01l*0;X;B z<2AIM>7t+Gz<}TVG4u+y55@fqQ~6UsY}D@M)fS$(ouQTV5b`>jrzVexEzt|w)aI#N zy*R^HVsFpgJqzGszw-<~`_IG)*zc4z>|D6(fMAI483X=4<m#rM&C+qtIIY4vG^Isp zmi>!x@xnA5Z%tk@9F=du4^mXSwa*9zdvm_ucS4CD1|OA7qubHlHmx|ZnXXEN7wgnS z;0*lz@p~IMQ+O2fS>f%E3)S)CGy@y{NI!rx@H7_Z?IdD!#rd6>sbX_x<Bf?e8G}Zn z8)Zzl%5aM^c8n^+U8=cJ1|0a`D5}QgJ(w3XPfI$QS7ewa_5E}h;2a$Whz6I5-@E~V zYC(}vJF@TnT5!i`VC)C2VTX%e*UzVIsZMN8p^$2Zg+kU}qkv|(aU`Iic^dCQne1@% z%4LR)%AH8wAvk%E%pG0JuqQJ1(IA+Z`HjQ<;oD1okMpr~3NjyTKJtSt?vZ(XZHV^3 zzbKs&qZLp|Z7uocN7j5ord0GEJiB{@l&P{&Mj*+&p*>)DhIFP=QW{8&p4&QuZtn=V zZZ64JWj}sasaHP&)^HcKRrvz$Mw{OVxOWpg+%}ZhFHktf{@9bmBIHp*J5%CknLM~! zDg$THjev(0pF!ntz^E@IzYsSTJS0hu-vSnn7@Eg&KT%>oK*H8?Yd@n8<u}}rs91o@ zwlQbiG@gGSqRkFrPrIN~dKG79l4G&ogo_NrNXqJzh(@qC!Y76F$GK7%=410wAb9zl zwRKIuc7eKRn))GXX2nF4+FA=hxbVHj4r2lCd&N3h-WPCE)#?@aRU{?$46^vD3zQ%H z8v>?Q0LdAhvwJ6fe`RYRwH-s~!y=QFLVp5(V+N``2PuwrW)S-D;7ncuuNm@@yQl^5 zq{4{+04@|hEdqVZ!7$Z_Giqz;*Q^}1waE+%5ds8dJ=VAn`)kNLqK&-#SD1*x6dLXh zi>|>AN)PEo(K~LOaHQYF8ty96%N`FY>%bYTCBzzVI`a7f9wl}PErhQVybREN)Ngz~ zK(XBinxh53W5rw$6x7C7i=e;-u05IF-tOm-duy5A-?ga(-DGv@1pdNwP-OsaOTX{T z6jbRHRG||$U!zJtr~(%S^;t9)hal$sQ0PuX&<juy=;P5f;%@)sr63L*bI?(^Zve#6 z&hW%EREPVNdVqD``;&WTB0EnEpt9s8L!?Ausgc&qqXse1>ztZJw0smo9EP4mYn}Lg zE^>m6i=>XkJzX#^h#3U`@gu{ROkxZINommdM<klsEClhJTLK&6Ad4}9I-dn3aAN6i zc}djNj0pPfW{938?dL(*8_Dqqo2(%r>u`JO2f|PrvQbQc$+@G%oE*SJV!9|q$nP8I z6q4UgyoLO71cdzNgDEnF{N|6yuZQH<CFIvRBER`V^80h@;(6Om`0H-lG<US@9w)kg zO?HFi#CI|0V-sDyH{n=-AGfXLOLmGLuA?eJA(CFygvQ}sD>rRF!-bZb3l^*8N6734 zE>CLSUJ?$0JlMN{egkf}CFo+la0=L)c$<dwMLzW6RAOounA#ac75rWR(2ok{Lj>Q$ zUfysYQH_xMymQ19{rHMwSr7e+IHEIg&za%wfAmLxqx*k|M0C99esJQ&eLrE4S_+%) zUwg>Vbb$Q-w?hbVkqe)I`pk_o&lPVc&k%1HAN&tWck^EH&gY-e`+EMdh<f-R#JiBc zE#9;E8{$2icZxTRE#f_wKQG<|{8!>#!v9UY=kcH7tsnB68~yxYkyOEVh<6o_iT7f@ zMZAMt74JLvI`Lk{*NFEDzCyfL^E<?Q4PPwY5ndtQ>-aqJUeD)>x5{UW_hw!w-dlJ9 z-h{$)P2e(~OR3MrC}<bKW(xNIl2XafoPR2Uq?Gv|Metz?zAb`}Qt(v~B<C*PCW22; z@Hr8Dl7c@M!KW$s1cLgZ+2r{$^edZi5-DaGzI1Uj1N1;6KydCBzXrFM?rK2Fw?xWD z__G8>3XE}-^0h*?;$R@I?@Z;n!79b&OJ9~sxztK=`_fmWQpQ^;`M&hksT7-)Qs7Hp zlS=s<yY|4w<NLqbI~TyH$}92TWF}+?ff*Du$iqP%Vo{9pkPv7SlR!`c1A&CB28d)Z zi6M!TdwH}35(aFNF%?^D)!J5kl|I(mt;I)cOMoVTu0rvFO50#rz3H$TD?+G|`Tx#$ zXOc+->u&r1?|-{HaPr;z-S7Q8-#O<yC$1#y^E>6UW^C%za^;g}z92r4(tvF!fmr5a zJS;8b)P|e0exUHohGYxhZ`mP@AX0KDZ5H&@jzzaO0|%#HqT8=uV2JGLdyRwY6Rw{P zZfILze29pq3yoW+h-X>*`ylx9UblY0a`M9B*I1homJT+iV-t39e{gq<^GEivs4|2< zxIctH(uR%w)Tfph=Ogy9)$eh8aj!dan?uoa!GU_A&X^QuR$}#!sT!$NiInD|WsypK z@cl@oUX5VR2hjPJdRQURhZNc?IBx<t@AcGc6!i)Y>wa}Ch{Aa>SxA)w3SZ@#Yhsy4 zP|l_8>ll<EneUNRq#ZVgWjMl({z6ar_DQIo@-6HxUvi|;htcSVlw|m9^sjX{^f0q2 zDud=;4IP%?MDR>Zfjds`wlS(vm=`-E#+XE-j-OE!V~k5Uu8(XsT{F^SjbV5Wo>62o zT<|wAW1Dc?K<tD|0o#V}I@IRh6|?8`ZdN2sPil;%uSn)yI*3R|Pw$Qu|3_B^_#o-O zgl~(a{~OYO-rpP>td9tk(*OB#{DS-|bmL}j7PX|FWyW+mHw#8tcSev`A9oJxVHI)r zIzJC}fBtuzsb`lhHyq2B7q(vsO*?GTbSPF)F~!QACEpi5d@MBfo5$}?)3ya#pOeb^ 
z+wDFs;M#2aFzVB}Ee+c~O(*3$?mBTD{FwqQ1;$A8#-k^weojo|>{!yRpA+kEvH4q7 z>MwSu&baIjt3t*2TVnmKu~LS|yF+cW!eGx;N{A6zzSehtC5^Ypb04q^cm{Y9*a18Q z+y?|QzjnMK^RDB#Ca#Hl0`~-N2W|)MN!*jTow%L2@I~+HYO)IpN3(U<I>XHo2uY>8 z0LRzUv=IOkf7x;r-b;<6pRL-5ePmunw+PJ<3EQM!11~D2E8GcVdpcp@Cm%l6MZUG) zAeYeTH)!c(9!V?GCugianJ9g-g|ZMr0&lyA=VyR6pmDZs%%S=@HvfC7_1;&l_b*XN zOWDF<div_USpWN~7wV%zZi@;>4X9zb&)&27-<O_sZq8$>M#UiQDHLcXkO|BK76Uf} z#lTvCwjM!SkHAgBO~M_5i$(9Rxo{B{{aPX}0;*qg;5u;axG3t6?i;I(wvpa_zz*P- zl6ItTX4`0isJ>9|)HbRgs2gD{zg~S8nQXY9Z@mqK)Iy6ygSF6p0HGslrCqpCm`1G2 z;9Z;(^RWclWeyq46nhzTuGJW9#yt`t)dX4tuLo}cfojU>0>2U&dF`0O*a&!`g`0xV z_4k;kA7(QOzN}0Egl%J6RIw(gU$yQ}!0lkN%H_SXAtlK|yb2Nn4zyTm#DsuFp&Ma7 zD86p=D&kt?qCiXFwf2KdgFYlWA0Z&oE$t3yk?7jCs|_Kz@3TpCaH_7c61cce0^hR| zfE^y#9lXh7R=MOj)kDYw_3Jrdm_JacpQ{0d!b{qMmzevB9VT=h;!((XN0kPz2uUxI znxI8Eu%ykLM9zxn_0N)pg_>Bl_LQ`Z`7HfVfMfuoFEsK%|J+1JYkHCh$OH%TVsA<x z!Y90B#YVEnUxec3m?&x#7b;>A&K4fHf7Uk66I`ltZsj&7R0VDxhlW0=Fkw-#@dXy@ zu!@b7A95+hI%W^S*JI9mhC12D9vA;dB$?1_9`icO^Puv)C+vBd<@uEIyf5rI5YK`~ z9^#E!3@LfgO5S6Bgp7W{BM;)gUH*W%EJztC!Sp#EGnYuAsq%&%{n?U&=mI&VUx|R@ z1a*oS)|At^uneK~6R^KLq1Q>g-zjw58~y8YXd<^3OxZ5wBHd(<X_F)fGETGtb@4D_ zyOfWQ7kbQhq$K!pJm^y2(JRJB^QEvq#}_%lsPh8><X$d#N%$%f9VFK`UfM7U+R{d} zGuVtF+cVu9-X<ugVW4^$Za(q7-VD)cyj#3iOI+9^v*J}e;Vc&lXZa5i&a#eYG-tW% zyOEf|+=!~-=?Key^f>iksOFkOUX!ORB!u+=f$A>*d;LXqo()}ik#PvqOcQxo7xa^` z@U5Mxjg)?i`Azae-;PKbp!Cpg?s<&Vxbtd;>g7S<K6NK1urK!<Y){2)122uq;|6Df zc^Ecxf%(I|FtKRWvWv_g^H^X7f$C&&#>8Gt!{6CPg@Gm!dqdbrnApUK0RyqD<OR~Y z%HRTuNg>O0h8WWLVO``+2=Y<3G|DjLB=$9ia`_xPL_ArhHO^tYf=jil8$%&$eMWkI zi4vc`?|vp2)R?@>G_6q1mZ(4el)V47>MBBZ*W`WXWm}cJzboLGuqfaeyGU%~LYr}X zO59&AF>v!?iHD2!50OdOri9fKdp%8<tGBF05Nd+lU65M~A$^8_!`Le^bD64-y>iV} z+*$}E{;UCe_Hu1u!_T<4aItl7A@gSrbFQo>^01tT;L}p<V$19Vr)uiLU8~{%Oe`?G z^>!%(riK?L1{NizEOZ!g>MFyY+=aimhXD~B5Pl#LWVaj*8TN+T5|=FWEG;N3xQQDI zp@R`>{}80hh1PPy9JfV?0WL60S@XFHgl;qAN^|vty=6Q;f{xDws;%i1O)wTw7-IVo z7Oj+;A$lT+eC&q({2jXq%NZwf8%HrWFxKvW_Qw=GX5+;|faYRmnZsj>B|O3~3NX%n z_ddS!0S!0TV{e-=9M^d1oM3D1$5$Es{5eUnLBt*=8a6zktU`~x^G5O%`pcH<)x%il zT`4@k75PH#$H`DPvxY#6hn&+GKXV<{<CiKghj@+V8_N|Jx&56k<3fTPgH$N{%%z5X zj%4vuDUPg%DAqg;`E}<D&ZiUSpK7-24(G34@V6%ihjWRG{Pb%YU#M*_sy#Cd|Ft%M zyW8KqKQ(7a^)L$U;AW@qa>Jf_V9jV=?aCN2TCS58VA02|^dqCPIZ-x?;7#1{bN-}o zi0uuSK2r4nwDHiU9o!Ay5o65qx5euH>!5ZZySBDJwVVjmf6aLFMYs^BvXWw2H3q!~ z(;%lS6m;T)pvO`cGg}L5FC9yR#x_hBf8BPvu&Y-G!c+(*MZzTa`h*7T?%V$yJG&R< zlsGYzZp4?Y8_s}3d(e-V;|z>mx-JBb`a7IgHZbhZcV4;YyWqYN+&KEYvg11nH-1#U zgCkE6_Zj?-0}fug&mf<5UXj$nXS>6m`@EvcaNhGuIE?^Ftplon5?}?e6z~Aq066a7 z;k+W51wvBk9|O+-FN#kDC;q>7UP*pP@>S=Rw(p(yyfTGPa-t#dwoIN&fNenJjB(EM ziiG}r=M|N1B&}|&{<F?2;k1uah7-U^pbM~*Wg;*HxE!Ew{to9A$t(~`<8L;w6et&; zNZ<S|=ap^>TYjGTJnR>t)#{$@V%5uk7VPX)tx)}9i~;_$vBro~X_@fGK`p*c(6Shm z_ccfy4kG%9JhMigIdnL{Oju?TtP=+pgkUA)nQwrAeEPsq(87sB6bdBfn??76cEAp| zFgA55t4gq}O8mn|j^XANy!bhC48jd_s9~TBmfYvWp%H)+$2)KWtZ>$eqk?x<o6jQ@ zFjndlb(Y{tn8SR5BZNr*1)XM~JLz*V$<OjtoflNI^pG;4K<@DCqjos-ON6xiv-?6J zOlF@(WELF<T-v}C_iTHFPzXn(2WbOwO_}<n&=VJMziw2zc9yI3Z?jcxmlwrAV&7qN zs>*}%En;RExS~IXSp9J;Iv|J~YrNURrg*tQC773oWE%2dA{FNFz}RpRg_uvaG0X<4 z)KO#ha9-1rjzt~`h)KCbm8#yvWnIKul`Kc%2BF2HVwY^#;84=0h8L9xUmS)sI5efu zrMsq&67AV?*ESC6u?BQ53x=+at{vtpUy=Tn>%hjPRv@fb>>NZei@|TH*Pe_fyaRH> z+qn}v>wgrKRZayp#0=C6%HTf}vvC}PLL1zZe+v)J`OV#n=)i?}W&PEaUEz{$-9>27 zp&VDLisExmUlyYe57bJ0b^X`NPKqF`ALem;0ng^WuokSF$I*omA&wcc<->L*C)w^$ z#@105(>pikRtXe*PBn`NCWH?v<}230wAUWEut~0FW8dub!7=*+d&g-odQ$iK5(3Qy z_h7xtK6cMla=P5A1>046G*w<cCcFx)i|N%1)tOq!yEKKxMVy%I^Uq`)PYo*;6We2$ zTQD^YA7k^_xG=ZuWYCdY_EFH5TXqWbD|B)ozF|Z^c5}pE?uQK+J}++<j-Xp4a=J}l zakf&I<nr=2+>|;{F2`5r2AUC14SawNdSxguK5Tff1wp(ReX7WYCr5Ogjhy&`?wYGR 
z=ANe%{=|N?Z*Zu2VNWTB^VlE?Ocdog(hMR#lw^kPwpNPcxZNv7<o5n$;YK>g4Sid) z6wVlH{)&i*#y*M@7L64NAM;8{S4rUpV*{F;2Dw!$>r^WrA`-cQ)8U#<Q56p>`$0fv znZuaInX8j&uMF()eo2pcLnnx>(zYf-IaoN1od1%^SY&iYDsf*+$~R27Y08`qCv9kw zOjU%BzDgnXV4bl>PIk|Hi{z}OM`r1#lo2###z@=|#HAWZB~MB<G^wA6Od~yVv}}Oc zD2cG1tE)pIs)t{SDt=8@1B!q`Y0f6O5)zp5y!5f~&z_^WLMO5-pE#vhuEXgU;kZ+? zY1^Cq8@XtZLJ2!0ade)5xhlUAJ#C?g0Fp6RV~+-Hw1!~2<^&S)*Bs>t)U+%SQ46WK zB&rYRMQY-2Nega9LlI`8$l&K}0|k3jgm<t?8RH)mnrIcY`7Fk7o7>`SaHx-?&M0K8 zpVK~(`KfGoUd_k~D_z%%ni5q-x@~s`2G{LYmD*i>aUc7g{$0pyv;}|H{B9h!nN)WL zUiKfmwE0-SaEG;II_xp|W(#Pq)Xsjc&7=7)dXaWM%_h<<V3pXj6<F3`OYF>lRvOXO z85-I}-KDi;2ThPg+FW5{1GBi~x37s}lTPVLNDgi}h!h;*XoQB5g8>Z+<530+()tZK zFJd{Zq2?7VEIGF<moA=KLMA90Wm|bIFw$B=^=1AVGsajdN=1e4B242Ol~)#u>RYp3 zk*$D3t&n7nnB$*kl5`ZzPCdQxrn<9=cb(gmIV~)raJ6}nWV089VtQEa<f?oQnn#H$ zENN7Yp|Rw&!I`%G5XpMXb<MO8!J}nTM5e9gIM<@}BTe>cB93s}thilfElNyKiX5FB zh20b=d=UdqBPF8|xe|g0#4%;}<MWD!!ZyxWBjq)v<`v|%_;rU;<<V!N5W?)D)6|fm zI1>rNMjB4)Fa%gu-8S<#aM?jA+JXZZks&=UkaMtsY8^M%zQqUB);D>DSY`Fu^Sbnz z9EH?R_5+6qyE$#m!}kwpE@*%Aj0mNMed8m(d-3J$gc?6^mj*7%!t#ONljFiJRIp#u zw`n$PCsp<X=3^16GSAJQWnvLZj6^NKYg0a6o0j8Mxhjo66(0VqS;3!;ReZP=zfG0+ zZCZ=prcG5%ic1_ZAN5FpJfXlwEJ%%Ls5wb7L?DqXT6^wC)dOZe4@^8jO~mPKS}Jge z%S$)FeG9zgKenkM$4vb|zi{FQa#{Xz<|bVzD_M@oO_jA=i-V16J3R3amYHlvCUXAm z2pA^<H5~-_@KFK=b5mb7rk;Mo-|TA0L3_5<636+L<FMgD>?OyU0~523dloHJmcFbU zP~8$~Hm(%6$A0)&fb!Z@qM~U}s(4aSiKMN|60DmM&JR=xyNS9Y5{cTQLKM`#N~?$Q zo0C4SFd!5($($SLEhu>i$`o5mG-d%t7uwW*Kd}{0RewR9?YS|sW`dc}C;Hbv9UcDh ziZCuU5_E%s?J)f;3)E6_$qeH*!BiRx(LTW&J?5NP%1SGDICsWdK2z~QIB`xW$E7>K z;_T?p{nv?5AA`?EQ&$y+s*d;QL_}$vSwe}zd#92F?PyRHRFw)|o?;~GN9$@_QpL50 zmld|RlMRz5f)(wwup+itb$P<(DYKQ(5NRdz6g_+d$jKvuobFKwFjsu#<RJ$b5g=A} z2ewyPm~oF!L}&6W(JUs{f<=p%l1^EfkA8vSDO25e=(%PKt;BMAgB1c|cAC=FHA7mk zhzdaA4qlF?S$RxtT{A4uuXg72S;k;#Vs0c^ZOroFL<_1I`ZEqoOEEP1v17*sPa+n4 zM7G<zX_B&d^IcgPxQc^9BOxdwOU^~57MgIJe7|UU!*tb-<`WQg86vE2?VD+fhRN`U zQd@-T2JWe(g?Kwa8=6CCRz+2A(U*G6C!S{A?VMA_&NHf9jnW1i>0fOAh6Kav3!dXq z?80KUg~bXBPJ0m=Vx*8_SeLKkt19<Mp3~VmBPdEl`nezF-9v?D%4!&)7ADEE3iaPK zPgjyhp+nhrLiNF7W@?1OH$-+2(H}P+3byz|-WwRG6MC9xuSS8WG-sghMe*2aPilXJ zhp=X8OXGB4Py2)Tp{m;dj72rP=A0U@e=eOSr-g{d>#q93Pg=6hqVamD`4n}uFnm#d z-PMxyNw@NAd()E6GTWks!eGk_RjC4-b#F+Uj1@sg>J}2h;?As2y}xs3&Y9*m$AIQu z%CF^|W3A_kzLm?mJYc_`1BZ|K{dD@z{%NOMXcprWjyJ~Zm&45;17{F6_KbIZ{bu}e zZEWm2Gg^7t!&A$QHqPbkF~*_E`)9Q2{lOhWAz$q2Hv-K!375J1@D*NnHdIKnx<rqK zabfft!)E#mn$231ett*qHE9;_=UkKORg^^iU-Q(Gl={+|OU!kBB5PLU;Floyinuep zIFV-*=8VbhaamJ>(>RWaAK)m75saoPQO<SdcQ}8;3PteF6<t~u9jAZSS<CAj!rqb9 zLu|B?et0onh?Zn50t9Bs^cHP$@r-J(wX4g_Dhqk?@-UZx1Z9i9ShSj7CF~O>P!}E< ze1oA{77AS_p%^*SP=cQ4F^^FR8A&yRA*$-stIIql@yG$)hLVY~J-k8+UUo_X?2-UM z<Oom%gzBXM`-IwV^yl4v`WQNpa!(%%t6?f0JH%!wWIAR$d=sCn6HbmJ7(cg`%WVD9 zxQY4ET-I&`hP!v2E2Ggnv;>371>VH8VBt}wcFL?3AnC^RvY2N?V43;m0q+?)mX(uQ zq0UY|3&z$*Xj!~joxy-y8^^P}1W>JPEimlCNvW@I9L4Elk$Dq-frAANOOk>YK&1}V zyv^VeAr<cYZa5hjD9ONib8b099;q)ow|s!hQ9gB_@fwGTlo}Bx93*Nsaz>C9o6YOa ztq(}POI+yjj9uDpkXY(L=UuCDxd^z?US<onTev6Ef`Xq?k47ox6(FIpzBVys)s*#~ z{(7S)X3KB&gN*}baKm86fi*u(OQR7DGx&T;P145c5?ZW3rL|u`(vev2Td_>;MKty& zqGQGZ=N%wsAuIB+;7gXkrXY{5TxbhO8@?u2qF;d{xFy6G{I!TRZ+&ZHnkB3Jp~xyD zt~uP1+KQa@_)|34UWyzgXZ`3-1_)l!IBlC{*+^9KIJfK|Swu41)K-aUUX`gVK<MV> zj-MbS2)iEdE)9a7U)gwlRQ}V#`Cnu{{t@|iL4f<GULwJxKUD;ajz_?2M21@>AIVq0 zSiD|Q1yX!hHJmt9<eT3+NL2*$y_bhT){%ntpHsxiSZNkpzdd5ns^2XMc3Acfv;T(# z?<nBdz-f|`QmQdRM^2S%Pgx=ieU#}q!n{fX9f8Xw*0b&*locR}09b`1K%xXdNn{c# ze$d@C2d-T~`)vf2xgaM#sfN{v)}n;98YTjFFyGP#<(d~0KHnTHv9J`<<lWbenqO8L zb(~_sQ9{Qf@I>k~u!L34tz=Iv!Bbg~%oQ*tDag5`PK7=eUZUS9p}<RIi9Y<PC0eA0 zttI*b_@L4EYaXaQ&k`+CnA~dVUZP)PiGG#9(UA+S$iW+haF*?2Zx|}8FSIhXN?*(P 
zkX8Cip(@NqbcnZ*(bPf>s(3~%va&`GH@`wk7UTQ#F4tl7D>yozE_0YEh!wNxgDVXT z^lP-oqmXtastbojFsL^IEfeDeUu*7+J$*!Qsh)S%Q^CX+qM#iF>Sf01?38#!8=LKE z{uIqPotIW-_m~Bn)v%J~8DuZ1tiSmtofaH~-8AOB(pWEA+eHby5gd&=z^<r`l#3cd z;NrRi)g5Wxxv6(U4&j}RQkMA&3_RtN2bgkh*{nSCVz5D_KDXusa+_(`ewsOX*YxEv zN_T7LcBxWo+z9>}3FcG=(Id)dkFi2JZ*0m)g_4diCv&o6S-8O*OjcG)lN*C_|DKe> zPUqJ9SW6KAxSHWn5Kcn>eM6EJ-?)%Z7=huFBnRnrPXof{k`og8l=P{IV&b^VyoD|m z-KGT_7GW-We$$j+A=;cs!xfMT>ZV1t5G~P=q!3VqaOJgQPSccUuom4x2BMF(tjvz2 zf+TKk!b_0IJ^GU1d{xf38J4LZ*TkOwL(`mC)S}%vjX1L;p3^S`7*Cl!95*8p*SX~a zK8Oz2#Ag}?i^>ipZHB2zN*k?1rwGJWr9UgJAPqSn#-g-1&3$uTp7|uwx8k2~e(-8| zjOha{LEEVit?4$=cF;Pp#g=t~yHuy&7{34Xp)vawvNKLlJEP(B=bXgCWlaP(%s0=F zg*1uI$-c`BN`@FXpiQ$*wwKU`;wzKQ@?{&$m4=l;${>=7EF$sgij8i%C|{sscAoiz zCwZ{SeHl{%nV_`31>ORATngM8mTc+X_hl7PSLVJ^ta6nbg~kN)I2DYZ@a0y8qvt3E z(GfB`Dbz_0IEfzfF1o0o05xVi51q=qcBEauB(2dk<FNik=hOS0JAd1J%rO8B;)%w9 z?BGb}(}z-)B<cep3+#08eHCj+E3SO!!c~`Czfu%*xqj7SAJd}ws|M-5qjxRM##m8w z@TTiSH|>e2I4vFvme2^slp8n#QjKhFSgw`}{Rtuy`-1-Rmi_v|u&`}#z>)mGp5{Ng z@&+6UB>Xyb_UuLkUQbVc0qM*${trU_j?m<nC$}JLTX#&0iK#P2j1xycEKZE!sC$R{ z*BX1#1uMF_ukS+kcN$C4`!oKiUydf#cSUk{k3JNyqj>eh>y_ZW%a&VZz8-;Dihlhk zmctry)1J_{gP<lB{<cKX$q%!JWYd??eRJ^3s&8ctaU<#d2UG*0M)XJ^hS~F5?ufmV zyKs?tA)1$Hq=?-;|A`T786qQCc6KQ@i5iw1N5|E0GbCxbHS;)bH~qW49)wk>^dEB9 zbgEKdd%5{4AsUj*U*LobqX^v@l7L#!+7}W_G4Jv}Magf>wu>%_A?96HDh7^~U9ha~ zFZAc8wI1j)Tu<EMAQi0FI=6<vh-BJc*O)docGtnq`mD1kq|Pq07jVH7{YAS^ALJt6 zF#p?U8<wEUjLWwt+w15N>w_`c9Ao9xU*#o~1#2$fy<U|#I3=+Akcsjq6yw<%ve<uJ z<|T}Jka=0UN12BR7e4d8p&lJ1L8G^qP%uuQa^1z;@EWto*^oJCf=H|Ebu}y=bY;M4 zd+AiVJzLis=f<I5LN6C~)~)r9fHMu+NNZLHOR(0GIVdh+df{1pe!$r{Z_qdim>~hb z7ztQga~5kD9qc(0cw7QlgM=I}A%{uGA(4=TV)Kwt;}f_zV{%Gzc>?jFDg8o2uT)Eu zbIVs`dx28+g7eNQ9=Z4K{OYaZ7axNjI_?0U(rTSsL~kVdf_q;?z6`5@+={GCNigDS z9jK<Mb$^W3DOPgZ9`sH%aP8`d(|?exIWjiJ%)G?8<q2M9VhFn4mXS{5syldu&&CGE z#ZBobCQmRD(&bBwEdf(g80=mh%0kVXb*yj7;tqUtxg!i>w%ROkZ%zM_bzwPMM@T4? 
zpg-GU8yJXh%n70CCN4NGweY0TPknd@d&?n?V)W6GSER#T%G*x(49X+gK{n4};01>U z;;q`JNga^`YK)=m+{({7DIGu^om-`bf;kJ7;l{=RTlTN(m(hL)FB}B0bjwk*)4u6K zGWQL-(YbR#TJ5uKkd!ptY`oC9^MLbL4f4t<Y@oSeZDel<emR}<jNNu5nASaD#%6%` z*Ds9Q(7*A*fU|z_pmBKEjL6&gjEP5r7o0wFe_6~Tg$tcMtZK%gYGUEZLyEG_s61Jw zg;fp+?VSqHc;Q=T9&<DWDDdZ;V8=NL$zE>7EMbB`R_1o$S?AUO1Az8v_gik@;>r8D zjrPrE+b$Ann0HZfu!T`Eh*7c1|JlO=CNn9yoKHJe`Oh#iUgw>sfx2^5!+?y8G*}?6 z_NOEe7QdR$V!2~fQ+BLMb)bJ2w^Uta35sVg!)OcP{8=ufj?_RwBTMIb2g*%qpe%_D zlnJZ+HJu6izo0T?RfA0iOQ#GLc{szvxIlbMX20<X!7s?*iMIl8Rig)Xgu{H`x2laT ze~cAMA{pI7Xt)faq=2(YA7nq(PlnK-*q~!oKvSXU6;`!&WxR0c&2$C|6cjzpFe2-p zS;J#Pa(k)Z$epX5TMKwVBUJm%xDW-zNEcMVPN4z@2nwQLDL%;J#m~z9h3=$eZ4y0A zh_1GDD+w5Fj!+qxvEAV;8et>nQx@(%G7g<#wxK9KNU<x$2hYm#%yKb&e>w~JOGJa; z`4o<YTn3-?n3u|pS)rGp8DTnHwu@MQ!bgLRXC#}jW`vC@mfAPuc-)YDF1FU6_@ZPY zN+s0@fhw8(=v0=g7E#F#crEpXXIrxlCQ@4t(R%-e!XqtNAy+V=HA`d#wfe$PQ&yYD zbRyd&hvYCCR{>F7p>eKfv|6V0K4b9dW-TpVGvZRR+H`wuPN-Hau-PW=d5%<e{hB|u z`kZWiQno(cJX}qYli&@SJ9&z_?*AoTNw!^xRVZ5v4m;KC&>f_#k@9=3S)C-4ChR7p z^M{nV#Lmohz!!j#fXi>D8QW88Iu)kh5gZj>&Vxh4tA8+&2dS1^qwZi%Jx9XWe|uJl z2C2=;l>MeuJ(>OgO4v%5&JrRFhh1XK(pci1Thr*n)~pkFYr(5|Af6T+&jVkz;K*50 za@{#gL!*hlB6YWOtJ8`gnUY^CYavftTQN{K&;h;<-kX!eG8oSn34`Ii3+i%C@?@{e zp}H}eKc@rT@(}8DTmPDqJKT})jv(5DPmrA!e0+yXkGEpE%twyVxcx*v<r1@uZn7FW zho@F8iO^~#VDJZK2}NI4IZOXKSBRUk4ze0{Kzoxh_d4_|NoF<p<TFIvHD({{>_o;+ zj6SZ;+bN@2q7#d_=ZH8ZFzwSKNY<T)vzAbd$9xM$VS)J*{sy#moz@f*!O%2jIH*JB zUrj)4ncXKzsA$5F;O^d&=5oARHIc#%KEg)8PL>l&3-*^SK!zr=?8iA}P5C{!_6uMu z>r%`F28JjbfdyC%C}10`-5(>`Vn6kr&rO-JV{6^D^*Nu^dOyjo&q0H7Em@svX50TM zBZC%-)o(A0<<dw#**pTeqb9BiUvilFS`{Kl)BQxybNJf+21<7R!V)FYKwVg>g9vVZ z{UbHk*={a@gmH<%S=hXvoobr-5Ce<E7@T{+o2Hqwt;Bi%*{Q4$1xTg<zm}Q!td_<= zt8p1z*J~ToYQ*)=aRqJt;Xr4(#<Zq3>zT7;c<EPQD+lK?-eRpc9C@=NIm|c2pGQKh zj|p<Fa6J=aW4_2Z=#O7)(8ls{I*Y*>&ouct1DHajH58i8tvh((V#~ACbJv(=lGD<h zTjZX+Jl5)KQ=6Szx2P~D*cR_t&m%pxW)KL#nq;h?JGZXF%lWIUvy(&F&Mo74$#!mC zgwvX3hR%wkW?}m!c!@1X8e{s4(rm5)yY*HuR6H)nBVygrx#erp$~Hy3oMv8qQZ+FH z+_}Zz1DWf$F+iMK|Cs{T)tK-9;@6r{AT@74iVxemlvCK?1a;nV3&WqXI=|}SA)Nm+ zFNE`VZppycD#Ig|C&eJEt#=c@J&ye7(QzU^HtQ^ZjA0b^53kEqcoepQx+96slVYki zOX>=vyeyU=ORe5lh28~WP4z*#s_HE3Q}BM8M~WU^k|;Ko%bPN1fzwP=H$50VDt;~T zZJjAKCpNvsAQzoIVY3-B9b}NljBRvWn{&4I*rsHm9G)|TV5@MtUAvCO*S@_e;Xpk? 
zW1kqKnE?(2yNJ}+AP33XYaQ-DjkTl%URHx?gIZM9bWh^&vQmaIb7&mz%1Q&t6CnXv zvM7BI7WVDcY7U<}ANN`6{PLSLYx{j46K-1IrKoBu#Y7GEL16{B+`URV18z`Bin5yu zcd$*kd?H~6t})W=&lhW}wl@B|%cZ*&3ChQw%~oBOW^LB8Wi}xm)W9N12xL4We7g%| zDAgQIJ*&?&pCx|7^dO3_Qj9hoIq{=N9AzCB5w4u$y@XgWIcTq?Hi#~K=PjzUhhXLa zieqi+3l|D27#8qI(@UDFbXGylf4{A}j5i1a`1fF9g7T@gM&TCb2DU({2Atd@YU!sY z(EiOO>@84LxMNf!ya%JxG;pD+VmqRn-8Dq1MTAU;>YI<zn(=Ss7e3W07WC@w{M(N) zno*a7xQkGyUJVFQ>}5{bFXWZooNo>R1u454oWxAviCN5S+ge9!p*~nCs4tt5Z_aw3 zUK9hH9~#y9=G+J5jk~Kti~4sN2x6f~mBhJ4W^suQ=Nh8UZF{8LqW3?HzWf9-Bvq!K zd_B_K=j+|p*QT|xNOA-dAlBJaThMRb!B!k9o0Mmkh`k2EhOT6wazPNGP<eH3Jwc`s zjIGODA<K$jY#r@~)rT(g-uta0$4QZA$Vij#qDDl?dp&OjgVXiQ?mmU;f>y1H++{A5 zL^^FXodxC^4ranbMx##W#M8D8u!s|vieB!Mp=7G&>zm3>D;0{}X%>P$s#-Yxt54eN zYEHHhvu1B_l<6i_s==KPhI0eEWv40heyc9>RxXWQ<0wcGd$`gBH{l`5L!iBM4-L4` zsL~Ff??Jbq<eK-kFyymLwI(A)B4e&VEuNeYzRb74zA*>rdokmiu0%py6FY|g#aZ7% z!)!tn!g<FpdHRK*L%CvRZVKxGB6XI<1+K2aVP8q_g{cioc?@WZVyhH$%PB+*MhKq~ z<JlV$HrZ1@^w}}gBt{>ohXnZXk5o;iXw&YO+}HKnba?BjwJ)QdmAXri*(wdfLrIGi zVFf75<hRsW*8EUfd3u~Nz<iA-3lUM*IZp<kPyKk)?HkCp`ZhYjWi1!xrr$*GQ<=2B zWb<uEA|m0POeHNds@eB5n8xhJXn-t&SD0(NlQ%c<7_q1TiP-2EW1Lj{oKuWKvZ5<Z zNpwiBtlr=wv{G>tu}tV%dFEx3vE<+~hpHUppdnPU9AUdD@*%~N+pf$wDXN9d35AqN z0X;L0SW32h`1ugPPsHd#n3gJHv68V0+cd<IU5yQ2kxfi)OowWf@7%fG4%Mpe-CD|W zsI%^4L2q;qE*|>zxPr`#7Z?0xl(=9nvufwsYXb==`ySgkxc2S3+5<85gM*j%_T5~2 zAU0^$7TGri2ljla9bLOssQpH~I^q=WkuDgg?GiogWF0O$h%{@j+8+M2s`t|C<DD5> zcG1#cLSSGqtXL&^-AzC)AueaJeC7qGEEdC|2s7xejTeE1Yy?-e8;KmnVnEmE^x$;! zJERBQ(2o<n!Va*qku&QPj7w!y48z&ehv{)Gnmf>peX(F(S>`hIn%;+4*DG^L#ken^ zsFBQQR=0^<f<{d2VAS6D_NC2l_nUt6U<@+M&t|o4W9r=rnyA&Cy>>EanSTn;ftK5L z#X(?L)sS_-`SdQ~;@>JA&+K}U)q9JJFsUClBnPryY|6GbZAiv4c<06xx$Ydsxxq7R zc7=8~dhDlm!*i}5%yJeVjH@5!=j4>tnGS;}#pv8{fJCMjhV&~*Y4UI75aB;-tFZ^p z25n`w<(O<uB!(k&eLCd{A|-PYyjU~KywYS%Sx4FL?h~~-Ecqv`6^XeFK9R_*jm(;m z@gi3&?v@%*<No>Pmxx^uT#6tPCx~40(S=MBCG;fhgpooLJIeJ7QjoiH>cuX}6`ly9 z63$^a;>GVZQA2%Hn6<C5&I~g5!Y#0tCweS;xlD_aBf#PXV<RvBSL@ionrb>8du-KX zSRGa3Bn>%jXfb=VEVdzQU!arL$}xq%T6m(NaPP99%VS>q4aQxoU2IAQ;!#3moM5wQ zFkUndFj5fHrGNV2I|dAt;WVYYJmyUGC=Dlr>1vxs#X4xY6AYVQf<?(_!RnU3^CIJR zH3H3B!Gam$!CRCB$+KT4{mwaa5V<^<Qg}i*H7CqR@w8!~w&oxPN{POpjE$5<SxQ>Z zH@J;W8{%UE{ZvV}i!DkDmtmf`3&vddZ7QV>O_ST==AWew6nqq{pLTC7gHUP_sM&`? 
zr)h#Rd_eJMw=ZGnA=3?ZF`*I3y4o|d^h@*1B=SQ-_c+!CVpL8|Q?Pw<ym8Qs7mTC$ zH{=`%PMp3pM!%|dUF;0w^4fK_S;lBal*jzt-74x4@YlG&Kq(gtcUyDq^jZ2#Fxn?( zA@2B!4J+Wgf|shs_%RV^yADCSF9wrhS7U9=p}O$xerKyWD6(PG8DXkNpeHxLb#QLI zR@VM$rcCOBhEe9dG;nw``>wP#P0%W$&{}&bHEhk=%U><{ln2%<%(NFhdFH0)R7dsT zI(t^AJ_=oD4x>miDi|EWX&z360WA`1Zr@l<-Ld|-jSlP}PD?-cY<RWw4(O*@zYM)E zf#j6JS1et}A_7h$yo^D3t9@+y7Ur3!NOxk*aYl~qbfD&y;Iu&2F6tV(j*Md{?V)G; zly+!$zPFLDGK?xKz@<h@O5tAP)<DfcX;ZFGeXDQGx0b7VmaO<ASMl@AScJ~Vwx=C_ zVSSf@If{WvkUt=#*DJ_<RuJ217DZ;DnVO8Q$5FHEM}>!_4vqJACP_iVNErc=6xh!R zvrzm*aX}7R947zkP3G;{-2w|?%zUi*duj%~Z!b<Xf<Dixu<Q~`P|A0P?l%srEp<Bk zt8Bs-MQ9~IA!vc==Wl=u^gCR}Ww32Voytm#)sxIkc()4m37hTeQBgk*!S?IkaE1uR zG5IZS5hERJ9))NRTNm!(1oLWQMDHn2TMf}$ePi%;Ht7ywS`K6FTxgat`w9vqOnyY+ z<NW-_!Ooq#ojW^EWnKpxb98#+VAz;Lojd;`vU#m3S&7Iyq=N!>1qY@SqV`^VY#0zq zpK;jOvphOOkp_q$lb_~TDs07nLbQs)z)`yV9$+pg!HyHACUvt^ev0%|7|UvXMfEqC zIJc}OaJbaU7PTmMhkGqrNRbr2l=?@v$M=`1u@zlBh8L2;<47hCMywNdl;YJMnsX{M zb|mstU3y02#Z-#x6kWlkaBvCr+f@VDDEF@ld@zRqt5U06zC`|Bu(sbSTh)-@G@dW= zCG$6F?HBO5BskXjwD90#Po<A^=>tijVI&!nM9}7Z`hcVXCmyaPU;1NA)+#}F0kROd zZoD8;hWwr~SV2`0vQ-hXRS~jP5wcYgvQ-hXKUWc?DlZwMS21h)(;3dKLD0$Qwqg*< zxnTG%E=Om}2PDQV4WaLLGo&M(G={jWmA&p}i3F#}Z_-DY?cN{y^Ajj!Ld^XAn8vKc zPk3vMnI5kTgFiOV+J!78v!L(q!M|`%9C!&h4x9o8fh3LvW&(?W5}*p$3~U1)2A%?1 zfY*TIKo{WZA|8+iECYPNX5eeU1Hj|JuYlKpHsAzs7D)U=(~^MkKr)a9<N>z;KHvf1 zDd0um9iR)i2=dQZ;96iFa5LZo?gZ`w9tU;;Ex-}r1keRs09olWU<xoBSPGN@Yk)1l zJ-`ov=YRvi5#Uci7cdr7IvGd<76E;KCz8^%x6@ItaATTwc4?ZXtpLKm8~-^?`_8bQ z_lW<hqSA72v0JZn-|E%f-gTwAdu3&@*S*SDx!PUjt6b@=uAam}x+mO9pSMW&Mt^gU ztJe6hWmFpF#qNqqNyocVeDN!)5RX-*6~%7PdcCBwLVYy!qFc(n1Q8trV@6l0FO!HS z<r*`(J6>g#w?c)ws(Pibv`U{;wSF!6__8Rd$10tst=6iwm0G3d)4cqfq!nxB{L{1v zT7_n)=PM*xZ9;`nUT!@KBcPu&p-Z#%)B44_>{(e^aq^p*ta(&m_jJ$Fc!zdfa&o>0 zQjFUz`@7~?QL=)crmd@5$In3sh^!6=j)Q;ls_ht^PA3EWVq$IfxPI}D{s{vT2M%(& z248UDkf9e{oHXo`;Uh+ly3{@TvN2=FjlX=t6<?Tm<yDiePQK>a$y26IyKZ{QjMSO4 zzWAlI^y@P+vu4l9o_oWM^K#}d@GM-EyBG_ZOAG$#rke|wEniV|%gSQ!s#{A+%Wf-Q zT~S$eyRTX|)~sE({>xw4P_uE9BI{;VNSAslODlA*k22k;Wifu{^LL&$S-X}N%j9XE zDsQH@ci7qG)w6wGuZElJ)$@wV4fQ-H>N&l<ymF;P_8Ap=>1war>+@Cm+?qC!&Rslj zL2j<)Bd=QS-1&2&UbV~xIq7rf_xLQDmOOdNz=ZS)cTrVUdFjd`y_6wSQdI3;UBs{~ z!e7_DtE+SwvgMUU4BZm1JHs8xyS(%kUy*OUyOcWneBPCM`T9u-o^o$dwU>cip%<+r zCNZK?zr5OAZB$iN`uO54TJ2s%;a6AsyrjY7YE^<ss_>Lw$~Spn!d33{o?;lJos&Cv zUewIdOG>NVMb*{b)wh(dcNZJJ(u!N%6(qGria|w6D@yg!qVm!&tK<_FOL*ppRM<;Q z_btY)yt~&|8oubVPIAxH-2`1-S*^RvOK<a%x>U#Ktv1SacjYSg%A)de$&8kgGF`Q@ za&?uO;uEf3S?;^Sy~?OqsoGS{@S>hVRaEOfW2H{z`L8}^mY3%gl~$;_OTDj^daLPO zQEA*-;;ybLTFFX5a0WmT(>bcaqTB15KJC?AcdylXixyk$t(Q>f%8HfVNuR$xBp)eT zvgDCLN>aX_42r|wubnR6jS98uFmifAxJ$f6RaR+9=i2K&qmFA!qavz)>xnn*yz#2_ z;?IaTRpM0{jJ7qUKHVrP@97}vNtJ<=i#c(gwqIUZA<OpF3>;a#)xz3cu4_^xUQfN% zddfVguB5w)y=zKWdV9i#+sM1Fih0APAT84~GgUiZquR$H$8ea{47*ajggv2HM!{`; z!=Jxh!jX!L^dgEd(CYH2X{jc?&wIP!t(L;bC|?v_VCX<rvel(bC<dMMw+wfq!l;%8 zTwC;aobt4NvTDO~j(cwfy;fPV+FPMh2MMd%@SI_be771Buv#^^gjMrt6^ocI6Shj$ z=kAqAl91)it46S<<&>`URaRH7(%pHbs+JiOCw8~TJZsTodD0S?50fTM(q^)E-|AyE zt0-bcHY#qbs9am|Mfxz@gjupik4{Kn6O~{y+!C1|CzV~0(baDx&%#KT-@Q@KO+2g3 z5Px(|bU!05+5NmN>KW!*w?DG^-Ot~MdhS<Sdq-_uEgQ1!j@mmm*A9t`V@KY)bt?r* zPOkOT)@u%J!sXLF`L*n~Y|0)_J=wb_)YjJ$OJiFuDJgL{;@4GGt*xr+wIB2OfBes_ z_5C*i{K)#(_shB7v%!=;>)#gb)Bk#huhV+|#b}@JUvvtawVr>m5R*U8zes%d|M>pb zKGpwjG%Ef-9sx0R-Tx3U{#?IE4~n}vrsrR5%;)<TiGQv!{U7uDYcoJ{8p6Lwj`G&? 
z>=Kdc|G=+r_|I3{o=`5W=h=FSiIGWATesQ2W$PVZt#4=y+}ZTCySCl^^>5ts&3nIf z-~A7K`@!#g_j?a*fB2C{AA9`!JAUxPAN}~BpZLj>KmC`VJ@xaQPe1eQbHDiI^S}D_ zuIAl)_Wq`&b>IF2FTD7#FTH&5&~FdF^6G1^A9>@=w~qeq_kU<R_Vyo-|Jyt7n(coI zp7{6o-tYL}&mW%r=+x=XGk^KGi_3_A^MUC62cFM$Ao{Pa|9^G<e{=i)wFBw-zpDf3 ze|7z{vuCVcJ)>Gk6IwC9E8RK#-14xVpO%wzb#d|4Jn-}6Xj(eJnV55&Iy!6fE7x>C zFW|H!-nrf?j-*zAbmLZ|TGzB2jB=I64dBX>R(h4MRA>@8MZT3KxU;>t_zVuJ^6iGA z3iU`nlD<Z|lBPylk`7Qoy!DcX#Fw}dN6RhJ4PP-IBt2iLdRkm!_^QKx`QG9RZ}?>~ zXta3eR92|3xklJ6(j~4&JdN-g;UtX4ca1}Sn8uRN(X?`HuC5L};=iQY>sxS38Rvw# zJ%?nWc<^mrQMI1V8FLLJhbp5=`C0E)GFlEarJ`HC*H^Af*OugFEt-7oq|AAcAIOue zDFFqcJQRx>TJ1xXsW}ZmJJ1}o3XMY>(NwgUG#tN-1@jjySv*#o#F<y#BlM(6x2R<B zUtO&HZziwxoGMl?s;ra@_+?wpf9h}T1?k#BID$5bJzdkDEY-A!?mu@@kWr!JX&N+d z<wo9*Lc5b+<b7YC@4p<=`+I%V_rHvT-Y0<HF5Fkb&ywDqQQ=CaqB9SWUnHNt<+w1l z_xFQQ@g?4|KHp#L^ZmA2R(uJ29na^>r{jxOxbuA<lXm{^Iq7LyDImY|#V?%G`+MJV zPJ~7(zw^ca_WaNO{yR@k-A+V3AL-K`-&@oZ?nhD2ecRnz&^y2AbOzj%rd<liFH+v< z?}dCT>hpb9pK?62tatqAe$8H<rY#5L7fHWw`JOH7{XIIq#5+*l`+MK`FRkzWy>I;A z*M0W)UvKXHy>EX$_08Vj`=+0B-)Db6zP<PNzU9B^@!sG2&d<?1tnV7X!teL=dEasz zeWG_deZP0^?)|-QJ->Y*O}qIFnS_5Aagx&7B5%Fj|K+XxZM>C5F>|~XULQoJ42xox zq5I0S)<DC7ufsQ8xDXjaT90rdD(v}1rTXkjUoI4#a<8>RYTwi{6wf3ajBWBKHi+p_ ziDnm76qkcZd?cynR2CcM-q{ds=R><8^qX3iQ0_B)kc=S;=CbQT6xXzqvGcq|YrLQG z|4UCQR>Jw3HqoA2?ggi~ES4OkAnC=$5RJiu;$otiDOD0TqjL3XN;I#ug6wBX47Pr# zlU1_Wr)wQjdMjmEKGGUrw89iyo^Y)s6{*4E^;KTv-ZQ=BURtqF1+KF%j!^NsTkwY} ze*@BeMFjcKvh7PMN>mFKXRTWavPJDlTro2)wNsY!ets=>Zgr*?TKcVCpNHy7*S#w_ z2#%siU~uYUv!Qb;CWrR0dbSuEH>;9(q{`ZFV&_T^2!YdEJhuWCm{9UGtvT8sEF|Ke zD{<2^JeoE{T4q63jy$(f8aODW#cIre0cl^fFD|bpfW=ptDQ{tJ%9rH1o8vM|-c%7! zO4~=3{)wpeTCB*hbHQ=GWzVOr)fm!F#m<9{7$y-inx3P~VctXE9!ak#&aEn~usZd| z7|AfJhr*ew3m2n0UE3vje)@wp?>sT`wJrAi(qeB$Ns(`HWsXpcuV1fwwcY1Vhtc|| z>IZAqXj+jy&!Ua17AUYSG`zm`9<NVvXJ8ko@-lnMq^%d1uDmTgDt{E!HsJwA<K(Kb zs?fj1aI4a*)i~uzd%(6xFJDrz7GziZfhxfwuhkvPA|(j-&K8w&cu}Bd?~QtA`hxLa zA2Yk$s4kJTuQyh$^7@!*@5Ii_$SJC_+L4~P)Yjb=iz_1yq?ys7Xp1y!Zb{qAY$9Gp zZy&<6OaAi|6ULgN+PgANB=>H%-;Y#{a!bEV=`yv9^2%y&c)H$cjh66wl&(DxRhtEd zUS;SqdhhKODqrg-GcQ-~p7ZO&tDIzty+F9MtE-B9-tOAw_4c9EN2H8V<0!AlS1Jse zbnV8hMf0=faV{t>=g?GPTLgPS($%zAtvJOCR$1@kr7gmpEAtpkL`ts;p)+7_G2o}s zX8-&9|FZ>li2^!);#w4{a5-IJH_Ab<NwA&s{^YyB|Nj2B1wL;J%zr2C7e5{L>&!om zNmFB|{B7`Sfa6oBRs<IQlRp`!7XgtmX$wEwapk&a954_-4n^w^!~=<dBkYQwyh{<} zoABf!-y~g$D=u0vR30*2#BVTgK^P?O(SZ0*1>`+F{GJhhXJJ=y7KQzD!!FCSO1}VC z@@5%U>8!?e11z-K2*3wOS*0FQo?1Z4To-mX<H~nGAm6tDQXaW*cLng>@cVXLDc_@j z<oA6*!aWU0on8Xu`|E&wPohzzeIjkfWB1w+BQH_E$a}<%e2TpHb^Ctr`~KI$pYMAl zoqs&nb>5#<SNC~;{}^p?ex`&~zw;Bt|1s(>wK(q(2=C<Q9RluuoHn2)|ILR&$x!gH zSi9p<Hmnt!*KZyj?wrT}U_ESq%yR3#Cla)pmbS50xjP8o{K%V+xUJ8h`df$WtNhZ! 
z?$1AG`1El2orHh+;o}cqqW#;$=EFBxiADYGPJiQe6+?72Eqrs?n{I9Sn`Lia8x_)e ztUG+<_ifP8uGwhCEdO_lW|t8T8Ck<W74dKM*mg;JuN3~)cPVGzvWk7^$gd=rrgglJ z-J}oFwE7Y0+I{3N;l-7{7Cc9OvbT1cX$r@95m)x?hj3*tci_q-KKgE&+KYdTD>z0y z?uEEF;|fkQ7IzqK*E?z2CAfQWhvVLfE4V^2?kL<$+)HuW{w+;&<L<y6jr-*BH0?56 z7w$S-4R<|G#~;(QFXOi1%3wQ+8^V1NcNuiu&jSn}g-1!cQm62uq)Gdf(f9X#n5NwW zYy<8D>VYjlEwB!#0!o0J0S}N3%mk(bQ-EaPN?-yo7H|V2fFxiD-~ti>JJ9)O`UEfm z3Ezf$1ULxn1%3%U2|Nls1Uv|A12zCvK!1BrpG%)kqCT1Q`JGq%b=VaC$ry<tp2QV5 z@{@LQ$9+S(@ti*yC(*y!Dl2}+2Nplele;+j^MCl+lliyBKS;e?D5H`w9mzcUS@;_Q z@{_Tc3j7lw<KkO@C}w>H_z)OO!z2Uq0lAnGi8F(51;AS1Uf?O<Fz{zUE>~U+<N)Qs ffA`;C6IqGv^RtD2k$RV(<URs$Gq4!wJAVETV*lf- literal 0 HcmV?d00001 diff --git a/env/lib/python3.6/site-packages/setuptools/launch.py b/env/lib/python3.6/site-packages/setuptools/launch.py new file mode 100644 index 0000000..308283e --- /dev/null +++ b/env/lib/python3.6/site-packages/setuptools/launch.py @@ -0,0 +1,35 @@ +""" +Launch the Python script on the command line after +setuptools is bootstrapped via import. +""" + +# Note that setuptools gets imported implicitly by the +# invocation of this script using python -m setuptools.launch + +import tokenize +import sys + + +def run(): + """ + Run the script in sys.argv[1] as if it had + been invoked naturally. + """ + __builtins__ + script_name = sys.argv[1] + namespace = dict( + __file__=script_name, + __name__='__main__', + __doc__=None, + ) + sys.argv[:] = sys.argv[1:] + + open_ = getattr(tokenize, 'open', open) + script = open_(script_name).read() + norm_script = script.replace('\\r\\n', '\\n') + code = compile(norm_script, script_name, 'exec') + exec(code, namespace) + + +if __name__ == '__main__': + run() diff --git a/env/lib/python3.6/site-packages/setuptools/lib2to3_ex.py b/env/lib/python3.6/site-packages/setuptools/lib2to3_ex.py new file mode 100644 index 0000000..4b1a73f --- /dev/null +++ b/env/lib/python3.6/site-packages/setuptools/lib2to3_ex.py @@ -0,0 +1,62 @@ +""" +Customized Mixin2to3 support: + + - adds support for converting doctests + + +This module raises an ImportError on Python 2. +""" + +from distutils.util import Mixin2to3 as _Mixin2to3 +from distutils import log +from lib2to3.refactor import RefactoringTool, get_fixers_from_package + +import setuptools + + +class DistutilsRefactoringTool(RefactoringTool): + def log_error(self, msg, *args, **kw): + log.error(msg, *args) + + def log_message(self, msg, *args): + log.info(msg, *args) + + def log_debug(self, msg, *args): + log.debug(msg, *args) + + +class Mixin2to3(_Mixin2to3): + def run_2to3(self, files, doctests=False): + # See of the distribution option has been set, otherwise check the + # setuptools default. 
+ if self.distribution.use_2to3 is not True: + return + if not files: + return + log.info("Fixing " + " ".join(files)) + self.__build_fixer_names() + self.__exclude_fixers() + if doctests: + if setuptools.run_2to3_on_doctests: + r = DistutilsRefactoringTool(self.fixer_names) + r.refactor(files, write=True, doctests_only=True) + else: + _Mixin2to3.run_2to3(self, files) + + def __build_fixer_names(self): + if self.fixer_names: + return + self.fixer_names = [] + for p in setuptools.lib2to3_fixer_packages: + self.fixer_names.extend(get_fixers_from_package(p)) + if self.distribution.use_2to3_fixers is not None: + for p in self.distribution.use_2to3_fixers: + self.fixer_names.extend(get_fixers_from_package(p)) + + def __exclude_fixers(self): + excluded_fixers = getattr(self, 'exclude_fixers', []) + if self.distribution.use_2to3_exclude_fixers is not None: + excluded_fixers.extend(self.distribution.use_2to3_exclude_fixers) + for fixer_name in excluded_fixers: + if fixer_name in self.fixer_names: + self.fixer_names.remove(fixer_name) diff --git a/env/lib/python3.6/site-packages/setuptools/monkey.py b/env/lib/python3.6/site-packages/setuptools/monkey.py new file mode 100644 index 0000000..aabc280 --- /dev/null +++ b/env/lib/python3.6/site-packages/setuptools/monkey.py @@ -0,0 +1,186 @@ +""" +Monkey patching of distutils. +""" + +import sys +import distutils.filelist +import platform +import types +import functools + +from .py26compat import import_module +from setuptools.extern import six + +import setuptools + +__all__ = [] +""" +Everything is private. Contact the project team +if you think you need this functionality. +""" + + +def get_unpatched(item): + lookup = ( + get_unpatched_class if isinstance(item, six.class_types) else + get_unpatched_function if isinstance(item, types.FunctionType) else + lambda item: None + ) + return lookup(item) + + +def get_unpatched_class(cls): + """Protect against re-patching the distutils if reloaded + + Also ensures that no other distutils extension monkeypatched the distutils + first. 
+ """ + while cls.__module__.startswith('setuptools'): + cls, = cls.__bases__ + if not cls.__module__.startswith('distutils'): + msg = "distutils has already been patched by %r" % cls + raise AssertionError(msg) + return cls + + +def patch_all(): + # we can't patch distutils.cmd, alas + distutils.core.Command = setuptools.Command + + has_issue_12885 = ( + sys.version_info < (3, 4, 6) + or + (3, 5) < sys.version_info <= (3, 5, 3) + or + (3, 6) < sys.version_info + ) + + if has_issue_12885: + # fix findall bug in distutils (http://bugs.python.org/issue12885) + distutils.filelist.findall = setuptools.findall + + needs_warehouse = ( + sys.version_info < (2, 7, 13) + or + (3, 0) < sys.version_info < (3, 3, 7) + or + (3, 4) < sys.version_info < (3, 4, 6) + or + (3, 5) < sys.version_info <= (3, 5, 3) + or + (3, 6) < sys.version_info + ) + + if needs_warehouse: + warehouse = 'https://upload.pypi.org/legacy/' + distutils.config.PyPIRCCommand.DEFAULT_REPOSITORY = warehouse + + _patch_distribution_metadata_write_pkg_file() + _patch_distribution_metadata_write_pkg_info() + + # Install Distribution throughout the distutils + for module in distutils.dist, distutils.core, distutils.cmd: + module.Distribution = setuptools.dist.Distribution + + # Install the patched Extension + distutils.core.Extension = setuptools.extension.Extension + distutils.extension.Extension = setuptools.extension.Extension + if 'distutils.command.build_ext' in sys.modules: + sys.modules['distutils.command.build_ext'].Extension = ( + setuptools.extension.Extension + ) + + patch_for_msvc_specialized_compiler() + + +def _patch_distribution_metadata_write_pkg_file(): + """Patch write_pkg_file to also write Requires-Python/Requires-External""" + distutils.dist.DistributionMetadata.write_pkg_file = ( + setuptools.dist.write_pkg_file + ) + + +def _patch_distribution_metadata_write_pkg_info(): + """ + Workaround issue #197 - Python 3 prior to 3.2.2 uses an environment-local + encoding to save the pkg_info. Monkey-patch its write_pkg_info method to + correct this undesirable behavior. + """ + environment_local = (3,) <= sys.version_info[:3] < (3, 2, 2) + if not environment_local: + return + + distutils.dist.DistributionMetadata.write_pkg_info = ( + setuptools.dist.write_pkg_info + ) + + +def patch_func(replacement, target_mod, func_name): + """ + Patch func_name in target_mod with replacement + + Important - original must be resolved by name to avoid + patching an already patched function. + """ + original = getattr(target_mod, func_name) + + # set the 'unpatched' attribute on the replacement to + # point to the original. + vars(replacement).setdefault('unpatched', original) + + # replace the function in the original module + setattr(target_mod, func_name, replacement) + + +def get_unpatched_function(candidate): + return getattr(candidate, 'unpatched') + + +def patch_for_msvc_specialized_compiler(): + """ + Patch functions in distutils to use standalone Microsoft Visual C++ + compilers. + """ + # import late to avoid circular imports on Python < 3.5 + msvc = import_module('setuptools.msvc') + + if platform.system() != 'Windows': + # Compilers only availables on Microsoft Windows + return + + def patch_params(mod_name, func_name): + """ + Prepare the parameters for patch_func to patch indicated function. 
+ """ + repl_prefix = 'msvc9_' if 'msvc9' in mod_name else 'msvc14_' + repl_name = repl_prefix + func_name.lstrip('_') + repl = getattr(msvc, repl_name) + mod = import_module(mod_name) + if not hasattr(mod, func_name): + raise ImportError(func_name) + return repl, mod, func_name + + # Python 2.7 to 3.4 + msvc9 = functools.partial(patch_params, 'distutils.msvc9compiler') + + # Python 3.5+ + msvc14 = functools.partial(patch_params, 'distutils._msvccompiler') + + try: + # Patch distutils.msvc9compiler + patch_func(*msvc9('find_vcvarsall')) + patch_func(*msvc9('query_vcvarsall')) + except ImportError: + pass + + try: + # Patch distutils._msvccompiler._get_vc_env + patch_func(*msvc14('_get_vc_env')) + except ImportError: + pass + + try: + # Patch distutils._msvccompiler.gen_lib_options for Numpy + patch_func(*msvc14('gen_lib_options')) + except ImportError: + pass diff --git a/env/lib/python3.6/site-packages/setuptools/msvc.py b/env/lib/python3.6/site-packages/setuptools/msvc.py new file mode 100644 index 0000000..447ddb3 --- /dev/null +++ b/env/lib/python3.6/site-packages/setuptools/msvc.py @@ -0,0 +1,1193 @@ +""" +Improved support for Microsoft Visual C++ compilers. + +Known supported compilers: +-------------------------- +Microsoft Visual C++ 9.0: + Microsoft Visual C++ Compiler for Python 2.7 (x86, amd64); + Microsoft Windows SDK 7.0 (x86, x64, ia64); + Microsoft Windows SDK 6.1 (x86, x64, ia64) + +Microsoft Visual C++ 10.0: + Microsoft Windows SDK 7.1 (x86, x64, ia64) + +Microsoft Visual C++ 14.0: + Microsoft Visual C++ Build Tools 2015 (x86, x64, arm) +""" + +import os +import sys +import platform +import itertools +import distutils.errors +from pkg_resources.extern.packaging.version import LegacyVersion + +from setuptools.extern.six.moves import filterfalse + +from .monkey import get_unpatched + +if platform.system() == 'Windows': + from setuptools.extern.six.moves import winreg + safe_env = os.environ +else: + """ + Mock winreg and environ so the module can be imported + on this platform. + """ + + class winreg: + HKEY_USERS = None + HKEY_CURRENT_USER = None + HKEY_LOCAL_MACHINE = None + HKEY_CLASSES_ROOT = None + + safe_env = dict() + +try: + from distutils.msvc9compiler import Reg +except ImportError: + pass + + +def msvc9_find_vcvarsall(version): + """ + Patched "distutils.msvc9compiler.find_vcvarsall" to use the standalone + compiler build for Python (VCForPython). Fall back to original behavior + when the standalone compiler is not available. + + Redirect the path of "vcvarsall.bat". + + Known supported compilers + ------------------------- + Microsoft Visual C++ 9.0: + Microsoft Visual C++ Compiler for Python 2.7 (x86, amd64) + + Parameters + ---------- + version: float + Required Microsoft Visual C++ version. 
+ + Return + ------ + vcvarsall.bat path: str + """ + VC_BASE = r'Software\%sMicrosoft\DevDiv\VCForPython\%0.1f' + key = VC_BASE % ('', version) + try: + # Per-user installs register the compiler path here + productdir = Reg.get_value(key, "installdir") + except KeyError: + try: + # All-user installs on a 64-bit system register here + key = VC_BASE % ('Wow6432Node\\', version) + productdir = Reg.get_value(key, "installdir") + except KeyError: + productdir = None + + if productdir: + vcvarsall = os.path.os.path.join(productdir, "vcvarsall.bat") + if os.path.isfile(vcvarsall): + return vcvarsall + + return get_unpatched(msvc9_find_vcvarsall)(version) + + +def msvc9_query_vcvarsall(ver, arch='x86', *args, **kwargs): + """ + Patched "distutils.msvc9compiler.query_vcvarsall" for support standalones + compilers. + + Set environment without use of "vcvarsall.bat". + + Known supported compilers + ------------------------- + Microsoft Visual C++ 9.0: + Microsoft Visual C++ Compiler for Python 2.7 (x86, amd64); + Microsoft Windows SDK 7.0 (x86, x64, ia64); + Microsoft Windows SDK 6.1 (x86, x64, ia64) + + Microsoft Visual C++ 10.0: + Microsoft Windows SDK 7.1 (x86, x64, ia64) + + Parameters + ---------- + ver: float + Required Microsoft Visual C++ version. + arch: str + Target architecture. + + Return + ------ + environment: dict + """ + # Try to get environement from vcvarsall.bat (Classical way) + try: + orig = get_unpatched(msvc9_query_vcvarsall) + return orig(ver, arch, *args, **kwargs) + except distutils.errors.DistutilsPlatformError: + # Pass error if Vcvarsall.bat is missing + pass + except ValueError: + # Pass error if environment not set after executing vcvarsall.bat + pass + + # If error, try to set environment directly + try: + return EnvironmentInfo(arch, ver).return_env() + except distutils.errors.DistutilsPlatformError as exc: + _augment_exception(exc, ver, arch) + raise + + +def msvc14_get_vc_env(plat_spec): + """ + Patched "distutils._msvccompiler._get_vc_env" for support standalones + compilers. + + Set environment without use of "vcvarsall.bat". + + Known supported compilers + ------------------------- + Microsoft Visual C++ 14.0: + Microsoft Visual C++ Build Tools 2015 (x86, x64, arm) + + Parameters + ---------- + plat_spec: str + Target architecture. + + Return + ------ + environment: dict + """ + # Try to get environment from vcvarsall.bat (Classical way) + try: + return get_unpatched(msvc14_get_vc_env)(plat_spec) + except distutils.errors.DistutilsPlatformError: + # Pass error Vcvarsall.bat is missing + pass + + # If error, try to set environment directly + try: + return EnvironmentInfo(plat_spec, vc_min_ver=14.0).return_env() + except distutils.errors.DistutilsPlatformError as exc: + _augment_exception(exc, 14.0) + raise + + +def msvc14_gen_lib_options(*args, **kwargs): + """ + Patched "distutils._msvccompiler.gen_lib_options" for fix + compatibility between "numpy.distutils" and "distutils._msvccompiler" + (for Numpy < 1.11.2) + """ + if "numpy.distutils" in sys.modules: + import numpy as np + if LegacyVersion(np.__version__) < LegacyVersion('1.11.2'): + return np.distutils.ccompiler.gen_lib_options(*args, **kwargs) + return get_unpatched(msvc14_gen_lib_options)(*args, **kwargs) + + +def _augment_exception(exc, version, arch=''): + """ + Add details to the exception message to help guide the user + as to what action will resolve it. 
+ """ + # Error if MSVC++ directory not found or environment not set + message = exc.args[0] + + if "vcvarsall" in message.lower() or "visual c" in message.lower(): + # Special error message if MSVC++ not installed + tmpl = 'Microsoft Visual C++ {version:0.1f} is required.' + message = tmpl.format(**locals()) + msdownload = 'www.microsoft.com/download/details.aspx?id=%d' + if version == 9.0: + if arch.lower().find('ia64') > -1: + # For VC++ 9.0, if IA64 support is needed, redirect user + # to Windows SDK 7.0 + message += ' Get it with "Microsoft Windows SDK 7.0": ' + message += msdownload % 3138 + else: + # For VC++ 9.0 redirect user to Vc++ for Python 2.7 : + # This redirection link is maintained by Microsoft. + # Contact vspython@microsoft.com if it needs updating. + message += ' Get it from http://aka.ms/vcpython27' + elif version == 10.0: + # For VC++ 10.0 Redirect user to Windows SDK 7.1 + message += ' Get it with "Microsoft Windows SDK 7.1": ' + message += msdownload % 8279 + elif version >= 14.0: + # For VC++ 14.0 Redirect user to Visual C++ Build Tools + message += (' Get it with "Microsoft Visual C++ Build Tools": ' + r'http://landinghub.visualstudio.com/' + 'visual-cpp-build-tools') + + exc.args = (message, ) + + +class PlatformInfo: + """ + Current and Target Architectures informations. + + Parameters + ---------- + arch: str + Target architecture. + """ + current_cpu = safe_env.get('processor_architecture', '').lower() + + def __init__(self, arch): + self.arch = arch.lower().replace('x64', 'amd64') + + @property + def target_cpu(self): + return self.arch[self.arch.find('_') + 1:] + + def target_is_x86(self): + return self.target_cpu == 'x86' + + def current_is_x86(self): + return self.current_cpu == 'x86' + + def current_dir(self, hidex86=False, x64=False): + """ + Current platform specific subfolder. + + Parameters + ---------- + hidex86: bool + return '' and not '\x86' if architecture is x86. + x64: bool + return '\x64' and not '\amd64' if architecture is amd64. + + Return + ------ + subfolder: str + '\target', or '' (see hidex86 parameter) + """ + return ( + '' if (self.current_cpu == 'x86' and hidex86) else + r'\x64' if (self.current_cpu == 'amd64' and x64) else + r'\%s' % self.current_cpu + ) + + def target_dir(self, hidex86=False, x64=False): + """ + Target platform specific subfolder. + + Parameters + ---------- + hidex86: bool + return '' and not '\x86' if architecture is x86. + x64: bool + return '\x64' and not '\amd64' if architecture is amd64. + + Return + ------ + subfolder: str + '\current', or '' (see hidex86 parameter) + """ + return ( + '' if (self.target_cpu == 'x86' and hidex86) else + r'\x64' if (self.target_cpu == 'amd64' and x64) else + r'\%s' % self.target_cpu + ) + + def cross_dir(self, forcex86=False): + """ + Cross platform specific subfolder. + + Parameters + ---------- + forcex86: bool + Use 'x86' as current architecture even if current acritecture is + not x86. + + Return + ------ + subfolder: str + '' if target architecture is current architecture, + '\current_target' if not. + """ + current = 'x86' if forcex86 else self.current_cpu + return ( + '' if self.target_cpu == current else + self.target_dir().replace('\\', '\\%s_' % current) + ) + + +class RegistryInfo: + """ + Microsoft Visual Studio related registry informations. + + Parameters + ---------- + platform_info: PlatformInfo + "PlatformInfo" instance. 
+ """ + HKEYS = (winreg.HKEY_USERS, + winreg.HKEY_CURRENT_USER, + winreg.HKEY_LOCAL_MACHINE, + winreg.HKEY_CLASSES_ROOT) + + def __init__(self, platform_info): + self.pi = platform_info + + @property + def visualstudio(self): + """ + Microsoft Visual Studio root registry key. + """ + return 'VisualStudio' + + @property + def sxs(self): + """ + Microsoft Visual Studio SxS registry key. + """ + return os.path.join(self.visualstudio, 'SxS') + + @property + def vc(self): + """ + Microsoft Visual C++ VC7 registry key. + """ + return os.path.join(self.sxs, 'VC7') + + @property + def vs(self): + """ + Microsoft Visual Studio VS7 registry key. + """ + return os.path.join(self.sxs, 'VS7') + + @property + def vc_for_python(self): + """ + Microsoft Visual C++ for Python registry key. + """ + return r'DevDiv\VCForPython' + + @property + def microsoft_sdk(self): + """ + Microsoft SDK registry key. + """ + return 'Microsoft SDKs' + + @property + def windows_sdk(self): + """ + Microsoft Windows/Platform SDK registry key. + """ + return os.path.join(self.microsoft_sdk, 'Windows') + + @property + def netfx_sdk(self): + """ + Microsoft .NET Framework SDK registry key. + """ + return os.path.join(self.microsoft_sdk, 'NETFXSDK') + + @property + def windows_kits_roots(self): + """ + Microsoft Windows Kits Roots registry key. + """ + return r'Windows Kits\Installed Roots' + + def microsoft(self, key, x86=False): + """ + Return key in Microsoft software registry. + + Parameters + ---------- + key: str + Registry key path where look. + x86: str + Force x86 software registry. + + Return + ------ + str: value + """ + node64 = '' if self.pi.current_is_x86() or x86 else r'\Wow6432Node' + return os.path.join('Software', node64, 'Microsoft', key) + + def lookup(self, key, name): + """ + Look for values in registry in Microsoft software registry. + + Parameters + ---------- + key: str + Registry key path where look. + name: str + Value name to find. + + Return + ------ + str: value + """ + KEY_READ = winreg.KEY_READ + openkey = winreg.OpenKey + ms = self.microsoft + for hkey in self.HKEYS: + try: + bkey = openkey(hkey, ms(key), 0, KEY_READ) + except (OSError, IOError): + if not self.pi.current_is_x86(): + try: + bkey = openkey(hkey, ms(key, True), 0, KEY_READ) + except (OSError, IOError): + continue + else: + continue + try: + return winreg.QueryValueEx(bkey, name)[0] + except (OSError, IOError): + pass + + +class SystemInfo: + """ + Microsoft Windows and Visual Studio related system inormations. + + Parameters + ---------- + registry_info: RegistryInfo + "RegistryInfo" instance. + vc_ver: float + Required Microsoft Visual C++ version. + """ + + # Variables and properties in this class use originals CamelCase variables + # names from Microsoft source files for more easy comparaison. + WinDir = safe_env.get('WinDir', '') + ProgramFiles = safe_env.get('ProgramFiles', '') + ProgramFilesx86 = safe_env.get('ProgramFiles(x86)', ProgramFiles) + + def __init__(self, registry_info, vc_ver=None): + self.ri = registry_info + self.pi = self.ri.pi + if vc_ver: + self.vc_ver = vc_ver + else: + try: + self.vc_ver = self.find_available_vc_vers()[-1] + except IndexError: + err = 'No Microsoft Visual C++ version found' + raise distutils.errors.DistutilsPlatformError(err) + + def find_available_vc_vers(self): + """ + Find all available Microsoft Visual C++ versions. 
+ """ + vckeys = (self.ri.vc, self.ri.vc_for_python) + vc_vers = [] + for hkey in self.ri.HKEYS: + for key in vckeys: + try: + bkey = winreg.OpenKey(hkey, key, 0, winreg.KEY_READ) + except (OSError, IOError): + continue + subkeys, values, _ = winreg.QueryInfoKey(bkey) + for i in range(values): + try: + ver = float(winreg.EnumValue(bkey, i)[0]) + if ver not in vc_vers: + vc_vers.append(ver) + except ValueError: + pass + for i in range(subkeys): + try: + ver = float(winreg.EnumKey(bkey, i)) + if ver not in vc_vers: + vc_vers.append(ver) + except ValueError: + pass + return sorted(vc_vers) + + @property + def VSInstallDir(self): + """ + Microsoft Visual Studio directory. + """ + # Default path + name = 'Microsoft Visual Studio %0.1f' % self.vc_ver + default = os.path.join(self.ProgramFilesx86, name) + + # Try to get path from registry, if fail use default path + return self.ri.lookup(self.ri.vs, '%0.1f' % self.vc_ver) or default + + @property + def VCInstallDir(self): + """ + Microsoft Visual C++ directory. + """ + # Default path + default = r'Microsoft Visual Studio %0.1f\VC' % self.vc_ver + guess_vc = os.path.join(self.ProgramFilesx86, default) + + # Try to get "VC++ for Python" path from registry as default path + reg_path = os.path.join(self.ri.vc_for_python, '%0.1f' % self.vc_ver) + python_vc = self.ri.lookup(reg_path, 'installdir') + default_vc = os.path.join(python_vc, 'VC') if python_vc else guess_vc + + # Try to get path from registry, if fail use default path + path = self.ri.lookup(self.ri.vc, '%0.1f' % self.vc_ver) or default_vc + + if not os.path.isdir(path): + msg = 'Microsoft Visual C++ directory not found' + raise distutils.errors.DistutilsPlatformError(msg) + + return path + + @property + def WindowsSdkVersion(self): + """ + Microsoft Windows SDK versions. + """ + # Set Windows SDK versions for specified MSVC++ version + if self.vc_ver <= 9.0: + return ('7.0', '6.1', '6.0a') + elif self.vc_ver == 10.0: + return ('7.1', '7.0a') + elif self.vc_ver == 11.0: + return ('8.0', '8.0a') + elif self.vc_ver == 12.0: + return ('8.1', '8.1a') + elif self.vc_ver >= 14.0: + return ('10.0', '8.1') + + @property + def WindowsSdkDir(self): + """ + Microsoft Windows SDK directory. + """ + sdkdir = '' + for ver in self.WindowsSdkVersion: + # Try to get it from registry + loc = os.path.join(self.ri.windows_sdk, 'v%s' % ver) + sdkdir = self.ri.lookup(loc, 'installationfolder') + if sdkdir: + break + if not sdkdir or not os.path.isdir(sdkdir): + # Try to get "VC++ for Python" version from registry + path = os.path.join(self.ri.vc_for_python, '%0.1f' % self.vc_ver) + install_base = self.ri.lookup(path, 'installdir') + if install_base: + sdkdir = os.path.join(install_base, 'WinSDK') + if not sdkdir or not os.path.isdir(sdkdir): + # If fail, use default new path + for ver in self.WindowsSdkVersion: + intver = ver[:ver.rfind('.')] + path = r'Microsoft SDKs\Windows Kits\%s' % (intver) + d = os.path.join(self.ProgramFiles, path) + if os.path.isdir(d): + sdkdir = d + if not sdkdir or not os.path.isdir(sdkdir): + # If fail, use default old path + for ver in self.WindowsSdkVersion: + path = r'Microsoft SDKs\Windows\v%s' % ver + d = os.path.join(self.ProgramFiles, path) + if os.path.isdir(d): + sdkdir = d + if not sdkdir: + # If fail, use Platform SDK + sdkdir = os.path.join(self.VCInstallDir, 'PlatformSDK') + return sdkdir + + @property + def WindowsSDKExecutablePath(self): + """ + Microsoft Windows SDK executable directory. 
+ """ + # Find WinSDK NetFx Tools registry dir name + if self.vc_ver <= 11.0: + netfxver = 35 + arch = '' + else: + netfxver = 40 + hidex86 = True if self.vc_ver <= 12.0 else False + arch = self.pi.current_dir(x64=True, hidex86=hidex86) + fx = 'WinSDK-NetFx%dTools%s' % (netfxver, arch.replace('\\', '-')) + + # liste all possibles registry paths + regpaths = [] + if self.vc_ver >= 14.0: + for ver in self.NetFxSdkVersion: + regpaths += [os.path.join(self.ri.netfx_sdk, ver, fx)] + + for ver in self.WindowsSdkVersion: + regpaths += [os.path.join(self.ri.windows_sdk, 'v%sA' % ver, fx)] + + # Return installation folder from the more recent path + for path in regpaths: + execpath = self.ri.lookup(path, 'installationfolder') + if execpath: + break + return execpath + + @property + def FSharpInstallDir(self): + """ + Microsoft Visual F# directory. + """ + path = r'%0.1f\Setup\F#' % self.vc_ver + path = os.path.join(self.ri.visualstudio, path) + return self.ri.lookup(path, 'productdir') or '' + + @property + def UniversalCRTSdkDir(self): + """ + Microsoft Universal CRT SDK directory. + """ + # Set Kit Roots versions for specified MSVC++ version + if self.vc_ver >= 14.0: + vers = ('10', '81') + else: + vers = () + + # Find path of the more recent Kit + for ver in vers: + sdkdir = self.ri.lookup(self.ri.windows_kits_roots, + 'kitsroot%s' % ver) + if sdkdir: + break + return sdkdir or '' + + @property + def NetFxSdkVersion(self): + """ + Microsoft .NET Framework SDK versions. + """ + # Set FxSdk versions for specified MSVC++ version + if self.vc_ver >= 14.0: + return ('4.6.1', '4.6') + else: + return () + + @property + def NetFxSdkDir(self): + """ + Microsoft .NET Framework SDK directory. + """ + for ver in self.NetFxSdkVersion: + loc = os.path.join(self.ri.netfx_sdk, ver) + sdkdir = self.ri.lookup(loc, 'kitsinstallationfolder') + if sdkdir: + break + return sdkdir or '' + + @property + def FrameworkDir32(self): + """ + Microsoft .NET Framework 32bit directory. + """ + # Default path + guess_fw = os.path.join(self.WinDir, r'Microsoft.NET\Framework') + + # Try to get path from registry, if fail use default path + return self.ri.lookup(self.ri.vc, 'frameworkdir32') or guess_fw + + @property + def FrameworkDir64(self): + """ + Microsoft .NET Framework 64bit directory. + """ + # Default path + guess_fw = os.path.join(self.WinDir, r'Microsoft.NET\Framework64') + + # Try to get path from registry, if fail use default path + return self.ri.lookup(self.ri.vc, 'frameworkdir64') or guess_fw + + @property + def FrameworkVersion32(self): + """ + Microsoft .NET Framework 32bit versions. + """ + return self._find_dot_net_versions(32) + + @property + def FrameworkVersion64(self): + """ + Microsoft .NET Framework 64bit versions. + """ + return self._find_dot_net_versions(64) + + def _find_dot_net_versions(self, bits=32): + """ + Find Microsoft .NET Framework versions. + + Parameters + ---------- + bits: int + Platform number of bits: 32 or 64. 
+ """ + # Find actual .NET version + ver = self.ri.lookup(self.ri.vc, 'frameworkver%d' % bits) or '' + + # Set .NET versions for specified MSVC++ version + if self.vc_ver >= 12.0: + frameworkver = (ver, 'v4.0') + elif self.vc_ver >= 10.0: + frameworkver = ('v4.0.30319' if ver.lower()[:2] != 'v4' else ver, + 'v3.5') + elif self.vc_ver == 9.0: + frameworkver = ('v3.5', 'v2.0.50727') + if self.vc_ver == 8.0: + frameworkver = ('v3.0', 'v2.0.50727') + return frameworkver + + +class EnvironmentInfo: + """ + Return environment variables for specified Microsoft Visual C++ version + and platform : Lib, Include, Path and libpath. + + This function is compatible with Microsoft Visual C++ 9.0 to 14.0. + + Script created by analysing Microsoft environment configuration files like + "vcvars[...].bat", "SetEnv.Cmd", "vcbuildtools.bat", ... + + Parameters + ---------- + arch: str + Target architecture. + vc_ver: float + Required Microsoft Visual C++ version. If not set, autodetect the last + version. + vc_min_ver: float + Minimum Microsoft Visual C++ version. + """ + + # Variables and properties in this class use originals CamelCase variables + # names from Microsoft source files for more easy comparaison. + + def __init__(self, arch, vc_ver=None, vc_min_ver=None): + self.pi = PlatformInfo(arch) + self.ri = RegistryInfo(self.pi) + self.si = SystemInfo(self.ri, vc_ver) + + if vc_min_ver: + if self.vc_ver < vc_min_ver: + err = 'No suitable Microsoft Visual C++ version found' + raise distutils.errors.DistutilsPlatformError(err) + + @property + def vc_ver(self): + """ + Microsoft Visual C++ version. + """ + return self.si.vc_ver + + @property + def VSTools(self): + """ + Microsoft Visual Studio Tools + """ + paths = [r'Common7\IDE', r'Common7\Tools'] + + if self.vc_ver >= 14.0: + arch_subdir = self.pi.current_dir(hidex86=True, x64=True) + paths += [r'Common7\IDE\CommonExtensions\Microsoft\TestWindow'] + paths += [r'Team Tools\Performance Tools'] + paths += [r'Team Tools\Performance Tools%s' % arch_subdir] + + return [os.path.join(self.si.VSInstallDir, path) for path in paths] + + @property + def VCIncludes(self): + """ + Microsoft Visual C++ & Microsoft Foundation Class Includes + """ + return [os.path.join(self.si.VCInstallDir, 'Include'), + os.path.join(self.si.VCInstallDir, r'ATLMFC\Include')] + + @property + def VCLibraries(self): + """ + Microsoft Visual C++ & Microsoft Foundation Class Libraries + """ + arch_subdir = self.pi.target_dir(hidex86=True) + paths = ['Lib%s' % arch_subdir, r'ATLMFC\Lib%s' % arch_subdir] + + if self.vc_ver >= 14.0: + paths += [r'Lib\store%s' % arch_subdir] + + return [os.path.join(self.si.VCInstallDir, path) for path in paths] + + @property + def VCStoreRefs(self): + """ + Microsoft Visual C++ store references Libraries + """ + if self.vc_ver < 14.0: + return [] + return [os.path.join(self.si.VCInstallDir, r'Lib\store\references')] + + @property + def VCTools(self): + """ + Microsoft Visual C++ Tools + """ + si = self.si + tools = [os.path.join(si.VCInstallDir, 'VCPackages')] + + forcex86 = True if self.vc_ver <= 10.0 else False + arch_subdir = self.pi.cross_dir(forcex86) + if arch_subdir: + tools += [os.path.join(si.VCInstallDir, 'Bin%s' % arch_subdir)] + + if self.vc_ver >= 14.0: + path = 'Bin%s' % self.pi.current_dir(hidex86=True) + tools += [os.path.join(si.VCInstallDir, path)] + + else: + tools += [os.path.join(si.VCInstallDir, 'Bin')] + + return tools + + @property + def OSLibraries(self): + """ + Microsoft Windows SDK Libraries + """ + if self.vc_ver <= 10.0: + 
arch_subdir = self.pi.target_dir(hidex86=True, x64=True) + return [os.path.join(self.si.WindowsSdkDir, 'Lib%s' % arch_subdir)] + + else: + arch_subdir = self.pi.target_dir(x64=True) + lib = os.path.join(self.si.WindowsSdkDir, 'lib') + libver = self._get_content_dirname(lib) + return [os.path.join(lib, '%sum%s' % (libver, arch_subdir))] + + @property + def OSIncludes(self): + """ + Microsoft Windows SDK Include + """ + include = os.path.join(self.si.WindowsSdkDir, 'include') + + if self.vc_ver <= 10.0: + return [include, os.path.join(include, 'gl')] + + else: + if self.vc_ver >= 14.0: + sdkver = self._get_content_dirname(include) + else: + sdkver = '' + return [os.path.join(include, '%sshared' % sdkver), + os.path.join(include, '%sum' % sdkver), + os.path.join(include, '%swinrt' % sdkver)] + + @property + def OSLibpath(self): + """ + Microsoft Windows SDK Libraries Paths + """ + ref = os.path.join(self.si.WindowsSdkDir, 'References') + libpath = [] + + if self.vc_ver <= 9.0: + libpath += self.OSLibraries + + if self.vc_ver >= 11.0: + libpath += [os.path.join(ref, r'CommonConfiguration\Neutral')] + + if self.vc_ver >= 14.0: + libpath += [ + ref, + os.path.join(self.si.WindowsSdkDir, 'UnionMetadata'), + os.path.join( + ref, + 'Windows.Foundation.UniversalApiContract', + '1.0.0.0', + ), + os.path.join( + ref, + 'Windows.Foundation.FoundationContract', + '1.0.0.0', + ), + os.path.join( + ref, + 'Windows.Networking.Connectivity.WwanContract', + '1.0.0.0', + ), + os.path.join( + self.si.WindowsSdkDir, + 'ExtensionSDKs', + 'Microsoft.VCLibs', + '%0.1f' % self.vc_ver, + 'References', + 'CommonConfiguration', + 'neutral', + ), + ] + return libpath + + @property + def SdkTools(self): + """ + Microsoft Windows SDK Tools + """ + bin_dir = 'Bin' if self.vc_ver <= 11.0 else r'Bin\x86' + tools = [os.path.join(self.si.WindowsSdkDir, bin_dir)] + + if not self.pi.current_is_x86(): + arch_subdir = self.pi.current_dir(x64=True) + path = 'Bin%s' % arch_subdir + tools += [os.path.join(self.si.WindowsSdkDir, path)] + + if self.vc_ver == 10.0 or self.vc_ver == 11.0: + if self.pi.target_is_x86(): + arch_subdir = '' + else: + arch_subdir = self.pi.current_dir(hidex86=True, x64=True) + path = r'Bin\NETFX 4.0 Tools%s' % arch_subdir + tools += [os.path.join(self.si.WindowsSdkDir, path)] + + if self.si.WindowsSDKExecutablePath: + tools += [self.si.WindowsSDKExecutablePath] + + return tools + + @property + def SdkSetup(self): + """ + Microsoft Windows SDK Setup + """ + if self.vc_ver > 9.0: + return [] + + return [os.path.join(self.si.WindowsSdkDir, 'Setup')] + + @property + def FxTools(self): + """ + Microsoft .NET Framework Tools + """ + pi = self.pi + si = self.si + + if self.vc_ver <= 10.0: + include32 = True + include64 = not pi.target_is_x86() and not pi.current_is_x86() + else: + include32 = pi.target_is_x86() or pi.current_is_x86() + include64 = pi.current_cpu == 'amd64' or pi.target_cpu == 'amd64' + + tools = [] + if include32: + tools += [os.path.join(si.FrameworkDir32, ver) + for ver in si.FrameworkVersion32] + if include64: + tools += [os.path.join(si.FrameworkDir64, ver) + for ver in si.FrameworkVersion64] + return tools + + @property + def NetFxSDKLibraries(self): + """ + Microsoft .Net Framework SDK Libraries + """ + if self.vc_ver < 14.0 or not self.si.NetFxSdkDir: + return [] + + arch_subdir = self.pi.target_dir(x64=True) + return [os.path.join(self.si.NetFxSdkDir, r'lib\um%s' % arch_subdir)] + + @property + def NetFxSDKIncludes(self): + """ + Microsoft .Net Framework SDK Includes + """ + if self.vc_ver 
< 14.0 or not self.si.NetFxSdkDir: + return [] + + return [os.path.join(self.si.NetFxSdkDir, r'include\um')] + + @property + def VsTDb(self): + """ + Microsoft Visual Studio Team System Database + """ + return [os.path.join(self.si.VSInstallDir, r'VSTSDB\Deploy')] + + @property + def MSBuild(self): + """ + Microsoft Build Engine + """ + if self.vc_ver < 12.0: + return [] + + arch_subdir = self.pi.current_dir(hidex86=True) + path = r'MSBuild\%0.1f\bin%s' % (self.vc_ver, arch_subdir) + return [os.path.join(self.si.ProgramFilesx86, path)] + + @property + def HTMLHelpWorkshop(self): + """ + Microsoft HTML Help Workshop + """ + if self.vc_ver < 11.0: + return [] + + return [os.path.join(self.si.ProgramFilesx86, 'HTML Help Workshop')] + + @property + def UCRTLibraries(self): + """ + Microsoft Universal CRT Libraries + """ + if self.vc_ver < 14.0: + return [] + + arch_subdir = self.pi.target_dir(x64=True) + lib = os.path.join(self.si.UniversalCRTSdkDir, 'lib') + ucrtver = self._get_content_dirname(lib) + return [os.path.join(lib, '%sucrt%s' % (ucrtver, arch_subdir))] + + @property + def UCRTIncludes(self): + """ + Microsoft Universal CRT Include + """ + if self.vc_ver < 14.0: + return [] + + include = os.path.join(self.si.UniversalCRTSdkDir, 'include') + ucrtver = self._get_content_dirname(include) + return [os.path.join(include, '%sucrt' % ucrtver)] + + @property + def FSharp(self): + """ + Microsoft Visual F# + """ + if self.vc_ver < 11.0 and self.vc_ver > 12.0: + return [] + + return self.si.FSharpInstallDir + + @property + def VCRuntimeRedist(self): + """ + Microsoft Visual C++ runtime redistribuable dll + """ + arch_subdir = self.pi.target_dir(x64=True) + vcruntime = 'redist%s\\Microsoft.VC%d0.CRT\\vcruntime%d0.dll' + vcruntime = vcruntime % (arch_subdir, self.vc_ver, self.vc_ver) + return os.path.join(self.si.VCInstallDir, vcruntime) + + def return_env(self, exists=True): + """ + Return environment dict. + + Parameters + ---------- + exists: bool + It True, only return existing paths. + """ + env = dict( + include=self._build_paths('include', + [self.VCIncludes, + self.OSIncludes, + self.UCRTIncludes, + self.NetFxSDKIncludes], + exists), + lib=self._build_paths('lib', + [self.VCLibraries, + self.OSLibraries, + self.FxTools, + self.UCRTLibraries, + self.NetFxSDKLibraries], + exists), + libpath=self._build_paths('libpath', + [self.VCLibraries, + self.FxTools, + self.VCStoreRefs, + self.OSLibpath], + exists), + path=self._build_paths('path', + [self.VCTools, + self.VSTools, + self.VsTDb, + self.SdkTools, + self.SdkSetup, + self.FxTools, + self.MSBuild, + self.HTMLHelpWorkshop, + self.FSharp], + exists), + ) + if self.vc_ver >= 14 and os.path.isfile(self.VCRuntimeRedist): + env['py_vcruntime_redist'] = self.VCRuntimeRedist + return env + + def _build_paths(self, name, spec_path_lists, exists): + """ + Given an environment variable name and specified paths, + return a pathsep-separated string of paths containing + unique, extant, directories from those paths and from + the environment variable. Raise an error if no paths + are resolved. 
+ """ + # flatten spec_path_lists + spec_paths = itertools.chain.from_iterable(spec_path_lists) + env_paths = safe_env.get(name, '').split(os.pathsep) + paths = itertools.chain(spec_paths, env_paths) + extant_paths = list(filter(os.path.isdir, paths)) if exists else paths + if not extant_paths: + msg = "%s environment variable is empty" % name.upper() + raise distutils.errors.DistutilsPlatformError(msg) + unique_paths = self._unique_everseen(extant_paths) + return os.pathsep.join(unique_paths) + + # from Python docs + def _unique_everseen(self, iterable, key=None): + """ + List unique elements, preserving order. + Remember all elements ever seen. + + _unique_everseen('AAAABBBCCDAABBB') --> A B C D + + _unique_everseen('ABBCcAD', str.lower) --> A B C D + """ + seen = set() + seen_add = seen.add + if key is None: + for element in filterfalse(seen.__contains__, iterable): + seen_add(element) + yield element + else: + for element in iterable: + k = key(element) + if k not in seen: + seen_add(k) + yield element + + def _get_content_dirname(self, path): + """ + Return name of the first dir in path or '' if no dir found. + + Parameters + ---------- + path: str + Path where search dir. + + Return + ------ + foldername: str + "name\" or "" + """ + try: + name = os.listdir(path) + if name: + return '%s\\' % name[0] + return '' + except (OSError, IOError): + return '' diff --git a/env/lib/python3.6/site-packages/setuptools/namespaces.py b/env/lib/python3.6/site-packages/setuptools/namespaces.py new file mode 100644 index 0000000..cc934b7 --- /dev/null +++ b/env/lib/python3.6/site-packages/setuptools/namespaces.py @@ -0,0 +1,93 @@ +import os +from distutils import log +import itertools + +from setuptools.extern.six.moves import map + + +flatten = itertools.chain.from_iterable + + +class Installer: + + nspkg_ext = '-nspkg.pth' + + def install_namespaces(self): + nsp = self._get_all_ns_packages() + if not nsp: + return + filename, ext = os.path.splitext(self._get_target()) + filename += self.nspkg_ext + self.outputs.append(filename) + log.info("Installing %s", filename) + lines = map(self._gen_nspkg_line, nsp) + + if self.dry_run: + # always generate the lines, even in dry run + list(lines) + return + + with open(filename, 'wt') as f: + f.writelines(lines) + + def _get_target(self): + return self.target + + _nspkg_tmpl = ( + "import sys, types, os", + "pep420 = sys.version_info > (3, 3)", + "p = os.path.join(%(root)s, *%(pth)r)", + "ie = os.path.exists(os.path.join(p,'__init__.py'))", + "m = not ie and not pep420 and " + "sys.modules.setdefault(%(pkg)r, types.ModuleType(%(pkg)r))", + "mp = (m or []) and m.__dict__.setdefault('__path__',[])", + "(p not in mp) and mp.append(p)", + ) + "lines for the namespace installer" + + _nspkg_tmpl_multi = ( + 'm and setattr(sys.modules[%(parent)r], %(child)r, m)', + ) + "additional line(s) when a parent package is indicated" + + def _get_root(self): + return "sys._getframe(1).f_locals['sitedir']" + + def _gen_nspkg_line(self, pkg): + # ensure pkg is not a unicode string under Python 2.7 + pkg = str(pkg) + pth = tuple(pkg.split('.')) + root = self._get_root() + tmpl_lines = self._nspkg_tmpl + parent, sep, child = pkg.rpartition('.') + if parent: + tmpl_lines += self._nspkg_tmpl_multi + return ';'.join(tmpl_lines) % locals() + '\n' + + def _get_all_ns_packages(self): + """Return sorted list of all package namespaces""" + pkgs = self.distribution.namespace_packages or [] + return sorted(flatten(map(self._pkg_names, pkgs))) + + @staticmethod + def _pkg_names(pkg): + """ 
+ Given a namespace package, yield the components of that + package. + + >>> names = Installer._pkg_names('a.b.c') + >>> set(names) == set(['a', 'a.b', 'a.b.c']) + True + """ + parts = pkg.split('.') + while parts: + yield '.'.join(parts) + parts.pop() + + +class DevelopInstaller(Installer): + def _get_root(self): + return repr(str(self.egg_path)) + + def _get_target(self): + return self.egg_link diff --git a/env/lib/python3.6/site-packages/setuptools/package_index.py b/env/lib/python3.6/site-packages/setuptools/package_index.py new file mode 100644 index 0000000..024fab9 --- /dev/null +++ b/env/lib/python3.6/site-packages/setuptools/package_index.py @@ -0,0 +1,1115 @@ +"""PyPI and direct package downloading""" +import sys +import os +import re +import shutil +import socket +import base64 +import hashlib +import itertools +from functools import wraps + +try: + from urllib.parse import splituser +except ImportError: + from urllib2 import splituser + +from setuptools.extern import six +from setuptools.extern.six.moves import urllib, http_client, configparser, map + +import setuptools +from pkg_resources import ( + CHECKOUT_DIST, Distribution, BINARY_DIST, normalize_path, SOURCE_DIST, + require, Environment, find_distributions, safe_name, safe_version, + to_filename, Requirement, DEVELOP_DIST, +) +from setuptools import ssl_support +from distutils import log +from distutils.errors import DistutilsError +from fnmatch import translate +from setuptools.py26compat import strip_fragment +from setuptools.py27compat import get_all_headers + +EGG_FRAGMENT = re.compile(r'^egg=([-A-Za-z0-9_.]+)$') +HREF = re.compile("""href\\s*=\\s*['"]?([^'"> ]+)""", re.I) +# this is here to fix emacs' cruddy broken syntax highlighting +PYPI_MD5 = re.compile( + '<a href="([^"#]+)">([^<]+)</a>\n\s+\\(<a (?:title="MD5 hash"\n\s+)' + 'href="[^?]+\?:action=show_md5&digest=([0-9a-f]{32})">md5</a>\\)' +) +URL_SCHEME = re.compile('([-+.a-z0-9]{2,}):', re.I).match +EXTENSIONS = ".tar.gz .tar.bz2 .tar .zip .tgz".split() + +__all__ = [ + 'PackageIndex', 'distros_for_url', 'parse_bdist_wininst', + 'interpret_distro_name', +] + +_SOCKET_TIMEOUT = 15 + +_tmpl = "setuptools/{setuptools.__version__} Python-urllib/{py_major}" +user_agent = _tmpl.format(py_major=sys.version[:3], **globals()) + + +def parse_requirement_arg(spec): + try: + return Requirement.parse(spec) + except ValueError: + raise DistutilsError( + "Not a URL, existing file, or requirement spec: %r" % (spec,) + ) + + +def parse_bdist_wininst(name): + """Return (base,pyversion) or (None,None) for possible .exe name""" + + lower = name.lower() + base, py_ver, plat = None, None, None + + if lower.endswith('.exe'): + if lower.endswith('.win32.exe'): + base = name[:-10] + plat = 'win32' + elif lower.startswith('.win32-py', -16): + py_ver = name[-7:-4] + base = name[:-16] + plat = 'win32' + elif lower.endswith('.win-amd64.exe'): + base = name[:-14] + plat = 'win-amd64' + elif lower.startswith('.win-amd64-py', -20): + py_ver = name[-7:-4] + base = name[:-20] + plat = 'win-amd64' + return base, py_ver, plat + + +def egg_info_for_url(url): + parts = urllib.parse.urlparse(url) + scheme, server, path, parameters, query, fragment = parts + base = urllib.parse.unquote(path.split('/')[-1]) + if server == 'sourceforge.net' and base == 'download': # XXX Yuck + base = urllib.parse.unquote(path.split('/')[-2]) + if '#' in base: + base, fragment = base.split('#', 1) + return base, fragment + + +def distros_for_url(url, metadata=None): + """Yield egg or source distribution objects that 
might be found at a URL""" + base, fragment = egg_info_for_url(url) + for dist in distros_for_location(url, base, metadata): + yield dist + if fragment: + match = EGG_FRAGMENT.match(fragment) + if match: + for dist in interpret_distro_name( + url, match.group(1), metadata, precedence=CHECKOUT_DIST + ): + yield dist + + +def distros_for_location(location, basename, metadata=None): + """Yield egg or source distribution objects based on basename""" + if basename.endswith('.egg.zip'): + basename = basename[:-4] # strip the .zip + if basename.endswith('.egg') and '-' in basename: + # only one, unambiguous interpretation + return [Distribution.from_location(location, basename, metadata)] + if basename.endswith('.exe'): + win_base, py_ver, platform = parse_bdist_wininst(basename) + if win_base is not None: + return interpret_distro_name( + location, win_base, metadata, py_ver, BINARY_DIST, platform + ) + # Try source distro extensions (.zip, .tgz, etc.) + # + for ext in EXTENSIONS: + if basename.endswith(ext): + basename = basename[:-len(ext)] + return interpret_distro_name(location, basename, metadata) + return [] # no extension matched + + +def distros_for_filename(filename, metadata=None): + """Yield possible egg or source distribution objects based on a filename""" + return distros_for_location( + normalize_path(filename), os.path.basename(filename), metadata + ) + + +def interpret_distro_name( + location, basename, metadata, py_version=None, precedence=SOURCE_DIST, + platform=None + ): + """Generate alternative interpretations of a source distro name + + Note: if `location` is a filesystem filename, you should call + ``pkg_resources.normalize_path()`` on it before passing it to this + routine! + """ + # Generate alternative interpretations of a source distro name + # Because some packages are ambiguous as to name/versions split + # e.g. "adns-python-1.1.0", "egenix-mx-commercial", etc. + # So, we generate each possible interepretation (e.g. "adns, python-1.1.0" + # "adns-python, 1.1.0", and "adns-python-1.1.0, no version"). In practice, + # the spurious interpretations should be ignored, because in the event + # there's also an "adns" package, the spurious "python-1.1.0" version will + # compare lower than any numeric version number, and is therefore unlikely + # to match a request for it. It's still a potential problem, though, and + # in the long run PyPI and the distutils should go for "safe" names and + # versions in distribution archive names (sdist and bdist). + + parts = basename.split('-') + if not py_version and any(re.match('py\d\.\d$', p) for p in parts[2:]): + # it is a bdist_dumb, not an sdist -- bail out + return + + for p in range(1, len(parts) + 1): + yield Distribution( + location, metadata, '-'.join(parts[:p]), '-'.join(parts[p:]), + py_version=py_version, precedence=precedence, + platform=platform + ) + + +# From Python 2.7 docs +def unique_everseen(iterable, key=None): + "List unique elements, preserving order. Remember all elements ever seen." + # unique_everseen('AAAABBBCCDAABBB') --> A B C D + # unique_everseen('ABBCcAD', str.lower) --> A B C D + seen = set() + seen_add = seen.add + if key is None: + for element in six.moves.filterfalse(seen.__contains__, iterable): + seen_add(element) + yield element + else: + for element in iterable: + k = key(element) + if k not in seen: + seen_add(k) + yield element + + +def unique_values(func): + """ + Wrap a function returning an iterable such that the resulting iterable + only ever yields unique items. 
+ """ + + @wraps(func) + def wrapper(*args, **kwargs): + return unique_everseen(func(*args, **kwargs)) + + return wrapper + + +REL = re.compile("""<([^>]*\srel\s*=\s*['"]?([^'">]+)[^>]*)>""", re.I) +# this line is here to fix emacs' cruddy broken syntax highlighting + + +@unique_values +def find_external_links(url, page): + """Find rel="homepage" and rel="download" links in `page`, yielding URLs""" + + for match in REL.finditer(page): + tag, rel = match.groups() + rels = set(map(str.strip, rel.lower().split(','))) + if 'homepage' in rels or 'download' in rels: + for match in HREF.finditer(tag): + yield urllib.parse.urljoin(url, htmldecode(match.group(1))) + + for tag in ("<th>Home Page", "<th>Download URL"): + pos = page.find(tag) + if pos != -1: + match = HREF.search(page, pos) + if match: + yield urllib.parse.urljoin(url, htmldecode(match.group(1))) + + +class ContentChecker(object): + """ + A null content checker that defines the interface for checking content + """ + + def feed(self, block): + """ + Feed a block of data to the hash. + """ + return + + def is_valid(self): + """ + Check the hash. Return False if validation fails. + """ + return True + + def report(self, reporter, template): + """ + Call reporter with information about the checker (hash name) + substituted into the template. + """ + return + + +class HashChecker(ContentChecker): + pattern = re.compile( + r'(?P<hash_name>sha1|sha224|sha384|sha256|sha512|md5)=' + r'(?P<expected>[a-f0-9]+)' + ) + + def __init__(self, hash_name, expected): + self.hash_name = hash_name + self.hash = hashlib.new(hash_name) + self.expected = expected + + @classmethod + def from_url(cls, url): + "Construct a (possibly null) ContentChecker from a URL" + fragment = urllib.parse.urlparse(url)[-1] + if not fragment: + return ContentChecker() + match = cls.pattern.search(fragment) + if not match: + return ContentChecker() + return cls(**match.groupdict()) + + def feed(self, block): + self.hash.update(block) + + def is_valid(self): + return self.hash.hexdigest() == self.expected + + def report(self, reporter, template): + msg = template % self.hash_name + return reporter(msg) + + +class PackageIndex(Environment): + """A distribution index that scans web pages for download URLs""" + + def __init__( + self, index_url="https://pypi.python.org/simple", hosts=('*',), + ca_bundle=None, verify_ssl=True, *args, **kw + ): + Environment.__init__(self, *args, **kw) + self.index_url = index_url + "/" [:not index_url.endswith('/')] + self.scanned_urls = {} + self.fetched_urls = {} + self.package_pages = {} + self.allows = re.compile('|'.join(map(translate, hosts))).match + self.to_scan = [] + use_ssl = ( + verify_ssl + and ssl_support.is_available + and (ca_bundle or ssl_support.find_ca_bundle()) + ) + if use_ssl: + self.opener = ssl_support.opener_for(ca_bundle) + else: + self.opener = urllib.request.urlopen + + def process_url(self, url, retrieve=False): + """Evaluate a URL as a possible download, and maybe retrieve it""" + if url in self.scanned_urls and not retrieve: + return + self.scanned_urls[url] = True + if not URL_SCHEME(url): + self.process_filename(url) + return + else: + dists = list(distros_for_url(url)) + if dists: + if not self.url_ok(url): + return + self.debug("Found link: %s", url) + + if dists or not retrieve or url in self.fetched_urls: + list(map(self.add, dists)) + return # don't need the actual page + + if not self.url_ok(url): + self.fetched_urls[url] = True + return + + self.info("Reading %s", url) + self.fetched_urls[url] = True # prevent 
multiple fetch attempts + tmpl = "Download error on %s: %%s -- Some packages may not be found!" + f = self.open_url(url, tmpl % url) + if f is None: + return + self.fetched_urls[f.url] = True + if 'html' not in f.headers.get('content-type', '').lower(): + f.close() # not html, we can't process it + return + + base = f.url # handle redirects + page = f.read() + if not isinstance(page, str): # We are in Python 3 and got bytes. We want str. + if isinstance(f, urllib.error.HTTPError): + # Errors have no charset, assume latin1: + charset = 'latin-1' + else: + charset = f.headers.get_param('charset') or 'latin-1' + page = page.decode(charset, "ignore") + f.close() + for match in HREF.finditer(page): + link = urllib.parse.urljoin(base, htmldecode(match.group(1))) + self.process_url(link) + if url.startswith(self.index_url) and getattr(f, 'code', None) != 404: + page = self.process_index(url, page) + + def process_filename(self, fn, nested=False): + # process filenames or directories + if not os.path.exists(fn): + self.warn("Not found: %s", fn) + return + + if os.path.isdir(fn) and not nested: + path = os.path.realpath(fn) + for item in os.listdir(path): + self.process_filename(os.path.join(path, item), True) + + dists = distros_for_filename(fn) + if dists: + self.debug("Found: %s", fn) + list(map(self.add, dists)) + + def url_ok(self, url, fatal=False): + s = URL_SCHEME(url) + is_file = s and s.group(1).lower() == 'file' + if is_file or self.allows(urllib.parse.urlparse(url)[1]): + return True + msg = ("\nNote: Bypassing %s (disallowed host; see " + "http://bit.ly/1dg9ijs for details).\n") + if fatal: + raise DistutilsError(msg % url) + else: + self.warn(msg, url) + + def scan_egg_links(self, search_path): + dirs = filter(os.path.isdir, search_path) + egg_links = ( + (path, entry) + for path in dirs + for entry in os.listdir(path) + if entry.endswith('.egg-link') + ) + list(itertools.starmap(self.scan_egg_link, egg_links)) + + def scan_egg_link(self, path, entry): + with open(os.path.join(path, entry)) as raw_lines: + # filter non-empty lines + lines = list(filter(None, map(str.strip, raw_lines))) + + if len(lines) != 2: + # format is not recognized; punt + return + + egg_path, setup_path = lines + + for dist in find_distributions(os.path.join(path, egg_path)): + dist.location = os.path.join(path, *lines) + dist.precedence = SOURCE_DIST + self.add(dist) + + def process_index(self, url, page): + """Process the contents of a PyPI page""" + + def scan(link): + # Process a URL to see if it's for a package page + if link.startswith(self.index_url): + parts = list(map( + urllib.parse.unquote, link[len(self.index_url):].split('/') + )) + if len(parts) == 2 and '#' not in parts[1]: + # it's a package page, sanitize and index it + pkg = safe_name(parts[0]) + ver = safe_version(parts[1]) + self.package_pages.setdefault(pkg.lower(), {})[link] = True + return to_filename(pkg), to_filename(ver) + return None, None + + # process an index page into the package-page index + for match in HREF.finditer(page): + try: + scan(urllib.parse.urljoin(url, htmldecode(match.group(1)))) + except ValueError: + pass + + pkg, ver = scan(url) # ensure this page is in the page index + if pkg: + # process individual package page + for new_url in find_external_links(url, page): + # Process the found URL + base, frag = egg_info_for_url(new_url) + if base.endswith('.py') and not frag: + if ver: + new_url += '#egg=%s-%s' % (pkg, ver) + else: + self.need_version_info(url) + self.scan_url(new_url) + + return PYPI_MD5.sub( + lambda m: 
'<a href="%s#md5=%s">%s</a>' % m.group(1, 3, 2), page + ) + else: + return "" # no sense double-scanning non-package pages + + def need_version_info(self, url): + self.scan_all( + "Page at %s links to .py file(s) without version info; an index " + "scan is required.", url + ) + + def scan_all(self, msg=None, *args): + if self.index_url not in self.fetched_urls: + if msg: + self.warn(msg, *args) + self.info( + "Scanning index of all packages (this may take a while)" + ) + self.scan_url(self.index_url) + + def find_packages(self, requirement): + self.scan_url(self.index_url + requirement.unsafe_name + '/') + + if not self.package_pages.get(requirement.key): + # Fall back to safe version of the name + self.scan_url(self.index_url + requirement.project_name + '/') + + if not self.package_pages.get(requirement.key): + # We couldn't find the target package, so search the index page too + self.not_found_in_index(requirement) + + for url in list(self.package_pages.get(requirement.key, ())): + # scan each page that might be related to the desired package + self.scan_url(url) + + def obtain(self, requirement, installer=None): + self.prescan() + self.find_packages(requirement) + for dist in self[requirement.key]: + if dist in requirement: + return dist + self.debug("%s does not match %s", requirement, dist) + return super(PackageIndex, self).obtain(requirement, installer) + + def check_hash(self, checker, filename, tfp): + """ + checker is a ContentChecker + """ + checker.report(self.debug, + "Validating %%s checksum for %s" % filename) + if not checker.is_valid(): + tfp.close() + os.unlink(filename) + raise DistutilsError( + "%s validation failed for %s; " + "possible download problem?" % ( + checker.hash.name, os.path.basename(filename)) + ) + + def add_find_links(self, urls): + """Add `urls` to the list that will be prescanned for searches""" + for url in urls: + if ( + self.to_scan is None # if we have already "gone online" + or not URL_SCHEME(url) # or it's a local file/directory + or url.startswith('file:') + or list(distros_for_url(url)) # or a direct package link + ): + # then go ahead and process it now + self.scan_url(url) + else: + # otherwise, defer retrieval till later + self.to_scan.append(url) + + def prescan(self): + """Scan urls scheduled for prescanning (e.g. --find-links)""" + if self.to_scan: + list(map(self.scan_url, self.to_scan)) + self.to_scan = None # from now on, go ahead and process immediately + + def not_found_in_index(self, requirement): + if self[requirement.key]: # we've seen at least one distro + meth, msg = self.info, "Couldn't retrieve index page for %r" + else: # no distros seen for this name, might be misspelled + meth, msg = (self.warn, + "Couldn't find index page for %r (maybe misspelled?)") + meth(msg, requirement.unsafe_name) + self.scan_all() + + def download(self, spec, tmpdir): + """Locate and/or download `spec` to `tmpdir`, returning a local path + + `spec` may be a ``Requirement`` object, or a string containing a URL, + an existing local filename, or a project/version requirement spec + (i.e. the string form of a ``Requirement`` object). If it is the URL + of a .py file with an unambiguous ``#egg=name-version`` tag (i.e., one + that escapes ``-`` as ``_`` throughout), a trivial ``setup.py`` is + automatically created alongside the downloaded file. + + If `spec` is a ``Requirement`` object or a string containing a + project/version requirement spec, this method returns the location of + a matching distribution (possibly after downloading it to `tmpdir`). 
+ If `spec` is a locally existing file or directory name, it is simply + returned unchanged. If `spec` is a URL, it is downloaded to a subpath + of `tmpdir`, and the local filename is returned. Various errors may be + raised if a problem occurs during downloading. + """ + if not isinstance(spec, Requirement): + scheme = URL_SCHEME(spec) + if scheme: + # It's a url, download it to tmpdir + found = self._download_url(scheme.group(1), spec, tmpdir) + base, fragment = egg_info_for_url(spec) + if base.endswith('.py'): + found = self.gen_setup(found, fragment, tmpdir) + return found + elif os.path.exists(spec): + # Existing file or directory, just return it + return spec + else: + spec = parse_requirement_arg(spec) + return getattr(self.fetch_distribution(spec, tmpdir), 'location', None) + + def fetch_distribution( + self, requirement, tmpdir, force_scan=False, source=False, + develop_ok=False, local_index=None + ): + """Obtain a distribution suitable for fulfilling `requirement` + + `requirement` must be a ``pkg_resources.Requirement`` instance. + If necessary, or if the `force_scan` flag is set, the requirement is + searched for in the (online) package index as well as the locally + installed packages. If a distribution matching `requirement` is found, + the returned distribution's ``location`` is the value you would have + gotten from calling the ``download()`` method with the matching + distribution's URL or filename. If no matching distribution is found, + ``None`` is returned. + + If the `source` flag is set, only source distributions and source + checkout links will be considered. Unless the `develop_ok` flag is + set, development and system eggs (i.e., those using the ``.egg-info`` + format) will be ignored. + """ + # process a Requirement + self.info("Searching for %s", requirement) + skipped = {} + dist = None + + def find(req, env=None): + if env is None: + env = self + # Find a matching distribution; may be called more than once + + for dist in env[req.key]: + + if dist.precedence == DEVELOP_DIST and not develop_ok: + if dist not in skipped: + self.warn("Skipping development or system egg: %s", dist) + skipped[dist] = 1 + continue + + if dist in req and (dist.precedence <= SOURCE_DIST or not source): + dist.download_location = self.download(dist.location, tmpdir) + if os.path.exists(dist.download_location): + return dist + + if force_scan: + self.prescan() + self.find_packages(requirement) + dist = find(requirement) + + if not dist and local_index is not None: + dist = find(requirement, local_index) + + if dist is None: + if self.to_scan is not None: + self.prescan() + dist = find(requirement) + + if dist is None and not force_scan: + self.find_packages(requirement) + dist = find(requirement) + + if dist is None: + self.warn( + "No local packages or working download links found for %s%s", + (source and "a source distribution of " or ""), + requirement, + ) + else: + self.info("Best match: %s", dist) + return dist.clone(location=dist.download_location) + + def fetch(self, requirement, tmpdir, force_scan=False, source=False): + """Obtain a file suitable for fulfilling `requirement` + + DEPRECATED; use the ``fetch_distribution()`` method now instead. For + backward compatibility, this routine is identical but returns the + ``location`` of the downloaded distribution instead of a distribution + object. 
+ """ + dist = self.fetch_distribution(requirement, tmpdir, force_scan, source) + if dist is not None: + return dist.location + return None + + def gen_setup(self, filename, fragment, tmpdir): + match = EGG_FRAGMENT.match(fragment) + dists = match and [ + d for d in + interpret_distro_name(filename, match.group(1), None) if d.version + ] or [] + + if len(dists) == 1: # unambiguous ``#egg`` fragment + basename = os.path.basename(filename) + + # Make sure the file has been downloaded to the temp dir. + if os.path.dirname(filename) != tmpdir: + dst = os.path.join(tmpdir, basename) + from setuptools.command.easy_install import samefile + if not samefile(filename, dst): + shutil.copy2(filename, dst) + filename = dst + + with open(os.path.join(tmpdir, 'setup.py'), 'w') as file: + file.write( + "from setuptools import setup\n" + "setup(name=%r, version=%r, py_modules=[%r])\n" + % ( + dists[0].project_name, dists[0].version, + os.path.splitext(basename)[0] + ) + ) + return filename + + elif match: + raise DistutilsError( + "Can't unambiguously interpret project/version identifier %r; " + "any dashes in the name or version should be escaped using " + "underscores. %r" % (fragment, dists) + ) + else: + raise DistutilsError( + "Can't process plain .py files without an '#egg=name-version'" + " suffix to enable automatic setup script generation." + ) + + dl_blocksize = 8192 + + def _download_to(self, url, filename): + self.info("Downloading %s", url) + # Download the file + fp, info = None, None + try: + checker = HashChecker.from_url(url) + fp = self.open_url(strip_fragment(url)) + if isinstance(fp, urllib.error.HTTPError): + raise DistutilsError( + "Can't download %s: %s %s" % (url, fp.code, fp.msg) + ) + headers = fp.info() + blocknum = 0 + bs = self.dl_blocksize + size = -1 + if "content-length" in headers: + # Some servers return multiple Content-Length headers :( + sizes = get_all_headers(headers, 'Content-Length') + size = max(map(int, sizes)) + self.reporthook(url, filename, blocknum, bs, size) + with open(filename, 'wb') as tfp: + while True: + block = fp.read(bs) + if block: + checker.feed(block) + tfp.write(block) + blocknum += 1 + self.reporthook(url, filename, blocknum, bs, size) + else: + break + self.check_hash(checker, filename, tfp) + return headers + finally: + if fp: + fp.close() + + def reporthook(self, url, filename, blocknum, blksize, size): + pass # no-op + + def open_url(self, url, warning=None): + if url.startswith('file:'): + return local_open(url) + try: + return open_with_auth(url, self.opener) + except (ValueError, http_client.InvalidURL) as v: + msg = ' '.join([str(arg) for arg in v.args]) + if warning: + self.warn(warning, msg) + else: + raise DistutilsError('%s %s' % (url, msg)) + except urllib.error.HTTPError as v: + return v + except urllib.error.URLError as v: + if warning: + self.warn(warning, v.reason) + else: + raise DistutilsError("Download error for %s: %s" + % (url, v.reason)) + except http_client.BadStatusLine as v: + if warning: + self.warn(warning, v.line) + else: + raise DistutilsError( + '%s returned a bad status line. The server might be ' + 'down, %s' % + (url, v.line) + ) + except http_client.HTTPException as v: + if warning: + self.warn(warning, v) + else: + raise DistutilsError("Download error for %s: %s" + % (url, v)) + + def _download_url(self, scheme, url, tmpdir): + # Determine download filename + # + name, fragment = egg_info_for_url(url) + if name: + while '..' 
in name: + name = name.replace('..', '.').replace('\\', '_') + else: + name = "__downloaded__" # default if URL has no path contents + + if name.endswith('.egg.zip'): + name = name[:-4] # strip the extra .zip before download + + filename = os.path.join(tmpdir, name) + + # Download the file + # + if scheme == 'svn' or scheme.startswith('svn+'): + return self._download_svn(url, filename) + elif scheme == 'git' or scheme.startswith('git+'): + return self._download_git(url, filename) + elif scheme.startswith('hg+'): + return self._download_hg(url, filename) + elif scheme == 'file': + return urllib.request.url2pathname(urllib.parse.urlparse(url)[2]) + else: + self.url_ok(url, True) # raises error if not allowed + return self._attempt_download(url, filename) + + def scan_url(self, url): + self.process_url(url, True) + + def _attempt_download(self, url, filename): + headers = self._download_to(url, filename) + if 'html' in headers.get('content-type', '').lower(): + return self._download_html(url, headers, filename) + else: + return filename + + def _download_html(self, url, headers, filename): + file = open(filename) + for line in file: + if line.strip(): + # Check for a subversion index page + if re.search(r'<title>([^- ]+ - )?Revision \d+:', line): + # it's a subversion index page: + file.close() + os.unlink(filename) + return self._download_svn(url, filename) + break # not an index page + file.close() + os.unlink(filename) + raise DistutilsError("Unexpected HTML page found at " + url) + + def _download_svn(self, url, filename): + url = url.split('#', 1)[0] # remove any fragment for svn's sake + creds = '' + if url.lower().startswith('svn:') and '@' in url: + scheme, netloc, path, p, q, f = urllib.parse.urlparse(url) + if not netloc and path.startswith('//') and '/' in path[2:]: + netloc, path = path[2:].split('/', 1) + auth, host = splituser(netloc) + if auth: + if ':' in auth: + user, pw = auth.split(':', 1) + creds = " --username=%s --password=%s" % (user, pw) + else: + creds = " --username=" + auth + netloc = host + parts = scheme, netloc, url, p, q, f + url = urllib.parse.urlunparse(parts) + self.info("Doing subversion checkout from %s to %s", url, filename) + os.system("svn checkout%s -q %s %s" % (creds, url, filename)) + return filename + + @staticmethod + def _vcs_split_rev_from_url(url, pop_prefix=False): + scheme, netloc, path, query, frag = urllib.parse.urlsplit(url) + + scheme = scheme.split('+', 1)[-1] + + # Some fragment identification fails + path = path.split('#', 1)[0] + + rev = None + if '@' in path: + path, rev = path.rsplit('@', 1) + + # Also, discard fragment + url = urllib.parse.urlunsplit((scheme, netloc, path, query, '')) + + return url, rev + + def _download_git(self, url, filename): + filename = filename.split('#', 1)[0] + url, rev = self._vcs_split_rev_from_url(url, pop_prefix=True) + + self.info("Doing git clone from %s to %s", url, filename) + os.system("git clone --quiet %s %s" % (url, filename)) + + if rev is not None: + self.info("Checking out %s", rev) + os.system("(cd %s && git checkout --quiet %s)" % ( + filename, + rev, + )) + + return filename + + def _download_hg(self, url, filename): + filename = filename.split('#', 1)[0] + url, rev = self._vcs_split_rev_from_url(url, pop_prefix=True) + + self.info("Doing hg clone from %s to %s", url, filename) + os.system("hg clone --quiet %s %s" % (url, filename)) + + if rev is not None: + self.info("Updating to %s", rev) + os.system("(cd %s && hg up -C -r %s >&-)" % ( + filename, + rev, + )) + + return filename + + def 
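# Sketch (assumption) of the URL shape handled by _vcs_split_rev_from_url() and
# _download_git() above: a "git+" prefixed scheme, an optional "@revision"
# suffix, and a fragment that is dropped before cloning.
from urllib.parse import urlsplit, urlunsplit

url = "git+https://example.com/repo.git@v1.2#egg=repo"
scheme, netloc, path, query, frag = urlsplit(url)
scheme = scheme.split("+", 1)[-1]
rev = None
if "@" in path:
    path, rev = path.rsplit("@", 1)
print(urlunsplit((scheme, netloc, path, query, "")), rev)
# -> https://example.com/repo.git v1.2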
debug(self, msg, *args): + log.debug(msg, *args) + + def info(self, msg, *args): + log.info(msg, *args) + + def warn(self, msg, *args): + log.warn(msg, *args) + + +# This pattern matches a character entity reference (a decimal numeric +# references, a hexadecimal numeric reference, or a named reference). +entity_sub = re.compile(r'&(#(\d+|x[\da-fA-F]+)|[\w.:-]+);?').sub + + +def uchr(c): + if not isinstance(c, int): + return c + if c > 255: + return six.unichr(c) + return chr(c) + + +def decode_entity(match): + what = match.group(1) + if what.startswith('#x'): + what = int(what[2:], 16) + elif what.startswith('#'): + what = int(what[1:]) + else: + what = six.moves.html_entities.name2codepoint.get(what, match.group(0)) + return uchr(what) + + +def htmldecode(text): + """Decode HTML entities in the given text.""" + return entity_sub(decode_entity, text) + + +def socket_timeout(timeout=15): + def _socket_timeout(func): + def _socket_timeout(*args, **kwargs): + old_timeout = socket.getdefaulttimeout() + socket.setdefaulttimeout(timeout) + try: + return func(*args, **kwargs) + finally: + socket.setdefaulttimeout(old_timeout) + + return _socket_timeout + + return _socket_timeout + + +def _encode_auth(auth): + """ + A function compatible with Python 2.3-3.3 that will encode + auth from a URL suitable for an HTTP header. + >>> str(_encode_auth('username%3Apassword')) + 'dXNlcm5hbWU6cGFzc3dvcmQ=' + + Long auth strings should not cause a newline to be inserted. + >>> long_auth = 'username:' + 'password'*10 + >>> chr(10) in str(_encode_auth(long_auth)) + False + """ + auth_s = urllib.parse.unquote(auth) + # convert to bytes + auth_bytes = auth_s.encode() + # use the legacy interface for Python 2.3 support + encoded_bytes = base64.encodestring(auth_bytes) + # convert back to a string + encoded = encoded_bytes.decode() + # strip the trailing carriage return + return encoded.replace('\n', '') + + +class Credential(object): + """ + A username/password pair. Use like a namedtuple. + """ + + def __init__(self, username, password): + self.username = username + self.password = password + + def __iter__(self): + yield self.username + yield self.password + + def __str__(self): + return '%(username)s:%(password)s' % vars(self) + + +class PyPIConfig(configparser.RawConfigParser): + def __init__(self): + """ + Load from ~/.pypirc + """ + defaults = dict.fromkeys(['username', 'password', 'repository'], '') + configparser.RawConfigParser.__init__(self, defaults) + + rc = os.path.join(os.path.expanduser('~'), '.pypirc') + if os.path.exists(rc): + self.read(rc) + + @property + def creds_by_repository(self): + sections_with_repositories = [ + section for section in self.sections() + if self.get(section, 'repository').strip() + ] + + return dict(map(self._get_repo_cred, sections_with_repositories)) + + def _get_repo_cred(self, section): + repo = self.get(section, 'repository').strip() + return repo, Credential( + self.get(section, 'username').strip(), + self.get(section, 'password').strip(), + ) + + def find_credential(self, url): + """ + If the URL indicated appears to be a repository defined in this + config, return the credential for that repository. 
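# Minimal sketch (values are illustrative) of the Credential/_encode_auth
# pairing above: a username/password pair renders as "user:pass" and is
# base64-encoded into a Basic Authorization header with no trailing newline.
import base64

cred = "alice:s3cret"                              # what str(Credential(...)) yields
token = base64.b64encode(cred.encode()).decode()
print("Basic " + token)                            # Basic YWxpY2U6czNjcmV0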
+ """ + for repository, cred in self.creds_by_repository.items(): + if url.startswith(repository): + return cred + + +def open_with_auth(url, opener=urllib.request.urlopen): + """Open a urllib2 request, handling HTTP authentication""" + + scheme, netloc, path, params, query, frag = urllib.parse.urlparse(url) + + # Double scheme does not raise on Mac OS X as revealed by a + # failing test. We would expect "nonnumeric port". Refs #20. + if netloc.endswith(':'): + raise http_client.InvalidURL("nonnumeric port: ''") + + if scheme in ('http', 'https'): + auth, host = splituser(netloc) + else: + auth = None + + if not auth: + cred = PyPIConfig().find_credential(url) + if cred: + auth = str(cred) + info = cred.username, url + log.info('Authenticating as %s for %s (from .pypirc)', *info) + + if auth: + auth = "Basic " + _encode_auth(auth) + parts = scheme, host, path, params, query, frag + new_url = urllib.parse.urlunparse(parts) + request = urllib.request.Request(new_url) + request.add_header("Authorization", auth) + else: + request = urllib.request.Request(url) + + request.add_header('User-Agent', user_agent) + fp = opener(request) + + if auth: + # Put authentication info back into request URL if same host, + # so that links found on the page will work + s2, h2, path2, param2, query2, frag2 = urllib.parse.urlparse(fp.url) + if s2 == scheme and h2 == host: + parts = s2, netloc, path2, param2, query2, frag2 + fp.url = urllib.parse.urlunparse(parts) + + return fp + + +# adding a timeout to avoid freezing package_index +open_with_auth = socket_timeout(_SOCKET_TIMEOUT)(open_with_auth) + + +def fix_sf_url(url): + return url # backward compatibility + + +def local_open(url): + """Read a local path, with special support for directories""" + scheme, server, path, param, query, frag = urllib.parse.urlparse(url) + filename = urllib.request.url2pathname(path) + if os.path.isfile(filename): + return urllib.request.urlopen(url) + elif path.endswith('/') and os.path.isdir(filename): + files = [] + for f in os.listdir(filename): + filepath = os.path.join(filename, f) + if f == 'index.html': + with open(filepath, 'r') as fp: + body = fp.read() + break + elif os.path.isdir(filepath): + f += '/' + files.append('<a href="{name}">{name}</a>'.format(name=f)) + else: + tmpl = ("<html><head><title>{url}" + "{files}") + body = tmpl.format(url=url, files='\n'.join(files)) + status, message = 200, "OK" + else: + status, message, body = 404, "Path not found", "Not found" + + headers = {'content-type': 'text/html'} + body_stream = six.StringIO(body) + return urllib.error.HTTPError(url, status, message, headers, body_stream) diff --git a/env/lib/python3.6/site-packages/setuptools/py26compat.py b/env/lib/python3.6/site-packages/setuptools/py26compat.py new file mode 100644 index 0000000..4d3add8 --- /dev/null +++ b/env/lib/python3.6/site-packages/setuptools/py26compat.py @@ -0,0 +1,31 @@ +""" +Compatibility Support for Python 2.6 and earlier +""" + +import sys + +try: + from urllib.parse import splittag +except ImportError: + from urllib import splittag + + +def strip_fragment(url): + """ + In `Python 8280 `_, Python 2.7 and + later was patched to disregard the fragment when making URL requests. + Do the same for Python 2.6 and earlier. 
+ """ + url, fragment = splittag(url) + return url + + +if sys.version_info >= (2, 7): + strip_fragment = lambda x: x + +try: + from importlib import import_module +except ImportError: + + def import_module(module_name): + return __import__(module_name, fromlist=['__name__']) diff --git a/env/lib/python3.6/site-packages/setuptools/py27compat.py b/env/lib/python3.6/site-packages/setuptools/py27compat.py new file mode 100644 index 0000000..4e3e4ab --- /dev/null +++ b/env/lib/python3.6/site-packages/setuptools/py27compat.py @@ -0,0 +1,18 @@ +""" +Compatibility Support for Python 2.7 and earlier +""" + +import sys + + +def get_all_headers(message, key): + """ + Given an HTTPMessage, return all headers matching a given key. + """ + return message.get_all(key) + + +if sys.version_info < (3,): + + def get_all_headers(message, key): + return message.getheaders(key) diff --git a/env/lib/python3.6/site-packages/setuptools/py31compat.py b/env/lib/python3.6/site-packages/setuptools/py31compat.py new file mode 100644 index 0000000..44b025d --- /dev/null +++ b/env/lib/python3.6/site-packages/setuptools/py31compat.py @@ -0,0 +1,56 @@ +import sys +import unittest + +__all__ = ['get_config_vars', 'get_path'] + +try: + # Python 2.7 or >=3.2 + from sysconfig import get_config_vars, get_path +except ImportError: + from distutils.sysconfig import get_config_vars, get_python_lib + + def get_path(name): + if name not in ('platlib', 'purelib'): + raise ValueError("Name must be purelib or platlib") + return get_python_lib(name == 'platlib') + + +try: + # Python >=3.2 + from tempfile import TemporaryDirectory +except ImportError: + import shutil + import tempfile + + class TemporaryDirectory(object): + """ + Very simple temporary directory context manager. + Will try to delete afterward, but will also ignore OS and similar + errors on deletion. 
+ """ + + def __init__(self): + self.name = None # Handle mkdtemp raising an exception + self.name = tempfile.mkdtemp() + + def __enter__(self): + return self.name + + def __exit__(self, exctype, excvalue, exctrace): + try: + shutil.rmtree(self.name, True) + except OSError: # removal errors are not the only possible + pass + self.name = None + + +unittest_main = unittest.main + +_PY31 = (3, 1) <= sys.version_info[:2] < (3, 2) +if _PY31: + # on Python 3.1, translate testRunner==None to TextTestRunner + # for compatibility with Python 2.6, 2.7, and 3.2+ + def unittest_main(*args, **kwargs): + if 'testRunner' in kwargs and kwargs['testRunner'] is None: + kwargs['testRunner'] = unittest.TextTestRunner + return unittest.main(*args, **kwargs) diff --git a/env/lib/python3.6/site-packages/setuptools/sandbox.py b/env/lib/python3.6/site-packages/setuptools/sandbox.py new file mode 100644 index 0000000..39afd57 --- /dev/null +++ b/env/lib/python3.6/site-packages/setuptools/sandbox.py @@ -0,0 +1,492 @@ +import os +import sys +import tempfile +import operator +import functools +import itertools +import re +import contextlib +import pickle + +from setuptools.extern import six +from setuptools.extern.six.moves import builtins, map + +import pkg_resources + +if sys.platform.startswith('java'): + import org.python.modules.posix.PosixModule as _os +else: + _os = sys.modules[os.name] +try: + _file = file +except NameError: + _file = None +_open = open +from distutils.errors import DistutilsError +from pkg_resources import working_set + +__all__ = [ + "AbstractSandbox", "DirectorySandbox", "SandboxViolation", "run_setup", +] + + +def _execfile(filename, globals, locals=None): + """ + Python 3 implementation of execfile. + """ + mode = 'rb' + with open(filename, mode) as stream: + script = stream.read() + # compile() function in Python 2.6 and 3.1 requires LF line endings. + if sys.version_info[:2] < (2, 7) or sys.version_info[:2] >= (3, 0) and sys.version_info[:2] < (3, 2): + script = script.replace(b'\r\n', b'\n') + script = script.replace(b'\r', b'\n') + if locals is None: + locals = globals + code = compile(script, filename, 'exec') + exec(code, globals, locals) + + +@contextlib.contextmanager +def save_argv(repl=None): + saved = sys.argv[:] + if repl is not None: + sys.argv[:] = repl + try: + yield saved + finally: + sys.argv[:] = saved + + +@contextlib.contextmanager +def save_path(): + saved = sys.path[:] + try: + yield saved + finally: + sys.path[:] = saved + + +@contextlib.contextmanager +def override_temp(replacement): + """ + Monkey-patch tempfile.tempdir with replacement, ensuring it exists + """ + if not os.path.isdir(replacement): + os.makedirs(replacement) + + saved = tempfile.tempdir + + tempfile.tempdir = replacement + + try: + yield + finally: + tempfile.tempdir = saved + + +@contextlib.contextmanager +def pushd(target): + saved = os.getcwd() + os.chdir(target) + try: + yield saved + finally: + os.chdir(saved) + + +class UnpickleableException(Exception): + """ + An exception representing another Exception that could not be pickled. + """ + + @staticmethod + def dump(type, exc): + """ + Always return a dumped (pickled) type and exc. If exc can't be pickled, + wrap it in UnpickleableException first. 
+ """ + try: + return pickle.dumps(type), pickle.dumps(exc) + except Exception: + # get UnpickleableException inside the sandbox + from setuptools.sandbox import UnpickleableException as cls + return cls.dump(cls, cls(repr(exc))) + + +class ExceptionSaver: + """ + A Context Manager that will save an exception, serialized, and restore it + later. + """ + + def __enter__(self): + return self + + def __exit__(self, type, exc, tb): + if not exc: + return + + # dump the exception + self._saved = UnpickleableException.dump(type, exc) + self._tb = tb + + # suppress the exception + return True + + def resume(self): + "restore and re-raise any exception" + + if '_saved' not in vars(self): + return + + type, exc = map(pickle.loads, self._saved) + six.reraise(type, exc, self._tb) + + +@contextlib.contextmanager +def save_modules(): + """ + Context in which imported modules are saved. + + Translates exceptions internal to the context into the equivalent exception + outside the context. + """ + saved = sys.modules.copy() + with ExceptionSaver() as saved_exc: + yield saved + + sys.modules.update(saved) + # remove any modules imported since + del_modules = ( + mod_name for mod_name in sys.modules + if mod_name not in saved + # exclude any encodings modules. See #285 + and not mod_name.startswith('encodings.') + ) + _clear_modules(del_modules) + + saved_exc.resume() + + +def _clear_modules(module_names): + for mod_name in list(module_names): + del sys.modules[mod_name] + + +@contextlib.contextmanager +def save_pkg_resources_state(): + saved = pkg_resources.__getstate__() + try: + yield saved + finally: + pkg_resources.__setstate__(saved) + + +@contextlib.contextmanager +def setup_context(setup_dir): + temp_dir = os.path.join(setup_dir, 'temp') + with save_pkg_resources_state(): + with save_modules(): + hide_setuptools() + with save_path(): + with save_argv(): + with override_temp(temp_dir): + with pushd(setup_dir): + # ensure setuptools commands are available + __import__('setuptools') + yield + + +def _needs_hiding(mod_name): + """ + >>> _needs_hiding('setuptools') + True + >>> _needs_hiding('pkg_resources') + True + >>> _needs_hiding('setuptools_plugin') + False + >>> _needs_hiding('setuptools.__init__') + True + >>> _needs_hiding('distutils') + True + >>> _needs_hiding('os') + False + >>> _needs_hiding('Cython') + True + """ + pattern = re.compile('(setuptools|pkg_resources|distutils|Cython)(\.|$)') + return bool(pattern.match(mod_name)) + + +def hide_setuptools(): + """ + Remove references to setuptools' modules from sys.modules to allow the + invocation to import the most appropriate setuptools. This technique is + necessary to avoid issues such as #315 where setuptools upgrading itself + would fail to find a function declared in the metadata. 
+ """ + modules = filter(_needs_hiding, sys.modules) + _clear_modules(modules) + + +def run_setup(setup_script, args): + """Run a distutils setup script, sandboxed in its directory""" + setup_dir = os.path.abspath(os.path.dirname(setup_script)) + with setup_context(setup_dir): + try: + sys.argv[:] = [setup_script] + list(args) + sys.path.insert(0, setup_dir) + # reset to include setup dir, w/clean callback list + working_set.__init__() + working_set.callbacks.append(lambda dist: dist.activate()) + + def runner(): + ns = dict(__file__=setup_script, __name__='__main__') + _execfile(setup_script, ns) + + DirectorySandbox(setup_dir).run(runner) + except SystemExit as v: + if v.args and v.args[0]: + raise + # Normal exit, just return + + +class AbstractSandbox: + """Wrap 'os' module and 'open()' builtin for virtualizing setup scripts""" + + _active = False + + def __init__(self): + self._attrs = [ + name for name in dir(_os) + if not name.startswith('_') and hasattr(self, name) + ] + + def _copy(self, source): + for name in self._attrs: + setattr(os, name, getattr(source, name)) + + def run(self, func): + """Run 'func' under os sandboxing""" + try: + self._copy(self) + if _file: + builtins.file = self._file + builtins.open = self._open + self._active = True + return func() + finally: + self._active = False + if _file: + builtins.file = _file + builtins.open = _open + self._copy(_os) + + def _mk_dual_path_wrapper(name): + original = getattr(_os, name) + + def wrap(self, src, dst, *args, **kw): + if self._active: + src, dst = self._remap_pair(name, src, dst, *args, **kw) + return original(src, dst, *args, **kw) + + return wrap + + for name in ["rename", "link", "symlink"]: + if hasattr(_os, name): + locals()[name] = _mk_dual_path_wrapper(name) + + def _mk_single_path_wrapper(name, original=None): + original = original or getattr(_os, name) + + def wrap(self, path, *args, **kw): + if self._active: + path = self._remap_input(name, path, *args, **kw) + return original(path, *args, **kw) + + return wrap + + if _file: + _file = _mk_single_path_wrapper('file', _file) + _open = _mk_single_path_wrapper('open', _open) + for name in [ + "stat", "listdir", "chdir", "open", "chmod", "chown", "mkdir", + "remove", "unlink", "rmdir", "utime", "lchown", "chroot", "lstat", + "startfile", "mkfifo", "mknod", "pathconf", "access" + ]: + if hasattr(_os, name): + locals()[name] = _mk_single_path_wrapper(name) + + def _mk_single_with_return(name): + original = getattr(_os, name) + + def wrap(self, path, *args, **kw): + if self._active: + path = self._remap_input(name, path, *args, **kw) + return self._remap_output(name, original(path, *args, **kw)) + return original(path, *args, **kw) + + return wrap + + for name in ['readlink', 'tempnam']: + if hasattr(_os, name): + locals()[name] = _mk_single_with_return(name) + + def _mk_query(name): + original = getattr(_os, name) + + def wrap(self, *args, **kw): + retval = original(*args, **kw) + if self._active: + return self._remap_output(name, retval) + return retval + + return wrap + + for name in ['getcwd', 'tmpnam']: + if hasattr(_os, name): + locals()[name] = _mk_query(name) + + def _validate_path(self, path): + """Called to remap or validate any path, whether input or output""" + return path + + def _remap_input(self, operation, path, *args, **kw): + """Called for path inputs""" + return self._validate_path(path) + + def _remap_output(self, operation, path): + """Called for path outputs""" + return self._validate_path(path) + + def _remap_pair(self, operation, src, dst, 
*args, **kw): + """Called for path pairs like rename, link, and symlink operations""" + return ( + self._remap_input(operation + '-from', src, *args, **kw), + self._remap_input(operation + '-to', dst, *args, **kw) + ) + + +if hasattr(os, 'devnull'): + _EXCEPTIONS = [os.devnull,] +else: + _EXCEPTIONS = [] + +try: + from win32com.client.gencache import GetGeneratePath + _EXCEPTIONS.append(GetGeneratePath()) + del GetGeneratePath +except ImportError: + # it appears pywin32 is not installed, so no need to exclude. + pass + + +class DirectorySandbox(AbstractSandbox): + """Restrict operations to a single subdirectory - pseudo-chroot""" + + write_ops = dict.fromkeys([ + "open", "chmod", "chown", "mkdir", "remove", "unlink", "rmdir", + "utime", "lchown", "chroot", "mkfifo", "mknod", "tempnam", + ]) + + _exception_patterns = [ + # Allow lib2to3 to attempt to save a pickled grammar object (#121) + '.*lib2to3.*\.pickle$', + ] + "exempt writing to paths that match the pattern" + + def __init__(self, sandbox, exceptions=_EXCEPTIONS): + self._sandbox = os.path.normcase(os.path.realpath(sandbox)) + self._prefix = os.path.join(self._sandbox, '') + self._exceptions = [ + os.path.normcase(os.path.realpath(path)) + for path in exceptions + ] + AbstractSandbox.__init__(self) + + def _violation(self, operation, *args, **kw): + from setuptools.sandbox import SandboxViolation + raise SandboxViolation(operation, args, kw) + + if _file: + + def _file(self, path, mode='r', *args, **kw): + if mode not in ('r', 'rt', 'rb', 'rU', 'U') and not self._ok(path): + self._violation("file", path, mode, *args, **kw) + return _file(path, mode, *args, **kw) + + def _open(self, path, mode='r', *args, **kw): + if mode not in ('r', 'rt', 'rb', 'rU', 'U') and not self._ok(path): + self._violation("open", path, mode, *args, **kw) + return _open(path, mode, *args, **kw) + + def tmpnam(self): + self._violation("tmpnam") + + def _ok(self, path): + active = self._active + try: + self._active = False + realpath = os.path.normcase(os.path.realpath(path)) + return ( + self._exempted(realpath) + or realpath == self._sandbox + or realpath.startswith(self._prefix) + ) + finally: + self._active = active + + def _exempted(self, filepath): + start_matches = ( + filepath.startswith(exception) + for exception in self._exceptions + ) + pattern_matches = ( + re.match(pattern, filepath) + for pattern in self._exception_patterns + ) + candidates = itertools.chain(start_matches, pattern_matches) + return any(candidates) + + def _remap_input(self, operation, path, *args, **kw): + """Called for path inputs""" + if operation in self.write_ops and not self._ok(path): + self._violation(operation, os.path.realpath(path), *args, **kw) + return path + + def _remap_pair(self, operation, src, dst, *args, **kw): + """Called for path pairs like rename, link, and symlink operations""" + if not self._ok(src) or not self._ok(dst): + self._violation(operation, src, dst, *args, **kw) + return (src, dst) + + def open(self, file, flags, mode=0o777, *args, **kw): + """Called for low-level os.open()""" + if flags & WRITE_FLAGS and not self._ok(file): + self._violation("os.open", file, flags, mode, *args, **kw) + return _os.open(file, flags, mode, *args, **kw) + + +WRITE_FLAGS = functools.reduce( + operator.or_, [getattr(_os, a, 0) for a in + "O_WRONLY O_RDWR O_APPEND O_CREAT O_TRUNC O_TEMPORARY".split()] +) + + +class SandboxViolation(DistutilsError): + """A setup script attempted to modify the filesystem outside the sandbox""" + + def __str__(self): + return 
"""SandboxViolation: %s%r %s + +The package setup script has attempted to modify files on your system +that are not within the EasyInstall build area, and has been aborted. + +This package cannot be safely installed by EasyInstall, and may not +support alternate installation locations even if you run its setup +script by hand. Please inform the package's author and the EasyInstall +maintainers to find out if a fix or workaround is available.""" % self.args + + +# diff --git a/env/lib/python3.6/site-packages/setuptools/script (dev).tmpl b/env/lib/python3.6/site-packages/setuptools/script (dev).tmpl new file mode 100644 index 0000000..d58b1bb --- /dev/null +++ b/env/lib/python3.6/site-packages/setuptools/script (dev).tmpl @@ -0,0 +1,5 @@ +# EASY-INSTALL-DEV-SCRIPT: %(spec)r,%(script_name)r +__requires__ = %(spec)r +__import__('pkg_resources').require(%(spec)r) +__file__ = %(dev_path)r +exec(compile(open(__file__).read(), __file__, 'exec')) diff --git a/env/lib/python3.6/site-packages/setuptools/script.tmpl b/env/lib/python3.6/site-packages/setuptools/script.tmpl new file mode 100644 index 0000000..ff5efbc --- /dev/null +++ b/env/lib/python3.6/site-packages/setuptools/script.tmpl @@ -0,0 +1,3 @@ +# EASY-INSTALL-SCRIPT: %(spec)r,%(script_name)r +__requires__ = %(spec)r +__import__('pkg_resources').run_script(%(spec)r, %(script_name)r) diff --git a/env/lib/python3.6/site-packages/setuptools/site-patch.py b/env/lib/python3.6/site-packages/setuptools/site-patch.py new file mode 100644 index 0000000..0d2d2ff --- /dev/null +++ b/env/lib/python3.6/site-packages/setuptools/site-patch.py @@ -0,0 +1,74 @@ +def __boot(): + import sys + import os + PYTHONPATH = os.environ.get('PYTHONPATH') + if PYTHONPATH is None or (sys.platform == 'win32' and not PYTHONPATH): + PYTHONPATH = [] + else: + PYTHONPATH = PYTHONPATH.split(os.pathsep) + + pic = getattr(sys, 'path_importer_cache', {}) + stdpath = sys.path[len(PYTHONPATH):] + mydir = os.path.dirname(__file__) + + for item in stdpath: + if item == mydir or not item: + continue # skip if current dir. 
on Windows, or my own directory + importer = pic.get(item) + if importer is not None: + loader = importer.find_module('site') + if loader is not None: + # This should actually reload the current module + loader.load_module('site') + break + else: + try: + import imp # Avoid import loop in Python >= 3.3 + stream, path, descr = imp.find_module('site', [item]) + except ImportError: + continue + if stream is None: + continue + try: + # This should actually reload the current module + imp.load_module('site', stream, path, descr) + finally: + stream.close() + break + else: + raise ImportError("Couldn't find the real 'site' module") + + known_paths = dict([(makepath(item)[1], 1) for item in sys.path]) # 2.2 comp + + oldpos = getattr(sys, '__egginsert', 0) # save old insertion position + sys.__egginsert = 0 # and reset the current one + + for item in PYTHONPATH: + addsitedir(item) + + sys.__egginsert += oldpos # restore effective old position + + d, nd = makepath(stdpath[0]) + insert_at = None + new_path = [] + + for item in sys.path: + p, np = makepath(item) + + if np == nd and insert_at is None: + # We've hit the first 'system' path entry, so added entries go here + insert_at = len(new_path) + + if np in known_paths or insert_at is None: + new_path.append(item) + else: + # new path after the insert point, back-insert it + new_path.insert(insert_at, item) + insert_at += 1 + + sys.path[:] = new_path + + +if __name__ == 'site': + __boot() + del __boot diff --git a/env/lib/python3.6/site-packages/setuptools/ssl_support.py b/env/lib/python3.6/site-packages/setuptools/ssl_support.py new file mode 100644 index 0000000..82f8870 --- /dev/null +++ b/env/lib/python3.6/site-packages/setuptools/ssl_support.py @@ -0,0 +1,250 @@ +import os +import socket +import atexit +import re + +from setuptools.extern.six.moves import urllib, http_client, map + +import pkg_resources +from pkg_resources import ResolutionError, ExtractionError + +try: + import ssl +except ImportError: + ssl = None + +__all__ = [ + 'VerifyingHTTPSHandler', 'find_ca_bundle', 'is_available', 'cert_paths', + 'opener_for' +] + +cert_paths = """ +/etc/pki/tls/certs/ca-bundle.crt +/etc/ssl/certs/ca-certificates.crt +/usr/share/ssl/certs/ca-bundle.crt +/usr/local/share/certs/ca-root.crt +/etc/ssl/cert.pem +/System/Library/OpenSSL/certs/cert.pem +/usr/local/share/certs/ca-root-nss.crt +/etc/ssl/ca-bundle.pem +""".strip().split() + +try: + HTTPSHandler = urllib.request.HTTPSHandler + HTTPSConnection = http_client.HTTPSConnection +except AttributeError: + HTTPSHandler = HTTPSConnection = object + +is_available = ssl is not None and object not in (HTTPSHandler, HTTPSConnection) + + +try: + from ssl import CertificateError, match_hostname +except ImportError: + try: + from backports.ssl_match_hostname import CertificateError + from backports.ssl_match_hostname import match_hostname + except ImportError: + CertificateError = None + match_hostname = None + +if not CertificateError: + + class CertificateError(ValueError): + pass + + +if not match_hostname: + + def _dnsname_match(dn, hostname, max_wildcards=1): + """Matching according to RFC 6125, section 6.4.3 + + http://tools.ietf.org/html/rfc6125#section-6.4.3 + """ + pats = [] + if not dn: + return False + + # Ported from python3-syntax: + # leftmost, *remainder = dn.split(r'.') + parts = dn.split(r'.') + leftmost = parts[0] + remainder = parts[1:] + + wildcards = leftmost.count('*') + if wildcards > max_wildcards: + # Issue #17980: avoid denials of service by refusing more + # than one wildcard per 
fragment. A survey of established + # policy among SSL implementations showed it to be a + # reasonable choice. + raise CertificateError( + "too many wildcards in certificate DNS name: " + repr(dn)) + + # speed up common case w/o wildcards + if not wildcards: + return dn.lower() == hostname.lower() + + # RFC 6125, section 6.4.3, subitem 1. + # The client SHOULD NOT attempt to match a presented identifier in which + # the wildcard character comprises a label other than the left-most label. + if leftmost == '*': + # When '*' is a fragment by itself, it matches a non-empty dotless + # fragment. + pats.append('[^.]+') + elif leftmost.startswith('xn--') or hostname.startswith('xn--'): + # RFC 6125, section 6.4.3, subitem 3. + # The client SHOULD NOT attempt to match a presented identifier + # where the wildcard character is embedded within an A-label or + # U-label of an internationalized domain name. + pats.append(re.escape(leftmost)) + else: + # Otherwise, '*' matches any dotless string, e.g. www* + pats.append(re.escape(leftmost).replace(r'\*', '[^.]*')) + + # add the remaining fragments, ignore any wildcards + for frag in remainder: + pats.append(re.escape(frag)) + + pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE) + return pat.match(hostname) + + def match_hostname(cert, hostname): + """Verify that *cert* (in decoded format as returned by + SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125 + rules are followed, but IP addresses are not accepted for *hostname*. + + CertificateError is raised on failure. On success, the function + returns nothing. + """ + if not cert: + raise ValueError("empty or no certificate") + dnsnames = [] + san = cert.get('subjectAltName', ()) + for key, value in san: + if key == 'DNS': + if _dnsname_match(value, hostname): + return + dnsnames.append(value) + if not dnsnames: + # The subject is only checked when there is no dNSName entry + # in subjectAltName + for sub in cert.get('subject', ()): + for key, value in sub: + # XXX according to RFC 2818, the most specific Common Name + # must be used. 
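# Illustration (assumption) of the wildcard rule implemented by
# _dnsname_match() above: "*.example.com" compiles to one dotless label, so it
# covers "www.example.com" but not "a.b.example.com".
import re

pat = re.compile(r"\A[^.]+\.example\.com\Z", re.IGNORECASE)
print(bool(pat.match("www.example.com")))    # True
print(bool(pat.match("a.b.example.com")))    # False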
+ if key == 'commonName': + if _dnsname_match(value, hostname): + return + dnsnames.append(value) + if len(dnsnames) > 1: + raise CertificateError("hostname %r " + "doesn't match either of %s" + % (hostname, ', '.join(map(repr, dnsnames)))) + elif len(dnsnames) == 1: + raise CertificateError("hostname %r " + "doesn't match %r" + % (hostname, dnsnames[0])) + else: + raise CertificateError("no appropriate commonName or " + "subjectAltName fields were found") + + +class VerifyingHTTPSHandler(HTTPSHandler): + """Simple verifying handler: no auth, subclasses, timeouts, etc.""" + + def __init__(self, ca_bundle): + self.ca_bundle = ca_bundle + HTTPSHandler.__init__(self) + + def https_open(self, req): + return self.do_open( + lambda host, **kw: VerifyingHTTPSConn(host, self.ca_bundle, **kw), req + ) + + +class VerifyingHTTPSConn(HTTPSConnection): + """Simple verifying connection: no auth, subclasses, timeouts, etc.""" + + def __init__(self, host, ca_bundle, **kw): + HTTPSConnection.__init__(self, host, **kw) + self.ca_bundle = ca_bundle + + def connect(self): + sock = socket.create_connection( + (self.host, self.port), getattr(self, 'source_address', None) + ) + + # Handle the socket if a (proxy) tunnel is present + if hasattr(self, '_tunnel') and getattr(self, '_tunnel_host', None): + self.sock = sock + self._tunnel() + # http://bugs.python.org/issue7776: Python>=3.4.1 and >=2.7.7 + # change self.host to mean the proxy server host when tunneling is + # being used. Adapt, since we are interested in the destination + # host for the match_hostname() comparison. + actual_host = self._tunnel_host + else: + actual_host = self.host + + self.sock = ssl.wrap_socket( + sock, cert_reqs=ssl.CERT_REQUIRED, ca_certs=self.ca_bundle + ) + try: + match_hostname(self.sock.getpeercert(), actual_host) + except CertificateError: + self.sock.shutdown(socket.SHUT_RDWR) + self.sock.close() + raise + + +def opener_for(ca_bundle=None): + """Get a urlopen() replacement that uses ca_bundle for verification""" + return urllib.request.build_opener( + VerifyingHTTPSHandler(ca_bundle or find_ca_bundle()) + ).open + + +_wincerts = None + + +def get_win_certfile(): + global _wincerts + if _wincerts is not None: + return _wincerts.name + + try: + from wincertstore import CertFile + except ImportError: + return None + + class MyCertFile(CertFile): + def __init__(self, stores=(), certs=()): + CertFile.__init__(self) + for store in stores: + self.addstore(store) + self.addcerts(certs) + atexit.register(self.close) + + def close(self): + try: + super(MyCertFile, self).close() + except OSError: + pass + + _wincerts = MyCertFile(stores=['CA', 'ROOT']) + return _wincerts.name + + +def find_ca_bundle(): + """Return an existing CA bundle path, or None""" + if os.name == 'nt': + return get_win_certfile() + else: + for cert_path in cert_paths: + if os.path.isfile(cert_path): + return cert_path + try: + import certifi + return certifi.where() + except (ImportError, ResolutionError, ExtractionError): + return None diff --git a/env/lib/python3.6/site-packages/setuptools/unicode_utils.py b/env/lib/python3.6/site-packages/setuptools/unicode_utils.py new file mode 100644 index 0000000..7c63efd --- /dev/null +++ b/env/lib/python3.6/site-packages/setuptools/unicode_utils.py @@ -0,0 +1,44 @@ +import unicodedata +import sys + +from setuptools.extern import six + + +# HFS Plus uses decomposed UTF-8 +def decompose(path): + if isinstance(path, six.text_type): + return unicodedata.normalize('NFD', path) + try: + path = path.decode('utf-8') + path = 
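# Sketch (assumption) of driving opener_for()/find_ca_bundle() above: build a
# urlopen replacement that verifies the server certificate. This assumes
# find_ca_bundle() actually locates a CA bundle on the system; the URL is
# illustrative.
from setuptools.ssl_support import opener_for

urlopen = opener_for()          # falls back to find_ca_bundle() internally
resp = urlopen("https://pypi.org/simple/")
print(resp.getcode())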
unicodedata.normalize('NFD', path) + path = path.encode('utf-8') + except UnicodeError: + pass # Not UTF-8 + return path + + +def filesys_decode(path): + """ + Ensure that the given path is decoded, + NONE when no expected encoding works + """ + + if isinstance(path, six.text_type): + return path + + fs_enc = sys.getfilesystemencoding() or 'utf-8' + candidates = fs_enc, 'utf-8' + + for enc in candidates: + try: + return path.decode(enc) + except UnicodeDecodeError: + continue + + +def try_encode(string, enc): + "turn unicode encoding into a functional routine" + try: + return string.encode(enc) + except UnicodeEncodeError: + return None diff --git a/env/lib/python3.6/site-packages/setuptools/version.py b/env/lib/python3.6/site-packages/setuptools/version.py new file mode 100644 index 0000000..f2b4072 --- /dev/null +++ b/env/lib/python3.6/site-packages/setuptools/version.py @@ -0,0 +1,6 @@ +import pkg_resources + +try: + __version__ = pkg_resources.require('setuptools')[0].version +except Exception: + __version__ = 'unknown' diff --git a/env/lib/python3.6/site-packages/setuptools/windows_support.py b/env/lib/python3.6/site-packages/setuptools/windows_support.py new file mode 100644 index 0000000..cb977cf --- /dev/null +++ b/env/lib/python3.6/site-packages/setuptools/windows_support.py @@ -0,0 +1,29 @@ +import platform +import ctypes + + +def windows_only(func): + if platform.system() != 'Windows': + return lambda *args, **kwargs: None + return func + + +@windows_only +def hide_file(path): + """ + Set the hidden attribute on a file or directory. + + From http://stackoverflow.com/questions/19622133/ + + `path` must be text. + """ + __import__('ctypes.wintypes') + SetFileAttributes = ctypes.windll.kernel32.SetFileAttributesW + SetFileAttributes.argtypes = ctypes.wintypes.LPWSTR, ctypes.wintypes.DWORD + SetFileAttributes.restype = ctypes.wintypes.BOOL + + FILE_ATTRIBUTE_HIDDEN = 0x02 + + ret = SetFileAttributes(path, FILE_ATTRIBUTE_HIDDEN) + if not ret: + raise ctypes.WinError() diff --git a/env/lib/python3.6/site-packages/six-1.11.0.dist-info/DESCRIPTION.rst b/env/lib/python3.6/site-packages/six-1.11.0.dist-info/DESCRIPTION.rst new file mode 100644 index 0000000..09c2c99 --- /dev/null +++ b/env/lib/python3.6/site-packages/six-1.11.0.dist-info/DESCRIPTION.rst @@ -0,0 +1,27 @@ +.. image:: http://img.shields.io/pypi/v/six.svg + :target: https://pypi.python.org/pypi/six + +.. image:: https://travis-ci.org/benjaminp/six.svg?branch=master + :target: https://travis-ci.org/benjaminp/six + +.. image:: http://img.shields.io/badge/license-MIT-green.svg + :target: https://github.com/benjaminp/six/blob/master/LICENSE + +Six is a Python 2 and 3 compatibility library. It provides utility functions +for smoothing over the differences between the Python versions with the goal of +writing Python code that is compatible on both Python versions. See the +documentation for more information on what is provided. + +Six supports every Python version since 2.6. It is contained in only one Python +file, so it can be easily copied into your project. (The copyright and license +notice must be retained.) + +Online documentation is at http://six.rtfd.org. + +Bugs can be reported to https://github.com/benjaminp/six. The code can also +be found there. 
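# Quick illustration (stdlib only) of the HFS+ decomposition that decompose()
# above performs: NFD normalization stores "é" as "e" plus a combining accent,
# so the decomposed string is one code point longer.
import unicodedata

name = "café"
nfd = unicodedata.normalize("NFD", name)
print(len(name), len(nfd))   # 4 5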
+ +For questions about six or porting in general, email the python-porting mailing +list: https://mail.python.org/mailman/listinfo/python-porting + + diff --git a/env/lib/python3.6/site-packages/six-1.11.0.dist-info/INSTALLER b/env/lib/python3.6/site-packages/six-1.11.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/env/lib/python3.6/site-packages/six-1.11.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/env/lib/python3.6/site-packages/six-1.11.0.dist-info/METADATA b/env/lib/python3.6/site-packages/six-1.11.0.dist-info/METADATA new file mode 100644 index 0000000..04e93dc --- /dev/null +++ b/env/lib/python3.6/site-packages/six-1.11.0.dist-info/METADATA @@ -0,0 +1,43 @@ +Metadata-Version: 2.0 +Name: six +Version: 1.11.0 +Summary: Python 2 and 3 compatibility utilities +Home-page: http://pypi.python.org/pypi/six/ +Author: Benjamin Peterson +Author-email: benjamin@python.org +License: MIT +Platform: UNKNOWN +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 3 +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Topic :: Software Development :: Libraries +Classifier: Topic :: Utilities + +.. image:: http://img.shields.io/pypi/v/six.svg + :target: https://pypi.python.org/pypi/six + +.. image:: https://travis-ci.org/benjaminp/six.svg?branch=master + :target: https://travis-ci.org/benjaminp/six + +.. image:: http://img.shields.io/badge/license-MIT-green.svg + :target: https://github.com/benjaminp/six/blob/master/LICENSE + +Six is a Python 2 and 3 compatibility library. It provides utility functions +for smoothing over the differences between the Python versions with the goal of +writing Python code that is compatible on both Python versions. See the +documentation for more information on what is provided. + +Six supports every Python version since 2.6. It is contained in only one Python +file, so it can be easily copied into your project. (The copyright and license +notice must be retained.) + +Online documentation is at http://six.rtfd.org. + +Bugs can be reported to https://github.com/benjaminp/six. The code can also +be found there. 
+ +For questions about six or porting in general, email the python-porting mailing +list: https://mail.python.org/mailman/listinfo/python-porting + + diff --git a/env/lib/python3.6/site-packages/six-1.11.0.dist-info/RECORD b/env/lib/python3.6/site-packages/six-1.11.0.dist-info/RECORD new file mode 100644 index 0000000..c870eca --- /dev/null +++ b/env/lib/python3.6/site-packages/six-1.11.0.dist-info/RECORD @@ -0,0 +1,9 @@ +six.py,sha256=A08MPb-Gi9FfInI3IW7HimXFmEH2T2IPzHgDvdhZPRA,30888 +six-1.11.0.dist-info/DESCRIPTION.rst,sha256=gPBoq1Ruc1QDWyLeXPlieL3F-XZz1_WXB-5gctCfg-A,1098 +six-1.11.0.dist-info/METADATA,sha256=06nZXaDYN3vnC-pmUjhkECYFH_a--ywvcPIpUdNeH1o,1607 +six-1.11.0.dist-info/RECORD,, +six-1.11.0.dist-info/WHEEL,sha256=o2k-Qa-RMNIJmUdIc7KU6VWR_ErNRbWNlxDIpl7lm34,110 +six-1.11.0.dist-info/metadata.json,sha256=ac3f4f7MpSHSnZ1SqhHCwsL7FGWMG0gBEb0hhS2eSSM,703 +six-1.11.0.dist-info/top_level.txt,sha256=_iVH_iYEtEXnD8nYGQYpYFUvkUW9sEO1GYbkeKSAais,4 +six-1.11.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +__pycache__/six.cpython-36.pyc,, diff --git a/env/lib/python3.6/site-packages/six-1.11.0.dist-info/WHEEL b/env/lib/python3.6/site-packages/six-1.11.0.dist-info/WHEEL new file mode 100644 index 0000000..8b6dd1b --- /dev/null +++ b/env/lib/python3.6/site-packages/six-1.11.0.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.29.0) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/env/lib/python3.6/site-packages/six-1.11.0.dist-info/metadata.json b/env/lib/python3.6/site-packages/six-1.11.0.dist-info/metadata.json new file mode 100644 index 0000000..2c7fcea --- /dev/null +++ b/env/lib/python3.6/site-packages/six-1.11.0.dist-info/metadata.json @@ -0,0 +1 @@ +{"classifiers": ["Programming Language :: Python :: 2", "Programming Language :: Python :: 3", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Topic :: Software Development :: Libraries", "Topic :: Utilities"], "extensions": {"python.details": {"contacts": [{"email": "benjamin@python.org", "name": "Benjamin Peterson", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "http://pypi.python.org/pypi/six/"}}}, "generator": "bdist_wheel (0.29.0)", "license": "MIT", "metadata_version": "2.0", "name": "six", "summary": "Python 2 and 3 compatibility utilities", "test_requires": [{"requires": ["pytest"]}], "version": "1.11.0"} \ No newline at end of file diff --git a/env/lib/python3.6/site-packages/six-1.11.0.dist-info/top_level.txt b/env/lib/python3.6/site-packages/six-1.11.0.dist-info/top_level.txt new file mode 100644 index 0000000..ffe2fce --- /dev/null +++ b/env/lib/python3.6/site-packages/six-1.11.0.dist-info/top_level.txt @@ -0,0 +1 @@ +six diff --git a/env/lib/python3.6/site-packages/six.py b/env/lib/python3.6/site-packages/six.py new file mode 100644 index 0000000..6bf4fd3 --- /dev/null +++ b/env/lib/python3.6/site-packages/six.py @@ -0,0 +1,891 @@ +# Copyright (c) 2010-2017 Benjamin Peterson +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this 
permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Utilities for writing code that runs on Python 2 and 3""" + +from __future__ import absolute_import + +import functools +import itertools +import operator +import sys +import types + +__author__ = "Benjamin Peterson " +__version__ = "1.11.0" + + +# Useful for very coarse version differentiation. +PY2 = sys.version_info[0] == 2 +PY3 = sys.version_info[0] == 3 +PY34 = sys.version_info[0:2] >= (3, 4) + +if PY3: + string_types = str, + integer_types = int, + class_types = type, + text_type = str + binary_type = bytes + + MAXSIZE = sys.maxsize +else: + string_types = basestring, + integer_types = (int, long) + class_types = (type, types.ClassType) + text_type = unicode + binary_type = str + + if sys.platform.startswith("java"): + # Jython always uses 32 bits. + MAXSIZE = int((1 << 31) - 1) + else: + # It's possible to have sizeof(long) != sizeof(Py_ssize_t). + class X(object): + + def __len__(self): + return 1 << 31 + try: + len(X()) + except OverflowError: + # 32-bit + MAXSIZE = int((1 << 31) - 1) + else: + # 64-bit + MAXSIZE = int((1 << 63) - 1) + del X + + +def _add_doc(func, doc): + """Add documentation to a function.""" + func.__doc__ = doc + + +def _import_module(name): + """Import module, returning the module after the last dot.""" + __import__(name) + return sys.modules[name] + + +class _LazyDescr(object): + + def __init__(self, name): + self.name = name + + def __get__(self, obj, tp): + result = self._resolve() + setattr(obj, self.name, result) # Invokes __set__. + try: + # This is a bit ugly, but it avoids running this again by + # removing this descriptor. 
+ delattr(obj.__class__, self.name) + except AttributeError: + pass + return result + + +class MovedModule(_LazyDescr): + + def __init__(self, name, old, new=None): + super(MovedModule, self).__init__(name) + if PY3: + if new is None: + new = name + self.mod = new + else: + self.mod = old + + def _resolve(self): + return _import_module(self.mod) + + def __getattr__(self, attr): + _module = self._resolve() + value = getattr(_module, attr) + setattr(self, attr, value) + return value + + +class _LazyModule(types.ModuleType): + + def __init__(self, name): + super(_LazyModule, self).__init__(name) + self.__doc__ = self.__class__.__doc__ + + def __dir__(self): + attrs = ["__doc__", "__name__"] + attrs += [attr.name for attr in self._moved_attributes] + return attrs + + # Subclasses should override this + _moved_attributes = [] + + +class MovedAttribute(_LazyDescr): + + def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): + super(MovedAttribute, self).__init__(name) + if PY3: + if new_mod is None: + new_mod = name + self.mod = new_mod + if new_attr is None: + if old_attr is None: + new_attr = name + else: + new_attr = old_attr + self.attr = new_attr + else: + self.mod = old_mod + if old_attr is None: + old_attr = name + self.attr = old_attr + + def _resolve(self): + module = _import_module(self.mod) + return getattr(module, self.attr) + + +class _SixMetaPathImporter(object): + + """ + A meta path importer to import six.moves and its submodules. + + This class implements a PEP302 finder and loader. It should be compatible + with Python 2.5 and all existing versions of Python3 + """ + + def __init__(self, six_module_name): + self.name = six_module_name + self.known_modules = {} + + def _add_module(self, mod, *fullnames): + for fullname in fullnames: + self.known_modules[self.name + "." + fullname] = mod + + def _get_module(self, fullname): + return self.known_modules[self.name + "." + fullname] + + def find_module(self, fullname, path=None): + if fullname in self.known_modules: + return self + return None + + def __get_module(self, fullname): + try: + return self.known_modules[fullname] + except KeyError: + raise ImportError("This loader does not know module " + fullname) + + def load_module(self, fullname): + try: + # in case of a reload + return sys.modules[fullname] + except KeyError: + pass + mod = self.__get_module(fullname) + if isinstance(mod, MovedModule): + mod = mod._resolve() + else: + mod.__loader__ = self + sys.modules[fullname] = mod + return mod + + def is_package(self, fullname): + """ + Return true, if the named module is a package. 
+ + We need this method to get correct spec objects with + Python 3.4 (see PEP451) + """ + return hasattr(self.__get_module(fullname), "__path__") + + def get_code(self, fullname): + """Return None + + Required, if is_package is implemented""" + self.__get_module(fullname) # eventually raises ImportError + return None + get_source = get_code # same as get_code + +_importer = _SixMetaPathImporter(__name__) + + +class _MovedItems(_LazyModule): + + """Lazy loading of moved objects""" + __path__ = [] # mark as package + + +_moved_attributes = [ + MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"), + MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"), + MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"), + MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"), + MovedAttribute("intern", "__builtin__", "sys"), + MovedAttribute("map", "itertools", "builtins", "imap", "map"), + MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"), + MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"), + MovedAttribute("getoutput", "commands", "subprocess"), + MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"), + MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"), + MovedAttribute("reduce", "__builtin__", "functools"), + MovedAttribute("shlex_quote", "pipes", "shlex", "quote"), + MovedAttribute("StringIO", "StringIO", "io"), + MovedAttribute("UserDict", "UserDict", "collections"), + MovedAttribute("UserList", "UserList", "collections"), + MovedAttribute("UserString", "UserString", "collections"), + MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"), + MovedAttribute("zip", "itertools", "builtins", "izip", "zip"), + MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"), + MovedModule("builtins", "__builtin__"), + MovedModule("configparser", "ConfigParser"), + MovedModule("copyreg", "copy_reg"), + MovedModule("dbm_gnu", "gdbm", "dbm.gnu"), + MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"), + MovedModule("http_cookiejar", "cookielib", "http.cookiejar"), + MovedModule("http_cookies", "Cookie", "http.cookies"), + MovedModule("html_entities", "htmlentitydefs", "html.entities"), + MovedModule("html_parser", "HTMLParser", "html.parser"), + MovedModule("http_client", "httplib", "http.client"), + MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"), + MovedModule("email_mime_image", "email.MIMEImage", "email.mime.image"), + MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"), + MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"), + MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"), + MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"), + MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"), + MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"), + MovedModule("cPickle", "cPickle", "pickle"), + MovedModule("queue", "Queue"), + MovedModule("reprlib", "repr"), + MovedModule("socketserver", "SocketServer"), + MovedModule("_thread", "thread", "_thread"), + MovedModule("tkinter", "Tkinter"), + MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"), + MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"), + MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"), + MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"), + 
MovedModule("tkinter_tix", "Tix", "tkinter.tix"), + MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"), + MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"), + MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"), + MovedModule("tkinter_colorchooser", "tkColorChooser", + "tkinter.colorchooser"), + MovedModule("tkinter_commondialog", "tkCommonDialog", + "tkinter.commondialog"), + MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"), + MovedModule("tkinter_font", "tkFont", "tkinter.font"), + MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"), + MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", + "tkinter.simpledialog"), + MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"), + MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"), + MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"), + MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"), + MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"), + MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"), +] +# Add windows specific modules. +if sys.platform == "win32": + _moved_attributes += [ + MovedModule("winreg", "_winreg"), + ] + +for attr in _moved_attributes: + setattr(_MovedItems, attr.name, attr) + if isinstance(attr, MovedModule): + _importer._add_module(attr, "moves." + attr.name) +del attr + +_MovedItems._moved_attributes = _moved_attributes + +moves = _MovedItems(__name__ + ".moves") +_importer._add_module(moves, "moves") + + +class Module_six_moves_urllib_parse(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_parse""" + + +_urllib_parse_moved_attributes = [ + MovedAttribute("ParseResult", "urlparse", "urllib.parse"), + MovedAttribute("SplitResult", "urlparse", "urllib.parse"), + MovedAttribute("parse_qs", "urlparse", "urllib.parse"), + MovedAttribute("parse_qsl", "urlparse", "urllib.parse"), + MovedAttribute("urldefrag", "urlparse", "urllib.parse"), + MovedAttribute("urljoin", "urlparse", "urllib.parse"), + MovedAttribute("urlparse", "urlparse", "urllib.parse"), + MovedAttribute("urlsplit", "urlparse", "urllib.parse"), + MovedAttribute("urlunparse", "urlparse", "urllib.parse"), + MovedAttribute("urlunsplit", "urlparse", "urllib.parse"), + MovedAttribute("quote", "urllib", "urllib.parse"), + MovedAttribute("quote_plus", "urllib", "urllib.parse"), + MovedAttribute("unquote", "urllib", "urllib.parse"), + MovedAttribute("unquote_plus", "urllib", "urllib.parse"), + MovedAttribute("unquote_to_bytes", "urllib", "urllib.parse", "unquote", "unquote_to_bytes"), + MovedAttribute("urlencode", "urllib", "urllib.parse"), + MovedAttribute("splitquery", "urllib", "urllib.parse"), + MovedAttribute("splittag", "urllib", "urllib.parse"), + MovedAttribute("splituser", "urllib", "urllib.parse"), + MovedAttribute("splitvalue", "urllib", "urllib.parse"), + MovedAttribute("uses_fragment", "urlparse", "urllib.parse"), + MovedAttribute("uses_netloc", "urlparse", "urllib.parse"), + MovedAttribute("uses_params", "urlparse", "urllib.parse"), + MovedAttribute("uses_query", "urlparse", "urllib.parse"), + MovedAttribute("uses_relative", "urlparse", "urllib.parse"), +] +for attr in _urllib_parse_moved_attributes: + setattr(Module_six_moves_urllib_parse, attr.name, attr) +del attr + +Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes + +_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"), + 
"moves.urllib_parse", "moves.urllib.parse") + + +class Module_six_moves_urllib_error(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_error""" + + +_urllib_error_moved_attributes = [ + MovedAttribute("URLError", "urllib2", "urllib.error"), + MovedAttribute("HTTPError", "urllib2", "urllib.error"), + MovedAttribute("ContentTooShortError", "urllib", "urllib.error"), +] +for attr in _urllib_error_moved_attributes: + setattr(Module_six_moves_urllib_error, attr.name, attr) +del attr + +Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes + +_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"), + "moves.urllib_error", "moves.urllib.error") + + +class Module_six_moves_urllib_request(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_request""" + + +_urllib_request_moved_attributes = [ + MovedAttribute("urlopen", "urllib2", "urllib.request"), + MovedAttribute("install_opener", "urllib2", "urllib.request"), + MovedAttribute("build_opener", "urllib2", "urllib.request"), + MovedAttribute("pathname2url", "urllib", "urllib.request"), + MovedAttribute("url2pathname", "urllib", "urllib.request"), + MovedAttribute("getproxies", "urllib", "urllib.request"), + MovedAttribute("Request", "urllib2", "urllib.request"), + MovedAttribute("OpenerDirector", "urllib2", "urllib.request"), + MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"), + MovedAttribute("ProxyHandler", "urllib2", "urllib.request"), + MovedAttribute("BaseHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"), + MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"), + MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"), + MovedAttribute("FileHandler", "urllib2", "urllib.request"), + MovedAttribute("FTPHandler", "urllib2", "urllib.request"), + MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"), + MovedAttribute("UnknownHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"), + MovedAttribute("urlretrieve", "urllib", "urllib.request"), + MovedAttribute("urlcleanup", "urllib", "urllib.request"), + MovedAttribute("URLopener", "urllib", "urllib.request"), + MovedAttribute("FancyURLopener", "urllib", "urllib.request"), + MovedAttribute("proxy_bypass", "urllib", "urllib.request"), + MovedAttribute("parse_http_list", "urllib2", "urllib.request"), + MovedAttribute("parse_keqv_list", "urllib2", "urllib.request"), +] +for attr in _urllib_request_moved_attributes: + setattr(Module_six_moves_urllib_request, attr.name, attr) +del attr + +Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes + +_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"), + "moves.urllib_request", "moves.urllib.request") + + 
+class Module_six_moves_urllib_response(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_response""" + + +_urllib_response_moved_attributes = [ + MovedAttribute("addbase", "urllib", "urllib.response"), + MovedAttribute("addclosehook", "urllib", "urllib.response"), + MovedAttribute("addinfo", "urllib", "urllib.response"), + MovedAttribute("addinfourl", "urllib", "urllib.response"), +] +for attr in _urllib_response_moved_attributes: + setattr(Module_six_moves_urllib_response, attr.name, attr) +del attr + +Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes + +_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"), + "moves.urllib_response", "moves.urllib.response") + + +class Module_six_moves_urllib_robotparser(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_robotparser""" + + +_urllib_robotparser_moved_attributes = [ + MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"), +] +for attr in _urllib_robotparser_moved_attributes: + setattr(Module_six_moves_urllib_robotparser, attr.name, attr) +del attr + +Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes + +_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"), + "moves.urllib_robotparser", "moves.urllib.robotparser") + + +class Module_six_moves_urllib(types.ModuleType): + + """Create a six.moves.urllib namespace that resembles the Python 3 namespace""" + __path__ = [] # mark as package + parse = _importer._get_module("moves.urllib_parse") + error = _importer._get_module("moves.urllib_error") + request = _importer._get_module("moves.urllib_request") + response = _importer._get_module("moves.urllib_response") + robotparser = _importer._get_module("moves.urllib_robotparser") + + def __dir__(self): + return ['parse', 'error', 'request', 'response', 'robotparser'] + +_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"), + "moves.urllib") + + +def add_move(move): + """Add an item to six.moves.""" + setattr(_MovedItems, move.name, move) + + +def remove_move(name): + """Remove item from six.moves.""" + try: + delattr(_MovedItems, name) + except AttributeError: + try: + del moves.__dict__[name] + except KeyError: + raise AttributeError("no such move, %r" % (name,)) + + +if PY3: + _meth_func = "__func__" + _meth_self = "__self__" + + _func_closure = "__closure__" + _func_code = "__code__" + _func_defaults = "__defaults__" + _func_globals = "__globals__" +else: + _meth_func = "im_func" + _meth_self = "im_self" + + _func_closure = "func_closure" + _func_code = "func_code" + _func_defaults = "func_defaults" + _func_globals = "func_globals" + + +try: + advance_iterator = next +except NameError: + def advance_iterator(it): + return it.next() +next = advance_iterator + + +try: + callable = callable +except NameError: + def callable(obj): + return any("__call__" in klass.__dict__ for klass in type(obj).__mro__) + + +if PY3: + def get_unbound_function(unbound): + return unbound + + create_bound_method = types.MethodType + + def create_unbound_method(func, cls): + return func + + Iterator = object +else: + def get_unbound_function(unbound): + return unbound.im_func + + def create_bound_method(func, obj): + return types.MethodType(func, obj, obj.__class__) + + def create_unbound_method(func, cls): + return types.MethodType(func, None, cls) + + class Iterator(object): + + def next(self): + return type(self).__next__(self) + + 
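# Usage sketch (illustrative, not part of six itself): subclassing six.Iterator
# lets a single __next__ definition serve both major versions, since the
# Python 2 base class above forwards next() to __next__():
#
#     class Countdown(six.Iterator):
#         def __init__(self, n):
#             self.n = n
#         def __iter__(self):
#             return self
#         def __next__(self):
#             if self.n <= 0:
#                 raise StopIteration
#             self.n -= 1
#             return self.n
#
#     list(Countdown(3))  # [2, 1, 0]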
callable = callable +_add_doc(get_unbound_function, + """Get the function out of a possibly unbound function""") + + +get_method_function = operator.attrgetter(_meth_func) +get_method_self = operator.attrgetter(_meth_self) +get_function_closure = operator.attrgetter(_func_closure) +get_function_code = operator.attrgetter(_func_code) +get_function_defaults = operator.attrgetter(_func_defaults) +get_function_globals = operator.attrgetter(_func_globals) + + +if PY3: + def iterkeys(d, **kw): + return iter(d.keys(**kw)) + + def itervalues(d, **kw): + return iter(d.values(**kw)) + + def iteritems(d, **kw): + return iter(d.items(**kw)) + + def iterlists(d, **kw): + return iter(d.lists(**kw)) + + viewkeys = operator.methodcaller("keys") + + viewvalues = operator.methodcaller("values") + + viewitems = operator.methodcaller("items") +else: + def iterkeys(d, **kw): + return d.iterkeys(**kw) + + def itervalues(d, **kw): + return d.itervalues(**kw) + + def iteritems(d, **kw): + return d.iteritems(**kw) + + def iterlists(d, **kw): + return d.iterlists(**kw) + + viewkeys = operator.methodcaller("viewkeys") + + viewvalues = operator.methodcaller("viewvalues") + + viewitems = operator.methodcaller("viewitems") + +_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.") +_add_doc(itervalues, "Return an iterator over the values of a dictionary.") +_add_doc(iteritems, + "Return an iterator over the (key, value) pairs of a dictionary.") +_add_doc(iterlists, + "Return an iterator over the (key, [values]) pairs of a dictionary.") + + +if PY3: + def b(s): + return s.encode("latin-1") + + def u(s): + return s + unichr = chr + import struct + int2byte = struct.Struct(">B").pack + del struct + byte2int = operator.itemgetter(0) + indexbytes = operator.getitem + iterbytes = iter + import io + StringIO = io.StringIO + BytesIO = io.BytesIO + _assertCountEqual = "assertCountEqual" + if sys.version_info[1] <= 1: + _assertRaisesRegex = "assertRaisesRegexp" + _assertRegex = "assertRegexpMatches" + else: + _assertRaisesRegex = "assertRaisesRegex" + _assertRegex = "assertRegex" +else: + def b(s): + return s + # Workaround for standalone backslash + + def u(s): + return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape") + unichr = unichr + int2byte = chr + + def byte2int(bs): + return ord(bs[0]) + + def indexbytes(buf, i): + return ord(buf[i]) + iterbytes = functools.partial(itertools.imap, ord) + import StringIO + StringIO = BytesIO = StringIO.StringIO + _assertCountEqual = "assertItemsEqual" + _assertRaisesRegex = "assertRaisesRegexp" + _assertRegex = "assertRegexpMatches" +_add_doc(b, """Byte literal""") +_add_doc(u, """Text literal""") + + +def assertCountEqual(self, *args, **kwargs): + return getattr(self, _assertCountEqual)(*args, **kwargs) + + +def assertRaisesRegex(self, *args, **kwargs): + return getattr(self, _assertRaisesRegex)(*args, **kwargs) + + +def assertRegex(self, *args, **kwargs): + return getattr(self, _assertRegex)(*args, **kwargs) + + +if PY3: + exec_ = getattr(moves.builtins, "exec") + + def reraise(tp, value, tb=None): + try: + if value is None: + value = tp() + if value.__traceback__ is not tb: + raise value.with_traceback(tb) + raise value + finally: + value = None + tb = None + +else: + def exec_(_code_, _globs_=None, _locs_=None): + """Execute code in a namespace.""" + if _globs_ is None: + frame = sys._getframe(1) + _globs_ = frame.f_globals + if _locs_ is None: + _locs_ = frame.f_locals + del frame + elif _locs_ is None: + _locs_ = _globs_ + exec("""exec _code_ in _globs_, 
_locs_""") + + exec_("""def reraise(tp, value, tb=None): + try: + raise tp, value, tb + finally: + tb = None +""") + + +if sys.version_info[:2] == (3, 2): + exec_("""def raise_from(value, from_value): + try: + if from_value is None: + raise value + raise value from from_value + finally: + value = None +""") +elif sys.version_info[:2] > (3, 2): + exec_("""def raise_from(value, from_value): + try: + raise value from from_value + finally: + value = None +""") +else: + def raise_from(value, from_value): + raise value + + +print_ = getattr(moves.builtins, "print", None) +if print_ is None: + def print_(*args, **kwargs): + """The new-style print function for Python 2.4 and 2.5.""" + fp = kwargs.pop("file", sys.stdout) + if fp is None: + return + + def write(data): + if not isinstance(data, basestring): + data = str(data) + # If the file has an encoding, encode unicode with it. + if (isinstance(fp, file) and + isinstance(data, unicode) and + fp.encoding is not None): + errors = getattr(fp, "errors", None) + if errors is None: + errors = "strict" + data = data.encode(fp.encoding, errors) + fp.write(data) + want_unicode = False + sep = kwargs.pop("sep", None) + if sep is not None: + if isinstance(sep, unicode): + want_unicode = True + elif not isinstance(sep, str): + raise TypeError("sep must be None or a string") + end = kwargs.pop("end", None) + if end is not None: + if isinstance(end, unicode): + want_unicode = True + elif not isinstance(end, str): + raise TypeError("end must be None or a string") + if kwargs: + raise TypeError("invalid keyword arguments to print()") + if not want_unicode: + for arg in args: + if isinstance(arg, unicode): + want_unicode = True + break + if want_unicode: + newline = unicode("\n") + space = unicode(" ") + else: + newline = "\n" + space = " " + if sep is None: + sep = space + if end is None: + end = newline + for i, arg in enumerate(args): + if i: + write(sep) + write(arg) + write(end) +if sys.version_info[:2] < (3, 3): + _print = print_ + + def print_(*args, **kwargs): + fp = kwargs.get("file", sys.stdout) + flush = kwargs.pop("flush", False) + _print(*args, **kwargs) + if flush and fp is not None: + fp.flush() + +_add_doc(reraise, """Reraise an exception.""") + +if sys.version_info[0:2] < (3, 4): + def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS, + updated=functools.WRAPPER_UPDATES): + def wrapper(f): + f = functools.wraps(wrapped, assigned, updated)(f) + f.__wrapped__ = wrapped + return f + return wrapper +else: + wraps = functools.wraps + + +def with_metaclass(meta, *bases): + """Create a base class with a metaclass.""" + # This requires a bit of explanation: the basic idea is to make a dummy + # metaclass for one level of class instantiation that replaces itself with + # the actual metaclass. 
+ class metaclass(type): + + def __new__(cls, name, this_bases, d): + return meta(name, bases, d) + + @classmethod + def __prepare__(cls, name, this_bases): + return meta.__prepare__(name, bases) + return type.__new__(metaclass, 'temporary_class', (), {}) + + +def add_metaclass(metaclass): + """Class decorator for creating a class with a metaclass.""" + def wrapper(cls): + orig_vars = cls.__dict__.copy() + slots = orig_vars.get('__slots__') + if slots is not None: + if isinstance(slots, str): + slots = [slots] + for slots_var in slots: + orig_vars.pop(slots_var) + orig_vars.pop('__dict__', None) + orig_vars.pop('__weakref__', None) + return metaclass(cls.__name__, cls.__bases__, orig_vars) + return wrapper + + +def python_2_unicode_compatible(klass): + """ + A decorator that defines __unicode__ and __str__ methods under Python 2. + Under Python 3 it does nothing. + + To support Python 2 and 3 with a single code base, define a __str__ method + returning text and apply this decorator to the class. + """ + if PY2: + if '__str__' not in klass.__dict__: + raise ValueError("@python_2_unicode_compatible cannot be applied " + "to %s because it doesn't define __str__()." % + klass.__name__) + klass.__unicode__ = klass.__str__ + klass.__str__ = lambda self: self.__unicode__().encode('utf-8') + return klass + + +# Complete the moves implementation. +# This code is at the end of this module to speed up module loading. +# Turn this module into a package. +__path__ = [] # required for PEP 302 and PEP 451 +__package__ = __name__ # see PEP 366 @ReservedAssignment +if globals().get("__spec__") is not None: + __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable +# Remove other six meta path importers, since they cause problems. This can +# happen if six is removed from sys.modules and then reloaded. (Setuptools does +# this for some reason.) +if sys.meta_path: + for i, importer in enumerate(sys.meta_path): + # Here's some real nastiness: Another "instance" of the six module might + # be floating around. Therefore, we can't use isinstance() to check for + # the six meta path importer, since the other six instance will have + # inserted an importer with different class. + if (type(importer).__name__ == "_SixMetaPathImporter" and + importer.name == __name__): + del sys.meta_path[i] + break + del i, importer +# Finally, add the importer to the meta path import hook. +sys.meta_path.append(_importer) diff --git a/env/lib/python3.6/site-packages/socks.py b/env/lib/python3.6/site-packages/socks.py new file mode 100644 index 0000000..7bcf590 --- /dev/null +++ b/env/lib/python3.6/site-packages/socks.py @@ -0,0 +1,870 @@ +"""SocksiPy - Python SOCKS module. + +Copyright 2006 Dan-Haim. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. +3. Neither the name of Dan Haim nor the names of his contributors may be used + to endorse or promote products derived from this software without specific + prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY DAN HAIM "AS IS" AND ANY EXPRESS OR IMPLIED +WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO +EVENT SHALL DAN HAIM OR HIS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA +OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT +OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +This module provides a standard socket-like interface for Python +for tunneling connections through SOCKS proxies. + +=============================================================================== + +Minor modifications made by Christopher Gilbert (http://motomastyle.com/) +for use in PyLoris (http://pyloris.sourceforge.net/) + +Minor modifications made by Mario Vilas (http://breakingcode.wordpress.com/) +mainly to merge bug fixes found in Sourceforge + +Modifications made by Anorov (https://github.com/Anorov) +-Forked and renamed to PySocks +-Fixed issue with HTTP proxy failure checking (same bug that was in the + old ___recvall() method) +-Included SocksiPyHandler (sockshandler.py), to be used as a urllib2 handler, + courtesy of e000 (https://github.com/e000): + https://gist.github.com/869791#file_socksipyhandler.py +-Re-styled code to make it readable + -Aliased PROXY_TYPE_SOCKS5 -> SOCKS5 etc. + -Improved exception handling and output + -Removed irritating use of sequence indexes, replaced with tuple unpacked + variables + -Fixed up Python 3 bytestring handling - chr(0x03).encode() -> b"\x03" + -Other general fixes +-Added clarification that the HTTP proxy connection method only supports + CONNECT-style tunneling HTTP proxies +-Various small bug fixes +""" + +from base64 import b64encode +from collections import Callable +from errno import EOPNOTSUPP, EINVAL, EAGAIN +import functools +from io import BytesIO +import logging +import os +from os import SEEK_CUR +import socket +import struct +import sys + +__version__ = "1.6.7" + + +if os.name == "nt" and sys.version_info < (3, 0): + try: + import win_inet_pton + except ImportError: + raise ImportError( + "To run PySocks on Windows you must install win_inet_pton") + +log = logging.getLogger(__name__) + +PROXY_TYPE_SOCKS4 = SOCKS4 = 1 +PROXY_TYPE_SOCKS5 = SOCKS5 = 2 +PROXY_TYPE_HTTP = HTTP = 3 + +PROXY_TYPES = {"SOCKS4": SOCKS4, "SOCKS5": SOCKS5, "HTTP": HTTP} +PRINTABLE_PROXY_TYPES = dict(zip(PROXY_TYPES.values(), PROXY_TYPES.keys())) + +_orgsocket = _orig_socket = socket.socket + + +def set_self_blocking(function): + + @functools.wraps(function) + def wrapper(*args, **kwargs): + self = args[0] + try: + _is_blocking = self.gettimeout() + if _is_blocking == 0: + self.setblocking(True) + return function(*args, **kwargs) + except Exception as e: + raise + finally: + # set orgin blocking + if _is_blocking == 0: + self.setblocking(False) + return wrapper + + +class ProxyError(IOError): + """Socket_err contains original socket.error exception.""" + def __init__(self, msg, socket_err=None): + self.msg = msg + self.socket_err = socket_err + + if socket_err: + self.msg += ": {0}".format(socket_err) + + def __str__(self): + return self.msg + + +class GeneralProxyError(ProxyError): + pass + + +class ProxyConnectionError(ProxyError): + pass + + +class 
SOCKS5AuthError(ProxyError): + pass + + +class SOCKS5Error(ProxyError): + pass + + +class SOCKS4Error(ProxyError): + pass + + +class HTTPError(ProxyError): + pass + +SOCKS4_ERRORS = { + 0x5B: "Request rejected or failed", + 0x5C: ("Request rejected because SOCKS server cannot connect to identd on" + " the client"), + 0x5D: ("Request rejected because the client program and identd report" + " different user-ids") +} + +SOCKS5_ERRORS = { + 0x01: "General SOCKS server failure", + 0x02: "Connection not allowed by ruleset", + 0x03: "Network unreachable", + 0x04: "Host unreachable", + 0x05: "Connection refused", + 0x06: "TTL expired", + 0x07: "Command not supported, or protocol error", + 0x08: "Address type not supported" +} + +DEFAULT_PORTS = {SOCKS4: 1080, SOCKS5: 1080, HTTP: 8080} + + +def set_default_proxy(proxy_type=None, addr=None, port=None, rdns=True, + username=None, password=None): + """Sets a default proxy. + + All further socksocket objects will use the default unless explicitly + changed. All parameters are as for socket.set_proxy().""" + socksocket.default_proxy = (proxy_type, addr, port, rdns, + username.encode() if username else None, + password.encode() if password else None) + + +def setdefaultproxy(*args, **kwargs): + if "proxytype" in kwargs: + kwargs["proxy_type"] = kwargs.pop("proxytype") + return set_default_proxy(*args, **kwargs) + + +def get_default_proxy(): + """Returns the default proxy, set by set_default_proxy.""" + return socksocket.default_proxy + +getdefaultproxy = get_default_proxy + + +def wrap_module(module): + """Attempts to replace a module's socket library with a SOCKS socket. + + Must set a default proxy using set_default_proxy(...) first. This will + only work on modules that import socket directly into the namespace; + most of the Python Standard Library falls into this category.""" + if socksocket.default_proxy: + module.socket.socket = socksocket + else: + raise GeneralProxyError("No default proxy specified") + +wrapmodule = wrap_module + + +def create_connection(dest_pair, + timeout=None, source_address=None, + proxy_type=None, proxy_addr=None, + proxy_port=None, proxy_rdns=True, + proxy_username=None, proxy_password=None, + socket_options=None): + """create_connection(dest_pair, *[, timeout], **proxy_args) -> socket object + + Like socket.create_connection(), but connects to proxy + before returning the socket object. + + dest_pair - 2-tuple of (IP/hostname, port). + **proxy_args - Same args passed to socksocket.set_proxy() if present. + timeout - Optional socket timeout value, in seconds. + source_address - tuple (host, port) for the socket to bind to as its source + address before connecting (only for compatibility) + """ + # Remove IPv6 brackets on the remote address and proxy address. + remote_host, remote_port = dest_pair + if remote_host.startswith("["): + remote_host = remote_host.strip("[]") + if proxy_addr and proxy_addr.startswith("["): + proxy_addr = proxy_addr.strip("[]") + + err = None + + # Allow the SOCKS proxy to be on IPv4 or IPv6 addresses. 
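# Usage sketch for create_connection() (illustrative only; the proxy endpoint
# and destination below are placeholders, and the keyword names match the
# signature documented above):
#
#     import socks
#     sock = socks.create_connection(
#         ("example.com", 80),
#         timeout=10,
#         proxy_type=socks.SOCKS5,
#         proxy_addr="127.0.0.1",
#         proxy_port=1080,
#     )
#     sock.sendall(b"GET / HTTP/1.0\r\nHost: example.com\r\n\r\n")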
+ for r in socket.getaddrinfo(proxy_addr, proxy_port, 0, socket.SOCK_STREAM): + family, socket_type, proto, canonname, sa = r + sock = None + try: + sock = socksocket(family, socket_type, proto) + + if socket_options: + for opt in socket_options: + sock.setsockopt(*opt) + + if isinstance(timeout, (int, float)): + sock.settimeout(timeout) + + if proxy_type: + sock.set_proxy(proxy_type, proxy_addr, proxy_port, proxy_rdns, + proxy_username, proxy_password) + if source_address: + sock.bind(source_address) + + sock.connect((remote_host, remote_port)) + return sock + + except (socket.error, ProxyConnectionError) as e: + err = e + if sock: + sock.close() + sock = None + + if err: + raise err + + raise socket.error("gai returned empty list.") + + +class _BaseSocket(socket.socket): + """Allows Python 2 delegated methods such as send() to be overridden.""" + def __init__(self, *pos, **kw): + _orig_socket.__init__(self, *pos, **kw) + + self._savedmethods = dict() + for name in self._savenames: + self._savedmethods[name] = getattr(self, name) + delattr(self, name) # Allows normal overriding mechanism to work + + _savenames = list() + + +def _makemethod(name): + return lambda self, *pos, **kw: self._savedmethods[name](*pos, **kw) +for name in ("sendto", "send", "recvfrom", "recv"): + method = getattr(_BaseSocket, name, None) + + # Determine if the method is not defined the usual way + # as a function in the class. + # Python 2 uses __slots__, so there are descriptors for each method, + # but they are not functions. + if not isinstance(method, Callable): + _BaseSocket._savenames.append(name) + setattr(_BaseSocket, name, _makemethod(name)) + + +class socksocket(_BaseSocket): + """socksocket([family[, type[, proto]]]) -> socket object + + Open a SOCKS enabled socket. The parameters are the same as + those of the standard socket init. In order for SOCKS to work, + you must specify family=AF_INET and proto=0. + The "type" argument must be either SOCK_STREAM or SOCK_DGRAM. + """ + + default_proxy = None + + def __init__(self, family=socket.AF_INET, type=socket.SOCK_STREAM, + proto=0, *args, **kwargs): + if type not in (socket.SOCK_STREAM, socket.SOCK_DGRAM): + msg = "Socket type must be stream or datagram, not {!r}" + raise ValueError(msg.format(type)) + + super(socksocket, self).__init__(family, type, proto, *args, **kwargs) + self._proxyconn = None # TCP connection to keep UDP relay alive + + if self.default_proxy: + self.proxy = self.default_proxy + else: + self.proxy = (None, None, None, None, None, None) + self.proxy_sockname = None + self.proxy_peername = None + + self._timeout = None + + def _readall(self, file, count): + """Receive EXACTLY the number of bytes requested from the file object. + + Blocks until the required number of bytes have been received.""" + data = b"" + while len(data) < count: + d = file.read(count - len(data)) + if not d: + raise GeneralProxyError("Connection closed unexpectedly") + data += d + return data + + def settimeout(self, timeout): + self._timeout = timeout + try: + # test if we're connected, if so apply timeout + peer = self.get_proxy_peername() + super(socksocket, self).settimeout(self._timeout) + except socket.error: + pass + + def gettimeout(self): + return self._timeout + + def setblocking(self, v): + if v: + self.settimeout(None) + else: + self.settimeout(0.0) + + def set_proxy(self, proxy_type=None, addr=None, port=None, rdns=True, + username=None, password=None): + """ Sets the proxy to be used. + + proxy_type - The type of the proxy to be used. 
Three types + are supported: PROXY_TYPE_SOCKS4 (including socks4a), + PROXY_TYPE_SOCKS5 and PROXY_TYPE_HTTP + addr - The address of the server (IP or DNS). + port - The port of the server. Defaults to 1080 for SOCKS + servers and 8080 for HTTP proxy servers. + rdns - Should DNS queries be performed on the remote side + (rather than the local side). The default is True. + Note: This has no effect with SOCKS4 servers. + username - Username to authenticate with to the server. + The default is no authentication. + password - Password to authenticate with to the server. + Only relevant when username is also provided.""" + self.proxy = (proxy_type, addr, port, rdns, + username.encode() if username else None, + password.encode() if password else None) + + def setproxy(self, *args, **kwargs): + if "proxytype" in kwargs: + kwargs["proxy_type"] = kwargs.pop("proxytype") + return self.set_proxy(*args, **kwargs) + + def bind(self, *pos, **kw): + """Implements proxy connection for UDP sockets. + + Happens during the bind() phase.""" + (proxy_type, proxy_addr, proxy_port, rdns, username, + password) = self.proxy + if not proxy_type or self.type != socket.SOCK_DGRAM: + return _orig_socket.bind(self, *pos, **kw) + + if self._proxyconn: + raise socket.error(EINVAL, "Socket already bound to an address") + if proxy_type != SOCKS5: + msg = "UDP only supported by SOCKS5 proxy type" + raise socket.error(EOPNOTSUPP, msg) + super(socksocket, self).bind(*pos, **kw) + + # Need to specify actual local port because + # some relays drop packets if a port of zero is specified. + # Avoid specifying host address in case of NAT though. + _, port = self.getsockname() + dst = ("0", port) + + self._proxyconn = _orig_socket() + proxy = self._proxy_addr() + self._proxyconn.connect(proxy) + + UDP_ASSOCIATE = b"\x03" + _, relay = self._SOCKS5_request(self._proxyconn, UDP_ASSOCIATE, dst) + + # The relay is most likely on the same host as the SOCKS proxy, + # but some proxies return a private IP address (10.x.y.z) + host, _ = proxy + _, port = relay + super(socksocket, self).connect((host, port)) + super(socksocket, self).settimeout(self._timeout) + self.proxy_sockname = ("0.0.0.0", 0) # Unknown + + def sendto(self, bytes, *args, **kwargs): + if self.type != socket.SOCK_DGRAM: + return super(socksocket, self).sendto(bytes, *args, **kwargs) + if not self._proxyconn: + self.bind(("", 0)) + + address = args[-1] + flags = args[:-1] + + header = BytesIO() + RSV = b"\x00\x00" + header.write(RSV) + STANDALONE = b"\x00" + header.write(STANDALONE) + self._write_SOCKS5_address(address, header) + + sent = super(socksocket, self).send(header.getvalue() + bytes, *flags, + **kwargs) + return sent - header.tell() + + def send(self, bytes, flags=0, **kwargs): + if self.type == socket.SOCK_DGRAM: + return self.sendto(bytes, flags, self.proxy_peername, **kwargs) + else: + return super(socksocket, self).send(bytes, flags, **kwargs) + + def recvfrom(self, bufsize, flags=0): + if self.type != socket.SOCK_DGRAM: + return super(socksocket, self).recvfrom(bufsize, flags) + if not self._proxyconn: + self.bind(("", 0)) + + buf = BytesIO(super(socksocket, self).recv(bufsize + 1024, flags)) + buf.seek(2, SEEK_CUR) + frag = buf.read(1) + if ord(frag): + raise NotImplementedError("Received UDP packet fragment") + fromhost, fromport = self._read_SOCKS5_address(buf) + + if self.proxy_peername: + peerhost, peerport = self.proxy_peername + if fromhost != peerhost or peerport not in (0, fromport): + raise socket.error(EAGAIN, "Packet filtered") + + return 
(buf.read(bufsize), (fromhost, fromport)) + + def recv(self, *pos, **kw): + bytes, _ = self.recvfrom(*pos, **kw) + return bytes + + def close(self): + if self._proxyconn: + self._proxyconn.close() + return super(socksocket, self).close() + + def get_proxy_sockname(self): + """Returns the bound IP address and port number at the proxy.""" + return self.proxy_sockname + + getproxysockname = get_proxy_sockname + + def get_proxy_peername(self): + """ + Returns the IP and port number of the proxy. + """ + return self.getpeername() + + getproxypeername = get_proxy_peername + + def get_peername(self): + """Returns the IP address and port number of the destination machine. + + Note: get_proxy_peername returns the proxy.""" + return self.proxy_peername + + getpeername = get_peername + + def _negotiate_SOCKS5(self, *dest_addr): + """Negotiates a stream connection through a SOCKS5 server.""" + CONNECT = b"\x01" + self.proxy_peername, self.proxy_sockname = self._SOCKS5_request( + self, CONNECT, dest_addr) + + def _SOCKS5_request(self, conn, cmd, dst): + """ + Send SOCKS5 request with given command (CMD field) and + address (DST field). Returns resolved DST address that was used. + """ + proxy_type, addr, port, rdns, username, password = self.proxy + + writer = conn.makefile("wb") + reader = conn.makefile("rb", 0) # buffering=0 renamed in Python 3 + try: + # First we'll send the authentication packages we support. + if username and password: + # The username/password details were supplied to the + # set_proxy method so we support the USERNAME/PASSWORD + # authentication (in addition to the standard none). + writer.write(b"\x05\x02\x00\x02") + else: + # No username/password were entered, therefore we + # only support connections with no authentication. + writer.write(b"\x05\x01\x00") + + # We'll receive the server's response to determine which + # method was selected + writer.flush() + chosen_auth = self._readall(reader, 2) + + if chosen_auth[0:1] != b"\x05": + # Note: string[i:i+1] is used because indexing of a bytestring + # via bytestring[i] yields an integer in Python 3 + raise GeneralProxyError( + "SOCKS5 proxy server sent invalid data") + + # Check the chosen authentication method + + if chosen_auth[1:2] == b"\x02": + # Okay, we need to perform a basic username/password + # authentication. 
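# The request written below is the RFC 1929 username/password sub-negotiation
# frame: VER (0x01), ULEN, UNAME, PLEN, PASSWD. The two-byte reply read back
# is VER, STATUS; any non-zero STATUS means the credentials were rejected.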
+ writer.write(b"\x01" + chr(len(username)).encode() + + username + + chr(len(password)).encode() + + password) + writer.flush() + auth_status = self._readall(reader, 2) + if auth_status[0:1] != b"\x01": + # Bad response + raise GeneralProxyError( + "SOCKS5 proxy server sent invalid data") + if auth_status[1:2] != b"\x00": + # Authentication failed + raise SOCKS5AuthError("SOCKS5 authentication failed") + + # Otherwise, authentication succeeded + + # No authentication is required if 0x00 + elif chosen_auth[1:2] != b"\x00": + # Reaching here is always bad + if chosen_auth[1:2] == b"\xFF": + raise SOCKS5AuthError( + "All offered SOCKS5 authentication methods were" + " rejected") + else: + raise GeneralProxyError( + "SOCKS5 proxy server sent invalid data") + + # Now we can request the actual connection + writer.write(b"\x05" + cmd + b"\x00") + resolved = self._write_SOCKS5_address(dst, writer) + writer.flush() + + # Get the response + resp = self._readall(reader, 3) + if resp[0:1] != b"\x05": + raise GeneralProxyError( + "SOCKS5 proxy server sent invalid data") + + status = ord(resp[1:2]) + if status != 0x00: + # Connection failed: server returned an error + error = SOCKS5_ERRORS.get(status, "Unknown error") + raise SOCKS5Error("{0:#04x}: {1}".format(status, error)) + + # Get the bound address/port + bnd = self._read_SOCKS5_address(reader) + + super(socksocket, self).settimeout(self._timeout) + return (resolved, bnd) + finally: + reader.close() + writer.close() + + def _write_SOCKS5_address(self, addr, file): + """ + Return the host and port packed for the SOCKS5 protocol, + and the resolved address as a tuple object. + """ + host, port = addr + proxy_type, _, _, rdns, username, password = self.proxy + family_to_byte = {socket.AF_INET: b"\x01", socket.AF_INET6: b"\x04"} + + # If the given destination address is an IP address, we'll + # use the IP address request even if remote resolving was specified. + # Detect whether the address is IPv4/6 directly. + for family in (socket.AF_INET, socket.AF_INET6): + try: + addr_bytes = socket.inet_pton(family, host) + file.write(family_to_byte[family] + addr_bytes) + host = socket.inet_ntop(family, addr_bytes) + file.write(struct.pack(">H", port)) + return host, port + except socket.error: + continue + + # Well it's not an IP number, so it's probably a DNS name. + if rdns: + # Resolve remotely + host_bytes = host.encode("idna") + file.write(b"\x03" + chr(len(host_bytes)).encode() + host_bytes) + else: + # Resolve locally + addresses = socket.getaddrinfo(host, port, socket.AF_UNSPEC, + socket.SOCK_STREAM, + socket.IPPROTO_TCP, + socket.AI_ADDRCONFIG) + # We can't really work out what IP is reachable, so just pick the + # first. 
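# Each socket.getaddrinfo() entry is a (family, type, proto, canonname,
# sockaddr) tuple, so target_addr[0] below is the address family and
# target_addr[4][0] is the resolved host string.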
+ target_addr = addresses[0] + family = target_addr[0] + host = target_addr[4][0] + + addr_bytes = socket.inet_pton(family, host) + file.write(family_to_byte[family] + addr_bytes) + host = socket.inet_ntop(family, addr_bytes) + file.write(struct.pack(">H", port)) + return host, port + + def _read_SOCKS5_address(self, file): + atyp = self._readall(file, 1) + if atyp == b"\x01": + addr = socket.inet_ntoa(self._readall(file, 4)) + elif atyp == b"\x03": + length = self._readall(file, 1) + addr = self._readall(file, ord(length)) + elif atyp == b"\x04": + addr = socket.inet_ntop(socket.AF_INET6, self._readall(file, 16)) + else: + raise GeneralProxyError("SOCKS5 proxy server sent invalid data") + + port = struct.unpack(">H", self._readall(file, 2))[0] + return addr, port + + def _negotiate_SOCKS4(self, dest_addr, dest_port): + """Negotiates a connection through a SOCKS4 server.""" + proxy_type, addr, port, rdns, username, password = self.proxy + + writer = self.makefile("wb") + reader = self.makefile("rb", 0) # buffering=0 renamed in Python 3 + try: + # Check if the destination address provided is an IP address + remote_resolve = False + try: + addr_bytes = socket.inet_aton(dest_addr) + except socket.error: + # It's a DNS name. Check where it should be resolved. + if rdns: + addr_bytes = b"\x00\x00\x00\x01" + remote_resolve = True + else: + addr_bytes = socket.inet_aton( + socket.gethostbyname(dest_addr)) + + # Construct the request packet + writer.write(struct.pack(">BBH", 0x04, 0x01, dest_port)) + writer.write(addr_bytes) + + # The username parameter is considered userid for SOCKS4 + if username: + writer.write(username) + writer.write(b"\x00") + + # DNS name if remote resolving is required + # NOTE: This is actually an extension to the SOCKS4 protocol + # called SOCKS4A and may not be supported in all cases. + if remote_resolve: + writer.write(dest_addr.encode("idna") + b"\x00") + writer.flush() + + # Get the response from the server + resp = self._readall(reader, 8) + if resp[0:1] != b"\x00": + # Bad data + raise GeneralProxyError( + "SOCKS4 proxy server sent invalid data") + + status = ord(resp[1:2]) + if status != 0x5A: + # Connection failed: server returned an error + error = SOCKS4_ERRORS.get(status, "Unknown error") + raise SOCKS4Error("{0:#04x}: {1}".format(status, error)) + + # Get the bound address/port + self.proxy_sockname = (socket.inet_ntoa(resp[4:]), + struct.unpack(">H", resp[2:4])[0]) + if remote_resolve: + self.proxy_peername = socket.inet_ntoa(addr_bytes), dest_port + else: + self.proxy_peername = dest_addr, dest_port + finally: + reader.close() + writer.close() + + def _negotiate_HTTP(self, dest_addr, dest_port): + """Negotiates a connection through an HTTP server. 
+ + NOTE: This currently only supports HTTP CONNECT-style proxies.""" + proxy_type, addr, port, rdns, username, password = self.proxy + + # If we need to resolve locally, we do this now + addr = dest_addr if rdns else socket.gethostbyname(dest_addr) + + http_headers = [ + (b"CONNECT " + addr.encode("idna") + b":" + + str(dest_port).encode() + b" HTTP/1.1"), + b"Host: " + dest_addr.encode("idna") + ] + + if username and password: + http_headers.append(b"Proxy-Authorization: basic " + + b64encode(username + b":" + password)) + + http_headers.append(b"\r\n") + + self.sendall(b"\r\n".join(http_headers)) + + # We just need the first line to check if the connection was successful + fobj = self.makefile() + status_line = fobj.readline() + fobj.close() + + if not status_line: + raise GeneralProxyError("Connection closed unexpectedly") + + try: + proto, status_code, status_msg = status_line.split(" ", 2) + except ValueError: + raise GeneralProxyError("HTTP proxy server sent invalid response") + + if not proto.startswith("HTTP/"): + raise GeneralProxyError( + "Proxy server does not appear to be an HTTP proxy") + + try: + status_code = int(status_code) + except ValueError: + raise HTTPError( + "HTTP proxy server did not return a valid HTTP status") + + if status_code != 200: + error = "{0}: {1}".format(status_code, status_msg) + if status_code in (400, 403, 405): + # It's likely that the HTTP proxy server does not support the + # CONNECT tunneling method + error += ("\n[*] Note: The HTTP proxy server may not be" + " supported by PySocks (must be a CONNECT tunnel" + " proxy)") + raise HTTPError(error) + + self.proxy_sockname = (b"0.0.0.0", 0) + self.proxy_peername = addr, dest_port + + _proxy_negotiators = { + SOCKS4: _negotiate_SOCKS4, + SOCKS5: _negotiate_SOCKS5, + HTTP: _negotiate_HTTP + } + + @set_self_blocking + def connect(self, dest_pair): + """ + Connects to the specified destination through a proxy. + Uses the same API as socket's connect(). + To select the proxy server, use set_proxy(). + + dest_pair - 2-tuple of (IP/hostname, port). + """ + if len(dest_pair) != 2 or dest_pair[0].startswith("["): + # Probably IPv6, not supported -- raise an error, and hope + # Happy Eyeballs (RFC6555) makes sure at least the IPv4 + # connection works... + raise socket.error("PySocks doesn't support IPv6: %s" + % str(dest_pair)) + + dest_addr, dest_port = dest_pair + + if self.type == socket.SOCK_DGRAM: + if not self._proxyconn: + self.bind(("", 0)) + dest_addr = socket.gethostbyname(dest_addr) + + # If the host address is INADDR_ANY or similar, reset the peer + # address so that packets are received from any peer + if dest_addr == "0.0.0.0" and not dest_port: + self.proxy_peername = None + else: + self.proxy_peername = (dest_addr, dest_port) + return + + (proxy_type, proxy_addr, proxy_port, rdns, username, + password) = self.proxy + + # Do a minimal input check first + if (not isinstance(dest_pair, (list, tuple)) + or len(dest_pair) != 2 + or not dest_addr + or not isinstance(dest_port, int)): + # Inputs failed, raise an error + raise GeneralProxyError( + "Invalid destination-connection (host, port) pair") + + # We set the timeout here so that we don't hang in connection or during + # negotiation. 
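# For reference, the caller-side pattern that exercises this method (a sketch
# with placeholder proxy and destination values, not taken from this patch):
#
#     s = socks.socksocket()
#     s.set_proxy(socks.SOCKS5, "127.0.0.1", 1080)
#     s.settimeout(10)
#     s.connect(("example.com", 80))
#
# set_proxy() stores the proxy settings; connect() then performs the
# SOCKS4/SOCKS5/HTTP negotiation implemented below.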
+ super(socksocket, self).settimeout(self._timeout) + + if proxy_type is None: + # Treat like regular socket object + self.proxy_peername = dest_pair + super(socksocket, self).settimeout(self._timeout) + super(socksocket, self).connect((dest_addr, dest_port)) + return + + proxy_addr = self._proxy_addr() + + try: + # Initial connection to proxy server. + super(socksocket, self).connect(proxy_addr) + + except socket.error as error: + # Error while connecting to proxy + self.close() + proxy_addr, proxy_port = proxy_addr + proxy_server = "{0}:{1}".format(proxy_addr, proxy_port) + printable_type = PRINTABLE_PROXY_TYPES[proxy_type] + + msg = "Error connecting to {0} proxy {1}".format(printable_type, + proxy_server) + log.debug("%s due to: %s", msg, error) + raise ProxyConnectionError(msg, error) + + else: + # Connected to proxy server, now negotiate + try: + # Calls negotiate_{SOCKS4, SOCKS5, HTTP} + negotiate = self._proxy_negotiators[proxy_type] + negotiate(self, dest_addr, dest_port) + except socket.error as error: + # Wrap socket errors + self.close() + raise GeneralProxyError("Socket error", error) + except ProxyError: + # Protocol error while negotiating with proxy + self.close() + raise + + def _proxy_addr(self): + """ + Return proxy address to connect to as tuple object + """ + (proxy_type, proxy_addr, proxy_port, rdns, username, + password) = self.proxy + proxy_port = proxy_port or DEFAULT_PORTS.get(proxy_type) + if not proxy_port: + raise GeneralProxyError("Invalid proxy type") + return proxy_addr, proxy_port diff --git a/env/lib/python3.6/site-packages/sockshandler.py b/env/lib/python3.6/site-packages/sockshandler.py new file mode 100644 index 0000000..26c8343 --- /dev/null +++ b/env/lib/python3.6/site-packages/sockshandler.py @@ -0,0 +1,79 @@ +#!/usr/bin/env python +""" +SocksiPy + urllib2 handler + +version: 0.3 +author: e + +This module provides a Handler which you can use with urllib2 to allow it to tunnel your connection through a socks.sockssocket socket, with out monkey patching the original socket... 
+""" +import ssl + +try: + import urllib2 + import httplib +except ImportError: # Python 3 + import urllib.request as urllib2 + import http.client as httplib + +import socks # $ pip install PySocks + +def merge_dict(a, b): + d = a.copy() + d.update(b) + return d + +class SocksiPyConnection(httplib.HTTPConnection): + def __init__(self, proxytype, proxyaddr, proxyport=None, rdns=True, username=None, password=None, *args, **kwargs): + self.proxyargs = (proxytype, proxyaddr, proxyport, rdns, username, password) + httplib.HTTPConnection.__init__(self, *args, **kwargs) + + def connect(self): + self.sock = socks.socksocket() + self.sock.setproxy(*self.proxyargs) + if type(self.timeout) in (int, float): + self.sock.settimeout(self.timeout) + self.sock.connect((self.host, self.port)) + +class SocksiPyConnectionS(httplib.HTTPSConnection): + def __init__(self, proxytype, proxyaddr, proxyport=None, rdns=True, username=None, password=None, *args, **kwargs): + self.proxyargs = (proxytype, proxyaddr, proxyport, rdns, username, password) + httplib.HTTPSConnection.__init__(self, *args, **kwargs) + + def connect(self): + sock = socks.socksocket() + sock.setproxy(*self.proxyargs) + if type(self.timeout) in (int, float): + sock.settimeout(self.timeout) + sock.connect((self.host, self.port)) + self.sock = ssl.wrap_socket(sock, self.key_file, self.cert_file) + +class SocksiPyHandler(urllib2.HTTPHandler, urllib2.HTTPSHandler): + def __init__(self, *args, **kwargs): + self.args = args + self.kw = kwargs + urllib2.HTTPHandler.__init__(self) + + def http_open(self, req): + def build(host, port=None, timeout=0, **kwargs): + kw = merge_dict(self.kw, kwargs) + conn = SocksiPyConnection(*self.args, host=host, port=port, timeout=timeout, **kw) + return conn + return self.do_open(build, req) + + def https_open(self, req): + def build(host, port=None, timeout=0, **kwargs): + kw = merge_dict(self.kw, kwargs) + conn = SocksiPyConnectionS(*self.args, host=host, port=port, timeout=timeout, **kw) + return conn + return self.do_open(build, req) + +if __name__ == "__main__": + import sys + try: + port = int(sys.argv[1]) + except (ValueError, IndexError): + port = 9050 + opener = urllib2.build_opener(SocksiPyHandler(socks.PROXY_TYPE_SOCKS5, "localhost", port)) + print("HTTP: " + opener.open("http://httpbin.org/ip").read().decode()) + print("HTTPS: " + opener.open("https://httpbin.org/ip").read().decode()) diff --git a/env/lib/python3.6/site-packages/tweepy-3.6.0.dist-info/DESCRIPTION.rst b/env/lib/python3.6/site-packages/tweepy-3.6.0.dist-info/DESCRIPTION.rst new file mode 100644 index 0000000..e118723 --- /dev/null +++ b/env/lib/python3.6/site-packages/tweepy-3.6.0.dist-info/DESCRIPTION.rst @@ -0,0 +1,3 @@ +UNKNOWN + + diff --git a/env/lib/python3.6/site-packages/tweepy-3.6.0.dist-info/INSTALLER b/env/lib/python3.6/site-packages/tweepy-3.6.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/env/lib/python3.6/site-packages/tweepy-3.6.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/env/lib/python3.6/site-packages/tweepy-3.6.0.dist-info/METADATA b/env/lib/python3.6/site-packages/tweepy-3.6.0.dist-info/METADATA new file mode 100644 index 0000000..c2693b6 --- /dev/null +++ b/env/lib/python3.6/site-packages/tweepy-3.6.0.dist-info/METADATA @@ -0,0 +1,32 @@ +Metadata-Version: 2.0 +Name: tweepy +Version: 3.6.0 +Summary: Twitter library for python +Home-page: http://github.com/tweepy/tweepy +Author: Joshua Roesslein +Author-email: tweepy@googlegroups.com +License: MIT +Description-Content-Type: 
UNKNOWN +Keywords: twitter library +Platform: UNKNOWN +Classifier: Development Status :: 4 - Beta +Classifier: Topic :: Software Development :: Libraries +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Requires-Dist: requests (>=2.11.1) +Requires-Dist: requests-oauthlib (>=0.7.0) +Requires-Dist: six (>=1.10.0) +Requires-Dist: PySocks (>=1.5.7) + +UNKNOWN + + diff --git a/env/lib/python3.6/site-packages/tweepy-3.6.0.dist-info/RECORD b/env/lib/python3.6/site-packages/tweepy-3.6.0.dist-info/RECORD new file mode 100644 index 0000000..3b8a545 --- /dev/null +++ b/env/lib/python3.6/site-packages/tweepy-3.6.0.dist-info/RECORD @@ -0,0 +1,36 @@ +examples/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +examples/oauth.py,sha256=f-2UeLJfT-rQwM1evyeSWmnfI0AQA4gWwso_DE6Txs4,1080 +examples/streaming.py,sha256=d2cxS6fN4S4VJYnKidLnSy3JZQyNK1_jbnMHaB_3wm0,1034 +tweepy/__init__.py,sha256=AzK8O7zqW4s4ReRHNFTaW6L5ZmlLvvnhRH71stKrmM0,756 +tweepy/api.py,sha256=HSqrXb1VReRajF3D0k1uoY2ztbLZ3rS6DDd2wT72Kc8,48587 +tweepy/auth.py,sha256=RSYASOkpHIa8Bs5ozZe6MqLm8ZHJSLPHg-rydfSPXuY,6718 +tweepy/binder.py,sha256=euaXXsrvXNL8xF8busWpsloSnOkW7MkC_fKBRtkztgQ,11108 +tweepy/cache.py,sha256=RWVN8N36P2CxBA7Z3L6Sf7rUKOgchkbV1iAdi1IN-p8,12961 +tweepy/cursor.py,sha256=9cSLF8jlfpNZXRhbVXcf7OaAtt7t-d_ejGyvr5M3qwQ,7067 +tweepy/error.py,sha256=aLqGvr6_nnJAx1asFi46m5y21WcqgipRodXhgOI4Bm4,938 +tweepy/models.py,sha256=JAwblngMHghBP1mqQnb1rB5EDNrjWlmCrebGThqdxWA,14117 +tweepy/parsers.py,sha256=OxLefSV-iaoMlQr6kwUoUSmKkB3x639soCQpHlkC1Is,3101 +tweepy/streaming.py,sha256=Bqkr6yTtiWgbYWlND1p7fNrZteUH2PR5Hpu2ZiHiRFo,17252 +tweepy/utils.py,sha256=fPVFCJBYAcy36fstoGmuI6DbfhsCSRkRCLG9FaucfyE,1272 +tweepy-3.6.0.dist-info/DESCRIPTION.rst,sha256=OCTuuN6LcWulhHS3d5rfjdsQtW22n7HENFRh6jC6ego,10 +tweepy-3.6.0.dist-info/METADATA,sha256=Aq3sFxy2d2DXCKNKrRrSuJDDY4RrGRYQRSyYRpHtKNg,1070 +tweepy-3.6.0.dist-info/RECORD,, +tweepy-3.6.0.dist-info/WHEEL,sha256=kdsN-5OJAZIiHN-iO4Rhl82KyS0bDWf4uBwMbkNafr8,110 +tweepy-3.6.0.dist-info/metadata.json,sha256=QNjfFBXhjpk-kDExnejM_DrLdMF8wB9bToeE_GY5O78,1161 +tweepy-3.6.0.dist-info/top_level.txt,sha256=sGgLq_3P1FwC0PxBCOAXcXFCU5C2Q8TxSDKK2FPQKLU,16 +tweepy-3.6.0.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1 +tweepy-3.6.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +tweepy/__pycache__/streaming.cpython-36.pyc,, +tweepy/__pycache__/models.cpython-36.pyc,, +tweepy/__pycache__/auth.cpython-36.pyc,, +tweepy/__pycache__/cache.cpython-36.pyc,, +tweepy/__pycache__/utils.cpython-36.pyc,, +tweepy/__pycache__/cursor.cpython-36.pyc,, +tweepy/__pycache__/binder.cpython-36.pyc,, +tweepy/__pycache__/__init__.cpython-36.pyc,, +tweepy/__pycache__/parsers.cpython-36.pyc,, +tweepy/__pycache__/error.cpython-36.pyc,, +tweepy/__pycache__/api.cpython-36.pyc,, +examples/__pycache__/streaming.cpython-36.pyc,, +examples/__pycache__/__init__.cpython-36.pyc,, +examples/__pycache__/oauth.cpython-36.pyc,, diff --git a/env/lib/python3.6/site-packages/tweepy-3.6.0.dist-info/WHEEL 
b/env/lib/python3.6/site-packages/tweepy-3.6.0.dist-info/WHEEL new file mode 100644 index 0000000..7332a41 --- /dev/null +++ b/env/lib/python3.6/site-packages/tweepy-3.6.0.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.30.0) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/env/lib/python3.6/site-packages/tweepy-3.6.0.dist-info/metadata.json b/env/lib/python3.6/site-packages/tweepy-3.6.0.dist-info/metadata.json new file mode 100644 index 0000000..bd1b518 --- /dev/null +++ b/env/lib/python3.6/site-packages/tweepy-3.6.0.dist-info/metadata.json @@ -0,0 +1 @@ +{"classifiers": ["Development Status :: 4 - Beta", "Topic :: Software Development :: Libraries", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6"], "description_content_type": "UNKNOWN", "extensions": {"python.details": {"contacts": [{"email": "tweepy@googlegroups.com", "name": "Joshua Roesslein", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "http://github.com/tweepy/tweepy"}}}, "extras": [], "generator": "bdist_wheel (0.30.0)", "keywords": ["twitter", "library"], "license": "MIT", "metadata_version": "2.0", "name": "tweepy", "run_requires": [{"requires": ["PySocks (>=1.5.7)", "requests (>=2.11.1)", "requests-oauthlib (>=0.7.0)", "six (>=1.10.0)"]}], "summary": "Twitter library for python", "version": "3.6.0"} \ No newline at end of file diff --git a/env/lib/python3.6/site-packages/tweepy-3.6.0.dist-info/top_level.txt b/env/lib/python3.6/site-packages/tweepy-3.6.0.dist-info/top_level.txt new file mode 100644 index 0000000..84e6813 --- /dev/null +++ b/env/lib/python3.6/site-packages/tweepy-3.6.0.dist-info/top_level.txt @@ -0,0 +1,2 @@ +examples +tweepy diff --git a/env/lib/python3.6/site-packages/tweepy-3.6.0.dist-info/zip-safe b/env/lib/python3.6/site-packages/tweepy-3.6.0.dist-info/zip-safe new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/env/lib/python3.6/site-packages/tweepy-3.6.0.dist-info/zip-safe @@ -0,0 +1 @@ + diff --git a/env/lib/python3.6/site-packages/tweepy/api.py b/env/lib/python3.6/site-packages/tweepy/api.py new file mode 100644 index 0000000..4276709 --- /dev/null +++ b/env/lib/python3.6/site-packages/tweepy/api.py @@ -0,0 +1,1345 @@ +# Tweepy +# Copyright 2009-2010 Joshua Roesslein +# See LICENSE for details. 
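The API class defined in this module is normally constructed with an OAuth handler from tweepy.auth, which is also part of this wheel. A minimal usage sketch with placeholder credentials:

    import tweepy

    # Placeholder credentials; real values come from a Twitter developer application.
    auth = tweepy.OAuthHandler("CONSUMER_KEY", "CONSUMER_SECRET")
    auth.set_access_token("ACCESS_TOKEN", "ACCESS_TOKEN_SECRET")

    api = tweepy.API(auth, wait_on_rate_limit=True)
    for status in api.home_timeline(count=5):
        print(status.text)

wait_on_rate_limit matches the constructor parameter documented below, and home_timeline is one of the bound endpoints defined further down in this file.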
+ +from __future__ import print_function + +import os +import mimetypes + +import six + +from tweepy.binder import bind_api +from tweepy.error import TweepError +from tweepy.parsers import ModelParser, Parser +from tweepy.utils import list_to_csv + + +class API(object): + """Twitter API""" + + def __init__(self, auth_handler=None, + host='api.twitter.com', search_host='search.twitter.com', + upload_host='upload.twitter.com', cache=None, api_root='/1.1', + search_root='', upload_root='/1.1', retry_count=0, + retry_delay=0, retry_errors=None, timeout=60, parser=None, + compression=False, wait_on_rate_limit=False, + wait_on_rate_limit_notify=False, proxy=''): + """ Api instance Constructor + + :param auth_handler: + :param host: url of the server of the rest api, default:'api.twitter.com' + :param search_host: url of the search server, default:'search.twitter.com' + :param upload_host: url of the upload server, default:'upload.twitter.com' + :param cache: Cache to query if a GET method is used, default:None + :param api_root: suffix of the api version, default:'/1.1' + :param search_root: suffix of the search version, default:'' + :param upload_root: suffix of the upload version, default:'/1.1' + :param retry_count: number of allowed retries, default:0 + :param retry_delay: delay in second between retries, default:0 + :param retry_errors: default:None + :param timeout: delay before to consider the request as timed out in seconds, default:60 + :param parser: ModelParser instance to parse the responses, default:None + :param compression: If the response is compressed, default:False + :param wait_on_rate_limit: If the api wait when it hits the rate limit, default:False + :param wait_on_rate_limit_notify: If the api print a notification when the rate limit is hit, default:False + :param proxy: Url to use as proxy during the HTTP request, default:'' + + :raise TypeError: If the given parser is not a ModelParser instance. + """ + self.auth = auth_handler + self.host = host + self.search_host = search_host + self.upload_host = upload_host + self.api_root = api_root + self.search_root = search_root + self.upload_root = upload_root + self.cache = cache + self.compression = compression + self.retry_count = retry_count + self.retry_delay = retry_delay + self.retry_errors = retry_errors + self.timeout = timeout + self.wait_on_rate_limit = wait_on_rate_limit + self.wait_on_rate_limit_notify = wait_on_rate_limit_notify + self.parser = parser or ModelParser() + self.proxy = {} + if proxy: + self.proxy['https'] = proxy + + # Attempt to explain more clearly the parser argument requirements + # https://github.com/tweepy/tweepy/issues/421 + # + parser_type = Parser + if not isinstance(self.parser, parser_type): + raise TypeError( + '"parser" argument has to be an instance of "{required}".' 
+ ' It is currently a {actual}.'.format( + required=parser_type.__name__, + actual=type(self.parser) + ) + ) + + @property + def home_timeline(self): + """ :reference: https://dev.twitter.com/rest/reference/get/statuses/home_timeline + :allowed_param:'since_id', 'max_id', 'count' + """ + return bind_api( + api=self, + path='/statuses/home_timeline.json', + payload_type='status', payload_list=True, + allowed_param=['since_id', 'max_id', 'count'], + require_auth=True + ) + + def statuses_lookup(self, id_, include_entities=None, + trim_user=None, map_=None): + return self._statuses_lookup(list_to_csv(id_), include_entities, + trim_user, map_) + + @property + def _statuses_lookup(self): + """ :reference: https://dev.twitter.com/rest/reference/get/statuses/lookup + :allowed_param:'id', 'include_entities', 'trim_user', 'map' + """ + return bind_api( + api=self, + path='/statuses/lookup.json', + payload_type='status', payload_list=True, + allowed_param=['id', 'include_entities', 'trim_user', 'map'], + require_auth=True + ) + + @property + def user_timeline(self): + """ :reference: https://dev.twitter.com/rest/reference/get/statuses/user_timeline + :allowed_param:'id', 'user_id', 'screen_name', 'since_id', 'max_id', 'count', 'include_rts' + """ + return bind_api( + api=self, + path='/statuses/user_timeline.json', + payload_type='status', payload_list=True, + allowed_param=['id', 'user_id', 'screen_name', 'since_id', + 'max_id', 'count', 'include_rts'] + ) + + @property + def mentions_timeline(self): + """ :reference: https://dev.twitter.com/rest/reference/get/statuses/mentions_timeline + :allowed_param:'since_id', 'max_id', 'count' + """ + return bind_api( + api=self, + path='/statuses/mentions_timeline.json', + payload_type='status', payload_list=True, + allowed_param=['since_id', 'max_id', 'count'], + require_auth=True + ) + + @property + def related_results(self): + """ :reference: https://dev.twitter.com/docs/api/1.1/get/related_results/show/%3id.format + :allowed_param:'id' + """ + return bind_api( + api=self, + path='/related_results/show/{id}.json', + payload_type='relation', payload_list=True, + allowed_param=['id'], + require_auth=False + ) + + @property + def retweets_of_me(self): + """ :reference: https://dev.twitter.com/rest/reference/get/statuses/retweets_of_me + :allowed_param:'since_id', 'max_id', 'count' + """ + return bind_api( + api=self, + path='/statuses/retweets_of_me.json', + payload_type='status', payload_list=True, + allowed_param=['since_id', 'max_id', 'count'], + require_auth=True + ) + + @property + def get_status(self): + """ :reference: https://dev.twitter.com/rest/reference/get/statuses/show/%3Aid + :allowed_param:'id' + """ + return bind_api( + api=self, + path='/statuses/show.json', + payload_type='status', + allowed_param=['id'] + ) + + def update_status(self, *args, **kwargs): + """ :reference: https://dev.twitter.com/rest/reference/post/statuses/update + :allowed_param:'status', 'in_reply_to_status_id', 'in_reply_to_status_id_str', 'auto_populate_reply_metadata', 'lat', 'long', 'source', 'place_id', 'display_coordinates', 'media_ids' + """ + post_data = {} + media_ids = kwargs.pop("media_ids", None) + if media_ids is not None: + post_data["media_ids"] = list_to_csv(media_ids) + + return bind_api( + api=self, + path='/statuses/update.json', + method='POST', + payload_type='status', + allowed_param=['status', 'in_reply_to_status_id', 'in_reply_to_status_id_str', 'auto_populate_reply_metadata', 'lat', 'long', 'source', 'place_id', 'display_coordinates'], + 
require_auth=True + )(post_data=post_data, *args, **kwargs) + + def media_upload(self, filename, *args, **kwargs): + """ :reference: https://dev.twitter.com/rest/reference/post/media/upload + :allowed_param: + """ + f = kwargs.pop('file', None) + headers, post_data = API._pack_image(filename, 4883, form_field='media', f=f) + kwargs.update({'headers': headers, 'post_data': post_data}) + + return bind_api( + api=self, + path='/media/upload.json', + method='POST', + payload_type='media', + allowed_param=[], + require_auth=True, + upload_api=True + )(*args, **kwargs) + + def update_with_media(self, filename, *args, **kwargs): + """ :reference: https://dev.twitter.com/rest/reference/post/statuses/update_with_media + :allowed_param:'status', 'possibly_sensitive', 'in_reply_to_status_id', 'in_reply_to_status_id_str', 'auto_populate_reply_metadata', 'lat', 'long', 'place_id', 'display_coordinates' + """ + f = kwargs.pop('file', None) + headers, post_data = API._pack_image(filename, 3072, form_field='media[]', f=f) + kwargs.update({'headers': headers, 'post_data': post_data}) + + return bind_api( + api=self, + path='/statuses/update_with_media.json', + method='POST', + payload_type='status', + allowed_param=[ + 'status', 'possibly_sensitive', 'in_reply_to_status_id', 'in_reply_to_status_id_str', + 'auto_populate_reply_metadata', 'lat', 'long', 'place_id', 'display_coordinates' + ], + require_auth=True + )(*args, **kwargs) + + @property + def destroy_status(self): + """ :reference: https://dev.twitter.com/rest/reference/post/statuses/destroy/%3Aid + :allowed_param:'id' + """ + return bind_api( + api=self, + path='/statuses/destroy/{id}.json', + method='POST', + payload_type='status', + allowed_param=['id'], + require_auth=True + ) + + @property + def retweet(self): + """ :reference: https://dev.twitter.com/rest/reference/post/statuses/retweet/%3Aid + :allowed_param:'id' + """ + return bind_api( + api=self, + path='/statuses/retweet/{id}.json', + method='POST', + payload_type='status', + allowed_param=['id'], + require_auth=True + ) + + @property + def unretweet(self): + """ :reference: https://dev.twitter.com/rest/reference/post/statuses/unretweet/%3Aid + :allowed_param:'id' + """ + return bind_api( + api=self, + path='/statuses/unretweet/{id}.json', + method='POST', + payload_type='status', + allowed_param=['id'], + require_auth=True + ) + + @property + def retweets(self): + """ :reference: https://dev.twitter.com/rest/reference/get/statuses/retweets/%3Aid + :allowed_param:'id', 'count' + """ + return bind_api( + api=self, + path='/statuses/retweets/{id}.json', + payload_type='status', payload_list=True, + allowed_param=['id', 'count'], + require_auth=True + ) + + @property + def retweeters(self): + """ :reference: https://dev.twitter.com/rest/reference/get/statuses/retweeters/ids + :allowed_param:'id', 'cursor', 'stringify_ids + """ + return bind_api( + api=self, + path='/statuses/retweeters/ids.json', + payload_type='ids', + allowed_param=['id', 'cursor', 'stringify_ids'] + ) + + @property + def get_user(self): + """ :reference: https://dev.twitter.com/rest/reference/get/users/show + :allowed_param:'id', 'user_id', 'screen_name' + """ + return bind_api( + api=self, + path='/users/show.json', + payload_type='user', + allowed_param=['id', 'user_id', 'screen_name'] + ) + + @property + def get_oembed(self): + """ :reference: https://dev.twitter.com/rest/reference/get/statuses/oembed + :allowed_param:'id', 'url', 'maxwidth', 'hide_media', 'omit_script', 'align', 'related', 'lang' + """ + return 
bind_api( + api=self, + path='/statuses/oembed.json', + payload_type='json', + allowed_param=['id', 'url', 'maxwidth', 'hide_media', 'omit_script', 'align', 'related', 'lang'] + ) + + def lookup_users(self, user_ids=None, screen_names=None, include_entities=None): + """ Perform bulk look up of users from user ID or screenname """ + post_data = {} + if include_entities is not None: + include_entities = 'true' if include_entities else 'false' + post_data['include_entities'] = include_entities + if user_ids: + post_data['user_id'] = list_to_csv(user_ids) + if screen_names: + post_data['screen_name'] = list_to_csv(screen_names) + + return self._lookup_users(post_data=post_data) + + @property + def _lookup_users(self): + """ :reference: https://dev.twitter.com/rest/reference/get/users/lookup + allowed_param='user_id', 'screen_name', 'include_entities' + """ + return bind_api( + api=self, + path='/users/lookup.json', + payload_type='user', payload_list=True, + method='POST', + allowed_param=['user_id', 'screen_name', 'include_entities'] + ) + + def me(self): + """ Get the authenticated user """ + return self.get_user(screen_name=self.auth.get_username()) + + @property + def search_users(self): + """ :reference: https://dev.twitter.com/rest/reference/get/users/search + :allowed_param:'q', 'count', 'page' + """ + return bind_api( + api=self, + path='/users/search.json', + payload_type='user', payload_list=True, + require_auth=True, + allowed_param=['q', 'count', 'page'] + ) + + @property + def suggested_users(self): + """ :reference: https://dev.twitter.com/rest/reference/get/users/suggestions/%3Aslug + :allowed_param:'slug', 'lang' + """ + return bind_api( + api=self, + path='/users/suggestions/{slug}.json', + payload_type='user', payload_list=True, + require_auth=True, + allowed_param=['slug', 'lang'] + ) + + @property + def suggested_categories(self): + """ :reference: https://dev.twitter.com/rest/reference/get/users/suggestions + :allowed_param:'lang' + """ + return bind_api( + api=self, + path='/users/suggestions.json', + payload_type='category', payload_list=True, + allowed_param=['lang'], + require_auth=True + ) + + @property + def suggested_users_tweets(self): + """ :reference: https://dev.twitter.com/rest/reference/get/users/suggestions/%3Aslug/members + :allowed_param:'slug' + """ + return bind_api( + api=self, + path='/users/suggestions/{slug}/members.json', + payload_type='status', payload_list=True, + allowed_param=['slug'], + require_auth=True + ) + + @property + def direct_messages(self): + """ :reference: https://dev.twitter.com/rest/reference/get/direct_messages + :allowed_param:'since_id', 'max_id', 'count', 'full_text' + """ + return bind_api( + api=self, + path='/direct_messages.json', + payload_type='direct_message', payload_list=True, + allowed_param=['since_id', 'max_id', 'count', 'full_text'], + require_auth=True + ) + + @property + def get_direct_message(self): + """ :reference: https://dev.twitter.com/rest/reference/get/direct_messages/show + :allowed_param:'id', 'full_text' + """ + return bind_api( + api=self, + path='/direct_messages/show/{id}.json', + payload_type='direct_message', + allowed_param=['id', 'full_text'], + require_auth=True + ) + + @property + def sent_direct_messages(self): + """ :reference: https://dev.twitter.com/rest/reference/get/direct_messages/sent + :allowed_param:'since_id', 'max_id', 'count', 'page', 'full_text' + """ + return bind_api( + api=self, + path='/direct_messages/sent.json', + payload_type='direct_message', payload_list=True, + 
allowed_param=['since_id', 'max_id', 'count', 'page', 'full_text'], + require_auth=True + ) + + @property + def send_direct_message(self): + """ :reference: https://dev.twitter.com/rest/reference/post/direct_messages/new + :allowed_param:'user', 'screen_name', 'user_id', 'text' + """ + return bind_api( + api=self, + path='/direct_messages/new.json', + method='POST', + payload_type='direct_message', + allowed_param=['user', 'screen_name', 'user_id', 'text'], + require_auth=True + ) + + @property + def destroy_direct_message(self): + """ :reference: https://dev.twitter.com/rest/reference/post/direct_messages/destroy + :allowed_param:'id' + """ + return bind_api( + api=self, + path='/direct_messages/destroy.json', + method='POST', + payload_type='direct_message', + allowed_param=['id'], + require_auth=True + ) + + @property + def create_friendship(self): + """ :reference: https://dev.twitter.com/rest/reference/post/friendships/create + :allowed_param:'id', 'user_id', 'screen_name', 'follow' + """ + return bind_api( + api=self, + path='/friendships/create.json', + method='POST', + payload_type='user', + allowed_param=['id', 'user_id', 'screen_name', 'follow'], + require_auth=True + ) + + @property + def destroy_friendship(self): + """ :reference: https://dev.twitter.com/rest/reference/post/friendships/destroy + :allowed_param:'id', 'user_id', 'screen_name' + """ + return bind_api( + api=self, + path='/friendships/destroy.json', + method='POST', + payload_type='user', + allowed_param=['id', 'user_id', 'screen_name'], + require_auth=True + ) + + @property + def show_friendship(self): + """ :reference: https://dev.twitter.com/rest/reference/get/friendships/show + :allowed_param:'source_id', 'source_screen_name', 'target_id', 'target_screen_name' + """ + return bind_api( + api=self, + path='/friendships/show.json', + payload_type='friendship', + allowed_param=['source_id', 'source_screen_name', + 'target_id', 'target_screen_name'] + ) + + def lookup_friendships(self, user_ids=None, screen_names=None): + """ Perform bulk look up of friendships from user ID or screenname """ + return self._lookup_friendships(list_to_csv(user_ids), list_to_csv(screen_names)) + + @property + def _lookup_friendships(self): + """ :reference: https://dev.twitter.com/rest/reference/get/friendships/lookup + :allowed_param:'user_id', 'screen_name' + """ + return bind_api( + api=self, + path='/friendships/lookup.json', + payload_type='relationship', payload_list=True, + allowed_param=['user_id', 'screen_name'], + require_auth=True + ) + + @property + def friends_ids(self): + """ :reference: https://dev.twitter.com/rest/reference/get/friends/ids + :allowed_param:'id', 'user_id', 'screen_name', 'cursor' + """ + return bind_api( + api=self, + path='/friends/ids.json', + payload_type='ids', + allowed_param=['id', 'user_id', 'screen_name', 'cursor'] + ) + + @property + def friends(self): + """ :reference: https://dev.twitter.com/rest/reference/get/friends/list + :allowed_param:'id', 'user_id', 'screen_name', 'cursor', 'skip_status', 'include_user_entities' + """ + return bind_api( + api=self, + path='/friends/list.json', + payload_type='user', payload_list=True, + allowed_param=['id', 'user_id', 'screen_name', 'cursor', 'skip_status', 'include_user_entities'] + ) + + @property + def friendships_incoming(self): + """ :reference: https://dev.twitter.com/rest/reference/get/friendships/incoming + :allowed_param:'cursor' + """ + return bind_api( + api=self, + path='/friendships/incoming.json', + payload_type='ids', + 
allowed_param=['cursor'] + ) + + @property + def friendships_outgoing(self): + """ :reference: https://dev.twitter.com/rest/reference/get/friendships/outgoing + :allowed_param:'cursor' + """ + return bind_api( + api=self, + path='/friendships/outgoing.json', + payload_type='ids', + allowed_param=['cursor'] + ) + + @property + def followers_ids(self): + """ :reference: https://dev.twitter.com/rest/reference/get/followers/ids + :allowed_param:'id', 'user_id', 'screen_name', 'cursor', 'count' + """ + return bind_api( + api=self, + path='/followers/ids.json', + payload_type='ids', + allowed_param=['id', 'user_id', 'screen_name', 'cursor', 'count'] + ) + + @property + def followers(self): + """ :reference: https://dev.twitter.com/rest/reference/get/followers/list + :allowed_param:'id', 'user_id', 'screen_name', 'cursor', 'count', 'skip_status', 'include_user_entities' + """ + return bind_api( + api=self, + path='/followers/list.json', + payload_type='user', payload_list=True, + allowed_param=['id', 'user_id', 'screen_name', 'cursor', 'count', + 'skip_status', 'include_user_entities'] + ) + + @property + def get_settings(self): + """ :reference: https://dev.twitter.com/rest/reference/get/account/settings + """ + return bind_api( + api=self, + path='/account/settings.json', + payload_type='json', + use_cache=False + ) + + @property + def set_settings(self): + """ :reference: https://dev.twitter.com/rest/reference/post/account/settings + :allowed_param:'sleep_time_enabled', 'start_sleep_time', + 'end_sleep_time', 'time_zone', 'trend_location_woeid', + 'allow_contributor_request', 'lang' + """ + return bind_api( + api=self, + path='/account/settings.json', + method='POST', + payload_type='json', + allowed_param=['sleep_time_enabled', 'start_sleep_time', + 'end_sleep_time', 'time_zone', + 'trend_location_woeid', 'allow_contributor_request', + 'lang'], + use_cache=False + ) + + def verify_credentials(self, **kargs): + """ :reference: https://dev.twitter.com/rest/reference/get/account/verify_credentials + :allowed_param:'include_entities', 'skip_status', 'include_email' + """ + try: + return bind_api( + api=self, + path='/account/verify_credentials.json', + payload_type='user', + require_auth=True, + allowed_param=['include_entities', 'skip_status', 'include_email'], + )(**kargs) + except TweepError as e: + if e.response and e.response.status == 401: + return False + raise + + @property + def rate_limit_status(self): + """ :reference: https://dev.twitter.com/rest/reference/get/application/rate_limit_status + :allowed_param:'resources' + """ + return bind_api( + api=self, + path='/application/rate_limit_status.json', + payload_type='json', + allowed_param=['resources'], + use_cache=False + ) + + @property + def set_delivery_device(self): + """ :reference: https://dev.twitter.com/rest/reference/post/account/update_delivery_device + :allowed_param:'device' + """ + return bind_api( + api=self, + path='/account/update_delivery_device.json', + method='POST', + allowed_param=['device'], + payload_type='user', + require_auth=True + ) + + def update_profile_image(self, filename, file_=None): + """ :reference: https://dev.twitter.com/rest/reference/post/account/update_profile_image + :allowed_param:'include_entities', 'skip_status' + """ + headers, post_data = API._pack_image(filename, 700, f=file_) + return bind_api( + api=self, + path='/account/update_profile_image.json', + method='POST', + payload_type='user', + allowed_param=['include_entities', 'skip_status'], + require_auth=True + )(self, 
post_data=post_data, headers=headers) + + def update_profile_background_image(self, filename, **kargs): + """ :reference: https://dev.twitter.com/rest/reference/post/account/update_profile_background_image + :allowed_param:'tile', 'include_entities', 'skip_status', 'use' + """ + f = kargs.pop('file', None) + headers, post_data = API._pack_image(filename, 800, f=f) + bind_api( + api=self, + path='/account/update_profile_background_image.json', + method='POST', + payload_type='user', + allowed_param=['tile', 'include_entities', 'skip_status', 'use'], + require_auth=True + )(post_data=post_data, headers=headers) + + def update_profile_banner(self, filename, **kargs): + """ :reference: https://dev.twitter.com/rest/reference/post/account/update_profile_banner + :allowed_param:'width', 'height', 'offset_left', 'offset_right' + """ + f = kargs.pop('file', None) + headers, post_data = API._pack_image(filename, 700, form_field="banner", f=f) + bind_api( + api=self, + path='/account/update_profile_banner.json', + method='POST', + allowed_param=['width', 'height', 'offset_left', 'offset_right'], + require_auth=True + )(post_data=post_data, headers=headers) + + @property + def update_profile(self): + """ :reference: https://dev.twitter.com/rest/reference/post/account/update_profile + :allowed_param:'name', 'url', 'location', 'description', 'profile_link_color' + """ + return bind_api( + api=self, + path='/account/update_profile.json', + method='POST', + payload_type='user', + allowed_param=['name', 'url', 'location', 'description', 'profile_link_color'], + require_auth=True + ) + + @property + def favorites(self): + """ :reference: https://dev.twitter.com/rest/reference/get/favorites/list + :allowed_param:'screen_name', 'user_id', 'max_id', 'count', 'since_id', 'max_id' + """ + return bind_api( + api=self, + path='/favorites/list.json', + payload_type='status', payload_list=True, + allowed_param=['screen_name', 'user_id', 'max_id', 'count', 'since_id', 'max_id'] + ) + + @property + def create_favorite(self): + """ :reference:https://dev.twitter.com/rest/reference/post/favorites/create + :allowed_param:'id' + """ + return bind_api( + api=self, + path='/favorites/create.json', + method='POST', + payload_type='status', + allowed_param=['id'], + require_auth=True + ) + + @property + def destroy_favorite(self): + """ :reference: https://dev.twitter.com/rest/reference/post/favorites/destroy + :allowed_param:'id' + """ + return bind_api( + api=self, + path='/favorites/destroy.json', + method='POST', + payload_type='status', + allowed_param=['id'], + require_auth=True + ) + + @property + def create_block(self): + """ :reference: https://dev.twitter.com/rest/reference/post/blocks/create + :allowed_param:'id', 'user_id', 'screen_name' + """ + return bind_api( + api=self, + path='/blocks/create.json', + method='POST', + payload_type='user', + allowed_param=['id', 'user_id', 'screen_name'], + require_auth=True + ) + + @property + def destroy_block(self): + """ :reference: https://dev.twitter.com/rest/reference/post/blocks/destroy + :allowed_param:'id', 'user_id', 'screen_name' + """ + return bind_api( + api=self, + path='/blocks/destroy.json', + method='POST', + payload_type='user', + allowed_param=['id', 'user_id', 'screen_name'], + require_auth=True + ) + + @property + def blocks(self): + """ :reference: https://dev.twitter.com/rest/reference/get/blocks/list + :allowed_param:'cursor' + """ + return bind_api( + api=self, + path='/blocks/list.json', + payload_type='user', payload_list=True, + 
allowed_param=['cursor'], + require_auth=True + ) + + @property + def blocks_ids(self): + """ :reference: https://dev.twitter.com/rest/reference/get/blocks/ids """ + return bind_api( + api=self, + path='/blocks/ids.json', + payload_type='json', + require_auth=True + ) + + @property + def report_spam(self): + """ :reference: https://dev.twitter.com/rest/reference/post/users/report_spam + :allowed_param:'user_id', 'screen_name' + """ + return bind_api( + api=self, + path='/users/report_spam.json', + method='POST', + payload_type='user', + allowed_param=['user_id', 'screen_name'], + require_auth=True + ) + + @property + def saved_searches(self): + """ :reference: https://dev.twitter.com/rest/reference/get/saved_searches/show/%3Aid """ + return bind_api( + api=self, + path='/saved_searches/list.json', + payload_type='saved_search', payload_list=True, + require_auth=True + ) + + @property + def get_saved_search(self): + """ :reference: https://dev.twitter.com/rest/reference/get/saved_searches/show/%3Aid + :allowed_param:'id' + """ + return bind_api( + api=self, + path='/saved_searches/show/{id}.json', + payload_type='saved_search', + allowed_param=['id'], + require_auth=True + ) + + @property + def create_saved_search(self): + """ :reference: https://dev.twitter.com/rest/reference/post/saved_searches/create + :allowed_param:'query' + """ + return bind_api( + api=self, + path='/saved_searches/create.json', + method='POST', + payload_type='saved_search', + allowed_param=['query'], + require_auth=True + ) + + @property + def destroy_saved_search(self): + """ :reference: https://dev.twitter.com/rest/reference/post/saved_searches/destroy/%3Aid + :allowed_param:'id' + """ + return bind_api( + api=self, + path='/saved_searches/destroy/{id}.json', + method='POST', + payload_type='saved_search', + allowed_param=['id'], + require_auth=True + ) + + @property + def create_list(self): + """ :reference: https://dev.twitter.com/rest/reference/post/lists/create + :allowed_param:'name', 'mode', 'description' + """ + return bind_api( + api=self, + path='/lists/create.json', + method='POST', + payload_type='list', + allowed_param=['name', 'mode', 'description'], + require_auth=True + ) + + @property + def destroy_list(self): + """ :reference: https://dev.twitter.com/rest/reference/post/lists/destroy + :allowed_param:'owner_screen_name', 'owner_id', 'list_id', 'slug' + """ + return bind_api( + api=self, + path='/lists/destroy.json', + method='POST', + payload_type='list', + allowed_param=['owner_screen_name', 'owner_id', 'list_id', 'slug'], + require_auth=True + ) + + @property + def update_list(self): + """ :reference: https://dev.twitter.com/rest/reference/post/lists/update + :allowed_param: list_id', 'slug', 'name', 'mode', 'description', 'owner_screen_name', 'owner_id' + """ + return bind_api( + api=self, + path='/lists/update.json', + method='POST', + payload_type='list', + allowed_param=['list_id', 'slug', 'name', 'mode', 'description', 'owner_screen_name', 'owner_id'], + require_auth=True + ) + + @property + def lists_all(self): + """ :reference: https://dev.twitter.com/rest/reference/get/lists/list + :allowed_param:'screen_name', 'user_id' + """ + return bind_api( + api=self, + path='/lists/list.json', + payload_type='list', payload_list=True, + allowed_param=['screen_name', 'user_id'], + require_auth=True + ) + + @property + def lists_memberships(self): + """ :reference: https://dev.twitter.com/rest/reference/get/lists/memberships + :allowed_param:'screen_name', 'user_id', 'filter_to_owned_lists', 
'cursor' + """ + return bind_api( + api=self, + path='/lists/memberships.json', + payload_type='list', payload_list=True, + allowed_param=['screen_name', 'user_id', 'filter_to_owned_lists', 'cursor'], + require_auth=True + ) + + @property + def lists_subscriptions(self): + """ :reference: https://dev.twitter.com/rest/reference/get/lists/subscriptions + :allowed_param:'screen_name', 'user_id', 'cursor' + """ + return bind_api( + api=self, + path='/lists/subscriptions.json', + payload_type='list', payload_list=True, + allowed_param=['screen_name', 'user_id', 'cursor'], + require_auth=True + ) + + @property + def list_timeline(self): + """ :reference: https://dev.twitter.com/docs/api/1.1/get/lists/statuses + :allowed_param:'owner_screen_name', 'slug', 'owner_id', 'list_id', + 'since_id', 'max_id', 'count', 'include_rts + """ + return bind_api( + api=self, + path='/lists/statuses.json', + payload_type='status', payload_list=True, + allowed_param=['owner_screen_name', 'slug', 'owner_id', + 'list_id', 'since_id', 'max_id', 'count', + 'include_rts'] + ) + + @property + def get_list(self): + """ :reference: https://dev.twitter.com/rest/reference/get/lists/show + :allowed_param:'owner_screen_name', 'owner_id', 'slug', 'list_id' + """ + return bind_api( + api=self, + path='/lists/show.json', + payload_type='list', + allowed_param=['owner_screen_name', 'owner_id', 'slug', 'list_id'] + ) + + @property + def add_list_member(self): + """ :reference: https://dev.twitter.com/docs/api/1.1/post/lists/members/create + :allowed_param:'screen_name', 'user_id', 'owner_screen_name', + 'owner_id', 'slug', 'list_id' + """ + return bind_api( + api=self, + path='/lists/members/create.json', + method='POST', + payload_type='list', + allowed_param=['screen_name', 'user_id', 'owner_screen_name', + 'owner_id', 'slug', 'list_id'], + require_auth=True + ) + + @property + def remove_list_member(self): + """ :reference: https://dev.twitter.com/docs/api/1.1/post/lists/members/destroy + :allowed_param:'screen_name', 'user_id', 'owner_screen_name', + 'owner_id', 'slug', 'list_id' + """ + return bind_api( + api=self, + path='/lists/members/destroy.json', + method='POST', + payload_type='list', + allowed_param=['screen_name', 'user_id', 'owner_screen_name', + 'owner_id', 'slug', 'list_id'], + require_auth=True + ) + + def add_list_members(self, screen_name=None, user_id=None, slug=None, + list_id=None, owner_id=None, owner_screen_name=None): + """ Perform bulk add of list members from user ID or screenname """ + return self._add_list_members(list_to_csv(screen_name), + list_to_csv(user_id), + slug, list_id, owner_id, + owner_screen_name) + + @property + def _add_list_members(self): + """ :reference: https://dev.twitter.com/docs/api/1.1/post/lists/members/create_all + :allowed_param:'screen_name', 'user_id', 'slug', 'list_id', + 'owner_id', 'owner_screen_name' + + """ + return bind_api( + api=self, + path='/lists/members/create_all.json', + method='POST', + payload_type='list', + allowed_param=['screen_name', 'user_id', 'slug', 'list_id', + 'owner_id', 'owner_screen_name'], + require_auth=True + ) + + def remove_list_members(self, screen_name=None, user_id=None, slug=None, + list_id=None, owner_id=None, owner_screen_name=None): + """ Perform bulk remove of list members from user ID or screenname """ + return self._remove_list_members(list_to_csv(screen_name), + list_to_csv(user_id), + slug, list_id, owner_id, + owner_screen_name) + + @property + def _remove_list_members(self): + """ :reference: 
https://dev.twitter.com/docs/api/1.1/post/lists/members/destroy_all + :allowed_param:'screen_name', 'user_id', 'slug', 'list_id', + 'owner_id', 'owner_screen_name' + + """ + return bind_api( + api=self, + path='/lists/members/destroy_all.json', + method='POST', + payload_type='list', + allowed_param=['screen_name', 'user_id', 'slug', 'list_id', + 'owner_id', 'owner_screen_name'], + require_auth=True + ) + + @property + def list_members(self): + """ :reference: https://dev.twitter.com/docs/api/1.1/get/lists/members + :allowed_param:'owner_screen_name', 'slug', 'list_id', + 'owner_id', 'cursor + """ + return bind_api( + api=self, + path='/lists/members.json', + payload_type='user', payload_list=True, + allowed_param=['owner_screen_name', 'slug', 'list_id', + 'owner_id', 'cursor'] + ) + + @property + def show_list_member(self): + """ :reference: https://dev.twitter.com/docs/api/1.1/get/lists/members/show + :allowed_param:'list_id', 'slug', 'user_id', 'screen_name', + 'owner_screen_name', 'owner_id + """ + return bind_api( + api=self, + path='/lists/members/show.json', + payload_type='user', + allowed_param=['list_id', 'slug', 'user_id', 'screen_name', + 'owner_screen_name', 'owner_id'] + ) + + @property + def subscribe_list(self): + """ :reference: https://dev.twitter.com/docs/api/1.1/post/lists/subscribers/create + :allowed_param:'owner_screen_name', 'slug', 'owner_id', + 'list_id' + """ + return bind_api( + api=self, + path='/lists/subscribers/create.json', + method='POST', + payload_type='list', + allowed_param=['owner_screen_name', 'slug', 'owner_id', + 'list_id'], + require_auth=True + ) + + @property + def unsubscribe_list(self): + """ :reference: https://dev.twitter.com/docs/api/1.1/post/lists/subscribers/destroy + :allowed_param:'owner_screen_name', 'slug', 'owner_id', + 'list_id' + """ + return bind_api( + api=self, + path='/lists/subscribers/destroy.json', + method='POST', + payload_type='list', + allowed_param=['owner_screen_name', 'slug', 'owner_id', + 'list_id'], + require_auth=True + ) + + @property + def list_subscribers(self): + """ :reference: https://dev.twitter.com/docs/api/1.1/get/lists/subscribers + :allowed_param:'owner_screen_name', 'slug', 'owner_id', + 'list_id', 'cursor + """ + return bind_api( + api=self, + path='/lists/subscribers.json', + payload_type='user', payload_list=True, + allowed_param=['owner_screen_name', 'slug', 'owner_id', + 'list_id', 'cursor'] + ) + + @property + def show_list_subscriber(self): + """ :reference: https://dev.twitter.com/docs/api/1.1/get/lists/subscribers/show + :allowed_param:'owner_screen_name', 'slug', 'screen_name', + 'owner_id', 'list_id', 'user_id + """ + return bind_api( + api=self, + path='/lists/subscribers/show.json', + payload_type='user', + allowed_param=['owner_screen_name', 'slug', 'screen_name', + 'owner_id', 'list_id', 'user_id'] + ) + + @property + def trends_available(self): + """ :reference: https://dev.twitter.com/rest/reference/get/trends/available """ + return bind_api( + api=self, + path='/trends/available.json', + payload_type='json' + ) + + @property + def trends_place(self): + """ :reference: https://dev.twitter.com/rest/reference/get/trends/place + :allowed_param:'id', 'exclude' + """ + return bind_api( + api=self, + path='/trends/place.json', + payload_type='json', + allowed_param=['id', 'exclude'] + ) + + @property + def trends_closest(self): + """ :reference: https://dev.twitter.com/rest/reference/get/trends/closest + :allowed_param:'lat', 'long' + """ + return bind_api( + api=self, + 
path='/trends/closest.json', + payload_type='json', + allowed_param=['lat', 'long'] + ) + + @property + def search(self): + """ :reference: https://dev.twitter.com/rest/reference/get/search/tweets + :allowed_param:'q', 'lang', 'locale', 'since_id', 'geocode', + 'max_id', 'since', 'until', 'result_type', 'count', + 'include_entities', 'from', 'to', 'source' + """ + return bind_api( + api=self, + path='/search/tweets.json', + payload_type='search_results', + allowed_param=['q', 'lang', 'locale', 'since_id', 'geocode', + 'max_id', 'since', 'until', 'result_type', + 'count', 'include_entities', 'from', + 'to', 'source'] + ) + + @property + def reverse_geocode(self): + """ :reference: https://dev.twitter.com/rest/reference/get/geo/reverse_geocode + :allowed_param:'lat', 'long', 'accuracy', 'granularity', 'max_results' + """ + return bind_api( + api=self, + path='/geo/reverse_geocode.json', + payload_type='place', payload_list=True, + allowed_param=['lat', 'long', 'accuracy', 'granularity', + 'max_results'] + ) + + @property + def geo_id(self): + """ :reference: https://dev.twitter.com/rest/reference/get/geo/id/%3Aplace_id + :allowed_param:'id' + """ + return bind_api( + api=self, + path='/geo/id/{id}.json', + payload_type='place', + allowed_param=['id'] + ) + + @property + def geo_search(self): + """ :reference: https://dev.twitter.com/docs/api/1.1/get/geo/search + :allowed_param:'lat', 'long', 'query', 'ip', 'granularity', + 'accuracy', 'max_results', 'contained_within + + """ + return bind_api( + api=self, + path='/geo/search.json', + payload_type='place', payload_list=True, + allowed_param=['lat', 'long', 'query', 'ip', 'granularity', + 'accuracy', 'max_results', 'contained_within'] + ) + + @property + def geo_similar_places(self): + """ :reference: https://dev.twitter.com/rest/reference/get/geo/similar_places + :allowed_param:'lat', 'long', 'name', 'contained_within' + """ + return bind_api( + api=self, + path='/geo/similar_places.json', + payload_type='place', payload_list=True, + allowed_param=['lat', 'long', 'name', 'contained_within'] + ) + + @property + def supported_languages(self): + """ :reference: https://dev.twitter.com/rest/reference/get/help/languages """ + return bind_api( + api=self, + path='/help/languages.json', + payload_type='json', + require_auth=True + ) + + @property + def configuration(self): + """ :reference: https://dev.twitter.com/rest/reference/get/help/configuration """ + return bind_api( + api=self, + path='/help/configuration.json', + payload_type='json', + require_auth=True + ) + + """ Internal use only """ + + @staticmethod + def _pack_image(filename, max_size, form_field="image", f=None): + """Pack image from file into multipart-formdata post body""" + # image must be less than 700kb in size + if f is None: + try: + if os.path.getsize(filename) > (max_size * 1024): + raise TweepError('File is too big, must be less than %skb.' % max_size) + except os.error as e: + raise TweepError('Unable to access file: %s' % e.strerror) + + # build the mulitpart-formdata body + fp = open(filename, 'rb') + else: + f.seek(0, 2) # Seek to end of file + if f.tell() > (max_size * 1024): + raise TweepError('File is too big, must be less than %skb.' 
% max_size) + f.seek(0) # Reset to beginning of file + fp = f + + # image must be gif, jpeg, or png + file_type = mimetypes.guess_type(filename) + if file_type is None: + raise TweepError('Could not determine file type') + file_type = file_type[0] + if file_type not in ['image/gif', 'image/jpeg', 'image/png']: + raise TweepError('Invalid file type for image: %s' % file_type) + + if isinstance(filename, six.text_type): + filename = filename.encode("utf-8") + + BOUNDARY = b'Tw3ePy' + body = list() + body.append(b'--' + BOUNDARY) + body.append('Content-Disposition: form-data; name="{0}";' + ' filename="{1}"'.format(form_field, filename) + .encode('utf-8')) + body.append('Content-Type: {0}'.format(file_type).encode('utf-8')) + body.append(b'') + body.append(fp.read()) + body.append(b'--' + BOUNDARY + b'--') + body.append(b'') + fp.close() + body = b'\r\n'.join(body) + + # build headers + headers = { + 'Content-Type': 'multipart/form-data; boundary=Tw3ePy', + 'Content-Length': str(len(body)) + } + + return headers, body diff --git a/env/lib/python3.6/site-packages/tweepy/auth.py b/env/lib/python3.6/site-packages/tweepy/auth.py new file mode 100644 index 0000000..c157ad8 --- /dev/null +++ b/env/lib/python3.6/site-packages/tweepy/auth.py @@ -0,0 +1,178 @@ +from __future__ import print_function + +import six +import logging + +from tweepy.error import TweepError +from tweepy.api import API +import requests +from requests_oauthlib import OAuth1Session, OAuth1 +from requests.auth import AuthBase +from six.moves.urllib.parse import parse_qs + +WARNING_MESSAGE = """Warning! Due to a Twitter API bug, signin_with_twitter +and access_type don't always play nice together. Details +https://dev.twitter.com/discussions/21281""" + + +class AuthHandler(object): + + def apply_auth(self, url, method, headers, parameters): + """Apply authentication headers to request""" + raise NotImplementedError + + def get_username(self): + """Return the username of the authenticated user""" + raise NotImplementedError + + +class OAuthHandler(AuthHandler): + """OAuth authentication handler""" + OAUTH_HOST = 'api.twitter.com' + OAUTH_ROOT = '/oauth/' + + def __init__(self, consumer_key, consumer_secret, callback=None): + if type(consumer_key) == six.text_type: + consumer_key = consumer_key.encode('ascii') + + if type(consumer_secret) == six.text_type: + consumer_secret = consumer_secret.encode('ascii') + + self.consumer_key = consumer_key + self.consumer_secret = consumer_secret + self.access_token = None + self.access_token_secret = None + self.callback = callback + self.username = None + self.oauth = OAuth1Session(consumer_key, + client_secret=consumer_secret, + callback_uri=self.callback) + + def _get_oauth_url(self, endpoint): + return 'https://' + self.OAUTH_HOST + self.OAUTH_ROOT + endpoint + + def apply_auth(self): + return OAuth1(self.consumer_key, + client_secret=self.consumer_secret, + resource_owner_key=self.access_token, + resource_owner_secret=self.access_token_secret, + decoding=None) + + def _get_request_token(self, access_type=None): + try: + url = self._get_oauth_url('request_token') + if access_type: + url += '?x_auth_access_type=%s' % access_type + return self.oauth.fetch_request_token(url) + except Exception as e: + raise TweepError(e) + + def set_access_token(self, key, secret): + self.access_token = key + self.access_token_secret = secret + + def get_authorization_url(self, + signin_with_twitter=False, + access_type=None): + """Get the authorization URL to redirect the user""" + try: + if 
signin_with_twitter: + url = self._get_oauth_url('authenticate') + if access_type: + logging.warning(WARNING_MESSAGE) + else: + url = self._get_oauth_url('authorize') + self.request_token = self._get_request_token(access_type=access_type) + return self.oauth.authorization_url(url) + except Exception as e: + raise TweepError(e) + + def get_access_token(self, verifier=None): + """ + After user has authorized the request token, get access token + with user supplied verifier. + """ + try: + url = self._get_oauth_url('access_token') + self.oauth = OAuth1Session(self.consumer_key, + client_secret=self.consumer_secret, + resource_owner_key=self.request_token['oauth_token'], + resource_owner_secret=self.request_token['oauth_token_secret'], + verifier=verifier, callback_uri=self.callback) + resp = self.oauth.fetch_access_token(url) + self.access_token = resp['oauth_token'] + self.access_token_secret = resp['oauth_token_secret'] + return self.access_token, self.access_token_secret + except Exception as e: + raise TweepError(e) + + def get_xauth_access_token(self, username, password): + """ + Get an access token from an username and password combination. + In order to get this working you need to create an app at + http://twitter.com/apps, after that send a mail to api@twitter.com + and request activation of xAuth for it. + """ + try: + url = self._get_oauth_url('access_token') + oauth = OAuth1(self.consumer_key, + client_secret=self.consumer_secret) + r = requests.post(url=url, + auth=oauth, + headers={'x_auth_mode': 'client_auth', + 'x_auth_username': username, + 'x_auth_password': password}) + + credentials = parse_qs(r.content) + return credentials.get('oauth_token')[0], credentials.get('oauth_token_secret')[0] + except Exception as e: + raise TweepError(e) + + def get_username(self): + if self.username is None: + api = API(self) + user = api.verify_credentials() + if user: + self.username = user.screen_name + else: + raise TweepError('Unable to get username,' + ' invalid oauth token!') + return self.username + + +class OAuth2Bearer(AuthBase): + def __init__(self, bearer_token): + self.bearer_token = bearer_token + + def __call__(self, request): + request.headers['Authorization'] = 'Bearer ' + self.bearer_token + return request + + +class AppAuthHandler(AuthHandler): + """Application-only authentication handler""" + + OAUTH_HOST = 'api.twitter.com' + OAUTH_ROOT = '/oauth2/' + + def __init__(self, consumer_key, consumer_secret): + self.consumer_key = consumer_key + self.consumer_secret = consumer_secret + self._bearer_token = '' + + resp = requests.post(self._get_oauth_url('token'), + auth=(self.consumer_key, + self.consumer_secret), + data={'grant_type': 'client_credentials'}) + data = resp.json() + if data.get('token_type') != 'bearer': + raise TweepError('Expected token_type to equal "bearer", ' + 'but got %s instead' % data.get('token_type')) + + self._bearer_token = data['access_token'] + + def _get_oauth_url(self, endpoint): + return 'https://' + self.OAUTH_HOST + self.OAUTH_ROOT + endpoint + + def apply_auth(self): + return OAuth2Bearer(self._bearer_token) diff --git a/env/lib/python3.6/site-packages/tweepy/binder.py b/env/lib/python3.6/site-packages/tweepy/binder.py new file mode 100644 index 0000000..e026f4b --- /dev/null +++ b/env/lib/python3.6/site-packages/tweepy/binder.py @@ -0,0 +1,261 @@ +# Tweepy +# Copyright 2009-2010 Joshua Roesslein +# See LICENSE for details. 
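# Illustrative sketch, not part of the vendored tweepy sources or this patch:
# api.py and auth.py above are normally used together: an OAuthHandler is
# configured first and then handed to API().  The credential strings below
# are placeholders, not values taken from this repository.
import tweepy

auth = tweepy.OAuthHandler('CONSUMER_KEY', 'CONSUMER_SECRET')
auth.set_access_token('ACCESS_TOKEN', 'ACCESS_TOKEN_SECRET')
api = tweepy.API(auth,
                 wait_on_rate_limit=True,
                 wait_on_rate_limit_notify=True)

# me() resolves the username via verify_credentials() on first use
print(api.me().screen_name)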
+ +from __future__ import print_function + +import time +import re + +from six.moves.urllib.parse import quote, urlencode +import requests + +import logging + +from tweepy.error import TweepError, RateLimitError, is_rate_limit_error_message +from tweepy.utils import convert_to_utf8_str +from tweepy.models import Model +import six +import sys + + +re_path_template = re.compile('{\w+}') + +log = logging.getLogger('tweepy.binder') + +def bind_api(**config): + + class APIMethod(object): + + api = config['api'] + path = config['path'] + payload_type = config.get('payload_type', None) + payload_list = config.get('payload_list', False) + allowed_param = config.get('allowed_param', []) + method = config.get('method', 'GET') + require_auth = config.get('require_auth', False) + search_api = config.get('search_api', False) + upload_api = config.get('upload_api', False) + use_cache = config.get('use_cache', True) + session = requests.Session() + + def __init__(self, args, kwargs): + api = self.api + # If authentication is required and no credentials + # are provided, throw an error. + if self.require_auth and not api.auth: + raise TweepError('Authentication required!') + + self.post_data = kwargs.pop('post_data', None) + self.retry_count = kwargs.pop('retry_count', + api.retry_count) + self.retry_delay = kwargs.pop('retry_delay', + api.retry_delay) + self.retry_errors = kwargs.pop('retry_errors', + api.retry_errors) + self.wait_on_rate_limit = kwargs.pop('wait_on_rate_limit', + api.wait_on_rate_limit) + self.wait_on_rate_limit_notify = kwargs.pop('wait_on_rate_limit_notify', + api.wait_on_rate_limit_notify) + self.parser = kwargs.pop('parser', api.parser) + self.session.headers = kwargs.pop('headers', {}) + self.build_parameters(args, kwargs) + + # Pick correct URL root to use + if self.search_api: + self.api_root = api.search_root + elif self.upload_api: + self.api_root = api.upload_root + else: + self.api_root = api.api_root + + # Perform any path variable substitution + self.build_path() + + if self.search_api: + self.host = api.search_host + elif self.upload_api: + self.host = api.upload_host + else: + self.host = api.host + + # Manually set Host header to fix an issue in python 2.5 + # or older where Host is set including the 443 port. + # This causes Twitter to issue 301 redirect. + # See Issue https://github.com/tweepy/tweepy/issues/12 + self.session.headers['Host'] = self.host + # Monitoring rate limits + self._remaining_calls = None + self._reset_time = None + + def build_parameters(self, args, kwargs): + self.session.params = {} + for idx, arg in enumerate(args): + if arg is None: + continue + try: + self.session.params[self.allowed_param[idx]] = convert_to_utf8_str(arg) + except IndexError: + raise TweepError('Too many parameters supplied!') + + for k, arg in kwargs.items(): + if arg is None: + continue + if k in self.session.params: + raise TweepError('Multiple values for parameter %s supplied!' % k) + + self.session.params[k] = convert_to_utf8_str(arg) + + log.debug("PARAMS: %r", self.session.params) + + def build_path(self): + for variable in re_path_template.findall(self.path): + name = variable.strip('{}') + + if name == 'user' and 'user' not in self.session.params and self.api.auth: + # No 'user' parameter provided, fetch it from Auth instead. 
+ value = self.api.auth.get_username() + else: + try: + value = quote(self.session.params[name]) + except KeyError: + raise TweepError('No parameter value found for path variable: %s' % name) + del self.session.params[name] + + self.path = self.path.replace(variable, value) + + def execute(self): + self.api.cached_result = False + + # Build the request URL + url = self.api_root + self.path + full_url = 'https://' + self.host + url + + # Query the cache if one is available + # and this request uses a GET method. + if self.use_cache and self.api.cache and self.method == 'GET': + cache_result = self.api.cache.get('%s?%s' % (url, urlencode(self.session.params))) + # if cache result found and not expired, return it + if cache_result: + # must restore api reference + if isinstance(cache_result, list): + for result in cache_result: + if isinstance(result, Model): + result._api = self.api + else: + if isinstance(cache_result, Model): + cache_result._api = self.api + self.api.cached_result = True + return cache_result + + # Continue attempting request until successful + # or maximum number of retries is reached. + retries_performed = 0 + while retries_performed < self.retry_count + 1: + # handle running out of api calls + if self.wait_on_rate_limit: + if self._reset_time is not None: + if self._remaining_calls is not None: + if self._remaining_calls < 1: + sleep_time = self._reset_time - int(time.time()) + if sleep_time > 0: + if self.wait_on_rate_limit_notify: + log.warning("Rate limit reached. Sleeping for: %d" % sleep_time) + time.sleep(sleep_time + 5) # sleep for few extra sec + + # if self.wait_on_rate_limit and self._reset_time is not None and \ + # self._remaining_calls is not None and self._remaining_calls < 1: + # sleep_time = self._reset_time - int(time.time()) + # if sleep_time > 0: + # if self.wait_on_rate_limit_notify: + # log.warning("Rate limit reached. 
Sleeping for: %d" % sleep_time) + # time.sleep(sleep_time + 5) # sleep for few extra sec + + # Apply authentication + auth = None + if self.api.auth: + auth = self.api.auth.apply_auth() + + # Request compression if configured + if self.api.compression: + self.session.headers['Accept-encoding'] = 'gzip' + + # Execute request + try: + resp = self.session.request(self.method, + full_url, + data=self.post_data, + timeout=self.api.timeout, + auth=auth, + proxies=self.api.proxy) + except Exception as e: + six.reraise(TweepError, TweepError('Failed to send request: %s' % e), sys.exc_info()[2]) + + rem_calls = resp.headers.get('x-rate-limit-remaining') + + if rem_calls is not None: + self._remaining_calls = int(rem_calls) + elif isinstance(self._remaining_calls, int): + self._remaining_calls -= 1 + reset_time = resp.headers.get('x-rate-limit-reset') + if reset_time is not None: + self._reset_time = int(reset_time) + if self.wait_on_rate_limit and self._remaining_calls == 0 and ( + # if ran out of calls before waiting switching retry last call + resp.status_code == 429 or resp.status_code == 420): + continue + retry_delay = self.retry_delay + # Exit request loop if non-retry error code + if resp.status_code == 200: + break + elif (resp.status_code == 429 or resp.status_code == 420) and self.wait_on_rate_limit: + if 'retry-after' in resp.headers: + retry_delay = float(resp.headers['retry-after']) + elif self.retry_errors and resp.status_code not in self.retry_errors: + break + + # Sleep before retrying request again + time.sleep(retry_delay) + retries_performed += 1 + + # If an error was returned, throw an exception + self.api.last_response = resp + if resp.status_code and not 200 <= resp.status_code < 300: + try: + error_msg, api_error_code = \ + self.parser.parse_error(resp.text) + except Exception: + error_msg = "Twitter error response: status code = %s" % resp.status_code + api_error_code = None + + if is_rate_limit_error_message(error_msg): + raise RateLimitError(error_msg, resp) + else: + raise TweepError(error_msg, resp, api_code=api_error_code) + + # Parse the response payload + result = self.parser.parse(self, resp.text) + + # Store result into cache if one is available. + if self.use_cache and self.api.cache and self.method == 'GET' and result: + self.api.cache.store('%s?%s' % (url, urlencode(self.session.params)), result) + + return result + + def _call(*args, **kwargs): + method = APIMethod(args, kwargs) + if kwargs.get('create'): + return method + else: + return method.execute() + + # Set pagination mode + if 'cursor' in APIMethod.allowed_param: + _call.pagination_mode = 'cursor' + elif 'max_id' in APIMethod.allowed_param: + if 'since_id' in APIMethod.allowed_param: + _call.pagination_mode = 'id' + elif 'page' in APIMethod.allowed_param: + _call.pagination_mode = 'page' + + return _call diff --git a/env/lib/python3.6/site-packages/tweepy/cache.py b/env/lib/python3.6/site-packages/tweepy/cache.py new file mode 100644 index 0000000..99b7065 --- /dev/null +++ b/env/lib/python3.6/site-packages/tweepy/cache.py @@ -0,0 +1,437 @@ +# Tweepy +# Copyright 2009-2010 Joshua Roesslein +# See LICENSE for details. 
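# Illustrative sketch, not part of the vendored tweepy sources or this patch:
# bind_api() above turns a path plus an allowed_param list into a callable
# endpoint; positional arguments are matched against allowed_param in
# build_parameters(), keyword arguments go straight into the query string,
# and the presence of 'cursor', 'since_id'/'max_id' or 'page' selects the
# pagination mode that tweepy.Cursor (cursor.py, later in this patch) keys
# off.  Credentials below are placeholders.
import tweepy

auth = tweepy.OAuthHandler('CONSUMER_KEY', 'CONSUMER_SECRET')
auth.set_access_token('ACCESS_TOKEN', 'ACCESS_TOKEN_SECRET')
api = tweepy.API(auth)

# screen_name and count end up in session.params and are sent as query
# parameters of GET /statuses/user_timeline.json:
statuses = api.user_timeline(screen_name='twitter', count=10)

# 'since_id'/'max_id' give user_timeline the 'id' pagination mode, so the
# same bound method can be paged with Cursor:
for status in tweepy.Cursor(api.user_timeline, screen_name='twitter').items(30):
    print(status.text)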
+ +from __future__ import print_function + +import time +import datetime +import threading +import os +import logging + +try: + import cPickle as pickle +except ImportError: + import pickle + +try: + import hashlib +except ImportError: + # python 2.4 + import md5 as hashlib + +try: + import fcntl +except ImportError: + # Probably on a windows system + # TODO: use win32file + pass + +log = logging.getLogger('tweepy.cache') + +class Cache(object): + """Cache interface""" + + def __init__(self, timeout=60): + """Initialize the cache + timeout: number of seconds to keep a cached entry + """ + self.timeout = timeout + + def store(self, key, value): + """Add new record to cache + key: entry key + value: data of entry + """ + raise NotImplementedError + + def get(self, key, timeout=None): + """Get cached entry if exists and not expired + key: which entry to get + timeout: override timeout with this value [optional] + """ + raise NotImplementedError + + def count(self): + """Get count of entries currently stored in cache""" + raise NotImplementedError + + def cleanup(self): + """Delete any expired entries in cache.""" + raise NotImplementedError + + def flush(self): + """Delete all cached entries""" + raise NotImplementedError + + +class MemoryCache(Cache): + """In-memory cache""" + + def __init__(self, timeout=60): + Cache.__init__(self, timeout) + self._entries = {} + self.lock = threading.Lock() + + def __getstate__(self): + # pickle + return {'entries': self._entries, 'timeout': self.timeout} + + def __setstate__(self, state): + # unpickle + self.lock = threading.Lock() + self._entries = state['entries'] + self.timeout = state['timeout'] + + def _is_expired(self, entry, timeout): + return timeout > 0 and (time.time() - entry[0]) >= timeout + + def store(self, key, value): + self.lock.acquire() + self._entries[key] = (time.time(), value) + self.lock.release() + + def get(self, key, timeout=None): + self.lock.acquire() + try: + # check to see if we have this key + entry = self._entries.get(key) + if not entry: + # no hit, return nothing + return None + + # use provided timeout in arguments if provided + # otherwise use the one provided during init. 
+ if timeout is None: + timeout = self.timeout + + # make sure entry is not expired + if self._is_expired(entry, timeout): + # entry expired, delete and return nothing + del self._entries[key] + return None + + # entry found and not expired, return it + return entry[1] + finally: + self.lock.release() + + def count(self): + return len(self._entries) + + def cleanup(self): + self.lock.acquire() + try: + for k, v in dict(self._entries).items(): + if self._is_expired(v, self.timeout): + del self._entries[k] + finally: + self.lock.release() + + def flush(self): + self.lock.acquire() + self._entries.clear() + self.lock.release() + + +class FileCache(Cache): + """File-based cache""" + + # locks used to make cache thread-safe + cache_locks = {} + + def __init__(self, cache_dir, timeout=60): + Cache.__init__(self, timeout) + if os.path.exists(cache_dir) is False: + os.mkdir(cache_dir) + self.cache_dir = cache_dir + if cache_dir in FileCache.cache_locks: + self.lock = FileCache.cache_locks[cache_dir] + else: + self.lock = threading.Lock() + FileCache.cache_locks[cache_dir] = self.lock + + if os.name == 'posix': + self._lock_file = self._lock_file_posix + self._unlock_file = self._unlock_file_posix + elif os.name == 'nt': + self._lock_file = self._lock_file_win32 + self._unlock_file = self._unlock_file_win32 + else: + log.warning('FileCache locking not supported on this system!') + self._lock_file = self._lock_file_dummy + self._unlock_file = self._unlock_file_dummy + + def _get_path(self, key): + md5 = hashlib.md5() + md5.update(key.encode('utf-8')) + return os.path.join(self.cache_dir, md5.hexdigest()) + + def _lock_file_dummy(self, path, exclusive=True): + return None + + def _unlock_file_dummy(self, lock): + return + + def _lock_file_posix(self, path, exclusive=True): + lock_path = path + '.lock' + if exclusive is True: + f_lock = open(lock_path, 'w') + fcntl.lockf(f_lock, fcntl.LOCK_EX) + else: + f_lock = open(lock_path, 'r') + fcntl.lockf(f_lock, fcntl.LOCK_SH) + if os.path.exists(lock_path) is False: + f_lock.close() + return None + return f_lock + + def _unlock_file_posix(self, lock): + lock.close() + + def _lock_file_win32(self, path, exclusive=True): + # TODO: implement + return None + + def _unlock_file_win32(self, lock): + # TODO: implement + return + + def _delete_file(self, path): + os.remove(path) + if os.path.exists(path + '.lock'): + os.remove(path + '.lock') + + def store(self, key, value): + path = self._get_path(key) + self.lock.acquire() + try: + # acquire lock and open file + f_lock = self._lock_file(path) + datafile = open(path, 'wb') + + # write data + pickle.dump((time.time(), value), datafile) + + # close and unlock file + datafile.close() + self._unlock_file(f_lock) + finally: + self.lock.release() + + def get(self, key, timeout=None): + return self._get(self._get_path(key), timeout) + + def _get(self, path, timeout): + if os.path.exists(path) is False: + # no record + return None + self.lock.acquire() + try: + # acquire lock and open + f_lock = self._lock_file(path, False) + datafile = open(path, 'rb') + + # read pickled object + created_time, value = pickle.load(datafile) + datafile.close() + + # check if value is expired + if timeout is None: + timeout = self.timeout + if timeout > 0: + if (time.time() - created_time) >= timeout: + # expired! 
delete from cache + value = None + self._delete_file(path) + + # unlock and return result + self._unlock_file(f_lock) + return value + finally: + self.lock.release() + + def count(self): + c = 0 + for entry in os.listdir(self.cache_dir): + if entry.endswith('.lock'): + continue + c += 1 + return c + + def cleanup(self): + for entry in os.listdir(self.cache_dir): + if entry.endswith('.lock'): + continue + self._get(os.path.join(self.cache_dir, entry), None) + + def flush(self): + for entry in os.listdir(self.cache_dir): + if entry.endswith('.lock'): + continue + self._delete_file(os.path.join(self.cache_dir, entry)) + + +class MemCacheCache(Cache): + """Cache interface""" + + def __init__(self, client, timeout=60): + """Initialize the cache + client: The memcache client + timeout: number of seconds to keep a cached entry + """ + self.client = client + self.timeout = timeout + + def store(self, key, value): + """Add new record to cache + key: entry key + value: data of entry + """ + self.client.set(key, value, time=self.timeout) + + def get(self, key, timeout=None): + """Get cached entry if exists and not expired + key: which entry to get + timeout: override timeout with this value [optional]. + DOES NOT WORK HERE + """ + return self.client.get(key) + + def count(self): + """Get count of entries currently stored in cache. RETURN 0""" + raise NotImplementedError + + def cleanup(self): + """Delete any expired entries in cache. NO-OP""" + raise NotImplementedError + + def flush(self): + """Delete all cached entries. NO-OP""" + raise NotImplementedError + + +class RedisCache(Cache): + """Cache running in a redis server""" + + def __init__(self, client, + timeout=60, + keys_container='tweepy:keys', + pre_identifier='tweepy:'): + Cache.__init__(self, timeout) + self.client = client + self.keys_container = keys_container + self.pre_identifier = pre_identifier + + def _is_expired(self, entry, timeout): + # Returns true if the entry has expired + return timeout > 0 and (time.time() - entry[0]) >= timeout + + def store(self, key, value): + """Store the key, value pair in our redis server""" + # Prepend tweepy to our key, + # this makes it easier to identify tweepy keys in our redis server + key = self.pre_identifier + key + # Get a pipe (to execute several redis commands in one step) + pipe = self.client.pipeline() + # Set our values in a redis hash (similar to python dict) + pipe.set(key, pickle.dumps((time.time(), value))) + # Set the expiration + pipe.expire(key, self.timeout) + # Add the key to a set containing all the keys + pipe.sadd(self.keys_container, key) + # Execute the instructions in the redis server + pipe.execute() + + def get(self, key, timeout=None): + """Given a key, returns an element from the redis table""" + key = self.pre_identifier + key + # Check to see if we have this key + unpickled_entry = self.client.get(key) + if not unpickled_entry: + # No hit, return nothing + return None + + entry = pickle.loads(unpickled_entry) + # Use provided timeout in arguments if provided + # otherwise use the one provided during init. 
+ if timeout is None: + timeout = self.timeout + + # Make sure entry is not expired + if self._is_expired(entry, timeout): + # entry expired, delete and return nothing + self.delete_entry(key) + return None + # entry found and not expired, return it + return entry[1] + + def count(self): + """Note: This is not very efficient, + since it retrieves all the keys from the redis + server to know how many keys we have""" + return len(self.client.smembers(self.keys_container)) + + def delete_entry(self, key): + """Delete an object from the redis table""" + pipe = self.client.pipeline() + pipe.srem(self.keys_container, key) + pipe.delete(key) + pipe.execute() + + def cleanup(self): + """Cleanup all the expired keys""" + keys = self.client.smembers(self.keys_container) + for key in keys: + entry = self.client.get(key) + if entry: + entry = pickle.loads(entry) + if self._is_expired(entry, self.timeout): + self.delete_entry(key) + + def flush(self): + """Delete all entries from the cache""" + keys = self.client.smembers(self.keys_container) + for key in keys: + self.delete_entry(key) + + +class MongodbCache(Cache): + """A simple pickle-based MongoDB cache system.""" + + def __init__(self, db, timeout=3600, collection='tweepy_cache'): + """Should receive a "database" cursor from pymongo.""" + Cache.__init__(self, timeout) + self.timeout = timeout + self.col = db[collection] + self.col.create_index('created', expireAfterSeconds=timeout) + + def store(self, key, value): + from bson.binary import Binary + + now = datetime.datetime.utcnow() + blob = Binary(pickle.dumps(value)) + + self.col.insert({'created': now, '_id': key, 'value': blob}) + + def get(self, key, timeout=None): + if timeout: + raise NotImplementedError + obj = self.col.find_one({'_id': key}) + if obj: + return pickle.loads(obj['value']) + + def count(self): + return self.col.find({}).count() + + def delete_entry(self, key): + return self.col.remove({'_id': key}) + + def cleanup(self): + """MongoDB will automatically clear expired keys.""" + pass + + def flush(self): + self.col.drop() + self.col.create_index('created', expireAfterSeconds=self.timeout) diff --git a/env/lib/python3.6/site-packages/tweepy/cursor.py b/env/lib/python3.6/site-packages/tweepy/cursor.py new file mode 100644 index 0000000..1d63f49 --- /dev/null +++ b/env/lib/python3.6/site-packages/tweepy/cursor.py @@ -0,0 +1,214 @@ +# Tweepy +# Copyright 2009-2010 Joshua Roesslein +# See LICENSE for details.
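+# Illustrative usage sketch (an assumption-laden example, not upstream docs):
+# Cursor wraps any API method that declares a `pagination_mode` attribute.
+# Assuming an authenticated `api` object whose `user_timeline` method is
+# pagination-aware, iteration looks roughly like:
+#
+#     for status in Cursor(api.user_timeline, screen_name='example').items(20):
+#         print(status.text)
+#
+#     for page in Cursor(api.user_timeline).pages(3):
+#         pass  # each `page` is one API response worth of statuses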
+ +from __future__ import print_function + +from tweepy.error import TweepError +from tweepy.parsers import ModelParser, RawParser + + +class Cursor(object): + """Pagination helper class""" + + def __init__(self, method, *args, **kargs): + if hasattr(method, 'pagination_mode'): + if method.pagination_mode == 'cursor': + self.iterator = CursorIterator(method, args, kargs) + elif method.pagination_mode == 'id': + self.iterator = IdIterator(method, args, kargs) + elif method.pagination_mode == 'page': + self.iterator = PageIterator(method, args, kargs) + else: + raise TweepError('Invalid pagination mode.') + else: + raise TweepError('This method does not perform pagination') + + def pages(self, limit=0): + """Return iterator for pages""" + if limit > 0: + self.iterator.limit = limit + return self.iterator + + def items(self, limit=0): + """Return iterator for items in each page""" + i = ItemIterator(self.iterator) + i.limit = limit + return i + + +class BaseIterator(object): + + def __init__(self, method, args, kargs): + self.method = method + self.args = args + self.kargs = kargs + self.limit = 0 + + def __next__(self): + return self.next() + + def next(self): + raise NotImplementedError + + def prev(self): + raise NotImplementedError + + def __iter__(self): + return self + + +class CursorIterator(BaseIterator): + + def __init__(self, method, args, kargs): + BaseIterator.__init__(self, method, args, kargs) + start_cursor = kargs.pop('cursor', None) + self.next_cursor = start_cursor or -1 + self.prev_cursor = start_cursor or 0 + self.num_tweets = 0 + + def next(self): + if self.next_cursor == 0 or (self.limit and self.num_tweets == self.limit): + raise StopIteration + data, cursors = self.method(cursor=self.next_cursor, + *self.args, + **self.kargs) + self.prev_cursor, self.next_cursor = cursors + if len(data) == 0: + raise StopIteration + self.num_tweets += 1 + return data + + def prev(self): + if self.prev_cursor == 0: + raise TweepError('Can not page back more, at first page') + data, self.next_cursor, self.prev_cursor = self.method(cursor=self.prev_cursor, + *self.args, + **self.kargs) + self.num_tweets -= 1 + return data + + +class IdIterator(BaseIterator): + + def __init__(self, method, args, kargs): + BaseIterator.__init__(self, method, args, kargs) + self.max_id = kargs.pop('max_id', None) + self.num_tweets = 0 + self.results = [] + self.model_results = [] + self.index = 0 + + def next(self): + """Fetch a set of items with IDs less than current set.""" + if self.limit and self.limit == self.num_tweets: + raise StopIteration + + if self.index >= len(self.results) - 1: + data = self.method(max_id=self.max_id, parser=RawParser(), *self.args, **self.kargs) + + if hasattr(self.method, '__self__'): + old_parser = self.method.__self__.parser + # Hack for models which expect ModelParser to be set + self.method.__self__.parser = ModelParser() + + # This is a special invocation that returns the underlying + # APIMethod class + model = ModelParser().parse(self.method(create=True), data) + if hasattr(self.method, '__self__'): + self.method.__self__.parser = old_parser + result = self.method.__self__.parser.parse(self.method(create=True), data) + else: + result = model + + if len(self.results) != 0: + self.index += 1 + self.results.append(result) + self.model_results.append(model) + else: + self.index += 1 + result = self.results[self.index] + model = self.model_results[self.index] + + if len(result) == 0: + raise StopIteration + # TODO: Make this not dependant on the parser making max_id and + # 
since_id available + self.max_id = model.max_id + self.num_tweets += 1 + return result + + def prev(self): + """Fetch a set of items with IDs greater than current set.""" + if self.limit and self.limit == self.num_tweets: + raise StopIteration + + self.index -= 1 + if self.index < 0: + # There's no way to fetch a set of tweets directly 'above' the + # current set + raise StopIteration + + data = self.results[self.index] + self.max_id = self.model_results[self.index].max_id + self.num_tweets += 1 + return data + + +class PageIterator(BaseIterator): + + def __init__(self, method, args, kargs): + BaseIterator.__init__(self, method, args, kargs) + self.current_page = 0 + + def next(self): + if self.limit > 0: + if self.current_page > self.limit: + raise StopIteration + + items = self.method(page=self.current_page, *self.args, **self.kargs) + if len(items) == 0: + raise StopIteration + self.current_page += 1 + return items + + def prev(self): + if self.current_page == 1: + raise TweepError('Can not page back more, at first page') + self.current_page -= 1 + return self.method(page=self.current_page, *self.args, **self.kargs) + + +class ItemIterator(BaseIterator): + + def __init__(self, page_iterator): + self.page_iterator = page_iterator + self.limit = 0 + self.current_page = None + self.page_index = -1 + self.num_tweets = 0 + + def next(self): + if self.limit > 0: + if self.num_tweets == self.limit: + raise StopIteration + if self.current_page is None or self.page_index == len(self.current_page) - 1: + # Reached end of current page, get the next page... + self.current_page = self.page_iterator.next() + self.page_index = -1 + self.page_index += 1 + self.num_tweets += 1 + return self.current_page[self.page_index] + + def prev(self): + if self.current_page is None: + raise TweepError('Can not go back more, at first page') + if self.page_index == 0: + # At the beginning of the current page, move to next... + self.current_page = self.page_iterator.prev() + self.page_index = len(self.current_page) + if self.page_index == 0: + raise TweepError('No more items') + self.page_index -= 1 + self.num_tweets -= 1 + return self.current_page[self.page_index] diff --git a/env/lib/python3.6/site-packages/tweepy/error.py b/env/lib/python3.6/site-packages/tweepy/error.py new file mode 100644 index 0000000..f7d5894 --- /dev/null +++ b/env/lib/python3.6/site-packages/tweepy/error.py @@ -0,0 +1,34 @@ +# Tweepy +# Copyright 2009-2010 Joshua Roesslein +# See LICENSE for details. + +from __future__ import print_function + +import six + +class TweepError(Exception): + """Tweepy exception""" + + def __init__(self, reason, response=None, api_code=None): + self.reason = six.text_type(reason) + self.response = response + self.api_code = api_code + Exception.__init__(self, reason) + + def __str__(self): + return self.reason + + +def is_rate_limit_error_message(message): + """Check if the supplied error message belongs to a rate limit error.""" + return isinstance(message, list) \ + and len(message) > 0 \ + and 'code' in message[0] \ + and message[0]['code'] == 88 + + +class RateLimitError(TweepError): + """Exception for Tweepy hitting the rate limit.""" + # RateLimitError has the exact same properties and inner workings + # as TweepError for backwards compatibility reasons. 
+ pass diff --git a/env/lib/python3.6/site-packages/tweepy/models.py b/env/lib/python3.6/site-packages/tweepy/models.py new file mode 100644 index 0000000..21449d0 --- /dev/null +++ b/env/lib/python3.6/site-packages/tweepy/models.py @@ -0,0 +1,495 @@ +# Tweepy +# Copyright 2009-2010 Joshua Roesslein +# See LICENSE for details. + +from __future__ import absolute_import, print_function + +from tweepy.utils import parse_datetime, parse_html_value, parse_a_href + + +class ResultSet(list): + """A list like object that holds results from a Twitter API query.""" + def __init__(self, max_id=None, since_id=None): + super(ResultSet, self).__init__() + self._max_id = max_id + self._since_id = since_id + + @property + def max_id(self): + if self._max_id: + return self._max_id + ids = self.ids() + # Max_id is always set to the *smallest* id, minus one, in the set + return (min(ids) - 1) if ids else None + + @property + def since_id(self): + if self._since_id: + return self._since_id + ids = self.ids() + # Since_id is always set to the *greatest* id in the set + return max(ids) if ids else None + + def ids(self): + return [item.id for item in self if hasattr(item, 'id')] + + +class Model(object): + + def __init__(self, api=None): + self._api = api + + def __getstate__(self): + # pickle + pickle = dict(self.__dict__) + try: + del pickle['_api'] # do not pickle the API reference + except KeyError: + pass + return pickle + + @classmethod + def parse(cls, api, json): + """Parse a JSON object into a model instance.""" + raise NotImplementedError + + @classmethod + def parse_list(cls, api, json_list): + """ + Parse a list of JSON objects into + a result set of model instances. + """ + results = ResultSet() + for obj in json_list: + if obj: + results.append(cls.parse(api, obj)) + return results + + def __repr__(self): + state = ['%s=%s' % (k, repr(v)) for (k, v) in vars(self).items()] + return '%s(%s)' % (self.__class__.__name__, ', '.join(state)) + + +class Status(Model): + + @classmethod + def parse(cls, api, json): + status = cls(api) + setattr(status, '_json', json) + for k, v in json.items(): + if k == 'user': + user_model = getattr(api.parser.model_factory, 'user') if api else User + user = user_model.parse(api, v) + setattr(status, 'author', user) + setattr(status, 'user', user) # DEPRECIATED + elif k == 'created_at': + setattr(status, k, parse_datetime(v)) + elif k == 'source': + if '<' in v: + setattr(status, k, parse_html_value(v)) + setattr(status, 'source_url', parse_a_href(v)) + else: + setattr(status, k, v) + setattr(status, 'source_url', None) + elif k == 'retweeted_status': + setattr(status, k, Status.parse(api, v)) + elif k == 'quoted_status': + setattr(status, k, Status.parse(api, v)) + elif k == 'place': + if v is not None: + setattr(status, k, Place.parse(api, v)) + else: + setattr(status, k, None) + else: + setattr(status, k, v) + return status + + def destroy(self): + return self._api.destroy_status(self.id) + + def retweet(self): + return self._api.retweet(self.id) + + def retweets(self): + return self._api.retweets(self.id) + + def favorite(self): + return self._api.create_favorite(self.id) + + def __eq__(self, other): + if isinstance(other, Status): + return self.id == other.id + + return NotImplemented + + def __ne__(self, other): + result = self == other + + if result is NotImplemented: + return result + + return not result + + +class User(Model): + + @classmethod + def parse(cls, api, json): + user = cls(api) + setattr(user, '_json', json) + for k, v in json.items(): + if k == 
'created_at': + setattr(user, k, parse_datetime(v)) + elif k == 'status': + setattr(user, k, Status.parse(api, v)) + elif k == 'following': + # twitter sets this to null if it is false + if v is True: + setattr(user, k, True) + else: + setattr(user, k, False) + else: + setattr(user, k, v) + return user + + @classmethod + def parse_list(cls, api, json_list): + if isinstance(json_list, list): + item_list = json_list + else: + item_list = json_list['users'] + + results = ResultSet() + for obj in item_list: + results.append(cls.parse(api, obj)) + return results + + def timeline(self, **kargs): + return self._api.user_timeline(user_id=self.id, **kargs) + + def friends(self, **kargs): + return self._api.friends(user_id=self.id, **kargs) + + def followers(self, **kargs): + return self._api.followers(user_id=self.id, **kargs) + + def follow(self): + self._api.create_friendship(user_id=self.id) + self.following = True + + def unfollow(self): + self._api.destroy_friendship(user_id=self.id) + self.following = False + + def lists_memberships(self, *args, **kargs): + return self._api.lists_memberships(user=self.screen_name, + *args, + **kargs) + + def lists_subscriptions(self, *args, **kargs): + return self._api.lists_subscriptions(user=self.screen_name, + *args, + **kargs) + + def lists(self, *args, **kargs): + return self._api.lists_all(user=self.screen_name, + *args, + **kargs) + + def followers_ids(self, *args, **kargs): + return self._api.followers_ids(user_id=self.id, + *args, + **kargs) + + +class DirectMessage(Model): + + @classmethod + def parse(cls, api, json): + dm = cls(api) + for k, v in json.items(): + if k == 'sender' or k == 'recipient': + setattr(dm, k, User.parse(api, v)) + elif k == 'created_at': + setattr(dm, k, parse_datetime(v)) + else: + setattr(dm, k, v) + return dm + + def destroy(self): + return self._api.destroy_direct_message(self.id) + + +class Friendship(Model): + + @classmethod + def parse(cls, api, json): + relationship = json['relationship'] + + # parse source + source = cls(api) + for k, v in relationship['source'].items(): + setattr(source, k, v) + + # parse target + target = cls(api) + for k, v in relationship['target'].items(): + setattr(target, k, v) + + return source, target + + +class Category(Model): + + @classmethod + def parse(cls, api, json): + category = cls(api) + for k, v in json.items(): + setattr(category, k, v) + return category + + +class SavedSearch(Model): + + @classmethod + def parse(cls, api, json): + ss = cls(api) + for k, v in json.items(): + if k == 'created_at': + setattr(ss, k, parse_datetime(v)) + else: + setattr(ss, k, v) + return ss + + def destroy(self): + return self._api.destroy_saved_search(self.id) + + +class SearchResults(ResultSet): + + @classmethod + def parse(cls, api, json): + metadata = json['search_metadata'] + results = SearchResults() + results.refresh_url = metadata.get('refresh_url') + results.completed_in = metadata.get('completed_in') + results.query = metadata.get('query') + results.count = metadata.get('count') + results.next_results = metadata.get('next_results') + + status_model = getattr(api.parser.model_factory, 'status') if api else Status + + for status in json['statuses']: + results.append(status_model.parse(api, status)) + return results + + +class List(Model): + + @classmethod + def parse(cls, api, json): + lst = List(api) + for k, v in json.items(): + if k == 'user': + setattr(lst, k, User.parse(api, v)) + elif k == 'created_at': + setattr(lst, k, parse_datetime(v)) + else: + setattr(lst, k, v) + return lst + + 
@classmethod + def parse_list(cls, api, json_list, result_set=None): + results = ResultSet() + if isinstance(json_list, dict): + json_list = json_list['lists'] + for obj in json_list: + results.append(cls.parse(api, obj)) + return results + + def update(self, **kargs): + return self._api.update_list(self.slug, **kargs) + + def destroy(self): + return self._api.destroy_list(self.slug) + + def timeline(self, **kargs): + return self._api.list_timeline(self.user.screen_name, + self.slug, + **kargs) + + def add_member(self, id): + return self._api.add_list_member(self.slug, id) + + def remove_member(self, id): + return self._api.remove_list_member(self.slug, id) + + def members(self, **kargs): + return self._api.list_members(self.user.screen_name, + self.slug, + **kargs) + + def is_member(self, id): + return self._api.is_list_member(self.user.screen_name, + self.slug, + id) + + def subscribe(self): + return self._api.subscribe_list(self.user.screen_name, self.slug) + + def unsubscribe(self): + return self._api.unsubscribe_list(self.user.screen_name, self.slug) + + def subscribers(self, **kargs): + return self._api.list_subscribers(self.user.screen_name, + self.slug, + **kargs) + + def is_subscribed(self, id): + return self._api.is_subscribed_list(self.user.screen_name, + self.slug, + id) + + +class Relation(Model): + @classmethod + def parse(cls, api, json): + result = cls(api) + for k, v in json.items(): + if k == 'value' and json['kind'] in ['Tweet', 'LookedupStatus']: + setattr(result, k, Status.parse(api, v)) + elif k == 'results': + setattr(result, k, Relation.parse_list(api, v)) + else: + setattr(result, k, v) + return result + + +class Relationship(Model): + @classmethod + def parse(cls, api, json): + result = cls(api) + for k, v in json.items(): + if k == 'connections': + setattr(result, 'is_following', 'following' in v) + setattr(result, 'is_followed_by', 'followed_by' in v) + else: + setattr(result, k, v) + return result + + +class JSONModel(Model): + + @classmethod + def parse(cls, api, json): + return json + + +class IDModel(Model): + + @classmethod + def parse(cls, api, json): + if isinstance(json, list): + return json + else: + return json['ids'] + + +class BoundingBox(Model): + + @classmethod + def parse(cls, api, json): + result = cls(api) + if json is not None: + for k, v in json.items(): + setattr(result, k, v) + return result + + def origin(self): + """ + Return longitude, latitude of southwest (bottom, left) corner of + bounding box, as a tuple. + + This assumes that bounding box is always a rectangle, which + appears to be the case at present. + """ + return tuple(self.coordinates[0][0]) + + def corner(self): + """ + Return longitude, latitude of northeast (top, right) corner of + bounding box, as a tuple. + + This assumes that bounding box is always a rectangle, which + appears to be the case at present. + """ + return tuple(self.coordinates[0][2]) + + +class Place(Model): + + @classmethod + def parse(cls, api, json): + place = cls(api) + for k, v in json.items(): + if k == 'bounding_box': + # bounding_box value may be null (None.) + # Example: "United States" (id=96683cc9126741d1) + if v is not None: + t = BoundingBox.parse(api, v) + else: + t = v + setattr(place, k, t) + elif k == 'contained_within': + # contained_within is a list of Places. 
+ setattr(place, k, Place.parse_list(api, v)) + else: + setattr(place, k, v) + return place + + @classmethod + def parse_list(cls, api, json_list): + if isinstance(json_list, list): + item_list = json_list + else: + item_list = json_list['result']['places'] + + results = ResultSet() + for obj in item_list: + results.append(cls.parse(api, obj)) + return results + + +class Media(Model): + + @classmethod + def parse(cls, api, json): + media = cls(api) + for k, v in json.items(): + setattr(media, k, v) + return media + + +class ModelFactory(object): + """ + Used by parsers for creating instances + of models. You may subclass this factory + to add your own extended models. + """ + + status = Status + user = User + direct_message = DirectMessage + friendship = Friendship + saved_search = SavedSearch + search_results = SearchResults + category = Category + list = List + relation = Relation + relationship = Relationship + media = Media + + json = JSONModel + ids = IDModel + place = Place + bounding_box = BoundingBox diff --git a/env/lib/python3.6/site-packages/tweepy/parsers.py b/env/lib/python3.6/site-packages/tweepy/parsers.py new file mode 100644 index 0000000..046cf50 --- /dev/null +++ b/env/lib/python3.6/site-packages/tweepy/parsers.py @@ -0,0 +1,109 @@ +# Tweepy +# Copyright 2009-2010 Joshua Roesslein +# See LICENSE for details. + +from __future__ import print_function + +from tweepy.models import ModelFactory +from tweepy.utils import import_simplejson +from tweepy.error import TweepError + + +class Parser(object): + + def parse(self, method, payload): + """ + Parse the response payload and return the result. + Returns a tuple that contains the result data and the cursors + (or None if not present). + """ + raise NotImplementedError + + def parse_error(self, payload): + """ + Parse the error message and api error code from payload. + Return them as an (error_msg, error_code) tuple. If unable to parse the + message, throw an exception and default error message will be used. 
+ """ + raise NotImplementedError + + +class RawParser(Parser): + + def __init__(self): + pass + + def parse(self, method, payload): + return payload + + def parse_error(self, payload): + return payload + + +class JSONParser(Parser): + + payload_format = 'json' + + def __init__(self): + self.json_lib = import_simplejson() + + def parse(self, method, payload): + try: + json = self.json_lib.loads(payload) + except Exception as e: + raise TweepError('Failed to parse JSON payload: %s' % e) + + needs_cursors = 'cursor' in method.session.params + if needs_cursors and isinstance(json, dict) \ + and 'previous_cursor' in json \ + and 'next_cursor' in json: + cursors = json['previous_cursor'], json['next_cursor'] + return json, cursors + else: + return json + + def parse_error(self, payload): + error_object = self.json_lib.loads(payload) + + if 'error' in error_object: + reason = error_object['error'] + api_code = error_object.get('code') + else: + reason = error_object['errors'] + api_code = [error.get('code') for error in + reason if error.get('code')] + api_code = api_code[0] if len(api_code) == 1 else api_code + + return reason, api_code + + +class ModelParser(JSONParser): + + def __init__(self, model_factory=None): + JSONParser.__init__(self) + self.model_factory = model_factory or ModelFactory + + def parse(self, method, payload): + try: + if method.payload_type is None: + return + model = getattr(self.model_factory, method.payload_type) + except AttributeError: + raise TweepError('No model for this payload type: ' + '%s' % method.payload_type) + + json = JSONParser.parse(self, method, payload) + if isinstance(json, tuple): + json, cursors = json + else: + cursors = None + + if method.payload_list: + result = model.parse_list(method.api, json) + else: + result = model.parse(method.api, json) + + if cursors: + return result, cursors + else: + return result diff --git a/env/lib/python3.6/site-packages/tweepy/streaming.py b/env/lib/python3.6/site-packages/tweepy/streaming.py new file mode 100644 index 0000000..43aa823 --- /dev/null +++ b/env/lib/python3.6/site-packages/tweepy/streaming.py @@ -0,0 +1,471 @@ +# Tweepy +# Copyright 2009-2010 Joshua Roesslein +# See LICENSE for details. + +# Appengine users: https://developers.google.com/appengine/docs/python/sockets/#making_httplib_use_sockets + +from __future__ import absolute_import, print_function + +import logging +import re +import requests +import sys +from requests.exceptions import Timeout +from threading import Thread +from time import sleep + +import six + +import ssl + +from tweepy.models import Status +from tweepy.api import API +from tweepy.error import TweepError + +from tweepy.utils import import_simplejson +json = import_simplejson() + +STREAM_VERSION = '1.1' + + +class StreamListener(object): + + def __init__(self, api=None): + self.api = api or API() + + def on_connect(self): + """Called once connected to streaming server. + + This will be invoked once a successful response + is received from the server. Allows the listener + to perform some work prior to entering the read loop. + """ + pass + + def on_data(self, raw_data): + """Called when raw data is received from connection. + + Override this method if you wish to manually handle + the stream data. Return False to stop stream and close connection. 
+ """ + data = json.loads(raw_data) + + if 'in_reply_to_status_id' in data: + status = Status.parse(self.api, data) + if self.on_status(status) is False: + return False + elif 'delete' in data: + delete = data['delete']['status'] + if self.on_delete(delete['id'], delete['user_id']) is False: + return False + elif 'event' in data: + status = Status.parse(self.api, data) + if self.on_event(status) is False: + return False + elif 'direct_message' in data: + status = Status.parse(self.api, data) + if self.on_direct_message(status) is False: + return False + elif 'friends' in data: + if self.on_friends(data['friends']) is False: + return False + elif 'limit' in data: + if self.on_limit(data['limit']['track']) is False: + return False + elif 'disconnect' in data: + if self.on_disconnect(data['disconnect']) is False: + return False + elif 'warning' in data: + if self.on_warning(data['warning']) is False: + return False + else: + logging.error("Unknown message type: " + str(raw_data)) + + def keep_alive(self): + """Called when a keep-alive arrived""" + return + + def on_status(self, status): + """Called when a new status arrives""" + return + + def on_exception(self, exception): + """Called when an unhandled exception occurs.""" + return + + def on_delete(self, status_id, user_id): + """Called when a delete notice arrives for a status""" + return + + def on_event(self, status): + """Called when a new event arrives""" + return + + def on_direct_message(self, status): + """Called when a new direct message arrives""" + return + + def on_friends(self, friends): + """Called when a friends list arrives. + + friends is a list that contains user_id + """ + return + + def on_limit(self, track): + """Called when a limitation notice arrives""" + return + + def on_error(self, status_code): + """Called when a non-200 status code is returned""" + return False + + def on_timeout(self): + """Called when stream connection times out""" + return + + def on_disconnect(self, notice): + """Called when twitter sends a disconnect notice + + Disconnect codes are listed here: + https://dev.twitter.com/docs/streaming-apis/messages#Disconnect_messages_disconnect + """ + return + + def on_warning(self, notice): + """Called when a disconnection warning message arrives""" + return + + +class ReadBuffer(object): + """Buffer data from the response in a smarter way than httplib/requests can. + + Tweets are roughly in the 2-12kb range, averaging around 3kb. + Requests/urllib3/httplib/socket all use socket.read, which blocks + until enough data is returned. On some systems (eg google appengine), socket + reads are quite slow. To combat this latency we can read big chunks, + but the blocking part means we won't get results until enough tweets + have arrived. That may not be a big deal for high throughput systems. + For low throughput systems we don't want to sacrafice latency, so we + use small chunks so it can read the length and the tweet in 2 read calls. + """ + + def __init__(self, stream, chunk_size, encoding='utf-8'): + self._stream = stream + self._buffer = six.b('') + self._chunk_size = chunk_size + self._encoding = encoding + + def read_len(self, length): + while not self._stream.closed: + if len(self._buffer) >= length: + return self._pop(length) + read_len = max(self._chunk_size, length - len(self._buffer)) + self._buffer += self._stream.read(read_len) + return six.b('') + + def read_line(self, sep=six.b('\n')): + """Read the data stream until a given separator is found (default \n) + + :param sep: Separator to read until. 
Must by of the bytes type (str in python 2, + bytes in python 3) + :return: The str of the data read until sep + """ + start = 0 + while not self._stream.closed: + loc = self._buffer.find(sep, start) + if loc >= 0: + return self._pop(loc + len(sep)) + else: + start = len(self._buffer) + self._buffer += self._stream.read(self._chunk_size) + return six.b('') + + def _pop(self, length): + r = self._buffer[:length] + self._buffer = self._buffer[length:] + return r.decode(self._encoding) + + +class Stream(object): + + host = 'stream.twitter.com' + + def __init__(self, auth, listener, **options): + self.auth = auth + self.listener = listener + self.running = False + self.timeout = options.get("timeout", 300.0) + self.retry_count = options.get("retry_count") + # values according to + # https://dev.twitter.com/docs/streaming-apis/connecting#Reconnecting + self.retry_time_start = options.get("retry_time", 5.0) + self.retry_420_start = options.get("retry_420", 60.0) + self.retry_time_cap = options.get("retry_time_cap", 320.0) + self.snooze_time_step = options.get("snooze_time", 0.25) + self.snooze_time_cap = options.get("snooze_time_cap", 16) + + # The default socket.read size. Default to less than half the size of + # a tweet so that it reads tweets with the minimal latency of 2 reads + # per tweet. Values higher than ~1kb will increase latency by waiting + # for more data to arrive but may also increase throughput by doing + # fewer socket read calls. + self.chunk_size = options.get("chunk_size", 512) + + self.verify = options.get("verify", True) + + self.api = API() + self.headers = options.get("headers") or {} + self.new_session() + self.body = None + self.retry_time = self.retry_time_start + self.snooze_time = self.snooze_time_step + + def new_session(self): + self.session = requests.Session() + self.session.headers = self.headers + self.session.params = None + + def _run(self): + # Authenticate + url = "https://%s%s" % (self.host, self.url) + + # Connect and process the stream + error_counter = 0 + resp = None + exc_info = None + while self.running: + if self.retry_count is not None: + if error_counter > self.retry_count: + # quit if error count greater than retry count + break + try: + auth = self.auth.apply_auth() + resp = self.session.request('POST', + url, + data=self.body, + timeout=self.timeout, + stream=True, + auth=auth, + verify=self.verify) + if resp.status_code != 200: + if self.listener.on_error(resp.status_code) is False: + break + error_counter += 1 + if resp.status_code == 420: + self.retry_time = max(self.retry_420_start, + self.retry_time) + sleep(self.retry_time) + self.retry_time = min(self.retry_time * 2, + self.retry_time_cap) + else: + error_counter = 0 + self.retry_time = self.retry_time_start + self.snooze_time = self.snooze_time_step + self.listener.on_connect() + self._read_loop(resp) + except (Timeout, ssl.SSLError) as exc: + # This is still necessary, as a SSLError can actually be + # thrown when using Requests + # If it's not time out treat it like any other exception + if isinstance(exc, ssl.SSLError): + if not (exc.args and 'timed out' in str(exc.args[0])): + exc_info = sys.exc_info() + break + if self.listener.on_timeout() is False: + break + if self.running is False: + break + sleep(self.snooze_time) + self.snooze_time = min(self.snooze_time + self.snooze_time_step, + self.snooze_time_cap) + except Exception as exc: + exc_info = sys.exc_info() + # any other exception is fatal, so kill loop + break + + # cleanup + self.running = False + if resp: + resp.close() + 
+ self.new_session() + + if exc_info: + # call a handler first so that the exception can be logged. + self.listener.on_exception(exc_info[1]) + six.reraise(*exc_info) + + def _data(self, data): + if self.listener.on_data(data) is False: + self.running = False + + def _read_loop(self, resp): + charset = resp.headers.get('content-type', default='') + enc_search = re.search('charset=(?P<enc>\S*)', charset) + if enc_search is not None: + encoding = enc_search.group('enc') + else: + encoding = 'utf-8' + + buf = ReadBuffer(resp.raw, self.chunk_size, encoding=encoding) + + while self.running and not resp.raw.closed: + length = 0 + while not resp.raw.closed: + line = buf.read_line().strip() + if not line: + self.listener.keep_alive() # keep-alive new lines are expected + elif line.strip().isdigit(): + length = int(line) + break + else: + raise TweepError('Expecting length, unexpected value found') + + next_status_obj = buf.read_len(length) + if self.running and next_status_obj: + self._data(next_status_obj) + + # # Note: keep-alive newlines might be inserted before each length value. + # # read until we get a digit... + # c = b'\n' + # for c in resp.iter_content(decode_unicode=True): + # if c == b'\n': + # continue + # break + # + # delimited_string = c + # + # # read rest of delimiter length.. + # d = b'' + # for d in resp.iter_content(decode_unicode=True): + # if d != b'\n': + # delimited_string += d + # continue + # break + # + # # read the next twitter status object + # if delimited_string.decode('utf-8').strip().isdigit(): + # status_id = int(delimited_string) + # next_status_obj = resp.raw.read(status_id) + # if self.running: + # self._data(next_status_obj.decode('utf-8')) + + + if resp.raw.closed: + self.on_closed(resp) + + def _start(self, async): + self.running = True + if async: + self._thread = Thread(target=self._run) + self._thread.start() + else: + self._run() + + def on_closed(self, resp): + """ Called when the response has been closed by Twitter """ + pass + + def userstream(self, + stall_warnings=False, + _with=None, + replies=None, + track=None, + locations=None, + async=False, + encoding='utf8'): + self.session.params = {'delimited': 'length'} + if self.running: + raise TweepError('Stream object already connected!') + self.url = '/%s/user.json' % STREAM_VERSION + self.host = 'userstream.twitter.com' + if stall_warnings: + self.session.params['stall_warnings'] = stall_warnings + if _with: + self.session.params['with'] = _with + if replies: + self.session.params['replies'] = replies + if locations and len(locations) > 0: + if len(locations) % 4 != 0: + raise TweepError("Wrong number of locations points, " + "it has to be a multiple of 4") + self.session.params['locations'] = ','.join(['%.2f' % l for l in locations]) + if track: + self.session.params['track'] = u','.join(track).encode(encoding) + + self._start(async) + + def firehose(self, count=None, async=False): + self.session.params = {'delimited': 'length'} + if self.running: + raise TweepError('Stream object already connected!') + self.url = '/%s/statuses/firehose.json' % STREAM_VERSION + if count: + self.url += '&count=%s' % count + self._start(async) + + def retweet(self, async=False): + self.session.params = {'delimited': 'length'} + if self.running: + raise TweepError('Stream object already connected!') + self.url = '/%s/statuses/retweet.json' % STREAM_VERSION + self._start(async) + + def sample(self, async=False, languages=None, stall_warnings=False): + self.session.params = {'delimited': 'length'} + if self.running: + raise
TweepError('Stream object already connected!') + self.url = '/%s/statuses/sample.json' % STREAM_VERSION + if languages: + self.session.params['language'] = ','.join(map(str, languages)) + if stall_warnings: + self.session.params['stall_warnings'] = 'true' + self._start(async) + + def filter(self, follow=None, track=None, async=False, locations=None, + stall_warnings=False, languages=None, encoding='utf8', filter_level=None): + self.body = {} + self.session.headers['Content-type'] = "application/x-www-form-urlencoded" + if self.running: + raise TweepError('Stream object already connected!') + self.url = '/%s/statuses/filter.json' % STREAM_VERSION + if follow: + self.body['follow'] = u','.join(follow).encode(encoding) + if track: + self.body['track'] = u','.join(track).encode(encoding) + if locations and len(locations) > 0: + if len(locations) % 4 != 0: + raise TweepError("Wrong number of locations points, " + "it has to be a multiple of 4") + self.body['locations'] = u','.join(['%.4f' % l for l in locations]) + if stall_warnings: + self.body['stall_warnings'] = stall_warnings + if languages: + self.body['language'] = u','.join(map(str, languages)) + if filter_level: + self.body['filter_level'] = filter_level.encode(encoding) + self.session.params = {'delimited': 'length'} + self.host = 'stream.twitter.com' + self._start(async) + + def sitestream(self, follow, stall_warnings=False, + with_='user', replies=False, async=False): + self.body = {} + if self.running: + raise TweepError('Stream object already connected!') + self.url = '/%s/site.json' % STREAM_VERSION + self.body['follow'] = u','.join(map(six.text_type, follow)) + self.body['delimited'] = 'length' + if stall_warnings: + self.body['stall_warnings'] = stall_warnings + if with_: + self.body['with'] = with_ + if replies: + self.body['replies'] = replies + self._start(async) + + def disconnect(self): + if self.running is False: + return + self.running = False diff --git a/env/lib/python3.6/site-packages/tweepy/utils.py b/env/lib/python3.6/site-packages/tweepy/utils.py new file mode 100644 index 0000000..36d3402 --- /dev/null +++ b/env/lib/python3.6/site-packages/tweepy/utils.py @@ -0,0 +1,58 @@ +# Tweepy +# Copyright 2010 Joshua Roesslein +# See LICENSE for details. 
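+# Illustrative behaviour sketch for the helpers defined below (the sample tag
+# and values are hypothetical, not upstream documentation):
+#
+#     parse_html_value('<a href="http://example.com">Example</a>')
+#         -> 'Example'
+#     parse_a_href('<a href="http://example.com">Example</a>')
+#         -> 'http://example.com'
+#     list_to_csv([123, 456]) -> '123,456'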
+ +from __future__ import print_function + +from datetime import datetime + +import six +from six.moves.urllib.parse import quote + +from email.utils import parsedate + + +def parse_datetime(string): + return datetime(*(parsedate(string)[:6])) + + +def parse_html_value(html): + + return html[html.find('>')+1:html.rfind('<')] + + +def parse_a_href(atag): + + start = atag.find('"') + 1 + end = atag.find('"', start) + return atag[start:end] + + +def convert_to_utf8_str(arg): + # written by Michael Norton (http://docondev.blogspot.com/) + if isinstance(arg, six.text_type): + arg = arg.encode('utf-8') + elif not isinstance(arg, bytes): + arg = six.text_type(arg).encode('utf-8') + return arg + + +def import_simplejson(): + try: + import simplejson as json + except ImportError: + try: + import json # Python 2.6+ + except ImportError: + try: + # Google App Engine + from django.utils import simplejson as json + except ImportError: + raise ImportError("Can't load a json library") + + return json + + +def list_to_csv(item_list): + if item_list: + return ','.join([str(i) for i in item_list]) diff --git a/env/lib/python3.6/site-packages/twitter-1.18.0.dist-info/DESCRIPTION.rst b/env/lib/python3.6/site-packages/twitter-1.18.0.dist-info/DESCRIPTION.rst new file mode 100644 index 0000000..6713797 --- /dev/null +++ b/env/lib/python3.6/site-packages/twitter-1.18.0.dist-info/DESCRIPTION.rst @@ -0,0 +1,374 @@ +Python Twitter Tools +==================== + +|Build Status| |Coverage Status| + +The Minimalist Twitter API for Python is a Python API for Twitter, +everyone's favorite Web 2.0 Facebook-style status updater for people on +the go. + +Also included is a Twitter command-line tool for getting your friends' +tweets and setting your own tweet from the safety and security of your +favorite shell and an IRC bot that can announce Twitter updates to an +IRC channel. + +For more information, after installing the ``twitter`` package: + +- import the ``twitter`` package and run ``help()`` on it +- run ``twitter -h`` for command-line tool help + +twitter - The Command-Line Tool +------------------------------- + +The command-line tool lets you do some awesome things: + +- view your tweets, recent replies, and tweets in lists +- view the public timeline +- follow and unfollow (leave) friends +- various output formats for tweet information + +The bottom line: type ``twitter``, receive tweets. + +twitterbot - The IRC Bot +------------------------ + +The IRC bot is associated with a Twitter account (either your own +account or an account you create for the bot). The bot announces all +tweets from friends it is following. It can be made to follow or leave +friends through IRC /msg commands. + +``twitter-log`` +--------------- + +``twitter-log`` is a simple command-line tool that dumps all public +tweets from a given user in a simple text format. It is useful to get a +complete offsite backup of all your tweets. Run ``twitter-log`` and read +the instructions. + +``twitter-archiver`` and ``twitter-follow`` +------------------------------------------- + +twitter-archiver will log all the tweets posted by any user since they +started posting. twitter-follow will print a list of all of all the +followers of a user (or all the users that user follows). + +Programming with the Twitter API classes +======================================== + +The ``Twitter`` and ``TwitterStream`` classes are the key to building +your own Twitter-enabled applications. 
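+
+As a quick orientation, here is a minimal, illustrative sketch touching both
+classes (the credentials are placeholders, and the same calls are shown again
+with full context in the sections below):
+
+.. code:: python
+
+    from twitter import Twitter, TwitterStream, OAuth
+
+    auth = OAuth(token, token_secret, consumer_key, consumer_secret)
+
+    # RESTful calls go through the Twitter class
+    t = Twitter(auth=auth)
+    latest = t.statuses.home_timeline(count=5)
+
+    # Streaming data comes from the TwitterStream class as an iterator
+    twitter_stream = TwitterStream(auth=auth)
+    for tweet in twitter_stream.statuses.sample():
+        break  # each `tweet` is a decoded dict; handle it here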
+ +The ``Twitter`` class +--------------------- + +The minimalist yet fully featured Twitter API class. + +Get RESTful data by accessing members of this class. The result is +decoded python objects (lists and dicts). + +The Twitter API is documented at: + +**https://dev.twitter.com/overview/documentation** + +The list of most accessible functions is listed at: + +**https://dev.twitter.com/rest/public** + +Examples: + +.. code:: python + + from twitter import * + + t = Twitter( + auth=OAuth(token, token_secret, consumer_key, consumer_secret)) + + # Get your "home" timeline + t.statuses.home_timeline() + + # Get a particular friend's timeline + t.statuses.user_timeline(screen_name="billybob") + + # to pass in GET/POST parameters, such as `count` + t.statuses.home_timeline(count=5) + + # to pass in the GET/POST parameter `id` you need to use `_id` + t.statuses.oembed(_id=1234567890) + + # Update your status + t.statuses.update( + status="Using @sixohsix's sweet Python Twitter Tools.") + + # Send a direct message + t.direct_messages.new( + user="billybob", + text="I think yer swell!") + + # Get the members of tamtar's list "Things That Are Rad" + t.lists.members(owner_screen_name="tamtar", slug="things-that-are-rad") + + # An *optional* `_timeout` parameter can also be used for API + # calls which take much more time than normal or twitter stops + # responding for some reason: + t.users.lookup( + screen_name=','.join(A_LIST_OF_100_SCREEN_NAMES), _timeout=1) + + # Overriding Method: GET/POST + # you should not need to use this method as this library properly + # detects whether GET or POST should be used, Nevertheless + # to force a particular method, use `_method` + t.statuses.oembed(_id=1234567890, _method='GET') + + # Send images along with your tweets: + # - first just read images from the web or from files the regular way: + with open("example.png", "rb") as imagefile: + imagedata = imagefile.read() + # - then upload medias one by one on Twitter's dedicated server + # and collect each one's id: + t_upload = Twitter(domain='upload.twitter.com', + auth=OAuth(token, token_secret, consumer_key, consumer_secret)) + id_img1 = t_upload.media.upload(media=imagedata)["media_id_string"] + id_img2 = t_upload.media.upload(media=imagedata)["media_id_string"] + # - finally send your tweet with the list of media ids: + t.statuses.update(status="PTT ★", media_ids=",".join([id_img1, id_img2])) + + # Or send a tweet with an image (or set a logo/banner similarily) + # using the old deprecated method that will probably disappear some day + params = {"media[]": imagedata, "status": "PTT ★"} + # Or for an image encoded as base64: + params = {"media[]": base64_image, "status": "PTT ★", "_base64": True} + t.statuses.update_with_media(**params) + + # Attach text metadata to medias sent, using the upload.twitter.com route + # using the _json workaround to send json arguments as POST body + # (warning: to be done before attaching the media to a tweet) + t_upload.media.metadata.create(_json={ + "media_id": id_img1, + "alt_text": { "text": "metadata generated via PTT!" } + }) + # or with the shortcut arguments ("alt_text" and "text" work): + t_upload.media.metadata.create(media_id=id_img1, text="metadata generated via PTT!") + +Searching Twitter: + +.. 
code:: python + + # Search for the latest tweets about #pycon + t.search.tweets(q="#pycon") + +Retrying after reaching the API rate limit +------------------------------------------ + +Simply create the ``Twitter`` instance with the argument ``retry=True``, +then the HTTP error codes ``429``, ``502``, ``503``, and ``504`` will +cause a retry of the last request. + +If ``retry`` is an integer, it defines the maximum number of retry +attempts. + +Using the data returned +----------------------- + +Twitter API calls return decoded JSON. This is converted into a bunch of +Python lists, dicts, ints, and strings. For example: + +.. code:: python + + x = twitter.statuses.home_timeline() + + # The first 'tweet' in the timeline + x[0] + + # The screen name of the user who wrote the first 'tweet' + x[0]['user']['screen_name'] + +Getting raw XML data +-------------------- + +If you prefer to get your Twitter data in XML format, pass +``format="xml"`` to the ``Twitter`` object when you instantiate it: + +.. code:: python + + twitter = Twitter(format="xml") + +The output will not be parsed in any way. It will be a raw string of +XML. + +The ``TwitterStream`` class +--------------------------- + +The ``TwitterStream`` object is an interface to the Twitter Stream API. +This can be used pretty much the same as the ``Twitter`` class, except +the result of calling a method will be an iterator that yields objects +decoded from the stream. For example:: + +.. code:: python + + twitter_stream = TwitterStream(auth=OAuth(...)) + iterator = twitter_stream.statuses.sample() + + for tweet in iterator: + ...do something with this tweet... + +Per default the ``TwitterStream`` object uses `public +streams `__. +If you want to use one of the other `streaming +APIs `__, specify the URL +manually: + +- `Public + streams `__: + stream.twitter.com +- `User + streams `__: + userstream.twitter.com +- `Site + streams `__: + sitestream.twitter.com + +Note that you require the proper +`permissions `__ +to access these streams. (E.g., for direct messages, your +`application `__ needs the "Read, Write & +Direct Messages" permission.) + +The following example demonstrates how to retrieve all new direct +messages from the user stream: + +.. code:: python + + auth = OAuth( + consumer_key='[your consumer key]', + consumer_secret='[your consumer secret]', + token='[your token]', + token_secret='[your token secret]' + ) + twitter_userstream = TwitterStream(auth=auth, domain='userstream.twitter.com') + for msg in twitter_userstream.user(): + if 'direct_message' in msg: + print msg['direct_message']['text'] + +The iterator will ``yield`` until the TCP connection breaks. When the +connection breaks, the iterator yields ``{'hangup': True}`` (and raises +``StopIteration`` if iterated again). + +Similarly, if the stream does not produce heartbeats for more than 90 +seconds, the iterator yields +``{'hangup': True, 'heartbeat_timeout': True}`` (and raises +``StopIteration`` if iterated again). + +The ``timeout`` parameter controls the maximum time between yields. If +it is nonzero, then the iterator will yield either stream data or +``{'timeout': True}`` within the timeout period. This is useful if you +want your program to do other stuff in between waiting for tweets. + +The ``block`` parameter sets the stream to be fully non-blocking. In +this mode, the iterator always yields immediately. It returns stream +data, or ``None``. 
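+
+A hedged sketch of this non-blocking mode, reusing the ``auth`` object from
+the example above and assuming (as with ``timeout``) that ``block`` is passed
+when constructing ``TwitterStream``; ``do_other_work`` and ``handle`` stand in
+for your own code:
+
+.. code:: python
+
+    twitter_stream = TwitterStream(auth=auth, block=False)
+
+    for msg in twitter_stream.statuses.sample():
+        if msg is None:
+            do_other_work()  # nothing arrived yet; placeholder for your code
+            continue
+        handle(msg)          # placeholder for your tweet handling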
+ +Note that ``timeout`` supercedes this argument, so it should also be set +``None`` to use this mode, and non-blocking can potentially lead to 100% +CPU usage. + +Twitter ``Response`` Objects +---------------------------- + +Response from a Twitter request. Behaves like a list or a string +(depending on requested format), but it has a few other interesting +attributes. + +``headers`` gives you access to the response headers as an +``httplib.HTTPHeaders`` instance. Use ``response.headers.get('h')`` to +retrieve a header. + +Authentication +-------------- + +You can authenticate with Twitter in three ways: NoAuth, OAuth, or +OAuth2 (app-only). Get ``help()`` on these classes to learn how to use +them. + +OAuth and OAuth2 are probably the most useful. + +Working with OAuth +------------------ + +Visit the Twitter developer page and create a new application: + +**https://dev.twitter.com/apps/new** + +This will get you a ``CONSUMER_KEY`` and ``CONSUMER_SECRET``. + +When users run your application they have to authenticate your app with +their Twitter account. A few HTTP calls to Twitter are required to do +this. Please see the ``twitter.oauth_dance`` module to see how this is +done. If you are making a command-line app, you can use the +``oauth_dance()`` function directly. + +Performing the "oauth dance" gets you an oauth token and oauth secret +that authenticate the user with Twitter. You should save these for +later, so that the user doesn't have to do the oauth dance again. + +``read_token_file`` and ``write_token_file`` are utility methods to read +and write OAuth ``token`` and ``secret`` key values. The values are +stored as strings in the file. Not terribly exciting. + +Finally, you can use the ``OAuth`` authenticator to connect to Twitter. +In code it all goes like this: + +.. code:: python + + from twitter import * + + MY_TWITTER_CREDS = os.path.expanduser('~/.my_app_credentials') + if not os.path.exists(MY_TWITTER_CREDS): + oauth_dance("My App Name", CONSUMER_KEY, CONSUMER_SECRET, + MY_TWITTER_CREDS) + + oauth_token, oauth_secret = read_token_file(MY_TWITTER_CREDS) + + twitter = Twitter(auth=OAuth( + oauth_token, oauth_secret, CONSUMER_KEY, CONSUMER_SECRET)) + + # Now work with Twitter + twitter.statuses.update(status='Hello, world!') + +Working with ``OAuth2`` +----------------------- + +Twitter only supports the application-only flow of OAuth2 for certain +API endpoints. This OAuth2 authenticator only supports the +application-only flow right now. + +To authenticate with OAuth2, visit the Twitter developer page and create +a new application: + +**https://dev.twitter.com/apps/new** + +This will get you a ``CONSUMER_KEY`` and ``CONSUMER_SECRET``. + +Exchange your ``CONSUMER_KEY`` and ``CONSUMER_SECRET`` for a bearer +token using the ``oauth2_dance`` function. + +Finally, you can use the ``OAuth2`` authenticator and your bearer token +to connect to Twitter. In code it goes like this:: + +.. code:: python + + twitter = Twitter(auth=OAuth2(bearer_token=BEARER_TOKEN)) + + # Now work with Twitter + twitter.search.tweets(q='keyword') + +License +======= + +Python Twitter Tools are released under an MIT License. + +.. |Build Status| image:: https://travis-ci.org/sixohsix/twitter.svg + :target: https://travis-ci.org/sixohsix/twitter +.. 
|Coverage Status| image:: https://coveralls.io/repos/sixohsix/twitter/badge.png?branch=master + :target: https://coveralls.io/r/sixohsix/twitter?branch=master + + diff --git a/env/lib/python3.6/site-packages/twitter-1.18.0.dist-info/INSTALLER b/env/lib/python3.6/site-packages/twitter-1.18.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/env/lib/python3.6/site-packages/twitter-1.18.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/env/lib/python3.6/site-packages/twitter-1.18.0.dist-info/METADATA b/env/lib/python3.6/site-packages/twitter-1.18.0.dist-info/METADATA new file mode 100644 index 0000000..c7702f5 --- /dev/null +++ b/env/lib/python3.6/site-packages/twitter-1.18.0.dist-info/METADATA @@ -0,0 +1,406 @@ +Metadata-Version: 2.0 +Name: twitter +Version: 1.18.0 +Summary: An API and command-line toolset for Twitter (twitter.com) +Home-page: http://mike.verdone.ca/twitter/ +Author: Mike Verdone +Author-email: mike.verdone+twitterapi@gmail.com +License: MIT License +Description-Content-Type: UNKNOWN +Keywords: twitter,IRC,command-line tools,web 2.0 +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Console +Classifier: Intended Audience :: End Users/Desktop +Classifier: Natural Language :: English +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.2 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Communications :: Chat :: Internet Relay Chat +Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content :: CGI Tools/Libraries +Classifier: Topic :: Utilities +Classifier: License :: OSI Approved :: MIT License + +Python Twitter Tools +==================== + +|Build Status| |Coverage Status| + +The Minimalist Twitter API for Python is a Python API for Twitter, +everyone's favorite Web 2.0 Facebook-style status updater for people on +the go. + +Also included is a Twitter command-line tool for getting your friends' +tweets and setting your own tweet from the safety and security of your +favorite shell and an IRC bot that can announce Twitter updates to an +IRC channel. + +For more information, after installing the ``twitter`` package: + +- import the ``twitter`` package and run ``help()`` on it +- run ``twitter -h`` for command-line tool help + +twitter - The Command-Line Tool +------------------------------- + +The command-line tool lets you do some awesome things: + +- view your tweets, recent replies, and tweets in lists +- view the public timeline +- follow and unfollow (leave) friends +- various output formats for tweet information + +The bottom line: type ``twitter``, receive tweets. + +twitterbot - The IRC Bot +------------------------ + +The IRC bot is associated with a Twitter account (either your own +account or an account you create for the bot). The bot announces all +tweets from friends it is following. It can be made to follow or leave +friends through IRC /msg commands. 
+
+``twitter-log``
+---------------
+
+``twitter-log`` is a simple command-line tool that dumps all public
+tweets from a given user in a simple text format. It is useful to get a
+complete offsite backup of all your tweets. Run ``twitter-log`` and read
+the instructions.
+
+``twitter-archiver`` and ``twitter-follow``
+-------------------------------------------
+
+twitter-archiver will log all the tweets posted by any user since they
+started posting. twitter-follow will print a list of all the
+followers of a user (or all the users that user follows).
+
+Programming with the Twitter API classes
+========================================
+
+The ``Twitter`` and ``TwitterStream`` classes are the key to building
+your own Twitter-enabled applications.
+
+The ``Twitter`` class
+---------------------
+
+The minimalist yet fully featured Twitter API class.
+
+Get RESTful data by accessing members of this class. The result is
+decoded Python objects (lists and dicts).
+
+The Twitter API is documented at:
+
+**https://dev.twitter.com/overview/documentation**
+
+The list of most accessible functions is listed at:
+
+**https://dev.twitter.com/rest/public**
+
+Examples:
+
+.. code:: python
+
+    from twitter import *
+
+    t = Twitter(
+        auth=OAuth(token, token_secret, consumer_key, consumer_secret))
+
+    # Get your "home" timeline
+    t.statuses.home_timeline()
+
+    # Get a particular friend's timeline
+    t.statuses.user_timeline(screen_name="billybob")
+
+    # to pass in GET/POST parameters, such as `count`
+    t.statuses.home_timeline(count=5)
+
+    # to pass in the GET/POST parameter `id` you need to use `_id`
+    t.statuses.oembed(_id=1234567890)
+
+    # Update your status
+    t.statuses.update(
+        status="Using @sixohsix's sweet Python Twitter Tools.")
+
+    # Send a direct message
+    t.direct_messages.new(
+        user="billybob",
+        text="I think yer swell!")
+
+    # Get the members of tamtar's list "Things That Are Rad"
+    t.lists.members(owner_screen_name="tamtar", slug="things-that-are-rad")
+
+    # An *optional* `_timeout` parameter can also be used for API
+    # calls which take much more time than normal, or when twitter
+    # stops responding for some reason:
+    t.users.lookup(
+        screen_name=','.join(A_LIST_OF_100_SCREEN_NAMES), _timeout=1)
+
+    # Overriding Method: GET/POST
+    # you should not need to use this method as this library properly
+    # detects whether GET or POST should be used. Nevertheless, to
+    # force a particular method, use `_method`
+    t.statuses.oembed(_id=1234567890, _method='GET')
+
+    # Send images along with your tweets:
+    # - first just read images from the web or from files the regular way:
+    with open("example.png", "rb") as imagefile:
+        imagedata = imagefile.read()
+    # - then upload medias one by one on Twitter's dedicated server
+    #   and collect each one's id:
+    t_upload = Twitter(domain='upload.twitter.com',
+        auth=OAuth(token, token_secret, consumer_key, consumer_secret))
+    id_img1 = t_upload.media.upload(media=imagedata)["media_id_string"]
+    id_img2 = t_upload.media.upload(media=imagedata)["media_id_string"]
+    # - finally send your tweet with the list of media ids:
+    t.statuses.update(status="PTT ★", media_ids=",".join([id_img1, id_img2]))
+
+    # Or send a tweet with an image (or set a logo/banner similarly)
+    # using the old deprecated method that will probably disappear some day
+    params = {"media[]": imagedata, "status": "PTT ★"}
+    # Or for an image encoded as base64:
+    params = {"media[]": base64_image, "status": "PTT ★", "_base64": True}
+    t.statuses.update_with_media(**params)
+
+    # Attach text 
metadata to medias sent, using the upload.twitter.com route
+    # using the _json workaround to send json arguments as POST body
+    # (warning: to be done before attaching the media to a tweet)
+    t_upload.media.metadata.create(_json={
+        "media_id": id_img1,
+        "alt_text": { "text": "metadata generated via PTT!" }
+    })
+    # or with the shortcut arguments ("alt_text" and "text" work):
+    t_upload.media.metadata.create(media_id=id_img1, text="metadata generated via PTT!")
+
+Searching Twitter:
+
+.. code:: python
+
+    # Search for the latest tweets about #pycon
+    t.search.tweets(q="#pycon")
+
+Retrying after reaching the API rate limit
+------------------------------------------
+
+Simply create the ``Twitter`` instance with the argument ``retry=True``,
+then the HTTP error codes ``429``, ``502``, ``503``, and ``504`` will
+cause a retry of the last request.
+
+If ``retry`` is an integer, it defines the maximum number of retry
+attempts.
+
+Using the data returned
+-----------------------
+
+Twitter API calls return decoded JSON. This is converted into a bunch of
+Python lists, dicts, ints, and strings. For example:
+
+.. code:: python
+
+    x = twitter.statuses.home_timeline()
+
+    # The first 'tweet' in the timeline
+    x[0]
+
+    # The screen name of the user who wrote the first 'tweet'
+    x[0]['user']['screen_name']
+
+Getting raw XML data
+--------------------
+
+If you prefer to get your Twitter data in XML format, pass
+``format="xml"`` to the ``Twitter`` object when you instantiate it:
+
+.. code:: python
+
+    twitter = Twitter(format="xml")
+
+The output will not be parsed in any way. It will be a raw string of
+XML.
+
+The ``TwitterStream`` class
+---------------------------
+
+The ``TwitterStream`` object is an interface to the Twitter Stream API.
+This can be used pretty much the same as the ``Twitter`` class, except
+the result of calling a method will be an iterator that yields objects
+decoded from the stream. For example:
+
+.. code:: python
+
+    twitter_stream = TwitterStream(auth=OAuth(...))
+    iterator = twitter_stream.statuses.sample()
+
+    for tweet in iterator:
+        print(tweet)  # ...do something with this tweet...
+
+By default the ``TwitterStream`` object uses `public
+streams `__.
+If you want to use one of the other `streaming
+APIs `__, specify the URL
+manually:
+
+- `Public
+  streams `__:
+  stream.twitter.com
+- `User
+  streams `__:
+  userstream.twitter.com
+- `Site
+  streams `__:
+  sitestream.twitter.com
+
+Note that you require the proper
+`permissions `__
+to access these streams. (E.g., for direct messages, your
+`application `__ needs the "Read, Write &
+Direct Messages" permission.)
+
+The following example demonstrates how to retrieve all new direct
+messages from the user stream:
+
+.. code:: python
+
+    auth = OAuth(
+        consumer_key='[your consumer key]',
+        consumer_secret='[your consumer secret]',
+        token='[your token]',
+        token_secret='[your token secret]'
+    )
+    twitter_userstream = TwitterStream(auth=auth, domain='userstream.twitter.com')
+    for msg in twitter_userstream.user():
+        if 'direct_message' in msg:
+            print(msg['direct_message']['text'])
+
+The iterator will ``yield`` until the TCP connection breaks. When the
+connection breaks, the iterator yields ``{'hangup': True}`` (and raises
+``StopIteration`` if iterated again).
+
+Similarly, if the stream does not produce heartbeats for more than 90
+seconds, the iterator yields
+``{'hangup': True, 'heartbeat_timeout': True}`` (and raises
+``StopIteration`` if iterated again).
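+
+A consumer loop therefore has to treat these control dictionaries
+separately from ordinary tweets. The following minimal sketch handles
+both signals; the reconnect-and-back-off policy is an assumption of this
+example, not something the library mandates:
+
+.. code:: python
+
+    import time
+
+    def consume(twitter_stream):
+        """Iterate over a stream, reconnecting whenever Twitter hangs up."""
+        while True:
+            for msg in twitter_stream.statuses.sample():
+                if msg is None:
+                    continue          # only seen in non-blocking mode
+                if msg.get('hangup'):
+                    # Covers plain hangups and heartbeat timeouts alike.
+                    break
+                if 'text' in msg:
+                    print(msg['text'])
+            time.sleep(5)             # back off briefly, then reconnect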
+
+The ``timeout`` parameter controls the maximum time between yields. If
+it is nonzero, then the iterator will yield either stream data or
+``{'timeout': True}`` within the timeout period. This is useful if you
+want your program to do other stuff in between waiting for tweets.
+
+The ``block`` parameter sets the stream to be fully non-blocking. In
+this mode, the iterator always yields immediately. It returns stream
+data, or ``None``.
+
+Note that ``timeout`` supersedes this argument, so it should also be set
+to ``None`` to use this mode, and non-blocking can potentially lead to
+100% CPU usage.
+
+Twitter ``Response`` Objects
+----------------------------
+
+Response from a Twitter request. Behaves like a list or a string
+(depending on requested format), but it has a few other interesting
+attributes.
+
+``headers`` gives you access to the response headers as an
+``httplib.HTTPHeaders`` instance. Use ``response.headers.get('h')`` to
+retrieve a header.
+
+Authentication
+--------------
+
+You can authenticate with Twitter in three ways: NoAuth, OAuth, or
+OAuth2 (app-only). Get ``help()`` on these classes to learn how to use
+them.
+
+OAuth and OAuth2 are probably the most useful.
+
+Working with OAuth
+------------------
+
+Visit the Twitter developer page and create a new application:
+
+**https://dev.twitter.com/apps/new**
+
+This will get you a ``CONSUMER_KEY`` and ``CONSUMER_SECRET``.
+
+When users run your application they have to authenticate your app with
+their Twitter account. A few HTTP calls to Twitter are required to do
+this. Please see the ``twitter.oauth_dance`` module to see how this is
+done. If you are making a command-line app, you can use the
+``oauth_dance()`` function directly.
+
+Performing the "oauth dance" gets you an oauth token and oauth secret
+that authenticate the user with Twitter. You should save these for
+later, so that the user doesn't have to do the oauth dance again.
+
+``read_token_file`` and ``write_token_file`` are utility methods to read
+and write OAuth ``token`` and ``secret`` key values. The values are
+stored as strings in the file. Not terribly exciting.
+
+Finally, you can use the ``OAuth`` authenticator to connect to Twitter.
+In code it all goes like this:
+
+.. code:: python
+
+    from twitter import *
+
+    MY_TWITTER_CREDS = os.path.expanduser('~/.my_app_credentials')
+    if not os.path.exists(MY_TWITTER_CREDS):
+        oauth_dance("My App Name", CONSUMER_KEY, CONSUMER_SECRET,
+                    MY_TWITTER_CREDS)
+
+    oauth_token, oauth_secret = read_token_file(MY_TWITTER_CREDS)
+
+    twitter = Twitter(auth=OAuth(
+        oauth_token, oauth_secret, CONSUMER_KEY, CONSUMER_SECRET))
+
+    # Now work with Twitter
+    twitter.statuses.update(status='Hello, world!')
+
+Working with ``OAuth2``
+-----------------------
+
+Twitter only supports the application-only flow of OAuth2 for certain
+API endpoints. This OAuth2 authenticator only supports the
+application-only flow right now.
+
+To authenticate with OAuth2, visit the Twitter developer page and create
+a new application:
+
+**https://dev.twitter.com/apps/new**
+
+This will get you a ``CONSUMER_KEY`` and ``CONSUMER_SECRET``.
+
+Exchange your ``CONSUMER_KEY`` and ``CONSUMER_SECRET`` for a bearer
+token using the ``oauth2_dance`` function.
+
+Finally, you can use the ``OAuth2`` authenticator and your bearer token
+to connect to Twitter. In code it goes like this:
+
+.. 
code:: python + + twitter = Twitter(auth=OAuth2(bearer_token=BEARER_TOKEN)) + + # Now work with Twitter + twitter.search.tweets(q='keyword') + +License +======= + +Python Twitter Tools are released under an MIT License. + +.. |Build Status| image:: https://travis-ci.org/sixohsix/twitter.svg + :target: https://travis-ci.org/sixohsix/twitter +.. |Coverage Status| image:: https://coveralls.io/repos/sixohsix/twitter/badge.png?branch=master + :target: https://coveralls.io/r/sixohsix/twitter?branch=master + + diff --git a/env/lib/python3.6/site-packages/twitter-1.18.0.dist-info/RECORD b/env/lib/python3.6/site-packages/twitter-1.18.0.dist-info/RECORD new file mode 100644 index 0000000..1c83bca --- /dev/null +++ b/env/lib/python3.6/site-packages/twitter-1.18.0.dist-info/RECORD @@ -0,0 +1,51 @@ +twitter/__init__.py,sha256=MxGDgsEDM1VeENwS2Cv2nWJNYsPCyeTHkgDGMNvMA0c,1708 +twitter/ansi.py,sha256=Y-gSFTNGmkd5owpe-UI5ddQ70A8fpgTInHPPdpcqKl4,2750 +twitter/api.py,sha256=4cR6re08wO5CdcB651RI6bjnSvvJhm6RceQjPSxGfKM,19290 +twitter/archiver.py,sha256=iBI4er-PJ2_kk2DH8VDFKMqc7y5RMuIQAI3AZTWNlwk,17078 +twitter/auth.py,sha256=7gJ5Y310jOfVsX8onYkM0-S0KpR9ZJW2Ha1S_-gfmz4,1730 +twitter/cmdline.py,sha256=70Uup9xzVUEMUKltczwx4h-R5FrBPKrvIqI-QDtbgNE,26323 +twitter/corrupt.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +twitter/follow.py,sha256=Y9rG-EZvdP47CEM1n9BLV6_6W2R2NDQCCHn4uBw3_xA,9544 +twitter/ircbot.py,sha256=mEtUGcDHN-OuaCBl9IyD5_P-nis01lLcf-hNZ-s8MFY,11713 +twitter/logger.py,sha256=I8h-qjzOP7G72anL2W2XMibryXpJZ_e-Jcf0_ryugcs,3003 +twitter/oauth.py,sha256=RVWBwoWjswmjnQ1LgmL3G7eWm8hcqsPz6dtk43OOi6s,4930 +twitter/oauth2.py,sha256=tLUj3JIx40KyKhKc4Eukk5m7lnIjX02DfQO1PY6o53o,2933 +twitter/oauth_dance.py,sha256=ZYjRgOlXwYUhwo4pODp0fXLZSatKJTiUAr2FaSt4JAA,3938 +twitter/stream.py,sha256=aH0sT3NpuTtCQpf7lN0TPXfE-8OcCj6eekahdgvM4Ks,10072 +twitter/stream_example.py,sha256=XFp0bf18-KpVcbxqzEzlKzyf_0bbT2zO_fvPMAZSm-c,3448 +twitter/timezones.py,sha256=650NvSukqy9cyzm3ehL4K43zLk_N0ifZ_nLZGj_L-vA,1766 +twitter/twitter_globals.py,sha256=zlKxCUxtum5miUw_JTG9r7G5_a-3ZQTF9NXPJRwrY7c,1053 +twitter/util.py,sha256=dExdTPxnoG2alNgjrkCKGF3yOzqu3to0iBGm4LPKD5k,5089 +twitter-1.18.0.dist-info/DESCRIPTION.rst,sha256=wVjbx-EjbTPRZpZrMfOm0FQf37Km9IJZV6aF-XZjHlU,12634 +twitter-1.18.0.dist-info/METADATA,sha256=TWxtmzKppRpUDcrhjcoFfJEGFNRbUcJ3Sbksv1cth_I,14037 +twitter-1.18.0.dist-info/RECORD,, +twitter-1.18.0.dist-info/WHEEL,sha256=kdsN-5OJAZIiHN-iO4Rhl82KyS0bDWf4uBwMbkNafr8,110 +twitter-1.18.0.dist-info/entry_points.txt,sha256=giGWSDhYgzE794e_Jr7F--z6pgsBRB8PrDkk_nUwc3Y,314 +twitter-1.18.0.dist-info/metadata.json,sha256=eP706YJUE0tBAnBXgwgDrUl3yNM1TU-5QUM7vGgaKuQ,2082 +twitter-1.18.0.dist-info/top_level.txt,sha256=Lk3uYTLW9Re19q-YY6s4Ym16s5DENnuA23PgX9powBA,8 +twitter-1.18.0.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1 +../../../bin/twitter,sha256=5ncflOveWzzrX0JxzVcui9h3IcadaXUsHzqF8OoqWWk,284 +../../../bin/twitterbot,sha256=wJjIpH8g6DL4z8dPAxi9r_fomAGZDKpYUds-zRcZpJQ,283 +../../../bin/twitter-log,sha256=j7Y5GfRzIdeWnem6ymrhMTl6WpDwyOa6vv5uDP8Eehg,283 +../../../bin/twitter-archiver,sha256=fircpHHMe9nfUkE-JMey9jy1xlcmFj1P6sVAaBZiIcg,285 +../../../bin/twitter-follow,sha256=_xNf4vvRZCMyEa-NMskvObmauthUOhu2vbLLblNX3dE,283 +../../../bin/twitter-stream-example,sha256=YvijALES4xSZ1wpxK4diptV6wnCzdMmyiP9TuiHcLM4,291 +twitter-1.18.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +twitter/__pycache__/oauth_dance.cpython-36.pyc,, +twitter/__pycache__/util.cpython-36.pyc,, 
+twitter/__pycache__/oauth2.cpython-36.pyc,, +twitter/__pycache__/timezones.cpython-36.pyc,, +twitter/__pycache__/stream.cpython-36.pyc,, +twitter/__pycache__/ansi.cpython-36.pyc,, +twitter/__pycache__/follow.cpython-36.pyc,, +twitter/__pycache__/auth.cpython-36.pyc,, +twitter/__pycache__/archiver.cpython-36.pyc,, +twitter/__pycache__/twitter_globals.cpython-36.pyc,, +twitter/__pycache__/corrupt.cpython-36.pyc,, +twitter/__pycache__/stream_example.cpython-36.pyc,, +twitter/__pycache__/ircbot.cpython-36.pyc,, +twitter/__pycache__/__init__.cpython-36.pyc,, +twitter/__pycache__/api.cpython-36.pyc,, +twitter/__pycache__/cmdline.cpython-36.pyc,, +twitter/__pycache__/logger.cpython-36.pyc,, +twitter/__pycache__/oauth.cpython-36.pyc,, diff --git a/env/lib/python3.6/site-packages/twitter-1.18.0.dist-info/WHEEL b/env/lib/python3.6/site-packages/twitter-1.18.0.dist-info/WHEEL new file mode 100644 index 0000000..7332a41 --- /dev/null +++ b/env/lib/python3.6/site-packages/twitter-1.18.0.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.30.0) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/env/lib/python3.6/site-packages/twitter-1.18.0.dist-info/entry_points.txt b/env/lib/python3.6/site-packages/twitter-1.18.0.dist-info/entry_points.txt new file mode 100644 index 0000000..cd1d5a4 --- /dev/null +++ b/env/lib/python3.6/site-packages/twitter-1.18.0.dist-info/entry_points.txt @@ -0,0 +1,10 @@ + + # -*- Entry points: -*- + [console_scripts] + twitter=twitter.cmdline:main + twitterbot=twitter.ircbot:main + twitter-log=twitter.logger:main + twitter-archiver=twitter.archiver:main + twitter-follow=twitter.follow:main + twitter-stream-example=twitter.stream_example:main + \ No newline at end of file diff --git a/env/lib/python3.6/site-packages/twitter-1.18.0.dist-info/metadata.json b/env/lib/python3.6/site-packages/twitter-1.18.0.dist-info/metadata.json new file mode 100644 index 0000000..8c4a923 --- /dev/null +++ b/env/lib/python3.6/site-packages/twitter-1.18.0.dist-info/metadata.json @@ -0,0 +1 @@ +{"classifiers": ["Development Status :: 5 - Production/Stable", "Environment :: Console", "Intended Audience :: End Users/Desktop", "Natural Language :: English", "Operating System :: OS Independent", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.2", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", "Topic :: Communications :: Chat :: Internet Relay Chat", "Topic :: Internet :: WWW/HTTP :: Dynamic Content :: CGI Tools/Libraries", "Topic :: Utilities", "License :: OSI Approved :: MIT License"], "description_content_type": "UNKNOWN", "extensions": {"python.commands": {"wrap_console": {"twitter": "twitter.cmdline:main", "twitter-archiver": "twitter.archiver:main", "twitter-follow": "twitter.follow:main", "twitter-log": "twitter.logger:main", "twitter-stream-example": "twitter.stream_example:main", "twitterbot": "twitter.ircbot:main"}}, "python.details": {"contacts": [{"email": "mike.verdone+twitterapi@gmail.com", "name": "Mike Verdone", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "http://mike.verdone.ca/twitter/"}}, 
"python.exports": {"console_scripts": {"twitter": "twitter.cmdline:main", "twitter-archiver": "twitter.archiver:main", "twitter-follow": "twitter.follow:main", "twitter-log": "twitter.logger:main", "twitter-stream-example": "twitter.stream_example:main", "twitterbot": "twitter.ircbot:main"}}}, "generator": "bdist_wheel (0.30.0)", "keywords": ["twitter", "IRC", "command-line", "tools", "web", "2.0"], "license": "MIT License", "metadata_version": "2.0", "name": "twitter", "summary": "An API and command-line toolset for Twitter (twitter.com)", "test_requires": [{"requires": ["nose"]}], "version": "1.18.0"} \ No newline at end of file diff --git a/env/lib/python3.6/site-packages/twitter-1.18.0.dist-info/top_level.txt b/env/lib/python3.6/site-packages/twitter-1.18.0.dist-info/top_level.txt new file mode 100644 index 0000000..37d1823 --- /dev/null +++ b/env/lib/python3.6/site-packages/twitter-1.18.0.dist-info/top_level.txt @@ -0,0 +1 @@ +twitter diff --git a/env/lib/python3.6/site-packages/twitter-1.18.0.dist-info/zip-safe b/env/lib/python3.6/site-packages/twitter-1.18.0.dist-info/zip-safe new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/env/lib/python3.6/site-packages/twitter-1.18.0.dist-info/zip-safe @@ -0,0 +1 @@ + diff --git a/env/lib/python3.6/site-packages/twitter/ansi.py b/env/lib/python3.6/site-packages/twitter/ansi.py new file mode 100644 index 0000000..0ae2071 --- /dev/null +++ b/env/lib/python3.6/site-packages/twitter/ansi.py @@ -0,0 +1,102 @@ +""" +Support for ANSI colours in command-line client. + +.. data:: ESC + ansi escape character + +.. data:: RESET + ansi reset colour (ansi value) + +.. data:: COLOURS_NAMED + dict of colour names mapped to their ansi value + +.. data:: COLOURS_MIDS + A list of ansi values for Mid Spectrum Colours +""" + +import itertools +import sys + +ESC = chr(0x1B) +RESET = "0" + +COLOURS_NAMED = dict(list(zip( + ['black', 'red', 'green', 'yellow', 'blue', 'magenta', 'cyan', 'white'], + [str(x) for x in range(30, 38)] +))) +COLOURS_MIDS = [ + colour for name, colour in list(COLOURS_NAMED.items()) + if name not in ('black', 'white') +] + +class AnsiColourException(Exception): + ''' Exception while processing ansi colours ''' + pass + +class ColourMap(object): + ''' + Object that allows for mapping strings to ansi colour values. + ''' + def __init__(self, colors=COLOURS_MIDS): + ''' uses the list of ansi `colors` values to initialize the map ''' + self._cmap = {} + self._colourIter = itertools.cycle(colors) + + def colourFor(self, string): + ''' + Returns an ansi colour value given a `string`. + The same ansi colour value is always returned for the same string + ''' + if string not in self._cmap: + self._cmap[string] = next(self._colourIter) + return self._cmap[string] + +class AnsiCmd(object): + def __init__(self, forceAnsi): + self.forceAnsi = forceAnsi + + def cmdReset(self): + ''' Returns the ansi cmd colour for a RESET ''' + if sys.stdout.isatty() or self.forceAnsi: + return ESC + "[0m" + else: + return "" + + def cmdColour(self, colour): + ''' + Return the ansi cmd colour (i.e. 
escape sequence) + for the ansi `colour` value + ''' + if sys.stdout.isatty() or self.forceAnsi: + return ESC + "[" + colour + "m" + else: + return "" + + def cmdColourNamed(self, colour): + ''' Return the ansi cmdColour for a given named `colour` ''' + try: + return self.cmdColour(COLOURS_NAMED[colour]) + except KeyError: + raise AnsiColourException('Unknown Colour %s' % (colour)) + + def cmdBold(self): + if sys.stdout.isatty() or self.forceAnsi: + return ESC + "[1m" + else: + return "" + + def cmdUnderline(self): + if sys.stdout.isatty() or self.forceAnsi: + return ESC + "[4m" + else: + return "" + +"""These exist to maintain compatibility with users of version<=1.9.0""" +def cmdReset(): + return AnsiCmd(False).cmdReset() + +def cmdColour(colour): + return AnsiCmd(False).cmdColour(colour) + +def cmdColourNamed(colour): + return AnsiCmd(False).cmdColourNamed(colour) diff --git a/env/lib/python3.6/site-packages/twitter/api.py b/env/lib/python3.6/site-packages/twitter/api.py new file mode 100644 index 0000000..fa6fea2 --- /dev/null +++ b/env/lib/python3.6/site-packages/twitter/api.py @@ -0,0 +1,566 @@ +# encoding: utf-8 +from __future__ import unicode_literals, print_function + +from .util import PY_3_OR_HIGHER, actually_bytes + +try: + import urllib.request as urllib_request + import urllib.error as urllib_error +except ImportError: + import urllib2 as urllib_request + import urllib2 as urllib_error + +try: + from cStringIO import StringIO +except ImportError: + from io import BytesIO as StringIO + +from .twitter_globals import POST_ACTIONS +from .auth import NoAuth + +import re +import sys +import gzip +from time import sleep, time + +try: + import http.client as http_client +except ImportError: + import httplib as http_client + +try: + import json +except ImportError: + import simplejson as json + + +class _DEFAULT(object): + pass + + +class TwitterError(Exception): + """ + Base Exception thrown by the Twitter object when there is a + general error interacting with the API. + """ + pass + + +class TwitterHTTPError(TwitterError): + """ + Exception thrown by the Twitter object when there is an + HTTP error interacting with twitter.com. + """ + def __init__(self, e, uri, format, uriparts): + self.e = e + self.uri = uri + self.format = format + self.uriparts = uriparts + try: + data = self.e.fp.read() + except http_client.IncompleteRead as e: + # can't read the error text + # let's try some of it + data = e.partial + if self.e.headers.get('Content-Encoding') == 'gzip': + buf = StringIO(data) + f = gzip.GzipFile(fileobj=buf) + data = f.read() + if len(data) == 0: + data = {} + else: + data = data.decode('utf8') + if "json" == self.format: + try: + data = json.loads(data) + except ValueError: + # We try to load the response as json as a nicety; if it fails, carry on. + pass + self.response_data = data + super(TwitterHTTPError, self).__init__(str(self)) + + def __str__(self): + fmt = ("." + self.format) if self.format else "" + return ( + "Twitter sent status %i for URL: %s%s using parameters: " + "(%s)\ndetails: %s" % ( + self.e.code, self.uri, fmt, self.uriparts, + self.response_data)) + + +class TwitterResponse(object): + """ + Response from a twitter request. Behaves like a list or a string + (depending on requested format) but it has a few other interesting + attributes. + + `headers` gives you access to the response headers as an + httplib.HTTPHeaders instance. You can do + `response.headers.get('h')` to retrieve a header. 
+ """ + + @property + def rate_limit_remaining(self): + """ + Remaining requests in the current rate-limit. + """ + return int(self.headers.get('X-Rate-Limit-Remaining', "0")) + + @property + def rate_limit_limit(self): + """ + The rate limit ceiling for that given request. + """ + return int(self.headers.get('X-Rate-Limit-Limit', "0")) + + @property + def rate_limit_reset(self): + """ + Time in UTC epoch seconds when the rate limit will reset. + """ + return int(self.headers.get('X-Rate-Limit-Reset', "0")) + + +class TwitterDictResponse(dict, TwitterResponse): + pass + + +class TwitterListResponse(list, TwitterResponse): + pass + + +def wrap_response(response, headers): + response_typ = type(response) + if response_typ is dict: + res = TwitterDictResponse(response) + res.headers = headers + elif response_typ is list: + res = TwitterListResponse(response) + res.headers = headers + else: + res = response + return res + + +POST_ACTIONS_RE = re.compile('(' + '|'.join(POST_ACTIONS) + r')(/\d+)?$') + +def method_for_uri(uri): + if POST_ACTIONS_RE.search(uri): + return "POST" + return "GET" + + +def build_uri(orig_uriparts, kwargs): + """ + Build the URI from the original uriparts and kwargs. Modifies kwargs. + """ + uriparts = [] + for uripart in orig_uriparts: + # If this part matches a keyword argument (starting with _), use + # the supplied value. Otherwise, just use the part. + if uripart.startswith("_"): + part = (str(kwargs.pop(uripart, uripart))) + else: + part = uripart + uriparts.append(part) + uri = '/'.join(uriparts) + + # If an id kwarg is present and there is no id to fill in in + # the list of uriparts, assume the id goes at the end. + id = kwargs.pop('id', None) + if id: + uri += "/%s" % (id) + + return uri + + +class TwitterCall(object): + + TWITTER_UNAVAILABLE_WAIT = 30 # delay after HTTP codes 502, 503 or 504 + + def __init__( + self, auth, format, domain, callable_cls, uri="", + uriparts=None, secure=True, timeout=None, gzip=False, retry=False): + self.auth = auth + self.format = format + self.domain = domain + self.callable_cls = callable_cls + self.uri = uri + self.uriparts = uriparts + self.secure = secure + self.timeout = timeout + self.gzip = gzip + self.retry = retry + + def __getattr__(self, k): + try: + return object.__getattr__(self, k) + except AttributeError: + def extend_call(arg): + return self.callable_cls( + auth=self.auth, format=self.format, domain=self.domain, + callable_cls=self.callable_cls, timeout=self.timeout, + secure=self.secure, gzip=self.gzip, retry=self.retry, + uriparts=self.uriparts + (arg,)) + if k == "_": + return extend_call + else: + return extend_call(k) + + def __call__(self, **kwargs): + kwargs = dict(kwargs) + uri = build_uri(self.uriparts, kwargs) + + # Shortcut call arguments for special json arguments case + if "media/metadata/create" in uri: + media_id = kwargs.pop('media_id', None) + alt_text = kwargs.pop('alt_text', kwargs.pop('text', None)) + if media_id and alt_text: + jsondata = { + "media_id": media_id, + "alt_text": {"text": alt_text} + } + return self.__call__(_json=jsondata, **kwargs) + + method = kwargs.pop('_method', None) or method_for_uri(uri) + domain = self.domain + + # If an _id kwarg is present, this is treated as id as a CGI + # param. + _id = kwargs.pop('_id', None) + if _id: + kwargs['id'] = _id + + # If an _timeout is specified in kwargs, use it + _timeout = kwargs.pop('_timeout', None) + + secure_str = '' + if self.secure: + secure_str = 's' + dot = "" + if self.format: + dot = "." 
+ url_base = "http%s://%s/%s%s%s" % ( + secure_str, domain, uri, dot, self.format) + + # Check if argument tells whether img is already base64 encoded + b64_convert = not kwargs.pop("_base64", False) + if b64_convert: + import base64 + + # Catch media arguments to handle oauth query differently for multipart + media = None + if 'media' in kwargs: + mediafield = 'media' + media = kwargs.pop('media') + media_raw = True + elif 'media[]' in kwargs: + mediafield = 'media[]' + media = kwargs.pop('media[]') + if b64_convert: + media = base64.b64encode(media) + media_raw = False + + # Catch media arguments that are not accepted through multipart + # and are not yet base64 encoded + if b64_convert: + for arg in ['banner', 'image']: + if arg in kwargs: + kwargs[arg] = base64.b64encode(kwargs[arg]) + + headers = {'Accept-Encoding': 'gzip'} if self.gzip else dict() + body = None + arg_data = None + + # Catch _json special argument to handle special endpoints which + # require args as a json string within the request's body + # for instance media/metadata/create on upload.twitter.com + # https://dev.twitter.com/rest/reference/post/media/metadata/create + jsondata = kwargs.pop('_json', None) + if jsondata: + body = actually_bytes(json.dumps(jsondata)) + headers['Content-Type'] = 'application/json; charset=UTF-8' + + if self.auth: + headers.update(self.auth.generate_headers()) + # Use urlencoded oauth args with no params when sending media + # via multipart and send it directly via uri even for post + arg_data = self.auth.encode_params( + url_base, method, {} if media or jsondata else kwargs) + if method == 'GET' or media or jsondata: + url_base += '?' + arg_data + else: + body = arg_data.encode('utf-8') + + # Handle query as multipart when sending media + if media: + BOUNDARY = b"###Python-Twitter###" + bod = [] + bod.append(b'--' + BOUNDARY) + bod.append( + b'Content-Disposition: form-data; name="' + + actually_bytes(mediafield) + + b'"') + bod.append(b'Content-Type: application/octet-stream') + if not media_raw: + bod.append(b'Content-Transfer-Encoding: base64') + bod.append(b'') + bod.append(actually_bytes(media)) + for k, v in kwargs.items(): + k = actually_bytes(k) + v = actually_bytes(v) + bod.append(b'--' + BOUNDARY) + bod.append(b'Content-Disposition: form-data; name="' + k + b'"') + bod.append(b'Content-Type: text/plain;charset=utf-8') + bod.append(b'') + bod.append(v) + bod.append(b'--' + BOUNDARY + b'--') + bod.append(b'') + bod.append(b'') + body = b'\r\n'.join(bod) + # print(body.decode('utf-8', errors='ignore')) + headers['Content-Type'] = \ + b'multipart/form-data; boundary=' + BOUNDARY + + if not PY_3_OR_HIGHER: + url_base = url_base.encode("utf-8") + for k in headers: + headers[actually_bytes(k)] = actually_bytes(headers.pop(k)) + + req = urllib_request.Request(url_base, data=body, headers=headers) + if self.retry: + return self._handle_response_with_retry(req, uri, arg_data, _timeout) + else: + return self._handle_response(req, uri, arg_data, _timeout) + + def _handle_response(self, req, uri, arg_data, _timeout=None): + kwargs = {} + if _timeout: + kwargs['timeout'] = _timeout + try: + handle = urllib_request.urlopen(req, **kwargs) + if handle.headers['Content-Type'] in ['image/jpeg', 'image/png']: + return handle + try: + data = handle.read() + except http_client.IncompleteRead as e: + # Even if we don't get all the bytes we should have there + # may be a complete response in e.partial + data = e.partial + if handle.info().get('Content-Encoding') == 'gzip': + # Handle gzip 
decompression + buf = StringIO(data) + f = gzip.GzipFile(fileobj=buf) + data = f.read() + if len(data) == 0: + return wrap_response({}, handle.headers) + elif "json" == self.format: + res = json.loads(data.decode('utf8')) + return wrap_response(res, handle.headers) + else: + return wrap_response( + data.decode('utf8'), handle.headers) + except urllib_error.HTTPError as e: + if (e.code == 304): + return [] + else: + raise TwitterHTTPError(e, uri, self.format, arg_data) + + def _handle_response_with_retry(self, req, uri, arg_data, _timeout=None): + retry = self.retry + while retry: + try: + return self._handle_response(req, uri, arg_data, _timeout) + except TwitterHTTPError as e: + if e.e.code == 429: + # API rate limit reached + reset = int(e.e.headers.get('X-Rate-Limit-Reset', time() + 30)) + delay = int(reset - time() + 2) # add some extra margin + if delay <= 0: + delay = self.TWITTER_UNAVAILABLE_WAIT + print("API rate limit reached; waiting for %ds..." % delay, file=sys.stderr) + elif e.e.code in (502, 503, 504): + delay = self.TWITTER_UNAVAILABLE_WAIT + print("Service unavailable; waiting for %ds..." % delay, file=sys.stderr) + else: + raise + if isinstance(retry, int) and not isinstance(retry, bool): + if retry <= 0: + raise + retry -= 1 + sleep(delay) + + +class Twitter(TwitterCall): + """ + The minimalist yet fully featured Twitter API class. + + Get RESTful data by accessing members of this class. The result + is decoded python objects (lists and dicts). + + The Twitter API is documented at: + + https://dev.twitter.com/overview/documentation + + The list of most accessible functions is listed at: + + https://dev.twitter.com/rest/public + + + Examples:: + + from twitter import * + + t = Twitter( + auth=OAuth(token, token_secret, consumer_key, consumer_secret)) + + # Get your "home" timeline + t.statuses.home_timeline() + + # Get a particular friend's timeline + t.statuses.user_timeline(screen_name="billybob") + + # to pass in GET/POST parameters, such as `count` + t.statuses.home_timeline(count=5) + + # to pass in the GET/POST parameter `id` you need to use `_id` + t.statuses.oembed(_id=1234567890) + + # Update your status + t.statuses.update( + status="Using @sixohsix's sweet Python Twitter Tools.") + + # Send a direct message + t.direct_messages.new( + user="billybob", + text="I think yer swell!") + + # Get the members of tamtar's list "Things That Are Rad" + t.lists.members(owner_screen_name="tamtar", slug="things-that-are-rad") + + # An *optional* `_timeout` parameter can also be used for API + # calls which take much more time than normal or twitter stops + # responding for some reason: + t.users.lookup( + screen_name=','.join(A_LIST_OF_100_SCREEN_NAMES), \ + _timeout=1) + + # Overriding Method: GET/POST + # you should not need to use this method as this library properly + # detects whether GET or POST should be used, Nevertheless + # to force a particular method, use `_method` + t.statuses.oembed(_id=1234567890, _method='GET') + + # Send images along with your tweets: + # - first just read images from the web or from files the regular way: + with open("example.png", "rb") as imagefile: + imagedata = imagefile.read() + # - then upload medias one by one on Twitter's dedicated server + # and collect each one's id: + t_upload = Twitter(domain='upload.twitter.com', + auth=OAuth(token, token_secret, consumer_key, consumer_secret)) + id_img1 = t_upload.media.upload(media=imagedata)["media_id_string"] + id_img2 = t_upload.media.upload(media=imagedata)["media_id_string"] + + # - 
finally send your tweet with the list of media ids: + t.statuses.update(status="PTT ★", media_ids=",".join([id_img1, id_img2])) + + # Or send a tweet with an image (or set a logo/banner similarily) + # using the old deprecated method that will probably disappear some day + params = {"media[]": imagedata, "status": "PTT ★"} + # Or for an image encoded as base64: + params = {"media[]": base64_image, "status": "PTT ★", "_base64": True} + t.statuses.update_with_media(**params) + + # Attach text metadata to medias sent, using the upload.twitter.com route + # using the _json workaround to send json arguments as POST body + # (warning: to be done before attaching the media to a tweet) + t_upload.media.metadata.create(_json={ + "media_id": id_img1, + "alt_text": { "text": "metadata generated via PTT!" } + }) + # or with the shortcut arguments ("alt_text" and "text" work): + t_upload.media.metadata.create(media_id=id_img1, text="metadata generated via PTT!") + + Searching Twitter:: + + # Search for the latest tweets about #pycon + t.search.tweets(q="#pycon") + + + Using the data returned + ----------------------- + + Twitter API calls return decoded JSON. This is converted into + a bunch of Python lists, dicts, ints, and strings. For example:: + + x = twitter.statuses.home_timeline() + + # The first 'tweet' in the timeline + x[0] + + # The screen name of the user who wrote the first 'tweet' + x[0]['user']['screen_name'] + + + Getting raw XML data + -------------------- + + If you prefer to get your Twitter data in XML format, pass + format="xml" to the Twitter object when you instantiate it:: + + twitter = Twitter(format="xml") + + The output will not be parsed in any way. It will be a raw string + of XML. + + """ + def __init__( + self, format="json", + domain="api.twitter.com", secure=True, auth=None, + api_version=_DEFAULT, retry=False): + """ + Create a new twitter API connector. + + Pass an `auth` parameter to use the credentials of a specific + user. Generally you'll want to pass an `OAuth` + instance:: + + twitter = Twitter(auth=OAuth( + token, token_secret, consumer_key, consumer_secret)) + + + `domain` lets you change the domain you are connecting. By + default it's `api.twitter.com`. + + If `secure` is False you will connect with HTTP instead of + HTTPS. + + `api_version` is used to set the base uri. By default it's + '1.1'. + + If `retry` is True, API rate limits will automatically be + handled by waiting until the next reset, as indicated by + the X-Rate-Limit-Reset HTTP header. If retry is an integer, + it defines the number of retries attempted. + """ + if not auth: + auth = NoAuth() + + if (format not in ("json", "xml", "")): + raise ValueError("Unknown data format '%s'" % (format)) + + if api_version is _DEFAULT: + api_version = '1.1' + + uriparts = () + if api_version: + uriparts += (str(api_version),) + + TwitterCall.__init__( + self, auth=auth, format=format, domain=domain, + callable_cls=TwitterCall, + secure=secure, uriparts=uriparts, retry=retry) + + +__all__ = ["Twitter", "TwitterError", "TwitterHTTPError", "TwitterResponse"] diff --git a/env/lib/python3.6/site-packages/twitter/archiver.py b/env/lib/python3.6/site-packages/twitter/archiver.py new file mode 100644 index 0000000..ef2dc1f --- /dev/null +++ b/env/lib/python3.6/site-packages/twitter/archiver.py @@ -0,0 +1,457 @@ +"""USAGE + twitter-archiver [options] <-|user> [ ...] 
+ +DESCRIPTION + Archive tweets of users, sorted by date from oldest to newest, in + the following format: <> + Date format is: YYYY-MM-DD HH:MM:SS TZ. Tweet is used to + resume archiving on next run. Archive file name is the user name. + Provide "-" instead of users to read users from standard input. + +OPTIONS + -o --oauth authenticate to Twitter using OAuth (default: no) + -s --save-dir directory to save archives (default: current dir) + -a --api-rate see current API rate limit status + -t --timeline archive own timeline into given file name (requires + OAuth, max 800 statuses) + -m --mentions archive own mentions instead of timeline into + given file name (requires OAuth, max 800 statuses) + -v --favorites archive user's favorites instead of timeline + -f --follow-redirects follow redirects of urls + -r --redirect-sites follow redirects for this comma separated list of hosts + -d --dms archive own direct messages (both received and + sent) into given file name. + -i --isoformat store dates in ISO format (specifically RFC 3339) + +AUTHENTICATION + Authenticate to Twitter using OAuth to archive tweets of private profiles + and have higher API rate limits. OAuth authentication tokens are stored + in ~/.twitter-archiver_oauth. +""" + +from __future__ import print_function + +import os, sys, time as _time, calendar, functools +from datetime import time, date, datetime +from getopt import gnu_getopt as getopt, GetoptError + +try: + import urllib.request as urllib2 + import http.client as httplib +except ImportError: + import urllib2 + import httplib + + +# T-Archiver (Twitter-Archiver) application registered by @stalkr_ +CONSUMER_KEY='d8hIyfzs7ievqeeZLjZrqQ' +CONSUMER_SECRET='AnZmK0rnvaX7BoJ75l6XlilnbyMv7FoiDXWVmPD8' + +from .api import Twitter, TwitterError +from .oauth import OAuth, read_token_file +from .oauth_dance import oauth_dance +from .auth import NoAuth +from .util import Fail, err, expand_line, parse_host_list +from .follow import lookup +from .timezones import utc as UTC, Local + +def parse_args(args, options): + """Parse arguments from command-line to set options.""" + long_opts = ['help', 'oauth', 'save-dir=', 'api-rate', 'timeline=', 'mentions=', 'favorites', 'follow-redirects',"redirect-sites=", 'dms=', 'isoformat'] + short_opts = "hos:at:m:vfr:d:i" + opts, extra_args = getopt(args, short_opts, long_opts) + + for opt, arg in opts: + if opt in ('-h', '--help'): + print(__doc__) + raise SystemExit(0) + elif opt in ('-o', '--oauth'): + options['oauth'] = True + elif opt in ('-s', '--save-dir'): + options['save-dir'] = arg + elif opt in ('-a', '--api-rate'): + options['api-rate' ] = True + elif opt in ('-t', '--timeline'): + options['timeline'] = arg + elif opt in ('-m', '--mentions'): + options['mentions'] = arg + elif opt in ('-v', '--favorites'): + options['favorites'] = True + elif opt in ('-f', '--follow-redirects'): + options['follow-redirects'] = True + elif opt in ('-r', '--redirect-sites'): + options['redirect-sites'] = arg + elif opt in ('-d', '--dms'): + options['dms'] = arg + elif opt in ('-i', '--isoformat'): + options['isoformat'] = True + + options['extra_args'] = extra_args + +def load_tweets(filename): + """Load tweets from file into dict, see save_tweets().""" + try: + archive = open(filename,"r") + except IOError: # no archive (yet) + return {} + + tweets = {} + for line in archive.readlines(): + try: + tid, text = line.strip().split(" ", 1) + tweets[int(tid)] = text.decode("utf-8") + except Exception as e: + err("loading tweet %s failed due to %s" % (line, 
unicode(e))) + + archive.close() + return tweets + +def save_tweets(filename, tweets): + """Save tweets from dict to file. + + Save tweets from dict to UTF-8 encoded file, one per line: + + Tweet text is: + <> [RT @: ] + + Args: + filename: A string representing the file name to save tweets to. + tweets: A dict mapping tweet-ids (int) to tweet text (str). + """ + if len(tweets) == 0: + return + + try: + archive = open(filename,"w") + except IOError as e: + err("Cannot save tweets: %s" % str(e)) + return + + for k in sorted(tweets.keys()): + try: + archive.write("%i %s\n" % (k, tweets[k].encode('utf-8'))) + except Exception as ex: + err("archiving tweet %s failed due to %s" % (k, unicode(ex))) + + archive.close() + +def format_date(utc, isoformat=False): + """Parse Twitter's UTC date into UTC or local time.""" + u = datetime.strptime(utc.replace('+0000','UTC'), '%a %b %d %H:%M:%S %Z %Y') + # This is the least painful way I could find to create a non-naive + # datetime including a UTC timezone. Alternative suggestions + # welcome. + unew = datetime.combine(u.date(), time(u.time().hour, + u.time().minute, u.time().second, tzinfo=UTC)) + + # Convert to localtime + unew = unew.astimezone(Local) + + if isoformat: + return unew.isoformat() + else: + return unew.strftime('%Y-%m-%d %H:%M:%S %Z') + +def expand_format_text(hosts, text): + """Following redirects in links.""" + return direct_format_text(expand_line(text, hosts)) + +def direct_format_text(text): + """Transform special chars in text to have only one line.""" + return text.replace('\n','\\n').replace('\r','\\r') + +def statuses_resolve_uids(twitter, tl): + """Resolve user ids to screen names from statuses.""" + # get all user ids that needs a lookup (no screen_name key) + user_ids = [] + for t in tl: + rt = t.get('retweeted_status') + if rt and not rt['user'].get('screen_name'): + user_ids.append(rt['user']['id']) + if not t['user'].get('screen_name'): + user_ids.append(t['user']['id']) + + # resolve all of them at once + names = lookup(twitter, list(set(user_ids))) + + # build new statuses with resolved uids + new_tl = [] + for t in tl: + rt = t.get('retweeted_status') + if rt and not rt['user'].get('screen_name'): + name = names[rt['user']['id']] + t['retweeted_status']['user']['screen_name'] = name + if not t['user'].get('screen_name'): + name = names[t['user']['id']] + t['user']['screen_name'] = name + new_tl.append(t) + + return new_tl + +def statuses_portion(twitter, screen_name, max_id=None, mentions=False, favorites=False, received_dms=None, isoformat=False): + """Get a portion of the statuses of a screen name.""" + kwargs = dict(count=200, include_rts=1, screen_name=screen_name) + if max_id: + kwargs['max_id'] = max_id + + tweets = {} + if mentions: + tl = twitter.statuses.mentions_timeline(**kwargs) + elif favorites: + tl = twitter.favorites.list(**kwargs) + elif received_dms != None: + if received_dms: + tl = twitter.direct_messages(**kwargs) + else: # sent DMs + tl = twitter.direct_messages.sent(**kwargs) + else: # timeline + if screen_name: + tl = twitter.statuses.user_timeline(**kwargs) + else: # self + tl = twitter.statuses.home_timeline(**kwargs) + + # some tweets do not provide screen name but user id, resolve those + # this isn't a valid operation for DMs, so special-case them + if received_dms == None: + newtl = statuses_resolve_uids(twitter, tl) + else: + newtl = tl + for t in newtl: + text = t['text'] + rt = t.get('retweeted_status') + if rt: + text = "RT @%s: %s" % (rt['user']['screen_name'], rt['text']) + # DMs 
don't include mentions by default, so in order to show who + # the recipient was, we synthesise a mention. If we're not + # operating on DMs, behave as normal + if received_dms == None: + tweets[t['id']] = "%s <%s> %s" % (format_date(t['created_at'], isoformat=isoformat), + t['user']['screen_name'], + format_text(text)) + else: + tweets[t['id']] = "%s <%s> @%s %s" % (format_date(t['created_at'], isoformat=isoformat), + t['sender_screen_name'], + t['recipient']['screen_name'], + format_text(text)) + return tweets + +def statuses(twitter, screen_name, tweets, mentions=False, favorites=False, received_dms=None, isoformat=False): + """Get all the statuses for a screen name.""" + max_id = None + fail = Fail() + # get portions of statuses, incrementing max id until no new tweets appear + while True: + try: + portion = statuses_portion(twitter, screen_name, max_id, mentions, favorites, received_dms, isoformat) + except TwitterError as e: + if e.e.code == 401: + err("Fail: %i Unauthorized (tweets of that user are protected)" + % e.e.code) + break + elif e.e.code == 429: + err("Fail: %i API rate limit exceeded" % e.e.code) + rls = twitter.application.rate_limit_status() + reset = rls.rate_limit_reset + reset = _time.asctime(_time.localtime(reset)) + delay = int(rls.rate_limit_reset + - _time.time()) + 5 # avoid race + err("Interval limit of %i requests reached, next reset on %s: " + "going to sleep for %i secs" % (rls.rate_limit_limit, + reset, delay)) + fail.wait(delay) + continue + elif e.e.code == 404: + err("Fail: %i This profile does not exist" % e.e.code) + break + elif e.e.code == 502: + err("Fail: %i Service currently unavailable, retrying..." + % e.e.code) + else: + err("Fail: %s\nRetrying..." % str(e)[:500]) + fail.wait(3) + except urllib2.URLError as e: + err("Fail: urllib2.URLError %s - Retrying..." % str(e)) + fail.wait(3) + except httplib.error as e: + err("Fail: httplib.error %s - Retrying..." % str(e)) + fail.wait(3) + except KeyError as e: + err("Fail: KeyError %s - Retrying..." % str(e)) + fail.wait(3) + else: + new = -len(tweets) + tweets.update(portion) + new += len(tweets) + err("Browsing %s statuses, new tweets: %i" + % (screen_name if screen_name else "home", new)) + if new < 190: + break + max_id = min(portion.keys())-1 # browse backwards + fail = Fail() + +def rate_limit_status(twitter): + """Print current Twitter API rate limit status.""" + rls = twitter.application.rate_limit_status() + print("Remaining API requests: %i/%i (interval limit)" + % (rls.rate_limit_remaining, rls.rate_limit_limit)) + print("Next reset in %is (%s)" + % (int(rls.rate_limit_reset - _time.time()), + _time.asctime(_time.localtime(rls.rate_limit_reset)))) + +def main(args=sys.argv[1:]): + options = { + 'oauth': False, + 'save-dir': ".", + 'api-rate': False, + 'timeline': "", + 'mentions': "", + 'dms': "", + 'favorites': False, + 'follow-redirects': False, + 'redirect-sites': None, + 'isoformat': False, + } + try: + parse_args(args, options) + except GetoptError as e: + err("I can't do that, %s." 
% e) + raise SystemExit(1) + + # exit if no user given + # except if asking for API rate, or archive of timeline or mentions + if not options['extra_args'] and not (options['api-rate'] or + options['timeline'] or + options['mentions'] or + options['dms']): + print(__doc__) + return + + # authenticate using OAuth, asking for token if necessary + if options['oauth']: + oauth_filename = (os.environ.get('HOME', + os.environ.get('USERPROFILE', '')) + + os.sep + + '.twitter-archiver_oauth') + + if not os.path.exists(oauth_filename): + oauth_dance("Twitter-Archiver", CONSUMER_KEY, CONSUMER_SECRET, + oauth_filename) + oauth_token, oauth_token_secret = read_token_file(oauth_filename) + auth = OAuth(oauth_token, oauth_token_secret, CONSUMER_KEY, + CONSUMER_SECRET) + else: + auth = NoAuth() + + twitter = Twitter(auth=auth, api_version='1.1', domain='api.twitter.com') + + if options['api-rate']: + rate_limit_status(twitter) + return + + global format_text + if options['follow-redirects'] or options['redirect-sites'] : + if options['redirect-sites']: + hosts = parse_host_list(options['redirect-sites']) + else: + hosts = None + format_text = functools.partial(expand_format_text, hosts) + else: + format_text = direct_format_text + + # save own timeline or mentions (the user used in OAuth) + if options['timeline'] or options['mentions']: + if isinstance(auth, NoAuth): + err("You must be authenticated to save timeline or mentions.") + raise SystemExit(1) + + if options['timeline']: + filename = options['save-dir'] + os.sep + options['timeline'] + print("* Archiving own timeline in %s" % filename) + elif options['mentions']: + filename = options['save-dir'] + os.sep + options['mentions'] + print("* Archiving own mentions in %s" % filename) + + tweets = {} + try: + tweets = load_tweets(filename) + except Exception as e: + err("Error when loading saved tweets: %s - continuing without" + % str(e)) + + try: + statuses(twitter, "", tweets, options['mentions'], options['favorites'], isoformat=options['isoformat']) + except KeyboardInterrupt: + err() + err("Interrupted") + raise SystemExit(1) + + save_tweets(filename, tweets) + if options['timeline']: + print("Total tweets in own timeline: %i" % len(tweets)) + elif options['mentions']: + print("Total mentions: %i" % len(tweets)) + + if options['dms']: + if isinstance(auth, NoAuth): + err("You must be authenticated to save DMs.") + raise SystemExit(1) + + filename = options['save-dir'] + os.sep + options['dms'] + print("* Archiving own DMs in %s" % filename) + + dms = {} + try: + dms = load_tweets(filename) + except Exception as e: + err("Error when loading saved DMs: %s - continuing without" + % str(e)) + + try: + statuses(twitter, "", dms, received_dms=True, isoformat=options['isoformat']) + statuses(twitter, "", dms, received_dms=False, isoformat=options['isoformat']) + except KeyboardInterrupt: + err() + err("Interrupted") + raise SystemExit(1) + + save_tweets(filename, dms) + print("Total DMs sent and received: %i" % len(dms)) + + + # read users from command-line or stdin + users = options['extra_args'] + if len(users) == 1 and users[0] == "-": + users = [line.strip() for line in sys.stdin.readlines()] + + # save tweets for every user + total, total_new = 0, 0 + for user in users: + filename = options['save-dir'] + os.sep + user + if options['favorites']: + filename = filename + "-favorites" + print("* Archiving %s tweets in %s" % (user, filename)) + + tweets = {} + try: + tweets = load_tweets(filename) + except Exception as e: + err("Error when loading saved 
tweets: %s - continuing without" + % str(e)) + + new = 0 + before = len(tweets) + try: + statuses(twitter, user, tweets, options['mentions'], options['favorites'], isoformat=options['isoformat']) + except KeyboardInterrupt: + err() + err("Interrupted") + raise SystemExit(1) + + save_tweets(filename, tweets) + total += len(tweets) + new = len(tweets) - before + total_new += new + print("Total tweets for %s: %i (%i new)" % (user, len(tweets), new)) + + print("Total: %i tweets (%i new) for %i users" + % (total, total_new, len(users))) diff --git a/env/lib/python3.6/site-packages/twitter/auth.py b/env/lib/python3.6/site-packages/twitter/auth.py new file mode 100644 index 0000000..45f1f4c --- /dev/null +++ b/env/lib/python3.6/site-packages/twitter/auth.py @@ -0,0 +1,62 @@ +try: + import urllib.parse as urllib_parse + from base64 import encodebytes +except ImportError: + import urllib as urllib_parse + from base64 import encodestring as encodebytes + + +class Auth(object): + """ + ABC for Authenticator objects. + """ + + def encode_params(self, base_url, method, params): + """Encodes parameters for a request suitable for including in a URL + or POST body. This method may also add new params to the request + if required by the authentication scheme in use.""" + raise NotImplementedError() + + def generate_headers(self): + """Generates headers which should be added to the request if required + by the authentication scheme in use.""" + raise NotImplementedError() + + +class UserPassAuth(Auth): + """ + Basic auth authentication using email/username and + password. Deprecated. + """ + def __init__(self, username, password): + self.username = username + self.password = password + + def encode_params(self, base_url, method, params): + # We could consider automatically converting unicode to utf8 strings + # before encoding... + return urllib_parse.urlencode(params) + + def generate_headers(self): + return {b"Authorization": b"Basic " + encodebytes( + ("%s:%s" %(self.username, self.password)) + .encode('utf8')).strip(b'\n') + } + + +class NoAuth(Auth): + """ + No authentication authenticator. 
+ """ + def __init__(self): + pass + + def encode_params(self, base_url, method, params): + return urllib_parse.urlencode(params) + + def generate_headers(self): + return {} + + +class MissingCredentialsError(Exception): + pass diff --git a/env/lib/python3.6/site-packages/twitter/cmdline.py b/env/lib/python3.6/site-packages/twitter/cmdline.py new file mode 100644 index 0000000..e279584 --- /dev/null +++ b/env/lib/python3.6/site-packages/twitter/cmdline.py @@ -0,0 +1,800 @@ +# encoding: utf-8 +""" +USAGE: + + twitter [action] [options] + + +ACTIONS: + authorize authorize the command-line tool to interact with Twitter + follow follow a user + friends get latest tweets from your friends (default action) + user get latest tweets from a specific user + help print this help text that you are currently reading + leave stop following a user + list get list of a user's lists; give a list name to get + tweets from that list + mylist get list of your lists; give a list name to get tweets + from that list + pyprompt start a Python prompt for interacting with the twitter + object directly + replies get latest replies to you + search search twitter (Beware: octothorpe, escape it) + set set your twitter status + shell login to the twitter shell + rate get your current rate limit status (remaining API reqs) + repl begin a Read-Eval-Print-Loop with a configured twitter + object + +OPTIONS: + + -r --refresh run this command forever, polling every once + in a while (default: every 5 minutes) + -R --refresh-rate set the refresh rate (in seconds) + -f --format specify the output format for status updates + -c --config read username and password from given config + file (default ~/.twitter) + -l --length specify number of status updates shown + (default: 20, max: 200) + -t --timestamp show time before status lines + -d --datestamp show date before status lines + --no-ssl use less-secure HTTP instead of HTTPS + --oauth filename to read/store oauth credentials to + +FORMATS for the --format option + + default one line per status + verbose multiple lines per status, more verbose status info + json raw json data from the api on each line + urls nothing but URLs + ansi ansi colour (rainbow mode) + + +CONFIG FILES + + The config file should be placed in your home directory and be named .twitter. + It must contain a [twitter] header, and all the desired options you wish to + set, like so: + +[twitter] +format: +prompt: '> + + OAuth authentication tokens are stored in the file .twitter_oauth in your + home directory. +""" + +from __future__ import print_function + +try: + input = __builtins__.raw_input +except (AttributeError, KeyError): + pass + + +CONSUMER_KEY = 'uS6hO2sV6tDKIOeVjhnFnQ' +CONSUMER_SECRET = 'MEYTOS97VvlHX7K1rwHPEqVpTSqZ71HtvoK4sVuYk' + +import code +from getopt import gnu_getopt as getopt, GetoptError +import json +import locale +import os.path +import re +import sys +import time + +try: + from ConfigParser import SafeConfigParser +except ImportError: + from configparser import ConfigParser as SafeConfigParser +import datetime +try: + from urllib.parse import quote +except ImportError: + from urllib2 import quote +try: + import HTMLParser +except ImportError: + import html.parser as HTMLParser + +from .api import Twitter, TwitterError +from .oauth import OAuth, read_token_file +from .oauth_dance import oauth_dance +from . 
import ansi +from .util import smrt_input, printNicely, align_text + +OPTIONS = { + 'action': 'friends', + 'refresh': False, + 'refresh_rate': 600, + 'format': 'default', + 'prompt': '[cyan]twitter[R]> ', + 'config_filename': os.environ.get('HOME', + os.environ.get('USERPROFILE', '')) + + os.sep + '.twitter', + 'oauth_filename': os.environ.get('HOME', + os.environ.get('USERPROFILE', '')) + + os.sep + '.twitter_oauth', + 'length': 20, + 'timestamp': False, + 'datestamp': False, + 'extra_args': [], + 'secure': True, + 'invert_split': False, + 'force-ansi': False, +} + +gHtmlParser = HTMLParser.HTMLParser() +hashtagRe = re.compile(r'(?P#\S+)') +profileRe = re.compile(r'(?P\@\S+)') +ansiFormatter = ansi.AnsiCmd(False) + + +def parse_args(args, options): + long_opts = ['help', 'format=', 'refresh', 'oauth=', + 'refresh-rate=', 'config=', 'length=', 'timestamp', + 'datestamp', 'no-ssl', 'force-ansi'] + short_opts = "e:p:f:h?rR:c:l:td" + opts, extra_args = getopt(args, short_opts, long_opts) + if extra_args and hasattr(extra_args[0], 'decode'): + extra_args = [arg.decode(locale.getpreferredencoding()) + for arg in extra_args] + + for opt, arg in opts: + if opt in ('-f', '--format'): + options['format'] = arg + elif opt in ('-r', '--refresh'): + options['refresh'] = True + elif opt in ('-R', '--refresh-rate'): + options['refresh_rate'] = int(arg) + elif opt in ('-l', '--length'): + options["length"] = int(arg) + elif opt in ('-t', '--timestamp'): + options["timestamp"] = True + elif opt in ('-d', '--datestamp'): + options["datestamp"] = True + elif opt in ('-?', '-h', '--help'): + options['action'] = 'help' + elif opt in ('-c', '--config'): + options['config_filename'] = arg + elif opt == '--no-ssl': + options['secure'] = False + elif opt == '--oauth': + options['oauth_filename'] = arg + elif opt == '--force-ansi': + options['force-ansi'] = True + + if extra_args and not ('action' in options and options['action'] == 'help'): + options['action'] = extra_args[0] + options['extra_args'] = extra_args[1:] + + +def get_time_string(status, options, format="%a %b %d %H:%M:%S +0000 %Y"): + timestamp = options["timestamp"] + datestamp = options["datestamp"] + t = time.strptime(status['created_at'], format) + i_hate_timezones = time.timezone + if time.daylight: + i_hate_timezones = time.altzone + dt = datetime.datetime(*t[:-3]) - datetime.timedelta( + seconds=i_hate_timezones) + t = dt.timetuple() + if timestamp and datestamp: + return time.strftime("%Y-%m-%d %H:%M:%S ", t) + elif timestamp: + return time.strftime("%H:%M:%S ", t) + elif datestamp: + return time.strftime("%Y-%m-%d ", t) + return "" + + +def reRepl(m): + ansiTypes = { + 'clear': ansiFormatter.cmdReset(), + 'hashtag': ansiFormatter.cmdBold(), + 'profile': ansiFormatter.cmdUnderline(), + } + + s = None + try: + mkey = m.lastgroup + if m.group(mkey): + s = '%s%s%s' % (ansiTypes[mkey], m.group(mkey), ansiTypes['clear']) + except IndexError: + pass + return s + + +def replaceInStatus(status): + txt = gHtmlParser.unescape(status) + txt = re.sub(hashtagRe, reRepl, txt) + txt = re.sub(profileRe, reRepl, txt) + return txt + + +def correctRTStatus(status): + if 'retweeted_status' in status: + return ("RT @" + status['retweeted_status']['user']['screen_name'] + + " " + status['retweeted_status']['text']) + else: + return status['text'] + + +class StatusFormatter(object): + def __call__(self, status, options): + return ("%s@%s %s" % ( + get_time_string(status, options), + status['user']['screen_name'], + gHtmlParser.unescape(correctRTStatus(status)))) + + 
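+# The status formatter classes in this section share one small protocol: a
+# formatter is a callable that takes a decoded status dict plus the options
+# dict and returns a display string.  A rough usage sketch (the status dict
+# below is a hypothetical, trimmed-down API object):
+#
+#     options = dict(OPTIONS)
+#     sf = get_formatter('status', options)
+#     line = sf({'created_at': 'Mon Sep 24 03:35:21 +0000 2012',
+#                'text': 'hello world',
+#                'user': {'screen_name': 'example_user'}}, options)
+#
+# get_formatter() and the `formatters` registry it consults are defined below.
+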
+class AnsiStatusFormatter(object): + def __init__(self): + self._colourMap = ansi.ColourMap() + + def __call__(self, status, options): + colour = self._colourMap.colourFor(status['user']['screen_name']) + return ("%s%s% 16s%s %s " % ( + get_time_string(status, options), + ansiFormatter.cmdColour(colour), status['user']['screen_name'], + ansiFormatter.cmdReset(), + align_text(replaceInStatus(correctRTStatus(status))))) + + +class VerboseStatusFormatter(object): + def __call__(self, status, options): + return ("-- %s (%s) on %s\n%s\n" % ( + status['user']['screen_name'], + status['user']['location'], + status['created_at'], + gHtmlParser.unescape(correctRTStatus(status)))) + + +class JSONStatusFormatter(object): + def __call__(self, status, options): + status['text'] = gHtmlParser.unescape(status['text']) + return json.dumps(status) + + +class URLStatusFormatter(object): + urlmatch = re.compile(r'https?://\S+') + + def __call__(self, status, options): + urls = self.urlmatch.findall(correctRTStatus(status)) + return '\n'.join(urls) if urls else "" + + +class ListsFormatter(object): + def __call__(self, list): + if list['description']: + list_str = "%-30s (%s)" % (list['name'], list['description']) + else: + list_str = "%-30s" % (list['name']) + return "%s\n" % list_str + + +class ListsVerboseFormatter(object): + def __call__(self, list): + list_str = "%-30s\n description: %s\n members: %s\n mode:%s\n" % ( + list['name'], list['description'], + list['member_count'], list['mode']) + return list_str + + +class AnsiListsFormatter(object): + def __init__(self): + self._colourMap = ansi.ColourMap() + + def __call__(self, list): + colour = self._colourMap.colourFor(list['name']) + return ("%s%-15s%s %s" % ( + ansiFormatter.cmdColour(colour), list['name'], + ansiFormatter.cmdReset(), list['description'])) + + +class AdminFormatter(object): + def __call__(self, action, user): + user_str = "%s (%s)" % (user['screen_name'], user['name']) + if action == "follow": + return "You are now following %s.\n" % (user_str) + else: + return "You are no longer following %s.\n" % (user_str) + + +class VerboseAdminFormatter(object): + def __call__(self, action, user): + return("-- %s: %s (%s): %s" % ( + "Following" if action == "follow" else "Leaving", + user['screen_name'], + user['name'], + user['url'])) + + +class SearchFormatter(object): + def __call__(self, result, options): + return("%s%s %s" % ( + get_time_string(result, options, "%a, %d %b %Y %H:%M:%S +0000"), + result['from_user'], result['text'])) + + +class VerboseSearchFormatter(SearchFormatter): + pass # Default to the regular one + + +class URLSearchFormatter(object): + urlmatch = re.compile(r'https?://\S+') + + def __call__(self, result, options): + urls = self.urlmatch.findall(result['text']) + return '\n'.join(urls) if urls else "" + + +class AnsiSearchFormatter(object): + def __init__(self): + self._colourMap = ansi.ColourMap() + + def __call__(self, result, options): + colour = self._colourMap.colourFor(result['from_user']) + return ("%s%s%s%s %s" % ( + get_time_string(result, options, "%a, %d %b %Y %H:%M:%S +0000"), + ansiFormatter.cmdColour(colour), result['from_user'], + ansiFormatter.cmdReset(), result['text'])) + +_term_encoding = None + + +def get_term_encoding(): + global _term_encoding + if not _term_encoding: + lang = os.getenv('LANG', 'unknown.UTF-8').split('.') + if lang[1:]: + _term_encoding = lang[1] + else: + _term_encoding = 'UTF-8' + return _term_encoding + +formatters = {} +status_formatters = { + 'default': StatusFormatter, + 
'verbose': VerboseStatusFormatter, + 'json': JSONStatusFormatter, + 'urls': URLStatusFormatter, + 'ansi': AnsiStatusFormatter +} +formatters['status'] = status_formatters + +admin_formatters = { + 'default': AdminFormatter, + 'verbose': VerboseAdminFormatter, + 'urls': AdminFormatter, + 'ansi': AdminFormatter +} +formatters['admin'] = admin_formatters + +search_formatters = { + 'default': SearchFormatter, + 'verbose': VerboseSearchFormatter, + 'urls': URLSearchFormatter, + 'ansi': AnsiSearchFormatter +} +formatters['search'] = search_formatters + +lists_formatters = { + 'default': ListsFormatter, + 'verbose': ListsVerboseFormatter, + 'urls': None, + 'ansi': AnsiListsFormatter +} +formatters['lists'] = lists_formatters + + +def get_formatter(action_type, options): + formatters_dict = formatters.get(action_type) + if not formatters_dict: + raise TwitterError( + "There was an error finding a class of formatters for your type (%s)" + % (action_type)) + f = formatters_dict.get(options['format']) + if not f: + raise TwitterError( + "Unknown formatter '%s' for status actions" % (options['format'])) + return f() + + +class Action(object): + + def ask(self, subject='perform this action', careful=False): + ''' + Requests from the user using `raw_input` if `subject` should be + performed. When `careful`, the default answer is NO, otherwise YES. + Returns the user answer in the form `True` or `False`. + ''' + sample = '(y/N)' + if not careful: + sample = '(Y/n)' + + prompt = 'You really want to %s %s? ' % (subject, sample) + try: + answer = input(prompt).lower() + if careful: + return answer in ('yes', 'y') + else: + return answer not in ('no', 'n') + except EOFError: + print(file=sys.stderr) # Put Newline since Enter was never pressed + # TODO: + # Figure out why on OS X the raw_input keeps raising + # EOFError and is never able to reset and get more input + # Hint: Look at how IPython implements their console + default = True + if careful: + default = False + return default + + def __call__(self, twitter, options): + action = actions.get(options['action'], NoSuchAction)() + try: + doAction = lambda: action(twitter, options) + if options['refresh'] and isinstance(action, StatusAction): + while True: + doAction() + sys.stdout.flush() + time.sleep(options['refresh_rate']) + else: + doAction() + except KeyboardInterrupt: + print('\n[Keyboard Interrupt]', file=sys.stderr) + pass + + +class NoSuchActionError(Exception): + pass + + +class NoSuchAction(Action): + def __call__(self, twitter, options): + raise NoSuchActionError("No such action: %s" % (options['action'])) + + +class StatusAction(Action): + def __call__(self, twitter, options): + statuses = self.getStatuses(twitter, options) + sf = get_formatter('status', options) + for status in statuses: + statusStr = sf(status, options) + if statusStr.strip(): + printNicely(statusStr) + + +class SearchAction(Action): + def __call__(self, twitter, options): + # We need to be pointing at search.twitter.com to work, and it is less + # tangly to do it here than in the main() + twitter.domain = "search.twitter.com" + twitter.uriparts = () + # We need to bypass the TwitterCall parameter encoding, so we + # don't encode the plus sign, so we have to encode it ourselves + query_string = "+".join( + [quote(term) + for term in options['extra_args']]) + + results = twitter.search(q=query_string)['results'] + f = get_formatter('search', options) + for result in results: + resultStr = f(result, options) + if resultStr.strip(): + printNicely(resultStr) + + +class 
AdminAction(Action): + def __call__(self, twitter, options): + if not (options['extra_args'] and options['extra_args'][0]): + raise TwitterError("You need to specify a user (screen name)") + af = get_formatter('admin', options) + try: + user = self.getUser(twitter, options['extra_args'][0]) + except TwitterError as e: + print("There was a problem following or leaving the specified user.") + print("You may be trying to follow a user you are already following;") + print("Leaving a user you are not currently following;") + print("Or the user may not exist.") + print("Sorry.") + print() + print(e) + else: + printNicely(af(options['action'], user)) + + +class ListsAction(StatusAction): + def getStatuses(self, twitter, options): + if not options['extra_args']: + raise TwitterError("Please provide a user to query for lists") + + screen_name = options['extra_args'][0] + + if not options['extra_args'][1:]: + lists = twitter.lists.list(screen_name=screen_name) + if not lists: + printNicely("This user has no lists.") + for list in lists: + lf = get_formatter('lists', options) + printNicely(lf(list)) + return [] + else: + return list(reversed(twitter.lists.statuses( + count=options['length'], + owner_screen_name=screen_name, + slug=options['extra_args'][1]))) + + +class MyListsAction(ListsAction): + def getStatuses(self, twitter, options): + screen_name = twitter.account.verify_credentials()['screen_name'] + options['extra_args'].insert(0, screen_name) + return ListsAction.getStatuses(self, twitter, options) + + +class FriendsAction(StatusAction): + def getStatuses(self, twitter, options): + return list(reversed( + twitter.statuses.home_timeline(count=options["length"]))) + + +class UserAction(StatusAction): + def getStatuses(self, twitter, options): + if not options['extra_args']: + raise TwitterError("You need to specify a user (screen name)") + + screen_name = options['extra_args'][0] + + return list(reversed( + twitter.statuses.user_timeline(screen_name=screen_name, + count=options["length"]))) + + +class RepliesAction(StatusAction): + def getStatuses(self, twitter, options): + return list(reversed( + twitter.statuses.mentions_timeline(count=options["length"]))) + + +class FollowAction(AdminAction): + def getUser(self, twitter, user): + return twitter.friendships.create(screen_name=user) + + +class LeaveAction(AdminAction): + def getUser(self, twitter, user): + return twitter.friendships.destroy(screen_name=user) + + +class SetStatusAction(Action): + def __call__(self, twitter, options): + statusTxt = (" ".join(options['extra_args']) + if options['extra_args'] + else str(input("message: "))) + statusTxt = statusTxt.replace('\\n', '\n') + replies = [] + ptr = re.compile("@[\w_]+") + while statusTxt: + s = ptr.match(statusTxt) + if s and s.start() == 0: + replies.append(statusTxt[s.start():s.end()]) + statusTxt = statusTxt[s.end() + 1:] + else: + break + replies = " ".join(replies) + if len(replies) >= 140: + # just go back + statusTxt = replies + replies = "" + + splitted = [] + while statusTxt: + limit = 140 - len(replies) + if len(statusTxt) > limit: + end = str.rfind(statusTxt, ' ', 0, limit) + else: + end = limit + splitted.append(" ".join((replies, statusTxt[:end]))) + statusTxt = statusTxt[end:] + + if options['invert_split']: + splitted.reverse() + for status in splitted: + twitter.statuses.update(status=status) + + +class TwitterShell(Action): + + def render_prompt(self, prompt): + '''Parses the `prompt` string and returns the rendered version''' + prompt = prompt.strip("'").replace("\\'", 
"'") + for colour in ansi.COLOURS_NAMED: + if '[%s]' % (colour) in prompt: + prompt = prompt.replace( + '[%s]' % (colour), ansiFormatter.cmdColourNamed(colour)) + prompt = prompt.replace('[R]', ansiFormatter.cmdReset()) + return prompt + + def __call__(self, twitter, options): + prompt = self.render_prompt(options.get('prompt', 'twitter> ')) + while True: + options['action'] = "" + try: + args = input(prompt).split() + parse_args(args, options) + if not options['action']: + continue + elif options['action'] == 'exit': + raise SystemExit(0) + elif options['action'] == 'shell': + print('Sorry Xzibit does not work here!', file=sys.stderr) + continue + elif options['action'] == 'help': + print('''\ntwitter> `action`\n + The Shell accepts all the command line actions along with: + + exit Leave the twitter shell (^D may also be used) + + Full CMD Line help is appended below for your convenience.''', + file=sys.stderr) + Action()(twitter, options) + options['action'] = '' + except NoSuchActionError as e: + print(e, file=sys.stderr) + except KeyboardInterrupt: + print('\n[Keyboard Interrupt]', file=sys.stderr) + except EOFError: + print(file=sys.stderr) + leaving = self.ask(subject='Leave') + if not leaving: + print('Excellent!', file=sys.stderr) + else: + raise SystemExit(0) + + +class PythonPromptAction(Action): + def __call__(self, twitter, options): + try: + while True: + smrt_input(globals(), locals()) + except EOFError: + pass + + +class HelpAction(Action): + def __call__(self, twitter, options): + print(__doc__) + + +class DoNothingAction(Action): + def __call__(self, twitter, options): + pass + + +class RateLimitStatus(Action): + def __call__(self, twitter, options): + rate = twitter.application.rate_limit_status() + resources = rate['resources'] + for resource in resources: + for method in resources[resource]: + limit = resources[resource][method]['limit'] + remaining = resources[resource][method]['remaining'] + reset = resources[resource][method]['reset'] + + print("Remaining API requests for %s: %s / %s" % + (method, remaining, limit)) + print("Next reset in %ss (%s)\n" % (int(reset - time.time()), + time.asctime(time.localtime(reset)))) + + +class ReplAction(Action): + def __call__(self, twitter, options): + upload = Twitter( + auth=twitter.auth, + domain="upload.twitter.com") + printNicely( + "\nUse the 'twitter' object to interact with" + " the Twitter REST API.\n" + "Use twitter_upload to interact with " + "upload.twitter.com\n\n") + code.interact(local={ + "twitter": twitter, + "t": twitter, + "twitter_upload": upload, + "u": upload + }) + + +actions = { + 'authorize' : DoNothingAction, + 'follow' : FollowAction, + 'friends' : FriendsAction, + 'user' : UserAction, + 'list' : ListsAction, + 'mylist' : MyListsAction, + 'help' : HelpAction, + 'leave' : LeaveAction, + 'pyprompt' : PythonPromptAction, + 'replies' : RepliesAction, + 'search' : SearchAction, + 'set' : SetStatusAction, + 'shell' : TwitterShell, + 'rate' : RateLimitStatus, + 'repl' : ReplAction, +} + + +def loadConfig(filename): + options = dict(OPTIONS) + if os.path.exists(filename): + cp = SafeConfigParser() + cp.read([filename]) + for option in ('format', 'prompt'): + if cp.has_option('twitter', option): + options[option] = cp.get('twitter', option) + # process booleans + for option in ('invert_split',): + if cp.has_option('twitter', option): + options[option] = cp.getboolean('twitter', option) + return options + + +def main(args=sys.argv[1:]): + arg_options = {} + try: + parse_args(args, arg_options) + except 
GetoptError as e: + print("I can't do that, %s." % (e), file=sys.stderr) + print(file=sys.stderr) + raise SystemExit(1) + + config_path = os.path.expanduser( + arg_options.get('config_filename') or OPTIONS.get('config_filename')) + config_options = loadConfig(config_path) + + # Apply the various options in order, the most important applied last. + # Defaults first, then what's read from config file, then command-line + # arguments. + options = dict(OPTIONS) + for d in config_options, arg_options: + for k, v in list(d.items()): + if v: + options[k] = v + + if options['refresh'] and options['action'] not in ('friends', 'replies'): + print("You can only refresh the friends or replies actions.", + file=sys.stderr) + print("Use 'twitter -h' for help.", file=sys.stderr) + return 1 + + oauth_filename = os.path.expanduser(options['oauth_filename']) + + if options['action'] == 'authorize' or not os.path.exists(oauth_filename): + oauth_dance( + "the Command-Line Tool", CONSUMER_KEY, CONSUMER_SECRET, + options['oauth_filename']) + + global ansiFormatter + ansiFormatter = ansi.AnsiCmd(options["force-ansi"]) + + oauth_token, oauth_token_secret = read_token_file(oauth_filename) + + twitter = Twitter( + auth=OAuth( + oauth_token, oauth_token_secret, CONSUMER_KEY, CONSUMER_SECRET), + secure=options['secure'], + api_version='1.1', + domain='api.twitter.com') + + try: + Action()(twitter, options) + except NoSuchActionError as e: + print(e, file=sys.stderr) + raise SystemExit(1) + except TwitterError as e: + print(str(e), file=sys.stderr) + print("Use 'twitter -h' for help.", file=sys.stderr) + raise SystemExit(1) diff --git a/env/lib/python3.6/site-packages/twitter/corrupt.py b/env/lib/python3.6/site-packages/twitter/corrupt.py new file mode 100644 index 0000000..e69de29 diff --git a/env/lib/python3.6/site-packages/twitter/follow.py b/env/lib/python3.6/site-packages/twitter/follow.py new file mode 100644 index 0000000..8c763c1 --- /dev/null +++ b/env/lib/python3.6/site-packages/twitter/follow.py @@ -0,0 +1,255 @@ +"""USAGE + twitter-follow [options] + +DESCRIPTION + Display all following/followers of a user, one user per line. + +OPTIONS + -o --oauth authenticate to Twitter using OAuth (default no) + -r --followers display followers of the given user (default) + -g --following display users the given user is following + -a --api-rate see your current API rate limit status + -i --ids prepend user id to each line. useful to tracking renames + +AUTHENTICATION + Authenticate to Twitter using OAuth to see following/followers of private + profiles and have higher API rate limits. OAuth authentication tokens + are stored in the file .twitter-follow_oauth in your home directory. 
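+
+EXAMPLES
+    The screen name "someuser" below is a placeholder.
+
+    twitter-follow someuser          # list followers of someuser
+    twitter-follow -g someuser       # list users someuser is following
+    twitter-follow -o -i someuser    # authenticate via OAuth, prepend user ids
+    twitter-follow -a                # show current API rate limit status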
+""" + +from __future__ import print_function + +import os, sys, time, calendar +from getopt import gnu_getopt as getopt, GetoptError + +try: + import urllib.request as urllib2 + import http.client as httplib +except ImportError: + import urllib2 + import httplib + +# T-Follow (Twitter-Follow) application registered by @stalkr_ +CONSUMER_KEY='USRZQfvFFjB6UvZIN2Edww' +CONSUMER_SECRET='AwGAaSzZa5r0TDL8RKCDtffnI9H9mooZUdOa95nw8' + +from .api import Twitter, TwitterError +from .oauth import OAuth, read_token_file +from .oauth_dance import oauth_dance +from .auth import NoAuth +from .util import Fail, err + + +def parse_args(args, options): + """Parse arguments from command-line to set options.""" + long_opts = ['help', 'oauth', 'followers', 'following', 'api-rate', 'ids'] + short_opts = "horgai" + opts, extra_args = getopt(args, short_opts, long_opts) + + for opt, arg in opts: + if opt in ('-h', '--help'): + print(__doc__) + raise SystemExit(1) + elif opt in ('-o', '--oauth'): + options['oauth'] = True + elif opt in ('-r', '--followers'): + options['followers'] = True + elif opt in ('-g', '--following'): + options['followers'] = False + elif opt in ('-a', '--api-rate'): + options['api-rate' ] = True + elif opt in ('-i', '--ids'): + options['show_id'] = True + + options['extra_args'] = extra_args + +def lookup_portion(twitter, user_ids): + """Resolve a limited list of user ids to screen names.""" + users = {} + kwargs = dict(user_id=",".join(map(str, user_ids)), skip_status=1) + for u in twitter.users.lookup(**kwargs): + users[int(u['id'])] = u['screen_name'] + return users + +def lookup(twitter, user_ids): + """Resolve an entire list of user ids to screen names.""" + users = {} + api_limit = 100 + for i in range(0, len(user_ids), api_limit): + fail = Fail() + while True: + try: + portion = lookup_portion(twitter, user_ids[i:][:api_limit]) + except TwitterError as e: + if e.e.code == 429: + err("Fail: %i API rate limit exceeded" % e.e.code) + rls = twitter.application.rate_limit_status() + reset = rls.rate_limit_reset + reset = time.asctime(time.localtime(reset)) + delay = int(rls.rate_limit_reset + - time.time()) + 5 # avoid race + err("Interval limit of %i requests reached, next reset on " + "%s: going to sleep for %i secs" + % (rls.rate_limit_limit, reset, delay)) + fail.wait(delay) + continue + elif e.e.code == 502: + err("Fail: %i Service currently unavailable, retrying..." + % e.e.code) + else: + err("Fail: %s\nRetrying..." % str(e)[:500]) + fail.wait(3) + except urllib2.URLError as e: + err("Fail: urllib2.URLError %s - Retrying..." % str(e)) + fail.wait(3) + except httplib.error as e: + err("Fail: httplib.error %s - Retrying..." % str(e)) + fail.wait(3) + except KeyError as e: + err("Fail: KeyError %s - Retrying..." 
% str(e)) + fail.wait(3) + else: + users.update(portion) + err("Resolving user ids to screen names: %i/%i" + % (len(users), len(user_ids))) + break + return users + +def follow_portion(twitter, screen_name, cursor=-1, followers=True): + """Get a portion of followers/following for a user.""" + kwargs = dict(screen_name=screen_name, cursor=cursor) + if followers: + t = twitter.followers.ids(**kwargs) + else: # following + t = twitter.friends.ids(**kwargs) + return t['ids'], t['next_cursor'] + +def follow(twitter, screen_name, followers=True): + """Get the entire list of followers/following for a user.""" + user_ids = [] + cursor = -1 + fail = Fail() + while True: + try: + portion, cursor = follow_portion(twitter, screen_name, cursor, + followers) + except TwitterError as e: + if e.e.code == 401: + reason = ("follow%s of that user are protected" + % ("ers" if followers else "ing")) + err("Fail: %i Unauthorized (%s)" % (e.e.code, reason)) + break + elif e.e.code == 429: + err("Fail: %i API rate limit exceeded" % e.e.code) + rls = twitter.application.rate_limit_status() + reset = rls.rate_limit_reset + reset = time.asctime(time.localtime(reset)) + delay = int(rls.rate_limit_reset + - time.time()) + 5 # avoid race + err("Interval limit of %i requests reached, next reset on %s: " + "going to sleep for %i secs" % (rls.rate_limit_limit, + reset, delay)) + fail.wait(delay) + continue + elif e.e.code == 502: + err("Fail: %i Service currently unavailable, retrying..." + % e.e.code) + else: + err("Fail: %s\nRetrying..." % str(e)[:500]) + fail.wait(3) + except urllib2.URLError as e: + err("Fail: urllib2.URLError %s - Retrying..." % str(e)) + fail.wait(3) + except httplib.error as e: + err("Fail: httplib.error %s - Retrying..." % str(e)) + fail.wait(3) + except KeyError as e: + err("Fail: KeyError %s - Retrying..." % str(e)) + fail.wait(3) + else: + new = -len(user_ids) + user_ids = list(set(user_ids + portion)) + new += len(user_ids) + what = "follow%s" % ("ers" if followers else "ing") + err("Browsing %s %s, new: %i" % (screen_name, what, new)) + if cursor == 0: + break + fail = Fail() + return user_ids + + +def rate_limit_status(twitter): + """Print current Twitter API rate limit status.""" + rls = twitter.application.rate_limit_status() + print("Remaining API requests: %i/%i (interval limit)" + % (rls.rate_limit_remaining, rls.rate_limit_limit)) + print("Next reset in %is (%s)" + % (int(rls.rate_limit_reset - time.time()), + time.asctime(time.localtime(rls.rate_limit_reset)))) + +def main(args=sys.argv[1:]): + options = { + 'oauth': False, + 'followers': True, + 'api-rate': False, + 'show_id': False + } + try: + parse_args(args, options) + except GetoptError as e: + err("I can't do that, %s." 
% e) + raise SystemExit(1) + + # exit if no user or given, except if asking for API rate + if not options['extra_args'] and not options['api-rate']: + print(__doc__) + raise SystemExit(1) + + # authenticate using OAuth, asking for token if necessary + if options['oauth']: + oauth_filename = (os.getenv("HOME", "") + os.sep + + ".twitter-follow_oauth") + if not os.path.exists(oauth_filename): + oauth_dance("Twitter-Follow", CONSUMER_KEY, CONSUMER_SECRET, + oauth_filename) + oauth_token, oauth_token_secret = read_token_file(oauth_filename) + auth = OAuth(oauth_token, oauth_token_secret, CONSUMER_KEY, + CONSUMER_SECRET) + else: + auth = NoAuth() + + twitter = Twitter(auth=auth, api_version='1.1', domain='api.twitter.com') + + if options['api-rate']: + rate_limit_status(twitter) + return + + # obtain list of followers (or following) for every given user + for user in options['extra_args']: + user_ids, users = [], {} + try: + user_ids = follow(twitter, user, options['followers']) + users = lookup(twitter, user_ids) + except KeyboardInterrupt as e: + err() + err("Interrupted.") + raise SystemExit(1) + + for uid in user_ids: + if options['show_id']: + try: + print(str(uid) + "\t" + users[uid].encode("utf-8")) + except KeyError: + pass + + else: + try: + print(users[uid].encode("utf-8")) + except KeyError: + pass + + # print total on stderr to separate from user list on stdout + if options['followers']: + err("Total followers for %s: %i" % (user, len(user_ids))) + else: + err("Total users %s is following: %i" % (user, len(user_ids))) diff --git a/env/lib/python3.6/site-packages/twitter/ircbot.py b/env/lib/python3.6/site-packages/twitter/ircbot.py new file mode 100644 index 0000000..6091417 --- /dev/null +++ b/env/lib/python3.6/site-packages/twitter/ircbot.py @@ -0,0 +1,365 @@ +""" +twitterbot + + A twitter IRC bot. Twitterbot connected to an IRC server and idles in + a channel, polling a twitter account and broadcasting all updates to + friends. + +USAGE + + twitterbot [config_file] + +CONFIG_FILE + + The config file is an ini-style file that must contain the following: + +[irc] +server: +port: +nick: +channel: +prefixes: + +[twitter] +oauth_token_file: + + + If no config file is given "twitterbot.ini" will be used by default. + + The channel argument can accept multiple channels separated by commas. + + The default token file is ~/.twitterbot_oauth. + + The default prefix type is 'cats'. You can also use 'none'. 
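+
+  A minimal example (all values are placeholders; port, nick, prefixes and
+  oauth_token_file fall back to built-in defaults if omitted):
+
+[irc]
+server: irc.example.net
+port: 6667
+nick: twitterbot
+channel: #example
+prefixes: cats
+
+[twitter]
+oauth_token_file: /home/someuser/.twitterbot_oauth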
+ +""" + +from __future__ import print_function + +BOT_VERSION = "TwitterBot 1.9.1 (http://mike.verdone.ca/twitter)" + +CONSUMER_KEY = "XryIxN3J2ACaJs50EizfLQ" +CONSUMER_SECRET = "j7IuDCNjftVY8DBauRdqXs4jDl5Fgk1IJRag8iE" + +IRC_BOLD = chr(0x02) +IRC_ITALIC = chr(0x16) +IRC_UNDERLINE = chr(0x1f) +IRC_REGULAR = chr(0x0f) + +import sys +import time +from datetime import datetime, timedelta +from email.utils import parsedate +try: + from configparser import ConfigParser +except ImportError: + from ConfigParser import ConfigParser +from heapq import heappop, heappush +import traceback +import os +import os.path + +from .api import Twitter, TwitterError +from .oauth import OAuth, read_token_file +from .oauth_dance import oauth_dance +from .util import htmlentitydecode + +PREFIXES = dict( + cats=dict( + new_tweet="=^_^= ", + error="=O_o= ", + inform="=o_o= " + ), + none=dict( + new_tweet="" + ), + ) +ACTIVE_PREFIXES=dict() + +def get_prefix(prefix_typ=None): + return ACTIVE_PREFIXES.get(prefix_typ, ACTIVE_PREFIXES.get('new_tweet', '')) + + +try: + import irclib +except ImportError: + raise ImportError( + "This module requires python irclib available from " + + "https://github.com/sixohsix/python-irclib/zipball/python-irclib3-0.4.8") + +OAUTH_FILE = os.environ.get('HOME', os.environ.get('USERPROFILE', '')) + os.sep + '.twitterbot_oauth' + +def debug(msg): + # uncomment this for debug text stuff + # print(msg, file=sys.stdout) + pass + +class SchedTask(object): + def __init__(self, task, delta): + self.task = task + self.delta = delta + self.next = time.time() + + def __repr__(self): + return "" %( + self.task.__name__, self.__next__, self.delta) + + def __lt__(self, other): + return self.next < other.next + + def __call__(self): + return self.task() + +class Scheduler(object): + def __init__(self, tasks): + self.task_heap = [] + for task in tasks: + heappush(self.task_heap, task) + + def next_task(self): + now = time.time() + task = heappop(self.task_heap) + wait = task.next - now + task.next = now + task.delta + heappush(self.task_heap, task) + if (wait > 0): + time.sleep(wait) + task() + #debug("tasks: " + str(self.task_heap)) + + def run_forever(self): + while True: + self.next_task() + + +class TwitterBot(object): + def __init__(self, configFilename): + self.configFilename = configFilename + self.config = load_config(self.configFilename) + + global ACTIVE_PREFIXES + ACTIVE_PREFIXES = PREFIXES[self.config.get('irc', 'prefixes')] + + oauth_file = self.config.get('twitter', 'oauth_token_file') + if not os.path.exists(oauth_file): + oauth_dance("IRC Bot", CONSUMER_KEY, CONSUMER_SECRET, oauth_file) + oauth_token, oauth_secret = read_token_file(oauth_file) + + self.twitter = Twitter( + auth=OAuth( + oauth_token, oauth_secret, CONSUMER_KEY, CONSUMER_SECRET), + domain='api.twitter.com') + + self.irc = irclib.IRC() + self.irc.add_global_handler('privmsg', self.handle_privmsg) + self.irc.add_global_handler('ctcp', self.handle_ctcp) + self.irc.add_global_handler('umode', self.handle_umode) + self.ircServer = self.irc.server() + + self.sched = Scheduler( + (SchedTask(self.process_events, 1), + SchedTask(self.check_statuses, 120))) + self.lastUpdate = (datetime.utcnow() - timedelta(minutes=10)).utctimetuple() + + def check_statuses(self): + debug("In check_statuses") + try: + updates = reversed(self.twitter.statuses.home_timeline()) + except Exception as e: + print("Exception while querying twitter:", file=sys.stderr) + traceback.print_exc(file=sys.stderr) + return + + nextLastUpdate = self.lastUpdate + 
for update in updates: + crt = parsedate(update['created_at']) + if (crt > nextLastUpdate): + text = (htmlentitydecode( + update['text'].replace('\n', ' ')) + .encode('utf8', 'replace')) + + # Skip updates beginning with @ + # TODO This would be better if we only ignored messages + # to people who are not on our following list. + if not text.startswith(b"@"): + msg = "%s %s%s%s %s" %( + get_prefix(), + IRC_BOLD, update['user']['screen_name'], + IRC_BOLD, text.decode('utf8')) + self.privmsg_channels(msg) + + nextLastUpdate = crt + + self.lastUpdate = nextLastUpdate + + def process_events(self): + self.irc.process_once() + + def handle_privmsg(self, conn, evt): + debug('got privmsg') + args = evt.arguments()[0].split(' ') + try: + if (not args): + return + if (args[0] == 'follow' and args[1:]): + self.follow(conn, evt, args[1]) + elif (args[0] == 'unfollow' and args[1:]): + self.unfollow(conn, evt, args[1]) + else: + conn.privmsg( + evt.source().split('!')[0], + "%sHi! I'm Twitterbot! you can (follow " + ") to make me follow a user or " + "(unfollow ) to make me stop." % + get_prefix()) + except Exception: + traceback.print_exc(file=sys.stderr) + + def handle_ctcp(self, conn, evt): + args = evt.arguments() + source = evt.source().split('!')[0] + if (args): + if args[0] == 'VERSION': + conn.ctcp_reply(source, "VERSION " + BOT_VERSION) + elif args[0] == 'PING': + conn.ctcp_reply(source, "PING") + elif args[0] == 'CLIENTINFO': + conn.ctcp_reply(source, "CLIENTINFO PING VERSION CLIENTINFO") + + def handle_umode(self, conn, evt): + """ + QuakeNet ignores all your commands until after the MOTD. This + handler defers joining until after it sees a magic line. It + also tries to join right after connect, but this will just + make it join again which should be safe. + """ + args = evt.arguments() + if (args and args[0] == '+i'): + channels = self.config.get('irc', 'channel').split(',') + for channel in channels: + self.ircServer.join(channel) + + def privmsg_channels(self, msg): + return_response=True + channels=self.config.get('irc','channel').split(',') + return self.ircServer.privmsg_many(channels, msg.encode('utf8')) + + def follow(self, conn, evt, name): + userNick = evt.source().split('!')[0] + friends = [x['name'] for x in self.twitter.statuses.friends()] + debug("Current friends: %s" %(friends)) + if (name in friends): + conn.privmsg( + userNick, + "%sI'm already following %s." %(get_prefix('error'), name)) + else: + try: + self.twitter.friendships.create(screen_name=name) + except TwitterError: + conn.privmsg( + userNick, + "%sI can't follow that user. Are you sure the name is correct?" %( + get_prefix('error') + )) + return + conn.privmsg( + userNick, + "%sOkay! I'm now following %s." %(get_prefix('followed'), name)) + self.privmsg_channels( + "%s%s has asked me to start following %s" %( + get_prefix('inform'), userNick, name)) + + def unfollow(self, conn, evt, name): + userNick = evt.source().split('!')[0] + friends = [x['name'] for x in self.twitter.statuses.friends()] + debug("Current friends: %s" %(friends)) + if (name not in friends): + conn.privmsg( + userNick, + "%sI'm not following %s." %(get_prefix('error'), name)) + else: + self.twitter.friendships.destroy(screen_name=name) + conn.privmsg( + userNick, + "%sOkay! I've stopped following %s." 
%( + get_prefix('stop_follow'), name)) + self.privmsg_channels( + "%s%s has asked me to stop following %s" %( + get_prefix('inform'), userNick, name)) + + def _irc_connect(self): + self.ircServer.connect( + self.config.get('irc', 'server'), + self.config.getint('irc', 'port'), + self.config.get('irc', 'nick')) + channels=self.config.get('irc', 'channel').split(',') + for channel in channels: + self.ircServer.join(channel) + + def run(self): + self._irc_connect() + + while True: + try: + self.sched.run_forever() + except KeyboardInterrupt: + break + except TwitterError: + # twitter.com is probably down because it + # sucks. ignore the fault and keep going + pass + except irclib.ServerNotConnectedError: + # Try and reconnect to IRC. + self._irc_connect() + + +def load_config(filename): + # Note: Python ConfigParser module has the worst interface in the + # world. Mega gross. + cp = ConfigParser() + cp.add_section('irc') + cp.set('irc', 'port', '6667') + cp.set('irc', 'nick', 'twitterbot') + cp.set('irc', 'prefixes', 'cats') + cp.add_section('twitter') + cp.set('twitter', 'oauth_token_file', OAUTH_FILE) + + cp.read((filename,)) + + # attempt to read these properties-- they are required + cp.get('twitter', 'oauth_token_file'), + cp.get('irc', 'server') + cp.getint('irc', 'port') + cp.get('irc', 'nick') + cp.get('irc', 'channel') + + return cp + +# So there was a joke here about the twitter business model +# but I got rid of it. Not because I want this codebase to +# be "professional" in any way, but because someone forked +# this and deleted the comment because they couldn't take +# a joke. Hi guy! +# +# Fact: The number one use of Google Code is to look for that +# comment in the Linux kernel that goes "FUCK me gently with +# a chainsaw." Pretty sure Linus himself wrote it. + +def main(): + configFilename = "twitterbot.ini" + if (sys.argv[1:]): + configFilename = sys.argv[1] + + try: + if not os.path.exists(configFilename): + raise Exception() + load_config(configFilename) + except Exception as e: + print("Error while loading ini file %s" %( + configFilename), file=sys.stderr) + print(e, file=sys.stderr) + print(__doc__, file=sys.stderr) + sys.exit(1) + + bot = TwitterBot(configFilename) + return bot.run() diff --git a/env/lib/python3.6/site-packages/twitter/logger.py b/env/lib/python3.6/site-packages/twitter/logger.py new file mode 100644 index 0000000..e18d7a0 --- /dev/null +++ b/env/lib/python3.6/site-packages/twitter/logger.py @@ -0,0 +1,107 @@ +""" +twitter-log - Twitter Logger/Archiver + +USAGE: + + twitter-log [max_id] + +DESCRIPTION: + + Produce a complete archive in text form of a user's tweets. The + archive format is: + + screen_name + Date: + [In-Reply-To: a_tweet_id] + + Tweet text possibly spanning multiple lines with + each line indented by four spaces. + + + Each tweet is separated by two blank lines. 
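+
+    For example (all values are illustrative):
+
+        someuser 1234567890123456789
+        Date: Mon Sep 24 03:35:21 +0000 2012
+        In-Reply-To: 1234567890123456788
+
+            This is the text of the tweet, possibly wrapped over
+            several lines and indented by four spaces.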
+ +""" + +from __future__ import print_function + +import sys +import os +from time import sleep + +from .api import Twitter, TwitterError +from .cmdline import CONSUMER_KEY, CONSUMER_SECRET +from .auth import NoAuth +from .oauth import OAuth, write_token_file, read_token_file +from .oauth_dance import oauth_dance +from .util import printNicely + +# Registered by @sixohsix +CONSUMER_KEY = "OifqLIQIufeY9znQCkbvg" +CONSUMER_SECRET = "IedFvi0JitR9yaYw9HwcCCEy4KYaLxf4p4rHRqGgX80" +OAUTH_FILENAME = os.environ.get('HOME', os.environ.get('USERPROFILE', '')) + os.sep + '.twitter_log_oauth' + +def log_debug(msg): + print(msg, file=sys.stderr) + +def get_tweets(twitter, screen_name, max_id=None): + kwargs = dict(count=3200, screen_name=screen_name) + if max_id: + kwargs['max_id'] = max_id + + n_tweets = 0 + tweets = twitter.statuses.user_timeline(**kwargs) + for tweet in tweets: + if tweet['id'] == max_id: + continue + print("%s %s\nDate: %s" % (tweet['user']['screen_name'], + tweet['id'], + tweet['created_at'])) + if tweet.get('in_reply_to_status_id'): + print("In-Reply-To: %s" % tweet['in_reply_to_status_id']) + print() + for line in tweet['text'].splitlines(): + printNicely(' ' + line + '\n') + print() + print() + max_id = tweet['id'] + n_tweets += 1 + return n_tweets, max_id + +def main(args=sys.argv[1:]): + if not args: + print(__doc__) + return 1 + + if not os.path.exists(OAUTH_FILENAME): + oauth_dance( + "the Python Twitter Logger", CONSUMER_KEY, CONSUMER_SECRET, + OAUTH_FILENAME) + + oauth_token, oauth_token_secret = read_token_file(OAUTH_FILENAME) + + twitter = Twitter( + auth=OAuth( + oauth_token, oauth_token_secret, CONSUMER_KEY, CONSUMER_SECRET), + domain='api.twitter.com') + + screen_name = args[0] + + if args[1:]: + max_id = args[1] + else: + max_id = None + + n_tweets = 0 + while True: + try: + tweets_processed, max_id = get_tweets(twitter, screen_name, max_id) + n_tweets += tweets_processed + log_debug("Processed %i tweets (max_id %s)" %(n_tweets, max_id)) + if tweets_processed == 0: + log_debug("That's it, we got all the tweets we could. Done.") + break + except TwitterError as e: + log_debug("Twitter bailed out. I'm going to sleep a bit then try again") + sleep(3) + + return 0 diff --git a/env/lib/python3.6/site-packages/twitter/oauth.py b/env/lib/python3.6/site-packages/twitter/oauth.py new file mode 100644 index 0000000..b0d7f41 --- /dev/null +++ b/env/lib/python3.6/site-packages/twitter/oauth.py @@ -0,0 +1,144 @@ +""" +Visit the Twitter developer page and create a new application: + + https://dev.twitter.com/apps/new + +This will get you a CONSUMER_KEY and CONSUMER_SECRET. + +When users run your application they have to authenticate your app +with their Twitter account. A few HTTP calls to twitter are required +to do this. Please see the twitter.oauth_dance module to see how this +is done. If you are making a command-line app, you can use the +oauth_dance() function directly. + +Performing the "oauth dance" gets you an ouath token and oauth secret +that authenticate the user with Twitter. You should save these for +later so that the user doesn't have to do the oauth dance again. + +read_token_file and write_token_file are utility methods to read and +write OAuth token and secret key values. The values are stored as +strings in the file. Not terribly exciting. + +Finally, you can use the OAuth authenticator to connect to Twitter. 
In +code it all goes like this:: + + from twitter import * + + MY_TWITTER_CREDS = os.path.expanduser('~/.my_app_credentials') + if not os.path.exists(MY_TWITTER_CREDS): + oauth_dance("My App Name", CONSUMER_KEY, CONSUMER_SECRET, + MY_TWITTER_CREDS) + + oauth_token, oauth_secret = read_token_file(MY_TWITTER_CREDS) + + twitter = Twitter(auth=OAuth( + oauth_token, oauth_token_secret, CONSUMER_KEY, CONSUMER_SECRET)) + + # Now work with Twitter + twitter.statuses.update(status='Hello, world!') + +""" + +from __future__ import print_function + +from random import getrandbits +from time import time + +from .util import PY_3_OR_HIGHER + +try: + import urllib.parse as urllib_parse + from urllib.parse import urlencode +except ImportError: + import urllib2 as urllib_parse + from urllib import urlencode + +import hashlib +import hmac +import base64 + +from .auth import Auth, MissingCredentialsError + + +def write_token_file(filename, oauth_token, oauth_token_secret): + """ + Write a token file to hold the oauth token and oauth token secret. + """ + oauth_file = open(filename, 'w') + print(oauth_token, file=oauth_file) + print(oauth_token_secret, file=oauth_file) + oauth_file.close() + +def read_token_file(filename): + """ + Read a token file and return the oauth token and oauth token secret. + """ + f = open(filename) + return f.readline().strip(), f.readline().strip() + + +class OAuth(Auth): + """ + An OAuth authenticator. + """ + def __init__(self, token, token_secret, consumer_key, consumer_secret): + """ + Create the authenticator. If you are in the initial stages of + the OAuth dance and don't yet have a token or token_secret, + pass empty strings for these params. + """ + self.token = token + self.token_secret = token_secret + self.consumer_key = consumer_key + self.consumer_secret = consumer_secret + + if token_secret is None or consumer_secret is None: + raise MissingCredentialsError( + 'You must supply strings for token_secret and consumer_secret, not None.') + + def encode_params(self, base_url, method, params): + params = params.copy() + + if self.token: + params['oauth_token'] = self.token + + params['oauth_consumer_key'] = self.consumer_key + params['oauth_signature_method'] = 'HMAC-SHA1' + params['oauth_version'] = '1.0' + params['oauth_timestamp'] = str(int(time())) + params['oauth_nonce'] = str(getrandbits(64)) + + enc_params = urlencode_noplus(sorted(params.items())) + + key = self.consumer_secret + "&" + urllib_parse.quote(self.token_secret, safe='~') + + message = '&'.join( + urllib_parse.quote(i, safe='~') for i in [method.upper(), base_url, enc_params]) + + signature = (base64.b64encode(hmac.new( + key.encode('ascii'), message.encode('ascii'), hashlib.sha1) + .digest())) + return enc_params + "&" + "oauth_signature=" + urllib_parse.quote(signature, safe='~') + + def generate_headers(self): + return {} + +# apparently contrary to the HTTP RFCs, spaces in arguments must be encoded as +# %20 rather than '+' when constructing an OAuth signature (and therefore +# also in the request itself.) +# So here is a specialized version which does exactly that. 
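+# For example, urlencode([('q', 'a b')]) yields 'q=a+b', while
+# urlencode_noplus([('q', 'a b')]) yields 'q=a%20b' (and a literal '~' in a
+# value is left unescaped).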
+# In Python2, since there is no safe option for urlencode, we force it by hand +def urlencode_noplus(query): + if not PY_3_OR_HIGHER: + new_query = [] + TILDE = '____TILDE-PYTHON-TWITTER____' + for k,v in query: + if type(k) is unicode: k = k.encode('utf-8') + k = str(k).replace("~", TILDE) + if type(v) is unicode: v = v.encode('utf-8') + v = str(v).replace("~", TILDE) + new_query.append((k, v)) + query = new_query + return urlencode(query).replace(TILDE, "~").replace("+", "%20") + + return urlencode(query, safe='~').replace("+", "%20") diff --git a/env/lib/python3.6/site-packages/twitter/oauth2.py b/env/lib/python3.6/site-packages/twitter/oauth2.py new file mode 100644 index 0000000..330d88c --- /dev/null +++ b/env/lib/python3.6/site-packages/twitter/oauth2.py @@ -0,0 +1,93 @@ +""" +Twitter only supports the application-only flow of OAuth2 for certain +API endpoints. This OAuth2 authenticator only supports the application-only +flow right now. + +To authenticate with OAuth2, visit the Twitter developer page and create a new +application: + + https://dev.twitter.com/apps/new + +This will get you a CONSUMER_KEY and CONSUMER_SECRET. + +Exchange your CONSUMER_KEY and CONSUMER_SECRET for a bearer token using the +oauth2_dance function. + +Finally, you can use the OAuth2 authenticator and your bearer token to connect +to Twitter. In code it goes like this:: + + twitter = Twitter(auth=OAuth2(bearer_token=BEARER_TOKEN)) + + # Now work with Twitter + twitter.search.tweets(q='keyword') + +""" + +from __future__ import print_function + +try: + from urllib.parse import quote, urlencode +except ImportError: + from urllib import quote, urlencode + +from base64 import b64encode +from .auth import Auth, MissingCredentialsError + +def write_bearer_token_file(filename, oauth2_bearer_token): + """ + Write a token file to hold the oauth2 bearer token. + """ + oauth_file = open(filename, 'w') + print(oauth2_bearer_token, file=oauth_file) + oauth_file.close() + +def read_bearer_token_file(filename): + """ + Read a token file and return the oauth2 bearer token. + """ + f = open(filename) + bearer_token = f.readline().strip() + f.close() + return bearer_token + +class OAuth2(Auth): + """ + An OAuth2 application-only authenticator. + """ + def __init__(self, consumer_key=None, consumer_secret=None, + bearer_token=None): + """ + Create an authenticator. You can supply consumer_key and + consumer_secret if you are requesting a bearer_token. Otherwise + you must supply the bearer_token. 
+ """ + self.bearer_token = bearer_token + self.consumer_key = consumer_key + self.consumer_secret = consumer_secret + + if not (bearer_token or (consumer_key and consumer_secret)): + raise MissingCredentialsError( + 'You must supply either a bearer token, or both a ' + 'consumer_key and a consumer_secret.') + + def encode_params(self, base_url, method, params): + return urlencode(params) + + def generate_headers(self): + if self.bearer_token: + headers = { + b'Authorization': 'Bearer {0}'.format( + self.bearer_token).encode('utf8') + } + else: + headers = { + b'Content-Type': (b'application/x-www-form-urlencoded;' + b'charset=UTF-8'), + b'Authorization': 'Basic {0}'.format( + b64encode('{0}:{1}'.format( + quote(self.consumer_key), + quote(self.consumer_secret)).encode('utf8') + ).decode('utf8') + ).encode('utf8') + } + return headers diff --git a/env/lib/python3.6/site-packages/twitter/oauth_dance.py b/env/lib/python3.6/site-packages/twitter/oauth_dance.py new file mode 100644 index 0000000..4b6eead --- /dev/null +++ b/env/lib/python3.6/site-packages/twitter/oauth_dance.py @@ -0,0 +1,119 @@ +from __future__ import print_function + +import webbrowser +import time + +from .api import Twitter, json +from .oauth import OAuth, write_token_file +from .oauth2 import OAuth2, write_bearer_token_file + +try: + _input = raw_input +except NameError: + _input = input + + +def oauth2_dance(consumer_key, consumer_secret, token_filename=None): + """ + Perform the OAuth2 dance to transform a consumer key and secret into a + bearer token. + + If a token_filename is given, the bearer token will be written to + the file. + """ + twitter = Twitter( + auth=OAuth2(consumer_key=consumer_key, consumer_secret=consumer_secret), + format="", + api_version="") + token = json.loads(twitter.oauth2.token(grant_type="client_credentials"))["access_token"] + if token_filename: + write_bearer_token_file(token_filename, token) + return token + +def get_oauth_pin(oauth_url, open_browser=True): + """ + Prompt the user for the OAuth PIN. + + By default, a browser will open the authorization page. If `open_browser` + is false, the authorization URL will just be printed instead. + """ + + print("Opening: %s\n" % oauth_url) + + if open_browser: + print(""" +In the web browser window that opens please choose to Allow +access. Copy the PIN number that appears on the next page and paste or +type it here: + """) + + try: + r = webbrowser.open(oauth_url) + time.sleep(2) # Sometimes the last command can print some + # crap. Wait a bit so it doesn't mess up the next + # prompt. + if not r: + raise Exception() + except: + print(""" +Uh, I couldn't open a browser on your computer. Please go here to get +your PIN: + +""" + oauth_url) + + else: # not using a browser + print(""" +Please go to the following URL, authorize the app, and copy the PIN: + +""" + oauth_url) + + return _input("Please enter the PIN: ").strip() + + +def oauth_dance(app_name, consumer_key, consumer_secret, token_filename=None, open_browser=True): + """ + Perform the OAuth dance with some command-line prompts. Return the + oauth_token and oauth_token_secret. + + Provide the name of your app in `app_name`, your consumer_key, and + consumer_secret. This function will let the user allow your app to access + their Twitter account using PIN authentication. + + If a `token_filename` is given, the oauth tokens will be written to + the file. + + By default, this function attempts to open a browser to request access. 
If + `open_browser` is false it will just print the URL instead. + """ + print("Hi there! We're gonna get you all set up to use %s." % app_name) + twitter = Twitter( + auth=OAuth('', '', consumer_key, consumer_secret), + format='', api_version=None) + oauth_token, oauth_token_secret = parse_oauth_tokens( + twitter.oauth.request_token(oauth_callback="oob")) + oauth_url = ('https://api.twitter.com/oauth/authorize?oauth_token=' + + oauth_token) + oauth_verifier = get_oauth_pin(oauth_url, open_browser) + + twitter = Twitter( + auth=OAuth( + oauth_token, oauth_token_secret, consumer_key, consumer_secret), + format='', api_version=None) + oauth_token, oauth_token_secret = parse_oauth_tokens( + twitter.oauth.access_token(oauth_verifier=oauth_verifier)) + if token_filename: + write_token_file( + token_filename, oauth_token, oauth_token_secret) + print() + print("That's it! Your authorization keys have been written to %s." % ( + token_filename)) + return oauth_token, oauth_token_secret + +def parse_oauth_tokens(result): + for r in result.split('&'): + k, v = r.split('=') + if k == 'oauth_token': + oauth_token = v + elif k == 'oauth_token_secret': + oauth_token_secret = v + return oauth_token, oauth_token_secret diff --git a/env/lib/python3.6/site-packages/twitter/stream.py b/env/lib/python3.6/site-packages/twitter/stream.py new file mode 100644 index 0000000..884cac9 --- /dev/null +++ b/env/lib/python3.6/site-packages/twitter/stream.py @@ -0,0 +1,293 @@ +# encoding: utf-8 +from __future__ import unicode_literals + +from .util import PY_3_OR_HIGHER + +if PY_3_OR_HIGHER: + import urllib.request as urllib_request + import urllib.error as urllib_error +else: + import urllib2 as urllib_request + import urllib2 as urllib_error +import json +from ssl import SSLError +import socket +import codecs +import sys, select, time + +from .api import TwitterCall, wrap_response, TwitterHTTPError + +CRLF = b'\r\n' +MIN_SOCK_TIMEOUT = 0.0 # Apparenty select with zero wait is okay! +MAX_SOCK_TIMEOUT = 10.0 +HEARTBEAT_TIMEOUT = 90.0 + +Timeout = {'timeout': True} +Hangup = {'hangup': True} +DecodeError = {'hangup': True, 'decode_error': True} +HeartbeatTimeout = {'hangup': True, 'heartbeat_timeout': True} + + +class HttpChunkDecoder(object): + + def __init__(self): + self.buf = bytearray() + self.munch_crlf = False + + def decode(self, data): # -> (bytearray, end_of_stream, decode_error) + chunks = [] + buf = self.buf + munch_crlf = self.munch_crlf + end_of_stream = False + decode_error = False + buf.extend(data) + while True: + if munch_crlf: + # Dang, Twitter, you crazy. Twitter only sends a terminating + # CRLF at the beginning of the *next* message. 
+ if len(buf) >= 2: + buf = buf[2:] + munch_crlf = False + else: + break + + header_end_pos = buf.find(CRLF) + if header_end_pos == -1: + break + + header = buf[:header_end_pos] + data_start_pos = header_end_pos + 2 + try: + chunk_len = int(header.decode('ascii'), 16) + except ValueError: + decode_error = True + break + + if chunk_len == 0: + end_of_stream = True + break + + data_end_pos = data_start_pos + chunk_len + + if len(buf) >= data_end_pos: + chunks.append(buf[data_start_pos:data_end_pos]) + buf = buf[data_end_pos:] + munch_crlf = True + else: + break + self.buf = buf + self.munch_crlf = munch_crlf + return bytearray().join(chunks), end_of_stream, decode_error + + +class JsonDecoder(object): + + def __init__(self): + self.buf = "" + self.raw_decode = json.JSONDecoder().raw_decode + + def decode(self, data): + chunks = [] + buf = self.buf + data + while True: + try: + buf = buf.lstrip() + res, ptr = self.raw_decode(buf) + buf = buf[ptr:] + chunks.append(res) + except ValueError: + break + self.buf = buf + return chunks + + +class Timer(object): + + def __init__(self, timeout): + # If timeout is None, we never expire. + self.timeout = timeout + self.reset() + + def reset(self): + self.time = time.time() + + def expired(self): + """ + If expired, reset the timer and return True. + """ + if self.timeout is None: + return False + elif time.time() - self.time > self.timeout: + self.reset() + return True + return False + + +class SockReader(object): + def __init__(self, sock, sock_timeout): + self.sock = sock + self.sock_timeout = sock_timeout + + def read(self): + try: + ready_to_read = select.select([self.sock], [], [], self.sock_timeout)[0] + if ready_to_read: + return self.sock.read() + except SSLError as e: + # Code 2 is error from a non-blocking read of an empty buffer. 
+ if e.errno != 2: + raise + return bytearray() + + +class TwitterJSONIter(object): + + def __init__(self, handle, uri, arg_data, block, timeout, heartbeat_timeout): + self.handle = handle + self.uri = uri + self.arg_data = arg_data + self.timeout_token = Timeout + self.timeout = None + self.heartbeat_timeout = HEARTBEAT_TIMEOUT + if timeout and timeout > 0: + self.timeout = float(timeout) + elif not (block or timeout): + self.timeout_token = None + self.timeout = MIN_SOCK_TIMEOUT + if heartbeat_timeout and heartbeat_timeout > 0: + self.heartbeat_timeout = float(heartbeat_timeout) + + def __iter__(self): + timeouts = [t for t in (self.timeout, self.heartbeat_timeout, MAX_SOCK_TIMEOUT) + if t is not None] + sock_timeout = min(*timeouts) + sock = self.handle.fp.raw._sock if PY_3_OR_HIGHER else self.handle.fp._sock.fp._sock + sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1) + headers = self.handle.headers + sock_reader = SockReader(sock, sock_timeout) + chunk_decoder = HttpChunkDecoder() + utf8_decoder = codecs.getincrementaldecoder("utf-8")() + json_decoder = JsonDecoder() + timer = Timer(self.timeout) + heartbeat_timer = Timer(self.heartbeat_timeout) + + while True: + # Decode all the things: + try: + data = sock_reader.read() + except SSLError: + yield Hangup + break + dechunked_data, end_of_stream, decode_error = chunk_decoder.decode(data) + unicode_data = utf8_decoder.decode(dechunked_data) + json_data = json_decoder.decode(unicode_data) + + # Yield data-like things: + for json_obj in json_data: + yield wrap_response(json_obj, headers) + + # Reset timers: + if dechunked_data: + heartbeat_timer.reset() + if json_data: + timer.reset() + + # Yield timeouts and special things: + if end_of_stream: + yield Hangup + break + if decode_error: + yield DecodeError + break + if heartbeat_timer.expired(): + yield HeartbeatTimeout + break + if timer.expired(): + yield self.timeout_token + + +def handle_stream_response(req, uri, arg_data, block, timeout, heartbeat_timeout): + try: + handle = urllib_request.urlopen(req,) + except urllib_error.HTTPError as e: + raise TwitterHTTPError(e, uri, 'json', arg_data) + return iter(TwitterJSONIter(handle, uri, arg_data, block, timeout, heartbeat_timeout)) + +class TwitterStream(TwitterCall): + """ + The TwitterStream object is an interface to the Twitter Stream + API. This can be used pretty much the same as the Twitter class + except the result of calling a method will be an iterator that + yields objects decoded from the stream. For example:: + + twitter_stream = TwitterStream(auth=OAuth(...)) + iterator = twitter_stream.statuses.sample() + + for tweet in iterator: + # ...do something with this tweet... + + Per default the ``TwitterStream`` object uses + [public streams](https://dev.twitter.com/docs/streaming-apis/streams/public). + If you want to use one of the other + [streaming APIs](https://dev.twitter.com/docs/streaming-apis), specify the URL + manually: + + - [Public streams](https://dev.twitter.com/docs/streaming-apis/streams/public): stream.twitter.com + - [User streams](https://dev.twitter.com/docs/streaming-apis/streams/user): userstream.twitter.com + - [Site streams](https://dev.twitter.com/docs/streaming-apis/streams/site): sitestream.twitter.com + + Note that you require the proper + [permissions](https://dev.twitter.com/docs/application-permission-model) to + access these streams. E.g. for direct messages your + [application](https://dev.twitter.com/apps) needs the "Read, Write & Direct + Messages" permission. 
+ + The following example demonstrates how to retrieve all new direct messages + from the user stream:: + + auth = OAuth( + consumer_key='[your consumer key]', + consumer_secret='[your consumer secret]', + token='[your token]', + token_secret='[your token secret]' + ) + twitter_userstream = TwitterStream(auth=auth, domain='userstream.twitter.com') + for msg in twitter_userstream.user(): + if 'direct_message' in msg: + print(msg['direct_message']['text']) + + The iterator will yield until the TCP connection breaks. When the + connection breaks, the iterator yields `{'hangup': True}`, and + raises `StopIteration` if iterated again. + + Similarly, if the stream does not produce heartbeats for more than + 90 seconds, the iterator yields `{'hangup': True, + 'heartbeat_timeout': True}`, and raises `StopIteration` if + iterated again. + + The `timeout` parameter controls the maximum time between + yields. If it is nonzero, then the iterator will yield either + stream data or `{'timeout': True}` within the timeout period. This + is useful if you want your program to do other stuff in between + waiting for tweets. + + The `block` parameter sets the stream to be fully non-blocking. In + this mode, the iterator always yields immediately. It returns + stream data, or `None`. Note that `timeout` supersedes this + argument, so it should also be set to `None` to use this mode. + """ + def __init__(self, domain="stream.twitter.com", secure=True, auth=None, + api_version='1.1', block=True, timeout=None, + heartbeat_timeout=90.0): + uriparts = (str(api_version),) + + class TwitterStreamCall(TwitterCall): + def _handle_response(self, req, uri, arg_data, _timeout=None): + return handle_stream_response( + req, uri, arg_data, block, + _timeout or timeout, heartbeat_timeout) + + TwitterCall.__init__( + self, auth=auth, format="json", domain=domain, + callable_cls=TwitterStreamCall, + secure=secure, uriparts=uriparts, timeout=timeout, gzip=False) diff --git a/env/lib/python3.6/site-packages/twitter/stream_example.py b/env/lib/python3.6/site-packages/twitter/stream_example.py new file mode 100644 index 0000000..15c0cc8 --- /dev/null +++ b/env/lib/python3.6/site-packages/twitter/stream_example.py @@ -0,0 +1,92 @@ +""" +Example program for the Stream API. This prints public status messages +from the "sample" stream as fast as possible. Use -h for help. 
+""" + +from __future__ import print_function + +import argparse + +from twitter.stream import TwitterStream, Timeout, HeartbeatTimeout, Hangup +from twitter.oauth import OAuth +from twitter.util import printNicely + + +def parse_arguments(): + + parser = argparse.ArgumentParser(description=__doc__ or "") + + parser.add_argument('-t', '--token', required=True, + help='The Twitter Access Token.') + parser.add_argument('-ts', '--token-secret', required=True, + help='The Twitter Access Token Secret.') + parser.add_argument('-ck', '--consumer-key', required=True, + help='The Twitter Consumer Key.') + parser.add_argument('-cs', '--consumer-secret', required=True, + help='The Twitter Consumer Secret.') + parser.add_argument('-us', '--user-stream', action='store_true', + help='Connect to the user stream endpoint.') + parser.add_argument('-ss', '--site-stream', action='store_true', + help='Connect to the site stream endpoint.') + parser.add_argument('-to', '--timeout', + help='Timeout for the stream (seconds).') + parser.add_argument('-ht', '--heartbeat-timeout', + help='Set heartbeat timeout.', default=90) + parser.add_argument('-nb', '--no-block', action='store_true', + help='Set stream to non-blocking.') + parser.add_argument('-tt', '--track-keywords', + help='Search the stream for specific text.') + return parser.parse_args() + + +def main(): + args = parse_arguments() + + # When using twitter stream you must authorize. + auth = OAuth(args.token, args.token_secret, + args.consumer_key, args.consumer_secret) + + # These arguments are optional: + stream_args = dict( + timeout=args.timeout, + block=not args.no_block, + heartbeat_timeout=args.heartbeat_timeout) + + query_args = dict() + if args.track_keywords: + query_args['track'] = args.track_keywords + + if args.user_stream: + stream = TwitterStream(auth=auth, domain='userstream.twitter.com', + **stream_args) + tweet_iter = stream.user(**query_args) + elif args.site_stream: + stream = TwitterStream(auth=auth, domain='sitestream.twitter.com', + **stream_args) + tweet_iter = stream.site(**query_args) + else: + stream = TwitterStream(auth=auth, **stream_args) + if args.track_keywords: + tweet_iter = stream.statuses.filter(**query_args) + else: + tweet_iter = stream.statuses.sample() + + # Iterate over the sample stream. + for tweet in tweet_iter: + # You must test that your tweet has text. It might be a delete + # or data message. + if tweet is None: + printNicely("-- None --") + elif tweet is Timeout: + printNicely("-- Timeout --") + elif tweet is HeartbeatTimeout: + printNicely("-- Heartbeat Timeout --") + elif tweet is Hangup: + printNicely("-- Hangup --") + elif tweet.get('text'): + printNicely(tweet['text']) + else: + printNicely("-- Some data: " + str(tweet)) + +if __name__ == '__main__': + main() diff --git a/env/lib/python3.6/site-packages/twitter/timezones.py b/env/lib/python3.6/site-packages/twitter/timezones.py new file mode 100644 index 0000000..b513e0e --- /dev/null +++ b/env/lib/python3.6/site-packages/twitter/timezones.py @@ -0,0 +1,81 @@ +# Retrieved from http://docs.python.org/2/library/datetime.html on 2013-05-24 +from datetime import tzinfo, timedelta, datetime + +ZERO = timedelta(0) +HOUR = timedelta(hours=1) + +# A UTC class. + +class UTC(tzinfo): + """UTC""" + + def utcoffset(self, dt): + return ZERO + + def tzname(self, dt): + return "UTC" + + def dst(self, dt): + return ZERO + +utc = UTC() + +# A class building tzinfo objects for fixed-offset time zones. 
+# Note that FixedOffset(0, "UTC") is a different way to build a +# UTC tzinfo object. + +class FixedOffset(tzinfo): + """Fixed offset in minutes east from UTC.""" + + def __init__(self, offset, name): + self.__offset = timedelta(minutes = offset) + self.__name = name + + def utcoffset(self, dt): + return self.__offset + + def tzname(self, dt): + return self.__name + + def dst(self, dt): + return ZERO + +# A class capturing the platform's idea of local time. + +import time as _time + +STDOFFSET = timedelta(seconds = -_time.timezone) +if _time.daylight: + DSTOFFSET = timedelta(seconds = -_time.altzone) +else: + DSTOFFSET = STDOFFSET + +DSTDIFF = DSTOFFSET - STDOFFSET + +class LocalTimezone(tzinfo): + + def utcoffset(self, dt): + if self._isdst(dt): + return DSTOFFSET + else: + return STDOFFSET + + def dst(self, dt): + if self._isdst(dt): + return DSTDIFF + else: + return ZERO + + def tzname(self, dt): + return _time.tzname[self._isdst(dt)] + + def _isdst(self, dt): + tt = (dt.year, dt.month, dt.day, + dt.hour, dt.minute, dt.second, + dt.weekday(), 0, 0) + stamp = _time.mktime(tt) + tt = _time.localtime(stamp) + return tt.tm_isdst > 0 + +Local = LocalTimezone() + diff --git a/env/lib/python3.6/site-packages/twitter/twitter_globals.py b/env/lib/python3.6/site-packages/twitter/twitter_globals.py new file mode 100644 index 0000000..2f94dbc --- /dev/null +++ b/env/lib/python3.6/site-packages/twitter/twitter_globals.py @@ -0,0 +1,42 @@ +''' + .. data:: POST_ACTIONS + List of twitter method names that require the use of POST +''' + +POST_ACTIONS = [ + + # Status Methods + 'update', 'retweet', 'update_with_media', 'statuses/lookup', + + # Direct Message Methods + 'new', + + # Account Methods + 'update_profile_image', 'update_delivery_device', 'update_profile', + 'update_profile_background_image', 'update_profile_colors', + 'update_location', 'end_session', 'settings', + 'update_profile_banner', 'remove_profile_banner', + + # Notification Methods + 'leave', 'follow', + + # Status Methods, Block Methods, Direct Message Methods, + # Friendship Methods, Favorite Methods + 'destroy', 'destroy_all', + + # Block Methods, Friendship Methods, Favorite Methods + 'create', 'create_all', + + # Users Methods + 'users/lookup', 'report_spam', + + # Streaming Methods + 'filter', 'user', 'site', + + # OAuth Methods + 'token', 'access_token', + 'request_token', 'invalidate_token', + + # Upload Methods + 'media/upload', 'media/metadata/create' +] diff --git a/env/lib/python3.6/site-packages/twitter/util.py b/env/lib/python3.6/site-packages/twitter/util.py new file mode 100644 index 0000000..5a5f507 --- /dev/null +++ b/env/lib/python3.6/site-packages/twitter/util.py @@ -0,0 +1,177 @@ +""" +Internal utility functions. + +`htmlentitydecode` came from here: + http://wiki.python.org/moin/EscapingHtml +""" + +from __future__ import print_function + +import contextlib +import re +import sys +import textwrap +import time +import socket + +PY_3_OR_HIGHER = sys.version_info >= (3, 0) + +try: + from html.entities import name2codepoint + unichr = chr + import urllib.request as urllib2 + import urllib.parse as urlparse +except ImportError: + from htmlentitydefs import name2codepoint + import urllib2 + import urlparse + +def htmlentitydecode(s): + return re.sub( + '&(%s);' % '|'.join(name2codepoint), + lambda m: unichr(name2codepoint[m.group(1)]), s) + +def smrt_input(globals_, locals_, ps1=">>> ", ps2="... 
"): + inputs = [] + while True: + if inputs: + prompt = ps2 + else: + prompt = ps1 + inputs.append(input(prompt)) + try: + ret = eval('\n'.join(inputs), globals_, locals_) + if ret: + print(str(ret)) + return + except SyntaxError: + pass + +def printNicely(string): + if hasattr(sys.stdout, 'buffer'): + sys.stdout.buffer.write(string.encode('utf8')) + print() + sys.stdout.buffer.flush() + sys.stdout.flush() + else: + print(string.encode('utf8')) + +def actually_bytes(stringy): + if PY_3_OR_HIGHER: + if type(stringy) == bytes: + pass + elif type(stringy) != str: + stringy = str(stringy) + if type(stringy) == str: + stringy = stringy.encode("utf-8") + else: + if type(stringy) == str: + pass + elif type(stringy) != unicode: + stringy = str(stringy) + if type(stringy) == unicode: + stringy = stringy.encode("utf-8") + return stringy + +def err(msg=""): + print(msg, file=sys.stderr) + + +class Fail(object): + """A class to count fails during a repetitive task. + + Args: + maximum: An integer for the maximum of fails to allow. + exit: An integer for the exit code when maximum of fail is reached. + + Methods: + count: Count a fail, exit when maximum of fails is reached. + wait: Same as count but also sleep for a given time in seconds. + """ + def __init__(self, maximum=10, exit=1): + self.i = maximum + self.exit = exit + + def count(self): + self.i -= 1 + if self.i == 0: + err("Too many consecutive fails, exiting.") + raise SystemExit(self.exit) + + def wait(self, delay=0): + self.count() + if delay > 0: + time.sleep(delay) + + +def find_links(line): + """Find all links in the given line. The function returns a sprintf style + format string (with %s placeholders for the links) and a list of urls.""" + l = line.replace("%", "%%") + regex = "(https?://[^ )]+)" + return ( + re.sub(regex, "%s", l), + [m.group(1) for m in re.finditer(regex, l)]) + +def follow_redirects(link, sites= None): + """Follow directs for the link as long as the redirects are on the given + sites and return the resolved link.""" + def follow(url): + return sites == None or urlparse.urlparse(url).hostname in sites + + class RedirectHandler(urllib2.HTTPRedirectHandler): + def __init__(self): + self.last_url = None + def redirect_request(self, req, fp, code, msg, hdrs, newurl): + self.last_url = newurl + if not follow(newurl): + return None + r = urllib2.HTTPRedirectHandler.redirect_request( + self, req, fp, code, msg, hdrs, newurl) + r.get_method = lambda : 'HEAD' + return r + + if not follow(link): + return link + redirect_handler = RedirectHandler() + opener = urllib2.build_opener(redirect_handler) + req = urllib2.Request(link) + req.get_method = lambda : 'HEAD' + try: + with contextlib.closing(opener.open(req,timeout=1)) as site: + return site.url + except: + return redirect_handler.last_url if redirect_handler.last_url else link + +def expand_line(line, sites): + """Expand the links in the line for the given sites.""" + try: + l = line.strip() + msg_format, links = find_links(l) + args = tuple(follow_redirects(l, sites) for l in links) + line = msg_format % args + except Exception as e: + try: + err("expanding line %s failed due to %s" % (line, unicode(e))) + except: + pass + return line + +def parse_host_list(list_of_hosts): + """Parse the comma separated list of hosts.""" + p = set( + m.group(1) for m in re.finditer("\s*([^,\s]+)\s*,?\s*", list_of_hosts)) + return p + + +def align_text(text, left_margin=17, max_width=160): + lines = [] + for line in text.split('\n'): + temp_lines = textwrap.wrap(line, max_width - left_margin) 
+ temp_lines = [(' ' * left_margin + line) for line in temp_lines] + lines.append('\n'.join(temp_lines)) + ret = '\n'.join(lines) + return ret.lstrip() + + +__all__ = ["htmlentitydecode", "smrt_input"] diff --git a/env/lib/python3.6/site-packages/urllib3-1.23.dist-info/DESCRIPTION.rst b/env/lib/python3.6/site-packages/urllib3-1.23.dist-info/DESCRIPTION.rst new file mode 100755 index 0000000..5654511 --- /dev/null +++ b/env/lib/python3.6/site-packages/urllib3-1.23.dist-info/DESCRIPTION.rst @@ -0,0 +1,1040 @@ +urllib3 +======= + +.. image:: https://travis-ci.org/urllib3/urllib3.svg?branch=master + :alt: Build status on Travis + :target: https://travis-ci.org/urllib3/urllib3 + +.. image:: https://img.shields.io/appveyor/ci/urllib3/urllib3/master.svg + :alt: Build status on AppVeyor + :target: https://ci.appveyor.com/project/urllib3/urllib3 + +.. image:: https://readthedocs.org/projects/urllib3/badge/?version=latest + :alt: Documentation Status + :target: https://urllib3.readthedocs.io/en/latest/ + +.. image:: https://img.shields.io/codecov/c/github/urllib3/urllib3.svg + :alt: Coverage Status + :target: https://codecov.io/gh/urllib3/urllib3 + +.. image:: https://img.shields.io/pypi/v/urllib3.svg?maxAge=86400 + :alt: PyPI version + :target: https://pypi.org/project/urllib3/ + +.. image:: https://www.bountysource.com/badge/tracker?tracker_id=192525 + :alt: Bountysource + :target: https://www.bountysource.com/trackers/192525-urllib3?utm_source=192525&utm_medium=shield&utm_campaign=TRACKER_BADGE + +.. image:: https://badges.gitter.im/python-urllib3/Lobby.svg + :alt: Gitter + :target: https://gitter.im/python-urllib3/Lobby?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge + +urllib3 is a powerful, *sanity-friendly* HTTP client for Python. Much of the +Python ecosystem already uses urllib3 and you should too. +urllib3 brings many critical features that are missing from the Python +standard libraries: + +- Thread safety. +- Connection pooling. +- Client-side SSL/TLS verification. +- File uploads with multipart encoding. +- Helpers for retrying requests and dealing with HTTP redirects. +- Support for gzip and deflate encoding. +- Proxy support for HTTP and SOCKS. +- 100% test coverage. + +urllib3 is powerful and easy to use:: + + >>> import urllib3 + >>> http = urllib3.PoolManager() + >>> r = http.request('GET', 'http://httpbin.org/robots.txt') + >>> r.status + 200 + >>> r.data + 'User-agent: *\nDisallow: /deny\n' + + +Installing +---------- + +urllib3 can be installed with `pip `_:: + + $ pip install urllib3 + +Alternatively, you can grab the latest source code from `GitHub `_:: + + $ git clone git://github.com/urllib3/urllib3.git + $ python setup.py install + + +Documentation +------------- + +urllib3 has usage and reference documentation at `urllib3.readthedocs.io `_. + + +Contributing +------------ + +urllib3 happily accepts contributions. Please see our +`contributing documentation `_ +for some tips on getting started. + + +Maintainers +----------- + +- `@theacodes `_ (Thea Flowers) +- `@SethMichaelLarson `_ (Seth M. Larson) +- `@haikuginger `_ (Jesse Shapiro) +- `@lukasa `_ (Cory Benfield) +- `@sigmavirus24 `_ (Ian Cordasco) +- `@shazow `_ (Andrey Petrov) + +👋 + + +Sponsorship +----------- + +If your company benefits from this library, please consider `sponsoring its +development `_. 
+ +Sponsors include: + +- Google Cloud Platform (2018-present), sponsors `@theacodes `_'s work on an ongoing basis +- Abbott (2018-present), sponsors `@SethMichaelLarson `_'s work on an ongoing basis +- Akamai (2017-present), sponsors `@haikuginger `_'s work on an ongoing basis +- Hewlett Packard Enterprise (2016-2017), sponsored `@Lukasa’s `_ work on urllib3 + + +Changes +======= + +1.23 (2018-06-04) +----------------- + +* Allow providing a list of headers to strip from requests when redirecting + to a different host. Defaults to the ``Authorization`` header. Different + headers can be set via ``Retry.remove_headers_on_redirect``. (Issue #1316) + +* Fix ``util.selectors._fileobj_to_fd`` to accept ``long`` (Issue #1247). + +* Dropped Python 3.3 support. (Pull #1242) + +* Put the connection back in the pool when calling stream() or read_chunked() on + a chunked HEAD response. (Issue #1234) + +* Fixed pyOpenSSL-specific ssl client authentication issue when clients + attempted to auth via certificate + chain (Issue #1060) + +* Add the port to the connectionpool connect print (Pull #1251) + +* Don't use the ``uuid`` module to create multipart data boundaries. (Pull #1380) + +* ``read_chunked()`` on a closed response returns no chunks. (Issue #1088) + +* Add Python 2.6 support to ``contrib.securetransport`` (Pull #1359) + +* Added support for auth info in url for SOCKS proxy (Pull #1363) + + +1.22 (2017-07-20) +----------------- + +* Fixed missing brackets in ``HTTP CONNECT`` when connecting to IPv6 address via + IPv6 proxy. (Issue #1222) + +* Made the connection pool retry on ``SSLError``. The original ``SSLError`` + is available on ``MaxRetryError.reason``. (Issue #1112) + +* Drain and release connection before recursing on retry/redirect. Fixes + deadlocks with a blocking connectionpool. (Issue #1167) + +* Fixed compatibility for cookiejar. (Issue #1229) + +* pyopenssl: Use vendored version of ``six``. (Issue #1231) + + +1.21.1 (2017-05-02) +------------------- + +* Fixed SecureTransport issue that would cause long delays in response body + delivery. (Pull #1154) + +* Fixed regression in 1.21 that threw exceptions when users passed the + ``socket_options`` flag to the ``PoolManager``. (Issue #1165) + +* Fixed regression in 1.21 that threw exceptions when users passed the + ``assert_hostname`` or ``assert_fingerprint`` flag to the ``PoolManager``. + (Pull #1157) + + +1.21 (2017-04-25) +----------------- + +* Improved performance of certain selector system calls on Python 3.5 and + later. (Pull #1095) + +* Resolved issue where the PyOpenSSL backend would not wrap SysCallError + exceptions appropriately when sending data. (Pull #1125) + +* Selectors now detects a monkey-patched select module after import for modules + that patch the select module like eventlet, greenlet. (Pull #1128) + +* Reduced memory consumption when streaming zlib-compressed responses + (as opposed to raw deflate streams). (Pull #1129) + +* Connection pools now use the entire request context when constructing the + pool key. (Pull #1016) + +* ``PoolManager.connection_from_*`` methods now accept a new keyword argument, + ``pool_kwargs``, which are merged with the existing ``connection_pool_kw``. + (Pull #1016) + +* Add retry counter for ``status_forcelist``. (Issue #1147) + +* Added ``contrib`` module for using SecureTransport on macOS: + ``urllib3.contrib.securetransport``. 
(Pull #1122) + +* urllib3 now only normalizes the case of ``http://`` and ``https://`` schemes: + for schemes it does not recognise, it assumes they are case-sensitive and + leaves them unchanged. + (Issue #1080) + + +1.20 (2017-01-19) +----------------- + +* Added support for waiting for I/O using selectors other than select, + improving urllib3's behaviour with large numbers of concurrent connections. + (Pull #1001) + +* Updated the date for the system clock check. (Issue #1005) + +* ConnectionPools now correctly consider hostnames to be case-insensitive. + (Issue #1032) + +* Outdated versions of PyOpenSSL now cause the PyOpenSSL contrib module + to fail when it is injected, rather than at first use. (Pull #1063) + +* Outdated versions of cryptography now cause the PyOpenSSL contrib module + to fail when it is injected, rather than at first use. (Issue #1044) + +* Automatically attempt to rewind a file-like body object when a request is + retried or redirected. (Pull #1039) + +* Fix some bugs that occur when modules incautiously patch the queue module. + (Pull #1061) + +* Prevent retries from occurring on read timeouts for which the request method + was not in the method whitelist. (Issue #1059) + +* Changed the PyOpenSSL contrib module to lazily load idna to avoid + unnecessarily bloating the memory of programs that don't need it. (Pull + #1076) + +* Add support for IPv6 literals with zone identifiers. (Pull #1013) + +* Added support for socks5h:// and socks4a:// schemes when working with SOCKS + proxies, and controlled remote DNS appropriately. (Issue #1035) + + +1.19.1 (2016-11-16) +------------------- + +* Fixed AppEngine import that didn't function on Python 3.5. (Pull #1025) + + +1.19 (2016-11-03) +----------------- + +* urllib3 now respects Retry-After headers on 413, 429, and 503 responses when + using the default retry logic. (Pull #955) + +* Remove markers from setup.py to assist ancient setuptools versions. (Issue + #986) + +* Disallow superscripts and other integerish things in URL ports. (Issue #989) + +* Allow urllib3's HTTPResponse.stream() method to continue to work with + non-httplib underlying FPs. (Pull #990) + +* Empty filenames in multipart headers are now emitted as such, rather than + being suppressed. (Issue #1015) + +* Prefer user-supplied Host headers on chunked uploads. (Issue #1009) + + +1.18.1 (2016-10-27) +------------------- + +* CVE-2016-9015. Users who are using urllib3 version 1.17 or 1.18 along with + PyOpenSSL injection and OpenSSL 1.1.0 *must* upgrade to this version. This + release fixes a vulnerability whereby urllib3 in the above configuration + would silently fail to validate TLS certificates due to erroneously setting + invalid flags in OpenSSL's ``SSL_CTX_set_verify`` function. These erroneous + flags do not cause a problem in OpenSSL versions before 1.1.0, which + interprets the presence of any flag as requesting certificate validation. + + There is no PR for this patch, as it was prepared for simultaneous disclosure + and release. The master branch received the same fix in PR #1010. + + +1.18 (2016-09-26) +----------------- + +* Fixed incorrect message for IncompleteRead exception. (PR #973) + +* Accept ``iPAddress`` subject alternative name fields in TLS certificates. + (Issue #258) + +* Fixed consistency of ``HTTPResponse.closed`` between Python 2 and 3. + (Issue #977) + +* Fixed handling of wildcard certificates when using PyOpenSSL. 
(Issue #979) + + +1.17 (2016-09-06) +----------------- + +* Accept ``SSLContext`` objects for use in SSL/TLS negotiation. (Issue #835) + +* ConnectionPool debug log now includes scheme, host, and port. (Issue #897) + +* Substantially refactored documentation. (Issue #887) + +* Used URLFetch default timeout on AppEngine, rather than hardcoding our own. + (Issue #858) + +* Normalize the scheme and host in the URL parser (Issue #833) + +* ``HTTPResponse`` contains the last ``Retry`` object, which now also + contains retries history. (Issue #848) + +* Timeout can no longer be set as boolean, and must be greater than zero. + (PR #924) + +* Removed pyasn1 and ndg-httpsclient from dependencies used for PyOpenSSL. We + now use cryptography and idna, both of which are already dependencies of + PyOpenSSL. (PR #930) + +* Fixed infinite loop in ``stream`` when amt=None. (Issue #928) + +* Try to use the operating system's certificates when we are using an + ``SSLContext``. (PR #941) + +* Updated cipher suite list to allow ChaCha20+Poly1305. AES-GCM is preferred to + ChaCha20, but ChaCha20 is then preferred to everything else. (PR #947) + +* Updated cipher suite list to remove 3DES-based cipher suites. (PR #958) + +* Removed the cipher suite fallback to allow HIGH ciphers. (PR #958) + +* Implemented ``length_remaining`` to determine remaining content + to be read. (PR #949) + +* Implemented ``enforce_content_length`` to enable exceptions when + incomplete data chunks are received. (PR #949) + +* Dropped connection start, dropped connection reset, redirect, forced retry, + and new HTTPS connection log levels to DEBUG, from INFO. (PR #967) + + +1.16 (2016-06-11) +----------------- + +* Disable IPv6 DNS when IPv6 connections are not possible. (Issue #840) + +* Provide ``key_fn_by_scheme`` pool keying mechanism that can be + overridden. (Issue #830) + +* Normalize scheme and host to lowercase for pool keys, and include + ``source_address``. (Issue #830) + +* Cleaner exception chain in Python 3 for ``_make_request``. + (Issue #861) + +* Fixed installing ``urllib3[socks]`` extra. (Issue #864) + +* Fixed signature of ``ConnectionPool.close`` so it can actually safely be + called by subclasses. (Issue #873) + +* Retain ``release_conn`` state across retries. (Issues #651, #866) + +* Add customizable ``HTTPConnectionPool.ResponseCls``, which defaults to + ``HTTPResponse`` but can be replaced with a subclass. (Issue #879) + + +1.15.1 (2016-04-11) +------------------- + +* Fix packaging to include backports module. (Issue #841) + + +1.15 (2016-04-06) +----------------- + +* Added Retry(raise_on_status=False). (Issue #720) + +* Always use setuptools, no more distutils fallback. (Issue #785) + +* Dropped support for Python 3.2. (Issue #786) + +* Chunked transfer encoding when requesting with ``chunked=True``. + (Issue #790) + +* Fixed regression with IPv6 port parsing. (Issue #801) + +* Append SNIMissingWarning messages to allow users to specify it in + the PYTHONWARNINGS environment variable. (Issue #816) + +* Handle unicode headers in Py2. (Issue #818) + +* Log certificate when there is a hostname mismatch. (Issue #820) + +* Preserve order of request/response headers. (Issue #821) + + +1.14 (2015-12-29) +----------------- + +* contrib: SOCKS proxy support! (Issue #762) + +* Fixed AppEngine handling of transfer-encoding header and bug + in Timeout defaults checking. (Issue #763) + + +1.13.1 (2015-12-18) +------------------- + +* Fixed regression in IPv6 + SSL for match_hostname. 
(Issue #761) + + +1.13 (2015-12-14) +----------------- + +* Fixed ``pip install urllib3[secure]`` on modern pip. (Issue #706) + +* pyopenssl: Fixed SSL3_WRITE_PENDING error. (Issue #717) + +* pyopenssl: Support for TLSv1.1 and TLSv1.2. (Issue #696) + +* Close connections more defensively on exception. (Issue #734) + +* Adjusted ``read_chunked`` to handle gzipped, chunk-encoded bodies without + repeatedly flushing the decoder, to function better on Jython. (Issue #743) + +* Accept ``ca_cert_dir`` for SSL-related PoolManager configuration. (Issue #758) + + +1.12 (2015-09-03) +----------------- + +* Rely on ``six`` for importing ``httplib`` to work around + conflicts with other Python 3 shims. (Issue #688) + +* Add support for directories of certificate authorities, as supported by + OpenSSL. (Issue #701) + +* New exception: ``NewConnectionError``, raised when we fail to establish + a new connection, usually ``ECONNREFUSED`` socket error. + + +1.11 (2015-07-21) +----------------- + +* When ``ca_certs`` is given, ``cert_reqs`` defaults to + ``'CERT_REQUIRED'``. (Issue #650) + +* ``pip install urllib3[secure]`` will install Certifi and + PyOpenSSL as dependencies. (Issue #678) + +* Made ``HTTPHeaderDict`` usable as a ``headers`` input value + (Issues #632, #679) + +* Added `urllib3.contrib.appengine `_ + which has an ``AppEngineManager`` for using ``URLFetch`` in a + Google AppEngine environment. (Issue #664) + +* Dev: Added test suite for AppEngine. (Issue #631) + +* Fix performance regression when using PyOpenSSL. (Issue #626) + +* Passing incorrect scheme (e.g. ``foo://``) will raise + ``ValueError`` instead of ``AssertionError`` (backwards + compatible for now, but please migrate). (Issue #640) + +* Fix pools not getting replenished when an error occurs during a + request using ``release_conn=False``. (Issue #644) + +* Fix pool-default headers not applying for url-encoded requests + like GET. (Issue #657) + +* log.warning in Python 3 when headers are skipped due to parsing + errors. (Issue #642) + +* Close and discard connections if an error occurs during read. + (Issue #660) + +* Fix host parsing for IPv6 proxies. (Issue #668) + +* Separate warning type SubjectAltNameWarning, now issued once + per host. (Issue #671) + +* Fix ``httplib.IncompleteRead`` not getting converted to + ``ProtocolError`` when using ``HTTPResponse.stream()`` + (Issue #674) + +1.10.4 (2015-05-03) +------------------- + +* Migrate tests to Tornado 4. (Issue #594) + +* Append default warning configuration rather than overwrite. + (Issue #603) + +* Fix streaming decoding regression. (Issue #595) + +* Fix chunked requests losing state across keep-alive connections. + (Issue #599) + +* Fix hanging when chunked HEAD response has no body. (Issue #605) + + +1.10.3 (2015-04-21) +------------------- + +* Emit ``InsecurePlatformWarning`` when SSLContext object is missing. + (Issue #558) + +* Fix regression of duplicate header keys being discarded. + (Issue #563) + +* ``Response.stream()`` returns a generator for chunked responses. + (Issue #560) + +* Set upper-bound timeout when waiting for a socket in PyOpenSSL. + (Issue #585) + +* Work on platforms without `ssl` module for plain HTTP requests. + (Issue #587) + +* Stop relying on the stdlib's default cipher list. (Issue #588) + + +1.10.2 (2015-02-25) +------------------- + +* Fix file descriptor leakage on retries. (Issue #548) + +* Removed RC4 from default cipher list. (Issue #551) + +* Header performance improvements. 
(Issue #544) + +* Fix PoolManager not obeying redirect retry settings. (Issue #553) + + +1.10.1 (2015-02-10) +------------------- + +* Pools can be used as context managers. (Issue #545) + +* Don't re-use connections which experienced an SSLError. (Issue #529) + +* Don't fail when gzip decoding an empty stream. (Issue #535) + +* Add sha256 support for fingerprint verification. (Issue #540) + +* Fixed handling of header values containing commas. (Issue #533) + + +1.10 (2014-12-14) +----------------- + +* Disabled SSLv3. (Issue #473) + +* Add ``Url.url`` property to return the composed url string. (Issue #394) + +* Fixed PyOpenSSL + gevent ``WantWriteError``. (Issue #412) + +* ``MaxRetryError.reason`` will always be an exception, not string. + (Issue #481) + +* Fixed SSL-related timeouts not being detected as timeouts. (Issue #492) + +* Py3: Use ``ssl.create_default_context()`` when available. (Issue #473) + +* Emit ``InsecureRequestWarning`` for *every* insecure HTTPS request. + (Issue #496) + +* Emit ``SecurityWarning`` when certificate has no ``subjectAltName``. + (Issue #499) + +* Close and discard sockets which experienced SSL-related errors. + (Issue #501) + +* Handle ``body`` param in ``.request(...)``. (Issue #513) + +* Respect timeout with HTTPS proxy. (Issue #505) + +* PyOpenSSL: Handle ZeroReturnError exception. (Issue #520) + + +1.9.1 (2014-09-13) +------------------ + +* Apply socket arguments before binding. (Issue #427) + +* More careful checks if fp-like object is closed. (Issue #435) + +* Fixed packaging issues of some development-related files not + getting included. (Issue #440) + +* Allow performing *only* fingerprint verification. (Issue #444) + +* Emit ``SecurityWarning`` if system clock is waaay off. (Issue #445) + +* Fixed PyOpenSSL compatibility with PyPy. (Issue #450) + +* Fixed ``BrokenPipeError`` and ``ConnectionError`` handling in Py3. + (Issue #443) + + + +1.9 (2014-07-04) +---------------- + +* Shuffled around development-related files. If you're maintaining a distro + package of urllib3, you may need to tweak things. (Issue #415) + +* Unverified HTTPS requests will trigger a warning on the first request. See + our new `security documentation + `_ for details. + (Issue #426) + +* New retry logic and ``urllib3.util.retry.Retry`` configuration object. + (Issue #326) + +* All raised exceptions should now wrapped in a + ``urllib3.exceptions.HTTPException``-extending exception. (Issue #326) + +* All errors during a retry-enabled request should be wrapped in + ``urllib3.exceptions.MaxRetryError``, including timeout-related exceptions + which were previously exempt. Underlying error is accessible from the + ``.reason`` property. (Issue #326) + +* ``urllib3.exceptions.ConnectionError`` renamed to + ``urllib3.exceptions.ProtocolError``. (Issue #326) + +* Errors during response read (such as IncompleteRead) are now wrapped in + ``urllib3.exceptions.ProtocolError``. (Issue #418) + +* Requesting an empty host will raise ``urllib3.exceptions.LocationValueError``. + (Issue #417) + +* Catch read timeouts over SSL connections as + ``urllib3.exceptions.ReadTimeoutError``. (Issue #419) + +* Apply socket arguments before connecting. (Issue #427) + + +1.8.3 (2014-06-23) +------------------ + +* Fix TLS verification when using a proxy in Python 3.4.1. (Issue #385) + +* Add ``disable_cache`` option to ``urllib3.util.make_headers``. (Issue #393) + +* Wrap ``socket.timeout`` exception with + ``urllib3.exceptions.ReadTimeoutError``. 
(Issue #399) + +* Fixed proxy-related bug where connections were being reused incorrectly. + (Issues #366, #369) + +* Added ``socket_options`` keyword parameter which allows to define + ``setsockopt`` configuration of new sockets. (Issue #397) + +* Removed ``HTTPConnection.tcp_nodelay`` in favor of + ``HTTPConnection.default_socket_options``. (Issue #397) + +* Fixed ``TypeError`` bug in Python 2.6.4. (Issue #411) + + +1.8.2 (2014-04-17) +------------------ + +* Fix ``urllib3.util`` not being included in the package. + + +1.8.1 (2014-04-17) +------------------ + +* Fix AppEngine bug of HTTPS requests going out as HTTP. (Issue #356) + +* Don't install ``dummyserver`` into ``site-packages`` as it's only needed + for the test suite. (Issue #362) + +* Added support for specifying ``source_address``. (Issue #352) + + +1.8 (2014-03-04) +---------------- + +* Improved url parsing in ``urllib3.util.parse_url`` (properly parse '@' in + username, and blank ports like 'hostname:'). + +* New ``urllib3.connection`` module which contains all the HTTPConnection + objects. + +* Several ``urllib3.util.Timeout``-related fixes. Also changed constructor + signature to a more sensible order. [Backwards incompatible] + (Issues #252, #262, #263) + +* Use ``backports.ssl_match_hostname`` if it's installed. (Issue #274) + +* Added ``.tell()`` method to ``urllib3.response.HTTPResponse`` which + returns the number of bytes read so far. (Issue #277) + +* Support for platforms without threading. (Issue #289) + +* Expand default-port comparison in ``HTTPConnectionPool.is_same_host`` + to allow a pool with no specified port to be considered equal to to an + HTTP/HTTPS url with port 80/443 explicitly provided. (Issue #305) + +* Improved default SSL/TLS settings to avoid vulnerabilities. + (Issue #309) + +* Fixed ``urllib3.poolmanager.ProxyManager`` not retrying on connect errors. + (Issue #310) + +* Disable Nagle's Algorithm on the socket for non-proxies. A subset of requests + will send the entire HTTP request ~200 milliseconds faster; however, some of + the resulting TCP packets will be smaller. (Issue #254) + +* Increased maximum number of SubjectAltNames in ``urllib3.contrib.pyopenssl`` + from the default 64 to 1024 in a single certificate. (Issue #318) + +* Headers are now passed and stored as a custom + ``urllib3.collections_.HTTPHeaderDict`` object rather than a plain ``dict``. + (Issue #329, #333) + +* Headers no longer lose their case on Python 3. (Issue #236) + +* ``urllib3.contrib.pyopenssl`` now uses the operating system's default CA + certificates on inject. (Issue #332) + +* Requests with ``retries=False`` will immediately raise any exceptions without + wrapping them in ``MaxRetryError``. (Issue #348) + +* Fixed open socket leak with SSL-related failures. (Issue #344, #348) + + +1.7.1 (2013-09-25) +------------------ + +* Added granular timeout support with new ``urllib3.util.Timeout`` class. + (Issue #231) + +* Fixed Python 3.4 support. (Issue #238) + + +1.7 (2013-08-14) +---------------- + +* More exceptions are now pickle-able, with tests. (Issue #174) + +* Fixed redirecting with relative URLs in Location header. (Issue #178) + +* Support for relative urls in ``Location: ...`` header. (Issue #179) + +* ``urllib3.response.HTTPResponse`` now inherits from ``io.IOBase`` for bonus + file-like functionality. (Issue #187) + +* Passing ``assert_hostname=False`` when creating a HTTPSConnectionPool will + skip hostname verification for SSL connections. 
(Issue #194) + +* New method ``urllib3.response.HTTPResponse.stream(...)`` which acts as a + generator wrapped around ``.read(...)``. (Issue #198) + +* IPv6 url parsing enforces brackets around the hostname. (Issue #199) + +* Fixed thread race condition in + ``urllib3.poolmanager.PoolManager.connection_from_host(...)`` (Issue #204) + +* ``ProxyManager`` requests now include non-default port in ``Host: ...`` + header. (Issue #217) + +* Added HTTPS proxy support in ``ProxyManager``. (Issue #170 #139) + +* New ``RequestField`` object can be passed to the ``fields=...`` param which + can specify headers. (Issue #220) + +* Raise ``urllib3.exceptions.ProxyError`` when connecting to proxy fails. + (Issue #221) + +* Use international headers when posting file names. (Issue #119) + +* Improved IPv6 support. (Issue #203) + + +1.6 (2013-04-25) +---------------- + +* Contrib: Optional SNI support for Py2 using PyOpenSSL. (Issue #156) + +* ``ProxyManager`` automatically adds ``Host: ...`` header if not given. + +* Improved SSL-related code. ``cert_req`` now optionally takes a string like + "REQUIRED" or "NONE". Same with ``ssl_version`` takes strings like "SSLv23" + The string values reflect the suffix of the respective constant variable. + (Issue #130) + +* Vendored ``socksipy`` now based on Anorov's fork which handles unexpectedly + closed proxy connections and larger read buffers. (Issue #135) + +* Ensure the connection is closed if no data is received, fixes connection leak + on some platforms. (Issue #133) + +* Added SNI support for SSL/TLS connections on Py32+. (Issue #89) + +* Tests fixed to be compatible with Py26 again. (Issue #125) + +* Added ability to choose SSL version by passing an ``ssl.PROTOCOL_*`` constant + to the ``ssl_version`` parameter of ``HTTPSConnectionPool``. (Issue #109) + +* Allow an explicit content type to be specified when encoding file fields. + (Issue #126) + +* Exceptions are now pickleable, with tests. (Issue #101) + +* Fixed default headers not getting passed in some cases. (Issue #99) + +* Treat "content-encoding" header value as case-insensitive, per RFC 2616 + Section 3.5. (Issue #110) + +* "Connection Refused" SocketErrors will get retried rather than raised. + (Issue #92) + +* Updated vendored ``six``, no longer overrides the global ``six`` module + namespace. (Issue #113) + +* ``urllib3.exceptions.MaxRetryError`` contains a ``reason`` property holding + the exception that prompted the final retry. If ``reason is None`` then it + was due to a redirect. (Issue #92, #114) + +* Fixed ``PoolManager.urlopen()`` from not redirecting more than once. + (Issue #149) + +* Don't assume ``Content-Type: text/plain`` for multi-part encoding parameters + that are not files. (Issue #111) + +* Pass `strict` param down to ``httplib.HTTPConnection``. (Issue #122) + +* Added mechanism to verify SSL certificates by fingerprint (md5, sha1) or + against an arbitrary hostname (when connecting by IP or for misconfigured + servers). (Issue #140) + +* Streaming decompression support. (Issue #159) + + +1.5 (2012-08-02) +---------------- + +* Added ``urllib3.add_stderr_logger()`` for quickly enabling STDERR debug + logging in urllib3. + +* Native full URL parsing (including auth, path, query, fragment) available in + ``urllib3.util.parse_url(url)``. + +* Built-in redirect will switch method to 'GET' if status code is 303. + (Issue #11) + +* ``urllib3.PoolManager`` strips the scheme and host before sending the request + uri. 
(Issue #8) + +* New ``urllib3.exceptions.DecodeError`` exception for when automatic decoding, + based on the Content-Type header, fails. + +* Fixed bug with pool depletion and leaking connections (Issue #76). Added + explicit connection closing on pool eviction. Added + ``urllib3.PoolManager.clear()``. + +* 99% -> 100% unit test coverage. + + +1.4 (2012-06-16) +---------------- + +* Minor AppEngine-related fixes. + +* Switched from ``mimetools.choose_boundary`` to ``uuid.uuid4()``. + +* Improved url parsing. (Issue #73) + +* IPv6 url support. (Issue #72) + + +1.3 (2012-03-25) +---------------- + +* Removed pre-1.0 deprecated API. + +* Refactored helpers into a ``urllib3.util`` submodule. + +* Fixed multipart encoding to support list-of-tuples for keys with multiple + values. (Issue #48) + +* Fixed multiple Set-Cookie headers in response not getting merged properly in + Python 3. (Issue #53) + +* AppEngine support with Py27. (Issue #61) + +* Minor ``encode_multipart_formdata`` fixes related to Python 3 strings vs + bytes. + + +1.2.2 (2012-02-06) +------------------ + +* Fixed packaging bug of not shipping ``test-requirements.txt``. (Issue #47) + + +1.2.1 (2012-02-05) +------------------ + +* Fixed another bug related to when ``ssl`` module is not available. (Issue #41) + +* Location parsing errors now raise ``urllib3.exceptions.LocationParseError`` + which inherits from ``ValueError``. + + +1.2 (2012-01-29) +---------------- + +* Added Python 3 support (tested on 3.2.2) + +* Dropped Python 2.5 support (tested on 2.6.7, 2.7.2) + +* Use ``select.poll`` instead of ``select.select`` for platforms that support + it. + +* Use ``Queue.LifoQueue`` instead of ``Queue.Queue`` for more aggressive + connection reusing. Configurable by overriding ``ConnectionPool.QueueCls``. + +* Fixed ``ImportError`` during install when ``ssl`` module is not available. + (Issue #41) + +* Fixed ``PoolManager`` redirects between schemes (such as HTTP -> HTTPS) not + completing properly. (Issue #28, uncovered by Issue #10 in v1.1) + +* Ported ``dummyserver`` to use ``tornado`` instead of ``webob`` + + ``eventlet``. Removed extraneous unsupported dummyserver testing backends. + Added socket-level tests. + +* More tests. Achievement Unlocked: 99% Coverage. + + +1.1 (2012-01-07) +---------------- + +* Refactored ``dummyserver`` to its own root namespace module (used for + testing). + +* Added hostname verification for ``VerifiedHTTPSConnection`` by vendoring in + Py32's ``ssl_match_hostname``. (Issue #25) + +* Fixed cross-host HTTP redirects when using ``PoolManager``. (Issue #10) + +* Fixed ``decode_content`` being ignored when set through ``urlopen``. (Issue + #27) + +* Fixed timeout-related bugs. (Issues #17, #23) + + +1.0.2 (2011-11-04) +------------------ + +* Fixed typo in ``VerifiedHTTPSConnection`` which would only present as a bug if + you're using the object manually. (Thanks pyos) + +* Made RecentlyUsedContainer (and consequently PoolManager) more thread-safe by + wrapping the access log in a mutex. (Thanks @christer) + +* Made RecentlyUsedContainer more dict-like (corrected ``__delitem__`` and + ``__getitem__`` behaviour), with tests. Shouldn't affect core urllib3 code. + + +1.0.1 (2011-10-10) +------------------ + +* Fixed a bug where the same connection would get returned into the pool twice, + causing extraneous "HttpConnectionPool is full" log warnings. + + +1.0 (2011-10-08) +---------------- + +* Added ``PoolManager`` with LRU expiration of connections (tested and + documented). 
+* Added ``ProxyManager`` (needs tests, docs, and confirmation that it works + with HTTPS proxies). +* Added optional partial-read support for responses when + ``preload_content=False``. You can now make requests and just read the headers + without loading the content. +* Made response decoding optional (default on, same as before). +* Added optional explicit boundary string for ``encode_multipart_formdata``. +* Convenience request methods are now inherited from ``RequestMethods``. Old + helpers like ``get_url`` and ``post_url`` should be abandoned in favour of + the new ``request(method, url, ...)``. +* Refactored code to be even more decoupled, reusable, and extendable. +* License header added to ``.py`` files. +* Embiggened the documentation: Lots of Sphinx-friendly docstrings in the code + and docs in ``docs/`` and on https://urllib3.readthedocs.io/. +* Embettered all the things! +* Started writing this file. + + +0.4.1 (2011-07-17) +------------------ + +* Minor bug fixes, code cleanup. + + +0.4 (2011-03-01) +---------------- + +* Better unicode support. +* Added ``VerifiedHTTPSConnection``. +* Added ``NTLMConnectionPool`` in contrib. +* Minor improvements. + + +0.3.1 (2010-07-13) +------------------ + +* Added ``assert_host_name`` optional parameter. Now compatible with proxies. + + +0.3 (2009-12-10) +---------------- + +* Added HTTPS support. +* Minor bug fixes. +* Refactored, broken backwards compatibility with 0.2. +* API to be treated as stable from this version forward. + + +0.2 (2008-11-17) +---------------- + +* Added unit tests. +* Bug fixes. + + +0.1 (2008-11-16) +---------------- + +* First release. + + diff --git a/env/lib/python3.6/site-packages/urllib3-1.23.dist-info/INSTALLER b/env/lib/python3.6/site-packages/urllib3-1.23.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/env/lib/python3.6/site-packages/urllib3-1.23.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/env/lib/python3.6/site-packages/urllib3-1.23.dist-info/LICENSE.txt b/env/lib/python3.6/site-packages/urllib3-1.23.dist-info/LICENSE.txt new file mode 100755 index 0000000..1c3283e --- /dev/null +++ b/env/lib/python3.6/site-packages/urllib3-1.23.dist-info/LICENSE.txt @@ -0,0 +1,19 @@ +This is the MIT license: http://www.opensource.org/licenses/mit-license.php + +Copyright 2008-2016 Andrey Petrov and contributors (see CONTRIBUTORS.txt) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this +software and associated documentation files (the "Software"), to deal in the Software +without restriction, including without limitation the rights to use, copy, modify, merge, +publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons +to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or +substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, +INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR +PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE +FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. 
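The 1.0 entries in the changelog above introduce optional partial reads via ``preload_content=False``. A minimal sketch of that pattern follows; it is an illustration rather than part of the vendored files, and it assumes urllib3's public ``PoolManager`` and ``HTTPResponse`` API (``stream()`` and ``release_conn()``), which this patch ships but does not show here. The httpbin URL is only a stand-in, reused from the README example earlier in this file::

    import urllib3

    http = urllib3.PoolManager()
    # preload_content=False returns as soon as the headers arrive; the body
    # stays on the socket until it is read or streamed explicitly.
    r = http.request('GET', 'http://httpbin.org/bytes/1024', preload_content=False)
    print(r.status, r.headers.get('Content-Type'))
    for chunk in r.stream(256):   # pull the body in 256-byte chunks
        pass
    r.release_conn()              # hand the socket back to the pool

Reading the body to completion (or calling ``release_conn()``) is what allows the pool to reuse the underlying connection, which is why the sketch releases it explicitly.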
diff --git a/env/lib/python3.6/site-packages/urllib3-1.23.dist-info/METADATA b/env/lib/python3.6/site-packages/urllib3-1.23.dist-info/METADATA new file mode 100755 index 0000000..a04dd38 --- /dev/null +++ b/env/lib/python3.6/site-packages/urllib3-1.23.dist-info/METADATA @@ -0,0 +1,1077 @@ +Metadata-Version: 2.0 +Name: urllib3 +Version: 1.23 +Summary: HTTP library with thread-safe connection pooling, file post, and more. +Home-page: https://urllib3.readthedocs.io/ +Author: Andrey Petrov +Author-email: andrey.petrov@shazow.net +License: MIT +Keywords: urllib httplib threadsafe filepost http https ssl pooling +Platform: UNKNOWN +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Internet :: WWW/HTTP +Classifier: Topic :: Software Development :: Libraries +Requires-Python: >=2.6, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, <4 +Provides-Extra: secure +Provides-Extra: socks +Requires-Dist: pyOpenSSL>=0.14,<18.0.0; python_version=="2.6" and extra == 'secure' +Requires-Dist: pyOpenSSL>=0.14; python_version=="2.7" and extra == 'secure' +Requires-Dist: cryptography>=1.3.4; python_version<="2.7" and extra == 'secure' +Requires-Dist: idna>=2.0.0; python_version<="2.7" and extra == 'secure' +Requires-Dist: certifi; extra == 'secure' +Requires-Dist: ipaddress; python_version<="2.7" and extra == 'secure' +Requires-Dist: PySocks>=1.5.6,<2.0,!=1.5.7; extra == 'socks' + +urllib3 +======= + +.. image:: https://travis-ci.org/urllib3/urllib3.svg?branch=master + :alt: Build status on Travis + :target: https://travis-ci.org/urllib3/urllib3 + +.. image:: https://img.shields.io/appveyor/ci/urllib3/urllib3/master.svg + :alt: Build status on AppVeyor + :target: https://ci.appveyor.com/project/urllib3/urllib3 + +.. image:: https://readthedocs.org/projects/urllib3/badge/?version=latest + :alt: Documentation Status + :target: https://urllib3.readthedocs.io/en/latest/ + +.. image:: https://img.shields.io/codecov/c/github/urllib3/urllib3.svg + :alt: Coverage Status + :target: https://codecov.io/gh/urllib3/urllib3 + +.. image:: https://img.shields.io/pypi/v/urllib3.svg?maxAge=86400 + :alt: PyPI version + :target: https://pypi.org/project/urllib3/ + +.. image:: https://www.bountysource.com/badge/tracker?tracker_id=192525 + :alt: Bountysource + :target: https://www.bountysource.com/trackers/192525-urllib3?utm_source=192525&utm_medium=shield&utm_campaign=TRACKER_BADGE + +.. image:: https://badges.gitter.im/python-urllib3/Lobby.svg + :alt: Gitter + :target: https://gitter.im/python-urllib3/Lobby?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge + +urllib3 is a powerful, *sanity-friendly* HTTP client for Python. Much of the +Python ecosystem already uses urllib3 and you should too. +urllib3 brings many critical features that are missing from the Python +standard libraries: + +- Thread safety. +- Connection pooling. 
+- Client-side SSL/TLS verification. +- File uploads with multipart encoding. +- Helpers for retrying requests and dealing with HTTP redirects. +- Support for gzip and deflate encoding. +- Proxy support for HTTP and SOCKS. +- 100% test coverage. + +urllib3 is powerful and easy to use:: + + >>> import urllib3 + >>> http = urllib3.PoolManager() + >>> r = http.request('GET', 'http://httpbin.org/robots.txt') + >>> r.status + 200 + >>> r.data + 'User-agent: *\nDisallow: /deny\n' + + +Installing +---------- + +urllib3 can be installed with `pip `_:: + + $ pip install urllib3 + +Alternatively, you can grab the latest source code from `GitHub `_:: + + $ git clone git://github.com/urllib3/urllib3.git + $ python setup.py install + + +Documentation +------------- + +urllib3 has usage and reference documentation at `urllib3.readthedocs.io `_. + + +Contributing +------------ + +urllib3 happily accepts contributions. Please see our +`contributing documentation `_ +for some tips on getting started. + + +Maintainers +----------- + +- `@theacodes `_ (Thea Flowers) +- `@SethMichaelLarson `_ (Seth M. Larson) +- `@haikuginger `_ (Jesse Shapiro) +- `@lukasa `_ (Cory Benfield) +- `@sigmavirus24 `_ (Ian Cordasco) +- `@shazow `_ (Andrey Petrov) + +👋 + + +Sponsorship +----------- + +If your company benefits from this library, please consider `sponsoring its +development `_. + +Sponsors include: + +- Google Cloud Platform (2018-present), sponsors `@theacodes `_'s work on an ongoing basis +- Abbott (2018-present), sponsors `@SethMichaelLarson `_'s work on an ongoing basis +- Akamai (2017-present), sponsors `@haikuginger `_'s work on an ongoing basis +- Hewlett Packard Enterprise (2016-2017), sponsored `@Lukasa’s `_ work on urllib3 + + +Changes +======= + +1.23 (2018-06-04) +----------------- + +* Allow providing a list of headers to strip from requests when redirecting + to a different host. Defaults to the ``Authorization`` header. Different + headers can be set via ``Retry.remove_headers_on_redirect``. (Issue #1316) + +* Fix ``util.selectors._fileobj_to_fd`` to accept ``long`` (Issue #1247). + +* Dropped Python 3.3 support. (Pull #1242) + +* Put the connection back in the pool when calling stream() or read_chunked() on + a chunked HEAD response. (Issue #1234) + +* Fixed pyOpenSSL-specific ssl client authentication issue when clients + attempted to auth via certificate + chain (Issue #1060) + +* Add the port to the connectionpool connect print (Pull #1251) + +* Don't use the ``uuid`` module to create multipart data boundaries. (Pull #1380) + +* ``read_chunked()`` on a closed response returns no chunks. (Issue #1088) + +* Add Python 2.6 support to ``contrib.securetransport`` (Pull #1359) + +* Added support for auth info in url for SOCKS proxy (Pull #1363) + + +1.22 (2017-07-20) +----------------- + +* Fixed missing brackets in ``HTTP CONNECT`` when connecting to IPv6 address via + IPv6 proxy. (Issue #1222) + +* Made the connection pool retry on ``SSLError``. The original ``SSLError`` + is available on ``MaxRetryError.reason``. (Issue #1112) + +* Drain and release connection before recursing on retry/redirect. Fixes + deadlocks with a blocking connectionpool. (Issue #1167) + +* Fixed compatibility for cookiejar. (Issue #1229) + +* pyopenssl: Use vendored version of ``six``. (Issue #1231) + + +1.21.1 (2017-05-02) +------------------- + +* Fixed SecureTransport issue that would cause long delays in response body + delivery. 
(Pull #1154) + +* Fixed regression in 1.21 that threw exceptions when users passed the + ``socket_options`` flag to the ``PoolManager``. (Issue #1165) + +* Fixed regression in 1.21 that threw exceptions when users passed the + ``assert_hostname`` or ``assert_fingerprint`` flag to the ``PoolManager``. + (Pull #1157) + + +1.21 (2017-04-25) +----------------- + +* Improved performance of certain selector system calls on Python 3.5 and + later. (Pull #1095) + +* Resolved issue where the PyOpenSSL backend would not wrap SysCallError + exceptions appropriately when sending data. (Pull #1125) + +* Selectors now detects a monkey-patched select module after import for modules + that patch the select module like eventlet, greenlet. (Pull #1128) + +* Reduced memory consumption when streaming zlib-compressed responses + (as opposed to raw deflate streams). (Pull #1129) + +* Connection pools now use the entire request context when constructing the + pool key. (Pull #1016) + +* ``PoolManager.connection_from_*`` methods now accept a new keyword argument, + ``pool_kwargs``, which are merged with the existing ``connection_pool_kw``. + (Pull #1016) + +* Add retry counter for ``status_forcelist``. (Issue #1147) + +* Added ``contrib`` module for using SecureTransport on macOS: + ``urllib3.contrib.securetransport``. (Pull #1122) + +* urllib3 now only normalizes the case of ``http://`` and ``https://`` schemes: + for schemes it does not recognise, it assumes they are case-sensitive and + leaves them unchanged. + (Issue #1080) + + +1.20 (2017-01-19) +----------------- + +* Added support for waiting for I/O using selectors other than select, + improving urllib3's behaviour with large numbers of concurrent connections. + (Pull #1001) + +* Updated the date for the system clock check. (Issue #1005) + +* ConnectionPools now correctly consider hostnames to be case-insensitive. + (Issue #1032) + +* Outdated versions of PyOpenSSL now cause the PyOpenSSL contrib module + to fail when it is injected, rather than at first use. (Pull #1063) + +* Outdated versions of cryptography now cause the PyOpenSSL contrib module + to fail when it is injected, rather than at first use. (Issue #1044) + +* Automatically attempt to rewind a file-like body object when a request is + retried or redirected. (Pull #1039) + +* Fix some bugs that occur when modules incautiously patch the queue module. + (Pull #1061) + +* Prevent retries from occurring on read timeouts for which the request method + was not in the method whitelist. (Issue #1059) + +* Changed the PyOpenSSL contrib module to lazily load idna to avoid + unnecessarily bloating the memory of programs that don't need it. (Pull + #1076) + +* Add support for IPv6 literals with zone identifiers. (Pull #1013) + +* Added support for socks5h:// and socks4a:// schemes when working with SOCKS + proxies, and controlled remote DNS appropriately. (Issue #1035) + + +1.19.1 (2016-11-16) +------------------- + +* Fixed AppEngine import that didn't function on Python 3.5. (Pull #1025) + + +1.19 (2016-11-03) +----------------- + +* urllib3 now respects Retry-After headers on 413, 429, and 503 responses when + using the default retry logic. (Pull #955) + +* Remove markers from setup.py to assist ancient setuptools versions. (Issue + #986) + +* Disallow superscripts and other integerish things in URL ports. (Issue #989) + +* Allow urllib3's HTTPResponse.stream() method to continue to work with + non-httplib underlying FPs. 
(Pull #990) + +* Empty filenames in multipart headers are now emitted as such, rather than + being suppressed. (Issue #1015) + +* Prefer user-supplied Host headers on chunked uploads. (Issue #1009) + + +1.18.1 (2016-10-27) +------------------- + +* CVE-2016-9015. Users who are using urllib3 version 1.17 or 1.18 along with + PyOpenSSL injection and OpenSSL 1.1.0 *must* upgrade to this version. This + release fixes a vulnerability whereby urllib3 in the above configuration + would silently fail to validate TLS certificates due to erroneously setting + invalid flags in OpenSSL's ``SSL_CTX_set_verify`` function. These erroneous + flags do not cause a problem in OpenSSL versions before 1.1.0, which + interprets the presence of any flag as requesting certificate validation. + + There is no PR for this patch, as it was prepared for simultaneous disclosure + and release. The master branch received the same fix in PR #1010. + + +1.18 (2016-09-26) +----------------- + +* Fixed incorrect message for IncompleteRead exception. (PR #973) + +* Accept ``iPAddress`` subject alternative name fields in TLS certificates. + (Issue #258) + +* Fixed consistency of ``HTTPResponse.closed`` between Python 2 and 3. + (Issue #977) + +* Fixed handling of wildcard certificates when using PyOpenSSL. (Issue #979) + + +1.17 (2016-09-06) +----------------- + +* Accept ``SSLContext`` objects for use in SSL/TLS negotiation. (Issue #835) + +* ConnectionPool debug log now includes scheme, host, and port. (Issue #897) + +* Substantially refactored documentation. (Issue #887) + +* Used URLFetch default timeout on AppEngine, rather than hardcoding our own. + (Issue #858) + +* Normalize the scheme and host in the URL parser (Issue #833) + +* ``HTTPResponse`` contains the last ``Retry`` object, which now also + contains retries history. (Issue #848) + +* Timeout can no longer be set as boolean, and must be greater than zero. + (PR #924) + +* Removed pyasn1 and ndg-httpsclient from dependencies used for PyOpenSSL. We + now use cryptography and idna, both of which are already dependencies of + PyOpenSSL. (PR #930) + +* Fixed infinite loop in ``stream`` when amt=None. (Issue #928) + +* Try to use the operating system's certificates when we are using an + ``SSLContext``. (PR #941) + +* Updated cipher suite list to allow ChaCha20+Poly1305. AES-GCM is preferred to + ChaCha20, but ChaCha20 is then preferred to everything else. (PR #947) + +* Updated cipher suite list to remove 3DES-based cipher suites. (PR #958) + +* Removed the cipher suite fallback to allow HIGH ciphers. (PR #958) + +* Implemented ``length_remaining`` to determine remaining content + to be read. (PR #949) + +* Implemented ``enforce_content_length`` to enable exceptions when + incomplete data chunks are received. (PR #949) + +* Dropped connection start, dropped connection reset, redirect, forced retry, + and new HTTPS connection log levels to DEBUG, from INFO. (PR #967) + + +1.16 (2016-06-11) +----------------- + +* Disable IPv6 DNS when IPv6 connections are not possible. (Issue #840) + +* Provide ``key_fn_by_scheme`` pool keying mechanism that can be + overridden. (Issue #830) + +* Normalize scheme and host to lowercase for pool keys, and include + ``source_address``. (Issue #830) + +* Cleaner exception chain in Python 3 for ``_make_request``. + (Issue #861) + +* Fixed installing ``urllib3[socks]`` extra. (Issue #864) + +* Fixed signature of ``ConnectionPool.close`` so it can actually safely be + called by subclasses. 
(Issue #873) + +* Retain ``release_conn`` state across retries. (Issues #651, #866) + +* Add customizable ``HTTPConnectionPool.ResponseCls``, which defaults to + ``HTTPResponse`` but can be replaced with a subclass. (Issue #879) + + +1.15.1 (2016-04-11) +------------------- + +* Fix packaging to include backports module. (Issue #841) + + +1.15 (2016-04-06) +----------------- + +* Added Retry(raise_on_status=False). (Issue #720) + +* Always use setuptools, no more distutils fallback. (Issue #785) + +* Dropped support for Python 3.2. (Issue #786) + +* Chunked transfer encoding when requesting with ``chunked=True``. + (Issue #790) + +* Fixed regression with IPv6 port parsing. (Issue #801) + +* Append SNIMissingWarning messages to allow users to specify it in + the PYTHONWARNINGS environment variable. (Issue #816) + +* Handle unicode headers in Py2. (Issue #818) + +* Log certificate when there is a hostname mismatch. (Issue #820) + +* Preserve order of request/response headers. (Issue #821) + + +1.14 (2015-12-29) +----------------- + +* contrib: SOCKS proxy support! (Issue #762) + +* Fixed AppEngine handling of transfer-encoding header and bug + in Timeout defaults checking. (Issue #763) + + +1.13.1 (2015-12-18) +------------------- + +* Fixed regression in IPv6 + SSL for match_hostname. (Issue #761) + + +1.13 (2015-12-14) +----------------- + +* Fixed ``pip install urllib3[secure]`` on modern pip. (Issue #706) + +* pyopenssl: Fixed SSL3_WRITE_PENDING error. (Issue #717) + +* pyopenssl: Support for TLSv1.1 and TLSv1.2. (Issue #696) + +* Close connections more defensively on exception. (Issue #734) + +* Adjusted ``read_chunked`` to handle gzipped, chunk-encoded bodies without + repeatedly flushing the decoder, to function better on Jython. (Issue #743) + +* Accept ``ca_cert_dir`` for SSL-related PoolManager configuration. (Issue #758) + + +1.12 (2015-09-03) +----------------- + +* Rely on ``six`` for importing ``httplib`` to work around + conflicts with other Python 3 shims. (Issue #688) + +* Add support for directories of certificate authorities, as supported by + OpenSSL. (Issue #701) + +* New exception: ``NewConnectionError``, raised when we fail to establish + a new connection, usually ``ECONNREFUSED`` socket error. + + +1.11 (2015-07-21) +----------------- + +* When ``ca_certs`` is given, ``cert_reqs`` defaults to + ``'CERT_REQUIRED'``. (Issue #650) + +* ``pip install urllib3[secure]`` will install Certifi and + PyOpenSSL as dependencies. (Issue #678) + +* Made ``HTTPHeaderDict`` usable as a ``headers`` input value + (Issues #632, #679) + +* Added `urllib3.contrib.appengine `_ + which has an ``AppEngineManager`` for using ``URLFetch`` in a + Google AppEngine environment. (Issue #664) + +* Dev: Added test suite for AppEngine. (Issue #631) + +* Fix performance regression when using PyOpenSSL. (Issue #626) + +* Passing incorrect scheme (e.g. ``foo://``) will raise + ``ValueError`` instead of ``AssertionError`` (backwards + compatible for now, but please migrate). (Issue #640) + +* Fix pools not getting replenished when an error occurs during a + request using ``release_conn=False``. (Issue #644) + +* Fix pool-default headers not applying for url-encoded requests + like GET. (Issue #657) + +* log.warning in Python 3 when headers are skipped due to parsing + errors. (Issue #642) + +* Close and discard connections if an error occurs during read. + (Issue #660) + +* Fix host parsing for IPv6 proxies. (Issue #668) + +* Separate warning type SubjectAltNameWarning, now issued once + per host. 
(Issue #671) + +* Fix ``httplib.IncompleteRead`` not getting converted to + ``ProtocolError`` when using ``HTTPResponse.stream()`` + (Issue #674) + +1.10.4 (2015-05-03) +------------------- + +* Migrate tests to Tornado 4. (Issue #594) + +* Append default warning configuration rather than overwrite. + (Issue #603) + +* Fix streaming decoding regression. (Issue #595) + +* Fix chunked requests losing state across keep-alive connections. + (Issue #599) + +* Fix hanging when chunked HEAD response has no body. (Issue #605) + + +1.10.3 (2015-04-21) +------------------- + +* Emit ``InsecurePlatformWarning`` when SSLContext object is missing. + (Issue #558) + +* Fix regression of duplicate header keys being discarded. + (Issue #563) + +* ``Response.stream()`` returns a generator for chunked responses. + (Issue #560) + +* Set upper-bound timeout when waiting for a socket in PyOpenSSL. + (Issue #585) + +* Work on platforms without `ssl` module for plain HTTP requests. + (Issue #587) + +* Stop relying on the stdlib's default cipher list. (Issue #588) + + +1.10.2 (2015-02-25) +------------------- + +* Fix file descriptor leakage on retries. (Issue #548) + +* Removed RC4 from default cipher list. (Issue #551) + +* Header performance improvements. (Issue #544) + +* Fix PoolManager not obeying redirect retry settings. (Issue #553) + + +1.10.1 (2015-02-10) +------------------- + +* Pools can be used as context managers. (Issue #545) + +* Don't re-use connections which experienced an SSLError. (Issue #529) + +* Don't fail when gzip decoding an empty stream. (Issue #535) + +* Add sha256 support for fingerprint verification. (Issue #540) + +* Fixed handling of header values containing commas. (Issue #533) + + +1.10 (2014-12-14) +----------------- + +* Disabled SSLv3. (Issue #473) + +* Add ``Url.url`` property to return the composed url string. (Issue #394) + +* Fixed PyOpenSSL + gevent ``WantWriteError``. (Issue #412) + +* ``MaxRetryError.reason`` will always be an exception, not string. + (Issue #481) + +* Fixed SSL-related timeouts not being detected as timeouts. (Issue #492) + +* Py3: Use ``ssl.create_default_context()`` when available. (Issue #473) + +* Emit ``InsecureRequestWarning`` for *every* insecure HTTPS request. + (Issue #496) + +* Emit ``SecurityWarning`` when certificate has no ``subjectAltName``. + (Issue #499) + +* Close and discard sockets which experienced SSL-related errors. + (Issue #501) + +* Handle ``body`` param in ``.request(...)``. (Issue #513) + +* Respect timeout with HTTPS proxy. (Issue #505) + +* PyOpenSSL: Handle ZeroReturnError exception. (Issue #520) + + +1.9.1 (2014-09-13) +------------------ + +* Apply socket arguments before binding. (Issue #427) + +* More careful checks if fp-like object is closed. (Issue #435) + +* Fixed packaging issues of some development-related files not + getting included. (Issue #440) + +* Allow performing *only* fingerprint verification. (Issue #444) + +* Emit ``SecurityWarning`` if system clock is waaay off. (Issue #445) + +* Fixed PyOpenSSL compatibility with PyPy. (Issue #450) + +* Fixed ``BrokenPipeError`` and ``ConnectionError`` handling in Py3. + (Issue #443) + + + +1.9 (2014-07-04) +---------------- + +* Shuffled around development-related files. If you're maintaining a distro + package of urllib3, you may need to tweak things. (Issue #415) + +* Unverified HTTPS requests will trigger a warning on the first request. See + our new `security documentation + `_ for details. 
+ (Issue #426) + +* New retry logic and ``urllib3.util.retry.Retry`` configuration object. + (Issue #326) + +* All raised exceptions should now wrapped in a + ``urllib3.exceptions.HTTPException``-extending exception. (Issue #326) + +* All errors during a retry-enabled request should be wrapped in + ``urllib3.exceptions.MaxRetryError``, including timeout-related exceptions + which were previously exempt. Underlying error is accessible from the + ``.reason`` property. (Issue #326) + +* ``urllib3.exceptions.ConnectionError`` renamed to + ``urllib3.exceptions.ProtocolError``. (Issue #326) + +* Errors during response read (such as IncompleteRead) are now wrapped in + ``urllib3.exceptions.ProtocolError``. (Issue #418) + +* Requesting an empty host will raise ``urllib3.exceptions.LocationValueError``. + (Issue #417) + +* Catch read timeouts over SSL connections as + ``urllib3.exceptions.ReadTimeoutError``. (Issue #419) + +* Apply socket arguments before connecting. (Issue #427) + + +1.8.3 (2014-06-23) +------------------ + +* Fix TLS verification when using a proxy in Python 3.4.1. (Issue #385) + +* Add ``disable_cache`` option to ``urllib3.util.make_headers``. (Issue #393) + +* Wrap ``socket.timeout`` exception with + ``urllib3.exceptions.ReadTimeoutError``. (Issue #399) + +* Fixed proxy-related bug where connections were being reused incorrectly. + (Issues #366, #369) + +* Added ``socket_options`` keyword parameter which allows to define + ``setsockopt`` configuration of new sockets. (Issue #397) + +* Removed ``HTTPConnection.tcp_nodelay`` in favor of + ``HTTPConnection.default_socket_options``. (Issue #397) + +* Fixed ``TypeError`` bug in Python 2.6.4. (Issue #411) + + +1.8.2 (2014-04-17) +------------------ + +* Fix ``urllib3.util`` not being included in the package. + + +1.8.1 (2014-04-17) +------------------ + +* Fix AppEngine bug of HTTPS requests going out as HTTP. (Issue #356) + +* Don't install ``dummyserver`` into ``site-packages`` as it's only needed + for the test suite. (Issue #362) + +* Added support for specifying ``source_address``. (Issue #352) + + +1.8 (2014-03-04) +---------------- + +* Improved url parsing in ``urllib3.util.parse_url`` (properly parse '@' in + username, and blank ports like 'hostname:'). + +* New ``urllib3.connection`` module which contains all the HTTPConnection + objects. + +* Several ``urllib3.util.Timeout``-related fixes. Also changed constructor + signature to a more sensible order. [Backwards incompatible] + (Issues #252, #262, #263) + +* Use ``backports.ssl_match_hostname`` if it's installed. (Issue #274) + +* Added ``.tell()`` method to ``urllib3.response.HTTPResponse`` which + returns the number of bytes read so far. (Issue #277) + +* Support for platforms without threading. (Issue #289) + +* Expand default-port comparison in ``HTTPConnectionPool.is_same_host`` + to allow a pool with no specified port to be considered equal to to an + HTTP/HTTPS url with port 80/443 explicitly provided. (Issue #305) + +* Improved default SSL/TLS settings to avoid vulnerabilities. + (Issue #309) + +* Fixed ``urllib3.poolmanager.ProxyManager`` not retrying on connect errors. + (Issue #310) + +* Disable Nagle's Algorithm on the socket for non-proxies. A subset of requests + will send the entire HTTP request ~200 milliseconds faster; however, some of + the resulting TCP packets will be smaller. (Issue #254) + +* Increased maximum number of SubjectAltNames in ``urllib3.contrib.pyopenssl`` + from the default 64 to 1024 in a single certificate. 
(Issue #318) + +* Headers are now passed and stored as a custom + ``urllib3.collections_.HTTPHeaderDict`` object rather than a plain ``dict``. + (Issue #329, #333) + +* Headers no longer lose their case on Python 3. (Issue #236) + +* ``urllib3.contrib.pyopenssl`` now uses the operating system's default CA + certificates on inject. (Issue #332) + +* Requests with ``retries=False`` will immediately raise any exceptions without + wrapping them in ``MaxRetryError``. (Issue #348) + +* Fixed open socket leak with SSL-related failures. (Issue #344, #348) + + +1.7.1 (2013-09-25) +------------------ + +* Added granular timeout support with new ``urllib3.util.Timeout`` class. + (Issue #231) + +* Fixed Python 3.4 support. (Issue #238) + + +1.7 (2013-08-14) +---------------- + +* More exceptions are now pickle-able, with tests. (Issue #174) + +* Fixed redirecting with relative URLs in Location header. (Issue #178) + +* Support for relative urls in ``Location: ...`` header. (Issue #179) + +* ``urllib3.response.HTTPResponse`` now inherits from ``io.IOBase`` for bonus + file-like functionality. (Issue #187) + +* Passing ``assert_hostname=False`` when creating a HTTPSConnectionPool will + skip hostname verification for SSL connections. (Issue #194) + +* New method ``urllib3.response.HTTPResponse.stream(...)`` which acts as a + generator wrapped around ``.read(...)``. (Issue #198) + +* IPv6 url parsing enforces brackets around the hostname. (Issue #199) + +* Fixed thread race condition in + ``urllib3.poolmanager.PoolManager.connection_from_host(...)`` (Issue #204) + +* ``ProxyManager`` requests now include non-default port in ``Host: ...`` + header. (Issue #217) + +* Added HTTPS proxy support in ``ProxyManager``. (Issue #170 #139) + +* New ``RequestField`` object can be passed to the ``fields=...`` param which + can specify headers. (Issue #220) + +* Raise ``urllib3.exceptions.ProxyError`` when connecting to proxy fails. + (Issue #221) + +* Use international headers when posting file names. (Issue #119) + +* Improved IPv6 support. (Issue #203) + + +1.6 (2013-04-25) +---------------- + +* Contrib: Optional SNI support for Py2 using PyOpenSSL. (Issue #156) + +* ``ProxyManager`` automatically adds ``Host: ...`` header if not given. + +* Improved SSL-related code. ``cert_req`` now optionally takes a string like + "REQUIRED" or "NONE". Same with ``ssl_version`` takes strings like "SSLv23" + The string values reflect the suffix of the respective constant variable. + (Issue #130) + +* Vendored ``socksipy`` now based on Anorov's fork which handles unexpectedly + closed proxy connections and larger read buffers. (Issue #135) + +* Ensure the connection is closed if no data is received, fixes connection leak + on some platforms. (Issue #133) + +* Added SNI support for SSL/TLS connections on Py32+. (Issue #89) + +* Tests fixed to be compatible with Py26 again. (Issue #125) + +* Added ability to choose SSL version by passing an ``ssl.PROTOCOL_*`` constant + to the ``ssl_version`` parameter of ``HTTPSConnectionPool``. (Issue #109) + +* Allow an explicit content type to be specified when encoding file fields. + (Issue #126) + +* Exceptions are now pickleable, with tests. (Issue #101) + +* Fixed default headers not getting passed in some cases. (Issue #99) + +* Treat "content-encoding" header value as case-insensitive, per RFC 2616 + Section 3.5. (Issue #110) + +* "Connection Refused" SocketErrors will get retried rather than raised. 
+ (Issue #92) + +* Updated vendored ``six``, no longer overrides the global ``six`` module + namespace. (Issue #113) + +* ``urllib3.exceptions.MaxRetryError`` contains a ``reason`` property holding + the exception that prompted the final retry. If ``reason is None`` then it + was due to a redirect. (Issue #92, #114) + +* Fixed ``PoolManager.urlopen()`` from not redirecting more than once. + (Issue #149) + +* Don't assume ``Content-Type: text/plain`` for multi-part encoding parameters + that are not files. (Issue #111) + +* Pass `strict` param down to ``httplib.HTTPConnection``. (Issue #122) + +* Added mechanism to verify SSL certificates by fingerprint (md5, sha1) or + against an arbitrary hostname (when connecting by IP or for misconfigured + servers). (Issue #140) + +* Streaming decompression support. (Issue #159) + + +1.5 (2012-08-02) +---------------- + +* Added ``urllib3.add_stderr_logger()`` for quickly enabling STDERR debug + logging in urllib3. + +* Native full URL parsing (including auth, path, query, fragment) available in + ``urllib3.util.parse_url(url)``. + +* Built-in redirect will switch method to 'GET' if status code is 303. + (Issue #11) + +* ``urllib3.PoolManager`` strips the scheme and host before sending the request + uri. (Issue #8) + +* New ``urllib3.exceptions.DecodeError`` exception for when automatic decoding, + based on the Content-Type header, fails. + +* Fixed bug with pool depletion and leaking connections (Issue #76). Added + explicit connection closing on pool eviction. Added + ``urllib3.PoolManager.clear()``. + +* 99% -> 100% unit test coverage. + + +1.4 (2012-06-16) +---------------- + +* Minor AppEngine-related fixes. + +* Switched from ``mimetools.choose_boundary`` to ``uuid.uuid4()``. + +* Improved url parsing. (Issue #73) + +* IPv6 url support. (Issue #72) + + +1.3 (2012-03-25) +---------------- + +* Removed pre-1.0 deprecated API. + +* Refactored helpers into a ``urllib3.util`` submodule. + +* Fixed multipart encoding to support list-of-tuples for keys with multiple + values. (Issue #48) + +* Fixed multiple Set-Cookie headers in response not getting merged properly in + Python 3. (Issue #53) + +* AppEngine support with Py27. (Issue #61) + +* Minor ``encode_multipart_formdata`` fixes related to Python 3 strings vs + bytes. + + +1.2.2 (2012-02-06) +------------------ + +* Fixed packaging bug of not shipping ``test-requirements.txt``. (Issue #47) + + +1.2.1 (2012-02-05) +------------------ + +* Fixed another bug related to when ``ssl`` module is not available. (Issue #41) + +* Location parsing errors now raise ``urllib3.exceptions.LocationParseError`` + which inherits from ``ValueError``. + + +1.2 (2012-01-29) +---------------- + +* Added Python 3 support (tested on 3.2.2) + +* Dropped Python 2.5 support (tested on 2.6.7, 2.7.2) + +* Use ``select.poll`` instead of ``select.select`` for platforms that support + it. + +* Use ``Queue.LifoQueue`` instead of ``Queue.Queue`` for more aggressive + connection reusing. Configurable by overriding ``ConnectionPool.QueueCls``. + +* Fixed ``ImportError`` during install when ``ssl`` module is not available. + (Issue #41) + +* Fixed ``PoolManager`` redirects between schemes (such as HTTP -> HTTPS) not + completing properly. (Issue #28, uncovered by Issue #10 in v1.1) + +* Ported ``dummyserver`` to use ``tornado`` instead of ``webob`` + + ``eventlet``. Removed extraneous unsupported dummyserver testing backends. + Added socket-level tests. + +* More tests. Achievement Unlocked: 99% Coverage. 
+ + +1.1 (2012-01-07) +---------------- + +* Refactored ``dummyserver`` to its own root namespace module (used for + testing). + +* Added hostname verification for ``VerifiedHTTPSConnection`` by vendoring in + Py32's ``ssl_match_hostname``. (Issue #25) + +* Fixed cross-host HTTP redirects when using ``PoolManager``. (Issue #10) + +* Fixed ``decode_content`` being ignored when set through ``urlopen``. (Issue + #27) + +* Fixed timeout-related bugs. (Issues #17, #23) + + +1.0.2 (2011-11-04) +------------------ + +* Fixed typo in ``VerifiedHTTPSConnection`` which would only present as a bug if + you're using the object manually. (Thanks pyos) + +* Made RecentlyUsedContainer (and consequently PoolManager) more thread-safe by + wrapping the access log in a mutex. (Thanks @christer) + +* Made RecentlyUsedContainer more dict-like (corrected ``__delitem__`` and + ``__getitem__`` behaviour), with tests. Shouldn't affect core urllib3 code. + + +1.0.1 (2011-10-10) +------------------ + +* Fixed a bug where the same connection would get returned into the pool twice, + causing extraneous "HttpConnectionPool is full" log warnings. + + +1.0 (2011-10-08) +---------------- + +* Added ``PoolManager`` with LRU expiration of connections (tested and + documented). +* Added ``ProxyManager`` (needs tests, docs, and confirmation that it works + with HTTPS proxies). +* Added optional partial-read support for responses when + ``preload_content=False``. You can now make requests and just read the headers + without loading the content. +* Made response decoding optional (default on, same as before). +* Added optional explicit boundary string for ``encode_multipart_formdata``. +* Convenience request methods are now inherited from ``RequestMethods``. Old + helpers like ``get_url`` and ``post_url`` should be abandoned in favour of + the new ``request(method, url, ...)``. +* Refactored code to be even more decoupled, reusable, and extendable. +* License header added to ``.py`` files. +* Embiggened the documentation: Lots of Sphinx-friendly docstrings in the code + and docs in ``docs/`` and on https://urllib3.readthedocs.io/. +* Embettered all the things! +* Started writing this file. + + +0.4.1 (2011-07-17) +------------------ + +* Minor bug fixes, code cleanup. + + +0.4 (2011-03-01) +---------------- + +* Better unicode support. +* Added ``VerifiedHTTPSConnection``. +* Added ``NTLMConnectionPool`` in contrib. +* Minor improvements. + + +0.3.1 (2010-07-13) +------------------ + +* Added ``assert_host_name`` optional parameter. Now compatible with proxies. + + +0.3 (2009-12-10) +---------------- + +* Added HTTPS support. +* Minor bug fixes. +* Refactored, broken backwards compatibility with 0.2. +* API to be treated as stable from this version forward. + + +0.2 (2008-11-17) +---------------- + +* Added unit tests. +* Bug fixes. + + +0.1 (2008-11-16) +---------------- + +* First release. 
+ + diff --git a/env/lib/python3.6/site-packages/urllib3-1.23.dist-info/RECORD b/env/lib/python3.6/site-packages/urllib3-1.23.dist-info/RECORD new file mode 100644 index 0000000..780c427 --- /dev/null +++ b/env/lib/python3.6/site-packages/urllib3-1.23.dist-info/RECORD @@ -0,0 +1,80 @@ +urllib3/__init__.py,sha256=DZucS8tlzGYKmK5FIsyUViMghpCq_0_0Ouvm_Jp9GNc,2853 +urllib3/_collections.py,sha256=iNeAU_we9L3lMGRUKKdq24Mf7o050TXP5U4Jm9AzAY4,10841 +urllib3/connection.py,sha256=My76qeWMDkV-KP1l3iChXHOE7J-ZCaUdP3sPIiLA2uE,14485 +urllib3/connectionpool.py,sha256=w20OwKdIqk6f8FIl6QGgn6jf9gZ0-tmgH7VPxnpEgkQ,35464 +urllib3/exceptions.py,sha256=rFeIfBNKC8KJ61ux-MtJyJlEC9G9ggkmCeF751JwVR4,6604 +urllib3/fields.py,sha256=D_TE_SK15YatdbhWDMN0OE3X6UCJn1RTkANINCYOobE,5943 +urllib3/filepost.py,sha256=40CROlpRKVBpFUkD0R6wJf_PpvbcRQRFUu0OOQlFkKM,2436 +urllib3/poolmanager.py,sha256=FHBjb7odbP2LyQRzeitgpuh1AQAPyegzmrm2b3gSZlY,16821 +urllib3/request.py,sha256=fwjlq5nQfcUa7aoncR25z6-fJAX_oNTcPksKKGjBm38,5996 +urllib3/response.py,sha256=uAuOTZSuTodzvQWIDCZghDoKmZ2bKbgIRCfaVIIZfn8,24667 +urllib3/contrib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +urllib3/contrib/appengine.py,sha256=GwC8G9RhBHMsW_szXCUj0Fxg-s58fwaUy1I9QWdsn6o,11173 +urllib3/contrib/ntlmpool.py,sha256=Q9-rO5Rh2-IqyEd4ZicpTDfMnOlf0IPPCkjhChBCjV4,4478 +urllib3/contrib/pyopenssl.py,sha256=NRLCyMHt-POP6ZajpsyH4BrJQy2DaevXWVXZlebcSxE,15463 +urllib3/contrib/securetransport.py,sha256=BqXSlChN9_hjCWgyN6JdcgvBUdc37QCCX4u3_8zE_9o,30309 +urllib3/contrib/socks.py,sha256=Iom0snbHkCuZbZ7Sle2Kueha1W0jYAJ0SyCOtePLaio,6391 +urllib3/contrib/_securetransport/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +urllib3/contrib/_securetransport/bindings.py,sha256=x2kLSh-ASZKsun0FxtraBuLVe3oHuth4YW6yZ5Vof-w,17560 +urllib3/contrib/_securetransport/low_level.py,sha256=Umy5u-3Z957GirdapnicXVOpHaM4xdOZABJuJxfaeJA,12162 +urllib3/packages/__init__.py,sha256=nlChrGzkjCkmhCX9HrF_qHPUgosfsPQkVIJxiiLhk9g,109 +urllib3/packages/ordered_dict.py,sha256=VQaPONfhVMsb8B63Xg7ZOydJqIE_jzeMhVN3Pec6ogw,8935 +urllib3/packages/six.py,sha256=A6hdJZVjI3t_geebZ9BzUvwRrIXo0lfwzQlM2LcKyas,30098 +urllib3/packages/backports/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +urllib3/packages/backports/makefile.py,sha256=r1IADol_pBBq2Y1ub4CPyuS2hXuShK47nfFngZRcRhI,1461 +urllib3/packages/ssl_match_hostname/__init__.py,sha256=WBVbxQBojNAxfZwNavkox3BgJiMA9BJmm-_fwd0jD_o,688 +urllib3/packages/ssl_match_hostname/_implementation.py,sha256=lAj7qGCZLOldhn8gZDY6Tqp4mvgkbTfy4k4gDIDRo8g,5702 +urllib3/util/__init__.py,sha256=6Ran4oAVIy40Cu_oEPWnNV9bwF5rXx6G1DUZ7oehjPY,1044 +urllib3/util/connection.py,sha256=8K1VXm8BHsM3QATJJGBNRa_MStkzDy1Da2IaPAaCU8c,4279 +urllib3/util/queue.py,sha256=myTX3JDHntglKQNBf3b6dasHH-uF-W59vzGSQiFdAfI,497 +urllib3/util/request.py,sha256=H5_lrHvtwl2U2BbT1UYN9HpruNc1gsNFlz2njQmhPrQ,3705 +urllib3/util/response.py,sha256=SSNL888W-MQ8t3HAi44kNGgF682p6H__ytEXzBYxV_M,2343 +urllib3/util/retry.py,sha256=tlxiEq8OU2BSenPpPjYYO1URne8A-qTEgaykam6rZPg,15104 +urllib3/util/ssl_.py,sha256=ZGaPQcKYMqza6vUl54JygUD5XcI0p9dxFwUx8k7sCBw,13993 +urllib3/util/timeout.py,sha256=7lHNrgL5YH2cI1j-yZnzV_J8jBlRVdmFhQaNyM1_2b8,9757 +urllib3/util/url.py,sha256=qCY_HHUXvo05wAsEERALgExtlgxLnAHSQ7ce1b-g3SM,6487 +urllib3/util/wait.py,sha256=_4vvsT1BTTpqxQYK-2kXVfGsUsVRiuc4R4F-0Bf5BPc,5468 +urllib3-1.23.dist-info/DESCRIPTION.rst,sha256=wO8oxz5hJse8PFH1eQZYnHHZqjsEGFRoHEsfUVbhYJQ,32311 +urllib3-1.23.dist-info/LICENSE.txt,sha256=EdtWlDDKWteT8TmSl7jfUEGiITerr5BkLqcdoh1ZEhw,1175 
+urllib3-1.23.dist-info/METADATA,sha256=9Vnr4PogOR0NNBWZRy4LcVITPbFmvbaM-p5jDjKVjLA,34050 +urllib3-1.23.dist-info/RECORD,, +urllib3-1.23.dist-info/WHEEL,sha256=o2k-Qa-RMNIJmUdIc7KU6VWR_ErNRbWNlxDIpl7lm34,110 +urllib3-1.23.dist-info/metadata.json,sha256=84gnKX3kpTNkp5zA5zaidVW3qHwJaE3YFJbbdKCPTNg,1879 +urllib3-1.23.dist-info/top_level.txt,sha256=EMiXL2sKrTcmrMxIHTqdc3ET54pQI2Y072LexFEemvo,8 +urllib3-1.23.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +urllib3/util/__pycache__/timeout.cpython-36.pyc,, +urllib3/util/__pycache__/response.cpython-36.pyc,, +urllib3/util/__pycache__/url.cpython-36.pyc,, +urllib3/util/__pycache__/request.cpython-36.pyc,, +urllib3/util/__pycache__/connection.cpython-36.pyc,, +urllib3/util/__pycache__/wait.cpython-36.pyc,, +urllib3/util/__pycache__/retry.cpython-36.pyc,, +urllib3/util/__pycache__/ssl_.cpython-36.pyc,, +urllib3/util/__pycache__/__init__.cpython-36.pyc,, +urllib3/util/__pycache__/queue.cpython-36.pyc,, +urllib3/__pycache__/poolmanager.cpython-36.pyc,, +urllib3/__pycache__/response.cpython-36.pyc,, +urllib3/__pycache__/exceptions.cpython-36.pyc,, +urllib3/__pycache__/request.cpython-36.pyc,, +urllib3/__pycache__/filepost.cpython-36.pyc,, +urllib3/__pycache__/connectionpool.cpython-36.pyc,, +urllib3/__pycache__/connection.cpython-36.pyc,, +urllib3/__pycache__/_collections.cpython-36.pyc,, +urllib3/__pycache__/fields.cpython-36.pyc,, +urllib3/__pycache__/__init__.cpython-36.pyc,, +urllib3/contrib/__pycache__/ntlmpool.cpython-36.pyc,, +urllib3/contrib/__pycache__/appengine.cpython-36.pyc,, +urllib3/contrib/__pycache__/securetransport.cpython-36.pyc,, +urllib3/contrib/__pycache__/socks.cpython-36.pyc,, +urllib3/contrib/__pycache__/__init__.cpython-36.pyc,, +urllib3/contrib/__pycache__/pyopenssl.cpython-36.pyc,, +urllib3/contrib/_securetransport/__pycache__/low_level.cpython-36.pyc,, +urllib3/contrib/_securetransport/__pycache__/bindings.cpython-36.pyc,, +urllib3/contrib/_securetransport/__pycache__/__init__.cpython-36.pyc,, +urllib3/packages/__pycache__/six.cpython-36.pyc,, +urllib3/packages/__pycache__/ordered_dict.cpython-36.pyc,, +urllib3/packages/__pycache__/__init__.cpython-36.pyc,, +urllib3/packages/backports/__pycache__/makefile.cpython-36.pyc,, +urllib3/packages/backports/__pycache__/__init__.cpython-36.pyc,, +urllib3/packages/ssl_match_hostname/__pycache__/_implementation.cpython-36.pyc,, +urllib3/packages/ssl_match_hostname/__pycache__/__init__.cpython-36.pyc,, diff --git a/env/lib/python3.6/site-packages/urllib3-1.23.dist-info/WHEEL b/env/lib/python3.6/site-packages/urllib3-1.23.dist-info/WHEEL new file mode 100755 index 0000000..8b6dd1b --- /dev/null +++ b/env/lib/python3.6/site-packages/urllib3-1.23.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.29.0) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/env/lib/python3.6/site-packages/urllib3-1.23.dist-info/metadata.json b/env/lib/python3.6/site-packages/urllib3-1.23.dist-info/metadata.json new file mode 100755 index 0000000..5bf347c --- /dev/null +++ b/env/lib/python3.6/site-packages/urllib3-1.23.dist-info/metadata.json @@ -0,0 +1 @@ +{"classifiers": ["Environment :: Web Environment", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: 
Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", "Topic :: Internet :: WWW/HTTP", "Topic :: Software Development :: Libraries"], "extensions": {"python.details": {"contacts": [{"email": "andrey.petrov@shazow.net", "name": "Andrey Petrov", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst", "license": "LICENSE.txt"}, "project_urls": {"Home": "https://urllib3.readthedocs.io/"}}}, "extras": ["secure", "socks"], "generator": "bdist_wheel (0.29.0)", "keywords": ["urllib", "httplib", "threadsafe", "filepost", "http", "https", "ssl", "pooling"], "license": "MIT", "metadata_version": "2.0", "name": "urllib3", "requires_python": ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, <4", "run_requires": [{"extra": "socks", "requires": ["PySocks>=1.5.6,<2.0,!=1.5.7"]}, {"extra": "secure", "requires": ["certifi"]}, {"environment": "python_version<=\"2.7\"", "extra": "secure", "requires": ["cryptography>=1.3.4", "idna>=2.0.0", "ipaddress"]}, {"environment": "python_version==\"2.6\"", "extra": "secure", "requires": ["pyOpenSSL>=0.14,<18.0.0"]}, {"environment": "python_version==\"2.7\"", "extra": "secure", "requires": ["pyOpenSSL>=0.14"]}], "summary": "HTTP library with thread-safe connection pooling, file post, and more.", "test_requires": [{"requires": ["mock", "pytest", "tornado"]}], "version": "1.23"} \ No newline at end of file diff --git a/env/lib/python3.6/site-packages/urllib3-1.23.dist-info/top_level.txt b/env/lib/python3.6/site-packages/urllib3-1.23.dist-info/top_level.txt new file mode 100755 index 0000000..a42590b --- /dev/null +++ b/env/lib/python3.6/site-packages/urllib3-1.23.dist-info/top_level.txt @@ -0,0 +1 @@ +urllib3 diff --git a/env/lib/python3.6/site-packages/urllib3/_collections.py b/env/lib/python3.6/site-packages/urllib3/_collections.py new file mode 100755 index 0000000..6e36b84 --- /dev/null +++ b/env/lib/python3.6/site-packages/urllib3/_collections.py @@ -0,0 +1,332 @@ +from __future__ import absolute_import +try: + from collections.abc import Mapping, MutableMapping +except ImportError: + from collections import Mapping, MutableMapping +try: + from threading import RLock +except ImportError: # Platform-specific: No threads available + class RLock: + def __enter__(self): + pass + + def __exit__(self, exc_type, exc_value, traceback): + pass + + +try: # Python 2.7+ + from collections import OrderedDict +except ImportError: + from .packages.ordered_dict import OrderedDict +from .exceptions import InvalidHeader +from .packages.six import iterkeys, itervalues, PY3 + + +__all__ = ['RecentlyUsedContainer', 'HTTPHeaderDict'] + + +_Null = object() + + +class RecentlyUsedContainer(MutableMapping): + """ + Provides a thread-safe dict-like container which maintains up to + ``maxsize`` keys while throwing away the least-recently-used keys beyond + ``maxsize``. + + :param maxsize: + Maximum number of recent elements to retain. + + :param dispose_func: + Every time an item is evicted from the container, + ``dispose_func(value)`` is called. Callback which will get called + """ + + ContainerCls = OrderedDict + + def __init__(self, maxsize=10, dispose_func=None): + self._maxsize = maxsize + self.dispose_func = dispose_func + + self._container = self.ContainerCls() + self.lock = RLock() + + def __getitem__(self, key): + # Re-insert the item, moving it to the end of the eviction line. 
+ with self.lock: + item = self._container.pop(key) + self._container[key] = item + return item + + def __setitem__(self, key, value): + evicted_value = _Null + with self.lock: + # Possibly evict the existing value of 'key' + evicted_value = self._container.get(key, _Null) + self._container[key] = value + + # If we didn't evict an existing value, we might have to evict the + # least recently used item from the beginning of the container. + if len(self._container) > self._maxsize: + _key, evicted_value = self._container.popitem(last=False) + + if self.dispose_func and evicted_value is not _Null: + self.dispose_func(evicted_value) + + def __delitem__(self, key): + with self.lock: + value = self._container.pop(key) + + if self.dispose_func: + self.dispose_func(value) + + def __len__(self): + with self.lock: + return len(self._container) + + def __iter__(self): + raise NotImplementedError('Iteration over this class is unlikely to be threadsafe.') + + def clear(self): + with self.lock: + # Copy pointers to all values, then wipe the mapping + values = list(itervalues(self._container)) + self._container.clear() + + if self.dispose_func: + for value in values: + self.dispose_func(value) + + def keys(self): + with self.lock: + return list(iterkeys(self._container)) + + +class HTTPHeaderDict(MutableMapping): + """ + :param headers: + An iterable of field-value pairs. Must not contain multiple field names + when compared case-insensitively. + + :param kwargs: + Additional field-value pairs to pass in to ``dict.update``. + + A ``dict`` like container for storing HTTP Headers. + + Field names are stored and compared case-insensitively in compliance with + RFC 7230. Iteration provides the first case-sensitive key seen for each + case-insensitive pair. + + Using ``__setitem__`` syntax overwrites fields that compare equal + case-insensitively in order to maintain ``dict``'s api. For fields that + compare equal, instead create a new ``HTTPHeaderDict`` and use ``.add`` + in a loop. + + If multiple fields that are equal case-insensitively are passed to the + constructor or ``.update``, the behavior is undefined and some will be + lost. 
+ + >>> headers = HTTPHeaderDict() + >>> headers.add('Set-Cookie', 'foo=bar') + >>> headers.add('set-cookie', 'baz=quxx') + >>> headers['content-length'] = '7' + >>> headers['SET-cookie'] + 'foo=bar, baz=quxx' + >>> headers['Content-Length'] + '7' + """ + + def __init__(self, headers=None, **kwargs): + super(HTTPHeaderDict, self).__init__() + self._container = OrderedDict() + if headers is not None: + if isinstance(headers, HTTPHeaderDict): + self._copy_from(headers) + else: + self.extend(headers) + if kwargs: + self.extend(kwargs) + + def __setitem__(self, key, val): + self._container[key.lower()] = [key, val] + return self._container[key.lower()] + + def __getitem__(self, key): + val = self._container[key.lower()] + return ', '.join(val[1:]) + + def __delitem__(self, key): + del self._container[key.lower()] + + def __contains__(self, key): + return key.lower() in self._container + + def __eq__(self, other): + if not isinstance(other, Mapping) and not hasattr(other, 'keys'): + return False + if not isinstance(other, type(self)): + other = type(self)(other) + return (dict((k.lower(), v) for k, v in self.itermerged()) == + dict((k.lower(), v) for k, v in other.itermerged())) + + def __ne__(self, other): + return not self.__eq__(other) + + if not PY3: # Python 2 + iterkeys = MutableMapping.iterkeys + itervalues = MutableMapping.itervalues + + __marker = object() + + def __len__(self): + return len(self._container) + + def __iter__(self): + # Only provide the originally cased names + for vals in self._container.values(): + yield vals[0] + + def pop(self, key, default=__marker): + '''D.pop(k[,d]) -> v, remove specified key and return the corresponding value. + If key is not found, d is returned if given, otherwise KeyError is raised. + ''' + # Using the MutableMapping function directly fails due to the private marker. + # Using ordinary dict.pop would expose the internal structures. + # So let's reinvent the wheel. + try: + value = self[key] + except KeyError: + if default is self.__marker: + raise + return default + else: + del self[key] + return value + + def discard(self, key): + try: + del self[key] + except KeyError: + pass + + def add(self, key, val): + """Adds a (name, value) pair, doesn't overwrite the value if it already + exists. + + >>> headers = HTTPHeaderDict(foo='bar') + >>> headers.add('Foo', 'baz') + >>> headers['foo'] + 'bar, baz' + """ + key_lower = key.lower() + new_vals = [key, val] + # Keep the common case aka no item present as fast as possible + vals = self._container.setdefault(key_lower, new_vals) + if new_vals is not vals: + vals.append(val) + + def extend(self, *args, **kwargs): + """Generic import function for any type of header-like object. + Adapted version of MutableMapping.update in order to insert items + with self.add instead of self.__setitem__ + """ + if len(args) > 1: + raise TypeError("extend() takes at most 1 positional " + "arguments ({0} given)".format(len(args))) + other = args[0] if len(args) >= 1 else () + + if isinstance(other, HTTPHeaderDict): + for key, val in other.iteritems(): + self.add(key, val) + elif isinstance(other, Mapping): + for key in other: + self.add(key, other[key]) + elif hasattr(other, "keys"): + for key in other.keys(): + self.add(key, other[key]) + else: + for key, value in other: + self.add(key, value) + + for key, value in kwargs.items(): + self.add(key, value) + + def getlist(self, key, default=__marker): + """Returns a list of all the values for the named field. 
Returns an + empty list if the key doesn't exist.""" + try: + vals = self._container[key.lower()] + except KeyError: + if default is self.__marker: + return [] + return default + else: + return vals[1:] + + # Backwards compatibility for httplib + getheaders = getlist + getallmatchingheaders = getlist + iget = getlist + + # Backwards compatibility for http.cookiejar + get_all = getlist + + def __repr__(self): + return "%s(%s)" % (type(self).__name__, dict(self.itermerged())) + + def _copy_from(self, other): + for key in other: + val = other.getlist(key) + if isinstance(val, list): + # Don't need to convert tuples + val = list(val) + self._container[key.lower()] = [key] + val + + def copy(self): + clone = type(self)() + clone._copy_from(self) + return clone + + def iteritems(self): + """Iterate over all header lines, including duplicate ones.""" + for key in self: + vals = self._container[key.lower()] + for val in vals[1:]: + yield vals[0], val + + def itermerged(self): + """Iterate over all headers, merging duplicate ones together.""" + for key in self: + val = self._container[key.lower()] + yield val[0], ', '.join(val[1:]) + + def items(self): + return list(self.iteritems()) + + @classmethod + def from_httplib(cls, message): # Python 2 + """Read headers from a Python 2 httplib message object.""" + # python2.7 does not expose a proper API for exporting multiheaders + # efficiently. This function re-reads raw lines from the message + # object and extracts the multiheaders properly. + obs_fold_continued_leaders = (' ', '\t') + headers = [] + + for line in message.headers: + if line.startswith(obs_fold_continued_leaders): + if not headers: + # We received a header line that starts with OWS as described + # in RFC-7230 S3.2.4. This indicates a multiline header, but + # there exists no previous header to which we can attach it. + raise InvalidHeader( + 'Header continuation with no previous header: %s' % line + ) + else: + key, value = headers[-1] + headers[-1] = (key, value + ' ' + line.strip()) + continue + + key, value = line.split(':', 1) + headers.append((key, value.strip())) + + return cls(headers) diff --git a/env/lib/python3.6/site-packages/urllib3/connection.py b/env/lib/python3.6/site-packages/urllib3/connection.py new file mode 100755 index 0000000..a03b573 --- /dev/null +++ b/env/lib/python3.6/site-packages/urllib3/connection.py @@ -0,0 +1,403 @@ +from __future__ import absolute_import +import datetime +import logging +import os +import sys +import socket +from socket import error as SocketError, timeout as SocketTimeout +import warnings +from .packages import six +from .packages.six.moves.http_client import HTTPConnection as _HTTPConnection +from .packages.six.moves.http_client import HTTPException # noqa: F401 + +try: # Compiled with SSL? + import ssl + BaseSSLError = ssl.SSLError +except (ImportError, AttributeError): # Platform-specific: No SSL. + ssl = None + + class BaseSSLError(BaseException): + pass + + +try: # Python 3: + # Not a no-op, we're adding this to the namespace so it can be imported. 
+ ConnectionError = ConnectionError +except NameError: # Python 2: + class ConnectionError(Exception): + pass + + +from .exceptions import ( + NewConnectionError, + ConnectTimeoutError, + SubjectAltNameWarning, + SystemTimeWarning, +) +from .packages.ssl_match_hostname import match_hostname, CertificateError + +from .util.ssl_ import ( + resolve_cert_reqs, + resolve_ssl_version, + assert_fingerprint, + create_urllib3_context, + ssl_wrap_socket +) + + +from .util import connection + +from ._collections import HTTPHeaderDict + +log = logging.getLogger(__name__) + +port_by_scheme = { + 'http': 80, + 'https': 443, +} + +# When updating RECENT_DATE, move it to within two years of the current date, +# and not less than 6 months ago. +# Example: if Today is 2018-01-01, then RECENT_DATE should be any date on or +# after 2016-01-01 (today - 2 years) AND before 2017-07-01 (today - 6 months) +RECENT_DATE = datetime.date(2017, 6, 30) + + +class DummyConnection(object): + """Used to detect a failed ConnectionCls import.""" + pass + + +class HTTPConnection(_HTTPConnection, object): + """ + Based on httplib.HTTPConnection but provides an extra constructor + backwards-compatibility layer between older and newer Pythons. + + Additional keyword parameters are used to configure attributes of the connection. + Accepted parameters include: + + - ``strict``: See the documentation on :class:`urllib3.connectionpool.HTTPConnectionPool` + - ``source_address``: Set the source address for the current connection. + + .. note:: This is ignored for Python 2.6. It is only applied for 2.7 and 3.x + + - ``socket_options``: Set specific options on the underlying socket. If not specified, then + defaults are loaded from ``HTTPConnection.default_socket_options`` which includes disabling + Nagle's algorithm (sets TCP_NODELAY to 1) unless the connection is behind a proxy. + + For example, if you wish to enable TCP Keep Alive in addition to the defaults, + you might pass:: + + HTTPConnection.default_socket_options + [ + (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1), + ] + + Or you may want to disable the defaults by passing an empty list (e.g., ``[]``). + """ + + default_port = port_by_scheme['http'] + + #: Disable Nagle's algorithm by default. + #: ``[(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]`` + default_socket_options = [(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)] + + #: Whether this connection verifies the host's certificate. + is_verified = False + + def __init__(self, *args, **kw): + if six.PY3: # Python 3 + kw.pop('strict', None) + + # Pre-set source_address in case we have an older Python like 2.6. + self.source_address = kw.get('source_address') + + if sys.version_info < (2, 7): # Python 2.6 + # _HTTPConnection on Python 2.6 will balk at this keyword arg, but + # not newer versions. We can still use it when creating a + # connection though, so we pop it *after* we have saved it as + # self.source_address. + kw.pop('source_address', None) + + #: The socket options provided by the user. If no options are + #: provided, we use the default options. + self.socket_options = kw.pop('socket_options', self.default_socket_options) + + # Superclass also sets self.source_address in Python 2.7+. + _HTTPConnection.__init__(self, *args, **kw) + + @property + def host(self): + """ + Getter method to remove any trailing dots that indicate the hostname is an FQDN. 
+ + In general, SSL certificates don't include the trailing dot indicating a + fully-qualified domain name, and thus, they don't validate properly when + checked against a domain name that includes the dot. In addition, some + servers may not expect to receive the trailing dot when provided. + + However, the hostname with trailing dot is critical to DNS resolution; doing a + lookup with the trailing dot will properly only resolve the appropriate FQDN, + whereas a lookup without a trailing dot will search the system's search domain + list. Thus, it's important to keep the original host around for use only in + those cases where it's appropriate (i.e., when doing DNS lookup to establish the + actual TCP connection across which we're going to send HTTP requests). + """ + return self._dns_host.rstrip('.') + + @host.setter + def host(self, value): + """ + Setter for the `host` property. + + We assume that only urllib3 uses the _dns_host attribute; httplib itself + only uses `host`, and it seems reasonable that other libraries follow suit. + """ + self._dns_host = value + + def _new_conn(self): + """ Establish a socket connection and set nodelay settings on it. + + :return: New socket connection. + """ + extra_kw = {} + if self.source_address: + extra_kw['source_address'] = self.source_address + + if self.socket_options: + extra_kw['socket_options'] = self.socket_options + + try: + conn = connection.create_connection( + (self._dns_host, self.port), self.timeout, **extra_kw) + + except SocketTimeout as e: + raise ConnectTimeoutError( + self, "Connection to %s timed out. (connect timeout=%s)" % + (self.host, self.timeout)) + + except SocketError as e: + raise NewConnectionError( + self, "Failed to establish a new connection: %s" % e) + + return conn + + def _prepare_conn(self, conn): + self.sock = conn + # the _tunnel_host attribute was added in python 2.6.3 (via + # http://hg.python.org/cpython/rev/0f57b30a152f) so pythons 2.6(0-2) do + # not have them. + if getattr(self, '_tunnel_host', None): + # TODO: Fix tunnel so it doesn't depend on self.sock state. 
+ self._tunnel() + # Mark this connection as not reusable + self.auto_open = 0 + + def connect(self): + conn = self._new_conn() + self._prepare_conn(conn) + + def request_chunked(self, method, url, body=None, headers=None): + """ + Alternative to the common request method, which sends the + body with chunked encoding and not as one block + """ + headers = HTTPHeaderDict(headers if headers is not None else {}) + skip_accept_encoding = 'accept-encoding' in headers + skip_host = 'host' in headers + self.putrequest( + method, + url, + skip_accept_encoding=skip_accept_encoding, + skip_host=skip_host + ) + for header, value in headers.items(): + self.putheader(header, value) + if 'transfer-encoding' not in headers: + self.putheader('Transfer-Encoding', 'chunked') + self.endheaders() + + if body is not None: + stringish_types = six.string_types + (six.binary_type,) + if isinstance(body, stringish_types): + body = (body,) + for chunk in body: + if not chunk: + continue + if not isinstance(chunk, six.binary_type): + chunk = chunk.encode('utf8') + len_str = hex(len(chunk))[2:] + self.send(len_str.encode('utf-8')) + self.send(b'\r\n') + self.send(chunk) + self.send(b'\r\n') + + # After the if clause, to always have a closed body + self.send(b'0\r\n\r\n') + + +class HTTPSConnection(HTTPConnection): + default_port = port_by_scheme['https'] + + ssl_version = None + + def __init__(self, host, port=None, key_file=None, cert_file=None, + strict=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, + ssl_context=None, **kw): + + HTTPConnection.__init__(self, host, port, strict=strict, + timeout=timeout, **kw) + + self.key_file = key_file + self.cert_file = cert_file + self.ssl_context = ssl_context + + # Required property for Google AppEngine 1.9.0 which otherwise causes + # HTTPS requests to go out as HTTP. (See Issue #356) + self._protocol = 'https' + + def connect(self): + conn = self._new_conn() + self._prepare_conn(conn) + + if self.ssl_context is None: + self.ssl_context = create_urllib3_context( + ssl_version=resolve_ssl_version(None), + cert_reqs=resolve_cert_reqs(None), + ) + + self.sock = ssl_wrap_socket( + sock=conn, + keyfile=self.key_file, + certfile=self.cert_file, + ssl_context=self.ssl_context, + ) + + +class VerifiedHTTPSConnection(HTTPSConnection): + """ + Based on httplib.HTTPSConnection but wraps the socket with + SSL certification. + """ + cert_reqs = None + ca_certs = None + ca_cert_dir = None + ssl_version = None + assert_fingerprint = None + + def set_cert(self, key_file=None, cert_file=None, + cert_reqs=None, ca_certs=None, + assert_hostname=None, assert_fingerprint=None, + ca_cert_dir=None): + """ + This method should only be called once, before the connection is used. + """ + # If cert_reqs is not provided, we can try to guess. If the user gave + # us a cert database, we assume they want to use it: otherwise, if + # they gave us an SSL Context object we should use whatever is set for + # it. 
+ if cert_reqs is None: + if ca_certs or ca_cert_dir: + cert_reqs = 'CERT_REQUIRED' + elif self.ssl_context is not None: + cert_reqs = self.ssl_context.verify_mode + + self.key_file = key_file + self.cert_file = cert_file + self.cert_reqs = cert_reqs + self.assert_hostname = assert_hostname + self.assert_fingerprint = assert_fingerprint + self.ca_certs = ca_certs and os.path.expanduser(ca_certs) + self.ca_cert_dir = ca_cert_dir and os.path.expanduser(ca_cert_dir) + + def connect(self): + # Add certificate verification + conn = self._new_conn() + + hostname = self.host + if getattr(self, '_tunnel_host', None): + # _tunnel_host was added in Python 2.6.3 + # (See: http://hg.python.org/cpython/rev/0f57b30a152f) + + self.sock = conn + # Calls self._set_hostport(), so self.host is + # self._tunnel_host below. + self._tunnel() + # Mark this connection as not reusable + self.auto_open = 0 + + # Override the host with the one we're requesting data from. + hostname = self._tunnel_host + + is_time_off = datetime.date.today() < RECENT_DATE + if is_time_off: + warnings.warn(( + 'System time is way off (before {0}). This will probably ' + 'lead to SSL verification errors').format(RECENT_DATE), + SystemTimeWarning + ) + + # Wrap socket using verification with the root certs in + # trusted_root_certs + if self.ssl_context is None: + self.ssl_context = create_urllib3_context( + ssl_version=resolve_ssl_version(self.ssl_version), + cert_reqs=resolve_cert_reqs(self.cert_reqs), + ) + + context = self.ssl_context + context.verify_mode = resolve_cert_reqs(self.cert_reqs) + self.sock = ssl_wrap_socket( + sock=conn, + keyfile=self.key_file, + certfile=self.cert_file, + ca_certs=self.ca_certs, + ca_cert_dir=self.ca_cert_dir, + server_hostname=hostname, + ssl_context=context) + + if self.assert_fingerprint: + assert_fingerprint(self.sock.getpeercert(binary_form=True), + self.assert_fingerprint) + elif context.verify_mode != ssl.CERT_NONE \ + and not getattr(context, 'check_hostname', False) \ + and self.assert_hostname is not False: + # While urllib3 attempts to always turn off hostname matching from + # the TLS library, this cannot always be done. So we check whether + # the TLS Library still thinks it's matching hostnames. + cert = self.sock.getpeercert() + if not cert.get('subjectAltName', ()): + warnings.warn(( + 'Certificate for {0} has no `subjectAltName`, falling back to check for a ' + '`commonName` for now. This feature is being removed by major browsers and ' + 'deprecated by RFC 2818. (See https://github.com/shazow/urllib3/issues/497 ' + 'for details.)'.format(hostname)), + SubjectAltNameWarning + ) + _match_hostname(cert, self.assert_hostname or hostname) + + self.is_verified = ( + context.verify_mode == ssl.CERT_REQUIRED or + self.assert_fingerprint is not None + ) + + +def _match_hostname(cert, asserted_hostname): + try: + match_hostname(cert, asserted_hostname) + except CertificateError as e: + log.error( + 'Certificate did not match expected hostname: %s. ' + 'Certificate: %s', asserted_hostname, cert + ) + # Add cert to exception and reraise so client code can inspect + # the cert when catching the exception, if they want to + e._peer_cert = cert + raise + + +if ssl: + # Make a copy for testing. 
+ UnverifiedHTTPSConnection = HTTPSConnection + HTTPSConnection = VerifiedHTTPSConnection +else: + HTTPSConnection = DummyConnection diff --git a/env/lib/python3.6/site-packages/urllib3/connectionpool.py b/env/lib/python3.6/site-packages/urllib3/connectionpool.py new file mode 100755 index 0000000..8fcb0bc --- /dev/null +++ b/env/lib/python3.6/site-packages/urllib3/connectionpool.py @@ -0,0 +1,906 @@ +from __future__ import absolute_import +import errno +import logging +import sys +import warnings + +from socket import error as SocketError, timeout as SocketTimeout +import socket + + +from .exceptions import ( + ClosedPoolError, + ProtocolError, + EmptyPoolError, + HeaderParsingError, + HostChangedError, + LocationValueError, + MaxRetryError, + ProxyError, + ReadTimeoutError, + SSLError, + TimeoutError, + InsecureRequestWarning, + NewConnectionError, +) +from .packages.ssl_match_hostname import CertificateError +from .packages import six +from .packages.six.moves import queue +from .connection import ( + port_by_scheme, + DummyConnection, + HTTPConnection, HTTPSConnection, VerifiedHTTPSConnection, + HTTPException, BaseSSLError, +) +from .request import RequestMethods +from .response import HTTPResponse + +from .util.connection import is_connection_dropped +from .util.request import set_file_position +from .util.response import assert_header_parsing +from .util.retry import Retry +from .util.timeout import Timeout +from .util.url import get_host, Url, NORMALIZABLE_SCHEMES +from .util.queue import LifoQueue + + +xrange = six.moves.xrange + +log = logging.getLogger(__name__) + +_Default = object() + + +# Pool objects +class ConnectionPool(object): + """ + Base class for all connection pools, such as + :class:`.HTTPConnectionPool` and :class:`.HTTPSConnectionPool`. + """ + + scheme = None + QueueCls = LifoQueue + + def __init__(self, host, port=None): + if not host: + raise LocationValueError("No host specified.") + + self.host = _ipv6_host(host, self.scheme) + self._proxy_host = host.lower() + self.port = port + + def __str__(self): + return '%s(host=%r, port=%r)' % (type(self).__name__, + self.host, self.port) + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + self.close() + # Return False to re-raise any potential exceptions + return False + + def close(self): + """ + Close all pooled connections and disable the pool. + """ + pass + + +# This is taken from http://hg.python.org/cpython/file/7aaba721ebc0/Lib/socket.py#l252 +_blocking_errnos = set([errno.EAGAIN, errno.EWOULDBLOCK]) + + +class HTTPConnectionPool(ConnectionPool, RequestMethods): + """ + Thread-safe connection pool for one host. + + :param host: + Host used for this HTTP Connection (e.g. "localhost"), passed into + :class:`httplib.HTTPConnection`. + + :param port: + Port used for this HTTP Connection (None is equivalent to 80), passed + into :class:`httplib.HTTPConnection`. + + :param strict: + Causes BadStatusLine to be raised if the status line can't be parsed + as a valid HTTP/1.0 or 1.1 status line, passed into + :class:`httplib.HTTPConnection`. + + .. note:: + Only works in Python 2. This parameter is ignored in Python 3. + + :param timeout: + Socket timeout in seconds for each individual connection. This can + be a float or integer, which sets the timeout for the HTTP request, + or an instance of :class:`urllib3.util.Timeout` which gives you more + fine-grained control over request timeouts. After the constructor has + been parsed, this is always a `urllib3.util.Timeout` object. 
+ + :param maxsize: + Number of connections to save that can be reused. More than 1 is useful + in multithreaded situations. If ``block`` is set to False, more + connections will be created but they will not be saved once they've + been used. + + :param block: + If set to True, no more than ``maxsize`` connections will be used at + a time. When no free connections are available, the call will block + until a connection has been released. This is a useful side effect for + particular multithreaded situations where one does not want to use more + than maxsize connections per host to prevent flooding. + + :param headers: + Headers to include with all requests, unless other headers are given + explicitly. + + :param retries: + Retry configuration to use by default with requests in this pool. + + :param _proxy: + Parsed proxy URL, should not be used directly, instead, see + :class:`urllib3.connectionpool.ProxyManager`" + + :param _proxy_headers: + A dictionary with proxy headers, should not be used directly, + instead, see :class:`urllib3.connectionpool.ProxyManager`" + + :param \\**conn_kw: + Additional parameters are used to create fresh :class:`urllib3.connection.HTTPConnection`, + :class:`urllib3.connection.HTTPSConnection` instances. + """ + + scheme = 'http' + ConnectionCls = HTTPConnection + ResponseCls = HTTPResponse + + def __init__(self, host, port=None, strict=False, + timeout=Timeout.DEFAULT_TIMEOUT, maxsize=1, block=False, + headers=None, retries=None, + _proxy=None, _proxy_headers=None, + **conn_kw): + ConnectionPool.__init__(self, host, port) + RequestMethods.__init__(self, headers) + + self.strict = strict + + if not isinstance(timeout, Timeout): + timeout = Timeout.from_float(timeout) + + if retries is None: + retries = Retry.DEFAULT + + self.timeout = timeout + self.retries = retries + + self.pool = self.QueueCls(maxsize) + self.block = block + + self.proxy = _proxy + self.proxy_headers = _proxy_headers or {} + + # Fill the queue up so that doing get() on it will block properly + for _ in xrange(maxsize): + self.pool.put(None) + + # These are mostly for testing and debugging purposes. + self.num_connections = 0 + self.num_requests = 0 + self.conn_kw = conn_kw + + if self.proxy: + # Enable Nagle's algorithm for proxies, to avoid packet fragmentation. + # We cannot know if the user has added default socket options, so we cannot replace the + # list. + self.conn_kw.setdefault('socket_options', []) + + def _new_conn(self): + """ + Return a fresh :class:`HTTPConnection`. + """ + self.num_connections += 1 + log.debug("Starting new HTTP connection (%d): %s:%s", + self.num_connections, self.host, self.port or "80") + + conn = self.ConnectionCls(host=self.host, port=self.port, + timeout=self.timeout.connect_timeout, + strict=self.strict, **self.conn_kw) + return conn + + def _get_conn(self, timeout=None): + """ + Get a connection. Will return a pooled connection if one is available. + + If no connections are available and :prop:`.block` is ``False``, then a + fresh connection is returned. + + :param timeout: + Seconds to wait before giving up and raising + :class:`urllib3.exceptions.EmptyPoolError` if the pool is empty and + :prop:`.block` is ``True``. 
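A small sketch of the maxsize/block semantics described above, poking at the private _get_conn/_put_conn helpers purely for illustration; no sockets are opened, since a fresh connection object is only constructed, not connected:

    import urllib3
    from urllib3.exceptions import EmptyPoolError

    # maxsize=1 pre-fills the internal queue with a single slot; block=True
    # means a second checkout waits for `timeout` seconds and then fails
    # rather than opening an extra connection.
    pool = urllib3.HTTPConnectionPool('example.org', maxsize=1, block=True)

    first = pool._get_conn(timeout=0.1)   # claims the only slot (no socket yet)
    try:
        pool._get_conn(timeout=0.1)
    except EmptyPoolError:
        print('pool exhausted: no free connection within the timeout')
    finally:
        pool._put_conn(first)             # hand the slot back for reuse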
+ """ + conn = None + try: + conn = self.pool.get(block=self.block, timeout=timeout) + + except AttributeError: # self.pool is None + raise ClosedPoolError(self, "Pool is closed.") + + except queue.Empty: + if self.block: + raise EmptyPoolError(self, + "Pool reached maximum size and no more " + "connections are allowed.") + pass # Oh well, we'll create a new connection then + + # If this is a persistent connection, check if it got disconnected + if conn and is_connection_dropped(conn): + log.debug("Resetting dropped connection: %s", self.host) + conn.close() + if getattr(conn, 'auto_open', 1) == 0: + # This is a proxied connection that has been mutated by + # httplib._tunnel() and cannot be reused (since it would + # attempt to bypass the proxy) + conn = None + + return conn or self._new_conn() + + def _put_conn(self, conn): + """ + Put a connection back into the pool. + + :param conn: + Connection object for the current host and port as returned by + :meth:`._new_conn` or :meth:`._get_conn`. + + If the pool is already full, the connection is closed and discarded + because we exceeded maxsize. If connections are discarded frequently, + then maxsize should be increased. + + If the pool is closed, then the connection will be closed and discarded. + """ + try: + self.pool.put(conn, block=False) + return # Everything is dandy, done. + except AttributeError: + # self.pool is None. + pass + except queue.Full: + # This should never happen if self.block == True + log.warning( + "Connection pool is full, discarding connection: %s", + self.host) + + # Connection never got put back into the pool, close it. + if conn: + conn.close() + + def _validate_conn(self, conn): + """ + Called right before a request is made, after the socket is created. + """ + pass + + def _prepare_proxy(self, conn): + # Nothing to do for HTTP connections. + pass + + def _get_timeout(self, timeout): + """ Helper that always returns a :class:`urllib3.util.Timeout` """ + if timeout is _Default: + return self.timeout.clone() + + if isinstance(timeout, Timeout): + return timeout.clone() + else: + # User passed us an int/float. This is for backwards compatibility, + # can be removed later + return Timeout.from_float(timeout) + + def _raise_timeout(self, err, url, timeout_value): + """Is the error actually a timeout? Will raise a ReadTimeout or pass""" + + if isinstance(err, SocketTimeout): + raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value) + + # See the above comment about EAGAIN in Python 3. In Python 2 we have + # to specifically catch it and throw the timeout error + if hasattr(err, 'errno') and err.errno in _blocking_errnos: + raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value) + + # Catch possible read timeouts thrown as SSL errors. If not the + # case, rethrow the original. We need to do this because of: + # http://bugs.python.org/issue10272 + if 'timed out' in str(err) or 'did not complete (read)' in str(err): # Python 2.6 + raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value) + + def _make_request(self, conn, method, url, timeout=_Default, chunked=False, + **httplib_request_kw): + """ + Perform a request on a given urllib connection object taken from our + pool. + + :param conn: + a connection from one of our connection pools + + :param timeout: + Socket timeout in seconds for the request. 
This can be a + float or integer, which will set the same timeout value for + the socket connect and the socket read, or an instance of + :class:`urllib3.util.Timeout`, which gives you more fine-grained + control over your timeouts. + """ + self.num_requests += 1 + + timeout_obj = self._get_timeout(timeout) + timeout_obj.start_connect() + conn.timeout = timeout_obj.connect_timeout + + # Trigger any extra validation we need to do. + try: + self._validate_conn(conn) + except (SocketTimeout, BaseSSLError) as e: + # Py2 raises this as a BaseSSLError, Py3 raises it as socket timeout. + self._raise_timeout(err=e, url=url, timeout_value=conn.timeout) + raise + + # conn.request() calls httplib.*.request, not the method in + # urllib3.request. It also calls makefile (recv) on the socket. + if chunked: + conn.request_chunked(method, url, **httplib_request_kw) + else: + conn.request(method, url, **httplib_request_kw) + + # Reset the timeout for the recv() on the socket + read_timeout = timeout_obj.read_timeout + + # App Engine doesn't have a sock attr + if getattr(conn, 'sock', None): + # In Python 3 socket.py will catch EAGAIN and return None when you + # try and read into the file pointer created by http.client, which + # instead raises a BadStatusLine exception. Instead of catching + # the exception and assuming all BadStatusLine exceptions are read + # timeouts, check for a zero timeout before making the request. + if read_timeout == 0: + raise ReadTimeoutError( + self, url, "Read timed out. (read timeout=%s)" % read_timeout) + if read_timeout is Timeout.DEFAULT_TIMEOUT: + conn.sock.settimeout(socket.getdefaulttimeout()) + else: # None or a value + conn.sock.settimeout(read_timeout) + + # Receive the response from the server + try: + try: # Python 2.7, use buffering of HTTP responses + httplib_response = conn.getresponse(buffering=True) + except TypeError: # Python 2.6 and older, Python 3 + try: + httplib_response = conn.getresponse() + except Exception as e: + # Remove the TypeError from the exception chain in Python 3; + # otherwise it looks like a programming error was the cause. + six.raise_from(e, None) + except (SocketTimeout, BaseSSLError, SocketError) as e: + self._raise_timeout(err=e, url=url, timeout_value=read_timeout) + raise + + # AppEngine doesn't have a version attr. + http_version = getattr(conn, '_http_vsn_str', 'HTTP/?') + log.debug("%s://%s:%s \"%s %s %s\" %s %s", self.scheme, self.host, self.port, + method, url, http_version, httplib_response.status, + httplib_response.length) + + try: + assert_header_parsing(httplib_response.msg) + except (HeaderParsingError, TypeError) as hpe: # Platform-specific: Python 3 + log.warning( + 'Failed to parse headers (url=%s): %s', + self._absolute_url(url), hpe, exc_info=True) + + return httplib_response + + def _absolute_url(self, path): + return Url(scheme=self.scheme, host=self.host, port=self.port, path=path).url + + def close(self): + """ + Close all pooled connections and disable the pool. + """ + if self.pool is None: + return + # Disable access to the pool + old_pool, self.pool = self.pool, None + + try: + while True: + conn = old_pool.get(block=False) + if conn: + conn.close() + + except queue.Empty: + pass # Done. + + def is_same_host(self, url): + """ + Check if the given ``url`` is a member of the same host as this + connection pool. + """ + if url.startswith('/'): + return True + + # TODO: Add optional support for socket.gethostbyname checking. 
+ scheme, host, port = get_host(url) + + host = _ipv6_host(host, self.scheme) + + # Use explicit default port for comparison when none is given + if self.port and not port: + port = port_by_scheme.get(scheme) + elif not self.port and port == port_by_scheme.get(scheme): + port = None + + return (scheme, host, port) == (self.scheme, self.host, self.port) + + def urlopen(self, method, url, body=None, headers=None, retries=None, + redirect=True, assert_same_host=True, timeout=_Default, + pool_timeout=None, release_conn=None, chunked=False, + body_pos=None, **response_kw): + """ + Get a connection from the pool and perform an HTTP request. This is the + lowest level call for making a request, so you'll need to specify all + the raw details. + + .. note:: + + More commonly, it's appropriate to use a convenience method provided + by :class:`.RequestMethods`, such as :meth:`request`. + + .. note:: + + `release_conn` will only behave as expected if + `preload_content=False` because we want to make + `preload_content=False` the default behaviour someday soon without + breaking backwards compatibility. + + :param method: + HTTP request method (such as GET, POST, PUT, etc.) + + :param body: + Data to send in the request body (useful for creating + POST requests, see HTTPConnectionPool.post_url for + more convenience). + + :param headers: + Dictionary of custom headers to send, such as User-Agent, + If-None-Match, etc. If None, pool headers are used. If provided, + these headers completely replace any pool-specific headers. + + :param retries: + Configure the number of retries to allow before raising a + :class:`~urllib3.exceptions.MaxRetryError` exception. + + Pass ``None`` to retry until you receive a response. Pass a + :class:`~urllib3.util.retry.Retry` object for fine-grained control + over different types of retries. + Pass an integer number to retry connection errors that many times, + but no other types of errors. Pass zero to never retry. + + If ``False``, then retries are disabled and any exception is raised + immediately. Also, instead of raising a MaxRetryError on redirects, + the redirect response will be returned. + + :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int. + + :param redirect: + If True, automatically handle redirects (status codes 301, 302, + 303, 307, 308). Each redirect counts as a retry. Disabling retries + will disable redirect, too. + + :param assert_same_host: + If ``True``, will make sure that the host of the pool requests is + consistent else will raise HostChangedError. When False, you can + use the pool on an HTTP proxy and request foreign hosts. + + :param timeout: + If specified, overrides the default timeout for this one + request. It may be a float (in seconds) or an instance of + :class:`urllib3.util.Timeout`. + + :param pool_timeout: + If set and the pool is set to block=True, then this method will + block for ``pool_timeout`` seconds and raise EmptyPoolError if no + connection is available within the time period. + + :param release_conn: + If False, then the urlopen call will not release the connection + back into the pool once a response is received (but will release if + you read the entire contents of the response such as when + `preload_content=True`). This is useful if you're not preloading + the response's content immediately. You will need to call + ``r.release_conn()`` on the response ``r`` to return the connection + back into the pool. If None, it takes the value of + ``response_kw.get('preload_content', True)``. 
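A hedged sketch of the retries parameter described above, using a Retry object; the host, status code and back-off values are arbitrary examples, and the network call is left commented out:

    import urllib3
    from urllib3.util.retry import Retry

    pool = urllib3.HTTPConnectionPool('httpbin.org')

    # A Retry object gives the fine-grained control described above: up to
    # three extra attempts, retrying 503 responses with a short backoff.
    # An integer would retry only connection errors; False disables retrying
    # (and redirect handling) entirely.
    retry = Retry(total=3, backoff_factor=0.2, status_forcelist=[503])

    # resp = pool.urlopen('GET', '/status/503', retries=retry)

    # With preload_content=False the connection is not released automatically;
    # call resp.release_conn() once the body has been read, as noted for
    # `release_conn` above.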
+ + :param chunked: + If True, urllib3 will send the body using chunked transfer + encoding. Otherwise, urllib3 will send the body using the standard + content-length form. Defaults to False. + + :param int body_pos: + Position to seek to in file-like body in the event of a retry or + redirect. Typically this won't need to be set because urllib3 will + auto-populate the value when needed. + + :param \\**response_kw: + Additional parameters are passed to + :meth:`urllib3.response.HTTPResponse.from_httplib` + """ + if headers is None: + headers = self.headers + + if not isinstance(retries, Retry): + retries = Retry.from_int(retries, redirect=redirect, default=self.retries) + + if release_conn is None: + release_conn = response_kw.get('preload_content', True) + + # Check host + if assert_same_host and not self.is_same_host(url): + raise HostChangedError(self, url, retries) + + conn = None + + # Track whether `conn` needs to be released before + # returning/raising/recursing. Update this variable if necessary, and + # leave `release_conn` constant throughout the function. That way, if + # the function recurses, the original value of `release_conn` will be + # passed down into the recursive call, and its value will be respected. + # + # See issue #651 [1] for details. + # + # [1] + release_this_conn = release_conn + + # Merge the proxy headers. Only do this in HTTP. We have to copy the + # headers dict so we can safely change it without those changes being + # reflected in anyone else's copy. + if self.scheme == 'http': + headers = headers.copy() + headers.update(self.proxy_headers) + + # Must keep the exception bound to a separate variable or else Python 3 + # complains about UnboundLocalError. + err = None + + # Keep track of whether we cleanly exited the except block. This + # ensures we do proper cleanup in finally. + clean_exit = False + + # Rewind body position, if needed. Record current position + # for future rewinds in the event of a redirect/retry. + body_pos = set_file_position(body, body_pos) + + try: + # Request a connection from the queue. + timeout_obj = self._get_timeout(timeout) + conn = self._get_conn(timeout=pool_timeout) + + conn.timeout = timeout_obj.connect_timeout + + is_new_proxy_conn = self.proxy is not None and not getattr(conn, 'sock', None) + if is_new_proxy_conn: + self._prepare_proxy(conn) + + # Make the request on the httplib connection object. + httplib_response = self._make_request(conn, method, url, + timeout=timeout_obj, + body=body, headers=headers, + chunked=chunked) + + # If we're going to release the connection in ``finally:``, then + # the response doesn't need to know about the connection. Otherwise + # it will also try to release it and we'll have a double-release + # mess. + response_conn = conn if not release_conn else None + + # Pass method to Response for length checking + response_kw['request_method'] = method + + # Import httplib's response into our own wrapper object + response = self.ResponseCls.from_httplib(httplib_response, + pool=self, + connection=response_conn, + retries=retries, + **response_kw) + + # Everything went great! + clean_exit = True + + except queue.Empty: + # Timed out by queue. + raise EmptyPoolError(self, "No pool connections are available.") + + except (TimeoutError, HTTPException, SocketError, ProtocolError, + BaseSSLError, SSLError, CertificateError) as e: + # Discard the connection for these exceptions. It will be + # replaced during the next _get_conn() call. 
+ clean_exit = False + if isinstance(e, (BaseSSLError, CertificateError)): + e = SSLError(e) + elif isinstance(e, (SocketError, NewConnectionError)) and self.proxy: + e = ProxyError('Cannot connect to proxy.', e) + elif isinstance(e, (SocketError, HTTPException)): + e = ProtocolError('Connection aborted.', e) + + retries = retries.increment(method, url, error=e, _pool=self, + _stacktrace=sys.exc_info()[2]) + retries.sleep() + + # Keep track of the error for the retry warning. + err = e + + finally: + if not clean_exit: + # We hit some kind of exception, handled or otherwise. We need + # to throw the connection away unless explicitly told not to. + # Close the connection, set the variable to None, and make sure + # we put the None back in the pool to avoid leaking it. + conn = conn and conn.close() + release_this_conn = True + + if release_this_conn: + # Put the connection back to be reused. If the connection is + # expired then it will be None, which will get replaced with a + # fresh connection during _get_conn. + self._put_conn(conn) + + if not conn: + # Try again + log.warning("Retrying (%r) after connection " + "broken by '%r': %s", retries, err, url) + return self.urlopen(method, url, body, headers, retries, + redirect, assert_same_host, + timeout=timeout, pool_timeout=pool_timeout, + release_conn=release_conn, body_pos=body_pos, + **response_kw) + + def drain_and_release_conn(response): + try: + # discard any remaining response body, the connection will be + # released back to the pool once the entire response is read + response.read() + except (TimeoutError, HTTPException, SocketError, ProtocolError, + BaseSSLError, SSLError) as e: + pass + + # Handle redirect? + redirect_location = redirect and response.get_redirect_location() + if redirect_location: + if response.status == 303: + method = 'GET' + + try: + retries = retries.increment(method, url, response=response, _pool=self) + except MaxRetryError: + if retries.raise_on_redirect: + # Drain and release the connection for this response, since + # we're not returning it to be released manually. + drain_and_release_conn(response) + raise + return response + + # drain and return the connection to the pool before recursing + drain_and_release_conn(response) + + retries.sleep_for_retry(response) + log.debug("Redirecting %s -> %s", url, redirect_location) + return self.urlopen( + method, redirect_location, body, headers, + retries=retries, redirect=redirect, + assert_same_host=assert_same_host, + timeout=timeout, pool_timeout=pool_timeout, + release_conn=release_conn, body_pos=body_pos, + **response_kw) + + # Check if we should retry the HTTP response. + has_retry_after = bool(response.getheader('Retry-After')) + if retries.is_retry(method, response.status, has_retry_after): + try: + retries = retries.increment(method, url, response=response, _pool=self) + except MaxRetryError: + if retries.raise_on_status: + # Drain and release the connection for this response, since + # we're not returning it to be released manually. 
+ drain_and_release_conn(response) + raise + return response + + # drain and return the connection to the pool before recursing + drain_and_release_conn(response) + + retries.sleep(response) + log.debug("Retry: %s", url) + return self.urlopen( + method, url, body, headers, + retries=retries, redirect=redirect, + assert_same_host=assert_same_host, + timeout=timeout, pool_timeout=pool_timeout, + release_conn=release_conn, + body_pos=body_pos, **response_kw) + + return response + + +class HTTPSConnectionPool(HTTPConnectionPool): + """ + Same as :class:`.HTTPConnectionPool`, but HTTPS. + + When Python is compiled with the :mod:`ssl` module, then + :class:`.VerifiedHTTPSConnection` is used, which *can* verify certificates, + instead of :class:`.HTTPSConnection`. + + :class:`.VerifiedHTTPSConnection` uses one of ``assert_fingerprint``, + ``assert_hostname`` and ``host`` in this order to verify connections. + If ``assert_hostname`` is False, no verification is done. + + The ``key_file``, ``cert_file``, ``cert_reqs``, ``ca_certs``, + ``ca_cert_dir``, and ``ssl_version`` are only used if :mod:`ssl` is + available and are fed into :meth:`urllib3.util.ssl_wrap_socket` to upgrade + the connection socket into an SSL socket. + """ + + scheme = 'https' + ConnectionCls = HTTPSConnection + + def __init__(self, host, port=None, + strict=False, timeout=Timeout.DEFAULT_TIMEOUT, maxsize=1, + block=False, headers=None, retries=None, + _proxy=None, _proxy_headers=None, + key_file=None, cert_file=None, cert_reqs=None, + ca_certs=None, ssl_version=None, + assert_hostname=None, assert_fingerprint=None, + ca_cert_dir=None, **conn_kw): + + HTTPConnectionPool.__init__(self, host, port, strict, timeout, maxsize, + block, headers, retries, _proxy, _proxy_headers, + **conn_kw) + + if ca_certs and cert_reqs is None: + cert_reqs = 'CERT_REQUIRED' + + self.key_file = key_file + self.cert_file = cert_file + self.cert_reqs = cert_reqs + self.ca_certs = ca_certs + self.ca_cert_dir = ca_cert_dir + self.ssl_version = ssl_version + self.assert_hostname = assert_hostname + self.assert_fingerprint = assert_fingerprint + + def _prepare_conn(self, conn): + """ + Prepare the ``connection`` for :meth:`urllib3.util.ssl_wrap_socket` + and establish the tunnel if proxy is used. + """ + + if isinstance(conn, VerifiedHTTPSConnection): + conn.set_cert(key_file=self.key_file, + cert_file=self.cert_file, + cert_reqs=self.cert_reqs, + ca_certs=self.ca_certs, + ca_cert_dir=self.ca_cert_dir, + assert_hostname=self.assert_hostname, + assert_fingerprint=self.assert_fingerprint) + conn.ssl_version = self.ssl_version + return conn + + def _prepare_proxy(self, conn): + """ + Establish tunnel connection early, because otherwise httplib + would improperly set Host: header to proxy's IP:port. + """ + # Python 2.7+ + try: + set_tunnel = conn.set_tunnel + except AttributeError: # Platform-specific: Python 2.6 + set_tunnel = conn._set_tunnel + + if sys.version_info <= (2, 6, 4) and not self.proxy_headers: # Python 2.6.4 and older + set_tunnel(self._proxy_host, self.port) + else: + set_tunnel(self._proxy_host, self.port, self.proxy_headers) + + conn.connect() + + def _new_conn(self): + """ + Return a fresh :class:`httplib.HTTPSConnection`. 
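The tunnelling behaviour described in _prepare_proxy() above is normally reached through ProxyManager rather than by building the pool directly. A minimal sketch with an assumed local proxy address and throwaway credentials; constructing the manager makes no connection:

    import urllib3
    from urllib3.util import make_headers

    # HTTPS through an HTTP proxy relies on the early CONNECT tunnel set up
    # by _prepare_proxy(), so the Host header is not the proxy's address.
    proxied = urllib3.ProxyManager(
        'http://127.0.0.1:3128/',
        proxy_headers=make_headers(proxy_basic_auth='user:secret'))

    # resp = proxied.request('GET', 'https://example.org/')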
+ """ + self.num_connections += 1 + log.debug("Starting new HTTPS connection (%d): %s:%s", + self.num_connections, self.host, self.port or "443") + + if not self.ConnectionCls or self.ConnectionCls is DummyConnection: + raise SSLError("Can't connect to HTTPS URL because the SSL " + "module is not available.") + + actual_host = self.host + actual_port = self.port + if self.proxy is not None: + actual_host = self.proxy.host + actual_port = self.proxy.port + + conn = self.ConnectionCls(host=actual_host, port=actual_port, + timeout=self.timeout.connect_timeout, + strict=self.strict, **self.conn_kw) + + return self._prepare_conn(conn) + + def _validate_conn(self, conn): + """ + Called right before a request is made, after the socket is created. + """ + super(HTTPSConnectionPool, self)._validate_conn(conn) + + # Force connect early to allow us to validate the connection. + if not getattr(conn, 'sock', None): # AppEngine might not have `.sock` + conn.connect() + + if not conn.is_verified: + warnings.warn(( + 'Unverified HTTPS request is being made. ' + 'Adding certificate verification is strongly advised. See: ' + 'https://urllib3.readthedocs.io/en/latest/advanced-usage.html' + '#ssl-warnings'), + InsecureRequestWarning) + + +def connection_from_url(url, **kw): + """ + Given a url, return an :class:`.ConnectionPool` instance of its host. + + This is a shortcut for not having to parse out the scheme, host, and port + of the url before creating an :class:`.ConnectionPool` instance. + + :param url: + Absolute URL string that must include the scheme. Port is optional. + + :param \\**kw: + Passes additional parameters to the constructor of the appropriate + :class:`.ConnectionPool`. Useful for specifying things like + timeout, maxsize, headers, etc. + + Example:: + + >>> conn = connection_from_url('http://google.com/') + >>> r = conn.request('GET', '/') + """ + scheme, host, port = get_host(url) + port = port or port_by_scheme.get(scheme, 80) + if scheme == 'https': + return HTTPSConnectionPool(host, port=port, **kw) + else: + return HTTPConnectionPool(host, port=port, **kw) + + +def _ipv6_host(host, scheme): + """ + Process IPv6 address literals + """ + + # httplib doesn't like it when we include brackets in IPv6 addresses + # Specifically, if we include brackets but also pass the port then + # httplib crazily doubles up the square brackets on the Host header. + # Instead, we need to make sure we never pass ``None`` as the port. + # However, for backward compatibility reasons we can't actually + # *assert* that. See http://bugs.python.org/issue28539 + # + # Also if an IPv6 address literal has a zone identifier, the + # percent sign might be URIencoded, convert it back into ASCII + if host.startswith('[') and host.endswith(']'): + host = host.replace('%25', '%').strip('[]') + if scheme in NORMALIZABLE_SCHEMES: + host = host.lower() + return host diff --git a/env/lib/python3.6/site-packages/urllib3/contrib/_securetransport/bindings.py b/env/lib/python3.6/site-packages/urllib3/contrib/_securetransport/bindings.py new file mode 100755 index 0000000..bcf41c0 --- /dev/null +++ b/env/lib/python3.6/site-packages/urllib3/contrib/_securetransport/bindings.py @@ -0,0 +1,593 @@ +""" +This module uses ctypes to bind a whole bunch of functions and constants from +SecureTransport. The goal here is to provide the low-level API to +SecureTransport. These are essentially the C-level functions and constants, and +they're pretty gross to work with. 
+ +This code is a bastardised version of the code found in Will Bond's oscrypto +library. An enormous debt is owed to him for blazing this trail for us. For +that reason, this code should be considered to be covered both by urllib3's +license and by oscrypto's: + + Copyright (c) 2015-2016 Will Bond + + Permission is hereby granted, free of charge, to any person obtaining a + copy of this software and associated documentation files (the "Software"), + to deal in the Software without restriction, including without limitation + the rights to use, copy, modify, merge, publish, distribute, sublicense, + and/or sell copies of the Software, and to permit persons to whom the + Software is furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + DEALINGS IN THE SOFTWARE. +""" +from __future__ import absolute_import + +import platform +from ctypes.util import find_library +from ctypes import ( + c_void_p, c_int32, c_char_p, c_size_t, c_byte, c_uint32, c_ulong, c_long, + c_bool +) +from ctypes import CDLL, POINTER, CFUNCTYPE + + +security_path = find_library('Security') +if not security_path: + raise ImportError('The library Security could not be found') + + +core_foundation_path = find_library('CoreFoundation') +if not core_foundation_path: + raise ImportError('The library CoreFoundation could not be found') + + +version = platform.mac_ver()[0] +version_info = tuple(map(int, version.split('.'))) +if version_info < (10, 8): + raise OSError( + 'Only OS X 10.8 and newer are supported, not %s.%s' % ( + version_info[0], version_info[1] + ) + ) + +Security = CDLL(security_path, use_errno=True) +CoreFoundation = CDLL(core_foundation_path, use_errno=True) + +Boolean = c_bool +CFIndex = c_long +CFStringEncoding = c_uint32 +CFData = c_void_p +CFString = c_void_p +CFArray = c_void_p +CFMutableArray = c_void_p +CFDictionary = c_void_p +CFError = c_void_p +CFType = c_void_p +CFTypeID = c_ulong + +CFTypeRef = POINTER(CFType) +CFAllocatorRef = c_void_p + +OSStatus = c_int32 + +CFDataRef = POINTER(CFData) +CFStringRef = POINTER(CFString) +CFArrayRef = POINTER(CFArray) +CFMutableArrayRef = POINTER(CFMutableArray) +CFDictionaryRef = POINTER(CFDictionary) +CFArrayCallBacks = c_void_p +CFDictionaryKeyCallBacks = c_void_p +CFDictionaryValueCallBacks = c_void_p + +SecCertificateRef = POINTER(c_void_p) +SecExternalFormat = c_uint32 +SecExternalItemType = c_uint32 +SecIdentityRef = POINTER(c_void_p) +SecItemImportExportFlags = c_uint32 +SecItemImportExportKeyParameters = c_void_p +SecKeychainRef = POINTER(c_void_p) +SSLProtocol = c_uint32 +SSLCipherSuite = c_uint32 +SSLContextRef = POINTER(c_void_p) +SecTrustRef = POINTER(c_void_p) +SSLConnectionRef = c_uint32 +SecTrustResultType = c_uint32 +SecTrustOptionFlags = c_uint32 +SSLProtocolSide = c_uint32 +SSLConnectionType = c_uint32 +SSLSessionOption = c_uint32 + + +try: + Security.SecItemImport.argtypes = [ + CFDataRef, + CFStringRef, + POINTER(SecExternalFormat), + POINTER(SecExternalItemType), + 
SecItemImportExportFlags, + POINTER(SecItemImportExportKeyParameters), + SecKeychainRef, + POINTER(CFArrayRef), + ] + Security.SecItemImport.restype = OSStatus + + Security.SecCertificateGetTypeID.argtypes = [] + Security.SecCertificateGetTypeID.restype = CFTypeID + + Security.SecIdentityGetTypeID.argtypes = [] + Security.SecIdentityGetTypeID.restype = CFTypeID + + Security.SecKeyGetTypeID.argtypes = [] + Security.SecKeyGetTypeID.restype = CFTypeID + + Security.SecCertificateCreateWithData.argtypes = [ + CFAllocatorRef, + CFDataRef + ] + Security.SecCertificateCreateWithData.restype = SecCertificateRef + + Security.SecCertificateCopyData.argtypes = [ + SecCertificateRef + ] + Security.SecCertificateCopyData.restype = CFDataRef + + Security.SecCopyErrorMessageString.argtypes = [ + OSStatus, + c_void_p + ] + Security.SecCopyErrorMessageString.restype = CFStringRef + + Security.SecIdentityCreateWithCertificate.argtypes = [ + CFTypeRef, + SecCertificateRef, + POINTER(SecIdentityRef) + ] + Security.SecIdentityCreateWithCertificate.restype = OSStatus + + Security.SecKeychainCreate.argtypes = [ + c_char_p, + c_uint32, + c_void_p, + Boolean, + c_void_p, + POINTER(SecKeychainRef) + ] + Security.SecKeychainCreate.restype = OSStatus + + Security.SecKeychainDelete.argtypes = [ + SecKeychainRef + ] + Security.SecKeychainDelete.restype = OSStatus + + Security.SecPKCS12Import.argtypes = [ + CFDataRef, + CFDictionaryRef, + POINTER(CFArrayRef) + ] + Security.SecPKCS12Import.restype = OSStatus + + SSLReadFunc = CFUNCTYPE(OSStatus, SSLConnectionRef, c_void_p, POINTER(c_size_t)) + SSLWriteFunc = CFUNCTYPE(OSStatus, SSLConnectionRef, POINTER(c_byte), POINTER(c_size_t)) + + Security.SSLSetIOFuncs.argtypes = [ + SSLContextRef, + SSLReadFunc, + SSLWriteFunc + ] + Security.SSLSetIOFuncs.restype = OSStatus + + Security.SSLSetPeerID.argtypes = [ + SSLContextRef, + c_char_p, + c_size_t + ] + Security.SSLSetPeerID.restype = OSStatus + + Security.SSLSetCertificate.argtypes = [ + SSLContextRef, + CFArrayRef + ] + Security.SSLSetCertificate.restype = OSStatus + + Security.SSLSetCertificateAuthorities.argtypes = [ + SSLContextRef, + CFTypeRef, + Boolean + ] + Security.SSLSetCertificateAuthorities.restype = OSStatus + + Security.SSLSetConnection.argtypes = [ + SSLContextRef, + SSLConnectionRef + ] + Security.SSLSetConnection.restype = OSStatus + + Security.SSLSetPeerDomainName.argtypes = [ + SSLContextRef, + c_char_p, + c_size_t + ] + Security.SSLSetPeerDomainName.restype = OSStatus + + Security.SSLHandshake.argtypes = [ + SSLContextRef + ] + Security.SSLHandshake.restype = OSStatus + + Security.SSLRead.argtypes = [ + SSLContextRef, + c_char_p, + c_size_t, + POINTER(c_size_t) + ] + Security.SSLRead.restype = OSStatus + + Security.SSLWrite.argtypes = [ + SSLContextRef, + c_char_p, + c_size_t, + POINTER(c_size_t) + ] + Security.SSLWrite.restype = OSStatus + + Security.SSLClose.argtypes = [ + SSLContextRef + ] + Security.SSLClose.restype = OSStatus + + Security.SSLGetNumberSupportedCiphers.argtypes = [ + SSLContextRef, + POINTER(c_size_t) + ] + Security.SSLGetNumberSupportedCiphers.restype = OSStatus + + Security.SSLGetSupportedCiphers.argtypes = [ + SSLContextRef, + POINTER(SSLCipherSuite), + POINTER(c_size_t) + ] + Security.SSLGetSupportedCiphers.restype = OSStatus + + Security.SSLSetEnabledCiphers.argtypes = [ + SSLContextRef, + POINTER(SSLCipherSuite), + c_size_t + ] + Security.SSLSetEnabledCiphers.restype = OSStatus + + Security.SSLGetNumberEnabledCiphers.argtype = [ + SSLContextRef, + POINTER(c_size_t) + ] + 
Security.SSLGetNumberEnabledCiphers.restype = OSStatus + + Security.SSLGetEnabledCiphers.argtypes = [ + SSLContextRef, + POINTER(SSLCipherSuite), + POINTER(c_size_t) + ] + Security.SSLGetEnabledCiphers.restype = OSStatus + + Security.SSLGetNegotiatedCipher.argtypes = [ + SSLContextRef, + POINTER(SSLCipherSuite) + ] + Security.SSLGetNegotiatedCipher.restype = OSStatus + + Security.SSLGetNegotiatedProtocolVersion.argtypes = [ + SSLContextRef, + POINTER(SSLProtocol) + ] + Security.SSLGetNegotiatedProtocolVersion.restype = OSStatus + + Security.SSLCopyPeerTrust.argtypes = [ + SSLContextRef, + POINTER(SecTrustRef) + ] + Security.SSLCopyPeerTrust.restype = OSStatus + + Security.SecTrustSetAnchorCertificates.argtypes = [ + SecTrustRef, + CFArrayRef + ] + Security.SecTrustSetAnchorCertificates.restype = OSStatus + + Security.SecTrustSetAnchorCertificatesOnly.argstypes = [ + SecTrustRef, + Boolean + ] + Security.SecTrustSetAnchorCertificatesOnly.restype = OSStatus + + Security.SecTrustEvaluate.argtypes = [ + SecTrustRef, + POINTER(SecTrustResultType) + ] + Security.SecTrustEvaluate.restype = OSStatus + + Security.SecTrustGetCertificateCount.argtypes = [ + SecTrustRef + ] + Security.SecTrustGetCertificateCount.restype = CFIndex + + Security.SecTrustGetCertificateAtIndex.argtypes = [ + SecTrustRef, + CFIndex + ] + Security.SecTrustGetCertificateAtIndex.restype = SecCertificateRef + + Security.SSLCreateContext.argtypes = [ + CFAllocatorRef, + SSLProtocolSide, + SSLConnectionType + ] + Security.SSLCreateContext.restype = SSLContextRef + + Security.SSLSetSessionOption.argtypes = [ + SSLContextRef, + SSLSessionOption, + Boolean + ] + Security.SSLSetSessionOption.restype = OSStatus + + Security.SSLSetProtocolVersionMin.argtypes = [ + SSLContextRef, + SSLProtocol + ] + Security.SSLSetProtocolVersionMin.restype = OSStatus + + Security.SSLSetProtocolVersionMax.argtypes = [ + SSLContextRef, + SSLProtocol + ] + Security.SSLSetProtocolVersionMax.restype = OSStatus + + Security.SecCopyErrorMessageString.argtypes = [ + OSStatus, + c_void_p + ] + Security.SecCopyErrorMessageString.restype = CFStringRef + + Security.SSLReadFunc = SSLReadFunc + Security.SSLWriteFunc = SSLWriteFunc + Security.SSLContextRef = SSLContextRef + Security.SSLProtocol = SSLProtocol + Security.SSLCipherSuite = SSLCipherSuite + Security.SecIdentityRef = SecIdentityRef + Security.SecKeychainRef = SecKeychainRef + Security.SecTrustRef = SecTrustRef + Security.SecTrustResultType = SecTrustResultType + Security.SecExternalFormat = SecExternalFormat + Security.OSStatus = OSStatus + + Security.kSecImportExportPassphrase = CFStringRef.in_dll( + Security, 'kSecImportExportPassphrase' + ) + Security.kSecImportItemIdentity = CFStringRef.in_dll( + Security, 'kSecImportItemIdentity' + ) + + # CoreFoundation time! 
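A generic illustration of the argtypes/restype declaration pattern used throughout these bindings, applied to libc rather than Security.framework; it assumes a POSIX system where find_library('c') resolves:

    import ctypes
    from ctypes.util import find_library

    # Spelling out argtypes and restype stops ctypes from guessing int-sized
    # defaults for pointer and size_t values, exactly as done above.
    libc = ctypes.CDLL(find_library('c'), use_errno=True)

    libc.strlen.argtypes = [ctypes.c_char_p]
    libc.strlen.restype = ctypes.c_size_t

    assert libc.strlen(b'urllib3') == 7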
+ CoreFoundation.CFRetain.argtypes = [ + CFTypeRef + ] + CoreFoundation.CFRetain.restype = CFTypeRef + + CoreFoundation.CFRelease.argtypes = [ + CFTypeRef + ] + CoreFoundation.CFRelease.restype = None + + CoreFoundation.CFGetTypeID.argtypes = [ + CFTypeRef + ] + CoreFoundation.CFGetTypeID.restype = CFTypeID + + CoreFoundation.CFStringCreateWithCString.argtypes = [ + CFAllocatorRef, + c_char_p, + CFStringEncoding + ] + CoreFoundation.CFStringCreateWithCString.restype = CFStringRef + + CoreFoundation.CFStringGetCStringPtr.argtypes = [ + CFStringRef, + CFStringEncoding + ] + CoreFoundation.CFStringGetCStringPtr.restype = c_char_p + + CoreFoundation.CFStringGetCString.argtypes = [ + CFStringRef, + c_char_p, + CFIndex, + CFStringEncoding + ] + CoreFoundation.CFStringGetCString.restype = c_bool + + CoreFoundation.CFDataCreate.argtypes = [ + CFAllocatorRef, + c_char_p, + CFIndex + ] + CoreFoundation.CFDataCreate.restype = CFDataRef + + CoreFoundation.CFDataGetLength.argtypes = [ + CFDataRef + ] + CoreFoundation.CFDataGetLength.restype = CFIndex + + CoreFoundation.CFDataGetBytePtr.argtypes = [ + CFDataRef + ] + CoreFoundation.CFDataGetBytePtr.restype = c_void_p + + CoreFoundation.CFDictionaryCreate.argtypes = [ + CFAllocatorRef, + POINTER(CFTypeRef), + POINTER(CFTypeRef), + CFIndex, + CFDictionaryKeyCallBacks, + CFDictionaryValueCallBacks + ] + CoreFoundation.CFDictionaryCreate.restype = CFDictionaryRef + + CoreFoundation.CFDictionaryGetValue.argtypes = [ + CFDictionaryRef, + CFTypeRef + ] + CoreFoundation.CFDictionaryGetValue.restype = CFTypeRef + + CoreFoundation.CFArrayCreate.argtypes = [ + CFAllocatorRef, + POINTER(CFTypeRef), + CFIndex, + CFArrayCallBacks, + ] + CoreFoundation.CFArrayCreate.restype = CFArrayRef + + CoreFoundation.CFArrayCreateMutable.argtypes = [ + CFAllocatorRef, + CFIndex, + CFArrayCallBacks + ] + CoreFoundation.CFArrayCreateMutable.restype = CFMutableArrayRef + + CoreFoundation.CFArrayAppendValue.argtypes = [ + CFMutableArrayRef, + c_void_p + ] + CoreFoundation.CFArrayAppendValue.restype = None + + CoreFoundation.CFArrayGetCount.argtypes = [ + CFArrayRef + ] + CoreFoundation.CFArrayGetCount.restype = CFIndex + + CoreFoundation.CFArrayGetValueAtIndex.argtypes = [ + CFArrayRef, + CFIndex + ] + CoreFoundation.CFArrayGetValueAtIndex.restype = c_void_p + + CoreFoundation.kCFAllocatorDefault = CFAllocatorRef.in_dll( + CoreFoundation, 'kCFAllocatorDefault' + ) + CoreFoundation.kCFTypeArrayCallBacks = c_void_p.in_dll(CoreFoundation, 'kCFTypeArrayCallBacks') + CoreFoundation.kCFTypeDictionaryKeyCallBacks = c_void_p.in_dll( + CoreFoundation, 'kCFTypeDictionaryKeyCallBacks' + ) + CoreFoundation.kCFTypeDictionaryValueCallBacks = c_void_p.in_dll( + CoreFoundation, 'kCFTypeDictionaryValueCallBacks' + ) + + CoreFoundation.CFTypeRef = CFTypeRef + CoreFoundation.CFArrayRef = CFArrayRef + CoreFoundation.CFStringRef = CFStringRef + CoreFoundation.CFDictionaryRef = CFDictionaryRef + +except (AttributeError): + raise ImportError('Error initializing ctypes') + + +class CFConst(object): + """ + A class object that acts as essentially a namespace for CoreFoundation + constants. + """ + kCFStringEncodingUTF8 = CFStringEncoding(0x08000100) + + +class SecurityConst(object): + """ + A class object that acts as essentially a namespace for Security constants. 
+ """ + kSSLSessionOptionBreakOnServerAuth = 0 + + kSSLProtocol2 = 1 + kSSLProtocol3 = 2 + kTLSProtocol1 = 4 + kTLSProtocol11 = 7 + kTLSProtocol12 = 8 + + kSSLClientSide = 1 + kSSLStreamType = 0 + + kSecFormatPEMSequence = 10 + + kSecTrustResultInvalid = 0 + kSecTrustResultProceed = 1 + # This gap is present on purpose: this was kSecTrustResultConfirm, which + # is deprecated. + kSecTrustResultDeny = 3 + kSecTrustResultUnspecified = 4 + kSecTrustResultRecoverableTrustFailure = 5 + kSecTrustResultFatalTrustFailure = 6 + kSecTrustResultOtherError = 7 + + errSSLProtocol = -9800 + errSSLWouldBlock = -9803 + errSSLClosedGraceful = -9805 + errSSLClosedNoNotify = -9816 + errSSLClosedAbort = -9806 + + errSSLXCertChainInvalid = -9807 + errSSLCrypto = -9809 + errSSLInternal = -9810 + errSSLCertExpired = -9814 + errSSLCertNotYetValid = -9815 + errSSLUnknownRootCert = -9812 + errSSLNoRootCert = -9813 + errSSLHostNameMismatch = -9843 + errSSLPeerHandshakeFail = -9824 + errSSLPeerUserCancelled = -9839 + errSSLWeakPeerEphemeralDHKey = -9850 + errSSLServerAuthCompleted = -9841 + errSSLRecordOverflow = -9847 + + errSecVerifyFailed = -67808 + errSecNoTrustSettings = -25263 + errSecItemNotFound = -25300 + errSecInvalidTrustSettings = -25262 + + # Cipher suites. We only pick the ones our default cipher string allows. + TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384 = 0xC02C + TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384 = 0xC030 + TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256 = 0xC02B + TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256 = 0xC02F + TLS_DHE_DSS_WITH_AES_256_GCM_SHA384 = 0x00A3 + TLS_DHE_RSA_WITH_AES_256_GCM_SHA384 = 0x009F + TLS_DHE_DSS_WITH_AES_128_GCM_SHA256 = 0x00A2 + TLS_DHE_RSA_WITH_AES_128_GCM_SHA256 = 0x009E + TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384 = 0xC024 + TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384 = 0xC028 + TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA = 0xC00A + TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA = 0xC014 + TLS_DHE_RSA_WITH_AES_256_CBC_SHA256 = 0x006B + TLS_DHE_DSS_WITH_AES_256_CBC_SHA256 = 0x006A + TLS_DHE_RSA_WITH_AES_256_CBC_SHA = 0x0039 + TLS_DHE_DSS_WITH_AES_256_CBC_SHA = 0x0038 + TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256 = 0xC023 + TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256 = 0xC027 + TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA = 0xC009 + TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA = 0xC013 + TLS_DHE_RSA_WITH_AES_128_CBC_SHA256 = 0x0067 + TLS_DHE_DSS_WITH_AES_128_CBC_SHA256 = 0x0040 + TLS_DHE_RSA_WITH_AES_128_CBC_SHA = 0x0033 + TLS_DHE_DSS_WITH_AES_128_CBC_SHA = 0x0032 + TLS_RSA_WITH_AES_256_GCM_SHA384 = 0x009D + TLS_RSA_WITH_AES_128_GCM_SHA256 = 0x009C + TLS_RSA_WITH_AES_256_CBC_SHA256 = 0x003D + TLS_RSA_WITH_AES_128_CBC_SHA256 = 0x003C + TLS_RSA_WITH_AES_256_CBC_SHA = 0x0035 + TLS_RSA_WITH_AES_128_CBC_SHA = 0x002F + TLS_AES_128_GCM_SHA256 = 0x1301 + TLS_AES_256_GCM_SHA384 = 0x1302 + TLS_CHACHA20_POLY1305_SHA256 = 0x1303 diff --git a/env/lib/python3.6/site-packages/urllib3/contrib/_securetransport/low_level.py b/env/lib/python3.6/site-packages/urllib3/contrib/_securetransport/low_level.py new file mode 100755 index 0000000..b13cd9e --- /dev/null +++ b/env/lib/python3.6/site-packages/urllib3/contrib/_securetransport/low_level.py @@ -0,0 +1,346 @@ +""" +Low-level helpers for the SecureTransport bindings. + +These are Python functions that are not directly related to the high-level APIs +but are necessary to get them to work. They include a whole bunch of low-level +CoreFoundation messing about and memory management. 
The concerns in this module +are almost entirely about trying to avoid memory leaks and providing +appropriate and useful assistance to the higher-level code. +""" +import base64 +import ctypes +import itertools +import re +import os +import ssl +import tempfile + +from .bindings import Security, CoreFoundation, CFConst + + +# This regular expression is used to grab PEM data out of a PEM bundle. +_PEM_CERTS_RE = re.compile( + b"-----BEGIN CERTIFICATE-----\n(.*?)\n-----END CERTIFICATE-----", re.DOTALL +) + + +def _cf_data_from_bytes(bytestring): + """ + Given a bytestring, create a CFData object from it. This CFData object must + be CFReleased by the caller. + """ + return CoreFoundation.CFDataCreate( + CoreFoundation.kCFAllocatorDefault, bytestring, len(bytestring) + ) + + +def _cf_dictionary_from_tuples(tuples): + """ + Given a list of Python tuples, create an associated CFDictionary. + """ + dictionary_size = len(tuples) + + # We need to get the dictionary keys and values out in the same order. + keys = (t[0] for t in tuples) + values = (t[1] for t in tuples) + cf_keys = (CoreFoundation.CFTypeRef * dictionary_size)(*keys) + cf_values = (CoreFoundation.CFTypeRef * dictionary_size)(*values) + + return CoreFoundation.CFDictionaryCreate( + CoreFoundation.kCFAllocatorDefault, + cf_keys, + cf_values, + dictionary_size, + CoreFoundation.kCFTypeDictionaryKeyCallBacks, + CoreFoundation.kCFTypeDictionaryValueCallBacks, + ) + + +def _cf_string_to_unicode(value): + """ + Creates a Unicode string from a CFString object. Used entirely for error + reporting. + + Yes, it annoys me quite a lot that this function is this complex. + """ + value_as_void_p = ctypes.cast(value, ctypes.POINTER(ctypes.c_void_p)) + + string = CoreFoundation.CFStringGetCStringPtr( + value_as_void_p, + CFConst.kCFStringEncodingUTF8 + ) + if string is None: + buffer = ctypes.create_string_buffer(1024) + result = CoreFoundation.CFStringGetCString( + value_as_void_p, + buffer, + 1024, + CFConst.kCFStringEncodingUTF8 + ) + if not result: + raise OSError('Error copying C string from CFStringRef') + string = buffer.value + if string is not None: + string = string.decode('utf-8') + return string + + +def _assert_no_error(error, exception_class=None): + """ + Checks the return code and throws an exception if there is an error to + report + """ + if error == 0: + return + + cf_error_string = Security.SecCopyErrorMessageString(error, None) + output = _cf_string_to_unicode(cf_error_string) + CoreFoundation.CFRelease(cf_error_string) + + if output is None or output == u'': + output = u'OSStatus %s' % error + + if exception_class is None: + exception_class = ssl.SSLError + + raise exception_class(output) + + +def _cert_array_from_pem(pem_bundle): + """ + Given a bundle of certs in PEM format, turns them into a CFArray of certs + that can be used to validate a cert chain. + """ + # Normalize the PEM bundle's line endings. 
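A small, self-contained sketch of the PEM-to-DER extraction technique used by _PEM_CERTS_RE and _cert_array_from_pem() above; the payload is a dummy stand-in, not a real certificate:

    import base64
    import re

    PEM_RE = re.compile(
        b"-----BEGIN CERTIFICATE-----\n(.*?)\n-----END CERTIFICATE-----",
        re.DOTALL)

    # Build a one-entry bundle, then pull the base64 payload out of each PEM
    # block and decode it back to its raw (DER, in the real case) bytes.
    bundle = (b"-----BEGIN CERTIFICATE-----\n" +
              base64.b64encode(b"not a real certificate") +
              b"\n-----END CERTIFICATE-----\n")

    der_blobs = [base64.b64decode(m.group(1)) for m in PEM_RE.finditer(bundle)]
    assert der_blobs == [b"not a real certificate"]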
+ pem_bundle = pem_bundle.replace(b"\r\n", b"\n") + + der_certs = [ + base64.b64decode(match.group(1)) + for match in _PEM_CERTS_RE.finditer(pem_bundle) + ] + if not der_certs: + raise ssl.SSLError("No root certificates specified") + + cert_array = CoreFoundation.CFArrayCreateMutable( + CoreFoundation.kCFAllocatorDefault, + 0, + ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks) + ) + if not cert_array: + raise ssl.SSLError("Unable to allocate memory!") + + try: + for der_bytes in der_certs: + certdata = _cf_data_from_bytes(der_bytes) + if not certdata: + raise ssl.SSLError("Unable to allocate memory!") + cert = Security.SecCertificateCreateWithData( + CoreFoundation.kCFAllocatorDefault, certdata + ) + CoreFoundation.CFRelease(certdata) + if not cert: + raise ssl.SSLError("Unable to build cert object!") + + CoreFoundation.CFArrayAppendValue(cert_array, cert) + CoreFoundation.CFRelease(cert) + except Exception: + # We need to free the array before the exception bubbles further. + # We only want to do that if an error occurs: otherwise, the caller + # should free. + CoreFoundation.CFRelease(cert_array) + + return cert_array + + +def _is_cert(item): + """ + Returns True if a given CFTypeRef is a certificate. + """ + expected = Security.SecCertificateGetTypeID() + return CoreFoundation.CFGetTypeID(item) == expected + + +def _is_identity(item): + """ + Returns True if a given CFTypeRef is an identity. + """ + expected = Security.SecIdentityGetTypeID() + return CoreFoundation.CFGetTypeID(item) == expected + + +def _temporary_keychain(): + """ + This function creates a temporary Mac keychain that we can use to work with + credentials. This keychain uses a one-time password and a temporary file to + store the data. We expect to have one keychain per socket. The returned + SecKeychainRef must be freed by the caller, including calling + SecKeychainDelete. + + Returns a tuple of the SecKeychainRef and the path to the temporary + directory that contains it. + """ + # Unfortunately, SecKeychainCreate requires a path to a keychain. This + # means we cannot use mkstemp to use a generic temporary file. Instead, + # we're going to create a temporary directory and a filename to use there. + # This filename will be 8 random bytes expanded into base64. We also need + # some random bytes to password-protect the keychain we're creating, so we + # ask for 40 random bytes. + random_bytes = os.urandom(40) + filename = base64.b16encode(random_bytes[:8]).decode('utf-8') + password = base64.b16encode(random_bytes[8:]) # Must be valid UTF-8 + tempdirectory = tempfile.mkdtemp() + + keychain_path = os.path.join(tempdirectory, filename).encode('utf-8') + + # We now want to create the keychain itself. + keychain = Security.SecKeychainRef() + status = Security.SecKeychainCreate( + keychain_path, + len(password), + password, + False, + None, + ctypes.byref(keychain) + ) + _assert_no_error(status) + + # Having created the keychain, we want to pass it off to the caller. + return keychain, tempdirectory + + +def _load_items_from_file(keychain, path): + """ + Given a single file, loads all the trust objects from it into arrays and + the keychain. + Returns a tuple of lists: the first list is a list of identities, the + second a list of certs. 
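The temporary-keychain naming scheme that _temporary_keychain() describes above, reproduced as a standalone sketch; it only creates and removes an empty directory, and no keychain or Security.framework call is involved:

    import base64
    import os
    import tempfile

    # Eight random bytes become the keychain's filename, the remaining bytes
    # its one-time password (hex-encoded, so always valid UTF-8), all inside
    # a fresh temporary directory.
    random_bytes = os.urandom(40)
    filename = base64.b16encode(random_bytes[:8]).decode('utf-8')
    password = base64.b16encode(random_bytes[8:])
    tempdirectory = tempfile.mkdtemp()
    keychain_path = os.path.join(tempdirectory, filename)

    print(keychain_path, len(password))
    os.rmdir(tempdirectory)   # the real code keeps it until the keychain is deleted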
+ """ + certificates = [] + identities = [] + result_array = None + + with open(path, 'rb') as f: + raw_filedata = f.read() + + try: + filedata = CoreFoundation.CFDataCreate( + CoreFoundation.kCFAllocatorDefault, + raw_filedata, + len(raw_filedata) + ) + result_array = CoreFoundation.CFArrayRef() + result = Security.SecItemImport( + filedata, # cert data + None, # Filename, leaving it out for now + None, # What the type of the file is, we don't care + None, # what's in the file, we don't care + 0, # import flags + None, # key params, can include passphrase in the future + keychain, # The keychain to insert into + ctypes.byref(result_array) # Results + ) + _assert_no_error(result) + + # A CFArray is not very useful to us as an intermediary + # representation, so we are going to extract the objects we want + # and then free the array. We don't need to keep hold of keys: the + # keychain already has them! + result_count = CoreFoundation.CFArrayGetCount(result_array) + for index in range(result_count): + item = CoreFoundation.CFArrayGetValueAtIndex( + result_array, index + ) + item = ctypes.cast(item, CoreFoundation.CFTypeRef) + + if _is_cert(item): + CoreFoundation.CFRetain(item) + certificates.append(item) + elif _is_identity(item): + CoreFoundation.CFRetain(item) + identities.append(item) + finally: + if result_array: + CoreFoundation.CFRelease(result_array) + + CoreFoundation.CFRelease(filedata) + + return (identities, certificates) + + +def _load_client_cert_chain(keychain, *paths): + """ + Load certificates and maybe keys from a number of files. Has the end goal + of returning a CFArray containing one SecIdentityRef, and then zero or more + SecCertificateRef objects, suitable for use as a client certificate trust + chain. + """ + # Ok, the strategy. + # + # This relies on knowing that macOS will not give you a SecIdentityRef + # unless you have imported a key into a keychain. This is a somewhat + # artificial limitation of macOS (for example, it doesn't necessarily + # affect iOS), but there is nothing inside Security.framework that lets you + # get a SecIdentityRef without having a key in a keychain. + # + # So the policy here is we take all the files and iterate them in order. + # Each one will use SecItemImport to have one or more objects loaded from + # it. We will also point at a keychain that macOS can use to work with the + # private key. + # + # Once we have all the objects, we'll check what we actually have. If we + # already have a SecIdentityRef in hand, fab: we'll use that. Otherwise, + # we'll take the first certificate (which we assume to be our leaf) and + # ask the keychain to give us a SecIdentityRef with that cert's associated + # key. + # + # We'll then return a CFArray containing the trust chain: one + # SecIdentityRef and then zero-or-more SecCertificateRef objects. The + # responsibility for freeing this CFArray will be with the caller. This + # CFArray must remain alive for the entire connection, so in practice it + # will be stored with a single SSLSocket, along with the reference to the + # keychain. + certificates = [] + identities = [] + + # Filter out bad paths. + paths = (path for path in paths if path) + + try: + for file_path in paths: + new_identities, new_certs = _load_items_from_file( + keychain, file_path + ) + identities.extend(new_identities) + certificates.extend(new_certs) + + # Ok, we have everything. The question is: do we have an identity? If + # not, we want to grab one from the first cert we have. 
+ if not identities: + new_identity = Security.SecIdentityRef() + status = Security.SecIdentityCreateWithCertificate( + keychain, + certificates[0], + ctypes.byref(new_identity) + ) + _assert_no_error(status) + identities.append(new_identity) + + # We now want to release the original certificate, as we no longer + # need it. + CoreFoundation.CFRelease(certificates.pop(0)) + + # We now need to build a new CFArray that holds the trust chain. + trust_chain = CoreFoundation.CFArrayCreateMutable( + CoreFoundation.kCFAllocatorDefault, + 0, + ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks), + ) + for item in itertools.chain(identities, certificates): + # ArrayAppendValue does a CFRetain on the item. That's fine, + # because the finally block will release our other refs to them. + CoreFoundation.CFArrayAppendValue(trust_chain, item) + + return trust_chain + finally: + for obj in itertools.chain(identities, certificates): + CoreFoundation.CFRelease(obj) diff --git a/env/lib/python3.6/site-packages/urllib3/contrib/appengine.py b/env/lib/python3.6/site-packages/urllib3/contrib/appengine.py new file mode 100755 index 0000000..66922e0 --- /dev/null +++ b/env/lib/python3.6/site-packages/urllib3/contrib/appengine.py @@ -0,0 +1,305 @@ +""" +This module provides a pool manager that uses Google App Engine's +`URLFetch Service `_. + +Example usage:: + + from urllib3 import PoolManager + from urllib3.contrib.appengine import AppEngineManager, is_appengine_sandbox + + if is_appengine_sandbox(): + # AppEngineManager uses AppEngine's URLFetch API behind the scenes + http = AppEngineManager() + else: + # PoolManager uses a socket-level API behind the scenes + http = PoolManager() + + r = http.request('GET', 'https://google.com/') + +There are `limitations `_ to the URLFetch service and it may not be +the best choice for your application. There are three options for using +urllib3 on Google App Engine: + +1. You can use :class:`AppEngineManager` with URLFetch. URLFetch is + cost-effective in many circumstances as long as your usage is within the + limitations. +2. You can use a normal :class:`~urllib3.PoolManager` by enabling sockets. + Sockets also have `limitations and restrictions + `_ and have a lower free quota than URLFetch. + To use sockets, be sure to specify the following in your ``app.yaml``:: + + env_variables: + GAE_USE_SOCKETS_HTTPLIB : 'true' + +3. If you are using `App Engine Flexible +`_, you can use the standard +:class:`PoolManager` without any configuration or special environment variables. +""" + +from __future__ import absolute_import +import logging +import os +import warnings +from ..packages.six.moves.urllib.parse import urljoin + +from ..exceptions import ( + HTTPError, + HTTPWarning, + MaxRetryError, + ProtocolError, + TimeoutError, + SSLError +) + +from ..packages.six import BytesIO +from ..request import RequestMethods +from ..response import HTTPResponse +from ..util.timeout import Timeout +from ..util.retry import Retry + +try: + from google.appengine.api import urlfetch +except ImportError: + urlfetch = None + + +log = logging.getLogger(__name__) + + +class AppEnginePlatformWarning(HTTPWarning): + pass + + +class AppEnginePlatformError(HTTPError): + pass + + +class AppEngineManager(RequestMethods): + """ + Connection manager for Google App Engine sandbox applications. + + This manager uses the URLFetch service directly instead of using the + emulated httplib, and is subject to URLFetch limitations as described in + the App Engine documentation `here + `_. 
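A guarded usage sketch for the manager documented above; the keyword arguments match the constructor that follows, and the target URL is only an example. Outside the App Engine standard sandbox the guard simply skips everything:

    from urllib3.contrib.appengine import AppEngineManager, is_appengine_sandbox

    # Outside the sandbox the urlfetch API is unavailable and constructing
    # AppEngineManager() raises AppEnginePlatformError, hence the guard.
    if is_appengine_sandbox():
        http = AppEngineManager(
            validate_certificate=True,   # have URLFetch verify server certificates
            urlfetch_retries=True)       # let URLFetch follow redirects itself
        r = http.request('GET', 'https://www.google.com/')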
+ + Notably it will raise an :class:`AppEnginePlatformError` if: + * URLFetch is not available. + * If you attempt to use this on App Engine Flexible, as full socket + support is available. + * If a request size is more than 10 megabytes. + * If a response size is more than 32 megabtyes. + * If you use an unsupported request method such as OPTIONS. + + Beyond those cases, it will raise normal urllib3 errors. + """ + + def __init__(self, headers=None, retries=None, validate_certificate=True, + urlfetch_retries=True): + if not urlfetch: + raise AppEnginePlatformError( + "URLFetch is not available in this environment.") + + if is_prod_appengine_mvms(): + raise AppEnginePlatformError( + "Use normal urllib3.PoolManager instead of AppEngineManager" + "on Managed VMs, as using URLFetch is not necessary in " + "this environment.") + + warnings.warn( + "urllib3 is using URLFetch on Google App Engine sandbox instead " + "of sockets. To use sockets directly instead of URLFetch see " + "https://urllib3.readthedocs.io/en/latest/reference/urllib3.contrib.html.", + AppEnginePlatformWarning) + + RequestMethods.__init__(self, headers) + self.validate_certificate = validate_certificate + self.urlfetch_retries = urlfetch_retries + + self.retries = retries or Retry.DEFAULT + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + # Return False to re-raise any potential exceptions + return False + + def urlopen(self, method, url, body=None, headers=None, + retries=None, redirect=True, timeout=Timeout.DEFAULT_TIMEOUT, + **response_kw): + + retries = self._get_retries(retries, redirect) + + try: + follow_redirects = ( + redirect and + retries.redirect != 0 and + retries.total) + response = urlfetch.fetch( + url, + payload=body, + method=method, + headers=headers or {}, + allow_truncated=False, + follow_redirects=self.urlfetch_retries and follow_redirects, + deadline=self._get_absolute_timeout(timeout), + validate_certificate=self.validate_certificate, + ) + except urlfetch.DeadlineExceededError as e: + raise TimeoutError(self, e) + + except urlfetch.InvalidURLError as e: + if 'too large' in str(e): + raise AppEnginePlatformError( + "URLFetch request too large, URLFetch only " + "supports requests up to 10mb in size.", e) + raise ProtocolError(e) + + except urlfetch.DownloadError as e: + if 'Too many redirects' in str(e): + raise MaxRetryError(self, url, reason=e) + raise ProtocolError(e) + + except urlfetch.ResponseTooLargeError as e: + raise AppEnginePlatformError( + "URLFetch response too large, URLFetch only supports" + "responses up to 32mb in size.", e) + + except urlfetch.SSLCertificateError as e: + raise SSLError(e) + + except urlfetch.InvalidMethodError as e: + raise AppEnginePlatformError( + "URLFetch does not support method: %s" % method, e) + + http_response = self._urlfetch_response_to_http_response( + response, retries=retries, **response_kw) + + # Handle redirect? 
+ redirect_location = redirect and http_response.get_redirect_location() + if redirect_location: + # Check for redirect response + if (self.urlfetch_retries and retries.raise_on_redirect): + raise MaxRetryError(self, url, "too many redirects") + else: + if http_response.status == 303: + method = 'GET' + + try: + retries = retries.increment(method, url, response=http_response, _pool=self) + except MaxRetryError: + if retries.raise_on_redirect: + raise MaxRetryError(self, url, "too many redirects") + return http_response + + retries.sleep_for_retry(http_response) + log.debug("Redirecting %s -> %s", url, redirect_location) + redirect_url = urljoin(url, redirect_location) + return self.urlopen( + method, redirect_url, body, headers, + retries=retries, redirect=redirect, + timeout=timeout, **response_kw) + + # Check if we should retry the HTTP response. + has_retry_after = bool(http_response.getheader('Retry-After')) + if retries.is_retry(method, http_response.status, has_retry_after): + retries = retries.increment( + method, url, response=http_response, _pool=self) + log.debug("Retry: %s", url) + retries.sleep(http_response) + return self.urlopen( + method, url, + body=body, headers=headers, + retries=retries, redirect=redirect, + timeout=timeout, **response_kw) + + return http_response + + def _urlfetch_response_to_http_response(self, urlfetch_resp, **response_kw): + + if is_prod_appengine(): + # Production GAE handles deflate encoding automatically, but does + # not remove the encoding header. + content_encoding = urlfetch_resp.headers.get('content-encoding') + + if content_encoding == 'deflate': + del urlfetch_resp.headers['content-encoding'] + + transfer_encoding = urlfetch_resp.headers.get('transfer-encoding') + # We have a full response's content, + # so let's make sure we don't report ourselves as chunked data. + if transfer_encoding == 'chunked': + encodings = transfer_encoding.split(",") + encodings.remove('chunked') + urlfetch_resp.headers['transfer-encoding'] = ','.join(encodings) + + original_response = HTTPResponse( + # In order for decoding to work, we must present the content as + # a file-like object. + body=BytesIO(urlfetch_resp.content), + msg=urlfetch_resp.header_msg, + headers=urlfetch_resp.headers, + status=urlfetch_resp.status_code, + **response_kw + ) + + return HTTPResponse( + body=BytesIO(urlfetch_resp.content), + headers=urlfetch_resp.headers, + status=urlfetch_resp.status_code, + original_response=original_response, + **response_kw + ) + + def _get_absolute_timeout(self, timeout): + if timeout is Timeout.DEFAULT_TIMEOUT: + return None # Defer to URLFetch's default. 
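+        # URLFetch only accepts a single overall deadline, so a granular
+        # urllib3 Timeout (separate connect/read values) collapses to its
+        # total value; the warning below flags that loss of precision.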
+ if isinstance(timeout, Timeout): + if timeout._read is not None or timeout._connect is not None: + warnings.warn( + "URLFetch does not support granular timeout settings, " + "reverting to total or default URLFetch timeout.", + AppEnginePlatformWarning) + return timeout.total + return timeout + + def _get_retries(self, retries, redirect): + if not isinstance(retries, Retry): + retries = Retry.from_int( + retries, redirect=redirect, default=self.retries) + + if retries.connect or retries.read or retries.redirect: + warnings.warn( + "URLFetch only supports total retries and does not " + "recognize connect, read, or redirect retry parameters.", + AppEnginePlatformWarning) + + return retries + + +def is_appengine(): + return (is_local_appengine() or + is_prod_appengine() or + is_prod_appengine_mvms()) + + +def is_appengine_sandbox(): + return is_appengine() and not is_prod_appengine_mvms() + + +def is_local_appengine(): + return ('APPENGINE_RUNTIME' in os.environ and + 'Development/' in os.environ['SERVER_SOFTWARE']) + + +def is_prod_appengine(): + return ('APPENGINE_RUNTIME' in os.environ and + 'Google App Engine/' in os.environ['SERVER_SOFTWARE'] and + not is_prod_appengine_mvms()) + + +def is_prod_appengine_mvms(): + return os.environ.get('GAE_VM', False) == 'true' diff --git a/env/lib/python3.6/site-packages/urllib3/contrib/ntlmpool.py b/env/lib/python3.6/site-packages/urllib3/contrib/ntlmpool.py new file mode 100755 index 0000000..642e99e --- /dev/null +++ b/env/lib/python3.6/site-packages/urllib3/contrib/ntlmpool.py @@ -0,0 +1,112 @@ +""" +NTLM authenticating pool, contributed by erikcederstran + +Issue #10, see: http://code.google.com/p/urllib3/issues/detail?id=10 +""" +from __future__ import absolute_import + +from logging import getLogger +from ntlm import ntlm + +from .. import HTTPSConnectionPool +from ..packages.six.moves.http_client import HTTPSConnection + + +log = getLogger(__name__) + + +class NTLMConnectionPool(HTTPSConnectionPool): + """ + Implements an NTLM authentication version of an urllib3 connection pool + """ + + scheme = 'https' + + def __init__(self, user, pw, authurl, *args, **kwargs): + """ + authurl is a random URL on the server that is protected by NTLM. + user is the Windows user, probably in the DOMAIN\\username format. + pw is the password for the user. + """ + super(NTLMConnectionPool, self).__init__(*args, **kwargs) + self.authurl = authurl + self.rawuser = user + user_parts = user.split('\\', 1) + self.domain = user_parts[0].upper() + self.user = user_parts[1] + self.pw = pw + + def _new_conn(self): + # Performs the NTLM handshake that secures the connection. The socket + # must be kept open while requests are performed. + self.num_connections += 1 + log.debug('Starting NTLM HTTPS connection no. 
%d: https://%s%s', + self.num_connections, self.host, self.authurl) + + headers = {} + headers['Connection'] = 'Keep-Alive' + req_header = 'Authorization' + resp_header = 'www-authenticate' + + conn = HTTPSConnection(host=self.host, port=self.port) + + # Send negotiation message + headers[req_header] = ( + 'NTLM %s' % ntlm.create_NTLM_NEGOTIATE_MESSAGE(self.rawuser)) + log.debug('Request headers: %s', headers) + conn.request('GET', self.authurl, None, headers) + res = conn.getresponse() + reshdr = dict(res.getheaders()) + log.debug('Response status: %s %s', res.status, res.reason) + log.debug('Response headers: %s', reshdr) + log.debug('Response data: %s [...]', res.read(100)) + + # Remove the reference to the socket, so that it can not be closed by + # the response object (we want to keep the socket open) + res.fp = None + + # Server should respond with a challenge message + auth_header_values = reshdr[resp_header].split(', ') + auth_header_value = None + for s in auth_header_values: + if s[:5] == 'NTLM ': + auth_header_value = s[5:] + if auth_header_value is None: + raise Exception('Unexpected %s response header: %s' % + (resp_header, reshdr[resp_header])) + + # Send authentication message + ServerChallenge, NegotiateFlags = \ + ntlm.parse_NTLM_CHALLENGE_MESSAGE(auth_header_value) + auth_msg = ntlm.create_NTLM_AUTHENTICATE_MESSAGE(ServerChallenge, + self.user, + self.domain, + self.pw, + NegotiateFlags) + headers[req_header] = 'NTLM %s' % auth_msg + log.debug('Request headers: %s', headers) + conn.request('GET', self.authurl, None, headers) + res = conn.getresponse() + log.debug('Response status: %s %s', res.status, res.reason) + log.debug('Response headers: %s', dict(res.getheaders())) + log.debug('Response data: %s [...]', res.read()[:100]) + if res.status != 200: + if res.status == 401: + raise Exception('Server rejected request: wrong ' + 'username or password') + raise Exception('Wrong server response: %s %s' % + (res.status, res.reason)) + + res.fp = None + log.debug('Connection established') + return conn + + def urlopen(self, method, url, body=None, headers=None, retries=3, + redirect=True, assert_same_host=True): + if headers is None: + headers = {} + headers['Connection'] = 'Keep-Alive' + return super(NTLMConnectionPool, self).urlopen(method, url, body, + headers, retries, + redirect, + assert_same_host) diff --git a/env/lib/python3.6/site-packages/urllib3/contrib/pyopenssl.py b/env/lib/python3.6/site-packages/urllib3/contrib/pyopenssl.py new file mode 100755 index 0000000..4d4b1af --- /dev/null +++ b/env/lib/python3.6/site-packages/urllib3/contrib/pyopenssl.py @@ -0,0 +1,457 @@ +""" +SSL with SNI_-support for Python 2. Follow these instructions if you would +like to verify SSL certificates in Python 2. Note, the default libraries do +*not* do certificate checking; you need to do additional work to validate +certificates yourself. + +This needs the following packages installed: + +* pyOpenSSL (tested with 16.0.0) +* cryptography (minimum 1.3.4, from pyopenssl) +* idna (minimum 2.0, from cryptography) + +However, pyopenssl depends on cryptography, which depends on idna, so while we +use all three directly here we end up having relatively few packages required. + +You can install them with the following command: + + pip install pyopenssl cryptography idna + +To activate certificate checking, call +:func:`~urllib3.contrib.pyopenssl.inject_into_urllib3` from your Python code +before you begin making HTTP requests. 
This can be done in a ``sitecustomize`` +module, or at any other time before your application begins using ``urllib3``, +like this:: + + try: + import urllib3.contrib.pyopenssl + urllib3.contrib.pyopenssl.inject_into_urllib3() + except ImportError: + pass + +Now you can use :mod:`urllib3` as you normally would, and it will support SNI +when the required modules are installed. + +Activating this module also has the positive side effect of disabling SSL/TLS +compression in Python 2 (see `CRIME attack`_). + +If you want to configure the default list of supported cipher suites, you can +set the ``urllib3.contrib.pyopenssl.DEFAULT_SSL_CIPHER_LIST`` variable. + +.. _sni: https://en.wikipedia.org/wiki/Server_Name_Indication +.. _crime attack: https://en.wikipedia.org/wiki/CRIME_(security_exploit) +""" +from __future__ import absolute_import + +import OpenSSL.SSL +from cryptography import x509 +from cryptography.hazmat.backends.openssl import backend as openssl_backend +from cryptography.hazmat.backends.openssl.x509 import _Certificate +try: + from cryptography.x509 import UnsupportedExtension +except ImportError: + # UnsupportedExtension is gone in cryptography >= 2.1.0 + class UnsupportedExtension(Exception): + pass + +from socket import timeout, error as SocketError +from io import BytesIO + +try: # Platform-specific: Python 2 + from socket import _fileobject +except ImportError: # Platform-specific: Python 3 + _fileobject = None + from ..packages.backports.makefile import backport_makefile + +import logging +import ssl +from ..packages import six +import sys + +from .. import util + +__all__ = ['inject_into_urllib3', 'extract_from_urllib3'] + +# SNI always works. +HAS_SNI = True + +# Map from urllib3 to PyOpenSSL compatible parameter-values. +_openssl_versions = { + ssl.PROTOCOL_SSLv23: OpenSSL.SSL.SSLv23_METHOD, + ssl.PROTOCOL_TLSv1: OpenSSL.SSL.TLSv1_METHOD, +} + +if hasattr(ssl, 'PROTOCOL_TLSv1_1') and hasattr(OpenSSL.SSL, 'TLSv1_1_METHOD'): + _openssl_versions[ssl.PROTOCOL_TLSv1_1] = OpenSSL.SSL.TLSv1_1_METHOD + +if hasattr(ssl, 'PROTOCOL_TLSv1_2') and hasattr(OpenSSL.SSL, 'TLSv1_2_METHOD'): + _openssl_versions[ssl.PROTOCOL_TLSv1_2] = OpenSSL.SSL.TLSv1_2_METHOD + +try: + _openssl_versions.update({ssl.PROTOCOL_SSLv3: OpenSSL.SSL.SSLv3_METHOD}) +except AttributeError: + pass + +_stdlib_to_openssl_verify = { + ssl.CERT_NONE: OpenSSL.SSL.VERIFY_NONE, + ssl.CERT_OPTIONAL: OpenSSL.SSL.VERIFY_PEER, + ssl.CERT_REQUIRED: + OpenSSL.SSL.VERIFY_PEER + OpenSSL.SSL.VERIFY_FAIL_IF_NO_PEER_CERT, +} +_openssl_to_stdlib_verify = dict( + (v, k) for k, v in _stdlib_to_openssl_verify.items() +) + +# OpenSSL will only write 16K at a time +SSL_WRITE_BLOCKSIZE = 16384 + +orig_util_HAS_SNI = util.HAS_SNI +orig_util_SSLContext = util.ssl_.SSLContext + + +log = logging.getLogger(__name__) + + +def inject_into_urllib3(): + 'Monkey-patch urllib3 with PyOpenSSL-backed SSL-support.' + + _validate_dependencies_met() + + util.ssl_.SSLContext = PyOpenSSLContext + util.HAS_SNI = HAS_SNI + util.ssl_.HAS_SNI = HAS_SNI + util.IS_PYOPENSSL = True + util.ssl_.IS_PYOPENSSL = True + + +def extract_from_urllib3(): + 'Undo monkey-patching by :func:`inject_into_urllib3`.' + + util.ssl_.SSLContext = orig_util_SSLContext + util.HAS_SNI = orig_util_HAS_SNI + util.ssl_.HAS_SNI = orig_util_HAS_SNI + util.IS_PYOPENSSL = False + util.ssl_.IS_PYOPENSSL = False + + +def _validate_dependencies_met(): + """ + Verifies that PyOpenSSL's package-level dependencies have been met. + Throws `ImportError` if they are not met. 
+ """ + # Method added in `cryptography==1.1`; not available in older versions + from cryptography.x509.extensions import Extensions + if getattr(Extensions, "get_extension_for_class", None) is None: + raise ImportError("'cryptography' module missing required functionality. " + "Try upgrading to v1.3.4 or newer.") + + # pyOpenSSL 0.14 and above use cryptography for OpenSSL bindings. The _x509 + # attribute is only present on those versions. + from OpenSSL.crypto import X509 + x509 = X509() + if getattr(x509, "_x509", None) is None: + raise ImportError("'pyOpenSSL' module missing required functionality. " + "Try upgrading to v0.14 or newer.") + + +def _dnsname_to_stdlib(name): + """ + Converts a dNSName SubjectAlternativeName field to the form used by the + standard library on the given Python version. + + Cryptography produces a dNSName as a unicode string that was idna-decoded + from ASCII bytes. We need to idna-encode that string to get it back, and + then on Python 3 we also need to convert to unicode via UTF-8 (the stdlib + uses PyUnicode_FromStringAndSize on it, which decodes via UTF-8). + """ + def idna_encode(name): + """ + Borrowed wholesale from the Python Cryptography Project. It turns out + that we can't just safely call `idna.encode`: it can explode for + wildcard names. This avoids that problem. + """ + import idna + + for prefix in [u'*.', u'.']: + if name.startswith(prefix): + name = name[len(prefix):] + return prefix.encode('ascii') + idna.encode(name) + return idna.encode(name) + + name = idna_encode(name) + if sys.version_info >= (3, 0): + name = name.decode('utf-8') + return name + + +def get_subj_alt_name(peer_cert): + """ + Given an PyOpenSSL certificate, provides all the subject alternative names. + """ + # Pass the cert to cryptography, which has much better APIs for this. + if hasattr(peer_cert, "to_cryptography"): + cert = peer_cert.to_cryptography() + else: + # This is technically using private APIs, but should work across all + # relevant versions before PyOpenSSL got a proper API for this. + cert = _Certificate(openssl_backend, peer_cert._x509) + + # We want to find the SAN extension. Ask Cryptography to locate it (it's + # faster than looping in Python) + try: + ext = cert.extensions.get_extension_for_class( + x509.SubjectAlternativeName + ).value + except x509.ExtensionNotFound: + # No such extension, return the empty list. + return [] + except (x509.DuplicateExtension, UnsupportedExtension, + x509.UnsupportedGeneralNameType, UnicodeError) as e: + # A problem has been found with the quality of the certificate. Assume + # no SAN field is present. + log.warning( + "A problem was encountered with the certificate that prevented " + "urllib3 from finding the SubjectAlternativeName field. This can " + "affect certificate validation. The error was %s", + e, + ) + return [] + + # We want to return dNSName and iPAddress fields. We need to cast the IPs + # back to strings because the match_hostname function wants them as + # strings. + # Sadly the DNS names need to be idna encoded and then, on Python 3, UTF-8 + # decoded. This is pretty frustrating, but that's what the standard library + # does with certificates, and so we need to attempt to do the same. 
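+    # The result mirrors the 'subjectAltName' entries returned by the
+    # standard library's getpeercert(), e.g.
+    # [('DNS', 'example.com'), ('IP Address', '203.0.113.7')].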
+ names = [ + ('DNS', _dnsname_to_stdlib(name)) + for name in ext.get_values_for_type(x509.DNSName) + ] + names.extend( + ('IP Address', str(name)) + for name in ext.get_values_for_type(x509.IPAddress) + ) + + return names + + +class WrappedSocket(object): + '''API-compatibility wrapper for Python OpenSSL's Connection-class. + + Note: _makefile_refs, _drop() and _reuse() are needed for the garbage + collector of pypy. + ''' + + def __init__(self, connection, socket, suppress_ragged_eofs=True): + self.connection = connection + self.socket = socket + self.suppress_ragged_eofs = suppress_ragged_eofs + self._makefile_refs = 0 + self._closed = False + + def fileno(self): + return self.socket.fileno() + + # Copy-pasted from Python 3.5 source code + def _decref_socketios(self): + if self._makefile_refs > 0: + self._makefile_refs -= 1 + if self._closed: + self.close() + + def recv(self, *args, **kwargs): + try: + data = self.connection.recv(*args, **kwargs) + except OpenSSL.SSL.SysCallError as e: + if self.suppress_ragged_eofs and e.args == (-1, 'Unexpected EOF'): + return b'' + else: + raise SocketError(str(e)) + except OpenSSL.SSL.ZeroReturnError as e: + if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN: + return b'' + else: + raise + except OpenSSL.SSL.WantReadError: + if not util.wait_for_read(self.socket, self.socket.gettimeout()): + raise timeout('The read operation timed out') + else: + return self.recv(*args, **kwargs) + else: + return data + + def recv_into(self, *args, **kwargs): + try: + return self.connection.recv_into(*args, **kwargs) + except OpenSSL.SSL.SysCallError as e: + if self.suppress_ragged_eofs and e.args == (-1, 'Unexpected EOF'): + return 0 + else: + raise SocketError(str(e)) + except OpenSSL.SSL.ZeroReturnError as e: + if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN: + return 0 + else: + raise + except OpenSSL.SSL.WantReadError: + if not util.wait_for_read(self.socket, self.socket.gettimeout()): + raise timeout('The read operation timed out') + else: + return self.recv_into(*args, **kwargs) + + def settimeout(self, timeout): + return self.socket.settimeout(timeout) + + def _send_until_done(self, data): + while True: + try: + return self.connection.send(data) + except OpenSSL.SSL.WantWriteError: + if not util.wait_for_write(self.socket, self.socket.gettimeout()): + raise timeout() + continue + except OpenSSL.SSL.SysCallError as e: + raise SocketError(str(e)) + + def sendall(self, data): + total_sent = 0 + while total_sent < len(data): + sent = self._send_until_done(data[total_sent:total_sent + SSL_WRITE_BLOCKSIZE]) + total_sent += sent + + def shutdown(self): + # FIXME rethrow compatible exceptions should we ever use this + self.connection.shutdown() + + def close(self): + if self._makefile_refs < 1: + try: + self._closed = True + return self.connection.close() + except OpenSSL.SSL.Error: + return + else: + self._makefile_refs -= 1 + + def getpeercert(self, binary_form=False): + x509 = self.connection.get_peer_certificate() + + if not x509: + return x509 + + if binary_form: + return OpenSSL.crypto.dump_certificate( + OpenSSL.crypto.FILETYPE_ASN1, + x509) + + return { + 'subject': ( + (('commonName', x509.get_subject().CN),), + ), + 'subjectAltName': get_subj_alt_name(x509) + } + + def _reuse(self): + self._makefile_refs += 1 + + def _drop(self): + if self._makefile_refs < 1: + self.close() + else: + self._makefile_refs -= 1 + + +if _fileobject: # Platform-specific: Python 2 + def makefile(self, mode, bufsize=-1): + self._makefile_refs += 
1 + return _fileobject(self, mode, bufsize, close=True) +else: # Platform-specific: Python 3 + makefile = backport_makefile + +WrappedSocket.makefile = makefile + + +class PyOpenSSLContext(object): + """ + I am a wrapper class for the PyOpenSSL ``Context`` object. I am responsible + for translating the interface of the standard library ``SSLContext`` object + to calls into PyOpenSSL. + """ + def __init__(self, protocol): + self.protocol = _openssl_versions[protocol] + self._ctx = OpenSSL.SSL.Context(self.protocol) + self._options = 0 + self.check_hostname = False + + @property + def options(self): + return self._options + + @options.setter + def options(self, value): + self._options = value + self._ctx.set_options(value) + + @property + def verify_mode(self): + return _openssl_to_stdlib_verify[self._ctx.get_verify_mode()] + + @verify_mode.setter + def verify_mode(self, value): + self._ctx.set_verify( + _stdlib_to_openssl_verify[value], + _verify_callback + ) + + def set_default_verify_paths(self): + self._ctx.set_default_verify_paths() + + def set_ciphers(self, ciphers): + if isinstance(ciphers, six.text_type): + ciphers = ciphers.encode('utf-8') + self._ctx.set_cipher_list(ciphers) + + def load_verify_locations(self, cafile=None, capath=None, cadata=None): + if cafile is not None: + cafile = cafile.encode('utf-8') + if capath is not None: + capath = capath.encode('utf-8') + self._ctx.load_verify_locations(cafile, capath) + if cadata is not None: + self._ctx.load_verify_locations(BytesIO(cadata)) + + def load_cert_chain(self, certfile, keyfile=None, password=None): + self._ctx.use_certificate_chain_file(certfile) + if password is not None: + self._ctx.set_passwd_cb(lambda max_length, prompt_twice, userdata: password) + self._ctx.use_privatekey_file(keyfile or certfile) + + def wrap_socket(self, sock, server_side=False, + do_handshake_on_connect=True, suppress_ragged_eofs=True, + server_hostname=None): + cnx = OpenSSL.SSL.Connection(self._ctx, sock) + + if isinstance(server_hostname, six.text_type): # Platform-specific: Python 3 + server_hostname = server_hostname.encode('utf-8') + + if server_hostname is not None: + cnx.set_tlsext_host_name(server_hostname) + + cnx.set_connect_state() + + while True: + try: + cnx.do_handshake() + except OpenSSL.SSL.WantReadError: + if not util.wait_for_read(sock, sock.gettimeout()): + raise timeout('select timed out') + continue + except OpenSSL.SSL.Error as e: + raise ssl.SSLError('bad handshake: %r' % e) + break + + return WrappedSocket(cnx, sock) + + +def _verify_callback(cnx, x509, err_no, err_depth, return_code): + return err_no == 0 diff --git a/env/lib/python3.6/site-packages/urllib3/contrib/securetransport.py b/env/lib/python3.6/site-packages/urllib3/contrib/securetransport.py new file mode 100755 index 0000000..77cb59e --- /dev/null +++ b/env/lib/python3.6/site-packages/urllib3/contrib/securetransport.py @@ -0,0 +1,804 @@ +""" +SecureTranport support for urllib3 via ctypes. + +This makes platform-native TLS available to urllib3 users on macOS without the +use of a compiler. This is an important feature because the Python Package +Index is moving to become a TLSv1.2-or-higher server, and the default OpenSSL +that ships with macOS is not capable of doing TLSv1.2. The only way to resolve +this is to give macOS users an alternative solution to the problem, and that +solution is to use SecureTransport. + +We use ctypes here because this solution must not require a compiler. That's +because pip is not allowed to require a compiler either. 
+ +This is not intended to be a seriously long-term solution to this problem. +The hope is that PEP 543 will eventually solve this issue for us, at which +point we can retire this contrib module. But in the short term, we need to +solve the impending tire fire that is Python on Mac without this kind of +contrib module. So...here we are. + +To use this module, simply import and inject it:: + + import urllib3.contrib.securetransport + urllib3.contrib.securetransport.inject_into_urllib3() + +Happy TLSing! +""" +from __future__ import absolute_import + +import contextlib +import ctypes +import errno +import os.path +import shutil +import socket +import ssl +import threading +import weakref + +from .. import util +from ._securetransport.bindings import ( + Security, SecurityConst, CoreFoundation +) +from ._securetransport.low_level import ( + _assert_no_error, _cert_array_from_pem, _temporary_keychain, + _load_client_cert_chain +) + +try: # Platform-specific: Python 2 + from socket import _fileobject +except ImportError: # Platform-specific: Python 3 + _fileobject = None + from ..packages.backports.makefile import backport_makefile + +__all__ = ['inject_into_urllib3', 'extract_from_urllib3'] + +# SNI always works +HAS_SNI = True + +orig_util_HAS_SNI = util.HAS_SNI +orig_util_SSLContext = util.ssl_.SSLContext + +# This dictionary is used by the read callback to obtain a handle to the +# calling wrapped socket. This is a pretty silly approach, but for now it'll +# do. I feel like I should be able to smuggle a handle to the wrapped socket +# directly in the SSLConnectionRef, but for now this approach will work I +# guess. +# +# We need to lock around this structure for inserts, but we don't do it for +# reads/writes in the callbacks. The reasoning here goes as follows: +# +# 1. It is not possible to call into the callbacks before the dictionary is +# populated, so once in the callback the id must be in the dictionary. +# 2. The callbacks don't mutate the dictionary, they only read from it, and +# so cannot conflict with any of the insertions. +# +# This is good: if we had to lock in the callbacks we'd drastically slow down +# the performance of this code. +_connection_refs = weakref.WeakValueDictionary() +_connection_ref_lock = threading.Lock() + +# Limit writes to 16kB. This is OpenSSL's limit, but we'll cargo-cult it over +# for no better reason than we need *a* limit, and this one is right there. +SSL_WRITE_BLOCKSIZE = 16384 + +# This is our equivalent of util.ssl_.DEFAULT_CIPHERS, but expanded out to +# individual cipher suites. We need to do this because this is how +# SecureTransport wants them. 
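+# (SecureTransport's SSLSetEnabledCiphers takes an array of numeric
+# cipher-suite constants rather than an OpenSSL-style cipher string, which
+# is why every suite is spelled out individually here.)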
+CIPHER_SUITES = [ + SecurityConst.TLS_AES_256_GCM_SHA384, + SecurityConst.TLS_CHACHA20_POLY1305_SHA256, + SecurityConst.TLS_AES_128_GCM_SHA256, + SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384, + SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384, + SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256, + SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256, + SecurityConst.TLS_DHE_DSS_WITH_AES_256_GCM_SHA384, + SecurityConst.TLS_DHE_RSA_WITH_AES_256_GCM_SHA384, + SecurityConst.TLS_DHE_DSS_WITH_AES_128_GCM_SHA256, + SecurityConst.TLS_DHE_RSA_WITH_AES_128_GCM_SHA256, + SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384, + SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384, + SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA, + SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA, + SecurityConst.TLS_DHE_RSA_WITH_AES_256_CBC_SHA256, + SecurityConst.TLS_DHE_DSS_WITH_AES_256_CBC_SHA256, + SecurityConst.TLS_DHE_RSA_WITH_AES_256_CBC_SHA, + SecurityConst.TLS_DHE_DSS_WITH_AES_256_CBC_SHA, + SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256, + SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256, + SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA, + SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA, + SecurityConst.TLS_DHE_RSA_WITH_AES_128_CBC_SHA256, + SecurityConst.TLS_DHE_DSS_WITH_AES_128_CBC_SHA256, + SecurityConst.TLS_DHE_RSA_WITH_AES_128_CBC_SHA, + SecurityConst.TLS_DHE_DSS_WITH_AES_128_CBC_SHA, + SecurityConst.TLS_RSA_WITH_AES_256_GCM_SHA384, + SecurityConst.TLS_RSA_WITH_AES_128_GCM_SHA256, + SecurityConst.TLS_RSA_WITH_AES_256_CBC_SHA256, + SecurityConst.TLS_RSA_WITH_AES_128_CBC_SHA256, + SecurityConst.TLS_RSA_WITH_AES_256_CBC_SHA, + SecurityConst.TLS_RSA_WITH_AES_128_CBC_SHA, +] + +# Basically this is simple: for PROTOCOL_SSLv23 we turn it into a low of +# TLSv1 and a high of TLSv1.2. For everything else, we pin to that version. +_protocol_to_min_max = { + ssl.PROTOCOL_SSLv23: (SecurityConst.kTLSProtocol1, SecurityConst.kTLSProtocol12), +} + +if hasattr(ssl, "PROTOCOL_SSLv2"): + _protocol_to_min_max[ssl.PROTOCOL_SSLv2] = ( + SecurityConst.kSSLProtocol2, SecurityConst.kSSLProtocol2 + ) +if hasattr(ssl, "PROTOCOL_SSLv3"): + _protocol_to_min_max[ssl.PROTOCOL_SSLv3] = ( + SecurityConst.kSSLProtocol3, SecurityConst.kSSLProtocol3 + ) +if hasattr(ssl, "PROTOCOL_TLSv1"): + _protocol_to_min_max[ssl.PROTOCOL_TLSv1] = ( + SecurityConst.kTLSProtocol1, SecurityConst.kTLSProtocol1 + ) +if hasattr(ssl, "PROTOCOL_TLSv1_1"): + _protocol_to_min_max[ssl.PROTOCOL_TLSv1_1] = ( + SecurityConst.kTLSProtocol11, SecurityConst.kTLSProtocol11 + ) +if hasattr(ssl, "PROTOCOL_TLSv1_2"): + _protocol_to_min_max[ssl.PROTOCOL_TLSv1_2] = ( + SecurityConst.kTLSProtocol12, SecurityConst.kTLSProtocol12 + ) +if hasattr(ssl, "PROTOCOL_TLS"): + _protocol_to_min_max[ssl.PROTOCOL_TLS] = _protocol_to_min_max[ssl.PROTOCOL_SSLv23] + + +def inject_into_urllib3(): + """ + Monkey-patch urllib3 with SecureTransport-backed SSL-support. + """ + util.ssl_.SSLContext = SecureTransportContext + util.HAS_SNI = HAS_SNI + util.ssl_.HAS_SNI = HAS_SNI + util.IS_SECURETRANSPORT = True + util.ssl_.IS_SECURETRANSPORT = True + + +def extract_from_urllib3(): + """ + Undo monkey-patching by :func:`inject_into_urllib3`. + """ + util.ssl_.SSLContext = orig_util_SSLContext + util.HAS_SNI = orig_util_HAS_SNI + util.ssl_.HAS_SNI = orig_util_HAS_SNI + util.IS_SECURETRANSPORT = False + util.ssl_.IS_SECURETRANSPORT = False + + +def _read_callback(connection_id, data_buffer, data_length_pointer): + """ + SecureTransport read callback. 
This is called by ST to request that data + be returned from the socket. + """ + wrapped_socket = None + try: + wrapped_socket = _connection_refs.get(connection_id) + if wrapped_socket is None: + return SecurityConst.errSSLInternal + base_socket = wrapped_socket.socket + + requested_length = data_length_pointer[0] + + timeout = wrapped_socket.gettimeout() + error = None + read_count = 0 + + try: + while read_count < requested_length: + if timeout is None or timeout >= 0: + if not util.wait_for_read(base_socket, timeout): + raise socket.error(errno.EAGAIN, 'timed out') + + remaining = requested_length - read_count + buffer = (ctypes.c_char * remaining).from_address( + data_buffer + read_count + ) + chunk_size = base_socket.recv_into(buffer, remaining) + read_count += chunk_size + if not chunk_size: + if not read_count: + return SecurityConst.errSSLClosedGraceful + break + except (socket.error) as e: + error = e.errno + + if error is not None and error != errno.EAGAIN: + data_length_pointer[0] = read_count + if error == errno.ECONNRESET or error == errno.EPIPE: + return SecurityConst.errSSLClosedAbort + raise + + data_length_pointer[0] = read_count + + if read_count != requested_length: + return SecurityConst.errSSLWouldBlock + + return 0 + except Exception as e: + if wrapped_socket is not None: + wrapped_socket._exception = e + return SecurityConst.errSSLInternal + + +def _write_callback(connection_id, data_buffer, data_length_pointer): + """ + SecureTransport write callback. This is called by ST to request that data + actually be sent on the network. + """ + wrapped_socket = None + try: + wrapped_socket = _connection_refs.get(connection_id) + if wrapped_socket is None: + return SecurityConst.errSSLInternal + base_socket = wrapped_socket.socket + + bytes_to_write = data_length_pointer[0] + data = ctypes.string_at(data_buffer, bytes_to_write) + + timeout = wrapped_socket.gettimeout() + error = None + sent = 0 + + try: + while sent < bytes_to_write: + if timeout is None or timeout >= 0: + if not util.wait_for_write(base_socket, timeout): + raise socket.error(errno.EAGAIN, 'timed out') + chunk_sent = base_socket.send(data) + sent += chunk_sent + + # This has some needless copying here, but I'm not sure there's + # much value in optimising this data path. + data = data[chunk_sent:] + except (socket.error) as e: + error = e.errno + + if error is not None and error != errno.EAGAIN: + data_length_pointer[0] = sent + if error == errno.ECONNRESET or error == errno.EPIPE: + return SecurityConst.errSSLClosedAbort + raise + + data_length_pointer[0] = sent + + if sent != bytes_to_write: + return SecurityConst.errSSLWouldBlock + + return 0 + except Exception as e: + if wrapped_socket is not None: + wrapped_socket._exception = e + return SecurityConst.errSSLInternal + + +# We need to keep these two objects references alive: if they get GC'd while +# in use then SecureTransport could attempt to call a function that is in freed +# memory. That would be...uh...bad. Yeah, that's the word. Bad. +_read_callback_pointer = Security.SSLReadFunc(_read_callback) +_write_callback_pointer = Security.SSLWriteFunc(_write_callback) + + +class WrappedSocket(object): + """ + API-compatibility wrapper for Python's OpenSSL wrapped socket object. + + Note: _makefile_refs, _drop(), and _reuse() are needed for the garbage + collector of PyPy. 
+ """ + def __init__(self, socket): + self.socket = socket + self.context = None + self._makefile_refs = 0 + self._closed = False + self._exception = None + self._keychain = None + self._keychain_dir = None + self._client_cert_chain = None + + # We save off the previously-configured timeout and then set it to + # zero. This is done because we use select and friends to handle the + # timeouts, but if we leave the timeout set on the lower socket then + # Python will "kindly" call select on that socket again for us. Avoid + # that by forcing the timeout to zero. + self._timeout = self.socket.gettimeout() + self.socket.settimeout(0) + + @contextlib.contextmanager + def _raise_on_error(self): + """ + A context manager that can be used to wrap calls that do I/O from + SecureTransport. If any of the I/O callbacks hit an exception, this + context manager will correctly propagate the exception after the fact. + This avoids silently swallowing those exceptions. + + It also correctly forces the socket closed. + """ + self._exception = None + + # We explicitly don't catch around this yield because in the unlikely + # event that an exception was hit in the block we don't want to swallow + # it. + yield + if self._exception is not None: + exception, self._exception = self._exception, None + self.close() + raise exception + + def _set_ciphers(self): + """ + Sets up the allowed ciphers. By default this matches the set in + util.ssl_.DEFAULT_CIPHERS, at least as supported by macOS. This is done + custom and doesn't allow changing at this time, mostly because parsing + OpenSSL cipher strings is going to be a freaking nightmare. + """ + ciphers = (Security.SSLCipherSuite * len(CIPHER_SUITES))(*CIPHER_SUITES) + result = Security.SSLSetEnabledCiphers( + self.context, ciphers, len(CIPHER_SUITES) + ) + _assert_no_error(result) + + def _custom_validate(self, verify, trust_bundle): + """ + Called when we have set custom validation. We do this in two cases: + first, when cert validation is entirely disabled; and second, when + using a custom trust DB. + """ + # If we disabled cert validation, just say: cool. + if not verify: + return + + # We want data in memory, so load it up. + if os.path.isfile(trust_bundle): + with open(trust_bundle, 'rb') as f: + trust_bundle = f.read() + + cert_array = None + trust = Security.SecTrustRef() + + try: + # Get a CFArray that contains the certs we want. + cert_array = _cert_array_from_pem(trust_bundle) + + # Ok, now the hard part. We want to get the SecTrustRef that ST has + # created for this connection, shove our CAs into it, tell ST to + # ignore everything else it knows, and then ask if it can build a + # chain. This is a buuuunch of code. + result = Security.SSLCopyPeerTrust( + self.context, ctypes.byref(trust) + ) + _assert_no_error(result) + if not trust: + raise ssl.SSLError("Failed to copy trust reference") + + result = Security.SecTrustSetAnchorCertificates(trust, cert_array) + _assert_no_error(result) + + result = Security.SecTrustSetAnchorCertificatesOnly(trust, True) + _assert_no_error(result) + + trust_result = Security.SecTrustResultType() + result = Security.SecTrustEvaluate( + trust, ctypes.byref(trust_result) + ) + _assert_no_error(result) + finally: + if trust: + CoreFoundation.CFRelease(trust) + + if cert_array is not None: + CoreFoundation.CFRelease(cert_array) + + # Ok, now we can look at what the result was. 
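+        # kSecTrustResultUnspecified (trusted, no explicit user setting) and
+        # kSecTrustResultProceed (explicitly trusted by the user) are the
+        # only acceptable outcomes; anything else fails verification.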
+ successes = ( + SecurityConst.kSecTrustResultUnspecified, + SecurityConst.kSecTrustResultProceed + ) + if trust_result.value not in successes: + raise ssl.SSLError( + "certificate verify failed, error code: %d" % + trust_result.value + ) + + def handshake(self, + server_hostname, + verify, + trust_bundle, + min_version, + max_version, + client_cert, + client_key, + client_key_passphrase): + """ + Actually performs the TLS handshake. This is run automatically by + wrapped socket, and shouldn't be needed in user code. + """ + # First, we do the initial bits of connection setup. We need to create + # a context, set its I/O funcs, and set the connection reference. + self.context = Security.SSLCreateContext( + None, SecurityConst.kSSLClientSide, SecurityConst.kSSLStreamType + ) + result = Security.SSLSetIOFuncs( + self.context, _read_callback_pointer, _write_callback_pointer + ) + _assert_no_error(result) + + # Here we need to compute the handle to use. We do this by taking the + # id of self modulo 2**31 - 1. If this is already in the dictionary, we + # just keep incrementing by one until we find a free space. + with _connection_ref_lock: + handle = id(self) % 2147483647 + while handle in _connection_refs: + handle = (handle + 1) % 2147483647 + _connection_refs[handle] = self + + result = Security.SSLSetConnection(self.context, handle) + _assert_no_error(result) + + # If we have a server hostname, we should set that too. + if server_hostname: + if not isinstance(server_hostname, bytes): + server_hostname = server_hostname.encode('utf-8') + + result = Security.SSLSetPeerDomainName( + self.context, server_hostname, len(server_hostname) + ) + _assert_no_error(result) + + # Setup the ciphers. + self._set_ciphers() + + # Set the minimum and maximum TLS versions. + result = Security.SSLSetProtocolVersionMin(self.context, min_version) + _assert_no_error(result) + result = Security.SSLSetProtocolVersionMax(self.context, max_version) + _assert_no_error(result) + + # If there's a trust DB, we need to use it. We do that by telling + # SecureTransport to break on server auth. We also do that if we don't + # want to validate the certs at all: we just won't actually do any + # authing in that case. + if not verify or trust_bundle is not None: + result = Security.SSLSetSessionOption( + self.context, + SecurityConst.kSSLSessionOptionBreakOnServerAuth, + True + ) + _assert_no_error(result) + + # If there's a client cert, we need to use it. + if client_cert: + self._keychain, self._keychain_dir = _temporary_keychain() + self._client_cert_chain = _load_client_cert_chain( + self._keychain, client_cert, client_key + ) + result = Security.SSLSetCertificate( + self.context, self._client_cert_chain + ) + _assert_no_error(result) + + while True: + with self._raise_on_error(): + result = Security.SSLHandshake(self.context) + + if result == SecurityConst.errSSLWouldBlock: + raise socket.timeout("handshake timed out") + elif result == SecurityConst.errSSLServerAuthCompleted: + self._custom_validate(verify, trust_bundle) + continue + else: + _assert_no_error(result) + break + + def fileno(self): + return self.socket.fileno() + + # Copy-pasted from Python 3.5 source code + def _decref_socketios(self): + if self._makefile_refs > 0: + self._makefile_refs -= 1 + if self._closed: + self.close() + + def recv(self, bufsiz): + buffer = ctypes.create_string_buffer(bufsiz) + bytes_read = self.recv_into(buffer, bufsiz) + data = buffer[:bytes_read] + return data + + def recv_into(self, buffer, nbytes=None): + # Read short on EOF. 
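+        # (A connection that has already been closed reports zero bytes,
+        # matching plain-socket behaviour at EOF.)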
+ if self._closed: + return 0 + + if nbytes is None: + nbytes = len(buffer) + + buffer = (ctypes.c_char * nbytes).from_buffer(buffer) + processed_bytes = ctypes.c_size_t(0) + + with self._raise_on_error(): + result = Security.SSLRead( + self.context, buffer, nbytes, ctypes.byref(processed_bytes) + ) + + # There are some result codes that we want to treat as "not always + # errors". Specifically, those are errSSLWouldBlock, + # errSSLClosedGraceful, and errSSLClosedNoNotify. + if (result == SecurityConst.errSSLWouldBlock): + # If we didn't process any bytes, then this was just a time out. + # However, we can get errSSLWouldBlock in situations when we *did* + # read some data, and in those cases we should just read "short" + # and return. + if processed_bytes.value == 0: + # Timed out, no data read. + raise socket.timeout("recv timed out") + elif result in (SecurityConst.errSSLClosedGraceful, SecurityConst.errSSLClosedNoNotify): + # The remote peer has closed this connection. We should do so as + # well. Note that we don't actually return here because in + # principle this could actually be fired along with return data. + # It's unlikely though. + self.close() + else: + _assert_no_error(result) + + # Ok, we read and probably succeeded. We should return whatever data + # was actually read. + return processed_bytes.value + + def settimeout(self, timeout): + self._timeout = timeout + + def gettimeout(self): + return self._timeout + + def send(self, data): + processed_bytes = ctypes.c_size_t(0) + + with self._raise_on_error(): + result = Security.SSLWrite( + self.context, data, len(data), ctypes.byref(processed_bytes) + ) + + if result == SecurityConst.errSSLWouldBlock and processed_bytes.value == 0: + # Timed out + raise socket.timeout("send timed out") + else: + _assert_no_error(result) + + # We sent, and probably succeeded. Tell them how much we sent. + return processed_bytes.value + + def sendall(self, data): + total_sent = 0 + while total_sent < len(data): + sent = self.send(data[total_sent:total_sent + SSL_WRITE_BLOCKSIZE]) + total_sent += sent + + def shutdown(self): + with self._raise_on_error(): + Security.SSLClose(self.context) + + def close(self): + # TODO: should I do clean shutdown here? Do I have to? + if self._makefile_refs < 1: + self._closed = True + if self.context: + CoreFoundation.CFRelease(self.context) + self.context = None + if self._client_cert_chain: + CoreFoundation.CFRelease(self._client_cert_chain) + self._client_cert_chain = None + if self._keychain: + Security.SecKeychainDelete(self._keychain) + CoreFoundation.CFRelease(self._keychain) + shutil.rmtree(self._keychain_dir) + self._keychain = self._keychain_dir = None + return self.socket.close() + else: + self._makefile_refs -= 1 + + def getpeercert(self, binary_form=False): + # Urgh, annoying. + # + # Here's how we do this: + # + # 1. Call SSLCopyPeerTrust to get hold of the trust object for this + # connection. + # 2. Call SecTrustGetCertificateAtIndex for index 0 to get the leaf. + # 3. To get the CN, call SecCertificateCopyCommonName and process that + # string so that it's of the appropriate type. + # 4. To get the SAN, we need to do something a bit more complex: + # a. Call SecCertificateCopyValues to get the data, requesting + # kSecOIDSubjectAltName. + # b. Mess about with this dictionary to try to get the SANs out. + # + # This is gross. Really gross. It's going to be a few hundred LoC extra + # just to repeat something that SecureTransport can *already do*. 
So my + # operating assumption at this time is that what we want to do is + # instead to just flag to urllib3 that it shouldn't do its own hostname + # validation when using SecureTransport. + if not binary_form: + raise ValueError( + "SecureTransport only supports dumping binary certs" + ) + trust = Security.SecTrustRef() + certdata = None + der_bytes = None + + try: + # Grab the trust store. + result = Security.SSLCopyPeerTrust( + self.context, ctypes.byref(trust) + ) + _assert_no_error(result) + if not trust: + # Probably we haven't done the handshake yet. No biggie. + return None + + cert_count = Security.SecTrustGetCertificateCount(trust) + if not cert_count: + # Also a case that might happen if we haven't handshaked. + # Handshook? Handshaken? + return None + + leaf = Security.SecTrustGetCertificateAtIndex(trust, 0) + assert leaf + + # Ok, now we want the DER bytes. + certdata = Security.SecCertificateCopyData(leaf) + assert certdata + + data_length = CoreFoundation.CFDataGetLength(certdata) + data_buffer = CoreFoundation.CFDataGetBytePtr(certdata) + der_bytes = ctypes.string_at(data_buffer, data_length) + finally: + if certdata: + CoreFoundation.CFRelease(certdata) + if trust: + CoreFoundation.CFRelease(trust) + + return der_bytes + + def _reuse(self): + self._makefile_refs += 1 + + def _drop(self): + if self._makefile_refs < 1: + self.close() + else: + self._makefile_refs -= 1 + + +if _fileobject: # Platform-specific: Python 2 + def makefile(self, mode, bufsize=-1): + self._makefile_refs += 1 + return _fileobject(self, mode, bufsize, close=True) +else: # Platform-specific: Python 3 + def makefile(self, mode="r", buffering=None, *args, **kwargs): + # We disable buffering with SecureTransport because it conflicts with + # the buffering that ST does internally (see issue #1153 for more). + buffering = 0 + return backport_makefile(self, mode, buffering, *args, **kwargs) + +WrappedSocket.makefile = makefile + + +class SecureTransportContext(object): + """ + I am a wrapper class for the SecureTransport library, to translate the + interface of the standard library ``SSLContext`` object to calls into + SecureTransport. + """ + def __init__(self, protocol): + self._min_version, self._max_version = _protocol_to_min_max[protocol] + self._options = 0 + self._verify = False + self._trust_bundle = None + self._client_cert = None + self._client_key = None + self._client_key_passphrase = None + + @property + def check_hostname(self): + """ + SecureTransport cannot have its hostname checking disabled. For more, + see the comment on getpeercert() in this file. + """ + return True + + @check_hostname.setter + def check_hostname(self, value): + """ + SecureTransport cannot have its hostname checking disabled. For more, + see the comment on getpeercert() in this file. + """ + pass + + @property + def options(self): + # TODO: Well, crap. + # + # So this is the bit of the code that is the most likely to cause us + # trouble. Essentially we need to enumerate all of the SSL options that + # users might want to use and try to see if we can sensibly translate + # them, or whether we should just ignore them. + return self._options + + @options.setter + def options(self, value): + # TODO: Update in line with above. 
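+        # For now the flags are stored verbatim and are not translated into
+        # SecureTransport session options.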
+ self._options = value + + @property + def verify_mode(self): + return ssl.CERT_REQUIRED if self._verify else ssl.CERT_NONE + + @verify_mode.setter + def verify_mode(self, value): + self._verify = True if value == ssl.CERT_REQUIRED else False + + def set_default_verify_paths(self): + # So, this has to do something a bit weird. Specifically, what it does + # is nothing. + # + # This means that, if we had previously had load_verify_locations + # called, this does not undo that. We need to do that because it turns + # out that the rest of the urllib3 code will attempt to load the + # default verify paths if it hasn't been told about any paths, even if + # the context itself was sometime earlier. We resolve that by just + # ignoring it. + pass + + def load_default_certs(self): + return self.set_default_verify_paths() + + def set_ciphers(self, ciphers): + # For now, we just require the default cipher string. + if ciphers != util.ssl_.DEFAULT_CIPHERS: + raise ValueError( + "SecureTransport doesn't support custom cipher strings" + ) + + def load_verify_locations(self, cafile=None, capath=None, cadata=None): + # OK, we only really support cadata and cafile. + if capath is not None: + raise ValueError( + "SecureTransport does not support cert directories" + ) + + self._trust_bundle = cafile or cadata + + def load_cert_chain(self, certfile, keyfile=None, password=None): + self._client_cert = certfile + self._client_key = keyfile + self._client_cert_passphrase = password + + def wrap_socket(self, sock, server_side=False, + do_handshake_on_connect=True, suppress_ragged_eofs=True, + server_hostname=None): + # So, what do we do here? Firstly, we assert some properties. This is a + # stripped down shim, so there is some functionality we don't support. + # See PEP 543 for the real deal. + assert not server_side + assert do_handshake_on_connect + assert suppress_ragged_eofs + + # Ok, we're good to go. Now we want to create the wrapped socket object + # and store it in the appropriate place. + wrapped_socket = WrappedSocket(sock) + + # Now we can handshake + wrapped_socket.handshake( + server_hostname, self._verify, self._trust_bundle, + self._min_version, self._max_version, self._client_cert, + self._client_key, self._client_key_passphrase + ) + return wrapped_socket diff --git a/env/lib/python3.6/site-packages/urllib3/contrib/socks.py b/env/lib/python3.6/site-packages/urllib3/contrib/socks.py new file mode 100755 index 0000000..811e312 --- /dev/null +++ b/env/lib/python3.6/site-packages/urllib3/contrib/socks.py @@ -0,0 +1,192 @@ +# -*- coding: utf-8 -*- +""" +This module contains provisional support for SOCKS proxies from within +urllib3. This module supports SOCKS4 (specifically the SOCKS4A variant) and +SOCKS5. To enable its functionality, either install PySocks or install this +module with the ``socks`` extra. + +The SOCKS implementation supports the full range of urllib3 features. It also +supports the following SOCKS features: + +- SOCKS4 +- SOCKS4a +- SOCKS5 +- Usernames and passwords for the SOCKS proxy + +Known Limitations: + +- Currently PySocks does not support contacting remote websites via literal + IPv6 addresses. Any such connection attempt will fail. You must use a domain + name. +- Currently PySocks does not support IPv6 connections to the SOCKS proxy. Any + such connection attempt will fail. 
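+
+A minimal usage sketch (the proxy URL and target below are illustrative)::
+
+    from urllib3.contrib.socks import SOCKSProxyManager
+
+    proxy = SOCKSProxyManager('socks5h://localhost:1080/')
+    r = proxy.request('GET', 'https://example.com/')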
+""" +from __future__ import absolute_import + +try: + import socks +except ImportError: + import warnings + from ..exceptions import DependencyWarning + + warnings.warn(( + 'SOCKS support in urllib3 requires the installation of optional ' + 'dependencies: specifically, PySocks. For more information, see ' + 'https://urllib3.readthedocs.io/en/latest/contrib.html#socks-proxies' + ), + DependencyWarning + ) + raise + +from socket import error as SocketError, timeout as SocketTimeout + +from ..connection import ( + HTTPConnection, HTTPSConnection +) +from ..connectionpool import ( + HTTPConnectionPool, HTTPSConnectionPool +) +from ..exceptions import ConnectTimeoutError, NewConnectionError +from ..poolmanager import PoolManager +from ..util.url import parse_url + +try: + import ssl +except ImportError: + ssl = None + + +class SOCKSConnection(HTTPConnection): + """ + A plain-text HTTP connection that connects via a SOCKS proxy. + """ + def __init__(self, *args, **kwargs): + self._socks_options = kwargs.pop('_socks_options') + super(SOCKSConnection, self).__init__(*args, **kwargs) + + def _new_conn(self): + """ + Establish a new connection via the SOCKS proxy. + """ + extra_kw = {} + if self.source_address: + extra_kw['source_address'] = self.source_address + + if self.socket_options: + extra_kw['socket_options'] = self.socket_options + + try: + conn = socks.create_connection( + (self.host, self.port), + proxy_type=self._socks_options['socks_version'], + proxy_addr=self._socks_options['proxy_host'], + proxy_port=self._socks_options['proxy_port'], + proxy_username=self._socks_options['username'], + proxy_password=self._socks_options['password'], + proxy_rdns=self._socks_options['rdns'], + timeout=self.timeout, + **extra_kw + ) + + except SocketTimeout as e: + raise ConnectTimeoutError( + self, "Connection to %s timed out. (connect timeout=%s)" % + (self.host, self.timeout)) + + except socks.ProxyError as e: + # This is fragile as hell, but it seems to be the only way to raise + # useful errors here. + if e.socket_err: + error = e.socket_err + if isinstance(error, SocketTimeout): + raise ConnectTimeoutError( + self, + "Connection to %s timed out. (connect timeout=%s)" % + (self.host, self.timeout) + ) + else: + raise NewConnectionError( + self, + "Failed to establish a new connection: %s" % error + ) + else: + raise NewConnectionError( + self, + "Failed to establish a new connection: %s" % e + ) + + except SocketError as e: # Defensive: PySocks should catch all these. + raise NewConnectionError( + self, "Failed to establish a new connection: %s" % e) + + return conn + + +# We don't need to duplicate the Verified/Unverified distinction from +# urllib3/connection.py here because the HTTPSConnection will already have been +# correctly set to either the Verified or Unverified form by that module. This +# means the SOCKSHTTPSConnection will automatically be the correct type. +class SOCKSHTTPSConnection(SOCKSConnection, HTTPSConnection): + pass + + +class SOCKSHTTPConnectionPool(HTTPConnectionPool): + ConnectionCls = SOCKSConnection + + +class SOCKSHTTPSConnectionPool(HTTPSConnectionPool): + ConnectionCls = SOCKSHTTPSConnection + + +class SOCKSProxyManager(PoolManager): + """ + A version of the urllib3 ProxyManager that routes connections via the + defined SOCKS proxy. 
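+
+    Supported proxy URL schemes are ``socks4``, ``socks4a``, ``socks5`` and
+    ``socks5h``; the ``socks4a``/``socks5h`` variants resolve hostnames
+    through the proxy itself (remote DNS) rather than locally.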
+ """ + pool_classes_by_scheme = { + 'http': SOCKSHTTPConnectionPool, + 'https': SOCKSHTTPSConnectionPool, + } + + def __init__(self, proxy_url, username=None, password=None, + num_pools=10, headers=None, **connection_pool_kw): + parsed = parse_url(proxy_url) + + if username is None and password is None and parsed.auth is not None: + split = parsed.auth.split(':') + if len(split) == 2: + username, password = split + if parsed.scheme == 'socks5': + socks_version = socks.PROXY_TYPE_SOCKS5 + rdns = False + elif parsed.scheme == 'socks5h': + socks_version = socks.PROXY_TYPE_SOCKS5 + rdns = True + elif parsed.scheme == 'socks4': + socks_version = socks.PROXY_TYPE_SOCKS4 + rdns = False + elif parsed.scheme == 'socks4a': + socks_version = socks.PROXY_TYPE_SOCKS4 + rdns = True + else: + raise ValueError( + "Unable to determine SOCKS version from %s" % proxy_url + ) + + self.proxy_url = proxy_url + + socks_options = { + 'socks_version': socks_version, + 'proxy_host': parsed.host, + 'proxy_port': parsed.port, + 'username': username, + 'password': password, + 'rdns': rdns + } + connection_pool_kw['_socks_options'] = socks_options + + super(SOCKSProxyManager, self).__init__( + num_pools, headers, **connection_pool_kw + ) + + self.pool_classes_by_scheme = SOCKSProxyManager.pool_classes_by_scheme diff --git a/env/lib/python3.6/site-packages/urllib3/exceptions.py b/env/lib/python3.6/site-packages/urllib3/exceptions.py new file mode 100755 index 0000000..7bbaa98 --- /dev/null +++ b/env/lib/python3.6/site-packages/urllib3/exceptions.py @@ -0,0 +1,246 @@ +from __future__ import absolute_import +from .packages.six.moves.http_client import ( + IncompleteRead as httplib_IncompleteRead +) +# Base Exceptions + + +class HTTPError(Exception): + "Base exception used by this module." + pass + + +class HTTPWarning(Warning): + "Base warning used by this module." + pass + + +class PoolError(HTTPError): + "Base exception for errors caused within a pool." + def __init__(self, pool, message): + self.pool = pool + HTTPError.__init__(self, "%s: %s" % (pool, message)) + + def __reduce__(self): + # For pickling purposes. + return self.__class__, (None, None) + + +class RequestError(PoolError): + "Base exception for PoolErrors that have associated URLs." + def __init__(self, pool, url, message): + self.url = url + PoolError.__init__(self, pool, message) + + def __reduce__(self): + # For pickling purposes. + return self.__class__, (None, self.url, None) + + +class SSLError(HTTPError): + "Raised when SSL certificate fails in an HTTPS connection." + pass + + +class ProxyError(HTTPError): + "Raised when the connection to a proxy fails." + pass + + +class DecodeError(HTTPError): + "Raised when automatic decoding based on Content-Type fails." + pass + + +class ProtocolError(HTTPError): + "Raised when something unexpected happens mid-request/response." + pass + + +#: Renamed to ProtocolError but aliased for backwards compatibility. +ConnectionError = ProtocolError + + +# Leaf Exceptions + +class MaxRetryError(RequestError): + """Raised when the maximum number of retries is exceeded. 
+ + :param pool: The connection pool + :type pool: :class:`~urllib3.connectionpool.HTTPConnectionPool` + :param string url: The requested Url + :param exceptions.Exception reason: The underlying error + + """ + + def __init__(self, pool, url, reason=None): + self.reason = reason + + message = "Max retries exceeded with url: %s (Caused by %r)" % ( + url, reason) + + RequestError.__init__(self, pool, url, message) + + +class HostChangedError(RequestError): + "Raised when an existing pool gets a request for a foreign host." + + def __init__(self, pool, url, retries=3): + message = "Tried to open a foreign host with url: %s" % url + RequestError.__init__(self, pool, url, message) + self.retries = retries + + +class TimeoutStateError(HTTPError): + """ Raised when passing an invalid state to a timeout """ + pass + + +class TimeoutError(HTTPError): + """ Raised when a socket timeout error occurs. + + Catching this error will catch both :exc:`ReadTimeoutErrors + ` and :exc:`ConnectTimeoutErrors `. + """ + pass + + +class ReadTimeoutError(TimeoutError, RequestError): + "Raised when a socket timeout occurs while receiving data from a server" + pass + + +# This timeout error does not have a URL attached and needs to inherit from the +# base HTTPError +class ConnectTimeoutError(TimeoutError): + "Raised when a socket timeout occurs while connecting to a server" + pass + + +class NewConnectionError(ConnectTimeoutError, PoolError): + "Raised when we fail to establish a new connection. Usually ECONNREFUSED." + pass + + +class EmptyPoolError(PoolError): + "Raised when a pool runs out of connections and no more are allowed." + pass + + +class ClosedPoolError(PoolError): + "Raised when a request enters a pool after the pool has been closed." + pass + + +class LocationValueError(ValueError, HTTPError): + "Raised when there is something wrong with a given URL input." + pass + + +class LocationParseError(LocationValueError): + "Raised when get_host or similar fails to parse the URL input." + + def __init__(self, location): + message = "Failed to parse: %s" % location + HTTPError.__init__(self, message) + + self.location = location + + +class ResponseError(HTTPError): + "Used as a container for an error reason supplied in a MaxRetryError." + GENERIC_ERROR = 'too many error responses' + SPECIFIC_ERROR = 'too many {status_code} error responses' + + +class SecurityWarning(HTTPWarning): + "Warned when performing security reducing actions" + pass + + +class SubjectAltNameWarning(SecurityWarning): + "Warned when connecting to a host with a certificate missing a SAN." + pass + + +class InsecureRequestWarning(SecurityWarning): + "Warned when making an unverified HTTPS request." + pass + + +class SystemTimeWarning(SecurityWarning): + "Warned when system time is suspected to be wrong" + pass + + +class InsecurePlatformWarning(SecurityWarning): + "Warned when certain SSL configuration is not available on a platform." + pass + + +class SNIMissingWarning(HTTPWarning): + "Warned when making a HTTPS request without SNI available." + pass + + +class DependencyWarning(HTTPWarning): + """ + Warned when an attempt is made to import a module with missing optional + dependencies. + """ + pass + + +class ResponseNotChunked(ProtocolError, ValueError): + "Response needs to be chunked in order to read it as chunks." + pass + + +class BodyNotHttplibCompatible(HTTPError): + """ + Body should be httplib.HTTPResponse like (have an fp attribute which + returns raw chunks) for read_chunked(). 
+ """ + pass + + +class IncompleteRead(HTTPError, httplib_IncompleteRead): + """ + Response length doesn't match expected Content-Length + + Subclass of http_client.IncompleteRead to allow int value + for `partial` to avoid creating large objects on streamed + reads. + """ + def __init__(self, partial, expected): + super(IncompleteRead, self).__init__(partial, expected) + + def __repr__(self): + return ('IncompleteRead(%i bytes read, ' + '%i more expected)' % (self.partial, self.expected)) + + +class InvalidHeader(HTTPError): + "The header provided was somehow invalid." + pass + + +class ProxySchemeUnknown(AssertionError, ValueError): + "ProxyManager does not support the supplied scheme" + # TODO(t-8ch): Stop inheriting from AssertionError in v2.0. + + def __init__(self, scheme): + message = "Not supported proxy scheme %s" % scheme + super(ProxySchemeUnknown, self).__init__(message) + + +class HeaderParsingError(HTTPError): + "Raised by assert_header_parsing, but we convert it to a log.warning statement." + def __init__(self, defects, unparsed_data): + message = '%s, unparsed data: %r' % (defects or 'Unknown', unparsed_data) + super(HeaderParsingError, self).__init__(message) + + +class UnrewindableBodyError(HTTPError): + "urllib3 encountered an error when trying to rewind a body" + pass diff --git a/env/lib/python3.6/site-packages/urllib3/fields.py b/env/lib/python3.6/site-packages/urllib3/fields.py new file mode 100755 index 0000000..37fe64a --- /dev/null +++ b/env/lib/python3.6/site-packages/urllib3/fields.py @@ -0,0 +1,178 @@ +from __future__ import absolute_import +import email.utils +import mimetypes + +from .packages import six + + +def guess_content_type(filename, default='application/octet-stream'): + """ + Guess the "Content-Type" of a file. + + :param filename: + The filename to guess the "Content-Type" of using :mod:`mimetypes`. + :param default: + If no "Content-Type" can be guessed, default to `default`. + """ + if filename: + return mimetypes.guess_type(filename)[0] or default + return default + + +def format_header_param(name, value): + """ + Helper function to format and quote a single header parameter. + + Particularly useful for header parameters which might contain + non-ASCII values, like file names. This follows RFC 2231, as + suggested by RFC 2388 Section 4.4. + + :param name: + The name of the parameter, a string expected to be ASCII only. + :param value: + The value of the parameter, provided as a unicode string. + """ + if not any(ch in value for ch in '"\\\r\n'): + result = '%s="%s"' % (name, value) + try: + result.encode('ascii') + except (UnicodeEncodeError, UnicodeDecodeError): + pass + else: + return result + if not six.PY3 and isinstance(value, six.text_type): # Python 2: + value = value.encode('utf-8') + value = email.utils.encode_rfc2231(value, 'utf-8') + value = '%s*=%s' % (name, value) + return value + + +class RequestField(object): + """ + A data container for request body parameters. + + :param name: + The name of this request field. + :param data: + The data/value body. + :param filename: + An optional filename of the request field. + :param headers: + An optional dict-like object of headers to initially use for the field. 
+ """ + def __init__(self, name, data, filename=None, headers=None): + self._name = name + self._filename = filename + self.data = data + self.headers = {} + if headers: + self.headers = dict(headers) + + @classmethod + def from_tuples(cls, fieldname, value): + """ + A :class:`~urllib3.fields.RequestField` factory from old-style tuple parameters. + + Supports constructing :class:`~urllib3.fields.RequestField` from + parameter of key/value strings AND key/filetuple. A filetuple is a + (filename, data, MIME type) tuple where the MIME type is optional. + For example:: + + 'foo': 'bar', + 'fakefile': ('foofile.txt', 'contents of foofile'), + 'realfile': ('barfile.txt', open('realfile').read()), + 'typedfile': ('bazfile.bin', open('bazfile').read(), 'image/jpeg'), + 'nonamefile': 'contents of nonamefile field', + + Field names and filenames must be unicode. + """ + if isinstance(value, tuple): + if len(value) == 3: + filename, data, content_type = value + else: + filename, data = value + content_type = guess_content_type(filename) + else: + filename = None + content_type = None + data = value + + request_param = cls(fieldname, data, filename=filename) + request_param.make_multipart(content_type=content_type) + + return request_param + + def _render_part(self, name, value): + """ + Overridable helper function to format a single header parameter. + + :param name: + The name of the parameter, a string expected to be ASCII only. + :param value: + The value of the parameter, provided as a unicode string. + """ + return format_header_param(name, value) + + def _render_parts(self, header_parts): + """ + Helper function to format and quote a single header. + + Useful for single headers that are composed of multiple items. E.g., + 'Content-Disposition' fields. + + :param header_parts: + A sequence of (k, v) tuples or a :class:`dict` of (k, v) to format + as `k1="v1"; k2="v2"; ...`. + """ + parts = [] + iterable = header_parts + if isinstance(header_parts, dict): + iterable = header_parts.items() + + for name, value in iterable: + if value is not None: + parts.append(self._render_part(name, value)) + + return '; '.join(parts) + + def render_headers(self): + """ + Renders the headers for this request field. + """ + lines = [] + + sort_keys = ['Content-Disposition', 'Content-Type', 'Content-Location'] + for sort_key in sort_keys: + if self.headers.get(sort_key, False): + lines.append('%s: %s' % (sort_key, self.headers[sort_key])) + + for header_name, header_value in self.headers.items(): + if header_name not in sort_keys: + if header_value: + lines.append('%s: %s' % (header_name, header_value)) + + lines.append('\r\n') + return '\r\n'.join(lines) + + def make_multipart(self, content_disposition=None, content_type=None, + content_location=None): + """ + Makes this request field into a multipart request field. + + This method overrides "Content-Disposition", "Content-Type" and + "Content-Location" headers to the request parameter. + + :param content_type: + The 'Content-Type' of the request body. + :param content_location: + The 'Content-Location' of the request body. 
+ + """ + self.headers['Content-Disposition'] = content_disposition or 'form-data' + self.headers['Content-Disposition'] += '; '.join([ + '', self._render_parts( + (('name', self._name), ('filename', self._filename)) + ) + ]) + self.headers['Content-Type'] = content_type + self.headers['Content-Location'] = content_location diff --git a/env/lib/python3.6/site-packages/urllib3/filepost.py b/env/lib/python3.6/site-packages/urllib3/filepost.py new file mode 100755 index 0000000..78f1e19 --- /dev/null +++ b/env/lib/python3.6/site-packages/urllib3/filepost.py @@ -0,0 +1,98 @@ +from __future__ import absolute_import +import binascii +import codecs +import os + +from io import BytesIO + +from .packages import six +from .packages.six import b +from .fields import RequestField + +writer = codecs.lookup('utf-8')[3] + + +def choose_boundary(): + """ + Our embarrassingly-simple replacement for mimetools.choose_boundary. + """ + boundary = binascii.hexlify(os.urandom(16)) + if six.PY3: + boundary = boundary.decode('ascii') + return boundary + + +def iter_field_objects(fields): + """ + Iterate over fields. + + Supports list of (k, v) tuples and dicts, and lists of + :class:`~urllib3.fields.RequestField`. + + """ + if isinstance(fields, dict): + i = six.iteritems(fields) + else: + i = iter(fields) + + for field in i: + if isinstance(field, RequestField): + yield field + else: + yield RequestField.from_tuples(*field) + + +def iter_fields(fields): + """ + .. deprecated:: 1.6 + + Iterate over fields. + + The addition of :class:`~urllib3.fields.RequestField` makes this function + obsolete. Instead, use :func:`iter_field_objects`, which returns + :class:`~urllib3.fields.RequestField` objects. + + Supports list of (k, v) tuples and dicts. + """ + if isinstance(fields, dict): + return ((k, v) for k, v in six.iteritems(fields)) + + return ((k, v) for k, v in fields) + + +def encode_multipart_formdata(fields, boundary=None): + """ + Encode a dictionary of ``fields`` using the multipart/form-data MIME format. + + :param fields: + Dictionary of fields or list of (key, :class:`~urllib3.fields.RequestField`). + + :param boundary: + If not specified, then a random boundary will be generated using + :func:`urllib3.filepost.choose_boundary`. + """ + body = BytesIO() + if boundary is None: + boundary = choose_boundary() + + for field in iter_field_objects(fields): + body.write(b('--%s\r\n' % (boundary))) + + writer(body).write(field.render_headers()) + data = field.data + + if isinstance(data, int): + data = str(data) # Backwards compatibility + + if isinstance(data, six.text_type): + writer(body).write(data) + else: + body.write(data) + + body.write(b'\r\n') + + body.write(b('--%s--\r\n' % (boundary))) + + content_type = str('multipart/form-data; boundary=%s' % boundary) + + return body.getvalue(), content_type diff --git a/env/lib/python3.6/site-packages/urllib3/packages/backports/makefile.py b/env/lib/python3.6/site-packages/urllib3/packages/backports/makefile.py new file mode 100755 index 0000000..75b80dc --- /dev/null +++ b/env/lib/python3.6/site-packages/urllib3/packages/backports/makefile.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +""" +backports.makefile +~~~~~~~~~~~~~~~~~~ + +Backports the Python 3 ``socket.makefile`` method for use with anything that +wants to create a "fake" socket object. +""" +import io + +from socket import SocketIO + + +def backport_makefile(self, mode="r", buffering=None, encoding=None, + errors=None, newline=None): + """ + Backport of ``socket.makefile`` from Python 3.5. 
+ """ + if not set(mode) <= set(["r", "w", "b"]): + raise ValueError( + "invalid mode %r (only r, w, b allowed)" % (mode,) + ) + writing = "w" in mode + reading = "r" in mode or not writing + assert reading or writing + binary = "b" in mode + rawmode = "" + if reading: + rawmode += "r" + if writing: + rawmode += "w" + raw = SocketIO(self, rawmode) + self._makefile_refs += 1 + if buffering is None: + buffering = -1 + if buffering < 0: + buffering = io.DEFAULT_BUFFER_SIZE + if buffering == 0: + if not binary: + raise ValueError("unbuffered streams must be binary") + return raw + if reading and writing: + buffer = io.BufferedRWPair(raw, raw, buffering) + elif reading: + buffer = io.BufferedReader(raw, buffering) + else: + assert writing + buffer = io.BufferedWriter(raw, buffering) + if binary: + return buffer + text = io.TextIOWrapper(buffer, encoding, errors, newline) + text.mode = mode + return text diff --git a/env/lib/python3.6/site-packages/urllib3/packages/ordered_dict.py b/env/lib/python3.6/site-packages/urllib3/packages/ordered_dict.py new file mode 100755 index 0000000..4479363 --- /dev/null +++ b/env/lib/python3.6/site-packages/urllib3/packages/ordered_dict.py @@ -0,0 +1,259 @@ +# Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy. +# Passes Python2.7's test suite and incorporates all the latest updates. +# Copyright 2009 Raymond Hettinger, released under the MIT License. +# http://code.activestate.com/recipes/576693/ +try: + from thread import get_ident as _get_ident +except ImportError: + from dummy_thread import get_ident as _get_ident + +try: + from _abcoll import KeysView, ValuesView, ItemsView +except ImportError: + pass + + +class OrderedDict(dict): + 'Dictionary that remembers insertion order' + # An inherited dict maps keys to values. + # The inherited dict provides __getitem__, __len__, __contains__, and get. + # The remaining methods are order-aware. + # Big-O running times for all methods are the same as for regular dictionaries. + + # The internal self.__map dictionary maps keys to links in a doubly linked list. + # The circular doubly linked list starts and ends with a sentinel element. + # The sentinel element never gets deleted (this simplifies the algorithm). + # Each link is stored as a list of length three: [PREV, NEXT, KEY]. + + def __init__(self, *args, **kwds): + '''Initialize an ordered dictionary. Signature is the same as for + regular dictionaries, but keyword arguments are not recommended + because their insertion order is arbitrary. + + ''' + if len(args) > 1: + raise TypeError('expected at most 1 arguments, got %d' % len(args)) + try: + self.__root + except AttributeError: + self.__root = root = [] # sentinel node + root[:] = [root, root, None] + self.__map = {} + self.__update(*args, **kwds) + + def __setitem__(self, key, value, dict_setitem=dict.__setitem__): + 'od.__setitem__(i, y) <==> od[i]=y' + # Setting a new item creates a new link which goes at the end of the linked + # list, and the inherited dictionary is updated with the new key/value pair. + if key not in self: + root = self.__root + last = root[0] + last[1] = root[0] = self.__map[key] = [last, root, key] + dict_setitem(self, key, value) + + def __delitem__(self, key, dict_delitem=dict.__delitem__): + 'od.__delitem__(y) <==> del od[y]' + # Deleting an existing item uses self.__map to find the link which is + # then removed by updating the links in the predecessor and successor nodes. 
+ dict_delitem(self, key) + link_prev, link_next, key = self.__map.pop(key) + link_prev[1] = link_next + link_next[0] = link_prev + + def __iter__(self): + 'od.__iter__() <==> iter(od)' + root = self.__root + curr = root[1] + while curr is not root: + yield curr[2] + curr = curr[1] + + def __reversed__(self): + 'od.__reversed__() <==> reversed(od)' + root = self.__root + curr = root[0] + while curr is not root: + yield curr[2] + curr = curr[0] + + def clear(self): + 'od.clear() -> None. Remove all items from od.' + try: + for node in self.__map.itervalues(): + del node[:] + root = self.__root + root[:] = [root, root, None] + self.__map.clear() + except AttributeError: + pass + dict.clear(self) + + def popitem(self, last=True): + '''od.popitem() -> (k, v), return and remove a (key, value) pair. + Pairs are returned in LIFO order if last is true or FIFO order if false. + + ''' + if not self: + raise KeyError('dictionary is empty') + root = self.__root + if last: + link = root[0] + link_prev = link[0] + link_prev[1] = root + root[0] = link_prev + else: + link = root[1] + link_next = link[1] + root[1] = link_next + link_next[0] = root + key = link[2] + del self.__map[key] + value = dict.pop(self, key) + return key, value + + # -- the following methods do not depend on the internal structure -- + + def keys(self): + 'od.keys() -> list of keys in od' + return list(self) + + def values(self): + 'od.values() -> list of values in od' + return [self[key] for key in self] + + def items(self): + 'od.items() -> list of (key, value) pairs in od' + return [(key, self[key]) for key in self] + + def iterkeys(self): + 'od.iterkeys() -> an iterator over the keys in od' + return iter(self) + + def itervalues(self): + 'od.itervalues -> an iterator over the values in od' + for k in self: + yield self[k] + + def iteritems(self): + 'od.iteritems -> an iterator over the (key, value) items in od' + for k in self: + yield (k, self[k]) + + def update(*args, **kwds): + '''od.update(E, **F) -> None. Update od from dict/iterable E and F. + + If E is a dict instance, does: for k in E: od[k] = E[k] + If E has a .keys() method, does: for k in E.keys(): od[k] = E[k] + Or if E is an iterable of items, does: for k, v in E: od[k] = v + In either case, this is followed by: for k, v in F.items(): od[k] = v + + ''' + if len(args) > 2: + raise TypeError('update() takes at most 2 positional ' + 'arguments (%d given)' % (len(args),)) + elif not args: + raise TypeError('update() takes at least 1 argument (0 given)') + self = args[0] + # Make progressively weaker assumptions about "other" + other = () + if len(args) == 2: + other = args[1] + if isinstance(other, dict): + for key in other: + self[key] = other[key] + elif hasattr(other, 'keys'): + for key in other.keys(): + self[key] = other[key] + else: + for key, value in other: + self[key] = value + for key, value in kwds.items(): + self[key] = value + + __update = update # let subclasses override update without breaking __init__ + + __marker = object() + + def pop(self, key, default=__marker): + '''od.pop(k[,d]) -> v, remove specified key and return the corresponding value. + If key is not found, d is returned if given, otherwise KeyError is raised. 
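# Illustrative sketch, not part of the vendored file: the backport above targets
# old Python 2 releases; its behaviour matches collections.OrderedDict, which is
# used here so the sketch also runs on Python 3.
from collections import OrderedDict

d = OrderedDict()
d['b'], d['a'], d['c'] = 1, 2, 3
assert list(d.keys()) == ['b', 'a', 'c']    # insertion order is preserved
assert d.popitem(last=False) == ('b', 1)    # FIFO pop from the front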
+ + ''' + if key in self: + result = self[key] + del self[key] + return result + if default is self.__marker: + raise KeyError(key) + return default + + def setdefault(self, key, default=None): + 'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od' + if key in self: + return self[key] + self[key] = default + return default + + def __repr__(self, _repr_running={}): + 'od.__repr__() <==> repr(od)' + call_key = id(self), _get_ident() + if call_key in _repr_running: + return '...' + _repr_running[call_key] = 1 + try: + if not self: + return '%s()' % (self.__class__.__name__,) + return '%s(%r)' % (self.__class__.__name__, self.items()) + finally: + del _repr_running[call_key] + + def __reduce__(self): + 'Return state information for pickling' + items = [[k, self[k]] for k in self] + inst_dict = vars(self).copy() + for k in vars(OrderedDict()): + inst_dict.pop(k, None) + if inst_dict: + return (self.__class__, (items,), inst_dict) + return self.__class__, (items,) + + def copy(self): + 'od.copy() -> a shallow copy of od' + return self.__class__(self) + + @classmethod + def fromkeys(cls, iterable, value=None): + '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S + and values equal to v (which defaults to None). + + ''' + d = cls() + for key in iterable: + d[key] = value + return d + + def __eq__(self, other): + '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive + while comparison to a regular mapping is order-insensitive. + + ''' + if isinstance(other, OrderedDict): + return len(self)==len(other) and self.items() == other.items() + return dict.__eq__(self, other) + + def __ne__(self, other): + return not self == other + + # -- the following methods are only used in Python 2.7 -- + + def viewkeys(self): + "od.viewkeys() -> a set-like object providing a view on od's keys" + return KeysView(self) + + def viewvalues(self): + "od.viewvalues() -> an object providing a view on od's values" + return ValuesView(self) + + def viewitems(self): + "od.viewitems() -> a set-like object providing a view on od's items" + return ItemsView(self) diff --git a/env/lib/python3.6/site-packages/urllib3/packages/six.py b/env/lib/python3.6/site-packages/urllib3/packages/six.py new file mode 100755 index 0000000..190c023 --- /dev/null +++ b/env/lib/python3.6/site-packages/urllib3/packages/six.py @@ -0,0 +1,868 @@ +"""Utilities for writing code that runs on Python 2 and 3""" + +# Copyright (c) 2010-2015 Benjamin Peterson +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +from __future__ import absolute_import + +import functools +import itertools +import operator +import sys +import types + +__author__ = "Benjamin Peterson " +__version__ = "1.10.0" + + +# Useful for very coarse version differentiation. +PY2 = sys.version_info[0] == 2 +PY3 = sys.version_info[0] == 3 +PY34 = sys.version_info[0:2] >= (3, 4) + +if PY3: + string_types = str, + integer_types = int, + class_types = type, + text_type = str + binary_type = bytes + + MAXSIZE = sys.maxsize +else: + string_types = basestring, + integer_types = (int, long) + class_types = (type, types.ClassType) + text_type = unicode + binary_type = str + + if sys.platform.startswith("java"): + # Jython always uses 32 bits. + MAXSIZE = int((1 << 31) - 1) + else: + # It's possible to have sizeof(long) != sizeof(Py_ssize_t). + class X(object): + + def __len__(self): + return 1 << 31 + try: + len(X()) + except OverflowError: + # 32-bit + MAXSIZE = int((1 << 31) - 1) + else: + # 64-bit + MAXSIZE = int((1 << 63) - 1) + del X + + +def _add_doc(func, doc): + """Add documentation to a function.""" + func.__doc__ = doc + + +def _import_module(name): + """Import module, returning the module after the last dot.""" + __import__(name) + return sys.modules[name] + + +class _LazyDescr(object): + + def __init__(self, name): + self.name = name + + def __get__(self, obj, tp): + result = self._resolve() + setattr(obj, self.name, result) # Invokes __set__. + try: + # This is a bit ugly, but it avoids running this again by + # removing this descriptor. + delattr(obj.__class__, self.name) + except AttributeError: + pass + return result + + +class MovedModule(_LazyDescr): + + def __init__(self, name, old, new=None): + super(MovedModule, self).__init__(name) + if PY3: + if new is None: + new = name + self.mod = new + else: + self.mod = old + + def _resolve(self): + return _import_module(self.mod) + + def __getattr__(self, attr): + _module = self._resolve() + value = getattr(_module, attr) + setattr(self, attr, value) + return value + + +class _LazyModule(types.ModuleType): + + def __init__(self, name): + super(_LazyModule, self).__init__(name) + self.__doc__ = self.__class__.__doc__ + + def __dir__(self): + attrs = ["__doc__", "__name__"] + attrs += [attr.name for attr in self._moved_attributes] + return attrs + + # Subclasses should override this + _moved_attributes = [] + + +class MovedAttribute(_LazyDescr): + + def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): + super(MovedAttribute, self).__init__(name) + if PY3: + if new_mod is None: + new_mod = name + self.mod = new_mod + if new_attr is None: + if old_attr is None: + new_attr = name + else: + new_attr = old_attr + self.attr = new_attr + else: + self.mod = old_mod + if old_attr is None: + old_attr = name + self.attr = old_attr + + def _resolve(self): + module = _import_module(self.mod) + return getattr(module, self.attr) + + +class _SixMetaPathImporter(object): + + """ + A meta path importer to import six.moves and its submodules. + + This class implements a PEP302 finder and loader. 
It should be compatible + with Python 2.5 and all existing versions of Python3 + """ + + def __init__(self, six_module_name): + self.name = six_module_name + self.known_modules = {} + + def _add_module(self, mod, *fullnames): + for fullname in fullnames: + self.known_modules[self.name + "." + fullname] = mod + + def _get_module(self, fullname): + return self.known_modules[self.name + "." + fullname] + + def find_module(self, fullname, path=None): + if fullname in self.known_modules: + return self + return None + + def __get_module(self, fullname): + try: + return self.known_modules[fullname] + except KeyError: + raise ImportError("This loader does not know module " + fullname) + + def load_module(self, fullname): + try: + # in case of a reload + return sys.modules[fullname] + except KeyError: + pass + mod = self.__get_module(fullname) + if isinstance(mod, MovedModule): + mod = mod._resolve() + else: + mod.__loader__ = self + sys.modules[fullname] = mod + return mod + + def is_package(self, fullname): + """ + Return true, if the named module is a package. + + We need this method to get correct spec objects with + Python 3.4 (see PEP451) + """ + return hasattr(self.__get_module(fullname), "__path__") + + def get_code(self, fullname): + """Return None + + Required, if is_package is implemented""" + self.__get_module(fullname) # eventually raises ImportError + return None + get_source = get_code # same as get_code + +_importer = _SixMetaPathImporter(__name__) + + +class _MovedItems(_LazyModule): + + """Lazy loading of moved objects""" + __path__ = [] # mark as package + + +_moved_attributes = [ + MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"), + MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"), + MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"), + MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"), + MovedAttribute("intern", "__builtin__", "sys"), + MovedAttribute("map", "itertools", "builtins", "imap", "map"), + MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"), + MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"), + MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"), + MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"), + MovedAttribute("reduce", "__builtin__", "functools"), + MovedAttribute("shlex_quote", "pipes", "shlex", "quote"), + MovedAttribute("StringIO", "StringIO", "io"), + MovedAttribute("UserDict", "UserDict", "collections"), + MovedAttribute("UserList", "UserList", "collections"), + MovedAttribute("UserString", "UserString", "collections"), + MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"), + MovedAttribute("zip", "itertools", "builtins", "izip", "zip"), + MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"), + MovedModule("builtins", "__builtin__"), + MovedModule("configparser", "ConfigParser"), + MovedModule("copyreg", "copy_reg"), + MovedModule("dbm_gnu", "gdbm", "dbm.gnu"), + MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"), + MovedModule("http_cookiejar", "cookielib", "http.cookiejar"), + MovedModule("http_cookies", "Cookie", "http.cookies"), + MovedModule("html_entities", "htmlentitydefs", "html.entities"), + MovedModule("html_parser", "HTMLParser", "html.parser"), + MovedModule("http_client", "httplib", "http.client"), + MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"), + 
MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"), + MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"), + MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"), + MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"), + MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"), + MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"), + MovedModule("cPickle", "cPickle", "pickle"), + MovedModule("queue", "Queue"), + MovedModule("reprlib", "repr"), + MovedModule("socketserver", "SocketServer"), + MovedModule("_thread", "thread", "_thread"), + MovedModule("tkinter", "Tkinter"), + MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"), + MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"), + MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"), + MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"), + MovedModule("tkinter_tix", "Tix", "tkinter.tix"), + MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"), + MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"), + MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"), + MovedModule("tkinter_colorchooser", "tkColorChooser", + "tkinter.colorchooser"), + MovedModule("tkinter_commondialog", "tkCommonDialog", + "tkinter.commondialog"), + MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"), + MovedModule("tkinter_font", "tkFont", "tkinter.font"), + MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"), + MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", + "tkinter.simpledialog"), + MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"), + MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"), + MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"), + MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"), + MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"), + MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"), +] +# Add windows specific modules. +if sys.platform == "win32": + _moved_attributes += [ + MovedModule("winreg", "_winreg"), + ] + +for attr in _moved_attributes: + setattr(_MovedItems, attr.name, attr) + if isinstance(attr, MovedModule): + _importer._add_module(attr, "moves." 
+ attr.name) +del attr + +_MovedItems._moved_attributes = _moved_attributes + +moves = _MovedItems(__name__ + ".moves") +_importer._add_module(moves, "moves") + + +class Module_six_moves_urllib_parse(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_parse""" + + +_urllib_parse_moved_attributes = [ + MovedAttribute("ParseResult", "urlparse", "urllib.parse"), + MovedAttribute("SplitResult", "urlparse", "urllib.parse"), + MovedAttribute("parse_qs", "urlparse", "urllib.parse"), + MovedAttribute("parse_qsl", "urlparse", "urllib.parse"), + MovedAttribute("urldefrag", "urlparse", "urllib.parse"), + MovedAttribute("urljoin", "urlparse", "urllib.parse"), + MovedAttribute("urlparse", "urlparse", "urllib.parse"), + MovedAttribute("urlsplit", "urlparse", "urllib.parse"), + MovedAttribute("urlunparse", "urlparse", "urllib.parse"), + MovedAttribute("urlunsplit", "urlparse", "urllib.parse"), + MovedAttribute("quote", "urllib", "urllib.parse"), + MovedAttribute("quote_plus", "urllib", "urllib.parse"), + MovedAttribute("unquote", "urllib", "urllib.parse"), + MovedAttribute("unquote_plus", "urllib", "urllib.parse"), + MovedAttribute("urlencode", "urllib", "urllib.parse"), + MovedAttribute("splitquery", "urllib", "urllib.parse"), + MovedAttribute("splittag", "urllib", "urllib.parse"), + MovedAttribute("splituser", "urllib", "urllib.parse"), + MovedAttribute("uses_fragment", "urlparse", "urllib.parse"), + MovedAttribute("uses_netloc", "urlparse", "urllib.parse"), + MovedAttribute("uses_params", "urlparse", "urllib.parse"), + MovedAttribute("uses_query", "urlparse", "urllib.parse"), + MovedAttribute("uses_relative", "urlparse", "urllib.parse"), +] +for attr in _urllib_parse_moved_attributes: + setattr(Module_six_moves_urllib_parse, attr.name, attr) +del attr + +Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes + +_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"), + "moves.urllib_parse", "moves.urllib.parse") + + +class Module_six_moves_urllib_error(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_error""" + + +_urllib_error_moved_attributes = [ + MovedAttribute("URLError", "urllib2", "urllib.error"), + MovedAttribute("HTTPError", "urllib2", "urllib.error"), + MovedAttribute("ContentTooShortError", "urllib", "urllib.error"), +] +for attr in _urllib_error_moved_attributes: + setattr(Module_six_moves_urllib_error, attr.name, attr) +del attr + +Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes + +_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"), + "moves.urllib_error", "moves.urllib.error") + + +class Module_six_moves_urllib_request(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_request""" + + +_urllib_request_moved_attributes = [ + MovedAttribute("urlopen", "urllib2", "urllib.request"), + MovedAttribute("install_opener", "urllib2", "urllib.request"), + MovedAttribute("build_opener", "urllib2", "urllib.request"), + MovedAttribute("pathname2url", "urllib", "urllib.request"), + MovedAttribute("url2pathname", "urllib", "urllib.request"), + MovedAttribute("getproxies", "urllib", "urllib.request"), + MovedAttribute("Request", "urllib2", "urllib.request"), + MovedAttribute("OpenerDirector", "urllib2", "urllib.request"), + MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPCookieProcessor", "urllib2", 
"urllib.request"), + MovedAttribute("ProxyHandler", "urllib2", "urllib.request"), + MovedAttribute("BaseHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"), + MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"), + MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"), + MovedAttribute("FileHandler", "urllib2", "urllib.request"), + MovedAttribute("FTPHandler", "urllib2", "urllib.request"), + MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"), + MovedAttribute("UnknownHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"), + MovedAttribute("urlretrieve", "urllib", "urllib.request"), + MovedAttribute("urlcleanup", "urllib", "urllib.request"), + MovedAttribute("URLopener", "urllib", "urllib.request"), + MovedAttribute("FancyURLopener", "urllib", "urllib.request"), + MovedAttribute("proxy_bypass", "urllib", "urllib.request"), +] +for attr in _urllib_request_moved_attributes: + setattr(Module_six_moves_urllib_request, attr.name, attr) +del attr + +Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes + +_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"), + "moves.urllib_request", "moves.urllib.request") + + +class Module_six_moves_urllib_response(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_response""" + + +_urllib_response_moved_attributes = [ + MovedAttribute("addbase", "urllib", "urllib.response"), + MovedAttribute("addclosehook", "urllib", "urllib.response"), + MovedAttribute("addinfo", "urllib", "urllib.response"), + MovedAttribute("addinfourl", "urllib", "urllib.response"), +] +for attr in _urllib_response_moved_attributes: + setattr(Module_six_moves_urllib_response, attr.name, attr) +del attr + +Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes + +_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"), + "moves.urllib_response", "moves.urllib.response") + + +class Module_six_moves_urllib_robotparser(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_robotparser""" + + +_urllib_robotparser_moved_attributes = [ + MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"), +] +for attr in _urllib_robotparser_moved_attributes: + setattr(Module_six_moves_urllib_robotparser, attr.name, attr) +del attr + +Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes + +_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"), + "moves.urllib_robotparser", "moves.urllib.robotparser") + + +class Module_six_moves_urllib(types.ModuleType): + + """Create a six.moves.urllib namespace that resembles the Python 3 namespace""" + __path__ = [] # mark as package + parse = _importer._get_module("moves.urllib_parse") + error = _importer._get_module("moves.urllib_error") + request = 
_importer._get_module("moves.urllib_request") + response = _importer._get_module("moves.urllib_response") + robotparser = _importer._get_module("moves.urllib_robotparser") + + def __dir__(self): + return ['parse', 'error', 'request', 'response', 'robotparser'] + +_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"), + "moves.urllib") + + +def add_move(move): + """Add an item to six.moves.""" + setattr(_MovedItems, move.name, move) + + +def remove_move(name): + """Remove item from six.moves.""" + try: + delattr(_MovedItems, name) + except AttributeError: + try: + del moves.__dict__[name] + except KeyError: + raise AttributeError("no such move, %r" % (name,)) + + +if PY3: + _meth_func = "__func__" + _meth_self = "__self__" + + _func_closure = "__closure__" + _func_code = "__code__" + _func_defaults = "__defaults__" + _func_globals = "__globals__" +else: + _meth_func = "im_func" + _meth_self = "im_self" + + _func_closure = "func_closure" + _func_code = "func_code" + _func_defaults = "func_defaults" + _func_globals = "func_globals" + + +try: + advance_iterator = next +except NameError: + def advance_iterator(it): + return it.next() +next = advance_iterator + + +try: + callable = callable +except NameError: + def callable(obj): + return any("__call__" in klass.__dict__ for klass in type(obj).__mro__) + + +if PY3: + def get_unbound_function(unbound): + return unbound + + create_bound_method = types.MethodType + + def create_unbound_method(func, cls): + return func + + Iterator = object +else: + def get_unbound_function(unbound): + return unbound.im_func + + def create_bound_method(func, obj): + return types.MethodType(func, obj, obj.__class__) + + def create_unbound_method(func, cls): + return types.MethodType(func, None, cls) + + class Iterator(object): + + def next(self): + return type(self).__next__(self) + + callable = callable +_add_doc(get_unbound_function, + """Get the function out of a possibly unbound function""") + + +get_method_function = operator.attrgetter(_meth_func) +get_method_self = operator.attrgetter(_meth_self) +get_function_closure = operator.attrgetter(_func_closure) +get_function_code = operator.attrgetter(_func_code) +get_function_defaults = operator.attrgetter(_func_defaults) +get_function_globals = operator.attrgetter(_func_globals) + + +if PY3: + def iterkeys(d, **kw): + return iter(d.keys(**kw)) + + def itervalues(d, **kw): + return iter(d.values(**kw)) + + def iteritems(d, **kw): + return iter(d.items(**kw)) + + def iterlists(d, **kw): + return iter(d.lists(**kw)) + + viewkeys = operator.methodcaller("keys") + + viewvalues = operator.methodcaller("values") + + viewitems = operator.methodcaller("items") +else: + def iterkeys(d, **kw): + return d.iterkeys(**kw) + + def itervalues(d, **kw): + return d.itervalues(**kw) + + def iteritems(d, **kw): + return d.iteritems(**kw) + + def iterlists(d, **kw): + return d.iterlists(**kw) + + viewkeys = operator.methodcaller("viewkeys") + + viewvalues = operator.methodcaller("viewvalues") + + viewitems = operator.methodcaller("viewitems") + +_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.") +_add_doc(itervalues, "Return an iterator over the values of a dictionary.") +_add_doc(iteritems, + "Return an iterator over the (key, value) pairs of a dictionary.") +_add_doc(iterlists, + "Return an iterator over the (key, [values]) pairs of a dictionary.") + + +if PY3: + def b(s): + return s.encode("latin-1") + + def u(s): + return s + unichr = chr + import struct + int2byte = struct.Struct(">B").pack 
+ del struct + byte2int = operator.itemgetter(0) + indexbytes = operator.getitem + iterbytes = iter + import io + StringIO = io.StringIO + BytesIO = io.BytesIO + _assertCountEqual = "assertCountEqual" + if sys.version_info[1] <= 1: + _assertRaisesRegex = "assertRaisesRegexp" + _assertRegex = "assertRegexpMatches" + else: + _assertRaisesRegex = "assertRaisesRegex" + _assertRegex = "assertRegex" +else: + def b(s): + return s + # Workaround for standalone backslash + + def u(s): + return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape") + unichr = unichr + int2byte = chr + + def byte2int(bs): + return ord(bs[0]) + + def indexbytes(buf, i): + return ord(buf[i]) + iterbytes = functools.partial(itertools.imap, ord) + import StringIO + StringIO = BytesIO = StringIO.StringIO + _assertCountEqual = "assertItemsEqual" + _assertRaisesRegex = "assertRaisesRegexp" + _assertRegex = "assertRegexpMatches" +_add_doc(b, """Byte literal""") +_add_doc(u, """Text literal""") + + +def assertCountEqual(self, *args, **kwargs): + return getattr(self, _assertCountEqual)(*args, **kwargs) + + +def assertRaisesRegex(self, *args, **kwargs): + return getattr(self, _assertRaisesRegex)(*args, **kwargs) + + +def assertRegex(self, *args, **kwargs): + return getattr(self, _assertRegex)(*args, **kwargs) + + +if PY3: + exec_ = getattr(moves.builtins, "exec") + + def reraise(tp, value, tb=None): + if value is None: + value = tp() + if value.__traceback__ is not tb: + raise value.with_traceback(tb) + raise value + +else: + def exec_(_code_, _globs_=None, _locs_=None): + """Execute code in a namespace.""" + if _globs_ is None: + frame = sys._getframe(1) + _globs_ = frame.f_globals + if _locs_ is None: + _locs_ = frame.f_locals + del frame + elif _locs_ is None: + _locs_ = _globs_ + exec("""exec _code_ in _globs_, _locs_""") + + exec_("""def reraise(tp, value, tb=None): + raise tp, value, tb +""") + + +if sys.version_info[:2] == (3, 2): + exec_("""def raise_from(value, from_value): + if from_value is None: + raise value + raise value from from_value +""") +elif sys.version_info[:2] > (3, 2): + exec_("""def raise_from(value, from_value): + raise value from from_value +""") +else: + def raise_from(value, from_value): + raise value + + +print_ = getattr(moves.builtins, "print", None) +if print_ is None: + def print_(*args, **kwargs): + """The new-style print function for Python 2.4 and 2.5.""" + fp = kwargs.pop("file", sys.stdout) + if fp is None: + return + + def write(data): + if not isinstance(data, basestring): + data = str(data) + # If the file has an encoding, encode unicode with it. 
+ if (isinstance(fp, file) and + isinstance(data, unicode) and + fp.encoding is not None): + errors = getattr(fp, "errors", None) + if errors is None: + errors = "strict" + data = data.encode(fp.encoding, errors) + fp.write(data) + want_unicode = False + sep = kwargs.pop("sep", None) + if sep is not None: + if isinstance(sep, unicode): + want_unicode = True + elif not isinstance(sep, str): + raise TypeError("sep must be None or a string") + end = kwargs.pop("end", None) + if end is not None: + if isinstance(end, unicode): + want_unicode = True + elif not isinstance(end, str): + raise TypeError("end must be None or a string") + if kwargs: + raise TypeError("invalid keyword arguments to print()") + if not want_unicode: + for arg in args: + if isinstance(arg, unicode): + want_unicode = True + break + if want_unicode: + newline = unicode("\n") + space = unicode(" ") + else: + newline = "\n" + space = " " + if sep is None: + sep = space + if end is None: + end = newline + for i, arg in enumerate(args): + if i: + write(sep) + write(arg) + write(end) +if sys.version_info[:2] < (3, 3): + _print = print_ + + def print_(*args, **kwargs): + fp = kwargs.get("file", sys.stdout) + flush = kwargs.pop("flush", False) + _print(*args, **kwargs) + if flush and fp is not None: + fp.flush() + +_add_doc(reraise, """Reraise an exception.""") + +if sys.version_info[0:2] < (3, 4): + def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS, + updated=functools.WRAPPER_UPDATES): + def wrapper(f): + f = functools.wraps(wrapped, assigned, updated)(f) + f.__wrapped__ = wrapped + return f + return wrapper +else: + wraps = functools.wraps + + +def with_metaclass(meta, *bases): + """Create a base class with a metaclass.""" + # This requires a bit of explanation: the basic idea is to make a dummy + # metaclass for one level of class instantiation that replaces itself with + # the actual metaclass. + class metaclass(meta): + + def __new__(cls, name, this_bases, d): + return meta(name, bases, d) + return type.__new__(metaclass, 'temporary_class', (), {}) + + +def add_metaclass(metaclass): + """Class decorator for creating a class with a metaclass.""" + def wrapper(cls): + orig_vars = cls.__dict__.copy() + slots = orig_vars.get('__slots__') + if slots is not None: + if isinstance(slots, str): + slots = [slots] + for slots_var in slots: + orig_vars.pop(slots_var) + orig_vars.pop('__dict__', None) + orig_vars.pop('__weakref__', None) + return metaclass(cls.__name__, cls.__bases__, orig_vars) + return wrapper + + +def python_2_unicode_compatible(klass): + """ + A decorator that defines __unicode__ and __str__ methods under Python 2. + Under Python 3 it does nothing. + + To support Python 2 and 3 with a single code base, define a __str__ method + returning text and apply this decorator to the class. + """ + if PY2: + if '__str__' not in klass.__dict__: + raise ValueError("@python_2_unicode_compatible cannot be applied " + "to %s because it doesn't define __str__()." % + klass.__name__) + klass.__unicode__ = klass.__str__ + klass.__str__ = lambda self: self.__unicode__().encode('utf-8') + return klass + + +# Complete the moves implementation. +# This code is at the end of this module to speed up module loading. +# Turn this module into a package. 
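# Illustrative usage sketch, not part of the vendored file: declaring a
# metaclass portably with the add_metaclass decorator defined above. Meta and
# Widget are invented example classes.
from urllib3.packages import six


class Meta(type):
    def __new__(mcls, name, bases, namespace):
        namespace.setdefault('tag', name.lower())
        return super(Meta, mcls).__new__(mcls, name, bases, namespace)


@six.add_metaclass(Meta)
class Widget(object):
    pass


assert type(Widget) is Meta
assert Widget.tag == 'widget'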
+__path__ = [] # required for PEP 302 and PEP 451 +__package__ = __name__ # see PEP 366 @ReservedAssignment +if globals().get("__spec__") is not None: + __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable +# Remove other six meta path importers, since they cause problems. This can +# happen if six is removed from sys.modules and then reloaded. (Setuptools does +# this for some reason.) +if sys.meta_path: + for i, importer in enumerate(sys.meta_path): + # Here's some real nastiness: Another "instance" of the six module might + # be floating around. Therefore, we can't use isinstance() to check for + # the six meta path importer, since the other six instance will have + # inserted an importer with different class. + if (type(importer).__name__ == "_SixMetaPathImporter" and + importer.name == __name__): + del sys.meta_path[i] + break + del i, importer +# Finally, add the importer to the meta path import hook. +sys.meta_path.append(_importer) diff --git a/env/lib/python3.6/site-packages/urllib3/packages/ssl_match_hostname/_implementation.py b/env/lib/python3.6/site-packages/urllib3/packages/ssl_match_hostname/_implementation.py new file mode 100755 index 0000000..1fd42f3 --- /dev/null +++ b/env/lib/python3.6/site-packages/urllib3/packages/ssl_match_hostname/_implementation.py @@ -0,0 +1,157 @@ +"""The match_hostname() function from Python 3.3.3, essential when using SSL.""" + +# Note: This file is under the PSF license as the code comes from the python +# stdlib. http://docs.python.org/3/license.html + +import re +import sys + +# ipaddress has been backported to 2.6+ in pypi. If it is installed on the +# system, use it to handle IPAddress ServerAltnames (this was added in +# python-3.5) otherwise only do DNS matching. This allows +# backports.ssl_match_hostname to continue to be used all the way back to +# python-2.4. +try: + import ipaddress +except ImportError: + ipaddress = None + +__version__ = '3.5.0.1' + + +class CertificateError(ValueError): + pass + + +def _dnsname_match(dn, hostname, max_wildcards=1): + """Matching according to RFC 6125, section 6.4.3 + + http://tools.ietf.org/html/rfc6125#section-6.4.3 + """ + pats = [] + if not dn: + return False + + # Ported from python3-syntax: + # leftmost, *remainder = dn.split(r'.') + parts = dn.split(r'.') + leftmost = parts[0] + remainder = parts[1:] + + wildcards = leftmost.count('*') + if wildcards > max_wildcards: + # Issue #17980: avoid denials of service by refusing more + # than one wildcard per fragment. A survey of established + # policy among SSL implementations showed it to be a + # reasonable choice. + raise CertificateError( + "too many wildcards in certificate DNS name: " + repr(dn)) + + # speed up common case w/o wildcards + if not wildcards: + return dn.lower() == hostname.lower() + + # RFC 6125, section 6.4.3, subitem 1. + # The client SHOULD NOT attempt to match a presented identifier in which + # the wildcard character comprises a label other than the left-most label. + if leftmost == '*': + # When '*' is a fragment by itself, it matches a non-empty dotless + # fragment. + pats.append('[^.]+') + elif leftmost.startswith('xn--') or hostname.startswith('xn--'): + # RFC 6125, section 6.4.3, subitem 3. + # The client SHOULD NOT attempt to match a presented identifier + # where the wildcard character is embedded within an A-label or + # U-label of an internationalized domain name. + pats.append(re.escape(leftmost)) + else: + # Otherwise, '*' matches any dotless string, e.g. 
www* + pats.append(re.escape(leftmost).replace(r'\*', '[^.]*')) + + # add the remaining fragments, ignore any wildcards + for frag in remainder: + pats.append(re.escape(frag)) + + pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE) + return pat.match(hostname) + + +def _to_unicode(obj): + if isinstance(obj, str) and sys.version_info < (3,): + obj = unicode(obj, encoding='ascii', errors='strict') + return obj + +def _ipaddress_match(ipname, host_ip): + """Exact matching of IP addresses. + + RFC 6125 explicitly doesn't define an algorithm for this + (section 1.7.2 - "Out of Scope"). + """ + # OpenSSL may add a trailing newline to a subjectAltName's IP address + # Divergence from upstream: ipaddress can't handle byte str + ip = ipaddress.ip_address(_to_unicode(ipname).rstrip()) + return ip == host_ip + + +def match_hostname(cert, hostname): + """Verify that *cert* (in decoded format as returned by + SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125 + rules are followed, but IP addresses are not accepted for *hostname*. + + CertificateError is raised on failure. On success, the function + returns nothing. + """ + if not cert: + raise ValueError("empty or no certificate, match_hostname needs a " + "SSL socket or SSL context with either " + "CERT_OPTIONAL or CERT_REQUIRED") + try: + # Divergence from upstream: ipaddress can't handle byte str + host_ip = ipaddress.ip_address(_to_unicode(hostname)) + except ValueError: + # Not an IP address (common case) + host_ip = None + except UnicodeError: + # Divergence from upstream: Have to deal with ipaddress not taking + # byte strings. addresses should be all ascii, so we consider it not + # an ipaddress in this case + host_ip = None + except AttributeError: + # Divergence from upstream: Make ipaddress library optional + if ipaddress is None: + host_ip = None + else: + raise + dnsnames = [] + san = cert.get('subjectAltName', ()) + for key, value in san: + if key == 'DNS': + if host_ip is None and _dnsname_match(value, hostname): + return + dnsnames.append(value) + elif key == 'IP Address': + if host_ip is not None and _ipaddress_match(value, host_ip): + return + dnsnames.append(value) + if not dnsnames: + # The subject is only checked when there is no dNSName entry + # in subjectAltName + for sub in cert.get('subject', ()): + for key, value in sub: + # XXX according to RFC 2818, the most specific Common Name + # must be used. 
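# Illustrative usage sketch, not part of the vendored file: exercising the
# vendored match_hostname() with a fabricated certificate dict shaped like the
# value returned by SSLSocket.getpeercert().
from urllib3.packages.ssl_match_hostname._implementation import (
    CertificateError, match_hostname)

cert = {
    'subject': ((('commonName', 'example.com'),),),
    'subjectAltName': (('DNS', 'example.com'), ('DNS', '*.example.com')),
}
match_hostname(cert, 'www.example.com')     # matched by the wildcard SAN
try:
    match_hostname(cert, 'example.org')
except CertificateError as exc:
    print(exc)                              # "doesn't match either of ..."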
+ if key == 'commonName': + if _dnsname_match(value, hostname): + return + dnsnames.append(value) + if len(dnsnames) > 1: + raise CertificateError("hostname %r " + "doesn't match either of %s" + % (hostname, ', '.join(map(repr, dnsnames)))) + elif len(dnsnames) == 1: + raise CertificateError("hostname %r " + "doesn't match %r" + % (hostname, dnsnames[0])) + else: + raise CertificateError("no appropriate commonName or " + "subjectAltName fields were found") diff --git a/env/lib/python3.6/site-packages/urllib3/poolmanager.py b/env/lib/python3.6/site-packages/urllib3/poolmanager.py new file mode 100755 index 0000000..506a3c9 --- /dev/null +++ b/env/lib/python3.6/site-packages/urllib3/poolmanager.py @@ -0,0 +1,449 @@ +from __future__ import absolute_import +import collections +import functools +import logging + +from ._collections import RecentlyUsedContainer +from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool +from .connectionpool import port_by_scheme +from .exceptions import LocationValueError, MaxRetryError, ProxySchemeUnknown +from .packages.six.moves.urllib.parse import urljoin +from .request import RequestMethods +from .util.url import parse_url +from .util.retry import Retry + + +__all__ = ['PoolManager', 'ProxyManager', 'proxy_from_url'] + + +log = logging.getLogger(__name__) + +SSL_KEYWORDS = ('key_file', 'cert_file', 'cert_reqs', 'ca_certs', + 'ssl_version', 'ca_cert_dir', 'ssl_context') + +# All known keyword arguments that could be provided to the pool manager, its +# pools, or the underlying connections. This is used to construct a pool key. +_key_fields = ( + 'key_scheme', # str + 'key_host', # str + 'key_port', # int + 'key_timeout', # int or float or Timeout + 'key_retries', # int or Retry + 'key_strict', # bool + 'key_block', # bool + 'key_source_address', # str + 'key_key_file', # str + 'key_cert_file', # str + 'key_cert_reqs', # str + 'key_ca_certs', # str + 'key_ssl_version', # str + 'key_ca_cert_dir', # str + 'key_ssl_context', # instance of ssl.SSLContext or urllib3.util.ssl_.SSLContext + 'key_maxsize', # int + 'key_headers', # dict + 'key__proxy', # parsed proxy url + 'key__proxy_headers', # dict + 'key_socket_options', # list of (level (int), optname (int), value (int or str)) tuples + 'key__socks_options', # dict + 'key_assert_hostname', # bool or string + 'key_assert_fingerprint', # str +) + +#: The namedtuple class used to construct keys for the connection pool. +#: All custom key schemes should include the fields in this key at a minimum. +PoolKey = collections.namedtuple('PoolKey', _key_fields) + + +def _default_key_normalizer(key_class, request_context): + """ + Create a pool key out of a request context dictionary. + + According to RFC 3986, both the scheme and host are case-insensitive. + Therefore, this function normalizes both before constructing the pool + key for an HTTPS request. If you wish to change this behaviour, provide + alternate callables to ``key_fn_by_scheme``. + + :param key_class: + The class to use when constructing the key. This should be a namedtuple + with the ``scheme`` and ``host`` keys at a minimum. + :type key_class: namedtuple + :param request_context: + A dictionary-like object that contain the context for a request. + :type request_context: dict + + :return: A namedtuple that can be used as a connection pool key. 
+ :rtype: PoolKey + """ + # Since we mutate the dictionary, make a copy first + context = request_context.copy() + context['scheme'] = context['scheme'].lower() + context['host'] = context['host'].lower() + + # These are both dictionaries and need to be transformed into frozensets + for key in ('headers', '_proxy_headers', '_socks_options'): + if key in context and context[key] is not None: + context[key] = frozenset(context[key].items()) + + # The socket_options key may be a list and needs to be transformed into a + # tuple. + socket_opts = context.get('socket_options') + if socket_opts is not None: + context['socket_options'] = tuple(socket_opts) + + # Map the kwargs to the names in the namedtuple - this is necessary since + # namedtuples can't have fields starting with '_'. + for key in list(context.keys()): + context['key_' + key] = context.pop(key) + + # Default to ``None`` for keys missing from the context + for field in key_class._fields: + if field not in context: + context[field] = None + + return key_class(**context) + + +#: A dictionary that maps a scheme to a callable that creates a pool key. +#: This can be used to alter the way pool keys are constructed, if desired. +#: Each PoolManager makes a copy of this dictionary so they can be configured +#: globally here, or individually on the instance. +key_fn_by_scheme = { + 'http': functools.partial(_default_key_normalizer, PoolKey), + 'https': functools.partial(_default_key_normalizer, PoolKey), +} + +pool_classes_by_scheme = { + 'http': HTTPConnectionPool, + 'https': HTTPSConnectionPool, +} + + +class PoolManager(RequestMethods): + """ + Allows for arbitrary requests while transparently keeping track of + necessary connection pools for you. + + :param num_pools: + Number of connection pools to cache before discarding the least + recently used pool. + + :param headers: + Headers to include with all requests, unless other headers are given + explicitly. + + :param \\**connection_pool_kw: + Additional parameters are used to create fresh + :class:`urllib3.connectionpool.ConnectionPool` instances. + + Example:: + + >>> manager = PoolManager(num_pools=2) + >>> r = manager.request('GET', 'http://google.com/') + >>> r = manager.request('GET', 'http://google.com/mail') + >>> r = manager.request('GET', 'http://yahoo.com/') + >>> len(manager.pools) + 2 + + """ + + proxy = None + + def __init__(self, num_pools=10, headers=None, **connection_pool_kw): + RequestMethods.__init__(self, headers) + self.connection_pool_kw = connection_pool_kw + self.pools = RecentlyUsedContainer(num_pools, + dispose_func=lambda p: p.close()) + + # Locally set the pool classes and keys so other PoolManagers can + # override them. + self.pool_classes_by_scheme = pool_classes_by_scheme + self.key_fn_by_scheme = key_fn_by_scheme.copy() + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + self.clear() + # Return False to re-raise any potential exceptions + return False + + def _new_pool(self, scheme, host, port, request_context=None): + """ + Create a new :class:`ConnectionPool` based on host, port, scheme, and + any additional pool keyword arguments. + + If ``request_context`` is provided, it is provided as keyword arguments + to the pool class used. This method is used to actually create the + connection pools handed out by :meth:`connection_from_url` and + companion methods. It is intended to be overridden for customization. 
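# Illustrative usage sketch, not part of the vendored file: the key normalizer
# above lower-cases scheme and host and pads missing fields with None, so
# equivalent request contexts map to the same PoolKey.
from urllib3.poolmanager import PoolKey, key_fn_by_scheme

context = {'scheme': 'HTTPS', 'host': 'Example.COM', 'port': 443}
key = key_fn_by_scheme['https'](context)
assert isinstance(key, PoolKey)
assert (key.key_scheme, key.key_host, key.key_port) == ('https', 'example.com', 443)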
+ """ + pool_cls = self.pool_classes_by_scheme[scheme] + if request_context is None: + request_context = self.connection_pool_kw.copy() + + # Although the context has everything necessary to create the pool, + # this function has historically only used the scheme, host, and port + # in the positional args. When an API change is acceptable these can + # be removed. + for key in ('scheme', 'host', 'port'): + request_context.pop(key, None) + + if scheme == 'http': + for kw in SSL_KEYWORDS: + request_context.pop(kw, None) + + return pool_cls(host, port, **request_context) + + def clear(self): + """ + Empty our store of pools and direct them all to close. + + This will not affect in-flight connections, but they will not be + re-used after completion. + """ + self.pools.clear() + + def connection_from_host(self, host, port=None, scheme='http', pool_kwargs=None): + """ + Get a :class:`ConnectionPool` based on the host, port, and scheme. + + If ``port`` isn't given, it will be derived from the ``scheme`` using + ``urllib3.connectionpool.port_by_scheme``. If ``pool_kwargs`` is + provided, it is merged with the instance's ``connection_pool_kw`` + variable and used to create the new connection pool, if one is + needed. + """ + + if not host: + raise LocationValueError("No host specified.") + + request_context = self._merge_pool_kwargs(pool_kwargs) + request_context['scheme'] = scheme or 'http' + if not port: + port = port_by_scheme.get(request_context['scheme'].lower(), 80) + request_context['port'] = port + request_context['host'] = host + + return self.connection_from_context(request_context) + + def connection_from_context(self, request_context): + """ + Get a :class:`ConnectionPool` based on the request context. + + ``request_context`` must at least contain the ``scheme`` key and its + value must be a key in ``key_fn_by_scheme`` instance variable. + """ + scheme = request_context['scheme'].lower() + pool_key_constructor = self.key_fn_by_scheme[scheme] + pool_key = pool_key_constructor(request_context) + + return self.connection_from_pool_key(pool_key, request_context=request_context) + + def connection_from_pool_key(self, pool_key, request_context=None): + """ + Get a :class:`ConnectionPool` based on the provided pool key. + + ``pool_key`` should be a namedtuple that only contains immutable + objects. At a minimum it must have the ``scheme``, ``host``, and + ``port`` fields. + """ + with self.pools.lock: + # If the scheme, host, or port doesn't match existing open + # connections, open a new ConnectionPool. + pool = self.pools.get(pool_key) + if pool: + return pool + + # Make a fresh ConnectionPool of the desired type + scheme = request_context['scheme'] + host = request_context['host'] + port = request_context['port'] + pool = self._new_pool(scheme, host, port, request_context=request_context) + self.pools[pool_key] = pool + + return pool + + def connection_from_url(self, url, pool_kwargs=None): + """ + Similar to :func:`urllib3.connectionpool.connection_from_url`. + + If ``pool_kwargs`` is not provided and a new pool needs to be + constructed, ``self.connection_pool_kw`` is used to initialize + the :class:`urllib3.connectionpool.ConnectionPool`. If ``pool_kwargs`` + is provided, it is used instead. Note that if a new pool does not + need to be created for the request, the provided ``pool_kwargs`` are + not used. 
+ """ + u = parse_url(url) + return self.connection_from_host(u.host, port=u.port, scheme=u.scheme, + pool_kwargs=pool_kwargs) + + def _merge_pool_kwargs(self, override): + """ + Merge a dictionary of override values for self.connection_pool_kw. + + This does not modify self.connection_pool_kw and returns a new dict. + Any keys in the override dictionary with a value of ``None`` are + removed from the merged dictionary. + """ + base_pool_kwargs = self.connection_pool_kw.copy() + if override: + for key, value in override.items(): + if value is None: + try: + del base_pool_kwargs[key] + except KeyError: + pass + else: + base_pool_kwargs[key] = value + return base_pool_kwargs + + def urlopen(self, method, url, redirect=True, **kw): + """ + Same as :meth:`urllib3.connectionpool.HTTPConnectionPool.urlopen` + with custom cross-host redirect logic and only sends the request-uri + portion of the ``url``. + + The given ``url`` parameter must be absolute, such that an appropriate + :class:`urllib3.connectionpool.ConnectionPool` can be chosen for it. + """ + u = parse_url(url) + conn = self.connection_from_host(u.host, port=u.port, scheme=u.scheme) + + kw['assert_same_host'] = False + kw['redirect'] = False + + if 'headers' not in kw: + kw['headers'] = self.headers.copy() + + if self.proxy is not None and u.scheme == "http": + response = conn.urlopen(method, url, **kw) + else: + response = conn.urlopen(method, u.request_uri, **kw) + + redirect_location = redirect and response.get_redirect_location() + if not redirect_location: + return response + + # Support relative URLs for redirecting. + redirect_location = urljoin(url, redirect_location) + + # RFC 7231, Section 6.4.4 + if response.status == 303: + method = 'GET' + + retries = kw.get('retries') + if not isinstance(retries, Retry): + retries = Retry.from_int(retries, redirect=redirect) + + # Strip headers marked as unsafe to forward to the redirected location. + # Check remove_headers_on_redirect to avoid a potential network call within + # conn.is_same_host() which may use socket.gethostbyname() in the future. + if (retries.remove_headers_on_redirect + and not conn.is_same_host(redirect_location)): + for header in retries.remove_headers_on_redirect: + kw['headers'].pop(header, None) + + try: + retries = retries.increment(method, url, response=response, _pool=conn) + except MaxRetryError: + if retries.raise_on_redirect: + raise + return response + + kw['retries'] = retries + kw['redirect'] = redirect + + log.info("Redirecting %s -> %s", url, redirect_location) + return self.urlopen(method, redirect_location, **kw) + + +class ProxyManager(PoolManager): + """ + Behaves just like :class:`PoolManager`, but sends all requests through + the defined proxy, using the CONNECT method for HTTPS URLs. + + :param proxy_url: + The URL of the proxy to be used. + + :param proxy_headers: + A dictionary containing headers that will be sent to the proxy. In case + of HTTP they are being sent with each request, while in the + HTTPS/CONNECT case they are sent only once. Could be used for proxy + authentication. 
+ + Example: + >>> proxy = urllib3.ProxyManager('http://localhost:3128/') + >>> r1 = proxy.request('GET', 'http://google.com/') + >>> r2 = proxy.request('GET', 'http://httpbin.org/') + >>> len(proxy.pools) + 1 + >>> r3 = proxy.request('GET', 'https://httpbin.org/') + >>> r4 = proxy.request('GET', 'https://twitter.com/') + >>> len(proxy.pools) + 3 + + """ + + def __init__(self, proxy_url, num_pools=10, headers=None, + proxy_headers=None, **connection_pool_kw): + + if isinstance(proxy_url, HTTPConnectionPool): + proxy_url = '%s://%s:%i' % (proxy_url.scheme, proxy_url.host, + proxy_url.port) + proxy = parse_url(proxy_url) + if not proxy.port: + port = port_by_scheme.get(proxy.scheme, 80) + proxy = proxy._replace(port=port) + + if proxy.scheme not in ("http", "https"): + raise ProxySchemeUnknown(proxy.scheme) + + self.proxy = proxy + self.proxy_headers = proxy_headers or {} + + connection_pool_kw['_proxy'] = self.proxy + connection_pool_kw['_proxy_headers'] = self.proxy_headers + + super(ProxyManager, self).__init__( + num_pools, headers, **connection_pool_kw) + + def connection_from_host(self, host, port=None, scheme='http', pool_kwargs=None): + if scheme == "https": + return super(ProxyManager, self).connection_from_host( + host, port, scheme, pool_kwargs=pool_kwargs) + + return super(ProxyManager, self).connection_from_host( + self.proxy.host, self.proxy.port, self.proxy.scheme, pool_kwargs=pool_kwargs) + + def _set_proxy_headers(self, url, headers=None): + """ + Sets headers needed by proxies: specifically, the Accept and Host + headers. Only sets headers not provided by the user. + """ + headers_ = {'Accept': '*/*'} + + netloc = parse_url(url).netloc + if netloc: + headers_['Host'] = netloc + + if headers: + headers_.update(headers) + return headers_ + + def urlopen(self, method, url, redirect=True, **kw): + "Same as HTTP(S)ConnectionPool.urlopen, ``url`` must be absolute." + u = parse_url(url) + + if u.scheme == "http": + # For proxied HTTPS requests, httplib sets the necessary headers + # on the CONNECT to the proxy. For HTTP, we'll definitely + # need to set 'Host' at the very least. + headers = kw.get('headers', self.headers) + kw['headers'] = self._set_proxy_headers(url, headers) + + return super(ProxyManager, self).urlopen(method, url, redirect=redirect, **kw) + + +def proxy_from_url(url, **kw): + return ProxyManager(proxy_url=url, **kw) diff --git a/env/lib/python3.6/site-packages/urllib3/request.py b/env/lib/python3.6/site-packages/urllib3/request.py new file mode 100755 index 0000000..1be3334 --- /dev/null +++ b/env/lib/python3.6/site-packages/urllib3/request.py @@ -0,0 +1,150 @@ +from __future__ import absolute_import + +from .filepost import encode_multipart_formdata +from .packages.six.moves.urllib.parse import urlencode + + +__all__ = ['RequestMethods'] + + +class RequestMethods(object): + """ + Convenience mixin for classes who implement a :meth:`urlopen` method, such + as :class:`~urllib3.connectionpool.HTTPConnectionPool` and + :class:`~urllib3.poolmanager.PoolManager`. + + Provides behavior for making common types of HTTP request methods and + decides which type of request field encoding to use. + + Specifically, + + :meth:`.request_encode_url` is for sending requests whose fields are + encoded in the URL (such as GET, HEAD, DELETE). + + :meth:`.request_encode_body` is for sending requests whose fields are + encoded in the *body* of the request using multipart or www-form-urlencoded + (such as for POST, PUT, PATCH). 
+ + :meth:`.request` is for making any kind of request, it will look up the + appropriate encoding format and use one of the above two methods to make + the request. + + Initializer parameters: + + :param headers: + Headers to include with all requests, unless other headers are given + explicitly. + """ + + _encode_url_methods = set(['DELETE', 'GET', 'HEAD', 'OPTIONS']) + + def __init__(self, headers=None): + self.headers = headers or {} + + def urlopen(self, method, url, body=None, headers=None, + encode_multipart=True, multipart_boundary=None, + **kw): # Abstract + raise NotImplementedError("Classes extending RequestMethods must implement " + "their own ``urlopen`` method.") + + def request(self, method, url, fields=None, headers=None, **urlopen_kw): + """ + Make a request using :meth:`urlopen` with the appropriate encoding of + ``fields`` based on the ``method`` used. + + This is a convenience method that requires the least amount of manual + effort. It can be used in most situations, while still having the + option to drop down to more specific methods when necessary, such as + :meth:`request_encode_url`, :meth:`request_encode_body`, + or even the lowest level :meth:`urlopen`. + """ + method = method.upper() + + urlopen_kw['request_url'] = url + + if method in self._encode_url_methods: + return self.request_encode_url(method, url, fields=fields, + headers=headers, + **urlopen_kw) + else: + return self.request_encode_body(method, url, fields=fields, + headers=headers, + **urlopen_kw) + + def request_encode_url(self, method, url, fields=None, headers=None, + **urlopen_kw): + """ + Make a request using :meth:`urlopen` with the ``fields`` encoded in + the url. This is useful for request methods like GET, HEAD, DELETE, etc. + """ + if headers is None: + headers = self.headers + + extra_kw = {'headers': headers} + extra_kw.update(urlopen_kw) + + if fields: + url += '?' + urlencode(fields) + + return self.urlopen(method, url, **extra_kw) + + def request_encode_body(self, method, url, fields=None, headers=None, + encode_multipart=True, multipart_boundary=None, + **urlopen_kw): + """ + Make a request using :meth:`urlopen` with the ``fields`` encoded in + the body. This is useful for request methods like POST, PUT, PATCH, etc. + + When ``encode_multipart=True`` (default), then + :meth:`urllib3.filepost.encode_multipart_formdata` is used to encode + the payload with the appropriate content type. Otherwise + :meth:`urllib.urlencode` is used with the + 'application/x-www-form-urlencoded' content type. + + Multipart encoding must be used when posting files, and it's reasonably + safe to use it in other times too. However, it may break request + signing, such as with OAuth. + + Supports an optional ``fields`` parameter of key/value strings AND + key/filetuple. A filetuple is a (filename, data, MIME type) tuple where + the MIME type is optional. For example:: + + fields = { + 'foo': 'bar', + 'fakefile': ('foofile.txt', 'contents of foofile'), + 'realfile': ('barfile.txt', open('realfile').read()), + 'typedfile': ('bazfile.bin', open('bazfile').read(), + 'image/jpeg'), + 'nonamefile': 'contents of nonamefile field', + } + + When uploading a file, providing a filename (the first parameter of the + tuple) is optional but recommended to best mimic behavior of browsers. + + Note that if ``headers`` are supplied, the 'Content-Type' header will + be overwritten because it depends on the dynamic random boundary string + which is used to compose the body of the request. 
The random boundary + string can be explicitly set with the ``multipart_boundary`` parameter. + """ + if headers is None: + headers = self.headers + + extra_kw = {'headers': {}} + + if fields: + if 'body' in urlopen_kw: + raise TypeError( + "request got values for both 'fields' and 'body', can only specify one.") + + if encode_multipart: + body, content_type = encode_multipart_formdata(fields, boundary=multipart_boundary) + else: + body, content_type = urlencode(fields), 'application/x-www-form-urlencoded' + + extra_kw['body'] = body + extra_kw['headers'] = {'Content-Type': content_type} + + extra_kw['headers'].update(headers) + extra_kw.update(urlopen_kw) + + return self.urlopen(method, url, **extra_kw) diff --git a/env/lib/python3.6/site-packages/urllib3/response.py b/env/lib/python3.6/site-packages/urllib3/response.py new file mode 100755 index 0000000..9873cb9 --- /dev/null +++ b/env/lib/python3.6/site-packages/urllib3/response.py @@ -0,0 +1,676 @@ +from __future__ import absolute_import +from contextlib import contextmanager +import zlib +import io +import logging +from socket import timeout as SocketTimeout +from socket import error as SocketError + +from ._collections import HTTPHeaderDict +from .exceptions import ( + BodyNotHttplibCompatible, ProtocolError, DecodeError, ReadTimeoutError, + ResponseNotChunked, IncompleteRead, InvalidHeader +) +from .packages.six import string_types as basestring, binary_type, PY3 +from .packages.six.moves import http_client as httplib +from .connection import HTTPException, BaseSSLError +from .util.response import is_fp_closed, is_response_to_head + +log = logging.getLogger(__name__) + + +class DeflateDecoder(object): + + def __init__(self): + self._first_try = True + self._data = binary_type() + self._obj = zlib.decompressobj() + + def __getattr__(self, name): + return getattr(self._obj, name) + + def decompress(self, data): + if not data: + return data + + if not self._first_try: + return self._obj.decompress(data) + + self._data += data + try: + decompressed = self._obj.decompress(data) + if decompressed: + self._first_try = False + self._data = None + return decompressed + except zlib.error: + self._first_try = False + self._obj = zlib.decompressobj(-zlib.MAX_WBITS) + try: + return self.decompress(self._data) + finally: + self._data = None + + +class GzipDecoderState(object): + + FIRST_MEMBER = 0 + OTHER_MEMBERS = 1 + SWALLOW_DATA = 2 + + +class GzipDecoder(object): + + def __init__(self): + self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS) + self._state = GzipDecoderState.FIRST_MEMBER + + def __getattr__(self, name): + return getattr(self._obj, name) + + def decompress(self, data): + ret = binary_type() + if self._state == GzipDecoderState.SWALLOW_DATA or not data: + return ret + while True: + try: + ret += self._obj.decompress(data) + except zlib.error: + previous_state = self._state + # Ignore data after the first error + self._state = GzipDecoderState.SWALLOW_DATA + if previous_state == GzipDecoderState.OTHER_MEMBERS: + # Allow trailing garbage acceptable in other gzip clients + return ret + raise + data = self._obj.unused_data + if not data: + return ret + self._state = GzipDecoderState.OTHER_MEMBERS + self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS) + + +def _get_decoder(mode): + if mode == 'gzip': + return GzipDecoder() + + return DeflateDecoder() + + +class HTTPResponse(io.IOBase): + """ + HTTP Response container. 
+ + Backwards-compatible to httplib's HTTPResponse but the response ``body`` is + loaded and decoded on-demand when the ``data`` property is accessed. This + class is also compatible with the Python standard library's :mod:`io` + module, and can hence be treated as a readable object in the context of that + framework. + + Extra parameters for behaviour not present in httplib.HTTPResponse: + + :param preload_content: + If True, the response's body will be preloaded during construction. + + :param decode_content: + If True, will attempt to decode the body based on the + 'content-encoding' header. + + :param original_response: + When this HTTPResponse wrapper is generated from an httplib.HTTPResponse + object, it's convenient to include the original for debug purposes. It's + otherwise unused. + + :param retries: + The retries contains the last :class:`~urllib3.util.retry.Retry` that + was used during the request. + + :param enforce_content_length: + Enforce content length checking. Body returned by server must match + value of Content-Length header, if present. Otherwise, raise error. + """ + + CONTENT_DECODERS = ['gzip', 'deflate'] + REDIRECT_STATUSES = [301, 302, 303, 307, 308] + + def __init__(self, body='', headers=None, status=0, version=0, reason=None, + strict=0, preload_content=True, decode_content=True, + original_response=None, pool=None, connection=None, msg=None, + retries=None, enforce_content_length=False, + request_method=None, request_url=None): + + if isinstance(headers, HTTPHeaderDict): + self.headers = headers + else: + self.headers = HTTPHeaderDict(headers) + self.status = status + self.version = version + self.reason = reason + self.strict = strict + self.decode_content = decode_content + self.retries = retries + self.enforce_content_length = enforce_content_length + + self._decoder = None + self._body = None + self._fp = None + self._original_response = original_response + self._fp_bytes_read = 0 + self.msg = msg + self._request_url = request_url + + if body and isinstance(body, (basestring, binary_type)): + self._body = body + + self._pool = pool + self._connection = connection + + if hasattr(body, 'read'): + self._fp = body + + # Are we using the chunked-style of transfer encoding? + self.chunked = False + self.chunk_left = None + tr_enc = self.headers.get('transfer-encoding', '').lower() + # Don't incur the penalty of creating a list and then discarding it + encodings = (enc.strip() for enc in tr_enc.split(",")) + if "chunked" in encodings: + self.chunked = True + + # Determine length of response + self.length_remaining = self._init_length(request_method) + + # If requested, preload the body. + if preload_content and not self._body: + self._body = self.read(decode_content=decode_content) + + def get_redirect_location(self): + """ + Should we redirect and where to? + + :returns: Truthy redirect location string if we got a redirect status + code and valid location. ``None`` if redirect status and no + location. ``False`` if not a redirect status code. + """ + if self.status in self.REDIRECT_STATUSES: + return self.headers.get('location') + + return False + + def release_conn(self): + if not self._pool or not self._connection: + return + + self._pool._put_conn(self._connection) + self._connection = None + + @property + def data(self): + # For backwords-compat with earlier urllib3 0.4 and earlier. 
+ if self._body: + return self._body + + if self._fp: + return self.read(cache_content=True) + + @property + def connection(self): + return self._connection + + def isclosed(self): + return is_fp_closed(self._fp) + + def tell(self): + """ + Obtain the number of bytes pulled over the wire so far. May differ from + the amount of content returned by :meth:``HTTPResponse.read`` if bytes + are encoded on the wire (e.g, compressed). + """ + return self._fp_bytes_read + + def _init_length(self, request_method): + """ + Set initial length value for Response content if available. + """ + length = self.headers.get('content-length') + + if length is not None: + if self.chunked: + # This Response will fail with an IncompleteRead if it can't be + # received as chunked. This method falls back to attempt reading + # the response before raising an exception. + log.warning("Received response with both Content-Length and " + "Transfer-Encoding set. This is expressly forbidden " + "by RFC 7230 sec 3.3.2. Ignoring Content-Length and " + "attempting to process response as Transfer-Encoding: " + "chunked.") + return None + + try: + # RFC 7230 section 3.3.2 specifies multiple content lengths can + # be sent in a single Content-Length header + # (e.g. Content-Length: 42, 42). This line ensures the values + # are all valid ints and that as long as the `set` length is 1, + # all values are the same. Otherwise, the header is invalid. + lengths = set([int(val) for val in length.split(',')]) + if len(lengths) > 1: + raise InvalidHeader("Content-Length contained multiple " + "unmatching values (%s)" % length) + length = lengths.pop() + except ValueError: + length = None + else: + if length < 0: + length = None + + # Convert status to int for comparison + # In some cases, httplib returns a status of "_UNKNOWN" + try: + status = int(self.status) + except ValueError: + status = 0 + + # Check for responses that shouldn't include a body + if status in (204, 304) or 100 <= status < 200 or request_method == 'HEAD': + length = 0 + + return length + + def _init_decoder(self): + """ + Set-up the _decoder attribute if necessary. + """ + # Note: content-encoding value should be case-insensitive, per RFC 7230 + # Section 3.2 + content_encoding = self.headers.get('content-encoding', '').lower() + if self._decoder is None and content_encoding in self.CONTENT_DECODERS: + self._decoder = _get_decoder(content_encoding) + + def _decode(self, data, decode_content, flush_decoder): + """ + Decode the data passed in and potentially flush the decoder. + """ + try: + if decode_content and self._decoder: + data = self._decoder.decompress(data) + except (IOError, zlib.error) as e: + content_encoding = self.headers.get('content-encoding', '').lower() + raise DecodeError( + "Received response with content-encoding: %s, but " + "failed to decode it." % content_encoding, e) + + if flush_decoder and decode_content: + data += self._flush_decoder() + + return data + + def _flush_decoder(self): + """ + Flushes the decoder. Should only be called if the decoder is actually + being used. + """ + if self._decoder: + buf = self._decoder.decompress(b'') + return buf + self._decoder.flush() + + return b'' + + @contextmanager + def _error_catcher(self): + """ + Catch low-level python exceptions, instead re-raising urllib3 + variants, so that low-level exceptions are not leaked in the + high-level api. + + On exit, release the connection back to the pool. 
+ """ + clean_exit = False + + try: + try: + yield + + except SocketTimeout: + # FIXME: Ideally we'd like to include the url in the ReadTimeoutError but + # there is yet no clean way to get at it from this context. + raise ReadTimeoutError(self._pool, None, 'Read timed out.') + + except BaseSSLError as e: + # FIXME: Is there a better way to differentiate between SSLErrors? + if 'read operation timed out' not in str(e): # Defensive: + # This shouldn't happen but just in case we're missing an edge + # case, let's avoid swallowing SSL errors. + raise + + raise ReadTimeoutError(self._pool, None, 'Read timed out.') + + except (HTTPException, SocketError) as e: + # This includes IncompleteRead. + raise ProtocolError('Connection broken: %r' % e, e) + + # If no exception is thrown, we should avoid cleaning up + # unnecessarily. + clean_exit = True + finally: + # If we didn't terminate cleanly, we need to throw away our + # connection. + if not clean_exit: + # The response may not be closed but we're not going to use it + # anymore so close it now to ensure that the connection is + # released back to the pool. + if self._original_response: + self._original_response.close() + + # Closing the response may not actually be sufficient to close + # everything, so if we have a hold of the connection close that + # too. + if self._connection: + self._connection.close() + + # If we hold the original response but it's closed now, we should + # return the connection back to the pool. + if self._original_response and self._original_response.isclosed(): + self.release_conn() + + def read(self, amt=None, decode_content=None, cache_content=False): + """ + Similar to :meth:`httplib.HTTPResponse.read`, but with two additional + parameters: ``decode_content`` and ``cache_content``. + + :param amt: + How much of the content to read. If specified, caching is skipped + because it doesn't make sense to cache partial content as the full + response. + + :param decode_content: + If True, will attempt to decode the body based on the + 'content-encoding' header. + + :param cache_content: + If True, will save the returned data such that the same result is + returned despite of the state of the underlying file object. This + is useful if you want the ``.data`` property to continue working + after having ``.read()`` the file object. (Overridden if ``amt`` is + set.) + """ + self._init_decoder() + if decode_content is None: + decode_content = self.decode_content + + if self._fp is None: + return + + flush_decoder = False + data = None + + with self._error_catcher(): + if amt is None: + # cStringIO doesn't like amt=None + data = self._fp.read() + flush_decoder = True + else: + cache_content = False + data = self._fp.read(amt) + if amt != 0 and not data: # Platform-specific: Buggy versions of Python. + # Close the connection when no data is returned + # + # This is redundant to what httplib/http.client _should_ + # already do. However, versions of python released before + # December 15, 2012 (http://bugs.python.org/issue16298) do + # not properly close the connection in all cases. There is + # no harm in redundantly calling close. + self._fp.close() + flush_decoder = True + if self.enforce_content_length and self.length_remaining not in (0, None): + # This is an edge case that httplib failed to cover due + # to concerns of backward compatibility. We're + # addressing it here to make sure IncompleteRead is + # raised during streaming, so all calls with incorrect + # Content-Length are caught. 
+ raise IncompleteRead(self._fp_bytes_read, self.length_remaining) + + if data: + self._fp_bytes_read += len(data) + if self.length_remaining is not None: + self.length_remaining -= len(data) + + data = self._decode(data, decode_content, flush_decoder) + + if cache_content: + self._body = data + + return data + + def stream(self, amt=2**16, decode_content=None): + """ + A generator wrapper for the read() method. A call will block until + ``amt`` bytes have been read from the connection or until the + connection is closed. + + :param amt: + How much of the content to read. The generator will return up to + much data per iteration, but may return less. This is particularly + likely when using compressed data. However, the empty string will + never be returned. + + :param decode_content: + If True, will attempt to decode the body based on the + 'content-encoding' header. + """ + if self.chunked and self.supports_chunked_reads(): + for line in self.read_chunked(amt, decode_content=decode_content): + yield line + else: + while not is_fp_closed(self._fp): + data = self.read(amt=amt, decode_content=decode_content) + + if data: + yield data + + @classmethod + def from_httplib(ResponseCls, r, **response_kw): + """ + Given an :class:`httplib.HTTPResponse` instance ``r``, return a + corresponding :class:`urllib3.response.HTTPResponse` object. + + Remaining parameters are passed to the HTTPResponse constructor, along + with ``original_response=r``. + """ + headers = r.msg + + if not isinstance(headers, HTTPHeaderDict): + if PY3: # Python 3 + headers = HTTPHeaderDict(headers.items()) + else: # Python 2 + headers = HTTPHeaderDict.from_httplib(headers) + + # HTTPResponse objects in Python 3 don't have a .strict attribute + strict = getattr(r, 'strict', 0) + resp = ResponseCls(body=r, + headers=headers, + status=r.status, + version=r.version, + reason=r.reason, + strict=strict, + original_response=r, + **response_kw) + return resp + + # Backwards-compatibility methods for httplib.HTTPResponse + def getheaders(self): + return self.headers + + def getheader(self, name, default=None): + return self.headers.get(name, default) + + # Backwards compatibility for http.cookiejar + def info(self): + return self.headers + + # Overrides from io.IOBase + def close(self): + if not self.closed: + self._fp.close() + + if self._connection: + self._connection.close() + + @property + def closed(self): + if self._fp is None: + return True + elif hasattr(self._fp, 'isclosed'): + return self._fp.isclosed() + elif hasattr(self._fp, 'closed'): + return self._fp.closed + else: + return True + + def fileno(self): + if self._fp is None: + raise IOError("HTTPResponse has no file to get a fileno from") + elif hasattr(self._fp, "fileno"): + return self._fp.fileno() + else: + raise IOError("The file-like object this HTTPResponse is wrapped " + "around has no file descriptor") + + def flush(self): + if self._fp is not None and hasattr(self._fp, 'flush'): + return self._fp.flush() + + def readable(self): + # This method is required for `io` module compatibility. + return True + + def readinto(self, b): + # This method is required for `io` module compatibility. + temp = self.read(len(b)) + if len(temp) == 0: + return 0 + else: + b[:len(temp)] = temp + return len(temp) + + def supports_chunked_reads(self): + """ + Checks if the underlying file-like object looks like a + httplib.HTTPResponse object. We do this by testing for the fp + attribute. If it is present we assume it returns raw chunks as + processed by read_chunked(). 
+ """ + return hasattr(self._fp, 'fp') + + def _update_chunk_length(self): + # First, we'll figure out length of a chunk and then + # we'll try to read it from socket. + if self.chunk_left is not None: + return + line = self._fp.fp.readline() + line = line.split(b';', 1)[0] + try: + self.chunk_left = int(line, 16) + except ValueError: + # Invalid chunked protocol response, abort. + self.close() + raise httplib.IncompleteRead(line) + + def _handle_chunk(self, amt): + returned_chunk = None + if amt is None: + chunk = self._fp._safe_read(self.chunk_left) + returned_chunk = chunk + self._fp._safe_read(2) # Toss the CRLF at the end of the chunk. + self.chunk_left = None + elif amt < self.chunk_left: + value = self._fp._safe_read(amt) + self.chunk_left = self.chunk_left - amt + returned_chunk = value + elif amt == self.chunk_left: + value = self._fp._safe_read(amt) + self._fp._safe_read(2) # Toss the CRLF at the end of the chunk. + self.chunk_left = None + returned_chunk = value + else: # amt > self.chunk_left + returned_chunk = self._fp._safe_read(self.chunk_left) + self._fp._safe_read(2) # Toss the CRLF at the end of the chunk. + self.chunk_left = None + return returned_chunk + + def read_chunked(self, amt=None, decode_content=None): + """ + Similar to :meth:`HTTPResponse.read`, but with an additional + parameter: ``decode_content``. + + :param amt: + How much of the content to read. If specified, caching is skipped + because it doesn't make sense to cache partial content as the full + response. + + :param decode_content: + If True, will attempt to decode the body based on the + 'content-encoding' header. + """ + self._init_decoder() + # FIXME: Rewrite this method and make it a class with a better structured logic. + if not self.chunked: + raise ResponseNotChunked( + "Response is not chunked. " + "Header 'transfer-encoding: chunked' is missing.") + if not self.supports_chunked_reads(): + raise BodyNotHttplibCompatible( + "Body should be httplib.HTTPResponse like. " + "It should have have an fp attribute which returns raw chunks.") + + with self._error_catcher(): + # Don't bother reading the body of a HEAD request. + if self._original_response and is_response_to_head(self._original_response): + self._original_response.close() + return + + # If a response is already read and closed + # then return immediately. + if self._fp.fp is None: + return + + while True: + self._update_chunk_length() + if self.chunk_left == 0: + break + chunk = self._handle_chunk(amt) + decoded = self._decode(chunk, decode_content=decode_content, + flush_decoder=False) + if decoded: + yield decoded + + if decode_content: + # On CPython and PyPy, we should never need to flush the + # decoder. However, on Jython we *might* need to, so + # lets defensively do it anyway. + decoded = self._flush_decoder() + if decoded: # Platform-specific: Jython. + yield decoded + + # Chunk content ends with \r\n: discard it. + while True: + line = self._fp.fp.readline() + if not line: + # Some sites may not end with '\r\n'. + break + if line == b'\r\n': + break + + # We read everything; close the "file". + if self._original_response: + self._original_response.close() + + def geturl(self): + """ + Returns the URL that was the source of this response. + If the request that generated this response redirected, this method + will return the final redirect location. 
+        """
+        if self.retries is not None and len(self.retries.history):
+            return self.retries.history[-1].redirect_location
+        else:
+            return self._request_url
diff --git a/env/lib/python3.6/site-packages/urllib3/util/connection.py b/env/lib/python3.6/site-packages/urllib3/util/connection.py
new file mode 100755
index 0000000..5cf488f
--- /dev/null
+++ b/env/lib/python3.6/site-packages/urllib3/util/connection.py
@@ -0,0 +1,126 @@
+from __future__ import absolute_import
+import socket
+from .wait import NoWayToWaitForSocketError, wait_for_read
+
+
+def is_connection_dropped(conn):  # Platform-specific
+    """
+    Returns True if the connection is dropped and should be closed.
+
+    :param conn:
+        :class:`httplib.HTTPConnection` object.
+
+    Note: For platforms like AppEngine, this will always return ``False`` to
+    let the platform handle connection recycling transparently for us.
+    """
+    sock = getattr(conn, 'sock', False)
+    if sock is False:  # Platform-specific: AppEngine
+        return False
+    if sock is None:  # Connection already closed (such as by httplib).
+        return True
+    try:
+        # Returns True if readable, which here means it's been dropped
+        return wait_for_read(sock, timeout=0.0)
+    except NoWayToWaitForSocketError:  # Platform-specific: AppEngine
+        return False
+
+
+# This function is copied from socket.py in the Python 2.7 standard
+# library test suite. Added to its signature is only `socket_options`.
+# One additional modification is that we avoid binding to IPv6 servers
+# discovered in DNS if the system doesn't have IPv6 functionality.
+def create_connection(address, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
+                      source_address=None, socket_options=None):
+    """Connect to *address* and return the socket object.
+
+    Convenience function. Connect to *address* (a 2-tuple ``(host,
+    port)``) and return the socket object. Passing the optional
+    *timeout* parameter will set the timeout on the socket instance
+    before attempting to connect. If no *timeout* is supplied, the
+    global default timeout setting returned by :func:`getdefaulttimeout`
+    is used. If *source_address* is set it must be a tuple of (host, port)
+    for the socket to bind as a source address before making the connection.
+    A host of '' or port 0 tells the OS to use the default.
+    """
+
+    host, port = address
+    if host.startswith('['):
+        host = host.strip('[]')
+    err = None
+
+    # Using the value from allowed_gai_family() in the context of getaddrinfo lets
+    # us select whether to work with IPv4 DNS records, IPv6 records, or both.
+    # The original create_connection function always returns all records.
+    family = allowed_gai_family()
+
+    for res in socket.getaddrinfo(host, port, family, socket.SOCK_STREAM):
+        af, socktype, proto, canonname, sa = res
+        sock = None
+        try:
+            sock = socket.socket(af, socktype, proto)
+
+            # If provided, set socket level options before connecting.
+            _set_socket_options(sock, socket_options)
+
+            if timeout is not socket._GLOBAL_DEFAULT_TIMEOUT:
+                sock.settimeout(timeout)
+            if source_address:
+                sock.bind(source_address)
+            sock.connect(sa)
+            return sock
+
+        except socket.error as e:
+            err = e
+            if sock is not None:
+                sock.close()
+                sock = None
+
+    if err is not None:
+        raise err
+
+    raise socket.error("getaddrinfo returns an empty list")
+
+
+def _set_socket_options(sock, options):
+    if options is None:
+        return
+
+    for opt in options:
+        sock.setsockopt(*opt)
+
+
+def allowed_gai_family():
+    """This function is designed to work in the context of
+    getaddrinfo, where family=socket.AF_UNSPEC is the default and
+    will perform a DNS search for both IPv6 and IPv4 records."""
+
+    family = socket.AF_INET
+    if HAS_IPV6:
+        family = socket.AF_UNSPEC
+    return family
+
+
+def _has_ipv6(host):
+    """ Returns True if the system can bind an IPv6 address. """
+    sock = None
+    has_ipv6 = False
+
+    if socket.has_ipv6:
+        # has_ipv6 returns true if cPython was compiled with IPv6 support.
+        # It does not tell us if the system has IPv6 support enabled. To
+        # determine that we must bind to an IPv6 address.
+        # https://github.com/shazow/urllib3/pull/611
+        # https://bugs.python.org/issue658327
+        try:
+            sock = socket.socket(socket.AF_INET6)
+            sock.bind((host, 0))
+            has_ipv6 = True
+        except Exception:
+            pass
+
+    if sock:
+        sock.close()
+    return has_ipv6
+
+
+HAS_IPV6 = _has_ipv6('::1')
diff --git a/env/lib/python3.6/site-packages/urllib3/util/queue.py b/env/lib/python3.6/site-packages/urllib3/util/queue.py
new file mode 100755
index 0000000..d3d379a
--- /dev/null
+++ b/env/lib/python3.6/site-packages/urllib3/util/queue.py
@@ -0,0 +1,21 @@
+import collections
+from ..packages import six
+from ..packages.six.moves import queue
+
+if six.PY2:
+    # Queue is imported for side effects on MS Windows. See issue #229.
+    import Queue as _unused_module_Queue  # noqa: F401
+
+
+class LifoQueue(queue.Queue):
+    def _init(self, _):
+        self.queue = collections.deque()
+
+    def _qsize(self, len=len):
+        return len(self.queue)
+
+    def _put(self, item):
+        self.queue.append(item)
+
+    def _get(self):
+        return self.queue.pop()
diff --git a/env/lib/python3.6/site-packages/urllib3/util/request.py b/env/lib/python3.6/site-packages/urllib3/util/request.py
new file mode 100755
index 0000000..3ddfcd5
--- /dev/null
+++ b/env/lib/python3.6/site-packages/urllib3/util/request.py
@@ -0,0 +1,118 @@
+from __future__ import absolute_import
+from base64 import b64encode
+
+from ..packages.six import b, integer_types
+from ..exceptions import UnrewindableBodyError
+
+ACCEPT_ENCODING = 'gzip,deflate'
+_FAILEDTELL = object()
+
+
+def make_headers(keep_alive=None, accept_encoding=None, user_agent=None,
+                 basic_auth=None, proxy_basic_auth=None, disable_cache=None):
+    """
+    Shortcuts for generating request headers.
+
+    :param keep_alive:
+        If ``True``, adds 'connection: keep-alive' header.
+
+    :param accept_encoding:
+        Can be a boolean, list, or string.
+        ``True`` translates to 'gzip,deflate'.
+        List will get joined by comma.
+        String will be used as provided.
+
+    :param user_agent:
+        String representing the user-agent you want, such as
+        "python-urllib3/0.6"
+
+    :param basic_auth:
+        Colon-separated username:password string for 'authorization: basic ...'
+        auth header.
+
+    :param proxy_basic_auth:
+        Colon-separated username:password string for 'proxy-authorization: basic ...'
+        auth header.
+
+    :param disable_cache:
+        If ``True``, adds 'cache-control: no-cache' header.
+
+    Example::
+
+        >>> make_headers(keep_alive=True, user_agent="Batman/1.0")
+        {'connection': 'keep-alive', 'user-agent': 'Batman/1.0'}
+        >>> make_headers(accept_encoding=True)
+        {'accept-encoding': 'gzip,deflate'}
+    """
+    headers = {}
+    if accept_encoding:
+        if isinstance(accept_encoding, str):
+            pass
+        elif isinstance(accept_encoding, list):
+            accept_encoding = ','.join(accept_encoding)
+        else:
+            accept_encoding = ACCEPT_ENCODING
+        headers['accept-encoding'] = accept_encoding
+
+    if user_agent:
+        headers['user-agent'] = user_agent
+
+    if keep_alive:
+        headers['connection'] = 'keep-alive'
+
+    if basic_auth:
+        headers['authorization'] = 'Basic ' + \
+            b64encode(b(basic_auth)).decode('utf-8')
+
+    if proxy_basic_auth:
+        headers['proxy-authorization'] = 'Basic ' + \
+            b64encode(b(proxy_basic_auth)).decode('utf-8')
+
+    if disable_cache:
+        headers['cache-control'] = 'no-cache'
+
+    return headers
+
+
+def set_file_position(body, pos):
+    """
+    If a position is provided, move file to that point.
+    Otherwise, we'll attempt to record a position for future use.
+    """
+    if pos is not None:
+        rewind_body(body, pos)
+    elif getattr(body, 'tell', None) is not None:
+        try:
+            pos = body.tell()
+        except (IOError, OSError):
+            # This differentiates from None, allowing us to catch
+            # a failed `tell()` later when trying to rewind the body.
+            pos = _FAILEDTELL
+
+    return pos
+
+
+def rewind_body(body, body_pos):
+    """
+    Attempt to rewind body to a certain position.
+    Primarily used for request redirects and retries.
+
+    :param body:
+        File-like object that supports seek.
+
+    :param int pos:
+        Position to seek to in file.
+    """
+    body_seek = getattr(body, 'seek', None)
+    if body_seek is not None and isinstance(body_pos, integer_types):
+        try:
+            body_seek(body_pos)
+        except (IOError, OSError):
+            raise UnrewindableBodyError("An error occurred when rewinding request "
+                                        "body for redirect/retry.")
+    elif body_pos is _FAILEDTELL:
+        raise UnrewindableBodyError("Unable to record file position for rewinding "
+                                    "request body during a redirect/retry.")
+    else:
+        raise ValueError("body_pos must be of type integer, "
+                         "instead it was %s." % type(body_pos))
diff --git a/env/lib/python3.6/site-packages/urllib3/util/response.py b/env/lib/python3.6/site-packages/urllib3/util/response.py
new file mode 100755
index 0000000..67cf730
--- /dev/null
+++ b/env/lib/python3.6/site-packages/urllib3/util/response.py
@@ -0,0 +1,81 @@
+from __future__ import absolute_import
+from ..packages.six.moves import http_client as httplib
+
+from ..exceptions import HeaderParsingError
+
+
+def is_fp_closed(obj):
+    """
+    Checks whether a given file-like object is closed.
+
+    :param obj:
+        The file-like object to check.
+    """
+
+    try:
+        # Check `isclosed()` first, in case Python3 doesn't set `closed`.
+        # GH Issue #928
+        return obj.isclosed()
+    except AttributeError:
+        pass
+
+    try:
+        # Check via the official file-like-object way.
+        return obj.closed
+    except AttributeError:
+        pass
+
+    try:
+        # Check if the object is a container for another file-like object that
+        # gets released on exhaustion (e.g. HTTPResponse).
+        return obj.fp is None
+    except AttributeError:
+        pass
+
+    raise ValueError("Unable to determine whether fp is closed.")
+
+
+def assert_header_parsing(headers):
+    """
+    Asserts whether all headers have been successfully parsed.
+    Extracts encountered errors from the result of parsing headers.
+
+    Only works on Python 3.
+
+    :param headers: Headers to verify.
+    :type headers: `httplib.HTTPMessage`.
+ + :raises urllib3.exceptions.HeaderParsingError: + If parsing errors are found. + """ + + # This will fail silently if we pass in the wrong kind of parameter. + # To make debugging easier add an explicit check. + if not isinstance(headers, httplib.HTTPMessage): + raise TypeError('expected httplib.Message, got {0}.'.format( + type(headers))) + + defects = getattr(headers, 'defects', None) + get_payload = getattr(headers, 'get_payload', None) + + unparsed_data = None + if get_payload: # Platform-specific: Python 3. + unparsed_data = get_payload() + + if defects or unparsed_data: + raise HeaderParsingError(defects=defects, unparsed_data=unparsed_data) + + +def is_response_to_head(response): + """ + Checks whether the request of a response has been a HEAD-request. + Handles the quirks of AppEngine. + + :param conn: + :type conn: :class:`httplib.HTTPResponse` + """ + # FIXME: Can we do this somehow without accessing private httplib _method? + method = response._method + if isinstance(method, int): # Platform-specific: Appengine + return method == 3 + return method.upper() == 'HEAD' diff --git a/env/lib/python3.6/site-packages/urllib3/util/retry.py b/env/lib/python3.6/site-packages/urllib3/util/retry.py new file mode 100755 index 0000000..7ad3dc6 --- /dev/null +++ b/env/lib/python3.6/site-packages/urllib3/util/retry.py @@ -0,0 +1,411 @@ +from __future__ import absolute_import +import time +import logging +from collections import namedtuple +from itertools import takewhile +import email +import re + +from ..exceptions import ( + ConnectTimeoutError, + MaxRetryError, + ProtocolError, + ReadTimeoutError, + ResponseError, + InvalidHeader, +) +from ..packages import six + + +log = logging.getLogger(__name__) + + +# Data structure for representing the metadata of requests that result in a retry. +RequestHistory = namedtuple('RequestHistory', ["method", "url", "error", + "status", "redirect_location"]) + + +class Retry(object): + """ Retry configuration. + + Each retry attempt will create a new Retry object with updated values, so + they can be safely reused. + + Retries can be defined as a default for a pool:: + + retries = Retry(connect=5, read=2, redirect=5) + http = PoolManager(retries=retries) + response = http.request('GET', 'http://example.com/') + + Or per-request (which overrides the default for the pool):: + + response = http.request('GET', 'http://example.com/', retries=Retry(10)) + + Retries can be disabled by passing ``False``:: + + response = http.request('GET', 'http://example.com/', retries=False) + + Errors will be wrapped in :class:`~urllib3.exceptions.MaxRetryError` unless + retries are disabled, in which case the causing exception will be raised. + + :param int total: + Total number of retries to allow. Takes precedence over other counts. + + Set to ``None`` to remove this constraint and fall back on other + counts. It's a good idea to set this to some sensibly-high value to + account for unexpected edge cases and avoid infinite retry loops. + + Set to ``0`` to fail on the first retry. + + Set to ``False`` to disable and imply ``raise_on_redirect=False``. + + :param int connect: + How many connection-related errors to retry on. + + These are errors raised before the request is sent to the remote server, + which we assume has not triggered the server to process the request. + + Set to ``0`` to fail on the first retry of this type. + + :param int read: + How many times to retry on read errors. 
+ + These errors are raised after the request was sent to the server, so the + request may have side-effects. + + Set to ``0`` to fail on the first retry of this type. + + :param int redirect: + How many redirects to perform. Limit this to avoid infinite redirect + loops. + + A redirect is a HTTP response with a status code 301, 302, 303, 307 or + 308. + + Set to ``0`` to fail on the first retry of this type. + + Set to ``False`` to disable and imply ``raise_on_redirect=False``. + + :param int status: + How many times to retry on bad status codes. + + These are retries made on responses, where status code matches + ``status_forcelist``. + + Set to ``0`` to fail on the first retry of this type. + + :param iterable method_whitelist: + Set of uppercased HTTP method verbs that we should retry on. + + By default, we only retry on methods which are considered to be + idempotent (multiple requests with the same parameters end with the + same state). See :attr:`Retry.DEFAULT_METHOD_WHITELIST`. + + Set to a ``False`` value to retry on any verb. + + :param iterable status_forcelist: + A set of integer HTTP status codes that we should force a retry on. + A retry is initiated if the request method is in ``method_whitelist`` + and the response status code is in ``status_forcelist``. + + By default, this is disabled with ``None``. + + :param float backoff_factor: + A backoff factor to apply between attempts after the second try + (most errors are resolved immediately by a second try without a + delay). urllib3 will sleep for:: + + {backoff factor} * (2 ^ ({number of total retries} - 1)) + + seconds. If the backoff_factor is 0.1, then :func:`.sleep` will sleep + for [0.0s, 0.2s, 0.4s, ...] between retries. It will never be longer + than :attr:`Retry.BACKOFF_MAX`. + + By default, backoff is disabled (set to 0). + + :param bool raise_on_redirect: Whether, if the number of redirects is + exhausted, to raise a MaxRetryError, or to return a response with a + response code in the 3xx range. + + :param bool raise_on_status: Similar meaning to ``raise_on_redirect``: + whether we should raise an exception, or return a response, + if status falls in ``status_forcelist`` range and retries have + been exhausted. + + :param tuple history: The history of the request encountered during + each call to :meth:`~Retry.increment`. The list is in the order + the requests occurred. Each list item is of class :class:`RequestHistory`. + + :param bool respect_retry_after_header: + Whether to respect Retry-After header on status codes defined as + :attr:`Retry.RETRY_AFTER_STATUS_CODES` or not. + + :param iterable remove_headers_on_redirect: + Sequence of headers to remove from the request when a response + indicating a redirect is returned before firing off the redirected + request. + """ + + DEFAULT_METHOD_WHITELIST = frozenset([ + 'HEAD', 'GET', 'PUT', 'DELETE', 'OPTIONS', 'TRACE']) + + RETRY_AFTER_STATUS_CODES = frozenset([413, 429, 503]) + + DEFAULT_REDIRECT_HEADERS_BLACKLIST = frozenset(['Authorization']) + + #: Maximum backoff time. 
+ BACKOFF_MAX = 120 + + def __init__(self, total=10, connect=None, read=None, redirect=None, status=None, + method_whitelist=DEFAULT_METHOD_WHITELIST, status_forcelist=None, + backoff_factor=0, raise_on_redirect=True, raise_on_status=True, + history=None, respect_retry_after_header=True, + remove_headers_on_redirect=DEFAULT_REDIRECT_HEADERS_BLACKLIST): + + self.total = total + self.connect = connect + self.read = read + self.status = status + + if redirect is False or total is False: + redirect = 0 + raise_on_redirect = False + + self.redirect = redirect + self.status_forcelist = status_forcelist or set() + self.method_whitelist = method_whitelist + self.backoff_factor = backoff_factor + self.raise_on_redirect = raise_on_redirect + self.raise_on_status = raise_on_status + self.history = history or tuple() + self.respect_retry_after_header = respect_retry_after_header + self.remove_headers_on_redirect = remove_headers_on_redirect + + def new(self, **kw): + params = dict( + total=self.total, + connect=self.connect, read=self.read, redirect=self.redirect, status=self.status, + method_whitelist=self.method_whitelist, + status_forcelist=self.status_forcelist, + backoff_factor=self.backoff_factor, + raise_on_redirect=self.raise_on_redirect, + raise_on_status=self.raise_on_status, + history=self.history, + remove_headers_on_redirect=self.remove_headers_on_redirect + ) + params.update(kw) + return type(self)(**params) + + @classmethod + def from_int(cls, retries, redirect=True, default=None): + """ Backwards-compatibility for the old retries format.""" + if retries is None: + retries = default if default is not None else cls.DEFAULT + + if isinstance(retries, Retry): + return retries + + redirect = bool(redirect) and None + new_retries = cls(retries, redirect=redirect) + log.debug("Converted retries value: %r -> %r", retries, new_retries) + return new_retries + + def get_backoff_time(self): + """ Formula for computing the current backoff + + :rtype: float + """ + # We want to consider only the last consecutive errors sequence (Ignore redirects). + consecutive_errors_len = len(list(takewhile(lambda x: x.redirect_location is None, + reversed(self.history)))) + if consecutive_errors_len <= 1: + return 0 + + backoff_value = self.backoff_factor * (2 ** (consecutive_errors_len - 1)) + return min(self.BACKOFF_MAX, backoff_value) + + def parse_retry_after(self, retry_after): + # Whitespace: https://tools.ietf.org/html/rfc7230#section-3.2.4 + if re.match(r"^\s*[0-9]+\s*$", retry_after): + seconds = int(retry_after) + else: + retry_date_tuple = email.utils.parsedate(retry_after) + if retry_date_tuple is None: + raise InvalidHeader("Invalid Retry-After header: %s" % retry_after) + retry_date = time.mktime(retry_date_tuple) + seconds = retry_date - time.time() + + if seconds < 0: + seconds = 0 + + return seconds + + def get_retry_after(self, response): + """ Get the value of Retry-After in seconds. """ + + retry_after = response.getheader("Retry-After") + + if retry_after is None: + return None + + return self.parse_retry_after(retry_after) + + def sleep_for_retry(self, response=None): + retry_after = self.get_retry_after(response) + if retry_after: + time.sleep(retry_after) + return True + + return False + + def _sleep_backoff(self): + backoff = self.get_backoff_time() + if backoff <= 0: + return + time.sleep(backoff) + + def sleep(self, response=None): + """ Sleep between retry attempts. + + This method will respect a server's ``Retry-After`` response header + and sleep the duration of the time requested. 
If that is not present, it + will use an exponential backoff. By default, the backoff factor is 0 and + this method will return immediately. + """ + + if response: + slept = self.sleep_for_retry(response) + if slept: + return + + self._sleep_backoff() + + def _is_connection_error(self, err): + """ Errors when we're fairly sure that the server did not receive the + request, so it should be safe to retry. + """ + return isinstance(err, ConnectTimeoutError) + + def _is_read_error(self, err): + """ Errors that occur after the request has been started, so we should + assume that the server began processing it. + """ + return isinstance(err, (ReadTimeoutError, ProtocolError)) + + def _is_method_retryable(self, method): + """ Checks if a given HTTP method should be retried upon, depending if + it is included on the method whitelist. + """ + if self.method_whitelist and method.upper() not in self.method_whitelist: + return False + + return True + + def is_retry(self, method, status_code, has_retry_after=False): + """ Is this method/status code retryable? (Based on whitelists and control + variables such as the number of total retries to allow, whether to + respect the Retry-After header, whether this header is present, and + whether the returned status code is on the list of status codes to + be retried upon on the presence of the aforementioned header) + """ + if not self._is_method_retryable(method): + return False + + if self.status_forcelist and status_code in self.status_forcelist: + return True + + return (self.total and self.respect_retry_after_header and + has_retry_after and (status_code in self.RETRY_AFTER_STATUS_CODES)) + + def is_exhausted(self): + """ Are we out of retries? """ + retry_counts = (self.total, self.connect, self.read, self.redirect, self.status) + retry_counts = list(filter(None, retry_counts)) + if not retry_counts: + return False + + return min(retry_counts) < 0 + + def increment(self, method=None, url=None, response=None, error=None, + _pool=None, _stacktrace=None): + """ Return a new Retry object with incremented retry counters. + + :param response: A response object, or None, if the server did not + return a response. + :type response: :class:`~urllib3.response.HTTPResponse` + :param Exception error: An error encountered during the request, or + None if the response was received successfully. + + :return: A new ``Retry`` object. + """ + if self.total is False and error: + # Disabled, indicate to re-raise the error. + raise six.reraise(type(error), error, _stacktrace) + + total = self.total + if total is not None: + total -= 1 + + connect = self.connect + read = self.read + redirect = self.redirect + status_count = self.status + cause = 'unknown' + status = None + redirect_location = None + + if error and self._is_connection_error(error): + # Connect retry? + if connect is False: + raise six.reraise(type(error), error, _stacktrace) + elif connect is not None: + connect -= 1 + + elif error and self._is_read_error(error): + # Read retry? + if read is False or not self._is_method_retryable(method): + raise six.reraise(type(error), error, _stacktrace) + elif read is not None: + read -= 1 + + elif response and response.get_redirect_location(): + # Redirect retry? 
+ if redirect is not None: + redirect -= 1 + cause = 'too many redirects' + redirect_location = response.get_redirect_location() + status = response.status + + else: + # Incrementing because of a server error like a 500 in + # status_forcelist and a the given method is in the whitelist + cause = ResponseError.GENERIC_ERROR + if response and response.status: + if status_count is not None: + status_count -= 1 + cause = ResponseError.SPECIFIC_ERROR.format( + status_code=response.status) + status = response.status + + history = self.history + (RequestHistory(method, url, error, status, redirect_location),) + + new_retry = self.new( + total=total, + connect=connect, read=read, redirect=redirect, status=status_count, + history=history) + + if new_retry.is_exhausted(): + raise MaxRetryError(_pool, url, error or ResponseError(cause)) + + log.debug("Incremented Retry for (url='%s'): %r", url, new_retry) + + return new_retry + + def __repr__(self): + return ('{cls.__name__}(total={self.total}, connect={self.connect}, ' + 'read={self.read}, redirect={self.redirect}, status={self.status})').format( + cls=type(self), self=self) + + +# For backwards compatibility (equivalent to pre-v1.9): +Retry.DEFAULT = Retry(3) diff --git a/env/lib/python3.6/site-packages/urllib3/util/ssl_.py b/env/lib/python3.6/site-packages/urllib3/util/ssl_.py new file mode 100755 index 0000000..2893752 --- /dev/null +++ b/env/lib/python3.6/site-packages/urllib3/util/ssl_.py @@ -0,0 +1,396 @@ +from __future__ import absolute_import +import errno +import warnings +import hmac +import socket + +from binascii import hexlify, unhexlify +from hashlib import md5, sha1, sha256 + +from ..exceptions import SSLError, InsecurePlatformWarning, SNIMissingWarning +from ..packages import six + + +SSLContext = None +HAS_SNI = False +IS_PYOPENSSL = False +IS_SECURETRANSPORT = False + +# Maps the length of a digest to a possible hash function producing this digest +HASHFUNC_MAP = { + 32: md5, + 40: sha1, + 64: sha256, +} + + +def _const_compare_digest_backport(a, b): + """ + Compare two digests of equal length in constant time. + + The digests must be of type str/bytes. + Returns True if the digests match, and False otherwise. + """ + result = abs(len(a) - len(b)) + for l, r in zip(bytearray(a), bytearray(b)): + result |= l ^ r + return result == 0 + + +_const_compare_digest = getattr(hmac, 'compare_digest', + _const_compare_digest_backport) + + +try: # Test for SSL features + import ssl + from ssl import wrap_socket, CERT_NONE, PROTOCOL_SSLv23 + from ssl import HAS_SNI # Has SNI? +except ImportError: + pass + + +try: + from ssl import OP_NO_SSLv2, OP_NO_SSLv3, OP_NO_COMPRESSION +except ImportError: + OP_NO_SSLv2, OP_NO_SSLv3 = 0x1000000, 0x2000000 + OP_NO_COMPRESSION = 0x20000 + + +# Python 2.7 and earlier didn't have inet_pton on non-Linux +# so we fallback on inet_aton in those cases. This means that +# we can only detect IPv4 addresses in this case. +if hasattr(socket, 'inet_pton'): + inet_pton = socket.inet_pton +else: + # Maybe we can use ipaddress if the user has urllib3[secure]? + try: + import ipaddress + + def inet_pton(_, host): + if isinstance(host, six.binary_type): + host = host.decode('ascii') + return ipaddress.ip_address(host) + + except ImportError: # Platform-specific: Non-Linux + def inet_pton(_, host): + return socket.inet_aton(host) + + +# A secure default. 
+# Sources for more information on TLS ciphers: +# +# - https://wiki.mozilla.org/Security/Server_Side_TLS +# - https://www.ssllabs.com/projects/best-practices/index.html +# - https://hynek.me/articles/hardening-your-web-servers-ssl-ciphers/ +# +# The general intent is: +# - Prefer TLS 1.3 cipher suites +# - prefer cipher suites that offer perfect forward secrecy (DHE/ECDHE), +# - prefer ECDHE over DHE for better performance, +# - prefer any AES-GCM and ChaCha20 over any AES-CBC for better performance and +# security, +# - prefer AES-GCM over ChaCha20 because hardware-accelerated AES is common, +# - disable NULL authentication, MD5 MACs and DSS for security reasons. +DEFAULT_CIPHERS = ':'.join([ + 'TLS13-AES-256-GCM-SHA384', + 'TLS13-CHACHA20-POLY1305-SHA256', + 'TLS13-AES-128-GCM-SHA256', + 'ECDH+AESGCM', + 'ECDH+CHACHA20', + 'DH+AESGCM', + 'DH+CHACHA20', + 'ECDH+AES256', + 'DH+AES256', + 'ECDH+AES128', + 'DH+AES', + 'RSA+AESGCM', + 'RSA+AES', + '!aNULL', + '!eNULL', + '!MD5', +]) + +try: + from ssl import SSLContext # Modern SSL? +except ImportError: + import sys + + class SSLContext(object): # Platform-specific: Python 2 & 3.1 + supports_set_ciphers = ((2, 7) <= sys.version_info < (3,) or + (3, 2) <= sys.version_info) + + def __init__(self, protocol_version): + self.protocol = protocol_version + # Use default values from a real SSLContext + self.check_hostname = False + self.verify_mode = ssl.CERT_NONE + self.ca_certs = None + self.options = 0 + self.certfile = None + self.keyfile = None + self.ciphers = None + + def load_cert_chain(self, certfile, keyfile): + self.certfile = certfile + self.keyfile = keyfile + + def load_verify_locations(self, cafile=None, capath=None): + self.ca_certs = cafile + + if capath is not None: + raise SSLError("CA directories not supported in older Pythons") + + def set_ciphers(self, cipher_suite): + if not self.supports_set_ciphers: + raise TypeError( + 'Your version of Python does not support setting ' + 'a custom cipher suite. Please upgrade to Python ' + '2.7, 3.2, or later if you need this functionality.' + ) + self.ciphers = cipher_suite + + def wrap_socket(self, socket, server_hostname=None, server_side=False): + warnings.warn( + 'A true SSLContext object is not available. This prevents ' + 'urllib3 from configuring SSL appropriately and may cause ' + 'certain SSL connections to fail. You can upgrade to a newer ' + 'version of Python to solve this. For more information, see ' + 'https://urllib3.readthedocs.io/en/latest/advanced-usage.html' + '#ssl-warnings', + InsecurePlatformWarning + ) + kwargs = { + 'keyfile': self.keyfile, + 'certfile': self.certfile, + 'ca_certs': self.ca_certs, + 'cert_reqs': self.verify_mode, + 'ssl_version': self.protocol, + 'server_side': server_side, + } + if self.supports_set_ciphers: # Platform-specific: Python 2.7+ + return wrap_socket(socket, ciphers=self.ciphers, **kwargs) + else: # Platform-specific: Python 2.6 + return wrap_socket(socket, **kwargs) + + +def assert_fingerprint(cert, fingerprint): + """ + Checks if given fingerprint matches the supplied certificate. + + :param cert: + Certificate as bytes object. + :param fingerprint: + Fingerprint as string of hexdigits, can be interspersed by colons. + """ + + fingerprint = fingerprint.replace(':', '').lower() + digest_length = len(fingerprint) + hashfunc = HASHFUNC_MAP.get(digest_length) + if not hashfunc: + raise SSLError( + 'Fingerprint of invalid length: {0}'.format(fingerprint)) + + # We need encode() here for py32; works on py2 and p33. 
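+    # e.g. a 64-hex-digit pin (colons stripped) selects sha256 via HASHFUNC_MAP above, while a 40-digit pin selects sha1.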
+ fingerprint_bytes = unhexlify(fingerprint.encode()) + + cert_digest = hashfunc(cert).digest() + + if not _const_compare_digest(cert_digest, fingerprint_bytes): + raise SSLError('Fingerprints did not match. Expected "{0}", got "{1}".' + .format(fingerprint, hexlify(cert_digest))) + + +def resolve_cert_reqs(candidate): + """ + Resolves the argument to a numeric constant, which can be passed to + the wrap_socket function/method from the ssl module. + Defaults to :data:`ssl.CERT_NONE`. + If given a string it is assumed to be the name of the constant in the + :mod:`ssl` module or its abbreviation. + (So you can specify `REQUIRED` instead of `CERT_REQUIRED`. + If it's neither `None` nor a string we assume it is already the numeric + constant which can directly be passed to wrap_socket. + """ + if candidate is None: + return CERT_NONE + + if isinstance(candidate, str): + res = getattr(ssl, candidate, None) + if res is None: + res = getattr(ssl, 'CERT_' + candidate) + return res + + return candidate + + +def resolve_ssl_version(candidate): + """ + like resolve_cert_reqs + """ + if candidate is None: + return PROTOCOL_SSLv23 + + if isinstance(candidate, str): + res = getattr(ssl, candidate, None) + if res is None: + res = getattr(ssl, 'PROTOCOL_' + candidate) + return res + + return candidate + + +def create_urllib3_context(ssl_version=None, cert_reqs=None, + options=None, ciphers=None): + """All arguments have the same meaning as ``ssl_wrap_socket``. + + By default, this function does a lot of the same work that + ``ssl.create_default_context`` does on Python 3.4+. It: + + - Disables SSLv2, SSLv3, and compression + - Sets a restricted set of server ciphers + + If you wish to enable SSLv3, you can do:: + + from urllib3.util import ssl_ + context = ssl_.create_urllib3_context() + context.options &= ~ssl_.OP_NO_SSLv3 + + You can do the same to enable compression (substituting ``COMPRESSION`` + for ``SSLv3`` in the last line above). + + :param ssl_version: + The desired protocol version to use. This will default to + PROTOCOL_SSLv23 which will negotiate the highest protocol that both + the server and your installation of OpenSSL support. + :param cert_reqs: + Whether to require the certificate verification. This defaults to + ``ssl.CERT_REQUIRED``. + :param options: + Specific OpenSSL options. These default to ``ssl.OP_NO_SSLv2``, + ``ssl.OP_NO_SSLv3``, ``ssl.OP_NO_COMPRESSION``. + :param ciphers: + Which cipher suites to allow the server to select. + :returns: + Constructed SSLContext object with specified options + :rtype: SSLContext + """ + context = SSLContext(ssl_version or ssl.PROTOCOL_SSLv23) + + # Setting the default here, as we may have no ssl module on import + cert_reqs = ssl.CERT_REQUIRED if cert_reqs is None else cert_reqs + + if options is None: + options = 0 + # SSLv2 is easily broken and is considered harmful and dangerous + options |= OP_NO_SSLv2 + # SSLv3 has several problems and is now dangerous + options |= OP_NO_SSLv3 + # Disable compression to prevent CRIME attacks for OpenSSL 1.0+ + # (issue #309) + options |= OP_NO_COMPRESSION + + context.options |= options + + if getattr(context, 'supports_set_ciphers', True): # Platform-specific: Python 2.6 + context.set_ciphers(ciphers or DEFAULT_CIPHERS) + + context.verify_mode = cert_reqs + if getattr(context, 'check_hostname', None) is not None: # Platform-specific: Python 3.2 + # We do our own verification, including fingerprints and alternative + # hostnames. 
So disable it here + context.check_hostname = False + return context + + +def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None, + ca_certs=None, server_hostname=None, + ssl_version=None, ciphers=None, ssl_context=None, + ca_cert_dir=None): + """ + All arguments except for server_hostname, ssl_context, and ca_cert_dir have + the same meaning as they do when using :func:`ssl.wrap_socket`. + + :param server_hostname: + When SNI is supported, the expected hostname of the certificate + :param ssl_context: + A pre-made :class:`SSLContext` object. If none is provided, one will + be created using :func:`create_urllib3_context`. + :param ciphers: + A string of ciphers we wish the client to support. This is not + supported on Python 2.6 as the ssl module does not support it. + :param ca_cert_dir: + A directory containing CA certificates in multiple separate files, as + supported by OpenSSL's -CApath flag or the capath argument to + SSLContext.load_verify_locations(). + """ + context = ssl_context + if context is None: + # Note: This branch of code and all the variables in it are no longer + # used by urllib3 itself. We should consider deprecating and removing + # this code. + context = create_urllib3_context(ssl_version, cert_reqs, + ciphers=ciphers) + + if ca_certs or ca_cert_dir: + try: + context.load_verify_locations(ca_certs, ca_cert_dir) + except IOError as e: # Platform-specific: Python 2.6, 2.7, 3.2 + raise SSLError(e) + # Py33 raises FileNotFoundError which subclasses OSError + # These are not equivalent unless we check the errno attribute + except OSError as e: # Platform-specific: Python 3.3 and beyond + if e.errno == errno.ENOENT: + raise SSLError(e) + raise + elif getattr(context, 'load_default_certs', None) is not None: + # try to load OS default certs; works well on Windows (require Python3.4+) + context.load_default_certs() + + if certfile: + context.load_cert_chain(certfile, keyfile) + + # If we detect server_hostname is an IP address then the SNI + # extension should not be used according to RFC3546 Section 3.1 + # We shouldn't warn the user if SNI isn't available but we would + # not be using SNI anyways due to IP address for server_hostname. + if ((server_hostname is not None and not is_ipaddress(server_hostname)) + or IS_SECURETRANSPORT): + if HAS_SNI and server_hostname is not None: + return context.wrap_socket(sock, server_hostname=server_hostname) + + warnings.warn( + 'An HTTPS request has been made, but the SNI (Server Name ' + 'Indication) extension to TLS is not available on this platform. ' + 'This may cause the server to present an incorrect TLS ' + 'certificate, which can cause validation failures. You can upgrade to ' + 'a newer version of Python to solve this. For more information, see ' + 'https://urllib3.readthedocs.io/en/latest/advanced-usage.html' + '#ssl-warnings', + SNIMissingWarning + ) + + return context.wrap_socket(sock) + + +def is_ipaddress(hostname): + """Detects whether the hostname given is an IP address. + + :param str hostname: Hostname to examine. + :return: True if the hostname is an IP address, False otherwise. + """ + if six.PY3 and isinstance(hostname, six.binary_type): + # IDN A-label bytes are ASCII compatible. 
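+        # e.g. b'10.0.0.1' or b'2001:db8::1' decode cleanly here before being probed with inet_pton below.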
+ hostname = hostname.decode('ascii') + + families = [socket.AF_INET] + if hasattr(socket, 'AF_INET6'): + families.append(socket.AF_INET6) + + for af in families: + try: + inet_pton(af, hostname) + except (socket.error, ValueError, OSError): + pass + else: + return True + return False diff --git a/env/lib/python3.6/site-packages/urllib3/util/timeout.py b/env/lib/python3.6/site-packages/urllib3/util/timeout.py new file mode 100755 index 0000000..cec817e --- /dev/null +++ b/env/lib/python3.6/site-packages/urllib3/util/timeout.py @@ -0,0 +1,242 @@ +from __future__ import absolute_import +# The default socket timeout, used by httplib to indicate that no timeout was +# specified by the user +from socket import _GLOBAL_DEFAULT_TIMEOUT +import time + +from ..exceptions import TimeoutStateError + +# A sentinel value to indicate that no timeout was specified by the user in +# urllib3 +_Default = object() + + +# Use time.monotonic if available. +current_time = getattr(time, "monotonic", time.time) + + +class Timeout(object): + """ Timeout configuration. + + Timeouts can be defined as a default for a pool:: + + timeout = Timeout(connect=2.0, read=7.0) + http = PoolManager(timeout=timeout) + response = http.request('GET', 'http://example.com/') + + Or per-request (which overrides the default for the pool):: + + response = http.request('GET', 'http://example.com/', timeout=Timeout(10)) + + Timeouts can be disabled by setting all the parameters to ``None``:: + + no_timeout = Timeout(connect=None, read=None) + response = http.request('GET', 'http://example.com/, timeout=no_timeout) + + + :param total: + This combines the connect and read timeouts into one; the read timeout + will be set to the time leftover from the connect attempt. In the + event that both a connect timeout and a total are specified, or a read + timeout and a total are specified, the shorter timeout will be applied. + + Defaults to None. + + :type total: integer, float, or None + + :param connect: + The maximum amount of time to wait for a connection attempt to a server + to succeed. Omitting the parameter will default the connect timeout to + the system default, probably `the global default timeout in socket.py + `_. + None will set an infinite timeout for connection attempts. + + :type connect: integer, float, or None + + :param read: + The maximum amount of time to wait between consecutive + read operations for a response from the server. Omitting + the parameter will default the read timeout to the system + default, probably `the global default timeout in socket.py + `_. + None will set an infinite timeout. + + :type read: integer, float, or None + + .. note:: + + Many factors can affect the total amount of time for urllib3 to return + an HTTP response. + + For example, Python's DNS resolver does not obey the timeout specified + on the socket. Other factors that can affect total request time include + high CPU load, high swap, the program running at a low priority level, + or other behaviors. + + In addition, the read and total timeouts only measure the time between + read operations on the socket connecting the client and the server, + not the total amount of time for the request to return a complete + response. For most requests, the timeout is raised because the server + has not sent the first byte in the specified time. This is not always + the case; if a server streams one byte every fifteen seconds, a timeout + of 20 seconds will not trigger, even though the request will take + several minutes to complete. 
+ + If your goal is to cut off any request after a set amount of wall clock + time, consider having a second "watcher" thread to cut off a slow + request. + """ + + #: A sentinel object representing the default timeout value + DEFAULT_TIMEOUT = _GLOBAL_DEFAULT_TIMEOUT + + def __init__(self, total=None, connect=_Default, read=_Default): + self._connect = self._validate_timeout(connect, 'connect') + self._read = self._validate_timeout(read, 'read') + self.total = self._validate_timeout(total, 'total') + self._start_connect = None + + def __str__(self): + return '%s(connect=%r, read=%r, total=%r)' % ( + type(self).__name__, self._connect, self._read, self.total) + + @classmethod + def _validate_timeout(cls, value, name): + """ Check that a timeout attribute is valid. + + :param value: The timeout value to validate + :param name: The name of the timeout attribute to validate. This is + used to specify in error messages. + :return: The validated and casted version of the given value. + :raises ValueError: If it is a numeric value less than or equal to + zero, or the type is not an integer, float, or None. + """ + if value is _Default: + return cls.DEFAULT_TIMEOUT + + if value is None or value is cls.DEFAULT_TIMEOUT: + return value + + if isinstance(value, bool): + raise ValueError("Timeout cannot be a boolean value. It must " + "be an int, float or None.") + try: + float(value) + except (TypeError, ValueError): + raise ValueError("Timeout value %s was %s, but it must be an " + "int, float or None." % (name, value)) + + try: + if value <= 0: + raise ValueError("Attempted to set %s timeout to %s, but the " + "timeout cannot be set to a value less " + "than or equal to 0." % (name, value)) + except TypeError: # Python 3 + raise ValueError("Timeout value %s was %s, but it must be an " + "int, float or None." % (name, value)) + + return value + + @classmethod + def from_float(cls, timeout): + """ Create a new Timeout from a legacy timeout value. + + The timeout value used by httplib.py sets the same timeout on the + connect(), and recv() socket requests. This creates a :class:`Timeout` + object that sets the individual timeouts to the ``timeout`` value + passed to this function. + + :param timeout: The legacy timeout value. + :type timeout: integer, float, sentinel default object, or None + :return: Timeout object + :rtype: :class:`Timeout` + """ + return Timeout(read=timeout, connect=timeout) + + def clone(self): + """ Create a copy of the timeout object + + Timeout properties are stored per-pool but each request needs a fresh + Timeout object to ensure each one has its own start/stop configured. + + :return: a copy of the timeout object + :rtype: :class:`Timeout` + """ + # We can't use copy.deepcopy because that will also create a new object + # for _GLOBAL_DEFAULT_TIMEOUT, which socket.py uses as a sentinel to + # detect the user default. + return Timeout(connect=self._connect, read=self._read, + total=self.total) + + def start_connect(self): + """ Start the timeout clock, used during a connect() attempt + + :raises urllib3.exceptions.TimeoutStateError: if you attempt + to start a timer that has been started already. + """ + if self._start_connect is not None: + raise TimeoutStateError("Timeout timer has already been started.") + self._start_connect = current_time() + return self._start_connect + + def get_connect_duration(self): + """ Gets the time elapsed since the call to :meth:`start_connect`. + + :return: Elapsed time. 
+ :rtype: float + :raises urllib3.exceptions.TimeoutStateError: if you attempt + to get duration for a timer that hasn't been started. + """ + if self._start_connect is None: + raise TimeoutStateError("Can't get connect duration for timer " + "that has not started.") + return current_time() - self._start_connect + + @property + def connect_timeout(self): + """ Get the value to use when setting a connection timeout. + + This will be a positive float or integer, the value None + (never timeout), or the default system timeout. + + :return: Connect timeout. + :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None + """ + if self.total is None: + return self._connect + + if self._connect is None or self._connect is self.DEFAULT_TIMEOUT: + return self.total + + return min(self._connect, self.total) + + @property + def read_timeout(self): + """ Get the value for the read timeout. + + This assumes some time has elapsed in the connection timeout and + computes the read timeout appropriately. + + If self.total is set, the read timeout is dependent on the amount of + time taken by the connect timeout. If the connection time has not been + established, a :exc:`~urllib3.exceptions.TimeoutStateError` will be + raised. + + :return: Value to use for the read timeout. + :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None + :raises urllib3.exceptions.TimeoutStateError: If :meth:`start_connect` + has not yet been called on this object. + """ + if (self.total is not None and + self.total is not self.DEFAULT_TIMEOUT and + self._read is not None and + self._read is not self.DEFAULT_TIMEOUT): + # In case the connect timeout has not yet been established. + if self._start_connect is None: + return self._read + return max(0, min(self.total - self.get_connect_duration(), + self._read)) + elif self.total is not None and self.total is not self.DEFAULT_TIMEOUT: + return max(0, self.total - self.get_connect_duration()) + else: + return self._read diff --git a/env/lib/python3.6/site-packages/urllib3/util/url.py b/env/lib/python3.6/site-packages/urllib3/util/url.py new file mode 100755 index 0000000..6b6f996 --- /dev/null +++ b/env/lib/python3.6/site-packages/urllib3/util/url.py @@ -0,0 +1,230 @@ +from __future__ import absolute_import +from collections import namedtuple + +from ..exceptions import LocationParseError + + +url_attrs = ['scheme', 'auth', 'host', 'port', 'path', 'query', 'fragment'] + +# We only want to normalize urls with an HTTP(S) scheme. +# urllib3 infers URLs without a scheme (None) to be http. +NORMALIZABLE_SCHEMES = ('http', 'https', None) + + +class Url(namedtuple('Url', url_attrs)): + """ + Datastructure for representing an HTTP URL. Used as a return value for + :func:`parse_url`. Both the scheme and host are normalized as they are + both case-insensitive according to RFC 3986. + """ + __slots__ = () + + def __new__(cls, scheme=None, auth=None, host=None, port=None, path=None, + query=None, fragment=None): + if path and not path.startswith('/'): + path = '/' + path + if scheme: + scheme = scheme.lower() + if host and scheme in NORMALIZABLE_SCHEMES: + host = host.lower() + return super(Url, cls).__new__(cls, scheme, auth, host, port, path, + query, fragment) + + @property + def hostname(self): + """For backwards-compatibility with urlparse. We're nice like that.""" + return self.host + + @property + def request_uri(self): + """Absolute path including the query string.""" + uri = self.path or '/' + + if self.query is not None: + uri += '?' 
+ self.query + + return uri + + @property + def netloc(self): + """Network location including host and port""" + if self.port: + return '%s:%d' % (self.host, self.port) + return self.host + + @property + def url(self): + """ + Convert self into a url + + This function should more or less round-trip with :func:`.parse_url`. The + returned url may not be exactly the same as the url inputted to + :func:`.parse_url`, but it should be equivalent by the RFC (e.g., urls + with a blank port will have : removed). + + Example: :: + + >>> U = parse_url('http://google.com/mail/') + >>> U.url + 'http://google.com/mail/' + >>> Url('http', 'username:password', 'host.com', 80, + ... '/path', 'query', 'fragment').url + 'http://username:password@host.com:80/path?query#fragment' + """ + scheme, auth, host, port, path, query, fragment = self + url = '' + + # We use "is not None" we want things to happen with empty strings (or 0 port) + if scheme is not None: + url += scheme + '://' + if auth is not None: + url += auth + '@' + if host is not None: + url += host + if port is not None: + url += ':' + str(port) + if path is not None: + url += path + if query is not None: + url += '?' + query + if fragment is not None: + url += '#' + fragment + + return url + + def __str__(self): + return self.url + + +def split_first(s, delims): + """ + Given a string and an iterable of delimiters, split on the first found + delimiter. Return two split parts and the matched delimiter. + + If not found, then the first part is the full input string. + + Example:: + + >>> split_first('foo/bar?baz', '?/=') + ('foo', 'bar?baz', '/') + >>> split_first('foo/bar?baz', '123') + ('foo/bar?baz', '', None) + + Scales linearly with number of delims. Not ideal for large number of delims. + """ + min_idx = None + min_delim = None + for d in delims: + idx = s.find(d) + if idx < 0: + continue + + if min_idx is None or idx < min_idx: + min_idx = idx + min_delim = d + + if min_idx is None or min_idx < 0: + return s, '', None + + return s[:min_idx], s[min_idx + 1:], min_delim + + +def parse_url(url): + """ + Given a url, return a parsed :class:`.Url` namedtuple. Best-effort is + performed to parse incomplete urls. Fields not provided will be None. + + Partly backwards-compatible with :mod:`urlparse`. + + Example:: + + >>> parse_url('http://google.com/mail/') + Url(scheme='http', host='google.com', port=None, path='/mail/', ...) + >>> parse_url('google.com:80') + Url(scheme=None, host='google.com', port=80, path=None, ...) + >>> parse_url('/foo?bar') + Url(scheme=None, host=None, port=None, path='/foo', query='bar', ...) + """ + + # While this code has overlap with stdlib's urlparse, it is much + # simplified for our needs and less annoying. + # Additionally, this implementations does silly things to be optimal + # on CPython. 
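+    # Rough order of operations below: scheme, authority terminator, auth, (possibly bracketed IPv6) host, port, then fragment and query are split off the path.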
+ + if not url: + # Empty + return Url() + + scheme = None + auth = None + host = None + port = None + path = None + fragment = None + query = None + + # Scheme + if '://' in url: + scheme, url = url.split('://', 1) + + # Find the earliest Authority Terminator + # (http://tools.ietf.org/html/rfc3986#section-3.2) + url, path_, delim = split_first(url, ['/', '?', '#']) + + if delim: + # Reassemble the path + path = delim + path_ + + # Auth + if '@' in url: + # Last '@' denotes end of auth part + auth, url = url.rsplit('@', 1) + + # IPv6 + if url and url[0] == '[': + host, url = url.split(']', 1) + host += ']' + + # Port + if ':' in url: + _host, port = url.split(':', 1) + + if not host: + host = _host + + if port: + # If given, ports must be integers. No whitespace, no plus or + # minus prefixes, no non-integer digits such as ^2 (superscript). + if not port.isdigit(): + raise LocationParseError(url) + try: + port = int(port) + except ValueError: + raise LocationParseError(url) + else: + # Blank ports are cool, too. (rfc3986#section-3.2.3) + port = None + + elif not host and url: + host = url + + if not path: + return Url(scheme, auth, host, port, path, query, fragment) + + # Fragment + if '#' in path: + path, fragment = path.split('#', 1) + + # Query + if '?' in path: + path, query = path.split('?', 1) + + return Url(scheme, auth, host, port, path, query, fragment) + + +def get_host(url): + """ + Deprecated. Use :func:`parse_url` instead. + """ + p = parse_url(url) + return p.scheme or 'http', p.hostname, p.port diff --git a/env/lib/python3.6/site-packages/urllib3/util/wait.py b/env/lib/python3.6/site-packages/urllib3/util/wait.py new file mode 100755 index 0000000..fa686ef --- /dev/null +++ b/env/lib/python3.6/site-packages/urllib3/util/wait.py @@ -0,0 +1,153 @@ +import errno +from functools import partial +import select +import sys +try: + from time import monotonic +except ImportError: + from time import time as monotonic + +__all__ = ["NoWayToWaitForSocketError", "wait_for_read", "wait_for_write"] + + +class NoWayToWaitForSocketError(Exception): + pass + + +# How should we wait on sockets? +# +# There are two types of APIs you can use for waiting on sockets: the fancy +# modern stateful APIs like epoll/kqueue, and the older stateless APIs like +# select/poll. The stateful APIs are more efficient when you have a lots of +# sockets to keep track of, because you can set them up once and then use them +# lots of times. But we only ever want to wait on a single socket at a time +# and don't want to keep track of state, so the stateless APIs are actually +# more efficient. So we want to use select() or poll(). +# +# Now, how do we choose between select() and poll()? On traditional Unixes, +# select() has a strange calling convention that makes it slow, or fail +# altogether, for high-numbered file descriptors. The point of poll() is to fix +# that, so on Unixes, we prefer poll(). +# +# On Windows, there is no poll() (or at least Python doesn't provide a wrapper +# for it), but that's OK, because on Windows, select() doesn't have this +# strange calling convention; plain select() works fine. +# +# So: on Windows we use select(), and everywhere else we use poll(). We also +# fall back to select() in case poll() is somehow broken or missing. + +if sys.version_info >= (3, 5): + # Modern Python, that retries syscalls by default + def _retry_on_intr(fn, timeout): + return fn(timeout) +else: + # Old and broken Pythons. 
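+    # On these interpreters select()/poll() can raise EINTR when a signal arrives, so the call is retried by hand with the remaining time recomputed against a fixed deadline.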
+ def _retry_on_intr(fn, timeout): + if timeout is not None and timeout <= 0: + return fn(timeout) + + if timeout is None: + deadline = float("inf") + else: + deadline = monotonic() + timeout + + while True: + try: + return fn(timeout) + # OSError for 3 <= pyver < 3.5, select.error for pyver <= 2.7 + except (OSError, select.error) as e: + # 'e.args[0]' incantation works for both OSError and select.error + if e.args[0] != errno.EINTR: + raise + else: + timeout = deadline - monotonic() + if timeout < 0: + timeout = 0 + if timeout == float("inf"): + timeout = None + continue + + +def select_wait_for_socket(sock, read=False, write=False, timeout=None): + if not read and not write: + raise RuntimeError("must specify at least one of read=True, write=True") + rcheck = [] + wcheck = [] + if read: + rcheck.append(sock) + if write: + wcheck.append(sock) + # When doing a non-blocking connect, most systems signal success by + # marking the socket writable. Windows, though, signals success by marked + # it as "exceptional". We paper over the difference by checking the write + # sockets for both conditions. (The stdlib selectors module does the same + # thing.) + fn = partial(select.select, rcheck, wcheck, wcheck) + rready, wready, xready = _retry_on_intr(fn, timeout) + return bool(rready or wready or xready) + + +def poll_wait_for_socket(sock, read=False, write=False, timeout=None): + if not read and not write: + raise RuntimeError("must specify at least one of read=True, write=True") + mask = 0 + if read: + mask |= select.POLLIN + if write: + mask |= select.POLLOUT + poll_obj = select.poll() + poll_obj.register(sock, mask) + + # For some reason, poll() takes timeout in milliseconds + def do_poll(t): + if t is not None: + t *= 1000 + return poll_obj.poll(t) + + return bool(_retry_on_intr(do_poll, timeout)) + + +def null_wait_for_socket(*args, **kwargs): + raise NoWayToWaitForSocketError("no select-equivalent available") + + +def _have_working_poll(): + # Apparently some systems have a select.poll that fails as soon as you try + # to use it, either due to strange configuration or broken monkeypatching + # from libraries like eventlet/greenlet. + try: + poll_obj = select.poll() + poll_obj.poll(0) + except (AttributeError, OSError): + return False + else: + return True + + +def wait_for_socket(*args, **kwargs): + # We delay choosing which implementation to use until the first time we're + # called. We could do it at import time, but then we might make the wrong + # decision if someone goes wild with monkeypatching select.poll after + # we're imported. + global wait_for_socket + if _have_working_poll(): + wait_for_socket = poll_wait_for_socket + elif hasattr(select, "select"): + wait_for_socket = select_wait_for_socket + else: # Platform-specific: Appengine. + wait_for_socket = null_wait_for_socket + return wait_for_socket(*args, **kwargs) + + +def wait_for_read(sock, timeout=None): + """ Waits for reading to be available on a given socket. + Returns True if the socket is readable, or False if the timeout expired. + """ + return wait_for_socket(sock, read=True, timeout=timeout) + + +def wait_for_write(sock, timeout=None): + """ Waits for writing to be available on a given socket. + Returns True if the socket is readable, or False if the timeout expired. 
+ """ + return wait_for_socket(sock, write=True, timeout=timeout) diff --git a/env/lib/python3.6/site-packages/wheel-0.31.1.dist-info/INSTALLER b/env/lib/python3.6/site-packages/wheel-0.31.1.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/env/lib/python3.6/site-packages/wheel-0.31.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/env/lib/python3.6/site-packages/wheel-0.31.1.dist-info/LICENSE.txt b/env/lib/python3.6/site-packages/wheel-0.31.1.dist-info/LICENSE.txt new file mode 100644 index 0000000..c3441e6 --- /dev/null +++ b/env/lib/python3.6/site-packages/wheel-0.31.1.dist-info/LICENSE.txt @@ -0,0 +1,22 @@ +"wheel" copyright (c) 2012-2014 Daniel Holth and +contributors. + +The MIT License + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included +in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR +OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, +ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. diff --git a/env/lib/python3.6/site-packages/wheel-0.31.1.dist-info/METADATA b/env/lib/python3.6/site-packages/wheel-0.31.1.dist-info/METADATA new file mode 100644 index 0000000..2bc2acf --- /dev/null +++ b/env/lib/python3.6/site-packages/wheel-0.31.1.dist-info/METADATA @@ -0,0 +1,395 @@ +Metadata-Version: 2.1 +Name: wheel +Version: 0.31.1 +Summary: A built-package format for Python. +Home-page: https://github.com/pypa/wheel +Author: Daniel Holth +Author-email: dholth@fastmail.fm +Maintainer: Alex Grönholm +Maintainer-email: alex.gronholm@nextday.fi +License: MIT +Keywords: wheel,packaging +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.* +Provides-Extra: test +Provides-Extra: signatures +Provides-Extra: faster-signatures +Provides-Extra: faster-signatures +Requires-Dist: ed25519ll; extra == 'faster-signatures' +Provides-Extra: signatures +Requires-Dist: keyring; extra == 'signatures' +Requires-Dist: keyrings.alt; extra == 'signatures' +Provides-Extra: signatures +Requires-Dist: pyxdg; (sys_platform!="win32") and extra == 'signatures' +Provides-Extra: test +Requires-Dist: pytest (>=3.0.0); extra == 'test' +Requires-Dist: pytest-cov; extra == 'test' + +Wheel +===== + +A built-package format for Python. 
+ +A wheel is a ZIP-format archive with a specially formatted filename +and the .whl extension. It is designed to contain all the files for a +PEP 376 compatible install in a way that is very close to the on-disk +format. Many packages will be properly installed with only the "Unpack" +step (simply extracting the file onto sys.path), and the unpacked archive +preserves enough information to "Spread" (copy data and scripts to their +final locations) at any later time. + +The wheel project provides a `bdist_wheel` command for setuptools +(requires setuptools >= 0.8.0). Wheel files can be installed with a +newer `pip` from https://github.com/pypa/pip or with wheel's own command +line utility. + +The wheel documentation is at https://wheel.readthedocs.io/. The file format is +documented in PEP 427 (https://www.python.org/dev/peps/pep-0427/). + +The reference implementation is at https://github.com/pypa/wheel + +Why not egg? +------------ + +Python's egg format predates the packaging related standards we have +today, the most important being PEP 376 "Database of Installed Python +Distributions" which specifies the .dist-info directory (instead of +.egg-info) and PEP 426 "Metadata for Python Software Packages 2.0" +which specifies how to express dependencies (instead of requires.txt +in .egg-info). + +Wheel implements these things. It also provides a richer file naming +convention that communicates the Python implementation and ABI as well +as simply the language version used in a particular package. + +Unlike .egg, wheel will be a fully-documented standard at the binary +level that is truly easy to install even if you do not want to use the +reference implementation. + + +Code of Conduct +--------------- + +Everyone interacting in the wheel project's codebases, issue trackers, chat +rooms, and mailing lists is expected to follow the `PyPA Code of Conduct`_. + +.. _PyPA Code of Conduct: https://www.pypa.io/en/latest/code-of-conduct/ + + +0.31.1 +====== +- Fixed arch as ``None`` when converting eggs to wheels + +0.31.0 +====== +- Fixed displaying of errors on Python 3 +- Fixed single digit versions in wheel files not being properly recognized +- Fixed wrong character encodings being used (instead of UTF-8) to read and + write ``RECORD`` (this sometimes crashed bdist_wheel too) +- Enabled Zip64 support in wheels by default +- Metadata-Version is now 2.1 +- Dropped DESCRIPTION.rst and metadata.json from the list of generated files +- Dropped support for the non-standard, undocumented ``provides-extra`` and + ``requires-dist`` keywords in setup.cfg metadata +- Deprecated all wheel signing and signature verification commands +- Removed the (already defunct) ``tool`` extras from setup.py + +0.30.0 +====== +- Added py-limited-api {cp32|cp33|cp34|...} flag to produce cpNN.abi3.{arch} + tags on CPython 3. +- Documented the ``license_file`` metadata key +- Improved Python, abi tagging for `wheel convert`. Thanks Ales Erjavec. +- Fixed `>` being prepended to lines starting with "From" in the long description +- Added support for specifying a build number (as per PEP 427). + Thanks Ian Cordasco. +- Made the order of files in generated ZIP files deterministic. + Thanks Matthias Bach. +- Made the order of requirements in metadata deterministic. Thanks Chris Lamb. +- Fixed `wheel install` clobbering existing files +- Improved the error message when trying to verify an unsigned wheel file +- Removed support for Python 2.6, 3.2 and 3.3. 
+ +0.29.0 +====== +- Fix compression type of files in archive (Issue #155, Pull Request #62, + thanks Xavier Fernandez) + +0.28.0 +====== +- Fix file modes in archive (Issue #154) + +0.27.0 +====== +- Support forcing a platform tag using `--plat-name` on pure-Python wheels, as + well as nonstandard platform tags on non-pure wheels (Pull Request #60, Issue + #144, thanks Andrés Díaz) +- Add SOABI tags to platform-specific wheels built for Python 2.X (Pull Request + #55, Issue #63, Issue #101) +- Support reproducible wheel files, wheels that can be rebuilt and will hash to + the same values as previous builds (Pull Request #52, Issue #143, thanks + Barry Warsaw) +- Support for changes in keyring >= 8.0 (Pull Request #61, thanks Jason R. + Coombs) +- Use the file context manager when checking if dependency_links.txt is empty, + fixes problems building wheels under PyPy on Windows (Issue #150, thanks + Cosimo Lupo) +- Don't attempt to (recursively) create a build directory ending with `..` + (invalid on all platforms, but code was only executed on Windows) (Issue #91) +- Added the PyPA Code of Conduct (Pull Request #56) + +0.26.0 +====== +- Fix multiple entrypoint comparison failure on Python 3 (Issue #148) + +0.25.0 +====== +- Add Python 3.5 to tox configuration +- Deterministic (sorted) metadata +- Fix tagging for Python 3.5 compatibility +- Support py2-none-'arch' and py3-none-'arch' tags +- Treat data-only wheels as pure +- Write to temporary file and rename when using wheel install --force + +0.24.0 +====== +- The python tag used for pure-python packages is now .pyN (major version + only). This change actually occurred in 0.23.0 when the --python-tag + option was added, but was not explicitly mentioned in the changelog then. +- wininst2wheel and egg2wheel removed. Use "wheel convert [archive]" + instead. +- Wheel now supports setuptools style conditional requirements via the + extras_require={} syntax. Separate 'extra' names from conditions using + the : character. Wheel's own setup.py does this. (The empty-string + extra is the same as install_requires.) These conditional requirements + should work the same whether the package is installed by wheel or + by setup.py. + +0.23.0 +====== +- Compatibility tag flags added to the bdist_wheel command +- sdist should include files necessary for tests +- 'wheel convert' can now also convert unpacked eggs to wheel +- Rename pydist.json to metadata.json to avoid stepping on the PEP +- The --skip-scripts option has been removed, and not generating scripts is now + the default. The option was a temporary approach until installers could + generate scripts themselves. That is now the case with pip 1.5 and later. + Note that using pip 1.4 to install a wheel without scripts will leave the + installation without entry-point wrappers. The "wheel install-scripts" + command can be used to generate the scripts in such cases. +- Thank you contributors + +0.22.0 +====== +- Include entry_points.txt, scripts a.k.a. commands, in experimental + pydist.json +- Improved test_requires parsing +- Python 2.6 fixes, "wheel version" command courtesy pombredanne + +0.21.0 +====== +- Pregenerated scripts are the default again. +- "setup.py bdist_wheel --skip-scripts" turns them off. +- setuptools is no longer a listed requirement for the 'wheel' + package. It is of course still required in order for bdist_wheel + to work. +- "python -m wheel" avoids importing pkg_resources until it's necessary. + +0.20.0 +====== +- No longer include console_scripts in wheels. 
Ordinary scripts (shell files, + standalone Python files) are included as usual. +- Include new command "python -m wheel install-scripts [distribution + [distribution ...]]" to install the console_scripts (setuptools-style + scripts using pkg_resources) for a distribution. + +0.19.0 +====== +- pymeta.json becomes pydist.json + +0.18.0 +====== +- Python 3 Unicode improvements + +0.17.0 +====== +- Support latest PEP-426 "pymeta.json" (json-format metadata) + +0.16.0 +====== +- Python 2.6 compatibility bugfix (thanks John McFarlane) +- Non-prerelease version number + +1.0.0a2 +======= +- Bugfix for C-extension tags for CPython 3.3 (using SOABI) + +1.0.0a1 +======= +- Bugfix for bdist_wininst converter "wheel convert" +- Bugfix for dists where "is pure" is None instead of True or False + +1.0.0a0 +======= +- Update for version 1.0 of Wheel (PEP accepted). +- Python 3 fix for moving Unicode Description to metadata body +- Include rudimentary API documentation in Sphinx (thanks Kevin Horn) + +0.15.0 +====== +- Various improvements + +0.14.0 +====== +- Changed the signature format to better comply with the current JWS spec. + Breaks all existing signatures. +- Include ``wheel unsign`` command to remove RECORD.jws from an archive. +- Put the description in the newly allowed payload section of PKG-INFO + (METADATA) files. + +0.13.0 +====== +- Use distutils instead of sysconfig to get installation paths; can install + headers. +- Improve WheelFile() sort. +- Allow bootstrap installs without any pkg_resources. + +0.12.0 +====== +- Unit test for wheel.tool.install + +0.11.0 +====== +- API cleanup + +0.10.3 +====== +- Scripts fixer fix + +0.10.2 +====== +- Fix keygen + +0.10.1 +====== +- Preserve attributes on install. + +0.10.0 +====== +- Include a copy of pkg_resources. Wheel can now install into a virtualenv + that does not have distribute (though most packages still require + pkg_resources to actually work; wheel install distribute) +- Define a new setup.cfg section [wheel]. universal=1 will + apply the py2.py3-none-any tag for pure python wheels. + +0.9.7 +===== +- Only import dirspec when needed. dirspec is only needed to find the + configuration for keygen/signing operations. + +0.9.6 +===== +- requires-dist from setup.cfg overwrites any requirements from setup.py + Care must be taken that the requirements are the same in both cases, + or just always install from wheel. +- drop dirspec requirement on win32 +- improved command line utility, adds 'wheel convert [egg or wininst]' to + convert legacy binary formats to wheel + +0.9.5 +===== +- Wheel's own wheel file can be executed by Python, and can install itself: + ``python wheel-0.9.5-py27-none-any/wheel install ...`` +- Use argparse; basic ``wheel install`` command should run with only stdlib + dependencies. +- Allow requires_dist in setup.cfg's [metadata] section. In addition to + dependencies in setup.py, but will only be interpreted when installing + from wheel, not from sdist. Can be qualified with environment markers. + +0.9.4 +===== +- Fix wheel.signatures in sdist + +0.9.3 +===== +- Integrated digital signatures support without C extensions. +- Integrated "wheel install" command (single package, no dependency + resolution) including compatibility check. 
+- Support Python 3.3 +- Use Metadata 1.3 (PEP 426) + +0.9.2 +===== +- Automatic signing if WHEEL_TOOL points to the wheel binary +- Even more Python 3 fixes + +0.9.1 +===== +- 'wheel sign' uses the keys generated by 'wheel keygen' (instead of generating + a new key at random each time) +- Python 2/3 encoding/decoding fixes +- Run tests on Python 2.6 (without signature verification) + +0.9 +=== +- Updated digital signatures scheme +- Python 3 support for digital signatures +- Always verify RECORD hashes on extract +- "wheel" command line tool to sign, verify, unpack wheel files + +0.8 +=== +- none/any draft pep tags update +- improved wininst2wheel script +- doc changes and other improvements + +0.7 +=== +- sort .dist-info at end of wheel archive +- Windows & Python 3 fixes from Paul Moore +- pep8 +- scripts to convert wininst & egg to wheel + +0.6 +=== +- require distribute >= 0.6.28 +- stop using verlib + +0.5 +=== +- working pretty well + +0.4.2 +===== +- hyphenated name fix + +0.4 +=== +- improve test coverage +- improve Windows compatibility +- include tox.ini courtesy of Marc Abramowitz +- draft hmac sha-256 signing function + +0.3 +=== +- prototype egg2wheel conversion script + +0.2 +=== +- Python 3 compatibility + +0.1 +=== +- Initial version + + diff --git a/env/lib/python3.6/site-packages/wheel-0.31.1.dist-info/RECORD b/env/lib/python3.6/site-packages/wheel-0.31.1.dist-info/RECORD new file mode 100644 index 0000000..4944068 --- /dev/null +++ b/env/lib/python3.6/site-packages/wheel-0.31.1.dist-info/RECORD @@ -0,0 +1,42 @@ +wheel/__init__.py,sha256=EJBbDrY1g5CaE8xmai7elVg3eKNOnD6TxhfFTdxLNDY,96 +wheel/__main__.py,sha256=K--m7mq-27NO0fm-a8KlthkucCe0w_-0hVxL3uDujkU,419 +wheel/archive.py,sha256=RbdcLcuNYMiku7N1VbM7A3qbkSb6RkodHaoqY9CRVjs,2426 +wheel/bdist_wheel.py,sha256=PB76eFyXFx-5eHhsPj1DODAmW7b6rNM0rmS44PSY9VI,15904 +wheel/egg2wheel.py,sha256=yY5j8r3YbSAxT-bf7xKkktRAQmEuatH_H_r66Tvb_uk,3087 +wheel/install.py,sha256=19vooBHSrL8v7V8npfBdhEtddSvE4B-tcdZVJbs67W0,18989 +wheel/metadata.py,sha256=729-wUZkyMhbwz2PUPC3pEXRis1EfVWEFBiS3qBJEYU,4454 +wheel/paths.py,sha256=OAtaJgCivlKvJKw1qC3YbJypvp2d38Eka8GQWdBWNZw,1129 +wheel/pep425tags.py,sha256=Lk9zYm1rrHG1X3RKlf9plcwpsoSZT8UR7fG3jhaoZrQ,5760 +wheel/pkginfo.py,sha256=GR76kupQzn1x9sKDaXuE6B6FsZ4OkfRtG7pndlXPvQ4,1257 +wheel/util.py,sha256=ewwxdLSStBtYYW6k-VxSjq9MZtfU7SaSAiknplRi-wk,4140 +wheel/wininst2wheel.py,sha256=PRniuPqQ70Mi-fc7NAh_jgkHk3p-qBHvpcanPqvBxk0,7791 +wheel/signatures/__init__.py,sha256=O7kZICZvXxN5YRkCYrPmAEr1LpGaZKJh5sLPWIRIoYE,3766 +wheel/signatures/djbec.py,sha256=C_tRkVy1oI4_lupYIF18xQDsv4kjs9x9fwP1O1yoOZc,7068 +wheel/signatures/ed25519py.py,sha256=nFKDMq4LW2iJKk4IZKMxY46GyZNYPKxuWha9xYHk9lE,1669 +wheel/signatures/keys.py,sha256=k4j4yGZL31Dt2pa5TneIEeq6qkVIXEPExmFxiZxpE1Y,3299 +wheel/tool/__init__.py,sha256=a_seWF51T_3mtbiOjinbqxbVsf4HjhP-BQw7PVahFjU,13587 +wheel-0.31.1.dist-info/LICENSE.txt,sha256=zKniDGrx_Pv2lAjzd3aShsvuvN7TNhAMm0o_NfvmNeQ,1125 +wheel-0.31.1.dist-info/METADATA,sha256=tYscYj_1jH3_C-SNImnqtd_XCUp79Yo5SBRZYcl9qR8,12676 +wheel-0.31.1.dist-info/RECORD,, +wheel-0.31.1.dist-info/WHEEL,sha256=gduuPyBvFJQSQ0zdyxF7k0zynDXbIbvg5ZBHoXum5uk,110 +wheel-0.31.1.dist-info/entry_points.txt,sha256=pTyeGVsucyfr_BXe5OQKuA1Bp5YKaIAWy5pejkq4Qx0,109 +wheel-0.31.1.dist-info/top_level.txt,sha256=HxSBIbgEstMPe4eFawhA66Mq-QYHMopXVoAncfjb_1c,6 +../../../bin/wheel,sha256=wUA-V2NrR1rk0Mi7b3VGmP-nOMJEQx46yOTyAXKrLfA,278 +wheel-0.31.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 
+wheel/signatures/__pycache__/djbec.cpython-36.pyc,, +wheel/signatures/__pycache__/keys.cpython-36.pyc,, +wheel/signatures/__pycache__/ed25519py.cpython-36.pyc,, +wheel/signatures/__pycache__/__init__.cpython-36.pyc,, +wheel/__pycache__/util.cpython-36.pyc,, +wheel/__pycache__/bdist_wheel.cpython-36.pyc,, +wheel/__pycache__/archive.cpython-36.pyc,, +wheel/__pycache__/wininst2wheel.cpython-36.pyc,, +wheel/__pycache__/paths.cpython-36.pyc,, +wheel/__pycache__/__main__.cpython-36.pyc,, +wheel/__pycache__/pep425tags.cpython-36.pyc,, +wheel/__pycache__/install.cpython-36.pyc,, +wheel/__pycache__/egg2wheel.cpython-36.pyc,, +wheel/__pycache__/pkginfo.cpython-36.pyc,, +wheel/__pycache__/metadata.cpython-36.pyc,, +wheel/__pycache__/__init__.cpython-36.pyc,, +wheel/tool/__pycache__/__init__.cpython-36.pyc,, diff --git a/env/lib/python3.6/site-packages/wheel-0.31.1.dist-info/WHEEL b/env/lib/python3.6/site-packages/wheel-0.31.1.dist-info/WHEEL new file mode 100644 index 0000000..1316c41 --- /dev/null +++ b/env/lib/python3.6/site-packages/wheel-0.31.1.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.31.1) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/env/lib/python3.6/site-packages/wheel-0.31.1.dist-info/entry_points.txt b/env/lib/python3.6/site-packages/wheel-0.31.1.dist-info/entry_points.txt new file mode 100644 index 0000000..4ad253e --- /dev/null +++ b/env/lib/python3.6/site-packages/wheel-0.31.1.dist-info/entry_points.txt @@ -0,0 +1,6 @@ +[console_scripts] +wheel = wheel.tool:main + +[distutils.commands] +bdist_wheel = wheel.bdist_wheel:bdist_wheel + diff --git a/env/lib/python3.6/site-packages/wheel-0.31.1.dist-info/top_level.txt b/env/lib/python3.6/site-packages/wheel-0.31.1.dist-info/top_level.txt new file mode 100644 index 0000000..2309722 --- /dev/null +++ b/env/lib/python3.6/site-packages/wheel-0.31.1.dist-info/top_level.txt @@ -0,0 +1 @@ +wheel diff --git a/env/lib/python3.6/site-packages/wheel/archive.py b/env/lib/python3.6/site-packages/wheel/archive.py new file mode 100644 index 0000000..a11aa60 --- /dev/null +++ b/env/lib/python3.6/site-packages/wheel/archive.py @@ -0,0 +1,77 @@ +""" +Archive tools for wheel. +""" + +import os +import os.path +import time +import zipfile +from distutils import log + + +def archive_wheelfile(base_name, base_dir): + """Archive all files under `base_dir` in a whl file and name it like + `base_name`. + """ + olddir = os.path.abspath(os.curdir) + base_name = os.path.abspath(base_name) + try: + os.chdir(base_dir) + return make_wheelfile_inner(base_name) + finally: + os.chdir(olddir) + + +def make_wheelfile_inner(base_name, base_dir='.'): + """Create a whl file from all the files under 'base_dir'. + + Places .dist-info at the end of the archive.""" + + zip_filename = base_name + ".whl" + + log.info("creating '%s' and adding '%s' to it", zip_filename, base_dir) + + # Some applications need reproducible .whl files, but they can't do this + # without forcing the timestamp of the individual ZipInfo objects. See + # issue #143. 
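+    # e.g. building with SOURCE_DATE_EPOCH=1510668000 set stamps every archive member with the same UTC time, so repeated builds of identical inputs can hash identically.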
+ timestamp = os.environ.get('SOURCE_DATE_EPOCH') + if timestamp is None: + date_time = None + else: + date_time = time.gmtime(int(timestamp))[0:6] + + score = {'WHEEL': 1, 'METADATA': 2, 'RECORD': 3} + + def writefile(path, date_time): + st = os.stat(path) + if date_time is None: + mtime = time.gmtime(st.st_mtime) + date_time = mtime[0:6] + zinfo = zipfile.ZipInfo(path, date_time) + zinfo.external_attr = st.st_mode << 16 + zinfo.compress_type = zipfile.ZIP_DEFLATED + with open(path, 'rb') as fp: + zip.writestr(zinfo, fp.read()) + log.info("adding '%s'" % path) + + with zipfile.ZipFile(zip_filename, "w", compression=zipfile.ZIP_DEFLATED, + allowZip64=True) as zip: + deferred = [] + for dirpath, dirnames, filenames in os.walk(base_dir): + # Sort the directory names so that `os.walk` will walk them in a + # defined order on the next iteration. + dirnames.sort() + for name in sorted(filenames): + path = os.path.normpath(os.path.join(dirpath, name)) + + if os.path.isfile(path): + if dirpath.endswith('.dist-info'): + deferred.append((score.get(name, 0), path)) + else: + writefile(path, date_time) + + deferred.sort() + for score, path in deferred: + writefile(path, date_time) + + return zip_filename diff --git a/env/lib/python3.6/site-packages/wheel/bdist_wheel.py b/env/lib/python3.6/site-packages/wheel/bdist_wheel.py new file mode 100644 index 0000000..119e555 --- /dev/null +++ b/env/lib/python3.6/site-packages/wheel/bdist_wheel.py @@ -0,0 +1,409 @@ +""" +Create a wheel (.whl) distribution. + +A wheel is a built archive format. +""" + +import csv +import hashlib +import os +import subprocess +import shutil +import sys +import re +from email.generator import Generator +from distutils.core import Command +from distutils.sysconfig import get_python_version +from distutils import log as logger +from shutil import rmtree + +import pkg_resources + +from .pep425tags import get_abbr_impl, get_impl_ver, get_abi_tag, get_platform +from .util import native, open_for_csv +from .archive import archive_wheelfile +from .pkginfo import write_pkg_info +from .metadata import pkginfo_to_metadata +from . import pep425tags +from . import __version__ as wheel_version + + +safe_name = pkg_resources.safe_name +safe_version = pkg_resources.safe_version + +PY_LIMITED_API_PATTERN = r'cp3\d' + + +def safer_name(name): + return safe_name(name).replace('-', '_') + + +def safer_version(version): + return safe_version(version).replace('-', '_') + + +class bdist_wheel(Command): + + description = 'create a wheel distribution' + + user_options = [('bdist-dir=', 'b', + "temporary directory for creating the distribution"), + ('plat-name=', 'p', + "platform name to embed in generated filenames " + "(default: %s)" % get_platform()), + ('keep-temp', 'k', + "keep the pseudo-installation tree around after " + + "creating the distribution archive"), + ('dist-dir=', 'd', + "directory to put final built distributions in"), + ('skip-build', None, + "skip rebuilding everything (for testing/debugging)"), + ('relative', None, + "build the archive using relative paths" + "(default: false)"), + ('owner=', 'u', + "Owner name used when creating a tar file" + " [default: current user]"), + ('group=', 'g', + "Group name used when creating a tar file" + " [default: current group]"), + ('universal', None, + "make a universal wheel" + " (default: false)"), + ('python-tag=', None, + "Python implementation compatibility tag" + " (default: py%s)" % get_impl_ver()[0]), + ('build-number=', None, + "Build number for this particular version. 
" + "As specified in PEP-0427, this must start with a digit. " + "[default: None]"), + ('py-limited-api=', None, + "Python tag (cp32|cp33|cpNN) for abi3 wheel tag" + " (default: false)"), + ] + + boolean_options = ['keep-temp', 'skip-build', 'relative', 'universal'] + + def initialize_options(self): + self.bdist_dir = None + self.data_dir = None + self.plat_name = None + self.plat_tag = None + self.format = 'zip' + self.keep_temp = False + self.dist_dir = None + self.distinfo_dir = None + self.egginfo_dir = None + self.root_is_pure = None + self.skip_build = None + self.relative = False + self.owner = None + self.group = None + self.universal = False + self.python_tag = 'py' + get_impl_ver()[0] + self.build_number = None + self.py_limited_api = False + self.plat_name_supplied = False + + def finalize_options(self): + if self.bdist_dir is None: + bdist_base = self.get_finalized_command('bdist').bdist_base + self.bdist_dir = os.path.join(bdist_base, 'wheel') + + self.data_dir = self.wheel_dist_name + '.data' + self.plat_name_supplied = self.plat_name is not None + + need_options = ('dist_dir', 'plat_name', 'skip_build') + + self.set_undefined_options('bdist', + *zip(need_options, need_options)) + + self.root_is_pure = not (self.distribution.has_ext_modules() + or self.distribution.has_c_libraries()) + + if self.py_limited_api and not re.match(PY_LIMITED_API_PATTERN, self.py_limited_api): + raise ValueError("py-limited-api must match '%s'" % PY_LIMITED_API_PATTERN) + + # Support legacy [wheel] section for setting universal + wheel = self.distribution.get_option_dict('wheel') + if 'universal' in wheel: + # please don't define this in your global configs + val = wheel['universal'][1].strip() + if val.lower() in ('1', 'true', 'yes'): + self.universal = True + + if self.build_number is not None and not self.build_number[:1].isdigit(): + raise ValueError("Build tag (build-number) must start with a digit.") + + @property + def wheel_dist_name(self): + """Return distribution full name with - replaced with _""" + components = (safer_name(self.distribution.get_name()), + safer_version(self.distribution.get_version())) + if self.build_number: + components += (self.build_number,) + return '-'.join(components) + + def get_tag(self): + # bdist sets self.plat_name if unset, we should only use it for purepy + # wheels if the user supplied it. + if self.plat_name_supplied: + plat_name = self.plat_name + elif self.root_is_pure: + plat_name = 'any' + else: + plat_name = self.plat_name or get_platform() + if plat_name in ('linux-x86_64', 'linux_x86_64') and sys.maxsize == 2147483647: + plat_name = 'linux_i686' + plat_name = plat_name.replace('-', '_').replace('.', '_') + + if self.root_is_pure: + if self.universal: + impl = 'py2.py3' + else: + impl = self.python_tag + tag = (impl, 'none', plat_name) + else: + impl_name = get_abbr_impl() + impl_ver = get_impl_ver() + impl = impl_name + impl_ver + # We don't work on CPython 3.1, 3.0. 
+ if self.py_limited_api and (impl_name + impl_ver).startswith('cp3'): + impl = self.py_limited_api + abi_tag = 'abi3' + else: + abi_tag = str(get_abi_tag()).lower() + tag = (impl, abi_tag, plat_name) + supported_tags = pep425tags.get_supported( + supplied_platform=plat_name if self.plat_name_supplied else None) + # XXX switch to this alternate implementation for non-pure: + if not self.py_limited_api: + assert tag == supported_tags[0], "%s != %s" % (tag, supported_tags[0]) + assert tag in supported_tags, "would build wheel with unsupported tag {}".format(tag) + return tag + + def get_archive_basename(self): + """Return archive name without extension""" + + impl_tag, abi_tag, plat_tag = self.get_tag() + + archive_basename = "%s-%s-%s-%s" % ( + self.wheel_dist_name, + impl_tag, + abi_tag, + plat_tag) + return archive_basename + + def run(self): + build_scripts = self.reinitialize_command('build_scripts') + build_scripts.executable = 'python' + + if not self.skip_build: + self.run_command('build') + + install = self.reinitialize_command('install', + reinit_subcommands=True) + install.root = self.bdist_dir + install.compile = False + install.skip_build = self.skip_build + install.warn_dir = False + + # A wheel without setuptools scripts is more cross-platform. + # Use the (undocumented) `no_ep` option to setuptools' + # install_scripts command to avoid creating entry point scripts. + install_scripts = self.reinitialize_command('install_scripts') + install_scripts.no_ep = True + + # Use a custom scheme for the archive, because we have to decide + # at installation time which scheme to use. + for key in ('headers', 'scripts', 'data', 'purelib', 'platlib'): + setattr(install, + 'install_' + key, + os.path.join(self.data_dir, key)) + + basedir_observed = '' + + if os.name == 'nt': + # win32 barfs if any of these are ''; could be '.'? 
+ # (distutils.command.install:change_roots bug) + basedir_observed = os.path.normpath(os.path.join(self.data_dir, '..')) + self.install_libbase = self.install_lib = basedir_observed + + setattr(install, + 'install_purelib' if self.root_is_pure else 'install_platlib', + basedir_observed) + + logger.info("installing to %s", self.bdist_dir) + + self.run_command('install') + + archive_basename = self.get_archive_basename() + + pseudoinstall_root = os.path.join(self.dist_dir, archive_basename) + if not self.relative: + archive_root = self.bdist_dir + else: + archive_root = os.path.join( + self.bdist_dir, + self._ensure_relative(install.install_base)) + + self.set_undefined_options( + 'install_egg_info', ('target', 'egginfo_dir')) + self.distinfo_dir = os.path.join(self.bdist_dir, + '%s.dist-info' % self.wheel_dist_name) + self.egg2dist(self.egginfo_dir, + self.distinfo_dir) + + self.write_wheelfile(self.distinfo_dir) + + self.write_record(self.bdist_dir, self.distinfo_dir) + + # Make the archive + if not os.path.exists(self.dist_dir): + os.makedirs(self.dist_dir) + wheel_name = archive_wheelfile(pseudoinstall_root, archive_root) + + # Sign the archive + if 'WHEEL_TOOL' in os.environ: + subprocess.call([os.environ['WHEEL_TOOL'], 'sign', wheel_name]) + + # Add to 'Distribution.dist_files' so that the "upload" command works + getattr(self.distribution, 'dist_files', []).append( + ('bdist_wheel', get_python_version(), wheel_name)) + + if not self.keep_temp: + logger.info('removing %s', self.bdist_dir) + if not self.dry_run: + rmtree(self.bdist_dir) + + def write_wheelfile(self, wheelfile_base, generator='bdist_wheel (' + wheel_version + ')'): + from email.message import Message + msg = Message() + msg['Wheel-Version'] = '1.0' # of the spec + msg['Generator'] = generator + msg['Root-Is-Purelib'] = str(self.root_is_pure).lower() + if self.build_number is not None: + msg['Build'] = self.build_number + + # Doesn't work for bdist_wininst + impl_tag, abi_tag, plat_tag = self.get_tag() + for impl in impl_tag.split('.'): + for abi in abi_tag.split('.'): + for plat in plat_tag.split('.'): + msg['Tag'] = '-'.join((impl, abi, plat)) + + wheelfile_path = os.path.join(wheelfile_base, 'WHEEL') + logger.info('creating %s', wheelfile_path) + with open(wheelfile_path, 'w') as f: + Generator(f, maxheaderlen=0).flatten(msg) + + def _ensure_relative(self, path): + # copied from dir_util, deleted + drive, path = os.path.splitdrive(path) + if path[0:1] == os.sep: + path = drive + path[1:] + return path + + def license_file(self): + """Return license filename from a license-file key in setup.cfg, or None.""" + metadata = self.distribution.get_option_dict('metadata') + if 'license_file' not in metadata: + return None + return metadata['license_file'][1] + + def egg2dist(self, egginfo_path, distinfo_path): + """Convert an .egg-info directory into a .dist-info directory""" + def adios(p): + """Appropriately delete directory, file or link.""" + if os.path.exists(p) and not os.path.islink(p) and os.path.isdir(p): + shutil.rmtree(p) + elif os.path.exists(p): + os.unlink(p) + + adios(distinfo_path) + + if not os.path.exists(egginfo_path): + # There is no egg-info. This is probably because the egg-info + # file/directory is not named matching the distribution name used + # to name the archive file. Check for this case and report + # accordingly. 
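For reference, write_wheelfile above emits an email-header style WHEEL file with one Tag line per compatibility tag; the wheel-0.31.1 dist-info added earlier in this patch is a concrete instance:

    Wheel-Version: 1.0
    Generator: bdist_wheel (0.31.1)
    Root-Is-Purelib: true
    Tag: py2-none-any
    Tag: py3-none-any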
+ import glob + pat = os.path.join(os.path.dirname(egginfo_path), '*.egg-info') + possible = glob.glob(pat) + err = "Egg metadata expected at %s but not found" % (egginfo_path,) + if possible: + alt = os.path.basename(possible[0]) + err += " (%s found - possible misnamed archive file?)" % (alt,) + + raise ValueError(err) + + if os.path.isfile(egginfo_path): + # .egg-info is a single file + pkginfo_path = egginfo_path + pkg_info = pkginfo_to_metadata(egginfo_path, egginfo_path) + os.mkdir(distinfo_path) + else: + # .egg-info is a directory + pkginfo_path = os.path.join(egginfo_path, 'PKG-INFO') + pkg_info = pkginfo_to_metadata(egginfo_path, pkginfo_path) + + # ignore common egg metadata that is useless to wheel + shutil.copytree(egginfo_path, distinfo_path, + ignore=lambda x, y: {'PKG-INFO', 'requires.txt', 'SOURCES.txt', + 'not-zip-safe'} + ) + + # delete dependency_links if it is only whitespace + dependency_links_path = os.path.join(distinfo_path, 'dependency_links.txt') + with open(dependency_links_path, 'r') as dependency_links_file: + dependency_links = dependency_links_file.read().strip() + if not dependency_links: + adios(dependency_links_path) + + write_pkg_info(os.path.join(distinfo_path, 'METADATA'), pkg_info) + + # XXX heuristically copy any LICENSE/LICENSE.txt? + license = self.license_file() + if license: + license_filename = 'LICENSE.txt' + shutil.copy(license, os.path.join(distinfo_path, license_filename)) + + adios(egginfo_path) + + def write_record(self, bdist_dir, distinfo_dir): + from .util import urlsafe_b64encode + + record_path = os.path.join(distinfo_dir, 'RECORD') + record_relpath = os.path.relpath(record_path, bdist_dir) + + def walk(): + for dir, dirs, files in os.walk(bdist_dir): + dirs.sort() + for f in sorted(files): + yield os.path.join(dir, f) + + def skip(path): + """Wheel hashes every possible file.""" + return (path == record_relpath) + + with open_for_csv(record_path, 'w+') as record_file: + writer = csv.writer(record_file) + for path in walk(): + relpath = os.path.relpath(path, bdist_dir) + if skip(relpath): + hash = '' + size = '' + else: + with open(path, 'rb') as f: + data = f.read() + digest = hashlib.sha256(data).digest() + hash = 'sha256=' + native(urlsafe_b64encode(digest)) + size = len(data) + + record_path = os.path.relpath(path, bdist_dir).replace(os.path.sep, '/') + + # On Python 2, re-encode the path as UTF-8 from the default file system encoding + if isinstance(record_path, bytes): + record_path = record_path.decode(sys.getfilesystemencoding()).encode('utf-8') + + writer.writerow((record_path, hash, size)) diff --git a/env/lib/python3.6/site-packages/wheel/egg2wheel.py b/env/lib/python3.6/site-packages/wheel/egg2wheel.py new file mode 100644 index 0000000..41ba0b3 --- /dev/null +++ b/env/lib/python3.6/site-packages/wheel/egg2wheel.py @@ -0,0 +1,97 @@ +#!/usr/bin/env python +import distutils.dist +import os.path +import re +import shutil +import sys +import tempfile +import zipfile +from argparse import ArgumentParser +from distutils.archive_util import make_archive +from glob import iglob + +import wheel.bdist_wheel +from wheel.tool import WheelError +from wheel.wininst2wheel import _bdist_wheel_tag + +egg_info_re = re.compile(r''' + (?P.+?)-(?P.+?) + (-(?Ppy\d\.\d) + (-(?P.+?))? 
+ )?.egg$''', re.VERBOSE) + + +def egg2wheel(egg_path, dest_dir): + filename = os.path.basename(egg_path) + match = egg_info_re.match(filename) + if not match: + raise WheelError('Invalid egg file name: {}'.format(filename)) + + egg_info = match.groupdict() + dir = tempfile.mkdtemp(suffix="_e2w") + if os.path.isfile(egg_path): + # assume we have a bdist_egg otherwise + egg = zipfile.ZipFile(egg_path) + egg.extractall(dir) + else: + # support buildout-style installed eggs directories + for pth in os.listdir(egg_path): + src = os.path.join(egg_path, pth) + if os.path.isfile(src): + shutil.copy2(src, dir) + else: + shutil.copytree(src, os.path.join(dir, pth)) + + pyver = egg_info['pyver'] + if pyver: + pyver = pyver.replace('.', '') + + arch = (egg_info['arch'] or 'any').replace('.', '_').replace('-', '_') + + # assume all binary eggs are for CPython + abi = 'cp' + pyver[2:] if arch != 'any' else 'none' + + root_is_purelib = egg_info['arch'] is None + if root_is_purelib: + bw = wheel.bdist_wheel.bdist_wheel(distutils.dist.Distribution()) + else: + bw = _bdist_wheel_tag(distutils.dist.Distribution()) + + bw.root_is_pure = root_is_purelib + bw.python_tag = pyver + bw.plat_name_supplied = True + bw.plat_name = egg_info['arch'] or 'any' + if not root_is_purelib: + bw.full_tag_supplied = True + bw.full_tag = (pyver, abi, arch) + + dist_info_dir = os.path.join(dir, '{name}-{ver}.dist-info'.format(**egg_info)) + bw.egg2dist(os.path.join(dir, 'EGG-INFO'), dist_info_dir) + bw.write_wheelfile(dist_info_dir, generator='egg2wheel') + bw.write_record(dir, dist_info_dir) + wheel_name = '{name}-{ver}-{pyver}-{}-{}'.format(abi, arch, **egg_info) + filename = make_archive(os.path.join(dest_dir, wheel_name), 'zip', root_dir=dir) + os.rename(filename, filename[:-3] + 'whl') + shutil.rmtree(dir) + + +def main(): + parser = ArgumentParser() + parser.add_argument('eggs', nargs='*', help="Eggs to convert") + parser.add_argument('--dest-dir', '-d', default=os.path.curdir, + help="Directory to store wheels (default %(default)s)") + parser.add_argument('--verbose', '-v', action='store_true') + args = parser.parse_args() + for pat in args.eggs: + for egg in iglob(pat): + if args.verbose: + print("{}... ".format(egg)) + sys.stdout.flush() + + egg2wheel(egg, args.dest_dir) + if args.verbose: + print("OK") + + +if __name__ == "__main__": + main() diff --git a/env/lib/python3.6/site-packages/wheel/install.py b/env/lib/python3.6/site-packages/wheel/install.py new file mode 100644 index 0000000..87f2e49 --- /dev/null +++ b/env/lib/python3.6/site-packages/wheel/install.py @@ -0,0 +1,512 @@ +""" +Operations on existing wheel files, including basic installation. +""" +# XXX see patched pip to install + +from __future__ import print_function + +import csv +import hashlib +import os.path +import re +import shutil +import sys +import warnings +import zipfile + +from . import signatures +from .paths import get_install_paths +from .pep425tags import get_supported +from .pkginfo import read_pkg_info_bytes +from .util import ( + urlsafe_b64encode, from_json, urlsafe_b64decode, native, binary, HashingFile, open_for_csv) + +try: + _big_number = sys.maxsize +except NameError: + _big_number = sys.maxint + +# The next major version after this version of the 'wheel' tool: +VERSION_TOO_HIGH = (1, 0) + +# Non-greedy matching of an optional build number may be too clever (more +# invalid wheel filenames will match). Separate regex for .dist-info? +WHEEL_INFO_RE = re.compile( + r"""^(?P(?P.+?)-(?P\d.*?))(-(?P\d.*?))? 
+ -(?P[a-z].+?)-(?P.+?)-(?P.+?)(\.whl|\.dist-info)$""", + re.VERBOSE).match + + +def parse_version(version): + """Use parse_version from pkg_resources or distutils as available.""" + global parse_version + try: + from pkg_resources import parse_version + except ImportError: + from distutils.version import LooseVersion as parse_version + return parse_version(version) + + +class reify(object): + """Put the result of a method which uses this (non-data) + descriptor decorator in the instance dict after the first call, + effectively replacing the decorator with an instance variable. + """ + + def __init__(self, wrapped): + self.wrapped = wrapped + self.__doc__ = wrapped.__doc__ + + def __get__(self, inst, objtype=None): + if inst is None: + return self + val = self.wrapped(inst) + setattr(inst, self.wrapped.__name__, val) + return val + + +class BadWheelFile(ValueError): + pass + + +class WheelFile(object): + """Parse wheel-specific attributes from a wheel (.whl) file and offer + basic installation and verification support. + + WheelFile can be used to simply parse a wheel filename by avoiding the + methods that require the actual file contents.""" + + WHEEL_INFO = "WHEEL" + RECORD = "RECORD" + + def __init__(self, + filename, + fp=None, + append=False, + context=get_supported): + """ + :param fp: A seekable file-like object or None to open(filename). + :param append: Open archive in append mode. + :param context: Function returning list of supported tags. Wheels + must have the same context to be sortable. + """ + self.filename = filename + self.fp = fp + self.append = append + self.context = context + basename = os.path.basename(filename) + self.parsed_filename = WHEEL_INFO_RE(basename) + if not basename.endswith('.whl') or self.parsed_filename is None: + raise BadWheelFile("Bad filename '%s'" % filename) + + def __repr__(self): + return self.filename + + @property + def distinfo_name(self): + return "%s.dist-info" % self.parsed_filename.group('namever') + + @property + def datadir_name(self): + return "%s.data" % self.parsed_filename.group('namever') + + @property + def record_name(self): + return "%s/%s" % (self.distinfo_name, self.RECORD) + + @property + def wheelinfo_name(self): + return "%s/%s" % (self.distinfo_name, self.WHEEL_INFO) + + @property + def tags(self): + """A wheel file is compatible with the Cartesian product of the + period-delimited tags in its filename. + To choose a wheel file among several candidates having the same + distribution version 'ver', an installer ranks each triple of + (pyver, abi, plat) that its Python installation can run, sorting + the wheels by the best-ranked tag it supports and then by their + arity which is just len(list(compatibility_tags)). + """ + tags = self.parsed_filename.groupdict() + for pyver in tags['pyver'].split('.'): + for abi in tags['abi'].split('.'): + for plat in tags['plat'].split('.'): + yield (pyver, abi, plat) + + compatibility_tags = tags + + @property + def arity(self): + """The number of compatibility tags the wheel declares.""" + return len(list(self.compatibility_tags)) + + @property + def rank(self): + """ + Lowest index of any of this wheel's tags in self.context(), and the + arity e.g. (0, 1) + """ + return self.compatibility_rank(self.context()) + + @property + def compatible(self): + return self.rank[0] != _big_number # bad API! + + # deprecated: + def compatibility_rank(self, supported): + """Rank the wheel against the supported tags. Smaller ranks are more + compatible! 
+ + :param supported: A list of compatibility tags that the current + Python implemenation can run. + """ + preferences = [] + for tag in self.compatibility_tags: + try: + preferences.append(supported.index(tag)) + # Tag not present + except ValueError: + pass + if len(preferences): + return (min(preferences), self.arity) + return (_big_number, 0) + + # deprecated + def supports_current_python(self, x): + assert self.context == x, 'context mismatch' + return self.compatible + + # Comparability. + # Wheels are equal if they refer to the same file. + # If two wheels are not equal, compare based on (in this order): + # 1. Name + # 2. Version + # 3. Compatibility rank + # 4. Filename (as a tiebreaker) + @property + def _sort_key(self): + return (self.parsed_filename.group('name'), + parse_version(self.parsed_filename.group('ver')), + tuple(-x for x in self.rank), + self.filename) + + def __eq__(self, other): + return self.filename == other.filename + + def __ne__(self, other): + return self.filename != other.filename + + def __lt__(self, other): + if self.context != other.context: + raise TypeError("{0}.context != {1}.context".format(self, other)) + + return self._sort_key < other._sort_key + + # XXX prune + + sn = self.parsed_filename.group('name') + on = other.parsed_filename.group('name') + if sn != on: + return sn < on + sv = parse_version(self.parsed_filename.group('ver')) + ov = parse_version(other.parsed_filename.group('ver')) + if sv != ov: + return sv < ov + # Compatibility + if self.context != other.context: + raise TypeError("{0}.context != {1}.context".format(self, other)) + sc = self.rank + oc = other.rank + if sc is not None and oc is not None and sc != oc: + # Smaller compatibility ranks are "better" than larger ones, + # so we have to reverse the sense of the comparison here! + return sc > oc + elif sc is None and oc is not None: + return False + return self.filename < other.filename + + def __gt__(self, other): + return other < self + + def __le__(self, other): + return self == other or self < other + + def __ge__(self, other): + return self == other or other < self + + # + # Methods using the file's contents: + # + + @reify + def zipfile(self): + mode = "r" + if self.append: + mode = "a" + vzf = VerifyingZipFile(self.fp if self.fp else self.filename, mode) + if not self.append: + self.verify(vzf) + return vzf + + @reify + def parsed_wheel_info(self): + """Parse wheel metadata (the .data/WHEEL file)""" + return read_pkg_info_bytes(self.zipfile.read(self.wheelinfo_name)) + + def check_version(self): + version = self.parsed_wheel_info['Wheel-Version'] + if tuple(map(int, version.split('.'))) >= VERSION_TOO_HIGH: + raise ValueError("Wheel version is too high") + + @reify + def install_paths(self): + """ + Consult distutils to get the install paths for our dist. A dict with + ('purelib', 'platlib', 'headers', 'scripts', 'data'). + + We use the name from our filename as the dist name, which means headers + could be installed in the wrong place if the filesystem-escaped name + is different than the Name. Who cares? + """ + name = self.parsed_filename.group('name') + return get_install_paths(name) + + def install(self, force=False, overrides={}): + """ + Install the wheel into site-packages. 
+ """ + + # Utility to get the target directory for a particular key + def get_path(key): + return overrides.get(key) or self.install_paths[key] + + # The base target location is either purelib or platlib + if self.parsed_wheel_info['Root-Is-Purelib'] == 'true': + root = get_path('purelib') + else: + root = get_path('platlib') + + # Parse all the names in the archive + name_trans = {} + for info in self.zipfile.infolist(): + name = info.filename + # Zip files can contain entries representing directories. + # These end in a '/'. + # We ignore these, as we create directories on demand. + if name.endswith('/'): + continue + + # Pathnames in a zipfile namelist are always /-separated. + # In theory, paths could start with ./ or have other oddities + # but this won't happen in practical cases of well-formed wheels. + # We'll cover the simple case of an initial './' as it's both easy + # to do and more common than most other oddities. + if name.startswith('./'): + name = name[2:] + + # Split off the base directory to identify files that are to be + # installed in non-root locations + basedir, sep, filename = name.partition('/') + if sep and basedir == self.datadir_name: + # Data file. Target destination is elsewhere + key, sep, filename = filename.partition('/') + if not sep: + raise ValueError("Invalid filename in wheel: {0}".format(name)) + target = get_path(key) + else: + # Normal file. Target destination is root + key = '' + target = root + filename = name + + # Map the actual filename from the zipfile to its intended target + # directory and the pathname relative to that directory. + dest = os.path.normpath(os.path.join(target, filename)) + name_trans[info] = (key, target, filename, dest) + + # We're now ready to start processing the actual install. The process + # is as follows: + # 1. Prechecks - is the wheel valid, is its declared architecture + # OK, etc. [[Responsibility of the caller]] + # 2. Overwrite check - do any of the files to be installed already + # exist? + # 3. Actual install - put the files in their target locations. + # 4. Update RECORD - write a suitably modified RECORD file to + # reflect the actual installed paths. + + if not force: + for info, v in name_trans.items(): + k = info.filename + key, target, filename, dest = v + if os.path.exists(dest): + raise ValueError( + "Wheel file {0} would overwrite {1}. Use force if this is intended".format( + k, dest)) + + # Get the name of our executable, for use when replacing script + # wrapper hashbang lines. + # We encode it using getfilesystemencoding, as that is "the name of + # the encoding used to convert Unicode filenames into system file + # names". + exename = sys.executable.encode(sys.getfilesystemencoding()) + record_data = [] + record_name = self.distinfo_name + '/RECORD' + for info, (key, target, filename, dest) in name_trans.items(): + name = info.filename + source = self.zipfile.open(info) + # Skip the RECORD file + if name == record_name: + continue + ddir = os.path.dirname(dest) + if not os.path.isdir(ddir): + os.makedirs(ddir) + + temp_filename = dest + '.part' + try: + with HashingFile(temp_filename, 'wb') as destination: + if key == 'scripts': + hashbang = source.readline() + if hashbang.startswith(b'#!python'): + hashbang = b'#!' 
+ exename + binary(os.linesep) + destination.write(hashbang) + + shutil.copyfileobj(source, destination) + except BaseException: + if os.path.exists(temp_filename): + os.unlink(temp_filename) + + raise + + os.rename(temp_filename, dest) + reldest = os.path.relpath(dest, root) + reldest.replace(os.sep, '/') + record_data.append((reldest, destination.digest(), destination.length)) + destination.close() + source.close() + # preserve attributes (especially +x bit for scripts) + attrs = info.external_attr >> 16 + if attrs: # tends to be 0 if Windows. + os.chmod(dest, info.external_attr >> 16) + + record_name = os.path.join(root, self.record_name) + with open_for_csv(record_name, 'w+') as record_file: + writer = csv.writer(record_file) + for reldest, digest, length in sorted(record_data): + writer.writerow((reldest, digest, length)) + writer.writerow((self.record_name, '', '')) + + def verify(self, zipfile=None): + """Configure the VerifyingZipFile `zipfile` by verifying its signature + and setting expected hashes for every hash in RECORD. + Caller must complete the verification process by completely reading + every file in the archive (e.g. with extractall).""" + sig = None + if zipfile is None: + zipfile = self.zipfile + zipfile.strict = True + + record_name = '/'.join((self.distinfo_name, 'RECORD')) + sig_name = '/'.join((self.distinfo_name, 'RECORD.jws')) + # tolerate s/mime signatures: + smime_sig_name = '/'.join((self.distinfo_name, 'RECORD.p7s')) + zipfile.set_expected_hash(record_name, None) + zipfile.set_expected_hash(sig_name, None) + zipfile.set_expected_hash(smime_sig_name, None) + record = zipfile.read(record_name) + + record_digest = urlsafe_b64encode(hashlib.sha256(record).digest()) + try: + sig = from_json(native(zipfile.read(sig_name))) + except KeyError: # no signature + pass + if sig: + headers, payload = signatures.verify(sig) + if payload['hash'] != "sha256=" + native(record_digest): + msg = "RECORD.jws claimed RECORD hash {} != computed hash {}." + raise BadWheelFile(msg.format(payload['hash'], + native(record_digest))) + + reader = csv.reader((native(r, 'utf-8') for r in record.splitlines())) + + for row in reader: + filename = row[0] + hash = row[1] + if not hash: + if filename not in (record_name, sig_name): + print("%s has no hash!" % filename, file=sys.stderr) + continue + + algo, data = row[1].split('=', 1) + assert algo == "sha256", "Unsupported hash algorithm" + zipfile.set_expected_hash(filename, urlsafe_b64decode(binary(data))) + + +class VerifyingZipFile(zipfile.ZipFile): + """ZipFile that can assert that each of its extracted contents matches + an expected sha256 hash. 
Note that each file must be completely read in + order for its hash to be checked.""" + + def __init__(self, file, mode="r", + compression=zipfile.ZIP_STORED, + allowZip64=True): + super(VerifyingZipFile, self).__init__(file, mode, compression, allowZip64) + + self.strict = False + self._expected_hashes = {} + self._hash_algorithm = hashlib.sha256 + + def set_expected_hash(self, name, hash): + """ + :param name: name of zip entry + :param hash: bytes of hash (or None for "don't care") + """ + self._expected_hashes[name] = hash + + def open(self, name_or_info, mode="r", pwd=None): + """Return file-like object for 'name'.""" + # A non-monkey-patched version would contain most of zipfile.py + ef = super(VerifyingZipFile, self).open(name_or_info, mode, pwd) + if isinstance(name_or_info, zipfile.ZipInfo): + name = name_or_info.filename + else: + name = name_or_info + + if name in self._expected_hashes and self._expected_hashes[name] is not None: + expected_hash = self._expected_hashes[name] + try: + _update_crc_orig = ef._update_crc + except AttributeError: + warnings.warn('Need ZipExtFile._update_crc to implement ' + 'file hash verification (in Python >= 2.7)') + return ef + running_hash = self._hash_algorithm() + if hasattr(ef, '_eof'): # py33 + def _update_crc(data): + _update_crc_orig(data) + running_hash.update(data) + if ef._eof and running_hash.digest() != expected_hash: + raise BadWheelFile("Bad hash for file %r" % ef.name) + else: + def _update_crc(data, eof=None): + _update_crc_orig(data, eof=eof) + running_hash.update(data) + if eof and running_hash.digest() != expected_hash: + raise BadWheelFile("Bad hash for file %r" % ef.name) + ef._update_crc = _update_crc + elif self.strict and name not in self._expected_hashes: + raise BadWheelFile("No expected hash for file %r" % ef.name) + return ef + + def pop(self): + """Truncate the last file off this zipfile. + Assumes infolist() is in the same order as the files (true for + ordinary zip files created by Python)""" + if not self.fp: + raise RuntimeError( + "Attempt to pop from ZIP archive that was already closed") + last = self.infolist().pop() + del self.NameToInfo[last.filename] + self.fp.seek(last.header_offset, os.SEEK_SET) + self.fp.truncate() + self._didModify = True diff --git a/env/lib/python3.6/site-packages/wheel/metadata.py b/env/lib/python3.6/site-packages/wheel/metadata.py new file mode 100644 index 0000000..6aa495b --- /dev/null +++ b/env/lib/python3.6/site-packages/wheel/metadata.py @@ -0,0 +1,130 @@ +""" +Tools for converting old- to new-style metadata. +""" + +import os.path +import re +import textwrap +from collections import namedtuple + +import pkg_resources + +from .pkginfo import read_pkg_info + +# Wheel itself is probably the only program that uses non-extras markers +# in METADATA/PKG-INFO. Support its syntax with the extra at the end only. 
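A short sketch of the conversion helpers that follow (the requirement strings are arbitrary examples): generate_requirements() maps a setup()-style extras_require dict, with the empty key holding install_requires, onto the Requires-Dist / Provides-Extra headers written into METADATA.

    from wheel.metadata import generate_requirements

    reqs = {'': ['requests>=2.0'], 'socks': ['PySocks!=1.5.7']}
    for header, value in generate_requirements(reqs):
        print('%s: %s' % (header, value))
    # Requires-Dist: requests (>=2.0)
    # Provides-Extra: socks
    # Requires-Dist: PySocks (!=1.5.7); extra == 'socks'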
+EXTRA_RE = re.compile("""^(?P.*?)(;\s*(?P.*?)(extra == '(?P.*?)')?)$""") + +MayRequiresKey = namedtuple('MayRequiresKey', ('condition', 'extra')) + + +def requires_to_requires_dist(requirement): + """Compose the version predicates for requirement in PEP 345 fashion.""" + requires_dist = [] + for op, ver in requirement.specs: + requires_dist.append(op + ver) + if not requires_dist: + return '' + return " (%s)" % ','.join(sorted(requires_dist)) + + +def convert_requirements(requirements): + """Yield Requires-Dist: strings for parsed requirements strings.""" + for req in requirements: + parsed_requirement = pkg_resources.Requirement.parse(req) + spec = requires_to_requires_dist(parsed_requirement) + extras = ",".join(parsed_requirement.extras) + if extras: + extras = "[%s]" % extras + yield (parsed_requirement.project_name + extras + spec) + + +def generate_requirements(extras_require): + """ + Convert requirements from a setup()-style dictionary to ('Requires-Dist', 'requirement') + and ('Provides-Extra', 'extra') tuples. + + extras_require is a dictionary of {extra: [requirements]} as passed to setup(), + using the empty extra {'': [requirements]} to hold install_requires. + """ + for extra, depends in extras_require.items(): + condition = '' + if extra and ':' in extra: # setuptools extra:condition syntax + extra, condition = extra.split(':', 1) + extra = pkg_resources.safe_extra(extra) + if extra: + yield ('Provides-Extra', extra) + if condition: + condition = "(" + condition + ") and " + condition += "extra == '%s'" % extra + if condition: + condition = '; ' + condition + for new_req in convert_requirements(depends): + yield ('Requires-Dist', new_req + condition) + + +def pkginfo_to_metadata(egg_info_path, pkginfo_path): + """ + Convert .egg-info directory with PKG-INFO to the Metadata 2.1 format + """ + pkg_info = read_pkg_info(pkginfo_path) + pkg_info.replace_header('Metadata-Version', '2.1') + requires_path = os.path.join(egg_info_path, 'requires.txt') + if os.path.exists(requires_path): + with open(requires_path) as requires_file: + requires = requires_file.read() + for extra, reqs in sorted(pkg_resources.split_sections(requires), + key=lambda x: x[0] or ''): + for item in generate_requirements({extra: reqs}): + pkg_info[item[0]] = item[1] + + description = pkg_info['Description'] + if description: + pkg_info.set_payload(dedent_description(pkg_info)) + del pkg_info['Description'] + + return pkg_info + + +def pkginfo_unicode(pkg_info, field): + """Hack to coax Unicode out of an email Message() - Python 3.3+""" + text = pkg_info[field] + field = field.lower() + if not isinstance(text, str): + if not hasattr(pkg_info, 'raw_items'): # Python 3.2 + return str(text) + for item in pkg_info.raw_items(): + if item[0].lower() == field: + text = item[1].encode('ascii', 'surrogateescape') \ + .decode('utf-8') + break + + return text + + +def dedent_description(pkg_info): + """ + Dedent and convert pkg_info['Description'] to Unicode. + """ + description = pkg_info['Description'] + + # Python 3 Unicode handling, sorta. + surrogates = False + if not isinstance(description, str): + surrogates = True + description = pkginfo_unicode(pkg_info, 'Description') + + description_lines = description.splitlines() + description_dedent = '\n'.join( + # if the first line of long_description is blank, + # the first line here will be indented. 
+ (description_lines[0].lstrip(), + textwrap.dedent('\n'.join(description_lines[1:])), + '\n')) + + if surrogates: + description_dedent = description_dedent \ + .encode("utf8") \ + .decode("ascii", "surrogateescape") + + return description_dedent diff --git a/env/lib/python3.6/site-packages/wheel/paths.py b/env/lib/python3.6/site-packages/wheel/paths.py new file mode 100644 index 0000000..afb3cae --- /dev/null +++ b/env/lib/python3.6/site-packages/wheel/paths.py @@ -0,0 +1,43 @@ +""" +Installation paths. + +Map the .data/ subdirectory names to install paths. +""" + +import distutils.command.install as install +import distutils.dist as dist +import os.path +import sys + + +def get_install_command(name): + # late binding due to potential monkeypatching + d = dist.Distribution({'name': name}) + i = install.install(d) + i.finalize_options() + return i + + +def get_install_paths(name): + """ + Return the (distutils) install paths for the named dist. + + A dict with ('purelib', 'platlib', 'headers', 'scripts', 'data') keys. + """ + paths = {} + + i = get_install_command(name) + + for key in install.SCHEME_KEYS: + paths[key] = getattr(i, 'install_' + key) + + # pip uses a similar path as an alternative to the system's (read-only) + # include directory: + if hasattr(sys, 'real_prefix'): # virtualenv + paths['headers'] = os.path.join(sys.prefix, + 'include', + 'site', + 'python' + sys.version[:3], + name) + + return paths diff --git a/env/lib/python3.6/site-packages/wheel/pep425tags.py b/env/lib/python3.6/site-packages/wheel/pep425tags.py new file mode 100644 index 0000000..29afdc3 --- /dev/null +++ b/env/lib/python3.6/site-packages/wheel/pep425tags.py @@ -0,0 +1,180 @@ +"""Generate and work with PEP 425 Compatibility Tags.""" + +import distutils.util +import platform +import sys +import sysconfig +import warnings + + +def get_config_var(var): + try: + return sysconfig.get_config_var(var) + except IOError as e: # pip Issue #1074 + warnings.warn("{0}".format(e), RuntimeWarning) + return None + + +def get_abbr_impl(): + """Return abbreviated implementation name.""" + impl = platform.python_implementation() + if impl == 'PyPy': + return 'pp' + elif impl == 'Jython': + return 'jy' + elif impl == 'IronPython': + return 'ip' + elif impl == 'CPython': + return 'cp' + + raise LookupError('Unknown Python implementation: ' + impl) + + +def get_impl_ver(): + """Return implementation version.""" + impl_ver = get_config_var("py_version_nodot") + if not impl_ver or get_abbr_impl() == 'pp': + impl_ver = ''.join(map(str, get_impl_version_info())) + return impl_ver + + +def get_impl_version_info(): + """Return sys.version_info-like tuple for use in decrementing the minor + version.""" + if get_abbr_impl() == 'pp': + # as per https://github.com/pypa/pip/issues/2882 + return (sys.version_info[0], sys.pypy_version_info.major, + sys.pypy_version_info.minor) + else: + return sys.version_info[0], sys.version_info[1] + + +def get_flag(var, fallback, expected=True, warn=True): + """Use a fallback method for determining SOABI flags if the needed config + var is unset or unavailable.""" + val = get_config_var(var) + if val is None: + if warn: + warnings.warn("Config variable '{0}' is unset, Python ABI tag may " + "be incorrect".format(var), RuntimeWarning, 2) + return fallback() + return val == expected + + +def get_abi_tag(): + """Return the ABI tag based on SOABI (if available) or emulate SOABI + (CPython 2, PyPy).""" + soabi = get_config_var('SOABI') + impl = get_abbr_impl() + if not soabi and impl in ('cp', 'pp') and 
hasattr(sys, 'maxunicode'): + d = '' + m = '' + u = '' + if get_flag('Py_DEBUG', + lambda: hasattr(sys, 'gettotalrefcount'), + warn=(impl == 'cp')): + d = 'd' + if get_flag('WITH_PYMALLOC', + lambda: impl == 'cp', + warn=(impl == 'cp')): + m = 'm' + if get_flag('Py_UNICODE_SIZE', + lambda: sys.maxunicode == 0x10ffff, + expected=4, + warn=(impl == 'cp' and + sys.version_info < (3, 3))) \ + and sys.version_info < (3, 3): + u = 'u' + abi = '%s%s%s%s%s' % (impl, get_impl_ver(), d, m, u) + elif soabi and soabi.startswith('cpython-'): + abi = 'cp' + soabi.split('-')[1] + elif soabi: + abi = soabi.replace('.', '_').replace('-', '_') + else: + abi = None + return abi + + +def get_platform(): + """Return our platform name 'win32', 'linux_x86_64'""" + # XXX remove distutils dependency + result = distutils.util.get_platform().replace('.', '_').replace('-', '_') + if result == "linux_x86_64" and sys.maxsize == 2147483647: + # pip pull request #3497 + result = "linux_i686" + return result + + +def get_supported(versions=None, supplied_platform=None): + """Return a list of supported tags for each version specified in + `versions`. + + :param versions: a list of string versions, of the form ["33", "32"], + or None. The first version will be assumed to support our ABI. + """ + supported = [] + + # Versions must be given with respect to the preference + if versions is None: + versions = [] + version_info = get_impl_version_info() + major = version_info[:-1] + # Support all previous minor Python versions. + for minor in range(version_info[-1], -1, -1): + versions.append(''.join(map(str, major + (minor,)))) + + impl = get_abbr_impl() + + abis = [] + + abi = get_abi_tag() + if abi: + abis[0:0] = [abi] + + abi3s = set() + import imp + for suffix in imp.get_suffixes(): + if suffix[0].startswith('.abi'): + abi3s.add(suffix[0].split('.', 2)[1]) + + abis.extend(sorted(list(abi3s))) + + abis.append('none') + + platforms = [] + if supplied_platform: + platforms.append(supplied_platform) + platforms.append(get_platform()) + + # Current version, current API (built specifically for our Python): + for abi in abis: + for arch in platforms: + supported.append(('%s%s' % (impl, versions[0]), abi, arch)) + + # abi3 modules compatible with older version of Python + for version in versions[1:]: + # abi3 was introduced in Python 3.2 + if version in ('31', '30'): + break + for abi in abi3s: # empty set if not Python 3 + for arch in platforms: + supported.append(("%s%s" % (impl, version), abi, arch)) + + # No abi / arch, but requires our implementation: + for i, version in enumerate(versions): + supported.append(('%s%s' % (impl, version), 'none', 'any')) + if i == 0: + # Tagged specifically as being cross-version compatible + # (with just the major version specified) + supported.append(('%s%s' % (impl, versions[0][0]), 'none', 'any')) + + # Major Python version + platform; e.g. 
binaries not using the Python API + supported.append(('py%s' % (versions[0][0]), 'none', arch)) + + # No abi / arch, generic Python + for i, version in enumerate(versions): + supported.append(('py%s' % (version,), 'none', 'any')) + if i == 0: + supported.append(('py%s' % (version[0]), 'none', 'any')) + + return supported diff --git a/env/lib/python3.6/site-packages/wheel/pkginfo.py b/env/lib/python3.6/site-packages/wheel/pkginfo.py new file mode 100644 index 0000000..115be45 --- /dev/null +++ b/env/lib/python3.6/site-packages/wheel/pkginfo.py @@ -0,0 +1,43 @@ +"""Tools for reading and writing PKG-INFO / METADATA without caring +about the encoding.""" + +from email.parser import Parser + +try: + unicode + _PY3 = False +except NameError: + _PY3 = True + +if not _PY3: + from email.generator import Generator + + def read_pkg_info_bytes(bytestr): + return Parser().parsestr(bytestr) + + def read_pkg_info(path): + with open(path, "r") as headers: + message = Parser().parse(headers) + return message + + def write_pkg_info(path, message): + with open(path, 'w') as metadata: + Generator(metadata, mangle_from_=False, maxheaderlen=0).flatten(message) +else: + from email.generator import BytesGenerator + + def read_pkg_info_bytes(bytestr): + headers = bytestr.decode(encoding="ascii", errors="surrogateescape") + message = Parser().parsestr(headers) + return message + + def read_pkg_info(path): + with open(path, "r", + encoding="ascii", + errors="surrogateescape") as headers: + message = Parser().parse(headers) + return message + + def write_pkg_info(path, message): + with open(path, "wb") as out: + BytesGenerator(out, mangle_from_=False, maxheaderlen=0).flatten(message) diff --git a/env/lib/python3.6/site-packages/wheel/signatures/djbec.py b/env/lib/python3.6/site-packages/wheel/signatures/djbec.py new file mode 100644 index 0000000..e9b3115 --- /dev/null +++ b/env/lib/python3.6/site-packages/wheel/signatures/djbec.py @@ -0,0 +1,323 @@ +# Ed25519 digital signatures +# Based on https://ed25519.cr.yp.to/python/ed25519.py +# See also https://ed25519.cr.yp.to/software.html +# Adapted by Ron Garret +# Sped up considerably using coordinate transforms found on: +# https://www.hyperelliptic.org/EFD/g1p/auto-twisted-extended-1.html +# Specifically add-2008-hwcd-4 and dbl-2008-hwcd + +import hashlib +import random + +try: # pragma nocover + unicode + PY3 = False + + def asbytes(b): + """Convert array of integers to byte string""" + return ''.join(chr(x) for x in b) + + def joinbytes(b): + """Convert array of bytes to byte string""" + return ''.join(b) + + def bit(h, i): + """Return i'th bit of bytestring h""" + return (ord(h[i // 8]) >> (i % 8)) & 1 +except NameError: # pragma nocover + PY3 = True + asbytes = bytes + joinbytes = bytes + + def bit(h, i): + return (h[i // 8] >> (i % 8)) & 1 + +b = 256 +q = 2 ** 255 - 19 +l = 2 ** 252 + 27742317777372353535851937790883648493 # noqa: E741 + + +def H(m): + return hashlib.sha512(m).digest() + + +def expmod(b, e, m): + if e == 0: + return 1 + + t = expmod(b, e // 2, m) ** 2 % m + if e & 1: + t = (t * b) % m + + return t + + +# Can probably get some extra speedup here by replacing this with +# an extended-euclidean, but performance seems OK without that +def inv(x): + return expmod(x, q - 2, q) + + +d = -121665 * inv(121666) +I = expmod(2, (q - 1) // 4, q) # noqa: E741 + + +def xrecover(y): + xx = (y * y - 1) * inv(d * y * y + 1) + x = expmod(xx, (q + 3) // 8, q) + if (x * x - xx) % q != 0: + x = (x * I) % q + + if x % 2 != 0: + x = q - x + + return x + + +By = 4 * 
inv(5) +Bx = xrecover(By) +B = [Bx % q, By % q] + + +# def edwards(P,Q): +# x1 = P[0] +# y1 = P[1] +# x2 = Q[0] +# y2 = Q[1] +# x3 = (x1*y2+x2*y1) * inv(1+d*x1*x2*y1*y2) +# y3 = (y1*y2+x1*x2) * inv(1-d*x1*x2*y1*y2) +# return (x3 % q,y3 % q) + +# def scalarmult(P,e): +# if e == 0: return [0,1] +# Q = scalarmult(P,e/2) +# Q = edwards(Q,Q) +# if e & 1: Q = edwards(Q,P) +# return Q + +# Faster (!) version based on: +# https://www.hyperelliptic.org/EFD/g1p/auto-twisted-extended-1.html + +def xpt_add(pt1, pt2): + (X1, Y1, Z1, T1) = pt1 + (X2, Y2, Z2, T2) = pt2 + A = ((Y1 - X1) * (Y2 + X2)) % q + B = ((Y1 + X1) * (Y2 - X2)) % q + C = (Z1 * 2 * T2) % q + D = (T1 * 2 * Z2) % q + E = (D + C) % q + F = (B - A) % q + G = (B + A) % q + H = (D - C) % q + X3 = (E * F) % q + Y3 = (G * H) % q + Z3 = (F * G) % q + T3 = (E * H) % q + return (X3, Y3, Z3, T3) + + +def xpt_double(pt): + (X1, Y1, Z1, _) = pt + A = (X1 * X1) + B = (Y1 * Y1) + C = (2 * Z1 * Z1) + D = (-A) % q + J = (X1 + Y1) % q + E = (J * J - A - B) % q + G = (D + B) % q + F = (G - C) % q + H = (D - B) % q + X3 = (E * F) % q + Y3 = (G * H) % q + Z3 = (F * G) % q + T3 = (E * H) % q + return X3, Y3, Z3, T3 + + +def pt_xform(pt): + (x, y) = pt + return x, y, 1, (x * y) % q + + +def pt_unxform(pt): + (x, y, z, _) = pt + return (x * inv(z)) % q, (y * inv(z)) % q + + +def xpt_mult(pt, n): + if n == 0: + return pt_xform((0, 1)) + + _ = xpt_double(xpt_mult(pt, n >> 1)) + return xpt_add(_, pt) if n & 1 else _ + + +def scalarmult(pt, e): + return pt_unxform(xpt_mult(pt_xform(pt), e)) + + +def encodeint(y): + bits = [(y >> i) & 1 for i in range(b)] + e = [(sum([bits[i * 8 + j] << j for j in range(8)])) + for i in range(b // 8)] + return asbytes(e) + + +def encodepoint(P): + x = P[0] + y = P[1] + bits = [(y >> i) & 1 for i in range(b - 1)] + [x & 1] + e = [(sum([bits[i * 8 + j] << j for j in range(8)])) + for i in range(b // 8)] + return asbytes(e) + + +def publickey(sk): + h = H(sk) + a = 2 ** (b - 2) + sum(2 ** i * bit(h, i) for i in range(3, b - 2)) + A = scalarmult(B, a) + return encodepoint(A) + + +def Hint(m): + h = H(m) + return sum(2 ** i * bit(h, i) for i in range(2 * b)) + + +def signature(m, sk, pk): + h = H(sk) + a = 2 ** (b - 2) + sum(2 ** i * bit(h, i) for i in range(3, b - 2)) + inter = joinbytes([h[i] for i in range(b // 8, b // 4)]) + r = Hint(inter + m) + R = scalarmult(B, r) + S = (r + Hint(encodepoint(R) + pk + m) * a) % l + return encodepoint(R) + encodeint(S) + + +def isoncurve(P): + x = P[0] + y = P[1] + return (-x * x + y * y - 1 - d * x * x * y * y) % q == 0 + + +def decodeint(s): + return sum(2 ** i * bit(s, i) for i in range(0, b)) + + +def decodepoint(s): + y = sum(2 ** i * bit(s, i) for i in range(0, b - 1)) + x = xrecover(y) + if x & 1 != bit(s, b - 1): + x = q - x + + P = [x, y] + if not isoncurve(P): + raise Exception("decoding point that is not on curve") + + return P + + +def checkvalid(s, m, pk): + if len(s) != b // 4: + raise Exception("signature length is wrong") + if len(pk) != b // 8: + raise Exception("public-key length is wrong") + + R = decodepoint(s[0:b // 8]) + A = decodepoint(pk) + S = decodeint(s[b // 8:b // 4]) + h = Hint(encodepoint(R) + pk + m) + v1 = scalarmult(B, S) + # v2 = edwards(R,scalarmult(A,h)) + v2 = pt_unxform(xpt_add(pt_xform(R), pt_xform(scalarmult(A, h)))) + return v1 == v2 + + +########################################################## +# +# Curve25519 reference implementation by Matthew Dempsky, from: +# https://cr.yp.to/highspeed/naclcrypto-20090310.pdf + +# P = 2 ** 255 - 19 +P = q +A = 
486662 + + +# def expmod(b, e, m): +# if e == 0: return 1 +# t = expmod(b, e / 2, m) ** 2 % m +# if e & 1: t = (t * b) % m +# return t + +# def inv(x): return expmod(x, P - 2, P) + + +def add(n, m, d): + (xn, zn) = n + (xm, zm) = m + (xd, zd) = d + x = 4 * (xm * xn - zm * zn) ** 2 * zd + z = 4 * (xm * zn - zm * xn) ** 2 * xd + return (x % P, z % P) + + +def double(n): + (xn, zn) = n + x = (xn ** 2 - zn ** 2) ** 2 + z = 4 * xn * zn * (xn ** 2 + A * xn * zn + zn ** 2) + return (x % P, z % P) + + +def curve25519(n, base=9): + one = (base, 1) + two = double(one) + + # f(m) evaluates to a tuple + # containing the mth multiple and the + # (m+1)th multiple of base. + def f(m): + if m == 1: + return (one, two) + + (pm, pm1) = f(m // 2) + if m & 1: + return (add(pm, pm1, one), double(pm1)) + + return (double(pm), add(pm, pm1, one)) + + ((x, z), _) = f(n) + return (x * inv(z)) % P + + +def genkey(n=0): + n = n or random.randint(0, P) + n &= ~7 + n &= ~(128 << 8 * 31) + n |= 64 << 8 * 31 + return n + + +# def str2int(s): +# return int(hexlify(s), 16) +# # return sum(ord(s[i]) << (8 * i) for i in range(32)) +# +# def int2str(n): +# return unhexlify("%x" % n) +# # return ''.join([chr((n >> (8 * i)) & 255) for i in range(32)]) + +################################################# + + +def dsa_test(): + import os + msg = str(random.randint(q, q + q)).encode('utf-8') + sk = os.urandom(32) + pk = publickey(sk) + sig = signature(msg, sk, pk) + return checkvalid(sig, msg, pk) + + +def dh_test(): + sk1 = genkey() + sk2 = genkey() + return curve25519(sk1, curve25519(sk2)) == curve25519(sk2, curve25519(sk1)) diff --git a/env/lib/python3.6/site-packages/wheel/signatures/ed25519py.py b/env/lib/python3.6/site-packages/wheel/signatures/ed25519py.py new file mode 100644 index 0000000..0c4ab8f --- /dev/null +++ b/env/lib/python3.6/site-packages/wheel/signatures/ed25519py.py @@ -0,0 +1,50 @@ +import os +import warnings +from collections import namedtuple + +from . import djbec + +__all__ = ['crypto_sign', 'crypto_sign_open', 'crypto_sign_keypair', 'Keypair', + 'PUBLICKEYBYTES', 'SECRETKEYBYTES', 'SIGNATUREBYTES'] + +PUBLICKEYBYTES = 32 +SECRETKEYBYTES = 64 +SIGNATUREBYTES = 64 + +Keypair = namedtuple('Keypair', ('vk', 'sk')) # verifying key, secret key + + +def crypto_sign_keypair(seed=None): + """Return (verifying, secret) key from a given seed, or os.urandom(32)""" + if seed is None: + seed = os.urandom(PUBLICKEYBYTES) + else: + warnings.warn("ed25519ll should choose random seed.", + RuntimeWarning) + if len(seed) != 32: + raise ValueError("seed must be 32 random bytes or None.") + skbytes = seed + vkbytes = djbec.publickey(skbytes) + return Keypair(vkbytes, skbytes+vkbytes) + + +def crypto_sign(msg, sk): + """Return signature+message given message and secret key. + The signature is the first SIGNATUREBYTES bytes of the return value. 
+ A copy of msg is in the remainder.""" + if len(sk) != SECRETKEYBYTES: + raise ValueError("Bad signing key length %d" % len(sk)) + vkbytes = sk[PUBLICKEYBYTES:] + skbytes = sk[:PUBLICKEYBYTES] + sig = djbec.signature(msg, skbytes, vkbytes) + return sig + msg + + +def crypto_sign_open(signed, vk): + """Return message given signature+message and the verifying key.""" + if len(vk) != PUBLICKEYBYTES: + raise ValueError("Bad verifying key length %d" % len(vk)) + rc = djbec.checkvalid(signed[:SIGNATUREBYTES], signed[SIGNATUREBYTES:], vk) + if not rc: + raise ValueError("rc != True", rc) + return signed[SIGNATUREBYTES:] diff --git a/env/lib/python3.6/site-packages/wheel/signatures/keys.py b/env/lib/python3.6/site-packages/wheel/signatures/keys.py new file mode 100644 index 0000000..eb5d4ac --- /dev/null +++ b/env/lib/python3.6/site-packages/wheel/signatures/keys.py @@ -0,0 +1,101 @@ +"""Store and retrieve wheel signing / verifying keys. + +Given a scope (a package name, + meaning "all packages", or - meaning +"no packages"), return a list of verifying keys that are trusted for that +scope. + +Given a package name, return a list of (scope, key) suggested keys to sign +that package (only the verifying keys; the private signing key is stored +elsewhere). + +Keys here are represented as urlsafe_b64encoded strings with no padding. + +Tentative command line interface: + +# list trusts +wheel trust +# trust a particular key for all +wheel trust + key +# trust key for beaglevote +wheel trust beaglevote key +# stop trusting a key for all +wheel untrust + key + +# generate a key pair +wheel keygen + +# import a signing key from a file +wheel import keyfile + +# export a signing key +wheel export key +""" + +import json +import os.path + +from ..util import native, load_config_paths, save_config_path + + +class WheelKeys(object): + SCHEMA = 1 + CONFIG_NAME = 'wheel.json' + + def __init__(self): + self.data = {'signers': [], 'verifiers': []} + + def load(self): + # XXX JSON is not a great database + for path in load_config_paths('wheel'): + conf = os.path.join(native(path), self.CONFIG_NAME) + if os.path.exists(conf): + with open(conf, 'r') as infile: + self.data = json.load(infile) + for x in ('signers', 'verifiers'): + if x not in self.data: + self.data[x] = [] + if 'schema' not in self.data: + self.data['schema'] = self.SCHEMA + elif self.data['schema'] != self.SCHEMA: + raise ValueError( + "Bad wheel.json version {0}, expected {1}".format( + self.data['schema'], self.SCHEMA)) + break + return self + + def save(self): + # Try not to call this a very long time after load() + path = save_config_path('wheel') + conf = os.path.join(native(path), self.CONFIG_NAME) + with open(conf, 'w+') as out: + json.dump(self.data, out, indent=2) + return self + + def trust(self, scope, vk): + """Start trusting a particular key for given scope.""" + self.data['verifiers'].append({'scope': scope, 'vk': vk}) + return self + + def untrust(self, scope, vk): + """Stop trusting a particular key for given scope.""" + self.data['verifiers'].remove({'scope': scope, 'vk': vk}) + return self + + def trusted(self, scope=None): + """Return list of [(scope, trusted key), ...] 
for given scope.""" + trust = [(x['scope'], x['vk']) for x in self.data['verifiers'] + if x['scope'] in (scope, '+')] + trust.sort(key=lambda x: x[0]) + trust.reverse() + return trust + + def signers(self, scope): + """Return list of signing key(s).""" + sign = [(x['scope'], x['vk']) for x in self.data['signers'] if x['scope'] in (scope, '+')] + sign.sort(key=lambda x: x[0]) + sign.reverse() + return sign + + def add_signer(self, scope, vk): + """Remember verifying key vk as being valid for signing in scope.""" + self.data['signers'].append({'scope': scope, 'vk': vk}) diff --git a/env/lib/python3.6/site-packages/wheel/util.py b/env/lib/python3.6/site-packages/wheel/util.py new file mode 100644 index 0000000..71802bf --- /dev/null +++ b/env/lib/python3.6/site-packages/wheel/util.py @@ -0,0 +1,156 @@ +"""Utility functions.""" + +import base64 +import hashlib +import json +import os +import sys + +__all__ = ['urlsafe_b64encode', 'urlsafe_b64decode', 'utf8', + 'to_json', 'from_json', 'matches_requirement'] + + +# For encoding ascii back and forth between bytestrings, as is repeatedly +# necessary in JSON-based crypto under Python 3 +if sys.version_info[0] < 3: + text_type = unicode # noqa: F821 + + def native(s, encoding='ascii'): + return s +else: + text_type = str + + def native(s, encoding='ascii'): + if isinstance(s, bytes): + return s.decode(encoding) + return s + + +def urlsafe_b64encode(data): + """urlsafe_b64encode without padding""" + return base64.urlsafe_b64encode(data).rstrip(binary('=')) + + +def urlsafe_b64decode(data): + """urlsafe_b64decode without padding""" + pad = b'=' * (4 - (len(data) & 3)) + return base64.urlsafe_b64decode(data + pad) + + +def to_json(o): + """Convert given data to JSON.""" + return json.dumps(o, sort_keys=True) + + +def from_json(j): + """Decode a JSON payload.""" + return json.loads(j) + + +def open_for_csv(name, mode): + if sys.version_info[0] < 3: + kwargs = {} + mode += 'b' + else: + kwargs = {'newline': '', 'encoding': 'utf-8'} + + return open(name, mode, **kwargs) + + +def utf8(data): + """Utf-8 encode data.""" + if isinstance(data, text_type): + return data.encode('utf-8') + return data + + +def binary(s): + if isinstance(s, text_type): + return s.encode('ascii') + return s + + +class HashingFile(object): + def __init__(self, path, mode, hashtype='sha256'): + self.fd = open(path, mode) + self.hashtype = hashtype + self.hash = hashlib.new(hashtype) + self.length = 0 + + def write(self, data): + self.hash.update(data) + self.length += len(data) + self.fd.write(data) + + def close(self): + self.fd.close() + + def digest(self): + if self.hashtype == 'md5': + return self.hash.hexdigest() + digest = self.hash.digest() + return self.hashtype + '=' + native(urlsafe_b64encode(digest)) + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + self.fd.close() + + +if sys.platform == 'win32': + import ctypes.wintypes + # CSIDL_APPDATA for reference - not used here for compatibility with + # dirspec, which uses LOCAL_APPDATA and COMMON_APPDATA in that order + csidl = {'CSIDL_APPDATA': 26, 'CSIDL_LOCAL_APPDATA': 28, 'CSIDL_COMMON_APPDATA': 35} + + def get_path(name): + SHGFP_TYPE_CURRENT = 0 + buf = ctypes.create_unicode_buffer(ctypes.wintypes.MAX_PATH) + ctypes.windll.shell32.SHGetFolderPathW(0, csidl[name], 0, SHGFP_TYPE_CURRENT, buf) + return buf.value + + def save_config_path(*resource): + appdata = get_path("CSIDL_LOCAL_APPDATA") + path = os.path.join(appdata, *resource) + if not os.path.isdir(path): + os.makedirs(path) + 
return path + + def load_config_paths(*resource): + ids = ["CSIDL_LOCAL_APPDATA", "CSIDL_COMMON_APPDATA"] + for id in ids: + base = get_path(id) + path = os.path.join(base, *resource) + if os.path.exists(path): + yield path +else: + def save_config_path(*resource): + import xdg.BaseDirectory + return xdg.BaseDirectory.save_config_path(*resource) + + def load_config_paths(*resource): + import xdg.BaseDirectory + return xdg.BaseDirectory.load_config_paths(*resource) + + +def matches_requirement(req, wheels): + """List of wheels matching a requirement. + + :param req: The requirement to satisfy + :param wheels: List of wheels to search. + """ + try: + from pkg_resources import Distribution, Requirement + except ImportError: + raise RuntimeError("Cannot use requirements without pkg_resources") + + req = Requirement.parse(req) + + selected = [] + for wf in wheels: + f = wf.parsed_filename + dist = Distribution(project_name=f.group("name"), version=f.group("ver")) + if dist in req: + selected.append(wf) + return selected diff --git a/env/lib/python3.6/site-packages/wheel/wininst2wheel.py b/env/lib/python3.6/site-packages/wheel/wininst2wheel.py new file mode 100644 index 0000000..16efa21 --- /dev/null +++ b/env/lib/python3.6/site-packages/wheel/wininst2wheel.py @@ -0,0 +1,219 @@ +#!/usr/bin/env python +import distutils.dist +import os.path +import re +import sys +import tempfile +import zipfile +from argparse import ArgumentParser +from glob import iglob +from shutil import rmtree + +import wheel.bdist_wheel +from wheel.archive import archive_wheelfile + +egg_info_re = re.compile(r'''(^|/)(?P[^/]+?)-(?P.+?) + (-(?P.+?))?(-(?P.+?))?.egg-info(/|$)''', re.VERBOSE) + + +def parse_info(wininfo_name, egginfo_name): + """Extract metadata from filenames. + + Extracts the 4 metadataitems needed (name, version, pyversion, arch) from + the installer filename and the name of the egg-info directory embedded in + the zipfile (if any). + + The egginfo filename has the format:: + + name-ver(-pyver)(-arch).egg-info + + The installer filename has the format:: + + name-ver.arch(-pyver).exe + + Some things to note: + + 1. The installer filename is not definitive. An installer can be renamed + and work perfectly well as an installer. So more reliable data should + be used whenever possible. + 2. The egg-info data should be preferred for the name and version, because + these come straight from the distutils metadata, and are mandatory. + 3. The pyver from the egg-info data should be ignored, as it is + constructed from the version of Python used to build the installer, + which is irrelevant - the installer filename is correct here (even to + the point that when it's not there, any version is implied). + 4. The architecture must be taken from the installer filename, as it is + not included in the egg-info data. + 5. Architecture-neutral installers still have an architecture because the + installer format itself (being executable) is architecture-specific. We + should therefore ignore the architecture if the content is pure-python. + """ + + egginfo = None + if egginfo_name: + egginfo = egg_info_re.search(egginfo_name) + if not egginfo: + raise ValueError("Egg info filename %s is not valid" % (egginfo_name,)) + + # Parse the wininst filename + # 1. Distribution name (up to the first '-') + w_name, sep, rest = wininfo_name.partition('-') + if not sep: + raise ValueError("Installer filename %s is not valid" % (wininfo_name,)) + + # Strip '.exe' + rest = rest[:-4] + # 2. 
Python version (from the last '-', must start with 'py') + rest2, sep, w_pyver = rest.rpartition('-') + if sep and w_pyver.startswith('py'): + rest = rest2 + w_pyver = w_pyver.replace('.', '') + else: + # Not version specific - use py2.py3. While it is possible that + # pure-Python code is not compatible with both Python 2 and 3, there + # is no way of knowing from the wininst format, so we assume the best + # here (the user can always manually rename the wheel to be more + # restrictive if needed). + w_pyver = 'py2.py3' + # 3. Version and architecture + w_ver, sep, w_arch = rest.rpartition('.') + if not sep: + raise ValueError("Installer filename %s is not valid" % (wininfo_name,)) + + if egginfo: + w_name = egginfo.group('name') + w_ver = egginfo.group('ver') + + return {'name': w_name, 'ver': w_ver, 'arch': w_arch, 'pyver': w_pyver} + + +def bdist_wininst2wheel(path, dest_dir=os.path.curdir): + bdw = zipfile.ZipFile(path) + + # Search for egg-info in the archive + egginfo_name = None + for filename in bdw.namelist(): + if '.egg-info' in filename: + egginfo_name = filename + break + + info = parse_info(os.path.basename(path), egginfo_name) + + root_is_purelib = True + for zipinfo in bdw.infolist(): + if zipinfo.filename.startswith('PLATLIB'): + root_is_purelib = False + break + if root_is_purelib: + paths = {'purelib': ''} + else: + paths = {'platlib': ''} + + dist_info = "%(name)s-%(ver)s" % info + datadir = "%s.data/" % dist_info + + # rewrite paths to trick ZipFile into extracting an egg + # XXX grab wininst .ini - between .exe, padding, and first zip file. + members = [] + egginfo_name = '' + for zipinfo in bdw.infolist(): + key, basename = zipinfo.filename.split('/', 1) + key = key.lower() + basepath = paths.get(key, None) + if basepath is None: + basepath = datadir + key.lower() + '/' + oldname = zipinfo.filename + newname = basepath + basename + zipinfo.filename = newname + del bdw.NameToInfo[oldname] + bdw.NameToInfo[newname] = zipinfo + # Collect member names, but omit '' (from an entry like "PLATLIB/" + if newname: + members.append(newname) + # Remember egg-info name for the egg2dist call below + if not egginfo_name: + if newname.endswith('.egg-info'): + egginfo_name = newname + elif '.egg-info/' in newname: + egginfo_name, sep, _ = newname.rpartition('/') + dir = tempfile.mkdtemp(suffix="_b2w") + bdw.extractall(dir, members) + + # egg2wheel + abi = 'none' + pyver = info['pyver'] + arch = (info['arch'] or 'any').replace('.', '_').replace('-', '_') + # Wininst installers always have arch even if they are not + # architecture-specific (because the format itself is). + # So, assume the content is architecture-neutral if root is purelib. + if root_is_purelib: + arch = 'any' + # If the installer is architecture-specific, it's almost certainly also + # CPython-specific. 
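+# A rough sketch (not from the upstream wheel source) of what parse_info()
+# above returns for two hypothetical installer filenames:
+#
+#     >>> parse_info('foo-1.2.win32-py3.6.exe', 'foo-1.2.egg-info')
+#     {'name': 'foo', 'ver': '1.2', 'arch': 'win32', 'pyver': 'py36'}
+#     >>> parse_info('bar-0.9.win-amd64.exe', None)
+#     {'name': 'bar', 'ver': '0.9', 'arch': 'win-amd64', 'pyver': 'py2.py3'}
+#
+# If the first installer carries PLATLIB content (so root_is_purelib is False),
+# the rewrite just below turns 'py36' into 'cp36' and the resulting archive
+# would be tagged roughly foo-1.2-cp36-none-win32.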
+ if arch != 'any': + pyver = pyver.replace('py', 'cp') + wheel_name = '-'.join(( + dist_info, + pyver, + abi, + arch + )) + if root_is_purelib: + bw = wheel.bdist_wheel.bdist_wheel(distutils.dist.Distribution()) + else: + bw = _bdist_wheel_tag(distutils.dist.Distribution()) + + bw.root_is_pure = root_is_purelib + bw.python_tag = pyver + bw.plat_name_supplied = True + bw.plat_name = info['arch'] or 'any' + + if not root_is_purelib: + bw.full_tag_supplied = True + bw.full_tag = (pyver, abi, arch) + + dist_info_dir = os.path.join(dir, '%s.dist-info' % dist_info) + bw.egg2dist(os.path.join(dir, egginfo_name), dist_info_dir) + bw.write_wheelfile(dist_info_dir, generator='wininst2wheel') + bw.write_record(dir, dist_info_dir) + + archive_wheelfile(os.path.join(dest_dir, wheel_name), dir) + rmtree(dir) + + +class _bdist_wheel_tag(wheel.bdist_wheel.bdist_wheel): + # allow the client to override the default generated wheel tag + # The default bdist_wheel implementation uses python and abi tags + # of the running python process. This is not suitable for + # generating/repackaging prebuild binaries. + + full_tag_supplied = False + full_tag = None # None or a (pytag, soabitag, plattag) triple + + def get_tag(self): + if self.full_tag_supplied and self.full_tag is not None: + return self.full_tag + else: + return super(_bdist_wheel_tag, self).get_tag() + + +def main(): + parser = ArgumentParser() + parser.add_argument('installers', nargs='*', help="Installers to convert") + parser.add_argument('--dest-dir', '-d', default=os.path.curdir, + help="Directory to store wheels (default %(default)s)") + parser.add_argument('--verbose', '-v', action='store_true') + args = parser.parse_args() + for pat in args.installers: + for installer in iglob(pat): + if args.verbose: + print("{}... 
".format(installer)) + sys.stdout.flush() + + bdist_wininst2wheel(installer, args.dest_dir) + if args.verbose: + print("OK") + + +if __name__ == "__main__": + main() diff --git a/env/pip-selfcheck.json b/env/pip-selfcheck.json new file mode 100644 index 0000000..6f2b942 --- /dev/null +++ b/env/pip-selfcheck.json @@ -0,0 +1 @@ +{"last_check":"2017-12-12T16:36:54Z","pypi_version":"9.0.1"} \ No newline at end of file diff --git a/env/pyvenv.cfg b/env/pyvenv.cfg new file mode 100644 index 0000000..43dbd8e --- /dev/null +++ b/env/pyvenv.cfg @@ -0,0 +1,3 @@ +home = /usr/local/bin +include-system-site-packages = false +version = 3.6.3 diff --git a/gJD.png b/gJD.png new file mode 100644 index 0000000000000000000000000000000000000000..c4cebb20b461f318dea8c6db7d5291ebd218e876 GIT binary patch literal 1551 zcmV+q2JrcbP)5WY&3jMqDU(N4R#sMKQMjvBoBKE*fB_6(00S7n0Dd)KM{8NPo9vr9l0tni2?koz%Cg!kOPt)6j?SMw-lYX z0Pim`fKLV?Nk{Jkr0OI2i9VWvQVrsf0&+>C_W@!_a^KK12Jk94kR%JU0CaVRc!&hl z`4Iq6R#~@$Nb)ond~?qj!1GO?HXHZ=5{-bI_XbfeR^rrx1$aHMVwRYl_lrJCjhTX~ zX!n@RICLQn@Ot3D4Jg&*RwN4~y{!k^w2vmr0ZA9zdNApqs%iiq0mhmE)EQ#40BO66 zB(X@3!xo?pX}bd;hj#y5RRj14aM+5d$|mU{(&cq(!@TC+dxPHhs|tiE;Pt>R83C!v zTXF-DnMHst04SO_IBkIP@MKj3_|Bk@1B?KWOMNs$Qj;w1ejiC6mjgf=;sCD)dcUsD zx@4qK6=}PJNLOcFGLV$&v@oWK_Yw*b)n>%X-1e@FYpLcu_r_7H4W0NMyO5%j7Yuz>cb9@-2xtdolg=Ew@8fe!6dpBY7N}lFckna`y0TWeZ2ba7x3dMU>D5*ybvv_mTLSj zResS27z&f<&R$&yv;KFJcumzW+OfY+V$r}PI^cMdM2sd8z<$L9tn?f&5diwLcU-ps z$CZmK;A|4z@=jO4Q`jfzrwx{&wDcUhntSsG%jS|q59;RE;ZA+^cF;fLg|;e@zu7Zp z#~UU7)7|lmC^j}}+zbLhZ z7Qrh9;5<0v56Gc`1Ae~+RPiHlKmmuTSOHHNLAL3y-sb~EqhIb@DxIoGuG zB?A}Z06lP3*R$aNvF0;g0sqr7*6hvKE#O<)_W=5gpm7QFQUH+H7C^pi6Euk|Iq)s3 zfXApBz(;@|13~s@Bw4sGnt{>}tEDG|l(3frPjLYH@$Sv3?*M9G@BM(p;s8Lf0Ivro zT|AMhvSkXe3;{Arx>!P0Hr;O8SVZXi-V1n1&?ouipXCGsD5W+T0jkQ?M-yrJqEA#c zfbR@q$p*Kq4_H7z`gFzzyjzHn5b~N}JVcfR4gh4?VAxW$6j0GN0f!AjR`AP$qR5jE zP-nI;Y{`L?SfJxpL|J7|S2cj|34T!-WOiOXyz}4Vllnm`5)4zotAWc2A&IOyxMeb0 zbuesqpynRa<_qxeS2cj|41Qi6e6-K<#%cr20#FtZczZs;V^j^`13`a6$l~g^jAzgx zZ4*sI#S8<t;L= z6}ca_3gzLi2l8Sz-Fgd$MF%i|0SsUO1Nd!&e*$J8kzT{y*DL@4002ovPDHLkV1iD1 Bxw`-W literal 0 HcmV?d00001 diff --git a/gJE.png b/gJE.png new file mode 100644 index 0000000000000000000000000000000000000000..8843d752b0171eeafea4790e4f62fc2bfd1592f7 GIT binary patch literal 1500 zcmV<21ta>2P)gq#G;vcYgWEXppa(tZK@WP+gWn9;nJc?4p3Ers2aBfws9inO zwq;sc#g8=AgKrEXNo^!as?yfgWoii}fTTaxM5F0b(ua~%+LkFm z#S>2ez|3}!Hr0b~3T~dg>wX>qGBLaxF*3R>!3>b})7vO|@LL84Go#!C$fVM?(I_LY z?xyI$$AX{D0V16`e3Oc-N&0Yh@BJKL#o%COW_w@?Ag|Ik8o^X%hhzvvUZpL_%*?ir zHPwT6028^=Bt0P8!w^lTBCFFbnq3Xy(yD)?sUCb|kmXyQk*ZWZ9AB6l3!)JuHAI?$ zticZfYXUzx0A!Mm13jUng0uzGGANi-fOmiuf`bK+mpg`NhH`({Mx)H6CXrhh)@dJ3 z2BdxnSRojVCdisDMh11J{EdukMJDxAD4ChX zQz%*9JlIqZ-UEz=09o#4_*P_f+F2+{+jVg`nMrD0``S`J1gr^MCV`_p3kEewA4+b{ zKQ22$8bO6$1*{Bov4G*-WRTU{s8=BB>=0wF!LY$x%^x;fUZF>s>cM-1b*3JZ+5V9B z;fz#f%Z5PKKscFff7pHtrhXl8^Kx|;6JUUW?4_j2@9i18pGTqcKNZ8f|Gn^0Mn=+4 z8=&41_kT;#gWn)HY`|RX4^({KNChx#OjdB5C)~qSn~`)bPx+Ff-KdBtZUgOB@M0SF zofKaK6Aj_g&-0{9Gbqh)%YQ)T;)8FXc&2>K|7up;s91fRx#bx{-aEw$*TqwS72Bom)T-ROzoznXPT;U#6m!kjn7ZqKbBYCA1=99T@oa-L_4|e! 
z6&amkait>FqIjXa`s=vT)Y&N(rJb42IIxS|qE9X1jEdr@xD*x8#XOtuyr{T<*ZXR5 zg|B`FF3)(OoK>`6Z&dt8aZ4|^4fa?|6IK>O#(J>T=dY|+tfb1jmPZ}B7 z;Y0s!M9_R_FDP!`yN|rq)F$3R_|r_0>#uW=3m{R&NPAxH9JnCk)u#50%+OyAj{NyV z_zH05cT~_^t*HG!p<;gtbQ?i^I72SAk!DmtI1)W^fxH4-YEXJPBWMm7E_?YIFgVw_ zT?T)7jXDSZySWjpw&!}&S3r0_-UcSkZHmqF-|@?3*$G^f+y-DGeI}WyJtc|hyaV*` zs;w`A|AqZ6U_R3=k?Uhf>kAdXr2H_T3-IM?h(?*rc0@w}dF9RpNTr>^!R>oIshWN?WD?S>7bw2ImQJI2q;MX9TSf>=MmL zr3bq(f$ss<20oeCOKMNYKsZkRPp1VVNy`e+uL4#Eo^A+9 zx{V3ytaZU(4r#Y>XN6iOC-Md1;5(Y?!MlLV>0pRPnM(?tLYYc;X^a(ciUl0}Dqv0E z64;nFkpByYc!>f>0?Mk{5st2AFjvz(%2W^DAME1LT@;}1X~;{qKcvTvAb&1k#bDAS z15O)&#}i?v)4_G-hk#Xq&u<8k#?z^A`;;T(O1Q)ed@Nv1pmTubb?OoWave8=Ts$0000#-4d4yIE*loGsdDw>0xOHAvt)wpnykNx!~otIj6eB18ch@p zAQS9Xpaq-1{_tiJ1Nd2iNK)&4fV@riCN|ZRSCIA5YeRW*P&05iGPWPLOtTf>lyCfn5c;R=y|s=ueI0lX0? zgL=gf%b;jThDfH0&SDuG)k|jq*1NC|@KE6F157|6NfXZCyQx2KiR7k$WN%W5cMpOO z@L1ruz*_*5Y}4SpK{X`90x0Udi)BC_rho?ohuD4HUFzq8tREMstZg|YqXBsfV5;cC z6!1vkvt?k(ywOWl>u;EHGQsWqY?k2vfl0ft@9znN%+mjmBnc%R>xkh)qyi z-eWRV!3lgU(3|&YLaK7N40M*?Z4RpkR2B^`K3Ftg&=l}#pv1MEfdTW+x0HA<3y|&M znR)-TxfkCr6~h$pP~hwY;-bKEX#jtErpnd3M`&;E8&{4x@Ha6^lHfpUeT-3Xiyg|(S(&xvs?S|)28j&oWOqquLn7{dx=N??`teT^U|G{-4*7?+jKZG1e_0$ zj*+0KmjEhZetOA#rAz%Ur&XPY=*43-<&|Ug1&Iw@(W@z}_~r@=&_lzo*I0m<6>hj* z2m(;9xWcZkp?vy6Ur@<4nW%r*rN6ri`vp+WcEBk9o%dhuu}#^0+n$tYR>GQ#Rh{?; z@LkoU$sBQh9SDDeDm!-bI5^LB)fINNqJi?_0Z{SdGr<>tjaK~1bol`I<-ZMkemgz@ z+(uBBOjuboS(kVxa9pmMvHTaEnWb#D&iU6KhB%W8cXyFn-}~fE!C;5QfPmfduBry`jX@4+0KQGIQR4!{u0jMPHJN-cC`T=oyC6V-%SBN24vc^I=$zHBzWQakwY3{3P z0N)t&Ie~lkz8~@de6g6CtdHcvP6%Aja_vEO*a-|9K@SNYP6%4ds~hX%XM6^7*MV(?X>3uNnE-eYsj<#SLq zfVTj%&dk_uYD-5m+1gc!nksMwq#-Hv!N8AC2$8P{YkUa6mjt>NeQ{~wUk>=}gph+1 z=no1Yl2nqn$*^#y;J&H`@NL1DQz*{2a9DHz0~o*n1~7o{8~h(UPklPleSY2m0000< KMNUMnLSTZQtoB|2 literal 0 HcmV?d00001 diff --git a/mB.png b/mB.png new file mode 100644 index 0000000000000000000000000000000000000000..86cd86207e16088c2f565ac2526e951ae5ac2525 GIT binary patch literal 1479 zcmV;&1vvVNP)sO?H?HE(a4(v`a~H57fs9z8zc+V7A%k0BmcYBTZYu`_~x3?+MJh)FcPlVN672 z<$je&X0|?W1@Bs81V0%}{jWs84oLoIqO9qWL0NZ<|2UqEMFP6dYH9>O4(!d$Y(tmz ziA>eH%LWtMY7Kz7-MU*LGc(ISx2X|)UoZv8PLFt(Vo>gi!x(bvmxx-xGl891A}AYg_Ng{x8cb5XFAk#_urIn)X6Lo>Xnl;} zObT`YlT?#Q$Ds5jN1$v%sZ!FpOxyvU4opV#(>*3KcmBHY^X_(LtK!zctk7nmG@o#iBwsj}jLjB2yb8I#EXWMV77NFK3|x2N=#ysDxzz<|!cUURB^ zpgx|D0CQ!-vR`nq&rhie7T^m7;oX{51zu_DA6IL< zyQy=jg~QBOvtU*yhq)2G0w#xFzx?#iYmJ$$(WU<~*TU*XFk!LrlRwkR888KNY?xg* zK?ppA$;~u`3jLKp*0OB~*933$2>K7bBhOwrNUk;&{>^`D0Div-Oa)Ct3(QnG1BR~V zH-j9Z_9ObsZ6G*kTW)=jQ*e;y%r2ezK00XWY%{aB2XI^K=|L`=lW{ov zmJsNm&EYUp;(8xBP0U-t;XJA{Y4%vdS#ie>060eu9B%{DQ&Z>u^VL4)`D}Z>F&)eg zf|mjOvf8b)!&$0zc+#ifF6h2h<9V%qnHim#mJHykq9qgK>m{zu<8QRgE}g4`ljq#A zue!^zc{4biNA-`JK+a9A?*Ln%ch|t>08lnT8iX;Wa{Kk(i+r*TFfItT!Rs}CBSrB0 z1G6p#Q&sAajP~{EZL0USLKbcZ{{I@^3*4&7q$BuytG}VfX9FiIp9E$2;H^EAZO|Y6 z{>s?<2}eud6M;bsz!c&Ff~Ze&ya*YX90M*~gC3}l5xkZ{uuq8kS?Y*X7YdOu62K{3V#J{fq{0zxRL>0%|QVG<9dA?^Ur3yx-{Y7NO~=Gf;X zqsca8(&&SRER|7hq9N$Hz$q|9lNR9j`N=8NUk{zMKkpwNcYr4XCp8f8=lcu4FHXWh zb9z5OTmVl9PHzawy41W=XrFo~aQf8bUlHRqggGKIo%Q zy-hef_`r};m@q8+WHJeUYEvWluHZ{8AUFpGU5~9APCLK9D2!_0lYy_sHM}9kmqkY~ hf)R{h1S9y^;GeB%r5vACO(g&T002ovPDHLkV1lVmwu=A& literal 0 HcmV?d00001 diff --git a/mC.png b/mC.png new file mode 100644 index 0000000000000000000000000000000000000000..9b1c3126bb0edcaeb15d1b47a5efbce2fd9209cc GIT binary patch literal 1495 zcmV;|1t|K7P)J3Dnr0q>)paAW&-ptmM z8GEx0Q@Cq=PgNs$6EKq`)j)E>+@Wl<5ll6CpPVq&#c_>ElG^*K8o`@@lbJcldrY$3 
zaWkYK+Z8QS^0ZJev+ccAjo_z(NK%`PKvfqliG1r52}-pj5>QB^2`Ji3lH5+XRQ1gyM(~FK6G;wWN@R|stuX=QJ-~iu6Ln3b-keQin?yG79Zvf`O@Hcs%oRIBc$R?9jhvIzY$vwEIsu8>q7)<|= z%Ajn>hD4^C&Qckb4jVrM1Y7V^_A!F5wJ@wtKpFn<%YhSsUl&l-MVpQ0{e-v*ctCJk zLABp9q$XL_kc}oPHDtpIC=W%K%77x8fX4$nOHFcR)2)Lo$opx9+`%?OHW^T~;A4Yb zGy#tU@)Y)|YRE>DRC~9ON@_W;2NOxIR15??9@wp?6>_E2BQsYv3}z-X+mKA=QvlNn zI9L%)z{7#LMqqE-&O!>{xK2)(E1O)y3bP$uKKcbhGy#tXc9u2~P%@d3%z;^qqyG{EyYXbkl;FjfG0RPOt>kojbplLn;E`2X= zf}EgU8p9Vsa4e3D3t)O_*0=_+*^|9G3PIPbfan&sT z>xJg50N@W-Nd%np59hkc%J;Et*NZA$?Bj-%_30`QFL7y7<@|7_6mi()?Q~Yn8DM-L z!6^S;^uLT)<(faXmn4SwI8xUO5-;@8(Vi&jFRJ=VbM{xn;&*rP;cRfwTm^>rNPVsI z;NK%OulQLPL4vCP;=c<&UwSySO!cDwa9##d&Gi7kRj#Tb6bJH!mVsXZe4Y-bP=Brs zd)Z2#6!`IS0p(L7Bkx~&us4$UVBm8Tz;V+KhBz46s5wONJH)~A{+Iumn5zVTrangS zU7)!T{2XvfBzM?$ z>twR<-cL}~beKtnyilgsB<+4GLDs*Yvh4+#1Ipwx8h$xJS!1+Pl?QxO_vY;><(TY!0&%J9cQ$h_(I zheScUzFoF?`U<7||@c*wmaNByL)GYu2002ovPDHLkV1mEj&~N|% literal 0 HcmV?d00001 diff --git a/mC2.png b/mC2.png new file mode 100644 index 0000000000000000000000000000000000000000..208996a09a8233f59ea5cad267464ab41b5e76db GIT binary patch literal 1578 zcmV+_2G#kAP)i zM~SBZs734@kv)^E6+co`58efgR8_TtA(|0W=k2fqF?o~p;f!dN9%DpRRolmk>cP8! zlZY5dbO=?oI_?Q))YN&)6rL9fB9c8?R1e+}>|%hjNL5D=1D!-Olm$tDmli$vsevjS zDqs>(lh^z6oqM(D!AF9qsz1s2;onn5Zq*=2t0Th$c}v)amICp{vzDQdAG#2~>eP zW7w%c-VhC;idJ;8Q{kZ27!WCr-Vd-T5cgWoK<@rHRB4M<{b@_6Rs}?(s2ZGk;B9~{ zfn7WWsFoSSPE_U4V68!Ih=u^j>$Kac0I632+XIsxQISK_#ZDI_{S=_4mi-V-0;DZ~ zXytqbpIsk4cm?bvno%uf?^M6_1d7SkdIC$4J)zpk;h~~>@Rh-x{g{H8^4|&*C@4cL zvzC%J{g=Oq0t{0yLoJC{0hBGep45-N)BAro_H`Z(P zCYFJ__1j&HEPuRmi;tQCk`W+pI$7TPtv7qE#Sq>RT&Qvl+(E!Q#M}zTAnc*I72IF>?kbo~ zF(!lTBADnX3VFT9K4n{qpMRyGGkX^&Qc;sPFIfe&3dQ@MZWh%vm{l>KZLy+)s=Wmq z$&Fy(yPI0fe+W(W9Y9z8?*h0!R%8Co*8uPN7s(Bxe#>7VxSGJf3KsA+I~UEpFn}Bw zP6Bm)lpTnv3LsedpQ7dUoWQBz$MaSDVo~||y?%eX$gYX{VJTYX`Fz#97_2_HGSyn0 zbx|8eX9IBhd=W?wda7Yw3>J3Quw}PC4leZZNsIBkR^^E+81g33AwUgJTilPRaR$#5 z?eKgbvwpS4l|Ie_``cdRb07Z-G%p7scJfIje;5wWfc;_wW)~jgl|HIsC@ww5D}59w zFwKR>c(IS5c2eDs>ov}kl~VuXzhC{L0y*}#*@sKx`9IVC%Mm%4=9~6%i*Bq|@7_U{ zEcv4^5VfN3QJS#D6tHStU+QBFv~Covj=9TYRp{ZuPkITo&yf*E%q^l;R=OGff2*i5 z2wL#dZK5vk<9{Y2To83x2C8yc@&U}BDjh1oef5Txfo}(l1`$fh_`nD}m-66(0_1x9 zq?UpCGI}c;(z<_(8v}O$-B-VW!ttAE5R_VoMqOM4U!c^IEI|Pu3)n2^oIIixfJ+Q; zJ4AFz4)hs8+X9!Y;2FppH;d*rA;cO3p(^CIj-HCQ0k#At{S>O2I_u(a68GF^I8-D$ z$Z64Q!$S`h)q}4Lx|iDJ{Hv-ehvkmR?MhBnn?zkO$maxZ5A5P_hEnWaU=_(a>k{Oc z_Mz$%^PtQgE2;bBi<3TLGXM+h}#;L^r4=&hS{tKw^!)Xas_+vbwGDQ2+;_l)lozOpw0ldVSEq~ zN^b;i8tmdwRSl`0j|I>fs|(yn@G4-#VA9FylrRyi`f<^LNbOlMJ}Gol;D;xKES|u| zg#g@^KwH8$mlpnZz$Yh!r~o8A?w|miQfQ+gSOSHQ6xD;T3$CY7JYW1_(H``m2R-OP c4}RX@?`FDg#c9WTlK=n!07*qoM6N<$f-^+jg8%>k literal 0 HcmV?d00001 diff --git a/wB.png b/wB.png new file mode 100644 index 0000000000000000000000000000000000000000..afe349f1a1cd1e806e4e012473e43a140f07d412 GIT binary patch literal 1449 zcmV;a1y=frP)Z+nm+|+if+c*ed00S7n00uCC?*=@YYn$VGGy@K1wgPIi^=QV9Gd33gl3?k+tB@STCq2XF_fYRf=IxxZhJ z0elZ&B&hZw5S2R1K69H+4gicZy8ruMSe*8NR<1$1(}&y z_E=E^cn2_*Yfaik6S6Jz$!M~&&QE8E{9WB6MGfGcKpE5<`d9`f5~>iihXfIOIhCj^thWL41|*WMPS-MB(kZCRg;2IMV(X+OsbKHL}sxD0yV z&n)w%|Bw+)SXHQHjO20wk(_dPsHg#aV-QJl_Fa-A6LNEbO1v(8tG~rx6P588+2?{sy0a7`)(6^ugo&@yj zJ(`gAxuiJ)?9MplpsMiiMpZPQ$OOc%pFXZxVVrVjZ`k9bmVxO2UcH}lzvpJ!=jz54 zP;Y_>cq)){jYql9ohqpQea}9hZ9(4NGZ2(W!%g4-QFRL#1Haw^0%W&<18|e-h*?xh z|2r2jn4qY$WUv=?;=^d!3h;R%nW~~m|3e@shCeU%c(bTWZGh<}aOsI&0E>MxaIu_z z0cfDxo|M{Or0_tDHI6Hw8?B}Oy6u;%&4}YzoXntxh_PD2m7=BzDqZ^zyivpce{G}Rr)d_SZVgAx5 zS_Xa|Fd2ZI6{oPzKiaPJNdXES-qkX&kUE#JCS4Hzy@>km!BTaW^kz_-xxlXv5lB1B zyJ$3+|;0ud29{^%gTm62CE9q#etzB=()gea{*q97Oo(~IRlXdg<6*; 
z2)Te~1XJ~qoCb(S^S}FqX-}$t{!+kM*hBVEQ3LqKAm=iGb4{S+BeI2CAV_LbUpLr< zn!x7+k82kRG+U36WWF%qg$X#h2fNK_uiq%Z?;ex4 zNxzyCg^v_9fNu-FUPAHx7B-6xU;qOczyJpDV}pMK9{xXeHts%_00000NkvXXu0mjf Dr9GN_ literal 0 HcmV?d00001 diff --git a/wD.png b/wD.png new file mode 100644 index 0000000000000000000000000000000000000000..ffe65e2137ae7683eb066a0cd84458934470d83b GIT binary patch literal 1369 zcmV-f1*ZCmP)4e1J;fb#?*Mm3+(9o1c7i?XAYp zIe!xqFSUL08V3=KU<4x=!3ak1mjRFF+ZK=kht0{%*zXQ@y)=fO`8%f?!EXUGNm9+x zEERj(LV5-$IxxwV%?3=8)V+7A5&T*ZN$RozsOmx*OkCdk^^(+O)1^$3+;4P`5qu7m zD_clMGY61LGP9!pz7Zq%3xP7K{dTPZ}Go?I0V#ul#_iF1qZLK(#MBkeQin-aFL@egn9cZ!M%}WIH%z zqsgkK=%;^3L$CgwQ;pykgJ^01>prLe^+3=C0Hve>B(+{zq6%0RI3`d@hdez4Wn=3B zfJ1Ryg`TGsnC*%~30=7OcQ2y=ED}up_YQ!$W_!%>fuG`H4Nk7&yQdn# zuLsxO*K4-xJ{=9f|ETJsJFR3;HkKok<52v60zs=!|2-Ha7+_yM;(Ogdslz*cKej4_ zhKT9lWr9A)V_U#<$pu`RtZI*I?+-ScFBx&BWzxSdfxsUxfw#$zr@$4kdj$me>lJXg zbM4_&^ShwW_^2s#N^Vzx(cB7e6$CXw@iV!A0sfxr@Mx<2+_gtfb@ur@e5!Lx`3zXr z*??AXesaWidv7Lpz)KVENifHBPjdWQJ>{781{Q39ZbNd_fuh+N>3~rl{_K8gs=?Kh2-*md{b)g6)B0NAsil|9-`GdrT|XkU?4 z2U}K<%#Hl$dz^hf!3MAdXtwwgP=K?S(%g%})iK;Y7_p{@VP4DK|EyN2INhD>cAH1L`Xg3_l2spLb!J|^rd1VT)>s);MW zqQD>kw|337C{$|wnoxy$(U=TcCU{(HG${R2V6JS69vS-`$F(F`)oijs=8FU0In@Y$ zF*q54<|~4HI=E~vo5h%F0z>I|tO;Bgc$*FmD}Xnsbjc5T*;rqVfhYhA0&m2Us9@v`ms~Y~$<0YhMj+s(~?; zT&a#R!5q^n@w?q)1ZUb0^#DwJz(2TwlUW$uz{P=~EZnzC;h(P$_wIjs#>amKQ#1k> z3f?$